function stringlengths 11 56k | repo_name stringlengths 5 60 | features list |
|---|---|---|
def dummy_export_progress_cb(*args, **kwargs):
pass | jflesch/paperwork-backend | [
19,
8,
19,
1,
1455881499
] |
def __init__(self, obj, export_format):
self.obj = obj
self.export_format = str(export_format)
self.can_change_quality = False | jflesch/paperwork-backend | [
19,
8,
19,
1,
1455881499
] |
def set_postprocess_func(self, postprocess_func):
raise NotImplementedError() | jflesch/paperwork-backend | [
19,
8,
19,
1,
1455881499
] |
def get_img(self):
"""
Returns a Pillow Image
"""
raise NotImplementedError() | jflesch/paperwork-backend | [
19,
8,
19,
1,
1455881499
] |
def get_file_extensions(self):
raise NotImplementedError() | jflesch/paperwork-backend | [
19,
8,
19,
1,
1455881499
] |
def __init__(self, config_path, user):
'''constructor
config_path -- the path where the base configuration is located
user -- the user account or identifier
'''
Cache.Cache.__init__(self, os.path.join(config_path,
user.strip()), 'pictures', True) | emesene/emesene | [
319,
167,
319,
34,
1262172530
] |
def list(self):
'''return a list of tuples (stamp, hash) of the elements on cache
'''
return self.parse().items() | emesene/emesene | [
319,
167,
319,
34,
1262172530
] |
def insert_url(self, url):
'''download and insert a new item into the cache
return the information (stamp, hash) on success None otherwise
item -- a path to an image
'''
path = os.path.join(tempfile.gettempdir(), "avatars")
try:
urlretrieve(url, path)
except IOError:
log.warning("Can't read url avatar")
return None
return self.insert(path) | emesene/emesene | [
319,
167,
319,
34,
1262172530
] |
def __add_entry(self, hash_):
'''add an entry to the information file with the current timestamp
and the hash_ of the file that was saved
return (stamp, hash)
'''
time_info = int(time.time())
handle = file(self.info_path, 'a')
handle.write('%s %s\n' % (str(time_info), hash_))
handle.close()
return time_info, hash_ | emesene/emesene | [
319,
167,
319,
34,
1262172530
] |
def remove(self, item):
'''remove an item from cache
return True on success False otherwise
item -- the name of the image to remove
'''
if item not in self:
return False
os.remove(os.path.join(self.path, item))
self.__remove_entry(item)
return True | emesene/emesene | [
319,
167,
319,
34,
1262172530
] |
def inherit_docs(cls):
for name, func in vars(cls).items():
if not func.__doc__:
for parent in cls.__bases__:
parfunc = getattr(parent, name)
if parfunc and getattr(parfunc, '__doc__', None):
func.__doc__ = parfunc.__doc__
break
return cls | lindemann09/pytrak | [
4,
3,
4,
1,
1402558831
] |
def __init__(self, size, position=None, colour=None):
Canvas.__init__(self, size, position, colour)
self._px_array = None | lindemann09/pytrak | [
4,
3,
4,
1,
1402558831
] |
def surface(self):
"""todo"""
if not self.has_surface:
ok = self._set_surface(self._get_surface()) # create surface
if not ok:
raise RuntimeError(Visual._compression_exception_message.format(
"surface"))
return self._surface | lindemann09/pytrak | [
4,
3,
4,
1,
1402558831
] |
def pixel_array(self):
"""todo"""
if self._px_array is None:
self._px_array = pygame.PixelArray(self.surface)
return self._px_array | lindemann09/pytrak | [
4,
3,
4,
1,
1402558831
] |
def pixel_array(self, value):
if self._px_array is None:
self._px_array = pygame.PixelArray(self.surface)
self._px_array = value | lindemann09/pytrak | [
4,
3,
4,
1,
1402558831
] |
def preload(self, inhibit_ogl_compress=False):
self.unlock_pixel_array()
return Canvas.preload(self, inhibit_ogl_compress) | lindemann09/pytrak | [
4,
3,
4,
1,
1402558831
] |
def decompress(self):
self.unlock_pixel_array()
return Canvas.decompress(self) | lindemann09/pytrak | [
4,
3,
4,
1,
1402558831
] |
def clear_surface(self):
self.unlock_pixel_array()
return Canvas.clear_surface(self) | lindemann09/pytrak | [
4,
3,
4,
1,
1402558831
] |
def unload(self, keep_surface=False):
if not keep_surface:
self.unlock_pixel_array()
return Canvas.unload(self, keep_surface) | lindemann09/pytrak | [
4,
3,
4,
1,
1402558831
] |
def scale(self, factors):
self.unlock_pixel_array()
return Canvas.scale(self, factors) | lindemann09/pytrak | [
4,
3,
4,
1,
1402558831
] |
def flip(self, booleans):
self.unlock_pixel_array()
return Canvas.flip(self, booleans) | lindemann09/pytrak | [
4,
3,
4,
1,
1402558831
] |
def scramble(self, grain_size):
self.unlock_pixel_array()
return Canvas.scramble(self, grain_size) | lindemann09/pytrak | [
4,
3,
4,
1,
1402558831
] |
def __init__(self, n_data_rows, data_row_colours,
width=600, y_range=(-100, 100),
background_colour=(180, 180, 180),
marker_colour=(200, 200, 200),
position=None,
axis_colour=None):
self.n_data_rows = n_data_rows
self.data_row_colours = data_row_colours
self.width = width
self.y_range = y_range
self._background_colour = background_colour
self.marker_colour = marker_colour
if axis_colour is None:
self.axis_colour = background_colour
else:
self.axis_colour = axis_colour
self._previous = [None] * n_data_rows
PGSurface.__init__(self, size=(self.width, self._height),
position=position)
self.clear_area() | lindemann09/pytrak | [
4,
3,
4,
1,
1402558831
] |
def y_range(self):
return self.y_range | lindemann09/pytrak | [
4,
3,
4,
1,
1402558831
] |
def y_range(self, values):
"""tuple with lower and upper values"""
self._y_range = values
self._height = self._y_range[1] - self._y_range[0]
self._plot_axis = (self._y_range[0] <= 0 and \
self._y_range[1] >= 0) | lindemann09/pytrak | [
4,
3,
4,
1,
1402558831
] |
def data_row_colours(self):
return self._data_row_colours | lindemann09/pytrak | [
4,
3,
4,
1,
1402558831
] |
def data_row_colours(self, values):
"""data_row_colours: list of colour"""
try:
if not isinstance(values[0], list) and \
not isinstance(values[0], tuple): # one dimensional
values = [values]
except:
values = [[]] # values is not listpixel_array
if len(values) != self.n_data_rows:
raise RuntimeError('Number of data row colour does not match the ' +
'defined number of data rows!')
self._data_row_colours = values | lindemann09/pytrak | [
4,
3,
4,
1,
1402558831
] |
def write_values(self, position, values, set_marker=False):
if set_marker:
self.pixel_array[position, :] = self.marker_colour
else:
self.pixel_array[position, :] = self._background_colour
if self._plot_axis and self.axis_colour != self._background_colour:
self.pixel_array[position, self._y_range[1]:self._y_range[1] + 1] = \
self.axis_colour
for c, plot_value in enumerate(self._y_range[1] - \
np.array(values, dtype=int)):
if plot_value >= 0 and self._previous[c] >= 0 \
and plot_value <= self._height and \
self._previous[c] <= self._height:
if self._previous[c] > plot_value:
self.pixel_array[position,
plot_value:self._previous[c] + 1] = \
self._data_row_colours[c]
else:
self.pixel_array[position,
self._previous[c]:plot_value + 1] = \
self._data_row_colours[c]
self._previous[c] = plot_value | lindemann09/pytrak | [
4,
3,
4,
1,
1402558831
] |
def __init__(self, n_data_rows, data_row_colours,
width=600, y_range=(-100, 100),
background_colour=(80, 80, 80),
marker_colour=(200, 200, 200),
position=None,
axis_colour=None):
super(PlotterThread, self).__init__()
self._plotter = Plotter(n_data_rows=n_data_rows,
data_row_colours=data_row_colours,
width=width, y_range=y_range,
background_colour=background_colour,
marker_colour=marker_colour,
position=position,
axis_colour=axis_colour)
self._new_values = []
self._lock_new_values = threading.Lock()
self._stop_request = threading.Event() | lindemann09/pytrak | [
4,
3,
4,
1,
1402558831
] |
def stop(self):
self.join() | lindemann09/pytrak | [
4,
3,
4,
1,
1402558831
] |
def run(self):
"""the plotter thread is constantly updating the the
pixel_area"""
while not self._stop_request.is_set():
# get data
if self._lock_new_values.acquire(False):
values = self._new_values
self._new_values = []
self._lock_new_values.release() # release to receive new values
else:
values = []
n = len(values)
if n > 0:
if n > self._plotter.width:
values = values[-1 * self._plotter.width:] # only the last
n = len(values)
self._plotter.pixel_array[:-1 * n, :] = \
self._plotter.pixel_array[n:, :]
for x in range(-1 * n, 0):
self._plotter.write_values(position=x,
values=values[x][0],
set_marker=values[x][1])
# Expyriment present
lock_expyriment.acquire()
self._plotter.present(update=False, clear=False)
lock_expyriment.release() | lindemann09/pytrak | [
4,
3,
4,
1,
1402558831
] |
def __init__(self,root):
commands = map(
lambda m: m[8:].replace('_','-'),
filter(
lambda m: m.startswith('command_'),
runner.__dict__.keys())
)
commands.sort()
commands = "commands: %s" % ', '.join(commands) | kelvindk/Video-Stabilization | [
1,
3,
1,
1,
1494752444
] |
def command_cleanup(self,*args):
if not args or args == None or args == []: args = [ 'source', 'bin' ]
if 'source' in args:
self.log( 'Cleaning up "%s" directory ...' % self.boost_root )
self.rmtree( self.boost_root )
if 'bin' in args:
boost_bin_dir = os.path.join( self.boost_root, 'bin' )
self.log( 'Cleaning up "%s" directory ...' % boost_bin_dir )
self.rmtree( boost_bin_dir )
boost_binv2_dir = os.path.join( self.boost_root, 'bin.v2' )
self.log( 'Cleaning up "%s" directory ...' % boost_binv2_dir )
self.rmtree( boost_binv2_dir )
self.log( 'Cleaning up "%s" directory ...' % self.regression_results )
self.rmtree( self.regression_results ) | kelvindk/Video-Stabilization | [
1,
3,
1,
1,
1494752444
] |
def command_get_tools(self):
#~ Get Boost.Build v2...
self.log( 'Getting Boost.Build v2...' )
if self.user and self.user != '':
os.chdir( os.path.dirname(self.tools_bb_root) )
self.svn_command( 'co %s %s' % (
self.svn_repository_url(repo_path['build']),
os.path.basename(self.tools_bb_root) ) )
else:
self.retry( lambda: self.download_tarball(
os.path.basename(self.tools_bb_root)+".tar.bz2",
self.tarball_url(repo_path['build']) ) )
self.unpack_tarball(
self.tools_bb_root+".tar.bz2",
os.path.basename(self.tools_bb_root) )
#~ Get Boost.Jam...
self.log( 'Getting Boost.Jam...' )
if self.user and self.user != '':
os.chdir( os.path.dirname(self.tools_bjam_root) )
self.svn_command( 'co %s %s' % (
self.svn_repository_url(repo_path['jam']),
os.path.basename(self.tools_bjam_root) ) )
else:
self.retry( lambda: self.download_tarball(
os.path.basename(self.tools_bjam_root)+".tar.bz2",
self.tarball_url(repo_path['jam']) ) )
self.unpack_tarball(
self.tools_bjam_root+".tar.bz2",
os.path.basename(self.tools_bjam_root) )
#~ Get the regression tools and utilities...
self.log( 'Getting regression tools an utilities...' )
if self.user and self.user != '':
os.chdir( os.path.dirname(self.tools_regression_root) )
self.svn_command( 'co %s %s' % (
self.svn_repository_url(repo_path['regression']),
os.path.basename(self.tools_regression_root) ) )
else:
self.retry( lambda: self.download_tarball(
os.path.basename(self.tools_regression_root)+".tar.bz2",
self.tarball_url(repo_path['regression']) ) )
self.unpack_tarball(
self.tools_regression_root+".tar.bz2",
os.path.basename(self.tools_regression_root) ) | kelvindk/Video-Stabilization | [
1,
3,
1,
1,
1494752444
] |
def command_get_source(self):
self.refresh_timestamp()
self.log( 'Getting sources (%s)...' % self.timestamp() )
if self.user and self.user != '':
self.retry( self.svn_checkout )
else:
self.retry( self.get_tarball )
pass | kelvindk/Video-Stabilization | [
1,
3,
1,
1,
1494752444
] |
def command_update_source(self):
if self.user and self.user != '' \
or os.path.exists( os.path.join( self.boost_root, '.svn' ) ):
open( self.timestamp_path, 'w' ).close()
self.log( 'Updating sources from SVN (%s)...' % self.timestamp() )
self.retry( self.svn_update )
else:
self.command_get_source( )
pass | kelvindk/Video-Stabilization | [
1,
3,
1,
1,
1494752444
] |
def command_patch(self):
self.import_utils()
patch_boost_path = os.path.join( self.regression_root, self.patch_boost )
if os.path.exists( patch_boost_path ):
self.log( 'Found patch file "%s". Executing it.' % patch_boost_path )
os.chdir( self.regression_root )
utils.system( [ patch_boost_path ] )
pass | kelvindk/Video-Stabilization | [
1,
3,
1,
1,
1494752444
] |
def command_setup(self):
self.command_patch()
self.build_if_needed(self.bjam,self.bjam_toolset)
if self.pjl_toolset != 'python':
self.build_if_needed(self.process_jam_log,self.pjl_toolset) | kelvindk/Video-Stabilization | [
1,
3,
1,
1,
1494752444
] |
def command_test(self, *args):
if not args or args == None or args == []: args = [ "test", "process" ]
self.import_utils()
self.log( 'Making "%s" directory...' % self.regression_results )
utils.makedirs( self.regression_results )
results_libs = os.path.join( self.regression_results, 'libs' )
results_status = os.path.join( self.regression_results, 'status' )
if "clean" in args:
self.command_test_clean()
if "test" in args:
self.command_test_run()
self.command_test_boost_build()
if "process" in args:
if self.pjl_toolset != 'python':
self.command_test_process() | kelvindk/Video-Stabilization | [
1,
3,
1,
1,
1494752444
] |
def command_test_clean(self):
results_libs = os.path.join( self.regression_results, 'libs' )
results_status = os.path.join( self.regression_results, 'status' )
self.rmtree( results_libs )
self.rmtree( results_status ) | kelvindk/Video-Stabilization | [
1,
3,
1,
1,
1494752444
] |
def command_test_run(self):
self.import_utils()
if self.pjl_toolset != 'python':
test_cmd = '%s -d2 preserve-test-targets=off --dump-tests %s "--build-dir=%s" >>"%s" 2>&1' % (
self.bjam_cmd( self.toolsets ),
self.bjam_options,
self.regression_results,
self.regression_log )
else:
test_cmd = '%s -d1 preserve-test-targets=off --dump-tests --verbose-test %s "--build-dir=%s" "--out-xml=%s"' % (
self.bjam_cmd( self.toolsets ),
self.bjam_options,
self.regression_results,
self.regression_log )
self.log( 'Starting tests (%s)...' % test_cmd )
cd = os.getcwd()
os.chdir( os.path.join( self.boost_root, 'status' ) )
utils.system( [ test_cmd ] )
os.chdir( cd ) | kelvindk/Video-Stabilization | [
1,
3,
1,
1,
1494752444
] |
def command_test_process(self):
self.import_utils()
self.log( 'Getting test case results out of "%s"...' % self.regression_log )
cd = os.getcwd()
os.chdir( os.path.join( self.boost_root, 'status' ) )
utils.checked_system( [
'"%s" "%s" <"%s"' % (
self.tool_path(self.process_jam_log),
self.regression_results,
self.regression_log )
] )
os.chdir( cd ) | kelvindk/Video-Stabilization | [
1,
3,
1,
1,
1494752444
] |
def command_collect_logs(self):
self.import_utils()
comment_path = os.path.join( self.regression_root, self.comment )
if not os.path.exists( comment_path ):
self.log( 'Comment file "%s" not found; creating default comment.' % comment_path )
f = open( comment_path, 'w' )
f.write( '<p>Tests are run on %s platform.</p>' % self.platform_name() )
f.close()
source = 'tarball'
revision = ''
svn_root_file = os.path.join( self.boost_root, '.svn' )
svn_info_file = os.path.join( self.boost_root, 'svn_info.txt' )
if os.path.exists( svn_root_file ):
source = 'SVN'
self.svn_command( 'info --xml "%s" >"%s"' % (self.boost_root,svn_info_file) )
if os.path.exists( svn_info_file ):
f = open( svn_info_file, 'r' )
svn_info = f.read()
f.close()
i = svn_info.find( 'Revision:' )
if i < 0: i = svn_info.find( 'revision=' ) # --xml format
if i >= 0:
i += 10
while svn_info[i] >= '0' and svn_info[i] <= '9':
revision += svn_info[i]
i += 1
if self.pjl_toolset != 'python':
from collect_and_upload_logs import collect_logs
if self.incremental:
run_type = 'incremental'
else:
run_type = 'full'
collect_logs(
self.regression_results,
self.runner, self.tag, self.platform, comment_path,
self.timestamp_path,
self.user,
source, run_type,
self.dart_server, self.proxy,
revision )
else:
from process_jam_log import BJamLog2Results
if self.incremental:
run_type = '--incremental'
else:
run_type = ''
BJamLog2Results([
'--output='+os.path.join(self.regression_results,self.runner+'.xml'),
'--runner='+self.runner,
'--comment='+comment_path,
'--tag='+self.tag,
'--platform='+self.platform,
'--source='+source,
'--revision='+revision,
run_type,
self.regression_log
])
self.compress_file(
os.path.join(self.regression_results,self.runner+'.xml'),
os.path.join(self.regression_results,self.runner+'.zip')
) | kelvindk/Video-Stabilization | [
1,
3,
1,
1,
1494752444
] |
def command_upload_logs(self):
self.import_utils()
from collect_and_upload_logs import upload_logs
if self.ftp:
self.retry(
lambda:
upload_logs(
self.regression_results,
self.runner, self.tag,
self.user,
self.ftp_proxy,
self.debug_level, self.send_bjam_log,
self.timestamp_path,
self.dart_server,
ftp_url = self.ftp )
)
else:
self.retry(
lambda:
upload_logs(
self.regression_results,
self.runner, self.tag,
self.user,
self.ftp_proxy,
self.debug_level, self.send_bjam_log,
self.timestamp_path,
self.dart_server )
) | kelvindk/Video-Stabilization | [
1,
3,
1,
1,
1494752444
] |
def command_regression(self):
import socket
import string
try:
mail_subject = 'Boost regression for %s on %s' % ( self.tag,
string.split(socket.gethostname(), '.')[0] )
start_time = time.localtime()
if self.mail:
self.log( 'Sending start notification to "%s"' % self.mail )
self.send_mail(
'%s started at %s.' % ( mail_subject, format_time( start_time ) )
) | kelvindk/Video-Stabilization | [
1,
3,
1,
1,
1494752444
] |
def command_show_revision(self):
modified = '$Date: 2010-01-13 13:03:18 -0500 (Wed, 13 Jan 2010) $'
revision = '$Revision: 58983 $'
import re
re_keyword_value = re.compile( r'^\$\w+:\s+(.*)\s+\$$' )
print '\n\tRevision: %s' % re_keyword_value.match( revision ).group( 1 )
print '\tLast modified on: %s\n' % re_keyword_value.match( modified ).group( 1 ) | kelvindk/Video-Stabilization | [
1,
3,
1,
1,
1494752444
] |
def main(self):
for action in self.actions:
action_m = "command_"+action.replace('-','_')
if hasattr(self,action_m):
getattr(self,action_m)() | kelvindk/Video-Stabilization | [
1,
3,
1,
1,
1494752444
] |
def log(self,message):
sys.stdout.flush()
sys.stderr.flush()
sys.stderr.write( '# %s\n' % message )
sys.stderr.flush() | kelvindk/Video-Stabilization | [
1,
3,
1,
1,
1494752444
] |
def refresh_timestamp( self ):
if os.path.exists( self.timestamp_path ):
os.unlink( self.timestamp_path )
open( self.timestamp_path, 'w' ).close() | kelvindk/Video-Stabilization | [
1,
3,
1,
1,
1494752444
] |
def retry( self, f, max_attempts=5, sleep_secs=10 ):
for attempts in range( max_attempts, -1, -1 ):
try:
return f()
except Exception, msg:
self.log( '%s failed with message "%s"' % ( f.__name__, msg ) )
if attempts == 0:
self.log( 'Giving up.' )
raise
self.log( 'Retrying (%d more attempts).' % attempts )
time.sleep( sleep_secs ) | kelvindk/Video-Stabilization | [
1,
3,
1,
1,
1494752444
] |
def import_utils(self):
global utils
if utils is None:
sys.path.append( self.xsl_reports_dir )
import utils as utils_module
utils = utils_module | kelvindk/Video-Stabilization | [
1,
3,
1,
1,
1494752444
] |
def tool_path( self, name_or_spec ):
if isinstance( name_or_spec, basestring ):
return os.path.join( self.regression_root, name_or_spec )
if os.path.exists( name_or_spec[ 'path' ] ):
return name_or_spec[ 'path' ]
if name_or_spec.has_key( 'build_path' ):
return name_or_spec[ 'build_path' ]
build_dir = name_or_spec[ 'build_dir' ]
self.log( 'Searching for "%s" in "%s"...' % ( name_or_spec[ 'name' ], build_dir ) )
for root, dirs, files in os.walk( build_dir ):
if name_or_spec[ 'name' ] in files:
return os.path.join( root, name_or_spec[ 'name' ] )
raise Exception( 'Cannot find "%s" in any of the following locations:\n%s' % (
name_or_spec[ 'name' ]
, '\n'.join( [ name_or_spec[ 'path' ], build_dir ] )
) ) | kelvindk/Video-Stabilization | [
1,
3,
1,
1,
1494752444
] |
def bjam_build_cmd( self, *rest ):
if sys.platform == 'win32':
cmd = 'build.bat %s' % self.bjam_toolset
else:
cmd = './build.sh %s' % self.bjam_toolset
env_setup_key = 'BJAM_ENVIRONMENT_SETUP'
if os.environ.has_key( env_setup_key ):
return '%s & %s' % ( os.environ[env_setup_key], cmd )
return cmd | kelvindk/Video-Stabilization | [
1,
3,
1,
1,
1494752444
] |
def bjam_cmd( self, toolsets, args = '', *rest ):
build_path = self.regression_root
if build_path[-1] == '\\': build_path += '\\' | kelvindk/Video-Stabilization | [
1,
3,
1,
1,
1494752444
] |
def send_mail( self, subject, msg = '' ):
import smtplib
if not self.smtp_login:
server_name = 'mail.%s' % mail.split( '@' )[-1]
user_name = None
password = None
else:
server_name = self.smtp_login.split( '@' )[-1]
( user_name, password ) = string.split( self.smtp_login.split( '@' )[0], ':' )
log( ' Sending mail through "%s"...' % server_name )
smtp_server = smtplib.SMTP( server_name )
smtp_server.set_debuglevel( self.debug_level )
if user_name:
smtp_server.login( user_name, password )
smtp_server.sendmail( self.mail, [ self.mail ],
'Subject: %s\nTo: %s\n\n%s' % ( subject, self.mail, msg ) ) | kelvindk/Video-Stabilization | [
1,
3,
1,
1,
1494752444
] |
def svn_checkout( self ):
os.chdir( self.regression_root )
self.svn_command( 'co %s %s' % (self.svn_repository_url(self.tag),'boost') ) | kelvindk/Video-Stabilization | [
1,
3,
1,
1,
1494752444
] |
def svn_command( self, command ):
svn_anonymous_command_line = 'svn --non-interactive %(command)s'
svn_command_line = 'svn --non-interactive --username=%(user)s %(command)s' | kelvindk/Video-Stabilization | [
1,
3,
1,
1,
1494752444
] |
def svn_repository_url( self, path ):
if self.user != 'anonymous' and self.user != '':
return '%s%s' % (repo_root['user'],path)
else:
return '%s%s' % (repo_root['anon'],path) | kelvindk/Video-Stabilization | [
1,
3,
1,
1,
1494752444
] |
def get_tarball( self, *args ):
if not args or args == []:
args = [ 'download', 'unpack' ]
tarball_path = None
if hasattr(self,'local') and self.local is not None:
tarball_path = self.local
elif 'download' in args:
tarball_path = self.download_tarball(self.boost_tarball_name(),self.boost_tarball_url())
if not tarball_path:
tarball_path = os.path.join( self.regression_root, self.boost_tarball_url() )
if 'unpack' in args:
self.unpack_tarball( tarball_path, self.boost_root )
pass | kelvindk/Video-Stabilization | [
1,
3,
1,
1,
1494752444
] |
def tarball_url( self, path ):
return 'http://beta.boost.org/development/snapshot.php/%s' % path | kelvindk/Video-Stabilization | [
1,
3,
1,
1,
1494752444
] |
def boost_tarball_url( self ):
return self.tarball_url( self.tag ) | kelvindk/Video-Stabilization | [
1,
3,
1,
1,
1494752444
] |
def __init__(
self,
data,
name="test",
workdir="analysis-raxml",
*args,
**kwargs):
# path attributes
self._kwargs = {
"f": "a",
"T": 4, # <- change to zero !?
"m": "GTRGAMMA",
"N": 100,
"x": 12345,
"p": 54321,
"o": None,
"binary": "",
}
# update kwargs for user args and drop key if value is None
self._kwargs.update(kwargs)
self._kwargs = {i: j for (i, j) in self._kwargs.items() if j is not None}
# check workdir
if workdir:
workdir = os.path.abspath(os.path.expanduser(workdir))
else:
workdir = os.path.abspath(os.path.curdir)
if not os.path.exists(workdir):
os.makedirs(workdir)
# store entered args in params object
self.params = Params()
self.params.n = name
self.params.w = workdir
self.params.s = os.path.abspath(os.path.expanduser(data))
# if arg append kwargs to top of list of binaries to search for
binaries = _get_binary_paths()
if self._kwargs["binary"]:
binaries = [self._kwargs["binary"]] + binaries
# sefind a binary from the list
self.params.binary = _check_binaries(binaries)
# set params
notparams = set(["workdir", "name", "data", "binary"])
for key in set(self._kwargs.keys()) - notparams:
self.params[key] = self._kwargs[key]
# attributesx
self.rasync = None
self.stdout = None
self.stderr = None
# results files
self.trees = Params()
self.trees.bestTree = OPJ(workdir, "RAxML_bestTree." + name)
self.trees.bipartitionsBranchLabels = OPJ(workdir, "RAxML_bipartitionsBranchLabels." + name)
self.trees.bipartitions = OPJ(workdir, "RAxML_bipartitions." + name)
self.trees.bootstrap = OPJ(workdir, "RAxML_bootstrap." + name)
self.trees.info = OPJ(workdir, "RAxML_info." + name) | dereneaton/ipyrad | [
63,
42,
63,
44,
1444236652
] |
def _command_list(self):
""" build the command list """
cmd = [
self.params.binary,
"-f", str(self.params.f),
"-T", str(self.params.T),
"-m", str(self.params.m),
"-n", str(self.params.n),
"-w", str(self.params.w),
"-s", str(self.params.s),
"-p", str(self.params.p),
]
if 'N' in self.params:
cmd += ["-N", str(self.params.N)]
if "x" in self.params:
cmd += ["-x", str(self.params.x)]
# ultrafast boostrap and mapping with -f d
# If no bootstraps then run -f D not -f a, and drop -x and -N
# if "-f D":
# add ougroups
if 'o' in self.params:
cmd += ["-o"]
cmd += [",".join(self.params.o)]
return cmd | dereneaton/ipyrad | [
63,
42,
63,
44,
1444236652
] |
def command(self):
""" returns command as a string """
return " ".join(self._command_list) | dereneaton/ipyrad | [
63,
42,
63,
44,
1444236652
] |
def _get_binary_paths():
# check for binary
list_binaries = [
"raxmlHPC-PTHREADS-AVX2",
"raxmlHPC-PTHREADS-AVX",
"raxmlHPC-PTHREADS-SSE3",
"raxmlHPC-PTHREADS",
]
# expand for env path
list_binaries = [os.path.join(sys.prefix, "bin", i) for i in list_binaries]
return list_binaries | dereneaton/ipyrad | [
63,
42,
63,
44,
1444236652
] |
def _call_raxml(command_list):
""" call the command as sps """
proc = subprocess.Popen(
command_list,
stderr=subprocess.STDOUT,
stdout=subprocess.PIPE
)
comm = proc.communicate()
return comm | dereneaton/ipyrad | [
63,
42,
63,
44,
1444236652
] |
def rx():
ifstat = open('/proc/net/dev').readlines()
for interface in ifstat:
#print '----', interface, '-----'
if INTERFACE in interface:
stat = float(interface.split()[1])
STATS[0:] = [stat] | soarpenguin/python-scripts | [
63,
40,
63,
1,
1397484324
] |
def setUp (self):
self.tb = gr.top_block ()
self.tp = drm.transm_params(1, 3, False, 0, 1, 0, 1, 1, 0, False, 24000, "station label", "text message")
vlen_msc = self.tp.msc().N_MUX() * self.tp.ofdm().M_TF()
vlen_sdc = self.tp.sdc().N()
vlen_fac = self.tp.fac().N() * self.tp.ofdm().M_TF()
self.cell_mapping = drm.cell_mapping_cc(self.tp, (vlen_msc, vlen_sdc, vlen_fac)) | kit-cel/gr-drm | [
64,
27,
64,
8,
1347630228
] |
def test_001_t (self):
# set up fg
self.tb.run ()
# check data | kit-cel/gr-drm | [
64,
27,
64,
8,
1347630228
] |
def check_create_table(conn):
global num
c = conn.cursor()
c.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='electrum_payments';")
data = c.fetchall()
if not data:
c.execute("""CREATE TABLE electrum_payments (address VARCHAR(40), amount FLOAT, confirmations INT(8), received_at TIMESTAMP, expires_at TIMESTAMP, paid INT(1), processed INT(1));""")
conn.commit()
c.execute("SELECT Count(address) FROM 'electrum_payments'")
num = c.fetchone()[0]
print "num rows", num | electrumalt/electrum-doge | [
20,
11,
20,
7,
1416825131
] |
def on_wallet_update():
for addr, v in pending_requests.items():
h = wallet.history.get(addr, [])
requested_amount = v.get('requested')
requested_confs = v.get('confirmations')
value = 0
for tx_hash, tx_height in h:
tx = wallet.transactions.get(tx_hash)
if not tx: continue
if wallet.verifier.get_confirmations(tx_hash) < requested_confs: continue
for o in tx.outputs:
o_address, o_value = o
if o_address == addr:
value += o_value
s = (value)/1.e8
print "balance for %s:"%addr, s, requested_amount
if s>= requested_amount:
print "payment accepted", addr
out_queue.put( ('payment', addr)) | electrumalt/electrum-doge | [
20,
11,
20,
7,
1416825131
] |
def do_stop(password):
global stopping
if password != my_password:
return "wrong password"
stopping = True
return "ok" | electrumalt/electrum-doge | [
20,
11,
20,
7,
1416825131
] |
def do_dump(password):
if password != my_password:
return "wrong password"
conn = sqlite3.connect(database);
cur = conn.cursor()
# read pending requests from table
cur.execute("SELECT oid, * FROM electrum_payments;")
data = cur.fetchall()
return map(row_to_dict, data) | electrumalt/electrum-doge | [
20,
11,
20,
7,
1416825131
] |
def send_command(cmd, params):
import jsonrpclib
server = jsonrpclib.Server('http://%s:%d'%(my_host, my_port))
try:
f = getattr(server, cmd)
except socket.error:
print "Server not running"
return 1 | electrumalt/electrum-doge | [
20,
11,
20,
7,
1416825131
] |
def db_thread():
conn = sqlite3.connect(database);
# create table if needed
check_create_table(conn)
while not stopping:
cur = conn.cursor()
# read pending requests from table
cur.execute("SELECT address, amount, confirmations FROM electrum_payments WHERE paid IS NULL;")
data = cur.fetchall()
# add pending requests to the wallet
for item in data:
addr, amount, confirmations = item
if addr in pending_requests:
continue
else:
with wallet.lock:
print "subscribing to %s"%addr
pending_requests[addr] = {'requested':float(amount), 'confirmations':int(confirmations)}
wallet.synchronizer.subscribe_to_addresses([addr])
wallet.up_to_date = False
try:
cmd, params = out_queue.get(True, 10)
except Queue.Empty:
cmd = ''
if cmd == 'payment':
addr = params
# set paid=1 for received payments
print "received payment from", addr
cur.execute("update electrum_payments set paid=1 where address='%s'"%addr)
elif cmd == 'request':
# add a new request to the table.
addr, amount, confs, minutes = params
sql = "INSERT INTO electrum_payments (address, amount, confirmations, received_at, expires_at, paid, processed)"\
+ " VALUES ('%s', %f, %d, datetime('now'), datetime('now', '+%d Minutes'), NULL, NULL);"%(addr, amount, confs, minutes)
print sql
cur.execute(sql)
# set paid=0 for expired requests
cur.execute("""UPDATE electrum_payments set paid=0 WHERE expires_at < CURRENT_TIMESTAMP and paid is NULL;""")
# do callback for addresses that received payment or expired
cur.execute("""SELECT oid, address, paid from electrum_payments WHERE paid is not NULL and processed is NULL;""")
data = cur.fetchall()
for item in data:
oid, address, paid = item
paid = bool(paid)
headers = {'content-type':'application/json'}
data_json = { 'address':address, 'password':cb_password, 'paid':paid }
data_json = json.dumps(data_json)
url = received_url if paid else expired_url
if not url:
continue
req = urllib2.Request(url, data_json, headers)
try:
response_stream = urllib2.urlopen(req)
print 'Got Response for %s' % address
cur.execute("UPDATE electrum_payments SET processed=1 WHERE oid=%d;"%(oid))
except urllib2.HTTPError:
print "cannot do callback", data_json
except ValueError, e:
print e
print "cannot do callback", data_json | electrumalt/electrum-doge | [
20,
11,
20,
7,
1416825131
] |
def ssh_key_string_to_obj(text):
key_f = StringIO(text)
key = None
try:
key = paramiko.RSAKey.from_private_key(key_f)
except paramiko.SSHException:
pass
try:
key = paramiko.DSSKey.from_private_key(key_f)
except paramiko.SSHException:
pass
return key | ibuler/coco | [
1,
2,
1,
1,
1508170309
] |
def ssh_key_gen(length=2048, type='rsa', password=None,
username='jumpserver', hostname=None):
"""Generate user ssh private and public key
Use paramiko RSAKey generate it.
:return private key str and public key str
"""
if hostname is None:
hostname = os.uname()[1]
f = StringIO()
try:
if type == 'rsa':
private_key_obj = paramiko.RSAKey.generate(length)
elif type == 'dsa':
private_key_obj = paramiko.DSSKey.generate(length)
else:
raise IOError('SSH private key must be `rsa` or `dsa`')
private_key_obj.write_private_key(f, password=password)
private_key = f.getvalue()
public_key = ssh_pubkey_gen(private_key_obj, username=username, hostname=hostname)
return private_key, public_key
except IOError:
raise IOError('These is error when generate ssh key.') | ibuler/coco | [
1,
2,
1,
1,
1508170309
] |
def to_unixtime(time_string, format_string):
with _STRPTIME_LOCK:
return int(calendar.timegm(time.strptime(str(time_string), format_string))) | ibuler/coco | [
1,
2,
1,
1,
1508170309
] |
def http_to_unixtime(time_string):
"""把HTTP Date格式的字符串转换为UNIX时间(自1970年1月1日UTC零点的秒数)。
HTTP Date形如 `Sat, 05 Dec 2015 11:10:29 GMT` 。
"""
return to_unixtime(time_string, _GMT_FORMAT) | ibuler/coco | [
1,
2,
1,
1,
1508170309
] |
def make_signature(access_key_secret, date=None):
    """Build a request signature from the secret and an HTTP date.

    ``date`` may be bytes (decoded as utf-8), an int UNIX timestamp,
    None (meaning "now"), or an already formatted GMT date string.
    """
    if isinstance(date, bytes):
        date = date.decode("utf-8")
    if date is None:
        date_gmt = http_date(int(time.time()))
    elif isinstance(date, int):
        date_gmt = http_date(date)
    else:
        date_gmt = date
    payload = str(access_key_secret) + "\n" + date_gmt
    return content_md5(payload)
1,
2,
1,
1,
1508170309
] |
def __init__(self, width=80, height=24):
    """Create a virtual terminal used to interpret raw tty output.

    :param width: screen width in characters
    :param height: screen height in characters
    """
    # pyte emulates a terminal so escape sequences are rendered instead
    # of leaking into the parsed output.
    self.screen = pyte.Screen(width, height)
    self.stream = pyte.ByteStream()
    self.stream.attach(self.screen)
    # Matches common shell prompts like "[user@host]$ " or "mysql> "
    self.ps1_pattern = re.compile(r'^\[?.*@.*\]?[\$#]\s|mysql>\s')
1,
2,
1,
1,
1508170309
] |
def parse_output(self, data, sep='\n'):
    """
    Feed raw terminal output through the emulator and return the
    rendered text.
    :param data: output data list like, [b'data', b'data']
    :param sep: line separator
    :return: output unicode data, all non-empty rendered lines except
        the last one (the trailing prompt line)
    """
    for chunk in data:
        self.stream.feed(chunk)
    rendered = [row for row in self.screen.display if row.strip()]
    self.screen.reset()
    return sep.join(rendered[:-1])
1,
2,
1,
1,
1508170309
] |
def wrap_with_line_feed(s, before=0, after=1):
    """Surround *s* with CRLF sequences: `before` leading, `after` trailing.

    Works for both str and bytes input, returning the same type.
    """
    crlf = b'\r\n' if isinstance(s, bytes) else '\r\n'
    return crlf * before + s + crlf * after
1,
2,
1,
1,
1508170309
] |
def wrap_with_warning(text, bolder=False):
    """Color `text` red (optionally bold) for terminal warning output."""
    return wrap_with_color(text, color='red', bolder=bolder)
1,
2,
1,
1,
1508170309
] |
def wrap_with_primary(text, bolder=False):
    """Color `text` green (optionally bold) for terminal primary output."""
    return wrap_with_color(text, color='green', bolder=bolder)
1,
2,
1,
1,
1508170309
] |
def __init__(self, id=None, pmid="", pmc="", doi="", url="",
             authors=None, year="", title="", journal="", abstract=""):
    """Bibliographic reference.

    A random uuid4 string is assigned when no id is given. `authors`
    defaults to an empty list (None sentinel avoids the shared mutable
    default pitfall).
    """
    super(Reference, self).__init__()
    # Items (e.g. model objects) this reference is attached to.
    self._linked_items = set()
    self.id = id or str(uuid4())
    self.pmid = pmid
    self.pmc = pmc
    self.doi = doi
    self.url = url
    self.authors = [] if authors is None else authors
    self.year = year
    self.title = title
    self.journal = journal
    self.abstract = abstract
1,
3,
1,
5,
1493549658
] |
def linked_items(self):
    """Return a snapshot copy of the items linked to this reference,
    so callers cannot mutate the internal set."""
    return set(self._linked_items)
1,
3,
1,
5,
1493549658
] |
def annotation(self):
    """Return the set of Annotation objects (pubmed, pmc, doi) for the
    identifiers that are set (non-empty) on this reference."""
    pairs = (("pubmed", self.pmid),
             ("pmc", self.pmc),
             ("doi", self.doi))
    return {Annotation(collection, value)
            for collection, value in pairs if value}
1,
3,
1,
5,
1493549658
] |
def remove_link(self, item, reciprocal=True):
    """ Remove reference link from item

    Parameters
    ----------
    item: GEMEditor.model.classes.base.ReferenceLink
    reciprocal: bool
        when True, also remove this reference from `item`
    """
    self._linked_items.discard(item)
    if not reciprocal:
        return
    item.remove_reference(self, reciprocal=False)
1,
3,
1,
5,
1493549658
] |
def reference_string(self):
    """ Get the authors part of the usual citation of scientific literature.

    Formats:
        "Lastname F et al., YYYY"   more than two authors
        "A1 and A2, YYYY"           exactly two authors
        "Lastname F, YYYY"          one author
        ""                          no authors
    """
    if not self.authors:
        return ""
    first = self.authors[0].display_str
    count = len(self.authors)
    if count == 1:
        return "{0}, {1}".format(first, self.year)
    if count == 2:
        return "{0} and {1}, {2}".format(first,
                                         self.authors[1].display_str,
                                         self.year)
    return "{0} et al., {1}".format(first, self.year)
1,
3,
1,
5,
1493549658
] |
def __new__(cls, lastname="", firstname="", initials=""):
    """Create a new Author instance.

    All fields default to empty strings so partially known authors can
    be represented. The base __new__ accepts the fields as keywords —
    presumably Author subclasses a namedtuple; verify against the class
    definition.
    """
    self = super(Author, cls).__new__(cls,
                                      lastname=lastname,
                                      firstname=firstname,
                                      initials=initials)
    return self
1,
3,
1,
5,
1493549658
] |
def __init__(self, langid):
    """Initialize the syntax data object.

    :param langid: language identifier for this syntax data
    """
    syndata.SyntaxDataBase.__init__(self, langid)
    # Setup: lexing is delegated to the HTML lexer (PHP is treated as a
    # language embedded in HTML by the styled text control)
    self.SetLexer(stc.STC_LEX_HTML)
    self.RegisterFeature(synglob.FEATURE_AUTOINDENT, AutoIndenter)
28,
14,
28,
18,
1363614616
] |
def GetSyntaxSpec(self):
    """Syntax Specifications
    @return: list combining the base HTML syntax items with the
             module-local ones
    """
    return _html.SYNTAX_ITEMS + SYNTAX_ITEMS
28,
14,
28,
18,
1363614616
] |
def GetCommentPattern(self):
    """Returns a list of characters used to comment a block of code
    @note: assuming pure php code for comment character(s)
    @return: list containing the PHP line comment token
    """
    return [u'//']
28,
14,
28,
18,
1363614616
] |
def KeywordString(option=0):
    """Returns the specified Keyword String
    @note: not used by most modules
    @param option: unused; kept for interface compatibility
    @return: the PHP keyword string
    """
    return PHP_KEYWORDS
28,
14,
28,
18,
1363614616
] |
def __init__(self, context=None):
    """Publisher that syncs auth0 users into CIS.

    :param context: optional execution context dict (e.g. AWS Lambda
        context); defaults to an empty dict
    """
    self.secret_manager = cis_publisher.secret.Manager()
    # BUGFIX: the default used to be a mutable `{}` argument, which is
    # created once at definition time and shared between all calls /
    # instances. Use a None sentinel instead (backward compatible).
    self.context = {} if context is None else context
    self.report = None
    self.config = cis_publisher.common.get_config()
    self.s3_cache = None
    self.s3_cache_require_update = False
    # Only fields we care about for the user entries
    # auth0 field->cis field map (None means: fetched from auth0 but not
    # mapped to a CIS field)
    self.az_cis_fields = {
        "created_at": "created",
        "given_name": "first_name",
        "family_name": "last_name",
        "name": None,
        "nickname": None,
        "user_id": "user_id",
        "email": "primary_email",
        "identities": "identities",
        "blocked": "active",
    }
    # Connections (login methods) that must never be published from auth0
    self.az_blacklisted_connections = ["Mozilla-LDAP", "Mozilla-LDAP-Dev"]
    # Connections that are allowed to be published
    self.az_whitelisted_connections = ["email", "github", "google-oauth2", "firefoxaccounts"]
    self.az_users = None
    self.all_cis_user_ids = None
    self.user_ids_only = None
11,
27,
11,
17,
1491581357
] |
def save_s3_cache(self, data):
    """
    Write the JSON-serializable cache to the S3 cache object, but only
    when an update has been flagged as required.
    @data dict JSON
    """
    if self.s3_cache_require_update is False:
        return
    client = boto3.client("s3")
    client.put_object(
        Bucket=os.environ.get("CIS_BUCKET_URL"),
        Key="cache.json",
        Body=json.dumps(data),
    )
    logger.info("Wrote S3 cache file")
11,
27,
11,
17,
1491581357
] |
def fetch_all_cis_user_ids(self, publisher):
    """
    Get all known CIS user ids for the whitelisted login methods
    This is here because CIS only returns user ids per specific login methods
    We also cache this

    :param publisher: publisher object used to query CIS per login method
    :return: list of unique CIS user ids
    """
    # NOTE(review): the S3 cache is consulted before the in-memory
    # cache, so a populated S3 cache always wins; assumes the cached
    # dict contains an "all_cis_user_ids" key — verify against
    # get_s3_cache()/save_s3_cache() callers.
    self.s3_cache = self.get_s3_cache()
    if self.s3_cache is not None:
        self.all_cis_user_ids = self.s3_cache["all_cis_user_ids"]
        return self.all_cis_user_ids
    if self.all_cis_user_ids is not None:
        return self.all_cis_user_ids
    # Not cached, fetch it
    self.s3_cache_require_update = True
    # These are the users CIS knows about
    self.all_cis_user_ids = []
    for c in self.az_whitelisted_connections:
        # FIXME we're not using the real login method here because
        # Code in the CIS Vault matches against the start of `user_id` instead of the actual login method
        # This is fine for most methods, except this one... ideally the code should change in the CIS Vault when it
        # uses something else than DynamoDB and is able to match efficiently on other attributes
        if c == "firefoxaccounts":
            c = "oauth2|firefoxaccounts"
        publisher.login_method = c
        publisher.get_known_cis_users(include_inactive=False)
        self.all_cis_user_ids += publisher.known_cis_users_by_user_id.keys()
        # Invalidate publisher memory cache
        publisher.known_cis_users = None
    # XXX in case we got duplicates for some reason, we uniquify
    self.all_cis_user_ids = list(set(self.all_cis_user_ids))
    logger.info("Got {} known CIS users for all whitelisted login methods".format(len(self.all_cis_user_ids)))
    return self.all_cis_user_ids
11,
27,
11,
17,
1491581357
] |
def fetch_az_users(self, user_ids=None):
    """
    Fetches ALL valid users from auth0's database
    Returns list of user attributes

    :param user_ids: optional list of user ids to restrict the query to;
        None (or more than 6 ids) means "query everything"
    """
    # Memory cached?
    if self.az_users is not None:
        return self.az_users
    # S3 cached?
    self.get_s3_cache()
    if self.s3_cache is not None:
        self.az_users = self.s3_cache["az_users"]
        return self.az_users
    # Not cached, fetch it
    # Don't use (or update) the cache for just one user
    if user_ids is not None and len(user_ids) != 1:
        self.s3_cache_require_update = True
    az_api_url = self.config("AUTHZERO_API", namespace="cis", default="auth-dev.mozilla.auth0.com")
    az_client_id = self.secret_manager.secret("az_client_id")
    az_client_secret = self.secret_manager.secret("az_client_secret")
    az_fields = self.az_cis_fields.keys()
    # Build the connection query (excludes LDAP)
    # Excluded: "Mozilla-LDAP", "Mozilla-LDAP-Dev"
    # Excluded: Old users without any group
    # This can also be retrieved from /api/v2/connections
    # Ignore non-verified `email` (such as unfinished passwordless flows) as we don't consider these to be valid
    # users
    max_date = datetime.utcnow() - timedelta(days=31)  # maximum login length + 1 day
    max_date_str = max_date.strftime("%Y-%m-%d")
    exclusion_query = (
        f"logins_count:[2 TO *] AND NOT last_login:[* TO {max_date_str}] AND "
        '(groups:(everyone) OR (NOT _exists_:"groups"))'
    )
    az_query = exclusion_query + " AND email_verified:true AND ("
    t = ""
    for azc in self.az_whitelisted_connections:
        az_query = az_query + t + 'identities.connection:"{}"'.format(azc)
        t = " OR "
    az_query += ")"
    # NOTE XXX: There is no way to tell auth0's ES "don't include matches where the first identity.connection is a
    # blacklisted connection", so we do this instead. This 100% relies on auth0 user_ids NOT being opaque,
    # unfortunately
    az_query += ' AND NOT (user_id:"ad|*")'
    # Build query for user_ids if some are specified (else it gets all of them)
    # NOTE: We can't query all that many users because auth0 uses a GET query which is limited in size by httpd
    # (nginx - 8kb by default)
    if user_ids and len(user_ids) > 6:
        logger.warning(
            "Cannot query the requested number of user_ids from auth0, query would be too large. "
            "Querying all user_ids instead."
        )
        user_ids = None
    # we had to add this because it gets called by the CIS-New-User hook, where the query wouldn't work
    # because exclusion_query excludes users who have only a single login success
    # BUGFIX: guard against user_ids being None before calling len() on it;
    # previously `elif len(user_ids) == 1:` raised TypeError when user_ids
    # was None (the documented "get all of them" case).
    elif user_ids is not None and len(user_ids) == 1:
        logger.info("Restricting auth0 user query to single user_id: {}".format(user_ids[0]))
        az_query = f'user_id:"{user_ids[0]}"'
    elif user_ids:
        logger.info("Restricting auth0 user query to user_ids: {}".format(user_ids))
        # e.g.: user_id:"email|foo" OR user_id:"email|bar" OR user_id:"ad|Mozilla-LDAP|baz"
        or_joined_user_query = " OR ".join([f'user_id:"{u}"' for u in user_ids])
        az_query += f" AND ({or_joined_user_query})"
    logger.debug("About to get Auth0 user list")
    az_getter = GetToken(az_api_url)
    az_token = az_getter.client_credentials(az_client_id, az_client_secret, "https://{}/api/v2/".format(az_api_url))
    auth0 = Auth0(az_api_url, az_token["access_token"])
    # Query the entire thing
    logger.info("Querying auth0 user database, query is: {}".format(az_query))
    user_list = []
    # This is an artificial upper limit of 100*9999 (per_page*page) i.e. 999 900 users max - just in case things
    # go wrong
    retries = 15
    backoff = 20
    for p in range(0, 9999):
        tmp = None
        try:
            tmp = auth0.users.list(page=p, per_page=100, fields=az_fields, q=az_query)["users"]
            logger.debug("Requesting auth0 user list, at page {}".format(p))
        except Auth0Error as e:
            # 429 is Rate limit exceeded and we can still retry
            if (e.error_code == 429 or e.status_code == 429) and retries > 0:
                backoff += 1
                logger.debug(
                    "Rate limit exceeded, backing off for {} seconds, retries left {} error: {}".format(
                        backoff, retries, e
                    )
                )
                retries -= 1
                time.sleep(backoff)
            else:
                logger.warning("Error: {}".format(e))
                raise
        if tmp == [] or tmp is None:
            # stop when our page is empty
            logger.debug("Crawled {} pages from auth0 users API".format(p))
            break
        else:
            user_list.extend(tmp)
    logger.info("Received {} users from auth0".format(len(user_list)))
    self.az_users = user_list
    return self.az_users
11,
27,
11,
17,
1491581357
] |
def process(self, publisher, user_ids):
    """
    Process profiles and post them
    @publisher object the publisher object to operate on
    @user_ids list of user ids to process in this batch
    """
    # Only process the requested user_ids from the list of all az users
    # as the list is often containing all users, not just the ones we requested.
    # Use a set for O(1) membership tests while filtering below.
    todo_user_ids = set(self.get_az_user_ids()) & set(user_ids)
    todo_users = [u for u in self.az_users if u["user_id"] in todo_user_ids]
    profiles = self.convert_az_users(todo_users)
    logger.info("Processing {} profiles".format(len(profiles)))
    publisher.profiles = profiles
    failures = []
    try:
        failures = publisher.post_all(user_ids=user_ids, create_users=True)
    except Exception:
        logger.error("Failed to post_all() profiles. Trace: {}".format(format_exc()))
        # BUGFIX: bare `raise` preserves the original traceback;
        # `raise e` re-raised from this frame and truncated it.
        raise
    if len(failures) > 0:
        logger.error("Failed to post {} profiles: {}".format(len(failures), failures))
11,
27,
11,
17,
1491581357
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.