text stringlengths 0 1.05M | meta dict |
|---|---|
#AND HOW WE SPEND OUR DAYS IS, OF COURSE, HOW WE SPEND OUR LIVES
#display images according to twitchbrain api data
"""
import tkinter as tk
class firstPage(Frame):
def __init__(self, parent):
Frame.__init__(self, parent)
self.parent = parent
self.initUI()
def initUI(self):
lblWelcome = tk.Label(text="Welcome to Twitch Brain", fg="black", bg="pink", font=("Helvetica", 16))
lblWelcome.pack()
lbl1 = tk.Label(text="What's the most viewed game on twich at this moment?", fg="black", bg="pink")
lbl1.pack()
lbl2 = tk.Label(text="What's the most view game on twitch of all time?", fg="black", bg="pink")
lbl2.pack()
btn = tk.Button(text="Nerf This!", fg="black", bg="pink", font=("",14))
btn.pack(pady=10)
def main():
root = tk.Tk()
root.geometry("500x500+300+300")
root.configure(background = "pink")
root.title("Twitch Brain 1.0")
app = firstPage(root)
root.mainloop()
if __name__ == '__main__':
main()
"""
#translate to grid layout
import tkinter as tk
class twitchBrain(tk.Tk):
    """Top-level application window that hosts stacked page frames."""

    def __init__(self, *args, **kwargs):
        tk.Tk.__init__(self, *args, **kwargs)
        # Single container; pages are stacked in cell (0, 0) and raised
        # on demand by show_frame().
        container = tk.Frame(self)
        container.pack(side="top", fill="both", expand=True)
        container.grid_rowconfigure(0, weight=1)
        container.grid_columnconfigure(0, weight=1)
        # BUG FIX: was a tuple (), which cannot take the item
        # assignment below (TypeError at startup).
        self.frames = {}
        frame = firstPage(container)
        self.frames[firstPage] = frame
        # BUG FIX: place the page in the weighted cell (0, 0); the
        # original used (2, 2), outside the configured row/column.
        frame.grid(row=0, column=0, sticky="nsew")
        self.show_frame(firstPage)

    def show_frame(self, cont):
        """Raise the page registered under key *cont*."""
        frame = self.frames[cont]
        frame.tkraise()

    def dispImage(self):
        # BUG FIX: was defined without *self*, so calling it through an
        # instance raised TypeError. Placeholder for twitchbrain images.
        print("another image")
class firstPage(tk.Frame):
    """First GUI page: welcome banner, image area and refresh button."""

    def __init__(self, parent, controller=None):
        # *controller* is accepted (and currently unused) so the page
        # can be constructed as firstPage(container, app) by the
        # twitchBrain window; the default keeps one-arg callers working.
        tk.Frame.__init__(self, parent)
        # BUG FIX: widgets are parented to this frame instead of the
        # implicit default root window, so they live inside the page.
        lblWelcome = tk.Label(self, text="Welcome to Twitch Brain", fg="black", bg="pink", font=("Helvetica", 16))
        lblWelcome.grid(column=1, sticky=tk.E + tk.W, pady=10)
        lblImage = tk.Label(self, text="Images go here", fg="black", bg="pink")
        lblImage.grid(row=1, column=0, padx=10)
        lblGame1 = tk.Label(self, text="#1 Game", fg="black", bg="pink")
        lblGame1.grid(row=1, column=2, padx=10)
        # NOTE(review): dispImage is defined on twitchBrain, not at
        # module scope, so this bare reference only resolves if a
        # module-level dispImage exists at runtime — confirm intent.
        btn = tk.Button(self, text="Nerf This!", fg="black", bg="pink", font=("", 14), command=dispImage)
        btn.grid(column=1, sticky=tk.E + tk.W, pady=10)
def main():
    """Build the pink root window and run the Tk event loop."""
    window = tk.Tk()
    # window.geometry("500x500+300+300")
    window.configure(background="pink")
    window.title("Twitch Brain 1.0")
    app = firstPage(window)
    window.mainloop()


if __name__ == '__main__':
    main()
| {
"repo_name": "jonathanpreston/twitchbrain",
"path": "GUI.py",
"copies": "1",
"size": "2619",
"license": "mpl-2.0",
"hash": 1139406246861698800,
"line_mean": 24.427184466,
"line_max": 108,
"alpha_frac": 0.5987017946,
"autogenerated": false,
"ratio": 3.1900121802679657,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4288713974867966,
"avg_score": null,
"num_lines": null
} |
"""Andor camera interface
The Andor SDK is generic for all of their cameras. However, this was
written more for using the Andor iXon line, so although it will
probably work with any Andor camera, there may be some unforeseen
bugs.
"""
from __future__ import print_function
import time
import traceback as tb
import ctypes
import numpy as np
from . import camera
from .exceptions import AndorError
from .andor_status_codes import *
from .andor_capabilities import *
def _int_ptr(val=0):
"""Utility function to create integer pointers."""
return ctypes.pointer(ctypes.c_int(val))
class AndorCamera(camera.Camera):
    """Class for controlling Andor cameras. This is designed
    specifically with the iXon series cameras, but the Andor API is
    rather generic so should work with most or all of their
    cameras.
    """

    # Utilities
    # -------------------------------------------------------------------------

    # Valid acquisition modes.
    # Maps a human-readable mode name to the integer code passed to the
    # SDK's SetAcquisitionMode call.
    _acq_modes = {
        "single": 1,
        "accumulate": 2,
        "kinetics": 3,
        "fast kinetics": 4,
        "continuous": 5}

    # Valid trigger modes.
    # There are more that are not implemented here, some of which are
    # only valid on particular camera models.
    _trigger_modes = {
        "internal": 0,
        "external": 1,
        "external start": 6,
        "software": 10}
def _chk(self, status):
    """Checks the error status of an Andor DLL function call. If
    something catastrophic happened, an AndorError exception is
    raised. In non-critical cases, warnings are given.

    Parameters
    ----------
    status : int
        The return code from an Andor DLL function.

    Raises
    ------
    AndorError
        Whenever something very bad happens. Generally, this
        should hopefully only be whenever the user is trying to do
        something stupid.
    """
    # Simulated cameras have no DLL, so there is nothing to check.
    if not self.real_camera:
        return
    if status == ANDOR_STATUS['DRV_ACQUIRING']:
        self.logger.warn(
            "Action not completed when data acquisition is in progress!")
        self.logger.debug(''.join(tb.format_list(tb.extract_stack())))
    elif status == ANDOR_STATUS['DRV_TEMPERATURE_OFF']:
        #self.logger.warn("Temperature control is off.")
        pass
    elif status == ANDOR_STATUS['DRV_TEMPERATURE_NOT_REACHED']:
        self.logger.warn("Temperature set point not yet reached.")
    elif status == ANDOR_STATUS['DRV_TEMPERATURE_DRIFT']:
        self.logger.warn("Temperature is drifting.")
    elif status == ANDOR_STATUS['DRV_TEMP_NOT_STABILIZED']:
        self.logger.warn("Temperature set point reached but not yet stable.")
    elif status == ANDOR_STATUS['DRV_TEMPERATURE_STABILIZED']:
        pass
    elif status == ANDOR_STATUS['DRV_IDLE']:
        # Log the call stack so the offending call can be identified.
        stack = tb.extract_stack()
        self.logger.warn(
            'Function call resulted in DRV_IDLE.\n' + \
            ''.join(tb.format_list(stack)))
    elif status != ANDOR_STATUS['DRV_SUCCESS']:
        raise AndorError("Andor returned the status message " + \
                         ANDOR_CODES[status])
# Setup and shutdown
# -------------------------------------------------------------------------
def _initialize(self, **kwargs):
    """Initialize the Andor camera.

    Keyword arguments
    -----------------
    use_noise_filter : bool
        When True, use the "median" post-processing noise filter
        provided by the Andor SDK.
    wait_for_temp : bool
        When False, don't wait for the temperature to reach -20
        before shutting off. Andor recommends waiting, but for
        quicker debugging, it is useful to not wait to rerun a
        program. Defaults to True.
    """
    # Try to load the Andor DLL (Windows-only module/name).
    # TODO: library name in Linux?
    self.clib = ctypes.windll.atmcd32d
    # Initialize the camera and get the detector size
    # TODO: directory to Initialize?
    self._chk(self.clib.Initialize("."))
    xpx, ypx = _int_ptr(), _int_ptr()
    self._chk(self.clib.GetDetector(xpx, ypx))
    # Full sensor size in pixels, stored as [x, y].
    self.shape = [xpx.contents.value, ypx.contents.value]
    #self._chk(self.clib.SetReadMode(4)) # image read mode
    # Default geometry: full frame, no binning.
    self.set_crop([1, self.shape[0], 1, self.shape[1]])
    self.set_bins(1)
    self.use_noise_filter = kwargs.get('use_noise_filter', False)
    self.wait_for_temp = kwargs.get('wait_for_temp', True)
    # Set default acquisition and trigger modes
    self.set_acquisition_mode('continuous')
    self.set_trigger_mode('software')
    # Set maximum preamp gain (the SDK indexes gains from 0).
    gains = ctypes.c_int()
    self._chk(self.clib.GetNumberPreAmpGains(ctypes.pointer(gains)))
    self._chk(self.clib.SetPreAmpGain(gains.value - 1))
    # Enable EM gain mode
    # TODO: This is not general for all Andor cameras!
    self._chk(self.clib.SetEMGainMode(0))
    gmin, gmax = ctypes.c_int(), ctypes.c_int()
    self._chk(self.clib.GetEMGainRange(ctypes.pointer(gmin), ctypes.pointer(gmax)))
    self.logger.debug(
        "EM gain range = [%i, %i]" % (gmin.value, gmax.value))
def get_camera_properties(self):
    """Code for getting camera properties should go here."""
    # Get generic Andor properties
    self.props.load('andor.json')
    # Get generic camera-specific properties.
    caps = AndorCapabilities()
    # NOTE(review): ulSize is normally the struct size in bytes;
    # 12*32 looks suspect — confirm against ctypes.sizeof(caps).
    caps.ulSize = 12*32
    if self.real_camera:
        self._chk(self.clib.GetCapabilities(ctypes.pointer(caps)))
    # Get cooler temperature range and initial set point.
    if self.real_camera:
        min_, max_ = ctypes.c_int(), ctypes.c_int()
        self._chk(self.clib.GetTemperatureRange(ctypes.pointer(min_), ctypes.pointer(max_)))
    else:
        # Plausible defaults for a simulated camera.
        min_, max_ = ctypes.c_int(-80), ctypes.c_int(30)
    self.temperature_set_point = self.props['init_set_point']
    self.set_cooler_temperature(self.temperature_set_point)
    self.temp_stabilized = False
    # Update properties.
    # TODO: actually set things based on the result of GetCapabilities
    new_props = {
        'pixels': self.shape,
        'gain_adjust': True,
        'temp_control': True,
        'temp_range': [min_.value, max_.value],
        'shutter': True,
    }
    self.props.update(new_props)
def close(self):
    """Turn off temperature regulation and safely shutdown the
    camera.

    The Andor SDK guide indicates that for classic and ICCD
    systems, it is best to wait until the temperature is above -20
    degrees C before shutting down, so this will wait until that
    condition is met.
    """
    self.stop()
    self.close_shutter()
    self.set_gain(0)
    self.cooler_off()
    # Simulated cameras have no hardware to shut down.
    if not self.real_camera:
        return
    if self.wait_for_temp:
        # Poll every 5 s until the CCD has warmed past -20 C.
        while True:
            try:
                temp = self.get_cooler_temperature()
                if temp > -20:
                    break
                else:
                    time.sleep(5)
                    self.logger.info(
                        "Waiting for CCD to warm up." + \
                        " Current temperature = %i" % temp)
            except KeyboardInterrupt:
                # NOTE(review): raw_input is Python 2 only — confirm the
                # targeted Python version before porting.
                result = raw_input("Are you sure you want to exit? y/[n] >>> ")
                if result.lower() == 'y':
                    break
    self._chk(self.clib.ShutDown())
# Image acquisition
# -------------------------------------------------------------------------
def set_acquisition_mode(self, mode):
    """Select how images are acquired (see self._acq_modes for options).

    Raises AndorError when *mode* is not a recognised mode name.
    """
    mode_code = self._acq_modes.get(mode)
    if mode_code is None:
        raise AndorError(
            "Acquisition mode must be one of " + repr(self._acq_modes))
    self.acq_mode = mode
    self.logger.info('Setting acquisition mode to ' + mode)
    self._chk(self.clib.SetAcquisitionMode(ctypes.c_int(mode_code)))
    # Continuous acquisition runs with a zero kinetic cycle time.
    if mode == 'continuous':
        self._chk(self.clib.SetKineticCycleTime(0))
def _acquire_image_data(self):
    """Acquire the most recent image data from the camera. This
    will work best in single image acquisition mode.

    Returns
    -------
    numpy.ndarray
        The (optionally noise-filtered) image, reshaped to
        shape // bins along each axis.
    """
    # TODO: Check that acquisition was actually started!
    # (The original contained an unreachable `while False:` polling
    # loop here; it has been removed as dead code.)
    # Allocate image storage. BUG FIX: use floor division so the size
    # stays an int under Python 3 (`/` yields a float there).
    img_size = self.shape[0] * self.shape[1] // self.bins**2
    c_array = ctypes.c_long * img_size
    c_img = c_array()
    # Trigger or wait for a trigger then acquire data.
    if self.trigger_mode == self._trigger_modes['software']:
        self._chk(self.clib.SendSoftwareTrigger())
    self.clib.WaitForAcquisition()
    self._chk(self.clib.GetMostRecentImage(ctypes.pointer(c_img), ctypes.c_ulong(img_size)))
    # Apply noise filter if requested.
    if self.use_noise_filter:
        c_img_filtered = c_array()
        self._chk(self.clib.PostProcessNoiseFilter(
            ctypes.pointer(c_img), ctypes.pointer(c_img_filtered),
            ctypes.sizeof(c_img), 0, 1, 0,
            self.shape[0], self.shape[1]))
        c_img = c_img_filtered
    # Pythonize and return. BUG FIX: `//` keeps the shape integral.
    img_array = np.frombuffer(c_img, dtype=ctypes.c_long)
    img_array.shape = np.array(self.shape) // self.bins
    return img_array
# Triggering
# -------------------------------------------------------------------------
def get_trigger_mode(self):
    """Return the cached trigger-mode code."""
    current = self.trigger_mode
    self.logger.debug("Trigger mode: " + str(current))
    return current
def set_trigger_mode(self, mode):
    """Setup trigger mode.

    Parameters
    ----------
    mode : str
        Specifies the mode to use and must be one of the (non-case
        sensitive) strings found in self._trigger_modes.
    """
    key = mode.lower()
    if key not in self._trigger_modes:
        raise AndorError("Invalid trigger mode: " + key)
    code = self._trigger_modes[key]
    self.trigger_mode = code
    self.logger.info("Setting trigger mode to " + key)
    self._chk(self.clib.SetTriggerMode(code))
def start(self):
    """Begin acquisition so the camera starts accepting triggers."""
    self.logger.info('Calling StartAcquisition()')
    self._chk(self.clib.StartAcquisition())

def stop(self):
    """Abort any acquisition in progress."""
    self.logger.info('Calling AbortAcquisition()')
    result = self.clib.AbortAcquisition()
    # DRV_IDLE just means nothing was running; not an error here.
    if result != ANDOR_STATUS['DRV_IDLE']:
        self._chk(result)
# Shutter control
# -------------------------------------------------------------------------
def _set_shutter(self, state):
    """Open or close the shutter; *state* is 'open' or 'closed'."""
    assert state in ['open', 'closed']
    # SDK shutter modes: 1 = open, 2 = closed (20 ms open/close times).
    shutter_mode = 1 if state == 'open' else 2
    self._chk(self.clib.SetShutter(1, shutter_mode, 20, 20))
# Gain and exposure time
# -------------------------------------------------------------------------
def _update_exposure_time(self, t):
    """Set the exposure time in ms."""
    self.t_ms = t
    t_s = self.t_ms / 1000.
    self.logger.info('Setting exposure time to %.03f s.' % t_s)
    self._chk(self.clib.SetExposureTime(ctypes.c_float(t_s)))
    # Read back the timings the SDK actually settled on.
    exposure = ctypes.c_float()
    accumulate = ctypes.c_float()
    kinetic = ctypes.c_float()
    self.clib.GetAcquisitionTimings(
        ctypes.pointer(exposure),
        ctypes.pointer(accumulate),
        ctypes.pointer(kinetic))
    self.logger.debug(
        'Results of GetAcquisitionTimings:\n'
        + '\texposure = %.03f\n' % exposure.value
        + '\taccumulate = %.03f\n' % accumulate.value
        + '\tkinetic = %.03f' % kinetic.value)
def get_gain(self):
    """Query the camera for the current EM gain setting."""
    gain_ptr = _int_ptr()
    self._chk(self.clib.GetEMCCDGain(gain_ptr))
    return gain_ptr.contents.value
def set_gain(self, gain, **kwargs):
    """Set the camera gain and mode.

    TODO: EM gain is specific to certain cameras, and even for the
    ones that have it, you may not want it. Therefore, this should
    be changed to be more general at some point.

    Parameters
    ----------
    gain : int
        EM gain for the camera between 0 and 255.
    """
    assert 0 <= gain <= 255
    self.logger.info("Setting gain to %i." % gain)
    result = self.clib.SetEMCCDGain(ctypes.c_int(gain))
    if result == ANDOR_STATUS['DRV_P1INVALID']:
        # BUG FIX: this warning was unreachable because the original
        # first branch already matched DRV_P1INVALID. The gain
        # attribute is still recorded, as before.
        self.logger.warn("Andor reports the specified gain value is invalid.")
        # TODO: why does this happen?
        self.gain = gain
    elif result == ANDOR_STATUS['DRV_SUCCESS']:
        self.gain = gain
    else:
        self._chk(result)
# Cooling
# -------------------------------------------------------------------------
def cooler_on(self):
    """Switch the TEC cooler on and record that it is active."""
    self.logger.info("Turning cooler on.")
    self.cooler_active = True
    self._chk(self.clib.CoolerON())

def cooler_off(self):
    """Switch the TEC cooler off and record that it is inactive."""
    self.logger.info("Turning cooler off.")
    self.cooler_active = False
    self._chk(self.clib.CoolerOFF())
def get_cooler_temperature(self):
    """Check the TEC temperature.

    Also updates self.temp_stabilized from the status code the SDK
    returns alongside the reading. Simulated cameras report 20.
    """
    if not self.real_camera:
        return 20
    temp_ptr = _int_ptr()
    result = self.clib.GetTemperature(temp_ptr)
    if result == ANDOR_STATUS['DRV_TEMPERATURE_STABILIZED']:
        self.temp_stabilized = True
    elif result in (
            ANDOR_STATUS['DRV_TEMPERATURE_OFF'],
            ANDOR_STATUS['DRV_TEMPERATURE_NOT_REACHED'],
            ANDOR_STATUS['DRV_TEMPERATURE_DRIFT'],
            ANDOR_STATUS['DRV_TEMP_NOT_STABILIZED']):
        self.temp_stabilized = False
    else:
        self._chk(result)
    return temp_ptr.contents.value
def set_cooler_temperature(self, temp):
    """Set the cooler temperature to temp.

    Raises
    ------
    ValueError
        If *temp* lies outside the camera's reported temperature range.
    """
    # BUG FIX: validate *before* recording/logging the new set point,
    # so a rejected value no longer overwrites temperature_set_point.
    if temp > self.props['temp_range'][1] or temp < self.props['temp_range'][0]:
        raise ValueError(
            "Invalid set point. Valid range is " + \
            repr(self.props['temp_range']))
    self.temperature_set_point = temp
    self.logger.info("Temperature set point changed to %i" % temp)
    self._chk(self.clib.SetTemperature(temp))
# Cropping and binning
# -------------------------------------------------------------------------
def _update_crop(self, crop):
    """Define the portion of the CCD to actually collect data
    from. Using a reduced sensor area typically allows for faster
    readout.

    TODO: The proper way to do this is to use the
    SetIsolatedCropMode function, but I am not really clear
    on what the arguments are supposed to be. This also just
    doesn't work for some reason.
    """
    # NOTE(review): the *crop* argument is only used for logging; the
    # SetImage call below reads self.crop, which is presumably set by
    # the camera.Camera base class before this hook runs — confirm.
    self.logger.info("Setting new crop to: " + ', '.join([str(x) for x in crop]))
    self._chk(self.clib.SetImage(
        self.bins, self.bins,
        self.crop[0], self.crop[1], self.crop[2], self.crop[3]))
    #self._chk(self.clib.SetIsolatedCropMode(
    #    1, self.crop[3], self.crop[1], self.crop[2], self.crop[0]))
def set_bins(self, bins):
    """Set binning to bins x bins and re-send the image geometry."""
    self.bins = bins
    self.logger.info('Updating binning to ' + str(bins))
    crop = self.crop
    self._chk(self.clib.SetImage(
        bins, bins, crop[0], crop[1], crop[2], crop[3]))
if __name__ == "__main__":
    # Smoke test: exercise the interface against a simulated
    # (real=False) device with debug logging enabled.
    import logging
    logging.basicConfig(level=logging.DEBUG)
    with AndorCamera(temperature=10, success_value=ANDOR_STATUS['DRV_SUCCESS'], real=False) as cam:
        cam.set_exposure_time(10)
        cam.set_trigger_mode('external')
        cam.open_shutter()
        cam.start()
        # NOTE(review): test_real_time_acquisition is not defined in
        # this module — presumably provided by camera.Camera; confirm.
        cam.test_real_time_acquisition()
        cam.stop()
        cam.close_shutter()
| {
"repo_name": "mivade/qCamera",
"path": "qcamera/andor.py",
"copies": "1",
"size": "17045",
"license": "bsd-2-clause",
"hash": -2588544046977759000,
"line_mean": 35.8142548596,
"line_max": 99,
"alpha_frac": 0.557113523,
"autogenerated": false,
"ratio": 4.06413924654268,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.002728347070158648,
"num_lines": 463
} |
"""Andor."""
# --- import --------------------------------------------------------------------------------------
import os
import pathlib
import time
import numpy as np
from ._data import Data
from .. import exceptions as wt_exceptions
from ..kit import _timestamp as timestamp
# --- define --------------------------------------------------------------------------------------
__all__ = ["from_Solis"]
# --- from function -------------------------------------------------------------------------------
def from_Solis(filepath, name=None, parent=None, verbose=True) -> Data:
    """Create a data object from Andor Solis software (ascii exports).

    Parameters
    ----------
    filepath : path-like
        Path to .txt file.
        Can be either a local or remote file (http/ftp).
        Can be compressed with gz/bz2, decompression based on file name.
    name : string (optional)
        Name to give to the created data object. If None, filename is used.
        Default is None.
    parent : WrightTools.Collection (optional)
        Collection to place new data object within. Default is None.
    verbose : boolean (optional)
        Toggle talkback. Default is True.

    Returns
    -------
    data
        New data object.
    """
    # parse filepath
    filestr = os.fspath(filepath)
    filepath = pathlib.Path(filepath)
    # IDIOM FIX: `not in` rather than `not ... in`.
    if ".asc" not in filepath.suffixes:
        wt_exceptions.WrongFileTypeWarning.warn(filepath, ".asc")
    # parse name
    if not name:
        name = filepath.name.split(".")[0]
    # create data
    ds = np.DataSource(None)
    f = ds.open(filestr, "rt")
    try:
        axis0 = []
        arr = []
        attrs = {}
        # Data section: comma-separated rows; the first column is the axis.
        while True:
            line = f.readline().strip()[:-1]
            if len(line) == 0:
                break
            else:
                line = line.split(",")
                line = [float(x) for x in line]
                axis0.append(line.pop(0))
                arr.append(line)
        # Metadata section: "key: value" lines, ended by the third blank line.
        i = 0
        while i < 3:
            line = f.readline().strip()
            if len(line) == 0:
                i += 1
            else:
                try:
                    key, val = line.split(":", 1)
                except ValueError:
                    pass  # malformed metadata line: skip it
                else:
                    attrs[key.strip()] = val.strip()
    finally:
        # ROBUSTNESS FIX: close the file even if parsing raises.
        f.close()
    created = attrs["Date and Time"]  # is this UTC?
    created = time.strptime(created, "%a %b %d %H:%M:%S %Y")
    created = timestamp.TimeStamp(time.mktime(created)).RFC3339
    kwargs = {"name": name, "kind": "Solis", "source": filestr, "created": created}
    if parent is None:
        data = Data(**kwargs)
    else:
        data = parent.create_data(**kwargs)
    arr = np.array(arr)
    arr /= float(attrs["Exposure Time (secs)"])
    # signal has units of Hz because time normalized
    arr = data.create_channel(name="signal", values=arr, signed=False, units="Hz")
    axis0 = np.array(axis0)
    if float(attrs["Grating Groove Density (l/mm)"]) == 0:
        xname = "xindex"
        xunits = None
    else:
        xname = "wm"
        xunits = "nm"
    data.create_variable(name=xname, values=axis0[:, None], units=xunits)
    data.create_variable(name="yindex", values=np.arange(arr.shape[1])[None, :], units=None)
    data.transform(data.variables[0].natural_name, "yindex")
    for key, val in attrs.items():
        data.attrs[key] = val
    # finish
    if verbose:
        print("data created at {0}".format(data.fullpath))
        print(" axes: {0}".format(data.axis_names))
        print(" shape: {0}".format(data.shape))
    return data
| {
"repo_name": "wright-group/WrightTools",
"path": "WrightTools/data/_solis.py",
"copies": "1",
"size": "3508",
"license": "mit",
"hash": -6146749965863240000,
"line_mean": 28.2333333333,
"line_max": 99,
"alpha_frac": 0.5324971494,
"autogenerated": false,
"ratio": 3.9460067491563553,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.49785038985563557,
"avg_score": null,
"num_lines": null
} |
"""Andrea and Maria each have an array of integers. Andrea wants to change her array to match Maria's. For each element
of her array, she can increment or decrement one digit in one item in one move. How many moves will it take Andrea to
match Maria's array. No reordering of the digits is allowed.
For example, consider two arrays: Andrea's = [123, 543] and Maria's = [321, 279].
For the first integer, Andrea can increment the 1 twice to achieve 3. The 2's are equal already. Finally, she decrements
her 3 twice to equal 1. It took 4 moves to reach her goal.
For the second integer, she decrements 5 three times, increments 4 three times and 3 six times. It took 12 moves to
convert the second array element.
In total, it took 16 moves to convert both values comprising the complete array.
Function Description
Complete the function minimumMoves in the editor below. The function must return the integer number of moves to convert
Andrea's array to match Maria's.
minimumMoves has the following parameter(s):
a[a0,...an-1]: Andreas's array of integers
m[m0,...mn-1]: Maria's array of integers
Constraints
1 ≤ n ≤ 105
1 ≤ ai, mi ≤ 109
The lengths of a and m are equal, |a| = |m|.
The elements ai and mi have an equal number of digits."""
import math
import os
import random
import re
import sys
# Complete the minimumMoves function below.
def minimummoves(an, ma):
    """Return the number of single-digit increment/decrement moves to
    turn integer *an* into integer *ma*.

    The two numbers are assumed to have the same number of digits (per
    the problem statement); the answer is the sum of absolute per-digit
    differences. The count is also printed, preserving the original
    behavior.
    """
    digits_a = [int(ch) for ch in str(an)]
    digits_m = [int(ch) for ch in str(ma)]
    answer = sum(abs(a - b) for a, b in zip(digits_a, digits_m))
    print(answer)
    # IMPROVEMENT: the problem statement says the function "must
    # return" the move count; the original only printed it.
    return answer
# Demo call: per-digit differences of 1234 vs 2345 sum to 4.
minimummoves(1234, 2345)
| {
"repo_name": "pycharmer/py-training",
"path": "algorithms/minimummoves.py",
"copies": "1",
"size": "1666",
"license": "apache-2.0",
"hash": -5249121630485899000,
"line_mean": 29.7037037037,
"line_max": 120,
"alpha_frac": 0.7201447527,
"autogenerated": false,
"ratio": 3.425619834710744,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9639600958117549,
"avg_score": 0.0012327258586390384,
"num_lines": 54
} |
# Andrea Masi 2014 eraclitux@gmail.com
import json
import time
from ipcampy.globals import cam_types
def __parse_args(cam_c):
    """Arrange class init params from conf file.

    Returns: a dict with values."""
    # dict.get() yields None for absent keys, matching the original
    # presence checks.
    return {
        "user": cam_c.get("user"),
        "pswd": cam_c.get("pswd"),
        "name": cam_c.get("name"),
        "address": "{address}:{port}".format(**cam_c),
    }
def load_cams(conf_file):
    """Reads cams conf from file and instantiate appropriate classes.

    Returns: an array of IpCam classes."""
    # The conf file holds one JSON object per line.
    with open(conf_file, "r") as c_file:
        lines = c_file.readlines()
        cams_conf = [json.loads(j) for j in lines]
    cams = []
    for cam_c in cams_conf:
        init_params = __parse_args(cam_c)
        # cam_types maps the "type" string to a camera class.
        cams.append(cam_types[cam_c["type"]](**init_params))
    return cams
def watch(cams, path=None, delay=10):
    """Get screenshots from all cams at defined intervall.

    Loops forever: snaps every camera, then sleeps *delay* seconds.
    """
    while True:
        for camera in cams:
            camera.snap(path)
        time.sleep(delay)
| {
"repo_name": "eraclitux/ipcampy",
"path": "ipcampy/sentry.py",
"copies": "1",
"size": "1164",
"license": "mit",
"hash": -2219697606991500000,
"line_mean": 28.8461538462,
"line_max": 73,
"alpha_frac": 0.5970790378,
"autogenerated": false,
"ratio": 3.1716621253405997,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.42687411631405997,
"avg_score": null,
"num_lines": null
} |
# Andrea Masi 2014 eraclitux@gmail.com
import os
import datetime
def ensure_secret():
    """Check if secret key to encrypt sessions exists,
    generate it otherwise.

    Returns the secret, read from ~/.ipcamweb or freshly generated
    and stored there.
    """
    home_dir = os.environ['HOME']
    file_name = home_dir + "/.ipcamweb"
    if os.path.exists(file_name):
        with open(file_name, "r") as s_file:
            secret = s_file.readline()
    else:
        # NOTE(review): os.urandom returns bytes on Python 3, so the
        # concatenation below only works on Python 2 — confirm target.
        secret = os.urandom(24)
        with open(file_name, "w") as s_file:
            # BUG FIX: the original rebound `secret` to the return value
            # of write(), so a freshly generated key was never returned
            # to the caller. Write the key, keep the value.
            s_file.write(secret + "\n")
    return secret
def list_snapshots_days(path, cam_id):
    """Returns a list of (date, dir) in which snapshopts are present"""
    base = path + "/" + str(cam_id)
    if not os.path.exists(base):
        return []
    days = []
    # Directory names are DDMMYYYY; present them as DD/MM/YY.
    for day_dir in os.listdir(base):
        pretty = datetime.datetime.strptime(day_dir, "%d%m%Y").strftime('%d/%m/%y')
        days.append((pretty, day_dir))
    return days
def list_snapshots_hours(path, cam_id, day):
    """Returns a list of hour/min in which snapshopts are present"""
    base = "/".join([path, str(cam_id), day])
    if not os.path.exists(base):
        return []
    hours = []
    # Directory names are HHMM; present them as HH:MM, sorted.
    for hour_dir in sorted(os.listdir(base)):
        pretty = datetime.datetime.strptime(hour_dir, "%H%M").strftime('%H:%M')
        hours.append((pretty, hour_dir))
    return hours
def list_snapshots_for_a_minute(path, cam_id, day, hourm):
    """Returns a list of screenshots"""
    base = "/".join([path, str(cam_id), day, hourm])
    if not os.path.exists(base):
        return []
    # Sorted file names; sorted() already returns a fresh list.
    return sorted(os.listdir(base))
| {
"repo_name": "eraclitux/ipcampy",
"path": "ipcamweb/utils.py",
"copies": "1",
"size": "1768",
"license": "mit",
"hash": -1123826951785026700,
"line_mean": 33.6666666667,
"line_max": 85,
"alpha_frac": 0.604638009,
"autogenerated": false,
"ratio": 3.4,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9491264966449631,
"avg_score": 0.0026746085100736905,
"num_lines": 51
} |
# Andrei Antonescu
# Initial Date: January 30, 2014
# Last Updated: February 18, 2014
#
# We assume we only use two motors PORT_A & PORT_B
from BrickPi import * #import BrickPi.py file to use BrickPi operations
import math
eps = 1e-8
m1 = PORT_A
m2 = PORT_B
m3 = PORT_C
encToCm = 42 #47 # 53.03 # 42
encToDeg = 6.88 # 6.05
encToDegSonar = 10.20 # 1.12
difRot = 5.6 # 5.6
BrickPiSetup() # setup the serial port for communication
BrickPiSetupSensors() #Send the properties of sensors to BrickPi
def enableSensor(s, stype):
    # Register sensor port *s* with type *stype* and push the new
    # configuration to the BrickPi.
    BrickPi.SensorType[s] = stype
    BrickPiSetupSensors()

def enableMotor(m):
    # Allow motor port *m* to be driven.
    BrickPi.MotorEnable[m] = 1

def disableMotor(m):
    # Stop driving motor port *m*.
    BrickPi.MotorEnable[m] = 0

def setSpeed(m, speed=250):
    # Set the commanded speed for motor port *m*; takes effect on the
    # next update().
    BrickPi.MotorSpeed[m] = speed

def update():
    # Exchange values (speeds out, encoder/sensor readings in).
    BrickPiUpdateValues()

def enc(m):
    # Latest encoder reading for motor port *m* (from the last update()).
    return BrickPi.Encoder[m]

def sensor(s):
    # Latest reading for sensor port *s* (from the last update()).
    return BrickPi.Sensor[s]
def toRad(deg):
    """Convert degrees to radians.

    BUG FIX: under Python 2 the original `deg / 180` truncated for
    integer inputs (e.g. toRad(90) gave 0); dividing by 180.0 keeps
    the arithmetic in floats on both Python 2 and 3.
    """
    return deg / 180.0 * math.pi

def toDeg(rad):
    """Convert radians to degrees."""
    return rad * 180.0 / math.pi
def calibrate(sa, sb, dif, step=5):
    # Adjust the two motor speeds (sa, sb) to cancel the encoder drift
    # *dif* between them; returns the corrected [sa, sb], preserving
    # each speed's sign.
    dif = int(dif)
    # Remember the signs; the correction below works on magnitudes.
    ga = 1 if sa > 0 else -1
    gb = 1 if sb > 0 else -1
    # Both motors running backwards reverses the drift direction.
    if sa < 0 and sb < 0:
        dif *= -1
    sa = mod(sa)
    sb = mod(sb)
    # NOTE(review): this recomputes *step* from dif, so the *step*
    # parameter is effectively ignored — confirm that is intended.
    step = max(dif / 5, -dif / 5)
    if dif > 0:
        # Motor A is ahead: speed up B (capped at 250) and slow A
        # (floored at 30), redistributing any clipped excess.
        ex = min(250 - sb, step)
        sb += ex
        sa = max(30, sa - step + ex)
    elif dif < 0:
        ex = min(250 - sa, step)
        sa += ex
        sb = max(30, sb - step + ex)
    return [sa * ga, sb * gb]
def distMod(enc0, enc1):
    """Absolute difference between two encoder readings."""
    if enc0 < enc1:
        return enc1 - enc0
    return enc0 - enc1
def mod(x):
    """Absolute value of *x* (the builtin abs replaces the hand-rolled
    branch of the original)."""
    return abs(x)
def dist(x1, y1, x2, y2):
    """Euclidean distance between (x1, y1) and (x2, y2).

    IDIOM: math.hypot is the standard, overflow-safe form of
    sqrt(dx*dx + dy*dy).
    """
    return math.hypot(x1 - x2, y1 - y2)
# Returns the distance from a point to a wall segment
def intersect(wall, x, y, theta):
a = (wall[3] - wall[1]) * (wall[0] - x) - (wall[2] - wall[0]) * (wall[1] - y)
b = (wall[3] - wall[1]) * math.cos(theta) - (wall[2] - wall[0]) * math.sin(theta)
m = a / (b if b != 0 else 1e-14)
# If m is less than zero the intersection is behind
if m < 0:
return 9999999
xi = x + m * math.cos(theta)
yi = y + m * math.sin(theta)
#Check if the intersection point lies on the line segment
if xi < min(wall[0],wall[2]) - eps or xi > max(wall[0],wall[2]) + eps:
return 9999999
if yi < min(wall[1],wall[3]) - eps or yi > max(wall[1],wall[3]) + eps:
return 9999999
return m
# Returns the incidence angle from a point to a wall segment
def incidence(wall, x, y, theta):
    """Angle between the heading *theta* and the wall segment's normal."""
    wall_dy = wall[1] - wall[3]
    wall_dx = wall[2] - wall[0]
    numerator = math.cos(theta) * wall_dy + math.sin(theta) * wall_dx
    length = math.sqrt(wall_dy * wall_dy + wall_dx * wall_dx)
    return math.acos(numerator / length)
'''
# Oy parallel wall
if wall[0] == wall[2]:
if abs(theta) < math.pi / 2.0:
return abs(theta)
else:
return math.pi - abs(theta)
# Ox parallel wall
if abs(theta) < math.pi / 2.0:
return math.pi / 2.0 - abs(theta)
return math.pi - abs(theta)
'''
def most_common(lst):
    """Return the element of *lst* with the highest occurrence count."""
    best = None
    best_count = -1
    # Same tie-break as max() over set(lst): first strictly-greater
    # count wins, in set iteration order.
    for candidate in set(lst):
        occurrences = lst.count(candidate)
        if occurrences > best_count:
            best = candidate
            best_count = occurrences
    return best
def median(lst):
    """Return the median of *lst*.

    BUG FIXES relative to the original:
    - the original used 1-based offsets (lst[length / 2 + 1]), picking
      the wrong element for odd lengths and the wrong pair for even
      lengths;
    - `length / 2` only truncates on Python 2; `//` is explicit;
    - the caller's list is no longer sorted in place.
    """
    ordered = sorted(lst)
    length = len(ordered)
    middle = length // 2
    if length % 2:
        return ordered[middle]
    return ordered[middle - 1] * 0.5 + ordered[middle] * 0.5
def move(cm, speed, obstacle=False):
    # Drive straight for *cm* centimetres at *speed*, using encoder
    # feedback (calibrate) to keep the two motors matched.
    # When *obstacle* is True, returns -1 if progress stalls (likely a
    # collision); otherwise returns the overshoot in centimetres.
    enableMotor(m1)
    enableMotor(m2)
    setSpeed(m1, 0)
    setSpeed(m2, 0)
    update()
    a = enc(m1)
    b = enc(m2)
    # Baseline encoder difference; drift from this is corrected below.
    normal = a - b
    speed1 = speed
    speed2 = speed
    ind = 0
    cnt = 0
    sumLastEnc = 0.0
    while cnt / encToCm < cm:
        ind += 1
        setSpeed(m1, speed1)
        setSpeed(m2, speed2)
        update()
        # get new encoder values
        stepEnc = (mod(a - enc(m1)) + mod(b - enc(m2))) / 2.0
        sumLastEnc += stepEnc
        cnt += stepEnc
        # Try to guess if something is going wrong
        if obstacle == True and ind % 10 == 0:
            # Stall heuristic: the mean progress over the last 10 ticks
            # dropped below 80% of the running average.
            meanEnc = sumLastEnc / 10.0
            sumLastEnc = 0.0
            print meanEnc, cnt / ind, stepEnc
            if meanEnc < (cnt / ind) * 0.8:
                return -1
        # Replace old encoder values
        a = enc(m1)
        b = enc(m2)
        dif = a - b
        spds = calibrate(speed, speed, dif - normal)
        speed1 = spds[0]
        speed2 = spds[1]
        #print dif - normal, speed1, speed2, '#', stepEnc
    setSpeed(m1, 0)
    setSpeed(m2, 0)
    update()
    return cnt / encToCm - cm
def rotate(deg, speed):
    # Rotate in place by *deg* degrees (the sign selects the direction)
    # at the given speed; returns the overshoot in degrees.
    enableMotor(m1)
    enableMotor(m2)
    setSpeed(m1, 0)
    setSpeed(m2, 0)
    update()
    a = enc(m1)
    b = enc(m2)
    normal = a - b
    # Motors spin in opposite directions to turn on the spot.
    step = -difRot if deg > 0 else difRot
    inispeed1 = -speed if deg > 0 else speed
    inispeed2 = speed if deg > 0 else -speed
    deg = mod(deg)
    speed1 = inispeed1
    speed2 = inispeed2
    cnt = 0
    while cnt / encToDeg < deg:
        setSpeed(m1, speed1)
        setSpeed(m2, speed2)
        update()
        cnt += (mod(a - enc(m1)) + mod(b - enc(m2))) / 2.0
        a = enc(m1)
        b = enc(m2)
        dif = a - b
        # Expected increase
        #normal += step
        # NOTE(review): the calibration feedback below is disabled, so
        # dif and step are currently unused — confirm intent.
        #spds = calibrate(inispeed1, inispeed2, dif - normal)
        #speed1 = spds[0]
        #speed2 = spds[1]
        #print normal, dif, speed1, speed2
    setSpeed(m1, 0)
    setSpeed(m2, 0)
    update()
    return cnt / encToDeg - deg
def rotate_sonar(deg, speed = 250):
    # Rotate the sonar motor (port C) by *deg* degrees; returns the
    # overshoot in degrees.
    # NOTE(review): for deg < 0 the while condition is false from the
    # start (cnt begins at 0), so the loop never runs even though the
    # speed sign is flipped — confirm how negative angles are handled.
    enableMotor(m3)
    setSpeed(m3, 0)
    speed = -speed if deg < 0 else speed
    update()
    c = enc(m3)
    cnt = 0
    while cnt / encToDegSonar < deg:
        #print 'iter', cnt / encToDegSonar, deg
        setSpeed(m3, speed)
        update()
        cnt += mod(c - enc(m3)) * 1.0
        c = enc(m3)
    setSpeed(m3, 0)
    update()
    return cnt / encToDegSonar - deg
| {
"repo_name": "andrei-alpha/robotics",
"path": "utils.py",
"copies": "1",
"size": "5342",
"license": "mit",
"hash": 3601251321875410000,
"line_mean": 21.5400843882,
"line_max": 104,
"alpha_frac": 0.5909771621,
"autogenerated": false,
"ratio": 2.5944633317144246,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.3685440493814424,
"avg_score": null,
"num_lines": null
} |
# Andrei Antonescu
# Initial Date: January 30, 2014
# Last Updated: January 30, 2014
#
# We assume we only use two motors PORT_A & PORT_B
from BrickPi import * #import BrickPi.py file to use BrickPi operations
m1 = PORT_A
m2 = PORT_B
BrickPiSetup() # setup the serial port for communication
BrickPiSetupSensors() #Send the properties of sensors to BrickPi
def enableMotor(m):
    # Allow motor port *m* to be driven.
    BrickPi.MotorEnable[m] = 1

def disableMotor(m):
    # Stop driving motor port *m*.
    BrickPi.MotorEnable[m] = 0

def setSpeed(m, speed=250):
    # Set the commanded speed for motor port *m*; applied on update().
    BrickPi.MotorSpeed[m] = speed

def update():
    # Exchange values (speeds out, encoder readings in) with the BrickPi.
    BrickPiUpdateValues()

def enc(m):
    # Latest encoder reading for motor port *m* (from the last update()).
    return BrickPi.Encoder[m]
def calibrate(sa, sb, dif, step=5):
    # Adjust motor speeds (sa, sb) to cancel the encoder drift *dif*,
    # preserving each speed's sign; returns the corrected [sa, sb].
    ga = 1 if sa > 0 else -1
    gb = 1 if sb > 0 else -1
    sa = max(sa, -sa)
    sb = max(sb, -sb)
    # NOTE(review): *step* is recomputed from dif, so the parameter is
    # effectively ignored — confirm intended.
    step = max(dif / 5, -dif / 5)
    if dif > 0:
        # Motor A is ahead: speed up B (capped at 250) and slow A
        # (floored at 50), redistributing any clipped excess.
        ex = min(250 - sb, step)
        sb += ex
        sa = max(50, sa - step + ex)
    elif dif < 0:
        ex = min(250 - sa, step)
        sa += ex
        sb = max(50, sb - step + ex)
    return [sa * ga, sb * gb]
def dist(enc0, enc1):
    """Absolute difference between two encoder readings."""
    if enc0 < enc1:
        return enc1 - enc0
    return enc0 - enc1
def move(cm, speed):
    # Drive straight for *cm* centimetres at *speed*, using encoder
    # feedback to keep the motors matched (42 encoder ticks per cm,
    # presumably — confirm against the robot's wheel geometry).
    enableMotor(m1)
    enableMotor(m2)
    setSpeed(m1, 0)
    setSpeed(m2, 0)
    update()
    a = enc(m1)
    b = enc(m2)
    # Baseline encoder difference; drift from this is corrected below.
    normal = a - b
    speed1 = speed
    speed2 = speed
    cnt = 0
    while cnt / 42 < cm:
        setSpeed(m1, speed1)
        setSpeed(m2, speed2)
        update()
        cnt += (dist(a, enc(m1)) + dist(b, enc(m2))) / 2.0
        a = enc(m1)
        b = enc(m2)
        dif = a - b
        spds = calibrate(speed, speed, dif - normal)
        speed1 = spds[0]
        speed2 = spds[1]
        #print dif - normal, speed1, speed2
        time.sleep(.1)
    # NOTE(review): the motors are left at the last commanded speed
    # when the loop exits — confirm the caller stops them.
def rotate(deg):
    """Spin in place by roughly `deg` degrees, driving the two motors in
    opposite directions and keeping them balanced.

    NOTE(review): for negative `deg` the loop condition (0 < deg) is false
    immediately, so the robot never moves -- confirm whether negative
    rotation was intended to work.
    """
    enableMotor(m1)
    enableMotor(m2)
    setSpeed(m1, 0)
    setSpeed(m2, 0)
    update()
    a = enc(m1)
    b = enc(m2)
    # Initial encoder offset; only new drift is corrected.
    normal = a - b
    # Full speed, opposite directions; the sign pair picks the spin direction.
    inispeed1 = 250 if deg > 0 else -250
    inispeed2 = -250 if deg > 0 else 250
    speed1 = inispeed1
    speed2 = inispeed2
    cnt = 0
    # 6.5 encoder ticks appear to correspond to one degree -- TODO confirm.
    while cnt / 6.5 < deg:
        setSpeed(m1, speed1)
        setSpeed(m2, speed2)
        update()
        cnt += (dist(a, enc(m1)) + dist(b, enc(m2))) / 2.0
        a = enc(m1)
        b = enc(m2)
        dif = a - b
        spds = calibrate(inispeed1, inispeed2, dif - normal)
        speed1 = spds[0]
        speed2 = spds[1]
        #print dif - normal, speed1, speed2
        # NOTE(review): `time` presumably comes from `from BrickPi import *`.
        time.sleep(.1)
"""
while True:
print "Spin right"
BrickPi.MotorSpeed[PORT_A] = 200 #Set the speed of MotorA (-255 to 255)
BrickPi.MotorSpeed[PORT_B] = -200 #Set the speed of MotorB (-255 to 255)
ot = time.time()
while(time.time() - ot < 20): #running while loop for 3 seconds
BrickPiUpdateValues() # Ask BrickPi to update values for sensors/motors
time.sleep(.1) # sleep for 100 ms
print "Spin left"
BrickPi.MotorSpeed[PORT_A] = -200 #Set the speed of MotorA (-255 to 255)
BrickPi.MotorSpeed[PORT_B] = 200 #Set the speed of MotorB (-255 to 255)
ot = time.time()
while(time.time() - ot < 20): #running while loop for 3 seconds
BrickPiUpdateValues() # Ask BrickPi to update values for sensors/motors
time.sleep(.1) # sleep for 100 ms
"""
| {
"repo_name": "andrei-alpha/robotics",
"path": "samples/motor_rotate.py",
"copies": "1",
"size": "3083",
"license": "mit",
"hash": -3700901736813010400,
"line_mean": 22.3560606061,
"line_max": 85,
"alpha_frac": 0.5974699968,
"autogenerated": false,
"ratio": 2.7115215479331574,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8606824377294197,
"avg_score": 0.04043343348779219,
"num_lines": 132
} |
# Andres Felipe Gomez
# Not Finished! :(
# 03/28/2014
import pickle
import math
def posIntersect(termList_X,termList_Y,k):
    """Merge two positional postings lists and collect the ids of documents
    in which the two terms occur within k positions of each other.

    Each termList_* looks like [df, [docId, tf, pos...], ...]; the merge
    advance pattern assumes postings are sorted by docId, so the scan
    starts at index 1 (slot 0 holds the document frequency).
    """
    a = 1;
    b = 1;
    intersection = [];
    while a < len(termList_X) and b < len(termList_Y):
        # Hand the longer posting to posIntersectFind as its first argument.
        if len(termList_X[a]) < len(termList_Y[b]):
            posArray_A = termList_Y[b]
            posArray_B = termList_X[a]
        else:
            posArray_A = termList_X[a]
            posArray_B = termList_Y[b]
        # print "docs: %d = %d " %(posArray_A[0],posArray_B[0])
        if posArray_A[0] == posArray_B[0]:
            # Same document: look for a pair of positions within k.
            doc = posIntersectFind(posArray_A,posArray_B,k);
            # NOTE(review): posIntersectFind returns 0 for "no match", so a
            # real document id of 0 would be silently dropped here.
            if (doc != 0 ) :
                intersection.append(doc)
            a += 1;
            b += 1;
        elif termList_X[a][0] < termList_Y[b][0]:
            a += 1;
        else :
            b += 1;
    return intersection
def posIntersectFind(posArray_A,posArray_B,k):
    """Scan two positional postings (for the same document) for a pair of
    term positions within k of each other.

    posArray_* layout: [docId, tf, pos1, pos2, ...]; the give-up logic
    below assumes positions are sorted ascending.  Returns the document id
    (posArray_A[0]) on the first near-enough pair, 0 if none is found.
    """
    i = 2
    j = 2
    while i < len(posArray_A):
        while j < len(posArray_B):
            if abs(posArray_A[i] - posArray_B[j]) <= k:
                return posArray_A[0]
            elif posArray_B[j] > posArray_A[i]:
                # Positions sorted: nothing later in B can pair with A[i];
                # the original abandons the whole scan at this point.
                i = len(posArray_A)
                break
            j += 1
        else:
            # BUG FIX: when B was exhausted without a match the original
            # left `i` untouched and spun forever in the outer loop.  Since
            # `j` is never rewound, no further match is possible -- stop.
            break
    return 0
def posIntersectListArray(termList_A,termArray_B,k):
    """Intersect the doc ids of a postings list with a sorted doc-id array.

    termList_A is [df, [docId, tf, ...], ...] (slot 0 is the document
    frequency, hence the scan starts at 1); termArray_B is a sorted list of
    doc ids.  `k` is unused, kept for signature parity with the other
    intersect helpers.  Returns the sorted list of common doc ids.
    """
    a = 1
    b = 0
    intersection = []
    while a < len(termList_A) and b < len(termArray_B):
        # BUG FIX: the original compared against termArray_B[0] even though
        # the `b` cursor advances; index with `b`, exactly as the parallel
        # helper posIntersectArray does.
        if termList_A[a][0] == termArray_B[b]:
            intersection.append(termList_A[a][0])
            a += 1
            b += 1
        elif termList_A[a][0] < termArray_B[b]:
            a += 1
        else:
            b += 1
    return intersection
def posIntersectArray(termArray_A,termArray_B,k):
    """Classic merge-style intersection of two sorted doc-id arrays.

    `k` is unused; it is kept so all the intersect helpers share one
    signature.  Returns the sorted list of ids present in both arrays.
    """
    common = []
    a = b = 0
    while a < len(termArray_A) and b < len(termArray_B):
        left = termArray_A[a]
        right = termArray_B[b]
        if left == right:
            common.append(left)
            a += 1
            b += 1
        elif left < right:
            a += 1
        else:
            b += 1
    return common
def tfIdf(termList, N, queryDocs):
    """Compute tf-idf weights for one term, restricted to docs in queryDocs.

    termList[0] holds the document frequency; each later entry is a
    [docId, tf, ...] posting.  N is the collection size.  Returns a
    {docId: tf * idf} dict for the docs that appear in queryDocs.
    """
    df = termList[0]
    idf = math.log(N / float(df))
    print("tfIdf : %d * %f" % (df, idf))
    weights = {}
    # Skip slot 0 (the df); weight only the postings for requested docs.
    for posting in termList[1:]:
        if posting[0] in queryDocs:
            weights[posting[0]] = posting[1] * float(idf)
    return weights
def euclidianNormalized(tfIdfArrayDic, termsDoc):
    """Compute the Euclidean magnitude of each document's tf-idf vector.

    tfIdfArrayDic: list of {docId: weight} dicts, one per query term.
    termsDoc: the doc ids to process.  Returns one magnitude per doc, in
    termsDoc order.

    NOTE(review): the author marked this file "Not Finished" -- the
    normalized vectors are built but discarded (only `magnitude` is
    returned), `i` is never advanced so every term is divided by
    magnitude[0], and dict.iteritems() is Python 2 only.
    """
    # print tfIdfArrayDic
    magnitude = []
    normalizedArray = []
    for doc in termsDoc:
        result = 0
        # Sum this doc's squared weight across every term vector.
        # NOTE(review): raises KeyError unless every term dict has an entry
        # for every doc in termsDoc -- confirm against tfIdf's output.
        for term in tfIdfArrayDic:
            result += math.pow(float(term[doc]),2)
        magnitude.append(math.sqrt(result))
        del normalizedArray[:]
    for term in tfIdfArrayDic:
        normalized = {}
        i = 0
        # print "term"
        for key,value in term.iteritems():
            normalized[key] = float(value) / float(magnitude[i])
        normalizedArray.append(normalized);
        # print normalized
    # print "magnitude"
    # print magnitude
    return magnitude
if __name__ == '__main__':
    # Loads the pickled positional index and the list of document names.
    indexFile = open("posIndex.dat", "rb");
    # docsFile = "docs.txt";
    docsFile = open("docs.txt")
    data = docsFile.readlines()
    docsFile.close()
    docsList =[]
    for n, line in enumerate(data, 1):
        docsList.append(line.rstrip());
        # print '{:2}.'.format(n), line.rstrip()
    posIndex = pickle.load(indexFile);
    indexFile.close();
    # NOTE(review): docsFile is closed a second time here (harmless).
    docsFile.close();
    # Hard-coded demo query; interactive entry is commented out.
    query = "state middle university"
    # query = raw_input('Please enter your query: ');
    queryTerms = ' '.join(query.split());
    queryTerms = queryTerms.split(' ');
    # let k (the proximity window) start at the size of the query terms
    k = len(queryTerms);
    i = 0;
    for term in queryTerms:
        queryTerms[i] = term.lower();
        i += 1
    # Collect the postings list for every query term found in the index.
    termIndex = []
    for term in queryTerms:
        if term in posIndex.keys():
            termInDoc = []
            # print "%s -->\t %s\n" % (term, posIndex[term]);
            for docId in posIndex[term]:
                termInDoc.append(docId);
                # print (docId);
            termIndex.append(termInDoc);
            i = i +1;
        # else:
        #     print "%s -->\n" % (term);
    # NOTE(review): everything below assumes at least three query terms
    # made it into termIndex -- IndexError otherwise.
    M = 0
    while M < 10:
        # Pairwise proximity intersections, then the three-way intersection.
        l1 = posIntersect(termIndex[0],termIndex[1],k);
        l2 = posIntersect(termIndex[0],termIndex[2],k);
        l3 = posIntersect(termIndex[1],termIndex[2],k);
        l12 = posIntersectArray(l1,l2,k);
        l123 = posIntersectArray(l12,l3,k);
        print l123
        M = len(l123);
        tfIdfArray = [];
        if ( M>=10 ):
            # Enough hits: score them with tf-idf and normalize.
            for termList in termIndex:
                tfIdfArray.append(tfIdf(termList, len(docsList), l123));
            print tfIdfArray
            euclidianNormalized(tfIdfArray, l123);
            # Compute scores using qqq.ddd = ltn.ltc
        else:
            # NOTE(review): the plan in the comment below says k = 1.5k,
            # but this *shrinks* k, which can only reduce the number of
            # matches -- the loop may never terminate.
            k *= 0.5;
    print "# Andres Felipe Gomez \nNot Finished! \n03/28/2014"
    print "out"
    # find intersection l12 and l3
    #if size of l123 > 10 compute score
    #else k= 1.5k and repeat
    #Show results decreasing order of ranking, Ranking - URL
| {
"repo_name": "anfego/search_IR",
"path": "queryAnswer.py",
"copies": "1",
"size": "4873",
"license": "mit",
"hash": 4641505936510449000,
"line_mean": 19.3891213389,
"line_max": 60,
"alpha_frac": 0.6150215473,
"autogenerated": false,
"ratio": 2.5274896265560165,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.3642511173856017,
"avg_score": null,
"num_lines": null
} |
#Andrew Dhawan
#Sept 8 2016
#count_cycles.py
#This code enables the count of unique cycles of a particular length
#implements a DFS on an adjacency matrix for a weighted graph.
#Element (i,j)=1 of the adjacency matrix indicates a directed edge
#from node j to node i.
import numpy as np
def find_path_to_vertex(graph, cur_vert, dest_vert, itera, start, cur_traj):
    """DFS that bumps the module-global `count` once for every walk of
    exactly `itera` edges from cur_vert back to dest_vert that never
    repeats an intermediate vertex.

    graph[i, j] == 1 means a directed edge from node j to node i.
    cur_traj records visited vertices as concatenated digit strings, so
    this only works for graphs whose node labels are single digits.
    `start` is 1 on the initial call and 0 on every recursive call.
    """
    global count
    # Out of steps: it counts as a cycle only if we are back at the goal.
    if itera == 0:
        if cur_vert == dest_vert:
            count += 1
        return 0
    # Returned to the destination too early on a recursive call -- dead end.
    if cur_vert == dest_vert and start == 0:
        return 0
    if start == 0:
        cur_traj += str(cur_vert)
    # Recurse into every unvisited successor of the current vertex.
    column = graph[:, cur_vert]
    for nxt in range(len(column)):
        if itera > 0 and column[nxt] == 1 and str(nxt) not in cur_traj:
            find_path_to_vertex(graph, nxt, dest_vert, itera - 1, 0, cur_traj)
    return 0
#adjacency matrix for the graph:
#G = np.array([[0,0,0,1],[0,0,0,1],[1,1,0,1],[0,1,0,0]])
G = np.array([[0, 0, 0, 0, 0, 0, 1, 0, 1, 0], [0, 0, 0, 0, 0, 1, 1, 0, 1, 0], [0, 0, 0, 0, 0, 0, 1, 0, 1, 1],[0, 0, 0, 0, 0, 1, 1, 1, 1, 0],[0, 0, 0, 0 ,0, 0, 1, 0, 1, 1],[0, 0, 0, 0, 0 ,0, 1, 0, 0, 0],[0, 0, 0, 0, 0, 1, 0, 0, 0, 1],[0, 0, 0 ,0 ,1 ,1, 1, 0, 1, 0],[1, 0 ,0, 1 ,0, 1, 1, 1, 0, 0],[1, 1, 1, 1, 0, 1, 1, 0, 1, 0]])
# Count and print the number of unique cycles of each length from 2 to 8.
# (The original repeated this block by hand once per length; the output is
# identical.)  The DFS tallies each cycle once per participating starting
# vertex, so the raw count is divided by the cycle length before printing.
for cycle_length in range(2, 9):
    count = 0
    for start_vert in range(len(G[:, 1])):
        find_path_to_vertex(G, start_vert, start_vert, cycle_length, 1, "")
    print(count / cycle_length)
#find all cycles of length 9
count = 0
for i in range(len(G[:,1])):
find_path_to_vertex(G, i, i, 9,1,"")
print(count/9) | {
"repo_name": "andrewdhawan/alk-collateral-sensitivity",
"path": "count_cycles.py",
"copies": "1",
"size": "2252",
"license": "mit",
"hash": 856495963935038100,
"line_mean": 21.3069306931,
"line_max": 327,
"alpha_frac": 0.5910301954,
"autogenerated": false,
"ratio": 2.0585009140767823,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.7852677266207647,
"avg_score": 0.059370768653827045,
"num_lines": 101
} |
import pymongo
# gets the next number in a sequence
def get_next_sequence_number(name):
    """Atomically decrement and return the counter stored under
    `type == name` in the test.counters collection.

    The counter document is created on first use (upsert=True) and the
    post-decrement value is returned (ReturnDocument.AFTER), so repeated
    calls on a fresh counter yield -1, -2, -3, ...

    Raises whatever pymongo raises if the update fails.
    """
    connection = pymongo.MongoClient("mongodb://localhost")
    try:
        counters = connection.test.counters
        # find_one_and_update maps to MongoDB's findAndModify command; see
        # also the find_one_and_delete / find_one_and_replace variants.
        counter = counters.find_one_and_update(
            filter={'type': name},
            update={'$inc': {'value': -1}},
            upsert=True,
            return_document=pymongo.ReturnDocument.AFTER)
    except Exception as e:
        # BUG FIX: the original printed the exception and then fell through
        # to read an undefined `counter`, crashing with a confusing
        # NameError.  Log and re-raise instead.
        print("Exception: ", type(e), e)
        raise
    finally:
        # BUG FIX: the original never closed the client connection.
        connection.close()
    return counter['value']
# Demo: three consecutive calls, each printing the next decremented value
# (-1, -2, -3 when the 'uid' counter starts fresh).
print (get_next_sequence_number('uid'))
print (get_next_sequence_number('uid'))
print (get_next_sequence_number('uid'))
| {
"repo_name": "nesterione/experiments-of-programming",
"path": "MongoDB/Python/Week2/Classroom/using_find_and_modify.py",
"copies": "1",
"size": "1061",
"license": "apache-2.0",
"hash": 4367894597840134000,
"line_mean": 30.2058823529,
"line_max": 92,
"alpha_frac": 0.5928369463,
"autogenerated": false,
"ratio": 4.1936758893280635,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5286512835628063,
"avg_score": null,
"num_lines": null
} |
#Andrew MacInnes
import mediacloud, datetime, logging, unittest
logging.basicConfig(filename="logger.log", filemode = 'w', level=logging.INFO)
def activateNow():
    """Query Media Cloud for the yearly count of US mainstream-media
    sentences mentioning Facebook, for each year 2000 through 2015.

    Returns a 16-tuple of sentenceCount results in chronological order,
    matching the original res2000..res2015 unpacking.  Each window runs
    from Dec 31 of the previous year to Dec 31 of the year itself, except
    2015, which ends on 2015-11-18 (the original collection date).
    """
    mc = mediacloud.api.MediaCloud('API goes here')
    logging.info('Successful implementation.')
    results = []
    # The original spelled out sixteen near-identical queries by hand; this
    # loop issues the same sixteen queries with the same date windows.
    for year in range(2000, 2016):
        start = datetime.date(year - 1, 12, 31)
        if year == 2015:
            end = datetime.date(2015, 11, 18)
        else:
            end = datetime.date(year, 12, 31)
        results.append(mc.sentenceCount(
            '(Facebook)',
            solr_filter=[mc.publish_date_query(start, end),
                         'media_sets_id:1']))
    return tuple(results)
# Run all sixteen yearly queries once and unpack the results.
res2000, res2001, res2002, res2003, res2004, res2005, res2006, res2007, res2008, res2009, res2010, res2011, res2012, res2013, res2014, res2015 = activateNow()
# Print one "<year> <sentence count>" line per year (Python 2 prints).
print "2000 %s " % res2000['count']
print "2001 %s " % res2001['count']
print "2002 %s " % res2002['count']
print "2003 %s " % res2003['count']
print "2004 %s " % res2004['count']
print "2005 %s " % res2005['count']
print "2006 %s " % res2006['count']
print "2007 %s " % res2007['count']
print "2008 %s " % res2008['count']
print "2009 %s " % res2009['count']
print "2010 %s " % res2010['count']
print "2011 %s " % res2011['count']
print "2012 %s " % res2012['count']
print "2013 %s " % res2013['count']
print "2014 %s " % res2014['count']
print "2015 %s " % res2015['count']
| {
"repo_name": "amacinn/MAS500",
"path": "homework2.py",
"copies": "1",
"size": "3746",
"license": "mit",
"hash": -676208755728137600,
"line_mean": 75.4489795918,
"line_max": 158,
"alpha_frac": 0.7058195408,
"autogenerated": false,
"ratio": 2.818660647103085,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4024480187903085,
"avg_score": null,
"num_lines": null
} |
#Andrew Michaud
#12/3/11
#CS 5 Green
#Picobot Project Milestone
import random
# Board dimensions, number of controller states, and fitness-test settings.
ROWS = 20
COLUMNS = 20
STATES = 5
TRIALS = 20
STEPS = 800
# Every wall pattern Picobot can observe (N/E/W/S wall present, 'x' = open).
allowedPatterns = ['xxxx','Nxxx','NExx','NxWx','xxxS','xExS','xxWS','xExx',
                   'xxWx']
class Program:
    """A Picobot rule table mapping (state, wall-pattern) to (move, state)."""
    def __init__(self):
        """Initializes a program by creating an empty rules dictionary"""
        self.rulesDict = {}
    def randomize(self):
        """Randomizes a program, filling its dictionary with a set of rules
        that are random but cover every possible Picobot state"""
        # These are all possible directions for movement.
        directions = ['N', 'E', 'W', 'S', 'X']
        # Build one rule for every (pattern, state) situation Picobot can
        # find itself in.
        for pattern in allowedPatterns:
            for state in range(STATES):
                # Remove the directions blocked by walls in this pattern so
                # the generated rule is always physically possible.
                for char in pattern:
                    if char in directions:
                        directions.remove(char)
                # Key on the situation; the value is a random legal move
                # plus a random next state.
                current = (state, pattern)
                newDirection = random.choice(directions)
                newState = random.choice(range(STATES))
                self.rulesDict[current] = (newDirection, newState)
                # The directions are reset for the next time through the loop.
                directions = ['N', 'E', 'W', 'S', 'X']
    def getMove(self, state, pattern):
        """Given a Picobot's current state, finds its next move"""
        # NOTE(review): despite the name, this returns the whole rule
        # outcome -- a (move, next_state) tuple.
        newState = self.rulesDict[(state, pattern)]
        return newState
    def mutate(self):
        """Chooses a rule at random and changes the move and new state"""
        # NOTE(review): random.choice over dict.keys() works on Python 2
        # only, where keys() returns a list.
        victim = random.choice(self.rulesDict.keys())
        directions = ['N', 'E', 'W', 'S', 'X']
        # The old value for that key is stored.
        oldNext = self.rulesDict[victim]
        newNext = oldNext
        # This while loop prevents the same rule from being chosen again.
        while newNext == oldNext:
            # Drop the directions this key's pattern (victim[1]) blocks,
            # then redraw a move and state.
            for char in victim[1]:
                if char!='x' and char in directions:
                    directions.remove(char)
            randDirection = random.choice(directions)
            randState = random.choice(range(STATES))
            newNext = (randDirection, randState)
        # The key is assigned the new value the method has chosen.
        self.rulesDict[victim] = newNext
    def crossover(self, other):
        """Creates an offspring program based on two parent programs (self
        and other)"""
        # A random state is chosen for the crossover point.
        crossState = random.choice(range(STATES))
        offspring = Program()
        # Rules for states at or below the crossover point come from self...
        for current in self.rulesDict.keys():
            if current[0] <= crossState:
                offspring.rulesDict[current] = self.rulesDict[current]
        # ...and rules for states above it come from other.
        for current in other.rulesDict.keys():
            if current[0] > crossState:
                offspring.rulesDict[current] = other.rulesDict[current]
        # The offspring is returned to whoever called the function.
        return offspring
    def __repr__(self):
        """Prints out the code representing a program"""
        # This is the string that will hold the program.
        code = ""
        # Sort the keys so the emitted rules are grouped and readable.
        # NOTE(review): in-place .sort() on keys() is Python 2 only.
        keyList=self.rulesDict.keys()
        keyList.sort()
        for item in keyList:
            # Format each rule exactly the way the Picobot simulator
            # expects, so the output can be pasted straight in.
            first = str(item[0])+" "+str(item[1])
            second = str(self.rulesDict[item][0])+" "+str(self.rulesDict[item][1])
            code+=first + " -> " + second+"\n"
        return code
#Testing the Program class.
#I created two new programs p1 and p2. They were started with empty
#dictionaries, and returned nothing if called. I then randomized them to get
#two random programs. I also got to test the __repr__ method in finding what
#the results were so I could paste them here.
##p1:
##0 NExx -> X 0
##0 NxWx -> E 1
##0 Nxxx -> W 1
##0 xExS -> W 1
##0 xExx -> N 1
##0 xxWS -> X 1
##0 xxWx -> N 0
##0 xxxS -> W 0 This rule is mutated below.
##0 xxxx -> E 0
##1 NExx -> W 1
##1 NxWx -> X 1
##1 Nxxx -> W 1
##1 xExS -> X 1
##1 xExx -> X 0
##1 xxWS -> E 1
##1 xxWx -> X 1
##1 xxxS -> E 0
##1 xxxx -> E 0
##p2:
##0 NExx -> X 0
##0 NxWx -> X 0
##0 Nxxx -> S 0
##0 xExS -> N 0
##0 xExx -> X 0
##0 xxWS -> E 1
##0 xxWx -> E 1
##0 xxxS -> E 1
##0 xxxx -> X 1
##1 NExx -> W 1
##1 NxWx -> S 1 This rule was mutated below.
##1 Nxxx -> X 0
##1 xExS -> N 1
##1 xExx -> X 0
##1 xxWS -> X 0
##1 xxWx -> E 1
##1 xxxS -> E 0
##1 xxxx -> X 1
#I mutated them next to test my mutate() function.
##p1:
##0 NExx -> X 0
##0 NxWx -> E 1
##0 Nxxx -> W 1
##0 xExS -> W 1
##0 xExx -> N 1
##0 xxWS -> X 1
##0 xxWx -> N 0
##0 xxxS -> N 1 This rule was mutated.
##0 xxxx -> E 0
##1 NExx -> W 1
##1 NxWx -> X 1
##1 Nxxx -> W 1
##1 xExS -> X 1
##1 xExx -> X 0
##1 xxWS -> E 1
##1 xxWx -> X 1
##1 xxxS -> E 0
##1 xxxx -> E 0
##p2:
##0 NExx -> X 0
##0 NxWx -> X 0
##0 Nxxx -> S 0
##0 xExS -> N 0
##0 xExx -> X 0
##0 xxWS -> E 1
##0 xxWx -> E 1
##0 xxxS -> E 1
##0 xxxx -> X 1
##1 NExx -> W 1
##1 NxWx -> S 0 This rule was mutated.
##1 Nxxx -> X 0
##1 xExS -> N 1
##1 xExx -> X 0
##1 xxWS -> X 0
##1 xxWx -> E 1
##1 xxxS -> E 0
##1 xxxx -> X 1
#Everything seems to be working there.
#Finally, I tested the crossover function, as "p1.crossover(p2)"
#The offspring:
##0 NExx -> X 0 It appears the function chose 0 as the crossover state.
##0 NxWx -> E 1 This would take every rule with a state less than or equal to
##0 Nxxx -> W 1 0 from p1 (which appears to have happened), and every rule with
##0 xExS -> W 1 a state greater than 0 from p2 (which also appears to have
##0 xExx -> N 1 happened. Everything seems in order here as well.
##0 xxWS -> X 1
##0 xxWx -> N 0
##0 xxxS -> N 1
##0 xxxx -> E 0
##1 NExx -> W 1
##1 NxWx -> S 0
##1 Nxxx -> X 0
##1 xExS -> N 1
##1 xExx -> X 0
##1 xxWS -> X 0
##1 xxWx -> E 1
##1 xxxS -> E 0
##1 xxxx -> X 1
| {
"repo_name": "andrewmichaud/picobot-GA",
"path": "milestone.py",
"copies": "1",
"size": "7790",
"license": "mit",
"hash": 7868360413167456000,
"line_mean": 31.5775862069,
"line_max": 82,
"alpha_frac": 0.5643132221,
"autogenerated": false,
"ratio": 3.354866494401378,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9229803922974709,
"avg_score": 0.037875158705333714,
"num_lines": 232
} |
# Andrew Michaud
# Original:
# 11/23/11-12/8/11
# Updated/cleaned:
# 17 Feb 2015
# CS 5 Green
# Final Project
# Comments on my project:
# I tested my code mostly by just running the GA function (usually with 200
# individuals and 10-20 generations), and observing what happened. I tried
# changing several variables around, especially the mutation rate and top
# fraction. I also tried messing with the number of states. Some things I
# noticed: A higher mutation rate got good programs quickly, but was also
# likely to mess up those good programs. Fewer states led to worse programs,
# probably because the function had a harder time making good programs with
# fewer states.
# The best program I ever got was a .95 fitness program. It consistently
# covered .95 of the board, regardless of the starting point, and only ignored
# a strip on one wall of the board (the one behind its direction of motion. It
# moved up and down to cross the room, moving left, and moved all the way to
# the right after reaching the left wall. It did some weird motion that made it
# skip some of the right wall, and then repeated itself. I included it in code
# below, as a program and Picobot. It can be called as bestPicobot.
import random
import time
import math
import visual
from Vector import *
from Shapes import *
# These are various global variables that govern fitness testing and the
# generation of Program and Picobot objects. Specifically:
# These are used to make Picobot and Program objects. There are 23 rows and
# columns because that is the size of the online Picobot grid.
# Board dimensions (23x23 matches the online Picobot grid) and controller
# state count.
ROWS = 23
COLUMNS = 23
STATES = 5
# Every wall pattern Picobot can observe (N/E/W/S wall present, 'x' = open).
allowedPatterns = ['xxxx', 'Nxxx', 'NExx', 'NxWx', 'xxxS', 'xExS', 'xxWS',
                   'xExx', 'xxWx']
# Fitness-evaluation and next-generation settings.
TRIALS = 20
STEPS = 500
MUTATIONRATE = 0.02
TOPFRACTION = 0.2
# Room type used by gridMaker/keyMaker.  Ideally any room type could go
# here; only "Empty Room" is currently implemented (maze support was
# removed).
GRID = "Empty Room"
def GA(popSize, numGen):
    """Runs the genetic algorithm with a set population size and number of
    generations"""
    # Report the test parameters to the user (Python 2 print statements).
    print "Grid size is "+str(ROWS)+" rows by "+str(COLUMNS)+" columns."
    print "Fitness is measured using "+str(TRIALS)+" random trials and "\
        + str(STEPS) + " steps."
    # Seed the run with a fully random population.
    programs = newPop(popSize)
    totalFitness = 0
    avgFitness = 0
    bestFitness = 0
    for gen in range(numGen):
        fitnessDict = {}
        # Evaluate everyone, keyed by fitness so the best program can be
        # recovered later.
        # NOTE(review): programs with equal fitness collide on the same key
        # and overwrite each other.
        for program in programs:
            fitness = evaluateFitness(program, TRIALS, STEPS)
            fitnessDict[fitness] = program
        # Sort fitnesses from best to worst (Python 2: keys() is a list).
        fitnessList = fitnessDict.keys()
        fitnessList.sort()
        fitnessList.reverse()
        # Track the champion and the generation's aggregate fitness.
        bestFitness = max(fitnessList)
        bestBot = Picobot(10, 10, fitnessDict[bestFitness])
        totalFitness = sum(fitnessList)
        avgFitness = totalFitness/popSize
        print "Generation "+str(gen)
        print " Average fitness: "+str(avgFitness)
        print " Best fitness: "+str(bestFitness)+"\n"
        # Breed the survivors, then mutate a fraction of the offspring.
        programs = nextGen(TOPFRACTION, fitnessDict, fitnessList, popSize)
        mutate(MUTATIONRATE, programs)
    while True:
        # Offer to visualize the champion; any non-yes answer returns the
        # winning rule table instead.
        # NOTE(review): on Python 2, input() *evaluates* what is typed, so
        # the user would have to type quotes around Y/yes -- raw_input()
        # was probably intended.
        answer = input("Do you want to see this Picobot visualized? ")
        if answer == "Y" or answer == "Yes" or answer == "yes" or answer == "Yeah"\
                or answer == 'y':
            bestBot.visualize()
        else:
            return fitnessDict[bestFitness]
def nextGen(survivorProp, fitnessDict, fitnessList, popSize):
    """Breed the next generation from the fittest programs of the last one.

    survivorProp: fraction of the population (rounded up) that survives to
        breed.  (BUG FIX: the original ignored this parameter and read the
        global TOPFRACTION instead; GA passes TOPFRACTION, so behavior for
        the existing caller is unchanged.)
    fitnessDict: {fitness: program}.
    fitnessList: the fitnesses sorted best-first.
    popSize: number of offspring to produce.

    Returns the list of popSize offspring.  A stray debug print of the
    dictionary size was removed.
    """
    # Round up so at least one program survives; clamp to the number of
    # distinct fitnesses actually available (duplicates collapse in the
    # dict, so fitnessList can be shorter than numSurvivors).
    numSurvivors = min(int(math.ceil(survivorProp * popSize)),
                       len(fitnessList))
    survivors = [fitnessDict[fit] for fit in fitnessList[:numSurvivors]]
    if len(survivors) < 2:
        # With a single survivor the loop below could never pick two
        # distinct parents and would spin forever.
        raise ValueError("need at least two distinct survivors to breed")
    # Cross random pairs of distinct parents until the population is full.
    newGen = []
    while len(newGen) < popSize:
        parent1 = random.choice(survivors)
        parent2 = random.choice(survivors)
        if parent1 != parent2:
            newGen.append(parent1.crossover(parent2))
    return newGen
def mutate(mutProp, programs):
    """Mutate a randomly-chosen subset of `programs` in place.

    mutProp is the fraction of the population to mutate; the count is
    truncated to an int.  Picks are independent, so the same program may
    be mutated more than once.
    """
    mutation_count = int(len(programs) * mutProp)
    for _ in range(mutation_count):
        random.choice(programs).mutate()
def newPop(popSize):
    """Build and return a list of `popSize` freshly randomized Program
    objects (the GA's initial population)."""
    population = []
    while len(population) < popSize:
        candidate = Program()
        candidate.randomize()
        population.append(candidate)
    return population
def evaluateFitness(program, trials, steps):
    """Return the average fraction of the board `program` covers.

    Runs `trials` simulations from random starting cells, each for `steps`
    moves, and normalizes the mean visited-cell count by the board area,
    so the result lies in [0, 1].
    """
    visited_total = 0
    for _ in range(trials):
        # testBot = Picobot(0, 0, program)
        # Drop the bot on a random cell inside the walls.
        start_row = random.choice(range(1, ROWS+1))
        start_col = random.choice(range(1, COLUMNS+1))
        bot = Picobot(start_row, start_col, program)
        bot.run(steps)
        visited_total += bot.numvisited
    # Average per trial, then normalize by the number of board cells.
    return visited_total / (trials * 1.0) / (ROWS * COLUMNS)
def gridMaker(roomType):
    """Return the cell array for a walled, empty room.

    The playable area is ROWS x COLUMNS of " " cells surrounded on all
    four sides by "W" wall cells, so the full array is (ROWS+2) rows of
    (COLUMNS+2) cells.  `roomType` is currently unused -- only the empty
    room is supported (maze support was removed).
    """
    # Top wall spanning the padded width.
    array = [["W"] * (COLUMNS + 2)]
    # Interior rows: a wall cell, COLUMNS empty cells, a wall cell.  Each
    # row is a distinct list so cells can be marked independently.
    for _ in range(ROWS):
        array.append(["W"] + [" "] * COLUMNS + ["W"])
    # Bottom wall.
    array.append(["W"] * (COLUMNS + 2))
    return array
def keyMaker(roomType):
    """Enumerate every (state, wall-pattern) situation Picobot can be in,
    paired with the moves that pattern physically allows.

    Returns a list of ((state, pattern), allowed_moves) tuples.  The
    patterns come from the module-level allowedPatterns list; `roomType`
    is currently unused (only the empty room is supported).
    """
    all_moves = ['N', 'E', 'W', 'S', 'X']
    keys = []
    for pattern in allowedPatterns:
        for state in range(STATES):
            # A move is legal unless its direction letter appears in the
            # pattern as a wall ('X', stay-put, never appears in a pattern
            # so it is always legal).  Build a fresh list per key.
            legal = [move for move in all_moves if move not in pattern]
            keys.append(((state, pattern), legal))
    return keys
class Program:
    def __init__(self):
        """Initializes a program by creating an empty rules dictionary"""
        # Maps (state, pattern) -> (move, next_state); populated by
        # randomize() or crossover().
        self.rulesDict = {}
    def randomize(self):
        """Randomizes a program, filling its dictionary with a set of rules
        that are random but cover every possible Picobot state"""
        # keyMaker supplies every (state, pattern) key together with the
        # moves that pattern physically allows; GRID is the module-level
        # room type.
        values = keyMaker(GRID)
        # For each situation pick a random legal move and a random next
        # state (key[0] is the (state, pattern) key, key[1] the legal
        # moves).
        for key in values:
            newDirection = random.choice(key[1])
            newState = random.choice(range(STATES))
            self.rulesDict[key[0]] = (newDirection, newState)
def getMove(self, state, pattern):
"""Given a Picobot's current state, finds its next move"""
# This function simply uses the rules dictionary to find the next move
# for the given situation.
newState = self.rulesDict[(state, pattern)]
return newState
def mutate(self):
"""Chooses a rule at random and changes the move and new state"""
# A random key is chosen.
victim = random.choice(self.rulesDict.keys())
directions = ['N', 'E', 'W', 'S', 'X']
# The old value for that key is stored.
oldNext = self.rulesDict[victim]
newNext = oldNext
# This while loop prevents the same rule from being chosen again.
while newNext == oldNext:
# The same process as in the randomize method is used here to
# choose a new value for this key.
for char in victim[1]:
if char != 'x' and char in directions:
directions.remove(char)
randDirection = random.choice(directions)
randState = random.choice(range(STATES))
newNext = (randDirection, randState)
# The key is assigned the new value the method has chosen.
self.rulesDict[victim] = newNext
def crossover(self, other):
"""Creates an offspring program based on two parent programs (self
and other)"""
# A random state is chosen for the crossover point, and an offspring
# program is initialized.
crossState = random.choice(range(STATES))
offspring = Program()
# This loops over every key in the self program, and adds every rule
# for a state below (or at) our crossover point to the offspring's
# dictionary.
for current in self.rulesDict.keys():
if current[0] <= crossState:
offspring.rulesDict[current] = self.rulesDict[current]
# This does the same, but for states above the crossover point and for
# the "other" program.
for current in other.rulesDict.keys():
if current[0] > crossState:
offspring.rulesDict[current] = other.rulesDict[current]
# The offspring is returned to whoever called the function.
return offspring
def __repr__(self):
"""Prints out the code representing a program"""
# This is the string that will hold the program.
code = []
# The method sorts the keys to make the output look nicer.
keyList = self.rulesDict.keys()
keyList.sort()
# This loops over every key.
for item in keyList:
# The tuples for both the keys and the values are broken into
# strings, and arranged in the way picobot expects its rules
# to be. This makes it easier to read for the user and enables
# the code to be pasted directly into the picobot simulator for
# testing/fun.
first = str(item[0])+" "+str(item[1])
second = str(self.rulesDict[item][0]) + " " + \
str(self.rulesDict[item][1])
code.append(" -> ".join((first, second)) + "\n")
# The code is returned at the end.
return " "+" ".join(code)
class Picobot:
    def __init__(self, picobotrow, picobotcol, program):
        """Initializes a picobot with a row, column, and program."""
        self.row = picobotrow
        self.column = picobotcol
        self.program = program
        # Start in state 0 with no squares visited yet.
        self.state = 0
        self.visited = []
        self.numvisited = 0
        # gridMaker builds the room array for the module-level room type GRID.
        self.array = gridMaker(GRID)
        return

    def step(self):
        """Moves the Picobot one step using its program"""
        # Record the current square as visited (once per square) and keep
        # the running count in sync.
        if (self.row, self.column) not in self.visited:
            self.visited.append((self.row, self.column))
            self.numvisited += 1
        # Collect the four neighboring squares with their compass direction.
        relevantSquares = []
        relevantSquares.append((self.array[self.row-1][self.column], "N"))
        relevantSquares.append((self.array[self.row][self.column+1], "E"))
        relevantSquares.append((self.array[self.row][self.column-1], "W"))
        relevantSquares.append((self.array[self.row+1][self.column], "S"))
        # Build the surroundings string (e.g. 'NxWx'): the direction letter
        # where there is a wall ('W' in the array), 'x' where there is not.
        surroundings = []
        for square in relevantSquares:
            if square[0] == "W":
                surroundings.append(square[1])
            else:
                surroundings.append("x")
        surroundings = "".join(surroundings)
        # Ask the program for this situation's rule; getMove returns a
        # (direction, newState) pair such as ('N', 0).
        there = self.program.getMove(self.state, surroundings)
        self.state = there[1]
        direction = there[0]
        # Move in the chosen direction ('X', the else branch, means stay put).
        if direction == "N":
            self.row -= 1
        elif direction == "E":
            self.column += 1
        elif direction == "W":
            self.column -= 1
        elif direction == "S":
            self.row += 1
        else:
            self.row += 0

    def run(self, steps):
        """Calls the step method "steps" times"""
        # Use a throwaway loop variable instead of shadowing 'steps'.
        for _ in range(steps):
            self.step()

    def __repr__(self):
        """Displays the maze, Picobot's position, and visited squares.

        Note: this mutates self.array — visited squares become '.' and the
        current position becomes 'P' (walls are already 'W')."""
        for square in self.visited:
            self.array[square[0]][square[1]] = "."
        self.array[self.row][self.column] = "P"
        room = ""
        # Turn the array into a string, one row per line.
        for row in self.array:
            for char in row:
                room += char + " "
            room += "\n"
        return room

    def clear(self):
        """Clears a Picobot's visited squares so it can be run again"""
        # Reset every visited square to blank, then forget the history so
        # the same program can be drawn several times with a clean slate.
        for square in self.visited:
            self.array[square[0]][square[1]] = " "
        self.visited = []
        self.numvisited = 0

    def visualize(self):
        """Shows the best Picobot program solving the empty room"""
        # Ask which renderer to use and how long to run it.
        visType = input("What visualization type would you like to see?"\
                        "('ASCII', '2D', or '3D') ")
        # Bug fix: under Python 3, input() returns a string; the step count
        # must be an int for the range() calls below.
        steps = int(input("How many steps would you "\
                          "like to run the visualization for? "))
        # ASCII simply reuses this class's __repr__.
        if visType == "ASCII":
            self.clear()
            for step in range(steps):
                # Bug fix: 'print self' is Python 2-only syntax.
                print(self)
                self.step()
        # 2D uses Turtle to draw the Picobot, and is therefore very slow.
        elif visType == "2D":
            # Clear the history and speed the turtle up slightly.
            self.clear()
            turtle.speed(0)
            turtle.hideturtle()
            # Gather the positions of all wall squares.
            wallVectors = []
            for row in range(len(self.array)):
                for box in range(len(self.array[row])):
                    if self.array[row][box] == "W":
                        pos = Vector(box, row)
                        wallVectors.append(pos)
            # Recenter the room around the origin (shift by half the room
            # size) and scale up for turtle, then draw each wall as a blue
            # square.
            for vector in wallVectors:
                vector.x -= COLUMNS/2.0
                vector.x = vector.x*10
                vector.y -= ROWS/2.0
                vector.y = vector.y*10
                wall = Square(width = 10, center = vector, color="Blue")
                wall.render()
            # Scale the Picobot position the same way and draw it green.
            xPos = (self.column - COLUMNS/2.0)*10
            yPos = (self.row - ROWS/2.0)*10
            botPos = Vector(xPos, yPos)
            Picobot = Square(width=10, center=botPos, color="Green")
            Picobot.render()
            for step in range(steps):
                # Cover the old position with a gray square, advance the
                # program, then redraw the bot at its new position.
                lastSquare = Vector(xPos, yPos)
                lastVisited = Square(width=10, center=lastSquare, color="Gray")
                lastVisited.render()
                self.step()
                xPos = (self.column - COLUMNS/2.0)*10
                yPos = (self.row - ROWS/2.0)*10
                botPos = Vector(xPos, yPos)
                Picobot = Square(width=10, center=botPos, color="Green")
                Picobot.render()
        # The 3D rendering uses VPython.
        elif visType == "3D":
            # Initialized much like 2D, but floor tiles are tracked too.
            self.clear()
            wallVectors = []
            floorVectors = []
            # Record wall and floor tiles. Rows are negated and offset by
            # one — the original author noted this empirically makes VPython
            # render the room right-side up.
            for row in range(len(self.array)):
                for box in range(len(self.array[row])):
                    if self.array[row][box] == "W":
                        pos = Vector(-row + 1, box)
                        wallVectors.append(pos)
                    else:
                        pos = Vector(-row + 1, box)
                        floorVectors.append(pos)
            # Walls: blue boxes, recentered so the board faces the user.
            for vector in wallVectors:
                vector.y -= ROWS/2.0
                vector.x += COLUMNS/2.0
                visual.box(pos=(vector.x, vector.y, 0), color=(0, 0, 1))
            # Floor: thin white boxes set slightly lower so they read as
            # tiles (VPython has no white background).
            for vector in floorVectors:
                vector.y -= ROWS/2.0
                vector.x += COLUMNS/2.0
                visual.box(pos=(vector.x, vector.y, -.45), width=0.1)
            # The bot: a green box at its (scaled) position.
            yPos = -(self.row - ROWS/2.0 - 1)
            xPos = self.column - COLUMNS/2.0
            Picobot = visual.box(pos=(xPos, yPos, 0), color=(0, 1, 0))
            # Visibility is toggled each step so the bot appears to move.
            Picobot.visible = True
            # Each step: hide the bot, drop a gray "visited" tile where it
            # was, advance the program, and redraw it at the new spot.
            for step in range(steps):
                Picobot.visible = False
                lastSquare = (xPos, yPos)
                lastVisited = visual.box(pos=(lastSquare[0], lastSquare[1],
                                              -0.3),
                                         width=.04,
                                         color=(0.5, 0.5, 0.5))
                self.step()
                yPos = -(self.row - ROWS/2.0 - 1)
                xPos = self.column - COLUMNS/2.0
                Picobot = visual.box(pos=(xPos, yPos, .15), color=(0, 1, 0))
                Picobot.visible = True
                # Small delay so the animation is visible.
                time.sleep(.01)
def converter(s):
    """Takes picobot code, as a string, and returns a picobot dictionary.

    Each rule is one line formatted the way the simulator (and Program's
    __repr__) emits it, e.g.::

        2 xxxS -> W 0

    Blank or malformed lines are skipped. Parsing by whitespace tokens
    fixes two defects of the original fixed-column slicing: it no longer
    crashes on trailing newlines/blank lines, and it no longer requires
    single-digit state numbers or exact spacing.
    """
    picobotDict = {}
    # Examine every line of the input.
    for line in s.split('\n'):
        parts = line.split()
        # A well-formed rule has exactly five tokens:
        # state, pattern, '->', move, newState.
        if len(parts) != 5 or parts[2] != '->':
            continue
        state, pattern, _, move, newState = parts
        # Key is (state, pattern); value is (move, newState).
        picobotDict[(int(state), pattern)] = (move, int(newState))
    return picobotDict
# This is the best program I've managed to make so far (fitness: about 0.95,
# independent of starting position, I believe). It's already defined as a
# program, dictionary, and Picobot object (scroll down for the last one). I
# used it for testing my visualize method and for general entertainment.
# The rules map (state, surroundings) -> (move, newState); surroundings use
# 'N'/'E'/'W'/'S' where there is a wall and 'x' where the square is open.
goodProgram = Program()
goodProgram.rulesDict = {
    (1, 'xxWS'): ('E', 0), (3, 'xxxS'): ('N', 4), (3, 'NExx'): ('S', 3),
    (3, 'xExS'): ('N', 2), (2, 'xxWS'): ('X', 3), (2, 'xExx'): ('W', 4),
    (1, 'xxxS'): ('E', 1), (2, 'Nxxx'): ('X', 3), (4, 'Nxxx'): ('W', 3),
    (0, 'xxxx'): ('S', 3), (1, 'xExS'): ('W', 3), (4, 'NxWx'): ('E', 4),
    (2, 'xxxx'): ('X', 1), (4, 'xExS'): ('X', 1), (3, 'xxWS'): ('E', 4),
    (4, 'xxWx'): ('N', 0), (0, 'xxxS'): ('N', 0), (1, 'xxxx'): ('N', 0),
    (2, 'xExS'): ('W', 0), (3, 'NxWx'): ('S', 2), (2, 'NExx'): ('S', 3),
    (4, 'xxWS'): ('X', 2), (4, 'xExx'): ('X', 1), (1, 'xExx'): ('W', 0),
    (0, 'Nxxx'): ('X', 2), (4, 'xxxx'): ('N', 4), (2, 'NxWx'): ('S', 1),
    (2, 'xxxS'): ('X', 0), (3, 'xxWx'): ('S', 3), (1, 'xxWx'): ('X', 0),
    (4, 'NExx'): ('W', 0), (3, 'xxxx'): ('S', 0), (0, 'xxWS'): ('X', 2),
    (1, 'NxWx'): ('S', 4), (0, 'xExx'): ('S', 2), (1, 'NExx'): ('X', 4),
    (1, 'Nxxx'): ('W', 4), (2, 'xxWx'): ('S', 0), (3, 'Nxxx'): ('S', 0),
    (4, 'xxxS'): ('X', 1), (3, 'xExx'): ('N', 1), (0, 'xExS'): ('N', 4),
    (0, 'NxWx'): ('E', 2), (0, 'xxWx'): ('X', 2), (0, 'NExx'): ('W', 2)
    }
# A ready-made Picobot running the program above, starting at row 10, col 10.
bestPicobot = Picobot(10, 10, goodProgram)
| {
"repo_name": "andrewmichaud/picobot-GA",
"path": "Final Project.py",
"copies": "1",
"size": "29737",
"license": "mit",
"hash": -1599193711402300400,
"line_mean": 38.7914951989,
"line_max": 83,
"alpha_frac": 0.5833137169,
"autogenerated": false,
"ratio": 3.9958344531040044,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5079148170004004,
"avg_score": null,
"num_lines": null
} |
# Andrew Miller <amiller@cs.ucf.edu> 2011
#
# BlockPlayer - 3D model reconstruction using the Lattice-First algorithm
# See:
# "Interactive 3D Model Acquisition and Tracking of Building Block Structures"
# Andrew Miller, Brandyn White, Emiko Charbonneau, Zach Kanzler, and Joseph J. LaViola Jr.
# IEEE VR 2012, IEEE TVGC 2012
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from blockplayer import dataset
from blockplayer import normals
from blockplayer import preprocess
from blockplayer import config
import cv
import pylab
import numpy as np
def show_mask(name, m, rect):
    """Show mask *m* in an OpenCV window titled *name*, outlining *rect*.

    rect is a pair of corner points ((t, l), (b, r)) — presumably
    (top, left)/(bottom, right); confirm against the caller.
    """
    height, width = m.shape[0], m.shape[1]
    image = cv.CreateImage((width, height), 32, 3)
    # Replicate the single-channel mask into three channels for display.
    cv.SetData(image, np.ascontiguousarray(np.dstack([m] * 3)))
    (t, l), (b, r) = rect
    # Outline the region of interest in yellow.
    cv.Rectangle(image, (t, l), (b, r), (255, 255, 0))
    cv.ShowImage(name, image)
def once():
    """Advance to the next frame pair and display both foreground masks."""
    dataset.advance()
    depthL = dataset.depthL
    depthR = dataset.depthR
    # Threshold each depth image against its stored background model.
    maskL, rectL = preprocess.threshold_and_mask(depthL, config.bgL)
    maskR, rectR = preprocess.threshold_and_mask(depthR, config.bgR)
    # Display the masks as float images with their bounding rectangles.
    show_mask('maskL', maskL.astype('f'), rectL)
    show_mask('maskR', maskR.astype('f'), rectR)
    # Brief pause so the GUI event loop can refresh.
    pylab.waitforbuttonpress(0.01)
def go():
    """Process and display frames forever (until interrupted)."""
    while True:
        once()
def show_backgrounds():
    """Show the stored hi-res background images for the left (figure 1)
    and right (figure 2) cameras."""
    pylab.figure(1)
    pylab.imshow(config.bgL['bgHi'])
    pylab.draw()
    pylab.figure(2)
    # NOTE(review): only figure 2 is cleared before drawing — confirm
    # figure 1 shouldn't get a clf() as well.
    pylab.clf()
    pylab.imshow(config.bgR['bgHi'])
    pylab.draw()
# Script entry point: load a random recorded dataset, then run the
# preview loop until interrupted.
if __name__ == "__main__":
    dataset.load_random_dataset()
    go()
| {
"repo_name": "amiller/blockplayer",
"path": "demos/demo_preprocess.py",
"copies": "1",
"size": "1604",
"license": "mpl-2.0",
"hash": -4151994649448473000,
"line_mean": 27.1403508772,
"line_max": 93,
"alpha_frac": 0.6857855362,
"autogenerated": false,
"ratio": 3.0037453183520597,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9144441792513083,
"avg_score": 0.009017812407795258,
"num_lines": 57
} |
# Andrew Miller <amiller@cs.ucf.edu> 2011
# Zach Kanzler <they4kman@gmail.com>
#
# BlockPlayer - 3D model reconstruction using the Lattice-First algorithm
# See:
# "Interactive 3D Model Acquisition and Tracking of Building Block Structures"
# Andrew Miller, Brandyn White, Emiko Charbonneau, Zach Kanzler, and Joseph J. LaViola Jr.
# IEEE VR 2012, IEEE TVGC 2012
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import numpy as np
from OpenGL.GL import *
# Create the block registry only if it doesn't already exist
# (e.g. the module is being reloaded).
if 'blocks' not in globals():
    blocks = {}
def clear():
    """Empty the module-level block registry."""
    global blocks
    blocks = dict()
def show_grid(name, grid,
              color=np.array([1,0,0,1]), opacity=1.0,
              line_color=np.array([1,1,1,1])):
    """Register voxel grid *grid* for drawing under *name*.

    color may be a single RGBA 4-vector (flat shading), an array of
    per-voxel colors whose last axis has length 3, or None (fall back to
    coordinate-based coloring). opacity is currently unused.
    """
    assert color is None or color.shape == (4,) or color.shape[3]==3
    d = {}
    # Bug fix: the assert above allows color=None, but the original code
    # then dereferenced color.shape unconditionally and crashed. Treat
    # None the same as "no per-voxel colors".
    if color is not None and color.shape != (4,):
        d.update(grid_vertices(grid, color))
    else:
        d.update(grid_vertices(grid, None))
    d['solid_color'] = color if (color is not None and color.shape == (4,)) else None
    d['line_color'] = line_color
    global blocks
    blocks[name] = d
def draw_block(blocks):
    """Issue the OpenGL calls to draw one entry produced by show_grid().

    The GL call order is significant: client state must be enabled before
    the pointers are set, and disabled again after the draw calls.
    """
    glEnableClientState(GL_VERTEX_ARRAY)
    glVertexPointeri(blocks['vertices'])
    # glColor(0.3,0.3,0.3)
    if not blocks['solid_color'] is None:
        # Flat shading: one RGBA color for the whole block.
        glColor(*blocks['solid_color'])
    else:
        # Per-vertex colors (unsigned-byte triples from grid_vertices).
        glEnableClientState(GL_COLOR_ARRAY)
        glColorPointerub(blocks['color'])
    # Faces as quads, then edges as lines in the line color.
    glDrawElementsui(GL_QUADS, blocks['quad_inds'])
    glDisableClientState(GL_COLOR_ARRAY)
    glColor(*blocks['line_color'])
    glDrawElementsui(GL_LINES, blocks['line_inds'])
    glDisableClientState(GL_VERTEX_ARRAY)
def grid_vertices(grid, color=None):
    """Thin wrapper around grid_vertices_numpy (kept as an indirection
    point for alternative implementations)."""
    return grid_vertices_numpy(grid, color)
def grid_vertices_numpy(grid, color=None):
    """
    Given a boolean voxel grid, produce a list of vertices and indices
    for drawing quads or line strips in opengl
    """
    # Unit-cube corner offsets for the six quad faces of one voxel.
    faces = [[[1,1,0],[0,1,0],[0,1,1],[1,1,1]],
             [[1,0,1],[0,0,1],[0,0,0],[1,0,0]],
             [[1,1,1],[0,1,1],[0,0,1],[1,0,1]],
             [[1,0,0],[0,0,0],[0,1,0],[1,1,0]],
             [[0,1,1],[0,1,0],[0,0,0],[0,0,1]],
             [[1,1,0],[1,1,1],[1,0,1],[1,0,0]]]
    # One normal per face, from the cross product of two face edges.
    face_normals = [np.cross(np.subtract(f[0], f[1]), np.subtract(f[0], f[2]))
                    for f in faces]
    # Coordinates of every occupied voxel, shaped (n, 1, 3) for broadcasting.
    occupied = np.array(grid.nonzero()).transpose().reshape(-1, 1, 3)
    corner_offsets = np.array(faces).reshape(1, -1, 3)
    # Broadcast offsets against voxel coordinates: 24 vertices per voxel.
    vertices = (corner_offsets + occupied).reshape(-1, 3)
    coords = (corner_offsets*0 + occupied).astype('u1').reshape(-1, 3)
    if color is None:
        # No per-voxel colors supplied: reuse the voxel coordinates.
        cc = coords
    else:
        assert color.shape[3] == 3
        # Broadcast each voxel's color to all of its 24 vertices.
        color = color[grid, :].reshape(-1, 1, 3)
        cc = (corner_offsets.astype('u1')*0 + color).reshape(-1, 3)
        assert cc.dtype == np.uint8
    normals = np.tile(face_normals, (len(occupied), 4)).reshape(-1, 3)
    # Index arrays: 6 faces per voxel, 4 vertices per face.
    line_inds = np.arange(0, len(occupied)*6).reshape(-1, 1)*4 + [0,1,1,2,2,3,3,0]
    quad_inds = np.arange(0, len(occupied)*6).reshape(-1, 1)*4 + [0,1,2,3]
    return dict(blocks=occupied, vertices=vertices, coords=coords,
                normals=normals, line_inds=line_inds, quad_inds=quad_inds,
                color=cc)
def draw():
    """Render every block registered via show_grid()."""
    for entry in blocks.values():
        draw_block(entry)
| {
"repo_name": "amiller/blockplayer",
"path": "blockplayer/blockdraw.py",
"copies": "1",
"size": "3259",
"license": "mpl-2.0",
"hash": 3658298909119999500,
"line_mean": 29.7452830189,
"line_max": 93,
"alpha_frac": 0.6011046333,
"autogenerated": false,
"ratio": 2.8866253321523474,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.3987729965452347,
"avg_score": null,
"num_lines": null
} |
# Andrew Powell
#
# This script is necessary to convert the binary into a format acceptable by Xilinx to load into their BRAM.
import argparse, struct, binascii, array, time
if __name__ == '__main__':
    # Build the command-line interface.
    parser = argparse.ArgumentParser(description='Convert binary to a coe files.')
    parser.add_argument('binary_name',metavar='binary_name',type=str,nargs=1,
        help='File name of input binary. The extension should be included.')
    parser.add_argument('coe_name',metavar='coe_name',type=str,nargs=1,
        help='File name of output coe. The extension should be included.')
    parser.add_argument('--swap_bytes',dest='swap_bytes',action='store_const',
        const=True,default=False,
        help='Swaps the bytes in each word')
    # Bug fix: the original help text was copy-pasted from --swap_bytes.
    parser.add_argument('--plain_hex',dest='plain_hex',action='store_const',
        const=True,default=False,
        help='Outputs plain hex words without the coe header/punctuation.')
    parser.add_argument('--word_count',metavar='word_count',type=int,nargs=1,
        help='Numbers of words to store in coef from the binary.')
    parser.add_argument('--full_count',metavar='full_count',type=int,nargs=1,default=[-1],
        help='Size of ram in number of words.')
    parser.add_argument('--vhdl_hex',dest='vhdl_hex',action='store_const',
        const=True,default=False,
        help='Generates VHDL package with hex.')
    parser.add_argument('--package_name',dest='package_name',type=str,nargs=1,
        help='Name of VHDL package with hex',default=[None])
    # Unpack the parsed arguments.
    args = parser.parse_args()
    binary_name = args.binary_name[0]
    coe_name = args.coe_name[0]
    swap_bytes_flag = args.swap_bytes
    plain_hex_flag = args.plain_hex
    # --word_count has no default, so indexing None raises TypeError;
    # fall back to -1, meaning "use every word in the binary".
    try: word_count_value = args.word_count[0]
    except TypeError: word_count_value = -1
    full_count_value = args.full_count[0]
    vhdl_hex = args.vhdl_hex
    # VHDL output implies plain hex (no coe headers).
    if vhdl_hex: plain_hex_flag = True
    package_name = args.package_name[0]
    # Bug fix: compare with None using 'is', not '=='.
    if package_name is None: package_name = 'bram_pack'
    # Conversion parameters: 32-bit words, hexadecimal output.
    bytes_per_word = 4
    radix = 16
    # Open the binary for reading and the coe file for writing.
    with open(binary_name,mode='rb') as binary_file, open(coe_name,mode='w') as coe_file:
        # Read the whole binary and count its complete words.
        binary_content = binary_file.read()
        # Bug fix: use floor division so this stays an int under Python 3
        # (true division returns a float, which range() below rejects).
        words_in_binary = len(binary_content)//bytes_per_word
        # If a valid word count is specified, truncate to it.
        if word_count_value>=0 and word_count_value<words_in_binary:
            words_in_binary = word_count_value
        # If plain hex is disabled, emit the coe headers.
        if not plain_hex_flag:
            coe_file.write('memory_initialization_radix='+repr(radix)+';\n')
            coe_file.write('memory_initialization_vector=\n')
        # If the VHDL flag is set, emit the package header instead.
        if vhdl_hex:
            # Default the ram size to the word count if not specified.
            if full_count_value<0: full_count_value = words_in_binary
            coe_file.write( \
                'library ieee;\n'+ \
                'use ieee.std_logic_1164.all;\n'+ \
                '\n'+ \
                'package '+package_name+' is\n'+ \
                '\n'+ \
                '    constant cpu_width : integer := 32;\n'+ \
                '    constant ram_size : integer := '+repr(full_count_value)+';\n'+ \
                '    subtype word_type is std_logic_vector(cpu_width-1 downto 0);\n'+ \
                '    type ram_type is array(0 to ram_size-1) of word_type;\n'+ \
                '    function load_hex return ram_type;\n'+ \
                '\n'+ \
                'end package;\n'+ \
                '\n'+ \
                'package body '+package_name+' is\n'+ \
                '\n'+ \
                '    function load_hex return ram_type is\n'+ \
                '        variable ram_buffer : ram_type := (others=>(others=>\'0\'));\n'+ \
                '    begin\n')
        # Emit each word of the binary.
        for each_word in range(words_in_binary):
            # Slice out one word and unpack it (struct returns a 1-tuple,
            # which both the array constructor and %-formatting accept).
            word_packed = binary_content[each_word*bytes_per_word:(each_word+1)*bytes_per_word]
            word_int = struct.unpack('I',word_packed)
            # If requested, swap the byte order of the word.
            if swap_bytes_flag:
                word_array = array.array('I',word_int)
                word_array.byteswap()
                word_int = word_array[0]
            # Format as 8 uppercase hex digits.
            word_hex = "%0.8X" % word_int
            # Wrap in VHDL assignment syntax if enabled.
            if vhdl_hex:
                word_hex = '\t\tram_buffer('+repr(each_word)+') := X"'+word_hex+'";'
            coe_file.write(word_hex)
            # coe syntax: comma-separate words, semicolon after the last.
            if not plain_hex_flag:
                if each_word != words_in_binary-1: coe_file.write(',')
                else: coe_file.write(';')
            coe_file.write('\n')
        # Close out the VHDL package body if enabled.
        if vhdl_hex:
            coe_file.write('\t\treturn ram_buffer;\n\tend;\nend;\n')
| {
"repo_name": "andrewandrepowell/axiplasma",
"path": "misc/bin2coe.py",
"copies": "1",
"size": "5774",
"license": "mit",
"hash": -6714485008799538000,
"line_mean": 48.775862069,
"line_max": 108,
"alpha_frac": 0.5777623831,
"autogenerated": false,
"ratio": 3.649810366624526,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.47275727497245257,
"avg_score": null,
"num_lines": null
} |
#Andrew St. Ours
#3/28/17
#Mountain bike game
#welcome
print("Welcome to Whistler Mountain Bike Park. \nToday you are going to"
      "have a choice of three trails to ride on. \nA green circle trail, a blue"
      " square trail and a black diamond trail.")
print("\nMake sure you pick the trail that applies best to your level of riding.")
#gear list (the original kept three identical copies x, y and z; one list
#suffices because only a single trail branch ever runs)
x = ["bike", "helmet", "pair of gloves", "backpack", "water"]
print("\n\nOn your trip you have a", x, ".")
print("\n\nAre you ready?")
input("\n\nPress [Enter] to continue up to the top of the mountain.")
#choose a trail
print("\n\nYou are now at the top and it is time to choose your trail.")
trail = input("\nWould you like the green, blue or black trail?\n")
#trail-specific reward; the case-insensitive match generalizes the
#original "green"/"Green"-style checks (backward compatible)
if trail.lower() == "green":
    print("Ah we have a beginner I see. \nYou have recieve a nice"
          " Whistler Mountain Bike t-shirt. \nIt will be added to"
          " your things.")
    x.append("t-shirt")
    print(x)
elif trail.lower() == "blue":
    print("Ooo we have a decent rider here. \nYou have recieved a pair"
          " of riding shoes. \nThey will be added to your things.")
    x.append("riding shoes")
    print(x)
elif trail.lower() == "black":
    #bug fix: the original concatenated "...recieved a" + "check...",
    #printing "recieved acheck"; the missing space is restored
    print("Mad respect for being an expert dude. \nYou have recieved a"
          " check for $10000 to buy a new bike. \nThe check will be added"
          " to your things.")
    x.append("check")
    print(x)
#Exit
print("\n\nI hope you had fun today and keep shredding!")
input("\n\nPress [Enter] to exit the park.")
| {
"repo_name": "astours17/Mountain-Biking-2.0",
"path": "St. Ours_MountainBikeGame.py",
"copies": "1",
"size": "1807",
"license": "mit",
"hash": 1388407096376418800,
"line_mean": 22.7534246575,
"line_max": 82,
"alpha_frac": 0.6065301605,
"autogenerated": false,
"ratio": 3.170175438596491,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9149455885320048,
"avg_score": 0.025449942755288685,
"num_lines": 73
} |
#Andrew Tan, 1/31, Section 010, Math Expressions
#Derived speed-of-light quantities (km/s and miles/s).
conv = 0.621
lightspeed = 299792.458
miles_lightspeed = lightspeed * conv
half_miles_lightspeed = miles_lightspeed * 0.5
quarter_miles_lightspeed = miles_lightspeed * 0.25
#Earth's orbital speed, converted from miles/hour to km/second.
revolution = 66600
km_revolution = revolution / conv / 60 / 60
revolution_pct = "{:.15e}".format(km_revolution / lightspeed)
#Labels left-justified in a 38-character field so the values line up.
L1 = "{:<38s}".format("Speed of light (Kilometers / sec):")
L2 = "{:<38s}".format("Speed of light (Miles / sec):")
L3 = "{:<38s}".format("Half speed of light (Miles / sec):")
L4 = "{:<38s}".format("Quarter speed of light (Miles / sec):")
L5 = "{:<38s}".format("66,000 miles per hour is equal to:")
L6 = L5
#printing output
print(L1, lightspeed, "kps")
print(L2, miles_lightspeed, "mps")
print(L3, half_miles_lightspeed, "mps")
print(L4, quarter_miles_lightspeed, "mps")
print()
print("The earth moves 66,000 miles / hour around the sun")
print(L5, km_revolution, "kps")
print(L6, revolution_pct, "% of the speed of light")
| {
"repo_name": "sojournexx/python",
"path": "Assignments/TanAndrew_assign1_problem3.py",
"copies": "1",
"size": "1053",
"license": "mit",
"hash": -8976474035668110000,
"line_mean": 34.3103448276,
"line_max": 60,
"alpha_frac": 0.6847103514,
"autogenerated": false,
"ratio": 2.686224489795918,
"config_test": false,
"has_no_keywords": true,
"few_assignments": false,
"quality_score": 0.3870934841195918,
"avg_score": null,
"num_lines": null
} |
#Andrew Tan, 2/15, Section 010, NYU Calendar
#Ask user to input month and date
m = int(input("Enter a month (1-12): "))
d = int(input("Enter a day (1-31): "))
#Look up the month name (only used on paths where m is valid)
monthNames = ["January", "Feburary", "March", "April", "May", "June",
              "July", "August", "September", "October", "November",
              "December"]
month = monthNames[m - 1] if 1 <= m <= 12 else None
#Reject out-of-range months and non-positive days up front
if m < 1 or m > 12 or d < 1:
    print("That's not a valid date!")
else:
    #Last valid day of the month, using the assignment's even/odd rule:
    #Jan-Jul odd months have 31 days, Aug-Dec even months have 31 days,
    #and February is capped at 28 (no leap years in 2017).
    if m == 2:
        lastDay = 28
    elif m <= 7:
        lastDay = 31 if m % 2 != 0 else 30
    else:
        lastDay = 31 if m % 2 == 0 else 30
    if d > lastDay:
        print("That's not a valid date!")
    #Check if date is within spring semester
    elif m == 1 and d < 23:
        print("%s %d is before the start of the Spring 2017 term." %(month, d))
    elif m >= 5 and d > 8:
        print("%s %d is after the end of the Spring 2017 term." %(month, d))
    #Check if date is a holiday
    elif m == 2 and d == 20:
        print("%s %d is President's day. NYU is not open on this day." %(month, d))
    elif m == 3 and 13 <= d <= 19:
        print("%s %d is Spring Break. NYU is not open on this day." %(month, d))
    else:
        print("%s %d is not a holiday at NYU. The university is open." %(month, d))
| {
"repo_name": "sojournexx/python",
"path": "Assignments/TanAndrew_assign3_problem3.py",
"copies": "1",
"size": "1692",
"license": "mit",
"hash": 7220874910001633000,
"line_mean": 25.737704918,
"line_max": 79,
"alpha_frac": 0.5502364066,
"autogenerated": false,
"ratio": 2.801324503311258,
"config_test": false,
"has_no_keywords": true,
"few_assignments": false,
"quality_score": 0.3851560909911258,
"avg_score": null,
"num_lines": null
} |
#Andrew Tan, 2/16, Section 010, Arrows
#Define right()
def right():
    """Print a right-pointing arrowhead of '*' characters.

    Reads the global tcol (column count entered by the user): the indent
    grows to tcol-1 at the middle row and shrinks back, over 2*tcol - 1
    rows in total.
    """
    total_rows = 2 * tcol - 1
    for r in range(1, total_rows + 1):
        # Top half indents by row position; bottom half mirrors it.
        indent = r - 1 if r <= tcol else total_rows - r
        print(" " * indent + "*")
#Define left
def left():
    """Print a left-pointing arrowhead of '*' characters.

    Reads the global tcol (column count entered by the user): the indent
    shrinks to zero at the middle row and grows back, over 2*tcol - 1
    rows in total.
    """
    total_rows = 2 * tcol - 1
    for r in range(1, total_rows + 1):
        # Top half counts down to the point; bottom half mirrors it.
        indent = tcol - r if r <= tcol else r - tcol
        print(" " * indent + "*")
#Prompt until both the column count and the direction are valid.
#(An invalid direction re-asks for the column count too, matching the
#original prompt flow.)
while True:
    tcol = int(input("How many columns? "))
    if tcol < 0:
        print("Invalid entry, try again!")
        continue
    direction = input("Direction? (l)eft or (r)ight: ")
    if direction not in ("l", "r"):
        print("Invalid entry, try again!")
        continue
    break
#Draw the requested arrow.
if direction == "r":
    right()
elif direction == "l":
    left()
| {
"repo_name": "sojournexx/python",
"path": "Assignments/TanAndrew_assign4_problem3.py",
"copies": "1",
"size": "1481",
"license": "mit",
"hash": -1307996440044984600,
"line_mean": 18.8591549296,
"line_max": 55,
"alpha_frac": 0.4328156651,
"autogenerated": false,
"ratio": 4.013550135501355,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9880105515652984,
"avg_score": 0.013252056989674509,
"num_lines": 71
} |
#Andrew Tan, 2/16, Section 010, Roll the Dice
from random import randint

done = False
while not done:
    #A die needs at least three sides to be accepted.
    s = int(input("How many sides on your dice? "))
    if s < 3:
        print("Sorry, that's not a valid size value. Please choose a positive number.")
        continue
    print()
    print("Thanks! Here we go ...")
    print()
    #Per-game tallies: roll count, doubles count, running totals per die.
    rolls = 0
    doubles = 0
    total1 = 0
    total2 = 0
    #Keep rolling until snake eyes (double ones) come up.
    while not done:
        rolls += 1
        d1 = randint(1, s)
        d2 = randint(1, s)
        total1 += d1
        total2 += d2
        print("%d. die number 1 is %d and die number 2 is %d." %(rolls, d1, d2))
        if d1 == d2:
            doubles += 1
            if d1 == 1 and d2 == 1:
                #Snake eyes: report the statistics and stop.
                print()
                print("You got snake eyes! Finally! On try number %d!" %(rolls))
                print("Along the way you rolled doubles %d times" %(doubles))
                print("The average roll for die #1 was %.2f" %(total1 / rolls))
                print("The average roll for die #2 was %.2f" %(total2 / rolls))
                done = True
| {
"repo_name": "sojournexx/python",
"path": "Assignments/TanAndrew_assign4_problem1.py",
"copies": "1",
"size": "1502",
"license": "mit",
"hash": 5997404035860077000,
"line_mean": 29.2916666667,
"line_max": 87,
"alpha_frac": 0.4394141145,
"autogenerated": false,
"ratio": 4.230985915492957,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.011663088644873212,
"num_lines": 48
} |
#Andrew Tan, 2/17, Section 010, Pick Up Sticks
# Two players alternately remove 1-3 sticks; taking the last stick loses.
player = 1
# Ask until the starting stick count is inside the allowed range.
while True:
    sticks = int(input("How many sticks are on the table? (enter a number between 10 and 100): "))
    if 10 <= sticks <= 100:
        break
    print("Invalid # of sticks, please try again.")
# Play out turns until the sticks run out.
while True:
    print()
    print("There are %d sticks on the table." % (sticks))
    print("Turn: Player %d" % (player))
    removed = int(input("How many sticks do you want to remove from the table? (1, 2 or 3): "))
    # A move must take 1-3 sticks and cannot exceed what remains.
    if not 1 <= removed <= 3 or removed > sticks:
        print("Invalid number of sticks, try again.")
        continue
    sticks -= removed
    # Whoever empties the table loses.
    if sticks == 0:
        print("Player %d loses!" % (player))
        break
    # Alternate between player 1 and player 2.
    player = 2 if player == 1 else 1
| {
"repo_name": "sojournexx/python",
"path": "Assignments/TanAndrew_assign4_problem2.py",
"copies": "1",
"size": "1025",
"license": "mit",
"hash": 2418455050262915600,
"line_mean": 29.0606060606,
"line_max": 98,
"alpha_frac": 0.5951219512,
"autogenerated": false,
"ratio": 3.7683823529411766,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.48635043041411763,
"avg_score": null,
"num_lines": null
} |
#Andrew Tan, 2/2, Section 010, Data Size Converter
#Ask user to input file size
size_KB = float(input("Enter a file size, in kilobytes (KB): "))
#Converting and formatting the input
# Binary units: 1 KB = 1024 bytes, 1 byte = 8 bits; MB and GB divide by
# successive factors of 1024.
size_B = size_KB * 1024
size_b = size_B * 8
size_MB = size_KB / 1024
size_GB = size_MB / 1024
# ">20,.2f" right-aligns each value in a 20-character field with thousands
# separators and two decimals so the printed columns line up.
fsize_B = format(size_B, ">20,.2f")
fsize_b = format(size_b, ">20,.2f")
fsize_MB = format(size_MB, ">20,.2f")
fsize_GB = format(size_GB, ">20,.2f")
print()
#Display converted values in output
print("%.0f KB ..." %(size_KB))
print()
print("... in bits ", "%s" %(fsize_b), "bits")
print("... in bytes ", "%s" %(fsize_B), "bytes")
print("... in megabytes", "%s" %(fsize_MB), "MB")
print("... in gigabytes", "%s" %(fsize_GB), "GB")
'''
5 ways to crash
1. syntax error: fsize_B = format(size_B, >20,.2f) [missing quotation marks around the formatting argument]
2. logic error: size_B = size_KB * 1025 [1025 gives the wrong conversion]
3. runtime error: size_MB = size_KB / 0 [division by 0]
4. missing variables fsize_B, fsize_b, fsize_MB, fsize_GB will result in left justified values instead of right justified values
5. size_KB = input("Enter a file size, in kilobytes (KB): ") is missing a float function which will cause the variable to be a string which cannot be mathematically manipulated subsequently
'''
| {
"repo_name": "sojournexx/python",
"path": "Assignments/TanAndrew_assign2_problem3.py",
"copies": "1",
"size": "1336",
"license": "mit",
"hash": 7866315296839428000,
"line_mean": 35.1111111111,
"line_max": 189,
"alpha_frac": 0.6489520958,
"autogenerated": false,
"ratio": 2.8365180467091293,
"config_test": false,
"has_no_keywords": true,
"few_assignments": false,
"quality_score": 0.39854701425091293,
"avg_score": null,
"num_lines": null
} |
#Andrew Tan, 2/2, Section 010, Grade Calculator
# Combine three test scores and three homework scores into a weighted
# final course score using user-supplied weights.
name = input("What is your name? ")
course = input("What class are you in? ")
print()
# Tests: weight, three scores, then the average.
weight_test = float(input("How much are tests worth in this class (i.e. 0.40 for 40%): "))
test_scores = [float(input("Enter test score #%d: " % n)) for n in range(1, 4)]
print()
test_avg = sum(test_scores) / 3
print("Your test average is: %.2f" % (test_avg))
print()
# Homework: weight, three scores, then the average.
weight_hw = float(input("How much are homework assignments worth in this class (i.e. 0.60 for 60%): "))
hw_scores = [float(input("Enter homework score #%d: " % n)) for n in range(1, 4)]
print()
hw_avg = sum(hw_scores) / 3
print("Your homework average is: %.1f" % (hw_avg))
print()
# The final score is the weighted sum of the two averages.
total = hw_avg * weight_hw + test_avg * weight_test
print("Thanks, %s. Your final score in %s is %.2f" % (name, course, total))
| {
"repo_name": "sojournexx/python",
"path": "Assignments/TanAndrew_assign2_problem2.py",
"copies": "1",
"size": "1192",
"license": "mit",
"hash": -3629752821852159500,
"line_mean": 28.5641025641,
"line_max": 103,
"alpha_frac": 0.6543624161,
"autogenerated": false,
"ratio": 2.9651741293532337,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.41195365454532334,
"avg_score": null,
"num_lines": null
} |
#Andrew Tan, 3/30, Section 010, Part 1b

def _username_error(candidate):
    """Return the first username rule violated, or None when it is valid."""
    if len(candidate) < 8 or len(candidate) > 15:
        return "Username must be between 8 and 15 characters.\n"
    if not candidate.isalnum():
        return "Username must contain only alphanumeric characters.\n"
    if candidate[0].isnumeric():
        return "The first character in your username cannot be a digit.\n"
    if candidate.islower():
        return "Your username must contain at least one uppercase character.\n"
    if candidate.isupper():
        return "Your username must contain at least one lowercase character.\n"
    if candidate.isalpha():
        return "Your username must contain at least one digit.\n"
    return None

# Ask for a username until every rule passes.
while True:
    username = input("Please enter a username: ")
    error = _username_error(username)
    if error is None:
        print("Your username is valid!\n\n")
        break
    print(error)
# Ask for a password until every rule passes.
while True:
    lower = upper = num = special = 0
    invalid = False
    password = input("Please enter a password: ")
    # Classify each character; '#$%&' (ord 35-38) are the allowed specials.
    for ch in password:
        if ch.islower():
            lower += 1
        elif ch.isupper():
            upper += 1
        elif ch.isnumeric():
            num += 1
        elif 35 <= ord(ch) <= 38:
            special += 1
        else:
            invalid = True
    if len(password) < 8:
        print("Passwords must be at least 8 characters long.\n")
    elif username in password:
        print("You cannot use your username as part of your password.\n")
    elif invalid:
        print("Your password contains at least one invalid character.\n")
    elif num == 0:
        print("Your password must contain at least one digit.\n")
    elif upper == 0:
        print("Your password must contain at least one uppercase character.\n")
    elif lower == 0:
        print("Your password must contain at least one lowercase character.\n")
    elif special == 0:
        print("Your password must contain at least one 'special' character.\n")
    else:
        print("Your password is valid!")
        break
| {
"repo_name": "sojournexx/python",
"path": "Assignments/TanAndrew_assign7_part1b.py",
"copies": "1",
"size": "2463",
"license": "mit",
"hash": 2436013148459495000,
"line_mean": 30.4078947368,
"line_max": 79,
"alpha_frac": 0.5680064961,
"autogenerated": false,
"ratio": 4.569573283858998,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.014720967496066759,
"num_lines": 76
} |
#Andrew Tan, 3/30, Section 010, Part 2b
# Numerology toy: clean the name down to letters, sum their alphabet
# positions, then digit-sum repeatedly until we reach a single digit or
# one of the master numbers 11, 12 or 22.
name = str.lower(input("Name: "))
# Keep only lowercase letters a-z (ord 97-122); everything else is dropped.
name = "".join(ch for ch in name if 97 <= ord(ch) <= 122)
print("Your 'cleaned up' name is : %s" % (name))
# Trait associated with each possible final reduction value.
traits = {"0": "emptiness",
          "1": "independence",
          "2": "quiet",
          "3": "charming",
          "4": "harmony",
          "5": "new directions",
          "6": "love",
          "7": "spirituality",
          "8": "organization",
          "9": "romatic",
          "11": "idealism",
          "12": "perfectionist",
          "22": "builder"}
# Start with the sum of letter positions (a=1 ... z=26).
reduction = sum(ord(ch) - 96 for ch in name)
# Digit-sum until the value is 0-9 or a master number.
while reduction not in range(10) and reduction not in [11, 12, 22]:
    print("Reduction: %d" % (reduction))
    reduction = sum(int(digit) for digit in str(reduction))
print("Reduction: %d" % (reduction))
print("Your name reduces to ... %d - %s!" % (reduction, str.capitalize(traits[str(reduction)])))
| {
"repo_name": "sojournexx/python",
"path": "Assignments/TanAndrew_assign7_part2b.py",
"copies": "1",
"size": "1211",
"license": "mit",
"hash": 5748453541545539000,
"line_mean": 27.5365853659,
"line_max": 95,
"alpha_frac": 0.5227085054,
"autogenerated": false,
"ratio": 3.2379679144385025,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4260676419838503,
"avg_score": null,
"num_lines": null
} |
#Andrew Tan, 3/30, Section 010, Part 3b
import random
#Define functions
def add_letters(word, num):
new_word = ""
for char in word:
insert = ""
for i in range(num):
rdm = 91
while rdm in range(91, 97):
rdm = random.randint(65, 122)
insert = insert + chr(rdm)
new_word = new_word + char + insert
return new_word
def remove_letters(word, num):
    """Undo add_letters: keep every (num+1)-th character, starting at 0."""
    stride = num + 1
    return word[::stride]
def shift_characters(word, num):
    """Shift every character's code point by num (a negative num undoes it)."""
    return "".join(chr(ord(ch) + num) for ch in word)
# Menu loop: encode pads each char with random letters then shifts the
# whole string; decode un-shifts then strips the pad characters.
while True:
    mode = input("(e)ncode, (d)ecode or (q)uit: ")
    if mode == "q":
        break
    if mode not in ("e", "d"):
        continue  # silently re-prompt on unrecognized input, as before
    # Both operations need a key in 1-5; keep asking until we get one.
    while True:
        key = int(input("Enter a number between 1 and 5: "))
        if 1 <= key <= 5:
            break
    if mode == "e":
        phrase = input("Enter a phrase to encode: ")
        encrypted_word = shift_characters(add_letters(phrase, key), key)
        print("Your encoded word is: %s\n" % (encrypted_word))
    else:
        phrase = input("Enter a phrase to decode: ")
        decrypted_word = shift_characters(remove_letters(phrase, key), -key)
        print("Your decoded word is: %s\n" % (decrypted_word))
| {
"repo_name": "sojournexx/python",
"path": "Assignments/TanAndrew_assign7_part3b.py",
"copies": "1",
"size": "1615",
"license": "mit",
"hash": -22560099993465176,
"line_mean": 27.3636363636,
"line_max": 76,
"alpha_frac": 0.5287925697,
"autogenerated": false,
"ratio": 3.738425925925926,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4767218495625926,
"avg_score": null,
"num_lines": null
} |
#Andrew Tan, 4/12, Section 010, Part 2
#Product lists
# Three parallel lists describe the inventory: product_names[i] costs
# product_costs[i] per unit and has product_stock[i] units on hand.
product_names = ["hamburger", "cheeseburger", "small fries"]
product_costs = [0.99, 1.29, 1.49]
product_stock = [10, 5, 20]
#Main program
# Inventory REPL over the parallel lists above: each pass reads one
# command letter and runs that mode until the user quits.
while True:
    #Ask user to select mode and check validity
    option = str.lower(input("(s)earch, (l)ist, (a)dd, (r)emove, (u)pdate, r(e)port or (q)uit: "))
    #Search mode
    if option == "s":
        pdt = str.lower(input("Enter a product name: "))
        if pdt in product_names:
            # The parallel lists share one index per product.
            index = product_names.index(pdt)
            print("We sell \"%s\" at %.2f per unit" %(pdt, product_costs[index]))
            print("We currently have %d in stock\n" %(product_stock[index]))
        else:
            print("Sorry, we don't sell \"%s\"\n"%(pdt))
        continue
    #List mode
    elif option == "l":
        # Fixed-width columns; names longer than 23 chars are truncated.
        print("{:25s}{:7s}{:8s}".format("Product", "Price", "Quantity"))
        for i in range(len(product_names)):
            print("{:25.23s}{:<7.2f}{:<8d}".format(product_names[i], product_costs[i], product_stock[i]))
        print()
        continue
    #Add mode
    elif option == "a":
        # New name must be unique; cost and quantity must be positive.
        while True:
            pdt = str.lower(input("Enter a new product name: "))
            if pdt in product_names:
                print("Sorry, we already sell that product. Try again.")
                continue
            else:
                break
        while True:
            cost = float(input("Enter a product cost: "))
            if cost <= 0:
                print("Invalid cost. Try again.")
                continue
            else:
                break
        while True:
            qty = int(input("How many of these products do we have? "))
            if qty <= 0:
                print("Invalid quantity. Try again.")
                continue
            else:
                break
        product_names.append(pdt)
        product_costs.append(cost)
        product_stock.append(qty)
        print("Product added!\n")
        continue
    #Remove mode
    elif option == "r":
        pdt = str.lower(input("Enter a product name: "))
        if pdt not in product_names:
            print("Product doesn't exist. Can't remove.\n")
        else:
            # Delete the same slot from every parallel list.
            index = product_names.index(pdt)
            del product_names[index]
            del product_costs[index]
            del product_stock[index]
            print("Product removed!\n")
        continue
    #Update mode
    elif option == "u":
        pdt = str.lower(input("Enter a product name: "))
        if pdt not in product_names:
            print("Product doesn't exist. Can't update.\n")
        else:
            index = product_names.index(pdt)
            print("What would you like to update?")
            item = str.lower(input("(n)ame, (c)ost or (q)uantity: "))
            #Update name
            if item == "n":
                while True:
                    newname = str.lower(input("Enter a new name: "))
                    if newname in product_names:
                        print("Duplicate name!")
                        continue
                    else:
                        product_names[index] = newname
                        print("Product name has been updated\n")
                        break
            #Update cost
            elif item == "c":
                while True:
                    newcost = float(input("Enter a new cost: "))
                    if newcost <= 0:
                        print("Invalid cost!")
                        continue
                    else:
                        product_costs[index] = newcost
                        print("Product cost has been updated\n")
                        break
            #Update quantity
            elif item == "q":
                while True:
                    # BUG FIX: this used float(), which stored a float in
                    # product_stock and made list mode's "{:<8d}" format
                    # raise ValueError; quantities are ints everywhere else.
                    newqty = int(input("Enter a new quantity: "))
                    if newqty <= 0:
                        print("Invalid quantity!")
                        continue
                    else:
                        product_stock[index] = newqty
                        print("Product quantity has been updated\n")
                        break
            #Invalid option
            else:
                print("Invalid option\n")
        continue
    #Report mode
    elif option == "e":
        # Inventory value is the sum of cost x quantity over all products.
        totalcost = 0
        for cost, qty in zip(product_costs, product_stock):
            totalcost += cost*qty
        print("{:29s}{:.2f} ({})".format("Most expensive product:", max(product_costs), product_names[product_costs.index(max(product_costs))]))
        print("{:29s}{:.2f} ({})".format("Least expensive product:", min(product_costs), product_names[product_costs.index(min(product_costs))]))
        print("{:29s}{:.2f}\n".format("Total value of all products:", totalcost))
        continue
    #Quit program
    elif option == "q":
        print("See you soon!")
        break
    #Invalid mode
    else:
        print("Invalid option, try again\n")
        continue
| {
"repo_name": "sojournexx/python",
"path": "Assignments/TanAndrew_assign8_part2.py",
"copies": "1",
"size": "5111",
"license": "mit",
"hash": 6410638662772044000,
"line_mean": 33.7412587413,
"line_max": 145,
"alpha_frac": 0.4746624927,
"autogenerated": false,
"ratio": 4.432784041630529,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5407446534330529,
"avg_score": null,
"num_lines": null
} |
#Andrew Tan, 4/12, Section 010, Part 3
# Simple blackjack against the computer.  Cards are drawn with replacement
# from the fixed 52-card list below (a deliberate simplification -- the
# same card can be dealt twice); aces always count as 1.
import random
#Define possible cards and associated values
cards = ['10 of Hearts', '9 of Hearts', '8 of Hearts', '7 of Hearts', '6 of Hearts', '5 of Hearts', '4 of Hearts', '3 of Hearts', '2 of Hearts', 'Ace of Hearts', 'King of Hearts', 'Queen of Hearts', 'Jack of Hearts', '10 of Diamonds', '9 of Diamonds', '8 of Diamonds', '7 of Diamonds', '6 of Diamonds', '5 of Diamonds', '4 of Diamonds', '3 of Diamonds', '2 of Diamonds', 'Ace of Diamonds', 'King of Diamonds', 'Queen of Diamonds', 'Jack of Diamonds', '10 of Clubs', '9 of Clubs', '8 of Clubs', '7 of Clubs', '6 of Clubs', '5 of Clubs', '4 of Clubs', '3 of Clubs', '2 of Clubs', 'Ace of Clubs', 'King of Clubs', 'Queen of Clubs', 'Jack of Clubs', '10 of Spades', '9 of Spades', '8 of Spades', '7 of Spades', '6 of Spades', '5 of Spades', '4 of Spades', '3 of Spades', '2 of Spades', 'Ace of Spades', 'King of Spades', 'Queen of Spades', 'Jack of Spades']
values = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 10, 10, 10, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 10, 10, 10, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 10, 10, 10, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 10, 10, 10]
#First deal: two cards for the player.
player_hand = []
player_points = 0
for i in range(2):
    # BUG FIX: random.randint is inclusive at BOTH ends, so the old upper
    # bound of len(cards) could index one past the deck and crash.
    index = random.randint(0, len(cards) - 1)
    player_hand.append(cards[index])
    player_points += values[index]
print("Player hand: {} is worth {}".format(player_hand, player_points))
#Subsequent deals: the player hits or stands until 21 or bust.
while player_points < 21:
    option = str.lower(input("(h)it or (s)tand? "))
    if option == "h":
        index = random.randint(0, len(cards) - 1)  # inclusive-bound fix
        player_hand.append(cards[index])
        player_points += values[index]
        print("You drew {}".format(cards[index]))
        print("Player hand: {} is worth {}".format(player_hand, player_points))
        continue
    if option == "s":
        print()
        break
if player_points == 21:
    print("Player got 21! Blackjack!")
    winner = "Player"
elif player_points > 21:
    print("Bust!")
    winner = "Computer"
#Computer deals: it draws until it reaches 21 or beats the player's total.
elif player_points < 21:
    computer_hand = []
    computer_points = 0
    for i in range(2):
        index = random.randint(0, len(cards) - 1)  # inclusive-bound fix
        computer_hand.append(cards[index])
        computer_points += values[index]
    print("Computer hand: {} is worth {}".format(computer_hand, computer_points))
    while computer_points < 21 and computer_points < player_points:
        index = random.randint(0, len(cards) - 1)  # inclusive-bound fix
        computer_hand.append(cards[index])
        computer_points += values[index]
        print("Computer drew {}".format(cards[index]))
        print("Computer hand: {} is worth {}".format(computer_hand, computer_points))
    if computer_points == 21:
        print("Computer got 21! Blackjack!")
        winner = "Computer"
    elif computer_points > 21:
        print("Bust!")
        winner = "Player"
    else:
        winner = "Computer"
#Display winner
print("{} wins!".format(winner))
| {
"repo_name": "sojournexx/python",
"path": "Assignments/TanAndrew_assign8_part3.py",
"copies": "1",
"size": "2988",
"license": "mit",
"hash": -497375878390467800,
"line_mean": 43.9692307692,
"line_max": 854,
"alpha_frac": 0.5967202142,
"autogenerated": false,
"ratio": 2.967229394240318,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.40639496084403176,
"avg_score": null,
"num_lines": null
} |
#Andrew Tan, 4/24, Section 010, Assignment 9
# Grade a class file of multiple-choice answers: +4 per correct answer,
# -1 per wrong answer, 0 for a blank.  Prints summary statistics, writes
# a grades file, and optionally applies a curve.
try:
    file = input("Enter a class file to grade (i.e. class1 for class1.txt): ")
    file_obj = open(file+".txt", "r")
    print("Successfully opened {}.txt\n".format(file))
    alldata = file_obj.read()
    file_obj.close()  # BUG FIX: the input handle was never closed
    student = alldata.split("\n")
    unusable = 0
    answerkey = "B,A,D,D,C,B,D,A,C,C,D,B,A,B,A,C,B,D,A,C,A,A,B,D,D"
    answer = answerkey.split(",")
    grades = []
    student_id = []
    for i in range(len(student)):
        response = student[i].split(",")
        # A usable line is exactly an id plus 25 answers.
        if len(response) != 26:
            unusable += 1
        else:
            #Grading
            student_id.append(response[0])
            del response[0]
            points = 0
            for qn in range(len(response)):
                if response[qn] == answer[qn]:
                    points += 4
                elif response[qn] == "":
                    points += 0
                else:
                    points -= 1
            grades.append(points)
    #Calculate average
    total = 0
    for grd in grades:
        total += grd
    avg = total/len(grades)
    #Calculate median
    # ordered_grades keeps submission order for the output file; grades is
    # rebound to a sorted copy for the order statistics below.
    ordered_grades = grades
    grades = sorted(ordered_grades)
    if len(grades) % 2 == 0:
        median = (grades[int(len(grades)/2-1)] + grades[int(len(grades)/2)]) / 2
    else:
        median = grades[int((len(grades))/2)]
    #Calculate mode (every value tied for the highest frequency)
    count = []
    unique = []
    mode = []
    for x in grades:
        if x not in unique:
            unique.append(x)
            count.append(0)
            for y in grades:
                if x == y:
                    loc = unique.index(x)
                    count[loc] += 1
    for unq, cnt in zip(unique, count):
        if cnt == max(count):
            mode.append(unq)
    #Display statistics
    print("Grade Summary:")
    print("Total students: {}".format(len(student_id)))
    print("Unusable lines in the file: {}".format(unusable))
    print("Highest score: {}".format(max(grades)))
    print("Lowest score: {}".format(min(grades)))
    print("Mean score: {:.2f}".format(avg))
    print("Median score: {}".format(int(median)))
    print("Mode: {}".format(str(mode)[1:-1].replace(",", "")))
    print("Range: {}".format(max(grades)-min(grades)))
    #Write grades to file (id,score with two decimals)
    file_object = open(file+"_grades.txt", "w")
    for sid, grd in zip(student_id, ordered_grades):
        file_object.write("{},{:.2f}\n".format(sid, float(grd)))
    file_object.close()
    #Curve grades: shift every score so the mean matches the target
    curve = str.lower(input("Would you like to curve the exam? 'y' or 'n': "))
    if curve == "y":
        while True:
            desired_mean = float(input("Enter a desired mean (i.e. 75.0 to raise the mean score to 75.0): "))
            if desired_mean <= avg:
                print("Invalid curve, try again.")
                continue
            else:
                curve = desired_mean - avg
                curved_grades = list(ordered_grades)
                for i in range(len(curved_grades)):
                    curved_grades[i] += curve
                file_object = open(file+"_grades.txt", "w")
                # NOTE(review): int(cgrd)+1 looks like a crude round-up of
                # the fractional curve -- confirm the intended rounding rule.
                for sid, grd, cgrd in zip(student_id, ordered_grades, curved_grades):
                    file_object.write("{},{},{}\n".format(sid, int(grd), int(cgrd)+1))
                file_object.close()
                print("Done! Check your grade file!")
                break
except FileNotFoundError:
    # BUG FIX: this was a bare "except:" which reported EVERY error (even
    # bugs in the code above) as a missing file; only a genuinely missing
    # input file is caught now, anything else surfaces with a traceback.
    print("File cannot be found.")
| {
"repo_name": "sojournexx/python",
"path": "Assignments/Assignment9/TanAndrew_assign9.py",
"copies": "1",
"size": "3574",
"license": "mit",
"hash": -170414812804154940,
"line_mean": 33.0392156863,
"line_max": 109,
"alpha_frac": 0.5047565753,
"autogenerated": false,
"ratio": 3.7463312368972748,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9695325969947011,
"avg_score": 0.011152368450052887,
"num_lines": 102
} |
#Andrew Tan, 5/3, Section 010, Part 3
# Randomly replace words in a lyrics file with uppercase synonyms taken
# from a thesaurus file of "word,synonym1,synonym2,..." lines.
import random
#Create dictionary from external file
file_obj = open("python_asg10_Roget_Thesaurus.txt", "r")
alldata = file_obj.read()
file_obj.close()  # BUG FIX: handle was never closed
splitdata = alldata.split("\n")
thesaurus = {}
for line in splitdata:
    value = line.split(",")
    key = value[0]
    del value[0]
    thesaurus[key] = value
count = len(thesaurus.keys())
print("Total words in thesaurus: {}".format(count))
#Ask user to input probability of word replacement
chance = float(input("Enter a % chance to change a word: "))
# NOTE(review): the comparison below works on a 1-100 scale, so this *100
# assumes the user types a fraction like 0.25 despite the "%" wording in
# the prompt -- confirm the intended input format.
chance = chance * 100
#Retrieve input from external file
file_obj2 = open("bieber_baby.txt", "r")
phrase = file_obj2.read()
file_obj2.close()  # BUG FIX: handle was never closed
#Replace words found in dictionary
phrase = phrase.replace(",", "")
phrase = phrase.replace(".", "")
phrase = phrase.replace("!", "")
split_phrase = phrase.split(" ")
# BUG FIX: the old loop located each word with split_phrase.index(word),
# which returns the FIRST occurrence and could replace the wrong duplicate;
# enumerate pins the position actually being visited.
for pos, word in enumerate(split_phrase):
    if random.randint(1, 100) <= chance:
        if word in thesaurus:
            synonyms = thesaurus[word]
            split_phrase[pos] = str.upper(synonyms[random.randint(0, len(synonyms) - 1)])
phrase = " ".join(split_phrase)
#Display output
print(phrase)
| {
"repo_name": "sojournexx/python",
"path": "Assignments/Assignment10/TanAndrew_assign10_part3.py",
"copies": "1",
"size": "1201",
"license": "mit",
"hash": 3819503848248699000,
"line_mean": 24.6888888889,
"line_max": 103,
"alpha_frac": 0.6444629475,
"autogenerated": false,
"ratio": 3.0405063291139243,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9163367530502374,
"avg_score": 0.004320349222310006,
"num_lines": 45
} |
# Andrew Wilson
# Translated from Joe Groff's "An intro to modern OpenGL"
# http://duriansoftware.com/joe/An-intro-to-modern-OpenGL.-Chapter-1:-The-Graphics-Pipeline.html
from OpenGL.GL import *
from OpenGL.GLU import gluBuild2DMipmaps
import pygame, pygame.image, pygame.key
from pygame.locals import *
import numpy
import math
import sys
import ctypes
def make_buffer(target, buffer_data, size):
    """Create a GL buffer object bound to target and upload size bytes of
    buffer_data with GL_STATIC_DRAW usage.  Returns the buffer id."""
    buffer = glGenBuffers(1)
    glBindBuffer(target, buffer)  # must be bound before glBufferData uploads
    glBufferData(target, size, buffer_data, GL_STATIC_DRAW)
    return buffer
def float_array(*args):
    """Pack the given numbers into a numpy array with GLfloat dtype."""
    return numpy.array(args, dtype=GLfloat)
def short_array(*args):
    """Pack the given numbers into a numpy array with GLshort dtype."""
    return numpy.array(args, dtype=GLshort)
def float_array_buffer(*args):
    """Upload the given floats into a new GL_ARRAY_BUFFER (vertex data)."""
    array = float_array(*args)
    return make_buffer(
        GL_ARRAY_BUFFER,
        array,
        array.nbytes)
def short_element_buffer(*args):
    """Upload the given shorts into a new GL_ELEMENT_ARRAY_BUFFER (indices)."""
    array = short_array(*args)
    return make_buffer(
        GL_ELEMENT_ARRAY_BUFFER,
        array,
        array.nbytes)
def translation_matrix(x,y,z):
    """Return a 4x4 GLfloat translation matrix with the offset in the
    bottom row (row-vector convention)."""
    return numpy.matrix(
        [[1,0,0,0],
         [0,1,0,0],
         [0,0,1,0],
         [x,y,z,1]],
        dtype=GLfloat)
def make_texture(filename, mipmaps=False):
    """Load an image file into a new GL_TEXTURE_2D texture and return its id.

    With mipmaps=True the full chain is generated with gluBuild2DMipmaps
    and trilinear minification is enabled; otherwise a single GL_LINEAR
    level is uploaded.  The texture is left bound on return.
    """
    image = pygame.image.load(filename)
    # "RGBA", True flips rows vertically so pygame's top-down pixel layout
    # matches OpenGL's bottom-up texture origin.
    pixels = pygame.image.tostring(image, "RGBA", True)
    texture=glGenTextures(1)
    glBindTexture(GL_TEXTURE_2D, texture)
    # Filtering and clamped wrap modes apply to the currently bound texture.
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER,
                    GL_LINEAR_MIPMAP_LINEAR if mipmaps else GL_LINEAR)
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR)
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE)
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE)
    if mipmaps:
        gluBuild2DMipmaps(
            GL_TEXTURE_2D,
            GL_RGBA8,
            image.get_width(),
            image.get_height(),
            GL_RGBA, GL_UNSIGNED_BYTE,
            pixels)
    else:
        glTexImage2D(
            GL_TEXTURE_2D, 0,
            GL_RGBA8,
            image.get_width(), image.get_height(), 0,
            GL_RGBA, GL_UNSIGNED_BYTE,
            pixels)
    return texture
def make_shader(type, source):
    """Compile a GLSL shader of the given type (e.g. GL_VERTEX_SHADER).

    Prints the driver's info log to stderr and raises if compilation fails.
    NOTE(review): ctypes is not imported at the top of this file -- this
    line raises NameError at runtime unless a star import happens to
    provide it; add "import ctypes".
    """
    shader = glCreateShader(type)
    glShaderSource(shader, source)
    glCompileShader(shader)
    retval = ctypes.c_uint()
    glGetShaderiv(shader, GL_COMPILE_STATUS, retval)
    if not retval:
        # Python 2 print-to-stderr syntax; this file predates Python 3.
        print >> sys.stderr, "Failed to compile shader."
        show_info_log(shader, glGetShaderiv, glGetShaderInfoLog)
        glDeleteShader(shader)
        raise Exception("Failed to compile shader.")
    return shader
def show_info_log(object, getiv, getinfolog):
    """Print a shader/program info log to stderr.

    getiv/getinfolog are the matching GL query pair, e.g. glGetShaderiv and
    glGetShaderInfoLog.
    NOTE(review): ctypes is not imported at the top of this file -- verify.
    """
    log_length = ctypes.c_int()
    getiv(object, GL_INFO_LOG_LENGTH, log_length)
    log = ctypes.create_string_buffer(log_length.value)
    # PyOpenGL's getinfolog returns the log directly, so the pre-sized
    # buffer above is immediately overwritten (kept from the C original).
    #getinfolog(object, log_length, None, log)
    log = getinfolog(object)
    print >> sys.stderr, log
def make_program(vertex_shader, fragment_shader):
    """Link a compiled vertex and fragment shader into a GL program.

    Prints the driver's info log to stderr and raises if linking fails.
    NOTE(review): ctypes is not imported at the top of this file -- verify.
    """
    program = glCreateProgram()
    glAttachShader(program, vertex_shader)
    glAttachShader(program, fragment_shader)
    glLinkProgram(program)
    retval = ctypes.c_int()
    glGetProgramiv(program, GL_LINK_STATUS, retval)
    if not retval:
        print >> sys.stderr, "Failed to link shader program."
        show_info_log(program, glGetProgramiv, glGetProgramInfoLog)
        glDeleteProgram(program)
        raise Exception("Failed to link shader program.")
    return program
| {
"repo_name": "weeble/clockworkcodex_ogl",
"path": "ogl_helpers.py",
"copies": "1",
"size": "3395",
"license": "mit",
"hash": 8318609516410703000,
"line_mean": 30.1467889908,
"line_max": 96,
"alpha_frac": 0.6583210604,
"autogenerated": false,
"ratio": 3.318670576735093,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9382807483323283,
"avg_score": 0.018836830762361822,
"num_lines": 109
} |
class AnalysisInitDefaultValue(object):
    """Default constants for the analysis stage: numeric analysis-type
    codes plus the Android permission and sensitive-API watch lists."""

    APP_NAME = "kltn"

    # Numeric identifier for each kind of analysis pass.
    A_PERMISSION = 1
    A_CFG = 2
    A_STRING = 3
    A_PACKAGES = 4
    A_PACKAGES_IE = 5
    A_PACKAGES_II = 6
    A_SEARCH_PACKAGES = 7
    A_SEARCH_METHODS = 8
    A_OBJECT_CREATED = 9
    A_DynCode = 10
    A_ReflectionCode = 11

    # Filled in at runtime for the APK under analysis.
    APP_PACKAGE = ""
    PlatformBuildVersionCode = ""

    # Permissions Android grants automatically (protection level "normal").
    Normal_Permissions = [
        "android.permission.ACCESS_LOCATION_EXTRA_COMMANDS",
        "android.permission.ACCESS_NETWORK_STATE",
        "android.permission.ACCESS_NOTIFICATION_POLICY",
        "android.permission.ACCESS_WIFI_STATE",
        "android.permission.BLUETOOTH",
        "android.permission.BLUETOOTH_ADMIN",
        "android.permission.BROADCAST_STICKY",
        "android.permission.CHANGE_NETWORK_STATE",
        "android.permission.CHANGE_WIFI_MULTICAST_STATE",
        "android.permission.CHANGE_WIFI_STATE",
        "android.permission.DISABLE_KEYGUARD",
        "android.permission.EXPAND_STATUS_BAR",
        "android.permission.FLASHLIGHT",
        "android.permission.GET_PACKAGE_SIZE",
        "android.permission.INTERNET",
        "android.permission.KILL_BACKGROUND_PROCESSES",
        "android.permission.MODIFY_AUDIO_SETTINGS",
        "android.permission.NFC",
        "android.permission.READ_SYNC_SETTINGS",
        "android.permission.READ_SYNC_STATS",
        "android.permission.RECEIVE_BOOT_COMPLETED",
        "android.permission.REORDER_TASKS",
        "android.permission.REQUEST_INSTALL_PACKAGES",
        "android.permission.SET_TIME_ZONE",
        "android.permission.SET_WALLPAPER",
        "android.permission.SET_WALLPAPER_HINTS",
        "android.permission.TRANSMIT_IR",
        "android.permission.USE_FINGERPRINT",
        "android.permission.VIBRATE",
        "android.permission.WAKE_LOCK",
        "android.permission.WRITE_SYNC_SETTINGS",
        "com.android.alarm.permission.SET_ALARM",
        "com.android.launcher.permission.INSTALL_SHORTCUT",
        "com.android.launcher.permission.UNINSTALL_SHORTCUT",
    ]

    # Permissions requiring an explicit user grant (protection "dangerous").
    Dangerous_Permissions = [
        "android.permission.READ_CALENDAR",
        "android.permission.WRITE_CALENDAR",
        "android.permission.CAMERA",
        "android.permission.READ_CONTACTS",
        "android.permission.WRITE_CONTACTS",
        "android.permission.GET_ACCOUNTS",
        "android.permission.ACCESS_FINE_LOCATION",
        "android.permission.ACCESS_COARSE_LOCATION",
        "android.permission.RECORD_AUDIO",
        "android.permission.READ_PHONE_STATE",
        "android.permission.CALL_PHONE",
        "android.permission.READ_CALL_LOG",
        "android.permission.WRITE_CALL_LOG",
        "com.android.voicemail.permission.ADD_VOICEMAIL",
        "android.permission.USE_SIP",
        "android.permission.PROCESS_OUTGOING_CALLS",
        "android.permission.BODY_SENSORS",
        "android.permission.SEND_SMS",
        "android.permission.RECEIVE_SMS",
        "android.permission.READ_SMS",
        "android.permission.RECEIVE_WAP_PUSH",
        "android.permission.RECEIVE_MMS",
        "android.permission.READ_EXTERNAL_STORAGE",
        "android.permission.WRITE_EXTERNAL_STORAGE",
    ]

    # Method names whose presence is flagged as privacy/security sensitive.
    Sensitive_APIs = [
        "getCellLocation",
        "getNetworkOperator",
        "getSubscriberId",
        "getSimSerialNumber",
        "getVoiceMailNumber",
        "getLastKnownLocation",
        "getAccounts",
        "getPassword",
        "peekAuthToken",
        "getProfileConnectionState",
        "getParams",
        "getCertificate",
        "getAllVisitedUrls",
        "getDeviceId",
        "getPhoneType",
        "getLine1Number",
        "getVoiceMailAlphaTag",
        "getAllProviders",
        "getGpsStatus",
        "editProperties",
        "getAuthToken",
        "getUserData",
        "getName",
        "getProfileProxy",
        "getUngzippedContent",
        "getAllBookmarks",
        "sendDataMessage",
        "sendTextMessage",
        "sendMultipartTextMessage",
    ]
| {
"repo_name": "congthuc/androguard-2.0-custom",
"path": "resourcefactories/AnalysisInitDefaultValue.py",
"copies": "1",
"size": "4417",
"license": "apache-2.0",
"hash": -4989600342780129000,
"line_mean": 39.1545454545,
"line_max": 60,
"alpha_frac": 0.5800316957,
"autogenerated": false,
"ratio": 4.1280373831775705,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5208069078877571,
"avg_score": null,
"num_lines": null
} |
# Sikuli (Jython / Python 2) screen-automation script: each constant names
# a screenshot pattern that Sikuli matches on screen to drive clicks.
ANDROID_BOOM_ICON = "ANDROID_BOOM_ICON.png"
ANDROID_LOCK_BUTTON = "1412005436007.png"
CLASH_GAME_ICON = "1449765483207.png"
CLASH_RELOAD_BUTTON = "clash_reload_button.png"
CLASH_RELOAD_ANOTHER_DEVICE = "clash_reload_another_device.png"
CLASH_UPGRADE_ICON = "CLASH_UPGRADE_ICON.png"
# .similar(...) lowers the required match confidence for fuzzier patterns.
CLASH_UPGRADE_BUTTON = Pattern("CLASH_UPGRADE_BUTTON.png").similar(0.66)
CLASH_UPGRADE_CONFIRM = "CLASH_UPGRADE_CONFIRM.png"
CLASH_COIN_PATTERN = "CLASH_COIN_PATTERN.png"
CLASH_WOOD_PATTERN = "CLASH_WOOD_PATTERN.png"
CLASH_STONE_PATTERN = "CLASH_STONE_PATTERN.png"
CLASH_IRON_PATTERN = "CLASH_IRON_PATTERN.png"
CLASH_UPGRADE_CANCEL = "CLASH_UPGRADE_CANCEL.png"
CLASH_GRASS = "CLASH_GRASS.png"
GENY_BACK_BUTTON = Pattern("1412065594056.png").similar(0.67)
GENY_PLAY_BUTTON = "1412005237101.png"
GENY_CONTINUE_BUTTON = "geny_continue_button.png"
# Bring the Genymotion "player" window to the foreground before clicking.
App.focus("player")
# Click every visible upgrade badge; for each match walk through the
# upgrade -> cancel -> grass (dismiss) screen sequence.
all_upgrades_available = findAll(CLASH_UPGRADE_ICON)
for update in all_upgrades_available:
    update.highlight(3)
    update.click()
    sleep(1)
    try:
        wait(CLASH_UPGRADE_BUTTON, 10)
        click()
        wait(CLASH_UPGRADE_CANCEL, 10)
        click()
        wait(CLASH_GRASS, 10)
        click()
    except:
        # Best-effort: matches that aren't upgradeable buildings time out.
        print "likely a boat"
"repo_name": "mackuntu/CoCScript",
"path": "boom.sikuli/boom.py",
"copies": "1",
"size": "1201",
"license": "mit",
"hash": -3765352941671351300,
"line_mean": 34.3529411765,
"line_max": 72,
"alpha_frac": 0.7160699417,
"autogenerated": false,
"ratio": 2.6395604395604395,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.3855630381260439,
"avg_score": null,
"num_lines": null
} |
"""Built-in module for archiving a project"""
from __future__ import absolute_import
from __future__ import print_function
import os
import zipfile
import dtf.properties as prop
import dtf.logging as log
from dtf.module import Module
class archive(Module):  # pylint: disable=invalid-name

    """Module class for archiving a project into a ZIP file."""

    @classmethod
    def usage(cls):
        """Print module usage and return 0."""

        print('dtf Archiver')
        print('')
        print('Subcommands:')
        print('    create      Archive the current project.')
        print('')

        return 0

    def make_zip(self, zip_name):
        """Zip the current working directory (recursively) into zip_name.

        Returns 0 on success, -1 when DEFLATE support is unavailable.
        """

        zip_f = None

        try:
            zip_f = zipfile.ZipFile(zip_name, 'w', zipfile.ZIP_DEFLATED)
        except RuntimeError:
            log.e(self.name, "ZIP_DEFLATE not available!")
            return -1

        # Resolve once so the walk below can skip the archive itself.
        zip_abs_path = os.path.abspath(zip_name)

        # pylint: disable=unused-variable
        for root, dirs, files in os.walk(os.getcwd()):
            for file_to_add in files:
                full_path = os.path.join(root, file_to_add)
                # BUG FIX: when the archive is created inside the tree being
                # walked, the old code added the half-written zip to itself.
                if os.path.abspath(full_path) == zip_abs_path:
                    continue
                zip_f.write(full_path)

        zip_f.close()
        return 0

    def do_create(self, args):
        """Create the archive, named either by args or the project version.

        Returns make_zip's status (0 on success).
        """

        zip_name = ""

        if len(args) == 0:
            zip_name = "%s.zip" % prop.get_prop('Info', 'version-string')
        else:
            zip_name = args.pop()

        log.i(self.name, "Archiving to '%s'..." % zip_name)

        rtn = self.make_zip(zip_name)

        if rtn != 0:
            log.e(self.name, "Unable to archive project!")

        return rtn

    def execute(self, args):
        """Main module executor: dispatch on the first sub-command token."""

        # NOTE(review): __self__ is not a standard instance attribute --
        # presumably supplied by the dtf Module base class; confirm before
        # changing.
        self.name = self.__self__

        rtn = 0

        if len(args) < 1:
            return self.usage()

        sub_cmd = args.pop(0)

        if sub_cmd == 'create':
            rtn = self.do_create(args)
        else:
            print("Sub-command '%s' not found!" % sub_cmd)
            rtn = self.usage()

        return rtn
| {
"repo_name": "android-dtf/dtf",
"path": "python-dtf/dtf/core/cmds/archive.py",
"copies": "2",
"size": "2591",
"license": "apache-2.0",
"hash": -3578807915310148600,
"line_mean": 23.214953271,
"line_max": 74,
"alpha_frac": 0.5901196449,
"autogenerated": false,
"ratio": 3.8385185185185184,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5428638163418519,
"avg_score": null,
"num_lines": null
} |
"""Built-in module for client on device """
from __future__ import absolute_import
from __future__ import print_function
import os.path
from argparse import ArgumentParser
import dtf.logging as log
import dtf.properties as prop
import dtf.adb as adb
from dtf.module import Module
from dtf.constants import DTF_CLIENT
from dtf.globals import get_generic_global
from dtf.client import (DtfClient, RESP_OK, RESP_NO_READ, RESP_ERROR,
RESP_NO_WRITE, RESP_EXISTS, RESP_NO_EXIST,
ERR_SOCK)
DEFAULT_UPLOAD_PATH = '/data/data/com.dtf.client'
class client(Module):  # pylint: disable=invalid-name

    """Module class for dtf client"""

    adb = adb.DtfAdb()
    client = DtfClient()

    @classmethod
    def usage(cls):
        """Display module usage"""
        print('dtf Client Manager')
        print('Subcommands:')
        print(' download Download a file using dtfClient.')
        print(' execute Execute a command using dtfClient.')
        print(' install Install the dtf client on device.')
        print(' status Print the install status of the client.')
        print(' remove Uninstall the dtf client.')
        print(" restart Restart dtfClient's socket service.")
        print(' upload Upload file using dtfClient.')
        print(' mode Configure connection mode.')
        print('')

        return 0

    def do_install(self):
        """Install the dtf client APK on the device; return 0 or -1."""
        dtf_client_path = os.path.expanduser(
            get_generic_global("Client", "apk_file"))

        if not os.path.isfile(dtf_client_path):
            log.e(self.name, "Unable to find APK file: %s" % dtf_client_path)
            return -1

        log.i(self.name, "Waiting for device to be connected...")
        self.adb.wait_for_device()

        log.i(self.name, "Removing old client if it exists...")
        self.adb.uninstall(DTF_CLIENT)

        log.i(self.name, "Installing dtf client...")
        self.adb.install(dtf_client_path)

        # Kick the client's init service so it unpacks its assets.
        cmd = "am startservice -a com.dtf.action.name.INITIALIZE"
        self.adb.shell_command(cmd)

        busybox_path = "/data/data/%s/files/busybox" % DTF_CLIENT
        prop.set_prop('Info', 'busybox', busybox_path)

        log.i(self.name, "dtf client installed.")

        return 0

    def do_status(self):
        """Print the install status of the client; return 0."""
        if self.adb.is_installed(DTF_CLIENT):
            print('dtf Client Status: Installed')
            print('')
        else:
            print('dtf Client Status: Not Installed')
            print('')

        # Return an explicit code like the other do_* handlers so execute()
        # never propagates None as the module's return value.
        return 0

    def do_remove(self):
        """Uninstall the dtf client and clear the busybox property."""
        log.i(self.name, "Waiting for device to be connected...")
        self.adb.wait_for_device()

        log.i(self.name, "Removing dtf client...")
        self.adb.uninstall(DTF_CLIENT)

        prop.del_prop('Info', 'busybox')

        log.i(self.name, "dtf client removed!")

        return 0

    def do_upload(self, args):
        """Upload a local file into the client's directory on the device.

        Returns 0 on success, -1 on any error.
        """
        parser = ArgumentParser(
            prog='client upload',
            description='Upload file to device with dtfClient.')
        parser.add_argument('--path', dest='upload_path',
                            default=None, help="Specify a upload point.")
        parser.add_argument('file_name', type=str,
                            help='The file to upload.')

        args = parser.parse_args(args)

        file_name = args.file_name

        if args.upload_path is None:
            upload_file_name = os.path.basename(file_name)
            upload_path = "%s/%s" % (DEFAULT_UPLOAD_PATH, upload_file_name)
        else:
            upload_path = args.upload_path

        if not os.path.isfile(file_name):
            log.e(self.name, "File does not exist: %s" % file_name)
            return -1

        log.i(self.name, "Waiting for device to be connected...")
        self.adb.wait_for_device()
        log.i(self.name, "Device connected!")

        # Is client installed?
        if not self.adb.is_installed(DTF_CLIENT):
            log.e(self.name, "dtf Client is not installed!")
            return -1

        resp = self.client.upload_file(file_name, upload_path)

        if resp == RESP_OK:
            log.i(self.name, "File upload success!")
            return 0

        # These are all error conditions
        if resp == RESP_ERROR:
            log.e(self.name, "General error!")
        elif resp == RESP_EXISTS:
            log.e(self.name, "Remote file exist!")
        elif resp == RESP_NO_WRITE:
            log.e(self.name, "No write permissions!")
        elif resp == ERR_SOCK:
            log.e(self.name, "Socket error!")
        else:
            log.e(self.name, "Unknown response, cannot proceed.")

        # Getting here means error.
        return -1

    def do_download(self, args):
        """Download a device file to a local path via the dtfClient API.

        Returns 0 on success, -1 on any error.
        """
        parser = ArgumentParser(
            prog='client download',
            description='Download file from device with dtfClient.')
        parser.add_argument('--path', dest='download_path',
                            default=None, help="Specify local path.")
        parser.add_argument('file_name', type=str,
                            help='The file to download.')

        args = parser.parse_args(args)

        file_name = args.file_name

        if args.download_path is None:
            local_path = os.path.basename(file_name)
        else:
            local_path = args.download_path

        # Refuse to clobber an existing local file.
        if os.path.isfile(local_path):
            log.e(self.name, "Local file '%s' already exists!" % local_path)
            return -1

        log.i(self.name, "Waiting for connected device...")
        self.adb.wait_for_device()
        log.i(self.name, "Device connected!")

        # Is client installed?
        if not self.adb.is_installed(DTF_CLIENT):
            log.e(self.name, "dtf Client is not installed!")
            return -1

        resp = self.client.download_file(file_name, local_path)

        if resp == RESP_OK:
            log.i(self.name, "File download success!")
            return 0

        # These are all error conditions
        if resp == RESP_ERROR:
            log.e(self.name, "General error!")
        elif resp == RESP_NO_EXIST:
            log.e(self.name, "Remote file doesnt exist!")
        elif resp == RESP_NO_READ:
            log.e(self.name, "No read permissions!")
        elif resp == ERR_SOCK:
            log.e(self.name, "Socket error!")
        else:
            log.e(self.name, "Unknown response, cannot proceed.")

        # Getting here means an error
        return -1

    def do_restart(self):
        """Restart the socket service on the dtfClient"""
        log.i(self.name, "Waiting for device to be connected...")
        self.adb.wait_for_device()
        log.i(self.name, "Connected!")

        cmd = "am startservice -a com.dtf.action.name.RESTART_SOCKET"
        self.adb.shell_command(cmd)

        return 0

    def do_execute(self, args):
        """Execute a single shell command on device via the dtfClient."""
        if len(args) != 1:
            print('Usage:')
            print('dtf client execute [command]')
            return -1

        command_string = args.pop()

        log.i(self.name, "Waiting for connected device...")
        self.adb.wait_for_device()
        log.i(self.name, "Device connected!")

        # Is client installed?
        if not self.adb.is_installed(DTF_CLIENT):
            log.e(self.name, "dtf Client is not installed!")
            return -1

        response, resp_code = self.client.execute_command(command_string)

        if resp_code == RESP_OK:
            print(response)
            return 0
        elif resp_code == ERR_SOCK:
            log.e(self.name, "Socket error!")
            return -1
        else:
            log.e(self.name, "Something went wrong with the command (Err: %s)"
                  % ord(resp_code))
            return -1

    def do_mode(self, args):
        """Configure the adb connection mode (usb or wifi <ip:port>)."""
        if len(args) < 1:
            # NOTE(review): assumes 'Client'/'mode' is always set (it is
            # written during `dtf init`) -- otherwise get_prop may fail.
            current_mode = prop.get_prop('Client', 'mode')
            print("Current Mode: %s" % current_mode)
            print('')
            print('Usage:')
            print('dtf client mode [usb|wifi <ip:port>]')
            return -1

        mode = args.pop(0)

        if mode not in [adb.MODE_USB, adb.MODE_WIFI]:
            log.e(self.name, "Invalid mode!")
            return -2

        self.adb.wait_for_device()

        # Wifi mode requires IP:Port
        if mode == adb.MODE_WIFI:
            if len(args) != 1:
                log.e(self.name, "Wifi mode requires IP address:port!")
                return -3

            try:
                ip_address, port = args[0].split(":")
            except ValueError:
                log.e(self.name, "Invalid IP address:port!")
                return -4

            log.i(self.name, "Setting Wifi mode to %s:%s..."
                  % (ip_address, port))

            # Reconfigure the client
            try:
                self.client.set_to_wifi(ip_address, port)
            except IOError:
                log.e(self.name, "Unable to set to wifi mode!")
                log.e(self.name, "Please reconnect your USB device.")
                return -5

            # Set the properties
            prop.set_prop('Client', 'mode', adb.MODE_WIFI)
            prop.set_prop('Client', 'ip-addr', ip_address)
            prop.set_prop('Client', 'port', port)

        # USB Takes no arguments
        elif mode == adb.MODE_USB:
            log.i(self.name, "Setting to USB mode...")

            # Reconfigure the client
            self.client.set_to_usb()

            # Set the properties
            prop.set_prop('Client', 'mode', adb.MODE_USB)

        return 0

    def execute(self, args):
        """Main module executor: dispatch the requested subcommand."""
        # Module.run() sets __self__ to the subclass name before execute().
        self.name = self.__self__

        rtn = 0

        if len(args) < 1:
            return self.usage()

        sub_cmd = args.pop(0)

        if sub_cmd == 'install':
            rtn = self.do_install()
        elif sub_cmd == 'status':
            rtn = self.do_status()
        elif sub_cmd == 'remove':
            rtn = self.do_remove()
        elif sub_cmd == 'upload':
            rtn = self.do_upload(args)
        elif sub_cmd == 'download':
            rtn = self.do_download(args)
        elif sub_cmd == 'restart':
            rtn = self.do_restart()
        elif sub_cmd == 'execute':
            rtn = self.do_execute(args)
        elif sub_cmd == 'mode':
            rtn = self.do_mode(args)
        else:
            print("Sub-command '%s' not found!" % sub_cmd)
            rtn = self.usage()

        return rtn
| {
"repo_name": "android-dtf/dtf",
"path": "python-dtf/dtf/core/cmds/client.py",
"copies": "2",
"size": "11429",
"license": "apache-2.0",
"hash": -1896497214437262000,
"line_mean": 28.9188481675,
"line_max": 78,
"alpha_frac": 0.5562166419,
"autogenerated": false,
"ratio": 3.907350427350427,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 382
} |
"""Built-in module for creating dtf project"""
from __future__ import absolute_import
from __future__ import print_function
import os
import os.path
import re
import signal
import time
from dtf.module import Module
from dtf.properties import set_prop
import dtf.adb as adb
import dtf.constants as const
import dtf.core.compat as compat
import dtf.core.utils as utils
import dtf.logging as log
import dtf.packages as pkg
TAG = 'init'
TYPE_DALVIK = 'Dalvik'
TYPE_ART = 'ART'
SEANDROID_UNKNOWN = "Unknown"
SEANDROID_PERMISSIVE = "Permissive"
SEANDROID_ENFORCING = "Enforcing"
SEANDROID_DISABLED = "Disabled"
SEANDROID_OFF = "Off"
def rmfile(file_name):
    """Remove *file_name*; a missing (or unremovable) file is not an error."""
    try:
        os.remove(file_name)
    except OSError:
        # Already gone -- nothing to do.
        pass
def mkdir(dir_name):
    """Create directory *dir_name*; an already-existing one is not an error."""
    try:
        os.mkdir(dir_name)
    except OSError:
        # Directory exists (or cannot be created) -- ignore, matching the
        # best-effort contract of rmfile().
        pass
def get_set_value(set_data, match_key):
    """Return the value for *match_key* from `set` output lines, or None.

    Each entry is expected to look like ``KEY=VALUE``; entries without an
    ``=`` are skipped.  Only the first ``=`` splits key from value.
    """
    for entry in set_data:
        if '=' not in entry:
            continue
        key, value = entry.split('=', 1)
        if key == match_key:
            return value
    return None
class init(Module):  # pylint: disable=invalid-name

    """Module class for creating a dtf project"""

    adb = None

    # pylint: disable=unused-argument
    @classmethod
    def do_shutdown(cls, signum, frame):
        """Handle a Ctrl+C: remove partial project files and exit."""
        log.w(TAG, "Exiting dtf initialization!")
        rmfile(utils.CONFIG_FILE_NAME)
        rmfile(utils.LOG_FILE_NAME)
        exit(-4)

    def getprop(self, value):
        """Call `getprop` on the device and return the first output line."""
        self.adb.shell_command("getprop %s" % value)
        return self.adb.get_output()[0]

    def determine_cpu_arch(self):
        """Determine the CPU architecture.

        Returns an (arch, bits) tuple; both elements are None on failure.
        """
        arch = self.getprop('ro.product.cpu.abi')

        if arch is None:
            log.e(TAG, "Unable to determine processor architecture!")
            # Callers unpack two values; a bare `return None` here would
            # raise TypeError at the call site instead of failing cleanly.
            return None, None

        # We will offically support ARM and Intel. anything else
        # is not supported.
        if arch == "x86":
            arch = "x86"
            cpu_bits = "32"
        elif arch == "x86_64":
            arch = "x86"
            cpu_bits = "64"
        elif arch.find("armeabi") != -1:
            arch = "arm"
            cpu_bits = "32"
        elif re.search("arm.*64", arch):
            arch = "arm"
            cpu_bits = "64"
        else:
            log.e(TAG, "Unsupported CPU profile: %s" % arch)
            return None, None

        return arch, cpu_bits

    def determine_vm_type(self, sdk, cpu_bits):
        """Determine if we are Dalvik/ART; returns a TYPE_* constant or None."""
        # ART was introduced in KitKat, so if we are less, its Dalvik.
        if int(sdk) < 20:
            log.d(TAG, "Using Dalvik based on SDK")
            return TYPE_DALVIK

        # Check for single persist.sys.dalvik.vm.lib
        lib = self.getprop('persist.sys.dalvik.vm.lib')
        lib2 = self.getprop('persist.sys.dalvik.vm.lib.2')

        # None set, error
        if lib == '' and lib2 == '':
            log.e(TAG, "Unable to determine VM type!")
            return None

        # Both are set.
        elif lib != '' and lib2 != '':
            if cpu_bits == '64':
                arm_dir = '/system/framework/arm64'
            else:
                arm_dir = '/system/framework/arm'

            if self.adb.is_dir(arm_dir):
                log.d(TAG, "Using ART based ARM directory.")
                return TYPE_ART
            else:
                log.d(TAG, "Using Dalvik based on ARM directory.")
                return TYPE_DALVIK

        # One or the other is set.
        else:
            # max() picks the non-empty string ('' sorts before any name).
            so_type = max([lib, lib2])
            if so_type == 'libart.so':
                log.d(TAG, "Using ART based on prop.")
                return TYPE_ART
            else:
                log.d(TAG, "Using Dalvik based on prop.")
                return TYPE_DALVIK

    def determine_seandroid_state(self):
        """Determine if SEAndroid is used; returns a SEANDROID_* constant."""
        self.adb.shell_command("getenforce")
        response = self.adb.get_output()[0].lower()

        if response.find("not found") != -1:
            return SEANDROID_OFF
        elif response == "permissive":
            return SEANDROID_PERMISSIVE
        elif response == "enforcing":
            return SEANDROID_ENFORCING
        elif response == "disabled":
            return SEANDROID_DISABLED
        else:
            log.w(TAG, "Unable to determine SEAndroid state!")
            return SEANDROID_UNKNOWN

    def generate_version_string(self):
        """Generate the version string to use (brand-name_buildid),
        offering the user a chance to override it interactively."""
        brand = self.getprop('ro.product.brand')
        name = self.getprop('ro.product.name')
        version = self.getprop('ro.build.id')

        version_string = "%s-%s_%s" % (brand, name, version)

        print('dtf would like to use the following version string:')
        print("\n%s\n" % version_string)
        print("The version string is only used to identify this project.\n")

        res = compat.raw_input("Would you like to change it? [N/y] ").lower()

        if res == 'y':
            return compat.raw_input("Please enter a custom version string: ")
        else:
            return version_string

    @classmethod
    def make_project_directories(cls):
        """Create all directories associated with a dtf project"""
        mkdir(utils.REPORTS_DIRECTORY)
        mkdir(utils.DBS_DIRECTORY)
        mkdir(utils.LOCAL_MODULES_DIRECTORY)

        return 0

    def determine_device(self):
        """Determine which device to use; returns a device dict or None."""
        devices = self.adb.get_devices()

        if len(devices) == 0:
            log.e(TAG, "No devices found, exiting.")
            return None

        elif len(devices) == 1:
            init_device = devices[0]
            serial = init_device['serial']
            res = compat.raw_input("Got serial '%s', is this correct? [Y/n] "
                                   % serial)
            if res.lower() == 'n':
                log.e(TAG, "Initialization aborted.")
                return None
        else:
            print('Found many devices. Please select from the following list:')

            i = 1
            for device in devices:
                print("#%d. %s (%s)" % (i, device['serial'], device['status']))
                i += 1

            res = compat.raw_input("\nWhich device #? ")

            try:
                int_res = int(res)
                init_device = devices[int_res - 1]
            except (ValueError, IndexError):
                log.e(TAG, "Invalid input!")
                return None

        return init_device

    # pylint: disable=too-many-statements
    def initialize_device(self, init_device):
        """Perform the actual initialization: probe the device and persist
        its characteristics as project properties."""
        device_serial = init_device['serial']

        log.d(TAG, "Preparing device: %s" % device_serial)

        utils.touch(utils.CONFIG_FILE_NAME)

        set_prop('Info', 'serial', device_serial)

        # Set the client section.
        set_prop('Client', 'mode', adb.MODE_USB)

        # Since we have a serial now, lets create a new DtfAdb instance
        self.adb = adb.DtfAdb()

        # Kernel
        self.adb.shell_command('cat /proc/version')
        kernel = self.adb.get_output()[0]
        log.d(TAG, "Kernel version: %s" % kernel)
        set_prop('Info', 'kernel', kernel)

        # SDK
        sdk = self.getprop('ro.build.version.sdk')
        log.d(TAG, "Using SDK API %s" % sdk)
        set_prop('Info', 'SDK', sdk)

        if int(sdk) > const.API_MAX:
            log.w(TAG, "API %s isn't supported by dtf (yet), results may vary!"
                  % sdk)

        self.adb.shell_command('set')
        set_output = self.adb.get_output()

        # $PATH
        path = get_set_value(set_output, 'PATH')
        if path is None:
            log.e(TAG, "Unable to get $PATH variable!")
            self.do_shutdown(None, None)
        log.d(TAG, "PATH : %s" % path)
        set_prop('Info', 'path', path)

        # $BOOTCLASSPTH
        bootclasspath = get_set_value(set_output, 'BOOTCLASSPATH')
        if bootclasspath is None:
            log.e(TAG, "Unable to get $BOOTCLASSPATH variable!")
            self.do_shutdown(None, None)
        log.d(TAG, "BOOTCLASSPATH : %s" % bootclasspath)
        set_prop('Info', 'bootclasspath-jars', bootclasspath)

        # Version string
        version_string = self.generate_version_string()
        log.d(TAG, "Using version string: %s" % version_string)
        set_prop('Info', 'version-string', version_string)

        # Determine architecture and CPU bitness
        arch, cpu_bits = self.determine_cpu_arch()
        if cpu_bits is None:
            self.do_shutdown(None, None)
        log.d(TAG, "CPU Architecture: %s" % arch)
        set_prop("Info", "cpu-arch", arch)
        log.d(TAG, "Using %s-bit CPU" % cpu_bits)
        set_prop('Info', 'cpu-bits', cpu_bits)

        # Set the VM type (Dalvik|Art)
        vm_type = self.determine_vm_type(sdk, cpu_bits)
        if vm_type is None:
            self.do_shutdown(None, None)
        log.d(TAG, "Determined runtime: %s" % vm_type)
        set_prop('Info', 'vmtype', vm_type)

        # Determine SEAndroid
        se_state = self.determine_seandroid_state()
        log.d(TAG, "Determine SEAndroid state: %s" % se_state)
        set_prop('Info', 'seandroid-state', se_state)

        # Setup the directory structure
        self.make_project_directories()

        # Set directory related properties
        set_prop('Local', 'reports-dir', utils.REPORTS_DIRECTORY)
        set_prop('Local', 'db-dir', utils.DBS_DIRECTORY)

        # Invoke client installation
        rtn = pkg.launch_builtin_module('client', ['install'])
        if rtn != 0:
            log.w(TAG, "Unable to install dtf client. Try manually.")

        return 0

    def do_init(self):
        """Perform the initialization: pick a device and initialize it."""
        log.i(TAG, "Project initialization started.")

        signal.signal(signal.SIGINT, self.do_shutdown)

        if os.path.isfile(utils.CONFIG_FILE_NAME):
            log.e(TAG, "Configuration file already exists!")
            return -1

        compat.raw_input("\nPlease connect test device "
                         "(press Enter to continue) ")

        # This might get in the way.
        try:
            del os.environ['ANDROID_SERIAL']
        except KeyError:
            pass

        self.adb = adb.DtfAdb(no_serial=True)

        log.i(TAG, "Restarting adb...")
        self.adb.kill_server()
        self.adb.start_server()

        log.i(TAG, "Waiting for a device to be connected...")
        time.sleep(1)

        init_device = self.determine_device()
        if init_device is None:
            log.e(TAG, "Error determining device.")
            return -2

        # Is this device offline?
        if init_device['status'] != adb.STATUS_DEVICE:
            log.e(TAG, "Cannot initialize offline/bootloader device!")
            log.e(TAG, "Try either: ")
            log.e(TAG, " 1. Run: adb kill-server && dtf init")
            log.e(TAG, " 2. Reboot the device.")
            return -3

        # Initialize device
        if self.initialize_device(init_device) != 0:
            log.e(TAG, "Error initializing device!")
            return -4

        log.i(TAG, "Device initialization complete!")
        return 0

    def execute(self, args):  # pylint: disable=unused-argument
        """Main module executor"""
        return self.do_init()
| {
"repo_name": "jakev/dtf",
"path": "python-dtf/dtf/core/cmds/init.py",
"copies": "2",
"size": "12225",
"license": "apache-2.0",
"hash": -1764139113007522300,
"line_mean": 27.7647058824,
"line_max": 79,
"alpha_frac": 0.5658895706,
"autogenerated": false,
"ratio": 3.8262910798122065,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5392180650412206,
"avg_score": null,
"num_lines": null
} |
"""Built-in module for getting the status of a project"""
from __future__ import absolute_import
from __future__ import print_function
from dtf.module import Module
import dtf.adb as DtfAdb
class status(Module):  # pylint: disable=invalid-name

    """Module class for getting the status of a device"""

    adb = DtfAdb.DtfAdb()

    def execute(self, args):  # pylint: disable=unused-argument
        """Main module executor"""
        serial = DtfAdb.get_mode_serial()

        # Connected iff the project's serial appears in `adb devices`.
        connected = any(device['serial'] == serial
                        for device in self.adb.get_devices())

        print("Status:", end=" ")
        print('Connected' if connected else 'Not Connected')
        print("Serial Number: %s" % serial)

        return 0
| {
"repo_name": "android-dtf/dtf",
"path": "python-dtf/dtf/core/cmds/status.py",
"copies": "2",
"size": "1505",
"license": "apache-2.0",
"hash": 5816155541766705000,
"line_mean": 26.8703703704,
"line_max": 74,
"alpha_frac": 0.6584717608,
"autogenerated": false,
"ratio": 4.0675675675675675,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5726039328367568,
"avg_score": null,
"num_lines": null
} |
"""Built-in module for reseting a project"""
from __future__ import absolute_import
from __future__ import print_function
import os
import dtf.core.compat as compat
import dtf.core.utils as utils
import dtf.logging as log
from dtf.module import Module
TAG = 'reset'
class reset(Module):  # pylint: disable=invalid-name

    """Module class for reseting a project"""

    def execute(self, args):  # pylint: disable=unused-argument,no-self-use
        """Main module executor"""
        print('Are you sure you want to delete the dtf project in this '
              'directory? This cannot be reversed! [y/N]', end=" ")
        answer = compat.raw_input()

        # Anything other than an explicit 'y'/'Y' aborts.
        if answer.lower() != 'y':
            log.w(TAG, "Reset aborted.")
            return -1

        os.remove(utils.CONFIG_FILE_NAME)
        log.i(TAG, "Reset complete!")
        return 0
| {
"repo_name": "jakev/dtf",
"path": "python-dtf/dtf/core/cmds/reset.py",
"copies": "2",
"size": "1519",
"license": "apache-2.0",
"hash": -1685419935461034800,
"line_mean": 29.38,
"line_max": 75,
"alpha_frac": 0.6787360105,
"autogenerated": false,
"ratio": 3.8651399491094147,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 50
} |
"""dtf Constants"""
from __future__ import absolute_import
import dtf
VERSION = dtf.__version__
DTF_CLIENT = "com.dtf.client"
# API Constants
API_1 = 1
API_2 = 2
API_CUPCAKE = 3
API_DONUT = 4
API_ECLAIR = 5
API_ECLAIR_R1 = 6
API_ECLAIR_R2 = 7
API_FROYO = 8
API_GINGERBREAD = 9
API_GINGERBREAD_R1 = 10
API_HONEYCOMB = 11
API_HONEYCOMB_R1 = 12
API_HONEYCOMB_R2 = 13
API_ICE_CREAM_SANDWICH = 14
API_ICE_CREAM_SANDWICH_R1 = 15
API_JELLY_BEAN = 16
API_JELLY_BEAN_R1 = 17
API_JELLY_BEAN_R2 = 18
API_KITKAT = 19
API_WEAR = 20
API_LOLLIPOP = 21
API_LOLLIPOP_R1 = 22
API_MARSHMALLOW = 23
API_NOUGAT = 24
API_NOUGAT_R1 = 25
# Max Supported
API_MAX = API_NOUGAT_R1
| {
"repo_name": "android-dtf/dtf",
"path": "python-dtf/dtf/constants.py",
"copies": "2",
"size": "1299",
"license": "apache-2.0",
"hash": -7721596747282833000,
"line_mean": 23.5094339623,
"line_max": 74,
"alpha_frac": 0.7305619707,
"autogenerated": false,
"ratio": 2.634888438133874,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9365450408833874,
"avg_score": 0,
"num_lines": 53
} |
"""dtf logging framework"""
from __future__ import absolute_import
from sys import stdout
from time import localtime, strftime
from colored import attr
import dtf.core.utils as utils
import dtf.colors as colors
# Can override just like the shell
LOG_LEVEL_FILE = 4  # By default, log E-V
LOG_LEVEL_STDOUT = 4  # By default, log E-V

# Internals ###########################################################

# Handle for the project log file; stays None when not inside a project.
LOG_FILE = None

# Open file on module import
# NOTE(review): the handle is opened for the life of the process and never
# explicitly closed -- relies on interpreter shutdown to flush/close.
TOP = utils.get_project_root()
if TOP is not None:
    LOG_FILE = open("%s/%s" % (TOP, utils.LOG_FILE_NAME), 'a')
def __get_date():
    """Return the current local time as a log-entry timestamp string."""
    return strftime("%a %b %d %H:%M:%S %Z %Y", localtime())
def __log(buf, entry):
    """Write one pre-formatted entry to the given writable buffer."""
    buf.write(entry)
# Low level stdout print
def __log_to_stdout(color, date, tag, message):
    """Write a colorized entry to stdout (color reset via attr(0))."""
    __log(stdout, "%s[%s] %s - %s %s\n"
          % (color, date, tag, message, attr(0)))
# Low level file print
def __log_to_file(date, tag, message):
    """Append an uncolored entry to the project log file, if one is open."""
    if LOG_FILE is None:
        # Not inside a dtf project -- file logging is disabled.
        return
    __log(LOG_FILE, "[%s] %s - %s\n" % (date, tag, message))
# ######################################################################
# Public Calls #########################################################
def e(tag, message):  # pylint: disable=invalid-name
    """Log an error-level (1) message to stdout and the log file."""
    date = __get_date()
    full_tag = tag + "/E"
    if LOG_LEVEL_STDOUT >= 1:
        __log_to_stdout(colors.COLOR_ERR, date, full_tag, message)
    if LOG_LEVEL_FILE >= 1:
        __log_to_file(date, full_tag, message)
def w(tag, message):  # pylint: disable=invalid-name
    """Log a warning-level (2) message to stdout and the log file."""
    date = __get_date()
    full_tag = tag + "/W"
    if LOG_LEVEL_STDOUT >= 2:
        __log_to_stdout(colors.COLOR_WARN, date, full_tag, message)
    if LOG_LEVEL_FILE >= 2:
        __log_to_file(date, full_tag, message)
def i(tag, message):  # pylint: disable=invalid-name
    """Log an info-level (3) message to stdout and the log file."""
    date = __get_date()
    full_tag = tag + "/I"
    if LOG_LEVEL_STDOUT >= 3:
        __log_to_stdout(colors.COLOR_INFO, date, full_tag, message)
    if LOG_LEVEL_FILE >= 3:
        __log_to_file(date, full_tag, message)
def v(tag, message):  # pylint: disable=invalid-name
    """Log a verbose-level (4) message to stdout and the log file."""
    date = __get_date()
    full_tag = tag + "/V"
    if LOG_LEVEL_STDOUT >= 4:
        __log_to_stdout(colors.COLOR_VERB, date, full_tag, message)
    if LOG_LEVEL_FILE >= 4:
        __log_to_file(date, full_tag, message)
def d(tag, message):  # pylint: disable=invalid-name
    """Log a debug-level (5) message to stdout and the log file."""
    date = __get_date()
    full_tag = tag + "/D"
    if LOG_LEVEL_STDOUT >= 5:
        __log_to_stdout(colors.COLOR_DEB, date, full_tag, message)
    if LOG_LEVEL_FILE >= 5:
        __log_to_file(date, full_tag, message)
# Multi-line Logging
def e_ml(tag, messages):
    """Log each non-empty string in *messages* at error level."""
    if not isinstance(messages, list):
        raise TypeError
    for msg in messages:
        if msg != "":
            e(tag, msg)
def w_ml(tag, messages):
    """Log each non-empty string in *messages* at warning level."""
    if not isinstance(messages, list):
        raise TypeError
    for msg in messages:
        if msg != "":
            w(tag, msg)
def i_ml(tag, messages):
    """Log each non-empty string in *messages* at info level."""
    if not isinstance(messages, list):
        raise TypeError
    for msg in messages:
        if msg != "":
            i(tag, msg)
def v_ml(tag, messages):
    """Log each non-empty string in *messages* at verbose level."""
    if not isinstance(messages, list):
        raise TypeError
    for msg in messages:
        if msg != "":
            v(tag, msg)
def d_ml(tag, messages):
    """Log each non-empty string in *messages* at debug level."""
    if not isinstance(messages, list):
        raise TypeError
    for msg in messages:
        if msg != "":
            d(tag, msg)
#########################################################################
| {
"repo_name": "jakev/dtf",
"path": "python-dtf/dtf/logging.py",
"copies": "2",
"size": "4744",
"license": "apache-2.0",
"hash": 523865761855982340,
"line_mean": 22.3694581281,
"line_max": 74,
"alpha_frac": 0.5807335582,
"autogenerated": false,
"ratio": 3.70625,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.52869835582,
"avg_score": null,
"num_lines": null
} |
"""dtf Module Template"""
from __future__ import absolute_import
import os
import dtf.logging as log
import dtf.properties as prop
import dtf.core.packagemanager as pm
from dtf.globals import DTF_PACKAGES_DIR
from dtf.exceptions import DtfException
TAG = "dtf-module"
def sub_cmd(name, usage=""):
"""Decorator for routing a sub command"""
def decorator(func):
"""Save name of the sub command + function"""
func.sub_cmd_name = name
func.sub_cmd_route = func.__name__
func.sub_cmd_usage = usage
return func
return decorator
class Module(object):

    """
    Base class for creating a python module with dtf. Override the
    fields below, and implement an execute(self, args) method.
    """

    # Metadata fields, overridden by subclasses.
    name = "MyModule"
    version = "1.0.0"
    license = "N/A"
    author = "N/A"
    about = "A basic dtf module."

    # External binaries that must be on PATH for the module to run
    # (checked by the launcher's prelaunch checks).
    requires = []
    # Minimum Android SDK level required; 0 means any.
    min_sdk = 0

    # Directory the module was invoked from; set by the launcher.
    launch_dir = ""

    # Holds the concrete subclass name once run() is called.
    __self__ = ''

    def run(self, args):
        """
        Internal entry point for starting a module. It basically executes
        the 'execute' method if it exists.
        """
        # Save module name
        self.__self__ = type(self).__name__

        # Determine if we have an execute() method.
        if hasattr(self, 'execute'):

            # Do python logging override
            try:
                log.LOG_LEVEL_STDOUT = int(os.environ['GLOG_LEVEL'])
            except KeyError:
                pass
            except ValueError:
                log.w(TAG, "Invalid GLOG_LEVEL value (0-5 is allowed)")

            result = getattr(self, 'execute')(args)
        else:
            log.e(TAG, "Module '%s' does not define a entry point!"
                  % self.__self__)
            result = None

        return result

    @classmethod
    def get_diff_dir(cls):
        """Determine which diffing db to use.

        Returns the directory path; raises DtfException when neither the
        'diff-data-dir' property nor the matching aosp-data package exists.
        """
        # First check for a property override.
        if prop.test_prop('Local', 'diff-data-dir'):

            diff_dir = prop.get_prop('Local', 'diff-data-dir')

            if not os.path.isdir(diff_dir):
                raise DtfException("Unable to find diffing directory!")
            else:
                return diff_dir

        # Not set
        else:
            sdk = prop.get_prop("Info", "sdk")

            if pm.is_package_installed("aosp-data-%s" % sdk):
                diff_dir = ("%s/aosp-data-%s"
                            % (DTF_PACKAGES_DIR, sdk))
                return diff_dir
            else:
                raise DtfException("AOSP data not installed for this API!")

    def cd_launch_dir(self):
        """Change to the launch directory"""
        os.chdir(self.launch_dir)
| {
"repo_name": "jakev/dtf",
"path": "python-dtf/dtf/module.py",
"copies": "2",
"size": "3329",
"license": "apache-2.0",
"hash": -3798404996906875400,
"line_mean": 25.0078125,
"line_max": 75,
"alpha_frac": 0.5896665665,
"autogenerated": false,
"ratio": 4.020531400966184,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 128
} |
"""dtf public package querying"""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import imp
import inspect
import os
import os.path
import shlex
import sys
import subprocess
import traceback
from contextlib import contextmanager
import dtf.adb as adb
import dtf.core.compat as compat
import dtf.core.packagemanager as pm
import dtf.core.utils as utils
import dtf.logging as log
import dtf.properties as prop
from dtf.exceptions import DtfException
from dtf.globals import (DTF_BINARIES_DIR, DTF_LIBRARIES_DIR,
DTF_MODULES_DIR, DTF_DB)
TAG = "dtf-packages"
USAGE_TAGS = ['-h', '--help', 'help']
# Internal
@contextmanager
def stdout_redirector(stream):
    """Temporarily swap sys.stdout for *stream*, restoring it on exit."""
    saved_stdout = sys.stdout
    sys.stdout = stream
    try:
        yield
    finally:
        # Always restore, even if the managed block raised.
        sys.stdout = saved_stdout
def __update_path():
    """Append each installed dtf library directory to sys.path; return 0."""
    # The first time `dtf` is executed there is no main.db yet, so there
    # are no libraries to add -- bail out quietly.
    if not os.path.isfile(DTF_DB):
        return 0

    for lib_name in pm.get_libraries(name_only=True):
        sys.path.append("%s/%s" % (DTF_LIBRARIES_DIR, lib_name))

    return 0
def __launch_python_module(path, cmd, args, chdir=True, skip_checks=False):
    """Launch a python module by path.

    Loads the file at *path* as a python module, instantiates the class
    named *cmd* from it, and dispatches *args* to it.  Returns the module's
    result, or a negative error code on any launch failure.
    """
    mod_class = None
    mod_inst = None

    # We should always be in TOP (unless we are `pm`).
    # However, we should save off the current directory.
    launch_dir = os.getcwd()

    if chdir and prop.TOP is not None:
        os.chdir(prop.TOP)

    # Next, get the path setup.
    if __update_path() != 0:
        log.e(TAG, "Unable to update library path!")
        return -7

    # If we got here, we try to load as a python module.
    # NOTE: imp.load_source executes arbitrary module-level code from
    # *path*; the bare except below deliberately catches anything it raises.
    try:
        module = imp.load_source(cmd, path)
    # pylint:disable=bare-except
    except:  # NOQA
        msg = sys.exc_info()[0]
        log.e(TAG, "An Exception occured while calling load_source()")
        log.e(TAG, "Exception: %s" % msg)
        return -9

    if module is None:
        log.e(TAG, "Error launching module '%s'." % cmd)
        return -5

    # The module file must define a class with the same name as the command.
    try:
        mod_class = getattr(module, cmd)
        mod_inst = mod_class()
        mod_inst.launch_dir = launch_dir
    except AttributeError:
        log.e(TAG, "Unable to find class '%s' in module!" % cmd)
        return -6

    if not skip_checks and __do_python_prelaunch_checks(mod_inst) != 0:
        log.e(TAG, "Module prelaunch checks failed.")
        return -8

    # Save module name
    mod_inst.__self__ = type(mod_inst).__name__

    # Do python logging override
    # (mirrors Module.run(); needed here because launch bypasses run()).
    try:
        log.LOG_LEVEL_STDOUT = int(os.environ['GLOG_LEVEL'])
    except KeyError:
        pass
    except ValueError:
        log.w(TAG, "Invalid GLOG_LEVEL value (0-5 is allowed)")

    return __do_launch_python_module(mod_inst, args)
def __launch_bash_module(module_path, args):
    """Launch a bash module by path.

    Runs the script as a subprocess, streaming its stdout to ours, with
    dtf helper environment variables injected.  Returns the script's
    exit code, or a negative dtf error code on failure.
    """
    # First, make sure we can even execute the file.
    if not utils.is_executable(module_path):
        # Fixed message: the original said "is marked executable" on
        # the NOT-executable failure path.
        log.e(TAG, "Module is not marked executable!")
        return -6
    # Build the command string
    cmd = [module_path] + args
    # Copy the environment so the parent process's os.environ is not
    # mutated (the original assigned os.environ directly and wrote to it).
    new_env = dict(os.environ)
    # These are used for sourcing
    new_env['DTF_LOG'] = "%s/bash/dtf_log.sh" % utils.get_dtf_lib_dir()
    new_env['DTF_CORE'] = "%s/bash/dtf_core.sh" % utils.get_dtf_lib_dir()
    # We need to be in TOP to get the serial.
    # First, store the current dir.
    new_env['LAUNCH_DIR'] = os.getcwd()
    os.chdir(prop.TOP)
    # We want the serial to be already set
    new_env['ANDROID_SERIAL'] = adb.get_mode_serial()
    # Global exception handling
    try:
        popen = subprocess.Popen(cmd, stdout=subprocess.PIPE, env=new_env)
        for line in iter(popen.stdout.readline, b""):
            # Pipes yield bytes on Python 3; decode so stdout.write works.
            if not isinstance(line, str):
                line = line.decode('utf-8', 'replace')
            sys.stdout.write(line)
        # Must wait() before reading the exit status: the original read
        # popen.returncode without waiting, which can legally be None.
        return popen.wait()
    except Exception: # pylint:disable=broad-except
        try:
            exc_traceback = sys.exc_info()
        finally:
            log.e(TAG, "Unhandled Exception in module!")
            for line in traceback.format_exception(*exc_traceback)[3:]:
                line = line.strip("\n")
                if line == "":
                    continue
                print(line)
            return -5
    except KeyboardInterrupt:
        log.e(TAG, "Bash module forcibly killed!")
        return -6
def __do_python_prelaunch_checks(mod_inst):
    """Validate a module's dependencies and minimum SDK before launch.

    Returns 0 when all checks pass, -1 otherwise.
    """
    # Every binary in `requires` must resolve on PATH.
    for requirement in mod_inst.requires:
        if utils.which(requirement) is not None:
            continue
        log.e(TAG, "Unable to execute! Unmet dependency: %s"
              % requirement)
        return -1
    # Check for a minimum SDK
    try:
        sdk = int(prop.get_prop('Info', 'sdk'))
    except prop.PropertyError:
        log.e(TAG, "Unable to get SDK, is this project corrupt?")
        return -1
    min_sdk = int(mod_inst.min_sdk)
    if min_sdk != 0 and sdk < min_sdk:
        log.e(TAG, "This module requires SDK %d or higher!" % min_sdk)
        return -1
    return 0
# pylint:disable=too-many-branches,too-many-return-statements
def __do_launch_python_module(mod_inst, args):
    """Perform the actual launch.

    Dispatch order: a module with an execute() method gets called
    directly; otherwise @sub_cmd-decorated methods are mapped and the
    first arg selects one.  Returns the module's return value, or a
    negative error code (-10 unhandled exception, -11 killed,
    -14 nothing launchable, -15 unknown sub command).
    """
    rtn = 0
    # There are two ways to launch a module.
    # The first is by having an execute(..) function.
    # The second, is to use the new sub_cmd decorators.
    # We check for execute, and call it. if we dont have it,
    # we look for @sub_cmds, and fail if we dont have any/cant
    # find one.
    try:
        if hasattr(mod_inst, 'execute'):
            return getattr(mod_inst, 'execute')(args)
        # no exec. how about a sub_cmd?
        else:
            sub_cmd_map = __determine_sub_cmd_map(mod_inst)
            # No mappings.
            if len(sub_cmd_map) == 0:
                log.e(TAG, "Module '%s' has no exec or mappings!"
                      % mod_inst.__self__)
                return -14
            # Mappings exist. But do args?
            if len(args) == 0:
                return __auto_generate_usage(mod_inst, sub_cmd_map)
            sub_cmd = args.pop(0)
            # First, check for usage.
            if sub_cmd in USAGE_TAGS:
                return __auto_generate_usage(mod_inst, sub_cmd_map)
            elif sub_cmd in sub_cmd_map:
                # Map entry is (method, usage); route to the real handler
                # and only pass `args` if the route declares parameters.
                entry_method = sub_cmd_map[sub_cmd][0]
                launch = getattr(mod_inst, entry_method.sub_cmd_route)
                if __route_has_args(launch):
                    return launch(args)
                else:
                    return launch()
            else:
                __auto_generate_usage(mod_inst, sub_cmd_map)
                log.e(TAG, "Module '%s' has no mapping for '%s'!"
                      % (mod_inst.__self__, sub_cmd))
                return -15
    # Global exception handling
    except Exception: # pylint:disable=broad-except
        try:
            exc_traceback = sys.exc_info()
        finally:
            log.e(TAG, "Unhandled Exception in module!")
            # Skip the first traceback frames (dtf launcher internals).
            for line in traceback.format_exception(*exc_traceback)[3:]:
                line = line.strip("\n")
                if not line:
                    continue
                print(line)
            rtn = -10
    except KeyboardInterrupt:
        log.e(TAG, "Python module forcibly killed!")
        rtn = -11
    return rtn
def __determine_sub_cmd_map(mod_inst):
    """Build a {name: (method, usage)} map from @sub_cmd-decorated members."""
    mapping = {}
    for _, member in inspect.getmembers(mod_inst):
        # Only decorated methods carry the sub_cmd_name attribute.
        if not hasattr(member, 'sub_cmd_name'):
            continue
        mapping[member.sub_cmd_name] = (member, member.sub_cmd_usage)
    return mapping
def __route_has_args(method_inst):
    """Check whether a sub_cmd route (bound method) accepts arguments.

    A bound method's argspec always includes `self`, so any additional
    positional parameter means the route takes args.
    """
    # inspect.getargspec() was removed in Python 3.11; prefer the modern
    # replacement, falling back for Python 2 where it doesn't exist.
    try:
        spec = inspect.getfullargspec(method_inst)
    except AttributeError:
        spec = inspect.getargspec(method_inst)  # pylint:disable=deprecated-method
    return bool(len(spec[0]) > 1)
def __auto_generate_usage(mod_inst, arg_map):
    """Generate and print a usage listing from the sub_cmd map.

    Always returns 0.
    """
    print("dtf Module %s v%s" % (mod_inst.name, mod_inst.version))
    print("")
    print("Subcommands:")
    # dict.items() works on both Python 2 and 3; the original used
    # .iteritems(), which raises AttributeError on Python 3.
    for key, args in arg_map.items():
        print(" %s%s" % (key.ljust(13), args[1]))
    print("")
    return 0
# End Internal
# Launching stuff
def launch_builtin_module(cmd, args, chdir=True, skip_checks=False):
    """Launch a dtf built-in python command.

    Built-ins live under <pydtf>/core/cmds/<cmd>.py.
    """
    launch_path = "%s/core/cmds/%s.py" % (utils.get_pydtf_dir(), cmd)
    return __launch_python_module(
        launch_path, cmd, args, chdir=chdir, skip_checks=skip_checks)
def launch_local_module(root, cmd, args):
    """Launch a module from the project's local_modules directory."""
    module_path = "%s/%s/%s" % (root, utils.LOCAL_MODULES_DIRECTORY, cmd)
    # Bash scripts run directly; anything else is loaded as python.
    if pm.is_bash_module(module_path):
        log.d(TAG, "This is a bash module!")
        return __launch_bash_module(module_path, args)
    return __launch_python_module(module_path, cmd, args)
def launch_module(cmd, args, redirect=False):
    """Launch a global (non-local) module.

    With redirect=True, stdout is captured and an (output, rtn) tuple is
    returned instead of just the return code.
    """
    module_path = "%s/%s" % (DTF_MODULES_DIR, cmd)

    def _run():
        # Dispatch on module flavor: bash script vs python module.
        if pm.is_bash_module(module_path):
            return __launch_bash_module(module_path, args)
        return __launch_python_module(module_path, cmd, args)

    # If we are dealing with a bash script, just run and exit.
    if not redirect:
        return _run()

    # The caller explicitly asked to save stdout, lets do it.
    captured_f = compat.StringIO()
    with stdout_redirector(captured_f):
        rtn = _run()
    out = captured_f.getvalue()
    captured_f.close()
    return out, rtn
def launch_binary(binary, args, launcher=None):
    """Launch an installed dtf binary.

    Returns a (stdout_lines, stderr_lines, return_code) tuple, where the
    output members are lists of text lines.

    Raises:
        DtfException: if the binary is not installed.
    """
    path_to_binary = "%s/%s" % (DTF_BINARIES_DIR, binary)
    if not is_binary_installed(binary):
        raise DtfException("Binary %s not found!" % binary)
    # `args` is either None or a single shell-style argument string.
    if args is None:
        lex_args = []
    else:
        lex_args = shlex.split(args)
    if launcher is None:
        cmd = [path_to_binary] + lex_args
    else:
        lex_launcher = shlex.split(launcher)
        cmd = lex_launcher + [path_to_binary] + lex_args
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE, shell=False)
    out_data = proc.stdout.read()
    err_data = proc.stderr.read()
    rtn = proc.wait()
    # On Python 3 the pipes yield bytes; decode so callers always get
    # str lines (the original bytes.split("\n") raised TypeError there).
    if not isinstance(out_data, str):
        out_data = out_data.decode('utf-8', 'replace')
    if not isinstance(err_data, str):
        err_data = err_data.decode('utf-8', 'replace')
    stdout = out_data.split("\n")
    stderr = err_data.split("\n")
    return stdout, stderr, rtn
# Determining if stuff is installed
def is_binary_installed(name):
    """Determine if binary is installed (thin wrapper over the pm module)."""
    return pm.is_binary_installed(name)
def is_library_installed(name):
    """Determine if library is installed (thin wrapper over the pm module)."""
    return pm.is_library_installed(name)
def is_module_installed(name):
    """Determine if module is installed (thin wrapper over the pm module)."""
    return pm.is_module_installed(name)
def is_package_installed(name):
    """Determine if package is installed (thin wrapper over the pm module)."""
    return pm.is_package_installed(name)
def find_local_module(root, name):
    """Determine if a local module exists (thin wrapper over the pm module)."""
    return pm.find_local_module(root, name)
| {
"repo_name": "jakev/dtf",
"path": "python-dtf/dtf/packages.py",
"copies": "2",
"size": "12037",
"license": "apache-2.0",
"hash": -3890898177285191700,
"line_mean": 25.4549450549,
"line_max": 75,
"alpha_frac": 0.6018110825,
"autogenerated": false,
"ratio": 3.6631162507608033,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5264927333260804,
"avg_score": null,
"num_lines": null
} |
""" dtf's package manager """
from __future__ import absolute_import
from __future__ import print_function
import os
import os.path
import tempfile
import zipfile
from argparse import ArgumentParser
import requests
from dtf.module import Module
import dtf.globals
import dtf.logging as log
import dtf.core.compat as compat
import dtf.core.item
import dtf.core.manifestparser as mp
import dtf.core.packagemanager as packagemanager
import dtf.core.utils as utils
# Log tag for this module.
TAG = "pm"

# Re-exported dtf.globals paths for convenience within this module.
DTF_DATA_DIR = dtf.globals.DTF_DATA_DIR
DTF_BINARIES_DIR = dtf.globals.DTF_BINARIES_DIR
DTF_LIBRARIES_DIR = dtf.globals.DTF_LIBRARIES_DIR
DTF_MODULES_DIR = dtf.globals.DTF_MODULES_DIR
DTF_PACKAGES_DIR = dtf.globals.DTF_PACKAGES_DIR
DTF_DB = dtf.globals.DTF_DB

# Item type aliases from dtf.core.item.
TYPE_BINARY = dtf.core.item.TYPE_BINARY
TYPE_LIBRARY = dtf.core.item.TYPE_LIBRARY
TYPE_MODULE = dtf.core.item.TYPE_MODULE
TYPE_PACKAGE = dtf.core.item.TYPE_PACKAGE

# No log to file.
log.LOG_LEVEL_FILE = 0

# Verbosity levels for `pm list`.
LIST_QUIET = 0
LIST_DEFAULT = 1
LIST_VERBOSE = 2
class pm(Module): # pylint: disable=invalid-name,too-many-public-methods

    """Module class for dtf pm.

    Implements the `dtf pm` built-in: installing, deleting, listing,
    exporting, purging, and upgrading dtf content (binaries, libraries,
    modules, packages) plus content-repo management.  Each do_* method
    returns 0 on success or a negative error code.
    """

    @classmethod
    def usage(cls):
        """Print Usage"""
        print('dtf Package Manager')
        print('')
        print('Subcommands:')
        print(' delete Delete an item from main database.')
        print(' export Export entire main database to dtf ZIP.')
        print(' install Install a dtf ZIP or single item.')
        print(' list List all installed items.')
        print(' purge Purge all installed items, reset DB.')
        print(' repo Manage content repos.')
        print(' upgrade Upgrade repo content.')
        print('')
        return 0

    def do_install(self, args):
        """Attempt to install new content"""
        parser = ArgumentParser(
            prog='pm install',
            description='Install a item or DTF ZIP of items.')
        parser.add_argument('--zip', dest='zipfile', default=None,
                            help='Install a DTF ZIP file containing items.')
        parser.add_argument('--single', metavar="ITEM", dest='single_type',
                            default=None, help='Install a single item.')
        parser.add_argument('--name', metavar="val", dest='single_name',
                            default=None, help="Item name [SINGLE ONLY].")
        parser.add_argument('--local_name', metavar="val",
                            dest='single_local_name', default=None,
                            help="Item local name [SINGLE ONLY].")
        parser.add_argument('--install_name', metavar="val",
                            dest='single_install_name', default=None,
                            help="Item install name [SINGLE ONLY].")
        parser.add_argument('--version', metavar="val", dest='single_version',
                            default=None,
                            help="Item version (#.# format) [SINGLE ONLY].")
        parser.add_argument('--author', nargs='+', metavar="val",
                            dest='single_author', default=None,
                            help="Item author (email is fine). [SINGLE ONLY].")
        parser.add_argument('--about', nargs='+', metavar="val",
                            dest='single_about', default=None,
                            help="About string for a module. [SINGLE ONLY].")
        parser.add_argument('--auto', dest='single_auto', action='store_const',
                            const=True, default=False,
                            help="Automatically parse module [SINGLE ONLY].")
        parser.add_argument('--force', dest='force', action='store_const',
                            const=True, default=False,
                            help="Force installation of component(s).")
        parser.add_argument('--new-only', dest='new_only',
                            action='store_const', const=True, default=False,
                            help="Install only if new version.")

        parsed_args = parser.parse_args(args)

        zip_file_name = parsed_args.zipfile
        single_type = parsed_args.single_type
        force_mode = parsed_args.force
        new_only = parsed_args.new_only

        # --zip and --single are mutually exclusive; exactly one required.
        if zip_file_name is not None and single_type is not None:
            log.e(TAG, "Cannot install both DTF ZIP and single item. Exiting.")
            return -1

        if zip_file_name is None and single_type is None:
            log.e(TAG, "ZIP mode or single item mode not detected. Exiting.")
            return -2

        # Install zip.
        if zip_file_name is not None:
            if zipfile.is_zipfile(zip_file_name):
                return packagemanager.install_zip(zip_file_name,
                                                  force=force_mode,
                                                  new_only=new_only)
            else:
                log.e(TAG, "'%s' is not a valid ZIP file or does not exist."
                      % (zip_file_name))
                return -3

        # Install single.
        else:
            return self.parse_and_install_single(parsed_args, single_type)

    @classmethod
    def do_delete(cls, args):
        """Attempt to remove content"""
        parser = ArgumentParser(
            prog='pm delete',
            description='Remove a item from disk and database.')
        parser.add_argument('--type', metavar="val", dest='item_type',
                            default=None, help='The type of the item')
        parser.add_argument('--name', metavar="val", dest='item_name',
                            default=None, help="Item to uninstall.")
        parser.add_argument('--force', dest='force', action='store_const',
                            const=True, default=False,
                            help="Force deletion of component.")

        parsed_args = parser.parse_args(args)

        force_mode = parsed_args.force

        name = parsed_args.item_name
        if name is None:
            log.e(TAG, "'--name' is required for delete mode. Exiting.")
            return -1

        item_type = parsed_args.item_type

        # Dispatch to the packagemanager deleter matching the item type.
        if item_type == TYPE_BINARY:
            rtn = packagemanager.delete_binary(name, force=force_mode)
        elif item_type == TYPE_LIBRARY:
            rtn = packagemanager.delete_library(name, force=force_mode)
        elif item_type == TYPE_MODULE:
            rtn = packagemanager.delete_module(name, force=force_mode)
        elif item_type == TYPE_PACKAGE:
            rtn = packagemanager.delete_package(name, force=force_mode)
        else:
            log.e(TAG, "Invalid type passed to delete. Exiting.")
            rtn = -2

        return rtn

    def do_export(self, args):
        """Perform an export"""
        rtn = 0

        parser = ArgumentParser(prog='pm export',
                                description='Export installed content.')
        parser.add_argument('output_name', type=str,
                            help='The output file name.')

        parsed_args = parser.parse_args(args)

        output_name = parsed_args.output_name

        if os.path.isfile(output_name):
            log.e(TAG, "Output file already exists!")
            return -1

        # Generate a list of populated items.
        export_items = self.generate_export_items()

        if len(export_items) == 0:
            log.e(TAG, "Nothing to export!")
            return -2

        export_zip = mp.ExportZip(output_name)

        for item in export_items:
            export_zip.add_item(item)

        export_zip.finalize()

        log.i(TAG, "Export completed!")

        return rtn

    def do_list(self, args):
        """List installed content"""
        rtn = 0

        parser = ArgumentParser(prog='pm list',
                                description='List installed components.')
        parser.add_argument('-v', dest='verbose', action='store_const',
                            const=True, default=False,
                            help="Show additional details about components.")
        parser.add_argument('-q', dest='quiet', action='store_const',
                            const=True, default=False,
                            help="Show only names of components.")
        parser.add_argument('type', type=str, nargs='?',
                            help='Show only requested type.')

        parsed_args = parser.parse_args(args)

        d_filter = parsed_args.type
        verbose = parsed_args.verbose
        quiet = parsed_args.quiet

        if verbose and quiet:
            log.e(TAG, "Unable to be verbose and quiet!")
            return -1

        if verbose:
            verbosity = LIST_VERBOSE
        elif quiet:
            verbosity = LIST_QUIET
        else:
            verbosity = LIST_DEFAULT

        # With a filter, print just that section; otherwise all four.
        if d_filter is not None:
            if d_filter == "binaries":
                self.print_installed_binaries(verbosity)
            elif d_filter == "libraries":
                self.print_installed_libraries(verbosity)
            elif d_filter == "modules":
                self.print_installed_modules(verbosity)
            elif d_filter == "packages":
                self.print_installed_packages(verbosity)
            else:
                log.e(TAG, "Unknown filter specified : %s" % d_filter)
                rtn = -3
        else:
            self.print_installed_binaries(verbosity)
            self.print_installed_libraries(verbosity)
            self.print_installed_modules(verbosity)
            self.print_installed_packages(verbosity)

        return rtn

    @classmethod
    def do_purge(cls):
        """Purge dtf DB"""
        print('!!!! WARNING !!!!')
        print('')
        print('This will delete all installed content and reset the database!')
        print('Note: This will not delete any project data.')
        print('Are you sure you want to do this? [N/y]', end=" ")

        # Anything other than an explicit "y"/"Y" aborts the purge.
        res = compat.raw_input()

        if res.lower() == "y":
            return packagemanager.purge()
        else:
            return 0

    def do_repo(self, args):
        """Manage repos"""
        if len(args) < 1:
            print('Usage: dtf pm repo ACTION [args]')
            print('')
            print(' ACTIONs')
            print(' add [repo_name] [url]')
            print(' remove [repo_name]')
            print(' list')
            return 0

        cmd = args.pop(0)

        if cmd == 'add':
            return self.do_repo_add(args)
        elif cmd == 'remove':
            return self.do_repo_remove(args)
        elif cmd == 'list':
            return self.do_repo_list()
        else:
            log.e(TAG, "Invalid repo command: %s"
                  % cmd)
            return -1

    def do_upgrade(self, args):
        """Do content upgrade"""
        parser = ArgumentParser(prog='pm upgrade',
                                description='Upgrade managed content.')
        parser.add_argument('-v', '--dont-verify-ssl', dest='verify',
                            action='store_const', const=False, default=True,
                            help="Allow SSL certificate issues.")
        parser.add_argument('-a', '--allow-http', dest='allow_http',
                            action='store_const', const=True, default=False,
                            help="Allow HTTP downloads.")
        parser.add_argument('-f', '--force', dest='force',
                            action='store_const', const=True, default=False,
                            help="Force install of component(s).")
        parser.add_argument('-p', '--prompt-all', dest='new_only',
                            action='store_const', const=False, default=True,
                            help="Prompt install regardless of version.")

        parsed_args = parser.parse_args(args)

        verify = parsed_args.verify
        allow_http = parsed_args.allow_http
        force = parsed_args.force
        new_only = parsed_args.new_only

        # Best-effort loop: a bad repo is skipped, not fatal.
        for repo_name, url in packagemanager.get_repos():

            log.i(TAG, "Requesting content from '%s' (%s).."
                  % (repo_name, url))

            if utils.is_http_url(url) and not allow_http:
                log.w(TAG, "Skipping '%s' due to HTTP (use --allow-http)"
                      % repo_name)
                continue

            file_f = self.download_temp_file(url, verify=verify)
            if file_f is None:
                continue

            if not zipfile.is_zipfile(file_f.name):
                log.w(TAG, "Pulled content is not a valid ZIP file, skipping!")
                continue

            log.i(TAG, "Starting install...")
            packagemanager.install_zip(file_f.name, force=force,
                                       new_only=new_only)

            # Closing the NamedTemporaryFile also deletes it.
            file_f.close()

        log.i(TAG, "Upgrade complete.")
        return 0

    @classmethod
    def do_repo_add(cls, args):
        """Add a repo"""
        if len(args) != 2:
            log.e(TAG, "A repo name and URL is required!")
            return -1

        repo_name = args.pop(0)
        url = args.pop(0)

        return packagemanager.add_repo(repo_name, url)

    @classmethod
    def do_repo_remove(cls, args):
        """remove a repo"""
        if len(args) != 1:
            log.e(TAG, "Must specify a repo name!")
            return -1

        repo_name = args.pop()

        return packagemanager.remove_repo(repo_name)

    @classmethod
    def do_repo_list(cls):
        """List out repos"""
        print('Configured repos:')
        for repo, url in packagemanager.get_repos():
            print(" %s (%s)" % (repo, url))

        return 0

    @classmethod
    def format_version(cls, version_string):
        """Format version of item (None becomes a placeholder string)."""
        if version_string is None:
            return "No Version"
        else:
            return "v%s" % version_string

    @classmethod
    def generate_export_items(cls):
        """Create a list of items.

        Each installed item gets install_name/local_name populated so it
        can be fed to the export ZIP writer.
        """
        items = list()

        # Get all binaries
        for binary in packagemanager.get_binaries():
            binary.install_name = binary.name
            binary.local_name = "%s/%s" % (DTF_BINARIES_DIR, binary.name)
            items.append(binary)

        # Get all libraries
        for library in packagemanager.get_libraries():
            library.install_name = library.name
            library.local_name = "%s/%s" % (DTF_LIBRARIES_DIR, library.name)
            items.append(library)

        # Get all modules
        for module in packagemanager.get_modules():
            module.install_name = module.name
            module.local_name = "%s/%s" % (DTF_MODULES_DIR, module.name)
            items.append(module)

        # Get all packages
        for package in packagemanager.get_packages():
            package.install_name = package.name
            package.local_name = "%s/%s" % (DTF_PACKAGES_DIR, package.name)
            items.append(package)

        return items

    def print_installed_binaries(self, verbosity):
        """Print installed binaries"""
        binary_list = packagemanager.get_binaries()

        # If we are trying to be quiet, just print each item.
        if verbosity == LIST_QUIET:
            for binary in binary_list:
                print(binary.name)
            return

        # Otherwise, iterate over and print more
        print('Installed Binaries')
        for binary in binary_list:

            # Format version
            version = self.format_version(binary.version)

            print("\t%s (%s)" % (binary.name, version))
            if verbosity == LIST_VERBOSE:
                print("\t About: %s" % binary.about)
                print("\t Author: %s" % binary.author)

        return 0

    def print_installed_libraries(self, verbosity):
        """Print installed libraries"""
        library_list = packagemanager.get_libraries()

        # If we are trying to be quiet, just print each item.
        if verbosity == LIST_QUIET:
            for library in library_list:
                print(library.name)
            return

        # Otherwise, iterate over and print more
        print('Installed Libraries')
        for library in library_list:

            # Format version
            version = self.format_version(library.version)

            print("\t%s (%s)" % (library.name, version))
            if verbosity == LIST_VERBOSE:
                print("\t About: %s" % library.about)
                print("\t Author: %s" % library.author)

        return 0

    def print_installed_modules(self, verbosity):
        """Print installed modules"""
        module_list = packagemanager.get_modules()

        # If we are trying to be quiet, just print each item.
        if verbosity == LIST_QUIET:
            for module in module_list:
                print(module.name)
            return

        # Otherwise, iterate over and print more
        print('Installed Modules')
        for module in module_list:

            # Format version
            version = self.format_version(module.version)

            print("\t%s (%s)" % (module.name, version))
            if verbosity == LIST_VERBOSE:
                print("\t About: %s" % module.about)
                print("\t Author: %s" % module.author)

        return 0

    def print_installed_packages(self, verbosity):
        """Print installed packages"""
        package_list = packagemanager.get_packages()

        # If we are trying to be quiet, just print each item.
        if verbosity == LIST_QUIET:
            for package in package_list:
                print(package.name)
            return

        # Otherwise, iterate over and print more
        print('Installed Packages')
        for package in package_list:

            # Format version
            version = self.format_version(package.version)

            print("\t%s (%s)" % (package.name, version))
            if verbosity == LIST_VERBOSE:
                print("\t About: %s" % package.about)
                print("\t Author: %s" % package.author)

        return 0

    @classmethod
    def auto_parse_module(cls, args):
        """Automatically parse module and return Item.

        Returns None on any parse failure.
        """
        item = None
        name = args.single_name
        install_name = args.single_install_name
        local_name = args.single_local_name

        if install_name is None:
            log.d(TAG, "install_name is null, using name...")
            install_name = os.path.basename(name)
        if local_name is None:
            log.d(TAG, "local_name is null, using name...")
            local_name = name

        # Does the resource even exist?
        if not os.path.isfile(local_name):
            log.e(TAG, "Local module resource '%s' does not exist!"
                  % (local_name))
            return None

        if packagemanager.is_python_module(local_name, install_name):
            log.d(TAG, "Python mode selected")
            item = packagemanager.parse_python_module(local_name,
                                                      install_name)
            if item is None:
                log.e(TAG, "Error parsing Python module!")
                return None

        elif packagemanager.is_bash_module(local_name):
            log.d(TAG, "Bash mode selected")
            item = packagemanager.parse_bash_module(local_name,
                                                    install_name)
            if item is None:
                log.e(TAG, "Error parsing Bash module!")
                return None
        else:
            log.e(TAG, "Auto parse for Python and Bash failed!")
            return None

        return item

    def parse_single_item(self, args): # pylint: disable=too-many-branches
        """Parse args, return Item.

        Validates name/type/version and fills in defaulted fields;
        returns None if anything is invalid or missing on disk.
        """
        item = dtf.core.item.Item()

        if args.single_name is None:
            log.e(TAG, "No '--name' specified in single item mode. Exiting.")
            return None

        item.name = args.single_name

        if args.single_type not in dtf.core.item.VALID_TYPES:
            log.e(TAG, "Invalid type passed to single. Exiting.")
            return None

        item.type = args.single_type

        version = args.single_version
        if version is not None:
            if dtf.core.item.is_valid_version(version):
                item.version = version
            else:
                log.e(TAG, "Version string is not valid. Exiting.")
                return None
        else:
            log.w(TAG, "No version provided, using v1.0.0")
            item.version = "1.0.0"

        # --author/--about are nargs='+' lists; None means not supplied.
        try:
            item.author = " ".join(args.single_author)
        except TypeError:
            item.author = None

        try:
            item.about = " ".join(args.single_about)
        except TypeError:
            item.about = None

        install_name = args.single_install_name
        local_name = args.single_local_name

        if install_name is None:
            log.d(TAG, "install_name is null, using name...")
            install_name = os.path.basename(args.single_name)
        if local_name is None:
            log.d(TAG, "local_name is null, using name...")
            local_name = args.single_name

        item.install_name = install_name
        item.local_name = local_name

        if self.check_local_exists(item):
            return item
        else:
            return None

    def parse_and_install_single(self, args, single_type):
        """Parse and install single item"""
        force_mode = args.force

        # Check for auto-mode:
        if args.single_auto:
            # Only modules can be auto-parsed
            if single_type == TYPE_MODULE:
                log.i(TAG, "Attempting to auto parse...")
                item = self.auto_parse_module(args)
                if item is None:
                    log.e(TAG, "Error autoparsing module!")
                    return -9
            else:
                log.e(TAG, "Autoparse is only available for modules!")
                return -4
        # Not auto
        else:
            item = self.parse_single_item(args)
            if item is None:
                log.e(TAG, "Error parsing single item!")
                return -5

        return packagemanager.install_single(item, force=force_mode)

    @classmethod
    def check_local_exists(cls, item):
        """Check if local item exists and print error.

        Binaries/modules must be files; libraries/packages must be
        directories.  Returns the item on success, None otherwise.
        """
        if item.type == TYPE_BINARY:
            if not os.path.isfile(item.local_name):
                log.e(TAG, "Local item '%s' does not exist. Exiting."
                      % (item.local_name))
                return None
        elif item.type == TYPE_LIBRARY:
            if not os.path.isdir(item.local_name):
                log.e(TAG, "Local directory '%s' does not exist. Exiting."
                      % (item.local_name))
                return None
        elif item.type == TYPE_MODULE:
            if not os.path.isfile(item.local_name):
                log.e(TAG, "Local item '%s' does not exist. Exiting."
                      % (item.local_name))
                return None
        elif item.type == TYPE_PACKAGE:
            if not os.path.isdir(item.local_name):
                log.e(TAG, "Local directory '%s' does not exist. Exiting."
                      % (item.local_name))
                return None

        return item

    @classmethod
    def download_temp_file(cls, url, verify=True):
        """Download a file from URL to tempfile.

        Returns an open NamedTemporaryFile (deleted on close), or None
        on any request error.
        """
        try:
            req = requests.get(url, verify=verify, stream=True)
        except requests.exceptions.RequestException as excpt:
            log.e(TAG, "Error downloading repo data!")
            print(excpt)
            return None

        temp_f = tempfile.NamedTemporaryFile()

        for chunk in req.iter_content(chunk_size=1024):
            if chunk:
                temp_f.write(chunk)

        # Reset the seek
        temp_f.seek(0)

        return temp_f

    def execute(self, args):
        """Main module executor"""
        # __self__ is set by the dtf launcher to the class name.
        self.name = self.__self__
        rtn = 0

        if len(args) < 1:
            return self.usage()

        sub_cmd = args.pop(0)

        if sub_cmd == "install":
            rtn = self.do_install(args)
        elif sub_cmd == "delete":
            rtn = self.do_delete(args)
        elif sub_cmd == "export":
            rtn = self.do_export(args)
        elif sub_cmd == "list":
            rtn = self.do_list(args)
        elif sub_cmd == "purge":
            rtn = self.do_purge()
        elif sub_cmd == "repo":
            rtn = self.do_repo(args)
        elif sub_cmd == "upgrade":
            rtn = self.do_upgrade(args)
        else:
            log.e(TAG, "Sub-command '%s' not found!" % sub_cmd)
            rtn = self.usage()

        return rtn
| {
"repo_name": "jakev/dtf",
"path": "python-dtf/dtf/core/cmds/pm.py",
"copies": "2",
"size": "25450",
"license": "apache-2.0",
"hash": -3494094761748516400,
"line_mean": 31.0933165195,
"line_max": 79,
"alpha_frac": 0.54,
"autogenerated": false,
"ratio": 4.27515538384008,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 793
} |
""" dtf Utilities """
from __future__ import absolute_import
from hashlib import md5
import errno
import os
import os.path
import shutil
import stat
import dtf.core.compat as compat
# Well-known file and directory names used throughout dtf.
CONFIG_FILE_NAME = '.dtfini'
LOG_FILE_NAME = '.dtflog'

REPORTS_DIRECTORY = 'reports'
DBS_DIRECTORY = '.dbs'
LOCAL_MODULES_DIRECTORY = 'local_modules'

# Log tag for this module.
TAG = 'dtf-utils'
def __upsearch(file_name, dir_name):
    """Walk up from dir_name looking for file_name.

    Returns the directory containing the file, or None when the
    filesystem root is reached without finding it.
    """
    current = dir_name
    while True:
        if os.path.isfile("%s/%s" % (current, file_name)):
            return current
        parent = os.path.abspath(os.path.join(current, os.pardir))
        # At the root, the parent equals the directory itself.
        if parent == current:
            return None
        current = parent
def get_project_root():
    """Search for and return the dtf project root.

    Walks up from the current working directory looking for the .dtfini
    config file; returns None when not inside a project.
    """
    return __upsearch(CONFIG_FILE_NAME, os.getcwd())
def get_pydtf_dir():
    """Return the location of the dtf dist-packages directory.

    This is the parent of the directory containing this file.
    """
    module_dir = os.path.split(os.path.abspath(__file__))[0]
    return os.path.dirname(module_dir)
def get_dtf_data_dir():
    """Return the location of the dtf data directory (~/.dtf)."""
    home = os.path.expanduser('~')
    return home + '/.dtf'
def get_dtf_lib_dir():
    """Return the location of the dtf lib dir."""
    # Fixed path installed by dtf's packaging; not user-configurable.
    return "/usr/local/lib/android-dtf"
def md5_local(file_path):
    """Return the hex MD5 digest of a local file.

    The file is read in chunks so arbitrarily large files can be hashed
    without loading them into memory.
    """
    local_m = md5()
    # `with` guarantees the handle is closed (the original leaked it),
    # and 64 KiB reads are far cheaper than the original 128-byte reads.
    with open(file_path, 'rb') as file_f:
        while True:
            data = file_f.read(65536)
            if not data:
                break
            local_m.update(data)
    return local_m.hexdigest()
def is_exe(fpath):
    """Check if *fpath* is an existing, executable regular file."""
    # stackoverflow.com/questions/377017/test-if-executable-exists-in-python
    if not os.path.isfile(fpath):
        return False
    return os.access(fpath, os.X_OK)
def which(program):
    """Resolve *program* to an executable path, or return None.

    A program given with a directory component is only checked directly;
    a bare name is searched across every PATH entry.
    """
    # stackoverflow.com/questions/377017/test-if-executable-exists-in-python
    head = os.path.split(program)[0]
    if head:
        # An explicit path was supplied: accept it only if executable.
        if is_exe(program):
            return program
        return None
    for path_entry in os.environ["PATH"].split(os.pathsep):
        candidate = os.path.join(path_entry.strip('"'), program)
        if is_exe(candidate):
            return candidate
    return None
def is_executable(file_name):
    """Check whether the owner-execute bit is set on a file."""
    mode = os.stat(file_name)[stat.ST_MODE]
    return bool(mode & stat.S_IXUSR)
def mkdir_recursive(path):
    """Recursively create a directory; an existing directory is fine."""
    try:
        os.makedirs(path)
    except OSError as exc:  # Python >2.5
        # Only swallow "already exists and is a directory"; anything
        # else (permissions, existing file, ...) propagates.
        if exc.errno != errno.EEXIST or not os.path.isdir(path):
            raise
# http://stackoverflow.com/questions/1158076/implement-touch-using-python
def touch(file_name, times=None):
    """Touch a file: create it if missing, update its timestamps."""
    handle = open(file_name, 'a')
    try:
        os.utime(file_name, times)
    finally:
        handle.close()
def delete_file(file_name):
    """Delete a file (show errors optional).

    Best-effort: any OSError (e.g. the file does not exist) is silently
    ignored.  Always returns 0.
    """
    try:
        os.remove(file_name)
    except OSError:
        pass

    return 0
def delete_tree(directory_name):
    """Delete a directory recursively.

    Best-effort: any OSError (e.g. the tree does not exist) is silently
    ignored.  Always returns 0.
    """
    try:
        shutil.rmtree(directory_name)
    except OSError:
        pass

    return 0
def file_in_zip(zip_f, file_name):
    """Determine if a file entry exists in an open ZipFile."""
    # EAFP: ZipFile.read raises KeyError for a missing member.
    try:
        zip_f.read(file_name)
    except KeyError:
        return False
    return True
def directory_in_zip(zip_f, directory_name):
    """Determine if any entry in an open ZipFile lives under a directory."""
    prefix = "%s/" % directory_name.rstrip("/")
    for entry_name in zip_f.namelist():
        if entry_name.startswith(prefix):
            return True
    return False
def is_valid_url(url_string):
    """Test and return valid URL.

    A URL counts as valid when it parses with a non-empty scheme.
    """
    parsed_url = compat.urlparse.urlparse(url_string)
    return bool(parsed_url.scheme)
def is_http_url(url_string):
    """Check scheme of a URL.

    True only for plain 'http' (not 'https').
    """
    return bool(compat.urlparse.urlparse(url_string).scheme == 'http')
| {
"repo_name": "android-dtf/dtf",
"path": "python-dtf/dtf/core/utils.py",
"copies": "2",
"size": "4466",
"license": "apache-2.0",
"hash": -6970317325029768000,
"line_mean": 20.3684210526,
"line_max": 76,
"alpha_frac": 0.6372592924,
"autogenerated": false,
"ratio": 3.5137686860739574,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5151027978473958,
"avg_score": null,
"num_lines": null
} |
"""Internal Package Manager"""
from __future__ import absolute_import
from __future__ import print_function
import imp
import os
import os.path
import re
import sqlite3
from shutil import copy, rmtree
import dtf.globals
import dtf.logging as log
import dtf.core.compat as compat
import dtf.core.item
import dtf.core.manifestparser as mp
import dtf.core.utils as utils
# Log tag for this module.
TAG = "dtf_package_manager"

# Name of the manifest file expected inside dtf ZIPs.
MANIFEST_NAME = "manifest.xml"

# Re-exported dtf.globals paths for convenience within this module.
DTF_DATA_DIR = dtf.globals.DTF_DATA_DIR
DTF_BINARIES_DIR = dtf.globals.DTF_BINARIES_DIR
DTF_LIBRARIES_DIR = dtf.globals.DTF_LIBRARIES_DIR
DTF_MODULES_DIR = dtf.globals.DTF_MODULES_DIR
DTF_PACKAGES_DIR = dtf.globals.DTF_PACKAGES_DIR
DTF_DB = dtf.globals.DTF_DB
# Helpers ###################################################
def copy_file(local_name, install_name, install_dir):
    """Copy a single file into the install directory and mark it 0755.

    Always returns 0.
    """
    destination = install_dir + install_name
    log.d(TAG, "Copying '%s' to '%s'..." % (local_name, destination))
    copy(local_name, destination)
    os.chmod(destination, 0o755)
    log.d(TAG, "Copy complete!")
    return 0
def copy_tree(local_name, install_name, install_dir):
    """Copy a directory recursively into the install directory.

    Any existing tree at the destination is removed first, making this a
    replace operation.  Always returns 0.
    """
    install_path = install_dir + install_name + '/'

    # We need to remove the first one
    rmtree(install_path, ignore_errors=True)

    # Make the new directory.
    os.makedirs(install_path)

    for root, dirs, files in os.walk(local_name):

        # Create new directories.
        if len(dirs) != 0:
            for local_dir in [os.path.join(root, name) for name in dirs]:
                # Strip the source prefix to get the destination-relative
                # path.  NOTE(review): assumes local_name has no trailing
                # '/' — confirm with callers.
                new_dir = local_dir.replace(local_name+'/', '', 1)
                log.d(TAG, "Making dir %s" % (install_path + new_dir))
                os.makedirs(install_path + new_dir)
        if len(files) != 0:
            for local_file in [os.path.join(root, name) for name in files]:
                new_file = local_file.replace(local_name+'/', '', 1)
                log.d(TAG, "Copying file '%s' to '%s'"
                      % (local_file, install_path + new_file))
                copy(local_file, install_path + new_file)

    return 0
def get_dict_attrib(in_dict, key, default=None):
    """Attempt to retrieve an attribute from a dictionary.

    Returns `default` when the key is absent.  dict.get() is the
    idiomatic form of the original try/except KeyError.
    """
    return in_dict.get(key, default)
def get_item_attrib(item, attrib):
    """Load an attribute of an item from the main DB.

    The table name comes from a fixed mapping and `attrib` must come
    from trusted code (identifiers cannot be bound as parameters); the
    item name IS bound as a parameter to prevent SQL injection via
    crafted package names (the original interpolated it into the SQL).

    Returns the value, or None for unknown types / missing rows.
    """
    item_type = item.type

    if item_type == dtf.core.item.TYPE_MODULE:
        table = "modules"
    elif item_type == dtf.core.item.TYPE_LIBRARY:
        table = "libraries"
    elif item_type == dtf.core.item.TYPE_BINARY:
        table = "binaries"
    elif item_type == dtf.core.item.TYPE_PACKAGE:
        table = "packages"
    else:
        log.e(TAG, "Unknown type '%s' in getItem Attribute. Returning"
              % item_type)
        return None

    dtf_db = sqlite3.connect(DTF_DB)
    # Ensure the connection is released even if the query fails
    # (the original never closed it).
    try:
        cur = dtf_db.cursor()
        sql = ("SELECT %s "
               "FROM %s "
               "WHERE name=? "
               "LIMIT 1" % (attrib, table))
        cur.execute(sql, (item.name,))
        row = cur.fetchone()
    finally:
        dtf_db.close()

    # row is None when no record matched.
    try:
        return row[0]
    except TypeError:
        return None
# End helpers ###############################################
def is_bash_module(module_path):
    """Inspect the shebang line to decide whether this is a shell module.

    Returns 1 when the interpreter path ends in "sh" (sh, bash, zsh, ...),
    0 otherwise.
    """
    with open(module_path, 'r') as file_f:
        shebang = file_f.readline().rstrip('\n')
    return 1 if re.match("^#!/.*sh$", shebang) else 0
def is_python_module(module_path, name):
    """Try to load the file as a Python module.

    Returns True when imp.load_source() succeeds; False when the file is
    not valid Python (NameError/SyntaxError) or its imports cannot be
    resolved. NOTE(review): `imp` is deprecated — presumably kept for
    Python 2 compatibility; confirm before migrating to importlib.
    Loading executes the module's top level as a side effect.
    """
    try:
        imp.load_source(name, module_path)
    except (NameError, SyntaxError):
        return False
    except ImportError:
        log.w(TAG, "This is a python module, but has non-existent imports!")
        return False
    return True
def parse_python_module(module_path, name):
    """Parse a file as a Python module and build an Item from it.

    The module must define a class whose name equals `name`; an instance
    of that class supplies author/about/version metadata. Returns the
    populated Item, or None on load/metadata errors.
    """
    module = imp.load_source(name, module_path)
    if module is None:
        log.e(TAG, "Error launching module '%s'." % name)
        return None
    try:
        # The module must contain a class matching its own name.
        mod_class = getattr(module, name)
        mod_inst = mod_class()
    except AttributeError:
        log.e(TAG, "Unable to find class '%s' in module!" % name)
        return None
    item = dtf.core.item.Item()
    item.type = dtf.core.item.TYPE_MODULE
    item.name = name
    item.local_name = module_path
    item.install_name = name
    item.author = mod_inst.author
    item.about = mod_inst.about
    version = mod_inst.version
    if version is not None:
        if dtf.core.item.is_valid_version(version):
            item.version = version
        else:
            log.e(TAG, "Invalid version specified. Exiting.")
            return None
    else:
        item.version = None
    # Remove the compiled file name (the load above may have created
    # a "<module>c" byte-compiled file next to the source).
    compiled_python_file = "%sc" % module_path
    if os.path.isfile(compiled_python_file):
        os.remove(compiled_python_file)
    return item
def parse_bash_module(module_path, name):
    """Parse a bash module's "#@Key: value" header lines into an Item.

    Returns a populated Item, or None when an invalid version string
    is declared.

    Fix: split on the first ':' only. The old split(":") raised
    ValueError (and dropped the attribute) whenever a value itself
    contained a colon, e.g. "#@About: foo: bar".
    """
    attributes = dict()
    # Parse file for "#@", strings, store in dictionary.
    for line in open(module_path).read().split("\n"):
        match = re.match("#@[a-zA-Z ]+:.*", line)
        if match is None:
            continue
        # The regex guarantees at least one ':', so maxsplit=1 cannot
        # raise; everything after the first colon is the value.
        attribute, value = match.group(0).replace("#@", "").split(":", 1)
        attributes[attribute] = value.lstrip(" ")
    item = dtf.core.item.Item()
    item.type = dtf.core.item.TYPE_MODULE
    item.name = name
    item.local_name = module_path
    item.install_name = name
    item.author = get_dict_attrib(attributes, "Author")
    item.about = get_dict_attrib(attributes, "About")
    version = get_dict_attrib(attributes, "Version")
    if version is not None:
        if dtf.core.item.is_valid_version(version):
            item.version = version
        else:
            log.e(TAG, "Invalid version specified. Exiting.")
            return None
    else:
        item.version = None
    # Return our item.
    return item
# Getting Installed Data
def get_binaries(name_only=False):
    """Return installed binaries, as bare names or as full Item objects."""
    dtf_db = sqlite3.connect(DTF_DB)
    cur = dtf_db.cursor()
    # Name-only mode: just project the name column.
    if name_only:
        return [row[0] for row in cur.execute('SELECT name '
                                              'FROM binaries ')]
    # Full mode: build an Item per row, ordered by name.
    cur.execute('SELECT name, version, '
                'about, author '
                'FROM binaries '
                'ORDER BY name')
    bins = list()
    for row in cur.fetchall():
        item = dtf.core.item.Item()
        item.type = dtf.core.item.TYPE_BINARY
        item.name, item.version, item.about, item.author = row
        bins.append(item)
    return bins
def get_libraries(name_only=False):
    """Return installed libraries, as bare names or as full Item objects."""
    dtf_db = sqlite3.connect(DTF_DB)
    cur = dtf_db.cursor()
    # Name-only mode: just project the name column.
    if name_only:
        return [row[0] for row in cur.execute('SELECT name '
                                              'FROM libraries ')]
    # Full mode: build an Item per row, ordered by name.
    cur.execute('SELECT name, version, '
                'about, author '
                'FROM libraries '
                'ORDER BY name')
    libs = list()
    for row in cur.fetchall():
        item = dtf.core.item.Item()
        item.type = dtf.core.item.TYPE_LIBRARY
        item.name, item.version, item.about, item.author = row
        libs.append(item)
    return libs
def get_modules(name_only=False):
    """Return installed modules, as bare names or as full Item objects."""
    dtf_db = sqlite3.connect(DTF_DB)
    cur = dtf_db.cursor()
    # Name-only mode: just project the name column.
    if name_only:
        return [row[0] for row in cur.execute('SELECT name '
                                              'FROM modules ')]
    # Full mode: build an Item per row, ordered by name.
    cur.execute('SELECT name, version, '
                'about, author '
                'FROM modules '
                'ORDER BY name')
    mods = list()
    for row in cur.fetchall():
        item = dtf.core.item.Item()
        item.type = dtf.core.item.TYPE_MODULE
        item.name, item.version, item.about, item.author = row
        mods.append(item)
    return mods
def get_packages(name_only=False):
    """Return installed packages, as bare names or as full Item objects."""
    dtf_db = sqlite3.connect(DTF_DB)
    cur = dtf_db.cursor()
    # Name-only mode: just project the name column.
    if name_only:
        return [row[0] for row in cur.execute('SELECT name '
                                              'FROM packages ')]
    # Full mode: build an Item per row, ordered by name.
    cur.execute('SELECT name, version, '
                'about, author '
                'FROM packages '
                'ORDER BY name')
    packages = list()
    for row in cur.fetchall():
        item = dtf.core.item.Item()
        item.type = dtf.core.item.TYPE_PACKAGE
        item.name, item.version, item.about, item.author = row
        packages.append(item)
    return packages
def is_binary_installed(name):
    """Determine if a binary is installed; returns True/False."""
    # All four checks delegate to the shared DB lookup, varying only type.
    return __item_installed(name, dtf.core.item.TYPE_BINARY)


def is_library_installed(name):
    """Determine if a library is installed; returns True/False."""
    return __item_installed(name, dtf.core.item.TYPE_LIBRARY)


def is_module_installed(name):
    """Determine if a module is installed; returns True/False."""
    return __item_installed(name, dtf.core.item.TYPE_MODULE)


def is_package_installed(name):
    """Determine if a package is installed; returns True/False."""
    return __item_installed(name, dtf.core.item.TYPE_PACKAGE)
def find_local_module(root, name):
    """Return 1 when a project-local module of this name exists, else 0."""
    candidate = "%s/%s/%s" % (root,
                              utils.LOCAL_MODULES_DIRECTORY, name)
    return 1 if os.path.isfile(candidate) else 0
# Repo management
def add_repo(repo_name, url):
    """Add a repo to the DB.

    Returns 0 on success, -2 for an invalid URL, -3 when the repo name
    already exists.
    """
    # First, validate the URL
    if not utils.is_valid_url(url):
        log.e(TAG, "Invalid URL provided (missing http/s)")
        return -2
    # Next, make sure this repo doesnt exist
    if __has_repo(repo_name):
        log.e(TAG, "Repo name '%s' already exists!" % repo_name)
        return -3
    return __add_repo(repo_name, url)


def remove_repo(repo_name):
    """Remove a repo by name; returns -3 when it doesn't exist, else 0."""
    if not __has_repo(repo_name):
        log.e(TAG, "Repo name '%s' doesn't exist!" % repo_name)
        return -3
    return __do_repo_delete(repo_name)


def get_repos():
    """Get listing of repos as (repo_name, url) tuples."""
    return __do_get_repos()
# INTERNAL ONLY #######################################################
def __prompt_install(local_item, installed_item):
    """Ask the user whether to overwrite an already-installed item."""
    print('Installed Item Details:')
    print(str(installed_item))
    print('')
    print('New Item Details:')
    print(str(local_item))
    print('')
    print("Do you want to install this %s? [y/N]"
          % (installed_item.type), end=" ")
    # Anything other than an explicit "y"/"Y" declines.
    return compat.raw_input().lower() == "y"


def __prompt_delete(installed_item):
    """Ask the user to confirm an irreversible item deletion."""
    print('Installed Item Details:')
    print(str(installed_item))
    print('')
    print("Are you sure you want to delete this item (NO UNDO)? [y/N]",
          end=" ")
    return compat.raw_input().lower() == "y"
def __load_item(item):
    """Return a new Item with all DB-backed attributes populated."""
    loaded = dtf.core.item.Item()
    loaded.name = item.name
    loaded.type = item.type
    loaded.local_name = None
    # These four live only in the database, keyed by name + type.
    for attrib in ("install_name", "author", "about", "version"):
        setattr(loaded, attrib, get_item_attrib(item, attrib))
    return loaded
def __item_installed(name, item_type):
    """Generic test for installed content.

    Raises KeyError for an unknown item type (as before, via the dict
    lookup); returns True/False otherwise.

    Fixes: the name is bound as a SQL parameter rather than string
    interpolated, and the connection is closed.
    """
    tables = {dtf.core.item.TYPE_MODULE: "modules",
              dtf.core.item.TYPE_LIBRARY: "libraries",
              dtf.core.item.TYPE_BINARY: "binaries",
              dtf.core.item.TYPE_PACKAGE: "packages"}
    table = tables[item_type]
    dtf_db = sqlite3.connect(DTF_DB)
    try:
        cur = dtf_db.cursor()
        cur.execute("SELECT id FROM %s WHERE name=? LIMIT 1" % table,
                    (name,))
        return cur.fetchone() is not None
    finally:
        dtf_db.close()
def __do_single_item_install(item, copy_func, install_dir, update_func,
                             label):
    """Shared copy-then-register flow for single-item installs.

    copy_func copies the content (copy_file or copy_tree), update_func
    registers it in the DB. Returns 0 on success, -1 on a copy failure,
    -2 on a DB update failure. Factored out of four near-identical
    functions.
    """
    if copy_func(item.local_name, item.install_name, install_dir) != 0:
        log.e(TAG, "Error copying %s '%s'" % (label, item.local_name))
        return -1
    if update_func(item) == 0:
        log.e(TAG, "Failed to update %s '%s' details in database."
              % (label, item.name))
        return -2
    log.i(TAG, "%s '%s' installed successfully!"
          % (label.capitalize(), item.name))
    return 0


def __do_single_binary_install(item):
    """Perform single binary installation"""
    return __do_single_item_install(item, copy_file, DTF_BINARIES_DIR,
                                    __update_binary, "binary")


def __do_single_library_install(item):
    """Perform single library installation"""
    return __do_single_item_install(item, copy_tree, DTF_LIBRARIES_DIR,
                                    __update_library, "library")


def __do_single_module_install(item):
    """Perform single module installation"""
    return __do_single_item_install(item, copy_file, DTF_MODULES_DIR,
                                    __update_module, "module")


def __do_single_package_install(item):
    """Perform single package installation"""
    return __do_single_item_install(item, copy_tree, DTF_PACKAGES_DIR,
                                    __update_package, "package")
def __do_zip_item_install(export_zip, item, install_dir, update_func, label):
    """Shared extract-then-register flow for ZIP-based installs.

    Returns 0 on success, -1 on an extraction failure, -2 on a DB
    update failure. Factored out of four near-identical functions.
    """
    if export_zip.install_item_to(item, install_dir) != 0:
        log.e(TAG, "Error copying %s '%s'" % (label, item.local_name))
        return -1
    if update_func(item) == 0:
        log.e(TAG, "Failed to update %s '%s' details in database."
              % (label, item.name))
        return -2
    return 0


def __do_zip_binary_install(export_zip, item):
    """Perform the ZIP binary installation"""
    return __do_zip_item_install(export_zip, item, DTF_BINARIES_DIR,
                                 __update_binary, "binary")


def __do_zip_library_install(export_zip, item):
    """Perform the ZIP library installation"""
    return __do_zip_item_install(export_zip, item, DTF_LIBRARIES_DIR,
                                 __update_library, "library")


def __do_zip_module_install(export_zip, item):
    """Perform the ZIP module installation"""
    return __do_zip_item_install(export_zip, item, DTF_MODULES_DIR,
                                 __update_module, "module")


def __do_zip_package_install(export_zip, item):
    """Perform the ZIP package installation"""
    return __do_zip_item_install(export_zip, item, DTF_PACKAGES_DIR,
                                 __update_package, "package")
def __update_item(table, item):
    """Replace `item`'s row in `table`; returns the INSERT rowcount.

    Fixes over the four originals it replaces: the name is bound as a
    SQL parameter, and `about` is now stored for every type — the
    tables all define the column and the get_*() readers select it,
    but the binaries/libraries/packages INSERTs previously omitted it
    (item.about was silently dropped).
    """
    conn = sqlite3.connect(DTF_DB)
    cur = conn.cursor()
    # Remove any previous row for this name first.
    cur.execute("DELETE FROM %s WHERE name=?" % table, (item.name,))
    cur.execute("INSERT INTO %s (name, about, version, "
                "author, install_name) "
                "VALUES (?, ?, ?, ?, ?)" % table,
                (item.name, item.about, item.version, item.author,
                 item.install_name))
    conn.commit()
    return cur.rowcount


def __update_binary(item):
    """Update a binary in the DB"""
    return __update_item("binaries", item)


def __update_library(item):
    """Update a library in the DB"""
    return __update_item("libraries", item)


def __update_module(item):
    """Update module in the DB"""
    return __update_item("modules", item)


def __update_package(item):
    """Update package in the DB"""
    return __update_item("packages", item)
def __add_repo(repo_name, url):
    """Insert a new repo row into the DB; always returns 0."""
    conn = sqlite3.connect(DTF_DB)
    conn.cursor().execute('INSERT INTO repos (repo_name, url)'
                          'VALUES (?, ?)', (repo_name, url))
    conn.commit()
    return 0
def __has_repo(repo_name):
    """Check if a repo with this name exists in the DB.

    Fixes: repo_name is bound as a SQL parameter (quotes in a name
    previously broke the statement), and the connection is closed.
    """
    dtf_db = sqlite3.connect(DTF_DB)
    try:
        cur = dtf_db.cursor()
        cur.execute('SELECT id FROM repos WHERE repo_name=? LIMIT 1',
                    (repo_name,))
        return cur.fetchone() is not None
    finally:
        dtf_db.close()
def __do_get_repos():
    """Return a list of (repo_name, url) tuples for all known repos."""
    dtf_db = sqlite3.connect(DTF_DB)
    cur = dtf_db.cursor()
    return [(name, url)
            for name, url in cur.execute('SELECT repo_name, url '
                                         'FROM repos')]
def __do_item_delete(item, install_dir, table, delete_func, err_msg):
    """Shared flow for removals: delete the content, then the DB row.

    Content removal is best-effort (logged but not fatal), matching the
    originals; the DB row is always removed and 0 returned. The name is
    bound as a SQL parameter instead of interpolated. Factored out of
    four near-identical functions.
    """
    if delete_func(install_dir + item.install_name) != 0:
        log.e(TAG, err_msg)
    conn = sqlite3.connect(DTF_DB)
    cur = conn.cursor()
    cur.execute("DELETE FROM %s WHERE name=?" % table, (item.name,))
    conn.commit()
    return 0


def __do_binary_delete(item):
    """Perform the binary removal"""
    return __do_item_delete(item, DTF_BINARIES_DIR, "binaries",
                            utils.delete_file,
                            "Error removing binary file! Continuing.")


def __do_library_delete(item):
    """Perform the library removal"""
    return __do_item_delete(item, DTF_LIBRARIES_DIR, "libraries",
                            utils.delete_tree,
                            "Error removing tree! Continuing.")


def __do_module_delete(item):
    """Perform the module removal"""
    return __do_item_delete(item, DTF_MODULES_DIR, "modules",
                            utils.delete_file,
                            "Error removing module file! Continuing.")


def __do_package_delete(item):
    """Perform the package removal"""
    return __do_item_delete(item, DTF_PACKAGES_DIR, "packages",
                            utils.delete_tree,
                            "Error removing tree! Continuing.")
def __do_repo_delete(repo_name):
    """Remove a repo row from the DB; always returns 0.

    Fix: repo_name is bound as a SQL parameter so quoted names cannot
    break the statement.
    """
    conn = sqlite3.connect(DTF_DB)
    conn.cursor().execute('DELETE FROM repos WHERE repo_name=?',
                          (repo_name,))
    conn.commit()
    return 0
# End INTERNAL #####################################################
# Database Initialization ##########################################
def initialize_db():
    """Initialize dtf main.db, creating all tables if missing.

    Returns 0 on success, -1 on any SQLite error.

    The original per-table `if not rtn` checks were dead code:
    Connection.execute() returns a cursor (always truthy) and signals
    failure by raising — so errors are now caught explicitly.
    """
    # binaries/libraries/modules/packages share one column layout.
    item_schema = ('('
                   'id INTEGER PRIMARY KEY AUTOINCREMENT, '
                   'name TEXT UNIQUE NOT NULL, '
                   'about TEXT, '
                   'version TEXT, '
                   'author TEXT, '
                   'install_name TEXT'
                   ')')
    dtf_db = sqlite3.connect(DTF_DB)
    try:
        for table in ('binaries', 'libraries', 'modules', 'packages'):
            dtf_db.execute('CREATE TABLE IF NOT EXISTS %s%s'
                           % (table, item_schema))
        # Create Repo Table
        dtf_db.execute('CREATE TABLE IF NOT EXISTS repos'
                       '('
                       'id INTEGER PRIMARY KEY AUTOINCREMENT, '
                       'repo_name TEXT UNIQUE NOT NULL, '
                       'url TEXT'
                       ')')
        dtf_db.commit()
    except sqlite3.Error:
        log.e(TAG, "Error creating dtf db tables, exiting")
        return -1
    finally:
        dtf_db.close()
    return 0
def create_data_dirs():
    """Create the .dtf/ directory structure.

    Returns 0 on success, -6 when the top-level data dir cannot be made.

    Fix: each subdirectory now gets its own try/except. The original
    wrapped all four mkdir calls in one block, so a single pre-existing
    directory aborted creation of every directory listed after it.
    """
    # First create the main dir.
    if not os.path.isdir(DTF_DATA_DIR):
        try:
            os.mkdir(DTF_DATA_DIR)
        except OSError:
            log.e(TAG, "Unable to create dtf data directory!")
            return -6
    # Now the subdirectories. Be less strict about errors for these.
    for sub_dir in (DTF_MODULES_DIR, DTF_PACKAGES_DIR,
                    DTF_BINARIES_DIR, DTF_LIBRARIES_DIR):
        try:
            os.mkdir(sub_dir)
        except OSError:
            pass
    return 0
# End Initialization ###############################################
# Internal Package Installation ####################################
# pylint: disable=too-many-arguments,too-many-return-statements
def __generic_install(item, force_mode, new_only, check_function,
                      install_function, install_args):
    """Check-and-dispatch core shared by all install paths.

    check_function(name) reports whether an item of this name is already
    installed; install_function(*install_args) performs the install.
    Returns install_function's result, 0 when the user/new_only skips,
    or -4 when the installed-check raises KeyError (unknown item type,
    see __item_installed).
    """
    try:
        # First check if we know about this item
        if check_function(item.name):
            log.d(TAG, "Item exists, need to check")
            # If forced, don't even check.
            if force_mode:
                log.i(TAG, "Forcing component installation: %s (%s)"
                      % (item.name, item.type))
                return install_function(*install_args)
            installed_item = __load_item(item)
            # Ok, next, we check the versions.
            if dtf.core.item.item_is_newer(installed_item, item):
                log.i(TAG, "Upgrading %s from v%s to v%s"
                      % (item.name, installed_item.version, item.version))
                return install_function(*install_args)
            elif new_only:
                # new_only suppresses the interactive prompt entirely.
                log.w(TAG, "Skipping due to older version: %s" % item.name)
                return 0
            # Otherwise we need to prompt
            else:
                print("[WARNING] An item with this name is already installed."
                      " See details below.")
                if __prompt_install(item, installed_item):
                    log.d(TAG, "User would like to install")
                    return install_function(*install_args)
                else:
                    log.w(TAG, "Installation skipped.")
                    return 0
        else:
            log.i(TAG, "Installing new item: %s (%s)"
                  % (item.name, item.type))
            return install_function(*install_args)
    except KeyError:
        log.w(TAG, "Error checking if the item was installed. Skipping")
        return -4
# Install Content ######################################################
def install_zip(zip_file_name, force=False, new_only=False):
    """Install every item contained in an export ZIP file.

    Returns the OR of all per-item install results (0 == all clean).
    """
    # Map item type -> (installed-check, installer) pair.
    handlers = {
        dtf.core.item.TYPE_BINARY: (is_binary_installed,
                                    __do_zip_binary_install),
        dtf.core.item.TYPE_LIBRARY: (is_library_installed,
                                     __do_zip_library_install),
        dtf.core.item.TYPE_MODULE: (is_module_installed,
                                    __do_zip_module_install),
        dtf.core.item.TYPE_PACKAGE: (is_package_installed,
                                     __do_zip_package_install),
    }
    rtn = 0
    export_zip = mp.ExportZip(zip_file_name)
    for item in export_zip.iter_items():
        if not export_zip.assert_item(item):
            log.w(TAG, "'%s' defined, but local file '%s' does not exist!"
                  % (item.name, item.local_name))
            continue
        if item.type in handlers:
            check_function, install_function = handlers[item.type]
            rtn |= __generic_install(item, force, new_only, check_function,
                                     install_function, (export_zip, item))
    return rtn
def install_single(item, force=False):
    """Install a single local item; returns None for unknown types."""
    # Map item type -> (installed-check, installer) pair.
    handlers = {
        dtf.core.item.TYPE_BINARY: (is_binary_installed,
                                    __do_single_binary_install),
        dtf.core.item.TYPE_LIBRARY: (is_library_installed,
                                     __do_single_library_install),
        dtf.core.item.TYPE_MODULE: (is_module_installed,
                                    __do_single_module_install),
        dtf.core.item.TYPE_PACKAGE: (is_package_installed,
                                     __do_single_package_install),
    }
    if item.type not in handlers:
        # Mirrors the original elif-chain falling through (None).
        return None
    check_function, install_function = handlers[item.type]
    return __generic_install(item, force, False, check_function,
                             install_function, (item,))
# End Package Installation ##############################################
# Removing Content ######################################################
def __generic_delete(name, item_type, check_function, delete_function,
                     label, force=False):
    """Shared check/prompt/delete flow for all delete_* entry points.

    Returns delete_function's result, 0 when the user skips, or -1 when
    no item of this name is installed. Factored out of four
    near-identical functions.
    """
    if not check_function(name):
        log.e(TAG, "No %s installed with this name." % label)
        return -1
    item = dtf.core.item.Item()
    item.name = name
    item.type = item_type
    installed_item = __load_item(item)
    if force:
        log.d(TAG, "Forcing component removal.")
        return delete_function(installed_item)
    # Prompt for removal
    if __prompt_delete(installed_item):
        log.d(TAG, "User would like to remove")
        return delete_function(installed_item)
    log.i(TAG, "%s deletion skipped." % label.capitalize())
    return 0


def delete_binary(name, force=False):
    """Remove a binary"""
    return __generic_delete(name, dtf.core.item.TYPE_BINARY,
                            is_binary_installed, __do_binary_delete,
                            "binary", force=force)


def delete_library(name, force=False):
    """Remove a library"""
    return __generic_delete(name, dtf.core.item.TYPE_LIBRARY,
                            is_library_installed, __do_library_delete,
                            "library", force=force)


def delete_module(name, force=False):
    """Remove a module"""
    return __generic_delete(name, dtf.core.item.TYPE_MODULE,
                            is_module_installed, __do_module_delete,
                            "module", force=force)


def delete_package(name, force=False):
    """Remove a package"""
    return __generic_delete(name, dtf.core.item.TYPE_PACKAGE,
                            is_package_installed, __do_package_delete,
                            "package", force=force)
# Database Removal ######################################################
def purge():
    """Remove all installed content and rebuild an empty database.

    Deletes every installed binary/library/module/package from disk
    (best-effort, errors logged), drops the four content tables (the
    repos table is kept), and re-runs initialize_db(). Returns 0 on
    success, -1 when the rebuild fails.
    """
    log.i(TAG, "Starting purge....")
    dtf_db = sqlite3.connect(DTF_DB)
    cur = dtf_db.cursor()
    # Remove Binaries
    sql = ('SELECT name, install_name '
           'FROM binaries')
    for row in cur.execute(sql):
        binary_name = row[0]
        install_name = row[1]
        full_path = DTF_BINARIES_DIR + install_name
        log.d(TAG, "Removing binary '%s'" % binary_name)
        if utils.delete_file(full_path) != 0:
            log.e(TAG, "Error removing binary file! Continuing.")
    # Remove Libraries (directories, hence delete_tree)
    sql = ('SELECT name, install_name '
           'FROM libraries')
    for row in cur.execute(sql):
        library_name = row[0]
        install_name = row[1]
        full_path = DTF_LIBRARIES_DIR + install_name
        log.d(TAG, "Removing library '%s'" % library_name)
        if utils.delete_tree(full_path) != 0:
            log.e(TAG, "Error removing library! Continuing.")
    # Remove Modules
    sql = ('SELECT name, install_name '
           'FROM modules')
    for row in cur.execute(sql):
        module_name = row[0]
        install_name = row[1]
        full_path = DTF_MODULES_DIR + install_name
        log.d(TAG, "Removing module '%s'" % module_name)
        if utils.delete_file(full_path) != 0:
            log.e(TAG, "Error removing module file! Continuing.")
    # Remove Packages (directories, hence delete_tree)
    sql = ('SELECT name, install_name '
           'FROM packages')
    for row in cur.execute(sql):
        package_name = row[0]
        install_name = row[1]
        full_path = DTF_PACKAGES_DIR + install_name
        log.d(TAG, "Removing package '%s'" % package_name)
        if utils.delete_tree(full_path) != 0:
            log.e(TAG, "Error removing package! Continuing.")
    # Drop the DB.
    cur.execute("DROP TABLE IF EXISTS binaries")
    cur.execute("DROP TABLE IF EXISTS libraries")
    cur.execute("DROP TABLE IF EXISTS modules")
    cur.execute("DROP TABLE IF EXISTS packages")
    dtf_db.commit()
    # Rebuilding
    if initialize_db() != 0:
        log.e(TAG, "Unable to re-create dtf db!")
        return -1
    log.i(TAG, "Purge complete!")
    return 0
# End Database Removal ##################################################
| {
"repo_name": "jakev/dtf",
"path": "python-dtf/dtf/core/packagemanager.py",
"copies": "2",
"size": "36927",
"license": "apache-2.0",
"hash": 691212545166494100,
"line_mean": 23.8332212508,
"line_max": 78,
"alpha_frac": 0.5547160614,
"autogenerated": false,
"ratio": 3.7827289489858638,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5337445010385864,
"avg_score": null,
"num_lines": null
} |
"""Python helper for using `adb`"""
from __future__ import absolute_import
from __future__ import print_function
import os
import os.path
import tempfile
import shutil
from subprocess import Popen, PIPE
from dtf.properties import get_prop
from dtf.constants import DTF_CLIENT
ADB_BINARY = "adb"
STATUS_DEVICE = 'device'
STATUS_OFFLINE = 'offline'
STATUS_BOOTLOADER = 'bootloader'
MODE_USB = 'usb'
MODE_WIFI = 'wifi'
def get_mode_serial():
    """Return the device serial appropriate for the configured mode.

    USB mode uses the stored serial number; otherwise (wifi mode) an
    "ip:port" string is built from the client properties.
    """
    if get_prop('Client', 'mode') == MODE_USB:
        return get_prop("Info", "serial")
    else:
        return ("%s:%s" % (get_prop('Client', 'ip-addr'),
                           get_prop('Client', 'port')))
# pylint:disable=too-many-public-methods
class DtfAdb(object):
    """Python wrapper class for the `adb` command-line client.

    Each call shells out to `adb` (scoped to the configured serial
    unless no_serial is set) and stores the command's output and error
    streams on the instance as lists of lines.

    Fixes over the original: a leftover debug print in connect() was
    removed, and pull()'s if/else — which ran an identical command in
    both branches — was collapsed.
    """

    # Class-level defaults; per-instance values are set in __init__.
    serial = ''
    pre_1_0_36 = False
    no_serial = False
    stdout = None      # lines of stdout from the most recent command
    stderr = None      # lines of stderr from the most recent command
    returncode = ''

    def __init__(self, no_serial=False):
        """Object initialization.

        When no_serial is False, the target serial is resolved from the
        project properties (USB serial or wifi "ip:port").
        """
        self.no_serial = no_serial
        if not self.no_serial:
            self.serial = get_mode_serial()
        # adb's pull behavior differs before/after 1.0.36 (see
        # pull_dir); remember which side the local binary is on.
        self.pre_1_0_36 = self.__is_old_adb_version()

    def __is_old_adb_version(self):
        """Return True when the installed adb is older than 1.0.36.

        (The original docstring claimed "new version"; True is in fact
        returned for the OLD, pre-1.0.36 binaries.)
        """
        # Run adb with no arguments; the version is taken from the last
        # token of the first stderr line.
        self.__run_command("")
        try:
            version_line = self.get_errors()[0]
            version = version_line.split()[-1]
            split_version = version.split(".")
            if split_version[0] == "1" and split_version[1] == "0":
                if int(split_version[2]) < 36:
                    return True
            return False
        except IndexError:
            # No parsable version output: treat as a modern adb.
            return False

    def __run_command(self, in_cmd):
        """Run an `adb` command and capture its output.

        Populates self.stdout/self.stderr with lists of lines, with
        carriage returns stripped.
        """
        if self.no_serial:
            cmd = ("%s %s" % (ADB_BINARY, in_cmd)).split(' ')
        else:
            cmd = ("%s -s %s %s"
                   % (ADB_BINARY, self.serial, in_cmd)).split(' ')
        proc = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=False)
        tmp_out = proc.stdout.read().replace("\r", '')
        tmp_err = proc.stderr.read().replace("\r", '')
        self.stdout = tmp_out.split("\n")
        self.stderr = tmp_err.split("\n")
        proc.terminate()

    def get_output(self):
        """Return the stdout lines of the last command."""
        return self.stdout

    def get_errors(self):
        """Return the stderr lines of the last command."""
        return self.stderr

    def shell_command(self, cmd):
        """Execute a shell command on device"""
        self.__run_command("shell %s" % cmd)

    def wait_for_device(self):
        """Block until device is found"""
        self.__run_command("wait-for-device")

    def get_devices(self):
        """List connected devices as {'serial': ..., 'status': ...} dicts."""
        self.__run_command("devices")
        device_list = list()
        output = self.get_output()
        # Remove the "List of devices..." banner...
        output.pop(0)
        # ...and the two trailing empty lines.
        output.pop()
        output.pop()
        if len(output) == 0:
            return device_list
        for device in output:
            try:
                serial, status = device.split('\t')
            except ValueError:
                # Skip lines that are not "<serial>\t<status>".
                continue
            device_list.append({'serial': serial, 'status': status})
        return device_list

    def pull(self, file_name, local="./"):
        """Pull a file off the device.

        The original dispatched on pre_1_0_36 but ran the identical
        command in both branches, so the version check was dropped.
        """
        self.__run_command("pull %s %s" % (file_name, local))

    def pull_dir(self, dir_name, local="./"):
        """Pull a directory off the device.

        For adb >= 1.0.36 the tree is pulled into a temp dir and then
        copied to `local` — presumably because newer adb nests the
        pulled directory inside the destination (TODO confirm).
        """
        if self.pre_1_0_36:
            self.__run_command("pull %s %s" % (dir_name, local))
        else:
            temp_pull = tempfile.mkdtemp()
            base = os.path.basename(dir_name.rstrip("/"))
            self.__run_command("pull %s %s" % (dir_name, temp_pull))
            full_tmp = "%s/%s" % (temp_pull, base)
            shutil.copytree(full_tmp, local)
            shutil.rmtree(temp_pull)

    def push(self, local_file_name, upload_path):
        """Push a file to a device"""
        self.__run_command("push %s %s" % (local_file_name, upload_path))

    def run_as(self, user, cmd):
        """Run a shell command on device as the given user."""
        self.shell_command("run-as %s %s" % (user, cmd))

    def busybox(self, cmd):
        """Execute a busybox command on device, run as the dtf client."""
        busybox = get_prop("Info", "busybox")
        self.shell_command("run-as %s %s %s" % (DTF_CLIENT, busybox, cmd))

    def is_file(self, file_name):
        """Check if a file exists on device"""
        # `ls <file>` echoes the name back only when it exists.
        self.shell_command("ls %s" % file_name)
        return bool(self.stdout[0] == file_name)

    def is_dir(self, dir_name):
        """Check if a directory exists on device"""
        self.shell_command("ls -ld %s" % dir_name)
        # First non-empty line is either an error or an `ls -l` style
        # listing whose first character is 'd' for a directory.
        line = [x for x in self.stdout if x][0]
        if line[-26:] == " No such file or directory":
            return False
        elif line[0] == 'd':
            return True
        else:
            return False

    def install(self, apk_path):
        """Install an APK on a device"""
        self.__run_command("install %s" % apk_path)

    def uninstall(self, app_name):
        """Uninstall an application on a device"""
        self.__run_command("uninstall %s" % app_name)

    def is_installed(self, app_name):
        """Return 1 when `pm list packages` finds the app, else 0."""
        self.shell_command("pm list packages %s" % app_name)
        if self.get_output() == ['']:
            return 0
        else:
            return 1

    def add_forward(self, local, remote):
        """Add an adb forward rule"""
        forward_string = "forward %s %s" % (local, remote)
        self.__run_command(forward_string)

    def remove_forward(self, local):
        """Remove a adb forward rule"""
        remove_string = "forward --remove %s" % local
        self.__run_command(remove_string)

    def kill_server(self):
        """Kill the adb daemon"""
        self.__run_command("kill-server")

    def start_server(self):
        """Start the adb daemon"""
        self.__run_command("start-server")

    def usb(self):
        """Restart device in USB mode"""
        self.__run_command("usb")

    def tcpip(self, port):
        """Restart device in TCP/IP mode"""
        self.__run_command("tcpip %s" % port)

    def connect(self, ip_addr, port):
        """Connect to a device at ip:port over TCP.

        Returns 0 on success; None when adb printed anything ("any news
        is bad news"). A leftover `print('returning whatever')` debug
        line was removed from the success path.
        """
        self.__run_command("connect %s:%s" % (ip_addr, port))
        if self.get_output() != ['']:
            return None
        return 0
| {
"repo_name": "android-dtf/dtf",
"path": "python-dtf/dtf/adb.py",
"copies": "2",
"size": "7433",
"license": "apache-2.0",
"hash": 8695547176905548000,
"line_mean": 22.6719745223,
"line_max": 74,
"alpha_frac": 0.5563029732,
"autogenerated": false,
"ratio": 3.775012696800406,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5331315670000406,
"avg_score": null,
"num_lines": null
} |
"""Python wrapper for Android tools"""
from __future__ import absolute_import
from subprocess import Popen, PIPE
from dtf.globals import get_binding
def aapt(cmd):
    """Run the bundled aapt binary with `cmd`.

    Returns (stdout_lines, stderr_lines, return_code).
    """
    binary = get_binding("dtf_aapt")
    argv = ("%s %s" % (binary, cmd)).split(' ')
    process = Popen(argv, stdout=PIPE, stderr=PIPE, shell=False)
    out, err = process.communicate()
    # NOTE(review): assumes Python 2 str output; on Python 3
    # communicate() returns bytes -- confirm target version.
    return out.split("\n"), err.split("\n"), process.returncode
def apktool(cmd):
    """Run the bundled apktool jar (via java) with `cmd`.

    Returns (stdout_lines, stderr_lines, return_code).
    """
    jar_path = get_binding("dtf_apktool")
    launcher = "java -Xmx512M -jar"
    argv = ("%s %s %s" % (launcher, jar_path, cmd)).split(' ')
    process = Popen(argv, stdout=PIPE, stderr=PIPE, shell=False)
    out, err = process.communicate()
    return out.split("\n"), err.split("\n"), process.returncode
def smali(cmd):
    """Run the bundled smali jar (via java) with `cmd`.

    Returns (stdout_lines, stderr_lines, return_code).
    """
    jar_path = get_binding("dtf_smali")
    launcher = "java -Xmx512M -jar"
    argv = ("%s %s %s" % (launcher, jar_path, cmd)).split(' ')
    process = Popen(argv, stdout=PIPE, stderr=PIPE, shell=False)
    out, err = process.communicate()
    return out.split("\n"), err.split("\n"), process.returncode
def baksmali(cmd):
    """Run the bundled baksmali jar (via java) with `cmd`.

    Returns (stdout_lines, stderr_lines, return_code).
    """
    jar_path = get_binding("dtf_baksmali")
    launcher = "java -Xmx512M -jar"
    argv = ("%s %s %s" % (launcher, jar_path, cmd)).split(' ')
    process = Popen(argv, stdout=PIPE, stderr=PIPE, shell=False)
    out, err = process.communicate()
    return out.split("\n"), err.split("\n"), process.returncode
def axmlprinter2(manifest_file_name, out_file_name):
    """Run axmlprinter2 on `manifest_file_name`, writing to `out_file_name`.

    Returns the java process return code, or -1 when the tool
    produced no output (nothing is written in that case).
    """
    axmlprinter2_path = get_binding("dtf_axmlprinter2")
    java_args = "java -Xmx256M -jar"
    cmd = ("%s %s %s"
           % (java_args, axmlprinter2_path, manifest_file_name)).split(' ')
    proc = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=False)
    stdout = proc.communicate()[0]
    rtn = proc.returncode
    # Empty output means the conversion failed; don't create an empty file.
    if len(stdout) == 0:
        return -1
    # Idiomatic context manager replaces the original try/finally pair.
    with open(out_file_name, 'wb') as out_f:
        out_f.write(stdout)
    return rtn
| {
"repo_name": "android-dtf/dtf",
"path": "python-dtf/dtf/included.py",
"copies": "2",
"size": "2995",
"license": "apache-2.0",
"hash": 3721216194837697500,
"line_mean": 22.5826771654,
"line_max": 75,
"alpha_frac": 0.6404006678,
"autogenerated": false,
"ratio": 3.331479421579533,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9971880089379532,
"avg_score": 0,
"num_lines": 127
} |
"""API for interacting with dtfClient application"""
from __future__ import absolute_import
from __future__ import print_function
import os
import socket
import struct
from dtf.adb import DtfAdb
import dtf.logging as log
# Single-byte command codes sent to the on-device dtfClient server.
CMD_DOWNLOAD = 'd'
CMD_UPLOAD = 'u'
CMD_EXECUTE = 'e'

# Single-byte response codes; negative C values are mapped into 0-255.
RESP_OK = chr(0)
RESP_ERROR = chr(1)
RESP_NO_EXIST = chr(-1 % 256)
RESP_NO_READ = chr(-2 % 256)
RESP_EXISTS = chr(-3 % 256)
RESP_NO_WRITE = chr(-4 % 256)

# Returned by DtfClient internals when the socket cannot be opened.
ERR_SOCK = -1

# Wire-format field sizes, in bytes.
SIZE_LONG = 8
SIZE_INTEGER = 4
SIZE_FILENAME = 256
SIZE_CMD = 512
SIZE_TRANSFER = 1024

# Logging tag for this module.
TAG = "dtfClient"

# Name of the UNIX socket exposed by the device-side client; forwarded
# over adb using the localabstract transport.
DTF_SOCKET = "dtf_socket"
FORWARD_SOCKET = "localabstract:" + DTF_SOCKET
def bytes_to_int(byte_stream):
    """Decode a 4-byte big-endian unsigned integer from `byte_stream`."""
    (value,) = struct.unpack(">L", byte_stream)
    return value
def bytes_to_long(byte_stream):
    """Decode an 8-byte big-endian unsigned integer from `byte_stream`."""
    (value,) = struct.unpack(">Q", byte_stream)
    return value
def long_to_bytes(long_in):
    """Encode the integer `long_in` as 8 big-endian bytes."""
    return struct.pack(">Q", long_in)
class DtfClient(object):

    """Python class for talking to the on-device dtfClient application.

    Speaks a simple framed protocol over a UNIX socket (abstract
    namespace first, filesystem fallback), reached through an adb
    forward rule that is enabled around each public operation.
    """

    # Class-level defaults; adb is replaced per-instance in __init__.
    serial = ''
    stdout = None
    stderr = None
    adb = None

    def __init__(self):
        """Object initialization"""
        self.adb = DtfAdb()

    def __enable_forward(self):
        """Setup forwarding for talking to dtfClient"""
        self.adb.add_forward(FORWARD_SOCKET, FORWARD_SOCKET)

    def __disable_forward(self):
        """Remove forwarding rule"""
        self.adb.remove_forward(FORWARD_SOCKET)

    @classmethod
    def __sock_connect(cls, socket_name,
                       socket_family=socket.AF_UNIX,
                       socket_type=socket.SOCK_STREAM):
        """Connect to `socket_name`; return a connected socket or None.

        First try the Linux abstract namespace and fall back to a
        filesystem socket under /tmp.
        """
        # Create an unbound and not-connected socket.
        try:
            sock = socket.socket(socket_family, socket_type)
        except socket.error as err:
            # NOTE(review): err.message is Python 2 only -- confirm this
            # module still targets Python 2.
            log.e(TAG, "Socket creation failed: " + err.message)
            return None
        try:
            log.d(TAG, "Connecting to abstract socket...")
            # \0 denotes an abstract socket
            sock.connect('\0' + socket_name)
        except socket.error:
            # abstract socket connection failed - it probably doesn't exist
            # see jakev/dtf GitHub Issue #35
            log.d(TAG, "Connecting to abstract socket failed. Does it exist?")
            try:
                log.d(TAG, "Connecting to filesystem socket...")
                sock.connect('/tmp/' + socket_name)
            except socket.error as err:
                log.d(TAG, "Connecting to filesystem socket failed: "
                      + err.message)
                log.e(TAG, "Connecting to socket failed, giving up.")
                return None
            else:
                log.d(TAG, "Connected to filesystem socket!")
        else:
            log.d(TAG, "Connected to abstract socket!")
        return sock

    @classmethod
    def __safe_recv(cls, sock, size, response=None):
        """Receive up to `size` bytes; return `response` on socket error.

        Fix: the received data was previously discarded (the recv()
        result was never returned), so callers saw None and treated
        every successful read as a failure.
        """
        try:
            return sock.recv(size)
        except socket.error as err:
            log.e(TAG, "Error calling recv(): %s" % err)
            return response

    def __do_download(self, remote_file_name, local_file_name):
        """Download `remote_file_name` to `local_file_name` via dtfClient.

        Returns RESP_OK on success, an error response code, or ERR_SOCK
        if the socket could not be opened.
        """
        # Get a connected socket
        sock = self.__sock_connect(DTF_SOCKET)
        if sock is None:
            log.e(TAG, "Cannot __do_download, socket failure.")
            return ERR_SOCK
        sock.send(CMD_DOWNLOAD)
        resp_code = self.__safe_recv(sock, 1, response=RESP_ERROR)
        if resp_code != RESP_OK:
            log.e(TAG, "Server rejected download request!")
            return resp_code
        # Filename is sent as a fixed-size NUL-padded field.
        padded_file_name = remote_file_name.ljust(SIZE_FILENAME, '\0')
        log.d(TAG, "Sending filename to server")
        sock.send(padded_file_name)
        log.d(TAG, "Filename sent.")
        binary_file_size = sock.recv(SIZE_LONG)
        # This is an error (a single byte here is an error code).
        if len(binary_file_size) == 1:
            return binary_file_size
        long_file_size = bytes_to_long(binary_file_size)
        log.d(TAG, "File size from server: %d" % long_file_size)
        sock.send(RESP_OK)
        local_f = open(local_file_name, 'wb')
        bytes_left = long_file_size
        transfer_success = False
        # Pull the file down in SIZE_TRANSFER chunks.
        while True:
            if bytes_left <= SIZE_TRANSFER:
                local_buf = self.__safe_recv(sock, bytes_left)
                if local_buf is None:
                    break
                local_f.write(local_buf)
                local_f.close()
                transfer_success = True
                break
            else:
                local_buf = self.__safe_recv(sock, SIZE_TRANSFER)
                if local_buf is None:
                    break
                local_f.write(local_buf)
                bytes_left -= SIZE_TRANSFER
        # NOTE(review): local_f remains open on the failure path.
        if not transfer_success:
            log.e(TAG, "Error downloading file!")
            return RESP_ERROR
        sock.send(RESP_OK)
        log.d(TAG, "Transfer complete!")
        return RESP_OK

    # pylint:disable=too-many-return-statements
    def __do_upload(self, local_file_name, remote_file_name):
        """Upload `local_file_name` as `remote_file_name` via dtfClient.

        Returns RESP_OK on success, an error response code, or ERR_SOCK
        if the socket could not be opened.
        """
        # Get a connected socket
        sock = self.__sock_connect(DTF_SOCKET)
        if sock is None:
            log.e(TAG, "Cannot __do_upload, socket failure.")
            return ERR_SOCK
        statinfo = os.stat(local_file_name)
        file_size = statinfo.st_size
        local_f = open(local_file_name, 'rb')
        sock.send(CMD_UPLOAD)
        resp_code = self.__safe_recv(sock, 1, response=RESP_ERROR)
        if resp_code != RESP_OK:
            log.e(TAG, "Server rejected upload request!")
            return resp_code
        log.d(TAG, "Sending filesize to server")
        sock.send(long_to_bytes(file_size))
        resp = sock.recv(1)
        if resp != RESP_OK:
            log.e(TAG, "Error submitting filesize!")
            return resp
        padded_file_name = remote_file_name.ljust(SIZE_FILENAME, '\0')
        log.d(TAG, "Sending the filename...")
        sock.send(padded_file_name)
        resp = self.__safe_recv(sock, 1, response=RESP_ERROR)
        if resp != RESP_OK:
            log.e(TAG, "Error with filename!")
            return resp
        bytes_left = file_size
        # Push the file up in SIZE_TRANSFER chunks.
        while True:
            if bytes_left <= SIZE_TRANSFER:
                sock.send(local_f.read(bytes_left))
                local_f.close()
                break
            else:
                sock.send(local_f.read(SIZE_TRANSFER))
                bytes_left -= SIZE_TRANSFER
        resp = self.__safe_recv(sock, 1, response=RESP_ERROR)
        if resp != RESP_OK:
            log.e(TAG, "Error uploading file!")
            return resp
        return RESP_OK

    def __do_execute(self, command_string):
        """Execute `command_string` on the device via dtfClient.

        Returns a (response_string, response_code) tuple.
        """
        response = None
        # Get a connected socket
        sock = self.__sock_connect(DTF_SOCKET)
        if sock is None:
            log.e(TAG, "Cannot __do_execute, socket failure.")
            return ("", ERR_SOCK)
        sock.send(CMD_EXECUTE)
        resp_code = self.__safe_recv(sock, 1, response=RESP_ERROR)
        if resp_code != RESP_OK:
            log.e(TAG, "Server rejected execute request!")
            return (response, resp_code)
        # Command is sent as a fixed-size NUL-padded field.
        full_command = command_string.ljust(SIZE_CMD, '\0')
        log.d(TAG, "Sending execute string to server")
        sock.send(full_command)
        log.d(TAG, "Command sent.")
        binary_cmd_size = sock.recv(SIZE_INTEGER)
        # This is an error.
        if len(binary_cmd_size) == 1:
            return (response, binary_cmd_size)
        int_cmd_size = bytes_to_int(binary_cmd_size)
        sock.send(RESP_OK)
        if int_cmd_size == 0:
            log.d(TAG, "Response is empty string!")
            return ("", RESP_OK)
        bytes_left = int_cmd_size
        response = ""
        transfer_success = False
        # Read the command output in SIZE_TRANSFER chunks.
        while True:
            if bytes_left <= SIZE_TRANSFER:
                local_buf = self.__safe_recv(sock, bytes_left)
                if local_buf is None:
                    break
                response += local_buf
                transfer_success = True
                break
            else:
                local_buf = self.__safe_recv(sock, SIZE_TRANSFER)
                if local_buf is None:
                    break
                response += local_buf
                bytes_left -= SIZE_TRANSFER
        if not transfer_success:
            log.e(TAG, "Error downloading file!")
            return ("", RESP_ERROR)
        sock.send(RESP_OK)
        log.d(TAG, "Command complete!")
        return (response, RESP_OK)

    # Public API Starts here
    def upload_file(self, local_file_name, remote_file):
        """Upload a file using the dtfClient"""
        self.__enable_forward()
        resp_code = self.__do_upload(local_file_name, remote_file)
        self.__disable_forward()
        return resp_code

    def download_file(self, remote_file_name, local_file):
        """Download a file using the dtfClient"""
        self.__enable_forward()
        resp_code = self.__do_download(remote_file_name, local_file)
        self.__disable_forward()
        return resp_code

    def execute_command(self, cmd_string):
        """Execute command using dtfClient"""
        if cmd_string == "":
            return (None, None)
        self.__enable_forward()
        output, resp_code = self.__do_execute(cmd_string)
        self.__disable_forward()
        return (output, resp_code)

    def set_to_usb(self):
        """Set current connection to USB mode"""
        self.adb.usb()

    def set_to_wifi(self, ip_addr, port):
        """Set current connection to TCP and connect"""
        if ip_addr is None or port is None:
            log.e(TAG, "IP and port cannot be none!")
            return None
        self.adb.tcpip(port)
        if self.adb.connect(ip_addr, port) is None:
            raise IOError
    # End public API
| {
"repo_name": "jakev/dtf",
"path": "python-dtf/dtf/client.py",
"copies": "2",
"size": "10785",
"license": "apache-2.0",
"hash": -3451163636892931600,
"line_mean": 25.6296296296,
"line_max": 78,
"alpha_frac": 0.5629114511,
"autogenerated": false,
"ratio": 3.90619340818544,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.546910485928544,
"avg_score": null,
"num_lines": null
} |
""" dtf Checker script """
from __future__ import absolute_import
import os
import os.path
import shlex
import subprocess
import sys
from argparse import ArgumentParser
import dtf.core.utils as utils
import dtf.core.packagemanager as pm
import dtf.core.item
import dtf.logging as log
from dtf.globals import DTF_DB, DTF_LIBRARIES_DIR
# Logging tag for this module.
TAG = "dtf_check"
def update_path():
    """Append each installed dtf library directory to sys.path.

    Does nothing when the dtf database file does not exist yet.
    Always returns 0.
    """
    if not os.path.isfile(DTF_DB):
        return 0
    for lib_name in pm.get_libraries(name_only=True):
        sys.path.append("%s/%s" % (DTF_LIBRARIES_DIR, lib_name))
    return 0
def check_required():
    """Determine if all dependencies are accounted for.

    Returns True only when every required external tool is on PATH;
    logs an error and returns False at the first missing one.
    """
    required_tools = ("pylint", "flake8", "shellcheck", "checkbashisms")
    for tool in required_tools:
        if utils.which(tool) == "":
            log.e(TAG, "%s is required!" % tool)
            return False
    return True
def do_checks(file_name, all_checks, strict_checks):
    """Dispatch the appropriate checks for the module at `file_name`.

    Returns the checker's exit code, -1 when the path is not a file,
    or -2 when the module is neither python nor bash.
    """
    # Update path to include libs
    update_path()
    if not os.path.isfile(file_name):
        log.e(TAG, "[FAIL] %s is not a file." % file_name)
        return -1
    base_name = os.path.basename(file_name)
    module_name = os.path.splitext(base_name)[0]
    log.d(TAG, "Full File: %s" % file_name)
    log.d(TAG, "Base name: %s" % base_name)
    log.d(TAG, "Module name: %s" % module_name)
    # First, is this python, or bash?
    if pm.is_python_module(file_name, module_name):
        log.i(TAG, "[PASS] Is python, doing python checks...")
        return do_python_checks(file_name, module_name,
                                all_checks, strict_checks)
    if pm.is_bash_module(file_name):
        log.i(TAG, "[PASS] Is bash, doing bash checks...")
        return do_bash_checks(file_name, module_name,
                              all_checks, strict_checks)
    log.e(TAG, "[FAIL] This is not recognized as either python or bash!")
    return -2
def do_python_checks(file_name, module_name, all_checks, strict_checks):
    """Run all python checks for a module.

    Auto-parse and metadata checks always run; pylint and flake8 run
    only when `all_checks` is set. Returns 0 on success, -1 otherwise.
    """
    # First attempt to auto parse
    parsed = pm.parse_python_module(file_name, module_name)
    if parsed is None:
        log.e(TAG, "[FAIL] Auto parse failed!")
        return -1
    if do_auto_checks(parsed, strict_checks) != 0:
        return -1
    if all_checks:
        for linter in ("pylint", "flake8"):
            log.d(TAG, "Running %s..." % linter)
            if run_command("%s \"%s\"" % (linter, file_name)) != 0:
                log.e(TAG, "[FAIL] %s failed." % linter)
                return -1
    log.i(TAG, "[PASS] All checks passed!")
    return 0
def do_bash_checks(file_name, module_name, all_checks, strict_checks):
    """Run all bash checks for a module.

    Requires the module to be executable; checkbashisms and shellcheck
    run only when `all_checks` is set. Returns 0 on success, -1 otherwise.
    """
    # Bash modules must be marked executable to run at all.
    if not utils.is_executable(file_name):
        log.e(TAG, "[FAIL] Module is not marked executable!")
        return -1
    # Next attempt to auto parse
    parsed = pm.parse_bash_module(file_name, module_name)
    if parsed is None:
        log.e(TAG, "[FAIL] Auto parse failed!")
        return -1
    if do_auto_checks(parsed, strict_checks) != 0:
        return -1
    if all_checks:
        lint_cmds = (("checkbashisms", "checkbashisms -f \"%s\"" % file_name),
                     ("shellcheck", "shellcheck \"%s\"" % file_name))
        for name, lint_cmd in lint_cmds:
            log.d(TAG, "Running %s..." % name)
            if run_command(lint_cmd) != 0:
                log.e(TAG, "[FAIL] %s failed." % name)
                return -1
    log.i(TAG, "[PASS] All checks passed!")
    return 0
def do_auto_checks(item, strict_checks):
    """Run the automated (parser-based) checks; 0 on success, -1 on failure."""
    # Version first, then the remaining metadata checks.
    if do_version_checks(item, strict_checks) != 0:
        return -1
    if do_other_checks(item, strict_checks) != 0:
        return -1
    return 0
def do_version_checks(item, strict):
    """Validate the module's version field.

    A missing version is a warning (fatal only under `strict`);
    a malformed version is always fatal. Returns 0 or -1.
    """
    version = item.version
    if version is None:
        log.w(TAG, "[WARN] Version is none, this should be set!")
        return -1 if strict else 0
    if not dtf.core.item.is_valid_version(version):
        log.e(TAG, "[FAIL] invalid version (must be semvar)")
        return -1
    log.i(TAG, "[PASS] Valid version.")
    return 0
def do_other_checks(item, strict):
    """Run the remaining automated checks (about and author fields).

    Missing fields are warnings (fatal only under `strict`).
    Returns 0 or -1.
    """
    checks = ((item.about, "[WARN] About string is none, this should be set!",
               "[PASS] Valid about."),
              (item.author, "[WARN] Author is none, this should be set!",
               "[PASS] Valid author."))
    for value, warn_msg, pass_msg in checks:
        if value is None:
            log.w(TAG, warn_msg)
            if strict:
                return -1
        else:
            log.i(TAG, pass_msg)
    return 0
def run_command(cmd):
    """Execute the shell-style command string `cmd`; return its exit code."""
    child = subprocess.Popen(shlex.split(cmd))
    child.communicate()
    return child.returncode
def main():
    """Main loop.

    Parses arguments, verifies external tool dependencies, applies the
    GLOG_LEVEL logging override, then runs the checks on the first
    module name given. Returns the checker exit code.

    Fix: corrected the user-facing typo "warnins" -> "warnings" in the
    --strict help text.
    """
    parser = ArgumentParser(prog='dtf_checker',
                            description='Check module for syntax & style.')
    parser.add_argument('module_name', metavar="module_name", type=str,
                        nargs='+', default=None,
                        help='The module to check.')
    parser.add_argument('-a', '--all', dest='all_checks', action='store_const',
                        const=True, default=False,
                        help="Run more vigorous checks.")
    parser.add_argument('-s', '--strict', dest='strict', action='store_const',
                        const=True, default=False,
                        help="Treat warnings as errors.")
    parsed_args = parser.parse_args()
    all_checks = parsed_args.all_checks
    strict_checks = parsed_args.strict
    # Only the first supplied module name is checked.
    module_name = parsed_args.module_name[0]
    # First, lets make sure have stuff
    if not check_required():
        return -1
    # Do python logging override
    try:
        log.LOG_LEVEL_STDOUT = int(os.environ['GLOG_LEVEL'])
    except KeyError:
        pass
    except ValueError:
        log.w(TAG, "Invalid GLOG_LEVEL value (0-5 is allowed)")
    # Do checks
    return do_checks(module_name, all_checks, strict_checks)
# Script entry point: exit with the status code from main().
if __name__ == "__main__":
    sys.exit(main())
| {
"repo_name": "android-dtf/dtf",
"path": "python-dtf/dtf/checker.py",
"copies": "2",
"size": "7348",
"license": "apache-2.0",
"hash": 2940988571323704000,
"line_mean": 25.3369175627,
"line_max": 79,
"alpha_frac": 0.5952640174,
"autogenerated": false,
"ratio": 3.650273224043716,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5245537241443716,
"avg_score": null,
"num_lines": null
} |
""" dtf Main Launcher """
from __future__ import absolute_import
from __future__ import print_function
import sys
import subprocess
import os
import os.path
# Coverage is used for tests.
try:
import coverage
except ImportError:
pass
else:
coverage.process_startup()
import dtf.constants as constants
import dtf.core.autoconfig as autoconfig
import dtf.core.utils as utils
import dtf.core.packagemanager as packagemanager
import dtf.packages as pkg
import dtf.logging as log
import dtf.globals as dtfglobals
# Check for version before anything
if sys.version_info < (2, 6, 0):
    sys.stderr.write("dtf requires python version 2.6 or higher!")
    sys.exit(1)

# Logging tag for this module.
TAG = "dtf"

# Built-in commands dispatched from within a project directory (see main()).
BUILT_IN_LIST = ['archive', 'client', 'local', 'prop', 'reset',
                 'status']
def usage():
    """Print the brief dtf usage banner; always returns -1."""
    banner = ("Android Device Testing Framework (dtf) v%s"
              % constants.VERSION)
    for line in (banner,
                 'Usage: dtf [module|command] <arguments>',
                 '',
                 "Run with '-h' or 'help' for additional information."):
        print(line)
    return -1
def usage_full():
    """Print full dtf usage.

    Lists every built-in command; always returns 0. The strings below
    are user-facing text -- keep wording and spacing exactly as-is.
    """
    print("Android Device Testing Framework (dtf) v%s"
          % constants.VERSION)
    print('Usage: dtf [module|command] <arguments>')
    print(' Built-in Commands:')
    print(' archive Archive your dtf project files.')
    print(' binding Print dtf helper bindings.')
    print(' client Install/remove the dtf client.')
    print(' help Prints this help screen.')
    print(' init Initializes a project.')
    print(' local Display all local modules.')
    print(' pm The dtf package manager.')
    print(' prop The dtf property manager.')
    print(' reset Removes the dtf config from current directory.')
    print(' status Determine if project device is attached.')
    print(' upgrade Perform dtf upgrades.')
    print(' version Print version number (--full for verbose).')
    return 0
def is_first_run():
    """Determine if this is the first run of dtf.

    First run means the included data directory has not been created yet.
    """
    return not os.path.isdir(dtfglobals.DTF_INCLUDED_DIR)
def find_built_in_module(cmd):
    """Return True when `cmd` names a built-in module."""
    return cmd in BUILT_IN_LIST
def check_dependencies():
    """Determine if all dependencies are accounted for.

    Requires adb on PATH and Java 1.7 or 1.8. Returns 0 when both are
    present, -5 otherwise.

    Fix: the "java missing" error previously claimed only "Java 1.7"
    was required, although 1.8 is also accepted by the check above.
    """
    # Check for adb
    if utils.which("adb") == "":
        log.e(TAG, "dtf requires `adb` (part of Android SDK)!")
        return -5
    # Check for Java
    if utils.which("java") != "":
        out = subprocess.check_output(["java", "-version"],
                                      stderr=subprocess.STDOUT)
        # NOTE(review): assumes Python 2 str output; on Python 3
        # check_output returns bytes -- confirm target version.
        java_ver = out.split('\n')[0]
        if java_ver.find("1.7") == -1 and java_ver.find("1.8") == -1:
            log.e(TAG, "dtf requires Java 1.7 or 1.8!")
            return -5
    else:
        log.e(TAG, "dtf requires Java 1.7 or 1.8!")
        return -5
    return 0
def print_version(args):
    """Print the version; `--full` as the first arg prints all components.

    Always returns 0.
    """
    if args and args[0] == '--full':
        python_version = constants.VERSION
        apk_version = dtfglobals.get_generic_global(
            dtfglobals.CONFIG_SECTION_CLIENT, 'apk_version')
        bundle_version = dtfglobals.get_generic_global(
            dtfglobals.CONFIG_SECTION_BINDINGS, 'version')
        print("Python Version: %s" % python_version)
        print("dtfClient Version: %s" % apk_version)
        print("Bindings Version Date: %s" % bundle_version)
    else:
        print(constants.VERSION)
    return 0
def main():
    """Main loop.

    Dispatch order: dependency check, first-run setup, then commands
    that work outside a project (help/version/pm/init/binding/upgrade),
    and finally -- from inside a project root -- built-ins, local
    modules, and installed modules. Returns the dispatched exit code.
    """
    rtn = 0
    # First, lets make sure dtf has the dependencies we want.
    if check_dependencies() != 0:
        return -2
    # If this is first run, we need to do a couple of things.
    # Note: I exit here; doesn't matter what you tried to run.
    if is_first_run():
        sys.exit(do_first_run_process())
    # Next, check args.
    if len(sys.argv) < 2:
        sys.exit(usage())
    # Remove the execute path
    sys.argv.pop(0)
    # Remove and store cmd_name (remaining argv is passed to the module)
    command_name = sys.argv.pop(0)
    # Help menu
    if command_name in ['-h', '--help', 'help']:
        sys.exit(usage_full())
    # Version information
    elif command_name in ['-v', '--version', 'version']:
        sys.exit(print_version(sys.argv))
    # Almost all commands with dtf require you to be in a project directory,
    # but some don't. Check for those next.
    elif command_name == 'pm':
        return pkg.launch_builtin_module('pm', sys.argv, chdir=False,
                                         skip_checks=True)
    elif command_name in ['init', 'binding', 'upgrade']:
        return pkg.launch_builtin_module(command_name, sys.argv,
                                         skip_checks=True)
    # Ok, now we need to get to the top of the project directory.
    project_root = utils.get_project_root()
    if project_root is None:
        log.e(TAG, "Unable to find a project root! Is this a dtf project?")
        return -3
    # Next, we check the following:
    # 1. Is it a built-in command?
    # 2. Is it a local module?
    # 3. Is it a module we know about?
    if find_built_in_module(command_name):
        rtn = pkg.launch_builtin_module(command_name, sys.argv)
    elif pkg.find_local_module(project_root, command_name):
        rtn = pkg.launch_local_module(project_root, command_name, sys.argv)
    elif pkg.is_module_installed(command_name):
        rtn = pkg.launch_module(command_name, sys.argv)
    else:
        log.e(TAG, "Module or command '%s' not found!" % command_name)
        rtn = -4
    return rtn
def do_first_run_process():
    """Perform the first-time-run installation steps.

    Creates the data directories, initializes the dtf database if it
    does not exist, and loads global settings. Returns 0 on success or
    a negative error code.
    """
    log.i(TAG, "First time launch of dtf detected...")
    # Set things up if they haven't been already
    if packagemanager.create_data_dirs() != 0:
        log.e(TAG, "Unable to setup dtf data directories!")
        return -4
    # Short-circuit: only initialize the db when the file is absent.
    if not os.path.isfile(dtfglobals.DTF_DB) and \
            packagemanager.initialize_db() != 0:
        log.e(TAG, "Error creating and populating dtf db!!")
        return -7
    if autoconfig.initialize_from_local(is_full=True) != 0:
        log.e(TAG, "Unable to initialize global settings!")
        return -5
    log.i(TAG, "Initial auto setup is completed!")
    return 0
# Script entry point: exit with the status code from main().
if __name__ == "__main__":
    sys.exit(main())
| {
"repo_name": "jakev/dtf",
"path": "python-dtf/dtf/launcher.py",
"copies": "2",
"size": "7087",
"license": "apache-2.0",
"hash": 8241999957593001000,
"line_mean": 27.5766129032,
"line_max": 76,
"alpha_frac": 0.6201495696,
"autogenerated": false,
"ratio": 3.728037874802735,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 248
} |
"""Integration tests for the general launcher"""
from __future__ import absolute_import
import dtf.testutils as testutils
def test_dtf():
    """Running dtf with no arguments exits with 255."""
    result = testutils.dtf("")
    assert result.return_code == 255
def test_version():
    """Every version flag variant exits cleanly."""
    for flag in ("--version", "-v", "version"):
        result = testutils.dtf(flag)
        assert result.return_code == 0
def test_help():
    """Every help flag variant exits cleanly."""
    for flag in ("--help", "-h", "help"):
        result = testutils.dtf(flag)
        assert result.return_code == 0
def test_non_project():
    """Running a built-in without a .dtfini exits with 253."""
    result = testutils.dtf("archive")
    assert result.return_code == 253
def test_python_precheck_sdk_missing():
    """A python module run with a missing SDK property exits with 248."""
    testutils.deploy_config_raw("")
    result = testutils.dtf("archive")
    testutils.undeploy()
    assert result.return_code == 248
def test_python_load_imp_exception():
    """A built-in that fails load_imp parsing exits with 247."""
    testutils.deploy_config_raw("")
    result = testutils.dtf("status")
    testutils.undeploy()
    assert result.return_code == 247
| {
"repo_name": "jakev/dtf",
"path": "python-dtf/tests/integration/test_dtf.py",
"copies": "2",
"size": "2020",
"license": "apache-2.0",
"hash": -2506270132906108000,
"line_mean": 21.9545454545,
"line_max": 74,
"alpha_frac": 0.6722772277,
"autogenerated": false,
"ratio": 3.435374149659864,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5107651377359864,
"avg_score": null,
"num_lines": null
} |
"""Integration tests for the "pm" utility"""
from __future__ import absolute_import
import dtf.testutils as testutils
class PmTests(testutils.BasicIntegrationTest):

    """Integration tests for the `pm` built-in."""

    def test_no_args(self):
        """`pm` with no arguments exits cleanly."""
        result = self.run_cmd("pm")
        assert result.return_code == 0

    def test_not_subcommand(self):
        """`pm` with an unknown subcommand still exits cleanly."""
        result = self.run_cmd("pm NOT_EXIST")
        assert result.return_code == 0
| {
"repo_name": "jakev/dtf",
"path": "python-dtf/tests/integration/pm/test_pm.py",
"copies": "2",
"size": "1140",
"license": "apache-2.0",
"hash": 3248918203303493600,
"line_mean": 29,
"line_max": 74,
"alpha_frac": 0.7035087719,
"autogenerated": false,
"ratio": 3.8,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.55035087719,
"avg_score": null,
"num_lines": null
} |
"""Integration tests for the "prop" utility"""
from __future__ import absolute_import
import dtf.testutils as testutils
def test_no_args():
    """`prop` with no arguments exits cleanly."""
    testutils.deploy_config(testutils.get_default_config())
    result = testutils.dtf("prop")
    testutils.undeploy()
    assert result.return_code == 0
def test_not_subcommand():
    """`prop` with an unknown subcommand still exits cleanly."""
    testutils.deploy_config(testutils.get_default_config())
    result = testutils.dtf("prop NON_EXIST")
    testutils.undeploy()
    assert result.return_code == 0
| {
"repo_name": "jakev/dtf",
"path": "python-dtf/tests/integration/prop/test_prop.py",
"copies": "2",
"size": "1197",
"license": "apache-2.0",
"hash": 7643663376108679000,
"line_mean": 25.6,
"line_max": 74,
"alpha_frac": 0.7218045113,
"autogenerated": false,
"ratio": 3.7058823529411766,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5427686864241177,
"avg_score": null,
"num_lines": null
} |
"""pytest for using dtf property manager"""
from __future__ import absolute_import
import pytest
import dtf.properties as prop
import dtf.testutils as testutils
# prop_set() tests
def test_set_new_property():
    """Set a brand-new property inside an existing section."""
    config = ("[info]\n"
              "real = not_real")
    testutils.deploy_config_raw(config)
    expected = '1'
    prop.set_prop('info', 'sdk', expected)
    assert prop.get_prop('info', 'sdk') == expected
    testutils.undeploy()
def test_set_new_section_property():
    """Set a property whose section does not exist yet."""
    testutils.deploy_config_raw("")
    expected = '1'
    prop.set_prop('info', 'sdk', expected)
    assert prop.get_prop('info', 'sdk') == expected
    testutils.undeploy()
    return 0
def test_set_existing_property():
    """Overwrite a property that already exists."""
    config = ("[Info]\n"
              "sdk = old")
    testutils.deploy_config_raw(config)
    expected = 'new'
    prop.set_prop('info', 'sdk', expected)
    assert prop.get_prop('info', 'sdk') == expected
    testutils.undeploy()
    return 0
def test_set_property_casing():
    """Set via an upper-case section, read back with mixed casing."""
    testutils.deploy_config_raw("")
    expected = '1'
    prop.set_prop('INFO', 'sdk', expected)
    for section in ('info', 'Info', 'INFO'):
        assert prop.get_prop(section, 'sdk') == expected
    testutils.undeploy()
    return 0
# prop_get() tests
def test_get_empty_config():
    """Getting from an empty config raises PropertyError."""
    testutils.deploy_config_raw("")
    # No section/option present at all.
    with pytest.raises(prop.PropertyError):
        prop.get_prop('info', 'sdk')
    testutils.undeploy()
    return 0
def test_get_property():
    """Get a property that exists."""
    expected = '23'
    config = ("[Info]\n"
              "sdk = %s" % expected)
    testutils.deploy_config_raw(config)
    assert prop.get_prop('info', 'sdk') == expected
    testutils.undeploy()
    return 0
def test_get_property_no_option():
    """Getting a missing option raises PropertyError."""
    config = ("[Info]\n"
              "vmtype = arm64")
    testutils.deploy_config_raw(config)
    with pytest.raises(prop.PropertyError):
        prop.get_prop('info', 'sdk')
    testutils.undeploy()
    return 0
def test_get_property_casing():
    """Get a property using alternating section casing."""
    expected = '23'
    config = ("[Info]\n"
              "sdk = %s" % expected)
    testutils.deploy_config_raw(config)
    for section in ('info', 'Info', 'INFO'):
        assert prop.get_prop(section, 'sdk') == expected
    testutils.undeploy()
    return 0
# prop_del() tests
def test_del_empty_config():
    """Deleting from an empty config fails with a non-zero status."""
    testutils.deploy_config_raw("")
    assert prop.del_prop('info', 'sdk') != 0
    testutils.undeploy()
    return 0
def test_del_property():
    """Delete a property that exists."""
    config = ("[Info]\n"
              "sdk = 23")
    testutils.deploy_config_raw(config)
    prop.del_prop('info', 'sdk')
    testutils.undeploy()
    return 0
def test_del_property_invalid():
    """Deleting a missing property fails with a non-zero status."""
    config = ("[Info]\n"
              "vmtype = 64")
    testutils.deploy_config_raw(config)
    assert prop.del_prop('info', 'sdk') != 0
    testutils.undeploy()
    return 0
def test_del_property_casing():
    """Delete a property using different section casing."""
    expected = '23'
    config = ("[Info]\n"
              "sdk = %s" % expected)
    testutils.deploy_config_raw(config)
    prop.del_prop('info', 'sdk')
    testutils.undeploy()
    return 0
# prop_test() tests
def test_test_empty_config():
    """Testing a property against an empty config yields 0."""
    testutils.deploy_config_raw("")
    assert prop.test_prop('info', 'sdk') == 0
    testutils.undeploy()
    return 0
def test_test_property():
    """Testing an existing property yields 1."""
    config = ("[Info]\n"
              "sdk = 23")
    testutils.deploy_config_raw(config)
    assert prop.test_prop('info', 'sdk') == 1
    testutils.undeploy()
    return 0
def test_test_invalid_property():
    """Testing a missing property yields 0."""
    config = ("[Info]\n"
              "vmtype = arm64")
    testutils.deploy_config_raw(config)
    assert prop.test_prop('info', 'sdk') == 0
    testutils.undeploy()
    return 0
def test_test_property_casing():
    """Test a property using different section casing."""
    expected = '23'
    config = ("[Info]\n"
              "sdk = %s" % expected)
    testutils.deploy_config_raw(config)
    assert prop.test_prop('info', 'sdk') == 1
    testutils.undeploy()
    return 0
| {
"repo_name": "jakev/dtf",
"path": "python-dtf/tests/unit/test_prop.py",
"copies": "2",
"size": "5430",
"license": "apache-2.0",
"hash": 7465317381772302000,
"line_mean": 18.1197183099,
"line_max": 74,
"alpha_frac": 0.6121546961,
"autogenerated": false,
"ratio": 3.5653315824031515,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5177486278503152,
"avg_score": null,
"num_lines": null
} |
"""Wrapper API for using colors in dtf modules"""
from __future__ import absolute_import
from colored import fg, attr
import dtf.globals as glbl
# ANSI foreground escape strings from the `colored` package, one per
# log level (values are palette indices passed to fg()).
COLOR_ERR = fg(1)
COLOR_WARN = fg(3)
COLOR_INFO = fg(2)
COLOR_VERB = fg(6)
COLOR_DEB = fg(5)
def __use_colors():
    """Return True when the global config enables colored output."""
    return glbl.get_generic_global('Config', 'use_colors') == '1'
def error(message):
    """Color format a message for errors."""
    if not __use_colors():
        return message
    return "%s%s%s" % (COLOR_ERR, message, attr(0))
def warning(message):
    """Color format a message for warnings."""
    if not __use_colors():
        return message
    return "%s%s%s" % (COLOR_WARN, message, attr(0))
def info(message):
    """Color format a message for informational messages."""
    if not __use_colors():
        return message
    return "%s%s%s" % (COLOR_INFO, message, attr(0))
def verbose(message):
    """Color format a message for verbose messages."""
    if not __use_colors():
        return message
    return "%s%s%s" % (COLOR_VERB, message, attr(0))
def debug(message):
    """Color format a message for debugging."""
    if not __use_colors():
        return message
    return "%s%s%s" % (COLOR_DEB, message, attr(0))
def bold(message):
    """Format a bold message."""
    if not __use_colors():
        return message
    return "%s%s%s" % (attr('bold'), message, attr(0))
| {
"repo_name": "jakev/dtf",
"path": "python-dtf/dtf/colors.py",
"copies": "2",
"size": "2125",
"license": "apache-2.0",
"hash": 506867387172876400,
"line_mean": 21.6063829787,
"line_max": 74,
"alpha_frac": 0.6423529412,
"autogenerated": false,
"ratio": 3.589527027027027,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5231879968227027,
"avg_score": null,
"num_lines": null
} |
"""Compatibility helpers"""
# pylint: disable=invalid-name,input-builtin,raw_input-builtin
# pylint: disable=wrong-import-position
from __future__ import absolute_import
try:
raw_input = raw_input # pylint: disable=redefined-builtin
except NameError:
raw_input = input
try:
from io import StringIO
except ImportError:
from cStringIO import StringIO
StringIO = StringIO
try:
import urlparse
except ImportError:
# pylint: disable=no-name-in-module,import-error
import urllib.parse as urlparse
urlparse = urlparse
| {
"repo_name": "jakev/dtf",
"path": "python-dtf/dtf/core/compat.py",
"copies": "2",
"size": "1187",
"license": "apache-2.0",
"hash": -5687848118975606000,
"line_mean": 27.9512195122,
"line_max": 74,
"alpha_frac": 0.7548441449,
"autogenerated": false,
"ratio": 3.9304635761589406,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 41
} |
"""dtf Libraries Template"""
from __future__ import absolute_import
import os.path
import sqlite3
import dtf.core.utils as utils
import dtf.properties as prop
TAG = "dtf-library"
class DtfDbException(Exception):  # pylint: disable=too-few-public-methods
    """Base class for DB Exceptions raised by dtf database libraries."""
    def __init__(self, message):
        """Create the exception carrying a human-readable message."""
        super(DtfDbException, self).__init__(message)
class DbLibrary(object):
    """
    Base class for creating a python database library with dtf.

    Subclasses must set `db_name`; the sqlite file is resolved inside the
    project's DB directory at construction time.
    """
    db_name = ""          # must be overridden by subclasses
    db_path = ""          # resolved absolute-ish path, set in __init__
    db_connection = None  # sqlite3 connection handle
    def __init__(self, safe=False, project_dir=None):
        """Open the backing DB; with safe=True the file must already exist."""
        base_dir = prop.TOP if project_dir is None else project_dir
        resolved = "%s/%s/%s" % (base_dir, utils.DBS_DIRECTORY,
                                 self.db_name)
        if safe and not os.path.isfile(resolved):
            raise DtfDbException("Database file not found : %s!" % resolved)
        self.db_path = resolved
        self.db_connection = sqlite3.connect(resolved)
        # Give subclasses a hook for additional setup.
        self.post_init()
    def post_init(self):
        """Hook invoked at the end of __init__; override as needed."""
        pass
    # The following are not meant to be overridden.
    def commit(self):
        """Commit pending DB changes."""
        if self.db_connection is None:
            raise DtfDbException("No active DB connection!")
        return self.db_connection.commit()
    def get_cursor(self):
        """Return a new cursor for the open connection."""
        if self.db_connection is None:
            raise DtfDbException("No active DB connection!")
        return self.db_connection.cursor()
    def close(self):
        """Close the DB connection handle."""
        if self.db_connection is None:
            raise DtfDbException("No Active DB connection!")
        self.db_connection.close()
        return
    @classmethod
    def exists(cls):
        """Return True when the class's DB file exists on disk."""
        candidate = "%s/%s/%s" % (prop.TOP, utils.DBS_DIRECTORY,
                                  cls.db_name)
        return os.path.isfile(candidate)
| {
"repo_name": "android-dtf/dtf",
"path": "python-dtf/dtf/library.py",
"copies": "2",
"size": "3032",
"license": "apache-2.0",
"hash": -760827638996407700,
"line_mean": 25.1379310345,
"line_max": 75,
"alpha_frac": 0.6184036939,
"autogenerated": false,
"ratio": 4.03728362183755,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.565568731573755,
"avg_score": null,
"num_lines": null
} |
"""Helpers for interacting with manifest.xml + packages"""
from __future__ import absolute_import
import os
import tempfile
import zipfile
from shutil import copy, rmtree
from lxml import etree
import dtf.globals
import dtf.logging as log
import dtf.core.item
import dtf.core.utils as utils
TAG = "dtf-manifestparser"
MANIFEST_NAME = "manifest.xml"
def __get_xml_attrib(element, attrib, default=None):
    """Return the value of `attrib` on `element`, or `default` when absent."""
    # attrib behaves like a mapping, so .get replaces the try/except KeyError.
    return element.attrib.get(attrib, default)
def __item_from_xml(item, relative_root="./"):
    """Build a dtf.core.item.Item from an <Item> XML element.

    Returns None (after logging) when required attributes are missing or
    the declared type is not a recognized one.
    """
    xml_type = __get_xml_attrib(item, "type")
    if xml_type is None:
        log.e(TAG, "Found tag with no 'type' attribute, skipping!")
        return None
    if xml_type not in dtf.core.item.VALID_TYPES:
        log.e(TAG, "Illegal 'type' attribute found, skipping!")
        return None
    xml_name = __get_xml_attrib(item, "name")
    if xml_name is None:
        log.e(TAG, "Found NULL named moduled, skipping!")
        return None
    # Generic attributes shared by every item type.
    parsed = dtf.core.item.Item()
    parsed.type = xml_type
    parsed.version = __get_xml_attrib(item, "version")
    parsed.author = __get_xml_attrib(item, "author")
    parsed.about = __get_xml_attrib(item, "about")
    if parsed.version is None:
        log.w(TAG, "No version for '%s', using 1.0.0" % xml_name)
        parsed.version = "1.0.0"
    installed_as = __get_xml_attrib(item, "installName")
    stored_as = __get_xml_attrib(item, "localName")
    if installed_as is None:
        log.d(TAG, "install_name is null, using name...")
        installed_as = xml_name
    if stored_as is None:
        log.d(TAG, "local_name is null, using name...")
        stored_as = xml_name
    else:
        # Local paths are relative to the manifest's directory.
        stored_as = os.path.normpath("%s/%s" % (relative_root, stored_as))
    parsed.name = xml_name
    parsed.install_name = installed_as
    parsed.local_name = stored_as
    return parsed
def parse_manifest(manifest_data, relative_root="./"):
    """Parse a blob of manifest XML into a Manifest object.

    Returns -4 (legacy error sentinel) when the XML cannot be parsed.
    """
    try:
        root = etree.XML(manifest_data)
    except etree.XMLSyntaxError:
        log.e(TAG, "Error parsing XML file! Exiting.")
        return -4
    # Convert each <Item> element, dropping the malformed ones (None).
    converted = [__item_from_xml(node, relative_root=relative_root)
                 for node in root.xpath("/Items/Item")]
    manifest = Manifest()
    manifest.items = [entry for entry in converted if entry is not None]
    manifest.root_dir = relative_root
    return manifest
def parse_manifest_file(manifest_file_name):
    """Parse a standalone manifest.xml file.

    Returns a Manifest object (or -4 on XML parse errors, matching
    parse_manifest's legacy sentinel).
    """
    # Use a context manager so the handle is closed deterministically;
    # the previous open(...).read() leaked the file object until GC.
    with open(manifest_file_name) as manifest_f:
        manifest_data = manifest_f.read()
    root_dir = os.path.normpath(os.path.dirname(manifest_file_name))
    return parse_manifest(manifest_data, relative_root=root_dir)
class Manifest(object):
    """Class wrapper for interacting with manifest.xml.

    Holds the list of Items described by a manifest and serializes them
    back to XML for export.
    """
    # List of dtf.core.item.Item objects in this manifest.
    items = None
    # Directory that relative localName paths are resolved against.
    root_dir = ""
    def __init__(self):
        """Initializer method"""
        self.items = list()
    @classmethod
    def _items_to_xml(cls, etree_root, export_items, item_type,
                      subdir, export_about=False):
        """Serialize every item of `item_type` as an <Item> child.

        Shared implementation behind the four public item_to_xml_*
        classmethods (they were copy-paste duplicates).  `subdir`
        prefixes each item's localName; `export_about` additionally
        emits the 'about' attribute (modules only).  Attribute insertion
        order and log messages match the original per-type methods.
        """
        typed_items = [item for item in export_items
                       if item.type == item_type]
        for item in typed_items:
            item_xml = etree.SubElement(etree_root, 'Item')
            item_xml.attrib['type'] = item_type
            item_xml.attrib['name'] = item.name
            if item.version is None:
                log.w(TAG, "Skipping version for %s" % item.name)
            else:
                item_xml.attrib['version'] = item.version
            if export_about:
                if item.about is None:
                    log.w(TAG, "Skipping about for %s" % item.name)
                else:
                    item_xml.attrib['about'] = item.about
            if item.author is None:
                log.w(TAG, "Skipping author for %s" % item.name)
            else:
                item_xml.attrib['author'] = item.author
            item_xml.attrib['localName'] = "%s/%s" % (subdir,
                                                      item.install_name)
        return
    @classmethod
    def item_to_xml_binaries(cls, etree_root, export_items):
        """Export all binaries"""
        cls._items_to_xml(etree_root, export_items,
                          dtf.core.item.TYPE_BINARY, "binaries")
        return
    @classmethod
    def item_to_xml_libraries(cls, etree_root, export_items):
        """Export all libraries"""
        cls._items_to_xml(etree_root, export_items,
                          dtf.core.item.TYPE_LIBRARY, "libraries")
        return
    @classmethod
    def item_to_xml_modules(cls, etree_root, export_items):
        """Export all modules (modules also carry an 'about' attribute)"""
        cls._items_to_xml(etree_root, export_items,
                          dtf.core.item.TYPE_MODULE, "modules",
                          export_about=True)
        return
    @classmethod
    def item_to_xml_packages(cls, etree_root, export_items):
        """Export all packages"""
        cls._items_to_xml(etree_root, export_items,
                          dtf.core.item.TYPE_PACKAGE, "packages")
        return
    def to_string(self):
        """Generate the <Items> XML document for all items.

        Returns an open NamedTemporaryFile containing the pretty-printed
        XML.  The file is removed when the handle is closed/collected, so
        callers must read or copy it first.
        """
        temp_manifest_f = tempfile.NamedTemporaryFile()
        root = etree.Element('Items')
        # Add binaries
        self.item_to_xml_binaries(root, self.items)
        # Add libraries
        self.item_to_xml_libraries(root, self.items)
        # Add modules
        self.item_to_xml_modules(root, self.items)
        # Add packages
        self.item_to_xml_packages(root, self.items)
        # Write it all out
        export_tree = etree.ElementTree(root)
        export_tree.write(temp_manifest_f, pretty_print=True)
        temp_manifest_f.flush()
        return temp_manifest_f
class ExportZip(object):
    """Class wrapper for interacting with ZIP exports.

    Opens an existing export ZIP (parsing its bundled manifest.xml) or
    creates a new empty one; finalize() writes the manifest plus content.
    """
    # Path of the ZIP archive backing this export.
    zip_name = ""
    def __init__(self, zip_name):
        """Open `zip_name` for reading if it exists, else create it."""
        self.zip_name = zip_name
        self.manifest = Manifest()
        # If the file exists, we open and parse the manifest.
        if os.path.isfile(zip_name):
            self.zip_f = zipfile.ZipFile(zip_name, 'r',
                                         compression=zipfile.ZIP_DEFLATED)
            # An export without a manifest is unusable; bail out early.
            if not utils.file_in_zip(self.zip_f, MANIFEST_NAME):
                log.e(TAG, "Manifest doesnt exist in ZIP!")
                raise KeyError
            manifest_data = self.zip_f.read(MANIFEST_NAME)
            # NOTE(review): parse_manifest returns -4 on XML errors; that
            # sentinel is not checked here -- confirm callers tolerate it.
            self.manifest = parse_manifest(manifest_data)
        # Otherwise, we just create a new one.
        else:
            self.zip_f = zipfile.ZipFile(zip_name, 'w',
                                         compression=zipfile.ZIP_DEFLATED)
            self.manifest = Manifest()
    def __add_file(self, subdir, item):
        """Add a single file to the given subdirectory of the ZIP."""
        install_to = "%s/%s" % (subdir, item.install_name)
        log.d(TAG, "Adding '%s' as '%s'"
              % (item.install_name, install_to))
        self.zip_f.write(item.local_name, install_to)
    def __add_tree(self, subdir, item):
        """Add an entire on-disk directory tree to the ZIP."""
        for root, _, files in os.walk(item.local_name):
            for file_name in files:
                file_path = os.path.join(root, file_name)
                # Strip the local prefix so archive paths are relative
                # to subdir/<item name>/.
                stripped_path = file_path.replace(item.local_name, "", 1)
                install_to = os.path.normpath("%s/%s/%s"
                                              % (subdir, item.name,
                                                 stripped_path))
                log.d(TAG, "Adding dir '%s' as '%s'"
                      % (file_path, install_to))
                self.zip_f.write(file_path, install_to)
    def __copy_zip_file(self, local_name, install_name, install_dir):
        """Copy a file member of the ZIP to a directory; returns 0."""
        install_path = install_dir + install_name
        log.d(TAG, "Copying '%s' to '%s'..." % (local_name, install_path))
        # Stage the member in a temp file, copy it into place, then mark
        # it executable (0755) before removing the staging file.
        # NOTE(review): zip_f.read() returns bytes but the temp file is
        # opened in text mode ('w') -- looks like this breaks under
        # Python 3; confirm.
        temp_f = tempfile.NamedTemporaryFile(mode='w', delete=False)
        temp_f.write(self.zip_f.read(local_name))
        temp_f.flush()
        copy(temp_f.name, install_path)
        os.chmod(install_path, 0o755)
        temp_f.close()
        os.remove(temp_f.name)
        log.d(TAG, "Copy complete!")
        return 0
    def __copy_zip_tree(self, local_name, install_name, install_dir):
        """Copy a directory member of the ZIP to a directory; returns 0."""
        install_path = install_dir + install_name + '/'
        # We need to remove the first one
        rmtree(install_path, ignore_errors=True)
        # Extract only the members that live under local_name.
        reduced_list = [file_f for file_f in self.zip_f.namelist()
                        if file_f.startswith(local_name) and
                        file_f != local_name+'/']
        # NOTE(review): extraction targets DTF_DATA_DIR rather than
        # install_dir -- presumably install paths are rooted there;
        # verify against callers.
        self.zip_f.extractall(dtf.globals.DTF_DATA_DIR, reduced_list)
        log.d(TAG, "Copy complete!")
        return 0
    def __add_export_content(self):
        """Add each manifest item's backing content to the ZIP."""
        # Binaries/modules are single files; libraries/packages are trees.
        for item in self.manifest.items:
            if item.type == dtf.core.item.TYPE_BINARY:
                self.__add_file("binaries/", item)
            elif item.type == dtf.core.item.TYPE_LIBRARY:
                self.__add_tree("libraries/", item)
            elif item.type == dtf.core.item.TYPE_MODULE:
                self.__add_file("modules/", item)
            elif item.type == dtf.core.item.TYPE_PACKAGE:
                self.__add_tree("packages/", item)
    def add_item(self, item):
        """Add an item to the manifest's item list."""
        self.manifest.items.append(item)
    def assert_item(self, item):
        """Return True when the item's content is present in the ZIP.

        Modules/binaries must exist as file members; libraries/packages
        must exist as directory members.
        """
        if item.type in [dtf.core.item.TYPE_MODULE,
                         dtf.core.item.TYPE_BINARY]:
            if not utils.file_in_zip(self.zip_f, item.local_name):
                return False
        elif item.type in [dtf.core.item.TYPE_LIBRARY,
                           dtf.core.item.TYPE_PACKAGE]:
            if not utils.directory_in_zip(self.zip_f, item.local_name):
                return False
        return True
    def iter_items(self):
        """Return an iterable list of the manifest's items."""
        return self.manifest.items
    def install_item_to(self, item, dest_dir):
        """Install an item to `dest_dir`; returns 0 on success, -1 when
        the item has no backing content in the ZIP."""
        if utils.file_in_zip(self.zip_f, item.local_name):
            return self.__copy_zip_file(item.local_name,
                                        item.install_name, dest_dir)
        elif utils.directory_in_zip(self.zip_f, item.local_name):
            return self.__copy_zip_tree(item.local_name,
                                        item.install_name, dest_dir)
        else:
            return -1
    def finalize(self):
        """Write the manifest and all item content, then close the ZIP."""
        # Add the manifest XML
        temp_manifest = self.manifest.to_string()
        self.zip_f.write(temp_manifest.name, MANIFEST_NAME)
        # Now, for each item in the manifest, add the actual content
        self.__add_export_content()
        # Close it all out
        self.zip_f.close()
| {
"repo_name": "jakev/dtf",
"path": "python-dtf/dtf/core/manifestparser.py",
"copies": "2",
"size": "13559",
"license": "apache-2.0",
"hash": -6254035857757036000,
"line_mean": 28.2219827586,
"line_max": 77,
"alpha_frac": 0.5726823512,
"autogenerated": false,
"ratio": 3.817286036036036,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 464
} |
"""Integration tests for the "pm repo" utility"""
from __future__ import absolute_import
import dtf.testutils as testutils
class PmRepoTests(testutils.BasicIntegrationTest):
    """Integration tests for the "pm repo" subcommand"""
    def test_no_args(self):
        """Invoking repo with no arguments succeeds"""
        result = self.run_cmd("pm repo")
        assert result.return_code == 0
    def test_invalid_cmd(self):
        """An unknown repo subcommand fails with 255"""
        result = self.run_cmd("pm repo NOTHING")
        assert result.return_code == 255
    def test_repo_add_valid(self):
        """Adding a well-formed repo succeeds"""
        result = self.run_cmd("pm repo add core-mods https://somethingsilly.com")
        assert result.return_code == 0
    def test_repo_add_wrong_args(self):
        """`repo add` without its arguments fails with 255"""
        result = self.run_cmd("pm repo add")
        assert result.return_code == 255
    def test_repo_add_invalid_url(self):
        """A repo location that is not a URL is rejected with 254"""
        result = self.run_cmd("pm repo add core-mods somethingsilly.com")
        assert result.return_code == 254
    def test_repo_add_already_exists(self):
        """Re-adding a repo with the same name fails with 253"""
        result = self.run_cmd("pm repo add core-mods https://somethingsilly.com")
        assert result.return_code == 0
        result = self.run_cmd("pm repo add core-mods https://somethingsilly.com")
        assert result.return_code == 253
    def test_repo_remove_valid(self):
        """A repo that was added can be removed"""
        result = self.run_cmd("pm repo add core-mods https://somethingsilly.com")
        assert result.return_code == 0
        result = self.run_cmd("pm repo remove core-mods")
        assert result.return_code == 0
    def test_repo_remove_wrong_args(self):
        """`repo remove` without a name fails with 255"""
        result = self.run_cmd("pm repo remove")
        assert result.return_code == 255
    def test_repo_remove_nonexist(self):
        """Removing a repo that does not exist fails with 253"""
        result = self.run_cmd("pm repo remove silly")
        assert result.return_code == 253
    def test_repo_list_empty(self):
        """Listing with no repos configured succeeds"""
        result = self.run_cmd("pm repo list")
        assert result.return_code == 0
    def test_repo_list_valid(self):
        """Listing after adding a repo succeeds"""
        result = self.run_cmd("pm repo add mods-core https://silly.com")
        assert result.return_code == 0
        result = self.run_cmd("pm repo list")
        assert result.return_code == 0
| {
"repo_name": "android-dtf/dtf",
"path": "python-dtf/tests/integration/pm/test_pm_repo.py",
"copies": "2",
"size": "3105",
"license": "apache-2.0",
"hash": 7594503449437171000,
"line_mean": 27.2272727273,
"line_max": 78,
"alpha_frac": 0.6305958132,
"autogenerated": false,
"ratio": 3.540478905359179,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5171074718559179,
"avg_score": null,
"num_lines": null
} |
"""Item class"""
# Eventually this will be changed
# pylint: disable=too-many-instance-attributes
from __future__ import absolute_import
import semantic_version
# Canonical content-type names used in manifests and installs.
TYPE_MODULE = 'module'
TYPE_LIBRARY = 'library'
TYPE_BINARY = 'binary'
TYPE_PACKAGE = 'package'
# Every type a manifest <Item> may legally declare.
VALID_TYPES = [TYPE_BINARY, TYPE_LIBRARY,
               TYPE_MODULE, TYPE_PACKAGE]
def is_valid_version(version_string):
    """Return True when `version_string` parses as a semantic version."""
    try:
        semantic_version.Version(version_string)
        return True
    except ValueError:
        return False
def item_is_newer(installed_item, item):
    """Return True when `item`'s version is newer than the installed one."""
    installed = semantic_version.Version(installed_item.version)
    candidate = semantic_version.Version(item.version)
    return installed < candidate
class Item(object):  # pylint: disable=too-few-public-methods
    """Class for working with content"""
    # Class-level defaults; __init__ re-sets them on every instance.
    install_name = None
    local_name = None
    name = None
    # BUGFIX: this was `type = type`, which bound the `type` builtin as the
    # class attribute instead of a null default like its siblings.
    type = None
    author = None
    about = None
    version = None
    def __init__(self):
        """Initialize new object"""
        self.install_name = None
        self.local_name = None
        self.name = None
        self.type = None
        self.author = None
        self.about = None
        self.version = None
    def __repr__(self):
        """Tostring for item"""
        temp = "Name: %s (%s)\n" % (self.name, self.type)
        # Only modules carry an 'about' blurb.
        if self.type == TYPE_MODULE:
            temp += "  About: %s\n" % self.about
        temp += "  Installs as: %s\n" % self.install_name
        temp += "  Author: %s\n" % self.author
        temp += "  Version: %s\n" % self.version
        return temp
| {
"repo_name": "android-dtf/dtf",
"path": "python-dtf/dtf/core/item.py",
"copies": "2",
"size": "2273",
"license": "apache-2.0",
"hash": 1642358140430819000,
"line_mean": 25.1264367816,
"line_max": 74,
"alpha_frac": 0.6454025517,
"autogenerated": false,
"ratio": 3.801003344481605,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5446405896181605,
"avg_score": null,
"num_lines": null
} |
# androidhelper_r6.py - for SL4A R6, created on 13-Jul-2012
#
# To simplify development of SL4A Python scripts in IDEs by enabling hints &
# autocompletion through a helper class defining API functions containing
# help text in DocStrings
#
# To use, copy androidhelper_r6.py to your script's folder and import it as below
# try:
# import androidhelper_r6 as android
# except:
# import android
#
# Generated using AndroidHelperPyGenerator.java
# Licensed under Apache License, Ver. 2.0, http://www.apache.org/licenses/LICENSE-2.0
import android
class Android(android.Android):
#******** ActivityResultFacade Functions : min SDK 3 ********
def setResultBoolean(self,resultCode,resultValue):
'''
setResultBoolean(
Integer resultCode: The result code to propagate back to the originating activity, often RESULT_CANCELED (0) or RESULT_OK (-1),
Boolean resultValue)
Sets the result of a script execution. Whenever the script APK is called via startActivityForResult(), the resulting intent will contain SCRIPT_RESULT extra with the given value.
'''
return self._rpc("setResultBoolean",resultCode,resultValue)
def setResultBooleanArray(self,resultCode,resultValue):
'''
setResultBooleanArray(
Integer resultCode: The result code to propagate back to the originating activity, often RESULT_CANCELED (0) or RESULT_OK (-1),
Boolean[] resultValue)
Sets the result of a script execution. Whenever the script APK is called via startActivityForResult(), the resulting intent will contain SCRIPT_RESULT extra with the given value.
'''
return self._rpc("setResultBooleanArray",resultCode,resultValue)
def setResultByte(self,resultCode,resultValue):
'''
setResultByte(
Integer resultCode: The result code to propagate back to the originating activity, often RESULT_CANCELED (0) or RESULT_OK (-1),
Byte resultValue)
Sets the result of a script execution. Whenever the script APK is called via startActivityForResult(), the resulting intent will contain SCRIPT_RESULT extra with the given value.
'''
return self._rpc("setResultByte",resultCode,resultValue)
def setResultByteArray(self,resultCode,resultValue):
'''
setResultByteArray(
Integer resultCode: The result code to propagate back to the originating activity, often RESULT_CANCELED (0) or RESULT_OK (-1),
Byte[] resultValue)
Sets the result of a script execution. Whenever the script APK is called via startActivityForResult(), the resulting intent will contain SCRIPT_RESULT extra with the given value.
'''
return self._rpc("setResultByteArray",resultCode,resultValue)
def setResultChar(self,resultCode,resultValue):
'''
setResultChar(
Integer resultCode: The result code to propagate back to the originating activity, often RESULT_CANCELED (0) or RESULT_OK (-1),
Character resultValue)
Sets the result of a script execution. Whenever the script APK is called via startActivityForResult(), the resulting intent will contain SCRIPT_RESULT extra with the given value.
'''
return self._rpc("setResultChar",resultCode,resultValue)
def setResultCharArray(self,resultCode,resultValue):
'''
setResultCharArray(
Integer resultCode: The result code to propagate back to the originating activity, often RESULT_CANCELED (0) or RESULT_OK (-1),
Character[] resultValue)
Sets the result of a script execution. Whenever the script APK is called via startActivityForResult(), the resulting intent will contain SCRIPT_RESULT extra with the given value.
'''
return self._rpc("setResultCharArray",resultCode,resultValue)
def setResultDouble(self,resultCode,resultValue):
'''
setResultDouble(
Integer resultCode: The result code to propagate back to the originating activity, often RESULT_CANCELED (0) or RESULT_OK (-1),
Double resultValue)
Sets the result of a script execution. Whenever the script APK is called via startActivityForResult(), the resulting intent will contain SCRIPT_RESULT extra with the given value.
'''
return self._rpc("setResultDouble",resultCode,resultValue)
def setResultDoubleArray(self,resultCode,resultValue):
'''
setResultDoubleArray(
Integer resultCode: The result code to propagate back to the originating activity, often RESULT_CANCELED (0) or RESULT_OK (-1),
Double[] resultValue)
Sets the result of a script execution. Whenever the script APK is called via startActivityForResult(), the resulting intent will contain SCRIPT_RESULT extra with the given value.
'''
return self._rpc("setResultDoubleArray",resultCode,resultValue)
def setResultFloat(self,resultCode,resultValue):
'''
setResultFloat(
Integer resultCode: The result code to propagate back to the originating activity, often RESULT_CANCELED (0) or RESULT_OK (-1),
Float resultValue)
Sets the result of a script execution. Whenever the script APK is called via startActivityForResult(), the resulting intent will contain SCRIPT_RESULT extra with the given value.
'''
return self._rpc("setResultFloat",resultCode,resultValue)
def setResultFloatArray(self,resultCode,resultValue):
'''
setResultFloatArray(
Integer resultCode: The result code to propagate back to the originating activity, often RESULT_CANCELED (0) or RESULT_OK (-1),
Float[] resultValue)
Sets the result of a script execution. Whenever the script APK is called via startActivityForResult(), the resulting intent will contain SCRIPT_RESULT extra with the given value.
'''
return self._rpc("setResultFloatArray",resultCode,resultValue)
def setResultInteger(self,resultCode,resultValue):
'''
setResultInteger(
Integer resultCode: The result code to propagate back to the originating activity, often RESULT_CANCELED (0) or RESULT_OK (-1),
Integer resultValue)
Sets the result of a script execution. Whenever the script APK is called via startActivityForResult(), the resulting intent will contain SCRIPT_RESULT extra with the given value.
'''
return self._rpc("setResultInteger",resultCode,resultValue)
def setResultIntegerArray(self,resultCode,resultValue):
'''
setResultIntegerArray(
Integer resultCode: The result code to propagate back to the originating activity, often RESULT_CANCELED (0) or RESULT_OK (-1),
Integer[] resultValue)
Sets the result of a script execution. Whenever the script APK is called via startActivityForResult(), the resulting intent will contain SCRIPT_RESULT extra with the given value.
'''
return self._rpc("setResultIntegerArray",resultCode,resultValue)
def setResultLong(self,resultCode,resultValue):
'''
setResultLong(
Integer resultCode: The result code to propagate back to the originating activity, often RESULT_CANCELED (0) or RESULT_OK (-1),
Long resultValue)
Sets the result of a script execution. Whenever the script APK is called via startActivityForResult(), the resulting intent will contain SCRIPT_RESULT extra with the given value.
'''
return self._rpc("setResultLong",resultCode,resultValue)
def setResultLongArray(self,resultCode,resultValue):
'''
setResultLongArray(
Integer resultCode: The result code to propagate back to the originating activity, often RESULT_CANCELED (0) or RESULT_OK (-1),
Long[] resultValue)
Sets the result of a script execution. Whenever the script APK is called via startActivityForResult(), the resulting intent will contain SCRIPT_RESULT extra with the given value.
'''
return self._rpc("setResultLongArray",resultCode,resultValue)
def setResultSerializable(self,resultCode,resultValue):
'''
setResultSerializable(
Integer resultCode: The result code to propagate back to the originating activity, often RESULT_CANCELED (0) or RESULT_OK (-1),
Serializable resultValue)
Sets the result of a script execution. Whenever the script APK is called via startActivityForResult(), the resulting intent will contain SCRIPT_RESULT extra with the given value.
'''
return self._rpc("setResultSerializable",resultCode,resultValue)
def setResultShort(self,resultCode,resultValue):
'''
setResultShort(
Integer resultCode: The result code to propagate back to the originating activity, often RESULT_CANCELED (0) or RESULT_OK (-1),
Short resultValue)
Sets the result of a script execution. Whenever the script APK is called via startActivityForResult(), the resulting intent will contain SCRIPT_RESULT extra with the given value.
'''
return self._rpc("setResultShort",resultCode,resultValue)
def setResultShortArray(self,resultCode,resultValue):
'''
setResultShortArray(
Integer resultCode: The result code to propagate back to the originating activity, often RESULT_CANCELED (0) or RESULT_OK (-1),
Short[] resultValue)
Sets the result of a script execution. Whenever the script APK is called via startActivityForResult(), the resulting intent will contain SCRIPT_RESULT extra with the given value.
'''
return self._rpc("setResultShortArray",resultCode,resultValue)
def setResultString(self,resultCode,resultValue):
'''
setResultString(
Integer resultCode: The result code to propagate back to the originating activity, often RESULT_CANCELED (0) or RESULT_OK (-1),
String resultValue)
Sets the result of a script execution. Whenever the script APK is called via startActivityForResult(), the resulting intent will contain SCRIPT_RESULT extra with the given value.
'''
return self._rpc("setResultString",resultCode,resultValue)
def setResultStringArray(self,resultCode,resultValue):
'''
setResultStringArray(
Integer resultCode: The result code to propagate back to the originating activity, often RESULT_CANCELED (0) or RESULT_OK (-1),
String[] resultValue)
Sets the result of a script execution. Whenever the script APK is called via startActivityForResult(), the resulting intent will contain SCRIPT_RESULT extra with the given value.
'''
return self._rpc("setResultStringArray",resultCode,resultValue)
#******** AndroidFacade Functions : min SDK 3 ********
def environment(self):
'''
environment()
A map of various useful environment details
'''
return self._rpc("environment")
def getClipboard(self):
'''
getClipboard()
Read text from the clipboard.
Returns:
The text in the clipboard.
'''
return self._rpc("getClipboard")
def getConstants(self,classname):
'''
getConstants(
String classname: Class to get constants from)
Get list of constants (static final fields) for a class
'''
return self._rpc("getConstants",classname)
def getInput(self,title,message):
'''
getInput(
String title[optional, default SL4A Input]: title of the input box,
String message[optional, default Please enter value:]: message to display above the input box)
Queries the user for a text input.
Deprecated in r3! Please use dialogGetInput instead.
'''
return self._rpc("getInput",title,message)
def getIntent(self):
'''
getIntent()
Returns the intent that launched the script.
'''
return self._rpc("getIntent")
def getPackageVersion(self,packageName):
'''
getPackageVersion(
String packageName)
Returns package version name.
'''
return self._rpc("getPackageVersion",packageName)
def getPackageVersionCode(self,packageName):
'''
getPackageVersionCode(
String packageName)
Returns package version code.
'''
return self._rpc("getPackageVersionCode",packageName)
def getPassword(self,title,message):
'''
getPassword(
String title[optional, default SL4A Password Input]: title of the input box,
String message[optional, default Please enter password:]: message to display above the input box)
Queries the user for a password.
Deprecated in r3! Please use dialogGetPassword instead.
'''
return self._rpc("getPassword",title,message)
def log(self,message):
'''
log(
String message)
Writes message to logcat.
'''
return self._rpc("log",message)
def makeIntent(self,action,uri,type,extras,categories,packagename,classname,flags):
'''
makeIntent(
String action,
String uri[optional],
String type[optional]: MIME type/subtype of the URI,
JSONObject extras[optional]: a Map of extras to add to the Intent,
JSONArray categories[optional]: a List of categories to add to the Intent,
String packagename[optional]: name of package. If used, requires classname to be useful,
String classname[optional]: name of class. If used, requires packagename to be useful,
Integer flags[optional]: Intent flags)
Create an Intent.
Returns:
An object representing an Intent
'''
return self._rpc("makeIntent",action,uri,type,extras,categories,packagename,classname,flags)
def makeToast(self,message):
'''
makeToast(
String message)
Displays a short-duration Toast notification.
'''
return self._rpc("makeToast",message)
def notify(self,title,message):
'''
notify(
String title: title,
String message)
Displays a notification that will be canceled when the user clicks on it.
'''
return self._rpc("notify",title,message)
def requiredVersion(self,requiredVersion):
    '''
    requiredVersion(
      Integer requiredVersion)
    Checks if version of SL4A is greater than or equal to the specified version.
    '''
    return self._rpc("requiredVersion",requiredVersion)
def sendBroadcast(self,action,uri,type,extras,packagename,classname):
    '''
    sendBroadcast(
      String action,
      String uri[optional],
      String type[optional]: MIME type/subtype of the URI,
      JSONObject extras[optional]: a Map of extras to add to the Intent,
      String packagename[optional]: name of package. If used, requires classname to be useful,
      String classname[optional]: name of class. If used, requires packagename to be useful)
    Send a broadcast.
    '''
    return self._rpc("sendBroadcast",action,uri,type,extras,packagename,classname)
def sendBroadcastIntent(self,intent):
    '''
    sendBroadcastIntent(
      Intent intent: Intent in the format as returned from makeIntent)
    Send Broadcast Intent
    '''
    return self._rpc("sendBroadcastIntent",intent)
def sendEmail(self,to,subject,body,attachmentUri):
    '''
    sendEmail(
      String to: A comma separated list of recipients.,
      String subject,
      String body,
      String attachmentUri[optional])
    Launches an activity that sends an e-mail message to a given recipient.
    '''
    return self._rpc("sendEmail",to,subject,body,attachmentUri)
def setClipboard(self,text):
    '''
    setClipboard(
      String text)
    Put text in the clipboard.
    '''
    return self._rpc("setClipboard",text)
def startActivity(self,action,uri,type,extras,wait,packagename,classname):
    '''
    startActivity(
      String action,
      String uri[optional],
      String type[optional]: MIME type/subtype of the URI,
      JSONObject extras[optional]: a Map of extras to add to the Intent,
      Boolean wait[optional]: block until the user exits the started activity,
      String packagename[optional]: name of package. If used, requires classname to be useful,
      String classname[optional]: name of class. If used, requires packagename to be useful)
    Starts an activity.
    '''
    return self._rpc("startActivity",action,uri,type,extras,wait,packagename,classname)
def startActivityForResult(self,action,uri,type,extras,packagename,classname):
    '''
    startActivityForResult(
      String action,
      String uri[optional],
      String type[optional]: MIME type/subtype of the URI,
      JSONObject extras[optional]: a Map of extras to add to the Intent,
      String packagename[optional]: name of package. If used, requires classname to be useful,
      String classname[optional]: name of class. If used, requires packagename to be useful)
    Starts an activity and returns the result.
    Returns:
      A Map representation of the result Intent.
    '''
    return self._rpc("startActivityForResult",action,uri,type,extras,packagename,classname)
def startActivityForResultIntent(self,intent):
    '''
    startActivityForResultIntent(
      Intent intent: Intent in the format as returned from makeIntent)
    Starts an activity and returns the result.
    Returns:
      A Map representation of the result Intent.
    '''
    return self._rpc("startActivityForResultIntent",intent)
def startActivityIntent(self,intent,wait):
    '''
    startActivityIntent(
      Intent intent: Intent in the format as returned from makeIntent,
      Boolean wait[optional]: block until the user exits the started activity)
    Start Activity using Intent
    '''
    return self._rpc("startActivityIntent",intent,wait)
def vibrate(self,duration):
    '''
    vibrate(
      Integer duration[optional, default 300]: duration in milliseconds)
    Vibrates the phone for a specified duration in milliseconds.
    '''
    return self._rpc("vibrate",duration)
#******** ApplicationManagerFacade Functions : min SDK 3 ********
def forceStopPackage(self,packageName):
    '''
    forceStopPackage(
      String packageName: name of package)
    Force stops a package.
    '''
    return self._rpc("forceStopPackage",packageName)
def getLaunchableApplications(self):
    '''
    getLaunchableApplications()
    Returns a list of all launchable application class names.
    '''
    return self._rpc("getLaunchableApplications")
def getRunningPackages(self):
    '''
    getRunningPackages()
    Returns a list of packages running activities or services.
    Returns:
      List of packages running activities.
    '''
    return self._rpc("getRunningPackages")
def launch(self,className):
    '''
    launch(
      String className)
    Start activity with the given class name.
    '''
    return self._rpc("launch",className)
#******** BatteryManagerFacade Functions : min SDK 3 ********
def batteryCheckPresent(self):
    '''
    batteryCheckPresent()
    Returns the most recently received battery presence data.
    '''
    return self._rpc("batteryCheckPresent")
def batteryGetHealth(self):
    '''
    batteryGetHealth()
    Returns the most recently received battery health data:
      1 - unknown;
      2 - good;
      3 - overheat;
      4 - dead;
      5 - over voltage;
      6 - unspecified failure;
    '''
    return self._rpc("batteryGetHealth")
def batteryGetLevel(self):
    '''
    batteryGetLevel()
    Returns the most recently received battery level (percentage).
    '''
    return self._rpc("batteryGetLevel")
def batteryGetPlugType(self):
    '''
    batteryGetPlugType()
    Returns the most recently received plug type data:
      -1 - unknown
      0 - unplugged;
      1 - power source is an AC charger
      2 - power source is a USB port
    '''
    return self._rpc("batteryGetPlugType")
def batteryGetStatus(self):
    '''
    batteryGetStatus()
    Returns the most recently received battery status data:
      1 - unknown;
      2 - charging;
      3 - discharging;
      4 - not charging;
      5 - full;
    '''
    return self._rpc("batteryGetStatus")
def batteryGetTechnology(self):
    '''
    batteryGetTechnology()
    Returns the most recently received battery technology data.
    '''
    return self._rpc("batteryGetTechnology")
def batteryGetTemperature(self):
    '''
    batteryGetTemperature()
    Returns the most recently received battery temperature.
    '''
    return self._rpc("batteryGetTemperature")
def batteryGetVoltage(self):
    '''
    batteryGetVoltage()
    Returns the most recently received battery voltage.
    '''
    return self._rpc("batteryGetVoltage")
def batteryStartMonitoring(self):
    '''
    batteryStartMonitoring()
    Starts tracking battery state.
    Generates "battery" events.
    '''
    return self._rpc("batteryStartMonitoring")
def batteryStopMonitoring(self):
    '''
    batteryStopMonitoring()
    Stops tracking battery state.
    '''
    return self._rpc("batteryStopMonitoring")
def readBatteryData(self):
    '''
    readBatteryData()
    Returns the most recently recorded battery data.
    '''
    return self._rpc("readBatteryData")
#******** BluetoothFacade Functions : min SDK 5 ********
def bluetoothAccept(self,uuid,timeout):
    '''
    bluetoothAccept(
      String uuid[optional, default 457807c0-4897-11df-9879-0800200c9a66],
      Integer timeout[optional, default 0]: How long to wait for a new connection, 0 is wait for ever)
    Listens for and accepts a Bluetooth connection. Blocks until the connection is established or fails.
    '''
    return self._rpc("bluetoothAccept",uuid,timeout)
def bluetoothActiveConnections(self):
    '''
    bluetoothActiveConnections()
    Returns active Bluetooth connections.
    '''
    return self._rpc("bluetoothActiveConnections")
def bluetoothConnect(self,uuid,address):
    '''
    bluetoothConnect(
      String uuid[optional, default 457807c0-4897-11df-9879-0800200c9a66]: The UUID passed here must match the UUID used by the server device.,
      String address[optional]: The user will be presented with a list of discovered devices to choose from if an address is not provided.)
    Connect to a device over Bluetooth. Blocks until the connection is established or fails.
    Returns:
      True if the connection was established successfully.
    '''
    return self._rpc("bluetoothConnect",uuid,address)
def bluetoothDiscoveryCancel(self):
    '''
    bluetoothDiscoveryCancel()
    Cancel the current device discovery process.
    Returns:
      true on success, false on error
    '''
    return self._rpc("bluetoothDiscoveryCancel")
def bluetoothDiscoveryStart(self):
    '''
    bluetoothDiscoveryStart()
    Start the remote device discovery process.
    Returns:
      true on success, false on error
    '''
    return self._rpc("bluetoothDiscoveryStart")
def bluetoothGetConnectedDeviceName(self,connID):
    '''
    bluetoothGetConnectedDeviceName(
      String connID[optional, default null]: Connection id)
    Returns the name of the connected device.
    '''
    return self._rpc("bluetoothGetConnectedDeviceName",connID)
def bluetoothGetLocalAddress(self):
    '''
    bluetoothGetLocalAddress()
    Returns the hardware address of the local Bluetooth adapter.
    '''
    return self._rpc("bluetoothGetLocalAddress")
def bluetoothGetLocalName(self):
    '''
    bluetoothGetLocalName()
    Gets the Bluetooth Visible device name
    '''
    return self._rpc("bluetoothGetLocalName")
def bluetoothGetRemoteDeviceName(self,address):
    '''
    bluetoothGetRemoteDeviceName(
      String address: Bluetooth Address For Target Device)
    Queries a remote device for its name or null if it can't be resolved
    '''
    return self._rpc("bluetoothGetRemoteDeviceName",address)
def bluetoothGetScanMode(self):
    '''
    bluetoothGetScanMode()
    Gets the scan mode for the local dongle.
    Return values:
      -1 when Bluetooth is disabled.
      0 if non discoverable and non connectable.
      1 connectable non discoverable.
      3 connectable and discoverable.
    '''
    return self._rpc("bluetoothGetScanMode")
def bluetoothIsDiscovering(self):
    '''
    bluetoothIsDiscovering()
    Return true if the local Bluetooth adapter is currently in the device discovery process.
    '''
    return self._rpc("bluetoothIsDiscovering")
def bluetoothMakeDiscoverable(self,duration):
    '''
    bluetoothMakeDiscoverable(
      Integer duration[optional, default 300]: period of time, in seconds, during which the device should be discoverable)
    Requests that the device be discoverable for Bluetooth connections.
    '''
    return self._rpc("bluetoothMakeDiscoverable",duration)
def bluetoothRead(self,bufferSize,connID):
    '''
    bluetoothRead(
      Integer bufferSize[optional, default 4096],
      String connID[optional, default null]: Connection id)
    Read up to bufferSize ASCII characters.
    '''
    return self._rpc("bluetoothRead",bufferSize,connID)
def bluetoothReadBinary(self,bufferSize,connID):
    '''
    bluetoothReadBinary(
      Integer bufferSize[optional, default 4096],
      String connID[optional, default ]: Connection id)
    Read up to bufferSize bytes and return a chunked, base64 encoded string.
    '''
    return self._rpc("bluetoothReadBinary",bufferSize,connID)
def bluetoothReadLine(self,connID):
    '''
    bluetoothReadLine(
      String connID[optional, default null]: Connection id)
    Read the next line.
    '''
    return self._rpc("bluetoothReadLine",connID)
def bluetoothReadReady(self,connID):
    '''
    bluetoothReadReady(
      String connID[optional, default ]: Connection id)
    Returns True if the next read is guaranteed not to block.
    '''
    return self._rpc("bluetoothReadReady",connID)
def bluetoothSetLocalName(self,name):
    '''
    bluetoothSetLocalName(
      String name: New local name)
    Sets the Bluetooth Visible device name, returns True on success
    '''
    return self._rpc("bluetoothSetLocalName",name)
def bluetoothStop(self,connID):
    '''
    bluetoothStop(
      String connID[optional, default null]: Connection id)
    Stops Bluetooth connection.
    '''
    return self._rpc("bluetoothStop",connID)
def bluetoothWrite(self,ascii,connID):
    '''
    bluetoothWrite(
      String ascii,
      String connID[optional, default ]: Connection id)
    Sends ASCII characters over the currently open Bluetooth connection.
    '''
    return self._rpc("bluetoothWrite",ascii,connID)
def bluetoothWriteBinary(self,base64,connID):
    '''
    bluetoothWriteBinary(
      String base64: A base64 encoded String of the bytes to be sent.,
      String connID[optional, default ]: Connection id)
    Send bytes over the currently open Bluetooth connection.
    '''
    return self._rpc("bluetoothWriteBinary",base64,connID)
def checkBluetoothState(self):
    '''
    checkBluetoothState()
    Checks Bluetooth state.
    Returns:
      True if Bluetooth is enabled.
    '''
    return self._rpc("checkBluetoothState")
def toggleBluetoothState(self,enabled,prompt):
    '''
    toggleBluetoothState(
      Boolean enabled[optional],
      Boolean prompt[optional, default true]: Prompt the user to confirm changing the Bluetooth state.)
    Toggle Bluetooth on and off.
    Returns:
      True if Bluetooth is enabled.
    '''
    return self._rpc("toggleBluetoothState",enabled,prompt)
#******** CameraFacade Functions : min SDK 3 ********
def cameraCapturePicture(self, targetPath, useAutoFocus):
    """Take a picture with the device camera and save it to targetPath.

    cameraCapturePicture(
      String targetPath,
      Boolean useAutoFocus[optional, default true])
    Returns:
      A map of Booleans autoFocus and takePicture where True indicates success.
    """
    outcome = self._rpc("cameraCapturePicture", targetPath, useAutoFocus)
    return outcome
def cameraInteractiveCapturePicture(self, targetPath):
    """Launch the image-capture application to take a picture saved at targetPath.

    cameraInteractiveCapturePicture(
      String targetPath)
    """
    outcome = self._rpc("cameraInteractiveCapturePicture", targetPath)
    return outcome
#******** CommonIntentsFacade Functions : min SDK 3 ********
def pick(self,uri):
    '''
    pick(
      String uri)
    Display content to be picked by URI (e.g. contacts)
    Returns:
      A map of result values.
    '''
    return self._rpc("pick",uri)
def scanBarcode(self):
    '''
    scanBarcode()
    Starts the barcode scanner.
    Returns:
      A Map representation of the result Intent.
    '''
    return self._rpc("scanBarcode")
def search(self,query):
    '''
    search(
      String query)
    Starts a search for the given query.
    '''
    return self._rpc("search",query)
def view(self,uri,type,extras):
    '''
    view(
      String uri,
      String type[optional]: MIME type/subtype of the URI,
      JSONObject extras[optional]: a Map of extras to add to the Intent)
    Start activity with view action by URI (i.e. browser, contacts, etc.).
    '''
    return self._rpc("view",uri,type,extras)
def viewContacts(self):
    '''
    viewContacts()
    Opens the list of contacts.
    '''
    return self._rpc("viewContacts")
def viewHtml(self,path):
    '''
    viewHtml(
      String path: the path to the HTML file)
    Opens the browser to display a local HTML file.
    '''
    return self._rpc("viewHtml",path)
def viewMap(self,query):
    '''
    viewMap(
      String query, e.g. pizza, 123 My Street)
    Opens a map search for query (e.g. pizza, 123 My Street).
    '''
    return self._rpc("viewMap",query)
#******** ContactsFacade Functions : min SDK 3 ********
def contactsGet(self,attributes):
    '''
    contactsGet(
      JSONArray attributes[optional])
    Returns a List of all contacts.
    Returns:
      a List of contacts as Maps
    '''
    return self._rpc("contactsGet",attributes)
def contactsGetAttributes(self):
    '''
    contactsGetAttributes()
    Returns a List of all possible attributes for contacts.
    '''
    return self._rpc("contactsGetAttributes")
def contactsGetById(self,id,attributes):
    '''
    contactsGetById(
      Integer id,
      JSONArray attributes[optional])
    Returns contacts by ID.
    '''
    return self._rpc("contactsGetById",id,attributes)
def contactsGetCount(self):
    '''
    contactsGetCount()
    Returns the number of contacts.
    '''
    return self._rpc("contactsGetCount")
def contactsGetIds(self):
    '''
    contactsGetIds()
    Returns a List of all contact IDs.
    '''
    return self._rpc("contactsGetIds")
def pickContact(self):
    '''
    pickContact()
    Displays a list of contacts to pick from.
    Returns:
      A map of result values.
    '''
    return self._rpc("pickContact")
def pickPhone(self):
    '''
    pickPhone()
    Displays a list of phone numbers to pick from.
    Returns:
      The selected phone number.
    '''
    return self._rpc("pickPhone")
def queryAttributes(self,uri):
    '''
    queryAttributes(
      String uri: The URI, using the content:// scheme, for the content to retrieve.)
    Content Resolver Query Attributes
    Returns:
      a list of available columns for a given content uri
    '''
    return self._rpc("queryAttributes",uri)
def queryContent(self,uri,attributes,selection,selectionArgs,order):
    '''
    queryContent(
      String uri: The URI, using the content:// scheme, for the content to retrieve.,
      JSONArray attributes[optional]: A list of which columns to return. Passing null will return all columns,
      String selection[optional]: A filter declaring which rows to return,
      JSONArray selectionArgs[optional]: You may include ?s in selection, which will be replaced by the values from selectionArgs,
      String order[optional]: How to order the rows)
    Content Resolver Query
    Returns:
      result of query as Maps
    '''
    return self._rpc("queryContent",uri,attributes,selection,selectionArgs,order)
#******** EventFacade Functions : min SDK 3 ********
def eventClearBuffer(self):
    '''
    eventClearBuffer()
    Clears all events from the event buffer.
    '''
    return self._rpc("eventClearBuffer")
# NOTE: "Brodcast" (sic) matches the RPC name exposed by the SL4A server; do not "fix" it.
def eventGetBrodcastCategories(self):
    '''
    eventGetBrodcastCategories()
    Lists all the broadcast signals we are listening for
    '''
    return self._rpc("eventGetBrodcastCategories")
def eventPoll(self,number_of_events):
    '''
    eventPoll(
      Integer number_of_events[optional, default 1])
    Returns and removes the oldest n events (i.e. location or sensor update, etc.) from the event buffer.
    Returns:
      A List of Maps of event properties.
    '''
    return self._rpc("eventPoll",number_of_events)
def eventPost(self,name,data,enqueue):
    '''
    eventPost(
      String name: Name of event,
      String data: Data contained in event.,
      Boolean enqueue[optional, default null]: Set to False if you don't want your events to be added to the event queue, just dispatched.)
    Post an event to the event queue.
    '''
    return self._rpc("eventPost",name,data,enqueue)
def eventRegisterForBroadcast(self,category,enqueue):
    '''
    eventRegisterForBroadcast(
      String category,
      Boolean enqueue[optional, default true]: Whether these events should be added to the event queue or only dispatched)
    Registers a listener for a new broadcast signal
    '''
    return self._rpc("eventRegisterForBroadcast",category,enqueue)
def eventUnregisterForBroadcast(self,category):
    '''
    eventUnregisterForBroadcast(
      String category)
    Stop listening for a broadcast signal
    '''
    return self._rpc("eventUnregisterForBroadcast",category)
def eventWait(self,timeout):
    '''
    eventWait(
      Integer timeout[optional]: the maximum time to wait)
    Blocks until an event occurs. The returned event is removed from the buffer.
    Returns:
      Map of event properties.
    '''
    return self._rpc("eventWait",timeout)
def eventWaitFor(self,eventName,timeout):
    '''
    eventWaitFor(
      String eventName,
      Integer timeout[optional]: the maximum time to wait (in ms))
    Blocks until an event with the supplied name occurs. The returned event is not removed from the buffer.
    Returns:
      Map of event properties.
    '''
    return self._rpc("eventWaitFor",eventName,timeout)
def postEvent(self,name,data):
    '''
    postEvent(
      String name,
      String data)
    Post an event to the event queue.
    Deprecated in r4! Please use eventPost instead.
    '''
    return self._rpc("postEvent",name,data)
def receiveEvent(self):
    '''
    receiveEvent()
    Returns and removes the oldest event (i.e. location or sensor update, etc.) from the event buffer.
    Returns:
      Map of event properties.
    Deprecated in r4! Please use eventPoll instead.
    '''
    return self._rpc("receiveEvent")
def startEventDispatcher(self,port):
    '''
    startEventDispatcher(
      Integer port[optional, default 0]: Port to use)
    Opens up a socket where you can read for events posted
    '''
    return self._rpc("startEventDispatcher",port)
def stopEventDispatcher(self):
    '''
    stopEventDispatcher()
    Stops the event server, you can't read in the port anymore
    '''
    return self._rpc("stopEventDispatcher")
def waitForEvent(self,eventName,timeout):
    '''
    waitForEvent(
      String eventName,
      Integer timeout[optional]: the maximum time to wait)
    Blocks until an event with the supplied name occurs. The returned event is not removed from the buffer.
    Returns:
      Map of event properties.
    Deprecated in r4! Please use eventWaitFor instead.
    '''
    return self._rpc("waitForEvent",eventName,timeout)
#******** LocationFacade Functions : min SDK 3 ********
def geocode(self,latitude,longitude,maxResults):
    '''
    geocode(
      Double latitude,
      Double longitude,
      Integer maxResults[optional, default 1]: maximum number of results)
    Returns a list of addresses for the given latitude and longitude.
    Returns:
      A list of addresses.
    '''
    return self._rpc("geocode",latitude,longitude,maxResults)
def getLastKnownLocation(self):
    '''
    getLastKnownLocation()
    Returns the last known location of the device.
    Returns:
      A map of location information by provider.
    '''
    return self._rpc("getLastKnownLocation")
def locationProviderEnabled(self,provider):
    '''
    locationProviderEnabled(
      String provider: Name of location provider)
    Ask if provider is enabled
    '''
    return self._rpc("locationProviderEnabled",provider)
def locationProviders(self):
    '''
    locationProviders()
    Returns available providers on the phone
    '''
    return self._rpc("locationProviders")
def readLocation(self):
    '''
    readLocation()
    Returns the current location as indicated by all available providers.
    Returns:
      A map of location information by provider.
    '''
    return self._rpc("readLocation")
# NOTE(review): the upstream docstring describes minDistance as a *time* in ms and
# minUpdateDistance as a distance in meters -- the first parameter name and its
# description do not match. Kept verbatim from the generated API docs; verify
# against the SL4A server before relying on either description.
def startLocating(self,minDistance,minUpdateDistance):
    '''
    startLocating(
      Integer minDistance[optional, default 60000]: minimum time between updates in milliseconds,
      Integer minUpdateDistance[optional, default 30]: minimum distance between updates in meters)
    Starts collecting location data.
    Generates "location" events.
    '''
    return self._rpc("startLocating",minDistance,minUpdateDistance)
def stopLocating(self):
    '''
    stopLocating()
    Stops collecting location data.
    '''
    return self._rpc("stopLocating")
#******** MediaPlayerFacade Functions : min SDK 3 ********
def mediaIsPlaying(self,tag):
    '''
    mediaIsPlaying(
      String tag[optional, default default]: string identifying resource)
    Checks if media file is playing.
    Returns:
      true if playing
    '''
    return self._rpc("mediaIsPlaying",tag)
def mediaPlay(self,url,tag,play):
    '''
    mediaPlay(
      String url: url of media resource,
      String tag[optional, default default]: string identifying resource,
      Boolean play[optional, default true]: start playing immediately)
    Open a media file
    Returns:
      true if play successful
    '''
    return self._rpc("mediaPlay",url,tag,play)
def mediaPlayClose(self,tag):
    '''
    mediaPlayClose(
      String tag[optional, default default]: string identifying resource)
    Close media file
    Returns:
      true if successful
    '''
    return self._rpc("mediaPlayClose",tag)
def mediaPlayInfo(self,tag):
    '''
    mediaPlayInfo(
      String tag[optional, default default]: string identifying resource)
    Information on current media
    Returns:
      Media Information
    '''
    return self._rpc("mediaPlayInfo",tag)
def mediaPlayList(self):
    '''
    mediaPlayList()
    Lists currently loaded media
    Returns:
      List of Media Tags
    '''
    return self._rpc("mediaPlayList")
def mediaPlayPause(self,tag):
    '''
    mediaPlayPause(
      String tag[optional, default default]: string identifying resource)
    Pause playing media file
    Returns:
      true if successful
    '''
    return self._rpc("mediaPlayPause",tag)
def mediaPlaySeek(self,msec,tag):
    '''
    mediaPlaySeek(
      Integer msec: Position in milliseconds,
      String tag[optional, default default]: string identifying resource)
    Seek To Position
    Returns:
      New Position (in ms)
    '''
    return self._rpc("mediaPlaySeek",msec,tag)
def mediaPlaySetLooping(self,enabled,tag):
    '''
    mediaPlaySetLooping(
      Boolean enabled[optional, default true],
      String tag[optional, default default]: string identifying resource)
    Set Looping
    Returns:
      True if successful
    '''
    return self._rpc("mediaPlaySetLooping",enabled,tag)
def mediaPlayStart(self,tag):
    '''
    mediaPlayStart(
      String tag[optional, default default]: string identifying resource)
    Start playing media file
    Returns:
      true if successful
    '''
    return self._rpc("mediaPlayStart",tag)
#******** MediaRecorderFacade Functions : min SDK 3 ********
def recorderCaptureVideo(self,targetPath,duration,recordAudio):
    '''
    recorderCaptureVideo(
      String targetPath,
      Integer duration[optional],
      Boolean recordAudio[optional, default true])
    Records video (and optionally audio) from the camera and saves it to the given location.
    Duration specifies the maximum duration of the recording session.
    If duration is not provided this method will return immediately and the recording will only be stopped
    when recorderStop is called or when a script exits.
    Otherwise it will block for the time period equal to the duration argument.
    '''
    return self._rpc("recorderCaptureVideo",targetPath,duration,recordAudio)
def recorderStartMicrophone(self,targetPath):
    '''
    recorderStartMicrophone(
      String targetPath)
    Records audio from the microphone and saves it to the given location.
    '''
    return self._rpc("recorderStartMicrophone",targetPath)
def recorderStartVideo(self,targetPath,duration,videoSize):
    '''
    recorderStartVideo(
      String targetPath,
      Integer duration[optional, default 0],
      Integer videoSize[optional, default 1])
    Records video from the camera and saves it to the given location.
    Duration specifies the maximum duration of the recording session.
    If duration is 0 this method will return and the recording will only be stopped
    when recorderStop is called or when a script exits.
    Otherwise it will block for the time period equal to the duration argument.
    videoSize: 0=160x120, 1=320x240, 2=352x288, 3=640x480, 4=800x480.
    '''
    return self._rpc("recorderStartVideo",targetPath,duration,videoSize)
def recorderStop(self):
    '''
    recorderStop()
    Stops a previously started recording.
    '''
    return self._rpc("recorderStop")
def startInteractiveVideoRecording(self,path):
    '''
    startInteractiveVideoRecording(
      String path)
    Starts the video capture application to record a video and saves it to the specified path.
    '''
    return self._rpc("startInteractiveVideoRecording",path)
#******** PhoneFacade Functions : min SDK 3 ********
def checkNetworkRoaming(self):
    '''
    checkNetworkRoaming()
    Returns true if the device is considered roaming on the current network, for GSM purposes.
    '''
    return self._rpc("checkNetworkRoaming")
def getCellLocation(self):
    '''
    getCellLocation()
    Returns the current cell location.
    '''
    return self._rpc("getCellLocation")
def getDeviceId(self):
    '''
    getDeviceId()
    Returns the unique device ID, for example, the IMEI for GSM and the MEID for CDMA phones. Return null if device ID is not available.
    '''
    return self._rpc("getDeviceId")
def getDeviceSoftwareVersion(self):
    '''
    getDeviceSoftwareVersion()
    Returns the software version number for the device, for example, the IMEI/SV for GSM phones. Return null if the software version is not available.
    '''
    return self._rpc("getDeviceSoftwareVersion")
def getLine1Number(self):
    '''
    getLine1Number()
    Returns the phone number string for line 1, for example, the MSISDN for a GSM phone. Return null if it is unavailable.
    '''
    return self._rpc("getLine1Number")
def getNeighboringCellInfo(self):
    '''
    getNeighboringCellInfo()
    Returns the neighboring cell information of the device.
    '''
    return self._rpc("getNeighboringCellInfo")
def getNetworkOperator(self):
    '''
    getNetworkOperator()
    Returns the numeric name (MCC+MNC) of current registered operator.
    '''
    return self._rpc("getNetworkOperator")
def getNetworkOperatorName(self):
    '''
    getNetworkOperatorName()
    Returns the alphabetic name of current registered operator.
    '''
    return self._rpc("getNetworkOperatorName")
def getNetworkType(self):
    '''
    getNetworkType()
    Returns the radio technology (network type) currently in use on the device.
    '''
    return self._rpc("getNetworkType")
def getPhoneType(self):
    '''
    getPhoneType()
    Returns the device phone type.
    '''
    return self._rpc("getPhoneType")
def getSimCountryIso(self):
    '''
    getSimCountryIso()
    Returns the ISO country code equivalent for the SIM provider's country code.
    '''
    return self._rpc("getSimCountryIso")
def getSimOperator(self):
    '''
    getSimOperator()
    Returns the MCC+MNC (mobile country code + mobile network code) of the provider of the SIM. 5 or 6 decimal digits.
    '''
    return self._rpc("getSimOperator")
def getSimOperatorName(self):
    '''
    getSimOperatorName()
    Returns the Service Provider Name (SPN).
    '''
    return self._rpc("getSimOperatorName")
def getSimSerialNumber(self):
    '''
    getSimSerialNumber()
    Returns the serial number of the SIM, if applicable. Return null if it is unavailable.
    '''
    return self._rpc("getSimSerialNumber")
def getSimState(self):
    '''
    getSimState()
    Returns the state of the device SIM card.
    '''
    return self._rpc("getSimState")
def getSubscriberId(self):
    '''
    getSubscriberId()
    Returns the unique subscriber ID, for example, the IMSI for a GSM phone. Return null if it is unavailable.
    '''
    return self._rpc("getSubscriberId")
def getVoiceMailAlphaTag(self):
    '''
    getVoiceMailAlphaTag()
    Retrieves the alphabetic identifier associated with the voice mail number.
    '''
    return self._rpc("getVoiceMailAlphaTag")
def getVoiceMailNumber(self):
    '''
    getVoiceMailNumber()
    Returns the voice mail number. Return null if it is unavailable.
    '''
    return self._rpc("getVoiceMailNumber")
def phoneCall(self,uri):
    '''
    phoneCall(
      String uri)
    Calls a contact/phone number by URI.
    '''
    return self._rpc("phoneCall",uri)
def phoneCallNumber(self,phone):
    '''
    phoneCallNumber(
      String phone number)
    Calls a phone number.
    '''
    return self._rpc("phoneCallNumber",phone)
def phoneDial(self,uri):
    '''
    phoneDial(
      String uri)
    Dials a contact/phone number by URI.
    '''
    return self._rpc("phoneDial",uri)
def phoneDialNumber(self,phone):
    '''
    phoneDialNumber(
      String phone number)
    Dials a phone number.
    '''
    return self._rpc("phoneDialNumber",phone)
def readPhoneState(self):
    '''
    readPhoneState()
    Returns the current phone state and incoming number.
    Returns:
      A Map of "state" and "incomingNumber"
    '''
    return self._rpc("readPhoneState")
def startTrackingPhoneState(self):
    '''
    startTrackingPhoneState()
    Starts tracking phone state.
    Generates "phone" events.
    '''
    return self._rpc("startTrackingPhoneState")
def stopTrackingPhoneState(self):
    '''
    stopTrackingPhoneState()
    Stops tracking phone state.
    '''
    return self._rpc("stopTrackingPhoneState")
#******** PreferencesFacade Functions : min SDK 3 ********
def prefGetAll(self,filename):
    '''
    prefGetAll(
      String filename[optional]: Desired preferences file. If not defined, uses the default Shared Preferences.)
    Get list of Shared Preference Values
    Returns:
      Map of key,value
    '''
    return self._rpc("prefGetAll",filename)
def prefGetValue(self,key,filename):
    '''
    prefGetValue(
      String key,
      String filename[optional]: Desired preferences file. If not defined, uses the default Shared Preferences.)
    Read a value from shared preferences
    '''
    return self._rpc("prefGetValue",key,filename)
def prefPutValue(self,key,value,filename):
    '''
    prefPutValue(
      String key,
      Object value,
      String filename[optional]: Desired preferences file. If not defined, uses the default Shared Preferences.)
    Write a value to shared preferences
    '''
    return self._rpc("prefPutValue",key,value,filename)
#******** SensorManagerFacade Functions : min SDK 3 ********
def readSensors(self):
    '''
    readSensors()
    Returns the most recently recorded sensor data.
    '''
    return self._rpc("readSensors")
def sensorsGetAccuracy(self):
    '''
    sensorsGetAccuracy()
    Returns the most recently received accuracy value.
    '''
    return self._rpc("sensorsGetAccuracy")
def sensorsGetLight(self):
    '''
    sensorsGetLight()
    Returns the most recently received light value.
    '''
    return self._rpc("sensorsGetLight")
def sensorsReadAccelerometer(self):
    '''
    sensorsReadAccelerometer()
    Returns the most recently received accelerometer values.
    Returns:
      a List of Floats [(acceleration on the) X axis, Y axis, Z axis].
    '''
    return self._rpc("sensorsReadAccelerometer")
def sensorsReadMagnetometer(self):
    '''
    sensorsReadMagnetometer()
    Returns the most recently received magnetic field values.
    Returns:
      a List of Floats [(magnetic field value for) X axis, Y axis, Z axis].
    '''
    return self._rpc("sensorsReadMagnetometer")
def sensorsReadOrientation(self):
    '''
    sensorsReadOrientation()
    Returns the most recently received orientation values.
    Returns:
      a List of Doubles [azimuth, pitch, roll].
    '''
    return self._rpc("sensorsReadOrientation")
def startSensing(self,sampleSize):
    '''
    startSensing(
      Integer sampleSize[optional, default 5]: number of samples for calculating average readings)
    Starts recording sensor data to be available for polling.
    Deprecated in r4! Please use startSensingTimed or startSensingThreshold instead.
    '''
    return self._rpc("startSensing",sampleSize)
def startSensingThreshold(self,sensorNumber,threshold,axis):
    '''
    startSensingThreshold(
      Integer sensorNumber: 1 = Orientation, 2 = Accelerometer, 3 = Magnetometer and 4 = Light,
      Integer threshold: Threshold level for chosen sensor (integer),
      Integer axis: 0 = No axis, 1 = X, 2 = Y, 3 = X+Y, 4 = Z, 5= X+Z, 6 = Y+Z, 7 = X+Y+Z)
    Records to the Event Queue sensor data exceeding a chosen threshold.
    Generates "threshold" events.
    '''
    return self._rpc("startSensingThreshold",sensorNumber,threshold,axis)
def startSensingTimed(self,sensorNumber,delayTime):
    '''
    startSensingTimed(
      Integer sensorNumber: 1 = All, 2 = Accelerometer, 3 = Magnetometer and 4 = Light,
      Integer delayTime: Minimum time between readings in milliseconds)
    Starts recording sensor data to be available for polling.
    Generates "sensors" events.
    '''
    return self._rpc("startSensingTimed",sensorNumber,delayTime)
def stopSensing(self):
    '''
    stopSensing()
    Stops collecting sensor data.
    '''
    return self._rpc("stopSensing")
#******** SettingsFacade Functions : min SDK 3 ********
def checkAirplaneMode(self):
    '''
    checkAirplaneMode()
    Checks the airplane mode setting.
    Returns:
      True if airplane mode is enabled.
    '''
    return self._rpc("checkAirplaneMode")
def checkRingerSilentMode(self):
    '''
    checkRingerSilentMode()
    Checks the ringer silent mode setting.
    Returns:
      True if ringer silent mode is enabled.
    '''
    return self._rpc("checkRingerSilentMode")
def checkScreenOn(self):
    '''
    checkScreenOn()
    Checks if the screen is on or off (requires API level 7).
    Returns:
      True if the screen is currently on.
    '''
    return self._rpc("checkScreenOn")
def getMaxMediaVolume(self):
    '''
    getMaxMediaVolume()
    Returns the maximum media volume.
    '''
    return self._rpc("getMaxMediaVolume")
def getMaxRingerVolume(self):
    '''
    getMaxRingerVolume()
    Returns the maximum ringer volume.
    '''
    return self._rpc("getMaxRingerVolume")
def getMediaVolume(self):
    '''
    getMediaVolume()
    Returns the current media volume.
    '''
    return self._rpc("getMediaVolume")
def getRingerVolume(self):
    '''
    getRingerVolume()
    Returns the current ringer volume.
    '''
    return self._rpc("getRingerVolume")
def getScreenBrightness(self):
    '''
    getScreenBrightness()
    Returns the screen backlight brightness.
    Returns:
      the current screen brightness between 0 and 255
    '''
    return self._rpc("getScreenBrightness")
def getScreenTimeout(self):
    '''
    getScreenTimeout()
    Returns the current screen timeout in seconds.
    Returns:
      the current screen timeout in seconds.
    '''
    return self._rpc("getScreenTimeout")
def getVibrateMode(self,ringer):
    '''
    getVibrateMode(
      Boolean ringer[optional])
    Checks Vibration setting. If ringer=true then query Ringer setting, else query Notification setting
    Returns:
      True if vibrate mode is enabled.
    '''
    return self._rpc("getVibrateMode",ringer)
def setMediaVolume(self,volume):
    '''
    setMediaVolume(
      Integer volume)
    Sets the media volume.
    '''
    return self._rpc("setMediaVolume",volume)
def setRingerVolume(self,volume):
    '''
    setRingerVolume(
      Integer volume)
    Sets the ringer volume.
    '''
    return self._rpc("setRingerVolume",volume)
def setScreenBrightness(self,value):
    '''
    setScreenBrightness(
      Integer value: brightness value between 0 and 255)
    Sets the screen backlight brightness.
    Returns:
      the original screen brightness.
    '''
    return self._rpc("setScreenBrightness",value)
def setScreenTimeout(self,value):
    '''
    setScreenTimeout(
      Integer value)
    Sets the screen timeout to this number of seconds.
    Returns:
      The original screen timeout.
    '''
    return self._rpc("setScreenTimeout",value)
def toggleAirplaneMode(self,enabled):
    '''
    toggleAirplaneMode(
      Boolean enabled[optional])
    Toggles airplane mode on and off.
    Returns:
      True if airplane mode is enabled.
    '''
    return self._rpc("toggleAirplaneMode",enabled)
def toggleRingerSilentMode(self,enabled):
    '''
    toggleRingerSilentMode(
      Boolean enabled[optional])
    Toggles ringer silent mode on and off.
    Returns:
      True if ringer silent mode is enabled.
    '''
    return self._rpc("toggleRingerSilentMode",enabled)
def toggleVibrateMode(self,enabled,ringer):
    '''
    toggleVibrateMode(
      Boolean enabled[optional],
      Boolean ringer[optional])
    Toggles vibrate mode on and off. If ringer=true then set Ringer setting, else set Notification setting
    Returns:
      True if vibrate mode is enabled.
    '''
    return self._rpc("toggleVibrateMode",enabled,ringer)
#******** SignalStrengthFacade Functions : min SDK 7 ********
def readSignalStrengths(self):
    '''
    readSignalStrengths()
    Returns the current signal strengths.
    Returns:
        A map of "gsm_signal_strength"
    '''
    return self._rpc("readSignalStrengths")
def startTrackingSignalStrengths(self):
    '''
    startTrackingSignalStrengths()
    Starts tracking signal strengths.
    Generates "signal_strengths" events.
    '''
    return self._rpc("startTrackingSignalStrengths")
def stopTrackingSignalStrengths(self):
    '''
    stopTrackingSignalStrengths()
    Stops tracking signal strength.
    '''
    return self._rpc("stopTrackingSignalStrengths")
#******** SmsFacade Functions : min SDK 3 ********
def smsDeleteMessage(self, id: int):
    '''
    smsDeleteMessage(
        Integer id)
    Deletes a message.
    Returns:
        True if the message was deleted
    '''
    # NOTE: parameter name 'id' shadows the builtin; kept for signature
    # compatibility with existing keyword callers.
    return self._rpc("smsDeleteMessage", id)
def smsGetAttributes(self):
    '''
    smsGetAttributes()
    Returns a List of all possible message attributes.
    '''
    return self._rpc("smsGetAttributes")
def smsGetMessageById(self, id: int, attributes):
    '''
    smsGetMessageById(
        Integer id: message ID,
        JSONArray attributes[optional])
    Returns message attributes.
    '''
    return self._rpc("smsGetMessageById", id, attributes)
def smsGetMessageCount(self, unreadOnly: bool, folder):
    '''
    smsGetMessageCount(
        Boolean unreadOnly,
        String folder[optional, default inbox])
    Returns the number of messages.
    '''
    return self._rpc("smsGetMessageCount", unreadOnly, folder)
def smsGetMessageIds(self, unreadOnly: bool, folder):
    '''
    smsGetMessageIds(
        Boolean unreadOnly,
        String folder[optional, default inbox])
    Returns a List of all message IDs.
    '''
    return self._rpc("smsGetMessageIds", unreadOnly, folder)
def smsGetMessages(self, unreadOnly: bool, folder, attributes):
    '''
    smsGetMessages(
        Boolean unreadOnly,
        String folder[optional, default inbox],
        JSONArray attributes[optional])
    Returns a List of all messages.
    Returns:
        a List of messages as Maps
    '''
    return self._rpc("smsGetMessages", unreadOnly, folder, attributes)
def smsMarkMessageRead(self, ids: list, read: bool):
    '''
    smsMarkMessageRead(
        JSONArray ids: List of message IDs to mark as read.,
        Boolean read)
    Marks messages as read.
    Returns:
        number of messages marked read
    '''
    return self._rpc("smsMarkMessageRead", ids, read)
def smsSend(self, destinationAddress: str, text: str):
    '''
    smsSend(
        String destinationAddress: typically a phone number,
        String text)
    Sends an SMS.
    '''
    return self._rpc("smsSend", destinationAddress, text)
#******** SpeechRecognitionFacade Functions : min SDK 3 ********
def recognizeSpeech(self, prompt, language, languageModel):
    '''
    recognizeSpeech(
        String prompt[optional]: text prompt to show to the user when asking them to speak,
        String language[optional]: language override to inform the recognizer that it should expect speech in a language different than the one set in the java.util.Locale.getDefault(),
        String languageModel[optional]: informs the recognizer which speech model to prefer (see android.speech.RecognizeIntent))
    Recognizes user's speech and returns the most likely result.
    Returns:
        An empty string in case the speech cannot be recognized.
    '''
    return self._rpc("recognizeSpeech", prompt, language, languageModel)
#******** TextToSpeechFacade Functions : min SDK 4 ********
def ttsIsSpeaking(self):
    '''
    ttsIsSpeaking()
    Returns True if speech is currently in progress.
    '''
    return self._rpc("ttsIsSpeaking")
def ttsSpeak(self, message: str):
    '''
    ttsSpeak(
        String message)
    Speaks the provided message via TTS.
    '''
    return self._rpc("ttsSpeak", message)
#******** ToneGeneratorFacade Functions : min SDK 3 ********
def generateDtmfTones(self, phoneNumber: str, toneDuration):
    '''
    generateDtmfTones(
        String phoneNumber,
        Integer toneDuration[optional, default 100]: duration of each tone in milliseconds)
    Generate DTMF tones for the given phone number.
    '''
    return self._rpc("generateDtmfTones", phoneNumber, toneDuration)
#******** UiFacade Functions : min SDK 3 ********
# Dialog lifecycle: dialogCreate* builds a dialog, dialogSet* configures it,
# dialogShow displays it, dialogGetResponse/dialogGetSelectedItems read the
# user's answer, dialogDismiss closes it. full* methods manage a fullscreen
# layout defined by an XML-like string.
def addContextMenuItem(self, label: str, event: str, eventData):
    '''
    addContextMenuItem(
        String label: label for this menu item,
        String event: event that will be generated on menu item click,
        Object eventData[optional])
    Adds a new item to context menu.
    '''
    return self._rpc("addContextMenuItem", label, event, eventData)
def addOptionsMenuItem(self, label: str, event: str, eventData, iconName):
    '''
    addOptionsMenuItem(
        String label: label for this menu item,
        String event: event that will be generated on menu item click,
        Object eventData[optional],
        String iconName[optional]: Android system menu icon, see http://developer.android.com/reference/android/R.drawable.html)
    Adds a new item to options menu.
    '''
    return self._rpc("addOptionsMenuItem", label, event, eventData, iconName)
def clearContextMenu(self):
    '''
    clearContextMenu()
    Removes all items previously added to context menu.
    '''
    return self._rpc("clearContextMenu")
def clearOptionsMenu(self):
    '''
    clearOptionsMenu()
    Removes all items previously added to options menu.
    '''
    return self._rpc("clearOptionsMenu")
def dialogCreateAlert(self, title, message):
    '''
    dialogCreateAlert(
        String title[optional],
        String message[optional])
    Create alert dialog.
    '''
    return self._rpc("dialogCreateAlert", title, message)
def dialogCreateDatePicker(self, year, month, day):
    '''
    dialogCreateDatePicker(
        Integer year[optional, default 1970],
        Integer month[optional, default 1],
        Integer day[optional, default 1])
    Create date picker dialog.
    '''
    return self._rpc("dialogCreateDatePicker", year, month, day)
def dialogCreateHorizontalProgress(self, title, message, maximum):
    '''
    dialogCreateHorizontalProgress(
        String title[optional],
        String message[optional],
        Integer maximum progress[optional, default 100])
    Create a horizontal progress dialog.
    '''
    return self._rpc("dialogCreateHorizontalProgress", title, message, maximum)
def dialogCreateInput(self, title, message, defaultText, inputType):
    '''
    dialogCreateInput(
        String title[optional, default Value]: title of the input box,
        String message[optional, default Please enter value:]: message to display above the input box,
        String defaultText[optional]: text to insert into the input box,
        String inputType[optional]: type of input data, ie number or text)
    Create a text input dialog.
    '''
    return self._rpc("dialogCreateInput", title, message, defaultText, inputType)
def dialogCreatePassword(self, title, message):
    '''
    dialogCreatePassword(
        String title[optional, default Password]: title of the input box,
        String message[optional, default Please enter password:]: message to display above the input box)
    Create a password input dialog.
    '''
    return self._rpc("dialogCreatePassword", title, message)
def dialogCreateSeekBar(self, starting, maximum, title: str, message: str):
    '''
    dialogCreateSeekBar(
        Integer starting value[optional, default 50],
        Integer maximum value[optional, default 100],
        String title,
        String message)
    Create seek bar dialog.
    '''
    return self._rpc("dialogCreateSeekBar", starting, maximum, title, message)
def dialogCreateSpinnerProgress(self, title, message, maximum):
    '''
    dialogCreateSpinnerProgress(
        String title[optional],
        String message[optional],
        Integer maximum progress[optional, default 100])
    Create a spinner progress dialog.
    '''
    return self._rpc("dialogCreateSpinnerProgress", title, message, maximum)
def dialogCreateTimePicker(self, hour, minute, is24hour):
    '''
    dialogCreateTimePicker(
        Integer hour[optional, default 0],
        Integer minute[optional, default 0],
        Boolean is24hour[optional, default false]: Use 24 hour clock)
    Create time picker dialog.
    '''
    return self._rpc("dialogCreateTimePicker", hour, minute, is24hour)
def dialogDismiss(self):
    '''
    dialogDismiss()
    Dismiss dialog.
    '''
    return self._rpc("dialogDismiss")
def dialogGetInput(self, title, message, defaultText):
    '''
    dialogGetInput(
        String title[optional, default Value]: title of the input box,
        String message[optional, default Please enter value:]: message to display above the input box,
        String defaultText[optional]: text to insert into the input box)
    Queries the user for a text input.
    '''
    return self._rpc("dialogGetInput", title, message, defaultText)
def dialogGetPassword(self, title, message):
    '''
    dialogGetPassword(
        String title[optional, default Password]: title of the password box,
        String message[optional, default Please enter password:]: message to display above the input box)
    Queries the user for a password.
    '''
    return self._rpc("dialogGetPassword", title, message)
def dialogGetResponse(self):
    '''
    dialogGetResponse()
    Returns dialog response.
    '''
    return self._rpc("dialogGetResponse")
def dialogGetSelectedItems(self):
    '''
    dialogGetSelectedItems()
    This method provides list of items user selected.
    Returns:
        Selected items
    '''
    return self._rpc("dialogGetSelectedItems")
def dialogSetCurrentProgress(self, current: int):
    '''
    dialogSetCurrentProgress(
        Integer current)
    Set progress dialog current value.
    '''
    return self._rpc("dialogSetCurrentProgress", current)
def dialogSetItems(self, items: list):
    '''
    dialogSetItems(
        JSONArray items)
    Set alert dialog list items.
    '''
    return self._rpc("dialogSetItems", items)
def dialogSetMaxProgress(self, max: int):
    '''
    dialogSetMaxProgress(
        Integer max)
    Set progress dialog maximum value.
    '''
    # NOTE: parameter name 'max' shadows the builtin; kept for signature
    # compatibility with existing keyword callers.
    return self._rpc("dialogSetMaxProgress", max)
def dialogSetMultiChoiceItems(self, items: list, selected):
    '''
    dialogSetMultiChoiceItems(
        JSONArray items,
        JSONArray selected[optional]: list of selected items)
    Set dialog multiple choice items and selection.
    '''
    return self._rpc("dialogSetMultiChoiceItems", items, selected)
def dialogSetNegativeButtonText(self, text: str):
    '''
    dialogSetNegativeButtonText(
        String text)
    Set alert dialog button text.
    '''
    return self._rpc("dialogSetNegativeButtonText", text)
def dialogSetNeutralButtonText(self, text: str):
    '''
    dialogSetNeutralButtonText(
        String text)
    Set alert dialog button text.
    '''
    return self._rpc("dialogSetNeutralButtonText", text)
def dialogSetPositiveButtonText(self, text: str):
    '''
    dialogSetPositiveButtonText(
        String text)
    Set alert dialog positive button text.
    '''
    return self._rpc("dialogSetPositiveButtonText", text)
def dialogSetSingleChoiceItems(self, items: list, selected):
    '''
    dialogSetSingleChoiceItems(
        JSONArray items,
        Integer selected[optional, default 0]: selected item index)
    Set dialog single choice items and selected item.
    '''
    return self._rpc("dialogSetSingleChoiceItems", items, selected)
def dialogShow(self):
    '''
    dialogShow()
    Show dialog.
    '''
    return self._rpc("dialogShow")
def fullDismiss(self):
    '''
    fullDismiss()
    Dismiss Full Screen.
    '''
    return self._rpc("fullDismiss")
def fullKeyOverride(self, keycodes: list, enable):
    '''
    fullKeyOverride(
        JSONArray keycodes: List of keycodes to override,
        Boolean enable[optional, default true]: Turn overriding or off)
    Override default key actions
    '''
    return self._rpc("fullKeyOverride", keycodes, enable)
def fullQuery(self):
    '''
    fullQuery()
    Get Fullscreen Properties
    '''
    return self._rpc("fullQuery")
def fullQueryDetail(self, id: str):
    '''
    fullQueryDetail(
        String id: id of layout widget)
    Get fullscreen properties for a specific widget
    '''
    return self._rpc("fullQueryDetail", id)
def fullSetList(self, id: str, list: list):
    '''
    fullSetList(
        String id: id of layout widget,
        JSONArray list: List to set)
    Attach a list to a fullscreen widget
    '''
    # NOTE: parameter names 'id' and 'list' shadow builtins; kept for
    # signature compatibility with existing keyword callers.
    return self._rpc("fullSetList", id, list)
def fullSetProperty(self, id: str, property: str, value: str):
    '''
    fullSetProperty(
        String id: id of layout widget,
        String property: name of property to set,
        String value: value to set property to)
    Set fullscreen widget property
    '''
    return self._rpc("fullSetProperty", id, property, value)
def fullSetTitle(self, title: str):
    '''
    fullSetTitle(
        String title: Activity Title)
    Set the Full Screen Activity Title
    '''
    return self._rpc("fullSetTitle", title)
def fullShow(self, layout: str, title):
    '''
    fullShow(
        String layout: String containing View layout,
        String title[optional]: Activity Title)
    Show Full Screen.
    '''
    return self._rpc("fullShow", layout, title)
def webViewShow(self, url: str, wait):
    '''
    webViewShow(
        String url,
        Boolean wait[optional]: block until the user exits the WebView)
    Display a WebView with the given URL.
    '''
    return self._rpc("webViewShow", url, wait)
#******** WakeLockFacade Functions : min SDK 3 ********
# Only one wake lock is held at a time; wakeLockRelease frees whichever
# of the acquire variants is active.
def wakeLockAcquireBright(self):
    '''
    wakeLockAcquireBright()
    Acquires a bright wake lock (CPU on, screen bright).
    '''
    return self._rpc("wakeLockAcquireBright")
def wakeLockAcquireDim(self):
    '''
    wakeLockAcquireDim()
    Acquires a dim wake lock (CPU on, screen dim).
    '''
    return self._rpc("wakeLockAcquireDim")
def wakeLockAcquireFull(self):
    '''
    wakeLockAcquireFull()
    Acquires a full wake lock (CPU on, screen bright, keyboard bright).
    '''
    return self._rpc("wakeLockAcquireFull")
def wakeLockAcquirePartial(self):
    '''
    wakeLockAcquirePartial()
    Acquires a partial wake lock (CPU on).
    '''
    return self._rpc("wakeLockAcquirePartial")
def wakeLockRelease(self):
    '''
    wakeLockRelease()
    Releases the wake lock.
    '''
    return self._rpc("wakeLockRelease")
#******** WebCamFacade Functions : min SDK 8 ********
def cameraStartPreview(self, resolutionLevel, jpegQuality, filepath):
    '''
    cameraStartPreview(
        Integer resolutionLevel[optional, default 0]: increasing this number provides higher resolution,
        Integer jpegQuality[optional, default 20]: a number from 0-100,
        String filepath[optional]: Path to store jpeg files.)
    Start Preview Mode. Throws 'preview' events.
    Returns:
        True if successful
    '''
    return self._rpc("cameraStartPreview", resolutionLevel, jpegQuality, filepath)
def cameraStopPreview(self):
    '''
    cameraStopPreview()
    Stop the preview mode.
    '''
    return self._rpc("cameraStopPreview")
def webcamAdjustQuality(self, resolutionLevel, jpegQuality):
    '''
    webcamAdjustQuality(
        Integer resolutionLevel[optional, default 0]: increasing this number provides higher resolution,
        Integer jpegQuality[optional, default 20]: a number from 0-100)
    Adjusts the quality of the webcam stream while it is running.
    '''
    return self._rpc("webcamAdjustQuality", resolutionLevel, jpegQuality)
def webcamStart(self, resolutionLevel, jpegQuality, port):
    '''
    webcamStart(
        Integer resolutionLevel[optional, default 0]: increasing this number provides higher resolution,
        Integer jpegQuality[optional, default 20]: a number from 0-100,
        Integer port[optional, default 0]: If port is specified, the webcam service will bind to port, otherwise it will pick any available port.)
    Starts an MJPEG stream and returns a Tuple of address and port for the stream.
    '''
    return self._rpc("webcamStart", resolutionLevel, jpegQuality, port)
def webcamStop(self):
    '''
    webcamStop()
    Stops the webcam stream.
    '''
    return self._rpc("webcamStop")
#******** WifiFacade Functions : min SDK 3 ********
def checkWifiState(self):
    '''
    checkWifiState()
    Checks Wifi state.
    Returns:
        True if Wifi is enabled.
    '''
    return self._rpc("checkWifiState")
def toggleWifiState(self, enabled):
    '''
    toggleWifiState(
        Boolean enabled[optional])
    Toggle Wifi on and off.
    Returns:
        True if Wifi is enabled.
    '''
    return self._rpc("toggleWifiState", enabled)
def wifiDisconnect(self):
    '''
    wifiDisconnect()
    Disconnects from the currently active access point.
    Returns:
        True if the operation succeeded.
    '''
    return self._rpc("wifiDisconnect")
def wifiGetConnectionInfo(self):
    '''
    wifiGetConnectionInfo()
    Returns information about the currently active access point.
    '''
    return self._rpc("wifiGetConnectionInfo")
def wifiGetScanResults(self):
    '''
    wifiGetScanResults()
    Returns the list of access points found during the most recent Wifi scan.
    '''
    # Pair with wifiStartScan(): start a scan first, then poll results here.
    return self._rpc("wifiGetScanResults")
def wifiLockAcquireFull(self):
    '''
    wifiLockAcquireFull()
    Acquires a full Wifi lock.
    '''
    return self._rpc("wifiLockAcquireFull")
def wifiLockAcquireScanOnly(self):
    '''
    wifiLockAcquireScanOnly()
    Acquires a scan only Wifi lock.
    '''
    return self._rpc("wifiLockAcquireScanOnly")
def wifiLockRelease(self):
    '''
    wifiLockRelease()
    Releases a previously acquired Wifi lock.
    '''
    return self._rpc("wifiLockRelease")
def wifiReassociate(self):
    '''
    wifiReassociate()
    Reassociates with the currently active access point.
    Returns:
        True if the operation succeeded.
    '''
    return self._rpc("wifiReassociate")
def wifiReconnect(self):
    '''
    wifiReconnect()
    Reconnects to the currently active access point.
    Returns:
        True if the operation succeeded.
    '''
    return self._rpc("wifiReconnect")
def wifiStartScan(self):
    '''
    wifiStartScan()
    Starts a scan for Wifi access points.
    Returns:
        True if the scan was initiated successfully.
    '''
    return self._rpc("wifiStartScan")
| {
"repo_name": "brousch/sl4a_pydroid_mock_api",
"path": "src/android/utils/androidhelper_r6.py",
"copies": "1",
"size": "83027",
"license": "bsd-3-clause",
"hash": 4184474359236994000,
"line_mean": 29.0497382199,
"line_max": 187,
"alpha_frac": 0.6031411469,
"autogenerated": false,
"ratio": 4.819305781286278,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5922446928186279,
"avg_score": null,
"num_lines": null
} |
# Verbatim User-Agent header values for impersonating common browsers and
# devices in HTTP requests. These are plain string constants; versions are
# frozen snapshots and are not kept up to date automatically.

# Mobile / tablet
ANDROID = ("Mozilla/5.0 (Linux; Android 7.1.1; SM-J510FN Build/NMF26X) "
           "AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.87 Mobile Safari/537.36")
# Desktop Chrome variants
CHROME = ("Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
          "AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.75 Safari/537.36")
CHROME_OS = ("Mozilla/5.0 (X11; CrOS armv7l 10718.34.0) "
             "AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.40 Safari/537.36")
EDGE = ("Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
        "AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.140 Safari/537.36 Edge/17.17134")
# Firefox
FIREFOX = "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:61.0) Gecko/20100101 Firefox/61.0"
FIREFOX_MAC = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.13; rv:61.0) Gecko/20100101 Firefox/61.0"
# Internet Explorer, oldest to newest
IE_6 = "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.0; WOW64; Trident/4.0; SLCC1)"
IE_7 = "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0; WOW64; Trident/4.0; SLCC1)"
IE_8 = "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; WOW64; Trident/4.0; SLCC1)"
IE_9 = "Mozilla/5.0 (MSIE 9.0; Windows NT 6.1; Trident/5.0)"
IE_11 = "Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; rv:11.0) like Gecko"
# Apple mobile devices; IPHONE_6 is an alias of IPHONE
IPAD = ("Mozilla/5.0 (iPad; CPU OS 11_4 like Mac OS X) "
        "AppleWebKit/605.1.15 (KHTML, like Gecko) Version/11.0 Mobile/15E148 Safari/604.1")
IPHONE = IPHONE_6 = ("Mozilla/5.0 (iPhone; CPU iPhone OS 11_4 like Mac OS X) "
                     "AppleWebKit/605.1.15 (KHTML, like Gecko) Version/11.0 Mobile/15E148 Safari/604.1")
# Other desktop browsers
OPERA = ("Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
         "AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.87 Safari/537.36 OPR/54.0.2952.64")
SAFARI = ("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) "
          "AppleWebKit/603.3.8 (KHTML, like Gecko) Version/10.1.2 Safari/603.3.8")
SAFARI_8 = ("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_4) "
            "AppleWebKit/600.7.12 (KHTML, like Gecko) Version/8.0.7 Safari/600.7.12")
SAFARI_7 = ("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) "
            "AppleWebKit/537.75.14 (KHTML, like Gecko) Version/7.0.3 Safari/7046A194A")
WINDOWS_PHONE_8 = ("Mozilla/5.0 (compatible; MSIE 10.0; Windows Phone 8.0; "
                   "Trident/6.0; IEMobile/10.0; ARM; Touch; NOKIA; Lumia 920)")
| {
"repo_name": "back-to/streamlink",
"path": "src/streamlink/plugin/api/useragents.py",
"copies": "1",
"size": "2288",
"license": "bsd-2-clause",
"hash": -3584771509552638000,
"line_mean": 77.8965517241,
"line_max": 104,
"alpha_frac": 0.6512237762,
"autogenerated": false,
"ratio": 2.3228426395939086,
"config_test": false,
"has_no_keywords": true,
"few_assignments": false,
"quality_score": 0.8445719349907999,
"avg_score": 0.005669413177182014,
"num_lines": 29
} |
""" Android phone worker, OS version >5
"""
import logging
import re
import pkg_resources
import time
import threading
import subprocess
import pandas as pd
from netort.data_processing import Drain, get_nowait_from_queue
from netort.resource import manager as resource
from volta.common.interfaces import Phone
from volta.common.util import LogParser, Executioner
import warnings
warnings.filterwarnings("ignore", category=FutureWarning) # pandas sorting warnings
logger = logging.getLogger(__name__)
# Default verbose regexp (compiled with re.VERBOSE | re.IGNORECASE in
# AndroidPhone.__init__) for splitting a logcat line into named groups
# 'date', 'time' and 'value' (the message tail). The four unnamed \S+
# fields in between are skipped — presumably PID/TID/level/tag for the
# logcat output format in use; TODO confirm against the device's format.
event_regexp = r"""
^(?P<date>\S+)
\s+
(?P<time>\S+)
\s+
\S+
\s+
\S+
\s+
\S+
\s+
\S+
\s+
(?P<value>.*)
$
"""
class AndroidPhone(Phone):
    """ Android phone worker class - work w/ phone, read phone logs, run test apps and store data

    Attributes:
        source (string): path to data source, phone id (adb devices)
        lightning_apk_path (string, optional): path to lightning app
            may be url, e.g. 'http://myhost.tld/path/to/file'
            may be path to file, e.g. '/home/users/netort/path/to/file.apk'
        lightning_apk_class (string, optional): lightning class
        test_apps (list, optional): list of apps to be installed to device for test
        test_class (string, optional): app class to be started during test execution
        test_package (string, optional): app package to be started during test execution
        test_runner (string, optional): app runner to be started during test execution
    """

    def __init__(self, config, core):
        """
        Args:
            config (VoltaConfig): module configuration data
            core: volta core, provides the data_session used to register metrics
        """
        Phone.__init__(self, config, core)
        self.logcat_stdout_reader = None
        self.logcat_stderr_reader = None
        # mandatory options
        self.source = config.get_option('phone', 'source')
        # lightning app configuration
        self.lightning_apk_path = config.get_option(
            'phone', 'lightning', pkg_resources.resource_filename(
                'volta.providers.phones', 'binary/lightning-new3.apk'
            )
        )
        self.lightning_apk_class = config.get_option('phone', 'lightning_class')
        self.lightning_apk_fname = None
        # test app configuration
        self.test_apps = config.get_option('phone', 'test_apps')
        self.test_class = config.get_option('phone', 'test_class')
        self.test_package = config.get_option('phone', 'test_package')
        self.test_runner = config.get_option('phone', 'test_runner')
        self.cleanup_apps = config.get_option('phone', 'cleanup_apps')
        try:
            self.compiled_regexp = re.compile(
                config.get_option('phone', 'event_regexp', event_regexp), re.VERBOSE | re.IGNORECASE
            )
        # FIX: re.compile() raises re.error on a bad pattern, not SyntaxError;
        # the previous `except SyntaxError` never fired, so an invalid
        # user-supplied regexp escaped without the explanatory RuntimeError.
        except re.error:
            logger.debug('Unable to parse specified regexp', exc_info=True)
            raise RuntimeError(
                "Unable to parse specified regexp: %s" % config.get_option('phone', 'event_regexp', event_regexp)
            )
        self.logcat_pipeline = None
        self.test_performer = None
        self.phone_q = None
        subprocess.call('adb start-server', shell=True)  # start adb server
        self.__test_interaction_with_phone()
        self.worker = None
        self.closed = False
        self.shellexec_metrics = config.get_option('phone', 'shellexec_metrics')
        self.shellexec_executor = threading.Thread(target=self.__shell_executor)
        # `daemon` attribute instead of the deprecated setDaemon()
        self.shellexec_executor.daemon = True
        self.shellexec_executor.start()
        self.my_metrics = {}
        self.__create_my_metrics()

    def __create_my_metrics(self):
        """Register the 'events' metric and one true metric per shellexec metric."""
        self.my_metrics['events'] = self.core.data_session.new_event_metric(
            name='events',
            source='phone',
            **self.config.get_option('phone', 'meta', {})
        )
        for key, value in self.shellexec_metrics.items():
            self.my_metrics[key] = self.core.data_session.new_true_metric(
                # FIX: was name='key' (a literal), which registered every
                # shellexec metric under the same name instead of its own key.
                name=key,
                source='phone',
                _apply=value.get('apply') if value.get('apply') else '',
            )

    def __test_interaction_with_phone(self):
        """Run `adb get-state` for the device and fail fast on bad state/errors."""
        def read_process_queues_and_report(outs_q, errs_q):
            # stdout: 'unknown' means the device is not authorized/ready
            outputs = get_nowait_from_queue(outs_q)
            for chunk in outputs:
                logger.debug('Command output: %s', chunk.strip())
                if chunk.strip() == 'unknown':
                    worker.close()
                    raise RuntimeError(
                        'Phone "%s" has an unknown state. Please check device authorization and state' % self.source
                    )
            # any stderr output is treated as a fatal connection problem
            errors = get_nowait_from_queue(errs_q)
            if errors:
                worker.close()
                raise RuntimeError(
                    'There were errors trying to test connection to the phone %s. Errors :%s' % (
                        self.source, errors
                    )
                )

        cmd = "adb -s {device_id} get-state".format(device_id=self.source)
        # get-state
        worker = Executioner(cmd)
        outs_q, errs_q = worker.execute()
        while worker.is_finished() is None:
            read_process_queues_and_report(outs_q, errs_q)
            time.sleep(1)
        read_process_queues_and_report(outs_q, errs_q)
        # NOTE(review): this condition (`stdout empty AND stderr non-empty`)
        # looks inverted — it was probably meant to wait until both queues
        # drain. Kept behaviorally identical, only de-obfuscated from
        # `not outs_q.qsize() != 0 and errs_q.qsize() != 0`.
        while outs_q.qsize() == 0 and errs_q.qsize() != 0:
            time.sleep(0.5)
        worker.close()
        logger.info('Command \'%s\' executed on device %s. Retcode: %s', cmd, self.source, worker.is_finished())

    def adb_execution(self, cmd):
        """Execute an adb command, stream its output to the log, raise on non-zero exit.

        Args:
            cmd (str): full adb command line

        Raises:
            RuntimeError: if the command finished with a non-zero return code
        """
        def read_process_queues_and_report(outs_q, errs_q):
            outputs = get_nowait_from_queue(outs_q)
            for chunk in outputs:
                logger.debug('Command \'%s\' output: %s', cmd, chunk.strip())
            errors = get_nowait_from_queue(errs_q)
            for err_chunk in errors:
                logger.warning('Errors in command \'%s\' output: %s', cmd, err_chunk.strip())

        worker = Executioner(cmd)
        outs_q, errs_q = worker.execute()
        while worker.is_finished() is None:
            read_process_queues_and_report(outs_q, errs_q)
            time.sleep(1)
        read_process_queues_and_report(outs_q, errs_q)
        # NOTE(review): same suspicious wait condition as in
        # __test_interaction_with_phone — kept behaviorally identical.
        while outs_q.qsize() == 0 and errs_q.qsize() != 0:
            time.sleep(0.5)
        worker.close()
        logger.info('Command \'%s\' executed on device %s. Retcode: %s', cmd, self.source, worker.is_finished())
        if worker.is_finished() != 0:
            raise RuntimeError('Failed to execute adb command \'%s\'' % cmd)

    def prepare(self):
        """ Phone preparation: install apps etc

        pipeline:
            install lightning
            install apks
            clean log
        """
        # apps cleanup
        for apk in self.cleanup_apps:
            self.adb_execution("adb -s {device_id} uninstall {app}".format(device_id=self.source, app=apk))
        # install lightning
        self.lightning_apk_fname = resource.get_opener(self.lightning_apk_path).get_filename
        logger.info('Installing lightning apk...')
        self.adb_execution(
            "adb -s {device_id} install -r -d -t {apk}".format(device_id=self.source, apk=self.lightning_apk_fname)
        )
        # install apks
        for apk in self.test_apps:
            apk_fname = resource.get_opener(apk).get_filename
            self.adb_execution("adb -s {device_id} install -r -d -t {apk}".format(device_id=self.source, apk=apk_fname))
        # clean logcat
        self.adb_execution("adb -s {device_id} logcat -c".format(device_id=self.source))

    def start(self, results):
        """ Grab stage: starts log reader, make sync w/ flashlight

        Args:
            results (queue-like object): Phone should put there dataframes, format: ['sys_uts', 'message']
        """
        self.phone_q = results
        self.__start_async_logcat()
        # start flashes app
        # NOTE(review): both {package} and {runner} are formatted with
        # lightning_apk_class — looks intentional for this apk, but confirm.
        self.adb_execution(
            "adb -s {device_id} shell am start -n {package}/{runner}.MainActivity".format(
                device_id=self.source,
                package=self.lightning_apk_class,
                runner=self.lightning_apk_class
            )
        )
        logger.info('Waiting additional 15 seconds till flashlight app end its work...')
        time.sleep(15)

    def __start_async_logcat(self):
        """ Start logcat read in subprocess and make threads to read its stdout/stderr to queues """
        cmd = "adb -s {device_id} logcat".format(device_id=self.source)
        self.worker = Executioner(cmd)
        out_q, err_q = self.worker.execute()
        # pipe parsed logcat events straight into the 'events' metric
        self.logcat_pipeline = Drain(
            LogParser(
                out_q, self.compiled_regexp, self.config.get_option('phone', 'type')
            ),
            self.my_metrics['events']
        )
        self.logcat_pipeline.start()

    def run_test(self):
        """ App stage: run app/phone tests """
        if self.test_package:
            cmd = "adb -s {device_id} shell am instrument -w -e class {test_class} {test_package}/{test_runner}".format(
                test_class=self.test_class,
                device_id=self.source,
                test_package=self.test_package,
                test_runner=self.test_runner
            )
        else:
            logger.info('Infinite loop for volta because there are no tests specified, waiting for SIGINT')
            cmd = '/bin/bash -c \'while [ 1 ]; do sleep 1; done\''
        logger.info('Command \'%s\' executing...', cmd)
        self.test_performer = Executioner(cmd)
        self.test_performer.execute()

    def end(self):
        """ Stop test and grabbers """
        self.closed = True
        if self.worker:
            self.worker.close()
        if self.test_performer:
            self.test_performer.close()
        if self.logcat_pipeline:
            self.logcat_pipeline.close()
        # apps cleanup
        for apk in self.cleanup_apps:
            self.adb_execution("adb -s {device_id} uninstall {app}".format(device_id=self.source, app=apk))

    def close(self):
        """Nothing to release beyond what end() already closes."""
        pass

    def get_info(self):
        """Return a dict with grabber queue size and test performer status."""
        data = {}
        if self.phone_q:
            data['grabber_queue_size'] = self.phone_q.qsize()
        if self.test_performer:
            data['test_performer_is_finished'] = self.test_performer.is_finished()
        return data

    def __shell_executor(self):
        """Background loop: periodically run each shellexec metric's command
        and push (ts, value) rows into the corresponding metric."""
        while not self.closed:
            for key, value in self.shellexec_metrics.items():
                try:
                    # NOTE(review): 'last_ts' is stored as an offset relative
                    # to 'start_time' but compared against absolute epoch
                    # microseconds, so this condition is effectively always
                    # true — likely a latent bug, behavior kept as-is.
                    if not self.shellexec_metrics[key].get('last_ts') \
                            or self.shellexec_metrics[key]['last_ts'] < int(time.time()) * 10**6:
                        metric_value = self.__execute_shellexec_metric(value.get('cmd'))
                        ts = int(time.time()) * 10 ** 6
                        # first sample anchors the relative timeline at 0
                        if not value.get('start_time'):
                            self.shellexec_metrics[key]['start_time'] = ts
                            ts = 0
                        else:
                            ts = ts - self.shellexec_metrics[key]['start_time']
                        self.shellexec_metrics[key]['last_ts'] = ts
                        self.my_metrics[key].put(
                            pd.DataFrame(
                                data={
                                    ts:
                                        {'ts': ts, 'value': metric_value}
                                },
                            ).T
                        )
                    else:
                        continue
                except Exception:
                    logger.warning('Failed to collect shellexec metric: %s', key)
                    logger.debug('Failed to collect shellexec metric: %s', key, exc_info=True)
            time.sleep(0.1)

    @staticmethod
    def __execute_shellexec_metric(cmd):
        """Run `cmd` in a shell and return its stripped stdout."""
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
        (stdout, stderr) = proc.communicate()
        return stdout.strip()
| {
"repo_name": "yandex-load/volta",
"path": "volta/providers/phones/android.py",
"copies": "1",
"size": "12104",
"license": "mpl-2.0",
"hash": 327829086066743740,
"line_mean": 37.7948717949,
"line_max": 120,
"alpha_frac": 0.5583278255,
"autogenerated": false,
"ratio": 3.9349804941482445,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4993308319648245,
"avg_score": null,
"num_lines": null
} |
""" Android phone worker, OS version below 5
"""
import logging
import re
import queue as q
import time
import pkg_resources
from volta.common.interfaces import Phone
from volta.common.util import LogReader
from netort.data_processing import Drain
from netort.process import execute, popen
from netort.resource import manager as resource
logger = logging.getLogger(__name__)
# Default verbose regexp (compiled with re.VERBOSE | re.IGNORECASE in
# AndroidOldPhone.__init__) for splitting a logcat line into named groups
# 'date', 'time' and 'message'. The three unnamed \S+ fields in between are
# skipped — presumably PID/level/tag for the pre-Android-5 logcat format;
# TODO confirm against the device's output.
event_regexp = r"""
^(?P<date>\S+)
\s+
(?P<time>\S+)
\s+
\S+
\s+
\S+
\s+
\S+
\s+
(?P<message>.*)
$
"""
class AndroidOldPhone(Phone):
""" Android Old phone worker class - work w/ phone, read phone logs, run test apps and store data
Attributes:
source (string): path to data source, phone id (adb devices)
unplug_type (string): type of test execution
`auto`: disable battery charge (by software) or use special USB cord limiting charge over USB
`manual`: disconnect the phone from USB by hand during test execution and start your test manually
lightning_apk_path (string, optional): path to lightning app
may be url, e.g. 'http://myhost.tld/path/to/file'
may be path to file, e.g. '/home/users/netort/path/to/file.apk'
lightning_apk_class (string, optional): lightning class
test_apps (list, optional): list of apps to be installed to device for test
test_class (string, optional): app class to be started during test execution
test_package (string, optional): app package to be started during test execution
test_runner (string, optional): app runner to be started during test execution
Todo:
unplug_type manual - remove raw_input()
"""
def __init__(self, config):
    """
    Args:
        config (VoltaConfig): module configuration data

    Raises:
        RuntimeError: if the configured 'event_regexp' cannot be compiled
    """
    Phone.__init__(self, config)
    self.logcat_stdout_reader = None
    self.logcat_stderr_reader = None
    # mandatory options
    self.source = config.get_option('phone', 'source')
    #self.unplug_type = config.get('unplug_type', 'auto')
    # lightning app configuration
    self.lightning_apk_path = config.get_option(
        'phone', 'lightning', pkg_resources.resource_filename(
            'volta.providers.phones', 'binary/lightning-new3.apk'
        )
    )
    self.lightning_apk_class = config.get_option('phone', 'lightning_class')
    self.lightning_apk_fname = None
    # test app configuration
    self.test_apps = config.get_option('phone', 'test_apps')
    self.test_class = config.get_option('phone', 'test_class')
    self.test_package = config.get_option('phone', 'test_package')
    self.test_runner = config.get_option('phone', 'test_runner')
    self.cleanup_apps = config.get_option('phone', 'cleanup_apps')
    self.regexp = config.get_option('phone', 'event_regexp', event_regexp)
    try:
        self.compiled_regexp = re.compile(self.regexp, re.VERBOSE | re.IGNORECASE)
    # FIX: was a bare `except:`, which also swallowed KeyboardInterrupt and
    # SystemExit; re.compile signals a bad pattern with re.error.
    except re.error:
        logger.debug('Unable to parse specified regexp', exc_info=True)
        raise RuntimeError("Unable to parse specified regexp")
    self.test_performer = None
def prepare(self):
""" Phone preparements stage: install apps etc
pipeline:
install lightning
install apks
clean log
"""
# apps cleanup
for apk in self.cleanup_apps:
execute("adb -s {device_id} uninstall {app}".format(device_id=self.source, app=apk))
# install lightning
self.lightning_apk_fname = resource.get_opener(self.lightning_apk_path).get_filename
logger.info('Installing lightning apk...')
execute("adb -s {device_id} install -r -d -t {apk}".format(device_id=self.source, apk=self.lightning_apk_fname))
# install apks
for apk in self.test_apps:
apk_fname = resource.get_opener(apk).get_filename
execute("adb -s {device_id} install -r -d -t {apk}".format(device_id=self.source, apk=apk_fname))
# clean logcat
execute("adb -s {device_id} logcat -c".format(device_id=self.source))
# unplug device or start logcat
#if self.unplug_type == 'manual':
# logger.info('Detach the phone %s from USB and press enter to continue...', self.source)
# # TODO make API and remove this
# raw_input()
def start(self, results):
""" Grab stage: starts log reader, make sync w/ flashlight
pipeline:
if uplug_type is manual:
remind user to start flashlight app
if unplug_type is auto:
start async logcat reader
start lightning flashes
Args:
results (queue-like object): Phone should put there dataframes, format: ['sys_uts', 'message']
"""
self.phone_q = results
#if self.unplug_type == 'manual':
# logger.info("It's time to start flashlight app!")
# return
#if self.unplug_type == 'auto':
self.__start_async_logcat()
# start flashes app
execute(
"adb -s {device_id} shell am start -n {package}/{runner}.MainActivity".format(
device_id=self.source,
package=self.lightning_apk_class,
runner=self.lightning_apk_class
)
)
logger.info('Waiting 15 seconds till flashlight app end its work...')
time.sleep(15)
return
def __start_async_logcat(self):
""" Start logcat read in subprocess and make threads to read its stdout/stderr to queues """
cmd = "adb -s {device_id} logcat -v time".format(device_id=self.source)
logger.debug("Execute : %s", cmd)
self.logcat_process = popen(cmd)
self.logcat_reader_stdout = LogReader(self.logcat_process.stdout, self.compiled_regexp)
self.drain_logcat_stdout = Drain(self.logcat_reader_stdout, self.phone_q)
self.drain_logcat_stdout.start()
self.phone_q_err=q.Queue()
self.logcat_reader_stderr = LogReader(self.logcat_process.stderr, self.compiled_regexp)
self.drain_logcat_stderr = Drain(self.logcat_reader_stderr, self.phone_q_err)
self.drain_logcat_stderr.start()
def run_test(self):
""" App stage: run app/phone tests """
if self.test_package:
command = "adb -s {device_id} shell am instrument -w -e class {test_class} {test_package}/{test_runner}".format(
test_class=self.test_class,
device_id=self.source,
test_package=self.test_package,
test_runner=self.test_runner
)
else:
logger.info('Infinite loop for volta because there are no tests specified, waiting for SIGINT')
command = 'while [ 1 ]; do sleep 1; done'
self.test_performer = PhoneTestPerformer(command)
self.test_performer.start()
return
def end(self):
""" Stop test and grabbers """
if self.test_performer:
self.test_performer.close()
self.logcat_reader_stdout.close()
self.logcat_reader_stderr.close()
self.logcat_process.kill()
self.drain_logcat_stdout.close()
self.drain_logcat_stderr.close()
# apps cleanup
for apk in self.cleanup_apps:
execute("adb -s {device_id} uninstall {app}".format(device_id=self.source, app=apk))
return
def get_info(self):
data = {}
if self.drain_logcat_stdout:
data['grabber_alive'] = self.drain_logcat_stdout.isAlive()
if self.phone_q:
data['grabber_queue_size'] = self.phone_q.qsize()
if self.test_performer:
data['test_performer_alive'] = self.test_performer.isAlive()
return data
| {
"repo_name": "yandex-load/volta",
"path": "volta/providers/phones/android_old.py",
"copies": "1",
"size": "7907",
"license": "mpl-2.0",
"hash": -3099440624369598000,
"line_mean": 36.6523809524,
"line_max": 124,
"alpha_frac": 0.6070570381,
"autogenerated": false,
"ratio": 3.8439474963539135,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9915135415344951,
"avg_score": 0.007173823821792285,
"num_lines": 210
} |
"""Android SDK support"""
import os
import sys
if sys.version_info[0] >= 3:
from urllib.request import urlretrieve
else:
from urllib import urlretrieve
import zipfile
import subprocess
import hashlib
from mod import log, util
from mod.tools import java, javac
# download URLs of the Android sdk-tools archive, keyed by host platform
# as returned by util.get_host_platform()
tools_urls = {
    'win': 'https://dl.google.com/android/repository/sdk-tools-windows-3859397.zip',
    'osx': 'https://dl.google.com/android/repository/sdk-tools-darwin-3859397.zip',
    'linux': 'https://dl.google.com/android/repository/sdk-tools-linux-3859397.zip'
}
# local filenames the downloaded archives are stored under, same keys
tools_archives = {
    'win': 'sdk-tools-windows-3859397.zip',
    'osx': 'sdk-tools-darwin-3859397.zip',
    'linux': 'sdk-tools-linux-3859397.zip'
}
#-------------------------------------------------------------------------------
def get_sdk_dir(fips_dir) :
    """Root directory of the Android SDK inside the fips workspace."""
    workspace = util.get_workspace_dir(fips_dir)
    return workspace + '/fips-sdks/android'
#-------------------------------------------------------------------------------
def check_exists(fips_dir) :
    """Check whether the Android SDK directory exists in the workspace."""
    sdk_dir = get_sdk_dir(fips_dir)
    return os.path.isdir(sdk_dir)
#-------------------------------------------------------------------------------
def get_adb_path(fips_dir):
    """Path of the adb executable inside the installed SDK."""
    return '{}/platform-tools/adb'.format(get_sdk_dir(fips_dir))
#-------------------------------------------------------------------------------
def get_tools_url() :
    """Download URL of the sdk-tools archive for this host platform."""
    host = util.get_host_platform()
    return tools_urls[host]
#-------------------------------------------------------------------------------
def get_tools_archive_path(fips_dir):
    """Local path the sdk-tools archive is downloaded to."""
    archive_name = tools_archives[util.get_host_platform()]
    return '{}/{}'.format(get_sdk_dir(fips_dir), archive_name)
#-------------------------------------------------------------------------------
def target_to_package_name(target):
    """Convert a cmake target name into a valid Android package name.

    Dashes are not legal in package identifiers, so '-' is replaced by '_'.
    NOTE: the android-create-apk.py helper script (run as a build job)
    applies the same rules and must stay in sync with this function.
    """
    sanitized = target.replace('-', '_')
    return 'org.fips.{}'.format(sanitized)
#-------------------------------------------------------------------------------
def install_package(fips_dir, pkg):
    """Install one Android SDK package through the sdkmanager CLI tool."""
    log.colored(log.BLUE, '>>> install Android SDK package: {}'.format(pkg))
    sdkmgr_dir = '{}/tools/bin/'.format(get_sdk_dir(fips_dir))
    # pkg comes from our own hardcoded call sites, so shell=True is acceptable
    cmd = '{}sdkmanager --verbose {}'.format(sdkmgr_dir, pkg)
    subprocess.call(cmd, cwd=sdkmgr_dir, shell=True)
#-------------------------------------------------------------------------------
def ensure_sdk_dirs(fips_dir) :
    """Create the workspace SDK directory if it doesn't exist yet."""
    sdk_dir = get_sdk_dir(fips_dir)
    if not os.path.isdir(sdk_dir) :
        os.makedirs(sdk_dir)
#-------------------------------------------------------------------------------
def uncompress(fips_dir, path) :
    """Unpack the sdk-tools archive into the SDK directory.

    python's zip module doesn't preserve executable flags, so on Linux
    and OSX the system 'unzip' tool is used instead.
    """
    dst_dir = get_sdk_dir(fips_dir)
    host = util.get_host_platform()
    if host == 'osx' or host == 'linux':
        subprocess.call('unzip {}'.format(path), cwd=dst_dir, shell=True)
    else:
        with zipfile.ZipFile(path, 'r') as archive:
            archive.extractall(dst_dir)
#-------------------------------------------------------------------------------
def compute_sha256(path, converter=lambda x: x, chunk_size=65536) :
    """Return the hex sha256 digest of a file, or None if it doesn't exist.

    Every chunk is passed through 'converter' before hashing, which is
    used to ignore whitespace-only differences between toolchain files.
    """
    if not os.path.isfile(path) :
        return None
    digest = hashlib.sha256()
    with open(path, 'rb') as f :
        for chunk in iter(lambda: f.read(chunk_size), b'') :
            digest.update(converter(chunk))
    return digest.hexdigest()
#-------------------------------------------------------------------------------
def strip_whitespace(bin_str) :
    """Remove all ASCII whitespace characters from a byte string.

    Used to compare cmake toolchain files while ignoring formatting-only
    differences. bytes.translate with a delete table strips all six
    whitespace characters in a single C-level pass instead of six chained
    replace() calls; works on both py2 str and py3 bytes.
    """
    return bin_str.translate(None, b' \t\n\r\x0b\x0c')
#-------------------------------------------------------------------------------
def setup(fips_dir, proj_dir) :
    """setup the Android SDK and NDK

    Downloads the sdk-tools archive, unpacks it into the workspace SDK
    directory and installs the required SDK/NDK packages via sdkmanager.

    :param fips_dir: absolute path of the fips directory
    :param proj_dir: absolute path of the current project (unused here)
    """
    log.colored(log.YELLOW, '=== setup Android SDK/NDK :')
    # first make sure that java is present, otherwise the Android
    # SDK setup will finish without errors, but is not actually usable
    if not java.check_exists(fips_dir) or not javac.check_exists(fips_dir) :
        log.error("please install Java JDK 8 (see './fips diag tools')")
    ensure_sdk_dirs(fips_dir)
    # download the command line tools archive
    tools_archive_path = get_tools_archive_path(fips_dir)
    tools_url = get_tools_url()
    log.info("downloading '{}'...".format(tools_url))
    urlretrieve(tools_url, tools_archive_path, util.url_download_hook)
    log.info("\nunpacking '{}'...".format(tools_archive_path))
    uncompress(fips_dir, tools_archive_path)
    # install the required SDK components through sdkmanager
    install_package(fips_dir, '"platforms;android-28"')
    install_package(fips_dir, '"build-tools;29.0.3"')
    install_package(fips_dir, 'platform-tools')
    install_package(fips_dir, 'ndk-bundle')
    # check for potentially breaking changes in build setup: compare the
    # NDK's cmake toolchain file against the snapshot fips was written for,
    # ignoring whitespace-only differences
    fips_cmake = fips_dir + '/cmake-toolchains/android.toolchain.orig'
    ndk_cmake = get_sdk_dir(fips_dir) + '/ndk-bundle/build/cmake/android.toolchain.cmake'
    if compute_sha256(ndk_cmake, strip_whitespace) != compute_sha256(fips_cmake, strip_whitespace) :
        log.warn('android.toolchain.cmake in fips might be outdated...')
    log.colored(log.GREEN, "done.")
| {
"repo_name": "floooh/fips",
"path": "mod/android.py",
"copies": "1",
"size": "5417",
"license": "mit",
"hash": -6341821918640888000,
"line_mean": 40.3511450382,
"line_max": 100,
"alpha_frac": 0.5499353886,
"autogenerated": false,
"ratio": 3.7565880721220526,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9761946216551611,
"avg_score": 0.008915448834088142,
"num_lines": 131
} |
"""Android SDK support"""
import os
import urllib
import zipfile
import subprocess
from mod import log, util
from mod.tools import java
# download URLs of the (r22.6.2) Android SDK archive per host platform
sdk_urls = {
    'win' : 'http://dl.google.com/android/android-sdk_r22.6.2-windows.zip',
    'osx' : 'http://dl.google.com/android/android-sdk_r22.6.2-macosx.zip',
    'linux' : 'http://dl.google.com/android/android-sdk_r22.6.2-linux.tgz'
}
# local filenames the SDK downloads are stored under
sdk_archives = {
    'win' : 'android-sdk_r22.6.2-windows.zip',
    'osx' : 'android-sdk_r22.6.2-macosx.zip',
    'linux' : 'android-sdk_r22.6.2-linux.tgz'
}
# directory names the SDK archives unpack to
sdk_paths = {
    'win' : 'android-sdk-windows',
    'osx' : 'android-sdk-macosx',
    'linux' : 'android-sdk-linux'
}
# download URLs of the (r9d) Android NDK archive per host platform
ndk_urls = {
    'win' : 'http://dl.google.com/android/ndk/android-ndk-r9d-windows-x86.zip',
    'osx' : 'http://dl.google.com/android/ndk/android-ndk-r9d-darwin-x86_64.tar.bz2',
    'linux' : 'http://dl.google.com/android/ndk/android-ndk-r9d-linux-x86_64.tar.bz2'
}
# local filenames the NDK downloads are stored under
ndk_archives = {
    'win' : 'android-ndk-r9d-windows-x86.zip',
    'osx' : 'android-ndk-r9d-darwin-x86_64.tar.bz2',
    'linux' : 'android-ndk-r9d-linux-x86_64.tar.bz2'
}
#-------------------------------------------------------------------------------
def get_sdk_url() :
    """Download URL of the Android SDK archive for this host platform."""
    host = util.get_host_platform()
    return sdk_urls[host]
#-------------------------------------------------------------------------------
def get_ndk_url() :
    """Download URL of the Android NDK archive for this host platform."""
    host = util.get_host_platform()
    return ndk_urls[host]
#-------------------------------------------------------------------------------
def get_sdk_dir(fips_dir) :
    """Per-platform SDK root directory inside the fips workspace."""
    return '{}/fips-sdks/{}'.format(util.get_workspace_dir(fips_dir),
                                    util.get_host_platform())
#-------------------------------------------------------------------------------
def get_androidsdk_dir(fips_dir) :
    """Directory the Android SDK archive unpacks to."""
    subdir = sdk_paths[util.get_host_platform()]
    return '{}/{}'.format(get_sdk_dir(fips_dir), subdir)
#-------------------------------------------------------------------------------
def get_androidndk_dir(fips_dir) :
    """Directory the Android NDK (r9d) unpacks to."""
    return '{}/android-ndk-r9d'.format(get_sdk_dir(fips_dir))
#-------------------------------------------------------------------------------
def get_androidsdk_archive_path(fips_dir) :
    """Local path the Android SDK archive is downloaded to."""
    archive = sdk_archives[util.get_host_platform()]
    return '{}/{}'.format(get_sdk_dir(fips_dir), archive)
#-------------------------------------------------------------------------------
def get_androidndk_archive_path(fips_dir) :
    """Local path the Android NDK archive is downloaded to."""
    archive = ndk_archives[util.get_host_platform()]
    return '{}/{}'.format(get_sdk_dir(fips_dir), archive)
#-------------------------------------------------------------------------------
def get_adb_path(fips_dir) :
    """Path of the adb executable inside the installed SDK."""
    return '{}/platform-tools/adb'.format(get_androidsdk_dir(fips_dir))
#-------------------------------------------------------------------------------
def ensure_sdk_dirs(fips_dir) :
    """Create the workspace SDK directory if it doesn't exist yet."""
    sdk_dir = get_sdk_dir(fips_dir)
    if not os.path.isdir(sdk_dir) :
        os.makedirs(sdk_dir)
#-------------------------------------------------------------------------------
def uncompress(fips_dir, path) :
    """Unpack a downloaded SDK/NDK archive into the SDK directory.

    :param fips_dir: absolute path of the fips directory
    :param path: path of the archive file to unpack
    """
    if path.endswith('.zip') :
        with zipfile.ZipFile(path, 'r') as archive:
            archive.extractall(get_sdk_dir(fips_dir))
    elif path.endswith(('.bz2', '.tgz')) :
        # BUGFIX: the original condition was `'.bz2' or '.tgz' in path`,
        # which is always truthy ('.bz2' is a non-empty string); test the
        # actual suffixes instead.
        # note: for some reason python's tarfile
        # module cannot completely unpack the
        # Android NDK tar.gz2 file (tested on OSX with python 2.7),
        # so fall back to command line tar
        subprocess.call('tar -xvf {}'.format(path), cwd=get_sdk_dir(fips_dir), shell=True)
#-------------------------------------------------------------------------------
def update_android_sdk(fips_dir, proj_dir) :
    """Run the 'android update sdk' tool to pull the required components.

    :param fips_dir: absolute path of the fips directory
    :param proj_dir: absolute path of the current project (unused here)
    """
    # FIXME: hardcoded version numbers should be configurable
    if util.get_host_platform() == 'win' :
        cmd = '{}/tools/android.bat update sdk -f -u --all --filter tools,platform-tools,build-tools-19.1.0,android-19'.format(get_androidsdk_dir(fips_dir))
    else :
        cmd = 'sh {}/tools/android update sdk -f -u --all --filter tools,platform-tools,build-tools-19.1.0,android-19'.format(get_androidsdk_dir(fips_dir))
    # BUGFIX: was a Python-2-only `print cmd` statement; the function form
    # behaves identically for a single argument and is valid on Python 3 too
    print(cmd)
    subprocess.call(cmd, cwd=fips_dir, shell=True)
#-------------------------------------------------------------------------------
def setup(fips_dir, proj_dir) :
    """setup the Android SDK and NDK

    Downloads and unpacks the SDK and NDK archives into the workspace
    SDK directory (re-using already-downloaded archives) and pulls the
    required SDK components via 'android update sdk'.

    :param fips_dir: absolute path of the fips directory
    :param proj_dir: absolute path of the current project
    """
    log.colored(log.YELLOW, '=== setup Android SDK/NDK :')
    # first make sure that java is present, otherwise the Android
    # SDK setup will finish without errors, but is not actually usable
    if not java.check_exists(fips_dir) :
        log.error("please install java first (see './fips diag tools')")
    ensure_sdk_dirs(fips_dir)
    # download and setup the Android SDK
    sdk_archive_path = get_androidsdk_archive_path(fips_dir)
    if not os.path.isfile(sdk_archive_path) :
        sdk_url = get_sdk_url()
        log.info("downloading '{}'...".format(sdk_url))
        urllib.urlretrieve(sdk_url, sdk_archive_path, util.url_download_hook)
    else :
        log.info("'{}' already exists".format(sdk_archive_path))
    log.info("\nunpacking '{}'...".format(sdk_archive_path))
    uncompress(fips_dir, sdk_archive_path)
    log.info("downloading additional SDK files...")
    update_android_sdk(fips_dir, proj_dir)
    # download the Android NDK
    ndk_archive_path = get_androidndk_archive_path(fips_dir)
    if not os.path.isfile(ndk_archive_path) :
        ndk_url = get_ndk_url()
        log.info("downloading '{}'...".format(ndk_url))
        urllib.urlretrieve(ndk_url, ndk_archive_path, util.url_download_hook)
    else :
        log.info("'{}' already exists".format(ndk_archive_path))
    log.info("\nunpacking '{}'...".format(ndk_archive_path))
    uncompress(fips_dir, ndk_archive_path)
    log.colored(log.GREEN, "done.")
#-------------------------------------------------------------------------------
def check_exists(fips_dir) :
    """check if the android sdk/ndk has been installed"""
    if not os.path.isdir(get_androidsdk_dir(fips_dir)) :
        return False
    return os.path.isdir(get_androidndk_dir(fips_dir))
| {
"repo_name": "mgerhardy/fips",
"path": "mod/android.py",
"copies": "1",
"size": "5896",
"license": "mit",
"hash": 6202272012334493000,
"line_mean": 40.8156028369,
"line_max": 156,
"alpha_frac": 0.5318860244,
"autogenerated": false,
"ratio": 3.3729977116704806,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9329395286798793,
"avg_score": 0.015097689854337617,
"num_lines": 141
} |
""" ands_doi.py """
from django.conf import settings
from django.template.loader import render_to_string
from django.utils.importlib import import_module
from urllib2 import HTTPError
from tardis.tardis_portal.models import ExperimentParameter, \
ExperimentParameterSet, ParameterName, Schema
import re
import urllib2
import logging
logger = logging.getLogger(__name__)
DOI_NAME = 'doi' # the ParameterName.name for the DOI
class DOIService(object):
    """
    DOIService

    Mints DOIs using ANDS' Cite My Data service
    POSTs DataCite XML to a web services endpoint
    """

    def __init__(self, experiment):
        """
        :param experiment: The experiment model object
        :type experiment: :class: `tardis.tardis_portal.models.Experiment`
        :raises Exception: if settings.DOI_ENABLE is absent or falsy
        """
        if hasattr(settings, 'DOI_ENABLE') and settings.DOI_ENABLE:
            self.experiment = experiment
            # DOI_XML_PROVIDER is a dotted "module.ClassName" path
            provider = settings.DOI_XML_PROVIDER
            module_name, constructor_name = provider.rsplit('.', 1)
            module = import_module(module_name)
            constructor = getattr(module, constructor_name)
            self.doi_provider = constructor(experiment)
            self.schema = Schema.objects.get(namespace=settings.DOI_NAMESPACE)
            # BUGFIX: restrict the lookup to the DOI schema; a bare
            # name=DOI_NAME lookup raises MultipleObjectsReturned as soon as
            # any other schema also defines a 'doi' parameter
            self.doi_name = ParameterName.objects.get(
                schema=self.schema, name=DOI_NAME)
        else:
            raise Exception('DOI is not enabled')

    def get_or_mint_doi(self, url):
        """
        Return the experiment's DOI, minting and saving one if necessary.

        :param url: the URL the DOI will resolve to
        :type url: string
        :return: the DOI string
        :rtype string
        """
        doi = self.get_doi()
        if not doi:
            doi = self._mint_doi(url)
            logger.info("minted DOI %s" % doi)
            self._save_doi(doi)
        return doi

    def get_doi(self):
        """
        :return: DOI or None
        :rtype string
        """
        doi_params = ExperimentParameter.objects.filter(
            name=self.doi_name,
            parameterset__schema=self.schema,
            parameterset__experiment=self.experiment)
        if doi_params.count() == 1:
            return doi_params[0].string_value
        return None

    def _save_doi(self, doi):
        """Persist the DOI as an ExperimentParameter and return it."""
        paramset = self._get_or_create_doi_parameterset()
        ep = ExperimentParameter(parameterset=paramset, name=self.doi_name,
                                 string_value=doi)
        ep.save()
        return doi

    def _mint_doi(self, url):
        """POST DataCite XML to the mint endpoint and return the new DOI."""
        headers = {
            'Content-type': 'application/x-www-form-urlencoded',
            'Accept': 'text/plain'
        }
        post_data = 'xml=' + self._datacite_xml()
        base_url = settings.DOI_MINT_URL
        app_id = settings.DOI_APP_ID
        mint_url = "%s?app_id=%s&url=%s" % (base_url, app_id, url)
        doi_response = DOIService._post(mint_url, post_data, headers)
        doi = DOIService._read_doi(doi_response)
        # optionally mirror the minted DOI into the related_info app
        if hasattr(settings, 'DOI_RELATED_INFO_ENABLE') and \
                settings.DOI_RELATED_INFO_ENABLE:
            import tardis.apps.related_info.related_info as ri
            rih = ri.RelatedInfoHandler(self.experiment.id)
            doi_info = {
                ri.type_name: 'website',
                ri.identifier_type_name: 'doi',
                ri.identifier_name: doi,
                ri.title_name: '',
                ri.notes_name: '',
            }
            rih.add_info(doi_info)
        return doi

    def _datacite_xml(self):
        """Return the DataCite XML for the experiment (delegated to provider)."""
        return self.doi_provider.datacite_xml()

    def _get_or_create_doi_parameterset(self):
        """Get or create the ExperimentParameterSet holding the DOI."""
        eps, _ = ExperimentParameterSet.objects.get_or_create(
            experiment=self.experiment, schema=self.schema)
        return eps

    @staticmethod
    def _read_doi(doi_response):
        """Extract the DOI from the mint service's success message."""
        matches = re.match(r'\[MT001\] DOI (.+) was successfully minted.',
                           doi_response)
        if not matches:
            # BUGFIX: original used '+' after a '%s' placeholder, which
            # produced the literal text 'unrecognised response: %s<body>'
            raise Exception('unrecognised response: %s' % doi_response)
        return matches.group(1)

    @staticmethod
    def _post(url, post_data, headers):
        """POST and return the response body; log and re-raise HTTP errors."""
        try:
            request = urllib2.Request(url, post_data, headers)
            response = urllib2.urlopen(request)
            return response.read()
        except HTTPError as e:
            logger.error(e.read())
            raise e
class DOIXMLProvider(object):
    """Provides DataCite XML metadata for a given experiment."""

    def __init__(self, experiment):
        self.experiment = experiment

    def datacite_xml(self):
        """
        :return: datacite XML for self.experiment
        :rtype: string
        """
        import os
        from datetime import date
        from django.template import Context

        template = os.path.join(settings.DOI_TEMPLATE_DIR, 'default.xml')
        exp = self.experiment
        context = Context()
        context['title'] = exp.title
        context['institution_name'] = exp.institution_name
        context['publication_year'] = date.today().year
        context['creator_names'] = \
            [author.author for author in exp.author_experiment_set.all()]
        return render_to_string(template, context_instance=context)
| {
"repo_name": "steveandroulakis/mytardis",
"path": "tardis/tardis_portal/ands_doi.py",
"copies": "2",
"size": "5160",
"license": "bsd-3-clause",
"hash": -1109024231310581800,
"line_mean": 30.6564417178,
"line_max": 93,
"alpha_frac": 0.5831395349,
"autogenerated": false,
"ratio": 3.8943396226415095,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.000730590798408389,
"num_lines": 163
} |
""" ands_doi.py """
import re
import urllib2
from urllib2 import HTTPError
import logging
from django.conf import settings
from django.template.loader import render_to_string
from django.utils.importlib import import_module
from tardis.tardis_portal.models import ExperimentParameter, \
ExperimentParameterSet, ParameterName, Schema
logger = logging.getLogger(__name__)
DOI_NAME = 'doi' # the ParameterName.name for the DOI
class DOIService(object):
    """
    DOIService

    Mints DOIs using ANDS' Cite My Data service
    POSTs DataCite XML to a web services endpoint
    """

    def __init__(self, experiment):
        """
        :param experiment: The experiment model object
        :type experiment: :class: `tardis.tardis_portal.models.Experiment`
        :raises Exception: if settings.DOI_ENABLE is absent or falsy
        """
        if hasattr(settings, 'DOI_ENABLE') and settings.DOI_ENABLE:
            self.experiment = experiment
            # DOI_XML_PROVIDER is a dotted "module.ClassName" path
            provider = settings.DOI_XML_PROVIDER
            module_name, constructor_name = provider.rsplit('.', 1)
            module = import_module(module_name)
            constructor = getattr(module, constructor_name)
            self.doi_provider = constructor(experiment)
            self.schema = Schema.objects.get(namespace=settings.DOI_NAMESPACE)
            # lookup is schema-scoped so other schemas may define 'doi' too
            self.doi_name = ParameterName.objects.get(
                schema=self.schema, name=DOI_NAME)
        else:
            raise Exception('DOI is not enabled')

    def get_or_mint_doi(self, url):
        """
        Return the experiment's DOI, minting and saving one if necessary.

        :param url: the URL the DOI will resolve to
        :type url: string
        :return: the DOI string
        :rtype string
        """
        doi = self.get_doi()
        if not doi:
            doi = self._mint_doi(url)
            logger.info("minted DOI %s" % doi)
            self._save_doi(doi)
        return doi

    def get_doi(self):
        """
        :return: DOI or None
        :rtype string
        """
        doi_params = ExperimentParameter.objects.filter(
            name=self.doi_name,
            parameterset__schema=self.schema,
            parameterset__experiment=self.experiment)
        if doi_params.count() == 1:
            return doi_params[0].string_value
        return None

    def _save_doi(self, doi):
        """Persist the DOI as an ExperimentParameter and return it."""
        paramset = self._get_or_create_doi_parameterset()
        ep = ExperimentParameter(parameterset=paramset, name=self.doi_name,
                                 string_value=doi)
        ep.save()
        return doi

    def _mint_doi(self, url):
        """POST DataCite XML to the mint endpoint and return the new DOI."""
        headers = {
            'Content-type': 'application/x-www-form-urlencoded',
            'Accept': 'text/plain'
        }
        post_data = 'xml=' + self._datacite_xml()
        base_url = settings.DOI_MINT_URL
        app_id = settings.DOI_APP_ID
        mint_url = "%s?app_id=%s&url=%s" % (base_url, app_id, url)
        doi_response = DOIService._post(mint_url, post_data, headers)
        doi = DOIService._read_doi(doi_response)
        # optionally mirror the minted DOI into the related_info app
        if hasattr(settings, 'DOI_RELATED_INFO_ENABLE') and \
                settings.DOI_RELATED_INFO_ENABLE:
            import tardis.apps.related_info.related_info as ri
            rih = ri.RelatedInfoHandler(self.experiment.id)
            doi_info = {
                ri.type_name: 'website',
                ri.identifier_type_name: 'doi',
                ri.identifier_name: doi,
                ri.title_name: '',
                ri.notes_name: '',
            }
            rih.add_info(doi_info)
        return doi

    def _datacite_xml(self):
        """Return the DataCite XML for the experiment (delegated to provider)."""
        return self.doi_provider.datacite_xml()

    def _get_or_create_doi_parameterset(self):
        """Get or create the ExperimentParameterSet holding the DOI."""
        eps, _ = ExperimentParameterSet.objects.get_or_create(
            experiment=self.experiment, schema=self.schema)
        return eps

    @staticmethod
    def _read_doi(doi_response):
        """Extract the DOI from the mint service's success message."""
        matches = re.match(r'\[MT001\] DOI (.+) was successfully minted.',
                           doi_response)
        if not matches:
            # BUGFIX: original used '+' after a '%s' placeholder, which
            # produced the literal text 'unrecognised response: %s<body>'
            raise Exception('unrecognised response: %s' % doi_response)
        return matches.group(1)

    @staticmethod
    def _post(url, post_data, headers):
        """POST and return the response body; log and re-raise HTTP errors."""
        try:
            request = urllib2.Request(url, post_data, headers)
            response = urllib2.urlopen(request)
            return response.read()
        except HTTPError as e:
            logger.error(e.read())
            raise e
class DOIXMLProvider(object):
    """Provides DataCite XML metadata for a given experiment."""

    def __init__(self, experiment):
        self.experiment = experiment

    def datacite_xml(self):
        """
        :return: datacite XML for self.experiment
        :rtype: string
        """
        import os
        from datetime import date
        from django.template import Context

        template = os.path.join(settings.DOI_TEMPLATE_DIR, 'default.xml')
        exp = self.experiment
        context = Context()
        context['title'] = exp.title
        context['institution_name'] = exp.institution_name
        context['publication_year'] = date.today().year
        context['creator_names'] = \
            [author.author for author in exp.experimentauthor_set.all()]
        return render_to_string(template, context_instance=context)
| {
"repo_name": "pansapiens/mytardis",
"path": "tardis/tardis_portal/ands_doi.py",
"copies": "3",
"size": "5163",
"license": "bsd-3-clause",
"hash": -8043420527047528000,
"line_mean": 30.1024096386,
"line_max": 78,
"alpha_frac": 0.5858996707,
"autogenerated": false,
"ratio": 3.870314842578711,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.595621451327871,
"avg_score": null,
"num_lines": null
} |
# Andy Kotz final project: graphing calculator with regressions
from ggame import App, Color, LineStyle, Sprite, RectangleAsset, TextAsset
from ggame import CircleAsset, ImageAsset
from math import sin, cos, radians
SCREEN_WIDTH = 1900
SCREEN_HEIGHT = 1000
def correlation(xlistpts, ylistpts):
    """Pearson correlation coefficient r for two equal-length point lists."""
    n = len(xlistpts)
    sum_xy = sum(x * y for x, y in zip(xlistpts, ylistpts))
    sum_x = sum(xlistpts)
    sum_y = sum(ylistpts)
    sum_x2 = sum(x ** 2 for x in xlistpts)
    sum_y2 = sum(y ** 2 for y in ylistpts)
    numerator = (n * sum_xy) - (sum_x * sum_y)
    denominator = (((n * sum_x2) - (sum_x) ** 2) * ((n * sum_y2) - (sum_y) ** 2)) ** 0.5
    return numerator / denominator
def quadreg(xlistpts, ylistpts):
    """Least-squares quadratic regression y = a*x**2 + b*x + c.

    Returns [a, b, c].

    BUGFIX: the original appended the x**3 and x**4 powers to the x**2
    list (leaving Ex3 = Ex4 = 0) and computed E(x^2 * y) as
    E(x^2) + E(y); all three sums are computed correctly here.
    """
    n = len(xlistpts)
    sum_x = sum(xlistpts)
    sum_y = sum(ylistpts)
    sum_x2 = sum(x ** 2 for x in xlistpts)
    sum_x3 = sum(x ** 3 for x in xlistpts)
    sum_x4 = sum(x ** 4 for x in xlistpts)
    sum_xy = sum(x * y for x, y in zip(xlistpts, ylistpts))
    sum_x2y = sum((x ** 2) * y for x, y in zip(xlistpts, ylistpts))
    # centered ("S") sums used by the quadratic normal equations
    sxx = sum_x2 - (sum_x ** 2) / n
    sxy = sum_xy - (sum_x * sum_y) / n
    sxx2 = sum_x3 - (sum_x2 * sum_x) / n
    sx2y = sum_x2y - (sum_x2 * sum_y) / n
    sx2x2 = sum_x4 - (sum_x2 ** 2) / n
    denom = (sxx * sx2x2) - sxx2 ** 2
    a = ((sx2y * sxx) - (sxy * sxx2)) / denom
    b = ((sxy * sx2x2) - (sx2y * sxx2)) / denom
    c = (sum_y / n) - (b * (sum_x / n)) - (a * (sum_x2 / n))
    return [a, b, c]
# coords holds the mouse-coordinate label sprite; None until the first mousemove
coords = None
# easter-egg message shown for unrecognized menu letters
esetreg = TextAsset("Congratulations! you win 1 million dollars!", style = '40pt Arial')
red = Color(0xff0000, 1.0)
green = Color(0x00ff00, 1.0)
blue = Color(0x0000ff, 1.0)
black = Color(0x000000, 1.0)
purple = Color(0x9B30FF, 1.0)
grey = Color(0xd3d3d3, 0.7)
thinline = LineStyle(0, black)
# full-height / full-width hairlines for the axes and the mouse crosshair
yaxis = RectangleAsset(1, 1000, thinline, black)
xaxis = RectangleAsset(1900, 1, thinline, black)
ycursor = RectangleAsset(1, 1000, thinline, grey)
xcursor = RectangleAsset(1900, 1, thinline, grey)
class Xcursorclass(Sprite):
    """Full-width grey hairline; its y is moved to follow the mouse."""

    def __init__(self, position):
        Sprite.__init__(self, xcursor, position)
class Ycursorclass(Sprite):
    """Full-height grey hairline; its x is moved to follow the mouse."""

    def __init__(self, position):
        Sprite.__init__(self, ycursor, position)
# grey crosshair lines that follow the mouse (moved in mousePosition below)
xcurse = Xcursorclass((0,0))
ycurse = Ycursorclass((0,0))
xaxisrulings = RectangleAsset(1, 7, thinline, black)
yaxisrulings = RectangleAsset(7, 1, thinline, black)
thinline = LineStyle(0, black)
circle = CircleAsset(3, thinline, blue)
circlebig = CircleAsset(6, thinline, red)
Sprite (xaxis, (0, 500))
Sprite (yaxis, (950, 0))
smiley = ImageAsset("smileyface.jpg")
# tick marks every 20px along both axes (20px == one graph unit)
yaxisrulingsprites = [Sprite(yaxisrulings, (947.5, y*20)) for y in range(-100, 100, 1)]
xaxisrulingsprites = [Sprite(xaxisrulings, (x*20+10, 497)) for x in range(-150, 150, 1)]
# sample x positions across the screen, scaled down to graph units
xcoordinates2 = range(-1500, 1500, 1)
xcoordinates = []
for x in xcoordinates2:
    x = x/32
    xcoordinates.append(x)
pointpos = 1
linetypelist = input("choose function, plot (f,p). Separate by commas: ")
linetypelist = linetypelist.split(",")
for linetype in linetypelist:
    # "f": plot y=f(x) typed by the user (evaluated via eval -- trusted input only)
    if linetype == "f":
        function = input("y=")
        for x in xcoordinates:
            yval = (-20*(eval(function))+500)
            if yval >= 0 and yval <= 1000:
                Sprite (circle, ((20*x+950), yval))
    # "p": collect points, then optionally fit a linear (lr) or quadratic (qr) regression
    if linetype == "p":
        again = True
        ylistpts=[]
        xlistpts=[]
        while again == True:
            point = input("input point x,y. press q to quit, qr or lr to regress: ")
            if point == "q" or point == "qr" or point == "lr":
                again = False
            if again == True:
                point = point.split(",")
                xlistpts.append(float(point[0]))
                ylistpts.append(float(point[1]))
        if point == "lr":
            # least-squares line: slope = r * (sdy/sdx), passing through the mean point
            xlistmean = (sum(xlistpts))/len(xlistpts)
            ylistmean = (sum(ylistpts))/len(ylistpts)
            xmeanlist = []
            ymeanlist = []
            for i in xlistpts:
                x = i-xlistmean
                x = x**2
                xmeanlist.append(x)
            for i in ylistpts:
                y = i-ylistmean
                y = y**2
                ymeanlist.append(y)
            sdx = (sum(xmeanlist)/len(xmeanlist))**0.5
            sdy = (sum(ymeanlist)/len(ymeanlist))**0.5
            rval = correlation(xlistpts, ylistpts)
            regreslope = rval*(sdy/sdx)
            regreintercept = ylistmean - (regreslope*xlistmean)
            regreinterceptprint = str(round(10*regreintercept)/10)
            oper = "+"+regreinterceptprint
            # NOTE(review): for a negative intercept this prints "--<val>",
            # since regreinterceptprint already carries the minus sign
            if regreintercept < 0:
                oper = "-"+regreinterceptprint
            if regreintercept == 0:
                oper = ""
            print ("Regression: y="+str((round(10*regreslope))/10)+"x"+ oper +". r = " + str(round(10000*rval)/10000))
            for x in xcoordinates:
                yval = (-20*(regreslope*x+regreintercept)+500)
                if yval >= 0 and yval <= 1000:
                    Sprite (circle, ((20*x+950), yval))
        if point == "qr":
            abc = quadreg(xlistpts,ylistpts)
            quada = abc[0]
            quadb = abc[1]
            quadc = abc[2]
            for x in xcoordinates:
                yval = (-20*(quada*(x**2)+quadb*x+quadc)+500)
                if yval >= 0 and yval <= 1000:
                    Sprite (circle, ((20*x+950), yval))
        # draw the entered points themselves as larger red dots
        goforh = 0
        while goforh <= len(xlistpts)-1:
            Sprite(circlebig, (20*float(xlistpts[goforh])+950, -20*float(ylistpts[goforh])+500))
            goforh += 1
        # list the entered points (two per text line) in the top-left corner
        goforlist = 1
        while goforlist <= len(xlistpts)-1:
            pointz = TextAsset("("+str(xlistpts[goforlist-1])+","+str(ylistpts[goforlist-1])+"), ("+str(xlistpts[goforlist])+","+str(ylistpts[goforlist])+")", style = '8pt Arial')
            goforlist+=2
            Sprite (pointz, (10, pointpos*15))
            pointpos+=1
    # any other single letter triggers the easter-egg banner
    if linetype in ['a', 'b', 'c', 'd', 'e', 's', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'q', 'r', 't', 'u', 'v', 'w', 'x', 'y', 'z']:
        Sprite (esetreg, (200, 200))
def mousePosition(event):
    """Move the crosshair to the mouse and refresh the coordinate label."""
    global text, coords
    if coords is not None:
        coords.destroy()
    xcurse.y = event.y - 7
    ycurse.x = event.x - 9
    grid_x = round((event.x - 959) / 20)
    grid_y = round((-(event.y - 507)) / 20)
    text = TextAsset("(" + str(grid_x) + "," + str(grid_y) + ")", style = '10pt Arial')
    coords = Sprite(text, (event.x - 7, event.y - 22))
def mouseclick(event):
    """Stamp the smiley image at a fixed position on every click."""
    Sprite(smiley, (100, 100))
myapp = App(SCREEN_WIDTH, SCREEN_HEIGHT)
# NOTE(review): run() is called before the mouse listeners are registered;
# if ggame's App.run() blocks, the two listenMouseEvent calls below never
# execute -- confirm against the ggame App API.
myapp.run()
myapp.listenMouseEvent('mousemove', mousePosition)
myapp.listenMouseEvent('mouseclick', mouseclick)
| {
"repo_name": "Aqkotz/Final-Project",
"path": "Graphingcalc.py",
"copies": "1",
"size": "7239",
"license": "mit",
"hash": 4221696171644461600,
"line_mean": 35.3768844221,
"line_max": 183,
"alpha_frac": 0.5471750242,
"autogenerated": false,
"ratio": 2.9826946847960447,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.40298697089960445,
"avg_score": null,
"num_lines": null
} |
# an early attempt to abstract access to "coverage data". Also,
# because the Coverage.py API doesn't give access to their internal
# Analysis instances anyway :/ and using a 5-tuple is annoying
class CoverageAnalysis(object):
    """
    Thin named-attribute wrapper around ``Coverage.analysis2()`` data.

    coverage.py's own ``Analysis`` instances are not part of its public
    API, and passing a bare 4- or 5-tuple around as "the" data source is
    annoying, so this class names the fields instead. The attribute
    names deliberately mimic ``coverage.Analysis`` so a future swap
    stays easy. Constructor arguments arrive in the same kind and order
    as ``Coverage.analyze2()`` returns them.
    """

    def __init__(self, fname, executable, excluded, missing, missing_frmt):
        self.fname = fname
        # coverage.py calls the executable lines "statements"
        self.statements = executable
        self.excluded = excluded
        self.missing = missing
        # kept only for API parity; unused
        self._missing_formatted = missing_frmt
def create_analysis(covdata, name):
    """
    Build and return a :class:`CoverageAnalysis` for ``name``.

    :param covdata: a Coverage() instance
    :param name: file or module name
    """
    return CoverageAnalysis(*covdata.analysis2(name))
| {
"repo_name": "meejah/cuvner",
"path": "cuv/analysis.py",
"copies": "1",
"size": "1132",
"license": "mit",
"hash": -1919143646005927400,
"line_mean": 31.3428571429,
"line_max": 75,
"alpha_frac": 0.6828621908,
"autogenerated": false,
"ratio": 4.161764705882353,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5344626896682353,
"avg_score": null,
"num_lines": null
} |
"""An easy interface for adding keyboard shortcuts using decorators."""
import ctypes
import functools
import operator
import sys
import uuid
import objc_util
# The running app's root view controller; key commands get attached to it.
_app = objc_util.UIApplication.sharedApplication()
_controller = _app.keyWindow().rootViewController()
# Bitmask flags for modifier keys (the shift amounts match UIKit's
# UIKeyModifierFlags values — confirm against the UIKit headers)
_modifiers = {
    "shift": 1 << 17,
    "control": 1 << 18, "ctrl": 1 << 18,
    "option": 1 << 19, "alt": 1 << 19,
    "command": 1 << 20, "cmd": 1 << 20
}
# UIKeyCommand input strings for non-character (special) keys
_special_keys = {
    "up": "UIKeyInputUpArrow",
    "down": "UIKeyInputDownArrow",
    "left": "UIKeyInputLeftArrow",
    "right": "UIKeyInputRightArrow",
    "escape": "UIKeyInputEscape", "esc": "UIKeyInputEscape"
}
# HELPER METHODS
def _add_method(cls, func):
    """Attach *func* to the Objective-C class of *cls* under a fresh
    selector and return that selector.

    The selector name is a random UUID so repeated registrations never
    collide with each other or with existing methods.
    """
    # Type encoding "v@:": void return, object (self) + selector arguments.
    type_encoding = "v@:"
    sel_name = str(uuid.uuid4())
    sel = objc_util.sel(sel_name)
    class_ptr = objc_util.object_getClass(cls.ptr)
    # ----------------- Modified from objc_util.add_method ------------------ #
    parsed_types = objc_util.parse_types(type_encoding)
    restype, argtypes, _ = parsed_types
    imp = ctypes.CFUNCTYPE(restype, *argtypes)(func)
    # Keep the ctypes callback alive; otherwise it could be collected
    # while ObjC still holds the IMP pointer.
    objc_util.retain_global(imp)
    if isinstance(type_encoding, str):
        type_encoding = type_encoding.encode('ascii')
    objc_util.class_addMethod(class_ptr, sel, imp, type_encoding)
    # ----------------------------------------------------------------------- #
    return sel
def _tokenize_shortcut_string(shortcut):
    """Break a plaintext shortcut description into individual key names.

    Any combination of " ", "+", "-", and "," may separate the keys.
    Modifier keys are moved to the front of the returned list.
    """
    # Repeatedly split on each separator character in turn.
    pieces = [shortcut]
    for separator in (" ", "-", "+", ","):
        split_pieces = []
        for chunk in pieces:
            split_pieces += chunk.split(separator)
        pieces = split_pieces
    # Normalize: lowercase, strip whitespace, drop empty fragments.
    keys = []
    for raw in pieces:
        cleaned = raw.strip().lower()
        if cleaned:
            keys.append(cleaned)
    # Stable sort: modifiers (key False) come before the main key (True).
    return sorted(keys, key=lambda key: key not in _modifiers)
def _validate_tokens(tokens):
    """Raise ValueError for impossible key commands.

    Descriptive errors are produced for shortcuts such as:
        - Cmd + Shift + P + I   (more than one non-modifier key)
        - Ctrl + Elephant       (unknown key name)
        - Ctrl + Cmd + Shift    (no non-modifier key at all)
    """
    known_names = tuple(_modifiers) + tuple(_special_keys)
    # Exactly one non-modifier key must be present.
    main_keys = [tok for tok in tokens if tok not in _modifiers]
    if len(main_keys) > 1:
        raise ValueError(
            "Only one non-modifier key is allowed in a shortcut"
        )
    if len(main_keys) < 1:
        raise ValueError(
            "At least one non-modifier key is required in a shortcut"
        )
    # Any multi-character name must be a modifier or a special key.
    for tok in tokens:
        if len(tok) > 1 and tok not in known_names:
            raise ValueError(
                "{} is not a valid keyboard key".format(tok)
            )
# TRACKING OF COMMANDS
# Maps frozenset of shortcut tokens -> the registered UIKeyCommand object.
_registered_commands = {}
# REGISTERING
def _add_shortcut(shortcut, function, title=None):
    """Bind a function to a keyboard shortcut.

    :param shortcut: plaintext shortcut description, e.g. "cmd-shift-p"
    :param function: zero-argument callable to run when the shortcut fires
    :param title: optional discoverability title shown in the iOS
        keyboard-shortcut overlay
    """
    # Wrap function so it accepts and ignores the ObjC (self, sel) arguments.
    def wrapper(*args, **kwargs):
        function()
    # Parse and sanity-check the shortcut description.
    tokens = _tokenize_shortcut_string(shortcut)
    _validate_tokens(tokens)
    modifiers = tokens[:-1]
    inp = tokens[-1]
    # OR all modifier flags together into a single bitmask.
    mod_bitmask = functools.reduce(
        operator.ior,
        [_modifiers[mod] for mod in modifiers],
        0
    )
    if inp in _special_keys:
        inp = _special_keys[inp]
    # Register the wrapper as an ObjC method and build the UIKeyCommand.
    sel = _add_method(_controller, wrapper)
    kc = objc_util.ObjCClass("UIKeyCommand")
    if title is not None:
        c = kc.keyCommandWithInput_modifierFlags_action_discoverabilityTitle_(
            inp,
            mod_bitmask,
            sel,
            title
        )
    else:
        c = kc.keyCommandWithInput_modifierFlags_action_(
            inp,
            mod_bitmask,
            sel
        )
    # BUG FIX: this previously assigned the undefined name `cp`, which
    # raised NameError every time a shortcut was registered.
    _registered_commands[frozenset(tokens)] = c
    _controller.addKeyCommand_(c)
# MAIN INTERFACE
def bind(shortcut, title=None):
    """A decorator for binding keyboard shortcuts.

    Example:
        >>> @bind("Command T")
        >>> def test_func():
        ...     print("Hello!")

    The shortcut definition syntax is designed to be flexible, so the following
    shortcut names are all equivalent:
        - Command + Shift + Escape
        - cmd-shift-esc
        - CMD SHIFT ESCAPE
        - command, shift, esc
    A few non-alphanumeric keys are supported with special names:
        - up
        - down
        - left
        - right
        - escape / esc

    :param shortcut: plaintext shortcut description
    :param title: optional discoverability title for the shortcut overlay
    :returns: a decorator that registers the function and returns it
    """
    def decorator(function):
        _add_shortcut(shortcut, function, title=title)
        # BUG FIX: return the function so the decorated name stays
        # callable (the old functools.partial version left it bound to
        # None, because _add_shortcut returns None).
        return function
    return decorator
if __name__ == "__main__":
    import console
    # Demo: pressing Cmd+Shift+Esc pops up an alert.
    @bind("Command Shift Escape", "Say Hi")
    def hi():
        console.alert("Hello")
| {
"repo_name": "controversial/ui2",
"path": "ui2/kb_shortcuts.py",
"copies": "1",
"size": "4965",
"license": "mit",
"hash": 7207142454583288000,
"line_mean": 26.131147541,
"line_max": 79,
"alpha_frac": 0.5985901309,
"autogenerated": false,
"ratio": 3.924901185770751,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5023491316670751,
"avg_score": null,
"num_lines": null
} |
# An easy Sudoku Solver.
# A pretty print
def pprint(puzzle):
    """Print the board, one row per line (Python 2 print statement)."""
    for i in puzzle:
        print i
# Initial 9x9 board; 0 marks an unknown cell
puzzle = [[5,3,0,0,7,0,0,0,0],
          [6,0,0,1,9,5,0,0,0],
          [0,9,8,0,0,0,0,6,0],
          [8,0,0,0,6,0,0,0,3],
          [4,0,0,8,0,3,0,0,1],
          [7,0,0,0,2,0,0,0,6],
          [0,6,0,0,0,0,2,8,0],
          [0,0,0,4,1,9,0,0,5],
          [0,0,0,0,8,0,0,7,9]]
# Known solution of the initial board (used to verify the solver output)
solution = [[5,3,4,6,7,8,9,1,2],
            [6,7,2,1,9,5,3,4,8],
            [1,9,8,3,4,2,5,6,7],
            [8,5,9,7,6,1,4,2,3],
            [4,2,6,8,5,3,7,9,1],
            [7,1,3,9,2,4,8,5,6],
            [9,6,1,5,3,7,2,8,4],
            [2,8,7,4,1,9,6,3,5],
            [3,4,5,2,8,6,1,7,9]]
# Find which numbers are no more allowed within the row
def get_row_found(puzzle, row):
    """Return the non-zero numbers already placed in *row* — i.e. the
    values no longer allowed elsewhere in that row."""
    return list(filter(None, puzzle[row]))
# Find which numbers are no more allowed within the column
def get_col_found(puzzle, cell):
    """Return the non-zero numbers already placed in column *cell*.

    BUG FIX: the original filtered with ``if x`` where ``x`` was the whole
    row (a non-empty list, always truthy), so zeros were never filtered
    out. The filter now tests the cell value itself, matching the
    behavior of get_row_found.
    """
    return [row[cell] for row in puzzle if row[cell]]
# Find which numbers are no more allowed within the same sub-grid
def get_grid_found(puzzle, row, cell):
    """Return the non-zero numbers already placed in the 3x3 sub-grid
    containing (row, cell).

    Consistency fix: like get_row_found, zeros (unknown cells) are now
    excluded from the result instead of being returned alongside the
    placed numbers.
    """
    top = (row // 3) * 3
    left = (cell // 3) * 3
    return [puzzle[top + r][left + c]
            for r in range(3) for c in range(3)
            if puzzle[top + r][left + c]]
# Find how many numbers are unknown
def get_unknown(puzzle):
    """Count how many cells are still unknown (zero).

    Generalized: the original hard-coded ``range(9)`` rows, so boards of
    any size would break; this iterates whatever rows the board has.
    """
    return sum(1 for row in puzzle for x in row if not x)
# Just pause after you have checked all the lines
# (set to True to wait for Enter between board passes)
pause_after = False
# The actual Sudoku solver
from collections import defaultdict
def sudoku(puzzle):
    """Solve *puzzle* in place by repeated single-candidate elimination.

    Loops until no unknown cells remain; a cell is filled as soon as only
    one number is possible for it. NOTE(review): a puzzle that needs
    guessing (no single-candidate cell in a pass) would loop forever —
    confirm the intended inputs are always simple boards.
    """
    # keeping a dictionary for each cell options
    available = defaultdict(list)
    # to keep track of found cells...
    found = set()
    while get_unknown(puzzle)>0:
        for row in range(len(puzzle)):
            for cell in range(len(puzzle[row])):
                # if the cell has already a number in it
                if puzzle[row][cell] != 0:
                    available[row,cell] = []
                    found.add((row,cell))
                # the cell is yet to be found
                else:
                    # elaborate the options of a cell considering what is already in the row, the column and the sub-grid
                    available[row,cell] = [x for x in range(1,10) if x not in get_row_found(puzzle, row)+get_col_found(puzzle, cell)+get_grid_found(puzzle, row, cell)]
                    # if the cell has only 1 option then solve it
                    if len(available[row,cell]) == 1:
                        puzzle[row][cell] = available[row,cell][0]
                        found.add((row,cell))
                    # trace every unknown cell's options; flag the solved ones
                    print row, cell, available[row,cell], [""," <--- FounD!"][len(available[row,cell])==1]
            # when you have read the row, print the board situation with the new found cells
            pprint(puzzle)
            # and print how many cells are left to be found
            print "remaining: ",get_unknown(puzzle)
            if pause_after: raw_input("Press Enter to check next row...")
    return puzzle
# Calling the function and printing the final solution
pprint(sudoku(puzzle))
# Prints True when the solver reproduced the known solution
print puzzle == solution
| {
"repo_name": "FA810/My_Codes",
"path": "[Python]_Easy_Sudoku_Solver/sudoku_solver.py",
"copies": "1",
"size": "2781",
"license": "apache-2.0",
"hash": -3355908168740858000,
"line_mean": 33.3333333333,
"line_max": 152,
"alpha_frac": 0.6116504854,
"autogenerated": false,
"ratio": 2.5702402957486137,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.36818907811486135,
"avg_score": null,
"num_lines": null
} |
"""An easy way to get ROX-Filer to do things."""
# Note: do a double-fork in case it's an old version of the filer
# and doesn't automatically background itself.
def _spawn(argv):
from os import fork, _exit, execvp, waitpid
child = fork()
if child == 0:
# We are the child
child = fork()
if child == 0:
# Grandchild
try:
execvp(argv[0], argv)
except:
pass
print "Warning: exec('%s') failed!" % argv[0]
_exit(1)
elif child == -1:
print "Error: fork() failed!"
_exit(1)
elif child == -1:
print "Error: fork() failed!"
waitpid(child, 0)
def spawn_rox(args):
    """Run rox (either from PATH or through Zero Install) with the
    given arguments."""
    import os.path
    # Prefer a 'rox' executable found on $PATH.
    for dir in os.environ.get('PATH', '').split(':'):
        path = os.path.join(dir, 'rox')
        if os.path.isfile(path):
            _spawn(('rox',) + args)
            return
    # Otherwise fall back to the Zero Install filesystem, if mounted.
    if os.path.exists('/uri/0install/rox.sourceforge.net'):
        _spawn(('/bin/0run', 'rox.sourceforge.net/rox 2002-01-01') + args)
    else:
        # Last resort: try 'rox' anyway and hope it is reachable.
        print "Didn't find rox in PATH, and Zero Install not present. Trying 'rox' anyway..."
        _spawn(('rox',) + args)
def open_dir(dir):
    "Open 'dir' in a new filer window."
    # -d asks the filer for a directory view
    spawn_rox(('-d', dir))
def examine(file):
    """'file' may have changed (maybe you just created it, for example). Update
    any filer views of it."""
    # -x asks the filer to re-examine the file
    spawn_rox(('-x', file))
def show_file(file):
    """Open a directory and draw the user's attention to this file. Useful for
    'Up' toolbar buttons that show where a file is saved."""
    # -s opens the containing directory with the file highlighted
    spawn_rox(('-s', file))
| {
"repo_name": "leuschel/logen",
"path": "old_logen/pylogen/rox/filer.py",
"copies": "1",
"size": "1508",
"license": "apache-2.0",
"hash": -9218998541311222000,
"line_mean": 27.4528301887,
"line_max": 87,
"alpha_frac": 0.6432360743,
"autogenerated": false,
"ratio": 2.839924670433145,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.39831607447331446,
"avg_score": null,
"num_lines": null
} |
# an eazy app for login
from flask import request, session, url_for, escape, redirect
def register(app):
    """Attach the demo login/logout routes to the given Flask *app*."""
    # Index page: greets the logged-in user or offers a login link.
    @app.route('/')
    def index():
        if 'username' in session:
            return '''
                <p>Logged in as %s</p>
                <a href="http://localhost:5000/logout">logout</a>
            ''' % escape(session['username'])
        return '''
            <p>You are not logged in</p>
            <a href="http://localhost:5000/login">login</a>
        '''
    # Login: GET shows a form, POST stores the username in the session.
    # NOTE(review): the username is stored unvalidated — confirm this demo
    # is never exposed beyond localhost.
    @app.route('/login', methods=['GET', 'POST'])
    def login():
        if request.method == 'POST':
            session['username'] = request.form['username']
            return redirect(url_for('index'))
        return '''
            <form action="" method="post">
                <p><input type=text name=username>
                <p><input type=submit value=Login>
            </form>
        '''
    @app.route('/logout')
    def logout():
        # remove the username from the session if it's there
        session.pop('username', None)
        return redirect(url_for('index'))
| {
"repo_name": "imxana/Flask_init",
"path": "apps/eazy_login.py",
"copies": "1",
"size": "1085",
"license": "mit",
"hash": -5172118883596827000,
"line_mean": 24.2325581395,
"line_max": 65,
"alpha_frac": 0.5105990783,
"autogenerated": false,
"ratio": 4.1891891891891895,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5199788267489189,
"avg_score": null,
"num_lines": null
} |
""" An editor whose content is provided by a traits UI. """
# Standard library imports.
import logging
# Enthought library imports.
from enthought.traits.api import Instance, Str
from enthought.traits.ui.api import UI
# Local imports.
from editor import Editor
# Logging.
logger = logging.getLogger(__name__)
class TraitsUIEditor(Editor):
    """ An editor whose content is provided by a traits UI. """
    #### 'TraitsUIEditor' interface ###########################################
    # The traits UI that represents the editor.
    #
    # The framework sets this to the value returned by 'create_ui'.
    ui = Instance(UI)
    # The name of the traits UI view used to create the UI (if not specified,
    # the default traits UI view is used).
    view = Str
    ###########################################################################
    # 'IWorkbenchPart' interface.
    ###########################################################################
    #### Trait initializers ###################################################
    def _name_default(self):
        """ Trait initializer. Defaults the part name to str(self.obj). """
        return str(self.obj)
    #### Methods ##############################################################
    def create_control(self, parent):
        """ Creates the toolkit-specific control that represents the editor.
        'parent' is the toolkit-specific control that is the editor's parent.
        Overridden to call 'create_ui' to get the traits UI; returns the
        traits UI's toolkit control.
        """
        self.ui = self.create_ui(parent)
        return self.ui.control
    def destroy_control(self):
        """ Destroys the toolkit-specific control that represents the editor.
        Overridden to call 'dispose' on the traits UI. Safe to call when no
        UI was ever created (self.ui is None).
        """
        # Give the traits UI a chance to clean itself up.
        if self.ui is not None:
            logger.debug('disposing traits UI for editor [%s]', self)
            self.ui.dispose()
            self.ui = None
        return
    ###########################################################################
    # 'TraitsUIEditor' interface.
    ###########################################################################
    def create_ui(self, parent):
        """ Creates the traits UI that represents the editor.
        By default it calls 'edit_traits' on the editor's 'obj'. If you
        want more control over the creation of the traits UI then override!
        Returns the new traits 'UI' instance.
        """
        ui = self.obj.edit_traits(
            parent=parent, view=self.view, kind='subpanel'
        )
        return ui
#### EOF ######################################################################
| {
"repo_name": "enthought/traitsgui",
"path": "enthought/pyface/workbench/traits_ui_editor.py",
"copies": "1",
"size": "2641",
"license": "bsd-3-clause",
"hash": 7895458114552525000,
"line_mean": 27.7065217391,
"line_max": 79,
"alpha_frac": 0.4982961,
"autogenerated": false,
"ratio": 5.040076335877862,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0005434782608695652,
"num_lines": 92
} |
""" An editor whose content is provided by a traits UI. """
# Standard library imports.
import logging
# Enthought library imports.
from traits.api import Instance, Str
from traitsui.api import UI
# Local imports.
from editor import Editor
# Logging.
logger = logging.getLogger(__name__)
class TraitsUIEditor(Editor):
    """ An editor whose content is provided by a traits UI. """
    #### 'TraitsUIEditor' interface ###########################################
    # The traits UI that represents the editor.
    #
    # The framework sets this to the value returned by 'create_ui'.
    ui = Instance(UI)
    # The name of the traits UI view used to create the UI (if not specified,
    # the default traits UI view is used).
    view = Str
    ###########################################################################
    # 'IWorkbenchPart' interface.
    ###########################################################################
    #### Trait initializers ###################################################
    def _name_default(self):
        """ Trait initializer. Defaults the part name to str(self.obj). """
        return str(self.obj)
    #### Methods ##############################################################
    def create_control(self, parent):
        """ Creates the toolkit-specific control that represents the editor.
        'parent' is the toolkit-specific control that is the editor's parent.
        Overridden to call 'create_ui' to get the traits UI; returns the
        traits UI's toolkit control.
        """
        self.ui = self.create_ui(parent)
        return self.ui.control
    def destroy_control(self):
        """ Destroys the toolkit-specific control that represents the editor.
        Overridden to call 'dispose' on the traits UI. Safe to call when no
        UI was ever created (self.ui is None).
        """
        # Give the traits UI a chance to clean itself up.
        if self.ui is not None:
            logger.debug('disposing traits UI for editor [%s]', self)
            self.ui.dispose()
            self.ui = None
        return
    ###########################################################################
    # 'TraitsUIEditor' interface.
    ###########################################################################
    def create_ui(self, parent):
        """ Creates the traits UI that represents the editor.
        By default it calls 'edit_traits' on the editor's 'obj'. If you
        want more control over the creation of the traits UI then override!
        Returns the new traitsui 'UI' instance.
        """
        ui = self.obj.edit_traits(
            parent=parent, view=self.view, kind='subpanel'
        )
        return ui
#### EOF ######################################################################
| {
"repo_name": "brett-patterson/pyface",
"path": "pyface/workbench/traits_ui_editor.py",
"copies": "2",
"size": "2620",
"license": "bsd-3-clause",
"hash": -4416264199868605000,
"line_mean": 27.4782608696,
"line_max": 79,
"alpha_frac": 0.4954198473,
"autogenerated": false,
"ratio": 5.087378640776699,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.65827984880767,
"avg_score": null,
"num_lines": null
} |
# An eduPersonTargetedID comprises
# the entity name of the identity provider, the entity name of the service
# provider, and a opaque string value.
# These strings are separated by "!" symbols. This form is advocated by
# Internet2 and may overtake the other form in due course.
import hashlib
import shelve
import logging
import six
logger = logging.getLogger(__name__)
class Eptid(object):
    """Builder and in-memory cache for eduPersonTargetedID values.

    A targeted id has the form ``<idp>!<sp>!<hash>`` where the hash is
    an MD5 over the user-specific args, the SP entity id, and a
    site-local secret.
    """
    def __init__(self, secret):
        self._db = {}
        self.secret = secret

    @staticmethod
    def _text_key(key):
        """dict/shelve keys must be text on py3; decode bytes keys."""
        if six.PY3 and isinstance(key, six.binary_type):
            key = key.decode('utf-8')
        return key

    def make(self, idp, sp, args):
        """Compute the targeted-id string for this idp/sp/args triple."""
        digest = hashlib.md5()
        for chunk in args:
            digest.update(chunk.encode("utf-8"))
        # sp and the secret may each arrive as either text or bytes.
        for raw in (sp, self.secret):
            if isinstance(raw, six.binary_type):
                digest.update(raw)
            else:
                digest.update(raw.encode('utf-8'))
        digest.digest()
        hashval = digest.hexdigest()
        if isinstance(hashval, six.binary_type):
            hashval = hashval.decode('ascii')
        return "!".join([idp, sp, hashval])

    def __getitem__(self, key):
        return self._db[self._text_key(key)]

    def __setitem__(self, key, value):
        self._db[self._text_key(key)] = value

    def get(self, idp, sp, *args):
        """Return the cached id for (sp, args[0]), creating it on a miss."""
        # key is a combination of sp_entity_id and object id
        cache_key = ("__".join([sp, args[0]])).encode("utf-8")
        try:
            return self[cache_key]
        except KeyError:
            eptid = self.make(idp, sp, args)
            self[cache_key] = eptid
            return eptid

    def close(self):
        """Nothing to release for the in-memory cache."""
        pass
class EptidShelve(Eptid):
    """Eptid variant whose cache persists in a ``shelve`` file on disk."""
    def __init__(self, secret, filename):
        Eptid.__init__(self, secret)
        # On py3, drop a trailing '.db' before handing the path to shelve.
        if six.PY3 and filename.endswith('.db'):
            filename = filename[:-len('.db')]
        self._db = shelve.open(filename, writeback=True, protocol=2)

    def close(self):
        """Flush and close the underlying shelve database."""
        self._db.close()
| {
"repo_name": "cloudera/hue",
"path": "desktop/core/ext-py/pysaml2-4.9.0/src/saml2/eptid.py",
"copies": "2",
"size": "2103",
"license": "apache-2.0",
"hash": -970160093800439300,
"line_mean": 28.2083333333,
"line_max": 74,
"alpha_frac": 0.5706134094,
"autogenerated": false,
"ratio": 3.6258620689655174,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 72
} |
# An eduPersonTargetedID comprises
# the entity name of the identity provider, the entity name of the service
# provider, and a opaque string value.
# These strings are separated by "!" symbols. This form is advocated by
# Internet2 and may overtake the other form in due course.
import hashlib
import shelve
import logging
logger = logging.getLogger(__name__)
class Eptid(object):
    """Builder and in-memory cache for eduPersonTargetedID values.

    A targeted id has the form ``<idp>!<sp>!<hash>`` where the hash is
    an MD5 over the user args, the SP entity id, and a local secret.
    """
    def __init__(self, secret):
        self._db = {}
        try:
            self.secret = secret.encode()
        except AttributeError:
            # already bytes (no .encode()); keep as-is
            self.secret = secret

    def make(self, idp, sp, args):
        """Compute the targeted-id string for this idp/sp/args triple."""
        digest = hashlib.md5()
        for chunk in args:
            digest.update(chunk.encode("utf-8"))
        digest.update(sp.encode())
        digest.update(self.secret)
        digest.digest()
        return "!".join([idp, sp, digest.hexdigest()])

    def __getitem__(self, key):
        try:
            return self._db[key]
        except AttributeError:
            # shelve backends require text keys; retry with a decoded key
            return self._db[key.decode()]

    def __setitem__(self, key, value):
        try:
            self._db[key] = value
        except AttributeError:
            self._db[key.decode()] = value

    def get(self, idp, sp, *args):
        """Return the cached id for (sp, args[0]), creating it on a miss."""
        # key is a combination of sp_entity_id and object id
        cache_key = ("__".join([sp, args[0]])).encode("utf-8")
        try:
            return self[cache_key]
        except KeyError:
            eptid = self.make(idp, sp, args)
            self[cache_key] = eptid
            return eptid
class EptidShelve(Eptid):
    """Eptid variant whose cache persists in a ``shelve`` file on disk."""
    def __init__(self, secret, filename):
        Eptid.__init__(self, secret)
        # NOTE(review): py3 shelve backends may append their own '.db'
        # suffix to *filename* — confirm callers pass the bare path.
        self._db = shelve.open(filename, writeback=True)
| {
"repo_name": "rohe/pysaml2-3",
"path": "src/saml2/eptid.py",
"copies": "1",
"size": "1658",
"license": "bsd-2-clause",
"hash": 2063017421756846600,
"line_mean": 27.1016949153,
"line_max": 75,
"alpha_frac": 0.5693606755,
"autogenerated": false,
"ratio": 3.794050343249428,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4863411018749428,
"avg_score": null,
"num_lines": null
} |
# A Needle in the Haystack
# https://www.codewars.com/kata/56676e8fabd2d1ff3000000c
def find_needle(haystack: list) -> str:
    """Report where 'needle' sits in *haystack*.

    Raises ValueError (from list.index) when there is no needle.
    """
    position = haystack.index('needle')
    return 'found the needle at position %d' % position
def find_needle_5(haystack: list) -> str:
    """Variant: locate the needle with next() over an enumerate scan;
    a missing needle yields a polite message instead of an exception."""
    idx = next((i for i, item in enumerate(haystack) if item == 'needle'), None)
    if idx is None:
        return 'needle is not in the haystack'
    return 'found the needle at position %s' % idx
def find_needle_4(haystack: list) -> str:
    """Variant: plain loop scan; returns a not-found message rather than
    raising when the needle is absent."""
    for position, item in enumerate(haystack):
        if item == 'needle':
            return 'found the needle at position %d' % position
    return 'needle is not in the haystack'
def find_needle_3(haystack: list) -> str:
    """Variant: membership test first, then index lookup (guard clause
    instead of the original's conditional expression)."""
    if 'needle' not in haystack:
        return "needle is not in the haystack"
    return "found the needle at position %s" % haystack.index('needle')
def find_needle_2(haystack: list) -> str:
    """Variant: the original used try/except around list.index; this is
    the equivalent look-before-you-leap form with identical results."""
    if 'needle' in haystack:
        return "found the needle at position %s" % haystack.index('needle')
    return "needle is not in the haystack"
def find_needle_1(haystack: list) -> str:
    """Variant: compute an index-or-None first, then pick the message
    (the original did the same via try/except ValueError)."""
    position = haystack.index('needle') if 'needle' in haystack else None
    if position is None:
        return "needle is not in the haystack"
    return "found the needle at position %s" % position
| {
"repo_name": "davidlukac/codekata-python",
"path": "codewars/needle_in_haystack.py",
"copies": "1",
"size": "1342",
"license": "mit",
"hash": 7423942598545808000,
"line_mean": 31.7317073171,
"line_max": 112,
"alpha_frac": 0.6557377049,
"autogenerated": false,
"ratio": 3.0430839002267573,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9193778354419826,
"avg_score": 0.0010086501413864074,
"num_lines": 41
} |
"""An Effect to be used with ScrollView to prevent scrolling beyond
the bounds, but politely.
A ScrollView constructed with StiffScrollEffect,
eg. ScrollView(effect_cls=StiffScrollEffect), will get harder to
scroll as you get nearer to its edges. You can scroll all the way to
the edge if you want to, but it will take more finger-movement than
usual.
Unlike DampedScrollEffect, it is impossible to overscroll with
StiffScrollEffect. That means you cannot push the contents of the
ScrollView far enough to see what's beneath them. This is appropriate
if the ScrollView contains, eg., a background image, like a desktop
wallpaper. Overscrolling may give the impression that there is some
reason to overscroll, even if just to take a peek beneath, and that
impression may be misleading.
StiffScrollEffect was written by Zachary Spector. His other stuff is at:
https://github.com/LogicalDash/
He can be reached, and possibly hired, at:
zacharyspector@gmail.com
"""
from time import time
from kivy.animation import AnimationTransition
from kivy.effects.kinetic import KineticEffect
from kivy.properties import (
ObjectProperty,
NumericProperty)
from kivy.uix.widget import Widget
class StiffScrollEffect(KineticEffect):
    """A scroll effect that stiffens near the edges instead of
    overscrolling: the closer ``value`` gets to ``min``/``max``, the more
    friction is applied, up to ``max_friction``.
    """
    drag_threshold = NumericProperty('20sp')
    '''Minimum distance to travel before the movement is considered as a
    drag.'''
    min = NumericProperty(0)
    '''Minimum boundary to stop the scrolling at.'''
    max = NumericProperty(0)
    '''Maximum boundary to stop the scrolling at.'''
    max_friction = NumericProperty(1)
    '''How hard should it be to scroll, at the worst?'''
    body = NumericProperty(0.7)
    '''Proportion of the range in which you can scroll unimpeded.'''
    scroll = NumericProperty(0.)
    '''Computed value for scrolling, always clamped to [min, max].
    (BUG FIX: this property was previously declared twice; the redundant
    second declaration has been removed.)'''
    transition_min = ObjectProperty(AnimationTransition.in_cubic)
    '''The AnimationTransition function to use when adjusting the friction
    near the minimum end of the effect.
    '''
    transition_max = ObjectProperty(AnimationTransition.in_cubic)
    '''The AnimationTransition function to use when adjusting the friction
    near the maximum end of the effect.
    '''
    target_widget = ObjectProperty(None, allownone=True, baseclass=Widget)
    '''The widget to apply the effect to.'''
    displacement = NumericProperty(0)
    '''The absolute distance moved in either direction.'''
    def __init__(self, **kwargs):
        '''Set ``self.base_friction`` to the value of ``self.friction`` just
        after instantiation, so that I can reset to that value later.
        '''
        super(StiffScrollEffect, self).__init__(**kwargs)
        self.base_friction = self.friction
    def update_velocity(self, dt):
        '''Before actually updating my velocity, meddle with ``self.friction``
        to make it appropriate to where I'm at, currently.
        '''
        hard_min = self.min
        hard_max = self.max
        if hard_min > hard_max:
            hard_min, hard_max = hard_max, hard_min
        # The outer (1 - body) share of the range is the stiff margin.
        margin = (1. - self.body) * (hard_max - hard_min)
        soft_min = hard_min + margin
        soft_max = hard_max - margin
        if self.value < soft_min:
            try:
                # how far into the lower margin, as a proportion of it
                prop = (soft_min - self.value) / (soft_min - hard_min)
                self.friction = self.base_friction + abs(
                    self.max_friction - self.base_friction
                ) * self.transition_min(prop)
            except ZeroDivisionError:
                pass
        elif self.value > soft_max:
            try:
                # normalize how far past soft_max I've gone as a
                # proportion of the distance between soft_max and hard_max
                prop = (self.value - soft_max) / (hard_max - soft_max)
                # BUG FIX: this branch previously used transition_min,
                # which left the declared transition_max property unused.
                self.friction = self.base_friction + abs(
                    self.max_friction - self.base_friction
                ) * self.transition_max(prop)
            except ZeroDivisionError:
                pass
        else:
            self.friction = self.base_friction
        return super(StiffScrollEffect, self).update_velocity(dt)
    def on_value(self, *args):
        '''Prevent moving beyond my bounds, and update ``self.scroll``'''
        if self.value < self.min:
            self.velocity = 0
            self.scroll = self.min
        elif self.value > self.max:
            self.velocity = 0
            self.scroll = self.max
        else:
            self.scroll = self.value
    def start(self, val, t=None):
        '''Start movement with ``self.friction`` = ``self.base_friction``'''
        self.is_manual = True
        t = t or time()
        self.velocity = self.displacement = 0
        self.friction = self.base_friction
        self.history = [(t, val)]
    def update(self, val, t=None):
        '''Reduce the impact of whatever change has been made to me, in
        proportion with my current friction.
        '''
        t = t or time()
        hard_min = self.min
        hard_max = self.max
        if hard_min > hard_max:
            hard_min, hard_max = hard_max, hard_min
        gamut = hard_max - hard_min
        margin = (1. - self.body) * gamut
        soft_min = hard_min + margin
        soft_max = hard_max - margin
        distance = val - self.history[-1][1]
        reach = distance + self.value
        # Damp movement only when it would push further into a margin.
        if (
                distance < 0 and reach < soft_min) or (
                distance > 0 and soft_max < reach):
            distance -= distance * self.friction
        self.apply_distance(distance)
        self.history.append((t, val))
        if len(self.history) > self.max_history:
            self.history.pop(0)
        self.displacement += abs(distance)
        self.trigger_velocity_update()
    def stop(self, val, t=None):
        '''Work out whether I've been flung.'''
        self.is_manual = False
        self.displacement += abs(val - self.history[-1][1])
        # Below the drag threshold this was a tap, not a fling.
        if self.displacement <= self.drag_threshold:
            self.velocity = 0
        return super(StiffScrollEffect, self).stop(val, t)
| {
"repo_name": "kivy-garden/garden.stiffscroll",
"path": "__init__.py",
"copies": "1",
"size": "6125",
"license": "mit",
"hash": 4754640940292052000,
"line_mean": 37.28125,
"line_max": 78,
"alpha_frac": 0.6275918367,
"autogenerated": false,
"ratio": 3.9414414414414414,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5069033278141442,
"avg_score": null,
"num_lines": null
} |
"""An efficient computation of the intersection of rays and blocks"""
import numpy as np
import dask.array as da
from dask import delayed
from dask.threaded import get
class Ray(object):
    """A parametric ray: points are r(t) = r0 + t * n.

    The direction is normalized in place at construction, and its
    elementwise reciprocal is cached for slab-method intersection tests
    (zero components produce +/-inf, which the slab code tolerates).
    """
    def __init__(self,r0,n):
        # r0: origin (3-vector); n: direction (normalized below)
        self.r0 = np.array(r0)
        self.n = np.array(n)
        self.n /= np.linalg.norm(self.n)
        # cached reciprocal direction for plane-intersection parameters
        self.inv_n = 1./self.n
    def __call__(self,t):
        """Return the point on the ray at parameter *t*."""
        return self.r0 + t*self.n
def slab_method_ray_box(ray,x_min,y_min,z_min,x_max,y_max,z_max):
    """Intersect *ray* with the axis-aligned box [x_min,x_max] x
    [y_min,y_max] x [z_min,z_max] via the slab method.

    Returns ``(chord_length, midpoint)`` when the box is hit in front of
    the ray origin, otherwise ``(0., [0., 0., 0.])``.
    """
    def slab(lo, hi, origin, inv_dir):
        # Parameter interval where the ray sits between the two planes.
        # NaN (0 * inf when the origin lies on a plane) is mapped to 0.
        t_a = (lo - origin) * inv_dir
        t_b = (hi - origin) * inv_dir
        if np.isnan(t_a):
            t_a = 0.
        if np.isnan(t_b):
            t_b = 0.
        return min(t_a, t_b), max(t_a, t_b)
    near_x, far_x = slab(x_min, x_max, ray.r0[0], ray.inv_n[0])
    near_y, far_y = slab(y_min, y_max, ray.r0[1], ray.inv_n[1])
    near_z, far_z = slab(z_min, z_max, ray.r0[2], ray.inv_n[2])
    # Entry is the last slab entered; exit is the first slab left.
    # (The original confusingly named these 'tmax' and 'tmin'.)
    t_enter = max(near_x, near_y, near_z)
    t_exit = min(far_x, far_y, far_z)
    if t_enter < t_exit and t_enter > 0:
        chord = np.linalg.norm(ray.n * (t_exit - t_enter))
        midpoint = ray.r0 + ray.n * (t_exit + t_enter) / 2.
        return chord, midpoint
    return 0., [0., 0., 0.]
def slab_method_3d_ray(ray,xvec,yvec,zvec):
    """For a single ray, return an [nx, ny, nz] array with a per-voxel
    intersection measure (slab method applied per axis, vectorized with
    einsum broadcasting). xvec/yvec/zvec are voxel-center coordinates and
    are assumed uniformly spaced along each axis."""
    #abssicas are xvec - dx/2. + one more
    # Voxel spacings from the first two centers (assumes uniform grids).
    dx = xvec[1] - xvec[0]
    dy = yvec[1] - yvec[0]
    dz = zvec[1] - zvec[0]
    nx = len(xvec)
    ny = len(yvec)
    nz = len(zvec)
    # Vectors of ones used to broadcast per-axis parameters onto the 3D grid.
    ox = np.ones(nx)
    oy = np.ones(ny)
    oz = np.ones(nz)
    # Per-axis entry/exit parameters at the voxel faces (center +/- half
    # spacing). NaN (origin on a plane with zero direction) is mapped to 0.
    tx1 = (xvec-dx/2. - ray.r0[0])*ray.inv_n[0]
    tx2 = (xvec+dx/2. - ray.r0[0])*ray.inv_n[0]
    tx1[np.isnan(tx1)] = 0.
    tx2[np.isnan(tx2)] = 0.
    tmin_x = np.min([tx1,tx2],axis=0)
    tmax_x = np.max([tx1,tx2],axis=0)
    ty1 = (yvec-dy/2. - ray.r0[1])*ray.inv_n[1]
    ty2 = (yvec+dy/2. - ray.r0[1])*ray.inv_n[1]
    ty1[np.isnan(ty1)] = 0.
    ty2[np.isnan(ty2)] = 0.
    tmin_y = np.min([ty1,ty2],axis=0)
    tmax_y = np.max([ty1,ty2],axis=0)
    tz1 = (zvec-dz/2. - ray.r0[2])*ray.inv_n[2]
    tz2 = (zvec+dz/2. - ray.r0[2])*ray.inv_n[2]
    tz1[np.isnan(tz1)] = 0.
    tz2[np.isnan(tz2)] = 0.
    tmin_z = np.min([tz1,tz2],axis=0)
    tmax_z = np.max([tz1,tz2],axis=0)
    # Broadcast each axis's parameters to the full [nx, ny, nz] grid.
    max_x = np.einsum("i,j,k->ijk",tmax_x,oy,oz)
    max_y = np.einsum("i,j,k->jik",tmax_y,ox,oz)
    max_z = np.einsum("i,j,k->kji",tmax_z,oy,ox)
    min_x = np.einsum("i,j,k->ijk",tmin_x,oy,oz)
    min_y = np.einsum("i,j,k->jik",tmin_y,ox,oz)
    min_z = np.einsum("i,j,k->kji",tmin_z,oy,ox)
    # NOTE(review): as in slab_method_ray_box, the names are swapped —
    # tmax_ is the entry parameter (max of per-axis minima) and tmin_ is
    # the exit parameter (min of per-axis maxima).
    tmax_ = np.max([min_x,min_y,min_z],axis=0)
    tmin_ = np.min([max_x,max_y,max_z],axis=0)
    # NOTE(review): the mask also admits tmax_ < 0 (voxels behind the ray
    # origin) — confirm whether that is intentional.
    intersection = np.bitwise_or(np.bitwise_and(tmax_ < tmin_, tmax_>0),tmax_< 0)
    # NOTE(review): 'seg' (segment lengths through each voxel) is computed
    # below but never used — the returned value is the raw parameter
    # difference instead. Dead code or a latent bug; confirm.
    seg = np.einsum("ijk,l->ijkl",(tmax_ - tmin_),ray.n)
    seg *= seg
    seg = np.sum(seg,axis=-1)
    np.sqrt(seg,out=seg)
    out = np.where(intersection,tmax_-tmin_,0.)
    return out
def slab_method_3d(rays, xvec, yvec, zvec, out=None):
    """Compute per-voxel intersection values for a set of rays.

    :param rays: iterable of Ray objects
    :param xvec: 1-D array of voxel-center x coordinates
    :param yvec: 1-D array of voxel-center y coordinates
    :param zvec: 1-D array of voxel-center z coordinates
    :param out: optional preallocated result array of shape
        [nr, nx, ny, nz]
    :returns: array of shape [nr, nx, ny, nz]: for each ray, the
        per-voxel value computed by slab_method_3d_ray.
    """
    nx = len(xvec)
    ny = len(yvec)
    nz = len(zvec)
    nr = len(rays)
    if out is not None:
        assert out.shape == (nr, nx, ny, nz)
    else:
        out = np.zeros([nr, nx, ny, nz], dtype=float)
    # BUG FIX: this loop previously called the undefined name
    # 'slab_method_3d_ray_dask' and then '.compute()' on the result,
    # which raised NameError on the first iteration. The unused helper
    # arrays (max_xy, min_xy, ...) allocated here have also been removed.
    for ray_idx, ray in enumerate(rays):
        out[ray_idx, ...] = slab_method_3d_ray(ray, xvec, yvec, zvec)
    return out
def slab_method_3d_dask(rays, xvec, yvec, zvec, out=None, num_threads=None):
    """Compute per-ray voxel path lengths in parallel with dask.

    Args:
        rays: list of ray objects accepted by ``slab_method_3d_ray``.
        xvec, yvec, zvec: 1D arrays of voxel centers along each axis.
        out: optional array of shape [nr, nx, ny, nz]; only validated for
            shape — the returned result is always a freshly computed
            array (this matches the original behavior, where the
            supplied buffer was allocated but then discarded).
        num_threads: number of dask workers to use (None = scheduler default).

    Returns:
        Array of shape [nr, nx, ny, nz]; element (i, j, k, l) is the
        length of ray i inside voxel (j, k, l).
    """
    nr = len(rays)
    nx = len(xvec)
    ny = len(yvec)
    nz = len(zvec)
    if out is not None:
        # Shape sanity check only; see docstring.
        assert out.shape == (nr, nx, ny, nz)
    # One delayed task per ray, stacked into a [nr, nx, ny, nz] dask array.
    stacked = da.stack(
        [da.from_delayed(delayed(slab_method_3d_ray)(ray, xvec, yvec, zvec),
                         shape=(nx, ny, nz), dtype=float)
         for ray in rays],
        axis=0)
    return stacked.compute(get=get, num_workers=num_threads)
| {
"repo_name": "Joshuaalbert/IonoTomo",
"path": "src/ionotomo/geometry/slab_method.py",
"copies": "1",
"size": "5771",
"license": "apache-2.0",
"hash": -3337192477802009600,
"line_mean": 30.3641304348,
"line_max": 138,
"alpha_frac": 0.5643735921,
"autogenerated": false,
"ratio": 2.3326596604688765,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8230782624801336,
"avg_score": 0.03325012555350815,
"num_lines": 184
} |
"""An elegant and powerfull implementation of a value object.
This module provides base classes to easilly create new value
objects. Although value objects should in general be immutable
it is possible to create a mutable instance of a value object
when this is required.
"""
class _ValueBase:
    """Behaviour shared by immutable value objects and their mutable companions."""

    def _is_same_type(self, other, companion_class):
        """Return True if *other* is an instance of this type or its companion."""
        return isinstance(other, (type(self), companion_class))

    def _is_equal(self, other, companion_class):
        """Return True if *other* is of a compatible type and all attributes match."""
        if not self._is_same_type(other, companion_class):
            return False
        return all(getattr(self, name) == getattr(other, name)
                   for name in self)

    def __iter__(self):
        """Return an iterable with the attribute names."""
        return iter(self._attributes)

    def __str__(self):
        """Return a string representation of the object."""
        parts = ('{}={}'.format(name, getattr(self, name)) for name in self)
        return '{}({})'.format(type(self).__name__, ','.join(parts))

    def __repr__(self):
        """Return a printable representation of the object.

        Passing this representation to eval will generally reproduce
        the object, although that ultimately depends on the attribute
        values having faithful reprs themselves.
        """
        parts = ('{}={}'.format(name, repr(getattr(self, name)))
                 for name in self)
        return '{}({})'.format(type(self).__name__, ','.join(parts))

    def __hash__(self):
        """Return a hash computed from a tuple of the attribute values."""
        values = tuple(getattr(self, attr) for attr in self)
        return hash(values)
class _MutableValueBase(_ValueBase):
    """Base class for the mutable companion of a value object.

    Not meant to be instantiated directly: defining a new value object
    automatically creates a subclass of this class and assigns it to
    the value class' Mutable attribute.

    A mutable value object mirrors the attributes of the immutable
    value object it was derived from, but the attributes are writable.

    Unlike the immutable form, whose constructor requires every
    attribute, a mutable value object may be created with any subset of
    its attributes and have the rest filled in later. It also accepts
    extra ad-hoc attributes (for example as temporary variables), just
    like a normal python object; such extras are ignored when an
    immutable value object is created from the mutable one.
    """

    def __init__(self, source=None, **kwargs):
        """Create from a source object and/or keyword arguments.

        When *source* is given, the declared attributes are copied from
        equally named attributes on it, so the class can be initialized
        from e.g. an instance of the complementary Value class or a
        named tuple; attributes missing from the source are skipped and
        extra attributes on the source are ignored.

        Keyword arguments are assigned afterwards and therefore
        overwrite values taken from the source. Keywords that do not
        correspond to declared attributes simply become new attributes
        on the instance.

        It is not an error to leave attributes unassigned — allowing
        object creation to happen in stages is one of the purposes of
        the mutable variant.
        """
        if source:
            for name in self._attributes:
                try:
                    value = getattr(source, name)
                except AttributeError:
                    continue
                setattr(self, name, value)
        for name in kwargs:
            setattr(self, name, kwargs[name])

    def to_immutable(self):
        """Create an immutable value object from this mutable instance.

        Instantiates the complementary (immutable) Value class with
        'self' as the source object. Attributes that are not part of
        the value definition are left out of the result.
        """
        return self.Immutable(source=self)

    def __eq__(self, other):
        """Test equality to another value object instance.

        Equal when *other* is an instance of the same mutable value
        class or its immutable companion and every declared attribute
        compares equal; additional ad-hoc attributes are ignored.
        Instances never compare equal to any other type, even if the
        attributes happen to match.
        """
        return self._is_equal(other, self.Immutable)

    __hash__ = _ValueBase.__hash__
class ValueMeta(type):
    """Meta class for creating value objects.

    Responsible for setting the special class attributes on a value
    class: the set of declared attribute names (``_attributes``) and
    the generated mutable companion class (``Mutable``). It is the meta
    class of Value, so users normally need not specify a meta class
    themselves; a custom meta class for a value object must inherit
    from this class.
    """
    def __init__(cls, name, bases, namespace):
        """Initialize the class.

        Collect the attribute names (all public, non-callable class
        attributes except the generated 'Mutable'/'to_mutable' members)
        and generate the mutable companion class.
        """
        # pylint: disable = protected-access
        super().__init__(name, bases, namespace)
        attributes = {name for name in dir(cls)
                      if not name.startswith('_') and
                      not callable(getattr(cls, name))}
        attributes -= {'Mutable', 'to_mutable'}
        cls._attributes = attributes

        class SubclassedMutable(_MutableValueBase):
            pass

        cls.Mutable = SubclassedMutable
        cls.Mutable.__name__ = 'Mutable' + name
        # BUG FIX: __qualname__ must be updated along with __name__,
        # otherwise repr() of the companion class misleadingly shows
        # 'ValueMeta.__init__.<locals>.SubclassedMutable'.
        cls.Mutable.__qualname__ = 'Mutable' + name
        cls.Mutable.Immutable = cls
        cls.Mutable._attributes = attributes
class Value(_ValueBase, metaclass=ValueMeta):
    '''Base class for defining new value objects.

    Subclass this class to define a value object; instantiating it
    directly yields an empty value object.

    Attributes are declared by adding class attributes with the desired
    names to the subclass body, using a docstring describing each
    attribute as the value. Any valid python variable name not starting
    with an underscore may be used, except 'Mutable', 'to_mutable',
    'Immutable' and 'to_immutable'.

    Example::

        class Point(ezvalue.Value):
            """Defines a cartesian point in 2D space."""

            x = """The x-coordinate in meters."""

            y = """The y-coordinate in meters."""

    Note: if the subclass specifies a meta class, that meta class must
    inherit from ValueMeta.

    Instances are immutable: assigning to an attribute, or creating a
    new one, raises AttributeError.
    '''

    def __init__(self, source=None, **kwargs):
        """Create from a source object and/or keyword arguments.

        Each declared attribute is looked up first in the keyword
        arguments and then on *source*, so keywords supplement or
        overwrite the values from the source object. This makes it easy
        to build a modified copy of an existing value object: pass the
        original as the source and override values with keywords.

        Passing a source object (for example a named tuple) initializes
        the attributes from its equally named attributes; extra fields
        on the source, like extra keyword arguments, are silently
        ignored.

        Raises:
            AttributeError: if a declared attribute is present in
                neither the keyword arguments nor the source object.
        """
        for name in self:
            if name in kwargs:
                setattr(self, name, kwargs[name])
            elif source:
                setattr(self, name, getattr(source, name))
            else:
                raise AttributeError("Attribute '{}' not specified."
                                     .format(name))

    def to_mutable(self):
        """Return a mutable copy of the value object.

        Instantiates the complementary mutable value class with 'self'
        as the source object.
        """
        return self.Mutable(source=self)

    def __eq__(self, other):
        """Test equality to another value object instance.

        Equal when *other* is an instance of the same value class or
        its mutable companion and every declared attribute compares
        equal; extra attributes on a mutable instance are ignored.

        Instances never compare equal to any other type, even with
        matching attributes. To compare with e.g. a named tuple, first
        convert it to a value object:

        >>> import collections
        >>> class MyValueObject(Value): foo = 'Test attribute'
        ...
        >>> my_value = MyValueObject(foo=1)
        >>> MyNamedTuple = collections.namedtuple('Foo', ('foo'))
        >>> my_value == MyValueObject(MyNamedTuple(foo=1))
        True
        """
        return self._is_equal(other, self.Mutable)

    __hash__ = _ValueBase.__hash__

    def __setattr__(self, name, value):
        """Raise AttributeError because the object is immutable.

        Assignment is only allowed for declared attributes that have
        not been set yet — i.e. during __init__.
        """
        already_set = name in self.__dict__
        declared = name in self._attributes
        if already_set or not declared:
            raise AttributeError('Object is immutable.')
        super().__setattr__(name, value)
| {
"repo_name": "snah/ezvalue",
"path": "ezvalue/__init__.py",
"copies": "1",
"size": "10865",
"license": "mit",
"hash": -8910079865606561000,
"line_mean": 38.7985347985,
"line_max": 77,
"alpha_frac": 0.6534744593,
"autogenerated": false,
"ratio": 5.018475750577367,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 273
} |
"""An ellipse widget."""
from typing import Optional
from kivy.graphics.vertex_instructions import Ellipse as KivyEllipse
from kivy.graphics.context_instructions import Color, Rotate, Scale
from kivy.properties import NumericProperty
from mpfmc.uix.widget import Widget
MYPY = False
if MYPY: # pragma: no cover
from mpfmc.core.mc import MpfMc
class Ellipse(Widget):
    """An ellipse widget.

    Draws a (possibly partial, via angle_start/angle_end) ellipse on the
    widget's canvas, with optional rotation and scaling around the
    widget's anchor point.
    """

    widget_type_name = 'Ellipse'
    # Properties the animation system is allowed to animate.
    animation_properties = ('x', 'y', 'width', 'pos', 'height', 'size', 'color',
                            'angle_start', 'angle_end', 'opacity', 'rotation', 'scale')
    merge_settings = ('width', 'height')

    def __init__(self, mc: "MpfMc", config: dict, key: Optional[str] = None, **kwargs) -> None:
        # Extra kwargs are accepted for interface compatibility but unused.
        del kwargs
        super().__init__(mc=mc, config=config, key=key)

        # Bind to all properties that when changed need to force
        # the widget to be redrawn
        self.bind(pos=self._draw_widget,
                  size=self._draw_widget,
                  color=self._draw_widget,
                  rotation=self._draw_widget,
                  scale=self._draw_widget,
                  segments=self._draw_widget,
                  angle_start=self._draw_widget,
                  angle_end=self._draw_widget)

        # Draw once immediately with the initial property values.
        self._draw_widget()

    def _draw_widget(self, *args) -> None:
        """Clear the canvas and redraw the ellipse with current properties."""
        del args

        if self.canvas is None:
            return

        # Pivot point for rotation/scaling: the widget position shifted
        # by the anchor offset (computed on the Widget base class).
        anchor = (self.x - self.anchor_offset_pos[0], self.y - self.anchor_offset_pos[1])
        self.canvas.clear()

        # NOTE: instruction order matters to kivy — color and transforms
        # must be emitted before the ellipse vertex instruction.
        with self.canvas:
            Color(*self.color)
            Rotate(angle=self.rotation, origin=anchor)
            # Scale's constructor takes the factor; origin is assigned
            # on the created instruction afterwards.
            Scale(self.scale).origin = anchor
            KivyEllipse(pos=self.pos, size=self.size,
                        segments=self.segments,
                        angle_start=self.angle_start,
                        angle_end=self.angle_end)

    #
    # Properties
    #

    segments = NumericProperty(180)
    '''Defines how many segments will be used for drawing the ellipse. The
    drawing will be smoother if you have many segments.
    '''

    angle_start = NumericProperty(0)
    '''Specifies the starting angle, in degrees, of the disk portion of
    the ellipse.
    '''

    angle_end = NumericProperty(360)
    '''Specifies the ending angle, in degrees, of the disk portion of
    the ellipse.
    '''

    # Rotation angle in degrees applied around the anchor point.
    rotation = NumericProperty(0)

    # Uniform scale factor applied around the anchor point.
    scale = NumericProperty(1.0)
widget_classes = [Ellipse]
| {
"repo_name": "missionpinball/mpf_mc",
"path": "mpfmc/widgets/ellipse.py",
"copies": "1",
"size": "2477",
"license": "mit",
"hash": -7602672219098168000,
"line_mean": 29.5802469136,
"line_max": 95,
"alpha_frac": 0.5938635446,
"autogenerated": false,
"ratio": 3.925515055467512,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.000558482631527899,
"num_lines": 81
} |
"""An embedding store to make fast prediction of all preceding theorems.
This module contains the class TheoremEmbeddingStore that is used for
storing theorem embeddings and can compute goal parameter scoring for a large
number of theorems.
"""
from __future__ import absolute_import
from __future__ import division
# Import Type Annotations
from __future__ import print_function
import os
import numpy as np
import tensorflow as tf
from typing import List, Optional, Text
from deepmath.deephol import io_util
from deepmath.deephol import predictions
from deepmath.deephol.utilities import normalization_lib
from deepmath.proof_assistant import proof_assistant_pb2
class TheoremEmbeddingStore(object):
    """An embedding store that computes and stores embeddings in the given order.

    Either compute_embeddings or read_embeddings should be called before
    save_embeddings or get_thm_score_for_preceding_thms are called.
    """

    def __init__(self, predictor: predictions.Predictions) -> None:
        """Initialize the prediction lib.

        Stores the prediction objects and initializes with an empty store.

        Args:
            predictor: An object conforming to the interface of predictor.Predictions.
        """
        self.predictor = predictor
        # 2D array [num_theorems, embedding_dim] once computed or read;
        # None until then.
        self.thm_embeddings = None
        # Local assumptions and their embeddings. Currently unsupported:
        # get_thm_scores_for_preceding_thms asserts both stay empty.
        self.assumptions = []
        self.assumption_embeddings = None

    def compute_assumption_embeddings(self, assumptions: List[Text]) -> None:
        """DEPRECATED - Compute embeddings for a list of assumptions and store them.

        The assumptions are preprocessed by truncating by the truncation value
        specified in the constructor.

        Args:
            assumptions: List of assumptions. Their order will be preserved.

        Raises:
            NotImplementedError: always; this feature was never implemented.
        """
        raise NotImplementedError(
            'Computing embedding of assumptions is not implemented.')

    def compute_embeddings_for_thms_from_db(
            self, theorem_database: proof_assistant_pb2.TheoremDatabase) -> None:
        """Compute and store embeddings for all theorems in the database.

        Args:
            theorem_database: TheoremDatabase proto whose theorems are embedded
                in order.
        """
        # Normalize conclusions before embedding so scores are consistent
        # with the predictor's normalization.
        normalized_thms = [
            normalization_lib.normalize(thm).conclusion
            for thm in theorem_database.theorems
        ]
        self.thm_embeddings = self.predictor.batch_thm_embedding(normalized_thms)

    def compute_embeddings_for_thms_from_db_file(self, file_path: Text) -> None:
        """Compute the embeddings for the theorems given in a test file.

        Args:
            file_path: Path to the text protobuf file containing the theorem database.
        """
        tf.logging.info('Reading theorems database from "%s"', file_path)
        theorem_database = io_util.load_theorem_database_from_file(file_path)
        self.compute_embeddings_for_thms_from_db(theorem_database)

    def read_embeddings(self, file_path: Text) -> None:
        """Read the embeddings and theorem list from the specified files.

        Args:
            file_path: Path to the file in which the embeddings are stored
                (numpy .npy format).
        """
        tf.logging.info('Reading embeddings from "%s"', file_path)
        with tf.gfile.Open(file_path, 'rb') as f:
            self.thm_embeddings = np.load(f)

    def save_embeddings(self, file_path: Text):
        """Save the embeddings and theorem list to the specified files.

        Args:
            file_path: The name of the file path in which the embeddings are stored.
                The directory and all parent directories are created if necessary.
        """
        dir_name = os.path.dirname(file_path)
        tf.logging.info('Writing embeddings "%s"', file_path)
        if not tf.gfile.Exists(dir_name):
            tf.gfile.MakeDirs(dir_name)
        assert tf.gfile.Exists(dir_name)
        with tf.gfile.Open(file_path, 'wb') as f:
            np.save(f, self.thm_embeddings)

    def get_embeddings_for_preceding_thms(self, thm_index):
        # Return the embeddings of all theorems strictly before thm_index.
        assert thm_index <= self.thm_embeddings.shape[0]
        assert thm_index >= 0
        return self.thm_embeddings[:thm_index]

    def get_thm_scores_for_preceding_thms(self,
                                          goal_embedding,
                                          thm_index: Optional[int] = None,
                                          tactic_id: Optional[int] = None):
        """Get the predicted pairwise scores in a numpy array.

        For the given goal embedding (which is either the embedding of the goal
        term or the embedding of the current proof state), get all the theorem
        scores that precede the given theorem in the theorem list and all the
        local assumptions stored in this store. The theorem parameter thm must
        be either None or be in the theorem list, otherwise an assertion will
        fail.

        Args:
            goal_embedding: 1D embedding with the embedding of the given goal.
            thm_index: Theorem index in the list of theorems in this store or
                None, in which case all of the theorems are scored.
            tactic_id: Optionally tactic that the theorem parameters will be
                used in.

        Returns:
            A 1D numpy array with the same length as the sum of the length
            of preceding thms and assumptions. It is the concatenated array of
            the scores for the preceding thms and assumptions in the same order
            given as in those arrays: first the theorem scores, then the
            assumption scores.
        """
        if thm_index is None:
            thm_index = self.thm_embeddings.shape[0]
        else:
            assert thm_index <= self.thm_embeddings.shape[0]
            assert thm_index >= 0
        # Local assumptions are unsupported; both containers must be empty.
        assert not self.assumptions
        assert not self.assumption_embeddings
        thm_embeddings = self.thm_embeddings[:thm_index]
        # Sanity check; with no assumptions this reduces to
        # len(thm_embeddings) == thm_index, which holds by the slice above.
        assert len(thm_embeddings) == thm_index + len(self.assumptions)
        return self.predictor.batch_thm_scores(goal_embedding, thm_embeddings,
                                               tactic_id)
| {
"repo_name": "tensorflow/deepmath",
"path": "deepmath/deephol/embedding_store.py",
"copies": "1",
"size": "5490",
"license": "apache-2.0",
"hash": 8998751481031010000,
"line_mean": 39.3676470588,
"line_max": 80,
"alpha_frac": 0.7018214936,
"autogenerated": false,
"ratio": 3.8661971830985915,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.003132601316459826,
"num_lines": 136
} |
"""An emcee example which gets probabilties from a set of external
processes, rather than from a Python function. We use a Pool-like
object which provides map to pass to emcee.
This example starts the remote() method of itself in different
processes to compute the lnprob. The remote process returns the
probability for a chi2 fit of a+b*x to some data.
Note that by using a command line using the "ssh" command, this
example can be extended to run on many computers simultaneously.
Note that this example will not work on Windows, as Windows does not
allow select.select to be used on pipes from subprocesses.
Jeremy Sanders 2012
"""
from __future__ import print_function
import subprocess
import select
import atexit
import collections
import os
import sys
import numpy as np
import emcee
# Registry of live Pool objects, emptied by the atexit hook below so any
# pools still open at interpreter shutdown are cleaned up.
_pools = []


def _finish_pools():
    """Shut down every Pool object that is still open at exit."""
    # Pool.finish() removes the pool from _pools, so this loop terminates.
    while _pools:
        _pools[0].finish()


atexit.register(_finish_pools)
class Pool(object):
    """Pool object manages external commands and sends and receives
    values.

    Provides the map() interface expected by emcee so that log
    probabilities can be computed by external worker processes.

    NOTE(review): this class relies on Python 2 idioms —
    ``iter(...).next()``, ``zip`` returning a list, and mixing the
    bytes returned by os.read into a str buffer — and would need
    porting to run under Python 3.
    """
    def __init__(self, commands):
        """Start up remote processes, one per command line in *commands*."""
        # list of open subprocesses
        self.popens = []
        # input text buffers for processes
        self.buffer = collections.defaultdict(str)
        for cmd in commands:
            p = subprocess.Popen(cmd,
                                 stdin=subprocess.PIPE,
                                 stdout=subprocess.PIPE)
            self.init_subprocess(p)
            self.popens.append(p)
        # keep track of open pool objects
        _pools.append(self)

    def finish(self):
        """Finish all processes."""
        # tell processes to finish
        for p in self.popens:
            self.close_subprocess(p)
        # wait until they have closed
        for p in self.popens:
            p.wait()
        del self.popens[:]
        # make sure we don't finish twice (also unregisters us from the
        # module-level atexit cleanup list)
        del _pools[ _pools.index(self) ]

    def init_subprocess(self, popen):
        """Initialise the subprocess given by popen.
        Override this if required."""

    def close_subprocess(self, popen):
        """Finish process given by popen.
        Override this if required
        """
        # Closing stdin signals end-of-input to the worker loop.
        popen.stdin.close()

    def send_parameters(self, stdin, params):
        """Send parameters to remote subprocess.
        By default just writes a line with parameters + \n
        Override this for more complex behaviour
        """
        txt = ' '.join([str(x) for x in params])
        stdin.write(txt + '\n')
        stdin.flush()

    def identify_lnprob(self, text):
        """Is the log probability in this text from the remote
        process? Return value if yes, or None.
        Override this if process returns more than a single value
        """
        # Only accept complete lines; a partial write is not a result yet.
        if text[-1] != '\n':
            return None
        try:
            return float(text.strip())
        except ValueError:
            return None

    def get_lnprob(self, stdout):
        """Called when the subprocess has written something to stdout.
        If the process has returned a lnprob, return its value.
        If it has not, return None.
        """
        # Read text available. This is more complex than we expect as
        # we might not get the full text.
        txt = os.read(stdout.fileno(), 4096)
        # add to buffered text
        self.buffer[stdout] += txt
        val = self.identify_lnprob(self.buffer[stdout])
        if val is not None:
            # complete value consumed: reset the buffer for this pipe
            self.buffer[stdout] = ''
            return val
        else:
            return None

    def map(self, function, paramlist):
        """Return a list of lnprob values for the list parameter sets
        given.
        Note: function is never called!
        """
        # create a map of index to parameter set
        # NOTE(review): Python 2 only — zip returns a list here and is
        # later mutated with `del inparams[0]`.
        inparams = zip(range(len(paramlist)), paramlist)
        # what we're going to return
        results = [None]*len(inparams)
        # systems which are waiting to do work
        freepopens = set( self.popens )
        # Stdout from systems currently doing work. Maps stdout ->
        # (output index, Popen object)
        waitingstdout = {}
        # repeat while work to do, or work being done
        while inparams or waitingstdout:
            # start job if possible
            while freepopens and inparams:
                idx, params = inparams[0]
                # Python 2 iterator protocol; py3 would be next(iter(...)).
                popen = iter(freepopens).next()
                # send the process the parameters
                self.send_parameters(popen.stdin, params)
                # move to next parameters and mark popen as busy
                del inparams[0]
                waitingstdout[popen.stdout] = (idx, popen)
                freepopens.remove(popen)
            # see whether any stdouts have output (short timeout keeps the
            # dispatch loop responsive while workers are busy)
            stdouts = select.select( waitingstdout.keys(), [], [], 0.001 )[0]
            for stdout in stdouts:
                # see whether process has written out probability
                lnprob = self.get_lnprob(stdout)
                if lnprob is not None:
                    # record result
                    idx, popen = waitingstdout[stdout]
                    results[idx] = lnprob
                    # open process up for work again
                    del waitingstdout[stdout]
                    freepopens.add(popen)
        return results
def main():
    """Run the emcee fit, farming lnprob evaluation out to worker processes."""
    # Launch four copies of this script in 'remote' mode as workers.
    worker_cmds = [[sys.executable, __file__, 'remote']
                   for _ in range(4)]
    pool = Pool(worker_cmds)

    # Two-parameter chi2 fit of a + b*x to the data defined in remote().
    ndim, nwalkers, nburn, nchain = 2, 100, 100, 1000

    # Wild random starting positions, one per walker.
    p0 = [np.random.rand(ndim) for _ in range(nwalkers)]

    # lnprob function is None: the pool's map() computes the values itself.
    sampler = emcee.EnsembleSampler(nwalkers, ndim, None, pool=pool)

    # Burn-in, then the production run from where burn-in left off.
    pos, prob, state = sampler.run_mcmc(p0, nburn)
    sampler.reset()
    sampler.run_mcmc(pos, nchain, rstate0=state)

    # Report the median fitted parameters (a, b).
    print("a = %g, b = %g" % ( np.median(sampler.chain[:,:,0]),
                               np.median(sampler.chain[:,:,1]) ))
def remote():
    """Worker loop: read parameters from stdin, write chi2 log-probability."""
    # Fake data set with a constant error bar; the model is a + b*x.
    x = np.arange(9)
    y = np.array([1.97,2.95,4.1,5.04,5.95,6.03,8,8.85,10.1])
    err = 0.2
    while True:
        line = sys.stdin.readline()
        if not line:
            # calling process has closed stdin: shut down
            break
        params = [float(field) for field in line.split()]
        model = params[0] + params[1]*x
        residuals = (y - model) / err
        lnprob = -0.5 * np.sum(residuals**2)
        sys.stdout.write(str(lnprob)+'\n')
        sys.stdout.flush()
# Dispatch: with the single argument 'remote' act as a worker process,
# otherwise run the driver that spawns the workers.
if __name__ == '__main__':
    if len(sys.argv) == 2 and sys.argv[1] == 'remote':
        remote()
    else:
        main()
| {
"repo_name": "jellis18/emcee3",
"path": "examples/subprocessing.py",
"copies": "13",
"size": "6888",
"license": "mit",
"hash": -2942462712121313300,
"line_mean": 30.1674208145,
"line_max": 77,
"alpha_frac": 0.5846399535,
"autogenerated": false,
"ratio": 4.1493975903614455,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": null,
"num_lines": null
} |
"""
Classes for interaction with the Dark Sky API
"""
##########################################################################
# Imports
##########################################################################
import sys
import os
sys.path.insert(0, os.getcwd())
from pprint import pprint
import requests
from anemoi.config import settings
from anemoi.exceptions import *
from anemoi.utils.decorators import rate_limit
from anemoi.utils.mixins import LoggableMixin
from anemoi.utils.geo import zip_codes
##########################################################################
# Module variables
##########################################################################
BASE_URL = 'https://api.darksky.net/forecast'
##########################################################################
# Classes
##########################################################################
class DarkSky(LoggableMixin):
    """Minimal client for the Dark Sky forecast API.

    Converts US zip codes to coordinates, queries the API, and renders
    short human-readable weather summaries.
    """

    BASE_URL = BASE_URL

    def __init__(self, access_token, *args, **kwargs):
        """
        Store the API access token and load the zip code lookup table.

        params:
            access_token: (str) Dark Sky API secret key
        """
        self.access_token = access_token
        self.zip_map = zip_codes()
        super(DarkSky, self).__init__(*args, **kwargs)

    @property
    def request_params(self):
        """
        Returns default request parameters as a dict
        """
        return {
            'lang': 'en',
            'units': 'us',
            'exclude': []
        }

    def _zip_to_coordinates(self, zip_code):
        """
        Returns a tuple of the lat/long values for a given zip code

        params:
            zip_code: (int or str) the requested zip code
        returns:
            str: the converted latitude
            str: the converted longitude
        raises:
            DarkSkyUnknownZipException: if the zip code is not in the table
        """
        try:
            item = self.zip_map[int(zip_code)]
        except KeyError:
            raise DarkSkyUnknownZipException('Unknown zip code of {}'.format(zip_code))
        return item['latitude'], item['longitude']

    def _construct_url(self, latitude, longitude):
        """
        Returns a full request URL based on supplied lat/long

        params:
            latitude: (float) latitude for request
            longitude: (float) longitude for request
        returns:
            string: URL for API request
        """
        return '{}/{}/{},{}'.format(self.BASE_URL, self.access_token, latitude, longitude)

    def _request(self, latitude, longitude):
        """
        Performs actual API request with a given lat/long and returns json
        response as a dict

        params:
            latitude: (float) latitude for request
            longitude: (float) longitude for request
        returns:
            dict: converted api response from json string
        raises:
            DarkSkyException: if the HTTP request fails
        """
        url = self._construct_url(latitude, longitude)
        response = requests.get(url, params=self.request_params)
        try:
            response.raise_for_status()
        except requests.RequestException as e:
            self.logger.exception(e)
            # BUG FIX: exceptions have no `.message` attribute on Python 3
            # (removed with PEP 352); use str(e) for the error text.
            raise DarkSkyException(str(e))
        return response.json()

    def _humanize_humidity(self, humidity):
        """
        Takes an number representing the humidity and returns a string
        characterizing how one would perceive it.
        """
        # TODO determine what input humidity feels like... may also need temp
        pass

    def _humanize_current(self, data):
        """
        Takes expected output from API and returns a string characterizing the
        current weather.

        params:
            data: (dict) response from dark sky api
        returns:
            string: human description of current conditions
        raises:
            DarkSkyInvalidResponseException: if expected keys are missing
        """
        # TODO: cleanup summary values such as "Drizzle"
        try:
            return 'It is currently {} and {} degrees outside.'.format(
                data['currently']['summary'],
                int(data['currently']['temperature']),
            )
        except KeyError:
            raise DarkSkyInvalidResponseException("Could not determine current forecast.")

    def _humanize_tomorrow(self, data):
        """
        Takes expected output from API and returns a string characterizing the
        forecast for tomorrow's weather.

        params:
            data: (dict) response from dark sky api
        returns:
            string: human description of future weather
        raises:
            DarkSkyInvalidResponseException: if expected keys are missing
        """
        try:
            return data['daily']['summary']
        except KeyError:
            raise DarkSkyInvalidResponseException("Could not determine tomorrow's forecast.")

    @rate_limit(limit=1000, period='month')
    def forecast(self, zip_code, now=True):
        """
        Returns the requested forecast based on supplied zip code.

        params:
            zip_code: (int or str) the zipcode to request
            now: (bool) whether to return current or tomorrow's conditions
        returns:
            string: human description of the requested forecast
        """
        latitude, longitude = self._zip_to_coordinates(zip_code)
        raw = self._request(latitude, longitude)
        if now:
            return self._humanize_current(raw)
        else:
            return self._humanize_tomorrow(raw)
##########################################################################
# Execution
##########################################################################
# Manual smoke test: fetch and print the current forecast for a sample zip.
if __name__ == '__main__':
    obj = DarkSky(settings.dark_sky.access_token)
    print(obj.forecast(20001))
| {
"repo_name": "looselycoupled/anemoi",
"path": "anemoi/apis/dark_sky.py",
"copies": "1",
"size": "5610",
"license": "mit",
"hash": -401668100610595800,
"line_mean": 29.4891304348,
"line_max": 93,
"alpha_frac": 0.5392156863,
"autogenerated": false,
"ratio": 4.675,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5714215686299999,
"avg_score": null,
"num_lines": null
} |
"""
SlackBot mixin for DarkSky functionality
"""
##########################################################################
# Imports
##########################################################################
from anemoi.config import settings
from anemoi.apis.dark_sky import DarkSky
##########################################################################
# Classes
##########################################################################
class DarkSkyMixin(object):
    """
    Mixin to add dark sky response capabilities to the slack bot.

    Commentary: This was inelegantly done as it was refactored late in the
    process. Given more time I think this would be the right direction in order
    to keep the SlackBot client extensible. (One would just have to add more mixins)
    """

    def __init__(self, darksky_access_token, *args, **kwargs):
        # Dedicated API client for weather lookups.
        self.darksky = DarkSky(darksky_access_token)
        # Register our handler before continuing cooperative initialization.
        self._register()
        super(DarkSkyMixin, self).__init__(*args, **kwargs)

    def _register(self):
        """Append this mixin's message handler to the bot's handler list."""
        # Create the handler list if no other mixin has done so yet.
        if not hasattr(self, 'handlers'):
            self.handlers = []
        self.handlers.append(self._handle_weather_message)

    def _handle_weather_current(self, msg, zipcode=settings.zip_code):
        # Reply with the current conditions.
        # NOTE(review): the default zipcode is bound once at import time
        # from settings.
        self.logger.info('Current weather request from {}'.format(msg.user))
        content = self.darksky.forecast(zipcode, now=True)
        self.reply(msg.channel, content)

    def _handle_weather_tomorrow(self, msg, zipcode=settings.zip_code):
        # Reply with tomorrow's forecast.
        self.logger.info('Tomorrow weather request from {}'.format(msg.user))
        content = self.darksky.forecast(zipcode, now=False)
        self.reply(msg.channel, content)

    def _handle_weather_message(self, msg):
        # Dispatch to the current and/or tomorrow handlers based on what
        # the message asks for (both may fire for one message).
        if msg._asks_for_weather_currently:
            self._handle_weather_current(msg)
        if msg._asks_for_weather_tomorrow:
            self._handle_weather_tomorrow(msg)
##########################################################################
# Execution
##########################################################################
# This module only provides the mixin above; no standalone behaviour.
if __name__ == '__main__':
    pass
| {
"repo_name": "looselycoupled/anemoi",
"path": "anemoi/bots/slack/mixins.py",
"copies": "1",
"size": "2363",
"license": "mit",
"hash": 1986651069248988200,
"line_mean": 32.7571428571,
"line_max": 85,
"alpha_frac": 0.5438002539,
"autogenerated": false,
"ratio": 4.131118881118881,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5174919135018881,
"avg_score": null,
"num_lines": null
} |
"""
module description
"""
##########################################################################
# Imports
##########################################################################
from collections import namedtuple, Iterable
from anemoi.utils.decorators import memoized
##########################################################################
# Classes
##########################################################################
# Known keys of a Slack RTM 'message' event; absent keys are backfilled
# with None by the factory before the namedtuple is constructed.
MESSAGE_FIELDS = ['bot_id', 'channel', 'source_team', 'team', 'text', 'ts', 'type', 'user', 'event_ts', 'subtype']
MessageBase = namedtuple('MessageBase', ' '.join(MESSAGE_FIELDS))


class SlackMessage(MessageBase):
    """
    Immutable wrapper around a raw Slack 'message' event that layers
    simple intent-detection helpers on top of the plain fields.
    """

    @property
    def _is_for_bot(self):
        """True when the message text mentions the bot by its ID."""
        return '<@{}>'.format(self.bot_id) in self.text

    @property
    def _asks_for_weather_currently(self):
        """True when the bot is mentioned and the text asks about the
        weather right now."""
        # TODO: replace with regular expressions
        lowered = self.text.lower()
        phrases = ('current weather', 'weather now', 'weather currently')
        return self._is_for_bot and any(p in lowered for p in phrases)

    @property
    def _asks_for_weather_tomorrow(self):
        """True when the bot is mentioned and the text asks about
        tomorrow's weather."""
        # TODO: replace with regular expressions
        lowered = self.text.lower()
        phrases = ('tomorrow weather', 'weather tomorrow',
                   "tomorrow's weather")
        return self._is_for_bot and any(p in lowered for p in phrases)

    @property
    def _asks_for_weather(self):
        """True when either weather intent is detected."""
        return (self._asks_for_weather_currently
                or self._asks_for_weather_tomorrow)
class SlackMessageFactory(object):
    """
    Builds :py:class:`SlackMessage` instances from raw dicts delivered
    by the Slack RTM service.
    """

    def __init__(self, bot_id):
        self.bot_id = bot_id

    def create(self, data):
        """
        Returns a new SlackMessage instance from supplied input dict.

        params:
            data: (dict) dict of communication item from Slack RTM service

        returns:
            SlackMessage
        """
        data['bot_id'] = self.bot_id
        # Backfill any fields Slack omitted so the namedtuple constructor
        # always receives a complete keyword set.
        for field in MESSAGE_FIELDS:
            data.setdefault(field, None)
        return SlackMessage(**data)
class SlackCommsFactory(object):
    """
    Routes raw Slack communication dicts to a type-specific factory
    (based on the dict 'type' key) and returns the resulting instances.
    """

    def __init__(self, bot_id):
        self.bot_id = bot_id

    @memoized
    def factories(self):
        """Mapping of Slack event type to the factory that handles it."""
        return {
            'message': SlackMessageFactory(self.bot_id)
        }

    def _create(self, data):
        """
        Build a single communication object from ``data`` using the
        factory registered for ``data['type']``.

        Returns None (implicitly) for unrecognized types.
        """
        factory = self.factories.get(data['type'])
        if factory is not None:
            return factory.create(data)

    def create(self, data):
        """
        Convert a single dict, or an iterable of dicts, into message
        instances; unrecognized types are silently dropped from lists.

        params:
            data: (dict or iterable) single or iterable of communication item from Slack RTM service

        returns:
            SlackMessage or list of SlackMessage or None
        """
        if isinstance(data, dict):
            return self._create(data)
        if isinstance(data, Iterable):
            created = [self._create(item) for item in data]
            return filter(lambda val: val is not None, created)
        raise TypeError('create accepts only iterable or dict instances')
##########################################################################
# Execution
##########################################################################
# Library module: no command-line entry point.
if __name__ == '__main__':
    pass
| {
"repo_name": "looselycoupled/anemoi",
"path": "anemoi/bots/slack/messages.py",
"copies": "1",
"size": "4172",
"license": "mit",
"hash": -9199661938950195000,
"line_mean": 27.1891891892,
"line_max": 114,
"alpha_frac": 0.5457813998,
"autogenerated": false,
"ratio": 4.5249457700650755,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5570727169865075,
"avg_score": null,
"num_lines": null
} |
"""
module description
"""
##########################################################################
# Imports
##########################################################################
import time
import signal
from collections import namedtuple
from slackclient import SlackClient
from concurrent.futures import ThreadPoolExecutor
from anemoi.utils.mixins import LoggableMixin
from anemoi.version import get_version
from anemoi.exceptions import SlackException, SlackBadResponse
from .messages import SlackCommsFactory
from .mixins import DarkSkyMixin
##########################################################################
# Classes
##########################################################################
class SlackBot(LoggableMixin, DarkSkyMixin):
    """
    Basic Slack chat bot that can be extended by adding mixins.

    TODO: Clean up the way mixins register themselves. Perhaps instead have
    a list of message handlers that respond to different message types.
    """

    def __init__(self, slack_access_token, slack_bot_id, *args, **kwargs):
        """
        Parameters
        ----------
        slack_access_token: str
            Token used to authenticate against the Slack RTM/Web APIs.
        slack_bot_id: str
            The bot user's Slack ID, used by message parsing to detect
            mentions of the bot.
        """
        self.access_token = slack_access_token
        self.bot_id = slack_bot_id
        self.message_factory = SlackCommsFactory(self.bot_id)
        self._shutdown_sentinel = False
        # Allow Ctrl-C to request a graceful shutdown of the listen loop.
        signal.signal(signal.SIGINT, self.request_shutdown)
        super(SlackBot, self).__init__(*args, **kwargs)

    def start(self):
        """
        Public method to initiate the bot
        """
        self.logger.info('SlackBot v{} starting up with bot ID: {}'.format(
            get_version(),
            self.bot_id
        ))
        self.listen()

    def request_shutdown(self, *args):
        """
        Signal handler and public method to signal a shutdown of the bot
        """
        self.logger.info('SlackBot shutdown has been requested')
        self._shutdown_sentinel = True

    def _filter_messages(self, data):
        """
        Returns a list of message instances for known message types
        """
        items = filter(lambda item: 'type' in item and item['type'] == 'message', data)
        messages = self.message_factory.create(items)
        return messages

    def _process_message(self, msg):
        """
        Cycles through known message handlers in so that a message can be processed. In
        this manner multiple requests could be handled in a single message from the user.
        """
        # `handlers` is populated by mixins (e.g. DarkSkyMixin._register).
        for handler in self.handlers:
            handler(msg)

    def reply(self, channel, content):
        """
        Public method to post back to the Slack team. Used by mixins which
        actually respond to the various message types.
        """
        try:
            response = self.client.api_call(
                "chat.postMessage",
                channel=channel,
                text=content,
                as_user=True
            )
            if not response['ok']:
                self.logger.error('Error posting reply to channel')
                raise SlackBadResponse('Slack response was not ok')
        except Exception as e:
            self.logger.exception(e)
            raise

    def listen(self, concurrency=4):
        """
        Begins the process of listening for slack messages by connecting to the
        RTM api and responds to each request using a thread pool.
        """
        self.client = SlackClient(self.access_token)
        with ThreadPoolExecutor(max_workers=concurrency) as pool:
            if self.client.rtm_connect():
                while not self._shutdown_sentinel:
                    incoming = self.client.rtm_read()
                    if incoming:
                        entreaties = self._filter_messages(incoming)
                        for msg in entreaties:
                            pool.submit(self._process_message, msg)
                    time.sleep(.5)
            else:
                # BUGFIX: this was a Python 2 print *statement*, which is a
                # syntax error under Python 3; the call form behaves
                # identically for a single argument on both versions.
                print("Connection Failed, invalid token?")
##########################################################################
# Execution
##########################################################################
# Library module: no command-line entry point.
if __name__ == '__main__':
    pass
| {
"repo_name": "looselycoupled/anemoi",
"path": "anemoi/bots/slack/bots.py",
"copies": "1",
"size": "4349",
"license": "mit",
"hash": -1316129038126605000,
"line_mean": 31.6992481203,
"line_max": 89,
"alpha_frac": 0.5477121177,
"autogenerated": false,
"ratio": 4.701621621621622,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0006990222690991467,
"num_lines": 133
} |
"""
primary configuration module for Anemoi project
"""
##########################################################################
## Imports
##########################################################################
from confire import Configuration
from confire import environ_setting
import os
##########################################################################
## Misc Configuration
##########################################################################
class SlackConfiguration(Configuration):
    """Slack API credentials; values are overridden by the YAML
    settings file loaded via confire."""
    access_token = ""
    bot_id = ""
class DarkSkyConfiguration(Configuration):
    """Dark Sky weather API credentials; overridden by the YAML
    settings file loaded via confire."""
    access_token = ""
##########################################################################
## Logging Configuration
##########################################################################
class LoggingConfiguration(Configuration):
    """
    Specialized configuration system for the Python logging module. After 2.7
    Python now accepts configurations for logging from a dictionary. This
    configuration class exposes that dictionary on demand to the logging
    system. Note that it is complex because of the nested, internal configs.
    """

    version = 1
    disable_existing_loggers = False

    formatters = {
        'simple': {
            'format': '[%(asctime)s] %(levelname)s {%(name)s.%(funcName)s:%(lineno)d} %(message)s',
            'datefmt': '%Y-%m-%d %H:%M:%S %z',
        }
    }

    handlers = {
        'null': {
            'level': 'DEBUG',
            'class': 'logging.NullHandler',
        },
        'console': {
            'level': 'DEBUG',
            'class': 'logging.StreamHandler',
            'formatter': 'simple',
        },
        'logfile': {
            'level': 'INFO',
            'class': 'logging.handlers.RotatingFileHandler',
            'filename': 'logs/anemoi.log',
            # BUGFIX: maxBytes must be an int (64 MiB).  dictConfig passes
            # this straight to RotatingFileHandler, which compares it to the
            # current file size -- a string raises TypeError on Python 3.
            'maxBytes': 67108864,
            'formatter': 'simple',
        }
    }

    loggers = {
        'anemoi': {
            'level': 'DEBUG',
            'handlers': ['console', 'logfile'],
            # BUGFIX: key was misspelled 'propagagte', so the intended
            # setting was silently ignored by dictConfig.
            'propagate': True,
        },
        'py.warnings': {
            'level': 'DEBUG',
            'handlers': ['console', 'logfile'],
            'propagate': True,
        }
    }

    def dict_config(self):
        """
        Returns the dictionary configuration for use with the Python logging
        module's logging.config.dictConfigClass function.
        """
        return dict(self.options())
##########################################################################
## App Configuration
##########################################################################
class DefaultConfiguration(Configuration):
    """Top-level application configuration, loaded from a per-environment
    YAML file selected by the ANEMOI_ENV environment variable
    (defaulting to 'development')."""

    CONF_PATHS = [
        'conf/settings.{}.yaml'.format(os.getenv('ANEMOI_ENV', 'development')),
    ]

    slack = SlackConfiguration()
    dark_sky = DarkSkyConfiguration()
    logging = LoggingConfiguration()
    # Default zip code used for weather lookups.
    zip_code = 20001


# Module-level singleton consumed throughout the project
# (e.g. `from anemoi.config import settings`).
settings = DefaultConfiguration.load()
| {
"repo_name": "looselycoupled/anemoi",
"path": "anemoi/config.py",
"copies": "1",
"size": "3295",
"license": "mit",
"hash": -4746100763269381000,
"line_mean": 27.9035087719,
"line_max": 99,
"alpha_frac": 0.4861911988,
"autogenerated": false,
"ratio": 4.782293178519594,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5768484377319594,
"avg_score": null,
"num_lines": null
} |
'''Anemoi

Packaging metadata and setup script for the anemoi project.
'''

import os

# BUGFIX: take setup() from setuptools rather than distutils.core.
# Mixing the two is unreliable, and distutils is deprecated (removed
# from the standard library in Python 3.12).
from setuptools import setup, find_packages

import numpy as np

CLASSIFIERS = [
    'Development Status :: 3 - Alpha',
    'Intended Audience :: Developers',
    'Intended Audience :: Science/Research',
    'License :: OSI Approved :: MIT License',
    'Programming Language :: Python',
    'Topic :: Scientific/Engineering',
    'Topic :: Scientific/Engineering :: Mathematics',
    'Topic :: Scientific/Engineering :: Physics',
    'Operating System :: Microsoft :: Windows',
    'Operating System :: POSIX',
    'Operating System :: Unix',
    'Operating System :: MacOS',
    'Natural Language :: English',
]

# Use the README verbatim as the long description.
with open('README.md') as fp:
    LONG_DESCRIPTION = fp.read()

setup(
    name = 'anemoi',
    # version = '0.1.1',
    packages = find_packages(),
    install_requires = ['numpy>=1.7',
                        'scipy>=0.13',
                        ],
    author = 'Brendan Smithyman',
    author_email = 'brendan@bitsmithy.net',
    description = 'Anemoi',
    long_description = LONG_DESCRIPTION,
    license = 'MIT',
    keywords = 'full-waveform inversion',
    # url = '',
    download_url = 'http://github.com/uwoseis/anemoi',
    classifiers = CLASSIFIERS,
    platforms = ['Windows', 'Linux', 'Solaris', 'Mac OS-X', 'Unix'],
    use_2to3 = False,
    include_dirs=[np.get_include()],
)
| {
"repo_name": "uwoseis/anemoi",
"path": "setup.py",
"copies": "1",
"size": "1321",
"license": "mit",
"hash": 6162739952733339000,
"line_mean": 27.1063829787,
"line_max": 68,
"alpha_frac": 0.6351249054,
"autogenerated": false,
"ratio": 3.503978779840849,
"config_test": false,
"has_no_keywords": true,
"few_assignments": false,
"quality_score": 0.9511265393547099,
"avg_score": 0.02556765833874993,
"num_lines": 47
} |
"""
Provides zipcode and geographic data
"""
##########################################################################
# Imports
##########################################################################
import os
import unicodecsv as csv
##########################################################################
# Functions
##########################################################################
def zip_codes():
    """
    Returns a dict of zip code to lat/long mappings based off of the CSV found
    at https://gist.github.com/erichurst/7882666 (2013)

    NOTE(review): the fixture path is resolved against os.getcwd(), so this
    only works when the process is launched from the project root -- consider
    anchoring on __file__.  The full CSV is re-read on every call.
    """
    filename = 'zip_lat_long.csv'
    path = os.path.join(os.getcwd(), 'fixtures', 'csv', filename)
    data = {}
    with open(path) as f:
        reader = csv.DictReader(f)
        for row in reader:
            # Keys are ints; coordinates are kept as (whitespace-stripped)
            # strings exactly as they appear in the CSV.
            data[int(row['ZIP'])] = {
                'latitude': row['LAT'].strip(),
                'longitude': row['LNG'].strip(),
            }
    return data
def zip2geo(zip_code):
    """
    Returns a dict of lat/long info for supplied zip code
    """
    # Plain dict lookup: unknown zip codes raise KeyError.
    mapping = zip_codes()
    return mapping[zip_code]
##########################################################################
# Execution
##########################################################################
if __name__ == '__main__':
    # BUGFIX: was a Python 2 print *statement* (a syntax error on Python 3);
    # the function-call form behaves identically for a single argument.
    print(zip2geo(20001))
| {
"repo_name": "looselycoupled/anemoi",
"path": "anemoi/utils/geo.py",
"copies": "1",
"size": "1553",
"license": "mit",
"hash": -3429706087893476400,
"line_mean": 26.7321428571,
"line_max": 78,
"alpha_frac": 0.4288473921,
"autogenerated": false,
"ratio": 4.4498567335243555,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5378704125624355,
"avg_score": null,
"num_lines": null
} |
""" An Enable component to render SVG documents.
"""
import sys
import time
from enable.api import Component
from traits.api import Any, Array, Bool, Float
from kiva.fonttools import Font
# Pick a timer for measuring draw durations.  time.clock was historically
# the highest-resolution timer on Windows, but it was removed in Python
# 3.8; prefer time.perf_counter (available since 3.3) and keep the old
# platform-specific fallbacks for ancient interpreters.
if hasattr(time, 'perf_counter'):
    now = time.perf_counter
elif sys.platform == 'win32':
    now = time.clock
else:
    now = time.time
class SVGComponent(Component):
    """ An Enable component to render SVG documents.
    """

    # The SVGDocument.
    document = Any()

    # The number of seconds it took to do the last draw.
    last_render = Float()

    # The profile manager.
    profile_this = Any()
    # One-shot flag: when True, the next draw is profiled and the flag
    # is cleared (set again by _document_changed).
    should_profile = Bool(False)

    def _draw_mainlayer(self, gc, view_bounds=None, mode='default'):
        """Render the SVG document (or an error message when there is no
        document) into ``gc`` and record the draw time in ``last_render``."""
        if self.should_profile and self.profile_this is not None:
            # Only profile the first draw.
            self.should_profile = False
            self.profile_this.start('Drawing')
        start = now()

        gc.clear()
        width, height = self.bounds
        gc.save_state()
        if self.document is None:
            # No parsed document: draw a plain-text error near the top edge.
            # fixme: The Mac backend doesn't accept style/width as non-integers
            # in set_font, but does for select_font...
            if sys.platform == 'darwin':
                gc.select_font("Helvetica", 36)
            else:
                gc.set_font(Font("Helvetica", 36))
            gc.show_text_at_point("Could not parse document.", 20, height-56)
            gc.restore_state()
            if self.profile_this is not None:
                self.profile_this.stop()
            return

        try:
            # SVG origin is upper right with y positive is down.
            # Set up the transforms to fix this up.
            # FIXME: if the rendering stage fails, all subsequent renders are vertically flipped
            gc.translate_ctm(0, height)
            # TODO: bother with zoom?
            # TODO: inspect the view bounds and scale to the shape of the
            # component?
            scale = 1.0
            gc.scale_ctm(scale, -scale)
            self.document.render(gc)
            self.last_render = now() - start
        finally:
            gc.restore_state()
            if self.profile_this is not None:
                self.profile_this.stop()

    def _document_changed(self):
        # Traits static notification: re-arm profiling and repaint
        # whenever a new document is assigned.
        self.should_profile = True
        self.invalidate_and_redraw()
class ImageComponent(Component):
    """ Simple component that just renders an RGB(A) array in the upper left
    hand corner of the component.
    """

    # The RGB(A) data.  Anything that is not a rank-3 array is treated
    # as "no image" and skipped by the draw handler.
    image = Array()

    def _draw_mainlayer(self, gc, view_bounds=None, mode='default'):
        """Draw the image anchored at the component's top-left corner."""
        gc.clear()
        if len(self.image.shape) != 3:
            # No image.
            return

        gc.save_state()
        try:
            width, height = self.bounds
            img_height, img_width = self.image.shape[:2]
            # Kiva rects are (x, y, w, h) with y pointing up, so offset by
            # the image height to pin the image against the top edge.
            gc.draw_image(self.image, (0.0,height-img_height,img_width,img_height))
        finally:
            gc.restore_state()

    def _image_changed(self):
        # Traits static notification: repaint whenever the array is replaced.
        self.invalidate_and_redraw()
| {
"repo_name": "tommy-u/enable",
"path": "enable/savage/compliance/svg_component.py",
"copies": "1",
"size": "2992",
"license": "bsd-3-clause",
"hash": -6959667964877335000,
"line_mean": 28.0485436893,
"line_max": 96,
"alpha_frac": 0.5748663102,
"autogenerated": false,
"ratio": 4.0269179004037685,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5101784210603768,
"avg_score": null,
"num_lines": null
} |
"""An encapsulated thread-local variable that indicates whether future DB
writes should be "stuck" to the master."""
from functools import wraps
import threading
__all__ = ['this_thread_is_pinned', 'pin_this_thread', 'unpin_this_thread',
           'use_master', 'db_write']


_locals = threading.local()


def this_thread_is_pinned():
    """Return whether the current thread should send all its reads to the
    master DB."""
    return getattr(_locals, 'pinned', False)


def pin_this_thread():
    """Mark this thread as "stuck" to the master for all DB access."""
    _locals.pinned = True


def unpin_this_thread():
    """Unmark this thread as "stuck" to the master for all DB access.

    If the thread wasn't marked, do nothing.
    """
    _locals.pinned = False


class UseMaster(object):
    """A contextmanager/decorator to use the master database.

    Re-entrant: nested ``with use_master:`` blocks restore the original
    pinned state when the outermost block exits.
    """

    def __call__(self, func):
        @wraps(func)
        def decorator(*args, **kw):
            with self:
                return func(*args, **kw)
        return decorator

    def __enter__(self):
        # BUGFIX: the previous implementation saved the prior state in a
        # single attribute (_locals.old), which a nested `with use_master:`
        # clobbered -- after the outer block exited, the thread stayed
        # pinned forever.  A per-thread stack makes the manager re-entrant.
        stack = getattr(_locals, 'old_states', None)
        if stack is None:
            stack = _locals.old_states = []
        stack.append(this_thread_is_pinned())
        pin_this_thread()

    def __exit__(self, type, value, tb):
        # Restore whatever pinned state was in effect on entry.
        if not _locals.old_states.pop():
            unpin_this_thread()

use_master = UseMaster()


def mark_as_write(response):
    """Mark a response as having done a DB write."""
    response._db_write = True
    return response


def db_write(fn):
    """Decorator: run ``fn`` pinned to the master DB and mark its
    response as having performed a DB write."""
    @wraps(fn)
    def _wrapped(*args, **kw):
        with use_master:
            response = fn(*args, **kw)
        return mark_as_write(response)
    return _wrapped
| {
"repo_name": "sonico999/django-multidb-router",
"path": "multidb/pinning.py",
"copies": "1",
"size": "1594",
"license": "bsd-3-clause",
"hash": -5512141625307634000,
"line_mean": 22.7910447761,
"line_max": 75,
"alpha_frac": 0.6141781681,
"autogenerated": false,
"ratio": 3.7156177156177157,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4829795883717716,
"avg_score": null,
"num_lines": null
} |
"""An engine accessing network activations.
"""
# standard imports
from typing import Union, List, Tuple, Iterable
from pathlib import Path
import os
import json
import logging
# import math
# import functools
# import operator
# third party imports
import numpy as np
# toolbox imports
from network import Network, Classifier, ShapeAdaptor, ResizePolicy
from network.layers import Layer
from ..datasource import Datasource, Datafetcher
from ..base.prepare import Preparable
from ..base.data import Data
from ..util.array import adapt_data_format, DATA_FORMAT_CHANNELS_FIRST
from ..util import nphelper
from ..config import config
from . import Tool, Worker
# logging
LOG = logging.getLogger(__name__)
# FIXME[todo]: this is essentially a wraper around Network.
# Check if we could make the Network itself an ActivationTool
class ActivationTool(Tool, Network.Observer):
    """.. :py:class:: Activation

    The :py:class:`Activation` class encompassing network, current
    activations, and the like.

    An :py:class:`Activation` tool is :py:class:`Observable`. Changes
    in the :py:class:`Activation` that may affect the computation of
    activation are passed to observers (e.g. workers) calling the
    :py:meth:`Observer.activation_changed` method in order to inform
    them as to the exact nature of the model's change.

    **Changes**

    network_changed:
        The underlying :py:class:`network.Network` has changed,
        or its preparation state was altered. The new network
        can be accessed via the :py:attr:`network` property.
    layer_changed:
        The selected set of layers was changed.

    Attributes
    ----------
    _network: Network
        Currently active network

    _layers: List[Layer]
        the layers of interest

    _classification: bool
        If True, the model will consider the current model
        as a classifier and record the output of the output layer
        in addition to the current (hidden) layer.

    Data processing
    ---------------
    The :py:class`ActivationTool` can be applied to `Data` objects. It
    will pass the `Data` object as argument to the underlying
    :py:class:`Network` and will store results as attributes in
    the `Data` object. It will use the following attributes:

    [tool_name]_activations:
        A dictionary mapping layer names to (numpy) arrays of activation
        values.
    """

    def __init__(self, network: Network = None, data_format: str = None,
                 **kwargs) -> None:
        """Create a new ``Engine`` instance.

        Parameters
        ----------
        network: Network
            Network providing activation values.
        data_format: str
            Explicit data format (channels first/last). If omitted, the
            network's own format is used (see :py:prop:`data_format`).
        """
        super().__init__(**kwargs)

        # adapters
        self._shape_adaptor = ShapeAdaptor(ResizePolicy.Bilinear())
        self._channel_adaptor = ShapeAdaptor(ResizePolicy.Channels())
        self._data_format = data_format

        # network related (assigned via the property so observation and
        # adaptor setup happen in one place)
        self._network = None
        self.network = network

    @property
    def data_format(self) -> str:
        """The data format (channels first/last) in which activations are
        computed: the explicitly configured format if given, otherwise
        the network's format, or None when neither is available.
        """
        if self._data_format is not None:
            return self._data_format
        if self._network is not None:
            return self._network.data_format
        return None

    #
    # network
    #

    def network_changed(self, _network: Network, info: Network.Change) -> None:
        """React to changes of the :py:class:`Network`.

        The :py:class:`ActivationTool` is interested when the
        network becomes prepared (or unprepared). We just forward
        these notifications.
        """
        LOG.debug("Activation.network_changed(%s)", info)
        if info.state_changed:
            self.change('state_changed')

    @property
    def network(self) -> Network:
        """Get the currently selected network.

        Returns
        -------
        The currently selected network or None if no network
        is selected.
        """
        return self._network

    @network.setter
    def network(self, network: Network) -> None:
        if network is self._network:
            return  # nothing changed

        if self._network is not None:
            self.unobserve(self._network)
        self._network = network
        if network is not None:
            interests = Network.Change('state_changed')
            self.observe(network, interests)
            # FIXME[old]: what is this supposed to do?
            if network.prepared and self._shape_adaptor is not None:
                self._shape_adaptor.setNetwork(network)
                self._channel_adaptor.setNetwork(network)
        self.change('tool_changed')

    #
    # Tool interface
    #

    external_result = ('activations', )
    internal_arguments = ('inputs', 'layer_ids')
    internal_result = ('activations_list', )

    def _preprocess(self, inputs: np.ndarray, layer_ids: List[Layer] = None,
                    **kwargs) -> Data:
        # pylint: disable=arguments-differ
        # FIXME[todo]: inputs should probably be Datalike
        """Preprocess the arguments and construct a Data object.
        """
        data = super()._preprocess(**kwargs)
        array = inputs.array if isinstance(inputs, Data) else inputs
        data.add_attribute('inputs', array)
        unlist = False
        if layer_ids is None:
            # Default to all layers of the current network.
            layer_ids = list(self._network.layer_dict.keys())
        elif not isinstance(layer_ids, list):
            # Remember that a single layer was passed so the result can
            # be unwrapped later (stored in the 'unlist' attribute).
            layer_ids, unlist = [layer_ids], True
        data.add_attribute('layer_ids', layer_ids)
        data.add_attribute('unlist', unlist)
        return data

    def _process(self, inputs: np.ndarray,
                 layers: List[Layer]) -> List[np.ndarray]:
        # pylint: disable=arguments-differ
        """Perform the actual operation, that is the computation of
        activation values for given input values.

        Arguments
        ---------
        inputs:
            Input data.
        layers:
            A list of layers for which to compute activations.
        """
        # BUGFIX: replaced a stray debugging print() with lazy logging.
        # NOTE(review): internal_arguments names this argument 'layer_ids'
        # while the parameter is called 'layers' -- confirm the Tool base
        # class maps internal arguments positionally.
        LOG.debug("ActivationTool._process(%s, %s)",
                  type(inputs), type(layers))

        if self._network is None:
            return None

        if not layers:
            return layers

        return self._network.get_activations(inputs, layers,
                                             data_format=self.data_format)

    def _postprocess(self, data: Data, what: str) -> None:
        """Store the computed activations on ``data`` as a dictionary
        mapping layer ids to activation arrays."""
        if what == 'activations':
            activations_dict = dict(zip(data.layer_ids, data.activations_list))
            data.add_attribute(what, activations_dict)
            data.add_attribute('activations_dict', activations_dict)
        else:
            super()._postprocess(data, what)

    def data_activations(self, data: Data, layer: Layer = None,
                         unit: int = None,
                         data_format: str = None) -> np.ndarray:
        """Get the precomputed activation values for the current
        :py:class:`Data`.

        Arguments
        ---------
        data:
            The :py:class:`Data` object in which precomputed activations
            are stored.
        layer:
            The layer for which activation values should be obtained.
        unit:
            The unit for which activation values should be obtained.
        data_format:
            The data format (channel first or channel last) in which
            activation values should be returned. If `None` (default),
            the default format of this :py:class:`ActivationTool`
            (according to :py:prop:`data_format`) is used.

        Result
        ------
        activations:
            The requested activation values. The type depends on the
            arguments:
            If no `layer` is specified, the result will be a
            dictionary mapping layer names (`str`) to activation values
            (`numpy.ndarray`).
            If a `layer` is specified, the activation values (np.ndarray)
            of that layer are returned.
            If in addition to `layer` also a `unit` is specified, only
            the activation value(s) for that unit are returned.
        """
        # FIXME[todo]: batch processing - add an 'index' argument ...

        # activations: dict[layer: str, activation_values: np.ndarray]
        activations = self.get_data_attribute(data, 'activations')

        if data_format is None:
            data_format = self.data_format
        elif unit is not None:
            LOG.warning("Providing a data_format (%s) has no effect "
                        "when querying unit activation", data_format)

        if layer is None:  # return the full dictionary
            # Use string equality, not identity: format strings coming
            # from callers need not be the interned constants.
            if data_format == self.data_format:
                return activations  # no transformation required
            # BUGFIX: convert the *values* of the layer->activation dict
            # and keep the dictionary shape (the old code mapped over the
            # dict itself -- i.e. over its keys -- and returned a list of
            # layer names instead of converted activation arrays; it also
            # used self._data_format, bypassing the network fallback).
            return {name: adapt_data_format(activation,
                                            input_format=self.data_format,
                                            output_format=data_format)
                    for name, activation in activations.items()}

        if isinstance(layer, Layer):
            layer = layer.id
        activations = activations[layer]
        if data_format != self.data_format:
            activations = adapt_data_format(activations,
                                            input_format=self.data_format,
                                            output_format=data_format)

        if unit is None:
            return activations

        return (activations[unit] if data_format == DATA_FORMAT_CHANNELS_FIRST
                else activations[..., unit])
class PersistentTool(Preparable):
    """Base class for tools that persist per-network/per-datasource
    results in a directory on disk.

    Arguments
    ---------
    network:
        The :py:class:`Network` (or its key) by which values are obtained.
    datasource:
        The :py:class:`Datasource` (or its key) for which values are
        computed.
    layers:
        Layer keys (or :py:class:`Layer` objects) to store values for;
        subclasses default to all network layers when `None`.
    mode:
        The mode of operation: `r` opens the archive for reading and
        `w` for writing. In case of `w`, the archive will use existing
        files if they exist or create new ones otherwise.
    """

    # FIXME[hack]: put this in the config file ...
    config.activations_directory = Path('/space/home/ulf/activations')

    # _datasource:
    #    The Datasource for which activation values are computed
    _datasource: Union[str, Datasource] = None

    # _network:
    #    The Network by which activation values are obtained.
    _network: Union[str, Network] = None

    # _layers:
    #    The keys of the network layers for which activation values are
    #    stored in the ActivationsArchive
    _layers: List[str] = None  # sequence of layers

    # _meta:
    #    Archive metadata ({'total', 'valid', ...}); populated by the
    #    subclass during prepare().  Declared here so the valid/total
    #    properties are safe to call on an unprepared instance.
    _meta: dict = None

    def __init__(self, network: Union[str, Network],
                 datasource: Union[str, Datasource],
                 layers=None, mode: str = 'r', **kwargs) -> None:
        super().__init__(**kwargs)
        self._network = network
        self._datasource = datasource
        # BUGFIX: normalize the `layers` *argument*; the old code iterated
        # over self._layers (still the class-level None at this point), so
        # passing any layers raised a TypeError.
        self._layers = layers and [layer.key if isinstance(layer, Layer)
                                   else layer for layer in layers]
        self._mode = mode
        self._directory = Path(config.activations_directory) / \
            ((network.key if isinstance(network, Network)
              else network) + '-' +
             (datasource.key if isinstance(datasource, Datasource)
              else datasource))
        # BUGFIX: log the actual class (this base class is not
        # 'ActivationsArchiveNumpy'); also fixes the 'initalized' typo.
        LOG.info("%s at '%s' initialized.",
                 type(self).__name__, self._directory)

    @property
    def directory(self) -> Path:
        """The name of the directory into which this archive
        is stored on disk.
        """
        return self._directory

    def __len__(self) -> int:
        """The length of the archive. This is the :py:prop:`valid` size if the
        archive is opened in read mode and the :py:prop:`total` size
        if opened in write mode.
        """
        return self.valid if self._mode == 'r' else self.total

    @property
    def valid(self) -> int:
        """The valid size of this archive. May be less than
        :py:prop:`total` if the archive has not been fully filled yet.
        """
        return None if self._meta is None else self._meta['valid']

    @property
    def total(self) -> int:
        """The total size of this archive. May be more than the
        :py:prop:`valid` size if the archive has not been fully filled yet.
        """
        return None if self._meta is None else self._meta['total']

    def _preparable(self) -> bool:
        # Reading requires an existing archive directory; writing requires
        # an actual Network instance (not just a key) to derive shapes.
        if self._mode == 'r' and not self._directory.is_dir():
            return False
        if self._mode == 'w' and not isinstance(self._network, Network):
            return False
        return super()._preparable()

    def _prepared(self) -> bool:
        return self._meta is not None and super()._prepared()

    def flush(self) -> None:
        """Write the metadata file to disk (write mode only).

        NOTE(review): relies on the subclass providing `filename_meta`.
        """
        if self._mode != 'w':
            # BUGFIX: report the actual class name rather than the
            # hard-coded (and wrong, for this base class) 'TopActivations'.
            raise ValueError(f"{type(self).__name__} in mode '{self._mode}' "
                             "is not writable")
        if self._meta is not None:
            with open(self.filename_meta, 'w') as outfile:
                json.dump(self._meta, outfile)
class TopActivations(PersistentTool):
"""The :py:class:`TopActivations` stores the top activation values
for the layers of a :py:class:`Network`.
top: int
The number of activation layers to store
layers: Sequence[Layer]
The layers for which top activation values are stored.
top_indices: Mapping[Layer, np.ndarray]
A dictionary mapping layers to an array holding the indices
of data points for the top activations.
The arrays have a shape of (channels, top, indices), with indices
being the number of indices necessary to index an activation
map in the layer: for a dense layer, this is 1 (batch, )
while for 2D layers this is 3 (batch, row, column).
top_activations : Mapping[Layer, np.ndarray]
A dictionary mapping layers to an array holding the top activation
values. The arrays have the shape (channels, top),
with channels being the number of channels in the layer
and top the number of top activation values to be stored.
"""
@staticmethod
def _merge_top(target_indices, target_values,
               new_indices, new_values) -> None:
    """Merge new candidate activations into the running top-k, in place.

    ``target_indices`` has shape (channels, top, indices) and
    ``target_values`` shape (channels, top); the new arrays are appended
    along the `top` axis and only the best `top` entries are kept.
    """
    # indices: shape = (channels, top, indices)
    # values: shape = (channels, top)
    top = target_values.shape[1]
    indices = np.append(target_indices, new_indices, axis=1)
    values = np.append(target_values, new_values, axis=1)
    # top_indices: shape = (channels, top)
    top_indices = nphelper.argtop(values, top, axis=1)
    target_values[:] = np.take_along_axis(values, top_indices, axis=1)
    # FIXME[bug]: ValueError: `indices` and `arr` must have the
    # same number of dimensions
    # target_indices[:] = np.take_along_axis(indices, top_indices, axis=1)
    # Workaround: gather each coordinate column separately so both
    # arrays passed to take_along_axis are 2-D.
    for coordinate in range(target_indices.shape[-1]):
        target_indices[:, :, coordinate] = \
            np.take_along_axis(indices[:, :, coordinate],
                               top_indices, axis=1)
def __init__(self, top: int = 9, **kwargs) -> None:
    """
    Arguments
    ---------
    top:
        The number of top activation values to record per channel.
    """
    super().__init__(**kwargs)
    self._top = top
    # In-memory state; populated by _prepare(), cleared by _unprepare().
    self._meta = None
    self._top_indices = None
    self._top_activations = None
@property
def filename_meta(self) -> Path:
    """Path of the JSON file holding this archive's metadata."""
    return self._directory / 'top-{}.json'.format(self._top)
def filename_top(self, layer: str) -> Path:
    """Path of the .npy file holding the top indices/activations
    recorded for ``layer``."""
    basename = 'top-{}-{}.npy'.format(self._top, layer)
    return self._directory / basename
@property
def top_(self) -> int:
    """The number of top values to record per layer/channel
    """
    return self._top
def layers(self, *what) -> Iterable[Tuple[str, type, Tuple[int]]]:
    """Iterate layer of layer information.

    Arguments
    ---------
    what:
        Specifies the what information should be provided. Valid
        values are: `'name'` the layer name,
        `'layer'` the actual layer object (only available if the
        :py:class:`Network`, not just the network key, has been provided
        upon initialization of this :py:class:`ActivationsArchive`).
        Any other selector yields the placeholder '?'.
    """
    if not what:
        what = ('name', )
    elif 'layer' in what and not isinstance(self._network, Network):
        raise ValueError("Iterating over Layers is only possible with "
                         "an initialized Network.")
    for layer in self._layers:
        name = layer.key if isinstance(layer, Layer) else layer
        values = tuple((name if info == 'name' else
                        self._network[name] if info == 'layer' else '?')
                       for info in what)
        # Single-selector calls yield bare values, not 1-tuples.
        yield values[0] if len(what) == 1 else values
def _prepare(self) -> None:
    """Load persisted top-activation data or initialize fresh tables.

    If the metadata file exists, the per-layer top indices and
    activation values are loaded from their ``.npy`` files.
    Otherwise fresh per-layer arrays are allocated (indices filled
    with -1, activation values with -inf) and new metadata created.
    """
    super()._prepare()
    filename_meta = self.filename_meta
    self._top_indices = {}
    self._top_activations = {}
    if filename_meta.exists():
        with open(filename_meta, 'r') as file:
            meta = json.load(file)
        for name in self.layers('name'):
            with self.filename_top(name).open('rb') as file:
                self._top_indices[name] = np.load(file)
                self._top_activations[name] = np.load(file)
    else:  # mode == 'w' and isinstance(network, Network):
        length = len(self._datasource)
        meta = {
            'total': length,
            'valid': 0,
            'shape': {}
        }
        if self._layers is None:
            self._layers = list(self._network.layer_names())
        for name, layer in self.layers('name', 'layer'):
            channels = layer.output_shape[-1]
            indices = len(layer.output_shape) - 1  # -1 for the channel
            # indices: (channels, top, indices)
            # Fix: np.int and np.NINF were removed from NumPy; the
            # builtin int and -np.inf are the exact replacements.
            self._top_indices[name] = \
                np.full((channels, self._top, indices), -1, int)
            # activations: (channels, top)
            self._top_activations[name] = \
                np.full((channels, self._top), -np.inf, np.float32)
    meta['layers'] = self._layers
    self._meta = meta
def _unprepare(self) -> None:
    """Drop the in-memory top-activation state, persisting it first
    when in write mode."""
    if self._mode == 'w':
        # make sure all information is stored before releasing it
        self.flush()
    self._meta = None
    self._top_indices = None
    self._top_activations = None
    LOG.info("TopActivations at '%s' unprepared.", self._directory)
    super()._unprepare()
def flush(self) -> None:
    """Write the per-layer top indices and activation values to disk,
    two arrays per layer file."""
    super().flush()
    if self._top_indices is None:
        return
    for name in self.layers('name'):
        with self.filename_top(name).open('wb') as file:
            np.save(file, self._top_indices[name])
            np.save(file, self._top_activations[name])
def __iadd__(self, values) -> object:
    """Merge activation values for one datapoint into the top lists.

    Arguments
    ---------
    values:
        Either a dict mapping layer names to activation maps, or a
        list of activation maps in the order of ``self._layers``.

    Raises
    ------
    ValueError:
        If *values* is a list of the wrong length or neither a list
        nor a dict.
    """
    # Fix: np.int was removed from NumPy; the builtin int is the
    # documented replacement.
    index = np.asarray([self._meta['valid']], dtype=int)
    if isinstance(values, dict):
        for layer, layer_values in values.items():
            self._update_values(layer, layer_values[np.newaxis], index)
    elif isinstance(values, list):
        if len(values) != len(self._layers):
            # Fix: the original message was missing a space after
            # "length" ("lengthN").
            raise ValueError("Values should be a list of length "
                             f"{len(self._layers)} not {len(values)}!")
        for layer, layer_values in zip(self._layers, values):
            self._update_values(layer, layer_values[np.newaxis], index)
    else:
        raise ValueError("Values should be a list (of "
                         f"length {len(self._layers)}) "
                         f"or a dictionary, not {type(values)}")
    self._meta['valid'] += 1
    return self
def _update_values(self, layer: str, value: np.ndarray,
                   index: np.ndarray = None) -> None:
    """Update the top activation lists with new values.

    Arguments
    ---------
    layer:
        The layer for which top activation values should be
        updated.
    value:
        The activation map. This is expected to be of shape
        (batch, position..., channel).
    index:
        The index of the activation value in the
        :py:class:`Datasource`.
    """
    # resolve the key to the actual Layer object, keeping the key as name
    layer, name = self._network[layer], layer
    # slim_values have shape (batch*position..., channel)
    slim_values = value.reshape((-1, value.shape[-1]))
    # top_slim: array of shape (top, channel),
    # containing the indices in the value array for the top elements
    # for each channel (i.e. values from 0 to len(slim_values))
    top = min(self._top, len(slim_values))
    top_slim = nphelper.argtop(slim_values, top=top, axis=0)
    # top_activations: (channel, top)
    top_activations = np.take_along_axis(slim_values, top_slim, axis=0).T
    # the index shape is (batch, positions...), without channel
    shape = (len(value), ) + layer.output_shape[1:-1]
    # unravel the flat indices back into (batch, position...) coordinates:
    # indices * (top, channel)
    # -> (indices, top, channel)
    # -> (channel, top, indices)
    top_indices = np.stack(np.unravel_index(top_slim, shape)).T
    # adapt the batch index: map in-batch position 0.. to the
    # datasource indices supplied by the caller
    if index is not None:
        top_indices[:, :, 0] = index[top_indices[:, :, 0]]
    # merge the batch's top entries into the persistent per-layer tables
    self._merge_top(self._top_indices[name], self._top_activations[name],
                    top_indices, top_activations)
def activations(self, layer, channel=...) -> np.ndarray:
    """Return the recorded top activation values for *layer*.

    With the default ``channel`` (Ellipsis) the full
    (channels, top) table is returned; an integer selects one
    channel's row.
    """
    table = self._top_activations[layer]
    return table[channel]
def indices(self, layer, channel=...) -> np.ndarray:
    """Return the recorded datasource/position indices for *layer*.

    With the default ``channel`` (Ellipsis) the full index table is
    returned; an integer selects one channel's entries.
    """
    table = self._top_indices[layer]
    return table[channel]
def receptive_field(self, layer, channel, top=0) -> np.ndarray:
    """Extract the receptive field of the *top*-th best activation of
    *channel* in *layer* from the corresponding datasource image.
    """
    entry = self.indices(layer, channel)[top]
    image = self._datasource[entry[0]]
    position = entry[1:-1]
    return self._network.extract_receptive_field(layer, position, image)
def info(self) -> None:
    """Output a summary of this :py:class:`ActivationsArchiveNumpy`."""
    summary = f"Archive at {self.directory}: {self.valid}/{self.total}"
    print(summary)
def old_merge_layer_top_activations(self, layer: Layer, top: int = None):
    """Merge the activations of the current batch into the
    top-activation record for *layer*.

    NOTE(review): legacy code kept for reference only.  It references
    ``self.actviations`` (typo), uses ``top_indices`` in the merge
    branches although the ``else`` branch only assigns
    ``new_top_indices``, and slices with ``[:sort]`` where ``sort`` is
    an index array -- it would not run as written.
    """
    # channel last (batch, height, width, channel)
    new_activations = \
        self.activations(layer).reshape(-1, self.actviations.shape[-1])
    batch_len = len(new_activations)
    data_len = batch_len // self.actviations.shape[0]
    start_index = self.index_batch_start * data_len
    # activations has shape (batch, classes)
    batch = np.arange(batch_len)
    if top is None:
        top_indices = np.argmax(new_activations, axis=-1)
    else:
        # Remark: here we could use np.argsort(-class_scores)[:n]
        # but that may be slow for a large number classes,
        # as it does a full sort. The numpy.partition provides a faster,
        # though somewhat more complicated method.
        top_indices_unsorted = \
            np.argpartition(-new_activations, top)[batch, :top]
        order = \
            np.argsort((-new_activations)[batch, top_indices_unsorted.T].T)
        new_top_indices = top_indices_unsorted[batch, order.T].T
    if not start_index:
        # first batch: simply record the values
        self._top_indices[layer] = new_top_indices
        self._top_activations[layer] = new_activations[top_indices]
    else:
        # later batches: merge with the values recorded so far
        merged_indices = np.append(self._top_indices[layer],
                                   new_top_indices + start_index)
        merged_activations = np.append(self._top_activations[layer],
                                       new_activations[top_indices])
        sort = np.argsort(merged_activations)
        self._top_indices[layer] = merged_indices[:sort]
        self._top_activations[layer] = merged_activations[:sort]
def old_top_activations(self, activations: np.ndarray, top: int = 9,
                        datasource_index: int = None) -> None:
    """Get the top activation values and their indices in a
    batch of activation maps.

    NOTE(review): legacy code kept for reference; it uses ``np.int``,
    which has been removed from NumPy.

    Arguments
    ---------
    activations:
        A batch of activation maps of shape
        (batch, position..., channels).
    top:
        The number of top values to extract.
    datasource_index:
        Index (or, for a batch, an array of indices) of the data
        in the datasource.

    Result
    ------
    top_activations:
        This is an array of shape (top, channels)
    top_indices:
        This is an array of shape (top, 2, channels).
        [n,0,channel] is the index of the datapoint in the datasource,
        while [n,1,channel] is the (1-dimensional) index in the
        activation map. This second index may have to be unraveled
        to obtain real activation map coordinates.
    """
    # remember the original shape
    shape = activations.shape
    # flatten activations per channel
    # ([batch,] position..., channel) -> (indices, channel)
    activations = np.reshape(activations, (-1, shape[-1]))
    # get indices for top activations per channel, shape: (top, channels)
    # Remark: here we could use np.argsort(-class_scores)[:n]
    # but that may be slow for a large number classes,
    # as it does a full sort. The numpy.partition provides a faster,
    # though somewhat more complicated method.
    top_indices_unsorted = \
        np.argpartition(-activations, top, axis=0)[:top]
    # get correspondig (unsorted) top activations: shape (top, channels)
    # NOTE(review): indexing with np.arange(top) here looks like it
    # should use the top indices (e.g. np.take_along_axis) -- confirm
    # before reviving this code.
    top_activations = \
        activations[np.arange(top), top_indices_unsorted.T].T
    if isinstance(datasource_index, np.ndarray):
        # working on a batch:
        # math.prod ist only available from 3.8 onward ...
        # batch_shape = (shape[0], math.prod(shape[1:-1]))
        batch_shape = (shape[0], np.prod(shape[1:-1]))
        # batch_shape = \
        #     (shape[0], functools.reduce(operator.mul, shape[1:-1]))
        batch_indices, position_indices = \
            np.unravel_index(top_indices_unsorted, batch_shape)
        datasource_indices = datasource_index[batch_indices]
        top_indices = np.append(datasource_indices[:, np.newaxis],
                                position_indices[:, np.newaxis], axis=1)
    else:
        # working on activations for a single input:
        position_indices = top_indices_unsorted[:, np.newaxis]
        datasource_indices = \
            np.full(position_indices.shape, datasource_index, np.int)
        # shape: (top, 2, channels)
        top_indices = \
            np.append(datasource_indices, position_indices, axis=1)
    return top_activations, top_indices
def old_merge_top_activations(self, top_activations: np.ndarray,
                              top_indices: np.ndarray,
                              new_activations: np.ndarray,
                              new_indices: np.ndarray) -> None:
    """Merge activation values into a top-n highscore.  Both activation
    records consist of two arrays, the first (top_activations)
    holding the actual activation values and the second
    (top_indices) holding the corresponding indices of the top
    scores.  The merge is performed in place on the ``top_*`` arrays.

    NOTE(review): legacy code -- ``np.append`` without an ``axis``
    flattens its arguments, so the (top, 2, channels) index structure
    is lost here; confirm before reviving this code.

    Arguments
    ---------
    top_activations:
        activation values of shape (top, channels)
    top_indices:
        corresponding indices in dataset / position of shape
        (top, 2, channels)
    new_activations:
        activation values of shape (top, channels)
    new_indices:
        corresponding indices in dataset / position of shape
        (top, 2, channels)
    """
    top = len(top_activations)
    merged_indices = np.append(top_indices, new_indices)
    merged_activations = np.append(top_activations, new_activations)
    # keep only the `top` highest values (descending sort)
    sort = np.argsort(-merged_activations, axis=0)
    top_indices[:] = merged_indices[sort[:top]]
    top_activations[:] = merged_activations[sort[:top]]
def old_init_layer_top_activations(self, layers = None, top: int = 9) -> None:
    """Initialize the per-layer top-activation tables.

    NOTE(review): legacy code -- the ``top`` argument is unused, the
    arrays are sized with ``layer.filters`` in both dimensions, and
    ``np.full(..., np.nan, dtype=np.int)`` is invalid (NaN cannot be
    cast to an integer dtype; ``np.int`` has also been removed from
    NumPy).  Kept verbatim for reference only.
    """
    if layers is None:
        layers = self._fixed_layers
    for layer in layers:
        self._top_activations[layer] = \
            np.full((layer.filters, layer.filters), -np.inf)
        # index: (datasource index, filter index)
        self._top_indices[layer] = \
            np.full((layer.filters, 2, layer.filters),
                    np.nan, dtype=np.int)
def old_update_layer_top_activations(self, layers = None,
                                     top: int = 9) -> None:
    """Merge the current activations into the top-activation tables.

    NOTE(review): legacy code -- ``self._top_activations(...)`` calls
    a dict as if it were a function, and ``self._merge_top_activations``
    does not exist under that name in this class.  Kept verbatim for
    reference only.
    """
    if layers is None:
        layers = self._fixed_layers
    for layer in layers:
        top_activations, top_indices = \
            self._top_activations(self.activations(layer),
                                  datasource_index=self._data.index)
        self._merge_top_activations(self._top_activations[layer],
                                    self._top_indices[layer],
                                    top_activations, top_indices)
class ActivationWorker(Worker):
    """A :py:class:`Worker` specialized to work with the
    :py:class:`ActivationTool`.

    Attributes
    ----------
    layers:
        The layers for which activations shall be computed.

    data: (inherited from Worker)
        The current input data

    activations: dict
        The activations for the current data
    """

    def __init__(self, **kwargs) -> None:
        super().__init__(**kwargs)
        # layer ids currently requested from the tool
        self._layer_ids = []
        # layers explicitly selected via set_layers/add_layer
        self._fixed_layers = []
        # whether classification scores should be recorded as well
        self._classification = False
        self._activations = None

    #
    # Tool core functions
    #

    def _apply_tool(self, data: Data, **kwargs) -> None:
        """Apply the :py:class:`ActivationTool` on the given data.
        """
        self.tool.apply(self, data, layers=self._layer_ids, **kwargs)

    def activations(self, layer: Layer = None, unit: int = None,
                    data_format: str = None) -> np.ndarray:
        """Get the precomputed activation values for the current
        :py:class:`Data`.  Without a *layer*, a dict of all recorded
        activations is returned.
        """
        activations = \
            self._tool.data_activations(self._data, layer=layer, unit=unit,
                                        data_format=data_format)
        LOG.debug("ActivationWorker.activations(%s,unit=%s,data_format=%s):"
                  " %s", layer, unit, data_format,
                  len(activations) if layer is None else activations.shape)
        return activations

    def _ready(self) -> bool:
        # FIXME[hack]
        return (super()._ready() and
                self._tool.network is not None and
                self._tool.network.prepared)

    def set_network(self, network: Network,
                    layers: List[Layer] = None) -> None:
        """Set the current network. Update will only be published if
        not already selected.

        Parameters
        ----------
        network : str or int or network.network.Network
            Key for the network
        """
        LOG.info("Engine.set_network(%s): old=%s", network, self._network)
        if network is not None and not isinstance(network, Network):
            raise TypeError("Expecting a Network, "
                            f"not {type(network)} ({network})")
        if self._tool is None:
            raise RuntimeError("Trying to set a network "
                               "without having a Tool.")
        self._tool.network = network
        # set the layers (this will also trigger the computation
        # of the activations)
        self.set_layers(layers)
        self.change(network_changed=True)

    #
    # Layer configuration
    #

    def set_layers(self, layers: List[Layer]) -> None:
        """Set the layers for which activations shall be computed.
        """
        self._fixed_layers = \
            layers if isinstance(layers, list) else list(layers)
        self._update_layers()

    def add_layer(self, layer: Union[str, Layer]) -> None:
        """Add a layer to the list of activation layers.
        """
        if isinstance(layer, str):
            self._fixed_layers.append(self.network[layer])
        elif isinstance(layer, Layer):
            self._fixed_layers.append(layer)
        else:
            # NOTE(review): message is missing the f-prefix, so the
            # "{type(layer)}" placeholder is printed literally.
            raise TypeError("Invalid type for argument layer: {type(layer)}")
        self._update_layers()

    def remove_layer(self, layer: Layer) -> None:
        """Remove a layer from the list of activation layers.
        """
        self._fixed_layers.remove(layer)
        self._update_layers()

    def set_classification(self, classification: bool = True) -> None:
        """Record the classification results. This assumes that the network
        is a classifier and the results are provided in the last
        layer.
        """
        if classification != self._classification:
            self._classification = classification
            self._update_layers()

    def _update_layers(self) -> None:
        # Determining layers
        layer_ids = list(map(lambda layer: layer.id, self._fixed_layers))
        if self._classification and isinstance(self._network, Classifier):
            class_scores_id = self._network.scores.id
            if class_scores_id not in layer_ids:
                layer_ids.append(class_scores_id)
        # NOTE(review): comparing lists with '>' is lexicographic, not
        # a superset test -- confirm this is the intended semantics.
        got_new_layers = layer_ids > self._layer_ids and self._data is not None
        self._layer_ids = layer_ids
        if got_new_layers:
            self.work(self._data)

    #
    # work on Datasource
    #

    def extract_activations(self, datasource: Datasource,
                            batch_size: int = 128) -> None:
        """Compute activations for all data of *datasource* in batches,
        collecting the per-layer results into in-memory arrays."""
        samples = len(datasource)
        # Here we could:
        #  np.memmap(filename, dtype='float32', mode='w+',
        #            shape=(samples,) + network[layer].output_shape[1:])
        results = {
            layer: np.ndarray((samples,) +
                              self.tool.network[layer].output_shape[1:])
            for layer in self._layer_ids
        }
        fetcher = Datafetcher(datasource, batch_size=batch_size)
        try:
            index = 0
            for batch in fetcher:
                print("dl-activation: "
                      f"processing batch of length {len(batch)} "
                      f"with elements given as {type(batch.array)}, "
                      f"first element having index {batch[0].index} and "
                      f"shape {batch[0].array.shape} [{batch[0].array.dtype}]")
                # self.work() will make `batch` the current data object
                # of this Worker (self._data) and store activation values
                # as attributes of that data object:
                self.work(batch, busy_async=False)
                # obtain the activation values from the current data object
                activations = self.activations()
                # print(type(activations), len(activations))
                print("dl-activation: activations are of type "
                      f"{type(activations)} of length {len(activations)}")
                if isinstance(activations, dict):
                    # NOTE(review): `index` is reused as the enumeration
                    # counter here, clobbering the running batch offset;
                    # confirm the slicing into `results` is as intended.
                    for index, (layer, values) in \
                            enumerate(activations.items()):
                        print(f"dl-activation: [{index}]: {values.shape}")
                        results[layer][index:index+len(batch)] = values
                elif isinstance(activations, list):
                    print("dl-activation: "
                          f"first element is {type(activations[0])} "
                          f"with shape {activations[0].shape} "
                          f"[{activations[0].dtype}]")
                    for index, values in enumerate(activations):
                        print(f"dl-activation: [{index}]: {values.shape}")
                        layer = self._layer_ids[index]
                        results[layer][index:index+len(batch)] = values
                print("dl-activation: batch finished in "
                      f"{self.tool.duration(self._data)*1000:.0f} ms.")
        except KeyboardInterrupt:
            # print(f"error procesing {data.filename} {data.shape}")
            print("Keyboard interrupt")
            # self.output_status(top, end='\n')
        except InterruptedError:
            print("Interrupted.")
        finally:
            print("dl-activation: finished processing")
            # signal.signal(signal.SIGINT, original_sigint_handler)
            # signal.signal(signal.SIGQUIT, original_sigquit_handler)

    def iterate_activations(self, datasource: Datasource,
                            batch_size: int = 128):  # -> Iterator
        """Iterate over *datasource*, yielding the activation values
        for each datum (one dict per datapoint, even for batches)."""
        fetcher = Datafetcher(datasource, batch_size=batch_size)
        index = 0
        for data in fetcher:
            print("iterate_activations: "
                  f"processing {'batch' if data.is_batch else 'data'}")
            self.work(data, busy_async=False)
            activations = self.activations()
            if data.is_batch:
                for index, view in enumerate(data):
                    yield {layer: activations[layer][index]
                           for layer in activations}
            else:
                yield activations
class ActivationsArchive(Preparable):
    """An :py:class:`ActivationsArchive` represents an archive of
    activation values obtained by applying a :py:class:`Network` to
    a :py:class:`Datasource`.

    Intended use:

    >>> with ActivationsArchiveNumpy(network, datasource, mode='w') as archive:
    >>>     for index in range(archive.valid, archive.total):
    >>>         archive += network.get_activations(datasource[index])

    >>> with ActivationsArchiveNumpy(network, datasource, mode='w') as archive:
    >>>     for batch in datasource.batches(batch_size=128):
    >>>         activation_tool.process(batch)
    >>>         archive += batch

    >>> with ActivationsArchiveNumpy(network, datasource, mode='w') as archive:
    >>>     archive.fill()
    """

# FIXME[todo]:
#  - allow ActivationTool instead of Network
#  - make this class an ActivationTool
#  - documentation
class ActivationsArchiveNumpy(ActivationsArchive, PersistentTool):
    """The :py:class:`ActivationsArchiveNumpy` realizes an
    :py:class:`ActivationsArchive` based on the Numpy `memmap`
    mechanism.

    All files of the :py:class:`ActivationsArchiveNumpy` are stored in
    the directory :py:prop:`directory`. Each layer gets a separate
    file, called `[LAYER_NAME].dat`. Metadata for the archive are
    stored in JSON format into the file `meta.json`.

    The total size of an :py:class:`ActivationsArchiveNumpy`, that is
    the number of data points for which activations are stored in the
    archive, has to be provided upon initialization and cannot be
    changed afterwards. This number can be accessed via the property
    :py:prop:`total`. The archive supports incremental updates,
    allowing to fill the archive in multiple steps and to continue
    fill operations that were interrupted. The number of valid data
    points filled so far is stored in the metadata as property
    `valid` and can be accessed through the property :py:prop:`valid`.

    Note: The numpy `memmap` mechanism does not provide means for
    compression. Files are stored uncompressed and may have extreme
    sizes for larger activation maps of datasources.

    Note: Depending on the file system, memmap may create files of
    desired size but only allocate disk space while filling the files.
    This may result in an (uncatchable) bus error if the device runs
    out of space.
    """

    def __init__(self, **kwargs) -> None:
        super().__init__(**kwargs)
        # Mapping: layer name -> np.memmap; created in _prepare().
        self._layers_memmap = None
        # Archive metadata (total/valid/dtype/shape); loaded or created
        # in _prepare().
        self._meta = None
        LOG.info("ActivationsArchiveNumpy at '%s' initalized.",
                 self._directory)

    @property
    def filename_meta(self) -> Path:
        """The name of the file holding the meta data for this
        :py:class:`ActivationsArchiveNumpy`.
        """
        return self._directory / 'meta.json'

    def layers(self, *what) -> Iterable[Tuple[str, type, Tuple[int]]]:
        """Iterate layer of layer information.

        Arguments
        ---------
        what:
            Specifies what information should be provided. Valid
            values are: `'name'` the layer name,
            `'dtype'` the dtype of the layer,
            `'shape'` the layer shape,
            `'bytes'` the size of the layer file in bytes,
            `'layer'` the actual layer object (only available if the
            :py:class:`Network`, not just the network key, has been provided
            upon initialization of this :py:class:`ActivationsArchive`).

        NOTE(review): each yielded item is a generator expression, not
        a tuple; callers must unpack it exactly once.
        """
        if not what:
            what = ('name', )
        elif 'layer' in what and not isinstance(self._network, Network):
            raise ValueError("Iterating over Layers is only possible with "
                             "an initialized Network.")
        for layer in self._layers:
            name = layer.key if isinstance(layer, Layer) else layer
            memmap = self._layers_memmap[name]
            yield ((name if info == 'name' else
                    memmap.dtype if info == 'dtype' else
                    memmap.shape[1:] if info == 'shape' else
                    memmap.nbytes if info == 'bytes' else
                    self._network[name] if info == 'layer' else '?')
                   for info in what)

    def _prepare(self) -> None:
        """Load (or create) the archive metadata and open one memmap
        per layer."""
        super()._prepare()
        # prepare the meta data
        filename_meta = self.filename_meta
        if filename_meta.exists():
            with open(filename_meta, 'r') as file:
                meta = json.load(file)
            if self._layers is None:
                self._layers = list(meta['shape'].keys())
        else:  # mode == 'w' and isinstance(network, Network):
            length = len(self._datasource)
            meta = {
                'total': length,
                'valid': 0,
                'dtype': 'float32',  # FIXME[hack]: we should determine dtype
                'shape': {}
            }
            if self._layers is None:
                self._layers = list(self._network.layer_names())
            for layer in self._layers:
                meta['shape'][layer] = \
                    (length,) + self._network[layer].output_shape[1:]
            os.makedirs(self.directory, exist_ok=True)
        # check if all requested layers are availabe
        available_layers = set(meta['shape'].keys())
        requested_layers = set(self._layers)
        if not requested_layers.issubset(available_layers):
            raise ValueError(f"Some requested layers {requested_layers} "
                             f"are not available {available_layers}")
        if self._mode == 'w' and available_layers != requested_layers:
            # make sure that all available layers are written to avoid
            # inconsistent data
            raise ValueError(f"Some available layers {available_layers} "
                             "are not mentioned as write layers "
                             f"{requested_layers}")
        self._meta = meta
        # prepare the layer memmaps
        # NOTE(review): `meta.shape` would raise AttributeError (meta is
        # a dict, so this should read meta['shape']); the branch appears
        # unreachable, as self._layers is set above whenever it was None.
        layers = meta.shape.keys() if self._layers is None else self._layers
        dtype = np.dtype(meta['dtype'])
        self._layers_memmap = {}
        for layer in layers:
            layer_name = layer.key if isinstance(layer, Layer) else layer
            layer_filename = self.directory / (layer_name + '.dat')
            shape = tuple(meta['shape'][layer_name])
            # reuse an existing file in read/update mode; create otherwise
            mode = 'r' if self._mode == 'r' else \
                ('r+' if layer_filename.exists() else 'w+')
            self._layers_memmap[layer_name] = \
                np.memmap(layer_filename, dtype=dtype, mode=mode, shape=shape)
        LOG.info("ActivationsArchiveNumpy at '%s' with %d layers and "
                 "%d/%d entries prepared for mode '%s'.", self._directory,
                 len(self._layers), self.valid, self.total, self._mode)

    def _unprepare(self) -> None:
        """Close the memmaps, flushing pending data first in write mode."""
        # make sure all information is stored
        if self._mode == 'w':
            self.flush()
        # close the memmap objects
        # NOTE(review): `del memmap` only removes the loop variable; the
        # maps are actually released when self._layers_memmap is cleared.
        if self._layers_memmap is not None:
            for layer, memmap in self._layers_memmap.items():
                del memmap
            self._layers_memmap = None
        self._meta = None
        LOG.info("ActivationsArchiveNumpy at '%s' unprepared.",
                 self._directory)
        super()._unprepare()

    def flush(self) -> None:
        """Flush unwritten data to the disk. This will also update
        the metadata file (:py:prop:`filename_meta`) to reflect the
        current state of the archive. :py:meth:`flush` is automatically
        called when unpreparing or deleting this
        :py:class:`ActivationsArchiveNumpy` object.

        Note: flushing the data is only allowed (and only makes sense),
        if this :py:class:`ActivationsArchiveNumpy` is in write mode.
        """
        super().flush()
        if self._layers_memmap is not None:
            for layer, memmap in self._layers_memmap.items():
                memmap.flush()

    def __getitem__(self, key) -> None:
        # key is either (layer, index) or a plain index (all layers).
        if isinstance(key, tuple):
            layer = key[0]
            index = key[1]
        else:
            layer = None
            index = key
        if layer is None:
            # no layer given: return a dict with values for all layers
            return {layer: memmap[index]
                    for layer, memmap in self._layers_memmap.items()}
        return self._layers_memmap[layer][index]

    def __setitem__(self, key, values) -> None:
        # key is either (layer, index) or a plain index (all layers).
        if self._mode != 'w':
            raise ValueError(f"Archive in mode '{self._mode}' is not writable")
        if isinstance(key, tuple):
            layer = key[0]
            index = key[1]
        else:
            layer = None
            index = key
        if layer is None:
            if isinstance(values, dict):
                for layer, layer_values in values.items():
                    self._update_values(layer, index, layer_values)
            elif isinstance(values, list):
                if len(values) != len(self._layers):
                    raise ValueError("Values should be a list of length"
                                     f"{len(self._layers)} not {len(values)}!")
                for layer, layer_values in zip(self._layers, values):
                    self._update_values(layer, index, layer_values)
            else:
                raise ValueError("Values should be a list (of "
                                 f"length {len(self._layers)}) "
                                 f"or a dictionary, not {type(values)}")
        else:
            self._update_values(layer, index, values)

    def _update_values(self, layer, index, value) -> None:
        # Store `value` at `index` in the given layer's memmap.
        if isinstance(layer, Layer):
            layer = layer.key
        try:
            self._layers_memmap[layer][index] = value
        except KeyError:
            raise KeyError(f"Invalid layer '{layer}', valid layers "
                           f"are {list(self._layers_memmap.keys())}")

    def __iadd__(self, values) -> object:
        """Add activation values to this
        :py:class:`ActivationsArchiveNumpy`.

        Arguments
        ---------
        values:
            The activation values to add. Currently only a list or
            dictionary of activation values are supported.
        """
        # FIXME[todo]: allow to add a batch of values
        index = self._meta['valid']
        self[index] = values
        self._meta['valid'] += 1
        return self

    def fill(self, overwrite: bool = False) -> None:
        """Fill this :py:class:`ActivationsArchive` by computing activation
        values for data from the underlying :py:class:`Datasource`.

        Arguments
        ---------
        overwrite:
            If `True`, the fill process will start with the first
            data item, overwriting results from previous runs.
            If `False`, the fill process will start from where the
            last process stopped (if the archive is already filled
            completely, no further computation is started).
        """
        if not isinstance(self._network, Network):
            self._network = Network[self._network]
            self._network.prepare()
        if not isinstance(self._datasource, Datasource):
            self._datasource = Datasource[self._datasource]
            self._datasource.prepare()
        if overwrite:
            self._meta['valid'] = 0
        with self:
            for index in range(self.valid, self.total):
                self += self._network.get_activations(self._datasource[index])

    def info(self) -> None:
        """Output a summary of this :py:class:`ActivationsArchiveNumpy`.
        """
        print(f"Archive at {self.directory}: {self.valid}/{self.total}")
        for name, dtype, shape, size in \
                self.layers('name', 'dtype', 'shape', 'bytes'):
            print(f" - {name+':':20s} {str(shape):20s} "
                  f"of type {str(dtype):10s} [{format_size(size)}]")
# FIXME[todo]: move to util
def format_size(num, suffix='B'):
    """Return *num* formatted as a human-readable binary size.

    Arguments
    ---------
    num:
        A (byte) count.  Reduced by integer division, so the result
        is always a whole number of the chosen unit.
    suffix:
        Unit suffix appended after the binary prefix.
    """
    # Fix: the original list was missing a comma between 'Pi' and 'Ei',
    # so the two strings were concatenated into a single 'PiEi' entry,
    # skewing every unit from pebi upward.
    for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
        if num < 1024:
            return f"{num}{unit}{suffix}"
        num //= 1024
    return f"{num}Yi{suffix}"
| {
"repo_name": "Petr-By/qtpyvis",
"path": "dltb/tool/activation.py",
"copies": "1",
"size": "49892",
"license": "mit",
"hash": 7270598977069679000,
"line_mean": 38.2232704403,
"line_max": 82,
"alpha_frac": 0.5706325663,
"autogenerated": false,
"ratio": 4.410927415789939,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5481559982089939,
"avg_score": null,
"num_lines": null
} |
"""An English grammar for chartparse.
This grammar was originally written by Steve Isard at the
University of Sussex. The vocabulary is designed to amuse
undergraduate Experimental Psychology students, hence the
references to pigeons and cages.
The grammar is almost entirely Steve's original. The only changes
are a few words, proper names, and the production:
NP -> det Nn PP
which was changed to
NP -> NP PP
The intent is to demonstrate ambiguous grouping of modifiers.
As in the original LIB CHART [1]_, features on the categories
are ignored. There are three features used `case`, `num` and
`tr`. They could reasonably be handled in this file, via
compilation to a plain CFG, since their purpose is only
to enforce agreement.
References
----------
The original LIB CHART [1]_
.. [1] http://www.poplog.org/gospl/packages/pop11/lib/chart.p
>>> import chart
>>> chart.parse(["the","director",'is','clint', 'eastwood'])
['the', 'director', 'is', 'clint', 'eastwood']
Parse 1:
S
Np
det the
Nn
n director
Vp
cop is
Pn
n clint
Pn
n eastwood
1 parses
>>> import chart
>>> chart.parse(["show", "me","a","movie","where", "the","director",'is','clint', 'eastwood'],topcat='SImp',sep='_')
['show', 'me', 'a', 'movie', 'where', 'the', 'director', 'is', 'clint', 'eastwood']
Parse 1:
SImp
_Vp
__v show
__Np
___pn me
__Np
___Np
____det a
____Nn
_____n movie
___Relp
____rp where
____S
_____Np
______det the
______Nn
_______n director
_____Vp
______cop is
______Pn
_______n clint
_______Pn
________n eastwood
1 parses
"""
##
# Created 10 March 2014
# author: Chris Brew
# author: Stephen Isard
# license: Apache 2.0
##
from collections import namedtuple
import numpy.random as npr
class Rule(namedtuple('Rule', ('lhs', 'rhs'))):
    """One production of a context-free grammar.

    Attributes
    ----------
    lhs: string
        The left hand side of the rule.
    rhs: list [string]
        The right hand side of the rule.

    Examples
    --------
    >>> r = Rule('s',('np','vp'))
    """

    def __repr__(self):
        # Spelled out explicitly so the textual form stays stable even
        # if fields were ever added to the subclass.
        template = "Rule(lhs='{lhs}', rhs={rhs})"
        return template.format(lhs=self.lhs, rhs=self.rhs)

    @property
    def constraints(self):
        # Feature constraints are ignored by this grammar.
        return None
class Grammar(object):
    """
    Class for creating grammars from text strings.

    Parameters
    ----------
    grammar: string
        the grammar rules, lines of the form `lhs -> rhs (|rhs)*`
    lexicon: string
        the words, lines of the form `word category+`

    Examples
    --------
    >>> g = Grammar(RULES, WORDS)
    >>> g.grammar[0]
    Rule(lhs='S', rhs=['Np', 'Vp'])
    """

    def __init__(self, grammar, lexicon, state=None):
        """
        Create a grammar from strings.
        """
        # Seeded random state.  NOTE(review): stored but not used
        # anywhere in this class -- presumably for client code.
        self.state = (npr.RandomState(42) if state is None else state)
        # All productions: syntactic rules followed by lexical rules.
        self.grammar = self.__rulify(grammar) + self.__lexicalize(lexicon)

    def make_rule(self, lhs):
        # NOTE(review): `rhs` is undefined here, so calling this method
        # raises NameError.  It looks like it should take `rhs` as a
        # parameter; kept unchanged pending confirmation.
        return Rule(lhs=lhs, rhs=rhs)

    def __remove_balanced_brackets(self, string):
        # Drop "(...)" feature annotations (non-nested) from the text;
        # everything outside parentheses is kept verbatim.
        r = []
        collecting = True
        for ch in string:
            if ch == "(":
                collecting = False
            elif ch == ")":
                collecting = True
            elif collecting:
                r.append(ch)
        return "".join(r)

    def __rulify(self, s):
        # Parse "lhs -> rhs1 | rhs2" lines into Rule objects,
        # one Rule per alternative.
        r = []
        s = self.__remove_balanced_brackets(s)
        lines = s.split('\n')
        for line in lines:
            lhs, rhs = line.split('->')
            lhs = lhs.split()[0]
            elems = rhs.split('|')
            r += [Rule(lhs=lhs, rhs=elem.split())
                  for elem in elems]
        return r

    def __lexicalize(self, string):
        # Parse "word cat1 | cat2" lexicon lines into lexical rules
        # of the form cat -> [word].
        string = self.__remove_balanced_brackets(string)
        lines = string.split("\n")
        rules = []
        for line in lines:
            a = line.split()
            w = a[0]
            r = "".join(a[1:])
            elems = r.split('|')
            for elem in elems:
                a = elem.split()
                rules.append(Rule(lhs=a[0], rhs=[w]))
        return rules
# Production rules, one per line: "LHS -> RHS (| RHS)*".  The
# parenthesized feature annotations (case, num, tr) are stripped by
# Grammar before parsing and serve only as documentation of agreement.
RULES = """S(num) -> Np(num,case:subj) Vp(num) | S conj S
S(num) -> Np(num,case:subj) cop(num) ppart
S(num) -> Np(num,case:subj) cop(num) ppart passmarker Np(case:obj)
SImp -> Vp
Relp -> rp S
Np(num,case) -> det(num) Nn(num) | Np(num,case) Pp | pn(num,case) | Np(num,case) Relp | Np(case) conj Np(case)
Nn(num) -> n(num) | adj n(num)
Vp(num) -> v(num,tr:trans) Np(case:obj) | v(num,tr:intrans) | cop(num) adj | cop(num) Pn | v(num,tr:ditrans) Np Np
Vp(num) -> Vp(num) Pp
Pn -> n | n Pn
Pp -> prep Np(case:obj)"""

# Lexicon, one entry per line: "word category (| category)*", with the
# same feature-annotation convention as RULES.
WORDS = """a det(num:sing)
and conj
are cop(num:pl)
ball n(num:sing)
big adj
bitten ppart
blue adj
boy n(num:sing)
boys n(num:pl)
by passmarker | prep
cage n(num:sing) | v(num:pl,tr:trans)
caged v(tr:trans) | ppart
cages n(num:pl) | v(num:sing,tr:trans)
chris n(num:sing)
clint n(num:sing)
computer n(num:sing)
computers n(num:pl)
director n(num:sing)
directors n(num:pl)
eastwood n(num:sing)
enormous adj
fifty det(num:pl)
four det(num:pl)
girl n(num:sing)
girls n(num:pl)
green adj
he pn(num:sing,case:subj)
her pn(num:sing,case:obj)
him pn(num:sing,case:obj)
hit v(tr:trans) | ppart
hits v(tr:trans,num:sing)
house n(num:sing)
in prep
is cop(num:sing)
little adj
me pn(num:sing)
mic pn(num:sing)
micro n(num:sing)
micros n(num:pl)
movie n(num:sing)
movies n(num:pl)
on prep
one n(num:sing) | pn(num:sing) | det(num:sing)
ones n(num:pl)
pdp11 n(num:sing)
pdp11s n(num:pl)
pigeon n(num:sing)
pigeons n(num:pl)
program n(num:sing) | v(num:pl,tr:trans)
programmed v(tr:trans) | ppart
programs n(num:pl) | v(num:sing,tr:trans)
punish v(num:pl,tr:trans)
punished v(tr:trans)|ppart
punishes v(num:sing,tr:trans)
ran v(tr:intrans)
rat n(num:sing)
rats n(num:pl)
red adj
reinforce v(num:pl,tr:trans)
reinforced v(tr:trans) | ppart
reinforces v(num:s,tr:trans)
room n(num:sing)
rooms n(num:pl)
run v(tr:intrans,num:pl)
runs v(tr:intrans,num:sing)
scientists n(num:pl)
she pn(num:sing,case:subj)
sheep n
show v(tr:ditrans)
steve pn(num:sing)
stuart pn(num:sing)
suffer v(num:pl,tr:intrans)
suffered v(tr:intrans)
suffers v(num:sing,tr:intrans)
that det(num:sing)
the det
them pn(num:pl,case:obj)
these det(num:pl)
they pn(num:pl,case:subj)
those det(num:pl)
three det(num:pl)
two det(num:pl)
undergraduates n(num:pl)
universities n(num:pl)
university n(num:sing)
was cop(num:sing)
were cop(num:pl)
when rp(rptype:tmp)
where rp(rptype:loc)
direct v(tr:trans)
wood n(num:sing)
would md
dye v(tr:trans)
or conj
rector n(num:sing)
east adj"""

# The ready-to-use grammar instance built from the rules and lexicon above.
GRAMMAR = Grammar(RULES, WORDS)
| {
"repo_name": "cbrew/chartparse",
"path": "python/chart/english.py",
"copies": "1",
"size": "6589",
"license": "apache-2.0",
"hash": -3468573060670117400,
"line_mean": 20.3928571429,
"line_max": 116,
"alpha_frac": 0.6075276977,
"autogenerated": false,
"ratio": 2.808610400682012,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.39161380983820115,
"avg_score": null,
"num_lines": null
} |
# HackerRank "Encryption": remove the spaces from an English text of length L,
# then write the characters row-wise into a grid whose dimensions satisfy
#
#     floor(sqrt(L)) <= rows <= columns <= ceil(sqrt(L))
#
# with rows * columns >= L, choosing the minimum-area grid.  The encoded
# message is the grid read column by column, columns separated by spaces.
#
# Example: "ifmanwasmeanttostayonthegroundgodwouldhavegivenusroots"
# (54 chars) fills a 7x8 grid
#
#   ifmanwas
#   meanttos
#   tayonthe
#   groundgo
#   dwouldha
#   vegivenu
#   sroots
#
# and encodes to:
#
#   imtgdvs fearwer mayoogo anouuio ntnnlvt wttddes aohghn sseoau
import math


def encrypt(message):
    """Return the column-wise encoding of *message* (spaces are stripped).

    With ``columns = ceil(sqrt(L))``, reading the text row-wise means
    character ``i`` lands in column ``i % columns``, so each output word
    is exactly one column of the grid.  An empty message encodes to ''.
    """
    text = message.replace(' ', '')
    if not text:
        return ''
    columns = math.ceil(math.sqrt(len(text)))
    grid = [''] * columns
    for idx, ch in enumerate(text):
        grid[idx % columns] += ch
    return ' '.join(grid)


if __name__ == '__main__':
    # The original script was Python 2 (raw_input / print statement); this
    # is the Python 3 equivalent with the logic factored into encrypt().
    print(encrypt(input()))
| {
"repo_name": "chinhtle/python_fun",
"path": "hacker_rank/algorithms/implementation/encryption.py",
"copies": "1",
"size": "1547",
"license": "mit",
"hash": -3836117496010005500,
"line_mean": 29.3333333333,
"line_max": 79,
"alpha_frac": 0.7188106012,
"autogenerated": false,
"ratio": 3.4150110375275937,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4633821638727594,
"avg_score": null,
"num_lines": null
} |
""" An entity about whom data is collected.
:Authors: Sana dev team
:Version: 2.0
"""
import datetime
import os
from PIL import Image
from django.db import models
from mds.api.utils import make_uuid
__all__ = ["AbstractSubject","Subject"]
class AbstractSubject(models.Model):
    """Abstract base for an entity about whom data is collected.

    Supplies a stable UUID, created/modified timestamps, and a
    soft-delete flag shared by all concrete subject models.
    """

    class Meta:
        abstract = True

    # Universally unique identifier, generated once and never editable.
    uuid = models.SlugField(max_length=36, unique=True, default=make_uuid,
                            editable=False)
    # Set automatically when the row is first created.
    created = models.DateTimeField(auto_now_add=True)
    # Refreshed automatically on every save.
    modified = models.DateTimeField(auto_now=True)
    # Soft-delete marker; voided rows are kept but flagged.
    voided = models.BooleanField(default=False)
class Subject(AbstractSubject):
    """Simple subject implementation as a medical patient."""

    class Meta:
        app_label = "core"

    given_name = models.CharField(max_length=64)
    family_name = models.CharField(max_length=64)
    dob = models.DateTimeField()
    gender = models.CharField(choices=(("M", "M"), ("F", "F")), max_length=2)
    image = models.ImageField(blank=True, upload_to="core/subject")
    location = models.ForeignKey('Location', blank=True, to_field='uuid')
    system_id = models.CharField(max_length=64, blank=True)

    @property
    def age(self):
        """Age in whole years as of today.

        ``dob`` is a DateTimeField, so the original comparison of a
        datetime against ``datetime.date.today().replace(year=...)``
        raises TypeError on Python 3 (and ValueError when today is
        Feb 29 and the birth year is not a leap year).  Comparing
        (month, day) tuples is equivalent and avoids both problems.
        """
        today = datetime.date.today()
        dob = self.dob
        birthday_pending = (today.month, today.day) < (dob.month, dob.day)
        return today.year - dob.year - (1 if birthday_pending else 0)

    @property
    def full_name(self):
        """'family, given' display name."""
        return u'%s, %s' % (self.family_name, self.given_name)

    def _generate_thumb(self, size=(96, 96)):
        """Best-effort creation of a thumbnail beside the image in 'ico/'.

        Failures (no image set, unreadable file, ...) are deliberately
        swallowed so saving a Subject never fails because of thumbnailing.
        """
        try:
            pth, fname = os.path.split(self.image.path)
            thumb_pth = os.path.join(pth, "ico")
            thumb = os.path.join(thumb_pth, fname)
            if not os.path.exists(thumb_pth):
                os.makedirs(thumb_pth)
            im = Image.open(self.image.path)
            thim = im.copy()
            # LANCZOS is the long-standing name for the ANTIALIAS filter;
            # ANTIALIAS itself was removed in Pillow 10.
            thim.thumbnail(size, Image.LANCZOS)
            thim.save(thumb)
        except Exception:
            # Narrowed from a bare ``except:`` so SystemExit and
            # KeyboardInterrupt are no longer swallowed; thumbnailing
            # remains best-effort.
            pass

    @property
    def thumb_url(self):
        """URL of the thumbnail, generating it on demand.

        Falls back to the full image URL on any error.
        """
        try:
            pth, fname = os.path.split(self.image.path)
            thumb_pth = os.path.join(pth, "ico")
            thumb = os.path.join(thumb_pth, fname)
            if not os.path.exists(thumb):
                self._generate_thumb()
            # NOTE(review): os.path.join is used on URL pieces; this only
            # produces a valid URL where os.sep == '/'. Confirm deployment
            # platform before changing.
            url_path, _ = os.path.split(self.image.url)
            thumb_url_path = os.path.join(url_path, "ico")
            return os.path.join(thumb_url_path, fname)
        except Exception:
            return self.image.url

    def save(self, *args, **kwargs):
        """Persist the row first, then (re)generate the thumbnail."""
        super(Subject, self).save(*args, **kwargs)
        self._generate_thumb()

    def __unicode__(self):
        return u'%s, %s - %s' % (self.family_name, self.given_name,
                                 self.system_id)
| {
"repo_name": "SanaMobile/sana.mds",
"path": "src/mds/core/models/subject.py",
"copies": "1",
"size": "2957",
"license": "bsd-3-clause",
"hash": 1479905410492381400,
"line_mean": 30.1263157895,
"line_max": 90,
"alpha_frac": 0.5989178221,
"autogenerated": false,
"ratio": 3.6282208588957054,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9659445706075312,
"avg_score": 0.01353859498407863,
"num_lines": 95
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.