function stringlengths 11 56k | repo_name stringlengths 5 60 | features list |
|---|---|---|
def Run(self):
_UninstallApk(self.devices, self.install_dict, self.args.package_name) | chrisdickinson/nojs | [
72,
3,
72,
5,
1464475027
] |
def Run(self):
_LaunchUrl(self.devices, self.args.args, self.args.command_line_flags_file,
self.args.url, self.apk_helper) | chrisdickinson/nojs | [
72,
3,
72,
5,
1464475027
] |
def Run(self):
logging.warning('Installing...')
_InstallApk(self.devices, self.apk_helper, self.install_dict)
logging.warning('Sending launch intent...')
_LaunchUrl(self.devices, self.args.args, self.args.command_line_flags_file,
self.args.url, self.apk_helper) | chrisdickinson/nojs | [
72,
3,
72,
5,
1464475027
] |
def Run(self):
device_utils.DeviceUtils.parallel(self.devices).ForceStop(
self.args.package_name) | chrisdickinson/nojs | [
72,
3,
72,
5,
1464475027
] |
def Run(self):
device_utils.DeviceUtils.parallel(self.devices).ClearApplicationState(
self.args.package_name) | chrisdickinson/nojs | [
72,
3,
72,
5,
1464475027
] |
def Run(self):
_ChangeFlags(self.devices, self.args.args,
self.args.command_line_flags_file) | chrisdickinson/nojs | [
72,
3,
72,
5,
1464475027
] |
def Run(self):
extra_args = shlex.split(self.args.args or '')
_RunGdb(self.devices[0], self.args.package_name, self.args.output_directory,
self.args.target_cpu, extra_args, bool(self.args.verbose_count)) | chrisdickinson/nojs | [
72,
3,
72,
5,
1464475027
] |
def Run(self):
mapping = self.args.proguard_mapping_path
if self.args.no_deobfuscate:
mapping = None
_RunLogcat(self.devices[0], self.args.package_name,
bool(self.args.verbose_count), mapping) | chrisdickinson/nojs | [
72,
3,
72,
5,
1464475027
] |
def Run(self):
_RunPs(self.devices, self.args.package_name) | chrisdickinson/nojs | [
72,
3,
72,
5,
1464475027
] |
def Run(self):
_RunDiskUsage(self.devices, self.args.package_name,
bool(self.args.verbose_count)) | chrisdickinson/nojs | [
72,
3,
72,
5,
1464475027
] |
def Run(self):
_RunMemUsage(self.devices, self.args.package_name) | chrisdickinson/nojs | [
72,
3,
72,
5,
1464475027
] |
def calls_exec(self):
return not self.args.cmd | chrisdickinson/nojs | [
72,
3,
72,
5,
1464475027
] |
def Run(self):
_RunShell(self.devices, self.args.package_name, self.args.cmd) | chrisdickinson/nojs | [
72,
3,
72,
5,
1464475027
] |
def _RegisterExtraArgs(self, group):
group.add_argument(
'compilation_filter',
choices=['verify', 'quicken', 'space-profile', 'space',
'speed-profile', 'speed'],
help='For WebView/Monochrome, use "speed". For other apks, use '
'"speed-profile".') | chrisdickinson/nojs | [
72,
3,
72,
5,
1464475027
] |
def _ParseArgs(parser, from_wrapper_script):
subparsers = parser.add_subparsers()
commands = [clazz(from_wrapper_script) for clazz in _COMMANDS]
for command in commands:
if from_wrapper_script or not command.needs_output_directory:
command.RegisterArgs(subparsers)
# Show extended help when no command is passed.
argv = sys.argv[1:]
if not argv:
argv = ['--help']
return parser.parse_args(argv) | chrisdickinson/nojs | [
72,
3,
72,
5,
1464475027
] |
def Run(output_directory, apk_path, incremental_json, command_line_flags_file,
target_cpu, proguard_mapping_path):
"""Entry point for generated wrapper scripts."""
constants.SetOutputDirectory(output_directory)
devil_chromium.Initialize(output_directory=output_directory)
parser = argparse.ArgumentParser()
exists_or_none = lambda p: p if p and os.path.exists(p) else None
parser.set_defaults(
command_line_flags_file=command_line_flags_file,
target_cpu=target_cpu,
apk_path=exists_or_none(apk_path),
incremental_json=exists_or_none(incremental_json),
proguard_mapping_path=proguard_mapping_path)
_RunInternal(parser, output_directory=output_directory) | chrisdickinson/nojs | [
72,
3,
72,
5,
1464475027
] |
def __init__(self, game,
key_press_handler=None,
mouse_click_handler=None,
mouse_release_handler=None,
tick_time=0,
width=600,
height=400,
key_press_handlers=None,
mouse_click_handlers=None,
mouse_release_handlers=None):
super(DefaultWindow, self).__init__()
self.game = game | eeue56/pycho | [
5,
1,
5,
17,
1392570857
] |
def timerEvent(self, event):
self.callbacks[event.timerId() - 1]() | eeue56/pycho | [
5,
1,
5,
17,
1392570857
] |
def map_point_to_game_world(self, x, y):
i = int((x / self.widget.width) * self.game.world.width)
j = int(((self.widget.height - y) / self.widget.height) * self.game.world.height)
return (i, j) | eeue56/pycho | [
5,
1,
5,
17,
1392570857
] |
def _defaultMousePressHandler(self, event, pointer_size=5):
x, y = self.map_point_to_game_world(event.x(), event.y())
# gradually grow the pointer to be bigger to
# allow for a greater control on what is clicked
for j in xrange(pointer_size):
try:
obj = self.game.world.colliding_object(None,
box_around(x, y, j, j))
except:
break
if obj is not None:
logging.error(obj)
break
else:
logging.error("Nothing found!") | eeue56/pycho | [
5,
1,
5,
17,
1392570857
] |
def unpause(self):
for timer, time in zip(self.timers, self.timer_times):
self.timers.start(time)
self.is_paused = False | eeue56/pycho | [
5,
1,
5,
17,
1392570857
] |
def mousePressEvent(self, event):
self._current_handler(self.mouse_click_handlers)(self, event) | eeue56/pycho | [
5,
1,
5,
17,
1392570857
] |
def test_between_time_formats(self, frame_or_series):
# GH#11818
rng = date_range("1/1/2000", "1/5/2000", freq="5min")
ts = DataFrame(np.random.randn(len(rng), 2), index=rng)
ts = tm.get_obj(ts, frame_or_series)
strings = [
("2:00", "2:30"),
("0200", "0230"),
("2:00am", "2:30am"),
("0200am", "0230am"),
("2:00:00", "2:30:00"),
("020000", "023000"),
("2:00:00am", "2:30:00am"),
("020000am", "023000am"),
]
expected_length = 28
for time_string in strings:
assert len(ts.between_time(*time_string)) == expected_length | pandas-dev/pandas | [
37157,
15883,
37157,
3678,
1282613853
] |
def test_localized_between_time(self, tzstr, frame_or_series):
tz = timezones.maybe_get_tz(tzstr)
rng = date_range("4/16/2012", "5/1/2012", freq="H")
ts = Series(np.random.randn(len(rng)), index=rng)
if frame_or_series is DataFrame:
ts = ts.to_frame()
ts_local = ts.tz_localize(tzstr)
t1, t2 = time(10, 0), time(11, 0)
result = ts_local.between_time(t1, t2)
expected = ts.between_time(t1, t2).tz_localize(tzstr)
tm.assert_equal(result, expected)
assert timezones.tz_compare(result.index.tz, tz) | pandas-dev/pandas | [
37157,
15883,
37157,
3678,
1282613853
] |
def test_between_time(self, inclusive_endpoints_fixture, frame_or_series):
rng = date_range("1/1/2000", "1/5/2000", freq="5min")
ts = DataFrame(np.random.randn(len(rng), 2), index=rng)
ts = tm.get_obj(ts, frame_or_series)
stime = time(0, 0)
etime = time(1, 0)
inclusive = inclusive_endpoints_fixture
filtered = ts.between_time(stime, etime, inclusive=inclusive)
exp_len = 13 * 4 + 1
if inclusive in ["right", "neither"]:
exp_len -= 5
if inclusive in ["left", "neither"]:
exp_len -= 4
assert len(filtered) == exp_len
for rs in filtered.index:
t = rs.time()
if inclusive in ["left", "both"]:
assert t >= stime
else:
assert t > stime
if inclusive in ["right", "both"]:
assert t <= etime
else:
assert t < etime
result = ts.between_time("00:00", "01:00")
expected = ts.between_time(stime, etime)
tm.assert_equal(result, expected)
# across midnight
rng = date_range("1/1/2000", "1/5/2000", freq="5min")
ts = DataFrame(np.random.randn(len(rng), 2), index=rng)
ts = tm.get_obj(ts, frame_or_series)
stime = time(22, 0)
etime = time(9, 0)
filtered = ts.between_time(stime, etime, inclusive=inclusive)
exp_len = (12 * 11 + 1) * 4 + 1
if inclusive in ["right", "neither"]:
exp_len -= 4
if inclusive in ["left", "neither"]:
exp_len -= 4
assert len(filtered) == exp_len
for rs in filtered.index:
t = rs.time()
if inclusive in ["left", "both"]:
assert (t >= stime) or (t <= etime)
else:
assert (t > stime) or (t <= etime)
if inclusive in ["right", "both"]:
assert (t <= etime) or (t >= stime)
else:
assert (t < etime) or (t >= stime) | pandas-dev/pandas | [
37157,
15883,
37157,
3678,
1282613853
] |
def test_between_time_axis(self, frame_or_series):
# GH#8839
rng = date_range("1/1/2000", periods=100, freq="10min")
ts = Series(np.random.randn(len(rng)), index=rng)
if frame_or_series is DataFrame:
ts = ts.to_frame()
stime, etime = ("08:00:00", "09:00:00")
expected_length = 7
assert len(ts.between_time(stime, etime)) == expected_length
assert len(ts.between_time(stime, etime, axis=0)) == expected_length
msg = f"No axis named {ts.ndim} for object type {type(ts).__name__}"
with pytest.raises(ValueError, match=msg):
ts.between_time(stime, etime, axis=ts.ndim) | pandas-dev/pandas | [
37157,
15883,
37157,
3678,
1282613853
] |
def test_between_time_axis_raises(self, axis):
# issue 8839
rng = date_range("1/1/2000", periods=100, freq="10min")
mask = np.arange(0, len(rng))
rand_data = np.random.randn(len(rng), len(rng))
ts = DataFrame(rand_data, index=rng, columns=rng)
stime, etime = ("08:00:00", "09:00:00")
msg = "Index must be DatetimeIndex"
if axis in ["columns", 1]:
ts.index = mask
with pytest.raises(TypeError, match=msg):
ts.between_time(stime, etime)
with pytest.raises(TypeError, match=msg):
ts.between_time(stime, etime, axis=0)
if axis in ["index", 0]:
ts.columns = mask
with pytest.raises(TypeError, match=msg):
ts.between_time(stime, etime, axis=1) | pandas-dev/pandas | [
37157,
15883,
37157,
3678,
1282613853
] |
def test_between_time_warn(self, include_start, include_end, frame_or_series):
# GH40245
rng = date_range("1/1/2000", "1/5/2000", freq="5min")
ts = DataFrame(np.random.randn(len(rng), 2), index=rng)
ts = tm.get_obj(ts, frame_or_series)
stime = time(0, 0)
etime = time(1, 0)
match = (
"`include_start` and `include_end` "
"are deprecated in favour of `inclusive`."
)
with tm.assert_produces_warning(FutureWarning, match=match):
_ = ts.between_time(stime, etime, include_start, include_end) | pandas-dev/pandas | [
37157,
15883,
37157,
3678,
1282613853
] |
def test_between_time_incompatiable_args_given(self, include_start, include_end):
# GH40245
rng = date_range("1/1/2000", "1/5/2000", freq="5min")
ts = DataFrame(np.random.randn(len(rng), 2), index=rng)
stime = time(0, 0)
etime = time(1, 0)
msg = (
"Deprecated arguments `include_start` and `include_end` cannot be "
"passed if `inclusive` has been given."
)
with pytest.raises(ValueError, match=msg):
ts.between_time(stime, etime, include_start, include_end, inclusive="left") | pandas-dev/pandas | [
37157,
15883,
37157,
3678,
1282613853
] |
def __init__(self,
path,
identifier,
options=None,
requirements=None,
identifier_requirement=True,
sign_with_identifier=False,
entitlements=None,
verify_options=None):
"""A build product to be codesigned.
Args:
path: The path to the product to be signed. This is relative to a
work directory containing the build products.
identifier: The unique identifier set when code signing. This is
only explicitly passed with the `--identifier` flag if
|sign_with_identifier| is True.
options: Options flags to pass to `codesign --options`, from
|CodeSignOptions|.
requirements: String for additional `--requirements` to pass to the
`codesign` command. These are joined with a space to the
|config.CodeSignConfig.codesign_requirements_basic| string. See
|CodeSignedProduct.requirements_string()| for details.
identifier_requirement: If True, a designated identifier requirement
based on |identifier| will be inserted into the requirements
string. If False, then no designated requirement will be
generated based on the identifier.
sign_with_identifier: If True, then the identifier will be specified
when running the `codesign` command. If False, `codesign` will
infer the identifier itself.
entitlements: File name of the entitlements file to sign the product
with. The file should reside in the |Paths.packaging_dir|.
verify_options: Flags to pass to `codesign --verify`, from
|VerifyOptions|.
"""
self.path = path
self.identifier = identifier
if not CodeSignOptions.valid(options):
raise ValueError('Invalid CodeSignOptions: {}'.format(options))
self.options = options
self.requirements = requirements
self.identifier_requirement = identifier_requirement
self.sign_with_identifier = sign_with_identifier
self.entitlements = entitlements
if not VerifyOptions.valid(verify_options):
raise ValueError('Invalid VerifyOptions: {}'.format(verify_options))
self.verify_options = verify_options | endlessm/chromium-browser | [
21,
16,
21,
3,
1435959644
] |
def __repr__(self):
return 'CodeSignedProduct(identifier={0.identifier}, ' \
'options={0.options}, path={0.path})'.format(self) | endlessm/chromium-browser | [
21,
16,
21,
3,
1435959644
] |
def valid(cls, opts_to_check):
"""Tests if the specified |opts_to_check| are valid.
Args:
options: Iterable of option strings.
Returns:
True if all the options are valid, False if otherwise.
"""
if opts_to_check is None:
return True
valid_values = options.values()
return all([option in valid_values for option in opts_to_check]) | endlessm/chromium-browser | [
21,
16,
21,
3,
1435959644
] |
def __init__(self,
channel=None,
branding_code=None,
app_name_fragment=None,
packaging_name_fragment=None,
product_dirname=None,
creator_code=None,
channel_customize=False,
package_as_dmg=True,
package_as_pkg=False):
"""Creates a new Distribution object. All arguments are optional.
Args:
channel: The release channel for the product.
branding_code: A branding code helps track how users acquired the
product from various marketing channels.
app_name_fragment: If present, this string fragment is appended to
the |config.CodeSignConfig.app_product|. This renames the binary
and outer app bundle.
packaging_name_fragment: If present, this is appended to the
|config.CodeSignConfig.packaging_basename| to help differentiate
different |branding_code|s.
product_dirname: If present, this string value is set in the app's
Info.plist with the key "CrProductDirName". This key influences
the browser's default user-data-dir location.
creator_code: If present, this will set a new macOS creator code
in the Info.plist "CFBundleSignature" key and in the PkgInfo
file. If this is not specified, the original values from the
build products will be kept.
channel_customize: If True, then the product will be modified in
several ways:
- The |channel| will be appended to the
|config.CodeSignConfig.base_bundle_id|.
- The product will be renamed with |app_name_fragment|.
- Different assets will be used for icons in the app.
package_as_dmg: If True, then a .dmg file will be created containing
the product.
package_as_pkg: If True, then a .pkg file will be created containing
the product.
"""
self.channel = channel
self.branding_code = branding_code
self.app_name_fragment = app_name_fragment
self.packaging_name_fragment = packaging_name_fragment
self.product_dirname = product_dirname
self.creator_code = creator_code
self.channel_customize = channel_customize
self.package_as_dmg = package_as_dmg
self.package_as_pkg = package_as_pkg | endlessm/chromium-browser | [
21,
16,
21,
3,
1435959644
] |
def base_config(self):
return base_config | endlessm/chromium-browser | [
21,
16,
21,
3,
1435959644
] |
def distribution(self):
return this | endlessm/chromium-browser | [
21,
16,
21,
3,
1435959644
] |
def app_product(self):
if this.channel_customize:
return '{} {}'.format(base_config.app_product,
this.app_name_fragment)
return base_config.app_product | endlessm/chromium-browser | [
21,
16,
21,
3,
1435959644
] |
def base_bundle_id(self):
base_bundle_id = base_config.base_bundle_id
if this.channel_customize:
return base_bundle_id + '.' + this.channel
return base_bundle_id | endlessm/chromium-browser | [
21,
16,
21,
3,
1435959644
] |
def provisioning_profile_basename(self):
profile = base_config.provisioning_profile_basename
if profile and this.channel_customize:
return '{}_{}'.format(profile, this.app_name_fragment)
return profile | endlessm/chromium-browser | [
21,
16,
21,
3,
1435959644
] |
def packaging_basename(self):
if this.packaging_name_fragment:
return '{}-{}-{}'.format(
self.app_product.replace(' ', ''), self.version,
this.packaging_name_fragment)
return super(DistributionCodeSignConfig,
self).packaging_basename | endlessm/chromium-browser | [
21,
16,
21,
3,
1435959644
] |
def __init__(self, input, output, work):
self._input = input
self._output = output
self._work = work | endlessm/chromium-browser | [
21,
16,
21,
3,
1435959644
] |
def input(self):
return self._input | endlessm/chromium-browser | [
21,
16,
21,
3,
1435959644
] |
def output(self):
return self._output | endlessm/chromium-browser | [
21,
16,
21,
3,
1435959644
] |
def work(self):
return self._work | endlessm/chromium-browser | [
21,
16,
21,
3,
1435959644
] |
def replace_work(self, new_work):
"""Creates a new Paths with the same input and output directories, but
with |work| set to |new_work|."""
return Paths(self.input, self.output, new_work) | endlessm/chromium-browser | [
21,
16,
21,
3,
1435959644
] |
def run_check(self, url):
self.session = requests.session()
self.session.headers = [('User-agent', "Sasha's pony checkup - http://ponycheckup.com/")]
try:
homepage = self.session.get(url, timeout=7)
check_record = Check(url=url)
check_record.hsts_header_found = self.check_supports_hsts(url)
check_record.xframe_header_found = True if 'X-Frame-Options' in homepage.headers else False
check_record.supports_https = self.check_supports_https(url)
check_record.heartbleed_vuln = self.check_heartbleed_vuln(url)
(check_record.admin_found, check_record.admin_forces_https) = self.check_admin(url)
(check_record.login_found, check_record.login_forces_https) = self.check_login(url)
check_record.allows_trace = self.check_trace(url)
check_record.runs_debug = self.check_runs_debug(url)
check_record.csrf_cookie_found = True if self.find_csrf_cookie() else False
session_cookie = self.find_session_cookie()
if session_cookie:
check_record.session_cookie_found = True
check_record.session_cookie_secure = session_cookie.secure
check_record.session_cookie_httponly = session_cookie.has_nonstandard_attr('httponly')
else:
check_record.session_cookie_found = False
check_record.update_recommendation_count()
check_record.save()
return check_record
except (requests.RequestException, SSLError, PyAsn1Error) as error:
return error | erikr/ponycheckup | [
62,
10,
62,
6,
1459590943
] |
def check_heartbleed_vuln(self, url):
try:
url = url.replace("http://", "").replace("/", "")
return bool(test_heartbleed(url))
except socket.error:
return False | erikr/ponycheckup | [
62,
10,
62,
6,
1459590943
] |
def check_runs_debug(self, url):
data = self.session.get(url+"/[][][][][]-this-tries-to-trigger-404....", timeout=7)
return "You're seeing this error because you have <code>DEBUG = True</code>" in data.content | erikr/ponycheckup | [
62,
10,
62,
6,
1459590943
] |
def check_admin(self, url):
response = self.session.get(url + "/admin", timeout=7)
if response.status_code == 404:
return (False, None)
data = response.content.lower()
admin_found = '"id_username"' in data and ("csrfmiddlewaretoken" in data or "Django" in data or "__admin_media_prefix__" in data)
return (admin_found, self._response_used_https(response)) | erikr/ponycheckup | [
62,
10,
62,
6,
1459590943
] |
def _response_used_https(self, response):
return response.url[:5] == "https" | erikr/ponycheckup | [
62,
10,
62,
6,
1459590943
] |
def __init__(self, cube):
self._element = cube | ioam/geoviews | [
461,
68,
461,
111,
1461083221
] |
def linecontours(self, kdims=None, vdims=None, mdims=None, **kwargs):
return self(LineContours, kdims, vdims, mdims, **kwargs) | ioam/geoviews | [
461,
68,
461,
111,
1461083221
] |
def image(self, kdims=None, vdims=None, mdims=None, **kwargs):
return self(Image, kdims, vdims, mdims, **kwargs) | ioam/geoviews | [
461,
68,
461,
111,
1461083221
] |
def polygons(self, kdims=None, vdims=None, mdims=None, **kwargs):
if kdims is None: kdims = self._element.kdims
el_type = Polygons if is_geographic(self._element, kdims) else HvPolygons
return self(el_type, kdims, vdims, mdims, **kwargs) | ioam/geoviews | [
461,
68,
461,
111,
1461083221
] |
def Cell(node):
# cells must stand on own line
if node.parent.cls not in ("Assign", "Assigns"):
node.auxiliary("cell")
return "{", ",", "}" | jonathf/matlab2cpp | [
169,
58,
169,
18,
1425388832
] |
def __init__(self):
self.done = False
self.done_evt = threading.Event()
self.isExploring = False
self.progress = 0.0
self.x = 0.0
self.y = 0.0
self.z = 0.0
self.currentPoseX = 0
self.currentPoseY = 0
self.currentPoseZ = 0
self.navigating = False | kuri-kustar/kuri_mbzirc_challenge_3 | [
2,
3,
2,
5,
1458114537
] |
def navigate(self):
rate = rospy.Rate(40) # 10hz
msg = SP.PoseStamped(
header=SP.Header(
frame_id="base_footprint", # no matter, plugin don't use TF
stamp=rospy.Time.now()), # stamp should update
)
while not rospy.is_shutdown():
msg.pose.position.x = self.x
msg.pose.position.y = self.y
msg.pose.position.z = self.z
# For demo purposes we will lock yaw/heading to north.
yaw_degrees = 0 # North
yaw = radians(yaw_degrees)
quaternion = quaternion_from_euler(0, 0, yaw)
msg.pose.orientation = SP.Quaternion(*quaternion) | kuri-kustar/kuri_mbzirc_challenge_3 | [
2,
3,
2,
5,
1458114537
] |
def setPose(self, x, y, z, delay=0, wait=True):
self.done = False
self.x = x
self.y = y
self.z = z
self.navigating = True
if wait:
rate = rospy.Rate(5)
while not self.done and not rospy.is_shutdown():
rate.sleep()
time.sleep(delay) | kuri-kustar/kuri_mbzirc_challenge_3 | [
2,
3,
2,
5,
1458114537
] |
def takeoff(self, z, delay=0, wait=True):
diff = z - self.currentPoseZ
while not abs(diff)<0.2:
diff = z - self.currentPoseZ
if diff>0:
self.setPose(self.currentPoseX,self.currentPoseY,self.currentPoseZ + 1, 0, False)
else:
self.setPose(self.currentPoseX,self.currentPoseY,self.currentPoseZ - 0.1, 0, False) | kuri-kustar/kuri_mbzirc_challenge_3 | [
2,
3,
2,
5,
1458114537
] |
def land(self, delay=0, wait=True):
altitude = self.currentPoseZ
while altitude > 0:
altitude = self.currentPoseZ
self.setPose(self.currentPoseX,self.currentPoseY,self.currentPoseZ - 0.5 ,2) | kuri-kustar/kuri_mbzirc_challenge_3 | [
2,
3,
2,
5,
1458114537
] |
def reached(self, topic):
def is_near(msg, x, y, d):
rospy.logdebug("Position %s: local: %d, target: %d, abs diff: %d",
msg, x, y, abs(x - y))
return abs(x - y) < d
self.currentPoseX = topic.pose.position.x
self.currentPoseY = topic.pose.position.y
self.currentPoseZ = topic.pose.position.z | kuri-kustar/kuri_mbzirc_challenge_3 | [
2,
3,
2,
5,
1458114537
] |
def explore(self):
print 'explore started '
rate = rospy.Rate(30)
self.newGoal = True
if self.isExploring == False:
#Change this later when we have a better exploration
#self.isExploring = True
while self.done == False:
time.sleep(1)
rospy.loginfo("Climb")
self.progress += 0.1
self.takeoff(5)
self.progress += 0.1
rospy.loginfo("Moving to Red_Object")
self.reached_object = False
red_object_id = -1
xspeed = 1
while self.reached_object == False:
self.client.send_goal(self.goal)
self.client.wait_for_result()
self.objects = self.client.get_result().tracked_objects.objects
islost = True
for obj in self.objects:
if red_object_id == -1 and (obj.color == 'RED' or obj.color == 'BLUE' or obj.color == 'GREEN'): #pick any nearby object
red_object_id = obj.object_id
if obj.object_id == red_object_id:
islost = False
print 'Moving to Drop zone', self.currentPoseX-obj.pose2.pose.position.x, self.currentPoseY-obj.pose2.pose.position.y, obj.pose.pose.position.x, obj.pose.pose.position.y
if fabs(obj.pose2.pose.position.x) < 0.01 and fabs(obj.pose2.pose.position.y) > 0.01:
print 'Moving Y'
self.setPose(self.x, self.currentPoseY+obj.pose2.pose.position.y*xspeed, self.z, 0 , False)
elif fabs(obj.pose2.pose.position.y) < 0.01 and fabs(obj.pose2.pose.position.x) > 0.01:
print 'Moving X'
self.setPose(self.currentPoseX-obj.pose2.pose.position.x*xspeed, self.y, self.z, 0 , False)
else:
print 'Moving XY'
self.setPose(self.currentPoseX-obj.pose2.pose.position.x*xspeed, self.currentPoseY+obj.pose2.pose.position.y*xspeed, self.z, 0 , True)
if fabs(obj.pose2.pose.position.x) < 0.3 and fabs(obj.pose2.pose.position.y) < 0.3 and self.z > 0.0:
print 'Moving Z'
land = 0.2
if self.z <= 3:
xspeed = 1
if self.z <= 1.5:
xspeed = 0.5
if self.z < 0.5:
land = 0.05
self.setPose(self.x, self.y, self.z - land * xspeed, 1, False)
if self.z <= 0.4:
self.reached_object = True
if islost == True:
red_object_id = -1
if red_object_id == -1:
rospy.loginfo("No object in sight, exploring")
#self.setPose(self.x, self.y - 5, self.z, 1, True)
rate.sleep()
time.sleep(10)
rospy.loginfo("Picked Object, climb")
self.takeoff(1)
self.takeoff(2)
self.takeoff(3)
self.takeoff(4)
self.takeoff(5)
#self.setPose(self.x, self.y, self.z)
time.sleep(10)
rospy.loginfo("Moving to DropZone")
self.setPose(1, -21, 5) ##Go near dropzone
self.progress += 0.1
self.reached_dropzone = False
xspeed = 3
while self.reached_dropzone == False:
self.client.send_goal(self.goal)
self.client.wait_for_result()
self.objects = self.client.get_result().tracked_objects.objects | kuri-kustar/kuri_mbzirc_challenge_3 | [
2,
3,
2,
5,
1458114537
] |
def main(args):
'''Initializes and cleanup ros node'''
rospy.init_node('dropzone_landing', anonymous=True)
d = dropzone_landing()
d.explore()
try:
rospy.spin()
except KeyboardInterrupt:
print "Shutting down ROS Image feature detector module"
cv2.destroyAllWindows() | kuri-kustar/kuri_mbzirc_challenge_3 | [
2,
3,
2,
5,
1458114537
] |
def test_plte():
qr = segno.make_qr('test')
assert qr.version < 7
dark = 'red'
buff_1 = io.BytesIO()
buff_2 = io.BytesIO()
qr.save(buff_1, kind='png', dark=dark, finder_dark=dark, version_dark='green')
qr.save(buff_2, kind='png', dark=dark)
assert buff_1.getvalue() == buff_2.getvalue() | heuer/segno | [
387,
42,
387,
12,
1470301732
] |
def test_plte3():
qr = segno.make_qr('test')
assert qr.version < 7
dark = 'red'
buff_1 = io.BytesIO()
buff_2 = io.BytesIO()
qr.save(buff_1, kind='png', dark=dark, finder_dark=dark, version_dark='green')
qr.save(buff_2, kind='png', dark=dark)
assert buff_1.getvalue() == buff_2.getvalue() | heuer/segno | [
387,
42,
387,
12,
1470301732
] |
def test_plte_micro2():
qr = segno.make_micro('RAIN')
dark = 'red'
buff_1 = io.BytesIO()
buff_2 = io.BytesIO()
qr.save(buff_1, kind='png', dark=dark, finder_dark=dark, dark_module='green')
qr.save(buff_2, kind='png', dark=dark)
assert buff_1.getvalue() == buff_2.getvalue() | heuer/segno | [
387,
42,
387,
12,
1470301732
] |
def __init__(self, source_port=0, destination_port=0, checksum=0,
payload=''):
# Call the superclass constructor
super(UDPMessage, self).__init__(payload=payload)
self.source_port = source_port
self.destination_port = destination_port
self.checksum = checksum | steffann/pylisp | [
7,
1,
7,
1,
1357508601
] |
def generate_pseudo_header(self, source, destination):
# Calculate the length of the UDP layer
udp_length = 8 + len(bytes(self.payload))
if isinstance(source, IPv4Address) \
and isinstance(destination, IPv4Address):
# Generate an IPv4 pseudo-header
header = BitStream('uint:32=%d, '
'uint:32=%d, '
'uint:16=17, '
'uint:16=%d' % (int(source),
int(destination),
udp_length))
elif isinstance(source, IPv6Address) \
and isinstance(destination, IPv6Address):
# Generate an IPv6 pseudo-header
header = BitStream('uint:128=%d, '
'uint:128=%d, '
'uint:32=%d, '
'uint:32=17' % (int(source),
int(destination),
udp_length))
else:
raise ValueError('Source and destination must belong to the same '
'IP version')
# Return the header bytes
return header.bytes | steffann/pylisp | [
7,
1,
7,
1,
1357508601
] |
def verify_checksum(self, source, destination):
# An all zero transmitted checksum value means that the transmitter
# generated no checksum (for debugging or for higher level protocols
# that don't care).
if self.checksum == 0:
return True
return self.checksum == self.calculate_checksum(source, destination) | steffann/pylisp | [
7,
1,
7,
1,
1357508601
] |
def get_lisp_data_packet(self):
return self.get_lisp_message(only_data=True) | steffann/pylisp | [
7,
1,
7,
1,
1357508601
] |
def from_bytes(cls, bitstream):
'''
Parse the given packet and update properties accordingly
'''
packet = cls()
# Convert to ConstBitStream (if not already provided)
if not isinstance(bitstream, ConstBitStream):
if isinstance(bitstream, Bits):
bitstream = ConstBitStream(auto=bitstream)
else:
bitstream = ConstBitStream(bytes=bitstream)
# Read the source and destination ports
(packet.source_port,
packet.destination_port) = bitstream.readlist('2*uint:16')
# Store the length
length = bitstream.read('uint:16')
if length < 8:
raise ValueError('Invalid UDP length')
# Read the checksum
packet.checksum = bitstream.read('uint:16')
# And the rest is payload
payload_bytes = length - 8
packet.payload = bitstream.read('bytes:%d' % payload_bytes)
# LISP-specific handling
if packet.source_port == 4341 or packet.destination_port == 4341:
# Payload is a LISP data packet
from pylisp.packet.lisp.data import DataPacket
packet.payload = DataPacket.from_bytes(packet.payload)
elif packet.source_port == 4342 or packet.destination_port == 4342:
# Payload is a LISP control message
from pylisp.packet.lisp.control.base import ControlMessage
packet.payload = ControlMessage.from_bytes(packet.payload)
# There should be no remaining bits
if bitstream.pos != bitstream.len:
raise ValueError('Bits remaining after processing packet')
# Verify that the properties make sense
packet.sanitize()
return packet | steffann/pylisp | [
7,
1,
7,
1,
1357508601
] |
def __init__(self, name, dtype, dims):
self.name = name
self.dtype = dtype
self.dims = dims
self.iotype = 0
self.used_count = 0
self.index = Operand.index
Operand.index = Operand.index + 1
self.iotype2str = {Operand.IOTYPE_INPUT: 'in', Operand.IOTYPE_OUTPUT: 'out', Operand.IOTYPE_INTERMEDIATE: 'inout'}
self.dtype2str = {Operand.DTYPE_FLOAT: 'DT_FLOAT', Operand.DTYPE_UINT8: 'DT_UINT8'} | endlessm/chromium-browser | [
21,
16,
21,
3,
1435959644
] |
def __str__(self):
return "{}: (name: {}, iotype: {}, dtype: {}, dims: ({},{},{},{}) used_count: {})".format(self.index,
self.name, self.iotype2str[self.iotype], self.dtype2str[self.dtype],
self.dims[0], self.dims[1], self.dims[2], self.dims[3], self.used_count) | endlessm/chromium-browser | [
21,
16,
21,
3,
1435959644
] |
def __init__(self, graph_def, nodes, outfile, dump4tb):
self.graph_def = graph_def
self.nodes = nodes
self.outfile = outfile
self.dump4tb = dump4tb
self.layer_number = 0
self.output_names = []
self.name_node_dict = {}
self.edges = {}
self.conv_activations = {'Relu':0, 'Tanh':1, 'Sigmoid':2, 'None':3, 'LeakyRelu':4}
self.conv_paddings = {'VALID':0, 'SAME':1}
self.converted_nodes = set()
self.conv2d_scope_names = set()
self.conv2d_scopename_inputname_dict = {}
self.op2code = {'Conv2D':1, 'DepthToSpace':2, 'MirrorPad':3, 'Maximum':4, 'MathBinary':5}
self.mathbin2code = {'Sub':0, 'Add':1, 'Mul':2, 'RealDiv':3}
self.mirrorpad_mode = {'CONSTANT':0, 'REFLECT':1, 'SYMMETRIC':2}
self.name_operand_dict = {} | endlessm/chromium-browser | [
21,
16,
21,
3,
1435959644
] |
def dump_for_tensorboard(self):
graph = tf.get_default_graph()
tf.import_graph_def(self.graph_def, name="")
tf.summary.FileWriter('/tmp/graph', graph)
print('graph saved, run "tensorboard --logdir=/tmp/graph" to see it') | endlessm/chromium-browser | [
21,
16,
21,
3,
1435959644
] |
def dump_complex_conv2d_to_file(self, node, f):
    """Serialize a scoped (possibly dilated) Conv2D sub-block to *f*.

    Writes, in order: a uint32 header [op_code, dilation, padding,
    activation, in_channels, out_channels, filter_height, has_bias],
    the kernel as float32, the raw bias bytes, and finally the
    input/output operand indices as uint32.
    """
    assert(node.op == 'Conv2D')
    self.layer_number = self.layer_number + 1
    self.converted_nodes.add(node.name)
    scope_name = TFConverter.get_scope_name(node.name)
    #knode for kernel, bnode for bias, dnode for dilation, anode for activation
    knode, bnode, dnode, anode = self.get_conv2d_params(scope_name)
    if dnode is not None:
        # Dilation rate is stored as the first int32 of the tensor content.
        dilation = struct.unpack('i', dnode.attr['value'].tensor.tensor_content[0:4])[0]
    else:
        dilation = 1
    if anode is not None:
        activation = anode.op
    else:
        activation = 'None'
    padding = node.attr['padding'].s.decode("utf-8")
    # conv2d with dilation > 1 generates tens of nodes, not easy to parse them, so use this tricky method.
    if dilation > 1 and scope_name + '/stack' in self.name_node_dict:
        if self.name_node_dict[scope_name + '/stack'].op == "Const":
            padding = 'SAME'
    padding = self.conv_paddings[padding]
    # Kernel dims per the code below: [H, W, in_channels, out_channels].
    ktensor = knode.attr['value'].tensor
    filter_height = ktensor.tensor_shape.dim[0].size
    filter_width = ktensor.tensor_shape.dim[1].size
    in_channels = ktensor.tensor_shape.dim[2].size
    out_channels = ktensor.tensor_shape.dim[3].size
    kernel = np.frombuffer(ktensor.tensor_content, dtype=np.float32)
    kernel = kernel.reshape(filter_height, filter_width, in_channels, out_channels)
    # Reorder axes so out_channels comes first: (O, H, W, I).
    kernel = np.transpose(kernel, [3, 0, 1, 2])
    has_bias = 1
    np.array([self.op2code[node.op], dilation, padding, self.conv_activations[activation], in_channels, out_channels, filter_height, has_bias], dtype=np.uint32).tofile(f)
    kernel.tofile(f)
    btensor = bnode.attr['value'].tensor
    if btensor.tensor_shape.dim[0].size == 1:
        # A scalar bias lives in float_val instead of tensor_content.
        bias = struct.pack("f", btensor.float_val[0])
    else:
        bias = btensor.tensor_content
    f.write(bias)
    input_name = self.conv2d_scopename_inputname_dict[scope_name]
    input_operand_index = self.add_operand(input_name, Operand.IOTYPE_INPUT)
    if anode is not None:
        output_operand_index = self.add_operand(anode.name, Operand.IOTYPE_OUTPUT)
    else:
        # No activation: the sub-block's output is the first consumer of the bias node.
        output_operand_index = self.add_operand(self.edges[bnode.name][0].name, Operand.IOTYPE_OUTPUT)
    np.array([input_operand_index, output_operand_index], dtype=np.uint32).tofile(f)
21,
16,
21,
3,
1435959644
] |
def dump_depth2space_to_file(self, node, f):
    """Serialize a DepthToSpace node: [op_code, block_size] then operand indices."""
    assert(node.op == 'DepthToSpace')
    self.layer_number += 1
    block_size = node.attr['block_size'].i
    np.array([self.op2code[node.op], block_size], dtype=np.uint32).tofile(f)
    self.converted_nodes.add(node.name)
    in_index = self.add_operand(node.input[0], Operand.IOTYPE_INPUT)
    out_index = self.add_operand(node.name, Operand.IOTYPE_OUTPUT)
    np.array([in_index, out_index], dtype=np.uint32).tofile(f)
21,
16,
21,
3,
1435959644
] |
def dump_maximum_to_file(self, node, f):
    """Serialize a Maximum node: op_code, the constant y, then operand indices."""
    assert(node.op == 'Maximum')
    self.layer_number += 1
    # Second input is a const scalar; pull its float value.
    ynode = self.name_node_dict[node.input[1]]
    y_value = ynode.attr['value'].tensor.float_val[0]
    np.array([self.op2code[node.op]], dtype=np.uint32).tofile(f)
    np.array([y_value], dtype=np.float32).tofile(f)
    self.converted_nodes.add(node.name)
    in_index = self.add_operand(node.input[0], Operand.IOTYPE_INPUT)
    out_index = self.add_operand(node.name, Operand.IOTYPE_OUTPUT)
    np.array([in_index, out_index], dtype=np.uint32).tofile(f)
21,
16,
21,
3,
1435959644
] |
def dump_layers_to_file(self, f):
    """Dispatch every not-yet-converted node to its per-op serializer.

    The four math-binary ops (Sub/Add/Mul/RealDiv) previously had one
    duplicated elif branch each; they are now dispatched via the
    existing self.mathbin2code table, which keeps this method in sync
    with the op set declared in __init__.
    """
    for node in self.nodes:
        if node.name in self.converted_nodes:
            continue
        # conv2d with dilation generates very complex nodes, so handle it in special
        if self.in_conv2d_scope(node.name):
            if node.op == 'Conv2D':
                self.dump_complex_conv2d_to_file(node, f)
            continue
        if node.op == 'Conv2D':
            self.dump_simple_conv2d_to_file(node, f)
        elif node.op == 'DepthToSpace':
            self.dump_depth2space_to_file(node, f)
        elif node.op == 'MirrorPad':
            self.dump_mirrorpad_to_file(node, f)
        elif node.op == 'Maximum':
            self.dump_maximum_to_file(node, f)
        elif node.op in self.mathbin2code:
            # Sub / Add / Mul / RealDiv share one serializer.
            self.dump_mathbinary_to_file(node, f)
21,
16,
21,
3,
1435959644
] |
def dump_to_file(self):
    """Write the full model file: magic header, layers, operands, counts."""
    with open(self.outfile, 'wb') as f:
        # Magic string followed by the format version as two uint32s.
        f.write(header.str.encode('utf-8'))
        np.array([header.major, header.minor], dtype=np.uint32).tofile(f)
        self.dump_layers_to_file(f)
        self.dump_operands_to_file(f)
        counts = [self.layer_number, len(self.name_operand_dict)]
        np.array(counts, dtype=np.uint32).tofile(f)
21,
16,
21,
3,
1435959644
] |
def generate_output_names(self):
    """Collect names of nodes that feed no other node (graph outputs).

    A node is an output iff its name never appears in another node's
    input list. Uses a set for O(1) membership (the original scanned a
    list per node, O(n*m)), and avoids shadowing the builtin `input`.
    """
    used_names = set()
    for node in self.nodes:
        used_names.update(node.input)
    for node in self.nodes:
        if node.name not in used_names:
            self.output_names.append(node.name)
21,
16,
21,
3,
1435959644
] |
def generate_edges(self):
    """Build a name -> [consumer nodes] adjacency map over all node inputs."""
    for node in self.nodes:
        for input_name in node.input:
            # setdefault creates the consumer list on first sight.
            self.edges.setdefault(input_name, []).append(node)
21,
16,
21,
3,
1435959644
] |
def get_scope_name(name):
    """Return everything before the last '/', or '' when there is none."""
    # rpartition yields ('', '', name) when no separator exists,
    # matching the original rfind == -1 -> '' behavior.
    scope, _sep, _leaf = name.rpartition('/')
    return scope
21,
16,
21,
3,
1435959644
] |
def generate_conv2d_scope_info(self):
    """Discover conv2d sub-block scopes and each block's external input name."""
    # mostly, conv2d is a sub block in graph, get the scope name
    for node in self.nodes:
        if node.op != 'Conv2D':
            continue
        scope = TFConverter.get_scope_name(node.name)
        # for the case tf.nn.conv2d is called directly
        if scope == '':
            continue
        # for the case tf.nn.conv2d is called within a scope
        if scope + '/kernel' not in self.name_node_dict:
            continue
        self.conv2d_scope_names.add(scope)
    # get the input name to the conv2d sub block: an input whose own
    # scope differs from the block's scope comes from outside the block.
    for node in self.nodes:
        scope = TFConverter.get_scope_name(node.name)
        if scope not in self.conv2d_scope_names:
            continue
        if node.op in ('Conv2D', 'Shape'):
            for inp in node.input:
                if TFConverter.get_scope_name(inp) != scope:
                    self.conv2d_scopename_inputname_dict[scope] = inp
21,
16,
21,
3,
1435959644
] |
def node_name(node):
    """
    Convenience function: Returns node.id, or node.name, or None

    Prefers a truthy ``id`` attribute, falling back to ``name``. The
    original ``hasattr and ... or ...`` chain leaked ``False`` (and
    could not distinguish a missing attribute) instead of the ``None``
    promised by this docstring; getattr with a None default fixes that
    while keeping the truthy-fallback behavior.
    """
    return getattr(node, 'id', None) or getattr(node, 'name', None)
252,
22,
252,
11,
1390080851
] |
def __init__(self, name, source):
    """Record a binding of *name* created by AST node *source*."""
    self.name = name
    self.source = source
    # Flipped truthy once the name is actually referenced.
    self.used = False
252,
22,
252,
11,
1390080851
] |
def __repr__(self):
    """Debug repr including the binding's source line number."""
    details = (self.__class__.__name__, self.name,
               self.source.lineno, id(self))
    return '<%s object %r from line %r at 0x%x>' % details
252,
22,
252,
11,
1390080851
] |
def __init__(self, name, source):
    """Bind only the top-level module name; keep the dotted path in fullName."""
    self.fullName = name
    # 'pkg.sub.mod' binds the name 'pkg' in the importing scope.
    base_name = name.split('.')[0]
    super(Importation, self).__init__(base_name, source)
252,
22,
252,
11,
1390080851
] |
def __init__(self, name, source):
    """A function-definition binding that also captures its call signature."""
    super(FunctionDefinition, self).__init__(name, source)
    # Signature is derived from the same AST node as the binding.
    self.signature = FunctionSignature(source)
252,
22,
252,
11,
1390080851
] |
def names(self):
    """Return a list of the names referenced by this binding."""
    # Only a literal list (e.g. __all__ = ['a', 'b']) yields names;
    # anything else is treated as empty.
    if not isinstance(self.source, ast.List):
        return []
    return [elt.s for elt in self.source.elts if isinstance(elt, ast.Str)]
252,
22,
252,
11,
1390080851
] |
def __repr__(self):
    """Repr showing the concrete scope class and its bindings dict."""
    # dict.__repr__ is called directly to show raw contents even if a
    # subclass overrides __repr__ elsewhere.
    parts = (type(self).__name__, id(self), dict.__repr__(self))
    return '<%s at 0x%x %s>' % parts
252,
22,
252,
11,
1390080851
] |
def __init__(self):
    """Module scope: seeds globals from the always-used name set."""
    Scope.__init__(self)
    # copy() so mutating self.globals never touches the shared class-level set.
    self.globals = self.always_used.copy()
252,
22,
252,
11,
1390080851
] |
def __init__(self, node):
    """Capture the argument signature of a function-def AST *node*."""
    self.decorated = bool(any(node.decorator_list))
    # NOTE(review): argument_names / kw_only_argument_names /
    # kw_only_default_count look like project helpers patched onto the
    # ast module rather than stdlib ast — confirm against frosted's ast shim.
    self.argument_names = ast.argument_names(node)
    self.default_count = len(node.args.defaults)
    self.kw_only_argument_names = ast.kw_only_argument_names(node)
    self.kw_only_default_count = ast.kw_only_default_count(node)
    # vararg/kwarg presence (*args / **kwargs) as plain booleans.
    self.has_var_arg = node.args.vararg is not None
    self.has_kw_arg = node.args.kwarg is not None
252,
22,
252,
11,
1390080851
] |
def maxArgumentCount(self):
    """Upper bound on the number of positional arguments accepted."""
    names = self.argument_names
    return len(names)
252,
22,
252,
11,
1390080851
] |
def __init__(self, tree, filename='(none)', builtins=None, ignore_lines=(), **settings):
    """Walk *tree* and collect checker messages into self.messages.

    Args:
        tree: parsed AST of the module being checked.
        filename: name used in reported messages.
        builtins: optional extra names to treat as builtins.
        ignore_lines: line numbers whose messages are suppressed.
        **settings: checker options (e.g. ignore_frosted_errors, verbose).
    """
    self.settings = settings
    self.ignore_errors = settings.get('ignore_frosted_errors', [])
    self.ignore_lines = ignore_lines
    # Per-file ignore list, keyed by basename, extends the global list.
    file_specific_ignores = settings.get('ignore_frosted_errors_for_' + (os.path.basename(filename) or ""), None)
    if file_specific_ignores:
        self.ignore_errors += file_specific_ignores
    self._node_handlers = {}
    # Function bodies and assignments are deferred so later module-level
    # code can still add bindings before they are analyzed.
    self._deferred_functions = []
    self._deferred_assignments = []
    self.dead_scopes = []
    self.messages = []
    self.filename = filename
    if builtins:
        self.frosted_builtins = self.frosted_builtins.union(builtins)
    self.scope_stack = [ModuleScope()]
    self.except_handlers = [()]
    self.futures_allowed = True
    self.root = tree
    self.handle_children(tree)
    # Run deferred work in two waves: function bodies first, then assignments.
    self.run_deferred(self._deferred_functions)
    self._deferred_functions = None
    self.run_deferred(self._deferred_assignments)
    self._deferred_assignments = None
    # Drop everything but the module scope, then retire it and report.
    del self.scope_stack[1:]
    self.pop_scope()
    self.check_dead_scopes()
    self.check_plugins()
252,
22,
252,
11,
1390080851
] |
def defer_function(self, callable):
    """Schedule a function handler to be called just before completion.
    This is used for handling function bodies, which must be deferred because code later in the file might modify
    the global scope. When 'callable' is called, the scope at the time this is called will be restored, however it
    will contain any new bindings added to it.
    """
    # Snapshot the scope stack (shallow copy) and the current offset so
    # the handler later runs against the scopes live right now.
    snapshot = (callable, self.scope_stack[:], self.offset)
    self._deferred_functions.append(snapshot)
252,
22,
252,
11,
1390080851
] |
def run_deferred(self, deferred):
    """Run the callables in deferred using their associated scope stack."""
    for entry in deferred:
        handler, captured_scope, captured_offset = entry
        # Restore the environment captured at defer time, then invoke.
        self.scope_stack = captured_scope
        self.offset = captured_offset
        handler()
252,
22,
252,
11,
1390080851
] |
def scope(self):
    """The innermost (current) scope."""
    top = self.scope_stack[-1]
    return top
252,
22,
252,
11,
1390080851
] |
def check_dead_scopes(self):
    """Look at scopes which have been fully examined and report names in
    them which were imported but unused."""
    for scope in self.dead_scopes:
        # An ExportBinding for __all__ declares the module's public API.
        export = isinstance(scope.get('__all__'), ExportBinding)
        if export:
            all = scope['__all__'].names()
            # Look for possible mistakes in the export list
            if not scope.importStarred and os.path.basename(self.filename) != '__init__.py':
                undefined = set(all) - set(scope)
                for name in undefined:
                    self.report(messages.UndefinedExport, scope['__all__'].source, name)
        else:
            all = []
        # Look for imported names that aren't used without checking imports in namespace definition
        # (names listed in __all__ count as used even if never referenced).
        for importation in scope.values():
            if isinstance(importation, Importation) and not importation.used and importation.name not in all:
                self.report(messages.UnusedImport, importation.source, importation.name)
252,
22,
252,
11,
1390080851
] |
def push_function_scope(self):  # XXX Deprecated
    """Deprecated shim: push a fresh FunctionScope onto the scope stack."""
    self.push_scope(FunctionScope)
252,
22,
252,
11,
1390080851
] |
def report(self, message_class, *args, **kwargs):
    """Build a message and record it unless filtered by settings.

    A message is dropped when its family code (e.g. 'E100' for 'E101'),
    its exact code, or its bare error number appears in ignore_errors,
    or when its line is in ignore_lines.
    """
    error_code = message_class.error_code
    ignored = (error_code[:2] + "00" in self.ignore_errors
               or error_code in self.ignore_errors
               or str(message_class.error_number) in self.ignore_errors)
    if not ignored:
        kwargs['verbose'] = self.settings.get('verbose')
        message = message_class(self.filename, *args, **kwargs)
        if message.lineno not in self.ignore_lines:
            self.messages.append(message)
252,
22,
252,
11,
1390080851
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.