function stringlengths 11 56k | repo_name stringlengths 5 60 | features list |
|---|---|---|
def time(self):
    """Read-only accessor for the cached timestamp held in ``_time``."""
    cached = self._time
    return cached
6,
6,
6,
1,
1434464093
] |
def __init__(self, app, settings):
    """Set up controller state, keystore, reader watcher and refresh timer.

    NOTE(review): depends on Qt (QtCore.QMutex, startTimer) and project
    helpers get_keystore/observe_reader/Timer not visible here -- their
    behavior is assumed.
    """
    super(GuiController, self).__init__()
    self._app = app
    self._settings = settings
    self._needs_read = False  # set when a credential re-read was deferred
    self._reader = None       # currently attached reader, None if absent
    self._creds = None        # cached credential list; None = unknown
    self._lock = QtCore.QMutex()
    self._keystore = get_keystore()
    self._current_device_has_ccid_disabled = False
    self.timer = Timer(TIME_PERIOD)
    # Watch for reader arrival/removal; _on_reader handles state changes.
    self.watcher = observe_reader(self.reader_name, self._on_reader)
    self.startTimer(3000)  # 3 s Qt housekeeping tick (timer id unused)
    self.timer.time_changed.connect(self.refresh_codes)
6,
6,
6,
1,
1434464093
] |
def reader_name(self):
    """Configured smartcard reader name; falls back to ``'Yubikey'``."""
    settings = self._settings
    return settings.get('reader', 'Yubikey')
6,
6,
6,
1,
1434464093
] |
def slot1(self):
    """Credential type configured for OTP slot 1 (0 when unset)."""
    settings = self._settings
    return settings.get('slot1', 0)
6,
6,
6,
1,
1434464093
] |
def slot2(self):
    """Credential type configured for OTP slot 2 (0 when unset)."""
    settings = self._settings
    return settings.get('slot2', 0)
6,
6,
6,
1,
1434464093
] |
def mute_ccid_disabled_warning(self):
    """Whether the "CCID disabled" warning is muted (0 = not muted)."""
    settings = self._settings
    return settings.get('mute_ccid_disabled_warning', 0)
6,
6,
6,
1,
1434464093
] |
def mute_ccid_disabled_warning(self, value):
    """Persist the mute flag for the CCID-disabled warning."""
    settings = self._settings
    settings['mute_ccid_disabled_warning'] = value
6,
6,
6,
1,
1434464093
] |
def grab_lock(self, lock=None, try_lock=False):
    """Return `lock` when one is already held, otherwise acquire the
    controller mutex via MutexLocker (non-blocking when `try_lock`)."""
    if lock:
        return lock
    return MutexLocker(self._lock, False).lock(try_lock)
6,
6,
6,
1,
1434464093
] |
def otp_enabled(self):
    """True when OTP mode is supported and at least one slot is in use."""
    any_slot = bool(self.slot1 or self.slot2)
    return self.otp_supported and any_slot
6,
6,
6,
1,
1434464093
] |
def credentials(self):
    """Most recently read credential list (None until a read completes)."""
    current = self._creds
    return current
6,
6,
6,
1,
1434464093
] |
def get_capabilities(self):
    """Probe the attached key: prefer CCID, fall back to legacy OTP slots.

    Returns a Capabilities tuple; (0, 0, 0) stands for "version unknown".
    """
    assert self.grab_lock()  # must acquire the controller lock for probing
    ccid_dev = self.watcher.open()
    if ccid_dev:
        dev = YubiOathCcid(ccid_dev)
        return Capabilities(True, None, dev.version)
    # No CCID interface: try the legacy OTP applet, report slot status only.
    legacy = self.open_otp()
    if legacy:
        return Capabilities(None, legacy.slot_status(), (0, 0, 0))
    return Capabilities(None, None, (0, 0, 0))
6,
6,
6,
1,
1434464093
] |
def _on_reader(self, watcher, reader, lock=None):
    """React to smartcard reader arrival/removal.

    NOTE(review): indentation reconstructed from the flattened dump --
    confirm nesting against upstream yubioath-desktop.
    """
    if reader:
        if self._reader is None:
            # New reader appeared: remember it and start with empty creds.
            self._reader = reader
            self._creds = []
            if is_minimized(self._app.window):
                # Defer the (slow) device read until the window is shown.
                self._needs_read = True
            else:
                ccid_dev = watcher.open()
                if ccid_dev:
                    std = YubiOathCcid(ccid_dev)
                    self._app.worker.post_fg((self._init_std, std))
                else:
                    self._needs_read = True
        elif self._needs_read:
            # Same reader, pending deferred read: do it now.
            self.refresh_codes(self.timer.time)
    else:
        # Reader removed: clear state and tell the UI.
        self._reader = None
        self._creds = None
        self.refreshed.emit()
6,
6,
6,
1,
1434464093
] |
def _await(self):
    """Drop cached credentials so the UI shows a waiting state."""
    self._creds = None
6,
6,
6,
1,
1434464093
] |
def _set_creds(self, creds):
    """Install a freshly read credential list, merging into the cache when
    the set of names is unchanged (keeps existing codes that are stale).

    NOTE(review): indentation reconstructed from the flattened dump.
    """
    if creds:
        creds = [self.wrap_credential(c) for c in creds]
        if self._creds and names(creds) == names(self._creds):
            # Same credential names as cached: update codes in place.
            entry_map = dict((c.cred.name, c) for c in creds)
            for entry in self._creds:
                cred = entry.cred
                code = entry_map[cred.name].code
                if code.code:
                    entry.code = code
                elif cred.oath_type != entry_map[cred.name].cred.oath_type:
                    # Type changed under the same name: full replace below.
                    break
            else:
                return  # merged in place, nothing else to do
    elif self._reader and self._needs_read and self._creds:
        # Empty read while a deferred read is pending: keep the cache.
        return
    self._creds = creds
    self.refreshed.emit()
6,
6,
6,
1,
1434464093
] |
def read_slot_otp(self, cred, timestamp=None, use_touch=False):
    # Delegate to the base implementation but always pass use_touch=False:
    # the GUI flow deliberately never triggers a touch prompt here.
    return super(GuiController, self).read_slot_otp(cred, timestamp, False)
6,
6,
6,
1,
1434464093
] |
def refresh_codes(self, timestamp=None, lock=None, std=None):
    """Re-read OATH codes in a background worker, unless deferred.

    NOTE(review): indentation reconstructed from the flattened dump.
    """
    if not self._reader and self.watcher.reader:
        # A reader is present but not yet adopted: route through _on_reader.
        return self._on_reader(self.watcher, self.watcher.reader, lock)
    elif is_minimized(self._app.window):
        # Don't talk to the device while minimized; mark a pending read.
        self._needs_read = True
        return
    lock = self.grab_lock(lock, True)  # try-lock: skip if busy
    if not lock:
        return
    self._app.worker.post_bg((self._refresh_codes_locked, timestamp, lock,
                              std))
6,
6,
6,
1,
1434464093
] |
def refresh_otp():
    # Nested helper (closure): `self` and `timestamp` come from the
    # enclosing method, which is not visible in this excerpt.
    lock = self.grab_lock(try_lock=True)
    if lock:
        # Read slot-based (legacy OTP) credentials without touch prompts.
        read = self.read_creds(
            None, self.slot1, self.slot2, timestamp, False)
        self._set_creds(read)
6,
6,
6,
1,
1434464093
] |
def add_cred(self, *args, **kwargs):
    """Add a credential on the CCID device, then invalidate and refresh.

    NOTE(review): indentation reconstructed from the flattened dump.
    """
    lock = self.grab_lock()
    ccid_dev = self.watcher.open()
    if ccid_dev:
        dev = YubiOathCcid(ccid_dev)
        if self.unlock(dev):
            super(GuiController, self).add_cred(dev, *args, **kwargs)
            self._creds = None  # force a full re-read
            self.refresh_codes(lock=lock)
6,
6,
6,
1,
1434464093
] |
def delete_cred(self, name):
    """Delete credential `name` from the CCID device, then refresh.

    NOTE(review): indentation reconstructed from the flattened dump.
    """
    lock = self.grab_lock()
    ccid_dev = self.watcher.open()
    if ccid_dev:
        dev = YubiOathCcid(ccid_dev)
        if self.unlock(dev):
            super(GuiController, self).delete_cred(dev, name)
            self._creds = None  # force a full re-read
            self.refresh_codes(lock=lock)
6,
6,
6,
1,
1434464093
] |
def set_password(self, password, remember=False):
    """Set a new device password; store the derived key in the keystore
    (persistently when `remember`).

    NOTE(review): indentation reconstructed from the flattened dump.
    """
    assert self.grab_lock()
    ccid_dev = self.watcher.open()
    if ccid_dev:
        dev = YubiOathCcid(ccid_dev)
        if self.unlock(dev):
            key = super(GuiController, self).set_password(dev, password)
            self._keystore.put(dev.id, key, remember)
6,
6,
6,
1,
1434464093
] |
def make(self, workDir):
    """Repackage a SuperSU flashable zip for inclusion in an update package.

    Extracts the configured SuperSU zip, keeps only the requested archs
    (and optionally drops Superuser.apk), rewrites the update-binary
    shebang when busybox is bundled, and registers the resulting files
    plus a post-install script in the update config.

    NOTE(review): indentation reconstructed from a flattened dump --
    confirm block nesting against upstream tgalal/inception.
    """
    supersuZipProp = self.getTargetConfigProperty("root.methods.supersu.path")
    assert supersuZipProp.getValue(), "Must set %s to the supersu zip file" % supersuZipProp.getKey()
    includeApk = self.getTargetConfigValue("root.methods.supersu.include_apk", True)
    includeArchs = set(self.getTargetConfigValue("root.methods.supersu.include_archs", []))
    superSuTargetRelativePath = "supersu"
    supersuTargetPath = os.path.join(workDir, superSuTargetRelativePath)
    postinstFilePath = os.path.join(supersuTargetPath, "supersu_installer_includer")
    supersuOriginalUpdatescriptPath = os.path.join(supersuTargetPath, "supersu_installer.sh")
    newSuperSuZipPath = os.path.join(supersuTargetPath, "supersu.zip")
    # Paths where the recovery will find the repacked zip and installer.
    superSuZipTmpExtract = "/tmp/supersu.zip"
    superSuUpdatescriptTmpExtract = "/tmp/supersu_installer.sh"
    superuserApkPath = os.path.join("common", "Superuser.apk")
    with self.newtmpWorkDir() as tmpDir:
        with zipfile.ZipFile(supersuZipProp.resolveAsRelativePath(), "r") as z:
            z.extractall(tmpDir)
        os.mkdir(os.path.join(workDir, "supersu"))
        # Every top-level dir except common/META-INF is an arch payload.
        archs = set(
            [f for f in os.listdir(tmpDir) if not f in ("common", "META-INF")]
        )
        unsupportedArchs = includeArchs.difference(archs)
        if len(unsupportedArchs):
            unsupportedArchs = list(unsupportedArchs)
            raise ValueError("Can't find archs: [%s] in supersu" % (", ".join(unsupportedArchs)))
        targetArchs = includeArchs if len(includeArchs) else archs
        newSuperSuZip = zipfile.ZipFile(newSuperSuZipPath, "w")
        for arch in targetArchs:
            self.__addDirToZip(newSuperSuZip, os.path.join(tmpDir, arch), arch)
        if not includeApk:
            os.remove(os.path.join(tmpDir, superuserApkPath))
        self.__addDirToZip(newSuperSuZip, os.path.join(tmpDir, "common"), "common")
        if self.getMaker().getConfig().isMakeable("update.busybox"):
            #process file, with busybox onboard in assumption
            with open(os.path.join(tmpDir, "META-INF/com/google/android/update-binary"), "r") as f:
                with open(supersuOriginalUpdatescriptPath, "w") as targetF:
                    for l in f.readlines():
                        if l.startswith("#!"):
                            # Rewrite the shebang to the configured shell.
                            targetF.write("#!" + self.getTargetConfigValue("root.methods.supersu.sh", "/system/bin/sh") + "\n")
                        else:
                            targetF.write(l)
        else:
            shutil.copy(os.path.join(tmpDir, "META-INF/com/google/android/update-binary"), supersuOriginalUpdatescriptPath)
        # Edify snippet that runs the extracted installer against the zip.
        postInstscript = "ui_print(\"Installing SuperSU..\");\n"
        postInstscript += "run_program(\"%s\", \"1\", \"stdout\", \"%s\");" % (superSuUpdatescriptTmpExtract, superSuZipTmpExtract)
        with open(postinstFilePath, "w") as postinstFile:
            postinstFile.write(postInstscript)
        superSuConfig = supersuZipProp.getConfig()
        currPostInst = superSuConfig.get("script.post", [], directOnly=True)
        currPostInst.append(postinstFilePath)
        superSuConfig.set("update.script.post", currPostInst)
        # NOTE(review): "\." in a plain string is an invalid escape
        # (DeprecationWarning in py3); presumably meant to regex-escape
        # dots in the config key -- confirm intended semantics.
        self.setValue("update.files.add." + newSuperSuZipPath.replace(workDir, "").replace(".", "\.") , {
            "destination": superSuZipTmpExtract
        })
        self.setValue("update.files.add." + supersuOriginalUpdatescriptPath.replace(workDir, "").replace(".", "\."), {
            "destination": superSuUpdatescriptTmpExtract,
            "mode": "0755",
            "uid": "0",
            "gid": "0"
        })
107,
18,
107,
15,
1424985272
] |
def test_slugify():
    # Pins slugify() behavior (hyperspy helper, not visible here):
    # plain strings pass through, spaces -> underscores, non-strings are
    # stringified, non-ASCII art is stripped, and valid_variable_name=True
    # prefixes leading digits with "Number_".
    assert slugify("a") == "a"
    assert slugify("1a") == "1a"
    assert slugify("1") == "1"
    assert slugify("a a") == "a_a"
    assert slugify(42) == "42"
    assert slugify(3.14159) == "314159"
    assert slugify("├── Node1") == "Node1"
    assert slugify("a", valid_variable_name=True) == "a"
    assert slugify("1a", valid_variable_name=True) == "Number_1a"
    assert slugify("1", valid_variable_name=True) == "Number_1"
    assert slugify("a", valid_variable_name=False) == "a"
    assert slugify("1a", valid_variable_name=False) == "1a"
    assert slugify("1", valid_variable_name=False) == "1"
415,
201,
415,
238,
1313759676
] |
def test_is_hyperspy_signal():
    # A Signal1D instance is recognized as a hyperspy signal; an arbitrary
    # object is not. (signals / is_hyperspy_signal come from hyperspy.)
    s = signals.Signal1D(np.zeros((5, 5, 5)))
    p = object()
    assert is_hyperspy_signal(s) is True
    assert is_hyperspy_signal(p) is False
415,
201,
415,
238,
1313759676
] |
def test_str2num():
    # str2num parses tab/space-separated rows into a 2-D numeric array;
    # compared elementwise against the expected numpy array.
    assert (
        str2num("2.17\t 3.14\t 42\n 1\t 2\t 3")
        == np.array([[2.17, 3.14, 42.0], [1.0, 2.0, 3.0]])
    ).all()
415,
201,
415,
238,
1313759676
] |
def test_fsdict():
    # fsdict(keys, value, target) builds nested dicts along `keys`, merging
    # into existing branches; an empty-string key stores at the top level.
    parrot = {}
    fsdict(
        ["This", "is", "a", "dead", "parrot"], "It has gone to meet its maker", parrot
    )
    fsdict(["This", "parrot", "is", "no", "more"], "It is an ex parrot", parrot)
    fsdict(
        ["This", "parrot", "has", "seized", "to", "be"],
        "It is pushing up the daisies",
        parrot,
    )
    fsdict([""], "I recognize a dead parrot when I see one", parrot)
    assert (
        parrot["This"]["is"]["a"]["dead"]["parrot"] == "It has gone to meet its maker"
    )
    assert parrot["This"]["parrot"]["is"]["no"]["more"] == "It is an ex parrot"
    assert (
        parrot["This"]["parrot"]["has"]["seized"]["to"]["be"]
        == "It is pushing up the daisies"
    )
    assert parrot[""] == "I recognize a dead parrot when I see one"
415,
201,
415,
238,
1313759676
] |
def test_shorten_name():
    # Names longer than the limit are cut to (limit - 2) chars plus "..".
    # (The "soemthing" typo is part of the fixture string -- keep as-is.)
    assert (
        shorten_name("And now for soemthing completely different.", 16)
        == "And now for so.."
    )
415,
201,
415,
238,
1313759676
] |
def atransition(self, match, context, next_state):
    # Template illustrating the docutils transition-method signature:
    # takes the regex match, application context and default next state;
    # returns (context, next_state, result-list). Not real logic.
    # do something
    result = [...]  # a list
    return context, next_state, result
    # context, next_state may be altered
# context, next_state may be altered | mikel-egana-aranguren/SADI-Galaxy-Docker | [
1,
3,
1,
1,
1417087373
] |
def __init__(self, state_classes, initial_state, debug=0):
    """
    Initialize a `StateMachine` object; add state objects.

    Parameters:

    - `state_classes`: a list of `State` (sub)classes.
    - `initial_state`: a string, the class name of the initial state.
    - `debug`: a boolean; produce verbose output if true (nonzero).
    """
    self.input_lines = None
    """`StringList` of input lines (without newlines).
    Filled by `self.run()`."""

    self.input_offset = 0
    """Offset of `self.input_lines` from the beginning of the file."""

    self.line = None
    """Current input line."""

    self.line_offset = -1
    """Current input line offset from beginning of `self.input_lines`."""

    self.debug = debug
    """Debugging mode on/off."""

    self.initial_state = initial_state
    """The name of the initial state (key to `self.states`)."""

    self.current_state = initial_state
    """The name of the current state (key to `self.states`)."""

    self.states = {}
    """Mapping of {state_name: State_object}."""

    self.add_states(state_classes)

    self.observers = []
    """List of bound methods or functions to call whenever the current
    line changes.  Observers are called with one argument, ``self``.
    Cleared at the end of `run()`."""
1,
3,
1,
1,
1417087373
] |
def run(self, input_lines, input_offset=0, context=None,
        input_source=None, initial_state=None):
    """
    Run the state machine on `input_lines`. Return results (a list).

    Reset `self.line_offset` and `self.current_state`. Run the
    beginning-of-file transition. Input one line at a time and check for a
    matching transition. If a match is found, call the transition method
    and possibly change the state. Store the context returned by the
    transition method to be passed on to the next transition matched.
    Accumulate the results returned by the transition methods in a list.
    Run the end-of-file transition. Finally, return the accumulated
    results.

    Parameters:

    - `input_lines`: a list of strings without newlines, or `StringList`.
    - `input_offset`: the line offset of `input_lines` from the beginning
      of the file.
    - `context`: application-specific storage.
    - `input_source`: name or path of source of `input_lines`.
    - `initial_state`: name of initial state.

    NOTE: Python 2 code (``print >>`` and ``except E, e`` syntax);
    indentation reconstructed from the flattened dump.
    """
    self.runtime_init()
    if isinstance(input_lines, StringList):
        self.input_lines = input_lines
    else:
        self.input_lines = StringList(input_lines, source=input_source)
    self.input_offset = input_offset
    self.line_offset = -1
    self.current_state = initial_state or self.initial_state
    if self.debug:
        print >>sys.stderr, (
            '\nStateMachine.run: input_lines (line_offset=%s):\n| %s'
            % (self.line_offset, '\n| '.join(self.input_lines)))
    transitions = None
    results = []
    state = self.get_state()
    try:
        if self.debug:
            print >>sys.stderr, ('\nStateMachine.run: bof transition')
        context, result = state.bof(context)
        results.extend(result)
        while 1:
            try:
                try:
                    self.next_line()
                    if self.debug:
                        source, offset = self.input_lines.info(
                            self.line_offset)
                        print >>sys.stderr, (
                            '\nStateMachine.run: line (source=%r, '
                            'offset=%r):\n| %s'
                            % (source, offset, self.line))
                    context, next_state, result = self.check_line(
                        context, state, transitions)
                except EOFError:
                    # End of input: run the state's eof hook and stop.
                    if self.debug:
                        print >>sys.stderr, (
                            '\nStateMachine.run: %s.eof transition'
                            % state.__class__.__name__)
                    result = state.eof(context)
                    results.extend(result)
                    break
                else:
                    results.extend(result)
            except TransitionCorrection, exception:
                # Retry the same line with a single forced transition.
                self.previous_line()  # back up for another try
                transitions = (exception.args[0],)
                if self.debug:
                    print >>sys.stderr, (
                        '\nStateMachine.run: TransitionCorrection to '
                        'state "%s", transition %s.'
                        % (state.__class__.__name__, transitions[0]))
                continue
            except StateCorrection, exception:
                # Retry the same line in a different state (and
                # optionally a single forced transition).
                self.previous_line()  # back up for another try
                next_state = exception.args[0]
                if len(exception.args) == 1:
                    transitions = None
                else:
                    transitions = (exception.args[1],)
                if self.debug:
                    print >>sys.stderr, (
                        '\nStateMachine.run: StateCorrection to state '
                        '"%s", transition %s.'
                        % (next_state, transitions[0]))
            else:
                transitions = None
            state = self.get_state(next_state)
    except:
        if self.debug:
            self.error()
        raise
    self.observers = []
    return results
1,
3,
1,
1,
1417087373
] |
def next_line(self, n=1):
    """Advance `n` lines, load the new line into ``self.line`` and return
    it.  Raises EOFError (with ``self.line = None``) past the last line.
    Observers are always notified, even on EOF."""
    try:
        self.line_offset += n
        try:
            current = self.input_lines[self.line_offset]
        except IndexError:
            self.line = None
            raise EOFError
        self.line = current
        return current
    finally:
        self.notify_observers()
1,
3,
1,
1,
1417087373
] |
def at_eof(self):
    """Return 1 (truthy) if the input is at or past end-of-file."""
    last_index = len(self.input_lines) - 1
    return self.line_offset >= last_index
1,
3,
1,
1,
1417087373
] |
def previous_line(self, n=1):
    """Step back `n` lines and return the new current line.
    ``self.line`` becomes None when stepping before the first line."""
    self.line_offset -= n
    if self.line_offset >= 0:
        self.line = self.input_lines[self.line_offset]
    else:
        self.line = None
    self.notify_observers()
    return self.line
1,
3,
1,
1,
1417087373
] |
def get_source(self, line_offset):
    """Return the source of the line at absolute offset `line_offset`."""
    relative_offset = line_offset - self.input_offset
    return self.input_lines.source(relative_offset)
1,
3,
1,
1,
1417087373
] |
def abs_line_number(self):
    """1-based line number of the current line in the original source."""
    return 1 + self.input_offset + self.line_offset
1,
3,
1,
1,
1417087373
] |
def insert_input(self, input_lines, source):
    # Insert `input_lines` just after the current line, bracketed by two
    # empty "internal padding" lines so the inserted block is isolated
    # from surrounding text.  Offsets: the after-padding is placed at
    # len(input_lines), the before-padding at -1 (relative to `source`).
    # Note both padding inserts use line_offset + 1: the second insert
    # pushes the first one down, then the real block lands between them.
    self.input_lines.insert(self.line_offset + 1, '',
                            source='internal padding after '+source,
                            offset=len(input_lines))
    self.input_lines.insert(self.line_offset + 1, '',
                            source='internal padding before '+source,
                            offset=-1)
    self.input_lines.insert(self.line_offset + 2,
                            StringList(input_lines, source))
1,
3,
1,
1,
1417087373
] |
def check_line(self, context, state, transitions=None):
    """
    Examine one line of input for a transition match & execute its method.

    Parameters:

    - `context`: application-dependent storage.
    - `state`: a `State` object, the current state.
    - `transitions`: an optional ordered list of transition names to try,
      instead of ``state.transition_order``.

    Return the values returned by the transition method:

    - context: possibly modified from the parameter `context`;
    - next state name (`State` subclass name);
    - the result output of the transition, a list.

    When there is no match, ``state.no_match()`` is called and its return
    value is returned.

    NOTE: Python 2 code; indentation reconstructed from the flattened dump.
    """
    if transitions is None:
        transitions = state.transition_order
    state_correction = None  # NOTE(review): unused here; dead local?
    if self.debug:
        print >>sys.stderr, (
            '\nStateMachine.check_line: state="%s", transitions=%r.'
            % (state.__class__.__name__, transitions))
    # First matching transition wins, in `transitions` order.
    for name in transitions:
        pattern, method, next_state = state.transitions[name]
        match = pattern.match(self.line)
        if match:
            if self.debug:
                print >>sys.stderr, (
                    '\nStateMachine.check_line: Matched transition '
                    '"%s" in state "%s".'
                    % (name, state.__class__.__name__))
            return method(match, context, next_state)
    else:
        # for/else: no transition matched this line.
        if self.debug:
            print >>sys.stderr, (
                '\nStateMachine.check_line: No match in state "%s".'
                % state.__class__.__name__)
        return state.no_match(context, transitions)
1,
3,
1,
1,
1417087373
] |
def add_states(self, state_classes):
    """Register each `State` subclass in `state_classes` via add_state."""
    for cls in state_classes:
        self.add_state(cls)
1,
3,
1,
1,
1417087373
] |
def error(self):
    """Report details of the current exception to stderr (Python 2)."""
    # Note: `type`, `line` shadow builtins -- kept as in the original.
    type, value, module, line, function = _exception_data()
    print >>sys.stderr, '%s: %s' % (type, value)
    print >>sys.stderr, 'input line %s' % (self.abs_line_number())
    print >>sys.stderr, ('module %s, line %s, function %s'
                         % (module, line, function))
1,
3,
1,
1,
1417087373
] |
def detach_observer(self, observer):
    """Stop notifying `observer` of current-line changes.
    Raises ValueError if it was never attached (list.remove semantics)."""
    self.observers.remove(observer)
1,
3,
1,
1,
1417087373
] |
def __init__(self, state_machine, debug=0):
    """
    Initialize a `State` object; make & add initial transitions.

    Parameters:

    - `statemachine`: the controlling `StateMachine` object.
    - `debug`: a boolean; produce verbose output if true (nonzero).
    """
    self.transition_order = []
    """A list of transition names in search order."""

    self.transitions = {}
    """
    A mapping of transition names to 3-tuples containing
    (compiled_pattern, transition_method, next_state_name). Initialized as
    an instance attribute dynamically (instead of as a class attribute)
    because it may make forward references to patterns and methods in this
    or other classes.
    """

    self.add_initial_transitions()

    self.state_machine = state_machine
    """A reference to the controlling `StateMachine` object."""

    self.debug = debug
    """Debugging mode on/off."""

    # Default the nested state machine to the controller's own class,
    # recursing with only this state, unless a subclass overrode them.
    if self.nested_sm is None:
        self.nested_sm = self.state_machine.__class__
    if self.nested_sm_kwargs is None:
        self.nested_sm_kwargs = {'state_classes': [self.__class__],
                                 'initial_state': self.__class__.__name__}
1,
3,
1,
1,
1417087373
] |
def unlink(self):
    """Break the back-reference to the state machine (helps GC of cycles)."""
    self.state_machine = None
1,
3,
1,
1,
1417087373
] |
def add_transitions(self, names, transitions):
    """
    Prepend `names` to the transition search order and merge their
    definitions from the `transitions` mapping.

    Exceptions: `DuplicateTransitionError` when a name is already
    registered, `UnknownTransitionError` when `transitions` lacks a name.
    """
    # Validate everything before mutating any state.
    for transition_name in names:
        if transition_name in self.transitions:
            raise DuplicateTransitionError(transition_name)
        if transition_name not in transitions:
            raise UnknownTransitionError(transition_name)
    self.transition_order[:0] = names
    self.transitions.update(transitions)
1,
3,
1,
1,
1417087373
] |
def remove_transition(self, name):
    """
    Remove a transition by `name`.

    Exception: `UnknownTransitionError`.
    """
    try:
        del self.transitions[name]
        self.transition_order.remove(name)
    except (KeyError, ValueError):
        # KeyError: `name` not in the mapping; ValueError: missing from
        # the order list.  The original bare ``except:`` swallowed every
        # exception (even KeyboardInterrupt/SystemExit) -- narrowed to
        # the two expected lookup failures.
        raise UnknownTransitionError(name)
1,
3,
1,
1,
1417087373
] |
def make_transitions(self, name_list):
    """
    Return a list of transition names and a transition mapping.

    Each entry of `name_list` is either a transition name string, or a
    1- or 2-tuple of (transition name, optional next state name).
    """
    names = []
    transitions = {}
    for entry in name_list:
        # Exact type check (not isinstance), as in the original:
        # only plain strings are treated as bare names.
        if type(entry) is type(''):
            transitions[entry] = self.make_transition(entry)
            names.append(entry)
        else:
            key = entry[0]
            transitions[key] = self.make_transition(*entry)
            names.append(key)
    return names, transitions
1,
3,
1,
1,
1417087373
] |
def bof(self, context):
    """
    Beginning-of-file hook: pass `context` through with an empty result.

    Override in subclasses that need BOF processing.
    """
    return (context, [])
1,
3,
1,
1,
1417087373
] |
def nop(self, match, context, next_state):
    """
    Actionless transition: keep `context` and `next_state`, emit nothing.

    Useful for simple state changes with no output.
    """
    return (context, next_state, [])
1,
3,
1,
1,
1417087373
] |
def get_indented(self, until_blank=0, strip_indent=1):
    """
    Return a block of indented lines of text, and info.

    Extract an indented block where the indent is unknown for all lines.

    :Parameters:
        - `until_blank`: Stop collecting at the first blank line if true
          (1).
        - `strip_indent`: Strip common leading indent if true (1,
          default).

    :Return:
        - the indented block (a list of lines of text),
        - its indent,
        - its first line offset from BOF, and
        - whether or not it finished with a blank line.
    """
    offset = self.abs_line_offset()
    indented, indent, blank_finish = self.input_lines.get_indented(
        self.line_offset, until_blank, strip_indent)
    if indented:
        self.next_line(len(indented) - 1)  # advance to last indented line
    # Drop leading blank lines, adjusting the reported offset to match.
    while indented and not indented[0].strip():
        indented.trim_start()
        offset += 1
    return indented, indent, offset, blank_finish
1,
3,
1,
1,
1417087373
] |
def get_first_known_indented(self, indent, until_blank=0, strip_indent=1,
                             strip_top=1):
    """
    Return an indented block and info.

    Extract an indented block where the indent is known for the first line
    and unknown for all other lines.

    :Parameters:
        - `indent`: The first line's indent (# of columns/characters).
        - `until_blank`: Stop collecting at the first blank line if true
          (1).
        - `strip_indent`: Strip `indent` characters of indentation if true
          (1, default).
        - `strip_top`: Strip blank lines from the beginning of the block.

    :Return:
        - the indented block,
        - its indent,
        - its first line offset from BOF, and
        - whether or not it finished with a blank line.
    """
    offset = self.abs_line_offset()
    indented, indent, blank_finish = self.input_lines.get_indented(
        self.line_offset, until_blank, strip_indent,
        first_indent=indent)
    self.next_line(len(indented) - 1)  # advance to last indented line
    if strip_top:
        # Drop leading blank lines, keeping the reported offset in sync.
        while indented and not indented[0].strip():
            indented.trim_start()
            offset += 1
    return indented, indent, offset, blank_finish
1,
3,
1,
1,
1417087373
] |
def __init__(self, state_machine, debug=0):
    """
    Initialize a `StateSM` object; extends `State.__init__()`.

    Check for indent state machine attributes, set defaults if not set.
    """
    State.__init__(self, state_machine, debug)
    # Default the indent machines to the generic nested machine, and the
    # known-indent variants to the (possibly just defaulted) indent ones.
    if self.indent_sm is None:
        self.indent_sm = self.nested_sm
    if self.indent_sm_kwargs is None:
        self.indent_sm_kwargs = self.nested_sm_kwargs
    if self.known_indent_sm is None:
        self.known_indent_sm = self.indent_sm
    if self.known_indent_sm_kwargs is None:
        self.known_indent_sm_kwargs = self.indent_sm_kwargs
1,
3,
1,
1,
1417087373
] |
def blank(self, match, context, next_state):
    """Blank-line transition: a no-op by default; override in subclasses."""
    return self.nop(match, context, next_state)
1,
3,
1,
1,
1417087373
] |
def known_indent(self, match, context, next_state):
    """
    Handle a known-indent text block. Extend or override in subclasses.

    Recursively run the registered state machine for known-indent indented
    blocks (`self.known_indent_sm`). The indent is the length of the
    match, ``match.end()``.
    """
    indented, line_offset, blank_finish = \
        self.state_machine.get_known_indented(match.end())
    # Recurse: a fresh nested machine processes the indented block.
    sm = self.known_indent_sm(debug=self.debug,
                              **self.known_indent_sm_kwargs)
    results = sm.run(indented, input_offset=line_offset)
    return context, next_state, results
1,
3,
1,
1,
1417087373
] |
def match(self, pattern):
    """
    Return the result of a regular expression *search* (not an anchored
    match) of `pattern` against the current line.

    Overrides `StateMachine.match()`.
    """
    current_line = self.line
    return pattern.search(current_line)
1,
3,
1,
1,
1417087373
] |
def __init__(self, initlist=None, source=None, items=None,
             parent=None, parent_offset=None):
    """Build a view list from another ViewList (shared-copy) or any
    sequence, tracking (source, offset) for every line."""
    self.data = []
    """The actual list of data, flattened from various sources."""

    self.items = []
    """A list of (source, offset) pairs, same length as `self.data`: the
    source of each line and the offset of each line from the beginning of
    its source."""

    self.parent = parent
    """The parent list."""

    self.parent_offset = parent_offset
    """Offset of this list from the beginning of the parent list."""

    if isinstance(initlist, ViewList):
        # Shallow-copy another view's data and bookkeeping.
        self.data = initlist.data[:]
        self.items = initlist.items[:]
    elif initlist is not None:
        self.data = list(initlist)
        if items:
            self.items = items
        else:
            # Default: every line attributed to `source` at its own index.
            self.items = [(source, i) for i in range(len(initlist))]
    assert len(self.data) == len(self.items), 'data mismatch'
1,
3,
1,
1,
1417087373
] |
def __repr__(self):
    """Debug representation: class name with data and items."""
    class_name = self.__class__.__name__
    return '%s(%s, items=%s)' % (class_name, self.data, self.items)
1,
3,
1,
1,
1417087373
] |
def __le__(self, other):
    """Order by underlying data, unwrapping ViewList operands."""
    return self.data <= self.__cast(other)
1,
3,
1,
1,
1417087373
] |
def __ne__(self, other):
    """Inequality on underlying data, unwrapping ViewList operands."""
    return self.data != self.__cast(other)
1,
3,
1,
1,
1417087373
] |
def __ge__(self, other):
    """Order by underlying data, unwrapping ViewList operands."""
    return self.data >= self.__cast(other)
1,
3,
1,
1,
1417087373
] |
def __cast(self, other):
    """Unwrap ViewList operands to their plain data list for comparisons."""
    return other.data if isinstance(other, ViewList) else other
1,
3,
1,
1,
1417087373
] |
def __len__(self):
    """Number of lines in this view."""
    return len(self.data)
1,
3,
1,
1,
1417087373
] |
def __getitem__(self, i):
    """Index -> plain line; slice -> child view that remembers its parent
    and offset so edits can propagate back (Python 2: types.SliceType)."""
    if isinstance(i, types.SliceType):
        assert i.step in (None, 1), 'cannot handle slice with stride'
        return self.__class__(self.data[i.start:i.stop],
                              items=self.items[i.start:i.stop],
                              parent=self, parent_offset=i.start or 0)
    else:
        return self.data[i]
1,
3,
1,
1,
1417087373
] |
def __delitem__(self, i):
    """Delete index or slice `i`, mirroring the deletion into the parent.

    NOTE(review): when `i` is a slice, the TypeError that routes to the
    slice branch may be raised by ``i + self.parent_offset`` *after*
    data/items were already deleted in the try body -- confirm against
    upstream docutils whether a double delete can occur with a parent set.
    """
    try:
        del self.data[i]
        del self.items[i]
        if self.parent:
            del self.parent[i + self.parent_offset]
    except TypeError:
        # `i` is a slice object.
        assert i.step is None, 'cannot handle slice with stride'
        del self.data[i.start:i.stop]
        del self.items[i.start:i.stop]
        if self.parent:
            del self.parent[(i.start or 0) + self.parent_offset
                            : (i.stop or len(self)) + self.parent_offset]
1,
3,
1,
1,
1417087373
] |
def __radd__(self, other):
    """Support ``other + self`` for ViewList operands only."""
    if not isinstance(other, ViewList):
        raise TypeError('adding ViewList to a non-ViewList')
    combined_data = other.data + self.data
    combined_items = other.items + self.items
    return self.__class__(combined_data, items=combined_items)
1,
3,
1,
1,
1417087373
] |
def __mul__(self, n):
    """Return a new view with data and items repeated `n` times."""
    repeated_data = self.data * n
    repeated_items = self.items * n
    return self.__class__(repeated_data, items=repeated_items)
1,
3,
1,
1,
1417087373
] |
def __imul__(self, n):
    """In-place repeat (``*=``); returns self as the protocol requires."""
    # Keep *= so the underlying list objects are mutated, not rebound.
    self.items *= n
    self.data *= n
    return self
1,
3,
1,
1,
1417087373
] |
def append(self, item, source=None, offset=0):
    """Append one line with its (source, offset); with no `source`,
    treat `item` as a view and extend instead."""
    if source is None:
        self.extend(item)
        return
    if self.parent:
        # Mirror the append into the parent at the matching position.
        self.parent.insert(len(self.data) + self.parent_offset, item,
                           source, offset)
    self.data.append(item)
    self.items.append((source, offset))
1,
3,
1,
1,
1417087373
] |
def pop(self, i=-1):
    """Remove and return line `i`, mirroring the removal into any parent."""
    if self.parent:
        # Normalize a possibly-negative index before offsetting.
        absolute = (len(self.data) + i) % len(self.data)
        self.parent.pop(absolute + self.parent_offset)
    self.items.pop(i)
    return self.data.pop(i)
1,
3,
1,
1,
1417087373
] |
def trim_end(self, n=1):
    """
    Remove `n` items from the end of the list, without touching the parent.

    Raises IndexError when `n` is negative or larger than the list.
    """
    if n < 0:
        raise IndexError('Trim size must be >= 0.')
    if n > len(self.data):
        raise IndexError("Size of trim too large; can't trim %s items "
                         "from a list of size %s." % (n, len(self.data)))
    if n == 0:
        # BUG FIX: the original fell through to ``del self.data[-0:]``,
        # and since -0 == 0 that slice is the WHOLE list -- trimming zero
        # items used to clear everything.
        return
    del self.data[-n:]
    del self.items[-n:]
1,
3,
1,
1,
1417087373
] |
def count(self, item):
    """Number of occurrences of `item` among the data lines."""
    return self.data.count(item)
1,
3,
1,
1,
1417087373
] |
def reverse(self):
    """Reverse in place; detach from the parent, whose order no longer
    corresponds to this view."""
    self.parent = None
    self.data.reverse()
    self.items.reverse()
1,
3,
1,
1,
1417087373
] |
def info(self, i):
    """Return the (source, offset) pair for index `i`.
    Index len(data) (one past the end) reports the last source with
    offset None; anything further out re-raises IndexError."""
    try:
        return self.items[i]
    except IndexError:
        if i != len(self.data):
            raise
        return self.items[i - 1][0], None
1,
3,
1,
1,
1417087373
] |
def offset(self, i):
    """Offset component of ``info(i)``."""
    source_and_offset = self.info(i)
    return source_and_offset[1]
1,
3,
1,
1,
1417087373
] |
def xitems(self):
    """Yield (source, offset, value) for each line in order."""
    for index, value in enumerate(self.data):
        source, offset = self.items[index]
        yield (source, offset, value)
1,
3,
1,
1,
1417087373
] |
def trim_left(self, length, start=0, end=sys.maxint):
    """
    Strip `length` leading characters from each line of data[start:end],
    in place.  No whitespace check is performed; the slice parent is not
    touched.  (Python 2: default `end` is sys.maxint.)
    """
    trimmed = [line[length:] for line in self.data[start:end]]
    self.data[start:end] = trimmed
1,
3,
1,
1,
1417087373
] |
def get_indented(self, start=0, until_blank=0, strip_indent=1,
                 block_indent=None, first_indent=None):
    """
    Extract and return a StringList of indented lines of text.

    Collect all lines with indentation, determine the minimum indentation,
    remove the minimum indentation from all indented lines (unless
    `strip_indent` is false), and return them. All lines up to but not
    including the first unindented line will be returned.

    :Parameters:
      - `start`: The index of the first line to examine.
      - `until_blank`: Stop collecting at the first blank line if true.
      - `strip_indent`: Strip common leading indent if true (default).
      - `block_indent`: The indent of the entire block, if known.
      - `first_indent`: The indent of the first line, if known.

    :Return:
      - a StringList of indented lines with minimum indent removed;
      - the amount of the indent;
      - a boolean: did the indented block finish with a blank line or EOF?
    """
    indent = block_indent           # start with None if unknown
    end = start
    if block_indent is not None and first_indent is None:
        first_indent = block_indent
    if first_indent is not None:
        # First line's indent is already known; skip inspecting it.
        end += 1
    last = len(self.data)
    while end < last:
        line = self.data[end]
        if line and (line[0] != ' '
                     or (block_indent is not None
                         and line[:block_indent].strip())):
            # Line not indented or insufficiently indented.
            # Block finished properly iff the last indented line blank:
            blank_finish = ((end > start)
                            and not self.data[end - 1].strip())
            break
        stripped = line.lstrip()
        if not stripped:        # blank line
            if until_blank:
                blank_finish = 1
                break
        elif block_indent is None:
            # Track the smallest indent seen across non-blank lines.
            line_indent = len(line) - len(stripped)
            if indent is None:
                indent = line_indent
            else:
                indent = min(indent, line_indent)
        end += 1
    else:
        blank_finish = 1        # block ends at end of lines
    block = self[start:end]
    if first_indent is not None and block:
        # The first line's indent may differ; strip it separately.
        block.data[0] = block.data[0][first_indent:]
    if indent and strip_indent:
        block.trim_left(indent, start=(first_indent is not None))
    return block, indent or 0, blank_finish
1,
3,
1,
1,
1417087373
] |
def pad_double_width(self, pad_char):
    """
    Append `pad_char` after every double-width character in each
    unicode line. For East Asian language support.
    """
    east_asian_width = getattr(unicodedata, 'east_asian_width', None)
    if east_asian_width is None:
        # unicodedata.east_asian_width is new in Python 2.4
        return
    for i, line in enumerate(self.data):
        if not isinstance(line, unicode):
            continue
        padded = []
        for char in line:
            padded.append(char)
            if east_asian_width(char) in 'WF':  # 'W'ide & 'F'ull-width
                padded.append(pad_char)
        self.data[i] = ''.join(padded)
1,
3,
1,
1,
1417087373
] |
def string2lines(astring, tab_width=8, convert_whitespace=0,
                 whitespace=re.compile('[\v\f]')):
    """
    Split `astring` into a list of one-line strings with tabs expanded,
    no newlines, and trailing whitespace stripped.

    Each tab is expanded with between 1 and `tab_width` spaces, so that the
    next character's index becomes a multiple of `tab_width` (8 by default).

    Parameters:
    - `astring`: a multi-line string.
    - `tab_width`: the number of columns between tab stops.
    - `convert_whitespace`: convert form feeds and vertical tabs to spaces?
    """
    if convert_whitespace:
        astring = whitespace.sub(' ', astring)
    expanded = (line.expandtabs(tab_width) for line in astring.splitlines())
    return [line.rstrip() for line in expanded]
1,
3,
1,
1,
1417087373
] |
def __init__(self):
    """Create an empty, lock-protected result collector."""
    self.lock = threading.Lock()
    self.results = []
51,
13,
51,
33,
1295771502
] |
def get(self):
    """
    Atomically return all results collected so far and reset the
    store to empty.
    """
    # Swap under the lock so results reported concurrently are never
    # lost between reading the current set and resetting it.
    with self.lock:
        collected, self.results = self.results, []
    return collected
51,
13,
51,
33,
1295771502
] |
def __init__(self, db, sid, global_stats, timeout_sec, sni):
    """Set up a scan worker for one service-id and register it in the
    shared statistics before it is started."""
    threading.Thread.__init__(self)
    self.db = db
    self.sid = sid
    self.timeout_sec = timeout_sec
    self.sni = sni
    self.global_stats = global_stats
    global_stats.active_threads += 1
    # remember the spawn time so long-running scans can be reported
    global_stats.threads[sid] = time.time()
51,
13,
51,
33,
1295771502
] |
def _record_failure(self, e):
    """Classify a scan exception and bump the matching failure counter."""
    stats.failures += 1
    self.db.report_metric('ServiceScanFailure', str(e))
    if isinstance(e, SSLScanTimeoutException):
        stats.failure_timeouts += 1
    elif isinstance(e, SSLAlertException):
        stats.failure_ssl_alert += 1
    elif isinstance(e, ValueError):
        stats.failure_other += 1
    else:
        err = self._get_errno(e)
        if err in (errno.ECONNREFUSED, errno.EINVAL):
            stats.failure_conn_refused += 1
        elif err in (errno.EHOSTUNREACH, errno.ENETUNREACH):
            stats.failure_no_route += 1
        elif err == errno.ECONNRESET:
            stats.failure_conn_reset += 1
        elif err in (-2, -3, -5, 8):
            # presumably getaddrinfo/DNS resolution error codes — confirm
            stats.failure_dns += 1
        else:
            stats.failure_other += 1
51,
13,
51,
33,
1295771502
] |
def __init__(self):
    """Initialize all scan progress counters to zero."""
    # overall progress
    self.num_started = 0
    self.num_completed = 0
    self.active_threads = 0
    self.failures = 0
    # sid -> spawn timestamp for currently running threads
    self.threads = {}
    # per-cause failure breakdown
    for counter in ('failure_timeouts', 'failure_no_route',
                    'failure_conn_refused', 'failure_conn_reset',
                    'failure_dns', 'failure_ssl_alert',
                    'failure_socket', 'failure_other'):
        setattr(self, counter, 0)
51,
13,
51,
33,
1295771502
] |
def _record_observations_in_db(db, results):
    """
    Record a set of service observations in the database.

    `results` is a sequence of (service, observation) records; an empty
    batch is a no-op. Database errors are logged, not raised.
    """
    if not results:
        return
    try:
        for record in results:
            db.report_observation(record[0], record[1])
    except Exception as e:
        # TODO: we should probably retry here
        logging.critical("DB Error: Failed to write results of length {0}".format(
            len(results)))
        logging.exception(e)
51,
13,
51,
33,
1295771502
] |
def main(db, service_id_file, logfile=False, verbose=False, quiet=False, rate=DEFAULT_SCANS,
        timeout_sec=DEFAULT_WAIT, sni=False):
    """
    Run the main program.
    Scan a list of services and update Observation records in the notary database.

    :param db: notary database handle, used for observations and metrics.
    :param service_id_file: iterable yielding one service-id per line.
    :param logfile: if true, log to the module-level LOGFILE.
    :param verbose: enable verbose logging (incl. long-running thread dumps).
    :param quiet: suppress informational logging.
    :param rate: number of scan threads launched per second (throttle).
    :param timeout_sec: per-scan timeout in seconds.
    :param sni: if true, scans send the hostname via TLS SNI.
    """
    global stats
    global results
    stats = GlobalStats()
    results = ResultStore()
    notary_logs.setup_logs(logfile, LOGFILE, verbose=verbose, quiet=quiet)
    start_time = time.time()
    localtime = time.asctime(time.localtime(start_time))
    # read all service names to start;
    # otherwise the database can lock up
    # if we're accepting data piped from another process
    all_sids = [line.rstrip() for line in service_id_file]
    print("Starting scan of %s service-ids at: %s" % (len(all_sids), localtime))
    print("INFO: *** Timeout = %s sec Scans-per-second = %s" % \
        (timeout_sec, rate))
    db.report_metric('ServiceScanStart', "ServiceCount: " + str(len(all_sids)))
    # create a thread to scan each service
    # and record results as they come in
    for sid in all_sids:
        try:
            # ignore non SSL services
            # TODO: use a regex instead
            if sid.split(",")[1] == notary_common.SSL_TYPE:
                stats.num_started += 1
                t = ScanThread(db, sid, stats, timeout_sec, sni)
                t.start()
                # every `rate` launches: pause one second (rate limiting),
                # flush any finished results to the DB, and log progress
                if (stats.num_started % rate) == 0:
                    time.sleep(1)
                    _record_observations_in_db(db, results.get())
                    so_far = int(time.time() - start_time)
                    logging.info("%s seconds passed. %s complete, %s " \
                        "failures. %s Active threads" % \
                        (so_far, stats.num_completed,
                        stats.failures, stats.active_threads))
                    logging.info(" details: timeouts = %s, " \
                        "ssl-alerts = %s, no-route = %s, " \
                        "conn-refused = %s, conn-reset = %s,"\
                        "dns = %s, socket = %s, other = %s" % \
                        (stats.failure_timeouts,
                        stats.failure_ssl_alert,
                        stats.failure_no_route,
                        stats.failure_conn_refused,
                        stats.failure_conn_reset,
                        stats.failure_dns,
                        stats.failure_socket,
                        stats.failure_other))
                # every 1000 launches, optionally dump scans that have
                # been running for more than 20 seconds
                if stats.num_started % 1000 == 0:
                    if (verbose):
                        logging.info("long running threads")
                        cur_time = time.time()
                        for sid in stats.threads.keys():
                            spawn_time = stats.threads.get(sid, cur_time)
                            duration = cur_time - spawn_time
                            if duration > 20:
                                logging.info("'%s' has been running for %s" %\
                                    (sid, duration))
        except IndexError:
            # service-id line did not contain a ',' — malformed input
            logging.error("Service '%s' has no index [1] after splitting on ','.\n" % (sid))
        except KeyboardInterrupt:
            exit(1)
    # finishing the for-loop means we kicked-off all threads,
    # but they may not be done yet. Wait for a bit, if needed.
    giveup_time = time.time() + (2 * timeout_sec)
    while stats.active_threads > 0:
        time.sleep(1)
        if time.time() > giveup_time:
            if stats.active_threads > 0:
                logging.error("Giving up scans after {0}. {1} threads still active!".format(
                    giveup_time, stats.active_threads))
            break
    # record any observations made since we finished the
    # main for-loop
    _record_observations_in_db(db, results.get())
    duration = int(time.time() - start_time)
    localtime = time.asctime(time.localtime(start_time))
    print("Ending scan at: %s" % localtime)
    print("Scan of %s services took %s seconds. %s Failures" % \
        (stats.num_started, duration, stats.failures))
    db.report_metric('ServiceScanStop')
    exit(0)
51,
13,
51,
33,
1295771502
] |
def eoms_controlled_blender(t, state, dum, ast):
"""Inertial dumbbell equations of motion about an asteroid | skulumani/asteroid_dumbbell | [
1,
4,
1,
19,
1476306035
] |
def eoms_controlled_blender_traverse_then_land(t, state, dum, ast):
"""Inertial dumbbell equations of motion about an asteroid | skulumani/asteroid_dumbbell | [
1,
4,
1,
19,
1476306035
] |
def blender_traverse_then_land_sim():
    """Set up a controlled traverse-then-land simulation about Itokawa,
    rendering frames with Blender and logging to an HDF5 file.

    NOTE(review): this fragment configures the integrator and HDF5
    output; the propagation/render loop presumably follows in the full
    source — confirm there.
    """
    # simulation parameters
    output_path = './visualization/blender'
    asteroid_name = 'itokawa_low'
    # create a HDF5 dataset, named by the current wall-clock time
    hdf5_path = './data/itokawa_landing/{}_controlled_vertical_landing.hdf5'.format(
        datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S"))
    dataset_name = 'landing'
    render = 'BLENDER'
    image_modulus = 400  # render one image every `image_modulus` steps
    RelTol = 1e-6
    AbsTol = 1e-6
    ast_name = 'itokawa'
    num_faces = 64
    t0 = 0
    dt = 1
    tf = 7200
    num_steps = 7200
    periodic_pos = np.array([1.495746722510590,0.000001002669660,0.006129720493607])
    periodic_vel = np.array([0.000000302161724,-0.000899607989820,-0.000000013286327])
    ast = asteroid.Asteroid(ast_name,num_faces)
    dum = dumbbell.Dumbbell(m1=500, m2=500, l=0.003)
    # instantiate the blender scene once
    camera_obj, camera, lamp_obj, lamp, itokawa_obj, scene = blender.blender_init(render_engine=render, asteroid_name=asteroid_name)
    # get some of the camera parameters
    K = blender_camera.get_calibration_matrix_K_from_blender(camera)
    # set initial state for inertial EOMs
    # initial_pos = np.array([2.550, 0, 0]) # km for center of mass in body frame
    initial_pos = np.array([0, -2.550, 0])
    initial_vel = periodic_vel + attitude.hat_map(ast.omega*np.array([0,0,1])).dot(initial_pos)
    initial_R = attitude.rot3(np.pi/2).reshape(9) # transforms from dumbbell body frame to the inertial frame
    initial_w = np.array([0.01, 0.01, 0.01])
    initial_state = np.hstack((initial_pos, initial_vel, initial_R, initial_w))
    # instantiate ode object
    # system = integrate.ode(eoms_controlled_blender)
    system = integrate.ode(eoms_controlled_blender_traverse_then_land)
    system.set_integrator('lsoda', atol=AbsTol, rtol=RelTol, nsteps=1000)
    system.set_initial_value(initial_state, t0)
    system.set_f_params(dum, ast)
    i_state = np.zeros((num_steps+1, 18))
    time = np.zeros(num_steps+1)
    i_state[0, :] = initial_state
    with h5py.File(hdf5_path) as image_data:
        # create the image/pose datasets; floor division keeps the HDF5
        # shapes integral under Python 3 (plain '/' yields a float there)
        images = image_data.create_dataset(dataset_name, (244, 537, 3, num_steps//image_modulus), dtype='uint8')
        RT_blender = image_data.create_dataset('RT', (num_steps//image_modulus, 12))
        R_i2bcam = image_data.create_dataset('R_i2bcam', (num_steps//image_modulus, 9))
1,
4,
1,
19,
1476306035
] |
def blender_vertical_landing_sim():
    """Set up a controlled vertical landing simulation about Itokawa,
    rendering frames with Blender and logging to an HDF5 file.

    NOTE(review): this fragment configures the integrator and HDF5
    output; the propagation/render loop presumably follows in the full
    source — confirm there.
    """
    # simulation parameters
    output_path = './visualization/blender'
    asteroid_name = 'itokawa_low'
    # create a HDF5 dataset, named by the current wall-clock time
    hdf5_path = './data/itokawa_landing/{}_controlled_vertical_landing.hdf5'.format(
        datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S"))
    dataset_name = 'landing'
    render = 'BLENDER'
    image_modulus = 200  # render one image every `image_modulus` steps
    RelTol = 1e-6
    AbsTol = 1e-6
    ast_name = 'itokawa'
    num_faces = 64
    t0 = 0
    dt = 1
    tf = 3600
    num_steps = 3600
    periodic_pos = np.array([1.495746722510590,0.000001002669660,0.006129720493607])
    periodic_vel = np.array([0.000000302161724,-0.000899607989820,-0.000000013286327])
    ast = asteroid.Asteroid(ast_name,num_faces)
    dum = dumbbell.Dumbbell(m1=500, m2=500, l=0.003)
    # instantiate the blender scene once
    camera_obj, camera, lamp_obj, lamp, itokawa_obj, scene = blender.blender_init(render_engine=render, asteroid_name=asteroid_name)
    # get some of the camera parameters
    K = blender_camera.get_calibration_matrix_K_from_blender(camera)
    # set initial state for inertial EOMs
    initial_pos = np.array([2.550, 0, 0]) # km for center of mass in body frame
    initial_vel = periodic_vel + attitude.hat_map(ast.omega*np.array([0,0,1])).dot(initial_pos)
    initial_R = attitude.rot3(np.pi).reshape(9) # transforms from dumbbell body frame to the inertial frame
    initial_w = np.array([0.01, 0.01, 0.01])
    initial_state = np.hstack((initial_pos, initial_vel, initial_R, initial_w))
    # instantiate ode object
    system = integrate.ode(eoms_controlled_blender)
    system.set_integrator('lsoda', atol=AbsTol, rtol=RelTol, nsteps=1000)
    system.set_initial_value(initial_state, t0)
    system.set_f_params(dum, ast)
    i_state = np.zeros((num_steps+1, 18))
    time = np.zeros(num_steps+1)
    i_state[0, :] = initial_state
    with h5py.File(hdf5_path) as image_data:
        # create the image/pose datasets; floor division keeps the HDF5
        # shapes integral under Python 3 (plain '/' yields a float there)
        images = image_data.create_dataset(dataset_name, (244, 537, 3, num_steps//image_modulus), dtype='uint8')
        RT_blender = image_data.create_dataset('RT', (num_steps//image_modulus, 12))
        R_i2bcam = image_data.create_dataset('R_i2bcam', (num_steps//image_modulus, 9))
1,
4,
1,
19,
1476306035
] |
def blender_inertial_circumnavigate(gen_images=False):
    """Move around the asteroid in the inertial frame, but assume no rotation of the asteroid

    :param gen_images: if true, also allocate the HDF5 image datasets.

    NOTE(review): this fragment configures the integrator and HDF5
    output; the propagation loop presumably follows in the full source.
    """
    # simulation parameters
    output_path = './visualization/blender'
    asteroid_name = 'itokawa_high'
    # create a HDF5 dataset, named by the current wall-clock time
    hdf5_path = './data/asteroid_circumnavigate/{}_inertial_no_ast_rotation.hdf5'.format(
        datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S"))
    dataset_name = 'landing'
    render = 'BLENDER'
    image_modulus = 1  # render an image at every step
    RelTol = 1e-6
    AbsTol = 1e-6
    ast_name = 'itokawa'
    num_faces = 64
    t0 = 0
    dt = 1
    tf = 3600 * 4
    num_steps = 3600 * 4
    loops = 4
    periodic_pos = np.array([1.495746722510590,0.000001002669660,0.006129720493607])
    periodic_vel = np.array([0.000000302161724,-0.000899607989820,-0.000000013286327])
    ast = asteroid.Asteroid(ast_name,num_faces)
    dum = dumbbell.Dumbbell(m1=500, m2=500, l=0.003)
    # instantiate the blender scene once
    camera_obj, camera, lamp_obj, lamp, itokawa_obj, scene = blender.blender_init(render_engine=render, asteroid_name=asteroid_name)
    # get some of the camera parameters
    K = blender_camera.get_calibration_matrix_K_from_blender(camera)
    # set initial state for inertial EOMs
    initial_pos = np.array([3, 0, 0]) # km for center of mass in body frame
    initial_vel = periodic_vel + attitude.hat_map(ast.omega*np.array([0,0,1])).dot(initial_pos)
    initial_R = attitude.rot3(np.pi).reshape(9) # transforms from dumbbell body frame to the inertial frame
    initial_w = np.array([0.01, 0.01, 0.01])
    initial_state = np.hstack((initial_pos, initial_vel, initial_R, initial_w))
    # instantiate ode object
    system = integrate.ode(eoms.eoms_controlled_inertial_circumnavigate)
    system.set_integrator('lsoda', atol=AbsTol, rtol=RelTol, nsteps=1000)
    system.set_initial_value(initial_state, t0)
    system.set_f_params(dum, ast, tf, loops)
    i_state = np.zeros((num_steps+1, 18))
    time = np.zeros(num_steps+1)
    i_state[0, :] = initial_state
    with h5py.File(hdf5_path) as image_data:
        # create the image/pose datasets; floor division keeps the HDF5
        # shapes integral under Python 3 (plain '/' yields a float there)
        if gen_images:
            images = image_data.create_dataset(dataset_name, (244, 537, 3, num_steps//image_modulus), dtype='uint8')
            RT_blender = image_data.create_dataset('RT', (num_steps//image_modulus, 12))
            R_i2bcam = image_data.create_dataset('R_i2bcam', (num_steps//image_modulus, 9))
1,
4,
1,
19,
1476306035
] |
def blender_inertial_lissajous(gen_images=False):
    """Move around the asteroid in the inertial frame, but assume no rotation of the asteroid

    :param gen_images: if true, also allocate the HDF5 image datasets.

    NOTE(review): this fragment configures the integrator and HDF5
    output; the propagation loop presumably follows in the full source.
    """
    # simulation parameters
    output_path = './visualization/blender'
    asteroid_name = 'itokawa_high'
    # create a HDF5 dataset, named by the current wall-clock time
    hdf5_path = './data/asteroid_circumnavigate/{}_inertial_no_ast_rotation_lissajous.hdf5'.format(
        datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S"))
    dataset_name = 'landing'
    render = 'BLENDER'
    image_modulus = 1  # render an image at every step
    RelTol = 1e-6
    AbsTol = 1e-6
    ast_name = 'itokawa'
    num_faces = 64
    t0 = 0
    dt = 1
    tf = 3600 * 2
    num_steps = 3600 * 2
    loops = 2
    periodic_pos = np.array([1.495746722510590,0.000001002669660,0.006129720493607])
    periodic_vel = np.array([0.000000302161724,-0.000899607989820,-0.000000013286327])
    ast = asteroid.Asteroid(ast_name,num_faces)
    dum = dumbbell.Dumbbell(m1=500, m2=500, l=0.003)
    # instantiate the blender scene once
    camera_obj, camera, lamp_obj, lamp, itokawa_obj, scene = blender.blender_init(render_engine=render, asteroid_name=asteroid_name)
    # get some of the camera parameters
    K = blender_camera.get_calibration_matrix_K_from_blender(camera)
    # set initial state for inertial EOMs
    initial_pos = np.array([3, 3, 0]) # km for center of mass in body frame
    initial_vel = periodic_vel + attitude.hat_map(ast.omega*np.array([0,0,1])).dot(initial_pos)
    initial_R = attitude.rot3(np.pi).reshape(9) # transforms from dumbbell body frame to the inertial frame
    initial_w = np.array([0.01, 0.01, 0.01])
    initial_state = np.hstack((initial_pos, initial_vel, initial_R, initial_w))
    # instantiate ode object
    system = integrate.ode(eoms.eoms_controlled_inertial_lissajous)
    system.set_integrator('lsoda', atol=AbsTol, rtol=RelTol, nsteps=1000)
    system.set_initial_value(initial_state, t0)
    system.set_f_params(dum, ast, tf, loops)
    i_state = np.zeros((num_steps+1, 18))
    time = np.zeros(num_steps+1)
    i_state[0, :] = initial_state
    with h5py.File(hdf5_path) as image_data:
        # create the image/pose datasets; floor division keeps the HDF5
        # shapes integral under Python 3 (plain '/' yields a float there)
        if gen_images:
            images = image_data.create_dataset(dataset_name, (244, 537, 3, num_steps//image_modulus), dtype='uint8')
            RT_blender = image_data.create_dataset('RT', (num_steps//image_modulus, 12))
            R_i2bcam = image_data.create_dataset('R_i2bcam', (num_steps//image_modulus, 9))
1,
4,
1,
19,
1476306035
] |
def blender_inertial_quarter_equatorial(gen_images=False):
    """Move around the asteroid in the inertial frame, but assume no rotation of the asteroid

    Moves in the xy positive quadrant in the equatorial plane.

    :param gen_images: if true, also allocate the HDF5 image datasets.

    NOTE(review): this fragment configures the integrator and HDF5
    output; the propagation loop presumably follows in the full source.
    """
    # simulation parameters
    output_path = './visualization/blender'
    asteroid_name = 'itokawa_high'
    # create a HDF5 dataset, named by the current wall-clock time
    hdf5_path = './data/asteroid_circumnavigate/{}_inertial_no_ast_rotation_quarter_xy.hdf5'.format(
        datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S"))
    dataset_name = 'landing'
    render = 'BLENDER'
    image_modulus = 1  # render an image at every step
    RelTol = 1e-6
    AbsTol = 1e-6
    ast_name = 'itokawa'
    num_faces = 64
    t0 = 0
    dt = 1
    tf = 3600 * 4
    num_steps = 3600 * 4
    loops = 4
    periodic_pos = np.array([1.495746722510590,0.000001002669660,0.006129720493607])
    periodic_vel = np.array([0.000000302161724,-0.000899607989820,-0.000000013286327])
    ast = asteroid.Asteroid(ast_name,num_faces)
    dum = dumbbell.Dumbbell(m1=500, m2=500, l=0.003)
    # instantiate the blender scene once
    camera_obj, camera, lamp_obj, lamp, itokawa_obj, scene = blender.blender_init(render_engine=render, asteroid_name=asteroid_name)
    # get some of the camera parameters
    K = blender_camera.get_calibration_matrix_K_from_blender(camera)
    # set initial state for inertial EOMs
    initial_pos = np.array([3, 0, 0]) # km for center of mass in body frame
    initial_vel = periodic_vel + attitude.hat_map(ast.omega*np.array([0,0,1])).dot(initial_pos)
    initial_R = attitude.rot3(np.pi).reshape(9) # transforms from dumbbell body frame to the inertial frame
    initial_w = np.array([0.01, 0.01, 0.01])
    initial_state = np.hstack((initial_pos, initial_vel, initial_R, initial_w))
    # instantiate ode object
    system = integrate.ode(eoms.eoms_controlled_inertial_quarter_equatorial)
    system.set_integrator('lsoda', atol=AbsTol, rtol=RelTol, nsteps=1000)
    system.set_initial_value(initial_state, t0)
    system.set_f_params(dum, ast, tf, loops)
    i_state = np.zeros((num_steps+1, 18))
    time = np.zeros(num_steps+1)
    i_state[0, :] = initial_state
    with h5py.File(hdf5_path) as image_data:
        # create the image/pose datasets; floor division keeps the HDF5
        # shapes integral under Python 3 (plain '/' yields a float there)
        if gen_images:
            images = image_data.create_dataset(dataset_name, (244, 537, 3, num_steps//image_modulus), dtype='uint8')
            RT_blender = image_data.create_dataset('RT', (num_steps//image_modulus, 12))
            R_i2bcam = image_data.create_dataset('R_i2bcam', (num_steps//image_modulus, 9))
1,
4,
1,
19,
1476306035
] |
def _need_func(testfunc, name, description):
    """Decorator factory: skip the decorated test when ``testfunc(name)``
    reports the named resource as unavailable."""
    def decorator(func):
        def wrapper(*args, **kwargs):
            if not testfunc(name):
                pytest.skip("%s %r is not available" % (description, name))
            return func(*args, **kwargs)
        # carry the original test's name attribute over so the test
        # framework still recognizes the wrapped function
        setattr(wrapper, fnameattr, getattr(func, fnameattr))
        return wrapper
    return decorator
321,
57,
321,
64,
1355430234
] |
def needs_program(name):
    """Decorator skipping a test when the given executable is not found."""
    def available(prog):
        return patoolib.util.find_program(prog)
    return _need_func(available, name, 'program')
321,
57,
321,
64,
1355430234
] |
def needs_module(name):
    """Decorator skipping a test when the given Python module cannot be
    imported."""
    def has_module(module):
        try:
            importlib.import_module(module)
        except ImportError:
            return False
        return True
    return _need_func(has_module, name, 'Python module')
321,
57,
321,
64,
1355430234
] |
def check_prog (f):
    # Wrap test `f`: skip unless the closed-over `program` exists on PATH
    # and supports the closed-over `codec`.
    def run_if_available (*args, **kwargs):
        if not patoolib.util.find_program(program):
            pytest.skip("program `%s' not available" % program)
        if not has_codec(program, codec):
            pytest.skip("codec `%s' for program `%s' not available" % (codec, program))
        return f(*args, **kwargs)
    # keep the wrapped test's name attribute for the test framework
    setattr(run_if_available, fnameattr, getattr(f, fnameattr))
    return run_if_available
321,
57,
321,
64,
1355430234
] |
def has_codec (program, codec):
    """Test if program supports given codec."""
    # 7z needs a separate capability probe for rar support
    if program == '7z' and codec == 'rar':
        return patoolib.util.p7zip_supports_rar()
    if not patoolib.program_supports_compression(program, codec):
        # fall back: the codec may be available as a standalone program
        return patoolib.util.find_program(codec)
    return True
321,
57,
321,
64,
1355430234
] |
def check_func(func):
    # Wrap `func` so it is skipped when running under Travis CI.
    def skip_on_travis(*args, **kwargs):
        if "TRAVIS" in os.environ:
            pytest.skip("Skip on TRAVIS CI build.")
        return func(*args, **kwargs)
    # preserve the wrapped test's name attribute for the test framework
    setattr(skip_on_travis, fnameattr, getattr(func, fnameattr))
    return skip_on_travis
321,
57,
321,
64,
1355430234
] |
def setUp(self):
    """Configure the file names and comparison exclusions for this test."""
    self.maxDiff = None
    filename = 'set_start_page01.xlsx'
    test_dir = 'xlsxwriter/test/comparison/'
    self.got_filename = ''.join([test_dir, '_test_', filename])
    self.exp_filename = ''.join([test_dir, 'xlsx_files/', filename])
    # files excluded from the byte comparison
    self.ignore_files = [
        'xl/printerSettings/printerSettings1.bin',
        'xl/worksheets/_rels/sheet1.xml.rels',
    ]
    # XML elements excluded from the comparison, per file
    self.ignore_elements = {
        '[Content_Types].xml': ['<Default Extension="bin"'],
        'xl/worksheets/sheet1.xml': ['<pageMargins'],
    }
19,
2,
19,
1,
1398667377
] |
def tearDown(self):
    """Cleanup: delete the output file produced by the test, if any."""
    target = self.got_filename
    if os.path.exists(target):
        os.remove(target)
19,
2,
19,
1,
1398667377
] |
def setup(self, timestamp):
"""
Get water requirements for this timestep based on my internal
requirements dictionary.
"""
self.demand = self._seasonal_water_req[timestamp] | UMWRG/pynsim | [
40,
17,
40,
10,
1428770472
] |
def test_make_dir():
    """Check that the expected "output" directory was created in the cwd."""
    output_dir = "output"
    assert os.path.exists(output_dir)
97,
21,
97,
8,
1480589112
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.