INSTRUCTION
stringlengths 1
46.3k
| RESPONSE
stringlengths 75
80.2k
|
|---|---|
Returns the units of the measured value for the current mode. May return
empty string
|
def units(self):
    """
    Return the units of the measured value for the current mode. May be
    an empty string.
    """
    self._units, result = self.get_attr_string(self._units, 'units')
    return result
|
Returns the value or values measured by the sensor. Check num_values to
see how many values there are. Values with N >= num_values will return
an error. The values are fixed point numbers, so check decimals to see
if you need to divide to get the actual value.
|
def value(self, n=0):
    """
    Return the value (or values) measured by the sensor.

    Check ``num_values`` to see how many values there are; values with
    N >= num_values will return an error. The values are fixed point
    numbers, so check ``decimals`` to see if you need to divide to get
    the actual value.
    """
    index = int(n)
    attr_name = 'value%d' % index
    self._value[index], result = self.get_attr_int(self._value[index], attr_name)
    return result
|
Returns the format of the values in `bin_data` for the current mode.
Possible values are:
- `u8`: Unsigned 8-bit integer (byte)
- `s8`: Signed 8-bit integer (sbyte)
- `u16`: Unsigned 16-bit integer (ushort)
- `s16`: Signed 16-bit integer (short)
- `s16_be`: Signed 16-bit integer, big endian
- `s32`: Signed 32-bit integer (int)
- `float`: IEEE 754 32-bit floating point (float)
|
def bin_data_format(self):
    """
    Return the format of the values in `bin_data` for the current mode.

    Possible values are:
    - `u8`: Unsigned 8-bit integer (byte)
    - `s8`: Signed 8-bit integer (sbyte)
    - `u16`: Unsigned 16-bit integer (ushort)
    - `s16`: Signed 16-bit integer (short)
    - `s16_be`: Signed 16-bit integer, big endian
    - `s32`: Signed 32-bit integer (int)
    - `float`: IEEE 754 32-bit floating point (float)
    """
    self._bin_data_format, result = self.get_attr_string(self._bin_data_format, 'bin_data_format')
    return result
|
Returns the unscaled raw values in the `value<N>` attributes as raw byte
array. Use `bin_data_format`, `num_values` and the individual sensor
documentation to determine how to interpret the data.
Use `fmt` to unpack the raw bytes into a struct.
Example::
>>> from ev3dev2.sensor.lego import InfraredSensor
>>> ir = InfraredSensor()
>>> ir.value()
28
>>> ir.bin_data('<b')
(28,)
|
def bin_data(self, fmt=None):
    """
    Return the unscaled raw values in the `value<N>` attributes as a raw
    byte array. Use `bin_data_format`, `num_values` and the individual
    sensor documentation to determine how to interpret the data.

    :param fmt: optional ``struct`` format string; when given, the raw
        bytes are unpacked with it and the resulting tuple is returned
        instead of the bytearray.

    Example::

        >>> from ev3dev2.sensor.lego import InfraredSensor
        >>> ir = InfraredSensor()
        >>> ir.value()
        28
        >>> ir.bin_data('<b')
        (28,)
    """
    # Lazily compute the total payload size (bytes per element times
    # element count) the first time this is called.
    # FIX: compare against None with 'is', not '=='.
    if self._bin_data_size is None:
        element_sizes = {
            "u8": 1,
            "s8": 1,
            "u16": 2,
            "s16": 2,
            "s16_be": 2,
            "s32": 4,
            "float": 4,
        }
        # Unknown formats fall back to 1 byte per element.
        self._bin_data_size = element_sizes.get(self.bin_data_format, 1) * self.num_values

    # Open the attribute file once and keep it for subsequent reads.
    if self._bin_data is None:
        self._bin_data = self._attribute_file_open('bin_data')

    self._bin_data.seek(0)
    raw = bytearray(self._bin_data.read(self._bin_data_size))

    if fmt is None:
        return raw
    return unpack(fmt, raw)
|
Returns the firmware version of the sensor if available. Currently only
I2C/NXT sensors support this.
|
def fw_version(self):
    """
    Return the firmware version of the sensor if available. Currently
    only I2C/NXT sensors support this.
    """
    self._fw_version, result = self.get_cached_attr_string(self._fw_version, 'fw_version')
    return result
|
Returns the polling period of the sensor in milliseconds. Writing sets the
polling period. Setting to 0 disables polling. Minimum value is hard
coded as 50 msec. Returns -EOPNOTSUPP if changing polling is not supported.
Currently only I2C/NXT sensors support changing the polling period.
|
def poll_ms(self):
    """
    Return the polling period of the sensor in milliseconds. Writing sets
    the polling period; setting to 0 disables polling. The minimum value
    is hard coded as 50 msec. Returns -EOPNOTSUPP if changing polling is
    not supported. Currently only I2C/NXT sensors support changing the
    polling period.
    """
    self._poll_ms, result = self.get_attr_int(self._poll_ms, 'poll_ms')
    return result
|
Return the framebuffer file descriptor.
Try to use the FRAMEBUFFER
environment variable if fbdev is not given. Use '/dev/fb0' by
default.
|
def _open_fbdev(fbdev=None):
    """Open the framebuffer device and return its file descriptor.

    When *fbdev* is not given, fall back to the FRAMEBUFFER environment
    variable, and finally to '/dev/fb0'.
    """
    device = fbdev if fbdev else os.getenv('FRAMEBUFFER', '/dev/fb0')
    return os.open(device, os.O_RDWR)
|
Return the fix screen info from the framebuffer file descriptor.
|
def _get_fix_info(fbfid):
    """Query and return the fixed screen info for framebuffer fd *fbfid*."""
    info = FbMem.FixScreenInfo()
    # ioctl fills the structure in place.
    fcntl.ioctl(fbfid, FbMem.FBIOGET_FSCREENINFO, info)
    return info
|
Return the var screen info from the framebuffer file descriptor.
|
def _get_var_info(fbfid):
    """Query and return the variable screen info for framebuffer fd *fbfid*."""
    info = FbMem.VarScreenInfo()
    # ioctl fills the structure in place.
    fcntl.ioctl(fbfid, FbMem.FBIOGET_VSCREENINFO, info)
    return info
|
Applies pending changes to the screen.
Nothing will be drawn on the screen until this function is called.
|
def update(self):
    """
    Apply pending changes to the screen. Nothing is drawn on the screen
    until this method is called.
    """
    bpp = self.var_info.bits_per_pixel

    if bpp == 1:
        # Monochrome: pack the image 1 bit per pixel, LSB first.
        packed = self._img.tobytes("raw", "1;R")
        self.mmap[:len(packed)] = packed
    elif bpp == 16:
        self.mmap[:] = self._img_to_rgb565_bytes()
    elif bpp == 32:
        self.mmap[:] = self._img.convert("RGB").tobytes("raw", "XRGB")
    else:
        raise Exception("Not supported - platform %s with bits_per_pixel %s" %
                        (self.platform, bpp))
|
Map the framebuffer memory.
|
def _map_fb_memory(fbfid, fix_info):
    """Memory-map the framebuffer and return the mmap object."""
    # Shared read/write mapping over the whole framebuffer.
    return mmap.mmap(fbfid,
                     fix_info.smem_len,
                     flags=mmap.MAP_SHARED,
                     prot=mmap.PROT_READ | mmap.PROT_WRITE,
                     offset=0)
|
Draw a line from (x1, y1) to (x2, y2)
|
def line(self, clear_screen=True, x1=10, y1=10, x2=50, y2=50, line_color='black', width=1):
    """
    Draw a line from (x1, y1) to (x2, y2).
    """
    if clear_screen:
        self.clear()

    endpoints = (x1, y1, x2, y2)
    return self.draw.line(endpoints, fill=line_color, width=width)
|
Draw a circle of 'radius' centered at (x, y)
|
def circle(self, clear_screen=True, x=50, y=50, radius=40, fill_color='black', outline_color='black'):
    """
    Draw a circle of 'radius' centered at (x, y).
    """
    if clear_screen:
        self.clear()

    # Bounding box of the circle: top-left and bottom-right corners.
    bbox = (x - radius, y - radius, x + radius, y + radius)
    return self.draw.ellipse(bbox, fill=fill_color, outline=outline_color)
|
Draw a rectangle where the top left corner is at (x1, y1) and the
bottom right corner is at (x2, y2)
|
def rectangle(self, clear_screen=True, x1=10, y1=10, x2=80, y2=40, fill_color='black', outline_color='black'):
    """
    Draw a rectangle whose top left corner is at (x1, y1) and bottom
    right corner is at (x2, y2).
    """
    if clear_screen:
        self.clear()

    corners = (x1, y1, x2, y2)
    return self.draw.rectangle(corners, fill=fill_color, outline=outline_color)
|
Draw a single pixel at (x, y)
|
def point(self, clear_screen=True, x=10, y=10, point_color='black'):
    """
    Draw a single pixel at (x, y).
    """
    if clear_screen:
        self.clear()

    coords = (x, y)
    return self.draw.point(coords, fill=point_color)
|
Display `text` starting at pixel (x, y).
The EV3 display is 178x128 pixels
- (0, 0) would be the top left corner of the display
- (89, 64) would be right in the middle of the display
'text_color' : PIL says it supports "common HTML color names". There
are 140 HTML color names listed here that are supported by all modern
browsers. This is probably a good list to start with.
https://www.w3schools.com/colors/colors_names.asp
'font' : can be any font displayed here
http://ev3dev-lang.readthedocs.io/projects/python-ev3dev/en/ev3dev-stretch/display.html#bitmap-fonts
- If font is a string, it is the name of a font to be loaded.
- If font is a Font object, returned from :meth:`ev3dev2.fonts.load`, then it is
used directly. This is desirable for faster display times.
|
def text_pixels(self, text, clear_screen=True, x=0, y=0, text_color='black', font=None):
    """
    Display `text` starting at pixel (x, y).

    The EV3 display is 178x128 pixels:
    - (0, 0) is the top left corner of the display
    - (89, 64) is right in the middle of the display

    'text_color': PIL says it supports "common HTML color names"; the
    140 HTML color names supported by all modern browsers are a good
    list to start with: https://www.w3schools.com/colors/colors_names.asp

    'font': can be any font displayed here
    http://ev3dev-lang.readthedocs.io/projects/python-ev3dev/en/ev3dev-stretch/display.html#bitmap-fonts
    - If font is a string, it is the name of a font to be loaded.
    - If font is a Font object, returned from :meth:`ev3dev2.fonts.load`,
      it is used directly. This is desirable for faster display times.
    """
    if clear_screen:
        self.clear()

    if font is None:
        return self.draw.text((x, y), text, fill=text_color)

    # A string names a bundled bitmap font; load it before drawing.
    if isinstance(font, str):
        assert font in fonts.available(), "%s is an invalid font" % font
        font = fonts.load(font)
    return self.draw.text((x, y), text, fill=text_color, font=font)
|
Display 'text' starting at grid (x, y)
The EV3 display can be broken down in a grid that is 22 columns wide
and 12 rows tall. Each column is 8 pixels wide and each row is 10
pixels tall.
'text_color' : PIL says it supports "common HTML color names". There
are 140 HTML color names listed here that are supported by all modern
browsers. This is probably a good list to start with.
https://www.w3schools.com/colors/colors_names.asp
'font' : can be any font displayed here
http://ev3dev-lang.readthedocs.io/projects/python-ev3dev/en/ev3dev-stretch/display.html#bitmap-fonts
- If font is a string, it is the name of a font to be loaded.
- If font is a Font object, returned from :meth:`ev3dev2.fonts.load`, then it is
used directly. This is desirable for faster display times.
|
def text_grid(self, text, clear_screen=True, x=0, y=0, text_color='black', font=None):
    """
    Display 'text' starting at grid (x, y).

    The EV3 display can be broken down in a grid that is 22 columns wide
    and 12 rows tall. Each column is 8 pixels wide and each row is 10
    pixels tall.

    'text_color': PIL says it supports "common HTML color names"; the
    140 HTML color names supported by all modern browsers are a good
    list to start with: https://www.w3schools.com/colors/colors_names.asp

    'font': can be any font displayed here
    http://ev3dev-lang.readthedocs.io/projects/python-ev3dev/en/ev3dev-stretch/display.html#bitmap-fonts
    - If font is a string, it is the name of a font to be loaded.
    - If font is a Font object, returned from :meth:`ev3dev2.fonts.load`,
      it is used directly. This is desirable for faster display times.
    """
    assert 0 <= x < Display.GRID_COLUMNS, \
        "grid columns must be between 0 and %d, %d was requested" % (Display.GRID_COLUMNS - 1, x)
    assert 0 <= y < Display.GRID_ROWS, \
        "grid rows must be between 0 and %d, %d was requested" % (Display.GRID_ROWS - 1, y)

    # Translate grid coordinates into pixel coordinates.
    pixel_x = x * Display.GRID_COLUMN_PIXELS
    pixel_y = y * Display.GRID_ROW_PIXELS
    return self.text_pixels(text, clear_screen, pixel_x, pixel_y, text_color, font)
|
Utility function used by Sound class for building the note frequencies table
|
def _make_scales(notes):
    """ Utility function used by Sound class for building the note frequencies table """
    # Each entry may name several enharmonic symbols separated by '/'
    # (e.g. 'C#4/Db4'); all of them map to the same rounded frequency.
    return {
        symbol: round(freq)
        for note, freq in notes
        for symbol in note.split('/')
    }
|
Call beep command with the provided arguments (if any).
See `beep man page`_ and google `linux beep music`_ for inspiration.
:param string args: Any additional arguments to be passed to ``beep`` (see the `beep man page`_ for details)
:param play_type: The behavior of ``beep`` once playback has been initiated
:type play_type: ``Sound.PLAY_WAIT_FOR_COMPLETE`` or ``Sound.PLAY_NO_WAIT_FOR_COMPLETE``
:return: When ``Sound.PLAY_NO_WAIT_FOR_COMPLETE`` is specified, returns the spawned subprocess from ``subprocess.Popen``; ``None`` otherwise
.. _`beep man page`: https://linux.die.net/man/1/beep
.. _`linux beep music`: https://www.google.com/search?q=linux+beep+music
|
def beep(self, args='', play_type=PLAY_WAIT_FOR_COMPLETE):
    """
    Call the beep command with the provided arguments (if any).
    See `beep man page`_ and google `linux beep music`_ for inspiration.

    :param string args: Any additional arguments to be passed to ``beep``
        (see the `beep man page`_ for details)
    :param play_type: The behavior of ``beep`` once playback has been initiated
    :type play_type: ``Sound.PLAY_WAIT_FOR_COMPLETE`` or ``Sound.PLAY_NO_WAIT_FOR_COMPLETE``
    :return: When ``Sound.PLAY_NO_WAIT_FOR_COMPLETE`` is specified, returns
        the spawned subprocess from ``subprocess.Popen``; ``None`` otherwise

    .. _`beep man page`: https://linux.die.net/man/1/beep
    .. _`linux beep music`: https://www.google.com/search?q=linux+beep+music
    """
    with open(os.devnull, 'w') as devnull:
        # FIX: renamed local 'subprocess' -> 'process' so it no longer
        # shadows the subprocess module.
        process = Popen(shlex.split('/usr/bin/beep %s' % args), stdout=devnull)
        if play_type == Sound.PLAY_WAIT_FOR_COMPLETE:
            process.wait()
            return None
        return process
|
.. rubric:: tone(tone_sequence)
Play tone sequence.
Here is a cheerful example::
my_sound = Sound()
my_sound.tone([
(392, 350, 100), (392, 350, 100), (392, 350, 100), (311.1, 250, 100),
(466.2, 25, 100), (392, 350, 100), (311.1, 250, 100), (466.2, 25, 100),
(392, 700, 100), (587.32, 350, 100), (587.32, 350, 100),
(587.32, 350, 100), (622.26, 250, 100), (466.2, 25, 100),
(369.99, 350, 100), (311.1, 250, 100), (466.2, 25, 100), (392, 700, 100),
(784, 350, 100), (392, 250, 100), (392, 25, 100), (784, 350, 100),
(739.98, 250, 100), (698.46, 25, 100), (659.26, 25, 100),
(622.26, 25, 100), (659.26, 50, 400), (415.3, 25, 200), (554.36, 350, 100),
(523.25, 250, 100), (493.88, 25, 100), (466.16, 25, 100), (440, 25, 100),
(466.16, 50, 400), (311.13, 25, 200), (369.99, 350, 100),
(311.13, 250, 100), (392, 25, 100), (466.16, 350, 100), (392, 250, 100),
(466.16, 25, 100), (587.32, 700, 100), (784, 350, 100), (392, 250, 100),
(392, 25, 100), (784, 350, 100), (739.98, 250, 100), (698.46, 25, 100),
(659.26, 25, 100), (622.26, 25, 100), (659.26, 50, 400), (415.3, 25, 200),
(554.36, 350, 100), (523.25, 250, 100), (493.88, 25, 100),
(466.16, 25, 100), (440, 25, 100), (466.16, 50, 400), (311.13, 25, 200),
(392, 350, 100), (311.13, 250, 100), (466.16, 25, 100),
(392.00, 300, 150), (311.13, 250, 100), (466.16, 25, 100), (392, 700)
])
Have also a look at :py:meth:`play_song` for a more musician-friendly way of doing, which uses
the conventional notation for notes and durations.
:param list[tuple(float,float,float)] tone_sequence: The sequence of tones to play. The first number of each tuple is frequency in Hz, the second is duration in milliseconds, and the third is delay in milliseconds between this and the next tone in the sequence.
:param play_type: The behavior of ``tone`` once playback has been initiated
:type play_type: ``Sound.PLAY_WAIT_FOR_COMPLETE`` or ``Sound.PLAY_NO_WAIT_FOR_COMPLETE``
:return: When ``Sound.PLAY_NO_WAIT_FOR_COMPLETE`` is specified, returns the spawned subprocess from ``subprocess.Popen``; ``None`` otherwise
.. rubric:: tone(frequency, duration)
Play single tone of given frequency and duration.
:param float frequency: The frequency of the tone in Hz
:param float duration: The duration of the tone in milliseconds
:param play_type: The behavior of ``tone`` once playback has been initiated
:type play_type: ``Sound.PLAY_WAIT_FOR_COMPLETE`` or ``Sound.PLAY_NO_WAIT_FOR_COMPLETE``
:return: When ``Sound.PLAY_NO_WAIT_FOR_COMPLETE`` is specified, returns the spawned subprocess from ``subprocess.Popen``; ``None`` otherwise
|
def tone(self, *args, play_type=PLAY_WAIT_FOR_COMPLETE):
    """
    .. rubric:: tone(tone_sequence)

    Play a tone sequence. ``tone_sequence`` is a list of tuples of up to
    three numbers each: the first is the frequency in Hz, the second the
    duration in milliseconds, and the third the delay in milliseconds
    between this and the next tone in the sequence.

    Have also a look at :py:meth:`play_song` for a more musician-friendly
    way of doing, which uses the conventional notation for notes and
    durations.

    .. rubric:: tone(frequency, duration)

    Play a single tone of the given frequency (Hz) and duration (ms).

    :param play_type: The behavior of ``tone`` once playback has been initiated
    :type play_type: ``Sound.PLAY_WAIT_FOR_COMPLETE`` or
        ``Sound.PLAY_NO_WAIT_FOR_COMPLETE``
    :return: When ``Sound.PLAY_NO_WAIT_FOR_COMPLETE`` is specified, the
        spawned subprocess from ``subprocess.Popen``; ``None`` otherwise
    """
    def beep_args(frequency=None, duration=None, delay=None):
        # Build the beep(1) option string for a single tone; each option
        # is only emitted when the corresponding value was supplied.
        parts = []
        if frequency is not None:
            parts.append('-f %s ' % frequency)
        if duration is not None:
            parts.append('-l %s ' % duration)
        if delay is not None:
            parts.append('-D %s ' % delay)
        return ''.join(parts)

    def play_tone_sequence(tone_sequence):
        # beep chains multiple tones with ' -n '.
        joined = ' -n '.join(beep_args(*tone) for tone in tone_sequence)
        return self.beep(joined, play_type=play_type)

    if len(args) == 1:
        return play_tone_sequence(args[0])
    if len(args) == 2:
        return play_tone_sequence([(args[0], args[1])])
    raise Exception("Unsupported number of parameters in Sound.tone(): expected 1 or 2, got " + str(len(args)))
|
Play a single tone, specified by its frequency, duration, volume and final delay.
:param int frequency: the tone frequency, in Hertz
:param float duration: Tone duration, in seconds
:param float delay: Delay after tone, in seconds (can be useful when chaining calls to ``play_tone``)
:param int volume: The play volume, in percent of maximum volume
:param play_type: The behavior of ``play_tone`` once playback has been initiated
:type play_type: ``Sound.PLAY_WAIT_FOR_COMPLETE``, ``Sound.PLAY_NO_WAIT_FOR_COMPLETE`` or ``Sound.PLAY_LOOP``
:return: When ``Sound.PLAY_NO_WAIT_FOR_COMPLETE`` is specified, returns the PID of the underlying beep command; ``None`` otherwise
:raises ValueError: if invalid parameter
|
def play_tone(self, frequency, duration, delay=0.0, volume=100,
              play_type=PLAY_WAIT_FOR_COMPLETE):
    """ Play a single tone, specified by its frequency, duration, volume and final delay.

    :param int frequency: the tone frequency, in Hertz
    :param float duration: Tone duration, in seconds
    :param float delay: Delay after tone, in seconds (can be useful when chaining calls to ``play_tone``)
    :param int volume: The play volume, in percent of maximum volume
    :param play_type: The behavior of ``play_tone`` once playback has been initiated
    :type play_type: ``Sound.PLAY_WAIT_FOR_COMPLETE``, ``Sound.PLAY_NO_WAIT_FOR_COMPLETE`` or ``Sound.PLAY_LOOP``
    :return: When ``Sound.PLAY_NO_WAIT_FOR_COMPLETE`` is specified, returns the spawned subprocess; ``None`` otherwise
    :raises ValueError: if invalid parameter
    """
    self._validate_play_type(play_type)

    if duration <= 0:
        raise ValueError('invalid duration (%s)' % duration)

    if delay < 0:
        raise ValueError('invalid delay (%s)' % delay)

    if not 0 < volume <= 100:
        raise ValueError('invalid volume (%s)' % volume)

    self.set_volume(volume)

    # tone() works in milliseconds.
    duration_ms = int(duration * 1000)
    delay_ms = int(delay * 1000)

    # FIX: propagate tone()'s return value; previously the subprocess
    # handle documented for PLAY_NO_WAIT_FOR_COMPLETE was dropped, and
    # play_note()'s 'return self.play_tone(...)' always yielded None.
    return self.tone([(frequency, duration_ms, delay_ms)], play_type=play_type)
|
Plays a note, given by its name as defined in ``_NOTE_FREQUENCIES``.
:param string note: The note symbol with its octave number
:param float duration: Tone duration, in seconds
:param int volume: The play volume, in percent of maximum volume
:param play_type: The behavior of ``play_note`` once playback has been initiated
:type play_type: ``Sound.PLAY_WAIT_FOR_COMPLETE``, ``Sound.PLAY_NO_WAIT_FOR_COMPLETE`` or ``Sound.PLAY_LOOP``
:return: When ``Sound.PLAY_NO_WAIT_FOR_COMPLETE`` is specified, returns the PID of the underlying beep command; ``None`` otherwise
:raises ValueError: if invalid parameter (note, duration,...)
|
def play_note(self, note, duration, volume=100, play_type=PLAY_WAIT_FOR_COMPLETE):
    """ Plays a note, given by its name as defined in ``_NOTE_FREQUENCIES``.

    :param string note: The note symbol with its octave number
    :param float duration: Tone duration, in seconds
    :param int volume: The play volume, in percent of maximum volume
    :param play_type: The behavior of ``play_note`` once playback has been initiated
    :type play_type: ``Sound.PLAY_WAIT_FOR_COMPLETE``, ``Sound.PLAY_NO_WAIT_FOR_COMPLETE`` or ``Sound.PLAY_LOOP``
    :return: When ``Sound.PLAY_NO_WAIT_FOR_COMPLETE`` is specified, returns the PID of the underlying beep command; ``None`` otherwise
    :raises ValueError: if invalid parameter (note, duration,...)
    """
    self._validate_play_type(play_type)

    # Look up the upper-cased symbol first, then the symbol as given.
    # FIX: the previous dict.get(note.upper(), dict[note]) evaluated the
    # fallback eagerly, so a lowercase note (e.g. 'a4') raised even when
    # its upper-cased key existed in the table.
    try:
        freq = self._NOTE_FREQUENCIES[note.upper()]
    except KeyError:
        try:
            freq = self._NOTE_FREQUENCIES[note]
        except KeyError:
            raise ValueError('invalid note (%s)' % note)

    if duration <= 0:
        raise ValueError('invalid duration (%s)' % duration)

    if not 0 < volume <= 100:
        raise ValueError('invalid volume (%s)' % volume)

    return self.play_tone(freq, duration=duration, volume=volume, play_type=play_type)
|
Play a sound file (wav format) at a given volume.
:param string wav_file: The sound file path
:param int volume: The play volume, in percent of maximum volume
:param play_type: The behavior of ``play_file`` once playback has been initiated
:type play_type: ``Sound.PLAY_WAIT_FOR_COMPLETE``, ``Sound.PLAY_NO_WAIT_FOR_COMPLETE`` or ``Sound.PLAY_LOOP``
:returns: When ``Sound.PLAY_NO_WAIT_FOR_COMPLETE`` is specified, returns the spawn subprocess from ``subprocess.Popen``; ``None`` otherwise
|
def play_file(self, wav_file, volume=100, play_type=PLAY_WAIT_FOR_COMPLETE):
    """ Play a sound file (wav format) at a given volume.

    :param string wav_file: The sound file path
    :param int volume: The play volume, in percent of maximum volume
    :param play_type: The behavior of ``play_file`` once playback has been initiated
    :type play_type: ``Sound.PLAY_WAIT_FOR_COMPLETE``, ``Sound.PLAY_NO_WAIT_FOR_COMPLETE`` or ``Sound.PLAY_LOOP``
    :returns: When ``Sound.PLAY_NO_WAIT_FOR_COMPLETE`` is specified, returns the spawned subprocess from ``subprocess.Popen``; ``None`` otherwise
    """
    if not 0 < volume <= 100:
        raise ValueError('invalid volume (%s)' % volume)

    if not wav_file.endswith(".wav"):
        raise ValueError('invalid sound file (%s), only .wav files are supported' % wav_file)

    if not os.path.isfile(wav_file):
        raise ValueError("%s does not exist" % wav_file)

    self.set_volume(volume)
    self._validate_play_type(play_type)

    # The aplay invocation is identical for every play_type.
    aplay_cmd = shlex.split('/usr/bin/aplay -q "%s"' % wav_file)

    with open(os.devnull, 'w') as devnull:
        if play_type == Sound.PLAY_WAIT_FOR_COMPLETE:
            Popen(aplay_cmd, stdout=devnull).wait()
        elif play_type == Sound.PLAY_NO_WAIT_FOR_COMPLETE:
            # Do not wait, run in the background.
            return Popen(aplay_cmd, stdout=devnull)
        elif play_type == Sound.PLAY_LOOP:
            while True:
                Popen(aplay_cmd, stdout=devnull).wait()
|
Speak the given text aloud.
Uses the ``espeak`` external command.
:param string text: The text to speak
:param string espeak_opts: ``espeak`` command options (advanced usage)
:param int volume: The play volume, in percent of maximum volume
:param play_type: The behavior of ``speak`` once playback has been initiated
:type play_type: ``Sound.PLAY_WAIT_FOR_COMPLETE``, ``Sound.PLAY_NO_WAIT_FOR_COMPLETE`` or ``Sound.PLAY_LOOP``
:returns: When ``Sound.PLAY_NO_WAIT_FOR_COMPLETE`` is specified, returns the spawn subprocess from ``subprocess.Popen``; ``None`` otherwise
|
def speak(self, text, espeak_opts='-a 200 -s 130', volume=100, play_type=PLAY_WAIT_FOR_COMPLETE):
    """ Speak the given text aloud.

    Uses the ``espeak`` external command.

    :param string text: The text to speak
    :param string espeak_opts: ``espeak`` command options (advanced usage)
    :param int volume: The play volume, in percent of maximum volume
    :param play_type: The behavior of ``speak`` once playback has been initiated
    :type play_type: ``Sound.PLAY_WAIT_FOR_COMPLETE``, ``Sound.PLAY_NO_WAIT_FOR_COMPLETE`` or ``Sound.PLAY_LOOP``
    :returns: When ``Sound.PLAY_NO_WAIT_FOR_COMPLETE`` is specified, returns the spawned subprocess from ``subprocess.Popen``; ``None`` otherwise
    """
    self._validate_play_type(play_type)
    self.set_volume(volume)

    # FIX: Popen gets an argument list (shell=False), so the text must be
    # passed verbatim. Wrapping it in shlex.quote() handed espeak literal
    # quote/escape characters for any text containing spaces or quotes.
    cmd_line = ['/usr/bin/espeak', '--stdout'] + shlex.split(espeak_opts) + [text]
    aplay_cmd_line = shlex.split('/usr/bin/aplay -q')

    def pipe_to_aplay(devnull):
        # espeak writes a WAV stream to stdout, which aplay then plays.
        espeak = Popen(cmd_line, stdout=PIPE)
        return Popen(aplay_cmd_line, stdin=espeak.stdout, stdout=devnull)

    with open(os.devnull, 'w') as n:
        if play_type == Sound.PLAY_WAIT_FOR_COMPLETE:
            pipe_to_aplay(n).wait()
        elif play_type == Sound.PLAY_NO_WAIT_FOR_COMPLETE:
            return pipe_to_aplay(n)
        elif play_type == Sound.PLAY_LOOP:
            while True:
                pipe_to_aplay(n).wait()
|
:returns: The detected sound channel
:rtype: string
|
def _get_channel(self):
    """
    :returns: The detected sound channel
    :rtype: string
    """
    if self.channel is None:
        # Pick the first simple mixer control reported by
        # 'amixer scontrols', whose output lines look like:
        #
        #     Simple mixer control 'Master',0
        #     Simple mixer control 'Capture',0
        out = check_output(['amixer', 'scontrols']).decode()
        match = re.search(r"'(?P<channel>[^']+)'", out)
        # Fall back to 'Playback', the only channel on the EV3.
        self.channel = match.group('channel') if match else 'Playback'

    return self.channel
|
Sets the sound volume to the given percentage [0-100] by calling
``amixer -q set <channel> <pct>%``.
If the channel is not specified, it tries to determine the default one
by running ``amixer scontrols``. If that fails as well, it uses the
``Playback`` channel, as that is the only channel on the EV3.
|
def set_volume(self, pct, channel=None):
    """
    Sets the sound volume to the given percentage [0-100] by calling
    ``amixer -q set <channel> <pct>%``.

    If the channel is not specified, it tries to determine the default one
    by running ``amixer scontrols``. If that fails as well, it uses the
    ``Playback`` channel, as that is the only channel on the EV3.
    """
    target = channel if channel is not None else self._get_channel()
    cmd_line = '/usr/bin/amixer -q set {0} {1:d}%'.format(target, pct)
    Popen(shlex.split(cmd_line)).wait()
|
Gets the current sound volume by parsing the output of
``amixer get <channel>``.
If the channel is not specified, it tries to determine the default one
by running ``amixer scontrols``. If that fails as well, it uses the
``Playback`` channel, as that is the only channel on the EV3.
|
def get_volume(self, channel=None):
    """
    Gets the current sound volume by parsing the output of
    ``amixer get <channel>``.

    If the channel is not specified, it tries to determine the default one
    by running ``amixer scontrols``. If that fails as well, it uses the
    ``Playback`` channel, as that is the only channel on the EV3.
    """
    if channel is None:
        channel = self._get_channel()

    out = check_output(['amixer', 'get', channel]).decode()
    match = re.search(r'\[(?P<volume>\d+)%\]', out)
    if match is None:
        raise Exception('Failed to parse output of `amixer get {}`'.format(channel))
    return int(match.group('volume'))
|
Plays a song provided as a list of tuples containing the note name and its
value using music conventional notation instead of numerical values for frequency
and duration.
It supports symbolic notes (e.g. ``A4``, ``D#3``, ``Gb5``) and durations (e.g. ``q``, ``h``).
For an exhaustive list of accepted note symbols and values, have a look at the ``_NOTE_FREQUENCIES``
and ``_NOTE_VALUES`` private dictionaries in the source code.
The value can be suffixed by modifiers:
- a *divider* introduced by a ``/`` to obtain triplets for instance
(e.g. ``q/3`` for a triplet of eight note)
- a *multiplier* introduced by ``*`` (e.g. ``*1.5`` is a dotted note).
Shortcuts exist for common modifiers:
- ``3`` produces a triplet member note. For instance `e3` gives a triplet of eight notes,
i.e. 3 eight notes in the duration of a single quarter. You must ensure that 3 triplets
notes are defined in sequence to match the count, otherwise the result will not be the
expected one.
- ``.`` produces a dotted note, i.e. which duration is one and a half the base one. Double dots
are not currently supported.
Example::
>>> # A long time ago in a galaxy far,
>>> # far away...
>>> Sound.play_song((
>>> ('D4', 'e3'), # intro anacrouse
>>> ('D4', 'e3'),
>>> ('D4', 'e3'),
>>> ('G4', 'h'), # meas 1
>>> ('D5', 'h'),
>>> ('C5', 'e3'), # meas 2
>>> ('B4', 'e3'),
>>> ('A4', 'e3'),
>>> ('G5', 'h'),
>>> ('D5', 'q'),
>>> ('C5', 'e3'), # meas 3
>>> ('B4', 'e3'),
>>> ('A4', 'e3'),
>>> ('G5', 'h'),
>>> ('D5', 'q'),
>>> ('C5', 'e3'), # meas 4
>>> ('B4', 'e3'),
>>> ('C5', 'e3'),
>>> ('A4', 'h.'),
>>> ))
.. important::
Only 4/4 signature songs are supported with respect to note durations.
:param iterable[tuple(string, string)] song: the song
:param int tempo: the song tempo, given in quarters per minute
:param float delay: delay between notes (in seconds)
:return: the spawn subprocess from ``subprocess.Popen``
:raises ValueError: if invalid note in song or invalid play parameters
|
def play_song(self, song, tempo=120, delay=0.05):
    """ Plays a song provided as a list of tuples containing the note name and its
    value using music conventional notation instead of numerical values for frequency
    and duration.

    It supports symbolic notes (e.g. ``A4``, ``D#3``, ``Gb5``) and durations (e.g. ``q``, ``h``).
    For an exhaustive list of accepted note symbols and values, have a look at the ``_NOTE_FREQUENCIES``
    and ``_NOTE_VALUES`` private dictionaries in the source code.

    The value can be suffixed by modifiers:

    - a *divider* introduced by a ``/`` to obtain triplets for instance
      (e.g. ``q/3`` for a triplet of eighth notes)
    - a *multiplier* introduced by ``*`` (e.g. ``*1.5`` is a dotted note).

    Shortcuts exist for common modifiers:

    - ``3`` produces a triplet member note. For instance ``e3`` gives a triplet of eighth notes,
      i.e. 3 eighth notes in the duration of a single quarter. You must ensure that 3 triplet
      notes are defined in sequence to match the count, otherwise the result will not be the
      expected one.
    - ``.`` produces a dotted note, i.e. which duration is one and a half the base one. Double dots
      are not currently supported.

    Example::

        >>> # A long time ago in a galaxy far,
        >>> # far away...
        >>> Sound.play_song((
        >>>     ('D4', 'e3'),      # intro anacrouse
        >>>     ('D4', 'e3'),
        >>>     ('D4', 'e3'),
        >>>     ('G4', 'h'),       # meas 1
        >>>     ('D5', 'h'),
        >>>     ('C5', 'e3'),      # meas 2
        >>>     ('B4', 'e3'),
        >>>     ('A4', 'e3'),
        >>>     ('G5', 'h'),
        >>>     ('D5', 'q'),
        >>>     ('C5', 'e3'),      # meas 3
        >>>     ('B4', 'e3'),
        >>>     ('A4', 'e3'),
        >>>     ('G5', 'h'),
        >>>     ('D5', 'q'),
        >>>     ('C5', 'e3'),      # meas 4
        >>>     ('B4', 'e3'),
        >>>     ('C5', 'e3'),
        >>>     ('A4', 'h.'),
        >>> ))

    .. important::

        Only 4/4 signature songs are supported with respect to note durations.

    :param iterable[tuple(string, string)] song: the song
    :param int tempo: the song tempo, given in quarters per minute
    :param float delay: delay between notes (in seconds)

    :return: the spawn subprocess from ``subprocess.Popen``

    :raises ValueError: if invalid note in song or invalid play parameters
    """
    if tempo <= 0:
        raise ValueError('invalid tempo (%s)' % tempo)
    if delay < 0:
        raise ValueError('invalid delay (%s)' % delay)

    # Delay inserted between two consecutive notes, in milliseconds.
    delay_ms = int(delay * 1000)
    # Duration of a whole measure, in milliseconds.
    meas_duration_ms = 60000 / tempo * 4  # we only support 4/4 bars, hence "* 4"

    def beep_args(note, value):
        """ Builds the arguments string for producing a beep matching
        the requested note and value.

        Args:
            note (str): the note name and octave
            value (str): the note value expression

        Returns:
            str: the arguments to be passed to the beep command
        """
        # Accept lower-case note names; an unknown note raises KeyError,
        # which the caller converts into a ValueError below.
        freq = self._NOTE_FREQUENCIES.get(note.upper(), self._NOTE_FREQUENCIES[note])
        if '/' in value:
            # Explicit divider, e.g. 'q/3'.
            base, factor = value.split('/')
            duration_ms = meas_duration_ms * self._NOTE_VALUES[base] / float(factor)
        elif '*' in value:
            # Explicit multiplier, e.g. 'q*1.5'.
            base, factor = value.split('*')
            duration_ms = meas_duration_ms * self._NOTE_VALUES[base] * float(factor)
        elif value.endswith('.'):
            # Dotted note shortcut: one and a half times the base duration.
            base = value[:-1]
            duration_ms = meas_duration_ms * self._NOTE_VALUES[base] * 1.5
        elif value.endswith('3'):
            # Triplet member shortcut: two thirds of the base duration.
            base = value[:-1]
            duration_ms = meas_duration_ms * self._NOTE_VALUES[base] * 2 / 3
        else:
            duration_ms = meas_duration_ms * self._NOTE_VALUES[value]

        return '-f %d -l %d -D %d' % (freq, duration_ms, delay_ms)

    try:
        return self.beep(' -n '.join(
            [beep_args(note, value) for (note, value) in song]
        ))
    except KeyError as e:
        raise ValueError('invalid note (%s)' % e)
|
Look in /sys/class/board-info/ to determine the platform type.
This can return 'ev3', 'evb', 'pistorms', 'brickpi', 'brickpi3' or 'fake'.
|
def get_current_platform():
    """
    Look in /sys/class/board-info/ to determine the platform type.

    This can return 'ev3', 'evb', 'pistorms', 'brickpi', 'brickpi3' or 'fake'.
    Returns ``None`` when the board-info directory exists but contains no
    recognized model.
    """
    board_info_dir = '/sys/class/board-info/'

    # No sysfs board info (or an explicit FAKE_SYS override) means there is
    # no real hardware to query.
    if not os.path.exists(board_info_dir) or os.environ.get("FAKE_SYS"):
        return 'fake'

    for board in os.listdir(board_info_dir):
        uevent_filename = os.path.join(board_info_dir, board, 'uevent')

        if os.path.exists(uevent_filename):
            with open(uevent_filename, 'r') as fh:
                for line in fh.readlines():
                    # Split on the first '=' only: the value itself may
                    # contain '=' characters, which would otherwise make
                    # the 2-tuple unpacking raise ValueError.
                    (key, value) = line.strip().split('=', 1)

                    if key == 'BOARD_INFO_MODEL':
                        if value == 'LEGO MINDSTORMS EV3':
                            return 'ev3'
                        elif value in ('FatcatLab EVB', 'QuestCape'):
                            return 'evb'
                        elif value == 'PiStorms':
                            return 'pistorms'
                        # This is the same for both BrickPi and BrickPi+.
                        # There is not a way to tell the difference.
                        elif value == 'Dexter Industries BrickPi':
                            return 'brickpi'
                        elif value == 'Dexter Industries BrickPi3':
                            return 'brickpi3'
                        elif value == 'FAKE-SYS':
                            return 'fake'

    return None
|
This is a generator function that lists names of all devices matching the
provided parameters.
Parameters:
class_path: class path of the device, a subdirectory of /sys/class.
For example, '/sys/class/tacho-motor'.
name_pattern: pattern that device name should match.
For example, 'sensor*' or 'motor*'. Default value: '*'.
keyword arguments: used for matching the corresponding device
attributes. For example, address='outA', or
driver_name=['lego-ev3-us', 'lego-nxt-us']. When argument value
is a list, then a match against any entry of the list is
enough.
|
def list_device_names(class_path, name_pattern, **kwargs):
    """
    This is a generator function that lists names of all devices matching the
    provided parameters.

    Parameters:
        class_path: class path of the device, a subdirectory of /sys/class.
            For example, '/sys/class/tacho-motor'.
        name_pattern: pattern that device name should match.
            For example, 'sensor*' or 'motor*'. Default value: '*'.
        keyword arguments: used for matching the corresponding device
            attributes. For example, address='outA', or
            driver_name=['lego-ev3-us', 'lego-nxt-us']. When argument value
            is a list, then a match against any entry of the list is
            enough.
    """
    if not os.path.isdir(class_path):
        return

    def matches(attribute, pattern):
        """Return True when the attribute file's content contains the
        pattern (or any pattern from a list of patterns)."""
        try:
            with io.FileIO(attribute) as f:
                value = f.read().strip().decode()
        except Exception:
            # Unreadable or missing attribute file: treat it as a
            # non-match instead of aborting the whole enumeration.
            # (Was a bare `except:`, which also swallowed KeyboardInterrupt.)
            return False

        if isinstance(pattern, list):
            return any(p in value for p in pattern)

        return pattern in value

    for f in os.listdir(class_path):
        if fnmatch.fnmatch(f, name_pattern):
            path = class_path + '/' + f

            if all(matches(path + '/' + k, kwargs[k]) for k in kwargs):
                yield f
|
This is a generator function that takes same arguments as `Device` class
and enumerates all devices present in the system that match the provided
arguments.
Parameters:
class_name: class name of the device, a subdirectory of /sys/class.
For example, 'tacho-motor'.
name_pattern: pattern that device name should match.
For example, 'sensor*' or 'motor*'. Default value: '*'.
keyword arguments: used for matching the corresponding device
attributes. For example, address='outA', or
driver_name=['lego-ev3-us', 'lego-nxt-us']. When argument value
is a list, then a match against any entry of the list is
enough.
|
def list_devices(class_name, name_pattern, **kwargs):
    """
    This is a generator function that takes same arguments as `Device` class
    and enumerates all devices present in the system that match the provided
    arguments.

    Parameters:
        class_name: class name of the device, a subdirectory of /sys/class.
            For example, 'tacho-motor'.
        name_pattern: pattern that device name should match.
            For example, 'sensor*' or 'motor*'. Default value: '*'.
        keyword arguments: used for matching the corresponding device
            attributes. For example, address='outA', or
            driver_name=['lego-ev3-us', 'lego-nxt-us']. When argument value
            is a list, then a match against any entry of the list is
            enough.
    """
    full_class_path = abspath(Device.DEVICE_ROOT_PATH + '/' + class_name)
    matching_names = list_device_names(full_class_path, name_pattern, **kwargs)

    # Wrap each matching sysfs entry in a Device object, lazily.
    return (Device(class_name, device_name, name_exact=True)
            for device_name in matching_names)
|
Device attribute getter
|
def _get_attribute(self, attribute, name):
"""Device attribute getter"""
try:
if attribute is None:
attribute = self._attribute_file_open( name )
else:
attribute.seek(0)
return attribute, attribute.read().strip().decode()
except Exception as ex:
self._raise_friendly_access_error(ex, name)
|
Device attribute setter
|
def _set_attribute(self, attribute, name, value):
"""Device attribute setter"""
try:
if attribute is None:
attribute = self._attribute_file_open( name )
else:
attribute.seek(0)
if isinstance(value, str):
value = value.encode()
attribute.write(value)
attribute.flush()
except Exception as ex:
self._raise_friendly_access_error(ex, name)
return attribute
|
Close all file handles and stop all motors.
|
def shutdown(self):
    """Close all file handles and stop all motors."""
    # Signal the balance thread to leave its loop before touching motors.
    self.stop_balance.set()

    for motor in (self.motor_left, self.motor_right):
        motor.stop()

    # Release every sensor/motor file handle we hold open.
    for handle in (self.gyro_file,
                   self.touch_file,
                   self.encoder_left_file,
                   self.encoder_right_file,
                   self.dc_left_file,
                   self.dc_right_file):
        handle.close()
|
Function for fast reading from sensor files.
|
def _fast_read(self, infile):
"""Function for fast reading from sensor files."""
infile.seek(0)
return(int(infile.read().decode().strip()))
|
Function for fast writing to motor files.
|
def _fast_write(self, outfile, value):
"""Function for fast writing to motor files."""
outfile.truncate(0)
outfile.write(str(int(value)))
outfile.flush()
|
Function to set the duty cycle of the motors.
|
def _set_duty(self, motor_duty_file, duty, friction_offset,
voltage_comp):
"""Function to set the duty cycle of the motors."""
# Compensate for nominal voltage and round the input
duty_int = int(round(duty*voltage_comp))
# Add or subtract offset and clamp the value between -100 and 100
if duty_int > 0:
duty_int = min(100, duty_int + friction_offset)
elif duty_int < 0:
duty_int = max(-100, duty_int - friction_offset)
# Apply the signal to the motor
self._fast_write(motor_duty_file, duty_int)
|
Run the _balance method as a thread.
|
def balance(self):
    """Run the _balance method as a thread."""
    # Fire-and-forget: the thread loops until stop_balance is set.
    threading.Thread(target=self._balance).start()
|
Make the robot balance.
|
def _balance(self):
    """Make the robot balance.

    Runs until ``stop_balance`` is set. Each outer iteration: waits for a
    touch-sensor press, calibrates the gyro while the robot is held still,
    then runs the balancing control loop until the touch sensor is pressed
    again (the kill switch), after which the motors are stopped and the
    whole sequence restarts.
    """
    while True and not self.stop_balance.is_set():
        # Reset the motors
        self.motor_left.reset()  # Reset the encoder
        self.motor_right.reset()
        self.motor_left.run_direct()  # Set to run direct mode
        self.motor_right.run_direct()

        # Initialize variables representing physical signals
        # (more info on these in the docs)

        # The angle of "the motor", measured in raw units
        # (degrees for the EV3).
        # We will take the average of both motor positions as
        # "the motor" angle, which is essentially how far the middle
        # of the robot has travelled.
        motor_angle_raw = 0

        # The angle of the motor, converted to RAD (2*pi RAD
        # equals 360 degrees).
        motor_angle = 0

        # The reference angle of the motor. The robot will attempt to
        # drive forward or backward, such that its measured position
        # equals this reference (or close enough).
        motor_angle_ref = 0

        # The error: the deviation of the measured motor angle from the
        # reference. The robot attempts to make this zero, by driving
        # toward the reference.
        motor_angle_error = 0

        # We add up all of the motor angle error in time. If this value
        # gets out of hand, we can use it to drive the robot back to
        # the reference position a bit quicker.
        motor_angle_error_acc = 0

        # The motor speed, estimated by how far the motor has turned in
        # a given amount of time.
        motor_angular_speed = 0

        # The reference speed during manouvers: how fast we would like
        # to drive, measured in RAD per second.
        motor_angular_speed_ref = 0

        # The error: the deviation of the motor speed from the
        # reference speed.
        motor_angular_speed_error = 0

        # The 'voltage' signal we send to the motor.
        # We calculate a new value each time, just right to keep the
        # robot upright.
        motor_duty_cycle = 0

        # The raw value from the gyro sensor in rate mode.
        gyro_rate_raw = 0

        # The angular rate of the robot (how fast it is falling forward
        # or backward), measured in RAD per second.
        gyro_rate = 0

        # The gyro doesn't measure the angle of the robot, but we can
        # estimate this angle by keeping track of the gyro_rate value
        # in time.
        gyro_est_angle = 0

        # Over time, the gyro rate value can drift. This causes the
        # sensor to think it is moving even when it is perfectly still.
        # We keep track of this offset.
        gyro_offset = 0

        # Start
        log.info("Hold robot upright. Press touch sensor to start.")
        self.sound.speak("Press touch sensor to start.")
        self.touch.wait_for_bump()

        # Read battery voltage
        voltage_idle = self.power_supply.measured_volts
        voltage_comp = self.power_voltage_nominal / voltage_idle

        # Offset to limit friction deadlock
        friction_offset = int(round(self.pwr_friction_offset_nom *
                                    voltage_comp))

        # Timing settings for the program
        # Time of each loop, measured in seconds.
        loop_time_target = self.timing_loop_msec / 1000
        loop_count = 0  # Loop counter, starting at 0

        # A deque (a fifo array) which we'll use to keep track of
        # previous motor positions, which we can use to calculate the
        # rate of change (speed)
        motor_angle_hist = \
            deque([0], self.motor_angle_history_length)

        # The rate at which we'll update the gyro offset (precise
        # definition given in docs)
        gyro_drift_comp_rate = \
            self.gyro_drift_compensation_factor * \
            loop_time_target * RAD_PER_SEC_PER_RAW_GYRO_UNIT

        # Calibrate Gyro
        log.info("-----------------------------------")
        log.info("Calibrating...")

        # As you hold the robot still, determine the average sensor
        # value of 100 samples
        gyro_calibrate_count = 100
        for i in range(gyro_calibrate_count):
            gyro_offset = gyro_offset + self._fast_read(self.gyro_file)
            time.sleep(0.01)
        gyro_offset = gyro_offset / gyro_calibrate_count

        # Print the result
        log.info("gyro_offset: " + str(gyro_offset))
        log.info("-----------------------------------")
        log.info("GO!")
        log.info("-----------------------------------")
        log.info("Press Touch Sensor to re-start.")
        log.info("-----------------------------------")
        self.sound.beep()

        # Remember start time
        prog_start_time = time.time()

        if self.debug:
            # Data logging
            data = OrderedDict()
            loop_times = OrderedDict()
            data['loop_times'] = loop_times
            gyro_readings = OrderedDict()
            data['gyro_readings'] = gyro_readings

        # Initial fast read touch sensor value
        touch_pressed = False

        # Driving and Steering
        speed, steering = (0, 0)

        # Record start time of loop
        loop_start_time = time.time()

        # Balancing Loop
        while not touch_pressed and not self.stop_balance.is_set():
            loop_count += 1

            # Check for drive instructions and set speed / steering
            try:
                speed, steering = self.drive_queue.get_nowait()
                self.drive_queue.task_done()
            except queue.Empty:
                pass

            # Read the touch sensor (the kill switch)
            touch_pressed = self._fast_read(self.touch_file)

            # Read the Motor Position
            motor_angle_raw = ((self._fast_read(self.encoder_left_file) +
                                self._fast_read(self.encoder_right_file)) /
                               2.0)
            motor_angle = motor_angle_raw * RAD_PER_RAW_MOTOR_UNIT

            # Read the Gyro
            gyro_rate_raw = self._fast_read(self.gyro_file)

            # Busy wait for the loop to reach target time length
            loop_time = 0
            while (loop_time < loop_time_target):
                loop_time = time.time() - loop_start_time
                time.sleep(0.001)

            # Calculate most recent loop time
            loop_time = time.time() - loop_start_time

            # Set start time of next loop
            loop_start_time = time.time()

            if self.debug:
                # Log gyro data and loop time
                time_of_sample = time.time() - prog_start_time
                gyro_readings[time_of_sample] = gyro_rate_raw
                loop_times[time_of_sample] = loop_time * 1000.0

            # Calculate gyro rate (drift-corrected, in RAD per second)
            gyro_rate = (gyro_rate_raw - gyro_offset) * \
                RAD_PER_SEC_PER_RAW_GYRO_UNIT

            # Calculate Motor Parameters
            motor_angular_speed_ref = \
                speed * RAD_PER_SEC_PER_PERCENT_SPEED
            motor_angle_ref = motor_angle_ref + \
                motor_angular_speed_ref * loop_time_target
            motor_angle_error = motor_angle - motor_angle_ref

            # Compute Motor Speed (rate of change over the history window)
            motor_angular_speed = \
                ((motor_angle - motor_angle_hist[0]) /
                 (self.motor_angle_history_length * loop_time_target))
            motor_angular_speed_error = motor_angular_speed
            motor_angle_hist.append(motor_angle)

            # Compute the motor duty cycle value: a weighted sum of all
            # feedback terms (the gains are the controller's tuning knobs).
            motor_duty_cycle = \
                (self.gain_gyro_angle * gyro_est_angle +
                 self.gain_gyro_rate * gyro_rate +
                 self.gain_motor_angle * motor_angle_error +
                 self.gain_motor_angular_speed *
                 motor_angular_speed_error +
                 self.gain_motor_angle_error_accumulated *
                 motor_angle_error_acc)

            # Apply the signal to the motor, and add steering
            self._set_duty(self.dc_right_file, motor_duty_cycle + steering,
                           friction_offset, voltage_comp)
            self._set_duty(self.dc_left_file, motor_duty_cycle - steering,
                           friction_offset, voltage_comp)

            # Update angle estimate and gyro offset estimate
            gyro_est_angle = gyro_est_angle + gyro_rate * \
                loop_time_target
            gyro_offset = (1 - gyro_drift_comp_rate) * \
                gyro_offset + gyro_drift_comp_rate * gyro_rate_raw

            # Update Accumulated Motor Error
            motor_angle_error_acc = motor_angle_error_acc + \
                motor_angle_error * loop_time_target

        # Closing down & Cleaning up

        # Loop end time, for stats
        prog_end_time = time.time()

        # Turn off the motors
        self._fast_write(self.dc_left_file, 0)
        self._fast_write(self.dc_right_file, 0)

        # Wait for the Touch Sensor to be released
        while self.touch.is_pressed:
            time.sleep(0.01)

        # Calculate loop time
        avg_loop_time = (prog_end_time - prog_start_time) / loop_count
        log.info("Loop time:" + str(avg_loop_time * 1000) + "ms")

        # Print a stop message
        log.info("-----------------------------------")
        log.info("STOP")
        log.info("-----------------------------------")

        if self.debug:
            # Dump logged data to file
            with open("data.txt", 'w') as data_file:
                json.dump(data, data_file)
|
Move robot.
|
def _move(self, speed=0, steering=0, seconds=None):
"""Move robot."""
self.drive_queue.put((speed, steering))
if seconds is not None:
time.sleep(seconds)
self.drive_queue.put((0, 0))
self.drive_queue.join()
|
Move robot forward.
|
def move_forward(self, seconds=None):
    """Drive the robot straight ahead at full speed (optionally for *seconds*)."""
    self._move(speed=SPEED_MAX, steering=0, seconds=seconds)
|
Move robot backward.
|
def move_backward(self, seconds=None):
    """Drive the robot straight back at full speed (optionally for *seconds*)."""
    self._move(speed=-SPEED_MAX, steering=0, seconds=seconds)
|
Rotate robot left.
|
def rotate_left(self, seconds=None):
    """Spin the robot in place to the left (optionally for *seconds*)."""
    self._move(speed=0, steering=STEER_MAX, seconds=seconds)
|
Rotate robot right.
|
def rotate_right(self, seconds=None):
    """Spin the robot in place to the right (optionally for *seconds*)."""
    self._move(speed=0, steering=-STEER_MAX, seconds=seconds)
|
Return our corresponding evdev device object
|
def evdev_device(self):
    """
    Return the evdev input device object whose name matches ours.
    """
    # Open every available input device, then pick the one with our name.
    all_devices = [evdev.InputDevice(path) for path in evdev.list_devices()]

    for candidate in all_devices:
        if candidate.name == self.evdev_device_name:
            return candidate

    raise Exception("%s: could not find evdev device '%s'" % (self, self.evdev_device_name))
|
Check for currently pressed buttons. If the new state differs from the
old state, call the appropriate button event handlers.
|
def process(self, new_state=None):
    """
    Check for currently pressed buttons. If the new state differs from the
    old state, call the appropriate button event handlers.
    """
    if new_state is None:
        new_state = set(self.buttons_pressed)

    old_state = self._state
    self._state = new_state

    # Buttons whose pressed/released status changed since last call.
    changed = new_state.symmetric_difference(old_state)

    for button in changed:
        handler = getattr(self, 'on_' + button)
        if handler is not None:
            handler(button in new_state)

    if self.on_change is not None and changed:
        self.on_change([(b, b in new_state) for b in changed])
|
Wait for the button to be pressed down and then released.
Both actions must happen within timeout_ms.
|
def wait_for_bump(self, buttons, timeout_ms=None):
    """
    Wait for the button(s) to be pressed down and then released.
    Both actions must happen within ``timeout_ms`` (``None`` waits forever).
    """
    started_at = time.time()

    if not self.wait_for_pressed(buttons, timeout_ms):
        return False

    if timeout_ms is not None:
        # Spend only the remaining budget waiting for the release.
        timeout_ms -= int((time.time() - started_at) * 1000)

    return self.wait_for_released(buttons, timeout_ms)
|
Returns list of names of pressed buttons.
|
def buttons_pressed(self):
    """
    Returns list of names of pressed buttons.
    """
    # Refresh every cached key-state buffer with an EVIOCGKEY ioctl.
    for buffer_name in self._buffer_cache:
        fcntl.ioctl(self._button_file(buffer_name), self.EVIOCGKEY, self._buffer_cache[buffer_name])

    pressed = []
    for button, info in self._buttons.items():
        state = self._buffer_cache[info['name']]
        bit = info['value']
        # Each key occupies one bit in the buffer: byte bit//8, bit bit%8.
        if state[bit // 8] & (1 << (bit % 8)):
            pressed.append(button)

    return pressed
|
Waits a maximum of 5 minutes for the orted process to start
|
def _orted_process():
    """Waits maximum of 5 minutes for orted process to start"""
    # Poll once per second for up to 5 minutes; returns None on timeout.
    for _ in range(5 * 60):
        orted = [proc for proc in psutil.process_iter(attrs=['name'])
                 if proc.info['name'] == 'orted']
        if orted:
            return orted
        time.sleep(1)
|
Checks if the connection to provided ``host`` and ``port`` is possible or not.
Args:
host (str): Hostname for the host to check connection.
port (int): Port name of the host to check connection on.
|
def _can_connect(host, port=22):  # type: (str, int) -> bool
    """Checks if the connection to provided ``host`` and ``port`` is possible or not.

    Args:
        host (str): Hostname for the host to check connection.
        port (int): Port name of the host to check connection on.
    """
    try:
        logger.debug('Testing connection to host %s', host)

        ssh_client = paramiko.SSHClient()
        ssh_client.load_system_host_keys()
        # Auto-accept unknown host keys: only reachability matters here.
        ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        ssh_client.connect(host, port=port)
        ssh_client.close()

        logger.info('Can connect to host %s', host)
        return True
    except Exception as e:
        # Any failure (DNS, refused, auth, ...) means "cannot connect".
        logger.info('Cannot connect to host %s', host)
        logger.info('Connection failed with exception: \n %s', str(e))
        return False
|
Parse custom MPI options provided by user. Known options default value will be overridden
and unknown options would be identified separately.
|
def _parse_custom_mpi_options(custom_mpi_options):
# type: (str) -> Tuple[argparse.Namespace, List[str]]
"""Parse custom MPI options provided by user. Known options default value will be overridden
and unknown options would be identified separately."""
parser = argparse.ArgumentParser()
parser.add_argument('--NCCL_DEBUG', default="INFO", type=str)
return parser.parse_known_args(custom_mpi_options.split())
|
The WorkerRunner proceeds as following:
- wait for the MPI Master to create its SSH daemon
- start its SSH daemon
- monitor the MPI orted process and wait it to finish the MPI execution
|
def run(self, wait=True, capture_error=False):  # type: (bool, bool) -> None
    """The WorkerRunner proceeds as following:

    - wait for the MPI Master to create its SSH daemon
    - start its SSH daemon
    - monitor the MPI orted process and wait it to finish the MPI execution

    Args:
        wait (bool): when True, block until the master is reachable before
            starting sshd, and afterwards block until the orted process exits.
        capture_error (bool): accepted for interface compatibility with other
            runners; not used by this method.
    """
    logger.info('Starting MPI run as worker node.')
    if wait:
        logger.info('Waiting for MPI Master to create SSH daemon.')
        self._wait_master_to_start()
    logger.info('MPI Master online, creating SSH daemon.')
    _start_sshd_daemon()

    if wait:
        logger.info('Waiting for MPI process to finish.')
        _wait_orted_process_to_finish()
        # NOTE(review): fixed grace period after orted exits — presumably
        # gives the master time to finish collecting results; confirm.
        time.sleep(30)
        logger.info('MPI process finished.')
|
Add a signal-based timeout to any block of code.
If multiple time units are specified, they will be added together to determine time limit.
Usage:
with timeout(seconds=5):
my_slow_function(...)
Args:
- seconds: The time limit, in seconds.
- minutes: The time limit, in minutes.
- hours: The time limit, in hours.
|
def timeout(seconds=0, minutes=0, hours=0):
    """
    Add a signal-based timeout to any block of code.
    If multiple time units are specified, they will be added together to determine time limit.

    Usage:
    with timeout(seconds=5):
        my_slow_function(...)

    Args:
        - seconds: The time limit, in seconds.
        - minutes: The time limit, in minutes.
        - hours: The time limit, in hours.

    Raises:
        TimeoutError: if the wrapped block runs longer than the limit.
    """
    limit = seconds + 60 * minutes + 3600 * hours

    def handler(signum, frame):  # pylint: disable=W0613
        raise TimeoutError('timed out after {} seconds'.format(limit))

    # Install our handler, remembering whatever was there before so we can
    # put it back afterwards (the original version leaked our handler).
    previous_handler = signal.signal(signal.SIGALRM, handler)
    try:
        signal.setitimer(signal.ITIMER_REAL, limit)
        yield
    finally:
        # Cancel the timer and restore the previous SIGALRM handler so
        # nesting and other SIGALRM users are not clobbered.
        signal.setitimer(signal.ITIMER_REAL, 0)
        signal.signal(signal.SIGALRM, previous_handler)
|
Prepare a Python script (or module) to be imported as a module.
If the script does not contain a setup.py file, it creates a minimal setup.
Args:
path (str): path to directory with the script or module.
name (str): name of the script or module.
|
def prepare(path, name):  # type: (str, str) -> None
    """Prepare a Python script (or module) to be imported as a module.
    If the script does not contain a setup.py file, it creates a minimal setup.

    Args:
        path (str): path to directory with the script or module.
        name (str): name of the script or module.
    """
    setup_path = os.path.join(path, 'setup.py')
    if not os.path.exists(setup_path):
        # Generate a minimal setup.py so pip can install the user code.
        data = textwrap.dedent("""
        from setuptools import setup
        setup(packages=[''],
              name="%s",
              version='1.0.0',
              include_package_data=True)
        """ % name)

        logger.info('Module %s does not provide a setup.py. \nGenerating setup.py' % name)

        _files.write_file(setup_path, data)

        # Build universal wheels (one wheel for both py2 and py3).
        data = textwrap.dedent("""
        [wheel]
        universal = 1
        """)

        logger.info('Generating setup.cfg')

        _files.write_file(os.path.join(path, 'setup.cfg'), data)

        # Ship every file with the package, excluding bytecode caches.
        data = textwrap.dedent("""
        recursive-include . *

        recursive-exclude . __pycache__*
        recursive-exclude . *.pyc
        recursive-exclude . *.pyo
        """)

        logger.info('Generating MANIFEST.in')

        _files.write_file(os.path.join(path, 'MANIFEST.in'), data)
|
Install a Python module in the executing Python environment.
Args:
path (str): Real path location of the Python module.
capture_error (bool): Default false. If True, the running process captures the
stderr, and appends it to the returned Exception message in case of errors.
|
def install(path, capture_error=False):  # type: (str, bool) -> None
    """Install a Python module in the executing Python environment.

    Args:
        path (str): Real path location of the Python module.
        capture_error (bool): Default false. If True, the running process captures the
            stderr, and appends it to the returned Exception message in case of errors.
    """
    pip_cmd = '%s -m pip install -U . ' % _process.python_executable()

    # Also install declared dependencies when a requirements.txt is present.
    if has_requirements(path):
        pip_cmd += '-r requirements.txt'

    logger.info('Installing module with the following command:\n%s', pip_cmd)

    _process.check_error(shlex.split(pip_cmd), _errors.InstallModuleError, cwd=path, capture_error=capture_error)
|
Download, prepare and install a compressed tar file from S3 or local directory as a module.
The SageMaker Python SDK saves the user provided scripts as compressed tar files in S3.
This function downloads this compressed file and, if provided, transforms it
into a module before installing it.
This method is the predecessor of :meth:`~sagemaker_containers.beta.framework.files.download_and_extract`
and has been kept for backward-compatibility purposes.
Args:
name (str): name of the script or module.
uri (str): the location of the module.
cache (bool): defaults to True. It will not download and install the module again if it is already installed.
|
def download_and_install(uri, name=DEFAULT_MODULE_NAME, cache=True):
    # type: (str, str, bool) -> None
    """Download, prepare and install a compressed tar file from S3 or local directory as a module.

    The SageMaker Python SDK saves the user provided scripts as compressed tar files in S3.
    This function downloads this compressed file and, if provided, transforms it
    into a module before installing it.

    This method is the predecessor of :meth:`~sagemaker_containers.beta.framework.files.download_and_extract`
    and has been kept for backward-compatibility purposes.

    Args:
        name (str): name of the script or module.
        uri (str): the location of the module.
        cache (bool): defaults to True. It will not download and install the module again if it is already installed.
    """
    if cache and exists(name):
        # Already installed and caching requested: nothing to do.
        return

    with _files.tmpdir() as tmpdir:
        if uri.startswith('s3://'):
            dst = os.path.join(tmpdir, 'tar_file')
            _files.s3_download(uri, dst)

            module_path = os.path.join(tmpdir, 'module_dir')
            os.makedirs(module_path)

            # NOTE(review): extractall trusts the member paths inside the
            # archive; this assumes the S3 tarball is trusted input — confirm.
            with tarfile.open(name=dst, mode='r:gz') as t:
                t.extractall(path=module_path)
        else:
            module_path = uri

        prepare(module_path, name)
        install(module_path)
|
Run Python module as a script.
Search sys.path for the named module and execute its contents as the __main__ module.
Since the argument is a module name, you must not give a file extension (.py). The module name should be a valid
absolute Python module name, but the implementation may not always enforce this (e.g. it may allow you to use a name
that includes a hyphen).
Package names (including namespace packages) are also permitted. When a package name is supplied instead of a
normal module, the interpreter will execute <pkg>.__main__ as the main module. This behaviour is deliberately
similar to the handling of directories and zipfiles that are passed to the interpreter as the script argument.
Note This option cannot be used with built-in modules and extension modules written in C, since they do not have
Python module files. However, it can still be used for precompiled modules, even if the original source file is
not available. If this option is given, the first element of sys.argv will be the full path to the module file (
while the module file is being located, the first element will be set to "-m"). As with the -c option,
the current directory will be added to the start of sys.path.
You can find more information at https://docs.python.org/3/using/cmdline.html#cmdoption-m
Example:
>>>import sagemaker_containers
>>>from sagemaker_containers.beta.framework import mapping, modules
>>>env = sagemaker_containers.training_env()
{'channel-input-dirs': {'training': '/opt/ml/input/training'}, 'model_dir': '/opt/ml/model', ...}
>>>hyperparameters = env.hyperparameters
{'batch-size': 128, 'model_dir': '/opt/ml/model'}
>>>args = mapping.to_cmd_args(hyperparameters)
['--batch-size', '128', '--model_dir', '/opt/ml/model']
>>>env_vars = mapping.to_env_vars()
['SAGEMAKER_CHANNELS':'training', 'SAGEMAKER_CHANNEL_TRAINING':'/opt/ml/input/training',
'MODEL_DIR':'/opt/ml/model', ...}
>>>modules.run('user_script', args, env_vars)
SAGEMAKER_CHANNELS=training SAGEMAKER_CHANNEL_TRAINING=/opt/ml/input/training \
SAGEMAKER_MODEL_DIR=/opt/ml/model python -m user_script --batch-size 128 --model_dir /opt/ml/model
Args:
module_name (str): module name in the same format required by python -m <module-name> cli command.
args (list): A list of program arguments.
env_vars (dict): A map containing the environment variables to be written.
capture_error (bool): Default false. If True, the running process captures the
stderr, and appends it to the returned Exception message in case of errors.
|
def run(module_name, args=None, env_vars=None, wait=True, capture_error=False):
    # type: (str, list, dict, bool, bool) -> subprocess.Popen
    """Execute a Python module as a script, equivalent to ``python -m <module_name>``.

    Searches sys.path for the named module and executes its contents as the
    __main__ module, forwarding ``args`` as command-line arguments. The module
    name must not include a file extension (.py); package names (including
    namespace packages) are also permitted, in which case ``<pkg>.__main__``
    is executed. See
    https://docs.python.org/3/using/cmdline.html#cmdoption-m for details.

    Args:
        module_name (str): module name in the format required by the
            ``python -m <module-name>`` CLI command.
        args (list): a list of program arguments.
        env_vars (dict): a map containing the environment variables to log
            alongside the invocation.
        wait (bool): when True, block until the module exits and check its
            exit code; otherwise launch it with subprocess and return the
            process object.
        capture_error (bool): Default False. If True, the running process
            captures stderr and appends it to the raised exception message
            in case of errors.

    Returns:
        subprocess.Popen: the launched process when ``wait`` is False.
    """
    command = [_process.python_executable(), '-m', module_name] + (args or [])
    _logging.log_script_invocation(command, env_vars or {})

    if not wait:
        return _process.create(command, _errors.ExecuteUserScriptError,
                               capture_error=capture_error)
    return _process.check_error(command, _errors.ExecuteUserScriptError,
                                capture_error=capture_error)
|
Download, prepare and install a compressed tar file from S3 or provided directory as a module.
SageMaker Python SDK saves the user provided scripts as compressed tar files in S3
https://github.com/aws/sagemaker-python-sdk.
This function downloads this compressed file, if provided, and transforms it as a module, and installs it.
Args:
name (str): name of the script or module.
uri (str): the location of the module.
cache (bool): default True. It will not download and install the module again if it is already installed.
Returns:
(module): the imported module
|
def import_module(uri, name=DEFAULT_MODULE_NAME, cache=None):  # type: (str, str, bool) -> module
    """Download, prepare and install a compressed tar file from S3 or a local directory as a module.

    The SageMaker Python SDK stores user-provided scripts as compressed tar
    files in S3 (https://github.com/aws/sagemaker-python-sdk). This function
    downloads that archive, transforms it into a module, installs it, and
    imports it.

    Args:
        uri (str): the location of the module.
        name (str): name of the script or module.
        cache (bool): deprecated; kept for backwards compatibility only.

    Returns:
        module: the imported module.
    """
    _warning_cache_deprecation(cache)
    _files.download_and_extract(uri, name, _env.code_dir)
    prepare(_env.code_dir, name)
    install(_env.code_dir)
    try:
        imported = importlib.import_module(name)
        # reload so a previously imported stale version is replaced
        six.moves.reload_module(imported)
    except Exception as e:  # pylint: disable=broad-except
        six.reraise(_errors.ImportModuleError, _errors.ImportModuleError(e), sys.exc_info()[2])
    else:
        return imported
|
Download, prepare and executes a compressed tar file from S3 or provided directory as a module.
SageMaker Python SDK saves the user provided scripts as compressed tar files in S3
https://github.com/aws/sagemaker-python-sdk.
This function downloads this compressed file, transforms it as a module, and executes it.
Args:
uri (str): the location of the module.
args (list): A list of program arguments.
env_vars (dict): A map containing the environment variables to be written.
name (str): name of the script or module.
cache (bool): If True it will avoid downloading the module again, if already installed.
wait (bool): If True run_module will wait for the user module to exit and check the exit code,
otherwise it will launch the user module with subprocess and return the process object.
|
def run_module(uri, args, env_vars=None, name=DEFAULT_MODULE_NAME, cache=None, wait=True, capture_error=False):
    # type: (str, list, dict, str, bool, bool, bool) -> subprocess.Popen
    """Download, prepare and execute a compressed tar file from S3 or a local directory as a module.

    The SageMaker Python SDK stores user-provided scripts as compressed tar
    files in S3 (https://github.com/aws/sagemaker-python-sdk). This function
    downloads that archive, transforms it into a module, and executes it.

    Args:
        uri (str): the location of the module.
        args (list): a list of program arguments.
        env_vars (dict): a map containing the environment variables to be written.
        name (str): name of the script or module.
        cache (bool): deprecated; kept for backwards compatibility only.
        wait (bool): if True, wait for the user module to exit and check its
            exit code; otherwise launch it with subprocess and return the
            process object.
        capture_error (bool): Default False. If True, the running process
            captures stderr and appends it to the raised exception message.

    Returns:
        subprocess.Popen: the launched process when ``wait`` is False.
    """
    _warning_cache_deprecation(cache)

    # work on a private copy so the caller's mapping is not modified here
    environment = dict(env_vars) if env_vars else {}

    _files.download_and_extract(uri, name, _env.code_dir)
    prepare(_env.code_dir, name)
    install(_env.code_dir)

    _env.write_env_vars(environment)
    return run(name, args, environment, wait, capture_error)
|
The request's content-type.
Returns:
(str): The value, if any, of the header 'ContentType' (used by some AWS services) and 'Content-Type'.
Otherwise, returns 'application/json' as default.
|
def content_type(self):  # type: () -> str
    """The request's content-type.

    Returns:
        str: the value of the 'ContentType' header (used by some AWS
            services) or of 'Content-Type', if either is present.
            Otherwise, 'application/json' is returned as the default.
    """
    # todo(mvsusp): consider a better default content-type
    for header_name in ('ContentType', 'Content-Type'):
        header_value = self.headers.get(header_name)
        if header_value:
            return header_value
    return _content_types.JSON
|
The content-type for the response to the client.
Returns:
(str): The value of the header 'Accept' or the user-supplied SAGEMAKER_DEFAULT_INVOCATIONS_ACCEPT
environment variable.
|
def accept(self):  # type: () -> str
    """The content-type for the response to the client.

    Returns:
        str: the value of the 'Accept' header, or the user-supplied
            SAGEMAKER_DEFAULT_INVOCATIONS_ACCEPT default when the header
            is absent or is the wildcard content type.
    """
    requested = self.headers.get('Accept')
    if requested and requested != _content_types.ANY:
        return requested
    return self._default_accept
|
The request incoming data.
It is automatically decoded from utf-8 for textual content types.
Returns:
(obj): incoming data
|
def content(self):  # type: () -> object
    """The request's incoming data.

    The payload is automatically decoded from utf-8 when the request's
    content-type is a textual (UTF-8) type.

    Returns:
        object: the incoming data.
    """
    decode_as_text = self.content_type in _content_types.UTF8_TYPES
    return self.get_data(as_text=decode_as_text)
|
Download, prepare and execute a compressed tar file from S3 or provided directory as a user
entrypoint. Runs the user entry point, passing env_vars as environment variables and args as command
arguments.
If the entry point is:
- A Python package: executes the packages as >>> env_vars python -m module_name + args
- A Python script: executes the script as >>> env_vars python module_name + args
- Any other: executes the command as >>> env_vars /bin/sh -c ./module_name + args
Example:
>>>import sagemaker_containers
>>>from sagemaker_containers.beta.framework import entry_point
>>>env = sagemaker_containers.training_env()
{'channel-input-dirs': {'training': '/opt/ml/input/training'}, 'model_dir': '/opt/ml/model', ...}
>>>hyperparameters = env.hyperparameters
{'batch-size': 128, 'model_dir': '/opt/ml/model'}
>>>args = mapping.to_cmd_args(hyperparameters)
['--batch-size', '128', '--model_dir', '/opt/ml/model']
>>>env_vars = mapping.to_env_vars()
['SAGEMAKER_CHANNELS':'training', 'SAGEMAKER_CHANNEL_TRAINING':'/opt/ml/input/training',
'MODEL_DIR':'/opt/ml/model', ...}
>>>entry_point.run('user_script', args, env_vars)
SAGEMAKER_CHANNELS=training SAGEMAKER_CHANNEL_TRAINING=/opt/ml/input/training \
SAGEMAKER_MODEL_DIR=/opt/ml/model python -m user_script --batch-size 128 --model_dir /opt/ml/model
Args:
uri (str): the location of the module.
user_entry_point (str): name of the user provided entry point
args (list): A list of program arguments.
env_vars (dict): A map containing the environment variables to be written (default: None).
wait (bool): If the user entry point should be run to completion before this method returns
(default: True).
capture_error (bool): Default false. If True, the running process captures the
stderr, and appends it to the returned Exception message in case of errors.
runner (sagemaker_containers.beta.framework.runner.RunnerType): the type of runner object to
be created (default: sagemaker_containers.beta.framework.runner.ProcessRunnerType).
extra_opts (dict): Additional options for running the entry point (default: None).
Currently, this only applies for MPI.
Returns:
sagemaker_containers.beta.framework.process.ProcessRunner: the runner object responsible for
executing the entry point.
|
def run(uri,
        user_entry_point,
        args,
        env_vars=None,
        wait=True,
        capture_error=False,
        runner=_runner.ProcessRunnerType,
        extra_opts=None):
    # type: (str, str, List[str], Dict[str, str], bool, bool, _runner.RunnerType, Dict[str, str]) -> None
    """Download, prepare and execute a compressed tar file from S3 or a local
    directory as a user entry point, passing ``env_vars`` as environment
    variables and ``args`` as command arguments.

    Depending on the entry point type, the execution is:
        - Python package:  env_vars python -m module_name + args
        - Python script:   env_vars python module_name + args
        - anything else:   env_vars /bin/sh -c ./module_name + args

    Args:
        uri (str): the location of the module.
        user_entry_point (str): name of the user-provided entry point.
        args (list): a list of program arguments.
        env_vars (dict): a map containing the environment variables to be
            written (default: None).
        wait (bool): whether the entry point should be run to completion
            before this method returns (default: True).
        capture_error (bool): Default False. If True, the running process
            captures stderr and appends it to the raised exception message.
        runner (_runner.RunnerType): the type of runner object to create
            (default: _runner.ProcessRunnerType).
        extra_opts (dict): additional options for running the entry point
            (default: None). Currently this only applies to MPI.

    Returns:
        _process.ProcessRunner: the runner object responsible for executing
            the entry point.
    """
    # work on a private copy so the caller's mapping is not modified here
    environment = dict(env_vars) if env_vars else {}

    _files.download_and_extract(uri, user_entry_point, _env.code_dir)
    install(user_entry_point, _env.code_dir, capture_error)
    _env.write_env_vars(environment)

    entry_point_runner = _runner.get(runner, user_entry_point, args, environment, extra_opts)
    return entry_point_runner.run(wait, capture_error)
|
Install the user provided entry point to be executed as follows:
- add the path to sys path
- if the user entry point is a command, gives exec permissions to the script
Args:
name (str): name of the script or module.
dst (str): path to directory with the script or module.
capture_error (bool): Default false. If True, the running process captures the
stderr, and appends it to the returned Exception message in case of errors.
|
def install(name, dst, capture_error=False):
    """Install the user-provided entry point so it can be executed, as follows:

        - add ``dst`` to sys.path
        - if the entry point is a Python package, pip-install it
        - if the entry point is a command, give the script exec permissions

    Args:
        name (str): name of the script or module.
        dst (str): path to the directory with the script or module.
        capture_error (bool): Default False. If True, the running process
            captures stderr and appends it to the raised exception message
            in case of errors.
    """
    if dst not in sys.path:
        sys.path.insert(0, dst)

    entrypoint_type = _entry_point_type.get(dst, name)
    if entrypoint_type is _entry_point_type.PYTHON_PACKAGE:
        _modules.install(dst, capture_error)
    if entrypoint_type is _entry_point_type.COMMAND:
        # 0o777 (decimal 511, rwxrwxrwx): make the command executable by everyone.
        # Written in octal because chmod bits are conventionally read in octal.
        os.chmod(os.path.join(dst, name), 0o777)
|
Set logger configuration.
Args:
level (int): Logger level
format (str): Logger format
|
def configure_logger(level, format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s'):
    # type: (int, str) -> None
    """Set the root logger configuration and quiet down chatty AWS libraries.

    When the requested level is INFO or higher, the boto3/s3transfer loggers
    are pinned to INFO and botocore to WARN so debug noise from the AWS SDK
    does not flood the training log.

    Args:
        level (int): logger level.
        format (str): logger format string.
    """
    logging.basicConfig(format=format, level=level)

    if level < logging.INFO:
        return
    for noisy in ('boto3', 's3transfer'):
        logging.getLogger(noisy).setLevel(logging.INFO)
    logging.getLogger('botocore').setLevel(logging.WARN)
|
Return a timestamp with microsecond precision.
|
def _timestamp():
"""Return a timestamp with microsecond precision."""
moment = time.time()
moment_us = repr(moment).split('.')[1]
return time.strftime("%Y-%m-%d-%H-%M-%S-{}".format(moment_us), time.gmtime(moment))
|
As soon as a user is done with a file under `/opt/ml/output/intermediate`
we would get notified by using inotify. We would copy this file under
`/opt/ml/output/intermediate/.tmp.sagemaker_s3_sync` folder preserving
the same folder structure to prevent it from being further modified.
As we copy the file we would add timestamp with microseconds precision
to avoid modification during s3 upload.
After that we copy the file to s3 in a separate Thread.
We keep the queue of the files we need to move as FIFO.
|
def _watch(inotify, watchers, watch_flags, s3_uploader):
    """Watch the intermediate output directory and upload finished files to S3.

    As soon as a user is done with a file under `/opt/ml/output/intermediate`
    we get notified by inotify. The file is copied under
    `/opt/ml/output/intermediate/.tmp.sagemaker_s3_sync` preserving the same
    folder structure, to prevent it from being further modified; a timestamp
    with microsecond precision is added to the copy's name to avoid
    modification during the S3 upload. The copy is then uploaded to S3 on a
    separate thread, keeping the queue of pending files FIFO.

    Args:
        inotify: inotify_simple.INotify instance watching the tree.
        watchers (dict): maps inotify watch descriptors to paths relative to
            the intermediate directory; updated in place as folders appear.
        watch_flags: inotify flag mask used when adding watches for new folders.
        s3_uploader (dict): transfer client, bucket and key prefix used by
            the upload tasks.
    """
    # initialize a thread pool with 1 worker
    # to be used for uploading files to s3 in a separate thread
    executor = futures.ThreadPoolExecutor(max_workers=1)
    last_pass_done = False
    stop_file_exists = False
    # after we see stop file do one additional pass to make sure we didn't miss anything
    while not last_pass_done:
        # wait for any events in the directory for 1 sec and then re-check exit conditions
        for event in inotify.read(timeout=1000):
            for flag in inotify_simple.flags.from_mask(event.mask):
                # if a new directory was created, traverse the directory tree to
                # recursively add all created folders to the watchers list, and
                # upload any files already present in them.
                # There is a potential race condition if we upload a file and then
                # see a notification for it, which shouldn't cause any problems
                # because when we copy files to the temp dir we add a unique
                # timestamp with microsecond precision.
                if flag is inotify_simple.flags.ISDIR and inotify_simple.flags.CREATE & event.mask:
                    path = os.path.join(intermediate_path, watchers[event.wd], event.name)
                    for folder, dirs, files in os.walk(path):
                        wd = inotify.add_watch(folder, watch_flags)
                        relative_path = os.path.relpath(folder, intermediate_path)
                        watchers[wd] = relative_path
                        # mirror the folder structure inside the temp staging dir
                        tmp_sub_folder = os.path.join(tmp_dir_path, relative_path)
                        if not os.path.exists(tmp_sub_folder):
                            os.makedirs(tmp_sub_folder)
                        for file in files:
                            _copy_file(executor, s3_uploader, relative_path, file)
                elif flag is inotify_simple.flags.CLOSE_WRITE:
                    # a file was closed after writing - safe to snapshot and upload
                    _copy_file(executor, s3_uploader, watchers[event.wd], event.name)
        last_pass_done = stop_file_exists
        # the success/failure marker files signal that training has finished
        stop_file_exists = os.path.exists(success_file_path) or os.path.exists(failure_file_path)
    # wait for all the s3 upload tasks to finish and shutdown the executor
    executor.shutdown(wait=True)
|
Starts intermediate folder sync which copies files from 'opt/ml/output/intermediate'
directory to the provided s3 output location as files created or modified.
If files are deleted it doesn't delete them from s3.
It starts intermediate folder behavior as a daemonic process and
only if the directory doesn't exist yet; if it does, it indicates
that platform is taking care of syncing files to S3 and container should not interfere.
Args:
s3_output_location (str): the S3 URI (or file:// URI) where output should be synced.
region (str): the AWS region used for the S3 client.
Returns:
(multiprocessing.Process): the intermediate output sync daemonic process.
|
def start_sync(s3_output_location, region):
    """Start the intermediate folder sync, which copies files from
    'opt/ml/output/intermediate' to the provided S3 output location as they
    are created or modified.

    If files are deleted locally, they are not deleted from S3. The sync runs
    as a daemonic process, and only if the intermediate directory doesn't
    exist yet; if it does exist, the platform is taking care of syncing files
    to S3 and the container should not interfere.

    Args:
        s3_output_location (str): S3 URI (s3://bucket/prefix) or file:// URI
            where output should be synced.
        region (str): AWS region used for the S3 client.

    Returns:
        multiprocessing.Process: the intermediate output sync daemonic
            process, or None when no sync is needed (no output location,
            directory already exists, or local file:// output).
    """
    if not s3_output_location or os.path.exists(intermediate_path):
        logger.debug('Could not initialize intermediate folder sync to s3.')
        return
    # create intermediate and intermediate_tmp directories
    os.makedirs(intermediate_path)
    os.makedirs(tmp_dir_path)
    # configure unique s3 output location similar to how SageMaker platform does it
    # or link it to the local output directory
    url = urlparse(s3_output_location)
    if url.scheme == 'file':
        logger.debug('Local directory is used for output. No need to sync any intermediate output.')
        return
    elif url.scheme != 's3':
        raise ValueError("Expecting 's3' scheme, got: %s in %s" % (url.scheme, url))
    # create s3 transfer client
    client = boto3.client('s3', region)
    s3_transfer = s3transfer.S3Transfer(client)
    # bundle of everything the uploader thread needs to put objects in S3
    s3_uploader = {
        'transfer': s3_transfer,
        'bucket': url.netloc,
        'key_prefix': os.path.join(url.path.lstrip('/'), os.environ.get('TRAINING_JOB_NAME', ''),
                                   'output', 'intermediate'),
    }
    # Add intermediate folder to the watch list
    inotify = inotify_simple.INotify()
    watch_flags = inotify_simple.flags.CLOSE_WRITE | inotify_simple.flags.CREATE
    watchers = {}
    wd = inotify.add_watch(intermediate_path, watch_flags)
    watchers[wd] = ''
    # start subprocess to sync any files from intermediate folder to s3
    p = multiprocessing.Process(target=_watch, args=[inotify, watchers, watch_flags, s3_uploader])
    # Make the process daemonic as a safety switch to prevent training job from hanging forever
    # in case if something goes wrong and main container process exits in an unexpected way
    p.daemon = True
    p.start()
    return p
|
Transform a dictionary in a dictionary of env vars.
Example:
>>>env_vars = mapping.to_env_vars({'model_dir': '/opt/ml/model', 'batch_size': 25})
>>>
>>>print(args)
['MODEL_DIR', '/opt/ml/model', 'BATCH_SIZE', 25]
Args:
mapping (dict[str, object]): A Python mapping.
Returns:
(dict): Dictionary of env vars
|
def to_env_vars(mapping):  # type: (dict) -> dict
    """Transform a dictionary into a dictionary of environment variables.

    Keys are upper-cased and prefixed with 'SM_'; byte-string values are
    decoded, None becomes the empty string, strings are kept as-is, and
    every other value is serialized as compact, deterministic JSON.

    Example:
        >>> to_env_vars({'model_dir': '/opt/ml/model'})
        {'SM_MODEL_DIR': '/opt/ml/model'}

    Args:
        mapping (dict[str, object]): a Python mapping.

    Returns:
        dict: dictionary of environment variable names to string values.
    """
    def format_key(key):
        """Stringify the key, add the SM_ prefix and upper-case it."""
        return 'SM_%s' % str(key).upper() if key else ''

    def format_value(value):
        """Render a mapping value as the string to store in the environment."""
        if six.PY3 and isinstance(value, six.binary_type):
            # transforms a byte string (b'') in unicode
            return value.decode('latin1')
        if value is None:
            return ''
        if isinstance(value, six.string_types):
            return str(value)
        # compact separators + sorted keys make the JSON deterministic
        return json.dumps(value, sort_keys=True, separators=(',', ':'), ensure_ascii=True)

    return {format_key(key): format_value(value) for key, value in mapping.items()}
|
Transform a dictionary in a list of cmd arguments.
Example:
>>>args = mapping.to_cmd_args({'model_dir': '/opt/ml/model', 'batch_size': 25})
>>>
>>>print(args)
['--model_dir', '/opt/ml/model', '--batch_size', 25]
Args:
mapping (dict[str, object]): A Python mapping.
Returns:
(list): List of cmd arguments
|
def to_cmd_args(mapping):  # type: (dict) -> list
    """Transform a dictionary into a list of command-line arguments.

    Keys are emitted in sorted order as '--long' flags (or '-s' for
    single-character keys), each followed by its rendered value.

    Example:
        >>> to_cmd_args({'model_dir': '/opt/ml/model', 'batch_size': 25})
        ['--batch_size', '25', '--model_dir', '/opt/ml/model']

    Args:
        mapping (dict[str, object]): a Python mapping.

    Returns:
        list: list of command-line arguments.
    """
    def arg_name(obj):
        """Turn a key into a '--long' or '-s' flag; empty keys map to ''."""
        string = _decode(obj)
        if not string:
            return u''
        return u'--%s' % string if len(string) > 1 else u'-%s' % string

    def arg_value(value):
        """Render dict values as comma-joined 'k=v' pairs; decode the rest."""
        if hasattr(value, 'items'):
            return ','.join('%s=%s' % (k, v) for k, v in sorted(value.items()))
        return _decode(value)

    cmd_args = []
    for key in sorted(mapping.keys()):
        cmd_args.append(arg_name(key))
        cmd_args.append(arg_value(mapping[key]))
    return cmd_args
|
Decode an object to unicode.
Args:
obj (bytes or str or unicode or anything serializable): object to be decoded
Returns:
object decoded in unicode.
|
def _decode(obj):  # type: (bytes or str or unicode or object) -> unicode  # noqa ignore=F821
    """Decode an object to unicode.

    Args:
        obj (bytes or str or unicode or anything serializable): object to be decoded.

    Returns:
        The object decoded as unicode; None becomes the empty string.
    """
    if obj is None:
        return u''
    if six.PY3:
        if isinstance(obj, six.binary_type):
            # transforms a byte string (b'') in unicode
            return obj.decode('latin1')
        # PY3 strings are already unicode
        return str(obj)
    if isinstance(obj, six.text_type):
        # returns itself if it is unicode
        return obj
    # decodes a PY2 byte string to unicode
    return str(obj).decode('utf-8')
|
Split a dictionary in two by the provided keys.
Args:
dictionary (dict[str, object]): A Python dictionary
keys (sequence [str]): A sequence of keys which will be added the split criteria
prefix (str): A prefix which will be added the split criteria
Returns:
`SplitResultSpec` : A collections.namedtuple with the following attributes:
* Args:
included (dict[str, object]: A dictionary with the keys included in the criteria.
excluded (dict[str, object]: A dictionary with the keys not included in the criteria.
|
def split_by_criteria(dictionary, keys=None, prefix=None):  # type: (dict, set or list or tuple, str) -> SplitResultSpec
    """Split a dictionary in two by the provided criteria.

    An entry is 'included' when its key appears in ``keys`` or starts with
    ``prefix``; all other entries are 'excluded'.

    Args:
        dictionary (dict[str, object]): a Python dictionary.
        keys (sequence[str]): a sequence of keys added to the split criteria.
        prefix (str): a key prefix added to the split criteria.

    Returns:
        SplitResultSpec: a collections.namedtuple with attributes:
            included (dict[str, object]): entries matching the criteria.
            excluded (dict[str, object]): entries not matching the criteria.
    """
    criteria_keys = set(keys or [])

    def matches(key):
        """True when the key satisfies either split criterion."""
        return key in criteria_keys or bool(prefix and key.startswith(prefix))

    included = {k: v for k, v in dictionary.items() if matches(k)}
    excluded = {k: v for k, v in dictionary.items() if not matches(k)}
    return SplitResultSpec(included=included, excluded=excluded)
|
Returns:
(list[str]) List of public properties
|
def properties(self):  # type: () -> list
    """List the names of this object's public properties.

    Returns:
        list[str]: list of public property names.
    """
    cls = type(self)
    return [attr_name for attr_name in dir(cls) if self._is_property(attr_name)]
|
Function responsible to serialize the prediction for the response.
Args:
prediction (obj): prediction returned by predict_fn .
accept (str): accept content-type expected by the client.
Returns:
(worker.Response): a Flask response object with the following args:
* Args:
response: the serialized data to return
accept: the content-type that the data was transformed to.
|
def default_output_fn(prediction, accept):
    """Serialize the prediction into the response.

    Args:
        prediction (obj): prediction returned by predict_fn.
        accept (str): accept content-type expected by the client.

    Returns:
        worker.Response: a Flask response object whose body is the
            serialized data and whose mimetype is the content-type the
            data was transformed to.
    """
    serialized = _encoders.encode(prediction, accept)
    return _worker.Response(response=serialized, mimetype=accept)
|
Take a request with input data, deserialize it, make a prediction, and return a
serialized response.
Returns:
sagemaker_containers.beta.framework.worker.Response: a Flask response object with
the following args:
* response: the serialized data to return
* accept: the content type that the data was serialized into
|
def transform(self):  # type: () -> _worker.Response
    """Take a request with input data, deserialize it, make a prediction,
    and return a serialized response.

    Returns:
        _worker.Response: a Flask response object carrying the serialized
            data and the content type it was serialized into.
    """
    request = _worker.Request()
    result = self._transform_fn(self._model, request.content,
                                request.content_type, request.accept)

    if not isinstance(result, tuple):
        return result
    # a (data, content_type) tuple is converted to a Response for
    # backwards compatibility
    response_data, response_mimetype = result[0], result[1]
    return _worker.Response(response=response_data, mimetype=response_mimetype)
|
Make predictions against the model and return a serialized response.
This serves as the default implementation of transform_fn, used when the user has not
implemented one themselves.
Args:
model (obj): model loaded by model_fn.
content: request content.
content_type (str): the request Content-Type.
accept (str): accept content-type expected by the client.
Returns:
sagemaker_containers.beta.framework.worker.Response or tuple:
the serialized response data and its content type, either as a Response object or
a tuple of the form (response_data, content_type)
|
def _default_transform_fn(self, model, content, content_type, accept):
    """Make predictions against the model and return a serialized response.

    This serves as the default implementation of transform_fn, used when
    the user has not implemented one themselves.

    Args:
        model (obj): model loaded by model_fn.
        content: request content.
        content_type (str): the request Content-Type.
        accept (str): accept content-type expected by the client.

    Returns:
        _worker.Response or tuple: the serialized response data and its
            content type, either as a Response object or as a tuple of the
            form (response_data, content_type).
    """
    try:
        deserialized_input = self._input_fn(content, content_type)
    except _errors.UnsupportedFormatError as e:
        # the client sent a content-type we cannot deserialize
        return self._error_response(e, http_client.UNSUPPORTED_MEDIA_TYPE)

    prediction = self._predict_fn(deserialized_input, model)

    try:
        return self._output_fn(prediction, accept)
    except _errors.UnsupportedFormatError as e:
        # the client requested an accept type we cannot serialize to
        return self._error_response(e, http_client.NOT_ACCEPTABLE)
|
Args:
path (string): Directory where the entry point is located
name (string): Name of the entry point file
Returns:
(_EntryPointType): The type of the entry point
|
def get(path, name):  # type: (str, str) -> _EntryPointType
    """Determine the type of a user-provided entry point.

    Args:
        path (string): directory where the entry point is located.
        name (string): name of the entry point file.

    Returns:
        _EntryPointType: PYTHON_PACKAGE when the directory contains a
            setup.py, PYTHON_PROGRAM for .py files, COMMAND otherwise.
    """
    if 'setup.py' in os.listdir(path):
        return _EntryPointType.PYTHON_PACKAGE
    if name.endswith('.py'):
        return _EntryPointType.PYTHON_PROGRAM
    return _EntryPointType.COMMAND
|
Create a TrainingEnv.
Returns:
TrainingEnv: an instance of TrainingEnv
|
def training_env():  # type: () -> _env.TrainingEnv
    """Create a TrainingEnv from the container's configuration files.

    Returns:
        TrainingEnv: an instance of TrainingEnv.
    """
    from sagemaker_containers import _env

    resource_config = _env.read_resource_config()
    input_data_config = _env.read_input_data_config()
    hyperparameters = _env.read_hyperparameters()
    return _env.TrainingEnv(resource_config=resource_config,
                            input_data_config=input_data_config,
                            hyperparameters=hyperparameters)
|
Writes a serializeable object as a JSON file
|
def _write_json(obj, path): # type: (object, str) -> None
"""Writes a serializeable object as a JSON file"""
with open(path, 'w') as f:
json.dump(obj, f)
|
Sets the environment variable SAGEMAKER_BASE_DIR as
~/sagemaker_local/{timestamp}/opt/ml
Returns:
None. The function only sets the SAGEMAKER_BASE_DIR environment variable.
|
def _set_base_path_env():  # type: () -> None
    """Set the SAGEMAKER_BASE_DIR environment variable to a fresh
    ~/sagemaker_local/jobs/{timestamp}/opt/ml directory.
    """
    job_base_dir = os.path.join(os.path.expanduser('~'), 'sagemaker_local', 'jobs',
                                str(time.time()), 'opt', 'ml')
    logger.info('Setting environment variable SAGEMAKER_BASE_DIR as %s .' % job_base_dir)
    os.environ[BASE_PATH_ENV] = job_base_dir
|
Creates the directory structure and files necessary for training under the base path
|
def _create_training_directories():
    """Create the directory structure and config files necessary for training
    under the base path.
    """
    logger.info('Creating a new training folder under %s .' % base_dir)

    for directory in (model_dir, input_config_dir, output_data_dir):
        os.makedirs(directory)

    # empty hyperparameters and input-data configs by default
    _write_json({}, hyperparameters_file_dir)
    _write_json({}, input_data_config_file_dir)

    # a single-host resource config pointing at this machine
    host_name = socket.gethostname()
    _write_json({'current_host': host_name, 'hosts': [host_name]},
                resource_config_file_dir)
|
Read the hyperparameters from /opt/ml/input/config/hyperparameters.json.
For more information about hyperparameters.json:
https://docs.aws.amazon.com/sagemaker/latest/dg/your-algorithms-training-algo.html#your-algorithms-training-algo-running-container-hyperparameters
Returns:
(dict[string, object]): a dictionary containing the hyperparameters.
|
def read_hyperparameters():  # type: () -> dict
    """Read the hyperparameters from /opt/ml/input/config/hyperparameters.json.

    For more information about hyperparameters.json:
    https://docs.aws.amazon.com/sagemaker/latest/dg/your-algorithms-training-algo.html#your-algorithms-training-algo-running-container-hyperparameters

    Returns:
        (dict[string, object]): a dictionary containing the hyperparameters.
    """
    raw_hyperparameters = _read_json(hyperparameters_file_dir)

    deserialized = {}
    for name, raw_value in raw_hyperparameters.items():
        # SageMaker serializes each value as a JSON string; if it does not
        # parse, keep the raw value as-is.
        try:
            deserialized[name] = json.loads(raw_value)
        except (ValueError, TypeError):
            logger.info("Failed to parse hyperparameter %s value %s to Json.\n"
                        "Returning the value itself", name, raw_value)
            deserialized[name] = raw_value
    return deserialized
|
The number of gpus available in the current container.
Returns:
int: number of gpus available in the current container.
|
def num_gpus():  # type: () -> int
    """The number of gpus available in the current container.

    Returns:
        int: number of gpus available in the current container.
    """
    try:
        listing = subprocess.check_output(shlex.split('nvidia-smi --list-gpus')).decode('utf-8')
        # nvidia-smi prints one 'GPU <n>: ...' line per device.
        return len([line for line in listing.split('\n') if line.startswith('GPU ')])
    except (OSError, subprocess.CalledProcessError):
        # OSError: nvidia-smi not installed; CalledProcessError: it failed.
        logger.info('No GPUs detected (normal if no gpus installed)')
        return 0
|
Write the dictionary env_vars in the system, as environment variables.
Args:
env_vars ():
Returns:
|
def write_env_vars(env_vars=None):  # type: (dict) -> None
    """Export the entries of env_vars as process environment variables.

    PYTHONPATH is always (re)written from the current sys.path so that child
    processes see the same module search path.

    Args:
        env_vars (dict): names and values to export; may be None.

    Returns:
    """
    env_vars = env_vars or {}
    env_vars['PYTHONPATH'] = ':'.join(sys.path)

    os.environ.update(env_vars)
|
Environment variable representation of the training environment
Returns:
dict: an instance of dictionary
|
def to_env_vars(self):
    """Environment variable representation of the training environment

    Returns:
        dict: an instance of dictionary
    """
    env = {
        'hosts': self.hosts,
        'network_interface_name': self.network_interface_name,
        'hps': self.hyperparameters,
        'user_entry_point': self.user_entry_point,
        'framework_params': self.additional_framework_parameters,
        'resource_config': self.resource_config,
        'input_data_config': self.input_data_config,
        'output_data_dir': self.output_data_dir,
        'channels': sorted(self.channel_input_dirs.keys()),
        'current_host': self.current_host,
        'module_name': self.module_name,
        'log_level': self.log_level,
        'framework_module': self.framework_module,
        'input_dir': self.input_dir,
        'input_config_dir': self.input_config_dir,
        'output_dir': self.output_dir,
        'num_cpus': self.num_cpus,
        'num_gpus': self.num_gpus,
        'model_dir': self.model_dir,
        'module_dir': self.module_dir,
        'training_env': dict(self),
        'user_args': self.to_cmd_args(),
        'output_intermediate_dir': self.output_intermediate_dir,
    }

    # Additionally expose each channel and each hyperparameter under its own key.
    env.update(('channel_%s' % name, path) for name, path in self.channel_input_dirs.items())
    env.update(('hp_%s' % key, value) for key, value in self.hyperparameters.items())

    return _mapping.to_env_vars(env)
|
Convert an array like object to the NPY format.
To understand better what an array like object is see:
https://docs.scipy.org/doc/numpy/user/basics.creation.html#converting-python-array-like-objects-to-numpy-arrays
Args:
array_like (np.array or Iterable or int or float): array like object to be converted to NPY.
Returns:
(obj): NPY array.
|
def array_to_npy(array_like):  # type: (np.array or Iterable or int or float) -> object
    """Serialize an array-like object into NPY-formatted bytes.

    To understand better what an array like object is see:
    https://docs.scipy.org/doc/numpy/user/basics.creation.html#converting-python-array-like-objects-to-numpy-arrays

    Args:
        array_like (np.array or Iterable or int or float): array like object to be converted to NPY.

    Returns:
        (obj): NPY array.
    """
    stream = BytesIO()
    np.save(stream, array_like)
    return stream.getvalue()
|
Convert an NPY array into numpy.
Args:
npy_array (npy array): to be converted to numpy array
Returns:
(np.array): converted numpy array.
|
def npy_to_numpy(npy_array):  # type: (object) -> np.array
    """Deserialize NPY-formatted bytes into a numpy array.

    Args:
        npy_array (npy array): to be converted to numpy array

    Returns:
        (np.array): converted numpy array.
    """
    # allow_pickle permits object arrays; safe only for trusted payloads.
    return np.load(BytesIO(npy_array), allow_pickle=True)
|
Convert an array like object to JSON.
To understand better what an array like object is see:
https://docs.scipy.org/doc/numpy/user/basics.creation.html#converting-python-array-like-objects-to-numpy-arrays
Args:
array_like (np.array or Iterable or int or float): array like object to be converted to JSON.
Returns:
(str): object serialized to JSON
|
def array_to_json(array_like):  # type: (np.array or Iterable or int or float) -> str
    """Serialize an array-like object to a JSON string.

    To understand better what an array like object is see:
    https://docs.scipy.org/doc/numpy/user/basics.creation.html#converting-python-array-like-objects-to-numpy-arrays

    Args:
        array_like (np.array or Iterable or int or float): array like object to be converted to JSON.

    Returns:
        (str): object serialized to JSON
    """
    def _fallback(obj):
        # numpy arrays/scalars expose tolist(); anything else is delegated to
        # the base encoder, which raises TypeError for unserializable objects.
        if hasattr(obj, 'tolist'):
            return obj.tolist()
        return json.JSONEncoder().default(obj)

    return json.dumps(array_like, default=_fallback)
|
Convert a JSON object to a numpy array.
Args:
string_like (str): JSON string.
dtype (dtype, optional): Data type of the resulting array. If None, the dtypes will be determined by the
contents of each column, individually. This argument can only be used to
'upcast' the array. For downcasting, use the .astype(t) method.
Returns:
(np.array): numpy array
|
def json_to_numpy(string_like, dtype=None):  # type: (str) -> np.array
    """Parse a JSON string into a numpy array.

    Args:
        string_like (str): JSON string.
        dtype (dtype, optional): Data type of the resulting array. If None, the dtypes will be
            determined by the contents of each column, individually. This argument can only be
            used to 'upcast' the array. For downcasting, use the .astype(t) method.

    Returns:
        (np.array): numpy array
    """
    return np.array(json.loads(string_like), dtype=dtype)
|
Convert an array like object to CSV.
To understand better what an array like object is see:
https://docs.scipy.org/doc/numpy/user/basics.creation.html#converting-python-array-like-objects-to-numpy-arrays
Args:
array_like (np.array or Iterable or int or float): array like object to be converted to CSV.
Returns:
(str): object serialized to CSV
|
def array_to_csv(array_like):  # type: (np.array or Iterable or int or float) -> str
    """Serialize an array-like object to a CSV string.

    To understand better what an array like object is see:
    https://docs.scipy.org/doc/numpy/user/basics.creation.html#converting-python-array-like-objects-to-numpy-arrays

    Args:
        array_like (np.array or Iterable or int or float): array like object to be converted to CSV.

    Returns:
        (str): object serialized to CSV
    """
    out = StringIO()
    # fmt='%s' keeps each element's natural string form (ints stay ints).
    np.savetxt(out, array_like, delimiter=',', fmt='%s')
    return out.getvalue()
|
Decode an object in one of the default content types to a numpy array.
Args:
obj (object): to be decoded.
content_type (str): content type to be used.
Returns:
np.array: decoded object.
|
def decode(obj, content_type):
    # type: (np.array or Iterable or int or float, str) -> np.array
    """Decode an object in one of the default content types to a numpy array.

    Args:
        obj (object): to be decoded.
        content_type (str): content type to be used.

    Returns:
        np.array: decoded object.

    Raises:
        _errors.UnsupportedFormatError: if no decoder is registered for content_type.
    """
    try:
        return _decoders_map[content_type](obj)
    except KeyError:
        raise _errors.UnsupportedFormatError(content_type)
|
Encode an array like object in a specific content_type to a numpy array.
To understand better what an array like object is see:
https://docs.scipy.org/doc/numpy/user/basics.creation.html#converting-python-array-like-objects-to-numpy-arrays
Args:
array_like (np.array or Iterable or int or float): to be converted to numpy.
content_type (str): content type to be used.
Returns:
(np.array): object converted as numpy array.
|
def encode(array_like, content_type):
    # type: (np.array or Iterable or int or float, str) -> np.array
    """Encode an array-like object into the requested content type.

    To understand better what an array like object is see:
    https://docs.scipy.org/doc/numpy/user/basics.creation.html#converting-python-array-like-objects-to-numpy-arrays

    Args:
        array_like (np.array or Iterable or int or float): to be converted to numpy.
        content_type (str): content type to be used.

    Returns:
        (np.array): object converted as numpy array.

    Raises:
        _errors.UnsupportedFormatError: if no encoder is registered for content_type.
    """
    try:
        return _encoders_map[content_type](array_like)
    except KeyError:
        raise _errors.UnsupportedFormatError(content_type)
|
Create a file 'success' when training is successful. This file doesn't need to have any content.
See: https://docs.aws.amazon.com/sagemaker/latest/dg/your-algorithms-training-algo.html
|
def write_success_file():  # type: () -> None
    """Create an empty 'success' marker file when training is successful.

    See: https://docs.aws.amazon.com/sagemaker/latest/dg/your-algorithms-training-algo.html
    """
    # The marker's presence is what matters; its content is irrelevant.
    write_file(os.path.join(_env.output_dir, 'success'), '')
|
Create a file 'failure' if training fails after all algorithm output (for example, logging) completes,
the failure description should be written to this file. In a DescribeTrainingJob response, Amazon SageMaker
returns the first 1024 characters from this file as FailureReason.
See: https://docs.aws.amazon.com/sagemaker/latest/dg/your-algorithms-training-algo.html
Args:
failure_msg: The description of failure
|
def write_failure_file(failure_msg):  # type: (str) -> None
    """Create a 'failure' file describing why training failed.

    If training fails after all algorithm output (for example, logging) completes, the failure
    description should be written to this file. In a DescribeTrainingJob response, Amazon
    SageMaker returns the first 1024 characters from this file as FailureReason.

    See: https://docs.aws.amazon.com/sagemaker/latest/dg/your-algorithms-training-algo.html

    Args:
        failure_msg: The description of failure
    """
    failure_file_path = os.path.join(_env.output_dir, 'failure')
    write_file(failure_file_path, failure_msg)
|
Create a temporary directory with a context manager. The file is deleted when the context exits.
The prefix, suffix, and dir arguments are the same as for mkstemp().
Args:
suffix (str): If suffix is specified, the file name will end with that suffix, otherwise there will be no
suffix.
prefix (str): If prefix is specified, the file name will begin with that prefix; otherwise,
a default prefix is used.
dir (str): If dir is specified, the file will be created in that directory; otherwise, a default directory is
used.
Returns:
str: path to the directory
|
import contextlib


@contextlib.contextmanager
def tmpdir(suffix='', prefix='tmp', dir=None):  # type: (str, str, str) -> str
    """Create a temporary directory with a context manager.

    The directory is deleted when the context exits, even if the with-body
    raises. The prefix, suffix, and dir arguments are the same as for mkstemp().

    Args:
        suffix (str): If suffix is specified, the file name will end with that suffix, otherwise
            there will be no suffix.
        prefix (str): If prefix is specified, the file name will begin with that prefix;
            otherwise, a default prefix is used.
        dir (str): If dir is specified, the file will be created in that directory; otherwise,
            a default directory is used.

    Yields:
        str: path to the directory
    """
    tmp = tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=dir)
    try:
        yield tmp
    finally:
        # Guarantee cleanup even when the caller's block raises.
        shutil.rmtree(tmp)
|
Write data to a file.
Args:
path (str): path to the file.
data (str): data to be written to the file.
mode (str): mode which the file will be open.
|
def write_file(path, data, mode='w'):  # type: (str, str, str) -> None
    """Write data to the file at path, opened with the given mode.

    Args:
        path (str): path to the file.
        data (str): data to be written to the file.
        mode (str): mode which the file will be open.
    """
    with open(path, mode) as stream:
        stream.write(data)
|
Download, prepare and install a compressed tar file from S3 or local directory as an entry point.
SageMaker Python SDK saves the user provided entry points as compressed tar files in S3
Args:
name (str): name of the entry point.
uri (str): the location of the entry point.
path (str): The path where the script will be installed. It will not download and install the
entry point if the path already has the user entry point.
|
def download_and_extract(uri, name, path):  # type: (str, str, str) -> None
    """Download, prepare and install a compressed tar file from S3 or local directory as an
    entry point.

    SageMaker Python SDK saves the user provided entry points as compressed tar files in S3.

    Args:
        uri (str): the location of the entry point: an s3:// url, a local directory,
            or a local file.
        name (str): name of the entry point.
        path (str): The path where the script will be installed. It will not download and
            install the entry point if the path already has the user entry point.
    """
    if not os.path.exists(path):
        os.makedirs(path)

    # Only install when the target directory is empty; an existing entry point
    # is left untouched.
    if not os.listdir(path):
        with tmpdir() as tmp:
            if uri.startswith('s3://'):
                # Download the tarball to a scratch file, then unpack into path.
                dst = os.path.join(tmp, 'tar_file')
                s3_download(uri, dst)

                # NOTE(review): extractall trusts member paths inside the
                # archive; a malicious tarball could write outside `path`
                # (tar path traversal) -- acceptable only for trusted sources.
                with tarfile.open(name=dst, mode='r:gz') as t:
                    t.extractall(path=path)

            elif os.path.isdir(uri):
                if uri == path:
                    # Source and destination are the same directory: nothing to do.
                    return
                if os.path.exists(path):
                    shutil.rmtree(path)
                # Moves (does not copy) the source directory into place.
                shutil.move(uri, path)
            else:
                # Plain local file: copy it into the target under `name`.
                shutil.copy2(uri, os.path.join(path, name))
|
Download a file from S3.
Args:
url (str): the s3 url of the file.
dst (str): the destination where the file will be saved.
|
def s3_download(url, dst):  # type: (str, str) -> None
    """Download a file from S3 to a local destination.

    Args:
        url (str): the s3 url of the file.
        dst (str): the destination where the file will be saved.

    Raises:
        ValueError: if url does not use the 's3' scheme.
    """
    url = parse.urlparse(url)

    if url.scheme != 's3':
        raise ValueError("Expecting 's3' scheme, got: %s in %s" % (url.scheme, url))

    bucket, key = url.netloc, url.path.lstrip('/')

    # Prefer AWS_REGION; fall back to the SageMaker-provided region variable.
    region = os.environ.get('AWS_REGION', os.environ.get(_params.REGION_NAME_ENV))
    boto3.resource('s3', region_name=region).Bucket(bucket).download_file(key, dst)
|
Given a function fn and a dict dictionary, returns the function arguments that match the dict keys.
Example:
def train(channel_dirs, model_dir): pass
dictionary = {'channel_dirs': {}, 'model_dir': '/opt/ml/model', 'other_args': None}
args = functions.matching_args(train, dictionary) # {'channel_dirs': {}, 'model_dir': '/opt/ml/model'}
train(**args)
Args:
fn (function): a function
dictionary (dict): the dictionary with the keys
Returns:
(dict) a dictionary with only matching arguments.
|
def matching_args(fn, dictionary):  # type: (Callable, _mapping.Mapping) -> dict
    """Return the subset of dictionary whose keys match fn's argument names.

    Example:

        def train(channel_dirs, model_dir): pass

        dictionary = {'channel_dirs': {}, 'model_dir': '/opt/ml/model', 'other_args': None}

        args = functions.matching_args(train, dictionary)  # {'channel_dirs': {}, 'model_dir': '/opt/ml/model'}

        train(**args)

    Args:
        fn (function): a function
        dictionary (dict): the dictionary with the keys

    Returns:
        (dict) a dictionary with only matching arguments.
    """
    spec = getargspec(fn)

    # A **kwargs parameter accepts everything, so no filtering is needed.
    if spec.keywords:
        return dictionary

    return _mapping.split_by_criteria(dictionary, spec.args).included
|
Get the names and default values of a function's arguments.
Args:
fn (function): a function
Returns:
`inspect.ArgSpec`: A collections.namedtuple with the following attributes:
* Args:
args (list): a list of the argument names (it may contain nested lists).
varargs (str): name of the * argument or None.
keywords (str): names of the ** argument or None.
defaults (tuple): an n-tuple of the default values of the last n arguments.
|
def getargspec(fn):  # type: (Callable) -> inspect.ArgSpec
    """Get the names and default values of a function's arguments.

    Args:
        fn (function): a function

    Returns:
        `inspect.ArgSpec`: A collections.namedtuple with the following attributes:

            * Args:
                args (list): a list of the argument names (it may contain nested lists).
                varargs (str): name of the * argument or None.
                keywords (str): names of the ** argument or None.
                defaults (tuple): an n-tuple of the default values of the last n arguments.
    """
    if six.PY2:
        # Python 2: getargspec returns ArgSpec directly.
        return inspect.getargspec(fn)
    elif six.PY3:
        # Adapt getfullargspec to the legacy ArgSpec shape so callers can use
        # .keywords uniformly across Python versions.
        # NOTE(review): inspect.ArgSpec and inspect.getargspec were removed in
        # Python 3.11 -- confirm the supported interpreter range before upgrading.
        full_arg_spec = inspect.getfullargspec(fn)
        return inspect.ArgSpec(full_arg_spec.args, full_arg_spec.varargs, full_arg_spec.varkw, full_arg_spec.defaults)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.