Dataset fields:

- `repository_name`: string, 316 distinct values
- `func_path_in_repository`: string, 6–223 characters
- `func_name`: string, 1–134 characters
- `language`: string, 1 distinct value
- `func_code_string`: string, 57–65.5k characters
- `func_documentation_string`: string, 1–46.3k characters
- `split_name`: string, 1 distinct value
- `func_code_url`: string, 91–315 characters
- `called_functions`: list, 1–156 items, nullable
- `enclosing_scope`: string, 2–1.48M characters
**vmlaker/coils · `coils/MapSock.py` · `MapSockClient.send`** (python, train)
Source: https://github.com/vmlaker/coils/blob/a3a613b3d661dec010e5879c86e62cbff2519dd0/coils/MapSock.py#L48-L99

Documentation: Send request to server and return server response.

```python
def send(self, request):
    self._logger.debug('Opening connection')
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.connect((self._host, self._port))
    talk = SocketTalk(sock, encode=self._encode)
    self._logger.debug('Sending action %s' % request.action)
    if not talk.put(request.action):
        self._logger.error('Failed to send action %s' % request.action)
        return None
    if request.key is not None:
        self._logger.debug('Sending key %s' % request.key)
        if not talk.put(request.key):
            self._logger.error('Failed to send key %s' % request.key)
            return None
    if request.value is not None:
        self._logger.debug('Sending value')
        if not talk.put(request.value):
            self._logger.error('Failed to send value')
            return None
    self._logger.debug('Receiving status')
    response = talk.get()
    if not response:
        self._logger.error('Failed to receive status')
        return None
    self._logger.debug('Status response = %s' % response)
    if request.action in ('get', 'size', 'keys') and response == 'ok':
        self._logger.debug('Receiving value')
        response = talk.get()
        if not response:
            self._logger.error('Failed to receive value')
            return None
        if request.action in ('keys',):
            response = pickle.loads(response)
    self._logger.debug('Closing connection')
    try:
        sock.shutdown(socket.SHUT_RDWR)
    except socket.error:  # the peer may have closed the connection already
        self._logger.error('Failed to shutdown')
    sock.close()
    return response
```

Called functions: `SocketTalk.put` (shown in full in the `SocketTalk.put` record below).

Enclosing scope:

```python
class MapSockClient:
    """
    Client to the map server.
    """
    def __init__(self, host, port, encode=True):
        """
        Initialize the object.

        *host* - socket host
        *port* - socket port
        """
        self._logger = logging.getLogger(__name__)
        self._host = host
        self._port = port
        self._encode = encode
```
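The records never show the request object that `MapSockClient.send` consumes, only that it carries `action`, `key`, and `value` attributes. A minimal usage sketch under that assumption; the `MapSockRequest` namedtuple here is hypothetical, not part of the library:

```python
from collections import namedtuple

# Hypothetical stand-in for the request type; the real class is not
# shown in these records. Only .action, .key and .value are accessed.
MapSockRequest = namedtuple('MapSockRequest', 'action key value')

client = MapSockClient('localhost', 8888)           # assumes the class above is in scope
client.send(MapSockRequest('set', 'color', 'red'))  # -> 'ok'
client.send(MapSockRequest('get', 'color', None))   # -> 'red'
client.send(MapSockRequest('size', None, None))     # -> '1'
client.send(MapSockRequest('stop', None, None))     # -> 'ok', server shuts down
```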
**vmlaker/coils · `coils/MapSock.py` · `MapSockServer._send`** (python, train)
Source: https://github.com/vmlaker/coils/blob/a3a613b3d661dec010e5879c86e62cbff2519dd0/coils/MapSock.py#L124-L131

Documentation: Return response message to client.

```python
def _send(self, message):
    result = self._talk.put(message)
    if not result:
        self._logger.error('Failed to send "%s"' % message)
    return result
```

Called functions: null

Enclosing scope:

```python
class MapSockServer:
    def __init__(self, host, port, on_action=None, encode=True):
        """
        Initialize the server object.

        *host* - socket host
        *port* - socket port
        *on_action* - callback upon action reception,
                      is called with action string
        """
        self._logger = logging.getLogger(__name__)
        self._data = dict()
        self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self._sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self._sock.bind((host, port))
        self._sock.listen(1)
        self._talk = None
        self._on_action = on_action
        self._encode = encode

    def _receive(self):
        """
        Receive a chunk of request from client.
        """
        result = self._talk.get()
        if not result:
            self._logger.error('Failed to receive')
        return result

    # run() is shown in full in the MapSockServer.run record below.
```
**vmlaker/coils · `coils/MapSock.py` · `MapSockServer._receive`** (python, train)
Source: https://github.com/vmlaker/coils/blob/a3a613b3d661dec010e5879c86e62cbff2519dd0/coils/MapSock.py#L133-L140

Documentation: Receive a chunk of request from client.

```python
def _receive(self):
    result = self._talk.get()
    if not result:
        self._logger.error('Failed to receive')
    return result
```

Called functions: null
Enclosing scope: `class MapSockServer`, same class as shown in the `MapSockServer._send` record above.
**vmlaker/coils · `coils/MapSock.py` · `MapSockServer.run`** (python, train)
Source: https://github.com/vmlaker/coils/blob/a3a613b3d661dec010e5879c86e62cbff2519dd0/coils/MapSock.py#L142-L219

Documentation: Continuously retrieve client requests until given "stop" request.

```python
def run(self):
    while True:
        self._logger.debug('Accepting connection')
        conn, addr = self._sock.accept()
        self._talk = SocketTalk(conn, encode=self._encode)
        self._logger.debug('Receiving action')
        action = self._receive()
        if self._on_action:
            self._on_action(action)
        key = None
        if action in ('set', 'get', 'del',):
            self._logger.debug('Receiving key')
            key = self._receive()
        value = None
        if action in ('set',):
            self._logger.debug('Receiving value')
            value = self._receive()
        # Process the request.
        if action == 'stop':
            self._send('ok')
        elif action == 'set':
            self._data[key] = value
            self._send('ok')
        elif action == 'get':
            try:
                value = self._data[key]
            except KeyError:
                self._logger.debug('Sending "key not found"')
                self._send('key not found')
            else:
                self._logger.debug('Sending "ok"')
                self._send('ok')
                self._logger.debug('Sending value')
                self._send(value)
        elif action == 'del':
            try:
                del self._data[key]
            except KeyError:
                self._logger.debug('Sending "key not found"')
                self._send('key not found')
            else:
                self._logger.debug('Sending "ok"')
                self._send('ok')
        elif action == 'size':
            self._send('ok')
            self._send(str(len(self._data)))
        elif action == 'keys':
            pickled = pickle.dumps(self._data.keys())
            self._send('ok')
            self._send(pickled)
        else:
            self._send('unknown action %s' % action)
        self._logger.debug('Closing')
        try:
            conn.shutdown(socket.SHUT_RDWR)
        except socket.error:  # the peer may have closed the connection already
            self._logger.error('Failed to shutdown')
        conn.close()
        if action == 'stop':
            break
    self._logger.debug('Stopped')
```

Called functions: `MapSockServer._send` and `MapSockServer._receive` (each shown in its own record above).
Enclosing scope: `class MapSockServer`, same class as shown in the `MapSockServer._send` record above.
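Taken together, the three `MapSockServer` records describe a one-connection-per-request key/value protocol: each accepted connection carries one action (plus an optional key and value), gets a status string back, and is closed; a `stop` action ends the loop. A minimal end-to-end sketch, assuming `MapSockServer` and `MapSockClient` are both in scope and reusing the hypothetical `MapSockRequest` from the sketch above:

```python
import threading

server = MapSockServer('localhost', 8890)
thread = threading.Thread(target=server.run)
thread.start()

client = MapSockClient('localhost', 8890)
assert client.send(MapSockRequest('set', 'answer', '42')) == 'ok'
assert client.send(MapSockRequest('get', 'answer', None)) == '42'
assert client.send(MapSockRequest('del', 'answer', None)) == 'ok'

client.send(MapSockRequest('stop', None, None))  # server loop breaks
thread.join()
```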
**vmlaker/coils · `coils/Averager.py` · `Averager.add`** (python, train)
Source: https://github.com/vmlaker/coils/blob/a3a613b3d661dec010e5879c86e62cbff2519dd0/coils/Averager.py#L15-L20

Documentation: Add a value, and return current average.

```python
def add(self, value):
    self._data.append(value)
    if len(self._data) > self._max_count:
        self._data.popleft()
    return sum(self._data)/len(self._data)
```

Called functions: null

Enclosing scope:

```python
class Averager(object):
    """Keeps a running average with limited history."""

    def __init__(self, max_count):
        """Initialize the averager with maximum number of
        (latest) samples to keep."""
        self._max_count = max_count if max_count > 1 else 1  # Minimum is 1.
        self._data = collections.deque()

    def __len__(self):
        """Length operator."""
        return len(self._data)
```
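Since `add` both records the sample and returns the running mean, exercising it is one line per sample. A quick sketch, assuming `Averager` is importable from the `coils` package (the import line is not shown in this record):

```python
from coils import Averager  # assumed import path

avg = Averager(max_count=3)  # keep only the 3 most recent samples
print(avg.add(1.0))          # 1.0  (history: [1])
print(avg.add(2.0))          # 1.5  (history: [1, 2])
print(avg.add(3.0))          # 2.0  (history: [1, 2, 3])
print(avg.add(10.0))         # 5.0  (history: [2, 3, 10]; 1 was evicted)
print(len(avg))              # 3
```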
**vmlaker/coils · `coils/SocketTalk.py` · `SocketTalk.server`** (python, train)
Source: https://github.com/vmlaker/coils/blob/a3a613b3d661dec010e5879c86e62cbff2519dd0/coils/SocketTalk.py#L27-L35

Documentation: Return a SocketTalk server.

```python
def server(addr):
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.bind(addr)
    sock.listen(1)
    conn, addr = sock.accept()
    talk = SocketTalk(conn)
    return talk
```

Called functions: null

Enclosing scope:

```python
class SocketTalk:
    """A simple layer of socket communication, implementing
    send/receive messaging protocol where each (variable length)
    message is prefixed with a (fixed length) header containing
    the length of the remaining message data string.

    Ideas for protocol and byte-handling are borrowed from:
    http://docs.python.org/howto/sockets.html.
    """

    # Define the header of every message string handed to the socket.
    HEADER_LENGTH = 16
    HEADER_FORMAT = '{{:{0}d}}'.format(HEADER_LENGTH)

    @staticmethod
    def pair():
        """Return a pair of connected SocketTalk peers."""
        s1, s2 = socket.socketpair()
        return SocketTalk(s1), SocketTalk(s2)

    # server() is this record's function, shown above.

    @staticmethod
    def client(addr):
        """Return a SocketTalk client."""
        success = False
        while not success:
            try:
                sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
                sock.connect(addr)
                success = True
            except socket.error as err:
                sock.close()
        talk = SocketTalk(sock)
        return talk

    def __init__(self, sock, encode=True):
        """Initialize the object with a socket."""
        self._sock = sock
        self._encode = encode

    def put(self, message):
        """Send the given *message*.
        Return True if successful, False if not."""
        # First assemble the complete message string by prefixing the header.
        header = self.HEADER_FORMAT.format(len(message))
        full_message = header + message
        # Then send all bytes of the message.
        sent_count = 0
        while sent_count < len(full_message):
            try:
                msg = full_message[sent_count:]
                msg = msg.encode() if self._encode else msg
                count = self._sock.send(msg)
            except socket.error as err:
                print('error')
                return False
            if count == 0:
                return False
            sent_count += count
        return True

    def get(self):
        """Receive a message.
        Return the message upon successful reception, or None upon failure."""
        # First retrieve all header bytes in order to extract
        # length of the message remainder.
        header = ''
        while len(header) < self.HEADER_LENGTH:
            chunk = self._sock.recv(self.HEADER_LENGTH - len(header))
            chunk = chunk.decode() if self._encode else chunk
            if chunk == '':
                return None
            header += chunk
        length = int(header)
        # Then retrieve the remainder of the message.
        message = ''
        while len(message) < length:
            chunk = self._sock.recv(length - len(message))
            chunk = chunk.decode() if self._encode else chunk
            if chunk == '':
                return None
            message += chunk
        return message

    def close(self):
        self._sock.close()
```
**vmlaker/coils · `coils/SocketTalk.py` · `SocketTalk.client`** (python, train)
Source: https://github.com/vmlaker/coils/blob/a3a613b3d661dec010e5879c86e62cbff2519dd0/coils/SocketTalk.py#L38-L50

Documentation: Return a SocketTalk client.

```python
def client(addr):
    success = False
    while not success:
        try:
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            sock.connect(addr)
            success = True
        except socket.error as err:
            sock.close()
    talk = SocketTalk(sock)
    return talk
```

Called functions: null
"""A simple layer of socket communication, implementing
send/receive messaging protocol where each (variable length)
message is prefixed with a (fixed length) header containing
the length of the remaining message data string.
Ideas for protocol and byte-handling are borrowed from:
http://docs.python.org/howto/sockets.html.
"""
# Define the header of every message string handed to the socket.
HEADER_LENGTH = 16
HEADER_FORMAT = '{{:{0}d}}'.format(HEADER_LENGTH)
@staticmethod
def pair():
"""Return a pair of connected SocketTalk peers."""
s1, s2 = socket.socketpair()
return SocketTalk(s1), SocketTalk(s2)
@staticmethod
def server(addr):
"""Return a SocketTalk server."""
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind(addr)
sock.listen(1)
conn, addr = sock.accept()
talk = SocketTalk(conn)
return talk
@staticmethod
def __init__(self, sock, encode=True):
"""Initialize the object with a socket."""
self._sock = sock
self._encode = encode
def put(self, message):
"""Send the given *message*.
Return True if successful, False if not."""
# First assemble the complete message string by prefixing the header.
header = self.HEADER_FORMAT.format(len(message))
full_message = header + message
# Then send all bytes of the message.
sent_count = 0
while sent_count < len(full_message):
try:
msg = full_message[sent_count:]
msg = msg.encode() if self._encode else msg
count = self._sock.send(msg)
except socket.error as err:
print('error')
return False
if count == 0:
return False
sent_count += count
return True
def get(self):
"""Receive a message.
Return the message upon successful reception, or None upon failure."""
# First retrieve all header bytes in order to extract
# length of the message remainder.
header = ''
while len(header) < self.HEADER_LENGTH:
chunk = self._sock.recv(self.HEADER_LENGTH - len(header))
chunk = chunk.decode() if self._encode else chunk
if chunk == '':
return None
header += chunk
length = int(header)
# Then retrieve the remainder of the message.
message = ''
while len(message) < length:
chunk = self._sock.recv(length - len(message))
chunk = chunk.decode() if self._encode else chunk
if chunk == '':
return None
message += chunk
return message
def close(self):
self._sock.close()
|
**vmlaker/coils · `coils/SocketTalk.py` · `SocketTalk.put`** (python, train)
Source: https://github.com/vmlaker/coils/blob/a3a613b3d661dec010e5879c86e62cbff2519dd0/coils/SocketTalk.py#L57-L78

Documentation: Send the given *message*. Return True if successful, False if not.

```python
def put(self, message):
    # First assemble the complete message string by prefixing the header.
    header = self.HEADER_FORMAT.format(len(message))
    full_message = header + message
    # Then send all bytes of the message.
    sent_count = 0
    while sent_count < len(full_message):
        try:
            msg = full_message[sent_count:]
            msg = msg.encode() if self._encode else msg
            count = self._sock.send(msg)
        except socket.error as err:
            print('error')
            return False
        if count == 0:
            return False
        sent_count += count
    return True
```

Called functions: null
"""A simple layer of socket communication, implementing
send/receive messaging protocol where each (variable length)
message is prefixed with a (fixed length) header containing
the length of the remaining message data string.
Ideas for protocol and byte-handling are borrowed from:
http://docs.python.org/howto/sockets.html.
"""
# Define the header of every message string handed to the socket.
HEADER_LENGTH = 16
HEADER_FORMAT = '{{:{0}d}}'.format(HEADER_LENGTH)
@staticmethod
def pair():
"""Return a pair of connected SocketTalk peers."""
s1, s2 = socket.socketpair()
return SocketTalk(s1), SocketTalk(s2)
@staticmethod
def server(addr):
"""Return a SocketTalk server."""
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind(addr)
sock.listen(1)
conn, addr = sock.accept()
talk = SocketTalk(conn)
return talk
@staticmethod
def client(addr):
"""Return a SocketTalk client."""
success = False
while not success:
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.connect(addr)
success = True
except socket.error as err:
sock.close()
talk = SocketTalk(sock)
return talk
def __init__(self, sock, encode=True):
"""Initialize the object with a socket."""
self._sock = sock
self._encode = encode
def get(self):
"""Receive a message.
Return the message upon successful reception, or None upon failure."""
# First retrieve all header bytes in order to extract
# length of the message remainder.
header = ''
while len(header) < self.HEADER_LENGTH:
chunk = self._sock.recv(self.HEADER_LENGTH - len(header))
chunk = chunk.decode() if self._encode else chunk
if chunk == '':
return None
header += chunk
length = int(header)
# Then retrieve the remainder of the message.
message = ''
while len(message) < length:
chunk = self._sock.recv(length - len(message))
chunk = chunk.decode() if self._encode else chunk
if chunk == '':
return None
message += chunk
return message
def close(self):
self._sock.close()
|
**vmlaker/coils · `coils/SocketTalk.py` · `SocketTalk.get`** (python, train)
Source: https://github.com/vmlaker/coils/blob/a3a613b3d661dec010e5879c86e62cbff2519dd0/coils/SocketTalk.py#L80-L103

Documentation: Receive a message. Return the message upon successful reception, or None upon failure.

```python
def get(self):
    # First retrieve all header bytes in order to extract
    # length of the message remainder.
    header = ''
    while len(header) < self.HEADER_LENGTH:
        chunk = self._sock.recv(self.HEADER_LENGTH - len(header))
        chunk = chunk.decode() if self._encode else chunk
        if chunk == '':
            return None
        header += chunk
    length = int(header)
    # Then retrieve the remainder of the message.
    message = ''
    while len(message) < length:
        chunk = self._sock.recv(length - len(message))
        chunk = chunk.decode() if self._encode else chunk
        if chunk == '':
            return None
        message += chunk
    return message
```

Called functions: null
"""A simple layer of socket communication, implementing
send/receive messaging protocol where each (variable length)
message is prefixed with a (fixed length) header containing
the length of the remaining message data string.
Ideas for protocol and byte-handling are borrowed from:
http://docs.python.org/howto/sockets.html.
"""
# Define the header of every message string handed to the socket.
HEADER_LENGTH = 16
HEADER_FORMAT = '{{:{0}d}}'.format(HEADER_LENGTH)
@staticmethod
def pair():
"""Return a pair of connected SocketTalk peers."""
s1, s2 = socket.socketpair()
return SocketTalk(s1), SocketTalk(s2)
@staticmethod
def server(addr):
"""Return a SocketTalk server."""
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind(addr)
sock.listen(1)
conn, addr = sock.accept()
talk = SocketTalk(conn)
return talk
@staticmethod
def client(addr):
"""Return a SocketTalk client."""
success = False
while not success:
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.connect(addr)
success = True
except socket.error as err:
sock.close()
talk = SocketTalk(sock)
return talk
def __init__(self, sock, encode=True):
"""Initialize the object with a socket."""
self._sock = sock
self._encode = encode
def put(self, message):
"""Send the given *message*.
Return True if successful, False if not."""
# First assemble the complete message string by prefixing the header.
header = self.HEADER_FORMAT.format(len(message))
full_message = header + message
# Then send all bytes of the message.
sent_count = 0
while sent_count < len(full_message):
try:
msg = full_message[sent_count:]
msg = msg.encode() if self._encode else msg
count = self._sock.send(msg)
except socket.error as err:
print('error')
return False
if count == 0:
return False
sent_count += count
return True
def close(self):
self._sock.close()
|
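The `put`/`get` pair implements the 16-character fixed-width length prefix described in the class docstring. A round-trip sketch, assuming `SocketTalk` is importable from the `coils` package; `SocketTalk.pair()` (shown in the enclosing scope) wraps `socket.socketpair()`, so this runs without any network setup:

```python
from coils import SocketTalk  # assumed import path

a, b = SocketTalk.pair()

# On the wire, 'hello' travels as a 16-character decimal length
# field followed by the payload: '               5hello'.
assert a.put('hello')
assert b.get() == 'hello'

b.put('a much longer reply, still framed the same way')
print(a.get())

a.close()
b.close()
```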
**vmlaker/coils · `coils/Ring.py` · `Ring.turn`** (python, train)
Source: https://github.com/vmlaker/coils/blob/a3a613b3d661dec010e5879c86e62cbff2519dd0/coils/Ring.py#L26-L30

Documentation: Turn the ring for a single position. For example, [a, b, c, d] becomes [b, c, d, a].

```python
def turn(self):
    first = self._data.pop(0)
    self._data.append(first)
```

Called functions: null

Enclosing scope:

```python
class Ring:
    """Circular data structure implemented as a list."""

    def __init__(self, donor):
        """Initialize the ring with a donor list."""
        if not donor:
            raise ValueError('Ring must have at least one element.')
        self._data = donor

    def __repr__(self):
        return repr(self._data)

    def __len__(self):
        return len(self._data)

    def __getitem__(self, index):
        return self._data[index]

    def first(self):
        """Return the first entry."""
        return self._data[0]

    def last(self):
        """Return the last entry."""
        return self._data[-1]
```
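A short sketch of the ring in action, assuming `Ring` is importable from the `coils` package:

```python
from coils import Ring  # assumed import path

ring = Ring(['a', 'b', 'c', 'd'])
print(ring.first(), ring.last())  # a d

ring.turn()
print(ring)                 # ['b', 'c', 'd', 'a']
print(ring[0], len(ring))   # b 4

# Four turns bring the ring back to its starting order.
for _ in range(3):
    ring.turn()
print(ring)                 # ['a', 'b', 'c', 'd']
```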
**vmlaker/coils · `coils/Timer.py` · `Timer.get`** (python, train)
Source: https://github.com/vmlaker/coils/blob/a3a613b3d661dec010e5879c86e62cbff2519dd0/coils/Timer.py#L18-L23

Documentation: Return the number of seconds elapsed since object creation, or since last call to this function, whichever is more recent.

```python
def get(self):
    elapsed = datetime.now() - self._previous
    self._previous += elapsed
    return elapsed.total_seconds()
```

Called functions: null

Enclosing scope:

```python
class Timer:
    """Can be used like a timer or a stopwatch to measure time duration
    between clicks."""

    def __init__(self):
        """Initialize the object, marking the current time."""
        self._started = self._previous = datetime.now()

    def getTotal(self):
        """Return the number of seconds elapsed since object creation."""
        return (datetime.now() - self._started).total_seconds()
```
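Because `get` advances `_previous` by exactly the elapsed delta, successive calls return per-lap times while `getTotal` keeps the overall figure. A sketch, assuming `Timer` is importable from the `coils` package:

```python
import time
from coils import Timer  # assumed import path

timer = Timer()

time.sleep(0.2)
print('lap 1: %.1fs' % timer.get())       # ~0.2

time.sleep(0.3)
print('lap 2: %.1fs' % timer.get())       # ~0.3, measured since the last get()

print('total: %.1fs' % timer.getTotal())  # ~0.5, measured since construction
```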
**vmlaker/coils · `coils/String.py` · `string2time`** (python, train)
Source: https://github.com/vmlaker/coils/blob/a3a613b3d661dec010e5879c86e62cbff2519dd0/coils/String.py#L10-L22

Documentation: Return :class:`datetime.datetime` object from given string, or ``None`` if failed to translate.

```python
def string2time(text):
    length = len(TIME_FORMAT)
    result = None
    while length:
        try:
            result = datetime.strptime(text, TIME_FORMAT[:length])
        except ValueError:  # this prefix of the format did not match; try a shorter one
            length -= 1
        else:
            break
    return result
```

Called functions: null

Enclosing scope:

```python
"""Useful string conversions and such."""

from os.path import join
from datetime import datetime

MICRO = '.%f'
TIME_FORMAT = '%Y-%m-%d %H:%M:%S' + MICRO


def time2string(tstamp, micro=True):
    """Given a :class:`datetime.datetime` object,
    return a formatted time string."""
    tformat = TIME_FORMAT if micro else TIME_FORMAT[:-len(MICRO)]
    return tstamp.strftime(tformat)


def time2levels(tstamp):
    """Given a :class:`datetime.datetime` object,
    return a list of directory levels (as strings).

    For example, given "2013-09-08 13:01:44",
    return ['2013', '09', '08', '13', '01']
    """
    return [tstamp.strftime(xx) for xx in ('%Y', '%m', '%d', '%H', '%M')]


def time2dir(tstamp):
    """Given a :class:`datetime.datetime` object,
    return a path assembled with :func:`os.path.join`
    for the levels."""
    result = ''
    for field in time2levels(tstamp):
        result = join(result, field)
    return result


FILENAME_FORMAT = '__%Y-%m-%d__%H:%M:%S:%f__'


def time2fname(tstamp, full=False):
    """Return full path to filename prefix (i.e. without dot extension)
    represented by given :class:`datetime.datetime` object."""
    result = tstamp.strftime(FILENAME_FORMAT)
    result = result if not full else join(time2dir(tstamp), result)
    return result
```
**vmlaker/coils · `coils/String.py` · `time2string`** (python, train)
Source: https://github.com/vmlaker/coils/blob/a3a613b3d661dec010e5879c86e62cbff2519dd0/coils/String.py#L25-L29

Documentation: Given a :class:`datetime.datetime` object, return a formatted time string.

```python
def time2string(tstamp, micro=True):
    tformat = TIME_FORMAT if micro else TIME_FORMAT[:-len(MICRO)]
    return tstamp.strftime(tformat)
```

Called functions: null
Enclosing scope: module `coils/String.py`, same module source as shown in the `string2time` record above.
**vmlaker/coils · `coils/String.py` · `time2dir`** (python, train)
Source: https://github.com/vmlaker/coils/blob/a3a613b3d661dec010e5879c86e62cbff2519dd0/coils/String.py#L42-L49

Documentation: Given a :class:`datetime.datetime` object, return a path assembled with :func:`os.path.join` for the levels.

```python
def time2dir(tstamp):
    result = ''
    for field in time2levels(tstamp):
        result = join(result, field)
    return result
```

Called functions: `time2levels` (shown in the module source above).
Enclosing scope: module `coils/String.py`, same module source as shown in the `string2time` record above.
**vmlaker/coils · `coils/String.py` · `time2fname`** (python, train)
Source: https://github.com/vmlaker/coils/blob/a3a613b3d661dec010e5879c86e62cbff2519dd0/coils/String.py#L54-L59

Documentation: Return full path to filename prefix (i.e. without dot extension) represented by given :class:`datetime.datetime` object.

```python
def time2fname(tstamp, full=False):
    result = tstamp.strftime(FILENAME_FORMAT)
    result = result if not full else join(time2dir(tstamp), result)
    return result
```

Called functions: `time2dir` (shown in its own record above).
Enclosing scope: module `coils/String.py`, same module source as shown in the `string2time` record above.
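The four `coils/String.py` records form a small round-trip API: format a timestamp, parse it back (falling back to progressively shorter formats), and derive directory and filename paths from it. A sketch, assuming the helpers are importable from `coils.String`:

```python
from datetime import datetime
from coils.String import (string2time, time2string, time2levels,
                          time2dir, time2fname)  # assumed import path

t = datetime(2013, 9, 8, 13, 1, 44)

s = time2string(t, micro=False)
print(s)                          # 2013-09-08 13:01:44
print(string2time(s))             # datetime(2013, 9, 8, 13, 1, 44)
print(string2time('2013-09-08'))  # parsing falls back to the shorter '%Y-%m-%d'

print(time2levels(t))             # ['2013', '09', '08', '13', '01']
print(time2dir(t))                # 2013/09/08/13/01 (on POSIX)
print(time2fname(t, full=True))
# 2013/09/08/13/01/__2013-09-08__13:01:44:000000__
```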
**mass-project/mass_api_client · `mass_api_client/resources/report.py` · `Report.create`** (python, train)
Source: https://github.com/mass-project/mass_api_client/blob/b200c32c93608bf3b2707fbf0e83a2228702e2c8/mass_api_client/resources/report.py#L29-L55

Documentation:

Create a new report.

For convenience :func:`~mass_api_client.resources.scheduled_analysis.ScheduledAnalysis.create_report` of class :class:`.ScheduledAnalysis` can be used instead.

:param scheduled_analysis: The :class:`.ScheduledAnalysis` this report was created for
:param tags: A list of strings
:param json_report_objects: A dictionary of JSON reports, where the key is the object name.
:param raw_report_objects: A dictionary of binary file reports, where the key is the file name.
:param analysis_date: A datetime object of the time the report was generated. Defaults to current time.
:return: The newly created report object

```python
def create(cls, scheduled_analysis, tags=None, json_report_objects=None,
           raw_report_objects=None, additional_metadata=None, analysis_date=None):
    if tags is None:
        tags = []
    if additional_metadata is None:
        additional_metadata = {}
    if analysis_date is None:
        analysis_date = datetime.datetime.now()
    url = cls._creation_point.format(scheduled_analysis=scheduled_analysis.id)
    return cls._create(url=url, analysis_date=analysis_date, additional_json_files=json_report_objects,
                       additional_binary_files=raw_report_objects, tags=tags,
                       additional_metadata=additional_metadata, force_multipart=True)
```

Called functions: `BaseResource._create` (shown in the `BaseResource.query` record's enclosing scope below).

Enclosing scope:

```python
class Report(BaseResource):
    REPORT_STATUS_CODE_OK = 0
    REPORT_STATUS_CODE_FAILURE = 1
    REPORT_STATUS_CODES = [REPORT_STATUS_CODE_OK, REPORT_STATUS_CODE_FAILURE]

    schema = ReportSchema()
    _endpoint = 'report'
    _creation_point = 'scheduled_analysis/{scheduled_analysis}/submit_report/'

    def __init__(self, connection_alias, **kwargs):
        super(Report, self).__init__(connection_alias, **kwargs)
        self._json_reports_cache = None

    def __repr__(self):
        return '[Report] {} on {}'.format(self.sample, self.analysis_system)

    def __str__(self):
        return self.__repr__()

    @property
    def json_reports(self):
        if self._json_reports_cache:
            return self._json_reports_cache

        self._json_reports_cache = {}
        for key in self.json_report_objects.keys():
            self._json_reports_cache[key] = self.get_json_report_object(key)

        return self._json_reports_cache

    def get_json_report_object(self, key):
        """
        Retrieve a JSON report object of the report.

        :param key: The key of the report object
        :return: The deserialized JSON report object.
        """
        con = ConnectionManager().get_connection(self._connection_alias)
        return con.get_json(self.json_report_objects[key], append_base_url=False)

    def download_raw_report_object_to_file(self, key, file):
        """
        Download a raw report object and store it in a file.

        :param key: The key of the report object
        :param file: A file-like object to store the report object.
        """
        con = ConnectionManager().get_connection(self._connection_alias)
        return con.download_to_file(self.raw_report_objects[key], file, append_base_url=False)
```
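The module docstring of `mass_api_client/utils.py` (see the `get_or_create_analysis_system_instance` record below) shows the intended call pattern: analysis code usually goes through `ScheduledAnalysis.create_report` rather than calling `Report.create` directly. A sketch of the report-submission step along those lines; the tuple value format for `json_report_objects` is taken from that docstring:

```python
import os

def size_analysis(scheduled_analysis):
    # Compute something about the sample...
    sample = scheduled_analysis.get_sample()
    with sample.temporary_file() as f:
        size_report = {'sample_file_size': os.path.getsize(f.name)}

    # ...and submit it; internally this ends up in Report.create above.
    scheduled_analysis.create_report(
        json_report_objects={'size_report': ('size_report', size_report)},
    )
```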
**mass-project/mass_api_client · `mass_api_client/resources/report.py` · `Report.get_json_report_object`** (python, train)
Source: https://github.com/mass-project/mass_api_client/blob/b200c32c93608bf3b2707fbf0e83a2228702e2c8/mass_api_client/resources/report.py#L67-L75

Documentation:

Retrieve a JSON report object of the report.

:param key: The key of the report object
:return: The deserialized JSON report object.

```python
def get_json_report_object(self, key):
    con = ConnectionManager().get_connection(self._connection_alias)
    return con.get_json(self.json_report_objects[key], append_base_url=False)
```

Called functions: `ConnectionManager.get_connection` (shown in the `ConnectionManager.register_connection` record below).
Enclosing scope: `class Report(BaseResource)`, same class as shown in the `Report.create` record above.
**mass-project/mass_api_client · `mass_api_client/resources/report.py` · `Report.download_raw_report_object_to_file`** (python, train)
Source: https://github.com/mass-project/mass_api_client/blob/b200c32c93608bf3b2707fbf0e83a2228702e2c8/mass_api_client/resources/report.py#L77-L85

Documentation:

Download a raw report object and store it in a file.

:param key: The key of the report object
:param file: A file-like object to store the report object.

```python
def download_raw_report_object_to_file(self, key, file):
    con = ConnectionManager().get_connection(self._connection_alias)
    return con.download_to_file(self.raw_report_objects[key], file, append_base_url=False)
```

Called functions: `ConnectionManager.get_connection` (shown in the `ConnectionManager.register_connection` record below).
Enclosing scope: `class Report(BaseResource)`, same class as shown in the `Report.create` record above.
**mass-project/mass_api_client · `mass_api_client/resources/base.py` · `BaseResource.query`** (python, train)
Source: https://github.com/mass-project/mass_api_client/blob/b200c32c93608bf3b2707fbf0e83a2228702e2c8/mass_api_client/resources/base.py#L117-L136

Documentation:

Query multiple objects.

:param kwargs: The query parameters. The key is the filter parameter and the value is the value to search for.
:return: The list of matching objects
:raises: A `ValueError` if at least one of the supplied parameters is not in the list of allowed parameters.

```python
def query(cls, **kwargs):
    params = dict(cls._default_filters)

    for key, value in kwargs.items():
        if key in cls._filter_parameters:
            if isinstance(value, datetime):
                params[key] = value.strftime('%Y-%m-%dT%H:%M:%S+00:00')
            else:
                params[key] = value
        else:
            raise ValueError('\'{}\' is not a filter parameter for class \'{}\''.format(key, cls.__name__))

    return cls._get_iter_from_url('{}/'.format(cls._endpoint), params=params)
```

Called functions: `BaseResource._get_iter_from_url` (shown in the enclosing scope of this record).

Enclosing scope:

```python
class BaseResource:
    schema = None
    _endpoint = None
    _creation_point = None
    _filter_parameters = []
    _default_filters = {}
    _connection_alias = 'default'

    def __init__(self, connection_alias, **kwargs):
        # Store current connection, in case the connection gets switched later on.
        self._connection_alias = connection_alias
        self.__dict__.update(kwargs)

    @property
    def schema(cls):
        return Ref('schema').resolve(cls)

    @classmethod
    def _deserialize(cls, data, many=False):
        deserialized, errors = cls.schema.load(data, many=many)
        if errors:
            raise ValueError('An error occurred during object deserialization: {}'.format(errors))
        return deserialized

    @classmethod
    def _create_instance_from_data(cls, data):
        return cls(cls._connection_alias, **data)

    @classmethod
    def _get_detail_from_url(cls, url, append_base_url=True):
        con = ConnectionManager().get_connection(cls._connection_alias)
        deserialized = cls._deserialize(con.get_json(url, append_base_url=append_base_url))
        return cls._create_instance_from_data(deserialized)

    @classmethod
    def _get_iter_from_url(cls, url, params=None, append_base_url=True):
        if params is None:
            params = {}

        con = ConnectionManager().get_connection(cls._connection_alias)
        next_url = url

        while next_url is not None:
            res = con.get_json(next_url, params=params, append_base_url=append_base_url)
            deserialized = cls._deserialize(res['results'], many=True)
            for data in deserialized:
                yield cls._create_instance_from_data(data)
            try:
                next_url = res['next']
            except KeyError:
                return  # no more pages to fetch
            append_base_url = False

    @classmethod
    def _get_list_from_url(cls, url, params=None, append_base_url=True):
        if params is None:
            params = {}

        con = ConnectionManager().get_connection(cls._connection_alias)
        deserialized = cls._deserialize(con.get_json(url, params=params, append_base_url=append_base_url)['results'], many=True)
        objects = [cls._create_instance_from_data(detail) for detail in deserialized]
        return objects

    @classmethod
    def _create(cls, additional_json_files=None, additional_binary_files=None, url=None, force_multipart=False, **kwargs):
        con = ConnectionManager().get_connection(cls._connection_alias)
        if not url:
            url = '{}/'.format(cls._creation_point)
        serialized, errors = cls.schema.dump(kwargs)

        if additional_binary_files or additional_json_files or force_multipart:
            response_data = con.post_multipart(url, serialized, json_files=additional_json_files, binary_files=additional_binary_files)
        else:
            response_data = con.post_json(url, serialized)

        deserialized = cls._deserialize(response_data)

        return cls._create_instance_from_data(deserialized)

    @classmethod
    def get(cls, identifier):
        """
        Fetch a single object.

        :param identifier: The unique identifier of the object
        :return: The retrieved object
        """
        return cls._get_detail_from_url('{}/{}/'.format(cls._endpoint, identifier))

    @classmethod
    def items(cls):
        return cls._get_iter_from_url('{}/'.format(cls._endpoint), params=cls._default_filters)

    @classmethod
    def all(cls):
        return cls._get_list_from_url('{}/'.format(cls._endpoint), params=cls._default_filters)

    def _to_json(self):
        serialized, errors = self.schema.dump(self)
        if errors:
            raise ValueError('An error occurred during object serialization: {}'.format(errors))
        return serialized
```
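None of the records here show a concrete resource with `_filter_parameters` filled in, so the following sketch defines a hypothetical subclass purely to illustrate the filtering and `datetime` handling in `query`; the `Sample` class, its endpoint, and its filter names are invented for the example. Since `query` takes `cls` as its first parameter, it is presumably a classmethod on `BaseResource` in the source, so subclasses call it directly (a registered connection and reachable server are needed for the iteration to succeed):

```python
from datetime import datetime

class Sample(BaseResource):  # hypothetical resource, for illustration only
    _endpoint = 'sample'
    _filter_parameters = ['delivery_date__gte']

# Allowed filters pass through; datetimes are serialized as
# '%Y-%m-%dT%H:%M:%S+00:00' before they reach the query string.
recent = Sample.query(delivery_date__gte=datetime(2017, 1, 1))
for sample in recent:  # lazily pages through results via _get_iter_from_url
    print(sample)

Sample.query(color='red')  # raises ValueError: not a filter parameter
```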
**mass-project/mass_api_client · `mass_api_client/resources/analysis_request.py` · `AnalysisRequest.create`** (python, train)
Source: https://github.com/mass-project/mass_api_client/blob/b200c32c93608bf3b2707fbf0e83a2228702e2c8/mass_api_client/resources/analysis_request.py#L11-L19

Documentation:

Create a new :class:`.AnalysisRequest` on the server.

:param sample: A `Sample` object
:param analysis_system: The :class:`AnalysisSystem` that should be used for the analysis.
:return: The created :class:`AnalysisRequest` object.

```python
def create(cls, sample, analysis_system):
    return cls._create(sample=sample.url, analysis_system=analysis_system.url)
```

Called functions: `BaseResource._create` (shown in the `BaseResource.query` record's enclosing scope above).

Enclosing scope:

```python
class AnalysisRequest(BaseResource):
    schema = AnalysisRequestSchema()
    _endpoint = 'analysis_request'
    _creation_point = _endpoint
```
**mass-project/mass_api_client · `mass_api_client/connection_manager.py` · `ConnectionManager.register_connection`** (python, train)
Source: https://github.com/mass-project/mass_api_client/blob/b200c32c93608bf3b2707fbf0e83a2228702e2c8/mass_api_client/connection_manager.py#L94-L109

Documentation:

Create and register a new connection.

:param alias: The alias of the connection. If not changed with `switch_connection`, the connection with default 'alias' is used by the resources.
:param api_key: The private api key.
:param base_url: The api url including protocol, host, port (optional) and location.
:param timeout: The time in seconds to wait for 'connect' and 'read' respectively. Use a tuple to set these values separately or None to wait forever.
:return:

```python
def register_connection(self, alias, api_key, base_url, timeout=5):
    if not base_url.endswith('/'):
        base_url += '/'

    self._connections[alias] = Connection(api_key, base_url, timeout)
```

Called functions: null

Enclosing scope:

```python
class ConnectionManager:
    _connections = {}

    def get_connection(self, alias):
        if alias not in self._connections:
            raise RuntimeError("Connection '{}' is not defined. "
                               "Use ConnectionManager().register_connection(...) to do so.".format(alias))

        return self._connections[alias]
```
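Registration is the one piece of global setup every client performs; the module docstring in `mass_api_client/utils.py` below uses exactly this pattern. A sketch (the URL and key are placeholders; the tuple timeout follows the docstring above):

```python
from mass_api_client import ConnectionManager

# Register the 'default' connection that all resource classes use.
ConnectionManager().register_connection(
    'default',
    'your api key',                   # placeholder credentials
    'https://mass.example.org/api/',  # placeholder server URL
    timeout=(3.05, 30),               # separate connect/read timeouts
)

# _connections is a class attribute, so any ConnectionManager
# instance can look the connection up by alias.
con = ConnectionManager().get_connection('default')
```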
**mass-project/mass_api_client · `mass_api_client/resources/analysis_system.py` · `AnalysisSystem.create`** (python, train)
Source: https://github.com/mass-project/mass_api_client/blob/b200c32c93608bf3b2707fbf0e83a2228702e2c8/mass_api_client/resources/analysis_system.py#L12-L21

Documentation:

Create a new :class:`AnalysisSystem` on the server.

:param identifier_name: Unique identifier string.
:param verbose_name: A descriptive name of the AnalysisSystem.
:param tag_filter_expression: Tag filters to automatically select samples for this AnalysisSystem.
:return: The created :class:`AnalysisSystem` object.

```python
def create(cls, identifier_name, verbose_name, tag_filter_expression=''):
    return cls._create(identifier_name=identifier_name, verbose_name=verbose_name, tag_filter_expression=tag_filter_expression)
```

Called functions: `BaseResource._create` (shown in the `BaseResource.query` record's enclosing scope above).

Enclosing scope:

```python
class AnalysisSystem(BaseResource):
    schema = AnalysisSystemSchema()
    _endpoint = 'analysis_system'
    _creation_point = _endpoint

    def create_analysis_system_instance(self):
        """
        Create an instance of this AnalysisSystem on the server.

        :return: The created :class:`AnalysisSystemInstance` object.
        """
        return AnalysisSystemInstance.create(analysis_system=self)

    def __repr__(self):
        return '[AnalysisSystem] {}'.format(self.identifier_name)

    def __str__(self):
        return self.__repr__()
```
**mass-project/mass_api_client · `mass_api_client/utils.py` · `get_or_create_analysis_system_instance`** (python, train)
Source: https://github.com/mass-project/mass_api_client/blob/b200c32c93608bf3b2707fbf0e83a2228702e2c8/mass_api_client/utils.py#L37-L72

Documentation:

Get or create an analysis system instance for the analysis system with the respective identifier.

This is a function for solving a common problem with implementations of MASS analysis clients. If the analysis system instance already exists, one has either a uuid or a file with the uuid as content. In this case one can retrieve the analysis system instance with the uuid. Otherwise one wants to create an instance for the analysis system with the given identifier. If the analysis system does not yet exist, it is also created. Then an analysis system instance for the analysis system is created and the uuid is saved to the uuid_file.

:param instance_uuid: If not empty, directly gets the analysis system instance with the given uuid and tries nothing else.
:param uuid_file: A filepath. If not empty, tries to read an uuid from the filepath. Otherwise the uuid is later saved to this file.
:param identifier: Get an instance for an analysis system with the given identifier as string.
:param verbose_name: The verbose name of the respective analysis system.
:param tag_filter_exp: The tag filter expression as a string of the respective analysis system.
:return: an analysis system instance

```python
def get_or_create_analysis_system_instance(instance_uuid='', identifier='', verbose_name='', tag_filter_exp='', uuid_file='uuid.txt'):
    if instance_uuid:
        return resources.AnalysisSystemInstance.get(instance_uuid)

    try:
        with open(uuid_file, 'r') as uuid_fp:
            instance_uuid = uuid_fp.read().strip()
        return resources.AnalysisSystemInstance.get(instance_uuid)
    except IOError:
        logging.debug('UUID file does not exist.')

    try:
        analysis_system = resources.AnalysisSystem.get(identifier)
    except requests.HTTPError:
        analysis_system = resources.AnalysisSystem.create(identifier, verbose_name, tag_filter_exp)

    analysis_system_instance = analysis_system.create_analysis_system_instance()
    with open(uuid_file, 'w') as uuid_fp:
        uuid_fp.write(analysis_system_instance.uuid)

    return analysis_system_instance
```

Called functions: `AnalysisSystem.create` (its own record above) and `BaseResource.get` (in the `BaseResource` scope above).

Enclosing scope:

```python
"""Utility functions for writing an analysis client.

Example
-------

import os
from mass_api_client import ConnectionManager
from mass_api_client.utils import process_analyses, get_or_create_analysis_system_instance

def size_analysis(scheduled_analysis):
    sample = scheduled_analysis.get_sample()
    with sample.temporary_file() as f:
        sample_file_size = os.path.getsize(f.name)

    size_report = {'sample_file_size': sample_file_size}

    scheduled_analysis.create_report(
        json_report_objects={'size_report': ('size_report', size_report)},
    )

if __name__ == "__main__":
    ConnectionManager().register_connection('default', 'your api key', 'mass server url')

    analysis_system_instance = get_or_create_analysis_system_instance(identifier='size',
                                                                      verbose_name='Size Analysis Client',
                                                                      tag_filter_exp='sample-type:filesample',
                                                                      )

    process_analyses(analysis_system_instance, size_analysis, sleep_time=7)
"""
import requests
from mass_api_client import resources
import logging
import time

logging.getLogger(__name__).addHandler(logging.NullHandler())


# get_or_create_analysis_system_instance() is this record's function, shown above.


def process_analyses(analysis_system_instance, analysis_method, sleep_time):
    """Process all analyses which are scheduled for the analysis system instance.

    This function does not terminate on its own, give it a SIGINT or Ctrl+C to stop.

    :param analysis_system_instance: The analysis system instance for which the analyses are scheduled.
    :param analysis_method: A function or method which analyses a scheduled analysis. The function must not take further arguments.
    :param sleep_time: Time to wait between polls to the MASS server
    """
    try:
        while True:
            for analysis_request in analysis_system_instance.get_scheduled_analyses():
                analysis_method(analysis_request)
            time.sleep(sleep_time)
    except KeyboardInterrupt:
        logging.debug('Shutting down.')
        return
```
mass-project/mass_api_client | mass_api_client/utils.py | process_analyses | python | def process_analyses(analysis_system_instance, analysis_method, sleep_time):
try:
while True:
for analysis_request in analysis_system_instance.get_scheduled_analyses():
analysis_method(analysis_request)
time.sleep(sleep_time)
except KeyboardInterrupt:
logging.debug('Shutting down.')
return | Process all analyses which are scheduled for the analysis system instance.
This function does not terminate on its own, give it a SIGINT or Ctrl+C to stop.
:param analysis_system_instance: The analysis system instance for which the analyses are scheduled.
:param analysis_method: A function or method which analyses a scheduled analysis. The function must not take further arguments.
:param sleep_time: Time to wait between polls to the MASS server | train | https://github.com/mass-project/mass_api_client/blob/b200c32c93608bf3b2707fbf0e83a2228702e2c8/mass_api_client/utils.py#L75-L91 | null | """Utility functions for writing an analysis client.
Example
-------
import os
from mass_api_client import ConnectionManager
from mass_api_client.utils import process_analyses, get_or_create_analysis_system_instance
def size_analysis(scheduled_analysis):
sample = scheduled_analysis.get_sample()
with sample.temporary_file() as f:
sample_file_size = os.path.getsize(f.name)
size_report = {'sample_file_size': sample_file_size}
scheduled_analysis.create_report(
json_report_objects={'size_report': ('size_report', size_report)},
)
if __name__ == "__main__":
ConnectionManager().register_connection('default', 'your api key', 'mass server url')
analysis_system_instance = get_or_create_analysis_system_instance(identifier='size',
verbose_name= 'Size Analysis Client',
tag_filter_exp='sample-type:filesample',
)
process_analyses(analysis_system_instance, size_analysis, sleep_time=7)
"""
import requests
from mass_api_client import resources
import logging
import time
logging.getLogger(__name__).addHandler(logging.NullHandler())
def get_or_create_analysis_system_instance(instance_uuid='', identifier='', verbose_name='', tag_filter_exp='', uuid_file='uuid.txt'):
"""Get or create an analysis system instance for the analysis system with the respective identifier.
This is a function for solving a common problem with implementations of MASS analysis clients.
If the analysis system instance already exists, one has either a uuid or a file with the uuid as content.
In this case one can retrieve the analysis system instance with the uuid.
Otherwise one wants to create an instance for the analysis system with the given identifier.
If the analysis system does not yet exists, it is also created.
Then an analysis system instance for the analysis system is created and the uuid is saved to the uuid_file.
:param instance_uuid: If not empty, directly gets the analysis system instance with the given uuid and tries nothing else.
:param uuid_file: A filepath. If not empty, tries to read an uuid from the filepath. Otherwise the uuid is later saved to this file.
:param identifier: Get an instance for an analysis system with the given identifier as string.
:param verbose_name: The verbose name of the respective analysis system.
:param tag_filter_exp: The tag filter expression as a string of the respective analysis system.
:return: a analysis system instance
"""
if instance_uuid:
return resources.AnalysisSystemInstance.get(instance_uuid)
try:
with open(uuid_file, 'r') as uuid_fp:
instance_uuid = uuid_fp.read().strip()
return resources.AnalysisSystemInstance.get(instance_uuid)
except IOError:
logging.debug('UUID file does not exist.')
try:
analysis_system = resources.AnalysisSystem.get(identifier)
except requests.HTTPError:
analysis_system = resources.AnalysisSystem.create(identifier, verbose_name, tag_filter_exp)
analysis_system_instance = analysis_system.create_analysis_system_instance()
with open(uuid_file, 'w') as uuid_fp:
uuid_fp.write(analysis_system_instance.uuid)
return analysis_system_instance
|
mass-project/mass_api_client | mass_api_client/resources/scheduled_analysis.py | ScheduledAnalysis.create | python | def create(cls, analysis_system_instance, sample):
return cls._create(analysis_system_instance=analysis_system_instance.url, sample=sample.url) | Create a new :class:`ScheduledAnalysis` on the server.
For convenience
:func:`~mass_api_client.resources.analysis_system_instance.AnalysisSystemInstance.schedule_analysis`
of class :class:`.AnalysisSystemInstance` can be used instead.
:param analysis_system_instance: The :class:`.AnalysisSystemInstance` for which the sample should be scheduled.
:param sample: The class:`.Sample` object to be scheduled.
:return: The created :class:`ScheduledAnalysis` object. | train | https://github.com/mass-project/mass_api_client/blob/b200c32c93608bf3b2707fbf0e83a2228702e2c8/mass_api_client/resources/scheduled_analysis.py#L13-L25 | [
"def _create(cls, additional_json_files=None, additional_binary_files=None, url=None, force_multipart=False, **kwargs):\n con = ConnectionManager().get_connection(cls._connection_alias)\n if not url:\n url = '{}/'.format(cls._creation_point)\n serialized, errors = cls.schema.dump(kwargs)\n\n if additional_binary_files or additional_json_files or force_multipart:\n response_data = con.post_multipart(url, serialized, json_files=additional_json_files, binary_files=additional_binary_files)\n else:\n response_data = con.post_json(url, serialized)\n\n deserialized = cls._deserialize(response_data)\n\n return cls._create_instance_from_data(deserialized)\n"
] | class ScheduledAnalysis(BaseResource):
schema = ScheduledAnalysisSchema()
_endpoint = 'scheduled_analysis'
_creation_point = _endpoint
@classmethod
def create_report(self, additional_metadata=None, json_report_objects=None, raw_report_objects=None, tags=None, analysis_date=None):
"""
Create a :class:`.Report` and remove the :class:`ScheduledAnalysis` from the server.
:param additional_metadata: A dictionary of additional metadata.
:param json_report_objects: A dictionary of JSON reports, where the key is the object name.
:param raw_report_objects: A dictionary of binary file reports, where the key is the file name.
:param tags: A list of strings.
:param analysis_date: :py:mod:`datetime` object of the time the report was generated. Defaults to current time.
:return: The created :class:`.Report` object.
"""
return Report.create(self, json_report_objects=json_report_objects, raw_report_objects=raw_report_objects, additional_metadata=additional_metadata, tags=tags, analysis_date=analysis_date)
def get_sample(self):
"""
Retrieve the scheduled :class:`.Sample`.
:return: The corresponding :class:`.Sample` object.
"""
sample_url = self.sample
sample = Sample._get_detail_from_url(sample_url, append_base_url=False)
return sample
|
mass-project/mass_api_client | mass_api_client/resources/scheduled_analysis.py | ScheduledAnalysis.create_report | python | def create_report(self, additional_metadata=None, json_report_objects=None, raw_report_objects=None, tags=None, analysis_date=None):
return Report.create(self, json_report_objects=json_report_objects, raw_report_objects=raw_report_objects, additional_metadata=additional_metadata, tags=tags, analysis_date=analysis_date) | Create a :class:`.Report` and remove the :class:`ScheduledAnalysis` from the server.
:param additional_metadata: A dictionary of additional metadata.
:param json_report_objects: A dictionary of JSON reports, where the key is the object name.
:param raw_report_objects: A dictionary of binary file reports, where the key is the file name.
:param tags: A list of strings.
:param analysis_date: :py:mod:`datetime` object of the time the report was generated. Defaults to current time.
:return: The created :class:`.Report` object. | train | https://github.com/mass-project/mass_api_client/blob/b200c32c93608bf3b2707fbf0e83a2228702e2c8/mass_api_client/resources/scheduled_analysis.py#L27-L38 | [
"def create(cls, scheduled_analysis, tags=None, json_report_objects=None, raw_report_objects=None, additional_metadata=None, analysis_date=None):\n \"\"\"\n Create a new report.\n\n For convenience :func:`~mass_api_client.resources.scheduled_analysis.ScheduledAnalysis.create_report`\n of class :class:`.ScheduledAnalysis` can be used instead.\n\n :param scheduled_analysis: The :class:`.ScheduledAnalysis` this report was created for\n :param tags: A list of strings\n :param json_report_objects: A dictionary of JSON reports, where the key is the object name.\n :param raw_report_objects: A dictionary of binary file reports, where the key is the file name.\n :param analysis_date: A datetime object of the time the report was generated. Defaults to current time.\n :return: The newly created report object\n \"\"\"\n if tags is None:\n tags = []\n\n if additional_metadata is None:\n additional_metadata = {}\n\n if analysis_date is None:\n analysis_date = datetime.datetime.now()\n\n url = cls._creation_point.format(scheduled_analysis=scheduled_analysis.id)\n return cls._create(url=url, analysis_date=analysis_date, additional_json_files=json_report_objects,\n additional_binary_files=raw_report_objects, tags=tags,\n additional_metadata=additional_metadata, force_multipart=True)\n"
] | class ScheduledAnalysis(BaseResource):
schema = ScheduledAnalysisSchema()
_endpoint = 'scheduled_analysis'
_creation_point = _endpoint
@classmethod
def create(cls, analysis_system_instance, sample):
"""
Create a new :class:`ScheduledAnalysis` on the server.
For convenience
:func:`~mass_api_client.resources.analysis_system_instance.AnalysisSystemInstance.schedule_analysis`
of class :class:`.AnalysisSystemInstance` can be used instead.
:param analysis_system_instance: The :class:`.AnalysisSystemInstance` for which the sample should be scheduled.
:param sample: The class:`.Sample` object to be scheduled.
:return: The created :class:`ScheduledAnalysis` object.
"""
return cls._create(analysis_system_instance=analysis_system_instance.url, sample=sample.url)
def get_sample(self):
"""
Retrieve the scheduled :class:`.Sample`.
:return: The corresponding :class:`.Sample` object.
"""
sample_url = self.sample
sample = Sample._get_detail_from_url(sample_url, append_base_url=False)
return sample
|
mass-project/mass_api_client | mass_api_client/resources/scheduled_analysis.py | ScheduledAnalysis.get_sample | python | def get_sample(self):
sample_url = self.sample
sample = Sample._get_detail_from_url(sample_url, append_base_url=False)
return sample | Retrieve the scheduled :class:`.Sample`.
:return: The corresponding :class:`.Sample` object. | train | https://github.com/mass-project/mass_api_client/blob/b200c32c93608bf3b2707fbf0e83a2228702e2c8/mass_api_client/resources/scheduled_analysis.py#L40-L48 | [
"def _get_detail_from_url(cls, url, append_base_url=True):\n con = ConnectionManager().get_connection(cls._connection_alias)\n\n deserialized = cls._deserialize(con.get_json(url, append_base_url=append_base_url))\n return cls._create_instance_from_data(deserialized)\n"
] | class ScheduledAnalysis(BaseResource):
schema = ScheduledAnalysisSchema()
_endpoint = 'scheduled_analysis'
_creation_point = _endpoint
@classmethod
def create(cls, analysis_system_instance, sample):
"""
Create a new :class:`ScheduledAnalysis` on the server.
For convenience
:func:`~mass_api_client.resources.analysis_system_instance.AnalysisSystemInstance.schedule_analysis`
of class :class:`.AnalysisSystemInstance` can be used instead.
:param analysis_system_instance: The :class:`.AnalysisSystemInstance` for which the sample should be scheduled.
:param sample: The class:`.Sample` object to be scheduled.
:return: The created :class:`ScheduledAnalysis` object.
"""
return cls._create(analysis_system_instance=analysis_system_instance.url, sample=sample.url)
def create_report(self, additional_metadata=None, json_report_objects=None, raw_report_objects=None, tags=None, analysis_date=None):
"""
Create a :class:`.Report` and remove the :class:`ScheduledAnalysis` from the server.
:param additional_metadata: A dictionary of additional metadata.
:param json_report_objects: A dictionary of JSON reports, where the key is the object name.
:param raw_report_objects: A dictionary of binary file reports, where the key is the file name.
:param tags: A list of strings.
:param analysis_date: :py:mod:`datetime` object of the time the report was generated. Defaults to current time.
:return: The created :class:`.Report` object.
"""
return Report.create(self, json_report_objects=json_report_objects, raw_report_objects=raw_report_objects, additional_metadata=additional_metadata, tags=tags, analysis_date=analysis_date)
|
mass-project/mass_api_client | mass_api_client/resources/analysis_system_instance.py | AnalysisSystemInstance.get_scheduled_analyses | python | def get_scheduled_analyses(self):
url = '{}scheduled_analyses/'.format(self.url)
return ScheduledAnalysis._get_list_from_url(url, append_base_url=False) | Retrieve all scheduled analyses for this instance.
:return: A list of :class:`.ScheduledAnalysis` objects. | train | https://github.com/mass-project/mass_api_client/blob/b200c32c93608bf3b2707fbf0e83a2228702e2c8/mass_api_client/resources/analysis_system_instance.py#L34-L41 | [
"def _get_list_from_url(cls, url, params=None, append_base_url=True):\n if params is None:\n params = {}\n\n con = ConnectionManager().get_connection(cls._connection_alias)\n deserialized = cls._deserialize(con.get_json(url, params=params, append_base_url=append_base_url)['results'], many=True)\n objects = [cls._create_instance_from_data(detail) for detail in deserialized]\n\n return objects\n"
] | class AnalysisSystemInstance(BaseResource):
schema = AnalysisSystemInstanceSchema()
_endpoint = 'analysis_system_instance'
_creation_point = _endpoint
@classmethod
def create(cls, analysis_system):
"""
Create a new :class:`AnalysisSystemInstance` on the server.
For convenience
:func:`~mass_api_client.resources.analysis_system.AnalysisSystem.create_analysis_system_instance`
of class :class:`.AnalysisSystem` can be used instead.
:param analysis_system: The corresponding :class:`.AnalysisSystem` object
:return: The created :class:`AnalysisSystemInstance` object
"""
return cls._create(analysis_system=analysis_system.url)
def schedule_analysis(self, sample):
"""
Schedule the given sample for this instance on the server.
:param sample: The sample object to be scheduled.
:return: The created :class:`.ScheduledAnalysis` object.
"""
return ScheduledAnalysis.create(self, sample)
def __repr__(self):
return '[AnalysisSystemInstance] {}'.format(self.uuid)
def __str__(self):
return self.__repr__()
|
mass-project/mass_api_client | mass_api_client/resources/sample.py | Sample.get_reports | python | def get_reports(self):
url = '{}reports/'.format(self.url)
return Report._get_list_from_url(url, append_base_url=False) | Retrieve all reports submitted for this Sample.
:return: A list of :class:`.Report` | train | https://github.com/mass-project/mass_api_client/blob/b200c32c93608bf3b2707fbf0e83a2228702e2c8/mass_api_client/resources/sample.py#L22-L29 | [
"def _get_list_from_url(cls, url, params=None, append_base_url=True):\n if params is None:\n params = {}\n\n con = ConnectionManager().get_connection(cls._connection_alias)\n deserialized = cls._deserialize(con.get_json(url, params=params, append_base_url=append_base_url)['results'], many=True)\n objects = [cls._create_instance_from_data(detail) for detail in deserialized]\n\n return objects\n"
] | class Sample(BaseWithSubclasses):
_endpoint = 'sample'
_class_identifier = 'Sample'
_filter_parameters = [
'delivery_date__lte',
'delivery_date__gte',
'first_seen__lte',
'first_seen__gte',
'tags__all'
]
def get_relation_graph(self, depth=None):
"""
Get all `SampleRelation`s in the relation graph of the sample.
:param depth: max depth of the returned graph. None retrieves the complete graph.
:return: An iterator over the relations
"""
url = '{}relation_graph/'.format(self.url)
if depth is not None:
params = {'depth': depth}
else:
params = {}
from .sample_relation import SampleRelation
return SampleRelation._get_iter_from_url(url, params=params, append_base_url=False)
def __repr__(self):
return '[{}] {}'.format(str(self.__class__.__name__), str(self.id))
def __str__(self):
return self.__repr__()
|
mass-project/mass_api_client | mass_api_client/resources/sample.py | Sample.get_relation_graph | python | def get_relation_graph(self, depth=None):
url = '{}relation_graph/'.format(self.url)
if depth is not None:
params = {'depth': depth}
else:
params = {}
from .sample_relation import SampleRelation
return SampleRelation._get_iter_from_url(url, params=params, append_base_url=False) | Get all `SampleRelation`s in the relation graph of the sample.
:param depth: max depth of the returned graph. None retrieves the complete graph.
:return: An iterator over the relations | train | https://github.com/mass-project/mass_api_client/blob/b200c32c93608bf3b2707fbf0e83a2228702e2c8/mass_api_client/resources/sample.py#L31-L45 | [
"def _get_iter_from_url(cls, url, params=None, append_base_url=True):\n if params is None:\n params = {}\n\n con = ConnectionManager().get_connection(cls._connection_alias)\n next_url = url\n\n while next_url is not None:\n res = con.get_json(next_url, params=params, append_base_url=append_base_url)\n deserialized = cls._deserialize(res['results'], many=True)\n for data in deserialized:\n yield cls._create_instance_from_data(data)\n try:\n next_url = res['next']\n except KeyError:\n raise StopIteration\n append_base_url = False\n"
] | class Sample(BaseWithSubclasses):
_endpoint = 'sample'
_class_identifier = 'Sample'
_filter_parameters = [
'delivery_date__lte',
'delivery_date__gte',
'first_seen__lte',
'first_seen__gte',
'tags__all'
]
def get_reports(self):
"""
Retrieve all reports submitted for this Sample.
:return: A list of :class:`.Report`
"""
url = '{}reports/'.format(self.url)
return Report._get_list_from_url(url, append_base_url=False)
def __repr__(self):
return '[{}] {}'.format(str(self.__class__.__name__), str(self.id))
def __str__(self):
return self.__repr__()
|
mass-project/mass_api_client | mass_api_client/resources/sample.py | DomainSample.create | python | def create(cls, domain, tlp_level=0, tags=[]):
return cls._create(domain=domain, tlp_level=tlp_level, tags=tags) | Create a new :class:`DomainSample` on the server.
:param domain: The domain as a string.
:param tlp_level: The TLP-Level
:param tags: Tags to add to the sample.
:return: The created sample. | train | https://github.com/mass-project/mass_api_client/blob/b200c32c93608bf3b2707fbf0e83a2228702e2c8/mass_api_client/resources/sample.py#L68-L77 | null | class DomainSample(Sample):
schema = DomainSampleSchema()
_class_identifier = 'Sample.DomainSample'
_creation_point = 'sample/submit_domain'
_default_filters = {'_cls': _class_identifier}
_filter_parameters = Sample._filter_parameters + [
'domain',
'domain__contains',
'domain__startswith',
'domain__endswith'
]
@classmethod
|
mass-project/mass_api_client | mass_api_client/resources/sample.py | URISample.create | python | def create(cls, uri, tlp_level=0, tags=[]):
return cls._create(uri=uri, tlp_level=tlp_level, tags=tags) | Create a new :class:`URISample` on the server.
:param uri: The uri as a string.
:param tlp_level: The TLP-Level
:param tags: Tags to add to the sample.
:return: The created sample. | train | https://github.com/mass-project/mass_api_client/blob/b200c32c93608bf3b2707fbf0e83a2228702e2c8/mass_api_client/resources/sample.py#L94-L103 | null | class URISample(Sample):
schema = URISampleSchema()
_class_identifier = 'Sample.URISample'
_creation_point = 'sample/submit_uri'
_default_filters = {'_cls': _class_identifier}
_filter_parameters = Sample._filter_parameters + [
'uri',
'uri__contains',
'uri__startswith',
'uri__endswith'
]
@classmethod
|
mass-project/mass_api_client | mass_api_client/resources/sample.py | IPSample.create | python | def create(cls, ip_address, tlp_level=0, tags=[]):
return cls._create(ip_address=ip_address, tlp_level=tlp_level, tags=tags) | Create a new :class:`IPSample` on the server.
:param ip_address: The ip address as a string
:param tlp_level: The TLP-Level
:param tags: Tags to add to the sample.
:return: The created sample. | train | https://github.com/mass-project/mass_api_client/blob/b200c32c93608bf3b2707fbf0e83a2228702e2c8/mass_api_client/resources/sample.py#L118-L127 | null | class IPSample(Sample):
schema = IPSampleSchema()
_class_identifier = 'Sample.IPSample'
_creation_point = 'sample/submit_ip'
_default_filters = {'_cls': _class_identifier}
_filter_parameters = Sample._filter_parameters + [
'ip_address',
'ip_address__startswith'
]
@classmethod
|
mass-project/mass_api_client | mass_api_client/resources/sample.py | FileSample.create | python | def create(cls, filename, file, tlp_level=0, tags=[]):
return cls._create(additional_binary_files={'file': (filename, file)}, tlp_level=tlp_level, tags=tags) | Create a new :class:`FileSample` on the server.
:param filename: The filename of the file
:param file: A file-like object
:param tlp_level: The TLP-Level
:param tags: Tags to add to the sample.
:return: The created sample. | train | https://github.com/mass-project/mass_api_client/blob/b200c32c93608bf3b2707fbf0e83a2228702e2c8/mass_api_client/resources/sample.py#L150-L160 | null | class FileSample(Sample):
schema = FileSampleSchema()
_class_identifier = 'Sample.FileSample'
_creation_point = 'sample/submit_file'
_default_filters = {'_cls__startswith': _class_identifier}
_filter_parameters = Sample._filter_parameters + [
'md5sum',
'sha1sum',
'sha256sum',
'sha512sum',
'mime_type',
'file_names',
'file_size__lte',
'file_size__gte',
'shannon_entropy__lte',
'shannon_entropy__gte'
]
@classmethod
def download_to_file(self, file):
"""
Download and store the file of the sample.
:param file: A file-like object to store the file.
"""
con = ConnectionManager().get_connection(self._connection_alias)
return con.download_to_file(self.file, file, append_base_url=False)
@contextmanager
def temporary_file(self):
"""
Contextmanager to get a temporary copy of the file of the sample.
The file will automatically be closed and removed after use.
:return: A file-like object.
"""
with tempfile.NamedTemporaryFile() as tmp:
self.download_to_file(tmp)
yield tmp
|
mass-project/mass_api_client | mass_api_client/resources/sample.py | FileSample.download_to_file | python | def download_to_file(self, file):
con = ConnectionManager().get_connection(self._connection_alias)
return con.download_to_file(self.file, file, append_base_url=False) | Download and store the file of the sample.
:param file: A file-like object to store the file. | train | https://github.com/mass-project/mass_api_client/blob/b200c32c93608bf3b2707fbf0e83a2228702e2c8/mass_api_client/resources/sample.py#L162-L169 | [
"def get_connection(self, alias):\n if alias not in self._connections:\n raise RuntimeError(\"Connection '{}' is not defined. \"\n \"Use ConnectionManager().register_connection(...) to do so.\".format(alias))\n\n return self._connections[alias]\n"
] | class FileSample(Sample):
schema = FileSampleSchema()
_class_identifier = 'Sample.FileSample'
_creation_point = 'sample/submit_file'
_default_filters = {'_cls__startswith': _class_identifier}
_filter_parameters = Sample._filter_parameters + [
'md5sum',
'sha1sum',
'sha256sum',
'sha512sum',
'mime_type',
'file_names',
'file_size__lte',
'file_size__gte',
'shannon_entropy__lte',
'shannon_entropy__gte'
]
@classmethod
def create(cls, filename, file, tlp_level=0, tags=[]):
"""
Create a new :class:`FileSample` on the server.
:param filename: The filename of the file
:param file: A file-like object
:param tlp_level: The TLP-Level
:param tags: Tags to add to the sample.
:return: The created sample.
"""
return cls._create(additional_binary_files={'file': (filename, file)}, tlp_level=tlp_level, tags=tags)
@contextmanager
def temporary_file(self):
"""
Contextmanager to get a temporary copy of the file of the sample.
The file will automatically be closed and removed after use.
:return: A file-like object.
"""
with tempfile.NamedTemporaryFile() as tmp:
self.download_to_file(tmp)
yield tmp
|
gpoulter/fablib | fablib.py | default_roles | python | def default_roles(*role_list):
def selectively_attach(func):
"""Only decorate if nothing specified on command line"""
# pylint: disable=W0142
if not env.roles and not env.hosts:
return roles(*role_list)(func)
else:
if env.hosts:
func = hosts(*env.hosts)(func)
if env.roles:
func = roles(*env.roles)(func)
return func
return selectively_attach | Decorate task with these roles by default, but override with -R, -H | train | https://github.com/gpoulter/fablib/blob/5d14c4d998f79dd1aa3207063c3d06e30e3e2bf9/fablib.py#L19-L32 | null | """Utility functions for fabric tasks"""
from __future__ import print_function
from fabric.api import (env, get, hide, hosts, lcd, local, put, roles,
run, runs_once, settings, sudo)
from StringIO import StringIO
from contextlib import contextmanager
from fabric.colors import green
from fabric.contrib.files import exists
from fabric.contrib.project import rsync_project
from os.path import dirname, join
import os
import re
import time
### {{{ ROLES HELPERS
### ROLES HELPERS }}}
### {{{ FILE AND DIRECTORY HELPERS
def chown(dirs, user=None, group=None):
"""User sudo to set user and group ownership"""
if isinstance(dirs, basestring):
dirs = [dirs]
args = ' '.join(dirs)
if user and group:
return sudo('chown {}:{} {}'.format(user, group, args))
elif user:
return sudo('chown {} {}'.format(user, args))
elif group:
return sudo('chgrp {} {}'.format(group, args))
else:
return None
def chput(local_path=None, remote_path=None, user=None, group=None,
mode=None, use_sudo=True, mirror_local_mode=False, check=True):
"""Put file and set user and group ownership. Default to use sudo."""
# pylint: disable=R0913
result = None
if env.get('full') or not check or diff(local_path, remote_path):
result = put(local_path, remote_path, use_sudo,
mirror_local_mode, mode)
with hide('commands'):
chown(remote_path, user, group)
return result
def cron(name, timespec, user, command, environ=None, disable=False):
"""Create entry in /etc/cron.d"""
path = '/etc/cron.d/{}'.format(name)
if disable:
sudo('rm ' + path)
return
entry = '{}\t{}\t{}\n'.format(timespec, user, command)
if environ:
envstr = '\n'.join('{}={}'.format(k, v)
for k, v in environ.iteritems())
entry = '{}\n{}'.format(envstr, entry)
chput(StringIO(entry), path, use_sudo=True,
mode=0o644, user='root', group='root')
def diff(local_path, remote_path):
"""Return true if local and remote paths differ in contents"""
with hide('commands'):
if isinstance(local_path, basestring):
with open(local_path) as stream:
local_content = stream.read()
else:
pos = local_path.tell()
local_content = local_path.read()
local_path.seek(pos)
remote_content = StringIO()
with settings(hide('warnings'), warn_only=True):
if get(remote_path, remote_content).failed:
return True
return local_content.strip() != remote_content.getvalue().strip()
def file_exists(location):
"""Tests if there is a remote file at the given location."""
return run('test -e "{}" && echo OK ; true'
.format(location)).endswith("OK")
def md5sum(filename, use_sudo=False):
"""Return md5sum of remote file"""
runner = sudo if use_sudo else run
with hide('commands'):
return runner("md5sum '{}'".format(filename)).split()[0]
def mkdir(dirs, user=None, group=None, mode=None, use_sudo=True):
"""Create directory with sudo and octal mode, then set ownership."""
if isinstance(dirs, basestring):
dirs = [dirs]
runner = sudo if use_sudo else run
if dirs:
modearg = '-m {:o}'.format(mode) if mode else ''
cmd = 'mkdir -v -p {} {}'.format(modearg, ' '.join(dirs))
result = runner(cmd)
with hide('commands'):
chown(dirs, user, group)
return result
def rsync(local_path, remote_path, exclude=None, extra_opts=None):
"""Helper to rsync submodules across"""
if not local_path.endswith('/'):
local_path += '/'
exclude = exclude or []
exclude.extend(['*.egg-info', '*.pyc', '.git', '.gitignore',
'.gitmodules', '/build/', '/dist/'])
with hide('running'):
run("mkdir -p '{}'".format(remote_path))
return rsync_project(
remote_path, local_path, delete=True,
extra_opts='-i --omit-dir-times -FF ' +
(extra_opts if extra_opts else ''),
ssh_opts='-o StrictHostKeyChecking=no',
exclude=exclude)
@contextmanager
def tempput(local_path=None, remote_path=None, use_sudo=False,
mirror_local_mode=False, mode=None):
"""Put a file to remote and remove it afterwards"""
import warnings
warnings.simplefilter('ignore', RuntimeWarning)
if remote_path is None:
remote_path = os.tempnam()
put(local_path, remote_path, use_sudo, mirror_local_mode, mode)
yield remote_path
run("rm '{}'".format(remote_path))
@contextmanager
def watch(filenames, callback, use_sudo=False):
"""Call callback if any of filenames change during the context"""
filenames = [filenames] if isinstance(filenames, basestring) else filenames
old_md5 = {fn: md5sum(fn, use_sudo) for fn in filenames}
yield
for filename in filenames:
if md5sum(filename, use_sudo) != old_md5[filename]:
callback()
return
### FILE AND DIRECTORY HELPERS }}}
### {{{ DEBIAN/UBUNTU HELPERS
def debconf_set_selections(package, selections):
"""Given package and map config:(type,value), set selections"""
text = '\n'.join(' '.join([package, k, t, v]) for
k, (t, v) in selections.iteritems())
sudo('debconf-set-selections <<-HEREDOC\n{}\nHEREDOC'.format(text))
def install_deb(pkgname, url):
"""Install package from custom deb hosted on S3.
Return true if package was installed by this invocation."""
status = run("dpkg-query -W -f='${{Status}}' {p}; true".format(p=pkgname))
if ('installed' not in status) or ('not-installed' in status):
deb = url.rpartition('/')[2]
debtmp = '/tmp/{}'.format(deb)
run("wget --no-check-certificate -qc -O '{}' '{}'".format(debtmp, url))
sudo("dpkg -i '{0}' && rm -f '{0}'".format(debtmp))
return True
else:
return False
def package_ensure_apt(*packages):
"""Ensure apt packages are installed"""
package = " ".join(packages)
status = run("dpkg-query -W -f='${{Status}} ' {p}; true".format(p=package))
status = status.lower()
if 'no packages found' in status or 'not-installed' in status:
sudo("apt-get --yes install " + package)
return False
else:
return True
@runs_once
def update_apt(days=3, upgrade=False):
"""Update apt index if not update in last N days"""
# Check the apt-get update timestamp (works on Ubuntu only)
with settings(warn_only=True):
last_update = run(
"stat -c %Y /var/lib/apt/periodic/update-success-stamp")
if ('cannot stat' in last_update
or (time.time() - float(last_update)) > days * 86400):
sudo("apt-get --yes update")
if upgrade:
sudo("apt-get --yes upgrade")
### DEBIAN/UBUNTU HELPERS }}}
### {{{ VERSION TAGGING HELPERS
def make_version(ref=None):
"""Build git version string for current directory"""
cmd = 'git describe --tags --abbrev=6 {}'.format(ref or '')
with hide('commands'):
version = local(cmd, capture=True).strip()
if re.match('^v[0-9]', version):
version = version[1:]
# replacements to match semver.org build numbers
if '-' in version:
head, _, tail = version.partition('-')
count, _, sha = tail.partition('-g')
version = head + '+' + count + '-' + sha
return version
def rsync_git(local_path, remote_path, exclude=None, extra_opts=None,
version_file='version.txt'):
"""Rsync deploy a git repo. Write and compare version.txt"""
with settings(hide('output', 'running'), warn_only=True):
print(green('Version On Server: ' + run('cat ' + '{}/{}'.format(
remote_path, version_file)).strip()))
print(green('Now Deploying Version ' +
write_version(join(local_path, version_file))))
rsync(local_path, remote_path, exclude, extra_opts)
def tagversion(repo, level='patch', special=''):
"""Increment and return tagged version in git.
Increment levels are patch, minor and major.
Using semver.org versioning: {major}.{minor}.{patch}{special}
Special must start with a-z and consist of _a-zA-Z0-9.
"""
prepend = 'v'
with lcd(repo):
oldversion = local(
'git describe --abbrev=0 --tags', capture=True).strip()
if oldversion.startswith('v'):
oldversion = oldversion[1:]
else:
prepend = ''
major, minor, patch = [int(x) for x in re.split('\D', oldversion, 3)[:3]]
if special:
if not re.match('^[a-z]', special):
raise ValueError('Special must start with a-z')
if not re.match('[_a-zA-Z0-9]+', special):
raise ValueError('Must contain start with lowercase letter')
if level == 'major':
major, minor, patch = major + 1, 0, 0
elif level == 'minor':
major, minor, patch = major, minor + 1, 0
elif level == 'patch':
major, minor, patch = major, minor, patch + 1
version_string = '{}.{}.{}'.format(major, minor, patch) + special
with lcd(repo):
local('git tag -s --force {}{}'.format(prepend, version_string))
return version_string
def write_version(path, ref=None):
"""Update version file using git desribe"""
with lcd(dirname(path)):
version = make_version(ref)
if (env.get('full') or not os.path.exists(path)
or version != open(path).read().strip()):
with open(path, 'w') as out:
out.write(version + '\n')
return version
### VERSION TAGGING HELPERS }}}
### {{{ SPLUNK HELPERS
def splunk(cmd, user='admin', passwd='changeme'):
"""Authenticated call to splunk"""
return sudo('/opt/splunkforwarder/bin/splunk {c} -auth {u}:{p}'
.format(c=cmd, u=user, p=passwd))
def splunk_monitor(monitors):
"""Monitor a list of (path, sourcetype) pairs in splunk"""
if not exists('/opt/splunkforwarder'):
return
if not env.get('splunk_monitors'):
with hide('commands'):
env['splunk_monitors'] = str(splunk('list monitor'))
for path, sourcetype in monitors:
if path not in env['splunk_monitors']:
with hide('everything'):
run("touch '{path}'; true".format(path=path))
splunk("add monitor '{path}' -sourcetype {st}".format(
path=path, st=sourcetype))
env['splunk_monitors'] += '\n' + path
# SPLUNK HELPERS }}}
# vim:foldnestmax=1:foldenable:foldmethod=marker:
|
gpoulter/fablib | fablib.py | chown | python | def chown(dirs, user=None, group=None):
if isinstance(dirs, basestring):
dirs = [dirs]
args = ' '.join(dirs)
if user and group:
return sudo('chown {}:{} {}'.format(user, group, args))
elif user:
return sudo('chown {} {}'.format(user, args))
elif group:
return sudo('chgrp {} {}'.format(group, args))
else:
return None | User sudo to set user and group ownership | train | https://github.com/gpoulter/fablib/blob/5d14c4d998f79dd1aa3207063c3d06e30e3e2bf9/fablib.py#L39-L51 | null | """Utility functions for fabric tasks"""
from __future__ import print_function
from fabric.api import (env, get, hide, hosts, lcd, local, put, roles,
run, runs_once, settings, sudo)
from StringIO import StringIO
from contextlib import contextmanager
from fabric.colors import green
from fabric.contrib.files import exists
from fabric.contrib.project import rsync_project
from os.path import dirname, join
import os
import re
import time
### {{{ ROLES HELPERS
def default_roles(*role_list):
"""Decorate task with these roles by default, but override with -R, -H"""
def selectively_attach(func):
"""Only decorate if nothing specified on command line"""
# pylint: disable=W0142
if not env.roles and not env.hosts:
return roles(*role_list)(func)
else:
if env.hosts:
func = hosts(*env.hosts)(func)
if env.roles:
func = roles(*env.roles)(func)
return func
return selectively_attach
### ROLES HELPERS }}}
### {{{ FILE AND DIRECTORY HELPERS
def chput(local_path=None, remote_path=None, user=None, group=None,
mode=None, use_sudo=True, mirror_local_mode=False, check=True):
"""Put file and set user and group ownership. Default to use sudo."""
# pylint: disable=R0913
result = None
if env.get('full') or not check or diff(local_path, remote_path):
result = put(local_path, remote_path, use_sudo,
mirror_local_mode, mode)
with hide('commands'):
chown(remote_path, user, group)
return result
def cron(name, timespec, user, command, environ=None, disable=False):
"""Create entry in /etc/cron.d"""
path = '/etc/cron.d/{}'.format(name)
if disable:
sudo('rm ' + path)
return
entry = '{}\t{}\t{}\n'.format(timespec, user, command)
if environ:
envstr = '\n'.join('{}={}'.format(k, v)
for k, v in environ.iteritems())
entry = '{}\n{}'.format(envstr, entry)
chput(StringIO(entry), path, use_sudo=True,
mode=0o644, user='root', group='root')
def diff(local_path, remote_path):
"""Return true if local and remote paths differ in contents"""
with hide('commands'):
if isinstance(local_path, basestring):
with open(local_path) as stream:
local_content = stream.read()
else:
pos = local_path.tell()
local_content = local_path.read()
local_path.seek(pos)
remote_content = StringIO()
with settings(hide('warnings'), warn_only=True):
if get(remote_path, remote_content).failed:
return True
return local_content.strip() != remote_content.getvalue().strip()
def file_exists(location):
"""Tests if there is a remote file at the given location."""
return run('test -e "{}" && echo OK ; true'
.format(location)).endswith("OK")
def md5sum(filename, use_sudo=False):
"""Return md5sum of remote file"""
runner = sudo if use_sudo else run
with hide('commands'):
return runner("md5sum '{}'".format(filename)).split()[0]
def mkdir(dirs, user=None, group=None, mode=None, use_sudo=True):
"""Create directory with sudo and octal mode, then set ownership."""
if isinstance(dirs, basestring):
dirs = [dirs]
runner = sudo if use_sudo else run
if dirs:
modearg = '-m {:o}'.format(mode) if mode else ''
cmd = 'mkdir -v -p {} {}'.format(modearg, ' '.join(dirs))
result = runner(cmd)
with hide('commands'):
chown(dirs, user, group)
return result
def rsync(local_path, remote_path, exclude=None, extra_opts=None):
"""Helper to rsync submodules across"""
if not local_path.endswith('/'):
local_path += '/'
exclude = exclude or []
exclude.extend(['*.egg-info', '*.pyc', '.git', '.gitignore',
'.gitmodules', '/build/', '/dist/'])
with hide('running'):
run("mkdir -p '{}'".format(remote_path))
return rsync_project(
remote_path, local_path, delete=True,
extra_opts='-i --omit-dir-times -FF ' +
(extra_opts if extra_opts else ''),
ssh_opts='-o StrictHostKeyChecking=no',
exclude=exclude)
@contextmanager
def tempput(local_path=None, remote_path=None, use_sudo=False,
mirror_local_mode=False, mode=None):
"""Put a file to remote and remove it afterwards"""
import warnings
warnings.simplefilter('ignore', RuntimeWarning)
if remote_path is None:
remote_path = os.tempnam()
put(local_path, remote_path, use_sudo, mirror_local_mode, mode)
yield remote_path
run("rm '{}'".format(remote_path))
@contextmanager
def watch(filenames, callback, use_sudo=False):
"""Call callback if any of filenames change during the context"""
filenames = [filenames] if isinstance(filenames, basestring) else filenames
old_md5 = {fn: md5sum(fn, use_sudo) for fn in filenames}
yield
for filename in filenames:
if md5sum(filename, use_sudo) != old_md5[filename]:
callback()
return
### FILE AND DIRECTORY HELPERS }}}
### {{{ DEBIAN/UBUNTU HELPERS
def debconf_set_selections(package, selections):
"""Given package and map config:(type,value), set selections"""
text = '\n'.join(' '.join([package, k, t, v]) for
k, (t, v) in selections.iteritems())
sudo('debconf-set-selections <<-HEREDOC\n{}\nHEREDOC'.format(text))
def install_deb(pkgname, url):
"""Install package from custom deb hosted on S3.
Return true if package was installed by this invocation."""
status = run("dpkg-query -W -f='${{Status}}' {p}; true".format(p=pkgname))
if ('installed' not in status) or ('not-installed' in status):
deb = url.rpartition('/')[2]
debtmp = '/tmp/{}'.format(deb)
run("wget --no-check-certificate -qc -O '{}' '{}'".format(debtmp, url))
sudo("dpkg -i '{0}' && rm -f '{0}'".format(debtmp))
return True
else:
return False
def package_ensure_apt(*packages):
"""Ensure apt packages are installed"""
package = " ".join(packages)
status = run("dpkg-query -W -f='${{Status}} ' {p}; true".format(p=package))
status = status.lower()
if 'no packages found' in status or 'not-installed' in status:
sudo("apt-get --yes install " + package)
return False
else:
return True
@runs_once
def update_apt(days=3, upgrade=False):
"""Update apt index if not update in last N days"""
# Check the apt-get update timestamp (works on Ubuntu only)
with settings(warn_only=True):
last_update = run(
"stat -c %Y /var/lib/apt/periodic/update-success-stamp")
if ('cannot stat' in last_update
or (time.time() - float(last_update)) > days * 86400):
sudo("apt-get --yes update")
if upgrade:
sudo("apt-get --yes upgrade")
### DEBIAN/UBUNTU HELPERS }}}
### {{{ VERSION TAGGING HELPERS
def make_version(ref=None):
"""Build git version string for current directory"""
cmd = 'git describe --tags --abbrev=6 {}'.format(ref or '')
with hide('commands'):
version = local(cmd, capture=True).strip()
if re.match('^v[0-9]', version):
version = version[1:]
# replacements to match semver.org build numbers
if '-' in version:
head, _, tail = version.partition('-')
count, _, sha = tail.partition('-g')
version = head + '+' + count + '-' + sha
return version
def rsync_git(local_path, remote_path, exclude=None, extra_opts=None,
version_file='version.txt'):
"""Rsync deploy a git repo. Write and compare version.txt"""
with settings(hide('output', 'running'), warn_only=True):
print(green('Version On Server: ' + run('cat ' + '{}/{}'.format(
remote_path, version_file)).strip()))
print(green('Now Deploying Version ' +
write_version(join(local_path, version_file))))
rsync(local_path, remote_path, exclude, extra_opts)
def tagversion(repo, level='patch', special=''):
"""Increment and return tagged version in git.
Increment levels are patch, minor and major.
Using semver.org versioning: {major}.{minor}.{patch}{special}
Special must start with a-z and consist of _a-zA-Z0-9.
"""
prepend = 'v'
with lcd(repo):
oldversion = local(
'git describe --abbrev=0 --tags', capture=True).strip()
if oldversion.startswith('v'):
oldversion = oldversion[1:]
else:
prepend = ''
major, minor, patch = [int(x) for x in re.split('\D', oldversion, 3)[:3]]
if special:
if not re.match('^[a-z]', special):
raise ValueError('Special must start with a-z')
if not re.match('[_a-zA-Z0-9]+', special):
raise ValueError('Must contain start with lowercase letter')
if level == 'major':
major, minor, patch = major + 1, 0, 0
elif level == 'minor':
major, minor, patch = major, minor + 1, 0
elif level == 'patch':
major, minor, patch = major, minor, patch + 1
version_string = '{}.{}.{}'.format(major, minor, patch) + special
with lcd(repo):
local('git tag -s --force {}{}'.format(prepend, version_string))
return version_string
def write_version(path, ref=None):
"""Update version file using git desribe"""
with lcd(dirname(path)):
version = make_version(ref)
if (env.get('full') or not os.path.exists(path)
or version != open(path).read().strip()):
with open(path, 'w') as out:
out.write(version + '\n')
return version
### VERSION TAGGING HELPERS }}}
### {{{ SPLUNK HELPERS
def splunk(cmd, user='admin', passwd='changeme'):
"""Authenticated call to splunk"""
return sudo('/opt/splunkforwarder/bin/splunk {c} -auth {u}:{p}'
.format(c=cmd, u=user, p=passwd))
def splunk_monitor(monitors):
"""Monitor a list of (path, sourcetype) pairs in splunk"""
if not exists('/opt/splunkforwarder'):
return
if not env.get('splunk_monitors'):
with hide('commands'):
env['splunk_monitors'] = str(splunk('list monitor'))
for path, sourcetype in monitors:
if path not in env['splunk_monitors']:
with hide('everything'):
run("touch '{path}'; true".format(path=path))
splunk("add monitor '{path}' -sourcetype {st}".format(
path=path, st=sourcetype))
env['splunk_monitors'] += '\n' + path
# SPLUNK HELPERS }}}
# vim:foldnestmax=1:foldenable:foldmethod=marker:
|
gpoulter/fablib | fablib.py | chput | python | def chput(local_path=None, remote_path=None, user=None, group=None,
mode=None, use_sudo=True, mirror_local_mode=False, check=True):
# pylint: disable=R0913
result = None
if env.get('full') or not check or diff(local_path, remote_path):
result = put(local_path, remote_path, use_sudo,
mirror_local_mode, mode)
with hide('commands'):
chown(remote_path, user, group)
return result | Put file and set user and group ownership. Default to use sudo. | train | https://github.com/gpoulter/fablib/blob/5d14c4d998f79dd1aa3207063c3d06e30e3e2bf9/fablib.py#L54-L64 | [
"def diff(local_path, remote_path):\n \"\"\"Return true if local and remote paths differ in contents\"\"\"\n with hide('commands'):\n if isinstance(local_path, basestring):\n with open(local_path) as stream:\n local_content = stream.read()\n else:\n pos = local_path.tell()\n local_content = local_path.read()\n local_path.seek(pos)\n remote_content = StringIO()\n with settings(hide('warnings'), warn_only=True):\n if get(remote_path, remote_content).failed:\n return True\n return local_content.strip() != remote_content.getvalue().strip()\n",
"def chown(dirs, user=None, group=None):\n \"\"\"User sudo to set user and group ownership\"\"\"\n if isinstance(dirs, basestring):\n dirs = [dirs]\n args = ' '.join(dirs)\n if user and group:\n return sudo('chown {}:{} {}'.format(user, group, args))\n elif user:\n return sudo('chown {} {}'.format(user, args))\n elif group:\n return sudo('chgrp {} {}'.format(group, args))\n else:\n return None\n"
] | """Utility functions for fabric tasks"""
from __future__ import print_function
from fabric.api import (env, get, hide, hosts, lcd, local, put, roles,
run, runs_once, settings, sudo)
from StringIO import StringIO
from contextlib import contextmanager
from fabric.colors import green
from fabric.contrib.files import exists
from fabric.contrib.project import rsync_project
from os.path import dirname, join
import os
import re
import time
### {{{ ROLES HELPERS
def default_roles(*role_list):
"""Decorate task with these roles by default, but override with -R, -H"""
def selectively_attach(func):
"""Only decorate if nothing specified on command line"""
# pylint: disable=W0142
if not env.roles and not env.hosts:
return roles(*role_list)(func)
else:
if env.hosts:
func = hosts(*env.hosts)(func)
if env.roles:
func = roles(*env.roles)(func)
return func
return selectively_attach
### ROLES HELPERS }}}
### {{{ FILE AND DIRECTORY HELPERS
def chown(dirs, user=None, group=None):
"""User sudo to set user and group ownership"""
if isinstance(dirs, basestring):
dirs = [dirs]
args = ' '.join(dirs)
if user and group:
return sudo('chown {}:{} {}'.format(user, group, args))
elif user:
return sudo('chown {} {}'.format(user, args))
elif group:
return sudo('chgrp {} {}'.format(group, args))
else:
return None
def cron(name, timespec, user, command, environ=None, disable=False):
"""Create entry in /etc/cron.d"""
path = '/etc/cron.d/{}'.format(name)
if disable:
sudo('rm ' + path)
return
entry = '{}\t{}\t{}\n'.format(timespec, user, command)
if environ:
envstr = '\n'.join('{}={}'.format(k, v)
for k, v in environ.iteritems())
entry = '{}\n{}'.format(envstr, entry)
chput(StringIO(entry), path, use_sudo=True,
mode=0o644, user='root', group='root')
def diff(local_path, remote_path):
"""Return true if local and remote paths differ in contents"""
with hide('commands'):
if isinstance(local_path, basestring):
with open(local_path) as stream:
local_content = stream.read()
else:
pos = local_path.tell()
local_content = local_path.read()
local_path.seek(pos)
remote_content = StringIO()
with settings(hide('warnings'), warn_only=True):
if get(remote_path, remote_content).failed:
return True
return local_content.strip() != remote_content.getvalue().strip()
def file_exists(location):
"""Tests if there is a remote file at the given location."""
return run('test -e "{}" && echo OK ; true'
.format(location)).endswith("OK")
def md5sum(filename, use_sudo=False):
"""Return md5sum of remote file"""
runner = sudo if use_sudo else run
with hide('commands'):
return runner("md5sum '{}'".format(filename)).split()[0]
def mkdir(dirs, user=None, group=None, mode=None, use_sudo=True):
"""Create directory with sudo and octal mode, then set ownership."""
if isinstance(dirs, basestring):
dirs = [dirs]
runner = sudo if use_sudo else run
if dirs:
modearg = '-m {:o}'.format(mode) if mode else ''
cmd = 'mkdir -v -p {} {}'.format(modearg, ' '.join(dirs))
result = runner(cmd)
with hide('commands'):
chown(dirs, user, group)
return result
def rsync(local_path, remote_path, exclude=None, extra_opts=None):
"""Helper to rsync submodules across"""
if not local_path.endswith('/'):
local_path += '/'
exclude = exclude or []
exclude.extend(['*.egg-info', '*.pyc', '.git', '.gitignore',
'.gitmodules', '/build/', '/dist/'])
with hide('running'):
run("mkdir -p '{}'".format(remote_path))
return rsync_project(
remote_path, local_path, delete=True,
extra_opts='-i --omit-dir-times -FF ' +
(extra_opts if extra_opts else ''),
ssh_opts='-o StrictHostKeyChecking=no',
exclude=exclude)
@contextmanager
def tempput(local_path=None, remote_path=None, use_sudo=False,
mirror_local_mode=False, mode=None):
"""Put a file to remote and remove it afterwards"""
import warnings
warnings.simplefilter('ignore', RuntimeWarning)
if remote_path is None:
remote_path = os.tempnam()
put(local_path, remote_path, use_sudo, mirror_local_mode, mode)
yield remote_path
run("rm '{}'".format(remote_path))
@contextmanager
def watch(filenames, callback, use_sudo=False):
"""Call callback if any of filenames change during the context"""
filenames = [filenames] if isinstance(filenames, basestring) else filenames
old_md5 = {fn: md5sum(fn, use_sudo) for fn in filenames}
yield
for filename in filenames:
if md5sum(filename, use_sudo) != old_md5[filename]:
callback()
return
### FILE AND DIRECTORY HELPERS }}}
### {{{ DEBIAN/UBUNTU HELPERS
def debconf_set_selections(package, selections):
"""Given package and map config:(type,value), set selections"""
text = '\n'.join(' '.join([package, k, t, v]) for
k, (t, v) in selections.iteritems())
sudo('debconf-set-selections <<-HEREDOC\n{}\nHEREDOC'.format(text))
def install_deb(pkgname, url):
"""Install package from custom deb hosted on S3.
Return true if package was installed by this invocation."""
status = run("dpkg-query -W -f='${{Status}}' {p}; true".format(p=pkgname))
if ('installed' not in status) or ('not-installed' in status):
deb = url.rpartition('/')[2]
debtmp = '/tmp/{}'.format(deb)
run("wget --no-check-certificate -qc -O '{}' '{}'".format(debtmp, url))
sudo("dpkg -i '{0}' && rm -f '{0}'".format(debtmp))
return True
else:
return False
def package_ensure_apt(*packages):
"""Ensure apt packages are installed"""
package = " ".join(packages)
status = run("dpkg-query -W -f='${{Status}} ' {p}; true".format(p=package))
status = status.lower()
if 'no packages found' in status or 'not-installed' in status:
sudo("apt-get --yes install " + package)
return False
else:
return True
@runs_once
def update_apt(days=3, upgrade=False):
"""Update apt index if not update in last N days"""
# Check the apt-get update timestamp (works on Ubuntu only)
with settings(warn_only=True):
last_update = run(
"stat -c %Y /var/lib/apt/periodic/update-success-stamp")
if ('cannot stat' in last_update
or (time.time() - float(last_update)) > days * 86400):
sudo("apt-get --yes update")
if upgrade:
sudo("apt-get --yes upgrade")
### DEBIAN/UBUNTU HELPERS }}}
### {{{ VERSION TAGGING HELPERS
def make_version(ref=None):
"""Build git version string for current directory"""
cmd = 'git describe --tags --abbrev=6 {}'.format(ref or '')
with hide('commands'):
version = local(cmd, capture=True).strip()
if re.match('^v[0-9]', version):
version = version[1:]
# replacements to match semver.org build numbers
if '-' in version:
head, _, tail = version.partition('-')
count, _, sha = tail.partition('-g')
version = head + '+' + count + '-' + sha
return version
def rsync_git(local_path, remote_path, exclude=None, extra_opts=None,
version_file='version.txt'):
"""Rsync deploy a git repo. Write and compare version.txt"""
with settings(hide('output', 'running'), warn_only=True):
print(green('Version On Server: ' + run('cat ' + '{}/{}'.format(
remote_path, version_file)).strip()))
print(green('Now Deploying Version ' +
write_version(join(local_path, version_file))))
rsync(local_path, remote_path, exclude, extra_opts)
def tagversion(repo, level='patch', special=''):
"""Increment and return tagged version in git.
Increment levels are patch, minor and major.
Using semver.org versioning: {major}.{minor}.{patch}{special}
Special must start with a-z and consist of _a-zA-Z0-9.
"""
prepend = 'v'
with lcd(repo):
oldversion = local(
'git describe --abbrev=0 --tags', capture=True).strip()
if oldversion.startswith('v'):
oldversion = oldversion[1:]
else:
prepend = ''
major, minor, patch = [int(x) for x in re.split('\D', oldversion, 3)[:3]]
if special:
if not re.match('^[a-z]', special):
raise ValueError('Special must start with a-z')
if not re.match('[_a-zA-Z0-9]+', special):
raise ValueError('Must contain start with lowercase letter')
if level == 'major':
major, minor, patch = major + 1, 0, 0
elif level == 'minor':
major, minor, patch = major, minor + 1, 0
elif level == 'patch':
major, minor, patch = major, minor, patch + 1
version_string = '{}.{}.{}'.format(major, minor, patch) + special
with lcd(repo):
local('git tag -s --force {}{}'.format(prepend, version_string))
return version_string
def write_version(path, ref=None):
"""Update version file using git desribe"""
with lcd(dirname(path)):
version = make_version(ref)
if (env.get('full') or not os.path.exists(path)
or version != open(path).read().strip()):
with open(path, 'w') as out:
out.write(version + '\n')
return version
### VERSION TAGGING HELPERS }}}
### {{{ SPLUNK HELPERS
def splunk(cmd, user='admin', passwd='changeme'):
"""Authenticated call to splunk"""
return sudo('/opt/splunkforwarder/bin/splunk {c} -auth {u}:{p}'
.format(c=cmd, u=user, p=passwd))
def splunk_monitor(monitors):
"""Monitor a list of (path, sourcetype) pairs in splunk"""
if not exists('/opt/splunkforwarder'):
return
if not env.get('splunk_monitors'):
with hide('commands'):
env['splunk_monitors'] = str(splunk('list monitor'))
for path, sourcetype in monitors:
if path not in env['splunk_monitors']:
with hide('everything'):
run("touch '{path}'; true".format(path=path))
splunk("add monitor '{path}' -sourcetype {st}".format(
path=path, st=sourcetype))
env['splunk_monitors'] += '\n' + path
# SPLUNK HELPERS }}}
# vim:foldnestmax=1:foldenable:foldmethod=marker:
|
gpoulter/fablib | fablib.py | cron | python | def cron(name, timespec, user, command, environ=None, disable=False):
path = '/etc/cron.d/{}'.format(name)
if disable:
sudo('rm ' + path)
return
entry = '{}\t{}\t{}\n'.format(timespec, user, command)
if environ:
envstr = '\n'.join('{}={}'.format(k, v)
for k, v in environ.iteritems())
entry = '{}\n{}'.format(envstr, entry)
chput(StringIO(entry), path, use_sudo=True,
mode=0o644, user='root', group='root') | Create entry in /etc/cron.d | train | https://github.com/gpoulter/fablib/blob/5d14c4d998f79dd1aa3207063c3d06e30e3e2bf9/fablib.py#L67-L79 | [
"def chput(local_path=None, remote_path=None, user=None, group=None,\n mode=None, use_sudo=True, mirror_local_mode=False, check=True):\n \"\"\"Put file and set user and group ownership. Default to use sudo.\"\"\"\n # pylint: disable=R0913\n result = None\n if env.get('full') or not check or diff(local_path, remote_path):\n result = put(local_path, remote_path, use_sudo,\n mirror_local_mode, mode)\n with hide('commands'):\n chown(remote_path, user, group)\n return result\n"
] | """Utility functions for fabric tasks"""
from __future__ import print_function
from fabric.api import (env, get, hide, hosts, lcd, local, put, roles,
run, runs_once, settings, sudo)
from StringIO import StringIO
from contextlib import contextmanager
from fabric.colors import green
from fabric.contrib.files import exists
from fabric.contrib.project import rsync_project
from os.path import dirname, join
import os
import re
import time
### {{{ ROLES HELPERS
def default_roles(*role_list):
"""Decorate task with these roles by default, but override with -R, -H"""
def selectively_attach(func):
"""Only decorate if nothing specified on command line"""
# pylint: disable=W0142
if not env.roles and not env.hosts:
return roles(*role_list)(func)
else:
if env.hosts:
func = hosts(*env.hosts)(func)
if env.roles:
func = roles(*env.roles)(func)
return func
return selectively_attach
### ROLES HELPERS }}}
### {{{ FILE AND DIRECTORY HELPERS
def chown(dirs, user=None, group=None):
"""User sudo to set user and group ownership"""
if isinstance(dirs, basestring):
dirs = [dirs]
args = ' '.join(dirs)
if user and group:
return sudo('chown {}:{} {}'.format(user, group, args))
elif user:
return sudo('chown {} {}'.format(user, args))
elif group:
return sudo('chgrp {} {}'.format(group, args))
else:
return None
def chput(local_path=None, remote_path=None, user=None, group=None,
mode=None, use_sudo=True, mirror_local_mode=False, check=True):
"""Put file and set user and group ownership. Default to use sudo."""
# pylint: disable=R0913
result = None
if env.get('full') or not check or diff(local_path, remote_path):
result = put(local_path, remote_path, use_sudo,
mirror_local_mode, mode)
with hide('commands'):
chown(remote_path, user, group)
return result
def diff(local_path, remote_path):
"""Return true if local and remote paths differ in contents"""
with hide('commands'):
if isinstance(local_path, basestring):
with open(local_path) as stream:
local_content = stream.read()
else:
pos = local_path.tell()
local_content = local_path.read()
local_path.seek(pos)
remote_content = StringIO()
with settings(hide('warnings'), warn_only=True):
if get(remote_path, remote_content).failed:
return True
return local_content.strip() != remote_content.getvalue().strip()
def file_exists(location):
"""Tests if there is a remote file at the given location."""
return run('test -e "{}" && echo OK ; true'
.format(location)).endswith("OK")
def md5sum(filename, use_sudo=False):
"""Return md5sum of remote file"""
runner = sudo if use_sudo else run
with hide('commands'):
return runner("md5sum '{}'".format(filename)).split()[0]
def mkdir(dirs, user=None, group=None, mode=None, use_sudo=True):
"""Create directory with sudo and octal mode, then set ownership."""
if isinstance(dirs, basestring):
dirs = [dirs]
runner = sudo if use_sudo else run
if dirs:
modearg = '-m {:o}'.format(mode) if mode else ''
cmd = 'mkdir -v -p {} {}'.format(modearg, ' '.join(dirs))
result = runner(cmd)
with hide('commands'):
chown(dirs, user, group)
return result
def rsync(local_path, remote_path, exclude=None, extra_opts=None):
"""Helper to rsync submodules across"""
if not local_path.endswith('/'):
local_path += '/'
exclude = exclude or []
exclude.extend(['*.egg-info', '*.pyc', '.git', '.gitignore',
'.gitmodules', '/build/', '/dist/'])
with hide('running'):
run("mkdir -p '{}'".format(remote_path))
return rsync_project(
remote_path, local_path, delete=True,
extra_opts='-i --omit-dir-times -FF ' +
(extra_opts if extra_opts else ''),
ssh_opts='-o StrictHostKeyChecking=no',
exclude=exclude)
@contextmanager
def tempput(local_path=None, remote_path=None, use_sudo=False,
mirror_local_mode=False, mode=None):
"""Put a file to remote and remove it afterwards"""
import warnings
warnings.simplefilter('ignore', RuntimeWarning)
if remote_path is None:
remote_path = os.tempnam()
put(local_path, remote_path, use_sudo, mirror_local_mode, mode)
yield remote_path
run("rm '{}'".format(remote_path))
@contextmanager
def watch(filenames, callback, use_sudo=False):
"""Call callback if any of filenames change during the context"""
filenames = [filenames] if isinstance(filenames, basestring) else filenames
old_md5 = {fn: md5sum(fn, use_sudo) for fn in filenames}
yield
for filename in filenames:
if md5sum(filename, use_sudo) != old_md5[filename]:
callback()
return
### FILE AND DIRECTORY HELPERS }}}
### {{{ DEBIAN/UBUNTU HELPERS
def debconf_set_selections(package, selections):
"""Given package and map config:(type,value), set selections"""
text = '\n'.join(' '.join([package, k, t, v]) for
k, (t, v) in selections.iteritems())
sudo('debconf-set-selections <<-HEREDOC\n{}\nHEREDOC'.format(text))
def install_deb(pkgname, url):
"""Install package from custom deb hosted on S3.
Return true if package was installed by this invocation."""
status = run("dpkg-query -W -f='${{Status}}' {p}; true".format(p=pkgname))
if ('installed' not in status) or ('not-installed' in status):
deb = url.rpartition('/')[2]
debtmp = '/tmp/{}'.format(deb)
run("wget --no-check-certificate -qc -O '{}' '{}'".format(debtmp, url))
sudo("dpkg -i '{0}' && rm -f '{0}'".format(debtmp))
return True
else:
return False
def package_ensure_apt(*packages):
"""Ensure apt packages are installed"""
package = " ".join(packages)
status = run("dpkg-query -W -f='${{Status}} ' {p}; true".format(p=package))
status = status.lower()
if 'no packages found' in status or 'not-installed' in status:
sudo("apt-get --yes install " + package)
return False
else:
return True
@runs_once
def update_apt(days=3, upgrade=False):
"""Update apt index if not update in last N days"""
# Check the apt-get update timestamp (works on Ubuntu only)
with settings(warn_only=True):
last_update = run(
"stat -c %Y /var/lib/apt/periodic/update-success-stamp")
if ('cannot stat' in last_update
or (time.time() - float(last_update)) > days * 86400):
sudo("apt-get --yes update")
if upgrade:
sudo("apt-get --yes upgrade")
### DEBIAN/UBUNTU HELPERS }}}
### {{{ VERSION TAGGING HELPERS
def make_version(ref=None):
"""Build git version string for current directory"""
cmd = 'git describe --tags --abbrev=6 {}'.format(ref or '')
with hide('commands'):
version = local(cmd, capture=True).strip()
if re.match('^v[0-9]', version):
version = version[1:]
# replacements to match semver.org build numbers
if '-' in version:
head, _, tail = version.partition('-')
count, _, sha = tail.partition('-g')
version = head + '+' + count + '-' + sha
return version
def rsync_git(local_path, remote_path, exclude=None, extra_opts=None,
version_file='version.txt'):
"""Rsync deploy a git repo. Write and compare version.txt"""
with settings(hide('output', 'running'), warn_only=True):
print(green('Version On Server: ' + run('cat ' + '{}/{}'.format(
remote_path, version_file)).strip()))
print(green('Now Deploying Version ' +
write_version(join(local_path, version_file))))
rsync(local_path, remote_path, exclude, extra_opts)
def tagversion(repo, level='patch', special=''):
"""Increment and return tagged version in git.
Increment levels are patch, minor and major.
Using semver.org versioning: {major}.{minor}.{patch}{special}
Special must start with a-z and consist of _a-zA-Z0-9.
"""
prepend = 'v'
with lcd(repo):
oldversion = local(
'git describe --abbrev=0 --tags', capture=True).strip()
if oldversion.startswith('v'):
oldversion = oldversion[1:]
else:
prepend = ''
    major, minor, patch = [int(x) for x in re.split(r'\D', oldversion, 3)[:3]]
if special:
if not re.match('^[a-z]', special):
raise ValueError('Special must start with a-z')
        if not re.match('^[_a-zA-Z0-9]+$', special):
            raise ValueError('Special must consist of _a-zA-Z0-9')
if level == 'major':
major, minor, patch = major + 1, 0, 0
elif level == 'minor':
major, minor, patch = major, minor + 1, 0
elif level == 'patch':
major, minor, patch = major, minor, patch + 1
version_string = '{}.{}.{}'.format(major, minor, patch) + special
with lcd(repo):
local('git tag -s --force {}{}'.format(prepend, version_string))
return version_string
def write_version(path, ref=None):
"""Update version file using git desribe"""
with lcd(dirname(path)):
version = make_version(ref)
if (env.get('full') or not os.path.exists(path)
or version != open(path).read().strip()):
with open(path, 'w') as out:
out.write(version + '\n')
return version
### VERSION TAGGING HELPERS }}}
### {{{ SPLUNK HELPERS
def splunk(cmd, user='admin', passwd='changeme'):
"""Authenticated call to splunk"""
return sudo('/opt/splunkforwarder/bin/splunk {c} -auth {u}:{p}'
.format(c=cmd, u=user, p=passwd))
def splunk_monitor(monitors):
"""Monitor a list of (path, sourcetype) pairs in splunk"""
if not exists('/opt/splunkforwarder'):
return
if not env.get('splunk_monitors'):
with hide('commands'):
env['splunk_monitors'] = str(splunk('list monitor'))
for path, sourcetype in monitors:
if path not in env['splunk_monitors']:
with hide('everything'):
run("touch '{path}'; true".format(path=path))
splunk("add monitor '{path}' -sourcetype {st}".format(
path=path, st=sourcetype))
env['splunk_monitors'] += '\n' + path
# SPLUNK HELPERS }}}
# vim:foldnestmax=1:foldenable:foldmethod=marker:
|
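A minimal usage sketch for the cron record above; the job name, schedule, user and command are hypothetical assumptions, not values from the repository:

# Hypothetical fabfile snippet (illustrative only).
from fablib import cron

# Writes /etc/cron.d/nightly-backup containing an environment line and
# the entry "0 3 * * *<TAB>backup<TAB>/usr/local/bin/backup.sh":
cron('nightly-backup', '0 3 * * *', 'backup', '/usr/local/bin/backup.sh',
     environ={'MAILTO': 'ops@example.com'})

# disable=True removes /etc/cron.d/nightly-backup instead of writing it:
cron('nightly-backup', '0 3 * * *', 'backup', '/usr/local/bin/backup.sh',
     disable=True)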
gpoulter/fablib | fablib.py | diff | python | def diff(local_path, remote_path):
with hide('commands'):
if isinstance(local_path, basestring):
with open(local_path) as stream:
local_content = stream.read()
else:
pos = local_path.tell()
local_content = local_path.read()
local_path.seek(pos)
remote_content = StringIO()
with settings(hide('warnings'), warn_only=True):
if get(remote_path, remote_content).failed:
return True
return local_content.strip() != remote_content.getvalue().strip() | Return true if local and remote paths differ in contents | train | https://github.com/gpoulter/fablib/blob/5d14c4d998f79dd1aa3207063c3d06e30e3e2bf9/fablib.py#L82-L96 | null | """Utility functions for fabric tasks"""
from __future__ import print_function
from fabric.api import (env, get, hide, hosts, lcd, local, put, roles,
run, runs_once, settings, sudo)
from StringIO import StringIO
from contextlib import contextmanager
from fabric.colors import green
from fabric.contrib.files import exists
from fabric.contrib.project import rsync_project
from os.path import dirname, join
import os
import re
import time
### {{{ ROLES HELPERS
def default_roles(*role_list):
"""Decorate task with these roles by default, but override with -R, -H"""
def selectively_attach(func):
"""Only decorate if nothing specified on command line"""
# pylint: disable=W0142
if not env.roles and not env.hosts:
return roles(*role_list)(func)
else:
if env.hosts:
func = hosts(*env.hosts)(func)
if env.roles:
func = roles(*env.roles)(func)
return func
return selectively_attach
### ROLES HELPERS }}}
### {{{ FILE AND DIRECTORY HELPERS
def chown(dirs, user=None, group=None):
"""User sudo to set user and group ownership"""
if isinstance(dirs, basestring):
dirs = [dirs]
args = ' '.join(dirs)
if user and group:
return sudo('chown {}:{} {}'.format(user, group, args))
elif user:
return sudo('chown {} {}'.format(user, args))
elif group:
return sudo('chgrp {} {}'.format(group, args))
else:
return None
def chput(local_path=None, remote_path=None, user=None, group=None,
mode=None, use_sudo=True, mirror_local_mode=False, check=True):
"""Put file and set user and group ownership. Default to use sudo."""
# pylint: disable=R0913
result = None
if env.get('full') or not check or diff(local_path, remote_path):
result = put(local_path, remote_path, use_sudo,
mirror_local_mode, mode)
with hide('commands'):
chown(remote_path, user, group)
return result
def cron(name, timespec, user, command, environ=None, disable=False):
"""Create entry in /etc/cron.d"""
path = '/etc/cron.d/{}'.format(name)
if disable:
sudo('rm ' + path)
return
entry = '{}\t{}\t{}\n'.format(timespec, user, command)
if environ:
envstr = '\n'.join('{}={}'.format(k, v)
for k, v in environ.iteritems())
entry = '{}\n{}'.format(envstr, entry)
chput(StringIO(entry), path, use_sudo=True,
mode=0o644, user='root', group='root')
def file_exists(location):
"""Tests if there is a remote file at the given location."""
return run('test -e "{}" && echo OK ; true'
.format(location)).endswith("OK")
def md5sum(filename, use_sudo=False):
"""Return md5sum of remote file"""
runner = sudo if use_sudo else run
with hide('commands'):
return runner("md5sum '{}'".format(filename)).split()[0]
def mkdir(dirs, user=None, group=None, mode=None, use_sudo=True):
"""Create directory with sudo and octal mode, then set ownership."""
if isinstance(dirs, basestring):
dirs = [dirs]
runner = sudo if use_sudo else run
if dirs:
modearg = '-m {:o}'.format(mode) if mode else ''
cmd = 'mkdir -v -p {} {}'.format(modearg, ' '.join(dirs))
result = runner(cmd)
with hide('commands'):
chown(dirs, user, group)
return result
def rsync(local_path, remote_path, exclude=None, extra_opts=None):
"""Helper to rsync submodules across"""
if not local_path.endswith('/'):
local_path += '/'
exclude = exclude or []
exclude.extend(['*.egg-info', '*.pyc', '.git', '.gitignore',
'.gitmodules', '/build/', '/dist/'])
with hide('running'):
run("mkdir -p '{}'".format(remote_path))
return rsync_project(
remote_path, local_path, delete=True,
extra_opts='-i --omit-dir-times -FF ' +
(extra_opts if extra_opts else ''),
ssh_opts='-o StrictHostKeyChecking=no',
exclude=exclude)
@contextmanager
def tempput(local_path=None, remote_path=None, use_sudo=False,
mirror_local_mode=False, mode=None):
"""Put a file to remote and remove it afterwards"""
import warnings
warnings.simplefilter('ignore', RuntimeWarning)
if remote_path is None:
remote_path = os.tempnam()
put(local_path, remote_path, use_sudo, mirror_local_mode, mode)
yield remote_path
run("rm '{}'".format(remote_path))
@contextmanager
def watch(filenames, callback, use_sudo=False):
"""Call callback if any of filenames change during the context"""
filenames = [filenames] if isinstance(filenames, basestring) else filenames
old_md5 = {fn: md5sum(fn, use_sudo) for fn in filenames}
yield
for filename in filenames:
if md5sum(filename, use_sudo) != old_md5[filename]:
callback()
return
### FILE AND DIRECTORY HELPERS }}}
### {{{ DEBIAN/UBUNTU HELPERS
def debconf_set_selections(package, selections):
"""Given package and map config:(type,value), set selections"""
text = '\n'.join(' '.join([package, k, t, v]) for
k, (t, v) in selections.iteritems())
sudo('debconf-set-selections <<-HEREDOC\n{}\nHEREDOC'.format(text))
def install_deb(pkgname, url):
"""Install package from custom deb hosted on S3.
Return true if package was installed by this invocation."""
status = run("dpkg-query -W -f='${{Status}}' {p}; true".format(p=pkgname))
if ('installed' not in status) or ('not-installed' in status):
deb = url.rpartition('/')[2]
debtmp = '/tmp/{}'.format(deb)
run("wget --no-check-certificate -qc -O '{}' '{}'".format(debtmp, url))
sudo("dpkg -i '{0}' && rm -f '{0}'".format(debtmp))
return True
else:
return False
def package_ensure_apt(*packages):
"""Ensure apt packages are installed"""
package = " ".join(packages)
status = run("dpkg-query -W -f='${{Status}} ' {p}; true".format(p=package))
status = status.lower()
if 'no packages found' in status or 'not-installed' in status:
sudo("apt-get --yes install " + package)
return False
else:
return True
@runs_once
def update_apt(days=3, upgrade=False):
"""Update apt index if not update in last N days"""
# Check the apt-get update timestamp (works on Ubuntu only)
with settings(warn_only=True):
last_update = run(
"stat -c %Y /var/lib/apt/periodic/update-success-stamp")
if ('cannot stat' in last_update
or (time.time() - float(last_update)) > days * 86400):
sudo("apt-get --yes update")
if upgrade:
sudo("apt-get --yes upgrade")
### DEBIAN/UBUNTU HELPERS }}}
### {{{ VERSION TAGGING HELPERS
def make_version(ref=None):
"""Build git version string for current directory"""
cmd = 'git describe --tags --abbrev=6 {}'.format(ref or '')
with hide('commands'):
version = local(cmd, capture=True).strip()
if re.match('^v[0-9]', version):
version = version[1:]
# replacements to match semver.org build numbers
if '-' in version:
head, _, tail = version.partition('-')
count, _, sha = tail.partition('-g')
version = head + '+' + count + '-' + sha
return version
def rsync_git(local_path, remote_path, exclude=None, extra_opts=None,
version_file='version.txt'):
"""Rsync deploy a git repo. Write and compare version.txt"""
with settings(hide('output', 'running'), warn_only=True):
print(green('Version On Server: ' + run('cat ' + '{}/{}'.format(
remote_path, version_file)).strip()))
print(green('Now Deploying Version ' +
write_version(join(local_path, version_file))))
rsync(local_path, remote_path, exclude, extra_opts)
def tagversion(repo, level='patch', special=''):
"""Increment and return tagged version in git.
Increment levels are patch, minor and major.
Using semver.org versioning: {major}.{minor}.{patch}{special}
Special must start with a-z and consist of _a-zA-Z0-9.
"""
prepend = 'v'
with lcd(repo):
oldversion = local(
'git describe --abbrev=0 --tags', capture=True).strip()
if oldversion.startswith('v'):
oldversion = oldversion[1:]
else:
prepend = ''
    major, minor, patch = [int(x) for x in re.split(r'\D', oldversion, 3)[:3]]
if special:
if not re.match('^[a-z]', special):
raise ValueError('Special must start with a-z')
        if not re.match('^[_a-zA-Z0-9]+$', special):
            raise ValueError('Special must consist of _a-zA-Z0-9')
if level == 'major':
major, minor, patch = major + 1, 0, 0
elif level == 'minor':
major, minor, patch = major, minor + 1, 0
elif level == 'patch':
major, minor, patch = major, minor, patch + 1
version_string = '{}.{}.{}'.format(major, minor, patch) + special
with lcd(repo):
local('git tag -s --force {}{}'.format(prepend, version_string))
return version_string
def write_version(path, ref=None):
"""Update version file using git desribe"""
with lcd(dirname(path)):
version = make_version(ref)
if (env.get('full') or not os.path.exists(path)
or version != open(path).read().strip()):
with open(path, 'w') as out:
out.write(version + '\n')
return version
### VERSION TAGGING HELPERS }}}
### {{{ SPLUNK HELPERS
def splunk(cmd, user='admin', passwd='changeme'):
"""Authenticated call to splunk"""
return sudo('/opt/splunkforwarder/bin/splunk {c} -auth {u}:{p}'
.format(c=cmd, u=user, p=passwd))
def splunk_monitor(monitors):
"""Monitor a list of (path, sourcetype) pairs in splunk"""
if not exists('/opt/splunkforwarder'):
return
if not env.get('splunk_monitors'):
with hide('commands'):
env['splunk_monitors'] = str(splunk('list monitor'))
for path, sourcetype in monitors:
if path not in env['splunk_monitors']:
with hide('everything'):
run("touch '{path}'; true".format(path=path))
splunk("add monitor '{path}' -sourcetype {st}".format(
path=path, st=sourcetype))
env['splunk_monitors'] += '\n' + path
# SPLUNK HELPERS }}}
# vim:foldnestmax=1:foldenable:foldmethod=marker:
|
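A minimal usage sketch for the diff record above; both paths are hypothetical:

# Hypothetical fabfile snippet (illustrative only).
from StringIO import StringIO
from fablib import diff

# A string argument is treated as a local file path:
if diff('conf/nginx.conf', '/etc/nginx/nginx.conf'):
    print('remote nginx.conf is out of date')

# A file-like argument is read in place; diff() restores its position:
snippet = StringIO('server_tokens off;\n')
if diff(snippet, '/etc/nginx/conf.d/hardening.conf'):
    print('hardening snippet differs')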
gpoulter/fablib | fablib.py | md5sum | python | def md5sum(filename, use_sudo=False):
runner = sudo if use_sudo else run
with hide('commands'):
return runner("md5sum '{}'".format(filename)).split()[0] | Return md5sum of remote file | train | https://github.com/gpoulter/fablib/blob/5d14c4d998f79dd1aa3207063c3d06e30e3e2bf9/fablib.py#L105-L109 | null | """Utility functions for fabric tasks"""
from __future__ import print_function
from fabric.api import (env, get, hide, hosts, lcd, local, put, roles,
run, runs_once, settings, sudo)
from StringIO import StringIO
from contextlib import contextmanager
from fabric.colors import green
from fabric.contrib.files import exists
from fabric.contrib.project import rsync_project
from os.path import dirname, join
import os
import re
import time
### {{{ ROLES HELPERS
def default_roles(*role_list):
"""Decorate task with these roles by default, but override with -R, -H"""
def selectively_attach(func):
"""Only decorate if nothing specified on command line"""
# pylint: disable=W0142
if not env.roles and not env.hosts:
return roles(*role_list)(func)
else:
if env.hosts:
func = hosts(*env.hosts)(func)
if env.roles:
func = roles(*env.roles)(func)
return func
return selectively_attach
### ROLES HELPERS }}}
### {{{ FILE AND DIRECTORY HELPERS
def chown(dirs, user=None, group=None):
"""User sudo to set user and group ownership"""
if isinstance(dirs, basestring):
dirs = [dirs]
args = ' '.join(dirs)
if user and group:
return sudo('chown {}:{} {}'.format(user, group, args))
elif user:
return sudo('chown {} {}'.format(user, args))
elif group:
return sudo('chgrp {} {}'.format(group, args))
else:
return None
def chput(local_path=None, remote_path=None, user=None, group=None,
mode=None, use_sudo=True, mirror_local_mode=False, check=True):
"""Put file and set user and group ownership. Default to use sudo."""
# pylint: disable=R0913
result = None
if env.get('full') or not check or diff(local_path, remote_path):
result = put(local_path, remote_path, use_sudo,
mirror_local_mode, mode)
with hide('commands'):
chown(remote_path, user, group)
return result
def cron(name, timespec, user, command, environ=None, disable=False):
"""Create entry in /etc/cron.d"""
path = '/etc/cron.d/{}'.format(name)
if disable:
sudo('rm ' + path)
return
entry = '{}\t{}\t{}\n'.format(timespec, user, command)
if environ:
envstr = '\n'.join('{}={}'.format(k, v)
for k, v in environ.iteritems())
entry = '{}\n{}'.format(envstr, entry)
chput(StringIO(entry), path, use_sudo=True,
mode=0o644, user='root', group='root')
def diff(local_path, remote_path):
"""Return true if local and remote paths differ in contents"""
with hide('commands'):
if isinstance(local_path, basestring):
with open(local_path) as stream:
local_content = stream.read()
else:
pos = local_path.tell()
local_content = local_path.read()
local_path.seek(pos)
remote_content = StringIO()
with settings(hide('warnings'), warn_only=True):
if get(remote_path, remote_content).failed:
return True
return local_content.strip() != remote_content.getvalue().strip()
def file_exists(location):
"""Tests if there is a remote file at the given location."""
return run('test -e "{}" && echo OK ; true'
.format(location)).endswith("OK")
def mkdir(dirs, user=None, group=None, mode=None, use_sudo=True):
"""Create directory with sudo and octal mode, then set ownership."""
if isinstance(dirs, basestring):
dirs = [dirs]
runner = sudo if use_sudo else run
if dirs:
modearg = '-m {:o}'.format(mode) if mode else ''
cmd = 'mkdir -v -p {} {}'.format(modearg, ' '.join(dirs))
result = runner(cmd)
with hide('commands'):
chown(dirs, user, group)
return result
def rsync(local_path, remote_path, exclude=None, extra_opts=None):
"""Helper to rsync submodules across"""
if not local_path.endswith('/'):
local_path += '/'
exclude = exclude or []
exclude.extend(['*.egg-info', '*.pyc', '.git', '.gitignore',
'.gitmodules', '/build/', '/dist/'])
with hide('running'):
run("mkdir -p '{}'".format(remote_path))
return rsync_project(
remote_path, local_path, delete=True,
extra_opts='-i --omit-dir-times -FF ' +
(extra_opts if extra_opts else ''),
ssh_opts='-o StrictHostKeyChecking=no',
exclude=exclude)
@contextmanager
def tempput(local_path=None, remote_path=None, use_sudo=False,
mirror_local_mode=False, mode=None):
"""Put a file to remote and remove it afterwards"""
import warnings
warnings.simplefilter('ignore', RuntimeWarning)
if remote_path is None:
remote_path = os.tempnam()
put(local_path, remote_path, use_sudo, mirror_local_mode, mode)
yield remote_path
run("rm '{}'".format(remote_path))
@contextmanager
def watch(filenames, callback, use_sudo=False):
"""Call callback if any of filenames change during the context"""
filenames = [filenames] if isinstance(filenames, basestring) else filenames
old_md5 = {fn: md5sum(fn, use_sudo) for fn in filenames}
yield
for filename in filenames:
if md5sum(filename, use_sudo) != old_md5[filename]:
callback()
return
### FILE AND DIRECTORY HELPERS }}}
### {{{ DEBIAN/UBUNTU HELPERS
def debconf_set_selections(package, selections):
"""Given package and map config:(type,value), set selections"""
text = '\n'.join(' '.join([package, k, t, v]) for
k, (t, v) in selections.iteritems())
sudo('debconf-set-selections <<-HEREDOC\n{}\nHEREDOC'.format(text))
def install_deb(pkgname, url):
"""Install package from custom deb hosted on S3.
Return true if package was installed by this invocation."""
status = run("dpkg-query -W -f='${{Status}}' {p}; true".format(p=pkgname))
if ('installed' not in status) or ('not-installed' in status):
deb = url.rpartition('/')[2]
debtmp = '/tmp/{}'.format(deb)
run("wget --no-check-certificate -qc -O '{}' '{}'".format(debtmp, url))
sudo("dpkg -i '{0}' && rm -f '{0}'".format(debtmp))
return True
else:
return False
def package_ensure_apt(*packages):
"""Ensure apt packages are installed"""
package = " ".join(packages)
status = run("dpkg-query -W -f='${{Status}} ' {p}; true".format(p=package))
status = status.lower()
if 'no packages found' in status or 'not-installed' in status:
sudo("apt-get --yes install " + package)
return False
else:
return True
@runs_once
def update_apt(days=3, upgrade=False):
"""Update apt index if not update in last N days"""
# Check the apt-get update timestamp (works on Ubuntu only)
with settings(warn_only=True):
last_update = run(
"stat -c %Y /var/lib/apt/periodic/update-success-stamp")
if ('cannot stat' in last_update
or (time.time() - float(last_update)) > days * 86400):
sudo("apt-get --yes update")
if upgrade:
sudo("apt-get --yes upgrade")
### DEBIAN/UBUNTU HELPERS }}}
### {{{ VERSION TAGGING HELPERS
def make_version(ref=None):
"""Build git version string for current directory"""
cmd = 'git describe --tags --abbrev=6 {}'.format(ref or '')
with hide('commands'):
version = local(cmd, capture=True).strip()
if re.match('^v[0-9]', version):
version = version[1:]
# replacements to match semver.org build numbers
if '-' in version:
head, _, tail = version.partition('-')
count, _, sha = tail.partition('-g')
version = head + '+' + count + '-' + sha
return version
def rsync_git(local_path, remote_path, exclude=None, extra_opts=None,
version_file='version.txt'):
"""Rsync deploy a git repo. Write and compare version.txt"""
with settings(hide('output', 'running'), warn_only=True):
print(green('Version On Server: ' + run('cat ' + '{}/{}'.format(
remote_path, version_file)).strip()))
print(green('Now Deploying Version ' +
write_version(join(local_path, version_file))))
rsync(local_path, remote_path, exclude, extra_opts)
def tagversion(repo, level='patch', special=''):
"""Increment and return tagged version in git.
Increment levels are patch, minor and major.
Using semver.org versioning: {major}.{minor}.{patch}{special}
Special must start with a-z and consist of _a-zA-Z0-9.
"""
prepend = 'v'
with lcd(repo):
oldversion = local(
'git describe --abbrev=0 --tags', capture=True).strip()
if oldversion.startswith('v'):
oldversion = oldversion[1:]
else:
prepend = ''
    major, minor, patch = [int(x) for x in re.split(r'\D', oldversion, 3)[:3]]
if special:
if not re.match('^[a-z]', special):
raise ValueError('Special must start with a-z')
        if not re.match('^[_a-zA-Z0-9]+$', special):
            raise ValueError('Special must consist of _a-zA-Z0-9')
if level == 'major':
major, minor, patch = major + 1, 0, 0
elif level == 'minor':
major, minor, patch = major, minor + 1, 0
elif level == 'patch':
major, minor, patch = major, minor, patch + 1
version_string = '{}.{}.{}'.format(major, minor, patch) + special
with lcd(repo):
local('git tag -s --force {}{}'.format(prepend, version_string))
return version_string
def write_version(path, ref=None):
"""Update version file using git desribe"""
with lcd(dirname(path)):
version = make_version(ref)
if (env.get('full') or not os.path.exists(path)
or version != open(path).read().strip()):
with open(path, 'w') as out:
out.write(version + '\n')
return version
### VERSION TAGGING HELPERS }}}
### {{{ SPLUNK HELPERS
def splunk(cmd, user='admin', passwd='changeme'):
"""Authenticated call to splunk"""
return sudo('/opt/splunkforwarder/bin/splunk {c} -auth {u}:{p}'
.format(c=cmd, u=user, p=passwd))
def splunk_monitor(monitors):
"""Monitor a list of (path, sourcetype) pairs in splunk"""
if not exists('/opt/splunkforwarder'):
return
if not env.get('splunk_monitors'):
with hide('commands'):
env['splunk_monitors'] = str(splunk('list monitor'))
for path, sourcetype in monitors:
if path not in env['splunk_monitors']:
with hide('everything'):
run("touch '{path}'; true".format(path=path))
splunk("add monitor '{path}' -sourcetype {st}".format(
path=path, st=sourcetype))
env['splunk_monitors'] += '\n' + path
# SPLUNK HELPERS }}}
# vim:foldnestmax=1:foldenable:foldmethod=marker:
|
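A minimal usage sketch for the md5sum record above; the file names are hypothetical:

# Hypothetical fabfile snippet (illustrative only).
from fablib import md5sum

plain = md5sum('/etc/hosts')                   # runs: md5sum '/etc/hosts'
locked = md5sum('/etc/shadow', use_sudo=True)  # same command, run via sudo
print(plain, locked)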
gpoulter/fablib | fablib.py | mkdir | python | def mkdir(dirs, user=None, group=None, mode=None, use_sudo=True):
if isinstance(dirs, basestring):
dirs = [dirs]
runner = sudo if use_sudo else run
if dirs:
modearg = '-m {:o}'.format(mode) if mode else ''
cmd = 'mkdir -v -p {} {}'.format(modearg, ' '.join(dirs))
result = runner(cmd)
with hide('commands'):
chown(dirs, user, group)
return result | Create directory with sudo and octal mode, then set ownership. | train | https://github.com/gpoulter/fablib/blob/5d14c4d998f79dd1aa3207063c3d06e30e3e2bf9/fablib.py#L112-L123 | [
"def chown(dirs, user=None, group=None):\n \"\"\"User sudo to set user and group ownership\"\"\"\n if isinstance(dirs, basestring):\n dirs = [dirs]\n args = ' '.join(dirs)\n if user and group:\n return sudo('chown {}:{} {}'.format(user, group, args))\n elif user:\n return sudo('chown {} {}'.format(user, args))\n elif group:\n return sudo('chgrp {} {}'.format(group, args))\n else:\n return None\n"
] | """Utility functions for fabric tasks"""
from __future__ import print_function
from fabric.api import (env, get, hide, hosts, lcd, local, put, roles,
run, runs_once, settings, sudo)
from StringIO import StringIO
from contextlib import contextmanager
from fabric.colors import green
from fabric.contrib.files import exists
from fabric.contrib.project import rsync_project
from os.path import dirname, join
import os
import re
import time
### {{{ ROLES HELPERS
def default_roles(*role_list):
"""Decorate task with these roles by default, but override with -R, -H"""
def selectively_attach(func):
"""Only decorate if nothing specified on command line"""
# pylint: disable=W0142
if not env.roles and not env.hosts:
return roles(*role_list)(func)
else:
if env.hosts:
func = hosts(*env.hosts)(func)
if env.roles:
func = roles(*env.roles)(func)
return func
return selectively_attach
### ROLES HELPERS }}}
### {{{ FILE AND DIRECTORY HELPERS
def chown(dirs, user=None, group=None):
"""User sudo to set user and group ownership"""
if isinstance(dirs, basestring):
dirs = [dirs]
args = ' '.join(dirs)
if user and group:
return sudo('chown {}:{} {}'.format(user, group, args))
elif user:
return sudo('chown {} {}'.format(user, args))
elif group:
return sudo('chgrp {} {}'.format(group, args))
else:
return None
def chput(local_path=None, remote_path=None, user=None, group=None,
mode=None, use_sudo=True, mirror_local_mode=False, check=True):
"""Put file and set user and group ownership. Default to use sudo."""
# pylint: disable=R0913
result = None
if env.get('full') or not check or diff(local_path, remote_path):
result = put(local_path, remote_path, use_sudo,
mirror_local_mode, mode)
with hide('commands'):
chown(remote_path, user, group)
return result
def cron(name, timespec, user, command, environ=None, disable=False):
"""Create entry in /etc/cron.d"""
path = '/etc/cron.d/{}'.format(name)
if disable:
sudo('rm ' + path)
return
entry = '{}\t{}\t{}\n'.format(timespec, user, command)
if environ:
envstr = '\n'.join('{}={}'.format(k, v)
for k, v in environ.iteritems())
entry = '{}\n{}'.format(envstr, entry)
chput(StringIO(entry), path, use_sudo=True,
mode=0o644, user='root', group='root')
def diff(local_path, remote_path):
"""Return true if local and remote paths differ in contents"""
with hide('commands'):
if isinstance(local_path, basestring):
with open(local_path) as stream:
local_content = stream.read()
else:
pos = local_path.tell()
local_content = local_path.read()
local_path.seek(pos)
remote_content = StringIO()
with settings(hide('warnings'), warn_only=True):
if get(remote_path, remote_content).failed:
return True
return local_content.strip() != remote_content.getvalue().strip()
def file_exists(location):
"""Tests if there is a remote file at the given location."""
return run('test -e "{}" && echo OK ; true'
.format(location)).endswith("OK")
def md5sum(filename, use_sudo=False):
"""Return md5sum of remote file"""
runner = sudo if use_sudo else run
with hide('commands'):
return runner("md5sum '{}'".format(filename)).split()[0]
def rsync(local_path, remote_path, exclude=None, extra_opts=None):
"""Helper to rsync submodules across"""
if not local_path.endswith('/'):
local_path += '/'
exclude = exclude or []
exclude.extend(['*.egg-info', '*.pyc', '.git', '.gitignore',
'.gitmodules', '/build/', '/dist/'])
with hide('running'):
run("mkdir -p '{}'".format(remote_path))
return rsync_project(
remote_path, local_path, delete=True,
extra_opts='-i --omit-dir-times -FF ' +
(extra_opts if extra_opts else ''),
ssh_opts='-o StrictHostKeyChecking=no',
exclude=exclude)
@contextmanager
def tempput(local_path=None, remote_path=None, use_sudo=False,
mirror_local_mode=False, mode=None):
"""Put a file to remote and remove it afterwards"""
import warnings
warnings.simplefilter('ignore', RuntimeWarning)
if remote_path is None:
remote_path = os.tempnam()
put(local_path, remote_path, use_sudo, mirror_local_mode, mode)
yield remote_path
run("rm '{}'".format(remote_path))
@contextmanager
def watch(filenames, callback, use_sudo=False):
"""Call callback if any of filenames change during the context"""
filenames = [filenames] if isinstance(filenames, basestring) else filenames
old_md5 = {fn: md5sum(fn, use_sudo) for fn in filenames}
yield
for filename in filenames:
if md5sum(filename, use_sudo) != old_md5[filename]:
callback()
return
### FILE AND DIRECTORY HELPERS }}}
### {{{ DEBIAN/UBUNTU HELPERS
def debconf_set_selections(package, selections):
"""Given package and map config:(type,value), set selections"""
text = '\n'.join(' '.join([package, k, t, v]) for
k, (t, v) in selections.iteritems())
sudo('debconf-set-selections <<-HEREDOC\n{}\nHEREDOC'.format(text))
def install_deb(pkgname, url):
"""Install package from custom deb hosted on S3.
Return true if package was installed by this invocation."""
status = run("dpkg-query -W -f='${{Status}}' {p}; true".format(p=pkgname))
if ('installed' not in status) or ('not-installed' in status):
deb = url.rpartition('/')[2]
debtmp = '/tmp/{}'.format(deb)
run("wget --no-check-certificate -qc -O '{}' '{}'".format(debtmp, url))
sudo("dpkg -i '{0}' && rm -f '{0}'".format(debtmp))
return True
else:
return False
def package_ensure_apt(*packages):
"""Ensure apt packages are installed"""
package = " ".join(packages)
status = run("dpkg-query -W -f='${{Status}} ' {p}; true".format(p=package))
status = status.lower()
if 'no packages found' in status or 'not-installed' in status:
sudo("apt-get --yes install " + package)
return False
else:
return True
@runs_once
def update_apt(days=3, upgrade=False):
"""Update apt index if not update in last N days"""
# Check the apt-get update timestamp (works on Ubuntu only)
with settings(warn_only=True):
last_update = run(
"stat -c %Y /var/lib/apt/periodic/update-success-stamp")
if ('cannot stat' in last_update
or (time.time() - float(last_update)) > days * 86400):
sudo("apt-get --yes update")
if upgrade:
sudo("apt-get --yes upgrade")
### DEBIAN/UBUNTU HELPERS }}}
### {{{ VERSION TAGGING HELPERS
def make_version(ref=None):
"""Build git version string for current directory"""
cmd = 'git describe --tags --abbrev=6 {}'.format(ref or '')
with hide('commands'):
version = local(cmd, capture=True).strip()
if re.match('^v[0-9]', version):
version = version[1:]
# replacements to match semver.org build numbers
if '-' in version:
head, _, tail = version.partition('-')
count, _, sha = tail.partition('-g')
version = head + '+' + count + '-' + sha
return version
def rsync_git(local_path, remote_path, exclude=None, extra_opts=None,
version_file='version.txt'):
"""Rsync deploy a git repo. Write and compare version.txt"""
with settings(hide('output', 'running'), warn_only=True):
print(green('Version On Server: ' + run('cat ' + '{}/{}'.format(
remote_path, version_file)).strip()))
print(green('Now Deploying Version ' +
write_version(join(local_path, version_file))))
rsync(local_path, remote_path, exclude, extra_opts)
def tagversion(repo, level='patch', special=''):
"""Increment and return tagged version in git.
Increment levels are patch, minor and major.
Using semver.org versioning: {major}.{minor}.{patch}{special}
Special must start with a-z and consist of _a-zA-Z0-9.
"""
prepend = 'v'
with lcd(repo):
oldversion = local(
'git describe --abbrev=0 --tags', capture=True).strip()
if oldversion.startswith('v'):
oldversion = oldversion[1:]
else:
prepend = ''
    major, minor, patch = [int(x) for x in re.split(r'\D', oldversion, 3)[:3]]
if special:
if not re.match('^[a-z]', special):
raise ValueError('Special must start with a-z')
        if not re.match('^[_a-zA-Z0-9]+$', special):
            raise ValueError('Special must consist of _a-zA-Z0-9')
if level == 'major':
major, minor, patch = major + 1, 0, 0
elif level == 'minor':
major, minor, patch = major, minor + 1, 0
elif level == 'patch':
major, minor, patch = major, minor, patch + 1
version_string = '{}.{}.{}'.format(major, minor, patch) + special
with lcd(repo):
local('git tag -s --force {}{}'.format(prepend, version_string))
return version_string
def write_version(path, ref=None):
"""Update version file using git desribe"""
with lcd(dirname(path)):
version = make_version(ref)
if (env.get('full') or not os.path.exists(path)
or version != open(path).read().strip()):
with open(path, 'w') as out:
out.write(version + '\n')
return version
### VERSION TAGGING HELPERS }}}
### {{{ SPLUNK HELPERS
def splunk(cmd, user='admin', passwd='changeme'):
"""Authenticated call to splunk"""
return sudo('/opt/splunkforwarder/bin/splunk {c} -auth {u}:{p}'
.format(c=cmd, u=user, p=passwd))
def splunk_monitor(monitors):
"""Monitor a list of (path, sourcetype) pairs in splunk"""
if not exists('/opt/splunkforwarder'):
return
if not env.get('splunk_monitors'):
with hide('commands'):
env['splunk_monitors'] = str(splunk('list monitor'))
for path, sourcetype in monitors:
if path not in env['splunk_monitors']:
with hide('everything'):
run("touch '{path}'; true".format(path=path))
splunk("add monitor '{path}' -sourcetype {st}".format(
path=path, st=sourcetype))
env['splunk_monitors'] += '\n' + path
# SPLUNK HELPERS }}}
# vim:foldnestmax=1:foldenable:foldmethod=marker:
|
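A minimal usage sketch for the mkdir record above; the directories and owner are hypothetical:

# Hypothetical fabfile snippet (illustrative only).
from fablib import mkdir

# One directory; mode=0o750 is rendered as 'mkdir -v -p -m 750 ...':
mkdir('/var/lib/myapp', user='myapp', group='myapp', mode=0o750)

# Several directories created and chowned in a single call:
mkdir(['/var/log/myapp', '/var/run/myapp'], user='myapp')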
gpoulter/fablib | fablib.py | rsync | python | def rsync(local_path, remote_path, exclude=None, extra_opts=None):
if not local_path.endswith('/'):
local_path += '/'
exclude = exclude or []
exclude.extend(['*.egg-info', '*.pyc', '.git', '.gitignore',
'.gitmodules', '/build/', '/dist/'])
with hide('running'):
run("mkdir -p '{}'".format(remote_path))
return rsync_project(
remote_path, local_path, delete=True,
extra_opts='-i --omit-dir-times -FF ' +
(extra_opts if extra_opts else ''),
ssh_opts='-o StrictHostKeyChecking=no',
exclude=exclude) | Helper to rsync submodules across | train | https://github.com/gpoulter/fablib/blob/5d14c4d998f79dd1aa3207063c3d06e30e3e2bf9/fablib.py#L126-L140 | null | """Utility functions for fabric tasks"""
from __future__ import print_function
from fabric.api import (env, get, hide, hosts, lcd, local, put, roles,
run, runs_once, settings, sudo)
from StringIO import StringIO
from contextlib import contextmanager
from fabric.colors import green
from fabric.contrib.files import exists
from fabric.contrib.project import rsync_project
from os.path import dirname, join
import os
import re
import time
### {{{ ROLES HELPERS
def default_roles(*role_list):
"""Decorate task with these roles by default, but override with -R, -H"""
def selectively_attach(func):
"""Only decorate if nothing specified on command line"""
# pylint: disable=W0142
if not env.roles and not env.hosts:
return roles(*role_list)(func)
else:
if env.hosts:
func = hosts(*env.hosts)(func)
if env.roles:
func = roles(*env.roles)(func)
return func
return selectively_attach
### ROLES HELPERS }}}
### {{{ FILE AND DIRECTORY HELPERS
def chown(dirs, user=None, group=None):
"""User sudo to set user and group ownership"""
if isinstance(dirs, basestring):
dirs = [dirs]
args = ' '.join(dirs)
if user and group:
return sudo('chown {}:{} {}'.format(user, group, args))
elif user:
return sudo('chown {} {}'.format(user, args))
elif group:
return sudo('chgrp {} {}'.format(group, args))
else:
return None
def chput(local_path=None, remote_path=None, user=None, group=None,
mode=None, use_sudo=True, mirror_local_mode=False, check=True):
"""Put file and set user and group ownership. Default to use sudo."""
# pylint: disable=R0913
result = None
if env.get('full') or not check or diff(local_path, remote_path):
result = put(local_path, remote_path, use_sudo,
mirror_local_mode, mode)
with hide('commands'):
chown(remote_path, user, group)
return result
def cron(name, timespec, user, command, environ=None, disable=False):
"""Create entry in /etc/cron.d"""
path = '/etc/cron.d/{}'.format(name)
if disable:
sudo('rm ' + path)
return
entry = '{}\t{}\t{}\n'.format(timespec, user, command)
if environ:
envstr = '\n'.join('{}={}'.format(k, v)
for k, v in environ.iteritems())
entry = '{}\n{}'.format(envstr, entry)
chput(StringIO(entry), path, use_sudo=True,
mode=0o644, user='root', group='root')
def diff(local_path, remote_path):
"""Return true if local and remote paths differ in contents"""
with hide('commands'):
if isinstance(local_path, basestring):
with open(local_path) as stream:
local_content = stream.read()
else:
pos = local_path.tell()
local_content = local_path.read()
local_path.seek(pos)
remote_content = StringIO()
with settings(hide('warnings'), warn_only=True):
if get(remote_path, remote_content).failed:
return True
return local_content.strip() != remote_content.getvalue().strip()
def file_exists(location):
"""Tests if there is a remote file at the given location."""
return run('test -e "{}" && echo OK ; true'
.format(location)).endswith("OK")
def md5sum(filename, use_sudo=False):
"""Return md5sum of remote file"""
runner = sudo if use_sudo else run
with hide('commands'):
return runner("md5sum '{}'".format(filename)).split()[0]
def mkdir(dirs, user=None, group=None, mode=None, use_sudo=True):
"""Create directory with sudo and octal mode, then set ownership."""
if isinstance(dirs, basestring):
dirs = [dirs]
runner = sudo if use_sudo else run
if dirs:
modearg = '-m {:o}'.format(mode) if mode else ''
cmd = 'mkdir -v -p {} {}'.format(modearg, ' '.join(dirs))
result = runner(cmd)
with hide('commands'):
chown(dirs, user, group)
return result
@contextmanager
def tempput(local_path=None, remote_path=None, use_sudo=False,
mirror_local_mode=False, mode=None):
"""Put a file to remote and remove it afterwards"""
import warnings
warnings.simplefilter('ignore', RuntimeWarning)
if remote_path is None:
remote_path = os.tempnam()
put(local_path, remote_path, use_sudo, mirror_local_mode, mode)
yield remote_path
run("rm '{}'".format(remote_path))
@contextmanager
def watch(filenames, callback, use_sudo=False):
"""Call callback if any of filenames change during the context"""
filenames = [filenames] if isinstance(filenames, basestring) else filenames
old_md5 = {fn: md5sum(fn, use_sudo) for fn in filenames}
yield
for filename in filenames:
if md5sum(filename, use_sudo) != old_md5[filename]:
callback()
return
### FILE AND DIRECTORY HELPERS }}}
### {{{ DEBIAN/UBUNTU HELPERS
def debconf_set_selections(package, selections):
"""Given package and map config:(type,value), set selections"""
text = '\n'.join(' '.join([package, k, t, v]) for
k, (t, v) in selections.iteritems())
sudo('debconf-set-selections <<-HEREDOC\n{}\nHEREDOC'.format(text))
def install_deb(pkgname, url):
"""Install package from custom deb hosted on S3.
Return true if package was installed by this invocation."""
status = run("dpkg-query -W -f='${{Status}}' {p}; true".format(p=pkgname))
if ('installed' not in status) or ('not-installed' in status):
deb = url.rpartition('/')[2]
debtmp = '/tmp/{}'.format(deb)
run("wget --no-check-certificate -qc -O '{}' '{}'".format(debtmp, url))
sudo("dpkg -i '{0}' && rm -f '{0}'".format(debtmp))
return True
else:
return False
def package_ensure_apt(*packages):
"""Ensure apt packages are installed"""
package = " ".join(packages)
status = run("dpkg-query -W -f='${{Status}} ' {p}; true".format(p=package))
status = status.lower()
if 'no packages found' in status or 'not-installed' in status:
sudo("apt-get --yes install " + package)
return False
else:
return True
@runs_once
def update_apt(days=3, upgrade=False):
"""Update apt index if not update in last N days"""
# Check the apt-get update timestamp (works on Ubuntu only)
with settings(warn_only=True):
last_update = run(
"stat -c %Y /var/lib/apt/periodic/update-success-stamp")
if ('cannot stat' in last_update
or (time.time() - float(last_update)) > days * 86400):
sudo("apt-get --yes update")
if upgrade:
sudo("apt-get --yes upgrade")
### DEBIAN/UBUNTU HELPERS }}}
### {{{ VERSION TAGGING HELPERS
def make_version(ref=None):
"""Build git version string for current directory"""
cmd = 'git describe --tags --abbrev=6 {}'.format(ref or '')
with hide('commands'):
version = local(cmd, capture=True).strip()
if re.match('^v[0-9]', version):
version = version[1:]
# replacements to match semver.org build numbers
if '-' in version:
head, _, tail = version.partition('-')
count, _, sha = tail.partition('-g')
version = head + '+' + count + '-' + sha
return version
def rsync_git(local_path, remote_path, exclude=None, extra_opts=None,
version_file='version.txt'):
"""Rsync deploy a git repo. Write and compare version.txt"""
with settings(hide('output', 'running'), warn_only=True):
print(green('Version On Server: ' + run('cat ' + '{}/{}'.format(
remote_path, version_file)).strip()))
print(green('Now Deploying Version ' +
write_version(join(local_path, version_file))))
rsync(local_path, remote_path, exclude, extra_opts)
def tagversion(repo, level='patch', special=''):
"""Increment and return tagged version in git.
Increment levels are patch, minor and major.
Using semver.org versioning: {major}.{minor}.{patch}{special}
Special must start with a-z and consist of _a-zA-Z0-9.
"""
prepend = 'v'
with lcd(repo):
oldversion = local(
'git describe --abbrev=0 --tags', capture=True).strip()
if oldversion.startswith('v'):
oldversion = oldversion[1:]
else:
prepend = ''
    major, minor, patch = [int(x) for x in re.split(r'\D', oldversion, 3)[:3]]
if special:
if not re.match('^[a-z]', special):
raise ValueError('Special must start with a-z')
        if not re.match('^[_a-zA-Z0-9]+$', special):
            raise ValueError('Special must consist of _a-zA-Z0-9')
if level == 'major':
major, minor, patch = major + 1, 0, 0
elif level == 'minor':
major, minor, patch = major, minor + 1, 0
elif level == 'patch':
major, minor, patch = major, minor, patch + 1
version_string = '{}.{}.{}'.format(major, minor, patch) + special
with lcd(repo):
local('git tag -s --force {}{}'.format(prepend, version_string))
return version_string
def write_version(path, ref=None):
"""Update version file using git desribe"""
with lcd(dirname(path)):
version = make_version(ref)
if (env.get('full') or not os.path.exists(path)
or version != open(path).read().strip()):
with open(path, 'w') as out:
out.write(version + '\n')
return version
### VERSION TAGGING HELPERS }}}
### {{{ SPLUNK HELPERS
def splunk(cmd, user='admin', passwd='changeme'):
"""Authenticated call to splunk"""
return sudo('/opt/splunkforwarder/bin/splunk {c} -auth {u}:{p}'
.format(c=cmd, u=user, p=passwd))
def splunk_monitor(monitors):
"""Monitor a list of (path, sourcetype) pairs in splunk"""
if not exists('/opt/splunkforwarder'):
return
if not env.get('splunk_monitors'):
with hide('commands'):
env['splunk_monitors'] = str(splunk('list monitor'))
for path, sourcetype in monitors:
if path not in env['splunk_monitors']:
with hide('everything'):
run("touch '{path}'; true".format(path=path))
splunk("add monitor '{path}' -sourcetype {st}".format(
path=path, st=sourcetype))
env['splunk_monitors'] += '\n' + path
# SPLUNK HELPERS }}}
# vim:foldnestmax=1:foldenable:foldmethod=marker:
|
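A minimal usage sketch for the rsync record above; the paths and extra options are hypothetical:

# Hypothetical fabfile snippet (illustrative only).
from fablib import rsync

# Mirrors build/site/ to the remote docroot with --delete; .git, *.pyc,
# build/ and dist/ are always excluded on top of our own pattern:
rsync('build/site', '/srv/www/example.com',
      exclude=['*.map'], extra_opts='--chmod=D755,F644')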
gpoulter/fablib | fablib.py | tempput | python | def tempput(local_path=None, remote_path=None, use_sudo=False,
mirror_local_mode=False, mode=None):
import warnings
warnings.simplefilter('ignore', RuntimeWarning)
if remote_path is None:
remote_path = os.tempnam()
put(local_path, remote_path, use_sudo, mirror_local_mode, mode)
yield remote_path
run("rm '{}'".format(remote_path)) | Put a file to remote and remove it afterwards | train | https://github.com/gpoulter/fablib/blob/5d14c4d998f79dd1aa3207063c3d06e30e3e2bf9/fablib.py#L144-L153 | null | """Utility functions for fabric tasks"""
from __future__ import print_function
from fabric.api import (env, get, hide, hosts, lcd, local, put, roles,
run, runs_once, settings, sudo)
from StringIO import StringIO
from contextlib import contextmanager
from fabric.colors import green
from fabric.contrib.files import exists
from fabric.contrib.project import rsync_project
from os.path import dirname, join
import os
import re
import time
### {{{ ROLES HELPERS
def default_roles(*role_list):
"""Decorate task with these roles by default, but override with -R, -H"""
def selectively_attach(func):
"""Only decorate if nothing specified on command line"""
# pylint: disable=W0142
if not env.roles and not env.hosts:
return roles(*role_list)(func)
else:
if env.hosts:
func = hosts(*env.hosts)(func)
if env.roles:
func = roles(*env.roles)(func)
return func
return selectively_attach
### ROLES HELPERS }}}
### {{{ FILE AND DIRECTORY HELPERS
def chown(dirs, user=None, group=None):
"""User sudo to set user and group ownership"""
if isinstance(dirs, basestring):
dirs = [dirs]
args = ' '.join(dirs)
if user and group:
return sudo('chown {}:{} {}'.format(user, group, args))
elif user:
return sudo('chown {} {}'.format(user, args))
elif group:
return sudo('chgrp {} {}'.format(group, args))
else:
return None
def chput(local_path=None, remote_path=None, user=None, group=None,
mode=None, use_sudo=True, mirror_local_mode=False, check=True):
"""Put file and set user and group ownership. Default to use sudo."""
# pylint: disable=R0913
result = None
if env.get('full') or not check or diff(local_path, remote_path):
result = put(local_path, remote_path, use_sudo,
mirror_local_mode, mode)
with hide('commands'):
chown(remote_path, user, group)
return result
def cron(name, timespec, user, command, environ=None, disable=False):
"""Create entry in /etc/cron.d"""
path = '/etc/cron.d/{}'.format(name)
if disable:
sudo('rm ' + path)
return
entry = '{}\t{}\t{}\n'.format(timespec, user, command)
if environ:
envstr = '\n'.join('{}={}'.format(k, v)
for k, v in environ.iteritems())
entry = '{}\n{}'.format(envstr, entry)
chput(StringIO(entry), path, use_sudo=True,
mode=0o644, user='root', group='root')
def diff(local_path, remote_path):
"""Return true if local and remote paths differ in contents"""
with hide('commands'):
if isinstance(local_path, basestring):
with open(local_path) as stream:
local_content = stream.read()
else:
pos = local_path.tell()
local_content = local_path.read()
local_path.seek(pos)
remote_content = StringIO()
with settings(hide('warnings'), warn_only=True):
if get(remote_path, remote_content).failed:
return True
return local_content.strip() != remote_content.getvalue().strip()
def file_exists(location):
"""Tests if there is a remote file at the given location."""
return run('test -e "{}" && echo OK ; true'
.format(location)).endswith("OK")
def md5sum(filename, use_sudo=False):
"""Return md5sum of remote file"""
runner = sudo if use_sudo else run
with hide('commands'):
return runner("md5sum '{}'".format(filename)).split()[0]
def mkdir(dirs, user=None, group=None, mode=None, use_sudo=True):
"""Create directory with sudo and octal mode, then set ownership."""
if isinstance(dirs, basestring):
dirs = [dirs]
runner = sudo if use_sudo else run
if dirs:
modearg = '-m {:o}'.format(mode) if mode else ''
cmd = 'mkdir -v -p {} {}'.format(modearg, ' '.join(dirs))
result = runner(cmd)
with hide('commands'):
chown(dirs, user, group)
return result
def rsync(local_path, remote_path, exclude=None, extra_opts=None):
"""Helper to rsync submodules across"""
if not local_path.endswith('/'):
local_path += '/'
exclude = exclude or []
exclude.extend(['*.egg-info', '*.pyc', '.git', '.gitignore',
'.gitmodules', '/build/', '/dist/'])
with hide('running'):
run("mkdir -p '{}'".format(remote_path))
return rsync_project(
remote_path, local_path, delete=True,
extra_opts='-i --omit-dir-times -FF ' +
(extra_opts if extra_opts else ''),
ssh_opts='-o StrictHostKeyChecking=no',
exclude=exclude)
@contextmanager
def watch(filenames, callback, use_sudo=False):
"""Call callback if any of filenames change during the context"""
filenames = [filenames] if isinstance(filenames, basestring) else filenames
old_md5 = {fn: md5sum(fn, use_sudo) for fn in filenames}
yield
for filename in filenames:
if md5sum(filename, use_sudo) != old_md5[filename]:
callback()
return
### FILE AND DIRECTORY HELPERS }}}
### {{{ DEBIAN/UBUNTU HELPERS
def debconf_set_selections(package, selections):
"""Given package and map config:(type,value), set selections"""
text = '\n'.join(' '.join([package, k, t, v]) for
k, (t, v) in selections.iteritems())
sudo('debconf-set-selections <<-HEREDOC\n{}\nHEREDOC'.format(text))
def install_deb(pkgname, url):
"""Install package from custom deb hosted on S3.
Return true if package was installed by this invocation."""
status = run("dpkg-query -W -f='${{Status}}' {p}; true".format(p=pkgname))
if ('installed' not in status) or ('not-installed' in status):
deb = url.rpartition('/')[2]
debtmp = '/tmp/{}'.format(deb)
run("wget --no-check-certificate -qc -O '{}' '{}'".format(debtmp, url))
sudo("dpkg -i '{0}' && rm -f '{0}'".format(debtmp))
return True
else:
return False
def package_ensure_apt(*packages):
"""Ensure apt packages are installed"""
package = " ".join(packages)
status = run("dpkg-query -W -f='${{Status}} ' {p}; true".format(p=package))
status = status.lower()
if 'no packages found' in status or 'not-installed' in status:
sudo("apt-get --yes install " + package)
return False
else:
return True
@runs_once
def update_apt(days=3, upgrade=False):
"""Update apt index if not update in last N days"""
# Check the apt-get update timestamp (works on Ubuntu only)
with settings(warn_only=True):
last_update = run(
"stat -c %Y /var/lib/apt/periodic/update-success-stamp")
if ('cannot stat' in last_update
or (time.time() - float(last_update)) > days * 86400):
sudo("apt-get --yes update")
if upgrade:
sudo("apt-get --yes upgrade")
### DEBIAN/UBUNTU HELPERS }}}
### {{{ VERSION TAGGING HELPERS
def make_version(ref=None):
"""Build git version string for current directory"""
cmd = 'git describe --tags --abbrev=6 {}'.format(ref or '')
with hide('commands'):
version = local(cmd, capture=True).strip()
if re.match('^v[0-9]', version):
version = version[1:]
# replacements to match semver.org build numbers
if '-' in version:
head, _, tail = version.partition('-')
count, _, sha = tail.partition('-g')
version = head + '+' + count + '-' + sha
return version
def rsync_git(local_path, remote_path, exclude=None, extra_opts=None,
version_file='version.txt'):
"""Rsync deploy a git repo. Write and compare version.txt"""
with settings(hide('output', 'running'), warn_only=True):
print(green('Version On Server: ' + run('cat ' + '{}/{}'.format(
remote_path, version_file)).strip()))
print(green('Now Deploying Version ' +
write_version(join(local_path, version_file))))
rsync(local_path, remote_path, exclude, extra_opts)
def tagversion(repo, level='patch', special=''):
"""Increment and return tagged version in git.
Increment levels are patch, minor and major.
Using semver.org versioning: {major}.{minor}.{patch}{special}
Special must start with a-z and consist of _a-zA-Z0-9.
"""
prepend = 'v'
with lcd(repo):
oldversion = local(
'git describe --abbrev=0 --tags', capture=True).strip()
if oldversion.startswith('v'):
oldversion = oldversion[1:]
else:
prepend = ''
    major, minor, patch = [int(x) for x in re.split(r'\D', oldversion, 3)[:3]]
if special:
if not re.match('^[a-z]', special):
raise ValueError('Special must start with a-z')
        if not re.match('^[_a-zA-Z0-9]+$', special):
            raise ValueError('Special must consist of _a-zA-Z0-9')
if level == 'major':
major, minor, patch = major + 1, 0, 0
elif level == 'minor':
major, minor, patch = major, minor + 1, 0
elif level == 'patch':
major, minor, patch = major, minor, patch + 1
version_string = '{}.{}.{}'.format(major, minor, patch) + special
with lcd(repo):
local('git tag -s --force {}{}'.format(prepend, version_string))
return version_string
def write_version(path, ref=None):
"""Update version file using git desribe"""
with lcd(dirname(path)):
version = make_version(ref)
if (env.get('full') or not os.path.exists(path)
or version != open(path).read().strip()):
with open(path, 'w') as out:
out.write(version + '\n')
return version
### VERSION TAGGING HELPERS }}}
### {{{ SPLUNK HELPERS
def splunk(cmd, user='admin', passwd='changeme'):
"""Authenticated call to splunk"""
return sudo('/opt/splunkforwarder/bin/splunk {c} -auth {u}:{p}'
.format(c=cmd, u=user, p=passwd))
def splunk_monitor(monitors):
"""Monitor a list of (path, sourcetype) pairs in splunk"""
if not exists('/opt/splunkforwarder'):
return
if not env.get('splunk_monitors'):
with hide('commands'):
env['splunk_monitors'] = str(splunk('list monitor'))
for path, sourcetype in monitors:
if path not in env['splunk_monitors']:
with hide('everything'):
run("touch '{path}'; true".format(path=path))
splunk("add monitor '{path}' -sourcetype {st}".format(
path=path, st=sourcetype))
env['splunk_monitors'] += '\n' + path
# SPLUNK HELPERS }}}
# vim:foldnestmax=1:foldenable:foldmethod=marker:
|
gpoulter/fablib | fablib.py | watch | python | def watch(filenames, callback, use_sudo=False):
filenames = [filenames] if isinstance(filenames, basestring) else filenames
old_md5 = {fn: md5sum(fn, use_sudo) for fn in filenames}
yield
for filename in filenames:
if md5sum(filename, use_sudo) != old_md5[filename]:
callback()
return | Call callback if any of filenames change during the context | train | https://github.com/gpoulter/fablib/blob/5d14c4d998f79dd1aa3207063c3d06e30e3e2bf9/fablib.py#L157-L165 | [
"def md5sum(filename, use_sudo=False):\n \"\"\"Return md5sum of remote file\"\"\"\n runner = sudo if use_sudo else run\n with hide('commands'):\n return runner(\"md5sum '{}'\".format(filename)).split()[0]\n"
] | """Utility functions for fabric tasks"""
from __future__ import print_function
from fabric.api import (env, get, hide, hosts, lcd, local, put, roles,
run, runs_once, settings, sudo)
from StringIO import StringIO
from contextlib import contextmanager
from fabric.colors import green
from fabric.contrib.files import exists
from fabric.contrib.project import rsync_project
from os.path import dirname, join
import os
import re
import time
### {{{ ROLES HELPERS
def default_roles(*role_list):
"""Decorate task with these roles by default, but override with -R, -H"""
def selectively_attach(func):
"""Only decorate if nothing specified on command line"""
# pylint: disable=W0142
if not env.roles and not env.hosts:
return roles(*role_list)(func)
else:
if env.hosts:
func = hosts(*env.hosts)(func)
if env.roles:
func = roles(*env.roles)(func)
return func
return selectively_attach
### ROLES HELPERS }}}
### {{{ FILE AND DIRECTORY HELPERS
def chown(dirs, user=None, group=None):
"""User sudo to set user and group ownership"""
if isinstance(dirs, basestring):
dirs = [dirs]
args = ' '.join(dirs)
if user and group:
return sudo('chown {}:{} {}'.format(user, group, args))
elif user:
return sudo('chown {} {}'.format(user, args))
elif group:
return sudo('chgrp {} {}'.format(group, args))
else:
return None
def chput(local_path=None, remote_path=None, user=None, group=None,
mode=None, use_sudo=True, mirror_local_mode=False, check=True):
"""Put file and set user and group ownership. Default to use sudo."""
# pylint: disable=R0913
result = None
if env.get('full') or not check or diff(local_path, remote_path):
result = put(local_path, remote_path, use_sudo,
mirror_local_mode, mode)
with hide('commands'):
chown(remote_path, user, group)
return result
def cron(name, timespec, user, command, environ=None, disable=False):
"""Create entry in /etc/cron.d"""
path = '/etc/cron.d/{}'.format(name)
if disable:
sudo('rm ' + path)
return
entry = '{}\t{}\t{}\n'.format(timespec, user, command)
if environ:
envstr = '\n'.join('{}={}'.format(k, v)
for k, v in environ.iteritems())
entry = '{}\n{}'.format(envstr, entry)
chput(StringIO(entry), path, use_sudo=True,
mode=0o644, user='root', group='root')
def diff(local_path, remote_path):
"""Return true if local and remote paths differ in contents"""
with hide('commands'):
if isinstance(local_path, basestring):
with open(local_path) as stream:
local_content = stream.read()
else:
pos = local_path.tell()
local_content = local_path.read()
local_path.seek(pos)
remote_content = StringIO()
with settings(hide('warnings'), warn_only=True):
if get(remote_path, remote_content).failed:
return True
return local_content.strip() != remote_content.getvalue().strip()
def file_exists(location):
"""Tests if there is a remote file at the given location."""
return run('test -e "{}" && echo OK ; true'
.format(location)).endswith("OK")
def md5sum(filename, use_sudo=False):
"""Return md5sum of remote file"""
runner = sudo if use_sudo else run
with hide('commands'):
return runner("md5sum '{}'".format(filename)).split()[0]
def mkdir(dirs, user=None, group=None, mode=None, use_sudo=True):
"""Create directory with sudo and octal mode, then set ownership."""
if isinstance(dirs, basestring):
dirs = [dirs]
runner = sudo if use_sudo else run
if dirs:
modearg = '-m {:o}'.format(mode) if mode else ''
cmd = 'mkdir -v -p {} {}'.format(modearg, ' '.join(dirs))
result = runner(cmd)
with hide('commands'):
chown(dirs, user, group)
return result
def rsync(local_path, remote_path, exclude=None, extra_opts=None):
"""Helper to rsync submodules across"""
if not local_path.endswith('/'):
local_path += '/'
exclude = exclude or []
exclude.extend(['*.egg-info', '*.pyc', '.git', '.gitignore',
'.gitmodules', '/build/', '/dist/'])
with hide('running'):
run("mkdir -p '{}'".format(remote_path))
return rsync_project(
remote_path, local_path, delete=True,
extra_opts='-i --omit-dir-times -FF ' +
(extra_opts if extra_opts else ''),
ssh_opts='-o StrictHostKeyChecking=no',
exclude=exclude)
@contextmanager
def tempput(local_path=None, remote_path=None, use_sudo=False,
mirror_local_mode=False, mode=None):
"""Put a file to remote and remove it afterwards"""
import warnings
warnings.simplefilter('ignore', RuntimeWarning)
if remote_path is None:
remote_path = os.tempnam()
put(local_path, remote_path, use_sudo, mirror_local_mode, mode)
yield remote_path
run("rm '{}'".format(remote_path))
@contextmanager
### FILE AND DIRECTORY HELPERS }}}
### {{{ DEBIAN/UBUNTU HELPERS
def debconf_set_selections(package, selections):
"""Given package and map config:(type,value), set selections"""
text = '\n'.join(' '.join([package, k, t, v]) for
k, (t, v) in selections.iteritems())
sudo('debconf-set-selections <<-HEREDOC\n{}\nHEREDOC'.format(text))
def install_deb(pkgname, url):
"""Install package from custom deb hosted on S3.
Return true if package was installed by this invocation."""
status = run("dpkg-query -W -f='${{Status}}' {p}; true".format(p=pkgname))
if ('installed' not in status) or ('not-installed' in status):
deb = url.rpartition('/')[2]
debtmp = '/tmp/{}'.format(deb)
run("wget --no-check-certificate -qc -O '{}' '{}'".format(debtmp, url))
sudo("dpkg -i '{0}' && rm -f '{0}'".format(debtmp))
return True
else:
return False
def package_ensure_apt(*packages):
"""Ensure apt packages are installed"""
package = " ".join(packages)
status = run("dpkg-query -W -f='${{Status}} ' {p}; true".format(p=package))
status = status.lower()
if 'no packages found' in status or 'not-installed' in status:
sudo("apt-get --yes install " + package)
return False
else:
return True
@runs_once
def update_apt(days=3, upgrade=False):
"""Update apt index if not update in last N days"""
# Check the apt-get update timestamp (works on Ubuntu only)
with settings(warn_only=True):
last_update = run(
"stat -c %Y /var/lib/apt/periodic/update-success-stamp")
if ('cannot stat' in last_update
or (time.time() - float(last_update)) > days * 86400):
sudo("apt-get --yes update")
if upgrade:
sudo("apt-get --yes upgrade")
### DEBIAN/UBUNTU HELPERS }}}
### {{{ VERSION TAGGING HELPERS
def make_version(ref=None):
"""Build git version string for current directory"""
cmd = 'git describe --tags --abbrev=6 {}'.format(ref or '')
with hide('commands'):
version = local(cmd, capture=True).strip()
if re.match('^v[0-9]', version):
version = version[1:]
# replacements to match semver.org build numbers
if '-' in version:
head, _, tail = version.partition('-')
count, _, sha = tail.partition('-g')
version = head + '+' + count + '-' + sha
return version
def rsync_git(local_path, remote_path, exclude=None, extra_opts=None,
version_file='version.txt'):
"""Rsync deploy a git repo. Write and compare version.txt"""
with settings(hide('output', 'running'), warn_only=True):
print(green('Version On Server: ' + run('cat ' + '{}/{}'.format(
remote_path, version_file)).strip()))
print(green('Now Deploying Version ' +
write_version(join(local_path, version_file))))
rsync(local_path, remote_path, exclude, extra_opts)
def tagversion(repo, level='patch', special=''):
"""Increment and return tagged version in git.
Increment levels are patch, minor and major.
Using semver.org versioning: {major}.{minor}.{patch}{special}
Special must start with a-z and consist of _a-zA-Z0-9.
"""
prepend = 'v'
with lcd(repo):
oldversion = local(
'git describe --abbrev=0 --tags', capture=True).strip()
if oldversion.startswith('v'):
oldversion = oldversion[1:]
else:
prepend = ''
major, minor, patch = [int(x) for x in re.split('\D', oldversion, 3)[:3]]
if special:
if not re.match('^[a-z]', special):
raise ValueError('Special must start with a-z')
if not re.match('[_a-zA-Z0-9]+', special):
raise ValueError('Must contain start with lowercase letter')
if level == 'major':
major, minor, patch = major + 1, 0, 0
elif level == 'minor':
major, minor, patch = major, minor + 1, 0
elif level == 'patch':
major, minor, patch = major, minor, patch + 1
version_string = '{}.{}.{}'.format(major, minor, patch) + special
with lcd(repo):
local('git tag -s --force {}{}'.format(prepend, version_string))
return version_string
def write_version(path, ref=None):
"""Update version file using git desribe"""
with lcd(dirname(path)):
version = make_version(ref)
if (env.get('full') or not os.path.exists(path)
or version != open(path).read().strip()):
with open(path, 'w') as out:
out.write(version + '\n')
return version
### VERSION TAGGING HELPERS }}}
### {{{ SPLUNK HELPERS
def splunk(cmd, user='admin', passwd='changeme'):
"""Authenticated call to splunk"""
return sudo('/opt/splunkforwarder/bin/splunk {c} -auth {u}:{p}'
.format(c=cmd, u=user, p=passwd))
def splunk_monitor(monitors):
"""Monitor a list of (path, sourcetype) pairs in splunk"""
if not exists('/opt/splunkforwarder'):
return
if not env.get('splunk_monitors'):
with hide('commands'):
env['splunk_monitors'] = str(splunk('list monitor'))
for path, sourcetype in monitors:
if path not in env['splunk_monitors']:
with hide('everything'):
run("touch '{path}'; true".format(path=path))
splunk("add monitor '{path}' -sourcetype {st}".format(
path=path, st=sourcetype))
env['splunk_monitors'] += '\n' + path
# SPLUNK HELPERS }}}
# vim:foldnestmax=1:foldenable:foldmethod=marker:
|
gpoulter/fablib | fablib.py | debconf_set_selections | python | def debconf_set_selections(package, selections):
text = '\n'.join(' '.join([package, k, t, v]) for
k, (t, v) in selections.iteritems())
sudo('debconf-set-selections <<-HEREDOC\n{}\nHEREDOC'.format(text)) | Given package and map config:(type,value), set selections | train | https://github.com/gpoulter/fablib/blob/5d14c4d998f79dd1aa3207063c3d06e30e3e2bf9/fablib.py#L171-L175 | null | """Utility functions for fabric tasks"""
from __future__ import print_function
from fabric.api import (env, get, hide, hosts, lcd, local, put, roles,
run, runs_once, settings, sudo)
from StringIO import StringIO
from contextlib import contextmanager
from fabric.colors import green
from fabric.contrib.files import exists
from fabric.contrib.project import rsync_project
from os.path import dirname, join
import os
import re
import time
### {{{ ROLES HELPERS
def default_roles(*role_list):
"""Decorate task with these roles by default, but override with -R, -H"""
def selectively_attach(func):
"""Only decorate if nothing specified on command line"""
# pylint: disable=W0142
if not env.roles and not env.hosts:
return roles(*role_list)(func)
else:
if env.hosts:
func = hosts(*env.hosts)(func)
if env.roles:
func = roles(*env.roles)(func)
return func
return selectively_attach
### ROLES HELPERS }}}
### {{{ FILE AND DIRECTORY HELPERS
def chown(dirs, user=None, group=None):
"""User sudo to set user and group ownership"""
if isinstance(dirs, basestring):
dirs = [dirs]
args = ' '.join(dirs)
if user and group:
return sudo('chown {}:{} {}'.format(user, group, args))
elif user:
return sudo('chown {} {}'.format(user, args))
elif group:
return sudo('chgrp {} {}'.format(group, args))
else:
return None
def chput(local_path=None, remote_path=None, user=None, group=None,
mode=None, use_sudo=True, mirror_local_mode=False, check=True):
"""Put file and set user and group ownership. Default to use sudo."""
# pylint: disable=R0913
result = None
if env.get('full') or not check or diff(local_path, remote_path):
result = put(local_path, remote_path, use_sudo,
mirror_local_mode, mode)
with hide('commands'):
chown(remote_path, user, group)
return result
def cron(name, timespec, user, command, environ=None, disable=False):
"""Create entry in /etc/cron.d"""
path = '/etc/cron.d/{}'.format(name)
if disable:
sudo('rm ' + path)
return
entry = '{}\t{}\t{}\n'.format(timespec, user, command)
if environ:
envstr = '\n'.join('{}={}'.format(k, v)
for k, v in environ.iteritems())
entry = '{}\n{}'.format(envstr, entry)
chput(StringIO(entry), path, use_sudo=True,
mode=0o644, user='root', group='root')
def diff(local_path, remote_path):
"""Return true if local and remote paths differ in contents"""
with hide('commands'):
if isinstance(local_path, basestring):
with open(local_path) as stream:
local_content = stream.read()
else:
pos = local_path.tell()
local_content = local_path.read()
local_path.seek(pos)
remote_content = StringIO()
with settings(hide('warnings'), warn_only=True):
if get(remote_path, remote_content).failed:
return True
return local_content.strip() != remote_content.getvalue().strip()
def file_exists(location):
"""Tests if there is a remote file at the given location."""
return run('test -e "{}" && echo OK ; true'
.format(location)).endswith("OK")
def md5sum(filename, use_sudo=False):
"""Return md5sum of remote file"""
runner = sudo if use_sudo else run
with hide('commands'):
return runner("md5sum '{}'".format(filename)).split()[0]
def mkdir(dirs, user=None, group=None, mode=None, use_sudo=True):
"""Create directory with sudo and octal mode, then set ownership."""
if isinstance(dirs, basestring):
dirs = [dirs]
runner = sudo if use_sudo else run
if dirs:
modearg = '-m {:o}'.format(mode) if mode else ''
cmd = 'mkdir -v -p {} {}'.format(modearg, ' '.join(dirs))
result = runner(cmd)
with hide('commands'):
chown(dirs, user, group)
return result
def rsync(local_path, remote_path, exclude=None, extra_opts=None):
"""Helper to rsync submodules across"""
if not local_path.endswith('/'):
local_path += '/'
exclude = exclude or []
exclude.extend(['*.egg-info', '*.pyc', '.git', '.gitignore',
'.gitmodules', '/build/', '/dist/'])
with hide('running'):
run("mkdir -p '{}'".format(remote_path))
return rsync_project(
remote_path, local_path, delete=True,
extra_opts='-i --omit-dir-times -FF ' +
(extra_opts if extra_opts else ''),
ssh_opts='-o StrictHostKeyChecking=no',
exclude=exclude)
@contextmanager
def tempput(local_path=None, remote_path=None, use_sudo=False,
mirror_local_mode=False, mode=None):
"""Put a file to remote and remove it afterwards"""
import warnings
warnings.simplefilter('ignore', RuntimeWarning)
if remote_path is None:
remote_path = os.tempnam()
put(local_path, remote_path, use_sudo, mirror_local_mode, mode)
yield remote_path
run("rm '{}'".format(remote_path))
@contextmanager
def watch(filenames, callback, use_sudo=False):
"""Call callback if any of filenames change during the context"""
filenames = [filenames] if isinstance(filenames, basestring) else filenames
old_md5 = {fn: md5sum(fn, use_sudo) for fn in filenames}
yield
for filename in filenames:
if md5sum(filename, use_sudo) != old_md5[filename]:
callback()
return
### FILE AND DIRECTORY HELPERS }}}
### {{{ DEBIAN/UBUNTU HELPERS
def install_deb(pkgname, url):
"""Install package from custom deb hosted on S3.
Return true if package was installed by this invocation."""
status = run("dpkg-query -W -f='${{Status}}' {p}; true".format(p=pkgname))
if ('installed' not in status) or ('not-installed' in status):
deb = url.rpartition('/')[2]
debtmp = '/tmp/{}'.format(deb)
run("wget --no-check-certificate -qc -O '{}' '{}'".format(debtmp, url))
sudo("dpkg -i '{0}' && rm -f '{0}'".format(debtmp))
return True
else:
return False
def package_ensure_apt(*packages):
"""Ensure apt packages are installed"""
package = " ".join(packages)
status = run("dpkg-query -W -f='${{Status}} ' {p}; true".format(p=package))
status = status.lower()
if 'no packages found' in status or 'not-installed' in status:
sudo("apt-get --yes install " + package)
return False
else:
return True
@runs_once
def update_apt(days=3, upgrade=False):
"""Update apt index if not update in last N days"""
# Check the apt-get update timestamp (works on Ubuntu only)
with settings(warn_only=True):
last_update = run(
"stat -c %Y /var/lib/apt/periodic/update-success-stamp")
if ('cannot stat' in last_update
or (time.time() - float(last_update)) > days * 86400):
sudo("apt-get --yes update")
if upgrade:
sudo("apt-get --yes upgrade")
### DEBIAN/UBUNTU HELPERS }}}
### {{{ VERSION TAGGING HELPERS
def make_version(ref=None):
"""Build git version string for current directory"""
cmd = 'git describe --tags --abbrev=6 {}'.format(ref or '')
with hide('commands'):
version = local(cmd, capture=True).strip()
if re.match('^v[0-9]', version):
version = version[1:]
# replacements to match semver.org build numbers
if '-' in version:
head, _, tail = version.partition('-')
count, _, sha = tail.partition('-g')
version = head + '+' + count + '-' + sha
return version
def rsync_git(local_path, remote_path, exclude=None, extra_opts=None,
version_file='version.txt'):
"""Rsync deploy a git repo. Write and compare version.txt"""
with settings(hide('output', 'running'), warn_only=True):
print(green('Version On Server: ' + run('cat ' + '{}/{}'.format(
remote_path, version_file)).strip()))
print(green('Now Deploying Version ' +
write_version(join(local_path, version_file))))
rsync(local_path, remote_path, exclude, extra_opts)
def tagversion(repo, level='patch', special=''):
"""Increment and return tagged version in git.
Increment levels are patch, minor and major.
Using semver.org versioning: {major}.{minor}.{patch}{special}
Special must start with a-z and consist of _a-zA-Z0-9.
"""
prepend = 'v'
with lcd(repo):
oldversion = local(
'git describe --abbrev=0 --tags', capture=True).strip()
if oldversion.startswith('v'):
oldversion = oldversion[1:]
else:
prepend = ''
major, minor, patch = [int(x) for x in re.split('\D', oldversion, 3)[:3]]
if special:
if not re.match('^[a-z]', special):
raise ValueError('Special must start with a-z')
if not re.match('[_a-zA-Z0-9]+', special):
raise ValueError('Must contain start with lowercase letter')
if level == 'major':
major, minor, patch = major + 1, 0, 0
elif level == 'minor':
major, minor, patch = major, minor + 1, 0
elif level == 'patch':
major, minor, patch = major, minor, patch + 1
version_string = '{}.{}.{}'.format(major, minor, patch) + special
with lcd(repo):
local('git tag -s --force {}{}'.format(prepend, version_string))
return version_string
def write_version(path, ref=None):
"""Update version file using git desribe"""
with lcd(dirname(path)):
version = make_version(ref)
if (env.get('full') or not os.path.exists(path)
or version != open(path).read().strip()):
with open(path, 'w') as out:
out.write(version + '\n')
return version
### VERSION TAGGING HELPERS }}}
### {{{ SPLUNK HELPERS
def splunk(cmd, user='admin', passwd='changeme'):
"""Authenticated call to splunk"""
return sudo('/opt/splunkforwarder/bin/splunk {c} -auth {u}:{p}'
.format(c=cmd, u=user, p=passwd))
def splunk_monitor(monitors):
"""Monitor a list of (path, sourcetype) pairs in splunk"""
if not exists('/opt/splunkforwarder'):
return
if not env.get('splunk_monitors'):
with hide('commands'):
env['splunk_monitors'] = str(splunk('list monitor'))
for path, sourcetype in monitors:
if path not in env['splunk_monitors']:
with hide('everything'):
run("touch '{path}'; true".format(path=path))
splunk("add monitor '{path}' -sourcetype {st}".format(
path=path, st=sourcetype))
env['splunk_monitors'] += '\n' + path
# SPLUNK HELPERS }}}
# vim:foldnestmax=1:foldenable:foldmethod=marker:
|
gpoulter/fablib | fablib.py | install_deb | python | def install_deb(pkgname, url):
status = run("dpkg-query -W -f='${{Status}}' {p}; true".format(p=pkgname))
if ('installed' not in status) or ('not-installed' in status):
deb = url.rpartition('/')[2]
debtmp = '/tmp/{}'.format(deb)
run("wget --no-check-certificate -qc -O '{}' '{}'".format(debtmp, url))
sudo("dpkg -i '{0}' && rm -f '{0}'".format(debtmp))
return True
else:
return False | Install package from custom deb hosted on S3.
Return true if package was installed by this invocation. | train | https://github.com/gpoulter/fablib/blob/5d14c4d998f79dd1aa3207063c3d06e30e3e2bf9/fablib.py#L178-L189 | null | """Utility functions for fabric tasks"""
from __future__ import print_function
from fabric.api import (env, get, hide, hosts, lcd, local, put, roles,
run, runs_once, settings, sudo)
from StringIO import StringIO
from contextlib import contextmanager
from fabric.colors import green
from fabric.contrib.files import exists
from fabric.contrib.project import rsync_project
from os.path import dirname, join
import os
import re
import time
### {{{ ROLES HELPERS
def default_roles(*role_list):
"""Decorate task with these roles by default, but override with -R, -H"""
def selectively_attach(func):
"""Only decorate if nothing specified on command line"""
# pylint: disable=W0142
if not env.roles and not env.hosts:
return roles(*role_list)(func)
else:
if env.hosts:
func = hosts(*env.hosts)(func)
if env.roles:
func = roles(*env.roles)(func)
return func
return selectively_attach
### ROLES HELPERS }}}
### {{{ FILE AND DIRECTORY HELPERS
def chown(dirs, user=None, group=None):
"""User sudo to set user and group ownership"""
if isinstance(dirs, basestring):
dirs = [dirs]
args = ' '.join(dirs)
if user and group:
return sudo('chown {}:{} {}'.format(user, group, args))
elif user:
return sudo('chown {} {}'.format(user, args))
elif group:
return sudo('chgrp {} {}'.format(group, args))
else:
return None
def chput(local_path=None, remote_path=None, user=None, group=None,
mode=None, use_sudo=True, mirror_local_mode=False, check=True):
"""Put file and set user and group ownership. Default to use sudo."""
# pylint: disable=R0913
result = None
if env.get('full') or not check or diff(local_path, remote_path):
result = put(local_path, remote_path, use_sudo,
mirror_local_mode, mode)
with hide('commands'):
chown(remote_path, user, group)
return result
def cron(name, timespec, user, command, environ=None, disable=False):
"""Create entry in /etc/cron.d"""
path = '/etc/cron.d/{}'.format(name)
if disable:
sudo('rm ' + path)
return
entry = '{}\t{}\t{}\n'.format(timespec, user, command)
if environ:
envstr = '\n'.join('{}={}'.format(k, v)
for k, v in environ.iteritems())
entry = '{}\n{}'.format(envstr, entry)
chput(StringIO(entry), path, use_sudo=True,
mode=0o644, user='root', group='root')
def diff(local_path, remote_path):
"""Return true if local and remote paths differ in contents"""
with hide('commands'):
if isinstance(local_path, basestring):
with open(local_path) as stream:
local_content = stream.read()
else:
pos = local_path.tell()
local_content = local_path.read()
local_path.seek(pos)
remote_content = StringIO()
with settings(hide('warnings'), warn_only=True):
if get(remote_path, remote_content).failed:
return True
return local_content.strip() != remote_content.getvalue().strip()
def file_exists(location):
"""Tests if there is a remote file at the given location."""
return run('test -e "{}" && echo OK ; true'
.format(location)).endswith("OK")
def md5sum(filename, use_sudo=False):
"""Return md5sum of remote file"""
runner = sudo if use_sudo else run
with hide('commands'):
return runner("md5sum '{}'".format(filename)).split()[0]
def mkdir(dirs, user=None, group=None, mode=None, use_sudo=True):
"""Create directory with sudo and octal mode, then set ownership."""
if isinstance(dirs, basestring):
dirs = [dirs]
runner = sudo if use_sudo else run
if dirs:
modearg = '-m {:o}'.format(mode) if mode else ''
cmd = 'mkdir -v -p {} {}'.format(modearg, ' '.join(dirs))
result = runner(cmd)
with hide('commands'):
chown(dirs, user, group)
return result
def rsync(local_path, remote_path, exclude=None, extra_opts=None):
"""Helper to rsync submodules across"""
if not local_path.endswith('/'):
local_path += '/'
exclude = exclude or []
exclude.extend(['*.egg-info', '*.pyc', '.git', '.gitignore',
'.gitmodules', '/build/', '/dist/'])
with hide('running'):
run("mkdir -p '{}'".format(remote_path))
return rsync_project(
remote_path, local_path, delete=True,
extra_opts='-i --omit-dir-times -FF ' +
(extra_opts if extra_opts else ''),
ssh_opts='-o StrictHostKeyChecking=no',
exclude=exclude)
@contextmanager
def tempput(local_path=None, remote_path=None, use_sudo=False,
mirror_local_mode=False, mode=None):
"""Put a file to remote and remove it afterwards"""
import warnings
warnings.simplefilter('ignore', RuntimeWarning)
if remote_path is None:
remote_path = os.tempnam()
put(local_path, remote_path, use_sudo, mirror_local_mode, mode)
yield remote_path
run("rm '{}'".format(remote_path))
@contextmanager
def watch(filenames, callback, use_sudo=False):
"""Call callback if any of filenames change during the context"""
filenames = [filenames] if isinstance(filenames, basestring) else filenames
old_md5 = {fn: md5sum(fn, use_sudo) for fn in filenames}
yield
for filename in filenames:
if md5sum(filename, use_sudo) != old_md5[filename]:
callback()
return
### FILE AND DIRECTORY HELPERS }}}
### {{{ DEBIAN/UBUNTU HELPERS
def debconf_set_selections(package, selections):
"""Given package and map config:(type,value), set selections"""
text = '\n'.join(' '.join([package, k, t, v]) for
k, (t, v) in selections.iteritems())
sudo('debconf-set-selections <<-HEREDOC\n{}\nHEREDOC'.format(text))
def package_ensure_apt(*packages):
"""Ensure apt packages are installed"""
package = " ".join(packages)
status = run("dpkg-query -W -f='${{Status}} ' {p}; true".format(p=package))
status = status.lower()
if 'no packages found' in status or 'not-installed' in status:
sudo("apt-get --yes install " + package)
return False
else:
return True
@runs_once
def update_apt(days=3, upgrade=False):
"""Update apt index if not update in last N days"""
# Check the apt-get update timestamp (works on Ubuntu only)
with settings(warn_only=True):
last_update = run(
"stat -c %Y /var/lib/apt/periodic/update-success-stamp")
if ('cannot stat' in last_update
or (time.time() - float(last_update)) > days * 86400):
sudo("apt-get --yes update")
if upgrade:
sudo("apt-get --yes upgrade")
### DEBIAN/UBUNTU HELPERS }}}
### {{{ VERSION TAGGING HELPERS
def make_version(ref=None):
"""Build git version string for current directory"""
cmd = 'git describe --tags --abbrev=6 {}'.format(ref or '')
with hide('commands'):
version = local(cmd, capture=True).strip()
if re.match('^v[0-9]', version):
version = version[1:]
# replacements to match semver.org build numbers
if '-' in version:
head, _, tail = version.partition('-')
count, _, sha = tail.partition('-g')
version = head + '+' + count + '-' + sha
return version
def rsync_git(local_path, remote_path, exclude=None, extra_opts=None,
version_file='version.txt'):
"""Rsync deploy a git repo. Write and compare version.txt"""
with settings(hide('output', 'running'), warn_only=True):
print(green('Version On Server: ' + run('cat ' + '{}/{}'.format(
remote_path, version_file)).strip()))
print(green('Now Deploying Version ' +
write_version(join(local_path, version_file))))
rsync(local_path, remote_path, exclude, extra_opts)
def tagversion(repo, level='patch', special=''):
"""Increment and return tagged version in git.
Increment levels are patch, minor and major.
Using semver.org versioning: {major}.{minor}.{patch}{special}
Special must start with a-z and consist of _a-zA-Z0-9.
"""
prepend = 'v'
with lcd(repo):
oldversion = local(
'git describe --abbrev=0 --tags', capture=True).strip()
if oldversion.startswith('v'):
oldversion = oldversion[1:]
else:
prepend = ''
major, minor, patch = [int(x) for x in re.split('\D', oldversion, 3)[:3]]
if special:
if not re.match('^[a-z]', special):
raise ValueError('Special must start with a-z')
if not re.match('[_a-zA-Z0-9]+', special):
raise ValueError('Must contain start with lowercase letter')
if level == 'major':
major, minor, patch = major + 1, 0, 0
elif level == 'minor':
major, minor, patch = major, minor + 1, 0
elif level == 'patch':
major, minor, patch = major, minor, patch + 1
version_string = '{}.{}.{}'.format(major, minor, patch) + special
with lcd(repo):
local('git tag -s --force {}{}'.format(prepend, version_string))
return version_string
def write_version(path, ref=None):
"""Update version file using git desribe"""
with lcd(dirname(path)):
version = make_version(ref)
if (env.get('full') or not os.path.exists(path)
or version != open(path).read().strip()):
with open(path, 'w') as out:
out.write(version + '\n')
return version
### VERSION TAGGING HELPERS }}}
### {{{ SPLUNK HELPERS
def splunk(cmd, user='admin', passwd='changeme'):
"""Authenticated call to splunk"""
return sudo('/opt/splunkforwarder/bin/splunk {c} -auth {u}:{p}'
.format(c=cmd, u=user, p=passwd))
def splunk_monitor(monitors):
"""Monitor a list of (path, sourcetype) pairs in splunk"""
if not exists('/opt/splunkforwarder'):
return
if not env.get('splunk_monitors'):
with hide('commands'):
env['splunk_monitors'] = str(splunk('list monitor'))
for path, sourcetype in monitors:
if path not in env['splunk_monitors']:
with hide('everything'):
run("touch '{path}'; true".format(path=path))
splunk("add monitor '{path}' -sourcetype {st}".format(
path=path, st=sourcetype))
env['splunk_monitors'] += '\n' + path
# SPLUNK HELPERS }}}
# vim:foldnestmax=1:foldenable:foldmethod=marker:
|
gpoulter/fablib | fablib.py | package_ensure_apt | python | def package_ensure_apt(*packages):
package = " ".join(packages)
status = run("dpkg-query -W -f='${{Status}} ' {p}; true".format(p=package))
status = status.lower()
if 'no packages found' in status or 'not-installed' in status:
sudo("apt-get --yes install " + package)
return False
else:
return True | Ensure apt packages are installed | train | https://github.com/gpoulter/fablib/blob/5d14c4d998f79dd1aa3207063c3d06e30e3e2bf9/fablib.py#L192-L201 | null | """Utility functions for fabric tasks"""
from __future__ import print_function
from fabric.api import (env, get, hide, hosts, lcd, local, put, roles,
run, runs_once, settings, sudo)
from StringIO import StringIO
from contextlib import contextmanager
from fabric.colors import green
from fabric.contrib.files import exists
from fabric.contrib.project import rsync_project
from os.path import dirname, join
import os
import re
import time
### {{{ ROLES HELPERS
def default_roles(*role_list):
"""Decorate task with these roles by default, but override with -R, -H"""
def selectively_attach(func):
"""Only decorate if nothing specified on command line"""
# pylint: disable=W0142
if not env.roles and not env.hosts:
return roles(*role_list)(func)
else:
if env.hosts:
func = hosts(*env.hosts)(func)
if env.roles:
func = roles(*env.roles)(func)
return func
return selectively_attach
### ROLES HELPERS }}}
### {{{ FILE AND DIRECTORY HELPERS
def chown(dirs, user=None, group=None):
"""User sudo to set user and group ownership"""
if isinstance(dirs, basestring):
dirs = [dirs]
args = ' '.join(dirs)
if user and group:
return sudo('chown {}:{} {}'.format(user, group, args))
elif user:
return sudo('chown {} {}'.format(user, args))
elif group:
return sudo('chgrp {} {}'.format(group, args))
else:
return None
def chput(local_path=None, remote_path=None, user=None, group=None,
mode=None, use_sudo=True, mirror_local_mode=False, check=True):
"""Put file and set user and group ownership. Default to use sudo."""
# pylint: disable=R0913
result = None
if env.get('full') or not check or diff(local_path, remote_path):
result = put(local_path, remote_path, use_sudo,
mirror_local_mode, mode)
with hide('commands'):
chown(remote_path, user, group)
return result
def cron(name, timespec, user, command, environ=None, disable=False):
"""Create entry in /etc/cron.d"""
path = '/etc/cron.d/{}'.format(name)
if disable:
sudo('rm ' + path)
return
entry = '{}\t{}\t{}\n'.format(timespec, user, command)
if environ:
envstr = '\n'.join('{}={}'.format(k, v)
for k, v in environ.iteritems())
entry = '{}\n{}'.format(envstr, entry)
chput(StringIO(entry), path, use_sudo=True,
mode=0o644, user='root', group='root')
def diff(local_path, remote_path):
"""Return true if local and remote paths differ in contents"""
with hide('commands'):
if isinstance(local_path, basestring):
with open(local_path) as stream:
local_content = stream.read()
else:
pos = local_path.tell()
local_content = local_path.read()
local_path.seek(pos)
remote_content = StringIO()
with settings(hide('warnings'), warn_only=True):
if get(remote_path, remote_content).failed:
return True
return local_content.strip() != remote_content.getvalue().strip()
def file_exists(location):
"""Tests if there is a remote file at the given location."""
return run('test -e "{}" && echo OK ; true'
.format(location)).endswith("OK")
def md5sum(filename, use_sudo=False):
"""Return md5sum of remote file"""
runner = sudo if use_sudo else run
with hide('commands'):
return runner("md5sum '{}'".format(filename)).split()[0]
def mkdir(dirs, user=None, group=None, mode=None, use_sudo=True):
"""Create directory with sudo and octal mode, then set ownership."""
if isinstance(dirs, basestring):
dirs = [dirs]
runner = sudo if use_sudo else run
if dirs:
modearg = '-m {:o}'.format(mode) if mode else ''
cmd = 'mkdir -v -p {} {}'.format(modearg, ' '.join(dirs))
result = runner(cmd)
with hide('commands'):
chown(dirs, user, group)
return result
def rsync(local_path, remote_path, exclude=None, extra_opts=None):
"""Helper to rsync submodules across"""
if not local_path.endswith('/'):
local_path += '/'
exclude = exclude or []
exclude.extend(['*.egg-info', '*.pyc', '.git', '.gitignore',
'.gitmodules', '/build/', '/dist/'])
with hide('running'):
run("mkdir -p '{}'".format(remote_path))
return rsync_project(
remote_path, local_path, delete=True,
extra_opts='-i --omit-dir-times -FF ' +
(extra_opts if extra_opts else ''),
ssh_opts='-o StrictHostKeyChecking=no',
exclude=exclude)
@contextmanager
def tempput(local_path=None, remote_path=None, use_sudo=False,
mirror_local_mode=False, mode=None):
"""Put a file to remote and remove it afterwards"""
import warnings
warnings.simplefilter('ignore', RuntimeWarning)
if remote_path is None:
remote_path = os.tempnam()
put(local_path, remote_path, use_sudo, mirror_local_mode, mode)
yield remote_path
run("rm '{}'".format(remote_path))
@contextmanager
def watch(filenames, callback, use_sudo=False):
"""Call callback if any of filenames change during the context"""
filenames = [filenames] if isinstance(filenames, basestring) else filenames
old_md5 = {fn: md5sum(fn, use_sudo) for fn in filenames}
yield
for filename in filenames:
if md5sum(filename, use_sudo) != old_md5[filename]:
callback()
return
### FILE AND DIRECTORY HELPERS }}}
### {{{ DEBIAN/UBUNTU HELPERS
def debconf_set_selections(package, selections):
"""Given package and map config:(type,value), set selections"""
text = '\n'.join(' '.join([package, k, t, v]) for
k, (t, v) in selections.iteritems())
sudo('debconf-set-selections <<-HEREDOC\n{}\nHEREDOC'.format(text))
def install_deb(pkgname, url):
"""Install package from custom deb hosted on S3.
Return true if package was installed by this invocation."""
status = run("dpkg-query -W -f='${{Status}}' {p}; true".format(p=pkgname))
if ('installed' not in status) or ('not-installed' in status):
deb = url.rpartition('/')[2]
debtmp = '/tmp/{}'.format(deb)
run("wget --no-check-certificate -qc -O '{}' '{}'".format(debtmp, url))
sudo("dpkg -i '{0}' && rm -f '{0}'".format(debtmp))
return True
else:
return False
@runs_once
def update_apt(days=3, upgrade=False):
"""Update apt index if not update in last N days"""
# Check the apt-get update timestamp (works on Ubuntu only)
with settings(warn_only=True):
last_update = run(
"stat -c %Y /var/lib/apt/periodic/update-success-stamp")
if ('cannot stat' in last_update
or (time.time() - float(last_update)) > days * 86400):
sudo("apt-get --yes update")
if upgrade:
sudo("apt-get --yes upgrade")
### DEBIAN/UBUNTU HELPERS }}}
### {{{ VERSION TAGGING HELPERS
def make_version(ref=None):
"""Build git version string for current directory"""
cmd = 'git describe --tags --abbrev=6 {}'.format(ref or '')
with hide('commands'):
version = local(cmd, capture=True).strip()
if re.match('^v[0-9]', version):
version = version[1:]
# replacements to match semver.org build numbers
if '-' in version:
head, _, tail = version.partition('-')
count, _, sha = tail.partition('-g')
version = head + '+' + count + '-' + sha
return version
def rsync_git(local_path, remote_path, exclude=None, extra_opts=None,
version_file='version.txt'):
"""Rsync deploy a git repo. Write and compare version.txt"""
with settings(hide('output', 'running'), warn_only=True):
print(green('Version On Server: ' + run('cat ' + '{}/{}'.format(
remote_path, version_file)).strip()))
print(green('Now Deploying Version ' +
write_version(join(local_path, version_file))))
rsync(local_path, remote_path, exclude, extra_opts)
def tagversion(repo, level='patch', special=''):
"""Increment and return tagged version in git.
Increment levels are patch, minor and major.
Using semver.org versioning: {major}.{minor}.{patch}{special}
Special must start with a-z and consist of _a-zA-Z0-9.
"""
prepend = 'v'
with lcd(repo):
oldversion = local(
'git describe --abbrev=0 --tags', capture=True).strip()
if oldversion.startswith('v'):
oldversion = oldversion[1:]
else:
prepend = ''
major, minor, patch = [int(x) for x in re.split('\D', oldversion, 3)[:3]]
if special:
if not re.match('^[a-z]', special):
raise ValueError('Special must start with a-z')
if not re.match('[_a-zA-Z0-9]+', special):
raise ValueError('Must contain start with lowercase letter')
if level == 'major':
major, minor, patch = major + 1, 0, 0
elif level == 'minor':
major, minor, patch = major, minor + 1, 0
elif level == 'patch':
major, minor, patch = major, minor, patch + 1
version_string = '{}.{}.{}'.format(major, minor, patch) + special
with lcd(repo):
local('git tag -s --force {}{}'.format(prepend, version_string))
return version_string
def write_version(path, ref=None):
"""Update version file using git desribe"""
with lcd(dirname(path)):
version = make_version(ref)
if (env.get('full') or not os.path.exists(path)
or version != open(path).read().strip()):
with open(path, 'w') as out:
out.write(version + '\n')
return version
### VERSION TAGGING HELPERS }}}
### {{{ SPLUNK HELPERS
def splunk(cmd, user='admin', passwd='changeme'):
"""Authenticated call to splunk"""
return sudo('/opt/splunkforwarder/bin/splunk {c} -auth {u}:{p}'
.format(c=cmd, u=user, p=passwd))
def splunk_monitor(monitors):
"""Monitor a list of (path, sourcetype) pairs in splunk"""
if not exists('/opt/splunkforwarder'):
return
if not env.get('splunk_monitors'):
with hide('commands'):
env['splunk_monitors'] = str(splunk('list monitor'))
for path, sourcetype in monitors:
if path not in env['splunk_monitors']:
with hide('everything'):
run("touch '{path}'; true".format(path=path))
splunk("add monitor '{path}' -sourcetype {st}".format(
path=path, st=sourcetype))
env['splunk_monitors'] += '\n' + path
# SPLUNK HELPERS }}}
# vim:foldnestmax=1:foldenable:foldmethod=marker:
|
gpoulter/fablib | fablib.py | update_apt | python | def update_apt(days=3, upgrade=False):
# Check the apt-get update timestamp (works on Ubuntu only)
with settings(warn_only=True):
last_update = run(
"stat -c %Y /var/lib/apt/periodic/update-success-stamp")
if ('cannot stat' in last_update
or (time.time() - float(last_update)) > days * 86400):
sudo("apt-get --yes update")
if upgrade:
sudo("apt-get --yes upgrade") | Update apt index if not update in last N days | train | https://github.com/gpoulter/fablib/blob/5d14c4d998f79dd1aa3207063c3d06e30e3e2bf9/fablib.py#L205-L215 | null | """Utility functions for fabric tasks"""
from __future__ import print_function
from fabric.api import (env, get, hide, hosts, lcd, local, put, roles,
run, runs_once, settings, sudo)
from StringIO import StringIO
from contextlib import contextmanager
from fabric.colors import green
from fabric.contrib.files import exists
from fabric.contrib.project import rsync_project
from os.path import dirname, join
import os
import re
import time
### {{{ ROLES HELPERS
def default_roles(*role_list):
"""Decorate task with these roles by default, but override with -R, -H"""
def selectively_attach(func):
"""Only decorate if nothing specified on command line"""
# pylint: disable=W0142
if not env.roles and not env.hosts:
return roles(*role_list)(func)
else:
if env.hosts:
func = hosts(*env.hosts)(func)
if env.roles:
func = roles(*env.roles)(func)
return func
return selectively_attach
### ROLES HELPERS }}}
### {{{ FILE AND DIRECTORY HELPERS
def chown(dirs, user=None, group=None):
"""User sudo to set user and group ownership"""
if isinstance(dirs, basestring):
dirs = [dirs]
args = ' '.join(dirs)
if user and group:
return sudo('chown {}:{} {}'.format(user, group, args))
elif user:
return sudo('chown {} {}'.format(user, args))
elif group:
return sudo('chgrp {} {}'.format(group, args))
else:
return None
def chput(local_path=None, remote_path=None, user=None, group=None,
mode=None, use_sudo=True, mirror_local_mode=False, check=True):
"""Put file and set user and group ownership. Default to use sudo."""
# pylint: disable=R0913
result = None
if env.get('full') or not check or diff(local_path, remote_path):
result = put(local_path, remote_path, use_sudo,
mirror_local_mode, mode)
with hide('commands'):
chown(remote_path, user, group)
return result
def cron(name, timespec, user, command, environ=None, disable=False):
"""Create entry in /etc/cron.d"""
path = '/etc/cron.d/{}'.format(name)
if disable:
sudo('rm ' + path)
return
entry = '{}\t{}\t{}\n'.format(timespec, user, command)
if environ:
envstr = '\n'.join('{}={}'.format(k, v)
for k, v in environ.iteritems())
entry = '{}\n{}'.format(envstr, entry)
chput(StringIO(entry), path, use_sudo=True,
mode=0o644, user='root', group='root')
def diff(local_path, remote_path):
"""Return true if local and remote paths differ in contents"""
with hide('commands'):
if isinstance(local_path, basestring):
with open(local_path) as stream:
local_content = stream.read()
else:
pos = local_path.tell()
local_content = local_path.read()
local_path.seek(pos)
remote_content = StringIO()
with settings(hide('warnings'), warn_only=True):
if get(remote_path, remote_content).failed:
return True
return local_content.strip() != remote_content.getvalue().strip()
def file_exists(location):
"""Tests if there is a remote file at the given location."""
return run('test -e "{}" && echo OK ; true'
.format(location)).endswith("OK")
def md5sum(filename, use_sudo=False):
"""Return md5sum of remote file"""
runner = sudo if use_sudo else run
with hide('commands'):
return runner("md5sum '{}'".format(filename)).split()[0]
def mkdir(dirs, user=None, group=None, mode=None, use_sudo=True):
"""Create directory with sudo and octal mode, then set ownership."""
if isinstance(dirs, basestring):
dirs = [dirs]
runner = sudo if use_sudo else run
if dirs:
modearg = '-m {:o}'.format(mode) if mode else ''
cmd = 'mkdir -v -p {} {}'.format(modearg, ' '.join(dirs))
result = runner(cmd)
with hide('commands'):
chown(dirs, user, group)
return result
def rsync(local_path, remote_path, exclude=None, extra_opts=None):
"""Helper to rsync submodules across"""
if not local_path.endswith('/'):
local_path += '/'
exclude = exclude or []
exclude.extend(['*.egg-info', '*.pyc', '.git', '.gitignore',
'.gitmodules', '/build/', '/dist/'])
with hide('running'):
run("mkdir -p '{}'".format(remote_path))
return rsync_project(
remote_path, local_path, delete=True,
extra_opts='-i --omit-dir-times -FF ' +
(extra_opts if extra_opts else ''),
ssh_opts='-o StrictHostKeyChecking=no',
exclude=exclude)
@contextmanager
def tempput(local_path=None, remote_path=None, use_sudo=False,
mirror_local_mode=False, mode=None):
"""Put a file to remote and remove it afterwards"""
import warnings
warnings.simplefilter('ignore', RuntimeWarning)
if remote_path is None:
remote_path = os.tempnam()
put(local_path, remote_path, use_sudo, mirror_local_mode, mode)
yield remote_path
run("rm '{}'".format(remote_path))
@contextmanager
def watch(filenames, callback, use_sudo=False):
"""Call callback if any of filenames change during the context"""
filenames = [filenames] if isinstance(filenames, basestring) else filenames
old_md5 = {fn: md5sum(fn, use_sudo) for fn in filenames}
yield
for filename in filenames:
if md5sum(filename, use_sudo) != old_md5[filename]:
callback()
return
### FILE AND DIRECTORY HELPERS }}}
### {{{ DEBIAN/UBUNTU HELPERS
def debconf_set_selections(package, selections):
"""Given package and map config:(type,value), set selections"""
text = '\n'.join(' '.join([package, k, t, v]) for
k, (t, v) in selections.iteritems())
sudo('debconf-set-selections <<-HEREDOC\n{}\nHEREDOC'.format(text))
def install_deb(pkgname, url):
"""Install package from custom deb hosted on S3.
Return true if package was installed by this invocation."""
status = run("dpkg-query -W -f='${{Status}}' {p}; true".format(p=pkgname))
if ('installed' not in status) or ('not-installed' in status):
deb = url.rpartition('/')[2]
debtmp = '/tmp/{}'.format(deb)
run("wget --no-check-certificate -qc -O '{}' '{}'".format(debtmp, url))
sudo("dpkg -i '{0}' && rm -f '{0}'".format(debtmp))
return True
else:
return False
def package_ensure_apt(*packages):
"""Ensure apt packages are installed"""
package = " ".join(packages)
status = run("dpkg-query -W -f='${{Status}} ' {p}; true".format(p=package))
status = status.lower()
if 'no packages found' in status or 'not-installed' in status:
sudo("apt-get --yes install " + package)
return False
else:
return True
@runs_once
### DEBIAN/UBUNTU HELPERS }}}
### {{{ VERSION TAGGING HELPERS
def make_version(ref=None):
"""Build git version string for current directory"""
cmd = 'git describe --tags --abbrev=6 {}'.format(ref or '')
with hide('commands'):
version = local(cmd, capture=True).strip()
if re.match('^v[0-9]', version):
version = version[1:]
# replacements to match semver.org build numbers
if '-' in version:
head, _, tail = version.partition('-')
count, _, sha = tail.partition('-g')
version = head + '+' + count + '-' + sha
return version
def rsync_git(local_path, remote_path, exclude=None, extra_opts=None,
version_file='version.txt'):
"""Rsync deploy a git repo. Write and compare version.txt"""
with settings(hide('output', 'running'), warn_only=True):
print(green('Version On Server: ' + run('cat ' + '{}/{}'.format(
remote_path, version_file)).strip()))
print(green('Now Deploying Version ' +
write_version(join(local_path, version_file))))
rsync(local_path, remote_path, exclude, extra_opts)
def tagversion(repo, level='patch', special=''):
"""Increment and return tagged version in git.
Increment levels are patch, minor and major.
Using semver.org versioning: {major}.{minor}.{patch}{special}
Special must start with a-z and consist of _a-zA-Z0-9.
"""
prepend = 'v'
with lcd(repo):
oldversion = local(
'git describe --abbrev=0 --tags', capture=True).strip()
if oldversion.startswith('v'):
oldversion = oldversion[1:]
else:
prepend = ''
major, minor, patch = [int(x) for x in re.split('\D', oldversion, 3)[:3]]
if special:
if not re.match('^[a-z]', special):
raise ValueError('Special must start with a-z')
if not re.match('[_a-zA-Z0-9]+', special):
raise ValueError('Must contain start with lowercase letter')
if level == 'major':
major, minor, patch = major + 1, 0, 0
elif level == 'minor':
major, minor, patch = major, minor + 1, 0
elif level == 'patch':
major, minor, patch = major, minor, patch + 1
version_string = '{}.{}.{}'.format(major, minor, patch) + special
with lcd(repo):
local('git tag -s --force {}{}'.format(prepend, version_string))
return version_string
def write_version(path, ref=None):
"""Update version file using git desribe"""
with lcd(dirname(path)):
version = make_version(ref)
if (env.get('full') or not os.path.exists(path)
or version != open(path).read().strip()):
with open(path, 'w') as out:
out.write(version + '\n')
return version
### VERSION TAGGING HELPERS }}}
### {{{ SPLUNK HELPERS
def splunk(cmd, user='admin', passwd='changeme'):
"""Authenticated call to splunk"""
return sudo('/opt/splunkforwarder/bin/splunk {c} -auth {u}:{p}'
.format(c=cmd, u=user, p=passwd))
def splunk_monitor(monitors):
"""Monitor a list of (path, sourcetype) pairs in splunk"""
if not exists('/opt/splunkforwarder'):
return
if not env.get('splunk_monitors'):
with hide('commands'):
env['splunk_monitors'] = str(splunk('list monitor'))
for path, sourcetype in monitors:
if path not in env['splunk_monitors']:
with hide('everything'):
run("touch '{path}'; true".format(path=path))
splunk("add monitor '{path}' -sourcetype {st}".format(
path=path, st=sourcetype))
env['splunk_monitors'] += '\n' + path
# SPLUNK HELPERS }}}
# vim:foldnestmax=1:foldenable:foldmethod=marker:
|
gpoulter/fablib | fablib.py | make_version | python | def make_version(ref=None):
cmd = 'git describe --tags --abbrev=6 {}'.format(ref or '')
with hide('commands'):
version = local(cmd, capture=True).strip()
if re.match('^v[0-9]', version):
version = version[1:]
# replacements to match semver.org build numbers
if '-' in version:
head, _, tail = version.partition('-')
count, _, sha = tail.partition('-g')
version = head + '+' + count + '-' + sha
return version | Build git version string for current directory | train | https://github.com/gpoulter/fablib/blob/5d14c4d998f79dd1aa3207063c3d06e30e3e2bf9/fablib.py#L221-L233 | null | """Utility functions for fabric tasks"""
from __future__ import print_function
from fabric.api import (env, get, hide, hosts, lcd, local, put, roles,
run, runs_once, settings, sudo)
from StringIO import StringIO
from contextlib import contextmanager
from fabric.colors import green
from fabric.contrib.files import exists
from fabric.contrib.project import rsync_project
from os.path import dirname, join
import os
import re
import time
### {{{ ROLES HELPERS
def default_roles(*role_list):
"""Decorate task with these roles by default, but override with -R, -H"""
def selectively_attach(func):
"""Only decorate if nothing specified on command line"""
# pylint: disable=W0142
if not env.roles and not env.hosts:
return roles(*role_list)(func)
else:
if env.hosts:
func = hosts(*env.hosts)(func)
if env.roles:
func = roles(*env.roles)(func)
return func
return selectively_attach
### ROLES HELPERS }}}
### {{{ FILE AND DIRECTORY HELPERS
def chown(dirs, user=None, group=None):
"""User sudo to set user and group ownership"""
if isinstance(dirs, basestring):
dirs = [dirs]
args = ' '.join(dirs)
if user and group:
return sudo('chown {}:{} {}'.format(user, group, args))
elif user:
return sudo('chown {} {}'.format(user, args))
elif group:
return sudo('chgrp {} {}'.format(group, args))
else:
return None
def chput(local_path=None, remote_path=None, user=None, group=None,
mode=None, use_sudo=True, mirror_local_mode=False, check=True):
"""Put file and set user and group ownership. Default to use sudo."""
# pylint: disable=R0913
result = None
if env.get('full') or not check or diff(local_path, remote_path):
result = put(local_path, remote_path, use_sudo,
mirror_local_mode, mode)
with hide('commands'):
chown(remote_path, user, group)
return result
def cron(name, timespec, user, command, environ=None, disable=False):
"""Create entry in /etc/cron.d"""
path = '/etc/cron.d/{}'.format(name)
if disable:
sudo('rm ' + path)
return
entry = '{}\t{}\t{}\n'.format(timespec, user, command)
if environ:
envstr = '\n'.join('{}={}'.format(k, v)
for k, v in environ.iteritems())
entry = '{}\n{}'.format(envstr, entry)
chput(StringIO(entry), path, use_sudo=True,
mode=0o644, user='root', group='root')
def diff(local_path, remote_path):
"""Return true if local and remote paths differ in contents"""
with hide('commands'):
if isinstance(local_path, basestring):
with open(local_path) as stream:
local_content = stream.read()
else:
pos = local_path.tell()
local_content = local_path.read()
local_path.seek(pos)
remote_content = StringIO()
with settings(hide('warnings'), warn_only=True):
if get(remote_path, remote_content).failed:
return True
return local_content.strip() != remote_content.getvalue().strip()
def file_exists(location):
"""Tests if there is a remote file at the given location."""
return run('test -e "{}" && echo OK ; true'
.format(location)).endswith("OK")
def md5sum(filename, use_sudo=False):
"""Return md5sum of remote file"""
runner = sudo if use_sudo else run
with hide('commands'):
return runner("md5sum '{}'".format(filename)).split()[0]
def mkdir(dirs, user=None, group=None, mode=None, use_sudo=True):
"""Create directory with sudo and octal mode, then set ownership."""
if isinstance(dirs, basestring):
dirs = [dirs]
runner = sudo if use_sudo else run
if dirs:
modearg = '-m {:o}'.format(mode) if mode else ''
cmd = 'mkdir -v -p {} {}'.format(modearg, ' '.join(dirs))
result = runner(cmd)
with hide('commands'):
chown(dirs, user, group)
return result
def rsync(local_path, remote_path, exclude=None, extra_opts=None):
"""Helper to rsync submodules across"""
if not local_path.endswith('/'):
local_path += '/'
exclude = exclude or []
exclude.extend(['*.egg-info', '*.pyc', '.git', '.gitignore',
'.gitmodules', '/build/', '/dist/'])
with hide('running'):
run("mkdir -p '{}'".format(remote_path))
return rsync_project(
remote_path, local_path, delete=True,
extra_opts='-i --omit-dir-times -FF ' +
(extra_opts if extra_opts else ''),
ssh_opts='-o StrictHostKeyChecking=no',
exclude=exclude)
@contextmanager
def tempput(local_path=None, remote_path=None, use_sudo=False,
mirror_local_mode=False, mode=None):
"""Put a file to remote and remove it afterwards"""
import warnings
warnings.simplefilter('ignore', RuntimeWarning)
if remote_path is None:
remote_path = os.tempnam()
put(local_path, remote_path, use_sudo, mirror_local_mode, mode)
yield remote_path
run("rm '{}'".format(remote_path))
@contextmanager
def watch(filenames, callback, use_sudo=False):
"""Call callback if any of filenames change during the context"""
filenames = [filenames] if isinstance(filenames, basestring) else filenames
old_md5 = {fn: md5sum(fn, use_sudo) for fn in filenames}
yield
for filename in filenames:
if md5sum(filename, use_sudo) != old_md5[filename]:
callback()
return
### FILE AND DIRECTORY HELPERS }}}
### {{{ DEBIAN/UBUNTU HELPERS
def debconf_set_selections(package, selections):
"""Given package and map config:(type,value), set selections"""
text = '\n'.join(' '.join([package, k, t, v]) for
k, (t, v) in selections.iteritems())
sudo('debconf-set-selections <<-HEREDOC\n{}\nHEREDOC'.format(text))
def install_deb(pkgname, url):
"""Install package from custom deb hosted on S3.
Return true if package was installed by this invocation."""
status = run("dpkg-query -W -f='${{Status}}' {p}; true".format(p=pkgname))
if ('installed' not in status) or ('not-installed' in status):
deb = url.rpartition('/')[2]
debtmp = '/tmp/{}'.format(deb)
run("wget --no-check-certificate -qc -O '{}' '{}'".format(debtmp, url))
sudo("dpkg -i '{0}' && rm -f '{0}'".format(debtmp))
return True
else:
return False
def package_ensure_apt(*packages):
"""Ensure apt packages are installed"""
package = " ".join(packages)
status = run("dpkg-query -W -f='${{Status}} ' {p}; true".format(p=package))
status = status.lower()
if 'no packages found' in status or 'not-installed' in status:
sudo("apt-get --yes install " + package)
return False
else:
return True
@runs_once
def update_apt(days=3, upgrade=False):
"""Update apt index if not update in last N days"""
# Check the apt-get update timestamp (works on Ubuntu only)
with settings(warn_only=True):
last_update = run(
"stat -c %Y /var/lib/apt/periodic/update-success-stamp")
if ('cannot stat' in last_update
or (time.time() - float(last_update)) > days * 86400):
sudo("apt-get --yes update")
if upgrade:
sudo("apt-get --yes upgrade")
### DEBIAN/UBUNTU HELPERS }}}
### {{{ VERSION TAGGING HELPERS
def rsync_git(local_path, remote_path, exclude=None, extra_opts=None,
version_file='version.txt'):
"""Rsync deploy a git repo. Write and compare version.txt"""
with settings(hide('output', 'running'), warn_only=True):
print(green('Version On Server: ' + run('cat ' + '{}/{}'.format(
remote_path, version_file)).strip()))
print(green('Now Deploying Version ' +
write_version(join(local_path, version_file))))
rsync(local_path, remote_path, exclude, extra_opts)
def tagversion(repo, level='patch', special=''):
"""Increment and return tagged version in git.
Increment levels are patch, minor and major.
Using semver.org versioning: {major}.{minor}.{patch}{special}
Special must start with a-z and consist of _a-zA-Z0-9.
"""
prepend = 'v'
with lcd(repo):
oldversion = local(
'git describe --abbrev=0 --tags', capture=True).strip()
if oldversion.startswith('v'):
oldversion = oldversion[1:]
else:
prepend = ''
major, minor, patch = [int(x) for x in re.split('\D', oldversion, 3)[:3]]
if special:
if not re.match('^[a-z]', special):
raise ValueError('Special must start with a-z')
if not re.match('[_a-zA-Z0-9]+$', special):
raise ValueError('Special must consist of _a-zA-Z0-9')
if level == 'major':
major, minor, patch = major + 1, 0, 0
elif level == 'minor':
major, minor, patch = major, minor + 1, 0
elif level == 'patch':
major, minor, patch = major, minor, patch + 1
version_string = '{}.{}.{}'.format(major, minor, patch) + special
with lcd(repo):
local('git tag -s --force {}{}'.format(prepend, version_string))
return version_string
def write_version(path, ref=None):
"""Update version file using git desribe"""
with lcd(dirname(path)):
version = make_version(ref)
if (env.get('full') or not os.path.exists(path)
or version != open(path).read().strip()):
with open(path, 'w') as out:
out.write(version + '\n')
return version
### VERSION TAGGING HELPERS }}}
### {{{ SPLUNK HELPERS
def splunk(cmd, user='admin', passwd='changeme'):
"""Authenticated call to splunk"""
return sudo('/opt/splunkforwarder/bin/splunk {c} -auth {u}:{p}'
.format(c=cmd, u=user, p=passwd))
def splunk_monitor(monitors):
"""Monitor a list of (path, sourcetype) pairs in splunk"""
if not exists('/opt/splunkforwarder'):
return
if not env.get('splunk_monitors'):
with hide('commands'):
env['splunk_monitors'] = str(splunk('list monitor'))
for path, sourcetype in monitors:
if path not in env['splunk_monitors']:
with hide('everything'):
run("touch '{path}'; true".format(path=path))
splunk("add monitor '{path}' -sourcetype {st}".format(
path=path, st=sourcetype))
env['splunk_monitors'] += '\n' + path
# SPLUNK HELPERS }}}
# vim:foldnestmax=1:foldenable:foldmethod=marker:
|
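A hedged usage sketch for the `watch` context manager shown above, paired with `chput`; the task name, config paths, and service name are hypothetical, and the fablib module above is assumed importable. It also assumes the remote file already exists so the initial md5sum succeeds.

# Hedged sketch: restart a service only if the pushed config actually changed.
# watch() snapshots md5sums on entry and fires the callback on exit for any
# file whose checksum differs; 'myapp' and its paths are hypothetical.
def deploy_config():
    def restart():
        sudo('service myapp restart')  # hypothetical service name
    with watch(['/etc/myapp.conf'], restart, use_sudo=True):
        chput('conf/myapp.conf', '/etc/myapp.conf',
              user='root', group='root', mode=0o644)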
gpoulter/fablib | fablib.py | rsync_git | python | def rsync_git(local_path, remote_path, exclude=None, extra_opts=None,
version_file='version.txt'):
with settings(hide('output', 'running'), warn_only=True):
print(green('Version On Server: ' + run('cat ' + '{}/{}'.format(
remote_path, version_file)).strip()))
print(green('Now Deploying Version ' +
write_version(join(local_path, version_file))))
rsync(local_path, remote_path, exclude, extra_opts) | Rsync deploy a git repo. Write and compare version.txt | train | https://github.com/gpoulter/fablib/blob/5d14c4d998f79dd1aa3207063c3d06e30e3e2bf9/fablib.py#L236-L244 | [
"def rsync(local_path, remote_path, exclude=None, extra_opts=None):\n \"\"\"Helper to rsync submodules across\"\"\"\n if not local_path.endswith('/'):\n local_path += '/'\n exclude = exclude or []\n exclude.extend(['*.egg-info', '*.pyc', '.git', '.gitignore',\n '.gitmodules', '/build/', '/dist/'])\n with hide('running'):\n run(\"mkdir -p '{}'\".format(remote_path))\n return rsync_project(\n remote_path, local_path, delete=True,\n extra_opts='-i --omit-dir-times -FF ' +\n (extra_opts if extra_opts else ''),\n ssh_opts='-o StrictHostKeyChecking=no',\n exclude=exclude)\n",
"def write_version(path, ref=None):\n \"\"\"Update version file using git desribe\"\"\"\n with lcd(dirname(path)):\n version = make_version(ref)\n if (env.get('full') or not os.path.exists(path)\n or version != open(path).read().strip()):\n with open(path, 'w') as out:\n out.write(version + '\\n')\n return version\n"
] | """Utility functions for fabric tasks"""
from __future__ import print_function
from fabric.api import (env, get, hide, hosts, lcd, local, put, roles,
run, runs_once, settings, sudo)
from StringIO import StringIO
from contextlib import contextmanager
from fabric.colors import green
from fabric.contrib.files import exists
from fabric.contrib.project import rsync_project
from os.path import dirname, join
import os
import re
import time
### {{{ ROLES HELPERS
def default_roles(*role_list):
"""Decorate task with these roles by default, but override with -R, -H"""
def selectively_attach(func):
"""Only decorate if nothing specified on command line"""
# pylint: disable=W0142
if not env.roles and not env.hosts:
return roles(*role_list)(func)
else:
if env.hosts:
func = hosts(*env.hosts)(func)
if env.roles:
func = roles(*env.roles)(func)
return func
return selectively_attach
### ROLES HELPERS }}}
### {{{ FILE AND DIRECTORY HELPERS
def chown(dirs, user=None, group=None):
"""User sudo to set user and group ownership"""
if isinstance(dirs, basestring):
dirs = [dirs]
args = ' '.join(dirs)
if user and group:
return sudo('chown {}:{} {}'.format(user, group, args))
elif user:
return sudo('chown {} {}'.format(user, args))
elif group:
return sudo('chgrp {} {}'.format(group, args))
else:
return None
def chput(local_path=None, remote_path=None, user=None, group=None,
mode=None, use_sudo=True, mirror_local_mode=False, check=True):
"""Put file and set user and group ownership. Default to use sudo."""
# pylint: disable=R0913
result = None
if env.get('full') or not check or diff(local_path, remote_path):
result = put(local_path, remote_path, use_sudo,
mirror_local_mode, mode)
with hide('commands'):
chown(remote_path, user, group)
return result
def cron(name, timespec, user, command, environ=None, disable=False):
"""Create entry in /etc/cron.d"""
path = '/etc/cron.d/{}'.format(name)
if disable:
sudo('rm ' + path)
return
entry = '{}\t{}\t{}\n'.format(timespec, user, command)
if environ:
envstr = '\n'.join('{}={}'.format(k, v)
for k, v in environ.iteritems())
entry = '{}\n{}'.format(envstr, entry)
chput(StringIO(entry), path, use_sudo=True,
mode=0o644, user='root', group='root')
def diff(local_path, remote_path):
"""Return true if local and remote paths differ in contents"""
with hide('commands'):
if isinstance(local_path, basestring):
with open(local_path) as stream:
local_content = stream.read()
else:
pos = local_path.tell()
local_content = local_path.read()
local_path.seek(pos)
remote_content = StringIO()
with settings(hide('warnings'), warn_only=True):
if get(remote_path, remote_content).failed:
return True
return local_content.strip() != remote_content.getvalue().strip()
def file_exists(location):
"""Tests if there is a remote file at the given location."""
return run('test -e "{}" && echo OK ; true'
.format(location)).endswith("OK")
def md5sum(filename, use_sudo=False):
"""Return md5sum of remote file"""
runner = sudo if use_sudo else run
with hide('commands'):
return runner("md5sum '{}'".format(filename)).split()[0]
def mkdir(dirs, user=None, group=None, mode=None, use_sudo=True):
"""Create directory with sudo and octal mode, then set ownership."""
if isinstance(dirs, basestring):
dirs = [dirs]
runner = sudo if use_sudo else run
if dirs:
modearg = '-m {:o}'.format(mode) if mode else ''
cmd = 'mkdir -v -p {} {}'.format(modearg, ' '.join(dirs))
result = runner(cmd)
with hide('commands'):
chown(dirs, user, group)
return result
def rsync(local_path, remote_path, exclude=None, extra_opts=None):
"""Helper to rsync submodules across"""
if not local_path.endswith('/'):
local_path += '/'
exclude = exclude or []
exclude.extend(['*.egg-info', '*.pyc', '.git', '.gitignore',
'.gitmodules', '/build/', '/dist/'])
with hide('running'):
run("mkdir -p '{}'".format(remote_path))
return rsync_project(
remote_path, local_path, delete=True,
extra_opts='-i --omit-dir-times -FF ' +
(extra_opts if extra_opts else ''),
ssh_opts='-o StrictHostKeyChecking=no',
exclude=exclude)
@contextmanager
def tempput(local_path=None, remote_path=None, use_sudo=False,
mirror_local_mode=False, mode=None):
"""Put a file to remote and remove it afterwards"""
import warnings
warnings.simplefilter('ignore', RuntimeWarning)
if remote_path is None:
remote_path = os.tempnam()
put(local_path, remote_path, use_sudo, mirror_local_mode, mode)
yield remote_path
run("rm '{}'".format(remote_path))
@contextmanager
def watch(filenames, callback, use_sudo=False):
"""Call callback if any of filenames change during the context"""
filenames = [filenames] if isinstance(filenames, basestring) else filenames
old_md5 = {fn: md5sum(fn, use_sudo) for fn in filenames}
yield
for filename in filenames:
if md5sum(filename, use_sudo) != old_md5[filename]:
callback()
return
### FILE AND DIRECTORY HELPERS }}}
### {{{ DEBIAN/UBUNTU HELPERS
def debconf_set_selections(package, selections):
"""Given package and map config:(type,value), set selections"""
text = '\n'.join(' '.join([package, k, t, v]) for
k, (t, v) in selections.iteritems())
sudo('debconf-set-selections <<-HEREDOC\n{}\nHEREDOC'.format(text))
def install_deb(pkgname, url):
"""Install package from custom deb hosted on S3.
Return true if package was installed by this invocation."""
status = run("dpkg-query -W -f='${{Status}}' {p}; true".format(p=pkgname))
if ('installed' not in status) or ('not-installed' in status):
deb = url.rpartition('/')[2]
debtmp = '/tmp/{}'.format(deb)
run("wget --no-check-certificate -qc -O '{}' '{}'".format(debtmp, url))
sudo("dpkg -i '{0}' && rm -f '{0}'".format(debtmp))
return True
else:
return False
def package_ensure_apt(*packages):
"""Ensure apt packages are installed"""
package = " ".join(packages)
status = run("dpkg-query -W -f='${{Status}} ' {p}; true".format(p=package))
status = status.lower()
if 'no packages found' in status or 'not-installed' in status:
sudo("apt-get --yes install " + package)
return False
else:
return True
@runs_once
def update_apt(days=3, upgrade=False):
"""Update apt index if not update in last N days"""
# Check the apt-get update timestamp (works on Ubuntu only)
with settings(warn_only=True):
last_update = run(
"stat -c %Y /var/lib/apt/periodic/update-success-stamp")
if ('cannot stat' in last_update
or (time.time() - float(last_update)) > days * 86400):
sudo("apt-get --yes update")
if upgrade:
sudo("apt-get --yes upgrade")
### DEBIAN/UBUNTU HELPERS }}}
### {{{ VERSION TAGGING HELPERS
def make_version(ref=None):
"""Build git version string for current directory"""
cmd = 'git describe --tags --abbrev=6 {}'.format(ref or '')
with hide('commands'):
version = local(cmd, capture=True).strip()
if re.match('^v[0-9]', version):
version = version[1:]
# replacements to match semver.org build numbers
if '-' in version:
head, _, tail = version.partition('-')
count, _, sha = tail.partition('-g')
version = head + '+' + count + '-' + sha
return version
def tagversion(repo, level='patch', special=''):
"""Increment and return tagged version in git.
Increment levels are patch, minor and major.
Using semver.org versioning: {major}.{minor}.{patch}{special}
Special must start with a-z and consist of _a-zA-Z0-9.
"""
prepend = 'v'
with lcd(repo):
oldversion = local(
'git describe --abbrev=0 --tags', capture=True).strip()
if oldversion.startswith('v'):
oldversion = oldversion[1:]
else:
prepend = ''
major, minor, patch = [int(x) for x in re.split('\D', oldversion, 3)[:3]]
if special:
if not re.match('^[a-z]', special):
raise ValueError('Special must start with a-z')
if not re.match('[_a-zA-Z0-9]+$', special):
raise ValueError('Special must consist of _a-zA-Z0-9')
if level == 'major':
major, minor, patch = major + 1, 0, 0
elif level == 'minor':
major, minor, patch = major, minor + 1, 0
elif level == 'patch':
major, minor, patch = major, minor, patch + 1
version_string = '{}.{}.{}'.format(major, minor, patch) + special
with lcd(repo):
local('git tag -s --force {}{}'.format(prepend, version_string))
return version_string
def write_version(path, ref=None):
"""Update version file using git desribe"""
with lcd(dirname(path)):
version = make_version(ref)
if (env.get('full') or not os.path.exists(path)
or version != open(path).read().strip()):
with open(path, 'w') as out:
out.write(version + '\n')
return version
### VERSION TAGGING HELPERS }}}
### {{{ SPLUNK HELPERS
def splunk(cmd, user='admin', passwd='changeme'):
"""Authenticated call to splunk"""
return sudo('/opt/splunkforwarder/bin/splunk {c} -auth {u}:{p}'
.format(c=cmd, u=user, p=passwd))
def splunk_monitor(monitors):
"""Monitor a list of (path, sourcetype) pairs in splunk"""
if not exists('/opt/splunkforwarder'):
return
if not env.get('splunk_monitors'):
with hide('commands'):
env['splunk_monitors'] = str(splunk('list monitor'))
for path, sourcetype in monitors:
if path not in env['splunk_monitors']:
with hide('everything'):
run("touch '{path}'; true".format(path=path))
splunk("add monitor '{path}' -sourcetype {st}".format(
path=path, st=sourcetype))
env['splunk_monitors'] += '\n' + path
# SPLUNK HELPERS }}}
# vim:foldnestmax=1:foldenable:foldmethod=marker:
|
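A hedged usage sketch for `rsync_git` as defined above: it prints the version currently on the server, rewrites the local version file from `git describe`, then rsyncs the tree. The paths below are hypothetical.

# Hypothetical deploy task; 'build/' and '/srv/myapp' are illustrative.
def deploy():
    rsync_git('build/', '/srv/myapp',
              exclude=['*.log'],           # merged with the default excludes
              version_file='version.txt')  # read remotely, rewritten locally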
gpoulter/fablib | fablib.py | tagversion | python | def tagversion(repo, level='patch', special=''):
prepend = 'v'
with lcd(repo):
oldversion = local(
'git describe --abbrev=0 --tags', capture=True).strip()
if oldversion.startswith('v'):
oldversion = oldversion[1:]
else:
prepend = ''
major, minor, patch = [int(x) for x in re.split('\D', oldversion, 3)[:3]]
if special:
if not re.match('^[a-z]', special):
raise ValueError('Special must start with a-z')
if not re.match('[_a-zA-Z0-9]+$', special):
raise ValueError('Special must consist of _a-zA-Z0-9')
if level == 'major':
major, minor, patch = major + 1, 0, 0
elif level == 'minor':
major, minor, patch = major, minor + 1, 0
elif level == 'patch':
major, minor, patch = major, minor, patch + 1
version_string = '{}.{}.{}'.format(major, minor, patch) + special
with lcd(repo):
local('git tag -s --force {}{}'.format(prepend, version_string))
return version_string | Increment and return tagged version in git.
Increment levels are patch, minor and major.
Using semver.org versioning: {major}.{minor}.{patch}{special}
Special must start with a-z and consist of _a-zA-Z0-9. | train | https://github.com/gpoulter/fablib/blob/5d14c4d998f79dd1aa3207063c3d06e30e3e2bf9/fablib.py#L247-L277 | null | """Utility functions for fabric tasks"""
from __future__ import print_function
from fabric.api import (env, get, hide, hosts, lcd, local, put, roles,
run, runs_once, settings, sudo)
from StringIO import StringIO
from contextlib import contextmanager
from fabric.colors import green
from fabric.contrib.files import exists
from fabric.contrib.project import rsync_project
from os.path import dirname, join
import os
import re
import time
### {{{ ROLES HELPERS
def default_roles(*role_list):
"""Decorate task with these roles by default, but override with -R, -H"""
def selectively_attach(func):
"""Only decorate if nothing specified on command line"""
# pylint: disable=W0142
if not env.roles and not env.hosts:
return roles(*role_list)(func)
else:
if env.hosts:
func = hosts(*env.hosts)(func)
if env.roles:
func = roles(*env.roles)(func)
return func
return selectively_attach
### ROLES HELPERS }}}
### {{{ FILE AND DIRECTORY HELPERS
def chown(dirs, user=None, group=None):
"""User sudo to set user and group ownership"""
if isinstance(dirs, basestring):
dirs = [dirs]
args = ' '.join(dirs)
if user and group:
return sudo('chown {}:{} {}'.format(user, group, args))
elif user:
return sudo('chown {} {}'.format(user, args))
elif group:
return sudo('chgrp {} {}'.format(group, args))
else:
return None
def chput(local_path=None, remote_path=None, user=None, group=None,
mode=None, use_sudo=True, mirror_local_mode=False, check=True):
"""Put file and set user and group ownership. Default to use sudo."""
# pylint: disable=R0913
result = None
if env.get('full') or not check or diff(local_path, remote_path):
result = put(local_path, remote_path, use_sudo,
mirror_local_mode, mode)
with hide('commands'):
chown(remote_path, user, group)
return result
def cron(name, timespec, user, command, environ=None, disable=False):
"""Create entry in /etc/cron.d"""
path = '/etc/cron.d/{}'.format(name)
if disable:
sudo('rm ' + path)
return
entry = '{}\t{}\t{}\n'.format(timespec, user, command)
if environ:
envstr = '\n'.join('{}={}'.format(k, v)
for k, v in environ.iteritems())
entry = '{}\n{}'.format(envstr, entry)
chput(StringIO(entry), path, use_sudo=True,
mode=0o644, user='root', group='root')
def diff(local_path, remote_path):
"""Return true if local and remote paths differ in contents"""
with hide('commands'):
if isinstance(local_path, basestring):
with open(local_path) as stream:
local_content = stream.read()
else:
pos = local_path.tell()
local_content = local_path.read()
local_path.seek(pos)
remote_content = StringIO()
with settings(hide('warnings'), warn_only=True):
if get(remote_path, remote_content).failed:
return True
return local_content.strip() != remote_content.getvalue().strip()
def file_exists(location):
"""Tests if there is a remote file at the given location."""
return run('test -e "{}" && echo OK ; true'
.format(location)).endswith("OK")
def md5sum(filename, use_sudo=False):
"""Return md5sum of remote file"""
runner = sudo if use_sudo else run
with hide('commands'):
return runner("md5sum '{}'".format(filename)).split()[0]
def mkdir(dirs, user=None, group=None, mode=None, use_sudo=True):
"""Create directory with sudo and octal mode, then set ownership."""
if isinstance(dirs, basestring):
dirs = [dirs]
runner = sudo if use_sudo else run
if dirs:
modearg = '-m {:o}'.format(mode) if mode else ''
cmd = 'mkdir -v -p {} {}'.format(modearg, ' '.join(dirs))
result = runner(cmd)
with hide('commands'):
chown(dirs, user, group)
return result
def rsync(local_path, remote_path, exclude=None, extra_opts=None):
"""Helper to rsync submodules across"""
if not local_path.endswith('/'):
local_path += '/'
exclude = exclude or []
exclude.extend(['*.egg-info', '*.pyc', '.git', '.gitignore',
'.gitmodules', '/build/', '/dist/'])
with hide('running'):
run("mkdir -p '{}'".format(remote_path))
return rsync_project(
remote_path, local_path, delete=True,
extra_opts='-i --omit-dir-times -FF ' +
(extra_opts if extra_opts else ''),
ssh_opts='-o StrictHostKeyChecking=no',
exclude=exclude)
@contextmanager
def tempput(local_path=None, remote_path=None, use_sudo=False,
mirror_local_mode=False, mode=None):
"""Put a file to remote and remove it afterwards"""
import warnings
warnings.simplefilter('ignore', RuntimeWarning)
if remote_path is None:
remote_path = os.tempnam()
put(local_path, remote_path, use_sudo, mirror_local_mode, mode)
yield remote_path
run("rm '{}'".format(remote_path))
@contextmanager
def watch(filenames, callback, use_sudo=False):
"""Call callback if any of filenames change during the context"""
filenames = [filenames] if isinstance(filenames, basestring) else filenames
old_md5 = {fn: md5sum(fn, use_sudo) for fn in filenames}
yield
for filename in filenames:
if md5sum(filename, use_sudo) != old_md5[filename]:
callback()
return
### FILE AND DIRECTORY HELPERS }}}
### {{{ DEBIAN/UBUNTU HELPERS
def debconf_set_selections(package, selections):
"""Given package and map config:(type,value), set selections"""
text = '\n'.join(' '.join([package, k, t, v]) for
k, (t, v) in selections.iteritems())
sudo('debconf-set-selections <<-HEREDOC\n{}\nHEREDOC'.format(text))
def install_deb(pkgname, url):
"""Install package from custom deb hosted on S3.
Return true if package was installed by this invocation."""
status = run("dpkg-query -W -f='${{Status}}' {p}; true".format(p=pkgname))
if ('installed' not in status) or ('not-installed' in status):
deb = url.rpartition('/')[2]
debtmp = '/tmp/{}'.format(deb)
run("wget --no-check-certificate -qc -O '{}' '{}'".format(debtmp, url))
sudo("dpkg -i '{0}' && rm -f '{0}'".format(debtmp))
return True
else:
return False
def package_ensure_apt(*packages):
"""Ensure apt packages are installed"""
package = " ".join(packages)
status = run("dpkg-query -W -f='${{Status}} ' {p}; true".format(p=package))
status = status.lower()
if 'no packages found' in status or 'not-installed' in status:
sudo("apt-get --yes install " + package)
return False
else:
return True
@runs_once
def update_apt(days=3, upgrade=False):
"""Update apt index if not update in last N days"""
# Check the apt-get update timestamp (works on Ubuntu only)
with settings(warn_only=True):
last_update = run(
"stat -c %Y /var/lib/apt/periodic/update-success-stamp")
if ('cannot stat' in last_update
or (time.time() - float(last_update)) > days * 86400):
sudo("apt-get --yes update")
if upgrade:
sudo("apt-get --yes upgrade")
### DEBIAN/UBUNTU HELPERS }}}
### {{{ VERSION TAGGING HELPERS
def make_version(ref=None):
"""Build git version string for current directory"""
cmd = 'git describe --tags --abbrev=6 {}'.format(ref or '')
with hide('commands'):
version = local(cmd, capture=True).strip()
if re.match('^v[0-9]', version):
version = version[1:]
# replacements to match semver.org build numbers
if '-' in version:
head, _, tail = version.partition('-')
count, _, sha = tail.partition('-g')
version = head + '+' + count + '-' + sha
return version
def rsync_git(local_path, remote_path, exclude=None, extra_opts=None,
version_file='version.txt'):
"""Rsync deploy a git repo. Write and compare version.txt"""
with settings(hide('output', 'running'), warn_only=True):
print(green('Version On Server: ' + run('cat ' + '{}/{}'.format(
remote_path, version_file)).strip()))
print(green('Now Deploying Version ' +
write_version(join(local_path, version_file))))
rsync(local_path, remote_path, exclude, extra_opts)
def write_version(path, ref=None):
"""Update version file using git desribe"""
with lcd(dirname(path)):
version = make_version(ref)
if (env.get('full') or not os.path.exists(path)
or version != open(path).read().strip()):
with open(path, 'w') as out:
out.write(version + '\n')
return version
### VERSION TAGGING HELPERS }}}
### {{{ SPLUNK HELPERS
def splunk(cmd, user='admin', passwd='changeme'):
"""Authenticated call to splunk"""
return sudo('/opt/splunkforwarder/bin/splunk {c} -auth {u}:{p}'
.format(c=cmd, u=user, p=passwd))
def splunk_monitor(monitors):
"""Monitor a list of (path, sourcetype) pairs in splunk"""
if not exists('/opt/splunkforwarder'):
return
if not env.get('splunk_monitors'):
with hide('commands'):
env['splunk_monitors'] = str(splunk('list monitor'))
for path, sourcetype in monitors:
if path not in env['splunk_monitors']:
with hide('everything'):
run("touch '{path}'; true".format(path=path))
splunk("add monitor '{path}' -sourcetype {st}".format(
path=path, st=sourcetype))
env['splunk_monitors'] += '\n' + path
# SPLUNK HELPERS }}}
# vim:foldnestmax=1:foldenable:foldmethod=marker:
|
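A hedged example of `tagversion`, assuming the repository already carries at least one semver-style tag (the latest tag is parsed via `git describe --abbrev=0 --tags`) and that GPG signing is configured, since the tag is created with `git tag -s`.

# Each call reads the current latest tag, so results compound.
# Starting from an illustrative existing tag v1.2.3:
tagversion('.', level='patch')                 # tags v1.2.4, returns '1.2.4'
tagversion('.', level='minor')                 # then tags v1.3.0
tagversion('.', level='major', special='rc1')  # then tags v2.0.0rc1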
gpoulter/fablib | fablib.py | write_version | python | def write_version(path, ref=None):
with lcd(dirname(path)):
version = make_version(ref)
if (env.get('full') or not os.path.exists(path)
or version != open(path).read().strip()):
with open(path, 'w') as out:
out.write(version + '\n')
return version | Update version file using git describe | train | https://github.com/gpoulter/fablib/blob/5d14c4d998f79dd1aa3207063c3d06e30e3e2bf9/fablib.py#L280-L288 | [
"def make_version(ref=None):\n \"\"\"Build git version string for current directory\"\"\"\n cmd = 'git describe --tags --abbrev=6 {}'.format(ref or '')\n with hide('commands'):\n version = local(cmd, capture=True).strip()\n if re.match('^v[0-9]', version):\n version = version[1:]\n # replacements to match semver.org build numbers\n if '-' in version:\n head, _, tail = version.partition('-')\n count, _, sha = tail.partition('-g')\n version = head + '+' + count + '-' + sha\n return version\n"
] | """Utility functions for fabric tasks"""
from __future__ import print_function
from fabric.api import (env, get, hide, hosts, lcd, local, put, roles,
run, runs_once, settings, sudo)
from StringIO import StringIO
from contextlib import contextmanager
from fabric.colors import green
from fabric.contrib.files import exists
from fabric.contrib.project import rsync_project
from os.path import dirname, join
import os
import re
import time
### {{{ ROLES HELPERS
def default_roles(*role_list):
"""Decorate task with these roles by default, but override with -R, -H"""
def selectively_attach(func):
"""Only decorate if nothing specified on command line"""
# pylint: disable=W0142
if not env.roles and not env.hosts:
return roles(*role_list)(func)
else:
if env.hosts:
func = hosts(*env.hosts)(func)
if env.roles:
func = roles(*env.roles)(func)
return func
return selectively_attach
### ROLES HELPERS }}}
### {{{ FILE AND DIRECTORY HELPERS
def chown(dirs, user=None, group=None):
"""User sudo to set user and group ownership"""
if isinstance(dirs, basestring):
dirs = [dirs]
args = ' '.join(dirs)
if user and group:
return sudo('chown {}:{} {}'.format(user, group, args))
elif user:
return sudo('chown {} {}'.format(user, args))
elif group:
return sudo('chgrp {} {}'.format(group, args))
else:
return None
def chput(local_path=None, remote_path=None, user=None, group=None,
mode=None, use_sudo=True, mirror_local_mode=False, check=True):
"""Put file and set user and group ownership. Default to use sudo."""
# pylint: disable=R0913
result = None
if env.get('full') or not check or diff(local_path, remote_path):
result = put(local_path, remote_path, use_sudo,
mirror_local_mode, mode)
with hide('commands'):
chown(remote_path, user, group)
return result
def cron(name, timespec, user, command, environ=None, disable=False):
"""Create entry in /etc/cron.d"""
path = '/etc/cron.d/{}'.format(name)
if disable:
sudo('rm ' + path)
return
entry = '{}\t{}\t{}\n'.format(timespec, user, command)
if environ:
envstr = '\n'.join('{}={}'.format(k, v)
for k, v in environ.iteritems())
entry = '{}\n{}'.format(envstr, entry)
chput(StringIO(entry), path, use_sudo=True,
mode=0o644, user='root', group='root')
def diff(local_path, remote_path):
"""Return true if local and remote paths differ in contents"""
with hide('commands'):
if isinstance(local_path, basestring):
with open(local_path) as stream:
local_content = stream.read()
else:
pos = local_path.tell()
local_content = local_path.read()
local_path.seek(pos)
remote_content = StringIO()
with settings(hide('warnings'), warn_only=True):
if get(remote_path, remote_content).failed:
return True
return local_content.strip() != remote_content.getvalue().strip()
def file_exists(location):
"""Tests if there is a remote file at the given location."""
return run('test -e "{}" && echo OK ; true'
.format(location)).endswith("OK")
def md5sum(filename, use_sudo=False):
"""Return md5sum of remote file"""
runner = sudo if use_sudo else run
with hide('commands'):
return runner("md5sum '{}'".format(filename)).split()[0]
def mkdir(dirs, user=None, group=None, mode=None, use_sudo=True):
"""Create directory with sudo and octal mode, then set ownership."""
if isinstance(dirs, basestring):
dirs = [dirs]
runner = sudo if use_sudo else run
if dirs:
modearg = '-m {:o}'.format(mode) if mode else ''
cmd = 'mkdir -v -p {} {}'.format(modearg, ' '.join(dirs))
result = runner(cmd)
with hide('commands'):
chown(dirs, user, group)
return result
def rsync(local_path, remote_path, exclude=None, extra_opts=None):
"""Helper to rsync submodules across"""
if not local_path.endswith('/'):
local_path += '/'
exclude = exclude or []
exclude.extend(['*.egg-info', '*.pyc', '.git', '.gitignore',
'.gitmodules', '/build/', '/dist/'])
with hide('running'):
run("mkdir -p '{}'".format(remote_path))
return rsync_project(
remote_path, local_path, delete=True,
extra_opts='-i --omit-dir-times -FF ' +
(extra_opts if extra_opts else ''),
ssh_opts='-o StrictHostKeyChecking=no',
exclude=exclude)
@contextmanager
def tempput(local_path=None, remote_path=None, use_sudo=False,
mirror_local_mode=False, mode=None):
"""Put a file to remote and remove it afterwards"""
import warnings
warnings.simplefilter('ignore', RuntimeWarning)
if remote_path is None:
remote_path = os.tempnam()
put(local_path, remote_path, use_sudo, mirror_local_mode, mode)
yield remote_path
run("rm '{}'".format(remote_path))
@contextmanager
def watch(filenames, callback, use_sudo=False):
"""Call callback if any of filenames change during the context"""
filenames = [filenames] if isinstance(filenames, basestring) else filenames
old_md5 = {fn: md5sum(fn, use_sudo) for fn in filenames}
yield
for filename in filenames:
if md5sum(filename, use_sudo) != old_md5[filename]:
callback()
return
### FILE AND DIRECTORY HELPERS }}}
### {{{ DEBIAN/UBUNTU HELPERS
def debconf_set_selections(package, selections):
"""Given package and map config:(type,value), set selections"""
text = '\n'.join(' '.join([package, k, t, v]) for
k, (t, v) in selections.iteritems())
sudo('debconf-set-selections <<-HEREDOC\n{}\nHEREDOC'.format(text))
def install_deb(pkgname, url):
"""Install package from custom deb hosted on S3.
Return true if package was installed by this invocation."""
status = run("dpkg-query -W -f='${{Status}}' {p}; true".format(p=pkgname))
if ('installed' not in status) or ('not-installed' in status):
deb = url.rpartition('/')[2]
debtmp = '/tmp/{}'.format(deb)
run("wget --no-check-certificate -qc -O '{}' '{}'".format(debtmp, url))
sudo("dpkg -i '{0}' && rm -f '{0}'".format(debtmp))
return True
else:
return False
def package_ensure_apt(*packages):
"""Ensure apt packages are installed"""
package = " ".join(packages)
status = run("dpkg-query -W -f='${{Status}} ' {p}; true".format(p=package))
status = status.lower()
if 'no packages found' in status or 'not-installed' in status:
sudo("apt-get --yes install " + package)
return False
else:
return True
@runs_once
def update_apt(days=3, upgrade=False):
"""Update apt index if not update in last N days"""
# Check the apt-get update timestamp (works on Ubuntu only)
with settings(warn_only=True):
last_update = run(
"stat -c %Y /var/lib/apt/periodic/update-success-stamp")
if ('cannot stat' in last_update
or (time.time() - float(last_update)) > days * 86400):
sudo("apt-get --yes update")
if upgrade:
sudo("apt-get --yes upgrade")
### DEBIAN/UBUNTU HELPERS }}}
### {{{ VERSION TAGGING HELPERS
def make_version(ref=None):
"""Build git version string for current directory"""
cmd = 'git describe --tags --abbrev=6 {}'.format(ref or '')
with hide('commands'):
version = local(cmd, capture=True).strip()
if re.match('^v[0-9]', version):
version = version[1:]
# replacements to match semver.org build numbers
if '-' in version:
head, _, tail = version.partition('-')
count, _, sha = tail.partition('-g')
version = head + '+' + count + '-' + sha
return version
def rsync_git(local_path, remote_path, exclude=None, extra_opts=None,
version_file='version.txt'):
"""Rsync deploy a git repo. Write and compare version.txt"""
with settings(hide('output', 'running'), warn_only=True):
print(green('Version On Server: ' + run('cat ' + '{}/{}'.format(
remote_path, version_file)).strip()))
print(green('Now Deploying Version ' +
write_version(join(local_path, version_file))))
rsync(local_path, remote_path, exclude, extra_opts)
def tagversion(repo, level='patch', special=''):
"""Increment and return tagged version in git.
Increment levels are patch, minor and major.
Using semver.org versioning: {major}.{minor}.{patch}{special}
Special must start with a-z and consist of _a-zA-Z0-9.
"""
prepend = 'v'
with lcd(repo):
oldversion = local(
'git describe --abbrev=0 --tags', capture=True).strip()
if oldversion.startswith('v'):
oldversion = oldversion[1:]
else:
prepend = ''
major, minor, patch = [int(x) for x in re.split('\D', oldversion, 3)[:3]]
if special:
if not re.match('^[a-z]', special):
raise ValueError('Special must start with a-z')
if not re.match('[_a-zA-Z0-9]+$', special):
raise ValueError('Special must consist of _a-zA-Z0-9')
if level == 'major':
major, minor, patch = major + 1, 0, 0
elif level == 'minor':
major, minor, patch = major, minor + 1, 0
elif level == 'patch':
major, minor, patch = major, minor, patch + 1
version_string = '{}.{}.{}'.format(major, minor, patch) + special
with lcd(repo):
local('git tag -s --force {}{}'.format(prepend, version_string))
return version_string
### VERSION TAGGING HELPERS }}}
### {{{ SPLUNK HELPERS
def splunk(cmd, user='admin', passwd='changeme'):
"""Authenticated call to splunk"""
return sudo('/opt/splunkforwarder/bin/splunk {c} -auth {u}:{p}'
.format(c=cmd, u=user, p=passwd))
def splunk_monitor(monitors):
"""Monitor a list of (path, sourcetype) pairs in splunk"""
if not exists('/opt/splunkforwarder'):
return
if not env.get('splunk_monitors'):
with hide('commands'):
env['splunk_monitors'] = str(splunk('list monitor'))
for path, sourcetype in monitors:
if path not in env['splunk_monitors']:
with hide('everything'):
run("touch '{path}'; true".format(path=path))
splunk("add monitor '{path}' -sourcetype {st}".format(
path=path, st=sourcetype))
env['splunk_monitors'] += '\n' + path
# SPLUNK HELPERS }}}
# vim:foldnestmax=1:foldenable:foldmethod=marker:
|
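A hedged example of `write_version`: it derives the version from `git describe` via `make_version` and rewrites the file only when the content would change or `env.full` is set. The path is hypothetical.

# Inside a git checkout with at least one tag (illustrative output shown):
version = write_version('myapp/version.txt')  # hypothetical path
print(version)  # e.g. '1.2.3' at a tag, '1.2.3+4-abc123' four commits past it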
gpoulter/fablib | fablib.py | splunk | python | def splunk(cmd, user='admin', passwd='changeme'):
return sudo('/opt/splunkforwarder/bin/splunk {c} -auth {u}:{p}'
.format(c=cmd, u=user, p=passwd)) | Authenticated call to splunk | train | https://github.com/gpoulter/fablib/blob/5d14c4d998f79dd1aa3207063c3d06e30e3e2bf9/fablib.py#L294-L297 | null | """Utility functions for fabric tasks"""
from __future__ import print_function
from fabric.api import (env, get, hide, hosts, lcd, local, put, roles,
run, runs_once, settings, sudo)
from StringIO import StringIO
from contextlib import contextmanager
from fabric.colors import green
from fabric.contrib.files import exists
from fabric.contrib.project import rsync_project
from os.path import dirname, join
import os
import re
import time
### {{{ ROLES HELPERS
def default_roles(*role_list):
"""Decorate task with these roles by default, but override with -R, -H"""
def selectively_attach(func):
"""Only decorate if nothing specified on command line"""
# pylint: disable=W0142
if not env.roles and not env.hosts:
return roles(*role_list)(func)
else:
if env.hosts:
func = hosts(*env.hosts)(func)
if env.roles:
func = roles(*env.roles)(func)
return func
return selectively_attach
### ROLES HELPERS }}}
### {{{ FILE AND DIRECTORY HELPERS
def chown(dirs, user=None, group=None):
"""User sudo to set user and group ownership"""
if isinstance(dirs, basestring):
dirs = [dirs]
args = ' '.join(dirs)
if user and group:
return sudo('chown {}:{} {}'.format(user, group, args))
elif user:
return sudo('chown {} {}'.format(user, args))
elif group:
return sudo('chgrp {} {}'.format(group, args))
else:
return None
def chput(local_path=None, remote_path=None, user=None, group=None,
mode=None, use_sudo=True, mirror_local_mode=False, check=True):
"""Put file and set user and group ownership. Default to use sudo."""
# pylint: disable=R0913
result = None
if env.get('full') or not check or diff(local_path, remote_path):
result = put(local_path, remote_path, use_sudo,
mirror_local_mode, mode)
with hide('commands'):
chown(remote_path, user, group)
return result
def cron(name, timespec, user, command, environ=None, disable=False):
"""Create entry in /etc/cron.d"""
path = '/etc/cron.d/{}'.format(name)
if disable:
sudo('rm ' + path)
return
entry = '{}\t{}\t{}\n'.format(timespec, user, command)
if environ:
envstr = '\n'.join('{}={}'.format(k, v)
for k, v in environ.iteritems())
entry = '{}\n{}'.format(envstr, entry)
chput(StringIO(entry), path, use_sudo=True,
mode=0o644, user='root', group='root')
def diff(local_path, remote_path):
"""Return true if local and remote paths differ in contents"""
with hide('commands'):
if isinstance(local_path, basestring):
with open(local_path) as stream:
local_content = stream.read()
else:
pos = local_path.tell()
local_content = local_path.read()
local_path.seek(pos)
remote_content = StringIO()
with settings(hide('warnings'), warn_only=True):
if get(remote_path, remote_content).failed:
return True
return local_content.strip() != remote_content.getvalue().strip()
def file_exists(location):
"""Tests if there is a remote file at the given location."""
return run('test -e "{}" && echo OK ; true'
.format(location)).endswith("OK")
def md5sum(filename, use_sudo=False):
"""Return md5sum of remote file"""
runner = sudo if use_sudo else run
with hide('commands'):
return runner("md5sum '{}'".format(filename)).split()[0]
def mkdir(dirs, user=None, group=None, mode=None, use_sudo=True):
"""Create directory with sudo and octal mode, then set ownership."""
if isinstance(dirs, basestring):
dirs = [dirs]
runner = sudo if use_sudo else run
if dirs:
modearg = '-m {:o}'.format(mode) if mode else ''
cmd = 'mkdir -v -p {} {}'.format(modearg, ' '.join(dirs))
result = runner(cmd)
with hide('commands'):
chown(dirs, user, group)
return result
def rsync(local_path, remote_path, exclude=None, extra_opts=None):
"""Helper to rsync submodules across"""
if not local_path.endswith('/'):
local_path += '/'
exclude = exclude or []
exclude.extend(['*.egg-info', '*.pyc', '.git', '.gitignore',
'.gitmodules', '/build/', '/dist/'])
with hide('running'):
run("mkdir -p '{}'".format(remote_path))
return rsync_project(
remote_path, local_path, delete=True,
extra_opts='-i --omit-dir-times -FF ' +
(extra_opts if extra_opts else ''),
ssh_opts='-o StrictHostKeyChecking=no',
exclude=exclude)
@contextmanager
def tempput(local_path=None, remote_path=None, use_sudo=False,
mirror_local_mode=False, mode=None):
"""Put a file to remote and remove it afterwards"""
import warnings
warnings.simplefilter('ignore', RuntimeWarning)
if remote_path is None:
remote_path = os.tempnam()
put(local_path, remote_path, use_sudo, mirror_local_mode, mode)
yield remote_path
run("rm '{}'".format(remote_path))
@contextmanager
def watch(filenames, callback, use_sudo=False):
"""Call callback if any of filenames change during the context"""
filenames = [filenames] if isinstance(filenames, basestring) else filenames
old_md5 = {fn: md5sum(fn, use_sudo) for fn in filenames}
yield
for filename in filenames:
if md5sum(filename, use_sudo) != old_md5[filename]:
callback()
return
### FILE AND DIRECTORY HELPERS }}}
### {{{ DEBIAN/UBUNTU HELPERS
def debconf_set_selections(package, selections):
"""Given package and map config:(type,value), set selections"""
text = '\n'.join(' '.join([package, k, t, v]) for
k, (t, v) in selections.iteritems())
sudo('debconf-set-selections <<-HEREDOC\n{}\nHEREDOC'.format(text))
def install_deb(pkgname, url):
"""Install package from custom deb hosted on S3.
Return true if package was installed by this invocation."""
status = run("dpkg-query -W -f='${{Status}}' {p}; true".format(p=pkgname))
if ('installed' not in status) or ('not-installed' in status):
deb = url.rpartition('/')[2]
debtmp = '/tmp/{}'.format(deb)
run("wget --no-check-certificate -qc -O '{}' '{}'".format(debtmp, url))
sudo("dpkg -i '{0}' && rm -f '{0}'".format(debtmp))
return True
else:
return False
def package_ensure_apt(*packages):
"""Ensure apt packages are installed"""
package = " ".join(packages)
status = run("dpkg-query -W -f='${{Status}} ' {p}; true".format(p=package))
status = status.lower()
if 'no packages found' in status or 'not-installed' in status:
sudo("apt-get --yes install " + package)
return False
else:
return True
@runs_once
def update_apt(days=3, upgrade=False):
"""Update apt index if not update in last N days"""
# Check the apt-get update timestamp (works on Ubuntu only)
with settings(warn_only=True):
last_update = run(
"stat -c %Y /var/lib/apt/periodic/update-success-stamp")
if ('cannot stat' in last_update
or (time.time() - float(last_update)) > days * 86400):
sudo("apt-get --yes update")
if upgrade:
sudo("apt-get --yes upgrade")
### DEBIAN/UBUNTU HELPERS }}}
### {{{ VERSION TAGGING HELPERS
def make_version(ref=None):
"""Build git version string for current directory"""
cmd = 'git describe --tags --abbrev=6 {}'.format(ref or '')
with hide('commands'):
version = local(cmd, capture=True).strip()
if re.match('^v[0-9]', version):
version = version[1:]
# replacements to match semver.org build numbers
if '-' in version:
head, _, tail = version.partition('-')
count, _, sha = tail.partition('-g')
version = head + '+' + count + '-' + sha
return version
def rsync_git(local_path, remote_path, exclude=None, extra_opts=None,
version_file='version.txt'):
"""Rsync deploy a git repo. Write and compare version.txt"""
with settings(hide('output', 'running'), warn_only=True):
print(green('Version On Server: ' + run('cat ' + '{}/{}'.format(
remote_path, version_file)).strip()))
print(green('Now Deploying Version ' +
write_version(join(local_path, version_file))))
rsync(local_path, remote_path, exclude, extra_opts)
def tagversion(repo, level='patch', special=''):
"""Increment and return tagged version in git.
Increment levels are patch, minor and major.
Using semver.org versioning: {major}.{minor}.{patch}{special}
Special must start with a-z and consist of _a-zA-Z0-9.
"""
prepend = 'v'
with lcd(repo):
oldversion = local(
'git describe --abbrev=0 --tags', capture=True).strip()
if oldversion.startswith('v'):
oldversion = oldversion[1:]
else:
prepend = ''
major, minor, patch = [int(x) for x in re.split('\D', oldversion, 3)[:3]]
if special:
if not re.match('^[a-z]', special):
raise ValueError('Special must start with a-z')
if not re.match('[_a-zA-Z0-9]+$', special):
raise ValueError('Special must consist of _a-zA-Z0-9')
if level == 'major':
major, minor, patch = major + 1, 0, 0
elif level == 'minor':
major, minor, patch = major, minor + 1, 0
elif level == 'patch':
major, minor, patch = major, minor, patch + 1
version_string = '{}.{}.{}'.format(major, minor, patch) + special
with lcd(repo):
local('git tag -s --force {}{}'.format(prepend, version_string))
return version_string
def write_version(path, ref=None):
"""Update version file using git desribe"""
with lcd(dirname(path)):
version = make_version(ref)
if (env.get('full') or not os.path.exists(path)
or version != open(path).read().strip()):
with open(path, 'w') as out:
out.write(version + '\n')
return version
### VERSION TAGGING HELPERS }}}
### {{{ SPLUNK HELPERS
def splunk_monitor(monitors):
"""Monitor a list of (path, sourcetype) pairs in splunk"""
if not exists('/opt/splunkforwarder'):
return
if not env.get('splunk_monitors'):
with hide('commands'):
env['splunk_monitors'] = str(splunk('list monitor'))
for path, sourcetype in monitors:
if path not in env['splunk_monitors']:
with hide('everything'):
run("touch '{path}'; true".format(path=path))
splunk("add monitor '{path}' -sourcetype {st}".format(
path=path, st=sourcetype))
env['splunk_monitors'] += '\n' + path
# SPLUNK HELPERS }}}
# vim:foldnestmax=1:foldenable:foldmethod=marker:
|
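A hedged example of the `splunk` helper: the command string is passed through verbatim and credentials default to the forwarder's stock admin:changeme pair.

# Both calls run /opt/splunkforwarder/bin/splunk via sudo on the remote host.
splunk('list monitor')                    # uses the admin:changeme defaults
splunk('add monitor /var/log/syslog -sourcetype syslog',
       user='admin', passwd='s3cret')     # hypothetical password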
gpoulter/fablib | fablib.py | splunk_monitor | python | def splunk_monitor(monitors):
if not exists('/opt/splunkforwarder'):
return
if not env.get('splunk_monitors'):
with hide('commands'):
env['splunk_monitors'] = str(splunk('list monitor'))
for path, sourcetype in monitors:
if path not in env['splunk_monitors']:
with hide('everything'):
run("touch '{path}'; true".format(path=path))
splunk("add monitor '{path}' -sourcetype {st}".format(
path=path, st=sourcetype))
env['splunk_monitors'] += '\n' + path | Monitor a list of (path, sourcetype) pairs in splunk | train | https://github.com/gpoulter/fablib/blob/5d14c4d998f79dd1aa3207063c3d06e30e3e2bf9/fablib.py#L300-L313 | [
"def splunk(cmd, user='admin', passwd='changeme'):\n \"\"\"Authenticated call to splunk\"\"\"\n return sudo('/opt/splunkforwarder/bin/splunk {c} -auth {u}:{p}'\n .format(c=cmd, u=user, p=passwd))\n"
] | """Utility functions for fabric tasks"""
from __future__ import print_function
from fabric.api import (env, get, hide, hosts, lcd, local, put, roles,
run, runs_once, settings, sudo)
from StringIO import StringIO
from contextlib import contextmanager
from fabric.colors import green
from fabric.contrib.files import exists
from fabric.contrib.project import rsync_project
from os.path import dirname, join
import os
import re
import time
### {{{ ROLES HELPERS
def default_roles(*role_list):
"""Decorate task with these roles by default, but override with -R, -H"""
def selectively_attach(func):
"""Only decorate if nothing specified on command line"""
# pylint: disable=W0142
if not env.roles and not env.hosts:
return roles(*role_list)(func)
else:
if env.hosts:
func = hosts(*env.hosts)(func)
if env.roles:
func = roles(*env.roles)(func)
return func
return selectively_attach
### ROLES HELPERS }}}
### {{{ FILE AND DIRECTORY HELPERS
def chown(dirs, user=None, group=None):
"""User sudo to set user and group ownership"""
if isinstance(dirs, basestring):
dirs = [dirs]
args = ' '.join(dirs)
if user and group:
return sudo('chown {}:{} {}'.format(user, group, args))
elif user:
return sudo('chown {} {}'.format(user, args))
elif group:
return sudo('chgrp {} {}'.format(group, args))
else:
return None
def chput(local_path=None, remote_path=None, user=None, group=None,
mode=None, use_sudo=True, mirror_local_mode=False, check=True):
"""Put file and set user and group ownership. Default to use sudo."""
# pylint: disable=R0913
result = None
if env.get('full') or not check or diff(local_path, remote_path):
result = put(local_path, remote_path, use_sudo,
mirror_local_mode, mode)
with hide('commands'):
chown(remote_path, user, group)
return result
def cron(name, timespec, user, command, environ=None, disable=False):
"""Create entry in /etc/cron.d"""
path = '/etc/cron.d/{}'.format(name)
if disable:
sudo('rm ' + path)
return
entry = '{}\t{}\t{}\n'.format(timespec, user, command)
if environ:
envstr = '\n'.join('{}={}'.format(k, v)
for k, v in environ.iteritems())
entry = '{}\n{}'.format(envstr, entry)
chput(StringIO(entry), path, use_sudo=True,
mode=0o644, user='root', group='root')
def diff(local_path, remote_path):
"""Return true if local and remote paths differ in contents"""
with hide('commands'):
if isinstance(local_path, basestring):
with open(local_path) as stream:
local_content = stream.read()
else:
pos = local_path.tell()
local_content = local_path.read()
local_path.seek(pos)
remote_content = StringIO()
with settings(hide('warnings'), warn_only=True):
if get(remote_path, remote_content).failed:
return True
return local_content.strip() != remote_content.getvalue().strip()
def file_exists(location):
"""Tests if there is a remote file at the given location."""
return run('test -e "{}" && echo OK ; true'
.format(location)).endswith("OK")
def md5sum(filename, use_sudo=False):
"""Return md5sum of remote file"""
runner = sudo if use_sudo else run
with hide('commands'):
return runner("md5sum '{}'".format(filename)).split()[0]
def mkdir(dirs, user=None, group=None, mode=None, use_sudo=True):
"""Create directory with sudo and octal mode, then set ownership."""
if isinstance(dirs, basestring):
dirs = [dirs]
runner = sudo if use_sudo else run
if dirs:
modearg = '-m {:o}'.format(mode) if mode else ''
cmd = 'mkdir -v -p {} {}'.format(modearg, ' '.join(dirs))
result = runner(cmd)
with hide('commands'):
chown(dirs, user, group)
return result
def rsync(local_path, remote_path, exclude=None, extra_opts=None):
"""Helper to rsync submodules across"""
if not local_path.endswith('/'):
local_path += '/'
exclude = exclude or []
exclude.extend(['*.egg-info', '*.pyc', '.git', '.gitignore',
'.gitmodules', '/build/', '/dist/'])
with hide('running'):
run("mkdir -p '{}'".format(remote_path))
return rsync_project(
remote_path, local_path, delete=True,
extra_opts='-i --omit-dir-times -FF ' +
(extra_opts if extra_opts else ''),
ssh_opts='-o StrictHostKeyChecking=no',
exclude=exclude)
@contextmanager
def tempput(local_path=None, remote_path=None, use_sudo=False,
mirror_local_mode=False, mode=None):
"""Put a file to remote and remove it afterwards"""
import warnings
warnings.simplefilter('ignore', RuntimeWarning)
if remote_path is None:
remote_path = os.tempnam()
put(local_path, remote_path, use_sudo, mirror_local_mode, mode)
yield remote_path
run("rm '{}'".format(remote_path))
@contextmanager
def watch(filenames, callback, use_sudo=False):
"""Call callback if any of filenames change during the context"""
filenames = [filenames] if isinstance(filenames, basestring) else filenames
old_md5 = {fn: md5sum(fn, use_sudo) for fn in filenames}
yield
for filename in filenames:
if md5sum(filename, use_sudo) != old_md5[filename]:
callback()
return
### FILE AND DIRECTORY HELPERS }}}
### {{{ DEBIAN/UBUNTU HELPERS
def debconf_set_selections(package, selections):
"""Given package and map config:(type,value), set selections"""
text = '\n'.join(' '.join([package, k, t, v]) for
k, (t, v) in selections.iteritems())
sudo('debconf-set-selections <<-HEREDOC\n{}\nHEREDOC'.format(text))
def install_deb(pkgname, url):
"""Install package from custom deb hosted on S3.
Return true if package was installed by this invocation."""
status = run("dpkg-query -W -f='${{Status}}' {p}; true".format(p=pkgname))
if ('installed' not in status) or ('not-installed' in status):
deb = url.rpartition('/')[2]
debtmp = '/tmp/{}'.format(deb)
run("wget --no-check-certificate -qc -O '{}' '{}'".format(debtmp, url))
sudo("dpkg -i '{0}' && rm -f '{0}'".format(debtmp))
return True
else:
return False
def package_ensure_apt(*packages):
"""Ensure apt packages are installed"""
package = " ".join(packages)
status = run("dpkg-query -W -f='${{Status}} ' {p}; true".format(p=package))
status = status.lower()
if 'no packages found' in status or 'not-installed' in status:
sudo("apt-get --yes install " + package)
return False
else:
return True
@runs_once
def update_apt(days=3, upgrade=False):
"""Update apt index if not update in last N days"""
# Check the apt-get update timestamp (works on Ubuntu only)
with settings(warn_only=True):
last_update = run(
"stat -c %Y /var/lib/apt/periodic/update-success-stamp")
if ('cannot stat' in last_update
or (time.time() - float(last_update)) > days * 86400):
sudo("apt-get --yes update")
if upgrade:
sudo("apt-get --yes upgrade")
### DEBIAN/UBUNTU HELPERS }}}
### {{{ VERSION TAGGING HELPERS
def make_version(ref=None):
"""Build git version string for current directory"""
cmd = 'git describe --tags --abbrev=6 {}'.format(ref or '')
with hide('commands'):
version = local(cmd, capture=True).strip()
if re.match('^v[0-9]', version):
version = version[1:]
# replacements to match semver.org build numbers
if '-' in version:
head, _, tail = version.partition('-')
count, _, sha = tail.partition('-g')
version = head + '+' + count + '-' + sha
return version
def rsync_git(local_path, remote_path, exclude=None, extra_opts=None,
version_file='version.txt'):
"""Rsync deploy a git repo. Write and compare version.txt"""
with settings(hide('output', 'running'), warn_only=True):
print(green('Version On Server: ' + run('cat ' + '{}/{}'.format(
remote_path, version_file)).strip()))
print(green('Now Deploying Version ' +
write_version(join(local_path, version_file))))
rsync(local_path, remote_path, exclude, extra_opts)
def tagversion(repo, level='patch', special=''):
"""Increment and return tagged version in git.
Increment levels are patch, minor and major.
Using semver.org versioning: {major}.{minor}.{patch}{special}
Special must start with a-z and consist of _a-zA-Z0-9.
"""
prepend = 'v'
with lcd(repo):
oldversion = local(
'git describe --abbrev=0 --tags', capture=True).strip()
if oldversion.startswith('v'):
oldversion = oldversion[1:]
else:
prepend = ''
major, minor, patch = [int(x) for x in re.split('\D', oldversion, 3)[:3]]
if special:
if not re.match('^[a-z]', special):
raise ValueError('Special must start with a-z')
if not re.match('[_a-zA-Z0-9]+$', special):
raise ValueError('Special must consist of _a-zA-Z0-9')
if level == 'major':
major, minor, patch = major + 1, 0, 0
elif level == 'minor':
major, minor, patch = major, minor + 1, 0
elif level == 'patch':
major, minor, patch = major, minor, patch + 1
version_string = '{}.{}.{}'.format(major, minor, patch) + special
with lcd(repo):
local('git tag -s --force {}{}'.format(prepend, version_string))
return version_string
def write_version(path, ref=None):
"""Update version file using git desribe"""
with lcd(dirname(path)):
version = make_version(ref)
if (env.get('full') or not os.path.exists(path)
or version != open(path).read().strip()):
with open(path, 'w') as out:
out.write(version + '\n')
return version
### VERSION TAGGING HELPERS }}}
### {{{ SPLUNK HELPERS
def splunk(cmd, user='admin', passwd='changeme'):
"""Authenticated call to splunk"""
return sudo('/opt/splunkforwarder/bin/splunk {c} -auth {u}:{p}'
.format(c=cmd, u=user, p=passwd))
# SPLUNK HELPERS }}}
# vim:foldnestmax=1:foldenable:foldmethod=marker:
|
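A hedged example of `splunk_monitor`: it no-ops unless the forwarder is installed, caches the `list monitor` output in `env`, touches each path, and adds only monitors not already listed. The paths and sourcetypes below are hypothetical.

splunk_monitor([
    ('/var/log/myapp/app.log', 'myapp'),               # hypothetical pair
    ('/var/log/myapp/access.log', 'access_combined'),  # hypothetical pair
])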
mlouielu/twstock | twstock/analytics.py | Analytics.ma_bias_ratio | python | def ma_bias_ratio(self, day1, day2):
data1 = self.moving_average(self.price, day1)
data2 = self.moving_average(self.price, day2)
result = [data1[-i] - data2[-i] for i in range(1, min(len(data1), len(data2)) + 1)]
return result[::-1] | Calculate moving average bias ratio | train | https://github.com/mlouielu/twstock/blob/cddddcc084d2d00497d591ab3059e3205b755825/twstock/analytics.py#L24-L30 | [
"def moving_average(self, data, days):\n result = []\n data = data[:]\n for _ in range(len(data) - days + 1):\n result.append(round(sum(data[-days:]) / days, 2))\n data.pop()\n return result[::-1]\n"
] | class Analytics(object):
def continuous(self, data):
diff = [1 if data[-i] > data[-i - 1] else -1 for i in range(1, len(data))]
cont = 0
for v in diff:
if v == diff[0]:
cont += 1
else:
break
return cont * diff[0]
def moving_average(self, data, days):
result = []
data = data[:]
for _ in range(len(data) - days + 1):
result.append(round(sum(data[-days:]) / days, 2))
data.pop()
return result[::-1]
def ma_bias_ratio_pivot(self, data, sample_size=5, position=False):
"""Calculate pivot point"""
sample = data[-sample_size:]
if position is True:
check_value = max(sample)
pre_check_value = max(sample) > 0
elif position is False:
check_value = min(sample)
pre_check_value = max(sample) < 0
return ((sample_size - sample.index(check_value) < 4 and
sample.index(check_value) != sample_size - 1 and pre_check_value),
sample_size - sample.index(check_value) - 1,
check_value)
|
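A hedged sketch for `Analytics.ma_bias_ratio`: in twstock this class is mixed into a stock object that supplies `self.price`; here a hypothetical closing-price list is attached directly so the method runs standalone.

a = Analytics()
a.price = [10.0, 10.5, 11.0, 10.8, 11.2, 11.5, 11.3, 11.8]  # hypothetical closes
bias = a.ma_bias_ratio(3, 6)  # 3-day MA minus 6-day MA, oldest -> newest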
mlouielu/twstock | twstock/analytics.py | Analytics.ma_bias_ratio_pivot | python | def ma_bias_ratio_pivot(self, data, sample_size=5, position=False):
sample = data[-sample_size:]
if position is True:
check_value = max(sample)
pre_check_value = max(sample) > 0
elif position is False:
check_value = min(sample)
pre_check_value = max(sample) < 0
return ((sample_size - sample.index(check_value) < 4 and
sample.index(check_value) != sample_size - 1 and pre_check_value),
sample_size - sample.index(check_value) - 1,
check_value) | Calculate pivot point | train | https://github.com/mlouielu/twstock/blob/cddddcc084d2d00497d591ab3059e3205b755825/twstock/analytics.py#L32-L46 | null | class Analytics(object):
def continuous(self, data):
diff = [1 if data[-i] > data[-i - 1] else -1 for i in range(1, len(data))]
cont = 0
for v in diff:
if v == diff[0]:
cont += 1
else:
break
return cont * diff[0]
def moving_average(self, data, days):
result = []
data = data[:]
for _ in range(len(data) - days + 1):
result.append(round(sum(data[-days:]) / days, 2))
data.pop()
return result[::-1]
def ma_bias_ratio(self, day1, day2):
"""Calculate moving average bias ratio"""
data1 = self.moving_average(self.price, day1)
data2 = self.moving_average(self.price, day2)
result = [data1[-i] - data2[-i] for i in range(1, min(len(data1), len(data2)) + 1)]
return result[::-1]
|
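A hedged sketch for `ma_bias_ratio_pivot`, fed a bias series from `ma_bias_ratio`; `position=True` scans the window for a positive-bias peak, `position=False` for a negative-bias trough. The prices are hypothetical.

a = Analytics()
a.price = [10.0, 10.5, 11.0, 10.8, 11.2, 11.5, 11.3, 11.8, 12.0, 11.6]
bias = a.ma_bias_ratio(3, 6)  # yields 5 values for this 10-point series
is_pivot, days_back, value = a.ma_bias_ratio_pivot(bias, sample_size=5,
                                                   position=True)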
mlouielu/twstock | twstock/legacy.py | LegacyAnalytics.cal_continue | python | def cal_continue(self, list_data):
diff_data = []
for i in range(1, len(list_data)):
if list_data[-i] > list_data[-i - 1]:
diff_data.append(1)
else:
diff_data.append(-1)
cont = 0
for value in diff_data:
if value == diff_data[0]:
cont += 1
else:
break
return cont * diff_data[0] | Calculate the number of consecutive days
:rtype: int
:returns: directional value: positive means up, negative means down. | train | https://github.com/mlouielu/twstock/blob/cddddcc084d2d00497d591ab3059e3205b755825/twstock/legacy.py#L7-L25 | null | class LegacyAnalytics(object):
"""Legacy analytics from toomore/grs"""
def moving_average(self, data, days):
""" 計算移動平均數
:rtype: 序列 舊→新
"""
result = []
data = data[:]
for dummy in range(len(data) - int(days) + 1):
result.append(round(sum(data[-days:]) / days, 2))
data.pop()
result.reverse()
return result
def ma_bias_ratio(self, date1, date2, data):
""" 計算乖離率(均價)
date1 - date2
:param int data1: n 日
:param int data2: m 日
:rtype: 序列 舊→新
"""
data1 = self.moving_average(data, date1)
data2 = self.moving_average(data, date2)
cal_list = []
for i in range(1, min(len(data1), len(data2)) + 1):
cal_list.append(data1[-i] - data2[-i])
cal_list.reverse()
return cal_list
def ma_bias_ratio_point(cls, data, sample=5,
positive_or_negative=False):
"""判斷轉折點位置
:param list data: 計算資料
:param int sample: 計算的區間樣本數量
:param bool positive_or_negative: 正乖離 為 True,負乖離 為 False
:rtype: tuple
:returns: (True or False, 第幾個轉折日, 轉折點值)
"""
sample_data = data[-sample:]
if positive_or_negative: # positive bias
ckvalue = max(sample_data) # find the maximum
preckvalue = max(sample_data) > 0 # the window maximum must be positive
else:
ckvalue = min(sample_data) # find the minimum
preckvalue = max(sample_data) < 0 # the window maximum must be negative
return (sample - sample_data.index(ckvalue) < 4 and \
sample_data.index(ckvalue) != sample - 1 and preckvalue,
sample - sample_data.index(ckvalue) - 1,
ckvalue)
|
mlouielu/twstock | twstock/legacy.py | LegacyAnalytics.moving_average | python | def moving_average(self, data, days):
result = []
data = data[:]
for dummy in range(len(data) - int(days) + 1):
result.append(round(sum(data[-days:]) / days, 2))
data.pop()
result.reverse()
return result | 計算移動平均數
:rtype: 序列 舊→新 | train | https://github.com/mlouielu/twstock/blob/cddddcc084d2d00497d591ab3059e3205b755825/twstock/legacy.py#L27-L38 | null | class LegacyAnalytics(object):
"""Legacy analytics from toomore/grs"""
def cal_continue(self, list_data):
""" 計算持續天數
:rtype: int
:returns: 向量數值:正數向上、負數向下。
"""
diff_data = []
for i in range(1, len(list_data)):
if list_data[-i] > list_data[-i - 1]:
diff_data.append(1)
else:
diff_data.append(-1)
cont = 0
for value in diff_data:
if value == diff_data[0]:
cont += 1
else:
break
return cont * diff_data[0]
def ma_bias_ratio(self, date1, date2, data):
""" 計算乖離率(均價)
date1 - date2
:param int data1: n 日
:param int data2: m 日
:rtype: 序列 舊→新
"""
data1 = self.moving_average(data, date1)
data2 = self.moving_average(data, date2)
cal_list = []
for i in range(1, min(len(data1), len(data2)) + 1):
cal_list.append(data1[-i] - data2[-i])
cal_list.reverse()
return cal_list
def ma_bias_ratio_point(cls, data, sample=5,
positive_or_negative=False):
"""判斷轉折點位置
:param list data: 計算資料
:param int sample: 計算的區間樣本數量
:param bool positive_or_negative: 正乖離 為 True,負乖離 為 False
:rtype: tuple
:returns: (True or False, 第幾個轉折日, 轉折點值)
"""
sample_data = data[-sample:]
if positive_or_negative: # 正
ckvalue = max(sample_data) # 尋找最大值
preckvalue = max(sample_data) > 0 # 區間最大值必須為正
else:
ckvalue = min(sample_data) # 尋找最小值
preckvalue = max(sample_data) < 0 # 區間最大值必須為負
return (sample - sample_data.index(ckvalue) < 4 and \
sample_data.index(ckvalue) != sample - 1 and preckvalue,
sample - sample_data.index(ckvalue) - 1,
ckvalue)
|
mlouielu/twstock | twstock/legacy.py | LegacyAnalytics.ma_bias_ratio | python | def ma_bias_ratio(self, date1, date2, data):
data1 = self.moving_average(data, date1)
data2 = self.moving_average(data, date2)
cal_list = []
for i in range(1, min(len(data1), len(data2)) + 1):
cal_list.append(data1[-i] - data2[-i])
cal_list.reverse()
return cal_list | 計算乖離率(均價)
date1 - date2
:param int data1: n 日
:param int data2: m 日
:rtype: 序列 舊→新 | train | https://github.com/mlouielu/twstock/blob/cddddcc084d2d00497d591ab3059e3205b755825/twstock/legacy.py#L40-L54 | [
"def moving_average(self, data, days):\n \"\"\" 計算移動平均數\n\n :rtype: 序列 舊→新\n \"\"\"\n result = []\n data = data[:]\n for dummy in range(len(data) - int(days) + 1):\n result.append(round(sum(data[-days:]) / days, 2))\n data.pop()\n result.reverse()\n return result\n"
] | class LegacyAnalytics(object):
"""Legacy analytics from toomore/grs"""
def cal_continue(self, list_data):
""" 計算持續天數
:rtype: int
:returns: 向量數值:正數向上、負數向下。
"""
diff_data = []
for i in range(1, len(list_data)):
if list_data[-i] > list_data[-i - 1]:
diff_data.append(1)
else:
diff_data.append(-1)
cont = 0
for value in diff_data:
if value == diff_data[0]:
cont += 1
else:
break
return cont * diff_data[0]
def moving_average(self, data, days):
""" 計算移動平均數
:rtype: 序列 舊→新
"""
result = []
data = data[:]
for dummy in range(len(data) - int(days) + 1):
result.append(round(sum(data[-days:]) / days, 2))
data.pop()
result.reverse()
return result
def ma_bias_ratio_point(cls, data, sample=5,
positive_or_negative=False):
"""判斷轉折點位置
:param list data: 計算資料
:param int sample: 計算的區間樣本數量
:param bool positive_or_negative: 正乖離 為 True,負乖離 為 False
:rtype: tuple
:returns: (True or False, 第幾個轉折日, 轉折點值)
"""
sample_data = data[-sample:]
if positive_or_negative: # 正
ckvalue = max(sample_data) # 尋找最大值
preckvalue = max(sample_data) > 0 # 區間最大值必須為正
else:
ckvalue = min(sample_data) # 尋找最小值
preckvalue = max(sample_data) < 0 # 區間最大值必須為負
return (sample - sample_data.index(ckvalue) < 4 and \
sample_data.index(ckvalue) != sample - 1 and preckvalue,
sample - sample_data.index(ckvalue) - 1,
ckvalue)
|
mlouielu/twstock | twstock/legacy.py | LegacyBestFourPoint.bias_ratio | python | def bias_ratio(self, position=False):
return self.data.ma_bias_ratio_pivot(
self.data.ma_bias_ratio(3, 6),
position=position) | 判斷乖離
:param bool positive_or_negative: 正乖離 為 True,負乖離 為 False | train | https://github.com/mlouielu/twstock/blob/cddddcc084d2d00497d591ab3059e3205b755825/twstock/legacy.py#L87-L94 | null | class LegacyBestFourPoint(object):
""" 四大買點組合
:param grs.Stock data: 個股資料
"""
def __init__(self, data):
self.data = data
def check_plus_bias_ratio(self):
""" 正乖離扣至最大 """
return self.bias_ratio(True)
def check_mins_bias_ratio(self):
""" 負乖離扣至最大 """
return self.bias_ratio()
##### 四大買點 #####
def best_buy_1(self):
"""量大收紅
"""
result = self.data.capacity[-1] > self.data.capacity[-2] and \
self.data.price[-1] > self.data.open[-1]
return result
def best_buy_2(self):
"""量縮價不跌
"""
result = self.data.capacity[-1] < self.data.capacity[-2] and \
self.data.price[-1] > self.data.price[-2]
return result
def best_buy_3(self):
"""三日均價由下往上
"""
return self.data.continuous(self.data.moving_average(self.data.price, 3)) == 1
def best_buy_4(self):
"""三日均價大於六日均價
"""
return self.data.moving_average(self.data.price, 3)[-1] > \
self.data.moving_average(self.data.price, 6)[-1]
##### 四大賣點 #####
def best_sell_1(self):
"""量大收黑
"""
result = self.data.capacity[-1] > self.data.capacity[-2] and \
self.data.price[-1] < self.data.open[-1]
return result
def best_sell_2(self):
"""量縮價跌
"""
result = self.data.capacity[-1] < self.data.capacity[-2] and \
self.data.price[-1] < self.data.price[-2]
return result
def best_sell_3(self):
"""三日均價由上往下
"""
return self.data.continuous(self.data.moving_average(self.data.price, 3)) == -1
def best_sell_4(self):
"""三日均價小於六日均價
"""
return self.data.moving_average(self.data.price, 3)[-1] < \
self.data.moving_average(self.data.price, 6)[-1]
def best_four_point_to_buy(self):
""" 判斷是否為四大買點
:rtype: str or False
"""
result = []
if self.check_mins_bias_ratio() and \
(self.best_buy_1() or self.best_buy_2() or self.best_buy_3() or \
self.best_buy_4()):
if self.best_buy_1():
result.append(self.best_buy_1.__doc__.strip())
if self.best_buy_2():
result.append(self.best_buy_2.__doc__.strip())
if self.best_buy_3():
result.append(self.best_buy_3.__doc__.strip())
if self.best_buy_4():
result.append(self.best_buy_4.__doc__.strip())
result = ', '.join(result)
else:
result = False
return result
def best_four_point_to_sell(self):
""" 判斷是否為四大賣點
:rtype: str or False
"""
result = []
if self.check_plus_bias_ratio() and \
(self.best_sell_1() or self.best_sell_2() or self.best_sell_3() or \
self.best_sell_4()):
if self.best_sell_1():
result.append(self.best_sell_1.__doc__.strip())
if self.best_sell_2():
result.append(self.best_sell_2.__doc__.strip())
if self.best_sell_3():
result.append(self.best_sell_3.__doc__.strip())
if self.best_sell_4():
result.append(self.best_sell_4.__doc__.strip())
result = ', '.join(result)
else:
result = False
return result
def best_four_point(self):
""" 判斷買點或賣點
:rtype: tuple
:returns: (bool, str)
"""
buy = self.best_four_point_to_buy()
sell = self.best_four_point_to_sell()
if buy:
return True, buy
elif sell:
return False, sell
return None
|
mlouielu/twstock | twstock/legacy.py | LegacyBestFourPoint.best_buy_1 | python | def best_buy_1(self):
result = self.data.capacity[-1] > self.data.capacity[-2] and \
self.data.price[-1] > self.data.open[-1]
return result | 量大收紅 | train | https://github.com/mlouielu/twstock/blob/cddddcc084d2d00497d591ab3059e3205b755825/twstock/legacy.py#L105-L110 | null | class LegacyBestFourPoint(object):
""" 四大買點組合
:param grs.Stock data: 個股資料
"""
def __init__(self, data):
self.data = data
def bias_ratio(self, position=False):
""" 判斷乖離
:param bool positive_or_negative: 正乖離 為 True,負乖離 為 False
"""
return self.data.ma_bias_ratio_pivot(
self.data.ma_bias_ratio(3, 6),
position=position)
def check_plus_bias_ratio(self):
""" 正乖離扣至最大 """
return self.bias_ratio(True)
def check_mins_bias_ratio(self):
""" 負乖離扣至最大 """
return self.bias_ratio()
##### 四大買點 #####
def best_buy_2(self):
"""量縮價不跌
"""
result = self.data.capacity[-1] < self.data.capacity[-2] and \
self.data.price[-1] > self.data.price[-2]
return result
def best_buy_3(self):
"""三日均價由下往上
"""
return self.data.continuous(self.data.moving_average(self.data.price, 3)) == 1
def best_buy_4(self):
"""三日均價大於六日均價
"""
return self.data.moving_average(self.data.price, 3)[-1] > \
self.data.moving_average(self.data.price, 6)[-1]
##### 四大賣點 #####
def best_sell_1(self):
"""量大收黑
"""
result = self.data.capacity[-1] > self.data.capacity[-2] and \
self.data.price[-1] < self.data.open[-1]
return result
def best_sell_2(self):
"""量縮價跌
"""
result = self.data.capacity[-1] < self.data.capacity[-2] and \
self.data.price[-1] < self.data.price[-2]
return result
def best_sell_3(self):
"""三日均價由上往下
"""
return self.data.continuous(self.data.moving_average(self.data.price, 3)) == -1
def best_sell_4(self):
"""三日均價小於六日均價
"""
return self.data.moving_average(self.data.price, 3)[-1] < \
self.data.moving_average(self.data.price, 6)[-1]
def best_four_point_to_buy(self):
""" 判斷是否為四大買點
:rtype: str or False
"""
result = []
if self.check_mins_bias_ratio() and \
(self.best_buy_1() or self.best_buy_2() or self.best_buy_3() or \
self.best_buy_4()):
if self.best_buy_1():
result.append(self.best_buy_1.__doc__.strip())
if self.best_buy_2():
result.append(self.best_buy_2.__doc__.strip())
if self.best_buy_3():
result.append(self.best_buy_3.__doc__.strip())
if self.best_buy_4():
result.append(self.best_buy_4.__doc__.strip())
result = ', '.join(result)
else:
result = False
return result
def best_four_point_to_sell(self):
""" 判斷是否為四大賣點
:rtype: str or False
"""
result = []
if self.check_plus_bias_ratio() and \
(self.best_sell_1() or self.best_sell_2() or self.best_sell_3() or \
self.best_sell_4()):
if self.best_sell_1():
result.append(self.best_sell_1.__doc__.strip())
if self.best_sell_2():
result.append(self.best_sell_2.__doc__.strip())
if self.best_sell_3():
result.append(self.best_sell_3.__doc__.strip())
if self.best_sell_4():
result.append(self.best_sell_4.__doc__.strip())
result = ', '.join(result)
else:
result = False
return result
def best_four_point(self):
""" 判斷買點或賣點
:rtype: tuple
:returns: (bool, str)
"""
buy = self.best_four_point_to_buy()
sell = self.best_four_point_to_sell()
if buy:
return True, buy
elif sell:
return False, sell
return None
|
mlouielu/twstock | twstock/legacy.py | LegacyBestFourPoint.best_buy_2 | python | def best_buy_2(self):
result = self.data.capacity[-1] < self.data.capacity[-2] and \
self.data.price[-1] > self.data.price[-2]
return result | 量縮價不跌 | train | https://github.com/mlouielu/twstock/blob/cddddcc084d2d00497d591ab3059e3205b755825/twstock/legacy.py#L112-L117 | null | class LegacyBestFourPoint(object):
""" 四大買點組合
:param grs.Stock data: 個股資料
"""
def __init__(self, data):
self.data = data
def bias_ratio(self, position=False):
""" 判斷乖離
:param bool positive_or_negative: 正乖離 為 True,負乖離 為 False
"""
return self.data.ma_bias_ratio_pivot(
self.data.ma_bias_ratio(3, 6),
position=position)
def check_plus_bias_ratio(self):
""" 正乖離扣至最大 """
return self.bias_ratio(True)
def check_mins_bias_ratio(self):
""" 負乖離扣至最大 """
return self.bias_ratio()
##### 四大買點 #####
def best_buy_1(self):
"""量大收紅
"""
result = self.data.capacity[-1] > self.data.capacity[-2] and \
self.data.price[-1] > self.data.open[-1]
return result
def best_buy_3(self):
"""三日均價由下往上
"""
return self.data.continuous(self.data.moving_average(self.data.price, 3)) == 1
def best_buy_4(self):
"""三日均價大於六日均價
"""
return self.data.moving_average(self.data.price, 3)[-1] > \
self.data.moving_average(self.data.price, 6)[-1]
##### 四大賣點 #####
def best_sell_1(self):
"""量大收黑
"""
result = self.data.capacity[-1] > self.data.capacity[-2] and \
self.data.price[-1] < self.data.open[-1]
return result
def best_sell_2(self):
"""量縮價跌
"""
result = self.data.capacity[-1] < self.data.capacity[-2] and \
self.data.price[-1] < self.data.price[-2]
return result
def best_sell_3(self):
"""三日均價由上往下
"""
return self.data.continuous(self.data.moving_average(self.data.price, 3)) == -1
def best_sell_4(self):
"""三日均價小於六日均價
"""
return self.data.moving_average(self.data.price, 3)[-1] < \
self.data.moving_average(self.data.price, 6)[-1]
def best_four_point_to_buy(self):
""" 判斷是否為四大買點
:rtype: str or False
"""
result = []
if self.check_mins_bias_ratio() and \
(self.best_buy_1() or self.best_buy_2() or self.best_buy_3() or \
self.best_buy_4()):
if self.best_buy_1():
result.append(self.best_buy_1.__doc__.strip())
if self.best_buy_2():
result.append(self.best_buy_2.__doc__.strip())
if self.best_buy_3():
result.append(self.best_buy_3.__doc__.strip())
if self.best_buy_4():
result.append(self.best_buy_4.__doc__.strip())
result = ', '.join(result)
else:
result = False
return result
def best_four_point_to_sell(self):
""" 判斷是否為四大賣點
:rtype: str or False
"""
result = []
if self.check_plus_bias_ratio() and \
(self.best_sell_1() or self.best_sell_2() or self.best_sell_3() or \
self.best_sell_4()):
if self.best_sell_1():
result.append(self.best_sell_1.__doc__.strip())
if self.best_sell_2():
result.append(self.best_sell_2.__doc__.strip())
if self.best_sell_3():
result.append(self.best_sell_3.__doc__.strip())
if self.best_sell_4():
result.append(self.best_sell_4.__doc__.strip())
result = ', '.join(result)
else:
result = False
return result
def best_four_point(self):
""" 判斷買點或賣點
:rtype: tuple
:returns: (bool, str)
"""
buy = self.best_four_point_to_buy()
sell = self.best_four_point_to_sell()
if buy:
return True, buy
elif sell:
return False, sell
return None
|
mlouielu/twstock | twstock/legacy.py | LegacyBestFourPoint.best_buy_3 | python | def best_buy_3(self):
return self.data.continuous(self.data.moving_average(self.data.price, 3)) == 1 | 三日均價由下往上 | train | https://github.com/mlouielu/twstock/blob/cddddcc084d2d00497d591ab3059e3205b755825/twstock/legacy.py#L119-L122 | null | class LegacyBestFourPoint(object):
""" 四大買點組合
:param grs.Stock data: 個股資料
"""
def __init__(self, data):
self.data = data
def bias_ratio(self, position=False):
""" 判斷乖離
:param bool positive_or_negative: 正乖離 為 True,負乖離 為 False
"""
return self.data.ma_bias_ratio_pivot(
self.data.ma_bias_ratio(3, 6),
position=position)
def check_plus_bias_ratio(self):
""" 正乖離扣至最大 """
return self.bias_ratio(True)
def check_mins_bias_ratio(self):
""" 負乖離扣至最大 """
return self.bias_ratio()
##### 四大買點 #####
def best_buy_1(self):
"""量大收紅
"""
result = self.data.capacity[-1] > self.data.capacity[-2] and \
self.data.price[-1] > self.data.open[-1]
return result
def best_buy_2(self):
"""量縮價不跌
"""
result = self.data.capacity[-1] < self.data.capacity[-2] and \
self.data.price[-1] > self.data.price[-2]
return result
def best_buy_4(self):
"""三日均價大於六日均價
"""
return self.data.moving_average(self.data.price, 3)[-1] > \
self.data.moving_average(self.data.price, 6)[-1]
##### 四大賣點 #####
def best_sell_1(self):
"""量大收黑
"""
result = self.data.capacity[-1] > self.data.capacity[-2] and \
self.data.price[-1] < self.data.open[-1]
return result
def best_sell_2(self):
"""量縮價跌
"""
result = self.data.capacity[-1] < self.data.capacity[-2] and \
self.data.price[-1] < self.data.price[-2]
return result
def best_sell_3(self):
"""三日均價由上往下
"""
return self.data.continuous(self.data.moving_average(self.data.price, 3)) == -1
def best_sell_4(self):
"""三日均價小於六日均價
"""
return self.data.moving_average(self.data.price, 3)[-1] < \
self.data.moving_average(self.data.price, 6)[-1]
def best_four_point_to_buy(self):
""" 判斷是否為四大買點
:rtype: str or False
"""
result = []
if self.check_mins_bias_ratio() and \
(self.best_buy_1() or self.best_buy_2() or self.best_buy_3() or \
self.best_buy_4()):
if self.best_buy_1():
result.append(self.best_buy_1.__doc__.strip())
if self.best_buy_2():
result.append(self.best_buy_2.__doc__.strip())
if self.best_buy_3():
result.append(self.best_buy_3.__doc__.strip())
if self.best_buy_4():
result.append(self.best_buy_4.__doc__.strip())
result = ', '.join(result)
else:
result = False
return result
def best_four_point_to_sell(self):
""" 判斷是否為四大賣點
:rtype: str or False
"""
result = []
if self.check_plus_bias_ratio() and \
(self.best_sell_1() or self.best_sell_2() or self.best_sell_3() or \
self.best_sell_4()):
if self.best_sell_1():
result.append(self.best_sell_1.__doc__.strip())
if self.best_sell_2():
result.append(self.best_sell_2.__doc__.strip())
if self.best_sell_3():
result.append(self.best_sell_3.__doc__.strip())
if self.best_sell_4():
result.append(self.best_sell_4.__doc__.strip())
result = ', '.join(result)
else:
result = False
return result
def best_four_point(self):
""" 判斷買點或賣點
:rtype: tuple
:returns: (bool, str)
"""
buy = self.best_four_point_to_buy()
sell = self.best_four_point_to_sell()
if buy:
return True, buy
elif sell:
return False, sell
return None
|
mlouielu/twstock | twstock/legacy.py | LegacyBestFourPoint.best_buy_4 | python | def best_buy_4(self):
return self.data.moving_average(self.data.price, 3)[-1] > \
self.data.moving_average(self.data.price, 6)[-1] | 三日均價大於六日均價 | train | https://github.com/mlouielu/twstock/blob/cddddcc084d2d00497d591ab3059e3205b755825/twstock/legacy.py#L124-L128 | null | class LegacyBestFourPoint(object):
""" 四大買點組合
:param grs.Stock data: 個股資料
"""
def __init__(self, data):
self.data = data
def bias_ratio(self, position=False):
""" 判斷乖離
:param bool positive_or_negative: 正乖離 為 True,負乖離 為 False
"""
return self.data.ma_bias_ratio_pivot(
self.data.ma_bias_ratio(3, 6),
position=position)
def check_plus_bias_ratio(self):
""" 正乖離扣至最大 """
return self.bias_ratio(True)
def check_mins_bias_ratio(self):
""" 負乖離扣至最大 """
return self.bias_ratio()
##### 四大買點 #####
def best_buy_1(self):
"""量大收紅
"""
result = self.data.capacity[-1] > self.data.capacity[-2] and \
self.data.price[-1] > self.data.open[-1]
return result
def best_buy_2(self):
"""量縮價不跌
"""
result = self.data.capacity[-1] < self.data.capacity[-2] and \
self.data.price[-1] > self.data.price[-2]
return result
def best_buy_3(self):
"""三日均價由下往上
"""
return self.data.continuous(self.data.moving_average(self.data.price, 3)) == 1
##### 四大賣點 #####
def best_sell_1(self):
"""量大收黑
"""
result = self.data.capacity[-1] > self.data.capacity[-2] and \
self.data.price[-1] < self.data.open[-1]
return result
def best_sell_2(self):
"""量縮價跌
"""
result = self.data.capacity[-1] < self.data.capacity[-2] and \
self.data.price[-1] < self.data.price[-2]
return result
def best_sell_3(self):
"""三日均價由上往下
"""
return self.data.continuous(self.data.moving_average(self.data.price, 3)) == -1
def best_sell_4(self):
"""三日均價小於六日均價
"""
return self.data.moving_average(self.data.price, 3)[-1] < \
self.data.moving_average(self.data.price, 6)[-1]
def best_four_point_to_buy(self):
""" 判斷是否為四大買點
:rtype: str or False
"""
result = []
if self.check_mins_bias_ratio() and \
(self.best_buy_1() or self.best_buy_2() or self.best_buy_3() or \
self.best_buy_4()):
if self.best_buy_1():
result.append(self.best_buy_1.__doc__.strip())
if self.best_buy_2():
result.append(self.best_buy_2.__doc__.strip())
if self.best_buy_3():
result.append(self.best_buy_3.__doc__.strip())
if self.best_buy_4():
result.append(self.best_buy_4.__doc__.strip())
result = ', '.join(result)
else:
result = False
return result
def best_four_point_to_sell(self):
""" 判斷是否為四大賣點
:rtype: str or False
"""
result = []
if self.check_plus_bias_ratio() and \
(self.best_sell_1() or self.best_sell_2() or self.best_sell_3() or \
self.best_sell_4()):
if self.best_sell_1():
result.append(self.best_sell_1.__doc__.strip())
if self.best_sell_2():
result.append(self.best_sell_2.__doc__.strip())
if self.best_sell_3():
result.append(self.best_sell_3.__doc__.strip())
if self.best_sell_4():
result.append(self.best_sell_4.__doc__.strip())
result = ', '.join(result)
else:
result = False
return result
def best_four_point(self):
""" 判斷買點或賣點
:rtype: tuple
:returns: (bool, str)
"""
buy = self.best_four_point_to_buy()
sell = self.best_four_point_to_sell()
if buy:
return True, buy
elif sell:
return False, sell
return None
|
mlouielu/twstock | twstock/legacy.py | LegacyBestFourPoint.best_sell_1 | python | def best_sell_1(self):
result = self.data.capacity[-1] > self.data.capacity[-2] and \
self.data.price[-1] < self.data.open[-1]
return result | 量大收黑 | train | https://github.com/mlouielu/twstock/blob/cddddcc084d2d00497d591ab3059e3205b755825/twstock/legacy.py#L131-L136 | null | class LegacyBestFourPoint(object):
""" 四大買點組合
:param grs.Stock data: 個股資料
"""
def __init__(self, data):
self.data = data
def bias_ratio(self, position=False):
""" 判斷乖離
:param bool positive_or_negative: 正乖離 為 True,負乖離 為 False
"""
return self.data.ma_bias_ratio_pivot(
self.data.ma_bias_ratio(3, 6),
position=position)
def check_plus_bias_ratio(self):
""" 正乖離扣至最大 """
return self.bias_ratio(True)
def check_mins_bias_ratio(self):
""" 負乖離扣至最大 """
return self.bias_ratio()
##### 四大買點 #####
def best_buy_1(self):
"""量大收紅
"""
result = self.data.capacity[-1] > self.data.capacity[-2] and \
self.data.price[-1] > self.data.open[-1]
return result
def best_buy_2(self):
"""量縮價不跌
"""
result = self.data.capacity[-1] < self.data.capacity[-2] and \
self.data.price[-1] > self.data.price[-2]
return result
def best_buy_3(self):
"""三日均價由下往上
"""
return self.data.continuous(self.data.moving_average(self.data.price, 3)) == 1
def best_buy_4(self):
"""三日均價大於六日均價
"""
return self.data.moving_average(self.data.price, 3)[-1] > \
self.data.moving_average(self.data.price, 6)[-1]
##### 四大賣點 #####
def best_sell_2(self):
"""量縮價跌
"""
result = self.data.capacity[-1] < self.data.capacity[-2] and \
self.data.price[-1] < self.data.price[-2]
return result
def best_sell_3(self):
"""三日均價由上往下
"""
return self.data.continuous(self.data.moving_average(self.data.price, 3)) == -1
def best_sell_4(self):
"""三日均價小於六日均價
"""
return self.data.moving_average(self.data.price, 3)[-1] < \
self.data.moving_average(self.data.price, 6)[-1]
def best_four_point_to_buy(self):
""" 判斷是否為四大買點
:rtype: str or False
"""
result = []
if self.check_mins_bias_ratio() and \
(self.best_buy_1() or self.best_buy_2() or self.best_buy_3() or \
self.best_buy_4()):
if self.best_buy_1():
result.append(self.best_buy_1.__doc__.strip())
if self.best_buy_2():
result.append(self.best_buy_2.__doc__.strip())
if self.best_buy_3():
result.append(self.best_buy_3.__doc__.strip())
if self.best_buy_4():
result.append(self.best_buy_4.__doc__.strip())
result = ', '.join(result)
else:
result = False
return result
def best_four_point_to_sell(self):
""" 判斷是否為四大賣點
:rtype: str or False
"""
result = []
if self.check_plus_bias_ratio() and \
(self.best_sell_1() or self.best_sell_2() or self.best_sell_3() or \
self.best_sell_4()):
if self.best_sell_1():
result.append(self.best_sell_1.__doc__.strip())
if self.best_sell_2():
result.append(self.best_sell_2.__doc__.strip())
if self.best_sell_3():
result.append(self.best_sell_3.__doc__.strip())
if self.best_sell_4():
result.append(self.best_sell_4.__doc__.strip())
result = ', '.join(result)
else:
result = False
return result
def best_four_point(self):
""" 判斷買點或賣點
:rtype: tuple
:returns: (bool, str)
"""
buy = self.best_four_point_to_buy()
sell = self.best_four_point_to_sell()
if buy:
return True, buy
elif sell:
return False, sell
return None
|
mlouielu/twstock | twstock/legacy.py | LegacyBestFourPoint.best_sell_2 | python | def best_sell_2(self):
result = self.data.capacity[-1] < self.data.capacity[-2] and \
self.data.price[-1] < self.data.price[-2]
return result | 量縮價跌 | train | https://github.com/mlouielu/twstock/blob/cddddcc084d2d00497d591ab3059e3205b755825/twstock/legacy.py#L138-L143 | null | class LegacyBestFourPoint(object):
""" 四大買點組合
:param grs.Stock data: 個股資料
"""
def __init__(self, data):
self.data = data
def bias_ratio(self, position=False):
""" 判斷乖離
:param bool positive_or_negative: 正乖離 為 True,負乖離 為 False
"""
return self.data.ma_bias_ratio_pivot(
self.data.ma_bias_ratio(3, 6),
position=position)
def check_plus_bias_ratio(self):
""" 正乖離扣至最大 """
return self.bias_ratio(True)
def check_mins_bias_ratio(self):
""" 負乖離扣至最大 """
return self.bias_ratio()
##### 四大買點 #####
def best_buy_1(self):
"""量大收紅
"""
result = self.data.capacity[-1] > self.data.capacity[-2] and \
self.data.price[-1] > self.data.open[-1]
return result
def best_buy_2(self):
"""量縮價不跌
"""
result = self.data.capacity[-1] < self.data.capacity[-2] and \
self.data.price[-1] > self.data.price[-2]
return result
def best_buy_3(self):
"""三日均價由下往上
"""
return self.data.continuous(self.data.moving_average(self.data.price, 3)) == 1
def best_buy_4(self):
"""三日均價大於六日均價
"""
return self.data.moving_average(self.data.price, 3)[-1] > \
self.data.moving_average(self.data.price, 6)[-1]
##### 四大賣點 #####
def best_sell_1(self):
"""量大收黑
"""
result = self.data.capacity[-1] > self.data.capacity[-2] and \
self.data.price[-1] < self.data.open[-1]
return result
def best_sell_3(self):
"""三日均價由上往下
"""
return self.data.continuous(self.data.moving_average(self.data.price, 3)) == -1
def best_sell_4(self):
"""三日均價小於六日均價
"""
return self.data.moving_average(self.data.price, 3)[-1] < \
self.data.moving_average(self.data.price, 6)[-1]
def best_four_point_to_buy(self):
""" 判斷是否為四大買點
:rtype: str or False
"""
result = []
if self.check_mins_bias_ratio() and \
(self.best_buy_1() or self.best_buy_2() or self.best_buy_3() or \
self.best_buy_4()):
if self.best_buy_1():
result.append(self.best_buy_1.__doc__.strip())
if self.best_buy_2():
result.append(self.best_buy_2.__doc__.strip())
if self.best_buy_3():
result.append(self.best_buy_3.__doc__.strip())
if self.best_buy_4():
result.append(self.best_buy_4.__doc__.strip())
result = ', '.join(result)
else:
result = False
return result
def best_four_point_to_sell(self):
""" 判斷是否為四大賣點
:rtype: str or False
"""
result = []
if self.check_plus_bias_ratio() and \
(self.best_sell_1() or self.best_sell_2() or self.best_sell_3() or \
self.best_sell_4()):
if self.best_sell_1():
result.append(self.best_sell_1.__doc__.strip())
if self.best_sell_2():
result.append(self.best_sell_2.__doc__.strip())
if self.best_sell_3():
result.append(self.best_sell_3.__doc__.strip())
if self.best_sell_4():
result.append(self.best_sell_4.__doc__.strip())
result = ', '.join(result)
else:
result = False
return result
def best_four_point(self):
""" 判斷買點或賣點
:rtype: tuple
:returns: (bool, str)
"""
buy = self.best_four_point_to_buy()
sell = self.best_four_point_to_sell()
if buy:
return True, buy
elif sell:
return False, sell
return None
|
mlouielu/twstock | twstock/legacy.py | LegacyBestFourPoint.best_sell_3 | python | def best_sell_3(self):
return self.data.continuous(self.data.moving_average(self.data.price, 3)) == -1 | 三日均價由上往下 | train | https://github.com/mlouielu/twstock/blob/cddddcc084d2d00497d591ab3059e3205b755825/twstock/legacy.py#L145-L148 | null | class LegacyBestFourPoint(object):
""" 四大買點組合
:param grs.Stock data: 個股資料
"""
def __init__(self, data):
self.data = data
def bias_ratio(self, position=False):
""" 判斷乖離
:param bool positive_or_negative: 正乖離 為 True,負乖離 為 False
"""
return self.data.ma_bias_ratio_pivot(
self.data.ma_bias_ratio(3, 6),
position=position)
def check_plus_bias_ratio(self):
""" 正乖離扣至最大 """
return self.bias_ratio(True)
def check_mins_bias_ratio(self):
""" 負乖離扣至最大 """
return self.bias_ratio()
##### 四大買點 #####
def best_buy_1(self):
"""量大收紅
"""
result = self.data.capacity[-1] > self.data.capacity[-2] and \
self.data.price[-1] > self.data.open[-1]
return result
def best_buy_2(self):
"""量縮價不跌
"""
result = self.data.capacity[-1] < self.data.capacity[-2] and \
self.data.price[-1] > self.data.price[-2]
return result
def best_buy_3(self):
"""三日均價由下往上
"""
return self.data.continuous(self.data.moving_average(self.data.price, 3)) == 1
def best_buy_4(self):
"""三日均價大於六日均價
"""
return self.data.moving_average(self.data.price, 3)[-1] > \
self.data.moving_average(self.data.price, 6)[-1]
##### 四大賣點 #####
def best_sell_1(self):
"""量大收黑
"""
result = self.data.capacity[-1] > self.data.capacity[-2] and \
self.data.price[-1] < self.data.open[-1]
return result
def best_sell_2(self):
"""量縮價跌
"""
result = self.data.capacity[-1] < self.data.capacity[-2] and \
self.data.price[-1] < self.data.price[-2]
return result
def best_sell_4(self):
"""三日均價小於六日均價
"""
return self.data.moving_average(self.data.price, 3)[-1] < \
self.data.moving_average(self.data.price, 6)[-1]
def best_four_point_to_buy(self):
""" 判斷是否為四大買點
:rtype: str or False
"""
result = []
if self.check_mins_bias_ratio() and \
(self.best_buy_1() or self.best_buy_2() or self.best_buy_3() or \
self.best_buy_4()):
if self.best_buy_1():
result.append(self.best_buy_1.__doc__.strip())
if self.best_buy_2():
result.append(self.best_buy_2.__doc__.strip())
if self.best_buy_3():
result.append(self.best_buy_3.__doc__.strip())
if self.best_buy_4():
result.append(self.best_buy_4.__doc__.strip())
result = ', '.join(result)
else:
result = False
return result
def best_four_point_to_sell(self):
""" 判斷是否為四大賣點
:rtype: str or False
"""
result = []
if self.check_plus_bias_ratio() and \
(self.best_sell_1() or self.best_sell_2() or self.best_sell_3() or \
self.best_sell_4()):
if self.best_sell_1():
result.append(self.best_sell_1.__doc__.strip())
if self.best_sell_2():
result.append(self.best_sell_2.__doc__.strip())
if self.best_sell_3():
result.append(self.best_sell_3.__doc__.strip())
if self.best_sell_4():
result.append(self.best_sell_4.__doc__.strip())
result = ', '.join(result)
else:
result = False
return result
def best_four_point(self):
""" 判斷買點或賣點
:rtype: tuple
:returns: (bool, str)
"""
buy = self.best_four_point_to_buy()
sell = self.best_four_point_to_sell()
if buy:
return True, buy
elif sell:
return False, sell
return None
|
mlouielu/twstock | twstock/legacy.py | LegacyBestFourPoint.best_sell_4 | python | def best_sell_4(self):
return self.data.moving_average(self.data.price, 3)[-1] < \
self.data.moving_average(self.data.price, 6)[-1] | 三日均價小於六日均價 | train | https://github.com/mlouielu/twstock/blob/cddddcc084d2d00497d591ab3059e3205b755825/twstock/legacy.py#L150-L154 | null | class LegacyBestFourPoint(object):
""" 四大買點組合
:param grs.Stock data: 個股資料
"""
def __init__(self, data):
self.data = data
def bias_ratio(self, position=False):
""" 判斷乖離
:param bool positive_or_negative: 正乖離 為 True,負乖離 為 False
"""
return self.data.ma_bias_ratio_pivot(
self.data.ma_bias_ratio(3, 6),
position=position)
def check_plus_bias_ratio(self):
""" 正乖離扣至最大 """
return self.bias_ratio(True)
def check_mins_bias_ratio(self):
""" 負乖離扣至最大 """
return self.bias_ratio()
##### 四大買點 #####
def best_buy_1(self):
"""量大收紅
"""
result = self.data.capacity[-1] > self.data.capacity[-2] and \
self.data.price[-1] > self.data.open[-1]
return result
def best_buy_2(self):
"""量縮價不跌
"""
result = self.data.capacity[-1] < self.data.capacity[-2] and \
self.data.price[-1] > self.data.price[-2]
return result
def best_buy_3(self):
"""三日均價由下往上
"""
return self.data.continuous(self.data.moving_average(self.data.price, 3)) == 1
def best_buy_4(self):
"""三日均價大於六日均價
"""
return self.data.moving_average(self.data.price, 3)[-1] > \
self.data.moving_average(self.data.price, 6)[-1]
##### 四大賣點 #####
def best_sell_1(self):
"""量大收黑
"""
result = self.data.capacity[-1] > self.data.capacity[-2] and \
self.data.price[-1] < self.data.open[-1]
return result
def best_sell_2(self):
"""量縮價跌
"""
result = self.data.capacity[-1] < self.data.capacity[-2] and \
self.data.price[-1] < self.data.price[-2]
return result
def best_sell_3(self):
"""三日均價由上往下
"""
return self.data.continuous(self.data.moving_average(self.data.price, 3)) == -1
def best_four_point_to_buy(self):
""" 判斷是否為四大買點
:rtype: str or False
"""
result = []
if self.check_mins_bias_ratio() and \
(self.best_buy_1() or self.best_buy_2() or self.best_buy_3() or \
self.best_buy_4()):
if self.best_buy_1():
result.append(self.best_buy_1.__doc__.strip())
if self.best_buy_2():
result.append(self.best_buy_2.__doc__.strip())
if self.best_buy_3():
result.append(self.best_buy_3.__doc__.strip())
if self.best_buy_4():
result.append(self.best_buy_4.__doc__.strip())
result = ', '.join(result)
else:
result = False
return result
def best_four_point_to_sell(self):
""" 判斷是否為四大賣點
:rtype: str or False
"""
result = []
if self.check_plus_bias_ratio() and \
(self.best_sell_1() or self.best_sell_2() or self.best_sell_3() or \
self.best_sell_4()):
if self.best_sell_1():
result.append(self.best_sell_1.__doc__.strip())
if self.best_sell_2():
result.append(self.best_sell_2.__doc__.strip())
if self.best_sell_3():
result.append(self.best_sell_3.__doc__.strip())
if self.best_sell_4():
result.append(self.best_sell_4.__doc__.strip())
result = ', '.join(result)
else:
result = False
return result
def best_four_point(self):
""" 判斷買點或賣點
:rtype: tuple
:returns: (bool, str)
"""
buy = self.best_four_point_to_buy()
sell = self.best_four_point_to_sell()
if buy:
return True, buy
elif sell:
return False, sell
return None
|
mlouielu/twstock | twstock/legacy.py | LegacyBestFourPoint.best_four_point_to_sell | python | def best_four_point_to_sell(self):
result = []
if self.check_plus_bias_ratio() and \
(self.best_sell_1() or self.best_sell_2() or self.best_sell_3() or \
self.best_sell_4()):
if self.best_sell_1():
result.append(self.best_sell_1.__doc__.strip())
if self.best_sell_2():
result.append(self.best_sell_2.__doc__.strip())
if self.best_sell_3():
result.append(self.best_sell_3.__doc__.strip())
if self.best_sell_4():
result.append(self.best_sell_4.__doc__.strip())
result = ', '.join(result)
else:
result = False
return result | 判斷是否為四大賣點
:rtype: str or False | train | https://github.com/mlouielu/twstock/blob/cddddcc084d2d00497d591ab3059e3205b755825/twstock/legacy.py#L178-L198 | [
"def check_plus_bias_ratio(self):\n \"\"\" 正乖離扣至最大 \"\"\"\n return self.bias_ratio(True)\n"
] | class LegacyBestFourPoint(object):
""" 四大買點組合
:param grs.Stock data: 個股資料
"""
def __init__(self, data):
self.data = data
def bias_ratio(self, position=False):
""" 判斷乖離
:param bool positive_or_negative: 正乖離 為 True,負乖離 為 False
"""
return self.data.ma_bias_ratio_pivot(
self.data.ma_bias_ratio(3, 6),
position=position)
def check_plus_bias_ratio(self):
""" 正乖離扣至最大 """
return self.bias_ratio(True)
def check_mins_bias_ratio(self):
""" 負乖離扣至最大 """
return self.bias_ratio()
##### 四大買點 #####
def best_buy_1(self):
"""量大收紅
"""
result = self.data.capacity[-1] > self.data.capacity[-2] and \
self.data.price[-1] > self.data.open[-1]
return result
def best_buy_2(self):
"""量縮價不跌
"""
result = self.data.capacity[-1] < self.data.capacity[-2] and \
self.data.price[-1] > self.data.price[-2]
return result
def best_buy_3(self):
"""三日均價由下往上
"""
return self.data.continuous(self.data.moving_average(self.data.price, 3)) == 1
def best_buy_4(self):
"""三日均價大於六日均價
"""
return self.data.moving_average(self.data.price, 3)[-1] > \
self.data.moving_average(self.data.price, 6)[-1]
##### 四大賣點 #####
def best_sell_1(self):
"""量大收黑
"""
result = self.data.capacity[-1] > self.data.capacity[-2] and \
self.data.price[-1] < self.data.open[-1]
return result
def best_sell_2(self):
"""量縮價跌
"""
result = self.data.capacity[-1] < self.data.capacity[-2] and \
self.data.price[-1] < self.data.price[-2]
return result
def best_sell_3(self):
"""三日均價由上往下
"""
return self.data.continuous(self.data.moving_average(self.data.price, 3)) == -1
def best_sell_4(self):
"""三日均價小於六日均價
"""
return self.data.moving_average(self.data.price, 3)[-1] < \
self.data.moving_average(self.data.price, 6)[-1]
def best_four_point_to_buy(self):
""" 判斷是否為四大買點
:rtype: str or False
"""
result = []
if self.check_mins_bias_ratio() and \
(self.best_buy_1() or self.best_buy_2() or self.best_buy_3() or \
self.best_buy_4()):
if self.best_buy_1():
result.append(self.best_buy_1.__doc__.strip())
if self.best_buy_2():
result.append(self.best_buy_2.__doc__.strip())
if self.best_buy_3():
result.append(self.best_buy_3.__doc__.strip())
if self.best_buy_4():
result.append(self.best_buy_4.__doc__.strip())
result = ', '.join(result)
else:
result = False
return result
def best_four_point(self):
""" 判斷買點或賣點
:rtype: tuple
:returns: (bool, str)
"""
buy = self.best_four_point_to_buy()
sell = self.best_four_point_to_sell()
if buy:
return True, buy
elif sell:
return False, sell
return None
|
mlouielu/twstock | twstock/legacy.py | LegacyBestFourPoint.best_four_point | python | def best_four_point(self):
buy = self.best_four_point_to_buy()
sell = self.best_four_point_to_sell()
if buy:
return True, buy
elif sell:
return False, sell
return None | 判斷買點或賣點
:rtype: tuple
:returns: (bool, str) | train | https://github.com/mlouielu/twstock/blob/cddddcc084d2d00497d591ab3059e3205b755825/twstock/legacy.py#L200-L214 | [
"def best_four_point_to_buy(self):\n \"\"\" 判斷是否為四大買點\n\n :rtype: str or False\n \"\"\"\n result = []\n if self.check_mins_bias_ratio() and \\\n (self.best_buy_1() or self.best_buy_2() or self.best_buy_3() or \\\n self.best_buy_4()):\n if self.best_buy_1():\n result.append(self.best_buy_1.__doc__.strip())\n if self.best_buy_2():\n result.append(self.best_buy_2.__doc__.strip())\n if self.best_buy_3():\n result.append(self.best_buy_3.__doc__.strip())\n if self.best_buy_4():\n result.append(self.best_buy_4.__doc__.strip())\n result = ', '.join(result)\n else:\n result = False\n return result\n",
"def best_four_point_to_sell(self):\n \"\"\" 判斷是否為四大賣點\n\n :rtype: str or False\n \"\"\"\n result = []\n if self.check_plus_bias_ratio() and \\\n (self.best_sell_1() or self.best_sell_2() or self.best_sell_3() or \\\n self.best_sell_4()):\n if self.best_sell_1():\n result.append(self.best_sell_1.__doc__.strip())\n if self.best_sell_2():\n result.append(self.best_sell_2.__doc__.strip())\n if self.best_sell_3():\n result.append(self.best_sell_3.__doc__.strip())\n if self.best_sell_4():\n result.append(self.best_sell_4.__doc__.strip())\n result = ', '.join(result)\n else:\n result = False\n return result\n"
] | class LegacyBestFourPoint(object):
""" 四大買點組合
:param grs.Stock data: 個股資料
"""
def __init__(self, data):
self.data = data
def bias_ratio(self, position=False):
""" 判斷乖離
:param bool positive_or_negative: 正乖離 為 True,負乖離 為 False
"""
return self.data.ma_bias_ratio_pivot(
self.data.ma_bias_ratio(3, 6),
position=position)
def check_plus_bias_ratio(self):
""" 正乖離扣至最大 """
return self.bias_ratio(True)
def check_mins_bias_ratio(self):
""" 負乖離扣至最大 """
return self.bias_ratio()
##### 四大買點 #####
def best_buy_1(self):
"""量大收紅
"""
result = self.data.capacity[-1] > self.data.capacity[-2] and \
self.data.price[-1] > self.data.open[-1]
return result
def best_buy_2(self):
"""量縮價不跌
"""
result = self.data.capacity[-1] < self.data.capacity[-2] and \
self.data.price[-1] > self.data.price[-2]
return result
def best_buy_3(self):
"""三日均價由下往上
"""
return self.data.continuous(self.data.moving_average(self.data.price, 3)) == 1
def best_buy_4(self):
"""三日均價大於六日均價
"""
return self.data.moving_average(self.data.price, 3)[-1] > \
self.data.moving_average(self.data.price, 6)[-1]
##### 四大賣點 #####
def best_sell_1(self):
"""量大收黑
"""
result = self.data.capacity[-1] > self.data.capacity[-2] and \
self.data.price[-1] < self.data.open[-1]
return result
def best_sell_2(self):
"""量縮價跌
"""
result = self.data.capacity[-1] < self.data.capacity[-2] and \
self.data.price[-1] < self.data.price[-2]
return result
def best_sell_3(self):
"""三日均價由上往下
"""
return self.data.continuous(self.data.moving_average(self.data.price, 3)) == -1
def best_sell_4(self):
"""三日均價小於六日均價
"""
return self.data.moving_average(self.data.price, 3)[-1] < \
self.data.moving_average(self.data.price, 6)[-1]
def best_four_point_to_buy(self):
""" 判斷是否為四大買點
:rtype: str or False
"""
result = []
if self.check_mins_bias_ratio() and \
(self.best_buy_1() or self.best_buy_2() or self.best_buy_3() or \
self.best_buy_4()):
if self.best_buy_1():
result.append(self.best_buy_1.__doc__.strip())
if self.best_buy_2():
result.append(self.best_buy_2.__doc__.strip())
if self.best_buy_3():
result.append(self.best_buy_3.__doc__.strip())
if self.best_buy_4():
result.append(self.best_buy_4.__doc__.strip())
result = ', '.join(result)
else:
result = False
return result
def best_four_point_to_sell(self):
""" 判斷是否為四大賣點
:rtype: str or False
"""
result = []
if self.check_plus_bias_ratio() and \
(self.best_sell_1() or self.best_sell_2() or self.best_sell_3() or \
self.best_sell_4()):
if self.best_sell_1():
result.append(self.best_sell_1.__doc__.strip())
if self.best_sell_2():
result.append(self.best_sell_2.__doc__.strip())
if self.best_sell_3():
result.append(self.best_sell_3.__doc__.strip())
if self.best_sell_4():
result.append(self.best_sell_4.__doc__.strip())
result = ', '.join(result)
else:
result = False
return result
|
mlouielu/twstock | twstock/stock.py | BaseFetcher._convert_date | python | def _convert_date(self, date):
"""Convert '106/05/01' to '2017/05/01'"""
return '/'.join([str(int(date.split('/')[0]) + 1911)] + date.split('/')[1:]) | Convert '106/05/01' to '2017/05/01 | train | https://github.com/mlouielu/twstock/blob/cddddcc084d2d00497d591ab3059e3205b755825/twstock/stock.py#L31-L33 | null | class BaseFetcher(object):
def fetch(self, year, month, sid, retry):
pass
def _make_datatuple(self, data):
pass
def purify(self, original_data):
pass
|
mlouielu/twstock | twstock/stock.py | Stock.fetch | python | def fetch(self, year: int, month: int):
self.raw_data = [self.fetcher.fetch(year, month, self.sid)]
self.data = self.raw_data[0]['data']
return self.data | Fetch year month data | train | https://github.com/mlouielu/twstock/blob/cddddcc084d2d00497d591ab3059e3205b755825/twstock/stock.py#L151-L155 | [
"def fetch(self, year: int, month: int, sid: str, retry: int=5):\n params = {'date': '%d%02d01' % (year, month), 'stockNo': sid}\n for retry_i in range(retry):\n r = requests.get(self.REPORT_URL, params=params)\n try:\n data = r.json()\n except JSONDecodeError:\n continue\n else:\n break\n else:\n # Fail in all retries\n data = {'stat': '', 'data': []}\n\n if data['stat'] == 'OK':\n data['data'] = self.purify(data)\n else:\n data['data'] = []\n return data\n",
"def fetch(self, year: int, month: int, sid: str, retry: int=5):\n params = {'d': '%d/%d' % (year - 1911, month), 'stkno': sid}\n for retry_i in range(retry):\n r = requests.get(self.REPORT_URL, params=params)\n try:\n data = r.json()\n except JSONDecodeError:\n continue\n else:\n break\n else:\n # Fail in all retries\n data = {'aaData': []}\n\n data['data'] = []\n if data['aaData']:\n data['data'] = self.purify(data)\n return data\n"
] | class Stock(analytics.Analytics):
def __init__(self, sid: str, initial_fetch: bool=True):
self.sid = sid
self.fetcher = TWSEFetcher() if codes[sid].market == '上市' else TPEXFetcher()
self.raw_data = []
self.data = []
# Init data
if initial_fetch:
self.fetch_31()
def _month_year_iter(self, start_month, start_year, end_month, end_year):
ym_start = 12 * start_year + start_month - 1
ym_end = 12 * end_year + end_month
for ym in range(ym_start, ym_end):
y, m = divmod(ym, 12)
yield y, m + 1
def fetch_from(self, year: int, month: int):
"""Fetch data from year, month to current year month data"""
self.raw_data = []
self.data = []
today = datetime.datetime.today()
for year, month in self._month_year_iter(month, year, today.month, today.year):
self.raw_data.append(self.fetcher.fetch(year, month, self.sid))
self.data.extend(self.raw_data[-1]['data'])
return self.data
def fetch_31(self):
"""Fetch 31 days data"""
today = datetime.datetime.today()
before = today - datetime.timedelta(days=60)
self.fetch_from(before.year, before.month)
self.data = self.data[-31:]
return self.data
@property
def date(self):
return [d.date for d in self.data]
@property
def capacity(self):
return [d.capacity for d in self.data]
@property
def turnover(self):
return [d.turnover for d in self.data]
@property
def price(self):
return [d.close for d in self.data]
@property
def high(self):
return [d.high for d in self.data]
@property
def low(self):
return [d.low for d in self.data]
@property
def open(self):
return [d.open for d in self.data]
@property
def close(self):
return [d.close for d in self.data]
@property
def change(self):
return [d.change for d in self.data]
@property
def transaction(self):
return [d.transaction for d in self.data]
|
mlouielu/twstock | twstock/stock.py | Stock.fetch_from | python | def fetch_from(self, year: int, month: int):
self.raw_data = []
self.data = []
today = datetime.datetime.today()
for year, month in self._month_year_iter(month, year, today.month, today.year):
self.raw_data.append(self.fetcher.fetch(year, month, self.sid))
self.data.extend(self.raw_data[-1]['data'])
return self.data | Fetch data from year, month to current year month data | train | https://github.com/mlouielu/twstock/blob/cddddcc084d2d00497d591ab3059e3205b755825/twstock/stock.py#L157-L165 | [
"def _month_year_iter(self, start_month, start_year, end_month, end_year):\n ym_start = 12 * start_year + start_month - 1\n ym_end = 12 * end_year + end_month\n for ym in range(ym_start, ym_end):\n y, m = divmod(ym, 12)\n yield y, m + 1\n"
] | class Stock(analytics.Analytics):
def __init__(self, sid: str, initial_fetch: bool=True):
self.sid = sid
self.fetcher = TWSEFetcher() if codes[sid].market == '上市' else TPEXFetcher()
self.raw_data = []
self.data = []
# Init data
if initial_fetch:
self.fetch_31()
def _month_year_iter(self, start_month, start_year, end_month, end_year):
ym_start = 12 * start_year + start_month - 1
ym_end = 12 * end_year + end_month
for ym in range(ym_start, ym_end):
y, m = divmod(ym, 12)
yield y, m + 1
def fetch(self, year: int, month: int):
"""Fetch year month data"""
self.raw_data = [self.fetcher.fetch(year, month, self.sid)]
self.data = self.raw_data[0]['data']
return self.data
def fetch_31(self):
"""Fetch 31 days data"""
today = datetime.datetime.today()
before = today - datetime.timedelta(days=60)
self.fetch_from(before.year, before.month)
self.data = self.data[-31:]
return self.data
@property
def date(self):
return [d.date for d in self.data]
@property
def capacity(self):
return [d.capacity for d in self.data]
@property
def turnover(self):
return [d.turnover for d in self.data]
@property
def price(self):
return [d.close for d in self.data]
@property
def high(self):
return [d.high for d in self.data]
@property
def low(self):
return [d.low for d in self.data]
@property
def open(self):
return [d.open for d in self.data]
@property
def close(self):
return [d.close for d in self.data]
@property
def change(self):
return [d.change for d in self.data]
@property
def transaction(self):
return [d.transaction for d in self.data]
|
mlouielu/twstock | twstock/stock.py | Stock.fetch_31 | python | def fetch_31(self):
today = datetime.datetime.today()
before = today - datetime.timedelta(days=60)
self.fetch_from(before.year, before.month)
self.data = self.data[-31:]
return self.data | Fetch 31 days data | train | https://github.com/mlouielu/twstock/blob/cddddcc084d2d00497d591ab3059e3205b755825/twstock/stock.py#L167-L173 | [
"def fetch_from(self, year: int, month: int):\n \"\"\"Fetch data from year, month to current year month data\"\"\"\n self.raw_data = []\n self.data = []\n today = datetime.datetime.today()\n for year, month in self._month_year_iter(month, year, today.month, today.year):\n self.raw_data.append(self.fetcher.fetch(year, month, self.sid))\n self.data.extend(self.raw_data[-1]['data'])\n return self.data\n"
] | class Stock(analytics.Analytics):
def __init__(self, sid: str, initial_fetch: bool=True):
self.sid = sid
self.fetcher = TWSEFetcher() if codes[sid].market == '上市' else TPEXFetcher()
self.raw_data = []
self.data = []
# Init data
if initial_fetch:
self.fetch_31()
def _month_year_iter(self, start_month, start_year, end_month, end_year):
ym_start = 12 * start_year + start_month - 1
ym_end = 12 * end_year + end_month
for ym in range(ym_start, ym_end):
y, m = divmod(ym, 12)
yield y, m + 1
def fetch(self, year: int, month: int):
"""Fetch year month data"""
self.raw_data = [self.fetcher.fetch(year, month, self.sid)]
self.data = self.raw_data[0]['data']
return self.data
def fetch_from(self, year: int, month: int):
"""Fetch data from year, month to current year month data"""
self.raw_data = []
self.data = []
today = datetime.datetime.today()
for year, month in self._month_year_iter(month, year, today.month, today.year):
self.raw_data.append(self.fetcher.fetch(year, month, self.sid))
self.data.extend(self.raw_data[-1]['data'])
return self.data
@property
def date(self):
return [d.date for d in self.data]
@property
def capacity(self):
return [d.capacity for d in self.data]
@property
def turnover(self):
return [d.turnover for d in self.data]
@property
def price(self):
return [d.close for d in self.data]
@property
def high(self):
return [d.high for d in self.data]
@property
def low(self):
return [d.low for d in self.data]
@property
def open(self):
return [d.open for d in self.data]
@property
def close(self):
return [d.close for d in self.data]
@property
def change(self):
return [d.change for d in self.data]
@property
def transaction(self):
return [d.transaction for d in self.data]
|
thoughtworksarts/EmoPy | EmoPy/src/csv_data_loader.py | CSVDataLoader.load_data | python | def load_data(self):
print('Extracting training data from csv...')
images = list()
labels = list()
emotion_index_map = dict()
with open(self.datapath) as csv_file:
reader = csv.reader(csv_file, delimiter=',', quotechar='"')
for row in reader:
label_class = row[self.csv_label_col]
if label_class not in self.target_emotion_map.keys():
continue
label_class = self.target_emotion_map[label_class]
if label_class not in emotion_index_map.keys():
emotion_index_map[label_class] = len(emotion_index_map.keys())
labels.append(label_class)
image = np.asarray([int(pixel) for pixel in row[self.csv_image_col].split(' ')], dtype=np.uint8).reshape(self.image_dimensions)
image = self._reshape(image)
images.append(image)
vectorized_labels = self._vectorize_labels(emotion_index_map, labels)
self._check_data_not_empty(images)
return self._load_dataset(np.array(images), np.array(vectorized_labels), emotion_index_map) | Loads image and label data from specified csv file path.
:return: Dataset object containing image and label data. | train | https://github.com/thoughtworksarts/EmoPy/blob/a0ab97b3719ebe0a9de9bfc5adae5e46c9b77fd7/EmoPy/src/csv_data_loader.py#L26-L54 | [
"def _load_dataset(self, images, labels, emotion_index_map):\n \"\"\"\n Loads Dataset object with images, labels, and other data.\n\n :param images: numpy array of image data\n :param labels: numpy array of one-hot vector labels\n :param emotion_index_map: map linking string/integer emotion class to integer index used in labels vectors\n\n :return: Dataset object containing image and label data.\n \"\"\"\n train_images, test_images, train_labels, test_labels = train_test_split(images, labels, test_size=self.validation_split, random_state=42, stratify=labels)\n dataset = Dataset(train_images, test_images, train_labels, test_labels, emotion_index_map, self.time_delay)\n return dataset\n",
"def _check_data_not_empty(self, images):\n if len(images) == 0:\n raise AssertionError('csv file does not contain samples of specified labels: %s' % str(self.label_map.keys()))\n",
"def _vectorize_labels(self, label_index_map, labels):\n label_values = list()\n label_count = len(label_index_map.keys())\n for label in labels:\n label_value = [0] * label_count\n label_value[label_index_map[label]] = 1.0\n label_values.append(label_value)\n return label_values\n"
] | class CSVDataLoader(_DataLoader):
"""
DataLoader subclass loads image and label data from csv file.
:param emotion_map: Dict of target emotion label values and their corresponding label vector index values.
:param datapath: Location of image dataset.
:param validation_split: Float percentage of data to use as validation set.
:param image_dimensions: Dimensions of sample images (height, width).
:param csv_label_col: Index of label value column in csv.
:param csv_image_col: Index of image column in csv.
:param out_channels: Number of image channels.
"""
def __init__(self, target_emotion_map, datapath=None, validation_split=0.2, image_dimensions=None, csv_label_col=None, csv_image_col=None, out_channels=1):
self.target_emotion_map = target_emotion_map
self.datapath = datapath
self.image_dimensions = image_dimensions
self.csv_image_col = csv_image_col
self.csv_label_col = csv_label_col
self.out_channels = out_channels
super().__init__(validation_split)
def _validate_arguments(self):
if self.csv_image_col is None or self.csv_label_col is None:
raise ValueError(
'Must provide image and label indices to extract data from csv. csv_label_col and csv_image_col arguments not provided during DataLoader initialization.')
if self.target_emotion_map is None:
raise ValueError('Must supply target_labels when loading data from csv.')
if self.image_dimensions is None:
raise ValueError('Must provide image dimensions when loading data from csv.')
# check received valid csv file
with open(self.datapath) as csv_file:
# check image and label indices are valid
reader = csv.reader(csv_file, delimiter=',', quotechar='"')
num_cols = len(next(reader))
if self.csv_image_col >= num_cols:
raise (ValueError('Csv column index for image is out of range: %i' % self.csv_image_col))
if self.csv_label_col >= num_cols:
raise (ValueError('Csv column index for label is out of range: %i' % self.csv_label_col))
# check image dimensions
pixels = next(reader)[self.csv_image_col].split(' ')
if len(pixels) != self.image_dimensions[0] * self.image_dimensions[1]:
raise ValueError('Invalid image dimensions: %s' % str(self.image_dimensions))
|
thoughtworksarts/EmoPy | EmoPy/src/data_loader.py | _DataLoader._load_dataset | python | def _load_dataset(self, images, labels, emotion_index_map):
train_images, test_images, train_labels, test_labels = train_test_split(images, labels, test_size=self.validation_split, random_state=42, stratify=labels)
dataset = Dataset(train_images, test_images, train_labels, test_labels, emotion_index_map, self.time_delay)
return dataset | Loads Dataset object with images, labels, and other data.
:param images: numpy array of image data
:param labels: numpy array of one-hot vector labels
:param emotion_index_map: map linking string/integer emotion class to integer index used in labels vectors
:return: Dataset object containing image and label data. | train | https://github.com/thoughtworksarts/EmoPy/blob/a0ab97b3719ebe0a9de9bfc5adae5e46c9b77fd7/EmoPy/src/data_loader.py#L27-L39 | null | class _DataLoader(object):
"""
Abstract class to load image and label data from a directory or csv file.
Methods load_data and _validate_arguments must be implemented by subclasses.
"""
def __init__(self, validation_split, time_delay=None):
self.validation_split = validation_split
self.time_delay = time_delay
self._validate_arguments()
def load_data(self):
"""
Loads image and label data from path specified in subclass initialization.
:return: Dataset object containing image and label data.
"""
raise NotImplementedError("Class %s doesn't implement load_data()" % self.__class__.__name__)
def _validate_arguments(self):
if self.out_channels not in (1, 3):
raise ValueError("Out put channel should be either 3(RGB) or 1(Grey) but got {channels}".format(channels=self.out_channels))
if self.validation_split < 0 or self.validation_split > 1:
raise ValueError("validation_split must be a float between 0 and 1")
raise NotImplementedError("Class %s doesn't implement _validate_arguments()" % self.__class__.__name__)
def _reshape(self, image):
if image.ndim == 2:
image = np.expand_dims(image, axis=2)
channels = image.shape[-1]
if channels == 3 and self.out_channels == 1:
gray = cv2.cvtColor(image, code=cv2.COLOR_BGR2GRAY)
return np.expand_dims(gray, axis=2)
if channels == 1 and self.out_channels == 3:
return np.repeat(image, repeats=3, axis=2)
return image
def _check_data_not_empty(self, images):
if len(images) == 0:
raise AssertionError('csv file does not contain samples of specified labels: %s' % str(self.label_map.keys()))
def _vectorize_labels(self, label_index_map, labels):
label_values = list()
label_count = len(label_index_map.keys())
for label in labels:
label_value = [0] * label_count
label_value[label_index_map[label]] = 1.0
label_values.append(label_value)
return label_values
|
thoughtworksarts/EmoPy | EmoPy/src/directory_data_loader.py | DirectoryDataLoader.load_data | python | def load_data(self):
images = list()
labels = list()
emotion_index_map = dict()
label_directories = [dir for dir in os.listdir(self.datapath) if not dir.startswith('.')]
for label_directory in label_directories:
if self.target_emotion_map:
if label_directory not in self.target_emotion_map.keys(): continue
self._add_new_label_to_map(label_directory, emotion_index_map)
label_directory_path = self.datapath + '/' + label_directory
if self.time_delay:
self._load_series_for_single_emotion_directory(images, label_directory, label_directory_path, labels)
else:
image_files = [image_file for image_file in os.listdir(label_directory_path) if not image_file.startswith('.')]
self._load_images_from_directory_to_array(image_files, images, label_directory, label_directory_path, labels)
vectorized_labels = self._vectorize_labels(emotion_index_map, labels)
self._check_data_not_empty(images)
return self._load_dataset(np.array(images), np.array(vectorized_labels), emotion_index_map) | Loads image and label data from specified directory path.
:return: Dataset object containing image and label data. | train | https://github.com/thoughtworksarts/EmoPy/blob/a0ab97b3719ebe0a9de9bfc5adae5e46c9b77fd7/EmoPy/src/directory_data_loader.py#L23-L47 | [
"def _load_dataset(self, images, labels, emotion_index_map):\n \"\"\"\n Loads Dataset object with images, labels, and other data.\n\n :param images: numpy array of image data\n :param labels: numpy array of one-hot vector labels\n :param emotion_index_map: map linking string/integer emotion class to integer index used in labels vectors\n\n :return: Dataset object containing image and label data.\n \"\"\"\n train_images, test_images, train_labels, test_labels = train_test_split(images, labels, test_size=self.validation_split, random_state=42, stratify=labels)\n dataset = Dataset(train_images, test_images, train_labels, test_labels, emotion_index_map, self.time_delay)\n return dataset\n",
"def _check_data_not_empty(self, images):\n if len(images) == 0:\n raise AssertionError('csv file does not contain samples of specified labels: %s' % str(self.label_map.keys()))\n",
"def _vectorize_labels(self, label_index_map, labels):\n label_values = list()\n label_count = len(label_index_map.keys())\n for label in labels:\n label_value = [0] * label_count\n label_value[label_index_map[label]] = 1.0\n label_values.append(label_value)\n return label_values\n",
"def _load_series_for_single_emotion_directory(self, images, label_directory, label_directory_path, labels):\n series_directories = [series_directory for series_directory in os.listdir(label_directory_path) if not series_directory.startswith('.')]\n for series_directory in series_directories:\n series_directory_path = label_directory_path + '/' + series_directory\n self._check_series_directory_size(series_directory_path)\n new_image_series = list()\n image_files = [image_file for image_file in os.listdir(series_directory_path) if not image_file.startswith('.')]\n self._load_images_from_directory_to_array(image_files, new_image_series, label_directory, series_directory_path, labels)\n new_image_series = self._apply_time_delay_to_series(images, new_image_series)\n images.append(new_image_series)\n labels.append(label_directory)\n",
"def _load_images_from_directory_to_array(self, image_files, images, label, directory_path, labels):\n for image_file in image_files:\n images.append(self._load_image(image_file, directory_path))\n if not self.time_delay:\n labels.append(label)\n",
"def _add_new_label_to_map(self, label_directory, label_index_map):\n new_label_index = len(label_index_map.keys())\n label_index_map[label_directory] = new_label_index\n"
] | class DirectoryDataLoader(_DataLoader):
"""
DataLoader subclass loads image and label data from directory.
:param target_emotion_map: Optional dict of target emotion label values/strings and their corresponding label vector index values.
:param datapath: Location of image dataset.
:param validation_split: Float percentage of data to use as validation set.
:param out_channels: Number of image channels.
:param time_delay: Number of images to load from each time series sample. Parameter must be provided to load time series data and unspecified if using static image data.
"""
def __init__(self, target_emotion_map=None, datapath=None, validation_split=0.2, out_channels=1, time_delay=None):
self.datapath = datapath
self.target_emotion_map = target_emotion_map
self.out_channels = out_channels
super().__init__(validation_split, time_delay)
def _load_series_for_single_emotion_directory(self, images, label_directory, label_directory_path, labels):
series_directories = [series_directory for series_directory in os.listdir(label_directory_path) if not series_directory.startswith('.')]
for series_directory in series_directories:
series_directory_path = label_directory_path + '/' + series_directory
self._check_series_directory_size(series_directory_path)
new_image_series = list()
image_files = [image_file for image_file in os.listdir(series_directory_path) if not image_file.startswith('.')]
self._load_images_from_directory_to_array(image_files, new_image_series, label_directory, series_directory_path, labels)
new_image_series = self._apply_time_delay_to_series(images, new_image_series)
images.append(new_image_series)
labels.append(label_directory)
def _apply_time_delay_to_series(self, images, new_image_series):
start_idx = len(new_image_series) - self.time_delay
end_idx = len(new_image_series)
return new_image_series[start_idx:end_idx]
def _load_images_from_directory_to_array(self, image_files, images, label, directory_path, labels):
for image_file in image_files:
images.append(self._load_image(image_file, directory_path))
if not self.time_delay:
labels.append(label)
def _add_new_label_to_map(self, label_directory, label_index_map):
new_label_index = len(label_index_map.keys())
label_index_map[label_directory] = new_label_index
def _load_image(self, image_file, directory_path):
image_file_path = directory_path + '/' + image_file
image = cv2.imread(image_file_path)
image = self._reshape(image)
return image
def _validate_arguments(self):
self._check_directory_arguments()
def _check_directory_arguments(self):
"""
Validates arguments for loading from directories, including static image and time series directories.
"""
if not os.path.isdir(self.datapath):
raise (NotADirectoryError('Directory does not exist: %s' % self.datapath))
if self.time_delay:
if self.time_delay < 1:
raise ValueError('Time step argument must be greater than 0, but gave: %i' % self.time_delay)
if not isinstance(self.time_delay, int):
raise ValueError('Time step argument must be an integer, but gave: %s' % str(self.time_delay))
def _check_series_directory_size(self, series_directory_path):
image_files = [image_file for image_file in os.listdir(series_directory_path) if not image_file.startswith('.')]
if len(image_files) < self.time_delay:
raise ValueError('Time series sample found in path %s does not contain enough images for %s time steps.' % (
series_directory_path, str(self.time_delay)))
|
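A runnable usage sketch for DirectoryDataLoader.load_data, assuming OpenCV, NumPy and an installed EmoPy package (the import path follows the repository layout and is an assumption); it builds the one-subdirectory-per-emotion tree the loader expects and then loads it:

import os
import cv2
import numpy as np
from EmoPy.src.directory_data_loader import DirectoryDataLoader  # assumed import path

root = 'tmp_emotions'  # hypothetical scratch directory
for label in ('anger', 'happiness'):
    os.makedirs(os.path.join(root, label), exist_ok=True)
    for i in range(5):
        img = np.random.randint(0, 255, (48, 48, 3), dtype=np.uint8)
        cv2.imwrite(os.path.join(root, label, 'img%d.png' % i), img)

# ten images across two labels; the default out_channels=1 converts BGR to grayscale
dataset = DirectoryDataLoader(datapath=root, validation_split=0.2).load_data()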
thoughtworksarts/EmoPy | EmoPy/src/directory_data_loader.py | DirectoryDataLoader._check_directory_arguments | python | def _check_directory_arguments(self):
if not os.path.isdir(self.datapath):
raise (NotADirectoryError('Directory does not exist: %s' % self.datapath))
if self.time_delay:
if self.time_delay < 1:
raise ValueError('Time step argument must be greater than 0, but gave: %i' % self.time_delay)
if not isinstance(self.time_delay, int):
raise ValueError('Time step argument must be an integer, but gave: %s' % str(self.time_delay)) | Validates arguments for loading from directories, including static image and time series directories. | train | https://github.com/thoughtworksarts/EmoPy/blob/a0ab97b3719ebe0a9de9bfc5adae5e46c9b77fd7/EmoPy/src/directory_data_loader.py#L86-L96 | null | class DirectoryDataLoader(_DataLoader):
"""
DataLoader subclass loads image and label data from directory.
:param target_emotion_map: Optional dict of target emotion label values/strings and their corresponding label vector index values.
:param datapath: Location of image dataset.
:param validation_split: Float percentage of data to use as validation set.
:param out_channels: Number of image channels.
:param time_delay: Number of images to load from each time series sample. Parameter must be provided to load time series data and unspecified if using static image data.
"""
def __init__(self, target_emotion_map=None, datapath=None, validation_split=0.2, out_channels=1, time_delay=None):
self.datapath = datapath
self.target_emotion_map = target_emotion_map
self.out_channels = out_channels
super().__init__(validation_split, time_delay)
def load_data(self):
"""
Loads image and label data from specified directory path.
:return: Dataset object containing image and label data.
"""
images = list()
labels = list()
emotion_index_map = dict()
label_directories = [dir for dir in os.listdir(self.datapath) if not dir.startswith('.')]
for label_directory in label_directories:
if self.target_emotion_map:
if label_directory not in self.target_emotion_map.keys(): continue
self._add_new_label_to_map(label_directory, emotion_index_map)
label_directory_path = self.datapath + '/' + label_directory
if self.time_delay:
self._load_series_for_single_emotion_directory(images, label_directory, label_directory_path, labels)
else:
image_files = [image_file for image_file in os.listdir(label_directory_path) if not image_file.startswith('.')]
self._load_images_from_directory_to_array(image_files, images, label_directory, label_directory_path, labels)
vectorized_labels = self._vectorize_labels(emotion_index_map, labels)
self._check_data_not_empty(images)
return self._load_dataset(np.array(images), np.array(vectorized_labels), emotion_index_map)
def _load_series_for_single_emotion_directory(self, images, label_directory, label_directory_path, labels):
series_directories = [series_directory for series_directory in os.listdir(label_directory_path) if not series_directory.startswith('.')]
for series_directory in series_directories:
series_directory_path = label_directory_path + '/' + series_directory
self._check_series_directory_size(series_directory_path)
new_image_series = list()
image_files = [image_file for image_file in os.listdir(series_directory_path) if not image_file.startswith('.')]
self._load_images_from_directory_to_array(image_files, new_image_series, label_directory, series_directory_path, labels)
new_image_series = self._apply_time_delay_to_series(images, new_image_series)
images.append(new_image_series)
labels.append(label_directory)
def _apply_time_delay_to_series(self, images, new_image_series):
start_idx = len(new_image_series) - self.time_delay
end_idx = len(new_image_series)
return new_image_series[start_idx:end_idx]
def _load_images_from_directory_to_array(self, image_files, images, label, directory_path, labels):
for image_file in image_files:
images.append(self._load_image(image_file, directory_path))
if not self.time_delay:
labels.append(label)
def _add_new_label_to_map(self, label_directory, label_index_map):
new_label_index = len(label_index_map.keys())
label_index_map[label_directory] = new_label_index
def _load_image(self, image_file, directory_path):
image_file_path = directory_path + '/' + image_file
image = cv2.imread(image_file_path)
image = self._reshape(image)
return image
def _validate_arguments(self):
self._check_directory_arguments()
def _check_series_directory_size(self, series_directory_path):
image_files = [image_file for image_file in os.listdir(series_directory_path) if not image_file.startswith('.')]
if len(image_files) < self.time_delay:
raise ValueError('Time series sample found in path %s does not contain enough images for %s time steps.' % (
series_directory_path, str(self.time_delay)))
|
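A short sketch of the failure modes _check_directory_arguments guards against (paths hypothetical; import path assumed). Note that time_delay=0 slips past unchecked, because the guard "if self.time_delay:" treats 0 as falsy, so only negative or non-integer values actually raise:

from EmoPy.src.directory_data_loader import DirectoryDataLoader  # assumed import path

try:
    DirectoryDataLoader(datapath='no/such/dir')
except NotADirectoryError as err:
    print(err)  # Directory does not exist: no/such/dir

try:
    DirectoryDataLoader(datapath='.', time_delay=-1)
except ValueError as err:
    print(err)  # Time step argument must be greater than 0, but gave: -1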
thoughtworksarts/EmoPy | EmoPy/src/fermodel.py | FERModel.predict | python | def predict(self, image_file):
image = misc.imread(image_file)
gray_image = image
if len(image.shape) > 2:
gray_image = cv2.cvtColor(image, code=cv2.COLOR_BGR2GRAY)
resized_image = cv2.resize(gray_image, self.target_dimensions, interpolation=cv2.INTER_LINEAR)
final_image = np.array([np.array([resized_image]).reshape(list(self.target_dimensions)+[self.channels])])
prediction = self.model.predict(final_image)
# Return the dominant expression
dominant_expression = self._print_prediction(prediction[0])
return dominant_expression | Predicts discrete emotion for given image.
:param image_file: image file (jpg or png format) | train | https://github.com/thoughtworksarts/EmoPy/blob/a0ab97b3719ebe0a9de9bfc5adae5e46c9b77fd7/EmoPy/src/fermodel.py#L47-L62 | [
"def _print_prediction(self, prediction):\n normalized_prediction = [x/sum(prediction) for x in prediction]\n for emotion in self.emotion_map.keys():\n print('%s: %.1f%%' % (emotion, normalized_prediction[self.emotion_map[emotion]]*100))\n dominant_emotion_index = np.argmax(prediction)\n for emotion in self.emotion_map.keys():\n if dominant_emotion_index == self.emotion_map[emotion]:\n dominant_emotion = emotion\n break\n # print('Dominant emotion: %s' % dominant_emotion)\n # print()\n return dominant_emotion\n"
] | class FERModel:
"""
Pretrained deep learning model for facial expression recognition.
:param target_emotions: set of target emotions to classify
:param verbose: if true, will print out extra process information
**Example**::
from fermodel import FERModel
target_emotions = ['happiness', 'disgust', 'surprise']
model = FERModel(target_emotions, verbose=True)
"""
POSSIBLE_EMOTIONS = ['anger', 'fear', 'calm', 'sadness', 'happiness', 'surprise', 'disgust']
def __init__(self, target_emotions, verbose=False):
self.target_emotions = target_emotions
self.emotion_index_map = {
'anger': 0,
'disgust': 1,
'fear': 2,
'happiness': 3,
'sadness': 4,
'surprise': 5,
'calm': 6
}
self._check_emotion_set_is_supported()
self.verbose = verbose
self.target_dimensions = (48, 48)
self.channels = 1
self._initialize_model()
def _initialize_model(self):
print('Initializing FER model parameters for target emotions: %s' % self.target_emotions)
self.model, self.emotion_map = self._choose_model_from_target_emotions()
def _check_emotion_set_is_supported(self):
"""
Validates set of user-supplied target emotions.
"""
supported_emotion_subsets = [
set(['anger', 'fear', 'surprise', 'calm']),
set(['happiness', 'disgust', 'surprise']),
set(['anger', 'fear', 'surprise']),
set(['anger', 'fear', 'calm']),
set(['anger', 'happiness', 'calm']),
set(['anger', 'fear', 'disgust']),
set(['calm', 'disgust', 'surprise']),
set(['sadness', 'disgust', 'surprise']),
set(['anger', 'happiness'])
]
if set(self.target_emotions) not in supported_emotion_subsets:
error_string = 'Target emotions must be a supported subset. '
error_string += 'Choose from one of the following emotion subsets: \n'
possible_subset_string = ''
for emotion_set in supported_emotion_subsets:
possible_subset_string += ', '.join(emotion_set)
possible_subset_string += '\n'
error_string += possible_subset_string
raise ValueError(error_string)
def _choose_model_from_target_emotions(self):
"""
Initializes pre-trained deep learning model for the set of target emotions supplied by user.
"""
model_indices = [self.emotion_index_map[emotion] for emotion in self.target_emotions]
sorted_indices = [str(idx) for idx in sorted(model_indices)]
model_suffix = ''.join(sorted_indices)
#Modify the path to choose the model file and the emotion map that you want to use
model_file = 'models/conv_model_%s.hdf5' % model_suffix
emotion_map_file = 'models/conv_emotion_map_%s.json' % model_suffix
emotion_map = json.loads(open(resource_filename('EmoPy', emotion_map_file)).read())
return load_model(resource_filename('EmoPy', model_file)), emotion_map
def _print_prediction(self, prediction):
normalized_prediction = [x/sum(prediction) for x in prediction]
for emotion in self.emotion_map.keys():
print('%s: %.1f%%' % (emotion, normalized_prediction[self.emotion_map[emotion]]*100))
dominant_emotion_index = np.argmax(prediction)
for emotion in self.emotion_map.keys():
if dominant_emotion_index == self.emotion_map[emotion]:
dominant_emotion = emotion
break
# print('Dominant emotion: %s' % dominant_emotion)
# print()
return dominant_emotion
|
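A usage sketch for FERModel.predict, mirroring the example in the class docstring (image path hypothetical; import path assumed). One caveat: scipy.misc.imread, which predict calls, was removed in SciPy 1.2, so running this record as-is assumes an older SciPy or a swap to imageio.imread:

from EmoPy.src.fermodel import FERModel  # assumed import path

model = FERModel(['happiness', 'disgust', 'surprise'], verbose=True)
dominant = model.predict('face.jpg')  # hypothetical image; prints per-emotion percentages
print('Dominant emotion: %s' % dominant)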
thoughtworksarts/EmoPy | EmoPy/src/fermodel.py | FERModel._check_emotion_set_is_supported | python | def _check_emotion_set_is_supported(self):
supported_emotion_subsets = [
set(['anger', 'fear', 'surprise', 'calm']),
set(['happiness', 'disgust', 'surprise']),
set(['anger', 'fear', 'surprise']),
set(['anger', 'fear', 'calm']),
set(['anger', 'happiness', 'calm']),
set(['anger', 'fear', 'disgust']),
set(['calm', 'disgust', 'surprise']),
set(['sadness', 'disgust', 'surprise']),
set(['anger', 'happiness'])
]
if set(self.target_emotions) not in supported_emotion_subsets:
error_string = 'Target emotions must be a supported subset. '
error_string += 'Choose from one of the following emotion subsets: \n'
possible_subset_string = ''
for emotion_set in supported_emotion_subsets:
possible_subset_string += ', '.join(emotion_set)
possible_subset_string += '\n'
error_string += possible_subset_string
raise ValueError(error_string) | Validates set of user-supplied target emotions. | train | https://github.com/thoughtworksarts/EmoPy/blob/a0ab97b3719ebe0a9de9bfc5adae5e46c9b77fd7/EmoPy/src/fermodel.py#L64-L87 | null | class FERModel:
"""
Pretrained deep learning model for facial expression recognition.
:param target_emotions: set of target emotions to classify
:param verbose: if true, will print out extra process information
**Example**::
from fermodel import FERModel
target_emotions = ['happiness', 'disgust', 'surprise']
model = FERModel(target_emotions, verbose=True)
"""
POSSIBLE_EMOTIONS = ['anger', 'fear', 'calm', 'sadness', 'happiness', 'surprise', 'disgust']
def __init__(self, target_emotions, verbose=False):
self.target_emotions = target_emotions
self.emotion_index_map = {
'anger': 0,
'disgust': 1,
'fear': 2,
'happiness': 3,
'sadness': 4,
'surprise': 5,
'calm': 6
}
self._check_emotion_set_is_supported()
self.verbose = verbose
self.target_dimensions = (48, 48)
self.channels = 1
self._initialize_model()
def _initialize_model(self):
print('Initializing FER model parameters for target emotions: %s' % self.target_emotions)
self.model, self.emotion_map = self._choose_model_from_target_emotions()
def predict(self, image_file):
"""
Predicts discrete emotion for given image.
:param image_file: image file (jpg or png format)
"""
image = misc.imread(image_file)
gray_image = image
if len(image.shape) > 2:
gray_image = cv2.cvtColor(image, code=cv2.COLOR_BGR2GRAY)
resized_image = cv2.resize(gray_image, self.target_dimensions, interpolation=cv2.INTER_LINEAR)
final_image = np.array([np.array([resized_image]).reshape(list(self.target_dimensions)+[self.channels])])
prediction = self.model.predict(final_image)
# Return the dominant expression
dominant_expression = self._print_prediction(prediction[0])
return dominant_expression
def _choose_model_from_target_emotions(self):
"""
Initializes pre-trained deep learning model for the set of target emotions supplied by user.
"""
model_indices = [self.emotion_index_map[emotion] for emotion in self.target_emotions]
sorted_indices = [str(idx) for idx in sorted(model_indices)]
model_suffix = ''.join(sorted_indices)
#Modify the path to choose the model file and the emotion map that you want to use
model_file = 'models/conv_model_%s.hdf5' % model_suffix
emotion_map_file = 'models/conv_emotion_map_%s.json' % model_suffix
emotion_map = json.loads(open(resource_filename('EmoPy', emotion_map_file)).read())
return load_model(resource_filename('EmoPy', model_file)), emotion_map
def _print_prediction(self, prediction):
normalized_prediction = [x/sum(prediction) for x in prediction]
for emotion in self.emotion_map.keys():
print('%s: %.1f%%' % (emotion, normalized_prediction[self.emotion_map[emotion]]*100))
dominant_emotion_index = np.argmax(prediction)
for emotion in self.emotion_map.keys():
if dominant_emotion_index == self.emotion_map[emotion]:
dominant_emotion = emotion
break
# print('Dominant emotion: %s' % dominant_emotion)
# print()
return dominant_emotion
|
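A sketch of the rejection path in _check_emotion_set_is_supported: any combination outside the supported subsets raises ValueError at construction time (import path assumed):

from EmoPy.src.fermodel import FERModel  # assumed import path

try:
    FERModel(['happiness', 'sadness'])  # not among the supported subsets
except ValueError as err:
    print(err)  # lists every supported emotion subset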
thoughtworksarts/EmoPy | EmoPy/src/fermodel.py | FERModel._choose_model_from_target_emotions | python | def _choose_model_from_target_emotions(self):
model_indices = [self.emotion_index_map[emotion] for emotion in self.target_emotions]
sorted_indices = [str(idx) for idx in sorted(model_indices)]
model_suffix = ''.join(sorted_indices)
#Modify the path to choose the model file and the emotion map that you want to use
model_file = 'models/conv_model_%s.hdf5' % model_suffix
emotion_map_file = 'models/conv_emotion_map_%s.json' % model_suffix
emotion_map = json.loads(open(resource_filename('EmoPy', emotion_map_file)).read())
return load_model(resource_filename('EmoPy', model_file)), emotion_map | Initializes pre-trained deep learning model for the set of target emotions supplied by user. | train | https://github.com/thoughtworksarts/EmoPy/blob/a0ab97b3719ebe0a9de9bfc5adae5e46c9b77fd7/EmoPy/src/fermodel.py#L89-L100 | null | class FERModel:
"""
Pretrained deep learning model for facial expression recognition.
:param target_emotions: set of target emotions to classify
:param verbose: if true, will print out extra process information
**Example**::
from fermodel import FERModel
target_emotions = ['happiness', 'disgust', 'surprise']
model = FERModel(target_emotions, verbose=True)
"""
POSSIBLE_EMOTIONS = ['anger', 'fear', 'calm', 'sadness', 'happiness', 'surprise', 'disgust']
def __init__(self, target_emotions, verbose=False):
self.target_emotions = target_emotions
self.emotion_index_map = {
'anger': 0,
'disgust': 1,
'fear': 2,
'happiness': 3,
'sadness': 4,
'surprise': 5,
'calm': 6
}
self._check_emotion_set_is_supported()
self.verbose = verbose
self.target_dimensions = (48, 48)
self.channels = 1
self._initialize_model()
def _initialize_model(self):
print('Initializing FER model parameters for target emotions: %s' % self.target_emotions)
self.model, self.emotion_map = self._choose_model_from_target_emotions()
def predict(self, image_file):
"""
Predicts discrete emotion for given image.
:param image_file: image file (jpg or png format)
"""
image = misc.imread(image_file)
gray_image = image
if len(image.shape) > 2:
gray_image = cv2.cvtColor(image, code=cv2.COLOR_BGR2GRAY)
resized_image = cv2.resize(gray_image, self.target_dimensions, interpolation=cv2.INTER_LINEAR)
final_image = np.array([np.array([resized_image]).reshape(list(self.target_dimensions)+[self.channels])])
prediction = self.model.predict(final_image)
# Return the dominant expression
dominant_expression = self._print_prediction(prediction[0])
return dominant_expression
def _check_emotion_set_is_supported(self):
"""
Validates set of user-supplied target emotions.
"""
supported_emotion_subsets = [
set(['anger', 'fear', 'surprise', 'calm']),
set(['happiness', 'disgust', 'surprise']),
set(['anger', 'fear', 'surprise']),
set(['anger', 'fear', 'calm']),
set(['anger', 'happiness', 'calm']),
set(['anger', 'fear', 'disgust']),
set(['calm', 'disgust', 'surprise']),
set(['sadness', 'disgust', 'surprise']),
set(['anger', 'happiness'])
]
if set(self.target_emotions) not in supported_emotion_subsets:
error_string = 'Target emotions must be a supported subset. '
error_string += 'Choose from one of the following emotion subsets: \n'
possible_subset_string = ''
for emotion_set in supported_emotion_subsets:
possible_subset_string += ', '.join(emotion_set)
possible_subset_string += '\n'
error_string += possible_subset_string
raise ValueError(error_string)
def _print_prediction(self, prediction):
normalized_prediction = [x/sum(prediction) for x in prediction]
for emotion in self.emotion_map.keys():
print('%s: %.1f%%' % (emotion, normalized_prediction[self.emotion_map[emotion]]*100))
dominant_emotion_index = np.argmax(prediction)
for emotion in self.emotion_map.keys():
if dominant_emotion_index == self.emotion_map[emotion]:
dominant_emotion = emotion
break
# print('Dominant emotion: %s' % dominant_emotion)
# print()
return dominant_emotion
|
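The file selection in _choose_model_from_target_emotions reduces to a sorted-digit suffix; a self-contained sketch of just that step, using the index map from __init__:

emotion_index_map = {'anger': 0, 'disgust': 1, 'fear': 2, 'happiness': 3,
                     'sadness': 4, 'surprise': 5, 'calm': 6}
target_emotions = ['happiness', 'disgust', 'surprise']

sorted_indices = [str(idx) for idx in sorted(emotion_index_map[e] for e in target_emotions)]
suffix = ''.join(sorted_indices)
print('models/conv_model_%s.hdf5' % suffix)        # models/conv_model_135.hdf5
print('models/conv_emotion_map_%s.json' % suffix)  # models/conv_emotion_map_135.json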
thoughtworksarts/EmoPy | EmoPy/library/image.py | apply_transform | python | def apply_transform(sample,
transform_matrix,
channel_axis=0,
fill_mode='nearest',
cval=0.):
if sample.ndim == 4:
channel_axis = channel_axis - 1
transformed_frames = [transform(frame, transform_matrix, channel_axis, fill_mode, cval) for frame in sample]
return np.stack(transformed_frames, axis=0)
if sample.ndim == 3:
return transform(sample, transform_matrix, channel_axis, fill_mode, cval) | Apply the image transformation specified by a matrix.
# Arguments
sample: 3D or 4D numpy array, single sample.
transform_matrix: Numpy array specifying the geometric transformation.
channel_axis: Index of axis for channels in the input tensor.
fill_mode: Points outside the boundaries of the input
are filled according to the given mode
(one of `{'constant', 'nearest', 'reflect', 'wrap'}`).
cval: Value used for points outside the boundaries
of the input if `mode='constant'`.
# Returns
The transformed version of the input. | train | https://github.com/thoughtworksarts/EmoPy/blob/a0ab97b3719ebe0a9de9bfc5adae5e46c9b77fd7/EmoPy/library/image.py#L75-L101 | [
"def transform(frame, transform_matrix, channel_axis, fill_mode, cval):\n final_affine_matrix = transform_matrix[:2, :2]\n final_offset = transform_matrix[:2, 2]\n\n frame = np.rollaxis(frame, channel_axis, 0)\n channel_images = [ndi.interpolation.affine_transform(\n f_channel,\n final_affine_matrix,\n final_offset,\n order=0,\n mode=fill_mode,\n cval=cval) for f_channel in frame]\n frame = np.stack(channel_images, axis=0)\n return np.rollaxis(frame, 0, channel_axis + 1)\n"
] | """Fairly basic set of tools for real-time data augmentation on image data.
Can easily be extended to include new transformations,
new preprocessing methods, etc...
"""
from __future__ import absolute_import
from __future__ import print_function
import threading
import warnings
import cv2
import numpy as np
import scipy.ndimage as ndi
from keras import backend as K
from keras.utils.data_utils import Sequence
from scipy import linalg
from six.moves import range
try:
from PIL import Image as pil_image
except ImportError:
pil_image = None
if pil_image is not None:
_PIL_INTERPOLATION_METHODS = {
'nearest': pil_image.NEAREST,
'bilinear': pil_image.BILINEAR,
'bicubic': pil_image.BICUBIC,
}
# These methods were only introduced in version 3.4.0 (2016).
if hasattr(pil_image, 'HAMMING'):
_PIL_INTERPOLATION_METHODS['hamming'] = pil_image.HAMMING
if hasattr(pil_image, 'BOX'):
_PIL_INTERPOLATION_METHODS['box'] = pil_image.BOX
# This method is new in version 1.1.3 (2013).
if hasattr(pil_image, 'LANCZOS'):
_PIL_INTERPOLATION_METHODS['lanczos'] = pil_image.LANCZOS
def random_channel_shift(x, intensity, channel_axis=0):
x = np.rollaxis(x, channel_axis, 0)
min_x, max_x = np.min(x), np.max(x)
channel_images = [np.clip(x_channel + np.random.uniform(-intensity, intensity), min_x, max_x)
for x_channel in x]
x = np.stack(channel_images, axis=0)
x = np.rollaxis(x, 0, channel_axis + 1)
return x
def transform_matrix_offset_center(matrix, x, y):
o_x = float(x) / 2 + 0.5
o_y = float(y) / 2 + 0.5
offset_matrix = np.array([[1, 0, o_x], [0, 1, o_y], [0, 0, 1]])
reset_matrix = np.array([[1, 0, -o_x], [0, 1, -o_y], [0, 0, 1]])
transform_matrix = np.dot(np.dot(offset_matrix, matrix), reset_matrix)
return transform_matrix
def transform(frame, transform_matrix, channel_axis, fill_mode, cval):
final_affine_matrix = transform_matrix[:2, :2]
final_offset = transform_matrix[:2, 2]
frame = np.rollaxis(frame, channel_axis, 0)
channel_images = [ndi.interpolation.affine_transform(
f_channel,
final_affine_matrix,
final_offset,
order=0,
mode=fill_mode,
cval=cval) for f_channel in frame]
frame = np.stack(channel_images, axis=0)
return np.rollaxis(frame, 0, channel_axis + 1)
def flip_axis(x, axis):
x = np.asarray(x).swapaxes(axis, 0)
x = x[::-1, ...]
x = x.swapaxes(0, axis)
return x
def resize(image, target_dimensions):
channels = image.shape[-1]
return cv2.resize(image, target_dimensions, interpolation=cv2.INTER_CUBIC)\
.reshape(list(target_dimensions) + [channels])
def resize_sample(sample, target_dimensions=None):
if target_dimensions is None:
return sample
if sample.ndim == 4:
resized_images = [resize(frame, target_dimensions) for frame in sample]
return np.stack(resized_images, axis=0)
if sample.ndim == 3:
return resize(sample, target_dimensions)
class ImageDataGenerator(object):
"""Generate minibatches of image data with real-time data augmentation.
# Arguments
featurewise_center: set input mean to 0 over the dataset.
samplewise_center: set each sample mean to 0.
featurewise_std_normalization: divide inputs by std of the dataset.
samplewise_std_normalization: divide each input by its std.
zca_whitening: apply ZCA whitening.
zca_epsilon: epsilon for ZCA whitening. Default is 1e-6.
rotation_angle: degrees (0 to 180).
width_shift_range: fraction of total width.
height_shift_range: fraction of total height.
shear_range: shear intensity (shear angle in radians).
zoom_range: amount of zoom. if scalar z, zoom will be randomly picked
in the range [1-z, 1+z]. A sequence of two can be passed instead
to select this range.
channel_shift_range: shift range for each channel.
fill_mode: points outside the boundaries are filled according to the
given mode ('constant', 'nearest', 'reflect' or 'wrap'). Default
is 'nearest'.
cval: value used for points outside the boundaries when fill_mode is
'constant'. Default is 0.
horizontal_flip: whether to randomly flip images horizontally.
vertical_flip: whether to randomly flip images vertically.
rescale: rescaling factor. If None or 0, no rescaling is applied,
otherwise we multiply the data by the value provided. This is
applied after the `preprocessing_function` (if any provided)
but before any other transformation.
preprocessing_function: function that will be applied to each input.
The function will run before any other modification on it.
The function should take one argument:
one image (Numpy tensor with rank 3),
and should output a Numpy tensor with the same shape.
data_format: 'channels_first' or 'channels_last'. In 'channels_first' mode, the channels dimension
(the depth) is at index 1, in 'channels_last' mode it is at index 3.
It defaults to the `image_data_format` value found in your
Keras config file at `~/.keras/keras.json`.
If you never set it, then it will be "channels_last".
"""
def __init__(self,
featurewise_center=False,
samplewise_center=False,
featurewise_std_normalization=False,
samplewise_std_normalization=False,
zca_whitening=False,
zca_epsilon=1e-6,
rotation_angle=0.,
width_shift_range=0.,
height_shift_range=0.,
shear_range=0.,
zoom_range=0.,
channel_shift_range=0.,
fill_mode='nearest',
cval=0.,
horizontal_flip=False,
vertical_flip=False,
rescale=None,
preprocessing_function=None,
data_format="channels_last",
time_delay=None,
target_dimensions=None):
self.featurewise_center = featurewise_center
self.samplewise_center = samplewise_center
self.featurewise_std_normalization = featurewise_std_normalization
self.samplewise_std_normalization = samplewise_std_normalization
self.zca_whitening = zca_whitening
self.zca_epsilon = zca_epsilon
self.rotation_angle = rotation_angle
self.width_shift_range = width_shift_range
self.height_shift_range = height_shift_range
self.shear_range = shear_range
self.zoom_range = zoom_range
self.channel_shift_range = channel_shift_range
self.fill_mode = fill_mode
self.cval = cval
self.horizontal_flip = horizontal_flip
self.vertical_flip = vertical_flip
self.rescale = rescale
self.preprocessing_function = preprocessing_function
self.data_format = data_format
self.time_delay = time_delay
self.target_dimensions = target_dimensions
if data_format == 'channels_last':
if time_delay is None:
self.time_delay_axis, self.row_axis, self.col_axis, self.channel_axis = None, 1, 2, 3
else:
self.time_delay_axis, self.row_axis, self.col_axis, self.channel_axis = 1, 2, 3, 4
self.mean = None
self.std = None
self.principal_components = None
if np.isscalar(zoom_range):
self.zoom_range = [1 - zoom_range, 1 + zoom_range]
elif len(zoom_range) == 2:
self.zoom_range = [zoom_range[0], zoom_range[1]]
else:
raise ValueError('`zoom_range` should be a float or '
'a tuple or list of two floats. '
'Received arg: ', zoom_range)
def flow(self, x, y=None, batch_size=32, shuffle=True, seed=None,
save_to_dir=None, save_prefix='', save_format='png', target_dimension=None):
if target_dimension:
self.target_dimensions = target_dimension
return NumpyArrayIterator(
x, y, self,
batch_size=batch_size,
shuffle=shuffle,
seed=seed,
data_format=self.data_format,
save_to_dir=save_to_dir,
save_prefix=save_prefix,
save_format=save_format)
def standardize(self, x):
"""Apply the normalization configuration to a batch of inputs.
# Arguments
x: batch of inputs to be normalized.
# Returns
The inputs, normalized.
"""
if self.preprocessing_function:
x = self.preprocessing_function(x)
if self.rescale:
x *= self.rescale
if self.samplewise_center:
x -= np.mean(x, keepdims=True)
if self.samplewise_std_normalization:
x /= np.std(x, keepdims=True) + 1e-7
if self.featurewise_center:
if self.mean is not None:
x -= self.mean
else:
warnings.warn('This ImageDataGenerator specifies '
'`featurewise_center`, but it hasn\'t '
'been fit on any training data. Fit it '
'first by calling `.fit(numpy_data)`.')
if self.featurewise_std_normalization:
if self.std is not None:
x /= (self.std + 1e-7)
else:
warnings.warn('This ImageDataGenerator specifies '
'`featurewise_std_normalization`, but it hasn\'t '
'been fit on any training data. Fit it '
'first by calling `.fit(numpy_data)`.')
if self.zca_whitening:
if self.principal_components is not None:
flatx = np.reshape(x, (-1, np.prod(x.shape[-3:])))
whitex = np.dot(flatx, self.principal_components)
x = np.reshape(whitex, x.shape)
else:
warnings.warn('This ImageDataGenerator specifies '
'`zca_whitening`, but it hasn\'t '
'been fit on any training data. Fit it '
'first by calling `.fit(numpy_data)`.')
return x
def resize(self, sample):
return resize_sample(sample, self.target_dimensions)
def get_random_transform_matrix(self, sample, seed=None):
"""Randomly augment a single image tensor.
# Arguments
sample: 3D or 4D tensor, single sample.
seed: random seed.
# Returns
A randomly generated transformation Matrix.
"""
# sample is a single image (or image sequence), so it doesn't have the batch dimension at index 0
img_row_axis = self.row_axis - 1
img_col_axis = self.col_axis - 1
if seed is not None:
np.random.seed(seed)
# use composition of homographies
# to generate final transform that needs to be applied
if self.rotation_angle:
theta = np.pi / 180 * np.random.uniform(-self.rotation_angle, self.rotation_angle)
else:
theta = 0
if self.height_shift_range:
tx = np.random.uniform(-self.height_shift_range, self.height_shift_range) * sample.shape[img_row_axis]
else:
tx = 0
if self.width_shift_range:
ty = np.random.uniform(-self.width_shift_range, self.width_shift_range) * sample.shape[img_col_axis]
else:
ty = 0
if self.shear_range:
shear = np.random.uniform(-self.shear_range, self.shear_range)
else:
shear = 0
if self.zoom_range[0] == 1 and self.zoom_range[1] == 1:
zx, zy = 1, 1
else:
zx, zy = np.random.uniform(self.zoom_range[0], self.zoom_range[1], 2)
transform_matrix = np.identity(3)
if theta != 0:
rotation_matrix = np.array([[np.cos(theta), -np.sin(theta), 0],
[np.sin(theta), np.cos(theta), 0],
[0, 0, 1]])
transform_matrix = np.dot(transform_matrix, rotation_matrix)
if tx != 0 or ty != 0:
shift_matrix = np.array([[1, 0, tx],
[0, 1, ty],
[0, 0, 1]])
transform_matrix = np.dot(transform_matrix, shift_matrix)
if shear != 0:
shear_matrix = np.array([[1, -np.sin(shear), 0],
[0, np.cos(shear), 0],
[0, 0, 1]])
transform_matrix = np.dot(transform_matrix, shear_matrix)
if zx != 1 or zy != 1:
zoom_matrix = np.array([[zx, 0, 0],
[0, zy, 0],
[0, 0, 1]])
transform_matrix = np.dot(transform_matrix, zoom_matrix)
return transform_matrix
def random_transform(self, sample, seed=None):
"""Randomly augment a single image tensor.
# Arguments
sample: 3D or 4D tensor, single sample.
seed: random seed.
# Returns
A randomly transformed version of the input (same shape).
"""
img_row_axis = self.row_axis - 1
img_col_axis = self.col_axis - 1
img_channel_axis = self.channel_axis - 1
transform_matrix = self.get_random_transform_matrix(sample, seed)
if transform_matrix is not None:
h, w = sample.shape[img_row_axis], sample.shape[img_col_axis]
transform_matrix = transform_matrix_offset_center(transform_matrix, h, w)
sample = apply_transform(sample, transform_matrix, img_channel_axis,
fill_mode=self.fill_mode, cval=self.cval)
if self.channel_shift_range != 0:
sample = random_channel_shift(sample,
self.channel_shift_range,
img_channel_axis)
if self.horizontal_flip:
if np.random.random() < 0.5:
sample = flip_axis(sample, img_col_axis)
if self.vertical_flip:
if np.random.random() < 0.5:
sample = flip_axis(sample, img_row_axis)
return sample
def fit(self, x,
augment=False,
rounds=1,
seed=None):
"""Fits internal statistics to some sample data.
Required for featurewise_center, featurewise_std_normalization
and zca_whitening.
# Arguments
x: Numpy array, the data to fit on. Should have rank 5, or rank 4 when time_delay is None.
In case of grayscale data,
the channels axis should have value 1, and in case
of RGB data, it should have value 3.
augment: Whether to fit on randomly augmented samples
rounds: If `augment`,
how many augmentation passes to do over the data
seed: random seed.
# Raises
ValueError: in case of invalid input `x`.
"""
x = np.asarray(x, dtype=K.floatx())
if x.shape[self.channel_axis] not in {1, 3, 4}:
warnings.warn(
'Expected input to be images (as Numpy array) '
'following the data format convention "' + self.data_format + '" '
'(channels on axis ' + str(
self.channel_axis) + '), i.e. expected '
'either 1, 3 or 4 channels on axis ' + str(self.channel_axis) + '. '
'However, it was passed an array with shape ' + str(
x.shape) +
' (' + str(x.shape[self.channel_axis]) + ' channels).')
if seed is not None:
np.random.seed(seed)
x = np.copy(x)
if augment:
ax = np.zeros(tuple([rounds * x.shape[0]] + list(x.shape)[1:]), dtype=K.floatx())
for r in range(rounds):
for i in range(x.shape[0]):
ax[i + r * x.shape[0]] = self.random_transform(x[i])
x = ax
if self.featurewise_center:
self.mean = np.mean(x, axis=0)
x -= self.mean
if self.featurewise_std_normalization:
self.std = np.std(x, axis=0)
x /= (self.std + K.epsilon())
if self.zca_whitening:
flat_x = np.reshape(x, (x.shape[0], x.shape[1] * x.shape[2] * x.shape[3]))
sigma = np.dot(flat_x.T, flat_x) / flat_x.shape[0]
u, s, _ = linalg.svd(sigma)
self.principal_components = np.dot(np.dot(u, np.diag(1. / np.sqrt(s + self.zca_epsilon))), u.T)
class Iterator(Sequence):
"""Base class for image data iterators.
Every `Iterator` must implement the `_get_batches_of_transformed_samples`
method.
# Arguments
n: Integer, total number of samples in the dataset to loop over.
batch_size: Integer, size of a batch.
shuffle: Boolean, whether to shuffle the data between epochs.
seed: Random seeding for data shuffling.
"""
def __init__(self, n, batch_size, shuffle, seed):
self.n = n
self.batch_size = batch_size
self.seed = seed
self.shuffle = shuffle
self.batch_index = 0
self.total_batches_seen = 0
self.lock = threading.Lock()
self.index_array = None
self.index_generator = self._flow_index()
def _set_index_array(self):
self.index_array = np.arange(self.n)
if self.shuffle:
self.index_array = np.random.permutation(self.n)
def __getitem__(self, idx):
if idx >= len(self):
raise ValueError('Asked to retrieve element {idx}, '
'but the Sequence '
'has length {length}'.format(idx=idx,
length=len(self)))
if self.seed is not None:
np.random.seed(self.seed + self.total_batches_seen)
self.total_batches_seen += 1
if self.index_array is None:
self._set_index_array()
index_array = self.index_array[self.batch_size * idx:
self.batch_size * (idx + 1)]
return self._get_batches_of_transformed_samples(index_array)
def __len__(self):
return (self.n + self.batch_size - 1) // self.batch_size # round up
def on_epoch_end(self):
self._set_index_array()
def reset(self):
self.batch_index = 0
def _flow_index(self):
# Ensure self.batch_index is 0.
self.reset()
while 1:
if self.seed is not None:
np.random.seed(self.seed + self.total_batches_seen)
if self.batch_index == 0:
self._set_index_array()
current_index = (self.batch_index * self.batch_size) % self.n
if self.n > current_index + self.batch_size:
self.batch_index += 1
else:
self.batch_index = 0
self.total_batches_seen += 1
yield self.index_array[current_index:
current_index + self.batch_size]
def __iter__(self):
# Needed if we want to do something like:
# for x, y in data_gen.flow(...):
return self
def __next__(self, *args, **kwargs):
return self.next(*args, **kwargs)
def _get_batches_of_transformed_samples(self, index_array):
"""Gets a batch of transformed samples.
# Arguments
index_array: array of sample indices to include in batch.
# Returns
A batch of transformed samples.
"""
raise NotImplementedError
class NumpyArrayIterator(Iterator):
"""Iterator yielding data from a Numpy array.
# Arguments
x: Numpy array of input data.
y: Numpy array of targets data.
image_data_generator: Instance of `ImageDataGenerator`
to use for random transformations and normalization.
batch_size: Integer, size of a batch.
shuffle: Boolean, whether to shuffle the data between epochs.
seed: Random seed for data shuffling.
data_format: String, one of `channels_first`, `channels_last`.
save_to_dir: Optional directory where to save the pictures
being yielded, in a viewable format. This is useful
for visualizing the random transformations being
applied, for debugging purposes.
save_prefix: String prefix to use for saving sample
images (if `save_to_dir` is set).
save_format: Format to use for saving sample images
(if `save_to_dir` is set).
"""
def __init__(self, x, y, image_data_generator,
batch_size=32, shuffle=False, seed=None,
data_format=None,
save_to_dir=None, save_prefix='', save_format='png'):
if y is not None and len(x) != len(y):
raise ValueError('X (images tensor) and y (labels) '
'should have the same length. '
'Found: X.shape = %s, y.shape = %s' %
(np.asarray(x).shape, np.asarray(y).shape))
self.x = np.asarray(x, dtype=K.floatx())
channels_axis = image_data_generator.channel_axis
if self.x.shape[channels_axis] not in {1, 3, 4}:
warnings.warn('NumpyArrayIterator is set to use the '
'data format convention "' + data_format + '" '
'(channels on axis ' + str(
channels_axis) + '), i.e. expected '
'either 1, 3 or 4 channels on axis ' + str(channels_axis) + '. '
'However, it was passed an array with shape ' + str(
self.x.shape) +
' (' + str(self.x.shape[channels_axis]) + ' channels).')
if y is not None:
self.y = np.asarray(y)
else:
self.y = None
self.image_data_generator = image_data_generator
self.data_format = data_format
self.save_to_dir = save_to_dir
self.save_prefix = save_prefix
self.save_format = save_format
super(NumpyArrayIterator, self).__init__(x.shape[0], batch_size, shuffle, seed)
def _get_batches_of_transformed_samples(self, index_array):
batch_x = list()
for i, j in enumerate(index_array):
x = self.x[j]
x = self.image_data_generator.random_transform(x.astype(K.floatx()))
x = self.image_data_generator.standardize(x)
x = self.image_data_generator.resize(x)
batch_x.append(x)
batch_x = np.array(batch_x)
if self.y is None:
return batch_x
batch_y = self.y[index_array]
return batch_x, batch_y
def next(self):
"""For python 2.x.
# Returns
The next batch.
"""
# Keeps under lock only the mechanism which advances
# the indexing of each batch.
with self.lock:
index_array = next(self.index_generator)
# The transformation of images is not under thread lock
# so it can be done in parallel
return self._get_batches_of_transformed_samples(index_array)
|
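A minimal sketch of the center-offset affine pipeline that apply_transform builds on, assuming only NumPy and SciPy; it composes transform_matrix_offset_center's matrices by hand and applies them to one channel plane with scipy.ndimage.affine_transform (the current name for the ndi.interpolation.affine_transform call used above):

import numpy as np
import scipy.ndimage as ndi

theta = np.pi / 6  # 30-degree rotation
rotation = np.array([[np.cos(theta), -np.sin(theta), 0],
                     [np.sin(theta),  np.cos(theta), 0],
                     [0,              0,             1]])

h, w = 48, 48
o_x, o_y = h / 2 + 0.5, w / 2 + 0.5  # same offsets as transform_matrix_offset_center
offset = np.array([[1, 0, o_x], [0, 1, o_y], [0, 0, 1]])
reset = np.array([[1, 0, -o_x], [0, 1, -o_y], [0, 0, 1]])
matrix = offset @ rotation @ reset   # rotate about the image center

image = np.random.rand(h, w)         # a single channel plane
rotated = ndi.affine_transform(image, matrix[:2, :2], matrix[:2, 2],
                               order=0, mode='nearest')
print(rotated.shape)                 # (48, 48)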
thoughtworksarts/EmoPy | EmoPy/library/image.py | ImageDataGenerator.standardize | python | def standardize(self, x):
if self.preprocessing_function:
x = self.preprocessing_function(x)
if self.rescale:
x *= self.rescale
if self.samplewise_center:
x -= np.mean(x, keepdims=True)
if self.samplewise_std_normalization:
x /= np.std(x, keepdims=True) + 1e-7
if self.featurewise_center:
if self.mean is not None:
x -= self.mean
else:
warnings.warn('This ImageDataGenerator specifies '
'`featurewise_center`, but it hasn\'t '
'been fit on any training data. Fit it '
'first by calling `.fit(numpy_data)`.')
if self.featurewise_std_normalization:
if self.std is not None:
x /= (self.std + 1e-7)
else:
warnings.warn('This ImageDataGenerator specifies '
'`featurewise_std_normalization`, but it hasn\'t '
'been fit on any training data. Fit it '
'first by calling `.fit(numpy_data)`.')
if self.zca_whitening:
if self.principal_components is not None:
flatx = np.reshape(x, (-1, np.prod(x.shape[-3:])))
whitex = np.dot(flatx, self.principal_components)
x = np.reshape(whitex, x.shape)
else:
warnings.warn('This ImageDataGenerator specifies '
'`zca_whitening`, but it hasn\'t '
'been fit on any training data. Fit it '
'first by calling `.fit(numpy_data)`.')
return x | Apply the normalization configuration to a batch of inputs.
# Arguments
x: batch of inputs to be normalized.
# Returns
The inputs, normalized. | train | https://github.com/thoughtworksarts/EmoPy/blob/a0ab97b3719ebe0a9de9bfc5adae5e46c9b77fd7/EmoPy/library/image.py#L247-L291 | null | class ImageDataGenerator(object):
"""Generate minibatches of image data with real-time data augmentation.
# Arguments
featurewise_center: set input mean to 0 over the dataset.
samplewise_center: set each sample mean to 0.
featurewise_std_normalization: divide inputs by std of the dataset.
samplewise_std_normalization: divide each input by its std.
zca_whitening: apply ZCA whitening.
zca_epsilon: epsilon for ZCA whitening. Default is 1e-6.
rotation_angle: degrees (0 to 180).
width_shift_range: fraction of total width.
height_shift_range: fraction of total height.
shear_range: shear intensity (shear angle in radians).
zoom_range: amount of zoom. if scalar z, zoom will be randomly picked
in the range [1-z, 1+z]. A sequence of two can be passed instead
to select this range.
channel_shift_range: shift range for each channel.
fill_mode: points outside the boundaries are filled according to the
given mode ('constant', 'nearest', 'reflect' or 'wrap'). Default
is 'nearest'.
cval: value used for points outside the boundaries when fill_mode is
'constant'. Default is 0.
horizontal_flip: whether to randomly flip images horizontally.
vertical_flip: whether to randomly flip images vertically.
rescale: rescaling factor. If None or 0, no rescaling is applied,
otherwise we multiply the data by the value provided. This is
applied after the `preprocessing_function` (if any provided)
but before any other transformation.
preprocessing_function: function that will be applied to each input.
The function will run before any other modification on it.
The function should take one argument:
one image (Numpy tensor with rank 3),
and should output a Numpy tensor with the same shape.
data_format: 'channels_first' or 'channels_last'. In 'channels_first' mode, the channels dimension
(the depth) is at index 1, in 'channels_last' mode it is at index 3.
It defaults to the `image_data_format` value found in your
Keras config file at `~/.keras/keras.json`.
If you never set it, then it will be "channels_last".
"""
def __init__(self,
featurewise_center=False,
samplewise_center=False,
featurewise_std_normalization=False,
samplewise_std_normalization=False,
zca_whitening=False,
zca_epsilon=1e-6,
rotation_angle=0.,
width_shift_range=0.,
height_shift_range=0.,
shear_range=0.,
zoom_range=0.,
channel_shift_range=0.,
fill_mode='nearest',
cval=0.,
horizontal_flip=False,
vertical_flip=False,
rescale=None,
preprocessing_function=None,
data_format="channels_last",
time_delay=None,
target_dimensions=None):
self.featurewise_center = featurewise_center
self.samplewise_center = samplewise_center
self.featurewise_std_normalization = featurewise_std_normalization
self.samplewise_std_normalization = samplewise_std_normalization
self.zca_whitening = zca_whitening
self.zca_epsilon = zca_epsilon
self.rotation_angle = rotation_angle
self.width_shift_range = width_shift_range
self.height_shift_range = height_shift_range
self.shear_range = shear_range
self.zoom_range = zoom_range
self.channel_shift_range = channel_shift_range
self.fill_mode = fill_mode
self.cval = cval
self.horizontal_flip = horizontal_flip
self.vertical_flip = vertical_flip
self.rescale = rescale
self.preprocessing_function = preprocessing_function
self.data_format = data_format
self.time_delay = time_delay
self.target_dimensions = target_dimensions
if data_format == 'channels_last':
if time_delay is None:
self.time_delay_axis, self.row_axis, self.col_axis, self.channel_axis = None, 1, 2, 3
else:
self.time_delay_axis, self.row_axis, self.col_axis, self.channel_axis = 1, 2, 3, 4
self.mean = None
self.std = None
self.principal_components = None
if np.isscalar(zoom_range):
self.zoom_range = [1 - zoom_range, 1 + zoom_range]
elif len(zoom_range) == 2:
self.zoom_range = [zoom_range[0], zoom_range[1]]
else:
raise ValueError('`zoom_range` should be a float or '
'a tuple or list of two floats. '
'Received arg: ', zoom_range)
def flow(self, x, y=None, batch_size=32, shuffle=True, seed=None,
save_to_dir=None, save_prefix='', save_format='png', target_dimension=None):
if target_dimension:
self.target_dimensions = target_dimension
return NumpyArrayIterator(
x, y, self,
batch_size=batch_size,
shuffle=shuffle,
seed=seed,
data_format=self.data_format,
save_to_dir=save_to_dir,
save_prefix=save_prefix,
save_format=save_format)
def resize(self, sample):
return resize_sample(sample, self.target_dimensions)
def get_random_transform_matrix(self, sample, seed=None):
"""Randomly augment a single image tensor.
# Arguments
sample: 3D or 4D tensor, single sample.
seed: random seed.
# Returns
A randomly generated transformation Matrix.
"""
# sample is a single image (or image sequence), so it doesn't have the batch dimension at index 0
img_row_axis = self.row_axis - 1
img_col_axis = self.col_axis - 1
if seed is not None:
np.random.seed(seed)
# use composition of homographies
# to generate final transform that needs to be applied
if self.rotation_angle:
theta = np.pi / 180 * np.random.uniform(-self.rotation_angle, self.rotation_angle)
else:
theta = 0
if self.height_shift_range:
tx = np.random.uniform(-self.height_shift_range, self.height_shift_range) * sample.shape[img_row_axis]
else:
tx = 0
if self.width_shift_range:
ty = np.random.uniform(-self.width_shift_range, self.width_shift_range) * sample.shape[img_col_axis]
else:
ty = 0
if self.shear_range:
shear = np.random.uniform(-self.shear_range, self.shear_range)
else:
shear = 0
if self.zoom_range[0] == 1 and self.zoom_range[1] == 1:
zx, zy = 1, 1
else:
zx, zy = np.random.uniform(self.zoom_range[0], self.zoom_range[1], 2)
transform_matrix = np.identity(3)
if theta != 0:
rotation_matrix = np.array([[np.cos(theta), -np.sin(theta), 0],
[np.sin(theta), np.cos(theta), 0],
[0, 0, 1]])
transform_matrix = np.dot(transform_matrix, rotation_matrix)
if tx != 0 or ty != 0:
shift_matrix = np.array([[1, 0, tx],
[0, 1, ty],
[0, 0, 1]])
transform_matrix = np.dot(transform_matrix, shift_matrix)
if shear != 0:
shear_matrix = np.array([[1, -np.sin(shear), 0],
[0, np.cos(shear), 0],
[0, 0, 1]])
transform_matrix = np.dot(transform_matrix, shear_matrix)
if zx != 1 or zy != 1:
zoom_matrix = np.array([[zx, 0, 0],
[0, zy, 0],
[0, 0, 1]])
transform_matrix = np.dot(transform_matrix, zoom_matrix)
return transform_matrix
def random_transform(self, sample, seed=None):
"""Randomly augment a single image tensor.
# Arguments
sample: 3D or 4D tensor, single sample.
seed: random seed.
# Returns
A randomly transformed version of the input (same shape).
"""
img_row_axis = self.row_axis - 1
img_col_axis = self.col_axis - 1
img_channel_axis = self.channel_axis - 1
transform_matrix = self.get_random_transform_matrix(sample, seed)
if transform_matrix is not None:
h, w = sample.shape[img_row_axis], sample.shape[img_col_axis]
transform_matrix = transform_matrix_offset_center(transform_matrix, h, w)
sample = apply_transform(sample, transform_matrix, img_channel_axis,
fill_mode=self.fill_mode, cval=self.cval)
if self.channel_shift_range != 0:
sample = random_channel_shift(sample,
self.channel_shift_range,
img_channel_axis)
if self.horizontal_flip:
if np.random.random() < 0.5:
sample = flip_axis(sample, img_col_axis)
if self.vertical_flip:
if np.random.random() < 0.5:
sample = flip_axis(sample, img_row_axis)
return sample
def fit(self, x,
augment=False,
rounds=1,
seed=None):
"""Fits internal statistics to some sample data.
Required for featurewise_center, featurewise_std_normalization
and zca_whitening.
# Arguments
x: Numpy array, the data to fit on. Should have rank 5, or rank 4 when time_delay is None.
In case of grayscale data,
the channels axis should have value 1, and in case
of RGB data, it should have value 3.
augment: Whether to fit on randomly augmented samples
rounds: If `augment`,
how many augmentation passes to do over the data
seed: random seed.
# Raises
ValueError: in case of invalid input `x`.
"""
x = np.asarray(x, dtype=K.floatx())
if x.shape[self.channel_axis] not in {1, 3, 4}:
warnings.warn(
'Expected input to be images (as Numpy array) '
'following the data format convention "' + self.data_format + '" '
'(channels on axis ' + str(
self.channel_axis) + '), i.e. expected '
'either 1, 3 or 4 channels on axis ' + str(self.channel_axis) + '. '
'However, it was passed an array with shape ' + str(
x.shape) +
' (' + str(x.shape[self.channel_axis]) + ' channels).')
if seed is not None:
np.random.seed(seed)
x = np.copy(x)
if augment:
ax = np.zeros(tuple([rounds * x.shape[0]] + list(x.shape)[1:]), dtype=K.floatx())
for r in range(rounds):
for i in range(x.shape[0]):
ax[i + r * x.shape[0]] = self.random_transform(x[i])
x = ax
if self.featurewise_center:
self.mean = np.mean(x, axis=0)
x -= self.mean
if self.featurewise_std_normalization:
self.std = np.std(x, axis=0)
x /= (self.std + K.epsilon())
if self.zca_whitening:
flat_x = np.reshape(x, (x.shape[0], x.shape[1] * x.shape[2] * x.shape[3]))
sigma = np.dot(flat_x.T, flat_x) / flat_x.shape[0]
u, s, _ = linalg.svd(sigma)
self.principal_components = np.dot(np.dot(u, np.diag(1. / np.sqrt(s + self.zca_epsilon))), u.T)
|
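A NumPy-only sketch of the two samplewise branches of standardize; the featurewise branches behave the same way but subtract and divide statistics computed by fit across the whole dataset:

import numpy as np

x = (np.random.rand(48, 48, 1) * 255).astype('float32')
x -= np.mean(x, keepdims=True)            # samplewise_center
x /= np.std(x, keepdims=True) + 1e-7      # samplewise_std_normalization
print(round(float(x.mean()), 6), round(float(x.std()), 4))  # ~0.0 ~1.0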
thoughtworksarts/EmoPy | EmoPy/library/image.py | ImageDataGenerator.get_random_transform_matrix | python | def get_random_transform_matrix(self, sample, seed=None):
# sample is a single image (or image sequence), so it doesn't have the batch dimension at index 0
img_row_axis = self.row_axis - 1
img_col_axis = self.col_axis - 1
if seed is not None:
np.random.seed(seed)
# use composition of homographies
# to generate final transform that needs to be applied
if self.rotation_angle:
theta = np.pi / 180 * np.random.uniform(-self.rotation_angle, self.rotation_angle)
else:
theta = 0
if self.height_shift_range:
tx = np.random.uniform(-self.height_shift_range, self.height_shift_range) * sample.shape[img_row_axis]
else:
tx = 0
if self.width_shift_range:
ty = np.random.uniform(-self.width_shift_range, self.width_shift_range) * sample.shape[img_col_axis]
else:
ty = 0
if self.shear_range:
shear = np.random.uniform(-self.shear_range, self.shear_range)
else:
shear = 0
if self.zoom_range[0] == 1 and self.zoom_range[1] == 1:
zx, zy = 1, 1
else:
zx, zy = np.random.uniform(self.zoom_range[0], self.zoom_range[1], 2)
transform_matrix = np.identity(3)
if theta != 0:
rotation_matrix = np.array([[np.cos(theta), -np.sin(theta), 0],
[np.sin(theta), np.cos(theta), 0],
[0, 0, 1]])
transform_matrix = np.dot(transform_matrix, rotation_matrix)
if tx != 0 or ty != 0:
shift_matrix = np.array([[1, 0, tx],
[0, 1, ty],
[0, 0, 1]])
transform_matrix = np.dot(transform_matrix, shift_matrix)
if shear != 0:
shear_matrix = np.array([[1, -np.sin(shear), 0],
[0, np.cos(shear), 0],
[0, 0, 1]])
transform_matrix = np.dot(transform_matrix, shear_matrix)
if zx != 1 or zy != 1:
zoom_matrix = np.array([[zx, 0, 0],
[0, zy, 0],
[0, 0, 1]])
transform_matrix = np.dot(transform_matrix, zoom_matrix)
return transform_matrix | Randomly generate an augmentation transform matrix for a single image tensor.
# Arguments
sample: 3D or 4D tensor, single sample.
seed: random seed.
# Returns
A randomly generated transformation Matrix. | train | https://github.com/thoughtworksarts/EmoPy/blob/a0ab97b3719ebe0a9de9bfc5adae5e46c9b77fd7/EmoPy/library/image.py#L296-L366 | null | class ImageDataGenerator(object):
"""Generate minibatches of image data with real-time data augmentation.
# Arguments
featurewise_center: set input mean to 0 over the dataset.
samplewise_center: set each sample mean to 0.
featurewise_std_normalization: divide inputs by std of the dataset.
samplewise_std_normalization: divide each input by its std.
zca_whitening: apply ZCA whitening.
zca_epsilon: epsilon for ZCA whitening. Default is 1e-6.
rotation_angle: degrees (0 to 180).
width_shift_range: fraction of total width.
height_shift_range: fraction of total height.
shear_range: shear intensity (shear angle in radians).
zoom_range: amount of zoom. if scalar z, zoom will be randomly picked
in the range [1-z, 1+z]. A sequence of two can be passed instead
to select this range.
channel_shift_range: shift range for each channel.
fill_mode: points outside the boundaries are filled according to the
given mode ('constant', 'nearest', 'reflect' or 'wrap'). Default
is 'nearest'.
cval: value used for points outside the boundaries when fill_mode is
'constant'. Default is 0.
horizontal_flip: whether to randomly flip images horizontally.
vertical_flip: whether to randomly flip images vertically.
rescale: rescaling factor. If None or 0, no rescaling is applied,
otherwise we multiply the data by the value provided. This is
applied after the `preprocessing_function` (if any provided)
but before any other transformation.
preprocessing_function: function that will be implied on each input.
The function will run before any other modification on it.
The function should take one argument:
one image (Numpy tensor with rank 3),
and should output a Numpy tensor with the same shape.
data_format: 'channels_first' or 'channels_last'. In 'channels_first' mode, the channels dimension
(the depth) is at index 1, in 'channels_last' mode it is at index 3.
It defaults to the `image_data_format` value found in your
Keras config file at `~/.keras/keras.json`.
If you never set it, then it will be "channels_last".
"""
def __init__(self,
featurewise_center=False,
samplewise_center=False,
featurewise_std_normalization=False,
samplewise_std_normalization=False,
zca_whitening=False,
zca_epsilon=1e-6,
rotation_angle=0.,
width_shift_range=0.,
height_shift_range=0.,
shear_range=0.,
zoom_range=0.,
channel_shift_range=0.,
fill_mode='nearest',
cval=0.,
horizontal_flip=False,
vertical_flip=False,
rescale=None,
preprocessing_function=None,
data_format="channels_last",
time_delay=None,
target_dimensions=None):
self.featurewise_center = featurewise_center
self.samplewise_center = samplewise_center
self.featurewise_std_normalization = featurewise_std_normalization
self.samplewise_std_normalization = samplewise_std_normalization
self.zca_whitening = zca_whitening
self.zca_epsilon = zca_epsilon
self.rotation_angle = rotation_angle
self.width_shift_range = width_shift_range
self.height_shift_range = height_shift_range
self.shear_range = shear_range
self.zoom_range = zoom_range
self.channel_shift_range = channel_shift_range
self.fill_mode = fill_mode
self.cval = cval
self.horizontal_flip = horizontal_flip
self.vertical_flip = vertical_flip
self.rescale = rescale
self.preprocessing_function = preprocessing_function
self.data_format = data_format
self.time_delay = time_delay
self.target_dimensions = target_dimensions
if data_format == 'channels_last':
if time_delay is None:
self.time_delay_axis, self.row_axis, self.col_axis, self.channel_axis = None, 1, 2, 3
else:
self.time_delay_axis, self.row_axis, self.col_axis, self.channel_axis = 1, 2, 3, 4
self.mean = None
self.std = None
self.principal_components = None
if np.isscalar(zoom_range):
self.zoom_range = [1 - zoom_range, 1 + zoom_range]
elif len(zoom_range) == 2:
self.zoom_range = [zoom_range[0], zoom_range[1]]
else:
raise ValueError('`zoom_range` should be a float or '
'a tuple or list of two floats. '
'Received arg: ', zoom_range)
def flow(self, x, y=None, batch_size=32, shuffle=True, seed=None,
save_to_dir=None, save_prefix='', save_format='png', target_dimension=None):
if target_dimension:
self.target_dimensions = target_dimension
return NumpyArrayIterator(
x, y, self,
batch_size=batch_size,
shuffle=shuffle,
seed=seed,
data_format=self.data_format,
save_to_dir=save_to_dir,
save_prefix=save_prefix,
save_format=save_format)
def standardize(self, x):
"""Apply the normalization configuration to a batch of inputs.
# Arguments
x: batch of inputs to be normalized.
# Returns
The inputs, normalized.
"""
if self.preprocessing_function:
x = self.preprocessing_function(x)
if self.rescale:
x *= self.rescale
if self.samplewise_center:
x -= np.mean(x, keepdims=True)
if self.samplewise_std_normalization:
x /= np.std(x, keepdims=True) + 1e-7
if self.featurewise_center:
if self.mean is not None:
x -= self.mean
else:
warnings.warn('This ImageDataGenerator specifies '
'`featurewise_center`, but it hasn\'t '
'been fit on any training data. Fit it '
'first by calling `.fit(numpy_data)`.')
if self.featurewise_std_normalization:
if self.std is not None:
x /= (self.std + 1e-7)
else:
warnings.warn('This ImageDataGenerator specifies '
'`featurewise_std_normalization`, but it hasn\'t '
'been fit on any training data. Fit it '
'first by calling `.fit(numpy_data)`.')
if self.zca_whitening:
if self.principal_components is not None:
flatx = np.reshape(x, (-1, np.prod(x.shape[-3:])))
whitex = np.dot(flatx, self.principal_components)
x = np.reshape(whitex, x.shape)
else:
warnings.warn('This ImageDataGenerator specifies '
'`zca_whitening`, but it hasn\'t '
'been fit on any training data. Fit it '
'first by calling `.fit(numpy_data)`.')
return x
def resize(self, sample):
return resize_sample(sample, self.target_dimensions)
def random_transform(self, sample, seed=None):
"""Randomly augment a single image tensor.
# Arguments
sample: 3D or 4D tensor, single sample.
seed: random seed.
# Returns
A randomly transformed version of the input (same shape).
"""
img_row_axis = self.row_axis - 1
img_col_axis = self.col_axis - 1
img_channel_axis = self.channel_axis - 1
transform_matrix = self.get_random_transform_matrix(sample, seed)
if transform_matrix is not None:
h, w = sample.shape[img_row_axis], sample.shape[img_col_axis]
transform_matrix = transform_matrix_offset_center(transform_matrix, h, w)
sample = apply_transform(sample, transform_matrix, img_channel_axis,
fill_mode=self.fill_mode, cval=self.cval)
if self.channel_shift_range != 0:
sample = random_channel_shift(sample,
self.channel_shift_range,
img_channel_axis)
if self.horizontal_flip:
if np.random.random() < 0.5:
sample = flip_axis(sample, img_col_axis)
if self.vertical_flip:
if np.random.random() < 0.5:
sample = flip_axis(sample, img_row_axis)
return sample
def fit(self, x,
augment=False,
rounds=1,
seed=None):
"""Fits internal statistics to some sample data.
Required for featurewise_center, featurewise_std_normalization
and zca_whitening.
# Arguments
x: Numpy array, the data to fit on. Should have rank 5 or 4 when time_delay is None.
In case of grayscale data,
the channels axis should have value 1, and in case
of RGB data, it should have value 3.
augment: Whether to fit on randomly augmented samples
rounds: If `augment`,
how many augmentation passes to do over the data
seed: random seed.
# Raises
ValueError: in case of invalid input `x`.
"""
x = np.asarray(x, dtype=K.floatx())
if x.shape[self.channel_axis] not in {1, 3, 4}:
warnings.warn(
'Expected input to be images (as Numpy array) '
'following the data format convention "' + self.data_format + '" '
'(channels on axis ' + str(
self.channel_axis) + '), i.e. expected '
'either 1, 3 or 4 channels on axis ' + str(self.channel_axis) + '. '
'However, it was passed an array with shape ' + str(
x.shape) +
' (' + str(x.shape[self.channel_axis]) + ' channels).')
if seed is not None:
np.random.seed(seed)
x = np.copy(x)
if augment:
ax = np.zeros(tuple([rounds * x.shape[0]] + list(x.shape)[1:]), dtype=K.floatx())
for r in range(rounds):
for i in range(x.shape[0]):
ax[i + r * x.shape[0]] = self.random_transform(x[i])
x = ax
if self.featurewise_center:
self.mean = np.mean(x, axis=0)
x -= self.mean
if self.featurewise_std_normalization:
self.std = np.std(x, axis=0)
x /= (self.std + K.epsilon())
if self.zca_whitening:
flat_x = np.reshape(x, (x.shape[0], x.shape[1] * x.shape[2] * x.shape[3]))
sigma = np.dot(flat_x.T, flat_x) / flat_x.shape[0]
u, s, _ = linalg.svd(sigma)
self.principal_components = np.dot(np.dot(u, np.diag(1. / np.sqrt(s + self.zca_epsilon))), u.T)
|
thoughtworksarts/EmoPy | EmoPy/library/image.py | ImageDataGenerator.random_transform | python | def random_transform(self, sample, seed=None):
img_row_axis = self.row_axis - 1
img_col_axis = self.col_axis - 1
img_channel_axis = self.channel_axis - 1
transform_matrix = self.get_random_transform_matrix(sample, seed)
if transform_matrix is not None:
h, w = sample.shape[img_row_axis], sample.shape[img_col_axis]
transform_matrix = transform_matrix_offset_center(transform_matrix, h, w)
sample = apply_transform(sample, transform_matrix, img_channel_axis,
fill_mode=self.fill_mode, cval=self.cval)
if self.channel_shift_range != 0:
sample = random_channel_shift(sample,
self.channel_shift_range,
img_channel_axis)
if self.horizontal_flip:
if np.random.random() < 0.5:
sample = flip_axis(sample, img_col_axis)
if self.vertical_flip:
if np.random.random() < 0.5:
sample = flip_axis(sample, img_row_axis)
return sample | Randomly augment a single image tensor.
# Arguments
sample: 3D or 4D tensor, single sample.
seed: random seed.
# Returns
A randomly transformed version of the input (same shape). | train | https://github.com/thoughtworksarts/EmoPy/blob/a0ab97b3719ebe0a9de9bfc5adae5e46c9b77fd7/EmoPy/library/image.py#L368-L402 | [
"def random_channel_shift(x, intensity, channel_axis=0):\n x = np.rollaxis(x, channel_axis, 0)\n min_x, max_x = np.min(x), np.max(x)\n channel_images = [np.clip(x_channel + np.random.uniform(-intensity, intensity), min_x, max_x)\n for x_channel in x]\n x = np.stack(channel_images, axis=0)\n x = np.rollaxis(x, 0, channel_axis + 1)\n return x\n",
"def transform_matrix_offset_center(matrix, x, y):\n o_x = float(x) / 2 + 0.5\n o_y = float(y) / 2 + 0.5\n offset_matrix = np.array([[1, 0, o_x], [0, 1, o_y], [0, 0, 1]])\n reset_matrix = np.array([[1, 0, -o_x], [0, 1, -o_y], [0, 0, 1]])\n transform_matrix = np.dot(np.dot(offset_matrix, matrix), reset_matrix)\n return transform_matrix\n",
"def apply_transform(sample,\n transform_matrix,\n channel_axis=0,\n fill_mode='nearest',\n cval=0.):\n \"\"\"Apply the image transformation specified by a matrix.\n\n # Arguments\n sample: 2D numpy array, single sample.\n transform_matrix: Numpy array specifying the geometric transformation.\n channel_axis: Index of axis for channels in the input tensor.\n fill_mode: Points outside the boundaries of the input\n are filled according to the given mode\n (one of `{'constant', 'nearest', 'reflect', 'wrap'}`).\n cval: Value used for points outside the boundaries\n of the input if `mode='constant'`.\n\n # Returns\n The transformed version of the input.\n \"\"\"\n if sample.ndim == 4:\n channel_axis = channel_axis - 1\n transformed_frames = [transform(frame, transform_matrix, channel_axis, fill_mode, cval) for frame in sample]\n return np.stack(transformed_frames, axis=0)\n\n if sample.ndim == 3:\n return transform(sample, transform_matrix, channel_axis, fill_mode, cval)\n",
"def flip_axis(x, axis):\n x = np.asarray(x).swapaxes(axis, 0)\n x = x[::-1, ...]\n x = x.swapaxes(0, axis)\n return x\n",
"def get_random_transform_matrix(self, sample, seed=None):\n \"\"\"Randomly augment a single image tensor.\n\n # Arguments\n sample: 3D or 4D tensor, single sample.\n seed: random seed.\n\n # Returns\n A randomly generated transformation Matrix.\n \"\"\"\n # x is a single image, so it doesn't have image number at index 0\n img_row_axis = self.row_axis - 1\n img_col_axis = self.col_axis - 1\n\n if seed is not None:\n np.random.seed(seed)\n\n # use composition of homographies\n # to generate final transform that needs to be applied\n if self.rotation_angle:\n theta = np.pi / 180 * np.random.uniform(-self.rotation_angle, self.rotation_angle)\n else:\n theta = 0\n\n if self.height_shift_range:\n tx = np.random.uniform(-self.height_shift_range, self.height_shift_range) * sample.shape[img_row_axis]\n else:\n tx = 0\n\n if self.width_shift_range:\n ty = np.random.uniform(-self.width_shift_range, self.width_shift_range) * sample.shape[img_col_axis]\n else:\n ty = 0\n\n if self.shear_range:\n shear = np.random.uniform(-self.shear_range, self.shear_range)\n else:\n shear = 0\n\n if self.zoom_range[0] == 1 and self.zoom_range[1] == 1:\n zx, zy = 1, 1\n else:\n zx, zy = np.random.uniform(self.zoom_range[0], self.zoom_range[1], 2)\n\n transform_matrix = np.identity(3)\n\n if theta != 0:\n rotation_matrix = np.array([[np.cos(theta), -np.sin(theta), 0],\n [np.sin(theta), np.cos(theta), 0],\n [0, 0, 1]])\n transform_matrix = np.dot(transform_matrix, rotation_matrix)\n\n if tx != 0 or ty != 0:\n shift_matrix = np.array([[1, 0, tx],\n [0, 1, ty],\n [0, 0, 1]])\n transform_matrix = np.dot(transform_matrix, shift_matrix)\n\n if shear != 0:\n shear_matrix = np.array([[1, -np.sin(shear), 0],\n [0, np.cos(shear), 0],\n [0, 0, 1]])\n transform_matrix = np.dot(transform_matrix, shear_matrix)\n\n if zx != 1 or zy != 1:\n zoom_matrix = np.array([[zx, 0, 0],\n [0, zy, 0],\n [0, 0, 1]])\n transform_matrix = np.dot(transform_matrix, zoom_matrix)\n\n return transform_matrix\n"
] | class ImageDataGenerator(object):
"""Generate minibatches of image data with real-time data augmentation.
# Arguments
featurewise_center: set input mean to 0 over the dataset.
samplewise_center: set each sample mean to 0.
featurewise_std_normalization: divide inputs by std of the dataset.
samplewise_std_normalization: divide each input by its std.
zca_whitening: apply ZCA whitening.
zca_epsilon: epsilon for ZCA whitening. Default is 1e-6.
rotation_range: degrees (0 to 180).
width_shift_range: fraction of total width.
height_shift_range: fraction of total height.
shear_range: shear intensity (shear angle in radians).
zoom_range: amount of zoom. if scalar z, zoom will be randomly picked
in the range [1-z, 1+z]. A sequence of two can be passed instead
to select this range.
channel_shift_range: shift range for each channel.
fill_mode: points outside the boundaries are filled according to the
given mode ('constant', 'nearest', 'reflect' or 'wrap'). Default
is 'nearest'.
cval: value used for points outside the boundaries when fill_mode is
'constant'. Default is 0.
horizontal_flip: whether to randomly flip images horizontally.
vertical_flip: whether to randomly flip images vertically.
rescale: rescaling factor. If None or 0, no rescaling is applied,
otherwise we multiply the data by the value provided. This is
applied after the `preprocessing_function` (if any provided)
but before any other transformation.
preprocessing_function: function that will be implied on each input.
The function will run before any other modification on it.
The function should take one argument:
one image (Numpy tensor with rank 3),
and should output a Numpy tensor with the same shape.
data_format: 'channels_first' or 'channels_last'. In 'channels_first' mode, the channels dimension
(the depth) is at index 1, in 'channels_last' mode it is at index 3.
It defaults to the `image_data_format` value found in your
Keras config file at `~/.keras/keras.json`.
If you never set it, then it will be "channels_last".
"""
def __init__(self,
featurewise_center=False,
samplewise_center=False,
featurewise_std_normalization=False,
samplewise_std_normalization=False,
zca_whitening=False,
zca_epsilon=1e-6,
rotation_angle=0.,
width_shift_range=0.,
height_shift_range=0.,
shear_range=0.,
zoom_range=0.,
channel_shift_range=0.,
fill_mode='nearest',
cval=0.,
horizontal_flip=False,
vertical_flip=False,
rescale=None,
preprocessing_function=None,
data_format="channels_last",
time_delay=None,
target_dimensions=None):
self.featurewise_center = featurewise_center
self.samplewise_center = samplewise_center
self.featurewise_std_normalization = featurewise_std_normalization
self.samplewise_std_normalization = samplewise_std_normalization
self.zca_whitening = zca_whitening
self.zca_epsilon = zca_epsilon
self.rotation_angle = rotation_angle
self.width_shift_range = width_shift_range
self.height_shift_range = height_shift_range
self.shear_range = shear_range
self.zoom_range = zoom_range
self.channel_shift_range = channel_shift_range
self.fill_mode = fill_mode
self.cval = cval
self.horizontal_flip = horizontal_flip
self.vertical_flip = vertical_flip
self.rescale = rescale
self.preprocessing_function = preprocessing_function
self.data_format = data_format
self.time_delay = time_delay
self.target_dimensions = target_dimensions
if data_format == 'channels_last':
if time_delay is None:
self.time_delay_axis, self.row_axis, self.col_axis, self.channel_axis = None, 1, 2, 3
else:
self.time_delay_axis, self.row_axis, self.col_axis, self.channel_axis = 1, 2, 3, 4
self.mean = None
self.std = None
self.principal_components = None
if np.isscalar(zoom_range):
self.zoom_range = [1 - zoom_range, 1 + zoom_range]
elif len(zoom_range) == 2:
self.zoom_range = [zoom_range[0], zoom_range[1]]
else:
raise ValueError('`zoom_range` should be a float or '
'a tuple or list of two floats. '
'Received arg: ', zoom_range)
def flow(self, x, y=None, batch_size=32, shuffle=True, seed=None,
save_to_dir=None, save_prefix='', save_format='png', target_dimension=None):
if target_dimension:
self.target_dimensions = target_dimension
return NumpyArrayIterator(
x, y, self,
batch_size=batch_size,
shuffle=shuffle,
seed=seed,
data_format=self.data_format,
save_to_dir=save_to_dir,
save_prefix=save_prefix,
save_format=save_format)
def standardize(self, x):
"""Apply the normalization configuration to a batch of inputs.
# Arguments
x: batch of inputs to be normalized.
# Returns
The inputs, normalized.
"""
if self.preprocessing_function:
x = self.preprocessing_function(x)
if self.rescale:
x *= self.rescale
if self.samplewise_center:
x -= np.mean(x, keepdims=True)
if self.samplewise_std_normalization:
x /= np.std(x, keepdims=True) + 1e-7
if self.featurewise_center:
if self.mean is not None:
x -= self.mean
else:
warnings.warn('This ImageDataGenerator specifies '
'`featurewise_center`, but it hasn\'t '
'been fit on any training data. Fit it '
'first by calling `.fit(numpy_data)`.')
if self.featurewise_std_normalization:
if self.std is not None:
x /= (self.std + 1e-7)
else:
warnings.warn('This ImageDataGenerator specifies '
'`featurewise_std_normalization`, but it hasn\'t '
'been fit on any training data. Fit it '
'first by calling `.fit(numpy_data)`.')
if self.zca_whitening:
if self.principal_components is not None:
flatx = np.reshape(x, (-1, np.prod(x.shape[-3:])))
whitex = np.dot(flatx, self.principal_components)
x = np.reshape(whitex, x.shape)
else:
warnings.warn('This ImageDataGenerator specifies '
'`zca_whitening`, but it hasn\'t '
'been fit on any training data. Fit it '
'first by calling `.fit(numpy_data)`.')
return x
def resize(self, sample):
return resize_sample(sample, self.target_dimensions)
def get_random_transform_matrix(self, sample, seed=None):
"""Randomly augment a single image tensor.
# Arguments
sample: 3D or 4D tensor, single sample.
seed: random seed.
# Returns
A randomly generated transformation Matrix.
"""
# x is a single image, so it doesn't have image number at index 0
img_row_axis = self.row_axis - 1
img_col_axis = self.col_axis - 1
if seed is not None:
np.random.seed(seed)
# use composition of homographies
# to generate final transform that needs to be applied
if self.rotation_angle:
theta = np.pi / 180 * np.random.uniform(-self.rotation_angle, self.rotation_angle)
else:
theta = 0
if self.height_shift_range:
tx = np.random.uniform(-self.height_shift_range, self.height_shift_range) * sample.shape[img_row_axis]
else:
tx = 0
if self.width_shift_range:
ty = np.random.uniform(-self.width_shift_range, self.width_shift_range) * sample.shape[img_col_axis]
else:
ty = 0
if self.shear_range:
shear = np.random.uniform(-self.shear_range, self.shear_range)
else:
shear = 0
if self.zoom_range[0] == 1 and self.zoom_range[1] == 1:
zx, zy = 1, 1
else:
zx, zy = np.random.uniform(self.zoom_range[0], self.zoom_range[1], 2)
transform_matrix = np.identity(3)
if theta != 0:
rotation_matrix = np.array([[np.cos(theta), -np.sin(theta), 0],
[np.sin(theta), np.cos(theta), 0],
[0, 0, 1]])
transform_matrix = np.dot(transform_matrix, rotation_matrix)
if tx != 0 or ty != 0:
shift_matrix = np.array([[1, 0, tx],
[0, 1, ty],
[0, 0, 1]])
transform_matrix = np.dot(transform_matrix, shift_matrix)
if shear != 0:
shear_matrix = np.array([[1, -np.sin(shear), 0],
[0, np.cos(shear), 0],
[0, 0, 1]])
transform_matrix = np.dot(transform_matrix, shear_matrix)
if zx != 1 or zy != 1:
zoom_matrix = np.array([[zx, 0, 0],
[0, zy, 0],
[0, 0, 1]])
transform_matrix = np.dot(transform_matrix, zoom_matrix)
return transform_matrix
def fit(self, x,
augment=False,
rounds=1,
seed=None):
"""Fits internal statistics to some sample data.
Required for featurewise_center, featurewise_std_normalization
and zca_whitening.
# Arguments
x: Numpy array, the data to fit on. Should have rank 5 or 4 when time_delay is None.
In case of grayscale data,
the channels axis should have value 1, and in case
of RGB data, it should have value 3.
augment: Whether to fit on randomly augmented samples
rounds: If `augment`,
how many augmentation passes to do over the data
seed: random seed.
# Raises
ValueError: in case of invalid input `x`.
"""
x = np.asarray(x, dtype=K.floatx())
if x.shape[self.channel_axis] not in {1, 3, 4}:
warnings.warn(
'Expected input to be images (as Numpy array) '
'following the data format convention "' + self.data_format + '" '
'(channels on axis ' + str(
self.channel_axis) + '), i.e. expected '
'either 1, 3 or 4 channels on axis ' + str(self.channel_axis) + '. '
'However, it was passed an array with shape ' + str(
x.shape) +
' (' + str(x.shape[self.channel_axis]) + ' channels).')
if seed is not None:
np.random.seed(seed)
x = np.copy(x)
if augment:
ax = np.zeros(tuple([rounds * x.shape[0]] + list(x.shape)[1:]), dtype=K.floatx())
for r in range(rounds):
for i in range(x.shape[0]):
ax[i + r * x.shape[0]] = self.random_transform(x[i])
x = ax
if self.featurewise_center:
self.mean = np.mean(x, axis=0)
x -= self.mean
if self.featurewise_std_normalization:
self.std = np.std(x, axis=0)
x /= (self.std + K.epsilon())
if self.zca_whitening:
flat_x = np.reshape(x, (x.shape[0], x.shape[1] * x.shape[2] * x.shape[3]))
sigma = np.dot(flat_x.T, flat_x) / flat_x.shape[0]
u, s, _ = linalg.svd(sigma)
self.principal_components = np.dot(np.dot(u, np.diag(1. / np.sqrt(s + self.zca_epsilon))), u.T)
|
thoughtworksarts/EmoPy | EmoPy/library/image.py | ImageDataGenerator.fit | python | def fit(self, x,
augment=False,
rounds=1,
seed=None):
x = np.asarray(x, dtype=K.floatx())
if x.shape[self.channel_axis] not in {1, 3, 4}:
warnings.warn(
'Expected input to be images (as Numpy array) '
'following the data format convention "' + self.data_format + '" '
'(channels on axis ' + str(
self.channel_axis) + '), i.e. expected '
'either 1, 3 or 4 channels on axis ' + str(self.channel_axis) + '. '
'However, it was passed an array with shape ' + str(
x.shape) +
' (' + str(x.shape[self.channel_axis]) + ' channels).')
if seed is not None:
np.random.seed(seed)
x = np.copy(x)
if augment:
ax = np.zeros(tuple([rounds * x.shape[0]] + list(x.shape)[1:]), dtype=K.floatx())
for r in range(rounds):
for i in range(x.shape[0]):
ax[i + r * x.shape[0]] = self.random_transform(x[i])
x = ax
if self.featurewise_center:
self.mean = np.mean(x, axis=0)
x -= self.mean
if self.featurewise_std_normalization:
self.std = np.std(x, axis=0)
x /= (self.std + K.epsilon())
if self.zca_whitening:
flat_x = np.reshape(x, (x.shape[0], x.shape[1] * x.shape[2] * x.shape[3]))
sigma = np.dot(flat_x.T, flat_x) / flat_x.shape[0]
u, s, _ = linalg.svd(sigma)
self.principal_components = np.dot(np.dot(u, np.diag(1. / np.sqrt(s + self.zca_epsilon))), u.T) | Fits internal statistics to some sample data.
Required for featurewise_center, featurewise_std_normalization
and zca_whitening.
# Arguments
x: Numpy array, the data to fit on. Should have rank 5 or 4 when time_delay is None.
In case of grayscale data,
the channels axis should have value 1, and in case
of RGB data, it should have value 3.
augment: Whether to fit on randomly augmented samples
rounds: If `augment`,
how many augmentation passes to do over the data
seed: random seed.
# Raises
ValueError: in case of invalid input `x`. | train | https://github.com/thoughtworksarts/EmoPy/blob/a0ab97b3719ebe0a9de9bfc5adae5e46c9b77fd7/EmoPy/library/image.py#L404-L461 | [
"def random_transform(self, sample, seed=None):\n \"\"\"Randomly augment a single image tensor.\n\n # Arguments\n sample: 3D or 4D tensor, single sample.\n seed: random seed.\n\n # Returns\n A randomly transformed version of the input (same shape).\n \"\"\"\n img_row_axis = self.row_axis - 1\n img_col_axis = self.col_axis - 1\n img_channel_axis = self.channel_axis - 1\n\n transform_matrix = self.get_random_transform_matrix(sample, seed)\n\n if transform_matrix is not None:\n h, w = sample.shape[img_row_axis], sample.shape[img_col_axis]\n transform_matrix = transform_matrix_offset_center(transform_matrix, h, w)\n sample = apply_transform(sample, transform_matrix, img_channel_axis,\n fill_mode=self.fill_mode, cval=self.cval)\n\n if self.channel_shift_range != 0:\n sample = random_channel_shift(sample,\n self.channel_shift_range,\n img_channel_axis)\n if self.horizontal_flip:\n if np.random.random() < 0.5:\n sample = flip_axis(sample, img_col_axis)\n\n if self.vertical_flip:\n if np.random.random() < 0.5:\n sample = flip_axis(sample, img_row_axis)\n\n return sample\n"
] | class ImageDataGenerator(object):
"""Generate minibatches of image data with real-time data augmentation.
# Arguments
featurewise_center: set input mean to 0 over the dataset.
samplewise_center: set each sample mean to 0.
featurewise_std_normalization: divide inputs by std of the dataset.
samplewise_std_normalization: divide each input by its std.
zca_whitening: apply ZCA whitening.
zca_epsilon: epsilon for ZCA whitening. Default is 1e-6.
rotation_range: degrees (0 to 180).
width_shift_range: fraction of total width.
height_shift_range: fraction of total height.
shear_range: shear intensity (shear angle in radians).
zoom_range: amount of zoom. if scalar z, zoom will be randomly picked
in the range [1-z, 1+z]. A sequence of two can be passed instead
to select this range.
channel_shift_range: shift range for each channel.
fill_mode: points outside the boundaries are filled according to the
given mode ('constant', 'nearest', 'reflect' or 'wrap'). Default
is 'nearest'.
cval: value used for points outside the boundaries when fill_mode is
'constant'. Default is 0.
horizontal_flip: whether to randomly flip images horizontally.
vertical_flip: whether to randomly flip images vertically.
rescale: rescaling factor. If None or 0, no rescaling is applied,
otherwise we multiply the data by the value provided. This is
applied after the `preprocessing_function` (if any provided)
but before any other transformation.
preprocessing_function: function that will be implied on each input.
The function will run before any other modification on it.
The function should take one argument:
one image (Numpy tensor with rank 3),
and should output a Numpy tensor with the same shape.
data_format: 'channels_first' or 'channels_last'. In 'channels_first' mode, the channels dimension
(the depth) is at index 1, in 'channels_last' mode it is at index 3.
It defaults to the `image_data_format` value found in your
Keras config file at `~/.keras/keras.json`.
If you never set it, then it will be "channels_last".
"""
def __init__(self,
featurewise_center=False,
samplewise_center=False,
featurewise_std_normalization=False,
samplewise_std_normalization=False,
zca_whitening=False,
zca_epsilon=1e-6,
rotation_angle=0.,
width_shift_range=0.,
height_shift_range=0.,
shear_range=0.,
zoom_range=0.,
channel_shift_range=0.,
fill_mode='nearest',
cval=0.,
horizontal_flip=False,
vertical_flip=False,
rescale=None,
preprocessing_function=None,
data_format="channels_last",
time_delay=None,
target_dimensions=None):
self.featurewise_center = featurewise_center
self.samplewise_center = samplewise_center
self.featurewise_std_normalization = featurewise_std_normalization
self.samplewise_std_normalization = samplewise_std_normalization
self.zca_whitening = zca_whitening
self.zca_epsilon = zca_epsilon
self.rotation_angle = rotation_angle
self.width_shift_range = width_shift_range
self.height_shift_range = height_shift_range
self.shear_range = shear_range
self.zoom_range = zoom_range
self.channel_shift_range = channel_shift_range
self.fill_mode = fill_mode
self.cval = cval
self.horizontal_flip = horizontal_flip
self.vertical_flip = vertical_flip
self.rescale = rescale
self.preprocessing_function = preprocessing_function
self.data_format = data_format
self.time_delay = time_delay
self.target_dimensions = target_dimensions
if data_format == 'channels_last':
if time_delay is None:
self.time_delay_axis, self.row_axis, self.col_axis, self.channel_axis = None, 1, 2, 3
else:
self.time_delay_axis, self.row_axis, self.col_axis, self.channel_axis = 1, 2, 3, 4
self.mean = None
self.std = None
self.principal_components = None
if np.isscalar(zoom_range):
self.zoom_range = [1 - zoom_range, 1 + zoom_range]
elif len(zoom_range) == 2:
self.zoom_range = [zoom_range[0], zoom_range[1]]
else:
raise ValueError('`zoom_range` should be a float or '
'a tuple or list of two floats. '
'Received arg: ', zoom_range)
def flow(self, x, y=None, batch_size=32, shuffle=True, seed=None,
save_to_dir=None, save_prefix='', save_format='png', target_dimension=None):
if target_dimension:
self.target_dimensions = target_dimension
return NumpyArrayIterator(
x, y, self,
batch_size=batch_size,
shuffle=shuffle,
seed=seed,
data_format=self.data_format,
save_to_dir=save_to_dir,
save_prefix=save_prefix,
save_format=save_format)
def standardize(self, x):
"""Apply the normalization configuration to a batch of inputs.
# Arguments
x: batch of inputs to be normalized.
# Returns
The inputs, normalized.
"""
if self.preprocessing_function:
x = self.preprocessing_function(x)
if self.rescale:
x *= self.rescale
if self.samplewise_center:
x -= np.mean(x, keepdims=True)
if self.samplewise_std_normalization:
x /= np.std(x, keepdims=True) + 1e-7
if self.featurewise_center:
if self.mean is not None:
x -= self.mean
else:
warnings.warn('This ImageDataGenerator specifies '
'`featurewise_center`, but it hasn\'t '
'been fit on any training data. Fit it '
'first by calling `.fit(numpy_data)`.')
if self.featurewise_std_normalization:
if self.std is not None:
x /= (self.std + 1e-7)
else:
warnings.warn('This ImageDataGenerator specifies '
'`featurewise_std_normalization`, but it hasn\'t '
'been fit on any training data. Fit it '
'first by calling `.fit(numpy_data)`.')
if self.zca_whitening:
if self.principal_components is not None:
flatx = np.reshape(x, (-1, np.prod(x.shape[-3:])))
whitex = np.dot(flatx, self.principal_components)
x = np.reshape(whitex, x.shape)
else:
warnings.warn('This ImageDataGenerator specifies '
'`zca_whitening`, but it hasn\'t '
'been fit on any training data. Fit it '
'first by calling `.fit(numpy_data)`.')
return x
def resize(self, sample):
return resize_sample(sample, self.target_dimensions)
def get_random_transform_matrix(self, sample, seed=None):
"""Randomly augment a single image tensor.
# Arguments
sample: 3D or 4D tensor, single sample.
seed: random seed.
# Returns
A randomly generated transformation Matrix.
"""
# x is a single image, so it doesn't have image number at index 0
img_row_axis = self.row_axis - 1
img_col_axis = self.col_axis - 1
if seed is not None:
np.random.seed(seed)
# use composition of homographies
# to generate final transform that needs to be applied
if self.rotation_angle:
theta = np.pi / 180 * np.random.uniform(-self.rotation_angle, self.rotation_angle)
else:
theta = 0
if self.height_shift_range:
tx = np.random.uniform(-self.height_shift_range, self.height_shift_range) * sample.shape[img_row_axis]
else:
tx = 0
if self.width_shift_range:
ty = np.random.uniform(-self.width_shift_range, self.width_shift_range) * sample.shape[img_col_axis]
else:
ty = 0
if self.shear_range:
shear = np.random.uniform(-self.shear_range, self.shear_range)
else:
shear = 0
if self.zoom_range[0] == 1 and self.zoom_range[1] == 1:
zx, zy = 1, 1
else:
zx, zy = np.random.uniform(self.zoom_range[0], self.zoom_range[1], 2)
transform_matrix = np.identity(3)
if theta != 0:
rotation_matrix = np.array([[np.cos(theta), -np.sin(theta), 0],
[np.sin(theta), np.cos(theta), 0],
[0, 0, 1]])
transform_matrix = np.dot(transform_matrix, rotation_matrix)
if tx != 0 or ty != 0:
shift_matrix = np.array([[1, 0, tx],
[0, 1, ty],
[0, 0, 1]])
transform_matrix = np.dot(transform_matrix, shift_matrix)
if shear != 0:
shear_matrix = np.array([[1, -np.sin(shear), 0],
[0, np.cos(shear), 0],
[0, 0, 1]])
transform_matrix = np.dot(transform_matrix, shear_matrix)
if zx != 1 or zy != 1:
zoom_matrix = np.array([[zx, 0, 0],
[0, zy, 0],
[0, 0, 1]])
transform_matrix = np.dot(transform_matrix, zoom_matrix)
return transform_matrix
def random_transform(self, sample, seed=None):
"""Randomly augment a single image tensor.
# Arguments
sample: 3D or 4D tensor, single sample.
seed: random seed.
# Returns
A randomly transformed version of the input (same shape).
"""
img_row_axis = self.row_axis - 1
img_col_axis = self.col_axis - 1
img_channel_axis = self.channel_axis - 1
transform_matrix = self.get_random_transform_matrix(sample, seed)
if transform_matrix is not None:
h, w = sample.shape[img_row_axis], sample.shape[img_col_axis]
transform_matrix = transform_matrix_offset_center(transform_matrix, h, w)
sample = apply_transform(sample, transform_matrix, img_channel_axis,
fill_mode=self.fill_mode, cval=self.cval)
if self.channel_shift_range != 0:
sample = random_channel_shift(sample,
self.channel_shift_range,
img_channel_axis)
if self.horizontal_flip:
if np.random.random() < 0.5:
sample = flip_axis(sample, img_col_axis)
if self.vertical_flip:
if np.random.random() < 0.5:
sample = flip_axis(sample, img_row_axis)
return sample
|
thoughtworksarts/EmoPy | EmoPy/library/image.py | NumpyArrayIterator.next | python | def next(self):
# Keeps under lock only the mechanism which advances
# the indexing of each batch.
with self.lock:
index_array = next(self.index_generator)
# The transformation of images is not under thread lock
# so it can be done in parallel
return self._get_batches_of_transformed_samples(index_array) | For python 2.x.
# Returns
The next batch. | train | https://github.com/thoughtworksarts/EmoPy/blob/a0ab97b3719ebe0a9de9bfc5adae5e46c9b77fd7/EmoPy/library/image.py#L625-L637 | null | class NumpyArrayIterator(Iterator):
"""Iterator yielding data from a Numpy array.
# Arguments
x: Numpy array of input data.
y: Numpy array of targets data.
image_data_generator: Instance of `ImageDataGenerator`
to use for random transformations and normalization.
batch_size: Integer, size of a batch.
shuffle: Boolean, whether to shuffle the data between epochs.
seed: Random seed for data shuffling.
data_format: String, one of `channels_first`, `channels_last`.
save_to_dir: Optional directory where to save the pictures
being yielded, in a viewable format. This is useful
for visualizing the random transformations being
applied, for debugging purposes.
save_prefix: String prefix to use for saving sample
images (if `save_to_dir` is set).
save_format: Format to use for saving sample images
(if `save_to_dir` is set).
"""
def __init__(self, x, y, image_data_generator,
batch_size=32, shuffle=False, seed=None,
data_format=None,
save_to_dir=None, save_prefix='', save_format='png'):
if y is not None and len(x) != len(y):
raise ValueError('X (images tensor) and y (labels) '
'should have the same length. '
'Found: X.shape = %s, y.shape = %s' %
(np.asarray(x).shape, np.asarray(y).shape))
self.x = np.asarray(x, dtype=K.floatx())
channels_axis = image_data_generator.channel_axis
if self.x.shape[channels_axis] not in {1, 3, 4}:
warnings.warn('NumpyArrayIterator is set to use the '
'data format convention "' + data_format + '" '
'(channels on axis ' + str(
channels_axis) + '), i.e. expected '
'either 1, 3 or 4 channels on axis ' + str(channels_axis) + '. '
'However, it was passed an array with shape ' + str(
self.x.shape) +
' (' + str(self.x.shape[channels_axis]) + ' channels).')
if y is not None:
self.y = np.asarray(y)
else:
self.y = None
self.image_data_generator = image_data_generator
self.data_format = data_format
self.save_to_dir = save_to_dir
self.save_prefix = save_prefix
self.save_format = save_format
super(NumpyArrayIterator, self).__init__(x.shape[0], batch_size, shuffle, seed)
def _get_batches_of_transformed_samples(self, index_array):
batch_x = list()
for i, j in enumerate(index_array):
x = self.x[j]
x = self.image_data_generator.random_transform(x.astype(K.floatx()))
x = self.image_data_generator.standardize(x)
x = self.image_data_generator.resize(x)
batch_x.append(x)
batch_x = np.array(batch_x)
if self.y is None:
return batch_x
batch_y = self.y[index_array]
return batch_x, batch_y
|
thoughtworksarts/EmoPy | EmoPy/src/neuralnets.py | TransferLearningNN._init_model | python | def _init_model(self):
base_model = self._get_base_model()
top_layer_model = base_model.output
top_layer_model = GlobalAveragePooling2D()(top_layer_model)
top_layer_model = Dense(1024, activation='relu')(top_layer_model)
prediction_layer = Dense(output_dim=len(self.emotion_map.keys()), activation='softmax')(top_layer_model)
model = Model(input=base_model.input, output=prediction_layer)
print(model.summary())
for layer in base_model.layers:
layer.trainable = False
model.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy'])
self.model = model | Initialize base model from Keras and add top layers to match number of training emotions labels.
:return: | train | https://github.com/thoughtworksarts/EmoPy/blob/a0ab97b3719ebe0a9de9bfc5adae5e46c9b77fd7/EmoPy/src/neuralnets.py#L74-L92 | [
"def _get_base_model(self):\n \"\"\"\n :return: base model from Keras based on user-supplied model name\n \"\"\"\n if self.model_name == 'inception_v3':\n return InceptionV3(weights='imagenet', include_top=False)\n elif self.model_name == 'xception':\n return Xception(weights='imagenet', include_top=False)\n elif self.model_name == 'vgg16':\n return VGG16(weights='imagenet', include_top=False)\n elif self.model_name == 'vgg19':\n return VGG19(weights='imagenet', include_top=False)\n elif self.model_name == 'resnet50':\n return ResNet50(weights='imagenet', include_top=False)\n else:\n raise ValueError('Cannot find base model %s' % self.model_name)\n"
] | class TransferLearningNN(_FERNeuralNet):
"""
Transfer Learning Convolutional Neural Network initialized with pretrained weights.
:param model_name: name of pretrained model to use for initial weights. Options: ['Xception', 'VGG16', 'VGG19', 'ResNet50', 'InceptionV3', 'InceptionResNetV2']
:param emotion_map: dict of target emotion label keys with int values corresponding to the index of the emotion probability in the prediction output array
**Example**::
model = TransferLearningNN(model_name='inception_v3', target_labels=[0,1,2,3,4,5,6])
model.fit(images, labels, validation_split=0.15)
"""
_NUM_BOTTOM_LAYERS_TO_RETRAIN = 249
def __init__(self, model_name, emotion_map):
self.model_name = model_name
super().__init__(emotion_map)
def _get_base_model(self):
"""
:return: base model from Keras based on user-supplied model name
"""
if self.model_name == 'inception_v3':
return InceptionV3(weights='imagenet', include_top=False)
elif self.model_name == 'xception':
return Xception(weights='imagenet', include_top=False)
elif self.model_name == 'vgg16':
return VGG16(weights='imagenet', include_top=False)
elif self.model_name == 'vgg19':
return VGG19(weights='imagenet', include_top=False)
elif self.model_name == 'resnet50':
return ResNet50(weights='imagenet', include_top=False)
else:
raise ValueError('Cannot find base model %s' % self.model_name)
def fit(self, features, labels, validation_split, epochs=50):
"""
Trains the neural net on the data provided.
:param features: Numpy array of training data.
:param labels: Numpy array of target (label) data.
:param validation_split: Float between 0 and 1. Percentage of training data to use for validation
:param epochs: Max number of times to train over dataset.
"""
self.model.fit(x=features, y=labels, epochs=epochs, verbose=1,
callbacks=[ReduceLROnPlateau(), EarlyStopping(patience=3)], validation_split=validation_split,
shuffle=True)
for layer in self.model.layers[:self._NUM_BOTTOM_LAYERS_TO_RETRAIN]:
layer.trainable = False
for layer in self.model.layers[self._NUM_BOTTOM_LAYERS_TO_RETRAIN:]:
layer.trainable = True
self.model.compile(optimizer='sgd', loss='categorical_crossentropy', metrics=['accuracy'])
self.model.fit(x=features, y=labels, epochs=50, verbose=1,
callbacks=[ReduceLROnPlateau(), EarlyStopping(patience=3)], validation_split=validation_split,
shuffle=True)
|
thoughtworksarts/EmoPy | EmoPy/src/neuralnets.py | TransferLearningNN._get_base_model | python | def _get_base_model(self):
if self.model_name == 'inception_v3':
return InceptionV3(weights='imagenet', include_top=False)
elif self.model_name == 'xception':
return Xception(weights='imagenet', include_top=False)
elif self.model_name == 'vgg16':
return VGG16(weights='imagenet', include_top=False)
elif self.model_name == 'vgg19':
return VGG19(weights='imagenet', include_top=False)
elif self.model_name == 'resnet50':
return ResNet50(weights='imagenet', include_top=False)
else:
raise ValueError('Cannot find base model %s' % self.model_name) | :return: base model from Keras based on user-supplied model name | train | https://github.com/thoughtworksarts/EmoPy/blob/a0ab97b3719ebe0a9de9bfc5adae5e46c9b77fd7/EmoPy/src/neuralnets.py#L94-L109 | null | class TransferLearningNN(_FERNeuralNet):
"""
Transfer Learning Convolutional Neural Network initialized with pretrained weights.
:param model_name: name of pretrained model to use for initial weights. Options: ['Xception', 'VGG16', 'VGG19', 'ResNet50', 'InceptionV3', 'InceptionResNetV2']
:param emotion_map: dict of target emotion label keys with int values corresponding to the index of the emotion probability in the prediction output array
**Example**::
model = TransferLearningNN(model_name='inception_v3', target_labels=[0,1,2,3,4,5,6])
model.fit(images, labels, validation_split=0.15)
"""
_NUM_BOTTOM_LAYERS_TO_RETRAIN = 249
def __init__(self, model_name, emotion_map):
self.model_name = model_name
super().__init__(emotion_map)
def _init_model(self):
"""
Initialize base model from Keras and add top layers to match number of training emotions labels.
:return:
"""
base_model = self._get_base_model()
top_layer_model = base_model.output
top_layer_model = GlobalAveragePooling2D()(top_layer_model)
top_layer_model = Dense(1024, activation='relu')(top_layer_model)
prediction_layer = Dense(output_dim=len(self.emotion_map.keys()), activation='softmax')(top_layer_model)
model = Model(input=base_model.input, output=prediction_layer)
print(model.summary())
for layer in base_model.layers:
layer.trainable = False
model.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy'])
self.model = model
def fit(self, features, labels, validation_split, epochs=50):
"""
Trains the neural net on the data provided.
:param features: Numpy array of training data.
:param labels: Numpy array of target (label) data.
:param validation_split: Float between 0 and 1. Percentage of training data to use for validation
:param epochs: Max number of times to train over dataset.
"""
self.model.fit(x=features, y=labels, epochs=epochs, verbose=1,
callbacks=[ReduceLROnPlateau(), EarlyStopping(patience=3)], validation_split=validation_split,
shuffle=True)
for layer in self.model.layers[:self._NUM_BOTTOM_LAYERS_TO_RETRAIN]:
layer.trainable = False
for layer in self.model.layers[self._NUM_BOTTOM_LAYERS_TO_RETRAIN:]:
layer.trainable = True
self.model.compile(optimizer='sgd', loss='categorical_crossentropy', metrics=['accuracy'])
self.model.fit(x=features, y=labels, epochs=50, verbose=1,
callbacks=[ReduceLROnPlateau(), EarlyStopping(patience=3)], validation_split=validation_split,
shuffle=True)
|
thoughtworksarts/EmoPy | EmoPy/src/neuralnets.py | TransferLearningNN.fit | python | def fit(self, features, labels, validation_split, epochs=50):
self.model.fit(x=features, y=labels, epochs=epochs, verbose=1,
callbacks=[ReduceLROnPlateau(), EarlyStopping(patience=3)], validation_split=validation_split,
shuffle=True)
for layer in self.model.layers[:self._NUM_BOTTOM_LAYERS_TO_RETRAIN]:
layer.trainable = False
for layer in self.model.layers[self._NUM_BOTTOM_LAYERS_TO_RETRAIN:]:
layer.trainable = True
self.model.compile(optimizer='sgd', loss='categorical_crossentropy', metrics=['accuracy'])
self.model.fit(x=features, y=labels, epochs=50, verbose=1,
callbacks=[ReduceLROnPlateau(), EarlyStopping(patience=3)], validation_split=validation_split,
shuffle=True) | Trains the neural net on the data provided.
:param features: Numpy array of training data.
:param labels: Numpy array of target (label) data.
:param validation_split: Float between 0 and 1. Percentage of training data to use for validation
:param epochs: Max number of times to train over dataset. | train | https://github.com/thoughtworksarts/EmoPy/blob/a0ab97b3719ebe0a9de9bfc5adae5e46c9b77fd7/EmoPy/src/neuralnets.py#L111-L132 | null | class TransferLearningNN(_FERNeuralNet):
"""
Transfer Learning Convolutional Neural Network initialized with pretrained weights.
:param model_name: name of pretrained model to use for initial weights. Options: ['Xception', 'VGG16', 'VGG19', 'ResNet50', 'InceptionV3', 'InceptionResNetV2']
:param emotion_map: dict of target emotion label keys with int values corresponding to the index of the emotion probability in the prediction output array
**Example**::
model = TransferLearningNN(model_name='inception_v3', target_labels=[0,1,2,3,4,5,6])
model.fit(images, labels, validation_split=0.15)
"""
_NUM_BOTTOM_LAYERS_TO_RETRAIN = 249
def __init__(self, model_name, emotion_map):
self.model_name = model_name
super().__init__(emotion_map)
def _init_model(self):
"""
Initialize base model from Keras and add top layers to match number of training emotions labels.
:return:
"""
base_model = self._get_base_model()
top_layer_model = base_model.output
top_layer_model = GlobalAveragePooling2D()(top_layer_model)
top_layer_model = Dense(1024, activation='relu')(top_layer_model)
prediction_layer = Dense(output_dim=len(self.emotion_map.keys()), activation='softmax')(top_layer_model)
model = Model(input=base_model.input, output=prediction_layer)
print(model.summary())
for layer in base_model.layers:
layer.trainable = False
model.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy'])
self.model = model
def _get_base_model(self):
"""
:return: base model from Keras based on user-supplied model name
"""
if self.model_name == 'inception_v3':
return InceptionV3(weights='imagenet', include_top=False)
elif self.model_name == 'xception':
return Xception(weights='imagenet', include_top=False)
elif self.model_name == 'vgg16':
return VGG16(weights='imagenet', include_top=False)
elif self.model_name == 'vgg19':
return VGG19(weights='imagenet', include_top=False)
elif self.model_name == 'resnet50':
return ResNet50(weights='imagenet', include_top=False)
else:
raise ValueError('Cannot find base model %s' % self.model_name)
|
thoughtworksarts/EmoPy | EmoPy/src/neuralnets.py | ConvolutionalLstmNN._init_model | python | def _init_model(self):
model = Sequential()
model.add(ConvLSTM2D(filters=self.filters, kernel_size=self.kernel_size, activation=self.activation,
input_shape=[self.time_delay] + list(self.image_size) + [self.channels],
data_format='channels_last', return_sequences=True))
model.add(BatchNormalization())
model.add(ConvLSTM2D(filters=self.filters, kernel_size=self.kernel_size, activation=self.activation,
input_shape=(self.time_delay, self.channels) + self.image_size,
data_format='channels_last', return_sequences=True))
model.add(BatchNormalization())
model.add(ConvLSTM2D(filters=self.filters, kernel_size=self.kernel_size, activation=self.activation))
model.add(BatchNormalization())
model.add(Conv2D(filters=1, kernel_size=self.kernel_size, activation="sigmoid", data_format="channels_last"))
model.add(Flatten())
model.add(Dense(units=len(self.emotion_map.keys()), activation="sigmoid"))
if self.verbose:
model.summary()
self.model = model | Composes all layers of CNN. | train | https://github.com/thoughtworksarts/EmoPy/blob/a0ab97b3719ebe0a9de9bfc5adae5e46c9b77fd7/EmoPy/src/neuralnets.py#L166-L186 | null | class ConvolutionalLstmNN(_FERNeuralNet):
"""
Convolutional Long Short Term Memory Neural Network.
:param image_size: dimensions of input images
:param channels: number of image channels
:param emotion_map: dict of target emotion label keys with int values corresponding to the index of the emotion probability in the prediction output array
:param time_delay: number of time steps for lookback
:param filters: number of filters/nodes per layer in CNN
:param kernel_size: size of sliding window for each layer of CNN
:param activation: name of activation function for CNN
:param verbose: if true, will print out extra process information
**Example**::
net = ConvolutionalLstmNN(target_dimensions=(64,64), channels=1, target_labels=[0,1,2,3,4,5,6], time_delay=3)
net.fit(features, labels, validation_split=0.15)
"""
def __init__(self, image_size, channels, emotion_map, time_delay=2, filters=10, kernel_size=(4, 4),
activation='sigmoid', verbose=False):
self.time_delay = time_delay
self.channels = channels
self.image_size = image_size
self.verbose = verbose
self.filters = filters
self.kernel_size = kernel_size
self.activation = activation
super().__init__(emotion_map)
def fit(self, features, labels, validation_split, batch_size=10, epochs=50):
"""
Trains the neural net on the data provided.
:param features: Numpy array of training data.
:param labels: Numpy array of target (label) data.
:param validation_split: Float between 0 and 1. Percentage of training data to use for validation
:param batch_size:
:param epochs: number of times to train over input dataset.
"""
self.model.compile(optimizer="RMSProp", loss="cosine_proximity", metrics=["accuracy"])
self.model.fit(features, labels, batch_size=batch_size, epochs=epochs, validation_split=validation_split,
callbacks=[ReduceLROnPlateau(), EarlyStopping(patience=3)])
|
thoughtworksarts/EmoPy | EmoPy/src/neuralnets.py | ConvolutionalNN._init_model | python | def _init_model(self):
model = Sequential()
model.add(Conv2D(input_shape=list(self.image_size) + [self.channels], filters=self.filters,
kernel_size=self.kernel_size, activation='relu', data_format='channels_last'))
model.add(
Conv2D(filters=self.filters, kernel_size=self.kernel_size, activation='relu', data_format='channels_last'))
model.add(MaxPooling2D())
model.add(
Conv2D(filters=self.filters, kernel_size=self.kernel_size, activation='relu', data_format='channels_last'))
model.add(
Conv2D(filters=self.filters, kernel_size=self.kernel_size, activation='relu', data_format='channels_last'))
model.add(MaxPooling2D())
model.add(Flatten())
model.add(Dense(units=len(self.emotion_map.keys()), activation="relu"))
if self.verbose:
model.summary()
self.model = model | Composes all layers of 2D CNN. | train | https://github.com/thoughtworksarts/EmoPy/blob/a0ab97b3719ebe0a9de9bfc5adae5e46c9b77fd7/EmoPy/src/neuralnets.py#L232-L252 | null | class ConvolutionalNN(_FERNeuralNet):
"""
2D Convolutional Neural Network
:param image_size: dimensions of input images
:param channels: number of image channels
:param emotion_map: dict of target emotion label keys with int values corresponding to the index of the emotion probability in the prediction output array
:param filters: number of filters/nodes per layer in CNN
:param kernel_size: size of sliding window for each layer of CNN
:param activation: name of activation function for CNN
:param verbose: if true, will print out extra process information
**Example**::
net = ConvolutionalNN(target_dimensions=(64,64), channels=1, target_labels=[0,1,2,3,4,5,6], time_delay=3)
net.fit(features, labels, validation_split=0.15)
"""
def __init__(self, image_size, channels, emotion_map, filters=10, kernel_size=(4, 4), activation='relu',
verbose=False):
self.channels = channels
self.image_size = image_size
self.verbose = verbose
self.filters = filters
self.kernel_size = kernel_size
self.activation = activation
super().__init__(emotion_map)
def fit(self, image_data, labels, validation_split, epochs=50):
"""
Trains the neural net on the data provided.
:param image_data: Numpy array of training data.
:param labels: Numpy array of target (label) data.
:param validation_split: Float between 0 and 1. Percentage of training data to use for validation
:param batch_size:
:param epochs: number of times to train over input dataset.
"""
self.model.compile(optimizer="RMSProp", loss="cosine_proximity", metrics=["accuracy"])
self.model.fit(image_data, labels, epochs=epochs, validation_split=validation_split,
callbacks=[ReduceLROnPlateau(), EarlyStopping(patience=3)])
|
thoughtworksarts/EmoPy | EmoPy/src/neuralnets.py | ConvolutionalNN.fit | python | def fit(self, image_data, labels, validation_split, epochs=50):
self.model.compile(optimizer="RMSProp", loss="cosine_proximity", metrics=["accuracy"])
self.model.fit(image_data, labels, epochs=epochs, validation_split=validation_split,
callbacks=[ReduceLROnPlateau(), EarlyStopping(patience=3)]) | Trains the neural net on the data provided.
:param image_data: Numpy array of training data.
:param labels: Numpy array of target (label) data.
:param validation_split: Float between 0 and 1. Percentage of training data to use for validation
:param batch_size:
:param epochs: number of times to train over input dataset. | train | https://github.com/thoughtworksarts/EmoPy/blob/a0ab97b3719ebe0a9de9bfc5adae5e46c9b77fd7/EmoPy/src/neuralnets.py#L254-L266 | null | class ConvolutionalNN(_FERNeuralNet):
"""
2D Convolutional Neural Network
:param image_size: dimensions of input images
:param channels: number of image channels
:param emotion_map: dict of target emotion label keys with int values corresponding to the index of the emotion probability in the prediction output array
:param filters: number of filters/nodes per layer in CNN
:param kernel_size: size of sliding window for each layer of CNN
:param activation: name of activation function for CNN
:param verbose: if true, will print out extra process information
**Example**::
net = ConvolutionalNN(target_dimensions=(64,64), channels=1, target_labels=[0,1,2,3,4,5,6], time_delay=3)
net.fit(features, labels, validation_split=0.15)
"""
def __init__(self, image_size, channels, emotion_map, filters=10, kernel_size=(4, 4), activation='relu',
verbose=False):
self.channels = channels
self.image_size = image_size
self.verbose = verbose
self.filters = filters
self.kernel_size = kernel_size
self.activation = activation
super().__init__(emotion_map)
def _init_model(self):
"""
Composes all layers of 2D CNN.
"""
model = Sequential()
model.add(Conv2D(input_shape=list(self.image_size) + [self.channels], filters=self.filters,
kernel_size=self.kernel_size, activation='relu', data_format='channels_last'))
model.add(
Conv2D(filters=self.filters, kernel_size=self.kernel_size, activation='relu', data_format='channels_last'))
model.add(MaxPooling2D())
model.add(
Conv2D(filters=self.filters, kernel_size=self.kernel_size, activation='relu', data_format='channels_last'))
model.add(
Conv2D(filters=self.filters, kernel_size=self.kernel_size, activation='relu', data_format='channels_last'))
model.add(MaxPooling2D())
model.add(Flatten())
model.add(Dense(units=len(self.emotion_map.keys()), activation="relu"))
if self.verbose:
model.summary()
self.model = model
|
thoughtworksarts/EmoPy | EmoPy/src/neuralnets.py | TimeDelayConvNN._init_model | python | def _init_model(self):
model = Sequential()
model.add(Conv3D(input_shape=[self.time_delay] + list(self.image_size) + [self.channels], filters=self.filters,
kernel_size=self.kernel_size, activation='relu', data_format='channels_last'))
model.add(
Conv3D(filters=self.filters, kernel_size=self.kernel_size, activation='relu', data_format='channels_last'))
model.add(MaxPooling3D(pool_size=(1, 2, 2), data_format='channels_last'))
model.add(
Conv3D(filters=self.filters, kernel_size=self.kernel_size, activation='relu', data_format='channels_last'))
model.add(
Conv3D(filters=self.filters, kernel_size=self.kernel_size, activation='relu', data_format='channels_last'))
model.add(MaxPooling3D(pool_size=(1, 2, 2), data_format='channels_last'))
model.add(Flatten())
model.add(Dense(units=len(self.emotion_map.keys()), activation="relu"))
if self.verbose:
model.summary()
self.model = model | Composes all layers of 3D CNN. | train | https://github.com/thoughtworksarts/EmoPy/blob/a0ab97b3719ebe0a9de9bfc5adae5e46c9b77fd7/EmoPy/src/neuralnets.py#L300-L320 | null | class TimeDelayConvNN(_FERNeuralNet):
"""
The Time-Delayed Convolutional Neural Network model is a 3D-Convolutional network that trains on 3-dimensional temporal image data. One training sample will contain n number of images from a series and its emotion label will be that of the most recent image.
:param image_size: dimensions of input images
:param time_delay: number of past time steps included in each training sample
:param channels: number of image channels
:param emotion_map: dict of target emotion label keys with int values corresponding to the index of the emotion probability in the prediction output array
:param filters: number of filters/nodes per layer in CNN
:param kernel_size: size of sliding window for each layer of CNN
:param activation: name of activation function for CNN
:param verbose: if true, will print out extra process information
**Example**::
        model = TimeDelayConvNN(image_size=(64, 64), channels=1, emotion_map=emotion_map, time_delay=3)
model.fit(image_data, labels, validation_split=0.15)
"""
def __init__(self, image_size, channels, emotion_map, time_delay, filters=32, kernel_size=(1, 4, 4),
activation='relu', verbose=False):
self.image_size = image_size
self.time_delay = time_delay
self.channels = channels
self.verbose = verbose
self.filters = filters
self.kernel_size = kernel_size
self.activation = activation
super().__init__(emotion_map)
def fit(self, image_data, labels, validation_split, epochs=50):
"""
Trains the neural net on the data provided.
:param image_data: Numpy array of training data.
:param labels: Numpy array of target (label) data.
        :param validation_split: Float between 0 and 1. Fraction of the training data to hold out for validation.
        :param epochs: Number of times to train over the input dataset.
"""
self.model.compile(optimizer="RMSProp", loss="cosine_proximity", metrics=["accuracy"])
self.model.fit(image_data, labels, epochs=epochs, validation_split=validation_split,
callbacks=[ReduceLROnPlateau(), EarlyStopping(patience=3)])
|
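An end-to-end sketch pairing the 3D stack above with its fit method, again assuming TensorFlow's Keras and purely dummy data; only the tensor shapes and training plumbing are demonstrated. Note that 'cosine_proximity' is a legacy Keras loss name; current releases spell it 'cosine_similarity'.

import numpy as np
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Conv3D, MaxPooling3D, Flatten, Dense
from tensorflow.keras.callbacks import ReduceLROnPlateau, EarlyStopping

# One sample = time_delay consecutive frames: (time, height, width, channels).
time_delay, h, w, channels, labels = 3, 64, 64, 1, 7

model = Sequential([
    Conv3D(32, (1, 4, 4), activation='relu',
           input_shape=(time_delay, h, w, channels), data_format='channels_last'),
    Conv3D(32, (1, 4, 4), activation='relu'),
    MaxPooling3D(pool_size=(1, 2, 2)),  # pool spatially, keep the time axis
    Conv3D(32, (1, 4, 4), activation='relu'),
    Conv3D(32, (1, 4, 4), activation='relu'),
    MaxPooling3D(pool_size=(1, 2, 2)),
    Flatten(),
    Dense(labels, activation='relu'),
])

x = np.random.rand(20, time_delay, h, w, channels)    # 20 dummy samples
y = np.eye(labels)[np.random.randint(0, labels, 20)]  # one-hot dummy labels

model.compile(optimizer='rmsprop', loss='cosine_similarity', metrics=['accuracy'])
model.fit(x, y, epochs=2, validation_split=0.15,
          callbacks=[ReduceLROnPlateau(), EarlyStopping(patience=3)])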
mobolic/facebook-sdk | facebook/__init__.py | get_user_from_cookie | python | def get_user_from_cookie(cookies, app_id, app_secret):
cookie = cookies.get("fbsr_" + app_id, "")
if not cookie:
return None
parsed_request = parse_signed_request(cookie, app_secret)
if not parsed_request:
return None
try:
result = GraphAPI().get_access_token_from_code(
parsed_request["code"], "", app_id, app_secret
)
except GraphAPIError:
return None
result["uid"] = parsed_request["user_id"]
return result | Parses the cookie set by the official Facebook JavaScript SDK.
cookies should be a dictionary-like object mapping cookie names to
cookie values.
If the user is logged in via Facebook, we return a dictionary with
the keys "uid" and "access_token". The former is the user's
Facebook ID, and the latter can be used to make authenticated
requests to the Graph API. If the user is not logged in, we
return None.
Read more about Facebook authentication at
https://developers.facebook.com/docs/facebook-login. | train | https://github.com/mobolic/facebook-sdk/blob/65ff582e77f7ed68b6e9643a7490e5dee2a1031b/facebook/__init__.py#L443-L472 | [
"def parse_signed_request(signed_request, app_secret):\n \"\"\" Return dictionary with signed request data.\n\n We return a dictionary containing the information in the\n signed_request. This includes a user_id if the user has authorised\n your application, as well as any information requested.\n\n If the signed_request is malformed or corrupted, False is returned.\n\n \"\"\"\n try:\n encoded_sig, payload = map(str, signed_request.split(\".\", 1))\n\n sig = base64.urlsafe_b64decode(\n encoded_sig + \"=\" * ((4 - len(encoded_sig) % 4) % 4)\n )\n data = base64.urlsafe_b64decode(\n payload + \"=\" * ((4 - len(payload) % 4) % 4)\n )\n except IndexError:\n # Signed request was malformed.\n return False\n except TypeError:\n # Signed request had a corrupted payload.\n return False\n except binascii.Error:\n # Signed request had a corrupted payload.\n return False\n\n data = json.loads(data.decode(\"ascii\"))\n if data.get(\"algorithm\", \"\").upper() != \"HMAC-SHA256\":\n return False\n\n # HMAC can only handle ascii (byte) strings\n # https://bugs.python.org/issue5285\n app_secret = app_secret.encode(\"ascii\")\n payload = payload.encode(\"ascii\")\n\n expected_sig = hmac.new(\n app_secret, msg=payload, digestmod=hashlib.sha256\n ).digest()\n if sig != expected_sig:\n return False\n\n return data\n",
"def get_access_token_from_code(\n self, code, redirect_uri, app_id, app_secret\n):\n \"\"\"Get an access token from the \"code\" returned from an OAuth dialog.\n\n Returns a dict containing the user-specific access token and its\n expiration date (if applicable).\n\n \"\"\"\n args = {\n \"code\": code,\n \"redirect_uri\": redirect_uri,\n \"client_id\": app_id,\n \"client_secret\": app_secret,\n }\n\n return self.request(\n \"{0}/oauth/access_token\".format(self.version), args\n )\n"
] | #!/usr/bin/env python
#
# Copyright 2010 Facebook
# Copyright 2015 Mobolic
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Python client library for the Facebook Platform.
This client library is designed to support the Graph API and the
official Facebook JavaScript SDK, which is the canonical way to
implement Facebook authentication. Read more about the Graph API at
https://developers.facebook.com/docs/graph-api.
"""
import hashlib
import hmac
import binascii
import base64
import requests
import json
import re
try:
from urllib.parse import parse_qs, urlencode, urlparse
except ImportError:
from urlparse import parse_qs, urlparse
from urllib import urlencode
from . import version
__version__ = version.__version__
FACEBOOK_GRAPH_URL = "https://graph.facebook.com/"
FACEBOOK_WWW_URL = "https://www.facebook.com/"
FACEBOOK_OAUTH_DIALOG_PATH = "dialog/oauth?"
VALID_API_VERSIONS = [
"2.8",
"2.9",
"2.10",
"2.11",
"2.12",
"3.0",
"3.1",
"3.2",
]
VALID_SEARCH_TYPES = ["place", "placetopic"]
class GraphAPI(object):
"""A client for the Facebook Graph API.
https://developers.facebook.com/docs/graph-api
The Graph API is made up of the objects in Facebook (e.g., people,
pages, events, photos) and the connections between them (e.g.,
friends, photo tags, and event RSVPs). This client provides access
to those primitive types in a generic way. For example, given an
OAuth access token, this will fetch the profile of the active user
and the list of the user's friends:
graph = facebook.GraphAPI(access_token)
user = graph.get_object("me")
friends = graph.get_connections(user["id"], "friends")
You can see a list of all of the objects and connections supported
by the API at https://developers.facebook.com/docs/graph-api/reference/.
You can obtain an access token via OAuth or by using the Facebook
JavaScript SDK. See
https://developers.facebook.com/docs/facebook-login for details.
If you are using the JavaScript SDK, you can use the
get_user_from_cookie() method below to get the OAuth access token
for the active user from the cookie saved by the SDK.
"""
def __init__(
self,
access_token=None,
timeout=None,
version=None,
proxies=None,
session=None,
):
# The default version is only used if the version kwarg does not exist.
default_version = VALID_API_VERSIONS[0]
self.access_token = access_token
self.timeout = timeout
self.proxies = proxies
self.session = session or requests.Session()
if version:
            version_regex = re.compile(r"^\d\.\d{1,2}$")
match = version_regex.search(str(version))
if match is not None:
if str(version) not in VALID_API_VERSIONS:
raise GraphAPIError(
"Valid API versions are "
+ str(VALID_API_VERSIONS).strip("[]")
)
else:
self.version = "v" + str(version)
else:
raise GraphAPIError(
"Version number should be in the"
" following format: #.# (e.g. 2.0)."
)
else:
self.version = "v" + default_version
def get_permissions(self, user_id):
"""Fetches the permissions object from the graph."""
response = self.request(
"{0}/{1}/permissions".format(self.version, user_id), {}
)["data"]
return {x["permission"] for x in response if x["status"] == "granted"}
def get_object(self, id, **args):
"""Fetches the given object from the graph."""
return self.request("{0}/{1}".format(self.version, id), args)
def get_objects(self, ids, **args):
"""Fetches all of the given object from the graph.
We return a map from ID to object. If any of the IDs are
invalid, we raise an exception.
"""
args["ids"] = ",".join(ids)
return self.request(self.version + "/", args)
def search(self, type, **args):
"""https://developers.facebook.com/docs/places/search"""
if type not in VALID_SEARCH_TYPES:
raise GraphAPIError(
"Valid types are: %s" % ", ".join(VALID_SEARCH_TYPES)
)
args["type"] = type
return self.request(self.version + "/search/", args)
def get_connections(self, id, connection_name, **args):
"""Fetches the connections for given object."""
return self.request(
"{0}/{1}/{2}".format(self.version, id, connection_name), args
)
def get_all_connections(self, id, connection_name, **args):
"""Get all pages from a get_connections call
This will iterate over all pages returned by a get_connections call
and yield the individual items.
"""
while True:
page = self.get_connections(id, connection_name, **args)
for post in page["data"]:
yield post
next = page.get("paging", {}).get("next")
if not next:
return
args = parse_qs(urlparse(next).query)
del args["access_token"]
def put_object(self, parent_object, connection_name, **data):
"""Writes the given object to the graph, connected to the given parent.
For example,
graph.put_object("me", "feed", message="Hello, world")
writes "Hello, world" to the active user's wall. Likewise, this
will comment on the first post of the active user's feed:
feed = graph.get_connections("me", "feed")
post = feed["data"][0]
graph.put_object(post["id"], "comments", message="First!")
Certain operations require extended permissions. See
https://developers.facebook.com/docs/facebook-login/permissions
for details about permissions.
"""
assert self.access_token, "Write operations require an access token"
return self.request(
"{0}/{1}/{2}".format(self.version, parent_object, connection_name),
post_args=data,
method="POST",
)
def put_comment(self, object_id, message):
"""Writes the given comment on the given post."""
return self.put_object(object_id, "comments", message=message)
def put_like(self, object_id):
"""Likes the given post."""
return self.put_object(object_id, "likes")
def delete_object(self, id):
"""Deletes the object with the given ID from the graph."""
return self.request(
"{0}/{1}".format(self.version, id), method="DELETE"
)
def delete_request(self, user_id, request_id):
"""Deletes the Request with the given ID for the given user."""
return self.request(
"{0}_{1}".format(request_id, user_id), method="DELETE"
)
def put_photo(self, image, album_path="me/photos", **kwargs):
"""
Upload an image using multipart/form-data.
image - A file object representing the image to be uploaded.
album_path - A path representing where the image should be uploaded.
"""
return self.request(
"{0}/{1}".format(self.version, album_path),
post_args=kwargs,
files={"source": image},
method="POST",
)
def get_version(self):
"""Fetches the current version number of the Graph API being used."""
args = {"access_token": self.access_token}
try:
response = self.session.request(
"GET",
FACEBOOK_GRAPH_URL + self.version + "/me",
params=args,
timeout=self.timeout,
proxies=self.proxies,
)
        except requests.HTTPError as e:
            # requests' HTTPError carries the response object; it has no read().
            raise GraphAPIError(e.response.json())
try:
headers = response.headers
version = headers["facebook-api-version"].replace("v", "")
return str(version)
except Exception:
raise GraphAPIError("API version number not available")
def request(
self, path, args=None, post_args=None, files=None, method=None
):
"""Fetches the given path in the Graph API.
We translate args to a valid query string. If post_args is
given, we send a POST request to the given path with the given
arguments.
"""
if args is None:
args = dict()
if post_args is not None:
method = "POST"
# Add `access_token` to post_args or args if it has not already been
# included.
if self.access_token:
            # If post_args exists, we assume that args either does not exist
# or it does not need `access_token`.
if post_args and "access_token" not in post_args:
post_args["access_token"] = self.access_token
elif "access_token" not in args:
args["access_token"] = self.access_token
try:
response = self.session.request(
method or "GET",
FACEBOOK_GRAPH_URL + path,
timeout=self.timeout,
params=args,
data=post_args,
proxies=self.proxies,
files=files,
)
        except requests.HTTPError as e:
            # requests' HTTPError carries the response object; it has no read().
            raise GraphAPIError(e.response.json())
headers = response.headers
if "json" in headers["content-type"]:
result = response.json()
elif "image/" in headers["content-type"]:
mimetype = headers["content-type"]
result = {
"data": response.content,
"mime-type": mimetype,
"url": response.url,
}
elif "access_token" in parse_qs(response.text):
query_str = parse_qs(response.text)
if "access_token" in query_str:
result = {"access_token": query_str["access_token"][0]}
if "expires" in query_str:
result["expires"] = query_str["expires"][0]
else:
raise GraphAPIError(response.json())
else:
raise GraphAPIError("Maintype was not text, image, or querystring")
if result and isinstance(result, dict) and result.get("error"):
raise GraphAPIError(result)
return result
def get_app_access_token(self, app_id, app_secret, offline=False):
"""
Get the application's access token as a string.
If offline=True, use the concatenated app ID and secret
instead of making an API call.
<https://developers.facebook.com/docs/facebook-login/
access-tokens#apptokens>
"""
if offline:
return "{0}|{1}".format(app_id, app_secret)
else:
args = {
"grant_type": "client_credentials",
"client_id": app_id,
"client_secret": app_secret,
}
return self.request(
"{0}/oauth/access_token".format(self.version), args=args
)["access_token"]
def get_access_token_from_code(
self, code, redirect_uri, app_id, app_secret
):
"""Get an access token from the "code" returned from an OAuth dialog.
Returns a dict containing the user-specific access token and its
expiration date (if applicable).
"""
args = {
"code": code,
"redirect_uri": redirect_uri,
"client_id": app_id,
"client_secret": app_secret,
}
return self.request(
"{0}/oauth/access_token".format(self.version), args
)
def extend_access_token(self, app_id, app_secret):
"""
Extends the expiration time of a valid OAuth access token. See
<https://developers.facebook.com/docs/facebook-login/access-tokens/
expiration-and-extension>
"""
args = {
"client_id": app_id,
"client_secret": app_secret,
"grant_type": "fb_exchange_token",
"fb_exchange_token": self.access_token,
}
return self.request(
"{0}/oauth/access_token".format(self.version), args=args
)
def debug_access_token(self, token, app_id, app_secret):
"""
Gets information about a user access token issued by an app. See
<https://developers.facebook.com/docs/facebook-login/
access-tokens/debugging-and-error-handling>
We can generate the app access token by concatenating the app
id and secret: <https://developers.facebook.com/docs/
facebook-login/access-tokens#apptokens>
"""
args = {
"input_token": token,
"access_token": "{0}|{1}".format(app_id, app_secret),
}
return self.request(self.version + "/" + "debug_token", args=args)
def get_auth_url(self, app_id, canvas_url, perms=None, **kwargs):
"""Build a URL to create an OAuth dialog."""
url = "{0}{1}/{2}".format(
FACEBOOK_WWW_URL, self.version, FACEBOOK_OAUTH_DIALOG_PATH
)
args = {"client_id": app_id, "redirect_uri": canvas_url}
if perms:
args["scope"] = ",".join(perms)
args.update(kwargs)
return url + urlencode(args)
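    # Usage sketch (hypothetical app id and redirect URI):
    #
    #     url = GraphAPI(version="3.1").get_auth_url(
    #         "1234567890", "https://example.com/fb-callback", perms=["email"])
    #     # -> https://www.facebook.com/v3.1/dialog/oauth?client_id=1234567890
    #     #    &redirect_uri=https%3A%2F%2Fexample.com%2Ffb-callback&scope=email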
class GraphAPIError(Exception):
def __init__(self, result):
self.result = result
self.code = None
try:
self.type = result["error_code"]
except (KeyError, TypeError):
self.type = ""
# OAuth 2.0 Draft 10
try:
self.message = result["error_description"]
except (KeyError, TypeError):
# OAuth 2.0 Draft 00
try:
self.message = result["error"]["message"]
self.code = result["error"].get("code")
if not self.type:
self.type = result["error"].get("type", "")
except (KeyError, TypeError):
# REST server style
try:
self.message = result["error_msg"]
except (KeyError, TypeError):
self.message = result
Exception.__init__(self, self.message)
def parse_signed_request(signed_request, app_secret):
""" Return dictionary with signed request data.
We return a dictionary containing the information in the
signed_request. This includes a user_id if the user has authorised
your application, as well as any information requested.
If the signed_request is malformed or corrupted, False is returned.
"""
try:
encoded_sig, payload = map(str, signed_request.split(".", 1))
sig = base64.urlsafe_b64decode(
encoded_sig + "=" * ((4 - len(encoded_sig) % 4) % 4)
)
data = base64.urlsafe_b64decode(
payload + "=" * ((4 - len(payload) % 4) % 4)
)
except IndexError:
# Signed request was malformed.
return False
except TypeError:
# Signed request had a corrupted payload.
return False
except binascii.Error:
# Signed request had a corrupted payload.
return False
data = json.loads(data.decode("ascii"))
if data.get("algorithm", "").upper() != "HMAC-SHA256":
return False
# HMAC can only handle ascii (byte) strings
# https://bugs.python.org/issue5285
app_secret = app_secret.encode("ascii")
payload = payload.encode("ascii")
expected_sig = hmac.new(
app_secret, msg=payload, digestmod=hashlib.sha256
).digest()
if sig != expected_sig:
return False
return data
|
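A typical call site for get_user_from_cookie above is a web request handler. A hedged sketch using Flask (Flask itself, the app id, and the secret are illustrative choices; any dict-like cookie mapping works):

import facebook
from flask import Flask, request

app = Flask(__name__)
APP_ID, APP_SECRET = "1234567890", "app-secret"  # placeholders, not real credentials

@app.route("/me")
def me():
    # request.cookies is dict-like, which is all get_user_from_cookie needs.
    user = facebook.get_user_from_cookie(request.cookies, APP_ID, APP_SECRET)
    if user is None:
        return "Not logged in via Facebook", 401
    graph = facebook.GraphAPI(access_token=user["access_token"])
    return graph.get_object(user["uid"])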
mobolic/facebook-sdk | facebook/__init__.py | parse_signed_request | python | def parse_signed_request(signed_request, app_secret):
try:
encoded_sig, payload = map(str, signed_request.split(".", 1))
sig = base64.urlsafe_b64decode(
encoded_sig + "=" * ((4 - len(encoded_sig) % 4) % 4)
)
data = base64.urlsafe_b64decode(
payload + "=" * ((4 - len(payload) % 4) % 4)
)
except IndexError:
# Signed request was malformed.
return False
except TypeError:
# Signed request had a corrupted payload.
return False
except binascii.Error:
# Signed request had a corrupted payload.
return False
data = json.loads(data.decode("ascii"))
if data.get("algorithm", "").upper() != "HMAC-SHA256":
return False
# HMAC can only handle ascii (byte) strings
# https://bugs.python.org/issue5285
app_secret = app_secret.encode("ascii")
payload = payload.encode("ascii")
expected_sig = hmac.new(
app_secret, msg=payload, digestmod=hashlib.sha256
).digest()
if sig != expected_sig:
return False
return data | Return dictionary with signed request data.
We return a dictionary containing the information in the
signed_request. This includes a user_id if the user has authorised
your application, as well as any information requested.
If the signed_request is malformed or corrupted, False is returned. | train | https://github.com/mobolic/facebook-sdk/blob/65ff582e77f7ed68b6e9643a7490e5dee2a1031b/facebook/__init__.py#L475-L519 | null | #!/usr/bin/env python
#
# Copyright 2010 Facebook
# Copyright 2015 Mobolic
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Python client library for the Facebook Platform.
This client library is designed to support the Graph API and the
official Facebook JavaScript SDK, which is the canonical way to
implement Facebook authentication. Read more about the Graph API at
https://developers.facebook.com/docs/graph-api.
"""
import hashlib
import hmac
import binascii
import base64
import requests
import json
import re
try:
from urllib.parse import parse_qs, urlencode, urlparse
except ImportError:
from urlparse import parse_qs, urlparse
from urllib import urlencode
from . import version
__version__ = version.__version__
FACEBOOK_GRAPH_URL = "https://graph.facebook.com/"
FACEBOOK_WWW_URL = "https://www.facebook.com/"
FACEBOOK_OAUTH_DIALOG_PATH = "dialog/oauth?"
VALID_API_VERSIONS = [
"2.8",
"2.9",
"2.10",
"2.11",
"2.12",
"3.0",
"3.1",
"3.2",
]
VALID_SEARCH_TYPES = ["place", "placetopic"]
class GraphAPI(object):
"""A client for the Facebook Graph API.
https://developers.facebook.com/docs/graph-api
The Graph API is made up of the objects in Facebook (e.g., people,
pages, events, photos) and the connections between them (e.g.,
friends, photo tags, and event RSVPs). This client provides access
to those primitive types in a generic way. For example, given an
OAuth access token, this will fetch the profile of the active user
and the list of the user's friends:
graph = facebook.GraphAPI(access_token)
user = graph.get_object("me")
friends = graph.get_connections(user["id"], "friends")
You can see a list of all of the objects and connections supported
by the API at https://developers.facebook.com/docs/graph-api/reference/.
You can obtain an access token via OAuth or by using the Facebook
JavaScript SDK. See
https://developers.facebook.com/docs/facebook-login for details.
If you are using the JavaScript SDK, you can use the
get_user_from_cookie() method below to get the OAuth access token
for the active user from the cookie saved by the SDK.
"""
def __init__(
self,
access_token=None,
timeout=None,
version=None,
proxies=None,
session=None,
):
# The default version is only used if the version kwarg does not exist.
default_version = VALID_API_VERSIONS[0]
self.access_token = access_token
self.timeout = timeout
self.proxies = proxies
self.session = session or requests.Session()
if version:
            version_regex = re.compile(r"^\d\.\d{1,2}$")
match = version_regex.search(str(version))
if match is not None:
if str(version) not in VALID_API_VERSIONS:
raise GraphAPIError(
"Valid API versions are "
+ str(VALID_API_VERSIONS).strip("[]")
)
else:
self.version = "v" + str(version)
else:
raise GraphAPIError(
"Version number should be in the"
" following format: #.# (e.g. 2.0)."
)
else:
self.version = "v" + default_version
def get_permissions(self, user_id):
"""Fetches the permissions object from the graph."""
response = self.request(
"{0}/{1}/permissions".format(self.version, user_id), {}
)["data"]
return {x["permission"] for x in response if x["status"] == "granted"}
def get_object(self, id, **args):
"""Fetches the given object from the graph."""
return self.request("{0}/{1}".format(self.version, id), args)
def get_objects(self, ids, **args):
"""Fetches all of the given object from the graph.
We return a map from ID to object. If any of the IDs are
invalid, we raise an exception.
"""
args["ids"] = ",".join(ids)
return self.request(self.version + "/", args)
def search(self, type, **args):
"""https://developers.facebook.com/docs/places/search"""
if type not in VALID_SEARCH_TYPES:
raise GraphAPIError(
"Valid types are: %s" % ", ".join(VALID_SEARCH_TYPES)
)
args["type"] = type
return self.request(self.version + "/search/", args)
def get_connections(self, id, connection_name, **args):
"""Fetches the connections for given object."""
return self.request(
"{0}/{1}/{2}".format(self.version, id, connection_name), args
)
def get_all_connections(self, id, connection_name, **args):
"""Get all pages from a get_connections call
This will iterate over all pages returned by a get_connections call
and yield the individual items.
"""
while True:
page = self.get_connections(id, connection_name, **args)
for post in page["data"]:
yield post
next = page.get("paging", {}).get("next")
if not next:
return
args = parse_qs(urlparse(next).query)
del args["access_token"]
def put_object(self, parent_object, connection_name, **data):
"""Writes the given object to the graph, connected to the given parent.
For example,
graph.put_object("me", "feed", message="Hello, world")
writes "Hello, world" to the active user's wall. Likewise, this
will comment on the first post of the active user's feed:
feed = graph.get_connections("me", "feed")
post = feed["data"][0]
graph.put_object(post["id"], "comments", message="First!")
Certain operations require extended permissions. See
https://developers.facebook.com/docs/facebook-login/permissions
for details about permissions.
"""
assert self.access_token, "Write operations require an access token"
return self.request(
"{0}/{1}/{2}".format(self.version, parent_object, connection_name),
post_args=data,
method="POST",
)
def put_comment(self, object_id, message):
"""Writes the given comment on the given post."""
return self.put_object(object_id, "comments", message=message)
def put_like(self, object_id):
"""Likes the given post."""
return self.put_object(object_id, "likes")
def delete_object(self, id):
"""Deletes the object with the given ID from the graph."""
return self.request(
"{0}/{1}".format(self.version, id), method="DELETE"
)
def delete_request(self, user_id, request_id):
"""Deletes the Request with the given ID for the given user."""
return self.request(
"{0}_{1}".format(request_id, user_id), method="DELETE"
)
def put_photo(self, image, album_path="me/photos", **kwargs):
"""
Upload an image using multipart/form-data.
image - A file object representing the image to be uploaded.
album_path - A path representing where the image should be uploaded.
"""
return self.request(
"{0}/{1}".format(self.version, album_path),
post_args=kwargs,
files={"source": image},
method="POST",
)
def get_version(self):
"""Fetches the current version number of the Graph API being used."""
args = {"access_token": self.access_token}
try:
response = self.session.request(
"GET",
FACEBOOK_GRAPH_URL + self.version + "/me",
params=args,
timeout=self.timeout,
proxies=self.proxies,
)
        except requests.HTTPError as e:
            # requests' HTTPError carries the response object; it has no read().
            raise GraphAPIError(e.response.json())
try:
headers = response.headers
version = headers["facebook-api-version"].replace("v", "")
return str(version)
except Exception:
raise GraphAPIError("API version number not available")
def request(
self, path, args=None, post_args=None, files=None, method=None
):
"""Fetches the given path in the Graph API.
We translate args to a valid query string. If post_args is
given, we send a POST request to the given path with the given
arguments.
"""
if args is None:
args = dict()
if post_args is not None:
method = "POST"
# Add `access_token` to post_args or args if it has not already been
# included.
if self.access_token:
            # If post_args exists, we assume that args either does not exist
# or it does not need `access_token`.
if post_args and "access_token" not in post_args:
post_args["access_token"] = self.access_token
elif "access_token" not in args:
args["access_token"] = self.access_token
try:
response = self.session.request(
method or "GET",
FACEBOOK_GRAPH_URL + path,
timeout=self.timeout,
params=args,
data=post_args,
proxies=self.proxies,
files=files,
)
        except requests.HTTPError as e:
            # requests' HTTPError carries the response object; it has no read().
            raise GraphAPIError(e.response.json())
headers = response.headers
if "json" in headers["content-type"]:
result = response.json()
elif "image/" in headers["content-type"]:
mimetype = headers["content-type"]
result = {
"data": response.content,
"mime-type": mimetype,
"url": response.url,
}
elif "access_token" in parse_qs(response.text):
query_str = parse_qs(response.text)
if "access_token" in query_str:
result = {"access_token": query_str["access_token"][0]}
if "expires" in query_str:
result["expires"] = query_str["expires"][0]
else:
raise GraphAPIError(response.json())
else:
raise GraphAPIError("Maintype was not text, image, or querystring")
if result and isinstance(result, dict) and result.get("error"):
raise GraphAPIError(result)
return result
def get_app_access_token(self, app_id, app_secret, offline=False):
"""
Get the application's access token as a string.
If offline=True, use the concatenated app ID and secret
instead of making an API call.
<https://developers.facebook.com/docs/facebook-login/
access-tokens#apptokens>
"""
if offline:
return "{0}|{1}".format(app_id, app_secret)
else:
args = {
"grant_type": "client_credentials",
"client_id": app_id,
"client_secret": app_secret,
}
return self.request(
"{0}/oauth/access_token".format(self.version), args=args
)["access_token"]
def get_access_token_from_code(
self, code, redirect_uri, app_id, app_secret
):
"""Get an access token from the "code" returned from an OAuth dialog.
Returns a dict containing the user-specific access token and its
expiration date (if applicable).
"""
args = {
"code": code,
"redirect_uri": redirect_uri,
"client_id": app_id,
"client_secret": app_secret,
}
return self.request(
"{0}/oauth/access_token".format(self.version), args
)
def extend_access_token(self, app_id, app_secret):
"""
Extends the expiration time of a valid OAuth access token. See
<https://developers.facebook.com/docs/facebook-login/access-tokens/
expiration-and-extension>
"""
args = {
"client_id": app_id,
"client_secret": app_secret,
"grant_type": "fb_exchange_token",
"fb_exchange_token": self.access_token,
}
return self.request(
"{0}/oauth/access_token".format(self.version), args=args
)
def debug_access_token(self, token, app_id, app_secret):
"""
Gets information about a user access token issued by an app. See
<https://developers.facebook.com/docs/facebook-login/
access-tokens/debugging-and-error-handling>
We can generate the app access token by concatenating the app
id and secret: <https://developers.facebook.com/docs/
facebook-login/access-tokens#apptokens>
"""
args = {
"input_token": token,
"access_token": "{0}|{1}".format(app_id, app_secret),
}
return self.request(self.version + "/" + "debug_token", args=args)
def get_auth_url(self, app_id, canvas_url, perms=None, **kwargs):
"""Build a URL to create an OAuth dialog."""
url = "{0}{1}/{2}".format(
FACEBOOK_WWW_URL, self.version, FACEBOOK_OAUTH_DIALOG_PATH
)
args = {"client_id": app_id, "redirect_uri": canvas_url}
if perms:
args["scope"] = ",".join(perms)
args.update(kwargs)
return url + urlencode(args)
class GraphAPIError(Exception):
def __init__(self, result):
self.result = result
self.code = None
try:
self.type = result["error_code"]
except (KeyError, TypeError):
self.type = ""
# OAuth 2.0 Draft 10
try:
self.message = result["error_description"]
except (KeyError, TypeError):
# OAuth 2.0 Draft 00
try:
self.message = result["error"]["message"]
self.code = result["error"].get("code")
if not self.type:
self.type = result["error"].get("type", "")
except (KeyError, TypeError):
# REST server style
try:
self.message = result["error_msg"]
except (KeyError, TypeError):
self.message = result
Exception.__init__(self, self.message)
def get_user_from_cookie(cookies, app_id, app_secret):
"""Parses the cookie set by the official Facebook JavaScript SDK.
cookies should be a dictionary-like object mapping cookie names to
cookie values.
If the user is logged in via Facebook, we return a dictionary with
the keys "uid" and "access_token". The former is the user's
Facebook ID, and the latter can be used to make authenticated
requests to the Graph API. If the user is not logged in, we
return None.
Read more about Facebook authentication at
https://developers.facebook.com/docs/facebook-login.
"""
cookie = cookies.get("fbsr_" + app_id, "")
if not cookie:
return None
parsed_request = parse_signed_request(cookie, app_secret)
if not parsed_request:
return None
try:
result = GraphAPI().get_access_token_from_code(
parsed_request["code"], "", app_id, app_secret
)
except GraphAPIError:
return None
result["uid"] = parsed_request["user_id"]
return result
|
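Because parse_signed_request above is pure base64url decoding plus an HMAC-SHA256 check, it can be exercised with a hand-built signed request. A sketch that constructs one and verifies the round trip (the secret and payload are made up):

import base64, hashlib, hmac, json
from facebook import parse_signed_request

secret = "app-secret"  # illustrative only
payload = base64.urlsafe_b64encode(json.dumps(
    {"algorithm": "HMAC-SHA256", "user_id": "42", "code": "abc"}
).encode("ascii")).decode("ascii")

# Sign exactly the transmitted payload segment, as the parser expects.
sig = base64.urlsafe_b64encode(
    hmac.new(secret.encode("ascii"), payload.encode("ascii"), hashlib.sha256).digest()
).decode("ascii")

signed = sig + "." + payload
print(parse_signed_request(signed, secret))       # -> the payload dict
print(parse_signed_request(signed, "wrong-key"))  # -> False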
mobolic/facebook-sdk | facebook/__init__.py | GraphAPI.get_permissions | python | def get_permissions(self, user_id):
response = self.request(
"{0}/{1}/permissions".format(self.version, user_id), {}
)["data"]
return {x["permission"] for x in response if x["status"] == "granted"} | Fetches the permissions object from the graph. | train | https://github.com/mobolic/facebook-sdk/blob/65ff582e77f7ed68b6e9643a7490e5dee2a1031b/facebook/__init__.py#L126-L131 | [
"def request(\n self, path, args=None, post_args=None, files=None, method=None\n):\n \"\"\"Fetches the given path in the Graph API.\n\n We translate args to a valid query string. If post_args is\n given, we send a POST request to the given path with the given\n arguments.\n\n \"\"\"\n if args is None:\n args = dict()\n if post_args is not None:\n method = \"POST\"\n\n # Add `access_token` to post_args or args if it has not already been\n # included.\n if self.access_token:\n # If post_args exists, we assume that args either does not exists\n # or it does not need `access_token`.\n if post_args and \"access_token\" not in post_args:\n post_args[\"access_token\"] = self.access_token\n elif \"access_token\" not in args:\n args[\"access_token\"] = self.access_token\n\n try:\n response = self.session.request(\n method or \"GET\",\n FACEBOOK_GRAPH_URL + path,\n timeout=self.timeout,\n params=args,\n data=post_args,\n proxies=self.proxies,\n files=files,\n )\n except requests.HTTPError as e:\n response = json.loads(e.read())\n raise GraphAPIError(response)\n\n headers = response.headers\n if \"json\" in headers[\"content-type\"]:\n result = response.json()\n elif \"image/\" in headers[\"content-type\"]:\n mimetype = headers[\"content-type\"]\n result = {\n \"data\": response.content,\n \"mime-type\": mimetype,\n \"url\": response.url,\n }\n elif \"access_token\" in parse_qs(response.text):\n query_str = parse_qs(response.text)\n if \"access_token\" in query_str:\n result = {\"access_token\": query_str[\"access_token\"][0]}\n if \"expires\" in query_str:\n result[\"expires\"] = query_str[\"expires\"][0]\n else:\n raise GraphAPIError(response.json())\n else:\n raise GraphAPIError(\"Maintype was not text, image, or querystring\")\n\n if result and isinstance(result, dict) and result.get(\"error\"):\n raise GraphAPIError(result)\n return result\n"
] | class GraphAPI(object):
"""A client for the Facebook Graph API.
https://developers.facebook.com/docs/graph-api
The Graph API is made up of the objects in Facebook (e.g., people,
pages, events, photos) and the connections between them (e.g.,
friends, photo tags, and event RSVPs). This client provides access
to those primitive types in a generic way. For example, given an
OAuth access token, this will fetch the profile of the active user
and the list of the user's friends:
graph = facebook.GraphAPI(access_token)
user = graph.get_object("me")
friends = graph.get_connections(user["id"], "friends")
You can see a list of all of the objects and connections supported
by the API at https://developers.facebook.com/docs/graph-api/reference/.
You can obtain an access token via OAuth or by using the Facebook
JavaScript SDK. See
https://developers.facebook.com/docs/facebook-login for details.
If you are using the JavaScript SDK, you can use the
get_user_from_cookie() method below to get the OAuth access token
for the active user from the cookie saved by the SDK.
"""
def __init__(
self,
access_token=None,
timeout=None,
version=None,
proxies=None,
session=None,
):
# The default version is only used if the version kwarg does not exist.
default_version = VALID_API_VERSIONS[0]
self.access_token = access_token
self.timeout = timeout
self.proxies = proxies
self.session = session or requests.Session()
if version:
            version_regex = re.compile(r"^\d\.\d{1,2}$")
match = version_regex.search(str(version))
if match is not None:
if str(version) not in VALID_API_VERSIONS:
raise GraphAPIError(
"Valid API versions are "
+ str(VALID_API_VERSIONS).strip("[]")
)
else:
self.version = "v" + str(version)
else:
raise GraphAPIError(
"Version number should be in the"
" following format: #.# (e.g. 2.0)."
)
else:
self.version = "v" + default_version
def get_object(self, id, **args):
"""Fetches the given object from the graph."""
return self.request("{0}/{1}".format(self.version, id), args)
def get_objects(self, ids, **args):
"""Fetches all of the given object from the graph.
We return a map from ID to object. If any of the IDs are
invalid, we raise an exception.
"""
args["ids"] = ",".join(ids)
return self.request(self.version + "/", args)
def search(self, type, **args):
"""https://developers.facebook.com/docs/places/search"""
if type not in VALID_SEARCH_TYPES:
raise GraphAPIError(
"Valid types are: %s" % ", ".join(VALID_SEARCH_TYPES)
)
args["type"] = type
return self.request(self.version + "/search/", args)
def get_connections(self, id, connection_name, **args):
"""Fetches the connections for given object."""
return self.request(
"{0}/{1}/{2}".format(self.version, id, connection_name), args
)
def get_all_connections(self, id, connection_name, **args):
"""Get all pages from a get_connections call
This will iterate over all pages returned by a get_connections call
and yield the individual items.
"""
while True:
page = self.get_connections(id, connection_name, **args)
for post in page["data"]:
yield post
next = page.get("paging", {}).get("next")
if not next:
return
args = parse_qs(urlparse(next).query)
del args["access_token"]
def put_object(self, parent_object, connection_name, **data):
"""Writes the given object to the graph, connected to the given parent.
For example,
graph.put_object("me", "feed", message="Hello, world")
writes "Hello, world" to the active user's wall. Likewise, this
will comment on the first post of the active user's feed:
feed = graph.get_connections("me", "feed")
post = feed["data"][0]
graph.put_object(post["id"], "comments", message="First!")
Certain operations require extended permissions. See
https://developers.facebook.com/docs/facebook-login/permissions
for details about permissions.
"""
assert self.access_token, "Write operations require an access token"
return self.request(
"{0}/{1}/{2}".format(self.version, parent_object, connection_name),
post_args=data,
method="POST",
)
def put_comment(self, object_id, message):
"""Writes the given comment on the given post."""
return self.put_object(object_id, "comments", message=message)
def put_like(self, object_id):
"""Likes the given post."""
return self.put_object(object_id, "likes")
def delete_object(self, id):
"""Deletes the object with the given ID from the graph."""
return self.request(
"{0}/{1}".format(self.version, id), method="DELETE"
)
def delete_request(self, user_id, request_id):
"""Deletes the Request with the given ID for the given user."""
return self.request(
"{0}_{1}".format(request_id, user_id), method="DELETE"
)
def put_photo(self, image, album_path="me/photos", **kwargs):
"""
Upload an image using multipart/form-data.
image - A file object representing the image to be uploaded.
album_path - A path representing where the image should be uploaded.
"""
return self.request(
"{0}/{1}".format(self.version, album_path),
post_args=kwargs,
files={"source": image},
method="POST",
)
def get_version(self):
"""Fetches the current version number of the Graph API being used."""
args = {"access_token": self.access_token}
try:
response = self.session.request(
"GET",
FACEBOOK_GRAPH_URL + self.version + "/me",
params=args,
timeout=self.timeout,
proxies=self.proxies,
)
        except requests.HTTPError as e:
            # requests' HTTPError carries the response object; it has no read().
            raise GraphAPIError(e.response.json())
try:
headers = response.headers
version = headers["facebook-api-version"].replace("v", "")
return str(version)
except Exception:
raise GraphAPIError("API version number not available")
def request(
self, path, args=None, post_args=None, files=None, method=None
):
"""Fetches the given path in the Graph API.
We translate args to a valid query string. If post_args is
given, we send a POST request to the given path with the given
arguments.
"""
if args is None:
args = dict()
if post_args is not None:
method = "POST"
# Add `access_token` to post_args or args if it has not already been
# included.
if self.access_token:
            # If post_args exists, we assume that args either does not exist
# or it does not need `access_token`.
if post_args and "access_token" not in post_args:
post_args["access_token"] = self.access_token
elif "access_token" not in args:
args["access_token"] = self.access_token
try:
response = self.session.request(
method or "GET",
FACEBOOK_GRAPH_URL + path,
timeout=self.timeout,
params=args,
data=post_args,
proxies=self.proxies,
files=files,
)
        except requests.HTTPError as e:
            # requests' HTTPError carries the response object; it has no read().
            raise GraphAPIError(e.response.json())
headers = response.headers
if "json" in headers["content-type"]:
result = response.json()
elif "image/" in headers["content-type"]:
mimetype = headers["content-type"]
result = {
"data": response.content,
"mime-type": mimetype,
"url": response.url,
}
elif "access_token" in parse_qs(response.text):
query_str = parse_qs(response.text)
if "access_token" in query_str:
result = {"access_token": query_str["access_token"][0]}
if "expires" in query_str:
result["expires"] = query_str["expires"][0]
else:
raise GraphAPIError(response.json())
else:
raise GraphAPIError("Maintype was not text, image, or querystring")
if result and isinstance(result, dict) and result.get("error"):
raise GraphAPIError(result)
return result
def get_app_access_token(self, app_id, app_secret, offline=False):
"""
Get the application's access token as a string.
If offline=True, use the concatenated app ID and secret
instead of making an API call.
<https://developers.facebook.com/docs/facebook-login/
access-tokens#apptokens>
"""
if offline:
return "{0}|{1}".format(app_id, app_secret)
else:
args = {
"grant_type": "client_credentials",
"client_id": app_id,
"client_secret": app_secret,
}
return self.request(
"{0}/oauth/access_token".format(self.version), args=args
)["access_token"]
def get_access_token_from_code(
self, code, redirect_uri, app_id, app_secret
):
"""Get an access token from the "code" returned from an OAuth dialog.
Returns a dict containing the user-specific access token and its
expiration date (if applicable).
"""
args = {
"code": code,
"redirect_uri": redirect_uri,
"client_id": app_id,
"client_secret": app_secret,
}
return self.request(
"{0}/oauth/access_token".format(self.version), args
)
def extend_access_token(self, app_id, app_secret):
"""
Extends the expiration time of a valid OAuth access token. See
<https://developers.facebook.com/docs/facebook-login/access-tokens/
expiration-and-extension>
"""
args = {
"client_id": app_id,
"client_secret": app_secret,
"grant_type": "fb_exchange_token",
"fb_exchange_token": self.access_token,
}
return self.request(
"{0}/oauth/access_token".format(self.version), args=args
)
def debug_access_token(self, token, app_id, app_secret):
"""
Gets information about a user access token issued by an app. See
<https://developers.facebook.com/docs/facebook-login/
access-tokens/debugging-and-error-handling>
We can generate the app access token by concatenating the app
id and secret: <https://developers.facebook.com/docs/
facebook-login/access-tokens#apptokens>
"""
args = {
"input_token": token,
"access_token": "{0}|{1}".format(app_id, app_secret),
}
return self.request(self.version + "/" + "debug_token", args=args)
def get_auth_url(self, app_id, canvas_url, perms=None, **kwargs):
"""Build a URL to create an OAuth dialog."""
url = "{0}{1}/{2}".format(
FACEBOOK_WWW_URL, self.version, FACEBOOK_OAUTH_DIALOG_PATH
)
args = {"client_id": app_id, "redirect_uri": canvas_url}
if perms:
args["scope"] = ",".join(perms)
args.update(kwargs)
return url + urlencode(args)
|
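A brief usage sketch for get_permissions above (the token is a placeholder; a user access token for the user in question is required):

from facebook import GraphAPI

graph = GraphAPI(access_token="<USER_TOKEN>")
granted = graph.get_permissions("me")  # e.g. {"public_profile", "email"}
if "email" in granted:
    print(graph.get_object("me", fields="email"))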
mobolic/facebook-sdk | facebook/__init__.py | GraphAPI.get_object | python | def get_object(self, id, **args):
return self.request("{0}/{1}".format(self.version, id), args) | Fetches the given object from the graph. | train | https://github.com/mobolic/facebook-sdk/blob/65ff582e77f7ed68b6e9643a7490e5dee2a1031b/facebook/__init__.py#L133-L135 | [
"def request(\n self, path, args=None, post_args=None, files=None, method=None\n):\n \"\"\"Fetches the given path in the Graph API.\n\n We translate args to a valid query string. If post_args is\n given, we send a POST request to the given path with the given\n arguments.\n\n \"\"\"\n if args is None:\n args = dict()\n if post_args is not None:\n method = \"POST\"\n\n # Add `access_token` to post_args or args if it has not already been\n # included.\n if self.access_token:\n # If post_args exists, we assume that args either does not exists\n # or it does not need `access_token`.\n if post_args and \"access_token\" not in post_args:\n post_args[\"access_token\"] = self.access_token\n elif \"access_token\" not in args:\n args[\"access_token\"] = self.access_token\n\n try:\n response = self.session.request(\n method or \"GET\",\n FACEBOOK_GRAPH_URL + path,\n timeout=self.timeout,\n params=args,\n data=post_args,\n proxies=self.proxies,\n files=files,\n )\n except requests.HTTPError as e:\n response = json.loads(e.read())\n raise GraphAPIError(response)\n\n headers = response.headers\n if \"json\" in headers[\"content-type\"]:\n result = response.json()\n elif \"image/\" in headers[\"content-type\"]:\n mimetype = headers[\"content-type\"]\n result = {\n \"data\": response.content,\n \"mime-type\": mimetype,\n \"url\": response.url,\n }\n elif \"access_token\" in parse_qs(response.text):\n query_str = parse_qs(response.text)\n if \"access_token\" in query_str:\n result = {\"access_token\": query_str[\"access_token\"][0]}\n if \"expires\" in query_str:\n result[\"expires\"] = query_str[\"expires\"][0]\n else:\n raise GraphAPIError(response.json())\n else:\n raise GraphAPIError(\"Maintype was not text, image, or querystring\")\n\n if result and isinstance(result, dict) and result.get(\"error\"):\n raise GraphAPIError(result)\n return result\n"
] | class GraphAPI(object):
"""A client for the Facebook Graph API.
https://developers.facebook.com/docs/graph-api
The Graph API is made up of the objects in Facebook (e.g., people,
pages, events, photos) and the connections between them (e.g.,
friends, photo tags, and event RSVPs). This client provides access
to those primitive types in a generic way. For example, given an
OAuth access token, this will fetch the profile of the active user
and the list of the user's friends:
graph = facebook.GraphAPI(access_token)
user = graph.get_object("me")
friends = graph.get_connections(user["id"], "friends")
You can see a list of all of the objects and connections supported
by the API at https://developers.facebook.com/docs/graph-api/reference/.
You can obtain an access token via OAuth or by using the Facebook
JavaScript SDK. See
https://developers.facebook.com/docs/facebook-login for details.
If you are using the JavaScript SDK, you can use the
get_user_from_cookie() method below to get the OAuth access token
for the active user from the cookie saved by the SDK.
"""
def __init__(
self,
access_token=None,
timeout=None,
version=None,
proxies=None,
session=None,
):
# The default version is only used if the version kwarg does not exist.
default_version = VALID_API_VERSIONS[0]
self.access_token = access_token
self.timeout = timeout
self.proxies = proxies
self.session = session or requests.Session()
if version:
            version_regex = re.compile(r"^\d\.\d{1,2}$")
match = version_regex.search(str(version))
if match is not None:
if str(version) not in VALID_API_VERSIONS:
raise GraphAPIError(
"Valid API versions are "
+ str(VALID_API_VERSIONS).strip("[]")
)
else:
self.version = "v" + str(version)
else:
raise GraphAPIError(
"Version number should be in the"
" following format: #.# (e.g. 2.0)."
)
else:
self.version = "v" + default_version
def get_permissions(self, user_id):
"""Fetches the permissions object from the graph."""
response = self.request(
"{0}/{1}/permissions".format(self.version, user_id), {}
)["data"]
return {x["permission"] for x in response if x["status"] == "granted"}
def get_objects(self, ids, **args):
"""Fetches all of the given object from the graph.
We return a map from ID to object. If any of the IDs are
invalid, we raise an exception.
"""
args["ids"] = ",".join(ids)
return self.request(self.version + "/", args)
def search(self, type, **args):
"""https://developers.facebook.com/docs/places/search"""
if type not in VALID_SEARCH_TYPES:
raise GraphAPIError(
"Valid types are: %s" % ", ".join(VALID_SEARCH_TYPES)
)
args["type"] = type
return self.request(self.version + "/search/", args)
def get_connections(self, id, connection_name, **args):
"""Fetches the connections for given object."""
return self.request(
"{0}/{1}/{2}".format(self.version, id, connection_name), args
)
def get_all_connections(self, id, connection_name, **args):
"""Get all pages from a get_connections call
This will iterate over all pages returned by a get_connections call
and yield the individual items.
"""
while True:
page = self.get_connections(id, connection_name, **args)
for post in page["data"]:
yield post
next = page.get("paging", {}).get("next")
if not next:
return
args = parse_qs(urlparse(next).query)
del args["access_token"]
def put_object(self, parent_object, connection_name, **data):
"""Writes the given object to the graph, connected to the given parent.
For example,
graph.put_object("me", "feed", message="Hello, world")
writes "Hello, world" to the active user's wall. Likewise, this
will comment on the first post of the active user's feed:
feed = graph.get_connections("me", "feed")
post = feed["data"][0]
graph.put_object(post["id"], "comments", message="First!")
Certain operations require extended permissions. See
https://developers.facebook.com/docs/facebook-login/permissions
for details about permissions.
"""
assert self.access_token, "Write operations require an access token"
return self.request(
"{0}/{1}/{2}".format(self.version, parent_object, connection_name),
post_args=data,
method="POST",
)
def put_comment(self, object_id, message):
"""Writes the given comment on the given post."""
return self.put_object(object_id, "comments", message=message)
def put_like(self, object_id):
"""Likes the given post."""
return self.put_object(object_id, "likes")
def delete_object(self, id):
"""Deletes the object with the given ID from the graph."""
return self.request(
"{0}/{1}".format(self.version, id), method="DELETE"
)
def delete_request(self, user_id, request_id):
"""Deletes the Request with the given ID for the given user."""
return self.request(
"{0}_{1}".format(request_id, user_id), method="DELETE"
)
def put_photo(self, image, album_path="me/photos", **kwargs):
"""
Upload an image using multipart/form-data.
image - A file object representing the image to be uploaded.
album_path - A path representing where the image should be uploaded.
"""
return self.request(
"{0}/{1}".format(self.version, album_path),
post_args=kwargs,
files={"source": image},
method="POST",
)
def get_version(self):
"""Fetches the current version number of the Graph API being used."""
args = {"access_token": self.access_token}
try:
response = self.session.request(
"GET",
FACEBOOK_GRAPH_URL + self.version + "/me",
params=args,
timeout=self.timeout,
proxies=self.proxies,
)
        except requests.HTTPError as e:
            # requests' HTTPError carries the response object; it has no read().
            raise GraphAPIError(e.response.json())
try:
headers = response.headers
version = headers["facebook-api-version"].replace("v", "")
return str(version)
except Exception:
raise GraphAPIError("API version number not available")
def request(
self, path, args=None, post_args=None, files=None, method=None
):
"""Fetches the given path in the Graph API.
We translate args to a valid query string. If post_args is
given, we send a POST request to the given path with the given
arguments.
"""
if args is None:
args = dict()
if post_args is not None:
method = "POST"
# Add `access_token` to post_args or args if it has not already been
# included.
if self.access_token:
            # If post_args exists, we assume that args either does not exist
# or it does not need `access_token`.
if post_args and "access_token" not in post_args:
post_args["access_token"] = self.access_token
elif "access_token" not in args:
args["access_token"] = self.access_token
try:
response = self.session.request(
method or "GET",
FACEBOOK_GRAPH_URL + path,
timeout=self.timeout,
params=args,
data=post_args,
proxies=self.proxies,
files=files,
)
        except requests.HTTPError as e:
            # requests' HTTPError carries the response object; it has no read().
            raise GraphAPIError(e.response.json())
headers = response.headers
if "json" in headers["content-type"]:
result = response.json()
elif "image/" in headers["content-type"]:
mimetype = headers["content-type"]
result = {
"data": response.content,
"mime-type": mimetype,
"url": response.url,
}
elif "access_token" in parse_qs(response.text):
query_str = parse_qs(response.text)
if "access_token" in query_str:
result = {"access_token": query_str["access_token"][0]}
if "expires" in query_str:
result["expires"] = query_str["expires"][0]
else:
raise GraphAPIError(response.json())
else:
raise GraphAPIError("Maintype was not text, image, or querystring")
if result and isinstance(result, dict) and result.get("error"):
raise GraphAPIError(result)
return result
def get_app_access_token(self, app_id, app_secret, offline=False):
"""
Get the application's access token as a string.
If offline=True, use the concatenated app ID and secret
instead of making an API call.
<https://developers.facebook.com/docs/facebook-login/
access-tokens#apptokens>
"""
if offline:
return "{0}|{1}".format(app_id, app_secret)
else:
args = {
"grant_type": "client_credentials",
"client_id": app_id,
"client_secret": app_secret,
}
return self.request(
"{0}/oauth/access_token".format(self.version), args=args
)["access_token"]
def get_access_token_from_code(
self, code, redirect_uri, app_id, app_secret
):
"""Get an access token from the "code" returned from an OAuth dialog.
Returns a dict containing the user-specific access token and its
expiration date (if applicable).
"""
args = {
"code": code,
"redirect_uri": redirect_uri,
"client_id": app_id,
"client_secret": app_secret,
}
return self.request(
"{0}/oauth/access_token".format(self.version), args
)
def extend_access_token(self, app_id, app_secret):
"""
Extends the expiration time of a valid OAuth access token. See
<https://developers.facebook.com/docs/facebook-login/access-tokens/
expiration-and-extension>
"""
args = {
"client_id": app_id,
"client_secret": app_secret,
"grant_type": "fb_exchange_token",
"fb_exchange_token": self.access_token,
}
return self.request(
"{0}/oauth/access_token".format(self.version), args=args
)
def debug_access_token(self, token, app_id, app_secret):
"""
Gets information about a user access token issued by an app. See
<https://developers.facebook.com/docs/facebook-login/
access-tokens/debugging-and-error-handling>
We can generate the app access token by concatenating the app
id and secret: <https://developers.facebook.com/docs/
facebook-login/access-tokens#apptokens>
"""
args = {
"input_token": token,
"access_token": "{0}|{1}".format(app_id, app_secret),
}
return self.request(self.version + "/" + "debug_token", args=args)
def get_auth_url(self, app_id, canvas_url, perms=None, **kwargs):
"""Build a URL to create an OAuth dialog."""
url = "{0}{1}/{2}".format(
FACEBOOK_WWW_URL, self.version, FACEBOOK_OAUTH_DIALOG_PATH
)
args = {"client_id": app_id, "redirect_uri": canvas_url}
if perms:
args["scope"] = ",".join(perms)
args.update(kwargs)
return url + urlencode(args)
|
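Since get_object forwards keyword arguments as query parameters, field selection is just a kwarg (the token and object id are placeholders):

from facebook import GraphAPI

graph = GraphAPI(access_token="<ACCESS_TOKEN>")
# Extra kwargs become query-string parameters: ?fields=name,about
page = graph.get_object("<PAGE_ID>", fields="name,about")
print(page["name"])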
mobolic/facebook-sdk | facebook/__init__.py | GraphAPI.get_objects | python | def get_objects(self, ids, **args):
args["ids"] = ",".join(ids)
        return self.request(self.version + "/", args) | Fetches all of the given objects from the graph.
We return a map from ID to object. If any of the IDs are
invalid, we raise an exception. | train | https://github.com/mobolic/facebook-sdk/blob/65ff582e77f7ed68b6e9643a7490e5dee2a1031b/facebook/__init__.py#L137-L144 | [
"def request(\n self, path, args=None, post_args=None, files=None, method=None\n):\n \"\"\"Fetches the given path in the Graph API.\n\n We translate args to a valid query string. If post_args is\n given, we send a POST request to the given path with the given\n arguments.\n\n \"\"\"\n if args is None:\n args = dict()\n if post_args is not None:\n method = \"POST\"\n\n # Add `access_token` to post_args or args if it has not already been\n # included.\n if self.access_token:\n # If post_args exists, we assume that args either does not exists\n # or it does not need `access_token`.\n if post_args and \"access_token\" not in post_args:\n post_args[\"access_token\"] = self.access_token\n elif \"access_token\" not in args:\n args[\"access_token\"] = self.access_token\n\n try:\n response = self.session.request(\n method or \"GET\",\n FACEBOOK_GRAPH_URL + path,\n timeout=self.timeout,\n params=args,\n data=post_args,\n proxies=self.proxies,\n files=files,\n )\n except requests.HTTPError as e:\n response = json.loads(e.read())\n raise GraphAPIError(response)\n\n headers = response.headers\n if \"json\" in headers[\"content-type\"]:\n result = response.json()\n elif \"image/\" in headers[\"content-type\"]:\n mimetype = headers[\"content-type\"]\n result = {\n \"data\": response.content,\n \"mime-type\": mimetype,\n \"url\": response.url,\n }\n elif \"access_token\" in parse_qs(response.text):\n query_str = parse_qs(response.text)\n if \"access_token\" in query_str:\n result = {\"access_token\": query_str[\"access_token\"][0]}\n if \"expires\" in query_str:\n result[\"expires\"] = query_str[\"expires\"][0]\n else:\n raise GraphAPIError(response.json())\n else:\n raise GraphAPIError(\"Maintype was not text, image, or querystring\")\n\n if result and isinstance(result, dict) and result.get(\"error\"):\n raise GraphAPIError(result)\n return result\n"
] | class GraphAPI(object):
"""A client for the Facebook Graph API.
https://developers.facebook.com/docs/graph-api
The Graph API is made up of the objects in Facebook (e.g., people,
pages, events, photos) and the connections between them (e.g.,
friends, photo tags, and event RSVPs). This client provides access
to those primitive types in a generic way. For example, given an
OAuth access token, this will fetch the profile of the active user
and the list of the user's friends:
graph = facebook.GraphAPI(access_token)
user = graph.get_object("me")
friends = graph.get_connections(user["id"], "friends")
You can see a list of all of the objects and connections supported
by the API at https://developers.facebook.com/docs/graph-api/reference/.
You can obtain an access token via OAuth or by using the Facebook
JavaScript SDK. See
https://developers.facebook.com/docs/facebook-login for details.
If you are using the JavaScript SDK, you can use the
get_user_from_cookie() method below to get the OAuth access token
for the active user from the cookie saved by the SDK.
"""
def __init__(
self,
access_token=None,
timeout=None,
version=None,
proxies=None,
session=None,
):
# The default version is only used if the version kwarg does not exist.
default_version = VALID_API_VERSIONS[0]
self.access_token = access_token
self.timeout = timeout
self.proxies = proxies
self.session = session or requests.Session()
if version:
version_regex = re.compile("^\d\.\d{1,2}$")
match = version_regex.search(str(version))
if match is not None:
if str(version) not in VALID_API_VERSIONS:
raise GraphAPIError(
"Valid API versions are "
+ str(VALID_API_VERSIONS).strip("[]")
)
else:
self.version = "v" + str(version)
else:
raise GraphAPIError(
"Version number should be in the"
" following format: #.# (e.g. 2.0)."
)
else:
self.version = "v" + default_version
def get_permissions(self, user_id):
"""Fetches the permissions object from the graph."""
response = self.request(
"{0}/{1}/permissions".format(self.version, user_id), {}
)["data"]
return {x["permission"] for x in response if x["status"] == "granted"}
def get_object(self, id, **args):
"""Fetches the given object from the graph."""
return self.request("{0}/{1}".format(self.version, id), args)
def search(self, type, **args):
"""https://developers.facebook.com/docs/places/search"""
if type not in VALID_SEARCH_TYPES:
raise GraphAPIError(
"Valid types are: %s" % ", ".join(VALID_SEARCH_TYPES)
)
args["type"] = type
return self.request(self.version + "/search/", args)
def get_connections(self, id, connection_name, **args):
"""Fetches the connections for given object."""
return self.request(
"{0}/{1}/{2}".format(self.version, id, connection_name), args
)
def get_all_connections(self, id, connection_name, **args):
"""Get all pages from a get_connections call
This will iterate over all pages returned by a get_connections call
and yield the individual items.
"""
while True:
page = self.get_connections(id, connection_name, **args)
for post in page["data"]:
yield post
next = page.get("paging", {}).get("next")
if not next:
return
args = parse_qs(urlparse(next).query)
del args["access_token"]
def put_object(self, parent_object, connection_name, **data):
"""Writes the given object to the graph, connected to the given parent.
For example,
graph.put_object("me", "feed", message="Hello, world")
writes "Hello, world" to the active user's wall. Likewise, this
will comment on the first post of the active user's feed:
feed = graph.get_connections("me", "feed")
post = feed["data"][0]
graph.put_object(post["id"], "comments", message="First!")
Certain operations require extended permissions. See
https://developers.facebook.com/docs/facebook-login/permissions
for details about permissions.
"""
assert self.access_token, "Write operations require an access token"
return self.request(
"{0}/{1}/{2}".format(self.version, parent_object, connection_name),
post_args=data,
method="POST",
)
def put_comment(self, object_id, message):
"""Writes the given comment on the given post."""
return self.put_object(object_id, "comments", message=message)
def put_like(self, object_id):
"""Likes the given post."""
return self.put_object(object_id, "likes")
def delete_object(self, id):
"""Deletes the object with the given ID from the graph."""
return self.request(
"{0}/{1}".format(self.version, id), method="DELETE"
)
def delete_request(self, user_id, request_id):
"""Deletes the Request with the given ID for the given user."""
return self.request(
"{0}_{1}".format(request_id, user_id), method="DELETE"
)
def put_photo(self, image, album_path="me/photos", **kwargs):
"""
Upload an image using multipart/form-data.
image - A file object representing the image to be uploaded.
album_path - A path representing where the image should be uploaded.
"""
return self.request(
"{0}/{1}".format(self.version, album_path),
post_args=kwargs,
files={"source": image},
method="POST",
)
def get_version(self):
"""Fetches the current version number of the Graph API being used."""
args = {"access_token": self.access_token}
try:
response = self.session.request(
"GET",
FACEBOOK_GRAPH_URL + self.version + "/me",
params=args,
timeout=self.timeout,
proxies=self.proxies,
)
except requests.HTTPError as e:
response = json.loads(e.read())
raise GraphAPIError(response)
try:
headers = response.headers
version = headers["facebook-api-version"].replace("v", "")
return str(version)
except Exception:
raise GraphAPIError("API version number not available")
def request(
self, path, args=None, post_args=None, files=None, method=None
):
"""Fetches the given path in the Graph API.
We translate args to a valid query string. If post_args is
given, we send a POST request to the given path with the given
arguments.
"""
if args is None:
args = dict()
if post_args is not None:
method = "POST"
# Add `access_token` to post_args or args if it has not already been
# included.
if self.access_token:
# If post_args exists, we assume that args either does not exists
# or it does not need `access_token`.
if post_args and "access_token" not in post_args:
post_args["access_token"] = self.access_token
elif "access_token" not in args:
args["access_token"] = self.access_token
try:
response = self.session.request(
method or "GET",
FACEBOOK_GRAPH_URL + path,
timeout=self.timeout,
params=args,
data=post_args,
proxies=self.proxies,
files=files,
)
except requests.HTTPError as e:
response = json.loads(e.read())
raise GraphAPIError(response)
headers = response.headers
if "json" in headers["content-type"]:
result = response.json()
elif "image/" in headers["content-type"]:
mimetype = headers["content-type"]
result = {
"data": response.content,
"mime-type": mimetype,
"url": response.url,
}
elif "access_token" in parse_qs(response.text):
query_str = parse_qs(response.text)
if "access_token" in query_str:
result = {"access_token": query_str["access_token"][0]}
if "expires" in query_str:
result["expires"] = query_str["expires"][0]
else:
raise GraphAPIError(response.json())
else:
raise GraphAPIError("Maintype was not text, image, or querystring")
if result and isinstance(result, dict) and result.get("error"):
raise GraphAPIError(result)
return result
def get_app_access_token(self, app_id, app_secret, offline=False):
"""
Get the application's access token as a string.
If offline=True, use the concatenated app ID and secret
instead of making an API call.
<https://developers.facebook.com/docs/facebook-login/
access-tokens#apptokens>
"""
if offline:
return "{0}|{1}".format(app_id, app_secret)
else:
args = {
"grant_type": "client_credentials",
"client_id": app_id,
"client_secret": app_secret,
}
return self.request(
"{0}/oauth/access_token".format(self.version), args=args
)["access_token"]
def get_access_token_from_code(
self, code, redirect_uri, app_id, app_secret
):
"""Get an access token from the "code" returned from an OAuth dialog.
Returns a dict containing the user-specific access token and its
expiration date (if applicable).
"""
args = {
"code": code,
"redirect_uri": redirect_uri,
"client_id": app_id,
"client_secret": app_secret,
}
return self.request(
"{0}/oauth/access_token".format(self.version), args
)
def extend_access_token(self, app_id, app_secret):
"""
Extends the expiration time of a valid OAuth access token. See
<https://developers.facebook.com/docs/facebook-login/access-tokens/
expiration-and-extension>
"""
args = {
"client_id": app_id,
"client_secret": app_secret,
"grant_type": "fb_exchange_token",
"fb_exchange_token": self.access_token,
}
return self.request(
"{0}/oauth/access_token".format(self.version), args=args
)
def debug_access_token(self, token, app_id, app_secret):
"""
Gets information about a user access token issued by an app. See
<https://developers.facebook.com/docs/facebook-login/
access-tokens/debugging-and-error-handling>
We can generate the app access token by concatenating the app
id and secret: <https://developers.facebook.com/docs/
facebook-login/access-tokens#apptokens>
"""
args = {
"input_token": token,
"access_token": "{0}|{1}".format(app_id, app_secret),
}
return self.request(self.version + "/" + "debug_token", args=args)
def get_auth_url(self, app_id, canvas_url, perms=None, **kwargs):
"""Build a URL to create an OAuth dialog."""
url = "{0}{1}/{2}".format(
FACEBOOK_WWW_URL, self.version, FACEBOOK_OAUTH_DIALOG_PATH
)
args = {"client_id": app_id, "redirect_uri": canvas_url}
if perms:
args["scope"] = ",".join(perms)
args.update(kwargs)
return url + urlencode(args)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.