repository_name stringclasses 316 values | func_path_in_repository stringlengths 6 223 | func_name stringlengths 1 134 | language stringclasses 1 value | func_code_string stringlengths 57 65.5k | func_documentation_string stringlengths 1 46.3k | split_name stringclasses 1 value | func_code_url stringlengths 91 315 | called_functions listlengths 1 156 ⌀ | enclosing_scope stringlengths 2 1.48M |
|---|---|---|---|---|---|---|---|---|---|
SpheMakh/Stimela | stimela/singularity.py | pull | python | def pull(image, store_path, docker=True):
if docker:
fp = "docker://{0:s}".format(image)
else:
fp = image
utils.xrun("singularity", ["pull", "--force", "--name", store_path, fp])
return 0 | pull an image | train | https://github.com/SpheMakh/Stimela/blob/292e80461a0c3498da8e7e987e2891d3ae5981ad/stimela/singularity.py#L15-L26 | [
"def xrun(command, options, log=None, _log_container_as_started=False, logfile=None, timeout=-1, kill_callback=None):\n \"\"\"\n Run something on command line.\n\n Example: _run(\"ls\", [\"-lrt\", \"../\"])\n \"\"\"\n\n cmd = \" \".join([command] + list(map(str, options)) )\n\n def _print_info(msg):\n if msg is None: return\n if log:\n log.info(msg)\n else:\n print(msg)\n\n def _print_warn(msg):\n if msg is None: return\n if log:\n log.warn(msg)\n else:\n print(msg)\n\n _print_info(u\"Running: {0:s}\".format(cmd))\n\n sys.stdout.flush()\n starttime = time.time()\n process = p = None\n try:\n foutname = os.path.join(\"/tmp\", \"stimela_output_{0:s}_{1:f}\".format(hashlib.md5(cmd.encode('utf-8')).hexdigest(), starttime))\n with open(foutname, \"w+\") as fout:\n p = process = subprocess.Popen(cmd,\n stderr=fout,\n stdout=fout,\n shell=True)\n\n def clock_killer(p):\n while process.poll() is None and (timeout >= 0):\n currenttime = time.time()\n if (currenttime - starttime < timeout):\n DEBUG and _print_warn(u\"Clock Reaper: has been running for {0:f}, must finish in {1:f}\".format(currenttime - starttime, timeout))\n else:\n _print_warn(u\"Clock Reaper: Timeout reached for '{0:s}'... 
sending the KILL signal\".format(cmd))\n (kill_callback is not None) and kill_callback()\n time.sleep(INTERRUPT_TIME)\n\n Thread(target=clock_killer, args=tuple([p])).start()\n\n while (process.poll() is None):\n currenttime = time.time()\n DEBUG and _print_info(u\"God mode on: has been running for {0:f}\".format(currenttime - starttime))\n time.sleep(INTERRUPT_TIME) # this is probably not ideal as it interrupts the process every few seconds, \n #check whether there is an alternative with a callback\n assert hasattr(process, \"returncode\"), \"No returncode after termination!\"\n with open(foutname, \"r\") as fout:\n _print_info(fout.read())\n finally:\n if (process is not None) and process.returncode:\n raise StimelaCabRuntimeError('%s: returns errr code %d' % (command, process.returncode))\n"
] | import subprocess
import os
import sys
from stimela import utils
import json
import stimela
import time
import datetime
import tempfile
class SingularityError(Exception):
pass
class Container(object):
def __init__(self, image, name,
volumes=None,
logger=None,
time_out=-1,
runscript=""):
"""
Python wrapper to singularity tools for managing containers.
"""
self.image = image
self.name = name
self.volumes = volumes or []
self.logger = logger
self.status = None
self.WORKDIR = None
self.RUNSCRIPT = runscript
self.PID = os.getpid()
self.uptime = "00:00:00"
self.time_out = time_out
#self.cont_logger = utils.logger.StimelaLogger(log_container or stimela.LOG_FILE)
def add_volume(self, host, container, perm="rw", noverify=False):
if os.path.exists(host) or noverify:
if self.logger:
self.logger.debug("Mounting volume [{0}] in container [{1}] at [{2}]".format(host, self.name, container))
host = os.path.abspath(host)
else:
raise IOError("Path {0} cannot be mounted on container: File doesn't exist".format(host))
self.volumes.append(":".join([host,container,perm]))
return 0
def start(self, *args):
"""
Create a singularity container instance
"""
if self.volumes:
volumes = " --bind " + " --bind ".join(self.volumes)
else:
volumes = ""
self._print("Instantiating container [{0:s}]. Timeout set to {1:d}. The container ID is printed below.".format(self.name, self.time_out))
utils.xrun("singularity instance.start",
list(args) + [volumes,
# "-c",
self.image, self.name])
self.status = "created"
return 0
def run(self, *args):
"""
Run a singularity container instance
"""
if self.volumes:
volumes = " --bind " + " --bind ".join(self.volumes)
else:
volumes = ""
self._print("Starting container [{0:s}]. Timeout set to {1:d}. The container ID is printed below.".format(self.name, self.time_out))
utils.xrun("singularity run", ["instance://{0:s} {1:s}".format(self.name, self.RUNSCRIPT)],
timeout= self.time_out, kill_callback=self.stop)
self.status = "running"
return 0
def stop(self, *args):
"""
Stop a singularity container instance
"""
if self.volumes:
volumes = " --bind " + " --bind ".join(self.volumes)
else:
volumes = ""
self._print("Stopping container [{}]. The container ID is printed below.".format(self.name))
utils.xrun("singularity", ["instance.stop {0:s}".format(self.name)])
self.status = "exited"
return 0
def _print(self, message):
if self.logger:
self.logger.info(message)
else:
print(message)
return 0
|
SpheMakh/Stimela | stimela/singularity.py | Container.start | python | def start(self, *args):
if self.volumes:
volumes = " --bind " + " --bind ".join(self.volumes)
else:
volumes = ""
self._print("Instantiating container [{0:s}]. Timeout set to {1:d}. The container ID is printed below.".format(self.name, self.time_out))
utils.xrun("singularity instance.start",
list(args) + [volumes,
# "-c",
self.image, self.name])
self.status = "created"
return 0 | Create a singularity container instance | train | https://github.com/SpheMakh/Stimela/blob/292e80461a0c3498da8e7e987e2891d3ae5981ad/stimela/singularity.py#L66-L84 | [
"def xrun(command, options, log=None, _log_container_as_started=False, logfile=None, timeout=-1, kill_callback=None):\n \"\"\"\n Run something on command line.\n\n Example: _run(\"ls\", [\"-lrt\", \"../\"])\n \"\"\"\n\n cmd = \" \".join([command] + list(map(str, options)) )\n\n def _print_info(msg):\n if msg is None: return\n if log:\n log.info(msg)\n else:\n print(msg)\n\n def _print_warn(msg):\n if msg is None: return\n if log:\n log.warn(msg)\n else:\n print(msg)\n\n _print_info(u\"Running: {0:s}\".format(cmd))\n\n sys.stdout.flush()\n starttime = time.time()\n process = p = None\n try:\n foutname = os.path.join(\"/tmp\", \"stimela_output_{0:s}_{1:f}\".format(hashlib.md5(cmd.encode('utf-8')).hexdigest(), starttime))\n with open(foutname, \"w+\") as fout:\n p = process = subprocess.Popen(cmd,\n stderr=fout,\n stdout=fout,\n shell=True)\n\n def clock_killer(p):\n while process.poll() is None and (timeout >= 0):\n currenttime = time.time()\n if (currenttime - starttime < timeout):\n DEBUG and _print_warn(u\"Clock Reaper: has been running for {0:f}, must finish in {1:f}\".format(currenttime - starttime, timeout))\n else:\n _print_warn(u\"Clock Reaper: Timeout reached for '{0:s}'... 
sending the KILL signal\".format(cmd))\n (kill_callback is not None) and kill_callback()\n time.sleep(INTERRUPT_TIME)\n\n Thread(target=clock_killer, args=tuple([p])).start()\n\n while (process.poll() is None):\n currenttime = time.time()\n DEBUG and _print_info(u\"God mode on: has been running for {0:f}\".format(currenttime - starttime))\n time.sleep(INTERRUPT_TIME) # this is probably not ideal as it interrupts the process every few seconds, \n #check whether there is an alternative with a callback\n assert hasattr(process, \"returncode\"), \"No returncode after termination!\"\n with open(foutname, \"r\") as fout:\n _print_info(fout.read())\n finally:\n if (process is not None) and process.returncode:\n raise StimelaCabRuntimeError('%s: returns errr code %d' % (command, process.returncode))\n",
"def _print(self, message):\n if self.logger:\n self.logger.info(message)\n else:\n print(message)\n\n return 0\n"
] | class Container(object):
def __init__(self, image, name,
volumes=None,
logger=None,
time_out=-1,
runscript=""):
"""
Python wrapper to singularity tools for managing containers.
"""
self.image = image
self.name = name
self.volumes = volumes or []
self.logger = logger
self.status = None
self.WORKDIR = None
self.RUNSCRIPT = runscript
self.PID = os.getpid()
self.uptime = "00:00:00"
self.time_out = time_out
#self.cont_logger = utils.logger.StimelaLogger(log_container or stimela.LOG_FILE)
def add_volume(self, host, container, perm="rw", noverify=False):
if os.path.exists(host) or noverify:
if self.logger:
self.logger.debug("Mounting volume [{0}] in container [{1}] at [{2}]".format(host, self.name, container))
host = os.path.abspath(host)
else:
raise IOError("Path {0} cannot be mounted on container: File doesn't exist".format(host))
self.volumes.append(":".join([host,container,perm]))
return 0
def start(self, *args):
"""
Create a singularity container instance
"""
if self.volumes:
volumes = " --bind " + " --bind ".join(self.volumes)
else:
volumes = ""
self._print("Instantiating container [{0:s}]. Timeout set to {1:d}. The container ID is printed below.".format(self.name, self.time_out))
utils.xrun("singularity instance.start",
list(args) + [volumes,
# "-c",
self.image, self.name])
self.status = "created"
return 0
def run(self, *args):
"""
Run a singularity container instance
"""
if self.volumes:
volumes = " --bind " + " --bind ".join(self.volumes)
else:
volumes = ""
self._print("Starting container [{0:s}]. Timeout set to {1:d}. The container ID is printed below.".format(self.name, self.time_out))
utils.xrun("singularity run", ["instance://{0:s} {1:s}".format(self.name, self.RUNSCRIPT)],
timeout= self.time_out, kill_callback=self.stop)
self.status = "running"
return 0
def stop(self, *args):
"""
Stop a singularity container instance
"""
if self.volumes:
volumes = " --bind " + " --bind ".join(self.volumes)
else:
volumes = ""
self._print("Stopping container [{}]. The container ID is printed below.".format(self.name))
utils.xrun("singularity", ["instance.stop {0:s}".format(self.name)])
self.status = "exited"
return 0
def _print(self, message):
if self.logger:
self.logger.info(message)
else:
print(message)
return 0
|
SpheMakh/Stimela | stimela/singularity.py | Container.run | python | def run(self, *args):
if self.volumes:
volumes = " --bind " + " --bind ".join(self.volumes)
else:
volumes = ""
self._print("Starting container [{0:s}]. Timeout set to {1:d}. The container ID is printed below.".format(self.name, self.time_out))
utils.xrun("singularity run", ["instance://{0:s} {1:s}".format(self.name, self.RUNSCRIPT)],
timeout= self.time_out, kill_callback=self.stop)
self.status = "running"
return 0 | Run a singularity container instance | train | https://github.com/SpheMakh/Stimela/blob/292e80461a0c3498da8e7e987e2891d3ae5981ad/stimela/singularity.py#L87-L103 | [
"def xrun(command, options, log=None, _log_container_as_started=False, logfile=None, timeout=-1, kill_callback=None):\n \"\"\"\n Run something on command line.\n\n Example: _run(\"ls\", [\"-lrt\", \"../\"])\n \"\"\"\n\n cmd = \" \".join([command] + list(map(str, options)) )\n\n def _print_info(msg):\n if msg is None: return\n if log:\n log.info(msg)\n else:\n print(msg)\n\n def _print_warn(msg):\n if msg is None: return\n if log:\n log.warn(msg)\n else:\n print(msg)\n\n _print_info(u\"Running: {0:s}\".format(cmd))\n\n sys.stdout.flush()\n starttime = time.time()\n process = p = None\n try:\n foutname = os.path.join(\"/tmp\", \"stimela_output_{0:s}_{1:f}\".format(hashlib.md5(cmd.encode('utf-8')).hexdigest(), starttime))\n with open(foutname, \"w+\") as fout:\n p = process = subprocess.Popen(cmd,\n stderr=fout,\n stdout=fout,\n shell=True)\n\n def clock_killer(p):\n while process.poll() is None and (timeout >= 0):\n currenttime = time.time()\n if (currenttime - starttime < timeout):\n DEBUG and _print_warn(u\"Clock Reaper: has been running for {0:f}, must finish in {1:f}\".format(currenttime - starttime, timeout))\n else:\n _print_warn(u\"Clock Reaper: Timeout reached for '{0:s}'... 
sending the KILL signal\".format(cmd))\n (kill_callback is not None) and kill_callback()\n time.sleep(INTERRUPT_TIME)\n\n Thread(target=clock_killer, args=tuple([p])).start()\n\n while (process.poll() is None):\n currenttime = time.time()\n DEBUG and _print_info(u\"God mode on: has been running for {0:f}\".format(currenttime - starttime))\n time.sleep(INTERRUPT_TIME) # this is probably not ideal as it interrupts the process every few seconds, \n #check whether there is an alternative with a callback\n assert hasattr(process, \"returncode\"), \"No returncode after termination!\"\n with open(foutname, \"r\") as fout:\n _print_info(fout.read())\n finally:\n if (process is not None) and process.returncode:\n raise StimelaCabRuntimeError('%s: returns errr code %d' % (command, process.returncode))\n",
"def _print(self, message):\n if self.logger:\n self.logger.info(message)\n else:\n print(message)\n\n return 0\n"
] | class Container(object):
def __init__(self, image, name,
volumes=None,
logger=None,
time_out=-1,
runscript=""):
"""
Python wrapper to singularity tools for managing containers.
"""
self.image = image
self.name = name
self.volumes = volumes or []
self.logger = logger
self.status = None
self.WORKDIR = None
self.RUNSCRIPT = runscript
self.PID = os.getpid()
self.uptime = "00:00:00"
self.time_out = time_out
#self.cont_logger = utils.logger.StimelaLogger(log_container or stimela.LOG_FILE)
def add_volume(self, host, container, perm="rw", noverify=False):
if os.path.exists(host) or noverify:
if self.logger:
self.logger.debug("Mounting volume [{0}] in container [{1}] at [{2}]".format(host, self.name, container))
host = os.path.abspath(host)
else:
raise IOError("Path {0} cannot be mounted on container: File doesn't exist".format(host))
self.volumes.append(":".join([host,container,perm]))
return 0
def start(self, *args):
"""
Create a singularity container instance
"""
if self.volumes:
volumes = " --bind " + " --bind ".join(self.volumes)
else:
volumes = ""
self._print("Instantiating container [{0:s}]. Timeout set to {1:d}. The container ID is printed below.".format(self.name, self.time_out))
utils.xrun("singularity instance.start",
list(args) + [volumes,
# "-c",
self.image, self.name])
self.status = "created"
return 0
def run(self, *args):
"""
Run a singularity container instance
"""
if self.volumes:
volumes = " --bind " + " --bind ".join(self.volumes)
else:
volumes = ""
self._print("Starting container [{0:s}]. Timeout set to {1:d}. The container ID is printed below.".format(self.name, self.time_out))
utils.xrun("singularity run", ["instance://{0:s} {1:s}".format(self.name, self.RUNSCRIPT)],
timeout= self.time_out, kill_callback=self.stop)
self.status = "running"
return 0
def stop(self, *args):
"""
Stop a singularity container instance
"""
if self.volumes:
volumes = " --bind " + " --bind ".join(self.volumes)
else:
volumes = ""
self._print("Stopping container [{}]. The container ID is printed below.".format(self.name))
utils.xrun("singularity", ["instance.stop {0:s}".format(self.name)])
self.status = "exited"
return 0
def _print(self, message):
if self.logger:
self.logger.info(message)
else:
print(message)
return 0
|
SpheMakh/Stimela | stimela/singularity.py | Container.stop | python | def stop(self, *args):
if self.volumes:
volumes = " --bind " + " --bind ".join(self.volumes)
else:
volumes = ""
self._print("Stopping container [{}]. The container ID is printed below.".format(self.name))
utils.xrun("singularity", ["instance.stop {0:s}".format(self.name)])
self.status = "exited"
return 0 | Stop a singularity container instance | train | https://github.com/SpheMakh/Stimela/blob/292e80461a0c3498da8e7e987e2891d3ae5981ad/stimela/singularity.py#L106-L121 | [
"def xrun(command, options, log=None, _log_container_as_started=False, logfile=None, timeout=-1, kill_callback=None):\n \"\"\"\n Run something on command line.\n\n Example: _run(\"ls\", [\"-lrt\", \"../\"])\n \"\"\"\n\n cmd = \" \".join([command] + list(map(str, options)) )\n\n def _print_info(msg):\n if msg is None: return\n if log:\n log.info(msg)\n else:\n print(msg)\n\n def _print_warn(msg):\n if msg is None: return\n if log:\n log.warn(msg)\n else:\n print(msg)\n\n _print_info(u\"Running: {0:s}\".format(cmd))\n\n sys.stdout.flush()\n starttime = time.time()\n process = p = None\n try:\n foutname = os.path.join(\"/tmp\", \"stimela_output_{0:s}_{1:f}\".format(hashlib.md5(cmd.encode('utf-8')).hexdigest(), starttime))\n with open(foutname, \"w+\") as fout:\n p = process = subprocess.Popen(cmd,\n stderr=fout,\n stdout=fout,\n shell=True)\n\n def clock_killer(p):\n while process.poll() is None and (timeout >= 0):\n currenttime = time.time()\n if (currenttime - starttime < timeout):\n DEBUG and _print_warn(u\"Clock Reaper: has been running for {0:f}, must finish in {1:f}\".format(currenttime - starttime, timeout))\n else:\n _print_warn(u\"Clock Reaper: Timeout reached for '{0:s}'... 
sending the KILL signal\".format(cmd))\n (kill_callback is not None) and kill_callback()\n time.sleep(INTERRUPT_TIME)\n\n Thread(target=clock_killer, args=tuple([p])).start()\n\n while (process.poll() is None):\n currenttime = time.time()\n DEBUG and _print_info(u\"God mode on: has been running for {0:f}\".format(currenttime - starttime))\n time.sleep(INTERRUPT_TIME) # this is probably not ideal as it interrupts the process every few seconds, \n #check whether there is an alternative with a callback\n assert hasattr(process, \"returncode\"), \"No returncode after termination!\"\n with open(foutname, \"r\") as fout:\n _print_info(fout.read())\n finally:\n if (process is not None) and process.returncode:\n raise StimelaCabRuntimeError('%s: returns errr code %d' % (command, process.returncode))\n",
"def _print(self, message):\n if self.logger:\n self.logger.info(message)\n else:\n print(message)\n\n return 0\n"
] | class Container(object):
def __init__(self, image, name,
volumes=None,
logger=None,
time_out=-1,
runscript=""):
"""
Python wrapper to singularity tools for managing containers.
"""
self.image = image
self.name = name
self.volumes = volumes or []
self.logger = logger
self.status = None
self.WORKDIR = None
self.RUNSCRIPT = runscript
self.PID = os.getpid()
self.uptime = "00:00:00"
self.time_out = time_out
#self.cont_logger = utils.logger.StimelaLogger(log_container or stimela.LOG_FILE)
def add_volume(self, host, container, perm="rw", noverify=False):
if os.path.exists(host) or noverify:
if self.logger:
self.logger.debug("Mounting volume [{0}] in container [{1}] at [{2}]".format(host, self.name, container))
host = os.path.abspath(host)
else:
raise IOError("Path {0} cannot be mounted on container: File doesn't exist".format(host))
self.volumes.append(":".join([host,container,perm]))
return 0
def start(self, *args):
"""
Create a singularity container instance
"""
if self.volumes:
volumes = " --bind " + " --bind ".join(self.volumes)
else:
volumes = ""
self._print("Instantiating container [{0:s}]. Timeout set to {1:d}. The container ID is printed below.".format(self.name, self.time_out))
utils.xrun("singularity instance.start",
list(args) + [volumes,
# "-c",
self.image, self.name])
self.status = "created"
return 0
def run(self, *args):
"""
Run a singularity container instance
"""
if self.volumes:
volumes = " --bind " + " --bind ".join(self.volumes)
else:
volumes = ""
self._print("Starting container [{0:s}]. Timeout set to {1:d}. The container ID is printed below.".format(self.name, self.time_out))
utils.xrun("singularity run", ["instance://{0:s} {1:s}".format(self.name, self.RUNSCRIPT)],
timeout= self.time_out, kill_callback=self.stop)
self.status = "running"
return 0
def stop(self, *args):
"""
Stop a singularity container instance
"""
if self.volumes:
volumes = " --bind " + " --bind ".join(self.volumes)
else:
volumes = ""
self._print("Stopping container [{}]. The container ID is printed below.".format(self.name))
utils.xrun("singularity", ["instance.stop {0:s}".format(self.name)])
self.status = "exited"
return 0
def _print(self, message):
if self.logger:
self.logger.info(message)
else:
print(message)
return 0
|
SpheMakh/Stimela | stimela/cargo/cab/lwimager/src/run.py | predict_vis | python | def predict_vis (msname, image, column="MODEL_DATA",
chanchunk=None, chanstart=0, chanstep=1):
# CASA to convert them
casaimage = '{0}/{1}.img'.format(OUTPUT, os.path.basename(image))
# convert to CASA image
img = pyrap.images.image(image)
img.saveas(casaimage)
imgshp = img.shape()
ftab = table(msname+'/SPECTRAL_WINDOW')
numchans = ftab.getcol('NUM_CHAN')[0]
# default chunk list is entire chanel range. Update this if needed
chunklist = [ (0, numchans, None, None) ]
if len(imgshp) == 4 and imgshp[0] > 1:
nimgchan = imgshp[0]
print("image cube has {0} channels, MS has {1} channels".format(nimgchan, numchans))
imgchansize = imgshp[1]*imgshp[2]*imgshp[3]*4 # size of an image channel in bytes
if chanchunk is None:
mem_bytes = os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') # e.g. 4015976448
chanchunk = max((mem_bytes/20)/imgchansize, 1)
print("based on available memory ({0}), max image chunk is {1} channels".format(mem_bytes, chanchunk))
if chanchunk < nimgchan:
mschanstep = numchans*chanstep/nimgchan
if numchans%nimgchan:
warn("MS channels not evenly divisible into $nimgchan image channels, chunking may be incorrect")
chunklist = []
for chan0 in range(0, nimgchan, chanchunk):
imch0, imch1 = chan0, (min(chan0+chanchunk, nimgchan)-1)
msch0 = chanstart + imch0*mschanstep
msnch = (imch1-imch0+1)*mschanstep/chanstep
# overlap each chunk from 1 onwards by a half-chunk back to take care of extrapolated visibilties
# from previous channel
if imch0:
imch0 -= 1
msch0 -= mschanstep/2
msnch += mschanstep/2
print("image chunk {0}~{1} corresponds to MS chunk {2}~{3}".format(imch0, imch1, msch0, msch0+msnch-1))
chunklist.append((msch0, msnch, imch0, imch1))
# even in fill-model mode where it claims to ignore image parameters, the image channelization
# arguments need to be "just so" as per below, otherwise it gives a GridFT: weights all zero message
kw0 = {}
kw0.update(ms=msname, model=casaimage,
niter=0, fixed=1, mode="channel", operation="csclean",
img_nchan=1, img_chanstart=chanstart, img_chanstep=numchans*chanstep)
kw0['fillmodel'] = 1
blc = [0]*len(imgshp)
trc = [ x-1 for x in imgshp ]
# now loop over image frequency chunks
for ichunk, (mschanstart, msnumchans, imgch0, imgch1) in enumerate(chunklist):
if len(chunklist) > 1:
blc[0], trc[0] = imgch0, imgch1
print("writing CASA image for slice {0} {1}".format(blc, trc))
casaimage1 = II("{0}.{1}.img".format(image, ichunk1))
rm_fr(casaimage1)
print("writing CASA image for slice {0} {1} to {2}".format(blc, trc, casaimage1))
img.subimage(blc, trc, dropdegenerate=False).saveas(casaimage1)
kw0.update(model=casaimage1)
else:
img.unlock()
# setup imager options
kw0.update(chanstart=mschanstart, chanstep=chanstep, nchan=msnumchans)
print("predicting visibilities into MODEL_DATA")
_run(predict=True, **kw0)
if len(chunklist) > 1:
rm_fr(casaimage1)
rm_fr(casaimage)
if column != "MODEL_DATA":
print('Data was predicted to MODEL_DATA column. Will now copy it to the {} column as requested'.format(column))
msutils.copycol(msname=msname, fromcol="MODEL_DATA", tocol=column) | Converts image into predicted visibilities | train | https://github.com/SpheMakh/Stimela/blob/292e80461a0c3498da8e7e987e2891d3ae5981ad/stimela/cargo/cab/lwimager/src/run.py#L61-L139 | [
"def _run (prefix=None, predict=False, **kw):\n\n if predict:\n args = [ '{0}={1}'.format(a, b) for a,b in kw.iteritems() ]\n utils.xrun(cab['binary'], args)\n return\n\n if kw.get('niter', 0) >0:\n if kw.get('operation', None) not in ['clark', 'hogbom', 'csclean', 'multiscale', 'entropy']:\n kw['operation'] = 'csclean'\n images = {\n \"restored\" : [ '{0}.restored.{1}'.format(prefix, a) for a in ['fits', 'img']],\n \"model\" : [ '{0}.model.{1}'.format(prefix, a) for a in ['fits', 'img']],\n \"residual\" : [ '{0}.residual.{1}'.format(prefix, a) for a in ['fits', 'img']],\n }\n\n elif kw.get('niter', 0) == 0:\n kw[\"operation\"] = 'image'\n\n images = {\n \"image\" : [ '{0}.dirty.{1}'.format(prefix, a) for a in ['fits', 'img']],\n }\n\n for key,value in images.iteritems():\n kw[key] = value[1]\n\n args = [ '{0}={1}'.format(a, b) for a,b in kw.iteritems() ]\n utils.xrun(cab['binary'], args)\n\n print('Converting CASA iamges to FITS images')\n for fits,img in images.itervalues():\n im = pyrap.images.image(img)\n im.tofits(fits, overwrite=True, velocity=kw.get('prefervelocity', False))\n rm_fr(img)\n",
"def rm_fr(item):\n os.system('rm -fr {}'.format(item))\n"
] | import pyrap.images
import os
import sys
from pyrap.tables import table
from MSUtils import msutils
import tempfile
import pyfits
sys.path.append('/scratch/stimela')
import utils
CONFIG = os.environ['CONFIG']
OUTPUT = os.environ['OUTPUT']
cab = utils.readJson(CONFIG)
params = cab['parameters']
tdir = tempfile.mkdtemp(dir='.')
os.chdir(tdir)
def rm_fr(item):
os.system('rm -fr {}'.format(item))
def _run (prefix=None, predict=False, **kw):
if predict:
args = [ '{0}={1}'.format(a, b) for a,b in kw.iteritems() ]
utils.xrun(cab['binary'], args)
return
if kw.get('niter', 0) >0:
if kw.get('operation', None) not in ['clark', 'hogbom', 'csclean', 'multiscale', 'entropy']:
kw['operation'] = 'csclean'
images = {
"restored" : [ '{0}.restored.{1}'.format(prefix, a) for a in ['fits', 'img']],
"model" : [ '{0}.model.{1}'.format(prefix, a) for a in ['fits', 'img']],
"residual" : [ '{0}.residual.{1}'.format(prefix, a) for a in ['fits', 'img']],
}
elif kw.get('niter', 0) == 0:
kw["operation"] = 'image'
images = {
"image" : [ '{0}.dirty.{1}'.format(prefix, a) for a in ['fits', 'img']],
}
for key,value in images.iteritems():
kw[key] = value[1]
args = [ '{0}={1}'.format(a, b) for a,b in kw.iteritems() ]
utils.xrun(cab['binary'], args)
print('Converting CASA iamges to FITS images')
for fits,img in images.itervalues():
im = pyrap.images.image(img)
im.tofits(fits, overwrite=True, velocity=kw.get('prefervelocity', False))
rm_fr(img)
def predict_vis (msname, image, column="MODEL_DATA",
chanchunk=None, chanstart=0, chanstep=1):
"""Converts image into predicted visibilities"""
# CASA to convert them
casaimage = '{0}/{1}.img'.format(OUTPUT, os.path.basename(image))
# convert to CASA image
img = pyrap.images.image(image)
img.saveas(casaimage)
imgshp = img.shape()
ftab = table(msname+'/SPECTRAL_WINDOW')
numchans = ftab.getcol('NUM_CHAN')[0]
# default chunk list is entire chanel range. Update this if needed
chunklist = [ (0, numchans, None, None) ]
if len(imgshp) == 4 and imgshp[0] > 1:
nimgchan = imgshp[0]
print("image cube has {0} channels, MS has {1} channels".format(nimgchan, numchans))
imgchansize = imgshp[1]*imgshp[2]*imgshp[3]*4 # size of an image channel in bytes
if chanchunk is None:
mem_bytes = os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') # e.g. 4015976448
chanchunk = max((mem_bytes/20)/imgchansize, 1)
print("based on available memory ({0}), max image chunk is {1} channels".format(mem_bytes, chanchunk))
if chanchunk < nimgchan:
mschanstep = numchans*chanstep/nimgchan
if numchans%nimgchan:
warn("MS channels not evenly divisible into $nimgchan image channels, chunking may be incorrect")
chunklist = []
for chan0 in range(0, nimgchan, chanchunk):
imch0, imch1 = chan0, (min(chan0+chanchunk, nimgchan)-1)
msch0 = chanstart + imch0*mschanstep
msnch = (imch1-imch0+1)*mschanstep/chanstep
# overlap each chunk from 1 onwards by a half-chunk back to take care of extrapolated visibilties
# from previous channel
if imch0:
imch0 -= 1
msch0 -= mschanstep/2
msnch += mschanstep/2
print("image chunk {0}~{1} corresponds to MS chunk {2}~{3}".format(imch0, imch1, msch0, msch0+msnch-1))
chunklist.append((msch0, msnch, imch0, imch1))
# even in fill-model mode where it claims to ignore image parameters, the image channelization
# arguments need to be "just so" as per below, otherwise it gives a GridFT: weights all zero message
kw0 = {}
kw0.update(ms=msname, model=casaimage,
niter=0, fixed=1, mode="channel", operation="csclean",
img_nchan=1, img_chanstart=chanstart, img_chanstep=numchans*chanstep)
kw0['fillmodel'] = 1
blc = [0]*len(imgshp)
trc = [ x-1 for x in imgshp ]
# now loop over image frequency chunks
for ichunk, (mschanstart, msnumchans, imgch0, imgch1) in enumerate(chunklist):
if len(chunklist) > 1:
blc[0], trc[0] = imgch0, imgch1
print("writing CASA image for slice {0} {1}".format(blc, trc))
casaimage1 = II("{0}.{1}.img".format(image, ichunk1))
rm_fr(casaimage1)
print("writing CASA image for slice {0} {1} to {2}".format(blc, trc, casaimage1))
img.subimage(blc, trc, dropdegenerate=False).saveas(casaimage1)
kw0.update(model=casaimage1)
else:
img.unlock()
# setup imager options
kw0.update(chanstart=mschanstart, chanstep=chanstep, nchan=msnumchans)
print("predicting visibilities into MODEL_DATA")
_run(predict=True, **kw0)
if len(chunklist) > 1:
rm_fr(casaimage1)
rm_fr(casaimage)
if column != "MODEL_DATA":
print('Data was predicted to MODEL_DATA column. Will now copy it to the {} column as requested'.format(column))
msutils.copycol(msname=msname, fromcol="MODEL_DATA", tocol=column)
options = {}
for param in params:
value = param['value']
name = param['name']
if name == 'prefix':
prefix = value
continue
if value is None:
continue
if name == 'cellsize':
if isinstance(value, (float, int)):
value = '{}arcsec'.format(value)
elif name in ['threshold', 'targetflux']:
if isinstance(value, float):
value = '{}arcsec'.format(value)
options[name] = value
predict = options.pop('simulate_fits', False)
if predict:
tfile = tempfile.NamedTemporaryFile(suffix='.fits')
tfile.flush()
cell = options.get('cellsize', None)
if cell is None:
with pyfits.open(predict) as _hdu:
if hasattr(_hdu, '__iter__'):
hdu = _hdu[0]
else:
hdu = _hdu
cdelt = hdu.header.get('CDELT1', None)
if cdelt:
cell = '{:f}arcsec'.format(abs(cdelt)*3600)
if cell is None:
raise RuntimeError('The size of a pixel in this FITS image was not specified \
in FITS header (CDELT1/2), or as parameter for this module ("cellsize"). Cannot proceed')
utils.xrun('python /scratch/code/predict_from_fits.py', [predict, options['ms'], cell,
tfile.name])
predict_vis(msname=options['ms'], image=tfile.name, column=options.get('data','MODEL_DATA'),
chanchunk=options.get('chanchunk', None), chanstart=options.get('img_chanstart', 0),
chanstep=options.get('img_chanstep', 1))
tfile.close()
else:
_run(prefix, **options)
os.chdir(OUTPUT)
os.system('rm -r {}'.format(tdir))
|
SpheMakh/Stimela | stimela/docker.py | build | python | def build(image, build_path, tag=None, build_args=None, fromline=None, args=[]):
if tag:
image = ":".join([image, tag])
bdir = tempfile.mkdtemp()
os.system('cp -r {0:s}/* {1:s}'.format(build_path, bdir))
if build_args:
stdw = tempfile.NamedTemporaryFile(dir=bdir, mode='w')
with open("{}/Dockerfile".format(bdir)) as std:
dfile = std.readlines()
for line in dfile:
if fromline and line.lower().startswith('from'):
stdw.write('FROM {:s}\n'.format(fromline))
elif line.lower().startswith("cmd"):
for arg in build_args:
stdw.write(arg+"\n")
stdw.write(line)
else:
stdw.write(line)
stdw.flush()
utils.xrun("docker build", args+["--force-rm","-f", stdw.name,
"-t", image,
bdir])
stdw.close()
else:
utils.xrun("docker build", args+["--force-rm", "-t", image,
bdir])
os.system('rm -rf {:s}'.format(bdir)) | build a docker image | train | https://github.com/SpheMakh/Stimela/blob/292e80461a0c3498da8e7e987e2891d3ae5981ad/stimela/docker.py#L16-L48 | [
"def xrun(command, options, log=None, _log_container_as_started=False, logfile=None, timeout=-1, kill_callback=None):\n \"\"\"\n Run something on command line.\n\n Example: _run(\"ls\", [\"-lrt\", \"../\"])\n \"\"\"\n\n cmd = \" \".join([command] + list(map(str, options)) )\n\n def _print_info(msg):\n if msg is None: return\n if log:\n log.info(msg)\n else:\n print(msg)\n\n def _print_warn(msg):\n if msg is None: return\n if log:\n log.warn(msg)\n else:\n print(msg)\n\n _print_info(u\"Running: {0:s}\".format(cmd))\n\n sys.stdout.flush()\n starttime = time.time()\n process = p = None\n try:\n foutname = os.path.join(\"/tmp\", \"stimela_output_{0:s}_{1:f}\".format(hashlib.md5(cmd.encode('utf-8')).hexdigest(), starttime))\n with open(foutname, \"w+\") as fout:\n p = process = subprocess.Popen(cmd,\n stderr=fout,\n stdout=fout,\n shell=True)\n\n def clock_killer(p):\n while process.poll() is None and (timeout >= 0):\n currenttime = time.time()\n if (currenttime - starttime < timeout):\n DEBUG and _print_warn(u\"Clock Reaper: has been running for {0:f}, must finish in {1:f}\".format(currenttime - starttime, timeout))\n else:\n _print_warn(u\"Clock Reaper: Timeout reached for '{0:s}'... 
sending the KILL signal\".format(cmd))\n (kill_callback is not None) and kill_callback()\n time.sleep(INTERRUPT_TIME)\n\n Thread(target=clock_killer, args=tuple([p])).start()\n\n while (process.poll() is None):\n currenttime = time.time()\n DEBUG and _print_info(u\"God mode on: has been running for {0:f}\".format(currenttime - starttime))\n time.sleep(INTERRUPT_TIME) # this is probably not ideal as it interrupts the process every few seconds, \n #check whether there is an alternative with a callback\n assert hasattr(process, \"returncode\"), \"No returncode after termination!\"\n with open(foutname, \"r\") as fout:\n _print_info(fout.read())\n finally:\n if (process is not None) and process.returncode:\n raise StimelaCabRuntimeError('%s: returns errr code %d' % (command, process.returncode))\n"
] | import subprocess
import os
import sys
from io import StringIO
from stimela import utils
import json
import stimela
import time
import datetime
import tempfile
class DockerError(Exception):
pass
def pull(image, tag=None):
""" pull a docker image """
if tag:
image = ":".join([image, tag])
utils.xrun("docker pull", [image])
def seconds_hms(seconds):
return str(datetime.timedelta(seconds=seconds))
class Container(object):
def __init__(self, image, name,
volumes=None, environs=None,
label="", logger=None,
shared_memory="1gb",
time_out=-1,
log_container=None):
"""
Python wrapper to docker engine tools for managing containers.
"""
self.image = image
self.name = name
self.label = label
self.volumes = volumes or []
self.environs = environs or []
self.logger = logger
self.status = None
self.WORKDIR = None
self.COMMAND = None
self.shared_memory = shared_memory
self.PID = os.getpid()
self.uptime = "00:00:00"
self.time_out = time_out
self.cont_logger = utils.logger.StimelaLogger(log_container or stimela.LOG_FILE)
def add_volume(self, host, container, perm="rw"):
if os.path.exists(host):
if self.logger:
self.logger.debug("Mounting volume [{0}] in container [{1}] at [{2}]".format(host, self.name, container))
host = os.path.abspath(host)
else:
raise IOError("Directory {0} cannot be mounted on container: File doesn't exist".format(host))
self.volumes.append(":".join([host,container,perm]))
def add_environ(self, key, value):
if self.logger:
self.logger.debug("Adding environ varaible [{0}={1}] in container {2}".format(key, value, self.name))
self.environs.append("=".join([key, value]))
def create(self, *args):
if self.volumes:
volumes = " -v " + " -v ".join(self.volumes)
else:
volumes = ""
if self.environs:
environs = environs = " -e "+" -e ".join(self.environs)
else:
environs = ""
self._print("Instantiating container [{}]. The container ID is printed below.".format(self.name))
utils.xrun("docker create", list(args) + [volumes, environs,
"-w %s"%(self.WORKDIR) if self.WORKDIR else "",
"--name", self.name, "--shm-size", self.shared_memory,
self.image,
self.COMMAND or ""])
self.status = "created"
def info(self):
output = subprocess.check_output("docker inspect {}".format(self.name), shell=True).decode()
output_file = StringIO(output[3:-3])
jdict = json.load(output_file)
output_file.close()
return jdict
def get_log(self):
stdout = open(self.logfile, 'w')
exit_status = subprocess.call("docker logs {0}".format(self.name),
stdout=stdout, stderr=stdout, shell=True)
if exit_status !=0:
self.logger.warn('Could not log container: {}. Something went wrong durring execution'.format(self.name))
output = 'Task was not started.'
stdout.write(output)
else:
output = stdout.read()
stdout.close()
return output
def start(self):
running = True
tstart = time.time()
self.status = "running"
self.cont_logger.log_container(self.name)
self.cont_logger.write()
try:
self._print("Starting container [{0:s}]. Timeout set to {1:d}. The container ID is printed below.".format(self.name, self.time_out))
utils.xrun("docker", ["start", "-a", self.name],
timeout=self.time_out,
kill_callback=lambda: utils.xrun("docker", ["kill", self.name]))
except KeyboardInterrupt:
utils.xrun("docker", ["kill", self.name])
raise
uptime = seconds_hms(time.time() - tstart)
self.uptime = uptime
self._print("Container [{0}] has executed successfully".format(self.name))
self._print("Runtime was {0}.".format(uptime))
self.status = "exited"
def stop(self):
dinfo = self.info()
status = dinfo["State"]["Status"]
killed = False
if status in ["running", "paused"]:
try:
utils.xrun("docker stop", [self.name])
except KeyboardInterrupt("Received terminate signal. Will stop and remove container first"):
killed = True
self.status = 'exited'
self._print("Container {} has been stopped.".format(self.name))
if killed:
self.remove()
raise KeyboardInterrupt
def remove(self):
dinfo = self.info()
status = dinfo["State"]["Status"]
killed = False
if status == "exited":
try:
utils.xrun("docker rm", [self.name])
except KeyboardInterrupt:
killed = True
if killed:
raise KeyboardInterrupt
else:
raise DockerError("Container [{}] has not been stopped, cannot remove".format(self.name))
self.cont_logger.remove('containers', self.name)
self.cont_logger.write()
def _print(self, message):
if self.logger:
self.logger.info(message)
else:
print(message)
|
SpheMakh/Stimela | stimela/docker.py | pull | python | def pull(image, tag=None):
if tag:
image = ":".join([image, tag])
utils.xrun("docker pull", [image]) | pull a docker image | train | https://github.com/SpheMakh/Stimela/blob/292e80461a0c3498da8e7e987e2891d3ae5981ad/stimela/docker.py#L50-L55 | [
"def xrun(command, options, log=None, _log_container_as_started=False, logfile=None, timeout=-1, kill_callback=None):\n \"\"\"\n Run something on command line.\n\n Example: _run(\"ls\", [\"-lrt\", \"../\"])\n \"\"\"\n\n cmd = \" \".join([command] + list(map(str, options)) )\n\n def _print_info(msg):\n if msg is None: return\n if log:\n log.info(msg)\n else:\n print(msg)\n\n def _print_warn(msg):\n if msg is None: return\n if log:\n log.warn(msg)\n else:\n print(msg)\n\n _print_info(u\"Running: {0:s}\".format(cmd))\n\n sys.stdout.flush()\n starttime = time.time()\n process = p = None\n try:\n foutname = os.path.join(\"/tmp\", \"stimela_output_{0:s}_{1:f}\".format(hashlib.md5(cmd.encode('utf-8')).hexdigest(), starttime))\n with open(foutname, \"w+\") as fout:\n p = process = subprocess.Popen(cmd,\n stderr=fout,\n stdout=fout,\n shell=True)\n\n def clock_killer(p):\n while process.poll() is None and (timeout >= 0):\n currenttime = time.time()\n if (currenttime - starttime < timeout):\n DEBUG and _print_warn(u\"Clock Reaper: has been running for {0:f}, must finish in {1:f}\".format(currenttime - starttime, timeout))\n else:\n _print_warn(u\"Clock Reaper: Timeout reached for '{0:s}'... 
sending the KILL signal\".format(cmd))\n (kill_callback is not None) and kill_callback()\n time.sleep(INTERRUPT_TIME)\n\n Thread(target=clock_killer, args=tuple([p])).start()\n\n while (process.poll() is None):\n currenttime = time.time()\n DEBUG and _print_info(u\"God mode on: has been running for {0:f}\".format(currenttime - starttime))\n time.sleep(INTERRUPT_TIME) # this is probably not ideal as it interrupts the process every few seconds, \n #check whether there is an alternative with a callback\n assert hasattr(process, \"returncode\"), \"No returncode after termination!\"\n with open(foutname, \"r\") as fout:\n _print_info(fout.read())\n finally:\n if (process is not None) and process.returncode:\n raise StimelaCabRuntimeError('%s: returns errr code %d' % (command, process.returncode))\n"
] | import subprocess
import os
import sys
from io import StringIO
from stimela import utils
import json
import stimela
import time
import datetime
import tempfile
class DockerError(Exception):
pass
def build(image, build_path, tag=None, build_args=None, fromline=None, args=[]):
""" build a docker image"""
if tag:
image = ":".join([image, tag])
bdir = tempfile.mkdtemp()
os.system('cp -r {0:s}/* {1:s}'.format(build_path, bdir))
if build_args:
stdw = tempfile.NamedTemporaryFile(dir=bdir, mode='w')
with open("{}/Dockerfile".format(bdir)) as std:
dfile = std.readlines()
for line in dfile:
if fromline and line.lower().startswith('from'):
stdw.write('FROM {:s}\n'.format(fromline))
elif line.lower().startswith("cmd"):
for arg in build_args:
stdw.write(arg+"\n")
stdw.write(line)
else:
stdw.write(line)
stdw.flush()
utils.xrun("docker build", args+["--force-rm","-f", stdw.name,
"-t", image,
bdir])
stdw.close()
else:
utils.xrun("docker build", args+["--force-rm", "-t", image,
bdir])
os.system('rm -rf {:s}'.format(bdir))
def seconds_hms(seconds):
return str(datetime.timedelta(seconds=seconds))
class Container(object):
def __init__(self, image, name,
volumes=None, environs=None,
label="", logger=None,
shared_memory="1gb",
time_out=-1,
log_container=None):
"""
Python wrapper to docker engine tools for managing containers.
"""
self.image = image
self.name = name
self.label = label
self.volumes = volumes or []
self.environs = environs or []
self.logger = logger
self.status = None
self.WORKDIR = None
self.COMMAND = None
self.shared_memory = shared_memory
self.PID = os.getpid()
self.uptime = "00:00:00"
self.time_out = time_out
self.cont_logger = utils.logger.StimelaLogger(log_container or stimela.LOG_FILE)
def add_volume(self, host, container, perm="rw"):
if os.path.exists(host):
if self.logger:
self.logger.debug("Mounting volume [{0}] in container [{1}] at [{2}]".format(host, self.name, container))
host = os.path.abspath(host)
else:
raise IOError("Directory {0} cannot be mounted on container: File doesn't exist".format(host))
self.volumes.append(":".join([host,container,perm]))
def add_environ(self, key, value):
if self.logger:
self.logger.debug("Adding environ varaible [{0}={1}] in container {2}".format(key, value, self.name))
self.environs.append("=".join([key, value]))
def create(self, *args):
if self.volumes:
volumes = " -v " + " -v ".join(self.volumes)
else:
volumes = ""
if self.environs:
environs = environs = " -e "+" -e ".join(self.environs)
else:
environs = ""
self._print("Instantiating container [{}]. The container ID is printed below.".format(self.name))
utils.xrun("docker create", list(args) + [volumes, environs,
"-w %s"%(self.WORKDIR) if self.WORKDIR else "",
"--name", self.name, "--shm-size", self.shared_memory,
self.image,
self.COMMAND or ""])
self.status = "created"
def info(self):
output = subprocess.check_output("docker inspect {}".format(self.name), shell=True).decode()
output_file = StringIO(output[3:-3])
jdict = json.load(output_file)
output_file.close()
return jdict
def get_log(self):
stdout = open(self.logfile, 'w')
exit_status = subprocess.call("docker logs {0}".format(self.name),
stdout=stdout, stderr=stdout, shell=True)
if exit_status !=0:
self.logger.warn('Could not log container: {}. Something went wrong durring execution'.format(self.name))
output = 'Task was not started.'
stdout.write(output)
else:
output = stdout.read()
stdout.close()
return output
def start(self):
running = True
tstart = time.time()
self.status = "running"
self.cont_logger.log_container(self.name)
self.cont_logger.write()
try:
self._print("Starting container [{0:s}]. Timeout set to {1:d}. The container ID is printed below.".format(self.name, self.time_out))
utils.xrun("docker", ["start", "-a", self.name],
timeout=self.time_out,
kill_callback=lambda: utils.xrun("docker", ["kill", self.name]))
except KeyboardInterrupt:
utils.xrun("docker", ["kill", self.name])
raise
uptime = seconds_hms(time.time() - tstart)
self.uptime = uptime
self._print("Container [{0}] has executed successfully".format(self.name))
self._print("Runtime was {0}.".format(uptime))
self.status = "exited"
def stop(self):
dinfo = self.info()
status = dinfo["State"]["Status"]
killed = False
if status in ["running", "paused"]:
try:
utils.xrun("docker stop", [self.name])
except KeyboardInterrupt("Received terminate signal. Will stop and remove container first"):
killed = True
self.status = 'exited'
self._print("Container {} has been stopped.".format(self.name))
if killed:
self.remove()
raise KeyboardInterrupt
def remove(self):
dinfo = self.info()
status = dinfo["State"]["Status"]
killed = False
if status == "exited":
try:
utils.xrun("docker rm", [self.name])
except KeyboardInterrupt:
killed = True
if killed:
raise KeyboardInterrupt
else:
raise DockerError("Container [{}] has not been stopped, cannot remove".format(self.name))
self.cont_logger.remove('containers', self.name)
self.cont_logger.write()
def _print(self, message):
if self.logger:
self.logger.info(message)
else:
print(message)
|
SpheMakh/Stimela | stimela/__init__.py | info | python | def info(cabdir, header=False):
# First check if cab exists
pfile = "{}/parameters.json".format(cabdir)
if not os.path.exists(pfile):
raise RuntimeError("Cab could not be found at : {}".format(cabdir))
# Get cab info
cab_definition = cab.CabDefinition(parameter_file=pfile)
cab_definition.display(header) | prints out help information about a cab | train | https://github.com/SpheMakh/Stimela/blob/292e80461a0c3498da8e7e987e2891d3ae5981ad/stimela/__init__.py#L194-L203 | [
"def display(self, header=False):\n rows, cols = os.popen('stty size', 'r').read().split()\n lines = textwrap.wrap(self.description, int(cols)*3/4)\n print(\"Cab {0}\".format(self.task))\n print(\"Info {}\".format(lines[0]))\n for line in lines[1:]:\n print(\" {}\".format(line))\n if header:\n print(\" \")\n return\n\n print(\"Base Image {0}:{1}\".format(self.base, self.tag))\n print(\"\\n\")\n\n print(\"Parameters:\")\n rows, cols = os.popen('stty size', 'r').read().split()\n for param in self.parameters:\n\n _types = \"\"\n for i,_type in enumerate(param.dtype):\n if isinstance(_type, tuple):\n _name = \"list:{}\".format(\"file\" if _type[1]==\"file\" else _type[1].__name__)\n else:\n _name = \"file\" if _type==\"file\" else _type.__name__\n _types += \"{}\".format(_name) if i==0 else \"/{}\".format(_name)\n\n lines = textwrap.wrap(param.info, int(cols)*3/4)\n\n print(\" Name {}{}\".format(param.name, \"/{}\".format(param.mapping) if param.mapping else \"\"))\n print(\" Description {}\".format(lines[0]))\n for line in lines[1:]:\n print(\" {}\".format(line))\n print(\" Type {}\".format(_types))\n print(\" Default {}\".format(param.default))\n if param.choices:\n print(\" Choices {}\".format(param.choices))\n print(\" \")\n"
] | import os
import sys
import argparse
import time
from argparse import ArgumentParser
import textwrap as _textwrap
import tempfile
import signal
import inspect
import stimela
from stimela import utils, cargo
from stimela.cargo import cab
from stimela.recipe import Recipe as Pipeline
from stimela.recipe import Recipe, PipelineException
from stimela import docker, singularity
import pkg_resources
try:
__version__ = pkg_resources.require("stimela")[0].version
except pkg_resources.DistributionNotFound:
__version__ = "dev"
from stimela.utils import logger
# Get to know user
USER = os.environ["USER"]
UID = os.getuid()
GID = os.getgid()
# Set up logging infrastructure
LOG_HOME = os.path.expanduser("~/.stimela")
# This is is the default log file. It logs stimela images, containers and processes
LOG_FILE = "{0:s}/stimela_logfile.json".format(LOG_HOME)
#LOG_FILE = "{0:s}/{1:s}_stimela_logfile.json".format(LOG_HOME, USER)
# Get base images
# All base images must be on dockerhub
BASE = os.listdir(cargo.BASE_PATH)
# Get package cab images (user can add their own cab images)
# All package cab images must be based on stimela base images.
CAB = list()
for item in os.listdir(cargo.CAB_PATH):
try:
# These files must exist for a cab image to be valid
ls_cabdir = os.listdir('{0}/{1}'.format(cargo.CAB_PATH, item))
dockerfile = 'Dockerfile' in ls_cabdir
paramfile = 'parameters.json' in ls_cabdir
srcdir = 'src' in ls_cabdir
except OSError:
continue
if dockerfile and paramfile and srcdir:
CAB.append(item)
GLOBALS = {'foo' : 'bar'}
del GLOBALS['foo']
class MultilineFormatter(argparse.HelpFormatter):
def _fill_text(self, text, width, indent):
text = self._whitespace_matcher.sub(' ', text).strip()
paragraphs = text.split('|n ')
multiline_text = ''
for paragraph in paragraphs:
formatted_paragraph = _textwrap.fill(paragraph, width, initial_indent=indent, subsequent_indent=indent) + '\n\n'
multiline_text = multiline_text + formatted_paragraph
return multiline_text
def register_globals():
frame = inspect.currentframe().f_back
frame.f_globals.update(GLOBALS)
def build(argv):
for i, arg in enumerate(argv):
if (arg[0] == '-') and arg[1].isdigit(): argv = ' ' + arg
parser = ArgumentParser(description='Build executor (a.k.a cab) images')
parser.add_argument("-b", "--base", action="store_true",
help="Build base images")
parser.add_argument("-c", "--cab", metavar="CAB,CAB_DIR",
help="Executor image (name) name, location of executor image files")
parser.add_argument("-uo", "--us-only",
help="Only build these cabs. Comma separated cab names")
parser.add_argument("-i", "--ignore-cabs", default="",
help="Comma separated cabs (executor images) to ignore.")
parser.add_argument("-nc", "--no-cache", action="store_true",
help="Do not use cache when building the image")
parser.add_argument("-bl", "--build-label", default=USER,
help="Label for cab images. All cab images will be named <CAB_LABEL>_<cab name>. The default is $USER")
args = parser.parse_args(argv)
log = logger.StimelaLogger('{0:s}/{1:s}_stimela_logfile.json'.format(LOG_HOME, args.build_label))
if args.base:
# Build base and meqtrees images first
BASE.remove("base")
BASE.remove("meqtrees")
BASE.remove("casa")
for image in ["base", "meqtrees", "casa"] + BASE:
dockerfile = "{:s}/{:s}".format(cargo.BASE_PATH, image)
image = "stimela/{0}:{1}".format(image, __version__)
docker.build(image,
dockerfile)
log.log_image(image, dockerfile, replace=True)
log.write()
return 0
workdir = "/home/{}/output/".format(USER)
build_args = [
"WORKDIR {:s}".format(workdir),
"RUN useradd -r -u {0:d} -U {1:s}".format(UID, USER),
"USER {0:s}".format(USER),
]
no_cache = ["--no-cache"] if args.no_cache else []
if args.cab:
cab_args = args.cab.split(",")
if len(cab_args)==2:
cab, path = cab_args
else:
raise ValueError("Not enough arguments for build command.")
image = "{:s}_cab/{:s}".format(args.build_label, cab)
docker.build(image,
path,
build_args=build_args, args=no_cache)
log.log_image(image, path, replace=True, cab=True)
log.write()
return
# Cabs and their locations
cabs = []
dockerfiles = []
if args.us_only:
CABS = args.us_only.split(',')
else:
# Images that have been logged
# This is crucial for making custom cabs
logged_images = log.read().get('images', {})
for key,val in logged_images.items():
if val['CAB']:
cabs.append(key)
dockerfiles.append(val['DIR'])
# If user wants to ignore some cabs
IGNORE = args.ignore_cabs.split(",")
CABS = set(CAB).difference(set(IGNORE))
# Prioritise package images over logged images
cabs = ["{:s}_cab/{:s}".format(args.build_label, cab) for cab in CABS] + cabs
dockerfiles = [ "{:s}/{:s}".format(cargo.CAB_PATH, cab) for cab in CABS] + dockerfiles
built = []
for image, dockerfile in zip(cabs,dockerfiles):
if image not in built:
docker.build(image,
dockerfile,
build_args=build_args, args=no_cache)
log.log_image(image, dockerfile, replace=True, cab=True)
log.write()
built.append(image)
def get_cabs(logfile):
log = logger.StimelaLogger(logfile)
cabs_ = log.read()['images']
# Remove images that are not cabs
for key in cabs_.keys():
if not cabs_[key]['CAB']:
del cabs_[key]
return cabs_
def cabs(argv):
for i, arg in enumerate(argv):
if (arg[0] == '-') and arg[1].isdigit(): argv = ' ' + arg
parser = ArgumentParser(description='List executor (a.k.a cab) images')
parser.add_argument("-i", "--cab-doc",
help="Will display document about the specified cab. For example, \
to get help on the 'cleanmask cab' run 'stimela cabs --cab-doc cleanmask'")
parser.add_argument("-l", "--list", action="store_true",
help="List cab names")
parser.add_argument("-ls", "--list-summary", action="store_true",
help="List cabs with a summary of the cab")
parser.add_argument("-bl", "--build-label", default=USER,
help="Label for build you want documentation for. See --build-label option in 'stimela help build'")
args = parser.parse_args(argv)
logfile = '{0:s}/{1:s}_stimela_logfile.json'.format(LOG_HOME, args.build_label)
cabs_ = get_cabs(logfile)
if cabs_:
pass
else:
print('No cab images found, did you run \'stimela build\'')
sys.exit(0)
if args.cab_doc:
name = '{0:s}_cab/{1:s}'.format(args.build_label, args.cab_doc)
try:
cabdir = cabs_[name]['DIR']
except KeyError:
raise RuntimeError('The cab you requested is not known to stimela, or has not been built. Run \'stimela cabs -l\' to see which cabs have been built')
info(cabdir)
elif args.list_summary:
for key,val in cabs_.items():
if not val['CAB']:
continue
cabdir = cabs_[key]['DIR']
try:
info(cabdir, header=True)
except IOError:
pass
else:
_cabs = []
for cab in cabs_:
# strip away the label
name = cab.split('{}_'.format(args.build_label))[1].split('/')[1]
_cabs.append(name)
# print them cabs
print( ', '.join(_cabs) )
def run(argv):
for i, arg in enumerate(argv):
if (arg[0] == '-') and arg[1].isdigit(): argv[i] = ' ' + arg
parser = ArgumentParser(description='Dockerized Radio Interferometric Scripting Framework.\n'
'Sphesihle Makhathini <sphemakh@gmail.com>')
add = parser.add_argument
add("-in", "--input",
help="Input folder")
add("-out", "--output",
help="Output folder")
add("-ms", "--msdir",
help="MS folder. MSs should be placed here. Also, empty MSs will be placed here")
add("-j", "--ncores", type=int,
help="Number of cores to when stimela parallesization (stimea.utils.pper function) ")
add("script",
help="Run script")
add("-g", "--globals", metavar="KEY=VALUE[:TYPE]", action="append", default=[],
help="Global variables to pass to script. The type is assumed to string unless specified")
add("-bl", "--build-label", default=USER,
help="Label for cab images. All cab images will be named <CAB_LABEL>_<cab name>. The default is $USER")
args = parser.parse_args(argv)
tag = None
_globals = dict(_STIMELA_INPUT=args.input, _STIMELA_OUTPUT=args.output,
_STIMELA_MSDIR=args.msdir,
CAB_TAG=tag, _STIMELA_BUILD_LABEL=args.build_label)
nargs = len(args.globals)
global GLOBALS
if nargs:
for arg in args.globals:
if arg.find("=") > 1:
key, value = arg.split("=")
try:
value, _type = value.split(":")
except ValueError:
_type = "str"
GLOBALS[key] = eval("{:s}('{:s}')".format(_type, value))
if args.ncores:
utils.CPUS = args.ncores
with open(args.script, 'r') as stdr:
exec(stdr.read(), _globals)
def pull(argv):
for i, arg in enumerate(argv):
if (arg[0] == '-') and arg[1].isdigit(): argv[i] = ' ' + arg
parser = ArgumentParser(description='Pull docker stimela base images')
add = parser.add_argument
add("-im", "--image", action="append", metavar="IMAGE[:TAG]",
help="Pull base image along with its tag (or version). Can be called multiple times")
add("-t", "--tag",
help="Tag")
add("-s", "--singularity", action="store_true",
help="Use singularity instead of docker."
"Images will be pulled into the directory specified by the enviroment varaible, SINGULARITY_PULLFOLDER. $PWD by default")
args = parser.parse_args(argv)
log = logger.StimelaLogger(LOG_FILE)
images = log.read()['images']
if args.image:
for image in args.image:
simage = image.replace("/", "_")
simage = simage.replace(":", "_") + ".img"
if args.singularity:
singularity.pull(image, simage)
else:
docker.pull(image)
log.log_image(image, 'pulled')
else:
base = []
for cab in CAB:
image = "{:s}/{:s}".format(cargo.CAB_PATH, cab)
base.append( utils.get_Dockerfile_base_image(image).split()[-1] )
base = set(base)
for image in base:
if image not in ["stimela/ddfacet", "radioastro/ddfacet"]:
if args.singularity:
simage = image.replace("/", "_")
simage = simage.replace(":", "_") + ".img"
singularity.pull(image, simage)
else:
docker.pull(image)
log.log_image(image, 'pulled')
log.write()
def images(argv):
for i, arg in enumerate(argv):
if (arg[0] == '-') and arg[1].isdigit(): argv[i] = ' ' + arg
parser = ArgumentParser(description='List all stimela related images.')
add = parser.add_argument
add("-c", "--clear", action="store_true",
help="Clear the logfile that keeps track of stimela images. This does not do anythig to the images.")
args = parser.parse_args(argv)
log = logger.StimelaLogger(LOG_FILE)
log.display('images')
if args.clear:
log.clear('images')
log.write()
def containers(argv):
for i, arg in enumerate(argv):
if (arg[0] == '-') and arg[1].isdigit(): argv[i] = ' ' + arg
parser = ArgumentParser(description='List all active stimela containers.')
add = parser.add_argument
add("-c", "--clear", action="store_true",
help="Clear the log file that keeps track of stimela containers. This doesn't do anything to the containers.")
args = parser.parse_args(argv)
log = logger.StimelaLogger(LOG_FILE)
log.display('containers')
if args.clear:
log.clear('containers')
log.write()
def ps(argv):
for i, arg in enumerate(argv):
if (arg[0] == '-') and arg[1].isdigit(): argv[i] = ' ' + arg
parser = ArgumentParser(description='List all running stimela processes')
add = parser.add_argument
add("-c", "--clear", action="store_true",
help="Clear logfile that keeps track of stimela processes. This doesn't do anything ot the processes themselves.")
args = parser.parse_args(argv)
log = logger.StimelaLogger(LOG_FILE)
log.display('processes')
if args.clear:
log.clear('processes')
log.write()
def kill(argv):
for i, arg in enumerate(argv):
if (arg[0] == '-') and arg[1].isdigit(): argv[i] = ' ' + arg
parser = ArgumentParser(description='Gracefully kill stimela process(s).')
add = parser.add_argument
add("pid", nargs="*",
help="Process ID")
args = parser.parse_args(argv)
log = logger.StimelaLogger(LOG_FILE)
for pid in args.pid:
found = pid in log.info['processes'].keys()
if not found:
print("Could not find process {0}".format(pid))
continue
try:
os.kill(int(pid), signal.SIGINT)
except OSError:
raise OSError('Process with PID {} could not be killed'.format(pid))
log.remove('processes', pid)
log.write()
def clean(argv):
for i, arg in enumerate(argv):
if (arg[0] == '-') and arg[1].isdigit(): argv[i] = ' ' + arg
parser = ArgumentParser(description='Convience tools for cleaning up after stimela')
add = parser.add_argument
add("-ai", "--all-images", action="store_true",
help="Remove all images pulled/built by stimela. This include CAB images")
add("-ab", "--all-base", action="store_true",
help="Remove all base images")
add("-ac", "--all-cabs", action="store_true",
help="Remove all CAB images")
add("-aC", "--all-containers", action="store_true",
help="Stop and/or Remove all stimela containers")
add("-bl", "--build-label", default=USER,
help="Label for cab images. All cab images will be named <CAB_LABEL>_<cab name>. The default is $USER")
args = parser.parse_args(argv)
log = logger.StimelaLogger(LOG_FILE)
log_cabs = logger.StimelaLogger('{0:s}/{1:s}_stimela_logfile.json'.format(LOG_HOME,
args.build_label))
if args.all_images:
images = log.info['images'].keys()
images = log_cabs.info['images'].keys()
for image in images:
utils.xrun('docker', ['rmi', image])
log.remove('images', image)
log.write()
images = log_cabs.info['images'].keys()
for image in images:
if log_cabs.info['images'][image]['CAB']:
utils.xrun('docker', ['rmi', image])
log_cabs.remove('images', image)
log_cabs.write()
if args.all_base:
images = log.info['images'].keys()
for image in images:
if log.info['images'][image]['CAB'] is False:
utils.xrun('docker', ['rmi', image])
log.remove('images', image)
log.write()
if args.all_cabs:
images = log_cabs.info['images'].keys()
for image in images:
if log_cabs.info['images'][image]['CAB']:
utils.xrun('docker', ['rmi', image])
log_cabs.remove('images', image)
log_cabs.write()
if args.all_containers:
containers = log.info['containers'].keys()
for container in containers:
cont = docker.Container(log.info['containers'][container]['IMAGE'], container)
try:
status = cont.info()['State']['Status'].lower()
except:
print('Could not inspect container {}. It probably doesn\'t exist, will remove it from log'.format(container))
status = "no there"
if status == 'running':
# Kill the container instead of stopping it, so that effect can be felt py parent process
utils.xrun('docker', ['kill', container])
cont.remove()
elif status in ['exited', 'dead']:
cont.remove()
log.remove('containers', container)
log.write()
def main(argv):
for i, arg in enumerate(argv):
if (arg[0] == '-') and arg[1].isdigit(): argv[i] = ' ' + arg
parser = ArgumentParser(description='Stimela: Dockerized Radio Interferometric Scripting Framework. '
'|n version {:s} |n install path {:s} |n '
'Sphesihle Makhathini <sphemakh@gmail.com>'.format(__version__,
os.path.dirname(__file__)),
formatter_class=MultilineFormatter,
add_help=False)
add = parser.add_argument
add("-h", "--help", action="store_true",
help="Print help message and exit")
add("-v","--version", action='version',
version='{:s} version {:s}'.format(parser.prog, __version__))
add("command", nargs="*", metavar="command [options]",
help="Stimela command to execute. For example, 'stimela help run'")
options = []
commands = dict(pull=pull, build=build, run=run,
images=images, cabs=cabs, ps=ps,
containers=containers, kill=kill,
clean=clean)
command = "failure"
for cmd in commands:
if cmd in argv:
command = cmd
index = argv.index(cmd)
options = argv[index+1:]
argv = argv[:index+1]
args = parser.parse_args(argv)
# Command is help and no other commands following
try:
main_help = (args.command[0] == "help" and len(args.command) == 1)
except IndexError:
main_help = True
if args.help or main_help:
parser.print_help()
print ("""
Run a command. These can be:
help : Prints out a help message about other commands
build : Build a set of stimela images
pull : pull a stimela base images
run : Run a stimela script
images : List stimela images
cabs : Manage cab images
ps : List running stimela scripts
kill : Gracefully kill runing stimela process
clean : Clean up tools for stimela
""")
sys.exit(0)
# Separate commands into command and arguments
cmd, argv = args.command[0], args.command[1:]
# If we've got past the if statement above, and help
# is the command then assume that help on a command
# is requested
if cmd == "help":
# Request help on the sub-command
cmd, argv = argv[0], ["-h"]
else:
argv = options
# Get the function to execute for the command
try:
_cmd = commands[cmd]
except KeyError:
raise KeyError("Command '{:s}' not recognized "
"Run : 'stimela help' for help".format(cmd))
# Invoke the command
_cmd(argv)
|
SpheMakh/Stimela | stimela/utils/__init__.py | xrun | python | def xrun(command, options, log=None, _log_container_as_started=False, logfile=None, timeout=-1, kill_callback=None):
cmd = " ".join([command] + list(map(str, options)) )
def _print_info(msg):
if msg is None: return
if log:
log.info(msg)
else:
print(msg)
def _print_warn(msg):
if msg is None: return
if log:
log.warn(msg)
else:
print(msg)
_print_info(u"Running: {0:s}".format(cmd))
sys.stdout.flush()
starttime = time.time()
process = p = None
try:
foutname = os.path.join("/tmp", "stimela_output_{0:s}_{1:f}".format(hashlib.md5(cmd.encode('utf-8')).hexdigest(), starttime))
with open(foutname, "w+") as fout:
p = process = subprocess.Popen(cmd,
stderr=fout,
stdout=fout,
shell=True)
def clock_killer(p):
while process.poll() is None and (timeout >= 0):
currenttime = time.time()
if (currenttime - starttime < timeout):
DEBUG and _print_warn(u"Clock Reaper: has been running for {0:f}, must finish in {1:f}".format(currenttime - starttime, timeout))
else:
_print_warn(u"Clock Reaper: Timeout reached for '{0:s}'... sending the KILL signal".format(cmd))
(kill_callback is not None) and kill_callback()
time.sleep(INTERRUPT_TIME)
Thread(target=clock_killer, args=tuple([p])).start()
while (process.poll() is None):
currenttime = time.time()
DEBUG and _print_info(u"God mode on: has been running for {0:f}".format(currenttime - starttime))
time.sleep(INTERRUPT_TIME) # this is probably not ideal as it interrupts the process every few seconds,
#check whether there is an alternative with a callback
assert hasattr(process, "returncode"), "No returncode after termination!"
with open(foutname, "r") as fout:
_print_info(fout.read())
finally:
if (process is not None) and process.returncode:
raise StimelaCabRuntimeError('%s: returns errr code %d' % (command, process.returncode)) | Run something on command line.
Example: _run("ls", ["-lrt", "../"]) | train | https://github.com/SpheMakh/Stimela/blob/292e80461a0c3498da8e7e987e2891d3ae5981ad/stimela/utils/__init__.py#L52-L110 | [
"def _print_info(msg):\n if msg is None: return\n if log:\n log.info(msg)\n else:\n print(msg)\n"
] | import subprocess
import signal
import os
import sys
import logging
import json
import yaml
import codecs
import time
import tempfile
import inspect
import warnings
import re
import math
from threading import Thread
import unicodedata
import hashlib
#from fcntl import fcntl, F_GETFL, F_SETFL
#from os import O_NONBLOCK, read
DEBUG = False
INTERRUPT_TIME = 1.0 # seconds -- do not want to constantly interrupt the child process
class StimelaCabRuntimeError(RuntimeError): pass
from multiprocessing import Process, Manager, Lock
CPUS = 1
def _logger(level=0, logfile=None):
if logfile and not logging.getLogger("STIMELA"):
logging.basicConfig(filename=logfile)
elif not logging.getLogger("STIMELA"):
logging.basicConfig()
LOGL = {"0": "INFO",
"1": "DEBUG",
"2": "ERROR",
"3": "CRITICAL"}
log = logging.getLogger("STIMELA")
log.setLevel(eval("logging."+LOGL[str(level)]))
return log
def assign(key, value):
frame = inspect.currentframe().f_back
frame.f_globals[key] = value
def xrun(command, options, log=None, _log_container_as_started=False, logfile=None, timeout=-1, kill_callback=None):
"""
Run something on command line.
Example: _run("ls", ["-lrt", "../"])
"""
cmd = " ".join([command] + list(map(str, options)) )
def _print_info(msg):
if msg is None: return
if log:
log.info(msg)
else:
print(msg)
def _print_warn(msg):
if msg is None: return
if log:
log.warn(msg)
else:
print(msg)
_print_info(u"Running: {0:s}".format(cmd))
sys.stdout.flush()
starttime = time.time()
process = p = None
try:
foutname = os.path.join("/tmp", "stimela_output_{0:s}_{1:f}".format(hashlib.md5(cmd.encode('utf-8')).hexdigest(), starttime))
with open(foutname, "w+") as fout:
p = process = subprocess.Popen(cmd,
stderr=fout,
stdout=fout,
shell=True)
def clock_killer(p):
while process.poll() is None and (timeout >= 0):
currenttime = time.time()
if (currenttime - starttime < timeout):
DEBUG and _print_warn(u"Clock Reaper: has been running for {0:f}, must finish in {1:f}".format(currenttime - starttime, timeout))
else:
_print_warn(u"Clock Reaper: Timeout reached for '{0:s}'... sending the KILL signal".format(cmd))
(kill_callback is not None) and kill_callback()
time.sleep(INTERRUPT_TIME)
Thread(target=clock_killer, args=tuple([p])).start()
while (process.poll() is None):
currenttime = time.time()
DEBUG and _print_info(u"God mode on: has been running for {0:f}".format(currenttime - starttime))
time.sleep(INTERRUPT_TIME) # this is probably not ideal as it interrupts the process every few seconds,
#check whether there is an alternative with a callback
assert hasattr(process, "returncode"), "No returncode after termination!"
with open(foutname, "r") as fout:
_print_info(fout.read())
finally:
if (process is not None) and process.returncode:
raise StimelaCabRuntimeError('%s: returns errr code %d' % (command, process.returncode))
def readJson(conf):
with open(conf) as _std:
jdict = yaml.safe_load(_std)
return jdict
def writeJson(config, dictionary):
with codecs.open(config, 'w', 'utf8') as std:
std.write(json.dumps(dictionary, ensure_ascii=False))
def get_Dockerfile_base_image(image):
if os.path.isfile(image):
dockerfile = image
else:
dockerfile = "{:s}/Dockerfile".format(image)
with open(dockerfile, "r") as std:
_from = ""
for line in std.readlines():
if line.startswith("FROM"):
_from = line
return _from
def change_Dockerfile_base_image(path, _from, label, destdir="."):
if os.path.isfile(path):
dockerfile = path
dirname = os.path.dirname(path)
else:
dockerfile = "{:s}/Dockerfile".format(path)
dirname = path
with open(dockerfile, "r") as std:
lines = std.readlines()
for line in lines:
if line.startswith("FROM"):
lines.remove(line)
temp_dir = tempfile.mkdtemp(prefix="tmp-stimela-{:s}-".format(label), dir=destdir)
xrun("cp", ["-r", "{:s}/Dockerfile {:s}/src".format(dirname, dirname), temp_dir])
dockerfile = "{:s}/Dockerfile".format(temp_dir)
with open(dockerfile, "w") as std:
std.write("{:s}\n".format(_from))
for line in lines:
std.write(line)
return temp_dir, dockerfile
def get_base_images(logfile, index=1):
with open(logfile) as std:
string = std.read()
separator = "[================================DONE==========================]"
log = string.split(separator)[index-1]
images = []
for line in log.split("\n"):
if line.find("<=BASE_IMAGE=>")>0:
tmp = line.split("<=BASE_IMAGE=>")[-1]
image, base = tmp.split("=")
images.append((image.strip(), base))
return images
def icasa(taskname, mult=None, clearstart=False, loadthese=[],**kw0):
"""
runs a CASA task given a list of options.
A given task can be run multiple times with a different options,
in this case the options must be parsed as a list/tuple of dictionaries via mult, e.g
icasa('exportfits',mult=[{'imagename':'img1.image','fitsimage':'image1.fits},{'imagename':'img2.image','fitsimage':'image2.fits}]).
Options you want be common between the multiple commands should be specified as key word args.
"""
# create temp directory from which to run casapy
td = tempfile.mkdtemp(dir='.')
# we want get back to the working directory once casapy is launched
cdir = os.path.realpath('.')
# load modules in loadthese
_load = ""
if "os" not in loadthese or "import os" not in loadthese:
loadthese.append("os")
if loadthese:
exclude = filter(lambda line: line.startswith("import") or line.startswith("from"), loadthese)
for line in loadthese:
if line not in exclude:
line = "import %s"%line
_load += "%s\n"%line
if mult:
if isinstance(mult,(tuple,list)):
for opts in mult:
opts.update(kw0)
else:
mult.upadte(kw0)
mult = [mult]
else:
mult = [kw0]
run_cmd = """ """
for kw in mult:
task_cmds = []
for key,val in kw.items():
if isinstance(val,(str, unicode)):
val = '"%s"'%val
task_cmds .append('%s=%s'%(key,val))
task_cmds = ", ".join(task_cmds)
run_cmd += """
%s
os.chdir('%s')
%s
%s(%s)
"""%(_load, cdir,"clearstart()" if clearstart else "", taskname, task_cmds)
tf = tempfile.NamedTemporaryFile(suffix='.py')
tf.write(run_cmd)
tf.flush()
t0 = time.time()
# all logging information will be in the pyxis log files
print("Running {}".format(run_cmd))
xrun("cd", [td, "&& casa --nologger --log2term --nologfile -c", tf.name])
# log taskname.last
task_last = '%s.last'%taskname
if os.path.exists(task_last):
with open(task_last,'r') as last:
print('%s.last is: \n %s'%(taskname, last.read()))
# remove temp directory. This also gets rid of the casa log files; so long suckers!
xrun("rm", ["-fr ", td, task_last])
tf.close()
def stack_fits(fitslist, outname, axis=0, ctype=None, keep_old=False, fits=False):
""" Stack a list of fits files along a given axiis.
fitslist: list of fits file to combine
outname: output file name
axis: axis along which to combine the files
fits: If True will axis FITS ordering axes
ctype: Axis label in the fits header (if given, axis will be ignored)
keep_old: Keep component files after combining?
"""
import numpy
try:
import pyfits
except ImportError:
warnings.warn("Could not find pyfits on this system. FITS files will not be stacked")
sys.exit(0)
hdu = pyfits.open(fitslist[0])[0]
hdr = hdu.header
naxis = hdr['NAXIS']
# find axis via CTYPE key
if ctype is not None:
for i in range(1,naxis+1):
if hdr['CTYPE%d'%i].lower().startswith(ctype.lower()):
axis = naxis - i # fits to numpy convention
elif fits:
axis = naxis - axis
fits_ind = abs(axis-naxis)
crval = hdr['CRVAL%d'%fits_ind]
imslice = [slice(None)]*naxis
_sorted = sorted([pyfits.open(fits) for fits in fitslist],
key=lambda a: a[0].header['CRVAL%d'%(naxis-axis)])
# define structure of new FITS file
nn = [ hd[0].header['NAXIS%d'%(naxis-axis)] for hd in _sorted]
shape = list(hdu.data.shape)
shape[axis] = sum(nn)
data = numpy.zeros(shape,dtype=float)
for i, hdu0 in enumerate(_sorted):
h = hdu0[0].header
d = hdu0[0].data
imslice[axis] = range(sum(nn[:i]),sum(nn[:i+1]) )
data[imslice] = d
if crval > h['CRVAL%d'%fits_ind]:
crval = h['CRVAL%d'%fits_ind]
# update header
hdr['CRVAL%d'%fits_ind] = crval
hdr['CRPIX%d'%fits_ind] = 1
pyfits.writeto(outname, data, hdr, clobber=True)
print("Successfully stacked images. Output image is %s"%outname)
# remove old files
if not keep_old:
for fits in fitslist:
os.system('rm -f %s'%fits)
def substitute_globals(string, globs=None):
sub = set(re.findall('\{(.*?)\}', string))
globs = globs or inspect.currentframe().f_back.f_globals
if sub:
for item in map(str, sub):
string = string.replace("${%s}"%item, globs[item])
return string
else:
return False
def get_imslice(ndim):
imslice = []
for i in xrange(ndim):
if i<ndim-2:
imslice.append(0)
else:
imslice.append(slice(None))
return imslice
def addcol(msname, colname=None, shape=None,
data_desc_type='array', valuetype=None, init_with=0, **kw):
""" add column to MS
msanme : MS to add colmn to
colname : column name
shape : shape
valuetype : data type
data_desc_type : 'scalar' for scalar elements and array for 'array' elements
init_with : value to initialise the column with
"""
import numpy
import pyrap.tables
tab = pyrap.tables.table(msname,readonly=False)
try:
tab.getcol(colname)
print('Column already exists')
except RuntimeError:
print('Attempting to add %s column to %s'%(colname,msname))
from pyrap.tables import maketabdesc
valuetype = valuetype or 'complex'
if shape is None:
dshape = list(tab.getcol('DATA').shape)
shape = dshape[1:]
if data_desc_type=='array':
from pyrap.tables import makearrcoldesc
coldmi = tab.getdminfo('DATA') # God forbid this (or the TIME) column doesn't exist
coldmi['NAME'] = colname.lower()
tab.addcols(maketabdesc(makearrcoldesc(colname,init_with,shape=shape,valuetype=valuetype)),coldmi)
elif data_desc_type=='scalar':
from pyrap.tables import makescacoldesc
coldmi = tab.getdminfo('TIME')
coldmi['NAME'] = colname.lower()
tab.addcols(maketabdesc(makescacoldesc(colname,init_with,valuetype=valuetype)),coldmi)
print('Column added successfuly.')
if init_with:
nrows = dshape[0]
rowchunk = nrows//10 if nrows > 1000 else nrows
for row0 in range(0,nrows,rowchunk):
nr = min(rowchunk,nrows-row0)
dshape[0] = nr
tab.putcol(colname,numpy.ones(dshape,dtype=valuetype)*init_with,row0,nr)
tab.close()
def sumcols(msname, col1=None, col2=None, outcol=None, cols=None, suntract=False):
""" add col1 to col2, or sum columns in 'cols' list.
If subtract, subtract col2 from col1
"""
from pyrap.tables import table
tab = table(msname, readonly=False)
if cols:
data = 0
for col in cols:
data += tab.getcol(col)
else:
if subtract:
data = tab.getcol(col1) - tab.getcol(col2)
else:
data = tab.getcol(col1) + tab.getcol(col2)
rowchunk = nrows//10 if nrows > 1000 else nrows
for row0 in range(0, nrows, rowchunk):
nr = min(rowchunk, nrows-row0)
tab.putcol(outcol, data[row0:row0+nr], row0, nr)
tab.close()
def copycol(msname, fromcol, tocol):
from pyrap.tables import table
tab = table(msname, readonly=False)
data = tab.getcol(fromcol)
if tocol not in tab.colnames():
addcol(msname, tocol)
nrows = tab.nrows()
rowchunk = nrows//10 if nrows > 5000 else nrows
for row0 in range(0, nrows, rowchunk):
nr = min(rowchunk, nrows-row0)
tab.putcol(tocol, data[row0:row0+nr], row0, nr)
tab.close()
def cab_dict_update(dictionary, key=None, value=None, options=None):
if options is None:
options = {key:value}
for key, value in options.items():
dictionary[key] = dictionary.pop(key, None) or value
return dictionary
def compute_vis_noise(msname, sefd, spw_id=0):
"""Computes nominal per-visibility noise"""
from pyrap.tables import table
tab = table(msname)
spwtab = table(msname + "/SPECTRAL_WINDOW")
freq0 = spwtab.getcol("CHAN_FREQ")[spw_id, 0]
wavelength = 300e+6/freq0
bw = spwtab.getcol("CHAN_WIDTH")[spw_id, 0]
dt = tab.getcol("EXPOSURE", 0, 1)[0]
dtf = (tab.getcol("TIME", tab.nrows()-1, 1)-tab.getcol("TIME", 0, 1))[0]
# close tables properly, else the calls below will hang waiting for a lock...
tab.close()
spwtab.close()
print(">>> %s freq %.2f MHz (lambda=%.2fm), bandwidth %.2g kHz, %.2fs integrations, %.2fh synthesis"%(msname, freq0*1e-6, wavelength, bw*1e-3, dt, dtf/3600))
noise = sefd/math.sqrt(abs(2*bw*dt))
print(">>> SEFD of %.2f Jy gives per-visibility noise of %.2f mJy"%(sefd, noise*1000))
return noise
|
SpheMakh/Stimela | stimela/utils/__init__.py | icasa | python | def icasa(taskname, mult=None, clearstart=False, loadthese=[],**kw0):
# create temp directory from which to run casapy
td = tempfile.mkdtemp(dir='.')
# we want get back to the working directory once casapy is launched
cdir = os.path.realpath('.')
# load modules in loadthese
_load = ""
if "os" not in loadthese or "import os" not in loadthese:
loadthese.append("os")
if loadthese:
exclude = filter(lambda line: line.startswith("import") or line.startswith("from"), loadthese)
for line in loadthese:
if line not in exclude:
line = "import %s"%line
_load += "%s\n"%line
if mult:
if isinstance(mult,(tuple,list)):
for opts in mult:
opts.update(kw0)
else:
mult.upadte(kw0)
mult = [mult]
else:
mult = [kw0]
run_cmd = """ """
for kw in mult:
task_cmds = []
for key,val in kw.items():
if isinstance(val,(str, unicode)):
val = '"%s"'%val
task_cmds .append('%s=%s'%(key,val))
task_cmds = ", ".join(task_cmds)
run_cmd += """
%s
os.chdir('%s')
%s
%s(%s)
"""%(_load, cdir,"clearstart()" if clearstart else "", taskname, task_cmds)
tf = tempfile.NamedTemporaryFile(suffix='.py')
tf.write(run_cmd)
tf.flush()
t0 = time.time()
# all logging information will be in the pyxis log files
print("Running {}".format(run_cmd))
xrun("cd", [td, "&& casa --nologger --log2term --nologfile -c", tf.name])
# log taskname.last
task_last = '%s.last'%taskname
if os.path.exists(task_last):
with open(task_last,'r') as last:
print('%s.last is: \n %s'%(taskname, last.read()))
# remove temp directory. This also gets rid of the casa log files; so long suckers!
xrun("rm", ["-fr ", td, task_last])
tf.close() | runs a CASA task given a list of options.
A given task can be run multiple times with a different options,
in this case the options must be parsed as a list/tuple of dictionaries via mult, e.g
icasa('exportfits',mult=[{'imagename':'img1.image','fitsimage':'image1.fits},{'imagename':'img2.image','fitsimage':'image2.fits}]).
Options you want be common between the multiple commands should be specified as key word args. | train | https://github.com/SpheMakh/Stimela/blob/292e80461a0c3498da8e7e987e2891d3ae5981ad/stimela/utils/__init__.py#L187-L255 | [
"def xrun(command, options, log=None, _log_container_as_started=False, logfile=None, timeout=-1, kill_callback=None):\n \"\"\"\n Run something on command line.\n\n Example: _run(\"ls\", [\"-lrt\", \"../\"])\n \"\"\"\n\n cmd = \" \".join([command] + list(map(str, options)) )\n\n def _print_info(msg):\n if msg is None: return\n if log:\n log.info(msg)\n else:\n print(msg)\n\n def _print_warn(msg):\n if msg is None: return\n if log:\n log.warn(msg)\n else:\n print(msg)\n\n _print_info(u\"Running: {0:s}\".format(cmd))\n\n sys.stdout.flush()\n starttime = time.time()\n process = p = None\n try:\n foutname = os.path.join(\"/tmp\", \"stimela_output_{0:s}_{1:f}\".format(hashlib.md5(cmd.encode('utf-8')).hexdigest(), starttime))\n with open(foutname, \"w+\") as fout:\n p = process = subprocess.Popen(cmd,\n stderr=fout,\n stdout=fout,\n shell=True)\n\n def clock_killer(p):\n while process.poll() is None and (timeout >= 0):\n currenttime = time.time()\n if (currenttime - starttime < timeout):\n DEBUG and _print_warn(u\"Clock Reaper: has been running for {0:f}, must finish in {1:f}\".format(currenttime - starttime, timeout))\n else:\n _print_warn(u\"Clock Reaper: Timeout reached for '{0:s}'... 
sending the KILL signal\".format(cmd))\n (kill_callback is not None) and kill_callback()\n time.sleep(INTERRUPT_TIME)\n\n Thread(target=clock_killer, args=tuple([p])).start()\n\n while (process.poll() is None):\n currenttime = time.time()\n DEBUG and _print_info(u\"God mode on: has been running for {0:f}\".format(currenttime - starttime))\n time.sleep(INTERRUPT_TIME) # this is probably not ideal as it interrupts the process every few seconds, \n #check whether there is an alternative with a callback\n assert hasattr(process, \"returncode\"), \"No returncode after termination!\"\n with open(foutname, \"r\") as fout:\n _print_info(fout.read())\n finally:\n if (process is not None) and process.returncode:\n raise StimelaCabRuntimeError('%s: returns errr code %d' % (command, process.returncode))\n"
] | import subprocess
import signal
import os
import sys
import logging
import json
import yaml
import codecs
import time
import tempfile
import inspect
import warnings
import re
import math
from threading import Thread
import unicodedata
import hashlib
#from fcntl import fcntl, F_GETFL, F_SETFL
#from os import O_NONBLOCK, read
DEBUG = False
INTERRUPT_TIME = 1.0 # seconds -- do not want to constantly interrupt the child process
class StimelaCabRuntimeError(RuntimeError): pass
from multiprocessing import Process, Manager, Lock
CPUS = 1
def _logger(level=0, logfile=None):
if logfile and not logging.getLogger("STIMELA"):
logging.basicConfig(filename=logfile)
elif not logging.getLogger("STIMELA"):
logging.basicConfig()
LOGL = {"0": "INFO",
"1": "DEBUG",
"2": "ERROR",
"3": "CRITICAL"}
log = logging.getLogger("STIMELA")
log.setLevel(eval("logging."+LOGL[str(level)]))
return log
def assign(key, value):
frame = inspect.currentframe().f_back
frame.f_globals[key] = value
def xrun(command, options, log=None, _log_container_as_started=False, logfile=None, timeout=-1, kill_callback=None):
"""
Run something on command line.
Example: _run("ls", ["-lrt", "../"])
"""
cmd = " ".join([command] + list(map(str, options)) )
def _print_info(msg):
if msg is None: return
if log:
log.info(msg)
else:
print(msg)
def _print_warn(msg):
if msg is None: return
if log:
log.warn(msg)
else:
print(msg)
_print_info(u"Running: {0:s}".format(cmd))
sys.stdout.flush()
starttime = time.time()
process = p = None
try:
foutname = os.path.join("/tmp", "stimela_output_{0:s}_{1:f}".format(hashlib.md5(cmd.encode('utf-8')).hexdigest(), starttime))
with open(foutname, "w+") as fout:
p = process = subprocess.Popen(cmd,
stderr=fout,
stdout=fout,
shell=True)
def clock_killer(p):
while process.poll() is None and (timeout >= 0):
currenttime = time.time()
if (currenttime - starttime < timeout):
DEBUG and _print_warn(u"Clock Reaper: has been running for {0:f}, must finish in {1:f}".format(currenttime - starttime, timeout))
else:
_print_warn(u"Clock Reaper: Timeout reached for '{0:s}'... sending the KILL signal".format(cmd))
(kill_callback is not None) and kill_callback()
time.sleep(INTERRUPT_TIME)
Thread(target=clock_killer, args=tuple([p])).start()
while (process.poll() is None):
currenttime = time.time()
DEBUG and _print_info(u"God mode on: has been running for {0:f}".format(currenttime - starttime))
time.sleep(INTERRUPT_TIME) # this is probably not ideal as it interrupts the process every few seconds,
#check whether there is an alternative with a callback
assert hasattr(process, "returncode"), "No returncode after termination!"
with open(foutname, "r") as fout:
_print_info(fout.read())
finally:
if (process is not None) and process.returncode:
raise StimelaCabRuntimeError('%s: returns errr code %d' % (command, process.returncode))
def readJson(conf):
with open(conf) as _std:
jdict = yaml.safe_load(_std)
return jdict
def writeJson(config, dictionary):
with codecs.open(config, 'w', 'utf8') as std:
std.write(json.dumps(dictionary, ensure_ascii=False))
def get_Dockerfile_base_image(image):
if os.path.isfile(image):
dockerfile = image
else:
dockerfile = "{:s}/Dockerfile".format(image)
with open(dockerfile, "r") as std:
_from = ""
for line in std.readlines():
if line.startswith("FROM"):
_from = line
return _from
def change_Dockerfile_base_image(path, _from, label, destdir="."):
if os.path.isfile(path):
dockerfile = path
dirname = os.path.dirname(path)
else:
dockerfile = "{:s}/Dockerfile".format(path)
dirname = path
with open(dockerfile, "r") as std:
lines = std.readlines()
for line in lines:
if line.startswith("FROM"):
lines.remove(line)
temp_dir = tempfile.mkdtemp(prefix="tmp-stimela-{:s}-".format(label), dir=destdir)
xrun("cp", ["-r", "{:s}/Dockerfile {:s}/src".format(dirname, dirname), temp_dir])
dockerfile = "{:s}/Dockerfile".format(temp_dir)
with open(dockerfile, "w") as std:
std.write("{:s}\n".format(_from))
for line in lines:
std.write(line)
return temp_dir, dockerfile
def get_base_images(logfile, index=1):
with open(logfile) as std:
string = std.read()
separator = "[================================DONE==========================]"
log = string.split(separator)[index-1]
images = []
for line in log.split("\n"):
if line.find("<=BASE_IMAGE=>")>0:
tmp = line.split("<=BASE_IMAGE=>")[-1]
image, base = tmp.split("=")
images.append((image.strip(), base))
return images
def stack_fits(fitslist, outname, axis=0, ctype=None, keep_old=False, fits=False):
""" Stack a list of fits files along a given axiis.
fitslist: list of fits file to combine
outname: output file name
axis: axis along which to combine the files
fits: If True will axis FITS ordering axes
ctype: Axis label in the fits header (if given, axis will be ignored)
keep_old: Keep component files after combining?
"""
import numpy
try:
import pyfits
except ImportError:
warnings.warn("Could not find pyfits on this system. FITS files will not be stacked")
sys.exit(0)
hdu = pyfits.open(fitslist[0])[0]
hdr = hdu.header
naxis = hdr['NAXIS']
# find axis via CTYPE key
if ctype is not None:
for i in range(1,naxis+1):
if hdr['CTYPE%d'%i].lower().startswith(ctype.lower()):
axis = naxis - i # fits to numpy convention
elif fits:
axis = naxis - axis
fits_ind = abs(axis-naxis)
crval = hdr['CRVAL%d'%fits_ind]
imslice = [slice(None)]*naxis
_sorted = sorted([pyfits.open(fits) for fits in fitslist],
key=lambda a: a[0].header['CRVAL%d'%(naxis-axis)])
# define structure of new FITS file
nn = [ hd[0].header['NAXIS%d'%(naxis-axis)] for hd in _sorted]
shape = list(hdu.data.shape)
shape[axis] = sum(nn)
data = numpy.zeros(shape,dtype=float)
for i, hdu0 in enumerate(_sorted):
h = hdu0[0].header
d = hdu0[0].data
imslice[axis] = range(sum(nn[:i]),sum(nn[:i+1]) )
data[imslice] = d
if crval > h['CRVAL%d'%fits_ind]:
crval = h['CRVAL%d'%fits_ind]
# update header
hdr['CRVAL%d'%fits_ind] = crval
hdr['CRPIX%d'%fits_ind] = 1
pyfits.writeto(outname, data, hdr, clobber=True)
print("Successfully stacked images. Output image is %s"%outname)
# remove old files
if not keep_old:
for fits in fitslist:
os.system('rm -f %s'%fits)
def substitute_globals(string, globs=None):
sub = set(re.findall('\{(.*?)\}', string))
globs = globs or inspect.currentframe().f_back.f_globals
if sub:
for item in map(str, sub):
string = string.replace("${%s}"%item, globs[item])
return string
else:
return False
def get_imslice(ndim):
imslice = []
for i in xrange(ndim):
if i<ndim-2:
imslice.append(0)
else:
imslice.append(slice(None))
return imslice
def addcol(msname, colname=None, shape=None,
data_desc_type='array', valuetype=None, init_with=0, **kw):
""" add column to MS
msanme : MS to add colmn to
colname : column name
shape : shape
valuetype : data type
data_desc_type : 'scalar' for scalar elements and array for 'array' elements
init_with : value to initialise the column with
"""
import numpy
import pyrap.tables
tab = pyrap.tables.table(msname,readonly=False)
try:
tab.getcol(colname)
print('Column already exists')
except RuntimeError:
print('Attempting to add %s column to %s'%(colname,msname))
from pyrap.tables import maketabdesc
valuetype = valuetype or 'complex'
if shape is None:
dshape = list(tab.getcol('DATA').shape)
shape = dshape[1:]
if data_desc_type=='array':
from pyrap.tables import makearrcoldesc
coldmi = tab.getdminfo('DATA') # God forbid this (or the TIME) column doesn't exist
coldmi['NAME'] = colname.lower()
tab.addcols(maketabdesc(makearrcoldesc(colname,init_with,shape=shape,valuetype=valuetype)),coldmi)
elif data_desc_type=='scalar':
from pyrap.tables import makescacoldesc
coldmi = tab.getdminfo('TIME')
coldmi['NAME'] = colname.lower()
tab.addcols(maketabdesc(makescacoldesc(colname,init_with,valuetype=valuetype)),coldmi)
print('Column added successfuly.')
if init_with:
nrows = dshape[0]
rowchunk = nrows//10 if nrows > 1000 else nrows
for row0 in range(0,nrows,rowchunk):
nr = min(rowchunk,nrows-row0)
dshape[0] = nr
tab.putcol(colname,numpy.ones(dshape,dtype=valuetype)*init_with,row0,nr)
tab.close()
def sumcols(msname, col1=None, col2=None, outcol=None, cols=None, suntract=False):
""" add col1 to col2, or sum columns in 'cols' list.
If subtract, subtract col2 from col1
"""
from pyrap.tables import table
tab = table(msname, readonly=False)
if cols:
data = 0
for col in cols:
data += tab.getcol(col)
else:
if subtract:
data = tab.getcol(col1) - tab.getcol(col2)
else:
data = tab.getcol(col1) + tab.getcol(col2)
rowchunk = nrows//10 if nrows > 1000 else nrows
for row0 in range(0, nrows, rowchunk):
nr = min(rowchunk, nrows-row0)
tab.putcol(outcol, data[row0:row0+nr], row0, nr)
tab.close()
def copycol(msname, fromcol, tocol):
from pyrap.tables import table
tab = table(msname, readonly=False)
data = tab.getcol(fromcol)
if tocol not in tab.colnames():
addcol(msname, tocol)
nrows = tab.nrows()
rowchunk = nrows//10 if nrows > 5000 else nrows
for row0 in range(0, nrows, rowchunk):
nr = min(rowchunk, nrows-row0)
tab.putcol(tocol, data[row0:row0+nr], row0, nr)
tab.close()
def cab_dict_update(dictionary, key=None, value=None, options=None):
if options is None:
options = {key:value}
for key, value in options.items():
dictionary[key] = dictionary.pop(key, None) or value
return dictionary
def compute_vis_noise(msname, sefd, spw_id=0):
"""Computes nominal per-visibility noise"""
from pyrap.tables import table
tab = table(msname)
spwtab = table(msname + "/SPECTRAL_WINDOW")
freq0 = spwtab.getcol("CHAN_FREQ")[spw_id, 0]
wavelength = 300e+6/freq0
bw = spwtab.getcol("CHAN_WIDTH")[spw_id, 0]
dt = tab.getcol("EXPOSURE", 0, 1)[0]
dtf = (tab.getcol("TIME", tab.nrows()-1, 1)-tab.getcol("TIME", 0, 1))[0]
# close tables properly, else the calls below will hang waiting for a lock...
tab.close()
spwtab.close()
print(">>> %s freq %.2f MHz (lambda=%.2fm), bandwidth %.2g kHz, %.2fs integrations, %.2fh synthesis"%(msname, freq0*1e-6, wavelength, bw*1e-3, dt, dtf/3600))
noise = sefd/math.sqrt(abs(2*bw*dt))
print(">>> SEFD of %.2f Jy gives per-visibility noise of %.2f mJy"%(sefd, noise*1000))
return noise
|
SpheMakh/Stimela | stimela/utils/__init__.py | stack_fits | python | def stack_fits(fitslist, outname, axis=0, ctype=None, keep_old=False, fits=False):
import numpy
try:
import pyfits
except ImportError:
warnings.warn("Could not find pyfits on this system. FITS files will not be stacked")
sys.exit(0)
hdu = pyfits.open(fitslist[0])[0]
hdr = hdu.header
naxis = hdr['NAXIS']
# find axis via CTYPE key
if ctype is not None:
for i in range(1,naxis+1):
if hdr['CTYPE%d'%i].lower().startswith(ctype.lower()):
axis = naxis - i # fits to numpy convention
elif fits:
axis = naxis - axis
fits_ind = abs(axis-naxis)
crval = hdr['CRVAL%d'%fits_ind]
imslice = [slice(None)]*naxis
_sorted = sorted([pyfits.open(fits) for fits in fitslist],
key=lambda a: a[0].header['CRVAL%d'%(naxis-axis)])
# define structure of new FITS file
nn = [ hd[0].header['NAXIS%d'%(naxis-axis)] for hd in _sorted]
shape = list(hdu.data.shape)
shape[axis] = sum(nn)
data = numpy.zeros(shape,dtype=float)
for i, hdu0 in enumerate(_sorted):
h = hdu0[0].header
d = hdu0[0].data
imslice[axis] = range(sum(nn[:i]),sum(nn[:i+1]) )
data[imslice] = d
if crval > h['CRVAL%d'%fits_ind]:
crval = h['CRVAL%d'%fits_ind]
# update header
hdr['CRVAL%d'%fits_ind] = crval
hdr['CRPIX%d'%fits_ind] = 1
pyfits.writeto(outname, data, hdr, clobber=True)
print("Successfully stacked images. Output image is %s"%outname)
# remove old files
if not keep_old:
for fits in fitslist:
os.system('rm -f %s'%fits) | Stack a list of fits files along a given axiis.
fitslist: list of fits file to combine
outname: output file name
axis: axis along which to combine the files
fits: If True will axis FITS ordering axes
ctype: Axis label in the fits header (if given, axis will be ignored)
keep_old: Keep component files after combining? | train | https://github.com/SpheMakh/Stimela/blob/292e80461a0c3498da8e7e987e2891d3ae5981ad/stimela/utils/__init__.py#L258-L318 | null | import subprocess
import signal
import os
import sys
import logging
import json
import yaml
import codecs
import time
import tempfile
import inspect
import warnings
import re
import math
from threading import Thread
import unicodedata
import hashlib
#from fcntl import fcntl, F_GETFL, F_SETFL
#from os import O_NONBLOCK, read
DEBUG = False
INTERRUPT_TIME = 1.0 # seconds -- do not want to constantly interrupt the child process
class StimelaCabRuntimeError(RuntimeError): pass
from multiprocessing import Process, Manager, Lock
CPUS = 1
def _logger(level=0, logfile=None):
if logfile and not logging.getLogger("STIMELA"):
logging.basicConfig(filename=logfile)
elif not logging.getLogger("STIMELA"):
logging.basicConfig()
LOGL = {"0": "INFO",
"1": "DEBUG",
"2": "ERROR",
"3": "CRITICAL"}
log = logging.getLogger("STIMELA")
log.setLevel(eval("logging."+LOGL[str(level)]))
return log
def assign(key, value):
frame = inspect.currentframe().f_back
frame.f_globals[key] = value
def xrun(command, options, log=None, _log_container_as_started=False, logfile=None, timeout=-1, kill_callback=None):
"""
Run something on command line.
Example: _run("ls", ["-lrt", "../"])
"""
cmd = " ".join([command] + list(map(str, options)) )
def _print_info(msg):
if msg is None: return
if log:
log.info(msg)
else:
print(msg)
def _print_warn(msg):
if msg is None: return
if log:
log.warn(msg)
else:
print(msg)
_print_info(u"Running: {0:s}".format(cmd))
sys.stdout.flush()
starttime = time.time()
process = p = None
try:
foutname = os.path.join("/tmp", "stimela_output_{0:s}_{1:f}".format(hashlib.md5(cmd.encode('utf-8')).hexdigest(), starttime))
with open(foutname, "w+") as fout:
p = process = subprocess.Popen(cmd,
stderr=fout,
stdout=fout,
shell=True)
def clock_killer(p):
while process.poll() is None and (timeout >= 0):
currenttime = time.time()
if (currenttime - starttime < timeout):
DEBUG and _print_warn(u"Clock Reaper: has been running for {0:f}, must finish in {1:f}".format(currenttime - starttime, timeout))
else:
_print_warn(u"Clock Reaper: Timeout reached for '{0:s}'... sending the KILL signal".format(cmd))
(kill_callback is not None) and kill_callback()
time.sleep(INTERRUPT_TIME)
Thread(target=clock_killer, args=tuple([p])).start()
while (process.poll() is None):
currenttime = time.time()
DEBUG and _print_info(u"God mode on: has been running for {0:f}".format(currenttime - starttime))
time.sleep(INTERRUPT_TIME) # this is probably not ideal as it interrupts the process every few seconds,
#check whether there is an alternative with a callback
assert hasattr(process, "returncode"), "No returncode after termination!"
with open(foutname, "r") as fout:
_print_info(fout.read())
finally:
if (process is not None) and process.returncode:
raise StimelaCabRuntimeError('%s: returns errr code %d' % (command, process.returncode))
def readJson(conf):
with open(conf) as _std:
jdict = yaml.safe_load(_std)
return jdict
def writeJson(config, dictionary):
with codecs.open(config, 'w', 'utf8') as std:
std.write(json.dumps(dictionary, ensure_ascii=False))
def get_Dockerfile_base_image(image):
if os.path.isfile(image):
dockerfile = image
else:
dockerfile = "{:s}/Dockerfile".format(image)
with open(dockerfile, "r") as std:
_from = ""
for line in std.readlines():
if line.startswith("FROM"):
_from = line
return _from
def change_Dockerfile_base_image(path, _from, label, destdir="."):
if os.path.isfile(path):
dockerfile = path
dirname = os.path.dirname(path)
else:
dockerfile = "{:s}/Dockerfile".format(path)
dirname = path
with open(dockerfile, "r") as std:
lines = std.readlines()
for line in lines:
if line.startswith("FROM"):
lines.remove(line)
temp_dir = tempfile.mkdtemp(prefix="tmp-stimela-{:s}-".format(label), dir=destdir)
xrun("cp", ["-r", "{:s}/Dockerfile {:s}/src".format(dirname, dirname), temp_dir])
dockerfile = "{:s}/Dockerfile".format(temp_dir)
with open(dockerfile, "w") as std:
std.write("{:s}\n".format(_from))
for line in lines:
std.write(line)
return temp_dir, dockerfile
def get_base_images(logfile, index=1):
with open(logfile) as std:
string = std.read()
separator = "[================================DONE==========================]"
log = string.split(separator)[index-1]
images = []
for line in log.split("\n"):
if line.find("<=BASE_IMAGE=>")>0:
tmp = line.split("<=BASE_IMAGE=>")[-1]
image, base = tmp.split("=")
images.append((image.strip(), base))
return images
def icasa(taskname, mult=None, clearstart=False, loadthese=None, **kw0):
    """
    Runs a CASA task given a list of options.

    A given task can be run multiple times with different options; in this
    case the options must be passed as a list/tuple of dictionaries via mult, e.g.
    icasa('exportfits', mult=[{'imagename': 'img1.image', 'fitsimage': 'image1.fits'},
                              {'imagename': 'img2.image', 'fitsimage': 'image2.fits'}]).
    Options common to the multiple commands should be given as keyword args.

    taskname   : CASA task to run
    mult       : dict or list/tuple of dicts of per-run task options
    clearstart : call clearstart() before the task
    loadthese  : extra modules/import statements to load in the casapy script
    """
    # create temp directory from which to run casapy
    td = tempfile.mkdtemp(dir='.')
    # we want to get back to the working directory once casapy is launched
    cdir = os.path.realpath('.')

    # Copy the caller's list: the old mutable default ([]) was appended to on
    # every call, leaking "os" entries between invocations.
    loadthese = list(loadthese) if loadthese else []
    # 'and', not 'or': append "os" only when neither spelling is present
    # (the old 'or' condition was almost always true, importing os twice).
    if "os" not in loadthese and "import os" not in loadthese:
        loadthese.append("os")

    # build the preamble of import statements for the casapy script
    _load = ""
    if loadthese:
        # Materialise as a list: a filter() generator (py3) is exhausted after
        # the first membership test, mis-classifying every later line.
        exclude = [line for line in loadthese
                   if line.startswith("import") or line.startswith("from")]
        for line in loadthese:
            if line not in exclude:
                line = "import %s" % line
            _load += "%s\n" % line

    if mult:
        if isinstance(mult, (tuple, list)):
            for opts in mult:
                opts.update(kw0)
        else:
            mult.update(kw0)  # was mult.upadte(kw0) -> AttributeError
            mult = [mult]
    else:
        mult = [kw0]

    run_cmd = """ """
    for kw in mult:
        task_cmds = []
        for key, val in kw.items():
            if isinstance(val, str):  # py2 'unicode' no longer exists on py3
                val = '"%s"' % val
            task_cmds.append('%s=%s' % (key, val))
        task_cmds = ", ".join(task_cmds)
        run_cmd += """
%s
os.chdir('%s')
%s
%s(%s)
""" % (_load, cdir, "clearstart()" if clearstart else "", taskname, task_cmds)

    # text mode: the NamedTemporaryFile default 'w+b' rejects str on py3
    tf = tempfile.NamedTemporaryFile(mode="w", suffix='.py')
    tf.write(run_cmd)
    tf.flush()
    t0 = time.time()
    # all logging information will be in the pyxis log files
    print("Running {}".format(run_cmd))
    xrun("cd", [td, "&& casa --nologger --log2term --nologfile -c", tf.name])

    # log taskname.last
    task_last = '%s.last' % taskname
    if os.path.exists(task_last):
        with open(task_last, 'r') as last:
            print('%s.last is: \n %s' % (taskname, last.read()))

    # remove temp directory. This also gets rid of the casa log files; so long suckers!
    xrun("rm", ["-fr ", td, task_last])
    tf.close()
def stack_fits(fitslist, outname, axis=0, ctype=None, keep_old=False, fits=False):
    """ Stack a list of FITS files along a given axis.

    fitslist : list of FITS files to combine
    outname  : output file name
    axis     : axis along which to combine the files (numpy convention)
    fits     : if True, *axis* is given in FITS axis ordering instead
    ctype    : axis label in the FITS header (if given, *axis* is ignored)
    keep_old : keep component files after combining?
    """
    import numpy
    try:
        import pyfits
    except ImportError:
        # best-effort behaviour: warn and exit cleanly (status 0) without stacking
        warnings.warn("Could not find pyfits on this system. FITS files will not be stacked")
        sys.exit(0)

    # the first file's header defines the reference geometry of the output
    hdu = pyfits.open(fitslist[0])[0]
    hdr = hdu.header
    naxis = hdr['NAXIS']

    # find axis via CTYPE key
    if ctype is not None:
        for i in range(1,naxis+1):
            if hdr['CTYPE%d'%i].lower().startswith(ctype.lower()):
                axis = naxis - i # fits to numpy convention
    elif fits:
        # caller supplied the axis in FITS (1-based, reversed) ordering
        axis = naxis - axis

    # axis index in FITS numbering, used for the CRVALn/CRPIXn keywords
    fits_ind = abs(axis-naxis)
    crval = hdr['CRVAL%d'%fits_ind]

    imslice = [slice(None)]*naxis
    # sort inputs by their reference value along the stacking axis
    _sorted = sorted([pyfits.open(fits) for fits in fitslist],
                    key=lambda a: a[0].header['CRVAL%d'%(naxis-axis)])

    # define structure of new FITS file: per-file lengths along the stack axis
    nn = [ hd[0].header['NAXIS%d'%(naxis-axis)] for hd in _sorted]
    shape = list(hdu.data.shape)
    shape[axis] = sum(nn)
    data = numpy.zeros(shape,dtype=float)

    for i, hdu0 in enumerate(_sorted):
        h = hdu0[0].header
        d = hdu0[0].data
        # this file occupies rows sum(nn[:i]) .. sum(nn[:i+1]) of the stack axis
        imslice[axis] = range(sum(nn[:i]),sum(nn[:i+1]) )
        # NOTE(review): indexing with a list (not a tuple) is deprecated in
        # recent numpy releases -- confirm against the numpy version in use
        data[imslice] = d
        # track the smallest reference value for the stacked cube's CRVAL
        if crval > h['CRVAL%d'%fits_ind]:
            crval = h['CRVAL%d'%fits_ind]

    # update header so CRPIX=1 corresponds to the smallest CRVAL seen
    hdr['CRVAL%d'%fits_ind] = crval
    hdr['CRPIX%d'%fits_ind] = 1
    # NOTE(review): 'clobber' was renamed 'overwrite' in astropy.io.fits -- confirm
    pyfits.writeto(outname, data, hdr, clobber=True)
    print("Successfully stacked images. Output image is %s"%outname)

    # remove old files
    if not keep_old:
        for fits in fitslist:
            os.system('rm -f %s'%fits)
def substitute_globals(string, globs=None):
    """Expand ``${name}`` tokens in *string* from *globs*.

    globs defaults to the caller's module globals. Returns the expanded
    string, or False when the string contains no tokens (historical
    interface, preserved for existing callers).
    """
    # Match only ${name} tokens. The old pattern '\{(.*?)\}' also caught plain
    # {name} braces, whose names were then looked up in globs and could raise
    # KeyError even though no ${name} substitution was requested.
    sub = set(re.findall(r'\$\{(.*?)\}', string))
    globs = globs or inspect.currentframe().f_back.f_globals
    if sub:
        for item in map(str, sub):
            string = string.replace("${%s}" % item, globs[item])
        return string
    else:
        return False
def get_imslice(ndim):
    """Return an index list selecting element 0 on every axis except the
    trailing two, which are kept whole -- i.e. pick the first 2-D plane of
    an *ndim*-dimensional image cube.
    """
    imslice = []
    for i in range(ndim):  # was xrange: NameError on python 3
        if i < ndim - 2:
            imslice.append(0)
        else:
            imslice.append(slice(None))
    return imslice
def addcol(msname, colname=None, shape=None,
           data_desc_type='array', valuetype=None, init_with=0, **kw):
    """ add column to MS
    msname : MS to add column to
    colname : column name
    shape : shape of one cell (defaults to the shape of a DATA cell)
    valuetype : data type
    data_desc_type : 'scalar' for scalar elements and 'array' for array elements
    init_with : value to initialise the column with
    """
    import numpy
    import pyrap.tables
    tab = pyrap.tables.table(msname, readonly=False)
    try:
        tab.getcol(colname)
        print('Column already exists')
    except RuntimeError:
        print('Attempting to add %s column to %s' % (colname, msname))
        from pyrap.tables import maketabdesc
        valuetype = valuetype or 'complex'
        if shape is None:
            # cell shape defaults to that of a DATA cell (drop the row axis)
            shape = list(tab.getcol('DATA').shape)[1:]
        if data_desc_type == 'array':
            from pyrap.tables import makearrcoldesc
            # clone DATA's data-manager info so the new column is stored the
            # same way (God forbid this, or the TIME, column doesn't exist)
            coldmi = tab.getdminfo('DATA')
            coldmi['NAME'] = colname.lower()
            tab.addcols(maketabdesc(makearrcoldesc(colname, init_with, shape=shape, valuetype=valuetype)), coldmi)
        elif data_desc_type == 'scalar':
            from pyrap.tables import makescacoldesc
            coldmi = tab.getdminfo('TIME')
            coldmi['NAME'] = colname.lower()
            tab.addcols(maketabdesc(makescacoldesc(colname, init_with, valuetype=valuetype)), coldmi)
        print('Column added successfuly.')
        if init_with:
            # Was `nrows = dshape[0]`: dshape only existed when shape was None,
            # so passing an explicit shape with init_with raised NameError.
            nrows = tab.nrows()
            # write the fill value in chunks of ~10% of the table
            rowchunk = nrows // 10 if nrows > 1000 else nrows
            for row0 in range(0, nrows, rowchunk or 1):  # 'or 1': empty table
                nr = min(rowchunk, nrows - row0)
                tab.putcol(colname, numpy.ones([nr] + list(shape), dtype=valuetype) * init_with, row0, nr)
    tab.close()
def sumcols(msname, col1=None, col2=None, outcol=None, cols=None, suntract=False):
    """add col1 to col2, or sum columns in 'cols' list, writing into outcol.
    If suntract (sic -- historical keyword kept for callers), subtract col2
    from col1 instead of adding.
    """
    from pyrap.tables import table
    tab = table(msname, readonly=False)
    # the body always used the correct spelling, which was a NameError;
    # alias it here so the published (misspelled) keyword keeps working
    subtract = suntract
    if cols:
        data = 0
        for col in cols:
            data += tab.getcol(col)
    else:
        if subtract:
            data = tab.getcol(col1) - tab.getcol(col2)
        else:
            data = tab.getcol(col1) + tab.getcol(col2)
    nrows = tab.nrows()  # was never assigned -> NameError before any write
    # write back in chunks of ~10% of the table
    rowchunk = nrows // 10 if nrows > 1000 else nrows
    for row0 in range(0, nrows, rowchunk or 1):  # 'or 1' guards empty tables
        nr = min(rowchunk, nrows - row0)
        tab.putcol(outcol, data[row0:row0 + nr], row0, nr)
    tab.close()
def copycol(msname, fromcol, tocol):
    """Copy column *fromcol* of MS *msname* into *tocol*, creating *tocol*
    (via addcol) when it does not exist yet.
    """
    from pyrap.tables import table
    tab = table(msname, readonly=False)
    data = tab.getcol(fromcol)
    if tocol not in tab.colnames():
        addcol(msname, tocol)
    nrows = tab.nrows()
    # write back in chunks of ~10% of the table
    rowchunk = nrows // 10 if nrows > 5000 else nrows
    # 'or 1': an empty table gave rowchunk == 0 and range() raises on step 0
    for row0 in range(0, nrows, rowchunk or 1):
        nr = min(rowchunk, nrows - row0)
        tab.putcol(tocol, data[row0:row0 + nr], row0, nr)
    tab.close()
def cab_dict_update(dictionary, key=None, value=None, options=None):
    """Update *dictionary* in place, keeping any value already set.

    Either a single key/value pair or an *options* dict may be given. An
    existing (non-None) entry wins over the new value. Returns the mutated
    dictionary.
    """
    if options is None:
        options = {key: value}
    for key, value in options.items():
        existing = dictionary.pop(key, None)
        # 'is not None' instead of truthiness: a legitimate falsy setting
        # (0, False, "") must not be clobbered by the incoming value
        dictionary[key] = existing if existing is not None else value
    return dictionary
def compute_vis_noise(msname, sefd, spw_id=0):
    """Computes nominal per-visibility noise.

    msname : measurement set to inspect
    sefd   : system equivalent flux density [Jy]
    spw_id : spectral window index (default 0)

    Returns the per-visibility noise in Jy (radiometer equation).
    """
    from pyrap.tables import table
    tab = table(msname)
    spwtab = table(msname + "/SPECTRAL_WINDOW")

    freq0 = spwtab.getcol("CHAN_FREQ")[spw_id, 0]  # first channel frequency [Hz]
    wavelength = 300e+6/freq0  # 300e+6 == 3e8 (c in m/s), so this is metres
    bw = spwtab.getcol("CHAN_WIDTH")[spw_id, 0]  # channel width [Hz]
    dt = tab.getcol("EXPOSURE", 0, 1)[0]  # integration time of the first row [s]
    # total time span: last TIME minus first TIME (assumes rows time-ordered -- TODO confirm)
    dtf = (tab.getcol("TIME", tab.nrows()-1, 1)-tab.getcol("TIME", 0, 1))[0]
    # close tables properly, else the calls below will hang waiting for a lock...
    tab.close()
    spwtab.close()
    print(">>> %s freq %.2f MHz (lambda=%.2fm), bandwidth %.2g kHz, %.2fs integrations, %.2fh synthesis"%(msname, freq0*1e-6, wavelength, bw*1e-3, dt, dtf/3600))
    # radiometer equation for a single visibility: sigma = SEFD / sqrt(2 * bw * dt)
    noise = sefd/math.sqrt(abs(2*bw*dt))
    print(">>> SEFD of %.2f Jy gives per-visibility noise of %.2f mJy"%(sefd, noise*1000))
    return noise
|
SpheMakh/Stimela | stimela/utils/__init__.py | addcol | python | def addcol(msname, colname=None, shape=None,
data_desc_type='array', valuetype=None, init_with=0, **kw):
import numpy
import pyrap.tables
tab = pyrap.tables.table(msname,readonly=False)
try:
tab.getcol(colname)
print('Column already exists')
except RuntimeError:
print('Attempting to add %s column to %s'%(colname,msname))
from pyrap.tables import maketabdesc
valuetype = valuetype or 'complex'
if shape is None:
dshape = list(tab.getcol('DATA').shape)
shape = dshape[1:]
if data_desc_type=='array':
from pyrap.tables import makearrcoldesc
coldmi = tab.getdminfo('DATA') # God forbid this (or the TIME) column doesn't exist
coldmi['NAME'] = colname.lower()
tab.addcols(maketabdesc(makearrcoldesc(colname,init_with,shape=shape,valuetype=valuetype)),coldmi)
elif data_desc_type=='scalar':
from pyrap.tables import makescacoldesc
coldmi = tab.getdminfo('TIME')
coldmi['NAME'] = colname.lower()
tab.addcols(maketabdesc(makescacoldesc(colname,init_with,valuetype=valuetype)),coldmi)
print('Column added successfuly.')
if init_with:
nrows = dshape[0]
rowchunk = nrows//10 if nrows > 1000 else nrows
for row0 in range(0,nrows,rowchunk):
nr = min(rowchunk,nrows-row0)
dshape[0] = nr
tab.putcol(colname,numpy.ones(dshape,dtype=valuetype)*init_with,row0,nr)
tab.close() | add column to MS
msname : MS to add column to
colname : column name
shape : shape
valuetype : data type
data_desc_type : 'scalar' for scalar elements and array for 'array' elements
init_with : value to initialise the column with | train | https://github.com/SpheMakh/Stimela/blob/292e80461a0c3498da8e7e987e2891d3ae5981ad/stimela/utils/__init__.py#L343-L394 | null | import subprocess
import signal
import os
import sys
import logging
import json
import yaml
import codecs
import time
import tempfile
import inspect
import warnings
import re
import math
from threading import Thread
import unicodedata
import hashlib
#from fcntl import fcntl, F_GETFL, F_SETFL
#from os import O_NONBLOCK, read
DEBUG = False
INTERRUPT_TIME = 1.0 # seconds -- do not want to constantly interrupt the child process
class StimelaCabRuntimeError(RuntimeError): pass
from multiprocessing import Process, Manager, Lock
CPUS = 1
def _logger(level=0, logfile=None):
if logfile and not logging.getLogger("STIMELA"):
logging.basicConfig(filename=logfile)
elif not logging.getLogger("STIMELA"):
logging.basicConfig()
LOGL = {"0": "INFO",
"1": "DEBUG",
"2": "ERROR",
"3": "CRITICAL"}
log = logging.getLogger("STIMELA")
log.setLevel(eval("logging."+LOGL[str(level)]))
return log
def assign(key, value):
    """Bind *value* to the name *key* in the caller's global namespace."""
    caller = inspect.currentframe().f_back
    caller.f_globals[key] = value
def xrun(command, options, log=None, _log_container_as_started=False, logfile=None, timeout=-1, kill_callback=None):
    """
    Run something on command line.

    Example: _run("ls", ["-lrt", "../"])

    command       : executable name
    options       : list of arguments; stringified and space-joined into one
                    shell command (shell=True below)
    log           : optional logger; falls back to print()
    timeout       : seconds before kill_callback is invoked; negative disables
                    the watchdog thread
    kill_callback : invoked when the timeout expires (e.g. stops a container)

    Raises StimelaCabRuntimeError when the child exits with a non-zero code.
    """

    cmd = " ".join([command] + list(map(str, options)) )

    # small print-or-log shims so callers without a logger still see output
    def _print_info(msg):
        if msg is None: return
        if log:
            log.info(msg)
        else:
            print(msg)

    def _print_warn(msg):
        if msg is None: return
        if log:
            log.warn(msg)
        else:
            print(msg)

    _print_info(u"Running: {0:s}".format(cmd))

    sys.stdout.flush()
    starttime = time.time()
    process = p = None
    try:
        # child stdout/stderr are captured to a per-command temp file and
        # replayed once the process terminates
        foutname = os.path.join("/tmp", "stimela_output_{0:s}_{1:f}".format(hashlib.md5(cmd.encode('utf-8')).hexdigest(), starttime))
        with open(foutname, "w+") as fout:
            p = process = subprocess.Popen(cmd,
                stderr=fout,
                stdout=fout,
                shell=True)

            # watchdog: polls the child and fires kill_callback past the timeout.
            # NOTE(review): it closes over `process`/`timeout`; its `p` parameter
            # is unused -- confirm whether that was intentional
            def clock_killer(p):
                while process.poll() is None and (timeout >= 0):
                    currenttime = time.time()
                    if (currenttime - starttime < timeout):
                        DEBUG and _print_warn(u"Clock Reaper: has been running for {0:f}, must finish in {1:f}".format(currenttime - starttime, timeout))
                    else:
                        _print_warn(u"Clock Reaper: Timeout reached for '{0:s}'... sending the KILL signal".format(cmd))
                        (kill_callback is not None) and kill_callback()
                    time.sleep(INTERRUPT_TIME)

            Thread(target=clock_killer, args=tuple([p])).start()

            # main thread also polls until the child exits
            while (process.poll() is None):
                currenttime = time.time()
                DEBUG and _print_info(u"God mode on: has been running for {0:f}".format(currenttime - starttime))
                time.sleep(INTERRUPT_TIME) # this is probably not ideal as it interrupts the process every few seconds,
                #check whether there is an alternative with a callback
            assert hasattr(process, "returncode"), "No returncode after termination!"
        # replay the captured output now that the child has finished
        with open(foutname, "r") as fout:
            _print_info(fout.read())
    finally:
        if (process is not None) and process.returncode:
            raise StimelaCabRuntimeError('%s: returns errr code %d' % (command, process.returncode))
def readJson(conf):
with open(conf) as _std:
jdict = yaml.safe_load(_std)
return jdict
def writeJson(config, dictionary):
with codecs.open(config, 'w', 'utf8') as std:
std.write(json.dumps(dictionary, ensure_ascii=False))
def get_Dockerfile_base_image(image):
if os.path.isfile(image):
dockerfile = image
else:
dockerfile = "{:s}/Dockerfile".format(image)
with open(dockerfile, "r") as std:
_from = ""
for line in std.readlines():
if line.startswith("FROM"):
_from = line
return _from
def change_Dockerfile_base_image(path, _from, label, destdir="."):
if os.path.isfile(path):
dockerfile = path
dirname = os.path.dirname(path)
else:
dockerfile = "{:s}/Dockerfile".format(path)
dirname = path
with open(dockerfile, "r") as std:
lines = std.readlines()
for line in lines:
if line.startswith("FROM"):
lines.remove(line)
temp_dir = tempfile.mkdtemp(prefix="tmp-stimela-{:s}-".format(label), dir=destdir)
xrun("cp", ["-r", "{:s}/Dockerfile {:s}/src".format(dirname, dirname), temp_dir])
dockerfile = "{:s}/Dockerfile".format(temp_dir)
with open(dockerfile, "w") as std:
std.write("{:s}\n".format(_from))
for line in lines:
std.write(line)
return temp_dir, dockerfile
def get_base_images(logfile, index=1):
with open(logfile) as std:
string = std.read()
separator = "[================================DONE==========================]"
log = string.split(separator)[index-1]
images = []
for line in log.split("\n"):
if line.find("<=BASE_IMAGE=>")>0:
tmp = line.split("<=BASE_IMAGE=>")[-1]
image, base = tmp.split("=")
images.append((image.strip(), base))
return images
def icasa(taskname, mult=None, clearstart=False, loadthese=[],**kw0):
"""
runs a CASA task given a list of options.
A given task can be run multiple times with a different options,
in this case the options must be parsed as a list/tuple of dictionaries via mult, e.g
icasa('exportfits',mult=[{'imagename':'img1.image','fitsimage':'image1.fits},{'imagename':'img2.image','fitsimage':'image2.fits}]).
Options you want be common between the multiple commands should be specified as key word args.
"""
# create temp directory from which to run casapy
td = tempfile.mkdtemp(dir='.')
# we want get back to the working directory once casapy is launched
cdir = os.path.realpath('.')
# load modules in loadthese
_load = ""
if "os" not in loadthese or "import os" not in loadthese:
loadthese.append("os")
if loadthese:
exclude = filter(lambda line: line.startswith("import") or line.startswith("from"), loadthese)
for line in loadthese:
if line not in exclude:
line = "import %s"%line
_load += "%s\n"%line
if mult:
if isinstance(mult,(tuple,list)):
for opts in mult:
opts.update(kw0)
else:
mult.upadte(kw0)
mult = [mult]
else:
mult = [kw0]
run_cmd = """ """
for kw in mult:
task_cmds = []
for key,val in kw.items():
if isinstance(val,(str, unicode)):
val = '"%s"'%val
task_cmds .append('%s=%s'%(key,val))
task_cmds = ", ".join(task_cmds)
run_cmd += """
%s
os.chdir('%s')
%s
%s(%s)
"""%(_load, cdir,"clearstart()" if clearstart else "", taskname, task_cmds)
tf = tempfile.NamedTemporaryFile(suffix='.py')
tf.write(run_cmd)
tf.flush()
t0 = time.time()
# all logging information will be in the pyxis log files
print("Running {}".format(run_cmd))
xrun("cd", [td, "&& casa --nologger --log2term --nologfile -c", tf.name])
# log taskname.last
task_last = '%s.last'%taskname
if os.path.exists(task_last):
with open(task_last,'r') as last:
print('%s.last is: \n %s'%(taskname, last.read()))
# remove temp directory. This also gets rid of the casa log files; so long suckers!
xrun("rm", ["-fr ", td, task_last])
tf.close()
def stack_fits(fitslist, outname, axis=0, ctype=None, keep_old=False, fits=False):
""" Stack a list of fits files along a given axiis.
fitslist: list of fits file to combine
outname: output file name
axis: axis along which to combine the files
fits: If True will axis FITS ordering axes
ctype: Axis label in the fits header (if given, axis will be ignored)
keep_old: Keep component files after combining?
"""
import numpy
try:
import pyfits
except ImportError:
warnings.warn("Could not find pyfits on this system. FITS files will not be stacked")
sys.exit(0)
hdu = pyfits.open(fitslist[0])[0]
hdr = hdu.header
naxis = hdr['NAXIS']
# find axis via CTYPE key
if ctype is not None:
for i in range(1,naxis+1):
if hdr['CTYPE%d'%i].lower().startswith(ctype.lower()):
axis = naxis - i # fits to numpy convention
elif fits:
axis = naxis - axis
fits_ind = abs(axis-naxis)
crval = hdr['CRVAL%d'%fits_ind]
imslice = [slice(None)]*naxis
_sorted = sorted([pyfits.open(fits) for fits in fitslist],
key=lambda a: a[0].header['CRVAL%d'%(naxis-axis)])
# define structure of new FITS file
nn = [ hd[0].header['NAXIS%d'%(naxis-axis)] for hd in _sorted]
shape = list(hdu.data.shape)
shape[axis] = sum(nn)
data = numpy.zeros(shape,dtype=float)
for i, hdu0 in enumerate(_sorted):
h = hdu0[0].header
d = hdu0[0].data
imslice[axis] = range(sum(nn[:i]),sum(nn[:i+1]) )
data[imslice] = d
if crval > h['CRVAL%d'%fits_ind]:
crval = h['CRVAL%d'%fits_ind]
# update header
hdr['CRVAL%d'%fits_ind] = crval
hdr['CRPIX%d'%fits_ind] = 1
pyfits.writeto(outname, data, hdr, clobber=True)
print("Successfully stacked images. Output image is %s"%outname)
# remove old files
if not keep_old:
for fits in fitslist:
os.system('rm -f %s'%fits)
def substitute_globals(string, globs=None):
    """Expand ``${name}`` tokens in *string* from *globs*.

    globs defaults to the caller's module globals. Returns the expanded
    string, or False when the string contains no tokens (historical
    interface, preserved for existing callers).
    """
    # Match only ${name} tokens. The old pattern '\{(.*?)\}' also caught plain
    # {name} braces, whose names were then looked up in globs and could raise
    # KeyError even though no ${name} substitution was requested.
    sub = set(re.findall(r'\$\{(.*?)\}', string))
    globs = globs or inspect.currentframe().f_back.f_globals
    if sub:
        for item in map(str, sub):
            string = string.replace("${%s}" % item, globs[item])
        return string
    else:
        return False
def get_imslice(ndim):
    """Return an index list selecting element 0 on every axis except the
    trailing two, which are kept whole -- i.e. pick the first 2-D plane of
    an *ndim*-dimensional image cube.
    """
    imslice = []
    for i in range(ndim):  # was xrange: NameError on python 3
        if i < ndim - 2:
            imslice.append(0)
        else:
            imslice.append(slice(None))
    return imslice
def sumcols(msname, col1=None, col2=None, outcol=None, cols=None, suntract=False):
    """add col1 to col2, or sum columns in 'cols' list, writing into outcol.
    If suntract (sic -- historical keyword kept for callers), subtract col2
    from col1 instead of adding.
    """
    from pyrap.tables import table
    tab = table(msname, readonly=False)
    # the body always used the correct spelling, which was a NameError;
    # alias it here so the published (misspelled) keyword keeps working
    subtract = suntract
    if cols:
        data = 0
        for col in cols:
            data += tab.getcol(col)
    else:
        if subtract:
            data = tab.getcol(col1) - tab.getcol(col2)
        else:
            data = tab.getcol(col1) + tab.getcol(col2)
    nrows = tab.nrows()  # was never assigned -> NameError before any write
    # write back in chunks of ~10% of the table
    rowchunk = nrows // 10 if nrows > 1000 else nrows
    for row0 in range(0, nrows, rowchunk or 1):  # 'or 1' guards empty tables
        nr = min(rowchunk, nrows - row0)
        tab.putcol(outcol, data[row0:row0 + nr], row0, nr)
    tab.close()
def copycol(msname, fromcol, tocol):
    """Copy column *fromcol* of MS *msname* into *tocol*, creating *tocol*
    (via addcol) when it does not exist yet.
    """
    from pyrap.tables import table
    tab = table(msname, readonly=False)
    data = tab.getcol(fromcol)
    if tocol not in tab.colnames():
        addcol(msname, tocol)
    nrows = tab.nrows()
    # write back in chunks of ~10% of the table
    rowchunk = nrows // 10 if nrows > 5000 else nrows
    # 'or 1': an empty table gave rowchunk == 0 and range() raises on step 0
    for row0 in range(0, nrows, rowchunk or 1):
        nr = min(rowchunk, nrows - row0)
        tab.putcol(tocol, data[row0:row0 + nr], row0, nr)
    tab.close()
def cab_dict_update(dictionary, key=None, value=None, options=None):
    """Update *dictionary* in place, keeping any value already set.

    Either a single key/value pair or an *options* dict may be given. An
    existing (non-None) entry wins over the new value. Returns the mutated
    dictionary.
    """
    if options is None:
        options = {key: value}
    for key, value in options.items():
        existing = dictionary.pop(key, None)
        # 'is not None' instead of truthiness: a legitimate falsy setting
        # (0, False, "") must not be clobbered by the incoming value
        dictionary[key] = existing if existing is not None else value
    return dictionary
def compute_vis_noise(msname, sefd, spw_id=0):
"""Computes nominal per-visibility noise"""
from pyrap.tables import table
tab = table(msname)
spwtab = table(msname + "/SPECTRAL_WINDOW")
freq0 = spwtab.getcol("CHAN_FREQ")[spw_id, 0]
wavelength = 300e+6/freq0
bw = spwtab.getcol("CHAN_WIDTH")[spw_id, 0]
dt = tab.getcol("EXPOSURE", 0, 1)[0]
dtf = (tab.getcol("TIME", tab.nrows()-1, 1)-tab.getcol("TIME", 0, 1))[0]
# close tables properly, else the calls below will hang waiting for a lock...
tab.close()
spwtab.close()
print(">>> %s freq %.2f MHz (lambda=%.2fm), bandwidth %.2g kHz, %.2fs integrations, %.2fh synthesis"%(msname, freq0*1e-6, wavelength, bw*1e-3, dt, dtf/3600))
noise = sefd/math.sqrt(abs(2*bw*dt))
print(">>> SEFD of %.2f Jy gives per-visibility noise of %.2f mJy"%(sefd, noise*1000))
return noise
|
SpheMakh/Stimela | stimela/utils/__init__.py | sumcols | python | def sumcols(msname, col1=None, col2=None, outcol=None, cols=None, suntract=False):
from pyrap.tables import table
tab = table(msname, readonly=False)
if cols:
data = 0
for col in cols:
data += tab.getcol(col)
else:
if subtract:
data = tab.getcol(col1) - tab.getcol(col2)
else:
data = tab.getcol(col1) + tab.getcol(col2)
rowchunk = nrows//10 if nrows > 1000 else nrows
for row0 in range(0, nrows, rowchunk):
nr = min(rowchunk, nrows-row0)
tab.putcol(outcol, data[row0:row0+nr], row0, nr)
tab.close() | add col1 to col2, or sum columns in 'cols' list.
If subtract, subtract col2 from col1 | train | https://github.com/SpheMakh/Stimela/blob/292e80461a0c3498da8e7e987e2891d3ae5981ad/stimela/utils/__init__.py#L397-L420 | null | import subprocess
import signal
import os
import sys
import logging
import json
import yaml
import codecs
import time
import tempfile
import inspect
import warnings
import re
import math
from threading import Thread
import unicodedata
import hashlib
#from fcntl import fcntl, F_GETFL, F_SETFL
#from os import O_NONBLOCK, read
DEBUG = False
INTERRUPT_TIME = 1.0 # seconds -- do not want to constantly interrupt the child process
class StimelaCabRuntimeError(RuntimeError): pass
from multiprocessing import Process, Manager, Lock
CPUS = 1
def _logger(level=0, logfile=None):
if logfile and not logging.getLogger("STIMELA"):
logging.basicConfig(filename=logfile)
elif not logging.getLogger("STIMELA"):
logging.basicConfig()
LOGL = {"0": "INFO",
"1": "DEBUG",
"2": "ERROR",
"3": "CRITICAL"}
log = logging.getLogger("STIMELA")
log.setLevel(eval("logging."+LOGL[str(level)]))
return log
def assign(key, value):
frame = inspect.currentframe().f_back
frame.f_globals[key] = value
def xrun(command, options, log=None, _log_container_as_started=False, logfile=None, timeout=-1, kill_callback=None):
"""
Run something on command line.
Example: _run("ls", ["-lrt", "../"])
"""
cmd = " ".join([command] + list(map(str, options)) )
def _print_info(msg):
if msg is None: return
if log:
log.info(msg)
else:
print(msg)
def _print_warn(msg):
if msg is None: return
if log:
log.warn(msg)
else:
print(msg)
_print_info(u"Running: {0:s}".format(cmd))
sys.stdout.flush()
starttime = time.time()
process = p = None
try:
foutname = os.path.join("/tmp", "stimela_output_{0:s}_{1:f}".format(hashlib.md5(cmd.encode('utf-8')).hexdigest(), starttime))
with open(foutname, "w+") as fout:
p = process = subprocess.Popen(cmd,
stderr=fout,
stdout=fout,
shell=True)
def clock_killer(p):
while process.poll() is None and (timeout >= 0):
currenttime = time.time()
if (currenttime - starttime < timeout):
DEBUG and _print_warn(u"Clock Reaper: has been running for {0:f}, must finish in {1:f}".format(currenttime - starttime, timeout))
else:
_print_warn(u"Clock Reaper: Timeout reached for '{0:s}'... sending the KILL signal".format(cmd))
(kill_callback is not None) and kill_callback()
time.sleep(INTERRUPT_TIME)
Thread(target=clock_killer, args=tuple([p])).start()
while (process.poll() is None):
currenttime = time.time()
DEBUG and _print_info(u"God mode on: has been running for {0:f}".format(currenttime - starttime))
time.sleep(INTERRUPT_TIME) # this is probably not ideal as it interrupts the process every few seconds,
#check whether there is an alternative with a callback
assert hasattr(process, "returncode"), "No returncode after termination!"
with open(foutname, "r") as fout:
_print_info(fout.read())
finally:
if (process is not None) and process.returncode:
raise StimelaCabRuntimeError('%s: returns errr code %d' % (command, process.returncode))
def readJson(conf):
with open(conf) as _std:
jdict = yaml.safe_load(_std)
return jdict
def writeJson(config, dictionary):
with codecs.open(config, 'w', 'utf8') as std:
std.write(json.dumps(dictionary, ensure_ascii=False))
def get_Dockerfile_base_image(image):
if os.path.isfile(image):
dockerfile = image
else:
dockerfile = "{:s}/Dockerfile".format(image)
with open(dockerfile, "r") as std:
_from = ""
for line in std.readlines():
if line.startswith("FROM"):
_from = line
return _from
def change_Dockerfile_base_image(path, _from, label, destdir="."):
if os.path.isfile(path):
dockerfile = path
dirname = os.path.dirname(path)
else:
dockerfile = "{:s}/Dockerfile".format(path)
dirname = path
with open(dockerfile, "r") as std:
lines = std.readlines()
for line in lines:
if line.startswith("FROM"):
lines.remove(line)
temp_dir = tempfile.mkdtemp(prefix="tmp-stimela-{:s}-".format(label), dir=destdir)
xrun("cp", ["-r", "{:s}/Dockerfile {:s}/src".format(dirname, dirname), temp_dir])
dockerfile = "{:s}/Dockerfile".format(temp_dir)
with open(dockerfile, "w") as std:
std.write("{:s}\n".format(_from))
for line in lines:
std.write(line)
return temp_dir, dockerfile
def get_base_images(logfile, index=1):
with open(logfile) as std:
string = std.read()
separator = "[================================DONE==========================]"
log = string.split(separator)[index-1]
images = []
for line in log.split("\n"):
if line.find("<=BASE_IMAGE=>")>0:
tmp = line.split("<=BASE_IMAGE=>")[-1]
image, base = tmp.split("=")
images.append((image.strip(), base))
return images
def icasa(taskname, mult=None, clearstart=False, loadthese=[],**kw0):
"""
runs a CASA task given a list of options.
A given task can be run multiple times with a different options,
in this case the options must be parsed as a list/tuple of dictionaries via mult, e.g
icasa('exportfits',mult=[{'imagename':'img1.image','fitsimage':'image1.fits},{'imagename':'img2.image','fitsimage':'image2.fits}]).
Options you want be common between the multiple commands should be specified as key word args.
"""
# create temp directory from which to run casapy
td = tempfile.mkdtemp(dir='.')
# we want get back to the working directory once casapy is launched
cdir = os.path.realpath('.')
# load modules in loadthese
_load = ""
if "os" not in loadthese or "import os" not in loadthese:
loadthese.append("os")
if loadthese:
exclude = filter(lambda line: line.startswith("import") or line.startswith("from"), loadthese)
for line in loadthese:
if line not in exclude:
line = "import %s"%line
_load += "%s\n"%line
if mult:
if isinstance(mult,(tuple,list)):
for opts in mult:
opts.update(kw0)
else:
mult.upadte(kw0)
mult = [mult]
else:
mult = [kw0]
run_cmd = """ """
for kw in mult:
task_cmds = []
for key,val in kw.items():
if isinstance(val,(str, unicode)):
val = '"%s"'%val
task_cmds .append('%s=%s'%(key,val))
task_cmds = ", ".join(task_cmds)
run_cmd += """
%s
os.chdir('%s')
%s
%s(%s)
"""%(_load, cdir,"clearstart()" if clearstart else "", taskname, task_cmds)
tf = tempfile.NamedTemporaryFile(suffix='.py')
tf.write(run_cmd)
tf.flush()
t0 = time.time()
# all logging information will be in the pyxis log files
print("Running {}".format(run_cmd))
xrun("cd", [td, "&& casa --nologger --log2term --nologfile -c", tf.name])
# log taskname.last
task_last = '%s.last'%taskname
if os.path.exists(task_last):
with open(task_last,'r') as last:
print('%s.last is: \n %s'%(taskname, last.read()))
# remove temp directory. This also gets rid of the casa log files; so long suckers!
xrun("rm", ["-fr ", td, task_last])
tf.close()
def stack_fits(fitslist, outname, axis=0, ctype=None, keep_old=False, fits=False):
""" Stack a list of fits files along a given axiis.
fitslist: list of fits file to combine
outname: output file name
axis: axis along which to combine the files
fits: If True will axis FITS ordering axes
ctype: Axis label in the fits header (if given, axis will be ignored)
keep_old: Keep component files after combining?
"""
import numpy
try:
import pyfits
except ImportError:
warnings.warn("Could not find pyfits on this system. FITS files will not be stacked")
sys.exit(0)
hdu = pyfits.open(fitslist[0])[0]
hdr = hdu.header
naxis = hdr['NAXIS']
# find axis via CTYPE key
if ctype is not None:
for i in range(1,naxis+1):
if hdr['CTYPE%d'%i].lower().startswith(ctype.lower()):
axis = naxis - i # fits to numpy convention
elif fits:
axis = naxis - axis
fits_ind = abs(axis-naxis)
crval = hdr['CRVAL%d'%fits_ind]
imslice = [slice(None)]*naxis
_sorted = sorted([pyfits.open(fits) for fits in fitslist],
key=lambda a: a[0].header['CRVAL%d'%(naxis-axis)])
# define structure of new FITS file
nn = [ hd[0].header['NAXIS%d'%(naxis-axis)] for hd in _sorted]
shape = list(hdu.data.shape)
shape[axis] = sum(nn)
data = numpy.zeros(shape,dtype=float)
for i, hdu0 in enumerate(_sorted):
h = hdu0[0].header
d = hdu0[0].data
imslice[axis] = range(sum(nn[:i]),sum(nn[:i+1]) )
data[imslice] = d
if crval > h['CRVAL%d'%fits_ind]:
crval = h['CRVAL%d'%fits_ind]
# update header
hdr['CRVAL%d'%fits_ind] = crval
hdr['CRPIX%d'%fits_ind] = 1
pyfits.writeto(outname, data, hdr, clobber=True)
print("Successfully stacked images. Output image is %s"%outname)
# remove old files
if not keep_old:
for fits in fitslist:
os.system('rm -f %s'%fits)
def substitute_globals(string, globs=None):
    """Expand ``${name}`` tokens in *string* from *globs*.

    globs defaults to the caller's module globals. Returns the expanded
    string, or False when the string contains no tokens (historical
    interface, preserved for existing callers).
    """
    # Match only ${name} tokens. The old pattern '\{(.*?)\}' also caught plain
    # {name} braces, whose names were then looked up in globs and could raise
    # KeyError even though no ${name} substitution was requested.
    sub = set(re.findall(r'\$\{(.*?)\}', string))
    globs = globs or inspect.currentframe().f_back.f_globals
    if sub:
        for item in map(str, sub):
            string = string.replace("${%s}" % item, globs[item])
        return string
    else:
        return False
def get_imslice(ndim):
    """Return an index list selecting element 0 on every axis except the
    trailing two, which are kept whole -- i.e. pick the first 2-D plane of
    an *ndim*-dimensional image cube.
    """
    imslice = []
    for i in range(ndim):  # was xrange: NameError on python 3
        if i < ndim - 2:
            imslice.append(0)
        else:
            imslice.append(slice(None))
    return imslice
def addcol(msname, colname=None, shape=None,
           data_desc_type='array', valuetype=None, init_with=0, **kw):
    """ add column to MS
    msname : MS to add column to
    colname : column name
    shape : shape of one cell (defaults to the shape of a DATA cell)
    valuetype : data type
    data_desc_type : 'scalar' for scalar elements and 'array' for array elements
    init_with : value to initialise the column with
    """
    import numpy
    import pyrap.tables
    tab = pyrap.tables.table(msname, readonly=False)
    try:
        tab.getcol(colname)
        print('Column already exists')
    except RuntimeError:
        print('Attempting to add %s column to %s' % (colname, msname))
        from pyrap.tables import maketabdesc
        valuetype = valuetype or 'complex'
        if shape is None:
            # cell shape defaults to that of a DATA cell (drop the row axis)
            shape = list(tab.getcol('DATA').shape)[1:]
        if data_desc_type == 'array':
            from pyrap.tables import makearrcoldesc
            # clone DATA's data-manager info so the new column is stored the
            # same way (God forbid this, or the TIME, column doesn't exist)
            coldmi = tab.getdminfo('DATA')
            coldmi['NAME'] = colname.lower()
            tab.addcols(maketabdesc(makearrcoldesc(colname, init_with, shape=shape, valuetype=valuetype)), coldmi)
        elif data_desc_type == 'scalar':
            from pyrap.tables import makescacoldesc
            coldmi = tab.getdminfo('TIME')
            coldmi['NAME'] = colname.lower()
            tab.addcols(maketabdesc(makescacoldesc(colname, init_with, valuetype=valuetype)), coldmi)
        print('Column added successfuly.')
        if init_with:
            # Was `nrows = dshape[0]`: dshape only existed when shape was None,
            # so passing an explicit shape with init_with raised NameError.
            nrows = tab.nrows()
            # write the fill value in chunks of ~10% of the table
            rowchunk = nrows // 10 if nrows > 1000 else nrows
            for row0 in range(0, nrows, rowchunk or 1):  # 'or 1': empty table
                nr = min(rowchunk, nrows - row0)
                tab.putcol(colname, numpy.ones([nr] + list(shape), dtype=valuetype) * init_with, row0, nr)
    tab.close()
def copycol(msname, fromcol, tocol):
    """Copy column *fromcol* of MS *msname* into *tocol*, creating *tocol*
    (via addcol) when it does not exist yet.
    """
    from pyrap.tables import table
    tab = table(msname, readonly=False)
    data = tab.getcol(fromcol)
    if tocol not in tab.colnames():
        addcol(msname, tocol)
    nrows = tab.nrows()
    # write back in chunks of ~10% of the table
    rowchunk = nrows // 10 if nrows > 5000 else nrows
    # 'or 1': an empty table gave rowchunk == 0 and range() raises on step 0
    for row0 in range(0, nrows, rowchunk or 1):
        nr = min(rowchunk, nrows - row0)
        tab.putcol(tocol, data[row0:row0 + nr], row0, nr)
    tab.close()
def cab_dict_update(dictionary, key=None, value=None, options=None):
    """Update *dictionary* in place, keeping any value already set.

    Either a single key/value pair or an *options* dict may be given. An
    existing (non-None) entry wins over the new value. Returns the mutated
    dictionary.
    """
    if options is None:
        options = {key: value}
    for key, value in options.items():
        existing = dictionary.pop(key, None)
        # 'is not None' instead of truthiness: a legitimate falsy setting
        # (0, False, "") must not be clobbered by the incoming value
        dictionary[key] = existing if existing is not None else value
    return dictionary
def compute_vis_noise(msname, sefd, spw_id=0):
    """Computes nominal per-visibility noise.

    msname : measurement set to inspect
    sefd   : system equivalent flux density [Jy]
    spw_id : spectral window index (default 0)

    Returns the per-visibility noise in Jy (radiometer equation).
    """
    from pyrap.tables import table
    tab = table(msname)
    spwtab = table(msname + "/SPECTRAL_WINDOW")

    freq0 = spwtab.getcol("CHAN_FREQ")[spw_id, 0]  # first channel frequency [Hz]
    wavelength = 300e+6/freq0  # 300e+6 == 3e8 (c in m/s), so this is metres
    bw = spwtab.getcol("CHAN_WIDTH")[spw_id, 0]  # channel width [Hz]
    dt = tab.getcol("EXPOSURE", 0, 1)[0]  # integration time of the first row [s]
    # total time span: last TIME minus first TIME (assumes rows time-ordered -- TODO confirm)
    dtf = (tab.getcol("TIME", tab.nrows()-1, 1)-tab.getcol("TIME", 0, 1))[0]
    # close tables properly, else the calls below will hang waiting for a lock...
    tab.close()
    spwtab.close()
    print(">>> %s freq %.2f MHz (lambda=%.2fm), bandwidth %.2g kHz, %.2fs integrations, %.2fh synthesis"%(msname, freq0*1e-6, wavelength, bw*1e-3, dt, dtf/3600))
    # radiometer equation for a single visibility: sigma = SEFD / sqrt(2 * bw * dt)
    noise = sefd/math.sqrt(abs(2*bw*dt))
    print(">>> SEFD of %.2f Jy gives per-visibility noise of %.2f mJy"%(sefd, noise*1000))
    return noise
|
SpheMakh/Stimela | stimela/utils/__init__.py | compute_vis_noise | python | def compute_vis_noise(msname, sefd, spw_id=0):
from pyrap.tables import table
tab = table(msname)
spwtab = table(msname + "/SPECTRAL_WINDOW")
freq0 = spwtab.getcol("CHAN_FREQ")[spw_id, 0]
wavelength = 300e+6/freq0
bw = spwtab.getcol("CHAN_WIDTH")[spw_id, 0]
dt = tab.getcol("EXPOSURE", 0, 1)[0]
dtf = (tab.getcol("TIME", tab.nrows()-1, 1)-tab.getcol("TIME", 0, 1))[0]
# close tables properly, else the calls below will hang waiting for a lock...
tab.close()
spwtab.close()
print(">>> %s freq %.2f MHz (lambda=%.2fm), bandwidth %.2g kHz, %.2fs integrations, %.2fh synthesis"%(msname, freq0*1e-6, wavelength, bw*1e-3, dt, dtf/3600))
noise = sefd/math.sqrt(abs(2*bw*dt))
print(">>> SEFD of %.2f Jy gives per-visibility noise of %.2f mJy"%(sefd, noise*1000))
return noise | Computes nominal per-visibility noise | train | https://github.com/SpheMakh/Stimela/blob/292e80461a0c3498da8e7e987e2891d3ae5981ad/stimela/utils/__init__.py#L448-L469 | null | import subprocess
import signal
import os
import sys
import logging
import json
import yaml
import codecs
import time
import tempfile
import inspect
import warnings
import re
import math
from threading import Thread
import unicodedata
import hashlib
#from fcntl import fcntl, F_GETFL, F_SETFL
#from os import O_NONBLOCK, read
DEBUG = False
INTERRUPT_TIME = 1.0 # seconds -- do not want to constantly interrupt the child process
# Raised by xrun() when a shelled-out command exits with a non-zero status.
class StimelaCabRuntimeError(RuntimeError): pass
from multiprocessing import Process, Manager, Lock
CPUS = 1
def _logger(level=0, logfile=None):
    """Return the shared "STIMELA" logger configured at the requested level.

    :param level: verbosity index: 0=INFO, 1=DEBUG, 2=ERROR, 3=CRITICAL
    :param logfile: if given, route root logging output to this file
    :raises KeyError: if ``level`` is not one of 0-3
    """
    # logging.getLogger() always returns a (truthy) Logger object, so the
    # previous "not logging.getLogger('STIMELA')" guard could never fire and
    # basicConfig() was never called. Guard on configured handlers instead.
    if not logging.getLogger().handlers:
        if logfile:
            logging.basicConfig(filename=logfile)
        else:
            logging.basicConfig()
    LOGL = {"0": "INFO",
            "1": "DEBUG",
            "2": "ERROR",
            "3": "CRITICAL"}
    log = logging.getLogger("STIMELA")
    # getattr() instead of eval() to map the level name onto logging's constant
    log.setLevel(getattr(logging, LOGL[str(level)]))
    return log
def assign(key, value):
    """Inject ``value`` into the *caller's* global namespace under ``key``."""
    caller_frame = inspect.currentframe().f_back
    caller_globals = caller_frame.f_globals
    caller_globals[key] = value
def xrun(command, options, log=None, _log_container_as_started=False, logfile=None, timeout=-1, kill_callback=None):
    """
    Run something on command line.

    Example: _run("ls", ["-lrt", "../"])

    :param command: executable or command name
    :param options: list of command-line arguments (stringified and space-joined)
    :param log: optional logger; output falls back to print() when absent
    :param _log_container_as_started: accepted but unused in this function
    :param logfile: accepted but unused in this function
    :param timeout: watchdog deadline in seconds; negative disables the watchdog
    :param kill_callback: invoked (once per poll interval) after the deadline
        passes; it is expected to terminate the child process
    :raises StimelaCabRuntimeError: if the command exits with non-zero status
    """
    cmd = " ".join([command] + list(map(str, options)) )

    # small print-or-log helpers so messages go to the logger when one is given
    def _print_info(msg):
        if msg is None: return
        if log:
            log.info(msg)
        else:
            print(msg)

    def _print_warn(msg):
        if msg is None: return
        if log:
            log.warn(msg)
        else:
            print(msg)

    _print_info(u"Running: {0:s}".format(cmd))

    sys.stdout.flush()
    starttime = time.time()
    process = p = None
    try:
        # per-invocation capture file for the child's stdout/stderr, keyed on
        # an MD5 of the command plus the start time to avoid collisions
        foutname = os.path.join("/tmp", "stimela_output_{0:s}_{1:f}".format(hashlib.md5(cmd.encode('utf-8')).hexdigest(), starttime))
        with open(foutname, "w+") as fout:
            p = process = subprocess.Popen(cmd,
                                           stderr=fout,
                                           stdout=fout,
                                           shell=True)

            # Watchdog thread: while the child is alive and a timeout is set,
            # poll every INTERRUPT_TIME seconds; once the deadline is passed,
            # keep calling kill_callback until the child actually exits.
            def clock_killer(p):
                while process.poll() is None and (timeout >= 0):
                    currenttime = time.time()
                    if (currenttime - starttime < timeout):
                        # "DEBUG and f(...)" short-circuit: only logs when DEBUG is truthy
                        DEBUG and _print_warn(u"Clock Reaper: has been running for {0:f}, must finish in {1:f}".format(currenttime - starttime, timeout))
                    else:
                        _print_warn(u"Clock Reaper: Timeout reached for '{0:s}'... sending the KILL signal".format(cmd))
                        (kill_callback is not None) and kill_callback()
                    time.sleep(INTERRUPT_TIME)

            Thread(target=clock_killer, args=tuple([p])).start()

            # main wait loop: poll the child until it terminates
            while (process.poll() is None):
                currenttime = time.time()
                DEBUG and _print_info(u"God mode on: has been running for {0:f}".format(currenttime - starttime))
                time.sleep(INTERRUPT_TIME) # this is probably not ideal as it interrupts the process every few seconds,
                #check whether there is an alternative with a callback
        assert hasattr(process, "returncode"), "No returncode after termination!"
        # replay the captured child output through the logger / stdout
        with open(foutname, "r") as fout:
            _print_info(fout.read())
    finally:
        # surface a non-zero child exit status as an exception
        if (process is not None) and process.returncode:
            raise StimelaCabRuntimeError('%s: returns errr code %d' % (command, process.returncode))
def readJson(conf):
    """Parse the given JSON/YAML config file and return the resulting object.

    Uses yaml.safe_load, which accepts plain JSON as a subset of YAML.
    """
    with open(conf) as stream:
        return yaml.safe_load(stream)
def writeJson(config, dictionary):
    """Serialise ``dictionary`` as JSON and write it to ``config`` as UTF-8."""
    payload = json.dumps(dictionary, ensure_ascii=False)
    with codecs.open(config, 'w', 'utf8') as handle:
        handle.write(payload)
def get_Dockerfile_base_image(image):
    """Return the last ``FROM`` line of a Dockerfile.

    ``image`` may be the Dockerfile path itself or a directory containing
    one. An empty string is returned when no FROM line is present.
    """
    dockerfile = image if os.path.isfile(image) else "{:s}/Dockerfile".format(image)
    base_line = ""
    with open(dockerfile, "r") as handle:
        for line in handle:
            if line.startswith("FROM"):
                base_line = line
    return base_line
def change_Dockerfile_base_image(path, _from, label, destdir="."):
    """Copy a cab's Dockerfile (and src tree) into a fresh temp directory,
    replacing its FROM line(s) with the given ``_from`` line.

    :param path: Dockerfile path, or a directory containing a Dockerfile
    :param _from: replacement ``FROM ...`` line (without trailing newline)
    :param label: label embedded in the temp directory name prefix
    :param destdir: parent directory for the temp directory
    :return: tuple of (temp directory path, rewritten Dockerfile path)
    """
    if os.path.isfile(path):
        dockerfile = path
        dirname = os.path.dirname(path)
    else:
        dockerfile = "{:s}/Dockerfile".format(path)
        dirname = path
    with open(dockerfile, "r") as std:
        # Filter instead of list.remove() inside the iteration: mutating the
        # list while iterating skips the element after each removal, so
        # consecutive FROM lines (e.g. multi-stage builds) were left behind.
        lines = [line for line in std.readlines() if not line.startswith("FROM")]
    temp_dir = tempfile.mkdtemp(prefix="tmp-stimela-{:s}-".format(label), dir=destdir)
    xrun("cp", ["-r", "{:s}/Dockerfile {:s}/src".format(dirname, dirname), temp_dir])
    dockerfile = "{:s}/Dockerfile".format(temp_dir)
    with open(dockerfile, "w") as std:
        # new base image first, then the remaining original lines
        std.write("{:s}\n".format(_from))
        for line in lines:
            std.write(line)
    return temp_dir, dockerfile
def get_base_images(logfile, index=1):
    """Extract (image, base) pairs from a stimela build log.

    The log is split into sections on the DONE separator, and section
    ``index`` (1-based) is scanned for "<=BASE_IMAGE=>image=base" markers.
    """
    with open(logfile) as handle:
        contents = handle.read()
    separator = "[================================DONE==========================]"
    section = contents.split(separator)[index-1]
    images = []
    for line in section.split("\n"):
        # marker must appear after at least one leading character
        if line.find("<=BASE_IMAGE=>") > 0:
            payload = line.split("<=BASE_IMAGE=>")[-1]
            image, base = payload.split("=")
            images.append((image.strip(), base))
    return images
def icasa(taskname, mult=None, clearstart=False, loadthese=None, **kw0):
    """
    Run a CASA task given a list of options.

    A given task can be run multiple times with different options; in this
    case the options must be passed as a list/tuple of dictionaries via
    ``mult``, e.g.
    icasa('exportfits', mult=[{'imagename':'img1.image','fitsimage':'image1.fits'},
                              {'imagename':'img2.image','fitsimage':'image2.fits'}]).
    Options common to the multiple commands should be given as keyword args.

    :param taskname: name of the CASA task to run
    :param mult: dict (single run) or list/tuple of dicts (multiple runs)
    :param clearstart: if True, call clearstart() before each invocation
    :param loadthese: extra modules to import in the generated script, given
        either as bare module names or full "import ..."/"from ..." lines
    """
    # a mutable default ([]) is shared across calls and was being appended to;
    # use None and copy the caller's list instead
    loadthese = list(loadthese) if loadthese else []
    # create temp directory from which to run casapy
    td = tempfile.mkdtemp(dir='.')
    # we want to get back to the working directory once casapy is launched
    cdir = os.path.realpath('.')
    # the generated script needs os for os.chdir(); make sure it is imported
    if "os" not in loadthese and "import os" not in loadthese:
        loadthese.append("os")
    _load = ""
    # materialise as a list: a filter() generator would be exhausted after the
    # first membership test under Python 3
    exclude = [line for line in loadthese
               if line.startswith("import") or line.startswith("from")]
    for line in loadthese:
        if line not in exclude:
            line = "import %s" % line
        _load += "%s\n" % line
    if mult:
        if isinstance(mult, (tuple, list)):
            for opts in mult:
                opts.update(kw0)
        else:
            # was "mult.upadte(kw0)": the typo raised AttributeError whenever
            # a single options dict was passed via mult
            mult.update(kw0)
            mult = [mult]
    else:
        mult = [kw0]
    run_cmd = """ """
    for kw in mult:
        task_cmds = []
        for key, val in kw.items():
            # quote string values so they survive as CASA task arguments
            if isinstance(val, str):
                val = '"%s"' % val
            task_cmds.append('%s=%s' % (key, val))
        task_cmds = ", ".join(task_cmds)
        run_cmd += """
%s
os.chdir('%s')
%s
%s(%s)
""" % (_load, cdir, "clearstart()" if clearstart else "", taskname, task_cmds)
    # text mode so the generated (str) script can be written under Python 3
    tf = tempfile.NamedTemporaryFile(suffix='.py', mode='w')
    tf.write(run_cmd)
    tf.flush()
    t0 = time.time()
    # all logging information will be in the pyxis log files
    print("Running {}".format(run_cmd))
    xrun("cd", [td, "&& casa --nologger --log2term --nologfile -c", tf.name])
    # log taskname.last
    task_last = '%s.last' % taskname
    if os.path.exists(task_last):
        with open(task_last, 'r') as last:
            print('%s.last is: \n %s' % (taskname, last.read()))
    # remove temp directory. This also gets rid of the casa log files; so long suckers!
    xrun("rm", ["-fr ", td, task_last])
    tf.close()
def stack_fits(fitslist, outname, axis=0, ctype=None, keep_old=False, fits=False):
    """ Stack a list of fits files along a given axiis.

    fitslist: list of fits file to combine
    outname: output file name
    axis: axis along which to combine the files
    fits: If True will axis FITS ordering axes
    ctype: Axis label in the fits header (if given, axis will be ignored)
    keep_old: Keep component files after combining?
    """
    import numpy
    try:
        import pyfits
    except ImportError:
        warnings.warn("Could not find pyfits on this system. FITS files will not be stacked")
        sys.exit(0)
    hdu = pyfits.open(fitslist[0])[0]
    hdr = hdu.header
    naxis = hdr['NAXIS']
    # find axis via CTYPE key
    if ctype is not None:
        for i in range(1,naxis+1):
            if hdr['CTYPE%d'%i].lower().startswith(ctype.lower()):
                axis = naxis - i # fits to numpy convention
    elif fits:
        axis = naxis - axis
    # FITS axes are numbered 1..NAXIS in reverse of numpy order
    fits_ind = abs(axis-naxis)
    crval = hdr['CRVAL%d'%fits_ind]
    imslice = [slice(None)]*naxis
    # sort the inputs by their reference value along the stacking axis
    _sorted = sorted([pyfits.open(fits) for fits in fitslist],
                    key=lambda a: a[0].header['CRVAL%d'%(naxis-axis)])
    # define structure of new FITS file
    nn = [ hd[0].header['NAXIS%d'%(naxis-axis)] for hd in _sorted]
    shape = list(hdu.data.shape)
    shape[axis] = sum(nn)
    data = numpy.zeros(shape,dtype=float)
    for i, hdu0 in enumerate(_sorted):
        h = hdu0[0].header
        d = hdu0[0].data
        # place this file's planes into its slot along the stacking axis
        # NOTE(review): indexing with a *list* containing a range relies on
        # legacy numpy fancy-indexing semantics; newer numpy expects a tuple.
        imslice[axis] = range(sum(nn[:i]),sum(nn[:i+1]) )
        data[imslice] = d
        # keep the smallest reference value as the combined CRVAL
        if crval > h['CRVAL%d'%fits_ind]:
            crval = h['CRVAL%d'%fits_ind]
    # update header
    hdr['CRVAL%d'%fits_ind] = crval
    hdr['CRPIX%d'%fits_ind] = 1
    # NOTE(review): "clobber" is the legacy pyfits keyword (astropy renamed
    # it to "overwrite") -- confirm the installed pyfits still accepts it.
    pyfits.writeto(outname, data, hdr, clobber=True)
    print("Successfully stacked images. Output image is %s"%outname)
    # remove old files
    # NOTE(review): the loop variable shadows the boolean "fits" parameter;
    # harmless here since the flag is no longer read, but fragile.
    if not keep_old:
        for fits in fitslist:
            os.system('rm -f %s'%fits)
def substitute_globals(string, globs=None):
    """Expand "${name}" placeholders in ``string``.

    Values are looked up in ``globs`` (or, by default, the caller's global
    namespace). Returns the substituted string, or False when the string
    contains no placeholders.

    :raises KeyError: if a placeholder has no corresponding entry in globs
    """
    # Only match "${name}": the old pattern '\{(.*?)\}' also captured bare
    # "{name}" tokens, which were then never replaced (replace() targets
    # "${name}") and could raise KeyError on unrelated braces.
    sub = set(re.findall(r'\$\{(.*?)\}', string))
    globs = globs or inspect.currentframe().f_back.f_globals
    if sub:
        for item in map(str, sub):
            string = string.replace("${%s}" % item, globs[item])
        return string
    else:
        return False
def get_imslice(ndim):
    """Return an index list selecting the full last two axes of an
    ``ndim``-dimensional image, with index 0 on every leading axis.

    e.g. get_imslice(4) -> [0, 0, slice(None), slice(None)]
    """
    imslice = []
    # range(), not xrange(): xrange does not exist under Python 3 and made
    # this helper raise NameError
    for i in range(ndim):
        if i < ndim-2:
            imslice.append(0)
        else:
            imslice.append(slice(None))
    return imslice
def addcol(msname, colname=None, shape=None,
           data_desc_type='array', valuetype=None, init_with=0, **kw):
    """ add column to MS

    msname : MS to add column to
    colname : column name
    shape : shape of a single cell (defaults to the shape of a DATA cell)
    valuetype : data type (defaults to 'complex')
    data_desc_type : 'scalar' for scalar elements and 'array' for array elements
    init_with : value to initialise the column with (0 leaves it untouched)
    """
    import numpy
    import pyrap.tables
    tab = pyrap.tables.table(msname, readonly=False)
    try:
        tab.getcol(colname)
        print('Column already exists')
    except RuntimeError:
        print('Attempting to add %s column to %s' % (colname, msname))
        from pyrap.tables import maketabdesc
        valuetype = valuetype or 'complex'
        if shape is None:
            dshape = list(tab.getcol('DATA').shape)
            shape = dshape[1:]
        else:
            # dshape was previously only defined when shape was None, so the
            # init_with block below raised NameError for an explicit shape
            dshape = [tab.nrows()] + list(shape)
        if data_desc_type == 'array':
            from pyrap.tables import makearrcoldesc
            coldmi = tab.getdminfo('DATA')  # God forbid this (or the TIME) column doesn't exist
            coldmi['NAME'] = colname.lower()
            tab.addcols(maketabdesc(makearrcoldesc(colname, init_with, shape=shape, valuetype=valuetype)), coldmi)
        elif data_desc_type == 'scalar':
            from pyrap.tables import makescacoldesc
            coldmi = tab.getdminfo('TIME')
            coldmi['NAME'] = colname.lower()
            tab.addcols(maketabdesc(makescacoldesc(colname, init_with, valuetype=valuetype)), coldmi)
        print('Column added successfuly.')
        if init_with:
            nrows = dshape[0]
            # write in ~10 chunks for large tables; "or 1" guards an empty table
            rowchunk = nrows//10 if nrows > 1000 else nrows
            for row0 in range(0, nrows, rowchunk or 1):
                nr = min(rowchunk, nrows-row0)
                dshape[0] = nr
                tab.putcol(colname, numpy.ones(dshape, dtype=valuetype)*init_with, row0, nr)
    tab.close()
def sumcols(msname, col1=None, col2=None, outcol=None, cols=None, suntract=False):
    """ add col1 to col2, or sum columns in 'cols' list.
    If suntract is True, subtract col2 from col1 instead.

    msname : MS to operate on
    col1, col2 : columns to combine (ignored when cols is given)
    outcol : column receiving the result
    cols : optional list of column names to sum together
    suntract : subtract col2 from col1 instead of adding
               (parameter name kept, typo and all, for call compatibility)
    """
    from pyrap.tables import table
    tab = table(msname, readonly=False)
    if cols:
        data = 0
        for col in cols:
            data += tab.getcol(col)
    else:
        # was "if subtract:": the parameter is (mis)named "suntract", so this
        # branch always raised NameError when cols was not given
        if suntract:
            data = tab.getcol(col1) - tab.getcol(col2)
        else:
            data = tab.getcol(col1) + tab.getcol(col2)
    # nrows was never defined, so the chunked write below always crashed
    nrows = tab.nrows()
    rowchunk = nrows//10 if nrows > 1000 else nrows
    # "or 1" guards a zero step on an empty table
    for row0 in range(0, nrows, rowchunk or 1):
        nr = min(rowchunk, nrows-row0)
        tab.putcol(outcol, data[row0:row0+nr], row0, nr)
    tab.close()
def copycol(msname, fromcol, tocol):
    """Copy column ``fromcol`` into ``tocol`` of the given MS, creating the
    destination column (via addcol) when it is missing."""
    from pyrap.tables import table
    tab = table(msname, readonly=False)
    data = tab.getcol(fromcol)
    if tocol not in tab.colnames():
        addcol(msname, tocol)
    nrows = tab.nrows()
    # chunk the write for big tables to bound memory use
    rowchunk = nrows//10 if nrows > 5000 else nrows
    # NOTE(review): an empty table gives rowchunk == 0 and range() with a
    # zero step raises ValueError -- confirm callers never pass an empty MS.
    for row0 in range(0, nrows, rowchunk):
        nr = min(rowchunk, nrows-row0)
        tab.putcol(tocol, data[row0:row0+nr], row0, nr)
    tab.close()
def cab_dict_update(dictionary, key=None, value=None, options=None):
    """Merge ``options`` (or the single key/value pair) into ``dictionary``
    and return it; an existing *truthy* entry wins over the incoming value.

    NOTE(review): because of the ``or``, existing falsy values (0, "", False)
    are overwritten by the new value -- confirm this is intended.
    """
    if options is None:
        options = {key:value}
    for key, value in options.items():
        dictionary[key] = dictionary.pop(key, None) or value
    return dictionary
|
SpheMakh/Stimela | stimela/cargo/cab/specfit/src/addSPI.py | fitsInfo | python | def fitsInfo(fitsname = None):
hdu = pyfits.open(fitsname)
hdr = hdu[0].header
ra = hdr['CRVAL1']
dra = abs(hdr['CDELT1'])
raPix = hdr['CRPIX1']
dec = hdr['CRVAL2']
ddec = abs(hdr['CDELT2'])
decPix = hdr['CRPIX2']
freq0 = 0
for i in range(1,hdr['NAXIS']+1):
if hdr['CTYPE%d'%i].strip() == 'FREQ':
freq0 = hdr['CRVAL%d'%i]
break
ndim = hdr["NAXIS"]
imslice = np.zeros(ndim, dtype=int).tolist()
imslice[-2:] = slice(None), slice(None)
image = hdu[0].data[imslice]
wcs = WCS(hdr,mode='pyfits')
return {'image':image,'wcs':wcs,'ra':ra,'dec':dec,'dra':dra,'ddec':ddec,'raPix':raPix,'decPix':decPix,'freq0':freq0} | Get fits info | train | https://github.com/SpheMakh/Stimela/blob/292e80461a0c3498da8e7e987e2891d3ae5981ad/stimela/cargo/cab/specfit/src/addSPI.py#L9-L33 | null | #!/usr/bin/env python
import pyfits
import numpy as np
import Tigger
import sys,os
from astLib.astWCS import WCS
from scipy import ndimage
def sky2px(wcs, ra, dec, dra, ddec, cell, beam):
    """Convert a sky region to pixel bounds.

    Returns np.array([ra_min, ra_max, dec_min, dec_max]): the pixel box
    centred on (ra, dec) with extents dra x ddec, clipped below by the beam
    size and rounded up to even pixel half-widths.
    """
    # every source is assumed to be at least as large as the psf
    extent_ra = max(dra, beam)
    extent_dec = max(ddec, beam)
    half_ra = int((extent_ra/2.)/cell)
    half_dec = int((extent_dec/2.)/cell)
    # round odd half-widths up to the next even pixel count
    if half_ra % 2:
        half_ra += 1
    if half_dec % 2:
        half_dec += 1
    raPix, decPix = (int(coord) for coord in wcs.wcs2pix(ra, dec))
    return np.array([raPix-half_ra, raPix+half_ra, decPix-half_dec, decPix+half_dec])
def RemoveSourcesWithoutSPI(lsmname_in, lsmname_out):
    """Drop every source lacking spectral-index information from the input
    Tigger sky model and save the result to ``lsmname_out``."""
    model = Tigger.load(lsmname_in)
    # iterate over a snapshot so removals do not disturb the traversal
    snapshot = list(model.sources)
    for src in snapshot:
        if not src.spectrum:
            model.sources.remove(src)
    model.save(lsmname_out)
def CombineSourcesInCluster(lsmname_in, lsmname_out):
    """Collapse each extended multi-source cluster of the input Tigger model
    into its brightest member, summing the cluster flux onto it, and save
    the result to ``lsmname_out``.

    Only clusters with more than one member whose radius exceeds 30 arcsec
    are collapsed.
    """
    model = Tigger.load(lsmname_in)
    # Iterate over a snapshot: the loop removes entries from model.sources,
    # and deleting from the list being iterated skips the following element.
    for src in list(model.sources):
        if src not in model.sources:
            # already merged away as part of an earlier cluster
            continue
        if src.cluster_size > 1 and rad2arcsec(src.r) > 30:
            cluster_sources = [src1 for src1 in model.sources if src1.cluster is src.cluster]
            flux_sources = [src1.flux.I for src1 in cluster_sources]
            max_flux_index = flux_sources.index(max(flux_sources))
            brightest = cluster_sources[max_flux_index]
            # the brightest member absorbs the whole cluster's flux
            brightest.flux.I = sum(flux_sources)
            for src2 in cluster_sources:
                if src2 is not brightest:
                    model.sources.remove(src2)
            brightest.cluster_size = 1
    model.save(lsmname_out)
def rad2arcsec(x):
    """Convert an angle from radians to arcseconds."""
    degrees = x * 180.0 / np.pi
    return degrees * 3600.0
def addSPI(fitsname_alpha=None, fitsname_alpha_error=None, lsmname=None,
           outfile=None,freq0=None, beam=None, spitol=(-10,10)):
    """
    Add spectral index to a tigger lsm from a spectral index map (fits format)
    takes in a spectral index map, input lsm and output lsm name.

    fitsname_alpha       : FITS spectral-index (alpha) map
    fitsname_alpha_error : optional FITS map of per-pixel alpha errors; when
                           absent, uniform unit weights are used
    lsmname              : input Tigger sky model
    outfile              : output sky model path
    freq0                : reference frequency in Hz, a FITS filename to read
                           it from, or None to take it from the alpha map
    beam                 : (bmaj, bmin, bpa) tuple -- required
    spitol               : accepted spi range (see NOTE on the check below)

    NOTE: this module uses Python 2 print statements.
    """
    # import pylab as plt
    if beam is None:
        raise RuntimeError("the beam option must be specified")
    print "INFO: Getting fits info from: %s, %s" %(fitsname_alpha, fitsname_alpha_error)
    fits_alpha = fitsInfo(fitsname_alpha) # Get fits info
    image_alpha = fits_alpha['image'] # get image data
    if fitsname_alpha_error:
        fits_alpha_error = fitsInfo(fitsname_alpha_error)
        image_alpha_error = fits_alpha_error['image']
    else:
        # no error map given: reuse the alpha map's geometry with unit errors
        fits_alpha_error = fitsInfo(fitsname_alpha)
        image_alpha_error = fits_alpha_error['image']
        image_alpha_error[...] = 1.0
    # may supply FITS file for freq0, in which case just pull ref frequency from FITS file,
    # else explicit frequency, else get frequency from alpha image
    if type(freq0) is str:
        freq0 = fitsInfo(freq0)['freq0']
    else:
        freq0 = freq0 or fits_alpha['freq0']
    model = Tigger.load(lsmname) # load output sky model
    rad = lambda a: a*(180/np.pi) # convert radians to degrees
    for src in model.sources:
        ra = rad(src.pos.ra)
        dec = rad(src.pos.dec)
        # Cater for point sources and assume source extent equal to the
        # Gaussian major axis along both ra and dec axis
        dra = rad(src.shape.ex) if src.shape else beam[0]
        ddec = rad(src.shape.ey) if src.shape else beam[1]
        pa = rad(src.shape.pa) if src.shape else beam[2]
        emin, emaj = sorted([dra, ddec])
        # Determine region of interest
        rgn = sky2px(fits_alpha["wcs"],ra,dec,dra,ddec,fits_alpha["dra"], beam[1])
        # NOTE(review): the RA slice ends at rgn[3] (the Dec upper bound);
        # rgn[1] (the RA upper bound) looks intended -- confirm.
        imslice = slice(rgn[2], rgn[3]), slice(rgn[0], rgn[3])
        alpha = image_alpha[imslice]
        xpix, ypix = alpha.shape
        xx, yy = np.ogrid[-xpix:xpix, -ypix:ypix]
        emajPix = emaj/fits_alpha["dra"]
        eminPix = emin/fits_alpha["dra"]
        # Create elliptcal mask which has same shape as source
        mask = ((xx/emajPix)**2 + (yy/eminPix)**2 <= 1)[xpix*2-xpix:xpix*2+xpix, ypix*2-ypix:ypix*2+ypix]
        mask = ndimage.rotate(mask, angle=pa, order=0, reshape=False)
        # NOTE(review): draPix/ddPix are computed but never used.
        draPix = dra/fits_alpha["dra"]
        ddPix = ddec/fits_alpha["ddec"]
        # alpha is a basic-slicing view into image_alpha, so this in-place
        # multiply also zeroes the masked-out pixels in the loaded map
        alpha *= mask
        alpha_error = image_alpha_error[imslice]*mask
        # NOTE(review): "x != np.nan" is always True (NaN compares unequal to
        # everything, itself included); np.isnan() is presumably intended, so
        # NaN pixels are NOT actually filtered by these two tests.
        good = np.where( np.logical_and(alpha!=0, alpha!=np.nan))
        alpha = alpha[good]
        alpha_error = alpha_error[good]
        good = np.where( np.logical_and(alpha_error!=np.nan, alpha_error!=np.inf))
        alpha = alpha[good]
        alpha_error = alpha_error[good]
        # inverse-error weighted mean of the surviving alpha pixels
        subIm_weight = 1/alpha_error
        subIm_weighted = alpha*subIm_weight
        if len(subIm_weighted)>0:
            subIm_normalization = np.sum(subIm_weight)
            spi = float(np.sum(subIm_weighted)/subIm_normalization)
            spi_error = 1/float(subIm_normalization)
            # NOTE(review): with spitol=(-10, 10) this "or" condition holds
            # for every spi; an "and" range check looks intended -- confirm.
            if spi > spitol[0] or spi < spitol[-1]:
                sys.stdout.write("INFO: Adding spi: %.3f (at %.3g MHz) to source %s" % (
                                 spi, freq0/1e6, src.name))
                src.spectrum = Tigger.Models.ModelClasses.SpectralIndex(spi, freq0)
                src.setAttribute('spi_error', spi_error)
        else:
            sys.stdout.write("ALERT: no spi info found in %s for source %s" % (
                             fitsname_alpha, src.name))
    model.save(outfile)
if __name__=="__main__":
    # CLI usage: addSPI.py <alpha FITS map> <input LSM> <output LSM>
    fitsname_alpha = sys.argv[1]
    #fitsname_alpha_error = sys.argv[2]
    lsmname = sys.argv[2]
    outfile = sys.argv[3]
    # NOTE(review): addSPI raises RuntimeError when beam is None, and no beam
    # is passed here -- confirm this entry point is still functional.
    addSPI(fitsname_alpha, None, lsmname, outfile)
|
SpheMakh/Stimela | stimela/cargo/cab/specfit/src/addSPI.py | sky2px | python | def sky2px(wcs,ra,dec,dra,ddec,cell, beam):
dra = beam if dra<beam else dra # assume every source is at least as large as the psf
ddec = beam if ddec<beam else ddec
offsetDec = int((ddec/2.)/cell)
offsetRA = int((dra/2.)/cell)
if offsetDec%2==1:
offsetDec += 1
if offsetRA%2==1:
offsetRA += 1
raPix,decPix = map(int, wcs.wcs2pix(ra,dec))
return np.array([raPix-offsetRA,raPix+offsetRA,decPix-offsetDec,decPix+offsetDec]) | convert a sky region to pixel positions | train | https://github.com/SpheMakh/Stimela/blob/292e80461a0c3498da8e7e987e2891d3ae5981ad/stimela/cargo/cab/specfit/src/addSPI.py#L35-L47 | null | #!/usr/bin/env python
import pyfits
import numpy as np
import Tigger
import sys,os
from astLib.astWCS import WCS
from scipy import ndimage
def fitsInfo(fitsname = None):
"""
Get fits info
"""
hdu = pyfits.open(fitsname)
hdr = hdu[0].header
ra = hdr['CRVAL1']
dra = abs(hdr['CDELT1'])
raPix = hdr['CRPIX1']
dec = hdr['CRVAL2']
ddec = abs(hdr['CDELT2'])
decPix = hdr['CRPIX2']
freq0 = 0
for i in range(1,hdr['NAXIS']+1):
if hdr['CTYPE%d'%i].strip() == 'FREQ':
freq0 = hdr['CRVAL%d'%i]
break
ndim = hdr["NAXIS"]
imslice = np.zeros(ndim, dtype=int).tolist()
imslice[-2:] = slice(None), slice(None)
image = hdu[0].data[imslice]
wcs = WCS(hdr,mode='pyfits')
return {'image':image,'wcs':wcs,'ra':ra,'dec':dec,'dra':dra,'ddec':ddec,'raPix':raPix,'decPix':decPix,'freq0':freq0}
def RemoveSourcesWithoutSPI(lsmname_in, lsmname_out):
model = Tigger.load(lsmname_in)
sources = [src for src in model.sources]
for src in sources:
if not src.spectrum:
model.sources.remove(src)
model.save(lsmname_out)
def CombineSourcesInCluster(lsmname_in, lsmname_out):
model = Tigger.load(lsmname_in)
for src in model.sources:
if src.cluster_size>1 and rad2arcsec(src.r)>30:
cluster_sources = [src1 for src1 in model.sources if src1.cluster is src.cluster]
flux_sources = [src1.flux.I for src1 in cluster_sources]
max_flux_index = flux_sources.index(max( flux_sources))
cluster_sources[max_flux_index].flux.I = sum([src1.flux.I for src1 in cluster_sources])
for src2 in cluster_sources:
if src2 is not cluster_sources[max_flux_index]:
model.sources.remove(src2)
cluster_sources[max_flux_index].cluster_size=1
model.save(lsmname_out)
def rad2arcsec(x):
return x*3600.0*180.0/np.pi
def addSPI(fitsname_alpha=None, fitsname_alpha_error=None, lsmname=None,
outfile=None,freq0=None, beam=None, spitol=(-10,10)):
"""
Add spectral index to a tigger lsm from a spectral index map (fits format)
takes in a spectral index map, input lsm and output lsm name.
"""
# import pylab as plt
if beam is None:
raise RuntimeError("the beam option must be specified")
print "INFO: Getting fits info from: %s, %s" %(fitsname_alpha, fitsname_alpha_error)
fits_alpha = fitsInfo(fitsname_alpha) # Get fits info
image_alpha = fits_alpha['image'] # get image data
if fitsname_alpha_error:
fits_alpha_error = fitsInfo(fitsname_alpha_error)
image_alpha_error = fits_alpha_error['image']
else:
fits_alpha_error = fitsInfo(fitsname_alpha)
image_alpha_error = fits_alpha_error['image']
image_alpha_error[...] = 1.0
# may supply FITS file for freq0, in which case just pull ref frequency from FITS file,
# else explicit frequency, else get frequency from alpha image
if type(freq0) is str:
freq0 = fitsInfo(freq0)['freq0']
else:
freq0 = freq0 or fits_alpha['freq0']
model = Tigger.load(lsmname) # load output sky model
rad = lambda a: a*(180/np.pi) # convert radians to degrees
for src in model.sources:
ra = rad(src.pos.ra)
dec = rad(src.pos.dec)
# Cater for point sources and assume source extent equal to the
# Gaussian major axis along both ra and dec axis
dra = rad(src.shape.ex) if src.shape else beam[0]
ddec = rad(src.shape.ey) if src.shape else beam[1]
pa = rad(src.shape.pa) if src.shape else beam[2]
emin, emaj = sorted([dra, ddec])
# Determine region of interest
rgn = sky2px(fits_alpha["wcs"],ra,dec,dra,ddec,fits_alpha["dra"], beam[1])
imslice = slice(rgn[2], rgn[3]), slice(rgn[0], rgn[3])
alpha = image_alpha[imslice]
xpix, ypix = alpha.shape
xx, yy = np.ogrid[-xpix:xpix, -ypix:ypix]
emajPix = emaj/fits_alpha["dra"]
eminPix = emin/fits_alpha["dra"]
# Create elliptcal mask which has same shape as source
mask = ((xx/emajPix)**2 + (yy/eminPix)**2 <= 1)[xpix*2-xpix:xpix*2+xpix, ypix*2-ypix:ypix*2+ypix]
mask = ndimage.rotate(mask, angle=pa, order=0, reshape=False)
draPix = dra/fits_alpha["dra"]
ddPix = ddec/fits_alpha["ddec"]
alpha *= mask
alpha_error = image_alpha_error[imslice]*mask
good = np.where( np.logical_and(alpha!=0, alpha!=np.nan))
alpha = alpha[good]
alpha_error = alpha_error[good]
good = np.where( np.logical_and(alpha_error!=np.nan, alpha_error!=np.inf))
alpha = alpha[good]
alpha_error = alpha_error[good]
subIm_weight = 1/alpha_error
subIm_weighted = alpha*subIm_weight
if len(subIm_weighted)>0:
subIm_normalization = np.sum(subIm_weight)
spi = float(np.sum(subIm_weighted)/subIm_normalization)
spi_error = 1/float(subIm_normalization)
if spi > spitol[0] or spi < spitol[-1]:
sys.stdout.write("INFO: Adding spi: %.3f (at %.3g MHz) to source %s" % (
spi, freq0/1e6, src.name))
src.spectrum = Tigger.Models.ModelClasses.SpectralIndex(spi, freq0)
src.setAttribute('spi_error', spi_error)
else:
sys.stdout.write("ALERT: no spi info found in %s for source %s" % (
fitsname_alpha, src.name))
model.save(outfile)
if __name__=="__main__":
fitsname_alpha = sys.argv[1]
#fitsname_alpha_error = sys.argv[2]
lsmname = sys.argv[2]
outfile = sys.argv[3]
addSPI(fitsname_alpha, None, lsmname, outfile)
|
SpheMakh/Stimela | stimela/cargo/cab/specfit/src/addSPI.py | addSPI | python | def addSPI(fitsname_alpha=None, fitsname_alpha_error=None, lsmname=None,
outfile=None,freq0=None, beam=None, spitol=(-10,10)):
# import pylab as plt
if beam is None:
raise RuntimeError("the beam option must be specified")
print "INFO: Getting fits info from: %s, %s" %(fitsname_alpha, fitsname_alpha_error)
fits_alpha = fitsInfo(fitsname_alpha) # Get fits info
image_alpha = fits_alpha['image'] # get image data
if fitsname_alpha_error:
fits_alpha_error = fitsInfo(fitsname_alpha_error)
image_alpha_error = fits_alpha_error['image']
else:
fits_alpha_error = fitsInfo(fitsname_alpha)
image_alpha_error = fits_alpha_error['image']
image_alpha_error[...] = 1.0
# may supply FITS file for freq0, in which case just pull ref frequency from FITS file,
# else explicit frequency, else get frequency from alpha image
if type(freq0) is str:
freq0 = fitsInfo(freq0)['freq0']
else:
freq0 = freq0 or fits_alpha['freq0']
model = Tigger.load(lsmname) # load output sky model
rad = lambda a: a*(180/np.pi) # convert radians to degrees
for src in model.sources:
ra = rad(src.pos.ra)
dec = rad(src.pos.dec)
# Cater for point sources and assume source extent equal to the
# Gaussian major axis along both ra and dec axis
dra = rad(src.shape.ex) if src.shape else beam[0]
ddec = rad(src.shape.ey) if src.shape else beam[1]
pa = rad(src.shape.pa) if src.shape else beam[2]
emin, emaj = sorted([dra, ddec])
# Determine region of interest
rgn = sky2px(fits_alpha["wcs"],ra,dec,dra,ddec,fits_alpha["dra"], beam[1])
imslice = slice(rgn[2], rgn[3]), slice(rgn[0], rgn[3])
alpha = image_alpha[imslice]
xpix, ypix = alpha.shape
xx, yy = np.ogrid[-xpix:xpix, -ypix:ypix]
emajPix = emaj/fits_alpha["dra"]
eminPix = emin/fits_alpha["dra"]
# Create elliptcal mask which has same shape as source
mask = ((xx/emajPix)**2 + (yy/eminPix)**2 <= 1)[xpix*2-xpix:xpix*2+xpix, ypix*2-ypix:ypix*2+ypix]
mask = ndimage.rotate(mask, angle=pa, order=0, reshape=False)
draPix = dra/fits_alpha["dra"]
ddPix = ddec/fits_alpha["ddec"]
alpha *= mask
alpha_error = image_alpha_error[imslice]*mask
good = np.where( np.logical_and(alpha!=0, alpha!=np.nan))
alpha = alpha[good]
alpha_error = alpha_error[good]
good = np.where( np.logical_and(alpha_error!=np.nan, alpha_error!=np.inf))
alpha = alpha[good]
alpha_error = alpha_error[good]
subIm_weight = 1/alpha_error
subIm_weighted = alpha*subIm_weight
if len(subIm_weighted)>0:
subIm_normalization = np.sum(subIm_weight)
spi = float(np.sum(subIm_weighted)/subIm_normalization)
spi_error = 1/float(subIm_normalization)
if spi > spitol[0] or spi < spitol[-1]:
sys.stdout.write("INFO: Adding spi: %.3f (at %.3g MHz) to source %s" % (
spi, freq0/1e6, src.name))
src.spectrum = Tigger.Models.ModelClasses.SpectralIndex(spi, freq0)
src.setAttribute('spi_error', spi_error)
else:
sys.stdout.write("ALERT: no spi info found in %s for source %s" % (
fitsname_alpha, src.name))
model.save(outfile) | Add spectral index to a tigger lsm from a spectral index map (fits format)
takes in a spectral index map, input lsm and output lsm name. | train | https://github.com/SpheMakh/Stimela/blob/292e80461a0c3498da8e7e987e2891d3ae5981ad/stimela/cargo/cab/specfit/src/addSPI.py#L75-L161 | [
"def fitsInfo(fitsname = None):\n \"\"\"\n Get fits info\n \"\"\"\n hdu = pyfits.open(fitsname)\n hdr = hdu[0].header\n ra = hdr['CRVAL1']\n dra = abs(hdr['CDELT1'])\n raPix = hdr['CRPIX1']\n dec = hdr['CRVAL2']\n ddec = abs(hdr['CDELT2'])\n decPix = hdr['CRPIX2']\n freq0 = 0\n for i in range(1,hdr['NAXIS']+1):\n if hdr['CTYPE%d'%i].strip() == 'FREQ':\n freq0 = hdr['CRVAL%d'%i]\n break\n\n ndim = hdr[\"NAXIS\"]\n imslice = np.zeros(ndim, dtype=int).tolist()\n imslice[-2:] = slice(None), slice(None)\n image = hdu[0].data[imslice]\n wcs = WCS(hdr,mode='pyfits')\n\n return {'image':image,'wcs':wcs,'ra':ra,'dec':dec,'dra':dra,'ddec':ddec,'raPix':raPix,'decPix':decPix,'freq0':freq0}\n",
"def sky2px(wcs,ra,dec,dra,ddec,cell, beam):\n \"\"\"convert a sky region to pixel positions\"\"\"\n dra = beam if dra<beam else dra # assume every source is at least as large as the psf\n ddec = beam if ddec<beam else ddec\n offsetDec = int((ddec/2.)/cell)\n offsetRA = int((dra/2.)/cell)\n if offsetDec%2==1:\n offsetDec += 1\n if offsetRA%2==1:\n offsetRA += 1\n\n raPix,decPix = map(int, wcs.wcs2pix(ra,dec))\n return np.array([raPix-offsetRA,raPix+offsetRA,decPix-offsetDec,decPix+offsetDec])\n",
"rad = lambda a: a*(180/np.pi) # convert radians to degrees\n"
] | #!/usr/bin/env python
import pyfits
import numpy as np
import Tigger
import sys,os
from astLib.astWCS import WCS
from scipy import ndimage
def fitsInfo(fitsname = None):
"""
Get fits info
"""
hdu = pyfits.open(fitsname)
hdr = hdu[0].header
ra = hdr['CRVAL1']
dra = abs(hdr['CDELT1'])
raPix = hdr['CRPIX1']
dec = hdr['CRVAL2']
ddec = abs(hdr['CDELT2'])
decPix = hdr['CRPIX2']
freq0 = 0
for i in range(1,hdr['NAXIS']+1):
if hdr['CTYPE%d'%i].strip() == 'FREQ':
freq0 = hdr['CRVAL%d'%i]
break
ndim = hdr["NAXIS"]
imslice = np.zeros(ndim, dtype=int).tolist()
imslice[-2:] = slice(None), slice(None)
image = hdu[0].data[imslice]
wcs = WCS(hdr,mode='pyfits')
return {'image':image,'wcs':wcs,'ra':ra,'dec':dec,'dra':dra,'ddec':ddec,'raPix':raPix,'decPix':decPix,'freq0':freq0}
def sky2px(wcs,ra,dec,dra,ddec,cell, beam):
"""convert a sky region to pixel positions"""
dra = beam if dra<beam else dra # assume every source is at least as large as the psf
ddec = beam if ddec<beam else ddec
offsetDec = int((ddec/2.)/cell)
offsetRA = int((dra/2.)/cell)
if offsetDec%2==1:
offsetDec += 1
if offsetRA%2==1:
offsetRA += 1
raPix,decPix = map(int, wcs.wcs2pix(ra,dec))
return np.array([raPix-offsetRA,raPix+offsetRA,decPix-offsetDec,decPix+offsetDec])
def RemoveSourcesWithoutSPI(lsmname_in, lsmname_out):
model = Tigger.load(lsmname_in)
sources = [src for src in model.sources]
for src in sources:
if not src.spectrum:
model.sources.remove(src)
model.save(lsmname_out)
def CombineSourcesInCluster(lsmname_in, lsmname_out):
model = Tigger.load(lsmname_in)
for src in model.sources:
if src.cluster_size>1 and rad2arcsec(src.r)>30:
cluster_sources = [src1 for src1 in model.sources if src1.cluster is src.cluster]
flux_sources = [src1.flux.I for src1 in cluster_sources]
max_flux_index = flux_sources.index(max( flux_sources))
cluster_sources[max_flux_index].flux.I = sum([src1.flux.I for src1 in cluster_sources])
for src2 in cluster_sources:
if src2 is not cluster_sources[max_flux_index]:
model.sources.remove(src2)
cluster_sources[max_flux_index].cluster_size=1
model.save(lsmname_out)
def rad2arcsec(x):
return x*3600.0*180.0/np.pi
def addSPI(fitsname_alpha=None, fitsname_alpha_error=None, lsmname=None,
outfile=None,freq0=None, beam=None, spitol=(-10,10)):
"""
Add spectral index to a tigger lsm from a spectral index map (fits format)
takes in a spectral index map, input lsm and output lsm name.
"""
# import pylab as plt
if beam is None:
raise RuntimeError("the beam option must be specified")
print "INFO: Getting fits info from: %s, %s" %(fitsname_alpha, fitsname_alpha_error)
fits_alpha = fitsInfo(fitsname_alpha) # Get fits info
image_alpha = fits_alpha['image'] # get image data
if fitsname_alpha_error:
fits_alpha_error = fitsInfo(fitsname_alpha_error)
image_alpha_error = fits_alpha_error['image']
else:
fits_alpha_error = fitsInfo(fitsname_alpha)
image_alpha_error = fits_alpha_error['image']
image_alpha_error[...] = 1.0
# may supply FITS file for freq0, in which case just pull ref frequency from FITS file,
# else explicit frequency, else get frequency from alpha image
if type(freq0) is str:
freq0 = fitsInfo(freq0)['freq0']
else:
freq0 = freq0 or fits_alpha['freq0']
model = Tigger.load(lsmname) # load output sky model
rad = lambda a: a*(180/np.pi) # convert radians to degrees
for src in model.sources:
ra = rad(src.pos.ra)
dec = rad(src.pos.dec)
# Cater for point sources and assume source extent equal to the
# Gaussian major axis along both ra and dec axis
dra = rad(src.shape.ex) if src.shape else beam[0]
ddec = rad(src.shape.ey) if src.shape else beam[1]
pa = rad(src.shape.pa) if src.shape else beam[2]
emin, emaj = sorted([dra, ddec])
# Determine region of interest
rgn = sky2px(fits_alpha["wcs"],ra,dec,dra,ddec,fits_alpha["dra"], beam[1])
imslice = slice(rgn[2], rgn[3]), slice(rgn[0], rgn[3])
alpha = image_alpha[imslice]
xpix, ypix = alpha.shape
xx, yy = np.ogrid[-xpix:xpix, -ypix:ypix]
emajPix = emaj/fits_alpha["dra"]
eminPix = emin/fits_alpha["dra"]
# Create elliptcal mask which has same shape as source
mask = ((xx/emajPix)**2 + (yy/eminPix)**2 <= 1)[xpix*2-xpix:xpix*2+xpix, ypix*2-ypix:ypix*2+ypix]
mask = ndimage.rotate(mask, angle=pa, order=0, reshape=False)
draPix = dra/fits_alpha["dra"]
ddPix = ddec/fits_alpha["ddec"]
alpha *= mask
alpha_error = image_alpha_error[imslice]*mask
good = np.where( np.logical_and(alpha!=0, alpha!=np.nan))
alpha = alpha[good]
alpha_error = alpha_error[good]
good = np.where( np.logical_and(alpha_error!=np.nan, alpha_error!=np.inf))
alpha = alpha[good]
alpha_error = alpha_error[good]
subIm_weight = 1/alpha_error
subIm_weighted = alpha*subIm_weight
if len(subIm_weighted)>0:
subIm_normalization = np.sum(subIm_weight)
spi = float(np.sum(subIm_weighted)/subIm_normalization)
spi_error = 1/float(subIm_normalization)
if spi > spitol[0] or spi < spitol[-1]:
sys.stdout.write("INFO: Adding spi: %.3f (at %.3g MHz) to source %s" % (
spi, freq0/1e6, src.name))
src.spectrum = Tigger.Models.ModelClasses.SpectralIndex(spi, freq0)
src.setAttribute('spi_error', spi_error)
else:
sys.stdout.write("ALERT: no spi info found in %s for source %s" % (
fitsname_alpha, src.name))
model.save(outfile)
if __name__=="__main__":
fitsname_alpha = sys.argv[1]
#fitsname_alpha_error = sys.argv[2]
lsmname = sys.argv[2]
outfile = sys.argv[3]
addSPI(fitsname_alpha, None, lsmname, outfile)
|
frc1418/tbapy | tbapy/main.py | TBA._get | python | def _get(self, url):
return self.session.get(self.READ_URL_PRE + url).json() | Helper method: GET data from given URL on TBA's API.
:param url: URL string to get data from.
:return: Requested data in JSON format. | train | https://github.com/frc1418/tbapy/blob/3866d5a9971fe3dfaf1a1d83638bd6be6070f0c4/tbapy/main.py#L33-L40 | null | class TBA:
"""
Main library class.
Contains methods for interacting with The Blue Alliance.
"""
READ_URL_PRE = 'https://www.thebluealliance.com/api/v3/'
WRITE_URL_PRE = 'https://www.thebluealliance.com/api/trusted/v1/'
session = requests.Session()
auth_secret = ''
event_key = ''
def __init__(self, auth_key, auth_id='', auth_secret='', event_key=''):
"""
Store auth key so we can reuse it as many times as we make a request.
:param auth_key: Your application authorization key, obtainable at https://www.thebluealliance.com/account.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.auth_secret = auth_secret
self.event_key = event_key
self.session.headers.update({'X-TBA-Auth-Key': auth_key, 'X-TBA-Auth-Id': auth_id})
def _post(self, url, data):
"""
Helper method: POST data to a given URL on TBA's API.
:param url: URL string to post data to and hash.
:pararm data: JSON data to post and hash.
:return: Requests Response object.
"""
return self.session.post(self.WRITE_URL_PRE + url % self.event_key, data=data, headers={'X-TBA-Auth-Sig': md5((self.auth_secret + '/api/trusted/v1/' + url % self.event_key + data).encode('utf-8')).hexdigest()})
@staticmethod
def team_key(identifier):
"""
Take raw team number or string key and return string key.
Used by all team-related methods to support either an integer team number or team key being passed.
(We recommend passing an integer, just because it's cleaner. But whatever works.)
:param identifier: int team number or str 'frc####'
:return: string team key in format 'frc####'
"""
return identifier if type(identifier) == str else 'frc%s' % identifier
def status(self):
"""
Get TBA API status information.
:return: Data on current status of the TBA API as APIStatus object.
"""
return APIStatus(self._get('status'))
def teams(self, page=None, year=None, simple=False, keys=False):
"""
Get list of teams.
:param page: Page of teams to view. Each page contains 500 teams.
:param year: View teams from a specific year.
:param simple: Get only vital data.
:param keys: Set to true if you only want the teams' keys rather than full data on them.
:return: List of Team objects or string keys.
"""
# If the user has requested a specific page, get that page.
if page is not None:
if year:
if keys:
return self._get('teams/%s/%s/keys' % (year, page))
else:
return [Team(raw) for raw in self._get('teams/%s/%s%s' % (year, page, '/simple' if simple else ''))]
else:
if keys:
return self._get('teams/%s/keys' % page)
else:
return [Team(raw) for raw in self._get('teams/%s%s' % (page, '/simple' if simple else ''))]
# If no page was specified, get all of them and combine.
else:
teams = []
target = 0
while True:
page_teams = self.teams(page=target, year=year, simple=simple, keys=keys)
if page_teams:
teams.extend(page_teams)
else:
break
target += 1
return teams
def team(self, team, simple=False):
"""
Get data on a single specified team.
:param team: Team to get data for.
:param simple: Get only vital data.
:return: Team object with data on specified team.
"""
return Team(self._get('team/%s%s' % (self.team_key(team), '/simple' if simple else '')))
def team_events(self, team, year=None, simple=False, keys=False):
"""
Get team events a team has participated in.
:param team: Team to get events for.
:param year: Year to get events from.
:param simple: Get only vital data.
:param keys: Get just the keys of the events. Set to True if you only need the keys of each event and not their full data.
:return: List of strings or Teams
"""
if year:
if keys:
return self._get('team/%s/events/%s/keys' % (self.team_key(team), year))
else:
return [Event(raw) for raw in self._get('team/%s/events/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
else:
if keys:
return self._get('team/%s/events/keys' % self.team_key(team))
else:
return [Event(raw) for raw in self._get('team/%s/events%s' % (self.team_key(team), '/simple' if simple else ''))]
def team_awards(self, team, year=None, event=None):
"""
Get list of awards team has recieved.
:param team: Team to get awards of.
:param year: Year to get awards from.
:param event: Event to get awards from.
:return: List of Award objects
"""
if event:
return [Award(raw) for raw in self._get('team/%s/event/%s/awards' % (self.team_key(team), event))]
else:
if year:
return [Award(raw) for raw in self._get('team/%s/awards/%s' % (self.team_key(team), year))]
else:
return [Award(raw) for raw in self._get('team/%s/awards' % self.team_key(team))]
def team_matches(self, team, event=None, year=None, simple=False, keys=False):
"""
Get list of matches team has participated in.
:param team: Team to get matches of.
:param year: Year to get matches from.
:param event: Event to get matches from.
:param simple: Get only vital data.
:param keys: Only get match keys rather than their full data.
:return: List of string keys or Match objects.
"""
if event:
if keys:
return self._get('team/%s/event/%s/matches/keys' % (self.team_key(team), event))
else:
return [Match(raw) for raw in self._get('team/%s/event/%s/matches%s' % (self.team_key(team), event, '/simple' if simple else ''))]
elif year:
if keys:
return self._get('team/%s/matches/%s/keys' % (self.team_key(team), year))
else:
return [Match(raw) for raw in self._get('team/%s/matches/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
def team_years(self, team):
"""
Get years during which a team participated in FRC.
:param team: Key for team to get data about.
:return: List of integer years in which team participated.
"""
return self._get('team/%s/years_participated' % self.team_key(team))
def team_media(self, team, year=None, tag=None):
"""
Get media for a given team.
:param team: Team to get media of.
:param year: Year to get media from.
:param tag: Get only media with a given tag.
:return: List of Media objects.
"""
return [Media(raw) for raw in self._get('team/%s/media%s%s' % (self.team_key(team), ('/tag/%s' % tag) if tag else '', ('/%s' % year) if year else ''))]
def team_robots(self, team):
"""
Get data about a team's robots.
:param team: Key for team whose robots you want data on.
:return: List of Robot objects
"""
return [Robot(raw) for raw in self._get('team/%s/robots' % self.team_key(team))]
def team_districts(self, team):
"""
Get districts a team has competed in.
:param team: Team to get data on.
:return: List of District objects.
"""
return [District(raw) for raw in self._get('team/%s/districts' % self.team_key(team))]
def team_profiles(self, team):
"""
Get team's social media profiles linked on their TBA page.
:param team: Team to get data on.
:return: List of Profile objects.
"""
return [Profile(raw) for raw in self._get('team/%s/social_media' % self.team_key(team))]
def team_status(self, team, event):
"""
Get status of a team at an event.
:param team: Team whose status to get.
:param event: Event team is at.
:return: Status object.
"""
return Status(self._get('team/%s/event/%s/status' % (self.team_key(team), event)))
def events(self, year, simple=False, keys=False):
"""
Get a list of events in a given year.
:param year: Year to get events from.
:param keys: Get only keys of the events rather than full data.
:param simple: Get only vital data.
:return: List of string event keys or Event objects.
"""
if keys:
return self._get('events/%s/keys' % year)
else:
return [Event(raw) for raw in self._get('events/%s%s' % (year, '/simple' if simple else ''))]
def event(self, event, simple=False):
"""
Get basic information about an event.
More specific data (typically obtained with the detail_type URL parameter) can be obtained with event_alliances(), event_district_points(), event_insights(), event_oprs(), event_predictions(), and event_rankings().
:param event: Key of event for which you desire data.
:param simple: Get only vital data.
:return: A single Event object.
"""
return Event(self._get('event/%s%s' % (event, '/simple' if simple else '')))
def event_alliances(self, event):
"""
Get information about alliances at event.
:param event: Key of event to get data on.
:return: List of Alliance objects.
"""
return [Alliance(raw) for raw in self._get('event/%s/alliances' % event)]
def event_district_points(self, event):
"""
Get district point information about an event.
:param event: Key of event to get data on.
:return: Single DistrictPoints object.
"""
return DistrictPoints(self._get('event/%s/district_points' % event))
def event_insights(self, event):
"""
Get insights about an event.
:param event: Key of event to get data on.
:return: Single Insights object.
"""
return Insights(self._get('event/%s/insights' % event))
def event_oprs(self, event):
"""
Get OPRs from an event.
:param event: Key of event to get data on.
:return: Single OPRs object.
"""
return OPRs(self._get('event/%s/oprs' % event))
def event_predictions(self, event):
"""
Get predictions for matches during an event.
:param event: Key of event to get data on.
:return: Single Predictions object.
"""
return Predictions(self._get('event/%s/predictions' % event))
def event_rankings(self, event):
"""
Get rankings from an event.
:param event: Key of event to get data on.
:return: Single Rankings object.
"""
return Rankings(self._get('event/%s/rankings' % event))
def event_teams(self, event, simple=False, keys=False):
"""
Get list of teams at an event.
:param event: Event key to get data on.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects.
"""
if keys:
return self._get('event/%s/teams/keys' % event)
else:
return [Team(raw) for raw in self._get('event/%s/teams%s' % (event, '/simple' if simple else ''))]
def event_awards(self, event):
"""
Get list of awards presented at an event.
:param event: Event key to get data on.
:return: List of Award objects.
"""
return [Award(raw) for raw in self._get('event/%s/awards' % event)]
def event_matches(self, event, simple=False, keys=False):
"""
Get list of matches played at an event.
:param event: Event key to get data on.
:param keys: Return list of match keys only rather than full data on every match.
:param simple: Get only vital data.
:return: List of string keys or Match objects.
"""
if keys:
return self._get('event/%s/matches/keys' % event)
else:
return [Match(raw) for raw in self._get('event/%s/matches%s' % (event, '/simple' if simple else ''))]
def match(self, key=None, year=None, event=None, type='qm', number=None, round=None, simple=False):
"""
Get data on a match.
You may either pass the match's key directly, or pass `year`, `event`, `type`, `match` (the match number), and `round` if applicable (playoffs only). The event year may be specified as part of the event key or specified in the `year` parameter.
:param key: Key of match to get data on. First option for specifying a match (see above).
:param year: Year in which match took place. Optional; if excluded then must be included in event key.
:param event: Key of event in which match took place. Including year is optional; if excluded then must be specified in `year` parameter.
:param type: One of 'qm' (qualifier match), 'qf' (quarterfinal), 'sf' (semifinal), 'f' (final). If unspecified, 'qm' will be assumed.
:param number: Match number. For example, for qualifier 32, you'd pass 32. For Semifinal 2 round 3, you'd pass 2.
:param round: For playoff matches, you will need to specify a round.
:param simple: Get only vital data.
:return: A single Match object.
"""
if key:
return Match(self._get('match/%s%s' % (key, '/simple' if simple else '')))
else:
return Match(self._get('match/{year}{event}_{type}{number}{round}{simple}'.format(year=year if not event[0].isdigit() else '',
event=event,
type=type,
number=number,
round=('m%s' % round) if not type == 'qm' else '',
simple='/simple' if simple else '')))
def districts(self, year):
"""
Return a list of districts active.
:param year: Year from which you want to get active districts.
:return: A list of District objects.
"""
return [District(raw) for raw in self._get('districts/%s' % year)]
def district_events(self, district, simple=False, keys=False):
"""
Return list of events in a given district.
:param district: Key of district whose events you want.
:param simple: Get only vital data.
:param keys: Return list of event keys only rather than full data on every event.
:return: List of string keys or Event objects.
"""
if keys:
return self._get('district/%s/events/keys' % district)
else:
return [Event(raw) for raw in self._get('district/%s/events%s' % (district, '/simple' if simple else ''))]
def district_rankings(self, district):
"""
Return data about rankings in a given district.
:param district: Key of district to get rankings of.
:return: List of DistrictRanking objects.
"""
return [DistrictRanking(raw) for raw in self._get('district/%s/rankings' % district)]
def district_teams(self, district, simple=False, keys=False):
"""
Get list of teams in the given district.
:param district: Key for the district to get teams in.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects.
"""
if keys:
return self._get('district/%s/teams/keys' % district)
else:
return [Team(raw) for raw in self._get('district/%s/teams' % district)]
def update_trusted(self, auth_id, auth_secret, event_key):
"""
Set Trusted API ID and Secret and the event key they are assigned to.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.session.headers.update({'X-TBA-Auth-Id': auth_id})
self.auth_secret = auth_secret
self.event_key = event_key
def update_event_info(self, data):
"""
Update an event's info on The Blue Alliance.
:param data: Dictionary of data to update the event with.
"""
return self._post('event/%s/info/update', json.dumps(data))
def update_event_alliances(self, data):
"""
Update an event's alliances on The Blue Alliance.
:param data: List of lists of alliances in frc#### string format.
"""
return self._post('event/%s/alliance_selections/update', json.dumps(data))
def update_event_awards(self, data):
"""
Update an event's awards on The Blue Alliance.
:param data: List of Dictionaries of award winners. Each dictionary should have a name_str for the award name, team_key in frc#### string format, and the awardee for any awards given to individuals. The last two can be null
"""
return self._post('event/%s/awards/update', json.dumps(data))
def update_event_matches(self, data):
"""
Update an event's matches on The Blue Alliance.
:param data: List of Dictionaries. More info about the match data can be found in the API docs.
"""
return self._post('event/%s/matches/update', json.dumps(data))
def delete_event_matches(self, data=None):
"""
Delete an event's matches on The Blue Alliance.
:param data: List of match keys to delete, can be ommited if you would like to delete all matches.
"""
return self._post('event/%s/matches/delete_all' if data is None else 'event/%s/matches/delete', json.dumps(self.event_key) if data is None else json.dumps(data))
def update_event_rankings(self, data):
"""
Update an event's rankings on The Blue Alliance.
:param data: Dictionary of breakdowns and rankings. Rankings are a list of dictionaries.
"""
return self._post('event/%s/rankings/update', json.dumps(data))
def update_event_team_list(self, data):
"""
Update an event's team list on The Blue Alliance.
:param data: a list of team keys in frc#### string format.
"""
return self._post('event/%s/team_list/update', json.dumps(data))
def add_match_videos(self, data):
"""
Add match videos to the respective match pages of an event on The Blue Alliance.
:param data: Dictionary of partial match keys to youtube video ids.
"""
return self._post('event/%s/match_videos/add', json.dumps(data))
def add_event_videos(self, data):
"""
Add videos to an event's media tab on The Blue Alliance.
:param data: List of youtube video ids.
"""
return self._post('event/%s/media/add', json.dumps(data))
|
frc1418/tbapy | tbapy/main.py | TBA._post | python | def _post(self, url, data):
return self.session.post(self.WRITE_URL_PRE + url % self.event_key, data=data, headers={'X-TBA-Auth-Sig': md5((self.auth_secret + '/api/trusted/v1/' + url % self.event_key + data).encode('utf-8')).hexdigest()}) | Helper method: POST data to a given URL on TBA's API.
:param url: URL string to post data to and hash.
:pararm data: JSON data to post and hash.
:return: Requests Response object. | train | https://github.com/frc1418/tbapy/blob/3866d5a9971fe3dfaf1a1d83638bd6be6070f0c4/tbapy/main.py#L42-L51 | null | class TBA:
"""
Main library class.
Contains methods for interacting with The Blue Alliance.
"""
READ_URL_PRE = 'https://www.thebluealliance.com/api/v3/'
WRITE_URL_PRE = 'https://www.thebluealliance.com/api/trusted/v1/'
session = requests.Session()
auth_secret = ''
event_key = ''
def __init__(self, auth_key, auth_id='', auth_secret='', event_key=''):
"""
Store auth key so we can reuse it as many times as we make a request.
:param auth_key: Your application authorization key, obtainable at https://www.thebluealliance.com/account.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.auth_secret = auth_secret
self.event_key = event_key
self.session.headers.update({'X-TBA-Auth-Key': auth_key, 'X-TBA-Auth-Id': auth_id})
def _get(self, url):
"""
Helper method: GET data from given URL on TBA's API.
:param url: URL string to get data from.
:return: Requested data in JSON format.
"""
return self.session.get(self.READ_URL_PRE + url).json()
@staticmethod
def team_key(identifier):
"""
Take raw team number or string key and return string key.
Used by all team-related methods to support either an integer team number or team key being passed.
(We recommend passing an integer, just because it's cleaner. But whatever works.)
:param identifier: int team number or str 'frc####'
:return: string team key in format 'frc####'
"""
return identifier if type(identifier) == str else 'frc%s' % identifier
def status(self):
"""
Get TBA API status information.
:return: Data on current status of the TBA API as APIStatus object.
"""
return APIStatus(self._get('status'))
def teams(self, page=None, year=None, simple=False, keys=False):
"""
Get list of teams.
:param page: Page of teams to view. Each page contains 500 teams.
:param year: View teams from a specific year.
:param simple: Get only vital data.
:param keys: Set to true if you only want the teams' keys rather than full data on them.
:return: List of Team objects or string keys.
"""
# If the user has requested a specific page, get that page.
if page is not None:
if year:
if keys:
return self._get('teams/%s/%s/keys' % (year, page))
else:
return [Team(raw) for raw in self._get('teams/%s/%s%s' % (year, page, '/simple' if simple else ''))]
else:
if keys:
return self._get('teams/%s/keys' % page)
else:
return [Team(raw) for raw in self._get('teams/%s%s' % (page, '/simple' if simple else ''))]
# If no page was specified, get all of them and combine.
else:
teams = []
target = 0
while True:
page_teams = self.teams(page=target, year=year, simple=simple, keys=keys)
if page_teams:
teams.extend(page_teams)
else:
break
target += 1
return teams
def team(self, team, simple=False):
"""
Get data on a single specified team.
:param team: Team to get data for.
:param simple: Get only vital data.
:return: Team object with data on specified team.
"""
return Team(self._get('team/%s%s' % (self.team_key(team), '/simple' if simple else '')))
def team_events(self, team, year=None, simple=False, keys=False):
"""
Get team events a team has participated in.
:param team: Team to get events for.
:param year: Year to get events from.
:param simple: Get only vital data.
:param keys: Get just the keys of the events. Set to True if you only need the keys of each event and not their full data.
:return: List of strings or Teams
"""
if year:
if keys:
return self._get('team/%s/events/%s/keys' % (self.team_key(team), year))
else:
return [Event(raw) for raw in self._get('team/%s/events/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
else:
if keys:
return self._get('team/%s/events/keys' % self.team_key(team))
else:
return [Event(raw) for raw in self._get('team/%s/events%s' % (self.team_key(team), '/simple' if simple else ''))]
def team_awards(self, team, year=None, event=None):
"""
Get list of awards team has recieved.
:param team: Team to get awards of.
:param year: Year to get awards from.
:param event: Event to get awards from.
:return: List of Award objects
"""
if event:
return [Award(raw) for raw in self._get('team/%s/event/%s/awards' % (self.team_key(team), event))]
else:
if year:
return [Award(raw) for raw in self._get('team/%s/awards/%s' % (self.team_key(team), year))]
else:
return [Award(raw) for raw in self._get('team/%s/awards' % self.team_key(team))]
def team_matches(self, team, event=None, year=None, simple=False, keys=False):
"""
Get list of matches team has participated in.
:param team: Team to get matches of.
:param year: Year to get matches from.
:param event: Event to get matches from.
:param simple: Get only vital data.
:param keys: Only get match keys rather than their full data.
:return: List of string keys or Match objects.
"""
if event:
if keys:
return self._get('team/%s/event/%s/matches/keys' % (self.team_key(team), event))
else:
return [Match(raw) for raw in self._get('team/%s/event/%s/matches%s' % (self.team_key(team), event, '/simple' if simple else ''))]
elif year:
if keys:
return self._get('team/%s/matches/%s/keys' % (self.team_key(team), year))
else:
return [Match(raw) for raw in self._get('team/%s/matches/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
def team_years(self, team):
"""
Get years during which a team participated in FRC.
:param team: Key for team to get data about.
:return: List of integer years in which team participated.
"""
return self._get('team/%s/years_participated' % self.team_key(team))
def team_media(self, team, year=None, tag=None):
"""
Get media for a given team.
:param team: Team to get media of.
:param year: Year to get media from.
:param tag: Get only media with a given tag.
:return: List of Media objects.
"""
return [Media(raw) for raw in self._get('team/%s/media%s%s' % (self.team_key(team), ('/tag/%s' % tag) if tag else '', ('/%s' % year) if year else ''))]
def team_robots(self, team):
"""
Get data about a team's robots.
:param team: Key for team whose robots you want data on.
:return: List of Robot objects
"""
return [Robot(raw) for raw in self._get('team/%s/robots' % self.team_key(team))]
def team_districts(self, team):
"""
Get districts a team has competed in.
:param team: Team to get data on.
:return: List of District objects.
"""
return [District(raw) for raw in self._get('team/%s/districts' % self.team_key(team))]
def team_profiles(self, team):
"""
Get team's social media profiles linked on their TBA page.
:param team: Team to get data on.
:return: List of Profile objects.
"""
return [Profile(raw) for raw in self._get('team/%s/social_media' % self.team_key(team))]
def team_status(self, team, event):
"""
Get status of a team at an event.
:param team: Team whose status to get.
:param event: Event team is at.
:return: Status object.
"""
return Status(self._get('team/%s/event/%s/status' % (self.team_key(team), event)))
def events(self, year, simple=False, keys=False):
"""
Get a list of events in a given year.
:param year: Year to get events from.
:param keys: Get only keys of the events rather than full data.
:param simple: Get only vital data.
:return: List of string event keys or Event objects.
"""
if keys:
return self._get('events/%s/keys' % year)
else:
return [Event(raw) for raw in self._get('events/%s%s' % (year, '/simple' if simple else ''))]
def event(self, event, simple=False):
"""
Get basic information about an event.
More specific data (typically obtained with the detail_type URL parameter) can be obtained with event_alliances(), event_district_points(), event_insights(), event_oprs(), event_predictions(), and event_rankings().
:param event: Key of event for which you desire data.
:param simple: Get only vital data.
:return: A single Event object.
"""
return Event(self._get('event/%s%s' % (event, '/simple' if simple else '')))
def event_alliances(self, event):
"""
Get information about alliances at event.
:param event: Key of event to get data on.
:return: List of Alliance objects.
"""
return [Alliance(raw) for raw in self._get('event/%s/alliances' % event)]
def event_district_points(self, event):
"""
Get district point information about an event.
:param event: Key of event to get data on.
:return: Single DistrictPoints object.
"""
return DistrictPoints(self._get('event/%s/district_points' % event))
def event_insights(self, event):
"""
Get insights about an event.
:param event: Key of event to get data on.
:return: Single Insights object.
"""
return Insights(self._get('event/%s/insights' % event))
def event_oprs(self, event):
"""
Get OPRs from an event.
:param event: Key of event to get data on.
:return: Single OPRs object.
"""
return OPRs(self._get('event/%s/oprs' % event))
def event_predictions(self, event):
"""
Get predictions for matches during an event.
:param event: Key of event to get data on.
:return: Single Predictions object.
"""
return Predictions(self._get('event/%s/predictions' % event))
def event_rankings(self, event):
"""
Get rankings from an event.
:param event: Key of event to get data on.
:return: Single Rankings object.
"""
return Rankings(self._get('event/%s/rankings' % event))
def event_teams(self, event, simple=False, keys=False):
"""
Get list of teams at an event.
:param event: Event key to get data on.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects.
"""
if keys:
return self._get('event/%s/teams/keys' % event)
else:
return [Team(raw) for raw in self._get('event/%s/teams%s' % (event, '/simple' if simple else ''))]
def event_awards(self, event):
"""
Get list of awards presented at an event.
:param event: Event key to get data on.
:return: List of Award objects.
"""
return [Award(raw) for raw in self._get('event/%s/awards' % event)]
def event_matches(self, event, simple=False, keys=False):
"""
Get list of matches played at an event.
:param event: Event key to get data on.
:param keys: Return list of match keys only rather than full data on every match.
:param simple: Get only vital data.
:return: List of string keys or Match objects.
"""
if keys:
return self._get('event/%s/matches/keys' % event)
else:
return [Match(raw) for raw in self._get('event/%s/matches%s' % (event, '/simple' if simple else ''))]
def match(self, key=None, year=None, event=None, type='qm', number=None, round=None, simple=False):
"""
Get data on a match.
You may either pass the match's key directly, or pass `year`, `event`, `type`, `match` (the match number), and `round` if applicable (playoffs only). The event year may be specified as part of the event key or specified in the `year` parameter.
:param key: Key of match to get data on. First option for specifying a match (see above).
:param year: Year in which match took place. Optional; if excluded then must be included in event key.
:param event: Key of event in which match took place. Including year is optional; if excluded then must be specified in `year` parameter.
:param type: One of 'qm' (qualifier match), 'qf' (quarterfinal), 'sf' (semifinal), 'f' (final). If unspecified, 'qm' will be assumed.
:param number: Match number. For example, for qualifier 32, you'd pass 32. For Semifinal 2 round 3, you'd pass 2.
:param round: For playoff matches, you will need to specify a round.
:param simple: Get only vital data.
:return: A single Match object.
"""
if key:
return Match(self._get('match/%s%s' % (key, '/simple' if simple else '')))
else:
return Match(self._get('match/{year}{event}_{type}{number}{round}{simple}'.format(year=year if not event[0].isdigit() else '',
event=event,
type=type,
number=number,
round=('m%s' % round) if not type == 'qm' else '',
simple='/simple' if simple else '')))
def districts(self, year):
"""
Return a list of districts active.
:param year: Year from which you want to get active districts.
:return: A list of District objects.
"""
return [District(raw) for raw in self._get('districts/%s' % year)]
def district_events(self, district, simple=False, keys=False):
"""
Return list of events in a given district.
:param district: Key of district whose events you want.
:param simple: Get only vital data.
:param keys: Return list of event keys only rather than full data on every event.
:return: List of string keys or Event objects.
"""
if keys:
return self._get('district/%s/events/keys' % district)
else:
return [Event(raw) for raw in self._get('district/%s/events%s' % (district, '/simple' if simple else ''))]
def district_rankings(self, district):
"""
Return data about rankings in a given district.
:param district: Key of district to get rankings of.
:return: List of DistrictRanking objects.
"""
return [DistrictRanking(raw) for raw in self._get('district/%s/rankings' % district)]
def district_teams(self, district, simple=False, keys=False):
"""
Get list of teams in the given district.
:param district: Key for the district to get teams in.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects.
"""
if keys:
return self._get('district/%s/teams/keys' % district)
else:
return [Team(raw) for raw in self._get('district/%s/teams' % district)]
def update_trusted(self, auth_id, auth_secret, event_key):
"""
Set Trusted API ID and Secret and the event key they are assigned to.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.session.headers.update({'X-TBA-Auth-Id': auth_id})
self.auth_secret = auth_secret
self.event_key = event_key
def update_event_info(self, data):
"""
Update an event's info on The Blue Alliance.
:param data: Dictionary of data to update the event with.
"""
return self._post('event/%s/info/update', json.dumps(data))
def update_event_alliances(self, data):
"""
Update an event's alliances on The Blue Alliance.
:param data: List of lists of alliances in frc#### string format.
"""
return self._post('event/%s/alliance_selections/update', json.dumps(data))
def update_event_awards(self, data):
"""
Update an event's awards on The Blue Alliance.
:param data: List of Dictionaries of award winners. Each dictionary should have a name_str for the award name, team_key in frc#### string format, and the awardee for any awards given to individuals. The last two can be null
"""
return self._post('event/%s/awards/update', json.dumps(data))
def update_event_matches(self, data):
"""
Update an event's matches on The Blue Alliance.
:param data: List of Dictionaries. More info about the match data can be found in the API docs.
"""
return self._post('event/%s/matches/update', json.dumps(data))
def delete_event_matches(self, data=None):
"""
Delete an event's matches on The Blue Alliance.
:param data: List of match keys to delete, can be omitted if you would like to delete all matches.
"""
return self._post('event/%s/matches/delete_all' if data is None else 'event/%s/matches/delete', json.dumps(self.event_key) if data is None else json.dumps(data))
def update_event_rankings(self, data):
"""
Update an event's rankings on The Blue Alliance.
:param data: Dictionary of breakdowns and rankings. Rankings are a list of dictionaries.
"""
return self._post('event/%s/rankings/update', json.dumps(data))
def update_event_team_list(self, data):
"""
Update an event's team list on The Blue Alliance.
:param data: a list of team keys in frc#### string format.
"""
return self._post('event/%s/team_list/update', json.dumps(data))
def add_match_videos(self, data):
"""
Add match videos to the respective match pages of an event on The Blue Alliance.
:param data: Dictionary of partial match keys to youtube video ids.
"""
return self._post('event/%s/match_videos/add', json.dumps(data))
def add_event_videos(self, data):
"""
Add videos to an event's media tab on The Blue Alliance.
:param data: List of youtube video ids.
"""
return self._post('event/%s/media/add', json.dumps(data))
|
frc1418/tbapy | tbapy/main.py | TBA.teams | python | def teams(self, page=None, year=None, simple=False, keys=False):
# If the user has requested a specific page, get that page.
if page is not None:
if year:
if keys:
return self._get('teams/%s/%s/keys' % (year, page))
else:
return [Team(raw) for raw in self._get('teams/%s/%s%s' % (year, page, '/simple' if simple else ''))]
else:
if keys:
return self._get('teams/%s/keys' % page)
else:
return [Team(raw) for raw in self._get('teams/%s%s' % (page, '/simple' if simple else ''))]
# If no page was specified, get all of them and combine.
else:
teams = []
target = 0
while True:
page_teams = self.teams(page=target, year=year, simple=simple, keys=keys)
if page_teams:
teams.extend(page_teams)
else:
break
target += 1
return teams | Get list of teams.
:param page: Page of teams to view. Each page contains 500 teams.
:param year: View teams from a specific year.
:param simple: Get only vital data.
:param keys: Set to true if you only want the teams' keys rather than full data on them.
:return: List of Team objects or string keys. | train | https://github.com/frc1418/tbapy/blob/3866d5a9971fe3dfaf1a1d83638bd6be6070f0c4/tbapy/main.py#L75-L108 | [
"def _get(self, url):\n \"\"\"\n Helper method: GET data from given URL on TBA's API.\n\n :param url: URL string to get data from.\n :return: Requested data in JSON format.\n \"\"\"\n return self.session.get(self.READ_URL_PRE + url).json()\n",
"def teams(self, page=None, year=None, simple=False, keys=False):\n \"\"\"\n Get list of teams.\n\n :param page: Page of teams to view. Each page contains 500 teams.\n :param year: View teams from a specific year.\n :param simple: Get only vital data.\n :param keys: Set to true if you only want the teams' keys rather than full data on them.\n :return: List of Team objects or string keys.\n \"\"\"\n # If the user has requested a specific page, get that page.\n if page is not None:\n if year:\n if keys:\n return self._get('teams/%s/%s/keys' % (year, page))\n else:\n return [Team(raw) for raw in self._get('teams/%s/%s%s' % (year, page, '/simple' if simple else ''))]\n else:\n if keys:\n return self._get('teams/%s/keys' % page)\n else:\n return [Team(raw) for raw in self._get('teams/%s%s' % (page, '/simple' if simple else ''))]\n # If no page was specified, get all of them and combine.\n else:\n teams = []\n target = 0\n while True:\n page_teams = self.teams(page=target, year=year, simple=simple, keys=keys)\n if page_teams:\n teams.extend(page_teams)\n else:\n break\n target += 1\n return teams\n"
] | class TBA:
"""
Main library class.
Contains methods for interacting with The Blue Alliance.
"""
READ_URL_PRE = 'https://www.thebluealliance.com/api/v3/'
WRITE_URL_PRE = 'https://www.thebluealliance.com/api/trusted/v1/'
session = requests.Session()
auth_secret = ''
event_key = ''
def __init__(self, auth_key, auth_id='', auth_secret='', event_key=''):
"""
Store auth key so we can reuse it as many times as we make a request.
:param auth_key: Your application authorization key, obtainable at https://www.thebluealliance.com/account.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.auth_secret = auth_secret
self.event_key = event_key
self.session.headers.update({'X-TBA-Auth-Key': auth_key, 'X-TBA-Auth-Id': auth_id})
def _get(self, url):
"""
Helper method: GET data from given URL on TBA's API.
:param url: URL string to get data from.
:return: Requested data in JSON format.
"""
return self.session.get(self.READ_URL_PRE + url).json()
def _post(self, url, data):
"""
Helper method: POST data to a given URL on TBA's API.
:param url: URL string to post data to and hash.
:param data: JSON data to post and hash.
:return: Requests Response object.
"""
return self.session.post(self.WRITE_URL_PRE + url % self.event_key, data=data, headers={'X-TBA-Auth-Sig': md5((self.auth_secret + '/api/trusted/v1/' + url % self.event_key + data).encode('utf-8')).hexdigest()})
@staticmethod
def team_key(identifier):
"""
Take raw team number or string key and return string key.
Used by all team-related methods to support either an integer team number or team key being passed.
(We recommend passing an integer, just because it's cleaner. But whatever works.)
:param identifier: int team number or str 'frc####'
:return: string team key in format 'frc####'
"""
return identifier if type(identifier) == str else 'frc%s' % identifier
def status(self):
"""
Get TBA API status information.
:return: Data on current status of the TBA API as APIStatus object.
"""
return APIStatus(self._get('status'))
def team(self, team, simple=False):
"""
Get data on a single specified team.
:param team: Team to get data for.
:param simple: Get only vital data.
:return: Team object with data on specified team.
"""
return Team(self._get('team/%s%s' % (self.team_key(team), '/simple' if simple else '')))
def team_events(self, team, year=None, simple=False, keys=False):
"""
Get team events a team has participated in.
:param team: Team to get events for.
:param year: Year to get events from.
:param simple: Get only vital data.
:param keys: Get just the keys of the events. Set to True if you only need the keys of each event and not their full data.
:return: List of strings or Teams
"""
if year:
if keys:
return self._get('team/%s/events/%s/keys' % (self.team_key(team), year))
else:
return [Event(raw) for raw in self._get('team/%s/events/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
else:
if keys:
return self._get('team/%s/events/keys' % self.team_key(team))
else:
return [Event(raw) for raw in self._get('team/%s/events%s' % (self.team_key(team), '/simple' if simple else ''))]
def team_awards(self, team, year=None, event=None):
"""
Get list of awards team has received.
:param team: Team to get awards of.
:param year: Year to get awards from.
:param event: Event to get awards from.
:return: List of Award objects
"""
if event:
return [Award(raw) for raw in self._get('team/%s/event/%s/awards' % (self.team_key(team), event))]
else:
if year:
return [Award(raw) for raw in self._get('team/%s/awards/%s' % (self.team_key(team), year))]
else:
return [Award(raw) for raw in self._get('team/%s/awards' % self.team_key(team))]
def team_matches(self, team, event=None, year=None, simple=False, keys=False):
"""
Get list of matches team has participated in.
:param team: Team to get matches of.
:param year: Year to get matches from.
:param event: Event to get matches from.
:param simple: Get only vital data.
:param keys: Only get match keys rather than their full data.
:return: List of string keys or Match objects.
"""
if event:
if keys:
return self._get('team/%s/event/%s/matches/keys' % (self.team_key(team), event))
else:
return [Match(raw) for raw in self._get('team/%s/event/%s/matches%s' % (self.team_key(team), event, '/simple' if simple else ''))]
elif year:
if keys:
return self._get('team/%s/matches/%s/keys' % (self.team_key(team), year))
else:
return [Match(raw) for raw in self._get('team/%s/matches/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
def team_years(self, team):
"""
Get years during which a team participated in FRC.
:param team: Key for team to get data about.
:return: List of integer years in which team participated.
"""
return self._get('team/%s/years_participated' % self.team_key(team))
def team_media(self, team, year=None, tag=None):
"""
Get media for a given team.
:param team: Team to get media of.
:param year: Year to get media from.
:param tag: Get only media with a given tag.
:return: List of Media objects.
"""
return [Media(raw) for raw in self._get('team/%s/media%s%s' % (self.team_key(team), ('/tag/%s' % tag) if tag else '', ('/%s' % year) if year else ''))]
def team_robots(self, team):
"""
Get data about a team's robots.
:param team: Key for team whose robots you want data on.
:return: List of Robot objects
"""
return [Robot(raw) for raw in self._get('team/%s/robots' % self.team_key(team))]
def team_districts(self, team):
"""
Get districts a team has competed in.
:param team: Team to get data on.
:return: List of District objects.
"""
return [District(raw) for raw in self._get('team/%s/districts' % self.team_key(team))]
def team_profiles(self, team):
"""
Get team's social media profiles linked on their TBA page.
:param team: Team to get data on.
:return: List of Profile objects.
"""
return [Profile(raw) for raw in self._get('team/%s/social_media' % self.team_key(team))]
def team_status(self, team, event):
"""
Get status of a team at an event.
:param team: Team whose status to get.
:param event: Event team is at.
:return: Status object.
"""
return Status(self._get('team/%s/event/%s/status' % (self.team_key(team), event)))
def events(self, year, simple=False, keys=False):
"""
Get a list of events in a given year.
:param year: Year to get events from.
:param keys: Get only keys of the events rather than full data.
:param simple: Get only vital data.
:return: List of string event keys or Event objects.
"""
if keys:
return self._get('events/%s/keys' % year)
else:
return [Event(raw) for raw in self._get('events/%s%s' % (year, '/simple' if simple else ''))]
def event(self, event, simple=False):
"""
Get basic information about an event.
More specific data (typically obtained with the detail_type URL parameter) can be obtained with event_alliances(), event_district_points(), event_insights(), event_oprs(), event_predictions(), and event_rankings().
:param event: Key of event for which you desire data.
:param simple: Get only vital data.
:return: A single Event object.
"""
return Event(self._get('event/%s%s' % (event, '/simple' if simple else '')))
def event_alliances(self, event):
"""
Get information about alliances at event.
:param event: Key of event to get data on.
:return: List of Alliance objects.
"""
return [Alliance(raw) for raw in self._get('event/%s/alliances' % event)]
def event_district_points(self, event):
"""
Get district point information about an event.
:param event: Key of event to get data on.
:return: Single DistrictPoints object.
"""
return DistrictPoints(self._get('event/%s/district_points' % event))
def event_insights(self, event):
"""
Get insights about an event.
:param event: Key of event to get data on.
:return: Single Insights object.
"""
return Insights(self._get('event/%s/insights' % event))
def event_oprs(self, event):
"""
Get OPRs from an event.
:param event: Key of event to get data on.
:return: Single OPRs object.
"""
return OPRs(self._get('event/%s/oprs' % event))
def event_predictions(self, event):
"""
Get predictions for matches during an event.
:param event: Key of event to get data on.
:return: Single Predictions object.
"""
return Predictions(self._get('event/%s/predictions' % event))
def event_rankings(self, event):
"""
Get rankings from an event.
:param event: Key of event to get data on.
:return: Single Rankings object.
"""
return Rankings(self._get('event/%s/rankings' % event))
def event_teams(self, event, simple=False, keys=False):
"""
Get list of teams at an event.
:param event: Event key to get data on.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects.
"""
if keys:
return self._get('event/%s/teams/keys' % event)
else:
return [Team(raw) for raw in self._get('event/%s/teams%s' % (event, '/simple' if simple else ''))]
def event_awards(self, event):
"""
Get list of awards presented at an event.
:param event: Event key to get data on.
:return: List of Award objects.
"""
return [Award(raw) for raw in self._get('event/%s/awards' % event)]
def event_matches(self, event, simple=False, keys=False):
"""
Get list of matches played at an event.
:param event: Event key to get data on.
:param keys: Return list of match keys only rather than full data on every match.
:param simple: Get only vital data.
:return: List of string keys or Match objects.
"""
if keys:
return self._get('event/%s/matches/keys' % event)
else:
return [Match(raw) for raw in self._get('event/%s/matches%s' % (event, '/simple' if simple else ''))]
def match(self, key=None, year=None, event=None, type='qm', number=None, round=None, simple=False):
"""
Get data on a match.
You may either pass the match's key directly, or pass `year`, `event`, `type`, `number` (the match number), and `round` if applicable (playoffs only). The event year may be specified as part of the event key or specified in the `year` parameter.
:param key: Key of match to get data on. First option for specifying a match (see above).
:param year: Year in which match took place. Optional; if excluded then must be included in event key.
:param event: Key of event in which match took place. Including year is optional; if excluded then must be specified in `year` parameter.
:param type: One of 'qm' (qualifier match), 'qf' (quarterfinal), 'sf' (semifinal), 'f' (final). If unspecified, 'qm' will be assumed.
:param number: Match number. For example, for qualifier 32, you'd pass 32. For Semifinal 2 round 3, you'd pass 2.
:param round: For playoff matches, you will need to specify a round.
:param simple: Get only vital data.
:return: A single Match object.
"""
if key:
return Match(self._get('match/%s%s' % (key, '/simple' if simple else '')))
else:
return Match(self._get('match/{year}{event}_{type}{number}{round}{simple}'.format(year=year if not event[0].isdigit() else '',
event=event,
type=type,
number=number,
round=('m%s' % round) if not type == 'qm' else '',
simple='/simple' if simple else '')))
def districts(self, year):
"""
Return a list of districts active.
:param year: Year from which you want to get active districts.
:return: A list of District objects.
"""
return [District(raw) for raw in self._get('districts/%s' % year)]
def district_events(self, district, simple=False, keys=False):
"""
Return list of events in a given district.
:param district: Key of district whose events you want.
:param simple: Get only vital data.
:param keys: Return list of event keys only rather than full data on every event.
:return: List of string keys or Event objects.
"""
if keys:
return self._get('district/%s/events/keys' % district)
else:
return [Event(raw) for raw in self._get('district/%s/events%s' % (district, '/simple' if simple else ''))]
def district_rankings(self, district):
"""
Return data about rankings in a given district.
:param district: Key of district to get rankings of.
:return: List of DistrictRanking objects.
"""
return [DistrictRanking(raw) for raw in self._get('district/%s/rankings' % district)]
def district_teams(self, district, simple=False, keys=False):
"""
Get list of teams in the given district.
:param district: Key for the district to get teams in.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects.
"""
if keys:
return self._get('district/%s/teams/keys' % district)
else:
return [Team(raw) for raw in self._get('district/%s/teams' % district)]
def update_trusted(self, auth_id, auth_secret, event_key):
"""
Set Trusted API ID and Secret and the event key they are assigned to.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.session.headers.update({'X-TBA-Auth-Id': auth_id})
self.auth_secret = auth_secret
self.event_key = event_key
def update_event_info(self, data):
"""
Update an event's info on The Blue Alliance.
:param data: Dictionary of data to update the event with.
"""
return self._post('event/%s/info/update', json.dumps(data))
def update_event_alliances(self, data):
"""
Update an event's alliances on The Blue Alliance.
:param data: List of lists of alliances in frc#### string format.
"""
return self._post('event/%s/alliance_selections/update', json.dumps(data))
def update_event_awards(self, data):
"""
Update an event's awards on The Blue Alliance.
:param data: List of Dictionaries of award winners. Each dictionary should have a name_str for the award name, team_key in frc#### string format, and the awardee for any awards given to individuals. The last two can be null
"""
return self._post('event/%s/awards/update', json.dumps(data))
def update_event_matches(self, data):
"""
Update an event's matches on The Blue Alliance.
:param data: List of Dictionaries. More info about the match data can be found in the API docs.
"""
return self._post('event/%s/matches/update', json.dumps(data))
def delete_event_matches(self, data=None):
"""
Delete an event's matches on The Blue Alliance.
:param data: List of match keys to delete, can be omitted if you would like to delete all matches.
"""
return self._post('event/%s/matches/delete_all' if data is None else 'event/%s/matches/delete', json.dumps(self.event_key) if data is None else json.dumps(data))
def update_event_rankings(self, data):
"""
Update an event's rankings on The Blue Alliance.
:param data: Dictionary of breakdowns and rankings. Rankings are a list of dictionaries.
"""
return self._post('event/%s/rankings/update', json.dumps(data))
def update_event_team_list(self, data):
"""
Update an event's team list on The Blue Alliance.
:param data: a list of team keys in frc#### string format.
"""
return self._post('event/%s/team_list/update', json.dumps(data))
def add_match_videos(self, data):
"""
Add match videos to the respective match pages of an event on The Blue Alliance.
:param data: Dictionary of partial match keys to youtube video ids.
"""
return self._post('event/%s/match_videos/add', json.dumps(data))
def add_event_videos(self, data):
"""
Add videos to an event's media tab on The Blue Alliance.
:param data: List of youtube video ids.
"""
return self._post('event/%s/media/add', json.dumps(data))
|
frc1418/tbapy | tbapy/main.py | TBA.team | python | def team(self, team, simple=False):
return Team(self._get('team/%s%s' % (self.team_key(team), '/simple' if simple else ''))) | Get data on a single specified team.
:param team: Team to get data for.
:param simple: Get only vital data.
:return: Team object with data on specified team. | train | https://github.com/frc1418/tbapy/blob/3866d5a9971fe3dfaf1a1d83638bd6be6070f0c4/tbapy/main.py#L110-L118 | [
"def _get(self, url):\n \"\"\"\n Helper method: GET data from given URL on TBA's API.\n\n :param url: URL string to get data from.\n :return: Requested data in JSON format.\n \"\"\"\n return self.session.get(self.READ_URL_PRE + url).json()\n",
"def team_key(identifier):\n \"\"\"\n Take raw team number or string key and return string key.\n\n Used by all team-related methods to support either an integer team number or team key being passed.\n\n (We recommend passing an integer, just because it's cleaner. But whatever works.)\n\n :param identifier: int team number or str 'frc####'\n :return: string team key in format 'frc####'\n \"\"\"\n return identifier if type(identifier) == str else 'frc%s' % identifier\n"
] | class TBA:
"""
Main library class.
Contains methods for interacting with The Blue Alliance.
"""
READ_URL_PRE = 'https://www.thebluealliance.com/api/v3/'
WRITE_URL_PRE = 'https://www.thebluealliance.com/api/trusted/v1/'
session = requests.Session()
auth_secret = ''
event_key = ''
def __init__(self, auth_key, auth_id='', auth_secret='', event_key=''):
"""
Store auth key so we can reuse it as many times as we make a request.
:param auth_key: Your application authorization key, obtainable at https://www.thebluealliance.com/account.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.auth_secret = auth_secret
self.event_key = event_key
self.session.headers.update({'X-TBA-Auth-Key': auth_key, 'X-TBA-Auth-Id': auth_id})
def _get(self, url):
"""
Helper method: GET data from given URL on TBA's API.
:param url: URL string to get data from.
:return: Requested data in JSON format.
"""
return self.session.get(self.READ_URL_PRE + url).json()
def _post(self, url, data):
"""
Helper method: POST data to a given URL on TBA's API.
:param url: URL string to post data to and hash.
:pararm data: JSON data to post and hash.
:return: Requests Response object.
"""
return self.session.post(self.WRITE_URL_PRE + url % self.event_key, data=data, headers={'X-TBA-Auth-Sig': md5((self.auth_secret + '/api/trusted/v1/' + url % self.event_key + data).encode('utf-8')).hexdigest()})
@staticmethod
def team_key(identifier):
"""
Take raw team number or string key and return string key.
Used by all team-related methods to support either an integer team number or team key being passed.
(We recommend passing an integer, just because it's cleaner. But whatever works.)
:param identifier: int team number or str 'frc####'
:return: string team key in format 'frc####'
"""
return identifier if type(identifier) == str else 'frc%s' % identifier
def status(self):
"""
Get TBA API status information.
:return: Data on current status of the TBA API as APIStatus object.
"""
return APIStatus(self._get('status'))
def teams(self, page=None, year=None, simple=False, keys=False):
"""
Get list of teams.
:param page: Page of teams to view. Each page contains 500 teams.
:param year: View teams from a specific year.
:param simple: Get only vital data.
:param keys: Set to true if you only want the teams' keys rather than full data on them.
:return: List of Team objects or string keys.
"""
# If the user has requested a specific page, get that page.
if page is not None:
if year:
if keys:
return self._get('teams/%s/%s/keys' % (year, page))
else:
return [Team(raw) for raw in self._get('teams/%s/%s%s' % (year, page, '/simple' if simple else ''))]
else:
if keys:
return self._get('teams/%s/keys' % page)
else:
return [Team(raw) for raw in self._get('teams/%s%s' % (page, '/simple' if simple else ''))]
# If no page was specified, get all of them and combine.
else:
teams = []
target = 0
while True:
page_teams = self.teams(page=target, year=year, simple=simple, keys=keys)
if page_teams:
teams.extend(page_teams)
else:
break
target += 1
return teams
def team_events(self, team, year=None, simple=False, keys=False):
"""
Get team events a team has participated in.
:param team: Team to get events for.
:param year: Year to get events from.
:param simple: Get only vital data.
:param keys: Get just the keys of the events. Set to True if you only need the keys of each event and not their full data.
:return: List of strings or Teams
"""
if year:
if keys:
return self._get('team/%s/events/%s/keys' % (self.team_key(team), year))
else:
return [Event(raw) for raw in self._get('team/%s/events/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
else:
if keys:
return self._get('team/%s/events/keys' % self.team_key(team))
else:
return [Event(raw) for raw in self._get('team/%s/events%s' % (self.team_key(team), '/simple' if simple else ''))]
def team_awards(self, team, year=None, event=None):
"""
Get list of awards team has received.
:param team: Team to get awards of.
:param year: Year to get awards from.
:param event: Event to get awards from.
:return: List of Award objects
"""
if event:
return [Award(raw) for raw in self._get('team/%s/event/%s/awards' % (self.team_key(team), event))]
else:
if year:
return [Award(raw) for raw in self._get('team/%s/awards/%s' % (self.team_key(team), year))]
else:
return [Award(raw) for raw in self._get('team/%s/awards' % self.team_key(team))]
def team_matches(self, team, event=None, year=None, simple=False, keys=False):
"""
Get list of matches team has participated in.
:param team: Team to get matches of.
:param year: Year to get matches from.
:param event: Event to get matches from.
:param simple: Get only vital data.
:param keys: Only get match keys rather than their full data.
:return: List of string keys or Match objects.
"""
if event:
if keys:
return self._get('team/%s/event/%s/matches/keys' % (self.team_key(team), event))
else:
return [Match(raw) for raw in self._get('team/%s/event/%s/matches%s' % (self.team_key(team), event, '/simple' if simple else ''))]
elif year:
if keys:
return self._get('team/%s/matches/%s/keys' % (self.team_key(team), year))
else:
return [Match(raw) for raw in self._get('team/%s/matches/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
def team_years(self, team):
"""
Get years during which a team participated in FRC.
:param team: Key for team to get data about.
:return: List of integer years in which team participated.
"""
return self._get('team/%s/years_participated' % self.team_key(team))
def team_media(self, team, year=None, tag=None):
"""
Get media for a given team.
:param team: Team to get media of.
:param year: Year to get media from.
:param tag: Get only media with a given tag.
:return: List of Media objects.
"""
return [Media(raw) for raw in self._get('team/%s/media%s%s' % (self.team_key(team), ('/tag/%s' % tag) if tag else '', ('/%s' % year) if year else ''))]
def team_robots(self, team):
"""
Get data about a team's robots.
:param team: Key for team whose robots you want data on.
:return: List of Robot objects
"""
return [Robot(raw) for raw in self._get('team/%s/robots' % self.team_key(team))]
def team_districts(self, team):
"""
Get districts a team has competed in.
:param team: Team to get data on.
:return: List of District objects.
"""
return [District(raw) for raw in self._get('team/%s/districts' % self.team_key(team))]
def team_profiles(self, team):
"""
Get team's social media profiles linked on their TBA page.
:param team: Team to get data on.
:return: List of Profile objects.
"""
return [Profile(raw) for raw in self._get('team/%s/social_media' % self.team_key(team))]
def team_status(self, team, event):
"""
Get status of a team at an event.
:param team: Team whose status to get.
:param event: Event team is at.
:return: Status object.
"""
return Status(self._get('team/%s/event/%s/status' % (self.team_key(team), event)))
def events(self, year, simple=False, keys=False):
"""
Get a list of events in a given year.
:param year: Year to get events from.
:param keys: Get only keys of the events rather than full data.
:param simple: Get only vital data.
:return: List of string event keys or Event objects.
"""
if keys:
return self._get('events/%s/keys' % year)
else:
return [Event(raw) for raw in self._get('events/%s%s' % (year, '/simple' if simple else ''))]
def event(self, event, simple=False):
"""
Get basic information about an event.
More specific data (typically obtained with the detail_type URL parameter) can be obtained with event_alliances(), event_district_points(), event_insights(), event_oprs(), event_predictions(), and event_rankings().
:param event: Key of event for which you desire data.
:param simple: Get only vital data.
:return: A single Event object.
"""
return Event(self._get('event/%s%s' % (event, '/simple' if simple else '')))
def event_alliances(self, event):
"""
Get information about alliances at event.
:param event: Key of event to get data on.
:return: List of Alliance objects.
"""
return [Alliance(raw) for raw in self._get('event/%s/alliances' % event)]
def event_district_points(self, event):
"""
Get district point information about an event.
:param event: Key of event to get data on.
:return: Single DistrictPoints object.
"""
return DistrictPoints(self._get('event/%s/district_points' % event))
def event_insights(self, event):
"""
Get insights about an event.
:param event: Key of event to get data on.
:return: Single Insights object.
"""
return Insights(self._get('event/%s/insights' % event))
def event_oprs(self, event):
"""
Get OPRs from an event.
:param event: Key of event to get data on.
:return: Single OPRs object.
"""
return OPRs(self._get('event/%s/oprs' % event))
def event_predictions(self, event):
"""
Get predictions for matches during an event.
:param event: Key of event to get data on.
:return: Single Predictions object.
"""
return Predictions(self._get('event/%s/predictions' % event))
def event_rankings(self, event):
"""
Get rankings from an event.
:param event: Key of event to get data on.
:return: Single Rankings object.
"""
return Rankings(self._get('event/%s/rankings' % event))
def event_teams(self, event, simple=False, keys=False):
"""
Get list of teams at an event.
:param event: Event key to get data on.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects.
"""
if keys:
return self._get('event/%s/teams/keys' % event)
else:
return [Team(raw) for raw in self._get('event/%s/teams%s' % (event, '/simple' if simple else ''))]
def event_awards(self, event):
"""
Get list of awards presented at an event.
:param event: Event key to get data on.
:return: List of Award objects.
"""
return [Award(raw) for raw in self._get('event/%s/awards' % event)]
def event_matches(self, event, simple=False, keys=False):
"""
Get list of matches played at an event.
:param event: Event key to get data on.
:param keys: Return list of match keys only rather than full data on every match.
:param simple: Get only vital data.
:return: List of string keys or Match objects.
"""
if keys:
return self._get('event/%s/matches/keys' % event)
else:
return [Match(raw) for raw in self._get('event/%s/matches%s' % (event, '/simple' if simple else ''))]
def match(self, key=None, year=None, event=None, type='qm', number=None, round=None, simple=False):
    """Get data on a single match.

    Either pass the full match key via ``key``, or describe the match with
    ``year``, ``event``, ``type``, ``number`` (and ``round`` for playoff
    matches). The event year may be embedded in the event key or supplied
    separately through the ``year`` parameter.

    :param key: Full key of the match. First way of identifying a match.
    :param year: Year the match took place; optional if the event key already begins with it.
    :param event: Key of the event the match was played at, with or without the year prefix.
    :param type: One of 'qm' (qualifier), 'qf' (quarterfinal), 'sf' (semifinal), 'f' (final); defaults to 'qm'.
    :param number: Match (or playoff series) number, e.g. 32 for qualifier 32, 2 for Semifinal 2.
    :param round: Round within a playoff series; required for non-'qm' types.
    :param simple: Get only vital data.
    :return: A single Match object.
    """
    detail = '/simple' if simple else ''
    if key:
        return Match(self._get('match/%s%s' % (key, detail)))
    # Only prepend the year when the event key does not already start with it.
    year_part = '' if event[0].isdigit() else '{}'.format(year)
    round_part = '' if type == 'qm' else 'm{}'.format(round)
    path = 'match/{}{}_{}{}{}{}'.format(year_part, event, type, number, round_part, detail)
    return Match(self._get(path))
def districts(self, year):
"""
Return a list of districts active.
:param year: Year from which you want to get active districts.
:return: A list of District objects.
"""
return [District(raw) for raw in self._get('districts/%s' % year)]
def district_events(self, district, simple=False, keys=False):
    """Fetch the events held in a given district.

    :param district: Key of the district to query.
    :param simple: If True, request only vital data on each event.
    :param keys: If True, return only event key strings instead of Event objects.
    :return: List of string keys or Event objects.
    """
    if keys:
        return self._get('district/%s/events/keys' % district)
    detail = '/simple' if simple else ''
    return [Event(raw) for raw in self._get('district/%s/events%s' % (district, detail))]
def district_rankings(self, district):
"""
Return data about rankings in a given district.
:param district: Key of district to get rankings of.
:return: List of DistrictRanking objects.
"""
return [DistrictRanking(raw) for raw in self._get('district/%s/rankings' % district)]
def district_teams(self, district, simple=False, keys=False):
    """Get list of teams in the given district.

    :param district: Key for the district to get teams in.
    :param simple: Get only vital data.
    :param keys: Return list of team keys only rather than full data on every team.
    :return: List of string keys or Team objects.
    """
    if keys:
        return self._get('district/%s/teams/keys' % district)
    else:
        # Honor the `simple` flag like the other list endpoints (district_events,
        # event_teams) do; previously it was accepted but silently ignored.
        return [Team(raw) for raw in self._get('district/%s/teams%s' % (district, '/simple' if simple else ''))]
def update_trusted(self, auth_id, auth_secret, event_key):
    """Configure the Trusted (write) API credentials and their target event.

    :param auth_id: Event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
    :param auth_secret: Event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
    :param event_key: The event key that the ID and secret are bound to.
    """
    self.auth_secret = auth_secret
    self.event_key = event_key
    self.session.headers.update({'X-TBA-Auth-Id': auth_id})
def update_event_info(self, data):
    """Push updated event info to The Blue Alliance.

    :param data: Dictionary of fields to update on the event.
    """
    payload = json.dumps(data)
    return self._post('event/%s/info/update', payload)
def update_event_alliances(self, data):
    """Push updated alliance selections to The Blue Alliance.

    :param data: List of lists of alliance members in 'frc####' string format.
    """
    payload = json.dumps(data)
    return self._post('event/%s/alliance_selections/update', payload)
def update_event_awards(self, data):
    """Push updated award winners to The Blue Alliance.

    :param data: List of dictionaries describing award winners. Each should carry
        a name_str for the award name, a team_key in 'frc####' format, and an
        awardee for awards given to individuals; the last two may be null.
    """
    payload = json.dumps(data)
    return self._post('event/%s/awards/update', payload)
def update_event_matches(self, data):
    """Push updated match results to The Blue Alliance.

    :param data: List of dictionaries of match data; see the Trusted API docs
        for the expected fields.
    """
    payload = json.dumps(data)
    return self._post('event/%s/matches/update', payload)
def delete_event_matches(self, data=None):
    """Delete matches from an event on The Blue Alliance.

    :param data: List of match keys to delete; may be omitted to delete all
        of the event's matches.
    """
    if data is None:
        # No keys supplied: wipe every match. The delete_all endpoint expects
        # the event key itself as the request body.
        return self._post('event/%s/matches/delete_all', json.dumps(self.event_key))
    return self._post('event/%s/matches/delete', json.dumps(data))
def update_event_rankings(self, data):
    """Push updated rankings to The Blue Alliance.

    :param data: Dictionary of breakdowns and rankings; rankings are a list
        of dictionaries.
    """
    payload = json.dumps(data)
    return self._post('event/%s/rankings/update', payload)
def update_event_team_list(self, data):
    """Push an updated team list to The Blue Alliance.

    :param data: List of team keys in 'frc####' string format.
    """
    payload = json.dumps(data)
    return self._post('event/%s/team_list/update', payload)
def add_match_videos(self, data):
    """Attach YouTube videos to their respective match pages on The Blue Alliance.

    :param data: Dictionary mapping partial match keys to YouTube video ids.
    """
    payload = json.dumps(data)
    return self._post('event/%s/match_videos/add', payload)
def add_event_videos(self, data):
    """Add videos to an event's media tab on The Blue Alliance.

    :param data: List of YouTube video ids.
    """
    payload = json.dumps(data)
    return self._post('event/%s/media/add', payload)
|
frc1418/tbapy | tbapy/main.py | TBA.team_events | python | def team_events(self, team, year=None, simple=False, keys=False):
if year:
if keys:
return self._get('team/%s/events/%s/keys' % (self.team_key(team), year))
else:
return [Event(raw) for raw in self._get('team/%s/events/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
else:
if keys:
return self._get('team/%s/events/keys' % self.team_key(team))
else:
return [Event(raw) for raw in self._get('team/%s/events%s' % (self.team_key(team), '/simple' if simple else ''))] | Get team events a team has participated in.
:param team: Team to get events for.
:param year: Year to get events from.
:param simple: Get only vital data.
:param keys: Get just the keys of the events. Set to True if you only need the keys of each event and not their full data.
:return: List of strings or Teams | train | https://github.com/frc1418/tbapy/blob/3866d5a9971fe3dfaf1a1d83638bd6be6070f0c4/tbapy/main.py#L120-L139 | [
"def _get(self, url):\n \"\"\"\n Helper method: GET data from given URL on TBA's API.\n\n :param url: URL string to get data from.\n :return: Requested data in JSON format.\n \"\"\"\n return self.session.get(self.READ_URL_PRE + url).json()\n",
"def team_key(identifier):\n \"\"\"\n Take raw team number or string key and return string key.\n\n Used by all team-related methods to support either an integer team number or team key being passed.\n\n (We recommend passing an integer, just because it's cleaner. But whatever works.)\n\n :param identifier: int team number or str 'frc####'\n :return: string team key in format 'frc####'\n \"\"\"\n return identifier if type(identifier) == str else 'frc%s' % identifier\n"
] | class TBA:
"""
Main library class.
Contains methods for interacting with The Blue Alliance.
"""
READ_URL_PRE = 'https://www.thebluealliance.com/api/v3/'
WRITE_URL_PRE = 'https://www.thebluealliance.com/api/trusted/v1/'
session = requests.Session()
auth_secret = ''
event_key = ''
def __init__(self, auth_key, auth_id='', auth_secret='', event_key=''):
"""
Store auth key so we can reuse it as many times as we make a request.
:param auth_key: Your application authorization key, obtainable at https://www.thebluealliance.com/account.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.auth_secret = auth_secret
self.event_key = event_key
self.session.headers.update({'X-TBA-Auth-Key': auth_key, 'X-TBA-Auth-Id': auth_id})
def _get(self, url):
"""
Helper method: GET data from given URL on TBA's API.
:param url: URL string to get data from.
:return: Requested data in JSON format.
"""
return self.session.get(self.READ_URL_PRE + url).json()
def _post(self, url, data):
"""
Helper method: POST data to a given URL on TBA's API.
:param url: URL string to post data to and hash.
:param data: JSON data to post and hash.
:return: Requests Response object.
"""
return self.session.post(self.WRITE_URL_PRE + url % self.event_key, data=data, headers={'X-TBA-Auth-Sig': md5((self.auth_secret + '/api/trusted/v1/' + url % self.event_key + data).encode('utf-8')).hexdigest()})
@staticmethod
def team_key(identifier):
"""
Take raw team number or string key and return string key.
Used by all team-related methods to support either an integer team number or team key being passed.
(We recommend passing an integer, just because it's cleaner. But whatever works.)
:param identifier: int team number or str 'frc####'
:return: string team key in format 'frc####'
"""
return identifier if type(identifier) == str else 'frc%s' % identifier
def status(self):
"""
Get TBA API status information.
:return: Data on current status of the TBA API as APIStatus object.
"""
return APIStatus(self._get('status'))
def teams(self, page=None, year=None, simple=False, keys=False):
"""
Get list of teams.
:param page: Page of teams to view. Each page contains 500 teams.
:param year: View teams from a specific year.
:param simple: Get only vital data.
:param keys: Set to true if you only want the teams' keys rather than full data on them.
:return: List of Team objects or string keys.
"""
# If the user has requested a specific page, get that page.
if page is not None:
if year:
if keys:
return self._get('teams/%s/%s/keys' % (year, page))
else:
return [Team(raw) for raw in self._get('teams/%s/%s%s' % (year, page, '/simple' if simple else ''))]
else:
if keys:
return self._get('teams/%s/keys' % page)
else:
return [Team(raw) for raw in self._get('teams/%s%s' % (page, '/simple' if simple else ''))]
# If no page was specified, get all of them and combine.
else:
teams = []
target = 0
while True:
page_teams = self.teams(page=target, year=year, simple=simple, keys=keys)
if page_teams:
teams.extend(page_teams)
else:
break
target += 1
return teams
def team(self, team, simple=False):
"""
Get data on a single specified team.
:param team: Team to get data for.
:param simple: Get only vital data.
:return: Team object with data on specified team.
"""
return Team(self._get('team/%s%s' % (self.team_key(team), '/simple' if simple else '')))
def team_awards(self, team, year=None, event=None):
"""
Get list of awards team has received.
:param team: Team to get awards of.
:param year: Year to get awards from.
:param event: Event to get awards from.
:return: List of Award objects
"""
if event:
return [Award(raw) for raw in self._get('team/%s/event/%s/awards' % (self.team_key(team), event))]
else:
if year:
return [Award(raw) for raw in self._get('team/%s/awards/%s' % (self.team_key(team), year))]
else:
return [Award(raw) for raw in self._get('team/%s/awards' % self.team_key(team))]
def team_matches(self, team, event=None, year=None, simple=False, keys=False):
"""
Get list of matches team has participated in.
:param team: Team to get matches of.
:param year: Year to get matches from.
:param event: Event to get matches from.
:param simple: Get only vital data.
:param keys: Only get match keys rather than their full data.
:return: List of string keys or Match objects.
"""
if event:
if keys:
return self._get('team/%s/event/%s/matches/keys' % (self.team_key(team), event))
else:
return [Match(raw) for raw in self._get('team/%s/event/%s/matches%s' % (self.team_key(team), event, '/simple' if simple else ''))]
elif year:
if keys:
return self._get('team/%s/matches/%s/keys' % (self.team_key(team), year))
else:
return [Match(raw) for raw in self._get('team/%s/matches/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
def team_years(self, team):
"""
Get years during which a team participated in FRC.
:param team: Key for team to get data about.
:return: List of integer years in which team participated.
"""
return self._get('team/%s/years_participated' % self.team_key(team))
def team_media(self, team, year=None, tag=None):
"""
Get media for a given team.
:param team: Team to get media of.
:param year: Year to get media from.
:param tag: Get only media with a given tag.
:return: List of Media objects.
"""
return [Media(raw) for raw in self._get('team/%s/media%s%s' % (self.team_key(team), ('/tag/%s' % tag) if tag else '', ('/%s' % year) if year else ''))]
def team_robots(self, team):
"""
Get data about a team's robots.
:param team: Key for team whose robots you want data on.
:return: List of Robot objects
"""
return [Robot(raw) for raw in self._get('team/%s/robots' % self.team_key(team))]
def team_districts(self, team):
"""
Get districts a team has competed in.
:param team: Team to get data on.
:return: List of District objects.
"""
return [District(raw) for raw in self._get('team/%s/districts' % self.team_key(team))]
def team_profiles(self, team):
"""
Get team's social media profiles linked on their TBA page.
:param team: Team to get data on.
:return: List of Profile objects.
"""
return [Profile(raw) for raw in self._get('team/%s/social_media' % self.team_key(team))]
def team_status(self, team, event):
"""
Get status of a team at an event.
:param team: Team whose status to get.
:param event: Event team is at.
:return: Status object.
"""
return Status(self._get('team/%s/event/%s/status' % (self.team_key(team), event)))
def events(self, year, simple=False, keys=False):
"""
Get a list of events in a given year.
:param year: Year to get events from.
:param keys: Get only keys of the events rather than full data.
:param simple: Get only vital data.
:return: List of string event keys or Event objects.
"""
if keys:
return self._get('events/%s/keys' % year)
else:
return [Event(raw) for raw in self._get('events/%s%s' % (year, '/simple' if simple else ''))]
def event(self, event, simple=False):
"""
Get basic information about an event.
More specific data (typically obtained with the detail_type URL parameter) can be obtained with event_alliances(), event_district_points(), event_insights(), event_oprs(), event_predictions(), and event_rankings().
:param event: Key of event for which you desire data.
:param simple: Get only vital data.
:return: A single Event object.
"""
return Event(self._get('event/%s%s' % (event, '/simple' if simple else '')))
def event_alliances(self, event):
"""
Get information about alliances at event.
:param event: Key of event to get data on.
:return: List of Alliance objects.
"""
return [Alliance(raw) for raw in self._get('event/%s/alliances' % event)]
def event_district_points(self, event):
"""
Get district point information about an event.
:param event: Key of event to get data on.
:return: Single DistrictPoints object.
"""
return DistrictPoints(self._get('event/%s/district_points' % event))
def event_insights(self, event):
"""
Get insights about an event.
:param event: Key of event to get data on.
:return: Single Insights object.
"""
return Insights(self._get('event/%s/insights' % event))
def event_oprs(self, event):
"""
Get OPRs from an event.
:param event: Key of event to get data on.
:return: Single OPRs object.
"""
return OPRs(self._get('event/%s/oprs' % event))
def event_predictions(self, event):
"""
Get predictions for matches during an event.
:param event: Key of event to get data on.
:return: Single Predictions object.
"""
return Predictions(self._get('event/%s/predictions' % event))
def event_rankings(self, event):
"""
Get rankings from an event.
:param event: Key of event to get data on.
:return: Single Rankings object.
"""
return Rankings(self._get('event/%s/rankings' % event))
def event_teams(self, event, simple=False, keys=False):
"""
Get list of teams at an event.
:param event: Event key to get data on.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects.
"""
if keys:
return self._get('event/%s/teams/keys' % event)
else:
return [Team(raw) for raw in self._get('event/%s/teams%s' % (event, '/simple' if simple else ''))]
def event_awards(self, event):
"""
Get list of awards presented at an event.
:param event: Event key to get data on.
:return: List of Award objects.
"""
return [Award(raw) for raw in self._get('event/%s/awards' % event)]
def event_matches(self, event, simple=False, keys=False):
"""
Get list of matches played at an event.
:param event: Event key to get data on.
:param keys: Return list of match keys only rather than full data on every match.
:param simple: Get only vital data.
:return: List of string keys or Match objects.
"""
if keys:
return self._get('event/%s/matches/keys' % event)
else:
return [Match(raw) for raw in self._get('event/%s/matches%s' % (event, '/simple' if simple else ''))]
def match(self, key=None, year=None, event=None, type='qm', number=None, round=None, simple=False):
"""
Get data on a match.
You may either pass the match's key directly, or pass `year`, `event`, `type`, `match` (the match number), and `round` if applicable (playoffs only). The event year may be specified as part of the event key or specified in the `year` parameter.
:param key: Key of match to get data on. First option for specifying a match (see above).
:param year: Year in which match took place. Optional; if excluded then must be included in event key.
:param event: Key of event in which match took place. Including year is optional; if excluded then must be specified in `year` parameter.
:param type: One of 'qm' (qualifier match), 'qf' (quarterfinal), 'sf' (semifinal), 'f' (final). If unspecified, 'qm' will be assumed.
:param number: Match number. For example, for qualifier 32, you'd pass 32. For Semifinal 2 round 3, you'd pass 2.
:param round: For playoff matches, you will need to specify a round.
:param simple: Get only vital data.
:return: A single Match object.
"""
if key:
return Match(self._get('match/%s%s' % (key, '/simple' if simple else '')))
else:
return Match(self._get('match/{year}{event}_{type}{number}{round}{simple}'.format(year=year if not event[0].isdigit() else '',
event=event,
type=type,
number=number,
round=('m%s' % round) if not type == 'qm' else '',
simple='/simple' if simple else '')))
def districts(self, year):
"""
Return a list of districts active.
:param year: Year from which you want to get active districts.
:return: A list of District objects.
"""
return [District(raw) for raw in self._get('districts/%s' % year)]
def district_events(self, district, simple=False, keys=False):
"""
Return list of events in a given district.
:param district: Key of district whose events you want.
:param simple: Get only vital data.
:param keys: Return list of event keys only rather than full data on every event.
:return: List of string keys or Event objects.
"""
if keys:
return self._get('district/%s/events/keys' % district)
else:
return [Event(raw) for raw in self._get('district/%s/events%s' % (district, '/simple' if simple else ''))]
def district_rankings(self, district):
"""
Return data about rankings in a given district.
:param district: Key of district to get rankings of.
:return: List of DistrictRanking objects.
"""
return [DistrictRanking(raw) for raw in self._get('district/%s/rankings' % district)]
def district_teams(self, district, simple=False, keys=False):
"""
Get list of teams in the given district.
:param district: Key for the district to get teams in.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects.
"""
if keys:
return self._get('district/%s/teams/keys' % district)
else:
return [Team(raw) for raw in self._get('district/%s/teams' % district)]
def update_trusted(self, auth_id, auth_secret, event_key):
"""
Set Trusted API ID and Secret and the event key they are assigned to.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.session.headers.update({'X-TBA-Auth-Id': auth_id})
self.auth_secret = auth_secret
self.event_key = event_key
def update_event_info(self, data):
"""
Update an event's info on The Blue Alliance.
:param data: Dictionary of data to update the event with.
"""
return self._post('event/%s/info/update', json.dumps(data))
def update_event_alliances(self, data):
"""
Update an event's alliances on The Blue Alliance.
:param data: List of lists of alliances in frc#### string format.
"""
return self._post('event/%s/alliance_selections/update', json.dumps(data))
def update_event_awards(self, data):
"""
Update an event's awards on The Blue Alliance.
:param data: List of Dictionaries of award winners. Each dictionary should have a name_str for the award name, team_key in frc#### string format, and the awardee for any awards given to individuals. The last two can be null
"""
return self._post('event/%s/awards/update', json.dumps(data))
def update_event_matches(self, data):
"""
Update an event's matches on The Blue Alliance.
:param data: List of Dictionaries. More info about the match data can be found in the API docs.
"""
return self._post('event/%s/matches/update', json.dumps(data))
def delete_event_matches(self, data=None):
"""
Delete an event's matches on The Blue Alliance.
:param data: List of match keys to delete, can be omitted if you would like to delete all matches.
"""
return self._post('event/%s/matches/delete_all' if data is None else 'event/%s/matches/delete', json.dumps(self.event_key) if data is None else json.dumps(data))
def update_event_rankings(self, data):
"""
Update an event's rankings on The Blue Alliance.
:param data: Dictionary of breakdowns and rankings. Rankings are a list of dictionaries.
"""
return self._post('event/%s/rankings/update', json.dumps(data))
def update_event_team_list(self, data):
"""
Update an event's team list on The Blue Alliance.
:param data: a list of team keys in frc#### string format.
"""
return self._post('event/%s/team_list/update', json.dumps(data))
def add_match_videos(self, data):
"""
Add match videos to the respective match pages of an event on The Blue Alliance.
:param data: Dictionary of partial match keys to youtube video ids.
"""
return self._post('event/%s/match_videos/add', json.dumps(data))
def add_event_videos(self, data):
"""
Add videos to an event's media tab on The Blue Alliance.
:param data: List of youtube video ids.
"""
return self._post('event/%s/media/add', json.dumps(data))
|
frc1418/tbapy | tbapy/main.py | TBA.team_awards | python | def team_awards(self, team, year=None, event=None):
if event:
return [Award(raw) for raw in self._get('team/%s/event/%s/awards' % (self.team_key(team), event))]
else:
if year:
return [Award(raw) for raw in self._get('team/%s/awards/%s' % (self.team_key(team), year))]
else:
return [Award(raw) for raw in self._get('team/%s/awards' % self.team_key(team))] | Get list of awards team has received.
:param team: Team to get awards of.
:param year: Year to get awards from.
:param event: Event to get awards from.
:return: List of Award objects | train | https://github.com/frc1418/tbapy/blob/3866d5a9971fe3dfaf1a1d83638bd6be6070f0c4/tbapy/main.py#L141-L156 | [
"def _get(self, url):\n \"\"\"\n Helper method: GET data from given URL on TBA's API.\n\n :param url: URL string to get data from.\n :return: Requested data in JSON format.\n \"\"\"\n return self.session.get(self.READ_URL_PRE + url).json()\n",
"def team_key(identifier):\n \"\"\"\n Take raw team number or string key and return string key.\n\n Used by all team-related methods to support either an integer team number or team key being passed.\n\n (We recommend passing an integer, just because it's cleaner. But whatever works.)\n\n :param identifier: int team number or str 'frc####'\n :return: string team key in format 'frc####'\n \"\"\"\n return identifier if type(identifier) == str else 'frc%s' % identifier\n"
] | class TBA:
"""
Main library class.
Contains methods for interacting with The Blue Alliance.
"""
READ_URL_PRE = 'https://www.thebluealliance.com/api/v3/'
WRITE_URL_PRE = 'https://www.thebluealliance.com/api/trusted/v1/'
session = requests.Session()
auth_secret = ''
event_key = ''
def __init__(self, auth_key, auth_id='', auth_secret='', event_key=''):
"""
Store auth key so we can reuse it as many times as we make a request.
:param auth_key: Your application authorization key, obtainable at https://www.thebluealliance.com/account.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.auth_secret = auth_secret
self.event_key = event_key
self.session.headers.update({'X-TBA-Auth-Key': auth_key, 'X-TBA-Auth-Id': auth_id})
def _get(self, url):
"""
Helper method: GET data from given URL on TBA's API.
:param url: URL string to get data from.
:return: Requested data in JSON format.
"""
return self.session.get(self.READ_URL_PRE + url).json()
def _post(self, url, data):
"""
Helper method: POST data to a given URL on TBA's API.
:param url: URL string to post data to and hash.
:param data: JSON data to post and hash.
:return: Requests Response object.
"""
return self.session.post(self.WRITE_URL_PRE + url % self.event_key, data=data, headers={'X-TBA-Auth-Sig': md5((self.auth_secret + '/api/trusted/v1/' + url % self.event_key + data).encode('utf-8')).hexdigest()})
@staticmethod
def team_key(identifier):
"""
Take raw team number or string key and return string key.
Used by all team-related methods to support either an integer team number or team key being passed.
(We recommend passing an integer, just because it's cleaner. But whatever works.)
:param identifier: int team number or str 'frc####'
:return: string team key in format 'frc####'
"""
return identifier if type(identifier) == str else 'frc%s' % identifier
def status(self):
"""
Get TBA API status information.
:return: Data on current status of the TBA API as APIStatus object.
"""
return APIStatus(self._get('status'))
def teams(self, page=None, year=None, simple=False, keys=False):
"""
Get list of teams.
:param page: Page of teams to view. Each page contains 500 teams.
:param year: View teams from a specific year.
:param simple: Get only vital data.
:param keys: Set to true if you only want the teams' keys rather than full data on them.
:return: List of Team objects or string keys.
"""
# If the user has requested a specific page, get that page.
if page is not None:
if year:
if keys:
return self._get('teams/%s/%s/keys' % (year, page))
else:
return [Team(raw) for raw in self._get('teams/%s/%s%s' % (year, page, '/simple' if simple else ''))]
else:
if keys:
return self._get('teams/%s/keys' % page)
else:
return [Team(raw) for raw in self._get('teams/%s%s' % (page, '/simple' if simple else ''))]
# If no page was specified, get all of them and combine.
else:
teams = []
target = 0
while True:
page_teams = self.teams(page=target, year=year, simple=simple, keys=keys)
if page_teams:
teams.extend(page_teams)
else:
break
target += 1
return teams
def team(self, team, simple=False):
"""
Get data on a single specified team.
:param team: Team to get data for.
:param simple: Get only vital data.
:return: Team object with data on specified team.
"""
return Team(self._get('team/%s%s' % (self.team_key(team), '/simple' if simple else '')))
def team_events(self, team, year=None, simple=False, keys=False):
"""
Get team events a team has participated in.
:param team: Team to get events for.
:param year: Year to get events from.
:param simple: Get only vital data.
:param keys: Get just the keys of the events. Set to True if you only need the keys of each event and not their full data.
:return: List of strings or Teams
"""
if year:
if keys:
return self._get('team/%s/events/%s/keys' % (self.team_key(team), year))
else:
return [Event(raw) for raw in self._get('team/%s/events/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
else:
if keys:
return self._get('team/%s/events/keys' % self.team_key(team))
else:
return [Event(raw) for raw in self._get('team/%s/events%s' % (self.team_key(team), '/simple' if simple else ''))]
def team_matches(self, team, event=None, year=None, simple=False, keys=False):
"""
Get list of matches team has participated in.
:param team: Team to get matches of.
:param year: Year to get matches from.
:param event: Event to get matches from.
:param simple: Get only vital data.
:param keys: Only get match keys rather than their full data.
:return: List of string keys or Match objects.
"""
if event:
if keys:
return self._get('team/%s/event/%s/matches/keys' % (self.team_key(team), event))
else:
return [Match(raw) for raw in self._get('team/%s/event/%s/matches%s' % (self.team_key(team), event, '/simple' if simple else ''))]
elif year:
if keys:
return self._get('team/%s/matches/%s/keys' % (self.team_key(team), year))
else:
return [Match(raw) for raw in self._get('team/%s/matches/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
def team_years(self, team):
"""
Get years during which a team participated in FRC.
:param team: Key for team to get data about.
:return: List of integer years in which team participated.
"""
return self._get('team/%s/years_participated' % self.team_key(team))
def team_media(self, team, year=None, tag=None):
"""
Get media for a given team.
:param team: Team to get media of.
:param year: Year to get media from.
:param tag: Get only media with a given tag.
:return: List of Media objects.
"""
return [Media(raw) for raw in self._get('team/%s/media%s%s' % (self.team_key(team), ('/tag/%s' % tag) if tag else '', ('/%s' % year) if year else ''))]
def team_robots(self, team):
"""
Get data about a team's robots.
:param team: Key for team whose robots you want data on.
:return: List of Robot objects
"""
return [Robot(raw) for raw in self._get('team/%s/robots' % self.team_key(team))]
def team_districts(self, team):
"""
Get districts a team has competed in.
:param team: Team to get data on.
:return: List of District objects.
"""
return [District(raw) for raw in self._get('team/%s/districts' % self.team_key(team))]
def team_profiles(self, team):
"""
Get team's social media profiles linked on their TBA page.
:param team: Team to get data on.
:return: List of Profile objects.
"""
return [Profile(raw) for raw in self._get('team/%s/social_media' % self.team_key(team))]
def team_status(self, team, event):
"""
Get status of a team at an event.
:param team: Team whose status to get.
:param event: Event team is at.
:return: Status object.
"""
return Status(self._get('team/%s/event/%s/status' % (self.team_key(team), event)))
def events(self, year, simple=False, keys=False):
"""
Get a list of events in a given year.
:param year: Year to get events from.
:param keys: Get only keys of the events rather than full data.
:param simple: Get only vital data.
:return: List of string event keys or Event objects.
"""
if keys:
return self._get('events/%s/keys' % year)
else:
return [Event(raw) for raw in self._get('events/%s%s' % (year, '/simple' if simple else ''))]
def event(self, event, simple=False):
"""
Get basic information about an event.
More specific data (typically obtained with the detail_type URL parameter) can be obtained with event_alliances(), event_district_points(), event_insights(), event_oprs(), event_predictions(), and event_rankings().
:param event: Key of event for which you desire data.
:param simple: Get only vital data.
:return: A single Event object.
"""
return Event(self._get('event/%s%s' % (event, '/simple' if simple else '')))
def event_alliances(self, event):
"""
Get information about alliances at event.
:param event: Key of event to get data on.
:return: List of Alliance objects.
"""
return [Alliance(raw) for raw in self._get('event/%s/alliances' % event)]
def event_district_points(self, event):
"""
Get district point information about an event.
:param event: Key of event to get data on.
:return: Single DistrictPoints object.
"""
return DistrictPoints(self._get('event/%s/district_points' % event))
def event_insights(self, event):
"""
Get insights about an event.
:param event: Key of event to get data on.
:return: Single Insights object.
"""
return Insights(self._get('event/%s/insights' % event))
def event_oprs(self, event):
"""
Get OPRs from an event.
:param event: Key of event to get data on.
:return: Single OPRs object.
"""
return OPRs(self._get('event/%s/oprs' % event))
def event_predictions(self, event):
"""
Get predictions for matches during an event.
:param event: Key of event to get data on.
:return: Single Predictions object.
"""
return Predictions(self._get('event/%s/predictions' % event))
def event_rankings(self, event):
"""
Get rankings from an event.
:param event: Key of event to get data on.
:return: Single Rankings object.
"""
return Rankings(self._get('event/%s/rankings' % event))
def event_teams(self, event, simple=False, keys=False):
"""
Get list of teams at an event.
:param event: Event key to get data on.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects.
"""
if keys:
return self._get('event/%s/teams/keys' % event)
else:
return [Team(raw) for raw in self._get('event/%s/teams%s' % (event, '/simple' if simple else ''))]
def event_awards(self, event):
"""
Get list of awards presented at an event.
:param event: Event key to get data on.
:return: List of Award objects.
"""
return [Award(raw) for raw in self._get('event/%s/awards' % event)]
def event_matches(self, event, simple=False, keys=False):
"""
Get list of matches played at an event.
:param event: Event key to get data on.
:param keys: Return list of match keys only rather than full data on every match.
:param simple: Get only vital data.
:return: List of string keys or Match objects.
"""
if keys:
return self._get('event/%s/matches/keys' % event)
else:
return [Match(raw) for raw in self._get('event/%s/matches%s' % (event, '/simple' if simple else ''))]
def match(self, key=None, year=None, event=None, type='qm', number=None, round=None, simple=False):
"""
Get data on a match.
You may either pass the match's key directly, or pass `year`, `event`, `type`, `match` (the match number), and `round` if applicable (playoffs only). The event year may be specified as part of the event key or specified in the `year` parameter.
:param key: Key of match to get data on. First option for specifying a match (see above).
:param year: Year in which match took place. Optional; if excluded then must be included in event key.
:param event: Key of event in which match took place. Including year is optional; if excluded then must be specified in `year` parameter.
:param type: One of 'qm' (qualifier match), 'qf' (quarterfinal), 'sf' (semifinal), 'f' (final). If unspecified, 'qm' will be assumed.
:param number: Match number. For example, for qualifier 32, you'd pass 32. For Semifinal 2 round 3, you'd pass 2.
:param round: For playoff matches, you will need to specify a round.
:param simple: Get only vital data.
:return: A single Match object.
"""
if key:
return Match(self._get('match/%s%s' % (key, '/simple' if simple else '')))
else:
return Match(self._get('match/{year}{event}_{type}{number}{round}{simple}'.format(year=year if not event[0].isdigit() else '',
event=event,
type=type,
number=number,
round=('m%s' % round) if not type == 'qm' else '',
simple='/simple' if simple else '')))
def districts(self, year):
"""
Return a list of districts active.
:param year: Year from which you want to get active districts.
:return: A list of District objects.
"""
return [District(raw) for raw in self._get('districts/%s' % year)]
def district_events(self, district, simple=False, keys=False):
"""
Return list of events in a given district.
:param district: Key of district whose events you want.
:param simple: Get only vital data.
:param keys: Return list of event keys only rather than full data on every event.
:return: List of string keys or Event objects.
"""
if keys:
return self._get('district/%s/events/keys' % district)
else:
return [Event(raw) for raw in self._get('district/%s/events%s' % (district, '/simple' if simple else ''))]
def district_rankings(self, district):
"""
Return data about rankings in a given district.
:param district: Key of district to get rankings of.
:return: List of DistrictRanking objects.
"""
return [DistrictRanking(raw) for raw in self._get('district/%s/rankings' % district)]
def district_teams(self, district, simple=False, keys=False):
"""
Get list of teams in the given district.
:param district: Key for the district to get teams in.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects.
"""
if keys:
return self._get('district/%s/teams/keys' % district)
else:
return [Team(raw) for raw in self._get('district/%s/teams' % district)]
def update_trusted(self, auth_id, auth_secret, event_key):
"""
Set Trusted API ID and Secret and the event key they are assigned to.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.session.headers.update({'X-TBA-Auth-Id': auth_id})
self.auth_secret = auth_secret
self.event_key = event_key
def update_event_info(self, data):
"""
Update an event's info on The Blue Alliance.
:param data: Dictionary of data to update the event with.
"""
return self._post('event/%s/info/update', json.dumps(data))
def update_event_alliances(self, data):
"""
Update an event's alliances on The Blue Alliance.
:param data: List of lists of alliances in frc#### string format.
"""
return self._post('event/%s/alliance_selections/update', json.dumps(data))
def update_event_awards(self, data):
"""
Update an event's awards on The Blue Alliance.
:param data: List of Dictionaries of award winners. Each dictionary should have a name_str for the award name, team_key in frc#### string format, and the awardee for any awards given to individuals. The last two can be null
"""
return self._post('event/%s/awards/update', json.dumps(data))
def update_event_matches(self, data):
"""
Update an event's matches on The Blue Alliance.
:param data: List of Dictionaries. More info about the match data can be found in the API docs.
"""
return self._post('event/%s/matches/update', json.dumps(data))
def delete_event_matches(self, data=None):
"""
Delete an event's matches on The Blue Alliance.
:param data: List of match keys to delete, can be ommited if you would like to delete all matches.
"""
return self._post('event/%s/matches/delete_all' if data is None else 'event/%s/matches/delete', json.dumps(self.event_key) if data is None else json.dumps(data))
def update_event_rankings(self, data):
"""
Update an event's rankings on The Blue Alliance.
:param data: Dictionary of breakdowns and rankings. Rankings are a list of dictionaries.
"""
return self._post('event/%s/rankings/update', json.dumps(data))
def update_event_team_list(self, data):
"""
Update an event's team list on The Blue Alliance.
:param data: a list of team keys in frc#### string format.
"""
return self._post('event/%s/team_list/update', json.dumps(data))
def add_match_videos(self, data):
"""
Add match videos to the respective match pages of an event on The Blue Alliance.
:param data: Dictionary of partial match keys to youtube video ids.
"""
return self._post('event/%s/match_videos/add', json.dumps(data))
def add_event_videos(self, data):
"""
Add videos to an event's media tab on The Blue Alliance.
:param data: List of youtube video ids.
"""
return self._post('event/%s/media/add', json.dumps(data))
|
frc1418/tbapy | tbapy/main.py | TBA.team_matches | python | def team_matches(self, team, event=None, year=None, simple=False, keys=False):
if event:
if keys:
return self._get('team/%s/event/%s/matches/keys' % (self.team_key(team), event))
else:
return [Match(raw) for raw in self._get('team/%s/event/%s/matches%s' % (self.team_key(team), event, '/simple' if simple else ''))]
elif year:
if keys:
return self._get('team/%s/matches/%s/keys' % (self.team_key(team), year))
else:
return [Match(raw) for raw in self._get('team/%s/matches/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))] | Get list of matches team has participated in.
:param team: Team to get matches of.
:param year: Year to get matches from.
:param event: Event to get matches from.
:param simple: Get only vital data.
:param keys: Only get match keys rather than their full data.
:return: List of string keys or Match objects. | train | https://github.com/frc1418/tbapy/blob/3866d5a9971fe3dfaf1a1d83638bd6be6070f0c4/tbapy/main.py#L158-L178 | [
"def _get(self, url):\n \"\"\"\n Helper method: GET data from given URL on TBA's API.\n\n :param url: URL string to get data from.\n :return: Requested data in JSON format.\n \"\"\"\n return self.session.get(self.READ_URL_PRE + url).json()\n",
"def team_key(identifier):\n \"\"\"\n Take raw team number or string key and return string key.\n\n Used by all team-related methods to support either an integer team number or team key being passed.\n\n (We recommend passing an integer, just because it's cleaner. But whatever works.)\n\n :param identifier: int team number or str 'frc####'\n :return: string team key in format 'frc####'\n \"\"\"\n return identifier if type(identifier) == str else 'frc%s' % identifier\n"
] | class TBA:
"""
Main library class.
Contains methods for interacting with The Blue Alliance.
"""
READ_URL_PRE = 'https://www.thebluealliance.com/api/v3/'
WRITE_URL_PRE = 'https://www.thebluealliance.com/api/trusted/v1/'
session = requests.Session()
auth_secret = ''
event_key = ''
def __init__(self, auth_key, auth_id='', auth_secret='', event_key=''):
"""
Store auth key so we can reuse it as many times as we make a request.
:param auth_key: Your application authorization key, obtainable at https://www.thebluealliance.com/account.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.auth_secret = auth_secret
self.event_key = event_key
self.session.headers.update({'X-TBA-Auth-Key': auth_key, 'X-TBA-Auth-Id': auth_id})
def _get(self, url):
"""
Helper method: GET data from given URL on TBA's API.
:param url: URL string to get data from.
:return: Requested data in JSON format.
"""
return self.session.get(self.READ_URL_PRE + url).json()
def _post(self, url, data):
"""
Helper method: POST data to a given URL on TBA's API.
:param url: URL string to post data to and hash.
:pararm data: JSON data to post and hash.
:return: Requests Response object.
"""
return self.session.post(self.WRITE_URL_PRE + url % self.event_key, data=data, headers={'X-TBA-Auth-Sig': md5((self.auth_secret + '/api/trusted/v1/' + url % self.event_key + data).encode('utf-8')).hexdigest()})
@staticmethod
def team_key(identifier):
"""
Take raw team number or string key and return string key.
Used by all team-related methods to support either an integer team number or team key being passed.
(We recommend passing an integer, just because it's cleaner. But whatever works.)
:param identifier: int team number or str 'frc####'
:return: string team key in format 'frc####'
"""
return identifier if type(identifier) == str else 'frc%s' % identifier
def status(self):
"""
Get TBA API status information.
:return: Data on current status of the TBA API as APIStatus object.
"""
return APIStatus(self._get('status'))
def teams(self, page=None, year=None, simple=False, keys=False):
"""
Get list of teams.
:param page: Page of teams to view. Each page contains 500 teams.
:param year: View teams from a specific year.
:param simple: Get only vital data.
:param keys: Set to true if you only want the teams' keys rather than full data on them.
:return: List of Team objects or string keys.
"""
# If the user has requested a specific page, get that page.
if page is not None:
if year:
if keys:
return self._get('teams/%s/%s/keys' % (year, page))
else:
return [Team(raw) for raw in self._get('teams/%s/%s%s' % (year, page, '/simple' if simple else ''))]
else:
if keys:
return self._get('teams/%s/keys' % page)
else:
return [Team(raw) for raw in self._get('teams/%s%s' % (page, '/simple' if simple else ''))]
# If no page was specified, get all of them and combine.
else:
teams = []
target = 0
while True:
page_teams = self.teams(page=target, year=year, simple=simple, keys=keys)
if page_teams:
teams.extend(page_teams)
else:
break
target += 1
return teams
def team(self, team, simple=False):
"""
Get data on a single specified team.
:param team: Team to get data for.
:param simple: Get only vital data.
:return: Team object with data on specified team.
"""
return Team(self._get('team/%s%s' % (self.team_key(team), '/simple' if simple else '')))
def team_events(self, team, year=None, simple=False, keys=False):
"""
Get team events a team has participated in.
:param team: Team to get events for.
:param year: Year to get events from.
:param simple: Get only vital data.
:param keys: Get just the keys of the events. Set to True if you only need the keys of each event and not their full data.
:return: List of strings or Teams
"""
if year:
if keys:
return self._get('team/%s/events/%s/keys' % (self.team_key(team), year))
else:
return [Event(raw) for raw in self._get('team/%s/events/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
else:
if keys:
return self._get('team/%s/events/keys' % self.team_key(team))
else:
return [Event(raw) for raw in self._get('team/%s/events%s' % (self.team_key(team), '/simple' if simple else ''))]
def team_awards(self, team, year=None, event=None):
"""
Get list of awards team has recieved.
:param team: Team to get awards of.
:param year: Year to get awards from.
:param event: Event to get awards from.
:return: List of Award objects
"""
if event:
return [Award(raw) for raw in self._get('team/%s/event/%s/awards' % (self.team_key(team), event))]
else:
if year:
return [Award(raw) for raw in self._get('team/%s/awards/%s' % (self.team_key(team), year))]
else:
return [Award(raw) for raw in self._get('team/%s/awards' % self.team_key(team))]
def team_years(self, team):
"""
Get years during which a team participated in FRC.
:param team: Key for team to get data about.
:return: List of integer years in which team participated.
"""
return self._get('team/%s/years_participated' % self.team_key(team))
def team_media(self, team, year=None, tag=None):
"""
Get media for a given team.
:param team: Team to get media of.
:param year: Year to get media from.
:param tag: Get only media with a given tag.
:return: List of Media objects.
"""
return [Media(raw) for raw in self._get('team/%s/media%s%s' % (self.team_key(team), ('/tag/%s' % tag) if tag else '', ('/%s' % year) if year else ''))]
def team_robots(self, team):
"""
Get data about a team's robots.
:param team: Key for team whose robots you want data on.
:return: List of Robot objects
"""
return [Robot(raw) for raw in self._get('team/%s/robots' % self.team_key(team))]
def team_districts(self, team):
"""
Get districts a team has competed in.
:param team: Team to get data on.
:return: List of District objects.
"""
return [District(raw) for raw in self._get('team/%s/districts' % self.team_key(team))]
def team_profiles(self, team):
"""
Get team's social media profiles linked on their TBA page.
:param team: Team to get data on.
:return: List of Profile objects.
"""
return [Profile(raw) for raw in self._get('team/%s/social_media' % self.team_key(team))]
def team_status(self, team, event):
"""
Get status of a team at an event.
:param team: Team whose status to get.
:param event: Event team is at.
:return: Status object.
"""
return Status(self._get('team/%s/event/%s/status' % (self.team_key(team), event)))
def events(self, year, simple=False, keys=False):
"""
Get a list of events in a given year.
:param year: Year to get events from.
:param keys: Get only keys of the events rather than full data.
:param simple: Get only vital data.
:return: List of string event keys or Event objects.
"""
if keys:
return self._get('events/%s/keys' % year)
else:
return [Event(raw) for raw in self._get('events/%s%s' % (year, '/simple' if simple else ''))]
def event(self, event, simple=False):
"""
Get basic information about an event.
More specific data (typically obtained with the detail_type URL parameter) can be obtained with event_alliances(), event_district_points(), event_insights(), event_oprs(), event_predictions(), and event_rankings().
:param event: Key of event for which you desire data.
:param simple: Get only vital data.
:return: A single Event object.
"""
return Event(self._get('event/%s%s' % (event, '/simple' if simple else '')))
def event_alliances(self, event):
"""
Get information about alliances at event.
:param event: Key of event to get data on.
:return: List of Alliance objects.
"""
return [Alliance(raw) for raw in self._get('event/%s/alliances' % event)]
def event_district_points(self, event):
"""
Get district point information about an event.
:param event: Key of event to get data on.
:return: Single DistrictPoints object.
"""
return DistrictPoints(self._get('event/%s/district_points' % event))
def event_insights(self, event):
"""
Get insights about an event.
:param event: Key of event to get data on.
:return: Single Insights object.
"""
return Insights(self._get('event/%s/insights' % event))
def event_oprs(self, event):
"""
Get OPRs from an event.
:param event: Key of event to get data on.
:return: Single OPRs object.
"""
return OPRs(self._get('event/%s/oprs' % event))
def event_predictions(self, event):
"""
Get predictions for matches during an event.
:param event: Key of event to get data on.
:return: Single Predictions object.
"""
return Predictions(self._get('event/%s/predictions' % event))
def event_rankings(self, event):
"""
Get rankings from an event.
:param event: Key of event to get data on.
:return: Single Rankings object.
"""
return Rankings(self._get('event/%s/rankings' % event))
def event_teams(self, event, simple=False, keys=False):
"""
Get list of teams at an event.
:param event: Event key to get data on.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects.
"""
if keys:
return self._get('event/%s/teams/keys' % event)
else:
return [Team(raw) for raw in self._get('event/%s/teams%s' % (event, '/simple' if simple else ''))]
def event_awards(self, event):
"""
Get list of awards presented at an event.
:param event: Event key to get data on.
:return: List of Award objects.
"""
return [Award(raw) for raw in self._get('event/%s/awards' % event)]
def event_matches(self, event, simple=False, keys=False):
"""
Get list of matches played at an event.
:param event: Event key to get data on.
:param keys: Return list of match keys only rather than full data on every match.
:param simple: Get only vital data.
:return: List of string keys or Match objects.
"""
if keys:
return self._get('event/%s/matches/keys' % event)
else:
return [Match(raw) for raw in self._get('event/%s/matches%s' % (event, '/simple' if simple else ''))]
def match(self, key=None, year=None, event=None, type='qm', number=None, round=None, simple=False):
"""
Get data on a match.
You may either pass the match's key directly, or pass `year`, `event`, `type`, `match` (the match number), and `round` if applicable (playoffs only). The event year may be specified as part of the event key or specified in the `year` parameter.
:param key: Key of match to get data on. First option for specifying a match (see above).
:param year: Year in which match took place. Optional; if excluded then must be included in event key.
:param event: Key of event in which match took place. Including year is optional; if excluded then must be specified in `year` parameter.
:param type: One of 'qm' (qualifier match), 'qf' (quarterfinal), 'sf' (semifinal), 'f' (final). If unspecified, 'qm' will be assumed.
:param number: Match number. For example, for qualifier 32, you'd pass 32. For Semifinal 2 round 3, you'd pass 2.
:param round: For playoff matches, you will need to specify a round.
:param simple: Get only vital data.
:return: A single Match object.
"""
if key:
return Match(self._get('match/%s%s' % (key, '/simple' if simple else '')))
else:
return Match(self._get('match/{year}{event}_{type}{number}{round}{simple}'.format(year=year if not event[0].isdigit() else '',
event=event,
type=type,
number=number,
round=('m%s' % round) if not type == 'qm' else '',
simple='/simple' if simple else '')))
def districts(self, year):
"""
Return a list of districts active.
:param year: Year from which you want to get active districts.
:return: A list of District objects.
"""
return [District(raw) for raw in self._get('districts/%s' % year)]
def district_events(self, district, simple=False, keys=False):
"""
Return list of events in a given district.
:param district: Key of district whose events you want.
:param simple: Get only vital data.
:param keys: Return list of event keys only rather than full data on every event.
:return: List of string keys or Event objects.
"""
if keys:
return self._get('district/%s/events/keys' % district)
else:
return [Event(raw) for raw in self._get('district/%s/events%s' % (district, '/simple' if simple else ''))]
def district_rankings(self, district):
"""
Return data about rankings in a given district.
:param district: Key of district to get rankings of.
:return: List of DistrictRanking objects.
"""
return [DistrictRanking(raw) for raw in self._get('district/%s/rankings' % district)]
def district_teams(self, district, simple=False, keys=False):
"""
Get list of teams in the given district.
:param district: Key for the district to get teams in.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects.
"""
if keys:
return self._get('district/%s/teams/keys' % district)
else:
return [Team(raw) for raw in self._get('district/%s/teams' % district)]
def update_trusted(self, auth_id, auth_secret, event_key):
"""
Set Trusted API ID and Secret and the event key they are assigned to.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.session.headers.update({'X-TBA-Auth-Id': auth_id})
self.auth_secret = auth_secret
self.event_key = event_key
def update_event_info(self, data):
"""
Update an event's info on The Blue Alliance.
:param data: Dictionary of data to update the event with.
"""
return self._post('event/%s/info/update', json.dumps(data))
def update_event_alliances(self, data):
"""
Update an event's alliances on The Blue Alliance.
:param data: List of lists of alliances in frc#### string format.
"""
return self._post('event/%s/alliance_selections/update', json.dumps(data))
def update_event_awards(self, data):
"""
Update an event's awards on The Blue Alliance.
:param data: List of Dictionaries of award winners. Each dictionary should have a name_str for the award name, team_key in frc#### string format, and the awardee for any awards given to individuals. The last two can be null
"""
return self._post('event/%s/awards/update', json.dumps(data))
def update_event_matches(self, data):
"""
Update an event's matches on The Blue Alliance.
:param data: List of Dictionaries. More info about the match data can be found in the API docs.
"""
return self._post('event/%s/matches/update', json.dumps(data))
def delete_event_matches(self, data=None):
"""
Delete an event's matches on The Blue Alliance.
:param data: List of match keys to delete, can be ommited if you would like to delete all matches.
"""
return self._post('event/%s/matches/delete_all' if data is None else 'event/%s/matches/delete', json.dumps(self.event_key) if data is None else json.dumps(data))
def update_event_rankings(self, data):
"""
Update an event's rankings on The Blue Alliance.
:param data: Dictionary of breakdowns and rankings. Rankings are a list of dictionaries.
"""
return self._post('event/%s/rankings/update', json.dumps(data))
def update_event_team_list(self, data):
"""
Update an event's team list on The Blue Alliance.
:param data: a list of team keys in frc#### string format.
"""
return self._post('event/%s/team_list/update', json.dumps(data))
def add_match_videos(self, data):
"""
Add match videos to the respective match pages of an event on The Blue Alliance.
:param data: Dictionary of partial match keys to youtube video ids.
"""
return self._post('event/%s/match_videos/add', json.dumps(data))
def add_event_videos(self, data):
"""
Add videos to an event's media tab on The Blue Alliance.
:param data: List of youtube video ids.
"""
return self._post('event/%s/media/add', json.dumps(data))
|
frc1418/tbapy | tbapy/main.py | TBA.team_media | python | def team_media(self, team, year=None, tag=None):
return [Media(raw) for raw in self._get('team/%s/media%s%s' % (self.team_key(team), ('/tag/%s' % tag) if tag else '', ('/%s' % year) if year else ''))] | Get media for a given team.
:param team: Team to get media of.
:param year: Year to get media from.
:param tag: Get only media with a given tag.
:return: List of Media objects. | train | https://github.com/frc1418/tbapy/blob/3866d5a9971fe3dfaf1a1d83638bd6be6070f0c4/tbapy/main.py#L189-L198 | [
"def _get(self, url):\n \"\"\"\n Helper method: GET data from given URL on TBA's API.\n\n :param url: URL string to get data from.\n :return: Requested data in JSON format.\n \"\"\"\n return self.session.get(self.READ_URL_PRE + url).json()\n",
"def team_key(identifier):\n \"\"\"\n Take raw team number or string key and return string key.\n\n Used by all team-related methods to support either an integer team number or team key being passed.\n\n (We recommend passing an integer, just because it's cleaner. But whatever works.)\n\n :param identifier: int team number or str 'frc####'\n :return: string team key in format 'frc####'\n \"\"\"\n return identifier if type(identifier) == str else 'frc%s' % identifier\n"
] | class TBA:
"""
Main library class.
Contains methods for interacting with The Blue Alliance.
"""
READ_URL_PRE = 'https://www.thebluealliance.com/api/v3/'
WRITE_URL_PRE = 'https://www.thebluealliance.com/api/trusted/v1/'
session = requests.Session()
auth_secret = ''
event_key = ''
def __init__(self, auth_key, auth_id='', auth_secret='', event_key=''):
"""
Store auth key so we can reuse it as many times as we make a request.
:param auth_key: Your application authorization key, obtainable at https://www.thebluealliance.com/account.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.auth_secret = auth_secret
self.event_key = event_key
self.session.headers.update({'X-TBA-Auth-Key': auth_key, 'X-TBA-Auth-Id': auth_id})
def _get(self, url):
"""
Helper method: GET data from given URL on TBA's API.
:param url: URL string to get data from.
:return: Requested data in JSON format.
"""
return self.session.get(self.READ_URL_PRE + url).json()
def _post(self, url, data):
"""
Helper method: POST data to a given URL on TBA's API.
:param url: URL string to post data to and hash.
:pararm data: JSON data to post and hash.
:return: Requests Response object.
"""
return self.session.post(self.WRITE_URL_PRE + url % self.event_key, data=data, headers={'X-TBA-Auth-Sig': md5((self.auth_secret + '/api/trusted/v1/' + url % self.event_key + data).encode('utf-8')).hexdigest()})
@staticmethod
def team_key(identifier):
"""
Take raw team number or string key and return string key.
Used by all team-related methods to support either an integer team number or team key being passed.
(We recommend passing an integer, just because it's cleaner. But whatever works.)
:param identifier: int team number or str 'frc####'
:return: string team key in format 'frc####'
"""
return identifier if type(identifier) == str else 'frc%s' % identifier
def status(self):
"""
Get TBA API status information.
:return: Data on current status of the TBA API as APIStatus object.
"""
return APIStatus(self._get('status'))
def teams(self, page=None, year=None, simple=False, keys=False):
"""
Get list of teams.
:param page: Page of teams to view. Each page contains 500 teams.
:param year: View teams from a specific year.
:param simple: Get only vital data.
:param keys: Set to true if you only want the teams' keys rather than full data on them.
:return: List of Team objects or string keys.
"""
# If the user has requested a specific page, get that page.
if page is not None:
if year:
if keys:
return self._get('teams/%s/%s/keys' % (year, page))
else:
return [Team(raw) for raw in self._get('teams/%s/%s%s' % (year, page, '/simple' if simple else ''))]
else:
if keys:
return self._get('teams/%s/keys' % page)
else:
return [Team(raw) for raw in self._get('teams/%s%s' % (page, '/simple' if simple else ''))]
# If no page was specified, get all of them and combine.
else:
teams = []
target = 0
while True:
page_teams = self.teams(page=target, year=year, simple=simple, keys=keys)
if page_teams:
teams.extend(page_teams)
else:
break
target += 1
return teams
def team(self, team, simple=False):
"""
Get data on a single specified team.
:param team: Team to get data for.
:param simple: Get only vital data.
:return: Team object with data on specified team.
"""
return Team(self._get('team/%s%s' % (self.team_key(team), '/simple' if simple else '')))
def team_events(self, team, year=None, simple=False, keys=False):
"""
Get team events a team has participated in.
:param team: Team to get events for.
:param year: Year to get events from.
:param simple: Get only vital data.
:param keys: Get just the keys of the events. Set to True if you only need the keys of each event and not their full data.
:return: List of strings or Teams
"""
if year:
if keys:
return self._get('team/%s/events/%s/keys' % (self.team_key(team), year))
else:
return [Event(raw) for raw in self._get('team/%s/events/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
else:
if keys:
return self._get('team/%s/events/keys' % self.team_key(team))
else:
return [Event(raw) for raw in self._get('team/%s/events%s' % (self.team_key(team), '/simple' if simple else ''))]
def team_awards(self, team, year=None, event=None):
"""
    Get list of awards team has received.
:param team: Team to get awards of.
:param year: Year to get awards from.
:param event: Event to get awards from.
:return: List of Award objects
"""
if event:
return [Award(raw) for raw in self._get('team/%s/event/%s/awards' % (self.team_key(team), event))]
else:
if year:
return [Award(raw) for raw in self._get('team/%s/awards/%s' % (self.team_key(team), year))]
else:
return [Award(raw) for raw in self._get('team/%s/awards' % self.team_key(team))]
def team_matches(self, team, event=None, year=None, simple=False, keys=False):
"""
Get list of matches team has participated in.
:param team: Team to get matches of.
:param year: Year to get matches from.
:param event: Event to get matches from.
:param simple: Get only vital data.
:param keys: Only get match keys rather than their full data.
:return: List of string keys or Match objects.
"""
if event:
if keys:
return self._get('team/%s/event/%s/matches/keys' % (self.team_key(team), event))
else:
return [Match(raw) for raw in self._get('team/%s/event/%s/matches%s' % (self.team_key(team), event, '/simple' if simple else ''))]
elif year:
if keys:
return self._get('team/%s/matches/%s/keys' % (self.team_key(team), year))
else:
return [Match(raw) for raw in self._get('team/%s/matches/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
def team_years(self, team):
"""
Get years during which a team participated in FRC.
:param team: Key for team to get data about.
:return: List of integer years in which team participated.
"""
return self._get('team/%s/years_participated' % self.team_key(team))
def team_robots(self, team):
"""
Get data about a team's robots.
:param team: Key for team whose robots you want data on.
:return: List of Robot objects
"""
return [Robot(raw) for raw in self._get('team/%s/robots' % self.team_key(team))]
def team_districts(self, team):
"""
Get districts a team has competed in.
:param team: Team to get data on.
:return: List of District objects.
"""
return [District(raw) for raw in self._get('team/%s/districts' % self.team_key(team))]
def team_profiles(self, team):
"""
Get team's social media profiles linked on their TBA page.
:param team: Team to get data on.
:return: List of Profile objects.
"""
return [Profile(raw) for raw in self._get('team/%s/social_media' % self.team_key(team))]
def team_status(self, team, event):
"""
Get status of a team at an event.
:param team: Team whose status to get.
:param event: Event team is at.
:return: Status object.
"""
return Status(self._get('team/%s/event/%s/status' % (self.team_key(team), event)))
def events(self, year, simple=False, keys=False):
"""
Get a list of events in a given year.
:param year: Year to get events from.
:param keys: Get only keys of the events rather than full data.
:param simple: Get only vital data.
:return: List of string event keys or Event objects.
"""
if keys:
return self._get('events/%s/keys' % year)
else:
return [Event(raw) for raw in self._get('events/%s%s' % (year, '/simple' if simple else ''))]
def event(self, event, simple=False):
"""
Get basic information about an event.
More specific data (typically obtained with the detail_type URL parameter) can be obtained with event_alliances(), event_district_points(), event_insights(), event_oprs(), event_predictions(), and event_rankings().
:param event: Key of event for which you desire data.
:param simple: Get only vital data.
:return: A single Event object.
"""
return Event(self._get('event/%s%s' % (event, '/simple' if simple else '')))
def event_alliances(self, event):
"""
Get information about alliances at event.
:param event: Key of event to get data on.
:return: List of Alliance objects.
"""
return [Alliance(raw) for raw in self._get('event/%s/alliances' % event)]
def event_district_points(self, event):
"""
Get district point information about an event.
:param event: Key of event to get data on.
:return: Single DistrictPoints object.
"""
return DistrictPoints(self._get('event/%s/district_points' % event))
def event_insights(self, event):
"""
Get insights about an event.
:param event: Key of event to get data on.
:return: Single Insights object.
"""
return Insights(self._get('event/%s/insights' % event))
def event_oprs(self, event):
"""
Get OPRs from an event.
:param event: Key of event to get data on.
:return: Single OPRs object.
"""
return OPRs(self._get('event/%s/oprs' % event))
def event_predictions(self, event):
"""
Get predictions for matches during an event.
:param event: Key of event to get data on.
:return: Single Predictions object.
"""
return Predictions(self._get('event/%s/predictions' % event))
def event_rankings(self, event):
"""
Get rankings from an event.
:param event: Key of event to get data on.
:return: Single Rankings object.
"""
return Rankings(self._get('event/%s/rankings' % event))
def event_teams(self, event, simple=False, keys=False):
"""
Get list of teams at an event.
:param event: Event key to get data on.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects.
"""
if keys:
return self._get('event/%s/teams/keys' % event)
else:
return [Team(raw) for raw in self._get('event/%s/teams%s' % (event, '/simple' if simple else ''))]
def event_awards(self, event):
"""
Get list of awards presented at an event.
:param event: Event key to get data on.
:return: List of Award objects.
"""
return [Award(raw) for raw in self._get('event/%s/awards' % event)]
def event_matches(self, event, simple=False, keys=False):
"""
Get list of matches played at an event.
:param event: Event key to get data on.
:param keys: Return list of match keys only rather than full data on every match.
:param simple: Get only vital data.
:return: List of string keys or Match objects.
"""
if keys:
return self._get('event/%s/matches/keys' % event)
else:
return [Match(raw) for raw in self._get('event/%s/matches%s' % (event, '/simple' if simple else ''))]
def match(self, key=None, year=None, event=None, type='qm', number=None, round=None, simple=False):
"""
Get data on a match.
    You may either pass the match's key directly, or pass `year`, `event`, `type`, `number` (the match number), and `round` if applicable (playoffs only). The event year may be specified as part of the event key or specified in the `year` parameter.
:param key: Key of match to get data on. First option for specifying a match (see above).
:param year: Year in which match took place. Optional; if excluded then must be included in event key.
:param event: Key of event in which match took place. Including year is optional; if excluded then must be specified in `year` parameter.
:param type: One of 'qm' (qualifier match), 'qf' (quarterfinal), 'sf' (semifinal), 'f' (final). If unspecified, 'qm' will be assumed.
:param number: Match number. For example, for qualifier 32, you'd pass 32. For Semifinal 2 round 3, you'd pass 2.
:param round: For playoff matches, you will need to specify a round.
:param simple: Get only vital data.
:return: A single Match object.
"""
if key:
return Match(self._get('match/%s%s' % (key, '/simple' if simple else '')))
else:
return Match(self._get('match/{year}{event}_{type}{number}{round}{simple}'.format(year=year if not event[0].isdigit() else '',
event=event,
type=type,
number=number,
round=('m%s' % round) if not type == 'qm' else '',
simple='/simple' if simple else '')))
def districts(self, year):
"""
Return a list of districts active.
:param year: Year from which you want to get active districts.
:return: A list of District objects.
"""
return [District(raw) for raw in self._get('districts/%s' % year)]
def district_events(self, district, simple=False, keys=False):
"""
Return list of events in a given district.
:param district: Key of district whose events you want.
:param simple: Get only vital data.
:param keys: Return list of event keys only rather than full data on every event.
:return: List of string keys or Event objects.
"""
if keys:
return self._get('district/%s/events/keys' % district)
else:
return [Event(raw) for raw in self._get('district/%s/events%s' % (district, '/simple' if simple else ''))]
def district_rankings(self, district):
"""
Return data about rankings in a given district.
:param district: Key of district to get rankings of.
:return: List of DistrictRanking objects.
"""
return [DistrictRanking(raw) for raw in self._get('district/%s/rankings' % district)]
def district_teams(self, district, simple=False, keys=False):
"""
Get list of teams in the given district.
:param district: Key for the district to get teams in.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects.
"""
if keys:
return self._get('district/%s/teams/keys' % district)
else:
return [Team(raw) for raw in self._get('district/%s/teams' % district)]
def update_trusted(self, auth_id, auth_secret, event_key):
"""
Set Trusted API ID and Secret and the event key they are assigned to.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.session.headers.update({'X-TBA-Auth-Id': auth_id})
self.auth_secret = auth_secret
self.event_key = event_key
def update_event_info(self, data):
"""
Update an event's info on The Blue Alliance.
:param data: Dictionary of data to update the event with.
"""
return self._post('event/%s/info/update', json.dumps(data))
def update_event_alliances(self, data):
"""
Update an event's alliances on The Blue Alliance.
:param data: List of lists of alliances in frc#### string format.
"""
return self._post('event/%s/alliance_selections/update', json.dumps(data))
def update_event_awards(self, data):
"""
Update an event's awards on The Blue Alliance.
:param data: List of Dictionaries of award winners. Each dictionary should have a name_str for the award name, team_key in frc#### string format, and the awardee for any awards given to individuals. The last two can be null
"""
return self._post('event/%s/awards/update', json.dumps(data))
def update_event_matches(self, data):
"""
Update an event's matches on The Blue Alliance.
:param data: List of Dictionaries. More info about the match data can be found in the API docs.
"""
return self._post('event/%s/matches/update', json.dumps(data))
def delete_event_matches(self, data=None):
"""
Delete an event's matches on The Blue Alliance.
    :param data: List of match keys to delete, can be omitted if you would like to delete all matches.
"""
return self._post('event/%s/matches/delete_all' if data is None else 'event/%s/matches/delete', json.dumps(self.event_key) if data is None else json.dumps(data))
def update_event_rankings(self, data):
"""
Update an event's rankings on The Blue Alliance.
:param data: Dictionary of breakdowns and rankings. Rankings are a list of dictionaries.
"""
return self._post('event/%s/rankings/update', json.dumps(data))
def update_event_team_list(self, data):
"""
Update an event's team list on The Blue Alliance.
:param data: a list of team keys in frc#### string format.
"""
return self._post('event/%s/team_list/update', json.dumps(data))
def add_match_videos(self, data):
"""
Add match videos to the respective match pages of an event on The Blue Alliance.
:param data: Dictionary of partial match keys to youtube video ids.
"""
return self._post('event/%s/match_videos/add', json.dumps(data))
def add_event_videos(self, data):
"""
Add videos to an event's media tab on The Blue Alliance.
:param data: List of youtube video ids.
"""
return self._post('event/%s/media/add', json.dumps(data))
|
frc1418/tbapy | tbapy/main.py | TBA.team_robots | python | def team_robots(self, team):
return [Robot(raw) for raw in self._get('team/%s/robots' % self.team_key(team))] | Get data about a team's robots.
:param team: Key for team whose robots you want data on.
:return: List of Robot objects | train | https://github.com/frc1418/tbapy/blob/3866d5a9971fe3dfaf1a1d83638bd6be6070f0c4/tbapy/main.py#L200-L207 | [
"def _get(self, url):\n \"\"\"\n Helper method: GET data from given URL on TBA's API.\n\n :param url: URL string to get data from.\n :return: Requested data in JSON format.\n \"\"\"\n return self.session.get(self.READ_URL_PRE + url).json()\n",
"def team_key(identifier):\n \"\"\"\n Take raw team number or string key and return string key.\n\n Used by all team-related methods to support either an integer team number or team key being passed.\n\n (We recommend passing an integer, just because it's cleaner. But whatever works.)\n\n :param identifier: int team number or str 'frc####'\n :return: string team key in format 'frc####'\n \"\"\"\n return identifier if type(identifier) == str else 'frc%s' % identifier\n"
] | class TBA:
"""
Main library class.
Contains methods for interacting with The Blue Alliance.
"""
READ_URL_PRE = 'https://www.thebluealliance.com/api/v3/'
WRITE_URL_PRE = 'https://www.thebluealliance.com/api/trusted/v1/'
session = requests.Session()
auth_secret = ''
event_key = ''
def __init__(self, auth_key, auth_id='', auth_secret='', event_key=''):
"""
Store auth key so we can reuse it as many times as we make a request.
:param auth_key: Your application authorization key, obtainable at https://www.thebluealliance.com/account.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.auth_secret = auth_secret
self.event_key = event_key
self.session.headers.update({'X-TBA-Auth-Key': auth_key, 'X-TBA-Auth-Id': auth_id})
def _get(self, url):
"""
Helper method: GET data from given URL on TBA's API.
:param url: URL string to get data from.
:return: Requested data in JSON format.
"""
return self.session.get(self.READ_URL_PRE + url).json()
def _post(self, url, data):
"""
Helper method: POST data to a given URL on TBA's API.
:param url: URL string to post data to and hash.
    :param data: JSON data to post and hash.
:return: Requests Response object.
"""
return self.session.post(self.WRITE_URL_PRE + url % self.event_key, data=data, headers={'X-TBA-Auth-Sig': md5((self.auth_secret + '/api/trusted/v1/' + url % self.event_key + data).encode('utf-8')).hexdigest()})
@staticmethod
def team_key(identifier):
"""
Take raw team number or string key and return string key.
Used by all team-related methods to support either an integer team number or team key being passed.
(We recommend passing an integer, just because it's cleaner. But whatever works.)
:param identifier: int team number or str 'frc####'
:return: string team key in format 'frc####'
"""
return identifier if type(identifier) == str else 'frc%s' % identifier
def status(self):
"""
Get TBA API status information.
:return: Data on current status of the TBA API as APIStatus object.
"""
return APIStatus(self._get('status'))
def teams(self, page=None, year=None, simple=False, keys=False):
"""
Get list of teams.
:param page: Page of teams to view. Each page contains 500 teams.
:param year: View teams from a specific year.
:param simple: Get only vital data.
:param keys: Set to true if you only want the teams' keys rather than full data on them.
:return: List of Team objects or string keys.
"""
# If the user has requested a specific page, get that page.
if page is not None:
if year:
if keys:
return self._get('teams/%s/%s/keys' % (year, page))
else:
return [Team(raw) for raw in self._get('teams/%s/%s%s' % (year, page, '/simple' if simple else ''))]
else:
if keys:
return self._get('teams/%s/keys' % page)
else:
return [Team(raw) for raw in self._get('teams/%s%s' % (page, '/simple' if simple else ''))]
# If no page was specified, get all of them and combine.
else:
teams = []
target = 0
while True:
page_teams = self.teams(page=target, year=year, simple=simple, keys=keys)
if page_teams:
teams.extend(page_teams)
else:
break
target += 1
return teams
def team(self, team, simple=False):
"""
Get data on a single specified team.
:param team: Team to get data for.
:param simple: Get only vital data.
:return: Team object with data on specified team.
"""
return Team(self._get('team/%s%s' % (self.team_key(team), '/simple' if simple else '')))
def team_events(self, team, year=None, simple=False, keys=False):
"""
Get team events a team has participated in.
:param team: Team to get events for.
:param year: Year to get events from.
:param simple: Get only vital data.
:param keys: Get just the keys of the events. Set to True if you only need the keys of each event and not their full data.
:return: List of strings or Teams
"""
if year:
if keys:
return self._get('team/%s/events/%s/keys' % (self.team_key(team), year))
else:
return [Event(raw) for raw in self._get('team/%s/events/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
else:
if keys:
return self._get('team/%s/events/keys' % self.team_key(team))
else:
return [Event(raw) for raw in self._get('team/%s/events%s' % (self.team_key(team), '/simple' if simple else ''))]
def team_awards(self, team, year=None, event=None):
"""
    Get list of awards team has received.
:param team: Team to get awards of.
:param year: Year to get awards from.
:param event: Event to get awards from.
:return: List of Award objects
"""
if event:
return [Award(raw) for raw in self._get('team/%s/event/%s/awards' % (self.team_key(team), event))]
else:
if year:
return [Award(raw) for raw in self._get('team/%s/awards/%s' % (self.team_key(team), year))]
else:
return [Award(raw) for raw in self._get('team/%s/awards' % self.team_key(team))]
def team_matches(self, team, event=None, year=None, simple=False, keys=False):
"""
Get list of matches team has participated in.
:param team: Team to get matches of.
:param year: Year to get matches from.
:param event: Event to get matches from.
:param simple: Get only vital data.
:param keys: Only get match keys rather than their full data.
:return: List of string keys or Match objects.
"""
if event:
if keys:
return self._get('team/%s/event/%s/matches/keys' % (self.team_key(team), event))
else:
return [Match(raw) for raw in self._get('team/%s/event/%s/matches%s' % (self.team_key(team), event, '/simple' if simple else ''))]
elif year:
if keys:
return self._get('team/%s/matches/%s/keys' % (self.team_key(team), year))
else:
return [Match(raw) for raw in self._get('team/%s/matches/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
def team_years(self, team):
"""
Get years during which a team participated in FRC.
:param team: Key for team to get data about.
:return: List of integer years in which team participated.
"""
return self._get('team/%s/years_participated' % self.team_key(team))
def team_media(self, team, year=None, tag=None):
"""
Get media for a given team.
:param team: Team to get media of.
:param year: Year to get media from.
:param tag: Get only media with a given tag.
:return: List of Media objects.
"""
return [Media(raw) for raw in self._get('team/%s/media%s%s' % (self.team_key(team), ('/tag/%s' % tag) if tag else '', ('/%s' % year) if year else ''))]
def team_districts(self, team):
"""
Get districts a team has competed in.
:param team: Team to get data on.
:return: List of District objects.
"""
return [District(raw) for raw in self._get('team/%s/districts' % self.team_key(team))]
def team_profiles(self, team):
"""
Get team's social media profiles linked on their TBA page.
:param team: Team to get data on.
:return: List of Profile objects.
"""
return [Profile(raw) for raw in self._get('team/%s/social_media' % self.team_key(team))]
def team_status(self, team, event):
"""
Get status of a team at an event.
:param team: Team whose status to get.
:param event: Event team is at.
:return: Status object.
"""
return Status(self._get('team/%s/event/%s/status' % (self.team_key(team), event)))
def events(self, year, simple=False, keys=False):
"""
Get a list of events in a given year.
:param year: Year to get events from.
:param keys: Get only keys of the events rather than full data.
:param simple: Get only vital data.
:return: List of string event keys or Event objects.
"""
if keys:
return self._get('events/%s/keys' % year)
else:
return [Event(raw) for raw in self._get('events/%s%s' % (year, '/simple' if simple else ''))]
def event(self, event, simple=False):
"""
Get basic information about an event.
More specific data (typically obtained with the detail_type URL parameter) can be obtained with event_alliances(), event_district_points(), event_insights(), event_oprs(), event_predictions(), and event_rankings().
:param event: Key of event for which you desire data.
:param simple: Get only vital data.
:return: A single Event object.
"""
return Event(self._get('event/%s%s' % (event, '/simple' if simple else '')))
def event_alliances(self, event):
"""
Get information about alliances at event.
:param event: Key of event to get data on.
:return: List of Alliance objects.
"""
return [Alliance(raw) for raw in self._get('event/%s/alliances' % event)]
def event_district_points(self, event):
"""
Get district point information about an event.
:param event: Key of event to get data on.
:return: Single DistrictPoints object.
"""
return DistrictPoints(self._get('event/%s/district_points' % event))
def event_insights(self, event):
"""
Get insights about an event.
:param event: Key of event to get data on.
:return: Single Insights object.
"""
return Insights(self._get('event/%s/insights' % event))
def event_oprs(self, event):
"""
Get OPRs from an event.
:param event: Key of event to get data on.
:return: Single OPRs object.
"""
return OPRs(self._get('event/%s/oprs' % event))
def event_predictions(self, event):
"""
Get predictions for matches during an event.
:param event: Key of event to get data on.
:return: Single Predictions object.
"""
return Predictions(self._get('event/%s/predictions' % event))
def event_rankings(self, event):
"""
Get rankings from an event.
:param event: Key of event to get data on.
:return: Single Rankings object.
"""
return Rankings(self._get('event/%s/rankings' % event))
def event_teams(self, event, simple=False, keys=False):
"""
Get list of teams at an event.
:param event: Event key to get data on.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects.
"""
if keys:
return self._get('event/%s/teams/keys' % event)
else:
return [Team(raw) for raw in self._get('event/%s/teams%s' % (event, '/simple' if simple else ''))]
def event_awards(self, event):
"""
Get list of awards presented at an event.
:param event: Event key to get data on.
:return: List of Award objects.
"""
return [Award(raw) for raw in self._get('event/%s/awards' % event)]
def event_matches(self, event, simple=False, keys=False):
"""
Get list of matches played at an event.
:param event: Event key to get data on.
:param keys: Return list of match keys only rather than full data on every match.
:param simple: Get only vital data.
:return: List of string keys or Match objects.
"""
if keys:
return self._get('event/%s/matches/keys' % event)
else:
return [Match(raw) for raw in self._get('event/%s/matches%s' % (event, '/simple' if simple else ''))]
def match(self, key=None, year=None, event=None, type='qm', number=None, round=None, simple=False):
"""
Get data on a match.
    You may either pass the match's key directly, or pass `year`, `event`, `type`, `number` (the match number), and `round` if applicable (playoffs only). The event year may be specified as part of the event key or specified in the `year` parameter.
:param key: Key of match to get data on. First option for specifying a match (see above).
:param year: Year in which match took place. Optional; if excluded then must be included in event key.
:param event: Key of event in which match took place. Including year is optional; if excluded then must be specified in `year` parameter.
:param type: One of 'qm' (qualifier match), 'qf' (quarterfinal), 'sf' (semifinal), 'f' (final). If unspecified, 'qm' will be assumed.
:param number: Match number. For example, for qualifier 32, you'd pass 32. For Semifinal 2 round 3, you'd pass 2.
:param round: For playoff matches, you will need to specify a round.
:param simple: Get only vital data.
:return: A single Match object.
"""
if key:
return Match(self._get('match/%s%s' % (key, '/simple' if simple else '')))
else:
return Match(self._get('match/{year}{event}_{type}{number}{round}{simple}'.format(year=year if not event[0].isdigit() else '',
event=event,
type=type,
number=number,
round=('m%s' % round) if not type == 'qm' else '',
simple='/simple' if simple else '')))
def districts(self, year):
"""
Return a list of districts active.
:param year: Year from which you want to get active districts.
:return: A list of District objects.
"""
return [District(raw) for raw in self._get('districts/%s' % year)]
def district_events(self, district, simple=False, keys=False):
"""
Return list of events in a given district.
:param district: Key of district whose events you want.
:param simple: Get only vital data.
:param keys: Return list of event keys only rather than full data on every event.
:return: List of string keys or Event objects.
"""
if keys:
return self._get('district/%s/events/keys' % district)
else:
return [Event(raw) for raw in self._get('district/%s/events%s' % (district, '/simple' if simple else ''))]
def district_rankings(self, district):
"""
Return data about rankings in a given district.
:param district: Key of district to get rankings of.
:return: List of DistrictRanking objects.
"""
return [DistrictRanking(raw) for raw in self._get('district/%s/rankings' % district)]
def district_teams(self, district, simple=False, keys=False):
"""
Get list of teams in the given district.
:param district: Key for the district to get teams in.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects.
"""
if keys:
return self._get('district/%s/teams/keys' % district)
else:
return [Team(raw) for raw in self._get('district/%s/teams' % district)]
def update_trusted(self, auth_id, auth_secret, event_key):
"""
Set Trusted API ID and Secret and the event key they are assigned to.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.session.headers.update({'X-TBA-Auth-Id': auth_id})
self.auth_secret = auth_secret
self.event_key = event_key
def update_event_info(self, data):
"""
Update an event's info on The Blue Alliance.
:param data: Dictionary of data to update the event with.
"""
return self._post('event/%s/info/update', json.dumps(data))
def update_event_alliances(self, data):
"""
Update an event's alliances on The Blue Alliance.
:param data: List of lists of alliances in frc#### string format.
"""
return self._post('event/%s/alliance_selections/update', json.dumps(data))
def update_event_awards(self, data):
"""
Update an event's awards on The Blue Alliance.
:param data: List of Dictionaries of award winners. Each dictionary should have a name_str for the award name, team_key in frc#### string format, and the awardee for any awards given to individuals. The last two can be null
"""
return self._post('event/%s/awards/update', json.dumps(data))
def update_event_matches(self, data):
"""
Update an event's matches on The Blue Alliance.
:param data: List of Dictionaries. More info about the match data can be found in the API docs.
"""
return self._post('event/%s/matches/update', json.dumps(data))
def delete_event_matches(self, data=None):
"""
Delete an event's matches on The Blue Alliance.
    :param data: List of match keys to delete, can be omitted if you would like to delete all matches.
"""
return self._post('event/%s/matches/delete_all' if data is None else 'event/%s/matches/delete', json.dumps(self.event_key) if data is None else json.dumps(data))
def update_event_rankings(self, data):
"""
Update an event's rankings on The Blue Alliance.
:param data: Dictionary of breakdowns and rankings. Rankings are a list of dictionaries.
"""
return self._post('event/%s/rankings/update', json.dumps(data))
def update_event_team_list(self, data):
"""
Update an event's team list on The Blue Alliance.
:param data: a list of team keys in frc#### string format.
"""
return self._post('event/%s/team_list/update', json.dumps(data))
def add_match_videos(self, data):
"""
Add match videos to the respective match pages of an event on The Blue Alliance.
:param data: Dictionary of partial match keys to youtube video ids.
"""
return self._post('event/%s/match_videos/add', json.dumps(data))
def add_event_videos(self, data):
"""
Add videos to an event's media tab on The Blue Alliance.
:param data: List of youtube video ids.
"""
return self._post('event/%s/media/add', json.dumps(data))
|
frc1418/tbapy | tbapy/main.py | TBA.team_districts | python | def team_districts(self, team):
return [District(raw) for raw in self._get('team/%s/districts' % self.team_key(team))] | Get districts a team has competed in.
:param team: Team to get data on.
:return: List of District objects. | train | https://github.com/frc1418/tbapy/blob/3866d5a9971fe3dfaf1a1d83638bd6be6070f0c4/tbapy/main.py#L209-L216 | [
"def _get(self, url):\n \"\"\"\n Helper method: GET data from given URL on TBA's API.\n\n :param url: URL string to get data from.\n :return: Requested data in JSON format.\n \"\"\"\n return self.session.get(self.READ_URL_PRE + url).json()\n",
"def team_key(identifier):\n \"\"\"\n Take raw team number or string key and return string key.\n\n Used by all team-related methods to support either an integer team number or team key being passed.\n\n (We recommend passing an integer, just because it's cleaner. But whatever works.)\n\n :param identifier: int team number or str 'frc####'\n :return: string team key in format 'frc####'\n \"\"\"\n return identifier if type(identifier) == str else 'frc%s' % identifier\n"
] | class TBA:
"""
Main library class.
Contains methods for interacting with The Blue Alliance.
"""
READ_URL_PRE = 'https://www.thebluealliance.com/api/v3/'
WRITE_URL_PRE = 'https://www.thebluealliance.com/api/trusted/v1/'
session = requests.Session()
auth_secret = ''
event_key = ''
def __init__(self, auth_key, auth_id='', auth_secret='', event_key=''):
"""
Store auth key so we can reuse it as many times as we make a request.
:param auth_key: Your application authorization key, obtainable at https://www.thebluealliance.com/account.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.auth_secret = auth_secret
self.event_key = event_key
self.session.headers.update({'X-TBA-Auth-Key': auth_key, 'X-TBA-Auth-Id': auth_id})
def _get(self, url):
"""
Helper method: GET data from given URL on TBA's API.
:param url: URL string to get data from.
:return: Requested data in JSON format.
"""
return self.session.get(self.READ_URL_PRE + url).json()
def _post(self, url, data):
"""
Helper method: POST data to a given URL on TBA's API.
:param url: URL string to post data to and hash.
:pararm data: JSON data to post and hash.
:return: Requests Response object.
"""
return self.session.post(self.WRITE_URL_PRE + url % self.event_key, data=data, headers={'X-TBA-Auth-Sig': md5((self.auth_secret + '/api/trusted/v1/' + url % self.event_key + data).encode('utf-8')).hexdigest()})
@staticmethod
def team_key(identifier):
    """
    Normalize a team identifier to its string key form.

    Every team-related method accepts either an integer team number
    (e.g. 1418) or an already-formed string key (e.g. 'frc1418');
    this helper converts the former and passes the latter through.

    :param identifier: int team number or str 'frc####'
    :return: string team key in format 'frc####'
    """
    # Pass string keys through untouched; prefix integers with 'frc'.
    if type(identifier) == str:
        return identifier
    return 'frc%s' % identifier
def status(self):
"""
Get TBA API status information.
:return: Data on current status of the TBA API as APIStatus object.
"""
return APIStatus(self._get('status'))
def teams(self, page=None, year=None, simple=False, keys=False):
"""
Get list of teams.
:param page: Page of teams to view. Each page contains 500 teams.
:param year: View teams from a specific year.
:param simple: Get only vital data.
:param keys: Set to true if you only want the teams' keys rather than full data on them.
:return: List of Team objects or string keys.
"""
# If the user has requested a specific page, get that page.
if page is not None:
if year:
if keys:
return self._get('teams/%s/%s/keys' % (year, page))
else:
return [Team(raw) for raw in self._get('teams/%s/%s%s' % (year, page, '/simple' if simple else ''))]
else:
if keys:
return self._get('teams/%s/keys' % page)
else:
return [Team(raw) for raw in self._get('teams/%s%s' % (page, '/simple' if simple else ''))]
# If no page was specified, get all of them and combine.
else:
teams = []
target = 0
while True:
page_teams = self.teams(page=target, year=year, simple=simple, keys=keys)
if page_teams:
teams.extend(page_teams)
else:
break
target += 1
return teams
def team(self, team, simple=False):
"""
Get data on a single specified team.
:param team: Team to get data for.
:param simple: Get only vital data.
:return: Team object with data on specified team.
"""
return Team(self._get('team/%s%s' % (self.team_key(team), '/simple' if simple else '')))
def team_events(self, team, year=None, simple=False, keys=False):
"""
Get team events a team has participated in.
:param team: Team to get events for.
:param year: Year to get events from.
:param simple: Get only vital data.
:param keys: Get just the keys of the events. Set to True if you only need the keys of each event and not their full data.
:return: List of strings or Teams
"""
if year:
if keys:
return self._get('team/%s/events/%s/keys' % (self.team_key(team), year))
else:
return [Event(raw) for raw in self._get('team/%s/events/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
else:
if keys:
return self._get('team/%s/events/keys' % self.team_key(team))
else:
return [Event(raw) for raw in self._get('team/%s/events%s' % (self.team_key(team), '/simple' if simple else ''))]
def team_awards(self, team, year=None, event=None):
"""
Get list of awards team has received.
:param team: Team to get awards of.
:param year: Year to get awards from.
:param event: Event to get awards from.
:return: List of Award objects
"""
if event:
return [Award(raw) for raw in self._get('team/%s/event/%s/awards' % (self.team_key(team), event))]
else:
if year:
return [Award(raw) for raw in self._get('team/%s/awards/%s' % (self.team_key(team), year))]
else:
return [Award(raw) for raw in self._get('team/%s/awards' % self.team_key(team))]
def team_matches(self, team, event=None, year=None, simple=False, keys=False):
    """
    Get list of matches team has participated in.

    Exactly one of `event` or `year` must be supplied; if both are
    given, `event` takes precedence.

    :param team: Team to get matches of.
    :param year: Year to get matches from.
    :param event: Event to get matches from.
    :param simple: Get only vital data.
    :param keys: Only get match keys rather than their full data.
    :return: List of string keys or Match objects.
    :raises ValueError: If neither `event` nor `year` is supplied.
    """
    if event:
        if keys:
            return self._get('team/%s/event/%s/matches/keys' % (self.team_key(team), event))
        else:
            return [Match(raw) for raw in self._get('team/%s/event/%s/matches%s' % (self.team_key(team), event, '/simple' if simple else ''))]
    elif year:
        if keys:
            return self._get('team/%s/matches/%s/keys' % (self.team_key(team), year))
        else:
            return [Match(raw) for raw in self._get('team/%s/matches/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
    else:
        # Bug fix: previously this fell through and silently returned None,
        # hiding caller mistakes; fail fast with a clear message instead.
        raise ValueError('team_matches requires either an event key or a year')
def team_years(self, team):
"""
Get years during which a team participated in FRC.
:param team: Key for team to get data about.
:return: List of integer years in which team participated.
"""
return self._get('team/%s/years_participated' % self.team_key(team))
def team_media(self, team, year=None, tag=None):
"""
Get media for a given team.
:param team: Team to get media of.
:param year: Year to get media from.
:param tag: Get only media with a given tag.
:return: List of Media objects.
"""
return [Media(raw) for raw in self._get('team/%s/media%s%s' % (self.team_key(team), ('/tag/%s' % tag) if tag else '', ('/%s' % year) if year else ''))]
def team_robots(self, team):
"""
Get data about a team's robots.
:param team: Key for team whose robots you want data on.
:return: List of Robot objects
"""
return [Robot(raw) for raw in self._get('team/%s/robots' % self.team_key(team))]
def team_profiles(self, team):
"""
Get team's social media profiles linked on their TBA page.
:param team: Team to get data on.
:return: List of Profile objects.
"""
return [Profile(raw) for raw in self._get('team/%s/social_media' % self.team_key(team))]
def team_status(self, team, event):
"""
Get status of a team at an event.
:param team: Team whose status to get.
:param event: Event team is at.
:return: Status object.
"""
return Status(self._get('team/%s/event/%s/status' % (self.team_key(team), event)))
def events(self, year, simple=False, keys=False):
"""
Get a list of events in a given year.
:param year: Year to get events from.
:param keys: Get only keys of the events rather than full data.
:param simple: Get only vital data.
:return: List of string event keys or Event objects.
"""
if keys:
return self._get('events/%s/keys' % year)
else:
return [Event(raw) for raw in self._get('events/%s%s' % (year, '/simple' if simple else ''))]
def event(self, event, simple=False):
"""
Get basic information about an event.
More specific data (typically obtained with the detail_type URL parameter) can be obtained with event_alliances(), event_district_points(), event_insights(), event_oprs(), event_predictions(), and event_rankings().
:param event: Key of event for which you desire data.
:param simple: Get only vital data.
:return: A single Event object.
"""
return Event(self._get('event/%s%s' % (event, '/simple' if simple else '')))
def event_alliances(self, event):
"""
Get information about alliances at event.
:param event: Key of event to get data on.
:return: List of Alliance objects.
"""
return [Alliance(raw) for raw in self._get('event/%s/alliances' % event)]
def event_district_points(self, event):
"""
Get district point information about an event.
:param event: Key of event to get data on.
:return: Single DistrictPoints object.
"""
return DistrictPoints(self._get('event/%s/district_points' % event))
def event_insights(self, event):
"""
Get insights about an event.
:param event: Key of event to get data on.
:return: Single Insights object.
"""
return Insights(self._get('event/%s/insights' % event))
def event_oprs(self, event):
"""
Get OPRs from an event.
:param event: Key of event to get data on.
:return: Single OPRs object.
"""
return OPRs(self._get('event/%s/oprs' % event))
def event_predictions(self, event):
"""
Get predictions for matches during an event.
:param event: Key of event to get data on.
:return: Single Predictions object.
"""
return Predictions(self._get('event/%s/predictions' % event))
def event_rankings(self, event):
"""
Get rankings from an event.
:param event: Key of event to get data on.
:return: Single Rankings object.
"""
return Rankings(self._get('event/%s/rankings' % event))
def event_teams(self, event, simple=False, keys=False):
"""
Get list of teams at an event.
:param event: Event key to get data on.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects.
"""
if keys:
return self._get('event/%s/teams/keys' % event)
else:
return [Team(raw) for raw in self._get('event/%s/teams%s' % (event, '/simple' if simple else ''))]
def event_awards(self, event):
"""
Get list of awards presented at an event.
:param event: Event key to get data on.
:return: List of Award objects.
"""
return [Award(raw) for raw in self._get('event/%s/awards' % event)]
def event_matches(self, event, simple=False, keys=False):
"""
Get list of matches played at an event.
:param event: Event key to get data on.
:param keys: Return list of match keys only rather than full data on every match.
:param simple: Get only vital data.
:return: List of string keys or Match objects.
"""
if keys:
return self._get('event/%s/matches/keys' % event)
else:
return [Match(raw) for raw in self._get('event/%s/matches%s' % (event, '/simple' if simple else ''))]
def match(self, key=None, year=None, event=None, type='qm', number=None, round=None, simple=False):
    """
    Get data on a match.

    You may either pass the match's key directly, or pass `year`, `event`, `type`, `number` (the match number), and `round` if applicable (playoffs only). The event year may be specified as part of the event key or specified in the `year` parameter.

    :param key: Key of match to get data on. First option for specifying a match (see above).
    :param year: Year in which match took place. Optional; if excluded then must be included in event key.
    :param event: Key of event in which match took place. Including year is optional; if excluded then must be specified in `year` parameter.
    :param type: One of 'qm' (qualifier match), 'qf' (quarterfinal), 'sf' (semifinal), 'f' (final). If unspecified, 'qm' will be assumed.
    :param number: Match number. For example, for qualifier 32, you'd pass 32. For Semifinal 2 round 3, you'd pass 2.
    :param round: For playoff matches, you will need to specify a round.
    :param simple: Get only vital data.
    :return: A single Match object.
    """
    # NOTE(review): when `key` is omitted, `event` must be supplied —
    # `event[0]` below raises TypeError if `event` is None. Confirm callers
    # always pass one of the two.
    if key:
        return Match(self._get('match/%s%s' % (key, '/simple' if simple else '')))
    else:
        # If the event key already embeds the year (starts with a digit),
        # drop the separate `year` value so it isn't doubled in the key.
        return Match(self._get('match/{year}{event}_{type}{number}{round}{simple}'.format(year=year if not event[0].isdigit() else '',
                                                                                          event=event,
                                                                                          type=type,
                                                                                          number=number,
                                                                                          round=('m%s' % round) if not type == 'qm' else '',
                                                                                          simple='/simple' if simple else '')))
def districts(self, year):
"""
Return a list of districts active.
:param year: Year from which you want to get active districts.
:return: A list of District objects.
"""
return [District(raw) for raw in self._get('districts/%s' % year)]
def district_events(self, district, simple=False, keys=False):
"""
Return list of events in a given district.
:param district: Key of district whose events you want.
:param simple: Get only vital data.
:param keys: Return list of event keys only rather than full data on every event.
:return: List of string keys or Event objects.
"""
if keys:
return self._get('district/%s/events/keys' % district)
else:
return [Event(raw) for raw in self._get('district/%s/events%s' % (district, '/simple' if simple else ''))]
def district_rankings(self, district):
"""
Return data about rankings in a given district.
:param district: Key of district to get rankings of.
:return: List of DistrictRanking objects.
"""
return [DistrictRanking(raw) for raw in self._get('district/%s/rankings' % district)]
def district_teams(self, district, simple=False, keys=False):
    """
    Get list of teams in the given district.

    :param district: Key for the district to get teams in.
    :param simple: Get only vital data.
    :param keys: Return list of team keys only rather than full data on every team.
    :return: List of string keys or Team objects.
    """
    if keys:
        return self._get('district/%s/teams/keys' % district)
    else:
        # Bug fix: the `simple` flag was previously accepted but ignored on
        # this endpoint; append '/simple' like every other list endpoint
        # (event_teams, district_events, ...).
        return [Team(raw) for raw in self._get('district/%s/teams%s' % (district, '/simple' if simple else ''))]
def update_trusted(self, auth_id, auth_secret, event_key):
"""
Set Trusted API ID and Secret and the event key they are assigned to.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.session.headers.update({'X-TBA-Auth-Id': auth_id})
self.auth_secret = auth_secret
self.event_key = event_key
def update_event_info(self, data):
"""
Update an event's info on The Blue Alliance.
:param data: Dictionary of data to update the event with.
"""
return self._post('event/%s/info/update', json.dumps(data))
def update_event_alliances(self, data):
"""
Update an event's alliances on The Blue Alliance.
:param data: List of lists of alliances in frc#### string format.
"""
return self._post('event/%s/alliance_selections/update', json.dumps(data))
def update_event_awards(self, data):
"""
Update an event's awards on The Blue Alliance.
:param data: List of Dictionaries of award winners. Each dictionary should have a name_str for the award name, team_key in frc#### string format, and the awardee for any awards given to individuals. The last two can be null
"""
return self._post('event/%s/awards/update', json.dumps(data))
def update_event_matches(self, data):
"""
Update an event's matches on The Blue Alliance.
:param data: List of Dictionaries. More info about the match data can be found in the API docs.
"""
return self._post('event/%s/matches/update', json.dumps(data))
def delete_event_matches(self, data=None):
"""
Delete an event's matches on The Blue Alliance.
:param data: List of match keys to delete; can be omitted if you would like to delete all matches.
"""
return self._post('event/%s/matches/delete_all' if data is None else 'event/%s/matches/delete', json.dumps(self.event_key) if data is None else json.dumps(data))
def update_event_rankings(self, data):
"""
Update an event's rankings on The Blue Alliance.
:param data: Dictionary of breakdowns and rankings. Rankings are a list of dictionaries.
"""
return self._post('event/%s/rankings/update', json.dumps(data))
def update_event_team_list(self, data):
"""
Update an event's team list on The Blue Alliance.
:param data: a list of team keys in frc#### string format.
"""
return self._post('event/%s/team_list/update', json.dumps(data))
def add_match_videos(self, data):
"""
Add match videos to the respective match pages of an event on The Blue Alliance.
:param data: Dictionary of partial match keys to youtube video ids.
"""
return self._post('event/%s/match_videos/add', json.dumps(data))
def add_event_videos(self, data):
"""
Add videos to an event's media tab on The Blue Alliance.
:param data: List of youtube video ids.
"""
return self._post('event/%s/media/add', json.dumps(data))
|
frc1418/tbapy | tbapy/main.py | TBA.team_profiles | python | def team_profiles(self, team):
return [Profile(raw) for raw in self._get('team/%s/social_media' % self.team_key(team))] | Get team's social media profiles linked on their TBA page.
:param team: Team to get data on.
:return: List of Profile objects. | train | https://github.com/frc1418/tbapy/blob/3866d5a9971fe3dfaf1a1d83638bd6be6070f0c4/tbapy/main.py#L218-L225 | [
"def _get(self, url):\n \"\"\"\n Helper method: GET data from given URL on TBA's API.\n\n :param url: URL string to get data from.\n :return: Requested data in JSON format.\n \"\"\"\n return self.session.get(self.READ_URL_PRE + url).json()\n",
"def team_key(identifier):\n \"\"\"\n Take raw team number or string key and return string key.\n\n Used by all team-related methods to support either an integer team number or team key being passed.\n\n (We recommend passing an integer, just because it's cleaner. But whatever works.)\n\n :param identifier: int team number or str 'frc####'\n :return: string team key in format 'frc####'\n \"\"\"\n return identifier if type(identifier) == str else 'frc%s' % identifier\n"
] | class TBA:
"""
Main library class.
Contains methods for interacting with The Blue Alliance.
"""
READ_URL_PRE = 'https://www.thebluealliance.com/api/v3/'
WRITE_URL_PRE = 'https://www.thebluealliance.com/api/trusted/v1/'
session = requests.Session()
auth_secret = ''
event_key = ''
def __init__(self, auth_key, auth_id='', auth_secret='', event_key=''):
"""
Store auth key so we can reuse it as many times as we make a request.
:param auth_key: Your application authorization key, obtainable at https://www.thebluealliance.com/account.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.auth_secret = auth_secret
self.event_key = event_key
self.session.headers.update({'X-TBA-Auth-Key': auth_key, 'X-TBA-Auth-Id': auth_id})
def _get(self, url):
"""
Helper method: GET data from given URL on TBA's API.
:param url: URL string to get data from.
:return: Requested data in JSON format.
"""
return self.session.get(self.READ_URL_PRE + url).json()
def _post(self, url, data):
"""
Helper method: POST data to a given URL on TBA's API.
:param url: URL string to post data to and hash.
:param data: JSON data to post and hash.
:return: Requests Response object.
"""
return self.session.post(self.WRITE_URL_PRE + url % self.event_key, data=data, headers={'X-TBA-Auth-Sig': md5((self.auth_secret + '/api/trusted/v1/' + url % self.event_key + data).encode('utf-8')).hexdigest()})
@staticmethod
def team_key(identifier):
"""
Take raw team number or string key and return string key.
Used by all team-related methods to support either an integer team number or team key being passed.
(We recommend passing an integer, just because it's cleaner. But whatever works.)
:param identifier: int team number or str 'frc####'
:return: string team key in format 'frc####'
"""
return identifier if type(identifier) == str else 'frc%s' % identifier
def status(self):
"""
Get TBA API status information.
:return: Data on current status of the TBA API as APIStatus object.
"""
return APIStatus(self._get('status'))
def teams(self, page=None, year=None, simple=False, keys=False):
"""
Get list of teams.
:param page: Page of teams to view. Each page contains 500 teams.
:param year: View teams from a specific year.
:param simple: Get only vital data.
:param keys: Set to true if you only want the teams' keys rather than full data on them.
:return: List of Team objects or string keys.
"""
# If the user has requested a specific page, get that page.
if page is not None:
if year:
if keys:
return self._get('teams/%s/%s/keys' % (year, page))
else:
return [Team(raw) for raw in self._get('teams/%s/%s%s' % (year, page, '/simple' if simple else ''))]
else:
if keys:
return self._get('teams/%s/keys' % page)
else:
return [Team(raw) for raw in self._get('teams/%s%s' % (page, '/simple' if simple else ''))]
# If no page was specified, get all of them and combine.
else:
teams = []
target = 0
while True:
page_teams = self.teams(page=target, year=year, simple=simple, keys=keys)
if page_teams:
teams.extend(page_teams)
else:
break
target += 1
return teams
def team(self, team, simple=False):
"""
Get data on a single specified team.
:param team: Team to get data for.
:param simple: Get only vital data.
:return: Team object with data on specified team.
"""
return Team(self._get('team/%s%s' % (self.team_key(team), '/simple' if simple else '')))
def team_events(self, team, year=None, simple=False, keys=False):
"""
Get team events a team has participated in.
:param team: Team to get events for.
:param year: Year to get events from.
:param simple: Get only vital data.
:param keys: Get just the keys of the events. Set to True if you only need the keys of each event and not their full data.
:return: List of strings or Teams
"""
if year:
if keys:
return self._get('team/%s/events/%s/keys' % (self.team_key(team), year))
else:
return [Event(raw) for raw in self._get('team/%s/events/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
else:
if keys:
return self._get('team/%s/events/keys' % self.team_key(team))
else:
return [Event(raw) for raw in self._get('team/%s/events%s' % (self.team_key(team), '/simple' if simple else ''))]
def team_awards(self, team, year=None, event=None):
"""
Get list of awards team has received.
:param team: Team to get awards of.
:param year: Year to get awards from.
:param event: Event to get awards from.
:return: List of Award objects
"""
if event:
return [Award(raw) for raw in self._get('team/%s/event/%s/awards' % (self.team_key(team), event))]
else:
if year:
return [Award(raw) for raw in self._get('team/%s/awards/%s' % (self.team_key(team), year))]
else:
return [Award(raw) for raw in self._get('team/%s/awards' % self.team_key(team))]
def team_matches(self, team, event=None, year=None, simple=False, keys=False):
"""
Get list of matches team has participated in.
:param team: Team to get matches of.
:param year: Year to get matches from.
:param event: Event to get matches from.
:param simple: Get only vital data.
:param keys: Only get match keys rather than their full data.
:return: List of string keys or Match objects.
"""
if event:
if keys:
return self._get('team/%s/event/%s/matches/keys' % (self.team_key(team), event))
else:
return [Match(raw) for raw in self._get('team/%s/event/%s/matches%s' % (self.team_key(team), event, '/simple' if simple else ''))]
elif year:
if keys:
return self._get('team/%s/matches/%s/keys' % (self.team_key(team), year))
else:
return [Match(raw) for raw in self._get('team/%s/matches/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
def team_years(self, team):
"""
Get years during which a team participated in FRC.
:param team: Key for team to get data about.
:return: List of integer years in which team participated.
"""
return self._get('team/%s/years_participated' % self.team_key(team))
def team_media(self, team, year=None, tag=None):
"""
Get media for a given team.
:param team: Team to get media of.
:param year: Year to get media from.
:param tag: Get only media with a given tag.
:return: List of Media objects.
"""
return [Media(raw) for raw in self._get('team/%s/media%s%s' % (self.team_key(team), ('/tag/%s' % tag) if tag else '', ('/%s' % year) if year else ''))]
def team_robots(self, team):
"""
Get data about a team's robots.
:param team: Key for team whose robots you want data on.
:return: List of Robot objects
"""
return [Robot(raw) for raw in self._get('team/%s/robots' % self.team_key(team))]
def team_districts(self, team):
"""
Get districts a team has competed in.
:param team: Team to get data on.
:return: List of District objects.
"""
return [District(raw) for raw in self._get('team/%s/districts' % self.team_key(team))]
def team_status(self, team, event):
"""
Get status of a team at an event.
:param team: Team whose status to get.
:param event: Event team is at.
:return: Status object.
"""
return Status(self._get('team/%s/event/%s/status' % (self.team_key(team), event)))
def events(self, year, simple=False, keys=False):
"""
Get a list of events in a given year.
:param year: Year to get events from.
:param keys: Get only keys of the events rather than full data.
:param simple: Get only vital data.
:return: List of string event keys or Event objects.
"""
if keys:
return self._get('events/%s/keys' % year)
else:
return [Event(raw) for raw in self._get('events/%s%s' % (year, '/simple' if simple else ''))]
def event(self, event, simple=False):
"""
Get basic information about an event.
More specific data (typically obtained with the detail_type URL parameter) can be obtained with event_alliances(), event_district_points(), event_insights(), event_oprs(), event_predictions(), and event_rankings().
:param event: Key of event for which you desire data.
:param simple: Get only vital data.
:return: A single Event object.
"""
return Event(self._get('event/%s%s' % (event, '/simple' if simple else '')))
def event_alliances(self, event):
"""
Get information about alliances at event.
:param event: Key of event to get data on.
:return: List of Alliance objects.
"""
return [Alliance(raw) for raw in self._get('event/%s/alliances' % event)]
def event_district_points(self, event):
"""
Get district point information about an event.
:param event: Key of event to get data on.
:return: Single DistrictPoints object.
"""
return DistrictPoints(self._get('event/%s/district_points' % event))
def event_insights(self, event):
"""
Get insights about an event.
:param event: Key of event to get data on.
:return: Single Insights object.
"""
return Insights(self._get('event/%s/insights' % event))
def event_oprs(self, event):
"""
Get OPRs from an event.
:param event: Key of event to get data on.
:return: Single OPRs object.
"""
return OPRs(self._get('event/%s/oprs' % event))
def event_predictions(self, event):
"""
Get predictions for matches during an event.
:param event: Key of event to get data on.
:return: Single Predictions object.
"""
return Predictions(self._get('event/%s/predictions' % event))
def event_rankings(self, event):
"""
Get rankings from an event.
:param event: Key of event to get data on.
:return: Single Rankings object.
"""
return Rankings(self._get('event/%s/rankings' % event))
def event_teams(self, event, simple=False, keys=False):
"""
Get list of teams at an event.
:param event: Event key to get data on.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects.
"""
if keys:
return self._get('event/%s/teams/keys' % event)
else:
return [Team(raw) for raw in self._get('event/%s/teams%s' % (event, '/simple' if simple else ''))]
def event_awards(self, event):
"""
Get list of awards presented at an event.
:param event: Event key to get data on.
:return: List of Award objects.
"""
return [Award(raw) for raw in self._get('event/%s/awards' % event)]
def event_matches(self, event, simple=False, keys=False):
"""
Get list of matches played at an event.
:param event: Event key to get data on.
:param keys: Return list of match keys only rather than full data on every match.
:param simple: Get only vital data.
:return: List of string keys or Match objects.
"""
if keys:
return self._get('event/%s/matches/keys' % event)
else:
return [Match(raw) for raw in self._get('event/%s/matches%s' % (event, '/simple' if simple else ''))]
def match(self, key=None, year=None, event=None, type='qm', number=None, round=None, simple=False):
"""
Get data on a match.
You may either pass the match's key directly, or pass `year`, `event`, `type`, `match` (the match number), and `round` if applicable (playoffs only). The event year may be specified as part of the event key or specified in the `year` parameter.
:param key: Key of match to get data on. First option for specifying a match (see above).
:param year: Year in which match took place. Optional; if excluded then must be included in event key.
:param event: Key of event in which match took place. Including year is optional; if excluded then must be specified in `year` parameter.
:param type: One of 'qm' (qualifier match), 'qf' (quarterfinal), 'sf' (semifinal), 'f' (final). If unspecified, 'qm' will be assumed.
:param number: Match number. For example, for qualifier 32, you'd pass 32. For Semifinal 2 round 3, you'd pass 2.
:param round: For playoff matches, you will need to specify a round.
:param simple: Get only vital data.
:return: A single Match object.
"""
if key:
return Match(self._get('match/%s%s' % (key, '/simple' if simple else '')))
else:
return Match(self._get('match/{year}{event}_{type}{number}{round}{simple}'.format(year=year if not event[0].isdigit() else '',
event=event,
type=type,
number=number,
round=('m%s' % round) if not type == 'qm' else '',
simple='/simple' if simple else '')))
def districts(self, year):
"""
Return a list of districts active.
:param year: Year from which you want to get active districts.
:return: A list of District objects.
"""
return [District(raw) for raw in self._get('districts/%s' % year)]
def district_events(self, district, simple=False, keys=False):
"""
Return list of events in a given district.
:param district: Key of district whose events you want.
:param simple: Get only vital data.
:param keys: Return list of event keys only rather than full data on every event.
:return: List of string keys or Event objects.
"""
if keys:
return self._get('district/%s/events/keys' % district)
else:
return [Event(raw) for raw in self._get('district/%s/events%s' % (district, '/simple' if simple else ''))]
def district_rankings(self, district):
"""
Return data about rankings in a given district.
:param district: Key of district to get rankings of.
:return: List of DistrictRanking objects.
"""
return [DistrictRanking(raw) for raw in self._get('district/%s/rankings' % district)]
def district_teams(self, district, simple=False, keys=False):
"""
Get list of teams in the given district.
:param district: Key for the district to get teams in.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects.
"""
if keys:
return self._get('district/%s/teams/keys' % district)
else:
return [Team(raw) for raw in self._get('district/%s/teams' % district)]
def update_trusted(self, auth_id, auth_secret, event_key):
"""
Set Trusted API ID and Secret and the event key they are assigned to.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.session.headers.update({'X-TBA-Auth-Id': auth_id})
self.auth_secret = auth_secret
self.event_key = event_key
def update_event_info(self, data):
"""
Update an event's info on The Blue Alliance.
:param data: Dictionary of data to update the event with.
"""
return self._post('event/%s/info/update', json.dumps(data))
def update_event_alliances(self, data):
"""
Update an event's alliances on The Blue Alliance.
:param data: List of lists of alliances in frc#### string format.
"""
return self._post('event/%s/alliance_selections/update', json.dumps(data))
def update_event_awards(self, data):
"""
Update an event's awards on The Blue Alliance.
:param data: List of Dictionaries of award winners. Each dictionary should have a name_str for the award name, team_key in frc#### string format, and the awardee for any awards given to individuals. The last two can be null
"""
return self._post('event/%s/awards/update', json.dumps(data))
def update_event_matches(self, data):
"""
Update an event's matches on The Blue Alliance.
:param data: List of Dictionaries. More info about the match data can be found in the API docs.
"""
return self._post('event/%s/matches/update', json.dumps(data))
def delete_event_matches(self, data=None):
"""
Delete an event's matches on The Blue Alliance.
:param data: List of match keys to delete; can be omitted if you would like to delete all matches.
"""
return self._post('event/%s/matches/delete_all' if data is None else 'event/%s/matches/delete', json.dumps(self.event_key) if data is None else json.dumps(data))
def update_event_rankings(self, data):
"""
Update an event's rankings on The Blue Alliance.
:param data: Dictionary of breakdowns and rankings. Rankings are a list of dictionaries.
"""
return self._post('event/%s/rankings/update', json.dumps(data))
def update_event_team_list(self, data):
"""
Update an event's team list on The Blue Alliance.
:param data: a list of team keys in frc#### string format.
"""
return self._post('event/%s/team_list/update', json.dumps(data))
def add_match_videos(self, data):
"""
Add match videos to the respective match pages of an event on The Blue Alliance.
:param data: Dictionary of partial match keys to youtube video ids.
"""
return self._post('event/%s/match_videos/add', json.dumps(data))
def add_event_videos(self, data):
"""
Add videos to an event's media tab on The Blue Alliance.
:param data: List of youtube video ids.
"""
return self._post('event/%s/media/add', json.dumps(data))
|
frc1418/tbapy | tbapy/main.py | TBA.team_status | python | def team_status(self, team, event):
return Status(self._get('team/%s/event/%s/status' % (self.team_key(team), event))) | Get status of a team at an event.
:param team: Team whose status to get.
:param event: Event team is at.
:return: Status object. | train | https://github.com/frc1418/tbapy/blob/3866d5a9971fe3dfaf1a1d83638bd6be6070f0c4/tbapy/main.py#L227-L235 | [
"def _get(self, url):\n \"\"\"\n Helper method: GET data from given URL on TBA's API.\n\n :param url: URL string to get data from.\n :return: Requested data in JSON format.\n \"\"\"\n return self.session.get(self.READ_URL_PRE + url).json()\n",
"def team_key(identifier):\n \"\"\"\n Take raw team number or string key and return string key.\n\n Used by all team-related methods to support either an integer team number or team key being passed.\n\n (We recommend passing an integer, just because it's cleaner. But whatever works.)\n\n :param identifier: int team number or str 'frc####'\n :return: string team key in format 'frc####'\n \"\"\"\n return identifier if type(identifier) == str else 'frc%s' % identifier\n"
] | class TBA:
"""
Main library class.
Contains methods for interacting with The Blue Alliance.
"""
READ_URL_PRE = 'https://www.thebluealliance.com/api/v3/'
WRITE_URL_PRE = 'https://www.thebluealliance.com/api/trusted/v1/'
session = requests.Session()
auth_secret = ''
event_key = ''
def __init__(self, auth_key, auth_id='', auth_secret='', event_key=''):
"""
Store auth key so we can reuse it as many times as we make a request.
:param auth_key: Your application authorization key, obtainable at https://www.thebluealliance.com/account.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.auth_secret = auth_secret
self.event_key = event_key
self.session.headers.update({'X-TBA-Auth-Key': auth_key, 'X-TBA-Auth-Id': auth_id})
def _get(self, url):
"""
Helper method: GET data from given URL on TBA's API.
:param url: URL string to get data from.
:return: Requested data in JSON format.
"""
return self.session.get(self.READ_URL_PRE + url).json()
def _post(self, url, data):
"""
Helper method: POST data to a given URL on TBA's API.
:param url: URL string to post data to and hash.
:param data: JSON data to post and hash.
:return: Requests Response object.
"""
return self.session.post(self.WRITE_URL_PRE + url % self.event_key, data=data, headers={'X-TBA-Auth-Sig': md5((self.auth_secret + '/api/trusted/v1/' + url % self.event_key + data).encode('utf-8')).hexdigest()})
@staticmethod
def team_key(identifier):
"""
Take raw team number or string key and return string key.
Used by all team-related methods to support either an integer team number or team key being passed.
(We recommend passing an integer, just because it's cleaner. But whatever works.)
:param identifier: int team number or str 'frc####'
:return: string team key in format 'frc####'
"""
return identifier if type(identifier) == str else 'frc%s' % identifier
def status(self):
"""
Get TBA API status information.
:return: Data on current status of the TBA API as APIStatus object.
"""
return APIStatus(self._get('status'))
def teams(self, page=None, year=None, simple=False, keys=False):
"""
Get list of teams.
:param page: Page of teams to view. Each page contains 500 teams.
:param year: View teams from a specific year.
:param simple: Get only vital data.
:param keys: Set to true if you only want the teams' keys rather than full data on them.
:return: List of Team objects or string keys.
"""
# If the user has requested a specific page, get that page.
if page is not None:
if year:
if keys:
return self._get('teams/%s/%s/keys' % (year, page))
else:
return [Team(raw) for raw in self._get('teams/%s/%s%s' % (year, page, '/simple' if simple else ''))]
else:
if keys:
return self._get('teams/%s/keys' % page)
else:
return [Team(raw) for raw in self._get('teams/%s%s' % (page, '/simple' if simple else ''))]
# If no page was specified, get all of them and combine.
else:
teams = []
target = 0
while True:
page_teams = self.teams(page=target, year=year, simple=simple, keys=keys)
if page_teams:
teams.extend(page_teams)
else:
break
target += 1
return teams
def team(self, team, simple=False):
"""
Get data on a single specified team.
:param team: Team to get data for.
:param simple: Get only vital data.
:return: Team object with data on specified team.
"""
return Team(self._get('team/%s%s' % (self.team_key(team), '/simple' if simple else '')))
def team_events(self, team, year=None, simple=False, keys=False):
"""
Get team events a team has participated in.
:param team: Team to get events for.
:param year: Year to get events from.
:param simple: Get only vital data.
:param keys: Get just the keys of the events. Set to True if you only need the keys of each event and not their full data.
:return: List of strings or Teams
"""
if year:
if keys:
return self._get('team/%s/events/%s/keys' % (self.team_key(team), year))
else:
return [Event(raw) for raw in self._get('team/%s/events/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
else:
if keys:
return self._get('team/%s/events/keys' % self.team_key(team))
else:
return [Event(raw) for raw in self._get('team/%s/events%s' % (self.team_key(team), '/simple' if simple else ''))]
def team_awards(self, team, year=None, event=None):
"""
Get list of awards the team has received.
:param team: Team to get awards of.
:param year: Year to get awards from.
:param event: Event to get awards from.
:return: List of Award objects
"""
if event:
return [Award(raw) for raw in self._get('team/%s/event/%s/awards' % (self.team_key(team), event))]
else:
if year:
return [Award(raw) for raw in self._get('team/%s/awards/%s' % (self.team_key(team), year))]
else:
return [Award(raw) for raw in self._get('team/%s/awards' % self.team_key(team))]
def team_matches(self, team, event=None, year=None, simple=False, keys=False):
"""
Get list of matches team has participated in.
:param team: Team to get matches of.
:param year: Year to get matches from.
:param event: Event to get matches from.
:param simple: Get only vital data.
:param keys: Only get match keys rather than their full data.
:return: List of string keys or Match objects.
"""
if event:
if keys:
return self._get('team/%s/event/%s/matches/keys' % (self.team_key(team), event))
else:
return [Match(raw) for raw in self._get('team/%s/event/%s/matches%s' % (self.team_key(team), event, '/simple' if simple else ''))]
elif year:
if keys:
return self._get('team/%s/matches/%s/keys' % (self.team_key(team), year))
else:
return [Match(raw) for raw in self._get('team/%s/matches/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
def team_years(self, team):
"""
Get years during which a team participated in FRC.
:param team: Key for team to get data about.
:return: List of integer years in which team participated.
"""
return self._get('team/%s/years_participated' % self.team_key(team))
def team_media(self, team, year=None, tag=None):
"""
Get media for a given team.
:param team: Team to get media of.
:param year: Year to get media from.
:param tag: Get only media with a given tag.
:return: List of Media objects.
"""
return [Media(raw) for raw in self._get('team/%s/media%s%s' % (self.team_key(team), ('/tag/%s' % tag) if tag else '', ('/%s' % year) if year else ''))]
def team_robots(self, team):
"""
Get data about a team's robots.
:param team: Key for team whose robots you want data on.
:return: List of Robot objects
"""
return [Robot(raw) for raw in self._get('team/%s/robots' % self.team_key(team))]
def team_districts(self, team):
"""
Get districts a team has competed in.
:param team: Team to get data on.
:return: List of District objects.
"""
return [District(raw) for raw in self._get('team/%s/districts' % self.team_key(team))]
def team_profiles(self, team):
"""
Get team's social media profiles linked on their TBA page.
:param team: Team to get data on.
:return: List of Profile objects.
"""
return [Profile(raw) for raw in self._get('team/%s/social_media' % self.team_key(team))]
def events(self, year, simple=False, keys=False):
"""
Get a list of events in a given year.
:param year: Year to get events from.
:param keys: Get only keys of the events rather than full data.
:param simple: Get only vital data.
:return: List of string event keys or Event objects.
"""
if keys:
return self._get('events/%s/keys' % year)
else:
return [Event(raw) for raw in self._get('events/%s%s' % (year, '/simple' if simple else ''))]
def event(self, event, simple=False):
"""
Get basic information about an event.
More specific data (typically obtained with the detail_type URL parameter) can be obtained with event_alliances(), event_district_points(), event_insights(), event_oprs(), event_predictions(), and event_rankings().
:param event: Key of event for which you desire data.
:param simple: Get only vital data.
:return: A single Event object.
"""
return Event(self._get('event/%s%s' % (event, '/simple' if simple else '')))
def event_alliances(self, event):
"""
Get information about alliances at event.
:param event: Key of event to get data on.
:return: List of Alliance objects.
"""
return [Alliance(raw) for raw in self._get('event/%s/alliances' % event)]
def event_district_points(self, event):
"""
Get district point information about an event.
:param event: Key of event to get data on.
:return: Single DistrictPoints object.
"""
return DistrictPoints(self._get('event/%s/district_points' % event))
def event_insights(self, event):
"""
Get insights about an event.
:param event: Key of event to get data on.
:return: Single Insights object.
"""
return Insights(self._get('event/%s/insights' % event))
def event_oprs(self, event):
"""
Get OPRs from an event.
:param event: Key of event to get data on.
:return: Single OPRs object.
"""
return OPRs(self._get('event/%s/oprs' % event))
def event_predictions(self, event):
"""
Get predictions for matches during an event.
:param event: Key of event to get data on.
:return: Single Predictions object.
"""
return Predictions(self._get('event/%s/predictions' % event))
def event_rankings(self, event):
"""
Get rankings from an event.
:param event: Key of event to get data on.
:return: Single Rankings object.
"""
return Rankings(self._get('event/%s/rankings' % event))
def event_teams(self, event, simple=False, keys=False):
"""
Get list of teams at an event.
:param event: Event key to get data on.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects.
"""
if keys:
return self._get('event/%s/teams/keys' % event)
else:
return [Team(raw) for raw in self._get('event/%s/teams%s' % (event, '/simple' if simple else ''))]
def event_awards(self, event):
"""
Get list of awards presented at an event.
:param event: Event key to get data on.
:return: List of Award objects.
"""
return [Award(raw) for raw in self._get('event/%s/awards' % event)]
def event_matches(self, event, simple=False, keys=False):
"""
Get list of matches played at an event.
:param event: Event key to get data on.
:param keys: Return list of match keys only rather than full data on every match.
:param simple: Get only vital data.
:return: List of string keys or Match objects.
"""
if keys:
return self._get('event/%s/matches/keys' % event)
else:
return [Match(raw) for raw in self._get('event/%s/matches%s' % (event, '/simple' if simple else ''))]
def match(self, key=None, year=None, event=None, type='qm', number=None, round=None, simple=False):
"""
Get data on a match.
You may either pass the match's key directly, or pass `year`, `event`, `type`, `match` (the match number), and `round` if applicable (playoffs only). The event year may be specified as part of the event key or specified in the `year` parameter.
:param key: Key of match to get data on. First option for specifying a match (see above).
:param year: Year in which match took place. Optional; if excluded then must be included in event key.
:param event: Key of event in which match took place. Including year is optional; if excluded then must be specified in `year` parameter.
:param type: One of 'qm' (qualifier match), 'qf' (quarterfinal), 'sf' (semifinal), 'f' (final). If unspecified, 'qm' will be assumed.
:param number: Match number. For example, for qualifier 32, you'd pass 32. For Semifinal 2 round 3, you'd pass 2.
:param round: For playoff matches, you will need to specify a round.
:param simple: Get only vital data.
:return: A single Match object.
"""
if key:
return Match(self._get('match/%s%s' % (key, '/simple' if simple else '')))
else:
return Match(self._get('match/{year}{event}_{type}{number}{round}{simple}'.format(year=year if not event[0].isdigit() else '',
event=event,
type=type,
number=number,
round=('m%s' % round) if not type == 'qm' else '',
simple='/simple' if simple else '')))
def districts(self, year):
"""
Return a list of active districts.
:param year: Year from which you want to get active districts.
:return: A list of District objects.
"""
return [District(raw) for raw in self._get('districts/%s' % year)]
def district_events(self, district, simple=False, keys=False):
"""
Return list of events in a given district.
:param district: Key of district whose events you want.
:param simple: Get only vital data.
:param keys: Return list of event keys only rather than full data on every event.
:return: List of string keys or Event objects.
"""
if keys:
return self._get('district/%s/events/keys' % district)
else:
return [Event(raw) for raw in self._get('district/%s/events%s' % (district, '/simple' if simple else ''))]
def district_rankings(self, district):
"""
Return data about rankings in a given district.
:param district: Key of district to get rankings of.
:return: List of DistrictRanking objects.
"""
return [DistrictRanking(raw) for raw in self._get('district/%s/rankings' % district)]
def district_teams(self, district, simple=False, keys=False):
"""
Get list of teams in the given district.
:param district: Key for the district to get teams in.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects.
"""
if keys:
return self._get('district/%s/teams/keys' % district)
else:
return [Team(raw) for raw in self._get('district/%s/teams' % district)]
def update_trusted(self, auth_id, auth_secret, event_key):
"""
Set Trusted API ID and Secret and the event key they are assigned to.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.session.headers.update({'X-TBA-Auth-Id': auth_id})
self.auth_secret = auth_secret
self.event_key = event_key
def update_event_info(self, data):
"""
Update an event's info on The Blue Alliance.
:param data: Dictionary of data to update the event with.
"""
return self._post('event/%s/info/update', json.dumps(data))
def update_event_alliances(self, data):
"""
Update an event's alliances on The Blue Alliance.
:param data: List of lists of alliances in frc#### string format.
"""
return self._post('event/%s/alliance_selections/update', json.dumps(data))
def update_event_awards(self, data):
"""
Update an event's awards on The Blue Alliance.
:param data: List of Dictionaries of award winners. Each dictionary should have a name_str for the award name, team_key in frc#### string format, and the awardee for any awards given to individuals. The last two can be null
"""
return self._post('event/%s/awards/update', json.dumps(data))
def update_event_matches(self, data):
"""
Update an event's matches on The Blue Alliance.
:param data: List of Dictionaries. More info about the match data can be found in the API docs.
"""
return self._post('event/%s/matches/update', json.dumps(data))
def delete_event_matches(self, data=None):
"""
Delete an event's matches on The Blue Alliance.
:param data: List of match keys to delete, can be omitted if you would like to delete all matches.
"""
return self._post('event/%s/matches/delete_all' if data is None else 'event/%s/matches/delete', json.dumps(self.event_key) if data is None else json.dumps(data))
def update_event_rankings(self, data):
"""
Update an event's rankings on The Blue Alliance.
:param data: Dictionary of breakdowns and rankings. Rankings are a list of dictionaries.
"""
return self._post('event/%s/rankings/update', json.dumps(data))
def update_event_team_list(self, data):
"""
Update an event's team list on The Blue Alliance.
:param data: a list of team keys in frc#### string format.
"""
return self._post('event/%s/team_list/update', json.dumps(data))
def add_match_videos(self, data):
"""
Add match videos to the respective match pages of an event on The Blue Alliance.
:param data: Dictionary of partial match keys to youtube video ids.
"""
return self._post('event/%s/match_videos/add', json.dumps(data))
def add_event_videos(self, data):
"""
Add videos to an event's media tab on The Blue Alliance.
:param data: List of youtube video ids.
"""
return self._post('event/%s/media/add', json.dumps(data))
|
frc1418/tbapy | tbapy/main.py | TBA.events | python | def events(self, year, simple=False, keys=False):
if keys:
return self._get('events/%s/keys' % year)
else:
return [Event(raw) for raw in self._get('events/%s%s' % (year, '/simple' if simple else ''))] | Get a list of events in a given year.
:param year: Year to get events from.
:param keys: Get only keys of the events rather than full data.
:param simple: Get only vital data.
:return: List of string event keys or Event objects. | train | https://github.com/frc1418/tbapy/blob/3866d5a9971fe3dfaf1a1d83638bd6be6070f0c4/tbapy/main.py#L237-L249 | [
"def _get(self, url):\n \"\"\"\n Helper method: GET data from given URL on TBA's API.\n\n :param url: URL string to get data from.\n :return: Requested data in JSON format.\n \"\"\"\n return self.session.get(self.READ_URL_PRE + url).json()\n"
] | class TBA:
"""
Main library class.
Contains methods for interacting with The Blue Alliance.
"""
READ_URL_PRE = 'https://www.thebluealliance.com/api/v3/'
WRITE_URL_PRE = 'https://www.thebluealliance.com/api/trusted/v1/'
session = requests.Session()
auth_secret = ''
event_key = ''
def __init__(self, auth_key, auth_id='', auth_secret='', event_key=''):
"""
Store auth key so we can reuse it as many times as we make a request.
:param auth_key: Your application authorization key, obtainable at https://www.thebluealliance.com/account.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.auth_secret = auth_secret
self.event_key = event_key
self.session.headers.update({'X-TBA-Auth-Key': auth_key, 'X-TBA-Auth-Id': auth_id})
def _get(self, url):
"""
Helper method: GET data from given URL on TBA's API.
:param url: URL string to get data from.
:return: Requested data in JSON format.
"""
return self.session.get(self.READ_URL_PRE + url).json()
def _post(self, url, data):
"""
Helper method: POST data to a given URL on TBA's API.
:param url: URL string to post data to and hash.
:param data: JSON data to post and hash.
:return: Requests Response object.
"""
return self.session.post(self.WRITE_URL_PRE + url % self.event_key, data=data, headers={'X-TBA-Auth-Sig': md5((self.auth_secret + '/api/trusted/v1/' + url % self.event_key + data).encode('utf-8')).hexdigest()})
@staticmethod
def team_key(identifier):
"""
Take raw team number or string key and return string key.
Used by all team-related methods to support either an integer team number or team key being passed.
(We recommend passing an integer, just because it's cleaner. But whatever works.)
:param identifier: int team number or str 'frc####'
:return: string team key in format 'frc####'
"""
return identifier if type(identifier) == str else 'frc%s' % identifier
def status(self):
"""
Get TBA API status information.
:return: Data on current status of the TBA API as APIStatus object.
"""
return APIStatus(self._get('status'))
def teams(self, page=None, year=None, simple=False, keys=False):
"""
Get list of teams.
:param page: Page of teams to view. Each page contains 500 teams.
:param year: View teams from a specific year.
:param simple: Get only vital data.
:param keys: Set to true if you only want the teams' keys rather than full data on them.
:return: List of Team objects or string keys.
"""
# If the user has requested a specific page, get that page.
if page is not None:
if year:
if keys:
return self._get('teams/%s/%s/keys' % (year, page))
else:
return [Team(raw) for raw in self._get('teams/%s/%s%s' % (year, page, '/simple' if simple else ''))]
else:
if keys:
return self._get('teams/%s/keys' % page)
else:
return [Team(raw) for raw in self._get('teams/%s%s' % (page, '/simple' if simple else ''))]
# If no page was specified, get all of them and combine.
else:
teams = []
target = 0
while True:
page_teams = self.teams(page=target, year=year, simple=simple, keys=keys)
if page_teams:
teams.extend(page_teams)
else:
break
target += 1
return teams
def team(self, team, simple=False):
"""
Get data on a single specified team.
:param team: Team to get data for.
:param simple: Get only vital data.
:return: Team object with data on specified team.
"""
return Team(self._get('team/%s%s' % (self.team_key(team), '/simple' if simple else '')))
def team_events(self, team, year=None, simple=False, keys=False):
"""
Get team events a team has participated in.
:param team: Team to get events for.
:param year: Year to get events from.
:param simple: Get only vital data.
:param keys: Get just the keys of the events. Set to True if you only need the keys of each event and not their full data.
:return: List of strings or Teams
"""
if year:
if keys:
return self._get('team/%s/events/%s/keys' % (self.team_key(team), year))
else:
return [Event(raw) for raw in self._get('team/%s/events/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
else:
if keys:
return self._get('team/%s/events/keys' % self.team_key(team))
else:
return [Event(raw) for raw in self._get('team/%s/events%s' % (self.team_key(team), '/simple' if simple else ''))]
def team_awards(self, team, year=None, event=None):
"""
Get list of awards the team has received.
:param team: Team to get awards of.
:param year: Year to get awards from.
:param event: Event to get awards from.
:return: List of Award objects
"""
if event:
return [Award(raw) for raw in self._get('team/%s/event/%s/awards' % (self.team_key(team), event))]
else:
if year:
return [Award(raw) for raw in self._get('team/%s/awards/%s' % (self.team_key(team), year))]
else:
return [Award(raw) for raw in self._get('team/%s/awards' % self.team_key(team))]
def team_matches(self, team, event=None, year=None, simple=False, keys=False):
"""
Get list of matches team has participated in.
:param team: Team to get matches of.
:param year: Year to get matches from.
:param event: Event to get matches from.
:param simple: Get only vital data.
:param keys: Only get match keys rather than their full data.
:return: List of string keys or Match objects.
"""
if event:
if keys:
return self._get('team/%s/event/%s/matches/keys' % (self.team_key(team), event))
else:
return [Match(raw) for raw in self._get('team/%s/event/%s/matches%s' % (self.team_key(team), event, '/simple' if simple else ''))]
elif year:
if keys:
return self._get('team/%s/matches/%s/keys' % (self.team_key(team), year))
else:
return [Match(raw) for raw in self._get('team/%s/matches/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
def team_years(self, team):
"""
Get years during which a team participated in FRC.
:param team: Key for team to get data about.
:return: List of integer years in which team participated.
"""
return self._get('team/%s/years_participated' % self.team_key(team))
def team_media(self, team, year=None, tag=None):
"""
Get media for a given team.
:param team: Team to get media of.
:param year: Year to get media from.
:param tag: Get only media with a given tag.
:return: List of Media objects.
"""
return [Media(raw) for raw in self._get('team/%s/media%s%s' % (self.team_key(team), ('/tag/%s' % tag) if tag else '', ('/%s' % year) if year else ''))]
def team_robots(self, team):
"""
Get data about a team's robots.
:param team: Key for team whose robots you want data on.
:return: List of Robot objects
"""
return [Robot(raw) for raw in self._get('team/%s/robots' % self.team_key(team))]
def team_districts(self, team):
"""
Get districts a team has competed in.
:param team: Team to get data on.
:return: List of District objects.
"""
return [District(raw) for raw in self._get('team/%s/districts' % self.team_key(team))]
def team_profiles(self, team):
"""
Get team's social media profiles linked on their TBA page.
:param team: Team to get data on.
:return: List of Profile objects.
"""
return [Profile(raw) for raw in self._get('team/%s/social_media' % self.team_key(team))]
def team_status(self, team, event):
"""
Get status of a team at an event.
:param team: Team whose status to get.
:param event: Event team is at.
:return: Status object.
"""
return Status(self._get('team/%s/event/%s/status' % (self.team_key(team), event)))
def event(self, event, simple=False):
"""
Get basic information about an event.
More specific data (typically obtained with the detail_type URL parameter) can be obtained with event_alliances(), event_district_points(), event_insights(), event_oprs(), event_predictions(), and event_rankings().
:param event: Key of event for which you desire data.
:param simple: Get only vital data.
:return: A single Event object.
"""
return Event(self._get('event/%s%s' % (event, '/simple' if simple else '')))
def event_alliances(self, event):
"""
Get information about alliances at event.
:param event: Key of event to get data on.
:return: List of Alliance objects.
"""
return [Alliance(raw) for raw in self._get('event/%s/alliances' % event)]
def event_district_points(self, event):
"""
Get district point information about an event.
:param event: Key of event to get data on.
:return: Single DistrictPoints object.
"""
return DistrictPoints(self._get('event/%s/district_points' % event))
def event_insights(self, event):
"""
Get insights about an event.
:param event: Key of event to get data on.
:return: Single Insights object.
"""
return Insights(self._get('event/%s/insights' % event))
def event_oprs(self, event):
"""
Get OPRs from an event.
:param event: Key of event to get data on.
:return: Single OPRs object.
"""
return OPRs(self._get('event/%s/oprs' % event))
def event_predictions(self, event):
"""
Get predictions for matches during an event.
:param event: Key of event to get data on.
:return: Single Predictions object.
"""
return Predictions(self._get('event/%s/predictions' % event))
def event_rankings(self, event):
"""
Get rankings from an event.
:param event: Key of event to get data on.
:return: Single Rankings object.
"""
return Rankings(self._get('event/%s/rankings' % event))
def event_teams(self, event, simple=False, keys=False):
"""
Get list of teams at an event.
:param event: Event key to get data on.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects.
"""
if keys:
return self._get('event/%s/teams/keys' % event)
else:
return [Team(raw) for raw in self._get('event/%s/teams%s' % (event, '/simple' if simple else ''))]
def event_awards(self, event):
"""
Get list of awards presented at an event.
:param event: Event key to get data on.
:return: List of Award objects.
"""
return [Award(raw) for raw in self._get('event/%s/awards' % event)]
def event_matches(self, event, simple=False, keys=False):
"""
Get list of matches played at an event.
:param event: Event key to get data on.
:param keys: Return list of match keys only rather than full data on every match.
:param simple: Get only vital data.
:return: List of string keys or Match objects.
"""
if keys:
return self._get('event/%s/matches/keys' % event)
else:
return [Match(raw) for raw in self._get('event/%s/matches%s' % (event, '/simple' if simple else ''))]
def match(self, key=None, year=None, event=None, type='qm', number=None, round=None, simple=False):
"""
Get data on a match.
You may either pass the match's key directly, or pass `year`, `event`, `type`, `match` (the match number), and `round` if applicable (playoffs only). The event year may be specified as part of the event key or specified in the `year` parameter.
:param key: Key of match to get data on. First option for specifying a match (see above).
:param year: Year in which match took place. Optional; if excluded then must be included in event key.
:param event: Key of event in which match took place. Including year is optional; if excluded then must be specified in `year` parameter.
:param type: One of 'qm' (qualifier match), 'qf' (quarterfinal), 'sf' (semifinal), 'f' (final). If unspecified, 'qm' will be assumed.
:param number: Match number. For example, for qualifier 32, you'd pass 32. For Semifinal 2 round 3, you'd pass 2.
:param round: For playoff matches, you will need to specify a round.
:param simple: Get only vital data.
:return: A single Match object.
"""
if key:
return Match(self._get('match/%s%s' % (key, '/simple' if simple else '')))
else:
return Match(self._get('match/{year}{event}_{type}{number}{round}{simple}'.format(year=year if not event[0].isdigit() else '',
event=event,
type=type,
number=number,
round=('m%s' % round) if not type == 'qm' else '',
simple='/simple' if simple else '')))
def districts(self, year):
"""
Return a list of active districts.
:param year: Year from which you want to get active districts.
:return: A list of District objects.
"""
return [District(raw) for raw in self._get('districts/%s' % year)]
def district_events(self, district, simple=False, keys=False):
"""
Return list of events in a given district.
:param district: Key of district whose events you want.
:param simple: Get only vital data.
:param keys: Return list of event keys only rather than full data on every event.
:return: List of string keys or Event objects.
"""
if keys:
return self._get('district/%s/events/keys' % district)
else:
return [Event(raw) for raw in self._get('district/%s/events%s' % (district, '/simple' if simple else ''))]
def district_rankings(self, district):
"""
Return data about rankings in a given district.
:param district: Key of district to get rankings of.
:return: List of DistrictRanking objects.
"""
return [DistrictRanking(raw) for raw in self._get('district/%s/rankings' % district)]
def district_teams(self, district, simple=False, keys=False):
"""
Get list of teams in the given district.
:param district: Key for the district to get teams in.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects.
"""
if keys:
return self._get('district/%s/teams/keys' % district)
else:
return [Team(raw) for raw in self._get('district/%s/teams' % district)]
def update_trusted(self, auth_id, auth_secret, event_key):
"""
Set Trusted API ID and Secret and the event key they are assigned to.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.session.headers.update({'X-TBA-Auth-Id': auth_id})
self.auth_secret = auth_secret
self.event_key = event_key
def update_event_info(self, data):
"""
Update an event's info on The Blue Alliance.
:param data: Dictionary of data to update the event with.
"""
return self._post('event/%s/info/update', json.dumps(data))
def update_event_alliances(self, data):
"""
Update an event's alliances on The Blue Alliance.
:param data: List of lists of alliances in frc#### string format.
"""
return self._post('event/%s/alliance_selections/update', json.dumps(data))
def update_event_awards(self, data):
"""
Update an event's awards on The Blue Alliance.
:param data: List of Dictionaries of award winners. Each dictionary should have a name_str for the award name, team_key in frc#### string format, and the awardee for any awards given to individuals. The last two can be null
"""
return self._post('event/%s/awards/update', json.dumps(data))
def update_event_matches(self, data):
"""
Update an event's matches on The Blue Alliance.
:param data: List of Dictionaries. More info about the match data can be found in the API docs.
"""
return self._post('event/%s/matches/update', json.dumps(data))
def delete_event_matches(self, data=None):
"""
Delete an event's matches on The Blue Alliance.
:param data: List of match keys to delete, can be ommited if you would like to delete all matches.
"""
return self._post('event/%s/matches/delete_all' if data is None else 'event/%s/matches/delete', json.dumps(self.event_key) if data is None else json.dumps(data))
def update_event_rankings(self, data):
"""
Update an event's rankings on The Blue Alliance.
:param data: Dictionary of breakdowns and rankings. Rankings are a list of dictionaries.
"""
return self._post('event/%s/rankings/update', json.dumps(data))
def update_event_team_list(self, data):
"""
Update an event's team list on The Blue Alliance.
:param data: a list of team keys in frc#### string format.
"""
return self._post('event/%s/team_list/update', json.dumps(data))
def add_match_videos(self, data):
"""
Add match videos to the respective match pages of an event on The Blue Alliance.
:param data: Dictionary of partial match keys to youtube video ids.
"""
return self._post('event/%s/match_videos/add', json.dumps(data))
def add_event_videos(self, data):
"""
Add videos to an event's media tab on The Blue Alliance.
:param data: List of youtube video ids.
"""
return self._post('event/%s/media/add', json.dumps(data))
|
frc1418/tbapy | tbapy/main.py | TBA.event | python | def event(self, event, simple=False):
return Event(self._get('event/%s%s' % (event, '/simple' if simple else ''))) | Get basic information about an event.
More specific data (typically obtained with the detail_type URL parameter) can be obtained with event_alliances(), event_district_points(), event_insights(), event_oprs(), event_predictions(), and event_rankings().
:param event: Key of event for which you desire data.
:param simple: Get only vital data.
:return: A single Event object. | train | https://github.com/frc1418/tbapy/blob/3866d5a9971fe3dfaf1a1d83638bd6be6070f0c4/tbapy/main.py#L251-L261 | [
"def _get(self, url):\n \"\"\"\n Helper method: GET data from given URL on TBA's API.\n\n :param url: URL string to get data from.\n :return: Requested data in JSON format.\n \"\"\"\n return self.session.get(self.READ_URL_PRE + url).json()\n"
] | class TBA:
"""
Main library class.
Contains methods for interacting with The Blue Alliance.
"""
READ_URL_PRE = 'https://www.thebluealliance.com/api/v3/'
WRITE_URL_PRE = 'https://www.thebluealliance.com/api/trusted/v1/'
session = requests.Session()
auth_secret = ''
event_key = ''
def __init__(self, auth_key, auth_id='', auth_secret='', event_key=''):
"""
Store auth key so we can reuse it as many times as we make a request.
:param auth_key: Your application authorization key, obtainable at https://www.thebluealliance.com/account.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.auth_secret = auth_secret
self.event_key = event_key
self.session.headers.update({'X-TBA-Auth-Key': auth_key, 'X-TBA-Auth-Id': auth_id})
def _get(self, url):
"""
Helper method: GET data from given URL on TBA's API.
:param url: URL string to get data from.
:return: Requested data in JSON format.
"""
return self.session.get(self.READ_URL_PRE + url).json()
def _post(self, url, data):
"""
Helper method: POST data to a given URL on TBA's API.
:param url: URL string to post data to and hash.
:pararm data: JSON data to post and hash.
:return: Requests Response object.
"""
return self.session.post(self.WRITE_URL_PRE + url % self.event_key, data=data, headers={'X-TBA-Auth-Sig': md5((self.auth_secret + '/api/trusted/v1/' + url % self.event_key + data).encode('utf-8')).hexdigest()})
@staticmethod
def team_key(identifier):
"""
Take raw team number or string key and return string key.
Used by all team-related methods to support either an integer team number or team key being passed.
(We recommend passing an integer, just because it's cleaner. But whatever works.)
:param identifier: int team number or str 'frc####'
:return: string team key in format 'frc####'
"""
return identifier if type(identifier) == str else 'frc%s' % identifier
def status(self):
"""
Get TBA API status information.
:return: Data on current status of the TBA API as APIStatus object.
"""
return APIStatus(self._get('status'))
def teams(self, page=None, year=None, simple=False, keys=False):
"""
Get list of teams.
:param page: Page of teams to view. Each page contains 500 teams.
:param year: View teams from a specific year.
:param simple: Get only vital data.
:param keys: Set to true if you only want the teams' keys rather than full data on them.
:return: List of Team objects or string keys.
"""
# If the user has requested a specific page, get that page.
if page is not None:
if year:
if keys:
return self._get('teams/%s/%s/keys' % (year, page))
else:
return [Team(raw) for raw in self._get('teams/%s/%s%s' % (year, page, '/simple' if simple else ''))]
else:
if keys:
return self._get('teams/%s/keys' % page)
else:
return [Team(raw) for raw in self._get('teams/%s%s' % (page, '/simple' if simple else ''))]
# If no page was specified, get all of them and combine.
else:
teams = []
target = 0
while True:
page_teams = self.teams(page=target, year=year, simple=simple, keys=keys)
if page_teams:
teams.extend(page_teams)
else:
break
target += 1
return teams
def team(self, team, simple=False):
"""
Get data on a single specified team.
:param team: Team to get data for.
:param simple: Get only vital data.
:return: Team object with data on specified team.
"""
return Team(self._get('team/%s%s' % (self.team_key(team), '/simple' if simple else '')))
def team_events(self, team, year=None, simple=False, keys=False):
"""
Get team events a team has participated in.
:param team: Team to get events for.
:param year: Year to get events from.
:param simple: Get only vital data.
:param keys: Get just the keys of the events. Set to True if you only need the keys of each event and not their full data.
:return: List of strings or Teams
"""
if year:
if keys:
return self._get('team/%s/events/%s/keys' % (self.team_key(team), year))
else:
return [Event(raw) for raw in self._get('team/%s/events/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
else:
if keys:
return self._get('team/%s/events/keys' % self.team_key(team))
else:
return [Event(raw) for raw in self._get('team/%s/events%s' % (self.team_key(team), '/simple' if simple else ''))]
def team_awards(self, team, year=None, event=None):
"""
Get list of awards team has recieved.
:param team: Team to get awards of.
:param year: Year to get awards from.
:param event: Event to get awards from.
:return: List of Award objects
"""
if event:
return [Award(raw) for raw in self._get('team/%s/event/%s/awards' % (self.team_key(team), event))]
else:
if year:
return [Award(raw) for raw in self._get('team/%s/awards/%s' % (self.team_key(team), year))]
else:
return [Award(raw) for raw in self._get('team/%s/awards' % self.team_key(team))]
def team_matches(self, team, event=None, year=None, simple=False, keys=False):
"""
Get list of matches team has participated in.
:param team: Team to get matches of.
:param year: Year to get matches from.
:param event: Event to get matches from.
:param simple: Get only vital data.
:param keys: Only get match keys rather than their full data.
:return: List of string keys or Match objects.
"""
if event:
if keys:
return self._get('team/%s/event/%s/matches/keys' % (self.team_key(team), event))
else:
return [Match(raw) for raw in self._get('team/%s/event/%s/matches%s' % (self.team_key(team), event, '/simple' if simple else ''))]
elif year:
if keys:
return self._get('team/%s/matches/%s/keys' % (self.team_key(team), year))
else:
return [Match(raw) for raw in self._get('team/%s/matches/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
def team_years(self, team):
"""
Get years during which a team participated in FRC.
:param team: Key for team to get data about.
:return: List of integer years in which team participated.
"""
return self._get('team/%s/years_participated' % self.team_key(team))
def team_media(self, team, year=None, tag=None):
"""
Get media for a given team.
:param team: Team to get media of.
:param year: Year to get media from.
:param tag: Get only media with a given tag.
:return: List of Media objects.
"""
return [Media(raw) for raw in self._get('team/%s/media%s%s' % (self.team_key(team), ('/tag/%s' % tag) if tag else '', ('/%s' % year) if year else ''))]
def team_robots(self, team):
"""
Get data about a team's robots.
:param team: Key for team whose robots you want data on.
:return: List of Robot objects
"""
return [Robot(raw) for raw in self._get('team/%s/robots' % self.team_key(team))]
def team_districts(self, team):
"""
Get districts a team has competed in.
:param team: Team to get data on.
:return: List of District objects.
"""
return [District(raw) for raw in self._get('team/%s/districts' % self.team_key(team))]
def team_profiles(self, team):
"""
Get team's social media profiles linked on their TBA page.
:param team: Team to get data on.
:return: List of Profile objects.
"""
return [Profile(raw) for raw in self._get('team/%s/social_media' % self.team_key(team))]
def team_status(self, team, event):
"""
Get status of a team at an event.
:param team: Team whose status to get.
:param event: Event team is at.
:return: Status object.
"""
return Status(self._get('team/%s/event/%s/status' % (self.team_key(team), event)))
def events(self, year, simple=False, keys=False):
"""
Get a list of events in a given year.
:param year: Year to get events from.
:param keys: Get only keys of the events rather than full data.
:param simple: Get only vital data.
:return: List of string event keys or Event objects.
"""
if keys:
return self._get('events/%s/keys' % year)
else:
return [Event(raw) for raw in self._get('events/%s%s' % (year, '/simple' if simple else ''))]
def event_alliances(self, event):
"""
Get information about alliances at event.
:param event: Key of event to get data on.
:return: List of Alliance objects.
"""
return [Alliance(raw) for raw in self._get('event/%s/alliances' % event)]
def event_district_points(self, event):
"""
Get district point information about an event.
:param event: Key of event to get data on.
:return: Single DistrictPoints object.
"""
return DistrictPoints(self._get('event/%s/district_points' % event))
def event_insights(self, event):
"""
Get insights about an event.
:param event: Key of event to get data on.
:return: Single Insights object.
"""
return Insights(self._get('event/%s/insights' % event))
def event_oprs(self, event):
"""
Get OPRs from an event.
:param event: Key of event to get data on.
:return: Single OPRs object.
"""
return OPRs(self._get('event/%s/oprs' % event))
def event_predictions(self, event):
"""
Get predictions for matches during an event.
:param event: Key of event to get data on.
:return: Single Predictions object.
"""
return Predictions(self._get('event/%s/predictions' % event))
def event_rankings(self, event):
"""
Get rankings from an event.
:param event: Key of event to get data on.
:return: Single Rankings object.
"""
return Rankings(self._get('event/%s/rankings' % event))
def event_teams(self, event, simple=False, keys=False):
"""
Get list of teams at an event.
:param event: Event key to get data on.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects.
"""
if keys:
return self._get('event/%s/teams/keys' % event)
else:
return [Team(raw) for raw in self._get('event/%s/teams%s' % (event, '/simple' if simple else ''))]
def event_awards(self, event):
"""
Get list of awards presented at an event.
:param event: Event key to get data on.
:return: List of Award objects.
"""
return [Award(raw) for raw in self._get('event/%s/awards' % event)]
def event_matches(self, event, simple=False, keys=False):
"""
Get list of matches played at an event.
:param event: Event key to get data on.
:param keys: Return list of match keys only rather than full data on every match.
:param simple: Get only vital data.
:return: List of string keys or Match objects.
"""
if keys:
return self._get('event/%s/matches/keys' % event)
else:
return [Match(raw) for raw in self._get('event/%s/matches%s' % (event, '/simple' if simple else ''))]
def match(self, key=None, year=None, event=None, type='qm', number=None, round=None, simple=False):
"""
Get data on a match.
You may either pass the match's key directly, or pass `year`, `event`, `type`, `match` (the match number), and `round` if applicable (playoffs only). The event year may be specified as part of the event key or specified in the `year` parameter.
:param key: Key of match to get data on. First option for specifying a match (see above).
:param year: Year in which match took place. Optional; if excluded then must be included in event key.
:param event: Key of event in which match took place. Including year is optional; if excluded then must be specified in `year` parameter.
:param type: One of 'qm' (qualifier match), 'qf' (quarterfinal), 'sf' (semifinal), 'f' (final). If unspecified, 'qm' will be assumed.
:param number: Match number. For example, for qualifier 32, you'd pass 32. For Semifinal 2 round 3, you'd pass 2.
:param round: For playoff matches, you will need to specify a round.
:param simple: Get only vital data.
:return: A single Match object.
"""
if key:
return Match(self._get('match/%s%s' % (key, '/simple' if simple else '')))
else:
return Match(self._get('match/{year}{event}_{type}{number}{round}{simple}'.format(year=year if not event[0].isdigit() else '',
event=event,
type=type,
number=number,
round=('m%s' % round) if not type == 'qm' else '',
simple='/simple' if simple else '')))
def districts(self, year):
"""
Return a list of districts active.
:param year: Year from which you want to get active districts.
:return: A list of District objects.
"""
return [District(raw) for raw in self._get('districts/%s' % year)]
def district_events(self, district, simple=False, keys=False):
"""
Return list of events in a given district.
:param district: Key of district whose events you want.
:param simple: Get only vital data.
:param keys: Return list of event keys only rather than full data on every event.
:return: List of string keys or Event objects.
"""
if keys:
return self._get('district/%s/events/keys' % district)
else:
return [Event(raw) for raw in self._get('district/%s/events%s' % (district, '/simple' if simple else ''))]
def district_rankings(self, district):
"""
Return data about rankings in a given district.
:param district: Key of district to get rankings of.
:return: List of DistrictRanking objects.
"""
return [DistrictRanking(raw) for raw in self._get('district/%s/rankings' % district)]
def district_teams(self, district, simple=False, keys=False):
"""
Get list of teams in the given district.
:param district: Key for the district to get teams in.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects.
"""
if keys:
return self._get('district/%s/teams/keys' % district)
else:
return [Team(raw) for raw in self._get('district/%s/teams' % district)]
def update_trusted(self, auth_id, auth_secret, event_key):
"""
Set Trusted API ID and Secret and the event key they are assigned to.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.session.headers.update({'X-TBA-Auth-Id': auth_id})
self.auth_secret = auth_secret
self.event_key = event_key
def update_event_info(self, data):
"""
Update an event's info on The Blue Alliance.
:param data: Dictionary of data to update the event with.
"""
return self._post('event/%s/info/update', json.dumps(data))
def update_event_alliances(self, data):
"""
Update an event's alliances on The Blue Alliance.
:param data: List of lists of alliances in frc#### string format.
"""
return self._post('event/%s/alliance_selections/update', json.dumps(data))
def update_event_awards(self, data):
"""
Update an event's awards on The Blue Alliance.
:param data: List of Dictionaries of award winners. Each dictionary should have a name_str for the award name, team_key in frc#### string format, and the awardee for any awards given to individuals. The last two can be null
"""
return self._post('event/%s/awards/update', json.dumps(data))
def update_event_matches(self, data):
"""
Update an event's matches on The Blue Alliance.
:param data: List of Dictionaries. More info about the match data can be found in the API docs.
"""
return self._post('event/%s/matches/update', json.dumps(data))
def delete_event_matches(self, data=None):
"""
Delete an event's matches on The Blue Alliance.
:param data: List of match keys to delete, can be ommited if you would like to delete all matches.
"""
return self._post('event/%s/matches/delete_all' if data is None else 'event/%s/matches/delete', json.dumps(self.event_key) if data is None else json.dumps(data))
def update_event_rankings(self, data):
"""
Update an event's rankings on The Blue Alliance.
:param data: Dictionary of breakdowns and rankings. Rankings are a list of dictionaries.
"""
return self._post('event/%s/rankings/update', json.dumps(data))
def update_event_team_list(self, data):
"""
Update an event's team list on The Blue Alliance.
:param data: a list of team keys in frc#### string format.
"""
return self._post('event/%s/team_list/update', json.dumps(data))
def add_match_videos(self, data):
"""
Add match videos to the respective match pages of an event on The Blue Alliance.
:param data: Dictionary of partial match keys to youtube video ids.
"""
return self._post('event/%s/match_videos/add', json.dumps(data))
def add_event_videos(self, data):
"""
Add videos to an event's media tab on The Blue Alliance.
:param data: List of youtube video ids.
"""
return self._post('event/%s/media/add', json.dumps(data))
|
frc1418/tbapy | tbapy/main.py | TBA.event_teams | python | def event_teams(self, event, simple=False, keys=False):
if keys:
return self._get('event/%s/teams/keys' % event)
else:
return [Team(raw) for raw in self._get('event/%s/teams%s' % (event, '/simple' if simple else ''))] | Get list of teams at an event.
:param event: Event key to get data on.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects. | train | https://github.com/frc1418/tbapy/blob/3866d5a9971fe3dfaf1a1d83638bd6be6070f0c4/tbapy/main.py#L317-L329 | [
"def _get(self, url):\n \"\"\"\n Helper method: GET data from given URL on TBA's API.\n\n :param url: URL string to get data from.\n :return: Requested data in JSON format.\n \"\"\"\n return self.session.get(self.READ_URL_PRE + url).json()\n"
] | class TBA:
"""
Main library class.
Contains methods for interacting with The Blue Alliance.
"""
READ_URL_PRE = 'https://www.thebluealliance.com/api/v3/'
WRITE_URL_PRE = 'https://www.thebluealliance.com/api/trusted/v1/'
session = requests.Session()
auth_secret = ''
event_key = ''
def __init__(self, auth_key, auth_id='', auth_secret='', event_key=''):
"""
Store auth key so we can reuse it as many times as we make a request.
:param auth_key: Your application authorization key, obtainable at https://www.thebluealliance.com/account.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.auth_secret = auth_secret
self.event_key = event_key
self.session.headers.update({'X-TBA-Auth-Key': auth_key, 'X-TBA-Auth-Id': auth_id})
def _get(self, url):
"""
Helper method: GET data from given URL on TBA's API.
:param url: URL string to get data from.
:return: Requested data in JSON format.
"""
return self.session.get(self.READ_URL_PRE + url).json()
def _post(self, url, data):
"""
Helper method: POST data to a given URL on TBA's API.
:param url: URL string to post data to and hash.
:pararm data: JSON data to post and hash.
:return: Requests Response object.
"""
return self.session.post(self.WRITE_URL_PRE + url % self.event_key, data=data, headers={'X-TBA-Auth-Sig': md5((self.auth_secret + '/api/trusted/v1/' + url % self.event_key + data).encode('utf-8')).hexdigest()})
@staticmethod
def team_key(identifier):
"""
Take raw team number or string key and return string key.
Used by all team-related methods to support either an integer team number or team key being passed.
(We recommend passing an integer, just because it's cleaner. But whatever works.)
:param identifier: int team number or str 'frc####'
:return: string team key in format 'frc####'
"""
return identifier if type(identifier) == str else 'frc%s' % identifier
def status(self):
"""
Get TBA API status information.
:return: Data on current status of the TBA API as APIStatus object.
"""
return APIStatus(self._get('status'))
def teams(self, page=None, year=None, simple=False, keys=False):
"""
Get list of teams.
:param page: Page of teams to view. Each page contains 500 teams.
:param year: View teams from a specific year.
:param simple: Get only vital data.
:param keys: Set to true if you only want the teams' keys rather than full data on them.
:return: List of Team objects or string keys.
"""
# If the user has requested a specific page, get that page.
if page is not None:
if year:
if keys:
return self._get('teams/%s/%s/keys' % (year, page))
else:
return [Team(raw) for raw in self._get('teams/%s/%s%s' % (year, page, '/simple' if simple else ''))]
else:
if keys:
return self._get('teams/%s/keys' % page)
else:
return [Team(raw) for raw in self._get('teams/%s%s' % (page, '/simple' if simple else ''))]
# If no page was specified, get all of them and combine.
else:
teams = []
target = 0
while True:
page_teams = self.teams(page=target, year=year, simple=simple, keys=keys)
if page_teams:
teams.extend(page_teams)
else:
break
target += 1
return teams
def team(self, team, simple=False):
"""
Get data on a single specified team.
:param team: Team to get data for.
:param simple: Get only vital data.
:return: Team object with data on specified team.
"""
return Team(self._get('team/%s%s' % (self.team_key(team), '/simple' if simple else '')))
def team_events(self, team, year=None, simple=False, keys=False):
"""
Get team events a team has participated in.
:param team: Team to get events for.
:param year: Year to get events from.
:param simple: Get only vital data.
:param keys: Get just the keys of the events. Set to True if you only need the keys of each event and not their full data.
:return: List of strings or Teams
"""
if year:
if keys:
return self._get('team/%s/events/%s/keys' % (self.team_key(team), year))
else:
return [Event(raw) for raw in self._get('team/%s/events/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
else:
if keys:
return self._get('team/%s/events/keys' % self.team_key(team))
else:
return [Event(raw) for raw in self._get('team/%s/events%s' % (self.team_key(team), '/simple' if simple else ''))]
def team_awards(self, team, year=None, event=None):
"""
Get list of awards team has recieved.
:param team: Team to get awards of.
:param year: Year to get awards from.
:param event: Event to get awards from.
:return: List of Award objects
"""
if event:
return [Award(raw) for raw in self._get('team/%s/event/%s/awards' % (self.team_key(team), event))]
else:
if year:
return [Award(raw) for raw in self._get('team/%s/awards/%s' % (self.team_key(team), year))]
else:
return [Award(raw) for raw in self._get('team/%s/awards' % self.team_key(team))]
def team_matches(self, team, event=None, year=None, simple=False, keys=False):
"""
Get list of matches team has participated in.
:param team: Team to get matches of.
:param year: Year to get matches from.
:param event: Event to get matches from.
:param simple: Get only vital data.
:param keys: Only get match keys rather than their full data.
:return: List of string keys or Match objects.
"""
if event:
if keys:
return self._get('team/%s/event/%s/matches/keys' % (self.team_key(team), event))
else:
return [Match(raw) for raw in self._get('team/%s/event/%s/matches%s' % (self.team_key(team), event, '/simple' if simple else ''))]
elif year:
if keys:
return self._get('team/%s/matches/%s/keys' % (self.team_key(team), year))
else:
return [Match(raw) for raw in self._get('team/%s/matches/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
def team_years(self, team):
"""
Get years during which a team participated in FRC.
:param team: Key for team to get data about.
:return: List of integer years in which team participated.
"""
return self._get('team/%s/years_participated' % self.team_key(team))
def team_media(self, team, year=None, tag=None):
"""
Get media for a given team.
:param team: Team to get media of.
:param year: Year to get media from.
:param tag: Get only media with a given tag.
:return: List of Media objects.
"""
return [Media(raw) for raw in self._get('team/%s/media%s%s' % (self.team_key(team), ('/tag/%s' % tag) if tag else '', ('/%s' % year) if year else ''))]
def team_robots(self, team):
"""
Get data about a team's robots.
:param team: Key for team whose robots you want data on.
:return: List of Robot objects
"""
return [Robot(raw) for raw in self._get('team/%s/robots' % self.team_key(team))]
def team_districts(self, team):
"""
Get districts a team has competed in.
:param team: Team to get data on.
:return: List of District objects.
"""
return [District(raw) for raw in self._get('team/%s/districts' % self.team_key(team))]
def team_profiles(self, team):
"""
Get team's social media profiles linked on their TBA page.
:param team: Team to get data on.
:return: List of Profile objects.
"""
return [Profile(raw) for raw in self._get('team/%s/social_media' % self.team_key(team))]
def team_status(self, team, event):
"""
Get status of a team at an event.
:param team: Team whose status to get.
:param event: Event team is at.
:return: Status object.
"""
return Status(self._get('team/%s/event/%s/status' % (self.team_key(team), event)))
def events(self, year, simple=False, keys=False):
"""
Get a list of events in a given year.
:param year: Year to get events from.
:param keys: Get only keys of the events rather than full data.
:param simple: Get only vital data.
:return: List of string event keys or Event objects.
"""
if keys:
return self._get('events/%s/keys' % year)
else:
return [Event(raw) for raw in self._get('events/%s%s' % (year, '/simple' if simple else ''))]
def event(self, event, simple=False):
"""
Get basic information about an event.
More specific data (typically obtained with the detail_type URL parameter) can be obtained with event_alliances(), event_district_points(), event_insights(), event_oprs(), event_predictions(), and event_rankings().
:param event: Key of event for which you desire data.
:param simple: Get only vital data.
:return: A single Event object.
"""
return Event(self._get('event/%s%s' % (event, '/simple' if simple else '')))
def event_alliances(self, event):
"""
Get information about alliances at event.
:param event: Key of event to get data on.
:return: List of Alliance objects.
"""
return [Alliance(raw) for raw in self._get('event/%s/alliances' % event)]
def event_district_points(self, event):
"""
Get district point information about an event.
:param event: Key of event to get data on.
:return: Single DistrictPoints object.
"""
return DistrictPoints(self._get('event/%s/district_points' % event))
def event_insights(self, event):
"""
Get insights about an event.
:param event: Key of event to get data on.
:return: Single Insights object.
"""
return Insights(self._get('event/%s/insights' % event))
def event_oprs(self, event):
"""
Get OPRs from an event.
:param event: Key of event to get data on.
:return: Single OPRs object.
"""
return OPRs(self._get('event/%s/oprs' % event))
def event_predictions(self, event):
"""
Get predictions for matches during an event.
:param event: Key of event to get data on.
:return: Single Predictions object.
"""
return Predictions(self._get('event/%s/predictions' % event))
def event_rankings(self, event):
"""
Get rankings from an event.
:param event: Key of event to get data on.
:return: Single Rankings object.
"""
return Rankings(self._get('event/%s/rankings' % event))
def event_awards(self, event):
"""
Get list of awards presented at an event.
:param event: Event key to get data on.
:return: List of Award objects.
"""
return [Award(raw) for raw in self._get('event/%s/awards' % event)]
def event_matches(self, event, simple=False, keys=False):
"""
Get list of matches played at an event.
:param event: Event key to get data on.
:param keys: Return list of match keys only rather than full data on every match.
:param simple: Get only vital data.
:return: List of string keys or Match objects.
"""
if keys:
return self._get('event/%s/matches/keys' % event)
else:
return [Match(raw) for raw in self._get('event/%s/matches%s' % (event, '/simple' if simple else ''))]
def match(self, key=None, year=None, event=None, type='qm', number=None, round=None, simple=False):
"""
Get data on a match.
You may either pass the match's key directly, or pass `year`, `event`, `type`, `match` (the match number), and `round` if applicable (playoffs only). The event year may be specified as part of the event key or specified in the `year` parameter.
:param key: Key of match to get data on. First option for specifying a match (see above).
:param year: Year in which match took place. Optional; if excluded then must be included in event key.
:param event: Key of event in which match took place. Including year is optional; if excluded then must be specified in `year` parameter.
:param type: One of 'qm' (qualifier match), 'qf' (quarterfinal), 'sf' (semifinal), 'f' (final). If unspecified, 'qm' will be assumed.
:param number: Match number. For example, for qualifier 32, you'd pass 32. For Semifinal 2 round 3, you'd pass 2.
:param round: For playoff matches, you will need to specify a round.
:param simple: Get only vital data.
:return: A single Match object.
"""
if key:
return Match(self._get('match/%s%s' % (key, '/simple' if simple else '')))
else:
return Match(self._get('match/{year}{event}_{type}{number}{round}{simple}'.format(year=year if not event[0].isdigit() else '',
event=event,
type=type,
number=number,
round=('m%s' % round) if not type == 'qm' else '',
simple='/simple' if simple else '')))
def districts(self, year):
"""
Return a list of districts active.
:param year: Year from which you want to get active districts.
:return: A list of District objects.
"""
return [District(raw) for raw in self._get('districts/%s' % year)]
def district_events(self, district, simple=False, keys=False):
"""
Return list of events in a given district.
:param district: Key of district whose events you want.
:param simple: Get only vital data.
:param keys: Return list of event keys only rather than full data on every event.
:return: List of string keys or Event objects.
"""
if keys:
return self._get('district/%s/events/keys' % district)
else:
return [Event(raw) for raw in self._get('district/%s/events%s' % (district, '/simple' if simple else ''))]
def district_rankings(self, district):
"""
Return data about rankings in a given district.
:param district: Key of district to get rankings of.
:return: List of DistrictRanking objects.
"""
return [DistrictRanking(raw) for raw in self._get('district/%s/rankings' % district)]
def district_teams(self, district, simple=False, keys=False):
"""
Get list of teams in the given district.
:param district: Key for the district to get teams in.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects.
"""
if keys:
return self._get('district/%s/teams/keys' % district)
else:
return [Team(raw) for raw in self._get('district/%s/teams' % district)]
def update_trusted(self, auth_id, auth_secret, event_key):
"""
Set Trusted API ID and Secret and the event key they are assigned to.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.session.headers.update({'X-TBA-Auth-Id': auth_id})
self.auth_secret = auth_secret
self.event_key = event_key
def update_event_info(self, data):
"""
Update an event's info on The Blue Alliance.
:param data: Dictionary of data to update the event with.
"""
return self._post('event/%s/info/update', json.dumps(data))
def update_event_alliances(self, data):
"""
Update an event's alliances on The Blue Alliance.
:param data: List of lists of alliances in frc#### string format.
"""
return self._post('event/%s/alliance_selections/update', json.dumps(data))
def update_event_awards(self, data):
"""
Update an event's awards on The Blue Alliance.
:param data: List of Dictionaries of award winners. Each dictionary should have a name_str for the award name, team_key in frc#### string format, and the awardee for any awards given to individuals. The last two can be null
"""
return self._post('event/%s/awards/update', json.dumps(data))
def update_event_matches(self, data):
"""
Update an event's matches on The Blue Alliance.
:param data: List of Dictionaries. More info about the match data can be found in the API docs.
"""
return self._post('event/%s/matches/update', json.dumps(data))
def delete_event_matches(self, data=None):
"""
Delete an event's matches on The Blue Alliance.
:param data: List of match keys to delete, can be ommited if you would like to delete all matches.
"""
return self._post('event/%s/matches/delete_all' if data is None else 'event/%s/matches/delete', json.dumps(self.event_key) if data is None else json.dumps(data))
def update_event_rankings(self, data):
"""
Update an event's rankings on The Blue Alliance.
:param data: Dictionary of breakdowns and rankings. Rankings are a list of dictionaries.
"""
return self._post('event/%s/rankings/update', json.dumps(data))
def update_event_team_list(self, data):
"""
Update an event's team list on The Blue Alliance.
:param data: a list of team keys in frc#### string format.
"""
return self._post('event/%s/team_list/update', json.dumps(data))
def add_match_videos(self, data):
"""
Add match videos to the respective match pages of an event on The Blue Alliance.
:param data: Dictionary of partial match keys to youtube video ids.
"""
return self._post('event/%s/match_videos/add', json.dumps(data))
def add_event_videos(self, data):
"""
Add videos to an event's media tab on The Blue Alliance.
:param data: List of youtube video ids.
"""
return self._post('event/%s/media/add', json.dumps(data))
|
frc1418/tbapy | tbapy/main.py | TBA.event_matches | python | def event_matches(self, event, simple=False, keys=False):
if keys:
return self._get('event/%s/matches/keys' % event)
else:
return [Match(raw) for raw in self._get('event/%s/matches%s' % (event, '/simple' if simple else ''))] | Get list of matches played at an event.
:param event: Event key to get data on.
:param keys: Return list of match keys only rather than full data on every match.
:param simple: Get only vital data.
:return: List of string keys or Match objects. | train | https://github.com/frc1418/tbapy/blob/3866d5a9971fe3dfaf1a1d83638bd6be6070f0c4/tbapy/main.py#L340-L352 | [
"def _get(self, url):\n \"\"\"\n Helper method: GET data from given URL on TBA's API.\n\n :param url: URL string to get data from.\n :return: Requested data in JSON format.\n \"\"\"\n return self.session.get(self.READ_URL_PRE + url).json()\n"
] | class TBA:
"""
Main library class.
Contains methods for interacting with The Blue Alliance.
"""
READ_URL_PRE = 'https://www.thebluealliance.com/api/v3/'
WRITE_URL_PRE = 'https://www.thebluealliance.com/api/trusted/v1/'
session = requests.Session()
auth_secret = ''
event_key = ''
def __init__(self, auth_key, auth_id='', auth_secret='', event_key=''):
"""
Store auth key so we can reuse it as many times as we make a request.
:param auth_key: Your application authorization key, obtainable at https://www.thebluealliance.com/account.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.auth_secret = auth_secret
self.event_key = event_key
self.session.headers.update({'X-TBA-Auth-Key': auth_key, 'X-TBA-Auth-Id': auth_id})
def _get(self, url):
"""
Helper method: GET data from given URL on TBA's API.
:param url: URL string to get data from.
:return: Requested data in JSON format.
"""
return self.session.get(self.READ_URL_PRE + url).json()
def _post(self, url, data):
"""
Helper method: POST data to a given URL on TBA's API.
:param url: URL string to post data to and hash.
:pararm data: JSON data to post and hash.
:return: Requests Response object.
"""
return self.session.post(self.WRITE_URL_PRE + url % self.event_key, data=data, headers={'X-TBA-Auth-Sig': md5((self.auth_secret + '/api/trusted/v1/' + url % self.event_key + data).encode('utf-8')).hexdigest()})
@staticmethod
def team_key(identifier):
"""
Take raw team number or string key and return string key.
Used by all team-related methods to support either an integer team number or team key being passed.
(We recommend passing an integer, just because it's cleaner. But whatever works.)
:param identifier: int team number or str 'frc####'
:return: string team key in format 'frc####'
"""
return identifier if type(identifier) == str else 'frc%s' % identifier
def status(self):
"""
Get TBA API status information.
:return: Data on current status of the TBA API as APIStatus object.
"""
return APIStatus(self._get('status'))
def teams(self, page=None, year=None, simple=False, keys=False):
"""
Get list of teams.
:param page: Page of teams to view. Each page contains 500 teams.
:param year: View teams from a specific year.
:param simple: Get only vital data.
:param keys: Set to true if you only want the teams' keys rather than full data on them.
:return: List of Team objects or string keys.
"""
# If the user has requested a specific page, get that page.
if page is not None:
if year:
if keys:
return self._get('teams/%s/%s/keys' % (year, page))
else:
return [Team(raw) for raw in self._get('teams/%s/%s%s' % (year, page, '/simple' if simple else ''))]
else:
if keys:
return self._get('teams/%s/keys' % page)
else:
return [Team(raw) for raw in self._get('teams/%s%s' % (page, '/simple' if simple else ''))]
# If no page was specified, get all of them and combine.
else:
teams = []
target = 0
while True:
page_teams = self.teams(page=target, year=year, simple=simple, keys=keys)
if page_teams:
teams.extend(page_teams)
else:
break
target += 1
return teams
def team(self, team, simple=False):
"""
Get data on a single specified team.
:param team: Team to get data for.
:param simple: Get only vital data.
:return: Team object with data on specified team.
"""
return Team(self._get('team/%s%s' % (self.team_key(team), '/simple' if simple else '')))
def team_events(self, team, year=None, simple=False, keys=False):
"""
Get team events a team has participated in.
:param team: Team to get events for.
:param year: Year to get events from.
:param simple: Get only vital data.
:param keys: Get just the keys of the events. Set to True if you only need the keys of each event and not their full data.
:return: List of strings or Teams
"""
if year:
if keys:
return self._get('team/%s/events/%s/keys' % (self.team_key(team), year))
else:
return [Event(raw) for raw in self._get('team/%s/events/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
else:
if keys:
return self._get('team/%s/events/keys' % self.team_key(team))
else:
return [Event(raw) for raw in self._get('team/%s/events%s' % (self.team_key(team), '/simple' if simple else ''))]
def team_awards(self, team, year=None, event=None):
"""
Get list of awards team has recieved.
:param team: Team to get awards of.
:param year: Year to get awards from.
:param event: Event to get awards from.
:return: List of Award objects
"""
if event:
return [Award(raw) for raw in self._get('team/%s/event/%s/awards' % (self.team_key(team), event))]
else:
if year:
return [Award(raw) for raw in self._get('team/%s/awards/%s' % (self.team_key(team), year))]
else:
return [Award(raw) for raw in self._get('team/%s/awards' % self.team_key(team))]
def team_matches(self, team, event=None, year=None, simple=False, keys=False):
"""
Get list of matches team has participated in.
:param team: Team to get matches of.
:param year: Year to get matches from.
:param event: Event to get matches from.
:param simple: Get only vital data.
:param keys: Only get match keys rather than their full data.
:return: List of string keys or Match objects.
"""
if event:
if keys:
return self._get('team/%s/event/%s/matches/keys' % (self.team_key(team), event))
else:
return [Match(raw) for raw in self._get('team/%s/event/%s/matches%s' % (self.team_key(team), event, '/simple' if simple else ''))]
elif year:
if keys:
return self._get('team/%s/matches/%s/keys' % (self.team_key(team), year))
else:
return [Match(raw) for raw in self._get('team/%s/matches/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
def team_years(self, team):
"""
Get years during which a team participated in FRC.
:param team: Key for team to get data about.
:return: List of integer years in which team participated.
"""
return self._get('team/%s/years_participated' % self.team_key(team))
def team_media(self, team, year=None, tag=None):
"""
Get media for a given team.
:param team: Team to get media of.
:param year: Year to get media from.
:param tag: Get only media with a given tag.
:return: List of Media objects.
"""
return [Media(raw) for raw in self._get('team/%s/media%s%s' % (self.team_key(team), ('/tag/%s' % tag) if tag else '', ('/%s' % year) if year else ''))]
def team_robots(self, team):
"""
Get data about a team's robots.
:param team: Key for team whose robots you want data on.
:return: List of Robot objects
"""
return [Robot(raw) for raw in self._get('team/%s/robots' % self.team_key(team))]
def team_districts(self, team):
"""
Get districts a team has competed in.
:param team: Team to get data on.
:return: List of District objects.
"""
return [District(raw) for raw in self._get('team/%s/districts' % self.team_key(team))]
def team_profiles(self, team):
"""
Get team's social media profiles linked on their TBA page.
:param team: Team to get data on.
:return: List of Profile objects.
"""
return [Profile(raw) for raw in self._get('team/%s/social_media' % self.team_key(team))]
def team_status(self, team, event):
"""
Get status of a team at an event.
:param team: Team whose status to get.
:param event: Event team is at.
:return: Status object.
"""
return Status(self._get('team/%s/event/%s/status' % (self.team_key(team), event)))
def events(self, year, simple=False, keys=False):
"""
Get a list of events in a given year.
:param year: Year to get events from.
:param keys: Get only keys of the events rather than full data.
:param simple: Get only vital data.
:return: List of string event keys or Event objects.
"""
if keys:
return self._get('events/%s/keys' % year)
else:
return [Event(raw) for raw in self._get('events/%s%s' % (year, '/simple' if simple else ''))]
def event(self, event, simple=False):
"""
Get basic information about an event.
More specific data (typically obtained with the detail_type URL parameter) can be obtained with event_alliances(), event_district_points(), event_insights(), event_oprs(), event_predictions(), and event_rankings().
:param event: Key of event for which you desire data.
:param simple: Get only vital data.
:return: A single Event object.
"""
return Event(self._get('event/%s%s' % (event, '/simple' if simple else '')))
def event_alliances(self, event):
"""
Get information about alliances at event.
:param event: Key of event to get data on.
:return: List of Alliance objects.
"""
return [Alliance(raw) for raw in self._get('event/%s/alliances' % event)]
def event_district_points(self, event):
"""
Get district point information about an event.
:param event: Key of event to get data on.
:return: Single DistrictPoints object.
"""
return DistrictPoints(self._get('event/%s/district_points' % event))
def event_insights(self, event):
"""
Get insights about an event.
:param event: Key of event to get data on.
:return: Single Insights object.
"""
return Insights(self._get('event/%s/insights' % event))
def event_oprs(self, event):
"""
Get OPRs from an event.
:param event: Key of event to get data on.
:return: Single OPRs object.
"""
return OPRs(self._get('event/%s/oprs' % event))
def event_predictions(self, event):
"""
Get predictions for matches during an event.
:param event: Key of event to get data on.
:return: Single Predictions object.
"""
return Predictions(self._get('event/%s/predictions' % event))
def event_rankings(self, event):
"""
Get rankings from an event.
:param event: Key of event to get data on.
:return: Single Rankings object.
"""
return Rankings(self._get('event/%s/rankings' % event))
def event_teams(self, event, simple=False, keys=False):
"""
Get list of teams at an event.
:param event: Event key to get data on.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects.
"""
if keys:
return self._get('event/%s/teams/keys' % event)
else:
return [Team(raw) for raw in self._get('event/%s/teams%s' % (event, '/simple' if simple else ''))]
def event_awards(self, event):
"""
Get list of awards presented at an event.
:param event: Event key to get data on.
:return: List of Award objects.
"""
return [Award(raw) for raw in self._get('event/%s/awards' % event)]
def match(self, key=None, year=None, event=None, type='qm', number=None, round=None, simple=False):
"""
Get data on a match.
You may either pass the match's key directly, or pass `year`, `event`, `type`, `match` (the match number), and `round` if applicable (playoffs only). The event year may be specified as part of the event key or specified in the `year` parameter.
:param key: Key of match to get data on. First option for specifying a match (see above).
:param year: Year in which match took place. Optional; if excluded then must be included in event key.
:param event: Key of event in which match took place. Including year is optional; if excluded then must be specified in `year` parameter.
:param type: One of 'qm' (qualifier match), 'qf' (quarterfinal), 'sf' (semifinal), 'f' (final). If unspecified, 'qm' will be assumed.
:param number: Match number. For example, for qualifier 32, you'd pass 32. For Semifinal 2 round 3, you'd pass 2.
:param round: For playoff matches, you will need to specify a round.
:param simple: Get only vital data.
:return: A single Match object.
"""
if key:
return Match(self._get('match/%s%s' % (key, '/simple' if simple else '')))
else:
return Match(self._get('match/{year}{event}_{type}{number}{round}{simple}'.format(year=year if not event[0].isdigit() else '',
event=event,
type=type,
number=number,
round=('m%s' % round) if not type == 'qm' else '',
simple='/simple' if simple else '')))
def districts(self, year):
"""
Return a list of districts active.
:param year: Year from which you want to get active districts.
:return: A list of District objects.
"""
return [District(raw) for raw in self._get('districts/%s' % year)]
def district_events(self, district, simple=False, keys=False):
"""
Return list of events in a given district.
:param district: Key of district whose events you want.
:param simple: Get only vital data.
:param keys: Return list of event keys only rather than full data on every event.
:return: List of string keys or Event objects.
"""
if keys:
return self._get('district/%s/events/keys' % district)
else:
return [Event(raw) for raw in self._get('district/%s/events%s' % (district, '/simple' if simple else ''))]
def district_rankings(self, district):
"""
Return data about rankings in a given district.
:param district: Key of district to get rankings of.
:return: List of DistrictRanking objects.
"""
return [DistrictRanking(raw) for raw in self._get('district/%s/rankings' % district)]
def district_teams(self, district, simple=False, keys=False):
"""
Get list of teams in the given district.
:param district: Key for the district to get teams in.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects.
"""
if keys:
return self._get('district/%s/teams/keys' % district)
else:
return [Team(raw) for raw in self._get('district/%s/teams' % district)]
def update_trusted(self, auth_id, auth_secret, event_key):
"""
Set Trusted API ID and Secret and the event key they are assigned to.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.session.headers.update({'X-TBA-Auth-Id': auth_id})
self.auth_secret = auth_secret
self.event_key = event_key
def update_event_info(self, data):
"""
Update an event's info on The Blue Alliance.
:param data: Dictionary of data to update the event with.
"""
return self._post('event/%s/info/update', json.dumps(data))
def update_event_alliances(self, data):
"""
Update an event's alliances on The Blue Alliance.
:param data: List of lists of alliances in frc#### string format.
"""
return self._post('event/%s/alliance_selections/update', json.dumps(data))
def update_event_awards(self, data):
"""
Update an event's awards on The Blue Alliance.
:param data: List of Dictionaries of award winners. Each dictionary should have a name_str for the award name, team_key in frc#### string format, and the awardee for any awards given to individuals. The last two can be null
"""
return self._post('event/%s/awards/update', json.dumps(data))
def update_event_matches(self, data):
"""
Update an event's matches on The Blue Alliance.
:param data: List of Dictionaries. More info about the match data can be found in the API docs.
"""
return self._post('event/%s/matches/update', json.dumps(data))
def delete_event_matches(self, data=None):
"""
Delete an event's matches on The Blue Alliance.
:param data: List of match keys to delete, can be ommited if you would like to delete all matches.
"""
return self._post('event/%s/matches/delete_all' if data is None else 'event/%s/matches/delete', json.dumps(self.event_key) if data is None else json.dumps(data))
def update_event_rankings(self, data):
"""
Update an event's rankings on The Blue Alliance.
:param data: Dictionary of breakdowns and rankings. Rankings are a list of dictionaries.
"""
return self._post('event/%s/rankings/update', json.dumps(data))
def update_event_team_list(self, data):
"""
Update an event's team list on The Blue Alliance.
:param data: a list of team keys in frc#### string format.
"""
return self._post('event/%s/team_list/update', json.dumps(data))
def add_match_videos(self, data):
"""
Add match videos to the respective match pages of an event on The Blue Alliance.
:param data: Dictionary of partial match keys to youtube video ids.
"""
return self._post('event/%s/match_videos/add', json.dumps(data))
def add_event_videos(self, data):
"""
Add videos to an event's media tab on The Blue Alliance.
:param data: List of youtube video ids.
"""
return self._post('event/%s/media/add', json.dumps(data))
|
frc1418/tbapy | tbapy/main.py | TBA.match | python | def match(self, key=None, year=None, event=None, type='qm', number=None, round=None, simple=False):
if key:
return Match(self._get('match/%s%s' % (key, '/simple' if simple else '')))
else:
return Match(self._get('match/{year}{event}_{type}{number}{round}{simple}'.format(year=year if not event[0].isdigit() else '',
event=event,
type=type,
number=number,
round=('m%s' % round) if not type == 'qm' else '',
simple='/simple' if simple else ''))) | Get data on a match.
You may either pass the match's key directly, or pass `year`, `event`, `type`, `match` (the match number), and `round` if applicable (playoffs only). The event year may be specified as part of the event key or specified in the `year` parameter.
:param key: Key of match to get data on. First option for specifying a match (see above).
:param year: Year in which match took place. Optional; if excluded then must be included in event key.
:param event: Key of event in which match took place. Including year is optional; if excluded then must be specified in `year` parameter.
:param type: One of 'qm' (qualifier match), 'qf' (quarterfinal), 'sf' (semifinal), 'f' (final). If unspecified, 'qm' will be assumed.
:param number: Match number. For example, for qualifier 32, you'd pass 32. For Semifinal 2 round 3, you'd pass 2.
:param round: For playoff matches, you will need to specify a round.
:param simple: Get only vital data.
:return: A single Match object. | train | https://github.com/frc1418/tbapy/blob/3866d5a9971fe3dfaf1a1d83638bd6be6070f0c4/tbapy/main.py#L354-L377 | [
"def _get(self, url):\n \"\"\"\n Helper method: GET data from given URL on TBA's API.\n\n :param url: URL string to get data from.\n :return: Requested data in JSON format.\n \"\"\"\n return self.session.get(self.READ_URL_PRE + url).json()\n"
] | class TBA:
"""
Main library class.
Contains methods for interacting with The Blue Alliance.
"""
READ_URL_PRE = 'https://www.thebluealliance.com/api/v3/'
WRITE_URL_PRE = 'https://www.thebluealliance.com/api/trusted/v1/'
session = requests.Session()
auth_secret = ''
event_key = ''
def __init__(self, auth_key, auth_id='', auth_secret='', event_key=''):
"""
Store auth key so we can reuse it as many times as we make a request.
:param auth_key: Your application authorization key, obtainable at https://www.thebluealliance.com/account.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.auth_secret = auth_secret
self.event_key = event_key
self.session.headers.update({'X-TBA-Auth-Key': auth_key, 'X-TBA-Auth-Id': auth_id})
def _get(self, url):
"""
Helper method: GET data from given URL on TBA's API.
:param url: URL string to get data from.
:return: Requested data in JSON format.
"""
return self.session.get(self.READ_URL_PRE + url).json()
def _post(self, url, data):
"""
Helper method: POST data to a given URL on TBA's API.
:param url: URL string to post data to and hash.
:pararm data: JSON data to post and hash.
:return: Requests Response object.
"""
return self.session.post(self.WRITE_URL_PRE + url % self.event_key, data=data, headers={'X-TBA-Auth-Sig': md5((self.auth_secret + '/api/trusted/v1/' + url % self.event_key + data).encode('utf-8')).hexdigest()})
@staticmethod
def team_key(identifier):
"""
Take raw team number or string key and return string key.
Used by all team-related methods to support either an integer team number or team key being passed.
(We recommend passing an integer, just because it's cleaner. But whatever works.)
:param identifier: int team number or str 'frc####'
:return: string team key in format 'frc####'
"""
return identifier if type(identifier) == str else 'frc%s' % identifier
def status(self):
"""
Get TBA API status information.
:return: Data on current status of the TBA API as APIStatus object.
"""
return APIStatus(self._get('status'))
def teams(self, page=None, year=None, simple=False, keys=False):
"""
Get list of teams.
:param page: Page of teams to view. Each page contains 500 teams.
:param year: View teams from a specific year.
:param simple: Get only vital data.
:param keys: Set to true if you only want the teams' keys rather than full data on them.
:return: List of Team objects or string keys.
"""
# If the user has requested a specific page, get that page.
if page is not None:
if year:
if keys:
return self._get('teams/%s/%s/keys' % (year, page))
else:
return [Team(raw) for raw in self._get('teams/%s/%s%s' % (year, page, '/simple' if simple else ''))]
else:
if keys:
return self._get('teams/%s/keys' % page)
else:
return [Team(raw) for raw in self._get('teams/%s%s' % (page, '/simple' if simple else ''))]
# If no page was specified, get all of them and combine.
else:
teams = []
target = 0
while True:
page_teams = self.teams(page=target, year=year, simple=simple, keys=keys)
if page_teams:
teams.extend(page_teams)
else:
break
target += 1
return teams
def team(self, team, simple=False):
"""
Get data on a single specified team.
:param team: Team to get data for.
:param simple: Get only vital data.
:return: Team object with data on specified team.
"""
return Team(self._get('team/%s%s' % (self.team_key(team), '/simple' if simple else '')))
def team_events(self, team, year=None, simple=False, keys=False):
"""
Get team events a team has participated in.
:param team: Team to get events for.
:param year: Year to get events from.
:param simple: Get only vital data.
:param keys: Get just the keys of the events. Set to True if you only need the keys of each event and not their full data.
:return: List of strings or Teams
"""
if year:
if keys:
return self._get('team/%s/events/%s/keys' % (self.team_key(team), year))
else:
return [Event(raw) for raw in self._get('team/%s/events/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
else:
if keys:
return self._get('team/%s/events/keys' % self.team_key(team))
else:
return [Event(raw) for raw in self._get('team/%s/events%s' % (self.team_key(team), '/simple' if simple else ''))]
def team_awards(self, team, year=None, event=None):
"""
Get list of awards team has recieved.
:param team: Team to get awards of.
:param year: Year to get awards from.
:param event: Event to get awards from.
:return: List of Award objects
"""
if event:
return [Award(raw) for raw in self._get('team/%s/event/%s/awards' % (self.team_key(team), event))]
else:
if year:
return [Award(raw) for raw in self._get('team/%s/awards/%s' % (self.team_key(team), year))]
else:
return [Award(raw) for raw in self._get('team/%s/awards' % self.team_key(team))]
def team_matches(self, team, event=None, year=None, simple=False, keys=False):
"""
Get list of matches team has participated in.
:param team: Team to get matches of.
:param year: Year to get matches from.
:param event: Event to get matches from.
:param simple: Get only vital data.
:param keys: Only get match keys rather than their full data.
:return: List of string keys or Match objects.
"""
if event:
if keys:
return self._get('team/%s/event/%s/matches/keys' % (self.team_key(team), event))
else:
return [Match(raw) for raw in self._get('team/%s/event/%s/matches%s' % (self.team_key(team), event, '/simple' if simple else ''))]
elif year:
if keys:
return self._get('team/%s/matches/%s/keys' % (self.team_key(team), year))
else:
return [Match(raw) for raw in self._get('team/%s/matches/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
def team_years(self, team):
"""
Get years during which a team participated in FRC.
:param team: Key for team to get data about.
:return: List of integer years in which team participated.
"""
return self._get('team/%s/years_participated' % self.team_key(team))
def team_media(self, team, year=None, tag=None):
"""
Get media for a given team.
:param team: Team to get media of.
:param year: Year to get media from.
:param tag: Get only media with a given tag.
:return: List of Media objects.
"""
return [Media(raw) for raw in self._get('team/%s/media%s%s' % (self.team_key(team), ('/tag/%s' % tag) if tag else '', ('/%s' % year) if year else ''))]
def team_robots(self, team):
"""
Get data about a team's robots.
:param team: Key for team whose robots you want data on.
:return: List of Robot objects
"""
return [Robot(raw) for raw in self._get('team/%s/robots' % self.team_key(team))]
def team_districts(self, team):
"""
Get districts a team has competed in.
:param team: Team to get data on.
:return: List of District objects.
"""
return [District(raw) for raw in self._get('team/%s/districts' % self.team_key(team))]
def team_profiles(self, team):
"""
Get team's social media profiles linked on their TBA page.
:param team: Team to get data on.
:return: List of Profile objects.
"""
return [Profile(raw) for raw in self._get('team/%s/social_media' % self.team_key(team))]
def team_status(self, team, event):
"""
Get status of a team at an event.
:param team: Team whose status to get.
:param event: Event team is at.
:return: Status object.
"""
return Status(self._get('team/%s/event/%s/status' % (self.team_key(team), event)))
def events(self, year, simple=False, keys=False):
"""
Get a list of events in a given year.
:param year: Year to get events from.
:param keys: Get only keys of the events rather than full data.
:param simple: Get only vital data.
:return: List of string event keys or Event objects.
"""
if keys:
return self._get('events/%s/keys' % year)
else:
return [Event(raw) for raw in self._get('events/%s%s' % (year, '/simple' if simple else ''))]
def event(self, event, simple=False):
"""
Get basic information about an event.
More specific data (typically obtained with the detail_type URL parameter) can be obtained with event_alliances(), event_district_points(), event_insights(), event_oprs(), event_predictions(), and event_rankings().
:param event: Key of event for which you desire data.
:param simple: Get only vital data.
:return: A single Event object.
"""
return Event(self._get('event/%s%s' % (event, '/simple' if simple else '')))
def event_alliances(self, event):
"""
Get information about alliances at event.
:param event: Key of event to get data on.
:return: List of Alliance objects.
"""
return [Alliance(raw) for raw in self._get('event/%s/alliances' % event)]
def event_district_points(self, event):
"""
Get district point information about an event.
:param event: Key of event to get data on.
:return: Single DistrictPoints object.
"""
return DistrictPoints(self._get('event/%s/district_points' % event))
def event_insights(self, event):
"""
Get insights about an event.
:param event: Key of event to get data on.
:return: Single Insights object.
"""
return Insights(self._get('event/%s/insights' % event))
def event_oprs(self, event):
"""
Get OPRs from an event.
:param event: Key of event to get data on.
:return: Single OPRs object.
"""
return OPRs(self._get('event/%s/oprs' % event))
def event_predictions(self, event):
"""
Get predictions for matches during an event.
:param event: Key of event to get data on.
:return: Single Predictions object.
"""
return Predictions(self._get('event/%s/predictions' % event))
def event_rankings(self, event):
"""
Get rankings from an event.
:param event: Key of event to get data on.
:return: Single Rankings object.
"""
return Rankings(self._get('event/%s/rankings' % event))
def event_teams(self, event, simple=False, keys=False):
"""
Get list of teams at an event.
:param event: Event key to get data on.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects.
"""
if keys:
return self._get('event/%s/teams/keys' % event)
else:
return [Team(raw) for raw in self._get('event/%s/teams%s' % (event, '/simple' if simple else ''))]
def event_awards(self, event):
"""
Get list of awards presented at an event.
:param event: Event key to get data on.
:return: List of Award objects.
"""
return [Award(raw) for raw in self._get('event/%s/awards' % event)]
def event_matches(self, event, simple=False, keys=False):
"""
Get list of matches played at an event.
:param event: Event key to get data on.
:param keys: Return list of match keys only rather than full data on every match.
:param simple: Get only vital data.
:return: List of string keys or Match objects.
"""
if keys:
return self._get('event/%s/matches/keys' % event)
else:
return [Match(raw) for raw in self._get('event/%s/matches%s' % (event, '/simple' if simple else ''))]
def districts(self, year):
"""
Return a list of districts active.
:param year: Year from which you want to get active districts.
:return: A list of District objects.
"""
return [District(raw) for raw in self._get('districts/%s' % year)]
def district_events(self, district, simple=False, keys=False):
"""
Return list of events in a given district.
:param district: Key of district whose events you want.
:param simple: Get only vital data.
:param keys: Return list of event keys only rather than full data on every event.
:return: List of string keys or Event objects.
"""
if keys:
return self._get('district/%s/events/keys' % district)
else:
return [Event(raw) for raw in self._get('district/%s/events%s' % (district, '/simple' if simple else ''))]
def district_rankings(self, district):
"""
Return data about rankings in a given district.
:param district: Key of district to get rankings of.
:return: List of DistrictRanking objects.
"""
return [DistrictRanking(raw) for raw in self._get('district/%s/rankings' % district)]
def district_teams(self, district, simple=False, keys=False):
"""
Get list of teams in the given district.
:param district: Key for the district to get teams in.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects.
"""
if keys:
return self._get('district/%s/teams/keys' % district)
else:
return [Team(raw) for raw in self._get('district/%s/teams' % district)]
def update_trusted(self, auth_id, auth_secret, event_key):
"""
Set Trusted API ID and Secret and the event key they are assigned to.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.session.headers.update({'X-TBA-Auth-Id': auth_id})
self.auth_secret = auth_secret
self.event_key = event_key
def update_event_info(self, data):
"""
Update an event's info on The Blue Alliance.
:param data: Dictionary of data to update the event with.
"""
return self._post('event/%s/info/update', json.dumps(data))
def update_event_alliances(self, data):
"""
Update an event's alliances on The Blue Alliance.
:param data: List of lists of alliances in frc#### string format.
"""
return self._post('event/%s/alliance_selections/update', json.dumps(data))
def update_event_awards(self, data):
"""
Update an event's awards on The Blue Alliance.
:param data: List of Dictionaries of award winners. Each dictionary should have a name_str for the award name, team_key in frc#### string format, and the awardee for any awards given to individuals. The last two can be null
"""
return self._post('event/%s/awards/update', json.dumps(data))
def update_event_matches(self, data):
"""
Update an event's matches on The Blue Alliance.
:param data: List of Dictionaries. More info about the match data can be found in the API docs.
"""
return self._post('event/%s/matches/update', json.dumps(data))
def delete_event_matches(self, data=None):
"""
Delete an event's matches on The Blue Alliance.
:param data: List of match keys to delete, can be ommited if you would like to delete all matches.
"""
return self._post('event/%s/matches/delete_all' if data is None else 'event/%s/matches/delete', json.dumps(self.event_key) if data is None else json.dumps(data))
def update_event_rankings(self, data):
"""
Update an event's rankings on The Blue Alliance.
:param data: Dictionary of breakdowns and rankings. Rankings are a list of dictionaries.
"""
return self._post('event/%s/rankings/update', json.dumps(data))
def update_event_team_list(self, data):
"""
Update an event's team list on The Blue Alliance.
:param data: a list of team keys in frc#### string format.
"""
return self._post('event/%s/team_list/update', json.dumps(data))
def add_match_videos(self, data):
"""
Add match videos to the respective match pages of an event on The Blue Alliance.
:param data: Dictionary of partial match keys to youtube video ids.
"""
return self._post('event/%s/match_videos/add', json.dumps(data))
def add_event_videos(self, data):
"""
Add videos to an event's media tab on The Blue Alliance.
:param data: List of youtube video ids.
"""
return self._post('event/%s/media/add', json.dumps(data))
|
frc1418/tbapy | tbapy/main.py | TBA.district_events | python | def district_events(self, district, simple=False, keys=False):
if keys:
return self._get('district/%s/events/keys' % district)
else:
return [Event(raw) for raw in self._get('district/%s/events%s' % (district, '/simple' if simple else ''))] | Return list of events in a given district.
:param district: Key of district whose events you want.
:param simple: Get only vital data.
:param keys: Return list of event keys only rather than full data on every event.
:return: List of string keys or Event objects. | train | https://github.com/frc1418/tbapy/blob/3866d5a9971fe3dfaf1a1d83638bd6be6070f0c4/tbapy/main.py#L388-L400 | [
"def _get(self, url):\n \"\"\"\n Helper method: GET data from given URL on TBA's API.\n\n :param url: URL string to get data from.\n :return: Requested data in JSON format.\n \"\"\"\n return self.session.get(self.READ_URL_PRE + url).json()\n"
] | class TBA:
"""
Main library class.
Contains methods for interacting with The Blue Alliance.
"""
READ_URL_PRE = 'https://www.thebluealliance.com/api/v3/'
WRITE_URL_PRE = 'https://www.thebluealliance.com/api/trusted/v1/'
session = requests.Session()
auth_secret = ''
event_key = ''
def __init__(self, auth_key, auth_id='', auth_secret='', event_key=''):
"""
Store auth key so we can reuse it as many times as we make a request.
:param auth_key: Your application authorization key, obtainable at https://www.thebluealliance.com/account.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.auth_secret = auth_secret
self.event_key = event_key
self.session.headers.update({'X-TBA-Auth-Key': auth_key, 'X-TBA-Auth-Id': auth_id})
def _get(self, url):
"""
Helper method: GET data from given URL on TBA's API.
:param url: URL string to get data from.
:return: Requested data in JSON format.
"""
return self.session.get(self.READ_URL_PRE + url).json()
def _post(self, url, data):
"""
Helper method: POST data to a given URL on TBA's API.
:param url: URL string to post data to and hash.
:pararm data: JSON data to post and hash.
:return: Requests Response object.
"""
return self.session.post(self.WRITE_URL_PRE + url % self.event_key, data=data, headers={'X-TBA-Auth-Sig': md5((self.auth_secret + '/api/trusted/v1/' + url % self.event_key + data).encode('utf-8')).hexdigest()})
@staticmethod
def team_key(identifier):
"""
Take raw team number or string key and return string key.
Used by all team-related methods to support either an integer team number or team key being passed.
(We recommend passing an integer, just because it's cleaner. But whatever works.)
:param identifier: int team number or str 'frc####'
:return: string team key in format 'frc####'
"""
return identifier if type(identifier) == str else 'frc%s' % identifier
def status(self):
"""
Get TBA API status information.
:return: Data on current status of the TBA API as APIStatus object.
"""
return APIStatus(self._get('status'))
def teams(self, page=None, year=None, simple=False, keys=False):
"""
Get list of teams.
:param page: Page of teams to view. Each page contains 500 teams.
:param year: View teams from a specific year.
:param simple: Get only vital data.
:param keys: Set to true if you only want the teams' keys rather than full data on them.
:return: List of Team objects or string keys.
"""
# If the user has requested a specific page, get that page.
if page is not None:
if year:
if keys:
return self._get('teams/%s/%s/keys' % (year, page))
else:
return [Team(raw) for raw in self._get('teams/%s/%s%s' % (year, page, '/simple' if simple else ''))]
else:
if keys:
return self._get('teams/%s/keys' % page)
else:
return [Team(raw) for raw in self._get('teams/%s%s' % (page, '/simple' if simple else ''))]
# If no page was specified, get all of them and combine.
else:
teams = []
target = 0
while True:
page_teams = self.teams(page=target, year=year, simple=simple, keys=keys)
if page_teams:
teams.extend(page_teams)
else:
break
target += 1
return teams
def team(self, team, simple=False):
"""
Get data on a single specified team.
:param team: Team to get data for.
:param simple: Get only vital data.
:return: Team object with data on specified team.
"""
return Team(self._get('team/%s%s' % (self.team_key(team), '/simple' if simple else '')))
def team_events(self, team, year=None, simple=False, keys=False):
"""
Get team events a team has participated in.
:param team: Team to get events for.
:param year: Year to get events from.
:param simple: Get only vital data.
:param keys: Get just the keys of the events. Set to True if you only need the keys of each event and not their full data.
:return: List of strings or Teams
"""
if year:
if keys:
return self._get('team/%s/events/%s/keys' % (self.team_key(team), year))
else:
return [Event(raw) for raw in self._get('team/%s/events/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
else:
if keys:
return self._get('team/%s/events/keys' % self.team_key(team))
else:
return [Event(raw) for raw in self._get('team/%s/events%s' % (self.team_key(team), '/simple' if simple else ''))]
def team_awards(self, team, year=None, event=None):
"""
Get list of awards team has recieved.
:param team: Team to get awards of.
:param year: Year to get awards from.
:param event: Event to get awards from.
:return: List of Award objects
"""
if event:
return [Award(raw) for raw in self._get('team/%s/event/%s/awards' % (self.team_key(team), event))]
else:
if year:
return [Award(raw) for raw in self._get('team/%s/awards/%s' % (self.team_key(team), year))]
else:
return [Award(raw) for raw in self._get('team/%s/awards' % self.team_key(team))]
def team_matches(self, team, event=None, year=None, simple=False, keys=False):
"""
Get list of matches team has participated in.
:param team: Team to get matches of.
:param year: Year to get matches from.
:param event: Event to get matches from.
:param simple: Get only vital data.
:param keys: Only get match keys rather than their full data.
:return: List of string keys or Match objects.
"""
if event:
if keys:
return self._get('team/%s/event/%s/matches/keys' % (self.team_key(team), event))
else:
return [Match(raw) for raw in self._get('team/%s/event/%s/matches%s' % (self.team_key(team), event, '/simple' if simple else ''))]
elif year:
if keys:
return self._get('team/%s/matches/%s/keys' % (self.team_key(team), year))
else:
return [Match(raw) for raw in self._get('team/%s/matches/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
def team_years(self, team):
"""
Get years during which a team participated in FRC.
:param team: Key for team to get data about.
:return: List of integer years in which team participated.
"""
return self._get('team/%s/years_participated' % self.team_key(team))
def team_media(self, team, year=None, tag=None):
"""
Get media for a given team.
:param team: Team to get media of.
:param year: Year to get media from.
:param tag: Get only media with a given tag.
:return: List of Media objects.
"""
return [Media(raw) for raw in self._get('team/%s/media%s%s' % (self.team_key(team), ('/tag/%s' % tag) if tag else '', ('/%s' % year) if year else ''))]
def team_robots(self, team):
"""
Get data about a team's robots.
:param team: Key for team whose robots you want data on.
:return: List of Robot objects
"""
return [Robot(raw) for raw in self._get('team/%s/robots' % self.team_key(team))]
def team_districts(self, team):
"""
Get districts a team has competed in.
:param team: Team to get data on.
:return: List of District objects.
"""
return [District(raw) for raw in self._get('team/%s/districts' % self.team_key(team))]
def team_profiles(self, team):
"""
Get team's social media profiles linked on their TBA page.
:param team: Team to get data on.
:return: List of Profile objects.
"""
return [Profile(raw) for raw in self._get('team/%s/social_media' % self.team_key(team))]
def team_status(self, team, event):
"""
Get status of a team at an event.
:param team: Team whose status to get.
:param event: Event team is at.
:return: Status object.
"""
return Status(self._get('team/%s/event/%s/status' % (self.team_key(team), event)))
def events(self, year, simple=False, keys=False):
"""
Get a list of events in a given year.
:param year: Year to get events from.
:param keys: Get only keys of the events rather than full data.
:param simple: Get only vital data.
:return: List of string event keys or Event objects.
"""
if keys:
return self._get('events/%s/keys' % year)
else:
return [Event(raw) for raw in self._get('events/%s%s' % (year, '/simple' if simple else ''))]
def event(self, event, simple=False):
"""
Get basic information about an event.
More specific data (typically obtained with the detail_type URL parameter) can be obtained with event_alliances(), event_district_points(), event_insights(), event_oprs(), event_predictions(), and event_rankings().
:param event: Key of event for which you desire data.
:param simple: Get only vital data.
:return: A single Event object.
"""
return Event(self._get('event/%s%s' % (event, '/simple' if simple else '')))
def event_alliances(self, event):
"""
Get information about alliances at event.
:param event: Key of event to get data on.
:return: List of Alliance objects.
"""
return [Alliance(raw) for raw in self._get('event/%s/alliances' % event)]
def event_district_points(self, event):
"""
Get district point information about an event.
:param event: Key of event to get data on.
:return: Single DistrictPoints object.
"""
return DistrictPoints(self._get('event/%s/district_points' % event))
def event_insights(self, event):
"""
Get insights about an event.
:param event: Key of event to get data on.
:return: Single Insights object.
"""
return Insights(self._get('event/%s/insights' % event))
def event_oprs(self, event):
"""
Get OPRs from an event.
:param event: Key of event to get data on.
:return: Single OPRs object.
"""
return OPRs(self._get('event/%s/oprs' % event))
def event_predictions(self, event):
"""
Get predictions for matches during an event.
:param event: Key of event to get data on.
:return: Single Predictions object.
"""
return Predictions(self._get('event/%s/predictions' % event))
def event_rankings(self, event):
"""
Get rankings from an event.
:param event: Key of event to get data on.
:return: Single Rankings object.
"""
return Rankings(self._get('event/%s/rankings' % event))
def event_teams(self, event, simple=False, keys=False):
"""
Get list of teams at an event.
:param event: Event key to get data on.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects.
"""
if keys:
return self._get('event/%s/teams/keys' % event)
else:
return [Team(raw) for raw in self._get('event/%s/teams%s' % (event, '/simple' if simple else ''))]
def event_awards(self, event):
"""
Get list of awards presented at an event.
:param event: Event key to get data on.
:return: List of Award objects.
"""
return [Award(raw) for raw in self._get('event/%s/awards' % event)]
def event_matches(self, event, simple=False, keys=False):
"""
Get list of matches played at an event.
:param event: Event key to get data on.
:param keys: Return list of match keys only rather than full data on every match.
:param simple: Get only vital data.
:return: List of string keys or Match objects.
"""
if keys:
return self._get('event/%s/matches/keys' % event)
else:
return [Match(raw) for raw in self._get('event/%s/matches%s' % (event, '/simple' if simple else ''))]
def match(self, key=None, year=None, event=None, type='qm', number=None, round=None, simple=False):
"""
Get data on a match.
You may either pass the match's key directly, or pass `year`, `event`, `type`, `match` (the match number), and `round` if applicable (playoffs only). The event year may be specified as part of the event key or specified in the `year` parameter.
:param key: Key of match to get data on. First option for specifying a match (see above).
:param year: Year in which match took place. Optional; if excluded then must be included in event key.
:param event: Key of event in which match took place. Including year is optional; if excluded then must be specified in `year` parameter.
:param type: One of 'qm' (qualifier match), 'qf' (quarterfinal), 'sf' (semifinal), 'f' (final). If unspecified, 'qm' will be assumed.
:param number: Match number. For example, for qualifier 32, you'd pass 32. For Semifinal 2 round 3, you'd pass 2.
:param round: For playoff matches, you will need to specify a round.
:param simple: Get only vital data.
:return: A single Match object.
"""
if key:
return Match(self._get('match/%s%s' % (key, '/simple' if simple else '')))
else:
return Match(self._get('match/{year}{event}_{type}{number}{round}{simple}'.format(year=year if not event[0].isdigit() else '',
event=event,
type=type,
number=number,
round=('m%s' % round) if not type == 'qm' else '',
simple='/simple' if simple else '')))
def districts(self, year):
"""
Return a list of districts active.
:param year: Year from which you want to get active districts.
:return: A list of District objects.
"""
return [District(raw) for raw in self._get('districts/%s' % year)]
def district_rankings(self, district):
"""
Return data about rankings in a given district.
:param district: Key of district to get rankings of.
:return: List of DistrictRanking objects.
"""
return [DistrictRanking(raw) for raw in self._get('district/%s/rankings' % district)]
def district_teams(self, district, simple=False, keys=False):
"""
Get list of teams in the given district.
:param district: Key for the district to get teams in.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects.
"""
if keys:
return self._get('district/%s/teams/keys' % district)
else:
return [Team(raw) for raw in self._get('district/%s/teams' % district)]
def update_trusted(self, auth_id, auth_secret, event_key):
"""
Set Trusted API ID and Secret and the event key they are assigned to.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.session.headers.update({'X-TBA-Auth-Id': auth_id})
self.auth_secret = auth_secret
self.event_key = event_key
def update_event_info(self, data):
"""
Update an event's info on The Blue Alliance.
:param data: Dictionary of data to update the event with.
"""
return self._post('event/%s/info/update', json.dumps(data))
def update_event_alliances(self, data):
"""
Update an event's alliances on The Blue Alliance.
:param data: List of lists of alliances in frc#### string format.
"""
return self._post('event/%s/alliance_selections/update', json.dumps(data))
def update_event_awards(self, data):
"""
Update an event's awards on The Blue Alliance.
:param data: List of Dictionaries of award winners. Each dictionary should have a name_str for the award name, team_key in frc#### string format, and the awardee for any awards given to individuals. The last two can be null
"""
return self._post('event/%s/awards/update', json.dumps(data))
def update_event_matches(self, data):
"""
Update an event's matches on The Blue Alliance.
:param data: List of Dictionaries. More info about the match data can be found in the API docs.
"""
return self._post('event/%s/matches/update', json.dumps(data))
def delete_event_matches(self, data=None):
"""
Delete an event's matches on The Blue Alliance.
:param data: List of match keys to delete, can be ommited if you would like to delete all matches.
"""
return self._post('event/%s/matches/delete_all' if data is None else 'event/%s/matches/delete', json.dumps(self.event_key) if data is None else json.dumps(data))
def update_event_rankings(self, data):
"""
Update an event's rankings on The Blue Alliance.
:param data: Dictionary of breakdowns and rankings. Rankings are a list of dictionaries.
"""
return self._post('event/%s/rankings/update', json.dumps(data))
def update_event_team_list(self, data):
"""
Update an event's team list on The Blue Alliance.
:param data: a list of team keys in frc#### string format.
"""
return self._post('event/%s/team_list/update', json.dumps(data))
def add_match_videos(self, data):
"""
Add match videos to the respective match pages of an event on The Blue Alliance.
:param data: Dictionary of partial match keys to youtube video ids.
"""
return self._post('event/%s/match_videos/add', json.dumps(data))
def add_event_videos(self, data):
"""
Add videos to an event's media tab on The Blue Alliance.
:param data: List of youtube video ids.
"""
return self._post('event/%s/media/add', json.dumps(data))
|
frc1418/tbapy | tbapy/main.py | TBA.district_teams | python | def district_teams(self, district, simple=False, keys=False):
if keys:
return self._get('district/%s/teams/keys' % district)
else:
return [Team(raw) for raw in self._get('district/%s/teams' % district)] | Get list of teams in the given district.
:param district: Key for the district to get teams in.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects. | train | https://github.com/frc1418/tbapy/blob/3866d5a9971fe3dfaf1a1d83638bd6be6070f0c4/tbapy/main.py#L411-L423 | [
"def _get(self, url):\n \"\"\"\n Helper method: GET data from given URL on TBA's API.\n\n :param url: URL string to get data from.\n :return: Requested data in JSON format.\n \"\"\"\n return self.session.get(self.READ_URL_PRE + url).json()\n"
] | class TBA:
"""
Main library class.
Contains methods for interacting with The Blue Alliance.
"""
READ_URL_PRE = 'https://www.thebluealliance.com/api/v3/'
WRITE_URL_PRE = 'https://www.thebluealliance.com/api/trusted/v1/'
session = requests.Session()
auth_secret = ''
event_key = ''
def __init__(self, auth_key, auth_id='', auth_secret='', event_key=''):
"""
Store auth key so we can reuse it as many times as we make a request.
:param auth_key: Your application authorization key, obtainable at https://www.thebluealliance.com/account.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.auth_secret = auth_secret
self.event_key = event_key
self.session.headers.update({'X-TBA-Auth-Key': auth_key, 'X-TBA-Auth-Id': auth_id})
def _get(self, url):
"""
Helper method: GET data from given URL on TBA's API.
:param url: URL string to get data from.
:return: Requested data in JSON format.
"""
return self.session.get(self.READ_URL_PRE + url).json()
def _post(self, url, data):
"""
Helper method: POST data to a given URL on TBA's API.
:param url: URL string to post data to and hash.
:pararm data: JSON data to post and hash.
:return: Requests Response object.
"""
return self.session.post(self.WRITE_URL_PRE + url % self.event_key, data=data, headers={'X-TBA-Auth-Sig': md5((self.auth_secret + '/api/trusted/v1/' + url % self.event_key + data).encode('utf-8')).hexdigest()})
@staticmethod
def team_key(identifier):
"""
Take raw team number or string key and return string key.
Used by all team-related methods to support either an integer team number or team key being passed.
(We recommend passing an integer, just because it's cleaner. But whatever works.)
:param identifier: int team number or str 'frc####'
:return: string team key in format 'frc####'
"""
return identifier if type(identifier) == str else 'frc%s' % identifier
def status(self):
"""
Get TBA API status information.
:return: Data on current status of the TBA API as APIStatus object.
"""
return APIStatus(self._get('status'))
def teams(self, page=None, year=None, simple=False, keys=False):
"""
Get list of teams.
:param page: Page of teams to view. Each page contains 500 teams.
:param year: View teams from a specific year.
:param simple: Get only vital data.
:param keys: Set to true if you only want the teams' keys rather than full data on them.
:return: List of Team objects or string keys.
"""
# If the user has requested a specific page, get that page.
if page is not None:
if year:
if keys:
return self._get('teams/%s/%s/keys' % (year, page))
else:
return [Team(raw) for raw in self._get('teams/%s/%s%s' % (year, page, '/simple' if simple else ''))]
else:
if keys:
return self._get('teams/%s/keys' % page)
else:
return [Team(raw) for raw in self._get('teams/%s%s' % (page, '/simple' if simple else ''))]
# If no page was specified, get all of them and combine.
else:
teams = []
target = 0
while True:
page_teams = self.teams(page=target, year=year, simple=simple, keys=keys)
if page_teams:
teams.extend(page_teams)
else:
break
target += 1
return teams
def team(self, team, simple=False):
"""
Get data on a single specified team.
:param team: Team to get data for.
:param simple: Get only vital data.
:return: Team object with data on specified team.
"""
return Team(self._get('team/%s%s' % (self.team_key(team), '/simple' if simple else '')))
def team_events(self, team, year=None, simple=False, keys=False):
"""
Get team events a team has participated in.
:param team: Team to get events for.
:param year: Year to get events from.
:param simple: Get only vital data.
:param keys: Get just the keys of the events. Set to True if you only need the keys of each event and not their full data.
:return: List of strings or Teams
"""
if year:
if keys:
return self._get('team/%s/events/%s/keys' % (self.team_key(team), year))
else:
return [Event(raw) for raw in self._get('team/%s/events/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
else:
if keys:
return self._get('team/%s/events/keys' % self.team_key(team))
else:
return [Event(raw) for raw in self._get('team/%s/events%s' % (self.team_key(team), '/simple' if simple else ''))]
def team_awards(self, team, year=None, event=None):
"""
Get list of awards team has recieved.
:param team: Team to get awards of.
:param year: Year to get awards from.
:param event: Event to get awards from.
:return: List of Award objects
"""
if event:
return [Award(raw) for raw in self._get('team/%s/event/%s/awards' % (self.team_key(team), event))]
else:
if year:
return [Award(raw) for raw in self._get('team/%s/awards/%s' % (self.team_key(team), year))]
else:
return [Award(raw) for raw in self._get('team/%s/awards' % self.team_key(team))]
def team_matches(self, team, event=None, year=None, simple=False, keys=False):
"""
Get list of matches team has participated in.
:param team: Team to get matches of.
:param year: Year to get matches from.
:param event: Event to get matches from.
:param simple: Get only vital data.
:param keys: Only get match keys rather than their full data.
:return: List of string keys or Match objects.
"""
if event:
if keys:
return self._get('team/%s/event/%s/matches/keys' % (self.team_key(team), event))
else:
return [Match(raw) for raw in self._get('team/%s/event/%s/matches%s' % (self.team_key(team), event, '/simple' if simple else ''))]
elif year:
if keys:
return self._get('team/%s/matches/%s/keys' % (self.team_key(team), year))
else:
return [Match(raw) for raw in self._get('team/%s/matches/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
def team_years(self, team):
"""
Get years during which a team participated in FRC.
:param team: Key for team to get data about.
:return: List of integer years in which team participated.
"""
return self._get('team/%s/years_participated' % self.team_key(team))
def team_media(self, team, year=None, tag=None):
"""
Get media for a given team.
:param team: Team to get media of.
:param year: Year to get media from.
:param tag: Get only media with a given tag.
:return: List of Media objects.
"""
return [Media(raw) for raw in self._get('team/%s/media%s%s' % (self.team_key(team), ('/tag/%s' % tag) if tag else '', ('/%s' % year) if year else ''))]
def team_robots(self, team):
"""
Get data about a team's robots.
:param team: Key for team whose robots you want data on.
:return: List of Robot objects
"""
return [Robot(raw) for raw in self._get('team/%s/robots' % self.team_key(team))]
def team_districts(self, team):
"""
Get districts a team has competed in.
:param team: Team to get data on.
:return: List of District objects.
"""
return [District(raw) for raw in self._get('team/%s/districts' % self.team_key(team))]
def team_profiles(self, team):
"""
Get team's social media profiles linked on their TBA page.
:param team: Team to get data on.
:return: List of Profile objects.
"""
return [Profile(raw) for raw in self._get('team/%s/social_media' % self.team_key(team))]
def team_status(self, team, event):
"""
Get status of a team at an event.
:param team: Team whose status to get.
:param event: Event team is at.
:return: Status object.
"""
return Status(self._get('team/%s/event/%s/status' % (self.team_key(team), event)))
def events(self, year, simple=False, keys=False):
"""
Get a list of events in a given year.
:param year: Year to get events from.
:param keys: Get only keys of the events rather than full data.
:param simple: Get only vital data.
:return: List of string event keys or Event objects.
"""
if keys:
return self._get('events/%s/keys' % year)
else:
return [Event(raw) for raw in self._get('events/%s%s' % (year, '/simple' if simple else ''))]
def event(self, event, simple=False):
"""
Get basic information about an event.
More specific data (typically obtained with the detail_type URL parameter) can be obtained with event_alliances(), event_district_points(), event_insights(), event_oprs(), event_predictions(), and event_rankings().
:param event: Key of event for which you desire data.
:param simple: Get only vital data.
:return: A single Event object.
"""
return Event(self._get('event/%s%s' % (event, '/simple' if simple else '')))
def event_alliances(self, event):
"""
Get information about alliances at event.
:param event: Key of event to get data on.
:return: List of Alliance objects.
"""
return [Alliance(raw) for raw in self._get('event/%s/alliances' % event)]
def event_district_points(self, event):
"""
Get district point information about an event.
:param event: Key of event to get data on.
:return: Single DistrictPoints object.
"""
return DistrictPoints(self._get('event/%s/district_points' % event))
def event_insights(self, event):
"""
Get insights about an event.
:param event: Key of event to get data on.
:return: Single Insights object.
"""
return Insights(self._get('event/%s/insights' % event))
def event_oprs(self, event):
"""
Get OPRs from an event.
:param event: Key of event to get data on.
:return: Single OPRs object.
"""
return OPRs(self._get('event/%s/oprs' % event))
def event_predictions(self, event):
"""
Get predictions for matches during an event.
:param event: Key of event to get data on.
:return: Single Predictions object.
"""
return Predictions(self._get('event/%s/predictions' % event))
def event_rankings(self, event):
"""
Get rankings from an event.
:param event: Key of event to get data on.
:return: Single Rankings object.
"""
return Rankings(self._get('event/%s/rankings' % event))
def event_teams(self, event, simple=False, keys=False):
"""
Get list of teams at an event.
:param event: Event key to get data on.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects.
"""
if keys:
return self._get('event/%s/teams/keys' % event)
else:
return [Team(raw) for raw in self._get('event/%s/teams%s' % (event, '/simple' if simple else ''))]
def event_awards(self, event):
"""
Get list of awards presented at an event.
:param event: Event key to get data on.
:return: List of Award objects.
"""
return [Award(raw) for raw in self._get('event/%s/awards' % event)]
def event_matches(self, event, simple=False, keys=False):
"""
Get list of matches played at an event.
:param event: Event key to get data on.
:param keys: Return list of match keys only rather than full data on every match.
:param simple: Get only vital data.
:return: List of string keys or Match objects.
"""
if keys:
return self._get('event/%s/matches/keys' % event)
else:
return [Match(raw) for raw in self._get('event/%s/matches%s' % (event, '/simple' if simple else ''))]
def match(self, key=None, year=None, event=None, type='qm', number=None, round=None, simple=False):
"""
Get data on a match.
You may either pass the match's key directly, or pass `year`, `event`, `type`, `match` (the match number), and `round` if applicable (playoffs only). The event year may be specified as part of the event key or specified in the `year` parameter.
:param key: Key of match to get data on. First option for specifying a match (see above).
:param year: Year in which match took place. Optional; if excluded then must be included in event key.
:param event: Key of event in which match took place. Including year is optional; if excluded then must be specified in `year` parameter.
:param type: One of 'qm' (qualifier match), 'qf' (quarterfinal), 'sf' (semifinal), 'f' (final). If unspecified, 'qm' will be assumed.
:param number: Match number. For example, for qualifier 32, you'd pass 32. For Semifinal 2 round 3, you'd pass 2.
:param round: For playoff matches, you will need to specify a round.
:param simple: Get only vital data.
:return: A single Match object.
"""
if key:
return Match(self._get('match/%s%s' % (key, '/simple' if simple else '')))
else:
return Match(self._get('match/{year}{event}_{type}{number}{round}{simple}'.format(year=year if not event[0].isdigit() else '',
event=event,
type=type,
number=number,
round=('m%s' % round) if not type == 'qm' else '',
simple='/simple' if simple else '')))
def districts(self, year):
"""
Return a list of districts active.
:param year: Year from which you want to get active districts.
:return: A list of District objects.
"""
return [District(raw) for raw in self._get('districts/%s' % year)]
def district_events(self, district, simple=False, keys=False):
"""
Return list of events in a given district.
:param district: Key of district whose events you want.
:param simple: Get only vital data.
:param keys: Return list of event keys only rather than full data on every event.
:return: List of string keys or Event objects.
"""
if keys:
return self._get('district/%s/events/keys' % district)
else:
return [Event(raw) for raw in self._get('district/%s/events%s' % (district, '/simple' if simple else ''))]
def district_rankings(self, district):
"""
Return data about rankings in a given district.
:param district: Key of district to get rankings of.
:return: List of DistrictRanking objects.
"""
return [DistrictRanking(raw) for raw in self._get('district/%s/rankings' % district)]
def update_trusted(self, auth_id, auth_secret, event_key):
"""
Set Trusted API ID and Secret and the event key they are assigned to.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.session.headers.update({'X-TBA-Auth-Id': auth_id})
self.auth_secret = auth_secret
self.event_key = event_key
def update_event_info(self, data):
"""
Update an event's info on The Blue Alliance.
:param data: Dictionary of data to update the event with.
"""
return self._post('event/%s/info/update', json.dumps(data))
def update_event_alliances(self, data):
"""
Update an event's alliances on The Blue Alliance.
:param data: List of lists of alliances in frc#### string format.
"""
return self._post('event/%s/alliance_selections/update', json.dumps(data))
def update_event_awards(self, data):
"""
Update an event's awards on The Blue Alliance.
:param data: List of Dictionaries of award winners. Each dictionary should have a name_str for the award name, team_key in frc#### string format, and the awardee for any awards given to individuals. The last two can be null
"""
return self._post('event/%s/awards/update', json.dumps(data))
def update_event_matches(self, data):
"""
Update an event's matches on The Blue Alliance.
:param data: List of Dictionaries. More info about the match data can be found in the API docs.
"""
return self._post('event/%s/matches/update', json.dumps(data))
def delete_event_matches(self, data=None):
"""
Delete an event's matches on The Blue Alliance.
:param data: List of match keys to delete, can be ommited if you would like to delete all matches.
"""
return self._post('event/%s/matches/delete_all' if data is None else 'event/%s/matches/delete', json.dumps(self.event_key) if data is None else json.dumps(data))
def update_event_rankings(self, data):
"""
Update an event's rankings on The Blue Alliance.
:param data: Dictionary of breakdowns and rankings. Rankings are a list of dictionaries.
"""
return self._post('event/%s/rankings/update', json.dumps(data))
def update_event_team_list(self, data):
"""
Update an event's team list on The Blue Alliance.
:param data: a list of team keys in frc#### string format.
"""
return self._post('event/%s/team_list/update', json.dumps(data))
def add_match_videos(self, data):
"""
Add match videos to the respective match pages of an event on The Blue Alliance.
:param data: Dictionary of partial match keys to youtube video ids.
"""
return self._post('event/%s/match_videos/add', json.dumps(data))
def add_event_videos(self, data):
"""
Add videos to an event's media tab on The Blue Alliance.
:param data: List of youtube video ids.
"""
return self._post('event/%s/media/add', json.dumps(data))
|
frc1418/tbapy | tbapy/main.py | TBA.update_trusted | python | def update_trusted(self, auth_id, auth_secret, event_key):
self.session.headers.update({'X-TBA-Auth-Id': auth_id})
self.auth_secret = auth_secret
self.event_key = event_key | Set Trusted API ID and Secret and the event key they are assigned to.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided. | train | https://github.com/frc1418/tbapy/blob/3866d5a9971fe3dfaf1a1d83638bd6be6070f0c4/tbapy/main.py#L425-L435 | null | class TBA:
"""
Main library class.
Contains methods for interacting with The Blue Alliance.
"""
READ_URL_PRE = 'https://www.thebluealliance.com/api/v3/'
WRITE_URL_PRE = 'https://www.thebluealliance.com/api/trusted/v1/'
session = requests.Session()
auth_secret = ''
event_key = ''
def __init__(self, auth_key, auth_id='', auth_secret='', event_key=''):
"""
Store auth key so we can reuse it as many times as we make a request.
:param auth_key: Your application authorization key, obtainable at https://www.thebluealliance.com/account.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.auth_secret = auth_secret
self.event_key = event_key
self.session.headers.update({'X-TBA-Auth-Key': auth_key, 'X-TBA-Auth-Id': auth_id})
def _get(self, url):
"""
Helper method: GET data from given URL on TBA's API.
:param url: URL string to get data from.
:return: Requested data in JSON format.
"""
return self.session.get(self.READ_URL_PRE + url).json()
def _post(self, url, data):
"""
Helper method: POST data to a given URL on TBA's API.
:param url: URL string to post data to and hash.
:pararm data: JSON data to post and hash.
:return: Requests Response object.
"""
return self.session.post(self.WRITE_URL_PRE + url % self.event_key, data=data, headers={'X-TBA-Auth-Sig': md5((self.auth_secret + '/api/trusted/v1/' + url % self.event_key + data).encode('utf-8')).hexdigest()})
@staticmethod
def team_key(identifier):
"""
Take raw team number or string key and return string key.
Used by all team-related methods to support either an integer team number or team key being passed.
(We recommend passing an integer, just because it's cleaner. But whatever works.)
:param identifier: int team number or str 'frc####'
:return: string team key in format 'frc####'
"""
return identifier if type(identifier) == str else 'frc%s' % identifier
def status(self):
"""
Get TBA API status information.
:return: Data on current status of the TBA API as APIStatus object.
"""
return APIStatus(self._get('status'))
def teams(self, page=None, year=None, simple=False, keys=False):
"""
Get list of teams.
:param page: Page of teams to view. Each page contains 500 teams.
:param year: View teams from a specific year.
:param simple: Get only vital data.
:param keys: Set to true if you only want the teams' keys rather than full data on them.
:return: List of Team objects or string keys.
"""
# If the user has requested a specific page, get that page.
if page is not None:
if year:
if keys:
return self._get('teams/%s/%s/keys' % (year, page))
else:
return [Team(raw) for raw in self._get('teams/%s/%s%s' % (year, page, '/simple' if simple else ''))]
else:
if keys:
return self._get('teams/%s/keys' % page)
else:
return [Team(raw) for raw in self._get('teams/%s%s' % (page, '/simple' if simple else ''))]
# If no page was specified, get all of them and combine.
else:
teams = []
target = 0
while True:
page_teams = self.teams(page=target, year=year, simple=simple, keys=keys)
if page_teams:
teams.extend(page_teams)
else:
break
target += 1
return teams
def team(self, team, simple=False):
"""
Get data on a single specified team.
:param team: Team to get data for.
:param simple: Get only vital data.
:return: Team object with data on specified team.
"""
return Team(self._get('team/%s%s' % (self.team_key(team), '/simple' if simple else '')))
def team_events(self, team, year=None, simple=False, keys=False):
"""
Get team events a team has participated in.
:param team: Team to get events for.
:param year: Year to get events from.
:param simple: Get only vital data.
:param keys: Get just the keys of the events. Set to True if you only need the keys of each event and not their full data.
:return: List of strings or Teams
"""
if year:
if keys:
return self._get('team/%s/events/%s/keys' % (self.team_key(team), year))
else:
return [Event(raw) for raw in self._get('team/%s/events/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
else:
if keys:
return self._get('team/%s/events/keys' % self.team_key(team))
else:
return [Event(raw) for raw in self._get('team/%s/events%s' % (self.team_key(team), '/simple' if simple else ''))]
def team_awards(self, team, year=None, event=None):
"""
Get list of awards team has recieved.
:param team: Team to get awards of.
:param year: Year to get awards from.
:param event: Event to get awards from.
:return: List of Award objects
"""
if event:
return [Award(raw) for raw in self._get('team/%s/event/%s/awards' % (self.team_key(team), event))]
else:
if year:
return [Award(raw) for raw in self._get('team/%s/awards/%s' % (self.team_key(team), year))]
else:
return [Award(raw) for raw in self._get('team/%s/awards' % self.team_key(team))]
def team_matches(self, team, event=None, year=None, simple=False, keys=False):
"""
Get list of matches team has participated in.
:param team: Team to get matches of.
:param year: Year to get matches from.
:param event: Event to get matches from.
:param simple: Get only vital data.
:param keys: Only get match keys rather than their full data.
:return: List of string keys or Match objects.
"""
if event:
if keys:
return self._get('team/%s/event/%s/matches/keys' % (self.team_key(team), event))
else:
return [Match(raw) for raw in self._get('team/%s/event/%s/matches%s' % (self.team_key(team), event, '/simple' if simple else ''))]
elif year:
if keys:
return self._get('team/%s/matches/%s/keys' % (self.team_key(team), year))
else:
return [Match(raw) for raw in self._get('team/%s/matches/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
def team_years(self, team):
"""
Get years during which a team participated in FRC.
:param team: Key for team to get data about.
:return: List of integer years in which team participated.
"""
return self._get('team/%s/years_participated' % self.team_key(team))
def team_media(self, team, year=None, tag=None):
"""
Get media for a given team.
:param team: Team to get media of.
:param year: Year to get media from.
:param tag: Get only media with a given tag.
:return: List of Media objects.
"""
return [Media(raw) for raw in self._get('team/%s/media%s%s' % (self.team_key(team), ('/tag/%s' % tag) if tag else '', ('/%s' % year) if year else ''))]
def team_robots(self, team):
"""
Get data about a team's robots.
:param team: Key for team whose robots you want data on.
:return: List of Robot objects
"""
return [Robot(raw) for raw in self._get('team/%s/robots' % self.team_key(team))]
def team_districts(self, team):
"""
Get districts a team has competed in.
:param team: Team to get data on.
:return: List of District objects.
"""
return [District(raw) for raw in self._get('team/%s/districts' % self.team_key(team))]
def team_profiles(self, team):
"""
Get team's social media profiles linked on their TBA page.
:param team: Team to get data on.
:return: List of Profile objects.
"""
return [Profile(raw) for raw in self._get('team/%s/social_media' % self.team_key(team))]
def team_status(self, team, event):
"""
Get status of a team at an event.
:param team: Team whose status to get.
:param event: Event team is at.
:return: Status object.
"""
return Status(self._get('team/%s/event/%s/status' % (self.team_key(team), event)))
def events(self, year, simple=False, keys=False):
"""
Get a list of events in a given year.
:param year: Year to get events from.
:param keys: Get only keys of the events rather than full data.
:param simple: Get only vital data.
:return: List of string event keys or Event objects.
"""
if keys:
return self._get('events/%s/keys' % year)
else:
return [Event(raw) for raw in self._get('events/%s%s' % (year, '/simple' if simple else ''))]
def event(self, event, simple=False):
"""
Get basic information about an event.
More specific data (typically obtained with the detail_type URL parameter) can be obtained with event_alliances(), event_district_points(), event_insights(), event_oprs(), event_predictions(), and event_rankings().
:param event: Key of event for which you desire data.
:param simple: Get only vital data.
:return: A single Event object.
"""
return Event(self._get('event/%s%s' % (event, '/simple' if simple else '')))
def event_alliances(self, event):
"""
Get information about alliances at event.
:param event: Key of event to get data on.
:return: List of Alliance objects.
"""
return [Alliance(raw) for raw in self._get('event/%s/alliances' % event)]
def event_district_points(self, event):
"""
Get district point information about an event.
:param event: Key of event to get data on.
:return: Single DistrictPoints object.
"""
return DistrictPoints(self._get('event/%s/district_points' % event))
def event_insights(self, event):
"""
Get insights about an event.
:param event: Key of event to get data on.
:return: Single Insights object.
"""
return Insights(self._get('event/%s/insights' % event))
def event_oprs(self, event):
"""
Get OPRs from an event.
:param event: Key of event to get data on.
:return: Single OPRs object.
"""
return OPRs(self._get('event/%s/oprs' % event))
def event_predictions(self, event):
"""
Get predictions for matches during an event.
:param event: Key of event to get data on.
:return: Single Predictions object.
"""
return Predictions(self._get('event/%s/predictions' % event))
def event_rankings(self, event):
"""
Get rankings from an event.
:param event: Key of event to get data on.
:return: Single Rankings object.
"""
return Rankings(self._get('event/%s/rankings' % event))
def event_teams(self, event, simple=False, keys=False):
"""
Get list of teams at an event.
:param event: Event key to get data on.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects.
"""
if keys:
return self._get('event/%s/teams/keys' % event)
else:
return [Team(raw) for raw in self._get('event/%s/teams%s' % (event, '/simple' if simple else ''))]
def event_awards(self, event):
"""
Get list of awards presented at an event.
:param event: Event key to get data on.
:return: List of Award objects.
"""
return [Award(raw) for raw in self._get('event/%s/awards' % event)]
def event_matches(self, event, simple=False, keys=False):
"""
Get list of matches played at an event.
:param event: Event key to get data on.
:param keys: Return list of match keys only rather than full data on every match.
:param simple: Get only vital data.
:return: List of string keys or Match objects.
"""
if keys:
return self._get('event/%s/matches/keys' % event)
else:
return [Match(raw) for raw in self._get('event/%s/matches%s' % (event, '/simple' if simple else ''))]
def match(self, key=None, year=None, event=None, type='qm', number=None, round=None, simple=False):
"""
Get data on a match.
You may either pass the match's key directly, or pass `year`, `event`, `type`, `match` (the match number), and `round` if applicable (playoffs only). The event year may be specified as part of the event key or specified in the `year` parameter.
:param key: Key of match to get data on. First option for specifying a match (see above).
:param year: Year in which match took place. Optional; if excluded then must be included in event key.
:param event: Key of event in which match took place. Including year is optional; if excluded then must be specified in `year` parameter.
:param type: One of 'qm' (qualifier match), 'qf' (quarterfinal), 'sf' (semifinal), 'f' (final). If unspecified, 'qm' will be assumed.
:param number: Match number. For example, for qualifier 32, you'd pass 32. For Semifinal 2 round 3, you'd pass 2.
:param round: For playoff matches, you will need to specify a round.
:param simple: Get only vital data.
:return: A single Match object.
"""
if key:
return Match(self._get('match/%s%s' % (key, '/simple' if simple else '')))
else:
return Match(self._get('match/{year}{event}_{type}{number}{round}{simple}'.format(year=year if not event[0].isdigit() else '',
event=event,
type=type,
number=number,
round=('m%s' % round) if not type == 'qm' else '',
simple='/simple' if simple else '')))
def districts(self, year):
"""
Return a list of districts active.
:param year: Year from which you want to get active districts.
:return: A list of District objects.
"""
return [District(raw) for raw in self._get('districts/%s' % year)]
def district_events(self, district, simple=False, keys=False):
"""
Return list of events in a given district.
:param district: Key of district whose events you want.
:param simple: Get only vital data.
:param keys: Return list of event keys only rather than full data on every event.
:return: List of string keys or Event objects.
"""
if keys:
return self._get('district/%s/events/keys' % district)
else:
return [Event(raw) for raw in self._get('district/%s/events%s' % (district, '/simple' if simple else ''))]
def district_rankings(self, district):
"""
Return data about rankings in a given district.
:param district: Key of district to get rankings of.
:return: List of DistrictRanking objects.
"""
return [DistrictRanking(raw) for raw in self._get('district/%s/rankings' % district)]
def district_teams(self, district, simple=False, keys=False):
"""
Get list of teams in the given district.
:param district: Key for the district to get teams in.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects.
"""
if keys:
return self._get('district/%s/teams/keys' % district)
else:
return [Team(raw) for raw in self._get('district/%s/teams' % district)]
def update_event_info(self, data):
"""
Update an event's info on The Blue Alliance.
:param data: Dictionary of data to update the event with.
"""
return self._post('event/%s/info/update', json.dumps(data))
def update_event_alliances(self, data):
"""
Update an event's alliances on The Blue Alliance.
:param data: List of lists of alliances in frc#### string format.
"""
return self._post('event/%s/alliance_selections/update', json.dumps(data))
def update_event_awards(self, data):
"""
Update an event's awards on The Blue Alliance.
:param data: List of Dictionaries of award winners. Each dictionary should have a name_str for the award name, team_key in frc#### string format, and the awardee for any awards given to individuals. The last two can be null
"""
return self._post('event/%s/awards/update', json.dumps(data))
def update_event_matches(self, data):
"""
Update an event's matches on The Blue Alliance.
:param data: List of Dictionaries. More info about the match data can be found in the API docs.
"""
return self._post('event/%s/matches/update', json.dumps(data))
def delete_event_matches(self, data=None):
"""
Delete an event's matches on The Blue Alliance.
:param data: List of match keys to delete, can be ommited if you would like to delete all matches.
"""
return self._post('event/%s/matches/delete_all' if data is None else 'event/%s/matches/delete', json.dumps(self.event_key) if data is None else json.dumps(data))
def update_event_rankings(self, data):
"""
Update an event's rankings on The Blue Alliance.
:param data: Dictionary of breakdowns and rankings. Rankings are a list of dictionaries.
"""
return self._post('event/%s/rankings/update', json.dumps(data))
def update_event_team_list(self, data):
"""
Update an event's team list on The Blue Alliance.
:param data: a list of team keys in frc#### string format.
"""
return self._post('event/%s/team_list/update', json.dumps(data))
def add_match_videos(self, data):
"""
Add match videos to the respective match pages of an event on The Blue Alliance.
:param data: Dictionary of partial match keys to youtube video ids.
"""
return self._post('event/%s/match_videos/add', json.dumps(data))
def add_event_videos(self, data):
"""
Add videos to an event's media tab on The Blue Alliance.
:param data: List of youtube video ids.
"""
return self._post('event/%s/media/add', json.dumps(data))
|
frc1418/tbapy | tbapy/main.py | TBA.delete_event_matches | python | def delete_event_matches(self, data=None):
return self._post('event/%s/matches/delete_all' if data is None else 'event/%s/matches/delete', json.dumps(self.event_key) if data is None else json.dumps(data)) | Delete an event's matches on The Blue Alliance.
:param data: List of match keys to delete, can be ommited if you would like to delete all matches. | train | https://github.com/frc1418/tbapy/blob/3866d5a9971fe3dfaf1a1d83638bd6be6070f0c4/tbapy/main.py#L469-L475 | [
"def _post(self, url, data):\n \"\"\"\n Helper method: POST data to a given URL on TBA's API.\n\n :param url: URL string to post data to and hash.\n :pararm data: JSON data to post and hash.\n :return: Requests Response object.\n\n \"\"\"\n return self.session.post(self.WRITE_URL_PRE + url % self.event_key, data=data, headers={'X-TBA-Auth-Sig': md5((self.auth_secret + '/api/trusted/v1/' + url % self.event_key + data).encode('utf-8')).hexdigest()})\n"
] | class TBA:
"""
Main library class.
Contains methods for interacting with The Blue Alliance.
"""
READ_URL_PRE = 'https://www.thebluealliance.com/api/v3/'
WRITE_URL_PRE = 'https://www.thebluealliance.com/api/trusted/v1/'
session = requests.Session()
auth_secret = ''
event_key = ''
def __init__(self, auth_key, auth_id='', auth_secret='', event_key=''):
"""
Store auth key so we can reuse it as many times as we make a request.
:param auth_key: Your application authorization key, obtainable at https://www.thebluealliance.com/account.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.auth_secret = auth_secret
self.event_key = event_key
self.session.headers.update({'X-TBA-Auth-Key': auth_key, 'X-TBA-Auth-Id': auth_id})
def _get(self, url):
"""
Helper method: GET data from given URL on TBA's API.
:param url: URL string to get data from.
:return: Requested data in JSON format.
"""
return self.session.get(self.READ_URL_PRE + url).json()
def _post(self, url, data):
"""
Helper method: POST data to a given URL on TBA's API.
:param url: URL string to post data to and hash.
:pararm data: JSON data to post and hash.
:return: Requests Response object.
"""
return self.session.post(self.WRITE_URL_PRE + url % self.event_key, data=data, headers={'X-TBA-Auth-Sig': md5((self.auth_secret + '/api/trusted/v1/' + url % self.event_key + data).encode('utf-8')).hexdigest()})
@staticmethod
def team_key(identifier):
"""
Take raw team number or string key and return string key.
Used by all team-related methods to support either an integer team number or team key being passed.
(We recommend passing an integer, just because it's cleaner. But whatever works.)
:param identifier: int team number or str 'frc####'
:return: string team key in format 'frc####'
"""
return identifier if type(identifier) == str else 'frc%s' % identifier
def status(self):
"""
Get TBA API status information.
:return: Data on current status of the TBA API as APIStatus object.
"""
return APIStatus(self._get('status'))
def teams(self, page=None, year=None, simple=False, keys=False):
"""
Get list of teams.
:param page: Page of teams to view. Each page contains 500 teams.
:param year: View teams from a specific year.
:param simple: Get only vital data.
:param keys: Set to true if you only want the teams' keys rather than full data on them.
:return: List of Team objects or string keys.
"""
# If the user has requested a specific page, get that page.
if page is not None:
if year:
if keys:
return self._get('teams/%s/%s/keys' % (year, page))
else:
return [Team(raw) for raw in self._get('teams/%s/%s%s' % (year, page, '/simple' if simple else ''))]
else:
if keys:
return self._get('teams/%s/keys' % page)
else:
return [Team(raw) for raw in self._get('teams/%s%s' % (page, '/simple' if simple else ''))]
# If no page was specified, get all of them and combine.
else:
teams = []
target = 0
while True:
page_teams = self.teams(page=target, year=year, simple=simple, keys=keys)
if page_teams:
teams.extend(page_teams)
else:
break
target += 1
return teams
def team(self, team, simple=False):
"""
Get data on a single specified team.
:param team: Team to get data for.
:param simple: Get only vital data.
:return: Team object with data on specified team.
"""
return Team(self._get('team/%s%s' % (self.team_key(team), '/simple' if simple else '')))
def team_events(self, team, year=None, simple=False, keys=False):
"""
Get team events a team has participated in.
:param team: Team to get events for.
:param year: Year to get events from.
:param simple: Get only vital data.
:param keys: Get just the keys of the events. Set to True if you only need the keys of each event and not their full data.
:return: List of strings or Teams
"""
if year:
if keys:
return self._get('team/%s/events/%s/keys' % (self.team_key(team), year))
else:
return [Event(raw) for raw in self._get('team/%s/events/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
else:
if keys:
return self._get('team/%s/events/keys' % self.team_key(team))
else:
return [Event(raw) for raw in self._get('team/%s/events%s' % (self.team_key(team), '/simple' if simple else ''))]
def team_awards(self, team, year=None, event=None):
"""
Get list of awards team has recieved.
:param team: Team to get awards of.
:param year: Year to get awards from.
:param event: Event to get awards from.
:return: List of Award objects
"""
if event:
return [Award(raw) for raw in self._get('team/%s/event/%s/awards' % (self.team_key(team), event))]
else:
if year:
return [Award(raw) for raw in self._get('team/%s/awards/%s' % (self.team_key(team), year))]
else:
return [Award(raw) for raw in self._get('team/%s/awards' % self.team_key(team))]
def team_matches(self, team, event=None, year=None, simple=False, keys=False):
"""
Get list of matches team has participated in.
:param team: Team to get matches of.
:param year: Year to get matches from.
:param event: Event to get matches from.
:param simple: Get only vital data.
:param keys: Only get match keys rather than their full data.
:return: List of string keys or Match objects.
"""
if event:
if keys:
return self._get('team/%s/event/%s/matches/keys' % (self.team_key(team), event))
else:
return [Match(raw) for raw in self._get('team/%s/event/%s/matches%s' % (self.team_key(team), event, '/simple' if simple else ''))]
elif year:
if keys:
return self._get('team/%s/matches/%s/keys' % (self.team_key(team), year))
else:
return [Match(raw) for raw in self._get('team/%s/matches/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
def team_years(self, team):
"""
Get years during which a team participated in FRC.
:param team: Key for team to get data about.
:return: List of integer years in which team participated.
"""
return self._get('team/%s/years_participated' % self.team_key(team))
def team_media(self, team, year=None, tag=None):
"""
Get media for a given team.
:param team: Team to get media of.
:param year: Year to get media from.
:param tag: Get only media with a given tag.
:return: List of Media objects.
"""
return [Media(raw) for raw in self._get('team/%s/media%s%s' % (self.team_key(team), ('/tag/%s' % tag) if tag else '', ('/%s' % year) if year else ''))]
def team_robots(self, team):
"""
Get data about a team's robots.
:param team: Key for team whose robots you want data on.
:return: List of Robot objects
"""
return [Robot(raw) for raw in self._get('team/%s/robots' % self.team_key(team))]
def team_districts(self, team):
"""
Get districts a team has competed in.
:param team: Team to get data on.
:return: List of District objects.
"""
return [District(raw) for raw in self._get('team/%s/districts' % self.team_key(team))]
def team_profiles(self, team):
"""
Get team's social media profiles linked on their TBA page.
:param team: Team to get data on.
:return: List of Profile objects.
"""
return [Profile(raw) for raw in self._get('team/%s/social_media' % self.team_key(team))]
def team_status(self, team, event):
"""
Get status of a team at an event.
:param team: Team whose status to get.
:param event: Event team is at.
:return: Status object.
"""
return Status(self._get('team/%s/event/%s/status' % (self.team_key(team), event)))
def events(self, year, simple=False, keys=False):
"""
Get a list of events in a given year.
:param year: Year to get events from.
:param keys: Get only keys of the events rather than full data.
:param simple: Get only vital data.
:return: List of string event keys or Event objects.
"""
if keys:
return self._get('events/%s/keys' % year)
else:
return [Event(raw) for raw in self._get('events/%s%s' % (year, '/simple' if simple else ''))]
def event(self, event, simple=False):
"""
Get basic information about an event.
More specific data (typically obtained with the detail_type URL parameter) can be obtained with event_alliances(), event_district_points(), event_insights(), event_oprs(), event_predictions(), and event_rankings().
:param event: Key of event for which you desire data.
:param simple: Get only vital data.
:return: A single Event object.
"""
return Event(self._get('event/%s%s' % (event, '/simple' if simple else '')))
def event_alliances(self, event):
"""
Get information about alliances at event.
:param event: Key of event to get data on.
:return: List of Alliance objects.
"""
return [Alliance(raw) for raw in self._get('event/%s/alliances' % event)]
def event_district_points(self, event):
"""
Get district point information about an event.
:param event: Key of event to get data on.
:return: Single DistrictPoints object.
"""
return DistrictPoints(self._get('event/%s/district_points' % event))
def event_insights(self, event):
"""
Get insights about an event.
:param event: Key of event to get data on.
:return: Single Insights object.
"""
return Insights(self._get('event/%s/insights' % event))
def event_oprs(self, event):
"""
Get OPRs from an event.
:param event: Key of event to get data on.
:return: Single OPRs object.
"""
return OPRs(self._get('event/%s/oprs' % event))
def event_predictions(self, event):
"""
Get predictions for matches during an event.
:param event: Key of event to get data on.
:return: Single Predictions object.
"""
return Predictions(self._get('event/%s/predictions' % event))
def event_rankings(self, event):
"""
Get rankings from an event.
:param event: Key of event to get data on.
:return: Single Rankings object.
"""
return Rankings(self._get('event/%s/rankings' % event))
def event_teams(self, event, simple=False, keys=False):
"""
Get list of teams at an event.
:param event: Event key to get data on.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects.
"""
if keys:
return self._get('event/%s/teams/keys' % event)
else:
return [Team(raw) for raw in self._get('event/%s/teams%s' % (event, '/simple' if simple else ''))]
def event_awards(self, event):
"""
Get list of awards presented at an event.
:param event: Event key to get data on.
:return: List of Award objects.
"""
return [Award(raw) for raw in self._get('event/%s/awards' % event)]
def event_matches(self, event, simple=False, keys=False):
"""
Get list of matches played at an event.
:param event: Event key to get data on.
:param keys: Return list of match keys only rather than full data on every match.
:param simple: Get only vital data.
:return: List of string keys or Match objects.
"""
if keys:
return self._get('event/%s/matches/keys' % event)
else:
return [Match(raw) for raw in self._get('event/%s/matches%s' % (event, '/simple' if simple else ''))]
def match(self, key=None, year=None, event=None, type='qm', number=None, round=None, simple=False):
"""
Get data on a match.
You may either pass the match's key directly, or pass `year`, `event`, `type`, `match` (the match number), and `round` if applicable (playoffs only). The event year may be specified as part of the event key or specified in the `year` parameter.
:param key: Key of match to get data on. First option for specifying a match (see above).
:param year: Year in which match took place. Optional; if excluded then must be included in event key.
:param event: Key of event in which match took place. Including year is optional; if excluded then must be specified in `year` parameter.
:param type: One of 'qm' (qualifier match), 'qf' (quarterfinal), 'sf' (semifinal), 'f' (final). If unspecified, 'qm' will be assumed.
:param number: Match number. For example, for qualifier 32, you'd pass 32. For Semifinal 2 round 3, you'd pass 2.
:param round: For playoff matches, you will need to specify a round.
:param simple: Get only vital data.
:return: A single Match object.
"""
if key:
return Match(self._get('match/%s%s' % (key, '/simple' if simple else '')))
else:
return Match(self._get('match/{year}{event}_{type}{number}{round}{simple}'.format(year=year if not event[0].isdigit() else '',
event=event,
type=type,
number=number,
round=('m%s' % round) if not type == 'qm' else '',
simple='/simple' if simple else '')))
def districts(self, year):
"""
Return a list of districts active.
:param year: Year from which you want to get active districts.
:return: A list of District objects.
"""
return [District(raw) for raw in self._get('districts/%s' % year)]
def district_events(self, district, simple=False, keys=False):
"""
Return list of events in a given district.
:param district: Key of district whose events you want.
:param simple: Get only vital data.
:param keys: Return list of event keys only rather than full data on every event.
:return: List of string keys or Event objects.
"""
if keys:
return self._get('district/%s/events/keys' % district)
else:
return [Event(raw) for raw in self._get('district/%s/events%s' % (district, '/simple' if simple else ''))]
def district_rankings(self, district):
"""
Return data about rankings in a given district.
:param district: Key of district to get rankings of.
:return: List of DistrictRanking objects.
"""
return [DistrictRanking(raw) for raw in self._get('district/%s/rankings' % district)]
def district_teams(self, district, simple=False, keys=False):
"""
Get list of teams in the given district.
:param district: Key for the district to get teams in.
:param simple: Get only vital data.
:param keys: Return list of team keys only rather than full data on every team.
:return: List of string keys or Team objects.
"""
if keys:
return self._get('district/%s/teams/keys' % district)
else:
return [Team(raw) for raw in self._get('district/%s/teams' % district)]
def update_trusted(self, auth_id, auth_secret, event_key):
"""
Set Trusted API ID and Secret and the event key they are assigned to.
:param auth_id: Your event authorization ID, obtainable at https://www.thebluealliance.com/request/apiwrite
:param auth_secret: Your event authorization secret, obtainable at https://www.thebluealliance.com/request/apiwrite
:param event_key: The event key that is linked to the ID and secret provided.
"""
self.session.headers.update({'X-TBA-Auth-Id': auth_id})
self.auth_secret = auth_secret
self.event_key = event_key
def update_event_info(self, data):
"""
Update an event's info on The Blue Alliance.
:param data: Dictionary of data to update the event with.
"""
return self._post('event/%s/info/update', json.dumps(data))
def update_event_alliances(self, data):
"""
Update an event's alliances on The Blue Alliance.
:param data: List of lists of alliances in frc#### string format.
"""
return self._post('event/%s/alliance_selections/update', json.dumps(data))
def update_event_awards(self, data):
"""
Update an event's awards on The Blue Alliance.
:param data: List of Dictionaries of award winners. Each dictionary should have a name_str for the award name, team_key in frc#### string format, and the awardee for any awards given to individuals. The last two can be null
"""
return self._post('event/%s/awards/update', json.dumps(data))
def update_event_matches(self, data):
"""
Update an event's matches on The Blue Alliance.
:param data: List of Dictionaries. More info about the match data can be found in the API docs.
"""
return self._post('event/%s/matches/update', json.dumps(data))
def update_event_rankings(self, data):
"""
Update an event's rankings on The Blue Alliance.
:param data: Dictionary of breakdowns and rankings. Rankings are a list of dictionaries.
"""
return self._post('event/%s/rankings/update', json.dumps(data))
def update_event_team_list(self, data):
"""
Update an event's team list on The Blue Alliance.
:param data: a list of team keys in frc#### string format.
"""
return self._post('event/%s/team_list/update', json.dumps(data))
def add_match_videos(self, data):
"""
Add match videos to the respective match pages of an event on The Blue Alliance.
:param data: Dictionary of partial match keys to youtube video ids.
"""
return self._post('event/%s/match_videos/add', json.dumps(data))
def add_event_videos(self, data):
"""
Add videos to an event's media tab on The Blue Alliance.
:param data: List of youtube video ids.
"""
return self._post('event/%s/media/add', json.dumps(data))
|
wadda/gps3 | gps3/gps3threaded.py | GPS3mechanism.stream_data | python | def stream_data(self, host=HOST, port=GPSD_PORT, enable=True, gpsd_protocol=PROTOCOL, devicepath=None):
self.socket.connect(host, port)
self.socket.watch(enable, gpsd_protocol, devicepath) | Connect and command, point and shoot, flail and bail | train | https://github.com/wadda/gps3/blob/91adcd7073b891b135b2a46d039ce2125cf09a09/gps3/gps3threaded.py#L32-L36 | [
"def connect(self, host=HOST, port=GPSD_PORT):\n \"\"\"Connect to a host on a given port.\n Arguments:\n host: default host='127.0.0.1'\n port: default port=2947\n \"\"\"\n for alotta_stuff in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):\n family, socktype, proto, _canonname, host_port = alotta_stuff\n try:\n self.streamSock = socket.socket(family, socktype, proto)\n self.streamSock.connect(host_port)\n self.streamSock.setblocking(False)\n except (OSError, IOError) as error:\n sys.stderr.write('\\r\\nGPSDSocket.connect exception is--> {}'.format(error))\n sys.stderr.write('\\r\\nGPS3 gpsd connection at \\'{0}\\' on port \\'{1}\\' failed\\r\\n'.format(host, port))\n",
"def watch(self, enable=True, gpsd_protocol=PROTOCOL, devicepath=None):\n \"\"\"watch gpsd in various gpsd_protocols or devices.\n Arguments:\n enable: (bool) stream data to socket\n gpsd_protocol: (str) 'json' | 'nmea' | 'rare' | 'raw' | 'scaled' | 'split24' | 'pps'\n devicepath: (str) device path - '/dev/ttyUSBn' for some number n or '/dev/whatever_works'\n Returns:\n command: (str) e.g., '?WATCH={\"enable\":true,\"json\":true};'\n \"\"\"\n # N.B.: 'timing' requires special attention, as it is undocumented and lives with dragons.\n command = '?WATCH={{\"enable\":true,\"{0}\":true}}'.format(gpsd_protocol)\n\n if gpsd_protocol == 'rare': # 1 for a channel, gpsd reports the unprocessed NMEA or AIVDM data stream\n command = command.replace('\"rare\":true', '\"raw\":1')\n if gpsd_protocol == 'raw': # 2 channel that processes binary data, received data verbatim without hex-dumping.\n command = command.replace('\"raw\":true', '\"raw\",2')\n if not enable:\n command = command.replace('true', 'false') # sets -all- command values false .\n if devicepath:\n command = command.replace('}', ',\"device\":\"') + devicepath + '\"}'\n\n return self.send(command)\n"
] | class GPS3mechanism(object):
"""Create threaded data stream as updated object attributes
"""
def __init__(self):
self.socket = gps3.GPSDSocket()
self.data_stream = gps3.DataStream()
def unpack_data(self, usnap=.2): # 2/10th second sleep between empty requests
""" Iterates over socket response and unpacks values of object attributes.
Sleeping here has the greatest response to cpu cycles short of blocking sockets
"""
for new_data in self.socket:
if new_data:
self.data_stream.unpack(new_data)
else:
sleep(usnap) # Sleep in seconds after an empty look up.
def run_thread(self, usnap=.2, daemon=True):
"""run thread with data
"""
# self.stream_data() # Unless other changes are made this would limit to localhost only.
try:
gps3_data_thread = Thread(target=self.unpack_data, args={usnap: usnap}, daemon=daemon)
except TypeError:
# threading.Thread() only accepts daemon argument in Python 3.3
gps3_data_thread = Thread(target=self.unpack_data, args={usnap: usnap})
gps3_data_thread.setDaemon(daemon)
gps3_data_thread.start()
def stop(self):
""" Stop as much as possible, as gracefully as possible, if possible.
"""
self.stream_data(enable=False) # Stop data stream, thread is on its own so far.
print('Process stopped by user')
print('Good bye.') # You haven't gone anywhere, re-start it all with 'self.stream_data()'
|
wadda/gps3 | gps3/gps3threaded.py | GPS3mechanism.unpack_data | python | def unpack_data(self, usnap=.2): # 2/10th second sleep between empty requests
for new_data in self.socket:
if new_data:
self.data_stream.unpack(new_data)
else:
sleep(usnap) | Iterates over socket response and unpacks values of object attributes.
Sleeping here has the greatest response to cpu cycles short of blocking sockets | train | https://github.com/wadda/gps3/blob/91adcd7073b891b135b2a46d039ce2125cf09a09/gps3/gps3threaded.py#L38-L46 | null | class GPS3mechanism(object):
"""Create threaded data stream as updated object attributes
"""
def __init__(self):
self.socket = gps3.GPSDSocket()
self.data_stream = gps3.DataStream()
def stream_data(self, host=HOST, port=GPSD_PORT, enable=True, gpsd_protocol=PROTOCOL, devicepath=None):
""" Connect and command, point and shoot, flail and bail
"""
self.socket.connect(host, port)
self.socket.watch(enable, gpsd_protocol, devicepath)
# Sleep in seconds after an empty look up.
def run_thread(self, usnap=.2, daemon=True):
"""run thread with data
"""
# self.stream_data() # Unless other changes are made this would limit to localhost only.
try:
gps3_data_thread = Thread(target=self.unpack_data, args={usnap: usnap}, daemon=daemon)
except TypeError:
# threading.Thread() only accepts daemon argument in Python 3.3
gps3_data_thread = Thread(target=self.unpack_data, args={usnap: usnap})
gps3_data_thread.setDaemon(daemon)
gps3_data_thread.start()
def stop(self):
""" Stop as much as possible, as gracefully as possible, if possible.
"""
self.stream_data(enable=False) # Stop data stream, thread is on its own so far.
print('Process stopped by user')
print('Good bye.') # You haven't gone anywhere, re-start it all with 'self.stream_data()'
|
wadda/gps3 | gps3/gps3threaded.py | GPS3mechanism.run_thread | python | def run_thread(self, usnap=.2, daemon=True):
# self.stream_data() # Unless other changes are made this would limit to localhost only.
try:
gps3_data_thread = Thread(target=self.unpack_data, args={usnap: usnap}, daemon=daemon)
except TypeError:
# threading.Thread() only accepts daemon argument in Python 3.3
gps3_data_thread = Thread(target=self.unpack_data, args={usnap: usnap})
gps3_data_thread.setDaemon(daemon)
gps3_data_thread.start() | run thread with data | train | https://github.com/wadda/gps3/blob/91adcd7073b891b135b2a46d039ce2125cf09a09/gps3/gps3threaded.py#L48-L58 | null | class GPS3mechanism(object):
"""Create threaded data stream as updated object attributes
"""
def __init__(self):
self.socket = gps3.GPSDSocket()
self.data_stream = gps3.DataStream()
def stream_data(self, host=HOST, port=GPSD_PORT, enable=True, gpsd_protocol=PROTOCOL, devicepath=None):
""" Connect and command, point and shoot, flail and bail
"""
self.socket.connect(host, port)
self.socket.watch(enable, gpsd_protocol, devicepath)
def unpack_data(self, usnap=.2): # 2/10th second sleep between empty requests
""" Iterates over socket response and unpacks values of object attributes.
Sleeping here has the greatest response to cpu cycles short of blocking sockets
"""
for new_data in self.socket:
if new_data:
self.data_stream.unpack(new_data)
else:
sleep(usnap) # Sleep in seconds after an empty look up.
def stop(self):
""" Stop as much as possible, as gracefully as possible, if possible.
"""
self.stream_data(enable=False) # Stop data stream, thread is on its own so far.
print('Process stopped by user')
print('Good bye.') # You haven't gone anywhere, re-start it all with 'self.stream_data()'
|
wadda/gps3 | examples/human.py | add_args | python | def add_args():
parser = argparse.ArgumentParser()
parser.add_argument('-host', action='store', dest='host', default='127.0.0.1', help='DEFAULT "127.0.0.1"')
parser.add_argument('-port', action='store', dest='port', default='2947', help='DEFAULT 2947', type=int)
parser.add_argument('-json', dest='gpsd_protocol', const='json', action='store_const', default='json', help='DEFAULT JSON objects */')
parser.add_argument('-device', dest='devicepath', action='store', help='alternate devicepath e.g.,"-device /dev/ttyUSB4"')
# Infrequently used options
parser.add_argument('-nmea', dest='gpsd_protocol', const='nmea', action='store_const', help='*/ output in NMEA */')
# parser.add_argument('-rare', dest='gpsd_protocol', const='rare', action='store_const', help='*/ output of packets in hex */')
# parser.add_argument('-raw', dest='gpsd_protocol', const='raw', action='store_const', help='*/ output of raw packets */')
# parser.add_argument('-scaled', dest='gpsd_protocol', const='scaled', action='store_const', help='*/ scale output to floats */')
# parser.add_argument('-timing', dest='gpsd_protocol', const='timing', action='store_const', help='*/ timing information */')
# parser.add_argument('-split24', dest='gpsd_protocol', const='split24', action='store_const', help='*/ split AIS Type 24s */')
# parser.add_argument('-pps', dest='gpsd_protocol', const='pps', action='store_const', help='*/ enable PPS JSON */')
parser.add_argument('-v', '--version', action='version', version='Version: {}'.format(__version__))
cli_args = parser.parse_args()
return cli_args | Adds commandline arguments and formatted Help | train | https://github.com/wadda/gps3/blob/91adcd7073b891b135b2a46d039ce2125cf09a09/examples/human.py#L39-L57 | null | #!/usr/bin/env python3
# coding=utf-8
"""
human.py is to showcase gps3.py, a Python 2.7-3.5 GPSD interface
Defaults host='127.0.0.1', port=2947, gpsd_protocol='json'
Toggle Lat/Lon form with '0', '1', '2', '3' for RAW, DDD, DMM, DMS
Toggle units with '0', 'm', 'i', 'n', for 'raw', Metric, Imperial, Nautical
Toggle gpsd protocol with 'j', 'a' for 'json', 'nmea' displays
Quit with 'q' or '^c'
python[X] human.py --help for list of commandline options.
"""
import argparse
import curses
import sys
from datetime import datetime
from math import modf
from time import sleep
from gps3 import gps3 # Moe, remember to CHANGE to straight 'import gps3' if not installed,
# or check which Python version it's installed in. You forget sometimes.
__author__ = 'Moe'
__copyright__ = 'Copyright 2015-2016 Moe'
__license__ = 'MIT'
__version__ = '0.33.2'
CONVERSION = {'raw': (1, 1, 'm/s', 'meters'),
'metric': (3.6, 1, 'kph', 'meters'),
'nautical': (1.9438445, 1, 'kts', 'meters'),
'imperial': (2.2369363, 3.2808399, 'mph', 'feet')}
def satellites_used(feed):
"""Counts number of satellites used in calculation from total visible satellites
Arguments:
feed feed=data_stream.TPV['satellites']
Returns:
total_satellites(int):
used_satellites (int):
"""
total_satellites = 0
used_satellites = 0
if not isinstance(feed, list):
return 0, 0
for satellites in feed:
total_satellites += 1
if satellites['used'] is True:
used_satellites += 1
return total_satellites, used_satellites
def make_time(gps_datetime_str):
"""Makes datetime object from string object"""
if not 'n/a' == gps_datetime_str:
datetime_string = gps_datetime_str
datetime_object = datetime.strptime(datetime_string, "%Y-%m-%dT%H:%M:%S")
return datetime_object
def elapsed_time_from(start_time):
"""calculate time delta from latched time and current time"""
time_then = make_time(start_time)
time_now = datetime.utcnow().replace(microsecond=0)
if time_then is None:
return
delta_t = time_now - time_then
return delta_t
def unit_conversion(thing, units, length=False):
"""converts base data between metric, imperial, or nautical units"""
if 'n/a' == thing:
return 'n/a'
try:
thing = round(thing * CONVERSION[units][0 + length], 2)
except TypeError:
thing = 'fubar'
return thing, CONVERSION[units][2 + length]
def sexagesimal(sexathang, latlon, form='DDD'):
"""
Arguments:
sexathang: (float), -15.560615 (negative = South), -146.241122 (negative = West) # Apataki Carenage
latlon: (str) 'lat' | 'lon'
form: (str), 'DDD'|'DMM'|'DMS', decimal Degrees, decimal Minutes, decimal Seconds
Returns:
latitude: e.g., '15°33'38.214"S'
longitude: e.g., '146°14'28.039"W'
"""
cardinal = 'O'
if not isinstance(sexathang, float):
sexathang = 'n/a'
return sexathang
if latlon == 'lon':
if sexathang > 0.0:
cardinal = 'E'
if sexathang < 0.0:
cardinal = 'W'
if latlon == 'lat':
if sexathang > 0.0:
cardinal = 'N'
if sexathang < 0.0:
cardinal = 'S'
if 'RAW' in form:
sexathang = '{0:4.9f}°'.format(sexathang) # 4 to allow -100° through -179.999999° to -180°
return sexathang
if 'DDD' in form:
sexathang = '{0:3.6f}°'.format(abs(sexathang))
if 'DMM' in form:
_latlon = abs(sexathang)
minute_latlon, degree_latlon = modf(_latlon)
minute_latlon *= 60
sexathang = '{0}°{1:2.5f}\''.format(int(degree_latlon), minute_latlon)
if 'DMS' in form:
_latlon = abs(sexathang)
minute_latlon, degree_latlon = modf(_latlon)
second_latlon, minute_latlon = modf(minute_latlon * 60)
second_latlon *= 60.0
sexathang = '{0}°{1}\'{2:2.3f}\"'.format(int(degree_latlon), int(minute_latlon), second_latlon)
return sexathang + cardinal
def show_human():
"""Curses terminal with standard outputs """
form = 'RAW'
units = 'raw'
data_window = curses.newwin(19, 39, 0, 0)
sat_window = curses.newwin(14, 39, 0, 40)
device_window = curses.newwin(6, 39, 13, 40)
packet_window = curses.newwin(7, 79, 19, 0)
for new_data in gpsd_socket:
if new_data:
data_stream.unpack(new_data)
screen.nodelay(1)
key_press = screen.getch()
if key_press == ord('q'): # quit
shut_down()
elif key_press == ord('a'): # NMEA
gpsd_socket.watch(enable=False, gpsd_protocol='json')
gpsd_socket.watch(gpsd_protocol='nmea')
show_nmea()
elif key_press == ord('0'): # raw
form = 'RAW'
units = 'raw'
data_window.clear()
elif key_press == ord('1'): # DDD
form = 'DDD'
data_window.clear()
elif key_press == ord('2'): # DMM
form = 'DMM'
data_window.clear()
elif key_press == ord('3'): # DMS
form = 'DMS'
data_window.clear()
elif key_press == ord('m'): # Metric
units = 'metric'
data_window.clear()
elif key_press == ord('i'): # Imperial
units = 'imperial'
data_window.clear()
elif key_press == ord('n'): # Nautical
units = 'nautical'
data_window.clear()
elif key_press == ord('d'): # Refresh device listings
gpsd_socket.send('?DEVICES;')
device_window.clear()
data_window.box()
data_window.addstr(0, 2, 'GPS3 Python {}.{}.{} GPSD Interface'.format(*sys.version_info), curses.A_BOLD)
data_window.addstr(1, 2, 'Time: {time} '.format(**data_stream.TPV))
data_window.addstr(2, 2, 'Latitude: {} '.format(sexagesimal(data_stream.TPV['lat'], 'lat', form)))
data_window.addstr(3, 2, 'Longitude: {} '.format(sexagesimal(data_stream.TPV['lon'], 'lon', form)))
data_window.addstr(4, 2, 'Altitude: {} {}'.format(*unit_conversion(data_stream.TPV['alt'], units, length=True)))
data_window.addstr(5, 2, 'Speed: {} {}'.format(*unit_conversion(data_stream.TPV['speed'], units)))
data_window.addstr(6, 2, 'Heading: {track}° True'.format(**data_stream.TPV))
data_window.addstr(7, 2, 'Climb: {} {}/s'.format(*unit_conversion(data_stream.TPV['climb'], units, length=True)))
data_window.addstr(8, 2, 'Status: {mode:<}D '.format(**data_stream.TPV))
data_window.addstr(9, 2, 'Latitude Err: +/-{} {} '.format(*unit_conversion(data_stream.TPV['epx'], units, length=True)))
data_window.addstr(10, 2, 'Longitude Err: +/-{} {}'.format(*unit_conversion(data_stream.TPV['epy'], units, length=True)))
data_window.addstr(11, 2, 'Altitude Err: +/-{} {} '.format(*unit_conversion(data_stream.TPV['epv'], units, length=True)))
data_window.addstr(12, 2, 'Course Err: +/-{epc} '.format(**data_stream.TPV), curses.A_DIM)
data_window.addstr(13, 2, 'Speed Err: +/-{} {} '.format(*unit_conversion(data_stream.TPV['eps'], units)), curses.A_DIM)
data_window.addstr(14, 2, 'Time Offset: +/-{ept} '.format(**data_stream.TPV), curses.A_DIM)
data_window.addstr(15, 2, 'gdop:{gdop} pdop:{pdop} tdop:{tdop}'.format(**data_stream.SKY))
data_window.addstr(16, 2, 'ydop:{ydop} xdop:{xdop} '.format(**data_stream.SKY))
data_window.addstr(17, 2, 'vdop:{vdop} hdop:{hdop} '.format(**data_stream.SKY))
sat_window.clear()
sat_window.box()
sat_window.addstr(0, 2, 'Using {0[1]}/{0[0]} satellites (truncated)'.format(satellites_used(data_stream.SKY['satellites'])))
sat_window.addstr(1, 2, 'PRN Elev Azimuth SNR Used')
line = 2
if isinstance(data_stream.SKY['satellites'], list): # Nested lists of dictionaries are strings before data is present
for sats in data_stream.SKY['satellites'][0:10]:
sat_window.addstr(line, 2, '{PRN:>2} {el:>6} {az:>5} {ss:>5} {used:}'.format(**sats))
line += 1
# device_window.clear()
device_window.box()
if not isinstance(data_stream.DEVICES['devices'], list): # Local machines need a 'device' kick start
gpsd_socket.send('?DEVICES;') # to have valid data I don't know why.
if isinstance(data_stream.DEVICES['devices'], list): # Nested lists of dictionaries are strings before data is present.
for gizmo in data_stream.DEVICES['devices']:
start_time, _uicroseconds = gizmo['activated'].split('.') # Remove '.000Z'
elapsed = elapsed_time_from(start_time)
device_window.addstr(1, 2, 'Activated: {}'.format(gizmo['activated']))
device_window.addstr(2, 2, 'Host:{0.host}:{0.port} {1}'.format(args, gizmo['path']))
device_window.addstr(3, 2, 'Driver:{driver} BPS:{bps}'.format(**gizmo))
device_window.addstr(4, 2, 'Cycle:{0} Hz {1!s:>14} Elapsed'.format(1 / gizmo['cycle'], elapsed))
packet_window.clear()
# packet_window.border(0)
packet_window.scrollok(True)
packet_window.addstr(0, 0, '{}'.format(new_data))
# sleep(.9)
data_window.refresh()
sat_window.refresh()
device_window.refresh()
packet_window.refresh()
else: # Reduced CPU cycles with the non-blocking socket read, by putting 'sleep' here, rather than hitting
sleep(.1) # the socket fast and furious with hundreds of empty checks between sleeps.
def show_nmea():
"""NMEA output in curses terminal"""
data_window = curses.newwin(24, 79, 0, 0)
for new_data in gpsd_socket:
if new_data:
screen.nodelay(1)
key_press = screen.getch()
if key_press == ord('q'):
shut_down()
elif key_press == ord('j'): # raw
gpsd_socket.watch(enable=False, gpsd_protocol='nmea')
gpsd_socket.watch(gpsd_protocol='json')
show_human()
data_window.border(0)
data_window.addstr(0, 2, 'GPS3 Python {}.{}.{} GPSD Interface Showing NMEA protocol'.format(*sys.version_info), curses.A_BOLD)
data_window.addstr(2, 2, '{}'.format(gpsd_socket.response))
data_window.refresh()
else:
sleep(.1)
def shut_down():
"""Closes connection and restores terminal"""
curses.nocbreak()
curses.echo()
curses.endwin()
gpsd_socket.close()
print('Keyboard interrupt received\nTerminated by user\nGood Bye.\n')
sys.exit(1)
if __name__ == '__main__':
args = add_args()
gpsd_socket = gps3.GPSDSocket()
gpsd_socket.connect(args.host, args.port)
gpsd_socket.watch(gpsd_protocol=args.gpsd_protocol)
data_stream = gps3.DataStream()
screen = curses.initscr()
screen.clear()
screen.scrollok(True)
curses.noecho()
curses.curs_set(0)
curses.cbreak()
try:
if 'json' in args.gpsd_protocol:
show_human()
if 'nmea' in args.gpsd_protocol:
show_nmea()
except KeyboardInterrupt:
shut_down()
except (OSError, IOError) as error:
gpsd_socket.close()
curses.nocbreak()
curses.echo()
curses.endwin()
sys.stderr.write('\rHUMAN error--> {}'.format(error))
sys.stderr.write('\rhuman connection to gpsd at \'{0}\' on port \'{1}\' failed.\n'.format(args.host, args.port))
sys.exit(1) # TODO: gpsd existence check and start
#
# Someday a cleaner Python interface will live here
#
# End
|
wadda/gps3 | examples/human.py | satellites_used | python | def satellites_used(feed):
total_satellites = 0
used_satellites = 0
if not isinstance(feed, list):
return 0, 0
for satellites in feed:
total_satellites += 1
if satellites['used'] is True:
used_satellites += 1
return total_satellites, used_satellites | Counts number of satellites used in calculation from total visible satellites
Arguments:
feed feed=data_stream.TPV['satellites']
Returns:
total_satellites(int):
used_satellites (int): | train | https://github.com/wadda/gps3/blob/91adcd7073b891b135b2a46d039ce2125cf09a09/examples/human.py#L60-L78 | null | #!/usr/bin/env python3
# coding=utf-8
"""
human.py is to showcase gps3.py, a Python 2.7-3.5 GPSD interface
Defaults host='127.0.0.1', port=2947, gpsd_protocol='json'
Toggle Lat/Lon form with '0', '1', '2', '3' for RAW, DDD, DMM, DMS
Toggle units with '0', 'm', 'i', 'n', for 'raw', Metric, Imperial, Nautical
Toggle gpsd protocol with 'j', 'a' for 'json', 'nmea' displays
Quit with 'q' or '^c'
python[X] human.py --help for list of commandline options.
"""
import argparse
import curses
import sys
from datetime import datetime
from math import modf
from time import sleep
from gps3 import gps3 # Moe, remember to CHANGE to straight 'import gps3' if not installed,
# or check which Python version it's installed in. You forget sometimes.
__author__ = 'Moe'
__copyright__ = 'Copyright 2015-2016 Moe'
__license__ = 'MIT'
__version__ = '0.33.2'
CONVERSION = {'raw': (1, 1, 'm/s', 'meters'),
'metric': (3.6, 1, 'kph', 'meters'),
'nautical': (1.9438445, 1, 'kts', 'meters'),
'imperial': (2.2369363, 3.2808399, 'mph', 'feet')}
def add_args(argv=None):
    """Adds commandline arguments and formatted Help.

    Arguments:
        argv: optional list of argument strings to parse; when None (the
              default) argparse falls back to sys.argv[1:].  Passing an
              explicit list keeps the function usable from tests and other
              callers without touching process state.

    Returns:
        argparse.Namespace with host, port, gpsd_protocol and devicepath.
        Note: the string default '2947' is converted to int by argparse
        because the option declares type=int.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-host', action='store', dest='host', default='127.0.0.1', help='DEFAULT "127.0.0.1"')
    parser.add_argument('-port', action='store', dest='port', default='2947', help='DEFAULT 2947', type=int)
    parser.add_argument('-json', dest='gpsd_protocol', const='json', action='store_const', default='json', help='DEFAULT JSON objects */')
    parser.add_argument('-device', dest='devicepath', action='store', help='alternate devicepath e.g.,"-device /dev/ttyUSB4"')
    # Infrequently used options
    parser.add_argument('-nmea', dest='gpsd_protocol', const='nmea', action='store_const', help='*/ output in NMEA */')
    # parser.add_argument('-rare', dest='gpsd_protocol', const='rare', action='store_const', help='*/ output of packets in hex */')
    # parser.add_argument('-raw', dest='gpsd_protocol', const='raw', action='store_const', help='*/ output of raw packets */')
    # parser.add_argument('-scaled', dest='gpsd_protocol', const='scaled', action='store_const', help='*/ scale output to floats */')
    # parser.add_argument('-timing', dest='gpsd_protocol', const='timing', action='store_const', help='*/ timing information */')
    # parser.add_argument('-split24', dest='gpsd_protocol', const='split24', action='store_const', help='*/ split AIS Type 24s */')
    # parser.add_argument('-pps', dest='gpsd_protocol', const='pps', action='store_const', help='*/ enable PPS JSON */')
    parser.add_argument('-v', '--version', action='version', version='Version: {}'.format(__version__))
    cli_args = parser.parse_args(argv)
    return cli_args
def make_time(gps_datetime_str):
    """Makes datetime object from string object.

    Arguments:
        gps_datetime_str: timestamp string 'YYYY-mm-ddTHH:MM:SS' (callers
            strip any fractional '.000Z' suffix first), or the placeholder
            'n/a' before a fix is available.

    Returns:
        naive datetime parsed from the string, or None for 'n/a'
        (elapsed_time_from relies on the None return).

    Raises:
        ValueError: if the string is neither 'n/a' nor in the expected format.
    """
    # Guard clause replaces the original implicit fall-through-to-None and
    # the 'not "n/a" == s' Yoda comparison.
    if gps_datetime_str == 'n/a':
        return None
    return datetime.strptime(gps_datetime_str, "%Y-%m-%dT%H:%M:%S")
def elapsed_time_from(start_time):
    """calculate time delta from latched time and current time

    Returns a timedelta between now (UTC, whole seconds) and the parsed
    start_time, or None when start_time is the 'n/a' placeholder.
    """
    latched = make_time(start_time)
    now = datetime.utcnow().replace(microsecond=0)
    if latched is None:
        return None
    return now - latched
def unit_conversion(thing, units, length=False):
    """converts base data between metric, imperial, or nautical units

    `length` selects the distance factor/label (True) instead of the
    speed factor/label (False); the boolean arithmetic indexes into the
    4-tuples of the module-level CONVERSION table.

    Returns the bare string 'n/a' for missing data, otherwise a
    (value, unit-label) pair where value is 'fubar' when `thing` is not
    numeric.
    """
    if thing == 'n/a':
        return 'n/a'
    factor = CONVERSION[units][0 + length]
    label = CONVERSION[units][2 + length]
    try:
        converted = round(thing * factor, 2)
    except TypeError:
        converted = 'fubar'
    return converted, label
def sexagesimal(sexathang, latlon, form='DDD'):
    """
    Arguments:
        sexathang: (float), -15.560615 (negative = South), -146.241122 (negative = West) # Apataki Carenage
        latlon: (str) 'lat' | 'lon'
        form: (str), 'DDD'|'DMM'|'DMS', decimal Degrees, decimal Minutes, decimal Seconds
    Returns:
        latitude: e.g., '15°33'38.214"S'
        longitude: e.g., '146°14'28.039"W'
        'n/a' when the input is not a float; RAW form keeps the sign and
        carries no hemisphere suffix.
    """
    if not isinstance(sexathang, float):
        return 'n/a'
    # Hemisphere letter; 'O' (origin) when exactly 0.0 or latlon unrecognized.
    if sexathang > 0.0:
        cardinal = {'lat': 'N', 'lon': 'E'}.get(latlon, 'O')
    elif sexathang < 0.0:
        cardinal = {'lat': 'S', 'lon': 'W'}.get(latlon, 'O')
    else:
        cardinal = 'O'
    if 'RAW' in form:
        # Signed raw degrees; 4 allows -100° through -179.999999° to -180°.
        return '{0:4.9f}°'.format(sexathang)
    if 'DDD' in form:
        sexathang = '{0:3.6f}°'.format(abs(sexathang))
    if 'DMM' in form:
        fraction, degrees = modf(abs(sexathang))
        sexathang = '{0}°{1:2.5f}\''.format(int(degrees), fraction * 60)
    if 'DMS' in form:
        fraction, degrees = modf(abs(sexathang))
        seconds, minutes = modf(fraction * 60)
        sexathang = '{0}°{1}\'{2:2.3f}"'.format(int(degrees), int(minutes), seconds * 60.0)
    return sexathang + cardinal
def show_human():
    """Curses terminal with standard outputs

    Event loop for the JSON-protocol view: packets read from the module-level
    gpsd_socket are unpacked into data_stream, then four curses windows are
    redrawn (position/fix data, satellite table, device info, raw packet).
    Single key presses toggle the lat/lon form ('0'-'3'), units ('m','i','n'),
    switch to the NMEA view ('a'), refresh devices ('d'), or quit ('q').
    Does not return normally; exits via shut_down() or by tail-calling
    show_nmea().
    """
    form = 'RAW'    # lat/lon display form: RAW | DDD | DMM | DMS
    units = 'raw'   # unit-system key into the CONVERSION table
    data_window = curses.newwin(19, 39, 0, 0)
    sat_window = curses.newwin(14, 39, 0, 40)
    device_window = curses.newwin(6, 39, 13, 40)
    packet_window = curses.newwin(7, 79, 19, 0)
    for new_data in gpsd_socket:
        if new_data:
            data_stream.unpack(new_data)
            screen.nodelay(1)  # make getch() non-blocking
            key_press = screen.getch()
            if key_press == ord('q'):  # quit
                shut_down()
            elif key_press == ord('a'):  # NMEA
                gpsd_socket.watch(enable=False, gpsd_protocol='json')
                gpsd_socket.watch(gpsd_protocol='nmea')
                show_nmea()
            elif key_press == ord('0'):  # raw
                form = 'RAW'
                units = 'raw'
                data_window.clear()
            elif key_press == ord('1'):  # DDD
                form = 'DDD'
                data_window.clear()
            elif key_press == ord('2'):  # DMM
                form = 'DMM'
                data_window.clear()
            elif key_press == ord('3'):  # DMS
                form = 'DMS'
                data_window.clear()
            elif key_press == ord('m'):  # Metric
                units = 'metric'
                data_window.clear()
            elif key_press == ord('i'):  # Imperial
                units = 'imperial'
                data_window.clear()
            elif key_press == ord('n'):  # Nautical
                units = 'nautical'
                data_window.clear()
            elif key_press == ord('d'):  # Refresh device listings
                gpsd_socket.send('?DEVICES;')
                device_window.clear()
            # Fix/position window: TPV and SKY fields formatted per current
            # form/units selection.
            data_window.box()
            data_window.addstr(0, 2, 'GPS3 Python {}.{}.{} GPSD Interface'.format(*sys.version_info), curses.A_BOLD)
            data_window.addstr(1, 2, 'Time: {time} '.format(**data_stream.TPV))
            data_window.addstr(2, 2, 'Latitude: {} '.format(sexagesimal(data_stream.TPV['lat'], 'lat', form)))
            data_window.addstr(3, 2, 'Longitude: {} '.format(sexagesimal(data_stream.TPV['lon'], 'lon', form)))
            data_window.addstr(4, 2, 'Altitude: {} {}'.format(*unit_conversion(data_stream.TPV['alt'], units, length=True)))
            data_window.addstr(5, 2, 'Speed: {} {}'.format(*unit_conversion(data_stream.TPV['speed'], units)))
            data_window.addstr(6, 2, 'Heading: {track}° True'.format(**data_stream.TPV))
            data_window.addstr(7, 2, 'Climb: {} {}/s'.format(*unit_conversion(data_stream.TPV['climb'], units, length=True)))
            data_window.addstr(8, 2, 'Status: {mode:<}D '.format(**data_stream.TPV))
            data_window.addstr(9, 2, 'Latitude Err: +/-{} {} '.format(*unit_conversion(data_stream.TPV['epx'], units, length=True)))
            data_window.addstr(10, 2, 'Longitude Err: +/-{} {}'.format(*unit_conversion(data_stream.TPV['epy'], units, length=True)))
            data_window.addstr(11, 2, 'Altitude Err: +/-{} {} '.format(*unit_conversion(data_stream.TPV['epv'], units, length=True)))
            data_window.addstr(12, 2, 'Course Err: +/-{epc} '.format(**data_stream.TPV), curses.A_DIM)
            data_window.addstr(13, 2, 'Speed Err: +/-{} {} '.format(*unit_conversion(data_stream.TPV['eps'], units)), curses.A_DIM)
            data_window.addstr(14, 2, 'Time Offset: +/-{ept} '.format(**data_stream.TPV), curses.A_DIM)
            data_window.addstr(15, 2, 'gdop:{gdop} pdop:{pdop} tdop:{tdop}'.format(**data_stream.SKY))
            data_window.addstr(16, 2, 'ydop:{ydop} xdop:{xdop} '.format(**data_stream.SKY))
            data_window.addstr(17, 2, 'vdop:{vdop} hdop:{hdop} '.format(**data_stream.SKY))
            # Satellite window: at most the first 10 satellites fit.
            sat_window.clear()
            sat_window.box()
            sat_window.addstr(0, 2, 'Using {0[1]}/{0[0]} satellites (truncated)'.format(satellites_used(data_stream.SKY['satellites'])))
            sat_window.addstr(1, 2, 'PRN Elev Azimuth SNR Used')
            line = 2
            if isinstance(data_stream.SKY['satellites'], list):  # Nested lists of dictionaries are strings before data is present
                for sats in data_stream.SKY['satellites'][0:10]:
                    sat_window.addstr(line, 2, '{PRN:>2} {el:>6} {az:>5} {ss:>5} {used:}'.format(**sats))
                    line += 1
            # device_window.clear()
            device_window.box()
            if not isinstance(data_stream.DEVICES['devices'], list):  # Local machines need a 'device' kick start
                gpsd_socket.send('?DEVICES;')  # to have valid data I don't know why.
            if isinstance(data_stream.DEVICES['devices'], list):  # Nested lists of dictionaries are strings before data is present.
                for gizmo in data_stream.DEVICES['devices']:
                    start_time, _uicroseconds = gizmo['activated'].split('.')  # Remove '.000Z'
                    elapsed = elapsed_time_from(start_time)
                    device_window.addstr(1, 2, 'Activated: {}'.format(gizmo['activated']))
                    device_window.addstr(2, 2, 'Host:{0.host}:{0.port} {1}'.format(args, gizmo['path']))
                    device_window.addstr(3, 2, 'Driver:{driver} BPS:{bps}'.format(**gizmo))
                    device_window.addstr(4, 2, 'Cycle:{0} Hz {1!s:>14} Elapsed'.format(1 / gizmo['cycle'], elapsed))
            # Raw-packet window shows the JSON sentence just processed.
            packet_window.clear()
            # packet_window.border(0)
            packet_window.scrollok(True)
            packet_window.addstr(0, 0, '{}'.format(new_data))
            # sleep(.9)
            data_window.refresh()
            sat_window.refresh()
            device_window.refresh()
            packet_window.refresh()
        else:  # Reduced CPU cycles with the non-blocking socket read, by putting 'sleep' here, rather than hitting
            sleep(.1)  # the socket fast and furious with hundreds of empty checks between sleeps.
def show_nmea():
    """NMEA output in curses terminal

    Event loop for the NMEA-protocol view: each raw sentence read from the
    module-level gpsd_socket is drawn into a single curses window.  'q'
    quits via shut_down(); 'j' switches back to the JSON view by
    tail-calling show_human().  Does not return normally.
    """
    data_window = curses.newwin(24, 79, 0, 0)
    for new_data in gpsd_socket:
        if new_data:
            screen.nodelay(1)  # make getch() non-blocking
            key_press = screen.getch()
            if key_press == ord('q'):
                shut_down()
            elif key_press == ord('j'):  # switch back to the JSON view
                gpsd_socket.watch(enable=False, gpsd_protocol='nmea')
                gpsd_socket.watch(gpsd_protocol='json')
                show_human()
            data_window.border(0)
            data_window.addstr(0, 2, 'GPS3 Python {}.{}.{} GPSD Interface Showing NMEA protocol'.format(*sys.version_info), curses.A_BOLD)
            data_window.addstr(2, 2, '{}'.format(gpsd_socket.response))
            data_window.refresh()
        else:
            sleep(.1)  # idle briefly between empty non-blocking reads
def shut_down():
    """Closes connection and restores terminal

    Leaves curses mode (restore cooked input and key echo), closes the
    module-level gpsd socket, and exits the process with status 1.
    Called on the 'q' key and on KeyboardInterrupt.
    """
    curses.nocbreak()  # back to line-buffered input
    curses.echo()      # re-enable key echo
    curses.endwin()    # restore the terminal from curses mode
    gpsd_socket.close()
    print('Keyboard interrupt received\nTerminated by user\nGood Bye.\n')
    sys.exit(1)
if __name__ == '__main__':
    args = add_args()
    # Connect to the gpsd daemon and start streaming the selected protocol.
    gpsd_socket = gps3.GPSDSocket()
    gpsd_socket.connect(args.host, args.port)
    gpsd_socket.watch(gpsd_protocol=args.gpsd_protocol)
    data_stream = gps3.DataStream()
    # Put the terminal into curses mode: no echo, hidden cursor, cbreak keys.
    screen = curses.initscr()
    screen.clear()
    screen.scrollok(True)
    curses.noecho()
    curses.curs_set(0)
    curses.cbreak()
    try:
        # Dispatch on the chosen protocol; each show_* call loops until quit
        # or until it tail-calls the other view.
        if 'json' in args.gpsd_protocol:
            show_human()
        if 'nmea' in args.gpsd_protocol:
            show_nmea()
    except KeyboardInterrupt:
        shut_down()
    except (OSError, IOError) as error:
        # Connection failure: restore the terminal before writing diagnostics.
        gpsd_socket.close()
        curses.nocbreak()
        curses.echo()
        curses.endwin()
        sys.stderr.write('\rHUMAN error--> {}'.format(error))
        sys.stderr.write('\rhuman connection to gpsd at \'{0}\' on port \'{1}\' failed.\n'.format(args.host, args.port))
        sys.exit(1)  # TODO: gpsd existence check and start
#
# Someday a cleaner Python interface will live here
#
# End
|
wadda/gps3 | examples/human.py | make_time | python | def make_time(gps_datetime_str):
if not 'n/a' == gps_datetime_str:
datetime_string = gps_datetime_str
datetime_object = datetime.strptime(datetime_string, "%Y-%m-%dT%H:%M:%S")
return datetime_object | Makes datetime object from string object | train | https://github.com/wadda/gps3/blob/91adcd7073b891b135b2a46d039ce2125cf09a09/examples/human.py#L81-L86 | null | #!/usr/bin/env python3
# coding=utf-8
"""
human.py is to showcase gps3.py, a Python 2.7-3.5 GPSD interface
Defaults host='127.0.0.1', port=2947, gpsd_protocol='json'
Toggle Lat/Lon form with '0', '1', '2', '3' for RAW, DDD, DMM, DMS
Toggle units with '0', 'm', 'i', 'n', for 'raw', Metric, Imperial, Nautical
Toggle gpsd protocol with 'j', 'a' for 'json', 'nmea' displays
Quit with 'q' or '^c'
python[X] human.py --help for list of commandline options.
"""
import argparse
import curses
import sys
from datetime import datetime
from math import modf
from time import sleep
from gps3 import gps3 # Moe, remember to CHANGE to straight 'import gps3' if not installed,
# or check which Python version it's installed in. You forget sometimes.
__author__ = 'Moe'
__copyright__ = 'Copyright 2015-2016 Moe'
__license__ = 'MIT'
__version__ = '0.33.2'
CONVERSION = {'raw': (1, 1, 'm/s', 'meters'),
'metric': (3.6, 1, 'kph', 'meters'),
'nautical': (1.9438445, 1, 'kts', 'meters'),
'imperial': (2.2369363, 3.2808399, 'mph', 'feet')}
def add_args():
"""Adds commandline arguments and formatted Help"""
parser = argparse.ArgumentParser()
parser.add_argument('-host', action='store', dest='host', default='127.0.0.1', help='DEFAULT "127.0.0.1"')
parser.add_argument('-port', action='store', dest='port', default='2947', help='DEFAULT 2947', type=int)
parser.add_argument('-json', dest='gpsd_protocol', const='json', action='store_const', default='json', help='DEFAULT JSON objects */')
parser.add_argument('-device', dest='devicepath', action='store', help='alternate devicepath e.g.,"-device /dev/ttyUSB4"')
# Infrequently used options
parser.add_argument('-nmea', dest='gpsd_protocol', const='nmea', action='store_const', help='*/ output in NMEA */')
# parser.add_argument('-rare', dest='gpsd_protocol', const='rare', action='store_const', help='*/ output of packets in hex */')
# parser.add_argument('-raw', dest='gpsd_protocol', const='raw', action='store_const', help='*/ output of raw packets */')
# parser.add_argument('-scaled', dest='gpsd_protocol', const='scaled', action='store_const', help='*/ scale output to floats */')
# parser.add_argument('-timing', dest='gpsd_protocol', const='timing', action='store_const', help='*/ timing information */')
# parser.add_argument('-split24', dest='gpsd_protocol', const='split24', action='store_const', help='*/ split AIS Type 24s */')
# parser.add_argument('-pps', dest='gpsd_protocol', const='pps', action='store_const', help='*/ enable PPS JSON */')
parser.add_argument('-v', '--version', action='version', version='Version: {}'.format(__version__))
cli_args = parser.parse_args()
return cli_args
def satellites_used(feed):
    """Counts number of satellites used in calculation from total visible satellites

    Arguments:
        feed: feed=data_stream.SKY['satellites'], a list of satellite dicts
              each carrying a boolean 'used' key; before the first SKY
              sentence arrives the field is a placeholder string.

    Returns:
        total_satellites (int): satellites in view
        used_satellites (int): satellites used in the fix
    """
    # Placeholder string (no SKY data yet) counts as no satellites.
    if not isinstance(feed, list):
        return 0, 0
    total_satellites = len(feed)
    used_satellites = sum(1 for satellite in feed if satellite['used'] is True)
    return total_satellites, used_satellites
def elapsed_time_from(start_time):
"""calculate time delta from latched time and current time"""
time_then = make_time(start_time)
time_now = datetime.utcnow().replace(microsecond=0)
if time_then is None:
return
delta_t = time_now - time_then
return delta_t
def unit_conversion(thing, units, length=False):
"""converts base data between metric, imperial, or nautical units"""
if 'n/a' == thing:
return 'n/a'
try:
thing = round(thing * CONVERSION[units][0 + length], 2)
except TypeError:
thing = 'fubar'
return thing, CONVERSION[units][2 + length]
def sexagesimal(sexathang, latlon, form='DDD'):
"""
Arguments:
sexathang: (float), -15.560615 (negative = South), -146.241122 (negative = West) # Apataki Carenage
latlon: (str) 'lat' | 'lon'
form: (str), 'DDD'|'DMM'|'DMS', decimal Degrees, decimal Minutes, decimal Seconds
Returns:
latitude: e.g., '15°33'38.214"S'
longitude: e.g., '146°14'28.039"W'
"""
cardinal = 'O'
if not isinstance(sexathang, float):
sexathang = 'n/a'
return sexathang
if latlon == 'lon':
if sexathang > 0.0:
cardinal = 'E'
if sexathang < 0.0:
cardinal = 'W'
if latlon == 'lat':
if sexathang > 0.0:
cardinal = 'N'
if sexathang < 0.0:
cardinal = 'S'
if 'RAW' in form:
sexathang = '{0:4.9f}°'.format(sexathang) # 4 to allow -100° through -179.999999° to -180°
return sexathang
if 'DDD' in form:
sexathang = '{0:3.6f}°'.format(abs(sexathang))
if 'DMM' in form:
_latlon = abs(sexathang)
minute_latlon, degree_latlon = modf(_latlon)
minute_latlon *= 60
sexathang = '{0}°{1:2.5f}\''.format(int(degree_latlon), minute_latlon)
if 'DMS' in form:
_latlon = abs(sexathang)
minute_latlon, degree_latlon = modf(_latlon)
second_latlon, minute_latlon = modf(minute_latlon * 60)
second_latlon *= 60.0
sexathang = '{0}°{1}\'{2:2.3f}\"'.format(int(degree_latlon), int(minute_latlon), second_latlon)
return sexathang + cardinal
def show_human():
"""Curses terminal with standard outputs """
form = 'RAW'
units = 'raw'
data_window = curses.newwin(19, 39, 0, 0)
sat_window = curses.newwin(14, 39, 0, 40)
device_window = curses.newwin(6, 39, 13, 40)
packet_window = curses.newwin(7, 79, 19, 0)
for new_data in gpsd_socket:
if new_data:
data_stream.unpack(new_data)
screen.nodelay(1)
key_press = screen.getch()
if key_press == ord('q'): # quit
shut_down()
elif key_press == ord('a'): # NMEA
gpsd_socket.watch(enable=False, gpsd_protocol='json')
gpsd_socket.watch(gpsd_protocol='nmea')
show_nmea()
elif key_press == ord('0'): # raw
form = 'RAW'
units = 'raw'
data_window.clear()
elif key_press == ord('1'): # DDD
form = 'DDD'
data_window.clear()
elif key_press == ord('2'): # DMM
form = 'DMM'
data_window.clear()
elif key_press == ord('3'): # DMS
form = 'DMS'
data_window.clear()
elif key_press == ord('m'): # Metric
units = 'metric'
data_window.clear()
elif key_press == ord('i'): # Imperial
units = 'imperial'
data_window.clear()
elif key_press == ord('n'): # Nautical
units = 'nautical'
data_window.clear()
elif key_press == ord('d'): # Refresh device listings
gpsd_socket.send('?DEVICES;')
device_window.clear()
data_window.box()
data_window.addstr(0, 2, 'GPS3 Python {}.{}.{} GPSD Interface'.format(*sys.version_info), curses.A_BOLD)
data_window.addstr(1, 2, 'Time: {time} '.format(**data_stream.TPV))
data_window.addstr(2, 2, 'Latitude: {} '.format(sexagesimal(data_stream.TPV['lat'], 'lat', form)))
data_window.addstr(3, 2, 'Longitude: {} '.format(sexagesimal(data_stream.TPV['lon'], 'lon', form)))
data_window.addstr(4, 2, 'Altitude: {} {}'.format(*unit_conversion(data_stream.TPV['alt'], units, length=True)))
data_window.addstr(5, 2, 'Speed: {} {}'.format(*unit_conversion(data_stream.TPV['speed'], units)))
data_window.addstr(6, 2, 'Heading: {track}° True'.format(**data_stream.TPV))
data_window.addstr(7, 2, 'Climb: {} {}/s'.format(*unit_conversion(data_stream.TPV['climb'], units, length=True)))
data_window.addstr(8, 2, 'Status: {mode:<}D '.format(**data_stream.TPV))
data_window.addstr(9, 2, 'Latitude Err: +/-{} {} '.format(*unit_conversion(data_stream.TPV['epx'], units, length=True)))
data_window.addstr(10, 2, 'Longitude Err: +/-{} {}'.format(*unit_conversion(data_stream.TPV['epy'], units, length=True)))
data_window.addstr(11, 2, 'Altitude Err: +/-{} {} '.format(*unit_conversion(data_stream.TPV['epv'], units, length=True)))
data_window.addstr(12, 2, 'Course Err: +/-{epc} '.format(**data_stream.TPV), curses.A_DIM)
data_window.addstr(13, 2, 'Speed Err: +/-{} {} '.format(*unit_conversion(data_stream.TPV['eps'], units)), curses.A_DIM)
data_window.addstr(14, 2, 'Time Offset: +/-{ept} '.format(**data_stream.TPV), curses.A_DIM)
data_window.addstr(15, 2, 'gdop:{gdop} pdop:{pdop} tdop:{tdop}'.format(**data_stream.SKY))
data_window.addstr(16, 2, 'ydop:{ydop} xdop:{xdop} '.format(**data_stream.SKY))
data_window.addstr(17, 2, 'vdop:{vdop} hdop:{hdop} '.format(**data_stream.SKY))
sat_window.clear()
sat_window.box()
sat_window.addstr(0, 2, 'Using {0[1]}/{0[0]} satellites (truncated)'.format(satellites_used(data_stream.SKY['satellites'])))
sat_window.addstr(1, 2, 'PRN Elev Azimuth SNR Used')
line = 2
if isinstance(data_stream.SKY['satellites'], list): # Nested lists of dictionaries are strings before data is present
for sats in data_stream.SKY['satellites'][0:10]:
sat_window.addstr(line, 2, '{PRN:>2} {el:>6} {az:>5} {ss:>5} {used:}'.format(**sats))
line += 1
# device_window.clear()
device_window.box()
if not isinstance(data_stream.DEVICES['devices'], list): # Local machines need a 'device' kick start
gpsd_socket.send('?DEVICES;') # to have valid data I don't know why.
if isinstance(data_stream.DEVICES['devices'], list): # Nested lists of dictionaries are strings before data is present.
for gizmo in data_stream.DEVICES['devices']:
start_time, _uicroseconds = gizmo['activated'].split('.') # Remove '.000Z'
elapsed = elapsed_time_from(start_time)
device_window.addstr(1, 2, 'Activated: {}'.format(gizmo['activated']))
device_window.addstr(2, 2, 'Host:{0.host}:{0.port} {1}'.format(args, gizmo['path']))
device_window.addstr(3, 2, 'Driver:{driver} BPS:{bps}'.format(**gizmo))
device_window.addstr(4, 2, 'Cycle:{0} Hz {1!s:>14} Elapsed'.format(1 / gizmo['cycle'], elapsed))
packet_window.clear()
# packet_window.border(0)
packet_window.scrollok(True)
packet_window.addstr(0, 0, '{}'.format(new_data))
# sleep(.9)
data_window.refresh()
sat_window.refresh()
device_window.refresh()
packet_window.refresh()
else: # Reduced CPU cycles with the non-blocking socket read, by putting 'sleep' here, rather than hitting
sleep(.1) # the socket fast and furious with hundreds of empty checks between sleeps.
def show_nmea():
"""NMEA output in curses terminal"""
data_window = curses.newwin(24, 79, 0, 0)
for new_data in gpsd_socket:
if new_data:
screen.nodelay(1)
key_press = screen.getch()
if key_press == ord('q'):
shut_down()
elif key_press == ord('j'): # raw
gpsd_socket.watch(enable=False, gpsd_protocol='nmea')
gpsd_socket.watch(gpsd_protocol='json')
show_human()
data_window.border(0)
data_window.addstr(0, 2, 'GPS3 Python {}.{}.{} GPSD Interface Showing NMEA protocol'.format(*sys.version_info), curses.A_BOLD)
data_window.addstr(2, 2, '{}'.format(gpsd_socket.response))
data_window.refresh()
else:
sleep(.1)
def shut_down():
"""Closes connection and restores terminal"""
curses.nocbreak()
curses.echo()
curses.endwin()
gpsd_socket.close()
print('Keyboard interrupt received\nTerminated by user\nGood Bye.\n')
sys.exit(1)
if __name__ == '__main__':
args = add_args()
gpsd_socket = gps3.GPSDSocket()
gpsd_socket.connect(args.host, args.port)
gpsd_socket.watch(gpsd_protocol=args.gpsd_protocol)
data_stream = gps3.DataStream()
screen = curses.initscr()
screen.clear()
screen.scrollok(True)
curses.noecho()
curses.curs_set(0)
curses.cbreak()
try:
if 'json' in args.gpsd_protocol:
show_human()
if 'nmea' in args.gpsd_protocol:
show_nmea()
except KeyboardInterrupt:
shut_down()
except (OSError, IOError) as error:
gpsd_socket.close()
curses.nocbreak()
curses.echo()
curses.endwin()
sys.stderr.write('\rHUMAN error--> {}'.format(error))
sys.stderr.write('\rhuman connection to gpsd at \'{0}\' on port \'{1}\' failed.\n'.format(args.host, args.port))
sys.exit(1) # TODO: gpsd existence check and start
#
# Someday a cleaner Python interface will live here
#
# End
|
wadda/gps3 | examples/human.py | elapsed_time_from | python | def elapsed_time_from(start_time):
time_then = make_time(start_time)
time_now = datetime.utcnow().replace(microsecond=0)
if time_then is None:
return
delta_t = time_now - time_then
return delta_t | calculate time delta from latched time and current time | train | https://github.com/wadda/gps3/blob/91adcd7073b891b135b2a46d039ce2125cf09a09/examples/human.py#L89-L96 | [
"def make_time(gps_datetime_str):\n \"\"\"Makes datetime object from string object\"\"\"\n if not 'n/a' == gps_datetime_str:\n datetime_string = gps_datetime_str\n datetime_object = datetime.strptime(datetime_string, \"%Y-%m-%dT%H:%M:%S\")\n return datetime_object\n"
] | #!/usr/bin/env python3
# coding=utf-8
"""
human.py is to showcase gps3.py, a Python 2.7-3.5 GPSD interface
Defaults host='127.0.0.1', port=2947, gpsd_protocol='json'
Toggle Lat/Lon form with '0', '1', '2', '3' for RAW, DDD, DMM, DMS
Toggle units with '0', 'm', 'i', 'n', for 'raw', Metric, Imperial, Nautical
Toggle gpsd protocol with 'j', 'a' for 'json', 'nmea' displays
Quit with 'q' or '^c'
python[X] human.py --help for list of commandline options.
"""
import argparse
import curses
import sys
from datetime import datetime
from math import modf
from time import sleep
from gps3 import gps3 # Moe, remember to CHANGE to straight 'import gps3' if not installed,
# or check which Python version it's installed in. You forget sometimes.
__author__ = 'Moe'
__copyright__ = 'Copyright 2015-2016 Moe'
__license__ = 'MIT'
__version__ = '0.33.2'
CONVERSION = {'raw': (1, 1, 'm/s', 'meters'),
'metric': (3.6, 1, 'kph', 'meters'),
'nautical': (1.9438445, 1, 'kts', 'meters'),
'imperial': (2.2369363, 3.2808399, 'mph', 'feet')}
def add_args():
"""Adds commandline arguments and formatted Help"""
parser = argparse.ArgumentParser()
parser.add_argument('-host', action='store', dest='host', default='127.0.0.1', help='DEFAULT "127.0.0.1"')
parser.add_argument('-port', action='store', dest='port', default='2947', help='DEFAULT 2947', type=int)
parser.add_argument('-json', dest='gpsd_protocol', const='json', action='store_const', default='json', help='DEFAULT JSON objects */')
parser.add_argument('-device', dest='devicepath', action='store', help='alternate devicepath e.g.,"-device /dev/ttyUSB4"')
# Infrequently used options
parser.add_argument('-nmea', dest='gpsd_protocol', const='nmea', action='store_const', help='*/ output in NMEA */')
# parser.add_argument('-rare', dest='gpsd_protocol', const='rare', action='store_const', help='*/ output of packets in hex */')
# parser.add_argument('-raw', dest='gpsd_protocol', const='raw', action='store_const', help='*/ output of raw packets */')
# parser.add_argument('-scaled', dest='gpsd_protocol', const='scaled', action='store_const', help='*/ scale output to floats */')
# parser.add_argument('-timing', dest='gpsd_protocol', const='timing', action='store_const', help='*/ timing information */')
# parser.add_argument('-split24', dest='gpsd_protocol', const='split24', action='store_const', help='*/ split AIS Type 24s */')
# parser.add_argument('-pps', dest='gpsd_protocol', const='pps', action='store_const', help='*/ enable PPS JSON */')
parser.add_argument('-v', '--version', action='version', version='Version: {}'.format(__version__))
cli_args = parser.parse_args()
return cli_args
def satellites_used(feed):
"""Counts number of satellites used in calculation from total visible satellites
Arguments:
feed feed=data_stream.TPV['satellites']
Returns:
total_satellites(int):
used_satellites (int):
"""
total_satellites = 0
used_satellites = 0
if not isinstance(feed, list):
return 0, 0
for satellites in feed:
total_satellites += 1
if satellites['used'] is True:
used_satellites += 1
return total_satellites, used_satellites
def make_time(gps_datetime_str):
"""Makes datetime object from string object"""
if not 'n/a' == gps_datetime_str:
datetime_string = gps_datetime_str
datetime_object = datetime.strptime(datetime_string, "%Y-%m-%dT%H:%M:%S")
return datetime_object
def unit_conversion(thing, units, length=False):
"""converts base data between metric, imperial, or nautical units"""
if 'n/a' == thing:
return 'n/a'
try:
thing = round(thing * CONVERSION[units][0 + length], 2)
except TypeError:
thing = 'fubar'
return thing, CONVERSION[units][2 + length]
def sexagesimal(sexathang, latlon, form='DDD'):
"""
Arguments:
sexathang: (float), -15.560615 (negative = South), -146.241122 (negative = West) # Apataki Carenage
latlon: (str) 'lat' | 'lon'
form: (str), 'DDD'|'DMM'|'DMS', decimal Degrees, decimal Minutes, decimal Seconds
Returns:
latitude: e.g., '15°33'38.214"S'
longitude: e.g., '146°14'28.039"W'
"""
cardinal = 'O'
if not isinstance(sexathang, float):
sexathang = 'n/a'
return sexathang
if latlon == 'lon':
if sexathang > 0.0:
cardinal = 'E'
if sexathang < 0.0:
cardinal = 'W'
if latlon == 'lat':
if sexathang > 0.0:
cardinal = 'N'
if sexathang < 0.0:
cardinal = 'S'
if 'RAW' in form:
sexathang = '{0:4.9f}°'.format(sexathang) # 4 to allow -100° through -179.999999° to -180°
return sexathang
if 'DDD' in form:
sexathang = '{0:3.6f}°'.format(abs(sexathang))
if 'DMM' in form:
_latlon = abs(sexathang)
minute_latlon, degree_latlon = modf(_latlon)
minute_latlon *= 60
sexathang = '{0}°{1:2.5f}\''.format(int(degree_latlon), minute_latlon)
if 'DMS' in form:
_latlon = abs(sexathang)
minute_latlon, degree_latlon = modf(_latlon)
second_latlon, minute_latlon = modf(minute_latlon * 60)
second_latlon *= 60.0
sexathang = '{0}°{1}\'{2:2.3f}\"'.format(int(degree_latlon), int(minute_latlon), second_latlon)
return sexathang + cardinal
def show_human():
"""Curses terminal with standard outputs """
form = 'RAW'
units = 'raw'
data_window = curses.newwin(19, 39, 0, 0)
sat_window = curses.newwin(14, 39, 0, 40)
device_window = curses.newwin(6, 39, 13, 40)
packet_window = curses.newwin(7, 79, 19, 0)
for new_data in gpsd_socket:
if new_data:
data_stream.unpack(new_data)
screen.nodelay(1)
key_press = screen.getch()
if key_press == ord('q'): # quit
shut_down()
elif key_press == ord('a'): # NMEA
gpsd_socket.watch(enable=False, gpsd_protocol='json')
gpsd_socket.watch(gpsd_protocol='nmea')
show_nmea()
elif key_press == ord('0'): # raw
form = 'RAW'
units = 'raw'
data_window.clear()
elif key_press == ord('1'): # DDD
form = 'DDD'
data_window.clear()
elif key_press == ord('2'): # DMM
form = 'DMM'
data_window.clear()
elif key_press == ord('3'): # DMS
form = 'DMS'
data_window.clear()
elif key_press == ord('m'): # Metric
units = 'metric'
data_window.clear()
elif key_press == ord('i'): # Imperial
units = 'imperial'
data_window.clear()
elif key_press == ord('n'): # Nautical
units = 'nautical'
data_window.clear()
elif key_press == ord('d'): # Refresh device listings
gpsd_socket.send('?DEVICES;')
device_window.clear()
data_window.box()
data_window.addstr(0, 2, 'GPS3 Python {}.{}.{} GPSD Interface'.format(*sys.version_info), curses.A_BOLD)
data_window.addstr(1, 2, 'Time: {time} '.format(**data_stream.TPV))
data_window.addstr(2, 2, 'Latitude: {} '.format(sexagesimal(data_stream.TPV['lat'], 'lat', form)))
data_window.addstr(3, 2, 'Longitude: {} '.format(sexagesimal(data_stream.TPV['lon'], 'lon', form)))
data_window.addstr(4, 2, 'Altitude: {} {}'.format(*unit_conversion(data_stream.TPV['alt'], units, length=True)))
data_window.addstr(5, 2, 'Speed: {} {}'.format(*unit_conversion(data_stream.TPV['speed'], units)))
data_window.addstr(6, 2, 'Heading: {track}° True'.format(**data_stream.TPV))
data_window.addstr(7, 2, 'Climb: {} {}/s'.format(*unit_conversion(data_stream.TPV['climb'], units, length=True)))
data_window.addstr(8, 2, 'Status: {mode:<}D '.format(**data_stream.TPV))
data_window.addstr(9, 2, 'Latitude Err: +/-{} {} '.format(*unit_conversion(data_stream.TPV['epx'], units, length=True)))
data_window.addstr(10, 2, 'Longitude Err: +/-{} {}'.format(*unit_conversion(data_stream.TPV['epy'], units, length=True)))
data_window.addstr(11, 2, 'Altitude Err: +/-{} {} '.format(*unit_conversion(data_stream.TPV['epv'], units, length=True)))
data_window.addstr(12, 2, 'Course Err: +/-{epc} '.format(**data_stream.TPV), curses.A_DIM)
data_window.addstr(13, 2, 'Speed Err: +/-{} {} '.format(*unit_conversion(data_stream.TPV['eps'], units)), curses.A_DIM)
data_window.addstr(14, 2, 'Time Offset: +/-{ept} '.format(**data_stream.TPV), curses.A_DIM)
data_window.addstr(15, 2, 'gdop:{gdop} pdop:{pdop} tdop:{tdop}'.format(**data_stream.SKY))
data_window.addstr(16, 2, 'ydop:{ydop} xdop:{xdop} '.format(**data_stream.SKY))
data_window.addstr(17, 2, 'vdop:{vdop} hdop:{hdop} '.format(**data_stream.SKY))
sat_window.clear()
sat_window.box()
sat_window.addstr(0, 2, 'Using {0[1]}/{0[0]} satellites (truncated)'.format(satellites_used(data_stream.SKY['satellites'])))
sat_window.addstr(1, 2, 'PRN Elev Azimuth SNR Used')
line = 2
if isinstance(data_stream.SKY['satellites'], list): # Nested lists of dictionaries are strings before data is present
for sats in data_stream.SKY['satellites'][0:10]:
sat_window.addstr(line, 2, '{PRN:>2} {el:>6} {az:>5} {ss:>5} {used:}'.format(**sats))
line += 1
# device_window.clear()
device_window.box()
if not isinstance(data_stream.DEVICES['devices'], list): # Local machines need a 'device' kick start
gpsd_socket.send('?DEVICES;') # to have valid data I don't know why.
if isinstance(data_stream.DEVICES['devices'], list): # Nested lists of dictionaries are strings before data is present.
for gizmo in data_stream.DEVICES['devices']:
start_time, _uicroseconds = gizmo['activated'].split('.') # Remove '.000Z'
elapsed = elapsed_time_from(start_time)
device_window.addstr(1, 2, 'Activated: {}'.format(gizmo['activated']))
device_window.addstr(2, 2, 'Host:{0.host}:{0.port} {1}'.format(args, gizmo['path']))
device_window.addstr(3, 2, 'Driver:{driver} BPS:{bps}'.format(**gizmo))
device_window.addstr(4, 2, 'Cycle:{0} Hz {1!s:>14} Elapsed'.format(1 / gizmo['cycle'], elapsed))
packet_window.clear()
# packet_window.border(0)
packet_window.scrollok(True)
packet_window.addstr(0, 0, '{}'.format(new_data))
# sleep(.9)
data_window.refresh()
sat_window.refresh()
device_window.refresh()
packet_window.refresh()
else: # Reduced CPU cycles with the non-blocking socket read, by putting 'sleep' here, rather than hitting
sleep(.1) # the socket fast and furious with hundreds of empty checks between sleeps.
def show_nmea():
"""NMEA output in curses terminal"""
data_window = curses.newwin(24, 79, 0, 0)
for new_data in gpsd_socket:
if new_data:
screen.nodelay(1)
key_press = screen.getch()
if key_press == ord('q'):
shut_down()
elif key_press == ord('j'): # raw
gpsd_socket.watch(enable=False, gpsd_protocol='nmea')
gpsd_socket.watch(gpsd_protocol='json')
show_human()
data_window.border(0)
data_window.addstr(0, 2, 'GPS3 Python {}.{}.{} GPSD Interface Showing NMEA protocol'.format(*sys.version_info), curses.A_BOLD)
data_window.addstr(2, 2, '{}'.format(gpsd_socket.response))
data_window.refresh()
else:
sleep(.1)
def shut_down():
"""Closes connection and restores terminal"""
curses.nocbreak()
curses.echo()
curses.endwin()
gpsd_socket.close()
print('Keyboard interrupt received\nTerminated by user\nGood Bye.\n')
sys.exit(1)
if __name__ == '__main__':
args = add_args()
gpsd_socket = gps3.GPSDSocket()
gpsd_socket.connect(args.host, args.port)
gpsd_socket.watch(gpsd_protocol=args.gpsd_protocol)
data_stream = gps3.DataStream()
screen = curses.initscr()
screen.clear()
screen.scrollok(True)
curses.noecho()
curses.curs_set(0)
curses.cbreak()
try:
if 'json' in args.gpsd_protocol:
show_human()
if 'nmea' in args.gpsd_protocol:
show_nmea()
except KeyboardInterrupt:
shut_down()
except (OSError, IOError) as error:
gpsd_socket.close()
curses.nocbreak()
curses.echo()
curses.endwin()
sys.stderr.write('\rHUMAN error--> {}'.format(error))
sys.stderr.write('\rhuman connection to gpsd at \'{0}\' on port \'{1}\' failed.\n'.format(args.host, args.port))
sys.exit(1) # TODO: gpsd existence check and start
#
# Someday a cleaner Python interface will live here
#
# End
|
wadda/gps3 | examples/human.py | unit_conversion | python | def unit_conversion(thing, units, length=False):
if 'n/a' == thing:
return 'n/a'
try:
thing = round(thing * CONVERSION[units][0 + length], 2)
except TypeError:
thing = 'fubar'
return thing, CONVERSION[units][2 + length] | converts base data between metric, imperial, or nautical units | train | https://github.com/wadda/gps3/blob/91adcd7073b891b135b2a46d039ce2125cf09a09/examples/human.py#L99-L107 | null | #!/usr/bin/env python3
# coding=utf-8
"""
human.py is to showcase gps3.py, a Python 2.7-3.5 GPSD interface
Defaults host='127.0.0.1', port=2947, gpsd_protocol='json'
Toggle Lat/Lon form with '0', '1', '2', '3' for RAW, DDD, DMM, DMS
Toggle units with '0', 'm', 'i', 'n', for 'raw', Metric, Imperial, Nautical
Toggle gpsd protocol with 'j', 'a' for 'json', 'nmea' displays
Quit with 'q' or '^c'
python[X] human.py --help for list of commandline options.
"""
import argparse
import curses
import sys
from datetime import datetime
from math import modf
from time import sleep
from gps3 import gps3 # Moe, remember to CHANGE to straight 'import gps3' if not installed,
# or check which Python version it's installed in. You forget sometimes.
__author__ = 'Moe'
__copyright__ = 'Copyright 2015-2016 Moe'
__license__ = 'MIT'
__version__ = '0.33.2'
CONVERSION = {'raw': (1, 1, 'm/s', 'meters'),
'metric': (3.6, 1, 'kph', 'meters'),
'nautical': (1.9438445, 1, 'kts', 'meters'),
'imperial': (2.2369363, 3.2808399, 'mph', 'feet')}
def add_args():
"""Adds commandline arguments and formatted Help"""
parser = argparse.ArgumentParser()
parser.add_argument('-host', action='store', dest='host', default='127.0.0.1', help='DEFAULT "127.0.0.1"')
parser.add_argument('-port', action='store', dest='port', default='2947', help='DEFAULT 2947', type=int)
parser.add_argument('-json', dest='gpsd_protocol', const='json', action='store_const', default='json', help='DEFAULT JSON objects */')
parser.add_argument('-device', dest='devicepath', action='store', help='alternate devicepath e.g.,"-device /dev/ttyUSB4"')
# Infrequently used options
parser.add_argument('-nmea', dest='gpsd_protocol', const='nmea', action='store_const', help='*/ output in NMEA */')
# parser.add_argument('-rare', dest='gpsd_protocol', const='rare', action='store_const', help='*/ output of packets in hex */')
# parser.add_argument('-raw', dest='gpsd_protocol', const='raw', action='store_const', help='*/ output of raw packets */')
# parser.add_argument('-scaled', dest='gpsd_protocol', const='scaled', action='store_const', help='*/ scale output to floats */')
# parser.add_argument('-timing', dest='gpsd_protocol', const='timing', action='store_const', help='*/ timing information */')
# parser.add_argument('-split24', dest='gpsd_protocol', const='split24', action='store_const', help='*/ split AIS Type 24s */')
# parser.add_argument('-pps', dest='gpsd_protocol', const='pps', action='store_const', help='*/ enable PPS JSON */')
parser.add_argument('-v', '--version', action='version', version='Version: {}'.format(__version__))
cli_args = parser.parse_args()
return cli_args
def satellites_used(feed):
"""Counts number of satellites used in calculation from total visible satellites
Arguments:
feed feed=data_stream.TPV['satellites']
Returns:
total_satellites(int):
used_satellites (int):
"""
total_satellites = 0
used_satellites = 0
if not isinstance(feed, list):
return 0, 0
for satellites in feed:
total_satellites += 1
if satellites['used'] is True:
used_satellites += 1
return total_satellites, used_satellites
def make_time(gps_datetime_str):
"""Makes datetime object from string object"""
if not 'n/a' == gps_datetime_str:
datetime_string = gps_datetime_str
datetime_object = datetime.strptime(datetime_string, "%Y-%m-%dT%H:%M:%S")
return datetime_object
def elapsed_time_from(start_time):
"""calculate time delta from latched time and current time"""
time_then = make_time(start_time)
time_now = datetime.utcnow().replace(microsecond=0)
if time_then is None:
return
delta_t = time_now - time_then
return delta_t
def sexagesimal(sexathang, latlon, form='DDD'):
"""
Arguments:
sexathang: (float), -15.560615 (negative = South), -146.241122 (negative = West) # Apataki Carenage
latlon: (str) 'lat' | 'lon'
form: (str), 'DDD'|'DMM'|'DMS', decimal Degrees, decimal Minutes, decimal Seconds
Returns:
latitude: e.g., '15°33'38.214"S'
longitude: e.g., '146°14'28.039"W'
"""
cardinal = 'O'
if not isinstance(sexathang, float):
sexathang = 'n/a'
return sexathang
if latlon == 'lon':
if sexathang > 0.0:
cardinal = 'E'
if sexathang < 0.0:
cardinal = 'W'
if latlon == 'lat':
if sexathang > 0.0:
cardinal = 'N'
if sexathang < 0.0:
cardinal = 'S'
if 'RAW' in form:
sexathang = '{0:4.9f}°'.format(sexathang) # 4 to allow -100° through -179.999999° to -180°
return sexathang
if 'DDD' in form:
sexathang = '{0:3.6f}°'.format(abs(sexathang))
if 'DMM' in form:
_latlon = abs(sexathang)
minute_latlon, degree_latlon = modf(_latlon)
minute_latlon *= 60
sexathang = '{0}°{1:2.5f}\''.format(int(degree_latlon), minute_latlon)
if 'DMS' in form:
_latlon = abs(sexathang)
minute_latlon, degree_latlon = modf(_latlon)
second_latlon, minute_latlon = modf(minute_latlon * 60)
second_latlon *= 60.0
sexathang = '{0}°{1}\'{2:2.3f}\"'.format(int(degree_latlon), int(minute_latlon), second_latlon)
return sexathang + cardinal
def show_human():
"""Curses terminal with standard outputs """
form = 'RAW'
units = 'raw'
data_window = curses.newwin(19, 39, 0, 0)
sat_window = curses.newwin(14, 39, 0, 40)
device_window = curses.newwin(6, 39, 13, 40)
packet_window = curses.newwin(7, 79, 19, 0)
for new_data in gpsd_socket:
if new_data:
data_stream.unpack(new_data)
screen.nodelay(1)
key_press = screen.getch()
if key_press == ord('q'): # quit
shut_down()
elif key_press == ord('a'): # NMEA
gpsd_socket.watch(enable=False, gpsd_protocol='json')
gpsd_socket.watch(gpsd_protocol='nmea')
show_nmea()
elif key_press == ord('0'): # raw
form = 'RAW'
units = 'raw'
data_window.clear()
elif key_press == ord('1'): # DDD
form = 'DDD'
data_window.clear()
elif key_press == ord('2'): # DMM
form = 'DMM'
data_window.clear()
elif key_press == ord('3'): # DMS
form = 'DMS'
data_window.clear()
elif key_press == ord('m'): # Metric
units = 'metric'
data_window.clear()
elif key_press == ord('i'): # Imperial
units = 'imperial'
data_window.clear()
elif key_press == ord('n'): # Nautical
units = 'nautical'
data_window.clear()
elif key_press == ord('d'): # Refresh device listings
gpsd_socket.send('?DEVICES;')
device_window.clear()
data_window.box()
data_window.addstr(0, 2, 'GPS3 Python {}.{}.{} GPSD Interface'.format(*sys.version_info), curses.A_BOLD)
data_window.addstr(1, 2, 'Time: {time} '.format(**data_stream.TPV))
data_window.addstr(2, 2, 'Latitude: {} '.format(sexagesimal(data_stream.TPV['lat'], 'lat', form)))
data_window.addstr(3, 2, 'Longitude: {} '.format(sexagesimal(data_stream.TPV['lon'], 'lon', form)))
data_window.addstr(4, 2, 'Altitude: {} {}'.format(*unit_conversion(data_stream.TPV['alt'], units, length=True)))
data_window.addstr(5, 2, 'Speed: {} {}'.format(*unit_conversion(data_stream.TPV['speed'], units)))
data_window.addstr(6, 2, 'Heading: {track}° True'.format(**data_stream.TPV))
data_window.addstr(7, 2, 'Climb: {} {}/s'.format(*unit_conversion(data_stream.TPV['climb'], units, length=True)))
data_window.addstr(8, 2, 'Status: {mode:<}D '.format(**data_stream.TPV))
data_window.addstr(9, 2, 'Latitude Err: +/-{} {} '.format(*unit_conversion(data_stream.TPV['epx'], units, length=True)))
data_window.addstr(10, 2, 'Longitude Err: +/-{} {}'.format(*unit_conversion(data_stream.TPV['epy'], units, length=True)))
data_window.addstr(11, 2, 'Altitude Err: +/-{} {} '.format(*unit_conversion(data_stream.TPV['epv'], units, length=True)))
data_window.addstr(12, 2, 'Course Err: +/-{epc} '.format(**data_stream.TPV), curses.A_DIM)
data_window.addstr(13, 2, 'Speed Err: +/-{} {} '.format(*unit_conversion(data_stream.TPV['eps'], units)), curses.A_DIM)
data_window.addstr(14, 2, 'Time Offset: +/-{ept} '.format(**data_stream.TPV), curses.A_DIM)
data_window.addstr(15, 2, 'gdop:{gdop} pdop:{pdop} tdop:{tdop}'.format(**data_stream.SKY))
data_window.addstr(16, 2, 'ydop:{ydop} xdop:{xdop} '.format(**data_stream.SKY))
data_window.addstr(17, 2, 'vdop:{vdop} hdop:{hdop} '.format(**data_stream.SKY))
sat_window.clear()
sat_window.box()
sat_window.addstr(0, 2, 'Using {0[1]}/{0[0]} satellites (truncated)'.format(satellites_used(data_stream.SKY['satellites'])))
sat_window.addstr(1, 2, 'PRN Elev Azimuth SNR Used')
line = 2
if isinstance(data_stream.SKY['satellites'], list): # Nested lists of dictionaries are strings before data is present
for sats in data_stream.SKY['satellites'][0:10]:
sat_window.addstr(line, 2, '{PRN:>2} {el:>6} {az:>5} {ss:>5} {used:}'.format(**sats))
line += 1
# device_window.clear()
device_window.box()
if not isinstance(data_stream.DEVICES['devices'], list): # Local machines need a 'device' kick start
gpsd_socket.send('?DEVICES;') # to have valid data I don't know why.
if isinstance(data_stream.DEVICES['devices'], list): # Nested lists of dictionaries are strings before data is present.
for gizmo in data_stream.DEVICES['devices']:
start_time, _uicroseconds = gizmo['activated'].split('.') # Remove '.000Z'
elapsed = elapsed_time_from(start_time)
device_window.addstr(1, 2, 'Activated: {}'.format(gizmo['activated']))
device_window.addstr(2, 2, 'Host:{0.host}:{0.port} {1}'.format(args, gizmo['path']))
device_window.addstr(3, 2, 'Driver:{driver} BPS:{bps}'.format(**gizmo))
device_window.addstr(4, 2, 'Cycle:{0} Hz {1!s:>14} Elapsed'.format(1 / gizmo['cycle'], elapsed))
packet_window.clear()
# packet_window.border(0)
packet_window.scrollok(True)
packet_window.addstr(0, 0, '{}'.format(new_data))
# sleep(.9)
data_window.refresh()
sat_window.refresh()
device_window.refresh()
packet_window.refresh()
else: # Reduced CPU cycles with the non-blocking socket read, by putting 'sleep' here, rather than hitting
sleep(.1) # the socket fast and furious with hundreds of empty checks between sleeps.
def show_nmea():
"""NMEA output in curses terminal"""
data_window = curses.newwin(24, 79, 0, 0)
for new_data in gpsd_socket:
if new_data:
screen.nodelay(1)
key_press = screen.getch()
if key_press == ord('q'):
shut_down()
elif key_press == ord('j'): # raw
gpsd_socket.watch(enable=False, gpsd_protocol='nmea')
gpsd_socket.watch(gpsd_protocol='json')
show_human()
data_window.border(0)
data_window.addstr(0, 2, 'GPS3 Python {}.{}.{} GPSD Interface Showing NMEA protocol'.format(*sys.version_info), curses.A_BOLD)
data_window.addstr(2, 2, '{}'.format(gpsd_socket.response))
data_window.refresh()
else:
sleep(.1)
def shut_down():
"""Closes connection and restores terminal"""
curses.nocbreak()
curses.echo()
curses.endwin()
gpsd_socket.close()
print('Keyboard interrupt received\nTerminated by user\nGood Bye.\n')
sys.exit(1)
if __name__ == '__main__':
args = add_args()
gpsd_socket = gps3.GPSDSocket()
gpsd_socket.connect(args.host, args.port)
gpsd_socket.watch(gpsd_protocol=args.gpsd_protocol)
data_stream = gps3.DataStream()
screen = curses.initscr()
screen.clear()
screen.scrollok(True)
curses.noecho()
curses.curs_set(0)
curses.cbreak()
try:
if 'json' in args.gpsd_protocol:
show_human()
if 'nmea' in args.gpsd_protocol:
show_nmea()
except KeyboardInterrupt:
shut_down()
except (OSError, IOError) as error:
gpsd_socket.close()
curses.nocbreak()
curses.echo()
curses.endwin()
sys.stderr.write('\rHUMAN error--> {}'.format(error))
sys.stderr.write('\rhuman connection to gpsd at \'{0}\' on port \'{1}\' failed.\n'.format(args.host, args.port))
sys.exit(1) # TODO: gpsd existence check and start
#
# Someday a cleaner Python interface will live here
#
# End
|
wadda/gps3 | examples/human.py | sexagesimal | python | def sexagesimal(sexathang, latlon, form='DDD'):
cardinal = 'O'
if not isinstance(sexathang, float):
sexathang = 'n/a'
return sexathang
if latlon == 'lon':
if sexathang > 0.0:
cardinal = 'E'
if sexathang < 0.0:
cardinal = 'W'
if latlon == 'lat':
if sexathang > 0.0:
cardinal = 'N'
if sexathang < 0.0:
cardinal = 'S'
if 'RAW' in form:
sexathang = '{0:4.9f}°'.format(sexathang) # 4 to allow -100° through -179.999999° to -180°
return sexathang
if 'DDD' in form:
sexathang = '{0:3.6f}°'.format(abs(sexathang))
if 'DMM' in form:
_latlon = abs(sexathang)
minute_latlon, degree_latlon = modf(_latlon)
minute_latlon *= 60
sexathang = '{0}°{1:2.5f}\''.format(int(degree_latlon), minute_latlon)
if 'DMS' in form:
_latlon = abs(sexathang)
minute_latlon, degree_latlon = modf(_latlon)
second_latlon, minute_latlon = modf(minute_latlon * 60)
second_latlon *= 60.0
sexathang = '{0}°{1}\'{2:2.3f}\"'.format(int(degree_latlon), int(minute_latlon), second_latlon)
return sexathang + cardinal | Arguments:
sexathang: (float), -15.560615 (negative = South), -146.241122 (negative = West) # Apataki Carenage
latlon: (str) 'lat' | 'lon'
form: (str), 'DDD'|'DMM'|'DMS', decimal Degrees, decimal Minutes, decimal Seconds
Returns:
latitude: e.g., '15°33'38.214"S'
longitude: e.g., '146°14'28.039"W' | train | https://github.com/wadda/gps3/blob/91adcd7073b891b135b2a46d039ce2125cf09a09/examples/human.py#L110-L157 | null | #!/usr/bin/env python3
# coding=utf-8
"""
human.py is to showcase gps3.py, a Python 2.7-3.5 GPSD interface
Defaults host='127.0.0.1', port=2947, gpsd_protocol='json'
Toggle Lat/Lon form with '0', '1', '2', '3' for RAW, DDD, DMM, DMS
Toggle units with '0', 'm', 'i', 'n', for 'raw', Metric, Imperial, Nautical
Toggle gpsd protocol with 'j', 'a' for 'json', 'nmea' displays
Quit with 'q' or '^c'
python[X] human.py --help for list of commandline options.
"""
import argparse
import curses
import sys
from datetime import datetime
from math import modf
from time import sleep
from gps3 import gps3 # Moe, remember to CHANGE to straight 'import gps3' if not installed,
# or check which Python version it's installed in. You forget sometimes.
__author__ = 'Moe'
__copyright__ = 'Copyright 2015-2016 Moe'
__license__ = 'MIT'
__version__ = '0.33.2'
CONVERSION = {'raw': (1, 1, 'm/s', 'meters'),
'metric': (3.6, 1, 'kph', 'meters'),
'nautical': (1.9438445, 1, 'kts', 'meters'),
'imperial': (2.2369363, 3.2808399, 'mph', 'feet')}
def add_args():
"""Adds commandline arguments and formatted Help"""
parser = argparse.ArgumentParser()
parser.add_argument('-host', action='store', dest='host', default='127.0.0.1', help='DEFAULT "127.0.0.1"')
parser.add_argument('-port', action='store', dest='port', default='2947', help='DEFAULT 2947', type=int)
parser.add_argument('-json', dest='gpsd_protocol', const='json', action='store_const', default='json', help='DEFAULT JSON objects */')
parser.add_argument('-device', dest='devicepath', action='store', help='alternate devicepath e.g.,"-device /dev/ttyUSB4"')
# Infrequently used options
parser.add_argument('-nmea', dest='gpsd_protocol', const='nmea', action='store_const', help='*/ output in NMEA */')
# parser.add_argument('-rare', dest='gpsd_protocol', const='rare', action='store_const', help='*/ output of packets in hex */')
# parser.add_argument('-raw', dest='gpsd_protocol', const='raw', action='store_const', help='*/ output of raw packets */')
# parser.add_argument('-scaled', dest='gpsd_protocol', const='scaled', action='store_const', help='*/ scale output to floats */')
# parser.add_argument('-timing', dest='gpsd_protocol', const='timing', action='store_const', help='*/ timing information */')
# parser.add_argument('-split24', dest='gpsd_protocol', const='split24', action='store_const', help='*/ split AIS Type 24s */')
# parser.add_argument('-pps', dest='gpsd_protocol', const='pps', action='store_const', help='*/ enable PPS JSON */')
parser.add_argument('-v', '--version', action='version', version='Version: {}'.format(__version__))
cli_args = parser.parse_args()
return cli_args
def satellites_used(feed):
"""Counts number of satellites used in calculation from total visible satellites
Arguments:
feed feed=data_stream.TPV['satellites']
Returns:
total_satellites(int):
used_satellites (int):
"""
total_satellites = 0
used_satellites = 0
if not isinstance(feed, list):
return 0, 0
for satellites in feed:
total_satellites += 1
if satellites['used'] is True:
used_satellites += 1
return total_satellites, used_satellites
def make_time(gps_datetime_str):
"""Makes datetime object from string object"""
if not 'n/a' == gps_datetime_str:
datetime_string = gps_datetime_str
datetime_object = datetime.strptime(datetime_string, "%Y-%m-%dT%H:%M:%S")
return datetime_object
def elapsed_time_from(start_time):
"""calculate time delta from latched time and current time"""
time_then = make_time(start_time)
time_now = datetime.utcnow().replace(microsecond=0)
if time_then is None:
return
delta_t = time_now - time_then
return delta_t
def unit_conversion(thing, units, length=False):
"""converts base data between metric, imperial, or nautical units"""
if 'n/a' == thing:
return 'n/a'
try:
thing = round(thing * CONVERSION[units][0 + length], 2)
except TypeError:
thing = 'fubar'
return thing, CONVERSION[units][2 + length]
def show_human():
"""Curses terminal with standard outputs """
form = 'RAW'
units = 'raw'
data_window = curses.newwin(19, 39, 0, 0)
sat_window = curses.newwin(14, 39, 0, 40)
device_window = curses.newwin(6, 39, 13, 40)
packet_window = curses.newwin(7, 79, 19, 0)
for new_data in gpsd_socket:
if new_data:
data_stream.unpack(new_data)
screen.nodelay(1)
key_press = screen.getch()
if key_press == ord('q'): # quit
shut_down()
elif key_press == ord('a'): # NMEA
gpsd_socket.watch(enable=False, gpsd_protocol='json')
gpsd_socket.watch(gpsd_protocol='nmea')
show_nmea()
elif key_press == ord('0'): # raw
form = 'RAW'
units = 'raw'
data_window.clear()
elif key_press == ord('1'): # DDD
form = 'DDD'
data_window.clear()
elif key_press == ord('2'): # DMM
form = 'DMM'
data_window.clear()
elif key_press == ord('3'): # DMS
form = 'DMS'
data_window.clear()
elif key_press == ord('m'): # Metric
units = 'metric'
data_window.clear()
elif key_press == ord('i'): # Imperial
units = 'imperial'
data_window.clear()
elif key_press == ord('n'): # Nautical
units = 'nautical'
data_window.clear()
elif key_press == ord('d'): # Refresh device listings
gpsd_socket.send('?DEVICES;')
device_window.clear()
data_window.box()
data_window.addstr(0, 2, 'GPS3 Python {}.{}.{} GPSD Interface'.format(*sys.version_info), curses.A_BOLD)
data_window.addstr(1, 2, 'Time: {time} '.format(**data_stream.TPV))
data_window.addstr(2, 2, 'Latitude: {} '.format(sexagesimal(data_stream.TPV['lat'], 'lat', form)))
data_window.addstr(3, 2, 'Longitude: {} '.format(sexagesimal(data_stream.TPV['lon'], 'lon', form)))
data_window.addstr(4, 2, 'Altitude: {} {}'.format(*unit_conversion(data_stream.TPV['alt'], units, length=True)))
data_window.addstr(5, 2, 'Speed: {} {}'.format(*unit_conversion(data_stream.TPV['speed'], units)))
data_window.addstr(6, 2, 'Heading: {track}° True'.format(**data_stream.TPV))
data_window.addstr(7, 2, 'Climb: {} {}/s'.format(*unit_conversion(data_stream.TPV['climb'], units, length=True)))
data_window.addstr(8, 2, 'Status: {mode:<}D '.format(**data_stream.TPV))
data_window.addstr(9, 2, 'Latitude Err: +/-{} {} '.format(*unit_conversion(data_stream.TPV['epx'], units, length=True)))
data_window.addstr(10, 2, 'Longitude Err: +/-{} {}'.format(*unit_conversion(data_stream.TPV['epy'], units, length=True)))
data_window.addstr(11, 2, 'Altitude Err: +/-{} {} '.format(*unit_conversion(data_stream.TPV['epv'], units, length=True)))
data_window.addstr(12, 2, 'Course Err: +/-{epc} '.format(**data_stream.TPV), curses.A_DIM)
data_window.addstr(13, 2, 'Speed Err: +/-{} {} '.format(*unit_conversion(data_stream.TPV['eps'], units)), curses.A_DIM)
data_window.addstr(14, 2, 'Time Offset: +/-{ept} '.format(**data_stream.TPV), curses.A_DIM)
data_window.addstr(15, 2, 'gdop:{gdop} pdop:{pdop} tdop:{tdop}'.format(**data_stream.SKY))
data_window.addstr(16, 2, 'ydop:{ydop} xdop:{xdop} '.format(**data_stream.SKY))
data_window.addstr(17, 2, 'vdop:{vdop} hdop:{hdop} '.format(**data_stream.SKY))
sat_window.clear()
sat_window.box()
sat_window.addstr(0, 2, 'Using {0[1]}/{0[0]} satellites (truncated)'.format(satellites_used(data_stream.SKY['satellites'])))
sat_window.addstr(1, 2, 'PRN Elev Azimuth SNR Used')
line = 2
if isinstance(data_stream.SKY['satellites'], list): # Nested lists of dictionaries are strings before data is present
for sats in data_stream.SKY['satellites'][0:10]:
sat_window.addstr(line, 2, '{PRN:>2} {el:>6} {az:>5} {ss:>5} {used:}'.format(**sats))
line += 1
# device_window.clear()
device_window.box()
if not isinstance(data_stream.DEVICES['devices'], list): # Local machines need a 'device' kick start
gpsd_socket.send('?DEVICES;') # to have valid data I don't know why.
if isinstance(data_stream.DEVICES['devices'], list): # Nested lists of dictionaries are strings before data is present.
for gizmo in data_stream.DEVICES['devices']:
start_time, _uicroseconds = gizmo['activated'].split('.') # Remove '.000Z'
elapsed = elapsed_time_from(start_time)
device_window.addstr(1, 2, 'Activated: {}'.format(gizmo['activated']))
device_window.addstr(2, 2, 'Host:{0.host}:{0.port} {1}'.format(args, gizmo['path']))
device_window.addstr(3, 2, 'Driver:{driver} BPS:{bps}'.format(**gizmo))
device_window.addstr(4, 2, 'Cycle:{0} Hz {1!s:>14} Elapsed'.format(1 / gizmo['cycle'], elapsed))
packet_window.clear()
# packet_window.border(0)
packet_window.scrollok(True)
packet_window.addstr(0, 0, '{}'.format(new_data))
# sleep(.9)
data_window.refresh()
sat_window.refresh()
device_window.refresh()
packet_window.refresh()
else: # Reduced CPU cycles with the non-blocking socket read, by putting 'sleep' here, rather than hitting
sleep(.1) # the socket fast and furious with hundreds of empty checks between sleeps.
def show_nmea():
"""NMEA output in curses terminal"""
data_window = curses.newwin(24, 79, 0, 0)
for new_data in gpsd_socket:
if new_data:
screen.nodelay(1)
key_press = screen.getch()
if key_press == ord('q'):
shut_down()
elif key_press == ord('j'): # raw
gpsd_socket.watch(enable=False, gpsd_protocol='nmea')
gpsd_socket.watch(gpsd_protocol='json')
show_human()
data_window.border(0)
data_window.addstr(0, 2, 'GPS3 Python {}.{}.{} GPSD Interface Showing NMEA protocol'.format(*sys.version_info), curses.A_BOLD)
data_window.addstr(2, 2, '{}'.format(gpsd_socket.response))
data_window.refresh()
else:
sleep(.1)
def shut_down():
"""Closes connection and restores terminal"""
curses.nocbreak()
curses.echo()
curses.endwin()
gpsd_socket.close()
print('Keyboard interrupt received\nTerminated by user\nGood Bye.\n')
sys.exit(1)
if __name__ == '__main__':
args = add_args()
gpsd_socket = gps3.GPSDSocket()
gpsd_socket.connect(args.host, args.port)
gpsd_socket.watch(gpsd_protocol=args.gpsd_protocol)
data_stream = gps3.DataStream()
screen = curses.initscr()
screen.clear()
screen.scrollok(True)
curses.noecho()
curses.curs_set(0)
curses.cbreak()
try:
if 'json' in args.gpsd_protocol:
show_human()
if 'nmea' in args.gpsd_protocol:
show_nmea()
except KeyboardInterrupt:
shut_down()
except (OSError, IOError) as error:
gpsd_socket.close()
curses.nocbreak()
curses.echo()
curses.endwin()
sys.stderr.write('\rHUMAN error--> {}'.format(error))
sys.stderr.write('\rhuman connection to gpsd at \'{0}\' on port \'{1}\' failed.\n'.format(args.host, args.port))
sys.exit(1) # TODO: gpsd existence check and start
#
# Someday a cleaner Python interface will live here
#
# End
|
wadda/gps3 | examples/human.py | show_human | python | def show_human():
form = 'RAW'
units = 'raw'
data_window = curses.newwin(19, 39, 0, 0)
sat_window = curses.newwin(14, 39, 0, 40)
device_window = curses.newwin(6, 39, 13, 40)
packet_window = curses.newwin(7, 79, 19, 0)
for new_data in gpsd_socket:
if new_data:
data_stream.unpack(new_data)
screen.nodelay(1)
key_press = screen.getch()
if key_press == ord('q'): # quit
shut_down()
elif key_press == ord('a'): # NMEA
gpsd_socket.watch(enable=False, gpsd_protocol='json')
gpsd_socket.watch(gpsd_protocol='nmea')
show_nmea()
elif key_press == ord('0'): # raw
form = 'RAW'
units = 'raw'
data_window.clear()
elif key_press == ord('1'): # DDD
form = 'DDD'
data_window.clear()
elif key_press == ord('2'): # DMM
form = 'DMM'
data_window.clear()
elif key_press == ord('3'): # DMS
form = 'DMS'
data_window.clear()
elif key_press == ord('m'): # Metric
units = 'metric'
data_window.clear()
elif key_press == ord('i'): # Imperial
units = 'imperial'
data_window.clear()
elif key_press == ord('n'): # Nautical
units = 'nautical'
data_window.clear()
elif key_press == ord('d'): # Refresh device listings
gpsd_socket.send('?DEVICES;')
device_window.clear()
data_window.box()
data_window.addstr(0, 2, 'GPS3 Python {}.{}.{} GPSD Interface'.format(*sys.version_info), curses.A_BOLD)
data_window.addstr(1, 2, 'Time: {time} '.format(**data_stream.TPV))
data_window.addstr(2, 2, 'Latitude: {} '.format(sexagesimal(data_stream.TPV['lat'], 'lat', form)))
data_window.addstr(3, 2, 'Longitude: {} '.format(sexagesimal(data_stream.TPV['lon'], 'lon', form)))
data_window.addstr(4, 2, 'Altitude: {} {}'.format(*unit_conversion(data_stream.TPV['alt'], units, length=True)))
data_window.addstr(5, 2, 'Speed: {} {}'.format(*unit_conversion(data_stream.TPV['speed'], units)))
data_window.addstr(6, 2, 'Heading: {track}° True'.format(**data_stream.TPV))
data_window.addstr(7, 2, 'Climb: {} {}/s'.format(*unit_conversion(data_stream.TPV['climb'], units, length=True)))
data_window.addstr(8, 2, 'Status: {mode:<}D '.format(**data_stream.TPV))
data_window.addstr(9, 2, 'Latitude Err: +/-{} {} '.format(*unit_conversion(data_stream.TPV['epx'], units, length=True)))
data_window.addstr(10, 2, 'Longitude Err: +/-{} {}'.format(*unit_conversion(data_stream.TPV['epy'], units, length=True)))
data_window.addstr(11, 2, 'Altitude Err: +/-{} {} '.format(*unit_conversion(data_stream.TPV['epv'], units, length=True)))
data_window.addstr(12, 2, 'Course Err: +/-{epc} '.format(**data_stream.TPV), curses.A_DIM)
data_window.addstr(13, 2, 'Speed Err: +/-{} {} '.format(*unit_conversion(data_stream.TPV['eps'], units)), curses.A_DIM)
data_window.addstr(14, 2, 'Time Offset: +/-{ept} '.format(**data_stream.TPV), curses.A_DIM)
data_window.addstr(15, 2, 'gdop:{gdop} pdop:{pdop} tdop:{tdop}'.format(**data_stream.SKY))
data_window.addstr(16, 2, 'ydop:{ydop} xdop:{xdop} '.format(**data_stream.SKY))
data_window.addstr(17, 2, 'vdop:{vdop} hdop:{hdop} '.format(**data_stream.SKY))
sat_window.clear()
sat_window.box()
sat_window.addstr(0, 2, 'Using {0[1]}/{0[0]} satellites (truncated)'.format(satellites_used(data_stream.SKY['satellites'])))
sat_window.addstr(1, 2, 'PRN Elev Azimuth SNR Used')
line = 2
if isinstance(data_stream.SKY['satellites'], list): # Nested lists of dictionaries are strings before data is present
for sats in data_stream.SKY['satellites'][0:10]:
sat_window.addstr(line, 2, '{PRN:>2} {el:>6} {az:>5} {ss:>5} {used:}'.format(**sats))
line += 1
# device_window.clear()
device_window.box()
if not isinstance(data_stream.DEVICES['devices'], list): # Local machines need a 'device' kick start
gpsd_socket.send('?DEVICES;') # to have valid data I don't know why.
if isinstance(data_stream.DEVICES['devices'], list): # Nested lists of dictionaries are strings before data is present.
for gizmo in data_stream.DEVICES['devices']:
start_time, _uicroseconds = gizmo['activated'].split('.') # Remove '.000Z'
elapsed = elapsed_time_from(start_time)
device_window.addstr(1, 2, 'Activated: {}'.format(gizmo['activated']))
device_window.addstr(2, 2, 'Host:{0.host}:{0.port} {1}'.format(args, gizmo['path']))
device_window.addstr(3, 2, 'Driver:{driver} BPS:{bps}'.format(**gizmo))
device_window.addstr(4, 2, 'Cycle:{0} Hz {1!s:>14} Elapsed'.format(1 / gizmo['cycle'], elapsed))
packet_window.clear()
# packet_window.border(0)
packet_window.scrollok(True)
packet_window.addstr(0, 0, '{}'.format(new_data))
# sleep(.9)
data_window.refresh()
sat_window.refresh()
device_window.refresh()
packet_window.refresh()
else: # Reduced CPU cycles with the non-blocking socket read, by putting 'sleep' here, rather than hitting
sleep(.1) | Curses terminal with standard outputs | train | https://github.com/wadda/gps3/blob/91adcd7073b891b135b2a46d039ce2125cf09a09/examples/human.py#L160-L266 | [
"def satellites_used(feed):\n \"\"\"Counts number of satellites used in calculation from total visible satellites\n Arguments:\n feed feed=data_stream.TPV['satellites']\n Returns:\n total_satellites(int):\n used_satellites (int):\n \"\"\"\n total_satellites = 0\n used_satellites = 0\n\n if not isinstance(feed, list):\n return 0, 0\n\n for satellites in feed:\n total_satellites += 1\n if satellites['used'] is True:\n used_satellites += 1\n return total_satellites, used_satellites\n",
"def elapsed_time_from(start_time):\n \"\"\"calculate time delta from latched time and current time\"\"\"\n time_then = make_time(start_time)\n time_now = datetime.utcnow().replace(microsecond=0)\n if time_then is None:\n return\n delta_t = time_now - time_then\n return delta_t\n",
"def unit_conversion(thing, units, length=False):\n \"\"\"converts base data between metric, imperial, or nautical units\"\"\"\n if 'n/a' == thing:\n return 'n/a'\n try:\n thing = round(thing * CONVERSION[units][0 + length], 2)\n except TypeError:\n thing = 'fubar'\n return thing, CONVERSION[units][2 + length]\n",
"def sexagesimal(sexathang, latlon, form='DDD'):\n \"\"\"\n Arguments:\n sexathang: (float), -15.560615 (negative = South), -146.241122 (negative = West) # Apataki Carenage\n latlon: (str) 'lat' | 'lon'\n form: (str), 'DDD'|'DMM'|'DMS', decimal Degrees, decimal Minutes, decimal Seconds\n Returns:\n latitude: e.g., '15°33'38.214\"S'\n longitude: e.g., '146°14'28.039\"W'\n \"\"\"\n cardinal = 'O'\n if not isinstance(sexathang, float):\n sexathang = 'n/a'\n return sexathang\n\n if latlon == 'lon':\n if sexathang > 0.0:\n cardinal = 'E'\n if sexathang < 0.0:\n cardinal = 'W'\n\n if latlon == 'lat':\n if sexathang > 0.0:\n cardinal = 'N'\n if sexathang < 0.0:\n cardinal = 'S'\n\n if 'RAW' in form:\n sexathang = '{0:4.9f}°'.format(sexathang) # 4 to allow -100° through -179.999999° to -180°\n return sexathang\n\n if 'DDD' in form:\n sexathang = '{0:3.6f}°'.format(abs(sexathang))\n\n if 'DMM' in form:\n _latlon = abs(sexathang)\n minute_latlon, degree_latlon = modf(_latlon)\n minute_latlon *= 60\n sexathang = '{0}°{1:2.5f}\\''.format(int(degree_latlon), minute_latlon)\n\n if 'DMS' in form:\n _latlon = abs(sexathang)\n minute_latlon, degree_latlon = modf(_latlon)\n second_latlon, minute_latlon = modf(minute_latlon * 60)\n second_latlon *= 60.0\n sexathang = '{0}°{1}\\'{2:2.3f}\\\"'.format(int(degree_latlon), int(minute_latlon), second_latlon)\n\n return sexathang + cardinal\n",
"def shut_down():\n \"\"\"Closes connection and restores terminal\"\"\"\n curses.nocbreak()\n curses.echo()\n curses.endwin()\n gpsd_socket.close()\n print('Keyboard interrupt received\\nTerminated by user\\nGood Bye.\\n')\n sys.exit(1)\n",
"def show_nmea():\n \"\"\"NMEA output in curses terminal\"\"\"\n data_window = curses.newwin(24, 79, 0, 0)\n\n for new_data in gpsd_socket:\n if new_data:\n screen.nodelay(1)\n key_press = screen.getch()\n if key_press == ord('q'):\n shut_down()\n elif key_press == ord('j'): # raw\n gpsd_socket.watch(enable=False, gpsd_protocol='nmea')\n gpsd_socket.watch(gpsd_protocol='json')\n show_human()\n\n data_window.border(0)\n data_window.addstr(0, 2, 'GPS3 Python {}.{}.{} GPSD Interface Showing NMEA protocol'.format(*sys.version_info), curses.A_BOLD)\n data_window.addstr(2, 2, '{}'.format(gpsd_socket.response))\n data_window.refresh()\n else:\n sleep(.1)\n",
"def watch(self, enable=True, gpsd_protocol=PROTOCOL, devicepath=None):\n \"\"\"watch gpsd in various gpsd_protocols or devices.\n Arguments:\n enable: (bool) stream data to socket\n gpsd_protocol: (str) 'json' | 'nmea' | 'rare' | 'raw' | 'scaled' | 'split24' | 'pps'\n devicepath: (str) device path - '/dev/ttyUSBn' for some number n or '/dev/whatever_works'\n Returns:\n command: (str) e.g., '?WATCH={\"enable\":true,\"json\":true};'\n \"\"\"\n # N.B.: 'timing' requires special attention, as it is undocumented and lives with dragons.\n command = '?WATCH={{\"enable\":true,\"{0}\":true}}'.format(gpsd_protocol)\n\n if gpsd_protocol == 'rare': # 1 for a channel, gpsd reports the unprocessed NMEA or AIVDM data stream\n command = command.replace('\"rare\":true', '\"raw\":1')\n if gpsd_protocol == 'raw': # 2 channel that processes binary data, received data verbatim without hex-dumping.\n command = command.replace('\"raw\":true', '\"raw\",2')\n if not enable:\n command = command.replace('true', 'false') # sets -all- command values false .\n if devicepath:\n command = command.replace('}', ',\"device\":\"') + devicepath + '\"}'\n\n return self.send(command)\n",
"def send(self, command):\n \"\"\"Ship commands to the daemon\n Arguments:\n command: e.g., '?WATCH={{'enable':true,'json':true}}'|'?VERSION;'|'?DEVICES;'|'?DEVICE;'|'?POLL;'\n \"\"\"\n # The POLL command requests data from the last-seen fixes on all active GPS devices.\n # Devices must previously have been activated by ?WATCH to be pollable.\n try:\n self.streamSock.send(bytes(command, encoding='utf-8'))\n except TypeError:\n self.streamSock.send(command) # 2.7 chokes on 'bytes' and 'encoding='\n except (OSError, IOError) as error: # MOE, LEAVE THIS ALONE!...for now.\n sys.stderr.write('\\nGPS3 send command fail with {}\\n'.format(error)) # [Errno 107] typically no socket\n",
"def unpack(self, gpsd_socket_response):\n \"\"\"Sets new socket data as DataStream attributes in those initialised dictionaries\n Arguments:\n gpsd_socket_response (json object):\n Provides:\n self attribute dictionaries, e.g., self.TPV['lat'], self.SKY['gdop']\n Raises:\n AttributeError: 'str' object has no attribute 'keys' when the device falls out of the system\n ValueError, KeyError: most likely extra, or mangled JSON data, should not happen, but that\n applies to a lot of things.\n \"\"\"\n try:\n fresh_data = json.loads(gpsd_socket_response) # The reserved word 'class' is popped from JSON object class\n package_name = fresh_data.pop('class', 'ERROR') # gpsd data package errors are also 'ERROR'.\n package = getattr(self, package_name, package_name) # packages are named for JSON object class\n for key in package.keys():\n package[key] = fresh_data.get(key, 'n/a') # Restores 'n/a' if key is absent in the socket response\n\n except AttributeError: # 'str' object has no attribute 'keys'\n sys.stderr.write('There is an unexpected exception in DataStream.unpack')\n return\n\n except (ValueError, KeyError) as error:\n sys.stderr.write(str(error)) # Extra data or aberrant data in stream.\n return\n"
] | #!/usr/bin/env python3
# coding=utf-8
"""
human.py is to showcase gps3.py, a Python 2.7-3.5 GPSD interface
Defaults host='127.0.0.1', port=2947, gpsd_protocol='json'
Toggle Lat/Lon form with '0', '1', '2', '3' for RAW, DDD, DMM, DMS
Toggle units with '0', 'm', 'i', 'n', for 'raw', Metric, Imperial, Nautical
Toggle gpsd protocol with 'j', 'a' for 'json', 'nmea' displays
Quit with 'q' or '^c'
python[X] human.py --help for list of commandline options.
"""
import argparse
import curses
import sys
from datetime import datetime
from math import modf
from time import sleep
from gps3 import gps3 # Moe, remember to CHANGE to straight 'import gps3' if not installed,
# or check which Python version it's installed in. You forget sometimes.
__author__ = 'Moe'
__copyright__ = 'Copyright 2015-2016 Moe'
__license__ = 'MIT'
__version__ = '0.33.2'
CONVERSION = {'raw': (1, 1, 'm/s', 'meters'),
'metric': (3.6, 1, 'kph', 'meters'),
'nautical': (1.9438445, 1, 'kts', 'meters'),
'imperial': (2.2369363, 3.2808399, 'mph', 'feet')}
def add_args():
"""Adds commandline arguments and formatted Help"""
parser = argparse.ArgumentParser()
parser.add_argument('-host', action='store', dest='host', default='127.0.0.1', help='DEFAULT "127.0.0.1"')
parser.add_argument('-port', action='store', dest='port', default='2947', help='DEFAULT 2947', type=int)
parser.add_argument('-json', dest='gpsd_protocol', const='json', action='store_const', default='json', help='DEFAULT JSON objects */')
parser.add_argument('-device', dest='devicepath', action='store', help='alternate devicepath e.g.,"-device /dev/ttyUSB4"')
# Infrequently used options
parser.add_argument('-nmea', dest='gpsd_protocol', const='nmea', action='store_const', help='*/ output in NMEA */')
# parser.add_argument('-rare', dest='gpsd_protocol', const='rare', action='store_const', help='*/ output of packets in hex */')
# parser.add_argument('-raw', dest='gpsd_protocol', const='raw', action='store_const', help='*/ output of raw packets */')
# parser.add_argument('-scaled', dest='gpsd_protocol', const='scaled', action='store_const', help='*/ scale output to floats */')
# parser.add_argument('-timing', dest='gpsd_protocol', const='timing', action='store_const', help='*/ timing information */')
# parser.add_argument('-split24', dest='gpsd_protocol', const='split24', action='store_const', help='*/ split AIS Type 24s */')
# parser.add_argument('-pps', dest='gpsd_protocol', const='pps', action='store_const', help='*/ enable PPS JSON */')
parser.add_argument('-v', '--version', action='version', version='Version: {}'.format(__version__))
cli_args = parser.parse_args()
return cli_args
def satellites_used(feed):
"""Counts number of satellites used in calculation from total visible satellites
Arguments:
feed feed=data_stream.TPV['satellites']
Returns:
total_satellites(int):
used_satellites (int):
"""
total_satellites = 0
used_satellites = 0
if not isinstance(feed, list):
return 0, 0
for satellites in feed:
total_satellites += 1
if satellites['used'] is True:
used_satellites += 1
return total_satellites, used_satellites
def make_time(gps_datetime_str):
"""Makes datetime object from string object"""
if not 'n/a' == gps_datetime_str:
datetime_string = gps_datetime_str
datetime_object = datetime.strptime(datetime_string, "%Y-%m-%dT%H:%M:%S")
return datetime_object
def elapsed_time_from(start_time):
"""calculate time delta from latched time and current time"""
time_then = make_time(start_time)
time_now = datetime.utcnow().replace(microsecond=0)
if time_then is None:
return
delta_t = time_now - time_then
return delta_t
def unit_conversion(thing, units, length=False):
"""converts base data between metric, imperial, or nautical units"""
if 'n/a' == thing:
return 'n/a'
try:
thing = round(thing * CONVERSION[units][0 + length], 2)
except TypeError:
thing = 'fubar'
return thing, CONVERSION[units][2 + length]
def sexagesimal(sexathang, latlon, form='DDD'):
"""
Arguments:
sexathang: (float), -15.560615 (negative = South), -146.241122 (negative = West) # Apataki Carenage
latlon: (str) 'lat' | 'lon'
form: (str), 'DDD'|'DMM'|'DMS', decimal Degrees, decimal Minutes, decimal Seconds
Returns:
latitude: e.g., '15°33'38.214"S'
longitude: e.g., '146°14'28.039"W'
"""
cardinal = 'O'
if not isinstance(sexathang, float):
sexathang = 'n/a'
return sexathang
if latlon == 'lon':
if sexathang > 0.0:
cardinal = 'E'
if sexathang < 0.0:
cardinal = 'W'
if latlon == 'lat':
if sexathang > 0.0:
cardinal = 'N'
if sexathang < 0.0:
cardinal = 'S'
if 'RAW' in form:
sexathang = '{0:4.9f}°'.format(sexathang) # 4 to allow -100° through -179.999999° to -180°
return sexathang
if 'DDD' in form:
sexathang = '{0:3.6f}°'.format(abs(sexathang))
if 'DMM' in form:
_latlon = abs(sexathang)
minute_latlon, degree_latlon = modf(_latlon)
minute_latlon *= 60
sexathang = '{0}°{1:2.5f}\''.format(int(degree_latlon), minute_latlon)
if 'DMS' in form:
_latlon = abs(sexathang)
minute_latlon, degree_latlon = modf(_latlon)
second_latlon, minute_latlon = modf(minute_latlon * 60)
second_latlon *= 60.0
sexathang = '{0}°{1}\'{2:2.3f}\"'.format(int(degree_latlon), int(minute_latlon), second_latlon)
return sexathang + cardinal
# the socket fast and furious with hundreds of empty checks between sleeps.
def show_nmea():
"""NMEA output in curses terminal"""
data_window = curses.newwin(24, 79, 0, 0)
for new_data in gpsd_socket:
if new_data:
screen.nodelay(1)
key_press = screen.getch()
if key_press == ord('q'):
shut_down()
elif key_press == ord('j'): # raw
gpsd_socket.watch(enable=False, gpsd_protocol='nmea')
gpsd_socket.watch(gpsd_protocol='json')
show_human()
data_window.border(0)
data_window.addstr(0, 2, 'GPS3 Python {}.{}.{} GPSD Interface Showing NMEA protocol'.format(*sys.version_info), curses.A_BOLD)
data_window.addstr(2, 2, '{}'.format(gpsd_socket.response))
data_window.refresh()
else:
sleep(.1)
def shut_down():
"""Closes connection and restores terminal"""
curses.nocbreak()
curses.echo()
curses.endwin()
gpsd_socket.close()
print('Keyboard interrupt received\nTerminated by user\nGood Bye.\n')
sys.exit(1)
if __name__ == '__main__':
args = add_args()
gpsd_socket = gps3.GPSDSocket()
gpsd_socket.connect(args.host, args.port)
gpsd_socket.watch(gpsd_protocol=args.gpsd_protocol)
data_stream = gps3.DataStream()
screen = curses.initscr()
screen.clear()
screen.scrollok(True)
curses.noecho()
curses.curs_set(0)
curses.cbreak()
try:
if 'json' in args.gpsd_protocol:
show_human()
if 'nmea' in args.gpsd_protocol:
show_nmea()
except KeyboardInterrupt:
shut_down()
except (OSError, IOError) as error:
gpsd_socket.close()
curses.nocbreak()
curses.echo()
curses.endwin()
sys.stderr.write('\rHUMAN error--> {}'.format(error))
sys.stderr.write('\rhuman connection to gpsd at \'{0}\' on port \'{1}\' failed.\n'.format(args.host, args.port))
sys.exit(1) # TODO: gpsd existence check and start
#
# Someday a cleaner Python interface will live here
#
# End
|
wadda/gps3 | examples/human.py | shut_down | python | def shut_down():
curses.nocbreak()
curses.echo()
curses.endwin()
gpsd_socket.close()
print('Keyboard interrupt received\nTerminated by user\nGood Bye.\n')
sys.exit(1) | Closes connection and restores terminal | train | https://github.com/wadda/gps3/blob/91adcd7073b891b135b2a46d039ce2125cf09a09/examples/human.py#L292-L299 | [
"def close(self):\n \"\"\"turn off stream and close socket\"\"\"\n if self.streamSock:\n self.watch(enable=False)\n self.streamSock.close()\n self.streamSock = None\n"
] | #!/usr/bin/env python3
# coding=utf-8
"""
human.py is to showcase gps3.py, a Python 2.7-3.5 GPSD interface
Defaults host='127.0.0.1', port=2947, gpsd_protocol='json'
Toggle Lat/Lon form with '0', '1', '2', '3' for RAW, DDD, DMM, DMS
Toggle units with '0', 'm', 'i', 'n', for 'raw', Metric, Imperial, Nautical
Toggle gpsd protocol with 'j', 'a' for 'json', 'nmea' displays
Quit with 'q' or '^c'
python[X] human.py --help for list of commandline options.
"""
import argparse
import curses
import sys
from datetime import datetime
from math import modf
from time import sleep
from gps3 import gps3 # Moe, remember to CHANGE to straight 'import gps3' if not installed,
# or check which Python version it's installed in. You forget sometimes.
__author__ = 'Moe'
__copyright__ = 'Copyright 2015-2016 Moe'
__license__ = 'MIT'
__version__ = '0.33.2'
CONVERSION = {'raw': (1, 1, 'm/s', 'meters'),
'metric': (3.6, 1, 'kph', 'meters'),
'nautical': (1.9438445, 1, 'kts', 'meters'),
'imperial': (2.2369363, 3.2808399, 'mph', 'feet')}
def add_args():
"""Adds commandline arguments and formatted Help"""
parser = argparse.ArgumentParser()
parser.add_argument('-host', action='store', dest='host', default='127.0.0.1', help='DEFAULT "127.0.0.1"')
parser.add_argument('-port', action='store', dest='port', default='2947', help='DEFAULT 2947', type=int)
parser.add_argument('-json', dest='gpsd_protocol', const='json', action='store_const', default='json', help='DEFAULT JSON objects */')
parser.add_argument('-device', dest='devicepath', action='store', help='alternate devicepath e.g.,"-device /dev/ttyUSB4"')
# Infrequently used options
parser.add_argument('-nmea', dest='gpsd_protocol', const='nmea', action='store_const', help='*/ output in NMEA */')
# parser.add_argument('-rare', dest='gpsd_protocol', const='rare', action='store_const', help='*/ output of packets in hex */')
# parser.add_argument('-raw', dest='gpsd_protocol', const='raw', action='store_const', help='*/ output of raw packets */')
# parser.add_argument('-scaled', dest='gpsd_protocol', const='scaled', action='store_const', help='*/ scale output to floats */')
# parser.add_argument('-timing', dest='gpsd_protocol', const='timing', action='store_const', help='*/ timing information */')
# parser.add_argument('-split24', dest='gpsd_protocol', const='split24', action='store_const', help='*/ split AIS Type 24s */')
# parser.add_argument('-pps', dest='gpsd_protocol', const='pps', action='store_const', help='*/ enable PPS JSON */')
parser.add_argument('-v', '--version', action='version', version='Version: {}'.format(__version__))
cli_args = parser.parse_args()
return cli_args
def satellites_used(feed):
"""Counts number of satellites used in calculation from total visible satellites
Arguments:
feed feed=data_stream.TPV['satellites']
Returns:
total_satellites(int):
used_satellites (int):
"""
total_satellites = 0
used_satellites = 0
if not isinstance(feed, list):
return 0, 0
for satellites in feed:
total_satellites += 1
if satellites['used'] is True:
used_satellites += 1
return total_satellites, used_satellites
def make_time(gps_datetime_str):
"""Makes datetime object from string object"""
if not 'n/a' == gps_datetime_str:
datetime_string = gps_datetime_str
datetime_object = datetime.strptime(datetime_string, "%Y-%m-%dT%H:%M:%S")
return datetime_object
def elapsed_time_from(start_time):
"""calculate time delta from latched time and current time"""
time_then = make_time(start_time)
time_now = datetime.utcnow().replace(microsecond=0)
if time_then is None:
return
delta_t = time_now - time_then
return delta_t
def unit_conversion(thing, units, length=False):
"""converts base data between metric, imperial, or nautical units"""
if 'n/a' == thing:
return 'n/a'
try:
thing = round(thing * CONVERSION[units][0 + length], 2)
except TypeError:
thing = 'fubar'
return thing, CONVERSION[units][2 + length]
def sexagesimal(sexathang, latlon, form='DDD'):
"""
Arguments:
sexathang: (float), -15.560615 (negative = South), -146.241122 (negative = West) # Apataki Carenage
latlon: (str) 'lat' | 'lon'
form: (str), 'DDD'|'DMM'|'DMS', decimal Degrees, decimal Minutes, decimal Seconds
Returns:
latitude: e.g., '15°33'38.214"S'
longitude: e.g., '146°14'28.039"W'
"""
cardinal = 'O'
if not isinstance(sexathang, float):
sexathang = 'n/a'
return sexathang
if latlon == 'lon':
if sexathang > 0.0:
cardinal = 'E'
if sexathang < 0.0:
cardinal = 'W'
if latlon == 'lat':
if sexathang > 0.0:
cardinal = 'N'
if sexathang < 0.0:
cardinal = 'S'
if 'RAW' in form:
sexathang = '{0:4.9f}°'.format(sexathang) # 4 to allow -100° through -179.999999° to -180°
return sexathang
if 'DDD' in form:
sexathang = '{0:3.6f}°'.format(abs(sexathang))
if 'DMM' in form:
_latlon = abs(sexathang)
minute_latlon, degree_latlon = modf(_latlon)
minute_latlon *= 60
sexathang = '{0}°{1:2.5f}\''.format(int(degree_latlon), minute_latlon)
if 'DMS' in form:
_latlon = abs(sexathang)
minute_latlon, degree_latlon = modf(_latlon)
second_latlon, minute_latlon = modf(minute_latlon * 60)
second_latlon *= 60.0
sexathang = '{0}°{1}\'{2:2.3f}\"'.format(int(degree_latlon), int(minute_latlon), second_latlon)
return sexathang + cardinal
def show_human():
"""Curses terminal with standard outputs """
form = 'RAW'
units = 'raw'
data_window = curses.newwin(19, 39, 0, 0)
sat_window = curses.newwin(14, 39, 0, 40)
device_window = curses.newwin(6, 39, 13, 40)
packet_window = curses.newwin(7, 79, 19, 0)
for new_data in gpsd_socket:
if new_data:
data_stream.unpack(new_data)
screen.nodelay(1)
key_press = screen.getch()
if key_press == ord('q'): # quit
shut_down()
elif key_press == ord('a'): # NMEA
gpsd_socket.watch(enable=False, gpsd_protocol='json')
gpsd_socket.watch(gpsd_protocol='nmea')
show_nmea()
elif key_press == ord('0'): # raw
form = 'RAW'
units = 'raw'
data_window.clear()
elif key_press == ord('1'): # DDD
form = 'DDD'
data_window.clear()
elif key_press == ord('2'): # DMM
form = 'DMM'
data_window.clear()
elif key_press == ord('3'): # DMS
form = 'DMS'
data_window.clear()
elif key_press == ord('m'): # Metric
units = 'metric'
data_window.clear()
elif key_press == ord('i'): # Imperial
units = 'imperial'
data_window.clear()
elif key_press == ord('n'): # Nautical
units = 'nautical'
data_window.clear()
elif key_press == ord('d'): # Refresh device listings
gpsd_socket.send('?DEVICES;')
device_window.clear()
data_window.box()
data_window.addstr(0, 2, 'GPS3 Python {}.{}.{} GPSD Interface'.format(*sys.version_info), curses.A_BOLD)
data_window.addstr(1, 2, 'Time: {time} '.format(**data_stream.TPV))
data_window.addstr(2, 2, 'Latitude: {} '.format(sexagesimal(data_stream.TPV['lat'], 'lat', form)))
data_window.addstr(3, 2, 'Longitude: {} '.format(sexagesimal(data_stream.TPV['lon'], 'lon', form)))
data_window.addstr(4, 2, 'Altitude: {} {}'.format(*unit_conversion(data_stream.TPV['alt'], units, length=True)))
data_window.addstr(5, 2, 'Speed: {} {}'.format(*unit_conversion(data_stream.TPV['speed'], units)))
data_window.addstr(6, 2, 'Heading: {track}° True'.format(**data_stream.TPV))
data_window.addstr(7, 2, 'Climb: {} {}/s'.format(*unit_conversion(data_stream.TPV['climb'], units, length=True)))
data_window.addstr(8, 2, 'Status: {mode:<}D '.format(**data_stream.TPV))
data_window.addstr(9, 2, 'Latitude Err: +/-{} {} '.format(*unit_conversion(data_stream.TPV['epx'], units, length=True)))
data_window.addstr(10, 2, 'Longitude Err: +/-{} {}'.format(*unit_conversion(data_stream.TPV['epy'], units, length=True)))
data_window.addstr(11, 2, 'Altitude Err: +/-{} {} '.format(*unit_conversion(data_stream.TPV['epv'], units, length=True)))
data_window.addstr(12, 2, 'Course Err: +/-{epc} '.format(**data_stream.TPV), curses.A_DIM)
data_window.addstr(13, 2, 'Speed Err: +/-{} {} '.format(*unit_conversion(data_stream.TPV['eps'], units)), curses.A_DIM)
data_window.addstr(14, 2, 'Time Offset: +/-{ept} '.format(**data_stream.TPV), curses.A_DIM)
data_window.addstr(15, 2, 'gdop:{gdop} pdop:{pdop} tdop:{tdop}'.format(**data_stream.SKY))
data_window.addstr(16, 2, 'ydop:{ydop} xdop:{xdop} '.format(**data_stream.SKY))
data_window.addstr(17, 2, 'vdop:{vdop} hdop:{hdop} '.format(**data_stream.SKY))
sat_window.clear()
sat_window.box()
sat_window.addstr(0, 2, 'Using {0[1]}/{0[0]} satellites (truncated)'.format(satellites_used(data_stream.SKY['satellites'])))
sat_window.addstr(1, 2, 'PRN Elev Azimuth SNR Used')
line = 2
if isinstance(data_stream.SKY['satellites'], list): # Nested lists of dictionaries are strings before data is present
for sats in data_stream.SKY['satellites'][0:10]:
sat_window.addstr(line, 2, '{PRN:>2} {el:>6} {az:>5} {ss:>5} {used:}'.format(**sats))
line += 1
# device_window.clear()
device_window.box()
if not isinstance(data_stream.DEVICES['devices'], list): # Local machines need a 'device' kick start
gpsd_socket.send('?DEVICES;') # to have valid data I don't know why.
if isinstance(data_stream.DEVICES['devices'], list): # Nested lists of dictionaries are strings before data is present.
for gizmo in data_stream.DEVICES['devices']:
start_time, _uicroseconds = gizmo['activated'].split('.') # Remove '.000Z'
elapsed = elapsed_time_from(start_time)
device_window.addstr(1, 2, 'Activated: {}'.format(gizmo['activated']))
device_window.addstr(2, 2, 'Host:{0.host}:{0.port} {1}'.format(args, gizmo['path']))
device_window.addstr(3, 2, 'Driver:{driver} BPS:{bps}'.format(**gizmo))
device_window.addstr(4, 2, 'Cycle:{0} Hz {1!s:>14} Elapsed'.format(1 / gizmo['cycle'], elapsed))
packet_window.clear()
# packet_window.border(0)
packet_window.scrollok(True)
packet_window.addstr(0, 0, '{}'.format(new_data))
# sleep(.9)
data_window.refresh()
sat_window.refresh()
device_window.refresh()
packet_window.refresh()
else: # Reduced CPU cycles with the non-blocking socket read, by putting 'sleep' here, rather than hitting
sleep(.1) # the socket fast and furious with hundreds of empty checks between sleeps.
def show_nmea():
"""NMEA output in curses terminal"""
data_window = curses.newwin(24, 79, 0, 0)
for new_data in gpsd_socket:
if new_data:
screen.nodelay(1)
key_press = screen.getch()
if key_press == ord('q'):
shut_down()
elif key_press == ord('j'): # raw
gpsd_socket.watch(enable=False, gpsd_protocol='nmea')
gpsd_socket.watch(gpsd_protocol='json')
show_human()
data_window.border(0)
data_window.addstr(0, 2, 'GPS3 Python {}.{}.{} GPSD Interface Showing NMEA protocol'.format(*sys.version_info), curses.A_BOLD)
data_window.addstr(2, 2, '{}'.format(gpsd_socket.response))
data_window.refresh()
else:
sleep(.1)
if __name__ == '__main__':
args = add_args()
gpsd_socket = gps3.GPSDSocket()
gpsd_socket.connect(args.host, args.port)
gpsd_socket.watch(gpsd_protocol=args.gpsd_protocol)
data_stream = gps3.DataStream()
screen = curses.initscr()
screen.clear()
screen.scrollok(True)
curses.noecho()
curses.curs_set(0)
curses.cbreak()
try:
if 'json' in args.gpsd_protocol:
show_human()
if 'nmea' in args.gpsd_protocol:
show_nmea()
except KeyboardInterrupt:
shut_down()
except (OSError, IOError) as error:
gpsd_socket.close()
curses.nocbreak()
curses.echo()
curses.endwin()
sys.stderr.write('\rHUMAN error--> {}'.format(error))
sys.stderr.write('\rhuman connection to gpsd at \'{0}\' on port \'{1}\' failed.\n'.format(args.host, args.port))
sys.exit(1) # TODO: gpsd existence check and start
#
# Someday a cleaner Python interface will live here
#
# End
|
wadda/gps3 | gps3/gps3.py | GPSDSocket.connect | python | def connect(self, host=HOST, port=GPSD_PORT):
for alotta_stuff in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
family, socktype, proto, _canonname, host_port = alotta_stuff
try:
self.streamSock = socket.socket(family, socktype, proto)
self.streamSock.connect(host_port)
self.streamSock.setblocking(False)
except (OSError, IOError) as error:
sys.stderr.write('\r\nGPSDSocket.connect exception is--> {}'.format(error))
sys.stderr.write('\r\nGPS3 gpsd connection at \'{0}\' on port \'{1}\' failed\r\n'.format(host, port)) | Connect to a host on a given port.
Arguments:
host: default host='127.0.0.1'
port: default port=2947 | train | https://github.com/wadda/gps3/blob/91adcd7073b891b135b2a46d039ce2125cf09a09/gps3/gps3.py#L50-L64 | null | class GPSDSocket(object):
"""Establish a socket with gpsd, by which to send commands and receive data."""
def __init__(self):
self.streamSock = None
self.response = None
def watch(self, enable=True, gpsd_protocol=PROTOCOL, devicepath=None):
"""watch gpsd in various gpsd_protocols or devices.
Arguments:
enable: (bool) stream data to socket
gpsd_protocol: (str) 'json' | 'nmea' | 'rare' | 'raw' | 'scaled' | 'split24' | 'pps'
devicepath: (str) device path - '/dev/ttyUSBn' for some number n or '/dev/whatever_works'
Returns:
command: (str) e.g., '?WATCH={"enable":true,"json":true};'
"""
# N.B.: 'timing' requires special attention, as it is undocumented and lives with dragons.
command = '?WATCH={{"enable":true,"{0}":true}}'.format(gpsd_protocol)
if gpsd_protocol == 'rare': # 1 for a channel, gpsd reports the unprocessed NMEA or AIVDM data stream
command = command.replace('"rare":true', '"raw":1')
if gpsd_protocol == 'raw': # 2 channel that processes binary data, received data verbatim without hex-dumping.
command = command.replace('"raw":true', '"raw",2')
if not enable:
command = command.replace('true', 'false') # sets -all- command values false .
if devicepath:
command = command.replace('}', ',"device":"') + devicepath + '"}'
return self.send(command)
def send(self, command):
"""Ship commands to the daemon
Arguments:
command: e.g., '?WATCH={{'enable':true,'json':true}}'|'?VERSION;'|'?DEVICES;'|'?DEVICE;'|'?POLL;'
"""
# The POLL command requests data from the last-seen fixes on all active GPS devices.
# Devices must previously have been activated by ?WATCH to be pollable.
try:
self.streamSock.send(bytes(command, encoding='utf-8'))
except TypeError:
self.streamSock.send(command) # 2.7 chokes on 'bytes' and 'encoding='
except (OSError, IOError) as error: # MOE, LEAVE THIS ALONE!...for now.
sys.stderr.write('\nGPS3 send command fail with {}\n'.format(error)) # [Errno 107] typically no socket
def __iter__(self):
"""banana""" # <--- for scale
return self
def next(self, timeout=0):
"""Return empty unless new data is ready for the client.
Arguments:
timeout: Default timeout=0 range zero to float specifies a time-out as a floating point
number in seconds. Will sit and wait for timeout seconds. When the timeout argument is omitted
the function blocks until at least one file descriptor is ready. A time-out value of zero specifies
a poll and never blocks.
"""
try:
waitin, _waitout, _waiterror = select.select((self.streamSock,), (), (), timeout)
if not waitin: return
else:
gpsd_response = self.streamSock.makefile() # '.makefile(buffering=4096)' In strictly Python3
self.response = gpsd_response.readline()
return self.response
except StopIteration as error:
sys.stderr.write('The readline exception in GPSDSocket.next is--> {}'.format(error))
__next__ = next # Workaround for changes in iterating between Python 2.7 and 3
def close(self):
"""turn off stream and close socket"""
if self.streamSock:
self.watch(enable=False)
self.streamSock.close()
self.streamSock = None
|
wadda/gps3 | gps3/gps3.py | GPSDSocket.watch | python | def watch(self, enable=True, gpsd_protocol=PROTOCOL, devicepath=None):
# N.B.: 'timing' requires special attention, as it is undocumented and lives with dragons.
command = '?WATCH={{"enable":true,"{0}":true}}'.format(gpsd_protocol)
if gpsd_protocol == 'rare': # 1 for a channel, gpsd reports the unprocessed NMEA or AIVDM data stream
command = command.replace('"rare":true', '"raw":1')
if gpsd_protocol == 'raw': # 2 channel that processes binary data, received data verbatim without hex-dumping.
command = command.replace('"raw":true', '"raw",2')
if not enable:
command = command.replace('true', 'false') # sets -all- command values false .
if devicepath:
command = command.replace('}', ',"device":"') + devicepath + '"}'
return self.send(command) | watch gpsd in various gpsd_protocols or devices.
Arguments:
enable: (bool) stream data to socket
gpsd_protocol: (str) 'json' | 'nmea' | 'rare' | 'raw' | 'scaled' | 'split24' | 'pps'
devicepath: (str) device path - '/dev/ttyUSBn' for some number n or '/dev/whatever_works'
Returns:
command: (str) e.g., '?WATCH={"enable":true,"json":true};' | train | https://github.com/wadda/gps3/blob/91adcd7073b891b135b2a46d039ce2125cf09a09/gps3/gps3.py#L66-L87 | [
"def send(self, command):\n \"\"\"Ship commands to the daemon\n Arguments:\n command: e.g., '?WATCH={{'enable':true,'json':true}}'|'?VERSION;'|'?DEVICES;'|'?DEVICE;'|'?POLL;'\n \"\"\"\n # The POLL command requests data from the last-seen fixes on all active GPS devices.\n # Devices must previously have been activated by ?WATCH to be pollable.\n try:\n self.streamSock.send(bytes(command, encoding='utf-8'))\n except TypeError:\n self.streamSock.send(command) # 2.7 chokes on 'bytes' and 'encoding='\n except (OSError, IOError) as error: # MOE, LEAVE THIS ALONE!...for now.\n sys.stderr.write('\\nGPS3 send command fail with {}\\n'.format(error)) # [Errno 107] typically no socket\n"
] | class GPSDSocket(object):
"""Establish a socket with gpsd, by which to send commands and receive data."""
def __init__(self):
self.streamSock = None
self.response = None
def connect(self, host=HOST, port=GPSD_PORT):
"""Connect to a host on a given port.
Arguments:
host: default host='127.0.0.1'
port: default port=2947
"""
for alotta_stuff in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
family, socktype, proto, _canonname, host_port = alotta_stuff
try:
self.streamSock = socket.socket(family, socktype, proto)
self.streamSock.connect(host_port)
self.streamSock.setblocking(False)
except (OSError, IOError) as error:
sys.stderr.write('\r\nGPSDSocket.connect exception is--> {}'.format(error))
sys.stderr.write('\r\nGPS3 gpsd connection at \'{0}\' on port \'{1}\' failed\r\n'.format(host, port))
def send(self, command):
"""Ship commands to the daemon
Arguments:
command: e.g., '?WATCH={{'enable':true,'json':true}}'|'?VERSION;'|'?DEVICES;'|'?DEVICE;'|'?POLL;'
"""
# The POLL command requests data from the last-seen fixes on all active GPS devices.
# Devices must previously have been activated by ?WATCH to be pollable.
try:
self.streamSock.send(bytes(command, encoding='utf-8'))
except TypeError:
self.streamSock.send(command) # 2.7 chokes on 'bytes' and 'encoding='
except (OSError, IOError) as error: # MOE, LEAVE THIS ALONE!...for now.
sys.stderr.write('\nGPS3 send command fail with {}\n'.format(error)) # [Errno 107] typically no socket
def __iter__(self):
"""banana""" # <--- for scale
return self
def next(self, timeout=0):
"""Return empty unless new data is ready for the client.
Arguments:
timeout: Default timeout=0 range zero to float specifies a time-out as a floating point
number in seconds. Will sit and wait for timeout seconds. When the timeout argument is omitted
the function blocks until at least one file descriptor is ready. A time-out value of zero specifies
a poll and never blocks.
"""
try:
waitin, _waitout, _waiterror = select.select((self.streamSock,), (), (), timeout)
if not waitin: return
else:
gpsd_response = self.streamSock.makefile() # '.makefile(buffering=4096)' In strictly Python3
self.response = gpsd_response.readline()
return self.response
except StopIteration as error:
sys.stderr.write('The readline exception in GPSDSocket.next is--> {}'.format(error))
__next__ = next # Workaround for changes in iterating between Python 2.7 and 3
def close(self):
"""turn off stream and close socket"""
if self.streamSock:
self.watch(enable=False)
self.streamSock.close()
self.streamSock = None
|
wadda/gps3 | gps3/gps3.py | GPSDSocket.send | python | def send(self, command):
# The POLL command requests data from the last-seen fixes on all active GPS devices.
# Devices must previously have been activated by ?WATCH to be pollable.
try:
self.streamSock.send(bytes(command, encoding='utf-8'))
except TypeError:
self.streamSock.send(command) # 2.7 chokes on 'bytes' and 'encoding='
except (OSError, IOError) as error: # MOE, LEAVE THIS ALONE!...for now.
sys.stderr.write('\nGPS3 send command fail with {}\n'.format(error)) | Ship commands to the daemon
Arguments:
command: e.g., '?WATCH={{'enable':true,'json':true}}'|'?VERSION;'|'?DEVICES;'|'?DEVICE;'|'?POLL;' | train | https://github.com/wadda/gps3/blob/91adcd7073b891b135b2a46d039ce2125cf09a09/gps3/gps3.py#L89-L101 | null | class GPSDSocket(object):
"""Establish a socket with gpsd, by which to send commands and receive data."""
def __init__(self):
self.streamSock = None
self.response = None
def connect(self, host=HOST, port=GPSD_PORT):
"""Connect to a host on a given port.
Arguments:
host: default host='127.0.0.1'
port: default port=2947
"""
for alotta_stuff in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
family, socktype, proto, _canonname, host_port = alotta_stuff
try:
self.streamSock = socket.socket(family, socktype, proto)
self.streamSock.connect(host_port)
self.streamSock.setblocking(False)
except (OSError, IOError) as error:
sys.stderr.write('\r\nGPSDSocket.connect exception is--> {}'.format(error))
sys.stderr.write('\r\nGPS3 gpsd connection at \'{0}\' on port \'{1}\' failed\r\n'.format(host, port))
def watch(self, enable=True, gpsd_protocol=PROTOCOL, devicepath=None):
"""watch gpsd in various gpsd_protocols or devices.
Arguments:
enable: (bool) stream data to socket
gpsd_protocol: (str) 'json' | 'nmea' | 'rare' | 'raw' | 'scaled' | 'split24' | 'pps'
devicepath: (str) device path - '/dev/ttyUSBn' for some number n or '/dev/whatever_works'
Returns:
command: (str) e.g., '?WATCH={"enable":true,"json":true};'
"""
# N.B.: 'timing' requires special attention, as it is undocumented and lives with dragons.
command = '?WATCH={{"enable":true,"{0}":true}}'.format(gpsd_protocol)
if gpsd_protocol == 'rare': # 1 for a channel, gpsd reports the unprocessed NMEA or AIVDM data stream
command = command.replace('"rare":true', '"raw":1')
if gpsd_protocol == 'raw': # 2 channel that processes binary data, received data verbatim without hex-dumping.
command = command.replace('"raw":true', '"raw",2')
if not enable:
command = command.replace('true', 'false') # sets -all- command values false .
if devicepath:
command = command.replace('}', ',"device":"') + devicepath + '"}'
return self.send(command)
# [Errno 107] typically no socket
def __iter__(self):
"""banana""" # <--- for scale
return self
def next(self, timeout=0):
"""Return empty unless new data is ready for the client.
Arguments:
timeout: Default timeout=0 range zero to float specifies a time-out as a floating point
number in seconds. Will sit and wait for timeout seconds. When the timeout argument is omitted
the function blocks until at least one file descriptor is ready. A time-out value of zero specifies
a poll and never blocks.
"""
try:
waitin, _waitout, _waiterror = select.select((self.streamSock,), (), (), timeout)
if not waitin: return
else:
gpsd_response = self.streamSock.makefile() # '.makefile(buffering=4096)' In strictly Python3
self.response = gpsd_response.readline()
return self.response
except StopIteration as error:
sys.stderr.write('The readline exception in GPSDSocket.next is--> {}'.format(error))
__next__ = next # Workaround for changes in iterating between Python 2.7 and 3
def close(self):
"""turn off stream and close socket"""
if self.streamSock:
self.watch(enable=False)
self.streamSock.close()
self.streamSock = None
|
wadda/gps3 | gps3/gps3.py | GPSDSocket.next | python | def next(self, timeout=0):
try:
waitin, _waitout, _waiterror = select.select((self.streamSock,), (), (), timeout)
if not waitin: return
else:
gpsd_response = self.streamSock.makefile() # '.makefile(buffering=4096)' In strictly Python3
self.response = gpsd_response.readline()
return self.response
except StopIteration as error:
sys.stderr.write('The readline exception in GPSDSocket.next is--> {}'.format(error)) | Return empty unless new data is ready for the client.
Arguments:
timeout: Default timeout=0 range zero to float specifies a time-out as a floating point
number in seconds. Will sit and wait for timeout seconds. When the timeout argument is omitted
the function blocks until at least one file descriptor is ready. A time-out value of zero specifies
a poll and never blocks. | train | https://github.com/wadda/gps3/blob/91adcd7073b891b135b2a46d039ce2125cf09a09/gps3/gps3.py#L107-L124 | null | class GPSDSocket(object):
"""Establish a socket with gpsd, by which to send commands and receive data."""
def __init__(self):
self.streamSock = None
self.response = None
def connect(self, host=HOST, port=GPSD_PORT):
"""Connect to a host on a given port.
Arguments:
host: default host='127.0.0.1'
port: default port=2947
"""
for alotta_stuff in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
family, socktype, proto, _canonname, host_port = alotta_stuff
try:
self.streamSock = socket.socket(family, socktype, proto)
self.streamSock.connect(host_port)
self.streamSock.setblocking(False)
except (OSError, IOError) as error:
sys.stderr.write('\r\nGPSDSocket.connect exception is--> {}'.format(error))
sys.stderr.write('\r\nGPS3 gpsd connection at \'{0}\' on port \'{1}\' failed\r\n'.format(host, port))
def watch(self, enable=True, gpsd_protocol=PROTOCOL, devicepath=None):
"""watch gpsd in various gpsd_protocols or devices.
Arguments:
enable: (bool) stream data to socket
gpsd_protocol: (str) 'json' | 'nmea' | 'rare' | 'raw' | 'scaled' | 'split24' | 'pps'
devicepath: (str) device path - '/dev/ttyUSBn' for some number n or '/dev/whatever_works'
Returns:
command: (str) e.g., '?WATCH={"enable":true,"json":true};'
"""
# N.B.: 'timing' requires special attention, as it is undocumented and lives with dragons.
command = '?WATCH={{"enable":true,"{0}":true}}'.format(gpsd_protocol)
if gpsd_protocol == 'rare': # 1 for a channel, gpsd reports the unprocessed NMEA or AIVDM data stream
command = command.replace('"rare":true', '"raw":1')
if gpsd_protocol == 'raw': # 2 channel that processes binary data, received data verbatim without hex-dumping.
command = command.replace('"raw":true', '"raw",2')
if not enable:
command = command.replace('true', 'false') # sets -all- command values false .
if devicepath:
command = command.replace('}', ',"device":"') + devicepath + '"}'
return self.send(command)
def send(self, command):
"""Ship commands to the daemon
Arguments:
command: e.g., '?WATCH={{'enable':true,'json':true}}'|'?VERSION;'|'?DEVICES;'|'?DEVICE;'|'?POLL;'
"""
# The POLL command requests data from the last-seen fixes on all active GPS devices.
# Devices must previously have been activated by ?WATCH to be pollable.
try:
self.streamSock.send(bytes(command, encoding='utf-8'))
except TypeError:
self.streamSock.send(command) # 2.7 chokes on 'bytes' and 'encoding='
except (OSError, IOError) as error: # MOE, LEAVE THIS ALONE!...for now.
sys.stderr.write('\nGPS3 send command fail with {}\n'.format(error)) # [Errno 107] typically no socket
def __iter__(self):
"""banana""" # <--- for scale
return self
__next__ = next # Workaround for changes in iterating between Python 2.7 and 3
def close(self):
"""turn off stream and close socket"""
if self.streamSock:
self.watch(enable=False)
self.streamSock.close()
self.streamSock = None
|
wadda/gps3 | gps3/gps3.py | GPSDSocket.close | python | def close(self):
if self.streamSock:
self.watch(enable=False)
self.streamSock.close()
self.streamSock = None | turn off stream and close socket | train | https://github.com/wadda/gps3/blob/91adcd7073b891b135b2a46d039ce2125cf09a09/gps3/gps3.py#L128-L133 | [
"def watch(self, enable=True, gpsd_protocol=PROTOCOL, devicepath=None):\n \"\"\"watch gpsd in various gpsd_protocols or devices.\n Arguments:\n enable: (bool) stream data to socket\n gpsd_protocol: (str) 'json' | 'nmea' | 'rare' | 'raw' | 'scaled' | 'split24' | 'pps'\n devicepath: (str) device path - '/dev/ttyUSBn' for some number n or '/dev/whatever_works'\n Returns:\n command: (str) e.g., '?WATCH={\"enable\":true,\"json\":true};'\n \"\"\"\n # N.B.: 'timing' requires special attention, as it is undocumented and lives with dragons.\n command = '?WATCH={{\"enable\":true,\"{0}\":true}}'.format(gpsd_protocol)\n\n if gpsd_protocol == 'rare': # 1 for a channel, gpsd reports the unprocessed NMEA or AIVDM data stream\n command = command.replace('\"rare\":true', '\"raw\":1')\n if gpsd_protocol == 'raw': # 2 channel that processes binary data, received data verbatim without hex-dumping.\n command = command.replace('\"raw\":true', '\"raw\",2')\n if not enable:\n command = command.replace('true', 'false') # sets -all- command values false .\n if devicepath:\n command = command.replace('}', ',\"device\":\"') + devicepath + '\"}'\n\n return self.send(command)\n"
] | class GPSDSocket(object):
"""Establish a socket with gpsd, by which to send commands and receive data."""
def __init__(self):
self.streamSock = None
self.response = None
def connect(self, host=HOST, port=GPSD_PORT):
"""Connect to a host on a given port.
Arguments:
host: default host='127.0.0.1'
port: default port=2947
"""
for alotta_stuff in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
family, socktype, proto, _canonname, host_port = alotta_stuff
try:
self.streamSock = socket.socket(family, socktype, proto)
self.streamSock.connect(host_port)
self.streamSock.setblocking(False)
except (OSError, IOError) as error:
sys.stderr.write('\r\nGPSDSocket.connect exception is--> {}'.format(error))
sys.stderr.write('\r\nGPS3 gpsd connection at \'{0}\' on port \'{1}\' failed\r\n'.format(host, port))
def watch(self, enable=True, gpsd_protocol=PROTOCOL, devicepath=None):
"""watch gpsd in various gpsd_protocols or devices.
Arguments:
enable: (bool) stream data to socket
gpsd_protocol: (str) 'json' | 'nmea' | 'rare' | 'raw' | 'scaled' | 'split24' | 'pps'
devicepath: (str) device path - '/dev/ttyUSBn' for some number n or '/dev/whatever_works'
Returns:
command: (str) e.g., '?WATCH={"enable":true,"json":true};'
"""
# N.B.: 'timing' requires special attention, as it is undocumented and lives with dragons.
command = '?WATCH={{"enable":true,"{0}":true}}'.format(gpsd_protocol)
if gpsd_protocol == 'rare': # 1 for a channel, gpsd reports the unprocessed NMEA or AIVDM data stream
command = command.replace('"rare":true', '"raw":1')
if gpsd_protocol == 'raw': # 2 channel that processes binary data, received data verbatim without hex-dumping.
command = command.replace('"raw":true', '"raw",2')
if not enable:
command = command.replace('true', 'false') # sets -all- command values false .
if devicepath:
command = command.replace('}', ',"device":"') + devicepath + '"}'
return self.send(command)
def send(self, command):
"""Ship commands to the daemon
Arguments:
command: e.g., '?WATCH={{'enable':true,'json':true}}'|'?VERSION;'|'?DEVICES;'|'?DEVICE;'|'?POLL;'
"""
# The POLL command requests data from the last-seen fixes on all active GPS devices.
# Devices must previously have been activated by ?WATCH to be pollable.
try:
self.streamSock.send(bytes(command, encoding='utf-8'))
except TypeError:
self.streamSock.send(command) # 2.7 chokes on 'bytes' and 'encoding='
except (OSError, IOError) as error: # MOE, LEAVE THIS ALONE!...for now.
sys.stderr.write('\nGPS3 send command fail with {}\n'.format(error)) # [Errno 107] typically no socket
def __iter__(self):
"""banana""" # <--- for scale
return self
def next(self, timeout=0):
"""Return empty unless new data is ready for the client.
Arguments:
timeout: Default timeout=0 range zero to float specifies a time-out as a floating point
number in seconds. Will sit and wait for timeout seconds. When the timeout argument is omitted
the function blocks until at least one file descriptor is ready. A time-out value of zero specifies
a poll and never blocks.
"""
try:
waitin, _waitout, _waiterror = select.select((self.streamSock,), (), (), timeout)
if not waitin: return
else:
gpsd_response = self.streamSock.makefile() # '.makefile(buffering=4096)' In strictly Python3
self.response = gpsd_response.readline()
return self.response
except StopIteration as error:
sys.stderr.write('The readline exception in GPSDSocket.next is--> {}'.format(error))
__next__ = next # Workaround for changes in iterating between Python 2.7 and 3
|
wadda/gps3 | gps3/gps3.py | DataStream.unpack | python | def unpack(self, gpsd_socket_response):
try:
fresh_data = json.loads(gpsd_socket_response) # The reserved word 'class' is popped from JSON object class
package_name = fresh_data.pop('class', 'ERROR') # gpsd data package errors are also 'ERROR'.
package = getattr(self, package_name, package_name) # packages are named for JSON object class
for key in package.keys():
package[key] = fresh_data.get(key, 'n/a') # Restores 'n/a' if key is absent in the socket response
except AttributeError: # 'str' object has no attribute 'keys'
sys.stderr.write('There is an unexpected exception in DataStream.unpack')
return
except (ValueError, KeyError) as error:
sys.stderr.write(str(error)) # Extra data or aberrant data in stream.
return | Sets new socket data as DataStream attributes in those initialised dictionaries
Arguments:
gpsd_socket_response (json object):
Provides:
self attribute dictionaries, e.g., self.TPV['lat'], self.SKY['gdop']
Raises:
AttributeError: 'str' object has no attribute 'keys' when the device falls out of the system
ValueError, KeyError: most likely extra, or mangled JSON data, should not happen, but that
applies to a lot of things. | train | https://github.com/wadda/gps3/blob/91adcd7073b891b135b2a46d039ce2125cf09a09/gps3/gps3.py#L165-L189 | null | class DataStream(object):
"""Retrieve JSON Object(s) from GPSDSocket and unpack it into respective
gpsd 'class' dictionaries, TPV, SKY, etc. yielding hours of fun and entertainment.
"""
packages = {
'VERSION': {'release', 'proto_major', 'proto_minor', 'remote', 'rev'},
'TPV': {'alt', 'climb', 'device', 'epc', 'epd', 'eps', 'ept', 'epv', 'epx', 'epy', 'lat', 'lon', 'mode', 'speed', 'tag', 'time', 'track'},
'SKY': {'satellites', 'gdop', 'hdop', 'pdop', 'tdop', 'vdop', 'xdop', 'ydop'},
# Subset of SKY: 'satellites': {'PRN', 'ss', 'el', 'az', 'used'} # is always present.
'GST': {'alt', 'device', 'lat', 'lon', 'major', 'minor', 'orient', 'rms', 'time'},
'ATT': {'acc_len', 'acc_x', 'acc_y', 'acc_z', 'depth', 'device', 'dip', 'gyro_x', 'gyro_y', 'heading', 'mag_len', 'mag_st', 'mag_x',
'mag_y', 'mag_z', 'pitch', 'pitch_st', 'roll', 'roll_st', 'temperature', 'time', 'yaw', 'yaw_st'},
# 'POLL': {'active', 'tpv', 'sky', 'time'},
'PPS': {'device', 'clock_sec', 'clock_nsec', 'real_sec', 'real_nsec', 'precision'},
'TOFF': {'device', 'clock_sec', 'clock_nsec', 'real_sec', 'real_nsec'},
'DEVICES': {'devices', 'remote'},
'DEVICE': {'activated', 'bps', 'cycle', 'mincycle', 'driver', 'flags', 'native', 'parity', 'path', 'stopbits', 'subtype'},
# 'AIS': {} # see: http://catb.org/gpsd/AIVDM.html
'ERROR': {'message'}} # TODO: Full suite of possible GPSD output
def __init__(self):
"""Potential data packages from gpsd for a generator of class attribute dictionaries"""
for package_name, dataset in self.packages.items():
_emptydict = {key: 'n/a' for key in dataset}
setattr(self, package_name, _emptydict)
self.DEVICES['devices'] = {key: 'n/a' for key in self.packages['DEVICE']} # How does multiple listed devices work?
# self.POLL = {'tpv': self.TPV, 'sky': self.SKY, 'time': 'n/a', 'active': 'n/a'}
|
wadda/gps3 | gps3/agps3.py | GPSDSocket.send | python | def send(self, commands):
try:
self.streamSock.send(bytes(commands, encoding='utf-8'))
except TypeError:
self.streamSock.send(commands) # 2.7 chokes on 'bytes' and 'encoding='
except (OSError, IOError) as error: # HEY MOE, LEAVE THIS ALONE FOR NOW!
sys.stderr.write(f'\nAGPS3 send command fail with {error}\n') | Ship commands to the daemon
Arguments:
commands: e.g., '?WATCH={{'enable':true,'json':true}}'|'?VERSION;'|'?DEVICES;'|'?DEVICE;'|'?POLL;' | train | https://github.com/wadda/gps3/blob/91adcd7073b891b135b2a46d039ce2125cf09a09/gps3/agps3.py#L87-L97 | null | class GPSDSocket(object):
"""Establish a socket with gpsd, by which to send commands and receive data."""
def __init__(self):
self.streamSock = None
self.response = None
def connect(self, host=HOST, port=GPSD_PORT):
"""Connect to a host on a given port.
Arguments:
host: default host='127.0.0.1'
port: default port=2947
"""
for alotta_stuff in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
family, socktype, proto, _canonname, host_port = alotta_stuff
try:
self.streamSock = socket.socket(family, socktype, proto)
self.streamSock.connect(host_port)
self.streamSock.setblocking(False)
except (OSError, IOError) as error:
sys.stderr.write(f'\r\nGPSDSocket.connect exception is--> {error}')
sys.stderr.write(f'\r\nAGPS3 connection to gpsd at \'{host}\' on port \'{port}\' failed\r\n')
def watch(self, enable=True, gpsd_protocol=PROTOCOL, devicepath=None):
"""watch gpsd in various gpsd_protocols or devices.
Arguments:
enable: (bool) stream data to socket
gpsd_protocol: (str) 'json' | 'nmea' | 'rare' | 'raw' | 'scaled' | 'split24' | 'pps'
devicepath: (str) device path - '/dev/ttyUSBn' for some number n or '/dev/whatever_works'
Returns:
command: (str) e.g., '?WATCH={"enable":true,"json":true};'
"""
# N.B.: 'timing' requires special attention, as it is undocumented and lives with dragons.
command = '?WATCH={{"enable":true,"{0}":true}}'.format(gpsd_protocol)
if gpsd_protocol == 'rare': # 1 for a channel, gpsd reports the unprocessed NMEA or AIVDM data stream
command = command.replace('"rare":true', '"raw":1')
if gpsd_protocol == 'raw': # 2 channel that processes binary data, received data verbatim without hex-dumping.
command = command.replace('"raw":true', '"raw",2')
if not enable:
command = command.replace('true', 'false') # sets -all- command values false .
if devicepath:
command = command.replace('}', ',"device":"') + devicepath + '"}'
return self.send(command)
# [Errno 107] Transport endpoint is not connected
def __iter__(self):
"""banana""" # <--- for scale
return self
def next(self, timeout=0):
"""Return empty unless new data is ready for the client.
Arguments:
timeout: Default timeout=0 range zero to float specifies a time-out as a floating point
number in seconds. Will sit and wait for timeout seconds. When the timeout argument is omitted
the function blocks until at least one file descriptor is ready. A time-out value of zero specifies
a poll and never blocks.
"""
try:
waitin, _waitout, _waiterror = select.select((self.streamSock,), (), (), timeout)
if not waitin: return
else:
gpsd_response = self.streamSock.makefile() # '.makefile(buffering=4096)' In strictly Python3
self.response = gpsd_response.readline()
return self.response
except StopIteration as error:
sys.stderr.write('The readline exception in GPSDSocket.next is--> {}'.format(error))
__next__ = next # Workaround for changes in iterating between Python 2.7 and 3
def close(self):
"""turn off stream and close socket"""
if self.streamSock:
self.watch(enable=False)
self.streamSock.close()
self.streamSock = None
|
wadda/gps3 | gps3/agps3.py | DataStream.unpack | python | def unpack(self, gpsd_socket_response):
try:
fresh_data = json.loads(gpsd_socket_response) # 'class' is popped for iterator lead
class_name = fresh_data.pop('class')
for key in self.packages[class_name]:
# Fudge around the namespace collision with GST data package lat/lon being standard deviations
if class_name == 'GST' and key == 'lat' or 'lon':
setattr(self, 'sd' + key, fresh_data.get(key, 'n/a'))
setattr(self, key, fresh_data.get(key, 'n/a')) # Updates and restores 'n/a' if attribute is absent in the data
except AttributeError: # 'str' object has no attribute 'keys'
sys.stderr.write('There is an unexpected exception unpacking JSON object')
return
except (ValueError, KeyError) as error:
sys.stderr.write(str(error)) # Extra data or aberrant data in stream.
return | Sets new socket data as DataStream attributes in those initialised dictionaries
Arguments:
gpsd_socket_response (json object):
Provides:
self attributes, e.g., self.lat, self.gdop
Raises:
AttributeError: 'str' object has no attribute 'keys' when the device falls out of the system
ValueError, KeyError: most likely extra, or mangled JSON data, should not happen, but that
applies to a lot of things. | train | https://github.com/wadda/gps3/blob/91adcd7073b891b135b2a46d039ce2125cf09a09/gps3/agps3.py#L163-L189 | null | class DataStream(object):
"""Retrieve JSON Object(s) from GPSDSocket and unpack it into respective
object attributes, e.g., self.lat yielding hours of fun and entertainment.
"""
packages = {
'VERSION': {'release', 'proto_major', 'proto_minor', 'remote', 'rev'},
'TPV': {'alt', 'climb', 'device', 'epc', 'epd', 'eps', 'ept', 'epv', 'epx', 'epy', 'lat', 'lon', 'mode', 'speed', 'tag', 'time', 'track'},
'SKY': {'satellites', 'gdop', 'hdop', 'pdop', 'tdop', 'vdop', 'xdop', 'ydop'},
# Subset of SKY: \\\'satellites': {'PRN', 'ss', 'el', 'az', 'used'}/// # is always present.
'GST': {'alt', 'device', 'lat', 'lon', 'major', 'minor', 'orient', 'rms', 'time'},
# In 'GST', 'lat' and 'lon' present a name collision and are amended to 'sdlat', 'sdlon',
# because they are standard deviations of of 'TPV' 'lat' and 'lon'
'ATT': {'acc_len', 'acc_x', 'acc_y', 'acc_z', 'depth', 'device', 'dip', 'gyro_x', 'gyro_y', 'heading', 'mag_len', 'mag_st', 'mag_x',
'mag_y', 'mag_z', 'pitch', 'pitch_st', 'roll', 'roll_st', 'temperature', 'time', 'yaw', 'yaw_st'},
# 'POLL': {'active', 'tpv', 'sky', 'time'},
'PPS': {'device', 'clock_sec', 'clock_nsec', 'real_sec', 'real_nsec', 'precision'},
'TOFF': {'device', 'clock_sec', 'clock_nsec', 'real_sec', 'real_nsec'},
'DEVICES': {'devices', 'remote'},
'DEVICE': {'activated', 'bps', 'cycle', 'mincycle', 'driver', 'flags', 'native', 'parity', 'path', 'stopbits', 'subtype'},
# 'AIS': {} # see: http://catb.org/gpsd/AIVDM.html
'ERROR': {'message'}} # TODO: Full suite of possible GPSD output
def __init__(self):
"""Potential data packages from gpsd for a generator of class attributes"""
for laundry_list in self.packages.values():
for item in laundry_list:
# Fudge around the namespace collision with GST data package lat/lon being standard deviations
if laundry_list == 'GST' and item == 'lat' or 'lon':
setattr(self, 'sd' + item, 'n/a')
setattr(self, item, 'n/a')
|
wadda/gps3 | examples/ahuman.py | show_human | python | def show_human():
form = 'RAW'
units = 'raw'
data_window = curses.newwin(19, 39, 0, 0)
sat_window = curses.newwin(14, 39, 0, 40)
device_window = curses.newwin(6, 39, 13, 40)
packet_window = curses.newwin(7, 79, 19, 0)
for new_data in gpsd_socket:
if new_data:
data_stream.unpack(new_data)
screen.nodelay(1)
key_press = screen.getch()
if key_press == ord('q'): # quit
shut_down()
elif key_press == ord('a'): # NMEA
gpsd_socket.watch(enable=False, gpsd_protocol='json')
gpsd_socket.watch(gpsd_protocol='nmea')
show_nmea()
elif key_press == ord('0'): # raw
form = 'RAW'
units = 'raw'
data_window.clear()
elif key_press == ord('1'): # DDD
form = 'DDD'
data_window.clear()
elif key_press == ord('2'): # DMM
form = 'DMM'
data_window.clear()
elif key_press == ord('3'): # DMS
form = 'DMS'
data_window.clear()
elif key_press == ord('m'): # Metric
units = 'metric'
data_window.clear()
elif key_press == ord('i'): # Imperial
units = 'imperial'
data_window.clear()
elif key_press == ord('n'): # Nautical
units = 'nautical'
data_window.clear()
elif key_press == ord('d'): # Refresh device listings
gpsd_socket.send('?DEVICES;')
device_window.clear()
data_window.box()
data_window.addstr(0, 2, 'AGPS3 Python {}.{}.{} GPSD Interface'.format(*sys.version_info), curses.A_BOLD)
data_window.addstr(1, 2, 'Time: {} '.format(data_stream.time))
data_window.addstr(2, 2, 'Latitude: {} '.format(sexagesimal(data_stream.lat, 'lat', form)))
data_window.addstr(3, 2, 'Longitude: {} '.format(sexagesimal(data_stream.lon, 'lon', form)))
data_window.addstr(4, 2, 'Altitude: {} {}'.format(*unit_conversion(data_stream.alt, units, length=True)))
data_window.addstr(5, 2, 'Speed: {} {}'.format(*unit_conversion(data_stream.speed, units)))
data_window.addstr(6, 2, 'Heading: {}° True'.format(data_stream.track))
data_window.addstr(7, 2, 'Climb: {} {}'.format(*unit_conversion(data_stream.climb, units, length=True)))
data_window.addstr(8, 2, 'Status: {:<}D '.format(data_stream.mode))
data_window.addstr(9, 2, 'Latitude Err: +/-{} {}'.format(*unit_conversion(data_stream.epx, units, length=True)))
data_window.addstr(10, 2, 'Longitude Err: +/-{} {}'.format(*unit_conversion(data_stream.epy, units, length=True)))
data_window.addstr(11, 2, 'Altitude Err: +/-{} {}'.format(*unit_conversion(data_stream.epv, units, length=True)))
data_window.addstr(12, 2, 'Course Err: +/-{} '.format(data_stream.epc), curses.A_DIM)
data_window.addstr(13, 2, 'Speed Err: +/-{} {}'.format(*unit_conversion(data_stream.eps, units)), curses.A_DIM)
data_window.addstr(14, 2, 'Time Offset: +/-{} '.format(data_stream.ept), curses.A_DIM)
data_window.addstr(15, 2, 'gdop:{} pdop:{} tdop:{}'.format(data_stream.gdop, data_stream.pdop, data_stream.tdop))
data_window.addstr(16, 2, 'ydop:{} xdop:{} '.format(data_stream.ydop, data_stream.xdop))
data_window.addstr(17, 2, 'vdop:{} hdop:{} '.format(data_stream.vdop, data_stream.hdop))
sat_window.clear()
sat_window.box()
sat_window.addstr(0, 2, 'Using {0[1]}/{0[0]} satellites (truncated)'.format(satellites_used(data_stream.satellites)))
sat_window.addstr(1, 2, 'PRN Elev Azimuth SNR Used')
line = 2
if isinstance(data_stream.satellites, list): # Nested lists of dictionaries are strings before data is present
for sats in data_stream.satellites[0:10]:
sat_window.addstr(line, 2, '{PRN:>2} {el:>6} {az:>5} {ss:>5} {used:}'.format(**sats))
line += 1
device_window.clear()
device_window.box()
if not isinstance(data_stream.devices, list): # Local machines need a 'device' kick start
gpsd_socket.send('?DEVICES;') # to have valid data. I don't know why.
if isinstance(data_stream.devices, list): # Nested lists of dictionaries are strings before data is present.
for gizmo in data_stream.devices:
start_time, _uicroseconds = gizmo['activated'].split('.') # Remove '.000Z'
elapsed = elapsed_time_from(start_time)
device_window.addstr(1, 2, 'Activated: {}'.format(gizmo['activated']))
device_window.addstr(2, 2, 'Host:{0.host}:{0.port} {1}'.format(args, gizmo['path']))
device_window.addstr(3, 2, 'Driver:{driver} BPS:{bps}'.format(**gizmo))
device_window.addstr(4, 2, 'Cycle:{0} Hz {1!s:>14} Elapsed'.format(1 / gizmo['cycle'], elapsed))
packet_window.clear()
# packet_window.border(0)
packet_window.scrollok(True)
packet_window.addstr(0, 0, '{}'.format(new_data))
# sleep(.9)
data_window.refresh()
sat_window.refresh()
device_window.refresh()
packet_window.refresh()
else: # Reduced CPU cycles with the non-blocking socket read, by putting 'sleep' here, rather than hitting
sleep(.3) | Curses terminal with standard outputs | train | https://github.com/wadda/gps3/blob/91adcd7073b891b135b2a46d039ce2125cf09a09/examples/ahuman.py#L161-L267 | [
"def satellites_used(feed):\n \"\"\"Counts number of satellites used in calculation from total visible satellites\n Arguments:\n feed feed=data_stream.satellites\n Returns:\n total_satellites(int):\n used_satellites (int):\n \"\"\"\n total_satellites = 0\n used_satellites = 0\n\n if not isinstance(feed, list):\n return 0, 0\n\n for satellites in feed:\n total_satellites += 1\n if satellites['used'] is True:\n used_satellites += 1\n return total_satellites, used_satellites\n",
"def elapsed_time_from(start_time):\n \"\"\"calculate time delta from latched time and current time\"\"\"\n time_then = make_time(start_time)\n time_now = datetime.utcnow().replace(microsecond=0)\n if time_then is None:\n return\n delta_t = time_now - time_then\n return delta_t\n",
"def unit_conversion(thing, units, length=False):\n \"\"\"converts base data between metric, imperial, or nautical units\"\"\"\n if 'n/a' == thing:\n return 'n/a'\n try:\n thing = round(thing * CONVERSION[units][0 + length], 2)\n except TypeError:\n thing = 'fubar'\n return thing, CONVERSION[units][2 + length]\n",
"def sexagesimal(sexathang, latlon, form='DDD'):\n \"\"\"\n Arguments:\n sexathang: (float), -15.560615 (negative = South), -146.241122 (negative = West) # Apataki Carenage\n latlon: (str) 'lat' | 'lon'\n form: (str), 'DDD'|'DMM'|'DMS', decimal Degrees, decimal Minutes, decimal Seconds\n Returns:\n latitude: e.g., '15°33'38.214\"S'\n longitude: e.g., '146°14'28.039\"W'\n \"\"\"\n cardinal = 'O'\n if not isinstance(sexathang, float):\n sexathang = 'n/a'\n return sexathang\n\n if latlon == 'lon':\n if sexathang > 0.0:\n cardinal = 'E'\n if sexathang < 0.0:\n cardinal = 'W'\n\n if latlon == 'lat':\n if sexathang > 0.0:\n cardinal = 'N'\n if sexathang < 0.0:\n cardinal = 'S'\n\n if form == 'RAW':\n sexathang = '{0:4.9f}°'.format(sexathang) # 4 to allow -100° through -179.999999° to -180°\n return sexathang\n\n if form == 'DDD':\n sexathang = '{0:3.6f}°'.format(abs(sexathang))\n\n if form == 'DMM':\n _latlon = abs(sexathang)\n minute_latlon, degree_latlon = modf(_latlon)\n minute_latlon *= 60\n sexathang = '{0}°{1:2.5f}\\''.format(int(degree_latlon), minute_latlon)\n\n if form == 'DMS':\n _latlon = abs(sexathang)\n minute_latlon, degree_latlon = modf(_latlon)\n second_latlon, minute_latlon = modf(minute_latlon * 60)\n second_latlon *= 60.0\n sexathang = '{0}°{1}\\'{2:2.3f}\\\"'.format(int(degree_latlon), int(minute_latlon), second_latlon)\n\n return sexathang + cardinal\n",
"def shut_down():\n \"\"\"Closes connection and restores terminal\"\"\"\n curses.nocbreak()\n curses.echo()\n curses.endwin()\n gpsd_socket.close()\n print('Keyboard interrupt received\\nTerminated by user\\nGood Bye.\\n')\n sys.exit(1)\n",
"def show_nmea():\n \"\"\"NMEA output in curses terminal\"\"\"\n data_window = curses.newwin(24, 79, 0, 0)\n\n for new_data in gpsd_socket:\n if new_data:\n screen.nodelay(1)\n key_press = screen.getch()\n if key_press == ord('q'):\n shut_down()\n elif key_press == ord('j'): # raw\n gpsd_socket.watch(enable=False, gpsd_protocol='nmea')\n gpsd_socket.watch(gpsd_protocol='json')\n show_human()\n\n data_window.border(0)\n data_window.addstr(0, 2, 'AGPS3 Python {}.{}.{} GPSD Interface Showing NMEA protocol'.format(*sys.version_info), curses.A_BOLD)\n data_window.addstr(2, 2, '{}'.format(gpsd_socket.response))\n data_window.refresh()\n else:\n sleep(.1)\n",
"def watch(self, enable=True, gpsd_protocol=PROTOCOL, devicepath=None):\n \"\"\"watch gpsd in various gpsd_protocols or devices.\n Arguments:\n enable: (bool) stream data to socket\n gpsd_protocol: (str) 'json' | 'nmea' | 'rare' | 'raw' | 'scaled' | 'split24' | 'pps'\n devicepath: (str) device path - '/dev/ttyUSBn' for some number n or '/dev/whatever_works'\n Returns:\n command: (str) e.g., '?WATCH={\"enable\":true,\"json\":true};'\n \"\"\"\n # N.B.: 'timing' requires special attention, as it is undocumented and lives with dragons.\n command = '?WATCH={{\"enable\":true,\"{0}\":true}}'.format(gpsd_protocol)\n\n if gpsd_protocol == 'rare': # 1 for a channel, gpsd reports the unprocessed NMEA or AIVDM data stream\n command = command.replace('\"rare\":true', '\"raw\":1')\n if gpsd_protocol == 'raw': # 2 channel that processes binary data, received data verbatim without hex-dumping.\n command = command.replace('\"raw\":true', '\"raw\",2')\n if not enable:\n command = command.replace('true', 'false') # sets -all- command values false .\n if devicepath:\n command = command.replace('}', ',\"device\":\"') + devicepath + '\"}'\n\n return self.send(command)\n",
"def send(self, commands):\n \"\"\"Ship commands to the daemon\n Arguments:\n commands: e.g., '?WATCH={{'enable':true,'json':true}}'|'?VERSION;'|'?DEVICES;'|'?DEVICE;'|'?POLL;'\n \"\"\"\n try:\n self.streamSock.send(bytes(commands, encoding='utf-8'))\n except TypeError:\n self.streamSock.send(commands) # 2.7 chokes on 'bytes' and 'encoding='\n except (OSError, IOError) as error: # HEY MOE, LEAVE THIS ALONE FOR NOW!\n sys.stderr.write(f'\\nAGPS3 send command fail with {error}\\n') # [Errno 107] Transport endpoint is not connected\n",
"def unpack(self, gpsd_socket_response):\n \"\"\"Sets new socket data as DataStream attributes in those initialised dictionaries\n Arguments:\n gpsd_socket_response (json object):\n Provides:\n self attributes, e.g., self.lat, self.gdop\n Raises:\n AttributeError: 'str' object has no attribute 'keys' when the device falls out of the system\n ValueError, KeyError: most likely extra, or mangled JSON data, should not happen, but that\n applies to a lot of things.\n \"\"\"\n try:\n fresh_data = json.loads(gpsd_socket_response) # 'class' is popped for iterator lead\n class_name = fresh_data.pop('class')\n for key in self.packages[class_name]:\n # Fudge around the namespace collision with GST data package lat/lon being standard deviations\n if class_name == 'GST' and key == 'lat' or 'lon':\n setattr(self, 'sd' + key, fresh_data.get(key, 'n/a'))\n setattr(self, key, fresh_data.get(key, 'n/a')) # Updates and restores 'n/a' if attribute is absent in the data\n\n except AttributeError: # 'str' object has no attribute 'keys'\n sys.stderr.write('There is an unexpected exception unpacking JSON object')\n return\n\n except (ValueError, KeyError) as error:\n sys.stderr.write(str(error)) # Extra data or aberrant data in stream.\n return\n"
] | #!/usr/bin/env python3
# coding=utf-8
"""
ahuman.py is to showcase agps3.py, a Python 2.7-3.5 GPSD interface
Defaults host='127.0.0.1', port=2947, gpsd_protocol='json'
Toggle Lat/Lon form with '0', '1', '2', '3' for RAW, DDD, DMM, DMS
Toggle units with '0', 'm', 'i', 'n', for 'raw', Metric, Imperial, Nautical
Toggle gpsd protocol with 'j', 'a' for 'json', 'nmea' displays
Quit with 'q' or '^c'
python[X] human.py --help for list of commandline options.
"""
import argparse
import curses
import sys
from datetime import datetime
from math import modf
from time import sleep
from gps3 import agps3 # Moe, remember to CHANGE to straight 'import agps3' if not installed,
# or check which Python version it's installed in. You forget sometimes.
__author__ = 'Moe'
__copyright__ = 'Copyright 2015-2016 Moe'
__license__ = 'MIT'
__version__ = '0.33.2'
# Unit-conversion table keyed by system name.  Row layout:
# (speed factor from m/s, length factor from meters, speed label, length label)
# unit_conversion() indexes [0 + length] for the factor and [2 + length] for
# the label, with its bool `length` flag selecting the length columns.
CONVERSION = {'raw': (1, 1, 'm/s', 'meters'),
              'metric': (3.6, 1, 'kph', 'meters'),
              'nautical': (1.9438445, 1, 'kts', 'meters'),
              'imperial': (2.2369363, 3.2808399, 'mph', 'feet')}
def add_args():
    """Build and parse the commandline arguments.

    Returns:
        argparse.Namespace with attributes ``host``, ``port`` (int),
        ``gpsd_protocol`` and ``devicepath``.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-host', action='store', dest='host', default='127.0.0.1', help='DEFAULT "127.0.0.1"')
    # Use an int default directly; the original relied on argparse parsing the
    # string default '2947' through type=int, which is correct but fragile.
    parser.add_argument('-port', action='store', dest='port', default=2947, help='DEFAULT 2947', type=int)
    parser.add_argument('-json', dest='gpsd_protocol', const='json', action='store_const', default='json', help='DEFAULT JSON objects */')
    parser.add_argument('-device', dest='devicepath', action='store', help='alternate devicepath e.g.,"-device /dev/ttyUSB4"')
    # Infrequently used options
    parser.add_argument('-nmea', dest='gpsd_protocol', const='nmea', action='store_const', help='*/ output in NMEA */')
    # parser.add_argument('-rare', dest='gpsd_protocol', const='rare', action='store_const', help='*/ output of packets in hex */')
    # parser.add_argument('-raw', dest='gpsd_protocol', const='raw', action='store_const', help='*/ output of raw packets */')
    # parser.add_argument('-scaled', dest='gpsd_protocol', const='scaled', action='store_const', help='*/ scale output to floats */')
    # parser.add_argument('-timing', dest='gpsd_protocol', const='timing', action='store_const', help='*/ timing information */')
    # parser.add_argument('-split24', dest='gpsd_protocol', const='split24', action='store_const', help='*/ split AIS Type 24s */')
    # parser.add_argument('-pps', dest='gpsd_protocol', const='pps', action='store_const', help='*/ enable PPS JSON */')
    parser.add_argument('-v', '--version', action='version', version='Version: {}'.format(__version__))
    cli_args = parser.parse_args()
    return cli_args
def satellites_used(feed):
    """Count total visible satellites and those used in the fix.

    Arguments:
        feed: ``data_stream.satellites`` -- a list of per-satellite dicts
            (each with a boolean 'used' entry), or a placeholder string
            such as 'n/a' before data arrives.
    Returns:
        (total_satellites, used_satellites) as ints; (0, 0) when no
        satellite list is available yet.
    """
    if not isinstance(feed, list):
        return 0, 0
    total_satellites = len(feed)
    # ``is True`` kept deliberately strict: gpsd reports JSON booleans.
    used_satellites = sum(1 for sat in feed if sat['used'] is True)
    return total_satellites, used_satellites
def make_time(gps_datetime_str):
    """Convert a gpsd ISO timestamp string to a datetime object.

    Arguments:
        gps_datetime_str: e.g. '2016-01-02T03:04:05' (whole seconds, no
            fractional part or zone suffix), or the placeholder 'n/a'
            before a fix is available.
    Returns:
        datetime.datetime, or None for the 'n/a' placeholder.
    """
    if gps_datetime_str == 'n/a':
        # No fix yet; signal "unknown" explicitly instead of implicitly
        # falling off the end of the function.
        return None
    return datetime.strptime(gps_datetime_str, "%Y-%m-%dT%H:%M:%S")
def elapsed_time_from(start_time):
    """Return a timedelta from *start_time* (ISO string) until now (UTC).

    Arguments:
        start_time: 'YYYY-mm-ddTHH:MM:SS' string, or 'n/a'.
    Returns:
        datetime.timedelta, or None when *start_time* is 'n/a'.
    """
    time_then = make_time(start_time)
    if time_then is None:
        # Bail out before touching the clock; the original computed "now"
        # even when it was going to be discarded.
        return None
    # utcnow() keeps the naive-UTC convention used throughout this script;
    # whole-second resolution matches the second-resolution input.
    time_now = datetime.utcnow().replace(microsecond=0)
    return time_now - time_then
def unit_conversion(thing, units, length=False):
    """Convert a base (SI) value into the selected unit system.

    Arguments:
        thing: numeric value in m/s (speed) or meters (length), or 'n/a'.
        units: key into CONVERSION -- 'raw' | 'metric' | 'imperial' | 'nautical'.
        length: False for speed conversion, True for length conversion
            (the bool doubles as a 0/1 index offset into the CONVERSION row).
    Returns:
        (value, unit_label) tuple; value is 'fubar' for non-numeric input
        and 'n/a' when no data is available.
    """
    factor = CONVERSION[units][0 + length]
    label = CONVERSION[units][2 + length]
    if thing == 'n/a':
        # Return a 2-tuple like every other path so callers can always
        # unpack via '{} {}'.format(*unit_conversion(...)).  The original
        # returned the bare string, which star-unpacked into characters
        # and rendered as "n /".
        return 'n/a', label
    try:
        value = round(thing * factor, 2)
    except TypeError:
        value = 'fubar'
    return value, label
def sexagesimal(sexathang, latlon, form='DDD'):
    """Render a decimal-degrees float as a latitude/longitude string.

    Arguments:
        sexathang: (float) signed decimal degrees; negative means
            South (lat) or West (lon), e.g., -15.560615, -146.241122
        latlon: (str) 'lat' | 'lon'
        form: (str) 'RAW' | 'DDD' | 'DMM' | 'DMS'
    Returns:
        Formatted string, e.g. '15°33'38.214"S'.  The RAW form keeps the
        sign and omits the hemisphere letter.  Non-float input gives 'n/a'.
    """
    if not isinstance(sexathang, float):
        return 'n/a'
    # Hemisphere letter; 'O' survives only for exactly 0.0 or an unknown latlon.
    cardinal = 'O'
    if latlon == 'lon':
        cardinal = 'E' if sexathang > 0.0 else 'W' if sexathang < 0.0 else cardinal
    elif latlon == 'lat':
        cardinal = 'N' if sexathang > 0.0 else 'S' if sexathang < 0.0 else cardinal
    if form == 'RAW':
        # Signed, high precision, no hemisphere suffix.
        return '{0:4.9f}°'.format(sexathang)
    magnitude = abs(sexathang)
    if form == 'DDD':
        rendered = '{0:3.6f}°'.format(magnitude)
    elif form == 'DMM':
        fraction, whole_degrees = modf(magnitude)
        rendered = '{0}°{1:2.5f}\''.format(int(whole_degrees), fraction * 60)
    elif form == 'DMS':
        fraction, whole_degrees = modf(magnitude)
        second_part, minute_part = modf(fraction * 60)
        rendered = '{0}°{1}\'{2:2.3f}\"'.format(int(whole_degrees), int(minute_part), second_part * 60.0)
    else:
        rendered = sexathang
    return rendered + cardinal
# the socket fast and furious with hundreds of empty checks between sleeps.
def show_nmea():
    """Display raw NMEA sentences from gpsd in a curses window.

    Runs until 'q' (quit) or 'j' (switch back to the JSON/human view) is
    pressed.  Relies on module globals ``gpsd_socket`` and ``screen``.
    """
    data_window = curses.newwin(24, 79, 0, 0)
    for new_data in gpsd_socket:
        if new_data:
            screen.nodelay(1)  # make getch() non-blocking
            key_press = screen.getch()
            if key_press == ord('q'):
                shut_down()
            elif key_press == ord('j'):  # switch the stream back to JSON and show the human display
                gpsd_socket.watch(enable=False, gpsd_protocol='nmea')
                gpsd_socket.watch(gpsd_protocol='json')
                show_human()
            data_window.border(0)
            data_window.addstr(0, 2, 'AGPS3 Python {}.{}.{} GPSD Interface Showing NMEA protocol'.format(*sys.version_info), curses.A_BOLD)
            data_window.addstr(2, 2, '{}'.format(gpsd_socket.response))
            data_window.refresh()
        else:
            sleep(.1)  # idle briefly instead of spinning on the non-blocking socket
def shut_down():
    """Restore the terminal, close the gpsd socket and exit.

    Called for both a 'q' keypress and ^C.  NOTE(review): the printed
    message and exit status 1 suggest an abnormal end even on a normal
    'q' quit -- confirm intent before changing.
    """
    curses.nocbreak()  # leave cbreak input mode
    curses.echo()      # re-enable keystroke echo
    curses.endwin()    # hand the terminal back to the shell
    gpsd_socket.close()
    print('Keyboard interrupt received\nTerminated by user\nGood Bye.\n')
    sys.exit(1)
if __name__ == '__main__':
    args = add_args()
    # Connect to the gpsd daemon and start streaming in the chosen protocol.
    gpsd_socket = agps3.GPSDSocket()
    gpsd_socket.connect(args.host, args.port)
    gpsd_socket.watch(gpsd_protocol=args.gpsd_protocol)
    data_stream = agps3.DataStream()
    # Curses setup: no echo, hidden cursor, cbreak (unbuffered) input.
    screen = curses.initscr()
    screen.clear()
    screen.scrollok(True)
    curses.noecho()
    curses.curs_set(0)
    curses.cbreak()
    try:
        if 'json' in args.gpsd_protocol:
            show_human()
        if 'nmea' in args.gpsd_protocol:
            show_nmea()
    except KeyboardInterrupt:
        shut_down()
    except (OSError, IOError) as error:
        # Restore the terminal first so the error report is readable.
        gpsd_socket.close()
        curses.nocbreak()
        curses.echo()
        curses.endwin()
        sys.stderr.write('\rAHUMAN error--> {}'.format(error))
        sys.stderr.write('\rahuman connection to gpsd at \'{0}\' on port \'{1}\' failed.\n'.format(args.host, args.port))
        sys.exit(1)  # TODO: gpsd existence check and start
#
# Someday a cleaner Python interface will live here
#
# End
|
wadda/gps3 | examples/ahuman.py | show_nmea | python | def show_nmea():
data_window = curses.newwin(24, 79, 0, 0)
for new_data in gpsd_socket:
if new_data:
screen.nodelay(1)
key_press = screen.getch()
if key_press == ord('q'):
shut_down()
elif key_press == ord('j'): # raw
gpsd_socket.watch(enable=False, gpsd_protocol='nmea')
gpsd_socket.watch(gpsd_protocol='json')
show_human()
data_window.border(0)
data_window.addstr(0, 2, 'AGPS3 Python {}.{}.{} GPSD Interface Showing NMEA protocol'.format(*sys.version_info), curses.A_BOLD)
data_window.addstr(2, 2, '{}'.format(gpsd_socket.response))
data_window.refresh()
else:
sleep(.1) | NMEA output in curses terminal | train | https://github.com/wadda/gps3/blob/91adcd7073b891b135b2a46d039ce2125cf09a09/examples/ahuman.py#L270-L290 | [
"def show_human():\n \"\"\"Curses terminal with standard outputs \"\"\"\n form = 'RAW'\n units = 'raw'\n\n data_window = curses.newwin(19, 39, 0, 0)\n sat_window = curses.newwin(14, 39, 0, 40)\n device_window = curses.newwin(6, 39, 13, 40)\n packet_window = curses.newwin(7, 79, 19, 0)\n\n for new_data in gpsd_socket:\n if new_data:\n data_stream.unpack(new_data)\n\n screen.nodelay(1)\n key_press = screen.getch()\n\n if key_press == ord('q'): # quit\n shut_down()\n elif key_press == ord('a'): # NMEA\n gpsd_socket.watch(enable=False, gpsd_protocol='json')\n gpsd_socket.watch(gpsd_protocol='nmea')\n show_nmea()\n elif key_press == ord('0'): # raw\n form = 'RAW'\n units = 'raw'\n data_window.clear()\n elif key_press == ord('1'): # DDD\n form = 'DDD'\n data_window.clear()\n elif key_press == ord('2'): # DMM\n form = 'DMM'\n data_window.clear()\n elif key_press == ord('3'): # DMS\n form = 'DMS'\n data_window.clear()\n elif key_press == ord('m'): # Metric\n units = 'metric'\n data_window.clear()\n elif key_press == ord('i'): # Imperial\n units = 'imperial'\n data_window.clear()\n elif key_press == ord('n'): # Nautical\n units = 'nautical'\n data_window.clear()\n elif key_press == ord('d'): # Refresh device listings\n gpsd_socket.send('?DEVICES;')\n device_window.clear()\n\n data_window.box()\n data_window.addstr(0, 2, 'AGPS3 Python {}.{}.{} GPSD Interface'.format(*sys.version_info), curses.A_BOLD)\n data_window.addstr(1, 2, 'Time: {} '.format(data_stream.time))\n data_window.addstr(2, 2, 'Latitude: {} '.format(sexagesimal(data_stream.lat, 'lat', form)))\n data_window.addstr(3, 2, 'Longitude: {} '.format(sexagesimal(data_stream.lon, 'lon', form)))\n data_window.addstr(4, 2, 'Altitude: {} {}'.format(*unit_conversion(data_stream.alt, units, length=True)))\n data_window.addstr(5, 2, 'Speed: {} {}'.format(*unit_conversion(data_stream.speed, units)))\n data_window.addstr(6, 2, 'Heading: {}° True'.format(data_stream.track))\n data_window.addstr(7, 2, 'Climb: {} 
{}'.format(*unit_conversion(data_stream.climb, units, length=True)))\n data_window.addstr(8, 2, 'Status: {:<}D '.format(data_stream.mode))\n data_window.addstr(9, 2, 'Latitude Err: +/-{} {}'.format(*unit_conversion(data_stream.epx, units, length=True)))\n data_window.addstr(10, 2, 'Longitude Err: +/-{} {}'.format(*unit_conversion(data_stream.epy, units, length=True)))\n data_window.addstr(11, 2, 'Altitude Err: +/-{} {}'.format(*unit_conversion(data_stream.epv, units, length=True)))\n data_window.addstr(12, 2, 'Course Err: +/-{} '.format(data_stream.epc), curses.A_DIM)\n data_window.addstr(13, 2, 'Speed Err: +/-{} {}'.format(*unit_conversion(data_stream.eps, units)), curses.A_DIM)\n data_window.addstr(14, 2, 'Time Offset: +/-{} '.format(data_stream.ept), curses.A_DIM)\n data_window.addstr(15, 2, 'gdop:{} pdop:{} tdop:{}'.format(data_stream.gdop, data_stream.pdop, data_stream.tdop))\n data_window.addstr(16, 2, 'ydop:{} xdop:{} '.format(data_stream.ydop, data_stream.xdop))\n data_window.addstr(17, 2, 'vdop:{} hdop:{} '.format(data_stream.vdop, data_stream.hdop))\n\n sat_window.clear()\n sat_window.box()\n sat_window.addstr(0, 2, 'Using {0[1]}/{0[0]} satellites (truncated)'.format(satellites_used(data_stream.satellites)))\n sat_window.addstr(1, 2, 'PRN Elev Azimuth SNR Used')\n line = 2\n if isinstance(data_stream.satellites, list): # Nested lists of dictionaries are strings before data is present\n for sats in data_stream.satellites[0:10]:\n sat_window.addstr(line, 2, '{PRN:>2} {el:>6} {az:>5} {ss:>5} {used:}'.format(**sats))\n line += 1\n\n device_window.clear()\n device_window.box()\n if not isinstance(data_stream.devices, list): # Local machines need a 'device' kick start\n gpsd_socket.send('?DEVICES;') # to have valid data. 
I don't know why.\n\n if isinstance(data_stream.devices, list): # Nested lists of dictionaries are strings before data is present.\n for gizmo in data_stream.devices:\n start_time, _uicroseconds = gizmo['activated'].split('.') # Remove '.000Z'\n elapsed = elapsed_time_from(start_time)\n\n device_window.addstr(1, 2, 'Activated: {}'.format(gizmo['activated']))\n device_window.addstr(2, 2, 'Host:{0.host}:{0.port} {1}'.format(args, gizmo['path']))\n device_window.addstr(3, 2, 'Driver:{driver} BPS:{bps}'.format(**gizmo))\n device_window.addstr(4, 2, 'Cycle:{0} Hz {1!s:>14} Elapsed'.format(1 / gizmo['cycle'], elapsed))\n\n packet_window.clear()\n # packet_window.border(0)\n packet_window.scrollok(True)\n packet_window.addstr(0, 0, '{}'.format(new_data))\n\n# sleep(.9)\n\n data_window.refresh()\n sat_window.refresh()\n device_window.refresh()\n packet_window.refresh()\n else: # Reduced CPU cycles with the non-blocking socket read, by putting 'sleep' here, rather than hitting\n sleep(.3) # the socket fast and furious with hundreds of empty checks between sleeps.\n",
"def shut_down():\n \"\"\"Closes connection and restores terminal\"\"\"\n curses.nocbreak()\n curses.echo()\n curses.endwin()\n gpsd_socket.close()\n print('Keyboard interrupt received\\nTerminated by user\\nGood Bye.\\n')\n sys.exit(1)\n",
"def watch(self, enable=True, gpsd_protocol=PROTOCOL, devicepath=None):\n \"\"\"watch gpsd in various gpsd_protocols or devices.\n Arguments:\n enable: (bool) stream data to socket\n gpsd_protocol: (str) 'json' | 'nmea' | 'rare' | 'raw' | 'scaled' | 'split24' | 'pps'\n devicepath: (str) device path - '/dev/ttyUSBn' for some number n or '/dev/whatever_works'\n Returns:\n command: (str) e.g., '?WATCH={\"enable\":true,\"json\":true};'\n \"\"\"\n # N.B.: 'timing' requires special attention, as it is undocumented and lives with dragons.\n command = '?WATCH={{\"enable\":true,\"{0}\":true}}'.format(gpsd_protocol)\n\n if gpsd_protocol == 'rare': # 1 for a channel, gpsd reports the unprocessed NMEA or AIVDM data stream\n command = command.replace('\"rare\":true', '\"raw\":1')\n if gpsd_protocol == 'raw': # 2 channel that processes binary data, received data verbatim without hex-dumping.\n command = command.replace('\"raw\":true', '\"raw\",2')\n if not enable:\n command = command.replace('true', 'false') # sets -all- command values false .\n if devicepath:\n command = command.replace('}', ',\"device\":\"') + devicepath + '\"}'\n\n return self.send(command)\n"
] | #!/usr/bin/env python3
# coding=utf-8
"""
ahuman.py is to showcase agps3.py, a Python 2.7-3.5 GPSD interface
Defaults host='127.0.0.1', port=2947, gpsd_protocol='json'
Toggle Lat/Lon form with '0', '1', '2', '3' for RAW, DDD, DMM, DMS
Toggle units with '0', 'm', 'i', 'n', for 'raw', Metric, Imperial, Nautical
Toggle gpsd protocol with 'j', 'a' for 'json', 'nmea' displays
Quit with 'q' or '^c'
python[X] ahuman.py --help for a list of commandline options.
"""
import argparse
import curses
import sys
from datetime import datetime
from math import modf
from time import sleep
from gps3 import agps3 # Moe, remember to CHANGE to straight 'import agps3' if not installed,
# or check which Python version it's installed in. You forget sometimes.
__author__ = 'Moe'
__copyright__ = 'Copyright 2015-2016 Moe'
__license__ = 'MIT'
__version__ = '0.33.2'
# Unit-conversion table keyed by system name.  Row layout:
# (speed factor from m/s, length factor from meters, speed label, length label)
# unit_conversion() indexes [0 + length] for the factor and [2 + length] for
# the label, with its bool `length` flag selecting the length columns.
CONVERSION = {'raw': (1, 1, 'm/s', 'meters'),
              'metric': (3.6, 1, 'kph', 'meters'),
              'nautical': (1.9438445, 1, 'kts', 'meters'),
              'imperial': (2.2369363, 3.2808399, 'mph', 'feet')}
def add_args():
    """Build and parse the commandline arguments.

    Returns:
        argparse.Namespace with attributes ``host``, ``port`` (int),
        ``gpsd_protocol`` and ``devicepath``.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-host', action='store', dest='host', default='127.0.0.1', help='DEFAULT "127.0.0.1"')
    # Use an int default directly; the original relied on argparse parsing the
    # string default '2947' through type=int, which is correct but fragile.
    parser.add_argument('-port', action='store', dest='port', default=2947, help='DEFAULT 2947', type=int)
    parser.add_argument('-json', dest='gpsd_protocol', const='json', action='store_const', default='json', help='DEFAULT JSON objects */')
    parser.add_argument('-device', dest='devicepath', action='store', help='alternate devicepath e.g.,"-device /dev/ttyUSB4"')
    # Infrequently used options
    parser.add_argument('-nmea', dest='gpsd_protocol', const='nmea', action='store_const', help='*/ output in NMEA */')
    # parser.add_argument('-rare', dest='gpsd_protocol', const='rare', action='store_const', help='*/ output of packets in hex */')
    # parser.add_argument('-raw', dest='gpsd_protocol', const='raw', action='store_const', help='*/ output of raw packets */')
    # parser.add_argument('-scaled', dest='gpsd_protocol', const='scaled', action='store_const', help='*/ scale output to floats */')
    # parser.add_argument('-timing', dest='gpsd_protocol', const='timing', action='store_const', help='*/ timing information */')
    # parser.add_argument('-split24', dest='gpsd_protocol', const='split24', action='store_const', help='*/ split AIS Type 24s */')
    # parser.add_argument('-pps', dest='gpsd_protocol', const='pps', action='store_const', help='*/ enable PPS JSON */')
    parser.add_argument('-v', '--version', action='version', version='Version: {}'.format(__version__))
    cli_args = parser.parse_args()
    return cli_args
def satellites_used(feed):
    """Count total visible satellites and those used in the fix.

    Arguments:
        feed: ``data_stream.satellites`` -- a list of per-satellite dicts
            (each with a boolean 'used' entry), or a placeholder string
            such as 'n/a' before data arrives.
    Returns:
        (total_satellites, used_satellites) as ints; (0, 0) when no
        satellite list is available yet.
    """
    if not isinstance(feed, list):
        return 0, 0
    total_satellites = len(feed)
    # ``is True`` kept deliberately strict: gpsd reports JSON booleans.
    used_satellites = sum(1 for sat in feed if sat['used'] is True)
    return total_satellites, used_satellites
def make_time(gps_datetime_str):
    """Convert a gpsd ISO timestamp string to a datetime object.

    Arguments:
        gps_datetime_str: e.g. '2016-01-02T03:04:05' (whole seconds, no
            fractional part or zone suffix), or the placeholder 'n/a'
            before a fix is available.
    Returns:
        datetime.datetime, or None for the 'n/a' placeholder.
    """
    if gps_datetime_str == 'n/a':
        # No fix yet; signal "unknown" explicitly instead of implicitly
        # falling off the end of the function.
        return None
    return datetime.strptime(gps_datetime_str, "%Y-%m-%dT%H:%M:%S")
def elapsed_time_from(start_time):
    """Return a timedelta from *start_time* (ISO string) until now (UTC).

    Arguments:
        start_time: 'YYYY-mm-ddTHH:MM:SS' string, or 'n/a'.
    Returns:
        datetime.timedelta, or None when *start_time* is 'n/a'.
    """
    time_then = make_time(start_time)
    if time_then is None:
        # Bail out before touching the clock; the original computed "now"
        # even when it was going to be discarded.
        return None
    # utcnow() keeps the naive-UTC convention used throughout this script;
    # whole-second resolution matches the second-resolution input.
    time_now = datetime.utcnow().replace(microsecond=0)
    return time_now - time_then
def unit_conversion(thing, units, length=False):
    """Convert a base (SI) value into the selected unit system.

    Arguments:
        thing: numeric value in m/s (speed) or meters (length), or 'n/a'.
        units: key into CONVERSION -- 'raw' | 'metric' | 'imperial' | 'nautical'.
        length: False for speed conversion, True for length conversion
            (the bool doubles as a 0/1 index offset into the CONVERSION row).
    Returns:
        (value, unit_label) tuple; value is 'fubar' for non-numeric input
        and 'n/a' when no data is available.
    """
    factor = CONVERSION[units][0 + length]
    label = CONVERSION[units][2 + length]
    if thing == 'n/a':
        # Return a 2-tuple like every other path so callers can always
        # unpack via '{} {}'.format(*unit_conversion(...)).  The original
        # returned the bare string, which star-unpacked into characters
        # and rendered as "n /".
        return 'n/a', label
    try:
        value = round(thing * factor, 2)
    except TypeError:
        value = 'fubar'
    return value, label
def sexagesimal(sexathang, latlon, form='DDD'):
    """Render a decimal-degrees float as a latitude/longitude string.

    Arguments:
        sexathang: (float) signed decimal degrees; negative means
            South (lat) or West (lon), e.g., -15.560615, -146.241122
        latlon: (str) 'lat' | 'lon'
        form: (str) 'RAW' | 'DDD' | 'DMM' | 'DMS'
    Returns:
        Formatted string, e.g. '15°33'38.214"S'.  The RAW form keeps the
        sign and omits the hemisphere letter.  Non-float input gives 'n/a'.
    """
    if not isinstance(sexathang, float):
        return 'n/a'
    # Hemisphere letter; 'O' survives only for exactly 0.0 or an unknown latlon.
    cardinal = 'O'
    if latlon == 'lon':
        cardinal = 'E' if sexathang > 0.0 else 'W' if sexathang < 0.0 else cardinal
    elif latlon == 'lat':
        cardinal = 'N' if sexathang > 0.0 else 'S' if sexathang < 0.0 else cardinal
    if form == 'RAW':
        # Signed, high precision, no hemisphere suffix.
        return '{0:4.9f}°'.format(sexathang)
    magnitude = abs(sexathang)
    if form == 'DDD':
        rendered = '{0:3.6f}°'.format(magnitude)
    elif form == 'DMM':
        fraction, whole_degrees = modf(magnitude)
        rendered = '{0}°{1:2.5f}\''.format(int(whole_degrees), fraction * 60)
    elif form == 'DMS':
        fraction, whole_degrees = modf(magnitude)
        second_part, minute_part = modf(fraction * 60)
        rendered = '{0}°{1}\'{2:2.3f}\"'.format(int(whole_degrees), int(minute_part), second_part * 60.0)
    else:
        rendered = sexathang
    return rendered + cardinal
def show_human():
    """Curses dashboard of position, error, satellite and device data.

    Reads JSON packets from the module-global ``gpsd_socket`` into
    ``data_stream`` and redraws four windows per packet.  Keys: 'q' quit,
    'a' switch to NMEA view, '0'-'3' lat/lon form, 'm'/'i'/'n' units,
    'd' refresh device listing.
    """
    form = 'RAW'
    units = 'raw'
    # Window layout: data (left), satellites (top right), devices (bottom
    # right), raw packet dump (bottom, full width).
    data_window = curses.newwin(19, 39, 0, 0)
    sat_window = curses.newwin(14, 39, 0, 40)
    device_window = curses.newwin(6, 39, 13, 40)
    packet_window = curses.newwin(7, 79, 19, 0)
    for new_data in gpsd_socket:
        if new_data:
            data_stream.unpack(new_data)
            screen.nodelay(1)  # non-blocking getch()
            key_press = screen.getch()
            if key_press == ord('q'):  # quit
                shut_down()
            elif key_press == ord('a'):  # NMEA
                gpsd_socket.watch(enable=False, gpsd_protocol='json')
                gpsd_socket.watch(gpsd_protocol='nmea')
                show_nmea()
            elif key_press == ord('0'):  # raw
                form = 'RAW'
                units = 'raw'
                data_window.clear()
            elif key_press == ord('1'):  # DDD
                form = 'DDD'
                data_window.clear()
            elif key_press == ord('2'):  # DMM
                form = 'DMM'
                data_window.clear()
            elif key_press == ord('3'):  # DMS
                form = 'DMS'
                data_window.clear()
            elif key_press == ord('m'):  # Metric
                units = 'metric'
                data_window.clear()
            elif key_press == ord('i'):  # Imperial
                units = 'imperial'
                data_window.clear()
            elif key_press == ord('n'):  # Nautical
                units = 'nautical'
                data_window.clear()
            elif key_press == ord('d'):  # Refresh device listings
                gpsd_socket.send('?DEVICES;')
                device_window.clear()
            data_window.box()
            data_window.addstr(0, 2, 'AGPS3 Python {}.{}.{} GPSD Interface'.format(*sys.version_info), curses.A_BOLD)
            data_window.addstr(1, 2, 'Time: {} '.format(data_stream.time))
            data_window.addstr(2, 2, 'Latitude: {} '.format(sexagesimal(data_stream.lat, 'lat', form)))
            data_window.addstr(3, 2, 'Longitude: {} '.format(sexagesimal(data_stream.lon, 'lon', form)))
            data_window.addstr(4, 2, 'Altitude: {} {}'.format(*unit_conversion(data_stream.alt, units, length=True)))
            data_window.addstr(5, 2, 'Speed: {} {}'.format(*unit_conversion(data_stream.speed, units)))
            data_window.addstr(6, 2, 'Heading: {}° True'.format(data_stream.track))
            data_window.addstr(7, 2, 'Climb: {} {}'.format(*unit_conversion(data_stream.climb, units, length=True)))
            data_window.addstr(8, 2, 'Status: {:<}D '.format(data_stream.mode))
            data_window.addstr(9, 2, 'Latitude Err: +/-{} {}'.format(*unit_conversion(data_stream.epx, units, length=True)))
            data_window.addstr(10, 2, 'Longitude Err: +/-{} {}'.format(*unit_conversion(data_stream.epy, units, length=True)))
            data_window.addstr(11, 2, 'Altitude Err: +/-{} {}'.format(*unit_conversion(data_stream.epv, units, length=True)))
            data_window.addstr(12, 2, 'Course Err: +/-{} '.format(data_stream.epc), curses.A_DIM)
            data_window.addstr(13, 2, 'Speed Err: +/-{} {}'.format(*unit_conversion(data_stream.eps, units)), curses.A_DIM)
            data_window.addstr(14, 2, 'Time Offset: +/-{} '.format(data_stream.ept), curses.A_DIM)
            data_window.addstr(15, 2, 'gdop:{} pdop:{} tdop:{}'.format(data_stream.gdop, data_stream.pdop, data_stream.tdop))
            data_window.addstr(16, 2, 'ydop:{} xdop:{} '.format(data_stream.ydop, data_stream.xdop))
            data_window.addstr(17, 2, 'vdop:{} hdop:{} '.format(data_stream.vdop, data_stream.hdop))
            sat_window.clear()
            sat_window.box()
            sat_window.addstr(0, 2, 'Using {0[1]}/{0[0]} satellites (truncated)'.format(satellites_used(data_stream.satellites)))
            sat_window.addstr(1, 2, 'PRN Elev Azimuth SNR Used')
            line = 2
            if isinstance(data_stream.satellites, list):  # Nested lists of dictionaries are strings before data is present
                for sats in data_stream.satellites[0:10]:  # only the first 10 fit the window
                    sat_window.addstr(line, 2, '{PRN:>2} {el:>6} {az:>5} {ss:>5} {used:}'.format(**sats))
                    line += 1
            device_window.clear()
            device_window.box()
            if not isinstance(data_stream.devices, list):  # Local machines need a 'device' kick start
                gpsd_socket.send('?DEVICES;')  # to have valid data. I don't know why.
            if isinstance(data_stream.devices, list):  # Nested lists of dictionaries are strings before data is present.
                for gizmo in data_stream.devices:
                    start_time, _uicroseconds = gizmo['activated'].split('.')  # Remove '.000Z'
                    elapsed = elapsed_time_from(start_time)
                    device_window.addstr(1, 2, 'Activated: {}'.format(gizmo['activated']))
                    device_window.addstr(2, 2, 'Host:{0.host}:{0.port} {1}'.format(args, gizmo['path']))
                    device_window.addstr(3, 2, 'Driver:{driver} BPS:{bps}'.format(**gizmo))
                    device_window.addstr(4, 2, 'Cycle:{0} Hz {1!s:>14} Elapsed'.format(1 / gizmo['cycle'], elapsed))
            packet_window.clear()
            # packet_window.border(0)
            packet_window.scrollok(True)
            packet_window.addstr(0, 0, '{}'.format(new_data))
            # sleep(.9)
            data_window.refresh()
            sat_window.refresh()
            device_window.refresh()
            packet_window.refresh()
        else:  # Reduced CPU cycles with the non-blocking socket read, by putting 'sleep' here, rather than hitting
            sleep(.3)  # the socket fast and furious with hundreds of empty checks between sleeps.
def shut_down():
    """Restore the terminal, close the gpsd socket and exit.

    Called for both a 'q' keypress and ^C.  NOTE(review): the printed
    message and exit status 1 suggest an abnormal end even on a normal
    'q' quit -- confirm intent before changing.
    """
    curses.nocbreak()  # leave cbreak input mode
    curses.echo()      # re-enable keystroke echo
    curses.endwin()    # hand the terminal back to the shell
    gpsd_socket.close()
    print('Keyboard interrupt received\nTerminated by user\nGood Bye.\n')
    sys.exit(1)
if __name__ == '__main__':
    args = add_args()
    # Connect to the gpsd daemon and start streaming in the chosen protocol.
    gpsd_socket = agps3.GPSDSocket()
    gpsd_socket.connect(args.host, args.port)
    gpsd_socket.watch(gpsd_protocol=args.gpsd_protocol)
    data_stream = agps3.DataStream()
    # Curses setup: no echo, hidden cursor, cbreak (unbuffered) input.
    screen = curses.initscr()
    screen.clear()
    screen.scrollok(True)
    curses.noecho()
    curses.curs_set(0)
    curses.cbreak()
    try:
        if 'json' in args.gpsd_protocol:
            show_human()
        if 'nmea' in args.gpsd_protocol:
            show_nmea()
    except KeyboardInterrupt:
        shut_down()
    except (OSError, IOError) as error:
        # Restore the terminal first so the error report is readable.
        gpsd_socket.close()
        curses.nocbreak()
        curses.echo()
        curses.endwin()
        sys.stderr.write('\rAHUMAN error--> {}'.format(error))
        sys.stderr.write('\rahuman connection to gpsd at \'{0}\' on port \'{1}\' failed.\n'.format(args.host, args.port))
        sys.exit(1)  # TODO: gpsd existence check and start
#
# Someday a cleaner Python interface will live here
#
# End
|
jmoiron/johnny-cache | johnny/cache.py | disallowed_table | python | def disallowed_table(*tables):
# XXX: When using a black or white list, this has to be done EVERY query;
# It'd be nice to make this as fast as possible. In general, queries
# should have relatively few tables involved, and I don't imagine that
# blacklists would grow very vast. The fastest i've been able to come
# up with is to pre-create a blacklist set and use intersect.
return not bool(settings.WHITELIST.issuperset(tables)) if settings.WHITELIST\
else bool(settings.BLACKLIST.intersection(tables)) | Returns True if a set of tables is in the blacklist or, if a whitelist is set,
any of the tables is not in the whitelist. False otherwise. | train | https://github.com/jmoiron/johnny-cache/blob/d96ea94c5dfcde517ff8f65d6ba4e435d8a0168c/johnny/cache.py#L25-L34 | null | """Johnny's main caching functionality."""
from hashlib import md5
from uuid import uuid4
import django
from django.db.models.signals import post_save, post_delete
from . import localstore, signals
from . import settings
from .compat import (
force_bytes, force_text, string_types, text_type, empty_iter)
from .decorators import wraps, available_attrs
from .transaction import TransactionManager
class NotInCache(object):
    # Cache-miss marker.  Used rather than None so that queries whose cached
    # result is legitimately empty/None can be told apart from "not cached".
    pass
# Opaque sentinel stored in place of results that cannot be cached directly.
no_result_sentinel = "22c52d96-156a-4638-a38d-aae0051ee9df"
# Module-wide LocalStore instance (see johnny.localstore) shared by the cache machinery.
local = localstore.LocalStore()
def get_backend(**kwargs):
    """Return the QueryCacheBackend configured with ``kwargs``.

    QueryCacheBackend keeps Borg-style shared state (its ``__dict__`` is a
    class-level dict), so repeated calls hand back the same configuration
    unless new options are supplied; with no arguments you get the
    previously created (or default) backend.
    """
    return QueryCacheBackend(**kwargs)
def enable():
    """Turn johnny-cache on for this process.

    Intended for code outside the django request flow: scripts,
    management commands, async workers.
    """
    backend = get_backend()
    backend.patch()
def disable():
    """Turn johnny-cache off for the whole process.

    Writes performed while disabled will NOT invalidate cached tables,
    so use carefully.
    """
    backend = get_backend()
    backend.unpatch()
# Backwards-compatible aliases for enable()/disable().
patch,unpatch = enable,disable
def resolve_table(x):
    """Return the database table name for *x*.

    *x* may already be a table-name string, or a model whose
    ``_meta.db_table`` supplies the name.
    """
    return x if isinstance(x, string_types) else x._meta.db_table
def invalidate(*tables, **kwargs):
    """Invalidate the current generation for one or more tables.

    Each positional argument is either a table-name string or a model
    (resolved through ``resolve_table``).  Pass ``using='alias'`` to
    target a specific database (default 'default').  A no-op unless
    johnny-cache is currently patched in.
    """
    database = kwargs.get('using', 'default')
    backend = get_backend()
    if not backend._patched:
        return
    for table in tables:
        backend.keyhandler.invalidate_table(resolve_table(table), database)
def get_tables_for_query(query):
    """
    Takes a Django 'query' object and returns all tables that will be used in
    that query as a list.  Note that where clauses can have their own
    querysets with their own dependent queries, etc.
    """
    from django.db.models.sql.where import WhereNode, SubqueryConstraint
    from django.db.models.query import QuerySet
    # Tables named directly by the query's FROM/JOIN aliases.
    tables = set(v[0] for v in getattr(query, 'alias_map', {}).values())
    def get_sub_query_tables(node):
        # Tables referenced by a SubqueryConstraint's inner queryset.
        query = node.query_object
        if not hasattr(query, 'field_names'):
            query = query.values(*node.targets)
        else:
            query = query._clone()
        query = query.query
        return set(v[0] for v in getattr(query, 'alias_map', {}).values())
    def get_tables(node, tables):
        # Recursively walk a WhereNode tree, collecting referenced tables.
        if isinstance(node, SubqueryConstraint):
            return get_sub_query_tables(node)
        for child in node.children:
            if isinstance(child, WhereNode):
                tables |= set(get_tables(child, tables))
            elif not hasattr(child, '__iter__'):
                continue
            else:
                for item in (c for c in child if isinstance(c, QuerySet)):
                    # BUG FIX: get_tables_for_query returns a list; it must be
                    # wrapped in set() before |= (as the pre-1.6 variant below
                    # already does), otherwise this line raises TypeError.
                    tables |= set(get_tables_for_query(item.query))
        return tables
    if query.where and query.where.children:
        where_nodes = [c for c in query.where.children if isinstance(c, (WhereNode, SubqueryConstraint))]
        for node in where_nodes:
            tables |= get_tables(node, tables)
    return list(tables)
def get_tables_for_query_pre_16(query):
    """
    Takes a Django 'query' object and returns all tables that will be used in
    that query as a list.  Note that where clauses can have their own
    querysets with their own dependent queries, etc.

    Variant for Django < 1.6, which has no SubqueryConstraint node.
    """
    from django.db.models.sql.where import WhereNode
    from django.db.models.query import QuerySet
    # Tables named directly by the query's FROM/JOIN aliases.
    tables = set([v[0] for v in getattr(query,'alias_map',{}).values()])
    def get_tables(node, tables):
        # Recursively walk a WhereNode tree, collecting referenced tables.
        for child in node.children:
            if isinstance(child, WhereNode): # and child.children:
                tables = get_tables(child, tables)
            elif not hasattr(child, '__iter__'):
                continue
            else:
                for item in (c for c in child if isinstance(c, QuerySet)):
                    # NB: after the module-level rebinding on old Django,
                    # get_tables_for_query resolves to this variant itself.
                    tables |= set(get_tables_for_query(item.query))
        return tables
    if query.where and query.where.children:
        where_nodes = [c for c in query.where.children if isinstance(c, WhereNode)]
        for node in where_nodes:
            tables |= get_tables(node, tables)
    return list(tables)
# Django < 1.6 lacks SubqueryConstraint, so use the simpler walker there.
if django.VERSION[:2] < (1, 6):
    get_tables_for_query = get_tables_for_query_pre_16
# The KeyGen is used only to generate keys. Some of these keys will be used
# directly in the cache, while others are only general purpose functions to
# generate hashes off of one or more values.
class KeyGen(object):
    """This class is responsible for generating keys."""
    def __init__(self, prefix):
        # All generated cache keys are namespaced under this prefix.
        self.prefix = prefix
    def random_generator(self):
        """Creates a random unique id (md5 hex digest of a uuid4)."""
        return self.gen_key(force_bytes(uuid4()))
    def gen_table_key(self, table, db='default'):
        """
        Returns a key that is standard for a given table name and database
        alias. Total length up to 212 (max for memcache is 250).
        """
        table = force_text(table)
        db = force_text(settings.DB_CACHE_KEYS[db])
        # Over-long components are truncated and the remainder hashed so the
        # final key stays under the memcached 250-character limit.
        if len(table) > 100:
            table = table[0:68] + self.gen_key(table[68:])
        if db and len(db) > 100:
            db = db[0:68] + self.gen_key(db[68:])
        return '%s_%s_table_%s' % (self.prefix, db, table)
    def gen_multi_key(self, values, db='default'):
        """Takes a list of generations (not table keys) and returns a key."""
        db = settings.DB_CACHE_KEYS[db]
        if db and len(db) > 100:
            db = db[0:68] + self.gen_key(db[68:])
        return '%s_%s_multi_%s' % (self.prefix, db, self.gen_key(*values))
    @staticmethod
    def _convert(x):
        # Normalise any value to bytes so it can be fed to the hash.
        if isinstance(x, text_type):
            return x.encode('utf-8')
        return force_bytes(x)
    @staticmethod
    def _recursive_convert(x, key):
        # Feed every (possibly nested) item of x into the hash object `key`.
        for item in x:
            if isinstance(item, (tuple, list)):
                KeyGen._recursive_convert(item, key)
            else:
                key.update(KeyGen._convert(item))
    def gen_key(self, *values):
        """Generate a key (md5 hex digest) from one or more values."""
        key = md5()
        KeyGen._recursive_convert(values, key)
        return key.hexdigest()
class KeyHandler(object):
    """Handles pulling and invalidating the key from the cache based
    on the table names. Higher-level logic dealing with johnny cache specific
    keys go in this class."""
    def __init__(self, cache_backend, keygen=KeyGen, prefix=None):
        self.prefix = prefix
        self.keygen = keygen(prefix)
        self.cache_backend = cache_backend
    def get_generation(self, *tables, **kwargs):
        """Get the generation key for any number of tables."""
        db = kwargs.get('db', 'default')
        if len(tables) > 1:
            return self.get_multi_generation(tables, db)
        return self.get_single_generation(tables[0], db)
    def get_single_generation(self, table, db='default'):
        """Creates a random generation value for a single table name"""
        key = self.keygen.gen_table_key(table, db)
        val = self.cache_backend.get(key, None, db)
        #if local.get('in_test', None): print force_bytes(val).ljust(32), key
        if val is None:
            # First sight of this table (or evicted entry): start a fresh generation.
            val = self.keygen.random_generator()
            self.cache_backend.set(key, val, settings.MIDDLEWARE_SECONDS, db)
        return val
    def get_multi_generation(self, tables, db='default'):
        """Takes a list of table names and returns an aggregate
        value for the generation"""
        generations = []
        for table in tables:
            generations.append(self.get_single_generation(table, db))
        # The multi key is derived from the per-table generations, so bumping
        # any single table automatically invalidates the aggregate.
        key = self.keygen.gen_multi_key(generations, db)
        val = self.cache_backend.get(key, None, db)
        #if local.get('in_test', None): print force_bytes(val).ljust(32), key
        if val is None:
            val = self.keygen.random_generator()
            self.cache_backend.set(key, val, settings.MIDDLEWARE_SECONDS, db)
        return val
    def invalidate_table(self, table, db='default'):
        """Invalidates a table's generation and returns a new one
        (Note that this also invalidates all multi generations
        containing the table)"""
        key = self.keygen.gen_table_key(table, db)
        val = self.keygen.random_generator()
        self.cache_backend.set(key, val, settings.MIDDLEWARE_SECONDS, db)
        return val
    def sql_key(self, generation, sql, params, order, result_type,
                using='default'):
        """
        Return the specific cache key for the sql query described by the
        pieces of the query and the generation key.
        """
        # these keys will always look pretty opaque
        suffix = self.keygen.gen_key(sql, params, order, result_type)
        using = settings.DB_CACHE_KEYS[using]
        return '%s_%s_query_%s.%s' % (self.prefix, using, generation, suffix)
# XXX: Thread safety concerns? Should we only need to patch once per process?
class QueryCacheBackend(object):
    """This class is the engine behind the query cache. It reads the queries
    going through the django Query and returns from the cache using
    the generation keys, or on a miss from the database and caches the results.
    Each time a model is updated the table keys for that model are re-created,
    invalidating all cached querysets for that model.

    There are different QueryCacheBackend's for different versions of django;
    call ``johnny.cache.get_backend`` to automatically get the proper class.
    """
    # Borg pattern: every instance shares this dict, so configuration and
    # the patched flag are process-wide.
    __shared_state = {}

    def __init__(self, cache_backend=None, keyhandler=None, keygen=None):
        self.__dict__ = self.__shared_state
        self.prefix = settings.MIDDLEWARE_KEY_PREFIX
        if keyhandler:
            self.kh_class = keyhandler
        if keygen:
            self.kg_class = keygen
        # fall back to configured defaults only when neither an argument
        # nor previously-shared Borg state provides the collaborator
        if not cache_backend and not hasattr(self, 'cache_backend'):
            cache_backend = settings._get_backend()
        if not keygen and not hasattr(self, 'kg_class'):
            self.kg_class = KeyGen
        if keyhandler is None and not hasattr(self, 'kh_class'):
            self.kh_class = KeyHandler
        if cache_backend:
            self.cache_backend = TransactionManager(cache_backend,
                                                    self.kg_class)
            self.keyhandler = self.kh_class(self.cache_backend,
                                            self.kg_class, self.prefix)
        self._patched = getattr(self, '_patched', False)

    def _monkey_select(self, original):
        """Return a wrapper for a read-compiler ``execute_sql`` that serves
        results from the cache when possible and stores misses."""
        from django.db.models.sql.constants import MULTI
        from django.db.models.sql.datastructures import EmptyResultSet

        @wraps(original, assigned=available_attrs(original))
        def newfun(cls, *args, **kwargs):
            # NOTE: ``cls`` is actually the compiler *instance* being wrapped.
            if args:
                result_type = args[0]
            else:
                result_type = kwargs.get('result_type', MULTI)
            # write compilers subclass read compilers; never cache writes
            if any([isinstance(cls, c) for c in self._write_compilers]):
                return original(cls, *args, **kwargs)
            try:
                sql, params = cls.as_sql()
                if not sql:
                    raise EmptyResultSet
            except EmptyResultSet:
                if result_type == MULTI:
                    return empty_iter()
                else:
                    return
            db = getattr(cls, 'using', 'default')
            key, val = None, NotInCache()
            # check the blacklist for any of the involved tables; if it's not
            # there, then look for the value in the cache.
            tables = get_tables_for_query(cls.query)
            # if the tables are blacklisted, send a qc_skip signal
            blacklisted = disallowed_table(*tables)
            try:
                ordering_aliases = cls.ordering_aliases
            except AttributeError:
                # attribute moved onto the query object in some django versions
                ordering_aliases = cls.query.ordering_aliases
            if blacklisted:
                signals.qc_skip.send(sender=cls, tables=tables,
                                     query=(sql, params, ordering_aliases),
                                     key=key)
            if tables and not blacklisted:
                gen_key = self.keyhandler.get_generation(*tables, **{'db': db})
                key = self.keyhandler.sql_key(gen_key, sql, params,
                                              cls.get_ordering(),
                                              result_type, db)
                val = self.cache_backend.get(key, NotInCache(), db)
            if not isinstance(val, NotInCache):
                # cache hit; the sentinel marks a cached empty result
                if val == no_result_sentinel:
                    val = []
                signals.qc_hit.send(sender=cls, tables=tables,
                                    query=(sql, params, ordering_aliases),
                                    size=len(val), key=key)
                return val
            if not blacklisted:
                signals.qc_miss.send(sender=cls, tables=tables,
                                     query=(sql, params, ordering_aliases),
                                     key=key)
            val = original(cls, *args, **kwargs)
            if hasattr(val, '__iter__'):
                # Can't permanently cache lazy iterables without creating
                # a cacheable data structure. Note that this makes them
                # no longer lazy...
                # todo - create a smart iterable wrapper
                val = list(val)
            if key is not None:
                if not val:
                    # cache the sentinel rather than the empty value itself
                    self.cache_backend.set(key, no_result_sentinel, settings.MIDDLEWARE_SECONDS, db)
                else:
                    self.cache_backend.set(key, val, settings.MIDDLEWARE_SECONDS, db)
            return val
        return newfun

    def _monkey_write(self, original):
        """Return a wrapper for a write-compiler ``execute_sql`` that
        invalidates the generation of every table the write touches."""
        @wraps(original, assigned=available_attrs(original))
        def newfun(cls, *args, **kwargs):
            db = getattr(cls, 'using', 'default')
            from django.db.models.sql import compiler
            # we have to do this before we check the tables, since the tables
            # are actually being set in the original function
            ret = original(cls, *args, **kwargs)
            if isinstance(cls, compiler.SQLInsertCompiler):
                # Inserts are a special case where cls.tables
                # are not populated.
                tables = [cls.query.model._meta.db_table]
            else:
                # NOTE(review): historical code consulted
                # list(cls.query.table_map) here instead of query.tables.
                tables = cls.query.tables
            for table in tables:
                if not disallowed_table(table):
                    self.keyhandler.invalidate_table(table, db)
            return ret
        return newfun

    def patch(self):
        """
        monkey patches django.db.models.sql.compiler.SQL*Compiler series
        """
        from django.db.models.sql import compiler
        self._read_compilers = (
            compiler.SQLCompiler,
            compiler.SQLAggregateCompiler,
            compiler.SQLDateCompiler,
        )
        self._write_compilers = (
            compiler.SQLInsertCompiler,
            compiler.SQLDeleteCompiler,
            compiler.SQLUpdateCompiler,
        )
        if not self._patched:
            # remember the originals so unpatch() can restore them
            self._original = {}
            for reader in self._read_compilers:
                self._original[reader] = reader.execute_sql
                reader.execute_sql = self._monkey_select(reader.execute_sql)
            for updater in self._write_compilers:
                self._original[updater] = updater.execute_sql
                updater.execute_sql = self._monkey_write(updater.execute_sql)
            self._patched = True
            self.cache_backend.patch()
            self._handle_signals()

    def unpatch(self):
        """un-applies this patch."""
        if not self._patched:
            return
        for func in self._read_compilers + self._write_compilers:
            func.execute_sql = self._original[func]
        self.cache_backend.unpatch()
        self._patched = False

    def invalidate(self, instance, **kwargs):
        """Signal receiver: invalidate the saved/deleted instance's table and
        the tables of all models related to it."""
        if self._patched:
            table = resolve_table(instance)
            using = kwargs.get('using', 'default')
            if not disallowed_table(table):
                self.keyhandler.invalidate_table(table, db=using)
            tables = set()
            tables.add(table)
            try:
                instance._meta._related_objects_cache
            except AttributeError:
                # cache not built yet; build it before iterating
                instance._meta._fill_related_objects_cache()
            for obj in instance._meta._related_objects_cache.keys():
                obj_table = obj.model._meta.db_table
                if obj_table not in tables:
                    tables.add(obj_table)
                    if not disallowed_table(obj_table):
                        # NOTE(review): invalidates on the default db only —
                        # ``using`` is not forwarded here; confirm intended.
                        self.keyhandler.invalidate_table(obj_table)

    def _handle_signals(self):
        # invalidate on every model save/delete, for all senders
        post_save.connect(self.invalidate, sender=None)
        post_delete.connect(self.invalidate, sender=None)

    def flush_query_cache(self):
        """Bump the generation of every table in the current database."""
        from django.db import connection
        tables = connection.introspection.table_names()
        # seen_models = connection.introspection.installed_models(tables)
        for table in tables:
            # we want this to just work, so invalidate even things in blacklist
            self.keyhandler.invalidate_table(table)
|
jmoiron/johnny-cache | johnny/cache.py | invalidate | python | def invalidate(*tables, **kwargs):
backend = get_backend()
db = kwargs.get('using', 'default')
if backend._patched:
for t in map(resolve_table, tables):
backend.keyhandler.invalidate_table(t, db) | Invalidate the current generation for one or more tables. The arguments
can be either strings representing database table names or models. Pass in
kwarg ``using`` to set the database. | train | https://github.com/jmoiron/johnny-cache/blob/d96ea94c5dfcde517ff8f65d6ba4e435d8a0168c/johnny/cache.py#L67-L76 | [
"def get_backend(**kwargs):\n \"\"\"\n Get's a QueryCacheBackend object for the given options and current\n version of django. If no arguments are given, and a QCB has been\n created previously, ``get_backend`` returns that. Otherwise,\n ``get_backend`` will return the default backend.\n \"\"\"\n cls = QueryCacheBackend\n return cls(**kwargs)\n"
] | """Johnny's main caching functionality."""
from hashlib import md5
from uuid import uuid4
import django
from django.db.models.signals import post_save, post_delete
from . import localstore, signals
from . import settings
from .compat import (
force_bytes, force_text, string_types, text_type, empty_iter)
from .decorators import wraps, available_attrs
from .transaction import TransactionManager
class NotInCache(object):
    """Sentinel type returned on a cache miss.

    Used instead of ``None`` so that querysets whose cached value is
    empty (or None) can still be told apart from "not cached yet".
    """
    pass


# Opaque marker cached in place of an empty result; mapped back to [] on read.
no_result_sentinel = "22c52d96-156a-4638-a38d-aae0051ee9df"
local = localstore.LocalStore()
def disallowed_table(*tables):
    """Return True when caching must be skipped for this set of tables.

    With a whitelist configured, any table outside the whitelist disallows
    caching; otherwise any table found in the blacklist disallows it.
    """
    # XXX: runs on EVERY query, so keep it to one set operation; both
    # settings are pre-built sets for exactly that reason.
    if settings.WHITELIST:
        return not settings.WHITELIST.issuperset(tables)
    return bool(settings.BLACKLIST.intersection(tables))
def get_backend(**kwargs):
    """Return a ``QueryCacheBackend`` for the given options.

    The backend shares state across instances, so with no arguments this
    returns the previously configured backend when one exists; otherwise
    the default backend is created.
    """
    return QueryCacheBackend(**kwargs)
def enable():
    """Enable johnny-cache outside the django request flow — scripts,
    management commands, async workers, and similar contexts."""
    get_backend().patch()


def disable():
    """Disable johnny-cache for the whole process.

    Writes performed while johnny is disabled will NOT invalidate their
    tables, so cached results may go stale. Use carefully.
    """
    get_backend().unpatch()


# Historical aliases kept for backwards compatibility.
patch, unpatch = enable, disable
def resolve_table(x):
    """Return the db table name for *x*, a model instance or a string."""
    return x if isinstance(x, string_types) else x._meta.db_table
def get_tables_for_query(query):
    """
    Takes a Django 'query' object and returns all tables that will be used
    in that query as a list. WHERE clauses can contain their own querysets
    with their own dependent queries, so the WHERE tree is walked too.
    """
    from django.db.models.sql.where import WhereNode, SubqueryConstraint
    from django.db.models.query import QuerySet

    tables = set(v[0] for v in getattr(query, 'alias_map', {}).values())

    def get_sub_query_tables(node):
        # SubqueryConstraint wraps either a values-less Query or a QuerySet.
        query = node.query_object
        if not hasattr(query, 'field_names'):
            query = query.values(*node.targets)
        else:
            query = query._clone()
        query = query.query
        return set(v[0] for v in getattr(query, 'alias_map', {}).values())

    def get_tables(node, tables):
        if isinstance(node, SubqueryConstraint):
            return get_sub_query_tables(node)
        for child in node.children:
            if isinstance(child, WhereNode):
                # nested WHERE tree: recurse
                tables |= set(get_tables(child, tables))
            elif not hasattr(child, '__iter__'):
                continue
            else:
                # leaf tuples may embed whole querysets as rhs values
                for item in (c for c in child if isinstance(c, QuerySet)):
                    tables |= get_tables_for_query(item.query)
        return tables

    if query.where and query.where.children:
        where_nodes = [c for c in query.where.children
                       if isinstance(c, (WhereNode, SubqueryConstraint))]
        for node in where_nodes:
            tables |= get_tables(node, tables)
    return list(tables)
def get_tables_for_query_pre_16(query):
    """
    Pre-Django-1.6 variant of ``get_tables_for_query``: walks the WHERE
    tree for nested querysets, without SubqueryConstraint handling.
    """
    from django.db.models.sql.where import WhereNode
    from django.db.models.query import QuerySet

    tables = set(v[0] for v in getattr(query, 'alias_map', {}).values())

    def walk(node, tables):
        for child in node.children:
            if isinstance(child, WhereNode):
                tables = walk(child, tables)
            elif not hasattr(child, '__iter__'):
                continue
            else:
                for item in (c for c in child if isinstance(c, QuerySet)):
                    tables |= set(get_tables_for_query(item.query))
        return tables

    if query.where and query.where.children:
        for node in (c for c in query.where.children
                     if isinstance(c, WhereNode)):
            tables |= walk(node, tables)
    return list(tables)


# Old djangos lack SubqueryConstraint; swap in the legacy walker.
if django.VERSION[:2] < (1, 6):
    get_tables_for_query = get_tables_for_query_pre_16
# The KeyGen is used only to generate keys. Some of these keys will be used
# directly in the cache, while others are only general purpose functions to
# generate hashes off of one or more values.
class KeyGen(object):
    """Generates the hashed keys used throughout johnny-cache."""

    def __init__(self, prefix):
        self.prefix = prefix

    def random_generator(self):
        """Return a random unique id (the hash of a fresh uuid4)."""
        return self.gen_key(force_bytes(uuid4()))

    def gen_table_key(self, table, db='default'):
        """Return the standard key for a table name and database alias.

        Total length stays at or below 212 chars (memcache's cap is 250);
        overly long components have their tails replaced by a hash.
        """
        table = force_text(table)
        db = force_text(settings.DB_CACHE_KEYS[db])
        if len(table) > 100:
            table = table[0:68] + self.gen_key(table[68:])
        if db and len(db) > 100:
            db = db[0:68] + self.gen_key(db[68:])
        return '%s_%s_table_%s' % (self.prefix, db, table)

    def gen_multi_key(self, values, db='default'):
        """Take a list of generations (not table keys) and return a key."""
        db = settings.DB_CACHE_KEYS[db]
        if db and len(db) > 100:
            db = db[0:68] + self.gen_key(db[68:])
        return '%s_%s_multi_%s' % (self.prefix, db, self.gen_key(*values))

    @staticmethod
    def _convert(x):
        # hash input must be bytes; text is always encoded as UTF-8
        return x.encode('utf-8') if isinstance(x, text_type) else force_bytes(x)

    @staticmethod
    def _recursive_convert(x, key):
        # fold every (possibly nested) item into the hash object
        for item in x:
            if isinstance(item, (tuple, list)):
                KeyGen._recursive_convert(item, key)
            else:
                key.update(KeyGen._convert(item))

    def gen_key(self, *values):
        """Generate an md5 hex key from one or more (nestable) values."""
        digest = md5()
        KeyGen._recursive_convert(values, digest)
        return digest.hexdigest()
class KeyHandler(object):
"""Handles pulling and invalidating the key from from the cache based
on the table names. Higher-level logic dealing with johnny cache specific
keys go in this class."""
def __init__(self, cache_backend, keygen=KeyGen, prefix=None):
self.prefix = prefix
self.keygen = keygen(prefix)
self.cache_backend = cache_backend
def get_generation(self, *tables, **kwargs):
"""Get the generation key for any number of tables."""
db = kwargs.get('db', 'default')
if len(tables) > 1:
return self.get_multi_generation(tables, db)
return self.get_single_generation(tables[0], db)
def get_single_generation(self, table, db='default'):
"""Creates a random generation value for a single table name"""
key = self.keygen.gen_table_key(table, db)
val = self.cache_backend.get(key, None, db)
#if local.get('in_test', None): print force_bytes(val).ljust(32), key
if val is None:
val = self.keygen.random_generator()
self.cache_backend.set(key, val, settings.MIDDLEWARE_SECONDS, db)
return val
def get_multi_generation(self, tables, db='default'):
"""Takes a list of table names and returns an aggregate
value for the generation"""
generations = []
for table in tables:
generations.append(self.get_single_generation(table, db))
key = self.keygen.gen_multi_key(generations, db)
val = self.cache_backend.get(key, None, db)
#if local.get('in_test', None): print force_bytes(val).ljust(32), key
if val is None:
val = self.keygen.random_generator()
self.cache_backend.set(key, val, settings.MIDDLEWARE_SECONDS, db)
return val
def invalidate_table(self, table, db='default'):
"""Invalidates a table's generation and returns a new one
(Note that this also invalidates all multi generations
containing the table)"""
key = self.keygen.gen_table_key(table, db)
val = self.keygen.random_generator()
self.cache_backend.set(key, val, settings.MIDDLEWARE_SECONDS, db)
return val
def sql_key(self, generation, sql, params, order, result_type,
using='default'):
"""
Return the specific cache key for the sql query described by the
pieces of the query and the generation key.
"""
# these keys will always look pretty opaque
suffix = self.keygen.gen_key(sql, params, order, result_type)
using = settings.DB_CACHE_KEYS[using]
return '%s_%s_query_%s.%s' % (self.prefix, using, generation, suffix)
# XXX: Thread safety concerns? Should we only need to patch once per process?
class QueryCacheBackend(object):
"""This class is the engine behind the query cache. It reads the queries
going through the django Query and returns from the cache using
the generation keys, or on a miss from the database and caches the results.
Each time a model is updated the table keys for that model are re-created,
invalidating all cached querysets for that model.
There are different QueryCacheBackend's for different versions of django;
call ``johnny.cache.get_backend`` to automatically get the proper class.
"""
__shared_state = {}
def __init__(self, cache_backend=None, keyhandler=None, keygen=None):
self.__dict__ = self.__shared_state
self.prefix = settings.MIDDLEWARE_KEY_PREFIX
if keyhandler:
self.kh_class = keyhandler
if keygen:
self.kg_class = keygen
if not cache_backend and not hasattr(self, 'cache_backend'):
cache_backend = settings._get_backend()
if not keygen and not hasattr(self, 'kg_class'):
self.kg_class = KeyGen
if keyhandler is None and not hasattr(self, 'kh_class'):
self.kh_class = KeyHandler
if cache_backend:
self.cache_backend = TransactionManager(cache_backend,
self.kg_class)
self.keyhandler = self.kh_class(self.cache_backend,
self.kg_class, self.prefix)
self._patched = getattr(self, '_patched', False)
def _monkey_select(self, original):
from django.db.models.sql.constants import MULTI
from django.db.models.sql.datastructures import EmptyResultSet
@wraps(original, assigned=available_attrs(original))
def newfun(cls, *args, **kwargs):
if args:
result_type = args[0]
else:
result_type = kwargs.get('result_type', MULTI)
if any([isinstance(cls, c) for c in self._write_compilers]):
return original(cls, *args, **kwargs)
try:
sql, params = cls.as_sql()
if not sql:
raise EmptyResultSet
except EmptyResultSet:
if result_type == MULTI:
return empty_iter()
else:
return
db = getattr(cls, 'using', 'default')
key, val = None, NotInCache()
# check the blacklist for any of the involved tables; if it's not
# there, then look for the value in the cache.
tables = get_tables_for_query(cls.query)
# if the tables are blacklisted, send a qc_skip signal
blacklisted = disallowed_table(*tables)
try:
ordering_aliases = cls.ordering_aliases
except AttributeError:
ordering_aliases = cls.query.ordering_aliases
if blacklisted:
signals.qc_skip.send(sender=cls, tables=tables,
query=(sql, params, ordering_aliases),
key=key)
if tables and not blacklisted:
gen_key = self.keyhandler.get_generation(*tables, **{'db': db})
key = self.keyhandler.sql_key(gen_key, sql, params,
cls.get_ordering(),
result_type, db)
val = self.cache_backend.get(key, NotInCache(), db)
if not isinstance(val, NotInCache):
if val == no_result_sentinel:
val = []
signals.qc_hit.send(sender=cls, tables=tables,
query=(sql, params, ordering_aliases),
size=len(val), key=key)
return val
if not blacklisted:
signals.qc_miss.send(sender=cls, tables=tables,
query=(sql, params, ordering_aliases),
key=key)
val = original(cls, *args, **kwargs)
if hasattr(val, '__iter__'):
#Can't permanently cache lazy iterables without creating
#a cacheable data structure. Note that this makes them
#no longer lazy...
#todo - create a smart iterable wrapper
val = list(val)
if key is not None:
if not val:
self.cache_backend.set(key, no_result_sentinel, settings.MIDDLEWARE_SECONDS, db)
else:
self.cache_backend.set(key, val, settings.MIDDLEWARE_SECONDS, db)
return val
return newfun
def _monkey_write(self, original):
@wraps(original, assigned=available_attrs(original))
def newfun(cls, *args, **kwargs):
db = getattr(cls, 'using', 'default')
from django.db.models.sql import compiler
# we have to do this before we check the tables, since the tables
# are actually being set in the original function
ret = original(cls, *args, **kwargs)
if isinstance(cls, compiler.SQLInsertCompiler):
#Inserts are a special case where cls.tables
#are not populated.
tables = [cls.query.model._meta.db_table]
else:
#if cls.query.tables != list(cls.query.table_map):
# pass
#tables = list(cls.query.table_map)
tables = cls.query.tables
for table in tables:
if not disallowed_table(table):
self.keyhandler.invalidate_table(table, db)
return ret
return newfun
def patch(self):
"""
monkey patches django.db.models.sql.compiler.SQL*Compiler series
"""
from django.db.models.sql import compiler
self._read_compilers = (
compiler.SQLCompiler,
compiler.SQLAggregateCompiler,
compiler.SQLDateCompiler,
)
self._write_compilers = (
compiler.SQLInsertCompiler,
compiler.SQLDeleteCompiler,
compiler.SQLUpdateCompiler,
)
if not self._patched:
self._original = {}
for reader in self._read_compilers:
self._original[reader] = reader.execute_sql
reader.execute_sql = self._monkey_select(reader.execute_sql)
for updater in self._write_compilers:
self._original[updater] = updater.execute_sql
updater.execute_sql = self._monkey_write(updater.execute_sql)
self._patched = True
self.cache_backend.patch()
self._handle_signals()
def unpatch(self):
"""un-applies this patch."""
if not self._patched:
return
for func in self._read_compilers + self._write_compilers:
func.execute_sql = self._original[func]
self.cache_backend.unpatch()
self._patched = False
def invalidate(self, instance, **kwargs):
if self._patched:
table = resolve_table(instance)
using = kwargs.get('using', 'default')
if not disallowed_table(table):
self.keyhandler.invalidate_table(table, db=using)
tables = set()
tables.add(table)
try:
instance._meta._related_objects_cache
except AttributeError:
instance._meta._fill_related_objects_cache()
for obj in instance._meta._related_objects_cache.keys():
obj_table = obj.model._meta.db_table
if obj_table not in tables:
tables.add(obj_table)
if not disallowed_table(obj_table):
self.keyhandler.invalidate_table(obj_table)
def _handle_signals(self):
post_save.connect(self.invalidate, sender=None)
post_delete.connect(self.invalidate, sender=None)
def flush_query_cache(self):
from django.db import connection
tables = connection.introspection.table_names()
#seen_models = connection.introspection.installed_models(tables)
for table in tables:
# we want this to just work, so invalidate even things in blacklist
self.keyhandler.invalidate_table(table)
|
jmoiron/johnny-cache | johnny/cache.py | get_tables_for_query | python | def get_tables_for_query(query):
from django.db.models.sql.where import WhereNode, SubqueryConstraint
from django.db.models.query import QuerySet
tables = set([v[0] for v in getattr(query,'alias_map',{}).values()])
def get_sub_query_tables(node):
query = node.query_object
if not hasattr(query, 'field_names'):
query = query.values(*node.targets)
else:
query = query._clone()
query = query.query
return set(v[0] for v in getattr(query, 'alias_map',{}).values())
def get_tables(node, tables):
if isinstance(node, SubqueryConstraint):
return get_sub_query_tables(node)
for child in node.children:
if isinstance(child, WhereNode): # and child.children:
tables |= set(get_tables(child, tables))
elif not hasattr(child, '__iter__'):
continue
else:
for item in (c for c in child if isinstance(c, QuerySet)):
tables |= get_tables_for_query(item.query)
return tables
if query.where and query.where.children:
where_nodes = [c for c in query.where.children if isinstance(c, (WhereNode, SubqueryConstraint))]
for node in where_nodes:
tables |= get_tables(node, tables)
return list(tables) | Takes a Django 'query' object and returns all tables that will be used in
that query as a list. Note that where clauses can have their own
querysets with their own dependent queries, etc. | train | https://github.com/jmoiron/johnny-cache/blob/d96ea94c5dfcde517ff8f65d6ba4e435d8a0168c/johnny/cache.py#L79-L116 | [
"def get_tables(node, tables):\n if isinstance(node, SubqueryConstraint):\n return get_sub_query_tables(node)\n for child in node.children:\n if isinstance(child, WhereNode): # and child.children:\n tables |= set(get_tables(child, tables))\n elif not hasattr(child, '__iter__'):\n continue\n else:\n for item in (c for c in child if isinstance(c, QuerySet)):\n tables |= get_tables_for_query(item.query)\n return tables\n"
] | """Johnny's main caching functionality."""
from hashlib import md5
from uuid import uuid4
import django
from django.db.models.signals import post_save, post_delete
from . import localstore, signals
from . import settings
from .compat import (
force_bytes, force_text, string_types, text_type, empty_iter)
from .decorators import wraps, available_attrs
from .transaction import TransactionManager
class NotInCache(object):
#This is used rather than None to properly cache empty querysets
pass
no_result_sentinel = "22c52d96-156a-4638-a38d-aae0051ee9df"
local = localstore.LocalStore()
def disallowed_table(*tables):
"""Returns True if a set of tables is in the blacklist or, if a whitelist is set,
any of the tables is not in the whitelist. False otherwise."""
# XXX: When using a black or white list, this has to be done EVERY query;
# It'd be nice to make this as fast as possible. In general, queries
# should have relatively few tables involved, and I don't imagine that
# blacklists would grow very vast. The fastest i've been able to come
# up with is to pre-create a blacklist set and use intersect.
return not bool(settings.WHITELIST.issuperset(tables)) if settings.WHITELIST\
else bool(settings.BLACKLIST.intersection(tables))
def get_backend(**kwargs):
"""
Get's a QueryCacheBackend object for the given options and current
version of django. If no arguments are given, and a QCB has been
created previously, ``get_backend`` returns that. Otherwise,
``get_backend`` will return the default backend.
"""
cls = QueryCacheBackend
return cls(**kwargs)
def enable():
"""Enable johnny-cache, for use in scripts, management commands, async
workers, or other code outside the django request flow."""
get_backend().patch()
def disable():
"""Disable johnny-cache. This will disable johnny-cache for the whole
process, and if writes happen during the time where johnny is disabled,
tables will not be invalidated properly. Use Carefully."""
get_backend().unpatch()
patch,unpatch = enable,disable
def resolve_table(x):
"""Return a table name for x, where x is either a model instance or a string."""
if isinstance(x, string_types):
return x
return x._meta.db_table
def invalidate(*tables, **kwargs):
"""Invalidate the current generation for one or more tables. The arguments
can be either strings representing database table names or models. Pass in
kwarg ``using`` to set the database."""
backend = get_backend()
db = kwargs.get('using', 'default')
if backend._patched:
for t in map(resolve_table, tables):
backend.keyhandler.invalidate_table(t, db)
def get_tables_for_query_pre_16(query):
"""
Takes a Django 'query' object and returns all tables that will be used in
that query as a list. Note that where clauses can have their own
querysets with their own dependent queries, etc.
"""
from django.db.models.sql.where import WhereNode
from django.db.models.query import QuerySet
tables = set([v[0] for v in getattr(query,'alias_map',{}).values()])
def get_tables(node, tables):
for child in node.children:
if isinstance(child, WhereNode): # and child.children:
tables = get_tables(child, tables)
elif not hasattr(child, '__iter__'):
continue
else:
for item in (c for c in child if isinstance(c, QuerySet)):
tables |= set(get_tables_for_query(item.query))
return tables
if query.where and query.where.children:
where_nodes = [c for c in query.where.children if isinstance(c, WhereNode)]
for node in where_nodes:
tables |= get_tables(node, tables)
return list(tables)
if django.VERSION[:2] < (1, 6):
get_tables_for_query = get_tables_for_query_pre_16
# The KeyGen is used only to generate keys. Some of these keys will be used
# directly in the cache, while others are only general purpose functions to
# generate hashes off of one or more values.
class KeyGen(object):
"""This class is responsible for generating keys."""
def __init__(self, prefix):
self.prefix = prefix
def random_generator(self):
"""Creates a random unique id."""
return self.gen_key(force_bytes(uuid4()))
def gen_table_key(self, table, db='default'):
"""
Returns a key that is standard for a given table name and database
alias. Total length up to 212 (max for memcache is 250).
"""
table = force_text(table)
db = force_text(settings.DB_CACHE_KEYS[db])
if len(table) > 100:
table = table[0:68] + self.gen_key(table[68:])
if db and len(db) > 100:
db = db[0:68] + self.gen_key(db[68:])
return '%s_%s_table_%s' % (self.prefix, db, table)
def gen_multi_key(self, values, db='default'):
"""Takes a list of generations (not table keys) and returns a key."""
db = settings.DB_CACHE_KEYS[db]
if db and len(db) > 100:
db = db[0:68] + self.gen_key(db[68:])
return '%s_%s_multi_%s' % (self.prefix, db, self.gen_key(*values))
@staticmethod
def _convert(x):
if isinstance(x, text_type):
return x.encode('utf-8')
return force_bytes(x)
@staticmethod
def _recursive_convert(x, key):
for item in x:
if isinstance(item, (tuple, list)):
KeyGen._recursive_convert(item, key)
else:
key.update(KeyGen._convert(item))
def gen_key(self, *values):
"""Generate a key from one or more values."""
key = md5()
KeyGen._recursive_convert(values, key)
return key.hexdigest()
class KeyHandler(object):
"""Handles pulling and invalidating the key from from the cache based
on the table names. Higher-level logic dealing with johnny cache specific
keys go in this class."""
def __init__(self, cache_backend, keygen=KeyGen, prefix=None):
self.prefix = prefix
self.keygen = keygen(prefix)
self.cache_backend = cache_backend
def get_generation(self, *tables, **kwargs):
"""Get the generation key for any number of tables."""
db = kwargs.get('db', 'default')
if len(tables) > 1:
return self.get_multi_generation(tables, db)
return self.get_single_generation(tables[0], db)
def get_single_generation(self, table, db='default'):
"""Creates a random generation value for a single table name"""
key = self.keygen.gen_table_key(table, db)
val = self.cache_backend.get(key, None, db)
#if local.get('in_test', None): print force_bytes(val).ljust(32), key
if val is None:
val = self.keygen.random_generator()
self.cache_backend.set(key, val, settings.MIDDLEWARE_SECONDS, db)
return val
def get_multi_generation(self, tables, db='default'):
"""Takes a list of table names and returns an aggregate
value for the generation"""
generations = []
for table in tables:
generations.append(self.get_single_generation(table, db))
key = self.keygen.gen_multi_key(generations, db)
val = self.cache_backend.get(key, None, db)
#if local.get('in_test', None): print force_bytes(val).ljust(32), key
if val is None:
val = self.keygen.random_generator()
self.cache_backend.set(key, val, settings.MIDDLEWARE_SECONDS, db)
return val
def invalidate_table(self, table, db='default'):
"""Invalidates a table's generation and returns a new one
(Note that this also invalidates all multi generations
containing the table)"""
key = self.keygen.gen_table_key(table, db)
val = self.keygen.random_generator()
self.cache_backend.set(key, val, settings.MIDDLEWARE_SECONDS, db)
return val
def sql_key(self, generation, sql, params, order, result_type,
using='default'):
"""
Return the specific cache key for the sql query described by the
pieces of the query and the generation key.
"""
# these keys will always look pretty opaque
suffix = self.keygen.gen_key(sql, params, order, result_type)
using = settings.DB_CACHE_KEYS[using]
return '%s_%s_query_%s.%s' % (self.prefix, using, generation, suffix)
# XXX: Thread safety concerns? Should we only need to patch once per process?
class QueryCacheBackend(object):
"""This class is the engine behind the query cache. It reads the queries
going through the django Query and returns from the cache using
the generation keys, or on a miss from the database and caches the results.
Each time a model is updated the table keys for that model are re-created,
invalidating all cached querysets for that model.
There are different QueryCacheBackend's for different versions of django;
call ``johnny.cache.get_backend`` to automatically get the proper class.
"""
__shared_state = {}
def __init__(self, cache_backend=None, keyhandler=None, keygen=None):
self.__dict__ = self.__shared_state
self.prefix = settings.MIDDLEWARE_KEY_PREFIX
if keyhandler:
self.kh_class = keyhandler
if keygen:
self.kg_class = keygen
if not cache_backend and not hasattr(self, 'cache_backend'):
cache_backend = settings._get_backend()
if not keygen and not hasattr(self, 'kg_class'):
self.kg_class = KeyGen
if keyhandler is None and not hasattr(self, 'kh_class'):
self.kh_class = KeyHandler
if cache_backend:
self.cache_backend = TransactionManager(cache_backend,
self.kg_class)
self.keyhandler = self.kh_class(self.cache_backend,
self.kg_class, self.prefix)
self._patched = getattr(self, '_patched', False)
def _monkey_select(self, original):
from django.db.models.sql.constants import MULTI
from django.db.models.sql.datastructures import EmptyResultSet
@wraps(original, assigned=available_attrs(original))
def newfun(cls, *args, **kwargs):
if args:
result_type = args[0]
else:
result_type = kwargs.get('result_type', MULTI)
if any([isinstance(cls, c) for c in self._write_compilers]):
return original(cls, *args, **kwargs)
try:
sql, params = cls.as_sql()
if not sql:
raise EmptyResultSet
except EmptyResultSet:
if result_type == MULTI:
return empty_iter()
else:
return
db = getattr(cls, 'using', 'default')
key, val = None, NotInCache()
# check the blacklist for any of the involved tables; if it's not
# there, then look for the value in the cache.
tables = get_tables_for_query(cls.query)
# if the tables are blacklisted, send a qc_skip signal
blacklisted = disallowed_table(*tables)
try:
ordering_aliases = cls.ordering_aliases
except AttributeError:
ordering_aliases = cls.query.ordering_aliases
if blacklisted:
signals.qc_skip.send(sender=cls, tables=tables,
query=(sql, params, ordering_aliases),
key=key)
if tables and not blacklisted:
gen_key = self.keyhandler.get_generation(*tables, **{'db': db})
key = self.keyhandler.sql_key(gen_key, sql, params,
cls.get_ordering(),
result_type, db)
val = self.cache_backend.get(key, NotInCache(), db)
if not isinstance(val, NotInCache):
if val == no_result_sentinel:
val = []
signals.qc_hit.send(sender=cls, tables=tables,
query=(sql, params, ordering_aliases),
size=len(val), key=key)
return val
if not blacklisted:
signals.qc_miss.send(sender=cls, tables=tables,
query=(sql, params, ordering_aliases),
key=key)
val = original(cls, *args, **kwargs)
if hasattr(val, '__iter__'):
#Can't permanently cache lazy iterables without creating
#a cacheable data structure. Note that this makes them
#no longer lazy...
#todo - create a smart iterable wrapper
val = list(val)
if key is not None:
if not val:
self.cache_backend.set(key, no_result_sentinel, settings.MIDDLEWARE_SECONDS, db)
else:
self.cache_backend.set(key, val, settings.MIDDLEWARE_SECONDS, db)
return val
return newfun
def _monkey_write(self, original):
@wraps(original, assigned=available_attrs(original))
def newfun(cls, *args, **kwargs):
db = getattr(cls, 'using', 'default')
from django.db.models.sql import compiler
# we have to do this before we check the tables, since the tables
# are actually being set in the original function
ret = original(cls, *args, **kwargs)
if isinstance(cls, compiler.SQLInsertCompiler):
#Inserts are a special case where cls.tables
#are not populated.
tables = [cls.query.model._meta.db_table]
else:
#if cls.query.tables != list(cls.query.table_map):
# pass
#tables = list(cls.query.table_map)
tables = cls.query.tables
for table in tables:
if not disallowed_table(table):
self.keyhandler.invalidate_table(table, db)
return ret
return newfun
def patch(self):
"""
monkey patches django.db.models.sql.compiler.SQL*Compiler series
"""
from django.db.models.sql import compiler
self._read_compilers = (
compiler.SQLCompiler,
compiler.SQLAggregateCompiler,
compiler.SQLDateCompiler,
)
self._write_compilers = (
compiler.SQLInsertCompiler,
compiler.SQLDeleteCompiler,
compiler.SQLUpdateCompiler,
)
if not self._patched:
self._original = {}
for reader in self._read_compilers:
self._original[reader] = reader.execute_sql
reader.execute_sql = self._monkey_select(reader.execute_sql)
for updater in self._write_compilers:
self._original[updater] = updater.execute_sql
updater.execute_sql = self._monkey_write(updater.execute_sql)
self._patched = True
self.cache_backend.patch()
self._handle_signals()
def unpatch(self):
"""un-applies this patch."""
if not self._patched:
return
for func in self._read_compilers + self._write_compilers:
func.execute_sql = self._original[func]
self.cache_backend.unpatch()
self._patched = False
def invalidate(self, instance, **kwargs):
if self._patched:
table = resolve_table(instance)
using = kwargs.get('using', 'default')
if not disallowed_table(table):
self.keyhandler.invalidate_table(table, db=using)
tables = set()
tables.add(table)
try:
instance._meta._related_objects_cache
except AttributeError:
instance._meta._fill_related_objects_cache()
for obj in instance._meta._related_objects_cache.keys():
obj_table = obj.model._meta.db_table
if obj_table not in tables:
tables.add(obj_table)
if not disallowed_table(obj_table):
self.keyhandler.invalidate_table(obj_table)
def _handle_signals(self):
post_save.connect(self.invalidate, sender=None)
post_delete.connect(self.invalidate, sender=None)
def flush_query_cache(self):
from django.db import connection
tables = connection.introspection.table_names()
#seen_models = connection.introspection.installed_models(tables)
for table in tables:
# we want this to just work, so invalidate even things in blacklist
self.keyhandler.invalidate_table(table)
|
jmoiron/johnny-cache | johnny/cache.py | get_tables_for_query_pre_16 | python | def get_tables_for_query_pre_16(query):
from django.db.models.sql.where import WhereNode
from django.db.models.query import QuerySet
tables = set([v[0] for v in getattr(query,'alias_map',{}).values()])
def get_tables(node, tables):
for child in node.children:
if isinstance(child, WhereNode): # and child.children:
tables = get_tables(child, tables)
elif not hasattr(child, '__iter__'):
continue
else:
for item in (c for c in child if isinstance(c, QuerySet)):
tables |= set(get_tables_for_query(item.query))
return tables
if query.where and query.where.children:
where_nodes = [c for c in query.where.children if isinstance(c, WhereNode)]
for node in where_nodes:
tables |= get_tables(node, tables)
return list(tables) | Takes a Django 'query' object and returns all tables that will be used in
that query as a list. Note that where clauses can have their own
querysets with their own dependent queries, etc. | train | https://github.com/jmoiron/johnny-cache/blob/d96ea94c5dfcde517ff8f65d6ba4e435d8a0168c/johnny/cache.py#L118-L144 | [
"def get_tables(node, tables):\n for child in node.children:\n if isinstance(child, WhereNode): # and child.children:\n tables = get_tables(child, tables)\n elif not hasattr(child, '__iter__'):\n continue\n else:\n for item in (c for c in child if isinstance(c, QuerySet)):\n tables |= set(get_tables_for_query(item.query))\n return tables\n"
] | """Johnny's main caching functionality."""
from hashlib import md5
from uuid import uuid4
import django
from django.db.models.signals import post_save, post_delete
from . import localstore, signals
from . import settings
from .compat import (
force_bytes, force_text, string_types, text_type, empty_iter)
from .decorators import wraps, available_attrs
from .transaction import TransactionManager
class NotInCache(object):
#This is used rather than None to properly cache empty querysets
pass
no_result_sentinel = "22c52d96-156a-4638-a38d-aae0051ee9df"
local = localstore.LocalStore()
def disallowed_table(*tables):
"""Returns True if a set of tables is in the blacklist or, if a whitelist is set,
any of the tables is not in the whitelist. False otherwise."""
# XXX: When using a black or white list, this has to be done EVERY query;
# It'd be nice to make this as fast as possible. In general, queries
# should have relatively few tables involved, and I don't imagine that
# blacklists would grow very vast. The fastest i've been able to come
# up with is to pre-create a blacklist set and use intersect.
return not bool(settings.WHITELIST.issuperset(tables)) if settings.WHITELIST\
else bool(settings.BLACKLIST.intersection(tables))
def get_backend(**kwargs):
"""
Get's a QueryCacheBackend object for the given options and current
version of django. If no arguments are given, and a QCB has been
created previously, ``get_backend`` returns that. Otherwise,
``get_backend`` will return the default backend.
"""
cls = QueryCacheBackend
return cls(**kwargs)
def enable():
"""Enable johnny-cache, for use in scripts, management commands, async
workers, or other code outside the django request flow."""
get_backend().patch()
def disable():
"""Disable johnny-cache. This will disable johnny-cache for the whole
process, and if writes happen during the time where johnny is disabled,
tables will not be invalidated properly. Use Carefully."""
get_backend().unpatch()
patch,unpatch = enable,disable
def resolve_table(x):
"""Return a table name for x, where x is either a model instance or a string."""
if isinstance(x, string_types):
return x
return x._meta.db_table
def invalidate(*tables, **kwargs):
"""Invalidate the current generation for one or more tables. The arguments
can be either strings representing database table names or models. Pass in
kwarg ``using`` to set the database."""
backend = get_backend()
db = kwargs.get('using', 'default')
if backend._patched:
for t in map(resolve_table, tables):
backend.keyhandler.invalidate_table(t, db)
def get_tables_for_query(query):
"""
Takes a Django 'query' object and returns all tables that will be used in
that query as a list. Note that where clauses can have their own
querysets with their own dependent queries, etc.
"""
from django.db.models.sql.where import WhereNode, SubqueryConstraint
from django.db.models.query import QuerySet
tables = set([v[0] for v in getattr(query,'alias_map',{}).values()])
def get_sub_query_tables(node):
query = node.query_object
if not hasattr(query, 'field_names'):
query = query.values(*node.targets)
else:
query = query._clone()
query = query.query
return set(v[0] for v in getattr(query, 'alias_map',{}).values())
def get_tables(node, tables):
if isinstance(node, SubqueryConstraint):
return get_sub_query_tables(node)
for child in node.children:
if isinstance(child, WhereNode): # and child.children:
tables |= set(get_tables(child, tables))
elif not hasattr(child, '__iter__'):
continue
else:
for item in (c for c in child if isinstance(c, QuerySet)):
tables |= get_tables_for_query(item.query)
return tables
if query.where and query.where.children:
where_nodes = [c for c in query.where.children if isinstance(c, (WhereNode, SubqueryConstraint))]
for node in where_nodes:
tables |= get_tables(node, tables)
return list(tables)
if django.VERSION[:2] < (1, 6):
get_tables_for_query = get_tables_for_query_pre_16
# The KeyGen is used only to generate keys. Some of these keys will be used
# directly in the cache, while others are only general purpose functions to
# generate hashes off of one or more values.
class KeyGen(object):
"""This class is responsible for generating keys."""
def __init__(self, prefix):
self.prefix = prefix
def random_generator(self):
"""Creates a random unique id."""
return self.gen_key(force_bytes(uuid4()))
def gen_table_key(self, table, db='default'):
"""
Returns a key that is standard for a given table name and database
alias. Total length up to 212 (max for memcache is 250).
"""
table = force_text(table)
db = force_text(settings.DB_CACHE_KEYS[db])
if len(table) > 100:
table = table[0:68] + self.gen_key(table[68:])
if db and len(db) > 100:
db = db[0:68] + self.gen_key(db[68:])
return '%s_%s_table_%s' % (self.prefix, db, table)
def gen_multi_key(self, values, db='default'):
"""Takes a list of generations (not table keys) and returns a key."""
db = settings.DB_CACHE_KEYS[db]
if db and len(db) > 100:
db = db[0:68] + self.gen_key(db[68:])
return '%s_%s_multi_%s' % (self.prefix, db, self.gen_key(*values))
@staticmethod
def _convert(x):
if isinstance(x, text_type):
return x.encode('utf-8')
return force_bytes(x)
@staticmethod
def _recursive_convert(x, key):
for item in x:
if isinstance(item, (tuple, list)):
KeyGen._recursive_convert(item, key)
else:
key.update(KeyGen._convert(item))
def gen_key(self, *values):
"""Generate a key from one or more values."""
key = md5()
KeyGen._recursive_convert(values, key)
return key.hexdigest()
class KeyHandler(object):
"""Handles pulling and invalidating the key from from the cache based
on the table names. Higher-level logic dealing with johnny cache specific
keys go in this class."""
def __init__(self, cache_backend, keygen=KeyGen, prefix=None):
self.prefix = prefix
self.keygen = keygen(prefix)
self.cache_backend = cache_backend
def get_generation(self, *tables, **kwargs):
"""Get the generation key for any number of tables."""
db = kwargs.get('db', 'default')
if len(tables) > 1:
return self.get_multi_generation(tables, db)
return self.get_single_generation(tables[0], db)
def get_single_generation(self, table, db='default'):
"""Creates a random generation value for a single table name"""
key = self.keygen.gen_table_key(table, db)
val = self.cache_backend.get(key, None, db)
#if local.get('in_test', None): print force_bytes(val).ljust(32), key
if val is None:
val = self.keygen.random_generator()
self.cache_backend.set(key, val, settings.MIDDLEWARE_SECONDS, db)
return val
def get_multi_generation(self, tables, db='default'):
"""Takes a list of table names and returns an aggregate
value for the generation"""
generations = []
for table in tables:
generations.append(self.get_single_generation(table, db))
key = self.keygen.gen_multi_key(generations, db)
val = self.cache_backend.get(key, None, db)
#if local.get('in_test', None): print force_bytes(val).ljust(32), key
if val is None:
val = self.keygen.random_generator()
self.cache_backend.set(key, val, settings.MIDDLEWARE_SECONDS, db)
return val
def invalidate_table(self, table, db='default'):
"""Invalidates a table's generation and returns a new one
(Note that this also invalidates all multi generations
containing the table)"""
key = self.keygen.gen_table_key(table, db)
val = self.keygen.random_generator()
self.cache_backend.set(key, val, settings.MIDDLEWARE_SECONDS, db)
return val
def sql_key(self, generation, sql, params, order, result_type,
using='default'):
"""
Return the specific cache key for the sql query described by the
pieces of the query and the generation key.
"""
# these keys will always look pretty opaque
suffix = self.keygen.gen_key(sql, params, order, result_type)
using = settings.DB_CACHE_KEYS[using]
return '%s_%s_query_%s.%s' % (self.prefix, using, generation, suffix)
# XXX: Thread safety concerns? Should we only need to patch once per process?
class QueryCacheBackend(object):
"""This class is the engine behind the query cache. It reads the queries
going through the django Query and returns from the cache using
the generation keys, or on a miss from the database and caches the results.
Each time a model is updated the table keys for that model are re-created,
invalidating all cached querysets for that model.
There are different QueryCacheBackend's for different versions of django;
call ``johnny.cache.get_backend`` to automatically get the proper class.
"""
__shared_state = {}
def __init__(self, cache_backend=None, keyhandler=None, keygen=None):
self.__dict__ = self.__shared_state
self.prefix = settings.MIDDLEWARE_KEY_PREFIX
if keyhandler:
self.kh_class = keyhandler
if keygen:
self.kg_class = keygen
if not cache_backend and not hasattr(self, 'cache_backend'):
cache_backend = settings._get_backend()
if not keygen and not hasattr(self, 'kg_class'):
self.kg_class = KeyGen
if keyhandler is None and not hasattr(self, 'kh_class'):
self.kh_class = KeyHandler
if cache_backend:
self.cache_backend = TransactionManager(cache_backend,
self.kg_class)
self.keyhandler = self.kh_class(self.cache_backend,
self.kg_class, self.prefix)
self._patched = getattr(self, '_patched', False)
def _monkey_select(self, original):
from django.db.models.sql.constants import MULTI
from django.db.models.sql.datastructures import EmptyResultSet
@wraps(original, assigned=available_attrs(original))
def newfun(cls, *args, **kwargs):
if args:
result_type = args[0]
else:
result_type = kwargs.get('result_type', MULTI)
if any([isinstance(cls, c) for c in self._write_compilers]):
return original(cls, *args, **kwargs)
try:
sql, params = cls.as_sql()
if not sql:
raise EmptyResultSet
except EmptyResultSet:
if result_type == MULTI:
return empty_iter()
else:
return
db = getattr(cls, 'using', 'default')
key, val = None, NotInCache()
# check the blacklist for any of the involved tables; if it's not
# there, then look for the value in the cache.
tables = get_tables_for_query(cls.query)
# if the tables are blacklisted, send a qc_skip signal
blacklisted = disallowed_table(*tables)
try:
ordering_aliases = cls.ordering_aliases
except AttributeError:
ordering_aliases = cls.query.ordering_aliases
if blacklisted:
signals.qc_skip.send(sender=cls, tables=tables,
query=(sql, params, ordering_aliases),
key=key)
if tables and not blacklisted:
gen_key = self.keyhandler.get_generation(*tables, **{'db': db})
key = self.keyhandler.sql_key(gen_key, sql, params,
cls.get_ordering(),
result_type, db)
val = self.cache_backend.get(key, NotInCache(), db)
if not isinstance(val, NotInCache):
if val == no_result_sentinel:
val = []
signals.qc_hit.send(sender=cls, tables=tables,
query=(sql, params, ordering_aliases),
size=len(val), key=key)
return val
if not blacklisted:
signals.qc_miss.send(sender=cls, tables=tables,
query=(sql, params, ordering_aliases),
key=key)
val = original(cls, *args, **kwargs)
if hasattr(val, '__iter__'):
#Can't permanently cache lazy iterables without creating
#a cacheable data structure. Note that this makes them
#no longer lazy...
#todo - create a smart iterable wrapper
val = list(val)
if key is not None:
if not val:
self.cache_backend.set(key, no_result_sentinel, settings.MIDDLEWARE_SECONDS, db)
else:
self.cache_backend.set(key, val, settings.MIDDLEWARE_SECONDS, db)
return val
return newfun
def _monkey_write(self, original):
@wraps(original, assigned=available_attrs(original))
def newfun(cls, *args, **kwargs):
db = getattr(cls, 'using', 'default')
from django.db.models.sql import compiler
# we have to do this before we check the tables, since the tables
# are actually being set in the original function
ret = original(cls, *args, **kwargs)
if isinstance(cls, compiler.SQLInsertCompiler):
#Inserts are a special case where cls.tables
#are not populated.
tables = [cls.query.model._meta.db_table]
else:
#if cls.query.tables != list(cls.query.table_map):
# pass
#tables = list(cls.query.table_map)
tables = cls.query.tables
for table in tables:
if not disallowed_table(table):
self.keyhandler.invalidate_table(table, db)
return ret
return newfun
def patch(self):
"""
monkey patches django.db.models.sql.compiler.SQL*Compiler series
"""
from django.db.models.sql import compiler
self._read_compilers = (
compiler.SQLCompiler,
compiler.SQLAggregateCompiler,
compiler.SQLDateCompiler,
)
self._write_compilers = (
compiler.SQLInsertCompiler,
compiler.SQLDeleteCompiler,
compiler.SQLUpdateCompiler,
)
if not self._patched:
self._original = {}
for reader in self._read_compilers:
self._original[reader] = reader.execute_sql
reader.execute_sql = self._monkey_select(reader.execute_sql)
for updater in self._write_compilers:
self._original[updater] = updater.execute_sql
updater.execute_sql = self._monkey_write(updater.execute_sql)
self._patched = True
self.cache_backend.patch()
self._handle_signals()
def unpatch(self):
"""un-applies this patch."""
if not self._patched:
return
for func in self._read_compilers + self._write_compilers:
func.execute_sql = self._original[func]
self.cache_backend.unpatch()
self._patched = False
def invalidate(self, instance, **kwargs):
if self._patched:
table = resolve_table(instance)
using = kwargs.get('using', 'default')
if not disallowed_table(table):
self.keyhandler.invalidate_table(table, db=using)
tables = set()
tables.add(table)
try:
instance._meta._related_objects_cache
except AttributeError:
instance._meta._fill_related_objects_cache()
for obj in instance._meta._related_objects_cache.keys():
obj_table = obj.model._meta.db_table
if obj_table not in tables:
tables.add(obj_table)
if not disallowed_table(obj_table):
self.keyhandler.invalidate_table(obj_table)
def _handle_signals(self):
post_save.connect(self.invalidate, sender=None)
post_delete.connect(self.invalidate, sender=None)
def flush_query_cache(self):
from django.db import connection
tables = connection.introspection.table_names()
#seen_models = connection.introspection.installed_models(tables)
for table in tables:
# we want this to just work, so invalidate even things in blacklist
self.keyhandler.invalidate_table(table)
|
jmoiron/johnny-cache | johnny/cache.py | KeyGen.gen_table_key | python | def gen_table_key(self, table, db='default'):
table = force_text(table)
db = force_text(settings.DB_CACHE_KEYS[db])
if len(table) > 100:
table = table[0:68] + self.gen_key(table[68:])
if db and len(db) > 100:
db = db[0:68] + self.gen_key(db[68:])
return '%s_%s_table_%s' % (self.prefix, db, table) | Returns a key that is standard for a given table name and database
alias. Total length up to 212 (max for memcache is 250). | train | https://github.com/jmoiron/johnny-cache/blob/d96ea94c5dfcde517ff8f65d6ba4e435d8a0168c/johnny/cache.py#L165-L176 | [
"def gen_key(self, *values):\n \"\"\"Generate a key from one or more values.\"\"\"\n key = md5()\n KeyGen._recursive_convert(values, key)\n return key.hexdigest()\n"
] | class KeyGen(object):
"""This class is responsible for generating keys."""
def __init__(self, prefix):
self.prefix = prefix
def random_generator(self):
"""Creates a random unique id."""
return self.gen_key(force_bytes(uuid4()))
def gen_multi_key(self, values, db='default'):
"""Takes a list of generations (not table keys) and returns a key."""
db = settings.DB_CACHE_KEYS[db]
if db and len(db) > 100:
db = db[0:68] + self.gen_key(db[68:])
return '%s_%s_multi_%s' % (self.prefix, db, self.gen_key(*values))
@staticmethod
def _convert(x):
if isinstance(x, text_type):
return x.encode('utf-8')
return force_bytes(x)
@staticmethod
def _recursive_convert(x, key):
for item in x:
if isinstance(item, (tuple, list)):
KeyGen._recursive_convert(item, key)
else:
key.update(KeyGen._convert(item))
def gen_key(self, *values):
"""Generate a key from one or more values."""
key = md5()
KeyGen._recursive_convert(values, key)
return key.hexdigest()
|
jmoiron/johnny-cache | johnny/cache.py | KeyGen.gen_multi_key | python | def gen_multi_key(self, values, db='default'):
db = settings.DB_CACHE_KEYS[db]
if db and len(db) > 100:
db = db[0:68] + self.gen_key(db[68:])
return '%s_%s_multi_%s' % (self.prefix, db, self.gen_key(*values)) | Takes a list of generations (not table keys) and returns a key. | train | https://github.com/jmoiron/johnny-cache/blob/d96ea94c5dfcde517ff8f65d6ba4e435d8a0168c/johnny/cache.py#L178-L183 | [
"def gen_key(self, *values):\n \"\"\"Generate a key from one or more values.\"\"\"\n key = md5()\n KeyGen._recursive_convert(values, key)\n return key.hexdigest()\n"
] | class KeyGen(object):
"""This class is responsible for generating keys."""
def __init__(self, prefix):
self.prefix = prefix
def random_generator(self):
"""Creates a random unique id."""
return self.gen_key(force_bytes(uuid4()))
def gen_table_key(self, table, db='default'):
"""
Returns a key that is standard for a given table name and database
alias. Total length up to 212 (max for memcache is 250).
"""
table = force_text(table)
db = force_text(settings.DB_CACHE_KEYS[db])
if len(table) > 100:
table = table[0:68] + self.gen_key(table[68:])
if db and len(db) > 100:
db = db[0:68] + self.gen_key(db[68:])
return '%s_%s_table_%s' % (self.prefix, db, table)
@staticmethod
def _convert(x):
if isinstance(x, text_type):
return x.encode('utf-8')
return force_bytes(x)
@staticmethod
def _recursive_convert(x, key):
for item in x:
if isinstance(item, (tuple, list)):
KeyGen._recursive_convert(item, key)
else:
key.update(KeyGen._convert(item))
def gen_key(self, *values):
"""Generate a key from one or more values."""
key = md5()
KeyGen._recursive_convert(values, key)
return key.hexdigest()
|
jmoiron/johnny-cache | johnny/cache.py | KeyGen.gen_key | python | def gen_key(self, *values):
key = md5()
KeyGen._recursive_convert(values, key)
return key.hexdigest() | Generate a key from one or more values. | train | https://github.com/jmoiron/johnny-cache/blob/d96ea94c5dfcde517ff8f65d6ba4e435d8a0168c/johnny/cache.py#L199-L203 | [
"def _recursive_convert(x, key):\n for item in x:\n if isinstance(item, (tuple, list)):\n KeyGen._recursive_convert(item, key)\n else:\n key.update(KeyGen._convert(item))\n"
] | class KeyGen(object):
"""This class is responsible for generating keys."""
def __init__(self, prefix):
self.prefix = prefix
def random_generator(self):
"""Creates a random unique id."""
return self.gen_key(force_bytes(uuid4()))
def gen_table_key(self, table, db='default'):
"""
Returns a key that is standard for a given table name and database
alias. Total length up to 212 (max for memcache is 250).
"""
table = force_text(table)
db = force_text(settings.DB_CACHE_KEYS[db])
if len(table) > 100:
table = table[0:68] + self.gen_key(table[68:])
if db and len(db) > 100:
db = db[0:68] + self.gen_key(db[68:])
return '%s_%s_table_%s' % (self.prefix, db, table)
def gen_multi_key(self, values, db='default'):
"""Takes a list of generations (not table keys) and returns a key."""
db = settings.DB_CACHE_KEYS[db]
if db and len(db) > 100:
db = db[0:68] + self.gen_key(db[68:])
return '%s_%s_multi_%s' % (self.prefix, db, self.gen_key(*values))
@staticmethod
def _convert(x):
if isinstance(x, text_type):
return x.encode('utf-8')
return force_bytes(x)
@staticmethod
def _recursive_convert(x, key):
for item in x:
if isinstance(item, (tuple, list)):
KeyGen._recursive_convert(item, key)
else:
key.update(KeyGen._convert(item))
|
jmoiron/johnny-cache | johnny/cache.py | KeyHandler.get_generation | python | def get_generation(self, *tables, **kwargs):
db = kwargs.get('db', 'default')
if len(tables) > 1:
return self.get_multi_generation(tables, db)
return self.get_single_generation(tables[0], db) | Get the generation key for any number of tables. | train | https://github.com/jmoiron/johnny-cache/blob/d96ea94c5dfcde517ff8f65d6ba4e435d8a0168c/johnny/cache.py#L215-L220 | [
"def get_single_generation(self, table, db='default'):\n \"\"\"Creates a random generation value for a single table name\"\"\"\n key = self.keygen.gen_table_key(table, db)\n val = self.cache_backend.get(key, None, db)\n #if local.get('in_test', None): print force_bytes(val).ljust(32), key\n if val is None:\n val = self.keygen.random_generator()\n self.cache_backend.set(key, val, settings.MIDDLEWARE_SECONDS, db)\n return val\n",
"def get_multi_generation(self, tables, db='default'):\n \"\"\"Takes a list of table names and returns an aggregate\n value for the generation\"\"\"\n generations = []\n for table in tables:\n generations.append(self.get_single_generation(table, db))\n key = self.keygen.gen_multi_key(generations, db)\n val = self.cache_backend.get(key, None, db)\n #if local.get('in_test', None): print force_bytes(val).ljust(32), key\n if val is None:\n val = self.keygen.random_generator()\n self.cache_backend.set(key, val, settings.MIDDLEWARE_SECONDS, db)\n return val\n"
] | class KeyHandler(object):
"""Handles pulling and invalidating the key from from the cache based
on the table names. Higher-level logic dealing with johnny cache specific
keys go in this class."""
def __init__(self, cache_backend, keygen=KeyGen, prefix=None):
self.prefix = prefix
self.keygen = keygen(prefix)
self.cache_backend = cache_backend
def get_single_generation(self, table, db='default'):
"""Creates a random generation value for a single table name"""
key = self.keygen.gen_table_key(table, db)
val = self.cache_backend.get(key, None, db)
#if local.get('in_test', None): print force_bytes(val).ljust(32), key
if val is None:
val = self.keygen.random_generator()
self.cache_backend.set(key, val, settings.MIDDLEWARE_SECONDS, db)
return val
def get_multi_generation(self, tables, db='default'):
"""Takes a list of table names and returns an aggregate
value for the generation"""
generations = []
for table in tables:
generations.append(self.get_single_generation(table, db))
key = self.keygen.gen_multi_key(generations, db)
val = self.cache_backend.get(key, None, db)
#if local.get('in_test', None): print force_bytes(val).ljust(32), key
if val is None:
val = self.keygen.random_generator()
self.cache_backend.set(key, val, settings.MIDDLEWARE_SECONDS, db)
return val
def invalidate_table(self, table, db='default'):
"""Invalidates a table's generation and returns a new one
(Note that this also invalidates all multi generations
containing the table)"""
key = self.keygen.gen_table_key(table, db)
val = self.keygen.random_generator()
self.cache_backend.set(key, val, settings.MIDDLEWARE_SECONDS, db)
return val
def sql_key(self, generation, sql, params, order, result_type,
using='default'):
"""
Return the specific cache key for the sql query described by the
pieces of the query and the generation key.
"""
# these keys will always look pretty opaque
suffix = self.keygen.gen_key(sql, params, order, result_type)
using = settings.DB_CACHE_KEYS[using]
return '%s_%s_query_%s.%s' % (self.prefix, using, generation, suffix)
|
jmoiron/johnny-cache | johnny/cache.py | KeyHandler.get_single_generation | python | def get_single_generation(self, table, db='default'):
key = self.keygen.gen_table_key(table, db)
val = self.cache_backend.get(key, None, db)
#if local.get('in_test', None): print force_bytes(val).ljust(32), key
if val is None:
val = self.keygen.random_generator()
self.cache_backend.set(key, val, settings.MIDDLEWARE_SECONDS, db)
return val | Creates a random generation value for a single table name | train | https://github.com/jmoiron/johnny-cache/blob/d96ea94c5dfcde517ff8f65d6ba4e435d8a0168c/johnny/cache.py#L222-L230 | null | class KeyHandler(object):
"""Handles pulling and invalidating the key from from the cache based
on the table names. Higher-level logic dealing with johnny cache specific
keys go in this class."""
def __init__(self, cache_backend, keygen=KeyGen, prefix=None):
self.prefix = prefix
self.keygen = keygen(prefix)
self.cache_backend = cache_backend
def get_generation(self, *tables, **kwargs):
"""Get the generation key for any number of tables."""
db = kwargs.get('db', 'default')
if len(tables) > 1:
return self.get_multi_generation(tables, db)
return self.get_single_generation(tables[0], db)
def get_multi_generation(self, tables, db='default'):
"""Takes a list of table names and returns an aggregate
value for the generation"""
generations = []
for table in tables:
generations.append(self.get_single_generation(table, db))
key = self.keygen.gen_multi_key(generations, db)
val = self.cache_backend.get(key, None, db)
#if local.get('in_test', None): print force_bytes(val).ljust(32), key
if val is None:
val = self.keygen.random_generator()
self.cache_backend.set(key, val, settings.MIDDLEWARE_SECONDS, db)
return val
def invalidate_table(self, table, db='default'):
"""Invalidates a table's generation and returns a new one
(Note that this also invalidates all multi generations
containing the table)"""
key = self.keygen.gen_table_key(table, db)
val = self.keygen.random_generator()
self.cache_backend.set(key, val, settings.MIDDLEWARE_SECONDS, db)
return val
def sql_key(self, generation, sql, params, order, result_type,
using='default'):
"""
Return the specific cache key for the sql query described by the
pieces of the query and the generation key.
"""
# these keys will always look pretty opaque
suffix = self.keygen.gen_key(sql, params, order, result_type)
using = settings.DB_CACHE_KEYS[using]
return '%s_%s_query_%s.%s' % (self.prefix, using, generation, suffix)
|
jmoiron/johnny-cache | johnny/cache.py | KeyHandler.get_multi_generation | python | def get_multi_generation(self, tables, db='default'):
generations = []
for table in tables:
generations.append(self.get_single_generation(table, db))
key = self.keygen.gen_multi_key(generations, db)
val = self.cache_backend.get(key, None, db)
#if local.get('in_test', None): print force_bytes(val).ljust(32), key
if val is None:
val = self.keygen.random_generator()
self.cache_backend.set(key, val, settings.MIDDLEWARE_SECONDS, db)
return val | Takes a list of table names and returns an aggregate
value for the generation | train | https://github.com/jmoiron/johnny-cache/blob/d96ea94c5dfcde517ff8f65d6ba4e435d8a0168c/johnny/cache.py#L232-L244 | [
"def get_single_generation(self, table, db='default'):\n \"\"\"Creates a random generation value for a single table name\"\"\"\n key = self.keygen.gen_table_key(table, db)\n val = self.cache_backend.get(key, None, db)\n #if local.get('in_test', None): print force_bytes(val).ljust(32), key\n if val is None:\n val = self.keygen.random_generator()\n self.cache_backend.set(key, val, settings.MIDDLEWARE_SECONDS, db)\n return val\n"
] | class KeyHandler(object):
"""Handles pulling and invalidating the key from from the cache based
on the table names. Higher-level logic dealing with johnny cache specific
keys go in this class."""
def __init__(self, cache_backend, keygen=KeyGen, prefix=None):
self.prefix = prefix
self.keygen = keygen(prefix)
self.cache_backend = cache_backend
def get_generation(self, *tables, **kwargs):
"""Get the generation key for any number of tables."""
db = kwargs.get('db', 'default')
if len(tables) > 1:
return self.get_multi_generation(tables, db)
return self.get_single_generation(tables[0], db)
def get_single_generation(self, table, db='default'):
"""Creates a random generation value for a single table name"""
key = self.keygen.gen_table_key(table, db)
val = self.cache_backend.get(key, None, db)
#if local.get('in_test', None): print force_bytes(val).ljust(32), key
if val is None:
val = self.keygen.random_generator()
self.cache_backend.set(key, val, settings.MIDDLEWARE_SECONDS, db)
return val
def invalidate_table(self, table, db='default'):
"""Invalidates a table's generation and returns a new one
(Note that this also invalidates all multi generations
containing the table)"""
key = self.keygen.gen_table_key(table, db)
val = self.keygen.random_generator()
self.cache_backend.set(key, val, settings.MIDDLEWARE_SECONDS, db)
return val
def sql_key(self, generation, sql, params, order, result_type,
using='default'):
"""
Return the specific cache key for the sql query described by the
pieces of the query and the generation key.
"""
# these keys will always look pretty opaque
suffix = self.keygen.gen_key(sql, params, order, result_type)
using = settings.DB_CACHE_KEYS[using]
return '%s_%s_query_%s.%s' % (self.prefix, using, generation, suffix)
|
jmoiron/johnny-cache | johnny/cache.py | KeyHandler.sql_key | python | def sql_key(self, generation, sql, params, order, result_type,
using='default'):
# these keys will always look pretty opaque
suffix = self.keygen.gen_key(sql, params, order, result_type)
using = settings.DB_CACHE_KEYS[using]
return '%s_%s_query_%s.%s' % (self.prefix, using, generation, suffix) | Return the specific cache key for the sql query described by the
pieces of the query and the generation key. | train | https://github.com/jmoiron/johnny-cache/blob/d96ea94c5dfcde517ff8f65d6ba4e435d8a0168c/johnny/cache.py#L255-L264 | null | class KeyHandler(object):
"""Handles pulling and invalidating the key from from the cache based
on the table names. Higher-level logic dealing with johnny cache specific
keys go in this class."""
def __init__(self, cache_backend, keygen=KeyGen, prefix=None):
self.prefix = prefix
self.keygen = keygen(prefix)
self.cache_backend = cache_backend
def get_generation(self, *tables, **kwargs):
"""Get the generation key for any number of tables."""
db = kwargs.get('db', 'default')
if len(tables) > 1:
return self.get_multi_generation(tables, db)
return self.get_single_generation(tables[0], db)
def get_single_generation(self, table, db='default'):
"""Creates a random generation value for a single table name"""
key = self.keygen.gen_table_key(table, db)
val = self.cache_backend.get(key, None, db)
#if local.get('in_test', None): print force_bytes(val).ljust(32), key
if val is None:
val = self.keygen.random_generator()
self.cache_backend.set(key, val, settings.MIDDLEWARE_SECONDS, db)
return val
def get_multi_generation(self, tables, db='default'):
"""Takes a list of table names and returns an aggregate
value for the generation"""
generations = []
for table in tables:
generations.append(self.get_single_generation(table, db))
key = self.keygen.gen_multi_key(generations, db)
val = self.cache_backend.get(key, None, db)
#if local.get('in_test', None): print force_bytes(val).ljust(32), key
if val is None:
val = self.keygen.random_generator()
self.cache_backend.set(key, val, settings.MIDDLEWARE_SECONDS, db)
return val
def invalidate_table(self, table, db='default'):
"""Invalidates a table's generation and returns a new one
(Note that this also invalidates all multi generations
containing the table)"""
key = self.keygen.gen_table_key(table, db)
val = self.keygen.random_generator()
self.cache_backend.set(key, val, settings.MIDDLEWARE_SECONDS, db)
return val
|
jmoiron/johnny-cache | johnny/cache.py | QueryCacheBackend.patch | python | def patch(self):
from django.db.models.sql import compiler
self._read_compilers = (
compiler.SQLCompiler,
compiler.SQLAggregateCompiler,
compiler.SQLDateCompiler,
)
self._write_compilers = (
compiler.SQLInsertCompiler,
compiler.SQLDeleteCompiler,
compiler.SQLUpdateCompiler,
)
if not self._patched:
self._original = {}
for reader in self._read_compilers:
self._original[reader] = reader.execute_sql
reader.execute_sql = self._monkey_select(reader.execute_sql)
for updater in self._write_compilers:
self._original[updater] = updater.execute_sql
updater.execute_sql = self._monkey_write(updater.execute_sql)
self._patched = True
self.cache_backend.patch()
self._handle_signals() | monkey patches django.db.models.sql.compiler.SQL*Compiler series | train | https://github.com/jmoiron/johnny-cache/blob/d96ea94c5dfcde517ff8f65d6ba4e435d8a0168c/johnny/cache.py#L403-L429 | [
"def _monkey_select(self, original):\n from django.db.models.sql.constants import MULTI\n from django.db.models.sql.datastructures import EmptyResultSet\n\n @wraps(original, assigned=available_attrs(original))\n def newfun(cls, *args, **kwargs):\n if args:\n result_type = args[0]\n else:\n result_type = kwargs.get('result_type', MULTI)\n\n if any([isinstance(cls, c) for c in self._write_compilers]):\n return original(cls, *args, **kwargs)\n try:\n sql, params = cls.as_sql()\n if not sql:\n raise EmptyResultSet\n except EmptyResultSet:\n if result_type == MULTI:\n return empty_iter()\n else:\n return\n\n db = getattr(cls, 'using', 'default')\n key, val = None, NotInCache()\n # check the blacklist for any of the involved tables; if it's not\n # there, then look for the value in the cache.\n tables = get_tables_for_query(cls.query)\n # if the tables are blacklisted, send a qc_skip signal\n blacklisted = disallowed_table(*tables)\n\n try:\n ordering_aliases = cls.ordering_aliases\n except AttributeError:\n ordering_aliases = cls.query.ordering_aliases\n\n if blacklisted:\n signals.qc_skip.send(sender=cls, tables=tables,\n query=(sql, params, ordering_aliases),\n key=key)\n if tables and not blacklisted:\n gen_key = self.keyhandler.get_generation(*tables, **{'db': db})\n key = self.keyhandler.sql_key(gen_key, sql, params,\n cls.get_ordering(),\n result_type, db)\n val = self.cache_backend.get(key, NotInCache(), db)\n\n if not isinstance(val, NotInCache):\n if val == no_result_sentinel:\n val = []\n\n signals.qc_hit.send(sender=cls, tables=tables,\n query=(sql, params, ordering_aliases),\n size=len(val), key=key)\n return val\n\n if not blacklisted:\n signals.qc_miss.send(sender=cls, tables=tables,\n query=(sql, params, ordering_aliases),\n key=key)\n\n val = original(cls, *args, **kwargs)\n\n if hasattr(val, '__iter__'):\n #Can't permanently cache lazy iterables without creating\n #a cacheable data structure. 
Note that this makes them\n #no longer lazy...\n #todo - create a smart iterable wrapper\n val = list(val)\n if key is not None:\n if not val:\n self.cache_backend.set(key, no_result_sentinel, settings.MIDDLEWARE_SECONDS, db)\n else:\n self.cache_backend.set(key, val, settings.MIDDLEWARE_SECONDS, db)\n return val\n return newfun\n",
"def _monkey_write(self, original):\n @wraps(original, assigned=available_attrs(original))\n def newfun(cls, *args, **kwargs):\n db = getattr(cls, 'using', 'default')\n from django.db.models.sql import compiler\n # we have to do this before we check the tables, since the tables\n # are actually being set in the original function\n ret = original(cls, *args, **kwargs)\n\n if isinstance(cls, compiler.SQLInsertCompiler):\n #Inserts are a special case where cls.tables\n #are not populated.\n tables = [cls.query.model._meta.db_table]\n else:\n #if cls.query.tables != list(cls.query.table_map):\n # pass\n #tables = list(cls.query.table_map)\n tables = cls.query.tables\n for table in tables:\n if not disallowed_table(table):\n self.keyhandler.invalidate_table(table, db)\n return ret\n return newfun\n",
"def _handle_signals(self):\n post_save.connect(self.invalidate, sender=None)\n post_delete.connect(self.invalidate, sender=None)\n"
] | class QueryCacheBackend(object):
"""This class is the engine behind the query cache. It reads the queries
going through the django Query and returns from the cache using
the generation keys, or on a miss from the database and caches the results.
Each time a model is updated the table keys for that model are re-created,
invalidating all cached querysets for that model.
There are different QueryCacheBackend's for different versions of django;
call ``johnny.cache.get_backend`` to automatically get the proper class.
"""
__shared_state = {}
def __init__(self, cache_backend=None, keyhandler=None, keygen=None):
self.__dict__ = self.__shared_state
self.prefix = settings.MIDDLEWARE_KEY_PREFIX
if keyhandler:
self.kh_class = keyhandler
if keygen:
self.kg_class = keygen
if not cache_backend and not hasattr(self, 'cache_backend'):
cache_backend = settings._get_backend()
if not keygen and not hasattr(self, 'kg_class'):
self.kg_class = KeyGen
if keyhandler is None and not hasattr(self, 'kh_class'):
self.kh_class = KeyHandler
if cache_backend:
self.cache_backend = TransactionManager(cache_backend,
self.kg_class)
self.keyhandler = self.kh_class(self.cache_backend,
self.kg_class, self.prefix)
self._patched = getattr(self, '_patched', False)
def _monkey_select(self, original):
from django.db.models.sql.constants import MULTI
from django.db.models.sql.datastructures import EmptyResultSet
@wraps(original, assigned=available_attrs(original))
def newfun(cls, *args, **kwargs):
if args:
result_type = args[0]
else:
result_type = kwargs.get('result_type', MULTI)
if any([isinstance(cls, c) for c in self._write_compilers]):
return original(cls, *args, **kwargs)
try:
sql, params = cls.as_sql()
if not sql:
raise EmptyResultSet
except EmptyResultSet:
if result_type == MULTI:
return empty_iter()
else:
return
db = getattr(cls, 'using', 'default')
key, val = None, NotInCache()
# check the blacklist for any of the involved tables; if it's not
# there, then look for the value in the cache.
tables = get_tables_for_query(cls.query)
# if the tables are blacklisted, send a qc_skip signal
blacklisted = disallowed_table(*tables)
try:
ordering_aliases = cls.ordering_aliases
except AttributeError:
ordering_aliases = cls.query.ordering_aliases
if blacklisted:
signals.qc_skip.send(sender=cls, tables=tables,
query=(sql, params, ordering_aliases),
key=key)
if tables and not blacklisted:
gen_key = self.keyhandler.get_generation(*tables, **{'db': db})
key = self.keyhandler.sql_key(gen_key, sql, params,
cls.get_ordering(),
result_type, db)
val = self.cache_backend.get(key, NotInCache(), db)
if not isinstance(val, NotInCache):
if val == no_result_sentinel:
val = []
signals.qc_hit.send(sender=cls, tables=tables,
query=(sql, params, ordering_aliases),
size=len(val), key=key)
return val
if not blacklisted:
signals.qc_miss.send(sender=cls, tables=tables,
query=(sql, params, ordering_aliases),
key=key)
val = original(cls, *args, **kwargs)
if hasattr(val, '__iter__'):
#Can't permanently cache lazy iterables without creating
#a cacheable data structure. Note that this makes them
#no longer lazy...
#todo - create a smart iterable wrapper
val = list(val)
if key is not None:
if not val:
self.cache_backend.set(key, no_result_sentinel, settings.MIDDLEWARE_SECONDS, db)
else:
self.cache_backend.set(key, val, settings.MIDDLEWARE_SECONDS, db)
return val
return newfun
def _monkey_write(self, original):
@wraps(original, assigned=available_attrs(original))
def newfun(cls, *args, **kwargs):
db = getattr(cls, 'using', 'default')
from django.db.models.sql import compiler
# we have to do this before we check the tables, since the tables
# are actually being set in the original function
ret = original(cls, *args, **kwargs)
if isinstance(cls, compiler.SQLInsertCompiler):
#Inserts are a special case where cls.tables
#are not populated.
tables = [cls.query.model._meta.db_table]
else:
#if cls.query.tables != list(cls.query.table_map):
# pass
#tables = list(cls.query.table_map)
tables = cls.query.tables
for table in tables:
if not disallowed_table(table):
self.keyhandler.invalidate_table(table, db)
return ret
return newfun
def unpatch(self):
"""un-applies this patch."""
if not self._patched:
return
for func in self._read_compilers + self._write_compilers:
func.execute_sql = self._original[func]
self.cache_backend.unpatch()
self._patched = False
def invalidate(self, instance, **kwargs):
if self._patched:
table = resolve_table(instance)
using = kwargs.get('using', 'default')
if not disallowed_table(table):
self.keyhandler.invalidate_table(table, db=using)
tables = set()
tables.add(table)
try:
instance._meta._related_objects_cache
except AttributeError:
instance._meta._fill_related_objects_cache()
for obj in instance._meta._related_objects_cache.keys():
obj_table = obj.model._meta.db_table
if obj_table not in tables:
tables.add(obj_table)
if not disallowed_table(obj_table):
self.keyhandler.invalidate_table(obj_table)
def _handle_signals(self):
post_save.connect(self.invalidate, sender=None)
post_delete.connect(self.invalidate, sender=None)
def flush_query_cache(self):
from django.db import connection
tables = connection.introspection.table_names()
#seen_models = connection.introspection.installed_models(tables)
for table in tables:
# we want this to just work, so invalidate even things in blacklist
self.keyhandler.invalidate_table(table)
|
jmoiron/johnny-cache | johnny/cache.py | QueryCacheBackend.unpatch | python | def unpatch(self):
if not self._patched:
return
for func in self._read_compilers + self._write_compilers:
func.execute_sql = self._original[func]
self.cache_backend.unpatch()
self._patched = False | un-applies this patch. | train | https://github.com/jmoiron/johnny-cache/blob/d96ea94c5dfcde517ff8f65d6ba4e435d8a0168c/johnny/cache.py#L431-L438 | null | class QueryCacheBackend(object):
"""This class is the engine behind the query cache. It reads the queries
going through the django Query and returns from the cache using
the generation keys, or on a miss from the database and caches the results.
Each time a model is updated the table keys for that model are re-created,
invalidating all cached querysets for that model.
There are different QueryCacheBackend's for different versions of django;
call ``johnny.cache.get_backend`` to automatically get the proper class.
"""
__shared_state = {}
def __init__(self, cache_backend=None, keyhandler=None, keygen=None):
self.__dict__ = self.__shared_state
self.prefix = settings.MIDDLEWARE_KEY_PREFIX
if keyhandler:
self.kh_class = keyhandler
if keygen:
self.kg_class = keygen
if not cache_backend and not hasattr(self, 'cache_backend'):
cache_backend = settings._get_backend()
if not keygen and not hasattr(self, 'kg_class'):
self.kg_class = KeyGen
if keyhandler is None and not hasattr(self, 'kh_class'):
self.kh_class = KeyHandler
if cache_backend:
self.cache_backend = TransactionManager(cache_backend,
self.kg_class)
self.keyhandler = self.kh_class(self.cache_backend,
self.kg_class, self.prefix)
self._patched = getattr(self, '_patched', False)
def _monkey_select(self, original):
from django.db.models.sql.constants import MULTI
from django.db.models.sql.datastructures import EmptyResultSet
@wraps(original, assigned=available_attrs(original))
def newfun(cls, *args, **kwargs):
if args:
result_type = args[0]
else:
result_type = kwargs.get('result_type', MULTI)
if any([isinstance(cls, c) for c in self._write_compilers]):
return original(cls, *args, **kwargs)
try:
sql, params = cls.as_sql()
if not sql:
raise EmptyResultSet
except EmptyResultSet:
if result_type == MULTI:
return empty_iter()
else:
return
db = getattr(cls, 'using', 'default')
key, val = None, NotInCache()
# check the blacklist for any of the involved tables; if it's not
# there, then look for the value in the cache.
tables = get_tables_for_query(cls.query)
# if the tables are blacklisted, send a qc_skip signal
blacklisted = disallowed_table(*tables)
try:
ordering_aliases = cls.ordering_aliases
except AttributeError:
ordering_aliases = cls.query.ordering_aliases
if blacklisted:
signals.qc_skip.send(sender=cls, tables=tables,
query=(sql, params, ordering_aliases),
key=key)
if tables and not blacklisted:
gen_key = self.keyhandler.get_generation(*tables, **{'db': db})
key = self.keyhandler.sql_key(gen_key, sql, params,
cls.get_ordering(),
result_type, db)
val = self.cache_backend.get(key, NotInCache(), db)
if not isinstance(val, NotInCache):
if val == no_result_sentinel:
val = []
signals.qc_hit.send(sender=cls, tables=tables,
query=(sql, params, ordering_aliases),
size=len(val), key=key)
return val
if not blacklisted:
signals.qc_miss.send(sender=cls, tables=tables,
query=(sql, params, ordering_aliases),
key=key)
val = original(cls, *args, **kwargs)
if hasattr(val, '__iter__'):
#Can't permanently cache lazy iterables without creating
#a cacheable data structure. Note that this makes them
#no longer lazy...
#todo - create a smart iterable wrapper
val = list(val)
if key is not None:
if not val:
self.cache_backend.set(key, no_result_sentinel, settings.MIDDLEWARE_SECONDS, db)
else:
self.cache_backend.set(key, val, settings.MIDDLEWARE_SECONDS, db)
return val
return newfun
def _monkey_write(self, original):
@wraps(original, assigned=available_attrs(original))
def newfun(cls, *args, **kwargs):
db = getattr(cls, 'using', 'default')
from django.db.models.sql import compiler
# we have to do this before we check the tables, since the tables
# are actually being set in the original function
ret = original(cls, *args, **kwargs)
if isinstance(cls, compiler.SQLInsertCompiler):
#Inserts are a special case where cls.tables
#are not populated.
tables = [cls.query.model._meta.db_table]
else:
#if cls.query.tables != list(cls.query.table_map):
# pass
#tables = list(cls.query.table_map)
tables = cls.query.tables
for table in tables:
if not disallowed_table(table):
self.keyhandler.invalidate_table(table, db)
return ret
return newfun
def patch(self):
"""
monkey patches django.db.models.sql.compiler.SQL*Compiler series
"""
from django.db.models.sql import compiler
self._read_compilers = (
compiler.SQLCompiler,
compiler.SQLAggregateCompiler,
compiler.SQLDateCompiler,
)
self._write_compilers = (
compiler.SQLInsertCompiler,
compiler.SQLDeleteCompiler,
compiler.SQLUpdateCompiler,
)
if not self._patched:
self._original = {}
for reader in self._read_compilers:
self._original[reader] = reader.execute_sql
reader.execute_sql = self._monkey_select(reader.execute_sql)
for updater in self._write_compilers:
self._original[updater] = updater.execute_sql
updater.execute_sql = self._monkey_write(updater.execute_sql)
self._patched = True
self.cache_backend.patch()
self._handle_signals()
def invalidate(self, instance, **kwargs):
if self._patched:
table = resolve_table(instance)
using = kwargs.get('using', 'default')
if not disallowed_table(table):
self.keyhandler.invalidate_table(table, db=using)
tables = set()
tables.add(table)
try:
instance._meta._related_objects_cache
except AttributeError:
instance._meta._fill_related_objects_cache()
for obj in instance._meta._related_objects_cache.keys():
obj_table = obj.model._meta.db_table
if obj_table not in tables:
tables.add(obj_table)
if not disallowed_table(obj_table):
self.keyhandler.invalidate_table(obj_table)
def _handle_signals(self):
post_save.connect(self.invalidate, sender=None)
post_delete.connect(self.invalidate, sender=None)
def flush_query_cache(self):
from django.db import connection
tables = connection.introspection.table_names()
#seen_models = connection.introspection.installed_models(tables)
for table in tables:
# we want this to just work, so invalidate even things in blacklist
self.keyhandler.invalidate_table(table)
|
jmoiron/johnny-cache | johnny/utils.py | celery_enable_all | python | def celery_enable_all():
from celery.signals import task_prerun, task_postrun, task_failure
task_prerun.connect(prerun_handler)
task_postrun.connect(postrun_handler)
# Also have to cleanup on failure.
task_failure.connect(postrun_handler) | Enable johnny-cache in all celery tasks, clearing the local-store
after each task. | train | https://github.com/jmoiron/johnny-cache/blob/d96ea94c5dfcde517ff8f65d6ba4e435d8a0168c/johnny/utils.py#L22-L29 | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Extra johnny utilities."""
from johnny.cache import get_backend, local, patch, unpatch
from johnny.decorators import wraps, available_attrs
__all__ = ["celery_enable_all", "celery_task_wrapper", "johnny_task_wrapper"]
def prerun_handler(*args, **kwargs):
"""Celery pre-run handler. Enables johnny-cache."""
patch()
def postrun_handler(*args, **kwargs):
"""Celery postrun handler. Unpatches and clears the localstore."""
unpatch()
local.clear()
def celery_task_wrapper(f):
"""
Provides a task wrapper for celery that sets up cache and ensures
that the local store is cleared after completion
"""
from celery.utils import fun_takes_kwargs
@wraps(f, assigned=available_attrs(f))
def newf(*args, **kwargs):
backend = get_backend()
was_patched = backend._patched
get_backend().patch()
# since this function takes all keyword arguments,
# we will pass only the ones the function below accepts,
# just as celery does
supported_keys = fun_takes_kwargs(f, kwargs)
new_kwargs = dict((key, val) for key, val in kwargs.items()
if key in supported_keys)
try:
ret = f(*args, **new_kwargs)
finally:
local.clear()
if not was_patched:
get_backend().unpatch()
return ret
return newf
# backwards compatible alias
johnny_task_wrapper = celery_task_wrapper
|
jmoiron/johnny-cache | johnny/utils.py | celery_task_wrapper | python | def celery_task_wrapper(f):
from celery.utils import fun_takes_kwargs
@wraps(f, assigned=available_attrs(f))
def newf(*args, **kwargs):
backend = get_backend()
was_patched = backend._patched
get_backend().patch()
# since this function takes all keyword arguments,
# we will pass only the ones the function below accepts,
# just as celery does
supported_keys = fun_takes_kwargs(f, kwargs)
new_kwargs = dict((key, val) for key, val in kwargs.items()
if key in supported_keys)
try:
ret = f(*args, **new_kwargs)
finally:
local.clear()
if not was_patched:
get_backend().unpatch()
return ret
return newf | Provides a task wrapper for celery that sets up cache and ensures
that the local store is cleared after completion | train | https://github.com/jmoiron/johnny-cache/blob/d96ea94c5dfcde517ff8f65d6ba4e435d8a0168c/johnny/utils.py#L31-L57 | [
"def available_attrs(fn):\n \"\"\"\n Return the list of functools-wrappable attributes on a callable.\n This is required as a workaround for http://bugs.python.org/issue3445.\n \"\"\"\n return tuple(a for a in WRAPPER_ASSIGNMENTS if hasattr(fn, a))\n"
] | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Extra johnny utilities."""
from johnny.cache import get_backend, local, patch, unpatch
from johnny.decorators import wraps, available_attrs
__all__ = ["celery_enable_all", "celery_task_wrapper", "johnny_task_wrapper"]
def prerun_handler(*args, **kwargs):
"""Celery pre-run handler. Enables johnny-cache."""
patch()
def postrun_handler(*args, **kwargs):
"""Celery postrun handler. Unpatches and clears the localstore."""
unpatch()
local.clear()
def celery_enable_all():
"""Enable johnny-cache in all celery tasks, clearing the local-store
after each task."""
from celery.signals import task_prerun, task_postrun, task_failure
task_prerun.connect(prerun_handler)
task_postrun.connect(postrun_handler)
# Also have to cleanup on failure.
task_failure.connect(postrun_handler)
# backwards compatible alias
johnny_task_wrapper = celery_task_wrapper
|
jmoiron/johnny-cache | johnny/settings.py | _get_backend | python | def _get_backend():
enabled = [n for n, c in sorted(CACHES.items())
if c.get('JOHNNY_CACHE', False)]
if len(enabled) > 1:
warn("Multiple caches configured for johnny-cache; using %s." %
enabled[0])
if enabled:
return get_cache(enabled[0])
if CACHE_BACKEND:
backend = get_cache(CACHE_BACKEND)
if backend not in CACHES:
from django.core import signals
# Some caches -- python-memcached in particular -- need to do a
# cleanup at the end of a request cycle. If the cache provides a
# close() method, wire it up here.
if hasattr(backend, 'close'):
signals.request_finished.connect(backend.close)
return backend
return cache | Returns the actual django cache object johnny is configured to use.
This relies on the settings only; the actual active cache can
theoretically be changed at runtime. | train | https://github.com/jmoiron/johnny-cache/blob/d96ea94c5dfcde517ff8f65d6ba4e435d8a0168c/johnny/settings.py#L29-L52 | null | from warnings import warn
from django.conf import settings
from django.core.cache import get_cache, cache
DISABLE_QUERYSET_CACHE = getattr(settings, 'DISABLE_QUERYSET_CACHE', False)
DEFAULT_BLACKLIST = ['south_migrationhistory']
BLACKLIST = list(getattr(settings, 'MAN_IN_BLACKLIST',
getattr(settings, 'JOHNNY_TABLE_BLACKLIST', []))) + DEFAULT_BLACKLIST
BLACKLIST = set(BLACKLIST)
WHITELIST = set(getattr(settings, 'JOHNNY_TABLE_WHITELIST', []))
DB_CACHE_KEYS = dict((name, db.get('JOHNNY_CACHE_KEY', name))
for name, db in settings.DATABASES.items())
MIDDLEWARE_KEY_PREFIX = getattr(settings, 'JOHNNY_MIDDLEWARE_KEY_PREFIX', 'jc')
MIDDLEWARE_SECONDS = getattr(settings, 'JOHNNY_MIDDLEWARE_SECONDS', 0)
CACHE_BACKEND = getattr(settings, 'JOHNNY_CACHE_BACKEND',
getattr(settings, 'CACHE_BACKEND', None))
CACHES = getattr(settings, 'CACHES', {})
|
jmoiron/johnny-cache | johnny/transaction.py | TransactionManager.set | python | def set(self, key, val, timeout=None, using=None):
if timeout is None:
timeout = self.timeout
if self.is_managed(using=using) and self._patched_var:
self.local[key] = val
else:
self.cache_backend.set(key, val, timeout) | Set will be using the generational key, so if another thread
bumps this key, the localstore version will still be invalid.
If the key is bumped during a transaction it will be new
to the global cache on commit, so it will still be a bump. | train | https://github.com/jmoiron/johnny-cache/blob/d96ea94c5dfcde517ff8f65d6ba4e435d8a0168c/johnny/transaction.py#L81-L93 | [
"def is_managed(self, using=None):\n return is_managed(using=using)\n"
] | class TransactionManager(object):
"""
TransactionManager is a wrapper around a cache_backend that is
transaction aware.
If we are in a transaction, it will return the locally cached version.
* On rollback, it will flush all local caches
* On commit, it will push them up to the real shared cache backend
(ex. memcached).
"""
_patched_var = False
def __init__(self, cache_backend, keygen):
from johnny import cache, settings
self.timeout = settings.MIDDLEWARE_SECONDS
self.prefix = settings.MIDDLEWARE_KEY_PREFIX
self.cache_backend = cache_backend
self.local = cache.local
self.keygen = keygen(self.prefix)
self._originals = {}
self._dirty_backup = {}
self.local['trans_sids'] = {}
def _get_sid(self, using=None):
if 'trans_sids' not in self.local:
self.local['trans_sids'] = {}
d = self.local['trans_sids']
if using is None:
using = DEFAULT_DB_ALIAS
if using not in d:
d[using] = []
return d[using]
def _clear_sid_stack(self, using=None):
if using is None:
using = DEFAULT_DB_ALIAS
if using in self.local.get('trans_sids', {}):
del self.local['trans_sids']
def is_managed(self, using=None):
return is_managed(using=using)
def get(self, key, default=None, using=None):
if self.is_managed(using) and self._patched_var:
val = self.local.get(key, None)
if val:
return val
if self._uses_savepoints():
val = self._get_from_savepoints(key, using)
if val:
return val
return self.cache_backend.get(key, default)
def _get_from_savepoints(self, key, using=None):
sids = self._get_sid(using)
cp = list(sids)
cp.reverse()
for sid in cp:
if key in self.local[sid]:
return self.local[sid][key]
def _trunc_using(self, using):
if using is None:
using = DEFAULT_DB_ALIAS
using = johnny_settings.DB_CACHE_KEYS[using]
if len(using) > 100:
using = using[0:68] + self.keygen.gen_key(using[68:])
return using
def _clear(self, using=None):
self.local.clear('%s_%s_*' %
(self.prefix, self._trunc_using(using)))
def _flush(self, commit=True, using=None):
"""
Flushes the internal cache, either to the memcache or rolls back
"""
if commit:
# XXX: multi-set?
if self._uses_savepoints():
self._commit_all_savepoints(using)
c = self.local.mget('%s_%s_*' %
(self.prefix, self._trunc_using(using)))
for key, value in c.items():
self.cache_backend.set(key, value, self.timeout)
else:
if self._uses_savepoints():
self._rollback_all_savepoints(using)
self._clear(using)
self._clear_sid_stack(using)
def _patched(self, original, commit=True, unless_managed=False):
@wraps(original, assigned=available_attrs(original))
def newfun(using=None):
original(using=using)
# copying behavior of original func
# if it is an 'unless_managed' version we should do nothing if transaction is managed
if not unless_managed or not self.is_managed(using=using):
self._flush(commit=commit, using=using)
return newfun
def _uses_savepoints(self):
return connection.features.uses_savepoints
def _sid_key(self, sid, using=None):
if using is not None:
prefix = 'trans_savepoint_%s' % using
else:
prefix = 'trans_savepoint'
if sid is not None and sid.startswith(prefix):
return sid
return '%s_%s'%(prefix, sid)
def _create_savepoint(self, sid, using=None):
key = self._sid_key(sid, using)
#get all local dirty items
c = self.local.mget('%s_%s_*' %
(self.prefix, self._trunc_using(using)))
#store them to a dictionary in the localstore
if key not in self.local:
self.local[key] = {}
for k, v in c.items():
self.local[key][k] = v
#clear the dirty
self._clear(using)
#append the key to the savepoint stack
sids = self._get_sid(using)
if key not in sids:
sids.append(key)
def _rollback_savepoint(self, sid, using=None):
sids = self._get_sid(using)
key = self._sid_key(sid, using)
stack = []
try:
popped = None
while popped != key:
popped = sids.pop()
stack.insert(0, popped)
#delete items from localstore
for i in stack:
del self.local[i]
#clear dirty
self._clear(using)
except IndexError:
#key not found, don't delete from localstore, restore sid stack
for i in stack:
sids.insert(0, i)
def _commit_savepoint(self, sid, using=None):
# commit is not a commit but is in reality just a clear back to that
# savepoint and adds the items back to the dirty transaction.
key = self._sid_key(sid, using)
sids = self._get_sid(using)
stack = []
try:
popped = None
while popped != key:
popped = sids.pop()
stack.insert(0, popped)
self._store_dirty(using)
for i in stack:
for k, v in self.local.get(i, {}).items():
self.local[k] = v
del self.local[i]
self._restore_dirty(using)
except IndexError:
for i in stack:
sids.insert(0, i)
def _commit_all_savepoints(self, using=None):
sids = self._get_sid(using)
if sids:
self._commit_savepoint(sids[0], using)
def _rollback_all_savepoints(self, using=None):
sids = self._get_sid(using)
if sids:
self._rollback_savepoint(sids[0], using)
def _store_dirty(self, using=None):
c = self.local.mget('%s_%s_*' %
(self.prefix, self._trunc_using(using)))
backup = 'trans_dirty_store_%s' % self._trunc_using(using)
self.local[backup] = {}
for k, v in c.items():
self.local[backup][k] = v
self._clear(using)
def _restore_dirty(self, using=None):
backup = 'trans_dirty_store_%s' % self._trunc_using(using)
for k, v in self.local.get(backup, {}).items():
self.local[k] = v
del self.local[backup]
def _savepoint(self, original):
@wraps(original, assigned=available_attrs(original))
def newfun(using=None):
if using is not None:
sid = original(using=using)
else:
sid = original()
if self._uses_savepoints():
self._create_savepoint(sid, using)
return sid
return newfun
def _savepoint_rollback(self, original):
def newfun(sid, *args, **kwargs):
original(sid, *args, **kwargs)
if self._uses_savepoints():
if len(args) == 2:
using = args[1]
else:
using = kwargs.get('using', None)
self._rollback_savepoint(sid, using)
return newfun
def _savepoint_commit(self, original):
def newfun(sid, *args, **kwargs):
original(sid, *args, **kwargs)
if self._uses_savepoints():
if len(args) == 1:
using = args[0]
else:
using = kwargs.get('using', None)
self._commit_savepoint(sid, using)
return newfun
def _getreal(self, name):
return getattr(transaction, 'real_%s' % name,
getattr(transaction, name))
def patch(self):
"""
This function monkey patches commit and rollback
writes to the cache should not happen until commit (unless our state
isn't managed). It does not yet support savepoints.
"""
if not self._patched_var:
self._originals['rollback'] = self._getreal('rollback')
self._originals['rollback_unless_managed'] = self._getreal('rollback_unless_managed')
self._originals['commit'] = self._getreal('commit')
self._originals['commit_unless_managed'] = self._getreal('commit_unless_managed')
self._originals['savepoint'] = self._getreal('savepoint')
self._originals['savepoint_rollback'] = self._getreal('savepoint_rollback')
self._originals['savepoint_commit'] = self._getreal('savepoint_commit')
transaction.rollback = self._patched(transaction.rollback, False)
transaction.rollback_unless_managed = self._patched(transaction.rollback_unless_managed,
False, unless_managed=True)
transaction.commit = self._patched(transaction.commit, True)
transaction.commit_unless_managed = self._patched(transaction.commit_unless_managed,
True, unless_managed=True)
transaction.savepoint = self._savepoint(transaction.savepoint)
transaction.savepoint_rollback = self._savepoint_rollback(transaction.savepoint_rollback)
transaction.savepoint_commit = self._savepoint_commit(transaction.savepoint_commit)
self._patched_var = True
def unpatch(self):
for fun in self._originals:
setattr(transaction, fun, self._originals[fun])
self._patched_var = False
|
jmoiron/johnny-cache | johnny/transaction.py | TransactionManager._flush | python | def _flush(self, commit=True, using=None):
if commit:
# XXX: multi-set?
if self._uses_savepoints():
self._commit_all_savepoints(using)
c = self.local.mget('%s_%s_*' %
(self.prefix, self._trunc_using(using)))
for key, value in c.items():
self.cache_backend.set(key, value, self.timeout)
else:
if self._uses_savepoints():
self._rollback_all_savepoints(using)
self._clear(using)
self._clear_sid_stack(using) | Flushes the internal cache, either to the memcache or rolls back | train | https://github.com/jmoiron/johnny-cache/blob/d96ea94c5dfcde517ff8f65d6ba4e435d8a0168c/johnny/transaction.py#L99-L115 | null | class TransactionManager(object):
"""
TransactionManager is a wrapper around a cache_backend that is
transaction aware.
If we are in a transaction, it will return the locally cached version.
* On rollback, it will flush all local caches
* On commit, it will push them up to the real shared cache backend
(ex. memcached).
"""
_patched_var = False
def __init__(self, cache_backend, keygen):
from johnny import cache, settings
self.timeout = settings.MIDDLEWARE_SECONDS
self.prefix = settings.MIDDLEWARE_KEY_PREFIX
self.cache_backend = cache_backend
self.local = cache.local
self.keygen = keygen(self.prefix)
self._originals = {}
self._dirty_backup = {}
self.local['trans_sids'] = {}
def _get_sid(self, using=None):
if 'trans_sids' not in self.local:
self.local['trans_sids'] = {}
d = self.local['trans_sids']
if using is None:
using = DEFAULT_DB_ALIAS
if using not in d:
d[using] = []
return d[using]
def _clear_sid_stack(self, using=None):
if using is None:
using = DEFAULT_DB_ALIAS
if using in self.local.get('trans_sids', {}):
del self.local['trans_sids']
def is_managed(self, using=None):
return is_managed(using=using)
def get(self, key, default=None, using=None):
if self.is_managed(using) and self._patched_var:
val = self.local.get(key, None)
if val:
return val
if self._uses_savepoints():
val = self._get_from_savepoints(key, using)
if val:
return val
return self.cache_backend.get(key, default)
def _get_from_savepoints(self, key, using=None):
sids = self._get_sid(using)
cp = list(sids)
cp.reverse()
for sid in cp:
if key in self.local[sid]:
return self.local[sid][key]
def _trunc_using(self, using):
if using is None:
using = DEFAULT_DB_ALIAS
using = johnny_settings.DB_CACHE_KEYS[using]
if len(using) > 100:
using = using[0:68] + self.keygen.gen_key(using[68:])
return using
def set(self, key, val, timeout=None, using=None):
"""
Set will be using the generational key, so if another thread
bumps this key, the localstore version will still be invalid.
If the key is bumped during a transaction it will be new
to the global cache on commit, so it will still be a bump.
"""
if timeout is None:
timeout = self.timeout
if self.is_managed(using=using) and self._patched_var:
self.local[key] = val
else:
self.cache_backend.set(key, val, timeout)
def _clear(self, using=None):
self.local.clear('%s_%s_*' %
(self.prefix, self._trunc_using(using)))
def _patched(self, original, commit=True, unless_managed=False):
@wraps(original, assigned=available_attrs(original))
def newfun(using=None):
original(using=using)
# copying behavior of original func
# if it is an 'unless_managed' version we should do nothing if transaction is managed
if not unless_managed or not self.is_managed(using=using):
self._flush(commit=commit, using=using)
return newfun
def _uses_savepoints(self):
return connection.features.uses_savepoints
def _sid_key(self, sid, using=None):
if using is not None:
prefix = 'trans_savepoint_%s' % using
else:
prefix = 'trans_savepoint'
if sid is not None and sid.startswith(prefix):
return sid
return '%s_%s'%(prefix, sid)
def _create_savepoint(self, sid, using=None):
key = self._sid_key(sid, using)
#get all local dirty items
c = self.local.mget('%s_%s_*' %
(self.prefix, self._trunc_using(using)))
#store them to a dictionary in the localstore
if key not in self.local:
self.local[key] = {}
for k, v in c.items():
self.local[key][k] = v
#clear the dirty
self._clear(using)
#append the key to the savepoint stack
sids = self._get_sid(using)
if key not in sids:
sids.append(key)
def _rollback_savepoint(self, sid, using=None):
sids = self._get_sid(using)
key = self._sid_key(sid, using)
stack = []
try:
popped = None
while popped != key:
popped = sids.pop()
stack.insert(0, popped)
#delete items from localstore
for i in stack:
del self.local[i]
#clear dirty
self._clear(using)
except IndexError:
#key not found, don't delete from localstore, restore sid stack
for i in stack:
sids.insert(0, i)
def _commit_savepoint(self, sid, using=None):
# commit is not a commit but is in reality just a clear back to that
# savepoint and adds the items back to the dirty transaction.
key = self._sid_key(sid, using)
sids = self._get_sid(using)
stack = []
try:
popped = None
while popped != key:
popped = sids.pop()
stack.insert(0, popped)
self._store_dirty(using)
for i in stack:
for k, v in self.local.get(i, {}).items():
self.local[k] = v
del self.local[i]
self._restore_dirty(using)
except IndexError:
for i in stack:
sids.insert(0, i)
def _commit_all_savepoints(self, using=None):
sids = self._get_sid(using)
if sids:
self._commit_savepoint(sids[0], using)
def _rollback_all_savepoints(self, using=None):
sids = self._get_sid(using)
if sids:
self._rollback_savepoint(sids[0], using)
def _store_dirty(self, using=None):
c = self.local.mget('%s_%s_*' %
(self.prefix, self._trunc_using(using)))
backup = 'trans_dirty_store_%s' % self._trunc_using(using)
self.local[backup] = {}
for k, v in c.items():
self.local[backup][k] = v
self._clear(using)
def _restore_dirty(self, using=None):
backup = 'trans_dirty_store_%s' % self._trunc_using(using)
for k, v in self.local.get(backup, {}).items():
self.local[k] = v
del self.local[backup]
def _savepoint(self, original):
@wraps(original, assigned=available_attrs(original))
def newfun(using=None):
if using is not None:
sid = original(using=using)
else:
sid = original()
if self._uses_savepoints():
self._create_savepoint(sid, using)
return sid
return newfun
def _savepoint_rollback(self, original):
def newfun(sid, *args, **kwargs):
original(sid, *args, **kwargs)
if self._uses_savepoints():
if len(args) == 2:
using = args[1]
else:
using = kwargs.get('using', None)
self._rollback_savepoint(sid, using)
return newfun
def _savepoint_commit(self, original):
def newfun(sid, *args, **kwargs):
original(sid, *args, **kwargs)
if self._uses_savepoints():
if len(args) == 1:
using = args[0]
else:
using = kwargs.get('using', None)
self._commit_savepoint(sid, using)
return newfun
def _getreal(self, name):
return getattr(transaction, 'real_%s' % name,
getattr(transaction, name))
def patch(self):
"""
This function monkey patches commit and rollback
writes to the cache should not happen until commit (unless our state
isn't managed). It does not yet support savepoints.
"""
if not self._patched_var:
self._originals['rollback'] = self._getreal('rollback')
self._originals['rollback_unless_managed'] = self._getreal('rollback_unless_managed')
self._originals['commit'] = self._getreal('commit')
self._originals['commit_unless_managed'] = self._getreal('commit_unless_managed')
self._originals['savepoint'] = self._getreal('savepoint')
self._originals['savepoint_rollback'] = self._getreal('savepoint_rollback')
self._originals['savepoint_commit'] = self._getreal('savepoint_commit')
transaction.rollback = self._patched(transaction.rollback, False)
transaction.rollback_unless_managed = self._patched(transaction.rollback_unless_managed,
False, unless_managed=True)
transaction.commit = self._patched(transaction.commit, True)
transaction.commit_unless_managed = self._patched(transaction.commit_unless_managed,
True, unless_managed=True)
transaction.savepoint = self._savepoint(transaction.savepoint)
transaction.savepoint_rollback = self._savepoint_rollback(transaction.savepoint_rollback)
transaction.savepoint_commit = self._savepoint_commit(transaction.savepoint_commit)
self._patched_var = True
def unpatch(self):
for fun in self._originals:
setattr(transaction, fun, self._originals[fun])
self._patched_var = False
|
jmoiron/johnny-cache | johnny/transaction.py | TransactionManager.patch | python | def patch(self):
if not self._patched_var:
self._originals['rollback'] = self._getreal('rollback')
self._originals['rollback_unless_managed'] = self._getreal('rollback_unless_managed')
self._originals['commit'] = self._getreal('commit')
self._originals['commit_unless_managed'] = self._getreal('commit_unless_managed')
self._originals['savepoint'] = self._getreal('savepoint')
self._originals['savepoint_rollback'] = self._getreal('savepoint_rollback')
self._originals['savepoint_commit'] = self._getreal('savepoint_commit')
transaction.rollback = self._patched(transaction.rollback, False)
transaction.rollback_unless_managed = self._patched(transaction.rollback_unless_managed,
False, unless_managed=True)
transaction.commit = self._patched(transaction.commit, True)
transaction.commit_unless_managed = self._patched(transaction.commit_unless_managed,
True, unless_managed=True)
transaction.savepoint = self._savepoint(transaction.savepoint)
transaction.savepoint_rollback = self._savepoint_rollback(transaction.savepoint_rollback)
transaction.savepoint_commit = self._savepoint_commit(transaction.savepoint_commit)
self._patched_var = True | This function monkey patches commit and rollback
writes to the cache should not happen until commit (unless our state
isn't managed). It does not yet support savepoints. | train | https://github.com/jmoiron/johnny-cache/blob/d96ea94c5dfcde517ff8f65d6ba4e435d8a0168c/johnny/transaction.py#L262-L286 | [
"def _patched(self, original, commit=True, unless_managed=False):\n @wraps(original, assigned=available_attrs(original))\n def newfun(using=None):\n original(using=using)\n # copying behavior of original func\n # if it is an 'unless_managed' version we should do nothing if transaction is managed\n if not unless_managed or not self.is_managed(using=using):\n self._flush(commit=commit, using=using)\n\n return newfun\n",
"def _savepoint(self, original):\n @wraps(original, assigned=available_attrs(original))\n def newfun(using=None):\n if using is not None:\n sid = original(using=using)\n else:\n sid = original()\n if self._uses_savepoints():\n self._create_savepoint(sid, using)\n return sid\n return newfun\n",
"def _savepoint_rollback(self, original):\n def newfun(sid, *args, **kwargs):\n original(sid, *args, **kwargs)\n if self._uses_savepoints():\n if len(args) == 2:\n using = args[1]\n else:\n using = kwargs.get('using', None)\n self._rollback_savepoint(sid, using)\n return newfun\n",
"def _savepoint_commit(self, original):\n def newfun(sid, *args, **kwargs):\n original(sid, *args, **kwargs)\n if self._uses_savepoints():\n if len(args) == 1:\n using = args[0]\n else:\n using = kwargs.get('using', None)\n self._commit_savepoint(sid, using)\n return newfun\n",
"def _getreal(self, name):\n return getattr(transaction, 'real_%s' % name,\n getattr(transaction, name))\n"
] | class TransactionManager(object):
"""
TransactionManager is a wrapper around a cache_backend that is
transaction aware.
If we are in a transaction, it will return the locally cached version.
* On rollback, it will flush all local caches
* On commit, it will push them up to the real shared cache backend
(ex. memcached).
"""
_patched_var = False
def __init__(self, cache_backend, keygen):
from johnny import cache, settings
self.timeout = settings.MIDDLEWARE_SECONDS
self.prefix = settings.MIDDLEWARE_KEY_PREFIX
self.cache_backend = cache_backend
self.local = cache.local
self.keygen = keygen(self.prefix)
self._originals = {}
self._dirty_backup = {}
self.local['trans_sids'] = {}
def _get_sid(self, using=None):
if 'trans_sids' not in self.local:
self.local['trans_sids'] = {}
d = self.local['trans_sids']
if using is None:
using = DEFAULT_DB_ALIAS
if using not in d:
d[using] = []
return d[using]
def _clear_sid_stack(self, using=None):
if using is None:
using = DEFAULT_DB_ALIAS
if using in self.local.get('trans_sids', {}):
del self.local['trans_sids']
def is_managed(self, using=None):
return is_managed(using=using)
def get(self, key, default=None, using=None):
if self.is_managed(using) and self._patched_var:
val = self.local.get(key, None)
if val:
return val
if self._uses_savepoints():
val = self._get_from_savepoints(key, using)
if val:
return val
return self.cache_backend.get(key, default)
def _get_from_savepoints(self, key, using=None):
sids = self._get_sid(using)
cp = list(sids)
cp.reverse()
for sid in cp:
if key in self.local[sid]:
return self.local[sid][key]
def _trunc_using(self, using):
if using is None:
using = DEFAULT_DB_ALIAS
using = johnny_settings.DB_CACHE_KEYS[using]
if len(using) > 100:
using = using[0:68] + self.keygen.gen_key(using[68:])
return using
def set(self, key, val, timeout=None, using=None):
"""
Set will be using the generational key, so if another thread
bumps this key, the localstore version will still be invalid.
If the key is bumped during a transaction it will be new
to the global cache on commit, so it will still be a bump.
"""
if timeout is None:
timeout = self.timeout
if self.is_managed(using=using) and self._patched_var:
self.local[key] = val
else:
self.cache_backend.set(key, val, timeout)
def _clear(self, using=None):
self.local.clear('%s_%s_*' %
(self.prefix, self._trunc_using(using)))
def _flush(self, commit=True, using=None):
"""
Flushes the internal cache, either to the memcache or rolls back
"""
if commit:
# XXX: multi-set?
if self._uses_savepoints():
self._commit_all_savepoints(using)
c = self.local.mget('%s_%s_*' %
(self.prefix, self._trunc_using(using)))
for key, value in c.items():
self.cache_backend.set(key, value, self.timeout)
else:
if self._uses_savepoints():
self._rollback_all_savepoints(using)
self._clear(using)
self._clear_sid_stack(using)
def _patched(self, original, commit=True, unless_managed=False):
@wraps(original, assigned=available_attrs(original))
def newfun(using=None):
original(using=using)
# copying behavior of original func
# if it is an 'unless_managed' version we should do nothing if transaction is managed
if not unless_managed or not self.is_managed(using=using):
self._flush(commit=commit, using=using)
return newfun
def _uses_savepoints(self):
return connection.features.uses_savepoints
def _sid_key(self, sid, using=None):
if using is not None:
prefix = 'trans_savepoint_%s' % using
else:
prefix = 'trans_savepoint'
if sid is not None and sid.startswith(prefix):
return sid
return '%s_%s'%(prefix, sid)
def _create_savepoint(self, sid, using=None):
key = self._sid_key(sid, using)
#get all local dirty items
c = self.local.mget('%s_%s_*' %
(self.prefix, self._trunc_using(using)))
#store them to a dictionary in the localstore
if key not in self.local:
self.local[key] = {}
for k, v in c.items():
self.local[key][k] = v
#clear the dirty
self._clear(using)
#append the key to the savepoint stack
sids = self._get_sid(using)
if key not in sids:
sids.append(key)
def _rollback_savepoint(self, sid, using=None):
sids = self._get_sid(using)
key = self._sid_key(sid, using)
stack = []
try:
popped = None
while popped != key:
popped = sids.pop()
stack.insert(0, popped)
#delete items from localstore
for i in stack:
del self.local[i]
#clear dirty
self._clear(using)
except IndexError:
#key not found, don't delete from localstore, restore sid stack
for i in stack:
sids.insert(0, i)
def _commit_savepoint(self, sid, using=None):
# commit is not a commit but is in reality just a clear back to that
# savepoint and adds the items back to the dirty transaction.
key = self._sid_key(sid, using)
sids = self._get_sid(using)
stack = []
try:
popped = None
while popped != key:
popped = sids.pop()
stack.insert(0, popped)
self._store_dirty(using)
for i in stack:
for k, v in self.local.get(i, {}).items():
self.local[k] = v
del self.local[i]
self._restore_dirty(using)
except IndexError:
for i in stack:
sids.insert(0, i)
def _commit_all_savepoints(self, using=None):
sids = self._get_sid(using)
if sids:
self._commit_savepoint(sids[0], using)
def _rollback_all_savepoints(self, using=None):
sids = self._get_sid(using)
if sids:
self._rollback_savepoint(sids[0], using)
def _store_dirty(self, using=None):
c = self.local.mget('%s_%s_*' %
(self.prefix, self._trunc_using(using)))
backup = 'trans_dirty_store_%s' % self._trunc_using(using)
self.local[backup] = {}
for k, v in c.items():
self.local[backup][k] = v
self._clear(using)
def _restore_dirty(self, using=None):
backup = 'trans_dirty_store_%s' % self._trunc_using(using)
for k, v in self.local.get(backup, {}).items():
self.local[k] = v
del self.local[backup]
def _savepoint(self, original):
@wraps(original, assigned=available_attrs(original))
def newfun(using=None):
if using is not None:
sid = original(using=using)
else:
sid = original()
if self._uses_savepoints():
self._create_savepoint(sid, using)
return sid
return newfun
def _savepoint_rollback(self, original):
def newfun(sid, *args, **kwargs):
original(sid, *args, **kwargs)
if self._uses_savepoints():
if len(args) == 2:
using = args[1]
else:
using = kwargs.get('using', None)
self._rollback_savepoint(sid, using)
return newfun
def _savepoint_commit(self, original):
def newfun(sid, *args, **kwargs):
original(sid, *args, **kwargs)
if self._uses_savepoints():
if len(args) == 1:
using = args[0]
else:
using = kwargs.get('using', None)
self._commit_savepoint(sid, using)
return newfun
def _getreal(self, name):
return getattr(transaction, 'real_%s' % name,
getattr(transaction, name))
def unpatch(self):
for fun in self._originals:
setattr(transaction, fun, self._originals[fun])
self._patched_var = False
|
jmoiron/johnny-cache | johnny/localstore.py | LocalStore.mget | python | def mget(self, pat=None):
if pat is None:
return {}
expr = re.compile(fnmatch.translate(pat))
m = {}
for key in tuple(self.keys()):
#make sure the key is a str first
if isinstance(key, string_types):
if expr.match(key):
m[key] = self[key]
return m | Get a dictionary mapping of all k:v pairs with key matching
glob style expression `pat`. | train | https://github.com/jmoiron/johnny-cache/blob/d96ea94c5dfcde517ff8f65d6ba4e435d8a0168c/johnny/localstore.py#L76-L90 | [
"def keys(self):\n return self.__dict__.keys()\n"
] | class LocalStore(threading.local):
"""
A thread-local OpenStruct that can be used as a local cache. An instance
is located at ``johnny.cache.local``, and is cleared on every request by
the ``LocalStoreClearMiddleware``. It can be a thread-safe way to handle
global contexts.
"""
def __init__(self, **d):
threading.local.__init__(self)
for k, v in d.items():
threading.local.__setattr__(self, k, v)
# dictionary API
def __getitem__(self, key):
return self.__dict__[key]
def __setitem__(self, key, value):
self.__dict__[key] = value
def __delitem__(self, key):
if key in self.__dict__:
del self.__dict__[key]
def __iter__(self):
return iter(self.__dict__)
def __len__(self):
return len(self.__dict__)
def keys(self):
return self.__dict__.keys()
def values(self):
return self.__dict__.values()
def items(self):
return self.__dict__.items()
def iterkeys(self):
warnings.warn(
'LocalStore.iterkeys() is deprecated, use .keys() instead',
DeprecationWarning)
return self.__dict__.keys()
def itervalues(self):
warnings.warn(
'LocalStore.itervalues() is deprecated, use .values() instead',
DeprecationWarning)
return self.__dict__.values()
def iteritems(self):
warnings.warn(
'LocalStore.iteritems() is deprecated, use .items() instead',
DeprecationWarning)
return self.__dict__.items()
def get(self, *args):
return self.__dict__.get(*args)
def update(self, d):
self.__dict__.update(d)
def setdefault(self, name, value):
return self.__dict__.setdefault(name, value)
def clear(self, pat=None):
"""
Minor diversion with built-in dict here; clear can take a glob
style expression and remove keys based on that expression.
"""
if pat is None:
return self.__dict__.clear()
expr = re.compile(fnmatch.translate(pat))
for key in tuple(self.keys()):
#make sure the key is a str first
if isinstance(key, string_types):
if expr.match(key):
del self.__dict__[key]
def __repr__(self):
return repr(self.__dict__)
def __str__(self):
return str(self.__dict__)
|
jmoiron/johnny-cache | johnny/localstore.py | LocalStore.clear | python | def clear(self, pat=None):
if pat is None:
return self.__dict__.clear()
expr = re.compile(fnmatch.translate(pat))
for key in tuple(self.keys()):
#make sure the key is a str first
if isinstance(key, string_types):
if expr.match(key):
del self.__dict__[key] | Minor diversion with built-in dict here; clear can take a glob
style expression and remove keys based on that expression. | train | https://github.com/jmoiron/johnny-cache/blob/d96ea94c5dfcde517ff8f65d6ba4e435d8a0168c/johnny/localstore.py#L92-L105 | [
"def keys(self):\n return self.__dict__.keys()\n"
] | class LocalStore(threading.local):
"""
A thread-local OpenStruct that can be used as a local cache. An instance
is located at ``johnny.cache.local``, and is cleared on every request by
the ``LocalStoreClearMiddleware``. It can be a thread-safe way to handle
global contexts.
"""
def __init__(self, **d):
threading.local.__init__(self)
for k, v in d.items():
threading.local.__setattr__(self, k, v)
# dictionary API
def __getitem__(self, key):
return self.__dict__[key]
def __setitem__(self, key, value):
self.__dict__[key] = value
def __delitem__(self, key):
if key in self.__dict__:
del self.__dict__[key]
def __iter__(self):
return iter(self.__dict__)
def __len__(self):
return len(self.__dict__)
def keys(self):
return self.__dict__.keys()
def values(self):
return self.__dict__.values()
def items(self):
return self.__dict__.items()
def iterkeys(self):
warnings.warn(
'LocalStore.iterkeys() is deprecated, use .keys() instead',
DeprecationWarning)
return self.__dict__.keys()
def itervalues(self):
warnings.warn(
'LocalStore.itervalues() is deprecated, use .values() instead',
DeprecationWarning)
return self.__dict__.values()
def iteritems(self):
warnings.warn(
'LocalStore.iteritems() is deprecated, use .items() instead',
DeprecationWarning)
return self.__dict__.items()
def get(self, *args):
return self.__dict__.get(*args)
def update(self, d):
self.__dict__.update(d)
def setdefault(self, name, value):
return self.__dict__.setdefault(name, value)
def mget(self, pat=None):
"""
Get a dictionary mapping of all k:v pairs with key matching
glob style expression `pat`.
"""
if pat is None:
return {}
expr = re.compile(fnmatch.translate(pat))
m = {}
for key in tuple(self.keys()):
#make sure the key is a str first
if isinstance(key, string_types):
if expr.match(key):
m[key] = self[key]
return m
def __repr__(self):
return repr(self.__dict__)
def __str__(self):
return str(self.__dict__)
|
projecthamster/hamster-lib | hamster_lib/reports.py | XMLWriter._fact_to_tuple | python | def _fact_to_tuple(self, fact):
# Fields that may have ``None`` value will be represented by ''
if fact.category:
category = fact.category.name
else:
category = ''
description = fact.description or ''
return FactTuple(
start=fact.start.strftime(self.datetime_format),
end=fact.end.strftime(self.datetime_format),
activity=text_type(fact.activity.name),
duration=fact.get_string_delta(format='%M'),
category=text_type(category),
description=text_type(description),
) | Convert a ``Fact`` to its normalized tuple.
This is where all type conversion for ``Fact`` attributes to strings as
well as any normalization happens.
Note:
Because different writers may require different types, we need to
do this individually.
Args:
fact (hamster_lib.Fact): Fact to be converted.
Returns:
FactTuple: Tuple representing the original ``Fact``. | train | https://github.com/projecthamster/hamster-lib/blob/bc34c822c239a6fa0cde3a4f90b0d00506fb5a4f/hamster_lib/reports.py#L294-L326 | null | class XMLWriter(ReportWriter):
"""Writer for a basic xml export."""
# This is a straight forward copy of the 'legacy hamster' XMLWriter class
# contributed by 'tbaugis' in 11e3f66
def __init__(self, path, datetime_format="%Y-%m-%d %H:%M:%S"):
"""Setup the writer including a main xml document."""
self.datetime_format = datetime_format
self.file = open(path, 'wb')
self.document = Document()
self.fact_list = self.document.createElement("facts")
def _write_fact(self, fact_tuple):
"""
Create new fact element and populate attributes.
Once the child is prepared append it to ``fact_list``.
"""
fact = self.document.createElement("fact")
fact.setAttribute('start', fact_tuple.start)
fact.setAttribute('end', fact_tuple.end)
fact.setAttribute('activity', fact_tuple.activity)
fact.setAttribute('duration', fact_tuple.duration)
fact.setAttribute('category', fact_tuple.category)
fact.setAttribute('description', fact_tuple.description)
self.fact_list.appendChild(fact)
def _close(self):
"""
Append the xml fact list to the main document write file and cleanup.
``toxml`` should take care of encoding everything with UTF-8.
"""
self.document.appendChild(self.fact_list)
self.file.write(self.document.toxml(encoding='utf-8'))
return super(XMLWriter, self)._close()
|
projecthamster/hamster-lib | hamster_lib/reports.py | XMLWriter._write_fact | python | def _write_fact(self, fact_tuple):
fact = self.document.createElement("fact")
fact.setAttribute('start', fact_tuple.start)
fact.setAttribute('end', fact_tuple.end)
fact.setAttribute('activity', fact_tuple.activity)
fact.setAttribute('duration', fact_tuple.duration)
fact.setAttribute('category', fact_tuple.category)
fact.setAttribute('description', fact_tuple.description)
self.fact_list.appendChild(fact) | Create new fact element and populate attributes.
Once the child is prepared append it to ``fact_list``. | train | https://github.com/projecthamster/hamster-lib/blob/bc34c822c239a6fa0cde3a4f90b0d00506fb5a4f/hamster_lib/reports.py#L328-L341 | null | class XMLWriter(ReportWriter):
"""Writer for a basic xml export."""
# This is a straight forward copy of the 'legacy hamster' XMLWriter class
# contributed by 'tbaugis' in 11e3f66
def __init__(self, path, datetime_format="%Y-%m-%d %H:%M:%S"):
"""Setup the writer including a main xml document."""
self.datetime_format = datetime_format
self.file = open(path, 'wb')
self.document = Document()
self.fact_list = self.document.createElement("facts")
def _fact_to_tuple(self, fact):
"""
Convert a ``Fact`` to its normalized tuple.
This is where all type conversion for ``Fact`` attributes to strings as
well as any normalization happens.
Note:
Because different writers may require different types, we need to
do this individually.
Args:
fact (hamster_lib.Fact): Fact to be converted.
Returns:
FactTuple: Tuple representing the original ``Fact``.
"""
# Fields that may have ``None`` value will be represented by ''
if fact.category:
category = fact.category.name
else:
category = ''
description = fact.description or ''
return FactTuple(
start=fact.start.strftime(self.datetime_format),
end=fact.end.strftime(self.datetime_format),
activity=text_type(fact.activity.name),
duration=fact.get_string_delta(format='%M'),
category=text_type(category),
description=text_type(description),
)
def _close(self):
"""
Append the xml fact list to the main document write file and cleanup.
``toxml`` should take care of encoding everything with UTF-8.
"""
self.document.appendChild(self.fact_list)
self.file.write(self.document.toxml(encoding='utf-8'))
return super(XMLWriter, self)._close()
|
projecthamster/hamster-lib | hamster_lib/reports.py | XMLWriter._close | python | def _close(self):
self.document.appendChild(self.fact_list)
self.file.write(self.document.toxml(encoding='utf-8'))
return super(XMLWriter, self)._close() | Append the xml fact list to the main document write file and cleanup.
``toxml`` should take care of encoding everything with UTF-8. | train | https://github.com/projecthamster/hamster-lib/blob/bc34c822c239a6fa0cde3a4f90b0d00506fb5a4f/hamster_lib/reports.py#L343-L352 | null | class XMLWriter(ReportWriter):
"""Writer for a basic xml export."""
# This is a straight forward copy of the 'legacy hamster' XMLWriter class
# contributed by 'tbaugis' in 11e3f66
def __init__(self, path, datetime_format="%Y-%m-%d %H:%M:%S"):
    """
    Setup the writer including a main xml document.

    Args:
        path: Location the report file is written to.
        datetime_format (str): ``strftime``-style format applied to fact
            start/end times.
    """
    self.datetime_format = datetime_format
    # Binary mode: the document is serialized with an explicit encoding on
    # close, which yields bytes rather than text.
    self.file = open(path, 'wb')
    # NOTE(review): ``Document`` is presumably ``xml.dom.minidom.Document``
    # -- confirm against the module's imports.
    self.document = Document()
    # All ``<fact>`` elements get collected under this root element.
    self.fact_list = self.document.createElement("facts")
def _fact_to_tuple(self, fact):
    """
    Normalize a ``Fact`` into its tuple representation.

    All conversion of ``Fact`` attributes to strings, as well as any
    normalization, happens here so each writer can handle types
    individually.

    Args:
        fact (hamster_lib.Fact): Fact to be converted.

    Returns:
        FactTuple: Tuple representing the original ``Fact``.
    """
    # Optional fields are rendered as '' instead of ``None``.
    category_name = fact.category.name if fact.category else ''
    return FactTuple(
        start=fact.start.strftime(self.datetime_format),
        end=fact.end.strftime(self.datetime_format),
        activity=text_type(fact.activity.name),
        duration=fact.get_string_delta(format='%M'),
        category=text_type(category_name),
        description=text_type(fact.description or ''),
    )
def _write_fact(self, fact_tuple):
    """
    Serialize one fact tuple as a new ``<fact>`` element.

    Every tuple field becomes an attribute of the element, which is then
    appended to ``self.fact_list``.
    """
    element = self.document.createElement("fact")
    # Attribute order mirrors the original field-by-field assignments.
    for name in ('start', 'end', 'activity', 'duration', 'category', 'description'):
        element.setAttribute(name, getattr(fact_tuple, name))
    self.fact_list.appendChild(element)
|
projecthamster/hamster-lib | hamster_lib/helpers/time.py | get_day_end | python | def get_day_end(config):
day_start_datetime = datetime.datetime.combine(datetime.date.today(), config['day_start'])
day_end_datetime = day_start_datetime - datetime.timedelta(seconds=1)
return day_end_datetime.time() | Get the day end time given the day start. This assumes full 24h day.
Args:
config (dict): Configdict. Needed to extract ``day_start``.
Note:
This is merely a convenience function so we do not have to derive this from ``day_start``
by hand all the time. | train | https://github.com/projecthamster/hamster-lib/blob/bc34c822c239a6fa0cde3a4f90b0d00506fb5a4f/hamster_lib/helpers/time.py#L33-L46 | null | # -*- encoding: utf-8 -*-
# Copyright (C) 2015-2016 Eric Goller <eric.goller@ninjaduck.solutions>
# This file is part of 'hamster-lib'.
#
# 'hamster-lib' is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# 'hamster-lib' is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with 'hamster-lib'. If not, see <http://www.gnu.org/licenses/>.
"""This module provides several time realted convinience functions."""
from __future__ import absolute_import, unicode_literals
import datetime
import re
from collections import namedtuple
TimeFrame = namedtuple('Timeframe', ('start_date', 'start_time',
'end_date', 'end_time', 'offset'))
def end_day_to_datetime(end_day, config):
    """
    Translate an end *day* into the datetime that actually terminates it.

    This is non-trivial because of the variable ``day_start``: even when an
    'end day' is specified, the actual point in time may reach into the
    following calendar day.

    Args:
        end_day (datetime.date): Raw end date that is to be adjusted.
        config: Controller config containing information on when a workday
            starts (``day_start``).

    Returns:
        datetime.datetime: The end day as an adjusted datetime object.

    Example:
        Given a ``day_start`` of ``5:30`` and an end date of ``2015-04-01``
        we actually want to consider points in time up to ``2015-04-02 5:29``,
        reflecting that a *work day* does not match *calendar days*.

    Note:
        An alternative implementation for the similar problem in legacy
        hamster: ``hamster.storage.db.Storage.__get_todays_facts``.
    """
    day_end = datetime.datetime.combine(end_day, get_day_end(config))
    # With a midnight day_start, workdays and calendar days coincide; any
    # other day_start pushes the cut-off into the next calendar day.
    if config['day_start'] != datetime.time(0, 0, 0):
        day_end += datetime.timedelta(days=1)
    return day_end
def extract_time_info(text):
    """
    Extract valid time(-range) information from a string according to our specs.

    Args:
        text (text_type): Raw string containing encoded time(-span) information.
            Date/time combinations are expected in a ``YYYY-MM-DD hh:mm`` format.
            Relative times can be given with ``-minutes``. Please note that
            either *relative* or *absolute* times will be considered. It is
            possible to either just specify a start (as time, date, or
            datetime) or a timerange (start and end). If a timerange is given,
            start and end need to be delimited exactly by ' - '.

    Returns:
        tuple: ``(timeframe, rest)`` tuple. Where ``timeframe`` is a
            ``TimeFrame`` providing convenient access to all separate elements
            extracted from the raw string and ``rest`` is any substring that
            has not been matched to valid time/date info.

    Note:
        * Relative times always return just ``(None, None, None, None, timedelta)``.
    """
    # [TODO] Add a list of supported formats.

    def get_time(time, seconds=None):
        """Convert a time's string representation to a datetime.time instance."""
        if time is None:
            return time
        if seconds:
            time_format = '%H:%M:%S'
        else:
            time_format = '%H:%M'
        return datetime.datetime.strptime(time.strip(), time_format).time()

    def get_date(date):
        """Convert a date's string representation to a datetime.date instance."""
        if date:
            date = datetime.datetime.strptime(date.strip(), "%Y-%m-%d").date()
        return date

    def date_time_from_groupdict(groupdict):
        """Return a (date, time) tuple by introspecting a match's groupdict."""
        if groupdict['datetime']:
            dt = parse_time(groupdict['datetime'])
            time = dt.time()
            date = dt.date()
        else:
            date = get_date(groupdict.get('date'))
            time = get_time(groupdict.get('time'), groupdict.get('seconds'))
        return (date, time)

    # Baseline/default values.
    result = {
        'start_date': None,
        'start_time': None,
        'end_date': None,
        'end_time': None,
        'offset': None
    }
    rest = None

    # Individual patterns for time/date substrings. Raw strings so the ``\d``
    # escapes reach ``re`` verbatim -- fixes invalid-escape warnings on modern
    # Python; the resulting pattern text is byte-identical to before.
    relative_pattern = r'(?P<relative>-\d+)'
    time_pattern = r'(?P<time>\d{2}:\d{2}(?P<seconds>:\d{2})?)'
    date_pattern = r'(?P<date>\d{4}-\d{2}-\d{2})'
    datetime_pattern = r'(?P<datetime>\d{4}-\d{2}-\d{2} \d{2}:\d{2}(:\d{2})?)'

    start = re.match('^({}|{}|{}|{}) (?P<rest>.+)'.format(relative_pattern, datetime_pattern,
                                                          date_pattern, time_pattern), text)
    if start:
        start_groups = start.groupdict()
        if start_groups['relative']:
            # Relative offsets are normalized to a positive timedelta.
            result['offset'] = datetime.timedelta(minutes=abs(int(start_groups['relative'])))
        else:
            date, time = date_time_from_groupdict(start_groups)
            result['start_date'] = date
            result['start_time'] = time
        rest = start_groups['rest']

    if rest:
        end = re.match('^- ({}|{}|{}) (?P<rest>.+)'.format(datetime_pattern, date_pattern,
                                                           time_pattern), rest)
    else:
        end = None
    # End info is only meaningful for absolute start specifications.
    if end and not start_groups['relative']:
        end_groups = end.groupdict()
        date, time = date_time_from_groupdict(end_groups)
        result['end_date'] = date
        result['end_time'] = time
        rest = end_groups['rest']

    result = TimeFrame(result['start_date'], result['start_time'], result['end_date'],
                       result['end_time'], result['offset'])

    # Consider the whole string as 'rest' if no time/date info was extracted.
    if not rest:
        rest = text
    return (result, rest.strip())
def complete_timeframe(timeframe, config, partial=False):
    """
    Apply fallback strategy to incomplete timeframes.

    Our fallback strategy is as follows:

        * Missing start-date: Fallback to ``today``.
        * Missing start-time: Fallback to ``store.config['day_start']``.
        * Missing end-date: Fallback to ``today`` for ``day_start='00:00'``,
          ``tomorrow`` otherwise. See
          ``hamster_lib.helpers.end_day_to_datetime`` for details and
          explanations.
        * Missing end-time: 1 second before ``store.config['day_start']``.

    Args:
        timeframe (TimeFrame): ``TimeFrame`` instance incorporating all
            information available about the timespan. Any missing info will be
            completed per fallback strategy.
        config (dict): A config-dict providing settings relevant to determine
            fallback values.
        partial (bool, optional): If True, only complete start/end times when
            at least either date or time information is present. Defaults to
            ``False``.

    Returns:
        tuple: ``(start, end)`` tuple of full ``datetime.datetime`` instances
            (either element may stay ``None`` when ``partial=True``).

    Raises:
        TypeError: If any of the ``timeframe`` values is of inappropriate
            datetime type.
    """
    def complete_start_date(date):
        """
        Fall back to ``today`` if no date is given; otherwise validate its type.

        Note:
            Reference behavior taken from [hamster-cli](https://github.com/
            projecthamster/hamster/blob/master/src/hamster-cli#L368).
        """
        if not date:
            date = datetime.date.today()
        elif not isinstance(date, datetime.date):
            raise TypeError(_(
                "Expected datetime.date instance, got {type} instead.".format(
                    type=type(date))
            ))
        return date

    def complete_start_time(time, day_start):
        """Assign ``day_start`` if no start-time is given; otherwise validate type."""
        if not time:
            time = day_start
        elif not isinstance(time, datetime.time):
            raise TypeError(_(
                "Expected datetime.time instance, got {type} instead.".format(
                    type=type(time))
            ))
        return time

    def complete_start(date, time, config):
        # Bugfix: use the passed-in ``date``/``time`` arguments instead of
        # reaching back into the enclosing ``timeframe`` (the parameters used
        # to be ignored; behavior at the single call site is unchanged).
        return datetime.datetime.combine(
            complete_start_date(date),
            complete_start_time(time, config['day_start']),
        )

    def complete_end_date(date):
        """Fall back to ``today`` if no end-date is given; otherwise validate type."""
        if not date:
            date = datetime.date.today()
        elif not isinstance(date, datetime.date):
            raise TypeError(_(
                "Expected datetime.date instance, got {type} instead.".format(
                    type=type(date))
            ))
        return date

    def complete_end(date, time, config):
        date = complete_end_date(date)
        if time:
            result = datetime.datetime.combine(date, time)
        else:
            # No explicit end-time: the day ends just before the next day_start.
            result = end_day_to_datetime(date, config)
        return result

    start, end = None, None
    if any((timeframe.offset, timeframe.start_time, timeframe.start_date)) or not partial:
        if timeframe.offset:
            # Relative timeframe: start is "offset before now".
            start = datetime.datetime.now() - timeframe.offset
        else:
            start = complete_start(timeframe.start_date, timeframe.start_time, config)
    if any((timeframe.end_date, timeframe.end_time)) or not partial:
        end = complete_end(timeframe.end_date, timeframe.end_time, config)
    return (start, end)
def parse_time(time):
    """
    Parse a singular date, time or datetime string.

    Args:
        time (str): A string in one of the following formats: ``%H:%M``,
            ``%H:%M:%S``, ``%Y-%m-%d``, ``%Y-%m-%d %H:%M`` or
            ``%Y-%m-%d %H:%M:%S``.

    Returns:
        datetime.datetime: Depending on the input string either a
            ``datetime.date``, ``datetime.time`` or ``datetime.datetime``
            instance.

    Raises:
        ValueError: If ``time`` can not be matched against any of the
            accepted formats.
    """
    word_count = len(time.strip().split())
    if word_count == 1:
        # Time-of-day first (with, then without seconds); a plain date is the
        # last resort and lets its ValueError propagate.
        for time_format in ('%H:%M:%S', '%H:%M'):
            try:
                return datetime.datetime.strptime(time, time_format).time()
            except ValueError:
                pass
        return datetime.datetime.strptime(time, '%Y-%m-%d').date()
    if word_count == 2:
        try:
            return datetime.datetime.strptime(time, '%Y-%m-%d %H:%M:%S')
        except ValueError:
            return datetime.datetime.strptime(time, '%Y-%m-%d %H:%M')
    raise ValueError(_(
        "String does not seem to be in one of our supported time formats."
    ))
def validate_start_end_range(range_tuple):
    """
    Perform basic sanity checks on a timeframe.

    Args:
        range_tuple (tuple): ``(start, end)`` tuple as returned by
            ``complete_timeframe``.

    Returns:
        tuple: The unchanged ``(start, end)`` tuple that passed validation.

    Raises:
        ValueError: If start > end.

    Note:
        ``timeframes`` may be incomplete (elements may be ``None``),
        especially if ``complete_timeframe(partial=True)`` was used to
        construct them; such partial tuples are accepted as-is.
    """
    start, end = range_tuple
    # Only comparable when both endpoints are actually present.
    if start and end and start > end:
        raise ValueError(_("Start after end!"))
    return range_tuple
|
projecthamster/hamster-lib | hamster_lib/helpers/time.py | end_day_to_datetime | python | def end_day_to_datetime(end_day, config):
day_start_time = config['day_start']
day_end_time = get_day_end(config)
if day_start_time == datetime.time(0, 0, 0):
end = datetime.datetime.combine(end_day, day_end_time)
else:
end = datetime.datetime.combine(end_day, day_end_time) + datetime.timedelta(days=1)
return end | Convert a given end day to its proper datetime.
This is non trivial because of variable ``day_start``. We want to make sure
that even if an 'end day' is specified the actual point in time may reach into the following
day.
Args:
end (datetime.date): Raw end date that is to be adjusted.
config: Controller config containing information on when a workday starts.
Returns:
datetime.datetime: The endday as a adjusted datetime object.
Example:
Given a ``day_start`` of ``5:30`` and end date of ``2015-04-01`` we actually want to
consider even points in time up to ``2015-04-02 5:29``. That is to represent that a
*work day*
does not match *calendar days*.
Note:
An alternative implementation for the similar problem in legacy hamster:
``hamster.storage.db.Storage.__get_todays_facts``. | train | https://github.com/projecthamster/hamster-lib/blob/bc34c822c239a6fa0cde3a4f90b0d00506fb5a4f/hamster_lib/helpers/time.py#L49-L82 | [
"def get_day_end(config):\n \"\"\"\n Get the day end time given the day start. This assumes full 24h day.\n\n Args:\n config (dict): Configdict. Needed to extract ``day_start``.\n\n Note:\n This is merely a convinience funtion so we do not have to deduct this from ``day_start``\n by hand all the time.\n \"\"\"\n day_start_datetime = datetime.datetime.combine(datetime.date.today(), config['day_start'])\n day_end_datetime = day_start_datetime - datetime.timedelta(seconds=1)\n return day_end_datetime.time()\n"
] | # -*- encoding: utf-8 -*-
# Copyright (C) 2015-2016 Eric Goller <eric.goller@ninjaduck.solutions>
# This file is part of 'hamster-lib'.
#
# 'hamster-lib' is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# 'hamster-lib' is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with 'hamster-lib'. If not, see <http://www.gnu.org/licenses/>.
"""This module provides several time realted convinience functions."""
from __future__ import absolute_import, unicode_literals
import datetime
import re
from collections import namedtuple
TimeFrame = namedtuple('Timeframe', ('start_date', 'start_time',
'end_date', 'end_time', 'offset'))
def get_day_end(config):
    """
    Derive the day-end time from the configured day start (assumes a full 24h day).

    Args:
        config (dict): Config dict. Needed to extract ``day_start``.

    Returns:
        datetime.time: The time one second before ``day_start``.

    Note:
        This is merely a convenience function so we do not have to derive
        this from ``day_start`` by hand all the time.
    """
    # Anchor on an arbitrary date so timedelta arithmetic works on a time.
    anchor = datetime.datetime.combine(datetime.date.today(), config['day_start'])
    return (anchor - datetime.timedelta(seconds=1)).time()
def extract_time_info(text):
    """
    Extract valid time(-range) information from a string according to our specs.

    Args:
        text (text_type): Raw string containing encoded time(-span) information.
            Date/time combinations are expected in a ``YYYY-MM-DD hh:mm`` format.
            Relative times can be given with ``-minutes``. Please note that
            either *relative* or *absolute* times will be considered. It is
            possible to either just specify a start (as time, date, or
            datetime) or a timerange (start and end). If a timerange is given,
            start and end need to be delimited exactly by ' - '.

    Returns:
        tuple: ``(timeframe, rest)`` tuple. Where ``timeframe`` is a
            ``TimeFrame`` providing convenient access to all separate elements
            extracted from the raw string and ``rest`` is any substring that
            has not been matched to valid time/date info.

    Note:
        * Relative times always return just ``(None, None, None, None, timedelta)``.
    """
    # [TODO] Add a list of supported formats.

    def get_time(time, seconds=None):
        """Convert a time's string representation to a datetime.time instance."""
        if time is None:
            return time
        if seconds:
            time_format = '%H:%M:%S'
        else:
            time_format = '%H:%M'
        return datetime.datetime.strptime(time.strip(), time_format).time()

    def get_date(date):
        """Convert a date's string representation to a datetime.date instance."""
        if date:
            date = datetime.datetime.strptime(date.strip(), "%Y-%m-%d").date()
        return date

    def date_time_from_groupdict(groupdict):
        """Return a (date, time) tuple by introspecting a match's groupdict."""
        if groupdict['datetime']:
            dt = parse_time(groupdict['datetime'])
            time = dt.time()
            date = dt.date()
        else:
            date = get_date(groupdict.get('date'))
            time = get_time(groupdict.get('time'), groupdict.get('seconds'))
        return (date, time)

    # Baseline/default values.
    result = {
        'start_date': None,
        'start_time': None,
        'end_date': None,
        'end_time': None,
        'offset': None
    }
    rest = None

    # Individual patterns for time/date substrings. Raw strings so the ``\d``
    # escapes reach ``re`` verbatim -- fixes invalid-escape warnings on modern
    # Python; the resulting pattern text is byte-identical to before.
    relative_pattern = r'(?P<relative>-\d+)'
    time_pattern = r'(?P<time>\d{2}:\d{2}(?P<seconds>:\d{2})?)'
    date_pattern = r'(?P<date>\d{4}-\d{2}-\d{2})'
    datetime_pattern = r'(?P<datetime>\d{4}-\d{2}-\d{2} \d{2}:\d{2}(:\d{2})?)'

    start = re.match('^({}|{}|{}|{}) (?P<rest>.+)'.format(relative_pattern, datetime_pattern,
                                                          date_pattern, time_pattern), text)
    if start:
        start_groups = start.groupdict()
        if start_groups['relative']:
            # Relative offsets are normalized to a positive timedelta.
            result['offset'] = datetime.timedelta(minutes=abs(int(start_groups['relative'])))
        else:
            date, time = date_time_from_groupdict(start_groups)
            result['start_date'] = date
            result['start_time'] = time
        rest = start_groups['rest']

    if rest:
        end = re.match('^- ({}|{}|{}) (?P<rest>.+)'.format(datetime_pattern, date_pattern,
                                                           time_pattern), rest)
    else:
        end = None
    # End info is only meaningful for absolute start specifications.
    if end and not start_groups['relative']:
        end_groups = end.groupdict()
        date, time = date_time_from_groupdict(end_groups)
        result['end_date'] = date
        result['end_time'] = time
        rest = end_groups['rest']

    result = TimeFrame(result['start_date'], result['start_time'], result['end_date'],
                       result['end_time'], result['offset'])

    # Consider the whole string as 'rest' if no time/date info was extracted.
    if not rest:
        rest = text
    return (result, rest.strip())
def complete_timeframe(timeframe, config, partial=False):
    """
    Apply fallback strategy to incomplete timeframes.

    Our fallback strategy is as follows:

        * Missing start-date: Fallback to ``today``.
        * Missing start-time: Fallback to ``store.config['day_start']``.
        * Missing end-date: Fallback to ``today`` for ``day_start='00:00'``,
          ``tomorrow`` otherwise. See
          ``hamster_lib.helpers.end_day_to_datetime`` for details and
          explanations.
        * Missing end-time: 1 second before ``store.config['day_start']``.

    Args:
        timeframe (TimeFrame): ``TimeFrame`` instance incorporating all
            information available about the timespan. Any missing info will be
            completed per fallback strategy.
        config (dict): A config-dict providing settings relevant to determine
            fallback values.
        partial (bool, optional): If True, only complete start/end times when
            at least either date or time information is present. Defaults to
            ``False``.

    Returns:
        tuple: ``(start, end)`` tuple of full ``datetime.datetime`` instances
            (either element may stay ``None`` when ``partial=True``).

    Raises:
        TypeError: If any of the ``timeframe`` values is of inappropriate
            datetime type.
    """
    def complete_start_date(date):
        """
        Fall back to ``today`` if no date is given; otherwise validate its type.

        Note:
            Reference behavior taken from [hamster-cli](https://github.com/
            projecthamster/hamster/blob/master/src/hamster-cli#L368).
        """
        if not date:
            date = datetime.date.today()
        elif not isinstance(date, datetime.date):
            raise TypeError(_(
                "Expected datetime.date instance, got {type} instead.".format(
                    type=type(date))
            ))
        return date

    def complete_start_time(time, day_start):
        """Assign ``day_start`` if no start-time is given; otherwise validate type."""
        if not time:
            time = day_start
        elif not isinstance(time, datetime.time):
            raise TypeError(_(
                "Expected datetime.time instance, got {type} instead.".format(
                    type=type(time))
            ))
        return time

    def complete_start(date, time, config):
        # Bugfix: use the passed-in ``date``/``time`` arguments instead of
        # reaching back into the enclosing ``timeframe`` (the parameters used
        # to be ignored; behavior at the single call site is unchanged).
        return datetime.datetime.combine(
            complete_start_date(date),
            complete_start_time(time, config['day_start']),
        )

    def complete_end_date(date):
        """Fall back to ``today`` if no end-date is given; otherwise validate type."""
        if not date:
            date = datetime.date.today()
        elif not isinstance(date, datetime.date):
            raise TypeError(_(
                "Expected datetime.date instance, got {type} instead.".format(
                    type=type(date))
            ))
        return date

    def complete_end(date, time, config):
        date = complete_end_date(date)
        if time:
            result = datetime.datetime.combine(date, time)
        else:
            # No explicit end-time: the day ends just before the next day_start.
            result = end_day_to_datetime(date, config)
        return result

    start, end = None, None
    if any((timeframe.offset, timeframe.start_time, timeframe.start_date)) or not partial:
        if timeframe.offset:
            # Relative timeframe: start is "offset before now".
            start = datetime.datetime.now() - timeframe.offset
        else:
            start = complete_start(timeframe.start_date, timeframe.start_time, config)
    if any((timeframe.end_date, timeframe.end_time)) or not partial:
        end = complete_end(timeframe.end_date, timeframe.end_time, config)
    return (start, end)
def parse_time(time):
    """
    Parse a singular date, time or datetime string.

    Args:
        time (str): A string in one of the following formats: ``%H:%M``,
            ``%H:%M:%S``, ``%Y-%m-%d``, ``%Y-%m-%d %H:%M`` or
            ``%Y-%m-%d %H:%M:%S``.

    Returns:
        datetime.datetime: Depending on the input string either a
            ``datetime.date``, ``datetime.time`` or ``datetime.datetime``
            instance.

    Raises:
        ValueError: If ``time`` can not be matched against any of the
            accepted formats.
    """
    word_count = len(time.strip().split())
    if word_count == 1:
        # Time-of-day first (with, then without seconds); a plain date is the
        # last resort and lets its ValueError propagate.
        for time_format in ('%H:%M:%S', '%H:%M'):
            try:
                return datetime.datetime.strptime(time, time_format).time()
            except ValueError:
                pass
        return datetime.datetime.strptime(time, '%Y-%m-%d').date()
    if word_count == 2:
        try:
            return datetime.datetime.strptime(time, '%Y-%m-%d %H:%M:%S')
        except ValueError:
            return datetime.datetime.strptime(time, '%Y-%m-%d %H:%M')
    raise ValueError(_(
        "String does not seem to be in one of our supported time formats."
    ))
def validate_start_end_range(range_tuple):
    """
    Perform basic sanity checks on a timeframe.

    Args:
        range_tuple (tuple): ``(start, end)`` tuple as returned by
            ``complete_timeframe``.

    Returns:
        tuple: The unchanged ``(start, end)`` tuple that passed validation.

    Raises:
        ValueError: If start > end.

    Note:
        ``timeframes`` may be incomplete (elements may be ``None``),
        especially if ``complete_timeframe(partial=True)`` was used to
        construct them; such partial tuples are accepted as-is.
    """
    start, end = range_tuple
    # Only comparable when both endpoints are actually present.
    if start and end and start > end:
        raise ValueError(_("Start after end!"))
    return range_tuple
|
projecthamster/hamster-lib | hamster_lib/helpers/time.py | extract_time_info | python | def extract_time_info(text):
# [TODO] Add a list of supported formats.
def get_time(time, seconds=None):
"""Convert a times string representation to datetime.time instance."""
if time is None:
return time
if seconds:
time_format = '%H:%M:%S'
else:
time_format = '%H:%M'
return datetime.datetime.strptime(time.strip(), time_format).time()
def get_date(date):
"""Convert a dates string representation to datetime.date instance."""
if date:
date = datetime.datetime.strptime(date.strip(), "%Y-%m-%d").date()
return date
def date_time_from_groupdict(groupdict):
"""Return a date/time tuple by introspecting a passed dict."""
if groupdict['datetime']:
dt = parse_time(groupdict['datetime'])
time = dt.time()
date = dt.date()
else:
date = get_date(groupdict.get('date'))
time = get_time(groupdict.get('time'), groupdict.get('seconds'))
return (date, time)
# Baseline/default values.
result = {
'start_date': None,
'start_time': None,
'end_date': None,
'end_time': None,
'offset': None
}
rest = None
# Individual patterns for time/date substrings.
relative_pattern = '(?P<relative>-\d+)'
time_pattern = '(?P<time>\d{2}:\d{2}(?P<seconds>:\d{2})?)'
date_pattern = '(?P<date>\d{4}-\d{2}-\d{2})'
datetime_pattern = '(?P<datetime>\d{4}-\d{2}-\d{2} \d{2}:\d{2}(:\d{2})?)'
start = re.match('^({}|{}|{}|{}) (?P<rest>.+)'.format(relative_pattern, datetime_pattern,
date_pattern, time_pattern), text)
if start:
start_groups = start.groupdict()
if start_groups['relative']:
result['offset'] = datetime.timedelta(minutes=abs(int(start_groups['relative'])))
else:
date, time = date_time_from_groupdict(start_groups)
result['start_date'] = date
result['start_time'] = time
rest = start_groups['rest']
if rest:
end = re.match('^- ({}|{}|{}) (?P<rest>.+)'.format(datetime_pattern, date_pattern,
time_pattern), rest)
else:
end = None
if end and not start_groups['relative']:
end_groups = end.groupdict()
date, time = date_time_from_groupdict(end_groups)
result['end_date'] = date
result['end_time'] = time
rest = end_groups['rest']
result = TimeFrame(result['start_date'], result['start_time'], result['end_date'],
result['end_time'], result['offset'])
# Consider the whole string as 'rest' if no time/date info was extracted
if not rest:
rest = text
return (result, rest.strip()) | Extract valid time(-range) information from a string according to our specs.
Args:
text (text_type): Raw string containing encoded time(-span) information.
Date/Time-combinations are expected in a ``YYYY-MM-DD hh:mm`` format.
Relative times can be given with ``-minutes``.
Please note that either *relative* or *absolute* times will be considered.
It is possible to either just specify a start date (as time, date,
or datetime) or a timerange (start and end). If a timerange is given
start and end need to be delimited exactly by ' - '.
Returns:
tuple: ``(timeframe, rest)`` tuple. Where ``timeframe`` is a tuple that
provides convinient access to all seperate elements extracted from
the raw string and ``rest`` is any substring stat has not been
matched to valid time/date info.
Note:
* Relative times always return just ``(None, None, None, None, timedelta)``. | train | https://github.com/projecthamster/hamster-lib/blob/bc34c822c239a6fa0cde3a4f90b0d00506fb5a4f/hamster_lib/helpers/time.py#L85-L186 | [
"def date_time_from_groupdict(groupdict):\n \"\"\"Return a date/time tuple by introspecting a passed dict.\"\"\"\n if groupdict['datetime']:\n dt = parse_time(groupdict['datetime'])\n time = dt.time()\n date = dt.date()\n else:\n date = get_date(groupdict.get('date'))\n time = get_time(groupdict.get('time'), groupdict.get('seconds'))\n return (date, time)\n"
] | # -*- encoding: utf-8 -*-
# Copyright (C) 2015-2016 Eric Goller <eric.goller@ninjaduck.solutions>
# This file is part of 'hamster-lib'.
#
# 'hamster-lib' is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# 'hamster-lib' is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with 'hamster-lib'. If not, see <http://www.gnu.org/licenses/>.
"""This module provides several time realted convinience functions."""
from __future__ import absolute_import, unicode_literals
import datetime
import re
from collections import namedtuple
TimeFrame = namedtuple('Timeframe', ('start_date', 'start_time',
'end_date', 'end_time', 'offset'))
def get_day_end(config):
    """
    Derive the day-end time from the configured day start (assumes a full 24h day).

    Args:
        config (dict): Config dict. Needed to extract ``day_start``.

    Returns:
        datetime.time: The time one second before ``day_start``.

    Note:
        This is merely a convenience function so we do not have to derive
        this from ``day_start`` by hand all the time.
    """
    # Anchor on an arbitrary date so timedelta arithmetic works on a time.
    anchor = datetime.datetime.combine(datetime.date.today(), config['day_start'])
    return (anchor - datetime.timedelta(seconds=1)).time()
def end_day_to_datetime(end_day, config):
    """
    Translate an end *day* into the datetime that actually terminates it.

    This is non-trivial because of the variable ``day_start``: even when an
    'end day' is specified, the actual point in time may reach into the
    following calendar day.

    Args:
        end_day (datetime.date): Raw end date that is to be adjusted.
        config: Controller config containing information on when a workday
            starts (``day_start``).

    Returns:
        datetime.datetime: The end day as an adjusted datetime object.

    Example:
        Given a ``day_start`` of ``5:30`` and an end date of ``2015-04-01``
        we actually want to consider points in time up to ``2015-04-02 5:29``,
        reflecting that a *work day* does not match *calendar days*.

    Note:
        An alternative implementation for the similar problem in legacy
        hamster: ``hamster.storage.db.Storage.__get_todays_facts``.
    """
    day_end = datetime.datetime.combine(end_day, get_day_end(config))
    # With a midnight day_start, workdays and calendar days coincide; any
    # other day_start pushes the cut-off into the next calendar day.
    if config['day_start'] != datetime.time(0, 0, 0):
        day_end += datetime.timedelta(days=1)
    return day_end
def complete_timeframe(timeframe, config, partial=False):
"""
Apply fallback strategy to incomplete timeframes.
Our fallback strategy is as follows:
* Missing start-date: Fallback to ``today``.
* Missing start-time: Fallback to ``store.config['day_start']``.
* Missing end-date: Fallback to ``today`` for ``day_start='00:00`,
``tomorrow`` otherwise.
See ``hamster_lib.helpers.end_day_to_datetime`` for details and
explanations.
* Missing end-time: 1 second before ``store.config['day_start']``.
Args:
timeframe (TimeFrame): ``TimeFrame`` instance incorporating all
available information available about the timespan. Any missing info
will be completed per fallback strategy.
config (dict): A config-dict providing settings relevant to determine
fallback values.
partial (bool, optional): If true, we will only complete start/end times if there
is at least either date or time information present. Defaults to
``False``.
Returns:
tuple: ``(start, end)`` tuple. Where ``start`` and ``end`` are full
``datetime.datetime`` instances.
Raises:
TypeError: If any of the ``timeframe`` values is of inappropriate
datetime type.
"""
def complete_start_date(date):
"""
Assign ``today`` if ``date=None``, else ensure its a ``datetime.date`` instance.
Args:
date (datetime.date): Startdate information.
Returns:
datetime.date: Either the original date or the default solution.
Raises:
TypeError: If ``date``` is neither ``None`` nor ``datetime.date`` instance.
Note:
Reference behavior taken from [hamster-cli](https://github.com/projecthamster/
hamster/blob/master/src/hamster-cli#L368).
"""
if not date:
date = datetime.date.today()
else:
if not isinstance(date, datetime.date):
raise TypeError(_(
"Expected datetime.date instance, got {type} instead.".format(
type=type(date))
))
return date
def complete_start_time(time, day_start):
"""Assign ``day_start`` if no start-time is given."""
if not time:
time = day_start
else:
if not isinstance(time, datetime.time):
raise TypeError(_(
"Expected datetime.time instance, got {type} instead.".format(
type=type(time))
))
return time
def complete_start(date, time, config):
return datetime.datetime.combine(
complete_start_date(timeframe.start_date),
complete_start_time(timeframe.start_time, config['day_start']),
)
def complete_end_date(date):
if not date:
date = datetime.date.today()
else:
if not isinstance(date, datetime.date):
raise TypeError(_(
"Expected datetime.date instance, got {type} instead.".format(
type=type(date))
))
return date
    def complete_end(date, time, config):
        # Build the end datetime: an explicit end time is combined verbatim
        # with the (possibly defaulted) end date; otherwise the end of the
        # working day is used, which may reach into the next calendar day
        # (see ``end_day_to_datetime``).
        date = complete_end_date(date)
        if time:
            result = datetime.datetime.combine(date, time)
        else:
            result = end_day_to_datetime(date, config)
        return result
    start, end = None, None
    # Only resolve a start if any start information is present at all, or if
    # the caller asked for unconditional completion (``partial=False``).
    if any((timeframe.offset, timeframe.start_time, timeframe.start_date)) or not partial:
        if not timeframe.offset:
            start = complete_start(timeframe.start_date, timeframe.start_time, config)
        else:
            # A relative offset is always anchored on "now".
            start = datetime.datetime.now() - timeframe.offset
    # Same completion logic for the end of the timeframe.
    if any((timeframe.end_date, timeframe.end_time)) or not partial:
        end = complete_end(timeframe.end_date, timeframe.end_time, config)
    return (start, end)
def parse_time(time):
    """
    Parse a single date, time or datetime string into the matching object.

    Args:
        time (str): A string in one of the following formats: ``%H:%M``,
            ``%H:%M:%S``, ``%Y-%m-%d``, ``%Y-%m-%d %H:%M`` or
            ``%Y-%m-%d %H:%M:%S``.

    Returns:
        datetime.datetime: Depending on the input string either a
        ``datetime.date``, ``datetime.time`` or ``datetime.datetime``
        instance.

    Raises:
        ValueError: If ``time`` can not be matched against any of the
            accepted formats.

    Note:
        This parses just a singular date, time or datetime representation.
    """
    token_count = len(time.strip().split())
    if token_count == 1:
        # Try the time-only formats first (with, then without seconds); the
        # final date attempt propagates its own ValueError on failure.
        for fmt in ('%H:%M:%S', '%H:%M'):
            try:
                return datetime.datetime.strptime(time, fmt).time()
            except ValueError:
                pass
        return datetime.datetime.strptime(time, '%Y-%m-%d').date()
    if token_count == 2:
        try:
            return datetime.datetime.strptime(time, '%Y-%m-%d %H:%M:%S')
        except ValueError:
            return datetime.datetime.strptime(time, '%Y-%m-%d %H:%M')
    raise ValueError(_(
        "String does not seem to be in one of our supported time formats."
    ))
def validate_start_end_range(range_tuple):
    """
    Perform basic sanity checks on a timeframe.

    Args:
        range_tuple (tuple): ``(start, end)`` tuple as returned by
            ``complete_timeframe``.

    Returns:
        tuple: The unmodified ``(start, end)`` tuple once it passed
            validation.

    Raises:
        ValueError: If start > end.

    Note:
        ``timeframes`` may be incomplete, especially if
        ``complete_timeframe(partial=True)`` has been used to construct them.
    """
    start, end = range_tuple
    if all((start, end)) and start > end:
        raise ValueError(_("Start after end!"))
    return range_tuple
|
projecthamster/hamster-lib | hamster_lib/helpers/time.py | complete_timeframe | python | def complete_timeframe(timeframe, config, partial=False):
def complete_start_date(date):
"""
Assign ``today`` if ``date=None``, else ensure its a ``datetime.date`` instance.
Args:
date (datetime.date): Startdate information.
Returns:
datetime.date: Either the original date or the default solution.
Raises:
TypeError: If ``date``` is neither ``None`` nor ``datetime.date`` instance.
Note:
Reference behavior taken from [hamster-cli](https://github.com/projecthamster/
hamster/blob/master/src/hamster-cli#L368).
"""
if not date:
date = datetime.date.today()
else:
if not isinstance(date, datetime.date):
raise TypeError(_(
"Expected datetime.date instance, got {type} instead.".format(
type=type(date))
))
return date
def complete_start_time(time, day_start):
"""Assign ``day_start`` if no start-time is given."""
if not time:
time = day_start
else:
if not isinstance(time, datetime.time):
raise TypeError(_(
"Expected datetime.time instance, got {type} instead.".format(
type=type(time))
))
return time
def complete_start(date, time, config):
return datetime.datetime.combine(
complete_start_date(timeframe.start_date),
complete_start_time(timeframe.start_time, config['day_start']),
)
def complete_end_date(date):
if not date:
date = datetime.date.today()
else:
if not isinstance(date, datetime.date):
raise TypeError(_(
"Expected datetime.date instance, got {type} instead.".format(
type=type(date))
))
return date
def complete_end(date, time, config):
date = complete_end_date(date)
if time:
result = datetime.datetime.combine(date, time)
else:
result = end_day_to_datetime(date, config)
return result
start, end = None, None
if any((timeframe.offset, timeframe.start_time, timeframe.start_date)) or not partial:
if not timeframe.offset:
start = complete_start(timeframe.start_date, timeframe.start_time, config)
else:
start = datetime.datetime.now() - timeframe.offset
if any((timeframe.end_date, timeframe.end_time)) or not partial:
end = complete_end(timeframe.end_date, timeframe.end_time, config)
return (start, end) | Apply fallback strategy to incomplete timeframes.
Our fallback strategy is as follows:
* Missing start-date: Fallback to ``today``.
* Missing start-time: Fallback to ``store.config['day_start']``.
* Missing end-date: Fallback to ``today`` for ``day_start='00:00`,
``tomorrow`` otherwise.
See ``hamster_lib.helpers.end_day_to_datetime`` for details and
explanations.
* Missing end-time: 1 second before ``store.config['day_start']``.
Args:
timeframe (TimeFrame): ``TimeFrame`` instance incorporating all
available information available about the timespan. Any missing info
will be completed per fallback strategy.
config (dict): A config-dict providing settings relevant to determine
fallback values.
partial (bool, optional): If true, we will only complete start/end times if there
is at least either date or time information present. Defaults to
``False``.
Returns:
tuple: ``(start, end)`` tuple. Where ``start`` and ``end`` are full
``datetime.datetime`` instances.
Raises:
TypeError: If any of the ``timeframe`` values is of inappropriate
datetime type. | train | https://github.com/projecthamster/hamster-lib/blob/bc34c822c239a6fa0cde3a4f90b0d00506fb5a4f/hamster_lib/helpers/time.py#L189-L297 | [
"def complete_start(date, time, config):\n return datetime.datetime.combine(\n complete_start_date(timeframe.start_date),\n complete_start_time(timeframe.start_time, config['day_start']),\n )\n",
"def complete_end(date, time, config):\n date = complete_end_date(date)\n if time:\n result = datetime.datetime.combine(date, time)\n else:\n result = end_day_to_datetime(date, config)\n return result\n"
] | # -*- encoding: utf-8 -*-
# Copyright (C) 2015-2016 Eric Goller <eric.goller@ninjaduck.solutions>
# This file is part of 'hamster-lib'.
#
# 'hamster-lib' is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# 'hamster-lib' is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with 'hamster-lib'. If not, see <http://www.gnu.org/licenses/>.
"""This module provides several time realted convinience functions."""
from __future__ import absolute_import, unicode_literals
import datetime
import re
from collections import namedtuple
TimeFrame = namedtuple('Timeframe', ('start_date', 'start_time',
'end_date', 'end_time', 'offset'))
def get_day_end(config):
    """
    Return the day-end time derived from the configured day start.

    Args:
        config (dict): Config dict; only ``day_start`` is consulted.

    Returns:
        datetime.time: One second before ``day_start``, assuming a full
        24h day.

    Note:
        Merely a convenience function so callers do not have to derive the
        day end from ``day_start`` by hand all the time.
    """
    anchor = datetime.datetime.combine(datetime.date.today(), config['day_start'])
    return (anchor - datetime.timedelta(seconds=1)).time()
def end_day_to_datetime(end_day, config):
    """
    Convert a given end day to its proper datetime.

    This is non trivial because of the variable ``day_start``: even when an
    'end day' is specified, the actual point in time may reach into the
    following calendar day.

    Args:
        end_day (datetime.date): Raw end date that is to be adjusted.
        config: Controller config containing information on when a workday
            starts.

    Returns:
        datetime.datetime: The end day as an adjusted datetime object.

    Example:
        Given a ``day_start`` of ``5:30`` and an end date of ``2015-04-01``
        we actually want to consider even points in time up to
        ``2015-04-02 5:29``. That is to represent that a *work day* does not
        match *calendar days*.

    Note:
        An alternative implementation for the similar problem in legacy
        hamster: ``hamster.storage.db.Storage.__get_todays_facts``.
    """
    closing_time = get_day_end(config)
    result = datetime.datetime.combine(end_day, closing_time)
    # Unless the work day matches the calendar day exactly, the end reaches
    # into the following calendar day.
    if config['day_start'] != datetime.time(0, 0, 0):
        result += datetime.timedelta(days=1)
    return result
def extract_time_info(text):
    """
    Extract valid time(-range) information from a string according to our specs.

    Args:
        text (text_type): Raw string containing encoded time(-span) information.
            Date/Time-combinations are expected in a ``YYYY-MM-DD hh:mm`` format.
            Relative times can be given with ``-minutes``.
            Please note that either *relative* or *absolute* times will be
            considered. It is possible to either just specify a start date (as
            time, date, or datetime) or a timerange (start and end). If a
            timerange is given start and end need to be delimited exactly by
            ' - '.

    Returns:
        tuple: ``(timeframe, rest)`` tuple. Where ``timeframe`` is a tuple that
            provides convenient access to all separate elements extracted from
            the raw string and ``rest`` is any substring that has not been
            matched to valid time/date info.

    Note:
        * Relative times always return just ``(None, None, None, None, timedelta)``.
    """
    # [TODO] Add a list of supported formats.

    def get_time(time, seconds=None):
        """Convert a time's string representation to a datetime.time instance."""
        if time is None:
            return time
        if seconds:
            time_format = '%H:%M:%S'
        else:
            time_format = '%H:%M'
        return datetime.datetime.strptime(time.strip(), time_format).time()

    def get_date(date):
        """Convert a date's string representation to a datetime.date instance."""
        if date:
            date = datetime.datetime.strptime(date.strip(), "%Y-%m-%d").date()
        return date

    def date_time_from_groupdict(groupdict):
        """Return a date/time tuple by introspecting a passed dict."""
        if groupdict['datetime']:
            dt = parse_time(groupdict['datetime'])
            time = dt.time()
            date = dt.date()
        else:
            date = get_date(groupdict.get('date'))
            time = get_time(groupdict.get('time'), groupdict.get('seconds'))
        return (date, time)

    # Baseline/default values.
    result = {
        'start_date': None,
        'start_time': None,
        'end_date': None,
        'end_time': None,
        'offset': None
    }
    rest = None

    # Individual patterns for time/date substrings. Raw strings avoid the
    # "invalid escape sequence" SyntaxWarning for ``\d`` on modern Pythons;
    # the resulting pattern values are byte-identical to the originals.
    relative_pattern = r'(?P<relative>-\d+)'
    time_pattern = r'(?P<time>\d{2}:\d{2}(?P<seconds>:\d{2})?)'
    date_pattern = r'(?P<date>\d{4}-\d{2}-\d{2})'
    datetime_pattern = r'(?P<datetime>\d{4}-\d{2}-\d{2} \d{2}:\d{2}(:\d{2})?)'

    start = re.match('^({}|{}|{}|{}) (?P<rest>.+)'.format(relative_pattern, datetime_pattern,
                                                          date_pattern, time_pattern), text)
    if start:
        start_groups = start.groupdict()
        if start_groups['relative']:
            result['offset'] = datetime.timedelta(minutes=abs(int(start_groups['relative'])))
        else:
            date, time = date_time_from_groupdict(start_groups)
            result['start_date'] = date
            result['start_time'] = time
        rest = start_groups['rest']

    if rest:
        end = re.match('^- ({}|{}|{}) (?P<rest>.+)'.format(datetime_pattern, date_pattern,
                                                           time_pattern), rest)
    else:
        end = None
    if end and not start_groups['relative']:
        end_groups = end.groupdict()
        date, time = date_time_from_groupdict(end_groups)
        result['end_date'] = date
        result['end_time'] = time
        rest = end_groups['rest']

    result = TimeFrame(result['start_date'], result['start_time'], result['end_date'],
                       result['end_time'], result['offset'])

    # Consider the whole string as 'rest' if no time/date info was extracted.
    if not rest:
        rest = text
    return (result, rest.strip())
def parse_time(time):
    """
    Parse a single date, time or datetime string into the matching object.

    Args:
        time (str): A string in one of the following formats: ``%H:%M``,
            ``%H:%M:%S``, ``%Y-%m-%d``, ``%Y-%m-%d %H:%M`` or
            ``%Y-%m-%d %H:%M:%S``.

    Returns:
        datetime.datetime: Depending on the input string either a
        ``datetime.date``, ``datetime.time`` or ``datetime.datetime``
        instance.

    Raises:
        ValueError: If ``time`` can not be matched against any of the
            accepted formats.

    Note:
        This parses just a singular date, time or datetime representation.
    """
    token_count = len(time.strip().split())
    if token_count == 1:
        # Try the time-only formats first (with, then without seconds); the
        # final date attempt propagates its own ValueError on failure.
        for fmt in ('%H:%M:%S', '%H:%M'):
            try:
                return datetime.datetime.strptime(time, fmt).time()
            except ValueError:
                pass
        return datetime.datetime.strptime(time, '%Y-%m-%d').date()
    if token_count == 2:
        try:
            return datetime.datetime.strptime(time, '%Y-%m-%d %H:%M:%S')
        except ValueError:
            return datetime.datetime.strptime(time, '%Y-%m-%d %H:%M')
    raise ValueError(_(
        "String does not seem to be in one of our supported time formats."
    ))
def validate_start_end_range(range_tuple):
    """
    Perform basic sanity checks on a timeframe.

    Args:
        range_tuple (tuple): ``(start, end)`` tuple as returned by
            ``complete_timeframe``.

    Returns:
        tuple: The unmodified ``(start, end)`` tuple once it passed
            validation.

    Raises:
        ValueError: If start > end.

    Note:
        ``timeframes`` may be incomplete, especially if
        ``complete_timeframe(partial=True)`` has been used to construct them.
    """
    start, end = range_tuple
    if all((start, end)) and start > end:
        raise ValueError(_("Start after end!"))
    return range_tuple
|
projecthamster/hamster-lib | hamster_lib/helpers/time.py | parse_time | python | def parse_time(time):
length = len(time.strip().split())
if length == 1:
try:
result = datetime.datetime.strptime(time, '%H:%M:%S').time()
except ValueError:
try:
result = datetime.datetime.strptime(time, '%H:%M').time()
except ValueError:
result = datetime.datetime.strptime(time, '%Y-%m-%d').date()
elif length == 2:
try:
result = datetime.datetime.strptime(time, '%Y-%m-%d %H:%M:%S')
except ValueError:
result = datetime.datetime.strptime(time, '%Y-%m-%d %H:%M')
else:
raise ValueError(_(
"String does not seem to be in one of our supported time formats."
))
return result | Parse a date/time string and return a corresponding datetime object.
Args:
time (str): A ``string` of one of the following formats: ``%H:%M``, ``%Y-%m-%d`` or
``%Y-%m-%d %H:%M``.
Returns:
datetime.datetime: Depending on input string either returns ``datetime.date``,
``datetime.time`` or ``datetime.datetime``.
Raises:
ValueError: If ``time`` can not be matched against any of the accepted formats.
Note:
This parse just a singlular date, time or datetime representation. | train | https://github.com/projecthamster/hamster-lib/blob/bc34c822c239a6fa0cde3a4f90b0d00506fb5a4f/hamster_lib/helpers/time.py#L300-L337 | null | # -*- encoding: utf-8 -*-
# Copyright (C) 2015-2016 Eric Goller <eric.goller@ninjaduck.solutions>
# This file is part of 'hamster-lib'.
#
# 'hamster-lib' is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# 'hamster-lib' is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with 'hamster-lib'. If not, see <http://www.gnu.org/licenses/>.
"""This module provides several time realted convinience functions."""
from __future__ import absolute_import, unicode_literals
import datetime
import re
from collections import namedtuple
TimeFrame = namedtuple('Timeframe', ('start_date', 'start_time',
'end_date', 'end_time', 'offset'))
def get_day_end(config):
    """
    Return the day-end time derived from the configured day start.

    Args:
        config (dict): Config dict; only ``day_start`` is consulted.

    Returns:
        datetime.time: One second before ``day_start``, assuming a full
        24h day.

    Note:
        Merely a convenience function so callers do not have to derive the
        day end from ``day_start`` by hand all the time.
    """
    anchor = datetime.datetime.combine(datetime.date.today(), config['day_start'])
    return (anchor - datetime.timedelta(seconds=1)).time()
def end_day_to_datetime(end_day, config):
    """
    Convert a given end day to its proper datetime.

    This is non trivial because of the variable ``day_start``: even when an
    'end day' is specified, the actual point in time may reach into the
    following calendar day.

    Args:
        end_day (datetime.date): Raw end date that is to be adjusted.
        config: Controller config containing information on when a workday
            starts.

    Returns:
        datetime.datetime: The end day as an adjusted datetime object.

    Example:
        Given a ``day_start`` of ``5:30`` and an end date of ``2015-04-01``
        we actually want to consider even points in time up to
        ``2015-04-02 5:29``. That is to represent that a *work day* does not
        match *calendar days*.

    Note:
        An alternative implementation for the similar problem in legacy
        hamster: ``hamster.storage.db.Storage.__get_todays_facts``.
    """
    closing_time = get_day_end(config)
    result = datetime.datetime.combine(end_day, closing_time)
    # Unless the work day matches the calendar day exactly, the end reaches
    # into the following calendar day.
    if config['day_start'] != datetime.time(0, 0, 0):
        result += datetime.timedelta(days=1)
    return result
def extract_time_info(text):
    """
    Extract valid time(-range) information from a string according to our specs.

    Args:
        text (text_type): Raw string containing encoded time(-span) information.
            Date/Time-combinations are expected in a ``YYYY-MM-DD hh:mm`` format.
            Relative times can be given with ``-minutes``.
            Please note that either *relative* or *absolute* times will be
            considered. It is possible to either just specify a start date (as
            time, date, or datetime) or a timerange (start and end). If a
            timerange is given start and end need to be delimited exactly by
            ' - '.

    Returns:
        tuple: ``(timeframe, rest)`` tuple. Where ``timeframe`` is a tuple that
            provides convenient access to all separate elements extracted from
            the raw string and ``rest`` is any substring that has not been
            matched to valid time/date info.

    Note:
        * Relative times always return just ``(None, None, None, None, timedelta)``.
    """
    # [TODO] Add a list of supported formats.

    def get_time(time, seconds=None):
        """Convert a time's string representation to a datetime.time instance."""
        if time is None:
            return time
        if seconds:
            time_format = '%H:%M:%S'
        else:
            time_format = '%H:%M'
        return datetime.datetime.strptime(time.strip(), time_format).time()

    def get_date(date):
        """Convert a date's string representation to a datetime.date instance."""
        if date:
            date = datetime.datetime.strptime(date.strip(), "%Y-%m-%d").date()
        return date

    def date_time_from_groupdict(groupdict):
        """Return a date/time tuple by introspecting a passed dict."""
        if groupdict['datetime']:
            dt = parse_time(groupdict['datetime'])
            time = dt.time()
            date = dt.date()
        else:
            date = get_date(groupdict.get('date'))
            time = get_time(groupdict.get('time'), groupdict.get('seconds'))
        return (date, time)

    # Baseline/default values.
    result = {
        'start_date': None,
        'start_time': None,
        'end_date': None,
        'end_time': None,
        'offset': None
    }
    rest = None

    # Individual patterns for time/date substrings. Raw strings avoid the
    # "invalid escape sequence" SyntaxWarning for ``\d`` on modern Pythons;
    # the resulting pattern values are byte-identical to the originals.
    relative_pattern = r'(?P<relative>-\d+)'
    time_pattern = r'(?P<time>\d{2}:\d{2}(?P<seconds>:\d{2})?)'
    date_pattern = r'(?P<date>\d{4}-\d{2}-\d{2})'
    datetime_pattern = r'(?P<datetime>\d{4}-\d{2}-\d{2} \d{2}:\d{2}(:\d{2})?)'

    start = re.match('^({}|{}|{}|{}) (?P<rest>.+)'.format(relative_pattern, datetime_pattern,
                                                          date_pattern, time_pattern), text)
    if start:
        start_groups = start.groupdict()
        if start_groups['relative']:
            result['offset'] = datetime.timedelta(minutes=abs(int(start_groups['relative'])))
        else:
            date, time = date_time_from_groupdict(start_groups)
            result['start_date'] = date
            result['start_time'] = time
        rest = start_groups['rest']

    if rest:
        end = re.match('^- ({}|{}|{}) (?P<rest>.+)'.format(datetime_pattern, date_pattern,
                                                           time_pattern), rest)
    else:
        end = None
    if end and not start_groups['relative']:
        end_groups = end.groupdict()
        date, time = date_time_from_groupdict(end_groups)
        result['end_date'] = date
        result['end_time'] = time
        rest = end_groups['rest']

    result = TimeFrame(result['start_date'], result['start_time'], result['end_date'],
                       result['end_time'], result['offset'])

    # Consider the whole string as 'rest' if no time/date info was extracted.
    if not rest:
        rest = text
    return (result, rest.strip())
def complete_timeframe(timeframe, config, partial=False):
    """
    Apply fallback strategy to incomplete timeframes.

    Our fallback strategy is as follows:
        * Missing start-date: Fallback to ``today``.
        * Missing start-time: Fallback to ``store.config['day_start']``.
        * Missing end-date: Fallback to ``today`` for ``day_start='00:00'``,
          ``tomorrow`` otherwise. See
          ``hamster_lib.helpers.end_day_to_datetime`` for details.
        * Missing end-time: 1 second before ``store.config['day_start']``.

    Args:
        timeframe (TimeFrame): ``TimeFrame`` instance incorporating all
            information available about the timespan. Any missing info will
            be completed per fallback strategy.
        config (dict): A config-dict providing settings relevant to determine
            fallback values.
        partial (bool, optional): If True, we will only complete start/end
            times if there is at least either date or time information
            present. Defaults to ``False``.

    Returns:
        tuple: ``(start, end)`` tuple. Where ``start`` and ``end`` are full
            ``datetime.datetime`` instances, or ``None`` where ``partial``
            left them uncompleted.

    Raises:
        TypeError: If any of the ``timeframe`` values is of inappropriate
            datetime type.
    """
    def complete_start_date(date):
        """Return ``date`` or fall back to ``today``; reject non-date values."""
        if not date:
            return datetime.date.today()
        if not isinstance(date, datetime.date):
            raise TypeError(_(
                "Expected datetime.date instance, got {type} instead.".format(
                    type=type(date))
            ))
        return date

    def complete_start_time(time, day_start):
        """Assign ``day_start`` if no start-time is given."""
        if not time:
            return day_start
        if not isinstance(time, datetime.time):
            raise TypeError(_(
                "Expected datetime.time instance, got {type} instead.".format(
                    type=type(time))
            ))
        return time

    def complete_start(date, time, config):
        # Use the arguments instead of reaching into the enclosing
        # ``timeframe`` closure; callers pass the very same values, so
        # behavior is unchanged while the helper honours what it is given.
        return datetime.datetime.combine(
            complete_start_date(date),
            complete_start_time(time, config['day_start']),
        )

    def complete_end_date(date):
        """Return ``date`` or fall back to ``today``; reject non-date values."""
        if not date:
            return datetime.date.today()
        if not isinstance(date, datetime.date):
            raise TypeError(_(
                "Expected datetime.date instance, got {type} instead.".format(
                    type=type(date))
            ))
        return date

    def complete_end(date, time, config):
        # An explicit end time is combined verbatim; otherwise the end of
        # the working day is used (which may reach into the next calendar
        # day, see ``end_day_to_datetime``).
        date = complete_end_date(date)
        if time:
            result = datetime.datetime.combine(date, time)
        else:
            result = end_day_to_datetime(date, config)
        return result

    start, end = None, None
    # Only resolve a start/end if any corresponding information is present,
    # or if unconditional completion was requested (``partial=False``).
    if any((timeframe.offset, timeframe.start_time, timeframe.start_date)) or not partial:
        if not timeframe.offset:
            start = complete_start(timeframe.start_date, timeframe.start_time, config)
        else:
            # A relative offset is always anchored on "now".
            start = datetime.datetime.now() - timeframe.offset
    if any((timeframe.end_date, timeframe.end_time)) or not partial:
        end = complete_end(timeframe.end_date, timeframe.end_time, config)
    return (start, end)
def validate_start_end_range(range_tuple):
    """
    Perform basic sanity checks on a timeframe.

    Args:
        range_tuple (tuple): ``(start, end)`` tuple as returned by
            ``complete_timeframe``.

    Returns:
        tuple: The unmodified ``(start, end)`` tuple once it passed
            validation.

    Raises:
        ValueError: If start > end.

    Note:
        ``timeframes`` may be incomplete, especially if
        ``complete_timeframe(partial=True)`` has been used to construct them.
    """
    start, end = range_tuple
    if all((start, end)) and start > end:
        raise ValueError(_("Start after end!"))
    return range_tuple
|
projecthamster/hamster-lib | hamster_lib/helpers/time.py | validate_start_end_range | python | def validate_start_end_range(range_tuple):
start, end = range_tuple
if (start and end) and (start > end):
raise ValueError(_("Start after end!"))
return range_tuple | Perform basic sanity checks on a timeframe.
Args:
range_tuple (tuple): ``(start, end)`` tuple as returned by
``complete_timeframe``.
Raises:
ValueError: If start > end.
Returns:
tuple: ``(start, end)`` tuple that passed validation.
Note:
``timeframes`` may be incomplete, especially if ``complete_timeframe(partial=True)`` has
been used to construct them. | train | https://github.com/projecthamster/hamster-lib/blob/bc34c822c239a6fa0cde3a4f90b0d00506fb5a4f/hamster_lib/helpers/time.py#L340-L364 | null | # -*- encoding: utf-8 -*-
# Copyright (C) 2015-2016 Eric Goller <eric.goller@ninjaduck.solutions>
# This file is part of 'hamster-lib'.
#
# 'hamster-lib' is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# 'hamster-lib' is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with 'hamster-lib'. If not, see <http://www.gnu.org/licenses/>.
"""This module provides several time realted convinience functions."""
from __future__ import absolute_import, unicode_literals
import datetime
import re
from collections import namedtuple
TimeFrame = namedtuple('Timeframe', ('start_date', 'start_time',
'end_date', 'end_time', 'offset'))
def get_day_end(config):
    """
    Return the day-end time derived from the configured day start.

    Args:
        config (dict): Config dict; only ``day_start`` is consulted.

    Returns:
        datetime.time: One second before ``day_start``, assuming a full
        24h day.

    Note:
        Merely a convenience function so callers do not have to derive the
        day end from ``day_start`` by hand all the time.
    """
    anchor = datetime.datetime.combine(datetime.date.today(), config['day_start'])
    return (anchor - datetime.timedelta(seconds=1)).time()
def end_day_to_datetime(end_day, config):
    """
    Convert a given end day to its proper datetime.

    This is non trivial because of the variable ``day_start``: even when an
    'end day' is specified, the actual point in time may reach into the
    following calendar day.

    Args:
        end_day (datetime.date): Raw end date that is to be adjusted.
        config: Controller config containing information on when a workday
            starts.

    Returns:
        datetime.datetime: The end day as an adjusted datetime object.

    Example:
        Given a ``day_start`` of ``5:30`` and an end date of ``2015-04-01``
        we actually want to consider even points in time up to
        ``2015-04-02 5:29``. That is to represent that a *work day* does not
        match *calendar days*.

    Note:
        An alternative implementation for the similar problem in legacy
        hamster: ``hamster.storage.db.Storage.__get_todays_facts``.
    """
    closing_time = get_day_end(config)
    result = datetime.datetime.combine(end_day, closing_time)
    # Unless the work day matches the calendar day exactly, the end reaches
    # into the following calendar day.
    if config['day_start'] != datetime.time(0, 0, 0):
        result += datetime.timedelta(days=1)
    return result
def extract_time_info(text):
    """
    Extract valid time(-range) information from a string according to our specs.

    Args:
        text (text_type): Raw string containing encoded time(-span) information.
            Date/Time-combinations are expected in a ``YYYY-MM-DD hh:mm`` format.
            Relative times can be given with ``-minutes``.
            Please note that either *relative* or *absolute* times will be
            considered. It is possible to either just specify a start date (as
            time, date, or datetime) or a timerange (start and end). If a
            timerange is given start and end need to be delimited exactly by
            ' - '.

    Returns:
        tuple: ``(timeframe, rest)`` tuple. Where ``timeframe`` is a tuple that
            provides convenient access to all separate elements extracted from
            the raw string and ``rest`` is any substring that has not been
            matched to valid time/date info.

    Note:
        * Relative times always return just ``(None, None, None, None, timedelta)``.
    """
    # [TODO] Add a list of supported formats.

    def get_time(time, seconds=None):
        """Convert a time's string representation to a datetime.time instance."""
        if time is None:
            return time
        if seconds:
            time_format = '%H:%M:%S'
        else:
            time_format = '%H:%M'
        return datetime.datetime.strptime(time.strip(), time_format).time()

    def get_date(date):
        """Convert a date's string representation to a datetime.date instance."""
        if date:
            date = datetime.datetime.strptime(date.strip(), "%Y-%m-%d").date()
        return date

    def date_time_from_groupdict(groupdict):
        """Return a date/time tuple by introspecting a passed dict."""
        if groupdict['datetime']:
            dt = parse_time(groupdict['datetime'])
            time = dt.time()
            date = dt.date()
        else:
            date = get_date(groupdict.get('date'))
            time = get_time(groupdict.get('time'), groupdict.get('seconds'))
        return (date, time)

    # Baseline/default values.
    result = {
        'start_date': None,
        'start_time': None,
        'end_date': None,
        'end_time': None,
        'offset': None
    }
    rest = None

    # Individual patterns for time/date substrings. Raw strings avoid the
    # "invalid escape sequence" SyntaxWarning for ``\d`` on modern Pythons;
    # the resulting pattern values are byte-identical to the originals.
    relative_pattern = r'(?P<relative>-\d+)'
    time_pattern = r'(?P<time>\d{2}:\d{2}(?P<seconds>:\d{2})?)'
    date_pattern = r'(?P<date>\d{4}-\d{2}-\d{2})'
    datetime_pattern = r'(?P<datetime>\d{4}-\d{2}-\d{2} \d{2}:\d{2}(:\d{2})?)'

    start = re.match('^({}|{}|{}|{}) (?P<rest>.+)'.format(relative_pattern, datetime_pattern,
                                                          date_pattern, time_pattern), text)
    if start:
        start_groups = start.groupdict()
        if start_groups['relative']:
            result['offset'] = datetime.timedelta(minutes=abs(int(start_groups['relative'])))
        else:
            date, time = date_time_from_groupdict(start_groups)
            result['start_date'] = date
            result['start_time'] = time
        rest = start_groups['rest']

    if rest:
        end = re.match('^- ({}|{}|{}) (?P<rest>.+)'.format(datetime_pattern, date_pattern,
                                                           time_pattern), rest)
    else:
        end = None
    if end and not start_groups['relative']:
        end_groups = end.groupdict()
        date, time = date_time_from_groupdict(end_groups)
        result['end_date'] = date
        result['end_time'] = time
        rest = end_groups['rest']

    result = TimeFrame(result['start_date'], result['start_time'], result['end_date'],
                       result['end_time'], result['offset'])

    # Consider the whole string as 'rest' if no time/date info was extracted.
    if not rest:
        rest = text
    return (result, rest.strip())
def complete_timeframe(timeframe, config, partial=False):
    """
    Apply fallback strategy to incomplete timeframes.

    Our fallback strategy is as follows:
        * Missing start-date: Fallback to ``today``.
        * Missing start-time: Fallback to ``store.config['day_start']``.
        * Missing end-date: Fallback to ``today`` for ``day_start='00:00'``,
          ``tomorrow`` otherwise. See
          ``hamster_lib.helpers.end_day_to_datetime`` for details.
        * Missing end-time: 1 second before ``store.config['day_start']``.

    Args:
        timeframe (TimeFrame): ``TimeFrame`` instance incorporating all
            information available about the timespan. Any missing info will
            be completed per fallback strategy.
        config (dict): A config-dict providing settings relevant to determine
            fallback values.
        partial (bool, optional): If True, we will only complete start/end
            times if there is at least either date or time information
            present. Defaults to ``False``.

    Returns:
        tuple: ``(start, end)`` tuple. Where ``start`` and ``end`` are full
            ``datetime.datetime`` instances, or ``None`` where ``partial``
            left them uncompleted.

    Raises:
        TypeError: If any of the ``timeframe`` values is of inappropriate
            datetime type.
    """
    def complete_start_date(date):
        """Return ``date`` or fall back to ``today``; reject non-date values."""
        if not date:
            return datetime.date.today()
        if not isinstance(date, datetime.date):
            raise TypeError(_(
                "Expected datetime.date instance, got {type} instead.".format(
                    type=type(date))
            ))
        return date

    def complete_start_time(time, day_start):
        """Assign ``day_start`` if no start-time is given."""
        if not time:
            return day_start
        if not isinstance(time, datetime.time):
            raise TypeError(_(
                "Expected datetime.time instance, got {type} instead.".format(
                    type=type(time))
            ))
        return time

    def complete_start(date, time, config):
        # Use the arguments instead of reaching into the enclosing
        # ``timeframe`` closure; callers pass the very same values, so
        # behavior is unchanged while the helper honours what it is given.
        return datetime.datetime.combine(
            complete_start_date(date),
            complete_start_time(time, config['day_start']),
        )

    def complete_end_date(date):
        """Return ``date`` or fall back to ``today``; reject non-date values."""
        if not date:
            return datetime.date.today()
        if not isinstance(date, datetime.date):
            raise TypeError(_(
                "Expected datetime.date instance, got {type} instead.".format(
                    type=type(date))
            ))
        return date

    def complete_end(date, time, config):
        # An explicit end time is combined verbatim; otherwise the end of
        # the working day is used (which may reach into the next calendar
        # day, see ``end_day_to_datetime``).
        date = complete_end_date(date)
        if time:
            result = datetime.datetime.combine(date, time)
        else:
            result = end_day_to_datetime(date, config)
        return result

    start, end = None, None
    # Only resolve a start/end if any corresponding information is present,
    # or if unconditional completion was requested (``partial=False``).
    if any((timeframe.offset, timeframe.start_time, timeframe.start_date)) or not partial:
        if not timeframe.offset:
            start = complete_start(timeframe.start_date, timeframe.start_time, config)
        else:
            # A relative offset is always anchored on "now".
            start = datetime.datetime.now() - timeframe.offset
    if any((timeframe.end_date, timeframe.end_time)) or not partial:
        end = complete_end(timeframe.end_date, timeframe.end_time, config)
    return (start, end)
def parse_time(time):
    """
    Parse a date/time string and return a corresponding datetime object.
    Args:
        time (str): A string in one of the following formats: ``%H:%M:%S``,
            ``%H:%M``, ``%Y-%m-%d`` or ``%Y-%m-%d %H:%M[:%S]``.
    Returns:
        datetime.datetime: Depending on input string either returns ``datetime.date``,
            ``datetime.time`` or ``datetime.datetime``.
    Raises:
        ValueError: If ``time`` can not be matched against any of the accepted formats.
    Note:
        This parses just a singular date, time or datetime representation.
    """
    token_count = len(time.strip().split())
    if token_count == 1:
        # (format, accessor) pairs; the accessor extracts the relevant part.
        candidates = (
            ('%H:%M:%S', 'time'),
            ('%H:%M', 'time'),
            ('%Y-%m-%d', 'date'),
        )
    elif token_count == 2:
        candidates = (
            ('%Y-%m-%d %H:%M:%S', None),
            ('%Y-%m-%d %H:%M', None),
        )
    else:
        raise ValueError(_(
            "String does not seem to be in one of our supported time formats."
        ))
    last = len(candidates) - 1
    for index, (fmt, accessor) in enumerate(candidates):
        try:
            parsed = datetime.datetime.strptime(time, fmt)
        except ValueError:
            if index == last:
                # No format matched; propagate strptime's ValueError.
                raise
            continue
        return getattr(parsed, accessor)() if accessor else parsed
|
projecthamster/hamster-lib | hamster_lib/helpers/config_helpers.py | get_config_path | python | def get_config_path(appdirs=DEFAULT_APPDIRS, file_name=DEFAULT_CONFIG_FILENAME):
return os.path.join(appdirs.user_config_dir, file_name) | Return the path where the config file is stored.
Args:
app_name (text_type, optional): Name of the application, defaults to
``'projecthamster``. Allows you to use your own application specific
namespace if you wish.
file_name (text_type, optional): Name of the config file. Defaults to
``config.conf``.
Returns:
str: Fully qualified path (dir & filename) where we expect the config file. | train | https://github.com/projecthamster/hamster-lib/blob/bc34c822c239a6fa0cde3a4f90b0d00506fb5a4f/hamster_lib/helpers/config_helpers.py#L141-L155 | null | # -*- coding: utf-8 -*-
# This file is part of 'hamster-lib'.
#
# 'hamster-lib' is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# 'hamster-lib' is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with 'hamster-lib'. If not, see <http://www.gnu.org/licenses/>.
"""
Provide functions that provide common config related functionality.
This module provide easy to use convenience functions to handle common configuration
related tasks. Clients can use those to provide consistent behaviour and focus on
their specific requirements instead.
The easiest way to make use of those helpers is to call ``load_config_file`` (which will also
handle creating a new one if none exists) and ``write_config_file``.
Clients may use ``backend_config_to_configparser`` and its counter part
``configparser_to_backend_config`` to delegate conversion between a backend config dict and a
``ConfigParser`` instance.
Note:
Backend config key/value information:
store: A ``string`` indicating which store (``hamster_lib.REGISTERED_BACKENDS``) to use.
day_start: ``datetime.time`` that specifies the when to start a new day.
fact_min_delta: ``int`` specifying minimal fact duration. Facts shorter than this will be
rejected.
tmpfile_path: ``string`` indicating where the file representing the ongoing fact is to be
stored.
db_engine: ``string`` indicating which db-engine to use. Options depend on store choice.
db_path: ``string`` indicating where to save the db file if the selected db option saves to
disk. Depends on store/engine choice.
db_host: ``string`` indicating the host of the db server. Depends on store/engine choice.
db_port: ``int`` indicating the port of the db server. Depends on store/engine choice.
db_name: ``string`` indicating the db-name. Depends on store/engine choice.
db_user: ``string`` indicating the username to access the db server. Depends on
store/engine choice.
db_password: ``string`` indicating the password to access the db server. Depends on
store/engine choice.
Please also note that a backend *config dict* does except ``None`` / ``empty`` values, its
``ConfigParser`` representation does not include those however!
"""
from __future__ import absolute_import, unicode_literals
import datetime
import os
import appdirs
import hamster_lib
from configparser import SafeConfigParser
from six import text_type
class HamsterAppDirs(appdirs.AppDirs):
    """``appdirs.AppDirs`` subclass that ensures each directory exists on access."""
    def __init__(self, *args, **kwargs):
        """Add create flag value to instance."""
        super(HamsterAppDirs, self).__init__(*args, **kwargs)
        # When True (the default), each ``*_dir`` property creates the
        # directory before returning its path.
        self.create = True
    def _existing(self, directory):
        """Ensure *directory* exists (if ``self.create`` is set) and return it."""
        if self.create:
            self._ensure_directory_exists(directory)
        return directory
    @property
    def user_data_dir(self):
        """Return ``user_data_dir``."""
        return self._existing(appdirs.user_data_dir(
            self.appname, self.appauthor, version=self.version,
            roaming=self.roaming))
    @property
    def site_data_dir(self):
        """Return ``site_data_dir``."""
        return self._existing(appdirs.site_data_dir(
            self.appname, self.appauthor, version=self.version,
            multipath=self.multipath))
    @property
    def user_config_dir(self):
        """Return ``user_config_dir``."""
        return self._existing(appdirs.user_config_dir(
            self.appname, self.appauthor, version=self.version,
            roaming=self.roaming))
    @property
    def site_config_dir(self):
        """Return ``site_config_dir``."""
        return self._existing(appdirs.site_config_dir(
            self.appname, self.appauthor, version=self.version,
            multipath=self.multipath))
    @property
    def user_cache_dir(self):
        """Return ``user_cache_dir``."""
        return self._existing(appdirs.user_cache_dir(
            self.appname, self.appauthor, version=self.version))
    @property
    def user_log_dir(self):
        """Return ``user_log_dir``."""
        return self._existing(appdirs.user_log_dir(
            self.appname, self.appauthor, version=self.version))
    def _ensure_directory_exists(self, directory):
        """Create *directory* (including parents) unless it already exists."""
        if not os.path.lexists(directory):
            os.makedirs(directory)
        return directory
# Application namespace used for user/site directory lookups.
DEFAULT_APP_NAME = 'projecthamster'
# Shared ``HamsterAppDirs`` instance; directories are created on first access.
DEFAULT_APPDIRS = HamsterAppDirs(DEFAULT_APP_NAME)
# Default config filename derived from the app name, e.g. ``projecthamster.conf``.
DEFAULT_CONFIG_FILENAME = '{}.conf'.format(DEFAULT_APPDIRS.appname)
def write_config_file(config_instance, appdirs=DEFAULT_APPDIRS,
                      file_name=DEFAULT_CONFIG_FILENAME):
    """
    Persist a ConfigParser instance to file at the canonical location.
    Args:
        config_instance: Config instance to save to file.
        appdirs (HamsterAppDirs, optional): ``HamsterAppDirs`` instance storing app/user
            specific path information.
        file_name (text_type, optional): Name of the config file. Defaults to
            ``DEFAULT_CONFIG_FILENAME``.
    Returns:
        SafeConfigParser: The very instance that was written to file.
    """
    target_path = get_config_path(appdirs, file_name)
    with open(target_path, 'w') as config_file:
        config_instance.write(config_file)
    return config_instance
def load_config_file(appdirs=DEFAULT_APPDIRS, file_name=DEFAULT_CONFIG_FILENAME,
                     fallback_config_instance=None):
    """
    Retrieve config information from file at the default location.
    If no config file is found a new one will be created either with
    ``fallback_config_instance`` as content or, if none is provided, with the
    result of ``get_default_backend_config``.
    Args:
        appdirs (HamsterAppDirs, optional): ``HamsterAppDirs`` instance storing app/user
            specific path information.
        file_name (text_type, optional): Name of the config file. Defaults to
            ``DEFAULT_CONFIG_FILENAME``.
        fallback_config_instance (ConfigParser): Backend config used to populate the
            config file that is created if no pre-existing one can be found.
    Returns:
        SafeConfigParser: Config loaded from file, either from the pre-existing
            config file or the one created with fallback values.
    """
    if not fallback_config_instance:
        fallback_config_instance = backend_config_to_configparser(
            get_default_backend_config(appdirs))
    config = SafeConfigParser()
    loaded = config.read(get_config_path(appdirs, file_name))
    if not loaded:
        # No file on disk yet: write the fallback config and return it.
        config = write_config_file(
            fallback_config_instance, appdirs=appdirs, file_name=file_name)
    return config
def get_default_backend_config(appdirs):
    """
    Return a default config dictionary.
    Args:
        appdirs (HamsterAppDirs): ``HamsterAppDirs`` instance encapsulating the apps
            details.
    Returns:
        dict: Dictionary with a default configuration.
    Note:
        Those defaults are independent of the particular config-store.
    """
    data_dir = appdirs.user_data_dir
    app_name = appdirs.appname
    return {
        'store': 'sqlalchemy',
        'day_start': datetime.time(5, 30, 0),
        'fact_min_delta': 1,
        'tmpfile_path': os.path.join(data_dir, '{}.tmp'.format(app_name)),
        'db_engine': 'sqlite',
        'db_path': os.path.join(data_dir, '{}.sqlite'.format(app_name)),
    }
# [TODO]
# Provide better error handling
def backend_config_to_configparser(config):
    """
    Return a ConfigParser instance representing a given backend config dictionary.
    Args:
        config (dict): Dictionary of config key/value pairs.
    Returns:
        SafeConfigParser: SafeConfigParser instance representing config.
    Note:
        We do not provide *any* validation about mandatory values what so ever.
    """
    # 'day_start' needs special treatment: serialize the time object.
    day_start = config.get('day_start')
    if day_start:
        day_start = day_start.strftime('%H:%M:%S')
    cp_instance = SafeConfigParser()
    cp_instance.add_section('Backend')
    # 'store' is written verbatim, without text_type coercion.
    cp_instance.set('Backend', 'store', config.get('store'))
    cp_instance.set('Backend', 'day_start', day_start)
    # All remaining options share the same coerce-to-text treatment.
    uniform_options = (
        'fact_min_delta',
        'tmpfile_path',
        'db_engine',
        'db_path',
        'db_host',
        'db_port',
        'db_name',
        'db_user',
        'db_password',
    )
    for option in uniform_options:
        cp_instance.set('Backend', option, text_type(config.get(option)))
    return cp_instance
# [TODO]
# Provide better error handling
# Provide validation! For this it would probably be enough to validate a config
# dict. We do not actually need to validate a CP-instance but just its resulting
# dict.
def configparser_to_backend_config(cp_instance):
    """
    Return a config dict generated from a configparser instance.
    This function's main purpose is to ensure config dict values are properly typed.
    Note:
        This can be used with any ``ConfigParser`` backend instance not just the
        default one in order to extract its config.
    """
    # [TODO]
    # Store validation should be delegated to a dedicated validation function!
    store = cp_instance.get('Backend', 'store')
    if store not in hamster_lib.REGISTERED_BACKENDS.keys():
        raise ValueError(_("Unrecognized store option."))
    try:
        day_start = datetime.datetime.strptime(
            cp_instance.get('Backend', 'day_start'), '%H:%M:%S').time()
    except ValueError:
        raise ValueError(_(
            "We encountered an error when parsing configs 'day_start'"
            " value! Aborting ..."
        ))
    return {
        'store': store,
        'day_start': day_start,
        'fact_min_delta': cp_instance.getint('Backend', 'fact_min_delta'),
        'tmpfile_path': cp_instance.get('Backend', 'tmpfile_path'),
        'db_engine': text_type(cp_instance.get('Backend', 'db_engine')),
        'db_path': text_type(cp_instance.get('Backend', 'db_path')),
        'db_host': text_type(cp_instance.get('Backend', 'db_host')),
        'db_port': cp_instance.getint('Backend', 'db_port'),
        'db_name': text_type(cp_instance.get('Backend', 'db_name')),
        'db_user': text_type(cp_instance.get('Backend', 'db_user')),
        'db_password': text_type(cp_instance.get('Backend', 'db_password')),
    }
|
projecthamster/hamster-lib | hamster_lib/helpers/config_helpers.py | write_config_file | python | def write_config_file(config_instance, appdirs=DEFAULT_APPDIRS,
file_name=DEFAULT_CONFIG_FILENAME):
path = get_config_path(appdirs, file_name)
with open(path, 'w') as fobj:
config_instance.write(fobj)
return config_instance | Write a ConfigParser instance to file at the correct location.
Args:
config_instance: Config instance to safe to file.
appdirs (HamsterAppDirs, optional): ``HamsterAppDirs`` instance storing app/user specific
path information.
file_name (text_type, optional): Name of the config file. Defaults to
``DEFAULT_CONFIG_FILENAME``.
Returns:
SafeConfigParser: Instance written to file. | train | https://github.com/projecthamster/hamster-lib/blob/bc34c822c239a6fa0cde3a4f90b0d00506fb5a4f/hamster_lib/helpers/config_helpers.py#L158-L177 | [
"def get_config_path(appdirs=DEFAULT_APPDIRS, file_name=DEFAULT_CONFIG_FILENAME):\n \"\"\"\n Return the path where the config file is stored.\n\n Args:\n app_name (text_type, optional): Name of the application, defaults to\n ``'projecthamster``. Allows you to use your own application specific\n namespace if you wish.\n file_name (text_type, optional): Name of the config file. Defaults to\n ``config.conf``.\n\n Returns:\n str: Fully qualified path (dir & filename) where we expect the config file.\n \"\"\"\n return os.path.join(appdirs.user_config_dir, file_name)\n"
] | # -*- coding: utf-8 -*-
# This file is part of 'hamster-lib'.
#
# 'hamster-lib' is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# 'hamster-lib' is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with 'hamster-lib'. If not, see <http://www.gnu.org/licenses/>.
"""
Provide functions that provide common config related functionality.
This module provide easy to use convenience functions to handle common configuration
related tasks. Clients can use those to provide consistent behaviour and focus on
their specific requirements instead.
The easiest way to make use of those helpers is to call ``load_config_file`` (which will also
handle creating a new one if none exists) and ``write_config_file``.
Clients may use ``backend_config_to_configparser`` and its counter part
``configparser_to_backend_config`` to delegate conversion between a backend config dict and a
``ConfigParser`` instance.
Note:
Backend config key/value information:
store: A ``string`` indicating which store (``hamster_lib.REGISTERED_BACKENDS``) to use.
day_start: ``datetime.time`` that specifies the when to start a new day.
fact_min_delta: ``int`` specifying minimal fact duration. Facts shorter than this will be
rejected.
tmpfile_path: ``string`` indicating where the file representing the ongoing fact is to be
stored.
db_engine: ``string`` indicating which db-engine to use. Options depend on store choice.
db_path: ``string`` indicating where to save the db file if the selected db option saves to
disk. Depends on store/engine choice.
db_host: ``string`` indicating the host of the db server. Depends on store/engine choice.
db_port: ``int`` indicating the port of the db server. Depends on store/engine choice.
db_name: ``string`` indicating the db-name. Depends on store/engine choice.
db_user: ``string`` indicating the username to access the db server. Depends on
store/engine choice.
db_password: ``string`` indicating the password to access the db server. Depends on
store/engine choice.
Please also note that a backend *config dict* does except ``None`` / ``empty`` values, its
``ConfigParser`` representation does not include those however!
"""
from __future__ import absolute_import, unicode_literals
import datetime
import os
import appdirs
import hamster_lib
from configparser import SafeConfigParser
from six import text_type
class HamsterAppDirs(appdirs.AppDirs):
"""Custom class that ensure appdirs exist."""
def __init__(self, *args, **kwargs):
"""Add create flag value to instance."""
super(HamsterAppDirs, self).__init__(*args, **kwargs)
self.create = True
@property
def user_data_dir(self):
"""Return ``user_data_dir``."""
directory = appdirs.user_data_dir(self.appname, self.appauthor,
version=self.version, roaming=self.roaming)
if self.create:
self._ensure_directory_exists(directory)
return directory
@property
def site_data_dir(self):
"""Return ``site_data_dir``."""
directory = appdirs.site_data_dir(self.appname, self.appauthor,
version=self.version, multipath=self.multipath)
if self.create:
self._ensure_directory_exists(directory)
return directory
@property
def user_config_dir(self):
"""Return ``user_config_dir``."""
directory = appdirs.user_config_dir(self.appname, self.appauthor,
version=self.version, roaming=self.roaming)
if self.create:
self._ensure_directory_exists(directory)
return directory
@property
def site_config_dir(self):
"""Return ``site_config_dir``."""
directory = appdirs.site_config_dir(self.appname, self.appauthor,
version=self.version, multipath=self.multipath)
if self.create:
self._ensure_directory_exists(directory)
return directory
@property
def user_cache_dir(self):
"""Return ``user_cache_dir``."""
directory = appdirs.user_cache_dir(self.appname, self.appauthor,
version=self.version)
if self.create:
self._ensure_directory_exists(directory)
return directory
@property
def user_log_dir(self):
"""Return ``user_log_dir``."""
directory = appdirs.user_log_dir(self.appname, self.appauthor,
version=self.version)
if self.create:
self._ensure_directory_exists(directory)
return directory
def _ensure_directory_exists(self, directory):
"""Ensure that the passed path exists."""
if not os.path.lexists(directory):
os.makedirs(directory)
return directory
DEFAULT_APP_NAME = 'projecthamster'
DEFAULT_APPDIRS = HamsterAppDirs(DEFAULT_APP_NAME)
DEFAULT_CONFIG_FILENAME = '{}.conf'.format(DEFAULT_APPDIRS.appname)
def get_config_path(appdirs=DEFAULT_APPDIRS, file_name=DEFAULT_CONFIG_FILENAME):
"""
Return the path where the config file is stored.
Args:
app_name (text_type, optional): Name of the application, defaults to
``'projecthamster``. Allows you to use your own application specific
namespace if you wish.
file_name (text_type, optional): Name of the config file. Defaults to
``config.conf``.
Returns:
str: Fully qualified path (dir & filename) where we expect the config file.
"""
return os.path.join(appdirs.user_config_dir, file_name)
def load_config_file(appdirs=DEFAULT_APPDIRS, file_name=DEFAULT_CONFIG_FILENAME,
fallback_config_instance=None):
"""
Retrieve config information from file at default location.
If no config file is found a new one will be created either with ``fallback_config_instance``
as content or if none is provided with the result of ``get_default_backend_config``.
Args:
appdirs (HamsterAppDirs, optional): ``HamsterAppDirs`` instance storing app/user specific
path information.
file_name (text_type, optional): Name of the config file. Defaults to
``DEFAULT_CONFIG_FILENAME``.
fallback_config_instance (ConfigParser): Backend config that is to be used to populate the
config file that is created if no pre-existing one can be found.
Returns:
SafeConfigParser: Config loaded from file, either from the the pre-existing config
file or the one created with fallback values.
"""
if not fallback_config_instance:
fallback_config_instance = backend_config_to_configparser(
get_default_backend_config(appdirs)
)
config = SafeConfigParser()
path = get_config_path(appdirs, file_name)
if not config.read(path):
config = write_config_file(
fallback_config_instance, appdirs=appdirs, file_name=file_name
)
return config
def get_default_backend_config(appdirs):
"""
Return a default config dictionary.
Args:
appdirs (HamsterAppDirs): ``HamsterAppDirs`` instance encapsulating the apps details.
Returns:
dict: Dictionary with a default configuration.
Note:
Those defaults are independent of the particular config-store.
"""
return {
'store': 'sqlalchemy',
'day_start': datetime.time(5, 30, 0),
'fact_min_delta': 1,
'tmpfile_path': os.path.join(appdirs.user_data_dir, '{}.tmp'.format(appdirs.appname)),
'db_engine': 'sqlite',
'db_path': os.path.join(appdirs.user_data_dir, '{}.sqlite'.format(appdirs.appname)),
}
# [TODO]
# Provide better error handling
def backend_config_to_configparser(config):
"""
Return a ConfigParser instance representing a given backend config dictionary.
Args:
config (dict): Dictionary of config key/value pairs.
Returns:
SafeConfigParser: SafeConfigParser instance representing config.
Note:
We do not provide *any* validation about mandatory values what so ever.
"""
def get_store():
return config.get('store')
def get_day_start():
day_start = config.get('day_start')
if day_start:
day_start = day_start.strftime('%H:%M:%S')
return day_start
def get_fact_min_delta():
return text_type(config.get('fact_min_delta'))
def get_tmpfile_path():
return text_type(config.get('tmpfile_path'))
def get_db_engine():
return text_type(config.get('db_engine'))
def get_db_path():
return text_type(config.get('db_path'))
def get_db_host():
return text_type(config.get('db_host'))
def get_db_port():
return text_type(config.get('db_port'))
def get_db_name():
return text_type(config.get('db_name'))
def get_db_user():
return text_type(config.get('db_user'))
def get_db_password():
return text_type(config.get('db_password'))
cp_instance = SafeConfigParser()
cp_instance.add_section('Backend')
cp_instance.set('Backend', 'store', get_store())
cp_instance.set('Backend', 'day_start', get_day_start())
cp_instance.set('Backend', 'fact_min_delta', get_fact_min_delta())
cp_instance.set('Backend', 'tmpfile_path', get_tmpfile_path())
cp_instance.set('Backend', 'db_engine', get_db_engine())
cp_instance.set('Backend', 'db_path', get_db_path())
cp_instance.set('Backend', 'db_host', get_db_host())
cp_instance.set('Backend', 'db_port', get_db_port())
cp_instance.set('Backend', 'db_name', get_db_name())
cp_instance.set('Backend', 'db_user', get_db_user())
cp_instance.set('Backend', 'db_password', get_db_password())
return cp_instance
# [TODO]
# Provide better error handling
# Provide validation! For this it would probably be enough to validate a config
# dict. We do not actually need to validate a CP-instance but just its resulting
# dict.
def configparser_to_backend_config(cp_instance):
    """
    Return a config dict generated from a configparser instance.

    The main purpose of this function is to ensure that the values of the
    resulting dict are properly typed.

    Args:
        cp_instance (SafeConfigParser): Parser instance that carries a
            ``Backend`` section with the backend options.

    Returns:
        dict: Dictionary of backend config key/value pairs.

    Raises:
        ValueError: If the ``store`` option is not a registered backend or if
            the ``day_start`` option can not be parsed as ``%H:%M:%S``.

    Note:
        This can be used with any ``ConfigParser`` backend instance, not just
        the default one, in order to extract its config.
    """
    def validated_store():
        # [TODO]
        # This should be delegated to a dedicated validation function!
        candidate = cp_instance.get('Backend', 'store')
        if candidate not in hamster_lib.REGISTERED_BACKENDS.keys():
            raise ValueError(_("Unrecognized store option."))
        return candidate

    def parsed_day_start():
        raw_value = cp_instance.get('Backend', 'day_start')
        try:
            return datetime.datetime.strptime(raw_value, '%H:%M:%S').time()
        except ValueError:
            raise ValueError(_(
                "We encountered an error when parsing configs 'day_start'"
                " value! Aborting ..."
            ))

    # Assemble the result in the same order the options are read originally.
    result = {
        'store': validated_store(),
        'day_start': parsed_day_start(),
        'fact_min_delta': cp_instance.getint('Backend', 'fact_min_delta'),
        'tmpfile_path': cp_instance.get('Backend', 'tmpfile_path'),
    }
    # Remaining options are plain strings coerced to ``text_type`` — except
    # ``db_port`` which is read as an integer.
    for option in ('db_engine', 'db_path', 'db_host'):
        result[option] = text_type(cp_instance.get('Backend', option))
    result['db_port'] = cp_instance.getint('Backend', 'db_port')
    for option in ('db_name', 'db_user', 'db_password'):
        result[option] = text_type(cp_instance.get('Backend', option))
    return result
|
projecthamster/hamster-lib | hamster_lib/helpers/config_helpers.py | load_config_file | python | def load_config_file(appdirs=DEFAULT_APPDIRS, file_name=DEFAULT_CONFIG_FILENAME,
fallback_config_instance=None):
if not fallback_config_instance:
fallback_config_instance = backend_config_to_configparser(
get_default_backend_config(appdirs)
)
config = SafeConfigParser()
path = get_config_path(appdirs, file_name)
if not config.read(path):
config = write_config_file(
fallback_config_instance, appdirs=appdirs, file_name=file_name
)
return config | Retrieve config information from file at default location.
If no config file is found a new one will be created either with ``fallback_config_instance``
as content or if none is provided with the result of ``get_default_backend_config``.
Args:
appdirs (HamsterAppDirs, optional): ``HamsterAppDirs`` instance storing app/user specific
path information.
file_name (text_type, optional): Name of the config file. Defaults to
``DEFAULT_CONFIG_FILENAME``.
fallback_config_instance (ConfigParser): Backend config that is to be used to populate the
config file that is created if no pre-existing one can be found.
Returns:
SafeConfigParser: Config loaded from file, either from the pre-existing config
file or the one created with fallback values. | train | https://github.com/projecthamster/hamster-lib/blob/bc34c822c239a6fa0cde3a4f90b0d00506fb5a4f/hamster_lib/helpers/config_helpers.py#L180-L211 | [
"def get_config_path(appdirs=DEFAULT_APPDIRS, file_name=DEFAULT_CONFIG_FILENAME):\n \"\"\"\n Return the path where the config file is stored.\n\n Args:\n app_name (text_type, optional): Name of the application, defaults to\n ``'projecthamster``. Allows you to use your own application specific\n namespace if you wish.\n file_name (text_type, optional): Name of the config file. Defaults to\n ``config.conf``.\n\n Returns:\n str: Fully qualified path (dir & filename) where we expect the config file.\n \"\"\"\n return os.path.join(appdirs.user_config_dir, file_name)\n",
"def write_config_file(config_instance, appdirs=DEFAULT_APPDIRS,\n file_name=DEFAULT_CONFIG_FILENAME):\n \"\"\"\n Write a ConfigParser instance to file at the correct location.\n\n Args:\n config_instance: Config instance to safe to file.\n appdirs (HamsterAppDirs, optional): ``HamsterAppDirs`` instance storing app/user specific\n path information.\n file_name (text_type, optional): Name of the config file. Defaults to\n ``DEFAULT_CONFIG_FILENAME``.\n\n Returns:\n SafeConfigParser: Instance written to file.\n \"\"\"\n\n path = get_config_path(appdirs, file_name)\n with open(path, 'w') as fobj:\n config_instance.write(fobj)\n return config_instance\n",
"def backend_config_to_configparser(config):\n \"\"\"\n Return a ConfigParser instance representing a given backend config dictionary.\n\n Args:\n config (dict): Dictionary of config key/value pairs.\n\n Returns:\n SafeConfigParser: SafeConfigParser instance representing config.\n\n Note:\n We do not provide *any* validation about mandatory values what so ever.\n \"\"\"\n def get_store():\n return config.get('store')\n\n def get_day_start():\n day_start = config.get('day_start')\n if day_start:\n day_start = day_start.strftime('%H:%M:%S')\n return day_start\n\n def get_fact_min_delta():\n return text_type(config.get('fact_min_delta'))\n\n def get_tmpfile_path():\n return text_type(config.get('tmpfile_path'))\n\n def get_db_engine():\n return text_type(config.get('db_engine'))\n\n def get_db_path():\n return text_type(config.get('db_path'))\n\n def get_db_host():\n return text_type(config.get('db_host'))\n\n def get_db_port():\n return text_type(config.get('db_port'))\n\n def get_db_name():\n return text_type(config.get('db_name'))\n\n def get_db_user():\n return text_type(config.get('db_user'))\n\n def get_db_password():\n return text_type(config.get('db_password'))\n\n cp_instance = SafeConfigParser()\n cp_instance.add_section('Backend')\n cp_instance.set('Backend', 'store', get_store())\n cp_instance.set('Backend', 'day_start', get_day_start())\n cp_instance.set('Backend', 'fact_min_delta', get_fact_min_delta())\n cp_instance.set('Backend', 'tmpfile_path', get_tmpfile_path())\n cp_instance.set('Backend', 'db_engine', get_db_engine())\n cp_instance.set('Backend', 'db_path', get_db_path())\n cp_instance.set('Backend', 'db_host', get_db_host())\n cp_instance.set('Backend', 'db_port', get_db_port())\n cp_instance.set('Backend', 'db_name', get_db_name())\n cp_instance.set('Backend', 'db_user', get_db_user())\n cp_instance.set('Backend', 'db_password', get_db_password())\n\n return cp_instance\n",
"def get_default_backend_config(appdirs):\n \"\"\"\n Return a default config dictionary.\n\n Args:\n appdirs (HamsterAppDirs): ``HamsterAppDirs`` instance encapsulating the apps details.\n\n Returns:\n dict: Dictionary with a default configuration.\n\n Note:\n Those defaults are independent of the particular config-store.\n \"\"\"\n return {\n 'store': 'sqlalchemy',\n 'day_start': datetime.time(5, 30, 0),\n 'fact_min_delta': 1,\n 'tmpfile_path': os.path.join(appdirs.user_data_dir, '{}.tmp'.format(appdirs.appname)),\n 'db_engine': 'sqlite',\n 'db_path': os.path.join(appdirs.user_data_dir, '{}.sqlite'.format(appdirs.appname)),\n }\n"
] | # -*- coding: utf-8 -*-
# This file is part of 'hamster-lib'.
#
# 'hamster-lib' is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# 'hamster-lib' is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with 'hamster-lib'. If not, see <http://www.gnu.org/licenses/>.
"""
Provide functions that provide common config related functionality.
This module provide easy to use convenience functions to handle common configuration
related tasks. Clients can use those to provide consistent behaviour and focus on
their specific requirements instead.
The easiest way to make use of those helpers is to call ``load_config_file`` (which will also
handle creating a new one if none exists) and ``write_config_file``.
Clients may use ``backend_config_to_configparser`` and its counter part
``configparser_to_backend_config`` to delegate conversion between a backend config dict and a
``ConfigParser`` instance.
Note:
Backend config key/value information:
store: A ``string`` indicating which store (``hamster_lib.REGISTERED_BACKENDS``) to use.
day_start: ``datetime.time`` that specifies the when to start a new day.
fact_min_delta: ``int`` specifying minimal fact duration. Facts shorter than this will be
rejected.
tmpfile_path: ``string`` indicating where the file representing the ongoing fact is to be
stored.
db_engine: ``string`` indicating which db-engine to use. Options depend on store choice.
db_path: ``string`` indicating where to save the db file if the selected db option saves to
disk. Depends on store/engine choice.
db_host: ``string`` indicating the host of the db server. Depends on store/engine choice.
db_port: ``int`` indicating the port of the db server. Depends on store/engine choice.
db_name: ``string`` indicating the db-name. Depends on store/engine choice.
db_user: ``string`` indicating the username to access the db server. Depends on
store/engine choice.
db_password: ``string`` indicating the password to access the db server. Depends on
store/engine choice.
Please also note that a backend *config dict* does accept ``None`` / ``empty`` values; its
``ConfigParser`` representation does not include those however!
"""
from __future__ import absolute_import, unicode_literals
import datetime
import os
import appdirs
import hamster_lib
from configparser import SafeConfigParser
from six import text_type
class HamsterAppDirs(appdirs.AppDirs):
    """Custom ``appdirs.AppDirs`` subclass that ensures directories exist."""

    def __init__(self, *args, **kwargs):
        """Add create flag value to instance."""
        super(HamsterAppDirs, self).__init__(*args, **kwargs)
        # When True, the directory properties create their directory on access.
        self.create = True

    def _prepared(self, directory):
        """Ensure ``directory`` exists (when ``self.create``) and return it."""
        if self.create:
            self._ensure_directory_exists(directory)
        return directory

    @property
    def user_data_dir(self):
        """Return ``user_data_dir``."""
        return self._prepared(appdirs.user_data_dir(
            self.appname, self.appauthor,
            version=self.version, roaming=self.roaming))

    @property
    def site_data_dir(self):
        """Return ``site_data_dir``."""
        return self._prepared(appdirs.site_data_dir(
            self.appname, self.appauthor,
            version=self.version, multipath=self.multipath))

    @property
    def user_config_dir(self):
        """Return ``user_config_dir``."""
        return self._prepared(appdirs.user_config_dir(
            self.appname, self.appauthor,
            version=self.version, roaming=self.roaming))

    @property
    def site_config_dir(self):
        """Return ``site_config_dir``."""
        return self._prepared(appdirs.site_config_dir(
            self.appname, self.appauthor,
            version=self.version, multipath=self.multipath))

    @property
    def user_cache_dir(self):
        """Return ``user_cache_dir``."""
        return self._prepared(appdirs.user_cache_dir(
            self.appname, self.appauthor, version=self.version))

    @property
    def user_log_dir(self):
        """Return ``user_log_dir``."""
        return self._prepared(appdirs.user_log_dir(
            self.appname, self.appauthor, version=self.version))

    def _ensure_directory_exists(self, directory):
        """Create the passed directory (and parents) if it does not exist."""
        if not os.path.lexists(directory):
            os.makedirs(directory)
        return directory
DEFAULT_APP_NAME = 'projecthamster'
DEFAULT_APPDIRS = HamsterAppDirs(DEFAULT_APP_NAME)
DEFAULT_CONFIG_FILENAME = '{}.conf'.format(DEFAULT_APPDIRS.appname)
def get_config_path(appdirs=DEFAULT_APPDIRS, file_name=DEFAULT_CONFIG_FILENAME):
    """
    Return the path where the config file is expected to be stored.

    Args:
        appdirs (HamsterAppDirs, optional): ``HamsterAppDirs`` instance that
            provides the user config directory. Defaults to ``DEFAULT_APPDIRS``.
        file_name (text_type, optional): Name of the config file. Defaults to
            ``DEFAULT_CONFIG_FILENAME``.

    Returns:
        str: Fully qualified path (dir & filename) where we expect the config
            file.
    """
    config_dir = appdirs.user_config_dir
    return os.path.join(config_dir, file_name)
def write_config_file(config_instance, appdirs=DEFAULT_APPDIRS,
                      file_name=DEFAULT_CONFIG_FILENAME):
    """
    Write a ConfigParser instance to file at the correct location.

    Args:
        config_instance: Config instance to save to file.
        appdirs (HamsterAppDirs, optional): ``HamsterAppDirs`` instance storing
            app/user specific path information.
        file_name (text_type, optional): Name of the config file. Defaults to
            ``DEFAULT_CONFIG_FILENAME``.

    Returns:
        SafeConfigParser: The very instance that was written to file.
    """
    target = get_config_path(appdirs, file_name)
    with open(target, 'w') as config_file:
        config_instance.write(config_file)
    return config_instance
def get_default_backend_config(appdirs):
    """
    Return a default config dictionary.

    Args:
        appdirs (HamsterAppDirs): ``HamsterAppDirs`` instance encapsulating the
            apps details.

    Returns:
        dict: Dictionary with a default configuration.

    Note:
        Those defaults are independent of the particular config-store.
    """
    data_dir = appdirs.user_data_dir
    # The temporary "ongoing fact" file and the sqlite db both live in the
    # user's data directory, named after the application.
    tmpfile_path = os.path.join(data_dir, '{}.tmp'.format(appdirs.appname))
    db_path = os.path.join(data_dir, '{}.sqlite'.format(appdirs.appname))
    return {
        'store': 'sqlalchemy',
        'day_start': datetime.time(5, 30, 0),
        'fact_min_delta': 1,
        'tmpfile_path': tmpfile_path,
        'db_engine': 'sqlite',
        'db_path': db_path,
    }
# [TODO]
# Provide better error handling
def backend_config_to_configparser(config):
    """
    Return a ``SafeConfigParser`` instance representing a backend config dict.

    Args:
        config (dict): Dictionary of config key/value pairs.

    Returns:
        SafeConfigParser: Parser instance representing ``config``.

    Note:
        We do not provide *any* validation about mandatory values what so ever.
    """
    day_start = config.get('day_start')
    if day_start:
        # Serialize ``datetime.time`` into its canonical string form.
        day_start = day_start.strftime('%H:%M:%S')

    options = [
        ('store', config.get('store')),
        ('day_start', day_start),
    ]
    # All remaining options are simply coerced to ``text_type``.
    for key in ('fact_min_delta', 'tmpfile_path', 'db_engine', 'db_path',
                'db_host', 'db_port', 'db_name', 'db_user', 'db_password'):
        options.append((key, text_type(config.get(key))))

    cp_instance = SafeConfigParser()
    cp_instance.add_section('Backend')
    for key, value in options:
        cp_instance.set('Backend', key, value)
    return cp_instance
# [TODO]
# Provide better error handling
# Provide validation! For this it would probably be enough to validate a config
# dict. We do not actually need to validate a CP-instance but just its resulting
# dict.
def configparser_to_backend_config(cp_instance):
    """
    Return a config dict generated from a configparser instance.

    The main purpose of this function is to ensure that the values of the
    resulting dict are properly typed.

    Args:
        cp_instance (SafeConfigParser): Parser instance that carries a
            ``Backend`` section with the backend options.

    Returns:
        dict: Dictionary of backend config key/value pairs.

    Raises:
        ValueError: If the ``store`` option is not a registered backend or if
            the ``day_start`` option can not be parsed as ``%H:%M:%S``.

    Note:
        This can be used with any ``ConfigParser`` backend instance, not just
        the default one, in order to extract its config.
    """
    def validated_store():
        # [TODO]
        # This should be delegated to a dedicated validation function!
        candidate = cp_instance.get('Backend', 'store')
        if candidate not in hamster_lib.REGISTERED_BACKENDS.keys():
            raise ValueError(_("Unrecognized store option."))
        return candidate

    def parsed_day_start():
        raw_value = cp_instance.get('Backend', 'day_start')
        try:
            return datetime.datetime.strptime(raw_value, '%H:%M:%S').time()
        except ValueError:
            raise ValueError(_(
                "We encountered an error when parsing configs 'day_start'"
                " value! Aborting ..."
            ))

    # Assemble the result in the same order the options are read originally.
    result = {
        'store': validated_store(),
        'day_start': parsed_day_start(),
        'fact_min_delta': cp_instance.getint('Backend', 'fact_min_delta'),
        'tmpfile_path': cp_instance.get('Backend', 'tmpfile_path'),
    }
    # Remaining options are plain strings coerced to ``text_type`` — except
    # ``db_port`` which is read as an integer.
    for option in ('db_engine', 'db_path', 'db_host'):
        result[option] = text_type(cp_instance.get('Backend', option))
    result['db_port'] = cp_instance.getint('Backend', 'db_port')
    for option in ('db_name', 'db_user', 'db_password'):
        result[option] = text_type(cp_instance.get('Backend', option))
    return result
|
projecthamster/hamster-lib | hamster_lib/helpers/config_helpers.py | get_default_backend_config | python | def get_default_backend_config(appdirs):
return {
'store': 'sqlalchemy',
'day_start': datetime.time(5, 30, 0),
'fact_min_delta': 1,
'tmpfile_path': os.path.join(appdirs.user_data_dir, '{}.tmp'.format(appdirs.appname)),
'db_engine': 'sqlite',
'db_path': os.path.join(appdirs.user_data_dir, '{}.sqlite'.format(appdirs.appname)),
} | Return a default config dictionary.
Args:
appdirs (HamsterAppDirs): ``HamsterAppDirs`` instance encapsulating the apps details.
Returns:
dict: Dictionary with a default configuration.
Note:
Those defaults are independent of the particular config-store. | train | https://github.com/projecthamster/hamster-lib/blob/bc34c822c239a6fa0cde3a4f90b0d00506fb5a4f/hamster_lib/helpers/config_helpers.py#L214-L234 | null | # -*- coding: utf-8 -*-
# This file is part of 'hamster-lib'.
#
# 'hamster-lib' is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# 'hamster-lib' is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with 'hamster-lib'. If not, see <http://www.gnu.org/licenses/>.
"""
Provide functions that provide common config related functionality.
This module provide easy to use convenience functions to handle common configuration
related tasks. Clients can use those to provide consistent behaviour and focus on
their specific requirements instead.
The easiest way to make use of those helpers is to call ``load_config_file`` (which will also
handle creating a new one if none exists) and ``write_config_file``.
Clients may use ``backend_config_to_configparser`` and its counter part
``configparser_to_backend_config`` to delegate conversion between a backend config dict and a
``ConfigParser`` instance.
Note:
Backend config key/value information:
store: A ``string`` indicating which store (``hamster_lib.REGISTERED_BACKENDS``) to use.
day_start: ``datetime.time`` that specifies the when to start a new day.
fact_min_delta: ``int`` specifying minimal fact duration. Facts shorter than this will be
rejected.
tmpfile_path: ``string`` indicating where the file representing the ongoing fact is to be
stored.
db_engine: ``string`` indicating which db-engine to use. Options depend on store choice.
db_path: ``string`` indicating where to save the db file if the selected db option saves to
disk. Depends on store/engine choice.
db_host: ``string`` indicating the host of the db server. Depends on store/engine choice.
db_port: ``int`` indicating the port of the db server. Depends on store/engine choice.
db_name: ``string`` indicating the db-name. Depends on store/engine choice.
db_user: ``string`` indicating the username to access the db server. Depends on
store/engine choice.
db_password: ``string`` indicating the password to access the db server. Depends on
store/engine choice.
Please also note that a backend *config dict* does accept ``None`` / ``empty`` values; its
``ConfigParser`` representation does not include those however!
"""
from __future__ import absolute_import, unicode_literals
import datetime
import os
import appdirs
import hamster_lib
from configparser import SafeConfigParser
from six import text_type
class HamsterAppDirs(appdirs.AppDirs):
    """Custom ``appdirs.AppDirs`` subclass that ensures directories exist."""

    def __init__(self, *args, **kwargs):
        """Add create flag value to instance."""
        super(HamsterAppDirs, self).__init__(*args, **kwargs)
        # When True, the directory properties create their directory on access.
        self.create = True

    def _prepared(self, directory):
        """Ensure ``directory`` exists (when ``self.create``) and return it."""
        if self.create:
            self._ensure_directory_exists(directory)
        return directory

    @property
    def user_data_dir(self):
        """Return ``user_data_dir``."""
        return self._prepared(appdirs.user_data_dir(
            self.appname, self.appauthor,
            version=self.version, roaming=self.roaming))

    @property
    def site_data_dir(self):
        """Return ``site_data_dir``."""
        return self._prepared(appdirs.site_data_dir(
            self.appname, self.appauthor,
            version=self.version, multipath=self.multipath))

    @property
    def user_config_dir(self):
        """Return ``user_config_dir``."""
        return self._prepared(appdirs.user_config_dir(
            self.appname, self.appauthor,
            version=self.version, roaming=self.roaming))

    @property
    def site_config_dir(self):
        """Return ``site_config_dir``."""
        return self._prepared(appdirs.site_config_dir(
            self.appname, self.appauthor,
            version=self.version, multipath=self.multipath))

    @property
    def user_cache_dir(self):
        """Return ``user_cache_dir``."""
        return self._prepared(appdirs.user_cache_dir(
            self.appname, self.appauthor, version=self.version))

    @property
    def user_log_dir(self):
        """Return ``user_log_dir``."""
        return self._prepared(appdirs.user_log_dir(
            self.appname, self.appauthor, version=self.version))

    def _ensure_directory_exists(self, directory):
        """Create the passed directory (and parents) if it does not exist."""
        if not os.path.lexists(directory):
            os.makedirs(directory)
        return directory
DEFAULT_APP_NAME = 'projecthamster'
DEFAULT_APPDIRS = HamsterAppDirs(DEFAULT_APP_NAME)
DEFAULT_CONFIG_FILENAME = '{}.conf'.format(DEFAULT_APPDIRS.appname)
def get_config_path(appdirs=DEFAULT_APPDIRS, file_name=DEFAULT_CONFIG_FILENAME):
    """
    Return the path where the config file is expected to be stored.

    Args:
        appdirs (HamsterAppDirs, optional): ``HamsterAppDirs`` instance that
            provides the user config directory. Defaults to ``DEFAULT_APPDIRS``.
        file_name (text_type, optional): Name of the config file. Defaults to
            ``DEFAULT_CONFIG_FILENAME``.

    Returns:
        str: Fully qualified path (dir & filename) where we expect the config
            file.
    """
    config_dir = appdirs.user_config_dir
    return os.path.join(config_dir, file_name)
def write_config_file(config_instance, appdirs=DEFAULT_APPDIRS,
                      file_name=DEFAULT_CONFIG_FILENAME):
    """
    Write a ConfigParser instance to file at the correct location.

    Args:
        config_instance: Config instance to save to file.
        appdirs (HamsterAppDirs, optional): ``HamsterAppDirs`` instance storing
            app/user specific path information.
        file_name (text_type, optional): Name of the config file. Defaults to
            ``DEFAULT_CONFIG_FILENAME``.

    Returns:
        SafeConfigParser: The very instance that was written to file.
    """
    target = get_config_path(appdirs, file_name)
    with open(target, 'w') as config_file:
        config_instance.write(config_file)
    return config_instance
def load_config_file(appdirs=DEFAULT_APPDIRS, file_name=DEFAULT_CONFIG_FILENAME,
                     fallback_config_instance=None):
    """
    Retrieve config information from file at the default location.

    If no config file is found a new one will be created, populated either with
    ``fallback_config_instance`` or, if none is provided, with the result of
    ``get_default_backend_config``.

    Args:
        appdirs (HamsterAppDirs, optional): ``HamsterAppDirs`` instance storing
            app/user specific path information.
        file_name (text_type, optional): Name of the config file. Defaults to
            ``DEFAULT_CONFIG_FILENAME``.
        fallback_config_instance (ConfigParser): Backend config that is to be
            used to populate the config file that is created if no pre-existing
            one can be found.

    Returns:
        SafeConfigParser: Config loaded from file, either from the pre-existing
            config file or the one created with fallback values.
    """
    if not fallback_config_instance:
        # No fallback supplied; derive one from the default backend config.
        fallback_config_instance = backend_config_to_configparser(
            get_default_backend_config(appdirs)
        )

    config = SafeConfigParser()
    # ``ConfigParser.read`` returns the list of files successfully read; an
    # empty list means there is no usable pre-existing config file.
    if not config.read(get_config_path(appdirs, file_name)):
        config = write_config_file(
            fallback_config_instance, appdirs=appdirs, file_name=file_name
        )
    return config
# [TODO]
# Provide better error handling
def backend_config_to_configparser(config):
    """
    Return a ``SafeConfigParser`` instance representing a backend config dict.

    Args:
        config (dict): Dictionary of config key/value pairs.

    Returns:
        SafeConfigParser: Parser instance representing ``config``.

    Note:
        We do not provide *any* validation about mandatory values what so ever.
    """
    day_start = config.get('day_start')
    if day_start:
        # Serialize ``datetime.time`` into its canonical string form.
        day_start = day_start.strftime('%H:%M:%S')

    options = [
        ('store', config.get('store')),
        ('day_start', day_start),
    ]
    # All remaining options are simply coerced to ``text_type``.
    for key in ('fact_min_delta', 'tmpfile_path', 'db_engine', 'db_path',
                'db_host', 'db_port', 'db_name', 'db_user', 'db_password'):
        options.append((key, text_type(config.get(key))))

    cp_instance = SafeConfigParser()
    cp_instance.add_section('Backend')
    for key, value in options:
        cp_instance.set('Backend', key, value)
    return cp_instance
# [TODO]
# Provide better error handling
# Provide validation! For this it would probably be enough to validate a config
# dict. We do not actually need to validate a CP-instance but just its resulting
# dict.
def configparser_to_backend_config(cp_instance):
    """
    Return a config dict generated from a configparser instance.

    The main purpose of this function is to ensure that the values of the
    resulting dict are properly typed.

    Args:
        cp_instance (SafeConfigParser): Parser instance that carries a
            ``Backend`` section with the backend options.

    Returns:
        dict: Dictionary of backend config key/value pairs.

    Raises:
        ValueError: If the ``store`` option is not a registered backend or if
            the ``day_start`` option can not be parsed as ``%H:%M:%S``.

    Note:
        This can be used with any ``ConfigParser`` backend instance, not just
        the default one, in order to extract its config.
    """
    def validated_store():
        # [TODO]
        # This should be delegated to a dedicated validation function!
        candidate = cp_instance.get('Backend', 'store')
        if candidate not in hamster_lib.REGISTERED_BACKENDS.keys():
            raise ValueError(_("Unrecognized store option."))
        return candidate

    def parsed_day_start():
        raw_value = cp_instance.get('Backend', 'day_start')
        try:
            return datetime.datetime.strptime(raw_value, '%H:%M:%S').time()
        except ValueError:
            raise ValueError(_(
                "We encountered an error when parsing configs 'day_start'"
                " value! Aborting ..."
            ))

    # Assemble the result in the same order the options are read originally.
    result = {
        'store': validated_store(),
        'day_start': parsed_day_start(),
        'fact_min_delta': cp_instance.getint('Backend', 'fact_min_delta'),
        'tmpfile_path': cp_instance.get('Backend', 'tmpfile_path'),
    }
    # Remaining options are plain strings coerced to ``text_type`` — except
    # ``db_port`` which is read as an integer.
    for option in ('db_engine', 'db_path', 'db_host'):
        result[option] = text_type(cp_instance.get('Backend', option))
    result['db_port'] = cp_instance.getint('Backend', 'db_port')
    for option in ('db_name', 'db_user', 'db_password'):
        result[option] = text_type(cp_instance.get('Backend', option))
    return result
|
projecthamster/hamster-lib | hamster_lib/helpers/config_helpers.py | backend_config_to_configparser | python | def backend_config_to_configparser(config):
def get_store():
return config.get('store')
def get_day_start():
day_start = config.get('day_start')
if day_start:
day_start = day_start.strftime('%H:%M:%S')
return day_start
def get_fact_min_delta():
return text_type(config.get('fact_min_delta'))
def get_tmpfile_path():
return text_type(config.get('tmpfile_path'))
def get_db_engine():
return text_type(config.get('db_engine'))
def get_db_path():
return text_type(config.get('db_path'))
def get_db_host():
return text_type(config.get('db_host'))
def get_db_port():
return text_type(config.get('db_port'))
def get_db_name():
return text_type(config.get('db_name'))
def get_db_user():
return text_type(config.get('db_user'))
def get_db_password():
return text_type(config.get('db_password'))
cp_instance = SafeConfigParser()
cp_instance.add_section('Backend')
cp_instance.set('Backend', 'store', get_store())
cp_instance.set('Backend', 'day_start', get_day_start())
cp_instance.set('Backend', 'fact_min_delta', get_fact_min_delta())
cp_instance.set('Backend', 'tmpfile_path', get_tmpfile_path())
cp_instance.set('Backend', 'db_engine', get_db_engine())
cp_instance.set('Backend', 'db_path', get_db_path())
cp_instance.set('Backend', 'db_host', get_db_host())
cp_instance.set('Backend', 'db_port', get_db_port())
cp_instance.set('Backend', 'db_name', get_db_name())
cp_instance.set('Backend', 'db_user', get_db_user())
cp_instance.set('Backend', 'db_password', get_db_password())
return cp_instance | Return a ConfigParser instance representing a given backend config dictionary.
Args:
config (dict): Dictionary of config key/value pairs.
Returns:
SafeConfigParser: SafeConfigParser instance representing config.
Note:
We do not provide *any* validation about mandatory values what so ever. | train | https://github.com/projecthamster/hamster-lib/blob/bc34c822c239a6fa0cde3a4f90b0d00506fb5a4f/hamster_lib/helpers/config_helpers.py#L239-L302 | [
"def get_store():\n return config.get('store')\n",
"def get_day_start():\n day_start = config.get('day_start')\n if day_start:\n day_start = day_start.strftime('%H:%M:%S')\n return day_start\n",
"def get_fact_min_delta():\n return text_type(config.get('fact_min_delta'))\n",
"def get_tmpfile_path():\n return text_type(config.get('tmpfile_path'))\n",
"def get_db_engine():\n return text_type(config.get('db_engine'))\n",
"def get_db_path():\n return text_type(config.get('db_path'))\n",
"def get_db_host():\n return text_type(config.get('db_host'))\n",
"def get_db_port():\n return text_type(config.get('db_port'))\n",
"def get_db_name():\n return text_type(config.get('db_name'))\n",
"def get_db_user():\n return text_type(config.get('db_user'))\n",
"def get_db_password():\n return text_type(config.get('db_password'))\n"
] | # -*- coding: utf-8 -*-
# This file is part of 'hamster-lib'.
#
# 'hamster-lib' is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# 'hamster-lib' is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with 'hamster-lib'. If not, see <http://www.gnu.org/licenses/>.
"""
Provide functions that provide common config related functionality.
This module provide easy to use convenience functions to handle common configuration
related tasks. Clients can use those to provide consistent behaviour and focus on
their specific requirements instead.
The easiest way to make use of those helpers is to call ``load_config_file`` (which will also
handle creating a new one if none exists) and ``write_config_file``.
Clients may use ``backend_config_to_configparser`` and its counter part
``configparser_to_backend_config`` to delegate conversion between a backend config dict and a
``ConfigParser`` instance.
Note:
Backend config key/value information:
store: A ``string`` indicating which store (``hamster_lib.REGISTERED_BACKENDS``) to use.
day_start: ``datetime.time`` that specifies the when to start a new day.
fact_min_delta: ``int`` specifying minimal fact duration. Facts shorter than this will be
rejected.
tmpfile_path: ``string`` indicating where the file representing the ongoing fact is to be
stored.
db_engine: ``string`` indicating which db-engine to use. Options depend on store choice.
db_path: ``string`` indicating where to save the db file if the selected db option saves to
disk. Depends on store/engine choice.
db_host: ``string`` indicating the host of the db server. Depends on store/engine choice.
db_port: ``int`` indicating the port of the db server. Depends on store/engine choice.
db_name: ``string`` indicating the db-name. Depends on store/engine choice.
db_user: ``string`` indicating the username to access the db server. Depends on
store/engine choice.
db_password: ``string`` indicating the password to access the db server. Depends on
store/engine choice.
Please also note that a backend *config dict* does accept ``None`` / ``empty`` values; its
``ConfigParser`` representation does not include those however!
"""
from __future__ import absolute_import, unicode_literals
import datetime
import os
import appdirs
import hamster_lib
from configparser import SafeConfigParser
from six import text_type
class HamsterAppDirs(appdirs.AppDirs):
    """``AppDirs`` subclass that creates each directory on first access.

    Whenever one of the ``*_dir`` properties is read and ``self.create`` is
    truthy, the corresponding directory is created on disk if it does not
    exist yet, so callers can rely on the returned path being usable.
    """

    def __init__(self, *args, **kwargs):
        """Initialize the base class and enable directory auto-creation."""
        super(HamsterAppDirs, self).__init__(*args, **kwargs)
        # Toggle for the create-on-access behaviour of the ``*_dir`` properties.
        self.create = True

    def _ensure_directory_exists(self, directory):
        """Create ``directory`` (including parents) unless it already exists."""
        if not os.path.lexists(directory):
            os.makedirs(directory)
        return directory

    def _prepared(self, directory):
        """Optionally create ``directory`` before handing it back."""
        if self.create:
            self._ensure_directory_exists(directory)
        return directory

    @property
    def user_data_dir(self):
        """Return ``user_data_dir``."""
        return self._prepared(appdirs.user_data_dir(
            self.appname, self.appauthor,
            version=self.version, roaming=self.roaming))

    @property
    def site_data_dir(self):
        """Return ``site_data_dir``."""
        return self._prepared(appdirs.site_data_dir(
            self.appname, self.appauthor,
            version=self.version, multipath=self.multipath))

    @property
    def user_config_dir(self):
        """Return ``user_config_dir``."""
        return self._prepared(appdirs.user_config_dir(
            self.appname, self.appauthor,
            version=self.version, roaming=self.roaming))

    @property
    def site_config_dir(self):
        """Return ``site_config_dir``."""
        return self._prepared(appdirs.site_config_dir(
            self.appname, self.appauthor,
            version=self.version, multipath=self.multipath))

    @property
    def user_cache_dir(self):
        """Return ``user_cache_dir``."""
        return self._prepared(appdirs.user_cache_dir(
            self.appname, self.appauthor, version=self.version))

    @property
    def user_log_dir(self):
        """Return ``user_log_dir``."""
        return self._prepared(appdirs.user_log_dir(
            self.appname, self.appauthor, version=self.version))
DEFAULT_APP_NAME = 'projecthamster'
DEFAULT_APPDIRS = HamsterAppDirs(DEFAULT_APP_NAME)
DEFAULT_CONFIG_FILENAME = '{}.conf'.format(DEFAULT_APPDIRS.appname)
def get_config_path(appdirs=DEFAULT_APPDIRS, file_name=DEFAULT_CONFIG_FILENAME):
    """
    Return the path where the config file is stored.

    Args:
        appdirs (HamsterAppDirs, optional): ``HamsterAppDirs`` instance storing
            app/user specific path information. Defaults to ``DEFAULT_APPDIRS``.
            Pass your own instance if you wish to use an application specific
            namespace.
        file_name (text_type, optional): Name of the config file. Defaults to
            ``DEFAULT_CONFIG_FILENAME``.

    Returns:
        str: Fully qualified path (dir & filename) where we expect the config file.
    """
    return os.path.join(appdirs.user_config_dir, file_name)
def write_config_file(config_instance, appdirs=DEFAULT_APPDIRS,
                      file_name=DEFAULT_CONFIG_FILENAME):
    """
    Write a ConfigParser instance to file at the correct location.

    Args:
        config_instance: Config instance to save to file.
        appdirs (HamsterAppDirs, optional): ``HamsterAppDirs`` instance storing
            app/user specific path information.
        file_name (text_type, optional): Name of the config file. Defaults to
            ``DEFAULT_CONFIG_FILENAME``.

    Returns:
        SafeConfigParser: The very instance that was written to file.
    """
    target = get_config_path(appdirs, file_name)
    with open(target, 'w') as config_file:
        config_instance.write(config_file)
    return config_instance
def load_config_file(appdirs=DEFAULT_APPDIRS, file_name=DEFAULT_CONFIG_FILENAME,
                     fallback_config_instance=None):
    """
    Retrieve config information from file at the default location.

    If no config file is found, a new one will be created — populated either
    with ``fallback_config_instance`` or, if none was provided, with the
    result of ``get_default_backend_config``.

    Args:
        appdirs (HamsterAppDirs, optional): ``HamsterAppDirs`` instance storing
            app/user specific path information.
        file_name (text_type, optional): Name of the config file. Defaults to
            ``DEFAULT_CONFIG_FILENAME``.
        fallback_config_instance (ConfigParser): Backend config that is to be
            used to populate the config file that is created if no pre-existing
            one can be found.

    Returns:
        SafeConfigParser: Config loaded from the pre-existing config file, or
            the fallback instance that was just written to disk.
    """
    if not fallback_config_instance:
        fallback_config_instance = backend_config_to_configparser(
            get_default_backend_config(appdirs)
        )
    parser = SafeConfigParser()
    if parser.read(get_config_path(appdirs, file_name)):
        return parser
    # Nothing could be read: persist the fallback config and hand it back.
    return write_config_file(
        fallback_config_instance, appdirs=appdirs, file_name=file_name
    )
def get_default_backend_config(appdirs):
    """
    Return a default config dictionary.

    Args:
        appdirs (HamsterAppDirs): ``HamsterAppDirs`` instance encapsulating the
            apps details; only ``user_data_dir`` and ``appname`` are consulted.

    Returns:
        dict: Dictionary with a default configuration.

    Note:
        Those defaults are independent of the particular config-store.
    """
    data_dir = appdirs.user_data_dir
    app_name = appdirs.appname
    return {
        'store': 'sqlalchemy',
        'day_start': datetime.time(5, 30, 0),
        'fact_min_delta': 1,
        'tmpfile_path': os.path.join(data_dir, '{}.tmp'.format(app_name)),
        'db_engine': 'sqlite',
        'db_path': os.path.join(data_dir, '{}.sqlite'.format(app_name)),
    }
# [TODO]
# Provide better error handling
# [TODO]
# Provide better error handling
# Provide validation! For this it would probably be enough to validate a config
# dict. We do not actually need to validate a CP-instance but just its resulting
# dict.
def configparser_to_backend_config(cp_instance):
    """
    Return a config dict generated from a configparser instance.

    This function's main purpose is to ensure config dict values are properly
    typed.

    Note:
        This can be used with any ``ConfigParser`` backend instance, not just
        the default one, in order to extract its config.
        If a key is not found in ``cp_instance`` the resulting dict will have
        ``None`` assigned to this dict key.
    """
    # [TODO]
    # This should be delegated to a dedicated validation function!
    store = cp_instance.get('Backend', 'store')
    if store not in hamster_lib.REGISTERED_BACKENDS.keys():
        raise ValueError(_("Unrecognized store option."))
    try:
        # ``day_start`` is persisted as 'HH:MM:SS'; convert back to a time.
        day_start = datetime.datetime.strptime(
            cp_instance.get('Backend', 'day_start'), '%H:%M:%S').time()
    except ValueError:
        raise ValueError(_(
            "We encountered an error when parsing configs 'day_start'"
            " value! Aborting ..."
        ))
    return {
        'store': store,
        'day_start': day_start,
        'fact_min_delta': cp_instance.getint('Backend', 'fact_min_delta'),
        'tmpfile_path': cp_instance.get('Backend', 'tmpfile_path'),
        'db_engine': text_type(cp_instance.get('Backend', 'db_engine')),
        'db_path': text_type(cp_instance.get('Backend', 'db_path')),
        'db_host': text_type(cp_instance.get('Backend', 'db_host')),
        'db_port': cp_instance.getint('Backend', 'db_port'),
        'db_name': text_type(cp_instance.get('Backend', 'db_name')),
        'db_user': text_type(cp_instance.get('Backend', 'db_user')),
        'db_password': text_type(cp_instance.get('Backend', 'db_password')),
    }
|
projecthamster/hamster-lib | hamster_lib/helpers/config_helpers.py | configparser_to_backend_config | python | def configparser_to_backend_config(cp_instance):
def get_store():
# [TODO]
# This should be deligated to a dedicated validation function!
store = cp_instance.get('Backend', 'store')
if store not in hamster_lib.REGISTERED_BACKENDS.keys():
raise ValueError(_("Unrecognized store option."))
return store
def get_day_start():
try:
day_start = datetime.datetime.strptime(cp_instance.get('Backend',
'day_start'), '%H:%M:%S').time()
except ValueError:
raise ValueError(_(
"We encountered an error when parsing configs 'day_start'"
" value! Aborting ..."
))
return day_start
def get_fact_min_delta():
return cp_instance.getint('Backend', 'fact_min_delta')
def get_tmpfile_path():
return cp_instance.get('Backend', 'tmpfile_path')
def get_db_engine():
return text_type(cp_instance.get('Backend', 'db_engine'))
def get_db_path():
return text_type(cp_instance.get('Backend', 'db_path'))
def get_db_host():
return text_type(cp_instance.get('Backend', 'db_host'))
def get_db_port():
return cp_instance.getint('Backend', 'db_port')
def get_db_name():
return text_type(cp_instance.get('Backend', 'db_name'))
def get_db_user():
return text_type(cp_instance.get('Backend', 'db_user'))
def get_db_password():
return text_type(cp_instance.get('Backend', 'db_password'))
result = {
'store': get_store(),
'day_start': get_day_start(),
'fact_min_delta': get_fact_min_delta(),
'tmpfile_path': get_tmpfile_path(),
'db_engine': get_db_engine(),
'db_path': get_db_path(),
'db_host': get_db_host(),
'db_port': get_db_port(),
'db_name': get_db_name(),
'db_user': get_db_user(),
'db_password': get_db_password(),
}
return result | Return a config dict generated from a configparser instance.
This functions main purpose is to ensure config dict values are properly typed.
Note:
This can be used with any ``ConfigParser`` backend instance not just the default one
in order to extract its config.
If a key is not found in ``cp_instance`` the resulting dict will have ``None``
assigned to this dict key. | train | https://github.com/projecthamster/hamster-lib/blob/bc34c822c239a6fa0cde3a4f90b0d00506fb5a4f/hamster_lib/helpers/config_helpers.py#L310-L381 | [
"def get_store():\n # [TODO]\n # This should be deligated to a dedicated validation function!\n store = cp_instance.get('Backend', 'store')\n if store not in hamster_lib.REGISTERED_BACKENDS.keys():\n raise ValueError(_(\"Unrecognized store option.\"))\n return store\n",
"def get_day_start():\n try:\n day_start = datetime.datetime.strptime(cp_instance.get('Backend',\n 'day_start'), '%H:%M:%S').time()\n except ValueError:\n raise ValueError(_(\n \"We encountered an error when parsing configs 'day_start'\"\n \" value! Aborting ...\"\n ))\n return day_start\n",
"def get_fact_min_delta():\n return cp_instance.getint('Backend', 'fact_min_delta')\n",
"def get_tmpfile_path():\n return cp_instance.get('Backend', 'tmpfile_path')\n",
"def get_db_engine():\n return text_type(cp_instance.get('Backend', 'db_engine'))\n",
"def get_db_path():\n return text_type(cp_instance.get('Backend', 'db_path'))\n",
"def get_db_host():\n return text_type(cp_instance.get('Backend', 'db_host'))\n",
"def get_db_port():\n return cp_instance.getint('Backend', 'db_port')\n",
"def get_db_name():\n return text_type(cp_instance.get('Backend', 'db_name'))\n",
"def get_db_user():\n return text_type(cp_instance.get('Backend', 'db_user'))\n",
"def get_db_password():\n return text_type(cp_instance.get('Backend', 'db_password'))\n"
] | # -*- coding: utf-8 -*-
# This file is part of 'hamster-lib'.
#
# 'hamster-lib' is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# 'hamster-lib' is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with 'hamster-lib'. If not, see <http://www.gnu.org/licenses/>.
"""
Provide functions that provide common config related functionality.
This module provide easy to use convenience functions to handle common configuration
related tasks. Clients can use those to provide consistent behaviour and focus on
their specific requirements instead.
The easiest way to make use of those helpers is to call ``load_config_file`` (which will also
handle creating a new one if none exists) and ``write_config_file``.
Clients may use ``backend_config_to_configparser`` and its counter part
``configparser_to_backend_config`` to delegate conversion between a backend config dict and a
``ConfigParser`` instance.
Note:
Backend config key/value information:
store: A ``string`` indicating which store (``hamster_lib.REGISTERED_BACKENDS``) to use.
day_start: ``datetime.time`` that specifies the when to start a new day.
fact_min_delta: ``int`` specifying minimal fact duration. Facts shorter than this will be
rejected.
tmpfile_path: ``string`` indicating where the file representing the ongoing fact is to be
stored.
db_engine: ``string`` indicating which db-engine to use. Options depend on store choice.
db_path: ``string`` indicating where to save the db file if the selected db option saves to
disk. Depends on store/engine choice.
db_host: ``string`` indicating the host of the db server. Depends on store/engine choice.
db_port: ``int`` indicating the port of the db server. Depends on store/engine choice.
db_name: ``string`` indicating the db-name. Depends on store/engine choice.
db_user: ``string`` indicating the username to access the db server. Depends on
store/engine choice.
db_password: ``string`` indicating the password to access the db server. Depends on
store/engine choice.
Please also note that a backend *config dict* does except ``None`` / ``empty`` values, its
``ConfigParser`` representation does not include those however!
"""
from __future__ import absolute_import, unicode_literals
import datetime
import os
import appdirs
import hamster_lib
from configparser import SafeConfigParser
from six import text_type
class HamsterAppDirs(appdirs.AppDirs):
"""Custom class that ensure appdirs exist."""
def __init__(self, *args, **kwargs):
"""Add create flag value to instance."""
super(HamsterAppDirs, self).__init__(*args, **kwargs)
self.create = True
@property
def user_data_dir(self):
"""Return ``user_data_dir``."""
directory = appdirs.user_data_dir(self.appname, self.appauthor,
version=self.version, roaming=self.roaming)
if self.create:
self._ensure_directory_exists(directory)
return directory
@property
def site_data_dir(self):
"""Return ``site_data_dir``."""
directory = appdirs.site_data_dir(self.appname, self.appauthor,
version=self.version, multipath=self.multipath)
if self.create:
self._ensure_directory_exists(directory)
return directory
@property
def user_config_dir(self):
"""Return ``user_config_dir``."""
directory = appdirs.user_config_dir(self.appname, self.appauthor,
version=self.version, roaming=self.roaming)
if self.create:
self._ensure_directory_exists(directory)
return directory
@property
def site_config_dir(self):
"""Return ``site_config_dir``."""
directory = appdirs.site_config_dir(self.appname, self.appauthor,
version=self.version, multipath=self.multipath)
if self.create:
self._ensure_directory_exists(directory)
return directory
@property
def user_cache_dir(self):
"""Return ``user_cache_dir``."""
directory = appdirs.user_cache_dir(self.appname, self.appauthor,
version=self.version)
if self.create:
self._ensure_directory_exists(directory)
return directory
@property
def user_log_dir(self):
"""Return ``user_log_dir``."""
directory = appdirs.user_log_dir(self.appname, self.appauthor,
version=self.version)
if self.create:
self._ensure_directory_exists(directory)
return directory
def _ensure_directory_exists(self, directory):
"""Ensure that the passed path exists."""
if not os.path.lexists(directory):
os.makedirs(directory)
return directory
DEFAULT_APP_NAME = 'projecthamster'
DEFAULT_APPDIRS = HamsterAppDirs(DEFAULT_APP_NAME)
DEFAULT_CONFIG_FILENAME = '{}.conf'.format(DEFAULT_APPDIRS.appname)
def get_config_path(appdirs=DEFAULT_APPDIRS, file_name=DEFAULT_CONFIG_FILENAME):
"""
Return the path where the config file is stored.
Args:
app_name (text_type, optional): Name of the application, defaults to
``'projecthamster``. Allows you to use your own application specific
namespace if you wish.
file_name (text_type, optional): Name of the config file. Defaults to
``config.conf``.
Returns:
str: Fully qualified path (dir & filename) where we expect the config file.
"""
return os.path.join(appdirs.user_config_dir, file_name)
def write_config_file(config_instance, appdirs=DEFAULT_APPDIRS,
file_name=DEFAULT_CONFIG_FILENAME):
"""
Write a ConfigParser instance to file at the correct location.
Args:
config_instance: Config instance to safe to file.
appdirs (HamsterAppDirs, optional): ``HamsterAppDirs`` instance storing app/user specific
path information.
file_name (text_type, optional): Name of the config file. Defaults to
``DEFAULT_CONFIG_FILENAME``.
Returns:
SafeConfigParser: Instance written to file.
"""
path = get_config_path(appdirs, file_name)
with open(path, 'w') as fobj:
config_instance.write(fobj)
return config_instance
def load_config_file(appdirs=DEFAULT_APPDIRS, file_name=DEFAULT_CONFIG_FILENAME,
fallback_config_instance=None):
"""
Retrieve config information from file at default location.
If no config file is found a new one will be created either with ``fallback_config_instance``
as content or if none is provided with the result of ``get_default_backend_config``.
Args:
appdirs (HamsterAppDirs, optional): ``HamsterAppDirs`` instance storing app/user specific
path information.
file_name (text_type, optional): Name of the config file. Defaults to
``DEFAULT_CONFIG_FILENAME``.
fallback_config_instance (ConfigParser): Backend config that is to be used to populate the
config file that is created if no pre-existing one can be found.
Returns:
SafeConfigParser: Config loaded from file, either from the the pre-existing config
file or the one created with fallback values.
"""
if not fallback_config_instance:
fallback_config_instance = backend_config_to_configparser(
get_default_backend_config(appdirs)
)
config = SafeConfigParser()
path = get_config_path(appdirs, file_name)
if not config.read(path):
config = write_config_file(
fallback_config_instance, appdirs=appdirs, file_name=file_name
)
return config
def get_default_backend_config(appdirs):
"""
Return a default config dictionary.
Args:
appdirs (HamsterAppDirs): ``HamsterAppDirs`` instance encapsulating the apps details.
Returns:
dict: Dictionary with a default configuration.
Note:
Those defaults are independent of the particular config-store.
"""
return {
'store': 'sqlalchemy',
'day_start': datetime.time(5, 30, 0),
'fact_min_delta': 1,
'tmpfile_path': os.path.join(appdirs.user_data_dir, '{}.tmp'.format(appdirs.appname)),
'db_engine': 'sqlite',
'db_path': os.path.join(appdirs.user_data_dir, '{}.sqlite'.format(appdirs.appname)),
}
# [TODO]
# Provide better error handling
def backend_config_to_configparser(config):
    """
    Return a ConfigParser instance representing a given backend config dictionary.

    Args:
        config (dict): Dictionary of config key/value pairs.

    Returns:
        SafeConfigParser: SafeConfigParser instance representing config.

    Note:
        We do not provide *any* validation about mandatory values what so ever.
    """
    day_start = config.get('day_start')
    if day_start:
        # ``day_start`` is a ``datetime.time``; serialize it so that it can be
        # parsed back with ``strptime(..., '%H:%M:%S')`` on the way in.
        day_start = day_start.strftime('%H:%M:%S')
    cp_instance = SafeConfigParser()
    cp_instance.add_section('Backend')
    # ``ConfigParser`` stores text only, hence the ``text_type`` conversions.
    cp_instance.set('Backend', 'store', config.get('store'))
    cp_instance.set('Backend', 'day_start', day_start)
    cp_instance.set('Backend', 'fact_min_delta', text_type(config.get('fact_min_delta')))
    cp_instance.set('Backend', 'tmpfile_path', text_type(config.get('tmpfile_path')))
    cp_instance.set('Backend', 'db_engine', text_type(config.get('db_engine')))
    cp_instance.set('Backend', 'db_path', text_type(config.get('db_path')))
    cp_instance.set('Backend', 'db_host', text_type(config.get('db_host')))
    cp_instance.set('Backend', 'db_port', text_type(config.get('db_port')))
    cp_instance.set('Backend', 'db_name', text_type(config.get('db_name')))
    cp_instance.set('Backend', 'db_user', text_type(config.get('db_user')))
    cp_instance.set('Backend', 'db_password', text_type(config.get('db_password')))
    return cp_instance
# [TODO]
# Provide better error handling
# Provide validation! For this it would probably be enough to validate a config
# dict. We do not actually need to validate a CP-instance but just its resulting
# dict.
|
projecthamster/hamster-lib | hamster_lib/helpers/config_helpers.py | HamsterAppDirs.user_data_dir | python | def user_data_dir(self):
directory = appdirs.user_data_dir(self.appname, self.appauthor,
version=self.version, roaming=self.roaming)
if self.create:
self._ensure_directory_exists(directory)
return directory | Return ``user_data_dir``. | train | https://github.com/projecthamster/hamster-lib/blob/bc34c822c239a6fa0cde3a4f90b0d00506fb5a4f/hamster_lib/helpers/config_helpers.py#L76-L82 | null | class HamsterAppDirs(appdirs.AppDirs):
"""Custom class that ensure appdirs exist."""
def __init__(self, *args, **kwargs):
"""Add create flag value to instance."""
super(HamsterAppDirs, self).__init__(*args, **kwargs)
self.create = True
@property
@property
def site_data_dir(self):
"""Return ``site_data_dir``."""
directory = appdirs.site_data_dir(self.appname, self.appauthor,
version=self.version, multipath=self.multipath)
if self.create:
self._ensure_directory_exists(directory)
return directory
@property
def user_config_dir(self):
"""Return ``user_config_dir``."""
directory = appdirs.user_config_dir(self.appname, self.appauthor,
version=self.version, roaming=self.roaming)
if self.create:
self._ensure_directory_exists(directory)
return directory
@property
def site_config_dir(self):
"""Return ``site_config_dir``."""
directory = appdirs.site_config_dir(self.appname, self.appauthor,
version=self.version, multipath=self.multipath)
if self.create:
self._ensure_directory_exists(directory)
return directory
@property
def user_cache_dir(self):
"""Return ``user_cache_dir``."""
directory = appdirs.user_cache_dir(self.appname, self.appauthor,
version=self.version)
if self.create:
self._ensure_directory_exists(directory)
return directory
@property
def user_log_dir(self):
"""Return ``user_log_dir``."""
directory = appdirs.user_log_dir(self.appname, self.appauthor,
version=self.version)
if self.create:
self._ensure_directory_exists(directory)
return directory
def _ensure_directory_exists(self, directory):
"""Ensure that the passed path exists."""
if not os.path.lexists(directory):
os.makedirs(directory)
return directory
|
projecthamster/hamster-lib | hamster_lib/helpers/config_helpers.py | HamsterAppDirs.site_config_dir | python | def site_config_dir(self):
directory = appdirs.site_config_dir(self.appname, self.appauthor,
version=self.version, multipath=self.multipath)
if self.create:
self._ensure_directory_exists(directory)
return directory | Return ``site_config_dir``. | train | https://github.com/projecthamster/hamster-lib/blob/bc34c822c239a6fa0cde3a4f90b0d00506fb5a4f/hamster_lib/helpers/config_helpers.py#L103-L109 | null | class HamsterAppDirs(appdirs.AppDirs):
"""Custom class that ensure appdirs exist."""
def __init__(self, *args, **kwargs):
"""Add create flag value to instance."""
super(HamsterAppDirs, self).__init__(*args, **kwargs)
self.create = True
@property
def user_data_dir(self):
"""Return ``user_data_dir``."""
directory = appdirs.user_data_dir(self.appname, self.appauthor,
version=self.version, roaming=self.roaming)
if self.create:
self._ensure_directory_exists(directory)
return directory
@property
def site_data_dir(self):
"""Return ``site_data_dir``."""
directory = appdirs.site_data_dir(self.appname, self.appauthor,
version=self.version, multipath=self.multipath)
if self.create:
self._ensure_directory_exists(directory)
return directory
@property
def user_config_dir(self):
"""Return ``user_config_dir``."""
directory = appdirs.user_config_dir(self.appname, self.appauthor,
version=self.version, roaming=self.roaming)
if self.create:
self._ensure_directory_exists(directory)
return directory
@property
@property
def user_cache_dir(self):
"""Return ``user_cache_dir``."""
directory = appdirs.user_cache_dir(self.appname, self.appauthor,
version=self.version)
if self.create:
self._ensure_directory_exists(directory)
return directory
@property
def user_log_dir(self):
"""Return ``user_log_dir``."""
directory = appdirs.user_log_dir(self.appname, self.appauthor,
version=self.version)
if self.create:
self._ensure_directory_exists(directory)
return directory
def _ensure_directory_exists(self, directory):
"""Ensure that the passed path exists."""
if not os.path.lexists(directory):
os.makedirs(directory)
return directory
|
projecthamster/hamster-lib | hamster_lib/helpers/config_helpers.py | HamsterAppDirs.user_cache_dir | python | def user_cache_dir(self):
directory = appdirs.user_cache_dir(self.appname, self.appauthor,
version=self.version)
if self.create:
self._ensure_directory_exists(directory)
return directory | Return ``user_cache_dir``. | train | https://github.com/projecthamster/hamster-lib/blob/bc34c822c239a6fa0cde3a4f90b0d00506fb5a4f/hamster_lib/helpers/config_helpers.py#L112-L118 | null | class HamsterAppDirs(appdirs.AppDirs):
"""Custom class that ensure appdirs exist."""
def __init__(self, *args, **kwargs):
"""Add create flag value to instance."""
super(HamsterAppDirs, self).__init__(*args, **kwargs)
self.create = True
@property
def user_data_dir(self):
"""Return ``user_data_dir``."""
directory = appdirs.user_data_dir(self.appname, self.appauthor,
version=self.version, roaming=self.roaming)
if self.create:
self._ensure_directory_exists(directory)
return directory
@property
def site_data_dir(self):
"""Return ``site_data_dir``."""
directory = appdirs.site_data_dir(self.appname, self.appauthor,
version=self.version, multipath=self.multipath)
if self.create:
self._ensure_directory_exists(directory)
return directory
@property
def user_config_dir(self):
"""Return ``user_config_dir``."""
directory = appdirs.user_config_dir(self.appname, self.appauthor,
version=self.version, roaming=self.roaming)
if self.create:
self._ensure_directory_exists(directory)
return directory
@property
def site_config_dir(self):
"""Return ``site_config_dir``."""
directory = appdirs.site_config_dir(self.appname, self.appauthor,
version=self.version, multipath=self.multipath)
if self.create:
self._ensure_directory_exists(directory)
return directory
@property
@property
def user_log_dir(self):
"""Return ``user_log_dir``."""
directory = appdirs.user_log_dir(self.appname, self.appauthor,
version=self.version)
if self.create:
self._ensure_directory_exists(directory)
return directory
def _ensure_directory_exists(self, directory):
"""Ensure that the passed path exists."""
if not os.path.lexists(directory):
os.makedirs(directory)
return directory
|
projecthamster/hamster-lib | hamster_lib/helpers/config_helpers.py | HamsterAppDirs._ensure_directory_exists | python | def _ensure_directory_exists(self, directory):
if not os.path.lexists(directory):
os.makedirs(directory)
return directory | Ensure that the passed path exists. | train | https://github.com/projecthamster/hamster-lib/blob/bc34c822c239a6fa0cde3a4f90b0d00506fb5a4f/hamster_lib/helpers/config_helpers.py#L129-L133 | null | class HamsterAppDirs(appdirs.AppDirs):
"""Custom class that ensure appdirs exist."""
def __init__(self, *args, **kwargs):
"""Add create flag value to instance."""
super(HamsterAppDirs, self).__init__(*args, **kwargs)
self.create = True
@property
def user_data_dir(self):
"""Return ``user_data_dir``."""
directory = appdirs.user_data_dir(self.appname, self.appauthor,
version=self.version, roaming=self.roaming)
if self.create:
self._ensure_directory_exists(directory)
return directory
@property
def site_data_dir(self):
"""Return ``site_data_dir``."""
directory = appdirs.site_data_dir(self.appname, self.appauthor,
version=self.version, multipath=self.multipath)
if self.create:
self._ensure_directory_exists(directory)
return directory
@property
def user_config_dir(self):
"""Return ``user_config_dir``."""
directory = appdirs.user_config_dir(self.appname, self.appauthor,
version=self.version, roaming=self.roaming)
if self.create:
self._ensure_directory_exists(directory)
return directory
@property
def site_config_dir(self):
"""Return ``site_config_dir``."""
directory = appdirs.site_config_dir(self.appname, self.appauthor,
version=self.version, multipath=self.multipath)
if self.create:
self._ensure_directory_exists(directory)
return directory
@property
def user_cache_dir(self):
"""Return ``user_cache_dir``."""
directory = appdirs.user_cache_dir(self.appname, self.appauthor,
version=self.version)
if self.create:
self._ensure_directory_exists(directory)
return directory
@property
def user_log_dir(self):
"""Return ``user_log_dir``."""
directory = appdirs.user_log_dir(self.appname, self.appauthor,
version=self.version)
if self.create:
self._ensure_directory_exists(directory)
return directory
|
projecthamster/hamster-lib | hamster_lib/helpers/helpers.py | _load_tmp_fact | python | def _load_tmp_fact(filepath):
from hamster_lib import Fact
try:
with open(filepath, 'rb') as fobj:
fact = pickle.load(fobj)
except IOError:
fact = False
else:
if not isinstance(fact, Fact):
raise TypeError(_(
"Something went wrong. It seems our pickled file does not contain"
" valid Fact instance. [Content: '{content}'; Type: {type}".format(
content=fact, type=type(fact))
))
return fact | Load an 'ongoing fact' from a given location.
Args:
filepath: Full path to the tmpfile location.
Returns:
hamster_lib.Fact: ``Fact`` representing the 'ongoing fact'. Returns ``False``
if no file was found.
Raises:
TypeError: If for some reason our stored instance is no instance of
``hamster_lib.Fact``. | train | https://github.com/projecthamster/hamster-lib/blob/bc34c822c239a6fa0cde3a4f90b0d00506fb5a4f/hamster_lib/helpers/helpers.py#L33-L62 | null | # -*- encoding: utf-8 -*-
# Copyright (C) 2015-2016 Eric Goller <eric.goller@ninjaduck.solutions>
# This file is part of 'hamster-lib'.
#
# 'hamster-lib' is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# 'hamster-lib' is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with 'hamster-lib'. If not, see <http://www.gnu.org/licenses/>.
"""
This module provides several convenience and intermediate functions to perform common tasks.
"""
import pickle
from hamster_lib.helpers import time as time_helpers
# Non public helpers
# These should be of very little use for any client module.
def parse_raw_fact(raw_fact):
    """
    Extract semantically meaningful sub-components from a ``raw fact`` text.

    Args:
        raw_fact (text_type): ``raw fact`` text to be parsed.

    Returns:
        dict: dict with sub-components as values.
    """
    def split_off_activity(text):
        """
        Split ``text`` at the leftmost '@' into ``(activity, remainder)``.

        ``remainder`` is ``None`` when no '@' symbol is present at all. Any
        further '@' symbols are kept as part of the remainder, which allows
        their use inside descriptions and categories. Note that *no tags are
        extracted*; any tags included will be considered part of the
        ``category`` string.
        """
        activity, separator, remainder = text.partition('@')
        if not separator:
            return (activity.strip(), None)
        return (activity.strip(), remainder.strip())

    def split_off_description(text):
        """
        Split ``text`` at the leftmost ',' into ``(category, description)``.

        ``description`` is ``None`` when no comma is present. Because the most
        left ',' is the separator, categories and tags can not contain any
        ',' while the description may contain as many as wished.
        """
        category, separator, description = text.partition(',')
        if not separator:
            return (category.strip(), None)
        return (category.strip(), description.strip())

    time_info, remainder = time_helpers.extract_time_info(raw_fact)
    activity_name, trailing = split_off_activity(remainder)
    category_name, description = None, None
    if trailing:
        category_name, description = split_off_description(trailing)
    return {
        'timeinfo': time_info,
        'category': category_name,
        'activity': activity_name,
        'description': description,
    }
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.