text stringlengths 0 1.05M | meta dict |
|---|---|
# Airplay Mode for PiRadio
# Limited functionality for now.
# When enabled, the mode starts the Shairport Sync service to make the device
# visible to Apple devices.
#
# No metadata is available at the moment. This may change in future releases.
from subprocess import call
from resources.basemode import RadioBaseMode
# systemctl command lines used to start/stop the Shairport Sync (AirPlay) service.
ON = ["sudo", "systemctl", "start", "shairport-sync.service"]
OFF = ["sudo", "systemctl", "stop", "shairport-sync.service"]
class ModeAirplay(RadioBaseMode):
    """Radio mode that makes the device visible as an AirPlay target.

    Entering the mode starts the Shairport Sync systemd service and
    exiting stops it again. The service exposes no track metadata yet,
    so a fixed placeholder is displayed instead.
    """

    # Name shown by the radio's mode selector.
    name = "Airplay"

    def __init__(self):
        super(ModeAirplay, self).__init__()

        # Create a basic menu
        self.menu = [("Show Device Name", self.show_device)]
        self.build_menu()

        # Service should be disabled by default
        call(OFF)

        # No metadata is currently available so let's just define a fixed
        # item for display
        self.metadata = {"Artist": "PiRadio"}

    def enter(self):
        """Start Shairport Sync and push the placeholder metadata."""
        # Start the service
        call(ON)
        # Send our metadata
        self.show_text("metadata", self.metadata)

    def exit(self):
        """Stop Shairport Sync when the mode is left."""
        # Stop the service
        call(OFF)

    def show_device(self):
        """Menu callback: display the device name."""
        # Show text if the menu item is used.
        self.show_text("menuinfo", "PiRadio")
| {
"repo_name": "elParaguayo/PiRadio",
"path": "modes/airplay.py",
"copies": "1",
"size": "1239",
"license": "mit",
"hash": -2144685760708891100,
"line_mean": 24.8125,
"line_max": 77,
"alpha_frac": 0.6359967716,
"autogenerated": false,
"ratio": 3.920886075949367,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 48
} |
"""airports.py provides an example Steno3D project of airports"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
from .base import BaseExample, exampleproperty
from ..point import Mesh0D, Point
from ..project import Project
DEG2RAD = np.pi/180
FT2KM = 12*2.54/100/1000
RADIUS = 6371
class Airports(BaseExample):
    """Class containing components of airport project. Components can be
    viewed individually or copied into new resources or projects with
    get_resources() and get_project(), respectively.
    """

    @exampleproperty
    def filenames(self):
        """airport files"""
        return ['airports.dat', 'latitude.npy', 'longitude.npy',
                'altitude.npy', 'license.txt']

    @exampleproperty
    def datafile(self):
        """full path to airport data file"""
        return Airports.fetch_data(filename='airports.dat',
                                   download_if_missing=False,
                                   verbose=False)

    @exampleproperty
    def latitude(self):
        """Airport lat, degrees, from openflights.org"""
        return np.load(Airports.fetch_data(filename='latitude.npy',
                                           download_if_missing=False,
                                           verbose=False))

    @exampleproperty
    def longitude(self):
        """Airport lon, degrees, from openflights.org"""
        return np.load(Airports.fetch_data(filename='longitude.npy',
                                           download_if_missing=False,
                                           verbose=False))

    @exampleproperty
    def altitude(self):
        """Airport alt, km, from openflights.org"""
        return np.load(Airports.fetch_data(filename='altitude.npy',
                                           download_if_missing=False,
                                           verbose=False))

    @classmethod
    def get_project(cls):
        """return airport points project"""
        # FIX: first parameter renamed self -> cls; this is a classmethod,
        # so the first argument is the class, not an instance.
        proj = Project(
            title='Airport',
            description='Project with airport points'
        )
        Point(
            project=proj,
            mesh=Mesh0D(
                vertices=np.c_[cls.geo_to_xyz(cls.latitude,
                                              cls.longitude,
                                              cls.altitude)]
            ),
            title='Airport Points'
        )
        return proj

    @staticmethod
    def geo_to_xyz(lat, lon, alt):
        """function geo_to_xyz

        Inputs:
            lat: latitude, degrees
            lon: longitude, degrees
            alt: altitude, km
        Outputs:
            x, y, z: spatial coordinates relative to the center of the earth
        Note:
            This function assumes a spherical earth
        """
        # FIX: convert to radians with out-of-place multiplication. The
        # original `lat *= DEG2RAD` mutated the caller's arrays in place,
        # so reusing the same arrays gave wrong results on the second call.
        lat = lat * DEG2RAD
        lon = lon * DEG2RAD
        x = (RADIUS + alt)*np.cos(lat)*np.cos(lon)
        y = (RADIUS + alt)*np.cos(lat)*np.sin(lon)
        z = (RADIUS + alt)*np.sin(lat)
        return x, y, z

    @staticmethod
    def read_airports_data(filename):
        """Extract latitude, longitude, and altitude from file

        Returns three numpy arrays: latitude (radians), longitude
        (radians) and altitude (km), parsed from an openflights.org
        airports.dat CSV file.
        """
        lat = []  # Latitude
        lon = []  # Longitude
        alt = []  # Altitude
        with open(filename) as f:
            for line in f:
                data = line.rstrip().split(',')
                lat.append(float(data[6])*DEG2RAD)
                lon.append(float(data[7])*DEG2RAD)
                alt.append(float(data[8])*FT2KM)
        return np.array(lat), np.array(lon), np.array(alt)
| {
"repo_name": "3ptscience/steno3dpy",
"path": "steno3d/examples/airports.py",
"copies": "1",
"size": "3650",
"license": "mit",
"hash": 5240546841310380000,
"line_mean": 31.3008849558,
"line_max": 76,
"alpha_frac": 0.5364383562,
"autogenerated": false,
"ratio": 4.329774614472123,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 113
} |
""" Airship automated installation script
usage: python2.7 <(curl -fsSL raw.github.com/mgax/airship/master/install_airship.py) /var/local/my_awesome_app
"""
import os
import sys
import subprocess
import urllib
import json
AIRSHIP_PACKAGE = 'https://github.com/mgax/airship/tarball/master'
AIRSHIP_GIT = 'git+https://github.com/mgax/airship.git#egg=Airship'
PATH_PY_URL = 'https://raw.github.com/jaraco/path.py/2.3/path.py'
VIRTUALENV_URL = 'https://raw.github.com/pypa/virtualenv/develop/virtualenv.py'
DISTRIBUTE_URL = ('http://pypi.python.org/packages/source/'
'd/distribute/distribute-0.6.32.tar.gz')
PIP_URL = 'https://github.com/qwcode/pip/tarball/53bbdf5' # wheel_install branch
WHEEL_URL = ('http://pypi.python.org/packages/source/'
'w/wheel/wheel-0.14.0.tar.gz')
AIRSHIP_CFG_TEMPLATE = """\
python:
dist: {python_dist}
interpreter: {python_interpreter}
port_map:
web: {web_port}
env:
"""
def filename(url):
    """Return the final path component of *url* (text after the last '/')."""
    _, _, tail = url.rpartition('/')
    return tail
def install(airship_home, python_bin, devel):
username = os.popen('whoami').read().strip()
virtualenv_path = airship_home / 'opt' / 'airship-venv'
virtualenv_bin = virtualenv_path / 'bin'
airship_cfg = airship_home / 'etc' / 'airship.yaml'
virtualenv_path.makedirs_p()
if not (virtualenv_bin / 'python').isfile():
import virtualenv
print "creating virtualenv in {virtualenv_path} ...".format(**locals())
virtualenv.create_environment(virtualenv_path,
search_dirs=[airship_home / 'dist'],
use_distribute=True,
never_download=True)
subprocess.check_call([virtualenv_bin / 'pip', 'install',
airship_home / 'dist' / filename(WHEEL_URL)])
if devel:
print "installing airship in development mode ..."
airship_req = ['-e', AIRSHIP_GIT]
else:
print "installing airship ..."
airship_req = [AIRSHIP_PACKAGE]
subprocess.check_call([virtualenv_bin / 'pip', 'install'] + airship_req)
if not airship_cfg.isfile():
import random
(airship_home / 'etc').mkdir_p()
base = random.randint(20, 600) * 100
airship_cfg.write_bytes(AIRSHIP_CFG_TEMPLATE.format(
python_dist=json.dumps(airship_home / dist),
web_port=json.dumps(base),
python_interpreter=json.dumps(sys.executable),
))
subprocess.check_call([virtualenv_bin / 'airship',
airship_home, 'init'])
cmd = "{airship_home}/bin/supervisord".format(**locals())
fullcmd = "su {username} -c '{cmd}'".format(**locals())
print
print ("Installation complete! Run the following command "
"on system startup:\n")
print " " + fullcmd
print
print "To start supervisord now, run this:"
print
print " " + cmd
print
def download_to(url, parent_folder, fname=None):
    """Download url into parent_folder, skipping files already present.

    fname -- optional local filename; defaults to the last URL component.
    """
    if fname is None:
        fname = filename(url)
    file_path = os.path.join(parent_folder, fname)
    # Idempotent: an existing file is assumed complete and left alone.
    if os.path.isfile(file_path):
        print "skipping {file_path}, already downloaded".format(**locals())
        return
    print "downloading {url} to {file_path}".format(**locals())
    http = urllib.urlopen(url)
    with open(file_path, 'wb') as f:
        f.write(http.read())
    http.close()
if __name__ == '__main__':
    # Usage: python2.7 install_airship.py <target-dir> [-e]
    airship_home = os.path.abspath(sys.argv[1])
    dist = os.path.join(airship_home, 'dist')
    if not os.path.isdir(dist):
        os.makedirs(dist)
    # '-e' installs airship from git in editable (development) mode.
    if len(sys.argv) > 2 and sys.argv[2] == '-e':
        devel = True
    else:
        devel = False
    # Stage the bootstrap dependencies into <home>/dist.
    download_to(PATH_PY_URL, dist)
    download_to(VIRTUALENV_URL, dist)
    download_to(DISTRIBUTE_URL, dist)
    download_to(PIP_URL, dist, 'pip-1.2.1.post1-2012-11-28.tar.gz')
    download_to(WHEEL_URL, dist)
    # Make the freshly downloaded path.py importable before using it.
    sys.path[0:0] = [dist]
    from path import path
    install(path(airship_home), sys.executable, devel)
| {
"repo_name": "mgax/airship",
"path": "install_airship.py",
"copies": "1",
"size": "4017",
"license": "bsd-2-clause",
"hash": -8046051157195396000,
"line_mean": 32.475,
"line_max": 110,
"alpha_frac": 0.6114015434,
"autogenerated": false,
"ratio": 3.3475,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.44589015434,
"avg_score": null,
"num_lines": null
} |
# Combat stat tables for each playable element / character.
# Common keys:
#   hpPoints      - starting health points
#   epPoints      - starting energy points
#   epRegenCount  - EP restored when regeneration triggers
#   epRegenChance - percent chance of regeneration per round
#   abilities     - human-readable ability descriptions (cost/damage text)

airStats = {
    'hpPoints':80,
    'epPoints':120,
    'epRegenCount':5,
    'epRegenChance':100,
    'abilities':[
        'Category 1 wind -- Cost:15 EP - Damage:10 HP',
        'Tornado -- Cost:30 EP - Damage:25 HP',
        'Updraft -- Cost:10 EP - Damage:5 HP',
        'Vortex Blast -- Cost:25 EP - Damage:0~20 EP - Gain:10~30 HP'
    ]
}

waterStats = {
    'hpPoints':120,
    'epPoints':100,
    'epRegenCount':15,
    'epRegenChance':80,
    'abilities':[
        'Water Spout -- Cost:30 EP - Damage:20~30 HP',
        'Quick Sand -- Cost:15 EP - Damage:15 HP',
        'Poison -- Cost:50 EP - Damage:5 HP per round',
        'Winter Blast -- Cost:? EP- Damage:Cost/2+5=HP'
    ]
}

earthStats = {
    'hpPoints':150,
    'epPoints':100,
    'epRegenCount':30,
    'epRegenChance':50,
    'abilities':[
        'Earth Quake Level VII -- Cost:0~50 EP - Damage:0~50 HP',
        'Sink Hole -- Cost:45 EP - Damage:25 EP per round for 4 rounds',
        'Metal Shot -- Cost:30 EP - Damage:20 HP',
        'Fissure -- Cost:15 EP - Damage:10 HP'
    ]
}

fireStats = {
    'hpPoints':100,
    'epPoints':100,
    'epRegenCount':50,
    'epRegenChance':25,
    'abilities':[
        'Refuel -- Cost:5~10 EP per log - Gain:10~20 HP for 1~2 rounds per log',
        'Fire Devil -- Cost:20~30 EP - Damage:10~30 HP',
        'Lightning Strike -- Cost:50 - Damage:30~50',
        'Lava Flow -- Cost:25~45 - Damage:20~45'
    ]
}

cloneStats = {
    'hpPoints':110,
    'epPoints':110,
    'epRegenCount':10,
    'epRegenChance':100,
    'abilities':[
        'Call in Clones -- Cost:10 EP - Clones Called in: 2',
        'Blaster Gun -- Cost:5 EP - Damage:5 HP',
        'Nuclear Bomb -- Cost:20 EP - Damage:40~60 HP',
        'Electro Whip -- Cost:10 EP - Damage:10~25 HP',
        'Electro Snake -- Cost:15 EP - Damage:20~40 HP for 4~6 Rounds',
        'Plasma Blaster -- Cost:70 EP - Damage:70~100 HP - Can\'t be used in round 1, 2, and 3'
    ]
}

narratorStats = {
    'hpPoints':100,
    'epPoints':100,
    'epRegenCount':15,
    'epRegenChance':85,
    'abilities':[
        'boring story -- Cost:15 EP - Damage:15 HP',
        'mathematics lecture -- Cost:30 EP - Effect: Target Falls Asleep',
        'sermon -- Cost:10 EP - Damage:5 EP',
        'relaxing tale -- Cost:25 EP - Gain:10~15 HP'
    ]
}

jediStats = {
    'hpPoints':150,
    'epPoints':130,
    'epRegenCount':10,
    'epRegenChance':100,
    'abilities':[
        'Force Push -- Cost:10 EP - Damage:10 HP',
        'Force Ball -- Cost:5 EP - Damage:10 HP',
        'Lightsaber -- Cost:10 EP - Damage:20 HP',
        'Force Block -- Cost:10 - Protection:30%'
    ]
}

walleStats = {
    'hpPoints':150,
    'epPoints':150,
    'epRegenCount':20,
    'epRegenChance':25,
    'abilities':[
        'Laser -- Cost:5 EP - Damage:10 HP',
        'Trash Compactor -- Cost:20 EP - Damage:30 HP',
        'EVE\'s Blaster -- Cost:30 EP - Damage:20 HP',
        'Cute Eyes -- Cost:2 EP - Causes Enemy to Deal 10HP to Itself'
    ]
}

# The Potter character's first ability slot is selectable: index into
# potterSpell chooses which spell fills slot 0 of the abilities list.
selectedPotterSpell = 0
potterSpell = ['Episkey -- Cost:10 EP - Health:+50%','The Killing Curse -- Cost:EP']

potterStats = {
    'hpPoints':100, #HP
    'epPoints':100, #EP
    'epRegenCount':10, #EP Regen
    'epRegenChance':75, #Regen chance
    'abilities':[
        potterSpell[selectedPotterSpell],
        'Expelliarmus -- Cost:10 EP Chance:50%',
        'Sectumsempra -- Cost:30 EP - Damage:0-100% HP',
        'Expulso -- Cost:15 - Damage:15-20 HP'
    ]
}
"repo_name": "Thurii/legendary-telegram",
"path": "element_game_stats.py",
"copies": "1",
"size": "3078",
"license": "cc0-1.0",
"hash": 2399175430104281600,
"line_mean": 24.4462809917,
"line_max": 89,
"alpha_frac": 0.6309291748,
"autogenerated": false,
"ratio": 2.2017167381974247,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.3332645912997425,
"avg_score": null,
"num_lines": null
} |
# AI_sample_game.py written by Duncan Murray 26/4/2015
import os
import planet
import simulator
import character
import aikif.agents.agent as agt
class ReallySimpleGameAI():
    """
    simple example of an AI which walks characters
    in a world
    """

    def __init__(self):
        # Build a small world, two agents and a turn-based simulator.
        world = planet.Planet('SimpleAIWorld', 5, 20, 10, 0.2, 0.2, 0.2, 0.5)
        traits = character.CharacterCollection(os.getcwd() + os.sep + 'data')
        #self.a1 = traits.generate_random_character()
        self.a1 = agt.Agent('Jack')
        self.a2 = agt.Agent('Jill')
        #print('type(self.a1) = ', type(self.a1))
        available_actions = ['walk', 'fight']
        self.s = simulator.Simulator('Test of SimWorld', world,
                                     [self.a1, self.a2], available_actions)
        # With no params the simulator defaults to turn based mode.
        self.s.run()

    def __str__(self):
        return str(self.s)

    def run(self):
        """
        This AI simply moves the characters towards the opposite
        edges of the grid for 3 steps or until an event halts the
        simulation.
        """
        dx, dy = 1, 0  # fixed direction of travel
        steps_taken = 0
        while self.s.get_state() != 'Halted':
            self.s.command({'name':'walk', 'type':'move', 'direction':[dx, dy]}, self.a1)
            self.s.command({'name':'walk', 'type':'run', 'direction':[dx, dy+1]}, self.a2)
            steps_taken += 1
            if steps_taken >= 3:
                break
        for agent in self.s.agents:
            print(agent.name, 'finished at position ', agent.coords['x'], agent.coords['y'])
if __name__ == '__main__':
    # Constructing the game also runs the simulator once; run() then steps it.
    my_game = ReallySimpleGameAI()
    my_game.run()
| {
"repo_name": "acutesoftware/virtual-AI-simulator",
"path": "vais/AI_sample_game.py",
"copies": "1",
"size": "1644",
"license": "mit",
"hash": -8670213196275809000,
"line_mean": 31.9,
"line_max": 88,
"alpha_frac": 0.5535279805,
"autogenerated": false,
"ratio": 3.3896907216494845,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4443218702149484,
"avg_score": null,
"num_lines": null
} |
"""aiserverproj URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from rest_framework import routers
from aiserverapp import views
# Router generating the standard list/detail routes for each resource.
router = routers.DefaultRouter()
router.register(r'district', views.DistrictViewSet)
router.register(r'block', views.BlockViewSet)
router.register(r'village', views.VillageViewSet)
router.register(r'centre', views.CentreViewSet)
router.register(r'child', views.ChildViewSet)
router.register(r'skill', views.SkillViewSet)
router.register(r'assessment', views.AssessmentViewSet)

# Wire up our API using automatic URL routing.
# Additionally, we include login URLs for the browsable API.
urlpatterns = [
    url(r'^', include(router.urls)),
    url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
    # NOTE(review): pattern is unanchored, so any URL containing 'export'
    # reaches this view -- confirm that is intended.
    url(r'export', views.center_based_report),
]
| {
"repo_name": "PayPal-Opportunity-Hack-Chennai-2015/AID-India",
"path": "server/aiserverproj/aiserverproj/urls.py",
"copies": "1",
"size": "1436",
"license": "apache-2.0",
"hash": 1611113674148764000,
"line_mean": 36.7894736842,
"line_max": 83,
"alpha_frac": 0.7395543175,
"autogenerated": false,
"ratio": 3.4854368932038833,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.47249912107038833,
"avg_score": null,
"num_lines": null
} |
# AISim1.py
# Reversi (Othello)
"""
Reversi Simulation 1 - a computer vs. computer tile flipping game
An example from Chapter 16 of
'Invent Your Own Games With Python' by Al Sweigart
A.C. LoGreco
"""
import random
import sys
def drawBoard(board):
    """
    This function prints out the board that it was passed. Returns None.
    """
    # Fixed pieces of the ASCII grid.
    HLINE = ' +---+---+---+---+---+---+---+---+'
    VLINE = ' | | | | | | | | |'

    print(' 1 2 3 4 5 6 7 8')
    print(HLINE)
    for y in range(8):
        print(VLINE)
        print(y+1, end=' ')
        for x in range(8):
            # board is indexed [column][row].
            print('| %s' % (board[x][y]), end=' ')
        print('|')
        print(VLINE)
        print(HLINE)
def resetBoard(board):
    """
    Blank every square of the board in place, then lay out the four
    standard Reversi starting pieces in the centre.
    """
    for column in board:
        for y in range(8):
            column[y] = ' '

    # Standard opening position: two tiles of each colour.
    board[3][3] = 'X'
    board[3][4] = 'O'
    board[4][3] = 'O'
    board[4][4] = 'X'
def getNewBoard():
    """Return a fresh 8x8 board (list of 8 independent columns of blanks)."""
    return [[' '] * 8 for _ in range(8)]
def isValidMove(board, tile, xstart, ystart):
    """
    Returns False if the player's move on space xstart, ystart is invalid.
    If it is a valid move, returns a list of spaces that would become the
    player's if they made a move here.
    """
    # Reject occupied or off-board target squares up front.
    if board[xstart][ystart] != ' ' or not isOnBoard(xstart, ystart):
        return False

    board[xstart][ystart] = tile # temporarily set the tile on the board.

    if tile == 'X':
        otherTile = 'O'
    else:
        otherTile = 'X'

    tilesToFlip = []
    # Scan outward in each of the eight directions from the candidate square.
    for xdirection, ydirection in [[0, 1], [1, 1], [1, 0], [1, -1], [0, -1],
                                   [-1, -1], [-1, 0], [-1, 1]]:
        x, y = xstart, ystart
        x += xdirection # first step in the direction
        y += ydirection # first step in the direction
        if isOnBoard(x, y) and board[x][y] == otherTile:
            # There is a piece belonging to the other player next to our piece.
            x += xdirection
            y += ydirection
            if not isOnBoard(x, y):
                continue
            while board[x][y] == otherTile:
                x += xdirection
                y += ydirection
                if not isOnBoard(x, y):
                    # break out of while loop, then continue in for loop
                    break
            if not isOnBoard(x, y):
                continue
            if board[x][y] == tile:
                # There are pieces to flip over. Go in the reverse direction
                # until we reach the original space, noting all the tiles
                # along the way.
                while True:
                    x -= xdirection
                    y -= ydirection
                    if x == xstart and y == ystart:
                        break
                    tilesToFlip.append([x, y])

    board[xstart][ystart] = ' ' # restore the empty space
    if len(tilesToFlip) == 0:
        # If no tiles were flipped, this is not a valid move.
        return False
    return tilesToFlip
def isOnBoard(x, y):
    """Return True when (x, y) is a coordinate located on the 8x8 board."""
    return 0 <= x <= 7 and 0 <= y <= 7
def getBoardWithValidMoves(board, tile):
    """
    Return a copy of the board in which every square that `tile` could
    legally play on is marked with a '.' hint.
    """
    hintBoard = getBoardCopy(board)
    for move in getValidMoves(hintBoard, tile):
        hintBoard[move[0]][move[1]] = '.'
    return hintBoard
def getValidMoves(board, tile):
    """
    Returns a list of [x,y] lists of valid moves for the given player on the
    given board.
    """
    validMoves = []
    for x in range(8):
        for y in range(8):
            # isValidMove returns a non-empty flip list for a legal move and
            # the literal False otherwise, so plain truthiness suffices
            # (replaces the non-idiomatic `!= False` comparison).
            if isValidMove(board, tile, x, y):
                validMoves.append([x, y])
    return validMoves
def getScoreOfBoard(board):
    """
    Count each player's tiles and return the totals as a dictionary with
    keys 'X' and 'O'.
    """
    counts = {'X': 0, 'O': 0}
    for column in board:
        for cell in column:
            if cell in counts:
                counts[cell] += 1
    return counts
def enterPlayerTile():
    """
    Lets the player type which tile they want to be.
    Returns a list with the player's tile as the first item, and the
    computer's tile as the second.
    """
    tile = ''
    # Keep prompting until a valid single-letter choice is entered.
    while not (tile == 'X' or tile == 'O'):
        print('Do you want to be X or O?')
        tile = input().upper()

    # the first element in the list is the player's tile, the second is the
    # computer's tile.
    if tile == 'X':
        return ['X', 'O']
    else:
        return ['O', 'X']
def whoGoesFirst():
    """Randomly pick the side that moves first: 'computer' or 'player'."""
    # Single coin flip via randint keeps the random stream consumption
    # identical to the original implementation.
    return 'computer' if random.randint(0, 1) == 0 else 'player'
def playAgain():
    """
    This function returns True if the player wants to play again,
    otherwise it returns False.
    """
    print('Do you want to play again? (yes or no)')
    # Any answer starting with 'y' (case-insensitive) counts as yes.
    return input().lower().startswith('y')
def makeMove(board, tile, xstart, ystart):
    """
    Place the tile on the board at xstart, ystart, and flip any of the
    opponent's pieces.
    Returns False if this is an invalid move, True if it is valid.
    """
    tilesToFlip = isValidMove(board, tile, xstart, ystart)
    # isValidMove returns the literal False for an illegal move and a
    # non-empty list otherwise; test identity instead of comparing a
    # list against a bool with `==`.
    if tilesToFlip is False:
        return False

    board[xstart][ystart] = tile
    for x, y in tilesToFlip:
        board[x][y] = tile
    return True
def getBoardCopy(board):
    """Return a new 8x8 board holding the same tiles as `board`."""
    return [[board[x][y] for y in range(8)] for x in range(8)]
def isOnCorner(x, y):
    """Return True when (x, y) is one of the four corner squares."""
    return x in (0, 7) and y in (0, 7)
def getPlayerMove(board, playerTile):
    """
    Let the player type in their move.
    Returns the move as [x, y].
    (or returns the strings 'hints' or 'quit')
    """
    DIGITS1TO8 = '1 2 3 4 5 6 7 8'.split()
    while True:
        print('Enter your move, or type quit to end the game, or hints to turn off/on hints.')
        move = input().lower()
        if move == 'quit':
            return 'quit'
        if move == 'hints':
            return 'hints'

        # A move is two digit characters: column then row, both 1-8.
        if len(move) == 2 and move[0] in DIGITS1TO8 and move[1] in DIGITS1TO8:
            x = int(move[0]) - 1
            y = int(move[1]) - 1
            if isValidMove(board, playerTile, x, y) == False:
                # Legal coordinates but an illegal move: prompt again.
                continue
            else:
                break
        else:
            print('That is not a valid move. Type the x digit (1-8), then the y digit (1-8).')
            print('For example, 81 will be the top-right corner.')
    return [x, y]
def getComputerMove(board, computerTile):
    """
    Given a board and the computer's tile, determine where to
    move and return that move as a [x, y] list.

    Strategy: take a corner if possible, otherwise the greedy
    highest-scoring move.
    """
    possibleMoves = getValidMoves(board, computerTile)

    # randomize the order of possible moves
    random.shuffle(possibleMoves)

    # always go for a corner if available.
    for x, y in possibleMoves:
        if isOnCorner(x, y):
            return [x, y]

    # Go through all the possible moves and remember the best scoring move.
    # NOTE(review): if possibleMoves is empty, bestMove is never bound and
    # the final return raises UnboundLocalError -- callers check
    # getValidMoves() first before calling this.
    bestScore = -1
    for x, y in possibleMoves:
        dupeBoard = getBoardCopy(board)
        makeMove(dupeBoard, computerTile, x, y)
        score = getScoreOfBoard(dupeBoard)[computerTile]
        if score > bestScore:
            bestMove = [x, y]
            bestScore = score
    return bestMove
def showPoints(playerTile, computerTile):
    """
    Prints out the current score.

    NOTE(review): reads the module-level `mainBoard` global rather than
    taking the board as a parameter.
    """
    scores = getScoreOfBoard(mainBoard)
    print('You have %s points. The computer has %s points.' %
          (scores[playerTile], scores[computerTile]))
# Main Game Loop
# Both sides are driven by getComputerMove(); the human only presses
# Enter between moves and decides whether to play again.
print('Welcome to Reversi!')

while True:
    # Reset the board and game.
    mainBoard = getNewBoard()
    resetBoard(mainBoard)
    if whoGoesFirst() == 'player':
        turn = 'X'
    else:
        turn = 'O'
    print('The ' + turn + ' will go first.')

    while True:
        drawBoard(mainBoard)
        scores = getScoreOfBoard(mainBoard)
        print('X has %s points. O has %s points' % (scores['X'], scores['O']))
        input('Press Enter to continue.')

        if turn == 'X':
            # X's turn.
            otherTile = 'O'
            x, y = getComputerMove(mainBoard, 'X')
            makeMove(mainBoard, 'X', x, y)
        else:
            # O's turn.
            otherTile = 'X'
            x, y = getComputerMove(mainBoard, 'O')
            makeMove(mainBoard, 'O', x, y)

        # Game ends when the side to move next has no legal move.
        if getValidMoves(mainBoard, otherTile) == []:
            break
        else:
            turn = otherTile

    # Display the final score.
    drawBoard(mainBoard)
    scores = getScoreOfBoard(mainBoard)
    print('X scored %s points. O scored %s points.' %
          (scores['X'], scores['O']))

    if not playAgain():
        sys.exit()
| {
"repo_name": "aclogreco/InventGamesWP",
"path": "ch16/AISim1.py",
"copies": "1",
"size": "9436",
"license": "bsd-2-clause",
"hash": -7855244984931019000,
"line_mean": 25.2841225627,
"line_max": 94,
"alpha_frac": 0.5391055532,
"autogenerated": false,
"ratio": 3.654531371030209,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9682557899745423,
"avg_score": 0.002215804896957007,
"num_lines": 359
} |
# AISim2.py
# Reversi (Othello)
"""
Reversi Simulation 2 - a computer vs. computer tile flipping game simulation
An example from Chapter 16 of
'Invent Your Own Games With Python' by Al Sweigart
A.C. LoGreco
"""
import random
import sys
def drawBoard(board):
    """
    This function prints out the board that it was passed. Returns None.
    """
    # Fixed pieces of the ASCII grid.
    HLINE = ' +---+---+---+---+---+---+---+---+'
    VLINE = ' | | | | | | | | |'

    print(' 1 2 3 4 5 6 7 8')
    print(HLINE)
    for y in range(8):
        print(VLINE)
        print(y+1, end=' ')
        for x in range(8):
            # board is indexed [column][row].
            print('| %s' % (board[x][y]), end=' ')
        print('|')
        print(VLINE)
        print(HLINE)
def resetBoard(board):
    """
    Blank every square of the board in place, then lay out the four
    standard Reversi starting pieces in the centre.
    """
    for column in board:
        for y in range(8):
            column[y] = ' '

    # Standard opening position: two tiles of each colour.
    board[3][3] = 'X'
    board[3][4] = 'O'
    board[4][3] = 'O'
    board[4][4] = 'X'
def getNewBoard():
    """Return a fresh 8x8 board (list of 8 independent columns of blanks)."""
    return [[' '] * 8 for _ in range(8)]
def isValidMove(board, tile, xstart, ystart):
    """
    Returns False if the player's move on space xstart, ystart is invalid.
    If it is a valid move, returns a list of spaces that would become the
    player's if they made a move here.
    """
    # Reject occupied or off-board target squares up front.
    if board[xstart][ystart] != ' ' or not isOnBoard(xstart, ystart):
        return False

    board[xstart][ystart] = tile # temporarily set the tile on the board.

    if tile == 'X':
        otherTile = 'O'
    else:
        otherTile = 'X'

    tilesToFlip = []
    # Scan outward in each of the eight directions from the candidate square.
    for xdirection, ydirection in [[0, 1], [1, 1], [1, 0], [1, -1], [0, -1],
                                   [-1, -1], [-1, 0], [-1, 1]]:
        x, y = xstart, ystart
        x += xdirection # first step in the direction
        y += ydirection # first step in the direction
        if isOnBoard(x, y) and board[x][y] == otherTile:
            # There is a piece belonging to the other player next to our piece.
            x += xdirection
            y += ydirection
            if not isOnBoard(x, y):
                continue
            while board[x][y] == otherTile:
                x += xdirection
                y += ydirection
                if not isOnBoard(x, y):
                    # break out of while loop, then continue in for loop
                    break
            if not isOnBoard(x, y):
                continue
            if board[x][y] == tile:
                # There are pieces to flip over. Go in the reverse direction
                # until we reach the original space, noting all the tiles
                # along the way.
                while True:
                    x -= xdirection
                    y -= ydirection
                    if x == xstart and y == ystart:
                        break
                    tilesToFlip.append([x, y])

    board[xstart][ystart] = ' ' # restore the empty space
    if len(tilesToFlip) == 0:
        # If no tiles were flipped, this is not a valid move.
        return False
    return tilesToFlip
def isOnBoard(x, y):
    """Return True when (x, y) is a coordinate located on the 8x8 board."""
    return 0 <= x <= 7 and 0 <= y <= 7
def getBoardWithValidMoves(board, tile):
    """
    Return a copy of the board in which every square that `tile` could
    legally play on is marked with a '.' hint.
    """
    hintBoard = getBoardCopy(board)
    for move in getValidMoves(hintBoard, tile):
        hintBoard[move[0]][move[1]] = '.'
    return hintBoard
def getValidMoves(board, tile):
    """
    Returns a list of [x,y] lists of valid moves for the given player on the
    given board.
    """
    validMoves = []
    for x in range(8):
        for y in range(8):
            # isValidMove returns a non-empty flip list for a legal move and
            # the literal False otherwise, so plain truthiness suffices
            # (replaces the non-idiomatic `!= False` comparison).
            if isValidMove(board, tile, x, y):
                validMoves.append([x, y])
    return validMoves
def getScoreOfBoard(board):
    """
    Count each player's tiles and return the totals as a dictionary with
    keys 'X' and 'O'.
    """
    counts = {'X': 0, 'O': 0}
    for column in board:
        for cell in column:
            if cell in counts:
                counts[cell] += 1
    return counts
def enterPlayerTile():
    """
    Lets the player type which tile they want to be.
    Returns a list with the player's tile as the first item, and the
    computer's tile as the second.
    """
    tile = ''
    # Keep prompting until a valid single-letter choice is entered.
    while not (tile == 'X' or tile == 'O'):
        print('Do you want to be X or O?')
        tile = input().upper()

    # the first element in the list is the player's tile, the second is the
    # computer's tile.
    if tile == 'X':
        return ['X', 'O']
    else:
        return ['O', 'X']
def whoGoesFirst():
    """Randomly pick the side that moves first: 'computer' or 'player'."""
    # Single coin flip via randint keeps the random stream consumption
    # identical to the original implementation.
    return 'computer' if random.randint(0, 1) == 0 else 'player'
def playAgain():
    """
    This function returns True if the player wants to play again,
    otherwise it returns False.
    """
    print('Do you want to play again? (yes or no)')
    # Any answer starting with 'y' (case-insensitive) counts as yes.
    return input().lower().startswith('y')
def makeMove(board, tile, xstart, ystart):
    """
    Place the tile on the board at xstart, ystart, and flip any of the
    opponent's pieces.
    Returns False if this is an invalid move, True if it is valid.
    """
    tilesToFlip = isValidMove(board, tile, xstart, ystart)
    # isValidMove returns the literal False for an illegal move and a
    # non-empty list otherwise; test identity instead of comparing a
    # list against a bool with `==`.
    if tilesToFlip is False:
        return False

    board[xstart][ystart] = tile
    for x, y in tilesToFlip:
        board[x][y] = tile
    return True
def getBoardCopy(board):
    """Return a new 8x8 board holding the same tiles as `board`."""
    return [[board[x][y] for y in range(8)] for x in range(8)]
def isOnCorner(x, y):
    """Return True when (x, y) is one of the four corner squares."""
    return x in (0, 7) and y in (0, 7)
def getPlayerMove(board, playerTile):
    """
    Let the player type in their move.
    Returns the move as [x, y].
    (or returns the strings 'hints' or 'quit')
    """
    DIGITS1TO8 = '1 2 3 4 5 6 7 8'.split()
    while True:
        print('Enter your move, or type quit to end the game, or hints to turn off/on hints.')
        move = input().lower()
        if move == 'quit':
            return 'quit'
        if move == 'hints':
            return 'hints'

        # A move is two digit characters: column then row, both 1-8.
        if len(move) == 2 and move[0] in DIGITS1TO8 and move[1] in DIGITS1TO8:
            x = int(move[0]) - 1
            y = int(move[1]) - 1
            if isValidMove(board, playerTile, x, y) == False:
                # Legal coordinates but an illegal move: prompt again.
                continue
            else:
                break
        else:
            print('That is not a valid move. Type the x digit (1-8), then the y digit (1-8).')
            print('For example, 81 will be the top-right corner.')
    return [x, y]
def getComputerMove(board, computerTile):
    """
    Given a board and the computer's tile, determine where to
    move and return that move as a [x, y] list.

    Strategy: take a corner if possible, otherwise the greedy
    highest-scoring move.
    """
    possibleMoves = getValidMoves(board, computerTile)

    # randomize the order of possible moves
    random.shuffle(possibleMoves)

    # always go for a corner if available.
    for x, y in possibleMoves:
        if isOnCorner(x, y):
            return [x, y]

    # Go through all the possible moves and remember the best scoring move.
    # NOTE(review): if possibleMoves is empty, bestMove is never bound and
    # the final return raises UnboundLocalError -- callers check
    # getValidMoves() first before calling this.
    bestScore = -1
    for x, y in possibleMoves:
        dupeBoard = getBoardCopy(board)
        makeMove(dupeBoard, computerTile, x, y)
        score = getScoreOfBoard(dupeBoard)[computerTile]
        if score > bestScore:
            bestMove = [x, y]
            bestScore = score
    return bestMove
def showPoints(playerTile, computerTile):
    """
    Prints out the current score.

    NOTE(review): reads the module-level `mainBoard` global rather than
    taking the board as a parameter.
    """
    scores = getScoreOfBoard(mainBoard)
    print('You have %s points. The computer has %s points.' %
          (scores[playerTile], scores[computerTile]))
# Main Game Loop
# Runs a batch of fully automated computer-vs-computer games and reports
# aggregate win/tie statistics at the end.
print('Welcome to Reversi!')

# Tally of results across all simulated games.
xwins = 0
owins = 0
ties = 0
numGames = int(input('Enter number of games to run: '))

for game in range(numGames):
    print('Game #%s:' % (game), end=' ')
    # Reset the board and game.
    mainBoard = getNewBoard()
    resetBoard(mainBoard)
    if whoGoesFirst() == 'player':
        turn = 'X'
    else:
        turn = 'O'

    while True:
        if turn == 'X':
            # X's turn.
            otherTile = 'O'
            x, y = getComputerMove(mainBoard, 'X')
            makeMove(mainBoard, 'X', x, y)
        else:
            # O's turn.
            otherTile = 'X'
            x, y = getComputerMove(mainBoard, 'O')
            makeMove(mainBoard, 'O', x, y)

        # Game ends when the side to move next has no legal move.
        if getValidMoves(mainBoard, otherTile) == []:
            break
        else:
            turn = otherTile

    # Display the final score.
    scores = getScoreOfBoard(mainBoard)
    print('X scored %s points. O scored %s points.' %
          (scores['X'], scores['O']))

    if scores['X'] > scores['O']:
        xwins += 1
    elif scores['X'] < scores['O']:
        owins += 1
    else:
        ties += 1

# Report aggregate win percentages.
# NOTE(review): numGames is rebound to float for the percentage maths,
# so the final line prints e.g. '10.0 games total'.
numGames = float(numGames)
xpercent = round(((xwins / numGames) * 100), 2)
opercent = round(((owins / numGames) * 100), 2)
tiepercent = round(((ties / numGames) * 100), 2)
print('X wins %s games (%s%%), O wins %s games (%s%%), ties for %s games (%s%%) of %s games total.' %
      (xwins, xpercent, owins, opercent, ties, tiepercent, numGames))
| {
"repo_name": "aclogreco/InventGamesWP",
"path": "ch16/AISim2.py",
"copies": "1",
"size": "9764",
"license": "bsd-2-clause",
"hash": 3369069184803645000,
"line_mean": 25.460704607,
"line_max": 101,
"alpha_frac": 0.5405571487,
"autogenerated": false,
"ratio": 3.575247162211644,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4615804310911644,
"avg_score": null,
"num_lines": null
} |
# AISim#.py
# Reversi (Othello)
"""
Reversi Simulation 1 - a computer vs. computer tile flipping game simulation
This version includes multiple AI algorithms.
An example from Chapter 16 of
'Invent Your Own Games With Python' by Al Sweigart
A.C. LoGreco
"""
import random
import sys
def drawBoard(board):
    """
    This function prints out the board that it was passed. Returns None.
    """
    # Fixed pieces of the ASCII grid.
    HLINE = ' +---+---+---+---+---+---+---+---+'
    VLINE = ' | | | | | | | | |'

    print(' 1 2 3 4 5 6 7 8')
    print(HLINE)
    for y in range(8):
        print(VLINE)
        print(y+1, end=' ')
        for x in range(8):
            # board is indexed [column][row].
            print('| %s' % (board[x][y]), end=' ')
        print('|')
        print(VLINE)
        print(HLINE)
def resetBoard(board):
    """Blank every space on *board*, then lay out the standard four-piece
    Reversi starting position in the centre."""
    for column in board:
        for i in range(len(column)):
            column[i] = ' '
    # Centre opening: X on the main diagonal, O on the anti-diagonal.
    board[3][3], board[4][4] = 'X', 'X'
    board[3][4], board[4][3] = 'O', 'O'
def getNewBoard():
    """Return a brand-new blank board: eight columns of eight ' ' cells."""
    return [[' ' for _ in range(8)] for _ in range(8)]
def isValidMove(board, tile, xstart, ystart):
    """
    Returns False if the player's move on space xstart, ystart is invalid.
    If it is a valid move, returns a list of spaces that would become the
    player's if they made a move here.

    NOTE(review): the occupancy test runs before the bounds test, so a
    negative xstart/ystart would wrap around via Python indexing - verify
    callers always pass on-board coordinates.
    """
    if board[xstart][ystart] != ' ' or not isOnBoard(xstart, ystart):
        return False

    board[xstart][ystart] = tile # temporarily set the tile on the board.

    if tile == 'X':
        otherTile = 'O'
    else:
        otherTile = 'X'

    tilesToFlip = []
    # Scan all eight compass directions from the proposed square.
    for xdirection, ydirection in [[0, 1], [1, 1], [1, 0], [1, -1], [0, -1],
                                   [-1, -1], [-1, 0], [-1, 1]]:
        x, y = xstart, ystart
        x += xdirection # first step in the direction
        y += ydirection # first step in the direction
        if isOnBoard(x, y) and board[x][y] == otherTile:
            # There is a piece belonging to the other player next to our piece.
            x += xdirection
            y += ydirection
            if not isOnBoard(x, y):
                continue
            # Walk over the run of opposing tiles.
            while board[x][y] == otherTile:
                x += xdirection
                y += ydirection
                if not isOnBoard(x, y):
                    # break out of while loop, then continue in for loop
                    break
            if not isOnBoard(x, y):
                continue
            if board[x][y] == tile:
                # There are pieces to flip over. Go in the reverse direction
                # until we reach the original space, noting all the tiles
                # along the way.
                while True:
                    x -= xdirection
                    y -= ydirection
                    if x == xstart and y == ystart:
                        break
                    tilesToFlip.append([x, y])

    board[xstart][ystart] = ' ' # restore the empty space
    if len(tilesToFlip) == 0:
        # If no tiles were flipped, this is not a valid move.
        return False
    return tilesToFlip
def isOnBoard(x, y):
    """
    Returns True if the coordinates are located on the 8x8 board.
    """
    # Chained comparisons read more naturally than four separate tests.
    return 0 <= x <= 7 and 0 <= y <= 7
def getBoardWithValidMoves(board, tile):
    """Return a copy of *board* with '.' marking every legal move for *tile*."""
    marked = getBoardCopy(board)
    moves = getValidMoves(marked, tile)
    for x, y in moves:
        marked[x][y] = '.'
    return marked
def getValidMoves(board, tile):
    """Return every legal [x, y] move for *tile* on *board*."""
    return [[x, y]
            for x in range(8)
            for y in range(8)
            if isValidMove(board, tile, x, y) != False]
def getScoreOfBoard(board):
    """Count the tiles on *board* and return a {'X': ..., 'O': ...} dict."""
    flat = [cell for column in board for cell in column]
    return {'X': flat.count('X'), 'O': flat.count('O')}
def enterPlayerTile():
    """
    Lets the player type which tile they want to be.
    Returns a list with the player's tile as the first item, and the
    computer's tile as the second.
    """
    tile = ''
    # Keep prompting until the player types a valid tile letter.
    while not (tile == 'X' or tile == 'O'):
        print('Do you want to be X or O?')
        tile = input().upper()

    # the first element in the list is the player's tile, the second is the
    # computer's tile.
    if tile == 'X':
        return ['X', 'O']
    else:
        return ['O', 'X']
def whoGoesFirst():
    """Randomly choose the player who goes first (50/50 coin flip)."""
    return 'computer' if random.randint(0, 1) == 0 else 'player'
def playAgain():
    """
    This function returns True if the player wants to play again,
    otherwise it returns False.
    """
    print('Do you want to play again? (yes or no)')
    # Any answer beginning with 'y' (case-insensitive) counts as yes.
    return input().lower().startswith('y')
def makeMove(board, tile, xstart, ystart):
    """Place *tile* at (xstart, ystart) and flip every captured piece.

    Returns True when the move was legal and applied, False otherwise.
    """
    flips = isValidMove(board, tile, xstart, ystart)
    if flips == False:
        return False
    # Claim the played square together with every flipped square.
    for x, y in [[xstart, ystart]] + flips:
        board[x][y] = tile
    return True
def getBoardCopy(board):
    """Return an independent duplicate of *board*.

    Each column list is copied with a slice, so mutating the duplicate never
    touches the original.  Unlike the previous cell-by-cell loop this does
    not depend on getNewBoard() and works for any rectangular board size,
    while remaining identical for the standard 8x8 case.
    """
    return [column[:] for column in board]
def isOnCorner(x, y):
    """
    Returns True if the position is in one of the four corners.
    """
    # A corner is any square whose both coordinates are on the board edge 0/7.
    return x in (0, 7) and y in (0, 7)
def getPlayerMove(board, playerTile):
    """
    Let the player type in their move.
    Returns the move as [x, y].
    (or returns the strings 'hints' or 'quit')
    """
    DIGITS1TO8 = '1 2 3 4 5 6 7 8'.split()
    while True:
        print('Enter your move, or type quit to end the game, or hints to turn off/on hints.')
        move = input().lower()
        if move == 'quit':
            return 'quit'
        if move == 'hints':
            return 'hints'

        # A move is two digits: column then row, both 1-8, converted to 0-7.
        if len(move) == 2 and move[0] in DIGITS1TO8 and move[1] in DIGITS1TO8:
            x = int(move[0]) - 1
            y = int(move[1]) - 1
            if isValidMove(board, playerTile, x, y) == False:
                # Legal coordinates but an illegal Reversi move: re-prompt.
                continue
            else:
                break
        else:
            print('That is not a valid move. Type the x digit (1-8), then the y digit (1-8).')
            print('For example, 81 will be the top-right corner.')

    return [x, y]
def getComputerMove(board, computerTile):
    """Pick the computer's move as an [x, y] pair.

    Strategy: take a corner whenever one is available; otherwise pick the
    move that yields the highest immediate score (greedy one-ply search).
    """
    possibleMoves = getValidMoves(board, computerTile)
    random.shuffle(possibleMoves)  # break ties unpredictably

    # Corners can never be flipped back, so grab one whenever we can.
    for move in possibleMoves:
        if isOnCorner(move[0], move[1]):
            return move

    # Otherwise simulate each move and keep the highest-scoring one.
    bestScore = -1
    for move in possibleMoves:
        trial = getBoardCopy(board)
        makeMove(trial, computerTile, move[0], move[1])
        score = getScoreOfBoard(trial)[computerTile]
        if score > bestScore:
            bestMove = move
            bestScore = score
    return bestMove
def showPoints(playerTile, computerTile):
    """
    Prints out the current score.

    NOTE(review): reads the module-level `mainBoard` global rather than
    taking the board as a parameter - confirm it is only called mid-game.
    """
    scores = getScoreOfBoard(mainBoard)
    print('You have %s points. The computer has %s points.' %
          (scores[playerTile], scores[computerTile]))
def getRandomMove(board, tile):
    """
    Return a random move.
    """
    # Uniform choice among the legal moves; random.choice raises IndexError
    # on an empty list, so this assumes the player still has a valid move.
    return random.choice( getValidMoves(board, tile) )
def isOnSide(x, y):
    """
    Returns True if the board location is on the side (outer edge) of the board.
    """
    # An edge square has at least one coordinate on the 0/7 border.
    return x in (0, 7) or y in (0, 7)
def getCornerSideBestMove(board, tile):
    """
    Return a corner move, or a side move, or the best move.

    Preference order: corner > edge > highest-scoring move (via
    getComputerMove).
    """
    possibleMoves = getValidMoves(board, tile)

    # randomize the order of the possible moves.
    random.shuffle(possibleMoves)

    # always go for a corner if available.
    for x, y in possibleMoves:
        if isOnCorner(x, y):
            return [x, y]

    # if there is no corner, return a side move.
    for x, y in possibleMoves:
        if isOnSide(x, y):
            return [x, y]

    return getComputerMove(board, tile)
def getSideBestMove(board, tile):
    """
    Return a side move or the best move.

    Preference order: edge > highest-scoring move (via getComputerMove).
    """
    possibleMoves = getValidMoves(board, tile)

    # randomize the order of the possible moves.
    random.shuffle(possibleMoves)

    # return a side move, if available
    for x, y in possibleMoves:
        if isOnSide(x, y):
            return [x, y]

    return getComputerMove(board, tile)
def getWorstMove(board, tile):
    """
    Return the move that flips the least number of tiles.

    Used as a deliberately bad AI for the simulation comparisons.
    """
    possibleMoves = getValidMoves(board, tile)

    # randomize the order of the possible moves
    random.shuffle(possibleMoves)

    # Go through all the possible moves and remember the worst scoring move.
    # 64 squares total, so any real score beats this starting value.
    worstScore = 64
    for x, y in possibleMoves:
        dupeBoard = getBoardCopy(board)
        makeMove(dupeBoard, tile, x, y)
        score = getScoreOfBoard(dupeBoard)[tile]
        if score < worstScore:
            worstMove = [x, y]
            worstScore = score

    return worstMove
def getCornerWorstMove(board, tile):
    """
    Return a corner or the move that flips the least number of tiles.

    Preference order: corner > lowest-scoring move (via getWorstMove).
    """
    possibleMoves = getValidMoves(board, tile)

    # randomize the order of the possible moves.
    random.shuffle(possibleMoves)

    # always go for a corner if available.
    for x, y in possibleMoves:
        if isOnCorner(x, y):
            return [x, y]

    return getWorstMove(board, tile)
# Main Game Loop
# Runs computer-vs-computer games and tallies the results.  Swap either
# side's AI by uncommenting one of the alternative getXxxMove lines below.
print('Welcome to Reversi!')

xwins = 0
owins = 0
ties = 0
numGames = int(input('Enter number of games to run: '))

for game in range(numGames):
    print('Game #%s:' % (game), end=' ')
    # Reset the board and game.
    mainBoard = getNewBoard()
    resetBoard(mainBoard)
    if whoGoesFirst() == 'player':
        turn = 'X'
    else:
        turn = 'O'

    while True:
        if turn == 'X':
            # X's turn.
            otherTile = 'O'
            x, y = getComputerMove(mainBoard, 'X')
            #x, y = getRandomMove(mainBoard, 'X')
            #x, y = getCornerSideBestMove(mainBoard, 'X')
            #x, y = getSideBestMove(mainBoard, 'X')
            #x, y = getWorstMove(mainBoard, 'X')
            #x, y = getCornerWorstMove(mainBoard, 'X')
            makeMove(mainBoard, 'X', x, y)
        else:
            # O's turn.
            otherTile = 'X'
            x, y = getComputerMove(mainBoard, 'O')
            #x, y = getRandomMove(mainBoard, 'O')
            #x, y = getCornerSideBestMove(mainBoard, 'O')
            #x, y = getSideBestMove(mainBoard, 'O')
            #x, y = getWorstMove(mainBoard, 'O')
            #x, y = getCornerWorstMove(mainBoard, 'O')
            makeMove(mainBoard, 'O', x, y)

        # The game ends as soon as the opponent has no legal reply.
        if getValidMoves(mainBoard, otherTile) == []:
            break
        else:
            turn = otherTile

    # Display the final score.
    scores = getScoreOfBoard(mainBoard)
    print('X scored %s points. O scored %s points.' %
          (scores['X'], scores['O']))

    if scores['X'] > scores['O']:
        xwins += 1
    elif scores['X'] < scores['O']:
        owins += 1
    else:
        ties += 1

# Float conversion so the percentages below use true division.
numGames = float(numGames)
xpercent = round(((xwins / numGames) * 100), 2)
opercent = round(((owins / numGames) * 100), 2)
tiepercent = round(((ties / numGames) * 100), 2)
print('X wins %s games (%s%%), O wins %s games (%s%%), ties for %s games (%s%%) of %s games total.' %
      (xwins, xpercent, owins, opercent, ties, tiepercent, numGames))
| {
"repo_name": "aclogreco/InventGamesWP",
"path": "ch16/AISim3.py",
"copies": "1",
"size": "12600",
"license": "bsd-2-clause",
"hash": -5351669126102697000,
"line_mean": 25.6949152542,
"line_max": 101,
"alpha_frac": 0.556984127,
"autogenerated": false,
"ratio": 3.5553047404063207,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4612288867406321,
"avg_score": null,
"num_lines": null
} |
"""A janky globstar implementation, not completely stupid but not really
optimized. Homegrown because glob2 had no license at the time. It may be
worth replacing in the future, but for now it's adequate.
https://github.com/miracle2k/python-glob2/issues/2
"""
import os
from posixpath import join
import re
import sys
from os.path import isfile, isdir
from fnmatch import fnmatch
IGNORED_FOLDERS = ('.git', '.svn', '.hg')
def tokenize_glob(glob):
    """Split a glob pattern into its '/'-separated path segments."""
    segments = glob.split('/')
    return segments
def search_folder(path, search_path, globstar_mode=False):
    """Recursively collect the files under *path* matching the tokenized
    glob *search_path* (a list of segments from tokenize_glob).

    A leading '**' segment turns on *globstar_mode*, which keeps the current
    segment active at every depth below *path*.  Folders listed in
    IGNORED_FOLDERS are never descended into.

    NOTE(review): when globstar_mode is on and the final segment matches a
    folder name, the elif branch recurses with an empty search_path, which
    would raise IndexError at `search_path[0]` - verify against callers.
    """
    if search_path[0] == '**':
        # Consume the globstar and match the remaining segments at any depth.
        return search_folder(path, search_path[1:], globstar_mode=True)

    matches = []
    listing = os.listdir(path)

    if len(search_path) == 1:
        # Last segment: it names files directly inside this directory.
        matches += find_files(path, search_path[0])
        if not globstar_mode:
            return matches

    for item in listing:
        if isdir(os.path.join(path, item)) and item not in IGNORED_FOLDERS:
            is_matching_folder = fnmatch(item, search_path[0])
            # `not is_matching_folder` prevents double-counting.
            if globstar_mode and not is_matching_folder:
                matches += search_folder(
                    join(path, item), search_path, globstar_mode=True)
            elif is_matching_folder:
                matches += search_folder(join(path, item), search_path[1:])

    return matches
def find_files(folder, pattern):
    """Return the files directly inside *folder* whose names match *pattern*.

    Non-recursive; directories are skipped.  Standard fnmatch expansion
    only; results are POSIX-joined paths.
    """
    return [
        join(folder, name)
        for name in os.listdir(folder)
        if isfile(os.path.join(folder, name)) and fnmatch(name, pattern)
    ]
def glob(file_glob, root='.'):
    """Returns a list of file paths in [root] that match [file_glob].

    Supports standard glob tokens and globstars.  Leading './' or '../'
    prefixes are stripped and repeated '**/' runs collapsed so patterns
    stay canonical before searching.
    """
    # Collapse redundant consecutive globstars into one.
    file_glob = re.sub(r'(\*\*/){2,}', '**/', file_glob)
    # Drop a leading ./ or ../ prefix (either slash style).
    file_glob = re.sub(r'^\.{1,2}[\\/]+', '', file_glob)
    # (The unused `tests` accumulator from the original has been removed.)
    search_path = tokenize_glob(file_glob)
    return search_folder(root, search_path)
| {
"repo_name": "bulatb/yuno",
"path": "yuno/core/recursive_glob.py",
"copies": "1",
"size": "2264",
"license": "mit",
"hash": -7864864079607873000,
"line_mean": 28.0256410256,
"line_max": 80,
"alpha_frac": 0.6347173145,
"autogenerated": false,
"ratio": 3.6457326892109503,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.47804500037109504,
"avg_score": null,
"num_lines": null
} |
# AJAX endpoints use POST; everything else uses GET.
import os
import sys
import glob
import json
import DBreader as db
import tweepy as tp
import flask
from functools import wraps
import zipfile
import urllib
import datetime
# Used for timeline (TL) collection.
import adminTL
import gettweet

# Serve relative paths (setting.json, DB/...) from the script's directory.
os.chdir(os.path.dirname(os.path.abspath(__file__)))

# Instantiate this application under the name `app`.
app = flask.Flask(__name__)
setting = json.load(open("setting.json"))
app.secret_key = setting['SecretKey']
app.debug = setting['Debug'] # debug mode

# Start the background timeline-collection thread (production only).
if(setting['Debug'] == False):
    t1 = adminTL.TLThread()
    t1.setDaemon(True)
    t1.start()
# Helpers usable only after Twitter authentication.
def tp_api():
    """Return a tweepy API client bound to this session's access token."""
    auth = tp.OAuthHandler(setting['twitter_API']['CK'], setting['twitter_API']['CS'], setting['twitter_API']['Callback_URL'])
    auth.set_access_token(flask.session['key'],flask.session['secret'])
    return tp.API(auth)

def admin_check():
    """True when the logged-in screen name matches the configured admin ID."""
    return flask.session['name'] == setting['AdminID']

def login_check(func):
    """Decorator: redirect to the Twitter OAuth flow unless a session exists."""
    @wraps(func)
    def checker(*args, **kwargs):
        # A properly authenticated user has session data.
        try:
            if flask.session['userID'] is None:
                return flask.redirect(flask.url_for('twitter_oauth'))
        except:
            return flask.redirect(flask.url_for('twitter_oauth'))
        return func(*args, **kwargs)
    return checker
# Web-application routing starts here.

# Top page
@app.route('/')
def index():
    """Landing page; if auth cookies exist, restore the session and skip login."""
    key = flask.request.cookies.get('key')
    secret = flask.request.cookies.get('secret')
    if key is None or secret is None:
        return flask.render_template('index.html', route="index")
    else:
        flask.session['key'] = key
        flask.session['secret'] = secret
        return flask.redirect(flask.url_for('twitter_authed', cookie=True))

# Certificate renewal (Let's Encrypt HTTP-01 challenge responder).
@app.route('/.well-known/acme-challenge/<token_value>')
def letsencrypt(token_value):
    """Serve the ACME challenge answer written by the cert-renewal process."""
    with open('/var/www/html/tpts_cert/.well-known/acme-challenge/{}'.format(token_value)) as f:
        answer = f.readline().strip()
    return answer

# About this site
@app.route('/about')
def about():
    """Static about page; shows the configured tweet-fetch limit."""
    return flask.render_template('about.html', count=setting["MaxCount"], route="about")
# Twitter authentication
@app.route('/twitter_auth', methods=['GET'])
def twitter_oauth():
    """Begin the OAuth dance, or restore the session from cookies if present."""
    # Cookie check
    key = flask.request.cookies.get('key')
    secret = flask.request.cookies.get('secret')
    if key is None or secret is None:
        # Perform the application's OAuth handshake via tweepy.
        auth_temp = tp.OAuthHandler(setting['twitter_API']['CK'], setting['twitter_API']['CS'], setting['twitter_API']['Callback_URL'])
        # Obtain the URL used to authorize this application.
        redirect_url = auth_temp.get_authorization_url()
        # Save the request_token needed after the callback in the session.
        flask.session['request_token'] = auth_temp.request_token
        # Redirect; "authenticate" gives the sign-in (not authorize) flow.
        return flask.redirect(redirect_url.replace("authorize","authenticate"))
    else:
        flask.session['key'] = key
        flask.session['secret'] = secret
        return flask.redirect(flask.url_for('twitter_authed', cookie=True))

# Twitter authentication completed (OAuth callback)
@app.route('/authed', methods=['GET'])
def twitter_authed():
    """Finish OAuth, cache the user's identity in the session, set cookies."""
    # Exchange the verifier for tokens unless we arrived via the cookie path.
    if flask.request.args.get('cookie') != "True":
        auth_temp = tp.OAuthHandler(setting['twitter_API']['CK'], setting['twitter_API']['CS'], setting['twitter_API']['Callback_URL'])
        auth_temp.request_token = flask.session['request_token']
        auth_temp.get_access_token(flask.request.args.get('oauth_verifier'))
        flask.session['key'] = auth_temp.access_token
        flask.session['secret'] = auth_temp.access_token_secret
        flask.session['request_token'] = None
    # Look up the authenticated user.
    flask.session['name'] = tp_api().me().screen_name
    flask.session['userID'] = tp_api().me().id_str
    response = flask.make_response(flask.redirect(flask.url_for('user_page')))
    response.set_cookie('key', flask.session['key'])
    response.set_cookie('secret', flask.session['secret'])
    return response

# Logout button
@app.route('/logout')
def logout():
    """Clear the auth cookies and session, then return to the top page."""
    response = flask.make_response(flask.redirect(flask.url_for('index')))
    response.set_cookie('key', '', expires=0)
    response.set_cookie('secret', '', expires=0)
    flask.session.clear()
    return response
# Everything below requires authentication.
# User menu
@app.route('/menu')
@login_check
def user_page():
    """Menu page; admins (or open configurations) also see the admin DB list."""
    filelist = []
    if admin_check() or setting['AdminShow'] or setting['LimitMode']:
        # Path separator position differs per platform, hence the split index.
        if sys.platform == "win32":
            filelist = sorted([path.split(os.sep)[1].split('.')[0] for path in glob.glob("DB/admin/*.db")])
        else:
            filelist = sorted([path.split(os.sep)[2].split('.')[0] for path in glob.glob("DB/admin/*.db")])
        return flask.render_template('menu.html', admin=admin_check(), setting=setting, dblist=filelist, select=filelist[-1])
    else:
        return flask.render_template('menu.html', admin=admin_check(), setting=setting)
# Admin-only pages
# Log page
@app.route('/admin/logs')
@login_check
def log_page():
    """List the available daily log files (admin only)."""
    if admin_check():
        if sys.platform == "win32":
            loglist = sorted([path.split(os.sep)[1].split('.')[0] for path in glob.glob("DB/log/*.log")])
        else:
            loglist = sorted([path.split(os.sep)[2].split('.')[0] for path in glob.glob("DB/log/*.log")])
    else:
        return flask.redirect(flask.url_for('user_page'))
    return flask.render_template('log.html', filelist=loglist)

@app.route('/getlog', methods=['POST'])
@login_check
def get_log():
    """Return the raw contents of one dated log file (admin only, AJAX POST)."""
    if admin_check():
        filename = flask.request.form['date']
        log = open("DB/log/{}.log".format(filename))
        text = log.read()
        log.close()
    else:
        return flask.redirect(flask.url_for('user_page'))
    # Placeholder text ("no log recorded yet") when the file is empty.
    if text == "":
        text = "まだログは記録されていません"
    return text
@app.route('/delete', methods=['POST'])
@login_check
def deltefile():
    """Purge stale files: user DBs after 3 days, admin DBs and logs after 14."""
    if admin_check():
        now = datetime.datetime.now()
        # User databases: remove when untouched for 3+ days.
        for path in glob.glob("DB/user/*.db"):
            check = now - datetime.datetime.fromtimestamp(os.stat(path).st_mtime)
            if check.days >= 3:
                try:
                    os.remove(path)
                except:
                    continue
        # Admin databases: remove when untouched for 14+ days.
        for path in glob.glob("DB/admin/*.db"):
            check = now - datetime.datetime.fromtimestamp(os.stat(path).st_mtime)
            if check.days >= 14:
                try:
                    os.remove(path)
                except:
                    continue
        # Admin logs: remove when untouched for 14+ days.
        for path in glob.glob("DB/log/*.log"):
            check = now - datetime.datetime.fromtimestamp(os.stat(path).st_mtime)
            if check.days >= 14:
                try:
                    os.remove(path)
                except:
                    continue
    else:
        flask.abort(401)
    return "OK"
# Pages shared by all users
# Build the image list (AJAX POST), then return the /view URL to load.
@app.route('/makelist', methods=['POST'])
@login_check
def make_list():
    """Collect tweets for the requested mode, then return the list-view URL."""
    mode = flask.request.form['mode']
    dbname = flask.session['userID']
    try:
        query = flask.request.form['query']
    except:
        query = ""
    if mode == "admin":
        if admin_check() == False and setting['AdminShow'] == False and setting['LimitMode'] == False:
            flask.abort(401)
        dbname = flask.request.form['date']
    else:
        gettweet.getTweets(tp_api(),mode,setting["MaxCount"],query)
    return "/view?mode={}&dbname={}".format(mode,dbname)

# Image list
@app.route('/view', methods=['GET'])
@login_check
def image_list():
    """Render the thumbnail list stored in the user's (or an admin) database."""
    mode = flask.request.args.get('mode')
    dbname = flask.session['userID']
    try:
        if mode == "admin":
            if admin_check() == False and setting['AdminShow'] == False and setting['LimitMode'] == False:
                return flask.render_template('error.html')
            dbname = flask.request.args.get('dbname')
            images,count,result = db.get_list("DB/admin/" + dbname + ".db", "list")
        else:
            images,count,result = db.get_list("DB/user/" + dbname + ".db", mode)
    except:
        return flask.render_template('error.html')
    return flask.render_template('view.html', filelist=images, count=count, mode=mode, dbname=dbname, result=result)

# Image detail
@app.route('/detail', methods=['GET'])
@login_check
def image_detail():
    """Render the detail page for one image, including its tweet information."""
    mode = flask.request.args.get('mode')
    image_id = flask.request.args.get('id')
    dbname = flask.session['userID']
    try:
        if mode == "admin":
            if admin_check() == False and setting['AdminShow'] == False and setting['LimitMode'] == False:
                return flask.render_template('error.html')
            dbname = flask.request.args.get('dbname')
            detail,html,idinfo,count = db.get_detail(int(image_id), "DB/admin/"+dbname+".db", "list")
        else:
            detail,html,idinfo,count = db.get_detail(int(image_id), "DB/user/"+dbname+".db", mode)
    except:
        return flask.render_template('error.html')
    # URL-encoded search query linking this tweet to the @eshi_hantei account.
    detail['eshi'] = "%40eshi_hantei%20tweet_id%3A" + detail['url'].split('/')[-1]
    return flask.render_template('detail.html', data=detail, html=html, idcount=idinfo, mode=mode, dbname=dbname, max=count-1)
if __name__ == '__main__':
    # Development server only.
    app.run(host='0.0.0.0') # bind all interfaces so it is reachable remotely
| {
"repo_name": "marron-akanishi/TPTS_web",
"path": "app.py",
"copies": "1",
"size": "9601",
"license": "mit",
"hash": -726485549788426900,
"line_mean": 33.8884615385,
"line_max": 135,
"alpha_frac": 0.6123911366,
"autogenerated": false,
"ratio": 3.107571085988352,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4219962222588352,
"avg_score": null,
"num_lines": null
} |
"""Ajax views tests."""
from flask import url_for
from wordbook.domain.repo.translation import Repo
def test_add_translation(app, client):
    """Test of add_translation ajax view.

    Translation should be added to repository and returned in response.
    """
    # Form payload mirroring what the add-translation form submits.
    data = dict(
        word='sheep',
        ipa='sziiip',
        simplified='sziip',
        translation='owca',
        from_language='en',
        into_language='pl',
    )
    resp = app.test_client().post(
        url_for('ajax.add_translation'),
        data=data,
        headers={'Content-Type': 'application/x-www-form-urlencoded'})
    # The view echoes the stored translation back as JSON.
    assert resp.status_code == 200
    assert resp.json == dict(translation=dict(id=1, simplified='sziip', ipa='sziiip', translation='owca', word='sheep'))

    # The translation must also be persisted and retrievable via the repo.
    repo = Repo()
    translation = repo.get(resp.json['translation']['id'])
    assert translation.translated == 'owca'
    assert translation.from_language == 'en'
    assert translation.simplified == 'sziip'
    assert translation.word == 'sheep'
    assert translation.ipa == 'sziiip'
    assert translation.into_language == 'pl'
| {
"repo_name": "lizardschool/wordbook",
"path": "tests/flaskapp/test_ajax_views.py",
"copies": "1",
"size": "1100",
"license": "mit",
"hash": -4551572702714964500,
"line_mean": 32.3333333333,
"line_max": 120,
"alpha_frac": 0.6436363636,
"autogenerated": false,
"ratio": 3.832752613240418,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9975136785505582,
"avg_score": 0.0002504382669671926,
"num_lines": 33
} |
# Quiz grader: `widget_inputs` (checkbox states) and `grade_result` are
# presumably injected by the grading platform - TODO confirm; they are not
# defined in this file.
ajax = widget_inputs["check1"]
images = widget_inputs["check2"]
add_divs_to_page = widget_inputs["check3"]
web_worker = widget_inputs["check4"]

comments = []
def commentizer(new):
    # Queue a feedback string, skipping duplicates.
    if new not in comments:
        comments.append(new)

is_correct = False

# Each scenario must be classified correctly.  Once any check fails,
# `is_correct and ...` keeps the overall result False.
if not ajax:
    is_correct = is_correct and False
    commentizer("Take another look at the first one. Hint: what's the definition of AJAX?")
else:
    is_correct = True

if images:
    is_correct = is_correct and False
    commentizer("Take another look at the second one. Where is the image manipulation work happening?")
else:
    is_correct = is_correct and True

if add_divs_to_page:
    is_correct = is_correct and False
    commentizer("Take another look at the third one. What kind of work is appending divs to the page? Synchronous or asynchronous?")
else:
    is_correct = is_correct and True

if not web_worker:
    is_correct = is_correct and False
    commentizer("Take another look at the last one. Where do web workers run? How do you communicate with web workers?")
else:
    is_correct = is_correct and True

if is_correct:
    commentizer("Great job! You're recognizing when and where Promises are useful.")

grade_result["comment"] = "\n\n".join(comments)
grade_result["correct"] = is_correct | {
"repo_name": "udacity/course-promises",
"path": "quizzes/widget/1.6.async-scenarios.py",
"copies": "1",
"size": "1271",
"license": "mit",
"hash": 3065843803116942000,
"line_mean": 30.0243902439,
"line_max": 132,
"alpha_frac": 0.7128245476,
"autogenerated": false,
"ratio": 3.453804347826087,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4666628895426087,
"avg_score": null,
"num_lines": null
} |
# Ajayrama Kumaraswamy, 07 Oct 2014
# Usage: "python splitVCard.py contacts.vcf" where "contacts.vcf" contains multiple contacts.
# A folder (in the case named contacts) will be created and the contacts will be stored inside the directory in files
# named 1.vcf, 2.vcf.., one contact being stored per file.
import os
from shutil import rmtree
class vcf(object):
    """In-memory representation of a single vCard.

    Lines are accumulated with addLine() while `valid` is True, and the
    collected card is flushed to disk with write().
    """
    def __init__(self):
        # Collected, newline-terminated vcard lines.
        self.vcfStr = []
        # Contact name; the splitter assigns this from the FN field.
        self.name = []
        # While True, addLine() accepts input; cleared once the card is done.
        self.valid = True
        # Counter used to label nameless contacts ("Unknown1", "Unknown2", ...).
        self.uk = 1

    def write(self, fName):
        """
        writes the vcard into fName
        """
        if self.name == '':
            # Give anonymous contacts a sequential placeholder name.
            self.name = 'Unknown' + str(self.uk)
            self.uk += 1
        with open(os.path.join(fName), 'w') as out:
            out.write(''.join(self.vcfStr))

    def addLine(self, stri):
        """Append one raw line, normalising CRLF endings to bare LF.

        Ignored entirely once the card has been marked complete.
        """
        if not self.valid:
            return
        if stri.endswith('\r\n'):
            stri = stri[:-2] + '\n'
        self.vcfStr.append(stri)
if __name__ == '__main__':
    import sys
    assert len(sys.argv) == 2, 'this script takes only one commandline argument'
    fleName = sys.argv[1]
    # Output directory shares the input's name minus the '.vcf' extension.
    dirName = fleName[:-4]
    if os.path.isdir(dirName):
        rmtree(dirName)
    os.mkdir(dirName)
    fle = open(fleName, 'r')
    presLine = fle.readline()
    count = 1
    while presLine != '':
        if presLine[:11] == 'BEGIN:VCARD':
            # Start accumulating a new contact.
            # NOTE(review): if the input does not start with BEGIN:VCARD,
            # presVCF is used before assignment - verify inputs are well formed.
            presVCF = vcf()
        presVCF.addLine(presLine)
        if presLine[:9] == 'END:VCARD':
            # Card complete: write it as <n>.vcf and stop accepting lines.
            presVCF.write(os.path.join(dirName, str(count) + '.vcf'))
            count += 1
            presVCF.valid = False
        if presLine[:2] == 'FN':
            # Record the formatted-name field (strip a trailing CRLF first).
            if presLine[-2:] == '\r\n':
                presLine = presLine[:-2]
            presVCF.name = presLine[3:]
        presLine = fle.readline()
| {
"repo_name": "dEvasEnApati/vcardSplitterCombiner",
"path": "splitVCard.py",
"copies": "2",
"size": "1860",
"license": "apache-2.0",
"hash": 3114265900693593000,
"line_mean": 20.8823529412,
"line_max": 117,
"alpha_frac": 0.5322580645,
"autogenerated": false,
"ratio": 3.522727272727273,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5054985337227272,
"avg_score": null,
"num_lines": null
} |
# A jiffy to try and identify the FreeR column in an MTZ file - will look for
# FreeR_flag, then *free*, will check that the column type is 'I' and so
# will be useful when an external reflection file is passed in for copying
# of the FreeR column.
from xia2.Wrappers.CCP4.Mtzdump import Mtzdump
def FindFreeFlag(hklin):
    """Try to find the FREE column in hklin. Raise exception if no column is
    found or if more than one candidate is found."""
    # Dump the MTZ header to learn the column labels and types.
    mtzdump = Mtzdump()
    mtzdump.set_hklin(hklin)
    mtzdump.dump()

    columns = mtzdump.get_columns()
    # Map column label -> MTZ column type; a FreeR column must be type 'I'.
    ctypes = {c[0]: c[1] for c in columns}

    if "FreeR_flag" in ctypes:
        if ctypes["FreeR_flag"] != "I":
            raise RuntimeError("FreeR_flag column found: type not I")
        return "FreeR_flag"

    # The canonical name was absent, so fall back to any label containing
    # "free" (case-insensitive).
    possibilities = [c for c in ctypes if "free" in c.lower()]

    if not possibilities:
        raise RuntimeError("no candidate FreeR_flag columns found")
    if len(possibilities) > 1:
        # BUG FIX: this case previously fell through and returned None
        # silently, although the docstring promises an exception when the
        # choice is ambiguous.
        raise RuntimeError(
            "multiple candidate FreeR_flag columns found: %s"
            % ", ".join(possibilities)
        )
    if ctypes[possibilities[0]] != "I":
        raise RuntimeError(
            "FreeR_flag column found (%s): type not I" % possibilities[0]
        )
    return possibilities[0]
| {
"repo_name": "xia2/xia2",
"path": "src/xia2/Modules/FindFreeFlag.py",
"copies": "1",
"size": "1302",
"license": "bsd-3-clause",
"hash": 1971868602642990600,
"line_mean": 29.2790697674,
"line_max": 77,
"alpha_frac": 0.6397849462,
"autogenerated": false,
"ratio": 3.5867768595041323,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9726561805704133,
"avg_score": 0,
"num_lines": 43
} |
# Advent of Code 2015, day 23: the hard-coded register machine program.
a = ['jio a, +19','inc a','tpl a','inc a','tpl a','inc a','tpl a','tpl a','inc a','inc a','tpl a','tpl a','inc a','inc a','tpl a','inc a','inc a','tpl a','jmp +23','tpl a','tpl a','inc a','inc a','tpl a','inc a','inc a','tpl a','inc a','tpl a','inc a','tpl a','inc a','tpl a','inc a','inc a','tpl a','inc a','inc a','tpl a','tpl a','inc a','jio a, +8','inc b','jie a, +4','tpl a','inc a','jmp +2','hlf a','jmp -7']

# Interpret the program on two registers until the counter leaves it,
# then print the final register values.
pos = 0
d = {'a': 1, 'b': 0}
while pos < len(a):
    # Instructions look like "tpl a", "jmp +23" or "jio a, +19".
    parts = a[pos].replace(',', '').split()
    op = parts[0]
    if op == 'hlf':
        # Halve the register (integer division).
        d[parts[1]] = d[parts[1]] // 2
        pos += 1
    elif op == 'tpl':
        # Triple the register.
        d[parts[1]] = d[parts[1]] * 3
        pos += 1
    elif op == 'inc':
        # Increment the register.
        d[parts[1]] = d[parts[1]] + 1
        pos += 1
    elif op == 'jmp':
        # Unconditional relative jump.
        pos += int(parts[1])
    elif op == 'jie':
        # Jump if the register is even, otherwise fall through.
        pos += int(parts[2]) if d[parts[1]] % 2 == 0 else 1
    elif op == 'jio':
        # Jump if the register is exactly one ("jump if one"), else fall through.
        pos += int(parts[2]) if d[parts[1]] == 1 else 1
print(d)
| {
"repo_name": "imylyanyk/AdventOfCode",
"path": "day23.py",
"copies": "1",
"size": "1229",
"license": "mit",
"hash": 1370332523217162800,
"line_mean": 27.5813953488,
"line_max": 414,
"alpha_frac": 0.3783563873,
"autogenerated": false,
"ratio": 2.8123569794050343,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8542849873060621,
"avg_score": 0.02957269872888283,
"num_lines": 43
} |
# AJ Looney
# 9/7/15
# Facial-Detection
import cv2
from cascadeFile import CascadeFile
class DetectFace:
    """Draw rectangles around faces (and optionally smiles) in a live video feed.

    The capture/display loop starts immediately from the constructor and runs
    until the user presses Esc or 'q'.
    """
    def __init__(self, vid, smile=False, eye=False):
        # Loop flag; cleared when the user quits.
        self.active = True
        # Smile detection toggle (the 's' key flips it at runtime).
        # NOTE(review): toggling smile on later when it started False will hit
        # a missing smileCascade attribute - verify before relying on it.
        self.smile = smile
        # Eye detection is declared but not implemented yet.
        self.eye = eye
        # An already-opened cv2 video capture source.
        self.vid = vid
        self.faceCascade = CascadeFile("haarcascade_frontalface_default.xml").getClassifier()
        if self.eye:
            pass # self.eyeCascade = CascadeFile("haarcascade_eye.xml").getClassifier()
        if self.smile:
            self.smileCascade = CascadeFile("haarcascade_smile.xml").getClassifier()
        self.activate()

    def activate(self):
        """Run the capture loop: detect faces per frame and display the result."""
        while self.active:
            ret, frame = self.vid.read()
            # Haar cascades operate on grayscale images.
            gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
            faces = self.faceCascade.detectMultiScale(
                gray,
                scaleFactor=1.4,
                minNeighbors=5,
                minSize=(15, 15)
            )
            # Draw a rectangle around the faces
            for (x, y, w, h) in faces:
                cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 2)
                if self.smile:
                    # Search for smiles only inside the detected face region.
                    roi_gray = gray[y:y + h, x:x + w]
                    roi_color = frame[y:y + h, x:x + w]
                    smiles = self.smileCascade.detectMultiScale(
                        roi_gray,
                        scaleFactor=1.05,
                        minNeighbors=5,
                        minSize=(15, 15)
                    )
                    # Only the first smile candidate is drawn.
                    for (ex, ey, ew, eh) in smiles[:1]:
                        cv2.rectangle(roi_color, (ex, ey), (ex + ew, ey + eh), (255, 0, 255), 2)
                if self.eye:
                    pass  # Implement eye detection
            # Display the resulting frame
            cv2.imshow('Facial Detection', frame)
            k = cv2.waitKey(1)
            if k == 27 or k == 113:  # Esc key to stop
                self.active = False
                cv2.destroyAllWindows()
            if k == 115:  # Toggle smile
                self.smile = not self.smile
                continue
            elif k == -1:  # normally -1 returned,so don't print it
                continue
            else:
                continue
| {
"repo_name": "Looney4444/Facial-Detection",
"path": "src/DetectFace.py",
"copies": "1",
"size": "2229",
"license": "apache-2.0",
"hash": 1704010552028980500,
"line_mean": 31.7794117647,
"line_max": 96,
"alpha_frac": 0.4782413638,
"autogenerated": false,
"ratio": 3.73993288590604,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.47181742497060397,
"avg_score": null,
"num_lines": null
} |
# A job for creating and managing key generation
# Generates a CSR + private-key pair for a domain with OpenSSL, uploads both
# files to S3, then prints the commands a user can paste to fetch them.
from os import environ, remove, path
import subprocess
# All job parameters arrive via Jenkins environment variables.
jenkins_scripts = environ.get("JENKINS_SCRIPTS")
aws_access_key = environ.get("AWS_ACCESS_KEY")
aws_secret_key = environ.get("AWS_SECRET_KEY")
domain_name=environ.get('Domain_Name')
state=environ.get('State')
city=environ.get('City')
legal_name=environ.get('Legal_Name')
bucket=environ.get('Databag_Name')
# Remove stale output from a previous run so OpenSSL starts clean.
if path.exists("/tmp/{domain_name}.csr".format(domain_name=domain_name)):
    remove("/tmp/{domain_name}.csr".format(domain_name=domain_name))
if path.exists("/tmp/{domain_name}.key".format(domain_name=domain_name)):
    remove("/tmp/{domain_name}.key".format(domain_name=domain_name))
# Build the OpenSSL request: new 2048-bit RSA key, no passphrase (-nodes).
# The -subj value is appended as a single argv element because it would
# otherwise be broken apart by the naive split(" ") above.
command='openssl req -new -newkey rsa:2048 -nodes -out /tmp/{domain_name}.csr -keyout /tmp/{domain_name}.key -subj'.format(domain_name=domain_name).split(" ")
command+=['/C=US/ST={state}/L={city}/O={legal_name}/CN={domain_name}'.format(state=state, city=city, legal_name=legal_name, domain_name=domain_name)]
out = subprocess.check_output(command, stderr=subprocess.STDOUT)
# Upload the CSR file to S3 (the original comment said EC2, but the
# destination is clearly an s3:// URL).
command="s3upload -l DEBUG -k {aws_access_key} -sk {aws_secret_key} -f -np 8 -s 100 /tmp/{domain_name}.csr s3://nmdarchive/{bucket}/{domain_name}.csr".format(aws_access_key=aws_access_key,aws_secret_key=aws_secret_key,domain_name=domain_name, bucket=bucket)
out = subprocess.check_output(command.split(" "), stderr=subprocess.STDOUT)
# Upload the KEY file to S3
command="s3upload -l DEBUG -k {aws_access_key} -sk {aws_secret_key} -f -np 8 -s 100 /tmp/{domain_name}.key s3://nmdarchive/{bucket}/{domain_name}.key".format(aws_access_key=aws_access_key,aws_secret_key=aws_secret_key,domain_name=domain_name, bucket=bucket)
out = subprocess.check_output(command.split(" "), stderr=subprocess.STDOUT)
# Clean up the local copies now that both files are archived.
remove("/tmp/{domain_name}.csr".format(domain_name=domain_name))
remove("/tmp/{domain_name}.key".format(domain_name=domain_name))
# Input them into the nmdproxy/certs databag? TODO
# Show the commands to pull the files and share them.
print "Your cert files have been uploaded to the nmdarchive. To download them, copy paste the following text into iTerm:\n"
print "mkdir ~/Downloads/{bucket} &&".format(bucket=bucket)
print "cd ~/Downloads/{bucket} &&".format(bucket=bucket)
print "drud file get {bucket}/{domain_name}.key &&".format(bucket=bucket, domain_name=domain_name)
print "drud file get {bucket}/{domain_name}.csr &&".format(bucket=bucket, domain_name=domain_name)
print "open .\n\n"
print "open .\n\n" | {
"repo_name": "TopherGopher/aws-infra.jenkins-scripts",
"path": "csr_generation_and_upload.py",
"copies": "1",
"size": "2602",
"license": "mit",
"hash": -6446074609047677000,
"line_mean": 52.1224489796,
"line_max": 257,
"alpha_frac": 0.7401998463,
"autogenerated": false,
"ratio": 3.0829383886255926,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9289534016345014,
"avg_score": 0.0067208437161158305,
"num_lines": 49
} |
# A 'job server' which actually runs the requests. At the moment this is
# simply a proof of concept. It runs one job at a time and has no
# provisions for supporting multiple job managers or multiple in flight jobs.
# This job-server will try to run jobs which haven't been started before, but
# will not retry jobs which have previously run and failed (for *any* reason)
import os
import subprocess
import shutil
import sys
import time
import datetime
import json
from common import common_setup, add_message_to_log
# For simplicity, we're going to use django's orm interface for accessing the
# database. The avoids having the (autogenerated) db field names hard coded.
common_setup()
from api import models
# open connection to log
# claim active job manager role (others may keep running)
# A generator routine which enumerates the set of pending jobs to be run
def pending_jobs():
    """Yield Request objects that were requested but never picked up.

    Replays the message log: a request is pending when its 'job_start'
    entry has no matching started/finished/stop/abort entry.  Requests
    queued for more than 12 hours are aborted (logged) instead of yielded.
    """
    pending = dict()
    for entry in models.LogMessage.objects.all():
        action = json.loads(entry.payload)["action"]
        req_id = entry.request.id
        if action == "job_start":
            assert req_id not in pending
            pending[req_id] = True
        elif action in ("job_started", "job_finished", "job_stop", "job_abort"):
            assert req_id in pending
            pending[req_id] = False
    for req_id, is_pending in pending.items():
        if not is_pending:
            continue
        request = models.Request.objects.get(pk=req_id)
        request.parameters = json.loads(request.parameters)
        # Shed load: abort anything that has sat in the queue over 12 hours.
        cutoff = datetime.datetime.now() - datetime.timedelta(hours=12)
        cutoff = cutoff.replace(tzinfo=None)
        if request.datetime.replace(tzinfo=None) < cutoff:
            add_message_to_log({"action": "job_abort"}, request)
            continue
        yield request
def run_job(job):
jobtype = job.parameters["job_type"]
print "Running job: " + jobtype + " " +str(job)
if "nop-skip" == jobtype:
# used when testing the frontend, we just ignore it. We do not
# want to adjust the status here.
return
# Before actually starting the job, record the fact we're about to do so.
# If we see this job in the log after a restart, we don't want it to
# rerun (since it may have caused us to crash in the first place)
message_dict = {"action": "job_started"}
add_message_to_log(message_dict, job)
# TODO: async using popen and observe jobs
# TODO: logging, log files?
# TODO: set cwd
# TODO: remove shell=True via explicit command path
if jobtype == "echo":
print "echo: " + str(job)
elif jobtype in ["build", "clang-modernize", "clang-tidy", "clang-format"]:
repo = job.repo
cmd = "python run-%s-job.py %s %s" % (jobtype, repo, job.id)
subprocess.call(cmd, shell=True)
pass
else:
print "error: illegal job type!"
# Record the fact the job finished (normally)
# TODO: add 'job_aborted'
message_dict = {"action": "job_finished"}
add_message_to_log(message_dict, job)
print "Entering job-server loop"
started = datetime.datetime.now()
# The job server is externally restarted periodically; exit after two hours
# so the supervisor can relaunch a fresh process.
while datetime.datetime.now() < started + datetime.timedelta(minutes=120):
    print "Checking for work @" +str(datetime.datetime.now())
    # Pull out any open requests.
    # - use a last processed job ID
    # - possibly a pair: max completed, last considered
    # Aggressively shed load as required.
    # TODO: take into consideration load on the system
    # Batch process pending job requests - this is currently strictly FIFO,
    # but more complicated policies can and should be applied.
    for job in pending_jobs():
        print "pending: " + str(job)
        # Note: Need to rate limit the work somehow, for now, this is
        # handled by having a single blocking call per job
        run_job(job)
    # TODO: implement various job manager commands
    # e.g. restart, stop
    # Sleep for a few seconds, then check for new jobs.
    # TODO: implement a reasonable backoff policy here, or does
    # it actually matter?
    time.sleep(5);
# TODO: When we get around to implementing a parallelization
# scheme, implement graceful shutdown for jobs running at timeout.
| {
"repo_name": "preames/lc-service",
"path": "job-server/main.py",
"copies": "1",
"size": "4521",
"license": "bsd-3-clause",
"hash": -3407603752033924000,
"line_mean": 35.756097561,
"line_max": 80,
"alpha_frac": 0.6560495466,
"autogenerated": false,
"ratio": 3.9762532981530345,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.010197591010643457,
"num_lines": 123
} |
"""A job to send a HTTP (GET or DELETE) periodically."""
import logging
import requests
from ndscheduler.corescheduler import job
logger = logging.getLogger(__name__)
class CurlJob(job.JobBase):
    """A job that sends a single HTTP request (GET or DELETE) to a URL."""

    TIMEOUT = 10  # seconds before the request is abandoned

    @classmethod
    def meta_info(cls):
        return {
            'job_class_string': '%s.%s' % (cls.__module__, cls.__name__),
            'notes': 'This sends a HTTP request to a particular URL',
            'arguments': [
                # url
                {'type': 'string', 'description': 'What URL you want to make a GET call?'},
                # Request Type
                {'type': 'string', 'description': 'What request type do you want? '
                                                  '(currently supported: GET/DELETE)'},
            ],
            'example_arguments': ('["http://localhost:8888/api/v1/jobs", "GET"]'
                                  '["http://localhost:8888/api/v1/jobs/ba12e", "DELETE"]')
        }

    def run(self, url, request_type, *args, **kwargs):
        """Send *request_type* (GET/DELETE) to *url* and return the body text."""
        # Bug fix: the old message always claimed "GET" even for DELETE.
        print('Calling %s on url: %s' % (request_type, url))
        session = requests.Session()
        result = session.request(request_type,
                                 url,
                                 timeout=self.TIMEOUT,
                                 headers=None,
                                 data=None)
        return result.text
if __name__ == "__main__":
    # Manual smoke test.
    job = CurlJob.create_test_instance()
    # Bug fix: run() requires a request_type; the old call omitted it and
    # raised TypeError.  NOTE(review): port 888 looks like a typo for 8888.
    job.run('http://localhost:888/api/v1/jobs', 'GET')
| {
"repo_name": "Nextdoor/ndscheduler",
"path": "simple_scheduler/jobs/curl_job.py",
"copies": "1",
"size": "1481",
"license": "bsd-2-clause",
"hash": -7748917719032585000,
"line_mean": 31.9111111111,
"line_max": 91,
"alpha_frac": 0.490209318,
"autogenerated": false,
"ratio": 4.280346820809249,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5270556138809248,
"avg_score": null,
"num_lines": null
} |
"""A job to send a HTTP (GET or DELETE) periodically."""
import logging
import requests
from ndscheduler import job
logger = logging.getLogger(__name__)
class CurlJob(job.JobBase):
    """A job that sends a single HTTP request (GET or DELETE) to a URL."""

    TIMEOUT = 10  # seconds before the request is abandoned

    @classmethod
    def meta_info(cls):
        return {
            'job_class_string': '%s.%s' % (cls.__module__, cls.__name__),
            'notes': 'This sends a HTTP request to a particular URL',
            'arguments': [
                # url
                {'type': 'string', 'description': 'What URL you want to make a GET call?'},
                # Request Type
                {'type': 'string', 'description': 'What request type do you want? '
                                                  '(currently supported: GET/DELETE)'},
            ],
            'example_arguments': ('["http://localhost:8888/api/v1/jobs", "GET"]'
                                  '["http://localhost:8888/api/v1/jobs/ba12e", "DELETE"]')
        }

    def run(self, url, request_type, *args, **kwargs):
        """Send *request_type* (GET/DELETE) to *url* and print the body text."""
        # Bug fix: the old message always claimed "GET" even for DELETE.
        print('Calling %s on url: %s' % (request_type, url))
        session = requests.Session()
        result = session.request(request_type,
                                 url,
                                 timeout=self.TIMEOUT,
                                 headers=None,
                                 data=None)
        print(result.text)
if __name__ == "__main__":
    # Manual smoke test.
    job = CurlJob.create_test_instance()
    # Bug fix: run() requires a request_type; the old call omitted it and
    # raised TypeError.  NOTE(review): port 888 looks like a typo for 8888.
    job.run('http://localhost:888/api/v1/jobs', 'GET')
| {
"repo_name": "DurgaChowdary/ndscheduler",
"path": "simple_scheduler/jobs/curl_job.py",
"copies": "1",
"size": "1467",
"license": "bsd-2-clause",
"hash": 2979742059552332300,
"line_mean": 31.6,
"line_max": 91,
"alpha_frac": 0.4853442399,
"autogenerated": false,
"ratio": 4.252173913043478,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0012771700774921386,
"num_lines": 45
} |
"""A job to send slack message periodically."""
import logging
import json
import os
import socket
import requests
from ndscheduler.corescheduler import job
logger = logging.getLogger(__name__)
class SlackJob(job.JobBase):
    """Posts a message to a Slack channel through an incoming-webhook URL.

    The webhook URL is read from the SIMPLE_SCHEDULER_SLACK_URL environment
    variable; a KeyError is raised when it is not set.
    """

    MAX_RETRIES = 3  # HTTP adapter retry budget
    TIMEOUT = 10     # seconds per request

    @classmethod
    def meta_info(cls):
        """Describe this job class for the scheduler UI."""
        return {
            'job_class_string': '%s.%s' % (cls.__module__, cls.__name__),
            'notes': ('This sends message to a Slack channel. To run this job, you have to run '
                      'scheduler with environment variable SIMPLE_SCHEDULER_SLACK_URL'),
            'arguments': [
                # channel
                {'type': 'string', 'description': 'What channel you want to send the message to'},
                # name
                {'type': 'string', 'description': 'This bot\'s name'},
                # icon_emoji
                {'type': 'string', 'description': 'An emoji for this bot\'s avatar'},
                # message
                {'type': 'string', 'description': 'The actual message you want to send.'},
            ],
            'example_arguments': ('["#slack-bot-test", "ndscheduler chat bot", ":satisfied:",'
                                  ' "Standup, team! @channel"]')
        }

    def run(self, channel, name, icon_emoji, message, *args, **kwargs):
        """POST *message* to *channel* as bot *name* with avatar *icon_emoji*."""
        # The webhook URL comes from the environment, e.g. something like
        # http://hooks.slack.com/services/T024TTTTT/BBB72BBL/AZAAA9u0pA4ad666eMgbi555
        # (create one at https://nextdoor.slack.com/apps/new/A0F7XDUAZ-incoming-webhooks).
        if 'SIMPLE_SCHEDULER_SLACK_URL' not in os.environ:
            logger.error('Environment variable SIMPLE_SCHEDULER_SLACK_URL is not specified. '
                         'So we cannot send slack message.')
            raise KeyError('You have to set Environment variable SIMPLE_SCHEDULER_SLACK_URL first.')
        url = os.environ['SIMPLE_SCHEDULER_SLACK_URL']
        http = requests.Session()
        retrying = requests.adapters.HTTPAdapter(max_retries=self.MAX_RETRIES)
        for scheme in ('http://', 'https://'):
            http.mount(scheme, retrying)
        # Tag the message with the sending host so its origin is traceable.
        message += ' // `sent from %s`' % socket.gethostname()
        body = {
            'channel': channel,
            'username': name,
            'text': message,
            'link_names': 1,
            "mrkdwn": 1,
            'icon_emoji': icon_emoji
        }
        http.request('POST', url, timeout=self.TIMEOUT,
                     headers={'content-type': 'application/json'},
                     data=json.dumps(body))
if __name__ == "__main__":
    # Manual smoke test: posts a sample message to the #slack-bot-test
    # channel (requires SIMPLE_SCHEDULER_SLACK_URL to be set).
    job = SlackJob.create_test_instance()
    job.run('#slack-bot-test', 'ndscheduler', ':satisfied:', 'Standup, team! @channel')
| {
"repo_name": "Nextdoor/ndscheduler",
"path": "simple_scheduler/jobs/slack_job.py",
"copies": "1",
"size": "2950",
"license": "bsd-2-clause",
"hash": -4475711735287276500,
"line_mean": 35.875,
"line_max": 100,
"alpha_frac": 0.54,
"autogenerated": false,
"ratio": 4.190340909090909,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5230340909090909,
"avg_score": null,
"num_lines": null
} |
"""A job to send slack message periodically."""
import logging
import json
import os
import socket
import requests
from ndscheduler import job
logger = logging.getLogger(__name__)
class SlackJob(job.JobBase):
    """Posts a message to a Slack channel through an incoming-webhook URL.

    The webhook URL is read from the SIMPLE_SCHEDULER_SLACK_URL environment
    variable; a KeyError is raised when it is not set.
    """

    MAX_RETRIES = 3  # HTTP adapter retry budget
    TIMEOUT = 10     # seconds per request

    @classmethod
    def meta_info(cls):
        """Describe this job class for the scheduler UI."""
        return {
            'job_class_string': '%s.%s' % (cls.__module__, cls.__name__),
            'notes': ('This sends message to a Slack channel. To run this job, you have to run '
                      'scheduler with environment variable SIMPLE_SCHEDULER_SLACK_URL'),
            'arguments': [
                # channel
                {'type': 'string', 'description': 'What channel you want to send the message to'},
                # name
                {'type': 'string', 'description': 'This bot\'s name'},
                # icon_emoji
                {'type': 'string', 'description': 'An emoji for this bot\'s avatar'},
                # message
                {'type': 'string', 'description': 'The actual message you want to send.'},
            ],
            'example_arguments': ('["#slack-bot-test", "ndscheduler chat bot", ":satisfied:",'
                                  ' "Standup, team! @channel"]')
        }

    def run(self, channel, name, icon_emoji, message, *args, **kwargs):
        """POST *message* to *channel* as bot *name* with avatar *icon_emoji*."""
        # The webhook URL comes from the environment, e.g. something like
        # http://hooks.slack.com/services/T024TTTTT/BBB72BBL/AZAAA9u0pA4ad666eMgbi555
        # (create one at https://nextdoor.slack.com/apps/new/A0F7XDUAZ-incoming-webhooks).
        if 'SIMPLE_SCHEDULER_SLACK_URL' not in os.environ:
            logger.error('Environment variable SIMPLE_SCHEDULER_SLACK_URL is not specified. '
                         'So we cannot send slack message.')
            raise KeyError('You have to set Environment variable SIMPLE_SCHEDULER_SLACK_URL first.')
        url = os.environ['SIMPLE_SCHEDULER_SLACK_URL']
        http = requests.Session()
        retrying = requests.adapters.HTTPAdapter(max_retries=self.MAX_RETRIES)
        for scheme in ('http://', 'https://'):
            http.mount(scheme, retrying)
        # Tag the message with the sending host so its origin is traceable.
        message += ' // `sent from %s`' % socket.gethostname()
        body = {
            'channel': channel,
            'username': name,
            'text': message,
            'link_names': 1,
            "mrkdwn": 1,
            'icon_emoji': icon_emoji
        }
        http.request('POST', url, timeout=self.TIMEOUT,
                     headers={'content-type': 'application/json'},
                     data=json.dumps(body))
if __name__ == "__main__":
    # Manual smoke test: posts a sample message to the #slack-bot-test
    # channel (requires SIMPLE_SCHEDULER_SLACK_URL to be set).
    job = SlackJob.create_test_instance()
    job.run('#slack-bot-test', 'ndscheduler', ':satisfied:', 'Standup, team! @channel')
| {
"repo_name": "DurgaChowdary/ndscheduler",
"path": "simple_scheduler/jobs/slack_job.py",
"copies": "1",
"size": "2935",
"license": "bsd-2-clause",
"hash": 2169233528810239500,
"line_mean": 36.1518987342,
"line_max": 100,
"alpha_frac": 0.538330494,
"autogenerated": false,
"ratio": 4.186875891583452,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0018471463365338915,
"num_lines": 79
} |
import cgi
import os
import yaml
import StringIO
import sys
# Django must know which settings module to use before any django import.
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
if os.environ.get('APPENGINE_RUNTIME') == 'python27':
    # internal django version is fine with python27
    from google.appengine._internal.django.utils.html import escape
else:
    # with python2.5 we load a more decent version than the bundled 0.96
    from google.appengine.dist import use_library
    use_library('django', '1.2')
    from django.utils.html import escape
from google.appengine.api import users
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
from google.appengine.ext import db
from google.appengine.ext.webapp import template
from google.appengine.api import memcache
# Changes on every deploy.  NOTE(review): presumably used as a deploy/version
# marker elsewhere; assumes CURRENT_VERSION_ID is always set by the runtime.
NEW_VALUE_WHEN_DEPLOYED = os.environ['CURRENT_VERSION_ID']
class Menu(db.Model):
    # A named menu of links, owned by an author and optionally shared.
    name = db.StringProperty(required=True)
    date = db.DateTimeProperty(auto_now_add=True)
    # special_kind identifies the menus used to compose the special menus
    # (resources, dioceses, ...).
    special_kind = db.StringProperty()
    author = db.UserProperty()
    shared = db.BooleanProperty()
class Link(db.Model):
    # A single hyperlink entry belonging to a Menu, positioned by 'order'.
    name = db.StringProperty(required=True)
    menu = db.ReferenceProperty(Menu)
    date = db.DateTimeProperty(auto_now_add=True)
    url = db.LinkProperty()
    order = db.IntegerProperty(required=True)
class Navbar(db.Model):
    # A navigation bar composed of up to four menus.
    code = db.StringProperty(required=True) # should be lowercase (will be the script filename)
    name = db.StringProperty(required=True)
    author = db.UserProperty()
    date = db.DateTimeProperty(auto_now_add=True)
    first_menu = db.ReferenceProperty(Menu, collection_name="navbar_first_set")
    second_menu = db.ReferenceProperty(Menu, collection_name="navbar_second_set")
    third_menu = db.ReferenceProperty(Menu, collection_name="navbar_third_set")
    fourth_menu = db.ReferenceProperty(Menu, collection_name="navbar_fourth_set")
    settings = db.StringListProperty()
    # cse: custom search engine (Google CSE identifier, if one is attached)
    cse_unique_id = db.StringProperty()
class Administrator(db.Model):
    # Marks a user account as an application administrator.
    user = db.UserProperty()
    admin = db.BooleanProperty(default= False)
| {
"repo_name": "pmleveque/cross-site-navigation",
"path": "cefbase.py",
"copies": "1",
"size": "2262",
"license": "mit",
"hash": -5584614913412147000,
"line_mean": 34.9047619048,
"line_max": 114,
"alpha_frac": 0.7400530504,
"autogenerated": false,
"ratio": 3.5734597156398102,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.481351276603981,
"avg_score": null,
"num_lines": null
} |
# A JSON-based API(view) for your app.
# Most rules would look like:
# @jsonify.when("isinstance(obj, YourClass)")
# def jsonify_yourclass(obj):
# return [obj.val1, obj.val2]
# @jsonify can convert your objects to following types:
# lists, dicts, numbers and strings
from turbojson.jsonify import jsonify
from turbojson.jsonify import jsonify_sqlobject
from ecrm.model import User, Group, Permission
@jsonify.when('isinstance(obj, Group)')
def jsonify_group(obj):
    """JSONify a Group, embedding member user names and permission names."""
    data = jsonify_sqlobject(obj)
    data["users"] = [member.user_name for member in obj.users]
    data["permissions"] = [perm.permission_name for perm in obj.permissions]
    return data
@jsonify.when('isinstance(obj, User)')
def jsonify_user(obj):
    """JSONify a User, stripping the password and embedding group/permission names.

    Uses pop() so a serialized User without a password field cannot raise
    KeyError (the previous `del` did).
    """
    result = jsonify_sqlobject( obj )
    result.pop('password', None)  # never leak credentials to the client
    result["groups"] = [g.group_name for g in obj.groups]
    result["permissions"] = [p.permission_name for p in obj.permissions]
    return result
@jsonify.when('isinstance(obj, Permission)')
def jsonify_permission(obj):
    """JSONify a Permission, embedding the names of the groups that hold it."""
    payload = jsonify_sqlobject(obj)
    payload["groups"] = [grp.group_name for grp in obj.groups]
    return payload
| {
"repo_name": "LamCiuLoeng/bossini",
"path": "ecrm/json.py",
"copies": "1",
"size": "1121",
"license": "mit",
"hash": -2813798006828335000,
"line_mean": 32.9696969697,
"line_max": 72,
"alpha_frac": 0.7109723461,
"autogenerated": false,
"ratio": 3.407294832826748,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.46182671789267477,
"avg_score": null,
"num_lines": null
} |
# A JSON-based API(view) for your app.
# Most rules would look like:
# @jsonify.when("isinstance(obj, YourClass)")
# def jsonify_yourclass(obj):
# return [obj.val1, obj.val2]
# @jsonify can convert your objects to following types:
# lists, dicts, numbers and strings
from turbojson.jsonify import jsonify
from turbojson.jsonify import jsonify_sqlobject
from buzzbot.model import User, Group, Permission
@jsonify.when('isinstance(obj, Group)')
def jsonify_group(obj):
    """JSONify a Group, embedding member user names and permission names."""
    data = jsonify_sqlobject(obj)
    data["users"] = [member.user_name for member in obj.users]
    data["permissions"] = [perm.permission_name for perm in obj.permissions]
    return data
@jsonify.when('isinstance(obj, User)')
def jsonify_user(obj):
    """JSONify a User, stripping the password and embedding group/permission names.

    Uses pop() so a serialized User without a password field cannot raise
    KeyError (the previous `del` did).
    """
    result = jsonify_sqlobject( obj )
    result.pop('password', None)  # never leak credentials to the client
    result["groups"] = [g.group_name for g in obj.groups]
    result["permissions"] = [p.permission_name for p in obj.permissions]
    return result
@jsonify.when('isinstance(obj, Permission)')
def jsonify_permission(obj):
    """JSONify a Permission, embedding the names of the groups that hold it."""
    payload = jsonify_sqlobject(obj)
    payload["groups"] = [grp.group_name for grp in obj.groups]
    return payload
| {
"repo_name": "pbarton666/buzz_bot",
"path": "bot_project/buzzbot/json.py",
"copies": "1",
"size": "1157",
"license": "mit",
"hash": 1442032543179757800,
"line_mean": 33.0606060606,
"line_max": 72,
"alpha_frac": 0.6914433881,
"autogenerated": false,
"ratio": 3.402941176470588,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4594384564570588,
"avg_score": null,
"num_lines": null
} |
"""A JSON Encoder similar to TaggedJSONSerializer from Flask, that knows
about Result Proxies and adds some convenience for other common types.
This encoder is a good fit because it will traverse the data structure it
encodes recursively, raising any SuspendTask/TaskError exceptions stored in
task results. Any serializer is supposed to do that.
"""
import sys
if sys.version_info < (3,):
uni = unicode
else:
uni = str
import collections
import json
import uuid
from base64 import b64decode
from base64 import b64encode
from flowy.result import is_result_proxy, TaskError, SuspendTask, wait
from flowy.operations import first
__all__ = ['traverse_data', 'dumps', 'loads']
def check_err_and_placeholders(result, value):
    """Fold *value* into an (error, has_placeholders) accumulator.

    An errored result proxy becomes (or is merged into) the error slot; a
    still-pending one flips the placeholder flag.  Plain values leave the
    accumulator unchanged.
    """
    error, has_placeholders = result
    try:
        wait(value)
    except TaskError:
        error = value if error is None else first(error, value)
    except SuspendTask:
        has_placeholders = True
    return error, has_placeholders
def collect_err_and_results(result, value):
    """Fold *value* into an (error, finished_results) accumulator.

    Non-proxy values are ignored.  Errored proxies merge into the error
    slot; suspended proxies are skipped; finished proxies are appended to
    the results list, which is created lazily on first use.
    """
    error, results = result
    if not is_result_proxy(value):
        return result
    try:
        wait(value)
    except TaskError:
        error = value if error is None else first(error, value)
    except SuspendTask:
        pass
    else:
        if results is None:
            results = []
        results.append(value)
    return error, results
def traverse_data(value, f=check_err_and_placeholders, initial=(None, False), seen=frozenset(), make_list=True):
    """Recursively walk *value*, unwrapping result proxies.

    Returns ``(converted_value, accumulator)`` where the accumulator is
    built by folding *f* over every leaf, starting from *initial*.
    Mappings and sized iterables are rebuilt (iterables become lists, or
    tuples when ``make_list`` is False, as needed for dict keys).

    Raises:
        ValueError: on recursive structures or unsized iterables.
    """
    if is_result_proxy(value):
        try:
            wait(value)
        except (TaskError, SuspendTask):
            # Failed or still-pending proxies pass through unwrapped; f()
            # records the error/placeholder.  (The two previously duplicated
            # identical except branches are merged here.)
            return value, f(initial, value)
        return value.__wrapped__, f(initial, value)
    if isinstance(value, (bytes, uni)):
        # Strings are leaves, not iterables.
        return value, f(initial, value)
    res = initial
    if isinstance(value, collections.Iterable):
        if id(value) in seen:
            raise ValueError('Recursive structure.')
        seen = seen | frozenset([id(value)])
    if isinstance(value, collections.Mapping):
        converted = {}
        for k, v in value.items():
            # Keys must stay hashable, hence make_list=False (tuples).
            k_, res = traverse_data(k, f, res, seen, make_list=False)
            v_, res = traverse_data(v, f, res, seen, make_list=make_list)
            converted[k_] = v_
        return converted, res
    if (
        isinstance(value, collections.Iterable)
        and isinstance(value, collections.Sized)
    ):
        items = []
        for x in value:
            x_, res = traverse_data(x, f, res, seen, make_list=make_list)
            items.append(x_)
        if make_list:
            return items, res
        return tuple(items), res
    if isinstance(value, collections.Iterable):
        raise ValueError('Unsized iterables not allowed.')
    return value, f(initial, value)
def dumps(value):
    """Serialize *value* to JSON, tagging UUIDs and bytes (see _tag)."""
    tagged = _tag(value)
    return json.dumps(tagged)
def _tag(value):
if isinstance(value, uuid.UUID):
return {' u': value.hex}
elif isinstance(value, bytes):
return {' b': b64encode(value).decode('ascii')}
elif callable(getattr(value, '__json__', None)):
return _tag(value.__json__())
elif isinstance(value, (list, tuple)):
return [_tag(x) for x in value]
elif isinstance(value, dict):
return dict((k, _tag(v)) for k, v in value.items())
return value
def loads(value):
    """Deserialize JSON produced by dumps(), restoring UUIDs and bytes."""
    return json.loads(value, object_hook=_obj_hook)
def _obj_hook(obj):
if len(obj) != 1:
return obj
key, value = next(iter(obj.items()))
if key == ' u':
return uuid.UUID(value)
elif key == ' b':
return b64decode(value)
return obj
| {
"repo_name": "severb/flowy",
"path": "flowy/serialization.py",
"copies": "1",
"size": "3659",
"license": "mit",
"hash": 1154026848799148300,
"line_mean": 25.9044117647,
"line_max": 112,
"alpha_frac": 0.6078163433,
"autogenerated": false,
"ratio": 3.88016967126193,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.498798601456193,
"avg_score": null,
"num_lines": null
} |
'''A JSON-RPC Server with a task queue for performing lengthy calculations'''
import os
import base
import time
from unuk.utils import get_logger, load_settings
settings = load_settings('taskqueue.settings')
from unuk.core.jsonrpc import JSONRPC, Proxy
from unuk.contrib.tasks import Controller
def callback(**fields):
    """No-op success callback; accepts and ignores arbitrary keyword fields."""
    return None
class JsonCalculator(JSONRPC):
    """JSON-RPC server backed by a task Controller for lengthy calculations."""

    def __init__(self, pool):
        super(JsonCalculator, self).__init__(logger=get_logger(name='taskqueue'))
        self.controller = Controller(pool).start()

    def jsonrpc_calculate(self, request, **kwargs):
        """Dispatch a new calculation task and return its id/info dict."""
        # Renamed local: the original rebound the incoming `request` parameter.
        task = self.controller.dispatch('taskqueue.tasks.calctask', **kwargs)
        task.bind('success', callback)
        return task.info.todict()

    def jsonrpc_shutdown(self, request, **kwargs):
        """Remote shutdown hook."""
        self.stop()

    def stop(self):
        """Stop the underlying calculation pool."""
        self.logger.info('Stopping calculation pool')
        self.controller.stop()
def proxy():
    """Build a JSON-RPC client Proxy pointing at the local server port."""
    from unuk.contrib.txweb import jsonrpc
    default_port = jsonrpc.ApplicationServer.default_port
    return Proxy(url='http://localhost:%s' % default_port, proxies={})
def run():
    """Create the JSON-RPC application server and enter the event loop."""
    from unuk.contrib.txweb import jsonrpc, start
    # Constructing the server registers it with the framework; the instance
    # itself was never used, so the unused local binding has been dropped.
    jsonrpc.ApplicationServer(JsonCalculator)
    start()
| {
"repo_name": "pombredanne/unuk",
"path": "examples/taskqueue/application.py",
"copies": "1",
"size": "1430",
"license": "bsd-3-clause",
"hash": 7949384851384214000,
"line_mean": 28.4255319149,
"line_max": 80,
"alpha_frac": 0.6475524476,
"autogenerated": false,
"ratio": 4.097421203438396,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5244973651038396,
"avg_score": null,
"num_lines": null
} |
"""A JSON service for statvent.
You can also run this module as a command and it will read the data from your
stat pipes and serve it up as JSON via HTTP. It tries to gracefully handle
dead pipes, and will unlink them if it finds any.
"""
import optparse
import os
import signal
import time
import traceback
import urlparse
try:
import simplejson as json
except ImportError:
import json
from collections import defaultdict
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
from statvent.stats import config
INTERRUPTED_SYSTEM_CALL = 4
PIPE_OPEN_TIMEOUT = 0.1
def main():
    """Parse command-line options and start the HTTP stat publisher."""
    parser = optparse.OptionParser()
    parser.add_option('-p', '--port', type="int",
                      help="The port to listen on.", default=7828)
    parser.add_option('-i', '--ip',
                      help="The IP address to listen on.", default='')
    parser.add_option('-l', '--path',
                      help="The HTTP path for serving stats.", default="/stats")
    parser.add_option('-d', '--pipe-dir',
                      help="The directory where the stats pipes live.")
    opts, _ = parser.parse_args()
    if opts.pipe_dir:
        # Override the module-wide pipe directory before serving.
        config['pipe_dir'] = opts.pipe_dir
    http_stat_publisher(opts.ip, opts.port, opts.path)
def http_stat_publisher(ip='', port=7828, path='/stats'):
    """Serve the aggregated pipe stats as JSON over HTTP, forever.

    GET requests to *path* receive {'stats': ..., 'timestamp': ...};
    any other path receives a JSON error body.
    """
    class _StatsHandler(BaseHTTPRequestHandler):
        def do_GET(self):
            parsed = urlparse.urlparse(self.path)
            if parsed.path == path:
                collected_stats = _load_all_from_pipes()
                status_code = 200
                body = json.dumps({
                    'stats':collected_stats,
                    'timestamp':time.time(),
                })
            else:
                # NOTE(review): 404 would be the conventional status here;
                # kept at 400 to preserve the existing client contract.
                status_code = 400
                body = json.dumps({
                    'message':'The requested resource was not found',
                    'path':parsed.path,
                })
            self.send_response(status_code)
            # Bug fix: the old code assigned to self.headers, which is the
            # parsed *request* header set and never reaches the response.
            # Response headers must be emitted with send_header().
            self.send_header('Content-Type', 'application/json')
            self.end_headers()
            self.wfile.write(body)

    HTTPServer((ip, port), _StatsHandler).serve_forever()
# FIXME The function below is begging to be refactored.
def _load_all_from_pipes():
    """Read every stat pipe in config['pipe_dir'] and sum values per name.

    Each pipe yields lines of the form "name: value"; values for the same
    stat name across pipes are added together.  Opening a FIFO with no
    writer blocks forever, so a SIGALRM-based timeout is armed first;
    pipes that time out are assumed dead and unlinked.
    """
    all_stats = defaultdict(int)
    if os.path.exists(config['pipe_dir']):
        for filename in os.listdir(config['pipe_dir']):
            pipe_path = os.path.join(config['pipe_dir'], filename)
            # Arm the timeout *before* open(): that is the call which blocks.
            _set_pipe_open_timeout(PIPE_OPEN_TIMEOUT)
            try:
                with open(pipe_path) as pipe:
                    _clear_pipe_open_timeout()
                    for line in pipe:
                        cleaned = line.strip()
                        name, raw_value = cleaned.rsplit(':', 1)
                        # Values may be ints or floats; try int first.
                        try:
                            value = int(raw_value.strip())
                        except ValueError:
                            value = float(raw_value.strip())
                        all_stats[name.strip()] += value
            except IOError, e:
                if e.errno == INTERRUPTED_SYSTEM_CALL:
                    # Our timeout fired - no one is writing to this pipe.
                    # Let's try and clean it up.
                    try:
                        os.unlink(pipe_path)
                    except:
                        traceback.print_exc()
                else:
                    raise
    return dict(all_stats)
def _set_pipe_open_timeout(timeout):
interval = 0
signal.setitimer(signal.ITIMER_REAL, timeout, interval)
if timeout:
def _noop(sig, frame):
# Don't do anything with the signal - just pass on.
pass
signal.signal(signal.SIGALRM, _noop)
else:
signal.signal(signal.SIGALRM, signal.SIG_DFL)
def _clear_pipe_open_timeout():
    """Disarm the pipe-open timeout and restore default SIGALRM handling."""
    _set_pipe_open_timeout(0)
if __name__ == '__main__':
    # Run the JSON stat publisher from the command line.
    main()
| {
"repo_name": "dowski/statvent",
"path": "statvent/web.py",
"copies": "1",
"size": "3869",
"license": "bsd-2-clause",
"hash": -4715942450179535000,
"line_mean": 30.7131147541,
"line_max": 80,
"alpha_frac": 0.5479452055,
"autogenerated": false,
"ratio": 4.1781857451403885,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.002039425020864629,
"num_lines": 122
} |
"""A Jupyter Notebook interface to Klampt.
Examples:
Basic usage::
from klampt import *
from klampt.vis.ipython import KlamptWidget
from IPython.display import display
world = WorldModel()
... #set up the world...
kvis = KlamptWidget(world,width=800,height=640)
display(kvis) # This pops up a window in Jupyter
    Immediate changes can be made using the methods in KlamptWidget::
kvis.addText(name="text_id",text="hello",position=(10,10))
kvis.addSphere(x=0,y=1.5,z=0,r=0.4)
Change the configuration of things in the world, and then call update() to see the changes::
robot = world.robot(0)
q = robot.getConfig()
q[2] += 1.0
robot.setConfig(q)
kvis.update() # The previous changes are not made until this is called
If you completely change the number of objects in the world, or their underlying geometries,
you will need to call w.setWorld(world) again. This is relatively expensive, so try not to
do it too often::
world.readElement(...)
kvis.setWorld(world)
"""
from klampt import ThreeJSGetScene,ThreeJSGetTransforms
from klampt.math import vectorops,so3,se3
from klampt.model import types
from klampt.model.trajectory import Trajectory,RobotTrajectory,SE3Trajectory
from klampt import RobotModel,RobotModelLink
import json
import time
import math
import ipywidgets as widgets
from ipywidgets import interact, interactive, fixed, interact_manual
from traitlets import Unicode, Dict, List, Int, validate, observe
import traitlets
import threading
DEFAULT_POINT_RADIUS = 0.05
DEFAULT_AXIS_LENGTH = 0.2
DEFAULT_AXIS_WIDTH = 1
class KlamptWidget(widgets.DOMWidget):
"""
A Python interface with the Jupyter notebook frontend.
The API is similar to the vis module, but has a reduced and slightly modified
set of hooks.
Attributes:
width (Int): the width of the view in pixels (public property)
height (Int): the height of the view in pixels (public property)
scene (Dict): the scene JSON message (private)
transforms (Dict): the transforms JSON message (private)
rpc (Dict): the rpc JSON message (private)
_camera (Dict): the incoming camera JSON message from the frontend (private)
camera (Dict): the outgoing camera JSON message (private)
drawn (Int): the incoming drawn message from the frontend (private)
events (Dict): incoming events from the frontend (private)
world (WorldModel): the WorldModel isinstance
_extras (dict): a dict mapping extra item names to (type,threejs_items) pairs
_rpc_calls (list): a list of pending RPC calls between beginRpc() and endRpc()
_aggregating_rpc (int): non-zero if between beginRpc and endRpc
"""
_model_name = Unicode('KlamptModel').tag(sync=True)
_view_name = Unicode('KlamptView').tag(sync=True)
_model_module = Unicode('klampt-jupyter-widget').tag(sync=True)
_view_module = Unicode('klampt-jupyter-widget').tag(sync=True)
_model_module_version = Unicode('0.1.0').tag(sync=True)
_view_module_version = Unicode('0.1.0').tag(sync=True)
width = Int(800).tag(sync=True)
height = Int(600).tag(sync=True)
scene = Dict().tag(sync=True)
transforms = Dict().tag(sync=True)
rpc = Dict().tag(sync=True)
_camera = Dict().tag(sync=True)
camera = Dict().tag(sync=True)
events = List().tag(sync=True)
drawn = Int(0).tag(sync=True)
def __init__(self,world=None,*args,**kwargs):
widgets.DOMWidget.__init__(self,*args,**kwargs)
self.world = world
self._extras = dict()
self._aggregating_rpc = 0
self._rpc_calls = []
if world is not None:
self.setWorld(world)
self.rpc = {}
return
def setWorld(self,world):
"""Resets the world to a new WorldModel object. """
self.world = world
self._extras = dict()
self._aggregating_rpc = 0
self._rpc_calls = []
s = ThreeJSGetScene(self.world)
self.scene = json.loads(s)
def update(self):
"""Updates the view with changes to the world. Unlike setWorld(), this only pushes the geometry
transforms, so it's much faster."""
if self.world:
s = ThreeJSGetTransforms(self.world)
self.transforms = json.loads(s)
def clear(self):
"""Clears everything from the visualization, including the world."""
self._extras = dict()
self._do_rpc({'type':'reset_scene'})
def clearExtras(self):
"""Erases all ghosts, lines, points, text, etc from the visualization, but keeps the world."""
self._extras = dict()
self._do_rpc({'type':'clear_extras'})
#TODO: implement this to be more similar to the vis API
#def clearText(self):
def add(self,name,item,type='auto'):
"""Adds the item to the world, and returns a list of identifiers associated with it.
Args:
name (str): the name of the item, which will be used to refer to it from now on
item: the item data
type (str, optional): either 'auto' (default) or a string describing the type of
``item``, which can help disambiguate some types like 'Config' vs 'Vector3'
(see below)
Supports items of type:
* Config, as a ghost (list, same size as robot)
* Configs, as a set of ghosts (list of lists, same size as robot)
* Vector3, drawn as a sphere (3-list)
* RigidTransform, drawn as an xform (pair of 9-list and 3-list)
* Configs, drawn as a polyline (list of 3-lists)
* Trajectory, drawn either as:
* a polyline (3D Trajectory objects),
* set of milestones (Trajectory or RobotTrajectory objects)
* a polyline + set of rigid transform milestones (SE3Trajectory objects)
* WorldModel, but only one world at once is supported (same as setWorld).
"""
if type == 'auto':
try:
candidates = types.objectToTypes(item,self.world)
except Exception:
raise ValueError("Invalid item, not a known Klamp't type")
if isinstance(candidates,(list,tuple)):
type = candidates[0]
else:
type = candidates
if type == 'Config':
res = self.addGhost(name)
self.setGhostConfig(item,name)
return [res]
elif type == 'Configs':
if len(item[0]) == 3:
#it's a polyline
return [self.addPolyline(name,item)]
else:
#it's a set of configurations
names = []
for i,q in enumerate(item):
iname = name+'_'+str(i)
self.addGhost(iname)
self.setGhostConfig(q,iname)
names.append(iname)
self._extras[name] = ('Configs',names)
return names
elif type == 'Vector3':
res = self.addSphere(name,item[0],item[1],item[2],DEFAULT_POINT_RADIUS)
return [res]
elif type == 'RigidTransform':
res = self.addXform(name,length=DEFAULT_AXIS_LENGTH,width=DEFAULT_AXIS_WIDTH)
self.setTransform(name,R=item[0],t=item[1])
return [res]
elif type == 'Trajectory':
if isinstance(item,SE3Trajectory):
res = []
ttraj = []
for i in item.milestones:
T = item.to_se3(item.milestones[i])
res += self.add(name+"_milestone_"+str(i),T)
ttraj.append(T[1])
res += self.add(name,ttraj)
self._extras[name] = ('Trajectory',res)
return res
elif isinstance(item,RobotTrajectory):
#it's a set of configurations
rindex = item.robot.index
names = []
for i,q in enumerate(item):
iname = name+'_'+str(i)
self.addGhost(iname,rindex)
self.setGhostConfig(q,iname,rindex)
names.append(iname)
self._extras[name] = ('Configs',names)
return names
else:
return self.add(name,item.milestones)
elif type == 'WorldModel':
if name != 'world' or self.world is not None:
print "KlamptWidget.add: Warning, only one world is supported, and should be added as world"
self.setWorld(item)
else:
raise ValueError("KlamptWidget can't handle objects of type "+type+" yet")
def remove(self,name):
"""Removes a certain named target, e.g. a ghost, line, text, etc."""
self._do_rpc({'type':'remove','object':name})
def hide(self,name,hidden=True):
"""Hides/shows named target, e.g. a ghost, line, text, etc."""
self._do_rpc({'type':'set_visible','object':name,'value':(not hidden)})
def resetCamera(self):
"""Resets the camera to the original view"""
self._do_rpc({'type':'reset_camera'})
def getCamera(self):
"""Returns a data structure representing the current camera view"""
res = dict(self._camera).copy()
if 'r' in res:
del res['r']
return res
def setCamera(self,cam):
"""Sets the current camera view"""
self.camera = cam
marked = dict(cam).copy()
marked['r'] = 1
self._camera = marked
def hide(self,name,value=False):
"""Changes the visibility status of a certain named target"""
target_name = name
if name in self._extras:
type,data = self._extras[target]
if type == 'Config':
target_name = data
elif type == 'Configs' or type == 'Trajectory':
self.beginRpc(strict=False)
for subitem in data:
self._do_rpc({'type':'set_visible','object':subitem,'value':value})
self.endRpc(strict=False)
return
self._do_rpc({'type':'set_visible','object':target_name,'value':value})
def setColor(self,target,r,g,b,a=1.0):
"""Sets the given RobotModel, RobotModelLink, named link, indexed link,
or object name to some RGBA color (each channel in the range [0,1])."""
recursive=False
target_name = None
if isinstance(target, (int, long, float, complex)):
robot = self.world.robot(0)
target_as_link = robot.link(target)
target_name=target_as_link.getName()
elif isinstance(target,RobotModelLink):
target_name=target.getName()
elif isinstance(target,RobotModel):
target_name=target.getName()
recursive = True
elif isinstance(target, basestring):
target_name=target
if target in self._extras:
type,data = self._extras[target]
if type == 'Config':
target_name = data
recursive = True
elif type == 'Configs' or type == 'Trajectory':
#it's a group set everything under the group
self.beginRpc(strict=False)
for subitem in data:
self.setColor(subitem,r,g,b,a)
self.endRpc(strict=False)
return
else:
#see if it's the name of a robot
try:
self.world.robot(target).index
recursive = True
except Exception:
found = False
for r in range(self.world.numRobots()):
if self.world.robot(r).link(target) >= 0:
found = True
break
if not found:
raise ValueError("ERROR: setColor requires target of either robot, link, index, or string name of object!")
else:
raise ValueError("ERROR: setColor requires target of either robot, link, index, or string name of object!")
rgba_color = [r,g,b,a]
if recursive:
self._do_rpc({'type':'set_color','object':target_name,'rgba':rgba_color,'recursive':True})
else:
self._do_rpc({'type':'set_color','object':target_name,'rgba':rgba_color})
#print "Setting link color!",('object',target_name,'rgba'),rgba_color
def setTransform(self,name,R=so3.identity(),t=[0]*3,matrix=None):
"""Sets the transform of the target object. If matrix is given, it's a 16-element
array giving the 4x4 homogeneous transform matrix, in row-major format. Otherwise,
R and t are the 9-element klampt.so3 rotation and 3-element translation."""
if matrix != None:
self._do_rpc({'type':'set_transform','object':name,'matrix':matrix})
else:
self._do_rpc({'type':'set_transform','object':name,'matrix':[R[0],R[3],R[6],t[0],R[1],R[4],R[7],t[1],R[2],R[5],R[8],t[2],0,0,0,1]})
def addGhost(self,name="ghost",robot=0):
"""Adds a ghost configuration of the robot that can be posed independently.
name can be set to identify multiple ghosts.
The identifier of the ghost in the three.js scene is prefixname + robot.getName(),
and all the links are identified by prefixname + link name."""
if robot < 0 or robot >= self.world.numRobots():
raise ValueError("Invalid robot specified")
target_name=self.world.robot(robot).getName()
self._do_rpc({'type':'add_ghost','object':target_name,'prefix_name':name})
self._extras[name] = ('Config',name+target_name)
return name
def getRobotConfig(self,robot=0):
"""A convenience function. Gets the robot's configuration in the visualization
world."""
if robot < 0 or robot >= self.world.numRobots():
raise ValueError("Invalid robot specified")
robot = self.world.robot(robot)
q = robot.getConfig()
return q
    def setGhostConfig(self,q,name="ghost",robot=0):
        """Sets the configuration of the ghost named ``name`` (of robot #robot) to q.
        Temporarily poses the actual robot model at q to read off link transforms,
        then restores its original configuration."""
        if robot < 0 or robot >= self.world.numRobots():
            raise ValueError("Invalid robot specified")
        robot = self.world.robot(robot)
        q_original = robot.getConfig()
        if len(q) != robot.numLinks():
            raise ValueError("Config must be correct size: %d != %d"%(len(q),robot.numLinks()))
        robot.setConfig(q)
        self.beginRpc(strict=False)
        rpcs = []   #NOTE(review): unused local, kept as-is
        for i in range(robot.numLinks()):
            T = robot.link(i).getTransform()
            p = robot.link(i).getParent()
            if p>=0:
                #express the link transform relative to its parent, since the
                #three.js scene graph mirrors the kinematic hierarchy
                Tp = robot.link(p).getTransform()
                T = se3.mul(se3.inv(Tp),T)
            mat = se3.homogeneous(T)
            #mat is now a 4x4 homogeneous matrix
            linkname = name+robot.link(i).getName()
            #send to the ghost link with name "name"...
            self._do_rpc({'type':'set_transform','object':linkname,'matrix':[mat[0][0],mat[0][1],mat[0][2],mat[0][3],mat[1][0],mat[1][1],mat[1][2],mat[1][3],mat[2][0],mat[2][1],mat[2][2],mat[2][3],mat[3][0],mat[3][1],mat[3][2],mat[3][3]]})
        self.endRpc(strict=False)
        robot.setConfig(q_original) #restore original config
def addText(self,name="HUD_Text1",text="",position=None):
"""Adds a new piece of text displayed on the screen. name is a unique identifier of
the text, and position=(x,y) are the coordinates of upper left corner of the the text,
in percent. """
if position is None:
x,y = None,None
else:
x,y = position
self._extras[name] = ('Text',(x,y,text))
self._do_rpc({'type':'add_text','name':name,'x':x,'y':y,'text':text})
def addSphere(self,name="Sphere1",x=0,y=0,z=0,r=1):
"""Adds a new sphere to the world with the given x,y,z position and radius r."""
self._extras[name] = ('Sphere',(x,y,z,r))
self._do_rpc({'type':'add_sphere','name':name,'x':x,'y':y,'z':z,'r':r})
def addLine(self,name="Line1",x1=0,y1=0,z1=0,x2=1,y2=1,z2=1):
"""Adds a new line segment to the world connecting point (x1,y1,z1) to (x2,y2,z2)"""
verts = [x1,y1,z1,x2,y2,z2]
self._extras[name] = ('Line',verts)
self._do_rpc({'type':'add_line','name':name,'verts':verts})
def addXform(self,name="Xform1",length=DEFAULT_AXIS_LENGTH,width=DEFAULT_AXIS_WIDTH):
"""Adds a new transform widget to the world with the given line length and width"""
self._extras[name] = ('RigidTransform',(length,width))
self._do_rpc({'type':'add_xform','name':name,'length':length,'width':width})
def addPolyline(self,name="Line1",pts=[]):
"""Adds a new polygonal line segment to the world connecting the given list of 3-tuples"""
verts = sum(pts,[])
self._extras[name] = ('Line',verts)
self._do_rpc({'type':'add_line','name':name,'verts':verts})
def addTriangle(self,name="Tri1",a=(0,0,0),b=(1,0,0),c=(0,1,0)):
"""Adds a new triangle with vertices a,b,c. a,b, and c are 3-lists or 3-tuples."""
verts = a+b+c
self._extras[name] = ('Trilist',verts)
self._do_rpc({'type':'add_trilist','name':name,'verts':verts})
def addQuad(self,name="Quad1",a=(0,0,0),b=(1,0,0),c=(1,1,0),d=(0,1,0)):
"""Adds a new quad (in CCW order) with vertices a,b,c,d. a,b,c and d are 3-lists or 3-tuples."""
verts = a+b+c+a+c+d
self._extras[name] = ('Trilist',verts)
self._do_rpc({'type':'add_trilist','name':name,'verts':verts})
def addBillboard(self,name="Billboard",image=[[]],format='auto',crange=[0,1],colormap='auto',filter='linear',size=(1,1)):
"""Adds a 2D billboard to the world. The image is a 2D array of
values, which is texure-mapped to a quad.
By default, the billboard is centered at (0,0,0) and faces up.
To modify its location or orientation, call ``setTransform`` on it.
Args:
name (str): the name used to refer to this item
image (list of lists or str): a 2D array of single-channel values, (r,g,b) tuples, or (r,g,b,a)
tuples. Rows are listed top to bottom, rows from left to right. Or, can also be a URL.
format (str, optional): The image format. Can be:
* 'auto': autodetect the type from the image. If the image contains values, the format is 'value'.
* 'value': the values are mapped through either 'opacity', 'rainbow', or gradient
color mapping.
* 'rgb': if the image contains values, they are interpreted as RGB values packed in 24 bit
integers. Otherwise, the first 3 channels of the tuple are used.
* 'rgba': if the image contains values, they are interpreted as RGB values packed in 32 bit
integers. Otherwise, they are assumed to be (r,g,b,a) tuples
crange (pair of numbers, optional): the range of the given values / channels. By default [0,1], but if you are using uint8
encoding this should be set to [0,255].
colormap (optional): how the color of the billboard should be set based on the image. Valid values are:
* 'auto': if the image contains values, the gradient ((0,0,0),(1,1,1)) is used. Otherwise
'replace' is used.
* (color1,color2): interpolates between the two given (r,g,b) or (r,g,b,a) tuples.
* 'opacity': sets the alpha channel only.
* 'modulate': the value / rgb / rgba texture modulates the billboard color as set by setColor
filter (str, optional): how values between pixels are interpolated. Either 'nearest' or 'linear'.
size (pair of numbers, optional): the (width,height) pair of the billboard, in world units.
"""
if not isinstance(image,str):
import struct
import base64
bytes = []
w,h = None,None
h = len(image)
for row in image:
if w == None:
w = len(row)
else:
assert w == len(row),"Image is not a 2D array"
pixel = image[0][0]
if format == 'auto':
if hasattr(pixel,'__iter__'):
if len(pixel) == 4:
format = 'rgba'
else:
format = 'rgb'
else:
format = 'value'
else:
if not hasattr(pixel,'__iter__'):
format = 'p'+format
gradient = (type(colormap) != str)
for row in image:
for pixel in row:
if format == 'value':
u = min(1,max(0,(pixel - crange[0]) / (crange[1]-crange[0])))
if gradient:
color = vectorops.interpolate(gradient[0],gradient[1],u)
r = 0xff * min(1,max(0,color[0]))
g = 0xff * min(1,max(0,color[1]))
b = 0xff * min(1,max(0,color[2]))
packed = (0xff << 24) | (int(b) << 16) | (int(g) << 8) | int(r)
bytes.append(struct.pack('<I',packed))
else:
val = 0xff * u
bytes.append(struct.pack('B',val))
elif format == 'prgb' or format == 'prgba':
bytes.append(struct.pack('<I', pixel))
elif format == 'rgb':
r = 0xff * min(1,max(0,(pixel[0] - crange[0]) / (crange[1]-crange[0])))
g = 0xff * min(1,max(0,(pixel[1] - crange[0]) / (crange[1]-crange[0])))
b = 0xff * min(1,max(0,(pixel[2] - crange[0]) / (crange[1]-crange[0])))
packed = (0xff << 24) | (int(b) << 16) | (int(g) << 8) | int(r)
bytes.append(struct.pack('<I', packed))
elif format == 'rgba':
r = 0xff * min(1,max(0,(pixel[0] - crange[0]) / (crange[1]-crange[0])))
g = 0xff * min(1,max(0,(pixel[1] - crange[0]) / (crange[1]-crange[0])))
b = 0xff * min(1,max(0,(pixel[2] - crange[0]) / (crange[1]-crange[0])))
a = 0xff * min(1,max(0,(pixel[3] - crange[0]) / (crange[1]-crange[0])))
packed = (int(a) << 24) | (int(b) << 16) | (int(g) << 8) | int(r)
bytes.append(struct.pack('<I', packed))
else:
raise ValueError("Invalid format "+format)
image = base64.b64encode(''.join(bytes))
self._do_rpc({'type':'add_billboard','name':name,'imagedata':image,'width':w,'height':h,'size':size,'filter':filter,'colormap':colormap})
else:
self._do_rpc({'type':'add_billboard','name':name,'image':image,'size':size,'filter':filter,'colormap':colormap})
self._extras[name] = ('Billboard',image)
def beginRpc(self,strict=True):
"""Begins collecting a set of RPC calls to be sent at once, which is a bit faster than doing multiple
addX or setX calls.
Usage::
widget.beginRpc()
widget.addX()
...
widget.setX()
widget.endRpc() #this sends all the messages at once
"""
if self._aggregating_rpc == 0:
assert len(self._rpc_calls)==0
if self._aggregating_rpc != 0 and strict:
raise RuntimeError("Each beginRpc() call must be ended with an endRpc() call")
self._aggregating_rpc += 1
return
def _do_rpc(self,msg):
"""Internally used to send or queue an RPC call"""
if self._aggregating_rpc:
self._rpc_calls.append(msg)
else:
self.rpc = msg
def endRpc(self,strict=True):
"""Ends collecting a set of RPC calls to be sent at once, and sends the accumulated message"""
if self._aggregating_rpc <= 0 or (self._aggregating_rpc!=1 and strict):
raise ValueError("Each beginRpc() call must be ended with an endRpc() call")
self._aggregating_rpc -= 1
if self._aggregating_rpc == 0 and len(self._rpc_calls) > 0:
self.rpc = {'type':'multiple','calls':self._rpc_calls}
self._rpc_calls = []
@observe('_camera')
def _recv_camera(self,cam):
#trigger an update?
marked = cam['new'].copy()
marked['r'] = 1
self._camera = marked
@observe('events')
def _recv_events(self,events):
elist = events['new']
if len(elist) > 0:
for event in elist:
self.on_event(event)
self.events = []
@observe('drawn')
def _recv_drawn(self,drawn):
self.drawn = 0
print "Klampt widget drawn!"
def on_event(self,e):
print "KlamptWidget got event",e
def EditConfig(robot,klampt_widget=None,ghost=None,link_selector='slider',link_subset=None,callback=None):
    """Creates a Jupyter widget for interactive editing of the robot's configuration.
    Args:
        robot (RobotModel): the robot to edit
        klampt_widget (KlamptWidget, optional): the KlamptWidget visualization to update, or None if you
            don't want to visualize the editing.
        ghost (str, optional): if not None, this is the name of the ghost that should be updated. Widget
            updates are shown on the given ghost rather than the actual robot. To get the ghost
            configuration, you'll need to update the callback.
        link_selector (str): how to select links. Either:
            * 'slider': uses an IntSlider widget
            * 'dropdown': uses a Dropdown widget
            * 'all': shows sliders for all links
        link_subset (list, optional): if given, only a subset of links are shown. Otherwise, only non-fixed links are shown.
        callback (function, optional): a function callback(index,q) called when a DOF's value has changed.
    Returns:
        VBox: a widget to be displayed as you like
    """
    qmin,qmax = robot.getJointLimits()
    qedit = robot.getConfig()[:]
    if link_subset is None:
        #default to all non-fixed links
        link_subset = [i for i in xrange(robot.numLinks()) if qmin[i] != qmax[i]]
    else:
        for link in link_subset:
            if link < 0 or link >= robot.numLinks():
                raise ValueError("Invalid link specified in link_subset")
        link_subset = link_subset[:]
    def _dochange_link(link):
        #updates the joint slider's range and value for the newly selected link
        if not math.isinf(qmin[link]):
            joint_slider.min = qmin[link]
            joint_slider.max = qmax[link]
        else:
            #unbounded joint: fall back to an arbitrary finite range
            joint_slider.min = -2
            joint_slider.max = 2
        joint_slider.value = qedit[link]
        if klampt_widget and ghost is None:
            #show selected link in color
            klampt_widget.beginRpc()
            #restore old colors
            for i in link_subset:
                #BUG FIX: was robot.link(link), which painted every link in the
                #subset with the selected link's color; each link must get its
                #own appearance color restored
                klampt_widget.setColor(i,*robot.link(i).appearance().getColor())
            #change new color (lightened toward white)
            color = robot.link(link).appearance().getColor()
            r,g,b,a = color
            r = 1.0-(1.0-r)*0.5
            g = 1.0-(1.0-g)*0.5
            #NOTE(review): the blue channel is not lightened in the original;
            #kept as-is (possibly an intentional tint)
            klampt_widget.setColor(link,r,g,b,a)
            klampt_widget.endRpc()
    def _dochange(link,value):
        #commits an edited DOF value to the ghost or the actual robot
        if ghost:
            qold = robot.getConfig()
        qedit[link] = value
        robot.setConfig(qedit)
        if klampt_widget:
            if ghost:
                klampt_widget.setGhostConfig(qedit,ghost,robot.index)
            else:
                klampt_widget.update()
        if ghost:
            robot.setConfig(qold)
        if callback:
            callback(link,qedit)
    if link_selector == 'slider':
        link_slider=widgets.IntSlider(description='Link',min=0,max=len(link_subset)-1,value=0)
        joint_slider=widgets.FloatSlider(description='Value',min=0,max=1,value=0.5,step=0.001)
        @interact(index=link_slider)
        def change_link(index):
            link = link_subset[index]
            _dochange_link(link)
        link_slider.observe(lambda change:change_link(change['new']),'value')
        def change_joint_value(value):
            link = link_subset[link_slider.value]
            _dochange(link,value)
        joint_slider.observe(lambda change:change_joint_value(change['new']),'value')
        return widgets.VBox([link_slider,joint_slider])
    elif link_selector == 'dropdown':
        link_dropdown=widgets.Dropdown(description='Link',options=[robot.link(i).getName() for i in link_subset],value=robot.link(link_subset[0]).getName())
        joint_slider=widgets.FloatSlider(description='Value',min=0,max=1,value=0.5,step=0.001)
        def change_link(name):
            link = robot.link(name).index
            _dochange_link(link)
        link_dropdown.observe(lambda change:change_link(change['new']),'value')
        def change_joint_value(value):
            link = robot.link(link_dropdown.value).index
            _dochange(link,value)
        joint_slider.observe(lambda change:change_joint_value(change['new']),'value')
        return widgets.VBox([link_dropdown,joint_slider])
    elif link_selector == 'all':
        sliders = []
        for link in link_subset:
            sliders.append(widgets.FloatSlider(description=robot.link(link).getName(),min=qmin[link],max=qmax[link],value=qedit[link],step=0.001))
            sliders[-1].observe(lambda value,link=link:_dochange(link,value['new']),'value')
        return widgets.VBox(sliders)
    else:
        raise ValueError("Invalid link_selector, must be slider, dropdown, or all")
def EditPoint(value=None,min=None,max=None,labels=None,
              klampt_widget=None,point_name='edited_point',point_radius=DEFAULT_POINT_RADIUS,
              callback=None):
    """Creates a Jupyter widget for interactive editing of an xyz point.
    Args:
        value (list of 3 floats, optional): the initial value of the point. If given, this must
            be a list and will hold the edited values.
        min/max (list of 3 floats, optional): the minimum and maximum of the point
        labels (list of strs, optional): if given, the labels of each channel
        klampt_widget (KlamptWidget, optional): the KlamptWidget visualization to update,
            or None if you don't want to visualize the point.
        point_name (str, optional): the name of the point in the visualization world to edit.
        point_radius (float, optional): the radius of the visualized point.
        callback (function, optional): a function callback(xyz) called when a DOF's value has changed.
    Returns:
        VBox: a widget that can be displayed as you like
    """
    if value is None:
        value = [0,0,0]
    elif not isinstance(value,list) or len(value) != 3:
        raise ValueError("value must be a 3-element list")
    if labels is None:
        labels = 'xyz'
    #normalize min/max to 3-element lists
    if min is None:
        min = [-5]*3
    elif isinstance(min,(int,float)):
        min = [min]*3
    if max is None:
        max = [5]*3
    elif isinstance(max,(int,float)):
        max = [max]*3
    if len(min) != 3:
        raise ValueError("min must be a 3-element list")
    if len(max) != 3:
        raise ValueError("max must be a 3-element list")
    if klampt_widget:
        klampt_widget.addSphere(name=point_name,x=value[0],y=value[1],z=value[2],r=point_radius)
    def _on_change(axis,coord):
        #mutates value in place so the caller's list tracks the edits
        value[axis] = coord
        if klampt_widget:
            klampt_widget.addSphere(name=point_name,x=value[0],y=value[1],z=value[2],r=point_radius)
        if callback:
            callback(value)
    sliders = []
    for axis in range(3):
        s = widgets.FloatSlider(description=labels[axis],value=value[axis],min=min[axis],max=max[axis],step=0.001)
        s.observe(lambda change,axis=axis:_on_change(axis,change['new']),'value')
        sliders.append(s)
    return widgets.VBox(sliders)
def EditTransform(value=None,xmin=None,xmax=None,labels=None,
                  klampt_widget=None,xform_name='edited_xform',axis_length=DEFAULT_AXIS_LENGTH,axis_width=DEFAULT_AXIS_WIDTH,
                  callback=None):
    """Creates a Jupyter widget for interactive editing of a rigid transform point
    Args:
        value (klampt.se3 element), optional: the initial value of the transform (klampt.se3 element).
            If given as (R,t), the R and t members must be lists and will hold the edited values.
        xmin/xmax (list of 3 floats, optional): the minimum and maximum of the translation
        labels (list of strs, optional): if given, the labels of roll,pitch,yaw and x,y,z
        klampt_widget (KlamptWidget, optional): the KlamptWidget visualization to update, or None if
            you don't want to visualize the point.
        xform_name (str, optional): the name of the xform in the visualization world to edit.
        axis_length,axis_width (float, optional): the length and width of the visualized widget
        callback (function, optional): a function callback((R,t)) called when a DOF's value has changed.
    Returns:
        VBox: a widget that can be displayed as you like
    """
    if value is None:
        value = se3.identity()
    else:
        if not isinstance(value,(tuple,list)):
            raise ValueError("value must be a 2-element sequence")
        if len(value) != 2:
            raise ValueError("value must be a 2-element sequence")
        if len(value[0]) != 9:
            raise ValueError("value[0] must be a 9-element list")
        if len(value[1]) != 3:
            raise ValueError("value[1] must be a 3-element list")
    if labels is None:
        labels = ['roll','pitch','yaw','x','y','z']
    # normalize translation bounds to 3-element lists
    if xmin is None:
        xmin = [-5,-5,-5]
    elif isinstance(xmin,(int,float)):
        xmin = [xmin,xmin,xmin]
    if xmax is None:
        xmax = [5,5,5]
    elif isinstance(xmax,(int,float)):
        xmax = [xmax,xmax,xmax]
    if len(xmin) != 3:
        raise ValueError("xmin must be a 3-element list")
    if len(xmax) != 3:
        raise ValueError("xmax must be a 3-element list")
    if klampt_widget:
        klampt_widget.addXform(name=xform_name,length=axis_length,width=axis_width)
        klampt_widget.setTransform(name=xform_name,R=value[0],t=value[1])
    # current rotation expressed as roll/pitch/yaw, mirrored by the sliders
    rpy = list(so3.rpy(value[0]))
    def _do_rotation_change(index,element):
        # mutate value[0] in place so the caller's (R,t) tracks the edits
        rpy[index] = element
        value[0][:] = so3.from_rpy(rpy)
        if klampt_widget:
            klampt_widget.setTransform(name=xform_name,R=value[0],t=value[1])
        if callback:
            callback(value)
    def _do_translation_change(index,element):
        value[1][index] = element
        if klampt_widget:
            klampt_widget.setTransform(name=xform_name,R=value[0],t=value[1])
        if callback:
            callback(value)
    elems = []
    # NOTE(review): rotation sliders span [0, 2*pi]; negative rpy components
    # from so3.rpy may be clamped by the slider -- confirm intended behavior
    for i in range(3):
        elems.append(widgets.FloatSlider(description=labels[i],value=rpy[i],min=0,max=math.pi*2,step=0.001))
        elems[-1].observe(lambda v,i=i:_do_rotation_change(i,v['new']),'value')
    for i in range(3):
        elems.append(widgets.FloatSlider(description=labels[3+i],value=value[1][i],min=xmin[i],max=xmax[i],step=0.001))
        elems[-1].observe(lambda v,i=i:_do_translation_change(i,v['new']),'value')
    return widgets.VBox(elems)
class Playback(widgets.VBox):
    """A play/pause/reset widget associated with a KlamptWidget.
    When Play is clicked, advance() is invoked repeatedly on a background
    thread, so the advance callback should be safe to run off the main thread.
    Attributes:
        klampt_widget (KlamptWidget, optional): the widget that should be updated after each advance call
        advance (function, optional): a function to be called for each new frame.
        pause (function, optional): a function to be called when pause is clicked.
        reset (function, optional): a function to be called when reset is clicked.
        maxframes (int, optional): the maximum number of frames.  If None, this is unlimited.
        framerate (int, optional): number of frames per second desired.  If None, frames are run as
            quickly as possible
        quiet (bool): if True, suppresses output during play
        playbutton, stepbutton, pausebutton, resetbutton (Button): the Button widgets
    """
    def __init__(self,klampt_widget=None,advance=None,reset=None,pause=None,maxframes=None,framerate=None,quiet=False):
        """Arguments are the same as the class attributes (see class docstring)."""
        self.klampt_widget = klampt_widget
        self.advance = advance
        self.reset = reset
        self.pause = pause
        self.maxframes = maxframes
        self.framerate = framerate
        self.quiet = quiet
        self.playbutton = widgets.Button(
            description='Play',
            disabled=False,
            button_style='', # 'success', 'info', 'warning', 'danger' or ''
            tooltip='Start the animation',
            icon='play')
        self.stepbutton = widgets.Button(
            description='Step',
            disabled=False,
            button_style='', # 'success', 'info', 'warning', 'danger' or ''
            tooltip='Step the animation',
            icon='step-forward')
        self.pausebutton = widgets.Button(
            description='Pause',
            disabled=True,
            button_style='', # 'success', 'info', 'warning', 'danger' or ''
            tooltip='Pause the animation',
            icon='pause')
        self.resetbutton = widgets.Button(
            description='Reset',
            disabled=False,
            button_style='', # 'success', 'info', 'warning', 'danger' or ''
            tooltip='Reset the animation',
            icon='undo')
        # shared state between the UI thread and the play thread:
        # 'thread' holds the running play thread (or None), 'stop' is a flag
        lock = threading.Lock()
        playdata = {'thread':None,'stop':0}
        self.playdata = playdata
        self.lock = lock
        self.frame = 0
        #If we don't create this now, exceptions will never be printed
        self.out = widgets.Output()
        def play_thread_func(lock,playdata):
            # Background loop: advances frames until stopped or maxframes reached.
            #print "Starting play thread"
            if self.framerate is None:
                dt = 0
            else:
                dt = 1.0/self.framerate
            playdata['stop'] = 0
            def do_advance(drawn=False):
                # one frame step; also used as the 'drawn' observer callback
                if playdata['stop']:
                    return
                lock.acquire()
                try:
                    self._advance()
                except Exception as e:
                    with self.out:
                        print "Exception occurred during Playback.advance, stopping animation"
                        print e
                    playdata['stop'] = 1
                    lock.release()
                    return
                self.frame += 1
                lock.release()
            if self.klampt_widget:
                # advance in lock-step with frontend repaints
                self.klampt_widget.observe(do_advance,'drawn')
            t0 = time.time()
            do_advance()
            while True:
                if playdata['stop']:
                    break
                lock.acquire()
                if self.maxframes is not None and self.frame >= self.maxframes:
                    #print "Stopping play by completion"
                    self.playbutton.disabled = False
                    self.pausebutton.disabled = True
                    self.frame = 0
                    lock.release()
                    break
                lock.release()
                if not self.klampt_widget:
                    # no widget to pace us: advance on the timer alone
                    do_advance()
                t1 = time.time()
                time.sleep(max(dt-(t1-t0),0))
                t0 = t1
            if self.klampt_widget:
                self.klampt_widget.unobserve(do_advance,'drawn')
            playdata['thread'] = None
            return
        def on_play(b):
            #print "Play clicked"
            self.pausebutton.disabled = False
            self.playbutton.disabled = True
            assert playdata['thread'] == None
            playdata['thread'] = threading.Thread(target=play_thread_func,args=(lock,playdata))
            playdata['thread'].start()
        def on_pause(b):
            #print "Pause clicked"
            self.stop()
            self._pause()
        def on_step(b):
            #print "Step clicked"
            self.stop()
            self.frame += 1
            self._advance()
        def on_reset(b):
            #print "Reset clicked"
            self.stop()
            self.frame = 0
            self.out.clear_output()
            self._reset()
        self.playbutton.on_click(on_play)
        self.stepbutton.on_click(on_step)
        self.pausebutton.on_click(on_pause)
        self.resetbutton.on_click(on_reset)
        widgets.VBox.__init__(self,[widgets.HBox([self.playbutton,self.stepbutton,self.pausebutton,self.resetbutton]),
            self.out])
def stop(self):
"""Stops any ongoing playback"""
lock = self.lock
playdata = self.playdata
if playdata['thread'] is not None:
#playing
lock.acquire()
playdata['stop'] = 1
lock.release()
playdata['thread'].join()
playdata['thread'] = None
playdata['stop'] = 0
self.pausebutton.disabled = True
self.playbutton.disabled = False
def _advance(self):
if self.advance:
if self.quiet:
self.advance()
else:
with self.out:
self.advance()
if self.klampt_widget:
self.klampt_widget.update()
def _reset(self):
if self.reset:
with self.out:
self.reset()
if self.klampt_widget:
self.klampt_widget.update()
def _pause(self):
if self.pause:
with self.out:
self.pause()
if self.klampt_widget:
self.klampt_widget.update()
| {
"repo_name": "krishauser/Klampt",
"path": "Python/python2_version/klampt/vis/ipython/widgets.py",
"copies": "1",
"size": "42495",
"license": "bsd-3-clause",
"hash": 1126795481282057900,
"line_mean": 41.495,
"line_max": 239,
"alpha_frac": 0.5657136134,
"autogenerated": false,
"ratio": 3.842919153553988,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4908632766953988,
"avg_score": null,
"num_lines": null
} |
# ajutils.py by ApolloJustice
# A HexChat plugin providing assorted convenience commands (ZNC helpers,
# host-based bans/modes, ChanServ shortcuts).  For use with Python 3.
# Note: intentionally not PEP-8 compliant.
__module_name__ = 'AJUtils'                 # plugin metadata read by HexChat
__module_version__ = '1.0'
__module_description__ = 'General commands.'
__author__ = 'ApolloJustice'
zncPrefix = '*'                             # status-window prefix used by the ZNC bouncer
import hexchat
def nea():
    # Notify the user that a command was invoked without arguments.
    hexchat.emit_print('Notice', '{0} [S]'.format(__module_name__), 'No arguments given.')
def unf():
    # Notify the user that the requested nick is not in the channel.
    hexchat.emit_print('Notice', '{0} [S]'.format(__module_name__), 'User not found.')
def getHost(uList, target):
    """Returns the userlist entry whose nick matches target (case-insensitive).
    Mirrors the original behavior: if nothing matches, the last entry examined
    is returned, and callers re-check the nick afterward."""
    wanted = target.lower()
    for user in uList:
        if user.nick.lower() == wanted:
            break
    return user
def clearstatus(word, word_eol, userdata):
    # Close every "*status" query window.  find_context returns None once no
    # more remain; calling .command on None raises AttributeError, ending the loop.
    while True:
        try:
            ctx = hexchat.find_context(channel='{0}status'.format(zncPrefix))
            ctx.command("close")
        except AttributeError:
            break
    return hexchat.EAT_ALL
def disablechan(word, word_eol, userdata):
    # Part the current channel; if userdata is "disabled", also disable it in ZNC.
    chan = hexchat.get_info('channel')
    action = userdata
    if action == "disabled":
        hexchat.command('RAW PRIVMSG *status :disablechan %s' % chan)
    if len(word) == 1:
        hexchat.command('raw PART %s' % chan)
    else:
        # any extra words form the part message
        hexchat.command('raw PART %s :%s' % (chan, word_eol[1]))
    hexchat.emit_print('Notice', '%s [S]' % __module_name__, 'Parted %s and %s it in ZNC.' % (chan, action))
    return hexchat.EAT_ALL
def sudo(word, word_eol, userdata):
    # Temporarily op ourselves via ChanServ, run the command, then deop.
    if len(word) <= 1:
        nea()
        return hexchat.EAT_ALL
    chan = hexchat.get_info('channel')
    hexchat.command('RAW PRIVMSG ChanServ :op %s' % chan)
    # delay the command until after the op, then deop shortly afterwards
    hexchat.command('timer 1 %s' % word_eol[1])
    hexchat.command('timer 1.7 RAW PRIVMSG ChanServ :deop %s' % chan)
    return hexchat.EAT_ALL
def topicappend(word, word_eol, userdata):
    """Append text to the current channel topic, separated by ' | '."""
    # BUG FIX: the original built the new topic (reading word_eol[1]) BEFORE
    # the argument-count guard, so calling /topicappend with no argument
    # raised IndexError instead of printing the "no arguments" notice.
    if len(word) <= 1:
        nea()
        return hexchat.EAT_ALL
    oldtopic = hexchat.get_info('topic')
    newtopic = '%s | %s' % (oldtopic.rstrip(), word_eol[1])
    hexchat.command('topic %s' % newtopic)
    return hexchat.EAT_ALL
def hostignore(word, word_eol, userdata):
    """Ignore/unignore (per userdata) a user by their *!*@host mask."""
    userlist = hexchat.get_list('users')
    action = userdata
    if len(word) <= 1:
        nea()
        # BUG FIX: the original returned `hexchat.EAT_ALLs` (a typo), which
        # raised AttributeError at runtime on the no-argument path.
        return hexchat.EAT_ALL
    user = getHost(userlist, word[1])
    host = user.host.split('@')[1]
    if user.nick.lower() == word[1].lower(): hexchat.command('%s *!*@%s' % (action, host))
    else: unf()
    return hexchat.EAT_ALL
def hostMode(word, word_eol, userdata):
    """Apply a channel mode (userdata, e.g. '+q') to a user's *!*@host mask."""
    userlist = hexchat.get_list('users')
    mode = userdata
    if len(word) <= 1:
        nea()
        return hexchat.EAT_ALL
    target = word[1]
    chan = hexchat.get_info('channel')
    if any(marker in target for marker in ("@", "$", ":")):
        # Already looks like a full mask or extban; apply it verbatim.
        hexchat.command('raw MODE %s %s %s' % (chan, mode, target))
        return hexchat.EAT_ALL
    user = getHost(userlist, target)
    host = user.host.split('@')[1]
    if user.nick.lower() == target.lower():
        hexchat.command('raw MODE %s %s *!*@%s' % (chan, mode, host))
    else:
        unf()
    return hexchat.EAT_ALL
def editflags(word, word_eol, userdata):
    """List or edit ChanServ flags; defaults to the current channel."""
    chan = hexchat.get_info('channel')
    if len(word) <= 1:
        hexchat.command('msg chanserv access ' + chan + ' list')
        return hexchat.EAT_ALL
    if '#' in word[1]:
        # A channel was named explicitly; pass the arguments through as-is.
        hexchat.command('msg chanserv flags ' + word_eol[1])
    else:
        hexchat.command('msg chanserv flags ' + chan + ' ' + word_eol[1])
    return hexchat.EAT_ALL
def showver(word, word_eol, userdata):
    """Announce the running HexChat version with a /me action."""
    version = hexchat.get_info('version')
    hexchat.command('me is using HexChat v%s' % version)
    return hexchat.EAT_ALL
# --- Hook registration: bind the /commands above and announce the load. ---
hexchat.hook_command('sudo', sudo, help='/sudo Executes a command as op on channels you have flag +o on.')
hexchat.hook_command('topicappend', topicappend, help='/topicappend Adds a string to the topic')
hexchat.hook_command('appendtopic', topicappend, help='/appendtopic Adds a string to the topic')
hexchat.hook_command('part', disablechan, userdata="disabled", help='/part parts and disables chan on znc')
hexchat.hook_command('temppart', disablechan, userdata="did not disable", help='/temppart parts without disabling chan on znc')
hexchat.hook_command('ignorehost', hostignore, userdata="ignore", help='/ignorehost ignores a user\'s host')
hexchat.hook_command('unignorehost', hostignore, userdata="unignore", help='/unignorehost ignores a user\'s host')
# hostMode hooks: userdata is the mode string applied to the *!*@host mask.
hexchat.hook_command('quiet', hostMode, userdata="+q", help='/quiet quiets a user')
hexchat.hook_command('unquiet', hostMode, userdata="-q", help='/unquiet unquiets a user')
hexchat.hook_command('iexempt', hostMode, userdata="+I", help='/exempt adds an invite exemption for a user')
hexchat.hook_command('uniexempt', hostMode, userdata="-I", help='/unexempt removes an invite exemption for a user')
hexchat.hook_command('exempt', hostMode, userdata="+e", help='/exempt adds a ban exemption for a user')
hexchat.hook_command('unexempt', hostMode, userdata="-e", help='/unexempt removes a ban exemption for a user')
hexchat.hook_command('flags', editflags, help='/flags edits chanserv flags for a user')
hexchat.hook_command('clearstatus', clearstatus, help='/clearstatus closes all ZNC status windows. You can set your prefix at the top of the python file')
hexchat.hook_command('showver', showver)
hexchat.emit_print('Notice', __module_name__ + ' [S]', '%s by %s loaded. You are using version %s of the script.' % (__module_name__, __author__, __module_version__)) | {
"repo_name": "ApolloJustice/HexChat-pyscripts",
"path": "ajutils.py",
"copies": "1",
"size": "5182",
"license": "mit",
"hash": 9001510288552405000,
"line_mean": 36.5579710145,
"line_max": 166,
"alpha_frac": 0.6889231957,
"autogenerated": false,
"ratio": 2.9227298364354204,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.41116530321354205,
"avg_score": null,
"num_lines": null
} |
# aka OVERKILL.PY
#
# Some fun utilities to serialize python data into compressed strings.
# Need this because a) memcached has a 1MB datasize limit and b) memcached
# requires plain, ASCII-only strings.
try:
from cPickle import dumps, loads
except ImportError:
from pickle import dumps, loads
from hashlib import md5
import os
import zlib
from base64 import b64encode, b64decode
try:
import pylibmc
except:
pylibmc = None
import memcache
import threading
# Per-thread storage for the memcached client (see get_cache_client).
_locals = threading.local()
# MemCachier (Heroku add-on) exposes MEMCACHIER_* variables; normalize them
# to the generic MEMCACHE_* names that get_cache_client() reads.
if os.environ.get('MEMCACHIER_SERVERS', None):
    os.environ['MEMCACHE_SERVERS'] = os.environ.get('MEMCACHIER_SERVERS')
    os.environ['MEMCACHE_USERNAME'] = os.environ.get('MEMCACHIER_USERNAME')
    os.environ['MEMCACHE_PASSWORD'] = os.environ.get('MEMCACHIER_PASSWORD')
__all__ = [
'get_cache_client',
'get_key',
'encode_value',
'decode_value',
'cache_get',
'cache_set'
]
def get_cache_client():
    """Return a thread-local memcached client, creating it on first use.

    When MEMCACHE_SERVERS/USERNAME/PASSWORD are all set (Heroku/MemCachier),
    a pylibmc binary-protocol client with SASL auth is used; otherwise a
    client for a local server on 127.0.0.1:55838 is built, preferring
    pylibmc and falling back to python-memcache.
    """
    client = getattr(_locals, "cache_client", None)
    if client:
        return client
    try:
        on_heroku = (os.environ.get('MEMCACHE_SERVERS') and
                    os.environ.get('MEMCACHE_USERNAME') and
                    os.environ.get('MEMCACHE_PASSWORD'))
    except:
        on_heroku = False
    if on_heroku:
        # NOTE(review): this path assumes pylibmc imported successfully; if
        # the import failed (pylibmc is None) this raises AttributeError --
        # confirm pylibmc is a hard dependency in the Heroku deployment.
        client = pylibmc.Client(
            servers=[os.environ.get('MEMCACHE_SERVERS')],
            username=os.environ.get('MEMCACHE_USERNAME'),
            password=os.environ.get('MEMCACHE_PASSWORD'),
            binary=True
        )
    else:
        if pylibmc:
            client = pylibmc.Client(servers=['127.0.0.1:55838'], binary=True)
        else:
            client = memcache.Client(['127.0.0.1:55838'], debug=0)
    # Cache on the thread-local so each thread reuses one connection.
    _locals.cache_client = client
    return client
def get_key(key):
    """Normalize a cache key to a memcached-safe ASCII hex digest.

    Text keys are UTF-8 encoded first (md5 requires bytes).  The original
    tested ``type(key) == unicode``, a Python-2-only builtin that raises
    NameError on Python 3 even though this module otherwise supports both
    (see the cPickle/pickle import fallback).  On Python 2 the behavior is
    unchanged: ``bytes`` is ``str`` there, so byte strings pass through and
    unicode strings are encoded, exactly as before.
    """
    if not isinstance(key, bytes):
        key = key.encode('utf-8')
    return md5(key).hexdigest()
###########################################################################
def encode_value(data, compression_level=1):
    """Pickle ``data``, optionally zlib-compress, then base64-encode.

    Returns a native ASCII ``str`` on both Python 2 and 3.  The original
    wrapped the base64 output in ``str()``, which on Python 3 produced the
    repr "b'...'" instead of the payload; decoding the bytes as ASCII is
    equivalent on Python 2 and correct on Python 3.

    NOTE(review): with compression_level=0 the output is NOT compressed,
    but decode_value always calls zlib.decompress -- confirm level 0 is
    never used with the paired decoder.
    """
    serialized = dumps(data, -1)  # highest pickle protocol available
    if compression_level:
        compressed = zlib.compress(serialized, compression_level)
    else:
        compressed = serialized
    return b64encode(compressed).decode('ascii')
def decode_value(data):
    """Reverse of encode_value: base64-decode, zlib-decompress, unpickle.

    Accepts ``str`` or ``bytes``.  The original forced ``str(data)``, which
    on Python 3 turned a bytes payload into its "b'...'" repr and corrupted
    it; b64decode natively accepts both text and bytes on 2 and 3.
    """
    compressed = b64decode(data)
    return loads(zlib.decompress(compressed))
###########################################################################
def cache_get(key, default=None):
    """
    Gets a serialized 'rich data type' value from the database
    or returns `default` if the value did not exist or the value
    could not be parsed successfully.
    """
    mc = get_cache_client()
    try:
        cached = mc.get(get_key(key))
        return decode_value(cached) or default
    except:
        # Deliberate best-effort: any cache failure degrades to the default.
        return default
def cache_set(key, obj, timeout=None):
    """Best-effort store of ``obj`` in the cache; always returns ``obj``."""
    mc = get_cache_client()
    try:
        encoded = encode_value(obj)
        mc.set(get_key(key), encoded, timeout)
    except:
        # Cache writes are advisory -- never let a failure propagate.
        pass
    return obj
| {
"repo_name": "mtigas/radiowut",
"path": "cacheutil.py",
"copies": "1",
"size": "2920",
"license": "mit",
"hash": -7133824887433191000,
"line_mean": 26.2897196262,
"line_max": 77,
"alpha_frac": 0.6089041096,
"autogenerated": false,
"ratio": 3.700887198986058,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9733585568681661,
"avg_score": 0.015241147980879388,
"num_lines": 107
} |
#akara.resource
"""
There is no superclass for an Akara resource. It's any object with an id data mamber
self.policy - instance of L{akara.policy.manager}
"""
#__all__ = ['manager', 'standard_index']
CREATED = 'akara:created'
UPDATED = 'akara:updated'
CONTENT_LENGTH = 'akara:size'
CONTENT_TYPE = 'akara:type'
class resource(object):
    '''
    An analogue of a Web resource.

    Acts as a lazy, in-memory cache of the stored repository data: content
    and metadata are fetched from the manager's driver on first access.

    Standard repository metadata:
     * Content type (internet media type)
     * Size
     * creation date
     * last mod date
    '''
    def __init__(self, rid, manager):
        self._manager = manager
        self.rid = rid
        self._metadata = None #Mixes/caches repository metadata and user metadata
        self._content = None
        return

    def __getitem__(self, name):
        # Dict-style access to cached metadata (no sync is triggered here).
        return self._metadata[name]

    def _get_content(self):
        if self._content is None: self._sync()
        return self._content

    def _set_content(self, c):
        # Populate the cache from the repository before overwriting locally.
        if self._content is None: self._sync()
        self._content = c

    content = property(_get_content, _set_content)

    @property
    def metadata(self):
        if self._metadata is None: self._sync()
        return self._metadata

    def _sync(self):
        '''
        Sync up this copy with the repository.

        BUG FIX: the original assigned to ``self.metadata`` (a read-only
        property, raising AttributeError) and to ``self.content`` (whose
        setter calls _sync again while _content is still None, recursing
        forever).  Assign the private attributes directly instead.
        '''
        drv = self._manager._driver
        content, self._metadata = drv.get_resource(self.rid)
        self._content = content.read()
        return
class manager(dict):
    """
    Maps aliases to resource IDs and hands out ``resource`` wrappers.
    """
    #Manager itself is a very simple dict interface. You would generally use a more specialized
    #object that includes the persistence layer
    def __init__(self, driver):
        self._driver = driver
        self.aliases = {}
        #FIXME: replace with MRU
        self._cache = {}
        return

    def lookup(self, name):
        '''
        Look up a resource by ID or alias.

        Resolves aliases first, then the cache, then asks the driver.
        Raises RuntimeError when the driver has no such resource.
        '''
        rid = name
        if rid in self.aliases:
            rid = self.aliases[rid]
        if rid in self._cache:
            # BUG FIX: the original read ``elf._cache[rid]`` (typo), which
            # raised NameError whenever a cached entry was hit.
            return self._cache[rid]
        if self._driver.has_resource(rid):
            return resource(rid, self)
        else:
            raise RuntimeError('Resource not found: %s'%str(rid))
| {
"repo_name": "uogbuji/akara",
"path": "lib/resource/__init__.py",
"copies": "1",
"size": "2414",
"license": "apache-2.0",
"hash": -2144753600971282200,
"line_mean": 24.1458333333,
"line_max": 96,
"alpha_frac": 0.5828500414,
"autogenerated": false,
"ratio": 4.1194539249146755,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5202303966314675,
"avg_score": null,
"num_lines": null
} |
"""akara.services - adapters to use Python functions as WSGI handlers
This module is meant to be used by functions in an Akara extension
module.
"""
import httplib
import warnings
import functools
import cgi
import inspect
from cStringIO import StringIO
from xml.sax.saxutils import escape as xml_escape
from BaseHTTPServer import BaseHTTPRequestHandler
http_responses = BaseHTTPRequestHandler.responses
del BaseHTTPRequestHandler
from amara import tree, writers
from akara import logger, registry
__all__ = ("service", "simple_service", "method_dispatcher")
ERROR_DOCUMENT_TEMPLATE = """<?xml version="1.0" encoding="ISO-8859-1"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
<head>
<title>%(reason)s</title>
</head>
<body>
<h1>%(reason)s</h1>
<p>
%(message)s
</p>
<h2>Error %(code)s</h2>
</body>
</html>
"""
class _HTTPError(Exception):
    """Internal carrier for an HTTP error response (code, reason, body)."""
    # API question from the original author: should `code` stay an int, or
    # carry the reason too (e.g. "200 OK")?
    def __init__(self, code, message=None):
        assert isinstance(code, int) # Being a bit paranoid about the API
        self.code = code
        default_reason, default_message = http_responses[code]
        self.reason = default_reason
        self.message = default_message if message is None else message
        # Pre-render the HTML error document once, escaping user-visible text.
        self.text = ERROR_DOCUMENT_TEMPLATE % dict(
            code=self.code,
            reason=xml_escape(self.reason),
            message=xml_escape(self.message))
        self.headers = [("Content-Type", "text/html")]

    def make_wsgi_response(self, environ, start_response):
        status_line = "%s %s" % (self.code, self.reason)
        start_response(status_line, self.headers)
        return [self.text]
class _HTTP405(_HTTPError):
    """405 Method Not Allowed; advertises supported methods via Allow."""
    def __init__(self, methods):
        _HTTPError.__init__(self, 405)
        allow = ", ".join(methods)
        self.headers.append(("Allow", allow))
# Pull out any query arguments and set up input from any POST request
def _get_function_args(environ, allow_repeated_args):
    """Extract handler call arguments from a WSGI environ.

    Returns (args, kwargs): for POST requests args is
    (request_bytes, content_type); otherwise args is empty.  kwargs are
    parsed from QUERY_STRING.

    Raises _HTTPError(411) for a POST with a missing/invalid
    Content-Length, _HTTPError(400) for a negative length or (when
    allow_repeated_args is false) a repeated query parameter.
    """
    request_method = environ.get("REQUEST_METHOD")
    if request_method == "POST":
        try:
            request_length = int(environ["CONTENT_LENGTH"])
        except (KeyError, ValueError):
            raise _HTTPError(httplib.LENGTH_REQUIRED)
        if request_length < 0:
            raise _HTTPError(httplib.BAD_REQUEST)
        request_bytes = environ["wsgi.input"].read(request_length)
        request_content_type = environ.get("CONTENT_TYPE", None)
        args = (request_bytes, request_content_type)
    else:
        args = ()
    # Build up the keyword parameters from the query string
    query_string = environ["QUERY_STRING"]
    kwargs = {}
    if query_string:
        qs_dict = cgi.parse_qs(query_string)
        if allow_repeated_args:
            # Every value stays a list, e.g. {"a": ["x", "y"]}.
            kwargs = qs_dict
        else:
            # Python 2 API (iteritems); unwrap single values, reject repeats.
            for k, v in qs_dict.iteritems():
                if len(v) == 1:
                    kwargs[k] = v[0]
                else:
                    raise _HTTPError(400,
                        message="Using the %r query parameter multiple times is not supported" % (k,))
    return args, kwargs
######
def new_request(environ):
    "prepare the akara.request and akara.response environment for a new request"
    from akara import request, response
    # Reset the per-request response state before exposing the environ.
    response.code = "200 OK"
    response.headers = []
    request.environ = environ
def send_headers(start_response, default_content_type, content_length):
    "Send the WSGI headers, using values from akara.response.*"
    from akara import response
    code = response.code
    if isinstance(code, int):
        # Expand a bare numeric code into "NNN Reason".
        code = "%d %s" % (code, http_responses[code][0])
    present = [name.lower() for name, value in response.headers]
    if "content-type" not in present:
        response.headers.append(("Content-Type", default_content_type))
    if "content-length" not in present and content_length is not None:
        response.headers.append(("Content-Length", content_length))
    start_response(code, response.headers)
def convert_body(body, content_type, encoding, writer):
    """Normalize a handler's return value to (chunks, content_type, length).

    Accepts a byte string, an Amara tree (serialized via `writer` and
    `encoding`), a unicode string (encoded via `encoding`), or any other
    WSGI-style iterable (passed through; length unknown -> None).
    A caller-supplied content_type always wins over the inferred one.
    """
    if isinstance(body, str):
        if content_type is None:
            content_type = "text/plain"
        return [body], content_type, len(body)
    if isinstance(body, tree.entity):
        # XXX have Amara tell me the content type (with encoding)
        # This is trac #29
        if content_type is None:
            # Heuristic: an "html" writer implies text/html output.
            if "html" in writer.lower():
                content_type = "text/html"
            else:
                content_type = "application/xml"
        w = writers.lookup(writer)
        body = body.xml_encode(w, encoding)
        return [body], content_type, len(body)
    if isinstance(body, unicode):
        body = body.encode(encoding)
        if content_type is None:
            content_type = "text/plain; charset=%s" % (encoding,)
        return [body], content_type, len(body)
    # Probably one of the normal WSGI responses
    if content_type is None:
        content_type = "text/plain"
    return body, content_type, None
# The HTTP spec says a method can be and 1*CHAR, where CHAR is a
# US-ASCII character excepting control characters and "punctuation".
# (like '(){}' and even ' '). We're a bit more strict than that
# because we haven't seen people use words like "get".
def _check_is_valid_method(method):
min_c = min(method)
max_c = max(method)
if min_c < 'A' or max_c > 'Z':
raise ValueError("HTTP method %r value is not valid. "
"It must contain only uppercase ASCII letters" % (method,))
def _no_slashes(path):
if path is not None and "/" in path:
# Really these are more like mount points
raise ValueError("service paths may not contain a '/'")
def ignore_start_response(status, response_headers, exc_info=None):
    # WSGI start_response stand-in that discards status and headers; used
    # when replaying a request against notification services (_handle_notify).
    pass
def _make_query_template(func):
    # Derive a URL query template like "?a={a}&b={b?}" from func's signature
    # ("{x?}" marks parameters with defaults). Returns None when */** params
    # make the template ambiguous, "" for a parameterless function.
    # NOTE(review): inspect.getargspec is the Python-2-era API (removed in
    # Python 3.11) -- fine for this py2 codebase, not portable forward.
    argspec = inspect.getargspec(func)
    if argspec.varargs is not None or argspec.keywords is not None:
        # Can't handle *args or **kwargs in the parameter list
        return None
    if not argspec.args:
        return ""
    # Trailing len(defaults) args are optional; the rest are required.
    num_required = len(argspec.args) - len(argspec.defaults or ())
    arg_info = [(arg, i >= num_required) for (i, arg) in enumerate(argspec.args)]
    # I present these in alphabetical order to reduce template changes
    # should the parameter list change.
    arg_info.sort()
    terms = []
    for arg, is_optional in arg_info:
        if is_optional:
            fmt = "%s={%s?}"
        else:
            fmt = "%s={%s}"
        terms.append( fmt % (arg, arg) )
    return "?" + "&".join(terms)
def _handle_notify(environ, f, service_list):
    # Replay the request body `f` against each registered notification
    # service, giving each a copy of the environ re-pointed at its own path.
    # Responses are discarded (ignore_start_response).
    for service_id in service_list:
        service = registry.get_a_service_by_id(service_id)
        service_environ = environ.copy()
        service_environ["PATH_INFO"] = service.path
        f.seek(0)  # each service reads the body from the start
        new_request(service_environ)
        try:
            service.handler(service_environ, ignore_start_response)
        except Exception:
            # NOTE(review): this clause re-raises immediately, so the
            # try/except is currently a no-op and the trailing `pass` is
            # unreachable -- the XXX suggests swallow-and-continue was the
            # intent, but that would change behavior.
            raise
            # XXX
            pass
# Sentinel passed as `body` to _handle_notify_before meaning "read the real
# request body from environ['wsgi.input']".
FROM_ENVIRON = object()
def _handle_notify_before(environ, body, service_list):
    # Before dispatching to the real handler, replay the request body to the
    # "before" notification services. The body is buffered into a StringIO
    # and installed as wsgi.input so the real handler can still read it.
    if not service_list:
        return
    if body is FROM_ENVIRON:
        body = environ["wsgi.input"].read()
    f = StringIO(body)
    environ["wsgi.input"] = f
    _handle_notify(environ, f, service_list)
    f.seek(0)  # rewind so the real handler sees the full body
def _handle_notify_after(environ, result, service_list):
    # After the real handler ran, materialize its response body, replay it
    # as the *input* to each "after" notification service, then return the
    # buffered body (a rewound StringIO) in place of the original iterable.
    if not service_list:
        return result
    f = StringIO()
    for block in result:
        f.write(block)
    # XXX ALso need to set the CONTENT_TYPE (and others?)
    # NOTE(review): CONTENT_LENGTH is stored as an int here, while WSGI
    # environs conventionally carry it as a string -- confirm downstream
    # readers (e.g. _get_function_args uses int(...), which tolerates both).
    environ["CONTENT_LENGTH"] = f.tell()
    environ["wsgi.input"] = f
    _handle_notify(environ, f, service_list)
    f.seek(0)
    return f
###### public decorators
## Guide to help in understanding
# @service(*args) -> returns a service_wrapper
#
# @service(*args)
# def func(): pass -> returns a wrapper() which calls func
def service(service_id, path=None,
            encoding="utf-8", writer="xml",
            pipelines = None,
            query_template = None,
            wsgi_wrapper=None,
            notify_before = None,
            notify_after = None):
    # Decorator: register a raw WSGI-style function as an Akara service.
    # The wrapped function receives (environ, start_response) unchanged and
    # is responsible for its own headers; the return value is normalized by
    # convert_body (str/unicode/Amara tree -> list of byte chunks).
    # NOTE(review): `pipelines` is accepted but never used in this body.
    _no_slashes(path)
    def service_wrapper(func):
        @functools.wraps(func)
        def wrapper(environ, start_response):
            _handle_notify_before(environ, FROM_ENVIRON, notify_before)
            # 'service' passes the WSGI request straight through
            # to the handler so there's almost no point in
            # setting up the environment. However, I can conceive
            # of tools which might access 'environ' directly, and
            # I want to be consistent with the simple* interfaces.
            new_request(environ)
            result = func(environ, start_response)
            # You need to make sure you sent the correct content-type!
            result, ctype, length = convert_body(result, None, encoding, writer)
            result = _handle_notify_after(environ, result, notify_after)
            return result
        pth = path
        if pth is None:
            pth = func.__name__
        # If an outer WSGI wrapper was specified, place it around the service wrapper being created
        if wsgi_wrapper:
            wrapper = wsgi_wrapper(wrapper)
        registry.register_service(service_id, pth, wrapper, query_template=query_template)
        return wrapper
    return service_wrapper
## Guide to help in understanding
# @simple_service(*args) -> returns a service_wrapper
#
# @simple_service(*args)
# def func(): pass -> returns a wrapper() which calls func
def simple_service(method, service_id, path=None,
content_type=None, encoding="utf-8", writer="xml",
allow_repeated_args=False,
query_template=None,
wsgi_wrapper=None,
notify_before=None, notify_after=None):
_no_slashes(path)
"""Add the function as an Akara resource
These affect how the resource is registered in Akara
method - the supported HTTP method (either "GET" or "POST")
service_id - a string which identifies this service; should be a URL
path - the local URL path to the resource (must not at present
contain a '/') If None, use the function's name as the path.
query_template - An Akara URL service template (based on OpenSource; see akara.opensource)
Can be used to help consumers compose resources withing this service. The same
template is used for all HTTP methods
These control how to turn the return value into an HTTP response
content_type - the response content-type. If not specified, and if
"Content-Type" is not listed in akara.response.headers then infer
the content-type based on what the decorated function returns.
(See akara.services.convert_body for details)
encoding - Used to convert a returned Unicode string or an Amara tree
to the bytes used in the HTTP response
writer - Used to serialize the Amara tree for the HTTP response.
This must be a name which can be used as an Amara.writer.lookup.
This affects how to convert the QUERY_STRING into function call parameters
allow_repeated_args - The query string may have multiple items with the
same name, as in "?a=x&a=y&a=z&b=w". If True, this is converted into
a function call parameter like "f(a=['x','y','z'], b=['w'])". If
False then this is treated as an error. Suppose the query string
contains no repeated arguments, as in "?a=x&b=w". If
allow_repeated_args is True then the function is called as
as "f(a=['x'], b=['w'])" and if False, like "f(a='x', b='w')".
A simple_service decorated function can get request information from
akara.request and use akara.response to set the HTTP reponse code
and the HTTP response headers.
Here is an example of use:
@simple_service("GET", "http://example.com/get_date")
def date(format="%Y-%m-%d %H:%M:%S"):
'''get the current date'''
import datetime
return datetime.datetime.now().strftime(format)
which can be called with URLs like:
http://localhost:8880/date
http://localhost:8880/date?format=%25m-%25d-%25Y
Integration with other WSGI components:
The @simple_service decorator creates and returns a low-level handler
function that conforms to the WSGI calling conventions. However,
it is not safe to directly use the resulting handler with arbitrary
third-party WSGI components (e.g., to wrap the Akara handler with
an WSGI middleware component). This is because Akara handlers return
values other than sequences of byte-strings. For example, they might
return XML trees, Unicode, or other data types that would not be
correctly interpreted by other WSGI components.
To integrate other WSGI components with Akara, use the wsgi_wrapper
argument to @simple_service. For example:
def wrapper(app):
# Create an WSGI wrapper around WSGI application app
...
return wrapped_app
@simple_service("GET", "http://example.com/get_date", wsgi_wrapper=wrapper)
def date(format):
...
When specified, Akara will do the following:
- Arrange to have the wsgi_wrapper placed at the outermost layer
of Akara's processing. That is, control will pass into
the WSGI wrapper before any Akara-specific processing related
to the @simple_service handler takes place.
- Ensure that all output returned back to the WSGI wrapper
strictly conforms to the WSGI standard (is a sequence of bytes)
The wrapper function given with wsgi_wrapper should accept a function
as input and return an WSGI application as output. This application
should be a callable that accepts (environ, start_response).
See implementation notes in the code below.
"""
_no_slashes(path)
_check_is_valid_method(method)
if method not in ("GET", "POST"):
raise ValueError(
"simple_service only supports GET and POST methods, not %s" % (method,))
def service_wrapper(func):
@functools.wraps(func)
def wrapper(environ, start_response):
try:
if environ.get("REQUEST_METHOD") != method:
if method == "GET":
raise _HTTP405(["GET"])
else:
raise _HTTP405(["POST"])
args, kwargs = _get_function_args(environ, allow_repeated_args)
except _HTTPError, err:
return err.make_wsgi_response(environ, start_response)
if args:
body = args[0]
else:
body = ""
_handle_notify_before(environ, body, notify_before)
new_request(environ)
result = func(*args, **kwargs)
result, ctype, clength = convert_body(result, content_type, encoding, writer)
send_headers(start_response, ctype, clength)
result = _handle_notify_after(environ, result, notify_after)
return result
pth = path
if pth is None:
pth = func.__name__
# Construct the default query template, if needed and possible.
qt = query_template
if qt is None and method == "GET" and not allow_repeated_args:
qt = _make_query_template(func)
if qt is not None:
qt = pth + qt
# If an wsgi_wrapper was given, wrapper the service wrapper with it
if wsgi_wrapper:
wrapper = wsgi_wrapper(wrapper)
registry.register_service(service_id, pth, wrapper, query_template=qt)
return wrapper
return service_wrapper
# XXX idea for the majority of services which deal with XML
# @xml_service("http://example.com/cool_xml", "cool")
# def cool(xml_tree, param1):
# ...
# return xml_tree
#def xml_service()
## Use for services which dispatch based on HTTP method type (GET, POST, ...)
# Nomenclature: the service is identified by its service id.
# All handlers for a given service id implement a given protocol.
# Use a method_dispatcher when a service does different things
# based on the HTTP method (GET, POST, ...) and you want a
# different Python function to handle each method.
class service_method_dispatcher(object):
    """WSGI dispatcher based on request HTTP method

    This is an internal class. You should not need to use it.
    Routes each request to the handler registered for its REQUEST_METHOD,
    answering 405 (with an Allow header) for unregistered methods.
    """
    def __init__(self, path, wsgi_wrapper=None):
        self.path = path
        self.method_table = {}
        self.wsgi_wrapper = wsgi_wrapper
    def add_handler(self, method, handler):
        # Replacing an existing handler is permitted but logged as a warning.
        if method in self.method_table:
            logger.warn("Replacing %r method handler for %r" %
                        (method, self.path))
        else:
            logger.info("Created %r method handler for %r" %
                        (method, self.path))
        # If an outer WSGI wrapper was specified, wrap it around the handler method
        if self.wsgi_wrapper:
            handler = self.wsgi_wrapper(handler)
        self.method_table[method] = handler
    def __call__(self, environ, start_response):
        method = environ.get("REQUEST_METHOD")
        handler = self.method_table.get(method, None)
        if handler is not None:
            return handler(environ, start_response)
        err = _HTTP405(sorted(self.method_table.keys()))
        return err.make_wsgi_response(environ, start_response)
    def head_method(self, environ, start_response):
        # HEAD support: run the GET handler for its headers, drop the body.
        # NOTE(review): nothing in this chunk wires head_method into dispatch.
        handler = self.method_table.get("GET",None)
        if handler is not None:
            handler(environ, start_response)
            return ['']
        err = _HTTP405(sorted(self.method_table.keys()))
        return err.make_wsgi_response(environ, start_response)
## Guide to help in understanding
# @method_dispatcher(*args) -> returns a method_dispatcher_wrapper
#
# @method_dispatcher(*args)
# def func(): pass -> returns a service_dispatcher_decorator
# service_dispatcher_decorator.method(*args) -> returns
# a service_dispatch_decorator_method_wrapper
#
# service_dispatcher_decorator.method(*args)
# def method_func(): pass --> returns a method_wrapper which calls method_func
#
# service_dispatcher_decorator.simple_method(*args)
# def method_func(): pass --> returns a method_wrapper which calls method_func
# This is the top-level decorator
def method_dispatcher(service_id, path=None, wsgi_wrapper=None, query_template=None):
    """Add an Akara resource which dispatches to other functions based on the HTTP method

    Used for resources which handle, say, both GET and POST requests.

    service_id - a string which identifies this service; should be a URL
    path - the local URL path to the resource (must not at present
        contain a '/') If None, use the function's name as the path.
    wsgi_wrapper - An outer WSGI component to be wrapped around the methods
    query_template - An Akara URL service template (based on OpenSource; see akara.opensource)
        Can be used to help consumers compose resources withing this service. The same
        template is used for all HTTP methods

    Example of use:

      @method_dispatcher("http://example.com/example_service")
      def something():
          '''docstring used for the service'''

      @something.simple_method(method="GET", content_type="text/plain",
                               allow_repeated_args=True)
      def something_get(names=[]):
          return "Hi " + ", ".join(names) + "!\n"

      @something.method("POST")
      def something_post(environ, start_response):
          start_response("200 OK", [("Content-Type", "image/gif")])
          return image_bytes

    If you have curl installed then you could access the GET option as:
        curl http://localhost:8880/something?name=Andrew&name=Sara+Marie
    and access the POST option as:
        curl --data "" http://localhost:8880/something
    """
    _no_slashes(path)
    def method_dispatcher_wrapper(func):
        # Have to handle a missing docstring here as otherwise
        # the registry will try to get it from the dispatcher.
        doc = inspect.getdoc(func) or ""
        pth = path
        if pth is None:
            pth = func.__name__
        dispatcher = service_method_dispatcher(pth, wsgi_wrapper)
        # BUG FIX: query_template was accepted but silently dropped; forward
        # it to the registry as the other registration paths
        # (service / simple_service) already do.
        registry.register_service(service_id, pth, dispatcher, doc,
                                  query_template=query_template)
        return service_dispatcher_decorator(dispatcher)
    return method_dispatcher_wrapper
class service_dispatcher_decorator(object):
    """Helper class used by method_dispatcher to add new handlers to the given resource

    You should not need to create this directly. Instead, use 'method_dispatcher'
    """
    def __init__(self, dispatcher):
        self.dispatcher = dispatcher
    def method(self, method, encoding="utf-8", writer="xml"):
        """Register a function as a resource handler for a given HTTP method

        method - the relevant HTTP method
        encoding - Used to convert a returned Unicode string or an Amara tree
            to the bytes used in the HTTP response
        writer - Used to serialize the Amara tree for the HTTP response.
            This must be a name which can be used as an Amara.writer.lookup.

        The decorated function must take the normal WSGI parameters
        (environ, start_response) and it must call start_response with
        all the needed headers, including Content-Type. The function
        may return an Akara tree or a Unicode string, in which case it
        it serialized and converted to bytes based in the 'writer' and
        'encoding' options.
        """
        _check_is_valid_method(method)
        def service_dispatch_decorator_method_wrapper(func):
            @functools.wraps(func)
            def method_wrapper(environ, start_response):
                # 'method' passes the WSGI request straight through
                # to the handler so there's almost no point in
                # setting up the environment. However, I can conceive
                # of tools which might access 'environ' directly, and
                # I want to be consistent with the simple* interfaces.
                new_request(environ)
                result = func(environ, start_response)
                # You need to make sure you sent the correct content-type!
                result, ctype, clength = convert_body(result, None, encoding, writer)
                return result
            self.dispatcher.add_handler(method, method_wrapper)
            return method_wrapper
        return service_dispatch_decorator_method_wrapper
    def simple_method(self, method, content_type=None,
                      encoding="utf-8", writer="xml", allow_repeated_args=False):
        """Like simple_service, but registers a handler for one HTTP method
        on this dispatcher: query/POST arguments are parsed into Python call
        arguments (_get_function_args) and the return value is converted to
        an HTTP response via convert_body/send_headers.
        """
        _check_is_valid_method(method)
        if method not in ("GET", "POST"):
            raise ValueError(
                "simple_method only supports GET and POST methods, not %s" %
                (method,))
        def service_dispatch_decorator_simple_method_wrapper(func):
            @functools.wraps(func)
            def simple_method_wrapper(environ, start_response):
                try:
                    args, kwargs = _get_function_args(environ, allow_repeated_args)
                except _HTTPError, err:
                    return err.make_wsgi_response(environ, start_response)
                new_request(environ)
                result = func(*args, **kwargs)
                result, ctype, clength = convert_body(result, content_type, encoding, writer)
                send_headers(start_response, ctype, clength)
                return result
            self.dispatcher.add_handler(method, simple_method_wrapper)
            return simple_method_wrapper
        return service_dispatch_decorator_simple_method_wrapper
# XXX Idea
#def xml_method(self, method="POST", content_type="text/xml"):
# ...
# Install some built-in services
# Registered at the server root (path ""); optional ?service= filters to one id.
@simple_service("GET", "http://purl.org/xml3k/akara/services/registry", "",
                allow_repeated_args=False)
def list_services(service=None):
    return registry.list_services(ident=service) # XXX 'ident' or 'service' ?
| {
"repo_name": "uogbuji/akara",
"path": "lib/services.py",
"copies": "1",
"size": "24661",
"license": "apache-2.0",
"hash": 361344488637400100,
"line_mean": 37.7142857143,
"line_max": 99,
"alpha_frac": 0.6299420137,
"autogenerated": false,
"ratio": 4.141225860621327,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5271167874321326,
"avg_score": null,
"num_lines": null
} |
# akerl, 2013
# https://github.com/akerl/conflib
'''conflib is designed to simply stacking of configurations
useful when you have defaults & user input, or global and local settings
'''
class Config(object):
    """Stack configuration dicts from least to most powerful.

    Example: ``Config(defaults, global_conf, local_conf)`` -- later configs
    override earlier ones on key conflicts.  ``validation_dict`` is handed
    to :meth:`validate`.
    """
    def __init__(self,
                 *configs,
                 validation_dict=None):
        # FIX: the original used a mutable default argument ({}); harmless
        # while only read, but a latent shared-state bug. None + fallback
        # is the safe, behavior-identical form.
        self.options = {}
        for config in configs:
            self.stack(config)
        self.validate(validation_dict if validation_dict is not None else {})

    def stack(self, new_config):
        """Merge new_config (a dict or another Config) on top of options.

        On a key conflict, new_config's value takes precedence.
        """
        if type(new_config) is type(self):
            new_config = new_config.options
        self.options.update(new_config)

    def validate(self, validation_dict=None):
        """Check/coerce options against {key: validation} pairs.

        Prints context and re-raises ValueError when a value fails.
        """
        if validation_dict is None:  # same mutable-default fix as __init__
            validation_dict = {}
        for option, validation in validation_dict.items():
            if option in self.options:
                try:
                    self.options[option] = self._do_validation(
                        option, validation, self.options[option])
                except ValueError:
                    print('Validation failed for {0}: {1}'.format(
                        option, self.options[option])
                    )
                    raise

    @staticmethod
    def _do_validation(option, validation, value):
        """Interpret ``validation`` and return the (possibly coerced) value.

        * bool          -- maps common truthy/falsy tokens to True/False
        * int           -- returns int(value)
        * list of tuples-- value matching any tuple member returns tuple[0]
        * list          -- membership test; returns value unchanged
        * a type        -- strict type(value) check
        * callable      -- returns validation(value); it must raise
                           ValueError itself for malformed values
        Raises ValueError on any failure. ``option`` is currently unused
        but kept for interface compatibility.
        """
        if validation is bool:
            if value in ['y', 'yes', '1', 1, True]:
                return True
            elif value in ['n', 'no', '0', 0, False]:
                return False
            else:
                raise ValueError
        elif validation is int:
            return int(value)
        elif type(validation) is list:
            if type(validation[0]) is tuple:
                for item in validation:
                    if value in item:
                        return item[0]
                raise ValueError
            elif value in validation:
                return value
            else:
                raise ValueError
        elif type(validation) is type:
            if type(value) is validation:
                return value
            else:
                raise ValueError
        elif hasattr(validation, '__call__'):
            try:
                return validation(value)
            except TypeError:
                raise ValueError
        else:
            raise ValueError
| {
"repo_name": "akerl/conflib",
"path": "conflib/conflib.py",
"copies": "1",
"size": "3518",
"license": "mit",
"hash": 1047407001123281400,
"line_mean": 37.2391304348,
"line_max": 75,
"alpha_frac": 0.5571347356,
"autogenerated": false,
"ratio": 5.196454948301329,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 92
} |
# akerl, 2013
# https://github.com/akerl/modlib
'''modlib maintains a stack of dynamically-loaded module objects
useful when you need to import dynamic modules with some degree of control
'''
from os.path import isfile, expanduser
from importlib import import_module
class Modstack(object):
    """Cache of dynamically imported modules (or attributes thereof).

    ``formula`` is a ``str.format``-able template used to build the
    module path from the arguments given to :meth:`get`.  ``target``
    names the attribute to pull out of each imported module; ``None``
    means the module object itself is returned.
    """

    def __init__(self, formula='{0}', target=None):
        self.stack = {}
        self.target = target
        self.formula = formula

    def compile_path(self, *args, **kwargs):
        """Build the module path from the formula, expanding '~'."""
        return expanduser(self.formula.format(*args, **kwargs))

    def get(self, *args, **kwargs):
        """Return the (cached) import matching the formatted path.

        Arguments are forwarded to :meth:`compile_path`; on a cache
        miss the module is imported and the result memoized.
        """
        path = self.compile_path(*args, **kwargs)
        try:
            return self.stack[path]
        except KeyError:
            module = import_module(path)
            loaded = module if self.target is None else getattr(module, self.target)
            self.stack[path] = loaded
            return loaded
| {
"repo_name": "akerl/modlib",
"path": "modlib/modlib.py",
"copies": "1",
"size": "1429",
"license": "mit",
"hash": 9169512810954617000,
"line_mean": 32.2325581395,
"line_max": 74,
"alpha_frac": 0.610916725,
"autogenerated": false,
"ratio": 4.2656716417910445,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5376588366791044,
"avg_score": null,
"num_lines": null
} |
"""A kernel client for in-process kernels."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from IPython.kernel.channelsabc import HBChannelABC
from .socket import DummySocket
#-----------------------------------------------------------------------------
# Channel classes
#-----------------------------------------------------------------------------
class InProcessChannel(object):
    """Base class for in-process channels.

    Tracks a simple alive flag and holds a reference to the owning
    client; concrete channels supply the actual message handling.
    """

    # Methods proxied to the kernel; none on the base class.
    proxy_methods = []

    def __init__(self, client=None):
        super(InProcessChannel, self).__init__()
        self.client = client
        self._is_alive = False

    def is_alive(self):
        """Return True between start() and stop()."""
        return self._is_alive

    def start(self):
        """Mark the channel as running."""
        self._is_alive = True

    def stop(self):
        """Mark the channel as stopped."""
        self._is_alive = False

    def call_handlers(self, msg):
        """Handle an incoming message (invoked in the main thread).

        Concrete subclasses must override this.
        """
        raise NotImplementedError('call_handlers must be defined in a subclass.')

    def flush(self, timeout=1.0):
        """No-op: in-process delivery leaves nothing to flush."""
        pass

    def call_handlers_later(self, *args, **kwds):
        """Call the message handlers, by default immediately.

        GUI toolkits may override to defer the call until after their
        event loop has run, as expected by GUI frontends.
        """
        self.call_handlers(*args, **kwds)

    def process_events(self):
        """Process pending GUI events.

        Never called from a frontend without an event loop (e.g. a
        terminal frontend), so the base class leaves it unimplemented.
        """
        raise NotImplementedError
class InProcessHBChannel(object):
    """A dummy heartbeat channel interface for in-process kernels.

    The heartbeat normally verifies that a separate kernel process is
    alive; with an in-process kernel there is nothing to ping, but
    clients still expect this interface, so it is stubbed out here.
    """

    # Seconds without a beat before a real kernel would count as dead.
    time_to_dead = 3.0

    def __init__(self, client=None):
        super(InProcessHBChannel, self).__init__()
        self.client = client
        self._is_alive = False
        self._pause = True  # heartbeat starts suspended

    def is_alive(self):
        """Return True between start() and stop()."""
        return self._is_alive

    def start(self):
        """Mark the channel as running."""
        self._is_alive = True

    def stop(self):
        """Mark the channel as stopped."""
        self._is_alive = False

    def pause(self):
        """Suspend the (dummy) heartbeat."""
        self._pause = True

    def unpause(self):
        """Resume the (dummy) heartbeat."""
        self._pause = False

    def is_beating(self):
        """Return True while the heartbeat is not paused."""
        return not self._pause
# Register the dummy heartbeat channel as a virtual subclass of the
# heartbeat-channel ABC so isinstance checks against HBChannelABC accept it.
HBChannelABC.register(InProcessHBChannel)
| {
"repo_name": "wolfram74/numerical_methods_iserles_notes",
"path": "venv/lib/python2.7/site-packages/IPython/kernel/inprocess/channels.py",
"copies": "4",
"size": "2629",
"license": "mit",
"hash": -2258868846302330400,
"line_mean": 26.1030927835,
"line_max": 81,
"alpha_frac": 0.602890833,
"autogenerated": false,
"ratio": 4.55632582322357,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.715921665622357,
"avg_score": null,
"num_lines": null
} |
"""A kernel client for in-process kernels."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from IPython.kernel.channelsabc import (
ShellChannelABC, IOPubChannelABC,
HBChannelABC, StdInChannelABC,
)
from .socket import DummySocket
#-----------------------------------------------------------------------------
# Channel classes
#-----------------------------------------------------------------------------
class InProcessChannel(object):
    """Base class for in-process channels.

    Holds the owning client and an alive flag; concrete channels
    implement the message handling.
    """
    proxy_methods = []

    def __init__(self, client=None):
        super(InProcessChannel, self).__init__()
        self.client = client
        self._is_alive = False

    # -- Channel interface --------------------------------------------------

    def is_alive(self):
        """Return True while the channel is started."""
        return self._is_alive

    def start(self):
        self._is_alive = True

    def stop(self):
        self._is_alive = False

    def call_handlers(self, msg):
        """Process one incoming message (runs in the main thread).

        Concrete subclasses must override this.
        """
        raise NotImplementedError('call_handlers must be defined in a subclass.')

    # -- InProcessChannel interface -----------------------------------------

    def call_handlers_later(self, *args, **kwds):
        """Call the handlers, by default immediately.

        GUI toolkits can override to defer until after the event loop
        has run, as expected by GUI frontends.
        """
        self.call_handlers(*args, **kwds)

    def process_events(self):
        """Process pending GUI events; meaningless without an event loop."""
        raise NotImplementedError
class InProcessShellChannel(InProcessChannel):
    """Shell channel speaking directly to an in-process kernel.

    Each request method builds a protocol message via the client's
    session, dispatches it synchronously through
    :meth:`_dispatch_to_kernel` and returns the request's ``msg_id``.

    See `IPython.kernel.channels.ShellChannel` for docstrings.
    """
    # flag for whether execute requests should be allowed to call raw_input
    allow_stdin = True
    # request methods the client proxies through this channel
    proxy_methods = [
        'execute',
        'complete',
        'inspect',
        'history',
        'shutdown',
        'kernel_info',
    ]
    #--------------------------------------------------------------------------
    # ShellChannel interface
    #--------------------------------------------------------------------------
    def execute(self, code, silent=False, store_history=True,
                user_expressions={}, allow_stdin=None):
        # Send an 'execute_request'; allow_stdin falls back to the class flag.
        # NOTE(review): the mutable default for user_expressions is shared
        # across calls; harmless only while it is never mutated.
        if allow_stdin is None:
            allow_stdin = self.allow_stdin
        content = dict(code=code, silent=silent, store_history=store_history,
                       user_expressions=user_expressions,
                       allow_stdin=allow_stdin)
        msg = self.client.session.msg('execute_request', content)
        self._dispatch_to_kernel(msg)
        return msg['header']['msg_id']
    def complete(self, code, cursor_pos=None):
        # Request completions; cursor defaults to the end of `code`.
        if cursor_pos is None:
            cursor_pos = len(code)
        content = dict(code=code, cursor_pos=cursor_pos)
        msg = self.client.session.msg('complete_request', content)
        self._dispatch_to_kernel(msg)
        return msg['header']['msg_id']
    def inspect(self, code, cursor_pos=None, detail_level=0):
        # Request object introspection at `cursor_pos` (defaults to end).
        if cursor_pos is None:
            cursor_pos = len(code)
        content = dict(code=code, cursor_pos=cursor_pos,
                       detail_level=detail_level,
                       )
        msg = self.client.session.msg('inspect_request', content)
        self._dispatch_to_kernel(msg)
        return msg['header']['msg_id']
    def history(self, raw=True, output=False, hist_access_type='range', **kwds):
        # Request execution history; extra kwds pass through into the content.
        content = dict(raw=raw, output=output,
                       hist_access_type=hist_access_type, **kwds)
        msg = self.client.session.msg('history_request', content)
        self._dispatch_to_kernel(msg)
        return msg['header']['msg_id']
    def shutdown(self, restart=False):
        # There is no separate process to shut down.
        # FIXME: What to do here?
        raise NotImplementedError('Cannot shutdown in-process kernel')
    def kernel_info(self):
        """Request kernel info."""
        msg = self.client.session.msg('kernel_info_request')
        self._dispatch_to_kernel(msg)
        return msg['header']['msg_id']
    #--------------------------------------------------------------------------
    # Protected interface
    #--------------------------------------------------------------------------
    def _dispatch_to_kernel(self, msg):
        """Send a message to the kernel synchronously and handle the reply.

        The message is serialized through a DummySocket, dispatched to the
        kernel's shell handler, and the reply is handed to the handlers.
        """
        kernel = self.client.kernel
        if kernel is None:
            raise RuntimeError('Cannot send request. No kernel exists.')
        stream = DummySocket()
        self.client.session.send(stream, msg)
        msg_parts = stream.recv_multipart()
        kernel.dispatch_shell(stream, msg_parts)
        idents, reply_msg = self.client.session.recv(stream, copy=False)
        self.call_handlers_later(reply_msg)
class InProcessIOPubChannel(InProcessChannel):
    """See `IPython.kernel.channels.IOPubChannel` for docstrings."""

    def flush(self, timeout=1.0):
        """No-op: in-process messages are delivered synchronously."""
class InProcessStdInChannel(InProcessChannel):
    """See `IPython.kernel.channels.StdInChannel` for docstrings."""

    proxy_methods = ['input']

    def input(self, string):
        """Feed *string* to the kernel as the raw_input reply."""
        target = self.client.kernel
        if target is None:
            raise RuntimeError('Cannot send input reply. No kernel exists.')
        target.raw_input_str = string
class InProcessHBChannel(InProcessChannel):
    """See `IPython.kernel.channels.HBChannel` for docstrings."""

    # Seconds without a beat before a real kernel would count as dead.
    time_to_dead = 3.0

    def __init__(self, *args, **kwds):
        super(InProcessHBChannel, self).__init__(*args, **kwds)
        self._pause = True  # heartbeat starts suspended

    def pause(self):
        """Suspend the (dummy) heartbeat."""
        self._pause = True

    def unpause(self):
        """Resume the (dummy) heartbeat."""
        self._pause = False

    def is_beating(self):
        """Return True while the heartbeat is not paused."""
        return not self._pause
#-----------------------------------------------------------------------------
# ABC Registration
#-----------------------------------------------------------------------------
# Register each concrete in-process channel as a virtual subclass of its
# corresponding channel ABC, so isinstance/issubclass checks pass without
# actual inheritance from the ABCs.
ShellChannelABC.register(InProcessShellChannel)
IOPubChannelABC.register(InProcessIOPubChannel)
HBChannelABC.register(InProcessHBChannel)
StdInChannelABC.register(InProcessStdInChannel)
| {
"repo_name": "mattvonrocketstein/smash",
"path": "smashlib/ipy3x/kernel/inprocess/channels.py",
"copies": "1",
"size": "6705",
"license": "mit",
"hash": -6196055306194000000,
"line_mean": 32.0295566502,
"line_max": 80,
"alpha_frac": 0.5422818792,
"autogenerated": false,
"ratio": 4.685534591194968,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5727816470394969,
"avg_score": null,
"num_lines": null
} |
""" A kernel client for in-process kernels. """
#-----------------------------------------------------------------------------
# Copyright (C) 2012 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# IPython imports
from IPython.kernel.channelsabc import (
ShellChannelABC, IOPubChannelABC,
HBChannelABC, StdInChannelABC,
)
# Local imports
from .socket import DummySocket
#-----------------------------------------------------------------------------
# Channel classes
#-----------------------------------------------------------------------------
class InProcessChannel(object):
    """Base class for in-process channels.

    Keeps a reference to the owning client plus a started/stopped
    flag; message handling is left entirely to subclasses.
    """
    proxy_methods = []

    def __init__(self, client):
        super(InProcessChannel, self).__init__()
        self.client = client
        self._is_alive = False

    # -- Channel interface --------------------------------------------------

    def is_alive(self):
        """True between start() and stop()."""
        return self._is_alive

    def start(self):
        self._is_alive = True

    def stop(self):
        self._is_alive = False

    def call_handlers(self, msg):
        """Handle a message arriving on the main thread.

        Subclasses must override this to process incoming messages.
        """
        raise NotImplementedError('call_handlers must be defined in a subclass.')

    # -- InProcessChannel interface -----------------------------------------

    def call_handlers_later(self, *args, **kwds):
        """Call the message handlers, immediately by default.

        GUI toolkits may override this to defer until after their event
        loop has run, as expected by GUI frontends.
        """
        self.call_handlers(*args, **kwds)

    def process_events(self):
        """Process pending GUI events; only meaningful with an event loop."""
        raise NotImplementedError
class InProcessShellChannel(InProcessChannel):
    """Shell channel speaking directly to an in-process kernel.

    Each request method builds a protocol message via the client's
    session, dispatches it synchronously and returns the ``msg_id``.

    See `IPython.kernel.channels.ShellChannel` for docstrings.
    """
    # flag for whether execute requests should be allowed to call raw_input
    allow_stdin = True
    # request methods the client proxies through this channel
    proxy_methods = [
        'execute',
        'complete',
        'object_info',
        'history',
        'shutdown',
    ]
    #--------------------------------------------------------------------------
    # ShellChannel interface
    #--------------------------------------------------------------------------
    def execute(self, code, silent=False, store_history=True,
                user_variables=[], user_expressions={}, allow_stdin=None):
        # Send an 'execute_request'; allow_stdin falls back to the class flag.
        # NOTE(review): mutable defaults (user_variables/user_expressions)
        # are shared across calls; harmless only while never mutated.
        if allow_stdin is None:
            allow_stdin = self.allow_stdin
        content = dict(code=code, silent=silent, store_history=store_history,
                       user_variables=user_variables,
                       user_expressions=user_expressions,
                       allow_stdin=allow_stdin)
        msg = self.client.session.msg('execute_request', content)
        self._dispatch_to_kernel(msg)
        return msg['header']['msg_id']
    def complete(self, text, line, cursor_pos, block=None):
        # Request tab-completions for `text` within `line`.
        content = dict(text=text, line=line, block=block, cursor_pos=cursor_pos)
        msg = self.client.session.msg('complete_request', content)
        self._dispatch_to_kernel(msg)
        return msg['header']['msg_id']
    def object_info(self, oname, detail_level=0):
        # Request introspection info for the object named `oname`.
        content = dict(oname=oname, detail_level=detail_level)
        msg = self.client.session.msg('object_info_request', content)
        self._dispatch_to_kernel(msg)
        return msg['header']['msg_id']
    def history(self, raw=True, output=False, hist_access_type='range', **kwds):
        # Request execution history; extra kwds pass through into the content.
        content = dict(raw=raw, output=output,
                       hist_access_type=hist_access_type, **kwds)
        msg = self.client.session.msg('history_request', content)
        self._dispatch_to_kernel(msg)
        return msg['header']['msg_id']
    def shutdown(self, restart=False):
        # There is no separate process to shut down.
        # FIXME: What to do here?
        raise NotImplementedError('Cannot shutdown in-process kernel')
    #--------------------------------------------------------------------------
    # Protected interface
    #--------------------------------------------------------------------------
    def _dispatch_to_kernel(self, msg):
        """Send a message to the kernel synchronously and handle the reply.

        The message is serialized through a DummySocket, dispatched to the
        kernel's shell handler, and the reply is handed to the handlers.
        """
        kernel = self.client.kernel
        if kernel is None:
            raise RuntimeError('Cannot send request. No kernel exists.')
        stream = DummySocket()
        self.client.session.send(stream, msg)
        msg_parts = stream.recv_multipart()
        kernel.dispatch_shell(stream, msg_parts)
        idents, reply_msg = self.client.session.recv(stream, copy=False)
        self.call_handlers_later(reply_msg)
class InProcessIOPubChannel(InProcessChannel):
    """See `IPython.kernel.channels.IOPubChannel` for docstrings."""

    def flush(self, timeout=1.0):
        """No-op flush: in-process delivery is synchronous."""
class InProcessStdInChannel(InProcessChannel):
    """See `IPython.kernel.channels.StdInChannel` for docstrings."""

    proxy_methods = ['input']

    def input(self, string):
        """Hand *string* to the kernel as the raw_input reply."""
        recipient = self.client.kernel
        if recipient is None:
            raise RuntimeError('Cannot send input reply. No kernel exists.')
        recipient.raw_input_str = string
class InProcessHBChannel(InProcessChannel):
    """See `IPython.kernel.channels.HBChannel` for docstrings."""

    # Interval (seconds) after which a silent real kernel would count as dead.
    time_to_dead = 3.0

    def __init__(self, *args, **kwds):
        super(InProcessHBChannel, self).__init__(*args, **kwds)
        self._pause = True  # start with the heartbeat suspended

    def pause(self):
        """Suspend heartbeating."""
        self._pause = True

    def unpause(self):
        """Resume heartbeating."""
        self._pause = False

    def is_beating(self):
        """True while not paused."""
        return not self._pause
#-----------------------------------------------------------------------------
# ABC Registration
#-----------------------------------------------------------------------------
# Virtual-subclass registration: isinstance/issubclass checks against the
# channel ABCs accept the in-process implementations without inheritance.
ShellChannelABC.register(InProcessShellChannel)
IOPubChannelABC.register(InProcessIOPubChannel)
HBChannelABC.register(InProcessHBChannel)
StdInChannelABC.register(InProcessStdInChannel)
| {
"repo_name": "noslenfa/tdjangorest",
"path": "uw/lib/python2.7/site-packages/IPython/kernel/inprocess/channels.py",
"copies": "2",
"size": "6812",
"license": "apache-2.0",
"hash": 8097668796476168000,
"line_mean": 34.1134020619,
"line_max": 81,
"alpha_frac": 0.5256899589,
"autogenerated": false,
"ratio": 4.8796561604584525,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0015596975005484537,
"num_lines": 194
} |
""" a kernel implementation of a (localised) seamless update from a coupling between
two weighted ensembles (coarse and fine) to two coupled evenly weighted ensembles """
from __future__ import absolute_import
from __future__ import division
from firedrake import *
from firedrake.mg.utils import get_level
from fade import *
from fade.ml import *
from fade.emd.emd_kernel import *
import numpy as np
from pyop2.profiling import timed_stage
def seamless_coupling_update(ensemble_1, ensemble_2, weights_1, weights_2, r_loc_c=0, r_loc_f=0):
    """ performs a seamless coupling (localised) ensemble transform update from a coupling
        between two weighted ensembles (coarse and fine) into two evenly weighted ensembles.
        NB: The two ensembles have to belong to the same hierarchy.
        The input ensembles and weights are modified in place; the pair
        (ensemble_c, ensemble_f) of updated, evenly weighted ensembles is returned.
        :arg ensemble_1: list of :class:`Function`s in the coarse ensemble
        :type ensemble_1: tuple / list
        :arg ensemble_2: list of :class:`Function`s in the fine ensemble
        :type ensemble_2: tuple / list
        :arg weights_1: list of :class:`Function`s representing the importance weights for first
                        ensemble
        :type weights_1: tuple / list
        :arg weights_2: list of :class:`Function`s representing the importance weights for second
                        ensemble
        :type weights_2: tuple / list
        Optional Arguments:
        :arg r_loc_c: Radius of coarsening localisation for the coarse cost functions. Default: 0
        :type r_loc_c: int
        :arg r_loc_f: Radius of coarsening localisation for the fine cost functions. Default: 0
        :type r_loc_f: int
    """
    if len(ensemble_1) < 1 or len(ensemble_2) < 1:
        raise ValueError('ensembles cannot be indexed')
    if len(weights_1) < 1 or len(weights_2) < 1:
        raise ValueError('weights cannot be indexed')
    # check that ensemble_1 and ensemble_2 have inputs in the same hierarchy
    mesh_1 = ensemble_1[0].function_space().mesh()
    mesh_2 = ensemble_2[0].function_space().mesh()
    hierarchy_1, lvl_1 = get_level(mesh_1)
    hierarchy_2, lvl_2 = get_level(mesh_2)
    if lvl_1 is None or lvl_2 is None:
        raise ValueError('Both ensembles to be coupled need to be on meshes part of same hierarchy')
    if hierarchy_1 is not hierarchy_2:
        raise ValueError('Both ensembles to be coupled need to be on meshes part of same hierarchy')
    # check if 1 is coarse and 2 is fine
    if lvl_1 < lvl_2:
        ensemble_c = ensemble_1
        weights_c = weights_1
        ensemble_f = ensemble_2
        weights_f = weights_2
    else:
        raise ValueError('Coarse ensemble needs to be the first ensemble, followed by a finer one')
    n = len(ensemble_c)
    # BUG FIX: compare ints by value (`!=`), not identity (`is not`) --
    # identity comparison of ints is a CPython small-int implementation detail
    if n != len(ensemble_f):
        raise ValueError('Both ensembles need to be of the same length')
    # function spaces of both ensembles and create vector function space
    fsc = ensemble_c[0].function_space()
    fsf = ensemble_f[0].function_space()
    fam = fsc.ufl_element().family()
    deg = fsc.ufl_element().degree()
    assert fam == fsf.ufl_element().family()
    assert deg == fsf.ufl_element().degree()
    vfsc = VectorFunctionSpace(mesh_1, fam, deg, dim=n)
    vfsf = VectorFunctionSpace(mesh_2, fam, deg, dim=n)
    # check that weights have same length
    assert len(weights_c) == n
    assert len(weights_f) == n
    # check that weights add up to one
    with timed_stage("Checking weights are normalized"):
        cc = Function(fsc)
        cf = Function(fsf)
        for k in range(n):
            cc.dat.data[:] += weights_c[k].dat.data[:]
            cf.dat.data[:] += weights_f[k].dat.data[:]
        if np.max(np.abs(cc.dat.data[:] - 1)) > 1e-3 or np.max(np.abs(cf.dat.data[:] - 1)) > 1e-3:
            # BUG FIX: this branch also fires when the *fine* weights are not
            # normalized, so the message must not blame only the coarse ones
            raise ValueError('Coarse or fine weights do not add up to 1')
    # preallocate new / intermediate ensembles and assign basis coeffs to new vector function
    with timed_stage("Preallocating functions"):
        ensemble_c_f = Function(vfsc)
        ensemble_f_f = Function(vfsf)
        new_ensemble_c_f = Function(vfsc)
        new_ensemble_f_f = Function(vfsf)
        int_ensemble_c_f = Function(vfsc)
    if n == 1:
        ensemble_c_f.dat.data[:] = ensemble_c[0].dat.data[:]
        ensemble_f_f.dat.data[:] = ensemble_f[0].dat.data[:]
    else:
        for i in range(n):
            ensemble_c_f.dat.data[:, i] = ensemble_c[i].dat.data[:]
            ensemble_f_f.dat.data[:, i] = ensemble_f[i].dat.data[:]
    # define even weights
    with timed_stage("Preallocating functions"):
        even_weights_c = []
        even_weights_f = []
        fc = Function(fsc).assign(1.0 / n)
        ff = Function(fsf).assign(1.0 / n)
        for k in range(n):
            # NOTE: all entries alias one Function; safe as the even
            # weights are only ever read below
            even_weights_c.append(fc)
            even_weights_f.append(ff)
    # inject fine weights and ensembles down to coarse mesh
    with timed_stage("Injecting finer ensemble / weights down to coarse mesh"):
        inj_ensemble_f_f = Function(vfsc)
        inj_weights_f = []
        totals = Function(fsc)
        for i in range(n):
            g = Function(fsc)
            inject(weights_f[i], g)
            inj_weights_f.append(g)
            totals.dat.data[:] += inj_weights_f[i].dat.data[:]
        inject(ensemble_f_f, inj_ensemble_f_f)
    # re-normalize injected fine weights
    for i in range(n):
        inj_weights_f[i].dat.data[:] = np.divide(inj_weights_f[i].dat.data[:], totals.dat.data[:])
    with timed_stage("Coupling between weighted coarse and fine ensembles"):
        kernel_transform(ensemble_c_f, inj_ensemble_f_f, weights_c,
                         inj_weights_f, int_ensemble_c_f, r_loc_c)
    with timed_stage("Finer ensemble transform"):
        kernel_transform(ensemble_f_f, ensemble_f_f, weights_f,
                         even_weights_f, new_ensemble_f_f, r_loc_f)
    with timed_stage("Coupling weighted intermediate ensemble and transformed finer ensemble"):
        # inject transformed finer ensemble
        inj_new_ensemble_f_f = Function(vfsc)
        inj_f = Function(fsc)
        f_f = Function(fsf)
        if n == 1:
            f_f.dat.data[:] = new_ensemble_f_f.dat.data[:]
            inject(f_f, inj_f)
            inj_new_ensemble_f_f.dat.data[:] = inj_f.dat.data[:]
        else:
            for i in range(n):
                f_f.dat.data[:] = new_ensemble_f_f.dat.data[:, i]
                inject(f_f, inj_f)
                inj_new_ensemble_f_f.dat.data[:, i] = inj_f.dat.data[:]
        kernel_transform(int_ensemble_c_f, inj_new_ensemble_f_f,
                         inj_weights_f, even_weights_c, new_ensemble_c_f, r_loc_c)
    # record the weighted posterior means before overwriting anything
    with timed_stage("Checking posterior mean consistency"):
        mc = Function(fsc)
        mf = Function(fsf)
        for k in range(n):
            mc.dat.data[:] += np.multiply(ensemble_c[k].dat.data[:], weights_c[k].dat.data[:])
            mf.dat.data[:] += np.multiply(ensemble_f[k].dat.data[:], weights_f[k].dat.data[:])
    # override ensembles
    if n == 1:
        ensemble_c[0].dat.data[:] = new_ensemble_c_f.dat.data[:]
        ensemble_f[0].dat.data[:] = new_ensemble_f_f.dat.data[:]
    else:
        for i in range(n):
            ensemble_c[i].dat.data[:] = new_ensemble_c_f.dat.data[:, i]
            ensemble_f[i].dat.data[:] = new_ensemble_f_f.dat.data[:, i]
    # reset weights
    for i in range(n):
        weights_c[i].assign(1.0 / n)
        weights_f[i].assign(1.0 / n)
    # check that the transformed ensembles preserve the posterior mean
    with timed_stage("Checking posterior mean consistency"):
        mnc = Function(fsc)
        mnf = Function(fsf)
        for k in range(n):
            mnc.dat.data[:] += np.multiply(ensemble_c[k].dat.data[:], weights_c[k].dat.data[:])
            mnf.dat.data[:] += np.multiply(ensemble_f[k].dat.data[:], weights_f[k].dat.data[:])
        assert np.max(np.abs(mnc.dat.data[:] - mc.dat.data[:])) < 1e-5
        assert np.max(np.abs(mnf.dat.data[:] - mf.dat.data[:])) < 1e-5
    return ensemble_c, ensemble_f
| {
"repo_name": "alsgregory/FADE",
"path": "fade/ml/coupling.py",
"copies": "2",
"size": "8073",
"license": "mit",
"hash": -9190670743584968000,
"line_mean": 38.5735294118,
"line_max": 100,
"alpha_frac": 0.6065898675,
"autogenerated": false,
"ratio": 3.3651521467278034,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.49717420142278035,
"avg_score": null,
"num_lines": null
} |
"""A kernel manager for in-process kernels."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from IPython.utils.traitlets import Instance, DottedObjectName
from IPython.kernel.managerabc import KernelManagerABC
from IPython.kernel.manager import KernelManager
from IPython.kernel.zmq.session import Session
class InProcessKernelManager(KernelManager):
    """A manager for an in-process kernel.

    This class implements the interface of
    `IPython.kernel.kernelmanagerabc.KernelManagerABC` and allows
    (asynchronous) frontends to be used seamlessly with an in-process kernel.

    See `IPython.kernel.kernelmanager.KernelManager` for docstrings.
    """

    # The kernel "process" being managed -- here just an in-process object.
    kernel = Instance('IPython.kernel.inprocess.ipkernel.InProcessKernel')

    # the client class for KM.client() shortcut
    client_class = DottedObjectName('IPython.kernel.inprocess.BlockingInProcessKernelClient')

    def _session_default(self):
        # In-process messages never leave the process, so skip signing.
        return Session(key=b'', parent=self)

    # --- Kernel management -------------------------------------------------

    def start_kernel(self, **kwds):
        """Instantiate the in-process kernel (import deferred to avoid cycles)."""
        from IPython.kernel.inprocess.ipkernel import InProcessKernel
        self.kernel = InProcessKernel(parent=self, session=self.session)

    def shutdown_kernel(self):
        self._kill_kernel()

    def restart_kernel(self, now=False, **kwds):
        """Tear the kernel down and start a fresh one."""
        self.shutdown_kernel()
        self.start_kernel(**kwds)

    @property
    def has_kernel(self):
        return self.kernel is not None

    def _kill_kernel(self):
        # Dropping the reference is all that "killing" amounts to in-process.
        self.kernel = None

    def interrupt_kernel(self):
        raise NotImplementedError("Cannot interrupt in-process kernel.")

    def signal_kernel(self, signum):
        raise NotImplementedError("Cannot signal in-process kernel.")

    def is_alive(self):
        return self.kernel is not None

    def client(self, **kwargs):
        # Hand the live kernel object straight to the client constructor.
        kwargs['kernel'] = self.kernel
        return super(InProcessKernelManager, self).client(**kwargs)
#-----------------------------------------------------------------------------
# ABC Registration
#-----------------------------------------------------------------------------
# Virtual-subclass registration: isinstance(x, KernelManagerABC) accepts
# InProcessKernelManager without actual inheritance from the ABC.
KernelManagerABC.register(InProcessKernelManager)
| {
"repo_name": "wolfram74/numerical_methods_iserles_notes",
"path": "venv/lib/python2.7/site-packages/IPython/kernel/inprocess/manager.py",
"copies": "4",
"size": "2469",
"license": "mit",
"hash": -7985418457950066000,
"line_mean": 33.7746478873,
"line_max": 93,
"alpha_frac": 0.6265694613,
"autogenerated": false,
"ratio": 4.7664092664092665,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00649232047279638,
"num_lines": 71
} |
"""A kernel manager for in-process kernels."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from traitlets import Instance, DottedObjectName, default
from jupyter_client.managerabc import KernelManagerABC
from jupyter_client.manager import KernelManager
from jupyter_client.session import Session
from .constants import INPROCESS_KEY
class InProcessKernelManager(KernelManager):
    """A manager for an in-process kernel.

    This class implements the interface of
    `jupyter_client.kernelmanagerabc.KernelManagerABC` and allows
    (asynchronous) frontends to be used seamlessly with an in-process kernel.

    See `jupyter_client.kernelmanager.KernelManager` for docstrings.
    """
    # The kernel process with which the KernelManager is communicating.
    kernel = Instance('ipykernel.inprocess.ipkernel.InProcessKernel',
                      allow_none=True)
    # the client class for KM.client() shortcut
    client_class = DottedObjectName('ipykernel.inprocess.BlockingInProcessKernelClient')
    @default('blocking_class')
    def _default_blocking_class(self):
        # Imported lazily to avoid a circular import at module load time.
        from .blocking import BlockingInProcessKernelClient
        return BlockingInProcessKernelClient
    @default('session')
    def _default_session(self):
        # don't sign in-process messages
        return Session(key=INPROCESS_KEY, parent=self)
    #--------------------------------------------------------------------------
    # Kernel management methods
    #--------------------------------------------------------------------------
    def start_kernel(self, **kwds):
        # Lazy import avoids a circular dependency with the kernel module.
        from ipykernel.inprocess.ipkernel import InProcessKernel
        self.kernel = InProcessKernel(parent=self, session=self.session)
    def shutdown_kernel(self):
        # Stop the IOPub relay thread before discarding the kernel object.
        self.kernel.iopub_thread.stop()
        self._kill_kernel()
    def restart_kernel(self, now=False, **kwds):
        self.shutdown_kernel()
        self.start_kernel(**kwds)
    @property
    def has_kernel(self):
        # True while an in-process kernel object is attached.
        return self.kernel is not None
    def _kill_kernel(self):
        # "Killing" an in-process kernel is just dropping the reference.
        self.kernel = None
    def interrupt_kernel(self):
        # There is no separate process to signal.
        raise NotImplementedError("Cannot interrupt in-process kernel.")
    def signal_kernel(self, signum):
        raise NotImplementedError("Cannot signal in-process kernel.")
    def is_alive(self):
        return self.kernel is not None
    def client(self, **kwargs):
        # Give the client direct access to the kernel object.
        kwargs['kernel'] = self.kernel
        return super(InProcessKernelManager, self).client(**kwargs)
#-----------------------------------------------------------------------------
# ABC Registration
#-----------------------------------------------------------------------------
# Register as a virtual subclass so ABC-based isinstance checks pass.
KernelManagerABC.register(InProcessKernelManager)
| {
"repo_name": "sserrot/champion_relationships",
"path": "venv/Lib/site-packages/ipykernel/inprocess/manager.py",
"copies": "9",
"size": "2764",
"license": "mit",
"hash": 6489665061387190000,
"line_mean": 33.1234567901,
"line_max": 88,
"alpha_frac": 0.6331403763,
"autogenerated": false,
"ratio": 4.629815745393635,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9762956121693634,
"avg_score": null,
"num_lines": null
} |
"""A kernel manager for in-process kernels."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from traitlets import Instance, DottedObjectName
from jupyter_client.managerabc import KernelManagerABC
from jupyter_client.manager import KernelManager
from jupyter_client.session import Session
class InProcessKernelManager(KernelManager):
    """A manager for an in-process kernel.

    This class implements the interface of
    `jupyter_client.kernelmanagerabc.KernelManagerABC` and allows
    (asynchronous) frontends to be used seamlessly with an in-process kernel.

    See `jupyter_client.kernelmanager.KernelManager` for docstrings.
    """

    # The kernel "process" being managed -- here just an in-process object.
    kernel = Instance('ipykernel.inprocess.ipkernel.InProcessKernel',
                      allow_none=True)

    # the client class for KM.client() shortcut
    client_class = DottedObjectName('ipykernel.inprocess.BlockingInProcessKernelClient')

    def _session_default(self):
        # In-process messages never leave the process, so skip signing.
        return Session(key=b'', parent=self)

    # --- Kernel management -------------------------------------------------

    def start_kernel(self, **kwds):
        """Instantiate the in-process kernel (deferred import avoids cycles)."""
        from ipykernel.inprocess.ipkernel import InProcessKernel
        self.kernel = InProcessKernel(parent=self, session=self.session)

    def shutdown_kernel(self):
        self._kill_kernel()

    def restart_kernel(self, now=False, **kwds):
        """Tear the kernel down and start a fresh one."""
        self.shutdown_kernel()
        self.start_kernel(**kwds)

    @property
    def has_kernel(self):
        return self.kernel is not None

    def _kill_kernel(self):
        # Dropping the reference is all that "killing" amounts to in-process.
        self.kernel = None

    def interrupt_kernel(self):
        raise NotImplementedError("Cannot interrupt in-process kernel.")

    def signal_kernel(self, signum):
        raise NotImplementedError("Cannot signal in-process kernel.")

    def is_alive(self):
        return self.kernel is not None

    def client(self, **kwargs):
        # Hand the live kernel object straight to the client constructor.
        kwargs['kernel'] = self.kernel
        return super(InProcessKernelManager, self).client(**kwargs)
#-----------------------------------------------------------------------------
# ABC Registration
#-----------------------------------------------------------------------------
# Register as a virtual subclass so isinstance/issubclass checks against
# KernelManagerABC accept InProcessKernelManager without real inheritance.
KernelManagerABC.register(InProcessKernelManager)
| {
"repo_name": "bdh1011/wau",
"path": "venv/lib/python2.7/site-packages/ipykernel/inprocess/manager.py",
"copies": "1",
"size": "2467",
"license": "mit",
"hash": -5736140954056489000,
"line_mean": 33.2638888889,
"line_max": 88,
"alpha_frac": 0.6214025132,
"autogenerated": false,
"ratio": 4.699047619047619,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5820450132247619,
"avg_score": null,
"num_lines": null
} |
"""A kernel manager for in-process kernels."""
#-----------------------------------------------------------------------------
# Copyright (C) 2013 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from IPython.utils.traitlets import Instance, DottedObjectName
from IPython.kernel.managerabc import KernelManagerABC
from IPython.kernel.manager import KernelManager
#-----------------------------------------------------------------------------
# Main kernel manager class
#-----------------------------------------------------------------------------
class InProcessKernelManager(KernelManager):
    """A manager for an in-process kernel.
    This class implements the interface of
    `IPython.kernel.kernelmanagerabc.KernelManagerABC` and allows
    (asynchronous) frontends to be used seamlessly with an in-process kernel.
    See `IPython.kernel.kernelmanager.KernelManager` for docstrings.
    """
    # The kernel process with which the KernelManager is communicating.
    # Holds the kernel object itself rather than a subprocess handle; a value
    # of None means "no kernel running" (see has_kernel/is_alive below).
    kernel = Instance('IPython.kernel.inprocess.ipkernel.InProcessKernel')
    # the client class for KM.client() shortcut
    client_class = DottedObjectName(
        'IPython.kernel.inprocess.BlockingInProcessKernelClient')
    #--------------------------------------------------------------------------
    # Kernel management methods
    #--------------------------------------------------------------------------
    def start_kernel(self, **kwds):
        # Local import — presumably deferred to avoid a circular import at
        # module load time; TODO confirm.
        from IPython.kernel.inprocess.ipkernel import InProcessKernel
        self.kernel = InProcessKernel()
    def shutdown_kernel(self):
        self._kill_kernel()
    def restart_kernel(self, now=False, **kwds):
        # `now` is accepted for interface compatibility with KernelManager;
        # there is no subprocess to kill forcibly, so it is unused here.
        self.shutdown_kernel()
        self.start_kernel(**kwds)
    @property
    def has_kernel(self):
        return self.kernel is not None
    def _kill_kernel(self):
        # "Killing" an in-process kernel is just dropping our reference.
        self.kernel = None
    def interrupt_kernel(self):
        # No separate process exists to receive SIGINT.
        raise NotImplementedError("Cannot interrupt in-process kernel.")
    def signal_kernel(self, signum):
        # No separate process exists to receive signals.
        raise NotImplementedError("Cannot signal in-process kernel.")
    def is_alive(self):
        return self.kernel is not None
    def client(self, **kwargs):
        # Inject the in-process kernel so the client talks to it directly.
        kwargs['kernel'] = self.kernel
        return super(InProcessKernelManager, self).client(**kwargs)
#-----------------------------------------------------------------------------
# ABC Registration
#-----------------------------------------------------------------------------
# Register as a virtual subclass so isinstance/issubclass checks against
# KernelManagerABC accept InProcessKernelManager without real inheritance.
KernelManagerABC.register(InProcessKernelManager)
| {
"repo_name": "mattvonrocketstein/smash",
"path": "smashlib/ipy3x/kernel/inprocess/manager.py",
"copies": "1",
"size": "2873",
"license": "mit",
"hash": -1987688352129125600,
"line_mean": 34.9125,
"line_max": 79,
"alpha_frac": 0.5210581274,
"autogenerated": false,
"ratio": 5.5570599613152805,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6578118088715281,
"avg_score": null,
"num_lines": null
} |
"""A kernel manager for multiple kernels
Authors:
* Brian Granger
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2013 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from __future__ import absolute_import
import os
import uuid
import zmq
from IPython.config.configurable import LoggingConfigurable
from IPython.utils.importstring import import_item
from IPython.utils.traitlets import (
Instance, Dict, Unicode, Any, DottedObjectName
)
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class DuplicateKernelError(Exception):
    """Raised when asked to start a kernel under a kernel_id already in use."""
    pass
def kernel_method(f):
    """decorator for proxying MKM.method(kernel_id) to individual KMs by ID

    The per-kernel KernelManager method of the same name is called first and
    its return value is what callers receive; the decorated method body runs
    afterwards, so it can add behavior such as logging.
    """
    # Local import keeps this fix self-contained without touching the
    # module-level import block.
    import functools

    @functools.wraps(f)
    def wrapped(self, kernel_id, *args, **kwargs):
        # get the kernel (raises if kernel_id is unknown)
        km = self.get_kernel(kernel_id)
        method = getattr(km, f.__name__)
        # call the kernel's method
        r = method(*args, **kwargs)
        # last thing, call anything defined in the actual class method
        # such as logging messages
        f(self, kernel_id, *args, **kwargs)
        # return the method result
        return r
    return wrapped
class MultiKernelManager(LoggingConfigurable):
    """A class for managing multiple kernels."""
    # Dotted path of the per-kernel manager class; configurable so
    # deployments can substitute a customized KernelManager subclass.
    kernel_manager_class = DottedObjectName(
        "IPython.kernel.ioloop.IOLoopKernelManager", config=True,
        help="""The kernel manager class. This is configurable to allow
        subclassing of the KernelManager for customized behavior.
        """
    )
    def _kernel_manager_class_changed(self, name, old, new):
        # keep the cached factory in sync when the trait is reconfigured
        self.kernel_manager_factory = import_item(new)
    kernel_manager_factory = Any(help="this is kernel_manager_class after import")
    def _kernel_manager_factory_default(self):
        return import_item(self.kernel_manager_class)
    context = Instance('zmq.Context')
    def _context_default(self):
        # share the process-wide zmq context by default
        return zmq.Context.instance()
    # directory in which kernel-<id>.json connection files are written
    connection_dir = Unicode('')
    # mapping of kernel_id -> KernelManager instance
    _kernels = Dict()
    def list_kernel_ids(self):
        """Return a list of the kernel ids of the active kernels."""
        # Create a copy so we can iterate over kernels in operations
        # that delete keys.
        return list(self._kernels.keys())
    def __len__(self):
        """Return the number of running kernels."""
        return len(self.list_kernel_ids())
    def __contains__(self, kernel_id):
        return kernel_id in self._kernels
    def start_kernel(self, **kwargs):
        """Start a new kernel.
        The caller can pick a kernel_id by passing one in as a keyword arg,
        otherwise one will be picked using a uuid.
        To silence the kernel's stdout/stderr, call this using::
        km.start_kernel(stdout=PIPE, stderr=PIPE)
        """
        # NOTE(review): `unicode` is the Python 2 builtin; this module
        # predates Python 3 support.
        kernel_id = kwargs.pop('kernel_id', unicode(uuid.uuid4()))
        if kernel_id in self:
            raise DuplicateKernelError('Kernel already exists: %s' % kernel_id)
        # kernel_manager_factory is the constructor for the KernelManager
        # subclass we are using. It can be configured as any Configurable,
        # including things like its transport and ip.
        km = self.kernel_manager_factory(connection_file=os.path.join(
                    self.connection_dir, "kernel-%s.json" % kernel_id),
                    parent=self, autorestart=True, log=self.log
        )
        km.start_kernel(**kwargs)
        self._kernels[kernel_id] = km
        return kernel_id
    @kernel_method
    def shutdown_kernel(self, kernel_id, now=False):
        """Shutdown a kernel by its kernel uuid.
        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel to shutdown.
        now : bool
            Should the kernel be shutdown forcibly using a signal.
        """
        # This body runs *after* the proxied KernelManager.shutdown_kernel
        # call (see kernel_method); here it logs and drops our reference.
        self.log.info("Kernel shutdown: %s" % kernel_id)
        self.remove_kernel(kernel_id)
    def remove_kernel(self, kernel_id):
        """remove a kernel from our mapping.
        Mainly so that a kernel can be removed if it is already dead,
        without having to call shutdown_kernel.
        The kernel object is returned.
        """
        return self._kernels.pop(kernel_id)
    def shutdown_all(self, now=False):
        """Shutdown all kernels."""
        for kid in self.list_kernel_ids():
            self.shutdown_kernel(kid, now=now)
    @kernel_method
    def interrupt_kernel(self, kernel_id):
        """Interrupt (SIGINT) the kernel by its uuid.
        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel to interrupt.
        """
        self.log.info("Kernel interrupted: %s" % kernel_id)
    @kernel_method
    def signal_kernel(self, kernel_id, signum):
        """Sends a signal to the kernel by its uuid.
        Note that since only SIGTERM is supported on Windows, this function
        is only useful on Unix systems.
        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel to signal.
        """
        self.log.info("Signaled Kernel %s with %s" % (kernel_id, signum))
    @kernel_method
    def restart_kernel(self, kernel_id):
        """Restart a kernel by its uuid, keeping the same ports.
        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel to interrupt.
        """
        self.log.info("Kernel restarted: %s" % kernel_id)
    # NOTE: methods below with empty bodies are pure proxies — kernel_method
    # performs the per-kernel call and returns its result; the empty body
    # contributes only documentation.
    @kernel_method
    def is_alive(self, kernel_id):
        """Is the kernel alive.
        This calls KernelManager.is_alive() which calls Popen.poll on the
        actual kernel subprocess.
        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel.
        """
    def _check_kernel_id(self, kernel_id):
        """check that a kernel id is valid"""
        if kernel_id not in self:
            raise KeyError("Kernel with id not found: %s" % kernel_id)
    def get_kernel(self, kernel_id):
        """Get the single KernelManager object for a kernel by its uuid.
        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel.
        """
        self._check_kernel_id(kernel_id)
        return self._kernels[kernel_id]
    @kernel_method
    def add_restart_callback(self, kernel_id, callback, event='restart'):
        """add a callback for the KernelRestarter"""
    @kernel_method
    def remove_restart_callback(self, kernel_id, callback, event='restart'):
        """remove a callback for the KernelRestarter"""
    @kernel_method
    def get_connection_info(self, kernel_id):
        """Return a dictionary of connection data for a kernel.
        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel.
        Returns
        =======
        connection_dict : dict
            A dict of the information needed to connect to a kernel.
            This includes the ip address and the integer port
            numbers of the different channels (stdin_port, iopub_port,
            shell_port, hb_port).
        """
    @kernel_method
    def connect_iopub(self, kernel_id, identity=None):
        """Return a zmq Socket connected to the iopub channel.
        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel
        identity : bytes (optional)
            The zmq identity of the socket
        Returns
        =======
        stream : zmq Socket or ZMQStream
        """
    @kernel_method
    def connect_shell(self, kernel_id, identity=None):
        """Return a zmq Socket connected to the shell channel.
        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel
        identity : bytes (optional)
            The zmq identity of the socket
        Returns
        =======
        stream : zmq Socket or ZMQStream
        """
    @kernel_method
    def connect_stdin(self, kernel_id, identity=None):
        """Return a zmq Socket connected to the stdin channel.
        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel
        identity : bytes (optional)
            The zmq identity of the socket
        Returns
        =======
        stream : zmq Socket or ZMQStream
        """
    @kernel_method
    def connect_hb(self, kernel_id, identity=None):
        """Return a zmq Socket connected to the hb channel.
        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel
        identity : bytes (optional)
            The zmq identity of the socket
        Returns
        =======
        stream : zmq Socket or ZMQStream
        """
| {
"repo_name": "marcoantoniooliveira/labweb",
"path": "oscar/lib/python2.7/site-packages/IPython/kernel/multikernelmanager.py",
"copies": "2",
"size": "9118",
"license": "bsd-3-clause",
"hash": 6667340723475497000,
"line_mean": 29.292358804,
"line_max": 82,
"alpha_frac": 0.5632814214,
"autogenerated": false,
"ratio": 4.5612806403201605,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0027508743166873577,
"num_lines": 301
} |
"""A kernel manager for multiple kernels"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from __future__ import absolute_import
import os
import uuid
import zmq
from IPython.config.configurable import LoggingConfigurable
from IPython.utils.importstring import import_item
from IPython.utils.traitlets import (
Instance, Dict, List, Unicode, Any, DottedObjectName
)
from IPython.utils.py3compat import unicode_type
from .kernelspec import NATIVE_KERNEL_NAME
class DuplicateKernelError(Exception):
    """Raised when asked to start a kernel under a kernel_id already in use."""
    pass
def kernel_method(f):
    """decorator for proxying MKM.method(kernel_id) to individual KMs by ID

    The per-kernel KernelManager method of the same name is called first and
    its return value is what callers receive; the decorated method body runs
    afterwards, so it can add behavior such as logging.
    """
    # Local import keeps this fix self-contained without touching the
    # module-level import block.
    import functools

    @functools.wraps(f)
    def wrapped(self, kernel_id, *args, **kwargs):
        # get the kernel (raises if kernel_id is unknown)
        km = self.get_kernel(kernel_id)
        method = getattr(km, f.__name__)
        # call the kernel's method
        r = method(*args, **kwargs)
        # last thing, call anything defined in the actual class method
        # such as logging messages
        f(self, kernel_id, *args, **kwargs)
        # return the method result
        return r
    return wrapped
class MultiKernelManager(LoggingConfigurable):
    """A class for managing multiple kernels."""
    # extra argv passed to IPython kernels (see the FIXME in start_kernel)
    ipython_kernel_argv = List(Unicode)
    default_kernel_name = Unicode(NATIVE_KERNEL_NAME, config=True,
        help="The name of the default kernel to start"
    )
    # Dotted path of the per-kernel manager class; configurable so
    # deployments can substitute a customized KernelManager subclass.
    kernel_manager_class = DottedObjectName(
        "IPython.kernel.ioloop.IOLoopKernelManager", config=True,
        help="""The kernel manager class. This is configurable to allow
        subclassing of the KernelManager for customized behavior.
        """
    )
    def _kernel_manager_class_changed(self, name, old, new):
        # keep the cached factory in sync when the trait is reconfigured
        self.kernel_manager_factory = import_item(new)
    kernel_manager_factory = Any(
        help="this is kernel_manager_class after import")
    def _kernel_manager_factory_default(self):
        return import_item(self.kernel_manager_class)
    context = Instance('zmq.Context')
    def _context_default(self):
        # share the process-wide zmq context by default
        return zmq.Context.instance()
    # directory in which kernel-<id>.json connection files are written
    connection_dir = Unicode('')
    # mapping of kernel_id -> KernelManager instance
    _kernels = Dict()
    def list_kernel_ids(self):
        """Return a list of the kernel ids of the active kernels."""
        # Create a copy so we can iterate over kernels in operations
        # that delete keys.
        return list(self._kernels.keys())
    def __len__(self):
        """Return the number of running kernels."""
        return len(self.list_kernel_ids())
    def __contains__(self, kernel_id):
        return kernel_id in self._kernels
    def start_kernel(self, kernel_name=None, **kwargs):
        """Start a new kernel.
        The caller can pick a kernel_id by passing one in as a keyword arg,
        otherwise one will be picked using a uuid.
        To silence the kernel's stdout/stderr, call this using::
        km.start_kernel(stdout=PIPE, stderr=PIPE)
        """
        kernel_id = kwargs.pop('kernel_id', unicode_type(uuid.uuid4()))
        if kernel_id in self:
            raise DuplicateKernelError('Kernel already exists: %s' % kernel_id)
        if kernel_name is None:
            kernel_name = self.default_kernel_name
        # kernel_manager_factory is the constructor for the KernelManager
        # subclass we are using. It can be configured as any Configurable,
        # including things like its transport and ip.
        km = self.kernel_manager_factory(connection_file=os.path.join(
                    self.connection_dir, "kernel-%s.json" % kernel_id),
                    parent=self, autorestart=True, log=self.log, kernel_name=kernel_name,
        )
        # FIXME: remove special treatment of IPython kernels
        if km.ipython_kernel:
            kwargs.setdefault('extra_arguments', self.ipython_kernel_argv)
        km.start_kernel(**kwargs)
        self._kernels[kernel_id] = km
        return kernel_id
    @kernel_method
    def shutdown_kernel(self, kernel_id, now=False, restart=False):
        """Shutdown a kernel by its kernel uuid.
        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel to shutdown.
        now : bool
            Should the kernel be shutdown forcibly using a signal.
        restart : bool
            Will the kernel be restarted?
        """
        # This body runs *after* the proxied KernelManager.shutdown_kernel
        # call (see kernel_method); here it logs and drops our reference.
        self.log.info("Kernel shutdown: %s" % kernel_id)
        self.remove_kernel(kernel_id)
    @kernel_method
    def request_shutdown(self, kernel_id, restart=False):
        """Ask a kernel to shut down by its kernel uuid"""
    @kernel_method
    def finish_shutdown(self, kernel_id, waittime=1, pollinterval=0.1):
        """Wait for a kernel to finish shutting down, and kill it if it doesn't
        """
        self.log.info("Kernel shutdown: %s" % kernel_id)
    @kernel_method
    def cleanup(self, kernel_id, connection_file=True):
        """Clean up a kernel's resources"""
    def remove_kernel(self, kernel_id):
        """remove a kernel from our mapping.
        Mainly so that a kernel can be removed if it is already dead,
        without having to call shutdown_kernel.
        The kernel object is returned.
        """
        return self._kernels.pop(kernel_id)
    def shutdown_all(self, now=False):
        """Shutdown all kernels."""
        kids = self.list_kernel_ids()
        for kid in kids:
            self.request_shutdown(kid)
        for kid in kids:
            self.finish_shutdown(kid)
            self.cleanup(kid)
        # NOTE(review): unlike shutdown_kernel, this path never calls
        # remove_kernel, so entries remain in _kernels after shutdown —
        # verify this is intentional (the jupyter_client version removes them).
    @kernel_method
    def interrupt_kernel(self, kernel_id):
        """Interrupt (SIGINT) the kernel by its uuid.
        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel to interrupt.
        """
        self.log.info("Kernel interrupted: %s" % kernel_id)
    @kernel_method
    def signal_kernel(self, kernel_id, signum):
        """Sends a signal to the kernel by its uuid.
        Note that since only SIGTERM is supported on Windows, this function
        is only useful on Unix systems.
        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel to signal.
        """
        self.log.info("Signaled Kernel %s with %s" % (kernel_id, signum))
    @kernel_method
    def restart_kernel(self, kernel_id, now=False):
        """Restart a kernel by its uuid, keeping the same ports.
        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel to interrupt.
        """
        self.log.info("Kernel restarted: %s" % kernel_id)
    # NOTE: methods below with empty bodies are pure proxies — kernel_method
    # performs the per-kernel call and returns its result; the empty body
    # contributes only documentation.
    @kernel_method
    def is_alive(self, kernel_id):
        """Is the kernel alive.
        This calls KernelManager.is_alive() which calls Popen.poll on the
        actual kernel subprocess.
        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel.
        """
    def _check_kernel_id(self, kernel_id):
        """check that a kernel id is valid"""
        if kernel_id not in self:
            raise KeyError("Kernel with id not found: %s" % kernel_id)
    def get_kernel(self, kernel_id):
        """Get the single KernelManager object for a kernel by its uuid.
        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel.
        """
        self._check_kernel_id(kernel_id)
        return self._kernels[kernel_id]
    @kernel_method
    def add_restart_callback(self, kernel_id, callback, event='restart'):
        """add a callback for the KernelRestarter"""
    @kernel_method
    def remove_restart_callback(self, kernel_id, callback, event='restart'):
        """remove a callback for the KernelRestarter"""
    @kernel_method
    def get_connection_info(self, kernel_id):
        """Return a dictionary of connection data for a kernel.
        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel.
        Returns
        =======
        connection_dict : dict
            A dict of the information needed to connect to a kernel.
            This includes the ip address and the integer port
            numbers of the different channels (stdin_port, iopub_port,
            shell_port, hb_port).
        """
    @kernel_method
    def connect_iopub(self, kernel_id, identity=None):
        """Return a zmq Socket connected to the iopub channel.
        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel
        identity : bytes (optional)
            The zmq identity of the socket
        Returns
        =======
        stream : zmq Socket or ZMQStream
        """
    @kernel_method
    def connect_shell(self, kernel_id, identity=None):
        """Return a zmq Socket connected to the shell channel.
        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel
        identity : bytes (optional)
            The zmq identity of the socket
        Returns
        =======
        stream : zmq Socket or ZMQStream
        """
    @kernel_method
    def connect_stdin(self, kernel_id, identity=None):
        """Return a zmq Socket connected to the stdin channel.
        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel
        identity : bytes (optional)
            The zmq identity of the socket
        Returns
        =======
        stream : zmq Socket or ZMQStream
        """
    @kernel_method
    def connect_hb(self, kernel_id, identity=None):
        """Return a zmq Socket connected to the hb channel.
        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel
        identity : bytes (optional)
            The zmq identity of the socket
        Returns
        =======
        stream : zmq Socket or ZMQStream
        """
| {
"repo_name": "mattvonrocketstein/smash",
"path": "smashlib/ipy3x/kernel/multikernelmanager.py",
"copies": "1",
"size": "9811",
"license": "mit",
"hash": -1142344876163740000,
"line_mean": 29.1876923077,
"line_max": 81,
"alpha_frac": 0.5968810519,
"autogenerated": false,
"ratio": 4.323931247245483,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00007551015681096982,
"num_lines": 325
} |
"""A kernel manager for multiple kernels"""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
from __future__ import absolute_import
import os
import uuid
import zmq
from traitlets.config.configurable import LoggingConfigurable
from ipython_genutils.importstring import import_item
from traitlets import (
Instance, Dict, List, Unicode, Any, DottedObjectName
)
from ipython_genutils.py3compat import unicode_type
from .kernelspec import NATIVE_KERNEL_NAME, KernelSpecManager
class DuplicateKernelError(Exception):
    """Raised when asked to start a kernel under a kernel_id already in use."""
    pass
def kernel_method(f):
    """decorator for proxying MKM.method(kernel_id) to individual KMs by ID

    The per-kernel KernelManager method of the same name is called first and
    its return value is what callers receive; the decorated method body runs
    afterwards, so it can add behavior such as logging.
    """
    # Local import keeps this fix self-contained without touching the
    # module-level import block.
    import functools

    @functools.wraps(f)
    def wrapped(self, kernel_id, *args, **kwargs):
        # get the kernel (raises if kernel_id is unknown)
        km = self.get_kernel(kernel_id)
        method = getattr(km, f.__name__)
        # call the kernel's method
        r = method(*args, **kwargs)
        # last thing, call anything defined in the actual class method
        # such as logging messages
        f(self, kernel_id, *args, **kwargs)
        # return the method result
        return r
    return wrapped
class MultiKernelManager(LoggingConfigurable):
    """A class for managing multiple kernels."""
    default_kernel_name = Unicode(NATIVE_KERNEL_NAME, config=True,
        help="The name of the default kernel to start"
    )
    # Optional KernelSpecManager forwarded to each per-kernel manager
    # (see constructor_kwargs in start_kernel).
    kernel_spec_manager = Instance(KernelSpecManager, allow_none=True)
    # Dotted path of the per-kernel manager class; configurable so
    # deployments can substitute a customized KernelManager subclass.
    kernel_manager_class = DottedObjectName(
        "jupyter_client.ioloop.IOLoopKernelManager", config=True,
        help="""The kernel manager class.  This is configurable to allow
        subclassing of the KernelManager for customized behavior.
        """
    )
    def _kernel_manager_class_changed(self, name, old, new):
        # keep the cached factory in sync when the trait is reconfigured
        self.kernel_manager_factory = import_item(new)
    kernel_manager_factory = Any(help="this is kernel_manager_class after import")
    def _kernel_manager_factory_default(self):
        return import_item(self.kernel_manager_class)
    context = Instance('zmq.Context')
    def _context_default(self):
        # share the process-wide zmq context by default
        return zmq.Context.instance()
    # directory in which kernel-<id>.json connection files are written
    connection_dir = Unicode('')
    # mapping of kernel_id -> KernelManager instance
    _kernels = Dict()
    def list_kernel_ids(self):
        """Return a list of the kernel ids of the active kernels."""
        # Create a copy so we can iterate over kernels in operations
        # that delete keys.
        return list(self._kernels.keys())
    def __len__(self):
        """Return the number of running kernels."""
        return len(self.list_kernel_ids())
    def __contains__(self, kernel_id):
        return kernel_id in self._kernels
    def start_kernel(self, kernel_name=None, **kwargs):
        """Start a new kernel.
        The caller can pick a kernel_id by passing one in as a keyword arg,
        otherwise one will be picked using a uuid.
        The kernel ID for the newly started kernel is returned.
        """
        kernel_id = kwargs.pop('kernel_id', unicode_type(uuid.uuid4()))
        if kernel_id in self:
            raise DuplicateKernelError('Kernel already exists: %s' % kernel_id)
        if kernel_name is None:
            kernel_name = self.default_kernel_name
        # kernel_manager_factory is the constructor for the KernelManager
        # subclass we are using. It can be configured as any Configurable,
        # including things like its transport and ip.
        constructor_kwargs = {}
        if self.kernel_spec_manager:
            constructor_kwargs['kernel_spec_manager'] = self.kernel_spec_manager
        km = self.kernel_manager_factory(connection_file=os.path.join(
                    self.connection_dir, "kernel-%s.json" % kernel_id),
                    parent=self, log=self.log, kernel_name=kernel_name,
                    **constructor_kwargs
        )
        km.start_kernel(**kwargs)
        self._kernels[kernel_id] = km
        return kernel_id
    @kernel_method
    def shutdown_kernel(self, kernel_id, now=False, restart=False):
        """Shutdown a kernel by its kernel uuid.
        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel to shutdown.
        now : bool
            Should the kernel be shutdown forcibly using a signal.
        restart : bool
            Will the kernel be restarted?
        """
        # This body runs *after* the proxied KernelManager.shutdown_kernel
        # call (see kernel_method); here it logs and drops our reference.
        self.log.info("Kernel shutdown: %s" % kernel_id)
        self.remove_kernel(kernel_id)
    @kernel_method
    def request_shutdown(self, kernel_id, restart=False):
        """Ask a kernel to shut down by its kernel uuid"""
    @kernel_method
    def finish_shutdown(self, kernel_id, waittime=1, pollinterval=0.1):
        """Wait for a kernel to finish shutting down, and kill it if it doesn't
        """
        self.log.info("Kernel shutdown: %s" % kernel_id)
    @kernel_method
    def cleanup(self, kernel_id, connection_file=True):
        """Clean up a kernel's resources"""
    def remove_kernel(self, kernel_id):
        """remove a kernel from our mapping.
        Mainly so that a kernel can be removed if it is already dead,
        without having to call shutdown_kernel.
        The kernel object is returned.
        """
        return self._kernels.pop(kernel_id)
    def shutdown_all(self, now=False):
        """Shutdown all kernels."""
        # Two passes: request all shutdowns first, then wait/cleanup, so
        # kernels shut down concurrently rather than one at a time.
        kids = self.list_kernel_ids()
        for kid in kids:
            self.request_shutdown(kid)
        for kid in kids:
            self.finish_shutdown(kid)
            self.cleanup(kid)
            self.remove_kernel(kid)
    @kernel_method
    def interrupt_kernel(self, kernel_id):
        """Interrupt (SIGINT) the kernel by its uuid.
        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel to interrupt.
        """
        self.log.info("Kernel interrupted: %s" % kernel_id)
    @kernel_method
    def signal_kernel(self, kernel_id, signum):
        """Sends a signal to the kernel by its uuid.
        Note that since only SIGTERM is supported on Windows, this function
        is only useful on Unix systems.
        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel to signal.
        """
        self.log.info("Signaled Kernel %s with %s" % (kernel_id, signum))
    @kernel_method
    def restart_kernel(self, kernel_id, now=False):
        """Restart a kernel by its uuid, keeping the same ports.
        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel to interrupt.
        """
        self.log.info("Kernel restarted: %s" % kernel_id)
    # NOTE: methods below with empty bodies are pure proxies — kernel_method
    # performs the per-kernel call and returns its result; the empty body
    # contributes only documentation.
    @kernel_method
    def is_alive(self, kernel_id):
        """Is the kernel alive.
        This calls KernelManager.is_alive() which calls Popen.poll on the
        actual kernel subprocess.
        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel.
        """
    def _check_kernel_id(self, kernel_id):
        """check that a kernel id is valid"""
        if kernel_id not in self:
            raise KeyError("Kernel with id not found: %s" % kernel_id)
    def get_kernel(self, kernel_id):
        """Get the single KernelManager object for a kernel by its uuid.
        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel.
        """
        self._check_kernel_id(kernel_id)
        return self._kernels[kernel_id]
    @kernel_method
    def add_restart_callback(self, kernel_id, callback, event='restart'):
        """add a callback for the KernelRestarter"""
    @kernel_method
    def remove_restart_callback(self, kernel_id, callback, event='restart'):
        """remove a callback for the KernelRestarter"""
    @kernel_method
    def get_connection_info(self, kernel_id):
        """Return a dictionary of connection data for a kernel.
        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel.
        Returns
        =======
        connection_dict : dict
            A dict of the information needed to connect to a kernel.
            This includes the ip address and the integer port
            numbers of the different channels (stdin_port, iopub_port,
            shell_port, hb_port).
        """
    @kernel_method
    def connect_iopub(self, kernel_id, identity=None):
        """Return a zmq Socket connected to the iopub channel.
        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel
        identity : bytes (optional)
            The zmq identity of the socket
        Returns
        =======
        stream : zmq Socket or ZMQStream
        """
    @kernel_method
    def connect_shell(self, kernel_id, identity=None):
        """Return a zmq Socket connected to the shell channel.
        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel
        identity : bytes (optional)
            The zmq identity of the socket
        Returns
        =======
        stream : zmq Socket or ZMQStream
        """
    @kernel_method
    def connect_stdin(self, kernel_id, identity=None):
        """Return a zmq Socket connected to the stdin channel.
        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel
        identity : bytes (optional)
            The zmq identity of the socket
        Returns
        =======
        stream : zmq Socket or ZMQStream
        """
    @kernel_method
    def connect_hb(self, kernel_id, identity=None):
        """Return a zmq Socket connected to the hb channel.
        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel
        identity : bytes (optional)
            The zmq identity of the socket
        Returns
        =======
        stream : zmq Socket or ZMQStream
        """
| {
"repo_name": "ArcherSys/ArcherSys",
"path": "Lib/site-packages/jupyter_client/multikernelmanager.py",
"copies": "6",
"size": "9790",
"license": "mit",
"hash": 802074518801046400,
"line_mean": 29.8832807571,
"line_max": 82,
"alpha_frac": 0.5994892748,
"autogenerated": false,
"ratio": 4.303296703296703,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0015892852386137923,
"num_lines": 317
} |
"""A kernel manager relating notebooks and kernels
Authors:
* Brian Granger
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2013 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from tornado import web
from IPython.kernel.multikernelmanager import MultiKernelManager
from IPython.utils.traitlets import (
Dict, List, Unicode,
)
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class MappingKernelManager(MultiKernelManager):
"""A KernelManager that handles notebook mapping and HTTP error handling"""
def _kernel_manager_class_default(self):
return "IPython.kernel.ioloop.IOLoopKernelManager"
kernel_argv = List(Unicode)
_notebook_mapping = Dict()
#-------------------------------------------------------------------------
# Methods for managing kernels and sessions
#-------------------------------------------------------------------------
def kernel_for_notebook(self, notebook_id):
"""Return the kernel_id for a notebook_id or None."""
return self._notebook_mapping.get(notebook_id)
def set_kernel_for_notebook(self, notebook_id, kernel_id):
"""Associate a notebook with a kernel."""
if notebook_id is not None:
self._notebook_mapping[notebook_id] = kernel_id
def notebook_for_kernel(self, kernel_id):
"""Return the notebook_id for a kernel_id or None."""
for notebook_id, kid in self._notebook_mapping.iteritems():
if kernel_id == kid:
return notebook_id
return None
def delete_mapping_for_kernel(self, kernel_id):
"""Remove the kernel/notebook mapping for kernel_id."""
notebook_id = self.notebook_for_kernel(kernel_id)
if notebook_id is not None:
del self._notebook_mapping[notebook_id]
def _handle_kernel_died(self, kernel_id):
"""notice that a kernel died"""
self.log.warn("Kernel %s died, removing from map.", kernel_id)
self.delete_mapping_for_kernel(kernel_id)
self.remove_kernel(kernel_id)
def start_kernel(self, notebook_id=None, **kwargs):
"""Start a kernel for a notebook an return its kernel_id.
Parameters
----------
notebook_id : uuid
The uuid of the notebook to associate the new kernel with. If this
is not None, this kernel will be persistent whenever the notebook
requests a kernel.
"""
kernel_id = self.kernel_for_notebook(notebook_id)
if kernel_id is None:
kwargs['extra_arguments'] = self.kernel_argv
kernel_id = super(MappingKernelManager, self).start_kernel(**kwargs)
self.set_kernel_for_notebook(notebook_id, kernel_id)
self.log.info("Kernel started: %s" % kernel_id)
self.log.debug("Kernel args: %r" % kwargs)
# register callback for failed auto-restart
self.add_restart_callback(kernel_id,
lambda : self._handle_kernel_died(kernel_id),
'dead',
)
else:
self.log.info("Using existing kernel: %s" % kernel_id)
return kernel_id
    def shutdown_kernel(self, kernel_id, now=False):
        """Shutdown a kernel by kernel_id"""
        # Let the base class stop the kernel process, then forget which
        # notebook it served so the notebook gets a fresh kernel next time.
        super(MappingKernelManager, self).shutdown_kernel(kernel_id, now=now)
        self.delete_mapping_for_kernel(kernel_id)
# override _check_kernel_id to raise 404 instead of KeyError
def _check_kernel_id(self, kernel_id):
"""Check a that a kernel_id exists and raise 404 if not."""
if kernel_id not in self:
raise web.HTTPError(404, u'Kernel does not exist: %s' % kernel_id)
| {
"repo_name": "noslenfa/tdjangorest",
"path": "uw/lib/python2.7/site-packages/IPython/html/services/kernels/kernelmanager.py",
"copies": "2",
"size": "4209",
"license": "apache-2.0",
"hash": -2850908001190806000,
"line_mean": 37.2636363636,
"line_max": 80,
"alpha_frac": 0.5433594678,
"autogenerated": false,
"ratio": 4.605032822757112,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.013263316403713844,
"num_lines": 110
} |
"""A kernel manager with a tornado IOLoop"""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
from tornado import ioloop
from zmq.eventloop.zmqstream import ZMQStream
from traitlets import (
Instance,
Type,
)
from jupyter_client.manager import KernelManager, AsyncKernelManager
from .restarter import IOLoopKernelRestarter, AsyncIOLoopKernelRestarter
def as_zmqstream(f):
    """Decorate a socket-returning method so it returns a ZMQStream instead.

    The wrapper calls *f* to obtain a raw zmq socket and registers it on
    ``self.loop`` by wrapping it in a ZMQStream.
    """
    def wrapped(self, *args, **kwargs):
        raw_socket = f(self, *args, **kwargs)
        return ZMQStream(raw_socket, self.loop)
    return wrapped
class IOLoopKernelManager(KernelManager):
    """A KernelManager whose ZMQ connections are tornado ZMQStreams.

    The inherited ``connect_*`` helpers are wrapped so they return
    ZMQStream objects registered on ``self.loop`` instead of raw sockets.
    """

    loop = Instance('tornado.ioloop.IOLoop')

    def _loop_default(self):
        # Default to the IOLoop of the current thread/context.
        return ioloop.IOLoop.current()

    restarter_class = Type(
        default_value=IOLoopKernelRestarter,
        klass=IOLoopKernelRestarter,
        help=(
            'Type of KernelRestarter to use. '
            'Must be a subclass of IOLoopKernelRestarter.\n'
            'Override this to customize how kernel restarts are managed.'
        ),
        config=True,
    )
    # Lazily created by start_restarter(); None until first needed.
    _restarter = Instance('jupyter_client.ioloop.IOLoopKernelRestarter', allow_none=True)

    def start_restarter(self):
        """Start restart monitoring (only if autorestart is on and a kernel runs)."""
        if self.autorestart and self.has_kernel:
            if self._restarter is None:
                # Create the restarter on first use, tied to our loop.
                self._restarter = self.restarter_class(
                    kernel_manager=self, loop=self.loop,
                    parent=self, log=self.log
                )
            self._restarter.start()

    def stop_restarter(self):
        """Stop restart monitoring, if a restarter was ever created."""
        if self.autorestart:
            if self._restarter is not None:
                self._restarter.stop()

    # Wrap the base connect_* helpers so they hand back ZMQStreams on our loop.
    connect_shell = as_zmqstream(KernelManager.connect_shell)
    connect_control = as_zmqstream(KernelManager.connect_control)
    connect_iopub = as_zmqstream(KernelManager.connect_iopub)
    connect_stdin = as_zmqstream(KernelManager.connect_stdin)
    connect_hb = as_zmqstream(KernelManager.connect_hb)
class AsyncIOLoopKernelManager(AsyncKernelManager):
    """Async counterpart of IOLoopKernelManager.

    Wires the kernel's ZMQ connections into a tornado IOLoop and manages an
    async restarter for automatic restart of dead kernels.
    """

    loop = Instance('tornado.ioloop.IOLoop')

    def _loop_default(self):
        # Default to the IOLoop of the current thread/context.
        return ioloop.IOLoop.current()

    restarter_class = Type(
        default_value=AsyncIOLoopKernelRestarter,
        klass=AsyncIOLoopKernelRestarter,
        help=(
            'Type of KernelRestarter to use. '
            # BUG FIX: the help text previously named AsyncIOLoopKernelManager,
            # contradicting the ``klass`` constraint declared just above.
            'Must be a subclass of AsyncIOLoopKernelRestarter.\n'
            'Override this to customize how kernel restarts are managed.'
        ),
        config=True,
    )
    # Lazily created by start_restarter(); None until first needed.
    _restarter = Instance('jupyter_client.ioloop.AsyncIOLoopKernelRestarter', allow_none=True)

    def start_restarter(self):
        """Start restart monitoring (only if autorestart is on and a kernel runs)."""
        if self.autorestart and self.has_kernel:
            if self._restarter is None:
                self._restarter = self.restarter_class(
                    kernel_manager=self, loop=self.loop,
                    parent=self, log=self.log
                )
            self._restarter.start()

    def stop_restarter(self):
        """Stop restart monitoring, if a restarter was ever created."""
        if self.autorestart:
            if self._restarter is not None:
                self._restarter.stop()

    # Wrap the base connect_* helpers so they hand back ZMQStreams on our loop.
    connect_shell = as_zmqstream(AsyncKernelManager.connect_shell)
    connect_control = as_zmqstream(AsyncKernelManager.connect_control)
    connect_iopub = as_zmqstream(AsyncKernelManager.connect_iopub)
    connect_stdin = as_zmqstream(AsyncKernelManager.connect_stdin)
    connect_hb = as_zmqstream(AsyncKernelManager.connect_hb)
| {
"repo_name": "sserrot/champion_relationships",
"path": "venv/Lib/site-packages/jupyter_client/ioloop/manager.py",
"copies": "1",
"size": "3398",
"license": "mit",
"hash": 2793422577878640000,
"line_mean": 32.3137254902,
"line_max": 94,
"alpha_frac": 0.65891701,
"autogenerated": false,
"ratio": 3.964994165694282,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5123911175694282,
"avg_score": null,
"num_lines": null
} |
"""A kernel manager with a tornado IOLoop"""
#-----------------------------------------------------------------------------
# Copyright (C) 2013 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from __future__ import absolute_import
from zmq.eventloop import ioloop
from zmq.eventloop.zmqstream import ZMQStream
from IPython.utils.traitlets import (
Instance
)
from IPython.kernel.manager import KernelManager
from .restarter import IOLoopKernelRestarter
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
def as_zmqstream(f):
    """Wrap a socket-returning method so callers receive a ZMQStream.

    The raw zmq socket produced by *f* is attached to ``self.loop``.
    """
    def wrapped(self, *args, **kwargs):
        raw_socket = f(self, *args, **kwargs)
        return ZMQStream(raw_socket, self.loop)
    return wrapped
class IOLoopKernelManager(KernelManager):
    """A KernelManager whose ZMQ connections are returned as ZMQStreams.

    The inherited ``connect_*`` helpers are wrapped so they return streams
    registered on ``self.loop`` instead of raw zmq sockets.
    """

    loop = Instance('zmq.eventloop.ioloop.IOLoop', allow_none=False)
    def _loop_default(self):
        # Use the process-wide singleton IOLoop by default.
        return ioloop.IOLoop.instance()

    # Lazily created by start_restarter(); None until first needed.
    _restarter = Instance('IPython.kernel.ioloop.IOLoopKernelRestarter')

    def start_restarter(self):
        """Start restart monitoring (only if autorestart is on and a kernel runs)."""
        if self.autorestart and self.has_kernel:
            if self._restarter is None:
                # Create the restarter on first use, tied to our loop.
                self._restarter = IOLoopKernelRestarter(
                    kernel_manager=self, loop=self.loop,
                    parent=self, log=self.log
                )
            self._restarter.start()

    def stop_restarter(self):
        """Stop restart monitoring, if a restarter was ever created."""
        if self.autorestart:
            if self._restarter is not None:
                self._restarter.stop()

    # Wrap the base connect_* helpers so they hand back ZMQStreams on our loop.
    connect_shell = as_zmqstream(KernelManager.connect_shell)
    connect_iopub = as_zmqstream(KernelManager.connect_iopub)
    connect_stdin = as_zmqstream(KernelManager.connect_stdin)
    connect_hb = as_zmqstream(KernelManager.connect_hb)
| {
"repo_name": "wolfram74/numerical_methods_iserles_notes",
"path": "venv/lib/python2.7/site-packages/IPython/kernel/ioloop/manager.py",
"copies": "14",
"size": "2141",
"license": "mit",
"hash": -3314566430515189000,
"line_mean": 33.5322580645,
"line_max": 78,
"alpha_frac": 0.5296590378,
"autogenerated": false,
"ratio": 4.76837416481069,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0021651878954648558,
"num_lines": 62
} |
"""A kernel manager with a tornado IOLoop"""
#-----------------------------------------------------------------------------
# Copyright (c) The Jupyter Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from __future__ import absolute_import
from zmq.eventloop import ioloop
from zmq.eventloop.zmqstream import ZMQStream
from traitlets import (
Instance
)
from jupyter_client.manager import KernelManager
from .restarter import IOLoopKernelRestarter
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
def as_zmqstream(f):
    """Turn a socket-returning method into one that returns a ZMQStream.

    The stream is registered on ``self.loop``.
    """
    def wrapped(self, *args, **kwargs):
        raw_socket = f(self, *args, **kwargs)
        return ZMQStream(raw_socket, self.loop)
    return wrapped
class IOLoopKernelManager(KernelManager):
    """A KernelManager whose ZMQ connections are returned as ZMQStreams.

    The inherited ``connect_*`` helpers are wrapped so they return streams
    registered on ``self.loop`` instead of raw zmq sockets.
    """

    loop = Instance('zmq.eventloop.ioloop.IOLoop')

    def _loop_default(self):
        # Use the process-wide singleton IOLoop by default.
        return ioloop.IOLoop.instance()

    # Lazily created by start_restarter(); None until first needed.
    _restarter = Instance('jupyter_client.ioloop.IOLoopKernelRestarter', allow_none=True)

    def start_restarter(self):
        """Start restart monitoring (only if autorestart is on and a kernel runs)."""
        if self.autorestart and self.has_kernel:
            if self._restarter is None:
                # Create the restarter on first use, tied to our loop.
                self._restarter = IOLoopKernelRestarter(
                    kernel_manager=self, loop=self.loop,
                    parent=self, log=self.log
                )
            self._restarter.start()

    def stop_restarter(self):
        """Stop restart monitoring, if a restarter was ever created."""
        if self.autorestart:
            if self._restarter is not None:
                self._restarter.stop()

    # Wrap the base connect_* helpers so they hand back ZMQStreams on our loop.
    connect_shell = as_zmqstream(KernelManager.connect_shell)
    connect_iopub = as_zmqstream(KernelManager.connect_iopub)
    connect_stdin = as_zmqstream(KernelManager.connect_stdin)
    connect_hb = as_zmqstream(KernelManager.connect_hb)
| {
"repo_name": "ammarkhann/FinalSeniorCode",
"path": "lib/python2.7/site-packages/jupyter_client/ioloop/manager.py",
"copies": "12",
"size": "2120",
"license": "mit",
"hash": -6897679466263419000,
"line_mean": 33.1935483871,
"line_max": 89,
"alpha_frac": 0.5268867925,
"autogenerated": false,
"ratio": 4.732142857142857,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": null,
"num_lines": null
} |
# Keyboard key-code constants for bgui widgets.
# NOTE(review): the numeric codes appear to mirror the Blender Game Engine's
# event codes (repo path suggests bgui/key_defs.py) -- confirm against
# bge.events before relying on specific values outside the BGE.

# Letter keys map to their lowercase character.
AKEY = 'a'
BKEY = 'b'
CKEY = 'c'
DKEY = 'd'
EKEY = 'e'
FKEY = 'f'
GKEY = 'g'
HKEY = 'h'
IKEY = 'i'
JKEY = 'j'
KKEY = 'k'
LKEY = 'l'
MKEY = 'm'
NKEY = 'n'
OKEY = 'o'
PKEY = 'p'
QKEY = 'q'
RKEY = 'r'
SKEY = 's'
TKEY = 't'
UKEY = 'u'
VKEY = 'v'
WKEY = 'w'
XKEY = 'x'
YKEY = 'y'
ZKEY = 'z'

# Top-row digit keys map to their character.
ZEROKEY = '0'
ONEKEY = '1'
TWOKEY = '2'
THREEKEY = '3'
FOURKEY = '4'
FIVEKEY = '5'
SIXKEY = '6'
SEVENKEY = '7'
EIGHTKEY = '8'
NINEKEY = '9'

# Modifier keys (numeric codes).
CAPSLOCKKEY = 211
LEFTCTRLKEY = 212
LEFTALTKEY = 213
RIGHTALTKEY = 214
RIGHTCTRLKEY = 215
RIGHTSHIFTKEY = 216
LEFTSHIFTKEY = 217

# Whitespace, editing, and punctuation keys.
ESCKEY = 218
TABKEY = 219
RETKEY = ENTERKEY = 220
SPACEKEY = 221
LINEFEEDKEY = 222
BACKSPACEKEY = 223
DELKEY = 224
SEMICOLONKEY = 225
PERIODKEY = 226
COMMAKEY = 227
QUOTEKEY = 228
ACCENTGRAVEKEY = 229
MINUSKEY = 230
SLASHKEY = 232
BACKSLASHKEY = 233
EQUALKEY = 234
LEFTBRACKETKEY = 235
RIGHTBRACKETKEY = 236

# Arrow keys.
LEFTARROWKEY = 137
DOWNARROWKEY = 138
RIGHTARROWKEY = 139
UPARROWKEY = 140

# Numeric keypad.
PAD0 = 150
PAD1 = 151
PAD2 = 152
PAD3 = 153
PAD4 = 154
PAD5 = 155
PAD6 = 156
PAD7 = 157
PAD8 = 158
PAD9 = 159
PADPERIOD = 199
PADSLASHKEY = 161
PADASTERKEY = 160
PADMINUS = 162
PADENTER = 163
PADPLUSKEY = 164

# Function keys.
F1KEY = 300
F2KEY = 301
F3KEY = 302
F4KEY = 303
F5KEY = 304
F6KEY = 305
F7KEY = 306
F8KEY = 307
F9KEY = 308
F10KEY = 309
F11KEY = 310
F12KEY = 311
F13KEY = 312
F14KEY = 313
F15KEY = 314
F16KEY = 315
F17KEY = 316
F18KEY = 317
F19KEY = 318

# Navigation and miscellaneous keys.
PAUSEKEY = 165
INSERTKEY = 166
HOMEKEY = 167
PAGEUPKEY = 168
PAGEDOWNKEY = 169
ENDKEY = 170
OSKEY = 172
| {
"repo_name": "Remwrath/bgui",
"path": "bgui/key_defs.py",
"copies": "9",
"size": "1501",
"license": "mit",
"hash": 2494647455241124400,
"line_mean": 11.6134453782,
"line_max": 23,
"alpha_frac": 0.6568954031,
"autogenerated": false,
"ratio": 2.1022408963585435,
"config_test": false,
"has_no_keywords": true,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 119
} |
"""A key-value[] store that implements reservoir sampling on the values."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import random
import threading
class Reservoir(object):
  """A key-to-list container that performs deterministic reservoir sampling.

  Each key maps to a bounded list of items. Once more than ``size`` items have
  been added under a key, old items are replaced via reservoir sampling, so
  every item seen has an equal probability of being retained.

  Sampling is deterministic: for a fixed seed and bucket size, the retained
  sequence for any given key depends only on the insertions for that key and
  never on insertions under other keys. That is:

  >>> separate_reservoir = reservoir.Reservoir(10)
  >>> interleaved_reservoir = reservoir.Reservoir(10)
  >>> for i in xrange(100):
  >>>   separate_reservoir.AddItem('key1', i)
  >>> for i in xrange(100):
  >>>   separate_reservoir.AddItem('key2', i)
  >>> for i in xrange(100):
  >>>   interleaved_reservoir.AddItem('key1', i)
  >>>   interleaved_reservoir.AddItem('key2', i)

  separate_reservoir and interleaved_reservoir will be in identical states.

  See: https://en.wikipedia.org/wiki/Reservoir_sampling

  Adding items has amortized O(1) runtime.
  """

  def __init__(self, size, seed=0):
    """Creates a new reservoir.

    Args:
      size: Number of values retained per key. If 0, everything is kept.
      seed: Seed for the per-bucket random number generator. Different seeds
        produce different samples from the same input items.

    Raises:
      ValueError: If size is negative or not an integer.
    """
    if size < 0 or size != round(size):
      raise ValueError('size must be nonegative integer, was %s' % size)
    bucket_factory = lambda: _ReservoirBucket(size, random.Random(seed))
    self._buckets = collections.defaultdict(bucket_factory)
    # This lock guards the key space (creating/looking up buckets); each
    # bucket protects its own items with its internal mutex.
    self._mutex = threading.Lock()

  def Keys(self):
    """Return all the keys in the reservoir.

    Returns:
      ['list', 'of', 'keys'] in the Reservoir.
    """
    with self._mutex:
      return [key for key in self._buckets]

  def Items(self, key):
    """Return items associated with given key.

    Args:
      key: The key for which we are finding associated items.

    Raises:
      KeyError: If the key is not found in the reservoir.

    Returns:
      [list, of, items] associated with that key.
    """
    with self._mutex:
      # Membership check first: indexing the defaultdict directly would
      # silently create an empty bucket for unknown keys.
      if key not in self._buckets:
        raise KeyError('Key %s was not found in Reservoir' % key)
      return self._buckets[key].Items()

  def AddItem(self, key, item):
    """Add a new item to the Reservoir with the given tag.

    The new item is guaranteed to be kept in the Reservoir. One other item
    might be replaced.

    Args:
      key: The key to store the item under.
      item: The item to add to the reservoir.
    """
    with self._mutex:
      self._buckets[key].AddItem(item)

  def FilterItems(self, filterFn):
    """Filter items within a Reservoir, using a filtering function.

    Args:
      filterFn: A function that returns True for the items to be kept.

    Returns:
      The number of items removed.
    """
    with self._mutex:
      removed = 0
      for bucket in self._buckets.values():
        removed += bucket.FilterItems(filterFn)
      return removed
class _ReservoirBucket(object):
"""A container for items from a stream, that implements reservoir sampling.
It always stores the most recent item as its final item.
"""
def __init__(self, _max_size, _random=None):
"""Create the _ReservoirBucket.
Args:
_max_size: The maximum size the reservoir bucket may grow to. If size is
zero, the bucket has unbounded size.
_random: The random number generator to use. If not specified, defaults to
random.Random(0).
Raises:
ValueError: if the size is not a nonnegative integer.
"""
if _max_size < 0 or _max_size != round(_max_size):
raise ValueError('_max_size must be nonegative int, was %s' % _max_size)
self.items = []
# This mutex protects the internal items, ensuring that calls to Items and
# AddItem are thread-safe
self._mutex = threading.Lock()
self._max_size = _max_size
self._num_items_seen = 0
if _random is not None:
self._random = _random
else:
self._random = random.Random(0)
def AddItem(self, item):
"""Add an item to the ReservoirBucket, replacing an old item if necessary.
The new item is guaranteed to be added to the bucket, and to be the last
element in the bucket. If the bucket has reached capacity, then an old item
will be replaced. With probability (_max_size/_num_items_seen) a random item
in the bucket will be popped out and the new item will be appended
to the end. With probability (1 - _max_size/_num_items_seen)
the last item in the bucket will be replaced.
Since the O(n) replacements occur with O(1/_num_items_seen) likelihood,
the amortized runtime is O(1).
Args:
item: The item to add to the bucket.
"""
with self._mutex:
if len(self.items) < self._max_size or self._max_size == 0:
self.items.append(item)
else:
r = self._random.randint(0, self._num_items_seen)
if r < self._max_size:
self.items.pop(r)
self.items.append(item)
else:
self.items[-1] = item
self._num_items_seen += 1
def FilterItems(self, filterFn):
"""Filter items in a ReservoirBucket, using a filtering function.
Filtering items from the reservoir bucket must update the
internal state variable self._num_items_seen, which is used for determining
the rate of replacement in reservoir sampling. Ideally, self._num_items_seen
would contain the exact number of items that have ever seen by the
ReservoirBucket and satisfy filterFn. However, the ReservoirBucket does not
have access to all items seen -- it only has access to the subset of items
that have survived sampling (self.items). Therefore, we estimate
self._num_items_seen by scaling it by the same ratio as the ratio of items
not removed from self.items.
Args:
filterFn: A function that returns True for items to be kept.
Returns:
The number of items removed from the bucket.
"""
with self._mutex:
size_before = len(self.items)
self.items = filter(filterFn, self.items)
size_diff = size_before - len(self.items)
# Estimate a correction the the number of items seen
prop_remaining = len(self.items) / float(
size_before) if size_before > 0 else 0
self._num_items_seen = int(round(self._num_items_seen * prop_remaining))
return size_diff
def Items(self):
"""Get all the items in the bucket."""
with self._mutex:
return self.items
| {
"repo_name": "kcartier/tensorflow-toe-in-the-water",
"path": "tensorflow/python/summary/impl/reservoir.py",
"copies": "1",
"size": "7300",
"license": "apache-2.0",
"hash": 6131645538671578000,
"line_mean": 33.5971563981,
"line_max": 80,
"alpha_frac": 0.6723287671,
"autogenerated": false,
"ratio": 3.860391327340032,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.005049298402385667,
"num_lines": 211
} |
"""A key-value[] store that implements reservoir sampling on the values."""
import collections
import random
import threading
class Reservoir(object):
  """A map-to-arrays container, with deterministic Reservoir Sampling.

  Items are added with an associated key. Items may be retrieved by key, and
  a list of keys can also be retrieved. If size is not zero, then it dictates
  the maximum number of items that will be stored with each key. Once there are
  more items for a given key, they are replaced via reservoir sampling, such
  that each item has an equal probability of being included in the sample.

  Deterministic means that for any given seed and bucket size, the sequence of
  values that are kept for any given tag will always be the same, and that this
  is independent of any insertions on other tags. That is:

  >>> separate_reservoir = reservoir.Reservoir(10)
  >>> interleaved_reservoir = reservoir.Reservoir(10)
  >>> for i in xrange(100):
  >>>   separate_reservoir.AddItem('key1', i)
  >>> for i in xrange(100):
  >>>   separate_reservoir.AddItem('key2', i)
  >>> for i in xrange(100):
  >>>   interleaved_reservoir.AddItem('key1', i)
  >>>   interleaved_reservoir.AddItem('key2', i)

  separate_reservoir and interleaved_reservoir will be in identical states.

  See: https://en.wikipedia.org/wiki/Reservoir_sampling

  Adding items has amortized O(1) runtime.
  """

  def __init__(self, size, seed=0):
    """Creates a new reservoir.

    Args:
      size: The number of values to keep in the reservoir for each tag. If 0,
        all values will be kept.
      seed: The seed of the random number generator to use when sampling.
        Different values for |seed| will produce different samples from the same
        input items.

    Raises:
      ValueError: If size is negative or not an integer.
    """
    if size < 0 or size != round(size):
      raise ValueError('size must be nonegative integer, was %s' % size)
    self._buckets = collections.defaultdict(
        lambda: _ReservoirBucket(size, random.Random(seed)))
    # _mutex guards the keys - creating new keys, retrieving by key, etc
    # the internal items are guarded by the ReservoirBuckets' internal mutexes
    self._mutex = threading.Lock()

  def Keys(self):
    """Return all the keys in the reservoir.

    Returns:
      ['list', 'of', 'keys'] in the Reservoir.
    """
    with self._mutex:
      # NOTE(review): under Python 2 dict.keys() returns a list copy; under
      # Python 3 this would be a live view -- confirm before porting.
      return self._buckets.keys()

  def Items(self, key):
    """Return items associated with given key.

    Args:
      key: The key for which we are finding associated items.

    Raises:
      KeyError: If the key is not found in the reservoir.

    Returns:
      [list, of, items] associated with that key.
    """
    with self._mutex:
      # Membership check first: indexing the defaultdict directly would
      # silently create an empty bucket for unknown keys.
      if key not in self._buckets:
        raise KeyError('Key %s was not found in Reservoir' % key)
      bucket = self._buckets[key]
      return bucket.Items()

  def AddItem(self, key, item):
    """Add a new item to the Reservoir with the given tag.

    The new item is guaranteed to be kept in the Reservoir. One other item might
    be replaced.

    Args:
      key: The key to store the item under.
      item: The item to add to the reservoir.
    """
    with self._mutex:
      bucket = self._buckets[key]
      bucket.AddItem(item)
class _ReservoirBucket(object):
"""A container for items from a stream, that implements reservoir sampling.
It always stores the most recent item as its final item.
"""
def __init__(self, _max_size, _random=None):
"""Create the _ReservoirBucket.
Args:
_max_size: The maximum size the reservoir bucket may grow to. If size is
zero, the bucket has unbounded size.
_random: The random number generator to use. If not specified, defaults to
random.Random(0).
Raises:
ValueError: if the size is not a nonnegative integer.
"""
if _max_size < 0 or _max_size != round(_max_size):
raise ValueError('_max_size must be nonegative int, was %s' % _max_size)
self.items = []
# This mutex protects the internal items, ensuring that calls to Items and
# AddItem are thread-safe
self._mutex = threading.Lock()
self._max_size = _max_size
self._count = 0
if _random is not None:
self._random = _random
else:
self._random = random.Random(0)
def AddItem(self, item):
"""Add an item to the ReservoirBucket, replacing an old item if necessary.
The new item is guaranteed to be added to the bucket, and to be the last
element in the bucket. If the bucket has reached capacity, then an old item
will be replaced. With probability (_max_size/_count) a random item in the
bucket will be popped out and the new item will be appended to the end. With
probability (1 - _max_size/_count) the last item in the bucket will be
replaced.
Since the O(n) replacements occur with O(1/_count) liklihood, the amortized
runtime is O(1).
Args:
item: The item to add to the bucket.
"""
with self._mutex:
if len(self.items) < self._max_size or self._max_size == 0:
self.items.append(item)
else:
r = self._random.randint(0, self._count)
if r < self._max_size:
self.items.pop(r)
self.items.append(item)
else:
self.items[-1] = item
self._count += 1
def Items(self):
"""Get all the items in the bucket."""
with self._mutex:
return self.items
| {
"repo_name": "brendandburns/tensorflow",
"path": "tensorflow/python/summary/impl/reservoir.py",
"copies": "5",
"size": "5458",
"license": "apache-2.0",
"hash": 8486158812770790000,
"line_mean": 32.2804878049,
"line_max": 80,
"alpha_frac": 0.6637962624,
"autogenerated": false,
"ratio": 3.7955493741307373,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.005045432334457443,
"num_lines": 164
} |
"""Akhet installation script.
"""
import os
from setuptools import setup
from setuptools import find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, "README.txt")).read()
README = README.split("\n\n", 1)[0] + "\n"
CHANGES = open(os.path.join(here, "CHANGES.txt")).read()
requires = [
"pyramid",
]
entry_points = """
[paste.paster_create_template]
akhet=akhet.paster_templates:AkhetProjectTemplate
"""
setup(name="Akhet",
version="1.0.2",
description="Pyramid application templates inspired by Pylons 1.",
long_description=README,
#long_description=README + "\n\n" + CHANGES,
classifiers=[
"Intended Audience :: Developers",
"Framework :: Pylons",
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
],
keywords="web wsgi pylons pyramid",
author="Mike Orr",
author_email="sluggoster@gmail.com",
url="https://bitbucket.org/sluggo/akhet",
license="MIT",
packages=find_packages(),
include_package_data=True,
zip_safe=False,
tests_require = requires,
install_requires = requires,
test_suite="akhet",
entry_points=entry_points,
)
| {
"repo_name": "koansys/akhet",
"path": "setup.py",
"copies": "1",
"size": "1264",
"license": "mit",
"hash": -4693349044940925000,
"line_mean": 26.4782608696,
"line_max": 72,
"alpha_frac": 0.6289556962,
"autogenerated": false,
"ratio": 3.482093663911846,
"config_test": false,
"has_no_keywords": true,
"few_assignments": false,
"quality_score": 0.46110493601118463,
"avg_score": null,
"num_lines": null
} |
# A thin Python client for the MercadoLibre (MELI) public REST API.
import json
import logging
import time
import requests
from requests_ssl_fix import SSLAdapter
from requests.adapters import HTTPAdapter
import ssl

# Silence urllib3 certificate warnings: make_call() below uses verify=False.
requests.packages.urllib3.disable_warnings()

# Shared session forced onto TLSv1 via the SSLAdapter workaround.
# NOTE(review): pinning PROTOCOL_TLSv1 looks like a server-compatibility
# workaround -- confirm the API endpoint still requires it.
req_s = requests.Session()
req_s.mount('https://', SSLAdapter(ssl.PROTOCOL_TLSv1))
#req_s = requests.Session()
#req_s.mount('https://', HTTPAdapter(max_retries=8))

BASE_URL = 'https://api.mercadolibre.com/'
BASE_SITE_URL = BASE_URL + 'sites/'
# Throttling delays between API calls / retries.
SLEEP_TIME = 0.1 #in seconds
ERROR_SLEEP_TIME = 0.1
class MeliAPI():
    """Minimal MercadoLibre REST API client with naive retry and throttling.

    All requests are plain unauthenticated GETs against the public API.
    Python 2 only (uses ``except X, e`` and ``print`` statement syntax).
    """

    def __init__(self):
        # Per-instance file logger; all request URLs and errors go to it.
        logger = logging.getLogger(__name__)
        logger.setLevel(logging.INFO)
        # create a file handler
        handler = logging.FileHandler('logs/mapi.log')
        handler.setLevel(logging.INFO)
        # create a logging format
        formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        handler.setFormatter(formatter)
        # add the handlers to the logger
        logger.addHandler(handler)
        self.logger = logger

    #TODO: replace it with requests retry
    def make_call(self, url, params=None):
        # GET *url* with up to 10 attempts and linear backoff; returns the
        # decoded JSON body, or None if all attempts fail or yield no data.
        # NOTE(review): `params` is accepted but never used -- confirm intent.
        time.sleep(SLEEP_TIME)
        for i in range(10):
            if i != 0:
                self.logger.info("%s - retrying... %d" % (url,i))
                time.sleep(ERROR_SLEEP_TIME*i)
            try:
                # verify=False: TLS certificate checking disabled (see module
                # -level urllib3 warning suppression).
                res = requests.get(url, verify=False)
            except requests.ConnectionError, e:
                self.logger.info(e)
                continue
            if res.status_code == 200:
                #data = json.loads(res.text)
                data = res.json()
                if data:
                    return data
            continue

    #TODO: replace it with requests retry
    def make_call_v2(self, url, params=None):
        # Like make_call, but with TLS verification on and empty/falsy JSON
        # bodies returned as-is instead of triggering a retry.
        time.sleep(SLEEP_TIME)
        for i in range(10):
            if i != 0:
                self.logger.info("%s - retrying... %d" % (url,i))
                time.sleep(ERROR_SLEEP_TIME*i)
            try:
                res = requests.get(url)
            except requests.ConnectionError, e:
                self.logger.info(e)
                continue
            if res.status_code == 200:
                #data = json.loads(res.text)
                data = res.json()
                return data

    def get_seller_info(self, seller_id):
        # GET /users/{seller_id}
        url = BASE_URL + 'users/%s' % seller_id
        self.logger.info(url)
        data = self.make_call(url)
        return data

    def get_item_description(self, item_id):
        # GET /items/{item_id}/descriptions
        url = BASE_URL + 'items/%s/descriptions' % (item_id)
        print url
        self.logger.info(url)
        data = self.make_call_v2(url)
        return data

    def get_item_visits(self, item_id, date_from, date_to):
        # Visit counts for one item over a date range, e.g.:
        #https://api.mercadolibre.com/items/{Items_id}/visits?date_from=2014-06-01T00:00:00.000-00:00&date_to=2014-06-10T00:00:00.000-00:00'
        url = BASE_URL + 'items/%s/visits?&date_from=%s&date_to=%s' % (item_id, date_from, date_to)
        print url
        self.logger.info(url)
        data = self.make_call(url)
        return data

    def get_items_visits(self, ids_list, date_from, date_to): #bulk results
        # Bulk variant of get_item_visits: comma-joined ids in one request.
        #https://api.mercadolibre.com/items/{Items_id}/visits?date_from=2014-06-01T00:00:00.000-00:00&date_to=2014-06-10T00:00:00.000-00:00'
        url = BASE_URL + 'items/visits?&date_from=%s&date_to=%s&ids=%s' % (date_from, date_to, ",".join(ids_list))
        self.logger.info(url)
        data = self.make_call(url)
        return data

    def get_items_data(self, items_ids):
        #Retrieves the information of a list of items: GET/items?ids=:ids
        url = BASE_URL + 'items/?ids=%s' % ",".join(items_ids)
        self.logger.info(url[:50])
        # NOTE(review): loops forever until the response indexes like a list;
        # the bare except also swallows KeyboardInterrupt -- consider
        # narrowing to (TypeError, KeyError) and bounding the retries.
        while True:
            data = self.make_call(url)
            try:
                data[0]
            except:
                print "******************ERROR*********************"
                print url
                continue
            return data

    def search_by_category(self, site_id, cat_id, limit, offset):
        #get the category items
        url = BASE_SITE_URL + '%s/search?category=%s&limit=%s&offset=%s&condition=new' % (site_id, cat_id, limit, offset)
        self.logger.info(url)
        # NOTE(review): same unbounded retry / bare-except pattern as
        # get_items_data above.
        while True:
            data = self.make_call(url)
            try:
                data['results']
            except:
                print "****************ERROR********************"
                print url
                continue
            return data

    def search_item(self, site_id, query):
        # Free-text search within a site: GET /sites/{site_id}/search?q=...
        url = BASE_SITE_URL + '%s/search?q=%s' % (site_id, query)
        data = self.make_call(url)
        return data

    def get_category(self, cat_id):
        """Fetch category info: GET /categories/{cat_id}."""
        url = BASE_URL + 'categories/%s' % cat_id
        data = self.make_call(url)
        return data
| {
"repo_name": "mfalcon/meli-collection",
"path": "meli_api.py",
"copies": "1",
"size": "4980",
"license": "apache-2.0",
"hash": 269278827794773220,
"line_mean": 30.7197452229,
"line_max": 140,
"alpha_frac": 0.5335341365,
"autogenerated": false,
"ratio": 3.598265895953757,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9574782892342231,
"avg_score": 0.011403428022305241,
"num_lines": 157
} |
# A thin Python client for the MercadoLibre (MELI) public REST API.
import json
import logging
import time
import requests

BASE_URL = 'https://api.mercadolibre.com/'
BASE_SITE_URL = BASE_URL + 'sites/'
# Throttling delays between API calls / retries.
SLEEP_TIME = 0.05 #in seconds
ERROR_SLEEP_TIME = 0.1
class MeliAPI():
    """Minimal MercadoLibre REST API client bound to one site id.

    ``sid`` is the MELI site identifier (default 'MLA', Argentina) used to
    build /sites/{sid}/... URLs. Python 2 only (``except X, e`` and ``print``
    statement syntax).
    """

    def __init__(self, sid='MLA'):
        # Site id used by the /sites endpoints.
        self.sid = sid
        # Per-instance file logger; all request URLs and errors go to it.
        logger = logging.getLogger(__name__)
        logger.setLevel(logging.INFO)
        # create a file handler
        handler = logging.FileHandler('logs/mapi.log')
        handler.setLevel(logging.INFO)
        # create a logging format
        formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        handler.setFormatter(formatter)
        # add the handlers to the logger
        logger.addHandler(handler)
        self.logger = logger

    #TODO: replace it with requests retry
    def make_call(self, url, params=None):
        # GET *url* with up to 10 attempts and linear backoff; returns the
        # decoded JSON body, or None when every attempt fails.
        # NOTE(review): `params` is accepted but never used -- confirm intent.
        headers = {'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux i686; rv:24.0) Gecko/20100101 Firefox/24.0'}
        time.sleep(SLEEP_TIME)
        for i in range(10):
            if i != 0:
                self.logger.info("%s - retrying... %d" % (url,i))
                time.sleep(ERROR_SLEEP_TIME*i)
            try:
                res = requests.get(url, headers=headers)
            except requests.ConnectionError, e:
                self.logger.info(e)
                continue
            if res.status_code == 200:
                data = json.loads(res.text)
                return data
            continue
    '''
    def make_call(self, url):
        time.sleep(SLEEP_TIME)
        reqs_session = requests.Session()
        reqs_adapter = requests.adapters.HTTPAdapter(max_retries=5)
        reqs_session.mount('http://', reqs_adapter)
        reqs_session.mount('https://', reqs_adapter)
        res = reqs_session.get(url)
        if res.status_code == 200:
            data = json.loads(res.text)
            return data
        return None
    '''

    def get_seller_info(self, seller_id):
        # GET /users/{seller_id}
        url = BASE_URL + 'users/%s' % seller_id
        self.logger.info(url)
        data = self.make_call(url)
        return data

    def get_item_visits(self, item_id, date_from, date_to):
        # Visit counts for one item over a date range, e.g.:
        #https://api.mercadolibre.com/items/{Items_id}/visits?date_from=2014-06-01T00:00:00.000-00:00&date_to=2014-06-10T00:00:00.000-00:00'
        url = BASE_URL + 'items/%s/visits?&date_from=%s&date_to=%s' % (item_id, date_from, date_to)
        print url
        self.logger.info(url)
        data = self.make_call(url)
        return data

    def get_items_data(self, items_ids):
        #Retrieves the information of a list of items: GET/items?ids=:ids
        url = BASE_URL + 'items/?ids=%s' % ",".join(items_ids)
        self.logger.info(url[:50])
        data = self.make_call(url)
        return data

    def search_by_category(self, cat_id, limit, offset):
        #get the category items
        url = BASE_SITE_URL + '%s/search?category=%s&limit=%s&offset=%s&condition=new' % (self.sid, cat_id, limit, offset)
        self.logger.info(url)
        data = self.make_call(url)
        return data

    def search_item(self, query):
        # Free-text search within the configured site.
        url = BASE_SITE_URL + '%s/search?q=%s' % (self.sid, query)
        data = self.make_call(url)
        return data

    def get_category(self, cat_id):
        """Fetch category info: GET /categories/{cat_id}."""
        url = BASE_URL + 'categories/%s' % cat_id
        data = self.make_call(url)
        return data
| {
"repo_name": "mfalcon/melive",
"path": "meli_api.py",
"copies": "1",
"size": "3507",
"license": "mit",
"hash": 6308156812912320000,
"line_mean": 31.4722222222,
"line_max": 140,
"alpha_frac": 0.5625891075,
"autogenerated": false,
"ratio": 3.493027888446215,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4555616995946215,
"avg_score": null,
"num_lines": null
} |
"""Akismet spam checker backend for Zinnia"""
from django.conf import settings
from django.contrib.sites.models import Site
from django.core.exceptions import ImproperlyConfigured
from django.utils.encoding import smart_str
from zinnia.settings import PROTOCOL
from zinnia_akismet.api import APIKeyError
from zinnia_akismet.api import Akismet
# Fail fast at import time: this backend is unusable without a key.
if not getattr(settings, 'AKISMET_API_KEY', ''):
    raise ImproperlyConfigured('You have to set AKISMET_API_KEY')
# Cache the configured key at module level for the backend below.
AKISMET_API_KEY = settings.AKISMET_API_KEY
def backend(comment, content_object, request):
    """Check a Zinnia comment against the Akismet service.

    Returns True when Akismet classifies the comment as spam; raises
    APIKeyError if the configured key fails verification.
    """
    site_root = '%s://%s/' % (PROTOCOL, Site.objects.get_current().domain)
    client = Akismet(key=AKISMET_API_KEY, blog_url=site_root)
    if not client.verify_key():
        raise APIKeyError('Your Akismet API key is invalid.')
    meta = request.META
    payload = {
        'user_ip': meta.get('REMOTE_ADDR', ''),
        'user_agent': meta.get('HTTP_USER_AGENT', ''),
        'referrer': meta.get('HTTP_REFERER', 'unknown'),
        'permalink': content_object.get_absolute_url(),
        'comment_type': 'comment',
        'comment_author': smart_str(comment.name),
        'comment_author_email': smart_str(comment.email),
        'comment_author_url': smart_str(comment.url),
    }
    return client.comment_check(smart_str(comment.comment),
                                data=payload, build_data=True)
| {
"repo_name": "django-blog-zinnia/zinnia-spam-checker-akismet",
"path": "zinnia_akismet/akismet.py",
"copies": "1",
"size": "1469",
"license": "bsd-3-clause",
"hash": 160395757767265180,
"line_mean": 34.8292682927,
"line_max": 73,
"alpha_frac": 0.6739278421,
"autogenerated": false,
"ratio": 3.353881278538813,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9527809120638813,
"avg_score": 0,
"num_lines": 41
} |
"""Akismet spam checker backend for Zinnia"""
from django.conf import settings
from django.utils.encoding import smart_str
from django.contrib.sites.models import Site
from django.core.exceptions import ImproperlyConfigured
from zinnia.settings import PROTOCOL
# The external ``akismet`` package is a hard requirement; convert a
# missing package into a Django configuration error at import time.
try:
    from akismet import Akismet
    from akismet import APIKeyError
except ImportError:
    raise ImproperlyConfigured('akismet module is not available')
# Fail fast when the secret API key is missing from settings.
if not getattr(settings, 'AKISMET_SECRET_API_KEY', ''):
    raise ImproperlyConfigured('You have to set AKISMET_SECRET_API_KEY')
# Cache the configured key at module level for the backend below.
AKISMET_API_KEY = settings.AKISMET_SECRET_API_KEY
def backend(comment, content_object, request):
    """Akismet spam checker backend for Zinnia (legacy ``userinfo`` API).

    Returns True when Akismet classifies the comment as spam; raises
    APIKeyError if the configured key fails verification.
    """
    site_root = '%s://%s/' % (PROTOCOL, Site.objects.get_current().domain)
    client = Akismet(key=AKISMET_API_KEY, blog_url=site_root)
    if not client.verify_key():
        raise APIKeyError('Your Akismet API key is invalid.')
    meta = request.META
    info = comment.userinfo
    payload = {
        'user_ip': meta.get('REMOTE_ADDR', ''),
        'user_agent': meta.get('HTTP_USER_AGENT', ''),
        'referrer': meta.get('HTTP_REFERER', 'unknown'),
        'permalink': content_object.get_absolute_url(),
        'comment_type': 'comment',
        'comment_author': smart_str(info.get('name', '')),
        'comment_author_email': smart_str(info.get('email', '')),
        'comment_author_url': smart_str(info.get('url', '')),
    }
    return client.comment_check(smart_str(comment.comment),
                                data=payload, build_data=True)
| {
"repo_name": "westinedu/similarinterest",
"path": "zinnia/spam_checker/backends/automattic.py",
"copies": "3",
"size": "1616",
"license": "bsd-3-clause",
"hash": -8902216685147546000,
"line_mean": 37.4761904762,
"line_max": 77,
"alpha_frac": 0.6775990099,
"autogenerated": false,
"ratio": 3.4309978768577496,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.560859688675775,
"avg_score": null,
"num_lines": null
} |
# Akismet spam filtering: http://sciyoshi.com/blog/2009/jul/17/prevent-django-newcomments-spam-akismet-reloaded/
from django.contrib.comments.moderation import CommentModerator, moderator, AlreadyModerated
from django.contrib.sites.models import Site
from django.conf import settings
class AkismetModerator(CommentModerator):
    """Comment moderator that flags spam via the TypePad AntiSpam
    (Akismet-compatible) service.

    Approach based on:
    http://sciyoshi.com/blog/2009/jul/17/prevent-django-newcomments-spam-akismet-reloaded/
    """
    email_notification = True

    def check_spam(self, request, comment, key, blog_url=None, base_url=None):
        """Return True when the service classifies ``comment`` as spam.

        Degrades gracefully: returns False (treat as ham) when the
        ``akismet`` package is unavailable or the key fails to verify.
        """
        try:
            from akismet import Akismet
        except ImportError:
            # fix: was a bare ``except:`` which also swallowed
            # SystemExit / KeyboardInterrupt; only a missing package
            # should silently disable the check.
            return False
        if blog_url is None:
            blog_url = 'http://%s/' % Site.objects.get_current().domain
        ak = Akismet(key=key, blog_url=blog_url)
        if base_url is not None:
            ak.baseurl = base_url
        else:
            # default to the TypePad AntiSpam endpoint (Akismet-compatible)
            ak.baseurl = 'api.antispam.typepad.com/1.1/'
        if ak.verify_key():
            data = {
                'user_ip': request.META.get('HTTP_X_FORWARDED_FOR', '127.0.0.1'),
                'user_agent': request.META.get('HTTP_USER_AGENT', ''),
                'referrer': request.META.get('HTTP_REFERER', ''),
                'comment_type': 'comment',
                'comment_author': comment.user_name.encode('utf-8')
            }
            if ak.comment_check(comment.comment.encode('utf-8'), data=data, build_data=True):
                return True
        return False

    def allow(self, comment, content_object, request):
        """Allow the comment only if the base class allows it AND it is
        not classified as spam."""
        allow = super(AkismetModerator, self).allow(comment, content_object, request)
        spam = self.check_spam(request, comment, key=settings.TYPEPAD_ANTISPAM_API_KEY)
        return not spam and allow
# try:
# moderator.register(Entry, EntryModerator)
# except AlreadyModerated:
# moderator.unregister(Entry)
# moderator.register(Entry, EntryModerator)
| {
"repo_name": "albatrossandco/brubeck_cms",
"path": "brubeck/core/moderation.py",
"copies": "1",
"size": "1918",
"license": "bsd-3-clause",
"hash": -5090757203409511000,
"line_mean": 36.36,
"line_max": 112,
"alpha_frac": 0.5849843587,
"autogenerated": false,
"ratio": 3.7315175097276265,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.969134624919619,
"avg_score": 0.02503112384628721,
"num_lines": 50
} |
# A.Klimentov Mar 3, 2014
#
# DEFT tasks handling
#
# Mar 15, 2014. Add json
# Mar 19, 2014. Add JEDI Client.py
# Mar 20, 2014. Dataset tables synchronization
# Ignore tasks and datasets with ID < 4 000 000
# Mar 24, 2014. Requests state update vs Task state
# Mar 31, 2014. Add taskinfo into task partition table
# Apr 03, 2014. t_production_task partitioned
# Apr 27, 2014. add SSO
# May 14, 2014. move production containers handling
# add Rucio clients to a separate file
# June 2, 2014. Set current priority = priority if current_priority=None
# June 12, 2014. changes in DEFT/JEDI synchronization part
#
# Last Edit : Jun 12, 2014 ak
#
import re
import sys
import os
import getopt
import commands
import datetime
import time
import cx_Oracle
import simplejson as json
# set path and import JEDI client
sys.path.append('/data/atlswing/site-packages')
import jedi.client
import deft_conf
import deft_pass
import deft_user_list
import DButils
#-
import base64
import cookielib
import requests
from random import choice
from pprint import pprint
import cernsso
#-
# Global flags and state tables shared by the helpers below.
verbose = False
# NOTE: the comma-separated state strings are probed with str.find()
# elsewhere, so SUBSTRING matches are possible (e.g. 'finish' also
# matches 'finished') -- keep that in mind when editing them.
task_finish_states = 'done,finish,failed,obsolete,aborted'
task_aborted_states = 'aborted,failed,obsolete'
datasets_deletion_states = 'toBeDeleted,Deleted'
user_task_label = 'user' # JEDI prodsourcelabel parameter
# State lists (suffix 'L' = Python list form, joined into SQL IN-lists).
JEDI_datasets_final_statesL = ['aborted','broken','done','failed','partial','ready']
DEFT_datasets_done_statesL = ['done']
DEFT_datasets_final_statesL = \
['aborted','broken','done','failed','deleted,toBeDeleted,toBeErased,waitErased,toBeCleaned,waitCleaned']
DEFT_datasets_postproduction_states = 'deleted,toBeErased,waitErased,toBeCleaned,waitCleaned'
DEFT_tasks_abort_statesL = ['aborted','broken','failed']
Request_update_statesL = ['registered','running','done','finished','failed']
# Tasks/datasets below this ID are ignored by DEFT.
MIN_DEFT_TASK_ID = 4000000
#synch intervals (hours)
REQUEST_SYNCH_INTERVAL = 12000
TASK_SYNCH_INTERVAL = 12000
DATASET_SYNCH_INTERVAL = 72
# Substring marking task-recovery steps in JEDI task names.
TASK_RECOVERY_STEP = '.recov.'
class DEFTClient(object):
    """Thin client for the CERN-SSO protected DEFT monitoring API.

    Originally by D.Golubkov.  Each request dict is JSON-encoded,
    base64-wrapped and POSTed as the ``message`` form field.
    """

    def __init__(self):
        # Obtain SSO (kerberos) cookies for the API endpoint up front.
        self.ssocookies = cernsso.Cookies(self._getAPIScope()).get()

    def _getAPIScope(self):
        # Single place that knows the API endpoint URL.
        return 'https://atlas-info-mon.cern.ch/api/deft'

    def _sendRequest(self, request):
        # Encode and POST the request; TLS verification is disabled.
        payload = {'message': base64.b64encode(json.dumps(request))}
        response = requests.post(self._getAPIScope(), data=payload,
                                 cookies=self.ssocookies, verify=False)
        return response.json()

    def getUserInfo(self):
        """Return information about the authenticated user."""
        return self._sendRequest({'method': 'getUserInfo'})

    def createDEFTTask(self, dataset, taskId):
        """Register a DEFT task for ``dataset`` under ``taskId``."""
        return self._sendRequest({'method': 'createDEFTTask',
                                  'dataset': dataset, 'taskId': taskId})

    def createProdsysListTask(self, dataset):
        """Create a ProdSys list task for ``dataset``."""
        return self._sendRequest({'method': 'createProdsysListTask',
                                  'dataset': dataset})
def usage() :
    """
    Usage python deft_handling.py cmd
    Initialization options :
     -h[elp]                display this help message and exit
     -v[erbose]             run in verbose mode
     -u[update]             run in update mode (update database)
     Action options :
     --change-task-state  -t[id] TaskID -s[state] State      set task state to 'State'
     --check-aborted-tasks                                   check aborted tasks and set datasets state accordingly
     --finish-task  -t[id] TaskID                            end task; running jobs will be finished;
     --kill-task -t[id]                                      end task and kill all running jobs
     --obsolete-task -t[id]                                  obsolete task
     --change-task-priority -t[id] TaskID -p[riority] Priority  set task priority to 'Priority'
     --synchronizeJediDeft                                   synchronize DEFT task tables with JEDI ones
    """
    # NOTE: the docstring above IS the user-visible help text -- it is
    # printed verbatim below, so any edit to it changes program output.
    print usage.__doc__
def execCmd(cmd, flag):
    """Run a shell command via commands.getstatusoutput.

    Echoes the command and its output when it fails and flag == 1, and
    echoes status/output whenever the global ``verbose`` flag is set.
    Returns the (status, output) pair.
    """
    status, output = commands.getstatusoutput(cmd)
    if status != 0 and flag == 1:
        print(cmd)
        print(status)
        print(output)
    if verbose:
        print(status)
        print(output)
    return status, output
def findProcess(task, command, option):
    """Look for another running ``task``/``command`` instance via ps.

    When one is found, prints a notice; with option == 'Quit' the
    current process exits with status 1.  Empty task/option skip the
    check entirely.
    """
    if option == '' or task == '':
        return
    probe = "ps -ef | grep %s | grep -c %s" % (task, command)
    if verbose > 2:
        print(probe)
    status, count = execCmd(probe, 0)
    if status != 0:
        return
    # threshold 2 presumably accounts for the grep pipeline matching
    # itself -- TODO confirm
    if int(count) > 2:
        print("./deft_handling.py -INFO- There is an active process %s %s. %s" % (task, command, time.ctime()))
        if option == 'Quit':
            print("Quit.")
            sys.exit(1)
    return
def connectDEFT(flag):
    """Open a connection to the DEFT (INTR) database.

    flag == 'W' selects the writer account, anything else the reader.
    Returns (connection, cursor, schema owner).
    """
    # fix: removed dead local ``error`` that was assigned but never used
    dbname = deft_conf.daemon['deftDB_INTR']
    deftDB = deft_conf.daemon['deftDB_host']
    if flag == 'W':
        dbuser = deft_conf.daemon['deftDB_writer']
        dbpwd = deft_pass.deft_pass_intr['atlas_deft_w']
    else:
        dbuser = deft_conf.daemon['deftDB_reader']
        dbpwd = deft_pass.deft_pass_intr['atlas_deft_r']
    (pdb, dbcur) = DButils.connectDEFT(dbname, dbuser, dbpwd)
    return pdb, dbcur, deftDB
def connectJEDI(flag):
    """Open a connection to the JEDI view of the DEFT (ADCR) database.

    flag == 'W' selects the writer account, anything else the reader.
    Returns (connection, cursor, schema owner).
    """
    # fix: removed dead local ``error`` that was assigned but never used
    dbname = deft_conf.daemon['deftDB_ADCR']
    deftDB = deft_conf.daemon['deftDB_host']
    if flag == 'W':
        dbuser = deft_conf.daemon['deftDB_writer']
        dbpwd = deft_pass.deft_pass_intr['atlas_jedi_w']
    else:
        dbuser = deft_conf.daemon['deftDB_reader']
        dbpwd = deft_pass.deft_pass_intr['atlas_jedi_r']
    (pdb, dbcur) = DButils.connectDEFT(dbname, dbuser, dbpwd)
    return pdb, dbcur, deftDB
def connectPandaJEDI(flag):
    """Open a connection to the PanDA/JEDI (ADCR) database.

    flag == 'W' selects the writer account, anything else the reader.
    Returns (connection, cursor, schema owner).
    """
    # fix: removed dead local ``error`` that was assigned but never used
    dbname = deft_conf.daemon['jediDB_ADCR']
    deftDB = deft_conf.daemon['jediDB_host']
    if flag == 'W':
        dbuser = deft_conf.daemon['deftDB_writer']
        dbpwd = deft_pass.deft_pass_intr['atlas_jedi_w']
    else:
        dbuser = deft_conf.daemon['deftDB_reader']
        dbpwd = deft_pass.deft_pass_intr['atlas_jedi_r']
    (pdb, dbcur) = DButils.connectDEFT(dbname, dbuser, dbpwd)
    return pdb, dbcur, deftDB
def JediTaskCmd(cmd,task_id,priority) :
#
status = 0
timenow = time.ctime()
msg = "INFO. %s %s at %s"%(cmd, task_id,timenow)
# find task with rask_id and check its status
#connect to Oracle
(pdb,dbcur,deftDB) = connectJEDI('R')
#
t_table_JEDI = "%s.%s"%(deftDB,deft_conf.daemon['t_task'])
sql = "SELECT taskid,status FROM %s WHERE taskid=%s"%(t_table_JEDI,task_id)
if verbose : print sql
tasks = DButils.QueryAll(pdb,sql)
DButils.closeDB(pdb,dbcur)
tid = -1
tstatus = 'unknown'
for t in tasks :
tid = t[0]
tstatus = t[1]
if tid == task_id :
# check status
if task_finish_states.find(tstatus) < 0 :
# check command
if cmd == 'finishTask' :
(status,output) = jedi.client.finishTask(task_id)
elif cmd == 'killTask' :
(status,output) = jedi.client.killTask(task_id)
elif cmd == 'changeTaskPriority' :
(status,output) = jedi.client.changeTaskPriority(task_id,priority)
else :
status = -1
msg = "WARNING. Unknown command : %s"%(cmd)
else :
status = -1
msg = "WARNING. Task : %s State : %s (in %s). Cmd : %s CANNOT BE EXECUTED"%(task_id,tstatus,t_table_JEDI,cmd)
else :
status = -1
msg = "WARNING. Task %s NOT FOUND in %s"%(task_id,t_table_JEDI)
if status != 0 and status != -1 :
msg = 'ERROR. jedi.client.finisheTask(%s)'%(task_id)
print msg,' (Return Status : ',status,')'
return status
def obsoleteTaskState(task_id, dbupdate) :
# set task state to 'obsolete' and update datasets states accordingly
error = 0
status = 'unknown'
project = 'unknown'
obsoleteFlag = 0
# find task and check its state
#connect to Oracle
(pdb,dbcur,deftDB) = connectDEFT('R')
#
t_table_DEFT = "%s.%s"%(deftDB,deft_conf.daemon['t_production_task'])
t_table_JEDI = "%s.%s"%(deftDB,deft_conf.daemon['t_task'])
t_dataset_DEFT = "%s.%s"%(deftDB,deft_conf.daemon['t_production_dataset'])
t_input_dataset= "%s.%s"%(deftDB,deft_conf.daemon['t_input_dataset'])
#
sql = "SELECT taskid,status,project FROM %s WHERE taskid=%s"%(t_table_DEFT,task_id)
if verbose : print sql
tasks = DButils.QueryAll(pdb,sql)
DButils.closeDB(pdb,dbcur)
if len(tasks) > 0 :
for t in tasks :
tid = t[0]
status = t[1]
project = t[2]
if task_finish_states.find(status) < 0 or status=='obsolete' : error = 1
if project == 'user' : error = 1
if error == 0 :
obsoleteFlag = 1
else :
error =1
if error == 1 :
print "ERROR. obsoleteTaskState. Task %s NOT FOUND or it has invalid state/project ('%s'/'%s')"%(task_id,status,project)
else :
if obsoleteFlag == 1 :
# update tables
sql = "update %s SET status = 'obsolete' where task_id=%s"%(t_table_DEFT,task_id)
sql_update.append(sql)
sql = "update %s SET status = 'obsolete' where task_id=%s"%(t_table_JEDI,task_id)
sql_update.append(sql)
sql = "update %s SET status = 'waitingErase' where task_id=%s"%(t_dataset_DEFT,task_id)
sql_update.append(sql)
sql = "update %s SET status = 'waitingErase' where task_id=%s"%(t_input_dataset,task_id)
sql_update.append(sql)
print sql
if dbupdate == True :
(pdb,dbcur,deftDB) = connectDEFT('W')
for sql in sql_update :
print sql
DButils.QueryUpdate(pdb,sql)
DButils.QueryCommit(pdb)
DButils.closeDB(pdb,dbcur)
else :
print "INFO. obsoleteTaskState : no database update"
def changeTaskState(task_id, task_state,dbupdate) :
#
error = 0
#connect to Oracle
(pdb,dbcur,deftDB) = connectDEFT('R')
# get task info
t_table = "%s.%s"%(deftDB,deft_conf.daemon['t_production_task'])
sql = "SELECT taskid,status FROM %s WHERE taskid=%s"%(t_table)
if verbose : print sql
tasks = DButils.QueryAll(pdb,sql)
DButils.closeDB(pdb,dbcur)
if len(tasks) > 0 :
for t in tasks :
tid = t[0]
state = t[1]
print "changeTaskState INFO. Task : %s Status : %s"%(tid, status)
if state in ("done","finished","pending") :
sql = "UPDATE %s SET status='%s', update_time=current_timestmap "%(t_table,task_state)
sql+= "WHERE taskid=%s"%(task_id)
print sql
# update ADCR database accordingly
else :
print "changeTaskState INFO. Task state CANNOT BE CHANGED"
else :
print "changeTaskState Error. Can not find info for task %s"%(task_id)
error = 1
return error
def checkAbortedTasks() :
#
# check tasks state and set dataset state accordingly
#
TR_ID_Min = 4000000
TR_ID_Max = 5000000
TR_ID_Start_From = 4000000
dbupdate = True
DEFT_tasks_abort_states = ''
for s in DEFT_tasks_abort_statesL :
DEFT_tasks_abort_states += "'%s',"%(s)
DEFT_tasks_abort_states = DEFT_tasks_abort_states[0:(len(DEFT_tasks_abort_states)-1)]
DEFT_datasets_final_states = ''
for s in DEFT_datasets_done_statesL :
DEFT_datasets_final_states += "'%s',"%(s)
DEFT_datasets_final_states = DEFT_datasets_final_states[0:(len(DEFT_datasets_final_states)-1)]
user_project = 'user%'
timenow = int(time.time())
findProcess('deft_handling','checkAbortedTasks','Quit')
# connect to Oracle
(pdb,dbcur,deftDB) = connectDEFT('R')
# select tasks
t_table_DEFT = "%s.%s"%(deftDB,deft_conf.daemon['t_production_task'])
sql = "SELECT TASKID FROM %s WHERE "%(t_table_DEFT)
sql+= "STATUS IN (%s) "%(DEFT_tasks_abort_states)
sql+= "AND project NOT LIKE '%s' "%(user_project)
sql+= "AND (taskid>%s AND taskid <%s) ORDER by taskid "%(TR_ID_Min,TR_ID_Max)
print sql
tids = DButils.QueryAll(pdb,sql)
# select datasets
t_table_datasets = "%s.%s"%(deftDB,deft_conf.daemon['t_production_dataset'])
sql = "SELECT TASKID, name FROM %s "%(t_table_datasets)
sql+= "WHERE STATUS IN (%s) "%(DEFT_datasets_final_states)
sql+= "AND name NOT LIKE '%s' "%(user_project)
sql+= "AND (taskid>%s AND taskid <%s) ORDER by taskid "%(TR_ID_Min,TR_ID_Max)
print sql
dids = DButils.QueryAll(pdb,sql)
DButils.closeDB(pdb,dbcur)
T0 = time.time()
sql_update = []
print "INFO. checkAbortedTasks. Selection done, start TIDs comparison @",time.ctime(T0)
for d in dids :
d_tid = d[0]
d_name= d[1]
for t in tids :
t_tid = t[0]
if d_tid == t_tid :
sql_upd = "UPDATE %s SET status='toBeDeleted', TIMESTAMP=CURRENT_TIMESTAMP "%(t_table_datasets)
sql_upd+= "WHERE name = '%s' "%(d_name)
sql_update.append(sql_upd)
elif t_tid > d_tid :
break
T1 = time.time()
print "INFO. checkAbortedTasks. Comparison done @",time.ctime(T1)," (",int(T1-T0+1)," sec)"
if dbupdate and len(sql_update) > 0 :
print "INFO. checkAbortedTasks. Update database"
(pdb,dbcur,deftDB) = connectDEFT('W')
for sql in sql_update :
print sql
DButils.QueryUpdate(pdb,sql)
DButils.QueryCommit(pdb)
DButils.closeDB(pdb,dbcur)
else :
msg = "INFO. checkAbortedTasks. NO database update "
if len(sql_update) < 1 : msg += " (no records to delete)"
if dbupdate == False : msq += " (dbupdate flag = False)"
print msg
def insertJediTasksJSON(user_task_list):
#
# insert JEDI user tasks into DEFT t_production_task table
#
user_task_params = {'taskid' : -1,'total_done_jobs':-1,'status' :'','submit_time' : 'None', 'start_time' : 'None',\
'priority' : '-1'}
user_task_step_id = deft_conf.daemon['user_task_step_id']
user_task_request_id = deft_conf.daemon['user_task_request_id']
deft_task_params = {}
sql_update = []
projects_list = []
user_project_name= ''
dbupdate = True
verbose = False
for i in range(0,len(user_task_list)) :
jedi_taskid = user_task_list[i]
deft_task_params[i] = {
'TASKID' : -1,
'STEP_ID' : user_task_step_id,
'PR_ID' : user_task_request_id,
'PARENT_TID' : -1,
'TASKNAME' : '',
'PROJECT' : 'user',
'DSN' : '',
'PHYS_SHORT' : '',
'SIMULATION_TYPE' : 'anal',
'PHYS_GROUP' : 'user',
'PROVENANCE' : 'user',
'STATUS' : 'TBD',
'TOTAL_EVENTS' : -1,
'TOTAL_REQ_JOBS' : 0,
'TOTAL_DONE_JOBS' : 0,
'SUBMIT_TIME' : 'None',
'START_TIME' : 'None',
'TIMESTAMP' : 'None',
'TASKPRIORITY' : -1,
'INPUTDATASET' : 'XYZ',
'PHYSICS_TAG' : 'None',
'VO' : 'XYZ',
'PRODSOURCELABEL' : '',
'USERNAME' : 'XYZ',
'CURRENT_PRIORITY': -1,
'CHAIN_TID' : -1
}
jedi_task_params = ''
jedi_task_names = ['userName','taskName','taskPriority','vo']
# connect to Oracle
(pdb,dbcur,deftDB) = connectJEDI('R')
t_table_DEFT = "%s.%s"%(deftDB,deft_conf.daemon['t_production_task'])
t_table_JEDI = "%s.%s"%(deftDB,deft_conf.daemon['t_task'])
t_table_projects= "%s.%s"%(deftDB,deft_conf.daemon['t_projects'])
i = 0
jedi_task_params = ''
for p in user_task_list :
jedi_tid = p['taskid']
#--sql = "SELECT dbms_lob.substr(jedi_task_parameters,80000,1) FROM %s WHERE taskid=%s"%(t_table_JEDI,jedi_tid)
#--sql = "SELECT dbms_lob.substr(jedi_task_parameters) FROM %s WHERE taskid=%s"%(t_table_JEDI,jedi_tid)
sql = "SELECT jedi_task_parameters FROM %s WHERE taskid=%s"%(t_table_JEDI,jedi_tid)
if verbose == True : print sql
tasksJEDI_CLOB = DButils.QueryAll(pdb,sql)
for t in tasksJEDI_CLOB :
task_param = t[0]
task_param_dict = json.loads(str(task_param))
Skip_Record = False
for jtn in jedi_task_names :
param = task_param_dict[jtn]
jtnC = jtn.upper()
if jtnC == 'TASKPRIORITY' :
deft_task_params[i]['CURRENT_PRIORITY'] = param
else :
deft_task_params[i][jtnC] = param
deft_task_params[i]['TASKID'] = jedi_tid
deft_task_params[i]['PARENT_TID'] = jedi_tid
deft_task_params[i]['CHAIN_TID'] = jedi_tid
deft_task_params[i]['STATUS'] = p['status']
if p['start_time'] != 'None' : deft_task_params[i]['START_TIME'] = p['start_time']
if p['submit_time']!= 'None' : deft_task_params[i]['SUBMIT_TIME']= p['submit_time']
if p['total_done_jobs'] != None : deft_task_params[i]['TOTAL_DONE_JOBS'] = p['total_done_jobs']
if p['total_events'] != None : deft_task_params[i]['TOTAL_EVENTS'] = p['total_events']
jj = deft_task_params[i]['TASKNAME'].split('.')
user_project_name = jj[0]
print user_project_name
# form insert string
deft_names_0 = "TASKID,STEP_ID,PR_ID,PARENT_TID,TASKNAME,PROJECT,STATUS,TOTAL_EVENTS,TOTAL_REQ_JOBS,TOTAL_DONE_JOBS,"
deft_namea_1 = "SUBMIT_TIME, START_TIME,TIMESTAMP"
if deft_task_params[i]['TOTAL_REQ_JOBS'] == 0 :
if deft_task_params[i]['TOTAL_DONE_JOBS'] > 0 :
deft_task_params[i]['TOTAL_REQ_JOBS'] = deft_task_params[i]['TOTAL_DONE_JOBS']
sql = "INSERT INTO %s "%(t_table_DEFT)
sqlN= "(%s "%(deft_names_0)
sqlV = "VALUES(%s,%s,%s,%s,'%s',"%\
(deft_task_params[i]['TASKID'],user_task_step_id,user_task_request_id,\
deft_task_params[i]['TASKID'],deft_task_params[i]['TASKNAME'])
sqlV+="'%s','%s',%s,%s,%s,"%\
('user',deft_task_params[i]['STATUS'],deft_task_params[i]['TOTAL_EVENTS'],\
deft_task_params[i]['TOTAL_REQ_JOBS'],deft_task_params[i]['TOTAL_DONE_JOBS'])
if deft_task_params[i]['SUBMIT_TIME'] != 'None' :
sqlN += "SUBMIT_TIME,"
sqlV += "TO_TIMESTAMP('%s','YYYY-MM-DD HH24:MI:SS'),"%(deft_task_params[i]['SUBMIT_TIME'])
if deft_task_params[i]['START_TIME'] != 'None' and deft_task_params[i]['START_TIME'] != None:
sqlN += "START_TIME,"
sqlV += "TO_TIMESTAMP('%s','YYYY-MM-DD HH24:MI:SS'),"%(deft_task_params[i]['START_TIME'])
sqlN += "TIMESTAMP,"
sqlV += "current_timestamp,"
sqlN += "VO,PRODSOURCELABEL,USERNAME,CURRENT_PRIORITY,PRIORITY,CHAIN_TID,BUG_REPORT) "
sqlV += "'%s','%s','%s', %s,%s,%s,%s)"%\
(deft_task_params[i]['VO'],'user',deft_task_params[i]['USERNAME'],\
deft_task_params[i]['CURRENT_PRIORITY'],deft_task_params[i]['TASKPRIORITY'],\
deft_task_params[i]['CHAIN_TID'],-1)
sql += sqlN
sql += sqlV
#-
# # and insert the same string into t_production_task_listpart
# sqlP = sql.replace(t_table_DEFT,t_table_DEFT_P)
# print sqlP
#-
# check project
project_found = False
for p in projects_list :
if p == user_project_name :
project_found = True
break
if project_found : break
if project_found == False : projects_list.append(user_project_name)
sql_update.append(sql)
i += 1
DButils.closeDB(pdb,dbcur)
if dbupdate == True :
timenow = int(time.time())
(pdb,dbcur,deftDB) = connectDEFT('W')
# insert new projects (id any)
for tp in projects_list :
sql = "SELECT distinct project FROM %s ORDER by project"%(t_table_projects)
print sql
task_projects = DButils.QueryAll(pdb,sql)
project_found = False
for td in task_projects :
t_project = td[0]
if t_project == tp :
project_found = True
if project_found : break
if project_found == False :
print "INFO.SynchronizeJediDeftTasks. New project %s. Insert it into %s"%(tp,t_table_projects)
sql = "INSERT INTO %s (PROJECT,BEGIN_TIME,END_TIME,STATUS,TIMESTAMP) "
sql+= "VALUES('%s',%s,%s,'active',%s)"%(tp,timenow,timenow+10*365*24*60*60,timenow)
print sql
sql_update.append(sql)
for sql in sql_update :
print sql
DButils.QueryUpdate(pdb,sql)
DButils.QueryCommit(pdb)
DButils.closeDB(pdb,dbcur)
def synchronizeJediDeftDatasets () :
#
# get list of all tasks updated in 12h
#
timeInterval = DATASET_SYNCH_INTERVAL # hours
JEDI_datasets_final_states = ''
for s in JEDI_datasets_final_statesL :
JEDI_datasets_final_states += "'%s',"%(s)
JEDI_datasets_final_states = JEDI_datasets_final_states[0:(len(JEDI_datasets_final_states)-1)]
DEFT_datasets_final_states = ''
for s in DEFT_datasets_final_statesL :
DEFT_datasets_final_states += "'%s',"%(s)
DEFT_datasets_final_states = DEFT_datasets_final_states[0:(len(DEFT_datasets_final_states)-1)]
# connect to Oracle
(pdb,dbcur,deftDB) = connectDEFT('R')
t_table_DEFT = "%s.%s"%(deftDB,deft_conf.daemon['t_production_task'])
t_table_datasets_DEFT = "%s.%s"%(deftDB,deft_conf.daemon['t_production_dataset'])
sql = "SELECT taskid, status, phys_group, timestamp, project, username, pr_id FROM %s "%(t_table_DEFT)
sql+= "WHERE TIMESTAMP > current_timestamp - %s AND taskid >= %s "%(timeInterval,MIN_DEFT_TASK_ID)
sql+= "ORDER BY taskid"
print sql
tasksDEFT = DButils.QueryAll(pdb,sql)
print "%s DEFT tasks match to the criteria"%(len(tasksDEFT))
if len(tasksDEFT) > 0 :
minTaskID = -1
maxTaskID = -1
sql = "SELECT min(taskid),max(taskid) FROM %s "%(t_table_DEFT)
sql +="WHERE TIMESTAMP > current_timestamp - %s AND taskid >= %s "%(timeInterval,MIN_DEFT_TASK_ID)
print sql
MMtasks = DButils.QueryAll(pdb,sql)
for t in MMtasks :
minTaskID = t[0]
maxTaskID = t[1]
print "INFO. Check datasets produced by %s - %s tasks"%(minTaskID,maxTaskID)
sql = "SELECT taskid, name, status, phys_group, timestamp "
sql+= "FROM %s WHERE taskid >= %s and taskid <= %s "%(t_table_datasets_DEFT,minTaskID,maxTaskID)
sql += "ORDER BY taskid"
datasetsDEFT = DButils.QueryAll(pdb,sql)
DButils.closeDB(pdb,dbcur)
sql_update = []
if len(tasksDEFT) > 0 and len(datasetsDEFT) > 0 :
# step #1 : synchronize DEFT t_production_task and t_production_dataset content
for t in tasksDEFT :
t_tid = t[0]
t_status = t[1]
t_phys_group = t[2]
t_project = t[4]
t_owner = t[5]
t_reqid = t[6]
if verbose : print "INFO. check status %s"%(t_status)
if task_aborted_states.find(t_status) >= 0 :
for d in datasetsDEFT :
d_tid = d[0]
if d_tid == t_tid :
d_status = d[2]
if d_status == None or d_status=='None' : d_status='unknown'
if datasets_deletion_states.find(d_status) < 0 :
sql = "UPDATE %s SET status='toBeDeleted',timestamp=current_timestamp,pr_id=%s WHERE taskid=%s"%\
(t_table_datasets_DEFT,t_tid,t_reqid)
sql_update.append(sql)
break
elif d_status == 'unknown' :
sql= "UPDATE %s SET status='%s',TIMESTAMP=current_timestamp, pr_id=%s WHERE taskid=%s"\
(t_table_datasets_DEFT,t_status,t_tid,t_reqid)
sql_update.append(sql)
elif d_tid > t_tid :
print "WARNING. Cannot find dataset in %s for task %s (project: %s)"%\
(t_table_datasets_DEFT,t_tid,t_project)
break
if len(sql_update) :
print "INFO. synchronizeJediDeftDatasets. Step1 : Start database update"
(pdb,dbcur,deftDB) = connectDEFT('W')
for sql in sql_update :
if verbose : print sql
DButils.QueryUpdate(pdb,sql)
DButils.QueryCommit(pdb)
DButils.closeDB(pdb,dbcur)
#step #2. synchronize DEFT t_production_dataset and JEDI atlas_panda.jedi_datasets content
#connect to JEDI and get list of production datasets
#
# form DEFT datasets list
#(pdb,dbcur,deftDB) = connectDEFT('R')
#sql = "SELECT taskid,status FROM %s WHERE status IN (%s) "%(t_table_datasets_DEFT, DEFT_datasets_final_states)
#sql+= "AND (taskid >= %s and taskid <= %s) ORDER BY taskid "%(minTaskID,maxTaskID)
#if verbose : print sql
#datasetsDEFT = DButils.QueryAll(pdb,sql)
#DButils.closeDB(pdb,dbcur)
# get JEDI datasets list
sql_update = []
(pdb,dbcur,jediDB) = connectPandaJEDI('R')
t_table_datasets_JEDI = "%s.%s"%(jediDB,deft_conf.daemon['t_jedi_datasets'])
sql = "SELECT jeditaskid, datasetname, status, nfilesfinished, nevents, creationtime, frozentime "
sql+= "FROM %s "%(t_table_datasets_JEDI)
sql+= "WHERE jeditaskid >= %s AND jeditaskid <= %s "%(minTaskID,maxTaskID)
sql+= "AND datasetname NOT LIKE '%s' "%('user%')
sql+= "AND status IN (%s) "%(JEDI_datasets_final_states)
sql+= "ORDER BY jeditaskid"
print sql
datasetsJEDI = DButils.QueryAll(pdb,sql)
DButils.closeDB(pdb,dbcur)
for d in datasetsDEFT :
d_tid = d[0]
d_name = d[1]
if d[2] == None :
d_status = 'unknown'
else :
d_status = d[2]
d_phys_group = d[3]
found = False
for j in datasetsJEDI :
j_tid = j[0]
j_name = j[1]
j_status = j[2]
if d_tid == j_tid :
if d_name == j_name :
try :
j_nfiles = int(j[3])
except :
j_nfiles = 0
try :
j_nevents = int(j[4])
except :
j_nevents = 0
found = True
if j_status != d_status :
if DEFT_datasets_final_states.find(d_status) < 0 :
if DEFT_datasets_postproduction_states.find(d_status) < 0 :
sql = "UPDATE %s "%(t_table_datasets_DEFT)
sql+= "SET EVENTS = %s, FILES = %s, STATUS = '%s', "%(j_nevents, j_nfiles, j_status)
sql+= "TIMESTAMP = current_timestamp "
sql+= "WHERE taskid = %s AND name = '%s' "%(d_tid,d_name)
print sql
sql_update.append(sql)
else :
if verbose :
print "Task : ",j_tid,d_tid
print "DEFT : ",d_name
print "JEDI : ",j_name
else :
print "INFO. dataset : ",d_name
print "DEFT state : %s, JEDI state : %s"%(d_status,j_status)
print "NO %s update. DEFT dataset state is final"%(t_table_datasets_DEFT)
elif j_tid > t_tid :
print "INFO. Dataset for %s task and states in '(%s)'"%(t_tid,JEDI_datasets_final_states)
break
# update database
if len(sql_update) :
(pdb,dbcur,deftDB) = connectDEFT('W')
print "INFO. synchronizeJediDeftDatasets. Step2 : Start database update"
for sql in sql_update :
if verbose : print sql
DButils.QueryUpdate(pdb,sql)
DButils.QueryCommit(pdb)
DButils.closeDB(pdb,dbcur)
else :
print "INFO. No tasks or/and datasets match to time interval"
def synchronizeJediDeftTasks() :
    #
    # read task information from t_task and update t_production_tasks accordingly
    #
    # JEDI tasks absent from DEFT that carry the user prodsourcelabel
    # (or a recovery-step task name) are collected into user_task_list
    # and handed to insertJediTasksJSON() at the end.
    user_task_list = []
    # template copied (via .copy()) once per user task found below
    user_task_params = {'taskid' : -1,'total_done_jobs':-1,'status' :'','submit_time' : -1, 'start_time' : 'None',\
                        'priority' : '-1','total_req_jobs':-1, 'total_events':0}
    updateIntervalHours = TASK_SYNCH_INTERVAL
    # Oracle timestamp arithmetic works in days, hence the "/24" string
    timeIntervalOracleHours = "%s/%s"%(updateIntervalHours,24)
    post_production_status = ['aborted','obsolete']
    running_status = ['running','submitted','submitting','registered','assigned']
    end_status = ['done','failed','finished','broken']
    # connect to Oracle and fetch DEFT tasks changed in the interval
    (pdb,dbcur,deftDB) = connectDEFT('R')
    t_table_DEFT = "%s.%s"%(deftDB,deft_conf.daemon['t_production_task'])
    t_table_JEDI = "%s.%s"%(deftDB,deft_conf.daemon['t_task'])
    sql_select = "SELECT taskid, status,total_req_jobs,total_done_jobs,submit_time, start_time, current_priority,total_events "
    sql = sql_select
    sql += "FROM %s WHERE timestamp > current_timestamp - %s "%(t_table_DEFT,timeIntervalOracleHours)
    sql += "AND taskid > %s "%(MIN_DEFT_TASK_ID)
    sql += "ORDER by taskid"
    print sql
    tasksDEFT = DButils.QueryAll(pdb,sql)
    DButils.closeDB(pdb,dbcur)
    print "%s DEFT tasks match to the criteria"%(len(tasksDEFT))
    # fetch the corresponding JEDI task records
    (pdb,dbcur,deftDB) = connectJEDI('R')
    sql_select = "SELECT taskid, status,total_done_jobs,submit_time, start_time, prodsourcelabel,"
    sql_select+= "priority,current_priority, taskname, total_req_jobs, total_events "
    sql = sql_select
    sql += "FROM %s WHERE timestamp > current_timestamp - %s "%(t_table_JEDI,timeIntervalOracleHours)
    sql += "AND taskid > %s "%(MIN_DEFT_TASK_ID)
    sql += "ORDER by taskid"
    print sql
    tasksJEDI = DButils.QueryAll(pdb,sql)
    print "%s JEDI tasks match to the criteria"%(len(tasksJEDI))
    DButils.closeDB(pdb,dbcur)
    sql_update_deft = []
    for tj in tasksJEDI :
        tj_tid = tj[0]
        tj_status = tj[1]
        tj_done = tj[2]
        if tj_done == None : tj_done = 0
        tj_submit = tj[3]
        tj_start = tj[4]
        tj_prodsourcelabel = tj[5]
        tj_prio = tj[6]
        tj_curprio = tj[7]
        tj_taskname= tj[8]
        tj_req_jobs= tj[9]
        tj_total_events = tj[10]
        if tj_req_jobs == None or tj_req_jobs < 0 :
            tj_req_jobs = -1
        # both lists are ordered by taskid: scan DEFT until we pass tj_tid
        found = False
        for td in tasksDEFT :
            td_tid = td[0]
            td_status = td[1]
            td_req_jobs = td[2]
            td_done = td[3]
            td_submit = td[4]
            td_start = td[5]
            td_priority = td[6]
            td_total_events = td[7]
            if td_tid == tj_tid :
                # compare records
                print "Compare records for TID = %s"%(tj_tid)
                found = True
                break
            elif td_tid > tj_tid :
                break
        if found == False :
            # unknown to DEFT: user/recovery tasks are queued for insertion
            if tj_prodsourcelabel == user_task_label or tj_taskname.find(TASK_RECOVERY_STEP) > 0 :
                print "synchroniseJediDeft INFO. Task %s NOT FOUND in %s. It is users task"%(tj_tid,t_table_DEFT)
                user_task_params['taskid'] = tj_tid
                user_task_params['status'] = tj_status
                user_task_params['total_done_jobs'] = tj_done
                user_task_params['submit_time'] = tj_submit
                user_task_params['start_time'] = tj_start
                user_task_params['priority'] = tj_prio
                user_task_params['current_priority']= tj_curprio
                user_task_params['prodsourcelabel'] = tj_prodsourcelabel
                user_task_params['total_req_jobs'] = tj_req_jobs
                user_task_params['total_events'] = tj_total_events
                user_task_list.append(user_task_params.copy())
            else :
                print "synchroniseJediDeft WARNING. Task %s NOT FOUND in %s"%(tj_tid,t_table_DEFT)
        if found == True :
            # the elif ladder below applies AT MOST ONE difference per
            # pass: status first, then priority, events, done and
            # requested job counters
            print "JEDI Task ID, status, priority, total_events ",tj_tid, tj_status, tj_curprio, tj_total_events
            td_status = td[1]
            # sql_update stays None (nothing to do) or 'XYZ' (sentinel:
            # priority branch decided against an update)
            sql_update = None
            if tj_status != td_status :
                print "Status has changed. DEFT, JEDI : %s, %s"%(td_status,tj_status)
                if td_status in post_production_status :
                    print "Ignore. DEFT status (in post_production)... %s"%(td_status)
                else :
                    td_status = tj_status
                    td_done = tj_done
                    td_req_jobs = tj_req_jobs
                    td_total_events = tj_total_events
                    sql_update = "UPDATE %s SET status='%s',total_done_jobs=%s,total_req_jobs=%s,total_events=%s"%\
                                 (t_table_DEFT,td_status,td_done,td_req_jobs,td_total_events)
                    if tj_start == None :
                        print "Warning. Task ID = %s : invalid start time in t_task : %s (%s)"%(td_tid,tj_start,td_start)
                    else :
                        td_start = tj_start
                        sql_update += ",start_time=to_timestamp('%s','YYYY-MM-DD HH24:MI:SS')"%(td_start)
                    if tj_submit == None or tj_submit == 'None' :
                        print "Warning. Task ID = %s : invalid submit time in t_task : %s (%s)"%(td_tid,tj_submit,td_submit)
                    else :
                        td_submit = tj_submit
                        sql_update += ",submit_time=to_timestamp('%s','YYYY-MM-DD HH24:MI:SS')"%(td_submit)
                    sql_update += ",TIMESTAMP = current_timestamp "
                    sql_update += "WHERE taskid = %s"%(td_tid)
            elif tj_curprio != td_priority :
                if td_status in post_production_status or td_status in end_status :
                    print "Ignore. DEFT status (in post_production)... %s"%(td_status)
                else :
                    sql_update = 'XYZ'
                    if tj_curprio != None :
                        sql_update = "UPDATE %s SET current_priority=%s "%(t_table_DEFT, tj_curprio)
                    else :
                        if td_priority == None :
                            # NOTE(review): when both priorities are None this
                            # interpolates the literal 'None' into the SQL,
                            # which Oracle rejects -- per the June 2 changelog
                            # the intent was current_priority = priority;
                            # verify which column/value was meant.
                            sql_update = "UPDATE %s SET current_priority=%s "%(t_table_DEFT, td_priority)
                    if sql_update != 'XYZ' :
                        sql_update+= ",TIMESTAMP = current_timestamp "
                        sql_update+= "WHERE taskid = %s"%(td_tid)
            elif tj_total_events != td_total_events :
                if tj_total_events != None :
                    print "Total events (DEFT, JEDI) : ",td_total_events, tj_total_events
                    td_total_events = tj_total_events
                    sql_update = "UPDATE %s SET total_events = %s, "%(t_table_DEFT,td_total_events)
                    sql_update += "TIMESTAMP = current_timestamp "
                    sql_update += "WHERE taskid = %s"%(td_tid)
            elif tj_done != td_done :
                if tj_done != None :
                    print "Total done jobs (DEFT, JEDI) : ",td_done, tj_done
                    td_done = tj_done
                    sql_update = "UPDATE %s SET total_done_jobs = %s, "%(t_table_DEFT,td_done)
                    sql_update += "TIMESTAMP = current_timestamp "
                    sql_update += "WHERE taskid = %s"%(td_tid)
            elif tj_req_jobs != td_req_jobs :
                print "Total requested jobs (DEFT, JEDI) : ",td_req_jobs, tj_req_jobs
                td_req_jobs = tj_req_jobs
                sql_update = "UPDATE %s SET total_req_jobs = %s,"%(t_table_DEFT,td_req_jobs)
                sql_update += "TIMESTAMP = current_timestamp "
                sql_update += "WHERE taskid = %s"%(td_tid)
            else :
                print "DEFT Task ID, status, priority, total_events ",td_tid, td_status, td_priority, td_total_events
                print "Synch done"
            if sql_update != None and sql_update != 'XYZ' :
                print sql_update
                sql_update_deft.append(sql_update)
    db_update = True
    if len(sql_update_deft) and db_update == True :
        print "Update database information (",len(sql_update_deft)," records)"
        (pdb,dbcur,deftDB) = connectDEFT('W')
        for sql in sql_update_deft :
            print sql
            DButils.QueryUpdate(pdb,sql)
        DButils.QueryCommit(pdb)
        DButils.closeDB(pdb,dbcur)
    elif db_update == False :
        print "INFO. No database update : db_update = %s"%(db_update)
    if len(user_task_list) :
        # register newly discovered user tasks in DEFT
        print "INFO. process JEDI users tasks"
        insertJediTasksJSON(user_task_list)
def synchronizeDeftRequests():
    """Bring production-request states in line with the states of their tasks.

    Reads recently updated, non-user production tasks from DEFT, derives a
    new status for each (request, step) pair, and collects SQL statements
    intended to record the status change.  The collected statements are
    applied in a final write transaction.
    """
    # update Production request status (caveat do not process user's requests)
    error = 0
    # connect to Oracle (read-only connection for the selection phase)
    (pdb,dbcur,deftDB) = connectDEFT('R')
    t_table_Tasks = "%s.%s"%(deftDB,deft_conf.daemon['t_production_task'])
    t_table_Requests = "%s.%s"%(deftDB,deft_conf.daemon['t_prodmanager_request'])
    t_table_Request_State = "%s.%s"%(deftDB,deft_conf.daemon['t_prodmanager_request_status'])
    # Build a quoted, comma-separated IN-list from Request_update_statesL.
    request_update_list = ''
    for r in Request_update_statesL :
        request_update_list += "'%s',"%(r)
    request_update_list = request_update_list[0:(len(request_update_list)-1)]
    sql = "SELECT taskid,pr_id,chain_tid, status,step_id FROM %s "%(t_table_Tasks)
    sql+= "WHERE status IN (%s) "%(request_update_list)
    sql+= "AND taskid > %s "%(MIN_DEFT_TASK_ID)
    sql+= "AND TIMESTAMP > current_timestamp - %s "%(REQUEST_SYNCH_INTERVAL)
    sql+= "AND project NOT LIKE '%s' "%('user%')
    sql+="ORDER BY TASKID, PR_ID, STEP_ID"
    print sql
    tasksDEFT = DButils.QueryAll(pdb,sql)
    requests = []
    done_requests = []
    final_requests= []
    sql_update = []
    if len(tasksDEFT) :
        # select list of requests (deduplicated below via sort + adjacent compare)
        for t in tasksDEFT :
            task_id = t[0]
            req_id = t[1]
            try :
                req_id = int(req_id)
            except :
                print "WARNING. Unknown request ID : %s (Task ID : %s)"%(req_id,task_id)
            requests.append(req_id)
        requests.sort()
        rold = -1
        for r in requests :
            if r != rold :
                final_requests.append(r)
                rold = r
    else :
        # NOTE(review): '%' placeholder is malformed ("% hours") and
        # REQUEST_TIME_INTERVAL_HOURS is not defined in this file -- this
        # branch would raise NameError if reached; confirm intended constant.
        print "INFO. NO new tasks in the last % hours"%(REQUEST_TIME_INTERVAL_HOURS)
    for request in final_requests :
        sql = "SELECT req_s_id, pr_id, status FROM %s WHERE PR_ID=%s "%(t_table_Request_State,request)
        reqDEFT = DButils.QueryAll(pdb,sql)
        for r_s_s in reqDEFT :
            r_step_id = r_s_s[0]
            r_req_id = r_s_s[1]
            r_status = r_s_s[2]
            status = r_status
            print "INFO. Process request : %s, Step : %s Current state : %s"%(r_req_id,r_step_id,r_status)
            # now go through list of tasks and find task with for request and step
            for t in tasksDEFT :
                task_id = t[0]
                req_id = t[1]
                step_id = t[4]
                if req_id == r_req_id and step_id == r_step_id :
                    task_status = t[3].lower()
                    # Promote the request state according to task progress
                    # (the trailing backslash joins the assignment onto the
                    # 'if' line, preserving the original single-suite form).
                    if task_status == 'registered' :
                        if r_status == 'approved' or r_status == 'registered' or r_status =='waiting' :\
                            r_status = 'processed'
                    if task_status == 'running' :
                        if r_status == 'approved' or r_status == 'registered' or r_status =='waiting' or r_status == 'processed':\
                            r_status = 'executing'
                    if task_status == 'done' :
                        # check was it the last task in chain
                        done_requests.append(task_id)
            if r_status != status :
                status = r_status
                # NOTE(review): this appends the *SELECT* statement built above,
                # not the INSERT assembled just below -- almost certainly the
                # append belongs after the INSERT is complete.
                sql_update.append(sql)
                # insert new record into t_prodmanager_request_status table
                # NOTE(review): COMMENT is a reserved word in Oracle; the
                # VALUES(...) clause lacks a closing ')'; and 'step' is not
                # defined anywhere in this function (r_step_id was probably
                # intended).  This statement cannot execute as written.
                sql = "INSERT INTO %s "%(t_table_Request_State)
                sql+= "(REQ_S_ID,COMMENT,OWNER,STATUS,TIMESTAMP,PR_ID) "
                sql+= "VALUES(%s,'%s','%s',%s,current_timestamp,'%s'"%\
                    (step,'automatic update','ProdManager',status,request)
                print sql
                # NOTE(review): unconditional exit -- looks like leftover
                # debugging; it aborts the whole run on the first change.
                sys.exit(1)
    DButils.closeDB(pdb,dbcur)
    dbupdate = True
    if dbupdate :
        # Re-connect with write credentials and apply all collected updates.
        (pdb,dbcur,deftDB) = connectDEFT('W')
        for sql in sql_update :
            if verbose : print sql
            DButils.QueryUpdate(pdb,sql)
        DButils.QueryCommit(pdb)
        DButils.closeDB(pdb,dbcur)
    elif db_update == False :
        # NOTE(review): 'db_update' is undefined in this function (the local
        # flag is 'dbupdate'); this branch is unreachable anyway because
        # dbupdate is set to True just above, but would raise NameError if hit.
        print "INFO. No database update : db_update = %s"%(db_update)
def synchronizeJediDeft() :
#
T0 = int(time.time())
print "INFO. synchronizeJediDeftTasks. Started at %s"%(time.ctime(T0))
synchronizeJediDeftTasks()
T1 = int(time.time())
dT10 = int((T1 - T0)/60)
print "INFO. synchronizeJediDeftTasks. done at %s (%s sec)"%(time.ctime(T0),dT10)
print "INFO. synchronizeJediDeftDatasetss. Started at %s"%(time.ctime(T1))
synchronizeJediDeftDatasets()
T2 = int(time.time())
dT21 = int((T2-T1)/60)
print "INFO. synchronizeJediDeftDatasets. done at %s (%s sec)"%(time.ctime(T2),dT21)
T3 = int(time.time())
print "INFO. synchronize JediDeftRequests. Started at %s"%(time.ctime(T3))
synchronizeDeftRequests()
T4 = int(time.time())
dT43 = ((T4 - T3)/60)
print "INFO. synchronizeJediDeftRequests. done at %s (%s sec)"%(time.ctime(T4),dT43)
def main() :
msg = ''
error = 0
# SSO; May 15 : some issues with SSO
deft_client = DEFTClient()
sso_info = deft_client.getUserInfo()
#- print sso_info
#- userName = sso_info['userName']
userId = sso_info['userId']
#userId='alexei'
print "INFO deft_handling : user ID : ",userId
# simple authentication, will be replaced by Dmitry's CERN SSO
# whoami = os.getlogin()
#if 'alexei.atlswing.'.find(whoami) < 0 :
if deft_user_list.deft_users.find(userId) < 0 :
print "%s : you are not allowed to change Tier and Cloud state "%(whoami)
sys.exit(0)
try:
opts, args = getopt.getopt(sys.argv[1:], "h:f:k:p:r:c:o:t:vu", \
["help",\
"change-task-state",\
"check-aborted-tasks",\
"finish-task",\
"kill-task",\
"obsolete-task",\
"change-task-priority",\
"synchronizeJediDeft",
"tid",\
"prio"])
except getopt.GetoptError as err:
# print help information and exit:
print str(err) # will print something like "option -a not recognized"
usage()
sys.exit(2)
dbupdate = False
status = 0 # return status
changeTaskStateF = False
checkAbortedTasksF = False
finishTaskF = False
killTaskF = False
obsoleteTaskStateF = False
changeTaskPriorityF = False
synchronizeJediDeftF = False
task_id = -1 # Task ID
task_prio = -1 # Task priority
task_state='unknown' # Task state
for o, a in opts:
if o in ("-h","--help") :
usage()
sys.exit(1)
elif o == "-v" :
verbose = True
elif o == "-u" :
dbupdate = True
elif o == "-c" :
task_state = a.strip()
elif o == "-t" :
task_id = int(a.strip())
elif o == "-p" :
task_prio = int(a.strip())
elif o == "--change-task-state" :
changeTaskStateF = True
elif o == '--finish-task' :
finishTaskF = True
elif o == "--killTask" :
killTaskF = True
elif o == "--obsolete-task" :
obsoleteTaskStateF = True
elif o == "--change-task-priority" :
changeTaskPriorityF = True
elif o == "--check-aborted-tasks":
checkAbortedTasksF = True
elif o == "--synchronizeJediDeft" :
synchronizeJediDeftF = True
if changeTaskStateF or obsoleteTaskStateF or changeTaskPriorityF :
# check that other actions are not in progress
findProcess('deft_handling','Task','Quit')
findProcess('deft_handling','synchronizeJediDeft','Quit')
if changeTaskStateF == True :
if task_id < 0 or task_state == 'unknown' :
msg = "ERROR. Check task ID or/and Task State"
error = 1
else :
msg = ("INFO. Change state for task : %s to %s")%(task_id,task_state)
print msg
if error == 0 :
changeTaskState(task_id,task_state,dbupdate)
if obsoleteTaskStateF == True :
if task_id < 0 :
msg = "ERROR. Check task ID"
error = 1
else :
msg = ("INFO. Obsolete task : %s ")%(task_id)
print msg
if error == 0 :
obsoleteTaskState(task_id,dbupdate)
if changeTaskPriorityF == True :
if task_id < 0 or task_prio < 0 :
msg = "ERROR. Check task ID or/and Task Priority"
error = 1
else :
msg = ("INFO. Execute JEDI cmd to change priority for task : %s to %s")%(task_id,task_prio)
print msg
if error == 0 :
status = JediTaskCmd('changeTaskPriority',task_id,task_prio)
if checkAbortedTasksF == True :
findProcess('deft_handling','checkAbortedTasks','Quit')
status = checkAbortedTasks()
if finishTaskF == True :
if task_id < 0 :
msg = "ERROR. Check task ID "
error = 1
else :
msg = ("INFO. Execute JEDI command to finish task : %s ")%(task_id)
print msg
if error == 0 :
status = JediTaskCmd('finishTask',task_id,task_prio)
if killTaskF == True :
if task_id < 0 :
msg = "ERROR. Check task ID "
error = 1
else :
msg = ("INFO. Execute JEDI cmd to kill task : %s ")%(task_id)
print msg
if error == 0 :
status = JediTaskCmd('killTask',task_id,task_prio)
if synchronizeJediDeftF == True :
error = 0
findProcess('deft_handling','synchronizeJediDeft','Quit')
synchronizeJediDeft()
# Script entry: executes unconditionally (no __main__ guard), so importing
# this module also runs the command-line handler.
main()
| {
"repo_name": "PanDAWMS/panda-bigmon-atlas",
"path": "atlas/postproduction/deft/deft_handling.py",
"copies": "1",
"size": "46427",
"license": "apache-2.0",
"hash": -3693314298390160400,
"line_mean": 37.9488255034,
"line_max": 128,
"alpha_frac": 0.5612466883,
"autogenerated": false,
"ratio": 3.286867256637168,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4348113944937168,
"avg_score": null,
"num_lines": null
} |
# A.Klimentov May 13, 2014
#
# register productiuon containers and add datasets
#
# Last Edit : Jun 11, 2014 ak
#
import re
import sys
import os
import getopt
import commands
import datetime
import time
import cx_Oracle
import deft_conf
import deft_pass
import deft_user_list
import DButils
#
from dq2.clientapi.DQ2 import DQ2
from dq2.common.DQConstants import DatasetState
from dq2.common.DQException import DQBackendException
from dq2.common.client.DQClientException import DQInternalServerException
from dq2.common.dao.DQDaoException import *
from dq2.common import get_dict_item
from dq2.location.DQLocationConstants import LocationState
from dq2.clientapi.cli.SetMetaDataAttribute import SetMetaDataAttribute
#
# Use the 'alexei' Rucio account for all DDM (DQ2) calls made below.
os.environ['RUCIO_ACCOUNT'] = 'alexei'
#
# Module-wide DQ2 client instance shared by all functions.
dq2api = DQ2()
verbose = False
# Comma-separated task/dataset state lists used when building SQL clauses.
task_finish_states = 'done,finish,failed,obsolete,aborted'
task_aborted_states = 'aborted,failed,obsolete'
datasets_deletion_states = 'toBeDeleted,Deleted'
user_task_label = 'user' # JEDI prodsourcelabel parameter
JEDI_datasets_final_statesL = ['aborted','broken','done','failed','partial','ready']
DEFT_datasets_done_statesL = ['done']
# NOTE(review): the last element below is one single string holding a
# comma-separated list; if individual states were intended, the quoting is
# misplaced -- confirm before relying on membership tests against this list.
DEFT_datasets_final_statesL = \
    ['aborted','broken','done','failed','deleted,toBeDeleted,toBeErased,waitErased,toBeCleaned,waitCleaned']
DEFT_datasets_postproduction_states = 'deleted,toBeErased,waitErased,toBeCleaned,waitCleaned'
DEFT_tasks_abort_statesL = ['aborted','broken','failed']
Request_update_statesL = ['registered','running','done','finished','failed']
# Tasks with ids below this threshold are ignored by the queries.
MIN_DEFT_TASK_ID = 4000000
#synch intervals (hours)
REQUEST_SYNCH_INTERVAL = 12000
TASK_SYNCH_INTERVAL = 12000
DATASET_SYNCH_INTERVAL = 72
def connectDEFT(flag) :
    """Open a connection to the DEFT Oracle database.

    flag == 'W' connects with the writer account; any other value gives a
    read-only connection.  Returns (connection, cursor, schema_host).
    """
    dbname = deft_conf.daemon['deftDB_INTR']
    deftDB = deft_conf.daemon['deftDB_host']
    if flag == 'W' :
        user_key, pass_key = 'deftDB_writer', 'atlas_deft_w'
    else :
        user_key, pass_key = 'deftDB_reader', 'atlas_deft_r'
    dbuser = deft_conf.daemon[user_key]
    dbpwd = deft_pass.deft_pass_intr[pass_key]
    (pdb, dbcur) = DButils.connectDEFT(dbname, dbuser, dbpwd)
    return pdb, dbcur, deftDB
def addTidDatasetToContainer():
    """Register production containers in DDM and attach new TID datasets.

    Selects recently finished, non-user, non-log datasets from DEFT,
    registers the parent container in DDM when it does not exist yet, adds
    each dataset to its container, and records the container bookkeeping in
    the DEFT containers table.
    """
    #
    # get list of TID datasets
    # register container if it isn't registered yet
    # add TID dataset to Container
    #
    timeInterval = DATASET_SYNCH_INTERVAL # hours
    # NOTE(review): nDQ2ErrorsInRow is incremented on fatal DQ2 errors but
    # never checked against a threshold.
    nDQ2ErrorsInRow = 0
    nContainers = 0
    nDatasets = 0
    dbupdate = True
    minTaskID = MIN_DEFT_TASK_ID
    maxTaskID = minTaskID*10
    # Build a quoted IN-list from the 'done' dataset states.
    DEFT_datasets_final_states = ''
    for s in DEFT_datasets_done_statesL :
        DEFT_datasets_final_states += "'%s',"%(s)
    DEFT_datasets_final_states = DEFT_datasets_final_states[0:(len(DEFT_datasets_final_states)-1)]
    T0 = int(time.time())
    # connect to Oracle (read-only phase)
    (pdb,dbcur,deftDB) = connectDEFT('R')
    t_table_datasets_DEFT = "%s.%s"%(deftDB,deft_conf.daemon['t_production_dataset'])
    t_table_containers_DEFT = "%s.%s"%(deftDB,deft_conf.daemon['t_production_container'])
    # get list of datasets
    sql = "SELECT name, taskid, pr_id, status, phys_group, timestamp FROM %s "%(t_table_datasets_DEFT)
    sql+= "WHERE TIMESTAMP > current_timestamp - %s AND taskid >= %s "%(timeInterval,MIN_DEFT_TASK_ID)
    sql+= "AND (name NOT LIKE '%s' AND name NOT LIKE '%s') "%('user%','%.log.%')
    sql+= "AND status in (%s) "%(DEFT_datasets_final_states)
    sql+= "ORDER BY taskid"
    print sql
    dsets = DButils.QueryAll(pdb,sql)
    # get min and max task ID (bounds for the container query below)
    sql = "SELECT min(parent_tid), max(parent_tid) FROM %s "%(t_table_datasets_DEFT)
    sql+= "WHERE TIMESTAMP > current_timestamp - %s AND taskid >= %s "%(timeInterval,MIN_DEFT_TASK_ID)
    sql+= "AND (name NOT LIKE '%s' AND name NOT like '%s') "%('user%','%.log.%')
    sql+= "AND status in (%s) "%(DEFT_datasets_final_states)
    print sql
    mimax = DButils.QueryAll(pdb,sql)
    if len(mimax) :
        for i in mimax :
            minTaskID = i[0]
            maxTaskID = i[1]
    else :
        print "Warning. Cannot find information for query"
    # get list of containers already known to DEFT
    sql = "SELECT name, parent_tid, status, c_time FROM %s "%(t_table_containers_DEFT)
    sql+= "WHERE parent_tid >= %s and parent_tid <= %s "%(minTaskID, maxTaskID)
    sql+= "ORDER by parent_tid"
    print sql
    cntrs = DButils.QueryAll(pdb,sql)
    DButils.closeDB(pdb,dbcur)
    # prepare a list of containers to be registered
    sql_update = []
    for ds in dsets :
        dsname = ds[0]
        d_tid = ds[1]
        d_rid = ds[2]
        d_status = ds[3]
        d_phgroup= ds[4]
        if dsname.find('.log') < 0 :
            # Container name is the dataset name up to '_tid', plus '/'.
            junk = dsname.split('_tid')
            top_dsname = junk[0].strip()
            cnt_name = "%s/"%(top_dsname)
            print "Check containers list"
            cnt_list_flag = 0 # container registered in database
            ddm_list_flag = 0 # in DDM
            reg_dset_flag = 0 # new dataset(s) added to the container
            # cntrs is ordered by parent_tid, so we can stop early.
            for cn in cntrs :
                cname = cn[0]
                c_tid = cn[1]
                c_time= cn[3]
                if c_tid == d_tid :
                    if cname == cnt_name :
                        print "Container %s found in database (task id = %s, registration time : %s)"%\
                            (cname,c_tid,c_time)
                        cnt_list_flag = 1
                if c_tid > d_tid :
                    break
            if cnt_list_flag != 1 :
                print "Container %s NOT found in database "%(cnt_name)
            print "Check DDM catalog : %s"%(cnt_name)
            error = 0
            try :
                ret = dq2api.listDatasets(cnt_name)
                print ret
                if len(ret) > 0 : ddm_list_flag = 1
            except :
                print "ERROR - cannot execute : %s%s%s "%("ret = dq2api.listDatasets(",cnt_name,")")
                error = 1
            if ddm_list_flag == 1 and error == 0 :
                msg = "Container %s exists in DDM "%(cnt_name)
                if cnt_list_flag == 0 : msg += ". Get meta-info and add it to the database"
                if cnt_list_flag == 1 : msg += "and in database. Do nothing. Proceed to datasets registration in the container"
                print msg
            if ddm_list_flag == 0 :
                print "Register container : %s (Container already registered : %s)"%(cnt_name,nContainers)
                try :
                    dq2api.registerContainer(cnt_name)
                    nContainers += 1
                except :
                    error = 1
                    print "Error dq2api.registerContainer(%s)"%(cnt_name)
                    print "do no update database (%s)"%(cnt_name)
            if error == 1 :
                # Any DDM failure aborts the whole run.
                print "Error in DDM part. quit"
                sys.exit(1)
            if error == 0 :
                # get creation date from DDM metadata
                creationdate = dq2api.getMetaDataAttribute(cnt_name, ['creationdate',])
                c_time = creationdate['creationdate']
                ELEMENTS = []
                print "Register new elements in %s (%s)"%(cnt_name,dsname)
                ELEMENTS.append(dsname)
                try :
                    ret = dq2api.registerDatasetsInContainer(cnt_name,ELEMENTS)
                    nDatasets += len(ELEMENTS)
                    reg_dset_flag = 1
                except (DQException):
                    # Dataset already attached: not an error, just skip.
                    print "Warning : %s already has dataset : %s"%(cnt_name,dsname)
                    reg_dset_flag = 0
                except :
                    """fatal error... I increment the error but you can exit if you want"""
                    nDQ2ErrorsInRow +=1
                    error = 1
                    print "Fatal error in registerDatasetsInContainer. Quit "
                    sys.exit(1)
            # form sql statetment for the DEFT containers table
            sql =''
            if d_rid == None or d_rid == 'None' : d_rid=-1
            if cnt_list_flag == 1 :
                # Known container: only touch d_time when a dataset was added.
                if reg_dset_flag == 1 :
                    sql = "UPDATE %s SET d_time = current_timestamp WHERE name = '%s'"%(t_table_containers_DEFT,cnt_name)
                    sql_update.append(sql)
            else :
                # New container row; d_time depends on whether a dataset
                # was attached in this pass.
                sql = "INSERT INTO %s VALUES"%(t_table_containers_DEFT)
                if reg_dset_flag == 0 :
                    sql+= "('%s',%s,%s,'%s','%s',to_timestamp('%s','YYYY-MM-DD HH24:MI:SS'),to_timestamp('%s','YYYY-MM-DD HH24:MI:SS'),current_timestamp)"%(cnt_name,d_tid,d_rid,'registered',d_phgroup,c_time,c_time)
                else :
                    sql+= "('%s',%s,%s,'%s','%s',to_timestamp('%s','YYYY-MM-DD HH24:MI:SS'),current_timestamp,current_timestamp)"%\
                        (cnt_name,d_tid,d_rid,'registered',d_phgroup,c_time)
                if len(sql) : sql_update.append(sql)
    if dbupdate :
        # Apply all collected statements in one write session.
        (pdb,dbcur,deftDB) = connectDEFT('W')
        for sql in sql_update :
            print "SQL update : ",sql
            DButils.QueryUpdate(pdb,sql)
        DButils.QueryCommit(pdb)
        DButils.closeDB(pdb,dbcur)
    else :
        print "No database update"
    Tf = int(time.time())
    dT = Tf - T0
    print time.ctime()
    print "addTiddatasets. Container registered : %s, Datasets registered : %s"%(nContainers, nDatasets)
    print "addTidDatasets. Total time : %s sec"%(dT)
def main() :
    """Entry point: synchronize TID datasets into their DDM containers."""
    addTidDatasetToContainer()
# Runs unconditionally on import/execution (no __main__ guard).
main()
| {
"repo_name": "PanDAWMS/panda-bigmon-atlas",
"path": "atlas/postproduction/deft/add_containter.py",
"copies": "1",
"size": "9730",
"license": "apache-2.0",
"hash": -7812118469978455000,
"line_mean": 36.4230769231,
"line_max": 214,
"alpha_frac": 0.5568345324,
"autogenerated": false,
"ratio": 3.4962270930650376,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9362058958980937,
"avg_score": 0.038200533296820036,
"num_lines": 260
} |
# Demonstrates the sequence operators + (concatenation) and * (repetition)
# for list, tuple and str, including the augmented forms += and *=.
# The expected output of each print is shown in the comment below it.
a_l = [0, 1, 2]
b_l = [10, 20, 30]
a_t = (0, 1, 2)
b_t = (10, 20, 30)
a_s = 'abc'
b_s = 'xyz'
# + concatenates two sequences of the same type.
print(a_l + b_l)
# [0, 1, 2, 10, 20, 30]
print(a_t + b_t)
# (0, 1, 2, 10, 20, 30)
print(a_s + b_s)
# abcxyz
# Concatenating a sequence with a non-sequence raises TypeError.
# print(a_l + 3)
# TypeError: can only concatenate list (not "int") to list
print(a_l + [3])
# [0, 1, 2, 3]
# (3) is just the int 3; a one-element tuple needs a trailing comma.
# print(a_t + (3))
# TypeError: can only concatenate tuple (not "int") to tuple
print(a_t + (3, ))
# (0, 1, 2, 3)
a_l += b_l
print(a_l)
# [0, 1, 2, 10, 20, 30]
a_t += b_t
print(a_t)
# (0, 1, 2, 10, 20, 30)
a_s += b_s
print(a_s)
# abcxyz
# * repeats a sequence; the int operand may appear on either side.
print(b_l * 3)
# [10, 20, 30, 10, 20, 30, 10, 20, 30]
print(3 * b_l)
# [10, 20, 30, 10, 20, 30, 10, 20, 30]
print(b_t * 3)
# (10, 20, 30, 10, 20, 30, 10, 20, 30)
print(3 * b_t)
# (10, 20, 30, 10, 20, 30, 10, 20, 30)
print(b_s * 3)
# xyzxyzxyz
print(3 * b_s)
# xyzxyzxyz
# The repetition count must be an int; zero or negative yields an empty sequence.
# print(b_l * 0.5)
# TypeError: can't multiply sequence by non-int of type 'float'
print(b_l * -1)
# []
b_l *= 3
print(b_l)
# [10, 20, 30, 10, 20, 30, 10, 20, 30]
b_t *= 3
print(b_t)
# (10, 20, 30, 10, 20, 30, 10, 20, 30)
b_s *= 3
print(b_s)
# xyzxyzxyz
# * binds tighter than +, so this is a_l + (b_l * 3).
a_l = [0, 1, 2]
b_l = [10, 20, 30]
c_l = a_l + b_l * 3
print(c_l)
# [0, 1, 2, 10, 20, 30, 10, 20, 30, 10, 20, 30]
| {
"repo_name": "nkmk/python-snippets",
"path": "notebook/arithmetic_operator_list_tuple_str.py",
"copies": "1",
"size": "1210",
"license": "mit",
"hash": -6346641609654221000,
"line_mean": 13.4047619048,
"line_max": 63,
"alpha_frac": 0.494214876,
"autogenerated": false,
"ratio": 1.7872968980797637,
"config_test": false,
"has_no_keywords": true,
"few_assignments": false,
"quality_score": 0.27815117740797635,
"avg_score": null,
"num_lines": null
} |
a = """L1, L5, R1, R3, L4, L5, R5, R1, L2, L2, L3, R4, L2, R3, R1, L2, R5, R3, L4, R4, L3, R3, R3, L2, R1, L3, R2, L1, R4, L2, R4, L4, R5, L3, R1, R1, L1, L3, L2, R1, R3, R2, L1, R4, L4, R2, L189, L4, R5, R3, L1, R47, R4, R1, R3, L3, L3, L2, R70, L1, R4, R185, R5, L4, L5, R4, L1, L4, R5, L3, R2, R3, L5, L3, R5, L1, R5, L4, R1, R2, L2, L5, L2, R4, L3, R5, R1, L5, L4, L3, R4, L3, L4, L1, L5, L5, R5, L5, L2, L1, L2, L4, L1, L2, R3, R1, R1, L2, L5, R2, L3, L5, L4, L2, L1, L2, R3, L1, L4, R3, R3, L2, R5, L1, L3, L3, L3, L5, R5, R1, R2, L3, L2, R4, R1, R1, R3, R4, R3, L3, R3, L5, R2, L2, R4, R5, L4, L3, L1, L5, L1, R1, R2, L1, R3, R4, R5, R2, R3, L2, L1, L5""" # NOQA
def locations(inp):
    """Yield every grid position visited, starting with the origin (0, 0).

    *inp* is a comma-separated list of moves like 'R2, L3': a turn
    (L or R) followed by a step count.  Raises ValueError on any other
    turn character.
    """
    tokens = [t.strip() for t in inp.split(',')]
    moves = [(t[0], int(t[1:])) for t in tokens if t]
    turn_left = {'N': 'W', 'W': 'S', 'S': 'E', 'E': 'N'}
    turn_right = {'N': 'E', 'E': 'S', 'S': 'W', 'W': 'N'}
    deltas = {'N': (0, 1), 'S': (0, -1), 'E': (1, 0), 'W': (-1, 0)}
    heading = 'N'
    x = y = 0
    yield 0, 0
    for turn, dist in moves:
        if turn == 'L':
            heading = turn_left[heading]
        elif turn == 'R':
            heading = turn_right[heading]
        else:
            raise ValueError('turn: {0}'.format(turn))
        dx, dy = deltas[heading]
        for _ in range(dist):
            x += dx
            y += dy
            yield x, y
def distance(pos):
    """Return the Manhattan (taxicab) distance from the origin to *pos*."""
    col, row = pos
    return abs(col) + abs(row)
def p1(inp):
    """Part 1: Manhattan distance from the origin to the final position."""
    for last in locations(inp):
        pass
    return distance(last)
def p2(inp):
visited = set()
for pos in locations(inp):
print pos
if pos in visited:
return distance(pos)
visited.add(pos)
# Worked examples from the puzzle statement (kept for reference):
# assert p1('R2, L3') == 5
# assert p1('R2, R2, R2') == 2
# assert p1('R5, L5, R5, R3') == 12
# Regression check against the known answer for the puzzle input above.
assert p1(a) == 253
# assert p2('R8, R4, R4, R8') == 4
# print p2(a)
| {
"repo_name": "tehasdf/AdventOfCode2016",
"path": "p1.py",
"copies": "1",
"size": "1913",
"license": "mit",
"hash": -862583638469329700,
"line_mean": 31.9827586207,
"line_max": 670,
"alpha_frac": 0.4605331939,
"autogenerated": false,
"ratio": 2.1446188340807173,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.31051520279807177,
"avg_score": null,
"num_lines": null
} |
"""A L1 loss on BatchNorm gamma that targets the number of activations."""
from __future__ import absolute_import
from __future__ import division
# [internal] enable type annotations
from __future__ import print_function
from typing import Optional
from morph_net.framework import batch_norm_source_op_handler
from morph_net.framework import conv2d_transpose_source_op_handler
from morph_net.framework import conv_source_op_handler
from morph_net.framework import generic_regularizers
from morph_net.framework import matmul_source_op_handler
from morph_net.framework import op_handler_decorator
from morph_net.framework import op_handlers
from morph_net.framework import op_regularizer_manager as orm
from morph_net.network_regularizers import cost_calculator
from morph_net.network_regularizers import resource_function
import tensorflow.compat.v1 as tf
from typing import Type, List
class GammaActivationRegularizer(generic_regularizers.NetworkRegularizer):
  """A NetworkRegularizer that targets activation count using Gamma L1."""

  def __init__(self,
               output_boundary: List[tf.Operation],
               gamma_threshold,
               regularizer_decorator: Optional[Type[
                   generic_regularizers.OpRegularizer]] = None,
               decorator_parameters=None,
               input_boundary: Optional[List[tf.Operation]] = None,
               force_group=None,
               regularizer_blacklist=None):
    """Creates a GammaActivationRegularizer object.

    Args:
      output_boundary: Ops to regularize; an OpRegularizer is created for
        each of these and, recursively, for every op they depend on via
        data dependencies that do not cross input_boundary.
      gamma_threshold: Float scalar used as the 'gamma_threshold' of every
        GammaL1Regularizer instance this class creates.
      regularizer_decorator: Optional OpRegularizer decorator class.
      decorator_parameters: Dict of parameters for the decorator factory;
        leave None for decorators that take no parameters.
      input_boundary: Ops forming the input boundary of the regularized
        subgraph (the boundary itself is not regularized).
      force_group: List of regexes for ops that must be force-grouped; each
        regex defines one group ('|' joins several patterns into a single
        regex). See op_regularizer_manager for details.
      regularizer_blacklist: List of regexes for ops to exclude from
        regularization. See op_regularizer_manager for details.
    """
    gamma_handler = batch_norm_source_op_handler.BatchNormSourceOpHandler(
        gamma_threshold)
    if regularizer_decorator:
      gamma_handler = op_handler_decorator.OpHandlerDecorator(
          gamma_handler, regularizer_decorator, decorator_parameters)

    handler_dict = op_handlers.get_gamma_op_handler_dict()
    # Every fused batch-norm variant shares the same gamma source handler.
    for bn_op_type in ('FusedBatchNorm', 'FusedBatchNormV2',
                       'FusedBatchNormV3'):
      handler_dict[bn_op_type] = gamma_handler

    self._manager = orm.OpRegularizerManager(
        output_boundary,
        handler_dict,
        input_boundary=input_boundary,
        force_group=force_group,
        regularizer_blacklist=regularizer_blacklist)
    self._calculator = cost_calculator.CostCalculator(
        self._manager, resource_function.activation_count_function)

  def get_regularization_term(self, ops=None):
    return self._calculator.get_regularization_term(ops)

  def get_cost(self, ops=None):
    return self._calculator.get_cost(ops)

  @property
  def op_regularizer_manager(self):
    return self._manager

  @property
  def name(self):
    return 'GammaActivationCount'

  @property
  def cost_name(self):
    return 'Activations'
class GroupLassoActivationRegularizer(generic_regularizers.NetworkRegularizer):
  """A NetworkRegularizer that targets activation count using L1 group lasso."""

  def __init__(self,
               output_boundary: List[tf.Operation],
               threshold,
               l1_fraction=0,
               regularizer_decorator: Optional[Type[
                   generic_regularizers.OpRegularizer]] = None,
               decorator_parameters=None,
               input_boundary: Optional[List[tf.Operation]] = None,
               force_group=None,
               regularizer_blacklist=None):
    """Creates a GroupLassoActivationRegularizer object.

    Args:
      output_boundary: Ops to regularize; an OpRegularizer is created for
        each of these and, recursively, for every op they depend on via
        data dependencies that do not cross input_boundary.
      threshold: Float scalar used as the 'threshold' of every regularizer
        instance this class creates.
      l1_fraction: Relative weight of L1 in L1 + L2 regularization.
      regularizer_decorator: Optional OpRegularizer decorator class.
      decorator_parameters: Dict of parameters for the decorator factory;
        leave None for decorators that take no parameters.
      input_boundary: Ops forming the input boundary of the regularized
        subgraph (the boundary itself is not regularized).
      force_group: List of regexes for ops that must be force-grouped; each
        regex defines one group ('|' joins several patterns into a single
        regex). See op_regularizer_manager for details.
      regularizer_blacklist: List of regexes for ops to exclude from
        regularization. See op_regularizer_manager for details.
    """
    def _maybe_decorate(handler):
      # Wrap a source handler in the configured decorator, if any.
      if regularizer_decorator:
        return op_handler_decorator.OpHandlerDecorator(
            handler, regularizer_decorator, decorator_parameters)
      return handler

    handler_dict = op_handlers.get_group_lasso_op_handler_dict()
    handler_dict['Conv2D'] = _maybe_decorate(
        conv_source_op_handler.Conv2DSourceOpHandler(threshold, l1_fraction))
    handler_dict['Conv2DBackpropInput'] = _maybe_decorate(
        conv2d_transpose_source_op_handler.Conv2DTransposeSourceOpHandler(
            threshold, l1_fraction))
    handler_dict['MatMul'] = _maybe_decorate(
        matmul_source_op_handler.MatMulSourceOpHandler(threshold, l1_fraction))

    self._manager = orm.OpRegularizerManager(
        output_boundary,
        handler_dict,
        input_boundary=input_boundary,
        force_group=force_group,
        regularizer_blacklist=regularizer_blacklist)
    self._calculator = cost_calculator.CostCalculator(
        self._manager, resource_function.activation_count_function)

  def get_regularization_term(self, ops=None):
    return self._calculator.get_regularization_term(ops)

  def get_cost(self, ops=None):
    return self._calculator.get_cost(ops)

  @property
  def op_regularizer_manager(self):
    return self._manager

  @property
  def name(self):
    return 'GroupLassoActivationCount'

  @property
  def cost_name(self):
    return 'Activations'
| {
"repo_name": "google-research/morph-net",
"path": "morph_net/network_regularizers/activation_regularizer.py",
"copies": "1",
"size": "7520",
"license": "apache-2.0",
"hash": -6452031395115190000,
"line_mean": 41.0111731844,
"line_max": 80,
"alpha_frac": 0.7090425532,
"autogenerated": false,
"ratio": 4.32183908045977,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.553088163365977,
"avg_score": null,
"num_lines": null
} |
# ALAINA KAFKES IMPLEMENTATION - alainakafkes
## Mergesort implementation in Python
## Runtime: O(n log(n))
## Space: O(n)
## Advantages: guaranteed good complexity (no need to worry about choice of pivot as in quicksort)
## Disadvantages: more temporary data storage needed than quicksort
#def mergesort(arr):
# if len(arr) <= 1:
# return arr
# midpoint = len(arr) // 2
# return merger(mergesort(arr[:mid]), mergesort(arr[mid:]))
#def merger(arrLeft, arrRight):
# if not arrLeft:
# return arrRight
# if not arrRight:
# return arrLeft
# if arrLeft[0] < arrRight[0]:
# return arrLeft[0] + merger(arrLeft[1:], arrRight)
# return arrRight[0] + merger(arrLeft, arrRight[1:])
# YASH KATARIYA IMPLEMENTATION - yashk2810
def merge(L, R, A):
    """Merge the sorted lists L and R into A in place.

    Writes len(L) + len(R) elements into A starting at index 0.  Ties are
    resolved in favour of R (an element of R equal to one of L is placed
    first), matching the original comparison L[i] < R[j] -- note this means
    the sort is NOT stable for equal keys split across the two halves.
    """
    nL = len(L)
    nR = len(R)
    i = 0
    j = 0
    k = 0
    while i < nL and j < nR:
        if L[i] < R[j]:
            A[k] = L[i]
            i += 1
        else:
            A[k] = R[j]
            j += 1
        k += 1
    # Copy whichever side still has elements (at most one of these runs).
    while i < nL:
        A[k] = L[i]
        i += 1
        k += 1
    while j < nR:
        A[k] = R[j]
        j += 1
        k += 1


def mergesort(A):
    """Sort list A in place with top-down merge sort and return it.

    Time complexity O(n log n); extra space O(n) for the half copies.
    """
    n = len(A)
    if n < 2:
        return A
    # BUG FIX: use floor division so the midpoint is an int under Python 3
    # as well ('/' yields a float there, which breaks the slicing below).
    # Behaviour is unchanged under Python 2 for non-negative ints.
    mid = n // 2
    left = A[:mid]
    right = A[mid:]
    mergesort(left)
    mergesort(right)
    merge(left, right, A)
    return A
# Smoke test; expected output: [1, 2, 3, 4, 5, 6, 8]
print mergesort([2,4,1,6,8,5,3])
| {
"repo_name": "saru95/DSA",
"path": "Python/mergesort.py",
"copies": "1",
"size": "1356",
"license": "mit",
"hash": 6069667117075721000,
"line_mean": 20.1875,
"line_max": 109,
"alpha_frac": 0.6342182891,
"autogenerated": false,
"ratio": 2.3915343915343916,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8012951477533121,
"avg_score": 0.10256024062025411,
"num_lines": 64
} |
# League identifier codes used throughout this module.
AL = 'AL'
NL = 'NL'
MLB = 'MLB'
# All recognized league codes.
LEAGUES = [AL, NL, MLB]
def mlb_teams(year):
    """ Return all major-league team abbreviations active in the given year.

    Caveat, list is not complete; those included are only those
    with a current team still active.
    """
    season = int(year)
    combined = al_teams(season) + nl_teams(season)
    return sorted(combined)
def al_teams(year):
    """ Return AL team abbreviations existing in the given year.

    Caveat, list is not complete; those included are only those
    with a current team still active.
    """
    year = int(year)
    if year < 1901:
        return []
    # Founding AL franchises still active today.
    teams = ['BOS', 'CLE', 'CHW', 'DET']
    # Later entries that have used the same abbreviation throughout.
    for first_season, abbr in ((1903, 'NYY'), (1969, 'KCR'),
                               (1977, 'SEA'), (1977, 'TOR')):
        if year >= first_season:
            teams.append(abbr)
    # Franchises whose abbreviation changed over time.
    league = AL
    angels(year, teams)
    astros(year, teams, league)
    athletics(year, teams)
    brewers(year, teams, league)
    orioles(year, teams)
    rangers(year, teams)
    rays(year, teams)
    twins(year, teams)
    return sorted(teams)
def nl_teams(year):
    """ Return NL team abbreviations existing in the given year.

    Caveat, list is not complete; those included are only those
    with a current team still active.
    """
    year = int(year)
    if year < 1876:
        return []
    # Founding NL franchise still active today.
    teams = ['CHC']
    # Later entries that have used the same abbreviation throughout.
    for first_season, abbr in ((1883, 'PHI'), (1887, 'PIT'), (1890, 'CIN'),
                               (1892, 'STL'), (1962, 'NYM'), (1969, 'SDP'),
                               (1993, 'COL'), (1996, 'ARI')):
        if year >= first_season:
            teams.append(abbr)
    # Franchises whose abbreviation changed over time.
    league = NL
    astros(year, teams, league)
    braves(year, teams)
    brewers(year, teams, league)
    dodgers(year, teams)
    giants(year, teams)
    marlins(year, teams)
    nationals(year, teams)
    return sorted(teams)
# Dispatch table: league code -> function listing that league's teams.
TEAMS = {AL : al_teams, NL : nl_teams, MLB : mlb_teams}
def angels(year, teams):
    """ Append the Angels abbreviation in use for the given year, if any.
    """
    # Eras are checked newest-first; the first match wins.
    for first_season, abbr in ((2005, 'LAA'), (1997, 'ANA'),
                               (1965, 'CAL'), (1961, 'LAA')):
        if year >= first_season:
            teams.append(abbr)
            break


def astros(year, teams, league):
    """ Append the Astros abbreviation if they played in *league* that year.
    """
    if league == AL and year >= 2013:
        teams.append('HOU')
    elif league == NL and 1962 <= year < 2013:
        teams.append('HOU')


def athletics(year, teams):
    """ Append the Athletics abbreviation in use for the given year, if any.
    """
    for first_season, abbr in ((1968, 'OAK'), (1955, 'KCA'), (1901, 'PHA')):
        if year >= first_season:
            teams.append(abbr)
            break


def braves(year, teams):
    """ Append the Braves abbreviation in use for the given year, if any.
    """
    for first_season, abbr in ((1966, 'ATL'), (1953, 'MLN'), (1876, 'BSN')):
        if year >= first_season:
            teams.append(abbr)
            break


def brewers(year, teams, league):
    """ Append the Brewers/Pilots abbreviation if applicable for the year/league.
    """
    if year >= 1993:
        # NL era (Milwaukee moved leagues in 1993... per the original logic).
        if league == NL:
            teams.append('MIL')
    elif year >= 1970:
        # AL era in Milwaukee.
        if league == AL:
            teams.append('MIL')
    elif year == 1969 and league == AL:
        # Single season as the Seattle Pilots.
        teams.append('SEP')


def dodgers(year, teams):
    """ Append the Dodgers abbreviation in use for the given year, if any.
    """
    for first_season, abbr in ((1958, 'LAD'), (1884, 'BRO')):
        if year >= first_season:
            teams.append(abbr)
            break
def giants(year, teams):
""" Append appropriate Giants abbreviation for year if applicable.
"""
if year >= 1958:
teams.append('SFG')
elif year >= 1883:
teams.append('NYG')
def marlins(year, teams):
""" Append appropriate Marlins abbreviation for year if applicable.
"""
if year >= 2012:
teams.append('MIA')
elif year >= 1993:
teams.append('FLA')
def nationals(year, teams):
""" Append appropriate Nationals abbreviation for year if applicable.
"""
if year >= 2005:
teams.append('WSN')
elif year >= 1969:
teams.append('MON')
def orioles(year, teams):
    """Append the Orioles abbreviation in use during *year*, if any."""
    # Original code tested `year == 1901` for 'MLA'; `year >= 1901` is
    # equivalent here because `year >= 1902` was already ruled out.
    for first_season, abbrev in ((1954, 'BAL'), (1902, 'SLB'), (1901, 'MLA')):
        if year >= first_season:
            teams.append(abbrev)
            break
def rangers(year, teams):
    """Append the Rangers/Senators abbreviation in use during *year*, if any."""
    for first_season, abbrev in ((1972, 'TEX'), (1961, 'WSA')):
        if year >= first_season:
            teams.append(abbrev)
            break
def rays(year, teams):
    """Append the Rays abbreviation in use during *year*, if any."""
    for first_season, abbrev in ((2008, 'TBR'), (1998, 'TBD')):
        if year >= first_season:
            teams.append(abbrev)
            break
def twins(year, teams):
    """Append the Twins/Senators abbreviation in use during *year*, if any."""
    for first_season, abbrev in ((1961, 'MIN'), (1901, 'WSH')):
        if year >= first_season:
            teams.append(abbrev)
            break
def valid_teams_subset(year, teams):
    """Return True when every entry in *teams* is a valid abbreviation for *year*."""
    valid = mlb_teams(int(year))
    return all(team in valid for team in teams)
| {
"repo_name": "barajasr/Baseball-Reference-Plotting",
"path": "Teams.py",
"copies": "1",
"size": "5487",
"license": "bsd-2-clause",
"hash": -2996144160093778400,
"line_mean": 24.8820754717,
"line_max": 73,
"alpha_frac": 0.5806451613,
"autogenerated": false,
"ratio": 3.515054452274183,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.45956996135741834,
"avg_score": null,
"num_lines": null
} |
# Alamo Alerts Module
# Fetches the Alamo Drafthouse showtimes feed, diffs it against a local JSON
# snapshot, and tweets newly on-sale films via the account configured below.
import shutil
import requests
import json
import os
import arrow
import tweepy
# define global vars (local time object)
utcTime = arrow.utcnow()
localTime = utcTime.to('US/Central')
# define twitterbot with auth keys
# SECURITY(review): API consumer/access credentials are hard-coded in source.
# They should be moved to environment variables or a config file outside
# version control, and these exposed keys rotated.
twitterAuth = tweepy.OAuthHandler("MzMGLR7WDxAVnFm1mfCwqqZ5c", "L7zzwuL7RWXL7FgXGOUlWDj83DdUGIl2JyM2zH4T3g19o3MnRa")
twitterAuth.set_access_token("856950304725770240-AaDXJODlclQPVlGAhj5jo6PDS2jZSth", "KqV9NwRuRiN8iZ0Ha8LJ513QnUXaJnsbgpUFZ6XA8rMCK")
twitterApi = tweepy.API(twitterAuth)
# make a copy of the original Storage JSON file first
# (date-stamped working copy; it is moved to z_archive/ at the end of the run)
shutil.copy2("/home/altonchaney/webapps/htdocs/alamoAlert/alamoDataStorage.json", "/home/altonchaney/webapps/htdocs/alamoAlert/alamoDataStorage-"+ localTime.format('YYYYMMDD') +".json")
# GET Alamo API
alamoFeed = requests.get('http://feeds.drafthouse.com/adcService/showtimes.svc/market/0000/')
alamoFeedJson = alamoFeed.json()
# get the copied Storing JSON file
with open("/home/altonchaney/webapps/htdocs/alamoAlert/alamoDataStorage-"+ localTime.format('YYYYMMDD') +".json") as storageDataJson:
    storageData = json.load(storageDataJson)
# print("-------------------------------")
# print(storageData)
# print("-------------------------------")
# create a new writeable Storing JSON file which we will use to write new objects to and replace our old Storing JSON file if changes are made
with open("/home/altonchaney/webapps/htdocs/alamoAlert/alamoDataStorage.json", "w") as storageDataJsonNew:
# json.dump(storageData, storageDataJsonNew)
# set up a flag for each cinema we look at (this way we don't get tons of duplicates per date)
cinemaTweetFlag0003 = 0
# for each theater we look at, a tweet status string will be created
twitterStatus0003 = ""
# print('New Movies On Sale:')
# for each 'Dates', do the following
for date in alamoFeedJson["Market"]["Dates"]:
# save the date for looking up additional times
currentDate = date["DateId"]
# filter data down to specific 'Cinemas'
# look for 'CinemaId' "0003" or 'CinemaSlug' "village"
for cinema in date["Cinemas"]:
# temporary condition so we look a the Village location specifically
if cinema["CinemaId"] == "0003":
# print(cinema["CinemaName"] +" showings found for " + date["Date"] + ":")
for storageDataCinema in storageData["Cinemas"]:
if storageDataCinema["CinemaId"] == cinema["CinemaId"]:
currentStorageCinema = storageDataCinema
# print(currentStorageCinema)
else:
continue
# if new 'Films' exist (perform a search against the Storing JSON)
for film in cinema["Films"]:
# set up an 'undefined' object that gets over-written when we find non-matches in the Storing JSON
currentStorageFilm = {
"FilmSlug": "undefined"
}
# look through our current cinema storage and replace the undefined object if a match is found
for storageFilm in currentStorageCinema["Films"]:
if storageFilm["FilmSlug"] == film["FilmSlug"]:
currentStorageFilm = storageFilm
# if no object replacements were done in the previous loop, we'll then check if we should add an additional time flag
if currentStorageFilm["FilmSlug"] != "undefined":
# ----------------------------------- #
# Additional Time Functionality Start #
# ----------------------------------- #
# for each 'Series' within 'Films'
for series in film["Series"]:
# for each 'Formats' within 'Series'
for seriesformat in series["Formats"]:
# for each 'Sessions' within 'Formats'
for session in seriesformat["Sessions"]:
if currentStorageFilm["FilmOnSaleAddl"] == "false":
# set up dates from the current lookup date as well as the Storage JSON 'FilmOnSaleDate'
premiereDateFormatted = arrow.get(currentStorageFilm["FilmOnSaleDate"], 'YYYYMMDD')
currentDateFormatted = arrow.get(currentDate, 'YYYYMMDD')
dateDiff = 0
newTimeFlag = 0
# generate a simple number for day range
for d in arrow.Arrow.range('day', premiereDateFormatted, currentDateFormatted):
dateDiff += 1
# we'll also append any additional times in here if needed, but only if the showing is < 4 days from opening
if (session["SessionTime"] not in currentStorageFilm["FilmShowtimes"]) and dateDiff < 4:
currentStorageFilm["FilmShowtimes"] += session["SessionTime"] + " "
newTimeFlag = 1
# if the additional show flag for this movie is not flipped AND the new date is within 3 days of the opening date AND the movies name is not already contained in the tweet text (for brand new movies)
if dateDiff > 1 and dateDiff < 4 and (film["FilmName"] not in twitterStatus0003) and newTimeFlag == 1:
currentStorageFilm["FilmOnSaleAddl"] = "true"
# here we'll also add a line item to the tweet for the 'new' film on sale
if cinemaTweetFlag0003 == 0:
twitterStatus0003 += "Now On Sale at "+ cinema["CinemaName"] +":"
cinemaTweetFlag0003 = 1
twitterStatus0003 += "\n- " + film["FilmName"] + " [Add'l Times]"
continue
else:
continue
# note: this may be 'continue' overkill but for now it helps run through the script much faster (too many nested for loops :/ )
continue
continue
# ----------------------------------- #
# Additional Time Functionality End #
# ----------------------------------- #
continue
else:
# print "- "+ film["FilmName"]
# create a temporary film object we will store in our db if needed
newFilmObj = {
"FilmId": film["FilmId"],
"FilmName": film["FilmName"],
"FilmSlug": film["FilmSlug"],
"FilmOnSale": "false",
"FilmOnSaleAddl": "false",
"FilmOnSaleDate": date["DateId"],
"FilmShowtimes": ""
}
# for each 'Series'
for series in film["Series"]:
# for each 'Formats'
for seriesformat in series["Formats"]:
# for each 'Sessions'
for session in seriesformat["Sessions"]:
if session["SessionStatus"] == "onsale":
newFilmObj["FilmOnSale"] = "true"
newFilmObj["FilmShowtimes"] += session["SessionTime"] + " "
# before we append the new object lets first check again our current list of objs
filmExistsFlag = 0
for storageFilm in currentStorageCinema["Films"]:
if newFilmObj["FilmSlug"] == storageFilm["FilmSlug"]:
filmExistsFlag = 1
continue
# write newFilmObj into object store
if filmExistsFlag == 0:
currentStorageCinema["Films"].append(newFilmObj)
# here we'll also add a line item to the tweet for the 'new' film on sale
if cinemaTweetFlag0003 == 0:
twitterStatus0003 += "Now On Sale at "+ cinema["CinemaName"] +":"
cinemaTweetFlag0003 = 1
twitterStatus0003 += "\n- " + film["FilmName"]
continue
else:
continue
# note: this may be 'continue' overkill but for now it helps run through the script much faster (too many nested for loops :/ )
continue
continue
continue
continue
# - if alert exists for this cinema, send twitter alert
# - finally delete Alert Text object
# TWITTER STATUS TEXT EXAMPLE:
# "Multi-Line List:\n- Line Item 1\n- Line Item 2\n- Line Item 3\n- Line Item 4\n- Line Item 5\n- Line Item 6\n- Line Item 7\n- Line Item 8"
if twitterStatus0003 != "":
print(twitterStatus0003)
twitterApi.update_status(twitterStatus0003)
else:
print("No new movies :(")
# - if any changes were made to it, PUT Storing API
# then replace the original file with the new data
storageData["Cinemas"] = []
storageData["Cinemas"].append(currentStorageCinema)
json.dump(storageData, storageDataJsonNew)
# and replace the the old file with the new file
os.renames("/home/altonchaney/webapps/htdocs/alamoAlert/alamoDataStorage-"+ localTime.format('YYYYMMDD') +".json", "/home/altonchaney/webapps/htdocs/alamoAlert/z_archive/alamoDataStorage-"+ localTime.format('YYYYMMDD') +".json") | {
"repo_name": "altonchaney/alamoAlerts",
"path": "backend/alamoAlerts.py",
"copies": "1",
"size": "11377",
"license": "mit",
"hash": 8658678046511747000,
"line_mean": 60.5027027027,
"line_max": 243,
"alpha_frac": 0.4745539246,
"autogenerated": false,
"ratio": 4.95945945945946,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0034864269636461125,
"num_lines": 185
} |
""" A lan connect class using udp
"""
__author__ = "Oliver Lindemann <oliver@expyriment.org>"
__version__ = "0.5"
import atexit
import os
import socket
from multiprocessing import Process, Event, Queue
import logging
from .types import UDPData
from .polling_time_profile import PollingTimeProfile
from .process_priority_manager import get_priority
from .timer import Timer, app_timer, get_time_ms
def get_lan_ip():
    """Return this machine's LAN IP address as a string."""
    if os.name == "nt":
        # windows
        # based on http://stackoverflow.com/questions/11735821/python-get-localhost-ip
        return socket.gethostbyname(socket.gethostname())
    # linux: first address reported by `hostname -I`
    from subprocess import check_output
    output = check_output("hostname -I".split(" ")).decode()
    return output.split(" ")[0].strip()
class UDPConnection(object):
    """Non-blocking point-to-point UDP connection.

    A peer is attached via a small handshake: `connect_peer` sends the
    ``$connect`` command and waits for the ``$ok`` reply; ``$unconnect``
    detaches. `poll` ignores data from any sender other than the attached
    peer and services the ``$ping`` keep-alive.
    """

    COMMAND_CHAR = b"$"
    CONNECT = COMMAND_CHAR + b"connect"
    UNCONNECT = COMMAND_CHAR + b"unconnect"
    COMMAND_REPLY = COMMAND_CHAR + b"ok"
    PING = COMMAND_CHAR + b"ping"
    MY_IP = get_lan_ip()

    def __init__(self, udp_port=5005):
        """Bind a non-blocking UDP socket on this host's LAN IP.

        Parameters
        ----------
        udp_port : int
            port used for both sending and receiving (default 5005)
        """
        self.udp_port = udp_port
        self._socket = socket.socket(socket.AF_INET,  # Internet
                                     socket.SOCK_DGRAM)  # UDP
        self._socket.bind((UDPConnection.MY_IP, self.udp_port))
        self._socket.setblocking(False)
        self.peer_ip = None
        # own timer, because instances are often used inside their own process
        self.timer = Timer(sync_timer=app_timer)

    @property
    def my_ip(self):
        """LAN IP address of this host."""
        return UDPConnection.MY_IP

    def __str__(self):
        return "ip: {0} (port: {1}); peer: {2}".format(UDPConnection.MY_IP,
                                    self.udp_port, self.peer_ip)

    def receive(self, timeout):
        """Check for received data and return it.

        In contrast to `poll`, this keeps polling until `timeout` if no new
        data are available.

        Parameters
        ----------
        timeout : float
            timeout in seconds

        Returns
        -------
        bytes or None
        """
        t = get_time_ms()
        timeout_ms = int(timeout * 1000)
        while True:
            rtn = self.poll()
            if rtn is not None:
                return rtn
            if (get_time_ms() - t) > timeout_ms:
                return None

    def poll(self):
        """Return pending data (bytes) or None if nothing is waiting.

        Connection commands ($connect, $ping, $unconnect) are processed
        here; data from unknown senders is ignored.
        """
        try:
            data, sender = self._socket.recvfrom(1024)
        except OSError:
            # non-blocking socket: nothing pending (BlockingIOError et al.)
            return None
        # process data
        if data == UDPConnection.CONNECT:
            # connection request
            self.peer_ip = sender[0]
            if not self.send(UDPConnection.COMMAND_REPLY):
                self.peer_ip = None
        elif sender[0] != self.peer_ip:
            return None  # ignore data
        elif data == UDPConnection.PING:
            self.send(UDPConnection.COMMAND_REPLY)
        elif data == self.UNCONNECT:
            self.unconnect_peer()
        return data

    def send(self, data, timeout=1.0):
        """Send `data` to the attached peer.

        Parameters
        ----------
        data : bytes or str
            str is encoded to bytes before sending
        timeout : float
            timeout in seconds (default = 1.0)

        Returns
        -------
        bool : False if no peer is attached or sending failed within timeout
        """
        timeout_ms = int(timeout * 1000)
        if self.peer_ip is None:
            return False
        start = get_time_ms()
        if isinstance(data, str):
            data = data.encode()  # force to byte
        while get_time_ms() - start < timeout_ms:
            try:
                self._socket.sendto(data, (self.peer_ip, self.udp_port))
                return True
            except OSError:
                pass  # e.g. buffer temporarily full: retry until timeout
        return False

    def connect_peer(self, peer_ip, timeout=1.0):
        """Handshake with `peer_ip`; return True on success."""
        self.unconnect_peer()
        self.peer_ip = peer_ip
        if self.send(UDPConnection.CONNECT, timeout=timeout) and \
                self.wait_input(UDPConnection.COMMAND_REPLY, duration=timeout):
            return True
        self.peer_ip = None
        return False

    def wait_input(self, input_string, duration=1.0):
        """Poll the connection and wait for a specific input.

        Returns True if `input_string` arrived within `duration` seconds.
        """
        start = get_time_ms()
        duration_ms = int(duration * 1000)
        while get_time_ms() - start < duration_ms:
            in_ = self.poll()
            if in_ == input_string:
                return True
        return False

    def unconnect_peer(self, timeout=1.0):
        """Notify the peer (best effort) and drop the connection state."""
        self.send(UDPConnection.UNCONNECT, timeout=timeout)
        self.peer_ip = None

    @property
    def is_connected(self):
        """True if a peer is currently attached."""
        return self.peer_ip is not None

    def ping(self, timeout=0.5):
        """Ping the peer.

        Returns
        -------
        (bool, int or None) : success flag and round-trip time in ms
        """
        if self.peer_ip is None:
            return False, None
        start = get_time_ms()
        if self.send(UDPConnection.PING, timeout=timeout) and \
                self.wait_input(UDPConnection.COMMAND_REPLY, duration=timeout):
            return True, get_time_ms() - start
        return False, None

    def clear_receive_buffer(self):
        """Discard everything currently pending on the socket."""
        data = ""
        while data is not None:
            data = self.poll()

    def poll_last_data(self):
        """Poll all pending data and return only the most recent one.

        Returns None if no data were found.
        """
        rtn = None
        tmp = self.poll()
        while tmp is not None:
            rtn = tmp
            tmp = self.poll()
        return rtn
class UDPConnectionProcess(Process):
    """Child process that polls a UDPConnection and feeds a data queue.

    Received data are put on `receive_queue` (as UDPData); outgoing
    messages are taken from `send_queue`.

    Example::

        # Server that prints each input and echos it to the client
        # that is currently connected
        udp_p = UDPConnectionProcess()
        udp_p.start()
        udp_p.start_polling()
        while True:
            data = udp_p.receive_queue.get()
            print(data)
            if data is not None:
                udp_p.send_queue.put(data.string)
    """  # DOC

    def __init__(self, event_trigger=(),
                 event_ignore_tag=None):
        """Initialize UDPConnectionProcess.

        Parameters
        ----------
        event_trigger : multiprocessing.Event or iterable of them, optional
            event trigger(s) to be set when UDP data arrive that are not a
            command (typical for sensor recording processes)
        event_ignore_tag : bytes, optional
            udp data that start with this tag will be ignored for event
            triggering
        """
        super(UDPConnectionProcess, self).__init__()
        self.receive_queue = Queue()
        self.send_queue = Queue()
        self.event_is_connected = Event()
        self._event_quit_request = Event()
        self._event_is_polling = Event()
        self._event_ignore_tag = event_ignore_tag
        # FIX: accept a single event object as well as an iterable of events.
        # The previous isinstance check against type(Event) could never match
        # an Event instance (Event is a factory function), so a single-event
        # argument was silently replaced by an empty tuple.
        if isinstance(event_trigger, (tuple, list)):
            self._event_trigger = tuple(event_trigger)
        elif event_trigger is None:
            self._event_trigger = ()
        else:
            self._event_trigger = (event_trigger,)
        atexit.register(self.quit)

    @property
    def my_ip(self):
        """LAN IP address of this host."""
        return UDPConnection.MY_IP

    def quit(self):
        """Request the process to stop and wait for it to finish."""
        self._event_quit_request.set()
        if self.is_alive():
            self.join()

    def pause(self):
        """Suspend polling (the process keeps running)."""
        self._event_is_polling.clear()

    def start_polling(self):
        """(Re-)enable polling of the UDP socket."""
        self._event_is_polling.set()

    def run(self):
        # Runs in the child process: owns the socket and the polling loop.
        udp_connection = UDPConnection(udp_port=5005)
        self.start_polling()
        ptp = PollingTimeProfile()
        prev_event_polling = None
        while not self._event_quit_request.is_set():
            if prev_event_polling != self._event_is_polling.is_set():
                # polling state changed --> log the transition
                prev_event_polling = self._event_is_polling.is_set()
                if prev_event_polling:
                    logging.warning("UDP start, pid {}, priority {}".format(
                        self.pid, get_priority(self.pid)))
                else:
                    logging.warning("UDP stop")
                    ptp.stop()

            if not self._event_is_polling.is_set():
                self._event_is_polling.wait(timeout=0.1)
            else:
                data = udp_connection.poll()
                t = udp_connection.timer.time
                ptp.update(t)
                if data is not None:
                    d = UDPData(string=data, time=t)
                    self.receive_queue.put(d)
                    if self._event_ignore_tag is not None and \
                            not d.startswith(self._event_ignore_tag):
                        for ev in self._event_trigger:
                            # set all connected trigger
                            ev.set()
                try:
                    udp_connection.send(self.send_queue.get_nowait())
                except Exception:
                    # send_queue was empty -- nothing to forward
                    pass
                # has connection changed?
                if self.event_is_connected.is_set() != udp_connection.is_connected:
                    if udp_connection.is_connected:
                        self.event_is_connected.set()
                    else:
                        self.event_is_connected.clear()
                if not udp_connection.is_connected:
                    udp_connection.timer.wait(200)

        udp_connection.unconnect_peer()
        logging.warning("UDP quit, {}".format(ptp.get_profile_str()))
| {
"repo_name": "lindemann09/pyForceDAQ",
"path": "forceDAQ/_lib/udp_connection.py",
"copies": "1",
"size": "9900",
"license": "mit",
"hash": 927618546579611000,
"line_mean": 30.1320754717,
"line_max": 89,
"alpha_frac": 0.5522222222,
"autogenerated": false,
"ratio": 4.2002545608824775,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5252476783082477,
"avg_score": null,
"num_lines": null
} |
"""A land-ocean setup from `bib.anderson2009spatially`."""
import numpy as np
import dapper.mods as modelling
from dapper.mods.Lorenz96.sakov2008 import X0, Dyn, LPs, Nx, Tplot
from dapper.tools.localization import localization_setup, pairwise_distances
from dapper.tools.viz import xtrema
# Chronology: model step 0.05, observations every step, 4000 obs windows.
t = modelling.Chronology(0.05, dtObs=0.05, KObs=4000, Tplot=Tplot, BurnIn=2000*0.05)
# Define obs sites
# 20 fractional positions (0.405 .. 0.595) scaled onto the Nx=40 grid,
# i.e. sites fall between integer grid indices ("land" sector).
obs_sites = 0.395 + 0.01*np.arange(1, 21)
obs_sites *= 40
# Surrounding inds
ii_below = obs_sites.astype(int)
ii_above = ii_below + 1
# Linear-interpolation weights
# Each site observes a linear interpolation of its two neighbouring state vars.
w_above = obs_sites - ii_below
w_below = 1 - w_above
# Define obs matrix
H = np.zeros((20, 40))
H[np.arange(20), ii_below] = w_below
H[np.arange(20), ii_above] = w_above
# Measure obs-state distances
y2x_dists = pairwise_distances(obs_sites[:, None], np.arange(Nx)[:, None], domain=(Nx,))
batches = np.arange(40)[:, None]
# Define operator
# Linear observation operator (20 obs of 40 states) with unit obs noise and
# a time-invariant localizer built from the obs-to-state distances above.
Obs = {
    'M': len(H),
    'model': lambda E, t: E @ H.T,
    'linear': lambda E, t: H,
    'noise': 1,
    'localizer': localization_setup(lambda t: y2x_dists, batches),
}
HMM = modelling.HiddenMarkovModel(
    Dyn, Obs, t, X0, LP=LPs(),
    sectors={'land': np.arange(*xtrema(obs_sites)).astype(int)})
####################
# Suggested tuning
####################
# Reproduce Anderson Figure 2
# -----------------------------------------------------------------------------------
# xp = SL_EAKF(N=6, infl=sqrt(1.1), loc_rad=0.2/1.82*40)
# for lbl in ['err','std']:
# stat = getattr(xp.stats,lbl).f[HMM.t.maskObs_BI]
# plt.plot(sqrt(np.mean(stat**2, axis=0)),label=lbl)
#
# Note: for this xp, one must to be lucky with the random seed to avoid
# blow up in the ocean sector (which is not constrained by obs) due to infl.
# Instead, I recommend lowering dt (as in Miyoshi 2011) to stabilize integration.
| {
"repo_name": "nansencenter/DAPPER",
"path": "dapper/mods/Lorenz96/anderson2009.py",
"copies": "1",
"size": "1831",
"license": "mit",
"hash": 1263194428738432500,
"line_mean": 32.9074074074,
"line_max": 88,
"alpha_frac": 0.6340797378,
"autogenerated": false,
"ratio": 2.816923076923077,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.3951002814723077,
"avg_score": null,
"num_lines": null
} |
# Alan Gustavo Valdez Cascajares A01336955
# Rafael Manriquez Valdez A01196010
import ply.yacc as yacc
import sys
sys.path.append("../../Lancer")
# Clases definidas por nosotros
from LancerLex import tokens
from Directories.FunctionDirectory import FunctionDirectory
from Directories.Stack import Stack
from Semantic_Cubes.SemanticCubeDict import SemanticCubeDict
from Quadruples.Quadruple import Quadruple
from VirtualMachine.LancerVM import LancerVM
from Memory.MainMemory import MainMemory
# Global compilation state shared by the grammar actions below.
funcDir = FunctionDirectory()       # directory of declared functions and their variables
currentScope = ""                   # name of the function currently being compiled
globalScope = ""                    # name of the program (the global scope)
OperandStack = []                   # virtual addresses of pending operands
OperatorStack = []                  # pending operators
TypeStack = []                      # types matching OperandStack entries
JumpStack = []                      # quad indices awaiting back-patching (if/while)
ReturnStack = []                    # quad indices of pending return GoTos
semanticCube = SemanticCubeDict()   # operand/operator type-compatibility lookup
quadruples = []                     # generated intermediate code
tempCont = 0
quadCont = 1                        # 1-based counter of the next quadruple
FunctionToCall = ""                 # callee name while parsing a call
ArgumentNumber = 0
ArgumentStack = []                  # collected call-argument addresses
ArgumentTypeStack = []              # types of the collected arguments
VM = LancerVM()                     # virtual machine that owns program memory
dimensionVariableName = ""          # array variable currently being declared
dimension = {}                      # its bounds: {'inferior': .., 'superior': ..}
hasReturnStatement = False
drawParameters = []                 # collected arguments for draw* statements
drawColor = ""
debug_mode = 0                      # (stray trailing ';' removed)
# Placeholder values used to reserve freshly allocated memory cells.
trashValues = {'int': 1, 'float': 1.0, 'bool': True, 'string': 'Null'}
# Process exit codes used with sys.exit() on compile errors.
ERROR_CODES = {'func_already_declared': -5, 'variable_already_declared': -6, 'func_not_declared': -7,
               'variable_not_declared': -8, 'type_mismatch': -9, 'syntax_error': -10, 'parameter_type_mismatch': -11,
               'memory_error': 3, 'no_return_statement': 4, 'return_type_mismatch': 5}
# Reglas gramaticales expresadas en funciones
def p_expression_programa(p):
    # Start symbol of the grammar. NOTE: the string below is a PLY grammar
    # production, not documentation -- it must not be edited.
    'prog : PROGRAMA ID create_func_dir SEMICOLON vars function MAIN switch_global_scope bloque'
    print('Correct Syntax.')
def p_expression_switch_global_scope(p):
'switch_global_scope : '
global globalScope
global currentScope
currentScope = globalScope
funcDir.fillStartingQuad(globalScope, quadCont)
def p_expression_create_func_dir(p):
'create_func_dir : '
# Save current function name (scope)
global currentScope
global globalScope
globalScope = p[-1]
currentScope = p[-1]
# Create function directory variable
funcDir.addFunction(currentScope, 'void')
def p_expression_vars(p):
'''vars : VAR ID array_declaration COLON type add_var SEMICOLON vars
| empty'''
def p_expression_add_var(p):
    'add_var : '
    # Embedded action: register the variable just parsed
    # (VAR ID array_declaration COLON type) in the current scope and
    # allocate a virtual address for it.
    global currentScope
    global dimensionVariableName
    global dimension
    varName = p[-4]  # the ID token, four symbols back in the production
    varType = p[-1]  # the type just reduced
    if currentScope == globalScope:
        # Global scope: allocate in global memory (array-sized if dimensioned).
        if dimensionVariableName != "":
            virtualAddress = VM.memory.addDimensionGlobalValue(dimension['superior'], trashValues[varType], varType)
        else:
            virtualAddress = VM.memory.addGlobalValue(trashValues[varType], varType)
    else:
        # Function scope: allocate in temporary (local) memory.
        # NOTE(review): 'addDimenionTempValue' is spelled without the 's';
        # presumably it matches the Memory API -- verify before renaming.
        if dimensionVariableName != "":
            virtualAddress = VM.memory.addDimenionTempValue(dimension['superior'], trashValues[varType], varType)
        else:
            virtualAddress = VM.memory.addTempValue(trashValues[varType], varType)
    if virtualAddress == None:
        # The allocator signals segment exhaustion by returning None.
        print('ERROR: Impossible memory allocation, out of memory.')
        sys.exit(ERROR_CODES['memory_error'])
    if not funcDir.addFunctionVariable(currentScope, varName, varType, virtualAddress):
        print('Error: Variable already declared.')
        sys.exit(ERROR_CODES['variable_already_declared'])
    if dimensionVariableName != "":
        # Attach the array bounds, then reset the per-declaration state.
        funcDir.addDimensionToVariable(currentScope, varName, dimension)
        dimensionVariableName = ""
        dimension = {}
def p_expression_array_declaration(p):
'''array_declaration : LARRAY identify_dimensional_var ss_expression create_dimension RARRAY
| empty'''
def p_expression_identify_dimensional_var(p):
'identify_dimensional_var : '
global dimensionVariableName
dimensionVariableName = p[-2]
def p_expression_create_dimension(p):
'create_dimension : '
global dimension
indexVirtualAddress = OperandStack.pop()
indexType = TypeStack.pop()
if indexType != 'int':
print('ERROR: Expected int index type, found {0} in line {1}'.format(indexType, p.lexer.lineno))
sys.exit(ERROR_CODES['parameter_type_mismatch'])
else:
dimensionSize = VM.memory.getValueFromVirtualAddress(indexVirtualAddress)
dimension = {'inferior': 0, 'superior': dimensionSize - 1}
def p_expression_type(p):
'''type : INT_TYPE
| FLOAT_TYPE
| STRING_TYPE
| BOOL_TYPE'''
p[0] = p[1]
def p_expression_array(p):
'''array : LARRAY identify_dimension ss_expression validate_array_bounds_quad RARRAY
| empty'''
def p_expression_function(p):
'''function : FUNC func_type ID add_to_func_dir LPAREN parameters RPAREN vars starting_quad bloque end_proc function
| empty'''
def p_expression_end_proc(p):
'end_proc : '
global quadCont
global ReturnStack
functionType = funcDir.getFunctionType(currentScope)
if functionType != 'void':
if not hasReturnStatement:
print('Error: Function {0} of type {1} has no return statement.'.format(currentScope, functionType))
sys.exit(ERROR_CODES['no_return_statement'])
else:
quad = Quadruple(quadCont, 'ENDPROC', None, None, None)
quadruples.append(quad)
for i in ReturnStack:
quadruples[i - 1].fillJumpQuad(quadCont)
ReturnStack = []
quadCont += 1
else:
quad = Quadruple(quadCont, 'ENDPROC', None, None, None)
quadruples.append(quad)
ReturnStack = []
quadCont += 1
def p_expression_starting_quad(p):
'starting_quad : '
funcDir.fillStartingQuad(currentScope, quadCont)
def p_expression_func_type(p):
'''func_type : VOID
| type'''
p[0] = p[1]
def p_expression_add_to_func_dir(p):
'add_to_func_dir : '
global currentScope
funcName = p[-1]
funcType = p[-2]
if not funcType == 'void':
virtualAddress = VM.memory.addGlobalValue(trashValues[funcType], funcType)
funcDir.addFunctionVariable(globalScope, funcName, funcType, virtualAddress)
if not funcDir.functionExists(funcName):
currentScope = funcName
funcDir.addFunction(funcName, funcType)
else:
print('Error: Function already declared in line {0}.'.format(p.lexer.lineno))
sys.exit(ERROR_CODES['func_already_declared'])
def p_expression_parameters(p):
'''parameters : type ID add_params array more_params
| empty'''
def p_expression_more_params(p):
'''more_params : COMA type ID add_params more_params
| empty'''
def p_expression_add_params(p):
'add_params : '
global currentScope
paramName = p[-1]
paramType = p[-2]
if currentScope == globalScope:
virtualAddress = VM.memory.addGlobalValue(trashValues[paramType], paramType)
else:
virtualAddress = VM.memory.addTempValue(trashValues[paramType], paramType)
if funcDir.addFunctionVariable(currentScope, paramName, paramType, virtualAddress):
funcDir.addParameterTypes(currentScope, [paramType])
funcDir.addParameterAddress(currentScope, [virtualAddress])
def p_expression_bloque(p):
'bloque : LBRACKET est RBRACKET'
def p_expression_est(p):
'''est : estatuto est
| empty'''
def p_expression_estatuto(p):
'''estatuto : asignacion
| escritura
| condicion
| lectura
| ciclo
| voidfunction
| functioncall
| predefined
| return'''
def p_expression_return(p):
    'return : RETURN ss_expression SEMICOLON'
    # Generate the RETURN quad plus a GoTo that is back-patched to the
    # function epilogue in end_proc (via ReturnStack).
    global hasReturnStatement
    global quadCont
    hasReturnStatement = True
    leftOperand = OperandStack.pop()
    leftOperandType = TypeStack.pop()
    functionType = funcDir.getFunctionType(currentScope)
    functionVariable = funcDir.getVariable(globalScope, currentScope)
    # Global address reserved for this function's return value.
    functionProperties = functionVariable[1][1]
    if leftOperandType != functionType:
        print("Return statement is trying to return {0} and function return type is {1}.".format(leftOperandType, functionType))
        # FIX: exit with the dedicated error code instead of a bare
        # sys.exit(), consistent with every other semantic-error path here.
        sys.exit(ERROR_CODES['return_type_mismatch'])
    quad = Quadruple(quadCont, 'RETURN', leftOperand, None, functionProperties)
    quadruples.append(quad)
    quadCont += 1
    quad = Quadruple(quadCont, 'GoTo', None, None, None)
    quadruples.append(quad)
    ReturnStack.append(quadCont)
    quadCont += 1
def p_expression_predefined(p):
'''predefined : drawcircle
| drawrectangle
| drawtriangle
| drawline
| drawoval'''
def p_expression_color(p):
'''color : RED
| GREEN
| BLUE
| YELLOW
| BROWN
| BLACK'''
p[0] = p[1]
def p_expression_drawline(p):
'drawline : DRAWLINE LPAREN ss_expression collect_draw_argument COMA ss_expression collect_draw_argument COMA ss_expression collect_draw_argument COMA ss_expression collect_draw_argument COMA color collect_draw_color RPAREN SEMICOLON'
global quadCont
global drawColor
global drawParameters
quad = Quadruple(quadCont, 'DRAWLINE', drawParameters, drawColor, None)
quadruples.append(quad)
quadCont += 1
drawColor = ""
drawParameters = []
def p_expression_collect_draw_argument(p):
'collect_draw_argument : '
operand = OperandStack.pop()
operandType = TypeStack.pop()
if operandType != 'int' and operandType != 'float':
print('Error: Argument type mismatch. Expected float or int, got {0} in line {1}'.format(operandType, p.lexer.lineno))
sys.exit(ERROR_CODES['parameter_type_mismatch'])
else:
drawParameters.append(operand)
def p_expression_collect_draw_color(p):
'collect_draw_color : '
global drawColor
drawColor = p[-1]
def p_expression_drawrectangle(p):
'''drawrectangle : DRAWRECTANGLE LPAREN ss_expression collect_draw_argument COMA ss_expression collect_draw_argument COMA ss_expression collect_draw_argument COMA ss_expression collect_draw_argument COMA color collect_draw_color RPAREN SEMICOLON'''
global quadCont
global drawColor
global drawParameters
quad = Quadruple(quadCont, 'DRAWRECTANGLE', drawParameters, drawColor, None)
quadruples.append(quad)
quadCont += 1
drawColor = ""
drawParameters = []
def p_expression_drawcircle(p):
'drawcircle : DRAWCIRCLE LPAREN ss_expression collect_draw_argument COMA ss_expression collect_draw_argument COMA ss_expression collect_draw_argument COMA color collect_draw_color RPAREN SEMICOLON'
global quadCont
global drawColor
global drawParameters
quad = Quadruple(quadCont, 'DRAWCIRCLE', drawParameters, drawColor, None)
quadruples.append(quad)
quadCont += 1
drawColor = ""
drawParameters = []
def p_expression_drawtriangle(p):
'drawtriangle : DRAWTRIANGLE LPAREN ss_expression collect_draw_argument COMA ss_expression collect_draw_argument COMA ss_expression collect_draw_argument COMA ss_expression collect_draw_argument COMA ss_expression collect_draw_argument COMA ss_expression collect_draw_argument COMA color collect_draw_color RPAREN SEMICOLON'
global quadCont
global drawColor
global drawParameters
quad = Quadruple(quadCont, 'DRAWTRIANGLE', drawParameters, drawColor, None)
quadruples.append(quad)
quadCont += 1
drawColor = ""
drawParameters = []
def p_expression_drawoval(p):
'drawoval : DRAWOVAL LPAREN ss_expression collect_draw_argument COMA ss_expression collect_draw_argument COMA ss_expression collect_draw_argument COMA ss_expression collect_draw_argument COMA color collect_draw_color RPAREN SEMICOLON'
global quadCont
global drawColor
global drawParameters
quad = Quadruple(quadCont, 'DRAWOVAL', drawParameters, drawColor, None)
quadruples.append(quad)
quadCont += 1
drawColor = ""
drawParameters = []
def p_expression_voidfunction(p):
'''voidfunction : ID validate_function_id LPAREN generate_era call_params RPAREN SEMICOLON argument_validation'''
def p_expression_functioncall(p):
'''functioncall : ID validate_function_id LPAREN generate_era call_params RPAREN argument_validation'''
def p_expression_argument_validation(p):
    'argument_validation : '
    # Embedded action run after a call's argument list is parsed: type-checks
    # the collected arguments against the callee's signature and emits the
    # Parameter/GoSub quads (plus a return-value copy for non-void callees).
    global quadCont
    global ArgumentNumber
    global ArgumentStack
    global ArgumentTypeStack
    global FunctionToCall
    if FunctionToCall == "":
        # Callee name was consumed earlier (e.g. by a nested call); recover
        # it from the operand stack via its global virtual address.
        virtualAddress = OperandStack.pop()
        FunctionToCall = funcDir.getFunctionIdByAddress(globalScope, virtualAddress)
    functionParameterAddresses = funcDir.getParameterAddresses(FunctionToCall)
    if funcDir.validateParameters(FunctionToCall, ArgumentTypeStack):
        # One 'Parameter' quad per argument, copying it into the callee's
        # corresponding parameter address.
        for argument in ArgumentStack:
            quad = Quadruple(quadCont, 'Parameter', argument, None, functionParameterAddresses[ArgumentNumber])
            quadruples.append(quad)
            quadCont += 1
            ArgumentNumber += 1
        quad = Quadruple(quadCont, 'GoSub', FunctionToCall, None, funcDir.getFunctionStartingQuad(FunctionToCall))
        quadruples.append(quad)
        quadCont += 1
        functionType = funcDir.getFunctionType(FunctionToCall)
        if functionType != 'void':
            # Copy the callee's return value into a fresh temporary so the
            # surrounding expression can keep using the operand stack.
            funcDir.addTempVariable(globalScope, functionType)
            virtualAddress = VM.memory.addTempValue(trashValues[functionType], functionType)
            functionVariable = funcDir.getVariable(globalScope, FunctionToCall)
            quad = Quadruple(quadCont, '=', functionVariable[1][1], None, virtualAddress)
            quadruples.append(quad)
            quadCont += 1
            OperandStack.append(virtualAddress)
        # Reset the per-call state for the next function call.
        ArgumentStack = []
        ArgumentTypeStack = []
        FunctionToCall = ""
        ArgumentNumber = 0
    else:
        print('Error: argument type mismatch in line {0} using function {1}.'.format(p.lexer.lineno, FunctionToCall))
        sys.exit(ERROR_CODES['parameter_type_mismatch'])
def p_expression_generate_era(p):
    'generate_era : '
    # Emit the ERA quadruple that reserves the activation record for the callee.
    global quadCont
    callee = p[-3]
    era_quad = Quadruple(quadCont, 'ERA', callee, None, None)
    quadruples.append(era_quad)
    quadCont += 1
def p_expression_validate_function_id(p):
'validate_function_id : '
global FunctionToCall
FunctionToCall = p[-1]
if not funcDir.functionExists(FunctionToCall):
print('Error: function {0} not declared in line {1}'.format(FunctionToCall, p.lexer.lineno))
sys.exit(ERROR_CODES['func_not_declared'])
def p_expression_call_params(p):
'''call_params : ss_expression function_argument_collection more_call_params
| empty'''
def p_expression_more_call_params(p):
'''more_call_params : COMA ss_expression function_argument_collection more_call_params
| empty'''
def p_expression_function_argument_collection(p):
'function_argument_collection : '
functionArgumentCollection(p)
def p_expression_ciclo(p):
'''ciclo : WHILE create_while_quad LPAREN ss_expression RPAREN while_expression_evaluation bloque while_end'''
def p_expression_create_while_quad(p):
'create_while_quad : '
whileConditionQuads(p)
def p_expression_while_expression_evaluation(p):
'while_expression_evaluation : '
whileEvaluationQuad(p)
def p_expression_while_end(p):
'while_end : '
whileEndQuad(p)
# Read from stdin into a variable (possibly an array element).
def p_expression_lectura(p):
    'lectura : ID push_id_operand array ASSIGN push_operator INPUT SEMICOLON'
    inputAssignment(p)
# Ordinary assignment statement.
def p_expression_asignacion(p):
    'asignacion : ID push_id_operand array ASSIGN push_operator ss_expression SEMICOLON'
    assignQuad(p)
# Expression grammar: precedence is encoded by the cascade
# ss_expression (NOT) > s_expression (AND/OR) > expresion (relational)
# > expr (+/-) > termino (*//) > factor.
def p_expression_ss_expression(p):
    '''ss_expression : ss_not s_expression solve_not_operation'''
def p_expression_ss_not(p):
    '''ss_not : NOT push_operator
                | empty'''
def p_expression_solve_not_operation(p):
    'solve_not_operation : '
    # Only emit the NOT quad if a '!!' operator is actually pending.
    if len(OperatorStack) > 0:
        if OperatorStack[len(OperatorStack) - 1] == '!!':
            solveNotOperation(p)
def p_expression_s_expression(p):
    's_expression : expresion s_and_or'
def p_expression_s_and_or(p):
    '''s_and_or : AND push_operator s_expression solve_pending_andor
                | OR push_operator s_expression solve_pending_andor
                | empty'''
def p_expression_solve_pending_andor(p):
    'solve_pending_andor : '
    # Resolve a pending logical operator, if one is on top of the stack.
    andor = {'&&', '||'}
    if OperatorStack[len(OperatorStack) - 1] in andor:
        solvePendingOperations(p)
def p_expression_expresion(p):
    'expresion : expr exp'
def p_expression_expr(p):
    'expr : termino term'
def p_expression_exp(p):
    '''exp : empty
            | GT push_operator expr solve_pending_relational
            | LT push_operator expr solve_pending_relational
            | GE push_operator expr solve_pending_relational
            | LE push_operator expr solve_pending_relational
            | EQUAL push_operator expr solve_pending_relational
            | DIFFERENT push_operator expr solve_pending_relational'''
def p_expression_solve_pending_relational(p):
    'solve_pending_relational : '
    # Resolve a pending relational operator, if one is on top of the stack.
    relOps = {'<', '>', '==', '>=', '<=', '<>'}
    if OperatorStack[len(OperatorStack) - 1] in relOps:
        solvePendingOperations(p)
def p_exppression_termino(p):
    'termino : factor fact'
def p_expression_term(p):
    '''term : PLUS push_operator termino solve_pending_term term
            | MINUS push_operator termino solve_pending_term term
            | empty'''
def p_expression_solve_pending_term(p):
    'solve_pending_term : '
    if OperatorStack[len(OperatorStack) - 1] == '+' or OperatorStack[len(OperatorStack) - 1] == '-':
        solvePendingOperations(p)
def p_expression_factor(p):
    '''factor : LPAREN create_false_bottom ss_expression RPAREN delete_false_bottom
                | constante'''
#
# def p_expression_solve_negatives(p):
# 'solve_negatives : '
#
# global quadCont
#
# if len(OperatorStack) > 0:
# if OperatorStack[len(OperatorStack) - 1] == '-' and len(OperandStack) > 2:
# rightOperand = OperandStack.pop()
# rightType = TypeStack.pop()
# operator = OperatorStack.pop()
#
# if rightType == 'int' or rightType == 'float':
#
# quad = Quadruple(quadCont, 'negative', 0, rightOperand, rightOperand)
# quadruples.append(quad)
#
# quadCont += 1
#
# OperandStack.append(rightOperand)
# TypeStack.append(rightType)
def p_expression_create_false_bottom(p):
    'create_false_bottom : '
    # '(' acts as a false bottom so operators inside the parentheses are
    # resolved independently of the ones outside.
    OperatorStack.append('(')
def p_expression_delete_false_bottom(p):
    'delete_false_bottom : '
    # Matching ')': discard the false bottom pushed above.
    OperatorStack.pop()
def p_expression_fact(p):
    '''fact : TIMES push_operator factor solve_pending_factor fact
            | DIVISION push_operator factor solve_pending_factor fact
            | empty'''
def p_expression_solve_pending_factor(p):
    'solve_pending_factor : '
    # Resolve a pending multiplicative operator, if one is on top.
    if OperatorStack[len(OperatorStack) - 1] == '*' or OperatorStack[len(OperatorStack) - 1] == '/':
        solvePendingOperations(p)
# def p_expression_signo(p):
# '''signo : MINUS push_operator
# | empty'''
# Literals and identifiers; each alternative pushes its operand/type.
def p_expression_constante(p):
    '''constante : ID push_id_operand id_func_array
                | CTEI push_int_operand
                | CTEF push_float_operand
                | CTES push_string_operand
                | cteb push_bool_operand'''
def p_expression_cteb(p):
    '''cteb : TRUE
            | FALSE'''
    # Propagate the literal token so push_bool_operand can inspect it.
    p[0] = p[1]
# An ID may be a plain variable, an indexed array access, or a call.
def p_expression_id_func_array(p):
    '''id_func_array : LARRAY identify_dimension ss_expression validate_array_bounds_quad RARRAY
                    | LPAREN generate_era call_params RPAREN argument_validation
                    | empty'''
def p_expression_identify_dimension(p):
    'identify_dimension : '
    # Embedded action: look up the dimension record of the array variable
    # being indexed.  The identifier is 3 symbols back on the parser stack
    # (ID push_id_operand LARRAY <here>), current scope first, then global.
    global dimension
    varName = p[-3]
    dimension = funcDir.getDimensions(currentScope, varName)
    if dimension is None:
        dimension = funcDir.getDimensions(globalScope, varName)
        if dimension is None:
            # Fix: report the variable name itself.  The original printed
            # p[-1], which at this point is the '[' token, not the ID.
            print('Error: variable {0} is not an indexed variable in line {1}'.format(varName, p.lexer.lineno))
            sys.exit(ERROR_CODES['variable_not_declared'])
        else:
            dimension = dimension['dimensions']
    else:
        dimension = dimension['dimensions']
def p_expression_validate_array_bounds_quad(p):
    'validate_array_bounds_quad : '
    # Embedded action: emit a runtime bounds check for an array access and
    # compute the element's effective address (base + index).
    global quadCont
    indexVirtualAddress = OperandStack.pop()
    indexType = TypeStack.pop()
    if indexType != 'int':
        print('ERROR: Expected int index type, found {0} in line {1}'.format(indexType, p.lexer.lineno))
        sys.exit(ERROR_CODES['parameter_type_mismatch'])
    else:
        # 'validate' checks the index against the declared bounds at runtime.
        quad = Quadruple(quadCont, 'validate', indexVirtualAddress, dimension[1]['inferior'], dimension[1]['superior'])
        quadruples.append(quad)
        quadCont += 1
        # The base address is stored as an int temp so it can take part in
        # the address arithmetic below.
        arrayBaseAddress = OperandStack.pop()
        arrayType = TypeStack.pop()
        operationalBaseAddress = VM.memory.addTempValue(arrayBaseAddress, 'int')
        tempVirtualAddress = VM.memory.addTempValue(arrayBaseAddress, arrayType)
        quad = Quadruple(quadCont, '+', operationalBaseAddress, indexVirtualAddress, tempVirtualAddress)
        quadruples.append(quad)
        quadCont += 1
        # Wrapped in a list -- presumably marks an indirect (computed)
        # address for the VM; TODO confirm against the VM implementation.
        OperandStack.append([tempVirtualAddress])
        TypeStack.append(arrayType)
def p_expression_push_id_operand(p):
    '''push_id_operand : '''
    # Embedded action: resolve the identifier p[-1] against the current
    # scope first, then the global scope, and push its virtual address and
    # type.  Exits with an error for undeclared variables.
    # (The original duplicated the push logic in two identical branches;
    # this folds them into one without changing behavior.)
    global currentScope
    variable = funcDir.getVariable(currentScope, p[-1])
    if variable is None:
        variable = funcDir.getVariable(globalScope, p[-1])
    if variable is None:
        print('Error: variable {0} not declared in line {1}'.format(p[-1], p.lexer.lineno))
        sys.exit(ERROR_CODES['variable_not_declared'])
    # variable is (name, (type, virtualAddress, ...)): push address & type.
    variableInfo = variable[1]
    OperandStack.append(variableInfo[1])
    TypeStack.append(variableInfo[0])
# The literal-push actions below store the constant in VM constant memory
# and push its virtual address/type.  (The `global` declarations are not
# strictly needed since the stacks are only mutated, never rebound.)
def p_expression_push_int_operand(p):
    '''push_int_operand : '''
    global OperandStack
    global OperatorStack
    virtualAddress = VM.memory.addConstantValue(int(p[-1]), 'int')
    OperandStack.append(virtualAddress)
    TypeStack.append('int')
def p_expression_push_float_operand(p):
    '''push_float_operand : '''
    global OperandStack
    global OperatorStack
    virtualAddress = VM.memory.addConstantValue(float(p[-1]), 'float')
    OperandStack.append(virtualAddress)
    TypeStack.append('float')
def p_expression_push_string_operand(p):
    '''push_string_operand : '''
    global OperandStack
    global OperatorStack
    # Strip the surrounding quote characters before storing the literal.
    oldString = p[-1]
    newString = oldString[1:-1]
    virtualAddress = VM.memory.addConstantValue(newString, 'string')
    OperandStack.append(virtualAddress)
    TypeStack.append('string')
def p_expression_push_bool_operand(p):
    'push_bool_operand : '
    global OperandStack
    global OperatorStack
    # Token text is the source keyword 'true' / 'false'.
    if p[-1] == "true":
        virtualAddress = VM.memory.addConstantValue(True, 'bool')
    else:
        virtualAddress = VM.memory.addConstantValue(False, 'bool')
    OperandStack.append(virtualAddress)
    TypeStack.append('bool')
def p_expression_push_operator(p):
    'push_operator : '
    # Embedded action: push the operator token just shifted (p[-1]).
    OperatorStack.append(p[-1])
def p_expression_escritura(p):
    'escritura : PRINT LPAREN ss_expression RPAREN SEMICOLON'
    # Emit a print quad for the evaluated expression.
    global quadCont
    printParameter = OperandStack.pop()
    printParameterType = TypeStack.pop()  # popped to keep stacks balanced; value unused
    quad = Quadruple(quadCont, 'print', printParameter, None, None)
    quadruples.append(quad)
    quadCont += 1
def p_expression_condicion(p):
    'condicion : IF LPAREN ss_expression RPAREN create_condition_quad bloque else'
    # Backpatch the pending jump once the whole if/else has been parsed.
    endConditionQuads(p)
def p_expression_create_condition_quad(p):
    'create_condition_quad : '
    conditionQuads(p)
def p_expression_else(p):
    '''else : ELSE else_condition_quad bloque
            | empty'''
def p_expression_else_condition_quad(p):
    'else_condition_quad : '
    elseConditionQuad(p)
# Syntax error handler
def p_error(p):
    print('Syntax error in input in line {0}'.format(p.lexer.lineno))
    sys.exit(ERROR_CODES['syntax_error'])
# Empty production
def p_empty(p):
    'empty :'
    pass
def solvePendingOperations(p):
    # Pop two operands and the pending operator, type-check the pair in the
    # semantic cube, emit the quadruple and push the temp result.
    # Exits with an error if the operand types are incompatible.
    # (Cleanup: removed the unused local `tempVarString`, the unused
    # `global tempCont` and dead commented-out counter code.)
    right_operand = OperandStack.pop()
    right_type = TypeStack.pop()
    left_operand = OperandStack.pop()
    left_type = TypeStack.pop()
    operator = OperatorStack.pop()
    global semanticCube
    global quadCont
    semanticResult = semanticCube.getSemanticType(left_type, right_type, operator)
    if semanticResult != 'error':
        # Reserve a temp slot in VM memory and register it in the function
        # directory so it counts towards the scope's resources.
        virtualTempAddress = VM.memory.addTempValue(trashValues[semanticResult], semanticResult)
        funcDir.addTempVariable(currentScope, semanticResult)
        quad = Quadruple(quadCont, operator, left_operand, right_operand, virtualTempAddress)
        quadruples.append(quad)
        quadCont += 1
        OperandStack.append(virtualTempAddress)
        TypeStack.append(semanticResult)
    else:
        print('ERROR: operation type mismatch in line {0}'.format(p.lexer.lineno))
        sys.exit(ERROR_CODES['type_mismatch'])
def assignQuad(p):
    # Emit an assignment quadruple after type-checking LHS vs RHS in the
    # semantic cube; exits with an error on incompatible types.
    right_operand = OperandStack.pop()
    right_type = TypeStack.pop()
    left_operand = OperandStack.pop()
    left_type = TypeStack.pop()
    operator = OperatorStack.pop()
    global semanticCube
    global tempCont
    global quadCont
    semanticResult = semanticCube.getSemanticType(left_type, right_type, operator)
    #print("{0} {1} {2} = {3}".format(left_type, operator, right_type, semanticResult))
    if semanticResult != 'error':
        # Assignment quads store the source in the first operand slot and
        # the destination in the result slot.
        quad = Quadruple(quadCont, operator, right_operand, None, left_operand)
        quadruples.append(quad)
        quadCont += 1
    else:
        print('ERROR: assignment type mismatch in line {0}'.format(p.lexer.lineno))
        sys.exit(ERROR_CODES['type_mismatch'])
def solveNotOperation(p):
    # Emit a quad for the unary '!!' (NOT) operator.  NOT is only defined
    # for bool operands; anything else is a fatal type error.
    right_operand = OperandStack.pop()
    right_type = TypeStack.pop()
    operator = OperatorStack.pop()
    global semanticCube
    global quadCont
    if right_type == 'bool':
        semanticResult = 'bool'
    else:
        semanticResult = 'error'
    if semanticResult != 'error':
        # Temp slot that holds the negated value at runtime.
        virtualTempAddress = VM.memory.addTempValue(trashValues[semanticResult], semanticResult)
        quad = Quadruple(quadCont, operator, right_operand, None, virtualTempAddress)
        quadruples.append(quad)
        OperandStack.append(virtualTempAddress)
        TypeStack.append(semanticResult)
        quadCont += 1
    else:
        # Fix: the original message said 'assignment type mismatch', which
        # is misleading for a unary NOT -- report it as an operation error.
        print('ERROR: operation type mismatch in line {0}'.format(p.lexer.lineno))
        sys.exit(ERROR_CODES['type_mismatch'])
def inputAssignment(p):
    # Emit an 'input' quad: read a value of right_type into right_operand.
    global tempCont
    global quadCont
    right_operand = OperandStack.pop()
    right_type = TypeStack.pop()
    operator = OperatorStack.pop()  # pops the '=' pushed by the rule; unused
    inputQuad = Quadruple(quadCont, 'input', right_type, None, right_operand)
    quadruples.append(inputQuad)
    quadCont += 1
def conditionQuads(p):
    # Type-check the if-condition and emit its GoToF, recording the quad
    # index on the jump stack so it can be backpatched later.
    expressionType = TypeStack.pop()
    if expressionType != 'bool':
        print('ERROR: operation type mismatch in line {0}'.format(p.lexer.lineno))
        sys.exit(ERROR_CODES['type_mismatch'])
    else:
        global quadCont
        expressionResult = OperandStack.pop()
        quad = Quadruple(quadCont, 'GoToF', expressionResult, None, None)
        quadruples.append(quad)
        JumpStack.append(quadCont - 1)
        quadCont += 1
def elseConditionQuad(p):
    # At the start of the else branch: emit a GoTo that will skip the else
    # block when the if branch executed, and backpatch the if's GoToF so a
    # false condition lands here.
    global quadCont
    quad = Quadruple(quadCont, 'GoTo', None, None, None)
    quadruples.append(quad)
    false = JumpStack.pop()
    JumpStack.append(quadCont - 1)
    quadCont += 1
    quad = quadruples[false]
    quad.fillJumpQuad(quadCont)
def endConditionQuads(p):
    # Backpatch the jump recorded for the finished conditional so it
    # targets the next quadruple to be generated.
    pending_jump = JumpStack.pop()
    quadruples[pending_jump].fillJumpQuad(quadCont)
def whileConditionQuads(p):
    # Remember where the loop condition starts so the end-of-loop GoTo can
    # jump back to re-evaluate it.
    JumpStack.append(quadCont)
def whileEvaluationQuad(p):
    # Type-check the loop condition and emit the GoToF that exits the loop
    # when it is false; its index is saved for backpatching.
    expressionType = TypeStack.pop()
    if expressionType != 'bool':
        print('ERROR: operation type mismatch in line {0}'.format(p.lexer.lineno))
        sys.exit(ERROR_CODES['type_mismatch'])
    else:
        global quadCont
        expressionResult = OperandStack.pop()
        quad = Quadruple(quadCont, 'GoToF', expressionResult, None, None)
        quadruples.append(quad)
        JumpStack.append(quadCont - 1)
        quadCont += 1
def whileEndQuad(p):
    # Close the loop: emit the GoTo back to the condition and backpatch the
    # exit GoToF to the instruction after the loop.
    end = JumpStack.pop()
    ret = JumpStack.pop()
    global quadCont
    endQuad = quadruples[end]
    quad = Quadruple(quadCont, 'GoTo', None, None, ret)
    quadCont += 1
    quadruples.append(quad)
    endQuad.fillJumpQuad(quadCont)
def functionArgumentCollection(p):
    # Move the evaluated call argument from the operand/type stacks onto
    # the per-call argument stacks.
    argument = OperandStack.pop()
    argumentType = TypeStack.pop()
    ArgumentStack.append(argument)
    ArgumentTypeStack.append(argumentType)
def initParser():
    # Build the parser, read the source file named on stdin, compile it to
    # quadruples and hand everything to the virtual machine for execution.
    # Build the parser
    parser = yacc.yacc(debug=1)
    # Ask for the name or path of the file to analyze
    print("Nombre o ruta del archivo a analizar: ")
    fileName = raw_input()
    # Fix: the original opened the file without ever closing it; use a
    # context manager so the handle is released deterministically.
    with open(fileName, 'r') as sourceFile:
        code = sourceFile.read()
    # Parse the code read from the file
    parser.parse(code)
    VM.getInstructions(quadruples)
    VM.setFuncDir(funcDir)
    VM.setMainName(globalScope)
    VM.setInitialInstructionPointer(funcDir.getFunctionStartingQuad(globalScope))
    VM.setDebugMode(debug_mode)
    if debug_mode == 1:
        # Dump the compiler's internal state before running.
        print(funcDir.functions)
        for function in funcDir.functions:
            func = funcDir.functions[function]
            print('{0} = {1}'.format(function, func))
            print(func['variables'].variables)
        print('Operand stack: {0}'.format(OperandStack))
        print('Type stack: {0}'.format(TypeStack))
        print('Operator stack: {0}'.format(OperatorStack))
        print('Jump stack: {0}'.format(JumpStack))
        VM.printInstructions()
    VM.executeInstructions()
    if debug_mode == 1:
        VM.printMainMemory()
initParser()
| {
"repo_name": "agvaldezc/Lancer",
"path": "Parser_Lexer/LancerYaccV2.py",
"copies": "1",
"size": "30379",
"license": "mit",
"hash": 5080227925333343000,
"line_mean": 26.4426377597,
"line_max": 328,
"alpha_frac": 0.6635833964,
"autogenerated": false,
"ratio": 3.671178247734139,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.48347616441341384,
"avg_score": null,
"num_lines": null
} |
# Alan Richardson (Ausar Geophysical)
# 2017/01/09
# Simple first attempt using Ridge regression to predict missing PE values, and SVC for the facies
# NOTE: Python 2 / old scikit-learn script (print statement, cross_validation
# and grid_search modules) -- run it with the environment it was written for.
import numpy as np
import pandas as pd
from sklearn import preprocessing, cross_validation, grid_search, linear_model, svm, metrics
# Load + preprocessing
train_data = pd.read_csv('../facies_vectors.csv');
# Recruit F9 is a synthetic well; drop it, then shuffle the training rows.
train_data = train_data[train_data['Well Name'] != 'Recruit F9']
train_data = train_data.sample(frac=1).reset_index(drop=True)
Ytrain=train_data['Facies']
Xtrain=train_data.drop('Facies',axis=1)
test_data = pd.read_csv('../validation_data_nofacies.csv')
Xtest = test_data.copy()
# Encode Formation as an integer class, then one-hot encode it and NM_M
# for both the training and test sets.
le=preprocessing.LabelEncoder()
le.fit(Xtrain['Formation'])
for d in [Xtrain, Xtest]:
    d.drop('Well Name', axis=1, inplace=True)
    d['FormationClass']=le.transform(d['Formation'])
    d.drop('Formation', axis=1, inplace=True)
Xtrain=pd.get_dummies(Xtrain,prefix=['Formation', 'NM_M'],columns=['FormationClass', 'NM_M'])
Xtest=pd.get_dummies(Xtest,prefix=['Formation', 'NM_M'],columns=['FormationClass', 'NM_M'])
# Impute missing PE values and standardise data
# Non-PE features are standardised on all rows; PE is standardised on the
# rows where it is present, then the missing PE values are predicted with
# a cross-validated Ridge regression on the other (scaled) features.
scalerNoPE = preprocessing.StandardScaler().fit(Xtrain.drop('PE',axis=1))
Xtrain.loc[:,Xtrain.columns != 'PE']=scalerNoPE.transform(Xtrain.drop('PE',axis=1))
XtrainDropNoPE = Xtrain.dropna(axis=0)
scalerPE = preprocessing.StandardScaler().fit(XtrainDropNoPE['PE'].reshape(-1,1))
XtrainDropNoPE.loc[:,'PE']=scalerPE.transform(XtrainDropNoPE['PE'].reshape(-1,1))
Xtrain.loc[~Xtrain.PE.isnull(),'PE']=XtrainDropNoPE.loc[:,'PE']
YPE=XtrainDropNoPE['PE'];
XPE=XtrainDropNoPE.drop('PE',axis=1)
regRidge = grid_search.GridSearchCV(estimator=linear_model.Ridge(), param_grid=[{'alpha': [0.001,0.01,0.1,1,10,100,1000,10000]}], cv=10)
regRidge.fit(XPE, YPE)
Xtrain.loc[Xtrain.PE.isnull(),'PE'] = regRidge.predict(Xtrain.loc[Xtrain.PE.isnull(),:].drop('PE',axis=1))
print regRidge.best_score_
Xtest.loc[:,Xtrain.columns != 'PE']=scalerNoPE.transform(Xtest.drop('PE',axis=1))
Xtest.loc[:,'PE']=scalerPE.transform(Xtest['PE'].reshape(-1,1))
# Predict facies
# SVC hyper-parameters chosen by grid search on micro-averaged F1.
clf = grid_search.GridSearchCV(estimator=svm.SVC(), param_grid=[{'C': [0.1,0.3,1,3], 'class_weight': [None, 'balanced']}], scoring='f1_micro')
clf.fit(Xtrain, Ytrain)
test_data['Facies']=clf.predict(Xtest)
test_data.to_csv('ar4_predicted_facies_submission001.csv')
| {
"repo_name": "seg/2016-ml-contest",
"path": "ar4/ar4_submission001.py",
"copies": "2",
"size": "2318",
"license": "apache-2.0",
"hash": 3563587541606404600,
"line_mean": 45.36,
"line_max": 142,
"alpha_frac": 0.7243313201,
"autogenerated": false,
"ratio": 2.7464454976303316,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9388824166990253,
"avg_score": 0.016390530148015968,
"num_lines": 50
} |
# Alan Richardson (Ausar Geophysical)
# 2017/01/31
import numpy as np
import scipy.signal
import pandas as pd
from sklearn import preprocessing, metrics
from sklearn.neighbors import KNeighborsClassifier, KNeighborsRegressor
from sklearn.base import clone
from matplotlib import pyplot as plt
import scipy.optimize
from scipy.optimize import lsq_linear
import fastdtw
from scipy.sparse import csr_matrix
from scipy.sparse.linalg import lsqr
from scipy.signal import medfilt, gaussian
import xgboost as xgb
from xgboost.sklearn import XGBClassifier, XGBRegressor
eps = 1e-5
def load_data():
    # Load the training wells (minus the synthetic 'Recruit F9' well) and
    # the no-facies validation wells into one concatenated frame.
    train_data = pd.read_csv('../facies_vectors.csv');
    train_data = train_data[train_data['Well Name'] != 'Recruit F9'].reset_index(drop=True)
    validation_data = pd.read_csv('../validation_data_nofacies.csv')
    return pd.concat([train_data, validation_data]).reset_index(drop=True)
def get_wellnames(data):
    # Unique well names, in order of first appearance.
    return data['Well Name'].unique()
def get_numwells(data):
    return len(get_wellnames(data))
def set_well_value(data, wellname, colname, val):
    # Assign val to colname for every row of the named well (in place).
    data.loc[data['Well Name']==wellname, colname] = val
def get_well_value(data, wellname, colname):
    # First row's value of colname for the named well.
    return data.loc[data['Well Name']==wellname, colname].values[0]
def make_label_encoders(data, names):
    """Fit one sklearn LabelEncoder per column in *names* on *data*.

    Returns a list of {'name': column, 'le': fitted encoder} dicts.
    """
    def _fit_encoder(column):
        encoder = preprocessing.LabelEncoder()
        encoder.fit(data[column])
        return {'name': column, 'le': encoder}
    return [_fit_encoder(column) for column in names]
def apply_label_encoders(data, les):
    # Replace each encoded column with an integer '<name>Class' column
    # (modifies data in place).
    for le in les:
        data['%sClass' % le['name']]=le['le'].transform(data[le['name']])
        data.drop(le['name'], axis=1, inplace=True)
def make_onehot_encoders(data, names):
    # Fit one sklearn OneHotEncoder per column in names.
    ohes = []
    for name in names:
        ohe=preprocessing.OneHotEncoder()
        ohe.fit(data[name])
        ohes.append({'name': name, 'ohe': ohe})
    return ohes
def apply_onehot_encoders(data, ohes):
    # Join the dense one-hot columns and drop the originals.
    # Note: unlike apply_label_encoders this returns a NEW frame (join
    # copies), so callers must use the return value.
    for ohe in ohes:
        ohdata = pd.DataFrame(ohe['ohe'].transform(data[ohe['name']]).toarray())
        data=data.join(ohdata)
        data.drop(ohe['name'],axis=1,inplace=True)
    return data
def make_scalers(data, names, stype='Robust'):
    """Fit one scaler per column in *names*, ignoring NaN rows.

    stype selects 'Robust' or 'Standard'; any other value raises
    ValueError.  Returns a list of {'name': column, 'scaler': fitted
    scaler} dicts.
    """
    factories = {
        'Robust': preprocessing.RobustScaler,
        'Standard': preprocessing.StandardScaler,
    }
    fitted = []
    for column in names:
        if stype not in factories:
            raise ValueError('unknown stype')
        scaler = factories[stype]()
        column_values = data[column].dropna(axis=0, inplace=False).values.reshape(-1, 1)
        scaler.fit(column_values)
        fitted.append({'name': column, 'scaler': scaler})
    return fitted
def apply_scalers(data, scalers):
    # Scale each column in place, leaving NaN entries untouched.
    for scaler in scalers:
        data.loc[~data[scaler['name']].isnull(), scaler['name']] = scaler['scaler'].transform(data[scaler['name']].dropna(axis=0, inplace=False).values.reshape(-1,1))
def neigh_interp(data):
    # Predict a 'NeighbFacies' label for every sample by training a KNN
    # classifier, per well and per formation, on the k wells that are
    # closest according to the precomputed per-formation DTW distances.
    odata = load_data()
    wellnames = get_wellnames(data)
    formations = data['FormationClass'].unique()
    distformation=np.load('dtw_distformation_fce.npy')
    distformation[pd.isnull(distformation)]=0.0
    # distformation is upper triangular, so add the transpose to make it full
    distformationf = np.zeros([len(wellnames),len(wellnames),len(formations)])
    for fidx in range(len(formations)):
        distformationf[:,:,fidx] = distformation[:,:,fidx]+distformation[:,:,fidx].T
    # We don't have facies for wells 9 or 10, so we don't want any other well
    # to have these as one of their nearest neighbours
    distformationf[:,9,:]=np.inf
    distformationf[:,10,:]=np.inf
    # We also don't want a well to be its own neighbour
    distformationf[distformationf==0.0]=np.inf
    data['NeighbFacies']=0
    k=8
    clf = KNeighborsClassifier(n_neighbors = k, weights = 'distance', leaf_size = 1, p = 1)
    cols = ['GR', 'ILD_log10', 'PHIND', 'RELPOS', 'NM_MClass', 'RGT']
    for wellidx in range(len(wellnames)):
        for fidx in formations:
            # Find the k 'nearest' (as determined by dtw) wells
            neighb = np.argsort(distformationf[wellidx,:,formations.tolist().index(fidx)])[:k]
            # Find the rows in data for these wells
            neighb_rows = np.array([False]*len(data))
            for nidx in neighb:
                neighb_rows = neighb_rows | (data['Well Name']==wellnames[nidx])
            # Select only those rows with formation 'fidx'
            neighb_rows = neighb_rows & (data['FormationClass']==fidx)
            # Rows for the chosen formation in the current well
            my_rows = (data['Well Name']==wellnames[wellidx]) & (data['FormationClass']==fidx)
            # Fit and predict
            if (np.sum(neighb_rows)>0) & (np.sum(my_rows)>0):
                clf.fit(data.loc[neighb_rows, cols].values, odata.loc[neighb_rows, 'Facies'].values.ravel())
                data.loc[my_rows, 'NeighbFacies'] = clf.predict(data.loc[my_rows, cols].values)
# Start of functions associated with finding RGT
def get_pts_per_well(data):
    """Per-well sample counts (in first-appearance order) plus their
    cumulative start offsets, beginning with 0."""
    counts = data.groupby('Well Name', sort=False).size().values
    offsets = np.concatenate(([0], np.cumsum(counts)))
    return counts, offsets
def build_Adtw(data, wells, nwells, npts_in_well, cum_pts, cols):
    # Build a sparse least-squares system from pairwise DTW alignments of
    # the wells.  Each row of the (CSR-form) matrix says that the summed
    # per-sample increments up to two DTW-matched depths in two wells
    # should be equal.  Also records the per-pair and per-formation DTW
    # distances to .npy files for later reuse.
    formations = data['FormationClass'].unique()
    # Preallocate worst-case CSR buffers; nz_rows/nz_indices track usage.
    max_num_pairs = int(nwells * (nwells-1) / 2 * np.max(npts_in_well))
    max_nz_in_row = int(np.max(npts_in_well) * 2)
    max_num_rows = max_num_pairs
    max_num_nonzero = max_num_rows * max_nz_in_row
    dist = np.zeros([len(wells),len(wells)])
    distformation = np.zeros([len(wells),len(wells),len(formations)])
    indices = np.zeros(max_num_nonzero,dtype=int)
    indptr = np.zeros(max_num_rows+1,dtype=int)
    Adata = np.zeros(max_num_nonzero)
    b = np.zeros(max_num_rows)
    bounds = np.ones(len(data))
    nz_rows = 0
    nz_indices = 0
    # Append +1/-1 coefficients for the cumulative sum of one well's
    # increments up to the matched sample (sign depends on idx).
    def add_shift_sum(Adata, indices, nz_indices, i, path, cum_pts, wellidx, idx):
        col0 = cum_pts[wellidx]
        col1 = cum_pts[wellidx] + path[i][idx]
        num_added_indices = col1 - col0 + 1
        indices[nz_indices:nz_indices+num_added_indices] = np.arange(col0, col1+1)
        #1-2*idx so when idx=0 put +1 in Adata, when idx=1 put -1 in Adata
        Adata[nz_indices:nz_indices+num_added_indices] = np.ones(num_added_indices)*(1-2*idx)
        return num_added_indices
    # Emit one equality row (sum in well1 - sum in well2 = 0) for a
    # matched DTW pair.
    def add_row (Adata, indices, indptr, b, nz_rows, nz_indices, i, path, cum_pts, wellidxs):
        num_added_indices = 0
        indptr[nz_rows] = nz_indices
        for idx in [0,1]:
            num_added_indices = add_shift_sum(Adata, indices, nz_indices, i, path, cum_pts, wellidxs[idx], idx)
            nz_indices = nz_indices + num_added_indices
        b[nz_rows] = 0.0
        return nz_indices
    weightsum = 0.0
    for well1idx in range(nwells-1):
        for well2idx in range(well1idx+1, nwells):
            w1df = data.loc[data['Well Name'] == wells[well1idx], cols + ['FormationClass']]
            w2df = data.loc[data['Well Name'] == wells[well2idx], cols + ['FormationClass']]
            w1formations = w1df['FormationClass'].unique()
            w2formations = w2df['FormationClass'].unique()
            nzcols = []
            path = []
            # Only use measurement columns that are complete in both wells.
            for col in cols:
                if (np.all(np.isfinite(w1df[col])) & np.all(np.isfinite(w2df[col]))):
                    nzcols.append(col)
            # DTW-align the pair formation by formation; accumulate the
            # total distance and concatenate the matched index paths.
            for formation in formations:
                if (formation in w1formations) & (formation in w2formations):
                    w1f = w1df.loc[w1df['FormationClass'] == formation, nzcols]
                    w2f = w2df.loc[w2df['FormationClass'] == formation, nzcols]
                    w1 = np.array(w1f.values)
                    w2 = np.array(w2f.values)
                    dist_tmp, path_tmp = fastdtw.dtw(w1, w2, 2)
                    dist[well1idx,well2idx] += dist_tmp
                    distformation[well1idx,well2idx,formations.tolist().index(formation)] = dist_tmp
                    for pair in path_tmp:
                        idx1 = w1f.index[pair[0]]-w1df.index[0]
                        idx2 = w2f.index[pair[1]]-w2df.index[0]
                        path.append((idx1, idx2))
            # Lower bounds for each well's first increment come from the
            # largest offset the alignment implies at the well top.
            bounds[cum_pts[well1idx]] = np.max([bounds[cum_pts[well1idx]], path[0][1]])
            bounds[cum_pts[well2idx]] = np.max([bounds[cum_pts[well2idx]], path[0][0]])
            #NOTE delete
            #np.save('path_%d_%d_fce.npy' % (well1idx, well2idx), path, allow_pickle = False)
            pre_nz_rows = nz_rows
            pre_nz_indices = nz_indices
            added_1=-1
            added_2=-1
            # Keep only one-to-one matches: skip pairs where either sample
            # was already used, and require neighbouring path steps to
            # advance in both wells.
            for i in range(len(path)):
                if ((path[i][0] != added_1) & (path[i][1] != added_2)):
                    if ((i > 0) & (i < len(path)-1)):
                        if (((path[i][0] != path[i-1][0]) & (path[i][1] != path[i+1][1])) | ((path[i][0] != path[i+1][0]) & (path[i][1] != path[i-1][1]))):
                            nz_indices = add_row(Adata, indices, indptr, b, nz_rows, nz_indices, i, path, cum_pts, [well1idx, well2idx])
                            nz_rows = nz_rows + 1
                            added_1 = path[i][0]
                            added_2 = path[i][1]
                    elif (i>0):
                        if ((path[i][0] != path[i-1][0]) & (path[i][1] != path[i-1][1])):
                            nz_indices = add_row(Adata, indices, indptr, b, nz_rows, nz_indices, i, path, cum_pts, [well1idx, well2idx])
                            nz_rows = nz_rows + 1
                            added_1 = path[i][0]
                            added_2 = path[i][1]
                    else:
                        if ((path[i][0] != path[i+1][0]) & (path[i][1] != path[i+1][1])):
                            nz_indices = add_row(Adata, indices, indptr, b, nz_rows, nz_indices, i, path, cum_pts, [well1idx, well2idx])
                            nz_rows = nz_rows + 1
                            added_1 = path[i][0]
                            added_2 = path[i][1]
            # Weight this pair's rows: more matched pairs and a smaller
            # DTW distance give the pair more influence.
            num_matched_pairs = nz_rows - pre_nz_rows + 1
            p = 2.0
            weight = num_matched_pairs * (num_matched_pairs/dist[well1idx, well2idx])**(2.0/p)
            weightsum = weightsum + weight
            Adata[pre_nz_indices : nz_indices] = Adata[pre_nz_indices : nz_indices] * weight
    # Normalise all weights and close the CSR structure.
    Adata[:nz_indices] = Adata[:nz_indices] / weightsum
    indptr[nz_rows] = nz_indices
    indptr = indptr[:nz_rows+1]
    np.save('dtw_dist_fce.npy', dist)
    np.save('dtw_distformation_fce.npy', distformation)
    return Adata, indices, indptr, b, bounds, nz_rows, nz_indices
def create_Ab(Adata, indices, indptr, b, nz_rows, nz_indices):
    """Trim the preallocated CSR buffers to their used portions and build
    the sparse matrix; returns (A, b, data, indices)."""
    used_data = Adata[:nz_indices]
    used_indices = indices[:nz_indices]
    rhs = b[:nz_rows]
    matrix = csr_matrix((used_data, used_indices, indptr), dtype=float)
    return matrix, rhs, used_data, used_indices
def solve_Ax(A, b, bounds, data, wells, nwells, npts_in_well, cum_pts, reg_start_row, its=1):
    # Solve the bounded least-squares system for per-sample increments and
    # integrate (cumsum) them into an 'RGT' column per well (in place).
    res = lsq_linear(A,b,bounds=(bounds, 100.0*np.ones(len(data))),verbose=2,lsmr_tol='auto',max_iter=its)
    wellnames = data['Well Name'].unique()
    k = 0
    for i, wellname in enumerate(wellnames):
        wl = len(data.loc[data['Well Name'] == wellname])
        rgt = np.cumsum(res.x[k:k+wl])
        data.loc[data['Well Name'] == wellname, 'RGT'] = rgt
        k = k+wl
def find_rgt(data, names, its):
    # Build the DTW alignment system over the columns in `names` and solve
    # it, giving every sample a relative geologic time (RGT).
    wellnames = get_wellnames(data)
    numwells = get_numwells(data)
    npts_in_well, cum_pts = get_pts_per_well(data)
    Adata, indices, indptr, b, bounds, dtw_rows, dtw_indices = build_Adtw(data, wellnames, numwells, npts_in_well, cum_pts, names)
    A, b, Adata, indices = create_Ab(Adata, indices, indptr, b, dtw_rows, dtw_indices)
    solve_Ax(A, b, bounds, data, wellnames, numwells, npts_in_well, cum_pts, dtw_rows, its)
# End of RGT functions
# Start of feature engineering functions
def find_dist(data):
    # Embed each well on a 1-D axis ('X1D') from the precomputed pairwise
    # DTW distances, by least-squares: each row of A encodes
    # x_i - x_j = dist_ij for one well pair.
    wellnames = get_wellnames(data)
    numwells = get_numwells(data)
    dist = np.load('dtw_dist_fce.npy')
    dist[pd.isnull(dist)]=0.0
    # Stored matrix is upper triangular; symmetrise it.
    distf = dist + dist.T
    numpairs = int(numwells * (numwells-1) / 2)
    A = np.zeros([numpairs, numwells], dtype=int)
    b = np.zeros(numpairs)
    row = 0
    for well1idx in range(numwells-1):
        for well2idx in range(well1idx+1, numwells):
            A[row, well1idx] = 1
            A[row, well2idx] = -1
            b[row] = distf[well1idx, well2idx]
            row += 1
    dist = lsqr(A,b)
    for well1idx in range(numwells):
        set_well_value(data, wellnames[well1idx], 'X1D', dist[0][well1idx])
def interval_cols(intervals):
    """Names of the features produced by interval_fe, in generation order."""
    per_metric_templates = (
        '{m}FromPrev{i}Change',
        '{m}ToNext{i}Change',
        '{m}ToNearest{i}Change',
        'FracThrough{m}{i}',
        '{i}Size{m}',
    )
    names = []
    for iv in intervals:
        for metric in ('Depth', 'RGT'):
            names.extend(t.format(m=metric, i=iv) for t in per_metric_templates)
        names.append('Next{0}'.format(iv))
        names.append('Prev{0}'.format(iv))
        names.append('{0}Compaction'.format(iv))
    return names
def interval_fe(data, intervals):
    # Interval-based feature engineering: for each interval column (e.g.
    # formation, NM_M segment) and each of Depth/RGT, compute where each
    # sample sits relative to the interval's boundaries, plus next/prev
    # interval labels and a compaction ratio.  Returns a new frame.
    for interval in intervals:
        for metric in ['Depth','RGT']:
            # Interval start: minimum of the metric within the interval.
            df = data.groupby(['Well Name',interval],sort=False)[metric].min().reset_index()
            df.columns = ['Well Name',interval,'%sPrev%sChange' % (metric, interval)]
            data = pd.merge(data,df,how='left',on = ['Well Name',interval])
            df = data.groupby(['Well Name',interval],sort=False)[metric].max().reset_index()
            df.columns = ['Well Name',interval,'Max%sBefore%sChange' % (metric, interval)]
            data = pd.merge(data,df,how='left',on = ['Well Name',interval])
            # Set next change to be prev change of next interval. This will cause 'NaN' at the end of each well, so fill those with the max of the interval
            df = data.groupby(['Well Name',interval],sort=False)['%sPrev%sChange' % (metric, interval)].first().reset_index()
            df['%sNext%sChange' % (metric, interval)] = df['%sPrev%sChange' % (metric, interval)].shift(-1).reset_index(drop=True)
            df.drop('%sPrev%sChange' % (metric, interval),axis=1,inplace=True)
            df = df.groupby(['Well Name',interval],sort=False).first()
            for wellname in df.index.levels[0]:
                df.loc[wellname,df.loc[wellname].index[-1]] = np.nan
            df = df.reset_index()
            data = pd.merge(data,df,how='left',on = ['Well Name', interval])
            data.loc[data['%sNext%sChange' % (metric, interval)].isnull(),'%sNext%sChange' % (metric, interval)] = data.loc[data['%sNext%sChange' % (metric, interval)].isnull(),'Max%sBefore%sChange' % (metric, interval)]
            #IntervalSizeMetric
            data['%sSize%s'%(interval,metric)] = data['%sNext%sChange'%(metric,interval)] - data['%sPrev%sChange'%(metric,interval)]
            #MetricFromPrevIntervalChange
            data['%sFromPrev%sChange' % (metric,interval)] = data[metric] - data['%sPrev%sChange' % (metric,interval)]
            #MetricToNextIntervalChange
            data['%sToNext%sChange' % (metric,interval)] = data['%sNext%sChange' % (metric,interval)] - data[metric]
            #MetricToNearestIntervalChange
            data['%sToNearest%sChange' % (metric,interval)] = data[['%sToNext%sChange' % (metric,interval), '%sFromPrev%sChange' % (metric,interval)]].min(axis=1)
            #FracThroughMetricInterval
            data['FracThrough%s%s' % (metric,interval)] = (data[metric] - data['%sPrev%sChange'%(metric,interval)]) / (data['%sSize%s'%(interval,metric)]+eps)
        #Next/PrevInterval
        # Shift the per-(well, interval) label up/down one interval; ends
        # of a well fall back to the interval's own label.
        intervalClass = data.groupby(['Well Name', interval],sort=False)[interval].first()
        intervalClass.name = 'Shift%s' %interval
        nextIntervalClass = intervalClass.shift(-1).reset_index()
        prevIntervalClass = intervalClass.shift(1).reset_index()
        nextIntervalClass.columns = ['Well Name',interval,'Next%s'%interval]
        prevIntervalClass.columns = ['Well Name',interval,'Prev%s'%interval]
        nextIntervalClass.loc[nextIntervalClass['Next%s'%interval].isnull(),'Next%s'%interval] = nextIntervalClass.loc[nextIntervalClass['Next%s'%interval].isnull(),interval]
        prevIntervalClass.loc[prevIntervalClass['Prev%s'%interval].isnull(),'Prev%s'%interval] = prevIntervalClass.loc[prevIntervalClass['Prev%s'%interval].isnull(),interval]
        data = pd.merge(data,nextIntervalClass,how='left',on = ['Well Name', interval])
        data = pd.merge(data,prevIntervalClass,how='left',on = ['Well Name', interval])
        #Compaction
        data['%sCompaction'%interval] = data['%sSizeRGT'%interval] / (data['%sSizeDepth'%interval] + eps)
    return data
def measurement_cols(ms):
    """Feature names added by measurement_fe for each log measurement."""
    prefixes = ('MedFilt', 'Diff', 'Diff2', 'Sharp')
    return ['{0}{1}'.format(prefix, m) for m in ms for prefix in prefixes]
def measurement_fe(data, ms):
    # Per-measurement feature engineering, computed well by well so
    # filters/gradients never cross well boundaries: median-filtered log,
    # first and second derivatives, and a 'sharpened' log.
    dfg = data.groupby('Well Name')
    for m in ms:
        #MedFilt NOTE WINDOW CHOICE
        for name,group in dfg[m]:
            data.loc[data['Well Name']==name,'MedFilt%s'%m] = medfilt(group,15)
        #Diff
        for name,group in dfg[m]:
            data.loc[data['Well Name']==name,'Diff%s'%m] = np.gradient(group)
        #Diff2
        for name,group in dfg['Diff%s'%m]:
            data.loc[data['Well Name']==name,'Diff2%s'%m] = np.gradient(group)
        #Sharp
        # Unsharp-style enhancement: subtract the second derivative.
        data['Sharp%s' %m] = data[m] - data['Diff2%s' % m]
    return data
def interval_measurement_cols(intervals, ms):
    """Feature names added by interval_measurement_fe, in generation order.

    'Local' intervals get only the mean-based features; all others also
    get standard-deviation features.
    """
    names = []
    for interval in intervals:
        for m in ms:
            names += ['Mean%s%s' % (interval, m), 'DiffMean%s%s' % (interval, m)]
            if interval != 'Local':
                names += ['Std%s%s' % (interval, m), 'FracStd%s%s' % (interval, m)]
    return names
def interval_measurement_fe(data, intervals, ms):
    # Combine interval and measurement features: per interval type, compute
    # the interval mean (or a local Gaussian-smoothed mean for 'Local'),
    # each sample's deviation from it, and (except for 'Local') the
    # interval standard deviation and the deviation in std units.
    for interval in intervals:
        for m in ms:
            # Get dataframe group and rows
            dfg = None
            def rows(data, name):
                return None
            if (interval == 'Well') | (interval == 'Local'):
                dfg = data.groupby('Well Name')
                def rows(data, name):
                    return data['Well Name']==name
            else:
                dfg = data.groupby(['Well Name', interval])
                def rows(data, name):
                    return (data['Well Name']==name[0]) & (data[interval]==name[1])
            # Compute mean and standard deviation
            if (interval != 'Local'):
                #MeanInterval
                for name,group in dfg[m]:
                    data.loc[rows(data, name),'Mean%s%s'% (interval, m)] = np.mean(group)
                #StdInterval
                for name,group in dfg[m]:
                    data.loc[rows(data, name),'Std%s%s'% (interval, m)] = np.std(group)
            else:
                #MeanLocal NOTE WINDOW CHOICE
                gauss = gaussian(5,1)
                gauss /= np.sum(gauss)
                for name,group in dfg[m]:
                    data.loc[rows(data, name),'MeanLocal%s'%m] = np.convolve(group,gauss,'same')
            #DiffMeanInterval
            data['DiffMean%s%s'% (interval, m)] = data[m] - data['Mean%s%s'% (interval, m)]
            #FracStdInterval
            if (interval != 'Local'):
                data['FracStd%s%s'% (interval, m)] = data['DiffMean%s%s'% (interval, m)] / (data['Std%s%s'% (interval, m)]+eps)
    return data
def basic_feature_engineering(data):
    """Build the base feature set shared by every prediction stage.

    Returns ``(cols, data)`` where *cols* lists the newly added feature
    columns and *data* is the augmented DataFrame.  Relies on helpers
    defined earlier in this file: find_dist, interval_cols, interval_fe,
    measurement_* and interval_measurement_*.
    """
    cols = ['X1D', 'Formation3Depth', 'DepthFromSurf', 'WellFracMarine', 'FormationFracMarine', 'DepthFromSurf_divby_RGT', 'FormationSizeDepth_rel_av', 'FormationSizeRGT_rel_av', 'DiffRGT', 'IGR', 'VShaleClavier']
    # Give unique values to each NM_M interval so they can be distinguished below
    # Very hacky method for doing it...
    # NOTE(review): row 0 is assigned row 1's interval id; if an NM_M boundary
    # falls between rows 0 and 1 they get merged -- probably harmless, verify.
    nmclasssep = np.zeros(len(data['NM_MClass']))
    nmclasssep[1:] = np.cumsum(np.abs(np.diff(data['NM_MClass'].values)))
    nmclasssep[0] = nmclasssep[1]
    data['NM_MClassSep'] = nmclasssep
    intervals = ['FormationClass', 'NM_MClassSep']
    intervals_measurement = intervals + ['Well', 'Local']
    cols += interval_cols(intervals)
    ms=[u'GR', u'ILD_log10', u'DeltaPHI', u'PHIND', u'RELPOS']
    cols += measurement_cols(ms)
    cols += interval_measurement_cols(intervals_measurement, ms)
    # X1D
    find_dist(data)
    # Formation3Depth: shallowest depth of FormationClass==3 in each well.
    df = data.loc[data['FormationClass']==3].groupby(['Well Name'],sort=False)['Depth'].min().reset_index()
    df.columns = ['Well Name','Formation3Depth']
    data = pd.merge(data,df,how='left',on = 'Well Name')
    # DepthFromSurf: depth relative to the shallowest sample of each well.
    df = data.groupby(['Well Name'],sort=False)['Depth'].min().reset_index()
    df.columns = ['Well Name','SurfDepth']
    data = pd.merge(data,df,how='left',on = ['Well Name'])
    data['DepthFromSurf'] = data['Depth']-data['SurfDepth']
    data.drop('SurfDepth',axis=1,inplace=True)
    # WellFracMarine: mean NM_MClass per well.
    df = data.groupby(['Well Name'],sort=False)['NM_MClass'].mean().reset_index()
    df.columns = ['Well Name','WellFracMarine']
    data = pd.merge(data,df,how='left',on = ['Well Name'])
    # FormationFracMarine: mean NM_MClass per (well, formation).
    df = data.groupby(['Well Name', 'FormationClass'],sort=False)['NM_MClass'].mean().reset_index()
    df.columns = ['Well Name','FormationClass','FormationFracMarine']
    data = pd.merge(data,df,how='left',on = ['Well Name', 'FormationClass'])
    #DepthFromSurf_divby_RGT
    data['DepthFromSurf_divby_RGT'] = data['DepthFromSurf']/data['RGT']
    #DiffRGT: per-well derivative of relative geologic time.
    wellrgt = data.groupby(['Well Name'],sort=False)['RGT']
    for name,group in wellrgt:
        data.loc[data['Well Name']==name,'DiffRGT'] = np.gradient(group)
    # Intervals
    data = interval_fe(data, intervals)
    # Remove useless columns
    cols.remove('NextNM_MClassSep')
    cols.remove('PrevNM_MClassSep')
    # FormationSizeDepth_rel_av: formation thickness relative to that
    # formation's mean thickness across wells.
    mss=data.groupby(['Well Name','FormationClass'])['FormationClassSizeDepth'].first().reset_index().groupby('FormationClass').mean().values
    data['FormationSizeDepth_rel_av']=data['FormationClassSizeDepth'].values/mss[data['FormationClass'].values.astype(int)].ravel()
    # FormationSizeRGT_rel_av: same ratio measured in RGT units.
    mss=data.groupby(['Well Name','FormationClass'])['FormationClassSizeRGT'].first().reset_index().groupby('FormationClass').mean().values
    data['FormationSizeRGT_rel_av']=data['FormationClassSizeRGT'].values/mss[data['FormationClass'].values.astype(int)].ravel()
    #Measurements
    data = measurement_fe(data, ms)
    data = interval_measurement_fe(data, intervals_measurement, ms)
    #IGR: filtered gamma ray normalised to [0, 1].
    data['IGR'] = (data['MedFiltGR']-data['MedFiltGR'].min())/(data['MedFiltGR'].max()-data['MedFiltGR'].min())
    #VShaleClavier
    # NOTE(review): the standard Clavier transform is 1.7 - sqrt(3.38 - (IGR+0.7)^2);
    # this code multiplies instead of subtracting. Possibly intentional as a
    # feature, but worth confirming.
    data['VShaleClavier'] = 1.7 * np.sqrt(3.38 - (data['IGR']+0.7)**2)
    return cols, data
def predict_pe_feature_engineering(data):
    """Features added just before the PE regression step.

    Uses the first-pass 'Facies' prediction as an interval: per-Facies
    interval features plus per-Facies statistics of the raw measurements.
    Returns ``(new column names, augmented DataFrame)``.
    """
    cols = []
    intervals = ['Facies']
    cols += interval_cols(intervals)
    ms=[u'GR', u'ILD_log10', u'DeltaPHI', u'PHIND', u'RELPOS']
    cols += interval_measurement_cols(intervals, ms)
    data = interval_fe(data, intervals)
    data = interval_measurement_fe(data, intervals, ms)
    return cols, data
def predict_facies2_feature_engineering(data):
    """Features added before the second facies pass, once PE is available.

    Builds the PE-derived signal features (MedFilt/Diff/Diff2/Sharp) and
    interval statistics of PE over formation, NM_M, well and local windows.
    Returns ``(new column names, augmented DataFrame)``.
    """
    cols = []
    intervals = ['FormationClass', 'NM_MClassSep', 'Well', 'Local']
    ms=['PE']
    cols += measurement_cols(ms)
    cols += interval_measurement_cols(intervals, ms)
    data = measurement_fe(data, ms)
    data = interval_measurement_fe(data, intervals, ms)
    return cols, data
def make_classifier(data, Xcols, Ycols, rows, clf):
    """Fit *clf* on every row of *data* NOT selected by the boolean mask *rows*.

    *rows* flags the rows whose target will be predicted later, so its
    complement is the training set.  Returns the fitted classifier.
    """
    train_mask = ~rows
    clf.fit(data.loc[train_mask, Xcols], data.loc[train_mask, Ycols])
    return clf
def classify(data, clf, Xcols, Ycols, rows):
    """Overwrite *Ycols* in place with clf.predict(...) for the rows
    selected by the boolean mask *rows*."""
    features = data.loc[rows, Xcols]
    data.loc[rows, Ycols] = clf.predict(features)
def make_regressor(data, Xcols, Ycols, rows, reg):
    """Fit *reg* on every row of *data* NOT selected by the boolean mask *rows*.

    Mirrors make_classifier for the regression target.  Returns the fitted
    regressor.
    """
    train_mask = ~rows
    reg.fit(data.loc[train_mask, Xcols], data.loc[train_mask, Ycols])
    return reg
def regress(data, reg, Xcols, Ycols, rows):
    """Overwrite *Ycols* in place with reg.predict(...) for the rows
    selected by the boolean mask *rows*."""
    features = data.loc[rows, Xcols]
    data.loc[rows, Ycols] = reg.predict(features)
#NOTE seeds
def run(solve_rgt=False):
    """End-to-end pipeline for the ar4 facies submission.

    solve_rgt=True recomputes relative geologic time (RGT) from scratch via
    find_rgt (slow); otherwise a precomputed alignment is loaded from
    'dtw_out_fce_14000.csv'.  Stages: feature engineering -> first facies
    pass -> PE regression -> second facies pass -> median smoothing.
    Writes predictions for the two blind wells to 'ar4_submission3.csv'.
    """
    # Load + preprocessing
    odata = load_data()
    if (solve_rgt):
        data = load_data()
        le = make_label_encoders(data, ['Formation', 'NM_M'])
        apply_label_encoders(data, le)
        scalers = make_scalers(data, ['GR', 'ILD_log10', 'DeltaPHI', 'PHIND', 'RELPOS', 'PE', 'FormationClass', u'NM_MClass', u'Facies'])
        apply_scalers(data, scalers)
        #NOTE Max its
        find_rgt(data, [u'DeltaPHI', u'Facies', u'GR', u'ILD_log10', u'NM_MClass', u'PE', u'PHIND', u'RELPOS'], 1)
    else:
        data = pd.read_csv('dtw_out_fce_14000.csv')
        data.drop(u'Unnamed: 0', axis=1, inplace=True)
    # Reset Facies back to their unscaled values
    data['Facies']=odata['Facies'].values
    scalers = make_scalers(data, ['RGT'], stype='Standard')
    apply_scalers(data, scalers)
    neigh_interp(data)
    cols = ['DeltaPHI', 'GR', 'ILD_log10', 'PHIND', 'RELPOS', 'FormationClass', 'NM_MClass', 'RGT', 'NeighbFacies']
    basic_cols, data = basic_feature_engineering(data)
    cols += basic_cols
    # Fixed seeds for reproducibility of the XGBoost models.
    seed1=0
    seed2=0
    seed3=0
    # Rows with missing targets are the ones to predict.
    facies_rows_to_predict = data['Facies'].isnull()
    pe_rows_to_predict = data['PE'].isnull()
    clf1 = XGBClassifier(base_score=0.5, colsample_bylevel=0.5, colsample_bytree=0.6, gamma=0.01, learning_rate=0.025, max_delta_step=0, max_depth=2, min_child_weight=7, missing=None, n_estimators=500, nthread=-1, objective='multi:softprob', reg_alpha=2, reg_lambda=20, scale_pos_weight=1, seed=seed1, silent=True, subsample=0.2)
    clf2 = XGBClassifier(base_score=0.5, colsample_bylevel=0.3, colsample_bytree=0.8,
                         gamma=0.01, learning_rate=0.05, max_delta_step=0, max_depth=3,
                         min_child_weight=1, missing=None, n_estimators=500, nthread=-1,
                         objective='multi:softprob', reg_alpha=0, reg_lambda=1,
                         scale_pos_weight=1, seed=seed2, silent=True, subsample=0.5)
    reg1 = XGBRegressor(base_score=0.5, colsample_bylevel=0.5, colsample_bytree=0.1,
                        gamma=0, learning_rate=0.05, max_delta_step=0, max_depth=1,
                        min_child_weight=10, missing=None, n_estimators=500, nthread=-1,
                        objective='reg:linear', reg_alpha=10, reg_lambda=10,
                        scale_pos_weight=1, seed=seed3, silent=True, subsample=0.1)
    # Predict facies #1
    Ycol = 'Facies'
    Xcols = cols
    clf = make_classifier(data, Xcols, Ycol, facies_rows_to_predict, clf1)
    classify(data, clf, Xcols, Ycol, facies_rows_to_predict)
    # Median-smooth the first pass per well into 'Facies1', which becomes an
    # input feature for the later stages.
    for wellname in get_wellnames(data):
        wd = data.loc[data['Well Name'] == wellname, 'Facies']
        wd = medfilt(wd, kernel_size=5)
        data.loc[data['Well Name'] == wellname, 'Facies1'] = wd
    cols += ['Facies1']
    # Predict PE
    predict_pe_cols, data = predict_pe_feature_engineering(data)
    cols += predict_pe_cols
    Ycol = 'PE'
    Xcols = cols
    reg = make_regressor(data, Xcols, Ycol, pe_rows_to_predict, reg1)
    regress(data, reg, Xcols, Ycol, pe_rows_to_predict)
    cols += ['PE']
    # Predict facies #2
    predict_facies2_cols, data = predict_facies2_feature_engineering(data)
    cols += predict_facies2_cols
    Ycol = 'Facies'
    Xcols = cols
    clf = make_classifier(data, Xcols, Ycol, facies_rows_to_predict, clf2)
    classify(data, clf, Xcols, Ycol, facies_rows_to_predict)
    # Final per-well median smoothing (wider kernel than pass 1).
    for wellname in get_wellnames(data):
        wd = data.loc[data['Well Name'] == wellname, 'Facies']
        wd = medfilt(wd, kernel_size=7)
        data.loc[data['Well Name'] == wellname, 'Facies'] = wd
    # Keep only the two blind wells for the submission file.
    data = data.loc[(data['Well Name'] == 'STUART') | (data['Well Name'] == 'CRAWFORD'),['Well Name','Depth','Facies']]
    data.to_csv('ar4_submission3.csv')
if __name__ == "__main__":
run()
| {
"repo_name": "seg/2016-ml-contest",
"path": "ar4/ar4_submission3.py",
"copies": "1",
"size": "27730",
"license": "apache-2.0",
"hash": -5907807276246357000,
"line_mean": 43.2971246006,
"line_max": 329,
"alpha_frac": 0.6047601875,
"autogenerated": false,
"ratio": 3.136168287717711,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4240928475217711,
"avg_score": null,
"num_lines": null
} |
""" A large portion of this sub-package is taken from Pyramid
"""
import inspect
import logging
import pkgutil
from typing import Optional, Tuple
from types import ModuleType
import ramlfications as raml
import venusian
from pyrsistent import pmap, pvector
from solo.server.app import App
from solo.server.csrf import SessionCSRFStoragePolicy
from ..config.app import Config, AppConfig
from .util import maybe_dotted
from .config.rendering import BUILTIN_RENDERERS
from .config.rendering import RenderingConfigurator
from .config.routes import RoutesConfigurator
from .config.views import ViewsConfigurator
from .config.sums import SumTypesConfigurator
from .exceptions import ConfigurationError
from .registry import Registry
from .path import caller_package
from .view import http_defaults
from solo.server.definitions import PredicatedHandler
from .view import http_endpoint
from .url import normalize_route_pattern, complete_route_pattern, complete_url_rules
__all__ = ['http_endpoint', 'http_defaults', 'Configurator']
log = logging.getLogger(__name__)
class Configurator:
    """Application configurator (structure borrowed from Pyramid).

    Wires routes, views, renderers, sum types and user directives onto an
    :class:`App` instance, and supports including sub-packages via their
    ``includeme`` hooks and RAML API specs.
    """
    # Held as class attributes so tests can substitute stubs.
    venusian = venusian
    inspect = inspect

    def __init__(self,
                 app: App,
                 config: Config,
                 route_prefix=None,
                 router_configurator=RoutesConfigurator,
                 views_configurator=ViewsConfigurator,
                 rendering_configurator=RenderingConfigurator,
                 sum_types_configurator=SumTypesConfigurator) -> None:
        # The *_configurator factories are injectable for testing/extension.
        if route_prefix is None:
            route_prefix = ''
        self.app = app
        self.registry = Registry(config=config,
                                 csrf_policy=SessionCSRFStoragePolicy())
        self.router = router_configurator(app.url_gen, route_prefix)
        self.views = views_configurator(app)
        self.rendering = rendering_configurator(app)
        self.sums = sum_types_configurator()
        # Immutable (pyrsistent) map of user-registered directives;
        # see add_directive / __getattr__.
        self._directives = pmap({})
        self.setup_configurator()

    def include(self, callable, route_prefix: Optional[str] = None) -> None:
        """
        Execute another package's configuration under this configurator,
        nesting its routes under *route_prefix*.

        :param callable: package to be configured
        :param route_prefix: prefix appended to the current route prefix
        :return:
        """
        configuration_section: ModuleType = maybe_dotted(callable)
        # Scope route names to the included package's namespace.
        old_namespace = self.router.change_namespace(configuration_section.__package__)
        module = self.inspect.getmodule(configuration_section)
        if module is configuration_section:
            # A module (not a function) was passed: delegate to its includeme().
            try:
                configuration_section = getattr(module, 'includeme')
                log.debug(f'Including {callable}')
            except AttributeError:
                raise ConfigurationError(
                    f"module {module.__name__} has no attribute 'includeme'"
                )
        sourcefile = self.inspect.getsourcefile(configuration_section)
        if sourcefile is None:
            raise ConfigurationError(
                'No source file for module {} (.py file must exist, '
                'refusing to use orphan .pyc or .pyo file).'.format(module.__name__)
            )
        if route_prefix is None:
            route_prefix = ''
        # Nest the include's prefix under the currently active prefix.
        route_prefix = u'{}/{}'.format(self.router.route_prefix.rstrip('/'), route_prefix.lstrip('/'))
        old_route_prefix = self.router.change_route_prefix(route_prefix)
        configuration_section(self)
        # Restore the outer namespace/prefix once the include is done.
        self.router.change_namespace(old_namespace)
        self.router.change_route_prefix(old_route_prefix)

    def include_api_specs(self, pkg_name: str, path: str) -> None:
        """Parse a RAML spec bundled with *pkg_name* and register one route
        per unique resource path."""
        log.debug(f'Including API specs: {pkg_name}:{path}')
        data: bytes = pkgutil.get_data(pkg_name, path)
        # NOTE(review): pkgutil.get_data() can return None when the loader
        # cannot supply the resource; decode() would then raise
        # AttributeError. Confirm the resource is always packaged.
        data = data.decode('utf-8')
        data = raml.loads(data)
        raml_config = raml.setup_config(None)
        raml_config["validate"] = True
        specs = raml.parse_raml(data, raml_config)
        processed = set()
        for res in specs.resources:
            # A path appears once per HTTP method; register each path once.
            if res.name in processed:
                continue
            if not res.method:
                continue
            name, pattern, rules = normalize_route_pattern(res.path)
            self.router.add_route(name=name, pattern=pattern, rules=rules)
            processed.add(res.name)

    def scan(self, package: Optional[AppConfig] = None, categories=None, onerror=None, ignore=None) -> None:
        """Venusian-scan *package* for decorated views/routes, verify the
        declarations are consistent, register the routes on the app, and run
        the package's declarative setup steps."""
        # NOTE(review): package.setup below fails if *package* is None even
        # though the signature allows it -- confirm callers always pass one.
        pkg_name = package.name if package else caller_package()
        pkg = maybe_dotted(pkg_name)
        log.debug(f'Scanning {pkg}')
        scanner = self.venusian.Scanner(configurator=self)
        previous_namespace = scanner.configurator.router.change_namespace(pkg.__name__)
        scanner.scan(pkg, categories=categories, onerror=onerror, ignore=ignore)
        # Fail fast on inconsistent route / sum-type declarations.
        self.router.check_routes_consistency(pkg)
        self.sums.check_sum_types_consistency(pkg)
        scanner.configurator.router.change_namespace(previous_namespace)
        self.app = self.register_routes(self.app, pkg_name)
        # Each setup step is a one-item mapping: directive name -> kwargs.
        for setup_step in package.setup:
            directive, kw = pvector(setup_step.items())[0]
            getattr(self, directive)(**kw)
        log.debug(f'End scanning {pkg}')

    def register_routes(self, webapp: App, namespace: str) -> App:
        """Connect every route collected under *namespace* to the app's
        route map, wrapping its views in a PredicatedHandler."""
        # Setup routes
        # ------------
        application_routes = self.router.routes[namespace]
        for route in application_routes.values():
            handler = PredicatedHandler(route.rules, route.view_metas)
            requirements = complete_url_rules(route.rules)
            log.debug(
                f'Binding route {route.pattern} '
                f'to the handler named {route.name} '
                f'in the namespace {namespace}.'
            )
            webapp.route_map.connect(
                f'{namespace}:{route.name}',
                route.pattern,
                requirements=requirements,
                controller=handler
            )
        return webapp

    def setup_configurator(self) -> None:
        """Install the built-in renderers and default view predicates."""
        # Add default renderers
        # ---------------------
        for name, renderer in BUILTIN_RENDERERS.items():
            self.rendering.add_renderer(name, renderer)
        # Predicates machinery
        # --------------------
        self.views.add_default_view_predicates()

    def add_directive(self, directive, name=None, action_wrap=True):
        """ THIS METHOD IS A MODIFIED COPY OF ``pyramid.config.Configurator.add_directive``.
        MODIFIED ON: 2016-04-26.
        DOCS: http://docs.pylonsproject.org/projects/pyramid/en/latest/api/config.html#pyramid.config.Configurator.add_directive
        """
        c = maybe_dotted(directive)
        if name is None:
            name = c.__name__
        # pmap is immutable: update() returns a new map which replaces the old.
        self._directives = self._directives.update({name: (c, action_wrap)})

    def __getattr__(self, name: str):
        """ THIS METHOD IS A MODIFIED COPY OF ``pyramid.config.Configurator.__getattr__``.
        MODIFIED ON: 2016-04-26.

        Resolves unknown attributes against the registered directives.
        """
        directives = getattr(self, '_directives', {})
        c = directives.get(name)
        if c is None:
            log.debug(directives)
            raise AttributeError(name)
        c, action_wrap = c
        # Create a bound method (works on both Py2 and Py3)
        # http://stackoverflow.com/a/1015405/209039
        m = c.__get__(self, self.__class__)
        return m

    def complete(self) -> Tuple[App, Registry]:
        """Return the configured (app, registry) pair."""
        return self.app, self.registry
| {
"repo_name": "avanov/solo",
"path": "solo/configurator/__init__.py",
"copies": "1",
"size": "7360",
"license": "mit",
"hash": -7778135602919673000,
"line_mean": 36.7435897436,
"line_max": 128,
"alpha_frac": 0.6213315217,
"autogenerated": false,
"ratio": 4.184195565662308,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0007578850950637447,
"num_lines": 195
} |
# a larger script for InMoov
# (Jython/MyRobotLab script: Runtime, sleep etc. are provided by the runtime)
inMoov = Runtime.createAndStart("inMoov", "InMoov")
# attach an arduino to InMoov
# possible board types include uno atmega168 atmega328p atmega2560 atmega1280 atmega32u4
# the MRLComm.ino sketch must be loaded into the Arduino for MyRobotLab control !
# set COM number according to the com of your Arduino board
inMoov.attachArduino("right","uno","COM12")
inMoov.attachArduino("left","atmega1280","COM10")
# attaching body parts
inMoov.attachHand("right")
inMoov.attachHand("left")
inMoov.attachArm("right")
inMoov.attachArm("left")
inMoov.attachHead("right")
# verify all attached services/servos respond before running gestures
inMoov.systemCheck()
def heard():
data = msg_ear_recognized.data[0]
print "heard ", data
#mouth.setLanguage("fr")
mouth.setLanguage("en")
mouth.speak("you said " + data)
if (data == "rest"):
rest()
elif (data == "one"):
takeball()
elif (data == "one ball"):
ball()
elif (data == "two"):
keepball()
elif (data == "three"):
goestotake1()
elif (data == "four"):
goestotake2()
elif (data == "five"):
take()
elif (data == "six"):
takefinal1()
elif (data == "seven"):
takefinal2()
elif (data == "eight"):
takefinal3()
elif (data == "nine"):
takefinal4()
elif (data == "ten"):
davinciarm1()
elif (data == "look one"):
lookatthing2()
elif (data == "down one"):
putdown1()
elif (data == "down two"):
putdown2()
elif (data == "point"):
pointfinger()
elif (data == "scared"):
scared()
elif (data == "ballet"):
ballet()
elif (data == "surrender"):
surrender()
elif (data == "surrender two"):
surrender2()
elif (data == "what"):
what()
elif (data == "welcome"):
welcome()
elif (data == "protect"):
protectface()
elif (data == "start tracking"):
inMoov.startTracking()
elif (data == "stop tracking"):
inMoov.stopTracking()
inMoov.startListening("rest | one ball | one | two | three | four | five | six | seven | eight | nine | ten | look one | down one | down two | point | scared | ballet | surrender | surrender two | what | welcome | protect | start tracking | stop tracking")
# Pose routines: each sets servo speeds (fractions of full speed) and then
# target angles for the head, arms and hands. Trigger words refer to the
# mapping in heard() above.

# Voice command "rest": neutral stance.
def rest():
    inMoov.setHandSpeed("left", 1.0, 1.0, 1.0, 1.0, 1.0, 1.0)
    inMoov.setHandSpeed("right", 1.0, 1.0, 1.0, 1.0, 1.0, 1.0)
    inMoov.setArmSpeed("left", 0.75, 0.75, 0.75, 0.75)
    inMoov.setArmSpeed("right", 0.75, 0.75, 0.75, 0.75)
    inMoov.setHeadSpeed( 0.75, 0.75)
    inMoov.moveHead(90,90)
    inMoov.moveArm("left",0,85,16,15)
    inMoov.moveArm("right",0,73,29,15)
    inMoov.moveHand("left",50,28,30,10,10,90)
    inMoov.moveHand("right",10,10,10,10,10,90)
    # Publish the new servo state to attached listeners/GUI.
    inMoov.broadcastState()
    sleep(5)

# Voice command "one ball".
def ball():
    inMoov.setHandSpeed("right", 0.75, 0.75, 0.75, 0.75, 0.75, 0.75)
    inMoov.setArmSpeed("right", 0.85, 0.85, 0.85, 0.85)
    inMoov.moveHead(52,81)
    inMoov.moveArm("left",0,84,16,15)
    inMoov.moveArm("right",0,85,58,15)
    inMoov.moveHand("left",50,28,30,10,10,90)
    inMoov.moveHand("right",10,111,103,19,11,90)

# Voice command "one": close the right hand (presumably around a ball).
def takeball():
    inMoov.setHandSpeed("right", 0.75, 0.75, 0.75, 0.75, 0.75, 0.75)
    inMoov.setArmSpeed("right", 0.85, 0.85, 0.85, 0.85)
    inMoov.moveHead(52,81)
    inMoov.moveArm("left",0,84,16,15)
    inMoov.moveArm("right",6,73,65,16)
    inMoov.moveHand("left",50,28,30,0,0,90)
    inMoov.moveHand("right",85,131,104,106,139,129)
    sleep(5)

# Voice command "two": hold the grip while raising the right arm.
def keepball():
    inMoov.moveHead(0,80)
    inMoov.moveArm("left",0,84,16,15)
    inMoov.moveArm("right",70,62,62,16)
    inMoov.moveHand("left",50,28,30,10,10,90)
    inMoov.moveHand("right",85,131,104,106,139,75)
    sleep(4)
# Voice command "three": first step of the hand-over sequence.
def goestotake1():
    inMoov.setHandSpeed("right", 0.75, 0.75, 0.75, 0.75, 0.75, 0.65)
    inMoov.setArmSpeed("left", 0.75, 0.75, 0.75, 0.95)
    inMoov.setArmSpeed("right", 0.95, 0.95, 0.95, 0.85)
    inMoov.moveHead(15,84)
    inMoov.moveArm("left",90,91,37,15)
    inMoov.moveArm("right",63,50,45,15)
    inMoov.moveHand("left",50,28,30,10,10,0)
    inMoov.moveHand("right",85,85,75,72,81,22)
    sleep(1)

# Voice command "four".
def goestotake2():
    inMoov.setArmSpeed("left", 0.75, 0.75, 0.75, 0.95)
    inMoov.setArmSpeed("right", 0.95, 0.95, 0.95, 0.85)
    inMoov.moveHead(12,80)
    inMoov.moveArm("left",71,51,37,15)
    inMoov.moveArm("right",63,50,45,15)
    inMoov.moveHand("left",50,28,30,10,10,0)
    inMoov.moveHand("right",77,85,75,72,81,22)
    sleep(4)

# Voice command "five".
def take():
    inMoov.setHandSpeed("left", 0.75, 0.75, 0.75, 0.75, 0.75, 0.75)
    inMoov.setHandSpeed("right", 1.0, 1.0, 1.0, 1.0, 1.0, 1.0)
    inMoov.moveHead(10,74)
    inMoov.moveArm("left",71,51,37,15)
    inMoov.moveArm("right",63,50,45,15)
    inMoov.moveHand("left",50,28,30,10,10,0)
    inMoov.moveHand("right",60,85,75,72,81,22)
    sleep(2)

# Voice command "six".
def takefinal1():
    inMoov.setHandSpeed("right", 0.65, 0.65, 0.65, 0.65, 0.65, 0.65)
    inMoov.moveHead(5,74)
    inMoov.moveArm("left",71,51,37,15)
    inMoov.moveArm("right",63,50,45,15)
    inMoov.moveHand("left",50,28,30,10,10,0)
    inMoov.moveHand("right",20,75,74,72,81,22)
    sleep(1)

# Voice command "seven".
def takefinal2():
    inMoov.setHandSpeed("left", 0.75, 0.65, 0.65, 0.65, 0.65, 0.65)
    inMoov.setHandSpeed("right", 0.75, 0.65, 0.65, 0.65, 0.65, 0.65)
    inMoov.moveHead(10,74)
    inMoov.moveArm("left",68,51,37,15)
    inMoov.moveArm("right",63,50,45,15)
    inMoov.moveHand("left",155,110,118,10,10,0)
    inMoov.moveHand("right",20,64,72,72,81,22)
    sleep(4)

# Voice command "eight".
def takefinal3():
    inMoov.setHandSpeed("left", 0.75, 0.65, 0.65, 0.65, 0.65, 0.65)
    inMoov.setHandSpeed("right", 0.65, 0.65, 0.65, 0.65, 0.65, 0.65)
    inMoov.moveHead(10,74)
    inMoov.moveArm("left",68,51,37,15)
    inMoov.moveArm("right",63,50,45,15)
    inMoov.moveHand("left",170,110,118,10,10,0)
    inMoov.moveHand("right",20,30,40,30,30,22)
    sleep(3)

# Voice command "nine": final step, right arm returns down.
def takefinal4():
    inMoov.setHandSpeed("left", 1.0, 0.65, 0.65, 0.65, 0.65, 0.65)
    inMoov.setHandSpeed("right", 0.75, 0.75, 0.75, 0.75, 0.75, 0.75)
    inMoov.setArmSpeed("right", 0.75, 0.85, 0.95, 0.85)
    inMoov.moveHead(10,74)
    inMoov.moveArm("left",71,51,37,15)
    inMoov.setArmSpeed("right", 0.65, 0.65, 0.75, 0.85)
    inMoov.moveArm("right",0,82,33,15)
    inMoov.moveHand("left",140,125,125,34,34,0)
    inMoov.moveHand("right",20,20,40,30,30,20)
    sleep(2)
# Voice command "ten": symmetric raised-arm ("da Vinci") pose.
def davinciarm1():
    inMoov.setHandSpeed("left", 1.0, 1.0, 1.0, 1.0, 1.0, 0.65)
    inMoov.setHandSpeed("right", 1.0, 1.0, 1.0, 1.0, 1.0, 0.65)
    inMoov.setArmSpeed("left", 0.75, 0.75, 0.75, 0.75)
    inMoov.setArmSpeed("right", 0.75, 0.75, 0.75, 0.75)
    inMoov.setHeadSpeed( 0.75, 0.75)
    inMoov.moveHead(80,90)
    inMoov.moveArm("left",0,118,13,74)
    inMoov.moveArm("right",0,118,29,74)
    inMoov.moveHand("left",50,28,30,10,10,47)
    inMoov.moveHand("right",10,10,10,10,10,137)
    sleep(4)

# Voice command "look one": turn head toward the held object.
def lookatthing2():
    inMoov.setHeadSpeed(0.65, 0.75)
    inMoov.moveHead(73,74)
    inMoov.moveArm("left",70,64,83,15)
    inMoov.moveArm("right",0,82,33,15)
    inMoov.moveHand("left",147,130,140,34,34,164)
    inMoov.moveHand("right",20,40,40,30,30,10)
    sleep(2)

# Voice command "down one": lower the left arm.
def putdown1():
    inMoov.moveHead(0,99)
    inMoov.moveArm("left",1,45,57,31)
    inMoov.moveArm("right",0,82,33,15)
    inMoov.moveHand("left",147,130,135,34,34,35)
    inMoov.moveHand("right",20,40,40,30,30,22)
    sleep(2)

# Voice command "down two": open the left hand to release.
def putdown2():
    inMoov.moveHead(0,99)
    inMoov.moveArm("left",1,45,53,31)
    inMoov.moveArm("right",0,82,33,15)
    sleep(3)
    inMoov.moveHand("left",147,61,67,34,34,35)
    inMoov.moveHand("right",20,40,40,30,30,22)
    inMoov.broadcastState()
    sleep(2)
# Voice command "point".
def pointfinger():
    inMoov.moveHead(90,90)
    inMoov.moveArm("left",0,84,16,15)
    inMoov.moveArm("right",26,73,88,15)
    inMoov.moveHand("left",50,28,30,10,10,90)
    inMoov.moveHand("right",10,10,142,156,148,180)

# Voice command "scared".
def scared():
    inMoov.moveHead(90,90)
    inMoov.moveArm("left",90,40,24,15)
    inMoov.moveArm("right",90,40,139,10)
    inMoov.moveHand("left",68,85,56,27,26,52)
    inMoov.moveHand("right",10,10,20,34,19,156)

# Voice command "ballet".
def ballet():
    inMoov.moveHead(90,90)
    inMoov.moveArm("left",0,40,95,29)
    inMoov.moveArm("right",50,40,164,10)
    inMoov.moveHand("left",68,0,56,27,26,52)
    inMoov.moveHand("right",10,10,20,34,19,156)

# Voice command "surrender": both arms raised.
def surrender():
    inMoov.moveHead(90,90)
    inMoov.moveArm("left",90,139,15,80)
    inMoov.moveArm("right",90,145,37,80)
    inMoov.moveHand("left",50,28,30,10,10,76)
    inMoov.moveHand("right",10,10,10,10,10,139)

# Voice command "surrender two": variation with wider shoulders.
def surrender2():
    inMoov.moveHead(90,112)
    inMoov.moveArm("left",90,139,48,80)
    inMoov.moveArm("right",90,145,77,80)
    inMoov.moveHand("left",50,28,30,10,10,76)
    inMoov.moveHand("right",10,10,10,10,10,139)

# Voice command "what": open-armed shrug.
def what():
    inMoov.moveHead(38,90)
    inMoov.moveArm("left",0,140,0,15)
    inMoov.moveArm("right",0,140,2,15)
    inMoov.moveHand("left",50,28,30,10,10,158)
    inMoov.moveHand("right",10,10,10,10,10,90)

# Voice command "welcome".
def welcome():
    inMoov.moveHead(38,90)
    inMoov.moveArm("left",0,140,0,49)
    inMoov.moveArm("right",0,140,2,40)
    inMoov.moveHand("left",50,28,30,10,10,158)
    inMoov.moveHand("right",10,10,10,10,10,90)

# Voice command "protect": left arm shields the face.
def protectface():
    inMoov.moveHead(90,90)
    inMoov.moveArm("left",90,64,128,43)
    inMoov.moveArm("right",0,73,29,15)
    inMoov.moveHand("left",50,28,30,10,10,90)
    inMoov.moveHand("right",10,10,10,10,10,90)
"repo_name": "DarkRebel/myrobotlab",
"path": "src/resource/Python/examples/InMoov.py",
"copies": "2",
"size": "9184",
"license": "apache-2.0",
"hash": 8766592775884759000,
"line_mean": 29.675862069,
"line_max": 257,
"alpha_frac": 0.6344729965,
"autogenerated": false,
"ratio": 2.178884934756821,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8466881396555712,
"avg_score": 0.06929530694022176,
"num_lines": 290
} |
# a larger script for InMoov
# (Jython/MyRobotLab script: Runtime, sleep etc. are provided by the runtime)
inMoov = Runtime.createAndStart("inMoov", "InMoov")
# attach an arduino to InMoov
# possible board types include uno atmega168 atmega328p atmega2560 atmega1280 atmega32u4
# the MRLComm.ino sketch must be loaded into the Arduino for MyRobotLab control !
# set COM number according to the com of your Arduino board
inMoov.attachArduino("right","uno","COM12")
inMoov.attachArduino("left","atmega1280","COM10")
# attaching body parts
inMoov.attachHand("right")
inMoov.attachHand("left")
inMoov.attachArm("right")
inMoov.attachArm("left")
inMoov.attachHead("right")
# verify all attached services/servos respond before running gestures
inMoov.systemCheck()
def heard():
    """Ear-service callback: echo the recognized phrase, then dispatch it
    to the matching gesture routine (exact string match)."""
    data = msg_ear_recognized.data[0]
    print "heard ", data
    #mouth.setLanguage("fr")
    mouth.setLanguage("en")
    mouth.speak("you said " + data)
    if (data == "rest"):
        rest()
    elif (data == "one"):
        takeball()
    elif (data == "one ball"):
        ball()
    elif (data == "two"):
        keepball()
    elif (data == "three"):
        goestotake1()
    elif (data == "four"):
        goestotake2()
    elif (data == "five"):
        take()
    elif (data == "six"):
        takefinal1()
    elif (data == "seven"):
        takefinal2()
    elif (data == "eight"):
        takefinal3()
    elif (data == "nine"):
        takefinal4()
    elif (data == "ten"):
        davinciarm1()
    elif (data == "look one"):
        lookatthing2()
    elif (data == "down one"):
        putdown1()
    elif (data == "down two"):
        putdown2()
    elif (data == "point"):
        pointfinger()
    elif (data == "scared"):
        scared()
    elif (data == "ballet"):
        ballet()
    elif (data == "surrender"):
        surrender()
    elif (data == "surrender two"):
        surrender2()
    elif (data == "what"):
        what()
    elif (data == "welcome"):
        welcome()
    elif (data == "protect"):
        protectface()
    elif (data == "start tracking"):
        inMoov.startTracking()
    elif (data == "stop tracking"):
        inMoov.stopTracking()

# Grammar of phrases the ear service should recognize (pipe-separated).
inMoov.startListening("rest | one ball | one | two | three | four | five | six | seven | eight | nine | ten | look one | down one | down two | point | scared | ballet | surrender | surrender two | what | welcome | protect | start tracking | stop tracking")
def rest():
inMoov.setHandSpeed("left", 1.0, 1.0, 1.0, 1.0, 1.0, 1.0)
inMoov.setHandSpeed("right", 1.0, 1.0, 1.0, 1.0, 1.0, 1.0)
inMoov.setArmSpeed("left", 0.75, 0.75, 0.75, 0.75)
inMoov.setArmSpeed("right", 0.75, 0.75, 0.75, 0.75)
inMoov.setHeadSpeed( 0.75, 0.75)
inMoov.moveHead(90,90)
inMoov.moveArm("left",0,85,16,15)
inMoov.moveArm("right",0,73,29,15)
inMoov.moveHand("left",50,28,30,10,10,90)
inMoov.moveHand("right",10,10,10,10,10,90)
inMoov.broadcastState()
sleep(5)
def ball():
inMoov.setHandSpeed("right", 0.75, 0.75, 0.75, 0.75, 0.75, 0.75)
inMoov.setArmSpeed("right", 0.85, 0.85, 0.85, 0.85)
inMoov.moveHead(52,81)
inMoov.moveArm("left",0,84,16,15)
inMoov.moveArm("right",0,85,58,15)
inMoov.moveHand("left",50,28,30,10,10,90)
inMoov.moveHand("right",10,111,103,19,11,90)
def takeball():
inMoov.setHandSpeed("right", 0.75, 0.75, 0.75, 0.75, 0.75, 0.75)
inMoov.setArmSpeed("right", 0.85, 0.85, 0.85, 0.85)
inMoov.moveHead(52,81)
inMoov.moveArm("left",0,84,16,15)
inMoov.moveArm("right",6,73,65,16)
inMoov.moveHand("left",50,28,30,0,0,90)
inMoov.moveHand("right",85,131,104,106,139,129)
sleep(5)
def keepball():
inMoov.moveHead(0,80)
inMoov.moveArm("left",0,84,16,15)
inMoov.moveArm("right",70,62,62,16)
inMoov.moveHand("left",50,28,30,10,10,90)
inMoov.moveHand("right",85,131,104,106,139,75)
sleep(4)
def goestotake1():
inMoov.setHandSpeed("right", 0.75, 0.75, 0.75, 0.75, 0.75, 0.65)
inMoov.setArmSpeed("left", 0.75, 0.75, 0.75, 0.95)
inMoov.setArmSpeed("right", 0.95, 0.95, 0.95, 0.85)
inMoov.moveHead(15,84)
inMoov.moveArm("left",90,91,37,15)
inMoov.moveArm("right",63,50,45,15)
inMoov.moveHand("left",50,28,30,10,10,0)
inMoov.moveHand("right",85,85,75,72,81,22)
sleep(1)
def goestotake2():
inMoov.setArmSpeed("left", 0.75, 0.75, 0.75, 0.95)
inMoov.setArmSpeed("right", 0.95, 0.95, 0.95, 0.85)
inMoov.moveHead(12,80)
inMoov.moveArm("left",71,51,37,15)
inMoov.moveArm("right",63,50,45,15)
inMoov.moveHand("left",50,28,30,10,10,0)
inMoov.moveHand("right",77,85,75,72,81,22)
sleep(4)
def take():
inMoov.setHandSpeed("left", 0.75, 0.75, 0.75, 0.75, 0.75, 0.75)
inMoov.setHandSpeed("right", 1.0, 1.0, 1.0, 1.0, 1.0, 1.0)
inMoov.moveHead(10,74)
inMoov.moveArm("left",71,51,37,15)
inMoov.moveArm("right",63,50,45,15)
inMoov.moveHand("left",50,28,30,10,10,0)
inMoov.moveHand("right",60,85,75,72,81,22)
sleep(2)
def takefinal1():
inMoov.setHandSpeed("right", 0.65, 0.65, 0.65, 0.65, 0.65, 0.65)
inMoov.moveHead(5,74)
inMoov.moveArm("left",71,51,37,15)
inMoov.moveArm("right",63,50,45,15)
inMoov.moveHand("left",50,28,30,10,10,0)
inMoov.moveHand("right",20,75,74,72,81,22)
sleep(1)
def takefinal2():
inMoov.setHandSpeed("left", 0.75, 0.65, 0.65, 0.65, 0.65, 0.65)
inMoov.setHandSpeed("right", 0.75, 0.65, 0.65, 0.65, 0.65, 0.65)
inMoov.moveHead(10,74)
inMoov.moveArm("left",68,51,37,15)
inMoov.moveArm("right",63,50,45,15)
inMoov.moveHand("left",155,110,118,10,10,0)
inMoov.moveHand("right",20,64,72,72,81,22)
sleep(4)
def takefinal3():
inMoov.setHandSpeed("left", 0.75, 0.65, 0.65, 0.65, 0.65, 0.65)
inMoov.setHandSpeed("right", 0.65, 0.65, 0.65, 0.65, 0.65, 0.65)
inMoov.moveHead(10,74)
inMoov.moveArm("left",68,51,37,15)
inMoov.moveArm("right",63,50,45,15)
inMoov.moveHand("left",170,110,118,10,10,0)
inMoov.moveHand("right",20,30,40,30,30,22)
sleep(3)
def takefinal4():
inMoov.setHandSpeed("left", 1.0, 0.65, 0.65, 0.65, 0.65, 0.65)
inMoov.setHandSpeed("right", 0.75, 0.75, 0.75, 0.75, 0.75, 0.75)
inMoov.setArmSpeed("right", 0.75, 0.85, 0.95, 0.85)
inMoov.moveHead(10,74)
inMoov.moveArm("left",71,51,37,15)
inMoov.setArmSpeed("right", 0.65, 0.65, 0.75, 0.85)
inMoov.moveArm("right",0,82,33,15)
inMoov.moveHand("left",140,125,125,34,34,0)
inMoov.moveHand("right",20,20,40,30,30,20)
sleep(2)
def davinciarm1():
inMoov.setHandSpeed("left", 1.0, 1.0, 1.0, 1.0, 1.0, 0.65)
inMoov.setHandSpeed("right", 1.0, 1.0, 1.0, 1.0, 1.0, 0.65)
inMoov.setArmSpeed("left", 0.75, 0.75, 0.75, 0.75)
inMoov.setArmSpeed("right", 0.75, 0.75, 0.75, 0.75)
inMoov.setHeadSpeed( 0.75, 0.75)
inMoov.moveHead(80,90)
inMoov.moveArm("left",0,118,13,74)
inMoov.moveArm("right",0,118,29,74)
inMoov.moveHand("left",50,28,30,10,10,47)
inMoov.moveHand("right",10,10,10,10,10,137)
sleep(4)
def lookatthing2():
inMoov.setHeadSpeed(0.65, 0.75)
inMoov.moveHead(73,74)
inMoov.moveArm("left",70,64,83,15)
inMoov.moveArm("right",0,82,33,15)
inMoov.moveHand("left",147,130,140,34,34,164)
inMoov.moveHand("right",20,40,40,30,30,10)
sleep(2)
def putdown1():
inMoov.moveHead(0,99)
inMoov.moveArm("left",1,45,57,31)
inMoov.moveArm("right",0,82,33,15)
inMoov.moveHand("left",147,130,135,34,34,35)
inMoov.moveHand("right",20,40,40,30,30,22)
sleep(2)
def putdown2():
inMoov.moveHead(0,99)
inMoov.moveArm("left",1,45,53,31)
inMoov.moveArm("right",0,82,33,15)
sleep(3)
inMoov.moveHand("left",147,61,67,34,34,35)
inMoov.moveHand("right",20,40,40,30,30,22)
inMoov.broadcastState()
sleep(2)
def pointfinger():
inMoov.moveHead(90,90)
inMoov.moveArm("left",0,84,16,15)
inMoov.moveArm("right",26,73,88,15)
inMoov.moveHand("left",50,28,30,10,10,90)
inMoov.moveHand("right",10,10,142,156,148,180)
def scared():
inMoov.moveHead(90,90)
inMoov.moveArm("left",90,40,24,15)
inMoov.moveArm("right",90,40,139,10)
inMoov.moveHand("left",68,85,56,27,26,52)
inMoov.moveHand("right",10,10,20,34,19,156)
def ballet():
inMoov.moveHead(90,90)
inMoov.moveArm("left",0,40,95,29)
inMoov.moveArm("right",50,40,164,10)
inMoov.moveHand("left",68,0,56,27,26,52)
inMoov.moveHand("right",10,10,20,34,19,156)
def surrender():
inMoov.moveHead(90,90)
inMoov.moveArm("left",90,139,15,80)
inMoov.moveArm("right",90,145,37,80)
inMoov.moveHand("left",50,28,30,10,10,76)
inMoov.moveHand("right",10,10,10,10,10,139)
def surrender2():
inMoov.moveHead(90,112)
inMoov.moveArm("left",90,139,48,80)
inMoov.moveArm("right",90,145,77,80)
inMoov.moveHand("left",50,28,30,10,10,76)
inMoov.moveHand("right",10,10,10,10,10,139)
def what():
inMoov.moveHead(38,90)
inMoov.moveArm("left",0,140,0,15)
inMoov.moveArm("right",0,140,2,15)
inMoov.moveHand("left",50,28,30,10,10,158)
inMoov.moveHand("right",10,10,10,10,10,90)
def welcome():
inMoov.moveHead(38,90)
inMoov.moveArm("left",0,140,0,49)
inMoov.moveArm("right",0,140,2,40)
inMoov.moveHand("left",50,28,30,10,10,158)
inMoov.moveHand("right",10,10,10,10,10,90)
def protectface():
inMoov.moveHead(90,90)
inMoov.moveArm("left",90,64,128,43)
inMoov.moveArm("right",0,73,29,15)
inMoov.moveHand("left",50,28,30,10,10,90)
inMoov.moveHand("right",10,10,10,10,10,90) | {
"repo_name": "mecax/pyrobotlab",
"path": "toSort/InMoov.py",
"copies": "1",
"size": "9473",
"license": "apache-2.0",
"hash": -2442934497061536300,
"line_mean": 14.8635578584,
"line_max": 257,
"alpha_frac": 0.6151166473,
"autogenerated": false,
"ratio": 2.1041759218125278,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.3219292569112528,
"avg_score": null,
"num_lines": null
} |
"""A Lark Transformer for transforming a Lark parse tree into a Python dict"""
import re
import sys
from typing import List, Dict, Any
from lark import Transformer, Discard
# Heredoc expressions: <<TAG ... TAG, and the indentation-trimming <<-TAG form.
# Group 1 captures the delimiter tag, group 2 the heredoc body.
HEREDOC_PATTERN = re.compile(r'<<([a-zA-Z][a-zA-Z0-9._-]+)\n((.|\n)*?)\n\s*\1', re.S)
HEREDOC_TRIM_PATTERN = re.compile(r'<<-([a-zA-Z][a-zA-Z0-9._-]+)\n((.|\n)*?)\n\s*\1', re.S)
# pylint: disable=missing-docstring,unused-argument
class DictTransformer(Transformer):
    """Lark Transformer that folds an HCL2 parse tree into plain Python values.

    Rule methods are named after grammar rules; each receives the already
    transformed children and returns a Python value (dict/list/str/bool/...).
    Unevaluated expressions are carried around as strings and finally wrapped
    in ``${...}`` by :meth:`to_string_dollar`.
    """
    def float_lit(self, args: List) -> float:
        """Join the child tokens and parse them as a float literal."""
        return float("".join([str(arg) for arg in args]))
    def int_lit(self, args: List) -> int:
        """Join the child tokens and parse them as an integer literal."""
        return int("".join([str(arg) for arg in args]))
    def expr_term(self, args: List) -> Any:
        """Reduce an expression term: keyword literals, parens, or pass-through."""
        args = self.strip_new_line_tokens(args)
        # keyword literals
        if args[0] == "true":
            return True
        if args[0] == "false":
            return False
        if args[0] == "null":
            return None
        # if the expression starts with a paren then unwrap it
        if args[0] == "(":
            return args[1]
        # otherwise return the value itself
        return args[0]
    def index_expr_term(self, args: List) -> str:
        """Concatenate a value with its index suffix, e.g. ``foo[0]``."""
        args = self.strip_new_line_tokens(args)
        return "%s%s" % (str(args[0]), str(args[1]))
    def index(self, args: List) -> str:
        """Render an index accessor as ``[expr]``."""
        args = self.strip_new_line_tokens(args)
        return "[%s]" % (str(args[0]))
    def get_attr_expr_term(self, args: List) -> str:
        """Render attribute access as ``obj.attr``."""
        return "%s.%s" % (str(args[0]), str(args[1]))
    def attr_splat_expr_term(self, args: List) -> str:
        """Render a splat expression as ``obj.*.attr``."""
        return "%s.*.%s" % (args[0], args[1])
    def tuple(self, args: List) -> List:
        """Build a Python list, wrapping unquoted expressions in ``${...}``."""
        return [self.to_string_dollar(arg) for arg in self.strip_new_line_tokens(args)]
    def object_elem(self, args: List) -> Dict:
        """Build one key/value pair of an object literal."""
        # This returns a dict with a single key/value pair to make it easier to merge these
        # into a bigger dict that is returned by the "object" function
        key = self.strip_quotes(args[0])
        value = self.to_string_dollar(args[1])
        return {
            key: value
        }
    def object(self, args: List) -> Dict:
        """Merge the single-pair dicts from object_elem into one dict."""
        args = self.strip_new_line_tokens(args)
        result: Dict[str, Any] = {}
        for arg in args:
            result.update(arg)
        return result
    def function_call(self, args: List) -> str:
        """Render a function call as ``name(arg1,arg2,...)``."""
        args = self.strip_new_line_tokens(args)
        args_str = ''
        # args[1], when present, is the list produced by the `arguments` rule
        if len(args) > 1:
            args_str = ",".join([str(arg) for arg in args[1]])
        return "%s(%s)" % (str(args[0]), args_str)
    def arguments(self, args: List) -> List:
        """Pass the argument list through unchanged."""
        return args
    def new_line_and_or_comma(self, args: List) -> Discard:
        # Dropped entirely; strip_new_line_tokens also filters these out.
        return Discard()
    def block(self, args: List) -> Dict:
        """Build a nested dict from a block's labels and body.

        ``foo "bar" "baz" { ... }`` becomes ``{"foo": {"bar": {"baz": {...}}}}``.
        """
        args = self.strip_new_line_tokens(args)
        # if the last token is a string instead of an object then the block is empty
        # such as 'foo "bar" "baz" {}'
        # in that case append an empty object
        if isinstance(args[-1], str):
            args.append({})
        result: Dict[str, Any] = {}
        current_level = result
        # all labels except the innermost become nesting levels
        for arg in args[0:-2]:
            current_level[self.strip_quotes(arg)] = {}
            current_level = current_level[self.strip_quotes(arg)]
        current_level[self.strip_quotes(args[-2])] = args[-1]
        return result
    def one_line_block(self, args: List) -> Dict:
        """A one-line block is handled identically to a multi-line block."""
        return self.block(args)
    def attribute(self, args: List) -> Dict:
        """Build a single attribute assignment as a one-pair dict."""
        key = str(args[0])
        if key.startswith('"') and key.endswith('"'):
            key = key[1:-1]
        value = self.to_string_dollar(args[1])
        return {
            key: value
        }
    def conditional(self, args: List) -> str:
        """Render a ternary as ``cond ? a : b``."""
        args = self.strip_new_line_tokens(args)
        return "%s ? %s : %s" % (args[0], args[1], args[2])
    def binary_op(self, args: List) -> str:
        """Render a binary operation with spaces between the parts."""
        return " ".join([str(arg) for arg in args])
    def unary_op(self, args: List) -> str:
        """Render a unary operation with no space, e.g. ``-x`` or ``!x``."""
        return "".join([str(arg) for arg in args])
    def binary_term(self, args: List) -> str:
        """Render one operand/operator pair of a binary expression."""
        args = self.strip_new_line_tokens(args)
        return " ".join([str(arg) for arg in args])
    def body(self, args: List) -> Dict[str, List]:
        """Merge attributes/blocks into a dict of name -> list of values.

        A body can have multiple attributes with the same name (e.g. multiple
        Statement attributes in an IAM resource body), so values are always
        lists, even when an attribute appears only once.
        """
        args = self.strip_new_line_tokens(args)
        result: Dict[str, Any] = {}
        for arg in args:
            for key, value in arg.items():
                key = str(key)
                if key not in result:
                    result[key] = [value]
                else:
                    # repeated name: accumulate into the existing list
                    if isinstance(result[key], list):
                        if isinstance(value, list):
                            result[key].extend(value)
                        else:
                            result[key].append(value)
                    else:
                        result[key] = [result[key], value]
        return result
    def start(self, args: List) -> Dict:
        """Return the top-level body dict."""
        args = self.strip_new_line_tokens(args)
        return args[0]
    def binary_operator(self, args: List) -> str:
        """Return the operator token as a string."""
        return str(args[0])
    def heredoc_template(self, args: List) -> str:
        """Extract the body of a ``<<TAG`` heredoc and requote it."""
        match = HEREDOC_PATTERN.match(str(args[0]))
        if not match:
            raise RuntimeError("Invalid Heredoc token: %s" % args[0])
        return '"%s"' % match.group(2)
    def heredoc_template_trim(self, args: List) -> str:
        # See https://github.com/hashicorp/hcl2/blob/master/hcl/hclsyntax/spec.md#template-expressions
        # This is a special version of heredocs that are declared with "<<-"
        # This will calculate the minimum number of leading spaces in each line of a heredoc
        # and then remove that number of spaces from each line
        match = HEREDOC_TRIM_PATTERN.match(str(args[0]))
        if not match:
            raise RuntimeError("Invalid Heredoc token: %s" % args[0])
        text = match.group(2)
        lines = text.split('\n')
        # calculate the min number of leading spaces in each line
        min_spaces = sys.maxsize
        for line in lines:
            leading_spaces = len(line) - len(line.lstrip(' '))
            min_spaces = min(min_spaces, leading_spaces)
        # trim off that number of leading spaces from each line
        lines = [line[min_spaces:] for line in lines]
        return '"%s"' % '\n'.join(lines)
    def new_line_or_comment(self, args: List) -> Discard:
        # Dropped entirely; strip_new_line_tokens also filters these out.
        return Discard()
    def for_tuple_expr(self, args: List) -> str:
        """Render a list comprehension expression as ``[for ...]``."""
        args = self.strip_new_line_tokens(args)
        for_expr = " ".join([str(arg) for arg in args[1:-1]])
        return '[%s]' % for_expr
    def for_intro(self, args: List) -> str:
        """Render the ``for x in y :`` introduction of a for-expression."""
        args = self.strip_new_line_tokens(args)
        return " ".join([str(arg) for arg in args])
    def for_cond(self, args: List) -> str:
        """Render the ``if cond`` filter of a for-expression."""
        args = self.strip_new_line_tokens(args)
        return " ".join([str(arg) for arg in args])
    def for_object_expr(self, args: List) -> str:
        """Render an object comprehension expression as ``{for ...}``."""
        args = self.strip_new_line_tokens(args)
        for_expr = " ".join([str(arg) for arg in args[1:-1]])
        return '{%s}' % for_expr
    def strip_new_line_tokens(self, args: List) -> List:
        """
        Remove new line and Discard tokens.
        The parser will sometimes include these in the tree so we need to strip them out here
        """
        return [arg for arg in args if arg != "\n" and not isinstance(arg, Discard)]
    def to_string_dollar(self, value: Any) -> Any:
        """Unwrap a quoted string, or wrap an unquoted expression in ``${...}``.

        Non-string values are returned unchanged.
        """
        if isinstance(value, str):
            if value.startswith('"') and value.endswith('"'):
                return str(value)[1:-1]
            return '${%s}' % value
        return value
    def strip_quotes(self, value: Any) -> Any:
        """Remove quote characters from the start and end of a string"""
        if isinstance(value, str):
            if value.startswith('"') and value.endswith('"'):
                return str(value)[1:-1]
        return value
    def identifier(self, value: Any) -> Any:
        # Making identifier a token by capitalizing it to IDENTIFIER
        # seems to return a token object instead of the str
        # So treat it like a regular rule
        # In this case we just convert the whole thing to a string
        return str(value[0])
| {
"repo_name": "amplify-education/python-hcl2",
"path": "hcl2/transformer.py",
"copies": "1",
"size": "8543",
"license": "mit",
"hash": -3373394633407241000,
"line_mean": 35.0464135021,
"line_max": 102,
"alpha_frac": 0.5644387218,
"autogenerated": false,
"ratio": 3.6384156729131174,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.47028543947131174,
"avg_score": null,
"num_lines": null
} |
# alarm_clock.py
# Description: A simple Python program to make the computer act
# like an alarm clock. Start it running from the command line
# with a command line argument specifying the duration in minutes
# after which to sound the alarm. It will sleep for that long,
# and then beep a few times. Use a duration of 0 to test the
# alarm immediately, e.g. for checking that the volume is okay.
# Author: Vasudev Ram - http://www.dancingbison.com
import sys
import string  # NOTE(review): unused by this script; kept to preserve the original import surface
from time import sleep


def _usage():
    """Print the command-line usage summary (unchanged wording)."""
    print("Usage: [ python ] alarm_clock.py duration_in_minutes")
    print("Example: [ python ] alarm_clock.py 10")
    print("Use a value of 0 minutes for testing the alarm immediately.")
    print("Beeps a few times after the duration is over.")
    print("Press Ctrl-C to terminate the alarm clock early.")


def main(argv):
    """Run the alarm clock and return the process exit code.

    argv -- a sys.argv-style list: [program_name, minutes].
    Sleeps for the requested number of minutes, then beeps five times.
    Returns 0 on success, 1 on bad arguments or Ctrl-C.
    """
    # Fixed: original was Python-2-only (print statements); this version
    # uses the Python 3 print() function with identical output and exit codes.
    if len(argv) != 2:
        _usage()
        return 1
    try:
        minutes = int(argv[1])
    except ValueError:
        print("Invalid numeric value (%s) for minutes" % argv[1])
        print("Should be an integer >= 0")
        return 1
    if minutes < 0:
        print("Invalid value for minutes, should be >= 0")
        return 1
    seconds = minutes * 60
    unit_word = " minute" if minutes == 1 else " minutes"
    try:
        if minutes > 0:
            print("Sleeping for " + str(minutes) + unit_word)
            sleep(seconds)
        print("Wake up")
        for _ in range(5):
            # chr(7) is ASCII BEL; end=' ' mirrors the trailing comma of the
            # original Python 2 `print chr(7),` (space, no newline).
            print(chr(7), end=' ', flush=True)
            sleep(1)
    except KeyboardInterrupt:
        print("Interrupted by user")
        return 1
    return 0


if __name__ == '__main__':
    sys.exit(main(sys.argv))
# EOF
| {
"repo_name": "ActiveState/code",
"path": "recipes/Python/579117_Simple_commandline_alarm_clock/recipe-579117.py",
"copies": "1",
"size": "1496",
"license": "mit",
"hash": -486850354214936200,
"line_mean": 25.7142857143,
"line_max": 71,
"alpha_frac": 0.6691176471,
"autogenerated": false,
"ratio": 3.415525114155251,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4584642761255251,
"avg_score": null,
"num_lines": null
} |
# Copyright Peter Hinch
# V0.4 10th October 2016 Now uses machine.reset_cause()
# Flashes leds at 30 second intervals courtesy of two concurrent timers
# (each times out at one minute intervals).
# Note that the Pyboard flashes the green LED briefly on waking from standby.
import stm, pyb, upower, machine
red, green, yellow = (pyb.LED(x) for x in range(1, 4)) # LED(3) is blue, not yellow, on D series
rtc = pyb.RTC()
rtc.wakeup(None) # If we have a backup battery clear down any setting from a previously running program
reason = machine.reset_cause() # Why have we woken?
if reason == machine.PWRON_RESET or reason == machine.HARD_RESET: # first boot
    rtc.datetime((2020, 8, 6, 4, 13, 0, 0, 0)) # Code to run on 1st boot only
    # Alarms A and B each fire once per minute, 30 s apart (at seconds 39 and 9),
    # giving the two interleaved one-minute timers described in the header.
    aa = upower.Alarm('a')
    aa.timeset(second = 39)
    ab = upower.Alarm('b')
    ab.timeset(second = 9)
    red.on()
elif reason == machine.DEEPSLEEP_RESET:
    # Woken from standby: ask upower which alarm fired and flash the matching LED.
    reason = upower.why()
    if reason == 'ALARM_A':
        green.on()
    elif reason == 'ALARM_B':
        yellow.on()
upower.lpdelay(1000) # Let LED's be seen!
pyb.standby() # Back to standby until the next alarm fires.
| {
"repo_name": "peterhinch/micropython-micropower",
"path": "alarm.py",
"copies": "1",
"size": "1171",
"license": "mit",
"hash": -5261503305971661000,
"line_mean": 39.3793103448,
"line_max": 103,
"alpha_frac": 0.6643894108,
"autogenerated": false,
"ratio": 3.1061007957559683,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4270490206555968,
"avg_score": null,
"num_lines": null
} |
"""Alarm Script."""
import sys
import time
from . import config as cg
from . import all_off, fade, lcd
from .context import IO
# FIXME: Re-implement as a thread with an interrupt Exception
###########################
# Configuration:
###########################
# Electronic Pin Numbering Globals (resolved from the config file via cg):
off_button = cg.get_pin('Input_Pins', 'off_button')
pin_buzzer = cg.get_pin('Haptics', 'pin_buzzer')
pin_shaker = cg.get_pin('Haptics', 'pin_shaker')
pin_blue = cg.get_pin('RGB_Strip', 'pin_blue')
pin_red = cg.get_pin('RGB_Strip', 'pin_red')
pin_green = cg.get_pin('RGB_Strip', 'pin_green')
# # TODO: Add second LED Strip
# pin_blue2 = cg.get_pin('RGB_Strip', 'pin_blue2')
# pin_red2 = cg.get_pin('RGB_Strip', 'pin_red2')
# pin_green2 = cg.get_pin('RGB_Strip', 'pin_green2')
# Allow shorter run time for testing with ANY argument
# alarm_stage_time[0] is the pre-alarm delay; [1]-[3] are per-stage durations (s)
if len(sys.argv) > 1:
    # arg = cg.parse_argv(sys)
    alarm_stage_time = [0, 5, 10, 15]
else:
    alarm_stage_time = [30, 180, 80, 60]
step_size = 0.2  # polling interval (s) of the alarm loop
alarm_on = True  # cleared by the off-button callback to stop the alarm
_running = False  # guards against starting a second alarm concurrently
cg.quiet_logging(False)
# Settings for fade_led_strip()
max_brightness = 0.6
last_beep, fade_stage = 0, 0
# Color channels faded in/out in sequence during stage 3
fade_stages = [pin_green, pin_red, pin_blue,
               pin_green, pin_red, pin_blue]
a_s_t = alarm_stage_time[3]
l_f_s = len(fade_stages)
if a_s_t < l_f_s:
    raise ValueError('a_s_t({}) not > len({})'.format(a_s_t, l_f_s))
time_total = a_s_t / l_f_s  # seconds allotted to each fade stage
###########################
# Functions and Stuff
###########################
def alarm_deactivate(pin_num):
    """Button callback on rising edge."""
    global alarm_on
    # Only act on a high level; the flag is polled by the alarm loop.
    level = IO.input(pin_num)
    if not level:
        return
    cg.send('Deactivating Alarm on {}'.format(IO.input(pin_num)))
    alarm_on = False
def gen_button_cb(pin_num):
    """Generic edge-logging callback, used for testing button wiring."""
    rising = bool(IO.input(pin_num))
    if rising:
        cg.send('Triggered on a rising edge from pin: {}'.format(pin_num))
    else:
        cg.send('Triggered on a falling edge from pin: {}'.format(pin_num))
def beep(counter):
    """Toggle the buzzer PWM on alternating half-periods of *counter*."""
    global last_beep
    # First half of each 2-second period: buzzer on; second half: off.
    # last_beep remembers the current duty so each edge fires only once.
    in_first_half = counter % 2 <= 1
    if in_first_half and last_beep == 0:
        cg.set_pwm(pin_buzzer, 0.2)
        last_beep = 0.2
    elif not in_first_half and last_beep == 0.2:
        cg.set_pwm(pin_buzzer, 0.0)
        last_beep = 0
def fade_led_strip(counter):
    """Cycle the LED strip through the colors in ``fade_stages``.

    Called repeatedly with the elapsed time *counter* (seconds, advanced in
    ``step_size`` increments). Even stages fade the current channel in, odd
    stages fade it out, each over ``time_total`` seconds; once every stage
    has run, all channels are held at ``max_brightness``.
    """
    global fade_stage
    if time_total < 0.1:
        time_step = 1
    else:
        time_step = (counter % time_total) + 1.0
    # Increment the LED value on even stages...
    if fade_stage % 2 == 0:
        value = 1 - (1 / time_step)
    # ...decrement it on odd stages
    elif fade_stage % 2 == 1:
        value = 1 / time_step
    # Update the Alarm Electronics
    if fade_stage < len(fade_stages):
        cg.set_pwm(fade_stages[fade_stage], max_brightness * value)
        # BUG FIX: *counter* accumulates 0.2-second float steps, so
        # `time_step` carries rounding error and the original exact
        # comparison `time_step == time_total` could never fire, leaving
        # the fade stuck on its first stage. Compare with a tolerance
        # (far larger than the accumulated error, far smaller than one step).
        if abs(time_step - time_total) < 1e-6:
            fade_stage += 1
    else:
        # All stages finished: hold every color fully on.
        fade.all_on(max_brightness)
###########################
# Alarm logic!
###########################
def stop():
    """Mark the alarm idle, notify the user, and shut the hardware down."""
    global _running
    _running = False
    cg.send('\nAlarm Cycles Finished\n')
    cg.ifttt('PiAlarm_SendText', {'value1': 'PiAlarm Completed'})
    # Cleanup: turn every output off and release the button callback.
    all_off.deactivate()
    IO.remove_event_detect(off_button)
    # NOTE: IO.cleanup() is deliberately not called here so the clock
    # display keeps working; pi-blaster is likewise left running.
def start(user_home):
    """Start alarm sequence.

    user_home -- truthy when the user is detected at home (cg.check_status());
    the escalation loop exits early if this goes false between stages.
    Escalates through stage 1 (dim LEDs), stage 2 (LEDs + buzzer) and
    stage 3 (LEDs + bed shaker + buzzer, repeated up to 3 times), unless
    the off-button callback clears ``alarm_on`` first.
    """
    global fade_stage, _running, alarm_on
    _running = True
    stage, stage3_rep_counter = 1, 0
    cg.send('Set IO mode and event detection')
    IO.setwarnings(False)
    IO.setmode(IO.BCM)
    IO.setup(off_button, IO.IN)
    # Off-button rising edge clears alarm_on via alarm_deactivate()
    IO.add_event_detect(off_button, IO.RISING, callback=alarm_deactivate, bouncetime=300)
    while stage < 4 and stage3_rep_counter < 3 and user_home:
        all_off.deactivate()
        cg.send('\nStarting Stage: {}'.format(stage) +
                ' for {} seconds'.format(alarm_stage_time[stage]))
        current_time = 0
        # Stage 1 - Green LED Strip for 1 minute
        if stage == 1 and alarm_on:
            cg.send('Configuring Stage 1')
            cg.set_pwm(pin_green, 0.2)
            cg.set_pwm(pin_red, 0.2)
            cb = False
        # Stage 2 - Purple LED Strip and Buzzer
        if stage == 2 and alarm_on:
            cg.send('Configuring Stage 2')
            cg.set_pwm(pin_blue, 0.5)
            cg.set_pwm(pin_red, 0.5)
            cg.set_pwm(pin_buzzer, 0.1)
            cb = beep
        # Stage 3 - LED Strip, Bed Shaker, and Buzzer
        if stage == 3 and alarm_on:
            cg.send('Configuring Stage 3')
            cg.set_pwm(pin_shaker, 1)
            cg.set_pwm(pin_buzzer, 0.5)
            cb = fade_led_strip
        # Run alarm and check for button interrupt:
        # (cb, when set, is called every step with the elapsed time)
        while alarm_on and current_time < alarm_stage_time[stage]:
            time.sleep(step_size)
            current_time += step_size
            if cb:
                cb(current_time)
        cg.send('Completed Step #{0}'.format(stage))
        # Prep for the next loop:
        if stage == 3 and alarm_on:
            all_off.deactivate()
            cg.send('\nLooping back through Stage 3')
            time.sleep(7)
            fade_stage = 0
            stage3_rep_counter += 1
        else:
            stage += 1
            current_time = 0
        # Re-check presence so the alarm stops if the user has left
        user_home = cg.check_status()
        cg.send('Checking home (= {}) before next loop'.format(user_home))
    stop()
def run():
    """Kick off an alarm cycle if the user is home and none is active."""
    global _running, alarm_on
    user_home = cg.check_status()
    alarm_on = True
    if _running:
        message = 'ERROR: ALARM IS ALREADY RUNNING!'
        cg.send(message)
        cg.ifttt('PiAlarm_SendText', {'value1': message})
    elif user_home:
        lcd.brightness('alt')
        cg.ifttt('PiAlarm_SendText', {'value1': '** PiAlarm Started! **'})
        # Give the text alert time to go out before the hardware starts.
        time.sleep(alarm_stage_time[0])
        start(cg.check_status())
    else:
        cg.ifttt('PiAlarm_SendText', {'value1': 'User away, no PiAlarm'})


if __name__ == '__main__':
    run()
| {
"repo_name": "KyleKing/PiAlarm",
"path": ".archive-python/modules/alarm.py",
"copies": "1",
"size": "6336",
"license": "mit",
"hash": 722681017223502800,
"line_mean": 28.4697674419,
"line_max": 89,
"alpha_frac": 0.5678661616,
"autogenerated": false,
"ratio": 3.1459781529294935,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.42138443145294935,
"avg_score": null,
"num_lines": null
} |
## Alarm Server
## Supporting Envisalink 2DS/3
##
## This code is under the terms of the GPL v3 license.
import ctypes
c_uint16 = ctypes.c_uint16
class IconLED_Bitfield( ctypes.LittleEndianStructure ):
    """Bit layout of the panel's 16-bit keypad icon/LED status word.

    One flag per bit, least-significant first (little-endian structure).
    """
    _fields_ = [
             ("alarm", c_uint16, 1 ),
             ("alarm_in_memory", c_uint16, 1 ),
             ("armed_away", c_uint16, 1 ),
             ("ac_present", c_uint16, 1 ),
             ("bypass", c_uint16, 1 ),
             ("chime", c_uint16, 1 ),
             ("not_used1", c_uint16, 1 ),
             ("armed_zero_entry_delay", c_uint16, 1 ),
             ("alarm_fire_zone", c_uint16, 1 ),
             ("system_trouble", c_uint16, 1 ),
             ("not_used2", c_uint16, 1 ),
             ("not_used3", c_uint16, 1 ),
             ("ready", c_uint16, 1 ),
             ("fire", c_uint16, 1 ),
             ("low_battery", c_uint16, 1 ),
             ("armed_stay", c_uint16, 1 )
            ]
class IconLED_Flags( ctypes.Union ):
    """Union view of the icon/LED word: named bits (via the anonymous
    bitfield) or the raw 16-bit value (``asShort``)."""
    _fields_ = [
             ("b", IconLED_Bitfield ),
             ("asShort", c_uint16 )
            ]
    _anonymous_ = ("b")
# TPI command numbers accepted by the Envisalink (sent as "^NN,...").
evl_Commands = {
    'KeepAlive' : '00',
    'ChangeDefaultPartition' : '01',
    'DumpZoneTimers' : '02',
    'PartitionKeypress' : '03'
    }
# Panic keypress types for the virtual keypad.
evl_PanicTypes = {
    'Fire' : 'A',
    'Ambulance' : 'B',
    'Police' : 'C'
    }
# Messages the Envisalink can send, keyed by their leading token.
# 'handler' names the method invoked on the client for each message type.
evl_ResponseTypes = {
    'Login:' : {'name' : 'Login Prompt', 'description' : 'Sent During Session Login Only.', 'handler' : 'login'},
    'OK' : {'name' : 'Login Success', 'description' : 'Send During Session Login Only, successful login', 'handler' : 'login_success'},
    'FAILED' : {'name' : 'Login Failure', 'description' : 'Sent During Session Login Only, password not accepted', 'handler' : 'login_failure'},
    'Timed Out!' : {'name' : 'Login Interaction Timed Out', 'description' : 'Sent during Session Login Only, socket connection is then closed', 'handler' : 'login_timeout'},
    '%00' : {'name' : 'Virtual Keypad Update', 'description' : 'The panel wants to update the state of the keypad','handler' : 'keypad_update'},
    '%01' : {'type' : 'zone', 'name' : 'Zone State Change', 'description' : 'A zone change-of-state has occurred', 'handler' : 'zone_state_change'},
    '%02' : {'type' : 'partition', 'name' : 'Partition State Change', 'description' : 'A partition change-of-state has occured', 'handler' : 'partition_state_change'},
    '%03' : {'type' : 'system', 'name' : 'Realtime CID Event', 'description' : 'A system event has happened that is signaled to either the Envisalerts servers or the central monitoring station', 'handler' : 'realtime_cid_event'},
    '%FF' : {'name' : 'Envisalink Zone Timer Dump', 'description' : 'This command contains the raw zone timers used inside the Envisalink. The dump is a 256 character packed HEX string representing 64 UINT16 (little endian) zone timers. Zone timers count down from 0xFFFF (zone is open) to 0x0000 (zone is closed too long ago to remember). Each ''tick'' of the zone time is actually 5 seconds so a zone timer of 0xFFFE means ''5 seconds ago''. Remember, the zone timers are LITTLE ENDIAN so the above example would be transmitted as FEFF.', 'handler' : 'zone_timer_dump'},
    '^00' : {'type' : 'envisalink', 'name': 'Poll', 'description' : 'Envisalink poll', 'handler' : 'poll_response'},
    '^01' : {'type' : 'envisalink', 'name': 'Change Default Partition', 'description': 'Change the partition which keystrokes are sent to when using the virtual keypad.', 'handler' : 'command_response'},
    '^02' : {'type' : 'envisalink', 'name': 'Dump Zone Timers', 'description' : 'This command contains the raw zone timers used inside the Envisalink. The dump is a 256 character packed HEX string representing 64 UINT16 (little endian) zone timers. Zone timers count down from 0xFFFF (zone is open) to 0x0000 (zone is closed too long ago to remember). Each ''tick'' of the zone time is actually 5 seconds so a zone timer of 0xFFFE means ''5 seconds ago''. Remember, the zone timers are LITTLE ENDIAN so the above example would be transmitted as FEFF.','handler' : 'command_response'},
    '^03' : {'type' : 'envisalink', 'name': 'Keypress to Specific Partition', 'description' : 'This will send a keystroke to the panel from an arbitrary partition. Use this if you don''t want to change the TPI default partition.' ,'handler' : 'command_response'},
    '^0C' : {'type' : 'envisalink', 'name': 'Response for Invalid Command', 'description' : 'This response is returned when an invalid command number is passed to Envisalink', 'handler': 'command_response'}
}
# Status codes returned in command acknowledgements from the Envisalink TPI.
evl_TPI_Response_Codes = {
    '00' : 'Command Accepted',
    '01' : 'Receive Buffer Overrun (a command is received while another is still being processed)',
    '02' : 'Unknown Command',
    '03' : 'Syntax Error. Data appended to the command is incorrect in some fashion',
    '04' : 'Receive Buffer Overflow',
    '05' : 'Receive State Machine Timeout (command not completed within 3 seconds)'
}
# Partition states from a Partition State Change message ('%02').
# 'pluginhandler', when present, names the client callback for that state.
evl_Partition_Status_Codes = {
    '00' : {'name' : 'NOT_USED', 'description' : 'Partition is not used or doesn''t exist'},
    '01' : {'name' : 'READY', 'description' : 'Ready', 'pluginhandler' : 'disarmed'},
    '02' : {'name' : 'READY_BYPASS', 'description' : 'Ready to Arm (Zones are Bypasses)', 'pluginhandler' : 'disarmed'},
    '03' : {'name' : 'NOT_READY', 'description' : 'Not Ready', 'pluginhandler' : 'disarmed'},
    '04' : {'name' : 'ARMED_STAY', 'description' : 'Armed in Stay Mode', 'pluginhandler' : 'armedHome'},
    '05' : {'name' : 'ARMED_AWAY', 'description' : 'Armed in Away Mode', 'pluginhandler' : 'armedAway'},
    '06' : {'name' : 'ARMED_MAX', 'description' : 'Armed in Away Mode', 'pluginhandler' : 'armedInstant'},
    '07' : {'name' : 'EXIT_ENTRY_DELAY', 'description' : 'Entry or Exit Delay'},
    '08' : {'name' : 'IN_ALARM', 'description' : 'Partition is in Alarm', 'pluginhandler' : 'alarmTriggered'},
    '09' : {'name' : 'ALARM_IN_MEMORY', 'description' : 'Alarm Has Occurred (Alarm in Memory)', 'pluginhandler' : 'alarmCleared'}
}
# Beep-pattern codes carried in virtual keypad updates, mapped to
# human-readable descriptions.
# Fixed typo in code '04': "continous" -> "continuous".
evl_Virtual_Keypad_How_To_Beep = {
    '00' : 'off',
    '01' : 'beep 1 time',
    '02' : 'beep 2 times',
    '03' : 'beep 3 times',
    '04' : 'continuous fast beep',
    '05' : 'continuous slow beep'
}
# Contact-ID event qualifier digit: whether the event is new, a restore,
# or a repeat of a still-present condition.
evl_CID_Qualifiers = {
    1 : 'New Event or Opening',
    3 : 'New Restore or Closing',
    6 : 'Previously Reported Condition Still Present'
}
# Contact-ID event codes, mapped to a human-readable label and whether the
# report's trailing field identifies a "zone" or a "user".
# Fixed label typos: 130 "Burgalry" -> "Burglary", 382 "Auxillary" ->
# "Auxiliary", 432 "Triger" -> "Trigger". All keys and types unchanged.
evl_CID_Events = {
    100 : {"label" : "Medical Alert", "type" : "zone"},
    101 : {"label" : "Personal Emergency", "type" : "zone"},
    102 : {"label" : "Failure to Report In", "type" : "zone"},
    110 : {"label" : "Fire Alarm", "type" : "zone"},
    111 : {"label" : "Smoke Alarm", "type" : "zone"},
    112 : {"label" : "Combustion Detected Alarm", "type" : "zone"},
    113 : {"label" : "Water Flood Alarm", "type" : "zone"},
    114 : {"label" : "Excessive Heat Alarm", "type" : "zone"},
    115 : {"label" : "Fire Alarm Pulled", "type" : "zone"},
    116 : {"label" : "Duct Alarm", "type" : "zone"},
    117 : {"label" : "Flame Detected", "type" : "zone"},
    118 : {"label" : "Near Alarm", "type" : "zone"},
    120 : {"label" : "Panic Alarm", "type" : "zone"},
    121 : {"label" : "Duress Alarm", "type" : "user"},
    122 : {"label" : "Alarm, 24-hour Silent", "type" : "zone"},
    123 : {"label" : "Alarm, 24-hour Audible", "type" : "zone"},
    124 : {"label" : "Duress - Access granted", "type" : "zone"},
    125 : {"label" : "Duress - Egress granted", "type" : "zone"},
    130 : {"label" : "Burglary in Progress", "type" : "zone"},
    131 : {"label" : "Alarm, Perimeter", "type" : "zone"},
    132 : {"label" : "Alarm, Interior", "type" : "zone"},
    133 : {"label" : "24 Hour (Safe)", "type" : "zone"},
    134 : {"label" : "Alarm, Entry/Exit", "type" : "zone"},
    135 : {"label" : "Alarm, Day/Night", "type" : "zone"},
    136 : {"label" : "Alarm, Outdoor", "type" : "zone"},
    137 : {"label" : "Alarm, Tamper", "type" : "zone"},
    138 : {"label" : "Near Alarm", "type" : "zone"},
    139 : {"label" : "Intrusion Verifier", "type" : "zone"},
    140 : {"label" : "Alarm, General Alarm", "type" : "zone"},
    141 : {"label" : "Alarm, Polling Loop Open", "type" : "zone"},
    142 : {"label" : "Alarm, Polling Loop Short", "type" : "zone"},
    143 : {"label" : "Alarm, Expansion Module", "type" : "zone"},
    144 : {"label" : "Alarm, Sensor Tamper", "type" : "zone"},
    145 : {"label" : "Alarm, Expansion Module Tamper", "type" : "zone"},
    146 : {"label" : "Silent Burglary", "type" : "zone"},
    147 : {"label" : "Sensor Supervision failure", "type" : "zone"},
    150 : {"label" : "Alarm, 24-Hour Auxiliary", "type" : "zone"},
    151 : {"label" : "Alarm, Gas detected", "type" : "zone"},
    152 : {"label" : "Alarm, Refrigeration", "type" : "zone"},
    153 : {"label" : "Alarm, Loss of heat", "type" : "zone"},
    154 : {"label" : "Alarm, Water leakage", "type" : "zone"},
    155 : {"label" : "Alarm, foil break", "type" : "zone"},
    156 : {"label" : "Day trouble", "type" : "zone"},
    157 : {"label" : "Low bottled gas level", "type" : "zone"},
    158 : {"label" : "Alarm, High temperature", "type" : "zone"},
    159 : {"label" : "Alarm, Low temperature", "type" : "zone"},
    161 : {"label" : "Alarm, Loss of air flow", "type" : "zone"},
    162 : {"label" : "Alarm, Carbon Monoxide Detected", "type" : "zone"},
    163 : {"label" : "Alarm, Tank Level", "type" : "zone"},
    300 : {"label" : "System Trouble", "type" : "zone"},
    301 : {"label" : "AC Power", "type" : "zone"},
    302 : {"label" : "Low System Battery/Battery Test Fail", "type" : "zone"},
    303 : {"label" : "RAM Checksum Bad", "type" : "zone"},
    304 : {"label" : "ROM Checksum Bad", "type" : "zone"},
    305 : {"label" : "System Reset", "type" : "zone"},
    306 : {"label" : "Panel programming changed", "type" : "zone"},
    307 : {"label" : "Self-test failure", "type" : "zone"},
    308 : {"label" : "System shutdown", "type" : "zone"},
    309 : {"label" : "Battery test failure", "type" : "zone"},
    310 : {"label" : "Ground fault", "type" : "zone"},
    311 : {"label" : "Battery Missing/Dead", "type" : "zone"},
    312 : {"label" : "Power Supply Overcurrent", "type" : "zone"},
    313 : {"label" : "Engineer Reset", "type" : "user"},
    321 : {"label" : "Bell/Siren Trouble", "type" : "zone"},
    333 : {"label" : "Trouble or Tamper Expansion Module", "type" : "zone"},
    341 : {"label" : "Trouble, ECP Cover Tamper", "type" : "zone"},
    344 : {"label" : "RF Receiver Jam", "type" : "zone"},
    351 : {"label" : "Telco Line Fault", "type" : "zone"},
    353 : {"label" : "Long Range Radio Trouble", "type" : "zone"},
    373 : {"label" : "Fire Loop Trouble", "type" : "zone"},
    374 : {"label" : "Exit Error Alarm", "type" : "zone"},
    380 : {"label" : "Global Trouble, Trouble Day/Night", "type" : "zone"},
    381 : {"label" : "RF Supervision Trouble", "type" : "zone"},
    382 : {"label" : "Supervision Auxiliary Wire Zone", "type" : "zone"},
    383 : {"label" : "RF Sensor Tamper", "type" : "zone"},
    384 : {"label" : "RF Sensor Low Battery", "type" : "zone"},
    393 : {"label" : "Clean Me", "type" : "zone"},
    401 : {"label" : "AWAY/MAX", "type" : "user"},
    403 : {"label" : "Scheduled Arming", "type" : "user"},
    406 : {"label" : "Cancel by User", "type" : "user"},
    407 : {"label" : "Remote Arm/Disarm (Downloading)", "type" : "user"},
    408 : {"label" : "Quick AWAY/MAX", "type" : "user"},
    409 : {"label" : "AWAY/MAX Keyswitch", "type" : "user"},
    411 : {"label" : "Callback Requested", "type" : "user"},
    412 : {"label" : "Success-Download/Access", "type" : "user"},
    413 : {"label" : "Unsuccessful Access", "type" : "user"},
    414 : {"label" : "System Shutdown", "type" : "user"},
    415 : {"label" : "Dialer Shutdown", "type" : "user"},
    416 : {"label" : "Successful Upload", "type" : "user"},
    421 : {"label" : "Access Denied", "type" : "user"},
    422 : {"label" : "Access Granted", "type" : "user"},
    423 : {"label" : "PANIC Forced Access", "type" : "zone"},
    424 : {"label" : "Egress Denied", "type" : "user"},
    425 : {"label" : "Egress Granted", "type" : "user"},
    426 : {"label" : "Access Door Propped Open", "type" : "zone"},
    427 : {"label" : "Access Point DSM Trouble", "type" : "zone"},
    428 : {"label" : "Access Point RTE Trouble", "type" : "zone"},
    429 : {"label" : "Access Program Mode Entry", "type" : "user"},
    430 : {"label" : "Access Program Mode Exit", "type" : "user"},
    431 : {"label" : "Access Threat Level Change", "type" : "user"},
    432 : {"label" : "Access Relay/Trigger Failure", "type" : "zone"},
    433 : {"label" : "Access RTE Shunt", "type" : "zone"},
    434 : {"label" : "Access DSM Shunt", "type" : "zone"},
    441 : {"label" : "STAY/INSTANT", "type" : "user"},
    442 : {"label" : "STAY/INSTANT Keyswitch", "type" : "user"},
    570 : {"label" : "Zone Bypass", "type" : "zone"},
    574 : {"label" : "Group Bypass", "type" : "user"},
    601 : {"label" : "Operator Initiated Dialer Test", "type" : "user"},
    602 : {"label" : "Periodic Test", "type" : "zone"},
    606 : {"label" : "AAV to follow", "type" : "zone"},
    607 : {"label" : "Walk Test", "type" : "user"},
    623 : {"label" : "Event Log 80% Full", "type" : "zone"},
    625 : {"label" : "Real-Time Clock Changed", "type" : "user"},
    627 : {"label" : "Program Mode Entry", "type" : "zone"},
    628 : {"label" : "Program Mode Exit", "type" : "zone"},
    629 : {"label" : "1-1/3 Day No Event", "type" : "zone"},
    642 : {"label" : "Latch Key", "type" : "user"},
}
| {
"repo_name": "jnimmo/pyenvisalink",
"path": "pyenvisalink/honeywell_envisalinkdefs.py",
"copies": "1",
"size": "15602",
"license": "mit",
"hash": -1215106233768105200,
"line_mean": 26.1339130435,
"line_max": 584,
"alpha_frac": 0.4976926035,
"autogenerated": false,
"ratio": 3.0207163601161664,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.40184089636161663,
"avg_score": null,
"num_lines": null
} |
## Alarm Server
## Supporting Envisalink 2DS/3
##
## This code is under the terms of the GPL v3 license.
# DSC TPI command numbers (3-digit codes sent to the Envisalink).
evl_Commands = {
    'KeepAlive' : '000',
    'StatusReport' : '001',
    'DumpZoneTimers' : '008',
    'PartitionKeypress' : '071',
    'Disarm' : '040',
    'ArmStay' : '031',
    'ArmAway' : '030',
    'ArmMax' : '032',
    'Login' : '005',
    'Panic' : '060',
    'SendCode' : '200',
    'CommandOutput' : '020',
    'SetTime' : '010'
    }
# Panic types for the DSC panic command ('060').
evl_PanicTypes = {
    'Fire' : '1',
    'Ambulance' : '2',
    'Police' : '3'
    }
# Arming modes reported by the panel; 'status' holds the partition state
# fields implied by each mode.
evl_ArmModes = {
    '0' : {'name' : 'Arm Away', 'status':{'armed_away': True, 'armed_zero_entry_delay': False, 'alpha':'Arm Away', 'exit_delay':False, 'entry_delay': False }},
    '1' : {'name' : 'Arm Stay', 'status':{'armed_stay': True, 'armed_zero_entry_delay': False, 'alpha':'Arm Stay', 'exit_delay':False, 'entry_delay': False }},
    '2' : {'name' : 'Arm Zero Entry Away', 'status':{'armed_away': True, 'armed_zero_entry_delay': True, 'alpha':'Arm Zero Entry Away', 'exit_delay':False, 'entry_delay': False }},
    '3' : {'name' : 'Arm Zero Entry Stay', 'status':{'armed_stay': True, 'armed_zero_entry_delay': True, 'alpha':'Arm Zero Entry Stay', 'exit_delay':False, 'entry_delay': False }}
    }
# TPI response dispatch table: maps the 3-digit code received from the
# Envisalink to a human-readable 'name' and the 'handler' method name that
# processes it (handlers are resolved elsewhere in the package — names such
# as 'zone_state_change' are strings, not callables).  Where present,
# 'status' is the state-delta dict the handler applies; '849' deliberately
# carries status=None (its payload is decoded via evl_verboseTrouble —
# NOTE(review): presumed from the table below, confirm against the handler).
evl_ResponseTypes = {
    # LOGIN / POLLING / CODE PROMPTS
    '505' : {'name':'Login Prompt', 'handler':'login'},
    '615' : {'name':'Envisalink Zone Timer Dump', 'handler':'zone_timer_dump'},
    '500' : {'name':'Poll', 'handler':'poll_response'},
    '501' : {'name':'Checksum', 'handler':'command_response_error'},
    '900' : {'name':'EnterCode', 'handler':'send_code'},
    '912' : {'name':'PGMEnterCode', 'handler':'send_code'},
    #ZONE UPDATES
    '601' : {'name':'Zone Alarm', 'handler':'zone_state_change', 'status':{'alarm' : True}},
    '602' : {'name':'Zone Alarm Restore', 'handler':'zone_state_change', 'status':{'alarm' : False}},
    '603' : {'name':'Zone Tamper', 'handler':'zone_state_change', 'status':{'tamper' : True}},
    '604' : {'name':'Zone Tamper Restore', 'handler':'zone_state_change', 'status':{'tamper' : False}},
    '605' : {'name':'Zone Fault', 'handler':'zone_state_change', 'status':{'fault' : True}},
    '606' : {'name':'Zone Fault Restore', 'handler':'zone_state_change', 'status':{'fault' : False}},
    '609' : {'name':'Zone Open', 'handler':'zone_state_change', 'status':{'open' : True}},
    '610' : {'name':'Zone Restored', 'handler':'zone_state_change', 'status':{'open' : False}},
    #PARTITION UPDATES
    '650' : {'name':'Ready', 'handler':'partition_state_change', 'status':{'ready' : True, 'alpha' : 'Ready'}},
    '651' : {'name':'Not Ready', 'handler':'partition_state_change', 'status':{'ready' : False, 'alpha' : 'Not Ready'}},
    '652' : {'name':'Armed', 'handler':'partition_state_change'},
    '653' : {'name':'Ready - Force Arming Enabled', 'handler':'partition_state_change', 'status':{'ready': True, 'alpha' : 'Ready - Force Arm'}},
    '654' : {'name':'Alarm', 'handler':'partition_state_change', 'status':{'alarm' : True, 'alpha' : 'Alarm'}},
    '655' : {'name':'Disarmed', 'handler':'partition_state_change', 'status' : {'alarm' : False, 'armed_stay' : False, 'armed_zero_entry_delay': False, 'armed_away' : False, 'exit_delay' : False, 'entry_delay' : False, 'alpha' : 'Disarmed'}},
    '656' : {'name':'Exit Delay in Progress', 'handler':'partition_state_change', 'status':{'exit_delay' : True, 'alpha' : 'Exit Delay In Progress'}},
    '657' : {'name':'Entry Delay in Progress', 'handler':'partition_state_change', 'status':{'entry_delay' : True, 'alpha' : 'Entry Delay in Progress'}},
    '663' : {'name':'ChimeOn', 'handler':'partition_state_change', 'status': {'chime': True}},
    '664' : {'name':'ChimeOff', 'handler':'partition_state_change', 'status': {'chime': False}},
    '673' : {'name':'Busy', 'handler':'partition_state_change', 'status': {'alpha': 'Busy'}},
    '700' : {'name':'Armed by user', 'handler':'partition_state_change', 'status':{}},
    '750' : {'name':'Disarmed by user', 'handler':'partition_state_change', 'status' : {'alarm' : False, 'armed_stay' : False, 'armed_away' : False, 'armed_zero_entry_delay': False, 'exit_delay' : False, 'entry_delay' : False, 'alpha' : 'Disarmed'}},
    '751' : {'name':'Disarmed special', 'handler':'partition_state_change', 'status' : {'alarm' : False, 'armed_stay' : False, 'armed_away' : False, 'armed_zero_entry_delay': False, 'exit_delay' : False, 'entry_delay' : False, 'alpha' : 'Disarmed'}},
    '840' : {'name':'Trouble LED', 'handler':'partition_state_change', 'status':{'trouble' : True}},
    '841' : {'name':'Trouble Clear', 'handler':'partition_state_change', 'status':{'trouble' : False, 'ac_present': True}},
    #GENERAL UPDATES
    '621' : {'name':'FireAlarmButton', 'handler':'keypad_update', 'status':{'fire' : True, 'alarm': True, 'alpha' : 'Fire Alarm'}},
    '622' : {'name':'FireAlarmButtonOff', 'handler':'keypad_update', 'status':{'fire' : False, 'alarm': False, 'alpha' : 'Fire Alarm Cleared'}},
    '623' : {'name':'AuxAlarmButton', 'handler':'keypad_update', 'status':{'alarm': True, 'alpha' : 'Aux Alarm'}},
    '624' : {'name':'AuxAlarmButtonOff', 'handler':'keypad_update', 'status':{'alarm': False, 'alpha' : 'Aux Alarm Cleared'}},
    '625' : {'name':'PanicAlarmButton', 'handler':'keypad_update', 'status':{'alarm': True, 'alpha' : 'Panic Alarm'}},
    '626' : {'name':'PanicAlarmButtonOff', 'handler':'keypad_update', 'status':{'alarm': False, 'alpha' : 'Panic Alarm Cleared'}},
    '631' : {'name':'SmokeAlarmButton', 'handler':'keypad_update', 'status':{'alarm': True, 'alpha' : 'Smoke Alarm'}},
    '632' : {'name':'SmokeAlarmButtonOff', 'handler':'keypad_update', 'status':{'alarm': False, 'alpha' : 'Smoke Alarm Cleared'}},
    '800' : {'name':'LowBatTrouble', 'handler':'keypad_update', 'status':{'bat_trouble': True, 'alpha' : 'Low Battery'}},
    '801' : {'name':'LowBatTroubleOff', 'handler':'keypad_update', 'status':{'bat_trouble': False, 'alpha' : 'Low Battery Cleared'}},
    '802' : {'name':'ACTrouble', 'handler':'keypad_update', 'status':{'ac_present': False, 'alpha' : 'AC Power Lost'}},
    '803' : {'name':'ACTroubleOff', 'handler':'keypad_update', 'status':{'ac_present': True, 'alpha' : 'AC Power Restored'}},
    '829' : {'name':'SystemTamper', 'handler':'keypad_update', 'status':{'alpha' : 'System tamper'}},
    '830' : {'name':'SystemTamperOff', 'handler':'keypad_update', 'status':{'alpha' : 'System tamper Restored'}},
    '849' : {'name':'TroubleVerbose', 'handler':'keypad_update', 'status':None}
}
# Descriptions for the verbose-trouble codes 0-7 carried by the
# '849' (TroubleVerbose) response; keys are the integers 0..7.
# NOTE(review): presumably each key is a trouble bit position — confirm
# against the keypad_update handler.
evl_verboseTrouble = dict(enumerate((
    'Service is Required',
    'AC Power Lost',
    'Telephone Line Fault',
    'Failure to communicate',
    'Zone/Sensor Fault',
    'Zone/Sensor Tamper',
    'Zone/Sensor Low Battery',
    'Loss of time',
)))
| {
"repo_name": "Cinntax/pyenvisalink",
"path": "pyenvisalink/dsc_envisalinkdefs.py",
"copies": "1",
"size": "6748",
"license": "mit",
"hash": 3367477178545005000,
"line_mean": 67.1616161616,
"line_max": 250,
"alpha_frac": 0.5997332543,
"autogenerated": false,
"ratio": 3.0192393736017897,
"config_test": false,
"has_no_keywords": true,
"few_assignments": false,
"quality_score": 0.8917752731576963,
"avg_score": 0.040243979264965285,
"num_lines": 99
} |
a = [{"latitudes": ["29.77119", "29.77879", "29.78162", "29.78177", "29.77881"],
"longitudes": ["-96.03579", "-96.0045", "-95.9906", "-95.98236", "-95.96749"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB",
"segmentSpeed": 66, "segmentTravelTime": "4 minutes 49 seconds", "segmentDist": "4.20",
"segmentFacilityType": "ML", "segmentOriginId": "1300", "segmentDestId": "1301",
"segmentLocation": "IH-10 Katy Eastbound from Brazos River to FM-1489"},
{"latitudes": ["29.77881", "29.77894", "29.77887", "29.77717"],
"longitudes": ["-95.96749", "-95.95133", "-95.93635", "-95.92054"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB", "segmentSpeed": 65,
"segmentTravelTime": "3 minutes 36 seconds", "segmentDist": "2.80", "segmentFacilityType": "ML",
"segmentOriginId": "1301", "segmentDestId": "1302",
"segmentLocation": "IH-10 Katy Eastbound from FM-1489 to Woods"},
{"latitudes": ["29.77717", "29.77715", "29.77727"], "longitudes": ["-95.92054", "-95.90125", "-95.88114"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": 67, "segmentTravelTime": "2 minutes 9 seconds", "segmentDist": "2.40",
"segmentFacilityType": "ML", "segmentOriginId": "1302", "segmentDestId": "1303",
"segmentLocation": "IH-10 Katy Eastbound from Woods to Pederson"},
{"latitudes": ["29.77727", "29.7773", "29.77745", "29.77743"],
"longitudes": ["-95.88114", "-95.85382", "-95.83199", "-95.8217"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB", "segmentSpeed": 69,
"segmentTravelTime": "3 minutes 14 seconds", "segmentDist": "3.70", "segmentFacilityType": "ML",
"segmentOriginId": "1303", "segmentDestId": "1304",
"segmentLocation": "IH-10 Katy Eastbound from Pederson to Pin Oak"}, {
"latitudes": ["29.77743", "29.77819", "29.77827", "29.78168", "29.78252", "29.78473", "29.7855", "29.78548",
"29.78537"],
"longitudes": ["-95.8217", "-95.81623", "-95.81595", "-95.8116", "-95.81063", "-95.80758", "-95.80486",
"-95.80179", "-95.77735"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00500", "0.00500", "0.00500", "0.00400", "0.00277", "0.00277",
"0.00277"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100",
"0.00100"], "segmentDirection": "EB", "segmentSpeed": 63,
"segmentTravelTime": "2 minutes 59 seconds", "segmentDist": "2.10", "segmentFacilityType": "ML",
"segmentOriginId": "411", "segmentDestId": "412",
"segmentLocation": "IH-10 Katy Eastbound from Pin Oak to Grand Parkway"},
{"latitudes": ["29.78537", "29.7852", "29.78527"], "longitudes": ["-95.77735", "-95.75188", "-95.73574"],
"latitudeOffsets": ["0.00277", "0.00277", "0.00277"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": 72, "segmentTravelTime": "3 minutes 40 seconds", "segmentDist": "3.20",
"segmentFacilityType": "ML", "segmentOriginId": "412", "segmentDestId": "413",
"segmentLocation": "IH-10 Katy Eastbound from Grand Parkway to Westgreen"},
{"latitudes": ["29.78527", "29.78483", "29.78513"], "longitudes": ["-95.73574", "-95.71894", "-95.706"],
"latitudeOffsets": ["0.00277", "0.00277", "0.00277"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": 66, "segmentTravelTime": "2 minutes 37 seconds", "segmentDist": "1.80",
"segmentFacilityType": "ML", "segmentOriginId": "413", "segmentDestId": "368",
"segmentLocation": "IH-10 Katy Eastbound from Westgreen to Greenhouse"},
{"latitudes": ["29.78513", "29.78511", "29.78463", "29.78463"],
"longitudes": ["-95.706", "-95.70199", "-95.68851", "-95.64481"],
"latitudeOffsets": ["0.00277", "0.00277", "0.00277", "0.00600"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB", "segmentSpeed": 62,
"segmentTravelTime": "3 minutes 47 seconds", "segmentDist": "2.90", "segmentFacilityType": "ML",
"segmentOriginId": "368", "segmentDestId": "427",
"segmentLocation": "IH-10 Katy Eastbound from Greenhouse to SH-6"},
{"latitudes": ["29.78463", "29.78463", "29.78463"], "longitudes": ["-95.64481", "-95.63756", "-95.61775"],
"latitudeOffsets": ["0.00600", "0.00600", "0.00800"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": 61, "segmentTravelTime": "2 minutes 9 seconds", "segmentDist": "2.20",
"segmentFacilityType": "ML", "segmentOriginId": "427", "segmentDestId": "465",
"segmentLocation": "IH-10 Katy Eastbound from SH-6 to Eldridge"},
{"latitudes": ["29.78463", "29.78456", "29.78450"], "longitudes": ["-95.61775", "-95.60650", "-95.59025"],
"latitudeOffsets": ["0.00800", "0.00800", "0.00800"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": 61, "segmentTravelTime": "1 minute 10 seconds", "segmentDist": "1.20",
"segmentFacilityType": "ML", "segmentOriginId": "465", "segmentDestId": "443",
"segmentLocation": "IH-10 Katy Eastbound from Eldridge to Kirkwood"},
{"latitudes": ["29.78450", "29.78425", "29.78431"], "longitudes": ["-95.59025", "-95.57562", "-95.56294"],
"latitudeOffsets": ["0.00800", "0.00800", "0.00800"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": 65, "segmentTravelTime": "2 minutes 44 seconds", "segmentDist": "1.90",
"segmentFacilityType": "ML", "segmentOriginId": "443", "segmentDestId": "466",
"segmentLocation": "IH-10 Katy Eastbound from Kirkwood to Beltway 8-West"},
{"latitudes": ["29.78431", "29.78413", "29.78400", "29.78406"],
"longitudes": ["-95.56294", "-95.54400", "-95.53181", "-95.52125"],
"latitudeOffsets": ["0.00800", "0.00800", "0.00800", "0.00800"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB", "segmentSpeed": 61,
"segmentTravelTime": "2 minutes 14 seconds", "segmentDist": "2.30", "segmentFacilityType": "ML",
"segmentOriginId": "466", "segmentDestId": "444",
"segmentLocation": "IH-10 Katy Eastbound from Beltway 8-West to Blalock"},
{"latitudes": ["29.78406", "29.78413", "29.78381"], "longitudes": ["-95.52125", "-95.51389", "-95.49875"],
"latitudeOffsets": ["0.00800", "0.00800", "0.00800"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": 66, "segmentTravelTime": "2 minutes 32 seconds", "segmentDist": "1.70",
"segmentFacilityType": "ML", "segmentOriginId": "444", "segmentDestId": "440",
"segmentLocation": "IH-10 Katy Eastbound from Blalock to Bingle/Voss"},
{"latitudes": ["29.78381", "29.78369", "29.78375", "29.78400", "29.78356", "29.78236", "29.78044"],
"longitudes": ["-95.49875", "-95.48487", "-95.47556", "-95.46800", "-95.45943", "-95.45675", "-95.45369"],
"latitudeOffsets": ["0.00800", "0.00800", "0.00800", "0.00800", "0.00800", "0.00800", "0.00600"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": 73, "segmentTravelTime": "2 minutes 57 seconds", "segmentDist": "2.40",
"segmentFacilityType": "ML", "segmentOriginId": "440", "segmentDestId": "426",
"segmentLocation": "IH-10 Katy Eastbound from Bingle/Voss to IH-610 West Loop"},
{"latitudes": ["29.78044", "29.77892", "29.77900", "29.77750"],
"longitudes": ["-95.45369", "-95.45009", "-95.44365", "-95.42905"],
"latitudeOffsets": ["0.00600", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB", "segmentSpeed": 56,
"segmentTravelTime": "2 minutes 29 seconds", "segmentDist": "2.30", "segmentFacilityType": "ML",
"segmentOriginId": "1310", "segmentDestId": "1311",
"segmentLocation": "IH-10 Katy Eastbound from IH-610 West Loop to Washington"},
{"latitudes": ["29.77750", "29.77750", "29.77750"], "longitudes": ["-95.42905", "-95.41810", "-95.41015"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": 66, "segmentTravelTime": "1 minute 27 seconds", "segmentDist": "1.60",
"segmentFacilityType": "ML", "segmentOriginId": "1311", "segmentDestId": "1312",
"segmentLocation": "IH-10 Katy Eastbound from Washington to Shepherd/Durham"}, {
"latitudes": ["29.77750", "29.77652", "29.77665", "29.77775", "29.77881", "29.77906", "29.77878", "29.77819",
"29.77737", "29.7762", "29.77379", "29.77023"],
"longitudes": ["-95.41015", "-95.40102", "-95.38726", "-95.38118", "-95.37832", "-95.37643", "-95.37439",
"-95.37243", "-95.37024", "-95.36872", "-95.36733", "-95.36509"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300",
"0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100",
"0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB", "segmentSpeed": 56,
"segmentTravelTime": "2 minutes 21 seconds", "segmentDist": "2.20", "segmentFacilityType": "ML",
"segmentOriginId": "1312", "segmentDestId": "1313",
"segmentLocation": "IH-10 Katy Eastbound from Shepherd/Durham to IH-45 North"}, {
"latitudes": ["29.77023", "29.77379", "29.7762", "29.77737", "29.77819", "29.77878", "29.77906", "29.77881",
"29.77775", "29.77665", "29.77652", "29.77750"],
"longitudes": ["-95.36509", "-95.36733", "-95.36872", "-95.37024", "-95.37243", "-95.37439", "-95.37643",
"-95.37832", "-95.38118", "-95.38726", "-95.40102", "-95.41015"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300",
"0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100",
"0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB", "segmentSpeed": 57,
"segmentTravelTime": "2 minutes 18 seconds", "segmentDist": "2.20", "segmentFacilityType": "ML",
"segmentOriginId": "1314", "segmentDestId": "1315",
"segmentLocation": "IH-10 Katy Westbound from IH-45 North to Shepherd/Durham"},
{"latitudes": ["29.77750", "29.77750", "29.77750"], "longitudes": ["-95.41015", "-95.41810", "-95.42905"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "WB", "segmentSpeed": 61, "segmentTravelTime": "2 minutes 35 seconds", "segmentDist": "1.60",
"segmentFacilityType": "ML", "segmentOriginId": "1315", "segmentDestId": "1316",
"segmentLocation": "IH-10 Katy Westbound from Shepherd/Durham to Washington"},
{"latitudes": ["29.77750", "29.77900", "29.77892", "29.78044"],
"longitudes": ["-95.42905", "-95.44365", "-95.45009", "-95.45369"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00600"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB", "segmentSpeed": 61,
"segmentTravelTime": "2 minutes 16 seconds", "segmentDist": "2.30", "segmentFacilityType": "ML",
"segmentOriginId": "1316", "segmentDestId": "1317",
"segmentLocation": "IH-10 Katy Westbound from Washington to IH-610 West Loop"},
{"latitudes": ["29.78044", "29.78236", "29.78356", "29.78400", "29.78375"],
"longitudes": ["-95.45369", "-95.45675", "-95.45943", "-95.46800", "-95.47556"],
"latitudeOffsets": ["0.00600", "0.00800", "0.00800", "0.00800", "0.00800"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB",
"segmentSpeed": 66, "segmentTravelTime": "1 minute 0 seconds", "segmentDist": "1.10", "segmentFacilityType": "ML",
"segmentOriginId": "425", "segmentDestId": "441",
"segmentLocation": "IH-10 Katy Westbound from IH-610 West Loop to Antoine"},
{"latitudes": ["29.78375", "29.78369", "29.78381", "29.78413", "29.78406"],
"longitudes": ["-95.47556", "-95.48487", "-95.49875", "-95.51389", "-95.52125"],
"latitudeOffsets": ["0.00800", "0.00800", "0.00800", "0.00800", "0.00800"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB",
"segmentSpeed": 66, "segmentTravelTime": "3 minutes 42 seconds", "segmentDist": "3", "segmentFacilityType": "ML",
"segmentOriginId": "441", "segmentDestId": "445",
"segmentLocation": "IH-10 Katy Westbound from Antoine to Blalock"},
{"latitudes": ["29.78406", "29.78400", "29.78413", "29.78431"],
"longitudes": ["-95.52125", "-95.53181", "-95.54400", "-95.56294"],
"latitudeOffsets": ["0.00800", "0.00800", "0.00800", "0.00800"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB", "segmentSpeed": 66,
"segmentTravelTime": "2 minutes 4 seconds", "segmentDist": "2.30", "segmentFacilityType": "ML",
"segmentOriginId": "445", "segmentDestId": "467",
"segmentLocation": "IH-10 Katy Westbound from Blalock to Beltway 8-West"},
{"latitudes": ["29.78431", "29.78425", "29.78450"], "longitudes": ["-95.56294", "-95.57562", "-95.59025"],
"latitudeOffsets": ["0.00800", "0.00800", "0.00800"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "WB", "segmentSpeed": 68, "segmentTravelTime": "2 minutes 45 seconds", "segmentDist": "2",
"segmentFacilityType": "ML", "segmentOriginId": "467", "segmentDestId": "442",
"segmentLocation": "IH-10 Katy Westbound from Beltway 8-West to Kirkwood"},
{"latitudes": ["29.78450", "29.78456", "29.78463", "29.78463", "29.78463"],
"longitudes": ["-95.59025", "-95.60650", "-95.61775", "-95.63756", "-95.64481"],
"latitudeOffsets": ["0.00800", "0.00800", "0.00800", "0.00600", "0.00600"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB",
"segmentSpeed": 62, "segmentTravelTime": "3 minutes 17 seconds", "segmentDist": "3.40",
"segmentFacilityType": "ML", "segmentOriginId": "442", "segmentDestId": "396",
"segmentLocation": "IH-10 Katy Westbound from Kirkwood to SH-6"},
{"latitudes": ["29.78463", "29.78463", "29.78511", "29.78513"],
"longitudes": ["-95.64481", "-95.68851", "-95.70199", "-95.706"],
"latitudeOffsets": ["0.00600", "0.00277", "0.00277", "0.00277"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB", "segmentSpeed": 55,
"segmentTravelTime": "3 minutes 9 seconds", "segmentDist": "2.90", "segmentFacilityType": "ML",
"segmentOriginId": "396", "segmentDestId": "369",
"segmentLocation": "IH-10 Katy Westbound from SH-6 to Greenhouse"},
{"latitudes": ["29.78513", "29.78483", "29.78527"], "longitudes": ["-95.706", "-95.71894", "-95.73574"],
"latitudeOffsets": ["0.00277", "0.00277", "0.00277"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "WB", "segmentSpeed": 80, "segmentTravelTime": "1 minute 20 seconds", "segmentDist": "1.80",
"segmentFacilityType": "ML", "segmentOriginId": "369", "segmentDestId": "414",
"segmentLocation": "IH-10 Katy Westbound from Greenhouse to Westgreen"},
{"latitudes": ["29.78527", "29.7852"], "longitudes": ["-95.73574", "-95.75188"],
"latitudeOffsets": ["0.00277", "0.00277"], "longitudeOffsets": ["0.00100", "0.00100"], "segmentDirection": "WB",
"segmentSpeed": 64, "segmentTravelTime": "2 minutes 51 seconds", "segmentDist": "2", "segmentFacilityType": "ML",
"segmentOriginId": "414", "segmentDestId": "415",
"segmentLocation": "IH-10 Katy Westbound from Westgreen to Mason"}, {
"latitudes": ["29.7852", "29.78537", "29.78548", "29.7855", "29.78473", "29.78252", "29.78168", "29.77827",
"29.77819", "29.77743"],
"longitudes": ["-95.75188", "-95.77735", "-95.80179", "-95.80486", "-95.80758", "-95.81063", "-95.8116",
"-95.81595", "-95.81623", "-95.8217"],
"latitudeOffsets": ["0.00277", "0.00277", "0.00277", "0.00277", "0.00400", "0.00500", "0.00500", "0.00500",
"0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100",
"0.00100", "0.00100"], "segmentDirection": "WB", "segmentSpeed": 64,
"segmentTravelTime": "3 minutes 3 seconds", "segmentDist": "3.30", "segmentFacilityType": "ML",
"segmentOriginId": "415", "segmentDestId": "416",
"segmentLocation": "IH-10 Katy Westbound from Mason to Pin Oak"},
{"latitudes": ["29.77743", "29.77745", "29.7773", "29.77727"],
"longitudes": ["-95.8217", "-95.83199", "-95.85382", "-95.88114"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB", "segmentSpeed": 65,
"segmentTravelTime": "3 minutes 24 seconds", "segmentDist": "3.70", "segmentFacilityType": "ML",
"segmentOriginId": "1305", "segmentDestId": "1306",
"segmentLocation": "IH-10 Katy Westbound from Pin Oak to Pederson"},
{"latitudes": ["29.77727", "29.77715", "29.77717"], "longitudes": ["-95.88114", "-95.90125", "-95.92054"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "WB", "segmentSpeed": 67, "segmentTravelTime": "2 minutes 9 seconds", "segmentDist": "2.40",
"segmentFacilityType": "ML", "segmentOriginId": "1306", "segmentDestId": "1307",
"segmentLocation": "IH-10 Katy Westbound from Pederson to Woods"},
{"latitudes": ["29.77717", "29.77887", "29.77894", "29.77881"],
"longitudes": ["-95.92054", "-95.93635", "-95.95133", "-95.96749"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB", "segmentSpeed": 64,
"segmentTravelTime": "3 minutes 37 seconds", "segmentDist": "2.80", "segmentFacilityType": "ML",
"segmentOriginId": "1307", "segmentDestId": "1308",
"segmentLocation": "IH-10 Katy Westbound from Woods to FM-1489"},
{"latitudes": ["29.77881", "29.78177", "29.78162", "29.77879", "29.77119"],
"longitudes": ["-95.96749", "-95.98236", "-95.9906", "-96.0045", "-96.03579"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB",
"segmentSpeed": 70, "segmentTravelTime": "4 minutes 36 seconds", "segmentDist": "4.20",
"segmentFacilityType": "ML", "segmentOriginId": "1308", "segmentDestId": "1309",
"segmentLocation": "IH-10 Katy Westbound from FM-1489 to Brazos River"},
{"latitudes": ["29.78463", "29.78463", "29.78463", "29.78456", "29.78450"],
"longitudes": ["-95.64481", "-95.63756", "-95.61775", "-95.60650", "-95.59025"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "3.10", "segmentFacilityType": "ML",
"segmentOriginId": "449", "segmentDestId": "451",
"segmentLocation": "IH-10 Katy Managed Lanes Eastbound from SH-6 to Kirkwood"},
{"latitudes": ["29.78450", "29.78425", "29.78431"], "longitudes": ["-95.59025", "-95.57562", "-95.56294"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "1.90",
"segmentFacilityType": "ML", "segmentOriginId": "451", "segmentDestId": "468",
"segmentLocation": "IH-10 Katy Managed Lanes Eastbound from Kirkwood to Beltway 8-West"},
{"latitudes": ["29.78431", "29.78413", "29.78400", "29.78406"],
"longitudes": ["-95.56294", "-95.54400", "-95.53181", "-95.52125"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "2.30", "segmentFacilityType": "ML",
"segmentOriginId": "468", "segmentDestId": "453",
"segmentLocation": "IH-10 Katy Managed Lanes Eastbound from Beltway 8-West to Blalock"},
{"latitudes": ["29.78406", "29.78413", "29.78381"], "longitudes": ["-95.52125", "-95.51389", "-95.49875"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "1.70",
"segmentFacilityType": "ML", "segmentOriginId": "453", "segmentDestId": "454",
"segmentLocation": "IH-10 Katy Managed Lanes Eastbound from Blalock to Bingle/Voss"},
{"latitudes": ["29.78381", "29.78369", "29.78375", "29.78400", "29.78356", "29.78236", "29.78044"],
"longitudes": ["-95.49875", "-95.48487", "-95.47556", "-95.46800", "-95.45943", "-95.45675", "-95.45369"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "2.40",
"segmentFacilityType": "ML", "segmentOriginId": "454", "segmentDestId": "426",
"segmentLocation": "IH-10 Katy Managed Lanes Eastbound from Bingle/Voss to IH-610 West Loop"},
{"latitudes": ["29.78044", "29.78236", "29.78356", "29.78400", "29.78375"],
"longitudes": ["-95.45369", "-95.45675", "-95.45943", "-95.46800", "-95.47556"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "1.10", "segmentFacilityType": "ML",
"segmentOriginId": "460", "segmentDestId": "455",
"segmentLocation": "IH-10 Katy Managed Lanes Westbound from IH-610 West Loop to Antoine"},
{"latitudes": ["29.78375", "29.78369", "29.78381", "29.78413", "29.78406"],
"longitudes": ["-95.47556", "-95.48487", "-95.49875", "-95.51389", "-95.52125"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "3", "segmentFacilityType": "ML",
"segmentOriginId": "455", "segmentDestId": "456",
"segmentLocation": "IH-10 Katy Managed Lanes Westbound from Antoine to Blalock"},
{"latitudes": ["29.78406", "29.78400", "29.78413", "29.78431"],
"longitudes": ["-95.52125", "-95.53181", "-95.54400", "-95.56294"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "2.30", "segmentFacilityType": "ML",
"segmentOriginId": "456", "segmentDestId": "469",
"segmentLocation": "IH-10 Katy Managed Lanes Westbound from Blalock to Beltway 8-West"},
{"latitudes": ["29.78431", "29.78425", "29.78450"], "longitudes": ["-95.56294", "-95.57562", "-95.59025"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "WB", "segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "2",
"segmentFacilityType": "ML", "segmentOriginId": "469", "segmentDestId": "458",
"segmentLocation": "IH-10 Katy Managed Lanes Westbound from Beltway 8-West to Kirkwood"},
{"latitudes": ["29.78450", "29.78456", "29.78463", "29.78463", "29.78463"],
"longitudes": ["-95.59025", "-95.60650", "-95.61775", "-95.63756", "-95.64481"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "3.10", "segmentFacilityType": "ML",
"segmentOriginId": "458", "segmentDestId": "459",
"segmentLocation": "IH-10 Katy Managed Lanes Westbound from Kirkwood to SH-6"},
{"latitudes": ["29.6539", "29.64937", "29.64955", "29.65842"],
"longitudes": ["-97.56604", "-97.48922", "-97.4859", "-97.4153"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB", "segmentSpeed": 69,
"segmentTravelTime": "8 minutes 48 seconds", "segmentDist": "9", "segmentFacilityType": "ML",
"segmentOriginId": "1361", "segmentDestId": "1362",
"segmentLocation": "IH-10 West Eastbound from US-183 East of Luling to SH-304 West of Waelder"},
{"latitudes": ["29.65842", "29.66223", "29.66943"], "longitudes": ["-97.4153", "-97.38721", "-97.30127"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": 71, "segmentTravelTime": "6 minutes 47 seconds", "segmentDist": "6.80",
"segmentFacilityType": "ML", "segmentOriginId": "1362", "segmentDestId": "1363",
"segmentLocation": "IH-10 West Eastbound from SH-304 West of Waelder to SH-97 in Waelder"},
{"latitudes": ["29.66943", "29.66998", "29.6708", "29.68516", "29.69206", "29.6925", "29.69229", "29.6919"],
"longitudes": ["-97.30127", "-97.29578", "-97.29327", "-97.26323", "-97.24317", "-97.23639", "-97.21053",
"-97.16437"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": 66, "segmentTravelTime": "6 minutes 24 seconds", "segmentDist": "7",
"segmentFacilityType": "ML", "segmentOriginId": "1363", "segmentDestId": "1364",
"segmentLocation": "IH-10 West Eastbound from SH-97 in Waelder to Webb Ln West of Flatonia"},
{"latitudes": ["29.6919", "29.69232", "29.69484", "29.69645", "29.69665", "29.69793"],
"longitudes": ["-97.16437", "-97.16148", "-97.1508", "-97.14147", "-97.10865", "-97.05918"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB",
"segmentSpeed": 64, "segmentTravelTime": "7 minutes 14 seconds", "segmentDist": "7.70",
"segmentFacilityType": "ML", "segmentOriginId": "1364", "segmentDestId": "1365",
"segmentLocation": "IH-10 West Eastbound from Webb Ln West of Flatonia to N Knezek Rd East of Flatonia"}, {
"latitudes": ["29.69793", "29.69788", "29.69681", "29.69164", "29.69156", "29.69114", "29.68887", "29.68879",
"29.69011"],
"longitudes": ["-97.05918", "-97.05287", "-97.03792", "-97.0071", "-97.00323", "-96.97878", "-96.9576",
"-96.95552", "-96.93267"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400",
"0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100",
"0.00100"], "segmentDirection": "EB", "segmentSpeed": 64,
"segmentTravelTime": "7 minutes 3 seconds", "segmentDist": "7.50", "segmentFacilityType": "ML",
"segmentOriginId": "1365", "segmentDestId": "1366",
"segmentLocation": "IH-10 West Eastbound from N Knezek Rd East of Flatonia to FM-2672 West of Schulenburg"},
{"latitudes": ["29.69011", "29.69097", "29.69348", "29.69601", "29.6965", "29.69628", "29.69589", "29.69428"],
"longitudes": ["-96.93267", "-96.91887", "-96.90268", "-96.88572", "-96.85665", "-96.85487", "-96.85328",
"-96.84902"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": 67, "segmentTravelTime": "4 minutes 29 seconds", "segmentDist": "5",
"segmentFacilityType": "ML", "segmentOriginId": "1366", "segmentDestId": "1367",
"segmentLocation": "IH-10 West Eastbound from FM-2672 West of Schulenburg to US-90 East of Schulenburg"},
{"latitudes": ["29.69428", "29.68842", "29.68794", "29.68789", "29.69115"],
"longitudes": ["-96.84902", "-96.83412", "-96.83204", "-96.83037", "-96.78464"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "4", "segmentFacilityType": "ML",
"segmentOriginId": "1367", "segmentDestId": "1368",
"segmentLocation": "IH-10 West Eastbound from US-90 East of Schulenburg to County Rd 240 in Weimar"},
{"latitudes": ["29.69115", "29.69214", "29.69225", "29.68862"],
"longitudes": ["-96.78464", "-96.77051", "-96.73115", "-96.70435"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "4.70", "segmentFacilityType": "ML",
"segmentOriginId": "1368", "segmentDestId": "1369",
"segmentLocation": "IH-10 West Eastbound from County Rd 240 in Weimar to County Rd 210 East of Weimar"},
{"latitudes": ["29.68862", "29.68835", "29.68842", "29.69161"],
"longitudes": ["-96.70435", "-96.70174", "-96.69926", "-96.64834"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB", "segmentSpeed": 68,
"segmentTravelTime": "3 minutes 1 second", "segmentDist": "3.40", "segmentFacilityType": "ML",
"segmentOriginId": "1369", "segmentDestId": "1370",
"segmentLocation": "IH-10 West Eastbound from County Rd 210 East of Weimar to Hatterman Ln"},
{"latitudes": ["29.69161", "29.69331", "29.69369"], "longitudes": ["-96.64834", "-96.61956", "-96.56243"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": 61, "segmentTravelTime": "5 minutes 53 seconds", "segmentDist": "5",
"segmentFacilityType": "ML", "segmentOriginId": "1370", "segmentDestId": "1371",
"segmentLocation": "IH-10 West Eastbound from Hatterman Ln to FM-806 West of Columbus"},
{"latitudes": ["29.69369", "29.69361", "29.69192", "29.69188", "29.69202", "29.69263"],
"longitudes": ["-96.56243", "-96.55823", "-96.54442", "-96.54315", "-96.54176", "-96.53965"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB",
"segmentSpeed": 66, "segmentTravelTime": "1 minute 16 seconds", "segmentDist": "1.40",
"segmentFacilityType": "ML", "segmentOriginId": "1371", "segmentDestId": "1372",
"segmentLocation": "IH-10 West Eastbound from FM-806 West of Columbus to SH-71 in Columbus"}, {
"latitudes": ["29.69263", "29.69404", "29.69819", "29.69896", "29.70204", "29.70395", "29.70447", "29.70892",
"29.70968"],
"longitudes": ["-96.53965", "-96.53536", "-96.52958", "-96.52829", "-96.52018", "-96.51398", "-96.51268",
"-96.50553", "-96.50379"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400",
"0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100",
"0.00100"], "segmentDirection": "EB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "2.60", "segmentFacilityType": "ML",
"segmentOriginId": "1372", "segmentDestId": "1373",
"segmentLocation": "IH-10 West Eastbound from SH-71 in Columbus to US-90 East of Columbus"},
{"latitudes": ["29.70968", "29.71162", "29.7121", "29.71385", "29.7162"],
"longitudes": ["-96.50379", "-96.49607", "-96.49399", "-96.4823", "-96.46589"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "2.10", "segmentFacilityType": "ML",
"segmentOriginId": "1373", "segmentDestId": "1374",
"segmentLocation": "IH-10 West Eastbound from US-90 East of Columbus to FM-102 East of Columbus"},
{"latitudes": ["29.7162", "29.71709", "29.71868", "29.71902", "29.72434", "29.7293"],
"longitudes": ["-96.46589", "-96.45988", "-96.44378", "-96.44132", "-96.4151", "-96.39073"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB",
"segmentSpeed": 69, "segmentTravelTime": "4 minutes 54 seconds", "segmentDist": "4.50",
"segmentFacilityType": "ML", "segmentOriginId": "1374", "segmentDestId": "1375",
"segmentLocation": "IH-10 West Eastbound from FM-102 East of Columbus to Eagle Lake Rd"},
{"latitudes": ["29.7293", "29.74152"], "longitudes": ["-96.39073", "-96.33036"],
"latitudeOffsets": ["0.00400", "0.00400"], "longitudeOffsets": ["0.00100", "0.00100"], "segmentDirection": "EB",
"segmentSpeed": 70, "segmentTravelTime": "3 minutes 6 seconds", "segmentDist": "3.60",
"segmentFacilityType": "ML", "segmentOriginId": "1375", "segmentDestId": "1376",
"segmentLocation": "IH-10 West Eastbound from Eagle Lake Rd to FM-2761/Bernardo Rd"},
{"latitudes": ["29.74152", "29.7501", "29.75085", "29.75256"],
"longitudes": ["-96.33036", "-96.28802", "-96.28366", "-96.26944"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB", "segmentSpeed": 70,
"segmentTravelTime": "3 minutes 9 seconds", "segmentDist": "3.70", "segmentFacilityType": "ML",
"segmentOriginId": "1376", "segmentDestId": "1377",
"segmentLocation": "IH-10 West Eastbound from FM-2761/Bernardo Rd to Beckendorff Rd"},
{"latitudes": ["29.75256", "29.75873", "29.76275"], "longitudes": ["-96.26944", "-96.22047", "-96.19693"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": 66, "segmentTravelTime": "4 minutes 55 seconds", "segmentDist": "4.30",
"segmentFacilityType": "ML", "segmentOriginId": "1377", "segmentDestId": "1378",
"segmentLocation": "IH-10 West Eastbound from Beckendorff Rd to US-90 West of Sealy"}, {
"latitudes": ["29.76275", "29.76371", "29.7638", "29.76364", "29.76302", "29.76082", "29.75985", "29.75983",
"29.76023", "29.76449"],
"longitudes": ["-96.19693", "-96.19124", "-96.18965", "-96.1877", "-96.18526", "-96.17781", "-96.17318",
"-96.17044", "-96.16722", "-96.15287"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400",
"0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100",
"0.00100", "0.00100"], "segmentDirection": "EB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "2.70", "segmentFacilityType": "ML",
"segmentOriginId": "1378", "segmentDestId": "1379",
"segmentLocation": "IH-10 West Eastbound from US-90 West of Sealy to SH-36 in Sealy"},
{"latitudes": ["29.76449", "29.76762", "29.7687", "29.77221", "29.77328", "29.77407", "29.77506"],
"longitudes": ["-96.15287", "-96.14264", "-96.14032", "-96.13577", "-96.13404", "-96.13208", "-96.12787"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "1.90",
"segmentFacilityType": "ML", "segmentOriginId": "1379", "segmentDestId": "1380",
"segmentLocation": "IH-10 West Eastbound from SH-36 in Sealy to US-90 East of Sealy"}, {
"latitudes": ["29.77506", "29.77561", "29.77614", "29.77637", "29.77631", "29.77471", "29.77388", "29.77327",
"29.77298", "29.77274"],
"longitudes": ["-96.12787", "-96.12343", "-96.10905", "-96.10015", "-96.09793", "-96.08514", "-96.08029",
"-96.07597", "-96.07374", "-96.07029"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400",
"0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100",
"0.00100", "0.00100"], "segmentDirection": "EB", "segmentSpeed": 68,
"segmentTravelTime": "3 minutes 55 seconds", "segmentDist": "3.30", "segmentFacilityType": "ML",
"segmentOriginId": "1380", "segmentDestId": "1381",
"segmentLocation": "IH-10 West Eastbound from US-90 East of Sealy to Brazos River Weigh Station"},
{"latitudes": ["29.77274", "29.77208", "29.76979", "29.76979", "29.77119"],
"longitudes": ["-96.07029", "-96.06366", "-96.04508", "-96.04179", "-96.03579"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB",
"segmentSpeed": 67, "segmentTravelTime": "2 minutes 58 seconds", "segmentDist": "2.20",
"segmentFacilityType": "ML", "segmentOriginId": "1381", "segmentDestId": "1300",
"segmentLocation": "IH-10 West Eastbound from Brazos River Weigh Station to Brazos River"},
{"latitudes": ["29.77119", "29.76979", "29.76979", "29.77208", "29.77274"],
"longitudes": ["-96.03579", "-96.04179", "-96.04508", "-96.06366", "-96.07029"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB",
"segmentSpeed": 59, "segmentTravelTime": "2 minutes 14 seconds", "segmentDist": "2.20",
"segmentFacilityType": "ML", "segmentOriginId": "1309", "segmentDestId": "1340",
"segmentLocation": "IH-10 West Westbound from Brazos River to Brazos River Weigh Station"}, {
"latitudes": ["29.77274", "29.77298", "29.77327", "29.77388", "29.77471", "29.77631", "29.77637", "29.77614",
"29.77561", "29.77506"],
"longitudes": ["-96.07029", "-96.07374", "-96.07597", "-96.08029", "-96.08514", "-96.09793", "-96.10015",
"-96.10905", "-96.12343", "-96.12787"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400",
"0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100",
"0.00100", "0.00100"], "segmentDirection": "WB", "segmentSpeed": 70,
"segmentTravelTime": "3 minutes 49 seconds", "segmentDist": "3.30", "segmentFacilityType": "ML",
"segmentOriginId": "1340", "segmentDestId": "1341",
"segmentLocation": "IH-10 West Westbound from Brazos River Weigh Station to US-90 East of Sealy"},
{"latitudes": ["29.77506", "29.77407", "29.77328", "29.77221", "29.7687", "29.76762", "29.76449"],
"longitudes": ["-96.12787", "-96.13208", "-96.13404", "-96.13577", "-96.14032", "-96.14264", "-96.15287"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"segmentDirection": "WB", "segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "1.90",
"segmentFacilityType": "ML", "segmentOriginId": "1341", "segmentDestId": "1342",
"segmentLocation": "IH-10 West Westbound from US-90 East of Sealy to SH-36 in Sealy"}, {
"latitudes": ["29.76449", "29.76023", "29.75983", "29.75985", "29.76082", "29.76302", "29.76364", "29.7638",
"29.76371", "29.76275"],
"longitudes": ["-96.15287", "-96.16722", "-96.17044", "-96.17318", "-96.17781", "-96.18526", "-96.1877",
"-96.18965", "-96.19124", "-96.19693"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400",
"0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100",
"0.00100", "0.00100"], "segmentDirection": "WB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "2.70", "segmentFacilityType": "ML",
"segmentOriginId": "1342", "segmentDestId": "1343",
"segmentLocation": "IH-10 West Westbound from SH-36 in Sealy to US-90 West of Sealy"},
{"latitudes": ["29.76275", "29.75873", "29.75256"], "longitudes": ["-96.19693", "-96.22047", "-96.26944"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "WB", "segmentSpeed": 65, "segmentTravelTime": "4 minutes 57 seconds", "segmentDist": "4.30",
"segmentFacilityType": "ML", "segmentOriginId": "1343", "segmentDestId": "1344",
"segmentLocation": "IH-10 West Westbound from US-90 West of Sealy to Beckendorff Rd"},
{"latitudes": ["29.75256", "29.75085", "29.7501", "29.74152"],
"longitudes": ["-96.26944", "-96.28366", "-96.28802", "-96.33036"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB", "segmentSpeed": 68,
"segmentTravelTime": "3 minutes 16 seconds", "segmentDist": "3.70", "segmentFacilityType": "ML",
"segmentOriginId": "1344", "segmentDestId": "1345",
"segmentLocation": "IH-10 West Westbound from Beckendorff Rd to FM-2761/Bernardo Rd"},
{"latitudes": ["29.74152", "29.7293"], "longitudes": ["-96.33036", "-96.39073"],
"latitudeOffsets": ["0.00400", "0.00400"], "longitudeOffsets": ["0.00100", "0.00100"], "segmentDirection": "WB",
"segmentSpeed": 61, "segmentTravelTime": "4 minutes 34 seconds", "segmentDist": "3.60",
"segmentFacilityType": "ML", "segmentOriginId": "1345", "segmentDestId": "1346",
"segmentLocation": "IH-10 West Westbound from FM-2761/Bernardo Rd to Eagle Lake Rd"},
{"latitudes": ["29.7293", "29.72434", "29.71902", "29.71868", "29.71709", "29.7162"],
"longitudes": ["-96.39073", "-96.4151", "-96.44132", "-96.44378", "-96.45988", "-96.46589"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB",
"segmentSpeed": 62, "segmentTravelTime": "4 minutes 21 seconds", "segmentDist": "4.50",
"segmentFacilityType": "ML", "segmentOriginId": "1346", "segmentDestId": "1347",
"segmentLocation": "IH-10 West Westbound from Eagle Lake Rd to FM-102 East of Columbus"},
{"latitudes": ["29.7162", "29.71385", "29.7121", "29.71162", "29.70968"],
"longitudes": ["-96.46589", "-96.4823", "-96.49399", "-96.49607", "-96.50379"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "2.10", "segmentFacilityType": "ML",
"segmentOriginId": "1347", "segmentDestId": "1348",
"segmentLocation": "IH-10 West Westbound from FM-102 East of Columbus to US-90 East of Columbus"}, {
"latitudes": ["29.70968", "29.70892", "29.70447", "29.70395", "29.70204", "29.69896", "29.69819", "29.69404",
"29.69263"],
"longitudes": ["-96.50379", "-96.50553", "-96.51268", "-96.51398", "-96.52018", "-96.52829", "-96.52958",
"-96.53536", "-96.53965"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400",
"0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100",
"0.00100"], "segmentDirection": "WB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "2.60", "segmentFacilityType": "ML",
"segmentOriginId": "1348", "segmentDestId": "1349",
"segmentLocation": "IH-10 West Westbound from US-90 East of Columbus to SH-71 in Columbus"},
{"latitudes": ["29.69263", "29.69202", "29.69188", "29.69192", "29.69361", "29.69369"],
"longitudes": ["-96.53965", "-96.54176", "-96.54315", "-96.54442", "-96.55823", "-96.56243"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB",
"segmentSpeed": 66, "segmentTravelTime": "1 minute 16 seconds", "segmentDist": "1.40",
"segmentFacilityType": "ML", "segmentOriginId": "1349", "segmentDestId": "1350",
"segmentLocation": "IH-10 West Westbound from SH-71 in Columbus to FM-806 West of Columbus"},
{"latitudes": ["29.69369", "29.69331", "29.69161"], "longitudes": ["-96.56243", "-96.61956", "-96.64834"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "WB", "segmentSpeed": 68, "segmentTravelTime": "4 minutes 23 seconds", "segmentDist": "5",
"segmentFacilityType": "ML", "segmentOriginId": "1350", "segmentDestId": "1351",
"segmentLocation": "IH-10 West Westbound from FM-806 West of Columbus to Hatterman Ln"},
{"latitudes": ["29.69161", "29.68842", "29.68835", "29.68862"],
"longitudes": ["-96.64834", "-96.69926", "-96.70174", "-96.70435"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB", "segmentSpeed": 67,
"segmentTravelTime": "3 minutes 2 seconds", "segmentDist": "3.40", "segmentFacilityType": "ML",
"segmentOriginId": "1351", "segmentDestId": "1352",
"segmentLocation": "IH-10 West Westbound from Hatterman Ln to County Rd 210 East of Weimar"},
{"latitudes": ["29.68862", "29.69225", "29.69214", "29.69115"],
"longitudes": ["-96.70435", "-96.73115", "-96.77051", "-96.78464"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "4.70", "segmentFacilityType": "ML",
"segmentOriginId": "1352", "segmentDestId": "1353",
"segmentLocation": "IH-10 West Westbound from County Rd 210 East of Weimar to County Rd 240 in Weimar"},
{"latitudes": ["29.69115", "29.68789", "29.68794", "29.68842", "29.69428"],
"longitudes": ["-96.78464", "-96.83037", "-96.83204", "-96.83412", "-96.84902"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "4", "segmentFacilityType": "ML",
"segmentOriginId": "1353", "segmentDestId": "1354",
"segmentLocation": "IH-10 West Westbound from County Rd 240 in Weimar to US-90 East of Schulenburg"},
{"latitudes": ["29.69428", "29.69589", "29.69628", "29.6965", "29.69601", "29.69348", "29.69097", "29.69011"],
"longitudes": ["-96.84902", "-96.85328", "-96.85487", "-96.85665", "-96.88572", "-96.90268", "-96.91887",
"-96.93267"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"segmentDirection": "WB", "segmentSpeed": 65, "segmentTravelTime": "5 minutes 35 seconds", "segmentDist": "5",
"segmentFacilityType": "ML", "segmentOriginId": "1354", "segmentDestId": "1355",
"segmentLocation": "IH-10 West Westbound from US-90 East of Schulenburg to FM-2672 West of Schulenburg"}, {
"latitudes": ["29.69011", "29.68879", "29.68887", "29.69114", "29.69156", "29.69164", "29.69681", "29.69788",
"29.69793"],
"longitudes": ["-96.93267", "-96.95552", "-96.9576", "-96.97878", "-97.00323", "-97.0071", "-97.03792",
"-97.05287", "-97.05918"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400",
"0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100",
"0.00100"], "segmentDirection": "WB", "segmentSpeed": 65,
"segmentTravelTime": "7 minutes 53 seconds", "segmentDist": "7.50", "segmentFacilityType": "ML",
"segmentOriginId": "1355", "segmentDestId": "1356",
"segmentLocation": "IH-10 West Westbound from FM-2672 West of Schulenburg to N Knezek Rd East of Flatonia"},
{"latitudes": ["29.69793", "29.69665", "29.69645", "29.69484", "29.69232", "29.6919"],
"longitudes": ["-97.05918", "-97.10865", "-97.14147", "-97.1508", "-97.16148", "-97.16437"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB",
"segmentSpeed": 67, "segmentTravelTime": "7 minutes 55 seconds", "segmentDist": "7.70",
"segmentFacilityType": "ML", "segmentOriginId": "1356", "segmentDestId": "1357",
"segmentLocation": "IH-10 West Westbound from N Knezek Rd East of Flatonia to Webb Ln West of Flatonia"},
{"latitudes": ["29.6919", "29.69229", "29.6925", "29.69206", "29.68516", "29.6708", "29.66998", "29.66943"],
"longitudes": ["-97.16437", "-97.21053", "-97.23639", "-97.24317", "-97.26323", "-97.29327", "-97.29578",
"-97.30127"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"segmentDirection": "WB", "segmentSpeed": 64, "segmentTravelTime": "7 minutes 34 seconds", "segmentDist": "7",
"segmentFacilityType": "ML", "segmentOriginId": "1357", "segmentDestId": "1358",
"segmentLocation": "IH-10 West Westbound from Webb Ln West of Flatonia to SH-97 in Waelder"},
{"latitudes": ["29.66943", "29.66223", "29.65842"], "longitudes": ["-97.30127", "-97.38721", "-97.4153"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "WB", "segmentSpeed": 65, "segmentTravelTime": "6 minutes 19 seconds", "segmentDist": "6.80",
"segmentFacilityType": "ML", "segmentOriginId": "1358", "segmentDestId": "1359",
"segmentLocation": "IH-10 West Westbound from SH-97 in Waelder to SH-304 West of Waelder"},
{"latitudes": ["29.65842", "29.64955", "29.64937", "29.6539"],
"longitudes": ["-97.4153", "-97.4859", "-97.48922", "-97.56604"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB", "segmentSpeed": 70,
"segmentTravelTime": "8 minutes 46 seconds", "segmentDist": "9", "segmentFacilityType": "ML",
"segmentOriginId": "1359", "segmentDestId": "1360",
"segmentLocation": "IH-10 West Westbound from SH-304 West of Waelder to US-183 East of Luling"},
{"latitudes": ["29.77023", "29.76883", "29.76744", "29.76714", "29.76743", "29.76827"],
"longitudes": ["-95.36509", "-95.36374", "-95.36116", "-95.35947", "-95.35827", "-95.35593"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB",
"segmentSpeed": 54, "segmentTravelTime": "1 minute 7 seconds", "segmentDist": "1", "segmentFacilityType": "ML",
"segmentOriginId": "1313", "segmentDestId": "1318",
"segmentLocation": "IH-10 East Eastbound from IH-45 North to San Jacinto"},
{"latitudes": ["29.76827", "29.76992", "29.77011", "29.77001", "29.76919", "29.76903", "29.76927"],
"longitudes": ["-95.35593", "-95.35212", "-95.35041", "-95.34699", "-95.34133", "-95.33947", "-95.33574"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": 55, "segmentTravelTime": "1 minute 19 seconds", "segmentDist": "1.20",
"segmentFacilityType": "ML", "segmentOriginId": "1318", "segmentDestId": "1319",
"segmentLocation": "IH-10 East Eastbound from San Jacinto to Gregg"},
{"latitudes": ["29.76927", "29.77094", "29.77285", "29.77408", "29.7743", "29.77432", "29.7743"],
"longitudes": ["-95.33574", "-95.33053", "-95.3264", "-95.32277", "-95.32102", "-95.31537", "-95.30744"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": 55, "segmentTravelTime": "2 minutes 51 seconds", "segmentDist": "1.70",
"segmentFacilityType": "ML", "segmentOriginId": "1319", "segmentDestId": "1320",
"segmentLocation": "IH-10 East Eastbound from Gregg to Kress"}, {
"latitudes": ["29.7743", "29.77437", "29.77509", "29.77774", "29.77797", "29.77803", "29.7781", "29.77776",
"29.7769"],
"longitudes": ["-95.30744", "-95.30109", "-95.29869", "-95.2916", "-95.2882", "-95.28404", "-95.2801",
"-95.27704", "-95.2721"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300",
"0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100",
"0.00100"], "segmentDirection": "EB", "segmentSpeed": 61,
"segmentTravelTime": "2 minutes 4 seconds", "segmentDist": "2.10", "segmentFacilityType": "ML",
"segmentOriginId": "1320", "segmentDestId": "1321",
"segmentLocation": "IH-10 East Eastbound from Kress to Gellhorn"},
{"latitudes": ["29.7769", "29.77567", "29.77365", "29.77369", "29.7737", "29.77311"],
"longitudes": ["-95.2721", "-95.26401", "-95.25126", "-95.24736", "-95.23562", "-95.23291"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB",
"segmentSpeed": 58, "segmentTravelTime": "2 minutes 23 seconds", "segmentDist": "2.30",
"segmentFacilityType": "ML", "segmentOriginId": "1321", "segmentDestId": "1322",
"segmentLocation": "IH-10 East Eastbound from Gellhorn to Holland/John Ralston"},
{"latitudes": ["29.77311", "29.77044", "29.77036", "29.77065", "29.77129"],
"longitudes": ["-95.23291", "-95.22049", "-95.21832", "-95.21367", "-95.20622"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB",
"segmentSpeed": 61, "segmentTravelTime": "2 minutes 35 seconds", "segmentDist": "1.60",
"segmentFacilityType": "ML", "segmentOriginId": "1322", "segmentDestId": "1323",
"segmentLocation": "IH-10 East Eastbound from Holland/John Ralston to Normandy"},
{"latitudes": ["29.77129", "29.77156", "29.77067", "29.77055", "29.77123"],
"longitudes": ["-95.20622", "-95.19871", "-95.18658", "-95.18145", "-95.15359"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB",
"segmentSpeed": 67, "segmentTravelTime": "2 minutes 15 seconds", "segmentDist": "2.50",
"segmentFacilityType": "ML", "segmentOriginId": "1323", "segmentDestId": "1324",
"segmentLocation": "IH-10 East Eastbound from Normandy to Beltway 8-East"},
{"latitudes": ["29.77123", "29.77472", "29.77741", "29.77811"],
"longitudes": ["-95.15359", "-95.13844", "-95.12856", "-95.12441"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB", "segmentSpeed": 60,
"segmentTravelTime": "2 minutes 18 seconds", "segmentDist": "2.30", "segmentFacilityType": "ML",
"segmentOriginId": "1324", "segmentDestId": "1325",
"segmentLocation": "IH-10 East Eastbound from Beltway 8-East to Sheldon"},
{"latitudes": ["29.77811", "29.77904", "29.78118", "29.78504", "29.79047", "29.79135", "29.7923"],
"longitudes": ["-95.12441", "-95.12124", "-95.11609", "-95.10665", "-95.08958", "-95.08293", "-95.07965"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": 65, "segmentTravelTime": "3 minutes 35 seconds", "segmentDist": "2.80",
"segmentFacilityType": "ML", "segmentOriginId": "1325", "segmentDestId": "1326",
"segmentLocation": "IH-10 East Eastbound from Sheldon to Monmouth"},
{"latitudes": ["29.7923", "29.79526", "29.79494", "29.79393", "29.79092", "29.79074"],
"longitudes": ["-95.07965", "-95.07049", "-95.06671", "-95.06402", "-95.05741", "-95.05688"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB",
"segmentSpeed": 60, "segmentTravelTime": "2 minutes 42 seconds", "segmentDist": "1.70",
"segmentFacilityType": "ML", "segmentOriginId": "1326", "segmentDestId": "1327",
"segmentLocation": "IH-10 East Eastbound from Monmouth to Crosby-Lynchburg"},
{"latitudes": ["29.79074", "29.79082", "29.79754", "29.79937", "29.79972", "29.79984", "29.79998"],
"longitudes": ["-95.05688", "-95.04989", "-95.03189", "-95.02696", "-95.02532", "-95.02377", "-95.01672"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": 64, "segmentTravelTime": "2 minutes 53 seconds", "segmentDist": "2",
"segmentFacilityType": "ML", "segmentOriginId": "1327", "segmentDestId": "1339",
"segmentLocation": "IH-10 East Eastbound from Crosby-Lynchburg to Wade"},
{"latitudes": ["29.79998", "29.79984", "29.79972", "29.79937", "29.79754", "29.79082", "29.79074"],
"longitudes": ["-95.01672", "-95.02377", "-95.02532", "-95.02696", "-95.03189", "-95.04989", "-95.05688"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"segmentDirection": "WB", "segmentSpeed": 63, "segmentTravelTime": "2 minutes 55 seconds", "segmentDist": "2",
"segmentFacilityType": "ML", "segmentOriginId": "1338", "segmentDestId": "1328",
"segmentLocation": "IH-10 East Westbound from Wade to Crosby-Lynchburg"},
{"latitudes": ["29.79074", "29.79092", "29.79393", "29.79494", "29.79526", "29.7923"],
"longitudes": ["-95.05688", "-95.05741", "-95.06402", "-95.06671", "-95.07049", "-95.07965"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB",
"segmentSpeed": 66, "segmentTravelTime": "2 minutes 33 seconds", "segmentDist": "1.70",
"segmentFacilityType": "ML", "segmentOriginId": "1328", "segmentDestId": "1329",
"segmentLocation": "IH-10 East Westbound from Crosby-Lynchburg to Monmouth"},
{"latitudes": ["29.7923", "29.79135", "29.79047", "29.78504", "29.78118", "29.77904", "29.77811"],
"longitudes": ["-95.07965", "-95.08293", "-95.08958", "-95.10665", "-95.11609", "-95.12124", "-95.12441"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"segmentDirection": "WB", "segmentSpeed": 64, "segmentTravelTime": "3 minutes 38 seconds", "segmentDist": "2.80",
"segmentFacilityType": "ML", "segmentOriginId": "1329", "segmentDestId": "1330",
"segmentLocation": "IH-10 East Westbound from Monmouth to Sheldon"},
{"latitudes": ["29.77811", "29.77741", "29.77472", "29.77123"],
"longitudes": ["-95.12441", "-95.12856", "-95.13844", "-95.15359"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB", "segmentSpeed": 57,
"segmentTravelTime": "2 minutes 25 seconds", "segmentDist": "2.30", "segmentFacilityType": "ML",
"segmentOriginId": "1330", "segmentDestId": "1331",
"segmentLocation": "IH-10 East Westbound from Sheldon to Beltway 8-East"},
{"latitudes": ["29.77123", "29.77055", "29.77067", "29.77156", "29.77129"],
"longitudes": ["-95.15359", "-95.18145", "-95.18658", "-95.19871", "-95.20622"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB",
"segmentSpeed": 63, "segmentTravelTime": "2 minutes 22 seconds", "segmentDist": "2.50",
"segmentFacilityType": "ML", "segmentOriginId": "1331", "segmentDestId": "1332",
"segmentLocation": "IH-10 East Westbound from Beltway 8-East to Normandy"},
{"latitudes": ["29.77129", "29.77065", "29.77036", "29.77044", "29.77311"],
"longitudes": ["-95.20622", "-95.21367", "-95.21832", "-95.22049", "-95.23291"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB",
"segmentSpeed": 63, "segmentTravelTime": "2 minutes 32 seconds", "segmentDist": "1.60",
"segmentFacilityType": "ML", "segmentOriginId": "1332", "segmentDestId": "1333",
"segmentLocation": "IH-10 East Westbound from Normandy to Holland/John Ralston"},
{"latitudes": ["29.77311", "29.7737", "29.77369", "29.77365", "29.77567", "29.7769"],
"longitudes": ["-95.23291", "-95.23562", "-95.24736", "-95.25126", "-95.26401", "-95.2721"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB",
"segmentSpeed": 61, "segmentTravelTime": "2 minutes 16 seconds", "segmentDist": "2.30",
"segmentFacilityType": "ML", "segmentOriginId": "1333", "segmentDestId": "1334",
"segmentLocation": "IH-10 East Westbound from Holland/John Ralston to Gellhorn"}, {
"latitudes": ["29.7769", "29.77776", "29.7781", "29.77803", "29.77797", "29.77774", "29.77509", "29.77437",
"29.7743"],
"longitudes": ["-95.2721", "-95.27704", "-95.2801", "-95.28404", "-95.2882", "-95.2916", "-95.29869",
"-95.30109", "-95.30744"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300",
"0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100",
"0.00100"], "segmentDirection": "WB", "segmentSpeed": 56,
"segmentTravelTime": "2 minutes 14 seconds", "segmentDist": "2.10", "segmentFacilityType": "ML",
"segmentOriginId": "1334", "segmentDestId": "1335",
"segmentLocation": "IH-10 East Westbound from Gellhorn to Kress"},
{"latitudes": ["29.7743", "29.77432", "29.7743", "29.77408", "29.77285", "29.77094", "29.76927"],
"longitudes": ["-95.30744", "-95.31537", "-95.32102", "-95.32277", "-95.3264", "-95.33053", "-95.33574"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"segmentDirection": "WB", "segmentSpeed": 59, "segmentTravelTime": "2 minutes 43 seconds", "segmentDist": "1.70",
"segmentFacilityType": "ML", "segmentOriginId": "1335", "segmentDestId": "1336",
"segmentLocation": "IH-10 East Westbound from Kress to Gregg"},
{"latitudes": ["29.76927", "29.76903", "29.76919", "29.77001", "29.77011", "29.76992", "29.76827"],
"longitudes": ["-95.33574", "-95.33947", "-95.34133", "-95.34699", "-95.35041", "-95.35212", "-95.35593"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"segmentDirection": "WB", "segmentSpeed": 43, "segmentTravelTime": "2 minutes 41 seconds", "segmentDist": "1.20",
"segmentFacilityType": "ML", "segmentOriginId": "1336", "segmentDestId": "1337",
"segmentLocation": "IH-10 East Westbound from Gregg to San Jacinto"},
{"latitudes": ["29.76827", "29.76743", "29.76714", "29.76744", "29.76883", "29.77023"],
"longitudes": ["-95.35593", "-95.35827", "-95.35947", "-95.36116", "-95.36374", "-95.36509"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB",
"segmentSpeed": 61, "segmentTravelTime": "1 minute 59 seconds", "segmentDist": "1", "segmentFacilityType": "ML",
"segmentOriginId": "1337", "segmentDestId": "1314",
"segmentLocation": "IH-10 East Westbound from San Jacinto to IH-45 North"},
{"latitudes": ["30.23669", "30.20725"], "longitudes": ["-95.45862", "-95.45612"],
"latitudeOffsets": ["0.00100", "0.00100"], "longitudeOffsets": ["0.00400", "0.00400"], "segmentDirection": "SB",
"segmentSpeed": 64, "segmentTravelTime": "1 minute 25 seconds", "segmentDist": "1.60",
"segmentFacilityType": "ML", "segmentOriginId": "1267", "segmentDestId": "1265",
"segmentLocation": "IH-45 North Southbound from FM-1488 to SH-242"},
{"latitudes": ["30.20725", "30.17825"], "longitudes": ["-95.45612", "-95.45187"],
"latitudeOffsets": ["0.00100", "0.00100"], "longitudeOffsets": ["0.00400", "0.00400"], "segmentDirection": "SB",
"segmentSpeed": 57, "segmentTravelTime": "2 minutes 13 seconds", "segmentDist": "2.10",
"segmentFacilityType": "ML", "segmentOriginId": "1265", "segmentDestId": "1263",
"segmentLocation": "IH-45 North Southbound from SH-242 to Research Forest"},
{"latitudes": ["30.17825", "30.16737", "30.15706"], "longitudes": ["-95.45187", "-95.45087", "-95.44925"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100"], "longitudeOffsets": ["0.00400", "0.00400", "0.00400"],
"segmentDirection": "SB", "segmentSpeed": 65, "segmentTravelTime": "2 minutes 7 seconds", "segmentDist": "2.30",
"segmentFacilityType": "ML", "segmentOriginId": "1263", "segmentDestId": "1261",
"segmentLocation": "IH-45 North Southbound from Research Forest to Woodlands Parkway"},
{"latitudes": ["30.15706", "30.12675", "30.11898", "30.1135", "30.112", "30.1067"],
"longitudes": ["-95.44925", "-95.44319", "-95.44137", "-95.43712", "-95.43694", "-95.4356"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "SB",
"segmentSpeed": 69, "segmentTravelTime": "2 minutes 11 seconds", "segmentDist": "2.50",
"segmentFacilityType": "ML", "segmentOriginId": "1261", "segmentDestId": "1259",
"segmentLocation": "IH-45 North Southbound from Woodlands Parkway to Hardy Toll Road"},
{"latitudes": ["30.1067", "30.08868", "30.08292", "30.07495", "30.06658"],
"longitudes": ["-95.4356", "-95.4353", "-95.43673", "-95.4359", "-95.43445"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "SB",
"segmentSpeed": 64, "segmentTravelTime": "3 minutes 7 seconds", "segmentDist": "3.30",
"segmentFacilityType": "ML", "segmentOriginId": "1259", "segmentDestId": "1257",
"segmentLocation": "IH-45 North Southbound from Hardy Toll Road to Louetta"},
{"latitudes": ["30.06658", "30.05258", "30.03799", "30.02852", "30.0212"],
"longitudes": ["-95.43445", "-95.4319", "-95.42933", "-95.4292", "-95.42895"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00500"], "segmentDirection": "SB",
"segmentSpeed": 64, "segmentTravelTime": "2 minutes 26 seconds", "segmentDist": "2.60",
"segmentFacilityType": "ML", "segmentOriginId": "1257", "segmentDestId": "1255",
"segmentLocation": "IH-45 North Southbound from Louetta to FM-1960"},
{"latitudes": ["30.0212", "30.006", "29.9978", "29.98195"],
"longitudes": ["-95.42895", "-95.427", "-95.42568", "-95.42242"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00500", "0.00700", "0.00700", "0.00700"], "segmentDirection": "SB", "segmentSpeed": 64,
"segmentTravelTime": "3 minutes 33 seconds", "segmentDist": "2.70", "segmentFacilityType": "ML",
"segmentOriginId": "1255", "segmentDestId": "1253",
"segmentLocation": "IH-45 North Southbound from FM-1960 to Airtex"},
{"latitudes": ["29.98195", "29.96582", "29.95828", "29.9491"],
"longitudes": ["-95.42242", "-95.4189", "-95.41858", "-95.41698"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00700", "0.00700", "0.00700", "0.00700"], "segmentDirection": "SB", "segmentSpeed": 64,
"segmentTravelTime": "2 minutes 14 seconds", "segmentDist": "2.40", "segmentFacilityType": "ML",
"segmentOriginId": "1253", "segmentDestId": "1251",
"segmentLocation": "IH-45 North Southbound from Airtex to Greens"},
{"latitudes": ["29.9491", "29.94052", "29.93178", "29.91502"],
"longitudes": ["-95.41698", "-95.41372", "-95.4118", "-95.4126"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00700", "0.00700", "0.00700", "0.00700"], "segmentDirection": "SB", "segmentSpeed": 62,
"segmentTravelTime": "3 minutes 53 seconds", "segmentDist": "3", "segmentFacilityType": "ML",
"segmentOriginId": "1251", "segmentDestId": "1249",
"segmentLocation": "IH-45 North Southbound from Greens to West"},
{"latitudes": ["29.91502", "29.8982", "29.88902", "29.87603", "29.8699"],
"longitudes": ["-95.4126", "-95.41225", "-95.41205", "-95.41152", "-95.40778"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00700", "0.00700", "0.00700", "0.00700", "0.00700"], "segmentDirection": "SB",
"segmentSpeed": 64, "segmentTravelTime": "3 minutes 54 seconds", "segmentDist": "3.10",
"segmentFacilityType": "ML", "segmentOriginId": "1249", "segmentDestId": "1247",
"segmentLocation": "IH-45 North Southbound from West to Little York"},
{"latitudes": ["29.8699", "29.8595", "29.8456"], "longitudes": ["-95.40778", "-95.40242", "-95.39278"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100"], "longitudeOffsets": ["0.00700", "0.00700", "0.00700"],
"segmentDirection": "SB", "segmentSpeed": 63, "segmentTravelTime": "2 minutes 49 seconds", "segmentDist": "1.90",
"segmentFacilityType": "ML", "segmentOriginId": "1247", "segmentDestId": "1245",
"segmentLocation": "IH-45 North Southbound from Little York to Tidwell"},
{"latitudes": ["29.8456", "29.83325", "29.82868", "29.81395", "29.80758", "29.8038"],
"longitudes": ["-95.39278", "-95.3843", "-95.3811", "-95.3756", "-95.37333", "-95.3726"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00700", "0.00700", "0.00700", "0.00700", "0.00700", "0.00700"], "segmentDirection": "SB",
"segmentSpeed": 59, "segmentTravelTime": "3 minutes 3 seconds", "segmentDist": "3", "segmentFacilityType": "ML",
"segmentOriginId": "1245", "segmentDestId": "1243",
"segmentLocation": "IH-45 North Southbound from Tidwell to Cavalcade"}, {
"latitudes": ["29.8038", "29.7981", "29.7899", "29.78762", "29.78555", "29.7838", "29.78099", "29.7793",
"29.77702", "29.77393", "29.77135", "29.76898", "29.7655", "29.7655", "29.76378", "29.7624",
"29.76015"],
"longitudes": ["-95.3726", "-95.37192", "-95.37192", "-95.37112", "-95.36958", "-95.36869", "-95.36832",
"-95.36849", "-95.36858", "-95.36677", "-95.36521", "-95.36566", "-95.3672", "-95.3672",
"-95.36928", "-95.37222", "-95.37412"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100",
"0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "-0.0010", "-0.0010", "-0.0010",
"0.00000"],
"longitudeOffsets": ["0.00700", "0.00700", "0.00700", "0.00700", "0.00700", "0.00700", "0.00700", "0.00700",
"0.00700", "0.00700", "0.00700", "0.00700", "0.00700", "0.00700", "0.00700", "0.00700",
"0.00700"], "segmentDirection": "SB", "segmentSpeed": 59,
"segmentTravelTime": "3 minutes 50 seconds", "segmentDist": "2.80", "segmentFacilityType": "ML",
"segmentOriginId": "1243", "segmentDestId": "1239",
"segmentLocation": "IH-45 North Southbound from Cavalcade to Allen Parkway"}, {
"latitudes": ["29.76015", "29.7624", "29.76378", "29.7655", "29.7655", "29.76898", "29.77135", "29.77393",
"29.77702", "29.7793", "29.78099", "29.7838", "29.78555", "29.78762", "29.7899", "29.7981",
"29.8038"],
"longitudes": ["-95.37412", "-95.37222", "-95.36928", "-95.3672", "-95.3672", "-95.36566", "-95.36521",
"-95.36677", "-95.36858", "-95.36849", "-95.36832", "-95.36869", "-95.36958", "-95.37112",
"-95.37192", "-95.37192", "-95.3726"],
"latitudeOffsets": ["0.00000", "-0.0010", "-0.0010", "-0.0010", "0.00100", "0.00100", "0.00100", "0.00100",
"0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100",
"0.00100"],
"longitudeOffsets": ["0.00700", "0.00700", "0.00700", "0.00700", "0.00700", "0.00700", "0.00700", "0.00700",
"0.00700", "0.00700", "0.00700", "0.00700", "0.00700", "0.00700", "0.00700", "0.00700",
"0.00700"], "segmentDirection": "NB", "segmentSpeed": 58,
"segmentTravelTime": "3 minutes 55 seconds", "segmentDist": "2.80", "segmentFacilityType": "ML",
"segmentOriginId": "1238", "segmentDestId": "1242",
"segmentLocation": "IH-45 North Northbound from Allen Parkway to Cavalcade"},
{"latitudes": ["29.8038", "29.80758", "29.81395", "29.82868", "29.83325", "29.8456"],
"longitudes": ["-95.3726", "-95.37333", "-95.3756", "-95.3811", "-95.3843", "-95.39278"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00700", "0.00700", "0.00700", "0.00700", "0.00700", "0.00700"], "segmentDirection": "NB",
"segmentSpeed": 59, "segmentTravelTime": "3 minutes 2 seconds", "segmentDist": "3", "segmentFacilityType": "ML",
"segmentOriginId": "1242", "segmentDestId": "1244",
"segmentLocation": "IH-45 North Northbound from Cavalcade to Tidwell"},
{"latitudes": ["29.8456", "29.8595", "29.8699"], "longitudes": ["-95.39278", "-95.40242", "-95.40778"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100"], "longitudeOffsets": ["0.00700", "0.00700", "0.00700"],
"segmentDirection": "NB", "segmentSpeed": 61, "segmentTravelTime": "2 minutes 52 seconds", "segmentDist": "1.90",
"segmentFacilityType": "ML", "segmentOriginId": "1244", "segmentDestId": "1246",
"segmentLocation": "IH-45 North Northbound from Tidwell to Little York"},
{"latitudes": ["29.8699", "29.87603", "29.88902", "29.8982", "29.91502"],
"longitudes": ["-95.40778", "-95.41152", "-95.41205", "-95.41225", "-95.4126"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00700", "0.00700", "0.00700", "0.00700", "0.00700"], "segmentDirection": "NB",
"segmentSpeed": 65, "segmentTravelTime": "3 minutes 51 seconds", "segmentDist": "3.10",
"segmentFacilityType": "ML", "segmentOriginId": "1246", "segmentDestId": "1248",
"segmentLocation": "IH-45 North Northbound from Little York to West"},
{"latitudes": ["29.91502", "29.93178", "29.94052", "29.9491"],
"longitudes": ["-95.4126", "-95.4118", "-95.41372", "-95.41698"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00700", "0.00700", "0.00700", "0.00700"], "segmentDirection": "NB", "segmentSpeed": 62,
"segmentTravelTime": "3 minutes 54 seconds", "segmentDist": "3", "segmentFacilityType": "ML",
"segmentOriginId": "1248", "segmentDestId": "1250",
"segmentLocation": "IH-45 North Northbound from West to Greens"},
{"latitudes": ["29.9491", "29.95828", "29.96582", "29.98195"],
"longitudes": ["-95.41698", "-95.41858", "-95.4189", "-95.42242"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00700", "0.00700", "0.00700", "0.00700"], "segmentDirection": "NB", "segmentSpeed": 59,
"segmentTravelTime": "2 minutes 27 seconds", "segmentDist": "2.40", "segmentFacilityType": "ML",
"segmentOriginId": "1250", "segmentDestId": "1252",
"segmentLocation": "IH-45 North Northbound from Greens to Airtex"},
{"latitudes": ["29.98195", "29.9978", "30.006", "30.0212"],
"longitudes": ["-95.42242", "-95.42568", "-95.427", "-95.42895"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00700", "0.00700", "0.00700", "0.00500"], "segmentDirection": "NB", "segmentSpeed": 63,
"segmentTravelTime": "3 minutes 35 seconds", "segmentDist": "2.70", "segmentFacilityType": "ML",
"segmentOriginId": "1252", "segmentDestId": "1254",
"segmentLocation": "IH-45 North Northbound from Airtex to FM-1960"},
{"latitudes": ["30.0212", "30.02852", "30.03799", "30.05258", "30.06658"],
"longitudes": ["-95.42895", "-95.4292", "-95.42933", "-95.4319", "-95.43445"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00500", "0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "NB",
"segmentSpeed": 65, "segmentTravelTime": "2 minutes 24 seconds", "segmentDist": "2.60",
"segmentFacilityType": "ML", "segmentOriginId": "1254", "segmentDestId": "1256",
"segmentLocation": "IH-45 North Northbound from FM-1960 to Louetta"},
{"latitudes": ["30.06658", "30.07495", "30.08292", "30.08868", "30.1067"],
"longitudes": ["-95.43445", "-95.4359", "-95.43673", "-95.4353", "-95.4356"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "NB",
"segmentSpeed": 60, "segmentTravelTime": "3 minutes 18 seconds", "segmentDist": "3.30",
"segmentFacilityType": "ML", "segmentOriginId": "1256", "segmentDestId": "1258",
"segmentLocation": "IH-45 North Northbound from Louetta to Hardy Toll Road"},
{"latitudes": ["30.1067", "30.112", "30.1135", "30.11898", "30.12675", "30.15706"],
"longitudes": ["-95.4356", "-95.43694", "-95.43712", "-95.44137", "-95.44319", "-95.44925"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "NB",
"segmentSpeed": 73, "segmentTravelTime": "2 minutes 4 seconds", "segmentDist": "2.50",
"segmentFacilityType": "ML", "segmentOriginId": "1258", "segmentDestId": "1260",
"segmentLocation": "IH-45 North Northbound from Hardy Toll Road to Woodlands Parkway"},
{"latitudes": ["30.15706", "30.16737", "30.17825"], "longitudes": ["-95.44925", "-95.45087", "-95.45187"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100"], "longitudeOffsets": ["0.00400", "0.00400", "0.00400"],
"segmentDirection": "NB", "segmentSpeed": 60, "segmentTravelTime": "2 minutes 18 seconds", "segmentDist": "2.30",
"segmentFacilityType": "ML", "segmentOriginId": "1260", "segmentDestId": "1262",
"segmentLocation": "IH-45 North Northbound from Woodlands Parkway to Research Forest"},
{"latitudes": ["30.17825", "30.20725"], "longitudes": ["-95.45187", "-95.45612"],
"latitudeOffsets": ["0.00100", "0.00100"], "longitudeOffsets": ["0.00400", "0.00400"], "segmentDirection": "NB",
"segmentSpeed": 61, "segmentTravelTime": "2 minutes 3 seconds", "segmentDist": "2.10",
"segmentFacilityType": "ML", "segmentOriginId": "1262", "segmentDestId": "1264",
"segmentLocation": "IH-45 North Northbound from Research Forest to SH-242"},
{"latitudes": ["30.20725", "30.23669"], "longitudes": ["-95.45612", "-95.45862"],
"latitudeOffsets": ["0.00100", "0.00100"], "longitudeOffsets": ["0.00400", "0.00400"], "segmentDirection": "NB",
"segmentSpeed": 61, "segmentTravelTime": "1 minute 28 seconds", "segmentDist": "1.60",
"segmentFacilityType": "ML", "segmentOriginId": "1264", "segmentDestId": "1266",
"segmentLocation": "IH-45 North Northbound from SH-242 to FM-1488"}, {
"latitudes": ["30.0212", "30.006", "29.9978", "29.98195", "29.96582", "29.95828", "29.9491", "29.94052",
"29.93178"],
"longitudes": ["-95.42895", "-95.427", "-95.42568", "-95.42242", "-95.4189", "-95.41858", "-95.41698",
"-95.41372", "-95.4118"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100",
"0.00100"],
"longitudeOffsets": ["0.00500", "0.00700", "0.00700", "0.00700", "0.00700", "0.00700", "0.00700", "0.00700",
"0.00700"], "segmentDirection": "SB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "7", "segmentFacilityType": "HOV",
"segmentOriginId": "287", "segmentDestId": "64",
"segmentLocation": "IH-45 North HOV Southbound from FM-1960 to Aldine-Bender (FM-525)"},
{"latitudes": ["29.93178", "29.91502", "29.8982", "29.88902", "29.87603"],
"longitudes": ["-95.4118", "-95.4126", "-95.41225", "-95.41205", "-95.41152"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00700", "0.00700", "0.00700", "0.00700", "0.00700"], "segmentDirection": "SB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "3.60", "segmentFacilityType": "HOV",
"segmentOriginId": "64", "segmentDestId": "65",
"segmentLocation": "IH-45 North HOV Southbound from Aldine-Bender (FM-525) to North Shepherd"},
{"latitudes": ["29.87603", "29.8699", "29.8595", "29.8456", "29.83325", "29.82868"],
"longitudes": ["-95.41152", "-95.40778", "-95.40242", "-95.39278", "-95.3843", "-95.3811"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00700", "0.00700", "0.00700", "0.00700", "0.00700", "0.00700"], "segmentDirection": "SB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "4.60", "segmentFacilityType": "HOV",
"segmentOriginId": "65", "segmentDestId": "66",
"segmentLocation": "IH-45 North HOV Southbound from North Shepherd to Crosstimbers"}, {
"latitudes": ["29.82868", "29.81395", "29.80758", "29.8038", "29.7981", "29.7899", "29.78762", "29.78555",
"29.7838", "29.78099", "29.7793"],
"longitudes": ["-95.3811", "-95.3756", "-95.37333", "-95.3726", "-95.37192", "-95.37192", "-95.37112",
"-95.36958", "-95.36869", "-95.36832", "-95.36849"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100",
"0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00700", "0.00700", "0.00700", "0.00700", "0.00700", "0.00700", "0.00700", "0.00700",
"0.00700", "0.00700", "0.00700"], "segmentDirection": "SB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "3.50", "segmentFacilityType": "HOV",
"segmentOriginId": "66", "segmentDestId": "506",
"segmentLocation": "IH-45 North HOV Southbound from Crosstimbers to Quitman"}, {
"latitudes": ["29.76015", "29.75779", "29.75279", "29.75171", "29.75095", "29.7486", "29.74454", "29.74279",
"29.73558"],
"longitudes": ["-95.37412", "-95.37469", "-95.37457", "-95.37416", "-95.37355", "-95.37012", "-95.36275",
"-95.35842", "-95.34748"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000", "0.00000", "0.00000", "0.00300", "0.00300", "0.00300",
"0.00300"],
"longitudeOffsets": ["0.00700", "0.00600", "0.00500", "0.00500", "0.00500", "0.00400", "0.00400", "0.00400",
"0.00400"], "segmentDirection": "SB", "segmentSpeed": 61,
"segmentTravelTime": "3 minutes 8 seconds", "segmentDist": "3.20", "segmentFacilityType": "ML",
"segmentOriginId": "1239", "segmentDestId": "1237",
"segmentLocation": "IH-45 Gulf Southbound from Allen Parkway to Scott"},
{"latitudes": ["29.73558", "29.73168", "29.72812", "29.71832"],
"longitudes": ["-95.34748", "-95.34218", "-95.33702", "-95.32068"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00400", "0.00500", "0.00600", "0.00600"], "segmentDirection": "SB", "segmentSpeed": 58,
"segmentTravelTime": "2 minutes 4 seconds", "segmentDist": "2", "segmentFacilityType": "ML",
"segmentOriginId": "1237", "segmentDestId": "1235",
"segmentLocation": "IH-45 Gulf Southbound from Scott to Telephone"},
{"latitudes": ["29.71832", "29.71502", "29.70848", "29.70212", "29.69732"],
"longitudes": ["-95.32068", "-95.31537", "-95.30532", "-95.29552", "-95.28852"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00600", "0.00600", "0.00600", "0.00600", "0.00600"], "segmentDirection": "SB",
"segmentSpeed": 64, "segmentTravelTime": "3 minutes 32 seconds", "segmentDist": "2.70",
"segmentFacilityType": "ML", "segmentOriginId": "1235", "segmentDestId": "1233",
"segmentLocation": "IH-45 Gulf Southbound from Telephone to IH-610 South Loop"},
{"latitudes": ["29.69732", "29.68542", "29.67562", "29.66918"],
"longitudes": ["-95.28852", "-95.27742", "-95.26773", "-95.26222"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00600", "0.00600", "0.00600", "0.00600"], "segmentDirection": "SB", "segmentSpeed": 58,
"segmentTravelTime": "3 minutes 42 seconds", "segmentDist": "2.60", "segmentFacilityType": "ML",
"segmentOriginId": "1233", "segmentDestId": "1231",
"segmentLocation": "IH-45 Gulf Southbound from IH-610 South Loop to Monroe"},
{"latitudes": ["29.66918", "29.65242", "29.64072"], "longitudes": ["-95.26222", "-95.25138", "-95.24328"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300"], "longitudeOffsets": ["0.00600", "0.00600", "0.00600"],
"segmentDirection": "SB", "segmentSpeed": 64, "segmentTravelTime": "2 minutes 47 seconds", "segmentDist": "1.90",
"segmentFacilityType": "ML", "segmentOriginId": "1231", "segmentDestId": "1229",
"segmentLocation": "IH-45 Gulf Southbound from Monroe to Edgebrook"},
{"latitudes": ["29.64072", "29.62771", "29.61609"], "longitudes": ["-95.24328", "-95.22874", "-95.21608"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300"], "longitudeOffsets": ["0.00600", "0.00600", "0.00500"],
"segmentDirection": "SB", "segmentSpeed": 68, "segmentTravelTime": "2 minutes 7 seconds", "segmentDist": "2.40",
"segmentFacilityType": "ML", "segmentOriginId": "1229", "segmentDestId": "1205",
"segmentLocation": "IH-45 Gulf Southbound from Edgebrook to Fuqua"},
{"latitudes": ["29.61609", "29.60651", "29.58663"], "longitudes": ["-95.21608", "-95.20536", "-95.18433"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300"], "longitudeOffsets": ["0.00500", "0.00400", "0.00300"],
"segmentDirection": "SB", "segmentSpeed": 63, "segmentTravelTime": "2 minutes 0 seconds", "segmentDist": "2.10",
"segmentFacilityType": "ML", "segmentOriginId": "1205", "segmentDestId": "1206",
"segmentLocation": "IH-45 Gulf Southbound from Fuqua to Dixie Farm Road (FM-1959)"},
{"latitudes": ["29.58663", "29.55348"], "longitudes": ["-95.18433", "-95.15418"],
"latitudeOffsets": ["0.00300", "0.00300"], "longitudeOffsets": ["0.00300", "0.00300"], "segmentDirection": "SB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "3", "segmentFacilityType": "ML",
"segmentOriginId": "1206", "segmentDestId": "1207",
"segmentLocation": "IH-45 Gulf Southbound from Dixie Farm Road (FM-1959) to El Dorado"},
{"latitudes": ["29.55348", "29.54032", "29.52857", "29.52262", "29.508"],
"longitudes": ["-95.15418", "-95.14167", "-95.13068", "-95.12578", "-95.11703"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300"], "segmentDirection": "SB",
"segmentSpeed": 62, "segmentTravelTime": "4 minutes 11 seconds", "segmentDist": "4.30",
"segmentFacilityType": "ML", "segmentOriginId": "1207", "segmentDestId": "1208",
"segmentLocation": "IH-45 Gulf Southbound from El Dorado to Clear Creek"},
{"latitudes": ["29.508", "29.50306", "29.49112", "29.48869", "29.47144", "29.466"],
"longitudes": ["-95.11703", "-95.11403", "-95.10684", "-95.10547", "-95.09503", "-95.09072"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300"], "segmentDirection": "SB",
"segmentSpeed": 70, "segmentTravelTime": "3 minutes 56 seconds", "segmentDist": "3.40",
"segmentFacilityType": "ML", "segmentOriginId": "1208", "segmentDestId": "1213",
"segmentLocation": "IH-45 Gulf Southbound from Clear Creek to FM-646"},
{"latitudes": ["29.466", "29.45637", "29.449", "29.44087", "29.43031"],
"longitudes": ["-95.09072", "-95.08303", "-95.07697", "-95.0704", "-95.06003"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300"], "segmentDirection": "SB",
"segmentSpeed": 68, "segmentTravelTime": "3 minutes 43 seconds", "segmentDist": "3.10",
"segmentFacilityType": "ML", "segmentOriginId": "1213", "segmentDestId": "1215",
"segmentLocation": "IH-45 Gulf Southbound from FM-646 to Holland"},
{"latitudes": ["29.43031", "29.40268", "29.40068", "29.39018"],
"longitudes": ["-95.06003", "-95.03303", "-95.03096", "-95.02078"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"], "segmentDirection": "SB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "3.60", "segmentFacilityType": "ML",
"segmentOriginId": "1215", "segmentDestId": "1217",
"segmentLocation": "IH-45 Gulf Southbound from Holland to Delaney"},
{"latitudes": ["29.39018", "29.38181", "29.37818", "29.36612"],
"longitudes": ["-95.02078", "-95.01215", "-95.00765", "-94.99259"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"], "segmentDirection": "SB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "2.40", "segmentFacilityType": "ML",
"segmentOriginId": "1217", "segmentDestId": "1219",
"segmentLocation": "IH-45 Gulf Southbound from Delaney to Vauthier"},
{"latitudes": ["29.36612", "29.36142", "29.35412", "29.34574", "29.33062"],
"longitudes": ["-94.99259", "-94.98711", "-94.97152", "-94.95246", "-94.93377"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300"], "segmentDirection": "SB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "3.80", "segmentFacilityType": "ML",
"segmentOriginId": "1219", "segmentDestId": "1221",
"segmentLocation": "IH-45 Gulf Southbound from Vauthier to SH-6/SH-146"},
{"latitudes": ["29.33062", "29.31068", "29.30456"], "longitudes": ["-94.93377", "-94.90833", "-94.90446"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300"], "longitudeOffsets": ["0.00300", "0.00300", "0.00300"],
"segmentDirection": "SB", "segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "2.70",
"segmentFacilityType": "ML", "segmentOriginId": "1221", "segmentDestId": "1223",
"segmentLocation": "IH-45 Gulf Southbound from SH-6/SH-146 to Tiki Island"},
{"latitudes": ["29.30456", "29.30218", "29.28931", "29.28554", "29.28537"],
"longitudes": ["-94.90446", "-94.89958", "-94.87346", "-94.8648", "-94.86302"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00150"], "segmentDirection": "SB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "2.80", "segmentFacilityType": "ML",
"segmentOriginId": "1223", "segmentDestId": "1225",
"segmentLocation": "IH-45 Gulf Southbound from Tiki Island to Harborside"},
{"latitudes": ["29.28537", "29.28568", "29.28643", "29.28862", "29.28849"],
"longitudes": ["-94.86302", "-94.86014", "-94.85964", "-94.85039", "-94.83783"],
"latitudeOffsets": ["0.00300", "0.00200", "0.00200", "0.00200", "0.00200"],
"longitudeOffsets": ["0.00150", "0.00000", "0.00000", "0.00000", "0.00000"], "segmentDirection": "SB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "2", "segmentFacilityType": "ML",
"segmentOriginId": "1225", "segmentDestId": "1227",
"segmentLocation": "IH-45 Gulf Southbound from Harborside to 61st"},
{"latitudes": ["29.28849", "29.28862", "29.28643", "29.28568", "29.28537"],
"longitudes": ["-94.83783", "-94.85039", "-94.85964", "-94.86014", "-94.86302"],
"latitudeOffsets": ["0.00200", "0.00200", "0.00200", "0.00200", "0.00300"],
"longitudeOffsets": ["0.00000", "0.00000", "0.00000", "0.00000", "0.00150"], "segmentDirection": "NB",
"segmentSpeed": 51, "segmentTravelTime": "2 minutes 21 seconds", "segmentDist": "2", "segmentFacilityType": "ML",
"segmentOriginId": "1226", "segmentDestId": "1224",
"segmentLocation": "IH-45 Gulf Northbound from 61st to Harborside"},
{"latitudes": ["29.28537", "29.28554", "29.28931", "29.30218", "29.30456"],
"longitudes": ["-94.86302", "-94.8648", "-94.87346", "-94.89958", "-94.90446"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00150", "0.00300", "0.00300", "0.00300", "0.00300"], "segmentDirection": "NB",
"segmentSpeed": 64, "segmentTravelTime": "3 minutes 38 seconds", "segmentDist": "2.80",
"segmentFacilityType": "ML", "segmentOriginId": "1224", "segmentDestId": "1222",
"segmentLocation": "IH-45 Gulf Northbound from Harborside to Tiki Island"},
{"latitudes": ["29.30456", "29.31068", "29.33062"], "longitudes": ["-94.90446", "-94.90833", "-94.93377"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300"], "longitudeOffsets": ["0.00300", "0.00300", "0.00300"],
"segmentDirection": "NB", "segmentSpeed": 71, "segmentTravelTime": "2 minutes 17 seconds", "segmentDist": "2.70",
"segmentFacilityType": "ML", "segmentOriginId": "1222", "segmentDestId": "1220",
"segmentLocation": "IH-45 Gulf Northbound from Tiki Island to SH-6/SH-146"},
{"latitudes": ["29.33062", "29.34574", "29.35412", "29.36142", "29.36612"],
"longitudes": ["-94.93377", "-94.95246", "-94.97152", "-94.98711", "-94.99259"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300"], "segmentDirection": "NB",
"segmentSpeed": 63, "segmentTravelTime": "4 minutes 37 seconds", "segmentDist": "3.80",
"segmentFacilityType": "ML", "segmentOriginId": "1220", "segmentDestId": "1218",
"segmentLocation": "IH-45 Gulf Northbound from SH-6/SH-146 to Vauthier"},
{"latitudes": ["29.36612", "29.37818", "29.38181", "29.39018"],
"longitudes": ["-94.99259", "-95.00765", "-95.01215", "-95.02078"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"], "segmentDirection": "NB", "segmentSpeed": 66,
"segmentTravelTime": "2 minutes 10 seconds", "segmentDist": "2.40", "segmentFacilityType": "ML",
"segmentOriginId": "1218", "segmentDestId": "1216",
"segmentLocation": "IH-45 Gulf Northbound from Vauthier to Delaney"},
{"latitudes": ["29.39018", "29.40068", "29.40268", "29.43031"],
"longitudes": ["-95.02078", "-95.03096", "-95.03303", "-95.06003"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"], "segmentDirection": "NB", "segmentSpeed": 65,
"segmentTravelTime": "3 minutes 20 seconds", "segmentDist": "3.60", "segmentFacilityType": "ML",
"segmentOriginId": "1216", "segmentDestId": "1214",
"segmentLocation": "IH-45 Gulf Northbound from Delaney to Holland"},
{"latitudes": ["29.43031", "29.44087", "29.449", "29.45637", "29.466"],
"longitudes": ["-95.06003", "-95.0704", "-95.07697", "-95.08303", "-95.09072"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300"], "segmentDirection": "NB",
"segmentSpeed": 68, "segmentTravelTime": "3 minutes 45 seconds", "segmentDist": "3.10",
"segmentFacilityType": "ML", "segmentOriginId": "1214", "segmentDestId": "1212",
"segmentLocation": "IH-45 Gulf Northbound from Holland to FM-646"},
{"latitudes": ["29.466", "29.47144", "29.48869", "29.49112", "29.50306", "29.508"],
"longitudes": ["-95.09072", "-95.09503", "-95.10547", "-95.10684", "-95.11403", "-95.11703"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300"], "segmentDirection": "NB",
"segmentSpeed": 67, "segmentTravelTime": "3 minutes 3 seconds", "segmentDist": "3.40",
"segmentFacilityType": "ML", "segmentOriginId": "1212", "segmentDestId": "1201",
"segmentLocation": "IH-45 Gulf Northbound from FM-646 to Clear Creek"},
{"latitudes": ["29.508", "29.52262", "29.52857", "29.54032", "29.55348"],
"longitudes": ["-95.11703", "-95.12578", "-95.13068", "-95.14167", "-95.15418"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300"], "segmentDirection": "NB",
"segmentSpeed": 66, "segmentTravelTime": "4 minutes 56 seconds", "segmentDist": "4.30",
"segmentFacilityType": "ML", "segmentOriginId": "1201", "segmentDestId": "1202",
"segmentLocation": "IH-45 Gulf Northbound from Clear Creek to El Dorado"},
{"latitudes": ["29.55348", "29.58663"], "longitudes": ["-95.15418", "-95.18433"],
"latitudeOffsets": ["0.00300", "0.00300"], "longitudeOffsets": ["0.00300", "0.00300"], "segmentDirection": "NB",
"segmentSpeed": 68, "segmentTravelTime": "3 minutes 40 seconds", "segmentDist": "3", "segmentFacilityType": "ML",
"segmentOriginId": "1202", "segmentDestId": "1203",
"segmentLocation": "IH-45 Gulf Northbound from El Dorado to Dixie Farm Road (FM-1959)"},
{"latitudes": ["29.58663", "29.60651", "29.61609"], "longitudes": ["-95.18433", "-95.20536", "-95.21608"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300"], "longitudeOffsets": ["0.00300", "0.00400", "0.00500"],
"segmentDirection": "NB", "segmentSpeed": 66, "segmentTravelTime": "2 minutes 55 seconds", "segmentDist": "2.10",
"segmentFacilityType": "ML", "segmentOriginId": "1203", "segmentDestId": "1204",
"segmentLocation": "IH-45 Gulf Northbound from Dixie Farm Road (FM-1959) to Fuqua"},
{"latitudes": ["29.61609", "29.62771", "29.64072"], "longitudes": ["-95.21608", "-95.22874", "-95.24328"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300"], "longitudeOffsets": ["0.00500", "0.00600", "0.00600"],
"segmentDirection": "NB", "segmentSpeed": 65, "segmentTravelTime": "2 minutes 12 seconds", "segmentDist": "2.40",
"segmentFacilityType": "ML", "segmentOriginId": "1204", "segmentDestId": "1228",
"segmentLocation": "IH-45 Gulf Northbound from Fuqua to Edgebrook"},
{"latitudes": ["29.64072", "29.65242", "29.66918"], "longitudes": ["-95.24328", "-95.25138", "-95.26222"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300"], "longitudeOffsets": ["0.00600", "0.00600", "0.00600"],
"segmentDirection": "NB", "segmentSpeed": 67, "segmentTravelTime": "2 minutes 42 seconds", "segmentDist": "1.90",
"segmentFacilityType": "ML", "segmentOriginId": "1228", "segmentDestId": "1230",
"segmentLocation": "IH-45 Gulf Northbound from Edgebrook to Monroe"},
{"latitudes": ["29.66918", "29.67562", "29.68542", "29.69732"],
"longitudes": ["-95.26222", "-95.26773", "-95.27742", "-95.28852"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00600", "0.00600", "0.00600", "0.00600"], "segmentDirection": "NB", "segmentSpeed": 63,
"segmentTravelTime": "2 minutes 28 seconds", "segmentDist": "2.60", "segmentFacilityType": "ML",
"segmentOriginId": "1230", "segmentDestId": "1232",
"segmentLocation": "IH-45 Gulf Northbound from Monroe to IH-610 South Loop"},
{"latitudes": ["29.69732", "29.70212", "29.70848", "29.71502", "29.71832"],
"longitudes": ["-95.28852", "-95.29552", "-95.30532", "-95.31537", "-95.32068"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00600", "0.00600", "0.00600", "0.00600", "0.00600"], "segmentDirection": "NB",
"segmentSpeed": 62, "segmentTravelTime": "3 minutes 36 seconds", "segmentDist": "2.70",
"segmentFacilityType": "ML", "segmentOriginId": "1232", "segmentDestId": "1234",
"segmentLocation": "IH-45 Gulf Northbound from IH-610 South Loop to Telephone"},
{"latitudes": ["29.71832", "29.72812", "29.73168", "29.73558"],
"longitudes": ["-95.32068", "-95.33702", "-95.34218", "-95.34748"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00600", "0.00600", "0.00500", "0.00400"], "segmentDirection": "NB", "segmentSpeed": 65,
"segmentTravelTime": "2 minutes 51 seconds", "segmentDist": "2", "segmentFacilityType": "ML",
"segmentOriginId": "1234", "segmentDestId": "1236",
"segmentLocation": "IH-45 Gulf Northbound from Telephone to Scott"}, {
"latitudes": ["29.73558", "29.74279", "29.74454", "29.7486", "29.75095", "29.75171", "29.75279", "29.75779",
"29.76015"],
"longitudes": ["-95.34748", "-95.35842", "-95.36275", "-95.37012", "-95.37355", "-95.37416", "-95.37457",
"-95.37469", "-95.37412"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00000", "0.00000", "0.00000", "0.00000",
"0.00000"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00500", "0.00500", "0.00500", "0.00600",
"0.00700"], "segmentDirection": "NB", "segmentSpeed": 56,
"segmentTravelTime": "3 minutes 26 seconds", "segmentDist": "3.20", "segmentFacilityType": "ML",
"segmentOriginId": "1236", "segmentDestId": "1238",
"segmentLocation": "IH-45 Gulf Northbound from Scott to Allen Parkway"},
{"latitudes": ["29.58663", "29.60651", "29.61609", "29.62771", "29.64072", "29.65242", "29.66918"],
"longitudes": ["-95.18433", "-95.20536", "-95.21608", "-95.22874", "-95.24328", "-95.25138", "-95.26222"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00300", "0.00400", "0.00500", "0.00600", "0.00600", "0.00600", "0.00600"],
"segmentDirection": "NB", "segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "5.80",
"segmentFacilityType": "HOV", "segmentOriginId": "508", "segmentDestId": "504",
"segmentLocation": "IH-45 Gulf HOV Northbound from Dixie Farm Road (FM-1959) to Monroe"},
{"latitudes": ["29.66918", "29.67562", "29.68542"], "longitudes": ["-95.26222", "-95.26773", "-95.27742"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300"], "longitudeOffsets": ["0.00600", "0.00600", "0.00600"],
"segmentDirection": "NB", "segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "2.70",
"segmentFacilityType": "HOV", "segmentOriginId": "504", "segmentDestId": "114",
"segmentLocation": "IH-45 Gulf HOV Northbound from Monroe to Broadway"},
{"latitudes": ["29.68542", "29.69732", "29.70212", "29.70848", "29.71502"],
"longitudes": ["-95.27742", "-95.28852", "-95.29552", "-95.30532", "-95.31537"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00600", "0.00600", "0.00600", "0.00600", "0.00600"], "segmentDirection": "NB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "2.90", "segmentFacilityType": "HOV",
"segmentOriginId": "114", "segmentDestId": "116",
"segmentLocation": "IH-45 Gulf HOV Northbound from Broadway to Wayside"},
{"latitudes": ["29.71502", "29.71832", "29.72812", "29.73168", "29.73558", "29.74279"],
"longitudes": ["-95.31537", "-95.32068", "-95.33702", "-95.34218", "-95.34748", "-95.35842"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00600", "0.00600", "0.00600", "0.00500", "0.00400", "0.00400"], "segmentDirection": "NB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "3.10", "segmentFacilityType": "HOV",
"segmentOriginId": "116", "segmentDestId": "510",
"segmentLocation": "IH-45 Gulf HOV Northbound from Wayside to Dowling"},
{"latitudes": ["30.0352", "30.0335", "30.01812"], "longitudes": ["-95.25663", "-95.25702", "-95.26219"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100"], "longitudeOffsets": ["0.00700", "0.00700", "0.00700"],
"segmentDirection": "SB", "segmentSpeed": 54, "segmentTravelTime": "2 minutes 6 seconds", "segmentDist": "1.90",
"segmentFacilityType": "ML", "segmentOriginId": "1513", "segmentDestId": "1514",
"segmentLocation": "US-59 Eastex Southbound from Sorters-McClellan to Townsen"},
{"latitudes": ["30.01812", "30.00306", "29.98569"], "longitudes": ["-95.26219", "-95.26888", "-95.27588"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100"], "longitudeOffsets": ["0.00700", "0.00700", "0.00700"],
"segmentDirection": "SB", "segmentSpeed": 63, "segmentTravelTime": "2 minutes 54 seconds", "segmentDist": "2",
"segmentFacilityType": "ML", "segmentOriginId": "1514", "segmentDestId": "1515",
"segmentLocation": "US-59 Eastex Southbound from Townsen to Will Clayton"},
{"latitudes": ["29.98569", "29.96569", "29.95225"], "longitudes": ["-95.27588", "-95.28456", "-95.29044"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100"], "longitudeOffsets": ["0.00700", "0.00700", "0.00700"],
"segmentDirection": "SB", "segmentSpeed": 67, "segmentTravelTime": "2 minutes 59 seconds", "segmentDist": "2.20",
"segmentFacilityType": "ML", "segmentOriginId": "1515", "segmentDestId": "1502",
"segmentLocation": "US-59 Eastex Southbound from Will Clayton to Greens"},
{"latitudes": ["29.95225", "29.94012", "29.924", "29.91843"],
"longitudes": ["-95.29044", "-95.29581", "-95.30263", "-95.30685"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00700", "0.00700", "0.00700", "0.00700"], "segmentDirection": "SB", "segmentSpeed": 60,
"segmentTravelTime": "2 minutes 29 seconds", "segmentDist": "2.50", "segmentFacilityType": "ML",
"segmentOriginId": "1502", "segmentDestId": "1503",
"segmentLocation": "US-59 Eastex Southbound from Greens to Greens Bayou"},
{"latitudes": ["29.91843", "29.91659", "29.91019", "29.89087"],
"longitudes": ["-95.30685", "-95.30807", "-95.30869", "-95.31806"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00700", "0.00700", "0.00700", "0.00700"], "segmentDirection": "SB", "segmentSpeed": 55,
"segmentTravelTime": "2 minutes 4 seconds", "segmentDist": "1.90", "segmentFacilityType": "ML",
"segmentOriginId": "1503", "segmentDestId": "1516",
"segmentLocation": "US-59 Eastex Southbound from Greens Bayou to Mount Houston"},
{"latitudes": ["29.89087", "29.87956", "29.87025", "29.86656", "29.85944"],
"longitudes": ["-95.31806", "-95.32463", "-95.33", "-95.33238", "-95.3335"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00700", "0.00700", "0.00700", "0.00700", "0.00700"], "segmentDirection": "SB",
"segmentSpeed": 60, "segmentTravelTime": "2 minutes 23 seconds", "segmentDist": "2.40",
"segmentFacilityType": "ML", "segmentOriginId": "1516", "segmentDestId": "1517",
"segmentLocation": "US-59 Eastex Southbound from Mount Houston to Parker"},
{"latitudes": ["29.85944", "29.84875", "29.84075", "29.82919"],
"longitudes": ["-95.3335", "-95.33363", "-95.33344", "-95.33525"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00700", "0.00700", "0.00700", "0.00700"], "segmentDirection": "SB", "segmentSpeed": 65,
"segmentTravelTime": "2 minutes 1 second", "segmentDist": "2.20", "segmentFacilityType": "ML",
"segmentOriginId": "1517", "segmentDestId": "1518",
"segmentLocation": "US-59 Eastex Southbound from Parker to Crosstimbers"},
{"latitudes": ["29.82919", "29.82213", "29.81325", "29.81019", "29.808"],
"longitudes": ["-95.33525", "-95.33531", "-95.335", "-95.33528", "-95.33631"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00700", "0.00700", "0.00700", "0.00700", "0.00600"], "segmentDirection": "SB",
"segmentSpeed": 59, "segmentTravelTime": "2 minutes 38 seconds", "segmentDist": "1.60",
"segmentFacilityType": "ML", "segmentOriginId": "1518", "segmentDestId": "1519",
"segmentLocation": "US-59 Eastex Southbound from Crosstimbers to IH-610 North Loop"},
{"latitudes": ["29.808", "29.80434", "29.8015", "29.79375", "29.78356"],
"longitudes": ["-95.33631", "-95.33912", "-95.33994", "-95.33975", "-95.33994"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00600", "0.00600", "0.00600", "0.00600", "0.00600"], "segmentDirection": "SB",
"segmentSpeed": 61, "segmentTravelTime": "1 minute 28 seconds", "segmentDist": "1.50",
"segmentFacilityType": "ML", "segmentOriginId": "1519", "segmentDestId": "1520",
"segmentLocation": "US-59 Eastex Southbound from IH-610 North Loop to Quitman"},
{"latitudes": ["29.78356", "29.76931", "29.76644", "29.76156", "29.74457"],
"longitudes": ["-95.33994", "-95.34138", "-95.34781", "-95.34988", "-95.36338"],
"latitudeOffsets": ["0.00100", "-0.0010", "-0.0010", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00600", "0.00600", "0.00600", "0.00600", "0.00600"], "segmentDirection": "SB",
"segmentSpeed": 61, "segmentTravelTime": "2 minutes 4 seconds", "segmentDist": "2.10",
"segmentFacilityType": "ML", "segmentOriginId": "1520", "segmentDestId": "1521",
"segmentLocation": "US-59 Eastex Southbound from Quitman to IH-45 Gulf"},
{"latitudes": ["29.74457", "29.73855", "29.73662", "29.7336"],
"longitudes": ["-95.36338", "-95.3673", "-95.36872", "-95.37128"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "-0.0040"],
"longitudeOffsets": ["0.00600", "0.00600", "0.00600", "0.00100"], "segmentDirection": "SB", "segmentSpeed": 57,
"segmentTravelTime": "2 minutes 18 seconds", "segmentDist": "2.20", "segmentFacilityType": "ML",
"segmentOriginId": "1521", "segmentDestId": "1531",
"segmentLocation": "US-59 Eastex Southbound from IH-45 Gulf to SH-288"},
{"latitudes": ["29.7336", "29.73662", "29.73855", "29.74457"],
"longitudes": ["-95.37128", "-95.36872", "-95.3673", "-95.36338"],
"latitudeOffsets": ["-0.0040", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00100", "0.00600", "0.00600", "0.00600"], "segmentDirection": "NB", "segmentSpeed": 60,
"segmentTravelTime": "2 minutes 13 seconds", "segmentDist": "2.20", "segmentFacilityType": "ML",
"segmentOriginId": "1560", "segmentDestId": "1504",
"segmentLocation": "US-59 Eastex Northbound from SH-288 to IH-45 Gulf"},
{"latitudes": ["29.74457", "29.76156", "29.76644", "29.76931", "29.78356"],
"longitudes": ["-95.36338", "-95.34988", "-95.34781", "-95.34138", "-95.33994"],
"latitudeOffsets": ["0.00100", "0.00100", "-0.0010", "-0.0010", "0.00100"],
"longitudeOffsets": ["0.00600", "0.00600", "0.00600", "0.00600", "0.00600"], "segmentDirection": "NB",
"segmentSpeed": 58, "segmentTravelTime": "2 minutes 11 seconds", "segmentDist": "2.10",
"segmentFacilityType": "ML", "segmentOriginId": "1504", "segmentDestId": "1505",
"segmentLocation": "US-59 Eastex Northbound from IH-45 Gulf to Quitman"},
{"latitudes": ["29.78356", "29.79375", "29.8015", "29.80434", "29.808"],
"longitudes": ["-95.33994", "-95.33975", "-95.33994", "-95.33912", "-95.33631"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00600", "0.00600", "0.00600", "0.00600", "0.00600"], "segmentDirection": "NB",
"segmentSpeed": 69, "segmentTravelTime": "1 minute 18 seconds", "segmentDist": "1.50",
"segmentFacilityType": "ML", "segmentOriginId": "1505", "segmentDestId": "1506",
"segmentLocation": "US-59 Eastex Northbound from Quitman to IH-610 North Loop"},
{"latitudes": ["29.808", "29.81019", "29.81325", "29.82213", "29.82919"],
"longitudes": ["-95.33631", "-95.33528", "-95.335", "-95.33531", "-95.33525"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00600", "0.00700", "0.00700", "0.00700", "0.00700"], "segmentDirection": "NB",
"segmentSpeed": 54, "segmentTravelTime": "2 minutes 47 seconds", "segmentDist": "1.60",
"segmentFacilityType": "ML", "segmentOriginId": "1506", "segmentDestId": "1507",
"segmentLocation": "US-59 Eastex Northbound from IH-610 North Loop to Crosstimbers"},
{"latitudes": ["29.82919", "29.84075", "29.84875", "29.85944"],
"longitudes": ["-95.33525", "-95.33344", "-95.33363", "-95.3335"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00700", "0.00700", "0.00700", "0.00700"], "segmentDirection": "NB", "segmentSpeed": 72,
"segmentTravelTime": "2 minutes 50 seconds", "segmentDist": "2.20", "segmentFacilityType": "ML",
"segmentOriginId": "1507", "segmentDestId": "1508",
"segmentLocation": "US-59 Eastex Northbound from Crosstimbers to Parker"},
{"latitudes": ["29.85944", "29.86656", "29.87025", "29.87956", "29.89087"],
"longitudes": ["-95.3335", "-95.33238", "-95.33", "-95.32463", "-95.31806"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00700", "0.00700", "0.00700", "0.00700", "0.00700"], "segmentDirection": "NB",
"segmentSpeed": 54, "segmentTravelTime": "3 minutes 41 seconds", "segmentDist": "2.40",
"segmentFacilityType": "ML", "segmentOriginId": "1508", "segmentDestId": "1509",
"segmentLocation": "US-59 Eastex Northbound from Parker to Mount Houston"},
{"latitudes": ["29.89087", "29.91019", "29.91659", "29.91843"],
"longitudes": ["-95.31806", "-95.30869", "-95.30807", "-95.30685"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00700", "0.00700", "0.00700", "0.00700"], "segmentDirection": "NB", "segmentSpeed": 70,
"segmentTravelTime": "2 minutes 38 seconds", "segmentDist": "1.90", "segmentFacilityType": "ML",
"segmentOriginId": "1509", "segmentDestId": "1500",
"segmentLocation": "US-59 Eastex Northbound from Mount Houston to Greens Bayou"},
{"latitudes": ["29.91843", "29.924", "29.94012", "29.95225"],
"longitudes": ["-95.30685", "-95.30263", "-95.29581", "-95.29044"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00700", "0.00700", "0.00700", "0.00700"], "segmentDirection": "NB", "segmentSpeed": 60,
"segmentTravelTime": "2 minutes 29 seconds", "segmentDist": "2.50", "segmentFacilityType": "ML",
"segmentOriginId": "1500", "segmentDestId": "1501",
"segmentLocation": "US-59 Eastex Northbound from Greens Bayou to Greens"},
{"latitudes": ["29.95225", "29.96569", "29.98569"], "longitudes": ["-95.29044", "-95.28456", "-95.27588"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100"], "longitudeOffsets": ["0.00700", "0.00700", "0.00700"],
"segmentDirection": "NB", "segmentSpeed": 52, "segmentTravelTime": "3 minutes 31 seconds", "segmentDist": "2.20",
"segmentFacilityType": "ML", "segmentOriginId": "1501", "segmentDestId": "1510",
"segmentLocation": "US-59 Eastex Northbound from Greens to Will Clayton"},
{"latitudes": ["29.98569", "30.00306", "30.01812"], "longitudes": ["-95.27588", "-95.26888", "-95.26219"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100"], "longitudeOffsets": ["0.00700", "0.00700", "0.00700"],
"segmentDirection": "NB", "segmentSpeed": 58, "segmentTravelTime": "2 minutes 5 seconds", "segmentDist": "2",
"segmentFacilityType": "ML", "segmentOriginId": "1510", "segmentDestId": "1511",
"segmentLocation": "US-59 Eastex Northbound from Will Clayton to Townsen"},
{"latitudes": ["30.01812", "30.0335", "30.0352"], "longitudes": ["-95.26219", "-95.25702", "-95.25663"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100"], "longitudeOffsets": ["0.00700", "0.00700", "0.00700"],
"segmentDirection": "NB", "segmentSpeed": 61, "segmentTravelTime": "2 minutes 53 seconds", "segmentDist": "1.90",
"segmentFacilityType": "ML", "segmentOriginId": "1511", "segmentDestId": "1512",
"segmentLocation": "US-59 Eastex Northbound from Townsen to Sorters-McClellan"},
{"latitudes": ["30.01812", "30.00306", "29.98569"], "longitudes": ["-95.26219", "-95.26888", "-95.27588"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100"], "longitudeOffsets": ["0.00700", "0.00700", "0.00700"],
"segmentDirection": "SB", "segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "3.20",
"segmentFacilityType": "HOV", "segmentOriginId": "382", "segmentDestId": "432",
"segmentLocation": "US-59 Eastex HOV Southbound from Townsen to Will Clayton"},
{"latitudes": ["29.98569", "29.96569", "29.95225", "29.94012"],
"longitudes": ["-95.27588", "-95.28456", "-95.29044", "-95.29581"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00700", "0.00700", "0.00700", "0.00700"], "segmentDirection": "SB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "2", "segmentFacilityType": "HOV", "segmentOriginId": "432",
"segmentDestId": "383", "segmentLocation": "US-59 Eastex HOV Southbound from Will Clayton to Beltway 8-North"},
{"latitudes": ["29.94012", "29.924", "29.91843", "29.91659", "29.91019"],
"longitudes": ["-95.29581", "-95.30263", "-95.30685", "-95.30807", "-95.30869"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00700", "0.00700", "0.00700", "0.00700", "0.00700"], "segmentDirection": "SB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "2.80", "segmentFacilityType": "HOV",
"segmentOriginId": "383", "segmentDestId": "384",
"segmentLocation": "US-59 Eastex HOV Southbound from Beltway 8-North to Aldine Mail Route"},
{"latitudes": ["29.91019", "29.89087", "29.87956", "29.87025", "29.86656", "29.85944", "29.84875"],
"longitudes": ["-95.30869", "-95.31806", "-95.32463", "-95.33", "-95.33238", "-95.3335", "-95.33363"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00700", "0.00700", "0.00700", "0.00700", "0.00700", "0.00700", "0.00700"],
"segmentDirection": "SB", "segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "3.90",
"segmentFacilityType": "HOV", "segmentOriginId": "384", "segmentDestId": "385",
"segmentLocation": "US-59 Eastex HOV Southbound from Aldine Mail Route to Tidwell"}, {
"latitudes": ["29.84875", "29.84075", "29.82919", "29.82213", "29.81325", "29.81019", "29.808", "29.80434",
"29.8015", "29.79375", "29.78356"],
"longitudes": ["-95.33363", "-95.33344", "-95.33525", "-95.33531", "-95.335", "-95.33528", "-95.33631",
"-95.33912", "-95.33994", "-95.33975", "-95.33994"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100",
"0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00700", "0.00700", "0.00700", "0.00700", "0.00700", "0.00700", "0.00600", "0.00600",
"0.00600", "0.00600", "0.00600"], "segmentDirection": "SB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "5.20", "segmentFacilityType": "HOV",
"segmentOriginId": "385", "segmentDestId": "436",
"segmentLocation": "US-59 Eastex HOV Southbound from Tidwell to Quitman"}, {
"latitudes": ["29.7336", "29.73298", "29.73242", "29.7323", "29.73215", "29.73185", "29.7313", "29.73088",
"29.7309"],
"longitudes": ["-95.37128", "-95.37535", "-95.38312", "-95.38452", "-95.38845", "-95.39105", "-95.39948",
"-95.407", "-95.41065"],
"latitudeOffsets": ["-0.0040", "-0.0040", "-0.0040", "-0.0040", "-0.0040", "-0.0040", "-0.0040", "-0.0050",
"-0.0060"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100",
"0.00100"], "segmentDirection": "SB", "segmentSpeed": 55,
"segmentTravelTime": "2 minutes 17 seconds", "segmentDist": "2.10", "segmentFacilityType": "ML",
"segmentOriginId": "1531", "segmentDestId": "1532",
"segmentLocation": "US-59 Southwest Southbound from SH-288 to Shepherd"},
{"latitudes": ["29.7309", "29.73083", "29.73062", "29.7299", "29.7292", "29.72895"],
"longitudes": ["-95.41065", "-95.41285", "-95.41868", "-95.42815", "-95.43357", "-95.4415"],
"latitudeOffsets": ["-0.0060", "-0.0060", "-0.0060", "-0.0060", "-0.0060", "-0.0060"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "SB",
"segmentSpeed": 71, "segmentTravelTime": "2 minutes 31 seconds", "segmentDist": "1.80",
"segmentFacilityType": "ML", "segmentOriginId": "1532", "segmentDestId": "1533",
"segmentLocation": "US-59 Southwest Southbound from Shepherd to Weslayan"},
{"latitudes": ["29.72895", "29.73018", "29.7288", "29.72632", "29.72598"],
"longitudes": ["-95.4415", "-95.4515", "-95.46053", "-95.46825", "-95.47658"],
"latitudeOffsets": ["-0.0060", "-0.0060", "-0.0060", "-0.0060", "-0.0060"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "SB",
"segmentSpeed": 62, "segmentTravelTime": "2 minutes 33 seconds", "segmentDist": "1.60",
"segmentFacilityType": "ML", "segmentOriginId": "1533", "segmentDestId": "1534",
"segmentLocation": "US-59 Southwest Southbound from Weslayan to Chimney Rock"},
{"latitudes": ["29.72598", "29.72588", "29.72571", "29.72059"],
"longitudes": ["-95.47658", "-95.48422", "-95.48783", "-95.49483"],
"latitudeOffsets": ["-0.0060", "-0.0060", "-0.0060", "0.00100"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.01000"], "segmentDirection": "SB", "segmentSpeed": 66,
"segmentTravelTime": "1 minute 11 seconds", "segmentDist": "1.30", "segmentFacilityType": "ML",
"segmentOriginId": "1534", "segmentDestId": "1535",
"segmentLocation": "US-59 Southwest Southbound from Chimney Rock to Westpark"},
{"latitudes": ["29.72059", "29.71682", "29.7044"], "longitudes": ["-95.49483", "-95.49928", "-95.51318"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100"], "longitudeOffsets": ["0.01000", "0.01000", "0.01000"],
"segmentDirection": "SB", "segmentSpeed": 57, "segmentTravelTime": "2 minutes 53 seconds", "segmentDist": "1.80",
"segmentFacilityType": "ML", "segmentOriginId": "1535", "segmentDestId": "1536",
"segmentLocation": "US-59 Southwest Southbound from Westpark to Bellaire"},
{"latitudes": ["29.7044", "29.7003", "29.69205"], "longitudes": ["-95.51318", "-95.51782", "-95.52717"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100"], "longitudeOffsets": ["0.01000", "0.01000", "0.01000"],
"segmentDirection": "SB", "segmentSpeed": 57, "segmentTravelTime": "1 minute 10 seconds", "segmentDist": "1.10",
"segmentFacilityType": "ML", "segmentOriginId": "1536", "segmentDestId": "1537",
"segmentLocation": "US-59 Southwest Southbound from Bellaire to Beechnut"},
{"latitudes": ["29.69205", "29.688", "29.68085", "29.67577", "29.66098"],
"longitudes": ["-95.52717", "-95.53075", "-95.53662", "-95.54213", "-95.55803"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.01000", "0.01000", "0.01000", "0.00800", "0.00800"], "segmentDirection": "SB",
"segmentSpeed": 52, "segmentTravelTime": "3 minutes 33 seconds", "segmentDist": "2.20",
"segmentFacilityType": "ML", "segmentOriginId": "1537", "segmentDestId": "1538",
"segmentLocation": "US-59 Southwest Southbound from Beechnut to Beltway 8-West"},
{"latitudes": ["29.66098", "29.65398", "29.65142", "29.64405"],
"longitudes": ["-95.55803", "-95.56565", "-95.5684", "-95.57612"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00800", "0.00800", "0.00800", "0.00800"], "segmentDirection": "SB", "segmentSpeed": 63,
"segmentTravelTime": "2 minutes 43 seconds", "segmentDist": "1.80", "segmentFacilityType": "ML",
"segmentOriginId": "1538", "segmentDestId": "1539",
"segmentLocation": "US-59 Southwest Southbound from Beltway 8-West to W Airport"},
{"latitudes": ["29.64405", "29.63913"], "longitudes": ["-95.57612", "-95.58136"],
"latitudeOffsets": ["0.00100", "0.00100"], "longitudeOffsets": ["0.00800", "0.00800"], "segmentDirection": "SB",
"segmentSpeed": 76, "segmentTravelTime": "1 minute 57 seconds", "segmentDist": "1.20",
"segmentFacilityType": "ML", "segmentOriginId": "1539", "segmentDestId": "1540",
"segmentLocation": "US-59 Southwest Southbound from W Airport to Kirkwood"},
{"latitudes": ["29.63913", "29.62771", "29.62174"], "longitudes": ["-95.58136", "-95.59363", "-95.60161"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100"], "longitudeOffsets": ["0.00800", "0.00800", "0.00800"],
"segmentDirection": "SB", "segmentSpeed": 66, "segmentTravelTime": "2 minutes 33 seconds", "segmentDist": "1.70",
"segmentFacilityType": "ML", "segmentOriginId": "1540", "segmentDestId": "1541",
"segmentLocation": "US-59 Southwest Southbound from Kirkwood to Dairy-Ashford"},
{"latitudes": ["29.62174", "29.61738", "29.61383", "29.60697", "29.59841"],
"longitudes": ["-95.60161", "-95.60496", "-95.60758", "-95.61363", "-95.62249"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00800", "0.00800", "0.00800", "0.00800", "0.00800"], "segmentDirection": "SB",
"segmentSpeed": 56, "segmentTravelTime": "2 minutes 43 seconds", "segmentDist": "1.60",
"segmentFacilityType": "ML", "segmentOriginId": "1541", "segmentDestId": "1542",
"segmentLocation": "US-59 Southwest Southbound from Dairy-Ashford to SH-6"},
{"latitudes": ["29.59841", "29.59361", "29.58928", "29.58103"],
"longitudes": ["-95.62249", "-95.62796", "-95.63468", "-95.64787"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00800", "0.00800", "0.00800", "0.00800"], "segmentDirection": "SB", "segmentSpeed": 61,
"segmentTravelTime": "2 minutes 3 seconds", "segmentDist": "2.10", "segmentFacilityType": "ML",
"segmentOriginId": "1542", "segmentDestId": "1543",
"segmentLocation": "US-59 Southwest Southbound from SH-6 to University"},
{"latitudes": ["29.58103", "29.56808", "29.56553", "29.56422", "29.5623", "29.56137"],
"longitudes": ["-95.64787", "-95.66826", "-95.67271", "-95.67673", "-95.68599", "-95.6905"],
"latitudeOffsets": ["0.00100", "0.00100", "-0.0020", "-0.0020", "-0.0020", "-0.0020"],
"longitudeOffsets": ["0.00800", "0.00800", "0.00200", "0.00200", "0.00200", "0.00200"], "segmentDirection": "SB",
"segmentSpeed": 61, "segmentTravelTime": "2 minutes 35 seconds", "segmentDist": "1.60",
"segmentFacilityType": "ML", "segmentOriginId": "1543", "segmentDestId": "1544",
"segmentLocation": "US-59 Southwest Southbound from University to SH-99"},
{"latitudes": ["29.56137", "29.55568"], "longitudes": ["-95.6905", "-95.71733"],
"latitudeOffsets": ["-0.0020", "-0.0020"], "longitudeOffsets": ["0.00200", "0.00200"], "segmentDirection": "SB",
"segmentSpeed": 63, "segmentTravelTime": "2 minutes 0 seconds", "segmentDist": "2.10",
"segmentFacilityType": "ML", "segmentOriginId": "1544", "segmentDestId": "1545",
"segmentLocation": "US-59 Southwest Southbound from SH-99 to Williams Way"},
{"latitudes": ["29.55568", "29.56137"], "longitudes": ["-95.71733", "-95.6905"],
"latitudeOffsets": ["-0.0020", "-0.0020"], "longitudeOffsets": ["0.00200", "0.00200"], "segmentDirection": "NB",
"segmentSpeed": 66, "segmentTravelTime": "2 minutes 55 seconds", "segmentDist": "2.10",
"segmentFacilityType": "ML", "segmentOriginId": "1546", "segmentDestId": "1547",
"segmentLocation": "US-59 Southwest Northbound from Williams Way to SH-99"},
{"latitudes": ["29.56137", "29.5623", "29.56422", "29.56553", "29.56808", "29.58103"],
"longitudes": ["-95.6905", "-95.68599", "-95.67673", "-95.67271", "-95.66826", "-95.64787"],
"latitudeOffsets": ["-0.0020", "-0.0020", "-0.0020", "-0.0020", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00200", "0.00200", "0.00200", "0.00200", "0.00800", "0.00800"], "segmentDirection": "NB",
"segmentSpeed": 53, "segmentTravelTime": "2 minutes 49 seconds", "segmentDist": "1.60",
"segmentFacilityType": "ML", "segmentOriginId": "1547", "segmentDestId": "1548",
"segmentLocation": "US-59 Southwest Northbound from SH-99 to University"},
{"latitudes": ["29.58103", "29.58928", "29.59361", "29.59841"],
"longitudes": ["-95.64787", "-95.63468", "-95.62796", "-95.62249"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00800", "0.00800", "0.00800", "0.00800"], "segmentDirection": "NB", "segmentSpeed": 60,
"segmentTravelTime": "2 minutes 6 seconds", "segmentDist": "2.10", "segmentFacilityType": "ML",
"segmentOriginId": "1548", "segmentDestId": "1549",
"segmentLocation": "US-59 Southwest Northbound from University to SH-6"},
{"latitudes": ["29.59841", "29.60697", "29.61383", "29.61738", "29.62174"],
"longitudes": ["-95.62249", "-95.61363", "-95.60758", "-95.60496", "-95.60161"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00800", "0.00800", "0.00800", "0.00800", "0.00800"], "segmentDirection": "NB",
"segmentSpeed": 66, "segmentTravelTime": "1 minute 27 seconds", "segmentDist": "1.60",
"segmentFacilityType": "ML", "segmentOriginId": "1549", "segmentDestId": "1550",
"segmentLocation": "US-59 Southwest Northbound from SH-6 to Dairy-Ashford"},
{"latitudes": ["29.62174", "29.62771", "29.63913"], "longitudes": ["-95.60161", "-95.59363", "-95.58136"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100"], "longitudeOffsets": ["0.00800", "0.00800", "0.00800"],
"segmentDirection": "NB", "segmentSpeed": 64, "segmentTravelTime": "2 minutes 35 seconds", "segmentDist": "1.70",
"segmentFacilityType": "ML", "segmentOriginId": "1550", "segmentDestId": "1551",
"segmentLocation": "US-59 Southwest Northbound from Dairy-Ashford to Kirkwood"},
{"latitudes": ["29.63913", "29.64405"], "longitudes": ["-95.58136", "-95.57612"],
"latitudeOffsets": ["0.00100", "0.00100"], "longitudeOffsets": ["0.00800", "0.00800"], "segmentDirection": "NB",
"segmentSpeed": 60, "segmentTravelTime": "1 minute 12 seconds", "segmentDist": "1.20",
"segmentFacilityType": "ML", "segmentOriginId": "1551", "segmentDestId": "1552",
"segmentLocation": "US-59 Southwest Northbound from Kirkwood to W Airport"},
{"latitudes": ["29.64405", "29.65142", "29.65398", "29.66098"],
"longitudes": ["-95.57612", "-95.5684", "-95.56565", "-95.55803"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00800", "0.00800", "0.00800", "0.00800"], "segmentDirection": "NB", "segmentSpeed": 68,
"segmentTravelTime": "2 minutes 36 seconds", "segmentDist": "1.80", "segmentFacilityType": "ML",
"segmentOriginId": "1552", "segmentDestId": "1553",
"segmentLocation": "US-59 Southwest Northbound from W Airport to Beltway 8-West"},
{"latitudes": ["29.66098", "29.67577", "29.68085", "29.688", "29.69205"],
"longitudes": ["-95.55803", "-95.54213", "-95.53662", "-95.53075", "-95.52717"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00800", "0.00800", "0.01000", "0.01000", "0.01000"], "segmentDirection": "NB",
"segmentSpeed": 57, "segmentTravelTime": "2 minutes 19 seconds", "segmentDist": "2.20",
"segmentFacilityType": "ML", "segmentOriginId": "1553", "segmentDestId": "1554",
"segmentLocation": "US-59 Southwest Northbound from Beltway 8-West to Beechnut"},
{"latitudes": ["29.69205", "29.7003", "29.7044"], "longitudes": ["-95.52717", "-95.51782", "-95.51318"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100"], "longitudeOffsets": ["0.01000", "0.01000", "0.01000"],
"segmentDirection": "NB", "segmentSpeed": 56, "segmentTravelTime": "1 minute 11 seconds", "segmentDist": "1.10",
"segmentFacilityType": "ML", "segmentOriginId": "1554", "segmentDestId": "1555",
"segmentLocation": "US-59 Southwest Northbound from Beechnut to Bellaire"},
{"latitudes": ["29.7044", "29.71682", "29.72059"], "longitudes": ["-95.51318", "-95.49928", "-95.49483"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100"], "longitudeOffsets": ["0.01000", "0.01000", "0.01000"],
"segmentDirection": "NB", "segmentSpeed": 59, "segmentTravelTime": "2 minutes 49 seconds", "segmentDist": "1.80",
"segmentFacilityType": "ML", "segmentOriginId": "1555", "segmentDestId": "1556",
"segmentLocation": "US-59 Southwest Northbound from Bellaire to Westpark"},
{"latitudes": ["29.72059", "29.72571", "29.72588", "29.72598"],
"longitudes": ["-95.49483", "-95.48783", "-95.48422", "-95.47658"],
"latitudeOffsets": ["0.00100", "-0.0060", "-0.0060", "-0.0060"],
"longitudeOffsets": ["0.01000", "0.00100", "0.00100", "0.00100"], "segmentDirection": "NB", "segmentSpeed": 57,
"segmentTravelTime": "1 minute 22 seconds", "segmentDist": "1.30", "segmentFacilityType": "ML",
"segmentOriginId": "1556", "segmentDestId": "1557",
"segmentLocation": "US-59 Southwest Northbound from Westpark to Chimney Rock"},
{"latitudes": ["29.72598", "29.72632", "29.7288", "29.73018", "29.72895"],
"longitudes": ["-95.47658", "-95.46825", "-95.46053", "-95.4515", "-95.4415"],
"latitudeOffsets": ["-0.0060", "-0.0060", "-0.0060", "-0.0060", "-0.0060"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "NB",
"segmentSpeed": 63, "segmentTravelTime": "2 minutes 32 seconds", "segmentDist": "1.60",
"segmentFacilityType": "ML", "segmentOriginId": "1557", "segmentDestId": "1558",
"segmentLocation": "US-59 Southwest Northbound from Chimney Rock to Weslayan"},
{"latitudes": ["29.72895", "29.7292", "29.7299", "29.73062", "29.73083", "29.7309"],
"longitudes": ["-95.4415", "-95.43357", "-95.42815", "-95.41868", "-95.41285", "-95.41065"],
"latitudeOffsets": ["-0.0060", "-0.0060", "-0.0060", "-0.0060", "-0.0060", "-0.0060"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "NB",
"segmentSpeed": 59, "segmentTravelTime": "2 minutes 50 seconds", "segmentDist": "1.80",
"segmentFacilityType": "ML", "segmentOriginId": "1558", "segmentDestId": "1559",
"segmentLocation": "US-59 Southwest Northbound from Weslayan to Shepherd"}, {
"latitudes": ["29.7309", "29.73088", "29.7313", "29.73185", "29.73215", "29.7323", "29.73242", "29.73298",
"29.7336"],
"longitudes": ["-95.41065", "-95.407", "-95.39948", "-95.39105", "-95.38845", "-95.38452", "-95.38312",
"-95.37535", "-95.37128"],
"latitudeOffsets": ["-0.0060", "-0.0050", "-0.0040", "-0.0040", "-0.0040", "-0.0040", "-0.0040", "-0.0040",
"-0.0040"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100",
"0.00100"], "segmentDirection": "NB", "segmentSpeed": 59,
"segmentTravelTime": "2 minutes 9 seconds", "segmentDist": "2.10", "segmentFacilityType": "ML",
"segmentOriginId": "1559", "segmentDestId": "1560",
"segmentLocation": "US-59 Southwest Northbound from Shepherd to SH-288"},
{"latitudes": ["29.64405", "29.65142", "29.65398", "29.66098", "29.67577"],
"longitudes": ["-95.57612", "-95.5684", "-95.56565", "-95.55803", "-95.54213"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00800", "0.00800", "0.00800", "0.00800", "0.00800"], "segmentDirection": "NB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "1.50", "segmentFacilityType": "HOV",
"segmentOriginId": "500", "segmentDestId": "164",
"segmentLocation": "US-59 Southwest HOV Northbound from W Airport to Bissonnet"},
{"latitudes": ["29.67577", "29.68085", "29.688", "29.69205", "29.7003", "29.7044", "29.71682"],
"longitudes": ["-95.54213", "-95.53662", "-95.53075", "-95.52717", "-95.51782", "-95.51318", "-95.49928"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00800", "0.01000", "0.01000", "0.01000", "0.01000", "0.01000", "0.01000"],
"segmentDirection": "NB", "segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "5.10",
"segmentFacilityType": "HOV", "segmentOriginId": "164", "segmentDestId": "165",
"segmentLocation": "US-59 Southwest HOV Northbound from Bissonnet to Hillcroft"},
{"latitudes": ["29.71682", "29.72059", "29.72571", "29.72588", "29.72598", "29.72632", "29.7288"],
"longitudes": ["-95.49928", "-95.49483", "-95.48783", "-95.48422", "-95.47658", "-95.46825", "-95.46053"],
"latitudeOffsets": ["0.00100", "0.00100", "-0.0060", "-0.0060", "-0.0060", "-0.0060", "-0.0060"],
"longitudeOffsets": ["0.01000", "0.01000", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"segmentDirection": "NB", "segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "1.60",
"segmentFacilityType": "HOV", "segmentOriginId": "165", "segmentDestId": "166",
"segmentLocation": "US-59 Southwest HOV Northbound from Hillcroft to IH-610 West Loop"},
{"latitudes": ["29.7288", "29.73018"], "longitudes": ["-95.46053", "-95.4515"],
"latitudeOffsets": ["-0.0060", "-0.0060"], "longitudeOffsets": ["0.00100", "0.00100"], "segmentDirection": "NB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "1.35", "segmentFacilityType": "HOV",
"segmentOriginId": "166", "segmentDestId": "167",
"segmentLocation": "US-59 Southwest HOV Northbound from IH-610 West Loop to Newcastle"}, {
"latitudes": ["29.73018", "29.72895", "29.7292", "29.7299", "29.73062", "29.73083", "29.7309", "29.73088",
"29.7313", "29.73185", "29.73215"],
"longitudes": ["-95.4515", "-95.4415", "-95.43357", "-95.42815", "-95.41868", "-95.41285", "-95.41065",
"-95.407", "-95.39948", "-95.39105", "-95.38845"],
"latitudeOffsets": ["-0.0060", "-0.0060", "-0.0060", "-0.0060", "-0.0060", "-0.0060", "-0.0060", "-0.0050",
"-0.0040", "-0.0040", "-0.0040"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100",
"0.00100", "0.00100", "0.00100"], "segmentDirection": "NB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "4.50", "segmentFacilityType": "HOV",
"segmentOriginId": "167", "segmentDestId": "502",
"segmentLocation": "US-59 Southwest HOV Northbound from Newcastle to Spur-527"},
{"latitudes": ["30.11434", "30.11223", "30.11024", "30.10672"],
"longitudes": ["-96.07529", "-96.05713", "-96.05202", "-96.04816"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB", "segmentSpeed": 65,
"segmentTravelTime": "2 minutes 51 seconds", "segmentDist": "2", "segmentFacilityType": "ML",
"segmentOriginId": "1625", "segmentDestId": "1623",
"segmentLocation": "US-290 Northwest Eastbound from SH-6 North in Hempstead to FM-1488"}, {
"latitudes": ["30.10672", "30.09908", "30.09619", "30.09152", "30.09104", "30.09063", "30.09028", "30.08971",
"30.08737", "30.08168", "30.08092", "30.0806"],
"longitudes": ["-96.04816", "-96.04151", "-96.03836", "-96.03081", "-96.02953", "-96.02794", "-96.02109",
"-96.01781", "-96.01203", "-95.99993", "-95.9966", "-95.99092"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400",
"0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100",
"0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB", "segmentSpeed": 69,
"segmentTravelTime": "3 minutes 28 seconds", "segmentDist": "4", "segmentFacilityType": "ML",
"segmentOriginId": "1623", "segmentDestId": "1621",
"segmentLocation": "US-290 Northwest Eastbound from FM-1488 to FM-1098"},
{"latitudes": ["30.0806", "30.08029", "30.0793", "30.07211", "30.07039"],
"longitudes": ["-95.99092", "-95.98304", "-95.97893", "-95.94894", "-95.93418"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB",
"segmentSpeed": 62, "segmentTravelTime": "3 minutes 0 seconds", "segmentDist": "3.10",
"segmentFacilityType": "ML", "segmentOriginId": "1621", "segmentDestId": "1619",
"segmentLocation": "US-290 Northwest Eastbound from FM-1098 to FM-362"},
{"latitudes": ["30.07039", "30.06891", "30.06829", "30.06425", "30.06334"],
"longitudes": ["-95.93418", "-95.92163", "-95.91993", "-95.91144", "-95.90884"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB",
"segmentSpeed": 58, "segmentTravelTime": "2 minutes 4 seconds", "segmentDist": "2", "segmentFacilityType": "ML",
"segmentOriginId": "1619", "segmentDestId": "1617",
"segmentLocation": "US-290 Northwest Eastbound from FM-362 to FM-2920"},
{"latitudes": ["30.06334", "30.05503", "30.04767"], "longitudes": ["-95.90884", "-95.88528", "-95.8741"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": 62, "segmentTravelTime": "3 minutes 0 seconds", "segmentDist": "3.10",
"segmentFacilityType": "ML", "segmentOriginId": "1617", "segmentDestId": "1615",
"segmentLocation": "US-290 Northwest Eastbound from FM-2920 to Kickapoo Rd"},
{"latitudes": ["30.04767", "30.04311", "30.0387", "30.03368", "30.03217", "30.02562", "30.02281", "30.01638"],
"longitudes": ["-95.8741", "-95.8673", "-95.85721", "-95.84485", "-95.84316", "-95.83795", "-95.83208",
"-95.81522"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": 70, "segmentTravelTime": "3 minutes 0 seconds", "segmentDist": "3.50",
"segmentFacilityType": "ML", "segmentOriginId": "1615", "segmentDestId": "1613",
"segmentLocation": "US-290 Northwest Eastbound from Kickapoo Rd to Roberts Rd"},
{"latitudes": ["30.01638", "30.01025", "30.00311"], "longitudes": ["-95.81522", "-95.79928", "-95.78014"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": 60, "segmentTravelTime": "2 minutes 17 seconds", "segmentDist": "2.30",
"segmentFacilityType": "ML", "segmentOriginId": "1613", "segmentDestId": "1611",
"segmentLocation": "US-290 Northwest Eastbound from Roberts Rd to Bauer Rd"},
{"latitudes": ["30.00311", "29.9883", "29.97906"], "longitudes": ["-95.78014", "-95.74098", "-95.71701"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": 65, "segmentTravelTime": "4 minutes 10 seconds", "segmentDist": "4.50",
"segmentFacilityType": "ML", "segmentOriginId": "1611", "segmentDestId": "1609",
"segmentLocation": "US-290 Northwest Eastbound from Bauer Rd to Mueschke Rd"},
{"latitudes": ["29.97906", "29.9736", "29.97167", "29.96956", "29.9647", "29.96298", "29.95323"],
"longitudes": ["-95.71701", "-95.69959", "-95.69444", "-95.69217", "-95.68949", "-95.68776", "-95.67350"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": 64, "segmentTravelTime": "3 minutes 37 seconds", "segmentDist": "2.80",
"segmentFacilityType": "ML", "segmentOriginId": "1609", "segmentDestId": "1607",
"segmentLocation": "US-290 Northwest Eastbound from Mueschke Rd to Barker-Cypress"},
{"latitudes": ["29.95323", "29.93660", "29.92413", "29.92333"],
"longitudes": ["-95.67350", "-95.64988", "-95.63195", "-95.62850"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00500"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB", "segmentSpeed": 63,
"segmentTravelTime": "3 minutes 52 seconds", "segmentDist": "3", "segmentFacilityType": "ML",
"segmentOriginId": "1607", "segmentDestId": "1627",
"segmentLocation": "US-290 Northwest Eastbound from Barker-Cypress to Huffmeister"},
{"latitudes": ["29.92333", "29.92322", "29.92240", "29.9186", "29.91658"],
"longitudes": ["-95.62850", "-95.62437", "-95.62240", "-95.61721", "-95.61605"],
"latitudeOffsets": ["0.00500", "0.00500", "0.00600", "0.00600", "0.00600"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB",
"segmentSpeed": 67, "segmentTravelTime": "1 minute 15 seconds", "segmentDist": "1.40",
"segmentFacilityType": "ML", "segmentOriginId": "1627", "segmentDestId": "1629",
"segmentLocation": "US-290 Northwest Eastbound from Huffmeister to SH-6/FM-1960"},
{"latitudes": ["29.91658", "29.91085", "29.90698", "29.90320", "29.90089"],
"longitudes": ["-95.61605", "-95.61230", "-95.60810", "-95.60265", "-95.59938"],
"latitudeOffsets": ["0.00600", "0.00600", "0.00600", "0.00600", "0.00600"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB",
"segmentSpeed": 67, "segmentTravelTime": "2 minutes 31 seconds", "segmentDist": "1.70",
"segmentFacilityType": "ML", "segmentOriginId": "1629", "segmentDestId": "1631",
"segmentLocation": "US-290 Northwest Eastbound from SH-6/FM-1960 to West"},
{"latitudes": ["29.90089", "29.89048", "29.87945", "29.87685"],
"longitudes": ["-95.59938", "-95.58450", "-95.56878", "-95.56323"],
"latitudeOffsets": ["0.00600", "0.00600", "0.00600", "0.00600"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB", "segmentSpeed": 76,
"segmentTravelTime": "2 minutes 30 seconds", "segmentDist": "1.90", "segmentFacilityType": "ML",
"segmentOriginId": "1631", "segmentDestId": "1633",
"segmentLocation": "US-290 Northwest Eastbound from West to Senate"},
{"latitudes": ["29.87685", "29.87360", "29.87035", "29.86698", "29.85955"],
"longitudes": ["-95.56323", "-95.55545", "-95.54830", "-95.54092", "-95.52433"],
"latitudeOffsets": ["0.00600", "0.00600", "0.00600", "0.00600", "0.00600"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB",
"segmentSpeed": 59, "segmentTravelTime": "3 minutes 14 seconds", "segmentDist": "3.20",
"segmentFacilityType": "ML", "segmentOriginId": "1633", "segmentDestId": "1635",
"segmentLocation": "US-290 Northwest Eastbound from Senate to Fairbanks-North Houston"},
{"latitudes": ["29.85955", "29.85133", "29.84890", "29.84148", "29.83815", "29.83315", "29.82367", "29.81949"],
"longitudes": ["-95.52433", "-95.50978", "-95.50558", "-95.49248", "-95.48885", "-95.48347", "-95.47337",
"-95.46874"],
"latitudeOffsets": ["0.00600", "0.00600", "0.00600", "0.00600", "0.00600", "0.00600", "0.00600", "0.00600"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00200", "0.00200", "0.00200"],
"segmentDirection": "EB", "segmentSpeed": 61, "segmentTravelTime": "5 minutes 35 seconds", "segmentDist": "4.65",
"segmentFacilityType": "ML", "segmentOriginId": "1635", "segmentDestId": "1602",
"segmentLocation": "US-290 Northwest Eastbound from Fairbanks-North Houston to West 34th"},
{"latitudes": ["29.81949", "29.81036", "29.80746", "29.80236"],
"longitudes": ["-95.46874", "-95.459", "-95.45582", "-95.45084"],
"latitudeOffsets": ["0.00600", "0.00600", "0.00600", "0.00600"],
"longitudeOffsets": ["0.00200", "0.00200", "0.00200", "0.00200"], "segmentDirection": "EB", "segmentSpeed": 59,
"segmentTravelTime": "1 minute 55 seconds", "segmentDist": "0.90", "segmentFacilityType": "ML",
"segmentOriginId": "1602", "segmentDestId": "1603",
"segmentLocation": "US-290 Northwest Eastbound from West 34th to IH-610 West Loop"},
{"latitudes": ["29.80236", "29.80746", "29.81036", "29.81949"],
"longitudes": ["-95.45084", "-95.45582", "-95.459", "-95.46874"],
"latitudeOffsets": ["0.00600", "0.00600", "0.00600", "0.00600"],
"longitudeOffsets": ["0.00200", "0.00200", "0.00200", "0.00200"], "segmentDirection": "WB", "segmentSpeed": 74,
"segmentTravelTime": "1 minute 44 seconds", "segmentDist": "0.90", "segmentFacilityType": "ML",
"segmentOriginId": "1600", "segmentDestId": "1601",
"segmentLocation": "US-290 Northwest Westbound from IH-610 West Loop to West 34th"},
{"latitudes": ["29.81949", "29.82367", "29.83315", "29.83815", "29.84148", "29.84890", "29.85133", "29.85955"],
"longitudes": ["-95.46874", "-95.47337", "-95.48347", "-95.48885", "-95.49248", "-95.50558", "-95.50978",
"-95.52433"],
"latitudeOffsets": ["0.00600", "0.00600", "0.00600", "0.00600", "0.00600", "0.00600", "0.00600", "0.00600"],
"longitudeOffsets": ["0.00200", "0.00200", "0.00200", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"segmentDirection": "WB", "segmentSpeed": 60, "segmentTravelTime": "5 minutes 40 seconds", "segmentDist": "4.65",
"segmentFacilityType": "ML", "segmentOriginId": "1601", "segmentDestId": "1634",
"segmentLocation": "US-290 Northwest Westbound from West 34th to Fairbanks-North Houston"},
{"latitudes": ["29.85955", "29.86698", "29.87035", "29.87360", "29.87685"],
"longitudes": ["-95.52433", "-95.54092", "-95.54830", "-95.55545", "-95.56323"],
"latitudeOffsets": ["0.00600", "0.00600", "0.00600", "0.00600", "0.00600"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB",
"segmentSpeed": 57, "segmentTravelTime": "3 minutes 22 seconds", "segmentDist": "3.20",
"segmentFacilityType": "ML", "segmentOriginId": "1634", "segmentDestId": "1632",
"segmentLocation": "US-290 Northwest Westbound from Fairbanks-North Houston to Senate"},
{"latitudes": ["29.87685", "29.87945", "29.89048", "29.90089"],
"longitudes": ["-95.56323", "-95.56878", "-95.58450", "-95.59938"],
"latitudeOffsets": ["0.00600", "0.00600", "0.00600", "0.00600"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB", "segmentSpeed": 73,
"segmentTravelTime": "2 minutes 34 seconds", "segmentDist": "1.90", "segmentFacilityType": "ML",
"segmentOriginId": "1632", "segmentDestId": "1630",
"segmentLocation": "US-290 Northwest Westbound from Senate to West"},
{"latitudes": ["29.90089", "29.90320", "29.90698", "29.91085", "29.91658"],
"longitudes": ["-95.59938", "-95.60265", "-95.60810", "-95.61230", "-95.61605"],
"latitudeOffsets": ["0.00600", "0.00600", "0.00600", "0.00600", "0.00600"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB",
"segmentSpeed": 60, "segmentTravelTime": "2 minutes 42 seconds", "segmentDist": "1.70",
"segmentFacilityType": "ML", "segmentOriginId": "1630", "segmentDestId": "1628",
"segmentLocation": "US-290 Northwest Westbound from West to SH-6/FM-1960"},
{"latitudes": ["29.91658", "29.9186", "29.92240", "29.92322", "29.92333"],
"longitudes": ["-95.61605", "-95.61721", "-95.62240", "-95.62437", "-95.62850"],
"latitudeOffsets": ["0.00600", "0.00600", "0.00600", "0.00500", "0.00500"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB",
"segmentSpeed": 61, "segmentTravelTime": "1 minute 22 seconds", "segmentDist": "1.40",
"segmentFacilityType": "ML", "segmentOriginId": "1628", "segmentDestId": "1626",
"segmentLocation": "US-290 Northwest Westbound from SH-6/FM-1960 to Huffmeister"},
{"latitudes": ["29.92333", "29.92413", "29.93660", "29.95323"],
"longitudes": ["-95.62850", "-95.63195", "-95.64988", "-95.67350"],
"latitudeOffsets": ["0.00500", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB", "segmentSpeed": 62,
"segmentTravelTime": "3 minutes 53 seconds", "segmentDist": "3", "segmentFacilityType": "ML",
"segmentOriginId": "1626", "segmentDestId": "1606",
"segmentLocation": "US-290 Northwest Westbound from Huffmeister to Barker-Cypress"},
{"latitudes": ["29.95323", "29.96298", "29.9647", "29.96956", "29.97167", "29.9736", "29.97906"],
"longitudes": ["-95.67350", "-95.68776", "-95.68949", "-95.69217", "-95.69444", "-95.69959", "-95.71701"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"segmentDirection": "WB", "segmentSpeed": 62, "segmentTravelTime": "3 minutes 42 seconds", "segmentDist": "2.80",
"segmentFacilityType": "ML", "segmentOriginId": "1606", "segmentDestId": "1608",
"segmentLocation": "US-290 Northwest Westbound from Barker-Cypress to Mueschke Rd"},
{"latitudes": ["29.97906", "29.9883", "30.00311"], "longitudes": ["-95.71701", "-95.74098", "-95.78014"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "WB", "segmentSpeed": 63, "segmentTravelTime": "4 minutes 19 seconds", "segmentDist": "4.50",
"segmentFacilityType": "ML", "segmentOriginId": "1608", "segmentDestId": "1610",
"segmentLocation": "US-290 Northwest Westbound from Mueschke Rd to Bauer Rd"},
{"latitudes": ["30.00311", "30.01025", "30.01638"], "longitudes": ["-95.78014", "-95.79928", "-95.81522"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "WB", "segmentSpeed": 60, "segmentTravelTime": "2 minutes 17 seconds", "segmentDist": "2.30",
"segmentFacilityType": "ML", "segmentOriginId": "1610", "segmentDestId": "1612",
"segmentLocation": "US-290 Northwest Westbound from Bauer Rd to Roberts Rd"},
{"latitudes": ["30.01638", "30.02281", "30.02562", "30.03217", "30.03368", "30.0387", "30.04311", "30.04767"],
"longitudes": ["-95.81522", "-95.83208", "-95.83795", "-95.84316", "-95.84485", "-95.85721", "-95.8673",
"-95.8741"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"segmentDirection": "WB", "segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "3.50",
"segmentFacilityType": "ML", "segmentOriginId": "1612", "segmentDestId": "1614",
"segmentLocation": "US-290 Northwest Westbound from Roberts Rd to Kickapoo Rd"},
{"latitudes": ["30.04767", "30.05503", "30.06334"], "longitudes": ["-95.8741", "-95.88528", "-95.90884"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "WB", "segmentSpeed": 62, "segmentTravelTime": "3 minutes 1 second", "segmentDist": "3.10",
"segmentFacilityType": "ML", "segmentOriginId": "1614", "segmentDestId": "1616",
"segmentLocation": "US-290 Northwest Westbound from Kickapoo Rd to FM-2920"},
{"latitudes": ["30.06334", "30.06425", "30.06829", "30.06891", "30.07039"],
"longitudes": ["-95.90884", "-95.91144", "-95.91993", "-95.92163", "-95.93418"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB",
"segmentSpeed": 54, "segmentTravelTime": "2 minutes 14 seconds", "segmentDist": "2", "segmentFacilityType": "ML",
"segmentOriginId": "1616", "segmentDestId": "1618",
"segmentLocation": "US-290 Northwest Westbound from FM-2920 to FM-362"},
{"latitudes": ["30.07039", "30.07211", "30.0793", "30.08029", "30.0806"],
"longitudes": ["-95.93418", "-95.94894", "-95.97893", "-95.98304", "-95.99092"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB",
"segmentSpeed": 60, "segmentTravelTime": "3 minutes 7 seconds", "segmentDist": "3.10",
"segmentFacilityType": "ML", "segmentOriginId": "1618", "segmentDestId": "1620",
"segmentLocation": "US-290 Northwest Westbound from FM-362 to FM-1098"}, {
"latitudes": ["30.0806", "30.08092", "30.08168", "30.08737", "30.08971", "30.09028", "30.09063", "30.09104",
"30.09152", "30.09619", "30.09908", "30.10672"],
"longitudes": ["-95.99092", "-95.9966", "-95.99993", "-96.01203", "-96.01781", "-96.02109", "-96.02794",
"-96.02953", "-96.03081", "-96.03836", "-96.04151", "-96.04816"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400",
"0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100",
"0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "4", "segmentFacilityType": "ML",
"segmentOriginId": "1620", "segmentDestId": "1622",
"segmentLocation": "US-290 Northwest Westbound from FM-1098 to FM-1488"},
{"latitudes": ["30.10672", "30.11024", "30.11223", "30.11434"],
"longitudes": ["-96.04816", "-96.05202", "-96.05713", "-96.07529"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "2", "segmentFacilityType": "ML", "segmentOriginId": "1622",
"segmentDestId": "1624", "segmentLocation": "US-290 Northwest Westbound from FM-1488 to SH-6 North in Hempstead"},
{"latitudes": ["29.90089", "29.89048", "29.87945", "29.87685", "29.87360"],
"longitudes": ["-95.59938", "-95.58450", "-95.56878", "-95.56323", "-95.55545"],
"latitudeOffsets": ["0.00600", "0.00600", "0.00600", "0.00600", "0.00600"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "2.95", "segmentFacilityType": "HOV",
"segmentOriginId": "36", "segmentDestId": "37",
"segmentLocation": "US-290 Northwest HOV Eastbound from West to Beltway 8-West"}, {
"latitudes": ["29.87360", "29.87035", "29.86698", "29.85955", "29.85133", "29.84890", "29.84148", "29.83815",
"29.83315", "29.82367", "29.81949", "29.81036", "29.80746", "29.80236"],
"longitudes": ["-95.55545", "-95.54830", "-95.54092", "-95.52433", "-95.50978", "-95.50558", "-95.49248",
"-95.48885", "-95.48347", "-95.47337", "-95.46874", "-95.459", "-95.45582", "-95.45084"],
"latitudeOffsets": ["0.00600", "0.00600", "0.00600", "0.00600", "0.00600", "0.00600", "0.00600", "0.00600",
"0.00600", "0.00600", "0.00600", "0.00600", "0.00600", "0.00600"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100",
"0.00200", "0.00200", "0.00200", "0.00200", "0.00200", "0.00200"],
"segmentDirection": "EB", "segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "9.50",
"segmentFacilityType": "HOV", "segmentOriginId": "37", "segmentDestId": "42",
"segmentLocation": "US-290 Northwest HOV Eastbound from Beltway 8-West to IH-610 West Loop"}, {
"latitudes": ["29.73237", "29.72831", "29.72485", "29.72213", "29.71931", "29.71581", "29.7125", "29.706",
"29.70112"],
"longitudes": ["-95.37175", "-95.37326", "-95.37588", "-95.37712", "-95.37783", "-95.37874", "-95.37875",
"-95.37538", "-95.37512"],
"latitudeOffsets": ["-0.0020", "0.00000", "0.00000", "0.00000", "0.00000", "0.00000", "0.00000", "0.00000",
"0.00000"],
"longitudeOffsets": ["0.00200", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400",
"0.00400"], "segmentDirection": "SB", "segmentSpeed": 60,
"segmentTravelTime": "3 minutes 43 seconds", "segmentDist": "2.70", "segmentFacilityType": "ML",
"segmentOriginId": "1881", "segmentDestId": "1882",
"segmentLocation": "SH-288 Southbound from US-59 Southwest to Holcombe/Old Spanish Trail"},
{"latitudes": ["29.70112", "29.69799", "29.68663", "29.6805"],
"longitudes": ["-95.37512", "-95.37601", "-95.37925", "-95.381"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000", "0.00000"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "SB", "segmentSpeed": 59,
"segmentTravelTime": "2 minutes 32 seconds", "segmentDist": "1.50", "segmentFacilityType": "ML",
"segmentOriginId": "1882", "segmentDestId": "1883",
"segmentLocation": "SH-288 Southbound from Holcombe/Old Spanish Trail to IH-610 South Loop"},
{"latitudes": ["29.6805", "29.67495", "29.671", "29.66785", "29.66223", "29.65775"],
"longitudes": ["-95.381", "-95.3817", "-95.38193", "-95.3832", "-95.38598", "-95.3873"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000", "0.00000", "0.00000", "0.00000"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "SB",
"segmentSpeed": 61, "segmentTravelTime": "1 minute 17 seconds", "segmentDist": "1.30",
"segmentFacilityType": "ML", "segmentOriginId": "1883", "segmentDestId": "1884",
"segmentLocation": "SH-288 Southbound from IH-610 South Loop to Reed"},
{"latitudes": ["29.65775", "29.64335", "29.62743"], "longitudes": ["-95.3873", "-95.3873", "-95.38729"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000"], "longitudeOffsets": ["0.00400", "0.00400", "0.00400"],
"segmentDirection": "SB", "segmentSpeed": 58, "segmentTravelTime": "2 minutes 5 seconds", "segmentDist": "2",
"segmentFacilityType": "ML", "segmentOriginId": "1884", "segmentDestId": "1885",
"segmentLocation": "SH-288 Southbound from Reed to Orem"},
{"latitudes": ["29.62743", "29.61395", "29.59725"], "longitudes": ["-95.38729", "-95.3869", "-95.3865"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000"], "longitudeOffsets": ["0.00400", "0.00400", "0.00400"],
"segmentDirection": "SB", "segmentSpeed": 77, "segmentTravelTime": "1 minute 29 seconds", "segmentDist": "1.90",
"segmentFacilityType": "ML", "segmentOriginId": "1885", "segmentDestId": "1886",
"segmentLocation": "SH-288 Southbound from Orem to Beltway 8-South"},
{"latitudes": ["29.59725", "29.58025"], "longitudes": ["-95.3865", "-95.3862"],
"latitudeOffsets": ["0.00000", "0.00000"], "longitudeOffsets": ["0.00400", "0.00400"], "segmentDirection": "SB",
"segmentSpeed": 49, "segmentTravelTime": "2 minutes 11 seconds", "segmentDist": "1.80",
"segmentFacilityType": "ML", "segmentOriginId": "1886", "segmentDestId": "1887",
"segmentLocation": "SH-288 Southbound from Beltway 8-South to McHard"},
{"latitudes": ["29.58025", "29.57066", "29.55589"], "longitudes": ["-95.3862", "-95.38652", "-95.38769"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000"], "longitudeOffsets": ["0.00400", "0.00400", "0.00400"],
"segmentDirection": "SB", "segmentSpeed": 63, "segmentTravelTime": "1 minute 20 seconds", "segmentDist": "1.40",
"segmentFacilityType": "ML", "segmentOriginId": "1887", "segmentDestId": "1888",
"segmentLocation": "SH-288 Southbound from McHard to FM-518"},
{"latitudes": ["29.55589", "29.54143", "29.5261"], "longitudes": ["-95.38769", "-95.38729", "-95.38713"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000"], "longitudeOffsets": ["0.00400", "0.00400", "0.00400"],
"segmentDirection": "SB", "segmentSpeed": 73, "segmentTravelTime": "1 minute 9 seconds", "segmentDist": "1.40",
"segmentFacilityType": "ML", "segmentOriginId": "1888", "segmentDestId": "1889",
"segmentLocation": "SH-288 Southbound from FM-518 to County Road 101"},
{"latitudes": ["29.5261", "29.52995", "29.51237"], "longitudes": ["-95.38713", "-95.38696", "-95.38738"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000"], "longitudeOffsets": ["0.00400", "0.00400", "0.00400"],
"segmentDirection": "SB", "segmentSpeed": 61, "segmentTravelTime": "1 minute 17 seconds", "segmentDist": "1.30",
"segmentFacilityType": "ML", "segmentOriginId": "1889", "segmentDestId": "1890",
"segmentLocation": "SH-288 Southbound from County Road 101 to County Road 58"},
{"latitudes": ["29.51237", "29.49946", "29.4981", "29.49105", "29.48246"],
"longitudes": ["-95.38738", "-95.38958", "-95.3901", "-95.39416", "-95.40097"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000", "0.00000", "0.00000"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "SB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "1.60", "segmentFacilityType": "ML",
"segmentOriginId": "1890", "segmentDestId": "1891",
"segmentLocation": "SH-288 Southbound from County Road 58 to SH-6"},
{"latitudes": ["29.48246", "29.49105", "29.4981", "29.49946", "29.51237"],
"longitudes": ["-95.40097", "-95.39416", "-95.3901", "-95.38958", "-95.38738"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000", "0.00000", "0.00000"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "NB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "1.60", "segmentFacilityType": "ML",
"segmentOriginId": "1870", "segmentDestId": "1871",
"segmentLocation": "SH-288 Northbound from SH-6 to County Road 58"},
{"latitudes": ["29.51237", "29.52995", "29.5261"], "longitudes": ["-95.38738", "-95.38696", "-95.38713"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000"], "longitudeOffsets": ["0.00400", "0.00400", "0.00400"],
"segmentDirection": "NB", "segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "1.30",
"segmentFacilityType": "ML", "segmentOriginId": "1871", "segmentDestId": "1872",
"segmentLocation": "SH-288 Northbound from County Road 58 to County Road 101"},
{"latitudes": ["29.5261", "29.54143", "29.55589"], "longitudes": ["-95.38713", "-95.38729", "-95.38769"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000"], "longitudeOffsets": ["0.00400", "0.00400", "0.00400"],
"segmentDirection": "NB", "segmentSpeed": 58, "segmentTravelTime": "1 minute 27 seconds", "segmentDist": "1.40",
"segmentFacilityType": "ML", "segmentOriginId": "1872", "segmentDestId": "1873",
"segmentLocation": "SH-288 Northbound from County Road 101 to FM-518"},
{"latitudes": ["29.55589", "29.57066", "29.58025"], "longitudes": ["-95.38769", "-95.38652", "-95.3862"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000"], "longitudeOffsets": ["0.00400", "0.00400", "0.00400"],
"segmentDirection": "NB", "segmentSpeed": 65, "segmentTravelTime": "1 minute 18 seconds", "segmentDist": "1.40",
"segmentFacilityType": "ML", "segmentOriginId": "1873", "segmentDestId": "1874",
"segmentLocation": "SH-288 Northbound from FM-518 to McHard"},
{"latitudes": ["29.58025", "29.59725"], "longitudes": ["-95.3862", "-95.3865"],
"latitudeOffsets": ["0.00000", "0.00000"], "longitudeOffsets": ["0.00400", "0.00400"], "segmentDirection": "NB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "1.80", "segmentFacilityType": "ML",
"segmentOriginId": "1874", "segmentDestId": "1875",
"segmentLocation": "SH-288 Northbound from McHard to Beltway 8-South"},
{"latitudes": ["29.59725", "29.61395", "29.62743"], "longitudes": ["-95.3865", "-95.3869", "-95.38729"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000"], "longitudeOffsets": ["0.00400", "0.00400", "0.00400"],
"segmentDirection": "NB", "segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "1.90",
"segmentFacilityType": "ML", "segmentOriginId": "1875", "segmentDestId": "1876",
"segmentLocation": "SH-288 Northbound from Beltway 8-South to Orem"},
{"latitudes": ["29.62743", "29.64335", "29.65775"], "longitudes": ["-95.38729", "-95.3873", "-95.3873"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000"], "longitudeOffsets": ["0.00400", "0.00400", "0.00400"],
"segmentDirection": "NB", "segmentSpeed": 44, "segmentTravelTime": "3 minutes 45 seconds", "segmentDist": "2",
"segmentFacilityType": "ML", "segmentOriginId": "1876", "segmentDestId": "1877",
"segmentLocation": "SH-288 Northbound from Orem to Reed"},
{"latitudes": ["29.65775", "29.66223", "29.66785", "29.671", "29.67495", "29.6805"],
"longitudes": ["-95.3873", "-95.38598", "-95.3832", "-95.38193", "-95.3817", "-95.381"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000", "0.00000", "0.00000", "0.00000"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "NB",
"segmentSpeed": 56, "segmentTravelTime": "1 minute 24 seconds", "segmentDist": "1.30",
"segmentFacilityType": "ML", "segmentOriginId": "1877", "segmentDestId": "1878",
"segmentLocation": "SH-288 Northbound from Reed to IH-610 South Loop"},
{"latitudes": ["29.6805", "29.68663", "29.69799", "29.70112"],
"longitudes": ["-95.381", "-95.37925", "-95.37601", "-95.37512"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000", "0.00000"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "NB", "segmentSpeed": 57,
"segmentTravelTime": "2 minutes 35 seconds", "segmentDist": "1.50", "segmentFacilityType": "ML",
"segmentOriginId": "1878", "segmentDestId": "1879",
"segmentLocation": "SH-288 Northbound from IH-610 South Loop to Holcombe/Old Spanish Trail"}, {
"latitudes": ["29.70112", "29.706", "29.7125", "29.71581", "29.71931", "29.72213", "29.72485", "29.72831",
"29.73237"],
"longitudes": ["-95.37512", "-95.37538", "-95.37875", "-95.37874", "-95.37783", "-95.37712", "-95.37588",
"-95.37326", "-95.37175"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000", "0.00000", "0.00000", "0.00000", "0.00000", "0.00000",
"-0.0020"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400",
"0.00200"], "segmentDirection": "NB", "segmentSpeed": 58,
"segmentTravelTime": "3 minutes 49 seconds", "segmentDist": "2.70", "segmentFacilityType": "ML",
"segmentOriginId": "1879", "segmentDestId": "1880",
"segmentLocation": "SH-288 Northbound from Holcombe/Old Spanish Trail to US-59 Southwest"}, {
"latitudes": ["29.8129", "29.8127", "29.81218", "29.80683", "29.80158", "29.79975", "29.79455", "29.79118",
"29.7837", "29.78055"],
"longitudes": ["-95.41085", "-95.42938", "-95.43333", "-95.445", "-95.44874", "-95.44945", "-95.45072",
"-95.45162", "-95.45212", "-95.45372"],
"latitudeOffsets": ["-0.0030", "-0.0030", "-0.0030", "0.00000", "0.00000", "0.00000", "0.00000", "0.00000",
"0.00000", "0.00000"],
"longitudeOffsets": ["0.00200", "0.00200", "0.00200", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400",
"0.00400", "0.00400"], "segmentDirection": "SB", "segmentSpeed": 61,
"segmentTravelTime": "4 minutes 52 seconds", "segmentDist": "3.90", "segmentFacilityType": "ML",
"segmentOriginId": "1104", "segmentDestId": "1105",
"segmentLocation": "IH-610 West Loop Southbound from Shepherd to IH-10 Katy"},
{"latitudes": ["29.78055", "29.77846", "29.77332", "29.7599", "29.75415"],
"longitudes": ["-95.45372", "-95.45537", "-95.45605", "-95.45562", "-95.45558"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000", "0.00000", "0.00000"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "SB",
"segmentSpeed": 56, "segmentTravelTime": "2 minutes 22 seconds", "segmentDist": "2.20",
"segmentFacilityType": "ML", "segmentOriginId": "1105", "segmentDestId": "1106",
"segmentLocation": "IH-610 West Loop Southbound from IH-10 Katy to Post Oak"},
{"latitudes": ["29.75415", "29.74745", "29.7411", "29.73195", "29.7287", "29.71675"],
"longitudes": ["-95.45558", "-95.45662", "-95.45832", "-95.46012", "-95.46052", "-95.46028"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000", "0.00000", "0.00000", "0.00000"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "SB",
"segmentSpeed": 61, "segmentTravelTime": "3 minutes 33 seconds", "segmentDist": "2.60",
"segmentFacilityType": "ML", "segmentOriginId": "1106", "segmentDestId": "1130",
"segmentLocation": "IH-610 West Loop Southbound from Post Oak to Fournace"},
{"latitudes": ["29.71675", "29.7117", "29.70575", "29.69855"],
"longitudes": ["-95.46028", "-95.46018", "-95.45942", "-95.45918"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000", "0.00000"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "SB", "segmentSpeed": 64,
"segmentTravelTime": "1 minute 19 seconds", "segmentDist": "1.40", "segmentFacilityType": "ML",
"segmentOriginId": "1130", "segmentDestId": "1131",
"segmentLocation": "IH-610 West Loop Southbound from Fournace to Evergreen"},
{"latitudes": ["29.69855", "29.70575", "29.7117", "29.71675"],
"longitudes": ["-95.45918", "-95.45942", "-95.46018", "-95.46028"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000", "0.00000"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "NB", "segmentSpeed": 55,
"segmentTravelTime": "2 minutes 31 seconds", "segmentDist": "1.40", "segmentFacilityType": "ML",
"segmentOriginId": "1128", "segmentDestId": "1129",
"segmentLocation": "IH-610 West Loop Northbound from Evergreen to Fournace"},
{"latitudes": ["29.71675", "29.7287", "29.73195", "29.7411", "29.74745", "29.75415"],
"longitudes": ["-95.46028", "-95.46052", "-95.46012", "-95.45832", "-95.45662", "-95.45558"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000", "0.00000", "0.00000", "0.00000"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "NB",
"segmentSpeed": 56, "segmentTravelTime": "3 minutes 46 seconds", "segmentDist": "2.60",
"segmentFacilityType": "ML", "segmentOriginId": "1129", "segmentDestId": "1107",
"segmentLocation": "IH-610 West Loop Northbound from Fournace to Post Oak"},
{"latitudes": ["29.75415", "29.7599", "29.77332", "29.77846", "29.78055"],
"longitudes": ["-95.45558", "-95.45562", "-95.45605", "-95.45537", "-95.45372"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000", "0.00000", "0.00000"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "NB",
"segmentSpeed": 57, "segmentTravelTime": "2 minutes 19 seconds", "segmentDist": "2.20",
"segmentFacilityType": "ML", "segmentOriginId": "1107", "segmentDestId": "1100",
"segmentLocation": "IH-610 West Loop Northbound from Post Oak to IH-10 Katy"}, {
"latitudes": ["29.78055", "29.7837", "29.79118", "29.79455", "29.79975", "29.80158", "29.80683", "29.81218",
"29.8127", "29.8129"],
"longitudes": ["-95.45372", "-95.45212", "-95.45162", "-95.45072", "-95.44945", "-95.44874", "-95.445",
"-95.43333", "-95.42938", "-95.41085"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000", "0.00000", "0.00000", "0.00000", "0.00000", "-0.0030",
"-0.0030", "-0.0030"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00200",
"0.00200", "0.00200"], "segmentDirection": "NB", "segmentSpeed": 62,
"segmentTravelTime": "4 minutes 48 seconds", "segmentDist": "3.90", "segmentFacilityType": "ML",
"segmentOriginId": "1100", "segmentDestId": "1101",
"segmentLocation": "IH-610 West Loop Northbound from IH-10 Katy to Shepherd"},
{"latitudes": ["29.8129", "29.813", "29.81398", "29.81402", "29.81402", "29.81375", "29.8139"],
"longitudes": ["-95.41085", "-95.3993", "-95.39152", "-95.38242", "-95.3756", "-95.36985", "-95.36132"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": 66, "segmentTravelTime": "2 minutes 21 seconds", "segmentDist": "2.60",
"segmentFacilityType": "ML", "segmentOriginId": "1101", "segmentDestId": "1102",
"segmentLocation": "IH-610 North Loop Eastbound from Shepherd/Durham to Irvington"},
{"latitudes": ["29.8139", "29.81348", "29.8126", "29.80995", "29.8082", "29.80855"],
"longitudes": ["-95.36132", "-95.35248", "-95.35032", "-95.34248", "-95.336", "-95.32862"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB",
"segmentSpeed": 49, "segmentTravelTime": "2 minutes 26 seconds", "segmentDist": "2", "segmentFacilityType": "ML",
"segmentOriginId": "1102", "segmentDestId": "1108",
"segmentLocation": "IH-610 North Loop Eastbound from Irvington to Hirsch"},
{"latitudes": ["29.80855", "29.80935", "29.80935", "29.80745", "29.80475", "29.80432"],
"longitudes": ["-95.32862", "-95.31715", "-95.3053", "-95.3019", "-95.2966", "-95.29112"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB",
"segmentSpeed": 61, "segmentTravelTime": "2 minutes 21 seconds", "segmentDist": "2.40",
"segmentFacilityType": "ML", "segmentOriginId": "1108", "segmentDestId": "1109",
"segmentLocation": "IH-610 North Loop Eastbound from Hirsch to Kirkpatrick"},
{"latitudes": ["29.80432", "29.80435", "29.7969", "29.79181", "29.78455"],
"longitudes": ["-95.29112", "-95.28458", "-95.27153", "-95.26794", "-95.2644"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB",
"segmentSpeed": 60, "segmentTravelTime": "2 minutes 11 seconds", "segmentDist": "2.20",
"segmentFacilityType": "ML", "segmentOriginId": "1109", "segmentDestId": "1112",
"segmentLocation": "IH-610 North Loop Eastbound from Kirkpatrick to Gellhorn"},
{"latitudes": ["29.78455", "29.79181", "29.7969", "29.80435", "29.80432"],
"longitudes": ["-95.2644", "-95.26794", "-95.27153", "-95.28458", "-95.29112"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB",
"segmentSpeed": 55, "segmentTravelTime": "2 minutes 23 seconds", "segmentDist": "2.20",
"segmentFacilityType": "ML", "segmentOriginId": "1117", "segmentDestId": "1110",
"segmentLocation": "IH-610 North Loop Westbound from Gellhorn to Kirkpatrick"},
{"latitudes": ["29.80432", "29.80475", "29.80745", "29.80935", "29.80935", "29.80855"],
"longitudes": ["-95.29112", "-95.2966", "-95.3019", "-95.3053", "-95.31715", "-95.32862"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB",
"segmentSpeed": 67, "segmentTravelTime": "2 minutes 9 seconds", "segmentDist": "2.40",
"segmentFacilityType": "ML", "segmentOriginId": "1110", "segmentDestId": "1111",
"segmentLocation": "IH-610 North Loop Westbound from Kirkpatrick to Hirsch"},
{"latitudes": ["29.80855", "29.8082", "29.80995", "29.8126", "29.81348", "29.8139"],
"longitudes": ["-95.32862", "-95.336", "-95.34248", "-95.35032", "-95.35248", "-95.36132"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB",
"segmentSpeed": 49, "segmentTravelTime": "2 minutes 28 seconds", "segmentDist": "2", "segmentFacilityType": "ML",
"segmentOriginId": "1111", "segmentDestId": "1103",
"segmentLocation": "IH-610 North Loop Westbound from Hirsch to Irvington"},
{"latitudes": ["29.8139", "29.81375", "29.81402", "29.81402", "29.81398", "29.813", "29.8129"],
"longitudes": ["-95.36132", "-95.36985", "-95.3756", "-95.38242", "-95.39152", "-95.3993", "-95.41085"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"segmentDirection": "WB", "segmentSpeed": 61, "segmentTravelTime": "3 minutes 33 seconds", "segmentDist": "2.60",
"segmentFacilityType": "ML", "segmentOriginId": "1103", "segmentDestId": "1104",
"segmentLocation": "IH-610 North Loop Westbound from Irvington to Shepherd/Durham"},
{"latitudes": ["29.79181", "29.78455", "29.77555", "29.7695", "29.73815"],
"longitudes": ["-95.26794", "-95.2644", "-95.26398", "-95.26528", "-95.26522"],
"latitudeOffsets": ["-0.0010", "-0.0010", "-0.0010", "-0.0010", "-0.0010"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "SB",
"segmentSpeed": 56, "segmentTravelTime": "2 minutes 21 seconds", "segmentDist": "2.20",
"segmentFacilityType": "ML", "segmentOriginId": "1112", "segmentDestId": "1113",
"segmentLocation": "IH-610 East Loop Southbound from Gellhorn to Clinton"},
{"latitudes": ["29.73815", "29.7251", "29.7139", "29.70945"],
"longitudes": ["-95.26522", "-95.26658", "-95.26608", "-95.26752"],
"latitudeOffsets": ["-0.0010", "-0.0010", "-0.0010", "-0.0030"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00100"], "segmentDirection": "SB", "segmentSpeed": 67,
"segmentTravelTime": "2 minutes 25 seconds", "segmentDist": "2.70", "segmentFacilityType": "ML",
"segmentOriginId": "1113", "segmentDestId": "1114",
"segmentLocation": "IH-610 East Loop Southbound from Clinton to SH-225"},
{"latitudes": ["29.70945", "29.7139", "29.7251", "29.73815"],
"longitudes": ["-95.26752", "-95.26608", "-95.26658", "-95.26522"],
"latitudeOffsets": ["-0.0030", "-0.0010", "-0.0010", "-0.0010"],
"longitudeOffsets": ["0.00100", "0.00400", "0.00400", "0.00400"], "segmentDirection": "NB", "segmentSpeed": 64,
"segmentTravelTime": "3 minutes 32 seconds", "segmentDist": "2.70", "segmentFacilityType": "ML",
"segmentOriginId": "1115", "segmentDestId": "1116",
"segmentLocation": "IH-610 East Loop Northbound from SH-225 to Clinton"},
{"latitudes": ["29.73815", "29.7695", "29.77555", "29.78455", "29.79181"],
"longitudes": ["-95.26522", "-95.26528", "-95.26398", "-95.2644", "-95.26794"],
"latitudeOffsets": ["-0.0010", "-0.0010", "-0.0010", "-0.0010", "-0.0010"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "NB",
"segmentSpeed": 64, "segmentTravelTime": "2 minutes 3 seconds", "segmentDist": "2.20",
"segmentFacilityType": "ML", "segmentOriginId": "1116", "segmentDestId": "1117",
"segmentLocation": "IH-610 East Loop Northbound from Clinton to Gellhorn"},
{"latitudes": ["29.69855", "29.68915", "29.6814", "29.6799", "29.67929", "29.67922", "29.67928"],
"longitudes": ["-95.45918", "-95.45912", "-95.4585", "-95.45712", "-95.45501", "-95.44827", "-95.44"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000", "0.00000", "0.00200", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00200", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": 59, "segmentTravelTime": "2 minutes 9 seconds", "segmentDist": "2.10",
"segmentFacilityType": "ML", "segmentOriginId": "1131", "segmentDestId": "1123",
"segmentLocation": "IH-610 South Loop Eastbound from Evergreen to Stella Link"},
{"latitudes": ["29.67928", "29.67924", "29.67762", "29.67763", "29.67775", "29.67794", "29.67812", "29.67906"],
"longitudes": ["-95.44", "-95.4355", "-95.42775", "-95.42181", "-95.4185", "-95.4125", "-95.40781", "-95.40319"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": 57, "segmentTravelTime": "2 minutes 18 seconds", "segmentDist": "2.20",
"segmentFacilityType": "ML", "segmentOriginId": "1123", "segmentDestId": "1124",
"segmentLocation": "IH-610 South Loop Eastbound from Stella Link to Fannin"},
{"latitudes": ["29.67906", "29.6805", "29.68056", "29.68094"],
"longitudes": ["-95.40319", "-95.39319", "-95.38113", "-95.36675"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB", "segmentSpeed": 61,
"segmentTravelTime": "2 minutes 9 seconds", "segmentDist": "2.20", "segmentFacilityType": "ML",
"segmentOriginId": "1124", "segmentDestId": "1125",
"segmentLocation": "IH-610 South Loop Eastbound from Fannin to Scott"},
{"latitudes": ["29.68094", "29.68046", "29.68081", "29.68313", "29.68563", "29.68887"],
"longitudes": ["-95.36675", "-95.35987", "-95.35594", "-95.34913", "-95.34081", "-95.33195"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB",
"segmentSpeed": 60, "segmentTravelTime": "2 minutes 6 seconds", "segmentDist": "2.10",
"segmentFacilityType": "ML", "segmentOriginId": "1125", "segmentDestId": "1126",
"segmentLocation": "IH-610 South Loop Eastbound from Scott to Crestmont"},
{"latitudes": ["29.68887", "29.6935", "29.69488", "29.69677", "29.697", "29.69575"],
"longitudes": ["-95.33195", "-95.32238", "-95.31925", "-95.31459", "-95.31082", "-95.30019"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB",
"segmentSpeed": 53, "segmentTravelTime": "2 minutes 23 seconds", "segmentDist": "2.10",
"segmentFacilityType": "ML", "segmentOriginId": "1126", "segmentDestId": "1127",
"segmentLocation": "IH-610 South Loop Eastbound from Crestmont to Telephone"},
{"latitudes": ["29.69575", "29.69556", "29.69719", "29.70245", "29.70535", "29.70018", "29.70945"],
"longitudes": ["-95.30019", "-95.29502", "-95.28857", "-95.27778", "-95.27408", "-95.28129", "-95.26752"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": 55, "segmentTravelTime": "3 minutes 38 seconds", "segmentDist": "2.40",
"segmentFacilityType": "ML", "segmentOriginId": "1127", "segmentDestId": "1115",
"segmentLocation": "IH-610 South Loop Eastbound from Telephone to SH-225"},
{"latitudes": ["29.70945", "29.70018", "29.70535", "29.70245", "29.69719", "29.69556", "29.69575"],
"longitudes": ["-95.26752", "-95.28129", "-95.27408", "-95.27778", "-95.28857", "-95.29502", "-95.30019"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"segmentDirection": "WB", "segmentSpeed": 56, "segmentTravelTime": "3 minutes 33 seconds", "segmentDist": "2.40",
"segmentFacilityType": "ML", "segmentOriginId": "1114", "segmentDestId": "1118",
"segmentLocation": "IH-610 South Loop Westbound from SH-225 to Telephone"},
{"latitudes": ["29.69575", "29.697", "29.69677", "29.69488", "29.6935", "29.68887"],
"longitudes": ["-95.30019", "-95.31082", "-95.31459", "-95.31925", "-95.32238", "-95.33195"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB",
"segmentSpeed": 59, "segmentTravelTime": "2 minutes 9 seconds", "segmentDist": "2.10",
"segmentFacilityType": "ML", "segmentOriginId": "1118", "segmentDestId": "1119",
"segmentLocation": "IH-610 South Loop Westbound from Telephone to Crestmont"},
{"latitudes": ["29.68887", "29.68563", "29.68313", "29.68081", "29.68046", "29.68094"],
"longitudes": ["-95.33195", "-95.34081", "-95.34913", "-95.35594", "-95.35987", "-95.36675"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB",
"segmentSpeed": 60, "segmentTravelTime": "2 minutes 7 seconds", "segmentDist": "2.10",
"segmentFacilityType": "ML", "segmentOriginId": "1119", "segmentDestId": "1120",
"segmentLocation": "IH-610 South Loop Westbound from Crestmont to Scott"},
{"latitudes": ["29.68094", "29.68056", "29.6805", "29.67906"],
"longitudes": ["-95.36675", "-95.38113", "-95.39319", "-95.40319"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB", "segmentSpeed": 65,
"segmentTravelTime": "2 minutes 2 seconds", "segmentDist": "2.20", "segmentFacilityType": "ML",
"segmentOriginId": "1120", "segmentDestId": "1121",
"segmentLocation": "IH-610 South Loop Westbound from Scott to Fannin"},
{"latitudes": ["29.67906", "29.67812", "29.67794", "29.67775", "29.67763", "29.67762", "29.67924", "29.67928"],
"longitudes": ["-95.40319", "-95.40781", "-95.4125", "-95.4185", "-95.42181", "-95.42775", "-95.4355", "-95.44"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"segmentDirection": "WB", "segmentSpeed": 64, "segmentTravelTime": "2 minutes 3 seconds", "segmentDist": "2.20",
"segmentFacilityType": "ML", "segmentOriginId": "1121", "segmentDestId": "1122",
"segmentLocation": "IH-610 South Loop Westbound from Fannin to Stella Link"},
{"latitudes": ["29.67928", "29.67922", "29.67929", "29.6799", "29.6814", "29.68915", "29.69855"],
"longitudes": ["-95.44", "-95.44827", "-95.45501", "-95.45712", "-95.4585", "-95.45912", "-95.45918"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00200", "0.00000", "0.00000", "0.00000", "0.00000"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00200", "0.00400", "0.00400", "0.00400", "0.00400"],
"segmentDirection": "WB", "segmentSpeed": 61, "segmentTravelTime": "2 minutes 4 seconds", "segmentDist": "2.10",
"segmentFacilityType": "ML", "segmentOriginId": "1122", "segmentDestId": "1128",
"segmentLocation": "IH-610 South Loop Westbound from Stella Link to Evergreen"},
{"latitudes": ["30.1059", "30.103", "30.09575", "30.08769", "30.07912", "30.07444", "30.07088", "30.06575"],
"longitudes": ["-95.43452", "-95.4295", "-95.41537", "-95.41281", "-95.41062", "-95.41156", "-95.41144",
"-95.40819"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400"],
"segmentDirection": "SB", "segmentSpeed": 70, "segmentTravelTime": "3 minutes 43 seconds", "segmentDist": "3.20",
"segmentFacilityType": "ML", "segmentOriginId": "90", "segmentDestId": "91",
"segmentLocation": "Hardy Toll Road Southbound from IH-45 North to Cypresswood"},
{"latitudes": ["30.06575", "30.06288", "30.05181", "30.04175", "30.0265"],
"longitudes": ["-95.40819", "-95.40612", "-95.40369", "-95.40794", "-95.40444"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "SB",
"segmentSpeed": 75, "segmentTravelTime": "2 minutes 14 seconds", "segmentDist": "2.80",
"segmentFacilityType": "ML", "segmentOriginId": "91", "segmentDestId": "92",
"segmentLocation": "Hardy Toll Road Southbound from Cypresswood to FM-1960"}, {
"latitudes": ["30.0265", "30.00894", "30.00182", "29.99607", "29.97357", "29.97055", "29.96863", "29.96532",
"29.95738", "29.9543", "29.95226"],
"longitudes": ["-95.40444", "-95.40343", "-95.39825", "-95.39681", "-95.39012", "-95.38231", "-95.38062",
"-95.38", "-95.38031", "-95.38283", "-95.38412"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100",
"0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400",
"0.00400", "0.00400", "0.00400"], "segmentDirection": "SB", "segmentSpeed": 61,
"segmentTravelTime": "6 minutes 40 seconds", "segmentDist": "5.80", "segmentFacilityType": "ML",
"segmentOriginId": "92", "segmentDestId": "93",
"segmentLocation": "Hardy Toll Road Southbound from FM-1960 to Greens"},
{"latitudes": ["29.95226", "29.93888", "29.93276"], "longitudes": ["-95.38412", "-95.38137", "-95.37968"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100"], "longitudeOffsets": ["0.00400", "0.00400", "0.00400"],
"segmentDirection": "SB", "segmentSpeed": 66, "segmentTravelTime": "1 minute 24 seconds", "segmentDist": "1.55",
"segmentFacilityType": "ML", "segmentOriginId": "93", "segmentDestId": "94",
"segmentLocation": "Hardy Toll Road Southbound from Greens to Aldine-Bender"},
{"latitudes": ["29.93276", "29.90213", "29.89019", "29.87076"],
"longitudes": ["-95.37968", "-95.37268", "-95.36987", "-95.36518"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "SB", "segmentSpeed": 57,
"segmentTravelTime": "4 minutes 25 seconds", "segmentDist": "4.20", "segmentFacilityType": "ML",
"segmentOriginId": "94", "segmentDestId": "95",
"segmentLocation": "Hardy Toll Road Southbound from Aldine-Bender to Little York"},
{"latitudes": ["29.87076", "29.85919", "29.84651", "29.82882", "29.82182", "29.81976", "29.81395"],
"longitudes": ["-95.36518", "-95.36237", "-95.35906", "-95.35468", "-95.35599", "-95.35545", "-95.35243"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00000", "0.00000", "0.00000", "0.00000"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400"],
"segmentDirection": "SB", "segmentSpeed": 64, "segmentTravelTime": "3 minutes 22 seconds", "segmentDist": "3.60",
"segmentFacilityType": "ML", "segmentOriginId": "95", "segmentDestId": "96",
"segmentLocation": "Hardy Toll Road Southbound from Little York to IH-610 North Loop"},
{"latitudes": ["29.81395", "29.81976", "29.82182", "29.82882", "29.84651", "29.85919", "29.87076"],
"longitudes": ["-95.35243", "-95.35545", "-95.35599", "-95.35468", "-95.35906", "-95.36237", "-95.36518"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000", "0.00000", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400"],
"segmentDirection": "NB", "segmentSpeed": 71, "segmentTravelTime": "3 minutes 2 seconds", "segmentDist": "3.60",
"segmentFacilityType": "ML", "segmentOriginId": "97", "segmentDestId": "98",
"segmentLocation": "Hardy Toll Road Northbound from IH-610 North Loop to Little York"},
{"latitudes": ["29.87076", "29.89019", "29.90213", "29.93276"],
"longitudes": ["-95.36518", "-95.36987", "-95.37268", "-95.37968"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "NB", "segmentSpeed": 59,
"segmentTravelTime": "4 minutes 15 seconds", "segmentDist": "4.20", "segmentFacilityType": "ML",
"segmentOriginId": "98", "segmentDestId": "99",
"segmentLocation": "Hardy Toll Road Northbound from Little York to Aldine-Bender"},
{"latitudes": ["29.93276", "29.93888", "29.95226"], "longitudes": ["-95.37968", "-95.38137", "-95.38412"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100"], "longitudeOffsets": ["0.00400", "0.00400", "0.00400"],
"segmentDirection": "NB", "segmentSpeed": 69, "segmentTravelTime": "1 minute 20 seconds", "segmentDist": "1.55",
"segmentFacilityType": "ML", "segmentOriginId": "99", "segmentDestId": "100",
"segmentLocation": "Hardy Toll Road Northbound from Aldine-Bender to Greens"}, {
"latitudes": ["29.95226", "29.9543", "29.95738", "29.96532", "29.96863", "29.97055", "29.97357", "29.99607",
"30.00182", "30.00894", "30.0265"],
"longitudes": ["-95.38412", "-95.38283", "-95.38031", "-95.38", "-95.38062", "-95.38231", "-95.39012",
"-95.39681", "-95.39825", "-95.40343", "-95.40444"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100",
"0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400",
"0.00400", "0.00400", "0.00400"], "segmentDirection": "NB", "segmentSpeed": 67,
"segmentTravelTime": "5 minutes 9 seconds", "segmentDist": "5.80", "segmentFacilityType": "ML",
"segmentOriginId": "100", "segmentDestId": "101",
"segmentLocation": "Hardy Toll Road Northbound from Greens to FM-1960"},
{"latitudes": ["30.0265", "30.04175", "30.05181", "30.06288", "30.06575"],
"longitudes": ["-95.40444", "-95.40794", "-95.40369", "-95.40612", "-95.40819"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "NB",
"segmentSpeed": 65, "segmentTravelTime": "3 minutes 35 seconds", "segmentDist": "2.80",
"segmentFacilityType": "ML", "segmentOriginId": "101", "segmentDestId": "102",
"segmentLocation": "Hardy Toll Road Northbound from FM-1960 to Cypresswood"},
{"latitudes": ["30.06575", "30.07088", "30.07444", "30.07912", "30.08769", "30.09575", "30.103", "30.1059"],
"longitudes": ["-95.40819", "-95.41144", "-95.41156", "-95.41062", "-95.41281", "-95.41537", "-95.4295",
"-95.43452"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400"],
"segmentDirection": "NB", "segmentSpeed": 60, "segmentTravelTime": "3 minutes 10 seconds", "segmentDist": "3.20",
"segmentFacilityType": "ML", "segmentOriginId": "102", "segmentDestId": "103",
"segmentLocation": "Hardy Toll Road Northbound from Cypresswood to IH-45 North"},
{"latitudes": ["29.9237", "29.9089", "29.9029"], "longitudes": ["-95.555", "-95.55595", "-95.5521"],
"latitudeOffsets": ["-0.0010", "-0.0010", "-0.0010"], "longitudeOffsets": ["0.00400", "0.00400", "0.00400"],
"segmentDirection": "SB", "segmentSpeed": 60, "segmentTravelTime": "1 minute 0 seconds", "segmentDist": "1",
"segmentFacilityType": "ML", "segmentOriginId": "1425", "segmentDestId": "1426",
"segmentLocation": "Beltway 8-West Southbound from Fallbrook to West"},
{"latitudes": ["29.9029", "29.90035", "29.88675", "29.87905"],
"longitudes": ["-95.5521", "-95.55035", "-95.5492", "-95.5491"],
"latitudeOffsets": ["-0.0010", "-0.0010", "-0.0010", "-0.0010"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "SB", "segmentSpeed": 65,
"segmentTravelTime": "2 minutes 34 seconds", "segmentDist": "1.70", "segmentFacilityType": "ML",
"segmentOriginId": "1426", "segmentDestId": "1427",
"segmentLocation": "Beltway 8-West Southbound from West to Gulf Bank"},
{"latitudes": ["29.87905", "29.87378", "29.87127", "29.86728"],
"longitudes": ["-95.5491", "-95.55585", "-95.56135", "-95.56375"],
"latitudeOffsets": ["-0.0010", "-0.0010", "-0.0010", "0.00000"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "SB", "segmentSpeed": 59,
"segmentTravelTime": "1 minute 19 seconds", "segmentDist": "1.30", "segmentFacilityType": "ML",
"segmentOriginId": "1427", "segmentDestId": "1428",
"segmentLocation": "Beltway 8-West Southbound from Gulf Bank to West Little York"},
{"latitudes": ["29.86728", "29.85028", "29.83198"], "longitudes": ["-95.56375", "-95.56385", "-95.56385"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000"], "longitudeOffsets": ["0.00400", "0.00400", "0.00400"],
"segmentDirection": "SB", "segmentSpeed": 58, "segmentTravelTime": "2 minutes 45 seconds", "segmentDist": "1.70",
"segmentFacilityType": "ML", "segmentOriginId": "1428", "segmentDestId": "1429",
"segmentLocation": "Beltway 8-West Southbound from West Little York to Clay"},
{"latitudes": ["29.83198", "29.82278"], "longitudes": ["-95.56385", "-95.56385"],
"latitudeOffsets": ["0.00000", "0.00000"], "longitudeOffsets": ["0.00400", "0.00400"], "segmentDirection": "SB",
"segmentSpeed": 74, "segmentTravelTime": "1 minute 8 seconds", "segmentDist": "1.40", "segmentFacilityType": "ML",
"segmentOriginId": "1429", "segmentDestId": "1430",
"segmentLocation": "Beltway 8-West Southbound from Clay to Kempwood"},
{"latitudes": ["29.82278", "29.81268"], "longitudes": ["-95.56385", "-95.56365"],
"latitudeOffsets": ["0.00000", "0.00000"], "longitudeOffsets": ["0.00400", "0.00400"], "segmentDirection": "SB",
"segmentSpeed": 84, "segmentTravelTime": "1 minute 43 seconds", "segmentDist": "1", "segmentFacilityType": "ML",
"segmentOriginId": "1430", "segmentDestId": "1431",
"segmentLocation": "Beltway 8-West Southbound from Kempwood to Hammerly"},
{"latitudes": ["29.81268", "29.79087", "29.78428", "29.77278", "29.76728", "29.76188"],
"longitudes": ["-95.56365", "-95.56345", "-95.56315", "-95.56285", "-95.56175", "-95.55805"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000", "0.00000", "0.00000", "0.00000"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "SB",
"segmentSpeed": 60, "segmentTravelTime": "3 minutes 18 seconds", "segmentDist": "3.30",
"segmentFacilityType": "ML", "segmentOriginId": "1431", "segmentDestId": "1432",
"segmentLocation": "Beltway 8-West Southbound from Hammerly to Buffalo Bayou"},
{"latitudes": ["29.76188", "29.74778"], "longitudes": ["-95.55805", "-95.55795"],
"latitudeOffsets": ["0.00000", "0.00000"], "longitudeOffsets": ["0.00400", "0.00400"], "segmentDirection": "SB",
"segmentSpeed": 50, "segmentTravelTime": "1 minute 50 seconds", "segmentDist": "0.70",
"segmentFacilityType": "ML", "segmentOriginId": "1432", "segmentDestId": "1433",
"segmentLocation": "Beltway 8-West Southbound from Buffalo Bayou to Briar Forest"},
{"latitudes": ["29.74778", "29.73698", "29.72767"], "longitudes": ["-95.55795", "-95.55765", "-95.55755"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000"], "longitudeOffsets": ["0.00400", "0.00400", "0.00400"],
"segmentDirection": "SB", "segmentSpeed": 62, "segmentTravelTime": "2 minutes 39 seconds", "segmentDist": "1.70",
"segmentFacilityType": "ML", "segmentOriginId": "1433", "segmentDestId": "1434",
"segmentLocation": "Beltway 8-West Southbound from Briar Forest to Richmond"},
{"latitudes": ["29.72767", "29.72207", "29.71668", "29.70448", "29.68948"],
"longitudes": ["-95.55755", "-95.55765", "-95.55735", "-95.55735", "-95.55745"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000", "0.00000", "0.00000"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "SB",
"segmentSpeed": 58, "segmentTravelTime": "2 minutes 11 seconds", "segmentDist": "2.10",
"segmentFacilityType": "ML", "segmentOriginId": "1434", "segmentDestId": "1437",
"segmentLocation": "Beltway 8-West Southbound from Richmond to Beechnut"},
{"latitudes": ["29.68948", "29.67468", "29.66168"], "longitudes": ["-95.55745", "-95.56045", "-95.55835"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000"], "longitudeOffsets": ["0.00400", "0.00400", "0.00400"],
"segmentDirection": "SB", "segmentSpeed": 60, "segmentTravelTime": "2 minutes 54 seconds", "segmentDist": "1.90",
"segmentFacilityType": "ML", "segmentOriginId": "1437", "segmentDestId": "1438",
"segmentLocation": "Beltway 8-West Southbound from Beechnut to US-59 Southwest"},
{"latitudes": ["29.66168", "29.67468", "29.68948"], "longitudes": ["-95.55835", "-95.56045", "-95.55745"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000"], "longitudeOffsets": ["0.00400", "0.00400", "0.00400"],
"segmentDirection": "NB", "segmentSpeed": 58, "segmentTravelTime": "2 minutes 57 seconds", "segmentDist": "1.90",
"segmentFacilityType": "ML", "segmentOriginId": "1435", "segmentDestId": "1436",
"segmentLocation": "Beltway 8-West Northbound from US-59 Southwest to Beechnut"},
{"latitudes": ["29.68948", "29.70448", "29.71668", "29.72207", "29.72767"],
"longitudes": ["-95.55745", "-95.55735", "-95.55735", "-95.55765", "-95.55755"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000", "0.00000", "0.00000"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "NB",
"segmentSpeed": 58, "segmentTravelTime": "2 minutes 10 seconds", "segmentDist": "2.10",
"segmentFacilityType": "ML", "segmentOriginId": "1436", "segmentDestId": "1415",
"segmentLocation": "Beltway 8-West Northbound from Beechnut to Richmond"},
{"latitudes": ["29.72767", "29.73698", "29.74778"], "longitudes": ["-95.55755", "-95.55765", "-95.55795"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000"], "longitudeOffsets": ["0.00400", "0.00400", "0.00400"],
"segmentDirection": "NB", "segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "1.70",
"segmentFacilityType": "ML", "segmentOriginId": "1415", "segmentDestId": "1416",
"segmentLocation": "Beltway 8-West Northbound from Richmond to Briar Forest"},
{"latitudes": ["29.74778", "29.76188"], "longitudes": ["-95.55795", "-95.55805"],
"latitudeOffsets": ["0.00000", "0.00000"], "longitudeOffsets": ["0.00400", "0.00400"], "segmentDirection": "NB",
"segmentSpeed": 66, "segmentTravelTime": "1 minute 38 seconds", "segmentDist": "0.70",
"segmentFacilityType": "ML", "segmentOriginId": "1416", "segmentDestId": "1417",
"segmentLocation": "Beltway 8-West Northbound from Briar Forest to Buffalo Bayou"},
{"latitudes": ["29.76188", "29.76728", "29.77278", "29.78428", "29.79087", "29.81268"],
"longitudes": ["-95.55805", "-95.56175", "-95.56285", "-95.56315", "-95.56345", "-95.56365"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000", "0.00000", "0.00000", "0.00000"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "NB",
"segmentSpeed": 59, "segmentTravelTime": "3 minutes 23 seconds", "segmentDist": "3.30",
"segmentFacilityType": "ML", "segmentOriginId": "1417", "segmentDestId": "1418",
"segmentLocation": "Beltway 8-West Northbound from Buffalo Bayou to Hammerly"},
{"latitudes": ["29.81268", "29.82278"], "longitudes": ["-95.56365", "-95.56385"],
"latitudeOffsets": ["0.00000", "0.00000"], "longitudeOffsets": ["0.00400", "0.00400"], "segmentDirection": "NB",
"segmentSpeed": 88, "segmentTravelTime": "1 minute 41 seconds", "segmentDist": "1", "segmentFacilityType": "ML",
"segmentOriginId": "1418", "segmentDestId": "1419",
"segmentLocation": "Beltway 8-West Northbound from Hammerly to Kempwood"},
{"latitudes": ["29.82278", "29.83198"], "longitudes": ["-95.56385", "-95.56385"],
"latitudeOffsets": ["0.00000", "0.00000"], "longitudeOffsets": ["0.00400", "0.00400"], "segmentDirection": "NB",
"segmentSpeed": 71, "segmentTravelTime": "1 minute 11 seconds", "segmentDist": "1.40",
"segmentFacilityType": "ML", "segmentOriginId": "1419", "segmentDestId": "1420",
"segmentLocation": "Beltway 8-West Northbound from Kempwood to Clay"},
{"latitudes": ["29.83198", "29.85028", "29.86728"], "longitudes": ["-95.56385", "-95.56385", "-95.56375"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000"], "longitudeOffsets": ["0.00400", "0.00400", "0.00400"],
"segmentDirection": "NB", "segmentSpeed": 74, "segmentTravelTime": "1 minute 23 seconds", "segmentDist": "1.70",
"segmentFacilityType": "ML", "segmentOriginId": "1420", "segmentDestId": "1421",
"segmentLocation": "Beltway 8-West Northbound from Clay to West Little York"},
{"latitudes": ["29.86728", "29.87127", "29.87378", "29.87905"],
"longitudes": ["-95.56375", "-95.56135", "-95.55585", "-95.5491"],
"latitudeOffsets": ["0.00000", "-0.0010", "-0.0010", "-0.0010"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "NB", "segmentSpeed": 57,
"segmentTravelTime": "1 minute 22 seconds", "segmentDist": "1.30", "segmentFacilityType": "ML",
"segmentOriginId": "1421", "segmentDestId": "1422",
"segmentLocation": "Beltway 8-West Northbound from West Little York to Gulf Bank"},
{"latitudes": ["29.87905", "29.88675", "29.90035", "29.9029"],
"longitudes": ["-95.5491", "-95.5492", "-95.55035", "-95.5521"],
"latitudeOffsets": ["-0.0010", "-0.0010", "-0.0010", "-0.0010"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "NB", "segmentSpeed": 59,
"segmentTravelTime": "2 minutes 43 seconds", "segmentDist": "1.70", "segmentFacilityType": "ML",
"segmentOriginId": "1422", "segmentDestId": "1423",
"segmentLocation": "Beltway 8-West Northbound from Gulf Bank to West"},
{"latitudes": ["29.9029", "29.9089", "29.9237"], "longitudes": ["-95.5521", "-95.55595", "-95.555"],
"latitudeOffsets": ["-0.0010", "-0.0010", "-0.0010"], "longitudeOffsets": ["0.00400", "0.00400", "0.00400"],
"segmentDirection": "NB", "segmentSpeed": 67, "segmentTravelTime": "1 minute 54 seconds", "segmentDist": "1",
"segmentFacilityType": "ML", "segmentOriginId": "1423", "segmentDestId": "1424",
"segmentLocation": "Beltway 8-West Northbound from West to Fallbrook"},
{"latitudes": ["29.9237", "29.9263", "29.92985", "29.9358", "29.93705"],
"longitudes": ["-95.555", "-95.55255", "-95.54405", "-95.5293", "-95.51625"],
"latitudeOffsets": ["0.00100", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["-0.0030", "-0.0010", "-0.0010", "0.00100", "0.00100"], "segmentDirection": "EB",
"segmentSpeed": 71, "segmentTravelTime": "2 minutes 16 seconds", "segmentDist": "2.70",
"segmentFacilityType": "ML", "segmentOriginId": "1424", "segmentDestId": "1465",
"segmentLocation": "Beltway 8-North Eastbound from Fallbrook to SH-249"},
{"latitudes": ["29.93705", "29.9371", "29.93735", "29.93761"],
"longitudes": ["-95.51625", "-95.49805", "-95.4833", "-95.4717"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB", "segmentSpeed": 57,
"segmentTravelTime": "3 minutes 50 seconds", "segmentDist": "2.70", "segmentFacilityType": "ML",
"segmentOriginId": "1465", "segmentDestId": "1466",
"segmentLocation": "Beltway 8-North Eastbound from SH-249 to North Toll Plaza"}, {
"latitudes": ["29.93761", "29.9377", "29.93765", "29.9378", "29.93965", "29.9407", "29.9398", "29.9387",
"29.93885"],
"longitudes": ["-95.4717", "-95.46", "-95.4549", "-95.43235", "-95.4247", "-95.41395", "-95.3994", "-95.38325",
"-95.38135"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300",
"0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100",
"0.00100"], "segmentDirection": "EB", "segmentSpeed": 52,
"segmentTravelTime": "6 minutes 11 seconds", "segmentDist": "5.40", "segmentFacilityType": "ML",
"segmentOriginId": "1466", "segmentDestId": "1468",
"segmentLocation": "Beltway 8-North Eastbound from North Toll Plaza to Hardy Toll Road"},
{"latitudes": ["29.93885", "29.93905", "29.9392"], "longitudes": ["-95.38135", "-95.35615", "-95.33155"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": 65, "segmentTravelTime": "3 minutes 2 seconds", "segmentDist": "3.30",
"segmentFacilityType": "ML", "segmentOriginId": "1468", "segmentDestId": "1407",
"segmentLocation": "Beltway 8-North Eastbound from Hardy Toll Road to John F Kennedy Blvd"}, {
"latitudes": ["29.9392", "29.93935", "29.93995", "29.9402", "29.94019", "29.94062", "29.94079", "29.94059",
"29.93968", "29.93853", "29.93555", "29.93489", "29.93464"],
"longitudes": ["-95.33155", "-95.31562", "-95.30342", "-95.29672", "-95.28784", "-95.28324", "-95.28026",
"-95.27728", "-95.27216", "-95.26844", "-95.25908", "-95.25654", "-95.24948"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300",
"0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100",
"0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB",
"segmentSpeed": 65, "segmentTravelTime": "4 minutes 32 seconds", "segmentDist": "3.80",
"segmentFacilityType": "ML", "segmentOriginId": "1407", "segmentDestId": "1408",
"segmentLocation": "Beltway 8-North Eastbound from John F Kennedy Blvd to Wilson Rd"}, {
"latitudes": ["29.93464", "29.93489", "29.93555", "29.93853", "29.93968", "29.94059", "29.94079", "29.94062",
"29.94019", "29.9402", "29.93995", "29.93935", "29.9392"],
"longitudes": ["-95.24948", "-95.25654", "-95.25908", "-95.26844", "-95.27216", "-95.27728", "-95.28026",
"-95.28324", "-95.28784", "-95.29672", "-95.30342", "-95.31562", "-95.33155"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300",
"0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100",
"0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "3.80", "segmentFacilityType": "ML",
"segmentOriginId": "1405", "segmentDestId": "1406",
"segmentLocation": "Beltway 8-North Westbound from Wilson Rd to John F Kennedy Blvd"},
{"latitudes": ["29.9392", "29.93905", "29.93885"], "longitudes": ["-95.33155", "-95.35615", "-95.38135"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "WB", "segmentSpeed": 67, "segmentTravelTime": "3 minutes 58 seconds", "segmentDist": "3.30",
"segmentFacilityType": "ML", "segmentOriginId": "1406", "segmentDestId": "1469",
"segmentLocation": "Beltway 8-North Westbound from John F Kennedy Blvd to Hardy Toll Road"}, {
"latitudes": ["29.93885", "29.9387", "29.9398", "29.9407", "29.93965", "29.9378", "29.93765", "29.9377",
"29.93761"],
"longitudes": ["-95.38135", "-95.38325", "-95.3994", "-95.41395", "-95.4247", "-95.43235", "-95.4549",
"-95.46", "-95.4717"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300",
"0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100",
"0.00100"], "segmentDirection": "WB", "segmentSpeed": 55,
"segmentTravelTime": "6 minutes 54 seconds", "segmentDist": "5.40", "segmentFacilityType": "ML",
"segmentOriginId": "1469", "segmentDestId": "1471",
"segmentLocation": "Beltway 8-North Westbound from Hardy Toll Road to North Toll Plaza"},
{"latitudes": ["29.93761", "29.93735", "29.9371", "29.93705"],
"longitudes": ["-95.4717", "-95.4833", "-95.49805", "-95.51625"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB", "segmentSpeed": 64,
"segmentTravelTime": "3 minutes 33 seconds", "segmentDist": "2.70", "segmentFacilityType": "ML",
"segmentOriginId": "1471", "segmentDestId": "1472",
"segmentLocation": "Beltway 8-North Westbound from North Toll Plaza to SH-249"},
{"latitudes": ["29.93705", "29.9358", "29.92985", "29.9263", "29.9237"],
"longitudes": ["-95.51625", "-95.5293", "-95.54405", "-95.55255", "-95.555"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00100"],
"longitudeOffsets": ["0.00100", "0.00100", "-0.0010", "-0.0010", "-0.0030"], "segmentDirection": "WB",
"segmentSpeed": 64, "segmentTravelTime": "3 minutes 32 seconds", "segmentDist": "2.70",
"segmentFacilityType": "ML", "segmentOriginId": "1472", "segmentDestId": "1425",
"segmentLocation": "Beltway 8-North Westbound from SH-249 to Fallbrook"}, {
"latitudes": ["29.93464", "29.93467", "29.934", "29.93418", "29.93424", "29.93272", "29.92846", "29.92637",
"29.9246", "29.92295", "29.91943", "29.90675"],
"longitudes": ["-95.24948", "-95.24245", "-95.23537", "-95.22557", "-95.21155", "-95.20739", "-95.20188",
"-95.2005", "-95.19992", "-95.19982", "-95.20035", "-95.20234"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "-0.0010",
"-0.0010", "-0.0010", "-0.0010", "-0.0010"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00400",
"0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "SB", "segmentSpeed": 65,
"segmentTravelTime": "5 minutes 36 seconds", "segmentDist": "5", "segmentFacilityType": "ML",
"segmentOriginId": "1408", "segmentDestId": "1410",
"segmentLocation": "Beltway 8-East Southbound from Wilson Rd to Winfield"},
{"latitudes": ["29.90675", "29.90124", "29.89601", "29.89126", "29.88838", "29.88313", "29.87255", "29.87065"],
"longitudes": ["-95.20234", "-95.20309", "-95.20306", "-95.20298", "-95.20203", "-95.19779", "-95.18873",
"-95.18774"],
"latitudeOffsets": ["-0.0010", "-0.0010", "-0.0010", "-0.0010", "-0.0010", "-0.0010", "-0.0010", "-0.0010"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400"],
"segmentDirection": "SB", "segmentSpeed": 58, "segmentTravelTime": "2 minutes 28 seconds", "segmentDist": "2.40",
"segmentFacilityType": "ML", "segmentOriginId": "1410", "segmentDestId": "1411",
"segmentLocation": "Beltway 8-East Southbound from Winfield to Little York"}, {
"latitudes": ["29.87065", "29.86875", "29.8533", "29.84781", "29.84505", "29.84305", "29.84081", "29.83263",
"29.82536", "29.81611", "29.80866"],
"longitudes": ["-95.18774", "-95.18733", "-95.18703", "-95.18688", "-95.1863", "-95.18539", "-95.18369",
"-95.17589", "-95.16924", "-95.16458", "-95.1634"],
"latitudeOffsets": ["-0.0010", "-0.0010", "-0.0010", "-0.0010", "-0.0010", "-0.0010", "-0.0010", "-0.0010",
"-0.0010", "-0.0010", "-0.0010"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400",
"0.00400", "0.00400", "0.00400"], "segmentDirection": "SB", "segmentSpeed": 67,
"segmentTravelTime": "4 minutes 58 seconds", "segmentDist": "4.40", "segmentFacilityType": "ML",
"segmentOriginId": "1411", "segmentDestId": "1412",
"segmentLocation": "Beltway 8-East Southbound from Little York to Wallisville"}, {
"latitudes": ["29.80866", "29.79767", "29.79349", "29.79056", "29.78507", "29.78234", "29.77863", "29.77645",
"29.77421", "29.77125", "29.76468", "29.75506"],
"longitudes": ["-95.1634", "-95.16331", "-95.16237", "-95.16115", "-95.15891", "-95.15853", "-95.15834",
"-95.15763", "-95.15624", "-95.1535", "-95.14813", "-95.14594"],
"latitudeOffsets": ["-0.0010", "-0.0010", "-0.0010", "-0.0010", "-0.0010", "-0.0010", "-0.0010", "-0.0010",
"-0.0010", "0.00000", "-0.0010", "-0.0010"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400",
"0.00400", "0.00500", "0.00400", "0.00400"], "segmentDirection": "SB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "4.20", "segmentFacilityType": "ML",
"segmentOriginId": "1412", "segmentDestId": "1413",
"segmentLocation": "Beltway 8-East Southbound from Wallisville to Jacinto Port"},
{"latitudes": ["29.75506", "29.73662", "29.72306", "29.7175", "29.71225", "29.70169"],
"longitudes": ["-95.14594", "-95.14619", "-95.14775", "-95.15081", "-95.15363", "-95.15369"],
"latitudeOffsets": ["-0.0010", "-0.0010", "-0.0010", "-0.0010", "-0.0010", "-0.0010"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "SB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "4.10", "segmentFacilityType": "ML",
"segmentOriginId": "1413", "segmentDestId": "1461",
"segmentLocation": "Beltway 8-East Southbound from Jacinto Port to Greenshadow"},
{"latitudes": ["29.70169", "29.69006", "29.68394", "29.6815", "29.6655"],
"longitudes": ["-95.15369", "-95.15363", "-95.15481", "-95.15563", "-95.156"],
"latitudeOffsets": ["-0.0010", "-0.0010", "-0.0010", "-0.0010", "-0.0010"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "SB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "2.50", "segmentFacilityType": "ML",
"segmentOriginId": "1461", "segmentDestId": "1462",
"segmentLocation": "Beltway 8-East Southbound from Greenshadow to Spencer"},
{"latitudes": ["29.6655", "29.65806", "29.64975", "29.63962", "29.63344"],
"longitudes": ["-95.156", "-95.15638", "-95.15631", "-95.161", "-95.16763"],
"latitudeOffsets": ["-0.0010", "-0.0010", "-0.0010", "-0.0010", "-0.0010"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "SB",
"segmentSpeed": 36, "segmentTravelTime": "4 minutes 39 seconds", "segmentDist": "2.20",
"segmentFacilityType": "ML", "segmentOriginId": "1462", "segmentDestId": "1463",
"segmentLocation": "Beltway 8-East Southbound from Spencer to Preston"},
{"latitudes": ["29.63344", "29.6305", "29.6265", "29.6165"],
"longitudes": ["-95.16763", "-95.17063", "-95.17556", "-95.19194"],
"latitudeOffsets": ["-0.0010", "-0.0010", "-0.0010", "-0.0010"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "SB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "2.10", "segmentFacilityType": "ML",
"segmentOriginId": "1463", "segmentDestId": "1464",
"segmentLocation": "Beltway 8-East Southbound from Preston to Old Galveston Road"},
{"latitudes": ["29.6165", "29.6265", "29.6305", "29.63344"],
"longitudes": ["-95.19194", "-95.17556", "-95.17063", "-95.16763"],
"latitudeOffsets": ["-0.0010", "-0.0010", "-0.0010", "-0.0010"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "NB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "2.10", "segmentFacilityType": "ML",
"segmentOriginId": "1457", "segmentDestId": "1458",
"segmentLocation": "Beltway 8-East Northbound from Old Galveston Road to Preston"},
{"latitudes": ["29.63344", "29.63962", "29.64975", "29.65806", "29.6655"],
"longitudes": ["-95.16763", "-95.161", "-95.15631", "-95.15638", "-95.156"],
"latitudeOffsets": ["-0.0010", "-0.0010", "-0.0010", "-0.0010", "-0.0010"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "NB",
"segmentSpeed": 65, "segmentTravelTime": "2 minutes 1 second", "segmentDist": "2.20", "segmentFacilityType": "ML",
"segmentOriginId": "1458", "segmentDestId": "1459",
"segmentLocation": "Beltway 8-East Northbound from Preston to Spencer"},
{"latitudes": ["29.6655", "29.6815", "29.68394", "29.69006", "29.70169"],
"longitudes": ["-95.156", "-95.15563", "-95.15481", "-95.15363", "-95.15369"],
"latitudeOffsets": ["-0.0010", "-0.0010", "-0.0010", "-0.0010", "-0.0010"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "NB",
"segmentSpeed": 67, "segmentTravelTime": "2 minutes 14 seconds", "segmentDist": "2.50",
"segmentFacilityType": "ML", "segmentOriginId": "1459", "segmentDestId": "1460",
"segmentLocation": "Beltway 8-East Northbound from Spencer to Greenshadow"},
{"latitudes": ["29.70169", "29.71225", "29.7175", "29.72306", "29.73662", "29.75506"],
"longitudes": ["-95.15369", "-95.15363", "-95.15081", "-95.14775", "-95.14619", "-95.14594"],
"latitudeOffsets": ["-0.0010", "-0.0010", "-0.0010", "-0.0010", "-0.0010", "-0.0010"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "NB",
"segmentSpeed": 70, "segmentTravelTime": "4 minutes 31 seconds", "segmentDist": "4.10",
"segmentFacilityType": "ML", "segmentOriginId": "1460", "segmentDestId": "1400",
"segmentLocation": "Beltway 8-East Northbound from Greenshadow to Jacinto Port"}, {
"latitudes": ["29.75506", "29.76468", "29.77125", "29.77421", "29.77645", "29.77863", "29.78234", "29.78507",
"29.79056", "29.79349", "29.79767", "29.80866"],
"longitudes": ["-95.14594", "-95.14813", "-95.1535", "-95.15624", "-95.15763", "-95.15834", "-95.15853",
"-95.15891", "-95.16115", "-95.16237", "-95.16331", "-95.1634"],
"latitudeOffsets": ["-0.0010", "-0.0010", "0.00000", "-0.0010", "-0.0010", "-0.0010", "-0.0010", "-0.0010",
"-0.0010", "-0.0010", "-0.0010", "-0.0010"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00500", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400",
"0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "NB", "segmentSpeed": 62,
"segmentTravelTime": "4 minutes 2 seconds", "segmentDist": "4.20", "segmentFacilityType": "ML",
"segmentOriginId": "1400", "segmentDestId": "1401",
"segmentLocation": "Beltway 8-East Northbound from Jacinto Port to Wallisville"}, {
"latitudes": ["29.80866", "29.81611", "29.82536", "29.83263", "29.84081", "29.84305", "29.84505", "29.84781",
"29.8533", "29.86875", "29.87065"],
"longitudes": ["-95.1634", "-95.16458", "-95.16924", "-95.17589", "-95.18369", "-95.18539", "-95.1863",
"-95.18688", "-95.18703", "-95.18733", "-95.18774"],
"latitudeOffsets": ["-0.0010", "-0.0010", "-0.0010", "-0.0010", "-0.0010", "-0.0010", "-0.0010", "-0.0010",
"-0.0010", "-0.0010", "-0.0010"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400",
"0.00400", "0.00400", "0.00400"], "segmentDirection": "NB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "4.40", "segmentFacilityType": "ML",
"segmentOriginId": "1401", "segmentDestId": "1402",
"segmentLocation": "Beltway 8-East Northbound from Wallisville to Little York"},
{"latitudes": ["29.87065", "29.87255", "29.88313", "29.88838", "29.89126", "29.89601", "29.90124", "29.90675"],
"longitudes": ["-95.18774", "-95.18873", "-95.19779", "-95.20203", "-95.20298", "-95.20306", "-95.20309",
"-95.20234"],
"latitudeOffsets": ["-0.0010", "-0.0010", "-0.0010", "-0.0010", "-0.0010", "-0.0010", "-0.0010", "-0.0010"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400"],
"segmentDirection": "NB", "segmentSpeed": 59, "segmentTravelTime": "2 minutes 26 seconds", "segmentDist": "2.40",
"segmentFacilityType": "ML", "segmentOriginId": "1402", "segmentDestId": "1403",
"segmentLocation": "Beltway 8-East Northbound from Little York to Winfield"}, {
"latitudes": ["29.90675", "29.91943", "29.92295", "29.9246", "29.92637", "29.92846", "29.93272", "29.93424",
"29.93418", "29.934", "29.93467", "29.93464"],
"longitudes": ["-95.20234", "-95.20035", "-95.19982", "-95.19992", "-95.2005", "-95.20188", "-95.20739",
"-95.21155", "-95.22557", "-95.23537", "-95.24245", "-95.24948"],
"latitudeOffsets": ["-0.0010", "-0.0010", "-0.0010", "-0.0010", "-0.0010", "0.00300", "0.00300", "0.00300",
"0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00100", "0.00100", "0.00100",
"0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "NB", "segmentSpeed": 64,
"segmentTravelTime": "5 minutes 43 seconds", "segmentDist": "5", "segmentFacilityType": "ML",
"segmentOriginId": "1403", "segmentDestId": "1405",
"segmentLocation": "Beltway 8-East Northbound from Winfield to Wilson Rd"},
{"latitudes": ["29.66168", "29.65362", "29.64419", "29.63725"],
"longitudes": ["-95.55835", "-95.55025", "-95.53913", "-95.53088"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB", "segmentSpeed": 62,
"segmentTravelTime": "3 minutes 36 seconds", "segmentDist": "2.70", "segmentFacilityType": "ML",
"segmentOriginId": "1438", "segmentDestId": "1448",
"segmentLocation": "Beltway 8-South Eastbound from US-59 Southwest to Gessner"},
{"latitudes": ["29.63725", "29.62638", "29.62069", "29.61419"],
"longitudes": ["-95.53088", "-95.51781", "-95.50844", "-95.49475"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB", "segmentSpeed": 60,
"segmentTravelTime": "3 minutes 36 seconds", "segmentDist": "2.60", "segmentFacilityType": "ML",
"segmentOriginId": "1448", "segmentDestId": "1449",
"segmentLocation": "Beltway 8-South Eastbound from Gessner to Hillcroft"},
{"latitudes": ["29.61419", "29.61331", "29.60031"], "longitudes": ["-95.49475", "-95.49319", "-95.46481"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": 62, "segmentTravelTime": "2 minutes 57 seconds", "segmentDist": "2",
"segmentFacilityType": "ML", "segmentOriginId": "1449", "segmentDestId": "1450",
"segmentLocation": "Beltway 8-South Eastbound from Hillcroft to South Post Oak"},
{"latitudes": ["29.60031", "29.59713", "29.59681", "29.59713"],
"longitudes": ["-95.46481", "-95.45668", "-95.44662", "-95.42625"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB", "segmentSpeed": 59,
"segmentTravelTime": "2 minutes 21 seconds", "segmentDist": "2.30", "segmentFacilityType": "ML",
"segmentOriginId": "1450", "segmentDestId": "1451",
"segmentLocation": "Beltway 8-South Eastbound from South Post Oak to Almeda"},
{"latitudes": ["29.59713", "29.59688", "29.59694", "29.59732"],
"longitudes": ["-95.42625", "-95.41725", "-95.40887", "-95.38656"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB", "segmentSpeed": 54,
"segmentTravelTime": "3 minutes 53 seconds", "segmentDist": "2.60", "segmentFacilityType": "ML",
"segmentOriginId": "1451", "segmentDestId": "1452",
"segmentLocation": "Beltway 8-South Eastbound from Almeda to SH-288"},
{"latitudes": ["29.59732", "29.59844", "29.59913"], "longitudes": ["-95.38656", "-95.35712", "-95.3535"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": 66, "segmentTravelTime": "2 minutes 49 seconds", "segmentDist": "2",
"segmentFacilityType": "ML", "segmentOriginId": "1452", "segmentDestId": "1453",
"segmentLocation": "Beltway 8-South Eastbound from SH-288 to Cullen"},
{"latitudes": ["29.59913", "29.59969", "29.60019", "29.60056"],
"longitudes": ["-95.3535", "-95.35024", "-95.34037", "-95.29856"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB", "segmentSpeed": 60,
"segmentTravelTime": "2 minutes 6 seconds", "segmentDist": "2.10", "segmentFacilityType": "ML",
"segmentOriginId": "1453", "segmentDestId": "1454",
"segmentLocation": "Beltway 8-South Eastbound from Cullen to Mykawa"},
{"latitudes": ["29.60056", "29.60031", "29.59998"], "longitudes": ["-95.29856", "-95.28643", "-95.26967"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": 59, "segmentTravelTime": "2 minutes 26 seconds", "segmentDist": "2.40",
"segmentFacilityType": "ML", "segmentOriginId": "1454", "segmentDestId": "1455",
"segmentLocation": "Beltway 8-South Eastbound from Mykawa to Monroe"},
{"latitudes": ["29.59998", "29.60075", "29.60106"], "longitudes": ["-95.26967", "-95.24724", "-95.22881"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": 63, "segmentTravelTime": "2 minutes 18 seconds", "segmentDist": "2.40",
"segmentFacilityType": "ML", "segmentOriginId": "1455", "segmentDestId": "1456",
"segmentLocation": "Beltway 8-South Eastbound from Monroe to Beamer"},
{"latitudes": ["29.60106", "29.60119", "29.60294", "29.60656", "29.6165"],
"longitudes": ["-95.22881", "-95.21606", "-95.21106", "-95.20537", "-95.19194"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "-0.0010", "-0.0010", "-0.0010"], "segmentDirection": "EB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "2.30", "segmentFacilityType": "ML",
"segmentOriginId": "1456", "segmentDestId": "1457",
"segmentLocation": "Beltway 8-South Eastbound from Beamer to Old Galveston Road"},
{"latitudes": ["29.6165", "29.60656", "29.60294", "29.60119", "29.60106"],
"longitudes": ["-95.19194", "-95.20537", "-95.21106", "-95.21606", "-95.22881"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["-0.0010", "-0.0010", "-0.0010", "0.00100", "0.00100"], "segmentDirection": "WB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "2.30", "segmentFacilityType": "ML",
"segmentOriginId": "1464", "segmentDestId": "1439",
"segmentLocation": "Beltway 8-South Westbound from Old Galveston Road to Beamer"},
{"latitudes": ["29.60106", "29.60075", "29.59998"], "longitudes": ["-95.22881", "-95.24724", "-95.26967"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "WB", "segmentSpeed": 54, "segmentTravelTime": "3 minutes 40 seconds", "segmentDist": "2.40",
"segmentFacilityType": "ML", "segmentOriginId": "1439", "segmentDestId": "1440",
"segmentLocation": "Beltway 8-South Westbound from Beamer to Monroe"},
{"latitudes": ["29.59998", "29.60031", "29.60056"], "longitudes": ["-95.26967", "-95.28643", "-95.29856"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "WB", "segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "2.40",
"segmentFacilityType": "ML", "segmentOriginId": "1440", "segmentDestId": "1441",
"segmentLocation": "Beltway 8-South Westbound from Monroe to Mykawa"},
{"latitudes": ["29.60056", "29.60019", "29.59969", "29.59913"],
"longitudes": ["-95.29856", "-95.34037", "-95.35024", "-95.3535"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB", "segmentSpeed": 46,
"segmentTravelTime": "3 minutes 43 seconds", "segmentDist": "2.10", "segmentFacilityType": "ML",
"segmentOriginId": "1441", "segmentDestId": "1442",
"segmentLocation": "Beltway 8-South Westbound from Mykawa to Cullen"},
{"latitudes": ["29.59913", "29.59844", "29.59732"], "longitudes": ["-95.3535", "-95.35712", "-95.38656"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "WB", "segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "2",
"segmentFacilityType": "ML", "segmentOriginId": "1442", "segmentDestId": "1443",
"segmentLocation": "Beltway 8-South Westbound from Cullen to SH-288"},
{"latitudes": ["29.59732", "29.59694", "29.59688", "29.59713"],
"longitudes": ["-95.38656", "-95.40887", "-95.41725", "-95.42625"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB", "segmentSpeed": 63,
"segmentTravelTime": "2 minutes 29 seconds", "segmentDist": "2.60", "segmentFacilityType": "ML",
"segmentOriginId": "1443", "segmentDestId": "1444",
"segmentLocation": "Beltway 8-South Westbound from SH-288 to Almeda"},
{"latitudes": ["29.59713", "29.59681", "29.59713", "29.60031"],
"longitudes": ["-95.42625", "-95.44662", "-95.45668", "-95.46481"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB", "segmentSpeed": 66,
"segmentTravelTime": "2 minutes 6 seconds", "segmentDist": "2.30", "segmentFacilityType": "ML",
"segmentOriginId": "1444", "segmentDestId": "1445",
"segmentLocation": "Beltway 8-South Westbound from Almeda to South Post Oak"},
{"latitudes": ["29.60031", "29.61331", "29.61419"], "longitudes": ["-95.46481", "-95.49319", "-95.49475"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "WB", "segmentSpeed": 64, "segmentTravelTime": "2 minutes 52 seconds", "segmentDist": "2",
"segmentFacilityType": "ML", "segmentOriginId": "1445", "segmentDestId": "1446",
"segmentLocation": "Beltway 8-South Westbound from South Post Oak to Hillcroft"},
{"latitudes": ["29.61419", "29.62069", "29.62638", "29.63725"],
"longitudes": ["-95.49475", "-95.50844", "-95.51781", "-95.53088"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB", "segmentSpeed": 66,
"segmentTravelTime": "2 minutes 21 seconds", "segmentDist": "2.60", "segmentFacilityType": "ML",
"segmentOriginId": "1446", "segmentDestId": "1447",
"segmentLocation": "Beltway 8-South Westbound from Hillcroft to Gessner"},
{"latitudes": ["29.63725", "29.64419", "29.65362", "29.66168"],
"longitudes": ["-95.53088", "-95.53913", "-95.55025", "-95.55835"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "2.70", "segmentFacilityType": "ML",
"segmentOriginId": "1447", "segmentDestId": "1435",
"segmentLocation": "Beltway 8-South Westbound from Gessner to US-59 Southwest"},
{"latitudes": ["29.70962", "29.70781", "29.70606", "29.70775"],
"longitudes": ["-95.26663", "-95.25694", "-95.24519", "-95.22825"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB", "segmentSpeed": 69,
"segmentTravelTime": "1 minute 23 seconds", "segmentDist": "1.60", "segmentFacilityType": "ML",
"segmentOriginId": "1825", "segmentDestId": "1826",
"segmentLocation": "SH-225 Eastbound from IH-610 East Loop to Scarborough"},
{"latitudes": ["29.70775", "29.71025", "29.71112", "29.71212"],
"longitudes": ["-95.22825", "-95.22", "-95.21169", "-95.20137"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB", "segmentSpeed": 65,
"segmentTravelTime": "2 minutes 34 seconds", "segmentDist": "1.70", "segmentFacilityType": "ML",
"segmentOriginId": "1826", "segmentDestId": "1827",
"segmentLocation": "SH-225 Eastbound from Scarborough to Pasadena Blvd"},
{"latitudes": ["29.71212", "29.71244", "29.71281", "29.71269"],
"longitudes": ["-95.20137", "-95.19744", "-95.18425", "-95.17162"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB", "segmentSpeed": 69,
"segmentTravelTime": "2 minutes 34 seconds", "segmentDist": "1.80", "segmentFacilityType": "ML",
"segmentOriginId": "1827", "segmentDestId": "1828",
"segmentLocation": "SH-225 Eastbound from Pasadena Blvd to Preston"},
{"latitudes": ["29.71269", "29.71219", "29.71225"], "longitudes": ["-95.17162", "-95.15343", "-95.14107"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "1.90",
"segmentFacilityType": "ML", "segmentOriginId": "1828", "segmentDestId": "1829",
"segmentLocation": "SH-225 Eastbound from Preston to Railroad Street"},
{"latitudes": ["29.71225", "29.7115", "29.71063"], "longitudes": ["-95.14107", "-95.12406", "-95.11325"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "1.60",
"segmentFacilityType": "ML", "segmentOriginId": "1829", "segmentDestId": "1830",
"segmentLocation": "SH-225 Eastbound from Railroad Street to Tidal"},
{"latitudes": ["29.71063", "29.70475"], "longitudes": ["-95.11325", "-95.09331"],
"latitudeOffsets": ["0.00300", "0.00300"], "longitudeOffsets": ["0.00100", "0.00100"], "segmentDirection": "EB",
"segmentSpeed": 61, "segmentTravelTime": "2 minutes 53 seconds", "segmentDist": "1.90",
"segmentFacilityType": "ML", "segmentOriginId": "1830", "segmentDestId": "1831",
"segmentLocation": "SH-225 Eastbound from Tidal to Battleground"},
{"latitudes": ["29.70475", "29.69669", "29.69063", "29.68719"],
"longitudes": ["-95.09331", "-95.06675", "-95.04706", "-95.03093"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB", "segmentSpeed": 59,
"segmentTravelTime": "2 minutes 20 seconds", "segmentDist": "2.30", "segmentFacilityType": "ML",
"segmentOriginId": "1831", "segmentDestId": "1832",
"segmentLocation": "SH-225 Eastbound from Battleground to SH-146"},
{"latitudes": ["29.68719", "29.69063", "29.69669", "29.70475"],
"longitudes": ["-95.03093", "-95.04706", "-95.06675", "-95.09331"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB", "segmentSpeed": 68,
"segmentTravelTime": "2 minutes 1 second", "segmentDist": "2.30", "segmentFacilityType": "ML",
"segmentOriginId": "1833", "segmentDestId": "1834",
"segmentLocation": "SH-225 Westbound from SH-146 to Battleground"},
{"latitudes": ["29.70475", "29.71063"], "longitudes": ["-95.09331", "-95.11325"],
"latitudeOffsets": ["0.00300", "0.00300"], "longitudeOffsets": ["0.00100", "0.00100"], "segmentDirection": "WB",
"segmentSpeed": 64, "segmentTravelTime": "2 minutes 47 seconds", "segmentDist": "1.90",
"segmentFacilityType": "ML", "segmentOriginId": "1834", "segmentDestId": "1835",
"segmentLocation": "SH-225 Westbound from Battleground to Tidal"},
{"latitudes": ["29.71063", "29.7115", "29.71225"], "longitudes": ["-95.11325", "-95.12406", "-95.14107"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "WB", "segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "1.60",
"segmentFacilityType": "ML", "segmentOriginId": "1835", "segmentDestId": "1836",
"segmentLocation": "SH-225 Westbound from Tidal to Railroad Street"},
{"latitudes": ["29.71225", "29.71219", "29.71269"], "longitudes": ["-95.14107", "-95.15343", "-95.17162"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "WB", "segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "1.90",
"segmentFacilityType": "ML", "segmentOriginId": "1836", "segmentDestId": "1837",
"segmentLocation": "SH-225 Westbound from Railroad Street to Preston"},
{"latitudes": ["29.71269", "29.71281", "29.71244", "29.71212"],
"longitudes": ["-95.17162", "-95.18425", "-95.19744", "-95.20137"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB", "segmentSpeed": 65,
"segmentTravelTime": "2 minutes 40 seconds", "segmentDist": "1.80", "segmentFacilityType": "ML",
"segmentOriginId": "1837", "segmentDestId": "1838",
"segmentLocation": "SH-225 Westbound from Preston to Pasadena Blvd"},
{"latitudes": ["29.71212", "29.71112", "29.71025", "29.70775"],
"longitudes": ["-95.20137", "-95.21169", "-95.22", "-95.22825"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB", "segmentSpeed": 64,
"segmentTravelTime": "2 minutes 35 seconds", "segmentDist": "1.70", "segmentFacilityType": "ML",
"segmentOriginId": "1838", "segmentDestId": "1839",
"segmentLocation": "SH-225 Westbound from Pasadena Blvd to Scarborough"},
{"latitudes": ["29.70775", "29.70606", "29.70781", "29.70962"],
"longitudes": ["-95.22825", "-95.24519", "-95.25694", "-95.26663"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB", "segmentSpeed": 66,
"segmentTravelTime": "1 minute 27 seconds", "segmentDist": "1.60", "segmentFacilityType": "ML",
"segmentOriginId": "1839", "segmentDestId": "1840",
"segmentLocation": "SH-225 Westbound from Scarborough to IH-610 East Loop"},
{"latitudes": ["30.01311", "30.00159", "29.99687"], "longitudes": ["-95.5905", "-95.58307", "-95.57947"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "SB", "segmentSpeed": 60, "segmentTravelTime": "2 minutes 30 seconds", "segmentDist": "1.50",
"segmentFacilityType": "ML", "segmentOriginId": "1811", "segmentDestId": "1812",
"segmentLocation": "SH-249 Southbound from Spring-Cypress to Louetta"},
{"latitudes": ["29.99687", "29.99267", "29.98118", "29.97349"],
"longitudes": ["-95.57947", "-95.57543", "-95.56754", "-95.56316"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "SB", "segmentSpeed": 60,
"segmentTravelTime": "2 minutes 48 seconds", "segmentDist": "1.80", "segmentFacilityType": "ML",
"segmentOriginId": "1812", "segmentDestId": "1813", "segmentLocation": "SH-249 Southbound from Louetta to Perry"},
{"latitudes": ["29.97349", "29.96761", "29.96096", "29.95757", "29.95315"],
"longitudes": ["-95.56316", "-95.55702", "-95.54758", "-95.54119", "-95.53778"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "SB",
"segmentSpeed": 53, "segmentTravelTime": "2 minutes 3 seconds", "segmentDist": "1.80",
"segmentFacilityType": "ML", "segmentOriginId": "1813", "segmentDestId": "1814",
"segmentLocation": "SH-249 Southbound from Perry to Greens"},
{"latitudes": ["29.95315", "29.95014", "29.94486", "29.93991"],
"longitudes": ["-95.53778", "-95.53342", "-95.52642", "-95.51831"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00000"], "segmentDirection": "SB", "segmentSpeed": 57,
"segmentTravelTime": "2 minutes 47 seconds", "segmentDist": "1.70", "segmentFacilityType": "ML",
"segmentOriginId": "1814", "segmentDestId": "1815",
"segmentLocation": "SH-249 Southbound from Greens to Beltway 8-North"},
{"latitudes": ["29.93991", "29.94486", "29.95014", "29.95315"],
"longitudes": ["-95.51831", "-95.52642", "-95.53342", "-95.53778"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00000", "0.00100", "0.00100", "0.00100"], "segmentDirection": "NB", "segmentSpeed": 67,
"segmentTravelTime": "2 minutes 32 seconds", "segmentDist": "1.70", "segmentFacilityType": "ML",
"segmentOriginId": "1800", "segmentDestId": "1801",
"segmentLocation": "SH-249 Northbound from Beltway 8-North to Greens"},
{"latitudes": ["29.95315", "29.95757", "29.96096", "29.96761", "29.97349"],
"longitudes": ["-95.53778", "-95.54119", "-95.54758", "-95.55702", "-95.56316"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "NB",
"segmentSpeed": 64, "segmentTravelTime": "2 minutes 42 seconds", "segmentDist": "1.80",
"segmentFacilityType": "ML", "segmentOriginId": "1801", "segmentDestId": "1802",
"segmentLocation": "SH-249 Northbound from Greens to Perry"},
{"latitudes": ["29.97349", "29.98118", "29.99267", "29.99687"],
"longitudes": ["-95.56316", "-95.56754", "-95.57543", "-95.57947"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "NB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "1.80", "segmentFacilityType": "ML",
"segmentOriginId": "1802", "segmentDestId": "1803", "segmentLocation": "SH-249 Northbound from Perry to Louetta"},
{"latitudes": ["29.99687", "30.00159", "30.01311"], "longitudes": ["-95.57947", "-95.58307", "-95.5905"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "NB", "segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "1.50",
"segmentFacilityType": "ML", "segmentOriginId": "1803", "segmentDestId": "1804",
"segmentLocation": "SH-249 Northbound from Louetta to Spring-Cypress"},
{"latitudes": ["29.60372", "29.61712", "29.61949", "29.62425", "29.62679", "29.62975", "29.63459", "29.63833"],
"longitudes": ["-95.03197", "-95.0338", "-95.03408", "-95.0343", "-95.03367", "-95.03231", "-95.03021",
"-95.03053"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"segmentDirection": "NB", "segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "2",
"segmentFacilityType": "ML", "segmentOriginId": "1911", "segmentDestId": "1846",
"segmentLocation": "SH-146 Northbound from Port Rd to Wharton Weems"},
{"latitudes": ["29.63833", "29.65224", "29.66003", "29.66521"],
"longitudes": ["-95.03053", "-95.02998", "-95.02918", "-95.02925"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"], "segmentDirection": "NB", "segmentSpeed": 58,
"segmentTravelTime": "2 minutes 57 seconds", "segmentDist": "1.90", "segmentFacilityType": "ML",
"segmentOriginId": "1846", "segmentDestId": "1847",
"segmentLocation": "SH-146 Northbound from Wharton Weems to Spencer Highway"},
{"latitudes": ["29.66521", "29.67306", "29.68843"], "longitudes": ["-95.02925", "-95.02938", "-95.03041"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100"], "longitudeOffsets": ["0.00300", "0.00300", "0.00300"],
"segmentDirection": "NB", "segmentSpeed": 59, "segmentTravelTime": "2 minutes 49 seconds", "segmentDist": "1.80",
"segmentFacilityType": "ML", "segmentOriginId": "1847", "segmentDestId": "1848",
"segmentLocation": "SH-146 Northbound from Spencer Highway to SH-225"},
{"latitudes": ["29.68843", "29.69463", "29.69898", "29.71497"],
"longitudes": ["-95.03041", "-95.02777", "-95.02243", "-94.99912"],
"latitudeOffsets": ["0.00100", "-0.0010", "-0.0010", "-0.0010"],
"longitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"], "segmentDirection": "NB", "segmentSpeed": 64,
"segmentTravelTime": "2 minutes 27 seconds", "segmentDist": "2.60", "segmentFacilityType": "ML",
"segmentOriginId": "1848", "segmentDestId": "1849",
"segmentLocation": "SH-146 Northbound from SH-225 to Fred Hartman Bridge North"},
{"latitudes": ["29.71497", "29.72359", "29.7263", "29.73025", "29.73345", "29.73574", "29.74066"],
"longitudes": ["-94.99912", "-94.99159", "-94.99135", "-94.99339", "-94.99451", "-94.99397", "-94.99056"],
"latitudeOffsets": ["-0.0010", "-0.0010", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00400", "0.00400"],
"segmentDirection": "NB", "segmentSpeed": 64, "segmentTravelTime": "2 minutes 47 seconds", "segmentDist": "1.90",
"segmentFacilityType": "ML", "segmentOriginId": "1849", "segmentDestId": "1850",
"segmentLocation": "SH-146 Northbound from Fred Hartman Bridge North to W Texas"},
{"latitudes": ["29.74066", "29.73574", "29.73345", "29.73025", "29.7263", "29.72359", "29.71497"],
"longitudes": ["-94.99056", "-94.99397", "-94.99451", "-94.99339", "-94.99135", "-94.99159", "-94.99912"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "-0.0010", "-0.0010"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"segmentDirection": "SB", "segmentSpeed": 62, "segmentTravelTime": "2 minutes 51 seconds", "segmentDist": "1.90",
"segmentFacilityType": "ML", "segmentOriginId": "1851", "segmentDestId": "1852",
"segmentLocation": "SH-146 Southbound from W Texas to Fred Hartman Bridge North"},
{"latitudes": ["29.71497", "29.69898", "29.69463", "29.68843"],
"longitudes": ["-94.99912", "-95.02243", "-95.02777", "-95.03041"],
"latitudeOffsets": ["-0.0010", "-0.0010", "-0.0010", "0.00100"],
"longitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"], "segmentDirection": "SB", "segmentSpeed": 90,
"segmentTravelTime": "2 minutes 44 seconds", "segmentDist": "2.60", "segmentFacilityType": "ML",
"segmentOriginId": "1852", "segmentDestId": "1853",
"segmentLocation": "SH-146 Southbound from Fred Hartman Bridge North to SH-225"},
{"latitudes": ["29.68843", "29.67306", "29.66521"], "longitudes": ["-95.03041", "-95.02938", "-95.02925"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100"], "longitudeOffsets": ["0.00300", "0.00300", "0.00300"],
"segmentDirection": "SB", "segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "1.80",
"segmentFacilityType": "ML", "segmentOriginId": "1853", "segmentDestId": "1854",
"segmentLocation": "SH-146 Southbound from SH-225 to Spencer Highway"},
{"latitudes": ["29.66521", "29.66003", "29.65224", "29.63833"],
"longitudes": ["-95.02925", "-95.02918", "-95.02998", "-95.03053"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"], "segmentDirection": "SB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "1.90", "segmentFacilityType": "ML",
"segmentOriginId": "1854", "segmentDestId": "1855",
"segmentLocation": "SH-146 Southbound from Spencer Highway to Wharton Weems"},
{"latitudes": ["29.63833", "29.63459", "29.62975", "29.62679", "29.62425", "29.61949", "29.61712", "29.60372"],
"longitudes": ["-95.03053", "-95.03021", "-95.03231", "-95.03367", "-95.0343", "-95.03408", "-95.0338",
"-95.03197"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"segmentDirection": "SB", "segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "2",
"segmentFacilityType": "ML", "segmentOriginId": "1855", "segmentDestId": "1912",
"segmentLocation": "SH-146 Southbound from Wharton Weems to Port Rd"},
{"latitudes": ["29.74066", "29.74674", "29.74886", "29.75301", "29.75406", "29.75598"],
"longitudes": ["-94.99056", "-94.98629", "-94.98756", "-94.99056", "-94.99174", "-94.99541"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "1.40", "segmentFacilityType": "ML",
"segmentOriginId": "1850", "segmentDestId": "1856",
"segmentLocation": "Spur-330 Westbound from W Texas to Rollingbrook"},
{"latitudes": ["29.75598", "29.76605", "29.77002"], "longitudes": ["-94.99541", "-95.00783", "-95.01288"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "WB", "segmentSpeed": 58, "segmentTravelTime": "1 minute 2 seconds", "segmentDist": "1",
"segmentFacilityType": "ML", "segmentOriginId": "1856", "segmentDestId": "1857",
"segmentLocation": "Spur-330 Westbound from Rollingbrook to Baker"},
{"latitudes": ["29.77002", "29.77326"], "longitudes": ["-95.01288", "-95.01644"],
"latitudeOffsets": ["0.00300", "0.00300"], "longitudeOffsets": ["0.00100", "0.00100"], "segmentDirection": "WB",
"segmentSpeed": 75, "segmentTravelTime": "1 minute 2 seconds", "segmentDist": "1.30", "segmentFacilityType": "ML",
"segmentOriginId": "1857", "segmentDestId": "1858",
"segmentLocation": "Spur-330 Westbound from Baker to Wade Road"},
{"latitudes": ["29.77326", "29.77859", "29.783", "29.78645"],
"longitudes": ["-95.01644", "-95.02182", "-95.03202", "-95.0394"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB", "segmentSpeed": 61,
"segmentTravelTime": "1 minute 11 seconds", "segmentDist": "1.20", "segmentFacilityType": "ML",
"segmentOriginId": "1858", "segmentDestId": "1859",
"segmentLocation": "Spur-330 Westbound from Wade Road to N Market Loop"},
{"latitudes": ["29.78645", "29.783", "29.77859", "29.77326"],
"longitudes": ["-95.0394", "-95.03202", "-95.02182", "-95.01644"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB", "segmentSpeed": 25,
"segmentTravelTime": "3 minutes 52 seconds", "segmentDist": "1.20", "segmentFacilityType": "ML",
"segmentOriginId": "1860", "segmentDestId": "1861",
"segmentLocation": "Spur-330 Eastbound from N Market Loop to Wade Road"},
{"latitudes": ["29.77326", "29.77002"], "longitudes": ["-95.01644", "-95.01288"],
"latitudeOffsets": ["0.00300", "0.00300"], "longitudeOffsets": ["0.00100", "0.00100"], "segmentDirection": "EB",
"segmentSpeed": 63, "segmentTravelTime": "1 minute 14 seconds", "segmentDist": "1.30",
"segmentFacilityType": "ML", "segmentOriginId": "1861", "segmentDestId": "1862",
"segmentLocation": "Spur-330 Eastbound from Wade Road to Baker"},
{"latitudes": ["29.77002", "29.76605", "29.75598"], "longitudes": ["-95.01288", "-95.00783", "-94.99541"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": 75, "segmentTravelTime": "1 minute 48 seconds", "segmentDist": "1",
"segmentFacilityType": "ML", "segmentOriginId": "1862", "segmentDestId": "1863",
"segmentLocation": "Spur-330 Eastbound from Baker to Rollingbrook"},
{"latitudes": ["29.75598", "29.75406", "29.75301", "29.74886", "29.74674", "29.74066"],
"longitudes": ["-94.99541", "-94.99174", "-94.99056", "-94.98756", "-94.98629", "-94.99056"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00400", "0.00400"], "segmentDirection": "EB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "1.30", "segmentFacilityType": "ML",
"segmentOriginId": "1863", "segmentDestId": "1851",
"segmentLocation": "Spur-330 Eastbound from Rollingbrook to W Texas"}, {
"latitudes": ["29.71092", "29.71178", "29.71348", "29.71342", "29.71422", "29.71488", "29.71522", "29.71558",
"29.71662", "29.71687"],
"longitudes": ["-95.6442", "-95.63445", "-95.61415", "-95.6147", "-95.6048", "-95.59655", "-95.59225",
"-95.58765", "-95.57565", "-95.5714"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300",
"0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100",
"0.00100", "0.00100"], "segmentDirection": "EB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "4.10", "segmentFacilityType": "ML",
"segmentOriginId": "346", "segmentDestId": "347",
"segmentLocation": "Westpark Tollway Eastbound from SH-6 to Wilcrest"},
{"latitudes": ["29.71687", "29.71808", "29.71882", "29.71962"],
"longitudes": ["-95.5714", "-95.55755", "-95.5489", "-95.53919"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "1.80", "segmentFacilityType": "ML",
"segmentOriginId": "347", "segmentDestId": "348",
"segmentLocation": "Westpark Tollway Eastbound from Wilcrest to Gessner"},
{"latitudes": ["29.71962", "29.7205", "29.7205", "29.72188"],
"longitudes": ["-95.53919", "-95.52756", "-95.52756", "-95.51406"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "2.60", "segmentFacilityType": "ML",
"segmentOriginId": "348", "segmentDestId": "349",
"segmentLocation": "Westpark Tollway Eastbound from Gessner to Hillcroft"},
{"latitudes": ["29.72188", "29.7223", "29.7223", "29.7223", "29.7223"],
"longitudes": ["-95.51406", "-95.49912", "-95.49912", "-95.49912", "-95.49912"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "1.90", "segmentFacilityType": "ML",
"segmentOriginId": "349", "segmentDestId": "350",
"segmentLocation": "Westpark Tollway Eastbound from Hillcroft to Rice"},
{"latitudes": ["29.7223", "29.7223", "29.7223", "29.7223", "29.72188"],
"longitudes": ["-95.49912", "-95.49912", "-95.49912", "-95.49912", "-95.51406"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "1.80", "segmentFacilityType": "ML",
"segmentOriginId": "341", "segmentDestId": "342",
"segmentLocation": "Westpark Tollway Westbound from Rice to Hillcroft"},
{"latitudes": ["29.72188", "29.7205", "29.7205", "29.71962"],
"longitudes": ["-95.51406", "-95.52756", "-95.52756", "-95.53919"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "2", "segmentFacilityType": "ML", "segmentOriginId": "342",
"segmentDestId": "343", "segmentLocation": "Westpark Tollway Westbound from Hillcroft to Gessner"},
{"latitudes": ["29.71962", "29.71882", "29.71808", "29.71687"],
"longitudes": ["-95.53919", "-95.5489", "-95.55755", "-95.5714"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "2.50", "segmentFacilityType": "ML",
"segmentOriginId": "343", "segmentDestId": "344",
"segmentLocation": "Westpark Tollway Westbound from Gessner to Wilcrest"}, {
"latitudes": ["29.71687", "29.71662", "29.71558", "29.71522", "29.71488", "29.71422", "29.71342", "29.71348",
"29.71178", "29.71092"],
"longitudes": ["-95.5714", "-95.57565", "-95.58765", "-95.59225", "-95.59655", "-95.6048", "-95.6147",
"-95.61415", "-95.63445", "-95.6442"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300",
"0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100",
"0.00100", "0.00100"], "segmentDirection": "WB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "4.10", "segmentFacilityType": "ML",
"segmentOriginId": "344", "segmentDestId": "345",
"segmentLocation": "Westpark Tollway Westbound from Wilcrest to SH-6"}, {
"latitudes": ["30.09609", "30.09605", "30.09597", "30.09605", "30.09477", "30.09447", "30.09463", "30.09483",
"30.09551", "30.09687", "30.09863", "30.12492"],
"longitudes": ["-94.13464", "-94.13004", "-94.12512", "-94.10414", "-94.10045", "-94.09801", "-94.09101",
"-94.08227", "-94.07482", "-94.06989", "-94.06579", "-94.0261"],
"latitudeOffsets": ["0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300", "0.00300",
"0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100",
"0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "7", "segmentFacilityType": "ML",
"segmentOriginId": "2000", "segmentDestId": "2002",
"segmentLocation": "IH-10 Eastbound from US-69 to Dewitt Rd"}, {
"latitudes": ["30.12492", "30.09863", "30.09687", "30.09551", "30.09483", "30.09463", "30.09447", "30.09477",
"30.09605", "30.09597"],
"longitudes": ["-94.0261", "-94.06579", "-94.06989", "-94.07482", "-94.08227", "-94.09101", "-94.09801",
"-94.10045", "-94.10414", "-94.12512"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00300", "0.00300", "0.00300", "0.00300",
"0.00300", "0.00300"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100",
"0.00100", "0.00100"], "segmentDirection": "WB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "6.60", "segmentFacilityType": "ML",
"segmentOriginId": "2003", "segmentDestId": "2004",
"segmentLocation": "IH-10 Westbound from Dewitt Rd to 9th St"},
{"latitudes": ["30.09589", "30.10437", "30.10791", "30.10984", "30.11717"],
"longitudes": ["-94.13601", "-94.1367", "-94.13702", "-94.13793", "-94.14528"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "NB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "1.10", "segmentFacilityType": "ML",
"segmentOriginId": "2100", "segmentDestId": "2101", "segmentLocation": "US-69 Northbound from IH-10 to Lucas Dr"},
{"latitudes": ["30.11717", "30.12395", "30.12618", "30.13342"],
"longitudes": ["-94.14528", "-94.15227", "-94.15616", "-94.16744"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "NB", "segmentSpeed": 63,
"segmentTravelTime": "2 minutes 23 seconds", "segmentDist": "2.50", "segmentFacilityType": "ML",
"segmentOriginId": "2101", "segmentDestId": "2102",
"segmentLocation": "US-69 Northbound from Lucas Dr to SH-105"},
{"latitudes": ["30.13342", "30.13657", "30.14054", "30.14718", "30.15296", "30.17098"],
"longitudes": ["-94.16744", "-94.17136", "-94.17499", "-94.17912", "-94.18051", "-94.18466"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "NB",
"segmentSpeed": 68, "segmentTravelTime": "2 minutes 40 seconds", "segmentDist": "1.90",
"segmentFacilityType": "ML", "segmentOriginId": "2102", "segmentDestId": "2103",
"segmentLocation": "US-69 Northbound from SH-105 to Tram Rd"},
{"latitudes": ["30.17098", "30.17947"], "longitudes": ["-94.18466", "-94.18601"],
"latitudeOffsets": ["0.00100", "0.00100"], "longitudeOffsets": ["0.00400", "0.00400"], "segmentDirection": "NB",
"segmentSpeed": 68, "segmentTravelTime": "1 minute 19 seconds", "segmentDist": "1.50",
"segmentFacilityType": "ML", "segmentOriginId": "2103", "segmentDestId": "2104",
"segmentLocation": "US-69 Northbound from Tram Rd to Pine Island Bayou"},
{"latitudes": ["30.17947", "30.18599", "30.19306", "30.19562", "30.20812", "30.22125"],
"longitudes": ["-94.18601", "-94.18719", "-94.19", "-94.19073", "-94.19098", "-94.19317"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "NB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "2.30", "segmentFacilityType": "ML",
"segmentOriginId": "2104", "segmentDestId": "2105",
"segmentLocation": "US-69 Northbound from Pine Island Bayou to US-69/US-96 Y"},
{"latitudes": ["30.22125", "30.20812", "30.19562", "30.19306", "30.18599", "30.17947"],
"longitudes": ["-94.19317", "-94.19098", "-94.19073", "-94.19", "-94.18719", "-94.18601"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "SB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "2.30", "segmentFacilityType": "ML",
"segmentOriginId": "2106", "segmentDestId": "2107",
"segmentLocation": "US-69 Southbound from US-69/US-96 Y to Pine Island Bayou"},
{"latitudes": ["30.17947", "30.17098"], "longitudes": ["-94.18601", "-94.18466"],
"latitudeOffsets": ["0.00100", "0.00100"], "longitudeOffsets": ["0.00400", "0.00400"], "segmentDirection": "SB",
"segmentSpeed": 59, "segmentTravelTime": "2 minutes 32 seconds", "segmentDist": "1.50",
"segmentFacilityType": "ML", "segmentOriginId": "2107", "segmentDestId": "2108",
"segmentLocation": "US-69 Southbound from Pine Island Bayou to Tram Rd"},
{"latitudes": ["30.17098", "30.15296", "30.14718", "30.14054", "30.13657", "30.13342"],
"longitudes": ["-94.18466", "-94.18051", "-94.17912", "-94.17499", "-94.17136", "-94.16744"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "SB",
"segmentSpeed": 63, "segmentTravelTime": "2 minutes 48 seconds", "segmentDist": "1.90",
"segmentFacilityType": "ML", "segmentOriginId": "2108", "segmentDestId": "2109",
"segmentLocation": "US-69 Southbound from Tram Rd to SH-105"},
{"latitudes": ["30.13342", "30.12618", "30.12395", "30.11717"],
"longitudes": ["-94.16744", "-94.15616", "-94.15227", "-94.14528"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "SB", "segmentSpeed": 58,
"segmentTravelTime": "3 minutes 35 seconds", "segmentDist": "2.50", "segmentFacilityType": "ML",
"segmentOriginId": "2109", "segmentDestId": "2110",
"segmentLocation": "US-69 Southbound from SH-105 to Lucas Dr"},
{"latitudes": ["30.11717", "30.10984", "30.10791", "30.10437", "30.09589"],
"longitudes": ["-94.14528", "-94.13793", "-94.13702", "-94.1367", "-94.13601"],
"latitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "SB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "1.10", "segmentFacilityType": "ML",
"segmentOriginId": "2110", "segmentDestId": "2111", "segmentLocation": "US-69 Southbound from Lucas Dr to IH-10"},
{"latitudes": ["29.78092", "29.78271", "29.78512", "29.78924", "29.7913", "29.79532", "29.79804", "29.80176",
"29.81376", "29.81698"],
"longitudes": ["-95.2514", "-95.24958", "-95.24608", "-95.23947", "-95.23597", "-95.22837", "-95.22494",
"-95.22134", "-95.20953", "-95.20729"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400",
"0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100",
"0.00100", "0.00100"], "segmentDirection": "EB", "segmentSpeed": 63,
"segmentTravelTime": "2 minutes 22 seconds", "segmentDist": "2.50", "segmentFacilityType": "ML",
"segmentOriginId": "2200", "segmentDestId": "2202",
"segmentLocation": "US-90 Eastbound from Oates Rd to S Lake Houston Parkway"},
{"latitudes": ["29.81698", "29.81913", "29.82159", "29.82975", "29.8326", "29.84097"],
"longitudes": ["-95.20729", "-95.20507", "-95.20019", "-95.18244", "-95.17586", "-95.15628"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB",
"segmentSpeed": 61, "segmentTravelTime": "3 minutes 28 seconds", "segmentDist": "3.50",
"segmentFacilityType": "ML", "segmentOriginId": "2202", "segmentDestId": "2204",
"segmentLocation": "US-90 Eastbound from S Lake Houston Parkway to Miller Road Number 3"},
{"latitudes": ["29.84097", "29.84651", "29.85269", "29.86566"],
"longitudes": ["-95.15628", "-95.14344", "-95.12792", "-95.09371"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB", "segmentSpeed": 60,
"segmentTravelTime": "4 minutes 53 seconds", "segmentDist": "3.90", "segmentFacilityType": "ML",
"segmentOriginId": "2204", "segmentDestId": "2206",
"segmentLocation": "US-90 Eastbound from Miller Road Number 3 to San Jacinto River"},
{"latitudes": ["29.86566", "29.87035", "29.88421"], "longitudes": ["-95.09371", "-95.08137", "-95.06315"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00500"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": 61, "segmentTravelTime": "2 minutes 21 seconds", "segmentDist": "2.40",
"segmentFacilityType": "ML", "segmentOriginId": "2206", "segmentDestId": "2208",
"segmentLocation": "US-90 Eastbound from San Jacinto River to FM-1942"},
{"latitudes": ["29.88421", "29.88729", "29.89067", "29.89952", "29.9117"],
"longitudes": ["-95.06315", "-95.06112", "-95.05973", "-95.0547", "-95.04719"],
"latitudeOffsets": ["0.00500", "0.00500", "0.00500", "0.00500", "0.00500"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "EB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "2.40", "segmentFacilityType": "ML",
"segmentOriginId": "2208", "segmentDestId": "2210",
"segmentLocation": "US-90 Eastbound from FM-1942 to Runneburg Rd"},
{"latitudes": ["29.9117", "29.93303", "29.93973"], "longitudes": ["-95.04719", "-95.03448", "-95.02785"],
"latitudeOffsets": ["0.00500", "0.00500", "0.00500"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "2.60",
"segmentFacilityType": "ML", "segmentOriginId": "2210", "segmentDestId": "2212",
"segmentLocation": "US-90 Eastbound from Runneburg Rd to Janacek Rd"},
{"latitudes": ["29.93973", "29.96367", "29.98046"], "longitudes": ["-95.02785", "-94.9964", "-94.97529"],
"latitudeOffsets": ["0.00500", "0.00400", "0.00400"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "EB", "segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "3.30",
"segmentFacilityType": "ML", "segmentOriginId": "2212", "segmentDestId": "2214",
"segmentLocation": "US-90 Eastbound from Janacek Rd to Liberty County Line"},
{"latitudes": ["29.98046", "29.96367", "29.93973"], "longitudes": ["-94.97529", "-94.9964", "-95.02785"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00500"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "WB", "segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "3.30",
"segmentFacilityType": "ML", "segmentOriginId": "2215", "segmentDestId": "2213",
"segmentLocation": "US-90 Westbound from Liberty County Line to Janacek Rd"},
{"latitudes": ["29.93973", "29.93303", "29.9117"], "longitudes": ["-95.02785", "-95.03448", "-95.04719"],
"latitudeOffsets": ["0.00500", "0.00500", "0.00500"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "WB", "segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "2.60",
"segmentFacilityType": "ML", "segmentOriginId": "2213", "segmentDestId": "2211",
"segmentLocation": "US-90 Westbound from Janacek Rd to Runneburg Rd"},
{"latitudes": ["29.9117", "29.89952", "29.89067", "29.88729", "29.88421"],
"longitudes": ["-95.04719", "-95.0547", "-95.05973", "-95.06112", "-95.06315"],
"latitudeOffsets": ["0.00500", "0.00500", "0.00500", "0.00500", "0.00500"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "2.40", "segmentFacilityType": "ML",
"segmentOriginId": "2211", "segmentDestId": "2209",
"segmentLocation": "US-90 Westbound from Runneburg Rd to FM-1942"},
{"latitudes": ["29.88421", "29.87035", "29.86566"], "longitudes": ["-95.06315", "-95.08137", "-95.09371"],
"latitudeOffsets": ["0.00500", "0.00400", "0.00400"], "longitudeOffsets": ["0.00100", "0.00100", "0.00100"],
"segmentDirection": "WB", "segmentSpeed": 65, "segmentTravelTime": "2 minutes 13 seconds", "segmentDist": "2.40",
"segmentFacilityType": "ML", "segmentOriginId": "2209", "segmentDestId": "2207",
"segmentLocation": "US-90 Westbound from FM-1942 to San Jacinto River"},
{"latitudes": ["29.86566", "29.85269", "29.84651", "29.84097"],
"longitudes": ["-95.09371", "-95.12792", "-95.14344", "-95.15628"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "3.90", "segmentFacilityType": "ML",
"segmentOriginId": "2207", "segmentDestId": "2205",
"segmentLocation": "US-90 Westbound from San Jacinto River to Miller Road Number 3"},
{"latitudes": ["29.84097", "29.8326", "29.82975", "29.82159", "29.81913", "29.81698"],
"longitudes": ["-95.15628", "-95.17586", "-95.18244", "-95.20019", "-95.20507", "-95.20729"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100"], "segmentDirection": "WB",
"segmentSpeed": 54, "segmentTravelTime": "4 minutes 55 seconds", "segmentDist": "3.50",
"segmentFacilityType": "ML", "segmentOriginId": "2205", "segmentDestId": "2203",
"segmentLocation": "US-90 Westbound from Miller Road Number 3 to S Lake Houston Parkway"}, {
"latitudes": ["29.81698", "29.81376", "29.80176", "29.79804", "29.79532", "29.7913", "29.78924", "29.78512",
"29.78271", "29.78092"],
"longitudes": ["-95.20729", "-95.20953", "-95.22134", "-95.22494", "-95.22837", "-95.23597", "-95.23947",
"-95.24608", "-95.24958", "-95.2514"],
"latitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400",
"0.00400", "0.00400"],
"longitudeOffsets": ["0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100", "0.00100",
"0.00100", "0.00100"], "segmentDirection": "WB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "2.50", "segmentFacilityType": "ML",
"segmentOriginId": "2203", "segmentDestId": "2201",
"segmentLocation": "US-90 Westbound from S Lake Houston Parkway to Oates Rd"}, {
"latitudes": ["29.69052", "29.69711", "29.69818", "29.7006", "29.70394", "29.70636", "29.70829", "29.70983",
"29.71418"],
"longitudes": ["-95.76959", "-95.77502", "-95.7755", "-95.77583", "-95.77617", "-95.7761", "-95.77573",
"-95.77523", "-95.77341"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000", "0.00000", "0.00000", "0.00000", "0.00000", "0.00000",
"0.00000"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400",
"0.00400"], "segmentDirection": "NB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "1.60", "segmentFacilityType": "ML",
"segmentOriginId": "2318", "segmentDestId": "2319",
"segmentLocation": "SH-99 West Northbound from Bellaire to S Fry Rd"},
{"latitudes": ["29.71418", "29.7162", "29.71818", "29.73345", "29.74404"],
"longitudes": ["-95.77341", "-95.77294", "-95.77273", "-95.77289", "-95.77302"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000", "0.00000", "0.00000"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "NB",
"segmentSpeed": 60, "segmentTravelTime": "2 minutes 0 seconds", "segmentDist": "2", "segmentFacilityType": "ML",
"segmentOriginId": "2319", "segmentDestId": "2320",
"segmentLocation": "SH-99 West Northbound from S Fry Rd to Cinco Ranch Blvd"}, {
"latitudes": ["29.74404", "29.74499", "29.74631", "29.74769", "29.75067", "29.75207", "29.75329", "29.75662",
"29.76656", "29.76869", "29.77208"],
"longitudes": ["-95.77302", "-95.7731", "-95.77336", "-95.77396", "-95.77567", "-95.77611", "-95.77628",
"-95.77627", "-95.77623", "-95.77651", "-95.7772"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000", "0.00000", "0.00000", "0.00000", "0.00000", "0.00000",
"0.00000", "0.00000", "0.00000"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400",
"0.00400", "0.00400", "0.00400"], "segmentDirection": "NB", "segmentSpeed": 64,
"segmentTravelTime": "2 minutes 53 seconds", "segmentDist": "2", "segmentFacilityType": "ML",
"segmentOriginId": "2320", "segmentDestId": "2321",
"segmentLocation": "SH-99 West Northbound from Cinco Ranch Blvd to Kingsland"},
{"latitudes": ["29.77208", "29.77325", "29.7942"], "longitudes": ["-95.7772", "-95.7773", "-95.77636"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000"], "longitudeOffsets": ["0.00400", "0.00400", "0.00400"],
"segmentDirection": "NB", "segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "1.50",
"segmentFacilityType": "ML", "segmentOriginId": "2321", "segmentDestId": "2317",
"segmentLocation": "SH-99 West Northbound from Kingsland to IH-10 Katy"},
{"latitudes": ["29.7942", "29.80499", "29.816", "29.81832"],
"longitudes": ["-95.77636", "-95.77619", "-95.77255", "-95.77216"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000", "0.00000"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "NB", "segmentSpeed": 52,
"segmentTravelTime": "2 minutes 43 seconds", "segmentDist": "1.50", "segmentFacilityType": "ML",
"segmentOriginId": "2317", "segmentDestId": "2315",
"segmentLocation": "SH-99 West Northbound from IH-10 Katy to Morton"},
{"latitudes": ["29.81832", "29.83091"], "longitudes": ["-95.77216", "-95.76216"],
"latitudeOffsets": ["0.00000", "0.00000"], "longitudeOffsets": ["0.00400", "0.00400"], "segmentDirection": "NB",
"segmentSpeed": 65, "segmentTravelTime": "1 minute 28 seconds", "segmentDist": "1.60",
"segmentFacilityType": "ML", "segmentOriginId": "2315", "segmentDestId": "2313",
"segmentLocation": "SH-99 West Northbound from Morton to Clay Rd"},
{"latitudes": ["29.83091", "29.84123", "29.84774", "29.86108"],
"longitudes": ["-95.76216", "-95.76171", "-95.75969", "-95.75765"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000", "0.00000"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "NB", "segmentSpeed": 63,
"segmentTravelTime": "1 minute 14 seconds", "segmentDist": "1.30", "segmentFacilityType": "ML",
"segmentOriginId": "2313", "segmentDestId": "2311",
"segmentLocation": "SH-99 West Northbound from Clay Rd to Beckendorff"},
{"latitudes": ["29.86108", "29.87767", "29.89123"], "longitudes": ["-95.75765", "-95.75963", "-95.75962"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000"], "longitudeOffsets": ["0.00400", "0.00400", "0.00400"],
"segmentDirection": "NB", "segmentSpeed": 73, "segmentTravelTime": "2 minutes 34 seconds", "segmentDist": "1.90",
"segmentFacilityType": "ML", "segmentOriginId": "2311", "segmentDestId": "2309",
"segmentLocation": "SH-99 West Northbound from Beckendorff to Longenbaugh"},
{"latitudes": ["29.89123", "29.90367", "29.90659", "29.91179"],
"longitudes": ["-95.75962", "-95.76138", "-95.7605", "-95.76141"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000", "0.00000"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "NB", "segmentSpeed": 55,
"segmentTravelTime": "2 minutes 38 seconds", "segmentDist": "1.50", "segmentFacilityType": "ML",
"segmentOriginId": "2309", "segmentDestId": "2307",
"segmentLocation": "SH-99 West Northbound from Longenbaugh to West Rd"},
{"latitudes": ["29.91179", "29.91921", "29.93451", "29.93701", "29.94054"],
"longitudes": ["-95.76141", "-95.76221", "-95.76344", "-95.7638", "-95.76544"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000", "0.00000", "0.00000"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "NB",
"segmentSpeed": 55, "segmentTravelTime": "1 minute 25 seconds", "segmentDist": "1.30",
"segmentFacilityType": "ML", "segmentOriginId": "2307", "segmentDestId": "2305",
"segmentLocation": "SH-99 West Northbound from West Rd to House Hahl"},
{"latitudes": ["29.94054", "29.94759", "29.95894"], "longitudes": ["-95.76544", "-95.76889", "-95.7675"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000"], "longitudeOffsets": ["0.00400", "0.00400", "0.00400"],
"segmentDirection": "NB", "segmentSpeed": 84, "segmentTravelTime": "1 minute 0 seconds", "segmentDist": "1.40",
"segmentFacilityType": "ML", "segmentOriginId": "2305", "segmentDestId": "2303",
"segmentLocation": "SH-99 West Northbound from House Hahl to Bridgeland Lake Parkway"},
{"latitudes": ["29.95894", "29.97676", "29.98396", "29.99399"],
"longitudes": ["-95.7675", "-95.76856", "-95.76815", "-95.76708"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000", "0.00000"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "NB", "segmentSpeed": 66,
"segmentTravelTime": "2 minutes 10 seconds", "segmentDist": "2.40", "segmentFacilityType": "ML",
"segmentOriginId": "2303", "segmentDestId": "2301",
"segmentLocation": "SH-99 West Northbound from Bridgeland Lake Parkway to US-290 Northwest"},
{"latitudes": ["29.99399", "29.98396", "29.97676", "29.95894"],
"longitudes": ["-95.76708", "-95.76815", "-95.76856", "-95.7675"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000", "0.00000"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "SB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "2.40", "segmentFacilityType": "ML",
"segmentOriginId": "2300", "segmentDestId": "2302",
"segmentLocation": "SH-99 West Southbound from US-290 Northwest to Bridgeland Lake Parkway"},
{"latitudes": ["29.95894", "29.94759", "29.94054"], "longitudes": ["-95.7675", "-95.76889", "-95.76544"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000"], "longitudeOffsets": ["0.00400", "0.00400", "0.00400"],
"segmentDirection": "SB", "segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "1.40",
"segmentFacilityType": "ML", "segmentOriginId": "2302", "segmentDestId": "2304",
"segmentLocation": "SH-99 West Southbound from Bridgeland Lake Parkway to House Hahl"},
{"latitudes": ["29.94054", "29.93701", "29.93451", "29.91921", "29.91179"],
"longitudes": ["-95.76544", "-95.7638", "-95.76344", "-95.76221", "-95.76141"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000", "0.00000", "0.00000"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "SB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "1.30", "segmentFacilityType": "ML",
"segmentOriginId": "2304", "segmentDestId": "2306",
"segmentLocation": "SH-99 West Southbound from House Hahl to West Rd"},
{"latitudes": ["29.91179", "29.90659", "29.90367", "29.89123"],
"longitudes": ["-95.76141", "-95.7605", "-95.76138", "-95.75962"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000", "0.00000"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "SB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "1.50", "segmentFacilityType": "ML",
"segmentOriginId": "2306", "segmentDestId": "2308",
"segmentLocation": "SH-99 West Southbound from West Rd to Longenbaugh"},
{"latitudes": ["29.89123", "29.87767", "29.86108"], "longitudes": ["-95.75962", "-95.75963", "-95.75765"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000"], "longitudeOffsets": ["0.00400", "0.00400", "0.00400"],
"segmentDirection": "SB", "segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "1.90",
"segmentFacilityType": "ML", "segmentOriginId": "2308", "segmentDestId": "2310",
"segmentLocation": "SH-99 West Southbound from Longenbaugh to Beckendorff"},
{"latitudes": ["29.86108", "29.84774", "29.84123", "29.83091"],
"longitudes": ["-95.75765", "-95.75969", "-95.76171", "-95.76216"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000", "0.00000"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "SB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "1.30", "segmentFacilityType": "ML",
"segmentOriginId": "2310", "segmentDestId": "2312",
"segmentLocation": "SH-99 West Southbound from Beckendorff to Clay Rd"},
{"latitudes": ["29.83091", "29.81832"], "longitudes": ["-95.76216", "-95.77216"],
"latitudeOffsets": ["0.00000", "0.00000"], "longitudeOffsets": ["0.00400", "0.00400"], "segmentDirection": "SB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "1.60", "segmentFacilityType": "ML",
"segmentOriginId": "2312", "segmentDestId": "2314",
"segmentLocation": "SH-99 West Southbound from Clay Rd to Morton"},
{"latitudes": ["29.81832", "29.816", "29.80499", "29.7942"],
"longitudes": ["-95.77216", "-95.77255", "-95.77619", "-95.77636"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000", "0.00000"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "SB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "1.50", "segmentFacilityType": "ML",
"segmentOriginId": "2314", "segmentDestId": "2316",
"segmentLocation": "SH-99 West Southbound from Morton to IH-10 Katy"},
{"latitudes": ["29.7942", "29.77325", "29.77208"], "longitudes": ["-95.77636", "-95.7773", "-95.7772"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000"], "longitudeOffsets": ["0.00400", "0.00400", "0.00400"],
"segmentDirection": "SB", "segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "1.50",
"segmentFacilityType": "ML", "segmentOriginId": "2316", "segmentDestId": "2322",
"segmentLocation": "SH-99 West Southbound from IH-10 Katy to Kingsland"}, {
"latitudes": ["29.77208", "29.76869", "29.76656", "29.75662", "29.75329", "29.75207", "29.75067", "29.74769",
"29.74631", "29.74499", "29.74404"],
"longitudes": ["-95.7772", "-95.77651", "-95.77623", "-95.77627", "-95.77628", "-95.77611", "-95.77567",
"-95.77396", "-95.77336", "-95.7731", "-95.77302"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000", "0.00000", "0.00000", "0.00000", "0.00000", "0.00000",
"0.00000", "0.00000", "0.00000"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400",
"0.00400", "0.00400", "0.00400"], "segmentDirection": "SB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "2", "segmentFacilityType": "ML",
"segmentOriginId": "2322", "segmentDestId": "2323",
"segmentLocation": "SH-99 West Southbound from Kingsland to Cinco Ranch Blvd"},
{"latitudes": ["29.74404", "29.73345", "29.71818", "29.7162", "29.71418"],
"longitudes": ["-95.77302", "-95.77289", "-95.77273", "-95.77294", "-95.77341"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000", "0.00000", "0.00000"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400"], "segmentDirection": "SB",
"segmentSpeed": -1, "segmentTravelTime": "Not Available", "segmentDist": "2", "segmentFacilityType": "ML",
"segmentOriginId": "2323", "segmentDestId": "2324",
"segmentLocation": "SH-99 West Southbound from Cinco Ranch Blvd to S Fry Rd"}, {
"latitudes": ["29.71418", "29.70983", "29.70829", "29.70636", "29.70394", "29.7006", "29.69818", "29.69711",
"29.69052"],
"longitudes": ["-95.77341", "-95.77523", "-95.77573", "-95.7761", "-95.77617", "-95.77583", "-95.7755",
"-95.77502", "-95.76959"],
"latitudeOffsets": ["0.00000", "0.00000", "0.00000", "0.00000", "0.00000", "0.00000", "0.00000", "0.00000",
"0.00000"],
"longitudeOffsets": ["0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400", "0.00400",
"0.00400"], "segmentDirection": "SB", "segmentSpeed": -1,
"segmentTravelTime": "Not Available", "segmentDist": "1.60", "segmentFacilityType": "ML",
"segmentOriginId": "2324", "segmentDestId": "2325",
"segmentLocation": "SH-99 West Southbound from S Fry Rd to Bellaire"}]
b = [(d['segmentDist'], d['segmentOriginId'] + '-' + d['segmentDestId']) for d in a]
import mysql.connector
cnx = mysql.connector.connect(user='akunapar_ani', password='ttt124!@#riceilovetianani', host='box1112.bluehost.com',
database='akunapar_riceai_traffic')
for dist, id in b:
c = cnx.cursor()
c.execute('UPDATE segment_info SET distance=' + dist + ' WHERE segment_id="'+id+'"')
cnx.commit()
c.close()
cnx.close()
print b
| {
"repo_name": "anivk/riceai-traffic",
"path": "speeds/segment_info.py",
"copies": "1",
"size": "316844",
"license": "mit",
"hash": 3280300544466884600,
"line_mean": 90.4148874784,
"line_max": 120,
"alpha_frac": 0.5903599248,
"autogenerated": false,
"ratio": 2.719667642337834,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8773957784133638,
"avg_score": 0.007213956600839273,
"num_lines": 3466
} |
"""A layered graph, backed by redis.
Licensed under the 3-clause BSD License:
Copyright (c) 2013, Neeraj Kumar (neerajkumar.org)
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the author nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL NEERAJ KUMAR BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import os, sys, time
import redis
from nkpylib.nkredisutils import *
import nkpylib.nkutils as nkutils
import numpy as np
from line_profiler import LineProfiler
profile = LineProfiler()
GENERIC_NODE_KEY_FMT = 'nodes:%(layername)s:%(id)s'
def spark(vals, f=sys.stdout):
    """Custom spark that wraps to 201 and scales by 1000"""
    opts = dict(wrap=201, scale=1000.0, f=f)
    return nkutils.spark(vals, **opts)
class RedisLayeredGraph(object):
    """A layered graph backed by redis.
    Connections should only be within the same level or adjoining level.
    However, you can have multiple "layers" at the same "level".

    NOTE(review): helpers such as `memoize`, `pipefunc`, `specializeDict`,
    `log` and `getTimeDiffs` are presumably brought in by the star-import
    from nkpylib.nkredisutils — confirm against that module.
    """
    def __init__(self, db=None, reset=0, symmetric=1, prefix='', callbacks=[], **kw):
        """Initializes ourself.
        You can either pass in a redis connection in 'db', or give kw params.
        They should include 'host', 'port', 'db', and 'password'.
        Optionally, they can include 'charset' and 'socket_timeout'
        If you set reset=1, then it will reset the database.
        You can also add a prefix, which will be used whenever we need to save files.
        """
        self.db = db if db else redis.Redis(**kw)
        self.symmetric = symmetric
        if prefix:
            self.prefix = prefix
        else:
            # try to read it from the database
            self.prefix = self.db.get('prefix')
        # in-memory caches; invalidated via the self.dirty bit dict
        self.edgematrices = {}
        self.layerkeys = {}
        self.dirty = {}
        # copy the callbacks list, so the mutable default argument is never shared
        self.callbacks = callbacks[:]
        if reset:
            print 'RESETTING DATABASE!!! Press control-c to cancel, within 5 seconds!'
            time.sleep(5)
            self.db.flushdb()
            self.db.set('prefix', self.prefix)

    @memoize
    def layerkey(self, layername):
        """Returns the key for a given layer name"""
        return 'layers:%s' % (layername)

    def getLayer(self, layername):
        """Returns a dict of the given layer"""
        ret = self.db.hgetall(self.layerkey(layername))
        ret = specializeDict(ret)
        return ret

    def layers(self):
        """Returns a list of (layername, level) pairs, in sorted order"""
        layers = [(name, int(level)) for name, level in self.db.hgetall('layers').items()]
        return sorted(layers, key=lambda pair: pair[1])

    @memoize
    def nodekeyfmt(self, layername):
        """Returns the node key format for the given layer"""
        return self.db.hget(self.layerkey(layername), 'nodekeyfmt')

    def nodekey(self, nodeprimary, layername):
        """Returns the node key given the primary id for the node and the layer.
        If this does not exist, then returns None"""
        id = self.db.hget('layers:%s:nodes' % (layername), nodeprimary)
        if not id: return None
        return self.nodekeyfmt(layername) % dict(layername=layername, id=id)

    def nodekeyFromID(self, nodeid, layername):
        """Returns the node key given the node id and the layer"""
        return self.nodekeyfmt(layername) % dict(layername=layername, id=nodeid)

    @memoize
    def splitNodekey(self, nodekey):
        """Splits a node key into (layername, node id)"""
        # assumes keys look like 'nodes:<layername>:<id>' (GENERIC_NODE_KEY_FMT)
        _, layername, nodeid = nodekey.split(':')
        return (layername, nodeid)

    def nodes(self, layername):
        """Returns a list of nodekeys for a given layer"""
        ids = sorted(map(int, self.db.hvals('layers:%s:nodes' % (layername))))
        ret = [self.nodekeyFromID(id, layername) for id in ids]
        return ret

    def addcallback(self, callback, predfunc=None):
        """Adds the given node callback and optionally a predicate function.
        The callback takes (nodekey, nodedict, layer, action), where:
            nodekey - The key to the node
            nodedict - The dict of elements in the node
            layer - The layer the node is in
            action - The action that was performed on this node. One of:
                create - Created for the first time
                update - Update a node that already existed
                init - Called on first init of the current process
        The predicate takes the same args and if False, the callback is not called.
        With no predicate, the callback is always executed.
        """
        self.callbacks.append((callback, predfunc))

    def runcallbacks(self, nodekey, nodedict, layer, action):
        """Runs all applicable callbacks for the given args.
        This does the appropriate checking with the predicate functions.
        """
        for callback, predfunc in self.callbacks:
            if not predfunc(nodekey, nodedict, layer, action): continue
            callback(nodekey, nodedict, layer, action)

    def addLayer(self, name, level, nodekeyfmt=GENERIC_NODE_KEY_FMT):
        """Creates a new layer with given name and level.
        Optionally specify a fmt that generates nodekeys given a dict containing (id, layername).
        Returns the layer name.
        """
        # check if we already added this level
        oldl = self.db.hget('layers', name)
        if oldl is not None:
            # we did, so check the level
            if int(oldl) != level:
                raise ValueError('Layer "%s" already exists at level %s, not %d!' % (name, oldl, level))
            # level matches, so just return the name of the layer
            #print 'Layer "%s" already existed' % (name)
            return name
        key = self.layerkey(name)
        # write the layer hash and the layers index in one pipeline (atomic-ish)
        p = self.db.pipeline()
        l = dict(name=name, level=level, key=key, nodekeyfmt=nodekeyfmt)
        p.hmset(key, l)
        p.hset('layers', name, level)
        p.execute()
        #print 'Added layer "%s" at level %s: %s' % (name, level, l)
        return name

    def addNode(self, layername, primary=None, checkexisting=1, pipeline=None, added=None, addindicator=None, **nodekw):
        """Adds a node to the given layer.
        You can optionally pass in a "primary key", which can be used
        to lookup the node id in O(1) time. Else, this becomes the node id.
        If checkexisting is true (default), then it first checks
        to see if a node with that primary exists. If so, it sets the given
        nodekw on that existing node, and doesn't create a new id, etc.
        If added is given, then checkexisting also tries to lookup the
        'primary' value in the given dict, which should return a key.
        If the key was not found, then the primary->key mapping is added to it.
        The actual data to add should be given as key-value pairs in nodekw.
        Note that the values should be serializable to fit in a redis hash.
        Also, the data is ALWAYS set, even if 'checkexisting' was true.
        If pipeline is given, then all db modifications are done within the
        given pipeline. Else, creates a new pipeline for the duration of this
        function, and then executes it at the end.
        If addindicator is true, then it's assumed to be an array with at least 1 element.
        This element is set to 1 if we actually added the node, else 0.
        Returns the node keyname."""
        # see if this key exists
        key = None
        if checkexisting and primary:
            try:
                primary = str(primary)
            except Exception:
                pass
            if isinstance(added, dict) and primary in added:
                # first check the given 'added' dict, if it exists
                key = added[primary]
            else:
                # now check the database
                key = self.nodekey(primary, layername=layername)
                if not key or not self.db.exists(key):
                    #log('  Key %s did not exist!' % (key,))
                    key = None
        p = pipeline if pipeline else self.db.pipeline()
        if addindicator:
            addindicator[0] = 0
        # if we don't have a key yet, generate one
        action = 'update'
        if not key:
            action = 'create'
            lkey = self.layerkey(layername)
            keyfmt = self.nodekeyfmt(layername)
            # per-layer counter gives the new node id; runs outside the pipeline
            id = self.db.incr('counters:layernodes:%s' % (layername))
            key = keyfmt % dict(id=id, layername=layername)
            # some bookkeeping
            p.hincrby(lkey, 'nnodes', 1)
            if not primary:
                primary = id
                try:
                    primary = str(primary)
                except Exception: pass
            #log('Actually adding node %s with primary %s to layer %s' % (key, primary, layername))
            p.hset('layers:%(layername)s:nodes' % (locals()), primary, id)
            # add to the 'added' cache of added keys
            if isinstance(added, dict):
                added[primary] = key
            # mark layerkeys dirty for this layer
            self.dirty['layerkeys-%s' % (layername)] = 1
            # mark all edges connected to this layer as dirty
            # NOTE(review): this marks every layer pair dirty, not just pairs
            # involving this layer — presumably conservative on purpose
            for l1, _ in self.layers():
                for l2, _ in self.layers():
                    self.dirty['edgematrix-%s-%s' % (l1, l2)] = 1
            # set the indicator
            if addindicator:
                addindicator[0] = 1
            # also delete all flows and edgematrices
            self.deletePickles()
        # set the main kw for this node
        p.hmset(key, nodekw)
        if not pipeline:
            p.execute()
        # run callbacks
        self.runcallbacks(key, nodekw, layername, action)
        return key

    def addScores(self, scorename, layername, hashkey=None, **kw):
        """Adds a zset for the given layername.
        If you give a hashkey, then it initializes this using the given key
        extracted from all nodes. Otherwise, it uses the kwargs to initialize it.
        """
        if hashkey:
            nodes = self.nodes(layername)
            vals = pipefunc(self.db, nodes, 'hget', hashkey)
            for n, v in zip(nodes, vals):
                nodeid = n.rsplit(':', 1)[-1]
                kw[nodeid] = v
        if not kw: return
        key = 'scores:%s:%s' % (layername, scorename)
        p = self.db.pipeline()
        # NOTE(review): zadd(key, **kw) is the redis-py <3.0 calling
        # convention; redis-py >=3 requires a mapping argument — confirm the
        # pinned client version.
        p.zadd(key, **kw)
        p.sadd('layers:%s:scores' % (layername), key)
        p.execute()

    def addEdges(self, edges, pipeline=None):
        """Adds edges, each of which is (nodekey1, nodekey2, weight).
        This does various normalization and then adds relevant entries.
        Returns a list of (edgekey, edgefield) entries.
        Note that if self.symmetric is true, then adds the symmetric entries as well,
        but will still return only as many return pairs as inputs.
        Also, we will filter out any edges that connect a node to itself.
        """
        p = pipeline if pipeline else self.db.pipeline()
        ret = []
        for nk1, nk2, w in edges:
            # skip malformed and self-referential edges
            if not nk1 or not nk2: continue
            (l1, l2), (n1, n2) = zip(*[self.splitNodekey(nk) for nk in [nk1, nk2]])
            if nk1 == nk2: continue
            ekey1, efield1 = ('%s:edges:%s' % (nk1, l2), str(n2))
            ret.append((ekey1, efield1))
            p.zadd(ekey1, **{efield1:w})
            p.hincrby('layeredges:%s' % (l1), l2, 1)
            # mark this edgematrix as dirty
            self.dirty['edgematrix-%s-%s' % (l1, l2)] = 1
            if self.symmetric:
                # mirror the edge in the opposite direction
                ekey2, efield2 = ('%s:edges:%s' % (nk2, l1), str(n1))
                p.zadd(ekey2, **{efield2:w})
                p.hincrby('layeredges:%s' % (l2), l1, 1)
                # mark this edgematrix as dirty
                self.dirty['edgematrix-%s-%s' % (l2, l1)] = 1
            #print '  ', nk1, nk2, n1, n2, l1, l2, ekey1, efield1, ekey2, efield2, w
        self.deletePickles()
        if not pipeline:
            p.execute()
        return ret

    def deletePickles(self):
        """Deletes all our pickles"""
        from shutil import rmtree
        rmtree(os.path.join(self.prefix, 'edgematrices'), ignore_errors=1)
        rmtree(os.path.join(self.prefix, 'flows'), ignore_errors=1)

    def deleteEdges(self, layer1, layer2, dosym=1):
        """Deletes edges from layer1 to layer2.
        If self.symmetric and dosym, then also deletes edges the other way."""
        p = self.db.pipeline()
        l1keys = self.nodes(layer1)
        for k in l1keys:
            p.delete('%s:edges:%s' % (k, layer2))
        self.dirty['edgematrix-%s-%s' % (layer1, layer2)] = 1
        p.hdel('layeredges:%s' % (layer1), layer2)
        p.execute()
        if self.symmetric and dosym:
            self.deleteEdges(layer2, layer1, dosym=0) # so that we don't keep iterating forever

    def getEdges(self, nodekeys, valid=None, sort=1):
        """Gets edges from the given nodekeys, optionally limited to some layers.
        Returns a dict mapping layer names to lists of results.
        Each result list has the same length as 'nodekeys', and consists of
        edge lists, which are (nodeid, weight) pairs.
        If 'valid' is a string, then only returns edges that connect to that layer.
        If 'valid' is a list, then only returns edges that connect to one of those layers.
        if 'sort' is true (default), then each list is sorted by highest weight first.
        All input nodekeys must be in the same layer."""
        if not nodekeys: return []
        # basic init and quick checks
        layername, _ = self.splitNodekey(nodekeys[0])
        elayers = self.db.hkeys('layeredges:%s' % (layername))
        if valid:
            if isinstance(valid, basestring): # single valid layer
                elayers = [l for l in elayers if l==valid]
            else: # list of layers
                elayers = [l for l in elayers if l in valid]
        if not elayers: return {}
        ret = {}
        for l in elayers:
            # zero-weight edges are excluded via min=0.00001
            edges = pipefunc(self.db, ['%s:edges:%s' % (nk, l) for nk in nodekeys], 'zrevrangebyscore', withscores=1, min=0.00001, max='inf')
            assert len(edges) == len(nodekeys)
            ret[l] = [[(int(k), float(v)) for k, v in e] for e in edges]
            if sort:
                for lst in ret[l]:
                    lst.sort(key=lambda pair: pair[1], reverse=1)
        return ret

    def summedEdges(self, keys, dstlayer):
        """Returns a summed list of edges out from the given key inputs.
        Essentially one stage of a flow computation, but done without matrices.
        The inputs are either a list of keys (assumed weight=1), a list of (key,score) pairs,
        or a dict of key->weights. The edges to the given `dstlayer` are retrieved,
        summed, and then multiplied by these scores.
        The output is a dict of key->scores.
        """
        from collections import defaultdict
        if not keys: return {}
        if isinstance(keys, dict): # already a dict
            inputs = keys
        else:
            if isinstance(keys[0], basestring): # only keys
                inputs = dict.fromkeys(keys, 1.0)
            else: # (key,score) pairs
                inputs = dict(keys)
        ikeys = sorted(inputs)
        edges = self.getEdges(ikeys, dstlayer, sort=0)[dstlayer]
        ret = defaultdict(float)
        for ikey, curedges in zip(ikeys, edges):
            inscore = inputs[ikey]
            #print '  %s : score %f, %d edges' % (ikey, inscore, len(curedges))
            for nodeid, s in curedges:
                # accumulate input weight times edge weight per destination node
                ret[self.nodekeyFromID(nodeid, dstlayer)] += inscore * s
        return dict(ret)

    def recursiveFlow(self, keys, layers):
        """Repeated calls to summedEdges() with initial keys, going through many `layers`.
        The outputs of one call are then fed to the next.
        Returns a dict of key->scores at the last layer."""
        if not keys: return {}
        ret = keys
        for dstlayer in layers:
            ret = self.summedEdges(ret, dstlayer)
        return ret

    def updateLayerKeys(self, layername):
        """Updates the cached layer keys for the given layer"""
        l = layername
        dkey = 'layerkeys-%s' % (l)
        if l not in self.layerkeys:
            self.dirty[dkey] = 1
        if self.dirty.get(dkey, 0):
            #nnodes = self.db.hlen('layers:%s:nodes' % (l)) + 1
            try:
                # max id + 1, so the cache is indexable by node id (ids may have gaps)
                nnodes = max(map(int, self.db.hvals('layers:%s:nodes' % (l)))) + 1
            except Exception:
                nnodes = 0
            self.layerkeys[l] = [self.nodekeyFromID(id, l) for id in range(nnodes)]
            #log('  Precached %d layerkeys for layer %s' % (len(self.layerkeys[l]), l))
        if dkey in self.dirty:
            del self.dirty[dkey]

    #@timed
    def getEdgeMatrix(self, srclayer, dstlayer, srckeys=None, dstkeys=None, usesparse=1):
        """Computes a matrix of weights that transforms from srclayer to dstlayer.
        i.e., you have a vector V_s of values from srclayer, and this function
        returns M_ds. Then you can do V_d = np.dot(M_ds, V_s).
        Returns (M_ds, list of srclayer keys, list of dstlayer keys).
        You can optionally pass in lists of srckeys and dstkeys.
        If so, then only fills in values that exist in these lists.
        If src and dst layers are the same, then initializes the matrix with identity.
        Otherwise, initializes the matrix with 0s.
        If usesparse is true (default), then uses sparse matrices. Notes:
            - we initialize data using lil_matrix, because it's fastest to modify
            - we convert to csr_matrix at the end, because that's fastest to multiply
        """
        import scipy as sp
        import scipy.sparse as sparse
        from scipy.sparse import lil_matrix as sparsemat
        times = [time.time()]
        # init keys and matrix
        if not srckeys:
            srckeys = self.nodes(srclayer)
        if not dstkeys:
            dstkeys = self.nodes(dstlayer)
        # map destination node id -> matrix row index
        dstrows = dict((int(self.splitNodekey(dk)[1]), i) for i, dk in enumerate(dstkeys))
        times.append(time.time())
        ns, nd = len(srckeys), len(dstkeys)
        assert ns > 0 and nd > 0
        if srclayer == dstlayer:
            if usesparse:
                M = sparsemat((nd,nd))
                M.setdiag(np.ones(nd))
            else:
                M = np.eye(nd)
        else:
            if usesparse:
                M = sparsemat((nd,ns))
            else:
                M = np.zeros((nd, ns))
        times.append(time.time())
        # fill in the matrix, only if we have something to fill
        if self.db.hexists('layeredges:%s' % (srclayer), dstlayer):
            edges = self.getEdges(srckeys, valid=dstlayer, sort=0)[dstlayer]
            for col, row in enumerate(edges):
                # NOTE: 'row' is rebound below from an edge list to a matrix
                # row index; safe (the for-loop holds its own iterator) but
                # confusing to read.
                for nodeid, w in row:
                    if nodeid not in dstrows: continue
                    row = dstrows[nodeid]
                    M[row, col] = w
        times.append(time.time())
        nz = len(M.nonzero()[0])
        nels = M.shape[0]*M.shape[1]
        if nz == 0:
            # fully-empty matrices are returned as None
            M = None
        else:
            if ns == nd:
                # check if it's identity
                if usesparse:
                    eye = sparsemat((nd,nd))
                    eye.setdiag(np.ones(nd))
                else:
                    eye = np.eye(nd)
                eye -= M
                iseye = (len(eye.nonzero()[0]) == 0)
                if iseye:
                    # identity matrices are also returned as None (no-op transform)
                    M = None
            else:
                iseye = 0
            log('  Matrix from %s (%d) to %s (%d) had %d/%d nonzeros (%0.5f%%) (iseye=%s)' % (srclayer, len(srckeys), dstlayer, len(dstkeys), nz, nels, nz*100.0/float(nels), iseye))
        log('  Matrix took: %s' % (getTimeDiffs(times)))
        if sparse.issparse(M):
            M = M.tocsr()
        return (M, srckeys, dstkeys)

    def cachedEdgeMatrix(self, l1, l2):
        """Updates the cached edge matrix between the given layers (if needed).
        Assumes associated layerkeys are already up-to-date.
        Returns the matrix."""
        import cPickle as pickle
        #FIXME if things are symmetric, then only compute one half of the symmetries, and generate the others on-the-fly
        dkey = 'edgematrix-%s-%s' % (l1, l2)
        picklename = os.path.join(self.prefix, 'edgematrices', dkey+'.pickle')
        try:
            os.makedirs(os.path.dirname(picklename))
        except OSError:
            pass
        if (l1, l2) not in self.edgematrices:
            self.dirty[dkey] = 1
        if self.dirty.get(dkey, 0): #FIXME the pickles are always assumed to be up-to-date right now!
            try:
                M = pickle.load(open(picklename))
                #log('  Loaded %s of size %s' % (picklename, M.shape if M is not None else 0))
            except Exception, e:
                # no usable pickle: recompute from the database
                M, _, _ = self.getEdgeMatrix(l1, l2, self.layerkeys[l1], self.layerkeys[l2])
                try:
                    os.makedirs(os.path.dirname(picklename))
                except Exception: pass
                #pickle.dump(M, open(picklename, 'wb'), -1)
                #log('  Due to exception "%s", saved matrix of shape %s, with pickle size %d to "%s"' % (e, M.shape if M is not None else 0, os.stat(picklename).st_size, picklename))
            self.edgematrices[(l1, l2)] = M #FIXME experiment to not use all this memory
            #log('  Precached edgematrix %s x %s from layer %s to %s' % (M.shape[0], M.shape[1], l1, l2))
            if dkey in self.dirty:
                del self.dirty[dkey]
        else:
            M = self.edgematrices[(l1, l2)]
        if 0 and l1 != l2 and M is not None: #debugging
            log('Got M of type %s' % (type(M),))
            import array
            scores = array.array('f', [])
            import numpy.random as random
            for i in range(M.shape[1]):
                scores.append(random.random())
            log('Got %d scores: %s' % (len(scores), scores[:5]))
            try:
                t1 = time.time()
                v = M.dot(scores)
                t2 = time.time()
            except Exception:
                log('in exc')
                t1 = time.time()
                v = np.dot(M, scores)
                t2 = time.time()
            M = M.todense()
            t3 = time.time()
            v1 = np.dot(M, scores)
            t4 = time.time()
            log('Got %d in v, in %0.4f secs, compared to %0.4fs for dense: %s, %s, %s' % (len(v), t2-t1, t4-t3, v[:5], v1[:5], v==v1))
            sys.exit()
        return M

    def nodeweights(self, layername, lkeys=None):
        """Returns the nodeweights for the given layer.
        If lkeys is given, then the weights are returned in that order.
        Otherwise, returns weights for all nodes in this layer, as returned by nodes()"""
        if not lkeys:
            lkeys = self.nodes(layername)
        # default weight is 1.0 for every node
        weights = np.ones(len(lkeys))
        key = 'layers:%s:weights' % (layername)
        if not self.db.exists(key): return weights
        ids = [self.splitNodekey(k)[1] for k in lkeys]
        for i, w in enumerate(self.db.hmget(key, ids)):
            if w is None: continue
            weights[i] = float(w)
        #log('For layer %s, got %s' % (layername, zip(lkeys, ids, weights)))
        #log('For layer %s, with %d lkeys, got %d weights: %s' % (layername, len(lkeys), len(weights), weights))
        return weights

    def updateCache(self):
        """Updates our cache"""
        # update our list of layerkeys as needed
        for l, _ in self.layers():
            self.updateLayerKeys(l)

    def createFlow(self, *args, **kw):
        """Creates a flow object.
        If args and/or kw are given, then calls flow.add() with those params.
        Note that for now, we precache layerkeys and edge matrices here."""
        #log('In create flow, got dirty: %s' % (self.dirty,))
        self.updateCache()
        f = RGLFlow(self)
        if args or kw:
            f.add(*args, **kw)
        return f

    def updateIfDirty(self, dkey, func, *args, **kw):
        """Runs the given func if the dirty bit is set for the given key"""
        if dkey in self.dirty:
            func(*args, **kw)
            del self.dirty[dkey]
        else:
            log('Got precached dkey %s' % (dkey))
class RGLFlow(object):
"""A flow object for a given RedisLayeredGraph (RGL)"""
    def __init__(self, g, id=None, tempflow=0, debugfmt='str'):
        """Initialize this flow object from the given graph.
        If an id is given, then tries to load the values from disk.
        If tempflow is true (default false), then save() and load() become no-ops.
        The debugfmt determines how to print out ourself:
            'spark': using spark lines
            'str': using get()
        """
        self.g = g
        self.db = g.db
        # maps layername -> numpy score vector (one entry per node id)
        self.scores = {}
        self.tempflow = tempflow
        self.debugfmt = debugfmt
        if id:
            # load from disk
            self.id = id
            try:
                self.load()
            except Exception:
                # could not load, so just reset
                self.reset()
        else:
            # create new id and reset
            self.newid()
            self.reset()
def reset(self, *layernames):
"""Resets the score arrays.
Optionally, you can give a list of layernames to reset.
Otherwise, it resets all layers."""
# update the cached list of layerkeys as needed
for lname, level in self.g.layers():
if layernames and lname not in layernames: continue
nnodes = len(self.g.layerkeys[lname])
a = self.scores[lname] = np.zeros(nnodes)
#print 'Reset scores for layer %s (level %d) with %d nodes' % (lname, level, len(a))
self.save()
def binop(self, other, binfunc):
"""Base function for binary operators.
Does all the necessary checks, and then calls the binfunc(v1, v2) to get the output.
'other' can be either another flow, or a scalar."""
ret = RGLFlow(g=self.g, debugfmt=self.debugfmt, tempflow=1)
assert sorted(self.scores) == sorted(ret.scores)
if isinstance(other, RGLFlow):
# combine two flows
assert self.g == other.g
assert sorted(self.scores) == sorted(other.scores)
for layer in self.scores:
s1, s2, out = self.scores[layer], other.scores[layer], ret.scores[layer]
assert len(s1) == len(s2) == len(out)
ret.scores[layer] = binfunc(s1, s2)
elif isinstance(other, (float, long, int)):
# apply the given scalar to this flow
for layer in self.scores:
s, out = self.scores[layer], ret.scores[layer]
assert len(s) == len(out)
ret.scores[layer] = binfunc(s, other)
else:
raise NotImplementedError('cannot handle type %s for RGLFlow.binop()' % (type(other)))
return ret
def __add__(self, other):
"""Adds 'other' rgl flow to this one and returns new RGLFlow"""
return self.binop(other, binfunc=lambda v1, v2: v1+v2)
def __radd__(self, other):
"""Addition with flipped order"""
return self.__add__(other)
def __iadd__(self, other):
"""Runs the normal __add__, and then resets our variables"""
temp = self+other
self.scores = temp.scores
self.save()
return self
def __sub__(self, other):
"""Subtracts 'other' flow from this one and returns the result.
Note that values are clamped to remain positive."""
def binfunc(v1, v2):
c = v1-v2
c[c < 0] = 0.0
return c
return self.binop(other, binfunc=binfunc)
def __mul__(self, other):
"""Multiplies two flows, or this flow and a scalar"""
return self.binop(other, binfunc=lambda v1, v2: v1*v2)
def __rmul__(self, other):
"""Multiplication with flipped order"""
return self.__mul__(other)
def __eq__(self, other):
"""Returns true if our layers are the same and the values are (almost) the same."""
if sorted(self.scores) != sorted(other.scores): return False
for l in self.scores:
s1 = self.scores[l]
s2 = other.scores[l]
if not np.allclose(s1, s2): return False
return True
def newid(self):
"""Changes our id"""
import uuid
self.id = uuid.uuid1().hex
    def save(self):
        """Pickles our per-layer scores to <g.prefix>/flows/<id>.pickle.
        Temporary flows (self.tempflow set) are never persisted.
        The containing directory is created on demand.
        """
        import cPickle as pickle
        #from scipy.sparse import lil_matrix as sparsemat
        from scipy.sparse import csr_matrix as sparsemat
        #M = M.tocsr()
        if self.tempflow: return
        fname = os.path.join(self.g.prefix, 'flows', self.id+'.pickle')
        try:
            os.makedirs(os.path.dirname(fname))
        except OSError: pass  # directory already exists
        if 0:  # disabled: sparse-matrix serialization, kept for reference
            todump = {}
            for k in self.scores:
                todump[k] = sparsemat(self.scores[k])
        else:
            todump = self.scores
        # protocol -1 = highest pickle protocol available
        pickle.dump(todump, open(fname, 'wb'), -1)
        #log('Saving flow with id %s' % (self.id))
    def load(self):
        """Loads our per-layer scores from <g.prefix>/flows/<id>.pickle.
        self.id must already be set. After loading, logs (but does not fix)
        any mismatch between each layer's score-vector length and the
        graph's layerkeys, and any layer unknown to the graph.
        """
        import cPickle as pickle
        fname = os.path.join(self.g.prefix, 'flows', self.id+'.pickle')
        self.scores = pickle.load(open(fname))
        # check if the lengths of these scores match RLG's list of layerkeys
        for l in self.g.layerkeys:
            alen, blen = len(self.g.layerkeys[l]), len(self.scores.get(l,[]))
            if alen != blen:
                log('### checking l "%s": exists %s, lens %d vs %d' % (l, l in self.scores, alen, blen))
        # also warn about layers present in the pickle but unknown to the graph
        for l in self.scores:
            if l not in self.g.layerkeys:
                log('### layer "%s" from flow %s does not exist in layerkeys' % (l, self.id))
                pass#TODO HERE
        #data = pickle.load(open(fname))
        #self.scores = dict((k, v.todense()) for k, v in data.iteritems())
        #log('Loading flow with id %s' % (self.id))
def __str__(self):
"""Returns our values as a string"""
from StringIO import StringIO
s = StringIO()
print >>s, 'Flow %s, tempflow %s, debugfmt %s' % (self.id, self.tempflow, self.debugfmt)
for lname, level in self.g.layers():
sc = self.get(lname)
if self.debugfmt == 'spark' or sc:
print >>s, '%s (%d nodes, %d nz):' % (lname, len(self.scores[lname]), len(sc))
s.flush()
if self.debugfmt == 'spark':
spark(self.scores[lname], f=s)
elif self.debugfmt == 'str':
if sc:
print >>s, sc
return s.getvalue()
    def incr(self, op='add', **scores):
        """Increments scores, given as a mapping of nodekey=increment.
        Increment type depends on 'op':
            'add': add to existing [default]
            'mul': multiply to existing
        Nodekeys whose numeric id exceeds the layer's score vector are
        silently skipped.
        """
        for nk, incr in scores.iteritems():
            lname, nodeid = self.g.splitNodekey(nk)
            a = self.scores[lname]
            # guard against stale nodekeys beyond this layer's current size
            if int(nodeid) >= len(a): continue
            if op == 'add':
                try:
                    a[int(nodeid)] += incr
                except Exception:
                    # dump enough state to diagnose the bad key, then re-raise
                    log('Got nk %s, incr %s, lname %s, nodeid %s, a len %d, lkeys %d: %s' % (nk, incr, lname, nodeid, len(a), len(self.g.layerkeys[lname]), self.g.layerkeys[lname][-5:]))
                    raise
            elif op == 'mul':
                a[int(nodeid)] *= incr
            else:
                raise NotImplementedError()
    @timed
    #@profile
    def add(self, dir, destlevel, layerfuncs=None, debug=0, **flows):
        """Adds the given flows.
        The scores are calculated going in the given dir (+1 or -1).
        Computation proceeds until the given destination level.
        Each flow should be given as nodekey=score.
        You can optionally pass in a dict of layerfuncs.
        These are applied at the given layer as:
            self.scores[layer] = layerfuncs[layer](self.scores[layer])
        Returns self.
        """
        import numpy as np
        import operator as op
        #if not flows: return
        mylog = lambda s: log(s, funcindent=-1)
        if debug > 0:
            mylog('Adding %d flows in dir %d to destination level %d' % (len(flows), dir, destlevel))
        # basic init
        g = self.g
        db = self.db
        layers = dict((layer, g.getLayer(layer)) for layer, level in g.layers())
        # visit layers in level order, reversed when flowing downward (dir < 0)
        layerOrder = sorted(layers, key=lambda l: layers[l]['level'], reverse=(dir<0))
        if debug > 0:
            mylog('Layers: %s' % (layers,))
            mylog('Layer order: %s' % (layerOrder,))
        # add all todo flows
        if flows:
            self.incr(**flows)
        # start accumulating flows
        for l in layerOrder:
            curlevel = layers[l]['level']
            # stop once we pass the destination level
            if dir > 0 and curlevel > destlevel: break
            if dir < 0 and curlevel < destlevel: break
            if debug > 0:
                mylog('Adding flows to layer %s (level %d)' % (l, curlevel))
            lkeys = g.layerkeys[l] # these are guaranteed to line up with our scores array.
            # quick check for non-zero elements
            nz = len(self.scores[l].nonzero()[0])
            if nz == 0: continue
            if 0: #FIXME temporarily disabled
                # get the self-edge matrix
                if debug > 1:
                    mylog(' Running local flows')
                #M, lkeys2, _ = g.getEdgeMatrix(l, l, lkeys, lkeys)
                M = g.cachedEdgeMatrix(l, l)
                #assert lkeys == _ == lkeys2
                #print M, M.shape, len(lkeys), lkeys[:5]
                #FIXME see if we run into any issues due to M being None for identity
                if M is not None:
                    # multiply scores by this matrix
                    try:
                        v = M.dot(self.scores[l])
                    except Exception, e:
                        v = np.dot(M, self.scores[l])
                        mylog(' ***** Hit exception %s: %s vs %s' % (e, M.shape, len(self.scores[l])))
                        mylog('%d Layerkeys: %s, %s' % (len(g.layerkeys[l]), g.layerkeys[l][:3], g.layerkeys[l][-3:]))
                        sys.exit()
                    #assert len(v) == len(lkeys) == M.shape[0] == M.shape[1] == len(self.scores[l])
                    assert len(v) == len(self.scores[l])
                    self.scores[l] = v
                    #print len(v), v, v.max(), v.argmax(), v.sum()
            # at this point, multiply by our weights
            self.scores[l] *= g.nodeweights(l, lkeys)
            # now apply the layerfunc, if we have one
            if layerfuncs and l in layerfuncs and layerfuncs[l]:
                self.scores[l] = layerfuncs[l](self.scores[l])
            # another quick check for nonzeros
            nz = len(self.scores[l].nonzero()[0])
            if nz == 0: continue
            # now run flows from this layer to all others in dir
            if debug > 1:
                mylog(' Running neighboring flows')
            for l2 in db.hkeys('layeredges:%s' % (l)):
                if l2 == l: continue
                l2level = layers[l2]['level']
                # only flow strictly toward the destination, never backwards
                if dir > 0 and (l2level > destlevel or l2level < curlevel): continue
                if dir < 0 and (l2level < destlevel or l2level > curlevel): continue
                l2keys = g.layerkeys[l2]
                if debug > 2:
                    mylog(' Neighboring flow from %s (%d) to %s (%d), dir %s, destlevel %s' % (l, curlevel, l2, l2level, dir, destlevel))
                # get the edge matrix
                #M, _, _ = g.getEdgeMatrix(l, l2, lkeys, l2keys)
                M = g.cachedEdgeMatrix(l, l2) #TODO most time spent here
                if M is not None:
                    #print M, M.shape, len(l2keys), l2keys[:5]
                    # multiply scores by this matrix to get dst scores
                    try:
                        v = M.dot(self.scores[l])
                    except Exception, e:
                        v = np.dot(M, self.scores[l])
                        log(' **** In exception: %s' % (e,))
                        raise
                    assert len(v) == len(self.scores[l2])
                    # add these scores to existing scores at that level
                    self.scores[l2] += v
            # at this point, multiply by the weights in the 2nd layer
            #self.scores[l2] *= g.nodeweights(l2, l2keys) #FIXME I think this will cause a double-weighting
        self.save()
        return self
    def get(self, layername, thresh=0.0, withscores=1, tokeys=0):
        """Returns (nodeid, score) pairs from the given layer, where score > thresh.
        Results are sorted from high score to low.
        If withscores is true (default) then returns scores as well.
        If tokeys is true, then maps ids to keys.
        """
        a = self.scores[layername]
        #ret = [(i, score) for i, score in enumerate(a) if score > thresh]
        #ret.sort(key=lambda pair: pair[1], reverse=1)
        # only process rest if we have any values above the threshold
        if not np.any(a > thresh): return []
        # node ids sorted by descending score
        inds = np.argsort(a)[::-1]
        scores = a[inds]
        scores = scores[scores > thresh]
        if tokeys:
            # NOTE: converts every index, not only the above-threshold ones
            inds = [self.g.nodekeyFromID(id, layername) for id in inds]
        if withscores:
            # zip truncates to the shorter sequence, i.e. the filtered scores
            ret = zip(inds, scores)
        else:
            ret = inds[:len(scores)]
        return ret
    def outliers(self, layers=None):
        """Returns the "outliers" amongst the given layers (or all if None).
        For each layer, takes its top-scoring node and computes an outlier
        score: 1.0 if it is the only scored node, else 1 - (second score /
        top score). Entries with outlier score 0 are dropped. Returns
        (nodekey, outlier_score, top_score) triples sorted descending by
        (outlier_score, top_score).
        """
        if not layers:
            layers = self.scores.keys()
        ret = []
        for l in layers:
            # get() returns results already sorted high-to-low
            scores = [(self.g.nodekeyFromID(id, l), score) for id, score in self.get(l)]
            if not scores: continue
            nk, s = scores[0]
            oscore = 1.0 if len(scores) == 1 else 1.0-(scores[1][1]/s)
            if oscore == 0: continue
            #print l, scores, nk, s, oscore
            ret.append((nk, oscore, s))
        ret.sort(key=lambda r: (r[1], r[2]), reverse=1)
        return ret
    @classmethod
    def combine(cls, tocomb, op='add', **kw):
        """Combines a list of (factor, flow) flows into one:
            ret = op(factor*flow for factor, flow in tocomb)
        Where 'op' is one of:
            'add': adds flows
            'mul': multiplies flows
        If you give any other keywords, they are used in the initialization.
        Negative results are clamped to zero. Returns None on error.
        """
        if not tocomb: return None
        # use the first flow as the template for graph and debug format
        first = tocomb[0][1]
        defkw = dict(tempflow=1, debugfmt=first.debugfmt)
        defkw.update(kw)
        f = RGLFlow(first.g, **defkw)
        for layer, a in f.scores.iteritems():
            if op == 'mul': # re-initialize the layer to be ones if we're multiplying
                a += 1.0
            for fac, flow in tocomb:
                if op == 'add':
                    a += fac*flow.scores[layer]
                elif op == 'mul':
                    a *= fac*flow.scores[layer]
            # clamp back to positive
            a[a < 0] = 0.0
        f.save()
        return f
| {
"repo_name": "neeraj-kumar/nkpylib",
"path": "nkredisgraph.py",
"copies": "1",
"size": "40830",
"license": "bsd-3-clause",
"hash": 2145750332079443000,
"line_mean": 42.8560687433,
"line_max": 186,
"alpha_frac": 0.5611070291,
"autogenerated": false,
"ratio": 3.8544321721891817,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4915539201289182,
"avg_score": null,
"num_lines": null
} |
""" A layer on top of GitPython, handles pulling and scheduling. """
import os.path
import platform
import random
import git
from Anemone import app, schedule
from Anemone.models import Job, Project
from Anemone.abcfile import parse as abcparse
from Anemone.buildslave import build
def setup_schedule():
    """Register a periodic build job for every project that has a positive
    schedule interval (interpreted as hours between builds)."""
    for proj in Project.select():
        interval = proj.schedule_interval
        if interval is None or interval <= 0:
            continue
        schedule.add_job(pull_build_project, "interval",
                         id="building_" + str(proj.id),
                         hours=interval,
                         args=[proj, "master"])
def pull_build_project(project, branch="master"):
    """Pull the given branch of a project and, when new commits arrived,
    create a job and kick off a build."""
    changed, pull_log = pull(project.path, app.config["SSH_PRIVATE"], branch)
    if not changed:
        return
    job = create_job(project, pull_log)
    settings = abcparse(os.path.join(project.path, "build.abc"))
    build(job, project, settings["windows"]) #TODO: Handle configs
def pull(repo_path, ssh, branch="master"):
    """Pull `branch` into the repository at `repo_path` over SSH.

    `ssh` is the path to the private key to use. Returns a tuple
    (changed, log): `changed` is True when new commits were fetched, and
    `log` is a human-readable summary of those commits (or "No new changes").
    """
    ssh_key = os.path.abspath(ssh)
    if platform.system() == "Windows":
        # git's ssh wants a unix-style path even on Windows (e.g. /C/path/key)
        ssh_key = "/" + ssh_key.replace("\\", "/").replace(":", "")
    ssh_cmd = "ssh -i %s" % ssh_key
    my_repo = git.Repo(repo_path)
    pull_output = ""
    contributer_emails = list()  # NOTE(review): collected but currently unused
    files = list()
    new_commit = False
    with my_repo.git.custom_environment(GIT_SSH_COMMAND=ssh_cmd):
        for result in my_repo.remotes.origin.pull(branch):
            if result.commit == my_repo.head.commit:
                continue
            new_commit = True
            contributer_emails.append(result.commit.author.email)
            pull_output += str(result.commit) + "\n"
            pull_output += str(result.commit.author) + "<" + str(result.commit.author.email) + ">\n"
            pull_output += str(result.commit.committed_datetime) + "\n"
            pull_output += str(result.commit.summary) + "\n"
            pull_output += str(result.commit.stats.total) + "\n\n"
            for stat in result.commit.stats.files: #We write all files at the end of the description
                files.append(stat)
    if not new_commit:
        # There were no new changes, we do not need to rebuild.
        return False, "No new changes"
    pull_output += "Files changed:\n"
    for changes in files:
        # Bug fix: separate the changed file names with newlines instead of
        # concatenating them into one unreadable string.
        pull_output += changes + "\n"
    return True, pull_output
def create_job(project, description):
    """Create, save and return a new Job with a randomly generated name.

    The name is built from two random entries of the name-generator list and
    suffixed with the zero-padded job id, e.g. "Quick.Foo.Bar.007".
    """
    namegen_path = os.path.join("Anemone", "templates", "namegen.html")
    # Bug fix: use a context manager so the file handle is closed.
    with open(namegen_path) as namefile:
        randomnames = namefile.readlines()
    # random.choice returns the whole line including its trailing newline,
    # so strip the last character.
    jobname = ("Quick." +
               random.choice(randomnames)[:-1] +
               random.choice(randomnames)[:-1])
    newjob = Job.create(project=project, name=jobname, description=description)
    newjob.name = newjob.name + ".{0:0=3d}".format(newjob.id)
    newjob.save()
    return newjob
| {
"repo_name": "Winnak/Anemone",
"path": "Anemone/githandling.py",
"copies": "1",
"size": "3147",
"license": "mit",
"hash": -1394937412854038800,
"line_mean": 39.8701298701,
"line_max": 100,
"alpha_frac": 0.6313949793,
"autogenerated": false,
"ratio": 3.7508939213349226,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.986471128259089,
"avg_score": 0.0035155236088064753,
"num_lines": 77
} |
"""A layout for a XLS report.
Each sheet in a report contains a single layout. The layout describes how the
different elements are organized and can contain child layouts inside.
"""
__author__ = 'jt@javiertordable.com'
__copyright__ = "Copyright (C) 2014 Javier Tordable"
from style import TableStyle
MAX_EXCEL_COLUMN = 16383  # zero-based index of the last xlsx column ("XFD")
class Layout(object):
    """A layout is a container for a group of report tables and other layouts.
    A layout can be embedded inside of another layout. Normally there will be a
    single layout which contains all other layouts.
    """
    def __init__(self, style):
        # Bug fix: the style argument was previously discarded
        # (self.style was set to None), which broke draw() for any
        # instance built through this constructor.
        self.style = style
        self.children = None
    # NOTE(review): the two accessors below are shadowed by the instance
    # attributes of the same name and are therefore unreachable through
    # instances; kept only to avoid changing the class interface.
    def style(self):
        """The style of the layout."""
        return self.style
    def children(self):
        """A list with the child layouts of this layout."""
        return self.children
    def size(self):
        """The size of the layout, in cells, given as (width, height).
        Subclasses must override this; the base implementation returns None.
        """
        return None
    def draw(self, output_sheet, start_position):
        """Draw the layout on the output sheet, starting at start_position.
        start_position is a (column, row) pair; each cell's content and
        format are supplied by the layout's style.
        """
        (start_column, start_row) = start_position
        (width, height) = self.size()
        for row in range(start_row, start_row + height):
            for column in range(start_column, start_column + width):
                cell_value = self.style.get_cell_content(column, row)
                cell_format = self.style.get_cell_format(column, row)
                output_sheet.write(row, column, cell_value, cell_format)
class FixedSizeLayout(Layout):
    """A layout occupying a fixed rectangle of cells, with no table content."""
    def __init__(self, style, width, height):
        if style is None:
            raise ValueError('Please give a valid style')
        if width < 1 or height < 1:
            raise ValueError('Please give positive dimensions')
        self.height = height
        self.width = width
        self.style = style
    def size(self):
        """Return the fixed dimensions as (width, height)."""
        dimensions = (self.width, self.height)
        return dimensions
class TableLayout(Layout):
    """A layout that renders a single table: one header row plus its data rows."""
    def __init__(self, style, table):
        if style is None or not isinstance(style, TableStyle):
            raise ValueError('Please use a TableSytle to draw a TableLayout')
        if table is None:
            raise ValueError('Plase give a valid table.')
        self.table = table
        self.style = style
    def size(self):
        """Width is the column count; height adds one header row to the data rows."""
        width = self.table.num_columns
        height = self.table.num_rows + 1
        return (width, height)
    def draw(self, output_sheet, start_position):
        """Write every table cell (header included) onto the output sheet."""
        (first_col, first_row) = start_position
        (width, height) = self.size()
        for out_row in range(first_row, first_row + height):
            data_row = out_row - first_row
            for out_col in range(first_col, first_col + width):
                data_col = out_col - first_col
                value = self.style.get_cell_content(data_col, data_row)
                fmt = self.style.get_cell_format(data_col, data_row)
                output_sheet.write(out_row, out_col, value, fmt)
class PaddingLayout(Layout):
    """Wraps a child layout with a border of padding cells.
    The padding arguments follow the CSS ordering: top, right, bottom, left.
    """
    def __init__(self, style, child_layout, top, right, bottom, left):
        if style is None:
            raise ValueError('Please give a valid style')
        paddings = (top, right, bottom, left)
        # Padding must be non-negative and not all zero.
        if any(p < 0 for p in paddings) or not any(paddings):
            raise ValueError('Please use valid padding sizes')
        if child_layout is None or not isinstance(child_layout, Layout):
            raise ValueError('Please pass a valid child layout')
        self.style = style
        self.top = top
        self.right = right
        self.bottom = bottom
        self.left = left
        self.children = [child_layout]
    def size(self):
        """The child's size expanded by the padding on each side."""
        (inner_w, inner_h) = self.children[0].size()
        return (inner_w + self.left + self.right,
                inner_h + self.top + self.bottom)
    def draw(self, output_sheet, start_position):
        """Paint the whole padded area with our style, then draw the child inset."""
        super(PaddingLayout, self).draw(output_sheet, start_position)
        (col, row) = start_position
        inner_position = (col + self.left, row + self.top)
        self.children[0].draw(output_sheet, inner_position)
class RowLayout(Layout):
    """Places its child layouts side by side in a single row."""
    def __init__(self, style, row_layouts):
        if style is None:
            raise ValueError('Please give a valid style')
        if row_layouts is None or not isinstance(row_layouts, list) or \
           len(row_layouts) < 1:
            raise ValueError('Please pass a non-empty list of layouts')
        self.style = style
        self.children = row_layouts
    def size(self):
        """Width is the sum of the children; height is the tallest child."""
        total_width = 0
        max_height = 0
        for child in self.children:
            (w, h) = child.size()
            total_width += w
            if h > max_height:
                max_height = h
        return (total_width, max_height)
    def draw(self, output_sheet, start_position):
        """Apply our own style, then draw the children left to right."""
        super(RowLayout, self).draw(output_sheet, start_position)
        (column, row) = start_position
        for child in self.children:
            child.draw(output_sheet, (column, row))
            # Advance horizontally by the child's width; the row stays fixed.
            column += child.size()[0]
class ColumnLayout(Layout):
    """Places its child layouts one under another in a single column."""
    def __init__(self, style, column_layouts):
        if style is None:
            raise ValueError('Please give a valid style')
        if column_layouts is None or not isinstance(column_layouts, list) or \
           len(column_layouts) < 1:
            raise ValueError('Please pass a non-empty list of layouts')
        self.style = style
        self.children = column_layouts
    def size(self):
        """Width is the widest child; height is the sum of all children."""
        max_width = 0
        total_height = 0
        for child in self.children:
            (w, h) = child.size()
            if w > max_width:
                max_width = w
            total_height += h
        return (max_width, total_height)
    def draw(self, output_sheet, start_position):
        """Apply our own style, then draw the children top to bottom."""
        super(ColumnLayout, self).draw(output_sheet, start_position)
        (column, row) = start_position
        for child in self.children:
            child.draw(output_sheet, (column, row))
            # Advance vertically by the child's height; the column stays fixed.
            row += child.size()[1]
class HideOutsideLayout(Layout):
    """A layout which hides the cells beyond the layout size.
    WARNING: The HideOutsideLayout should normally be the outermost layout in
    a sheet. It will hide any other layout outside of its dimensions.
    """
    def __init__(self, style, child_layout):
        if style is None:
            raise ValueError('Please give a valid style')
        if child_layout is None or not isinstance(child_layout, Layout):
            raise ValueError('Please pass a valid child layout')
        self.style = style
        self.children = [child_layout]
    def size(self):
        """Same size as the wrapped child layout."""
        return self.children[0].size()
    def draw(self, output_sheet, start_position):
        """Draw the child at (0, 0) and hide every row/column outside it."""
        (start_column, start_row) = start_position
        # Bug fix: compare integers with != rather than 'is not'; identity
        # tests against int literals rely on CPython's small-int caching.
        if start_column != 0 or start_row != 0:
            raise ValueError('The start position for this layout must be (0, 0)')
        # Hide all rows without data (below the layout).
        output_sheet.set_default_row(hide_unused_rows=True)
        # Hide all columns to the right of the layout.
        output_sheet.set_column(first_col=self.size()[0], last_col=MAX_EXCEL_COLUMN,
                                width=None, format=None, options={'hidden': True})
        # Apply the style of the HideOutsideLayout and then draw the child.
        super(HideOutsideLayout, self).draw(output_sheet, start_position)
        self.children[0].draw(output_sheet, start_position)
| {
"repo_name": "tordable/XlsxReports",
"path": "layout.py",
"copies": "1",
"size": "8369",
"license": "apache-2.0",
"hash": -1394631079583768600,
"line_mean": 33.4403292181,
"line_max": 80,
"alpha_frac": 0.6661488828,
"autogenerated": false,
"ratio": 3.7613483146067415,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.49274971974067416,
"avg_score": null,
"num_lines": null
} |
"""Album and Photo models."""
from __future__ import unicode_literals
from django.utils.encoding import python_2_unicode_compatible
from django.db import models
from django.contrib.auth.models import User
from taggit.managers import TaggableManager
# Visibility choices for photos and albums: (stored value, human label).
PUBLISHED_OPTIONS = (
    ("private", "private"),
    ("shared", "shared"),
    ("public", "public"),
)
def image_path(instance, file_name):
    """Build the upload path (relative to MEDIA_ROOT) for an image,
    namespaced by the owning user's id: user_<id>/<file_name>."""
    owner_dir = 'user_{0}'.format(instance.user.id)
    return '{0}/{1}'.format(owner_dir, file_name)
@python_2_unicode_compatible
class Photo(models.Model):
    """A single user-owned photo with tags and visibility controls."""
    # Free-form tags provided by django-taggit.
    tags = TaggableManager()
    # Owner; deleting the user also deletes their photos.
    user = models.ForeignKey(
        User,
        related_name='photos',
        on_delete=models.CASCADE,
    )
    # Image file stored under user_<id>/ (see image_path).
    image = models.ImageField(upload_to=image_path, blank=True, null=True)
    title = models.CharField(max_length=60)
    description = models.TextField(max_length=120, blank=True, null=True)
    date_uploaded = models.DateTimeField(auto_now_add=True)  # set once on create
    date_modified = models.DateTimeField(auto_now=True)  # refreshed on every save
    date_published = models.DateTimeField(blank=True, null=True)
    # Visibility: one of PUBLISHED_OPTIONS; public by default.
    published = models.CharField(max_length=10, choices=PUBLISHED_OPTIONS, default='public')
    def __str__(self):
        """Return string description of photo."""
        return "{}: Photo belonging to {}".format(self.title, self.user)
@python_2_unicode_compatible
class Album(models.Model):
    """A user-owned collection of photos with an optional cover photo."""
    # Owner; deleting the user also deletes their albums.
    user = models.ForeignKey(
        User,
        related_name="albums",
        on_delete=models.CASCADE,
    )
    # Photo shown as the album cover (may be unset).
    cover = models.ForeignKey(
        "Photo",
        null=True,
        related_name="albums_covered"
    )
    title = models.CharField(max_length=60)
    description = models.TextField(max_length=200)
    # A photo may belong to many albums and vice versa.
    photos = models.ManyToManyField(
        "Photo",
        related_name="albums",
        symmetrical=False
    )
    date_created = models.DateTimeField(auto_now_add=True)  # set once on create
    date_modified = models.DateTimeField(auto_now=True)  # refreshed on every save
    date_published = models.DateTimeField(blank=True, null=True)
    # Visibility: one of PUBLISHED_OPTIONS; public by default.
    published = models.CharField(max_length=10, choices=PUBLISHED_OPTIONS, default='public')
    def __str__(self):
        """Return String Representation of Album."""
        return "{}: Album belonging to {}".format(self.title, self.user)
| {
"repo_name": "pasaunders/django-imager",
"path": "imagersite/imager_images/models.py",
"copies": "1",
"size": "2304",
"license": "mit",
"hash": 9023013273337566000,
"line_mean": 31.4507042254,
"line_max": 92,
"alpha_frac": 0.6692708333,
"autogenerated": false,
"ratio": 3.8336106489184694,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.500288148221847,
"avg_score": null,
"num_lines": null
} |
"""Album views."""
from pyramid.httpexceptions import HTTPForbidden
from pyramid.httpexceptions import HTTPNotFound
from sqlalchemy.orm.exc import NoResultFound
from lasco.auth import get_user_role
from lasco.batch import Batch
from lasco.models import Album
from lasco.models import DBSession
from lasco.models import Gallery
from lasco.views.utils import TemplateAPI
def album_index(request):
    """An index page for an album.
    Looks up the gallery and album from the URL, enforces access control,
    and returns a template context with one page of the album's pictures
    (sorted by date) plus previous/next page URLs where applicable.
    Raises HTTPNotFound for unknown gallery/album and HTTPForbidden when
    the user has no role on the gallery/album.
    """
    session = DBSession()
    gallery_name = request.matchdict['gallery_name']
    album_name = request.matchdict['album_name']
    # page defaults to the first one when not given
    page = int(request.GET.get('page', 1))
    try:
        gallery = session.query(Gallery).filter_by(name=gallery_name).one()
        album = session.query(Album).filter_by(gallery_id=gallery.id,
                                               name=album_name).one()
    except NoResultFound:
        raise HTTPNotFound(request.url)
    if not get_user_role(request, session, gallery, album):
        raise HTTPForbidden()
    pictures = sorted(album.pictures, key=lambda p: p.date)
    settings = request.registry.settings
    # wrap in a Batch to get pagination (previous/next page numbers)
    pictures = Batch(pictures,
                     batch_length=int(settings['lasco.pictures_per_page']),
                     current=page)
    api = TemplateAPI(request,
                      '%s - %s' % (gallery.title, album.title))
    # template for per-page URLs; '%%s' survives as a '%s' placeholder
    url_of_page = '%s?page=%%s' % request.route_url(
        'album',
        gallery_name=gallery.name, album_name=album.name)
    if pictures.previous:
        api.previous_url = url_of_page % pictures.previous
    if pictures.next:
        api.next_url = url_of_page % pictures.next
    return {'api': api,
            'gallery': gallery,
            'album': album,
            'pictures': pictures,
            'url_of_page': url_of_page}
| {
"repo_name": "dbaty/Lasco",
"path": "lasco/views/album.py",
"copies": "1",
"size": "1772",
"license": "bsd-3-clause",
"hash": 5184988334340639000,
"line_mean": 35.9166666667,
"line_max": 75,
"alpha_frac": 0.6371331828,
"autogenerated": false,
"ratio": 3.683991683991684,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4821124866791684,
"avg_score": null,
"num_lines": null
} |
# Alden Golab
# 7/27/2016
# This file is an encapsulation in Python for ebLink, which runs in R.
# This program requires R to run.
# ebLink is drawn from Steorts, et. al. 2015 can can be found here:
# https://github.com/resteorts/ebLink
## NOTES ##
# Current build only takes csvs. To add additional file types or connections,
# please look at read_iterator.
from datetime import datetime
import rpy2
import pandas as pd
import os
import random
import subprocess
import csv
# Execute necessary dependencies for rpy2
from numpy import *
import scipy as sp
from pandas import *
import pickle
class EBlink(object):
    def __init__(self, interactive=False, files=[]):
        """Initialize linkage state.
        NOTE(review): `files=[]` is a mutable default argument; it is only
        stored (never mutated in place here), but callers should still
        prefer passing their own list.
        """
        ## File locations & directories
        self._files = files # A list of filepaths or, possibly, python objects
        self._tmp_dir = None # Directory where temp files are stored
        self._tmp = None # Temporary csv for use in link
        self._crosswalk_file = None # File where crosswalk is saved
        self._interactive = interactive # Whether this will be run interactively
        ## Inputs
        self._columns = [] # Contains columns to use from first file
        self._indices = {} # Specifies UID columns for each file
        self._matchcolumns = {} # Contains lists mapping columns in other files to self._columns
        self._column_types = {} # Maps first file's columns to String or Categorical
        ## Subjective inputs
        self.alpha = None # Alpha value for prior
        self.beta = None # Beta value for prior
        self.iterations = 0 # Number of gibbs iterations to run
        ## Constructed inputs
        self._numrecords = 0 # Number of records
        self._filenum = [] # Labels each entry in joined CSV with file number from self._files
        ## Outputs from ebLink
        self.pop_est = 0 # De-duplicated/linked population estimated by ebLink
        self.pairs = None # Pairs linked by ebLink
        self.crosswalk = None # Crosswalk of UIDs
        self._lookup_pairs = {} # Create a dict to look-up pairs of entries
        ## Interactive mode
        if self._interactive == True:
            self._run_interactively()
def _run_interactively(self):
if self.files==[]:
self.set_files()
self.set_columns()
self.get_col_types()
self.betauild()
self.define()
self.model()
self.betauild_crosswalk()
self.pickle()
########################################################
################## Attribute Methods ###################
########################################################
@property
def files(self):
return self._files
@files.setter
def files(self, files):
if type(files) == list and len(filter(lambda x: os.path.isfile(x), files)) == len(files):
self._files = files
else:
raise TypeError('Filename(s) input poorly formatted.')
@property
def columns(self):
return self._columns
@columns.setter
def columns(self, columns):
if type(columns) == list:
self._columns = columns
else:
raise TypeError('Columns input poorly formatted.')
@property
def column_types(self):
return self._column_types
@column_types.setter
def column_types(self, types):
if type(types) == dict:
self._column_types = types
else:
raise TypeError('Column types input poorly formatted.')
@property
def match_columns(self):
return self._matchcolumns
@match_columns.setter
def match_columns(self, match_columns):
if type(match_columns) == dict:
if self._columns != None:
for x in match_columns.keys():
if x not in self._columns[0]:
print '{} not in columns'.format(x)
raise NameError('match_columns keys do not match columns')
self._matchcolumns = match_columns
else:
self._matchcolumns = match_columns
else:
raise TypeError('Match columns input poorly formatted.')
@property
def indices(self):
return self._indices
@indices.setter
def indices(self, indices):
if type(indices) == list:
self._indices = indices
else:
raise TypeError('Indices input poorly formatted.')
########################################################
################## Class Methods ###################
########################################################
    def set_files(self, filename=None, delete=False):
        '''
        Sets the files to use for linkage. Takes a list of filepaths or a single
        filepath. All files must have headers.
        In interactive mode, prompts for a comma-separated list of paths
        (overriding any `filename` argument). If `delete` is set, clears the
        currently loaded files and returns immediately. Invalid paths trigger
        the file_retry() prompt.
        '''
        if delete: # This is for a redo function
            self._files = []
            print 'Files: {}'.format(self._files)
            return
        if self._interactive:
            print '\nPLEASE SPECIFY THE FILEPATHS FOR THE DATA YOU WISH TO LINK. Separate each filepath by a comma.\n'
            print 'Current files loaded: {}'.format(self._files)
            inp = None
            while inp == None:
                inp = raw_input('\nFilepaths: ')
            # split the raw input into a clean list of paths
            filename = []
            for x in inp.split(','):
                filename.append(x.strip())
        if type(filename) == list and len(filename) > 1:
            for n in filename:
                if os.path.isfile(n):
                    self._files.append(n)
                else:
                    print '\nERROR: {} is not a file.\n'.format(n)
                    self.file_retry()
        else:
            # single path: unwrap a one-element list first
            if type(filename) == list:
                filename = filename[0]
            if os.path.isfile(filename):
                self._files.append(filename)
            else:
                print '\nERROR: {} is not a file.\n'.format(filename)
                self.file_retry()
        if self._interactive:
            if not self.check_correct():
                self.set_files()
def check_correct(self):
'''
Asks the user if they want to continue.
'''
answer = ''
while answer.strip().upper() != 'Y' and answer.strip().upper() != 'N':
answer = raw_input('\nARE THESE SETTINGS CORRECT (Y/N)? ')
if answer.upper().strip() == 'N':
return False
else:
return True
def file_retry(self):
'''
Asks user if they'd like to retry in the event they enter a poorly
formed filename.
'''
retry = None
while retry == None:
retry = raw_input('\nWould you like to retry? (Y/N): ')
if retry == 'Y' or retry == 'y':
self.set_files()
else:
return
def set_columns(self, count=None, cols=[]):
'''
Will take a list containing the columns to be matched for each file,
treating corresponding indices as matching. For example, an input of
[['name', 'dob'], ['NAME', 'DOB']] will match (name, NAME) and
(dob, DOB), where the first column comes from file 1 and the second from
file 2.
Yields a dict of {file1_column: [file2column,...,fileNcolumn]}.
'''
if self._interactive == True:
if count == None:
count = 1
for f in self._files:
print '\nPLEASE INDICATE WHICH COLUMNS IN {} YOU WOULD LIKE TO USE FOR LINKING. Separate each column name with a comma.'.format(f)
cols = []
to_add = None
while to_add == None:
to_add = raw_input('\nColumns: ')
for x in to_add.split(','):
cols.append(x.strip())
self._columns.append(cols)
if count > 1:
for x in self._columns[0]:
print '\nWhich column in this file matches to {}?'.format(x)
match = None
while match == None:
match = raw_input('\nColumn: ')
if x not in self._matchcolumns:
self._matchcolumns[x] = []
self._matchcolumns[x].append(match.strip())
self.check_index(count)
count += 1
if not self.check_correct():
self.set_columns(count)
else:
for i in range(len(cols[0])):
if cols[0][i] not in self._matchcolumns:
self._matchcolumns[cols[0][i]] = []
for l in cols[1:]:
self._matchcolumns[cols[0][i]].append(l[i])
def check_index(self, num):
'''
Asks for an unique ID for a given file.
'''
answer = None
while answer == None:
answer = raw_input('\nDoes this file have a unique ID (Y/N)? ')
if answer.strip().upper() == 'Y':
index = None
while index == None:
index = raw_input('\nPlease specify the unique ID: ')
self._indices[num] = index
else:
self._indices[num] = False
return
def get_col_types(self):
'''
Interactively asks user for types of each column.
'''
types = {}
for col in self._columns[0]:
typ = None
while typ != 'C' and typ != 'S' and typ != 'c' and typ != 's':
typ = raw_input('\nIs {} a categorical (C) or string (S) field?: '.format(col))
if typ != 'C' and typ != 'S' and typ != 'c' and typ != 's':
print '\nERROR: Please enter C for categorical or S for string.'
types[col] = typ.upper()
if not self.check_correct():
self.get_col_types()
else:
self._column_types = types
def define(self, a=None, b=None, interations=0):
'''
Asks user to define alpha and beta for prior distribution or allows
coder to set if non-interactive session.
'''
if self._interactive == True:
print '\nPLEASE SET THE ALPHA AND BETA VALUES FOR THE PRIOR DISTRIBUTION.'
print 'If you are unsure how to set these values, please see the documentation for ebLink.\n'
self.alpha = None
while self.alpha == None:
self.alpha = raw_input('Alpha: ')
self.beta = None
while self.beta == None:
self.beta = raw_input('Beta: ')
print '\nHOW MANY INTERATIONS SHOULD BE RUN? RECOMMENDED > 100,000.'
self.iterations = 0
while self.iterations == 0:
self.iterations = raw_input('\nIterations: ')
if not self.check_correct():
self.define()
else:
self.alpha = a
self.beta = b
self.iterations = iterations
    def build(self, headers=False):
        '''
        Builds the inputs for ebLink. Constructs filenum input as well as
        a single hidden tmp csv for feeding into the system.

        Reads every registered file, keeps only the linking columns
        (matching later files' columns to the first file's via
        self._matchcolumns), appends a running record ID ('ETL_ID'), and
        writes everything into one combined CSV at self._tmp. Also fills
        self._filenum with the 1-based file number of every record.

        NOTE(review): the `headers` parameter is never read -- the local
        name is immediately overwritten by each file's header row. Confirm
        it can be removed from the signature.
        '''
        if len(self._files) < 2:
            print 'Only one file found. Please set additional files.'
            return
        self._build_directory()
        columns = self._columns[0] + ['ETL_ID'] # Adding a temporary ID
        with open(self._tmp, 'w') as dest:
            wtr = csv.writer(dest)
            file_count = 1
            # Go through each file
            for f in self._files:
                rdr, fi = self.read_iterator(f)
                headers = rdr.next()
                # In case iterator returns tuples instead of lists
                if type(headers) == tuple:
                    headers = list(headers)
                if file_count == 1:
                    # Header row of the combined CSV comes from file 1.
                    wtr.writerow(columns)
                # For each line in that file
                for line in rdr:
                    # In case iterator returns tuples instead of lists
                    if type(line) == tuple:
                        line = list(line)
                    # Count records
                    self._numrecords += 1
                    # Add file number to column to be fed into ebLink
                    self._filenum.append(file_count)
                    if file_count == 1:
                        row = []
                        for col in self._columns[0]:
                            index = headers.index(col)
                            row.append(line[index])
                    elif file_count >= 2:
                        # Else use match_columns to make sure columns are matched to
                        # first file's columns in the new file.
                        row = []
                        for col in self._columns[0]:
                            index = headers.index(self._matchcolumns[col][file_count-2])
                            row.append(line[index])
                    # Add additional ID, unique for the link
                    row.append(self._numrecords)
                    wtr.writerow(row)
                if fi:
                    fi.close()
                file_count += 1
def read_iterator(self, filepath):
'''
Takes a filepath and returns an iterator. Iterator returns each line as
a list of elements, much like csv writer. Must return headers as first
line. Should pass back two values, first is iterator, second is file
instance (can be None).
All returned iterators must work with both .next() and for loops. Can
return tuples or lists, where each column is a separate tuple or list
entry.
**ADD NEW CONNECTIONS/FILE TYPES HERE**
'''
### CSV ###
if 'csv' in filepath:
f = open(filepath, 'r')
reader = csv.reader(f)
return (reader, f)
### petl functionality for use with Urban ETL ###
if 'petl' in str(type(filepath)):
return (iter(filepath), None)
### Error Message ###
else:
raise NameError('This file type or connection is not yet supported.')
def _build_directory(self):
'''
Private function to build a temporary directory for storing data.
'''
self._tmp_dir = '._tmp-{}'.format(random.randint(0, 10000))
bashCommand = 'mkdir {}'.format(self._tmp_dir)
output = subprocess.check_output(['bash','-c', bashCommand])
now = datetime.today().strftime('%y%m%d-%H%M%S')
self._tmp = '{}/{}-{:.2}.csv'.format(self._tmp_dir, now, random.random())
    def model(self):
        '''
        Carries out modeling in R. Returns a numpy array

        Runs the ebLink Gibbs sampler through the R bridge, stores the mean
        estimated population size in self.pop_est and, when linked records
        exist, the linked index pairs (as tuples) in self.pairs.
        '''
        import R_interface as ri
        # NOTE(review): get_col_types() stores its result in
        # self._column_types, but this reads self.column_types -- confirm the
        # attribute is set elsewhere or this will raise AttributeError.
        result, estPopSize = ri.run_eblink(self._tmp, self._tmp_dir,
            self.column_types, self.alpha, self.beta, self.iterations, self._filenum,
            self._numrecords)
        self.pop_est = np.average(estPopSize)
        del estPopSize
        print "Estimated population size: ", self.pop_est
        print "Total number of records: ", self._numrecords
        if self.pop_est <= self._numrecords - 1:
            # Only look for linked pairs if there are pairs to look for
            p = ri.calc_linkages(result)
            self.pairs = [tuple(x) for x in p]
        del result
    def build_crosswalk(self):
        '''
        Writes identified links to a file using UIDs.
        ## This function uses Pandas and may need to be edited for scaling! ##

        Builds self.crosswalk: a DataFrame with one row per linked entity
        and one column per input file, holding that file's UID for the
        entity ('' when the file has no match; a tuple when ebLink flagged a
        within-file duplicate).
        '''
        if not self.pairs: # Don't run this if there aren't any pairs
            print 'No pairs identified.'
            return
        # Make the pair list symmetric so a link can be found from either side.
        for pair in self.pairs:
            self._lookup_pairs = self._add_to_dict(self._lookup_pairs, pair[0], pair[1])
        rv = {} # This is the output data set
        new_id = 0 # This is a new, uninteresting id for building rv
        filenum_index = 0 # This is for indexing into the self.filenum object
        placed = {} # Memo
        # Go through each file and read through all the objects
        for i in range(len(self._files)):
            fp = self._files[i]
            rdr, fi = self.read_iterator(fp)
            headers = list(rdr.next()) # Get headers for this file
            unique = self._indices[i] # Grab UID name this file from build
            uni_index = headers.index(unique) # Get the index for the UID within this file
            for line in rdr:
                line = list(line)
                # Check that file and filenum specified align
                if self._filenum[filenum_index] != (i + 1):
                    print "WARNING: File numbers don't match for entry {}.".format(filenum_index)
                    print 'Listed file {} should be {}.'.format(self._filenum[filenum_index], i + 1)
                    print 'Aborting. Check inputs for mismatch.'
                    print 'Index with error: ', filenum_index
                    print self._filenum[filenum_index + 1], ' is next.'
                    print 'Filenum: ', self._filenum
                    return
                # Check if this entry has matches that are already placed
                placed_matches = []
                if filenum_index in self._lookup_pairs:
                    placed_matches = [x for x in self._lookup_pairs[filenum_index] if x in placed]
                if len(placed_matches) > 0:
                    # If so, then add this UID to that entry
                    match_index = placed[placed_matches[0]]
                    # If there's already an entry in this column, it means that
                    # ebLink has detected a duplicate within the data file.
                    # This will be recorded as a tuple, but can be ignored
                    # later.
                    if rv[match_index][i] != '':
                        old = rv[match_index][i]
                        rv[match_index][i] = (old, line[uni_index])
                    # Otherwise just add the new UID
                    else:
                        rv[match_index][i] = line[uni_index]
                    placed[filenum_index] = match_index
                    filenum_index += 1
                else:
                    # Else add the unique id from original file for a new id
                    rv[new_id] = ['' for y in range(len(self._files))]
                    rv[new_id][i] = line[uni_index]
                    placed[filenum_index] = new_id
                    new_id += 1
                    filenum_index += 1
            if fi:
                fi.close()
        # Place crosswalk into attributes as a Pandas Dataframe
        self.crosswalk = pd.DataFrame.from_dict(rv, orient='index')
def build_linked_data(self, interactive=False):
'''
Builds a set of linked data.
'''
rv = []
data = pd.read_csv(self._tmp)
keeps = [x[0] for x in self.pairs] # Entries to keep
deletes = [y for sub in [x[1:] for x in self.pairs] for y in sub] # Flattens list of others
rv.append(self._columns[0] + ['ETL_ID'])
for i in range(len(self._filenum)):
if interactive and (i+1 in keeps or i+1 in deletes):
print 'Linked entries:'
print ' Entry {}: {}'.format(i+1, list(data.iloc[i]))
for j in self._lookup_pairs[i]:
print ' Entry {}: {}'.format(j, list(data.iloc[j]))
keep = None
while keep not in self._lookup_pairs[i]:
keep = int(raw_input('Please select which ETL_ID to keep: '))
keep = int(keep.strip())- 1
rv.append(list(data.iloc[keep]))
elif i+1 not in deletes:
rv.append(list(data.iloc[i]))
self.linked_set = pd.DataFrame(rv)
def _add_to_dict(self, dict, value1, value2):
'''
Adds both values and redundantly refers them to one another in dict.
'''
if value1 not in dict:
dict[value1] = [value2]
elif value1 in dict:
dict[value1].append(value2)
if value2 not in dict:
dict[value2] = [value1]
elif value2 in dict:
dict[value2].append(value1)
return dict
def pickle(self, filename=None):
'''
Pickles this model & settings for later use.
'''
if filename == None:
filename = 'eblink_' + datetime.today().strftime('%y%m%d-%H%M%S') + '.pkl'
f = open(filename, 'w')
pickle.dump(self, f)
f.close()
return True
def write(self, obj, filename):
'''
Writes ebLink object to file.
'''
if filename == None:
filename = 'links_' + datetime.today().strftime('%y%m%d-%H%M%S') + '.ebout'
try:
pd.to_csv(obj, filename)
except:
try:
pd.to_csv(pd.DataFrame(obj), filename)
except:
with open(filename, w) as f:
f.write(obj)
    def clean_tmp(self):
        '''
        Deletes the tmp folder.

        Removes the hidden temp directory created by _build_directory()
        (including the combined CSV inside it) via `rm -r`.
        '''
        subprocess.call('rm -r {}'.format(self._tmp_dir), shell=True)
        print 'Temp folder deleted.'
| {
"repo_name": "aldengolab/graphical-record-linkage",
"path": "python-encapsulation/eblink.py",
"copies": "1",
"size": "21295",
"license": "cc0-1.0",
"hash": 6204278033126673000,
"line_mean": 37.4386281588,
"line_max": 146,
"alpha_frac": 0.5170697347,
"autogenerated": false,
"ratio": 4.369101354123923,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5386171088823922,
"avg_score": null,
"num_lines": null
} |
"""A legacy test for a particular somewhat complicated mapping."""
from sqlalchemy import CHAR
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import String
from sqlalchemy import testing
from sqlalchemy import Text
from sqlalchemy.orm import backref
from sqlalchemy.orm import mapper
from sqlalchemy.orm import polymorphic_union
from sqlalchemy.orm import relationship
from sqlalchemy.testing import eq_
from sqlalchemy.testing import fixtures
from sqlalchemy.testing.fixtures import fixture_session
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
class MagazineTest(fixtures.MappedTest):
    """Round-trip test of a legacy, deeply nested polymorphic mapping.

    Nine mapped classes model a publication hierarchy (Publication ->
    Issue -> Location -> Magazine -> Page), with a three-level joined
    inheritance chain Page -> MagazinePage -> ClassifiedPage. The mapping
    is configured three ways (polymorphic unions, outer joins, or plain
    joined inheritance) and the same object graph must round-trip under
    each.
    """

    @classmethod
    def setup_classes(cls):
        # Plain comparable classes; mappings are applied per-test in
        # _setup_mapping().
        Base = cls.Comparable

        class Publication(Base):
            pass

        class Issue(Base):
            pass

        class Location(Base):
            pass

        class LocationName(Base):
            pass

        class PageSize(Base):
            pass

        class Magazine(Base):
            pass

        class Page(Base):
            pass

        class MagazinePage(Page):
            pass

        class ClassifiedPage(MagazinePage):
            pass

    @classmethod
    def define_tables(cls, metadata):
        # One table per class; page/magazine_page/classified_page form the
        # joined-inheritance chain (each child's PK is an FK to its parent).
        Table(
            "publication",
            metadata,
            Column(
                "id", Integer, primary_key=True, test_needs_autoincrement=True
            ),
            Column("name", String(45), default=""),
        )
        Table(
            "issue",
            metadata,
            Column(
                "id", Integer, primary_key=True, test_needs_autoincrement=True
            ),
            Column("publication_id", Integer, ForeignKey("publication.id")),
            Column("issue", Integer),
        )
        Table(
            "location",
            metadata,
            Column(
                "id", Integer, primary_key=True, test_needs_autoincrement=True
            ),
            Column("issue_id", Integer, ForeignKey("issue.id")),
            Column("ref", CHAR(3), default=""),
            Column(
                "location_name_id", Integer, ForeignKey("location_name.id")
            ),
        )
        Table(
            "location_name",
            metadata,
            Column(
                "id", Integer, primary_key=True, test_needs_autoincrement=True
            ),
            Column("name", String(45), default=""),
        )
        Table(
            "magazine",
            metadata,
            Column(
                "id", Integer, primary_key=True, test_needs_autoincrement=True
            ),
            Column("location_id", Integer, ForeignKey("location.id")),
            Column("page_size_id", Integer, ForeignKey("page_size.id")),
        )
        Table(
            "page",
            metadata,
            Column(
                "id", Integer, primary_key=True, test_needs_autoincrement=True
            ),
            Column("page_no", Integer),
            # Polymorphic discriminator: 'p' (page), 'm' (magazine page),
            # 'c' (classified page).
            Column("type", CHAR(1), default="p"),
        )
        Table(
            "magazine_page",
            metadata,
            Column(
                "page_id", Integer, ForeignKey("page.id"), primary_key=True
            ),
            Column("magazine_id", Integer, ForeignKey("magazine.id")),
            Column("orders", Text, default=""),
        )
        Table(
            "classified_page",
            metadata,
            Column(
                "magazine_page_id",
                Integer,
                ForeignKey("magazine_page.page_id"),
                primary_key=True,
            ),
            Column("titles", String(45), default=""),
        )
        Table(
            "page_size",
            metadata,
            Column(
                "id", Integer, primary_key=True, test_needs_autoincrement=True
            ),
            Column("width", Integer),
            Column("height", Integer),
            Column("name", String(45), default=""),
        )

    def _generate_data(self):
        """Build (unattached) the canonical object graph used by the test."""
        (
            Publication,
            Issue,
            Location,
            LocationName,
            PageSize,
            Magazine,
            Page,
            MagazinePage,
            ClassifiedPage,
        ) = self.classes(
            "Publication",
            "Issue",
            "Location",
            "LocationName",
            "PageSize",
            "Magazine",
            "Page",
            "MagazinePage",
            "ClassifiedPage",
        )
        london = LocationName(name="London")
        pub = Publication(name="Test")
        issue = Issue(issue=46, publication=pub)
        location = Location(ref="ABC", name=london, issue=issue)
        page_size = PageSize(name="A4", width=210, height=297)
        magazine = Magazine(location=location, size=page_size)
        # Pages attach themselves to the magazine via the backref.
        ClassifiedPage(magazine=magazine, page_no=1)
        MagazinePage(magazine=magazine, page_no=2)
        ClassifiedPage(magazine=magazine, page_no=3)
        return pub

    def _setup_mapping(self, use_unions, use_joins):
        """Configure the mappers.

        The Page hierarchy's with_polymorphic selectable is built three
        ways depending on the flags: polymorphic_union() (use_unions),
        explicit outer joins (use_joins), or none at all (plain joined
        inheritance).
        """
        (
            Publication,
            Issue,
            Location,
            LocationName,
            PageSize,
            Magazine,
            Page,
            MagazinePage,
            ClassifiedPage,
        ) = self.classes(
            "Publication",
            "Issue",
            "Location",
            "LocationName",
            "PageSize",
            "Magazine",
            "Page",
            "MagazinePage",
            "ClassifiedPage",
        )
        mapper(Publication, self.tables.publication)
        mapper(
            Issue,
            self.tables.issue,
            properties={
                "publication": relationship(
                    Publication,
                    backref=backref("issues", cascade="all, delete-orphan"),
                )
            },
        )
        mapper(LocationName, self.tables.location_name)
        mapper(
            Location,
            self.tables.location,
            properties={
                "issue": relationship(
                    Issue,
                    backref=backref(
                        "locations",
                        lazy="joined",
                        cascade="all, delete-orphan",
                    ),
                ),
                "name": relationship(LocationName),
            },
        )
        mapper(PageSize, self.tables.page_size)
        mapper(
            Magazine,
            self.tables.magazine,
            properties={
                "location": relationship(
                    Location, backref=backref("magazine", uselist=False)
                ),
                "size": relationship(PageSize),
            },
        )
        if use_unions:
            # UNION of the three concrete shapes of a Page row.
            page_join = polymorphic_union(
                {
                    "m": self.tables.page.join(self.tables.magazine_page),
                    "c": self.tables.page.join(self.tables.magazine_page).join(
                        self.tables.classified_page
                    ),
                    "p": self.tables.page.select()
                    .where(self.tables.page.c.type == "p")
                    .subquery(),
                },
                None,
                "page_join",
            )
            page_mapper = mapper(
                Page,
                self.tables.page,
                with_polymorphic=("*", page_join),
                polymorphic_on=page_join.c.type,
                polymorphic_identity="p",
            )
        elif use_joins:
            # Single LEFT OUTER JOIN chain covering all three levels.
            page_join = self.tables.page.outerjoin(
                self.tables.magazine_page
            ).outerjoin(self.tables.classified_page)
            page_mapper = mapper(
                Page,
                self.tables.page,
                with_polymorphic=("*", page_join),
                polymorphic_on=self.tables.page.c.type,
                polymorphic_identity="p",
            )
        else:
            page_mapper = mapper(
                Page,
                self.tables.page,
                polymorphic_on=self.tables.page.c.type,
                polymorphic_identity="p",
            )
        if use_unions:
            magazine_join = polymorphic_union(
                {
                    "m": self.tables.page.join(self.tables.magazine_page),
                    "c": self.tables.page.join(self.tables.magazine_page).join(
                        self.tables.classified_page
                    ),
                },
                None,
                "page_join",
            )
            magazine_page_mapper = mapper(
                MagazinePage,
                self.tables.magazine_page,
                with_polymorphic=("*", magazine_join),
                inherits=page_mapper,
                polymorphic_identity="m",
                properties={
                    "magazine": relationship(
                        Magazine,
                        backref=backref(
                            "pages", order_by=magazine_join.c.page_no
                        ),
                    )
                },
            )
        elif use_joins:
            magazine_join = self.tables.page.join(
                self.tables.magazine_page
            ).outerjoin(self.tables.classified_page)
            magazine_page_mapper = mapper(
                MagazinePage,
                self.tables.magazine_page,
                with_polymorphic=("*", magazine_join),
                inherits=page_mapper,
                polymorphic_identity="m",
                properties={
                    "magazine": relationship(
                        Magazine,
                        backref=backref(
                            "pages", order_by=self.tables.page.c.page_no
                        ),
                    )
                },
            )
        else:
            magazine_page_mapper = mapper(
                MagazinePage,
                self.tables.magazine_page,
                inherits=page_mapper,
                polymorphic_identity="m",
                properties={
                    "magazine": relationship(
                        Magazine,
                        backref=backref(
                            "pages", order_by=self.tables.page.c.page_no
                        ),
                    )
                },
            )
        mapper(
            ClassifiedPage,
            self.tables.classified_page,
            inherits=magazine_page_mapper,
            polymorphic_identity="c",
            primary_key=[self.tables.page.c.id],
        )

    @testing.combinations(
        ("unions", True, False),
        ("joins", False, True),
        ("plain", False, False),
        id_="iaa",
    )
    def test_magazine_round_trip(self, use_unions, use_joins):
        """Persist the graph, reload it fresh, and compare to a rebuilt copy."""
        self._setup_mapping(use_unions, use_joins)
        Publication = self.classes.Publication
        session = fixture_session()
        pub = self._generate_data()
        session.add(pub)
        session.commit()
        session.close()
        p = session.query(Publication).filter(Publication.name == "Test").one()
        test_pub = self._generate_data()
        eq_(p, test_pub)
        eq_(
            p.issues[0].locations[0].magazine.pages,
            test_pub.issues[0].locations[0].magazine.pages,
        )
| {
"repo_name": "monetate/sqlalchemy",
"path": "test/orm/inheritance/test_magazine.py",
"copies": "3",
"size": "11321",
"license": "mit",
"hash": 8804364880007253000,
"line_mean": 29.0291777188,
"line_max": 79,
"alpha_frac": 0.4673615405,
"autogenerated": false,
"ratio": 4.685844370860927,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6653205911360927,
"avg_score": null,
"num_lines": null
} |
"""Alembic migrations handler
"""
import os
from sqlalchemy import MetaData
from alembic.config import Config
from alembic import command as alembic_cmd
from lux.utils.context import app_attribute
TEMPLATE_PATH = os.path.join(
os.path.dirname(os.path.realpath(__file__)), 'commands', 'template'
)
def lux_template_directory():
    """Return the directory holding lux's bundled alembic templates."""
    return TEMPLATE_PATH
@app_attribute
def migrations(app):
    """Alembic handler

    Returns the :class:`Alembic` wrapper for *app* (presumably cached on
    the app by the ``app_attribute`` decorator -- confirm).
    """
    return Alembic(app)
class Alembic:
    """Thin wrapper around alembic's command API for a lux application.

    The alembic configuration (script location, per-database URLs and
    metadata) is derived from the application's ODM and its ``MIGRATIONS``
    setting; each public method maps 1:1 onto an alembic command.
    """

    def __init__(self, app):
        self.app = app
        # Build the alembic Config once; every command reuses it.
        self.cfg = self._create_config()

    def init(self):
        """Initialise the migrations directory from the 'lux' template."""
        dirname = self.cfg.get_main_option('script_location')
        alembic_cmd.init(self.cfg, dirname, template='lux')

    def show(self, revision):
        """Show the revision(s) denoted by ``revision``."""
        alembic_cmd.show(self.cfg, revision)

    def stamp(self, revision):
        """Stamp the database with ``revision`` without running migrations."""
        alembic_cmd.stamp(self.cfg, revision)

    def revision(self, message, autogenerate=False, branch_label=None):
        """Create a new revision file (optionally autogenerated)."""
        alembic_cmd.revision(self.cfg, autogenerate=autogenerate,
                             message=message, branch_label=branch_label)

    def upgrade(self, revision):
        """Upgrade the database to ``revision``."""
        alembic_cmd.upgrade(self.cfg, revision)

    def downgrade(self, revision):
        """Revert the database to ``revision``."""
        alembic_cmd.downgrade(self.cfg, revision)

    def merge(self, message, branch_label=None, rev_id=None, revisions=None):
        """Merge two or more revisions into one new revision."""
        alembic_cmd.merge(self.cfg, message=message,
                          branch_label=branch_label,
                          rev_id=rev_id, revisions=revisions)

    def _create_config(self):
        """Programmatically create Alembic config. To determine databases,
        DATASTORE from project's config file is used. To customize Alembic
        use MIGRATIONS in you config file.
        Example::

            MIGRATIONS = {
                'alembic': {
                    'script_location': '<path>',
                    'databases': '<db_name1>,<db_name2>',
                },
                '<db_name1>': {
                    'sqlalchemy.url': 'driver://user:pass@localhost/dbname',
                },
                '<bd_name2>': {
                    'sqlalchemy.url': 'driver://user:pass@localhost/dbname',
                },
                'logging': {
                    'path': '<path_to_logging_config>',
                }
            }

        For more information about possible options, please visit Alembic
        documentation:
        https://alembic.readthedocs.org/en/latest/index.html
        """
        app = self.app
        cfg = Config()
        # Point alembic at lux's bundled template directory instead of its own.
        cfg.get_template_directory = lux_template_directory
        migrations = os.path.join(app.meta.path, 'migrations')
        cfg.set_main_option('script_location', migrations)
        cfg.config_file_name = os.path.join(migrations, 'alembic.ini')
        odm = app.odm()
        databases = []
        # set section for each found database
        for name, engine in odm.keys_engines():
            if not name:
                name = 'default'
            databases.append(name)
            # url = str(engine.url).replace('+green', '')
            url = str(engine.url)
            cfg.set_section_option(name, 'sqlalchemy.url', url)
        # put databases in main options
        cfg.set_main_option("databases", ','.join(databases))
        # create empty logging section to avoid raising errors in env.py
        cfg.set_section_option('logging', 'path', '')
        # obtain the metadata required for `auto` command
        metadata = {}
        for key, db_engine in odm.keys_engines():
            if not key:
                key = 'default'
            metadata[key] = meta = MetaData()
            # Copy every table bound to this engine into the per-database
            # MetaData so autogenerate can diff against it.
            for table, engine in odm.binds.items():
                if engine == db_engine:
                    table.tometadata(meta)
        cfg.metadata = metadata
        config = app.config.get('MIGRATIONS')
        if config:
            # User overrides: 'alembic' section maps to main options,
            # everything else to named sections.
            for section in config.keys():
                for key, value in config[section].items():
                    if section == 'alembic':
                        cfg.set_main_option(key, value)
                    else:
                        cfg.set_section_option(section, key, value)
        return cfg
| {
"repo_name": "quantmind/lux",
"path": "lux/ext/odm/migrations.py",
"copies": "1",
"size": "4156",
"license": "bsd-3-clause",
"hash": -8978608378144670000,
"line_mean": 31.7244094488,
"line_max": 77,
"alpha_frac": 0.5635226179,
"autogenerated": false,
"ratio": 4.168505516549649,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5232028134449649,
"avg_score": null,
"num_lines": null
} |
"""Alerter module"""
import smtplib
from email.mime.text import MIMEText
import click
from slacker import Slacker
from config import CONFIG
class SlackAlerter(object):
    """SlackAlerter class

    Posts monitoring alerts to the Slack channel configured in the
    ``[slack]`` section of the config file (keys: API_TOKEN, CHANNEL).
    """

    def __init__(self):
        token = CONFIG.get('slack', 'API_TOKEN')
        self.slack = Slacker(token)
        self.channel = CONFIG.get('slack', 'CHANNEL')

    def send(self, indicator, host, value):
        """Send a slack alert

        :param indicator: object with name, comparison ('gt'/other),
            threshold and unit attributes.
        :param host: hostname the alert concerns.
        :param value: measured value that crossed the threshold.
        """
        message = """Monitoring alert for *%(host)s*. Indicator `%(indicator)s` is %(comparison)s threshold.
        Value: `%(value).2f%(unit)s`, threshold: `%(threshold)s%(unit)s`.""" % {
            'host': host,
            'indicator': indicator.name.upper(),
            'comparison': 'above' if indicator.comparison == 'gt' else 'below',
            'value': value,
            'threshold': indicator.threshold,
            'unit': indicator.unit
        }
        try:
            self.slack.chat.post_message('#%s' % self.channel, message, as_user=True)
        except Exception as e:
            # Best-effort delivery: report the failure on the console rather
            # than crashing the monitoring loop.
            click.secho('Slack send error: %s' % e, fg='red')
class MailAlerter(object):
    """MailAlerter class

    Sends monitoring alerts by email via the SMTP relay configured in the
    ``[mail]`` section of the config file (keys: SMTP_HOST, MAIL_FROM,
    MAIL_TO -- the latter a comma-separated list).
    """

    def __init__(self):
        self.smtp_host = CONFIG.get('mail', 'SMTP_HOST')

    def send(self, indicator, host, value):
        """Send an email alert

        :param indicator: object with name, comparison ('gt'/other),
            threshold and unit attributes.
        :param host: hostname the alert concerns.
        :param value: measured value that crossed the threshold.
        """
        msg = MIMEText("""
        Monitoring alert for %(host)s. Indicator %(indicator)s is %(comparison)s threshold.
        Value: %(value).2f%(unit)s, threshold: %(threshold)s%(unit)s.""" % {
            'host': host,
            'indicator': indicator.name.upper(),
            'comparison': 'above' if indicator.comparison == 'gt' else 'below',
            'value': value,
            'threshold': indicator.threshold,
            'unit': indicator.unit
        })
        msg['Subject'] = '[%s] Monitoring alert for %s' % (
            host,
            indicator.name.upper()
        )
        smtp_i = None
        try:
            smtp_i = smtplib.SMTP(self.smtp_host)
            smtp_i.sendmail(
                CONFIG.get('mail', 'MAIL_FROM'),
                str(CONFIG.get('mail', 'MAIL_TO')).split(','),
                msg.as_string()
            )
        except Exception as e:
            # Best-effort delivery: report the failure on the console rather
            # than crashing the monitoring loop.
            click.secho('Mail send error: %s' % e, fg='red')
        finally:
            # Always close the SMTP connection if it was opened.
            if smtp_i:
                smtp_i.quit()
| {
"repo_name": "abulte/python-influxdb-alerts",
"path": "alerters.py",
"copies": "1",
"size": "2324",
"license": "mit",
"hash": -8615540629591645000,
"line_mean": 30.8356164384,
"line_max": 108,
"alpha_frac": 0.5404475043,
"autogenerated": false,
"ratio": 3.778861788617886,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4819309292917886,
"avg_score": null,
"num_lines": null
} |
""" Alert Logic Event API """
# Author: Justin Ibarra (justin.s.ibarra@gmail.com)
# License: MIT - A full copy of the license is provided with this source code
from incidents import Incident, Event, threading
from errors import IncidentNotRetrievedError, EventNotRetrievedError
def get_event(event_id, customer_id, username, password, to_json=False):
    """Fetch a single Alert Logic event.

    Builds an Event for the given customer, authenticates it with the
    supplied credentials, retrieves its data, and returns either the Event
    object or its JSON representation when to_json is True.
    """
    evt = Event(event_id, customer_id)
    evt.set_credentials(username, password)
    evt.get_event()
    return evt.to_json() if to_json else evt
def get_events(event_id_list, customer_id, username, password, suppress_errors=True, to_json=False):
    """Fetch multiple Alert Logic events concurrently (one thread per event).

    :param event_id_list: iterable of event IDs to retrieve.
    :param suppress_errors: when False, raise EventNotRetrievedError if any
        individual fetch failed; when True (default), failed events are
        simply omitted from the result.
    :param to_json: when True, values are JSON representations instead of
        Event objects.
    :return: dict mapping event_id -> Event (or JSON string).
    """
    event_dict = {}
    threads = []
    errors = []

    def __multi_get_events(thread_event_id):  # for threading
        # Record failures instead of raising inside the worker thread.
        try:
            if to_json:
                event_dict[thread_event_id] = get_event(thread_event_id, customer_id, username, password).to_json()
            else:
                event_dict[thread_event_id] = get_event(thread_event_id, customer_id, username, password)
        except Exception as e:
            errors.append(e.message)

    for i in event_id_list:
        t = threading.Thread(target=__multi_get_events, args=(i,))
        threads.append(t)
        t.start()
    for _thread in threads:
        _thread.join()
    # Bug fix: only raise when failures actually occurred -- previously this
    # raised whenever suppress_errors was False, even with an empty error
    # list. The message grammar ("Their were") is also corrected.
    if errors and not suppress_errors:
        raise EventNotRetrievedError('There were errors retrieving some events: {0}'.format(errors))
    return event_dict
def get_incident(incident_id, customer_id, api_key, username, password, to_json=False):
    """Fetch a single Alert Logic incident.

    Returns the Incident object, or its JSON representation when to_json
    is True.
    """
    incident = Incident(incident_id, customer_id, api_key, username, password)
    return incident.to_json() if to_json else incident
def get_incidents(incident_id_list, customer_id, api_key, username, password, suppress_errors=True, to_json=False):
    """Fetch multiple Alert Logic incidents concurrently (one thread each).

    :param incident_id_list: iterable of incident IDs to retrieve.
    :param suppress_errors: when False, raise IncidentNotRetrievedError if
        any individual fetch failed; when True (default), failed incidents
        are simply omitted from the result.
    :param to_json: when True, values are JSON representations instead of
        Incident objects.
    :return: dict mapping incident_id -> Incident (or JSON string).
    """
    incident_dict = {}
    threads = []
    errors = []

    def __multi_get_incidents(thread_incident_id):  # for threading
        # Record failures instead of raising inside the worker thread.
        try:
            if to_json:
                incident_dict[thread_incident_id] = get_incident(
                    thread_incident_id, customer_id, api_key, username, password).to_json()
            else:
                incident_dict[thread_incident_id] = get_incident(
                    thread_incident_id, customer_id, api_key, username, password)
        except Exception as e:
            errors.append(e.message)

    for i in incident_id_list:
        t = threading.Thread(target=__multi_get_incidents, args=(i,))
        threads.append(t)
        t.start()
    for _thread in threads:
        _thread.join()
    # Bug fix: only raise when failures actually occurred -- previously this
    # raised whenever suppress_errors was False, even with an empty error
    # list. The message grammar ("Their were") is also corrected.
    if errors and not suppress_errors:
        raise IncidentNotRetrievedError('There were errors retrieving some incidents: {0}'.format(errors))
    return incident_dict
| {
"repo_name": "brokensound77/AlertLogic-event-api",
"path": "alapi/alapi.py",
"copies": "1",
"size": "2795",
"license": "mit",
"hash": 7370795453971111000,
"line_mean": 34.8333333333,
"line_max": 115,
"alpha_frac": 0.6350626118,
"autogenerated": false,
"ratio": 3.6583769633507854,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9786933972923,
"avg_score": 0.001301120445557255,
"num_lines": 78
} |
"""Alert on AWS Security Groups that allow ingress from anywhere."""
from rules.helpers.base import get_keys
from streamalert.shared.rule import rule
@rule(
    logs=['cloudwatch:events'],
    req_subkeys={
        'detail': ['eventName', 'requestParameters']
    })
def cloudtrail_security_group_ingress_anywhere(rec):
    """
    author: @mimeframe, @ryandeivert
    description: Fires when a security group ingress rule is authorized
                 with a source of anywhere, for either IPv4 (0.0.0.0/0)
                 or IPv6 (::/0).
    reference: http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/
               using-network-security.html#creating-security-group
    """
    detail = rec['detail']
    if detail['eventName'] != 'AuthorizeSecurityGroupIngress':
        return False
    params = detail['requestParameters']
    # get_keys() collects every value for the key, however deeply the CIDR
    # blocks are nested in the request parameters.
    open_to_world_v4 = '0.0.0.0/0' in get_keys(params, 'cidrIp')
    open_to_world_v6 = '::/0' in get_keys(params, 'cidrIpv6')
    return open_to_world_v4 or open_to_world_v6
| {
"repo_name": "airbnb/streamalert",
"path": "rules/community/cloudwatch_events/cloudtrail_security_group_ingress_anywhere.py",
"copies": "1",
"size": "1034",
"license": "apache-2.0",
"hash": 6380590633888725000,
"line_mean": 32.3548387097,
"line_max": 82,
"alpha_frac": 0.6470019342,
"autogenerated": false,
"ratio": 3.732851985559567,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9877910662861005,
"avg_score": 0.000388651379712398,
"num_lines": 31
} |
"""Alert on calls made without MFA that may be attempting to abuse a flawed enforcement policy"""
from streamalert.shared.rule import rule
# IAM privilege-changing actions an attacker holding non-MFA credentials
# might attempt against an account protected only by a flawed MFA policy.
_IAM_ACTIONS = {
    'CreateUser',
    'CreateAccessKey',
    'DetachUserPolicy',
    'DetachGroupPolicy',
    'RemoveUserFromGroup',
    'DeleteUserPolicy',
    'PutGroupPolicy',
    'PutUserPolicy'
}
# MFA-device setup events; *failed* attempts at these may indicate a
# race-condition probe (see the rule below).
_EVENT_NAMES = {'CreateVirtualMFADevice', 'EnableMFADevice'}
@rule(logs=['cloudtrail:events'])
def cloudtrail_mfa_policy_abuse_attempt(rec):
    """
    author: Scott Piper of Summit Route in collaboration with Duo Security
    description: Flags non-MFA-authenticated API calls that look like
                 attempts to abuse a flawed MFA enforcement policy --
                 possibly driven by leaked access keys.
    playbook: (a) Check whether the attempt was an honest mistake.
              (b) If not, begin IR: roll access keys, review past
                  CloudTrail logs for other activity, and determine how
                  the keys were leaked.
    """
    # Pull the MFA flag out of the session context; any missing key along
    # the way means the call was not MFA-authenticated.
    try:
        mfa_authenticated = rec['userIdentity']['sessionContext']['attributes']['mfaAuthenticated']
    except KeyError:
        mfa_authenticated = 'false'
    # An MFA-authenticated session cannot be driven by a bare stolen access
    # key, so none of the checks below apply.
    if mfa_authenticated == 'true':
        return False
    event_name = rec['eventName']
    # Deactivating an MFA device without being MFA-authenticated is the
    # classic abuse of the older flawed enforcement policy.
    if event_name == 'DeactivateMFADevice':
        return True
    # Privilege-changing IAM calls an attacker would try on the assumption
    # the user is an admin protected only by the flawed policy.
    if event_name in _IAM_ACTIONS:
        return True
    # Failed attempts to create/enable an MFA device (errorCode such as
    # AccessDenied, EntityAlreadyExists or LimitExceeded) may be a probe
    # for a device-swap race condition.
    return bool(rec['errorCode'] and event_name in _EVENT_NAMES)
| {
"repo_name": "airbnb/streamalert",
"path": "rules/community/cloudwatch_events/cloudtrail_mfa_policy_abuse_attempt.py",
"copies": "1",
"size": "3151",
"license": "apache-2.0",
"hash": -6808185094441624000,
"line_mean": 42.7638888889,
"line_max": 99,
"alpha_frac": 0.6835925103,
"autogenerated": false,
"ratio": 4.212566844919786,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5396159355219786,
"avg_score": null,
"num_lines": null
} |
"""Alert on dangerous S3 bucket ACLs."""
from streamalert.shared.rule import rule
# Grantee URIs that expose a bucket to any AWS account (AuthenticatedUsers)
# or to the entire internet (AllUsers).
_DENIED_ACLS = {
    'http://acs.amazonaws.com/groups/global/AuthenticatedUsers',
    'http://acs.amazonaws.com/groups/global/AllUsers'
}
@rule(
    logs=['cloudwatch:events'],
    req_subkeys={
        'detail': ['requestParameters', 'eventName']
    })
def cloudtrail_put_bucket_acl(rec):
    """
    author: airbnb_csirt
    description: Fires when an S3 bucket ACL change grants access to
                 AllUsers (anyone on the internet) or AuthenticatedUsers
                 (any user of any AWS account)
    reference: http://docs.aws.amazon.com/
               AmazonS3/latest/dev/acl-overview.html#specifying-grantee
    playbook: (a) identify who made the change by looking at `userIdentity`
              (b) ping that individual to verify the bucket should be accessible to the world
              (c) if not, remove the bucket ACL and investigate access logs
    """
    detail = rec['detail']
    # Check the event type early to avoid unnecessary work.
    if detail['eventName'] != 'PutBucketAcl':
        return False
    # requestParameters can be defined with a value of null.
    req_params = detail['requestParameters']
    if req_params is None:
        return False
    acl_policy = req_params.get('AccessControlPolicy')
    if not acl_policy:
        return False
    # Alert as soon as any grant targets one of the world-readable groups.
    for grant in acl_policy['AccessControlList']['Grant']:
        grantee = grant.get('Grantee', [])
        if 'URI' in grantee and grantee['URI'] in _DENIED_ACLS:
            return True
    return False
| {
"repo_name": "airbnb/streamalert",
"path": "rules/community/cloudwatch_events/cloudtrail_put_bucket_acl.py",
"copies": "1",
"size": "1761",
"license": "apache-2.0",
"hash": -4691504852176326000,
"line_mean": 37.2826086957,
"line_max": 97,
"alpha_frac": 0.6553094832,
"autogenerated": false,
"ratio": 3.957303370786517,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0006949207060501417,
"num_lines": 46
} |
"""Alert on insecure Amazon Machine Images (AMIs)."""
from streamalert.shared.rule import rule
@rule(
    logs=['cloudwatch:events'],
    req_subkeys={'detail': ['requestParameters', 'eventName']})
def unencrypted_ami_volume(rec):
    """
    author: airbnb_csirt
    description: Fires when an AMI is created with at least one block
                 device and none of its volumes are encrypted
    reference: https://amzn.to/2rQilUn
    playbook: (a) Reach out to the user who created the volume
              (b) Re-create the AMI with encryption enabled
              (c) Delete the old AMI
    """
    detail = rec['detail']
    # Check the event type early to avoid unnecessary work.
    if detail['eventName'] != 'CreateImage':
        return False
    # requestParameters can be defined with a value of null.
    if detail['requestParameters'] is None:
        return False
    devices = detail['requestParameters'].get('blockDeviceMapping', {}).get('items', [])
    if not devices:
        return False
    # Alert only when no device reports an encrypted EBS volume.
    return not any(device.get('ebs', {}).get('encrypted') for device in devices)
@rule(
    logs=['cloudwatch:events'],
    req_subkeys={'detail': ['requestParameters', 'eventName']})
def public_ami(rec):
    """
    author: airbnb_csirt
    description: Identifies an AMI being shared publicly (a launch permission
                 added for the 'all' group)
    reference: https://amzn.to/2rQilUn
    playbook: (a) Reach out to the user who created the volume
              (b) Set the AMI to private
    """
    if rec['detail']['eventName'] != 'ModifyImageAttribute':
        # check the event type early to avoid unnecessary performance impact
        return False
    if rec['detail']['requestParameters'] is None:
        # requestParameters can be defined with a value of null
        return False
    req_params = rec['detail']['requestParameters']
    permission_items = req_params.get('launchPermission', {}).get('add', {}).get('items', [])
    # A launch-permission item may name a specific account ('userId') instead
    # of a group; use .get() so those records do not raise KeyError.
    return any(item.get('group') == 'all' for item in permission_items)
| {
"repo_name": "airbnb/streamalert",
"path": "rules/community/cloudwatch_events/cloudtrail_ec2_image_creation.py",
"copies": "1",
"size": "2216",
"license": "apache-2.0",
"hash": 6920055813780598000,
"line_mean": 34.7419354839,
"line_max": 93,
"alpha_frac": 0.6407942238,
"autogenerated": false,
"ratio": 4.165413533834586,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0005579234281965591,
"num_lines": 62
} |
"""Alert on resources made public"""
from streamalert.shared.rule import rule
@rule(logs=['cloudtrail:events'])
def cloudtrail_snapshot_or_ami_made_public(rec):
    """
    author: spiper
    description: Alert on AWS API calls that make EBS snapshots,
        RDS snapshots, or AMIs public.
    playbook: (a) identify the AWS account in the log
        (b) identify what resource(s) are impacted by the API call
        (c) determine if the intent is valid, malicious or accidental
    """
    # For each resource type, walk the request parameters and check whether
    # permissions are being added for the 'all' group.
    event = rec['eventName']
    params = rec.get('requestParameters', {})

    # AMIs: a launchPermission 'add' entry for the 'all' group.
    if event == 'ModifyImageAttribute' and \
            params.get('attributeType', '') == 'launchPermission':
        permission = params.get('launchPermission', {})
        if 'add' in permission:
            for item in permission['add'].get('items', []):
                if item.get('group', '') == 'all':
                    return True

    # EBS snapshots: a createVolumePermission 'add' entry for the 'all' group.
    if event == 'ModifySnapshotAttribute' and \
            params.get('attributeType', '') == 'CREATE_VOLUME_PERMISSION':
        permission = params.get('createVolumePermission', {})
        if 'add' in permission:
            for item in permission['add'].get('items', []):
                if item.get('group', '') == 'all':
                    return True

    # RDS cluster snapshots: adding the literal value 'all'.
    if event == 'ModifyDBClusterSnapshotAttribute' and \
            'all' in params.get('valuesToAdd', []):
        return True

    return False
| {
"repo_name": "airbnb/streamalert",
"path": "rules/community/cloudwatch_events/cloudtrail_snapshot_or_ami_made_public.py",
"copies": "1",
"size": "1859",
"license": "apache-2.0",
"hash": -4538519538758568400,
"line_mean": 39.4130434783,
"line_max": 83,
"alpha_frac": 0.5664335664,
"autogenerated": false,
"ratio": 4.46875,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0007955678245533317,
"num_lines": 46
} |
"""AlertProfile class"""
from .halo_endpoint import HaloEndpoint
from .http_helper import HttpHelper
from .utility import Utility as utility
class AlertProfile(HaloEndpoint):
    """Manage Halo alert profiles.

    Filtering options for :func:`AlertProfile.list_all()` can be passed in as
    keyword arguments. Valid filters can be found at
    https://api-doc.cloudpassage.com/help#list-alert-profiles.

    Args:
        session (:class:`cloudpassage.HaloSession`): This will define how you
            interact with the Halo API, including proxy settings and API keys
            used for authentication.

    Keyword args:
        endpoint_version (int): Endpoint version override.
    """

    object_name = "alert_profile"
    objects_name = "alert_profiles"
    default_endpoint_version = 1

    def endpoint(self):
        """Return the base URL path used for API requests."""
        return "/v{}/{}".format(self.endpoint_version, self.objects_name)

    @classmethod
    def object_key(cls):
        """Return the key used to pull the policy from the json document."""
        return AlertProfile.object_name

    @classmethod
    def pagination_key(cls):
        """Return the pagination key for parsing paged results."""
        return AlertProfile.objects_name

    def create(self, policy_body):
        """Create an alert profile from a JSON document.

        Returns the ID of the newly-created profile.
        """
        request = HttpHelper(self.session)
        policy_dict = utility.policy_to_dict(policy_body)
        response = request.post(self.endpoint(), policy_dict)
        return response["id"]
| {
"repo_name": "cloudpassage/cloudpassage-halo-python-sdk",
"path": "cloudpassage/alert_profile.py",
"copies": "1",
"size": "1603",
"license": "bsd-3-clause",
"hash": -416531011816418500,
"line_mean": 32.3958333333,
"line_max": 77,
"alpha_frac": 0.6606363069,
"autogenerated": false,
"ratio": 4.415977961432507,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5576614268332507,
"avg_score": null,
"num_lines": null
} |
"""Alert related models."""
from django.db import models
from django.utils import timezone
from reversion import revisions as reversion
from .. import constants
class AlarmQuerySet(models.QuerySet):
    """Queryset providing alarm-specific helpers."""

    def opened(self):
        """Return only the alarms whose status is still open."""
        open_alarms = self.filter(status=constants.ALARM_OPENED)
        return open_alarms
class Alarm(models.Model):
    """A simple alarm to attach to a domain and/or mailbox.

    An alarm always belongs to a domain; the mailbox link is optional and
    is cleared (not deleted) when the mailbox goes away.
    """

    # Owning domain; deleting the domain cascades to its alarms.
    domain = models.ForeignKey(
        "admin.Domain", on_delete=models.CASCADE, related_name="alarms")
    # Optional mailbox; SET_NULL keeps the alarm after mailbox deletion.
    mailbox = models.ForeignKey(
        "admin.Mailbox", on_delete=models.SET_NULL,
        related_name="alarms",
        null=True, blank=True,
    )
    created = models.DateTimeField(default=timezone.now)
    # Set by close(); stays null while the alarm is open.
    closed = models.DateTimeField(null=True, blank=True)
    # Indexed because open alarms are filtered by status (AlarmQuerySet.opened).
    status = models.IntegerField(
        default=constants.ALARM_OPENED, choices=constants.ALARM_STATUSES,
        db_index=True
    )
    title = models.TextField()
    internal_name = models.CharField(max_length=120)

    objects = models.Manager.from_queryset(AlarmQuerySet)()

    class Meta:
        ordering = ["created"]

    def __str__(self):
        return "[{}] {} - {}".format(
            self.created, self.domain, self.get_status_display())

    def close(self):
        """Close this alarm: flip the status and record the closing time."""
        self.status = constants.ALARM_CLOSED
        self.closed = timezone.now()
        self.save()


# Record Alarm changes with django-reversion.
reversion.register(Alarm)
| {
"repo_name": "modoboa/modoboa",
"path": "modoboa/admin/models/alarm.py",
"copies": "1",
"size": "1465",
"license": "isc",
"hash": 4179619620076557000,
"line_mean": 25.6363636364,
"line_max": 73,
"alpha_frac": 0.6511945392,
"autogenerated": false,
"ratio": 3.9594594594594597,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5110653998659459,
"avg_score": null,
"num_lines": null
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.