hexsha stringlengths 40 40 | size int64 3 1.03M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 3 972 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 972 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 972 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 3 1.03M | avg_line_length float64 1.13 941k | max_line_length int64 2 941k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
3df8290b4d58e046c57416ad8b4b108b36d44756 | 1,730 | py | Python | examples/simple/raw/client.py | islavov/tchannel-python | ca3da9b4e0367d2e00078b158ab9e0bb4f328619 | [
"MIT"
] | 98 | 2015-07-10T23:42:11.000Z | 2021-11-08T11:21:02.000Z | examples/simple/raw/client.py | islavov/tchannel-python | ca3da9b4e0367d2e00078b158ab9e0bb4f328619 | [
"MIT"
] | 445 | 2015-07-10T23:58:02.000Z | 2021-08-24T14:58:39.000Z | examples/simple/raw/client.py | islavov/tchannel-python | ca3da9b4e0367d2e00078b158ab9e0bb4f328619 | [
"MIT"
] | 43 | 2015-07-22T19:14:57.000Z | 2021-09-14T12:12:38.000Z | # Copyright (c) 2016 Uber Technologies, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from __future__ import print_function
from tornado import gen, ioloop
from tchannel import TChannel
tchannel = TChannel('raw-client')
@gen.coroutine
def make_request():
    """Send a single raw request to the 'raw-server' service and return the response.

    Runs as a Tornado coroutine; the TChannel call yields until the
    response arrives.  The hostport pins the request to a specific
    server instance instead of using service discovery.
    """
    resp = yield tchannel.raw(
        service='raw-server',
        endpoint='endpoint',
        body='req body',
        headers='req headers',
        hostport='localhost:54495',
    )

    # Tornado 3-style way of returning a value from a coroutine.
    raise gen.Return(resp)
# Drive the coroutine to completion on the current IO loop and check
# that the demo server echoed the expected headers/body.
resp = ioloop.IOLoop.current().run_sync(make_request)

assert resp.headers == b'resp headers'
assert resp.body == b'resp body'

# Responses are bytes; decode for display.
print(resp.body.decode('utf8'))
print(resp.headers.decode('utf8'))
| 33.269231 | 79 | 0.749133 |
674ae26f7ab27154e44ffd01f958a46bb4bc9936 | 6,458 | py | Python | tnp_svm/src/svm_weight_recognition.py | warehouse-picking-automation-challenges/team_naist_panasonic | 999b8d20c5528f5510e43bf4a483215011f9871d | [
"Apache-2.0"
] | 10 | 2017-12-22T07:45:09.000Z | 2022-03-25T21:42:22.000Z | tnp_svm/src/svm_weight_recognition.py | warehouse-picking-automation-challenges/team_naist_panasonic | 999b8d20c5528f5510e43bf4a483215011f9871d | [
"Apache-2.0"
] | null | null | null | tnp_svm/src/svm_weight_recognition.py | warehouse-picking-automation-challenges/team_naist_panasonic | 999b8d20c5528f5510e43bf4a483215011f9871d | [
"Apache-2.0"
] | 6 | 2017-12-24T16:03:22.000Z | 2020-06-14T11:01:18.000Z | #
# Version: 2017.07.31
# Authors: Members of the Team NAIST-Panasonic at the Amazon Robotics Challenge 2017:
# Gustavo A. Garcia R. <garcia-g at is.naist.jp> (Captain),
# Lotfi El Hafi, Felix von Drigalski, Wataru Yamazaki, Viktor Hoerig, Arnaud Delmotte,
# Akishige Yuguchi, Marcus Gall, Chika Shiogama, Kenta Toyoshima, Pedro Uriguen,
# Rodrigo Elizalde, Masaki Yamamoto, Yasunao Okazaki, Kazuo Inoue, Katsuhiko Asai,
# Ryutaro Futakuchi, Seigo Okada, Yusuke Kato, and Pin-Chu Yang
#####################
# Copyright 2017 Team NAIST-Panasonic
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#####################
import glob
import sys
import time
import rospy
import cv2
from cv_bridge import CvBridge, CvBridgeError
from sensor_msgs.msg import Image, CameraInfo
from std_msgs.msg import String ,Float64 ,Bool
import numpy as np
import matplotlib.pyplot as plt
import os
import argparse
from sklearn import svm
from sklearn import cross_validation
from sklearn.svm import LinearSVC
from sklearn.metrics import classification_report, accuracy_score
from sklearn.externals import joblib
from BagOfFeatures import BagOfFeatures
from tnp_svm.srv import *
# Bag-of-features / SVM configuration.  These paths are relative to the
# process working directory — assumes the node is launched from the
# package directory (TODO confirm).
codebook_size = 15
group_name = "SVM_40items"
input_dir = "./input"

# Load the pre-trained visual codebook at import time.
bof = BagOfFeatures(codebookSize=codebook_size, groupName=group_name)
bof.load()

# Load the pre-trained SVM classifier for this item group.
fit_model = joblib.load("./model/fit_model_" + group_name + ".bin")
def loadImages(path):
    """Load every PNG image found directly under *path*.

    Returns a list of OpenCV images (``cv2.imread`` yields None for
    unreadable files, and those Nones are kept in the list).

    Previously this returned ``map(...)``, which is a list only under
    Python 2; a list comprehension gives the same result and stays a
    list under Python 3.
    """
    image_paths = glob.glob(os.path.join(path, "*.png"))
    return [cv2.imread(image_path) for image_path in image_paths]
def extractDescriptors(images, method):
    """Detect SIFT keypoints and compute their descriptors for each image.

    NOTE(review): *method* is accepted but never used — only SIFT is
    supported.  This is Python 2 code: the two-iterable ``map`` below
    relies on py2 semantics and would need ``list(map(...))`` under py3.
    """
    detector = cv2.xfeatures2d.SIFT_create()
    keypoints = map(detector.detect, images)
    # compute() returns (keypoints, descriptors); keep only the descriptors.
    descriptors = map(lambda a, b: detector.compute(a, b)[1], images, keypoints)
    return (descriptors)
class Identification:
    """ROS node logic: recognize an item from RGB-D input with a BoF + SVM model.

    Subscribes to RGB, depth and weight topics and exposes a
    'tnp_svm/recognize_by_svm' service that classifies the most recent
    frame.  Python 2 / rospy code.
    """

    def __init__(self):
        # Converter between ROS image messages and OpenCV arrays.
        self.bridge = CvBridge()
        # Set once the first RGB / depth frame has been received.
        self.Trigger_rgb = False
        self.Trigger_depth = False

    def rgb_callback(self,Image_rgb):
        """Cache the latest RGB frame as a uint8 numpy array."""
        try:
            self.rgb_image_cv2 = self.bridge.imgmsg_to_cv2(Image_rgb,"bgr8")
            self.rgb_array = np.array(self.rgb_image_cv2, dtype=np.uint8)
            self.Trigger_rgb = True
        except CvBridgeError,e:
            print e

    def depth_callback(self,Depth_data):
        """Cache the latest depth frame as an int16 numpy array (mm values)."""
        try:
            self.depth_data_cv2 = self.bridge.imgmsg_to_cv2(Depth_data,"16UC1")
            self.depth_array = np.array(self.depth_data_cv2, dtype=np.int16)
            self.Trigger_depth = True
        except CvBridgeError,e:
            print e

    def recognize_by_svm_callback(self,req):
        """Service handler: classify the current frame and wrap the result in a response."""
        print 'callback recognize_items_callback called'
        # print 'target_item: ', req.target_item_ids.data
        items_ids, confidences = self.identify()
        response = RecognizeBySvmResponse()
        for elem in items_ids:
            response.items_ids.append(String(elem))
        response.confidences = [Float64(confidences)]
        rospy.loginfo("Items_ids: %s",items_ids)
        rospy.loginfo("Confidences: %s",confidences)
        return response

    def subtract_background(self,depth,rgb):
        """Black out RGB pixels whose depth is outside the 3000-6000 range.

        NOTE(review): only the top half of a 640x480 frame is scanned
        (rows 0-319), and the triple Python loop is O(3*640*320) per
        frame — a vectorized numpy mask would be far faster.  Also
        mutates *rgb* in place despite the copy-looking assignment.
        """
        depth_filtered = cv2.blur(depth,(10,10))
        rgb_subtracted = rgb
        # Depth of 0 at the image centre is taken to mean "sensor not ready".
        if depth_filtered[240][320] == 0:
            print("wait for depth sensor")
        else:
            #set camera solution
            for i in range (0,3):
                for j in range (0,640):
                    for k in range (0,320):
                        if 3000 < depth_filtered[k][j] < 6000:
                            # print(depth_filtered[240][320])
                            rgb_subtracted = rgb
                        else:
                            rgb_subtracted[k][j][i] = 0
        return rgb_subtracted

    def weight_callback(self,weight):
        """Bucket the measured weight difference (kg, presumably — TODO confirm units).

        NOTE(review): values exactly on a boundary (e.g. 0.125) match no
        bucket, and the triggers are never reset to False.
        """
        if 0<weight<0.125:
            self.group1_Trigger = True
        elif 0.125<weight<0.20:
            self.group2_Trigger = True
        elif 0.20<weight<0.25:
            self.group3_Trigger = True
        elif 0.25<weight<0.31:
            self.group4_Trigger = True
        elif 0.31<weight<0.40:
            self.group5_Trigger = True
        elif 0.40<weight<0.55:
            self.group6_Trigger = True
        elif 0.55<weight<1.00:
            self.group7_Trigger = True

    def identify(self):
        """Classify the latest background-subtracted frame with the SVM.

        Returns (labels, confidence).  NOTE(review): if either sensor
        trigger is still False the else-branch runs and *items_ids* /
        *confidences* are never assigned, so the final return raises
        UnboundLocalError instead of retrying.
        """
        if self.Trigger_rgb and self.Trigger_depth == True:
            rgb_subtracted = self.subtract_background(self.depth_array,self.rgb_array)
            features = {}
            features["test"] = extractDescriptors([rgb_subtracted], method="SIFT")
            #make_histogram
            hist = {}
            hist["test"] = map(lambda a: bof.makeHistogram(np.matrix(a)), features["test"])
            #predict_label
            result_labels = fit_model.predict(hist["test"][0])
            print("Result=%s" % (result_labels))
            items_ids=result_labels
            #Predict margin in SVM
            confidences_matrix=fit_model.predict_proba(hist["test"][0])
            confidences=np.max(confidences_matrix[0])
            print("Confidence=%s" % (confidences))
            cv2.imshow("img",rgb_subtracted)
            cv2.waitKey(1)
        else:
            print("wait")
            rospy.sleep(1.0)
        return items_ids , confidences

    def start(self):
        """Register the recognition service and the sensor subscribers."""
        #service,topic
        service = rospy.Service('tnp_svm/recognize_by_svm', RecognizeBySvm, self.recognize_by_svm_callback)
        rgb_sub = rospy.Subscriber("/camera/rgb/image_raw",Image,self.rgb_callback)
        weight_sub = rospy.Subscriber("tnp_weight_events/weight_difference", Float64, self.weight_callback)
        depth_sub = rospy.Subscriber("/camera/depth/image_raw",Image,self.depth_callback)
if __name__== "__main__":
    try:
        rospy.init_node('identification')
        # NOTE(review): this rebinds the class name to the instance,
        # shadowing the Identification class from here on.
        Identification = Identification()
        Identification.start()
        rospy.spin()
    except rospy.ROSInterruptException:
        pass
63018b34fe2467dac21c77164b98486d1cbb6312 | 9,862 | py | Python | littlefish/timetool.py | michaelwalkerfl/littlefish | b226dde71367225d1c630c0b24f5157ad5aef2b6 | [
"Apache-2.0"
] | 3 | 2017-04-27T09:47:06.000Z | 2019-05-13T20:48:03.000Z | littlefish/timetool.py | michaelwalkerfl/littlefish | b226dde71367225d1c630c0b24f5157ad5aef2b6 | [
"Apache-2.0"
] | null | null | null | littlefish/timetool.py | michaelwalkerfl/littlefish | b226dde71367225d1c630c0b24f5157ad5aef2b6 | [
"Apache-2.0"
] | 3 | 2019-02-19T20:33:58.000Z | 2020-09-01T09:01:51.000Z | """
This contains functions to manipulate and display timestamps
"""
import logging
import datetime
import calendar
import dateutil.parser
import pytz
__author__ = 'Stephen Brown (Little Fish Solutions LTD)'
log = logging.getLogger(__name__)
utc = pytz.utc
local = pytz.timezone('Europe/London')
def to_local_time(time_in):
    """Convert a UTC timestamp to a naive datetime in the local (Europe/London) zone.

    Accepts a naive datetime (assumed UTC), a UTC-aware datetime, or a
    plain date (returned unchanged).  Raises for any other timezone.
    """
    if not hasattr(time_in, 'tzinfo'):
        # This is a date with no timestamp info so we can't do the conversion
        return time_in
    elif time_in.tzinfo is None:
        # If there is no timezone, localise it to UTC
        time_utc = utc.localize(time_in)
    elif hasattr(time_in.tzinfo, 'zone') and time_in.tzinfo.zone == 'UTC':
        # Already UTC (pytz-style tzinfo) - no need to localize
        time_utc = time_in
    elif hasattr(time_in.tzinfo, 'tzname') and time_in.tzinfo.tzname(time_in) == 'UTC':
        # Already UTC (stdlib/dateutil-style tzinfo)
        time_utc = time_in
    else:
        # Not a UTC timestamp
        raise Exception('Not a UTC time: %s' % time_in)

    time_local = time_utc.astimezone(local)
    # Strip off timezone info
    return time_local.replace(tzinfo=None)
def get_local_time():
    """Return the current time as a naive datetime in the local (Europe/London) zone."""
    return to_local_time(datetime.datetime.utcnow())
def date_to_local_time(date):
    """Return a timezone-aware datetime at local midnight on *date*."""
    midnight = datetime.time()
    return local.localize(datetime.datetime.combine(date, midnight))
def date_to_datetime(date):
    """Promote a date to a naive datetime at midnight on that day."""
    midnight = datetime.time()
    return datetime.datetime.combine(date, midnight)
def to_utc_time(time_in):
    """Convert a local-time timestamp to a naive UTC datetime.

    Accepts a naive datetime (assumed local), a datetime already aware in
    the local zone, or a plain date (returned unchanged).  Raises for any
    other timezone.
    """
    if not hasattr(time_in, 'tzinfo'):
        # This is a date with no timestamp info so we can't do the conversion
        return time_in
    elif time_in.tzinfo is None:
        # If there is no timezone, localise it to local time
        time_local = local.localize(time_in)
    elif time_in.tzinfo.zone == local.zone:
        # Already local (pytz-style tzinfo) - no need to localize
        time_local = time_in
    else:
        # Not a local timestamp
        raise Exception('Not a local time: %s' % time_in)

    time_utc = time_local.astimezone(utc)
    # Strip off timezone info
    return time_utc.replace(tzinfo=None)
def format_datetime(datetime, convert_to_local=True):
    """Format a timestamp as dd/mm/yyyy HH:MM, or '' for a falsy value.

    When convert_to_local is True the timestamp is treated as UTC and
    converted to local time before formatting.
    """
    if not datetime:
        return ''

    value = to_local_time(datetime) if convert_to_local else datetime
    return value.strftime('%d/%m/%Y %H:%M')
def format_datetime_seconds(datetime, convert_to_local=True):
    """Format a timestamp as dd/mm/yyyy HH:MM:SS, or '' for a falsy value.

    When convert_to_local is True the timestamp is treated as UTC and
    converted to local time before formatting.
    """
    if not datetime:
        return ''

    value = to_local_time(datetime) if convert_to_local else datetime
    return value.strftime('%d/%m/%Y %H:%M:%S')
def format_datetime_long(datetime, convert_to_local=True):
    """Format a timestamp like 'Thursday 02 January 2020 03:04', or '' for a falsy value.

    When convert_to_local is True the timestamp is treated as UTC and
    converted to local time before formatting.
    """
    if not datetime:
        return ''

    value = to_local_time(datetime) if convert_to_local else datetime
    return value.strftime('%A %d %B %Y %H:%M')
def format_datetime_long_seconds(datetime, convert_to_local=True):
    """Format a timestamp like 'Thursday 02 January 2020 03:04:05', or '' for a falsy value.

    When convert_to_local is True the timestamp is treated as UTC and
    converted to local time before formatting.
    """
    if not datetime:
        return ''

    value = to_local_time(datetime) if convert_to_local else datetime
    return value.strftime('%A %d %B %Y %H:%M:%S')
def format_date(datetime, convert_to_local=True):
    """Format a timestamp as dd/mm/yyyy.

    Falsy values format to ''.  Timestamps strftime rejects are logged
    and rendered as '????'.  When convert_to_local is True the timestamp
    is treated as UTC and converted to local time before formatting.
    """
    if not datetime:
        return ''

    try:
        value = to_local_time(datetime) if convert_to_local else datetime
        return value.strftime('%d/%m/%Y')
    except ValueError:
        log.warning('Invalid date: %s' % datetime)
        return '????'
def format_date_long(datetime, convert_to_local=True):
    """Format a timestamp like 'Thursday 2 January 2020' (day unpadded).

    Falsy values format to ''.  Timestamps strftime rejects are logged
    and rendered as '????'.  When convert_to_local is True the timestamp
    is treated as UTC and converted to local time before formatting.
    Note: '%-d' is a glibc/BSD extension and is not portable to Windows.
    """
    if not datetime:
        return ''

    try:
        value = to_local_time(datetime) if convert_to_local else datetime
        return value.strftime('%A %-d %B %Y')
    except ValueError:
        log.warning('Invalid date: %s' % datetime)
        return '????'
def format_date_long_no_day(datetime, convert_to_local=True):
    """Format a timestamp like '2 January 2020' (no weekday, day unpadded).

    Falsy values format to ''.  Timestamps strftime rejects are logged
    and rendered as '????'.  When convert_to_local is True the timestamp
    is treated as UTC and converted to local time before formatting.
    Note: '%-d' is a glibc/BSD extension and is not portable to Windows.
    """
    if not datetime:
        return ''

    try:
        value = to_local_time(datetime) if convert_to_local else datetime
        return value.strftime('%-d %B %Y')
    except ValueError:
        log.warning('Invalid date: %s' % datetime)
        return '????'
def format_time(datetime, convert_to_local=True):
    """Format a timestamp as HH:MM:SS, or '' for a falsy value.

    When convert_to_local is True the timestamp is treated as UTC and
    converted to local time before formatting.
    """
    if not datetime:
        return ''

    value = to_local_time(datetime) if convert_to_local else datetime
    return value.strftime('%H:%M:%S')
def format_time_delta(delta):
    """Format a timedelta as 'D days H:MM', omitting the days part when under a day."""
    if delta.days:
        return str(delta.days) + ' days ' + format_duration_seconds(delta.seconds)

    return format_duration_seconds(delta.seconds)
def format_duration_seconds(seconds):
    """Format a duration in seconds as 'H:MM' (hours unpadded, minutes zero-padded).

    Replaces the previous manual "prepend a 0 when m < 10" branching
    with a standard zero-padded format specifier.
    """
    h, remainder = divmod(seconds, 3600)
    m = remainder // 60
    return '%i:%02i' % (h, m)
def add_working_days(num_days, date=None, include_saturday=False):
    """Return the date *num_days* working days after *date* (default: today).

    Sundays are always skipped; Saturdays are skipped too unless
    include_saturday is True.
    """
    current = datetime.date.today() if date is None else date
    one_day = datetime.timedelta(days=1)
    skipped_weekdays = (6,) if include_saturday else (5, 6)

    for _ in range(num_days):
        current += one_day
        while current.weekday() in skipped_weekdays:
            current += one_day

    return current
def datetime_from_datepicker(date_string):
    """Parse a jQuery UI datepicker string (dd/mm/yyyy) into a datetime."""
    datepicker_format = '%d/%m/%Y'
    return datetime.datetime.strptime(date_string, datepicker_format)
def date_from_datepicker(date_string):
    """Parse a jQuery UI datepicker string (dd/mm/yyyy) into a date."""
    parsed = datetime_from_datepicker(date_string)
    return parsed.date()
def datetime_to_datepicker(timestamp):
    """Render *timestamp* in the jQuery UI datepicker format (dd/mm/yyyy)."""
    return '{:%d/%m/%Y}'.format(timestamp)
def add_months(months, timestamp=None):
    """Add a number of (possibly negative) months to a timestamp.

    If the target day does not exist in the destination month (e.g. the
    31st of February), the first day of the following month is used when
    adding and the last day of the destination month when subtracting.

    Args:
        months: number of months to add; may be negative.
        timestamp: datetime to offset; defaults to the current UTC time.
            Bug fix: the default used to be ``datetime.datetime.utcnow()``
            evaluated once at import time, so omitting the argument
            returned results frozen at process start.

    Returns:
        A datetime with second precision (microseconds are dropped).
    """
    if timestamp is None:
        timestamp = datetime.datetime.utcnow()

    month = timestamp.month
    new_month = month + months
    years = 0
    while new_month < 1:
        new_month += 12
        years -= 1
    while new_month > 12:
        new_month -= 12
        years += 1

    year = timestamp.year + years
    try:
        return datetime.datetime(year, new_month, timestamp.day, timestamp.hour, timestamp.minute, timestamp.second)
    except ValueError:
        # timestamp.day exceeds the last day of the destination month
        if months > 0:
            # We are adding, so use the first day of the next month
            new_month += 1
            if new_month > 12:
                new_month -= 12
                year += 1
            return datetime.datetime(year, new_month, 1, timestamp.hour, timestamp.minute, timestamp.second)
        else:
            # We are subtracting - use the last day of the same month
            new_day = calendar.monthrange(year, new_month)[1]
            return datetime.datetime(year, new_month, new_day, timestamp.hour, timestamp.minute, timestamp.second)
def add_months_to_date(months, date):
    """Add a number of (possibly negative) months to a date.

    If the target day does not exist in the destination month (e.g. the
    31st of February), the first day of the following month is used when
    adding and the last day of the destination month when subtracting.

    Bug fix: the overflow branches previously returned
    ``datetime.datetime`` objects while the normal path returned
    ``datetime.date``; all paths now return a ``datetime.date``.
    """
    new_month = date.month + months
    years = 0
    while new_month < 1:
        new_month += 12
        years -= 1
    while new_month > 12:
        new_month -= 12
        years += 1

    year = date.year + years
    try:
        return datetime.date(year, new_month, date.day)
    except ValueError:
        # date.day exceeds the last day of the destination month
        if months > 0:
            # We are adding, so use the first day of the next month
            new_month += 1
            if new_month > 12:
                new_month -= 12
                year += 1
            return datetime.date(year, new_month, 1)
        else:
            # We are subtracting - use the last day of the same month
            new_day = calendar.monthrange(year, new_month)[1]
            return datetime.date(year, new_month, new_day)
def unix_time(dt=None, as_int=False):
    """Return the unix timestamp (seconds since 1970-01-01 UTC) for *dt*.

    Args:
        dt: a naive UTC datetime, or a plain date (midnight assumed);
            defaults to the current UTC time.
        as_int: when True, truncate the result to an int instead of
            returning a float.
    """
    if dt is None:
        dt = datetime.datetime.utcnow()
    if type(dt) is datetime.date:
        # Promote a plain date to midnight on that day
        dt = datetime.datetime(dt.year, dt.month, dt.day)

    epoch = datetime.datetime.utcfromtimestamp(0)
    delta = dt - epoch

    if as_int:
        return int(delta.total_seconds())

    return delta.total_seconds()


def current_time_millis(dt=None):
    """Return the unix timestamp in milliseconds for *dt* (default: the current UTC time).

    Bug fix: the *dt* argument was previously computed and then ignored,
    so the current time was always returned.
    """
    return int(unix_time(dt=dt) * 1000)
def format_datetime_iso8601(datetime):
    """Format *datetime* as an ISO-8601 UTC string, e.g. '2020-01-02T03:04:05Z'."""
    return '{:%Y-%m-%dT%H:%M:%SZ}'.format(datetime)
def datetime_from_iso8601(string):
    """Parse an ISO-8601 timestamp string into a datetime (via dateutil)."""
    return dateutil.parser.parse(string)
def is_christmas_period():
    """Return True if today falls within the Christmas period (15-27 December)."""
    today = datetime.date.today()
    return today.month == 12 and 15 <= today.day <= 27
def get_end_of_day(timestamp):
    """Given a date or a datetime, return a datetime at 23:59:59 on that day."""
    day = datetime.date(timestamp.year, timestamp.month, timestamp.day)
    last_second = datetime.time(23, 59, 59)
    return datetime.datetime.combine(day, last_second)
| 29.177515 | 118 | 0.618434 |
70399f5acff84461d84603989b9551e349e40c31 | 3,168 | py | Python | travel_time.5m.py | danpker/travel-time | c1382e48e02550ab8225f9cd8834c10ca0a981d5 | [
"MIT"
] | null | null | null | travel_time.5m.py | danpker/travel-time | c1382e48e02550ab8225f9cd8834c10ca0a981d5 | [
"MIT"
] | 3 | 2017-06-19T10:56:33.000Z | 2018-03-15T09:31:14.000Z | travel_time.5m.py | danpker/travel-time | c1382e48e02550ab8225f9cd8834c10ca0a981d5 | [
"MIT"
] | null | null | null | #!/usr/bin/env LC_ALL=en_US.UTF-8 /usr/local/bin/python3
import requests
import dateparser
from datetime import datetime
import json
import re
import html
import base64
import os
# Route endpoints and Google Maps API key; populated from config.json by
# load_config() before main() runs.
START = ""
DESTINATION = ""
API_KEY = ""

# Matches any HTML tag, for stripping markup from step instructions.
TAG_RE = re.compile(r"<[^>]+>")
def main():
    """Fetch current driving directions from the Google Directions API and print the first route."""
    url = ("https://maps.googleapis.com/maps/api/directions/json?"
           "origin={}&destination={}&mode=driving&departure_time=now&"
           "key={}".format(START, DESTINATION, API_KEY))
    data = json.loads(requests.get(url).content)
    # NOTE(review): assumes the API returned at least one route; an empty
    # or error response would raise here.
    routes = data.get("routes")
    parse_route(routes[0])
def load_config():
    """Populate the START/DESTINATION/API_KEY globals from config.json.

    The config file is looked up next to this script, not in the current
    working directory.
    """
    global START
    global DESTINATION
    global API_KEY

    directory = os.path.dirname(os.path.realpath(__file__))
    with open(os.path.join(directory, "config.json")) as f:
        json_config = json.loads(f.read())
        START = json_config.get("start")
        DESTINATION = json_config.get("destination")
        API_KEY = json_config.get("api_key")
def parse_route(route):
    """Print a BitBar-style menu for *route*: coloured travel time, steps, and a static map."""
    leg = route.get("legs")[0]
    duration = leg.get("duration").get("text")
    duration_in_traffic = leg.get("duration_in_traffic").get("text")

    # First line is the menu-bar title; colour reflects traffic level.
    print("{}|color={}".format(
        duration_in_traffic, get_colour(duration, duration_in_traffic)))
    print("---")
    # NOTE(review): the encode().decode() round-trip is a no-op for valid
    # UTF-8 and looks redundant.
    print(get_steps(leg).encode().decode())
    print("{} | image={}".format(route.get("summary"), get_map(route)))
def get_colour(duration, duration_in_traffic):
    """Return a colour ('green'/'orange'/'red') to provide an idea of the current traffic.

    Compares the traffic-free and in-traffic durations by treating each as
    an offset from now, and buckets the delay: <10 min green, 10-25 min
    orange, >25 min red.

    :param duration: A duration string, like "4 mins"
    :param duration_in_traffic: A duration in traffic
    """
    now = datetime.now()

    # dateparser will put the duration in traffic before the duration without
    # traffic. In this function, we basically use it to work out the potential
    # set off time for each scenario with the same arrival time
    set_off_time = dateparser.parse(duration, settings={"RELATIVE_BASE": now})
    set_off_time_in_traffic = dateparser.parse(
        duration_in_traffic, settings={"RELATIVE_BASE": now})

    if set_off_time_in_traffic > set_off_time:
        # If real duration is less than the normal duration, return green
        return "green"

    difference = set_off_time - set_off_time_in_traffic
    # NOTE(review): .seconds ignores .days; assumes the delay is < 1 day.
    minutes = difference.seconds / 60

    if minutes < 10:
        return "green"
    elif minutes >= 10 and minutes <= 25:
        return "orange"
    else:
        return "red"
def get_steps(leg):
    """Return all steps of *leg* as newline-separated human-readable lines."""
    lines = [step_to_string(step) for step in leg.get("steps")]
    return "\n".join(lines)
def step_to_string(step):
    """Render one direction step as '<instructions> - <duration>'."""
    instructions = clean(step.get("html_instructions"))
    duration = step.get("duration").get("text")
    return "{} - {}".format(instructions, duration)
def get_map(route):
    """Fetch a static map image of *route* and return it base64-encoded (for BitBar display)."""
    points = route.get("overview_polyline").get("points")
    url = ("https://maps.googleapis.com/maps/api/staticmap?"
           "size=640x400&path=enc%3A{}&key={}".format(points, API_KEY))
    image = requests.get(url).content
    return base64.b64encode(image).decode("utf-8")
def clean(text):
    """Unescape HTML entities in *text* and strip any markup tags."""
    return TAG_RE.sub("", html.unescape(text))
if __name__ == "__main__":
    # Load credentials/endpoints from config.json, then run one fetch.
    load_config()
    main()
| 29.333333 | 79 | 0.667614 |
786511ba18ede1aee9af267bd06ce6e3e0174bea | 12,586 | py | Python | python/federatedml/param/hetero_nn_param.py | hubert-he/FATE | 6758e150bd7ca7d6f788f9a7a8c8aea7e6500363 | [
"Apache-2.0"
] | 3,787 | 2019-08-30T04:55:10.000Z | 2022-03-31T23:30:07.000Z | python/federatedml/param/hetero_nn_param.py | hubert-he/FATE | 6758e150bd7ca7d6f788f9a7a8c8aea7e6500363 | [
"Apache-2.0"
] | 1,439 | 2019-08-29T16:35:52.000Z | 2022-03-31T11:55:31.000Z | python/federatedml/param/hetero_nn_param.py | hubert-he/FATE | 6758e150bd7ca7d6f788f9a7a8c8aea7e6500363 | [
"Apache-2.0"
] | 1,179 | 2019-08-29T16:18:32.000Z | 2022-03-31T12:55:38.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import collections
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import copy
from types import SimpleNamespace
from federatedml.param.base_param import BaseParam
from federatedml.param.cross_validation_param import CrossValidationParam
from federatedml.param.encrypt_param import EncryptParam
from federatedml.param.encrypted_mode_calculation_param import EncryptedModeCalculatorParam
from federatedml.param.predict_param import PredictParam
from federatedml.util import consts
class SelectorParam(object):
    """
    Parameters for the back-propagation sample selector.

    Args:
        method: None or str, back propagation select method, accept "relative" only, default: None
        selective_size: int, deque size to use, store the most recent selective_size historical loss, default: 1024
        beta: int, sample whose selective probability >= power(np.random, beta) will be selected
        min_prob: Numeric, selective probability is max(min_prob, rank_rate)
        random_state: seed for the selector's random number generator, default: None
    """

    def __init__(self, method=None, beta=1, selective_size=consts.SELECTIVE_SIZE, min_prob=0, random_state=None):
        self.method = method
        self.selective_size = selective_size
        self.beta = beta
        self.min_prob = min_prob
        self.random_state = random_state

    def check(self):
        """Validate parameter values, raising ValueError on the first violation."""
        if self.method is not None and self.method not in ["relative"]:
            # Fixed previously-garbled message ('should be None be "relative"')
            raise ValueError('selective method should be None or "relative"')

        if not isinstance(self.selective_size, int) or self.selective_size <= 0:
            raise ValueError("selective size should be a positive integer")

        if not isinstance(self.beta, int):
            raise ValueError("beta should be integer")

        if not isinstance(self.min_prob, (float, int)):
            raise ValueError("min_prob should be numeric")
class HeteroNNParam(BaseParam):
    """
    Parameters used for Hetero Neural Network.

    Args:
        task_type: str, task type of hetero nn model, one of 'classification', 'regression'.
        config_type: str, accept "keras" only.
        bottom_nn_define: a dict represents the structure of bottom neural network.
        interactive_layer_define: a dict represents the structure of interactive layer.
        interactive_layer_lr: float, the learning rate of interactive layer.
        top_nn_define: a dict represents the structure of top neural network.
        optimizer: optimizer method, accept following types:
            1. a string, one of "Adadelta", "Adagrad", "Adam", "Adamax", "Nadam", "RMSprop", "SGD"
            2. a dict, with a required key-value pair keyed by "optimizer",
               with optional key-value pairs such as learning rate.
            defaults to "SGD"
        loss: str, a string to define loss function used
        early_stopping_rounds: int, default: None
            Will stop training if one metric doesn't improve in last early_stopping_round rounds
        metrics: list, default: None
            Indicate when executing evaluation during train process, which metrics will be used. If not set,
            default metrics for specific task type will be used. As for binary classification, default metrics are
            ['auc', 'ks'], for regression tasks, default metrics are ['root_mean_squared_error', 'mean_absolute_error'],
            [ACCURACY, PRECISION, RECALL] for multi-classification task
        use_first_metric_only: bool, default: False
            Indicate whether to use the first metric in `metrics` as the only criterion for early stopping judgement.
        epochs: int, the maximum iteration for aggregation in training.
        batch_size : int, batch size when updating model.
            -1 means use all data in a batch. i.e. Not to use mini-batch strategy.
            defaults to -1.
        early_stop : str, accept 'diff' only in this version, default: 'diff'
            Method used to judge converge or not.
                a)	diff: Use difference of loss between two iterations to judge whether converge.
        validation_freqs: None or positive integer or container object in python. Do validation in training process or Not.
            if equals None, will not do validation in train process;
            if equals positive integer, will validate data every validation_freqs epochs passes;
            if container object in python, will validate data if epochs belong to this container.
                e.g. validation_freqs = [10, 15], will validate data when epoch equals to 10 and 15.
            Default: None
            The default value is None, 1 is suggested. You can set it to a number larger than 1 in order to
            speed up training by skipping validation rounds. When it is larger than 1, a number which is
            divisible by "epochs" is recommended, otherwise, you will miss the validation scores
            of last training epoch.
        floating_point_precision: None or integer, if not None, means use floating_point_precision-bit to speed up calculation,
                                  e.g.: convert an x to round(x * 2**floating_point_precision) during Paillier operation, divide
                                        the result by 2**floating_point_precision in the end.
        drop_out_keep_rate: float, should be between 0 and 1, if not equals to 1.0, will enable drop out
    """

    def __init__(self,
                 task_type='classification',
                 config_type="keras",
                 bottom_nn_define=None,
                 top_nn_define=None,
                 interactive_layer_define=None,
                 interactive_layer_lr=0.9,
                 optimizer='SGD',
                 loss=None,
                 epochs=100,
                 batch_size=-1,
                 early_stop="diff",
                 tol=1e-5,
                 encrypt_param=EncryptParam(),
                 encrypted_mode_calculator_param=EncryptedModeCalculatorParam(mode="confusion_opt"),
                 predict_param=PredictParam(),
                 cv_param=CrossValidationParam(),
                 validation_freqs=None,
                 early_stopping_rounds=None,
                 metrics=None,
                 use_first_metric_only=True,
                 selector_param=SelectorParam(),
                 floating_point_precision=23,
                 drop_out_keep_rate=1.0):
        super(HeteroNNParam, self).__init__()

        self.task_type = task_type
        self.config_type = config_type
        self.bottom_nn_define = bottom_nn_define
        self.interactive_layer_define = interactive_layer_define
        self.interactive_layer_lr = interactive_layer_lr
        self.top_nn_define = top_nn_define
        self.batch_size = batch_size
        self.epochs = epochs
        self.early_stop = early_stop
        self.tol = tol
        self.optimizer = optimizer
        self.loss = loss
        self.validation_freqs = validation_freqs
        self.early_stopping_rounds = early_stopping_rounds
        self.metrics = metrics or []
        self.use_first_metric_only = use_first_metric_only

        self.encrypt_param = copy.deepcopy(encrypt_param)
        # NOTE: attribute name keeps the historical "model" spelling for
        # backward compatibility with existing callers.
        self.encrypted_model_calculator_param = encrypted_mode_calculator_param
        self.predict_param = copy.deepcopy(predict_param)
        self.cv_param = copy.deepcopy(cv_param)
        self.selector_param = selector_param
        self.floating_point_precision = floating_point_precision

        self.drop_out_keep_rate = drop_out_keep_rate

    def check(self):
        """Validate all parameters, raising ValueError on the first violation."""
        self.optimizer = self._parse_optimizer(self.optimizer)
        supported_config_type = ["keras"]

        if self.task_type not in ["classification", "regression"]:
            raise ValueError("config_type should be classification or regression")

        if self.config_type not in supported_config_type:
            raise ValueError(f"config_type should be one of {supported_config_type}")

        if not isinstance(self.tol, (int, float)):
            raise ValueError("tol should be numeric")

        if not isinstance(self.epochs, int) or self.epochs <= 0:
            raise ValueError("epochs should be a positive integer")

        if self.bottom_nn_define and not isinstance(self.bottom_nn_define, dict):
            raise ValueError("bottom_nn_define should be a dict defining the structure of neural network")

        if self.top_nn_define and not isinstance(self.top_nn_define, dict):
            raise ValueError("top_nn_define should be a dict defining the structure of neural network")

        if self.interactive_layer_define is not None and not isinstance(self.interactive_layer_define, dict):
            raise ValueError(
                "the interactive_layer_define should be a dict defining the structure of interactive layer")

        if self.batch_size != -1:
            if not isinstance(self.batch_size, int) \
                    or self.batch_size < consts.MIN_BATCH_SIZE:
                raise ValueError(
                    " {} not supported, should be larger than 10 or -1 represent for all data".format(self.batch_size))

        if self.early_stop != "diff":
            raise ValueError("early stop should be diff in this version")

        if self.validation_freqs is None:
            pass
        elif isinstance(self.validation_freqs, int):
            if self.validation_freqs < 1:
                raise ValueError("validation_freqs should be larger than 0 when it's integer")
        # Bug fix: collections.Container was removed in Python 3.10;
        # the ABC lives in collections.abc.
        elif not isinstance(self.validation_freqs, collections.abc.Container):
            raise ValueError("validation_freqs should be None or positive integer or container")

        if self.early_stopping_rounds and not isinstance(self.early_stopping_rounds, int):
            raise ValueError("early stopping rounds should be None or int larger than 0")
        if self.early_stopping_rounds and isinstance(self.early_stopping_rounds, int):
            if self.early_stopping_rounds < 1:
                raise ValueError("early stopping should be larger than 0 when it's integer")
            if not self.validation_freqs:
                raise ValueError("If early stopping rounds is setting, validation_freqs should not be null")

        if self.metrics is not None and not isinstance(self.metrics, list):
            raise ValueError("metrics should be a list")

        if not isinstance(self.use_first_metric_only, bool):
            raise ValueError("use_first_metric_only should be a boolean")

        if self.floating_point_precision is not None and \
                (not isinstance(self.floating_point_precision, int) or
                 self.floating_point_precision < 0 or self.floating_point_precision > 63):
            raise ValueError("floating point precision should be null or a integer between 0 and 63")

        if not isinstance(self.drop_out_keep_rate, (float, int)) or self.drop_out_keep_rate < 0.0 or \
                self.drop_out_keep_rate > 1.0:
            raise ValueError("drop_out_keep_rate should be in range [0.0, 1.0]")

        self.encrypt_param.check()
        self.encrypted_model_calculator_param.check()
        self.predict_param.check()
        self.selector_param.check()

    @staticmethod
    def _parse_optimizer(opt):
        """
        Normalize the optimizer config into a SimpleNamespace(optimizer=..., kwargs=...).

        Examples:

            1. "optimize": "SGD"
            2. "optimize": {
                "optimizer": "SGD",
                "learning_rate": 0.05
            }
        """
        kwargs = {}
        if isinstance(opt, str):
            return SimpleNamespace(optimizer=opt, kwargs=kwargs)
        elif isinstance(opt, dict):
            optimizer = opt.get("optimizer")
            if not optimizer:
                raise ValueError(f"optimizer config: {opt} invalid")
            kwargs = {k: v for k, v in opt.items() if k != "optimizer"}
            return SimpleNamespace(optimizer=optimizer, kwargs=kwargs)
        else:
            raise ValueError(f"invalid type for optimize: {type(opt)}")
| 48.594595 | 129 | 0.662403 |
dcb7dcd7f4104c82860d9b4c3b0b87916701b3fc | 1,451 | py | Python | var/spack/repos/builtin/packages/tcptrace/package.py | HaochengLIU/spack | 26e51ff1705a4d6234e2a0cf734f93f7f95df5cb | [
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 2 | 2018-11-27T03:39:44.000Z | 2021-09-06T15:50:35.000Z | var/spack/repos/builtin/packages/tcptrace/package.py | HaochengLIU/spack | 26e51ff1705a4d6234e2a0cf734f93f7f95df5cb | [
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 1 | 2019-01-11T20:11:52.000Z | 2019-01-11T20:11:52.000Z | var/spack/repos/builtin/packages/tcptrace/package.py | HaochengLIU/spack | 26e51ff1705a4d6234e2a0cf734f93f7f95df5cb | [
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 1 | 2020-10-14T14:20:17.000Z | 2020-10-14T14:20:17.000Z | # Copyright 2013-2018 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
from os.path import join
class Tcptrace(AutotoolsPackage):
    """tcptrace is a tool written by Shawn Ostermann at Ohio University for
    analysis of TCP dump files. It can take as input the files produced by
    several popular packet-capture programs, including tcpdump, snoop,
    etherpeek, HP Net Metrix, and WinDump."""

    homepage = "http://www.tcptrace.org/"
    url = "http://www.tcptrace.org/download/tcptrace-6.6.7.tar.gz"

    # Known release with its md5 checksum.
    version('6.6.7', '68128dc1817b866475e2f048e158f5b9')

    # bison/flex are parser/lexer generators only needed at build time.
    depends_on('bison', type='build')
    depends_on('flex', type='build')
    depends_on('libpcap')

    # Fixes incorrect API access in libpcap.
    # See https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=545595
    patch('tcpdump.patch')

    @run_after('configure')
    def patch_makefile(self):
        # Add -D_BSD_SOURCE ahead of the pcap link flags, as recommended by
        # upstream; see https://github.com/blitz/tcptrace/blob/master/README.linux
        makefile = FileFilter('Makefile')
        makefile.filter(
            "PCAP_LDLIBS = -lpcap",
            "DEFINES += -D_BSD_SOURCE\nPCAP_LDLIBS = -lpcap")

    def install(self, spec, prefix):
        # The build system has trouble creating directories, so create the
        # bin directory ourselves and copy the single produced binary.
        mkdirp(prefix.bin)
        install('tcptrace', join(prefix.bin, 'tcptrace'))
7e1bcabaee3c2fd6b2266167bacf69e8934cfcba | 1,328 | py | Python | src/utils/dataset_types.py | sisl/MultiAgentVariationalOcclusionInference | c46ef5dc99a6ca7a59937dbd4bf1d3f86d0eb757 | [
"Apache-2.0"
] | 6 | 2021-09-07T18:40:24.000Z | 2022-03-15T06:16:07.000Z | src/utils/dataset_types.py | sisl/MultiAgentVariationalOcclusionInference | c46ef5dc99a6ca7a59937dbd4bf1d3f86d0eb757 | [
"Apache-2.0"
] | 2 | 2021-12-03T05:18:09.000Z | 2021-12-17T09:54:08.000Z | src/utils/dataset_types.py | sisl/MultiAgentVariationalOcclusionInference | c46ef5dc99a6ca7a59937dbd4bf1d3f86d0eb757 | [
"Apache-2.0"
] | 3 | 2021-12-15T03:19:48.000Z | 2022-03-31T18:05:39.000Z | # Code is from: https://github.com/interaction-dataset/interaction-dataset.
DELTA_TIMESTAMP_MS = 100 # similar throughout the whole dataset
class MotionState:
    """Kinematic state of one agent at a single timestamp.

    Position (x, y), velocity (vx, vy) and heading (psi_rad) start out
    unset (None) and are filled in later by the track reader.
    """

    def __init__(self, time_stamp_ms):
        assert isinstance(time_stamp_ms, int)
        self.time_stamp_ms = time_stamp_ms
        # Attribute creation order matters: __str__ dumps __dict__.
        for field in ('x', 'y', 'vx', 'vy', 'psi_rad'):
            setattr(self, field, None)

    def __str__(self):
        return "MotionState: {}".format(self.__dict__)
class Track:
    """One agent's trajectory: static properties plus a dict of
    MotionState objects keyed by timestamp in milliseconds."""

    def __init__(self, id):
        # assert isinstance(id, int)
        self.track_id = id
        self.agent_type = None
        self.length = None
        self.width = None
        self.time_stamp_ms_first = None
        self.time_stamp_ms_last = None
        self.motion_states = dict()

    def __str__(self):
        header = ("Track: track_id=" + str(self.track_id)
                  + ", agent_type=" + str(self.agent_type)
                  + ", length=" + str(self.length)
                  + ", width=" + str(self.width)
                  + ", time_stamp_ms_first=" + str(self.time_stamp_ms_first)
                  + ", time_stamp_ms_last=" + str(self.time_stamp_ms_last)
                  + "\n motion_states:")
        # One line per state, ordered by timestamp.
        body = "".join("\n " + str(ts) + ": " + str(state)
                       for ts, state in sorted(self.motion_states.items()))
        return header + body
| 34.051282 | 101 | 0.591867 |
bd89909846c4e5bf54ad5b341a0eb95f6166c0e5 | 5,043 | py | Python | data.py | RamiroFuentes/Portafolio_Web | afe71a08d2b0496103bce19960bc213329bcc289 | [
"MIT"
] | null | null | null | data.py | RamiroFuentes/Portafolio_Web | afe71a08d2b0496103bce19960bc213329bcc289 | [
"MIT"
] | null | null | null | data.py | RamiroFuentes/Portafolio_Web | afe71a08d2b0496103bce19960bc213329bcc289 | [
"MIT"
] | 1 | 2019-11-13T17:40:45.000Z | 2019-11-13T17:40:45.000Z | # encoding: utf-8
Data = {
# Datos de presentacion
"Titulo" : u"Ramiro Fuentes",
"Nombres" : u"José Ramiro",
"Apellidos" : u"Fuentes Lara",
"Imagen" : "../static/img/foto.png",
"Color" : "primary",
# About
"Calle" : "Herradura",
"Numero" : "6",
"Colonia" : "Noria Alta",
"CP" : "36050",
"Telefono" : "(464) 117-0304",
"Email" : "jr.fuenteslara@ugto.mx",
# Redes sociales
"Url LinkedIn" : "https://www.linkedin.com/in/jose-ramiro-fuentes-lara-055331195/",
"Url Twitter" : "https://twitter.com/RamiroFuentesL",
"Url Facebook" : "https://www.facebook.com/joseramiro.fuentes.9",
# Semblanza
"Semblanza" : u"""Estudiante de la licenciatura en ingeniería química, entusiasta en el desarrollo
de proyectos de alto impacto ambiental con conocimiento en la rama de la programación
y la electrónica, habilidades blandas y expresión oral, con numerosos talleres para
la exposición rápida de proyectos y un diplomado en habilidades blandas para emprender
e innovar, encargado principalmente del desarrollo ingenieril del proyecto y como
llevar a cabo las ideas propuestas."""
}
# Work-experience entries; each dict feeds one card in the Experience section.
Trabajo_1 = {
    "Cargo" : u"Coordinador de Tecnología",
    "Periodo" : "Marzo 2020 - Presente",
    "Empresa" : u"Future Lab",
    "Resumen" : u"""Coordinador de Tecnología a cargo del desarrollo de la plataforma para hosteo de hackatones. Algunas actividades
varias relacionadas pueden ser: organización de charlas y talleres, conferencista en eventos organizados en el Tec
de Monterrey Campus Leon en colaboración con GEAR y charlas de introducción a la programación Web en colaboración con
SEIMIQ-UG 2019-2020"""
}
Trabajo_2 = {
    "Cargo" : "Vicepresidente SEIMIQ-UG 2019-2020",
    "Periodo" : "Mayo 2019 - Agosto 2020",
    "Empresa" : u"Sección Estudiantil del Instituto Mexicano de Ingenieros Químicos",
    "Resumen" : u"""Dentro de los cargos de vicepresidente se organizaron eventos como charlas y talleres relacionados con el ambiente de la
ingeniería química, actividades de integración como el Día del amor y la amistad entre otros. Durante mi gestión fuimos acredores al
premio de mejor sección estudiantil del period Agosto-2019 Diciembre - 2020"""
}
Trabajo_3 = {
    "Cargo" : "Ayudante General Electricista",
    "Periodo" : "Julio 2016 - Actualmente",
    "Empresa" : u"Fuentes Eléctricas",
    "Resumen" : u"""Ayudante gerelar en oficios relacionados con la electricidad como instalaciónes domésticas, subestaciones eléctricas."""
}
Trabajo_4 = {
    "Cargo" : u"Operario de Grúa",
    "Periodo" : "Septiembre 2019 - Actualmente",
    "Empresa" : u"Fuentes Eléctricas",
    "Resumen" : u"""Operario de grúa para aplicaciones en sistemas electromecanicos, instalación de Transformadores, Postes y movimiento de
cargas"""
}
# Ordered list consumed by the template loop.
Experience = [Trabajo_1,Trabajo_2,Trabajo_3,Trabajo_4]
# Education entries; each dict feeds one card in the Education section.
Escuela_1 = {
    "Escuela" : "Universidad de Guanajuato DCNE",
    "Periodo" : "Agosto 2017 - Julio 2022",
    "Ocupacion" : "Estudiante de Ciencias",
    "Departamento" : u"Ingeniería Química"
}
Escuela_2 = {
    "Escuela" : u"Colegio de Nivel Medio Superior",
    "Periodo" : u"Agosto 2014 - Julio 2017",
    "Ocupacion" : u"Bachiller",
    "Departamento" : u"Escuelas de Nivel Medio Superior"
}
Escuela_3 = {
    "Escuela" : "Escuela Nacional de Estudios superiores",
    "Periodo" : "Agosto 2019 - Diciembre 2020",
    "Ocupacion" : "Diplomante",
    "Departamento" : u"Sistema de educación continua"
}
# Ordered list consumed by the template loop.
Education = [Escuela_1,Escuela_2,Escuela_3]
# Skill lists shown as tag clouds: tools on one side, workflow topics on the other.
Skills = {
    "Herramientas" : ['Aspen Plus','ANSYS Fluent','MATLAB','Microsoft Excel','Python','HTML','CSS'],
    "Workflow": ['Mobile-First, Responsive Design','Seguridad','Fenomenos de transporte',u'Simulación de procesos']
}
# Free-text "interests" paragraph.
Interests = u"""Apasionado por la ciencia y el desarrollo tecnológico, la programación y la domótica. Con mi trabajo busco
generar soluciones que nos lleven a un estilo de vida más sustentable. Todos los proyectos que desarrollo los elaboro
mediente el flujo de trabajo que he aprendido a lo largo de mi vida en los cursos y talleres que participo. Me encanta
compartir el conocimiento y pararme en frente de un público a compartir mis ideas."""
# Awards and recognitions, one string per bullet point.
Awards = [
    u"Diplomado en Habilidades blandas para emprender e innovar - Diciembre 2019",
    u"1er Lugar - Concurso regional de creatividad e innovación - Octubre 2019",
    u"1st Place - Concurso a nivel división de Creatividad e innovación - Mayo 2019",
    u"1er Lugar - Foro de talento e inoovación ENES Leon UNAM - Mayo 2019",
    u"Reconocimiento al desarrollo de la mentefactura oor incubadora Novaera - Noviembre 2019 ",
    u"Participante premio estatal a la innovación tecnológica - Noviembre 2019",
    u"Finalista en el 15 Concurso de Creatividad e Innovación UG 2020",
]
52bd270998e8918263bcf69dd82a24ea7d4b2b43 | 2,053 | py | Python | legacy/pipeline/ocr.py | dirkroorda/fusus | ee83067e1fb4fb4e4a7389554df4ab200c6f2092 | [
"MIT"
] | null | null | null | legacy/pipeline/ocr.py | dirkroorda/fusus | ee83067e1fb4fb4e4a7389554df4ab200c6f2092 | [
"MIT"
] | null | null | null | legacy/pipeline/ocr.py | dirkroorda/fusus | ee83067e1fb4fb4e4a7389554df4ab200c6f2092 | [
"MIT"
] | null | null | null | """
Kraken Arabic model:
[OpenITI](https://github.com/OpenITI/OCR_GS_Data/blob/master/ara/abhath/arabic_generalized.mlmodel)
"""
from pytesseract import image_to_data, image_to_string, image_to_boxes
# Maps each supported read mode to the pytesseract function implementing it.
MODES = dict(
    string=image_to_string,
    data=image_to_data,
    boxes=image_to_boxes,
)
class OCR:
    """Runs Tesseract OCR, either on one page object or on a batch file
    listing page image paths.

    Exactly one of *page* / *pageFile* should be given; when neither is,
    an error is reported through the engine's timed messenger and read()
    will refuse to run.
    """

    def __init__(self, engine, page=None, pageFile=None):
        """Sets up OCR with Tesseract.

        engine: pipeline engine providing the ``tm`` messenger.
        page: page object with ``stages``/``blocks`` (single-page mode).
        pageFile: path to a file listing image file names (batch mode).
        """
        tm = engine.tm
        error = tm.error

        # batch stays None when no usable input was given; read() checks it.
        batch = None
        if page is None and pageFile is None:
            error("Pass a page object or a page file with a list of file names")
        elif page:
            batch = False
            self.page = page
        elif pageFile:
            batch = True
            self.pageFile = pageFile

        self.batch = batch
        self.engine = engine

    def read(self, mode=None):
        """Performs OCR with Tesseract.

        mode: one of the MODES keys ("string", "data", "boxes");
        defaults to "data".

        Returns the OCR result (a string in single-page mode, with
        BLOCK START/END markers around each block), or None when there
        is no input, the mode is unknown, or no cleaned scan exists.
        """
        engine = self.engine
        tm = engine.tm
        error = tm.error

        batch = self.batch
        if batch is None:
            error("No input to work on")
            return None

        if not mode:
            mode = 'data'
        method = MODES.get(mode, None)
        if method is None:
            error(f"No such read mode: {mode}")
            # Bug fix: bail out instead of falling through and calling
            # the missing method below.
            return None

        if batch:
            ocrData = method(self.pageFile, lang="ara")
        else:
            page = self.page
            scan = page.stages.get("clean", None)
            if scan is None:
                return None
            blocks = page.blocks
            ocrData = []
            # OCR each block's inner region separately and wrap the output
            # in BLOCK START/END markers so blocks can be told apart later.
            for ((stripe, column), data) in blocks.items():
                (left, top, right, bottom) = data["inner"]
                scanPart = scan[top:bottom, left:right]
                ocrDataPart = method(scanPart, lang="ara")
                ocrData.append(
                    f"BLOCK START {stripe}{column}\n"
                    f"{ocrDataPart}\n"
                    f"BLOCK END {stripe}{column}\n"
                )
            ocrData = "".join(ocrData)
        return ocrData
| 25.6625 | 99 | 0.524111 |
04cfddf3d2f5f2b04c601804f78e2d7790d72dfc | 5,382 | py | Python | capability/test/test_poo_tools/TestPointOfOrigin.py | jscott11/solutions-geoprocessing-toolbox | beafdfa8626eefc5c1e781edd96bfcdd42697c61 | [
"Apache-2.0"
] | null | null | null | capability/test/test_poo_tools/TestPointOfOrigin.py | jscott11/solutions-geoprocessing-toolbox | beafdfa8626eefc5c1e781edd96bfcdd42697c61 | [
"Apache-2.0"
] | null | null | null | capability/test/test_poo_tools/TestPointOfOrigin.py | jscott11/solutions-geoprocessing-toolbox | beafdfa8626eefc5c1e781edd96bfcdd42697c61 | [
"Apache-2.0"
] | 1 | 2018-10-25T15:52:41.000Z | 2018-10-25T15:52:41.000Z | #------------------------------------------------------------------------------
# Copyright 2013 Esri
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#------------------------------------------------------------------------------
# TestPointOfOrigin.py
# Description: Test Point Of Origin Toolbox
# Requirements: ArcGIS Desktop Standard
# ----------------------------------------------------------------------------
import arcpy
import sys
import traceback
import TestUtilities
import os
class LicenseError(Exception):
    """Raised when a required ArcGIS license or extension is unavailable."""
# Main test body (python2 script): run the Point Of Origin Site Detection
# tool once and sanity-check the counts of its three outputs. Any failure
# is reported through arcpy and the process exits with -1.
try:
    arcpy.ImportToolbox(TestUtilities.toolbox)
    arcpy.env.overwriteOutput = True

    # Set tool param variables
    inputImpactPoints = os.path.join(TestUtilities.testDataGDB,r"impacts")
    inputWeaponsTable = os.path.join(TestUtilities.toolDataGDB,r"Weapons")
    inModelField = 'Model'
    inMinRangeField = 'Minimum_range'
    inMaxRangeField = 'Maximum_range'
    # Semicolon-separated list as the tool expects it, plus a python list
    # used below to count the selected weapon systems.
    inputWeaponsAsString = "'82mm Soviet Mortar';'88mm American Mortar';'120mm American Mortar'"
    print("inputWeaponsAsString: " + str(inputWeaponsAsString))
    inputWeaponsAsList = inputWeaponsAsString.split(";")
    print("inputWeaponsAsList: " + str(inputWeaponsAsList))
    outputWorkspace = arcpy.env.scratchGDB
    outImpactPrefix = r"imp"
    outPooPrefix = r"poo"
    outRangePrefix = r"rng"
    sr = arcpy.SpatialReference(32642) #WGS_1984_UTM_Zone_42N using factoryCode

    # Testing Point Of Origin Site Detection
    arcpy.AddMessage("Starting Test: Point of Origin Site Detection")
    results = arcpy.PointOfOriginSiteDetection_ptorigin(inputImpactPoints, inputWeaponsTable,
                                                        inModelField, inMinRangeField,inMaxRangeField,
                                                        inputWeaponsAsString, outputWorkspace,
                                                        outImpactPrefix,outPooPrefix,
                                                        outRangePrefix,sr)
    #print("results.outputCount: " + str(results.outputCount))
    #for i in range(0,results.outputCount):
    #    print("output " + str(i) + ": " + str(results.getOutput(i)))

    # Verify Results: outputs are (0) impact points, (1) ';'-joined POO
    # feature classes, (2) ';'-joined range feature classes.
    print("Checking results...")
    outImpactFeatures = results.getOutput(0)
    outPOOFeatures = results.getOutput(1).split(";")
    outRangeFeatures = results.getOutput(2).split(";")

    # check that the same number of impact points between the input and output
    countInputImpactPoints = int(arcpy.GetCount_management(inputImpactPoints).getOutput(0))
    countOutputImpactPoints = int(arcpy.GetCount_management(outImpactFeatures).getOutput(0))
    if (countInputImpactPoints != countOutputImpactPoints):
        print("Error: Impact points are not the same. In: " + str(countInputImpactPoints) + ", Out: " + str(countOutputImpactPoints))
        raise Exception("Test Failed")
    print("Number of input and output impact points match.")

    # check the number of combined POO feature classes returned by the tool
    countPOOFeatureClasses = int(len(outPOOFeatures))
    if (len(inputWeaponsAsList)!= countPOOFeatureClasses):
        print("Error: Number of Output Point of Origin features do not match number of selected weapon systems. # weaopns: " + str(len(inputWeaponsAsList)) + ", POO: " + str(countPOOFeatureClasses))
        raise Exception("Test Failed")
    print("Number of selected weapons match number of output Point Of Origin features.")

    # check the number of range feature classes returned by the tool
    # (one range feature class is expected per impact point per weapon)
    countImpactXWeapons = int(countInputImpactPoints * len(inputWeaponsAsList))
    countRangeFeatureClasses = int(len(outRangeFeatures))
    if (countImpactXWeapons != countRangeFeatureClasses):
        print("Error: Number of Range feature classes does not match Impact Points x Weapon Models. ImpxWeap: " + str(countImpactXWeapons) + ", Ranges:" + str(countRangeFeatureClasses))
        raise Exception("Test Failed")
    print("Number of Impacts x Weapons match output range features.")

    print("All tests passed.")

except arcpy.ExecuteError:
    # Get the arcpy error messages
    msgs = arcpy.GetMessages()
    arcpy.AddError(msgs)
    print msgs

    # return a system error code
    sys.exit(-1)

except:
    # Get the traceback object
    tb = sys.exc_info()[2]
    tbinfo = traceback.format_tb(tb)[0]

    # Concatenate information together concerning the error into a message string
    pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(sys.exc_info()[1])
    msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages() + "\n"

    # Return python error messages for use in script tool or Python Window
    arcpy.AddError(pymsg)
    arcpy.AddError(msgs)

    # Print Python error messages for use in Python / Python Window
    print pymsg + "\n"
    print msgs

    # return a system error code
    sys.exit(-1)
0fe86eb4b41b7f8e1272e5054e0ed63adf85ffdf | 80,786 | py | Python | madgraph/interface/reweight_interface.py | madnklo/madnklo | 646a3db9c8efd7b4cb00e9d89b9197cd5394c01b | [
"NCSA"
] | 1 | 2019-12-14T15:25:38.000Z | 2019-12-14T15:25:38.000Z | madgraph/interface/reweight_interface.py | madnklo/madnklo | 646a3db9c8efd7b4cb00e9d89b9197cd5394c01b | [
"NCSA"
] | 26 | 2018-10-08T15:49:32.000Z | 2020-05-15T13:33:36.000Z | madgraph/interface/reweight_interface.py | madnklo/madnklo | 646a3db9c8efd7b4cb00e9d89b9197cd5394c01b | [
"NCSA"
] | 2 | 2019-03-25T17:28:48.000Z | 2021-04-21T12:15:53.000Z | ################################################################################
#
# Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors
#
# This file is a part of the MadGraph5_aMC@NLO project, an application which
# automatically generates Feynman diagrams and matrix elements for arbitrary
# high-energy processes in the Standard Model and beyond.
#
# It is subject to the MadGraph5_aMC@NLO license which should accompany this
# distribution.
#
# For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch
#
################################################################################
""" Command interface for Re-Weighting """
from __future__ import division
import difflib
import logging
import math
import os
import re
import shutil
import sys
import tempfile
import time
import subprocess
from subprocess import Popen, PIPE, STDOUT
pjoin = os.path.join
import madgraph.interface.extended_cmd as extended_cmd
import madgraph.interface.madgraph_interface as mg_interface
import madgraph.interface.master_interface as master_interface
import madgraph.interface.common_run_interface as common_run_interface
import madgraph.interface.madevent_interface as madevent_interface
import madgraph.iolibs.files as files
#import MadSpin.interface_madspin as madspin_interface
import madgraph.various.misc as misc
import madgraph.various.banner as banner
import madgraph.various.lhe_parser as lhe_parser
import madgraph.various.combine_plots as combine_plots
import madgraph.various.cluster as cluster
import madgraph.fks.fks_common as fks_common
import madgraph.core.diagram_generation as diagram_generation
import models.import_ufo as import_ufo
import models.check_param_card as check_param_card
#import MadSpin.decay as madspin
logger = logging.getLogger('decay.stdout') # -> stdout
logger_stderr = logging.getLogger('decay.stderr') # ->stderr
cmd_logger = logging.getLogger('cmdprint2') # -> print
# global to check which f2py module have been already loaded. (to avoid border effect)
dir_to_f2py_free_mod = {}
nb_f2py_module = 0 # each time the process/model is changed this number is modified to
# forced the python module to re-create an executable
class ReweightInterface(extended_cmd.Cmd):
"""Basic interface for reweighting operation"""
prompt = 'Reweight>'
debug_output = 'Reweight_debug'
@misc.mute_logger()
def __init__(self, event_path=None, allow_madspin=False, mother=None, *completekey, **stdin):
    """initialize the interface with potentially an event_path

    event_path: path to a LHE event file; when given, its banner is
        imported immediately via do_import.
    allow_madspin: accept event files already processed by MadSpin.
    mother: the calling interface (its options are propagated to mg5cmd).
    """
    # Only show the welcome banner when starting without an event file
    # (interactive use).
    if not event_path:
        cmd_logger.info('************************************************************')
        cmd_logger.info('* *')
        cmd_logger.info('* Welcome to Reweight Module *')
        cmd_logger.info('* *')
        cmd_logger.info('************************************************************')
    extended_cmd.Cmd.__init__(self, *completekey, **stdin)

    self.model = None
    self.has_standalone_dir = False

    self.mother= mother # calling interface
    self.multicore=False
    self.options = {'curr_dir': os.path.realpath(os.getcwd()),
                    'rwgt_name':None}

    self.events_file = None
    self.processes = {}
    self.second_model = None
    self.second_process = None
    # master MG5 command interface used to (re)generate matrix elements
    self.mg5cmd = master_interface.MasterCmd()
    if mother:
        self.mg5cmd.options.update(mother.options)
    self.seed = None
    self.output_type = "default"
    self.helicity_reweighting = True
    self.rwgt_mode = '' # can be LO, NLO, NLO_tree, '' is default
    self.has_nlo = False
    self.rwgt_dir = None
    self.exitted = False # Flag to know if do_quit was already called.
    if event_path:
        logger.info("Extracting the banner ...")
        self.do_import(event_path, allow_madspin=allow_madspin)

    # dictionary to fortan evaluator
    self.calculator = {}
    self.calculator_nbcall = {}

    #all the cross-section for convenience
    self.all_cross_section = {}
def do_import(self, inputfile, allow_madspin=False):
    """import the event file

    Opens the LHE file (transparently handling .gz), extracts and
    validates its banner (model, proc_card, run_card), records the
    original cross-section and decides the reweighting mode (falls back
    to 'LO' whenever accurate NLO reweighting is not possible).
    """
    args = self.split_arg(inputfile)
    if not args:
        # NOTE(review): this returns a tuple instead of raising
        # InvalidCmd -- looks unintentional; verify against callers.
        return self.InvalidCmd, 'import requires arguments'

    # change directory where to write the output
    self.options['curr_dir'] = os.path.realpath(os.path.dirname(inputfile))
    if os.path.basename(os.path.dirname(os.path.dirname(inputfile))) == 'Events':
        # standard madevent layout: Events/<run>/<file> -> go up to the run dir
        self.options['curr_dir'] = pjoin(self.options['curr_dir'],
                                         os.path.pardir, os.pardir)

    # Resolve the actual file, accepting either the plain or the .gz name.
    if not os.path.exists(inputfile):
        if inputfile.endswith('.gz'):
            if not os.path.exists(inputfile[:-3]):
                raise self.InvalidCmd('No such file or directory : %s' % inputfile)
            else:
                inputfile = inputfile[:-3]
        elif os.path.exists(inputfile + '.gz'):
            inputfile = inputfile + '.gz'
        else:
            raise self.InvalidCmd('No such file or directory : %s' % inputfile)

    if inputfile.endswith('.gz'):
        misc.gunzip(inputfile)
        inputfile = inputfile[:-3]

    # Read the banner of the inputfile
    self.lhe_input = lhe_parser.EventFile(os.path.realpath(inputfile))
    if not self.lhe_input.banner:
        # no banner inside the file: ask the user for one
        value = self.ask("What is the path to banner", 0, [0], "please enter a path", timeout=0)
        self.lhe_input.banner = open(value).read()
    self.banner = self.lhe_input.get_banner()

    # get original cross-section/error from the <init> block (4-column lines)
    if 'init' not in self.banner:
        self.orig_cross = (0,0)
        #raise self.InvalidCmd('Event file does not contain init information')
    else:
        for line in self.banner['init'].split('\n'):
            split = line.split()
            if len(split) == 4:
                cross, error = float(split[0]), float(split[1])
                self.orig_cross = (cross, error)

    # Check the validity of the banner:
    if 'slha' not in self.banner:
        self.events_file = None
        raise self.InvalidCmd('Event file does not contain model information')
    elif 'mg5proccard' not in self.banner:
        self.events_file = None
        raise self.InvalidCmd('Event file does not contain generation information')

    if 'madspin' in self.banner and not allow_madspin:
        raise self.InvalidCmd('Reweight should be done before running MadSpin')

    # load information
    process = self.banner.get_detail('proc_card', 'generate')
    # NLO sample: check whether the event file carries the extra
    # information needed for an exact NLO reweighting; otherwise
    # downgrade to an approximate LO reweighting.
    if '[' in process and isinstance(self.banner.get('run_card'), banner.RunCardNLO):
        if not self.banner.get_detail('run_card', 'store_rwgt_info'):
            logger.warning("The information to perform a proper NLO reweighting is not present in the event file.")
            logger.warning(" We will perform a LO reweighting instead. This does not guarantee NLO precision.")
            self.rwgt_mode = 'LO'
        if 'OLP' in self.mother.options:
            if self.mother.options['OLP'].lower() != 'madloop':
                # NOTE(review): this "%s" placeholder has no argument, so the
                # message prints literally -- verify upstream.
                logger.warning("Accurate NLO mode only works for OLP=MadLoop not for OLP=%s. An approximate (LO) reweighting will be performed instead")
                self.rwgt_mode = 'LO'
        if 'lhapdf' in self.mother.options and not self.mother.options['lhapdf']:
            logger.warning('NLO accurate reweighting requires lhapdf to be installed. Pass in approximate LO mode.')
            self.rwgt_mode = 'LO'
    else:
        self.rwgt_mode = 'LO'

    if not process:
        msg = 'Invalid proc_card information in the file (no generate line):\n %s' % self.banner['mg5proccard']
        raise Exception, msg
    process, option = mg_interface.MadGraphCmd.split_process_line(process)
    self.proc_option = option

    logger.info("process: %s" % process)
    logger.info("options: %s" % option)
@staticmethod
def get_LO_definition_from_NLO(proc, model, real_only=False):
    """return the LO definitions of the process corresponding to the born/real

    Given an NLO process line like "p p > e+ e- [QCD]", build the MG5
    commands defining the associated tree-level processes: the born one
    and the one with an extra radiated "pert" particle (with the born
    coupling order increased by one where an explicit order was set).

    proc: process definition string (possibly with an [..] NLO tag).
    model: loaded UFO model, used to find the radiation particles.
    real_only: when True, only define the real-emission process.
    """
    # split the line definition with the part before and after the NLO tag
    process, order, final = re.split('\[\s*(.*)\s*\]', proc)

    # add the part without any additional jet.
    commandline = "add process %s %s --no_warning=duplicate;" % (process, final)
    if not order:
        # NO NLO tag => nothing to do, return the input unchanged
        return proc
    elif not order.startswith(('virt','loonly','noborn')):
        # OK this is a standard NLO process
        if real_only:
            commandline = ''
        if '=' in order:
            # get the type NLO QCD/QED/...
            order = order.split('=',1)[1]

        # define the list of particles that are needed for the radiation
        pert = fks_common.find_pert_particles_interactions(model,
                                   pert_order = order)['soft_particles']
        commandline += "define pert_%s = %s;" % (order.replace(' ',''), ' '.join(map(str,pert)) )

        # check if we have to increase by one the born order
        if '%s=' % order in process or '%s<=' % order in process:
            result = re.split(' ',process)
            process = ''
            for r in result:
                if '%s=' % order in r:
                    ior = re.split('=',r)
                    # bug fix: use the actual perturbation order instead of a
                    # hard-coded 'QCD' (identical for QCD, correct for QED/...)
                    r = '%s=%i' % (order, int(ior[1])+1)
                elif '%s<=' % order in r:
                    ior = re.split('=',r)
                    r = '%s<=%i' % (order, int(ior[1])+1)
                process = process+r+' '

        # handle special tag $ | / @
        result = re.split('([/$@]|\w+(?:^2)?(?:=|<=|>)?\w+)', process, 1)
        if len(result) == 3:
            process, split, rest = result
            commandline += "add process %s pert_%s %s%s %s --no_warning=duplicate;" % (process, order.replace(' ','') ,split, rest, final)
        else:
            commandline += 'add process %s pert_%s %s --no_warning=duplicate;' % (process,order.replace(' ',''), final)
    elif order.startswith(('noborn=')):
        # pass in sqrvirt=
        return "add process %s ;" % proc.replace('noborn=', 'sqrvirt=')
    else:
        # just return the input, since this is handled by MadLoop.
        return "add process %s ;" % proc
    return commandline
def check_events(self):
"""Check some basic property of the events file"""
sum_of_weight = 0
sum_of_abs_weight = 0
negative_event = 0
positive_event = 0
start = time.time()
for event_nb,event in enumerate(self.lhe_input):
#control logger
if (event_nb % max(int(10**int(math.log10(float(event_nb)+1))),10)==0):
running_time = misc.format_timer(time.time()-start)
logger.info('Event nb %s %s' % (event_nb, running_time))
if (event_nb==10001): logger.info('reducing number of print status. Next status update in 10000 events')
try:
event.check() #check 4 momenta/...
except Exception, error:
print event
raise error
sum_of_weight += event.wgt
sum_of_abs_weight += abs(event.wgt)
if event.wgt < 0 :
negative_event +=1
else:
positive_event +=1
logger.info("total cross-section: %s" % sum_of_weight)
logger.info("total abs cross-section: %s" % sum_of_abs_weight)
logger.info("fraction of negative event %s", negative_event/(negative_event+positive_event))
logger.info("total number of events %s", (negative_event+positive_event))
logger.info("negative event %s", negative_event)
@extended_cmd.debug()
def complete_import(self, text, line, begidx, endidx):
    """Complete the import command (filesystem path completion)."""
    args = self.split_arg(line[0:begidx])

    # Complete relative to '.' when no path was typed yet, otherwise
    # relative to the directory given as first argument.
    if len(args) == 1:
        base_dir = '.'
    else:
        base_dir = args[1]

    # Bug fix: the original had an unreachable "directory continuation"
    # branch after this unconditional return; it has been removed.
    return self.path_completion(text, base_dir)
def help_change(self):
    """help for change command"""
    # python2-style print statements (legacy codebase)
    print "change model X :use model X for the reweighting"
    print "change process p p > e+ e-: use a new process for the reweighting"
    print "change process p p > mu+ mu- --add : add one new process to existing ones"
def do_change(self, line):
    """allow to define a second model/processes

    Supported subcommands: model, process [--add], output, helicity,
    mode, rwgt_dir, multicore. Changing the model or the process
    invalidates any existing standalone directory so that the fortran
    modules get rebuilt.
    """
    global nb_f2py_module

    args = self.split_arg(line)
    if len(args)<2:
        logger.critical("not enough argument (need at least two). Discard line")
    if args[0] == "model":
        nb_f2py_module += 1 # tag to force the f2py to reload
        self.second_model = " ".join(args[1:])
        if self.has_standalone_dir:
            self.terminate_fortran_executables()
            self.has_standalone_dir = False
    elif args[0] == "process":
        nb_f2py_module += 1 # tag to force the f2py to reload
        if self.has_standalone_dir:
            self.terminate_fortran_executables()
            self.has_standalone_dir = False
        if args[-1] == "--add":
            # append to the already defined replacement processes
            self.second_process.append(" ".join(args[1:-1]))
        else:
            self.second_process = [" ".join(args[1:])]
    elif args[0] == "output":
        if args[1] in ['default', '2.0', 'unweight']:
            self.output_type = args[1]
    elif args[0] == "helicity":
        self.helicity_reweighting = banner.ConfigFile.format_variable(args[1], bool, "helicity")
    elif args[0] == "mode":
        # Requesting an NLO-type mode is only honoured when the run
        # actually stored the needed information; otherwise fall back to LO.
        if args[1] != 'LO':
            if 'OLP' in self.mother.options and self.mother.options['OLP'].lower() != 'madloop':
                logger.warning("Only LO reweighting is allowed for OLP!=MadLoop. Keeping the mode to LO.")
                self.rwgt_mode = 'LO'
            elif not self.banner.get_detail('run_card','store_rwgt_info', default=False):
                logger.warning("Missing information for NLO type of reweighting. Keeping the mode to LO.")
                self.rwgt_mode = 'LO'
            elif 'lhapdf' in self.mother.options and not self.mother.options['lhapdf']:
                logger.warning('NLO accurate reweighting requires lhapdf to be installed. Pass in approximate LO mode.')
                self.rwgt_mode = 'LO'
            else:
                self.rwgt_mode = args[1]
        else:
            self.rwgt_mode = args[1]
    elif args[0] == "rwgt_dir":
        self.rwgt_dir = args[1]
        if not os.path.exists(self.rwgt_dir):
            os.mkdir(self.rwgt_dir)
    elif args[0] == 'multicore':
        pass
        # this line is meant to be parsed by common_run_interface and change the way this class is called.
        #It has no direct impact on this class.
    else:
        logger.critical("unknown option! %s. Discard line." % args[0])
def check_launch(self, args):
    """check the validity of the launch command

    Ensures an event file is available (re-opening it from its recorded
    path when the previous EventFile object is exhausted) and parses
    ``--key=value`` style options from *args*.

    Returns a dict of options; it always contains 'rwgt_name'.
    Raises InvalidCmd when no event file has been defined.
    """
    if not self.lhe_input:
        if isinstance(self.lhe_input, lhe_parser.EventFile):
            # exhausted file object: re-open it from its recorded path
            self.lhe_input = lhe_parser.EventFile(self.lhe_input.name)
        else:
            raise self.InvalidCmd("No events files defined.")

    opts = {'rwgt_name': None}
    for a in args:
        if a.startswith('--') and '=' in a:
            # bug fix: split only on the first '=' so option values may
            # themselves contain '=' (the original crashed on such input)
            key, value = a[2:].split('=', 1)
            opts[key] = value.replace("'", "").replace('"', '')
    return opts
def help_launch(self):
    """help for the launch command"""
    logger.info('''Add to the loaded events a weight associated to a
new param_card (to be define). The weight returned is the ratio of the
square matrix element by the squared matrix element of production.
All scale are kept fix for this re-weighting.''')
def get_weight_names(self):
    """ return the various name for the computed weights

    The returned list of suffixes depends on the requested reweighting
    mode; an empty mode on an NLO sample defaults to the NLO weight.
    """
    suffixes_by_mode = {
        'LO': [''],
        'NLO': ['_nlo'],
        'LO+NLO': ['_lo', '_nlo'],
        'NLO_tree': ['_tree'],
    }
    if self.rwgt_mode in suffixes_by_mode:
        return suffixes_by_mode[self.rwgt_mode]
    if not self.rwgt_mode and self.has_nlo:
        return ['_nlo']
    return ['']
@misc.mute_logger()
def do_launch(self, line):
    """end of the configuration launched the code

    Performs one full reweighting pass:
      1. ensure the standalone matrix-element directories exist (create
         them, or reload/wait for a pickled setup in multicore mode);
      2. write the original/new param_card and let the user edit it;
      3. parse/extend the <initrwgt> block of the banner;
      4. loop over the input LHE events, compute the new weight(s) and
         write the output file(s);
      5. report cross-sections and, if the card contains a 'scan',
         re-launch once per scanned card.

    :param line: command-line arguments of the 'launch' command.
    """
    args = self.split_arg(line)
    opts = self.check_launch(args)
    if opts['rwgt_name']:
        self.options['rwgt_name'] = opts['rwgt_name']
    model_line = self.banner.get('proc_card', 'full_model_line')

    # --- 1. standalone directory setup (possibly shared via pickle) ----
    if not self.has_standalone_dir:
        if self.rwgt_dir and os.path.exists(pjoin(self.rwgt_dir,'rw_me','rwgt.pkl')):
            self.load_from_pickle()
            # NOTE(review): this branch is only entered when self.rwgt_dir
            # is truthy, so the assignment below looks unreachable -- confirm.
            if not self.rwgt_dir:
                self.me_dir = self.rwgt_dir
        elif self.multicore == 'wait':
            # another process is building the setup: poll for its pickle
            while not os.path.exists(pjoin(self.me_dir,'rw_me','rwgt.pkl')):
                time.sleep(60)
                print 'wait for pickle'
            print "loading from pickle"
            if not self.rwgt_dir:
                self.rwgt_dir = self.me_dir
            self.load_from_pickle(keep_name=True)
        else:
            self.create_standalone_directory()
            if self.multicore == 'create':
                if not self.rwgt_dir:
                    self.rwgt_dir = self.me_dir
                self.save_to_pickle()

    # base path holding the rw_me/rw_mevirt directories
    if self.rwgt_dir:
        path_me =self.rwgt_dir
    else:
        path_me = self.me_dir

    # a second model/process hypothesis lives in its own directory
    if self.second_model or self.second_process:
        rw_dir = pjoin(path_me, 'rw_me_second')
    else:
        rw_dir = pjoin(path_me, 'rw_me')

    # --- 2. write param_card (new + original) and open the editor ------
    if not '--keep_card' in args:
        ff = open(pjoin(rw_dir,'Cards', 'param_card.dat'), 'w')
        ff.write(self.banner['slha'])
        ff.close()
        if self.has_nlo and self.rwgt_mode != "LO":
            rwdir_virt = rw_dir.replace('rw_me', 'rw_mevirt')
            files.ln(ff.name, starting_dir=pjoin(rwdir_virt, 'Cards'))
        ff = open(pjoin(path_me, 'rw_me','Cards', 'param_card_orig.dat'), 'w')
        ff.write(self.banner['slha'])
        ff.close()
        if self.has_nlo and self.rwgt_mode != "LO":
            files.ln(ff.name, starting_dir=pjoin(path_me, 'rw_mevirt', 'Cards'))
        # interactive (or scripted via stored_line) edition of the new card
        cmd = common_run_interface.CommonRunCmd.ask_edit_card_static(cards=['param_card.dat'],
                ask=self.ask, pwd=rw_dir, first_cmd=self.stored_line)
        self.stored_line = None

    # get the names of type of reweighting requested
    type_rwgt = self.get_weight_names()

    # check for potential scan in the new card
    new_card = open(pjoin(rw_dir, 'Cards', 'param_card.dat')).read()
    pattern_scan = re.compile(r'''^[\s\d]*scan''', re.I+re.M)
    param_card_iterator = []
    if pattern_scan.search(new_card):
        try:
            import internal.extended_cmd as extended_internal
            Shell_internal = extended_internal.CmdShell
        except:
            Shell_internal = None
        import madgraph.interface.extended_cmd as extended_cmd
        if not isinstance(self.mother, (extended_cmd.CmdShell, Shell_internal)):
            raise Exception, "scan are not allowed on the Web"
        # at least one scan parameter found. create an iterator to go trough the cards
        main_card = check_param_card.ParamCardIterator(new_card)
        if self.options['rwgt_name']:
            self.options['rwgt_name'] = '%s_0' % self.options['rwgt_name']
        param_card_iterator = main_card
        first_card = param_card_iterator.next(autostart=True)
        new_card = first_card.write()
        first_card.write(pjoin(rw_dir, 'Cards', 'param_card.dat'))

    # check if "Auto" is present for a width parameter
    if "auto" in new_card.lower():
        self.mother.check_param_card(pjoin(rw_dir, 'Cards', 'param_card.dat'))
        new_card = open(pjoin(rw_dir, 'Cards', 'param_card.dat')).read()

    # --- 3. Find new tag in the banner and add information if needed ---
    # Re-parse any pre-existing mg_reweighting weightgroup so new weights
    # get a fresh numeric id and clashing names are replaced.
    if 'initrwgt' in self.banner:
        if 'name=\'mg_reweighting\'' in self.banner['initrwgt']:
            blockpat = re.compile(r'''<weightgroup name=\'mg_reweighting\'\s*>(?P<text>.*?)</weightgroup>''', re.I+re.M+re.S)
            before, content, after = blockpat.split(self.banner['initrwgt'])
            header_rwgt_other = before + after
            pattern = re.compile('<weight id=\'(?:rwgt_(?P<id>\d+)|(?P<id2>[_\w]+))(?P<rwgttype>\s*|_\w+)\'>(?P<info>.*?)</weight>', re.S+re.I+re.M)
            mg_rwgt_info = pattern.findall(content)
            maxid = 0
            for k,(i, fulltag, nlotype, diff) in enumerate(mg_rwgt_info):
                if i:
                    if int(i) > maxid:
                        maxid = int(i)
                    mg_rwgt_info[k] = (i, nlotype, diff) # remove the pointless fulltag tag
                else:
                    mg_rwgt_info[k] = (fulltag, nlotype, diff) # remove the pointless id tag
            maxid += 1
            rewgtid = maxid
            if self.options['rwgt_name']:
                #ensure that the entry is not already define if so overwrites it
                for (i, nlotype, diff) in mg_rwgt_info[:]:
                    for flag in type_rwgt:
                        if 'rwgt_%s' % i == '%s%s' %(self.options['rwgt_name'],flag) or \
                                i == '%s%s' % (self.options['rwgt_name'], flag):
                            logger.warning("tag %s%s already defines, will replace it", self.options['rwgt_name'],flag)
                            mg_rwgt_info.remove((i, nlotype, diff))
        else:
            header_rwgt_other = self.banner['initrwgt']
            mg_rwgt_info = []
            rewgtid = 1
    else:
        self.banner['initrwgt'] = ''
        header_rwgt_other = ''
        mg_rwgt_info = []
        rewgtid = 1

    # add the reweighting in the banner information:
    #starts by computing the difference in the cards.
    s_orig = self.banner['slha']
    s_new = new_card
    #define tag for the run
    if self.options['rwgt_name']:
        tag = self.options['rwgt_name']
    else:
        tag = str(rewgtid)
    if not self.second_model:
        old_param = check_param_card.ParamCard(s_orig.splitlines())
        new_param = check_param_card.ParamCard(s_new.splitlines())
        card_diff = old_param.create_diff(new_param)
        if card_diff == '' and not self.second_process:
            # identical cards: skip in optimized mode, run anyway in debug mode
            if not __debug__:
                logger.warning(' REWEIGHTING: original card and new card are identical. Bypass this run')
                return
            else:
                logger.warning(' REWEIGHTING: original card and new card are identical. Run it due to debug mode')
            #raise self.InvalidCmd, 'original card and new card are identical'
        try:
            if old_param['sminputs'].get(3)- new_param['sminputs'].get(3) > 1e-3 * new_param['sminputs'].get(3):
                logger.warning("We found different value of alpha_s. Note that the value of alpha_s used is the one associate with the event and not the one from the cards.")
        except Exception, error:
            logger.debug("error in check of alphas: %s" % str(error))
            pass #this is a security
        if not self.second_process:
            for name in type_rwgt:
                mg_rwgt_info.append((tag, name, card_diff))
        else:
            str_proc = "\n change process  ".join([""]+self.second_process)
            for name in type_rwgt:
                mg_rwgt_info.append((tag, name, str_proc + '\n'+ card_diff))
    else:
        str_info = "change model %s" % self.second_model
        if self.second_process:
            str_info += "\n change process  ".join([""]+self.second_process)
        card_diff = str_info
        str_info += '\n' + s_new
        for name in type_rwgt:
            mg_rwgt_info.append((tag, name, str_info))

    # re-create the banner.
    self.banner['initrwgt'] = header_rwgt_other
    self.banner['initrwgt'] += '\n<weightgroup name=\'mg_reweighting\'>\n'
    for tag, rwgttype, diff in mg_rwgt_info:
        if tag.isdigit():
            self.banner['initrwgt'] += '<weight id=\'rwgt_%s%s\'>%s</weight>\n' % \
                (tag, rwgttype, diff)
        else:
            self.banner['initrwgt'] += '<weight id=\'%s%s\'>%s</weight>\n' % \
                (tag, rwgttype, diff)
    self.banner['initrwgt'] += '\n</weightgroup>\n'
    self.banner['initrwgt'] = self.banner['initrwgt'].replace('\n\n', '\n')

    # --- 4. event loop: accumulators for cross-section and its spread ---
    start = time.time()
    cross, ratio, ratio_square,error = {},{},{}, {}
    for name in type_rwgt + ['orig']:
        cross[name], error[name] = 0.,0.
        ratio[name],ratio_square[name] = 0., 0.# to compute the variance and associate error

    # open the output LHE file(s): one file with extra weights ("default")
    # or one file per requested weight type otherwise
    if self.output_type == "default":
        output = open( self.lhe_input.name +'rw', 'w')
        #write the banner to the output file
        self.banner.write(output, close_tag=False)
    else:
        output = {}
        for name in type_rwgt:
            output[name] = open( self.lhe_input.name +'rw'+name, 'w')
            #write the banner to the output file
            self.banner.write(output[name], close_tag=False)

    logger.info('starts to compute weight for events with the following modification to the param_card:')
    logger.info(card_diff.replace('\n','\nKEEP:'))

    # prepare the output file for the weight plot
    if self.mother:
        out_path = pjoin(self.mother.me_dir, 'Events', 'reweight.lhe')
        output2 = open(out_path, 'w')
        # force a +/- 1 lha strategy in the plot file banner
        lha_strategy = self.banner.get_lha_strategy()
        self.banner.set_lha_strategy(4*lha_strategy/abs(lha_strategy))
        self.banner.write(output2, close_tag=False)
        self.banner.set_lha_strategy(lha_strategy)
        new_banner = banner.Banner(self.banner)
        if not hasattr(self, 'run_card'):
            self.run_card = new_banner.charge_card('run_card')
        self.run_card['run_tag'] = 'reweight_%s' % rewgtid
        new_banner['slha'] = s_new
        del new_banner['initrwgt']
        assert 'initrwgt' in self.banner
        ff = open(pjoin(self.mother.me_dir,'Events',self.mother.run_name, '%s_%s_banner.txt' % \
                (self.mother.run_name, self.run_card['run_tag'])),'w')
        new_banner.write(ff)
        ff.close()

    # Loop over all events
    if self.options['rwgt_name']:
        tag_name = self.options['rwgt_name']
    else:
        tag_name = 'rwgt_%s' % rewgtid

    os.environ['GFORTRAN_UNBUFFERED_ALL'] = 'y'
    if self.lhe_input.closed:
        self.lhe_input = lhe_parser.EventFile(self.lhe_input.name)

    # Multicore option not really stable -> not use it
    nb_core = 1
    # if nb_core >1:
    #     multicore = cluster.MultiCore(nb_core)
    self.lhe_input.seek(0)
    for event_nb,event in enumerate(self.lhe_input):
        #control logger
        if (event_nb % max(int(10**int(math.log10(float(event_nb)+1))),10)==0):
            running_time = misc.format_timer(time.time()-start)
            logger.info('Event nb %s %s' % (event_nb, running_time))
        if (event_nb==10001): logger.info('reducing number of print status. Next status update in 10000 events')

        if nb_core > 1:
            # Multicore option not really stable -> not use it
            while 1:
                if multicore.queue.qsize() < 100 * nb_core:
                    multicore.submit(self.write_reweighted_event, argument=[event, tag_name])
                    break
                #else:
                #    time.sleep(0.001)
            continue
        else:
            weight = self.calculate_weight(event)
        if not isinstance(weight, dict):
            weight = {'':weight}
        # accumulate cross-section and (weight/original weight) moments
        for name in weight:
            cross[name] += weight[name]
            ratio[name] += weight[name]/event.wgt
            ratio_square[name] += (weight[name]/event.wgt)**2

        # ensure to have a consistent order of the weights. new one are put
        # at the back, remove old position if already defines
        for tag in type_rwgt:
            try:
                event.reweight_order.remove('%s%s' % (tag_name,tag))
            except ValueError:
                continue
        event.reweight_order += ['%s%s' % (tag_name,name) for name in type_rwgt]
        if self.output_type == "default":
            for name in weight:
                if 'orig' in name:
                    continue
                event.reweight_data['%s%s' % (tag_name,name)] = weight[name]
            #write this event with weight
            output.write(str(event))
            if self.mother:
                event.wgt = weight[type_rwgt[0]]
                event.reweight_data = {}
                output2.write(str(event))
        else:
            # one output file per weight type; central weight is replaced
            for i,name in enumerate(weight):
                event.wgt = weight[name]
                event.reweight_data = {}
                if self.mother and len(weight)==1:
                    output2.write(str(event))
                elif self.mother and i == 0:
                    output[name].write(str(event))
                    output2.write(str(event))
                else:
                    output[name].write(str(event))

    # check normalisation of the events:
    if 'event_norm' in self.run_card:
        if self.run_card['event_norm'] == 'average':
            for key, value in cross.items():
                cross[key] = value / (event_nb+1)
    running_time = misc.format_timer(time.time()-start)
    logger.info('All event done  (nb_event: %s) %s' % (event_nb+1, running_time))

    # close the LHE output(s)
    if self.output_type == "default":
        output.write('</LesHouchesEvents>\n')
        output.close()
    else:
        for key in output:
            output[key].write('</LesHouchesEvents>\n')
            # NOTE(review): 'output' is a dict here; this looks like it
            # should be output[key].close() -- confirm.
            output.close()
    os.environ['GFORTRAN_UNBUFFERED_ALL'] = 'n'
    if self.mother:
        output2.write('</LesHouchesEvents>\n')
        output2.close()
        # add output information
        if hasattr(self.mother, 'results'):
            run_name = self.mother.run_name
            results = self.mother.results
            results.add_run(run_name, self.run_card, current=True)
            results.add_detail('nb_event', event_nb+1)
            name = type_rwgt[0]
            results.add_detail('cross', cross[name])
            event_nb +=1
            # indicative error: variance of weight/orig_weight ratio folded
            # with the original cross-section and its error
            for name in type_rwgt:
                variance = ratio_square[name]/event_nb - (ratio[name]/event_nb)**2
                orig_cross, orig_error = self.orig_cross
                error[name] = variance/math.sqrt(event_nb) * orig_cross + ratio[name]/event_nb * orig_error
            results.add_detail('error', error[type_rwgt[0]])
            import madgraph.interface.madevent_interface as ME_interface
            if isinstance(self.mother, ME_interface.MadEventCmd):
                self.mother.create_plot(mode='reweight', event_path=output2.name,
                        tag=self.run_card['run_tag'])
                #modify the html output to add the original run
                if 'plot' in results.current.reweight:
                    html_dir = pjoin(self.mother.me_dir, 'HTML', run_name)
                    td = pjoin(self.mother.options['td_path'], 'td')
                    MA = pjoin(self.mother.options['madanalysis_path'])
                    path1 = pjoin(html_dir, 'plots_parton')
                    path2 = pjoin(html_dir, 'plots_%s' % self.run_card['run_tag'])
                    outputplot = path2
                    combine_plots.merge_all_plots(path2, path1, outputplot, td, MA)
            #results.update_status(level='reweight')
            #results.update(status, level, makehtml=True, error=False)
            #old_name = self.mother.results.current['run_name']
            #new_run = '%s_rw_%s' % (old_name, rewgtid)
            #self.mother.results.add_run( new_run, self.run_card)
            #self.mother.results.add_detail('nb_event', event_nb+1)
            #self.mother.results.add_detail('cross', cross)
            #self.mother.results.add_detail('error', 'nan')
            #self.mother.do_plot('%s -f' % new_run)
            #self.mother.update_status('Reweight %s done' % rewgtid, 'madspin')
            #self.mother.results.def_current(old_name)
            #self.run_card['run_tag'] = self.run_card['run_tag'][9:]
            #self.mother.run_name = old_name
    self.lhe_input.close()
    # choose where the final file should end up
    # NOTE(review): when self.mother is falsy the first branch still reads
    # self.mother.me_dir; presumably output_type != "default" implies a
    # mother interface exists -- confirm.
    if not self.mother or self.output_type != "default" :
        target = pjoin(self.mother.me_dir, 'Events', run_name, 'events.lhe')
    else:
        target = self.lhe_input.name
    if self.output_type == "default":
        files.mv(output.name, target)
        logger.info('Event %s have now the additional weight' % self.lhe_input.name)
    elif self.output_type == "unweight":
        output2.close()
        lhe = lhe_parser.EventFile(output2.name)
        nb_event = lhe.unweight(target)
        if self.mother and hasattr(self.mother, 'results'):
            results = self.mother.results
            results.add_detail('nb_event', nb_event)
            results.current.parton.append('lhe')
        logger.info('Event %s is now unweighted under the new theory' % output2.name)
    else:
        files.mv(output2.name, self.lhe_input.name)
        if self.mother and hasattr(self.mother, 'results'):
            results = self.mother.results
            results.current.parton.append('lhe')
        logger.info('Event %s is now created with new central weight' % output2.name)

    # --- 5. report results and store them -----------------------------
    if self.multicore != 'create':
        for name in cross:
            if name == 'orig':
                continue
            logger.info('new cross-section is %s: %g pb (indicative error: %g pb)' %\
                ('(%s)' %name if name else '',cross[name], error[name]))
    self.terminate_fortran_executables(new_card_only=True)
    #store result
    for name in cross:
        if name == 'orig':
            self.all_cross_section[name] = (cross[name], error[name])
        else:
            self.all_cross_section[(tag_name,name)] = (cross[name], error[name])

    # perform the scanning
    if param_card_iterator:
        for i,card in enumerate(param_card_iterator):
            if self.options['rwgt_name']:
                self.options['rwgt_name'] = '%s_%s' % (self.options['rwgt_name'].rsplit('_',1)[0], i+1)
            card.write(pjoin(rw_dir, 'Cards', 'param_card.dat'))
            self.exec_cmd("launch --keep_card", printcmd=False, precmd=True)
    self.options['rwgt_name'] = None
def do_set(self, line):
    "Not in help"
    # A 'set' issued before 'launch' is stored and replayed by 'launch'
    # (via stored_line); trigger the launch automatically.
    self.stored_line = "set %s" % line
    logger.warning("Invalid Syntax. The command 'set' should be placed after the 'launch' one. Continuing by adding automatically 'launch'")
    return self.exec_cmd("launch")
def default(self, line, log=True):
    """Default action if line is not recognized"""
    # An unrecognized line that points to an existing file is treated as a
    # param_card path: remember it and trigger 'launch' automatically.
    if not os.path.isfile(line):
        return super(ReweightInterface, self).default(line, log=log)
    if log:
        logger.warning("Invalid Syntax. The path to a param_card' should be placed after the 'launch' command. Continuing by adding automatically 'launch'")
    self.stored_line = line
    return self.exec_cmd("launch")
def write_reweighted_event(self, event, tag_name, **opt):
    """a function for running in multicore"""
    ts = opt['thread_space']
    # Lazy per-thread initialisation: each worker gets its own calculator
    # cache, accumulator and output file(s) suffixed by the thread id.
    if not hasattr(ts, "calculator"):
        ts.calculator = {}
        ts.calculator_nbcall = {}
        ts.cross = 0
        ts.output = open(self.lhe_input.name + 'rw.%s' % opt['thread_id'], 'w')
        if self.mother:
            out_path = pjoin(self.mother.me_dir, 'Events',
                             'reweight.lhe.%s' % opt['thread_id'])
            ts.output2 = open(out_path, 'w')

    weight = self.calculate_weight(event, space=ts)
    ts.cross += weight
    if self.output_type == "default":
        # keep the central weight, attach the new one as additional data
        event.reweight_data[tag_name] = weight
        #write this event with weight
        ts.output.write(str(event))
        if self.mother:
            event.wgt = weight
            event.reweight_data = {}
            ts.output2.write(str(event))
    else:
        # replace the central weight entirely
        event.wgt = weight
        event.reweight_data = {}
        stream = ts.output2 if self.mother else ts.output
        stream.write(str(event))
    return 0
def do_compute_widths(self, line):
    """Forward the 'compute_widths' command to the parent interface."""
    mother = self.mother
    return mother.do_compute_widths(line)
def calculate_weight(self, event, space=None):
    """space defines where to find the calculator (in multicore)

    LO reweighting of a single event: evaluate the matrix element under
    the original (hypp_id=0) and new (hypp_id=1) hypotheses and rescale
    the event weight by their ratio.

    :param event: a parsed LHE event (must expose wgt / get_momenta / ...)
    :param space: object holding the 'calculator' cache; defaults to self
    :return: dict {'orig': original weight, '': reweighted weight}
    :raises Exception: if the original matrix element evaluates to zero
    """
    # NLO-aware path (unless the user forced pure-LO mode)
    if self.has_nlo and self.rwgt_mode != "LO":
        return self.calculate_nlo_weight(event, space)
    if not space:
        space = self
    event.parse_reweight()
    # LO reweighting
    w_orig = self.calculate_matrix_element(event, 0, space)
    w_new = self.calculate_matrix_element(event, 1, space)
    if w_orig == 0:
        # dump full debugging information before aborting: a vanishing
        # original matrix element makes the ratio ill-defined
        tag, order = event.get_tag_and_order()
        orig_order, Pdir, hel_dict = self.id_to_path[tag]
        misc.sprint(w_orig, w_new)
        misc.sprint(event)
        misc.sprint(self.invert_momenta(event.get_momenta(orig_order)))
        misc.sprint(event.get_momenta(orig_order))
        misc.sprint(event.aqcd)
        hel_order = event.get_helicity(orig_order)
        if self.helicity_reweighting and 9 not in hel_order:
            nhel = hel_dict[tuple(hel_order)]
        else:
            nhel = 0
        misc.sprint(nhel, Pdir, hel_dict)
        raise Exception, "Invalid matrix element for original computation (weight=0)"
    return {'orig': event.wgt, '': w_new/w_orig*event.wgt}
def calculate_nlo_weight(self, event, space=None):
    """Compute the NLO-aware reweighting factors for one event.

    For each counter-event stored in the event's NLO weight information,
    evaluate the tree (and, when needed, virtual) matrix elements under
    the original and new hypotheses, rescale the stored weight
    components, and recombine them via self.combine_wgt.

    :param event: LHE event with parsable reweight/NLO-weight blocks
    :param space: calculator cache holder (defaults to self; multicore
                  is not used here so far)
    :return: dict with the original weight under 'orig' and one entry per
             requested mode among '_nlo', '_tree', '_lo'
    """
    type_nlo = self.get_weight_names()
    final_weight = {'orig': event.wgt}

    if not space:
        space = self #for multicore: not use so far
    event.parse_reweight()
    event.parse_nlo_weight()

    #initialise the input to the function which recompute the weight
    scales2 = []
    pdg = []
    bjx = []
    wgt_tree = [] # reweight for loop-improved type
    wgt_virt = [] #reweight b+v together
    base_wgt = []
    gs=[]
    qcdpower = []
    ref_wgts = [] #for debugging
    orig_wgt = 0

    for cevent in event.nloweight.cevents:
        #check if we need to compute the virtual for that cevent
        need_V = False # the real is nothing else than the born for a N+1 config
        all_ctype = [w.type for w in cevent.wgts]
        # weight types 2/14/15 carry a Born+virtual contribution
        if '_nlo' in type_nlo and any(c in all_ctype for c in [2,14,15]):
            need_V =True

        w_orig = self.calculate_matrix_element(cevent, 0, space)
        w_new = self.calculate_matrix_element(cevent, 1, space)
        ratio_T = w_new/w_orig

        if need_V:
            scale2 = cevent.wgts[0].scales2[0]
            #for scale2 in set(c.scales2[1] for c in cevent.wgts):
            w_origV = self.calculate_matrix_element(cevent, 'V0', space, scale2=scale2)
            w_newV = self.calculate_matrix_element(cevent, 'V1', space, scale2=scale2)
            ratio_BV = (w_newV + w_new) / (w_origV + w_orig)
            # NOTE(review): ratio_V is computed but not used below -- confirm.
            ratio_V = w_newV/w_origV
        else:
            # sentinel strings: any accidental arithmetic use would raise
            ratio_V = "should not be used"
            ratio_BV = "should not be used"

        for c_wgt in cevent.wgts:
            orig_wgt += c_wgt.ref_wgt
            #add the information to the input
            scales2.append(c_wgt.scales2)
            pdg.append(c_wgt.pdgs[:2])
            bjx.append(c_wgt.bjks)
            qcdpower.append(c_wgt.qcdpower)
            gs.append(c_wgt.gs)
            ref_wgts.append(c_wgt.ref_wgt)

            if '_nlo' in type_nlo:
                # B+V pieces are rescaled by the Born+virtual ratio, the
                # remaining pieces by the tree-level ratio
                if c_wgt.type in [2,14,15]:
                    R = ratio_BV
                else:
                    R = ratio_T
                new_wgt = [c_wgt.pwgt[0] * R,
                           c_wgt.pwgt[1] * ratio_T,
                           c_wgt.pwgt[2] * ratio_T]
                wgt_virt.append(new_wgt)
            if '_tree' in type_nlo:
                new_wgt = [c_wgt.pwgt[0] * ratio_T,
                           c_wgt.pwgt[1] * ratio_T,
                           c_wgt.pwgt[2] * ratio_T]
                wgt_tree.append(new_wgt)
            base_wgt.append(c_wgt.pwgt[:3])

    #change the ordering to the fortran one:
    scales2 = self.invert_momenta(scales2)
    pdg = self.invert_momenta(pdg)
    bjx = self.invert_momenta(bjx)

    # re-compute original weight to reduce numerical inacurracy
    base_wgt = self.invert_momenta(base_wgt)
    orig_wgt_check, partial_check = self.combine_wgt(scales2, pdg, bjx, base_wgt, gs, qcdpower, 1., 1.)

    if '_nlo' in type_nlo:
        wgt = self.invert_momenta(wgt_virt)
        with misc.stdchannel_redirected(sys.stdout, os.devnull):
            new_out, partial = self.combine_wgt(scales2, pdg, bjx, wgt, gs, qcdpower, 1., 1.)
        # try to correct for precision issue
        avg = [partial_check[i]/ref_wgts[i] for i in range(len(ref_wgts))]
        # contributions whose check ratio is far from 1 are dropped here
        out = sum(partial[i]/avg[i] if 0.85<avg[i]<1.15 else 0 \
                  for i in range(len(avg)))
        final_weight['_nlo'] = out/orig_wgt*event.wgt

    if '_tree' in type_nlo:
        wgt = self.invert_momenta(wgt_tree)
        with misc.stdchannel_redirected(sys.stdout, os.devnull):
            out, partial = self.combine_wgt(scales2, pdg, bjx, wgt, gs, qcdpower, 1., 1.)
        # try to correct for precision issue
        avg = [partial_check[i]/ref_wgts[i] for i in range(len(ref_wgts))]
        # NOTE(review): unlike the '_nlo' branch, out-of-window terms keep
        # their uncorrected value here instead of being zeroed -- confirm.
        new_out = sum(partial[i]/avg[i] if 0.85<avg[i]<1.15 else partial[i] \
                      for i in range(len(avg)))
        final_weight['_tree'] = new_out/orig_wgt*event.wgt

    if '_lo' in type_nlo:
        # plain LO ratio on the full event kinematics
        w_orig = self.calculate_matrix_element(event, 0, space)
        w_new = self.calculate_matrix_element(event, 1, space)
        final_weight['_lo'] = w_new/w_orig*event.wgt

    return final_weight
@staticmethod
def invert_momenta(p):
""" fortran/C-python do not order table in the same order"""
new_p = []
for i in range(len(p[0])): new_p.append([0]*len(p))
for i, onep in enumerate(p):
for j, x in enumerate(onep):
new_p[j][i] = x
return new_p
@staticmethod
def rename_f2py_lib(Pdir, tag):
    """Clone matrix2py.so as matrix<tag>py.so inside Pdir, rewriting the
    embedded 'matrix2py' module name so f2py can import it under the new
    name.  No-op for tag 2 or when the target already exists."""
    if tag == 2:
        return
    target = pjoin(Pdir, 'matrix%spy.so' % tag)
    if os.path.exists(target):
        return
    payload = open(pjoin(Pdir, 'matrix2py.so')).read()
    open(target, 'w').write(payload.replace('matrix2py', 'matrix%spy' % tag))
def calculate_matrix_element(self, event, hypp_id, space, scale2=0):
    """routine to return the matrix element

    Evaluates the squared matrix element of `event` via the f2py-compiled
    standalone library, compiling/importing/initialising the module on
    first use and caching the callable in space.calculator.

    :param event: event (or counter-event) exposing get_tag_and_order,
                  get_momenta, get_helicity, aqcd
    :param hypp_id: 0 for the original hypothesis, 1 for the new one;
                    'V0'/'V1' select the virtual (rw_mevirt) libraries
    :param space: cache holder; must currently be self (asserted below)
    :param scale2: squared scale forwarded (as sqrt) to loop MEs
    :return: the matrix-element value (0. if the loop code flags the
             point as unstable)
    """
    if self.has_nlo:
        nb_retry, sleep = 10, 60
    else:
        nb_retry, sleep = 5, 20

    tag, order = event.get_tag_and_order()
    # virtual evaluation requested: switch to the rw_mevirt tree and use
    # a decorated tag so tree/loop modules are cached separately
    if isinstance(hypp_id, str) and hypp_id.startswith('V'):
        tag = (tag,'V')
        hypp_id = int(hypp_id[1:])
        base = "rw_mevirt"
    else:
        base = "rw_me"

    # the original hypothesis always lives in the primary directory; the
    # second model/process (if any) has its own path mapping
    if (not self.second_model and not self.second_process) or hypp_id==0:
        orig_order, Pdir, hel_dict = self.id_to_path[tag]
    else:
        orig_order, Pdir, hel_dict = self.id_to_path_second[tag]

    run_id = (tag, hypp_id)
    assert space == self
    start = False
    if run_id in space.calculator:
        # cached f2py entry point from a previous call
        external = space.calculator[run_id]
        # mod = space.calculator[(run_id,'module')]
        # #with misc.chdir(Pdir):
        # #    if hypp_id==0:
        # #        mod.initialise('param_card_orig.dat')
        # #    else:
        # #        mod.initialise('param_card.dat')
    elif (not self.second_model and not self.second_process) or hypp_id==0:
        # create the executable for this param_card
        subdir = pjoin(self.me_dir, base, 'SubProcesses')
        if self.me_dir not in sys.path:
            sys.path.insert(0,self.me_dir)
        if self.rwgt_dir and self.rwgt_dir not in sys.path:
            sys.path.insert(0,self.rwgt_dir)
        Pname = os.path.basename(Pdir)
        if hypp_id == 0:
            # pick a fresh module tag if this Pdir was already imported in
            # a previous f2py "generation" (dir_to_f2py_free_mod is global)
            if (Pdir, 0) not in dir_to_f2py_free_mod:
                metag = 1
                dir_to_f2py_free_mod[(Pdir,0)] = (metag, nb_f2py_module)
            else:
                metag, old_module = dir_to_f2py_free_mod[(Pdir,0)]
                if old_module != nb_f2py_module:
                    metag += 1
                    dir_to_f2py_free_mod[(Pdir,0)] = (metag, nb_f2py_module)
            os.environ['MENUM'] = '2'
            if not self.rwgt_dir or not os.path.exists(pjoin(Pdir, 'matrix2py.so')):
                misc.multiple_try(nb_retry,sleep)(misc.compile)(['matrix2py.so'], cwd=Pdir)
            self.rename_f2py_lib(Pdir, 2*metag)
            try:
                mymod = __import__('%s.SubProcesses.%s.matrix%spy' % (base, Pname, 2*metag), globals(), locals(), [],-1)
            except:
                import platform
                if platform.system() == 'Darwin':
                    # on macOS patch the dylib search path and retry
                    os.system('install_name_tool -change libMadLoop.dylib %s/libMadLoop.dylib matrix%spy.so' % (Pdir,2*metag))
                    mymod = __import__('%s.SubProcesses.%s.matrix%spy' % (base, Pname, 2*metag), globals(), locals(), [],-1)
                else:
                    misc.sprint("fail compilation")
                    raise
            # walk down to the leaf submodule of the package just imported
            S = mymod.SubProcesses
            P = getattr(S, Pname)
            mymod = getattr(P, 'matrix%spy' % (2*metag))
            with misc.chdir(Pdir):
                with misc.stdchannel_redirected(sys.stdout, os.devnull):
                    mymod.initialise('param_card_orig.dat')
        if hypp_id == 1:
            #incorrect line
            metag = dir_to_f2py_free_mod[(Pdir,0)][0]
            # odd tag for the "new hypothesis" copy of the library
            newtag = 2*metag+1
            self.rename_f2py_lib(Pdir, newtag)
            try:
                mymod = __import__('%s.SubProcesses.%s.matrix%spy' % (base, Pname, newtag), globals(), locals(), [],-1)
            except Exception, error:
                # import failed: force a dedicated compilation under an
                # "L"-prefixed tag and retry
                newtag = "L%s" % newtag
                os.environ['MENUM'] = newtag
                misc.multiple_try(nb_retry,sleep)(misc.compile)(['matrix%spy.so' % newtag], cwd=Pdir)
                mymod = __import__('%s.SubProcesses.%s.matrix%spy' % (base, Pname, newtag), globals(), locals(), [],-1)
            S = mymod.SubProcesses
            P = getattr(S, Pname)
            mymod = getattr(P, 'matrix%spy' % newtag)
            with misc.chdir(Pdir):
                with misc.stdchannel_redirected(sys.stdout, os.devnull):
                    mymod.initialise('param_card.dat')
        # cache both the entry point and the module itself
        space.calculator[run_id] = mymod.get_me
        space.calculator[(run_id,'module')] = mymod
        external = space.calculator[run_id]
    else:
        # new hypothesis evaluated in the *_second directory (second model
        # and/or second process definition)
        subdir = pjoin(self.me_dir,'%s_second' % base, 'SubProcesses')
        if self.me_dir not in sys.path:
            sys.path.append(self.me_dir)
        assert hypp_id == 1
        Pname = os.path.basename(Pdir)
        os.environ['MENUM'] = '2'
        if not self.rwgt_dir or not os.path.exists(pjoin(Pdir, 'matrix2py.so')):
            misc.multiple_try(nb_retry,sleep)(misc.compile)(['matrix2py.so'], cwd=pjoin(subdir, Pdir))
        if (Pdir, 1) not in dir_to_f2py_free_mod:
            metag = 1
            dir_to_f2py_free_mod[(Pdir,1)] = (metag, nb_f2py_module)
        else:
            metag, old_module = dir_to_f2py_free_mod[(Pdir,1)]
            if old_module != nb_f2py_module:
                metag += 1
                dir_to_f2py_free_mod[(Pdir,1)] = (metag, nb_f2py_module)
        self.rename_f2py_lib(Pdir, metag)
        try:
            mymod = __import__("%s_second.SubProcesses.%s.matrix%spy" % (base, Pname, metag))
        except ImportError:
            metag = "L%s" % metag
            os.environ['MENUM'] = str(metag)
            misc.multiple_try(nb_retry,sleep)(misc.compile)(['matrix%spy.so' % metag], cwd=pjoin(subdir, Pdir))
            mymod = __import__("%s_second.SubProcesses.%s.matrix%spy" % (base, Pname, metag))
        reload(mymod)
        S = mymod.SubProcesses
        P = getattr(S, Pname)
        mymod = getattr(P, 'matrix%spy' % metag)
        with misc.chdir(Pdir):
            with misc.stdchannel_redirected(sys.stdout, os.devnull):
                mymod.initialise('param_card.dat')
        space.calculator[run_id] = mymod.get_me
        space.calculator[(run_id,'module')] = mymod
        external = space.calculator[run_id]

    p = event.get_momenta(orig_order)
    # add helicity information
    hel_order = event.get_helicity(orig_order)
    if self.helicity_reweighting and 9 not in hel_order:
        nhel = hel_dict[tuple(hel_order)]
        if event[1].status == -1: #check if this is a 2 >N processes
            # need to pass to the rest-frame
            pboost = lhe_parser.FourMomentum(p[0]) + lhe_parser.FourMomentum(p[1])
            for i,thisp in enumerate(p):
                p[i] = lhe_parser.FourMomentum(thisp).zboost(pboost).get_tuple()
            # NOTE(review): p[1][2] appears twice; presumably one of them
            # should be p[1][1] -- confirm.
            assert p[0][1] == p[0][2] == 0 == p[1][2] == p[1][2] == 0
    else:
        nhel = 0
    pold = list(p)
    # fortran expects column-major momenta
    p = self.invert_momenta(p)

    with misc.chdir(Pdir):
        with misc.stdchannel_redirected(sys.stdout, os.devnull):
            # loop MEs take an extra scale argument; tree MEs may or may
            # not, hence the TypeError fallback below
            if 'V' in tag or \
                    (hypp_id ==1 and self.second_process and any('sqrvirt' in l for l in self.second_process)):
                me_value = external(p,event.aqcd, math.sqrt(scale2), nhel)
            else:
                try:
                    me_value = external(p,event.aqcd, nhel)
                except TypeError:
                    me_value = external(p,event.aqcd, math.sqrt(scale2), nhel)
    # for NLO we have also the stability status code
    if isinstance(me_value, tuple):
        me_value, code = me_value
        #if code points unstability -> returns 0
        hundred_value = (code % 1000) //100
        if hundred_value in [4]:
            me_value = 0.
    return me_value
def terminate_fortran_executables(self, new_card_only=False):
    """routine to terminate all fortran executables

    Drops cached calculator entries; with new_card_only=True, entries
    whose second key element is 0 (original hypothesis) are kept.
    """
    # iterate over a snapshot so entries can be removed while looping
    for key in list(self.calculator):
        mode, production = key
        if new_card_only and production == 0:
            continue
        del self.calculator[key]
def do_quit(self, line):
    """Exit the reweighting interface.

    Prints the cross-section summary (original value parsed from the
    banner <init> block, plus every computed reweighted value), stops
    the cached fortran/f2py calculators, saves the pickle when running
    with a reweight directory, and finally drops the calculator cache.
    Idempotent: a second call is a no-op.
    """
    if self.exitted:
        return
    self.exitted = True

    if 'init' in self.banner:
        cross = 0
        error = 0
        # the last 4-column line of the <init> block holds the original
        # cross-section and its error
        for line in self.banner['init'].split('\n'):
            split = line.split()
            if len(split) == 4:
                cross, error = float(split[0]), float(split[1])

        if not self.multicore == 'create':
            # No print of results for the multicore mode for the one printed on screen
            if 'orig' not in self.all_cross_section:
                logger.info('Original cross-section: %s +- %s pb' % (cross, error))
            else:
                logger.info('Original cross-section: %s +- %s pb (cross-section from sum of weights: %s)' % (cross, error, self.all_cross_section['orig'][0]))
            logger.info('Computed cross-section:')
            # sorted() instead of list.sort() so this also works on py3 views
            keys = sorted(self.all_cross_section.keys())
            for key in keys:
                if key == 'orig':
                    continue
                logger.info('%s : %s +- %s pb' % (key[0] if not key[1] else '%s%s' % key,
                    self.all_cross_section[key][0],self.all_cross_section[key][1] ))
    self.terminate_fortran_executables()

    if self.rwgt_dir and self.multicore == False:
        self.save_to_pickle()

    with misc.stdchannel_redirected(sys.stdout, os.devnull):
        # BUGFIX: iterate over a snapshot of the keys -- deleting entries
        # while iterating the live dict raises
        # "RuntimeError: dictionary changed size during iteration"
        for run_id in list(self.calculator):
            del self.calculator[run_id]
        del self.calculator
def __del__(self):
    """Ensure the cleanup/summary of do_quit also runs on garbage collection."""
    self.do_quit('')
def adding_me(self, matrix_elements, path):
    """Adding one element to the list based on the matrix element.

    Placeholder hook: the body is intentionally empty and returns None.
    """
    pass
@misc.mute_logger()
def create_standalone_directory(self, second=False):
"""generate the various directory for the weight evaluation"""
data={}
if not second:
data['paths'] = ['rw_me', 'rw_mevirt']
# model
info = self.banner.get('proc_card', 'full_model_line')
if '-modelname' in info:
data['mg_names'] = False
else:
data['mg_names'] = True
data['model_name'] = self.banner.get('proc_card', 'model')
#processes
data['processes'] = [line[9:].strip() for line in self.banner.proc_card
if line.startswith('generate')]
data['processes'] += [' '.join(line.split()[2:]) for line in self.banner.proc_card
if re.search('^\s*add\s+process', line)]
#object_collector
self.id_to_path = {}
data['id2path'] = self.id_to_path
else:
data['paths'] = ['rw_me_second', 'rw_mevirt_second']
# model
if self.second_model:
data['mg_names'] = True
if ' ' in self.second_model:
args = self.second_model.split()
if '--modelname' in args:
data['mg_names'] = False
data['model_name'] = args[0]
else:
data['model_name'] = self.second_model
else:
data['model_name'] = None
#processes
if self.second_process:
data['processes'] = self.second_process
else:
data['processes'] = [line[9:].strip() for line in self.banner.proc_card
if line.startswith('generate')]
data['processes'] += [' '.join(line.split()[2:])
for line in self.banner.proc_card
if re.search('^\s*add\s+process', line)]
#object_collector
self.id_to_path_second = {}
data['id2path'] = self.id_to_path_second
# 0. clean previous run ------------------------------------------------
if not self.rwgt_dir:
path_me = self.me_dir
else:
path_me = self.rwgt_dir
try:
shutil.rmtree(pjoin(path_me,data['paths'][0]))
except Exception:
pass
try:
shutil.rmtree(pjoin(path_me, data['paths'][1]))
except Exception:
pass
# 1. prepare the interface----------------------------------------------
mgcmd = self.mg5cmd
complex_mass = False
has_cms = re.compile(r'''set\s+complex_mass_scheme\s*(True|T|1|true|$|;)''')
for line in self.banner.proc_card:
if line.startswith('set'):
mgcmd.exec_cmd(line, printcmd=False, precmd=False, postcmd=False)
if has_cms.search(line):
complex_mass = True
elif line.startswith('define'):
try:
mgcmd.exec_cmd(line, printcmd=False, precmd=False, postcmd=False)
except Exception:
pass
# 1. Load model---------------------------------------------------------
if not data['model_name'] and not second:
raise self.InvalidCmd('Only UFO model can be loaded in this module.')
elif data['model_name']:
self.load_model(data['model_name'], data['mg_names'], complex_mass)
modelpath = self.model.get('modelpath')
if os.path.basename(modelpath) != mgcmd._curr_model['name']:
name, restrict = mgcmd._curr_model['name'].rsplit('-',1)
if os.path.exists(pjoin(os.path.dirname(modelpath),name, 'restrict_%s.dat' % restrict)):
modelpath = pjoin(os.path.dirname(modelpath), mgcmd._curr_model['name'])
commandline="import model %s " % modelpath
if not data['mg_names']:
commandline += ' -modelname '
mgcmd.exec_cmd(commandline)
#multiparticles
for name, content in self.banner.get('proc_card', 'multiparticles'):
mgcmd.exec_cmd("define %s = %s" % (name, content))
# 2. compute the production matrix element -----------------------------
has_nlo = False
mgcmd.exec_cmd("set group_subprocesses False")
if not second:
logger.info('generating the square matrix element for reweighting')
else:
logger.info('generating the square matrix element for reweighting (second model and/or processes)')
start = time.time()
commandline=''
for i,proc in enumerate(data['processes']):
if '[' not in proc:
commandline += "add process %s ;" % proc
else:
has_nlo = True
if self.banner.get('run_card','ickkw') == 3:
if len(proc) == min([len(p.strip()) for p in data['processes']]):
commandline += self.get_LO_definition_from_NLO(proc,self.model)
else:
commandline += self.get_LO_definition_from_NLO(proc,self.model, real_only=True)
else:
commandline += self.get_LO_definition_from_NLO(proc,self.model)
commandline = commandline.replace('add process', 'generate',1)
logger.info(commandline)
try:
mgcmd.exec_cmd(commandline, precmd=True, errorhandling=False)
except diagram_generation.NoDiagramException:
commandline=''
for proc in data['processes']:
if '[' not in proc:
raise
# pass to virtsq=
base, post = proc.split('[',1)
nlo_order, post = post.split(']',1)
if '=' not in nlo_order:
nlo_order = 'virt=%s' % nlo_order
elif 'noborn' in nlo_order:
nlo_order = nlo_order.replace('noborn', 'virt')
commandline += "add process %s [%s] %s;" % (base,nlo_order,post)
commandline = commandline.replace('add process', 'generate',1)
logger.info("RETRY with %s", commandline)
mgcmd.exec_cmd(commandline, precmd=True)
has_nlo = False
except Exception, error:
raise
commandline = 'output standalone_rw %s' % pjoin(path_me,data['paths'][0])
mgcmd.exec_cmd(commandline, precmd=True)
logger.info('Done %.4g' % (time.time()-start))
self.has_standalone_dir = True
# 3. Store id to directory information ---------------------------------
matrix_elements = mgcmd._curr_matrix_elements.get_matrix_elements()
to_check = [] # list of tag that do not have a Pdir at creation time.
for me in matrix_elements:
for proc in me.get('processes'):
initial = [] #filled in the next line
final = [l.get('id') for l in proc.get('legs')\
if l.get('state') or initial.append(l.get('id'))]
order = (initial, final)
tag = proc.get_initial_final_ids()
decay_finals = proc.get_final_ids_after_decay()
if tag[1] != decay_finals:
order = (initial, list(decay_finals))
decay_finals.sort()
tag = (tag[0], tuple(decay_finals))
Pdir = pjoin(path_me, data['paths'][0], 'SubProcesses',
'P%s' % me.get('processes')[0].shell_string())
if not os.path.exists(Pdir):
to_check.append(tag)
continue
if tag in data['id2path']:
if not Pdir == data['id2path'][tag][1]:
misc.sprint(tag, Pdir, data['id2path'][tag][1])
raise self.InvalidCmd, '2 different process have the same final states. This module can not handle such situation'
else:
continue
# build the helicity dictionary
hel_nb = 0
hel_dict = {9:0} # unknown helicity -> use full ME
for helicities in me.get_helicity_matrix():
hel_nb +=1 #fortran starts at 1
hel_dict[tuple(helicities)] = hel_nb
data['id2path'][tag] = [order, Pdir, hel_dict]
for tag in to_check:
if tag not in self.id_to_path:
logger.warning("no valid path for %s" % (tag,))
#raise self.InvalidCmd, "no valid path for %s" % (tag,)
# 4. Check MadLoopParam for Loop induced
if os.path.exists(pjoin(path_me, data['paths'][0], 'Cards', 'MadLoopParams.dat')):
MLCard = banner.MadLoopParam(pjoin(path_me, data['paths'][0], 'Cards', 'MadLoopParams.dat'))
MLCard.set('WriteOutFilters', False)
MLCard.set('UseLoopFilter', False)
MLCard.set("DoubleCheckHelicityFilter", False)
MLCard.set("HelicityFilterLevel", 0)
MLCard.write(pjoin(path_me, data['paths'][0], 'SubProcesses', 'MadLoopParams.dat'),
pjoin(path_me, data['paths'][0], 'Cards', 'MadLoopParams.dat'),
commentdefault=False)
if self.multicore == 'create':
try:
misc.compile(['OLP_static'], cwd=pjoin(path_me, data['paths'][0],'SubProcesses'),
nb_core=self.mother.options['nb_core'])
except:
misc.compile(['OLP_static'], cwd=pjoin(path_me, data['paths'][0],'SubProcesses'))
if os.path.exists(pjoin(path_me, data['paths'][1], 'Cards', 'MadLoopParams.dat')):
if self.multicore == 'create':
print "compile OLP", data['paths'][1]
try:
misc.compile(['OLP_static'], cwd=pjoin(path_me, data['paths'][1],'SubProcesses'),
nb_core=self.mother.options['nb_core'])
except:
misc.compile(['OLP_static'], cwd=pjoin(path_me, data['paths'][1],'SubProcesses'))
# 5. create the virtual for NLO reweighting ---------------------------
if has_nlo and 'NLO' in self.rwgt_mode:
# Do not pass here for LO/NLO_tree
start = time.time()
commandline=''
for proc in data['processes']:
if '[' not in proc:
pass
else:
proc = proc.replace('[', '[ virt=')
commandline += "add process %s ;" % proc
# deactivate golem since it creates troubles
old_options = dict(mgcmd.options)
if mgcmd.options['golem'] or mgcmd.options['pjfry']:
logger.info(" When doing NLO reweighting, MG5aMC cannot use the loop reduction algorithms Golem and/or PJFry++")
mgcmd.options['golem'] = None
mgcmd.options['pjfry'] = None
commandline = commandline.replace('add process', 'generate',1)
logger.info(commandline)
mgcmd.exec_cmd(commandline, precmd=True)
commandline = 'output standalone_rw %s -f' % pjoin(path_me, data['paths'][1])
mgcmd.exec_cmd(commandline, precmd=True)
#put back golem to original value
mgcmd.options['golem'] = old_options['golem']
mgcmd.options['pjfry'] = old_options['pjfry']
# update make_opts
m_opts = {}
if mgcmd.options['lhapdf']:
#lhapdfversion = subprocess.Popen([mgcmd.options['lhapdf'], '--version'],
# stdout = subprocess.PIPE).stdout.read().strip()[0]
m_opts['lhapdf'] = True
m_opts['f2pymode'] = True
m_opts['lhapdfversion'] = 5 # 6 always fail on my computer since 5 is compatible but slower always use 5
m_opts['llhapdf'] = self.mother.get_lhapdf_libdir()
else:
raise Exception, "NLO reweighting requires LHAPDF to work correctly"
path = pjoin(path_me,data['paths'][1], 'Source', 'make_opts')
common_run_interface.CommonRunCmd.update_make_opts_full(path, m_opts)
logger.info('Done %.4g' % (time.time()-start))
# Download LHAPDF SET
common_run_interface.CommonRunCmd.install_lhapdf_pdfset_static(\
mgcmd.options['lhapdf'], None, self.banner.run_card.get_lhapdf_id())
# now store the id information
matrix_elements = mgcmd._curr_matrix_elements.get_matrix_elements()
for me in matrix_elements:
for proc in me.get('processes'):
initial = [] #filled in the next line
final = [l.get('id') for l in proc.get('legs')\
if l.get('state') or initial.append(l.get('id'))]
order = (initial, final)
tag = proc.get_initial_final_ids()
decay_finals = proc.get_final_ids_after_decay()
if tag[1] != decay_finals:
order = (initial, list(decay_finals))
decay_finals.sort()
tag = (tag[0], tuple(decay_finals))
Pdir = pjoin(path_me, data['paths'][1], 'SubProcesses',
'P%s' % me.get('processes')[0].shell_string())
assert os.path.exists(Pdir), "Pdir %s do not exists" % Pdir
if (tag,'V') in data['id2path']:
if not Pdir == data['id2path'][(tag,'V')][1]:
misc.sprint(tag, Pdir, self.id_to_path[(tag,'V')][1])
raise self.InvalidCmd, '2 different process have the same final states. This module can not handle such situation'
else:
continue
# build the helicity dictionary
hel_nb = 0
hel_dict = {9:0} # unknown helicity -> use full ME
for helicities in me.get_helicity_matrix():
hel_nb +=1 #fortran starts at 1
hel_dict[tuple(helicities)] = hel_nb
data['id2path'][(tag,'V')] = [order, Pdir, hel_dict]
#compile the module to combine the weight
misc.compile(cwd=pjoin(path_me, data['paths'][1], 'Source'))
#link it
if path_me not in sys.path:
sys.path.insert(0, os.path.realpath(path_me))
with misc.chdir(pjoin(path_me)):
mymod = __import__('%s.Source.rwgt2py' % data['paths'][1], globals(), locals(), [],-1)
mymod = mymod.Source.rwgt2py
with misc.stdchannel_redirected(sys.stdout, os.devnull):
mymod.initialise([self.banner.run_card['lpp1'],
self.banner.run_card['lpp2']],
self.banner.run_card.get_lhapdf_id())
self.combine_wgt = mymod.get_wgt
if self.multicore == 'create':
print "compile OLP", data['paths'][1]
misc.compile(['OLP_static'], cwd=pjoin(path_me, data['paths'][1],'SubProcesses'),
nb_core=self.mother.options['nb_core'])
elif has_nlo and not second and self.rwgt_mode == ['NLO_tree']:
# We do not have any virtual reweighting to do but we still have to
#combine the weights.
#Idea:create a fake directory.
start = time.time()
commandline='import model loop_sm;generate g g > e+ ve [virt=QCD]'
# deactivate golem since it creates troubles
old_options = dict(mgcmd.options)
mgcmd.options['golem'] = None
mgcmd.options['pjfry'] = None
commandline = commandline.replace('add process', 'generate',1)
logger.info(commandline)
mgcmd.exec_cmd(commandline, precmd=True)
commandline = 'output standalone_rw %s -f' % pjoin(path_me, data['paths'][1])
mgcmd.exec_cmd(commandline, precmd=True)
#put back golem to original value
mgcmd.options['golem'] = old_options['golem']
mgcmd.options['pjfry'] = old_options['pjfry']
# update make_opts
m_opts = {}
if mgcmd.options['lhapdf']:
#lhapdfversion = subprocess.Popen([mgcmd.options['lhapdf'], '--version'],
# stdout = subprocess.PIPE).stdout.read().strip()[0]
m_opts['lhapdf'] = True
m_opts['f2pymode'] = True
m_opts['lhapdfversion'] = 5 # 6 always fail on my computer since 5 is compatible but slower always use 5
m_opts['llhapdf'] = self.mother.get_lhapdf_libdir()
else:
raise Exception, "NLO_tree reweighting requires LHAPDF to work correctly"
path = pjoin(path_me,data['paths'][1], 'Source', 'make_opts')
common_run_interface.CommonRunCmd.update_make_opts_full(path, m_opts)
logger.info('Done %.4g' % (time.time()-start))
# Download LHAPDF SET
common_run_interface.CommonRunCmd.install_lhapdf_pdfset_static(\
mgcmd.options['lhapdf'], None, self.banner.run_card.get_lhapdf_id())
#compile the module to combine the weight
misc.compile(cwd=pjoin(path_me, data['paths'][1], 'Source'))
#link it
with misc.chdir(pjoin(path_me)):
if path_me not in sys.path:
sys.path.insert(0, path_me)
mymod = __import__('%s.Source.rwgt2py' % data['paths'][1], globals(), locals(), [],-1)
mymod = mymod.Source.rwgt2py
with misc.stdchannel_redirected(sys.stdout, os.devnull):
mymod.initialise([self.banner.run_card['lpp1'],
self.banner.run_card['lpp2']],
self.banner.run_card.get_lhapdf_id())
self.combine_wgt = mymod.get_wgt
# 6. If we need a new model/process-------------------------------------
if (self.second_model or self.second_process) and not second:
self.create_standalone_directory(second=True)
if not second:
self.has_nlo = has_nlo
def load_model(self, name, use_mg_default, complex_mass=False):
"""load the model"""
loop = False
logger.info('detected model: %s. Loading...' % name)
model_path = name
# Import model
base_model = import_ufo.import_model(name, decay=False,
complex_mass_scheme=complex_mass)
if use_mg_default:
base_model.pass_particles_name_in_mg_default()
self.model = base_model
self.mg5cmd._curr_model = self.model
self.mg5cmd.process_model()
def save_to_pickle(self):
import madgraph.iolibs.save_load_object as save_load_object
to_save = {}
to_save['id_to_path'] = self.id_to_path
if hasattr(self, 'id_to_path_second'):
to_save['id_to_path_second'] = self.id_to_path_second
else:
to_save['id_to_path_second'] = {}
to_save['all_cross_section'] = self.all_cross_section
to_save['processes'] = self.processes
to_save['second_process'] = self.second_process
if self.second_model:
to_save['second_model'] =True
else:
to_save['second_model'] = None
to_save['rwgt_dir'] = self.rwgt_dir
to_save['has_nlo'] = self.has_nlo
to_save['rwgt_mode'] = self.rwgt_mode
to_save['rwgt_name'] = self.options['rwgt_name']
name = pjoin(self.rwgt_dir, 'rw_me', 'rwgt.pkl')
save_load_object.save_to_file(name, to_save)
    def load_from_pickle(self, keep_name=False):
        """Restore the reweighting state written by ``save_to_pickle``.

        Absolute paths stored in the pickle are rebased from the rwgt_dir
        recorded at save time onto the current ``self.rwgt_dir``.  In NLO
        mode the compiled ``rwgt2py`` module is (re)imported and initialised.

        :param bool keep_name: if True, also restore the saved rwgt_name
        """
        import madgraph.iolibs.save_load_object as save_load_object
        obj = save_load_object.load_from_file( pjoin(self.rwgt_dir, 'rw_me', 'rwgt.pkl'))
        self.has_standalone_dir = True
        self.options = {'curr_dir': os.path.realpath(os.getcwd()),
                        'rwgt_name': None}
        if keep_name:
            self.options['rwgt_name'] = obj['rwgt_name']
        # rwgt_dir in effect when the pickle was written; stored paths are
        # rewritten below to point inside the current rwgt_dir instead.
        old_rwgt = obj['rwgt_dir']
        # path to fortran executable
        self.id_to_path = {}
        for key , (order, Pdir, hel_dict) in obj['id_to_path'].items():
            new_P = Pdir.replace(old_rwgt, self.rwgt_dir)
            self.id_to_path[key] = [order, new_P, hel_dict]
        # path to fortran executable (for second directory)
        self.id_to_path_second = {}
        for key , (order, Pdir, hel_dict) in obj['id_to_path_second'].items():
            new_P = Pdir.replace(old_rwgt, self.rwgt_dir)
            self.id_to_path_second[key] = [order, new_P, hel_dict]
        self.all_cross_section = obj['all_cross_section']
        self.processes = obj['processes']
        self.second_process = obj['second_process']
        self.second_model = obj['second_model']
        self.has_nlo = obj['has_nlo']
        if not self.rwgt_mode:
            # No mode requested on the command line: fall back to the saved one.
            self.rwgt_mode = obj['rwgt_mode']
            logger.info("mode set to %s" % self.rwgt_mode)
        if self.has_nlo and 'NLO' in self.rwgt_mode:
            # Import (compiling on demand) the f2py module that combines the
            # virtual weights for NLO reweighting.
            path = pjoin(obj['rwgt_dir'], 'rw_mevirt','Source')
            sys.path.insert(0, path)
            try:
                mymod = __import__('rwgt2py', globals(), locals())
            except ImportError:
                misc.compile(['rwgt2py.so'], cwd=path)
                mymod = __import__('rwgt2py', globals(), locals())
            with misc.stdchannel_redirected(sys.stdout, os.devnull):
                mymod.initialise([self.banner.run_card['lpp1'],
                                  self.banner.run_card['lpp2']],
                                  self.banner.run_card.get_lhapdf_id())
            self.combine_wgt = mymod.get_wgt
| 45.797052 | 178 | 0.528681 |
63d89492a2eb8ef90198fd02c310f67c2c7ad703 | 1,978 | py | Python | aliyun-python-sdk-cdn/aliyunsdkcdn/request/v20141111/SetL2OssKeyConfigRequest.py | jia-jerry/aliyun-openapi-python-sdk | e90f3683a250cfec5b681b5f1d73a68f0dc9970d | [
"Apache-2.0"
] | 1 | 2021-03-08T02:59:17.000Z | 2021-03-08T02:59:17.000Z | aliyun-python-sdk-cdn/aliyunsdkcdn/request/v20141111/SetL2OssKeyConfigRequest.py | jia-jerry/aliyun-openapi-python-sdk | e90f3683a250cfec5b681b5f1d73a68f0dc9970d | [
"Apache-2.0"
] | 1 | 2020-05-31T14:51:47.000Z | 2020-05-31T14:51:47.000Z | aliyun-python-sdk-cdn/aliyunsdkcdn/request/v20141111/SetL2OssKeyConfigRequest.py | jia-jerry/aliyun-openapi-python-sdk | e90f3683a250cfec5b681b5f1d73a68f0dc9970d | [
"Apache-2.0"
] | null | null | null | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkcdn.endpoint import endpoint_data
class SetL2OssKeyConfigRequest(RpcRequest):
    """RPC request object for the CDN ``SetL2OssKeyConfig`` API (2014-11-11)."""

    def __init__(self):
        RpcRequest.__init__(self, 'Cdn', '2014-11-11', 'SetL2OssKeyConfig')
        self.set_method('POST')
        # Forward endpoint resolution data when the installed core SDK
        # supports regional endpoint maps.
        if hasattr(self, "endpoint_map"):
            setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
        if hasattr(self, "endpoint_regional"):
            setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

    def get_DomainName(self):
        """Return the DomainName query parameter (or None)."""
        return self.get_query_params().get('DomainName')

    def set_DomainName(self, DomainName):
        """Set the DomainName query parameter."""
        self.add_query_param('DomainName', DomainName)

    def get_OwnerId(self):
        """Return the OwnerId query parameter (or None)."""
        return self.get_query_params().get('OwnerId')

    def set_OwnerId(self, OwnerId):
        """Set the OwnerId query parameter."""
        self.add_query_param('OwnerId', OwnerId)

    def get_SecurityToken(self):
        """Return the SecurityToken query parameter (or None)."""
        return self.get_query_params().get('SecurityToken')

    def set_SecurityToken(self, SecurityToken):
        """Set the SecurityToken query parameter."""
        self.add_query_param('SecurityToken', SecurityToken)

    def get_PrivateOssAuth(self):
        """Return the PrivateOssAuth query parameter (or None)."""
        return self.get_query_params().get('PrivateOssAuth')

    def set_PrivateOssAuth(self, PrivateOssAuth):
        """Set the PrivateOssAuth query parameter."""
        self.add_query_param('PrivateOssAuth', PrivateOssAuth)
240c1740948e7fda97c16c719124f629198f56ee | 1,535 | py | Python | test/ec/test_key_agreement.py | Tomko10/pyecsca | 900503e602c3079c6293e17f297e3b111ba9611a | [
"MIT"
] | 24 | 2019-07-01T00:27:24.000Z | 2022-02-17T00:46:28.000Z | test/ec/test_key_agreement.py | Tomko10/pyecsca | 900503e602c3079c6293e17f297e3b111ba9611a | [
"MIT"
] | 18 | 2020-12-10T15:08:56.000Z | 2022-03-01T11:44:37.000Z | test/ec/test_key_agreement.py | Tomko10/pyecsca | 900503e602c3079c6293e17f297e3b111ba9611a | [
"MIT"
] | 7 | 2020-02-20T18:44:29.000Z | 2021-11-30T21:16:44.000Z | from unittest import TestCase
from parameterized import parameterized
from pyecsca.ec.params import get_params
from pyecsca.ec.key_agreement import (
ECDH_NONE,
ECDH_SHA1,
ECDH_SHA224,
ECDH_SHA256,
ECDH_SHA384,
ECDH_SHA512,
)
from pyecsca.ec.mod import Mod
from pyecsca.ec.mult import LTRMultiplier
class KeyAgreementTests(TestCase):
    """Check that each ECDH variant is symmetric between the two parties."""

    def setUp(self):
        self.secp128r1 = get_params("secg", "secp128r1", "projective")
        coords = self.secp128r1.curve.coordinate_model
        self.add = coords.formulas["add-2007-bl"]
        self.dbl = coords.formulas["dbl-2007-bl"]
        self.mult = LTRMultiplier(self.add, self.dbl)
        self.mult.init(self.secp128r1, self.secp128r1.generator)
        # Two fixed keypairs (party A and party B) on secp128r1.
        self.priv_a = Mod(0xDEADBEEF, self.secp128r1.order)
        self.pub_a = self.mult.multiply(int(self.priv_a))
        self.priv_b = Mod(0xCAFEBABE, self.secp128r1.order)
        self.pub_b = self.mult.multiply(int(self.priv_b))

    @parameterized.expand(
        [
            ("NONE", ECDH_NONE),
            ("SHA1", ECDH_SHA1),
            ("SHA224", ECDH_SHA224),
            ("SHA256", ECDH_SHA256),
            ("SHA384", ECDH_SHA384),
            ("SHA512", ECDH_SHA512),
        ]
    )
    def test_all(self, name, algo):
        """Both derivations of the shared secret must agree."""
        shared_ab = algo(self.mult, self.secp128r1, self.pub_a, self.priv_b).perform()
        shared_ba = algo(self.mult, self.secp128r1, self.pub_b, self.priv_a).perform()
        self.assertEqual(shared_ab, shared_ba)
# TODO: Add KAT-based tests here.
| 33.369565 | 86 | 0.657329 |
0920f5bb05990dcc278f2fa96f7b62288601cdbb | 6,315 | py | Python | baselines/common/running_mean_std.py | rwill128/baselines | 24dd0c80db01623bb1224ab044b64da3fbec63cc | [
"MIT"
] | null | null | null | baselines/common/running_mean_std.py | rwill128/baselines | 24dd0c80db01623bb1224ab044b64da3fbec63cc | [
"MIT"
] | null | null | null | baselines/common/running_mean_std.py | rwill128/baselines | 24dd0c80db01623bb1224ab044b64da3fbec63cc | [
"MIT"
] | null | null | null | import tensorflow as tf
import numpy as np
from baselines.common.tf_util import get_session
class RunningMeanStd(object):
    """Tracks the running mean and variance of a stream of batches.

    Uses the parallel-merge variance algorithm, see
    https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Parallel_algorithm
    """

    def __init__(self, epsilon=1e-4, shape=()):
        # epsilon acts as a tiny prior count so the very first update is stable.
        self.mean = np.zeros(shape, dtype=np.float64)
        self.var = np.ones(shape, dtype=np.float64)
        self.count = epsilon

    def update(self, x):
        """Fold a batch ``x`` (batch axis first) into the running moments."""
        n = x.shape[0]
        self.update_from_moments(np.mean(x, axis=0), np.var(x, axis=0), n)

    def update_from_moments(self, batch_mean, batch_var, batch_count):
        """Merge externally computed batch moments into the running state."""
        merged = update_mean_var_count_from_moments(
            self.mean, self.var, self.count, batch_mean, batch_var, batch_count)
        self.mean, self.var, self.count = merged


def update_mean_var_count_from_moments(mean, var, count, batch_mean, batch_var, batch_count):
    """Combine running moments with a batch's moments (Chan et al.).

    Returns the merged ``(mean, var, count)`` tuple; see
    https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Parallel_algorithm
    """
    total_count = count + batch_count
    delta = batch_mean - mean
    merged_mean = mean + delta * batch_count / total_count
    # Combine the two sums of squared deviations (the "M2" terms).
    m2_running = var * count
    m2_batch = batch_var * batch_count
    m2 = m2_running + m2_batch + np.square(delta) * count * batch_count / total_count
    merged_var = m2 / total_count
    return merged_mean, merged_var, total_count
class TfRunningMeanStd(object):
    # https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Parallel_algorithm
    '''
    TensorFlow variables-based implementation of computing running mean and std.
    Benefit of this implementation is that it can be saved / loaded together
    with the tensorflow model (the state lives in TF variables).
    '''
    def __init__(self, epsilon=1e-4, shape=(), scope=''):
        sess = get_session()

        # Placeholders used to push freshly merged moments into the variables.
        self._new_mean = tf.compat.v1.placeholder(shape=shape, dtype=tf.float64)
        self._new_var = tf.compat.v1.placeholder(shape=shape, dtype=tf.float64)
        self._new_count = tf.compat.v1.placeholder(shape=(), dtype=tf.float64)

        # State variables; AUTO_REUSE lets several instances share a scope.
        with tf.compat.v1.variable_scope(scope, reuse=tf.compat.v1.AUTO_REUSE):
            self._mean = tf.compat.v1.get_variable('mean', initializer=np.zeros(shape, 'float64'), dtype=tf.float64)
            self._var = tf.compat.v1.get_variable('std', initializer=np.ones(shape, 'float64'), dtype=tf.float64)
            self._count = tf.compat.v1.get_variable('count', initializer=np.full((), epsilon, 'float64'),
                                                    dtype=tf.float64)

        # Single grouped op that writes all three placeholders at once.
        self.update_ops = tf.group([
            self._var.assign(self._new_var),
            self._mean.assign(self._new_mean),
            self._count.assign(self._new_count)
        ])

        sess.run(tf.compat.v1.variables_initializer([self._mean, self._var, self._count]))
        self.sess = sess
        self._set_mean_var_count()

    def _set_mean_var_count(self):
        # Refresh the cached numpy copies (self.mean/var/count) from TF state.
        self.mean, self.var, self.count = self.sess.run([self._mean, self._var, self._count])

    def update(self, x):
        # Compute batch moments in numpy, merge on the host, then push the
        # merged result back into the TF variables.
        batch_mean = np.mean(x, axis=0)
        batch_var = np.var(x, axis=0)
        batch_count = x.shape[0]

        new_mean, new_var, new_count = update_mean_var_count_from_moments(self.mean, self.var, self.count, batch_mean,
                                                                          batch_var, batch_count)

        self.sess.run(self.update_ops, feed_dict={
            self._new_mean: new_mean,
            self._new_var: new_var,
            self._new_count: new_count
        })

        self._set_mean_var_count()
def test_runningmeanstd():
    """RunningMeanStd must match numpy's moments over the concatenated data."""
    cases = [
        (np.random.randn(3), np.random.randn(4), np.random.randn(5)),
        (np.random.randn(3, 2), np.random.randn(4, 2), np.random.randn(5, 2)),
    ]
    for x1, x2, x3 in cases:
        rms = RunningMeanStd(epsilon=0.0, shape=x1.shape[1:])
        combined = np.concatenate([x1, x2, x3], axis=0)
        expected = [combined.mean(axis=0), combined.var(axis=0)]
        for batch in (x1, x2, x3):
            rms.update(batch)
        np.testing.assert_allclose(expected, [rms.mean, rms.var])
def test_tf_runningmeanstd():
    """TfRunningMeanStd must match numpy's moments over the concatenated data."""
    cases = [
        (np.random.randn(3), np.random.randn(4), np.random.randn(5)),
        (np.random.randn(3, 2), np.random.randn(4, 2), np.random.randn(5, 2)),
    ]
    for x1, x2, x3 in cases:
        # Random scope suffix so repeated invocations do not collide.
        scope = 'running_mean_std' + str(np.random.randint(0, 128))
        rms = TfRunningMeanStd(epsilon=0.0, shape=x1.shape[1:], scope=scope)
        combined = np.concatenate([x1, x2, x3], axis=0)
        expected = [combined.mean(axis=0), combined.var(axis=0)]
        for batch in (x1, x2, x3):
            rms.update(batch)
        np.testing.assert_allclose(expected, [rms.mean, rms.var])
def profile_tf_runningmeanstd():
    """Crude wall-clock comparison of the numpy- and TF-backed running moments."""
    import time
    from baselines.common import tf_util
    tf_util.get_session(config=tf.compat.v1.ConfigProto(
        inter_op_parallelism_threads=1,
        intra_op_parallelism_threads=1,
        allow_soft_placement=True
    ))

    def timed(fn, repeats):
        # Run fn() `repeats` times; return (elapsed seconds, last result).
        begin = time.time()
        result = None
        for _ in range(repeats):
            result = fn()
        return time.time() - begin, result

    x = np.random.random((376,))
    n_trials = 10000
    rms = RunningMeanStd()
    tfrms = TfRunningMeanStd()

    np_update, _ = timed(lambda: rms.update(x), n_trials)
    tf_update, _ = timed(lambda: tfrms.update(x), n_trials)
    print('rms update time ({} trials): {} s'.format(n_trials, np_update))
    print('tfrms update time ({} trials): {} s'.format(n_trials, tf_update))

    np_read, z1 = timed(lambda: rms.mean, n_trials)
    tf_read, z2 = timed(lambda: tfrms.mean, n_trials)
    assert z1 == z2
    print('rms get mean time ({} trials): {} s'.format(n_trials, np_read))
    print('tfrms get mean time ({} trials): {} s'.format(n_trials, tf_read))
# NOTE: the following module-level string is dead code -- a leftover
# chrome-trace profiling snippet kept for reference only; it is never executed.
'''
options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE) #pylint: disable=E1101
run_metadata = tf.RunMetadata()
profile_opts = dict(options=options, run_metadata=run_metadata)
from tensorflow.python.client import timeline
fetched_timeline = timeline.Timeline(run_metadata.step_stats) #pylint: disable=E1101
chrome_trace = fetched_timeline.generate_chrome_trace_format()
outfile = '/tmp/timeline.json'
with open(outfile, 'wt') as f:
    f.write(chrome_trace)
print('Successfully saved profile to {}. Exiting.'.format(outfile))
exit(0)
'''
# Run the benchmark when this module is executed as a script.
if __name__ == '__main__':
    profile_tf_runningmeanstd()
| 33.770053 | 118 | 0.632462 |
d3e8afc8b4b0b6b323b738aa12a32168d61b73a6 | 665 | py | Python | src/ensembl/production/dbcopy/migrations/0002_adding_request_date.py | luca-drf/ensembl-prodinf-dbcopy | d6ee2e402fd4eba1f85a9455c007492080732941 | [
"Apache-2.0"
] | null | null | null | src/ensembl/production/dbcopy/migrations/0002_adding_request_date.py | luca-drf/ensembl-prodinf-dbcopy | d6ee2e402fd4eba1f85a9455c007492080732941 | [
"Apache-2.0"
] | 2 | 2021-09-08T18:09:15.000Z | 2022-01-20T16:43:22.000Z | src/ensembl/production/dbcopy/migrations/0002_adding_request_date.py | luca-drf/ensembl-prodinf-dbcopy | d6ee2e402fd4eba1f85a9455c007492080732941 | [
"Apache-2.0"
] | 1 | 2021-06-08T13:10:44.000Z | 2021-06-08T13:10:44.000Z | # Generated by Django 2.2.13 on 2020-06-08 10:16
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('ensembl_dbcopy', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='requestjob',
options={'verbose_name': 'Copy job', 'verbose_name_plural': 'Copy jobs'},
),
migrations.AddField(
model_name='requestjob',
name='request_date',
field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now),
preserve_default=False,
),
]
| 26.6 | 93 | 0.621053 |
f99cc6783a98400826b8b0875e98532a20d8f72b | 3,394 | py | Python | learning_log/learning_log/settings.py | adrian88szymanski/Python_Crash_Course_Eric_Matthes | 74e9a627e3e044ea30e4a8579843d95fe8e4fc14 | [
"MIT"
] | 8 | 2021-07-21T02:52:49.000Z | 2022-02-08T20:47:09.000Z | learning_log/learning_log/settings.py | barbarian47/Python_Crash_Course_Eric_Matthes | 74e9a627e3e044ea30e4a8579843d95fe8e4fc14 | [
"MIT"
] | null | null | null | learning_log/learning_log/settings.py | barbarian47/Python_Crash_Course_Eric_Matthes | 74e9a627e3e044ea30e4a8579843d95fe8e4fc14 | [
"MIT"
] | 7 | 2021-06-10T12:27:56.000Z | 2022-01-29T13:53:15.000Z | """
Django settings for learning_log project.
Generated by 'django-admin startproject' using Django 3.1.4.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '+m3(zicynsuur!8wfm(wz)2ulryx1^*hvfwfr15b&exq+(@wqg'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    # Local project apps.
    'learning_logs',
    'users',
    # Third-party apps.
    'bootstrap4',
    # Django built-ins.
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'learning_log.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        # BASE_DIR is a pathlib.Path, so use the / operator here.  The
        # original used os.path.join without ever importing `os`, which
        # raised NameError when the settings module was imported.
        'DIRS': [BASE_DIR / 'templates'],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'learning_log.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
# Development database: a SQLite file at the project root.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': BASE_DIR / 'db.sqlite3',
    }
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
# My settings
# Where @login_required redirects unauthenticated users.
LOGIN_URL = 'users:login'
# Heroku settings
import django_heroku
# Applies Heroku-appropriate overrides (database, static files, ...) to this
# module's settings in place.
django_heroku.settings(locals())
# Allow a DEBUG env var to override whatever value is set above.
# NOTE(review): this relies on the global `os` name, but the module never
# imports os as written, so these lines raise NameError; confirm an
# `import os` exists at the top of the file.
if os.environ.get('DEBUG') == 'TRUE':
    DEBUG = True
elif os.environ.get('DEBUG') == 'FALSE':
    DEBUG = False
d0a47c287177019f839f3dc9be0e68e0fc2357c1 | 953 | py | Python | openstack_dashboard/dashboards/admin/metadata_defs/panel.py | ankur-gupta91/block_storage | 938548a3d4507dc56c1c26b442767eb41aa2e610 | [
"Apache-2.0"
] | 9 | 2016-06-03T03:53:24.000Z | 2017-05-20T16:53:23.000Z | openstack_dashboard/dashboards/admin/metadata_defs/panel.py | ankur-gupta91/block_storage | 938548a3d4507dc56c1c26b442767eb41aa2e610 | [
"Apache-2.0"
] | 1 | 2021-03-21T11:48:09.000Z | 2021-03-21T11:48:09.000Z | openstack_dashboard/dashboards/admin/metadata_defs/panel.py | ankur-gupta91/block_storage | 938548a3d4507dc56c1c26b442767eb41aa2e610 | [
"Apache-2.0"
] | 4 | 2016-08-01T10:50:15.000Z | 2017-02-22T12:11:19.000Z | #
# (c) Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
import horizon
from openstack_dashboard.api import glance
class MetadataDefinitions(horizon.Panel):
    """Admin-dashboard panel for managing Glance metadata definitions."""
    name = _("Metadata Definitions")
    slug = 'metadata_defs'
    @staticmethod
    def can_register():
        # The metadata-definitions API is only available in Glance v2+.
        return glance.VERSIONS.active >= 2
| 31.766667 | 78 | 0.728227 |
9379087f0028935d1c7dab0a235f5a7bfc30de9b | 559 | py | Python | IDLE-files/koch_snowflake.py | arvidl/dynamical-systems-with-applications-using-python | db747f550337a7e7ec4a0851b188dd6e2e816a64 | [
"BSD-2-Clause"
] | 106 | 2018-10-10T18:04:02.000Z | 2022-03-11T06:32:38.000Z | IDLE-files/koch_snowflake.py | arvidl/dynamical-systems-with-applications-using-python | db747f550337a7e7ec4a0851b188dd6e2e816a64 | [
"BSD-2-Clause"
] | null | null | null | IDLE-files/koch_snowflake.py | arvidl/dynamical-systems-with-applications-using-python | db747f550337a7e7ec4a0851b188dd6e2e816a64 | [
"BSD-2-Clause"
] | 54 | 2018-02-06T09:47:42.000Z | 2022-03-25T15:41:43.000Z | # Plot the Koch snowflake.
# See Exercise 1(d).
# Run the Module (or type F5).
from turtle import *
def koch_snowflake(length, level):
    """Draw a Koch snowflake: three Koch curves joined by 120-degree turns."""
    speed(0)  # fastest drawing speed
    for _ in range(3):
        plot_side(length, level)
        rt(120)
def plot_side(length, level):
    """Recursively draw one Koch-curve segment of the given length."""
    if level == 0:
        # Base case: a straight segment.
        fd(length)
        return
    third = length / 3
    plot_side(third, level - 1)
    lt(60)
    plot_side(third, level - 1)
    lt(-120)
    plot_side(third, level - 1)
    lt(60)
    plot_side(third, level - 1)
2542b39e2c4b8be21f74fde1941c18f8cee0f1dc | 7,202 | py | Python | lexicon/providers/inwx.py | k-serenade/lexicon | 9475eb01b6beb3ac996ce70a14bc81c0749275c7 | [
"MIT"
] | 1,184 | 2016-02-02T17:08:04.000Z | 2022-03-20T13:15:26.000Z | lexicon/providers/inwx.py | alexAubin/lexicon | 9475eb01b6beb3ac996ce70a14bc81c0749275c7 | [
"MIT"
] | 1,038 | 2016-02-15T21:25:16.000Z | 2022-03-31T13:18:21.000Z | lexicon/providers/inwx.py | alexAubin/lexicon | 9475eb01b6beb3ac996ce70a14bc81c0749275c7 | [
"MIT"
] | 358 | 2016-02-05T09:57:36.000Z | 2022-03-30T07:15:26.000Z | """Module provider for INWX"""
import logging
from lexicon.exceptions import AuthenticationError
from lexicon.providers.base import Provider as BaseProvider
try:
import xmlrpclib # type: ignore
except ImportError:
import xmlrpc.client as xmlrpclib # type: ignore
LOGGER = logging.getLogger(__name__)
NAMESERVER_DOMAINS = ["inwx.com"]
def provider_parser(subparser):
    """Register the INWX-specific command line arguments on ``subparser``."""
    for flag, what in (
        ("--auth-username", "username"),
        ("--auth-password", "password"),
    ):
        subparser.add_argument(flag, help="specify %s for authentication" % what)
class Provider(BaseProvider):
"""
INWX offers a free testing system on https://ote.inwx.com
see https://www.inwx.de/en/offer/api for details about ote and the api
"""
def __init__(self, config):
"""
:param config: command line options
"""
super(Provider, self).__init__(config)
self._auth = {
"user": self._get_provider_option("auth_username"),
"pass": self._get_provider_option("auth_password"),
}
self._domain = self.domain.lower()
self.domain_id = None
endpoint = (
self._get_provider_option("endpoint") or "https://api.domrobot.com/xmlrpc/"
)
self._api = xmlrpclib.ServerProxy(endpoint, allow_none=True)
def _validate_response(self, response, message, exclude_code=None):
"""
validate an api server response
:param dict response: server response to check
:param str message: error message to raise
:param int exclude_code: error codes to exclude from errorhandling
:return:
":raises Exception: on error
"""
if "code" in response and response["code"] >= 2000:
if exclude_code is not None and response["code"] == exclude_code:
return
raise Exception(f"{message}: {response['msg']} ({response['code']})")
# Make any request to validate credentials
def _authenticate(self):
"""
run any request against the API just to make sure the credentials
are valid
:return bool: success status
:raises Exception: on error
"""
opts = {"domain": self._domain}
opts.update(self._auth)
response = self._api.nameserver.info(opts)
try:
self._validate_response(response=response, message="Failed to authenticate")
except Exception as e:
raise AuthenticationError(str(e))
# set to fake id to pass tests, inwx doesn't work on domain id but
# uses domain names for identification
self.domain_id = 1
return True
def _create_record(self, rtype, name, content):
"""
create a record
does nothing if the record already exists
:param str rtype: type of record
:param str name: name of record
:param mixed content: value of record
:return bool: success status
:raises Exception: on error
"""
opts = {
"domain": self._domain,
"type": rtype.upper(),
"name": self._full_name(name),
"content": content,
}
if self._get_lexicon_option("ttl"):
opts["ttl"] = self._get_lexicon_option("ttl")
opts.update(self._auth)
response = self._api.nameserver.createRecord(opts)
self._validate_response(
response=response, message="Failed to create record", exclude_code=2302
)
return True
def _list_records(self, rtype=None, name=None, content=None):
    """
    List all records, optionally filtered by type, name and/or content.

    :param str rtype: type of record
    :param str name: name of record
    :param mixed content: value of record
    :return list: list of found records
    :raises Exception: on error
    """
    query = {"domain": self._domain}
    if rtype is not None:
        query["type"] = rtype.upper()
    if name is not None:
        query["name"] = self._full_name(name)
    if content is not None:
        query["content"] = content
    query.update(self._auth)
    response = self._api.nameserver.info(query)
    self._validate_response(response=response, message="Failed to get records")
    # An empty zone simply omits the "record" key.
    raw_records = response["resData"].get("record", [])
    return [
        {
            "type": record["type"],
            "name": record["name"],
            "ttl": record["ttl"],
            "content": record["content"],
            "id": record["id"],
        }
        for record in raw_records
    ]
def _update_record(self, identifier, rtype=None, name=None, content=None):
    """
    Update a record, addressed either by id or by rtype/name.

    :param int identifier: identifier of record to update
    :param str rtype: type of record
    :param str name: name of record
    :param mixed content: value of record
    :return bool: success status
    :raises Exception: on error
    """
    if identifier:
        record_ids = [identifier]
    else:
        # No explicit id given: update every record matching rtype/name.
        record_ids = [record["id"] for record in self._list_records(rtype, name)]
    for record_id in record_ids:
        changes = {"id": record_id}
        if rtype is not None:
            changes["type"] = rtype.upper()
        if name is not None:
            changes["name"] = self._full_name(name)
        if content is not None:
            changes["content"] = content
        changes.update(self._auth)
        response = self._api.nameserver.updateRecord(changes)
        self._validate_response(
            response=response, message="Failed to update record", exclude_code=2302
        )
    return True
def _delete_record(self, identifier=None, rtype=None, name=None, content=None):
    """
    Delete a record.
    Filter the selection to delete by identifier or rtype/name/content.

    :param int identifier: identifier of record to delete
    :param str rtype: rtype of record
    :param str name: name of record
    :param mixed content: value of record
    :return bool: success status
    :raises Exception: on error
    """
    record_ids = []
    if not identifier:
        # No explicit id: delete every record matching the given filters.
        records = self._list_records(rtype, name, content)
        record_ids = [record["id"] for record in records]
    else:
        record_ids.append(identifier)
    for record_id in record_ids:
        opts = {"id": record_id}
        opts.update(self._auth)
        response = self._api.nameserver.deleteRecord(opts)
        # Fixed copy-paste error: the failure message previously said
        # "Failed to update record" although this is a delete.
        self._validate_response(
            response=response, message="Failed to delete record"
        )
    return True
def _request(self, action="GET", url="/", data=None, query_params=None):
    # Helper _request is not used for INWX provider.
    # This provider talks XML-RPC through self._api instead of raw HTTP, so
    # the generic HTTP helper is a deliberate no-op (presumably required by
    # the provider base-class interface — verify against the base Provider).
    pass
| 32.441441 | 88 | 0.589697 |
7e5ff245401c83fac4dbe37b2c5255f35fa79c3c | 463 | py | Python | newExam/newExam/urls.py | M0673N/Python-Web-Basics | cecc27f7a12f990756edcc8885290eb3b2e487b7 | [
"MIT"
] | null | null | null | newExam/newExam/urls.py | M0673N/Python-Web-Basics | cecc27f7a12f990756edcc8885290eb3b2e487b7 | [
"MIT"
] | null | null | null | newExam/newExam/urls.py | M0673N/Python-Web-Basics | cecc27f7a12f990756edcc8885290eb3b2e487b7 | [
"MIT"
] | null | null | null | from django.conf.urls.static import static
from django.conf import settings
from django.contrib import admin
from django.urls import path, include
from newExam.profile_app.views import show_home
urlpatterns = [
    path('admin/', admin.site.urls),
    # Site landing page; matched before the expense-app include below.
    path('', show_home, name='home'),
    # Expense URLs are mounted at the root; Django falls through to these
    # patterns when the bare '' home pattern does not match.
    path('', include('newExam.expense_app.urls')),
    path('profile/', include('newExam.profile_app.urls'))
    # Serve user-uploaded media during development (static() only emits
    # patterns when DEBUG is enabled).
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| 33.071429 | 65 | 0.7473 |
e62d66a69b9ca6e59df9c2e264f05bd5e7f1e41d | 6,009 | py | Python | scripts/separate_plots.py | JIC-Image-Analysis/senescence-in-field | f310e34df377eb807423c38cf27d1ade0782f5a2 | [
"MIT"
] | null | null | null | scripts/separate_plots.py | JIC-Image-Analysis/senescence-in-field | f310e34df377eb807423c38cf27d1ade0782f5a2 | [
"MIT"
] | null | null | null | scripts/separate_plots.py | JIC-Image-Analysis/senescence-in-field | f310e34df377eb807423c38cf27d1ade0782f5a2 | [
"MIT"
] | null | null | null | """Separate individual plots."""
import os
import argparse
import numpy as np
from scipy.misc import imsave
from jicbioimage.core.image import Image
from jicbioimage.segment import SegmentedImage
from dtoolcore import DataSet, ProtoDataSet
from jicgeometry import Point2D
from dtoolutils import temp_working_dir
def load_segmentation_from_rgb_image(filename):
    """Load a segmentation whose region identifiers are packed into RGB.

    The 24-bit identifier is reassembled as R * 256**2 + G * 256 + B.
    """
    rgb_image = Image.from_file(filename)
    red = rgb_image[:, :, 0].astype(np.uint32)
    green = rgb_image[:, :, 1].astype(np.uint32)
    blue = rgb_image[:, :, 2]
    segmentation = red * 256 * 256 + green * 256 + blue
    return segmentation.view(SegmentedImage)
def generate_region_image(original_image, segmentation, identifier):
    """Generate image of section of original image represented by the region
    of the segmentation with the given identifier."""
    region = segmentation.region_by_identifier(identifier)
    # Stack the region mask into three channels so it can mask an RGB image.
    region_rgb = np.dstack([region] * 3)
    masked_image = region_rgb * original_image
    # Crop to the region's bounding box.
    # NOTE(review): the slices below exclude the max row/column index; use
    # rmax + 1 / cmax + 1 if the last pixel row/column must be kept — verify.
    rmin, rmax = min(region.index_arrays[0]), max(region.index_arrays[0])
    cmin, cmax = min(region.index_arrays[1]), max(region.index_arrays[1])
    image_section = masked_image[rmin:rmax, cmin:cmax]
    return image_section
def find_approx_plot_locs(dataset, identifier):
    """Return array of approximate plot locations based on the corner locations
    identified through clicking with a tagger.
    These are calculated by dividing the space between the corners into a grid
    based on the known numbers of plots (6 horizontal, 5 vertical).
    Points are returned in normalised coordinates."""
    corner_coords = dataset.get_overlay("coords")[identifier]

    def coords_to_point2d(coords):
        # The overlay stores coordinates as strings; convert to numeric Point2D.
        x = float(coords['x'])
        y = float(coords['y'])
        return Point2D(x, y)

    top_left = coords_to_point2d(corner_coords['topLeft'])
    bottom_left = coords_to_point2d(corner_coords['bottomLeft'])
    top_right = coords_to_point2d(corner_coords['topRight'])
    # Basis vectors spanning the field along its vertical/horizontal edges.
    vdiff = bottom_left - top_left
    hdiff = top_right - top_left
    # 6 x 5 grid of evenly spaced plot locations (30 in total).
    plot_locs = []
    for hmult in np.linspace(0, 1, 6):
        for vmult in np.linspace(0, 1, 5):
            plot_locs.append(top_left + hdiff * hmult + vdiff * vmult)
    return plot_locs
def image_coords_to_rel_coords(image, point):
    """Convert absolute (row, col) image coordinates to a normalised Point2D."""
    height, width = image.shape
    y_abs, x_abs = point
    return Point2D(float(x_abs) / width, float(y_abs) / height)
def generate_segmentation_identifier_to_label_map(
    approx_plot_locs,
    segmentation
):
    """Generate dictionary mapping segmentation identifiers to the numerical id
    of the plot in the field. This id should be consistent across images."""
    labels_by_loc = {loc: str(index) for index, loc in enumerate(approx_plot_locs)}

    def closest_loc_label(point):
        # Rank the approximate locations by distance and take the nearest.
        ranked = sorted((point.distance(loc), loc) for loc in approx_plot_locs)
        return labels_by_loc[ranked[0][1]]

    sid_to_label = {}
    for sid in segmentation.identifiers:
        centroid = segmentation.region_by_identifier(sid).centroid
        centroid_rel = image_coords_to_rel_coords(segmentation, centroid)
        sid_to_label[sid] = closest_loc_label(centroid_rel)
    return sid_to_label
def separate_plots(dataset, identifier, resource_dataset, working_dir):
    """Cut the original field image into one PNG per segmented plot.

    Writes region_<plot_number>.png files into working_dir and returns a
    list of (filename, metadata) tuples for staging.
    """
    fpath = dataset.item_content_abspath(identifier)
    segmentation = load_segmentation_from_rgb_image(fpath)
    # Look up the unsegmented source image via the 'from' overlay.
    original_id = dataset.get_overlay('from')[identifier]
    original_fpath = resource_dataset.item_content_abspath(original_id)
    original_image = Image.from_file(original_fpath)
    approx_plot_locs = find_approx_plot_locs(dataset, identifier)
    # Map segmentation ids to stable plot numbers so names match across images.
    sid_to_label = generate_segmentation_identifier_to_label_map(
        approx_plot_locs,
        segmentation
    )
    outputs = []
    # NB: this loop rebinds the 'identifier' parameter with region ids.
    for identifier in segmentation.identifiers:
        image_section = generate_region_image(
            original_image,
            segmentation,
            identifier
        )
        fname = 'region_{}.png'.format(sid_to_label[identifier])
        output_fpath = os.path.join(working_dir, fname)
        # NOTE(review): scipy.misc.imsave was removed in SciPy >= 1.2; this
        # script needs an older SciPy (or a port to imageio.imwrite).
        imsave(output_fpath, image_section)
        outputs.append((fname, {'plot_number': sid_to_label[identifier]}))
    return outputs
def stage_outputs(
    outputs,
    working_dir,
    dataset,
    output_dataset,
    overlays_to_copy,
    identifier
):
    """Copy generated plot images into the output dataset with provenance
    ('from'), copied overlay values and any extra per-item metadata."""
    for filename, metadata in outputs:
        src_abspath = os.path.join(working_dir, filename)
        # Group output items under the human-readable name of the source image.
        useful_name = dataset.get_overlay('useful_name')[identifier]
        relpath = os.path.join(useful_name, filename)
        output_dataset.put_item(src_abspath, relpath)
        # Add 'from' overlay
        output_dataset.add_item_metadata(relpath, 'from', identifier)
        # Copy overlays
        for overlay_name in overlays_to_copy:
            value = dataset.get_overlay(overlay_name)[identifier]
            output_dataset.add_item_metadata(relpath, overlay_name, value)
        # Add extra metadata
        for k, v in metadata.items():
            output_dataset.add_item_metadata(relpath, k, v)
def main():
    """CLI entry point: split one segmented image into per-plot images."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--dataset-uri')
    parser.add_argument('--resource-uri')
    parser.add_argument('--identifier')
    parser.add_argument('--output-uri')
    args = parser.parse_args()
    dataset = DataSet.from_uri(args.dataset_uri)
    resource_dataset = DataSet.from_uri(args.resource_uri)
    output_dataset = ProtoDataSet.from_uri(args.output_uri)
    # Work in a temporary directory; results are staged into output_dataset.
    with temp_working_dir() as working_dir:
        outputs = separate_plots(
            dataset,
            args.identifier,
            resource_dataset,
            working_dir
        )
        overlays_to_copy = ['ordering', 'date']
        stage_outputs(
            outputs,
            working_dir,
            dataset,
            output_dataset,
            overlays_to_copy,
            args.identifier
        )


if __name__ == '__main__':
    main()
| 27.313636 | 79 | 0.689466 |
01ebbcc71fbb0dd4ea7cdebbaeb66230bc95916c | 306 | py | Python | py/widget/get_wh.py | teppchan/tkintertips | dc4b43e86185906d72ba5d5712edee94c76f8b37 | [
"MIT"
] | null | null | null | py/widget/get_wh.py | teppchan/tkintertips | dc4b43e86185906d72ba5d5712edee94c76f8b37 | [
"MIT"
] | null | null | null | py/widget/get_wh.py | teppchan/tkintertips | dc4b43e86185906d72ba5d5712edee94c76f8b37 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import Tkinter as Tk
root=Tk.Tk()
root.title(u"Hello Tk window!")
root["width"]=300
root["height"]=200
def f_btn():
print("width=%d height=%d" % (root["width"], root["height"]))
btn=Tk.Button(root, text="window size", width=30)
btn.pack()
btn["command"]=f_btn
root.mainloop()
| 18 | 65 | 0.663399 |
eb950cbe38261625eec86d12235dcda412d7114d | 2,513 | py | Python | userbot/modules/hash.py | ClauzyingFast/OpenUserBot | a2094ed2ccb2f9d9bfa7c0fd447afa99ee64b210 | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | 14 | 2019-10-27T17:25:12.000Z | 2021-08-29T14:51:25.000Z | userbot/modules/hash.py | ClauzyingFast/OpenUserBot | a2094ed2ccb2f9d9bfa7c0fd447afa99ee64b210 | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | 4 | 2020-06-10T09:44:34.000Z | 2020-07-28T16:17:17.000Z | userbot/modules/hash.py | ClauzyingFast/OpenUserBot | a2094ed2ccb2f9d9bfa7c0fd447afa99ee64b210 | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | 325 | 2019-10-26T09:20:47.000Z | 2022-01-28T19:07:01.000Z | # Copyright (C) 2019 The Raphielscape Company LLC.
#
# Licensed under the Raphielscape Public License, Version 1.c (the "License");
# you may not use this file except in compliance with the License.
#
""" Userbot module containing hash and encode/decode commands. """
from subprocess import PIPE
from subprocess import run as runapp
import pybase64
from userbot import CMD_HELP
from userbot.events import register
@register(outgoing=True, pattern="^.hash (.*)")
async def gethash(hash_q):
    """For .hash command, reply with md5, sha1, sha256 and sha512 of the string.

    Hashes are computed in-process with hashlib instead of shelling out to
    the coreutils ``*sum`` binaries, which avoids writing the text to a
    temporary file and works on platforms without those tools.
    """
    import hashlib

    hashtxt_ = hash_q.pattern_match.group(1)
    data = hashtxt_.encode("utf-8")
    md5 = hashlib.md5(data).hexdigest()
    sha1 = hashlib.sha1(data).hexdigest()
    sha256 = hashlib.sha256(data).hexdigest()
    sha512 = hashlib.sha512(data).hexdigest()
    ans = ("Text: `" + hashtxt_ + "`\nMD5: `" + md5 + "`\nSHA1: `" + sha1 +
           "`\nSHA256: `" + sha256 + "`\nSHA512: `" + sha512 + "`")
    if len(ans) > 4096:
        # Telegram messages cap at 4096 characters; fall back to a text file.
        with open("hashes.txt", "w+") as hashfile:
            hashfile.write(ans)
        await hash_q.client.send_file(
            hash_q.chat_id,
            "hashes.txt",
            reply_to=hash_q.id,
            caption="`It's too big, sending a text file instead. `")
        runapp(["rm", "hashes.txt"], stdout=PIPE)
    else:
        await hash_q.reply(ans)
@register(outgoing=True, pattern="^.base64 (en|de) (.*)")
async def endecrypt(query):
    """For .base64 command, encode or decode the given string as base64."""
    if query.pattern_match.group(1) == "en":
        # Base64 output is always ASCII, so decoding the bytes directly is
        # equivalent to (and clearer than) slicing the bytes repr.
        lething = pybase64.b64encode(
            bytes(query.pattern_match.group(2), "utf-8")).decode("ascii")
        await query.reply("Encoded: `" + lething + "`")
    else:
        # Decoded payloads may not be valid UTF-8, so keep showing the bytes
        # repr (minus the b'…' wrapper) exactly as before.
        lething = str(
            pybase64.b64decode(bytes(query.pattern_match.group(2), "utf-8"),
                               validate=True))[2:]
        await query.reply("Decoded: `" + lething[:-1] + "`")
# Register the help entries shown by the bot's help command.
CMD_HELP.update({"base64": "Find the base64 encoding of the given string"})
CMD_HELP.update({
    "hash":
    "Find the md5, sha1, sha256, sha512 of the string when written into a txt file."
})
| 36.955882 | 84 | 0.608038 |
3b025ce56e71b51a9fc4b6319cec3f498e22de67 | 13,189 | py | Python | heat/core/relational.py | Mystic-Slice/heat | dd1b83d6d8b36cb4a70eefc631f00277b0745fee | [
"MIT"
] | null | null | null | heat/core/relational.py | Mystic-Slice/heat | dd1b83d6d8b36cb4a70eefc631f00277b0745fee | [
"MIT"
] | null | null | null | heat/core/relational.py | Mystic-Slice/heat | dd1b83d6d8b36cb4a70eefc631f00277b0745fee | [
"MIT"
] | null | null | null | """
Functions for relational oprations, i.e. equal/no equal...
"""
from __future__ import annotations
import torch
import numpy as np
from typing import Union
from .communication import MPI
from .dndarray import DNDarray
from . import _operations
from . import dndarray
from . import types
from . import sanitation
from . import factories
__all__ = [
"eq",
"equal",
"ge",
"greater",
"greater_equal",
"gt",
"le",
"less",
"less_equal",
"lt",
"ne",
"not_equal",
]
def eq(x: Union[DNDarray, float, int], y: Union[DNDarray, float, int]) -> DNDarray:
    """
    Element-wise equality comparison.

    Returns a :class:`~heat.core.dndarray.DNDarray` of booleans holding the
    result of comparing the two operands element by element. Either operand
    may be a scalar or a :class:`~heat.core.dndarray.DNDarray`.

    Parameters
    ----------
    x: DNDarray or scalar
        The first operand involved in the comparison
    y: DNDarray or scalar
        The second operand involved in the comparison

    Examples
    ---------
    >>> import heat as ht
    >>> x = ht.float32([[1, 2],[3, 4]])
    >>> ht.eq(x, 3.0)
    DNDarray([[False, False],
              [ True, False]], dtype=ht.bool, device=cpu:0, split=None)
    >>> y = ht.float32([[2, 2], [2, 2]])
    >>> ht.eq(x, y)
    DNDarray([[False, True],
              [False, False]], dtype=ht.bool, device=cpu:0, split=None)
    """
    result = _operations.__binary_op(torch.eq, x, y)
    if result.dtype == types.bool:
        return result
    # The backend may hand back a non-boolean tensor; cast it down explicitly.
    return dndarray.DNDarray(
        result.larray.type(torch.bool),
        result.gshape,
        types.bool,
        result.split,
        result.device,
        result.comm,
        result.balanced,
    )


DNDarray.__eq__ = lambda self, other: eq(self, other)
DNDarray.__eq__.__doc__ = eq.__doc__
def equal(x: Union[DNDarray, float, int], y: Union[DNDarray, float, int]) -> bool:
    """
    Overall comparison of equality between two :class:`~heat.core.dndarray.DNDarray`. Returns ``True`` if two arrays
    have the same size and elements, and ``False`` otherwise.

    Parameters
    ----------
    x: DNDarray or scalar
        The first operand involved in the comparison
    y: DNDarray or scalar
        The second operand involved in the comparison

    Examples
    ---------
    >>> import heat as ht
    >>> x = ht.float32([[1, 2],[3, 4]])
    >>> ht.equal(x, ht.float32([[1, 2],[3, 4]]))
    True
    >>> y = ht.float32([[2, 2], [2, 2]])
    >>> ht.equal(x, y)
    False
    >>> ht.equal(x, 3.0)
    False
    """
    # Normalize the operand combinations: two scalars are wrapped as
    # DNDarrays, while a scalar vs. array comparison can only succeed when
    # the array holds exactly one element.
    if np.isscalar(x) and np.isscalar(y):
        x = factories.array(x)
        y = factories.array(y)
    elif isinstance(x, DNDarray) and np.isscalar(y):
        if x.gnumel == 1:
            return equal(x.item(), y)
        return False
        # y = factories.full_like(x, fill_value=y)
    elif np.isscalar(x) and isinstance(y, DNDarray):
        if y.gnumel == 1:
            return equal(x, y.item())
        return False
        # x = factories.full_like(y, fill_value=x)
    else:  # elif isinstance(x, DNDarray) and isinstance(y, DNDarray):
        if x.gnumel == 1:
            return equal(x.item(), y)
        elif y.gnumel == 1:
            return equal(x, y.item())
        elif not x.comm == y.comm:
            raise NotImplementedError("Not implemented for other comms")
        elif not x.gshape == y.gshape:
            # Differing global shapes can never be equal.
            return False

    # Align the distribution (split axis and per-process chunks) of both
    # operands so the local torch tensors can be compared directly.
    if x.split is None and y.split is None:
        pass
    elif x.split is None and y.split is not None:
        if y.is_balanced(force_check=False):
            x = factories.array(x, split=y.split, copy=False, comm=x.comm, device=x.device)
        else:
            # y is unevenly distributed: slice the replicated x so each
            # process holds exactly the chunk matching y's local chunk.
            target_map = y.lshape_map
            idx = [slice(None)] * x.ndim
            idx[y.split] = slice(
                target_map[: x.comm.rank, y.split].sum(),
                target_map[: x.comm.rank + 1, y.split].sum(),
            )
            x = factories.array(
                x.larray[tuple(idx)], is_split=y.split, copy=False, comm=x.comm, device=x.device
            )
    elif x.split is not None and y.split is None:
        if x.is_balanced(force_check=False):
            y = factories.array(y, split=x.split, copy=False, comm=y.comm, device=y.device)
        else:
            # Mirror of the case above with the roles of x and y swapped.
            target_map = x.lshape_map
            idx = [slice(None)] * y.ndim
            idx[x.split] = slice(
                target_map[: y.comm.rank, x.split].sum(),
                target_map[: y.comm.rank + 1, x.split].sum(),
            )
            y = factories.array(
                y.larray[tuple(idx)], is_split=x.split, copy=False, comm=y.comm, device=y.device
            )
    elif not x.split == y.split:
        raise ValueError(
            "DNDarrays must have the same split axes, found {} and {}".format(x.split, y.split)
        )
    elif not (x.is_balanced(force_check=False) and y.is_balanced(force_check=False)):
        # Both split along the same axis but possibly chunked differently;
        # rebalance so corresponding elements live on the same process.
        x_lmap = x.lshape_map
        y_lmap = y.lshape_map
        if not torch.equal(x_lmap, y_lmap):
            x = x.balance()
            y = y.balance()

    # Compare in a common type so arrays of different dtypes with equal
    # values (e.g. int32 vs. float32) still compare equal.
    result_type = types.result_type(x, y)
    x = x.astype(result_type)
    y = y.astype(result_type)

    if x.larray.numel() > 0:
        result_value = torch.equal(x.larray, y.larray)
    else:
        # Empty local chunk: nothing to compare on this process.
        result_value = True

    # All processes must agree: logical AND over the communicator.
    return x.comm.allreduce(result_value, MPI.LAND)
def ge(x: Union[DNDarray, float, int], y: Union[DNDarray, float, int]) -> DNDarray:
    """
    Element-wise rich comparison ``x>=y``, not commutative.

    Returns a :class:`~heat.core.dndarray.DNDarray` of booleans that is
    ``True`` wherever a value of ``x`` is greater than or equal to the
    corresponding value of ``y``. Either operand may be a scalar or a
    :class:`~heat.core.dndarray.DNDarray`.

    Parameters
    ----------
    x: DNDarray or scalar
        The first operand to be compared greater than or equal to second operand
    y: DNDarray or scalar
        The second operand to be compared less than or equal to first operand

    Examples
    -------
    >>> import heat as ht
    >>> x = ht.float32([[1, 2],[3, 4]])
    >>> ht.ge(x, 3.0)
    DNDarray([[False, False],
              [ True, True]], dtype=ht.bool, device=cpu:0, split=None)
    >>> y = ht.float32([[2, 2], [2, 2]])
    >>> ht.ge(x, y)
    DNDarray([[False, True],
              [ True, True]], dtype=ht.bool, device=cpu:0, split=None)
    """
    result = _operations.__binary_op(torch.ge, x, y)
    if result.dtype == types.bool:
        return result
    # The backend may hand back a non-boolean tensor; cast it down explicitly.
    return dndarray.DNDarray(
        result.larray.type(torch.bool),
        result.gshape,
        types.bool,
        result.split,
        result.device,
        result.comm,
        result.balanced,
    )


DNDarray.__ge__ = lambda self, other: ge(self, other)
DNDarray.__ge__.__doc__ = ge.__doc__

# alias
greater_equal = ge
greater_equal.__doc__ = ge.__doc__
def gt(x: Union[DNDarray, float, int], y: Union[DNDarray, float, int]) -> DNDarray:
    """
    Element-wise rich comparison ``x>y``, not commutative.

    Returns a :class:`~heat.core.dndarray.DNDarray` of booleans that is
    ``True`` wherever a value of ``x`` is strictly greater than the
    corresponding value of ``y``. Either operand may be a scalar or a
    :class:`~heat.core.dndarray.DNDarray`.

    Parameters
    ----------
    x: DNDarray or scalar
        The first operand to be compared greater than second operand
    y: DNDarray or scalar
        The second operand to be compared less than first operand

    Examples
    -------
    >>> import heat as ht
    >>> x = ht.float32([[1, 2],[3, 4]])
    >>> ht.gt(x, 3.0)
    DNDarray([[False, False],
              [False, True]], dtype=ht.bool, device=cpu:0, split=None)
    >>> y = ht.float32([[2, 2], [2, 2]])
    >>> ht.gt(x, y)
    DNDarray([[False, False],
              [ True, True]], dtype=ht.bool, device=cpu:0, split=None)
    """
    result = _operations.__binary_op(torch.gt, x, y)
    if result.dtype == types.bool:
        return result
    # The backend may hand back a non-boolean tensor; cast it down explicitly.
    return dndarray.DNDarray(
        result.larray.type(torch.bool),
        result.gshape,
        types.bool,
        result.split,
        result.device,
        result.comm,
        result.balanced,
    )


DNDarray.__gt__ = lambda self, other: gt(self, other)
DNDarray.__gt__.__doc__ = gt.__doc__

# alias
greater = gt
greater.__doc__ = gt.__doc__
def le(x: Union[DNDarray, float, int], y: Union[DNDarray, float, int]) -> DNDarray:
    """
    Element-wise rich comparison ``x<=y``, not commutative.

    Returns a :class:`~heat.core.dndarray.DNDarray` of booleans that is
    ``True`` wherever a value of ``x`` is less than or equal to the
    corresponding value of ``y``. Either operand may be a scalar or a
    :class:`~heat.core.dndarray.DNDarray`.

    Parameters
    ----------
    x: DNDarray or scalar
        The first operand to be compared less than or equal to second operand
    y: DNDarray or scalar
        The second operand to be compared greater than or equal to first operand

    Examples
    -------
    >>> import heat as ht
    >>> x = ht.float32([[1, 2],[3, 4]])
    >>> ht.le(x, 3.0)
    DNDarray([[ True, True],
              [ True, False]], dtype=ht.bool, device=cpu:0, split=None)
    >>> y = ht.float32([[2, 2], [2, 2]])
    >>> ht.le(x, y)
    DNDarray([[ True, True],
              [False, False]], dtype=ht.bool, device=cpu:0, split=None)
    """
    result = _operations.__binary_op(torch.le, x, y)
    if result.dtype == types.bool:
        return result
    # The backend may hand back a non-boolean tensor; cast it down explicitly.
    return dndarray.DNDarray(
        result.larray.type(torch.bool),
        result.gshape,
        types.bool,
        result.split,
        result.device,
        result.comm,
        result.balanced,
    )


DNDarray.__le__ = lambda self, other: le(self, other)
DNDarray.__le__.__doc__ = le.__doc__

# alias
less_equal = le
less_equal.__doc__ = le.__doc__
def lt(x: Union[DNDarray, float, int], y: Union[DNDarray, float, int]) -> DNDarray:
    """
    Element-wise rich comparison ``x<y``, not commutative.

    Returns a :class:`~heat.core.dndarray.DNDarray` of booleans that is
    ``True`` wherever a value of ``x`` is strictly less than the
    corresponding value of ``y``. Either operand may be a scalar or a
    :class:`~heat.core.dndarray.DNDarray`.

    Parameters
    ----------
    x: DNDarray or scalar
        The first operand to be compared less than second operand
    y: DNDarray or scalar
        The second operand to be compared greater than first operand

    Examples
    -------
    >>> import heat as ht
    >>> x = ht.float32([[1, 2],[3, 4]])
    >>> ht.lt(x, 3.0)
    DNDarray([[ True, True],
              [False, False]], dtype=ht.bool, device=cpu:0, split=None)
    >>> y = ht.float32([[2, 2], [2, 2]])
    >>> ht.lt(x, y)
    DNDarray([[ True, False],
              [False, False]], dtype=ht.bool, device=cpu:0, split=None)
    """
    result = _operations.__binary_op(torch.lt, x, y)
    if result.dtype == types.bool:
        return result
    # The backend may hand back a non-boolean tensor; cast it down explicitly.
    return dndarray.DNDarray(
        result.larray.type(torch.bool),
        result.gshape,
        types.bool,
        result.split,
        result.device,
        result.comm,
        result.balanced,
    )


DNDarray.__lt__ = lambda self, other: lt(self, other)
DNDarray.__lt__.__doc__ = lt.__doc__

# alias
less = lt
less.__doc__ = lt.__doc__
def ne(x: Union[DNDarray, float, int], y: Union[DNDarray, float, int]) -> DNDarray:
    """
    Element-wise rich comparison of non-equality, commutative.

    Returns a :class:`~heat.core.dndarray.DNDarray` of booleans that is
    ``True`` wherever the corresponding values of the two operands differ.
    Either operand may be a scalar or a
    :class:`~heat.core.dndarray.DNDarray`.

    Parameters
    ----------
    x: DNDarray or scalar
        The first operand involved in the comparison
    y: DNDarray or scalar
        The second operand involved in the comparison

    Examples
    ---------
    >>> import heat as ht
    >>> x = ht.float32([[1, 2],[3, 4]])
    >>> ht.ne(x, 3.0)
    DNDarray([[ True, True],
              [False, True]], dtype=ht.bool, device=cpu:0, split=None)
    >>> y = ht.float32([[2, 2], [2, 2]])
    >>> ht.ne(x, y)
    DNDarray([[ True, False],
              [ True, True]], dtype=ht.bool, device=cpu:0, split=None)
    """
    result = _operations.__binary_op(torch.ne, x, y)
    if result.dtype == types.bool:
        return result
    # The backend may hand back a non-boolean tensor; cast it down explicitly.
    return dndarray.DNDarray(
        result.larray.type(torch.bool),
        result.gshape,
        types.bool,
        result.split,
        result.device,
        result.comm,
        result.balanced,
    )


DNDarray.__ne__ = lambda self, other: ne(self, other)
DNDarray.__ne__.__doc__ = ne.__doc__

# alias
not_equal = ne
not_equal.__doc__ = ne.__doc__
| 31.327791 | 189 | 0.576769 |
83b3d4409b9fbdd0748ccc1d77a1c7b1a4f76c8e | 11,283 | py | Python | examples/nsmc.py | seujung/KoBART | 6040da72ca5744c17e5e89092158a8ee2a6b9673 | [
"MIT"
] | 1 | 2020-12-20T10:49:26.000Z | 2020-12-20T10:49:26.000Z | examples/nsmc.py | seujung/KoBART | 6040da72ca5744c17e5e89092158a8ee2a6b9673 | [
"MIT"
] | null | null | null | examples/nsmc.py | seujung/KoBART | 6040da72ca5744c17e5e89092158a8ee2a6b9673 | [
"MIT"
] | null | null | null | # coding=utf-8
# Modified MIT License
# Software Copyright (c) 2020 SK telecom
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
# associated documentation files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
# The above copyright notice and this permission notice need not be included
# with content created by the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
import argparse
import logging
import os
import pandas as pd
import numpy as np
import torch
from torch.utils.data import DataLoader, Dataset
import pytorch_lightning as pl
from pytorch_lightning import loggers as pl_loggers
from transformers.optimization import AdamW, get_cosine_schedule_with_warmup
from transformers import BartForSequenceClassification
from kobart import get_kobart_tokenizer, get_pytorch_kobart_model
# Top-level CLI parser; task, data and trainer arguments are appended to it
# in the __main__ block below.
parser = argparse.ArgumentParser(description='subtask for KoBART')
parser.add_argument('--subtask',
                    type=str,
                    default='NSMC',
                    help='NSMC')

# Configure the root logger at INFO so the step-count logs are visible.
logger = logging.getLogger()
logger.setLevel(logging.INFO)
class ArgsBase():
    """Shared CLI arguments: data file paths, batch size, sequence length."""

    @staticmethod
    def add_model_specific_args(parent_parser):
        parser = argparse.ArgumentParser(
            parents=[parent_parser], add_help=False)
        parser.add_argument('--train_file',
                            type=str,
                            default='nsmc/ratings_train.txt',
                            help='train file')
        parser.add_argument('--test_file',
                            type=str,
                            default='nsmc/ratings_test.txt',
                            help='test file')
        parser.add_argument('--batch_size',
                            type=int,
                            default=128,
                            help='')
        parser.add_argument('--max_seq_len',
                            type=int,
                            default=128,
                            help='')
        return parser
class NSMCDataset(Dataset):
    """NSMC sentiment dataset of (document, label) pairs.

    Each item is tokenized with the KoBART tokenizer, wrapped in BOS/EOS and
    padded or truncated to ``max_seq_len``.
    """

    def __init__(self, filepath, max_seq_len=128):
        self.filepath = filepath
        # The NSMC ratings files are tab-separated with 'document'/'label' columns.
        self.data = pd.read_csv(self.filepath, sep='\t')
        self.max_seq_len = max_seq_len
        self.tokenizer = get_kobart_tokenizer()

    def __len__(self):
        return len(self.data)

    def __getitem__(self, index):
        record = self.data.iloc[index]
        document, label = str(record['document']), int(record['label'])
        tokens = [self.tokenizer.bos_token] + \
            self.tokenizer.tokenize(document) + [self.tokenizer.eos_token]
        encoder_input_id = self.tokenizer.convert_tokens_to_ids(tokens)
        attention_mask = [1] * len(encoder_input_id)
        if len(encoder_input_id) < self.max_seq_len:
            # Right-pad with the pad token; padded positions are masked out.
            while len(encoder_input_id) < self.max_seq_len:
                encoder_input_id += [self.tokenizer.pad_token_id]
                attention_mask += [0]
        else:
            # Truncate and make sure the sequence still ends with EOS.
            encoder_input_id = encoder_input_id[:self.max_seq_len - 1] + [
                self.tokenizer.eos_token_id]
            attention_mask = attention_mask[:self.max_seq_len]
        # Fixed: np.float was a deprecated alias removed in NumPy 1.24;
        # np.float64 keeps the original double-precision dtype.
        return {'input_ids': np.array(encoder_input_id, dtype=np.int_),
                'attention_mask': np.array(attention_mask, dtype=np.float64),
                'labels': np.array(label, dtype=np.int_)}
class NSMCDataModule(pl.LightningDataModule):
    """LightningDataModule serving the NSMC train/test splits."""

    def __init__(self, train_file,
                 test_file,
                 max_seq_len=128,
                 batch_size=32):
        super().__init__()
        self.batch_size = batch_size
        self.max_seq_len = max_seq_len
        self.train_file_path = train_file
        self.test_file_path = test_file

    @staticmethod
    def add_model_specific_args(parent_parser):
        # No data-module specific arguments yet; kept for interface symmetry.
        parser = argparse.ArgumentParser(
            parents=[parent_parser], add_help=False)
        return parser

    # OPTIONAL, called for every GPU/machine (assigning state is OK)
    def setup(self, stage):
        # split dataset
        self.nsmc_train = NSMCDataset(self.train_file_path,
                                      self.max_seq_len)
        self.nsmc_test = NSMCDataset(self.test_file_path,
                                     self.max_seq_len)

    # return the dataloader for each split
    def train_dataloader(self):
        nsmc_train = DataLoader(self.nsmc_train,
                                batch_size=self.batch_size,
                                num_workers=5, shuffle=True)
        return nsmc_train

    def val_dataloader(self):
        # The test split doubles as the validation split.
        nsmc_val = DataLoader(self.nsmc_test,
                              batch_size=self.batch_size,
                              num_workers=5, shuffle=False)
        return nsmc_val

    def test_dataloader(self):
        nsmc_test = DataLoader(self.nsmc_test,
                               batch_size=self.batch_size,
                               num_workers=5, shuffle=False)
        return nsmc_test
class Classification(pl.LightningModule):
    """Base LightningModule holding the optimizer/scheduler configuration
    shared by classification fine-tuning subtasks (subclasses set self.model)."""

    def __init__(self, hparams, **kwargs) -> None:
        super(Classification, self).__init__()
        self.hparams = hparams

    @staticmethod
    def add_model_specific_args(parent_parser):
        # add model specific args
        parser = argparse.ArgumentParser(
            parents=[parent_parser], add_help=False)
        # NOTE(review): '--batch-size' here and '--batch_size' in ArgsBase
        # both resolve to args.batch_size; the action added last supplies the
        # effective default — verify this is intentional.
        parser.add_argument('--batch-size',
                            type=int,
                            default=32,
                            help='batch size for training (default: 96)')
        parser.add_argument('--lr',
                            type=float,
                            default=5e-5,
                            help='The initial learning rate')
        parser.add_argument('--warmup_ratio',
                            type=float,
                            default=0.1,
                            help='warmup ratio')
        return parser

    def configure_optimizers(self):
        # Prepare optimizer: apply weight decay to every parameter except
        # biases and LayerNorm weights (standard transformer fine-tuning).
        param_optimizer = list(self.model.named_parameters())
        no_decay = ['bias', 'LayerNorm.bias', 'LayerNorm.weight']
        optimizer_grouped_parameters = [
            {'params': [p for n, p in param_optimizer if not any(
                nd in n for nd in no_decay)], 'weight_decay': 0.01},
            {'params': [p for n, p in param_optimizer if any(
                nd in n for nd in no_decay)], 'weight_decay': 0.0}
        ]
        optimizer = AdamW(optimizer_grouped_parameters,
                          lr=self.hparams.lr, correct_bias=False)
        # warm up lr: total optimizer steps = samples / (batch * workers *
        # grad-accumulation) * epochs, with a warmup_ratio fraction of warmup.
        num_workers = (self.hparams.gpus if self.hparams.gpus is not None else 1) * (self.hparams.num_nodes if self.hparams.num_nodes is not None else 1)
        data_len = len(self.train_dataloader().dataset)
        logging.info(f'number of workers {num_workers}, data length {data_len}')
        num_train_steps = int(data_len / (self.hparams.batch_size * num_workers * self.hparams.accumulate_grad_batches) * self.hparams.max_epochs)
        logging.info(f'num_train_steps : {num_train_steps}')
        num_warmup_steps = int(num_train_steps * self.hparams.warmup_ratio)
        logging.info(f'num_warmup_steps : {num_warmup_steps}')
        scheduler = get_cosine_schedule_with_warmup(
            optimizer,
            num_warmup_steps=num_warmup_steps, num_training_steps=num_train_steps)
        # Step the scheduler on every optimizer step.
        lr_scheduler = {'scheduler': scheduler,
                        'monitor': 'loss', 'interval': 'step',
                        'frequency': 1}
        return [optimizer], [lr_scheduler]
class KoBARTClassification(Classification):
    """KoBART model fine-tuned for binary sentiment classification on NSMC."""

    def __init__(self, hparams, **kwargs):
        super(KoBARTClassification, self).__init__(hparams, **kwargs)
        self.model = BartForSequenceClassification.from_pretrained(get_pytorch_kobart_model())
        self.model.train()
        self.metric_acc = pl.metrics.classification.Accuracy()

    def forward(self, input_ids, attention_mask, labels=None):
        # return_dict=True so the output exposes .loss / .logits attributes.
        return self.model(input_ids=input_ids, attention_mask=attention_mask, labels=labels, return_dict=True)

    def training_step(self, batch, batch_idx):
        outs = self(batch['input_ids'], batch['attention_mask'], batch['labels'])
        loss = outs.loss
        self.log('train_loss', loss, prog_bar=True)
        return loss

    def validation_step(self, batch, batch_idx):
        # Labels are not passed to forward here, so only logits are produced.
        pred = self(batch['input_ids'], batch['attention_mask'])
        labels = batch['labels']
        accuracy = self.metric_acc(pred.logits, labels)
        self.log('accuracy', accuracy)
        result = {'accuracy': accuracy}
        # Checkpoint model based on validation loss
        return result

    def validation_epoch_end(self, outputs):
        # Average the per-batch accuracies into the epoch-level val_acc.
        val_acc = torch.stack([i['accuracy'] for i in outputs]).mean()
        self.log('val_acc', val_acc, prog_bar=True)
if __name__ == '__main__':
    # NOTE(review): `parser` must already exist at this point (an argparse
    # parser created earlier in the file); only its augmentation with the
    # model/data/trainer argument groups is visible here -- confirm upstream.
    parser = Classification.add_model_specific_args(parser)
    parser = ArgsBase.add_model_specific_args(parser)
    parser = NSMCDataModule.add_model_specific_args(parser)
    parser = pl.Trainer.add_argparse_args(parser)
    args = parser.parse_args()
    logging.info(args)
    # init model
    model = KoBARTClassification(args)
    if args.subtask == 'NSMC':
        # init data
        dm = NSMCDataModule(args.train_file,
                            args.test_file,
                            batch_size=args.batch_size, max_seq_len=args.max_seq_len)
        # Track the epoch-level 'val_acc' logged by validation_epoch_end;
        # save_top_k=-1 keeps a checkpoint for every epoch.
        checkpoint_callback = pl.callbacks.ModelCheckpoint(monitor='val_acc',
                                                           dirpath=args.default_root_dir,
                                                           filename='model_chp/{epoch:02d}-{val_acc:.3f}',
                                                           verbose=True,
                                                           save_last=True,
                                                           mode='max',
                                                           save_top_k=-1,
                                                           prefix=f'{args.subtask}')
    else:
        # add more subtasks
        assert False
    tb_logger = pl_loggers.TensorBoardLogger(os.path.join(args.default_root_dir, 'tb_logs'))
    # train
    lr_logger = pl.callbacks.LearningRateMonitor()
    trainer = pl.Trainer.from_argparse_args(args, logger=tb_logger,
                                            callbacks=[checkpoint_callback, lr_logger])
    trainer.fit(model, dm)
| 41.788889 | 153 | 0.60631 |
ae69f61839620110037a106fd2d4bd25625e481b | 245 | py | Python | flask/model/__init__.py | Dev-Jahn/cms | 84ea115bdb865daff83d069502f6f0dd105fc4f0 | [
"RSA-MD"
] | null | null | null | flask/model/__init__.py | Dev-Jahn/cms | 84ea115bdb865daff83d069502f6f0dd105fc4f0 | [
"RSA-MD"
] | 9 | 2021-01-05T07:48:28.000Z | 2021-05-14T06:38:27.000Z | flask/model/__init__.py | Dev-Jahn/cms | 84ea115bdb865daff83d069502f6f0dd105fc4f0 | [
"RSA-MD"
] | 4 | 2021-01-05T06:46:09.000Z | 2021-05-06T01:44:28.000Z | from .company_model import Company
from .device_model import Device
from .device_entry_model import DeviceEntry
from .image_model import Image
from .project_model import Project
from .cell_model import Cell
from .user_model import User
| 27.222222 | 44 | 0.820408 |
e3be9855102b294572b598543db46b1daa0f4a00 | 5,654 | py | Python | teleBot/duckduckgo.py | HikingSheep/Testy_Rasbian | 8948eb070629ed9fd257828926863fb7f8834d53 | [
"Apache-2.0"
] | 1 | 2020-05-24T19:45:30.000Z | 2020-05-24T19:45:30.000Z | teleBot/duckduckgo.py | HikingSheep/Testy_Rasbian | 8948eb070629ed9fd257828926863fb7f8834d53 | [
"Apache-2.0"
] | null | null | null | teleBot/duckduckgo.py | HikingSheep/Testy_Rasbian | 8948eb070629ed9fd257828926863fb7f8834d53 | [
"Apache-2.0"
] | null | null | null | # duckduckgo.py - Library for querying the DuckDuckGo API
#
# Copyright (c) 2010 Michael Stephens <me@mikej.st>
# Copyright (c) 2012-2013 Michael Smith <crazedpsyc@gshellz.org>
#
# See LICENSE for terms of usage, modification and redistribution.
import urllib
from urllib.request import urlopen
import json as j
import sys
__version__ = 0.242
def query(query, useragent='python-duckduckgo '+str(__version__), safesearch=True, html=False, meanings=True, **kwargs):
    """Query DuckDuckGo, returning a Results object.

    Here's a query that's unlikely to change:

    >>> result = query('1 + 1')
    >>> result.type
    'nothing'

    >>> result.answer.text
    '1 + 1 = 2'

    >>> result.answer.type
    'calc'

    Keyword arguments:
    useragent: UserAgent to use while querying. Default:
               "python-duckduckgo <version>" (str)
    safesearch: True for on, False for off. Default: True (bool)
    html: True to allow HTML in output. Default: False (bool)
    meanings: True to include disambiguations in results (bool)
    Any other keyword arguments are passed directly to DuckDuckGo as URL
    params.
    """
    # Local imports: the original body used the removed Python 2 API
    # (`urllib.urlencode`, `urllib2.Request`, `urllib2.urlopen`), which
    # raises AttributeError/NameError under Python 3.
    import urllib.parse
    import urllib.request

    safesearch = '1' if safesearch else '-1'
    html = '0' if html else '1'
    meanings = '0' if meanings else '1'
    params = {
        'q': query,
        'o': 'json',
        'kp': safesearch,
        'no_redirect': '1',
        'no_html': html,
        'd': meanings,
        }
    params.update(kwargs)
    encparams = urllib.parse.urlencode(params)
    url = 'http://api.duckduckgo.com/?' + encparams

    request = urllib.request.Request(url, headers={'User-Agent': useragent})
    response = urllib.request.urlopen(request)
    # json.loads accepts the raw bytes returned by read() on Python 3.6+.
    json = j.loads(response.read())
    response.close()

    return Results(json)
class Results(object):
    """Typed wrapper over a full DuckDuckGo JSON response.

    Exposes the raw payload as ``self.json`` plus parsed sub-objects:
    ``results``/``related`` (lists of Result), ``abstract``, ``redirect``,
    ``definition``, ``answer`` and ``image``.
    """

    def __init__(self, json):
        # Map the single-letter 'Type' code to a readable category name;
        # unknown codes collapse to ''.
        self.type = {'A': 'answer', 'D': 'disambiguation',
                     'C': 'category', 'N': 'name',
                     'E': 'exclusive', '': 'nothing'}.get(json.get('Type',''), '')
        self.json = json
        self.api_version = None # compat
        self.heading = json.get('Heading', '')
        self.results = [Result(elem) for elem in json.get('Results',[])]
        self.related = [Result(elem) for elem in
                        json.get('RelatedTopics',[])]
        self.abstract = Abstract(json)
        self.redirect = Redirect(json)
        self.definition = Definition(json)
        self.answer = Answer(json)
        # 'Image' is a bare URL in the payload; wrap it in the same shape
        # Image() expects for icon entries.
        self.image = Image({'Result':json.get('Image','')})
class Abstract(object):
    """Summary ("abstract") portion of a DuckDuckGo JSON response."""

    def __init__(self, json):
        get = json.get
        self.html = get('Abstract', '')
        self.text = get('AbstractText', '')
        self.url = get('AbstractURL', '')
        # Source has no '' fallback in the payload contract; stays None
        # when absent.
        self.source = get('AbstractSource')
class Redirect(object):
    """Redirect target ("bang" queries) of a DuckDuckGo response."""

    def __init__(self, json):
        self.url = json['Redirect'] if 'Redirect' in json else ''
class Result(object):
    """A single result entry, or a named group of sub-topics.

    When the payload carries a non-empty 'Topics' list this node is a
    topic group: only ``topics`` is populated (recursively) and the leaf
    attributes are deliberately left unset, matching the upstream shape.
    Otherwise it is a leaf result with ``html``/``text``/``url``/``icon``.
    """

    def __init__(self, json):
        raw_topics = json.get('Topics', [])
        if raw_topics:
            self.topics = [Result(child) for child in raw_topics]
            return
        self.topics = raw_topics
        self.html = json.get('Result')
        self.text = json.get('Text')
        self.url = json.get('FirstURL')
        icon_json = json.get('Icon')
        self.icon = Image(icon_json) if icon_json is not None else None
class Image(object):
    """An image reference: URL plus optional pixel dimensions."""

    def __init__(self, json):
        self.url = json.get('Result')
        # dict.get defaults to None, so omitting the explicit default is
        # behaviorally identical.
        self.height = json.get('Height')
        self.width = json.get('Width')
class Answer(object):
    """Instant-answer payload: text plus its type (e.g. 'calc')."""

    def __init__(self, json):
        self.text = json.get('Answer')
        self.type = json['AnswerType'] if 'AnswerType' in json else ''
class Definition(object):
    """Dictionary-definition payload of a DuckDuckGo response."""

    def __init__(self, json):
        get = json.get
        self.text = get('Definition', '')
        self.url = get('DefinitionURL')
        self.source = get('DefinitionSource')
def get_zci(q, web_fallback=True, priority=['answer', 'abstract', 'related.0', 'definition'], urls=True, **kwargs):
    '''Return a single (and hopefully the best) Zero-Click-Info string for *q*.

    priority: ordered list of Results fields to probe; an entry like
    'related.0' means "index 0 of the related list".
    web_fallback: fall back to the redirect URL of the first web result.
    urls: append ' (<url>)' after the text when the field has one.
    Any extra keyword arguments are passed through to query().  Falls back
    to 'Sorry, no results.' when nothing is found.

    NOTE(review): the mutable default for ``priority`` is never mutated
    here, so it is harmless, but callers should not rely on sharing it.
    '''
    # Leading backslash disables DuckDuckGo bang-redirect interpretation
    # of the query text.
    ddg = query('\\'+q, **kwargs)
    response = ''

    for p in priority:
        ps = p.split('.')
        type = ps[0]
        index = int(ps[1]) if len(ps) > 1 else None

        result = getattr(ddg, type)
        if index is not None:
            if not hasattr(result, '__getitem__'): raise TypeError('%s field is not indexable' % type)
            result = result[index] if len(result) > index else None
        if not result: continue

        if result.text: response = result.text
        if result.text and hasattr(result,'url') and urls:
            if result.url: response += ' (%s)' % result.url
        if response: break

    # if there still isn't anything, try to get the first web result
    if not response and web_fallback:
        if ddg.redirect.url:
            response = ddg.redirect.url

    # final fallback
    if not response:
        response = 'Sorry, no results.'

    return response
def main():
    """CLI entry point: query DuckDuckGo with the command-line arguments
    and dump the raw JSON response, one top-level key per line (scalar
    values inline, sequences indented underneath)."""
    if len(sys.argv) > 1:
        q = query(' '.join(sys.argv[1:]))
        # Python 3 fixes: dict.keys() returns a view with no .sort(), and
        # the `unicode` type no longer exists (str covers it).
        for key in sorted(q.json.keys()):
            sys.stdout.write(key)
            if isinstance(q.json[key], (str, int)):
                print(':', q.json[key])
            else:
                sys.stdout.write('\n')
                for i in q.json[key]:
                    print('\t', i)
    else:
        print('Usage: %s [query]' % sys.argv[0])
| 30.235294 | 120 | 0.593208 |
f6ccf7251d1a28538a8c38dd62ad5a0f7fcc8c89 | 4,298 | py | Python | tools/versioning.py | JessyDL/psl | 2396efc1ffed6e2105e324f34deadcb04aad76c3 | [
"MIT"
] | null | null | null | tools/versioning.py | JessyDL/psl | 2396efc1ffed6e2105e324f34deadcb04aad76c3 | [
"MIT"
] | null | null | null | tools/versioning.py | JessyDL/psl | 2396efc1ffed6e2105e324f34deadcb04aad76c3 | [
"MIT"
] | null | null | null | import subprocess
def run_command(command=[]):
    """Run *command* and return everything it wrote to stdout, as text.

    The previous implementation interleaved ``readline()`` polling with a
    final ``communicate()`` call; ``subprocess.run`` captures the complete
    stream in one step with identical output and no risk of splitting
    multi-byte UTF-8 sequences across reads.

    NOTE: the mutable default ([]) is kept for interface compatibility;
    it is never mutated here.
    """
    result = subprocess.run(command, stdout=subprocess.PIPE)
    return result.stdout.decode("utf-8")
def all_authors():
    """Return author names from ``git shortlog``, aggregated across aliases.

    Fixes two defects in the original: commit counts were kept as strings,
    so merging an alias *concatenated* the digits instead of summing, and
    the ``sorted(authors.items())`` result was computed but discarded.
    Names are returned ordered by total commit count, descending.
    """
    possible_authors = run_command(
        ["git", "shortlog", "-s", "-n", "--all", "--no-merges"]).split("\n")
    author_exemptions = ["Travis-CI"]
    author_alias = {'JessyDL': 'Jessy De Lannoit',
                    'Jessy': 'Jessy De Lannoit'}
    authors = {}
    for author in possible_authors:
        # Each shortlog line is "<count>\t<name>"; skip blanks and bots.
        if not author or any(s in author for s in author_exemptions):
            continue
        number, name = author.strip().split(None, 1)
        name = author_alias.get(name, name)
        authors[name] = authors.get(name, 0) + int(number)
    ordered = sorted(authors.items(), key=lambda item: item[1], reverse=True)
    return [name for name, _count in ordered]
def git_version():
    """Return the newest git tag as a (major, minor, patch) int tuple.

    Falls back to (0, 0, 0) when the repository has no tags yet.
    """
    tags = run_command(["git", "tag", "-l", "--sort=-v:refname"])
    newest = tags.split('\n')[0]
    if not newest:
        return 0, 0, 0
    parts = newest.split('.')
    return int(parts[0]), int(parts[1]), int(parts[2])
def git_sha1():
    # Full SHA-1 of the current HEAD commit, trailing newline stripped.
    return run_command(["git", "rev-parse", "HEAD"]).rstrip()
def git_timestamp():
    # Unix commit timestamp of HEAD; 0 when the repository has no commits
    # (git prints nothing in that case, so int() would fail on '').
    res = run_command(["git", "log", "-1", "--pretty=format:%ct"])
    if not res:
        return 0
    return int(res)
def git_log_since(major, minor, patch):
    """Collect commit messages since tag ``major.minor.patch``.

    The sentinel version 0.0.0 means "whole history".  Returns the
    non-empty log lines, sorted, joined with newlines ('' when empty).
    """
    cmd = ["git", "log", "--format=%B", "--no-merges"]
    if (major, minor, patch) != (0, 0, 0):
        # Restrict to commits after the tag: "<tag>..HEAD".
        cmd.insert(2, f"{major}.{minor}.{patch}..HEAD")
    logs = run_command(cmd)
    if not logs:
        return ""
    lines = sorted(line for line in logs.splitlines() if line)
    return "\n".join(lines)
def create_patch(message=None):
    """Tag a new patch release (x.y.Z+1) annotated with the changes since
    the previous tag.

    Aborts when the accumulated log is suspiciously small.  *message*,
    when given, is inserted verbatim into the tag annotation.
    """
    current_major, current_minor, current_patch = git_version()
    next_major = current_major
    next_minor = current_minor
    next_patch = current_patch + 1
    changes = git_log_since(current_major, current_minor, current_patch)
    if not changes or len(changes) < 3:
        print("unlikely small changes in patch version, please verify this is what you want to do")
        return
    # Fixes: the original interpolated the literal 'None' into the
    # annotation when no message was given, and left a stray '"' at the
    # end of the f-string.
    message = f"\n{message}\n" if message else ""
    create_version(
        next_major, next_minor, next_patch,
        f'patch release {next_major}.{next_minor}.{next_patch}\n{message}\n'
        f'changes since {current_major}.{current_minor}.{current_patch}:\n{changes}')
def create_minor(message=None):
    """Tag a new minor release (x.Y+1.0) annotated with the changes since
    the previous minor (x.Y.0).

    Aborts when the accumulated log is suspiciously small.  *message*,
    when given, is inserted verbatim into the tag annotation.
    """
    current_major, current_minor, _ = git_version()
    next_major = current_major
    next_minor = current_minor + 1
    next_patch = 0
    changes = git_log_since(current_major, current_minor, 0)
    if not changes or len(changes) < 3:
        # Also fixes the "ninor" typo in the original warning text.
        print("unlikely small changes in minor version, please verify this is what you want to do")
        return
    # Fixes: 'None' interpolation when no message; stray '"' in f-string.
    message = f"\n{message}\n" if message else ""
    create_version(
        next_major, next_minor, next_patch,
        f'minor release {next_major}.{next_minor}.{next_patch}\n{message}\n'
        f'changes since {current_major}.{current_minor}.0:\n{changes}')
def create_major(message=None):
    """Tag a new major release (X+1.0.0) annotated with the changes since
    the previous major (X.0.0).

    Aborts when the accumulated log is suspiciously small.  *message*,
    when given, is inserted verbatim into the tag annotation.
    """
    current_major, _, _ = git_version()
    next_major = current_major + 1
    next_minor = 0
    next_patch = 0
    changes = git_log_since(current_major, 0, 0)
    if not changes or len(changes) < 3:
        print("unlikely small changes in major version, please verify this is what you want to do")
        return
    # Fixes: 'None' interpolation when no message; stray '"' in f-string.
    message = f"\n{message}\n" if message else ""
    create_version(
        next_major, next_minor, next_patch,
        f'major release {next_major}.{next_minor}.{next_patch}\n{message}\n'
        f'changes since {current_major}.0.0:\n{changes}')
def create_version(major, minor, patch, message):
    # Create an annotated tag "major.minor.patch" carrying the release notes.
    run_command(["git", "tag", '-a', f'{major}.{minor}.{patch}', '-m', message])
# in case we created a patch we no longer want.
def destroy_local_tag(major, minor, patch):
    # Deletes the tag locally only; a tag already pushed must also be
    # removed from the remote separately.
    run_command(["git", "tag", '-d', f'{major}.{minor}.{patch}'])
if __name__ == '__main__':
    # NOTE(review): ad-hoc manual invocation -- drops local tag 1.0.0.
    destroy_local_tag(1,0,0)
aa1042a9973bc687dfc310056ab5b6b582bfeb95 | 2,578 | py | Python | src/tf_transformers/layers/mask/cross_attention_mask.py | legacyai/tf-transformers | 65a5f9a4bcb3236483daa598a37b91673f56cb97 | [
"Apache-2.0"
] | 116 | 2021-03-15T09:48:41.000Z | 2022-03-24T05:15:51.000Z | src/tf_transformers/layers/mask/cross_attention_mask.py | legacyai/tf-transformers | 65a5f9a4bcb3236483daa598a37b91673f56cb97 | [
"Apache-2.0"
] | 4 | 2021-03-20T11:20:57.000Z | 2022-01-05T04:59:07.000Z | src/tf_transformers/layers/mask/cross_attention_mask.py | legacyai/tf-transformers | 65a5f9a4bcb3236483daa598a37b91673f56cb97 | [
"Apache-2.0"
] | 9 | 2021-03-17T04:14:48.000Z | 2021-09-13T07:15:31.000Z | # coding=utf-8
# Copyright 2021 TF-Transformers Authors and The TensorFlow Authors.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras layer that creates a self-attention mask."""
# from __future__ import google_type_annotations
from __future__ import absolute_import, division, print_function
import tensorflow as tf
from tf_transformers.utils import tf_utils
@tf.keras.utils.register_keras_serializable(package="Text")
class CrossAttentionMask(tf.keras.layers.Layer):
    """Create 3D attention mask from a 2D tensor mask.

    inputs[0]: from_tensor: 2D or 3D Tensor of shape
    [batch_size, from_seq_length, ...].
    inputs[1]: to_mask: int32 Tensor of shape [batch_size, to_seq_length]
    (assumed to hold 0/1 padding-mask values -- confirm with callers).

    Returns:
      float Tensor of shape [batch_size, from_seq_length, to_seq_length]:
      the to_mask row broadcast across every from-position.
    """

    def __init__(self, **kwargs):
        # We need to have a default dtype of float32, since the inputs (which
        # Keras usually uses to infer the dtype) will always be int32.
        if "dtype" not in kwargs:
            kwargs["dtype"] = tf_utils.get_dtype()
        super(CrossAttentionMask, self).__init__(**kwargs)
        self._dtype = kwargs["dtype"]

    def call(self, inputs):
        to_mask = inputs[1]
        batch_size, from_seq_length = tf_utils.get_shape_list(inputs[0])
        _, to_seq_length = tf_utils.get_shape_list(inputs[1])

        # Reshape to [batch_size, 1, to_seq_length] so it broadcasts over
        # the from-dimension below.
        to_mask = tf.cast(tf.reshape(to_mask, [batch_size, 1, to_seq_length]), dtype=self._dtype)

        # We don't assume that `from_tensor` is a mask (although it could be). We
        # don't actually care if we attend *from* padding tokens (only *to* padding)
        # tokens so we create a tensor of all ones.
        #
        # `broadcast_ones` = [batch_size, from_seq_length, 1]
        broadcast_ones = tf.ones(shape=[batch_size, from_seq_length, 1], dtype=self._dtype)

        # Here we broadcast along two dimensions to create the mask.
        mask = broadcast_ones * to_mask

        return mask
| 39.060606 | 97 | 0.683476 |
137e438eb43c6b0fd624aca3b650d51d10e29e45 | 8,443 | py | Python | Scripts/compiler_dump_test/__main__.py | LiarPrincess/Violet | 0a4268649b0eec3ab631d19015d7043394c6571e | [
"MIT"
] | null | null | null | Scripts/compiler_dump_test/__main__.py | LiarPrincess/Violet | 0a4268649b0eec3ab631d19015d7043394c6571e | [
"MIT"
] | 6 | 2021-10-14T15:55:16.000Z | 2022-03-31T14:04:02.000Z | Scripts/compiler_dump_test/__main__.py | LiarPrincess/Violet | 0a4268649b0eec3ab631d19015d7043394c6571e | [
"MIT"
] | null | null | null | import io
import os
import re
import dis
from typing import List
def in_current_directory(file):
    """Resolve *file* relative to the directory containing this script."""
    here = os.path.dirname(__file__)
    return os.path.join(here, file)
def read_input_file():
    """Return the text of 'input.py' located next to this script."""
    file = in_current_directory('input.py')
    with open(file) as f:
        source = f.read()
    return source
def print_docs(lines):
    """Print the dis output as '///' doc-comment lines.

    *lines* is the output of ``dis.dis``: first the echoed python source
    (flush left), then the indented bytecode listing.
    """
    index = 0

    # python code
    while not lines[index].startswith(' '):
        print('///', lines[index])
        index += 1

    # bytecode instructions
    # Column offset that skips the source-line-number gutter of dis output.
    without_source_code_line = 13
    while index < len(lines):
        line = lines[index]
        if len(line) > 0:
            print('///', line[without_source_code_line:])
        index += 1
def print_expected(lines: List[str]):
    """Translate a ``dis`` listing into Swift-style '.instruction(...)' cases.

    *lines* is the same dis output consumed by print_docs().  Each bytecode
    row is converted to a camelCase case name plus a labeled argument, one
    per line, with a trailing comma on all but the last.  The per-opcode
    branches below mirror the project's 'Instruction' enum (order included).
    """
    index = 0

    # skip python code
    while not lines[index].startswith(' '):
        index += 1

    # Captures the innermost parenthesized value of a dis argument column.
    arg_value_from_parens_regex = re.compile('.*\((.*)\)')

    # expected
    # Column offset skipping both the source-line gutter and bytecode index.
    without_source_code_line_and_bytecode_index = 16
    while index < len(lines):
        line = lines[index]
        index += 1

        if not line:
            continue

        # Are we now code object?
        if line.startswith('Disassembly of '):
            print()
            print(line)
            print()
            continue

        instruction = line[without_source_code_line_and_bytecode_index:]

        arg_start_index = instruction.find(' ')
        name_end = len(instruction) if arg_start_index == -1 else arg_start_index
        name = instruction[:name_end]
        name = to_camel_case(name)
        name = name.replace('Subscr', 'Subscript')
        name = name.replace('Attr', 'Attribute')
        if name == 'returnValue':
            name = 'return'
        if name == 'jumpForward':
            name = 'jumpAbsolute'
        if name == 'continueLoop':
            name = 'continue'

        args = '' if arg_start_index == -1 else instruction[arg_start_index:]
        args = args.strip()

        # Extract arg values from parens: '(0 ('Aurora')),' -> 'Aurora'
        value_in_paren = arg_value_from_parens_regex.findall(args)
        if value_in_paren:
            args = value_in_paren[0]

        # Jump addres
        if args.startswith('to '):
            args = args[3:]

        # Instruction specifics
        # (this is in the same order as cases in 'Instruction' enum)
        if name == 'compareOp':
            compare_type = \
                '.equal' if args == '==' else \
                '.notEqual' if args == '!=' else \
                '.less' if args == '<' else \
                '.lessEqual' if args == '<=' else \
                '.greater' if args == '>' else \
                '.greaterEqual' if args == '>=' else \
                '.is' if args == 'is' else \
                '.isNot' if args == 'is not' else \
                '.in' if args == 'in' else \
                '.notIn' if args == 'not in' else \
                '.exceptionMatch' if args == 'exception match' else \
                args
            args = 'type: ' + compare_type
        elif name == 'setupLoop':
            args = 'loopEndTarget: ' + args
        elif name == 'forIter':
            args = 'ifEmptyTarget: ' + args
        elif name == 'continue':
            args = 'loopStartTarget: ' + args
        elif name in ('buildTuple', 'buildList', 'buildSet', 'buildMap', 'buildConstKeyMap'):
            args = 'elementCount: ' + args
        elif name in ('setAdd', 'listAppend', 'mapAdd'):
            args = 'relativeStackIndex: ' + args
        elif name in ('buildTupleUnpack', 'buildTupleUnpackWithCall', 'buildListUnpack', 'buildSetUnpack', 'buildMapUnpack', 'buildMapUnpackWithCall', 'unpackSequence'):
            args = 'elementCount: ' + args
        elif name == 'unpackEx':
            pass  # TODO: unpackEx
        elif name == 'loadConst':
            if args in ('None', 'Ellipsis', 'True', 'False'):
                args = '.' + args.lower()
            elif args and args[0].isnumeric():
                # We sometimes get additional parens
                args = args.replace('(', '').replace(')', '')
                if '+' in args or args[-1] == 'j':
                    # Complex literal: 'a+bj' or bare 'bj'.
                    split = args.split('+')
                    real = split[0] if len(split) == 2 else '0'
                    imag = split[1] if len(split) == 2 else split[0]
                    args = f'real: {real}, imag: {imag}'
                elif '.' in args:
                    args = 'float: ' + args
                else:
                    args = 'integer: ' + args
            elif 'code object' in args:
                args = 'codeObject: .any'
            elif args.startswith("'") or args.startswith('"'):
                args = args.replace("'", '"')  # ' -> "
                args = 'string: ' + args
            elif args.startswith('b"') or args.startswith("b'"):
                args = args.replace("'", '"')  # ' -> "
                args = 'bytes: ' + args
            elif ',' in args:
                args = 'tuple: ' + args
        elif args.startswith('<code object'):
            args = 'codeObject: .any'
        elif name in ('loadName', 'storeName', 'deleteName'):
            args = f'name: "{args}"'
        elif name in ('loadAttribute', 'storeAttribute', 'deleteAttribute'):
            args = f'name: "{args}"'
        elif name in ('loadGlobal', 'storeGlobal', 'deleteGlobal'):
            args = f'name: "{args}"'
        elif name in ('loadFast', 'storeFast', 'deleteFast'):
            args = f'variable: MangledName(withoutClass: "{args}")'
        elif name in ('loadDeref', 'storeDeref', 'deleteDeref'):
            args = f'cell: MangledName(withoutClass: "{args}")'
            if name == 'loadDeref':
                name = 'loadCellOrFree'
            elif name == 'storeDeref':
                name = 'storeCellOrFree'
            elif name == 'deleteDeref':
                name = 'deleteCellOrFree'
        elif name == 'loadClosure':
            args = f'cellOrFree: MangledName(withoutClass: "{args}")'
        elif name == 'loadClassderef':
            name = 'loadClassFree'
            args = f'free: MangledName(withoutClass: "{args}")'
        elif name == 'makeFunction':
            if args == '0':
                args = 'flags: []'
            else:
                args = 'flags: ' + args
        elif name in ('callFunction', 'callFunctionKw'):
            args = 'argumentCount: ' + args
        elif name == 'callFunctionEx':
            value = 'true' if args == '1' else 'false'
            args = 'hasKeywordArguments: ' + value
        elif name == 'loadMethod':
            args = 'name: ' + args
        elif name == 'callMethod':
            args = 'argumentCount: ' + args
        elif name in ('importName', 'importFrom'):
            args = 'name: ' + args
        elif name == 'setupExcept':
            args = f'firstExceptTarget: {args}'
        elif name == 'setupFinally':
            args = f'finallyStartTarget: {args}'
        elif name == 'raiseVarargs':
            if args == '1':
                args = 'type: .exceptionOnly'
            else:
                # TODO: Other 'raiseVarargs'
                assert False, 'Add missing raiseVarargs arguments'
        elif name == 'setupWith':
            args = 'afterBodyTarget: ' + args
        elif name in ('jumpAbsolute', 'popJumpIfTrue', 'popJumpIfFalse', 'jumpIfTrueOrPop', 'jumpIfFalseOrPop'):
            args = f'target: {args}'
        elif name == 'formatValue':
            pass  # TODO: formatValue
        elif name == 'buildString':
            args = 'elementCount:' + args
        elif name == 'buildSlice':
            pass  # TODO: buildSlice

        if args:
            args = '(' + args + ')'

        # No comma after the very last emitted case.
        is_last = index == len(lines)
        comma = '' if is_last else ','
        print(f'.{name}{args}{comma}')
def to_camel_case(snake_str):
    """Convert SNAKE_CASE/snake_case to camelCase ('LOAD_CONST' -> 'loadConst')."""
    first, *rest = snake_str.split('_')
    return first.lower() + ''.join(part.title() for part in rest)
if __name__ == '__main__':
    code = read_input_file()

    # Capture the disassembly of 'input.py' into an in-memory string.
    bytecode_stream = io.StringIO()
    dis.dis(code, file=bytecode_stream)

    bytecode_stream.seek(0)
    bytecode_lines = bytecode_stream.readlines()
    lines = [l.replace('\n', '') for l in bytecode_lines]

    # First section: source + disassembly as '///' doc comments.
    for c in code.splitlines():
        print('///', c)
    print('///')
    print_docs(lines)
    print('-----------------')
    # Second section: the expected instruction cases for the test file.
    print_expected(lines)
| 32.102662 | 169 | 0.515457 |
8c9241a3196762649779018acb8e52f1255afe34 | 1,493 | py | Python | molo/core/tests/test_blocks.py | Ishma59/molo | 4fd31df9266bc251e09e9339a132d3ccd4143c69 | [
"BSD-2-Clause"
] | null | null | null | molo/core/tests/test_blocks.py | Ishma59/molo | 4fd31df9266bc251e09e9339a132d3ccd4143c69 | [
"BSD-2-Clause"
] | null | null | null | molo/core/tests/test_blocks.py | Ishma59/molo | 4fd31df9266bc251e09e9339a132d3ccd4143c69 | [
"BSD-2-Clause"
] | null | null | null | from django.core.exceptions import ValidationError
from django.test import TestCase
from molo.core.blocks import MarkDownBlock
class TestMarkDownBlock(TestCase):
def test_save_block_with_html_value_fails_validation(self):
# Test with some commonly used html tags
block = MarkDownBlock()
with (self.assertRaisesMessage(ValidationError,
"Please use MarkDown for formatting text instead of HTML.")):
block.clean(value="<b>Hello</b> There!")
with (self.assertRaisesMessage(ValidationError,
"Please use MarkDown for formatting text instead of HTML.")):
block.clean(value="<p>Hello There!</p>")
with (self.assertRaisesMessage(ValidationError,
"Please use MarkDown for formatting text instead of HTML.")):
block.clean(value='<a href="">Hello There!</a>')
with (self.assertRaisesMessage(ValidationError,
"Please use MarkDown for formatting text instead of HTML.")):
block.clean(value='<em>Hello There!</em>')
with (self.assertRaisesMessage(ValidationError,
"Please use MarkDown for formatting text instead of HTML.")):
block.clean(value='Hello There!<br>')
# Test that a commonly used but invalid tag is also caught
with (self.assertRaisesMessage(ValidationError,
"Please use MarkDown for formatting text instead of HTML.")):
block.clean(value="Hello There!</br>")
| 48.16129 | 75 | 0.665104 |
f64008c655a864ff20b65818e028a6b4540b6337 | 706 | py | Python | malw.py | levio-sa/Malwares | b441af8b4ad87fbfbf292e4c0c0176b94f7faae4 | [
"MIT"
] | null | null | null | malw.py | levio-sa/Malwares | b441af8b4ad87fbfbf292e4c0c0176b94f7faae4 | [
"MIT"
] | null | null | null | malw.py | levio-sa/Malwares | b441af8b4ad87fbfbf292e4c0c0176b94f7faae4 | [
"MIT"
] | null | null | null | import os
import random
n=random.random()
print("Hello")
if(n>0.5):
os.system("mkdir new")
os.chdir('./new')
f=open("malw.py",'w+')
s='''import os\nimport random\nn=random.random()\nprint({q1}Hello{q1})\nif(n>0.5):\n os.system({q1}mkdir new{q1})\n os.chdir({q1}./new{q1})\n f=open({q1}malw.py{q1},{q1}w+{q1})\n s={q1}{q1}{q1}{st}{q1}{q1}{q1}\n f.write(s.format(q1=chr(39),st=str(s)))\n f.close()\n print({q1}Here{q1})\nelse:\n print({q1}You are being hacked{q1})\nos.system({q1}python3 malw.py{q1})'''
f.write(s.format(q1=chr(39),st=str(s)))
f.close()
print("Here")
else:
print("You are being hacked")
os.system("python3 malw.py")
| 44.125 | 398 | 0.580737 |
c5fb9985c357c32c74d11e2cff99547da60df832 | 711 | py | Python | feed/spiders/day/cssforest_spider.py | awesome-archive/oh-my-rss | 477cca56e00f96aa53ef07212328cb2b39bdd6da | [
"MIT"
] | 2 | 2020-10-02T07:38:43.000Z | 2021-06-29T07:32:15.000Z | feed/spiders/day/cssforest_spider.py | virtual-emperor/oh-my-rss | 3e04899aba4dec27026f67e44193ca8f1eca616a | [
"MIT"
] | null | null | null | feed/spiders/day/cssforest_spider.py | virtual-emperor/oh-my-rss | 3e04899aba4dec27026f67e44193ca8f1eca616a | [
"MIT"
] | null | null | null |
from feed.spiders.spider import Spider
class CssforestSpider(Spider):
    """Feed spider for blog.cssforest.org.

    All crawling behaviour comes from the base ``Spider``; this subclass
    only supplies the site-specific XPaths and limits.
    """

    name = 'cssforest'

    def __init__(self):
        Spider.__init__(self,
                        start_urls=[
                            'http://blog.cssforest.org/',
                        ],
                        # Article links on the index page.
                        index_xpath="//section/h4/a/@href",
                        article_title_xpath="//article/header/h1/text()",
                        article_content_xpath="//main//article",
                        # Presumably limits how many index entries are
                        # followed -- semantics defined by base Spider.
                        index_limit_count=2,
                        # Strip header/footer chrome from the article body.
                        article_trim_xpaths=[
                            "//article/header",
                            "//article/footer",
                        ]
                        )
| 32.318182 | 73 | 0.412096 |
4fc46d147d311b3727641b71a5655c3eb7edbb37 | 94,844 | py | Python | nova/virt/vmwareapi/vmops.py | ebalduf/nova-backports | 6bf97ec73467de522d34ab7a17ca0e0874baa7f9 | [
"Apache-2.0"
] | null | null | null | nova/virt/vmwareapi/vmops.py | ebalduf/nova-backports | 6bf97ec73467de522d34ab7a17ca0e0874baa7f9 | [
"Apache-2.0"
] | null | null | null | nova/virt/vmwareapi/vmops.py | ebalduf/nova-backports | 6bf97ec73467de522d34ab7a17ca0e0874baa7f9 | [
"Apache-2.0"
] | 1 | 2020-07-24T01:18:44.000Z | 2020-07-24T01:18:44.000Z | # Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
# Copyright (c) 2012 VMware, Inc.
# Copyright (c) 2011 Citrix Systems, Inc.
# Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Class for VM tasks like spawn, snapshot, suspend, resume etc.
"""
import collections
import os
import time
import decorator
from oslo_concurrency import lockutils
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils import excutils
from oslo_utils import strutils
from oslo_utils import units
from oslo_utils import uuidutils
from oslo_vmware import exceptions as vexc
from oslo_vmware.objects import datastore as ds_obj
from oslo_vmware import vim_util as vutil
from nova.api.metadata import base as instance_metadata
from nova import compute
from nova.compute import power_state
from nova.compute import task_states
import nova.conf
from nova.console import type as ctype
from nova import context as nova_context
from nova import exception
from nova.i18n import _, _LE, _LI, _LW
from nova import network
from nova import objects
from nova import utils
from nova import version
from nova.virt import configdrive
from nova.virt import diagnostics
from nova.virt import driver
from nova.virt import hardware
from nova.virt.vmwareapi import constants
from nova.virt.vmwareapi import ds_util
from nova.virt.vmwareapi import error_util
from nova.virt.vmwareapi import imagecache
from nova.virt.vmwareapi import images
from nova.virt.vmwareapi import vif as vmwarevif
from nova.virt.vmwareapi import vim_util
from nova.virt.vmwareapi import vm_util
CONF = nova.conf.CONF
LOG = logging.getLogger(__name__)
VMWARE_POWER_STATES = {'poweredOff': power_state.SHUTDOWN,
'poweredOn': power_state.RUNNING,
'suspended': power_state.SUSPENDED}
RESIZE_TOTAL_STEPS = 6
class VirtualMachineInstanceConfigInfo(object):
    """Parameters needed to create and configure a new instance."""

    def __init__(self, instance, image_info, datastore, dc_info, image_cache,
                 extra_specs=None):
        # Some methods called during spawn take the instance parameter purely
        # for logging purposes.
        # TODO(vui) Clean them up, so we no longer need to keep this variable
        self.instance = instance
        self.ii = image_info
        self.root_gb = instance.root_gb
        self.datastore = datastore
        self.dc_info = dc_info
        self._image_cache = image_cache
        self._extra_specs = extra_specs

    @property
    def cache_image_folder(self):
        """Datastore folder caching this image; None for imageless boot."""
        image_id = self.ii.image_id
        if image_id is None:
            return None
        return self._image_cache.get_image_cache_folder(self.datastore,
                                                        image_id)

    @property
    def cache_image_path(self):
        """Full datastore path of the cached image file; None if no image."""
        image_id = self.ii.image_id
        if image_id is None:
            return None
        file_name = "%s.%s" % (image_id, self.ii.file_type)
        return self.cache_image_folder.join(file_name)
# Note(vui): See https://bugs.launchpad.net/nova/+bug/1363349
# for cases where mocking time.sleep() can have unintended effects on code
# not under test. For now, unblock the affected test cases by providing
# a wrapper function to work around needing to mock time.sleep()
def _time_sleep_wrapper(delay):
    # Thin indirection over time.sleep() so tests can patch this
    # module-local name instead of globally mocking time.sleep.
    time.sleep(delay)
@decorator.decorator
def retry_if_task_in_progress(f, *args, **kwargs):
    """Decorator: retry *f* while vCenter reports TaskInProgress.

    Retries up to CONF.vmware.api_retry_count times (at least once), with
    an exponential backoff capped at 60 seconds between attempts.

    NOTE(review): if every attempt raises TaskInProgress the exception is
    swallowed and the decorated call silently becomes a no-op -- callers
    must treat the operation as best-effort.
    """
    retries = max(CONF.vmware.api_retry_count, 1)
    delay = 1
    for attempt in range(1, retries + 1):
        if attempt != 1:
            # Sleep before every attempt after the first: 1, 2, 4, ... 60s.
            _time_sleep_wrapper(delay)
            delay = min(2 * delay, 60)
        try:
            f(*args, **kwargs)
            return
        except vexc.TaskInProgress:
            pass
class VMwareVMOps(object):
"""Management class for VM-related tasks."""
    def __init__(self, session, virtapi, volumeops, cluster=None,
                 datastore_regex=None):
        """Initializer.

        :param session: VMware API session used for all vSphere calls
        :param virtapi: driver virt-API back-reference
        :param volumeops: volume attach/detach helper
        :param cluster: managed object reference of the target cluster
        :param datastore_regex: regex restricting usable datastores
        """
        self.compute_api = compute.API()
        self._session = session
        self._virtapi = virtapi
        self._volumeops = volumeops
        self._cluster = cluster
        self._root_resource_pool = vm_util.get_res_pool_ref(self._session,
                                                            self._cluster)
        self._datastore_regex = datastore_regex
        # Image-cache folder name; possibly prefixed to stay unique per node.
        self._base_folder = self._get_base_folder()
        self._tmp_folder = 'vmware_temp'
        # Cache of datastore-browser references, populated lazily.
        self._datastore_browser_mapping = {}
        self._imagecache = imagecache.ImageCacheManager(self._session,
                                                        self._base_folder)
        self._network_api = network.API()
def _get_base_folder(self):
# Enable more than one compute node to run on the same host
if CONF.vmware.cache_prefix:
base_folder = '%s%s' % (CONF.vmware.cache_prefix,
CONF.image_cache_subdirectory_name)
# Ensure that the base folder is unique per compute node
elif CONF.remove_unused_base_images:
base_folder = '%s%s' % (CONF.my_ip,
CONF.image_cache_subdirectory_name)
else:
# Aging disable ensures backward compatibility
base_folder = CONF.image_cache_subdirectory_name
return base_folder
    def _extend_virtual_disk(self, instance, requested_size, name, dc_ref):
        """Grow the virtual disk *name* to *requested_size* (in KB).

        Runs ExtendVirtualDisk_Task via vCenter's virtual disk manager; on
        failure, files created by the extend are removed before the error
        is re-raised.
        """
        service_content = self._session.vim.service_content
        LOG.debug("Extending root virtual disk to %s", requested_size,
                  instance=instance)
        vmdk_extend_task = self._session._call_method(
                self._session.vim,
                "ExtendVirtualDisk_Task",
                service_content.virtualDiskManager,
                name=name,
                datacenter=dc_ref,
                newCapacityKb=requested_size,
                eagerZero=False)
        try:
            self._session._wait_for_task(vmdk_extend_task)
        except Exception as e:
            with excutils.save_and_reraise_exception():
                LOG.error(_LE('Extending virtual disk failed with error: %s'),
                          e, instance=instance)
                # Clean up files created during the extend operation
                # (both the descriptor and its flat backing file).
                files = [name.replace(".vmdk", "-flat.vmdk"), name]
                for file in files:
                    ds_path = ds_obj.DatastorePath.parse(file)
                    self._delete_datastore_file(ds_path, dc_ref)

        LOG.debug("Extended root virtual disk", instance=instance)
    def _delete_datastore_file(self, datastore_path, dc_ref):
        """Best-effort delete of *datastore_path* in datacenter *dc_ref*.

        Concurrent deletions or an already-missing file are expected and
        only logged at debug level; any other fault propagates.
        """
        try:
            ds_util.file_delete(self._session, datastore_path, dc_ref)
        except (vexc.CannotDeleteFileException,
                vexc.FileFaultException,
                vexc.FileLockedException,
                vexc.FileNotFoundException):
            LOG.debug("Unable to delete %(ds)s. There may be more than "
                      "one process or thread trying to delete the file",
                      {'ds': datastore_path},
                      exc_info=True)
def _extend_if_required(self, dc_info, image_info, instance,
root_vmdk_path):
"""Increase the size of the root vmdk if necessary."""
if instance.root_gb * units.Gi > image_info.file_size:
size_in_kb = instance.root_gb * units.Mi
self._extend_virtual_disk(instance, size_in_kb,
root_vmdk_path, dc_info.ref)
    def _configure_config_drive(self, instance, vm_ref, dc_info, datastore,
                                injected_files, admin_password, network_info):
        """Build a config-drive ISO, upload it and attach it to the VM.

        The ISO is uploaded through the vCenter session (using its cookie
        jar for authentication) and then attached as a CD-ROM device.
        """
        session_vim = self._session.vim
        # Reuse the live session's cookies so the HTTP upload is authorized.
        cookies = session_vim.client.options.transport.cookiejar
        dc_path = vutil.get_inventory_path(session_vim, dc_info.ref)
        uploaded_iso_path = self._create_config_drive(instance,
                                                      injected_files,
                                                      admin_password,
                                                      network_info,
                                                      datastore.name,
                                                      dc_path,
                                                      instance.uuid,
                                                      cookies)
        uploaded_iso_path = datastore.build_path(uploaded_iso_path)
        self._attach_cdrom_to_vm(
            vm_ref, instance,
            datastore.ref,
            str(uploaded_iso_path))
def _get_instance_metadata(self, context, instance):
flavor = instance.flavor
return ('name:%s\n'
'userid:%s\n'
'username:%s\n'
'projectid:%s\n'
'projectname:%s\n'
'flavor:name:%s\n'
'flavor:memory_mb:%s\n'
'flavor:vcpus:%s\n'
'flavor:ephemeral_gb:%s\n'
'flavor:root_gb:%s\n'
'flavor:swap:%s\n'
'imageid:%s\n'
'package:%s\n') % (instance.display_name,
context.user_id,
context.user_name,
context.project_id,
context.project_name,
flavor.name,
flavor.memory_mb,
flavor.vcpus,
flavor.ephemeral_gb,
flavor.root_gb,
flavor.swap,
instance.image_ref,
version.version_string_with_package())
def _create_folders(self, parent_folder, folder_path):
folders = folder_path.split('/')
path_list = []
for folder in folders:
path_list.append(folder)
folder_path = '/'.join(path_list)
folder_ref = vm_util.folder_ref_cache_get(folder_path)
if not folder_ref:
folder_ref = vm_util.create_folder(self._session,
parent_folder,
folder)
vm_util.folder_ref_cache_update(folder_path, folder_ref)
parent_folder = folder_ref
return folder_ref
def _get_folder_name(self, name, id):
# Maximum folder length must be less than 80 characters.
# The 'id' length is 36. The maximum prefix for name is 40.
# We cannot truncate the 'id' as this is unique across OpenStack.
return '%s (%s)' % (name[:40], id[:36])
    def build_virtual_machine(self, instance, image_info,
                              dc_info, datastore, network_info, extra_specs,
                              metadata):
        """Create the (powered-off) VM and return its moref.

        The VM is placed under the 'OpenStack/<project folder>/Instances'
        folder hierarchy of the datacenter's vmFolder.
        """
        vif_infos = vmwarevif.get_vif_info(self._session,
                                           self._cluster,
                                           utils.is_neutron(),
                                           image_info.vif_model,
                                           network_info)

        # Resolve the storage profile only when one is requested.
        if extra_specs.storage_policy:
            profile_spec = vm_util.get_storage_profile_spec(
                self._session, extra_specs.storage_policy)
        else:
            profile_spec = None
        # Get the create vm config spec
        client_factory = self._session.vim.client.factory
        config_spec = vm_util.get_vm_create_spec(client_factory,
                                                 instance,
                                                 datastore.name,
                                                 vif_infos,
                                                 extra_specs,
                                                 image_info.os_type,
                                                 profile_spec=profile_spec,
                                                 metadata=metadata)

        folder_name = self._get_folder_name('Project',
                                            instance.project_id)
        folder_path = 'OpenStack/%s/Instances' % folder_name
        folder = self._create_folders(dc_info.vmFolder, folder_path)

        # Create the VM
        vm_ref = vm_util.create_vm(self._session, instance, folder,
                                   config_spec, self._root_resource_pool)
        return vm_ref
def _get_extra_specs(self, flavor, image_meta=None):
image_meta = image_meta or objects.ImageMeta.from_dict({})
extra_specs = vm_util.ExtraSpecs()
for resource in ['cpu', 'memory', 'disk_io', 'vif']:
for (key, type) in (('limit', int),
('reservation', int),
('shares_level', str),
('shares_share', int)):
value = flavor.extra_specs.get('quota:' + resource + '_' + key)
if value:
setattr(getattr(extra_specs, resource + '_limits'),
key, type(value))
extra_specs.cpu_limits.validate()
extra_specs.memory_limits.validate()
extra_specs.disk_io_limits.validate()
extra_specs.vif_limits.validate()
hw_version = flavor.extra_specs.get('vmware:hw_version')
extra_specs.hw_version = hw_version
if CONF.vmware.pbm_enabled:
storage_policy = flavor.extra_specs.get('vmware:storage_policy',
CONF.vmware.pbm_default_policy)
extra_specs.storage_policy = storage_policy
topology = hardware.get_best_cpu_topology(flavor, image_meta,
allow_threads=False)
extra_specs.cores_per_socket = topology.cores
return extra_specs
def _get_esx_host_and_cookies(self, datastore, dc_path, file_path):
hosts = datastore.get_connected_hosts(self._session)
host = ds_obj.Datastore.choose_host(hosts)
host_name = self._session._call_method(vutil, 'get_object_property',
host, 'name')
url = ds_obj.DatastoreURL('https', host_name, file_path, dc_path,
datastore.name)
cookie_header = url.get_transfer_ticket(self._session, 'PUT')
name, value = cookie_header.split('=')
# TODO(rgerganov): this is a hack to emulate cookiejar until we fix
# oslo.vmware to accept plain http headers
Cookie = collections.namedtuple('Cookie', ['name', 'value'])
return host_name, [Cookie(name, value)]
    def _fetch_vsphere_image(self, context, vi, image_ds_loc):
        """Fetch image which is located on a vSphere datastore.

        Performs a server-side datastore-to-datastore file copy instead of
        downloading the bits through the compute node.
        """
        location = vi.ii.vsphere_location
        LOG.debug("Using vSphere location: %s", location)

        LOG.debug("Copying image file data %(image_id)s to "
                  "%(file_path)s on the data store "
                  "%(datastore_name)s",
                  {'image_id': vi.ii.image_id,
                   'file_path': image_ds_loc,
                   'datastore_name': vi.datastore.name},
                  instance=vi.instance)

        # Resolve the source datacenter/datastore from the vsphere:// URL.
        location_url = ds_obj.DatastoreURL.urlparse(location)
        datacenter_path = location_url.datacenter_path
        datacenter_moref = ds_util.get_datacenter_ref(
            self._session, datacenter_path)

        datastore_name = location_url.datastore_name
        src_path = ds_obj.DatastorePath(datastore_name, location_url.path)
        ds_util.file_copy(
            self._session, str(src_path), datacenter_moref,
            str(image_ds_loc), vi.dc_info.ref)

        LOG.debug("Copied image file data %(image_id)s to "
                  "%(file_path)s on the data store "
                  "%(datastore_name)s",
                  {'image_id': vi.ii.image_id,
                   'file_path': image_ds_loc,
                   'datastore_name': vi.datastore.name},
                  instance=vi.instance)
    def _fetch_image_as_file(self, context, vi, image_ds_loc):
        """Download image as an individual file to host via HTTP PUT.

        Tries to upload directly to an ESX host with a transfer ticket;
        if that fails for any reason, falls back to uploading through
        vCenter using the session's cookie jar.
        """
        session = self._session

        LOG.debug("Downloading image file data %(image_id)s to "
                  "%(file_path)s on the data store "
                  "%(datastore_name)s",
                  {'image_id': vi.ii.image_id,
                   'file_path': image_ds_loc,
                   'datastore_name': vi.datastore.name},
                  instance=vi.instance)

        # try to get esx cookie to upload
        try:
            dc_path = 'ha-datacenter'
            host, cookies = self._get_esx_host_and_cookies(vi.datastore,
                dc_path, image_ds_loc.rel_path)
        except Exception as e:
            LOG.warning(_LW("Get esx cookies failed: %s"), e)
            # Fall back to vCenter: real inventory path plus session cookies.
            dc_path = vutil.get_inventory_path(session.vim, vi.dc_info.ref)

            host = self._session._host
            cookies = session.vim.client.options.transport.cookiejar

        images.fetch_image(
            context,
            vi.instance,
            host,
            session._port,
            dc_path,
            vi.datastore.name,
            image_ds_loc.rel_path,
            cookies=cookies)
    def _fetch_image_as_vapp(self, context, vi, image_ds_loc):
        """Download stream optimized image to host as a vApp."""
        # The directory of the imported disk is the unique name
        # of the VM use to import it with.
        vm_name = image_ds_loc.parent.basename

        LOG.debug("Downloading stream optimized image %(image_id)s to "
                  "%(file_path)s on the data store "
                  "%(datastore_name)s as vApp",
                  {'image_id': vi.ii.image_id,
                   'file_path': image_ds_loc,
                   'datastore_name': vi.datastore.name},
                  instance=vi.instance)

        image_size = images.fetch_image_stream_optimized(
            context,
            vi.instance,
            self._session,
            vm_name,
            vi.datastore.name,
            vi.dc_info.vmFolder,
            self._root_resource_pool)
        # The size of the image is different from the size of the virtual
        # disk. We want to use the latter. On vSAN this is the only way to
        # get this size because there is no VMDK descriptor.
        vi.ii.file_size = image_size
def _fetch_image_as_ova(self, context, vi, image_ds_loc):
"""Download root disk of an OVA image as streamOptimized."""
# The directory of the imported disk is the unique name
# of the VM use to import it with.
vm_name = image_ds_loc.parent.basename
image_size = images.fetch_image_ova(context,
vi.instance,
self._session,
vm_name,
vi.datastore.name,
vi.dc_info.vmFolder,
self._root_resource_pool)
# The size of the image is different from the size of the virtual disk.
# We want to use the latter. On vSAN this is the only way to get this
# size because there is no VMDK descriptor.
vi.ii.file_size = image_size
def _prepare_sparse_image(self, vi):
tmp_dir_loc = vi.datastore.build_path(
self._tmp_folder, uuidutils.generate_uuid())
tmp_image_ds_loc = tmp_dir_loc.join(
vi.ii.image_id, "tmp-sparse.vmdk")
ds_util.mkdir(self._session, tmp_image_ds_loc.parent, vi.dc_info.ref)
return tmp_dir_loc, tmp_image_ds_loc
def _prepare_flat_image(self, vi):
tmp_dir_loc = vi.datastore.build_path(
self._tmp_folder, uuidutils.generate_uuid())
tmp_image_ds_loc = tmp_dir_loc.join(
vi.ii.image_id, vi.cache_image_path.basename)
ds_util.mkdir(self._session, tmp_image_ds_loc.parent, vi.dc_info.ref)
vm_util.create_virtual_disk(
self._session, vi.dc_info.ref,
vi.ii.adapter_type,
vi.ii.disk_type,
str(tmp_image_ds_loc),
vi.ii.file_size_in_kb)
flat_vmdk_name = vi.cache_image_path.basename.replace('.vmdk',
'-flat.vmdk')
flat_vmdk_ds_loc = tmp_dir_loc.join(vi.ii.image_id, flat_vmdk_name)
self._delete_datastore_file(str(flat_vmdk_ds_loc), vi.dc_info.ref)
return tmp_dir_loc, flat_vmdk_ds_loc
def _prepare_stream_optimized_image(self, vi):
vm_name = "%s_%s" % (constants.IMAGE_VM_PREFIX,
uuidutils.generate_uuid())
tmp_dir_loc = vi.datastore.build_path(vm_name)
tmp_image_ds_loc = tmp_dir_loc.join("%s.vmdk" % tmp_dir_loc.basename)
return tmp_dir_loc, tmp_image_ds_loc
def _prepare_iso_image(self, vi):
tmp_dir_loc = vi.datastore.build_path(
self._tmp_folder, uuidutils.generate_uuid())
tmp_image_ds_loc = tmp_dir_loc.join(
vi.ii.image_id, vi.cache_image_path.basename)
return tmp_dir_loc, tmp_image_ds_loc
def _move_to_cache(self, dc_ref, src_folder_ds_path, dst_folder_ds_path):
try:
ds_util.file_move(self._session, dc_ref,
src_folder_ds_path, dst_folder_ds_path)
except vexc.FileAlreadyExistsException:
# Folder move has failed. This may be due to the fact that a
# process or thread has already completed the operation.
# Since image caching is synchronized, this can only happen
# due to action external to the process.
# In the event of a FileAlreadyExists we continue,
# all other exceptions will be raised.
LOG.warning(_LW("Destination %s already exists! Concurrent moves "
"can lead to unexpected results."),
dst_folder_ds_path)
    def _cache_sparse_image(self, vi, tmp_image_ds_loc):
        """Convert a downloaded sparse vmdk to flat and move it to cache.

        The sparse source is deleted after conversion; only the converted
        disk's folder is moved into the cache.
        """
        tmp_dir_loc = tmp_image_ds_loc.parent.parent
        converted_image_ds_loc = tmp_dir_loc.join(
            vi.ii.image_id, vi.cache_image_path.basename)
        # converts fetched image to preallocated disk
        vm_util.copy_virtual_disk(
            self._session,
            vi.dc_info.ref,
            str(tmp_image_ds_loc),
            str(converted_image_ds_loc))

        self._delete_datastore_file(str(tmp_image_ds_loc), vi.dc_info.ref)

        self._move_to_cache(vi.dc_info.ref,
                            tmp_image_ds_loc.parent,
                            vi.cache_image_folder)
def _cache_flat_image(self, vi, tmp_image_ds_loc):
self._move_to_cache(vi.dc_info.ref,
tmp_image_ds_loc.parent,
vi.cache_image_folder)
def _cache_stream_optimized_image(self, vi, tmp_image_ds_loc):
dst_path = vi.cache_image_folder.join("%s.vmdk" % vi.ii.image_id)
ds_util.mkdir(self._session, vi.cache_image_folder, vi.dc_info.ref)
try:
ds_util.disk_move(self._session, vi.dc_info.ref,
tmp_image_ds_loc, dst_path)
except vexc.FileAlreadyExistsException:
pass
def _cache_iso_image(self, vi, tmp_image_ds_loc):
self._move_to_cache(vi.dc_info.ref,
tmp_image_ds_loc.parent,
vi.cache_image_folder)
def _get_vm_config_info(self, instance, image_info,
extra_specs):
"""Captures all relevant information from the spawn parameters."""
if (instance.root_gb != 0 and
image_info.file_size > instance.root_gb * units.Gi):
reason = _("Image disk size greater than requested disk size")
raise exception.InstanceUnacceptable(instance_id=instance.uuid,
reason=reason)
allowed_ds_types = ds_util.get_allowed_datastore_types(
image_info.disk_type)
datastore = ds_util.get_datastore(self._session,
self._cluster,
self._datastore_regex,
extra_specs.storage_policy,
allowed_ds_types)
dc_info = self.get_datacenter_ref_and_name(datastore.ref)
return VirtualMachineInstanceConfigInfo(instance,
image_info,
datastore,
dc_info,
self._imagecache,
extra_specs)
def _get_image_callbacks(self, vi):
disk_type = vi.ii.disk_type
if vi.ii.is_ova:
image_fetch = self._fetch_image_as_ova
elif disk_type == constants.DISK_TYPE_STREAM_OPTIMIZED:
image_fetch = self._fetch_image_as_vapp
elif vi.ii.vsphere_location:
image_fetch = self._fetch_vsphere_image
else:
image_fetch = self._fetch_image_as_file
if vi.ii.is_iso:
image_prepare = self._prepare_iso_image
image_cache = self._cache_iso_image
elif disk_type == constants.DISK_TYPE_SPARSE:
image_prepare = self._prepare_sparse_image
image_cache = self._cache_sparse_image
elif disk_type == constants.DISK_TYPE_STREAM_OPTIMIZED:
image_prepare = self._prepare_stream_optimized_image
image_cache = self._cache_stream_optimized_image
elif disk_type in constants.SUPPORTED_FLAT_VARIANTS:
image_prepare = self._prepare_flat_image
image_cache = self._cache_flat_image
else:
reason = _("disk type '%s' not supported") % disk_type
raise exception.InvalidDiskInfo(reason=reason)
return image_prepare, image_fetch, image_cache
    def _fetch_image_if_missing(self, context, vi):
        """Populate the image cache for ``vi``'s image if not present.

        Serialized per cache path with a file lock so concurrent spawns of
        the same image on this host fetch it only once.
        """
        image_prepare, image_fetch, image_cache = self._get_image_callbacks(vi)
        LOG.debug("Processing image %s", vi.ii.image_id, instance=vi.instance)

        with lockutils.lock(str(vi.cache_image_path),
                            lock_file_prefix='nova-vmware-fetch_image'):
            self.check_cache_folder(vi.datastore.name, vi.datastore.ref)
            ds_browser = self._get_ds_browser(vi.datastore.ref)
            if not ds_util.file_exists(self._session, ds_browser,
                                       vi.cache_image_folder,
                                       vi.cache_image_path.basename):
                LOG.debug("Preparing fetch location", instance=vi.instance)
                tmp_dir_loc, tmp_image_ds_loc = image_prepare(vi)
                LOG.debug("Fetch image to %s", tmp_image_ds_loc,
                          instance=vi.instance)
                image_fetch(context, vi, tmp_image_ds_loc)
                LOG.debug("Caching image", instance=vi.instance)
                image_cache(vi, tmp_image_ds_loc)
                LOG.debug("Cleaning up location %s", str(tmp_dir_loc),
                          instance=vi.instance)
                self._delete_datastore_file(str(tmp_dir_loc), vi.dc_info.ref)

            # The size of the sparse image is different from the size of the
            # virtual disk. We want to use the latter.
            if vi.ii.disk_type == constants.DISK_TYPE_SPARSE:
                self._update_image_size(vi)
def _create_and_attach_thin_disk(self, instance, vm_ref, dc_info, size,
adapter_type, path):
disk_type = constants.DISK_TYPE_THIN
vm_util.create_virtual_disk(
self._session, dc_info.ref,
adapter_type,
disk_type,
path,
size)
self._volumeops.attach_disk_to_vm(
vm_ref, instance,
adapter_type, disk_type,
path, size, False)
    def _create_ephemeral(self, bdi, instance, vm_ref, dc_info,
                          datastore, folder, adapter_type):
        """Create and attach ephemeral disk(s) for the instance.

        Disks come from the block device info's ephemeral entries when
        present; otherwise a single disk is created from the flavor's
        ephemeral_gb.
        """
        ephemerals = None
        if bdi is not None:
            ephemerals = driver.block_device_info_get_ephemerals(bdi)
            for idx, eph in enumerate(ephemerals):
                size = eph['size'] * units.Mi
                # Per-disk bus override falls back to the VM's adapter type.
                at = eph.get('disk_bus') or adapter_type
                filename = vm_util.get_ephemeral_name(idx)
                path = str(ds_obj.DatastorePath(datastore.name, folder,
                                                filename))
                self._create_and_attach_thin_disk(instance, vm_ref, dc_info,
                                                  size, at, path)

        # There may be block devices defined but no ephemerals. In this case
        # we need to allocate an ephemeral disk if required
        if not ephemerals and instance.ephemeral_gb:
            size = instance.ephemeral_gb * units.Mi
            filename = vm_util.get_ephemeral_name(0)
            path = str(ds_obj.DatastorePath(datastore.name, folder,
                                            filename))
            self._create_and_attach_thin_disk(instance, vm_ref, dc_info, size,
                                              adapter_type, path)
    def _create_swap(self, bdi, instance, vm_ref, dc_info, datastore,
                     folder, adapter_type):
        """Create and attach a swap disk when requested.

        Prefers a usable swap entry from the block device info; otherwise
        falls back to the flavor's swap size.
        """
        swap = None
        filename = "swap.vmdk"
        path = str(ds_obj.DatastorePath(datastore.name, folder, filename))
        if bdi is not None:
            swap = driver.block_device_info_get_swap(bdi)
            if driver.swap_is_usable(swap):
                # swap_size is in MB; the disk helper expects KB.
                size = swap['swap_size'] * units.Ki
                self._create_and_attach_thin_disk(instance, vm_ref, dc_info,
                                                  size, adapter_type, path)
            else:
                # driver.block_device_info_get_swap returns
                # {'device_name': None, 'swap_size': 0} if swap is None
                # in block_device_info. If block_device_info does not contain
                # a swap device, we need to reset swap to None, so we can
                # extract the swap_size from the instance's flavor.
                swap = None

        size = instance.flavor.swap * units.Ki
        if not swap and size > 0:
            self._create_and_attach_thin_disk(instance, vm_ref, dc_info, size,
                                              adapter_type, path)
def _update_vnic_index(self, context, instance, network_info):
if network_info:
for index, vif in enumerate(network_info):
self._network_api.update_instance_vnic_index(
context, instance, vif, index)
def _update_image_size(self, vi):
"""Updates the file size of the specified image."""
# The size of the Glance image is different from the deployed VMDK
# size for sparse, streamOptimized and OVA images. We need to retrieve
# the size of the flat VMDK and update the file_size property of the
# image. This ensures that further operations involving size checks
# and disk resizing will work as expected.
ds_browser = self._get_ds_browser(vi.datastore.ref)
flat_file = "%s-flat.vmdk" % vi.ii.image_id
new_size = ds_util.file_size(self._session, ds_browser,
vi.cache_image_folder, flat_file)
if new_size is not None:
vi.ii.file_size = new_size
    def spawn(self, context, instance, image_meta, injected_files,
              admin_password, network_info, block_device_info=None):
        """Create and power on a new VM for ``instance``.

        Builds the VM shell, populates the image cache and clones/attaches
        the root disk, attaches mapped volumes, ephemeral/swap disks and an
        optional config drive, then renames and powers on the VM.
        """

        client_factory = self._session.vim.client.factory
        image_info = images.VMwareImage.from_image(context,
                                                   instance.image_ref,
                                                   image_meta)
        extra_specs = self._get_extra_specs(instance.flavor, image_meta)

        vi = self._get_vm_config_info(instance, image_info,
                                      extra_specs)

        metadata = self._get_instance_metadata(context, instance)
        # Creates the virtual machine. The virtual machine reference returned
        # is unique within Virtual Center.
        vm_ref = self.build_virtual_machine(instance,
                                            image_info,
                                            vi.dc_info,
                                            vi.datastore,
                                            network_info,
                                            extra_specs,
                                            metadata)

        # Cache the vm_ref. This saves a remote call to the VC. This uses the
        # instance uuid.
        vm_util.vm_ref_cache_update(instance.uuid, vm_ref)

        # Update the Neutron VNIC index
        self._update_vnic_index(context, instance, network_info)

        # Set the machine.id parameter of the instance to inject
        # the NIC configuration inside the VM
        if CONF.flat_injected:
            self._set_machine_id(client_factory, instance, network_info,
                                 vm_ref=vm_ref)

        # Set the vnc configuration of the instance, vnc port starts from 5900
        if CONF.vnc.enabled:
            self._get_and_set_vnc_config(client_factory, instance, vm_ref)

        block_device_mapping = []
        if block_device_info is not None:
            block_device_mapping = driver.block_device_info_get_mapping(
                block_device_info)

        if instance.image_ref:
            self._imagecache.enlist_image(
                    image_info.image_id, vi.datastore, vi.dc_info.ref)
            self._fetch_image_if_missing(context, vi)

            # Root disk strategy: attach the ISO, or clone from the cached
            # image either as a linked or a full clone.
            if image_info.is_iso:
                self._use_iso_image(vm_ref, vi)
            elif image_info.linked_clone:
                self._use_disk_image_as_linked_clone(vm_ref, vi)
            else:
                self._use_disk_image_as_full_clone(vm_ref, vi)

        if block_device_mapping:
            msg = "Block device information present: %s" % block_device_info
            # NOTE(mriedem): block_device_info can contain an auth_password
            # so we have to scrub the message before logging it.
            LOG.debug(strutils.mask_password(msg), instance=instance)

            # Before attempting to attach any volume, make sure the
            # block_device_mapping (i.e. disk_bus) is valid
            self._is_bdm_valid(block_device_mapping)

            for disk in block_device_mapping:
                connection_info = disk['connection_info']
                adapter_type = disk.get('disk_bus') or vi.ii.adapter_type

                # TODO(hartsocks): instance is unnecessary, remove it
                # we still use instance in many locations for no other purpose
                # than logging, can we simplify this?
                if disk.get('boot_index') == 0:
                    self._volumeops.attach_root_volume(connection_info,
                        instance, vi.datastore.ref, adapter_type)
                else:
                    self._volumeops.attach_volume(connection_info,
                                                  instance, adapter_type)

        # Create ephemeral disks
        self._create_ephemeral(block_device_info, instance, vm_ref,
                               vi.dc_info, vi.datastore, instance.uuid,
                               vi.ii.adapter_type)
        self._create_swap(block_device_info, instance, vm_ref, vi.dc_info,
                          vi.datastore, instance.uuid, vi.ii.adapter_type)

        if configdrive.required_by(instance):
            self._configure_config_drive(
                    instance, vm_ref, vi.dc_info, vi.datastore,
                    injected_files, admin_password, network_info)

        # Rename the VM. This is done after the spec is created to ensure
        # that all of the files for the instance are under the directory
        # 'uuid' of the instance
        vm_util.rename_vm(self._session, vm_ref, instance)

        vm_util.power_on_instance(self._session, instance, vm_ref=vm_ref)
def _is_bdm_valid(self, block_device_mapping):
"""Checks if the block device mapping is valid."""
valid_bus = (constants.DEFAULT_ADAPTER_TYPE,
constants.ADAPTER_TYPE_BUSLOGIC,
constants.ADAPTER_TYPE_IDE,
constants.ADAPTER_TYPE_LSILOGICSAS,
constants.ADAPTER_TYPE_PARAVIRTUAL)
for disk in block_device_mapping:
adapter_type = disk.get('disk_bus')
if (adapter_type is not None and adapter_type not in valid_bus):
raise exception.UnsupportedHardware(model=adapter_type,
virt="vmware")
    def _create_config_drive(self, instance, injected_files, admin_password,
                             network_info, data_store_name, dc_name,
                             upload_folder, cookies):
        """Build a config-drive ISO locally and upload it to the datastore.

        Returns the datastore-relative path of the uploaded ISO.
        Raises InstancePowerOnFailure for unsupported config drive formats.
        """
        if CONF.config_drive_format != 'iso9660':
            reason = (_('Invalid config_drive_format "%s"') %
                      CONF.config_drive_format)
            raise exception.InstancePowerOnFailure(reason=reason)

        LOG.info(_LI('Using config drive for instance'), instance=instance)
        extra_md = {}
        if admin_password:
            extra_md['admin_pass'] = admin_password

        inst_md = instance_metadata.InstanceMetadata(instance,
                                                     content=injected_files,
                                                     extra_md=extra_md,
                                                     network_info=network_info)
        try:
            with configdrive.ConfigDriveBuilder(instance_md=inst_md) as cdb:
                with utils.tempdir() as tmp_path:
                    # Build the ISO in a temp dir, then push it via HTTP.
                    tmp_file = os.path.join(tmp_path, 'configdrive.iso')
                    cdb.make_drive(tmp_file)
                    upload_iso_path = "%s/configdrive.iso" % (
                        upload_folder)
                    images.upload_iso_to_datastore(
                        tmp_file, instance,
                        host=self._session._host,
                        port=self._session._port,
                        data_center_name=dc_name,
                        datastore_name=data_store_name,
                        cookies=cookies,
                        file_path=upload_iso_path)
                    return upload_iso_path
        except Exception as e:
            with excutils.save_and_reraise_exception():
                LOG.error(_LE('Creating config drive failed with error: %s'),
                          e, instance=instance)
    def _attach_cdrom_to_vm(self, vm_ref, instance,
                            datastore, file_path):
        """Attach cdrom to VM by reconfiguration."""
        client_factory = self._session.vim.client.factory
        devices = self._session._call_method(vutil,
                                             "get_object_property",
                                             vm_ref,
                                             "config.hardware.device")
        # Find (or allocate) an IDE controller slot for the CD-ROM; a new
        # controller spec is returned only when one had to be created.
        (controller_key, unit_number,
         controller_spec) = vm_util.allocate_controller_key_and_unit_number(
                                                    client_factory,
                                                    devices,
                                                    constants.ADAPTER_TYPE_IDE)
        cdrom_attach_config_spec = vm_util.get_cdrom_attach_config_spec(
                                    client_factory, datastore, file_path,
                                    controller_key, unit_number)
        if controller_spec:
            cdrom_attach_config_spec.deviceChange.append(controller_spec)

        LOG.debug("Reconfiguring VM instance to attach cdrom %s",
                  file_path, instance=instance)
        vm_util.reconfigure_vm(self._session, vm_ref, cdrom_attach_config_spec)
        LOG.debug("Reconfigured VM instance to attach cdrom %s",
                  file_path, instance=instance)
    def _create_vm_snapshot(self, instance, vm_ref):
        """Take a quiesced, memoryless snapshot of the VM.

        Returns the moref of the created snapshot.
        """
        LOG.debug("Creating Snapshot of the VM instance", instance=instance)
        snapshot_task = self._session._call_method(
                    self._session.vim,
                    "CreateSnapshot_Task", vm_ref,
                    name="%s-snapshot" % instance.uuid,
                    description="Taking Snapshot of the VM",
                    memory=False,
                    quiesce=True)
        self._session._wait_for_task(snapshot_task)
        LOG.debug("Created Snapshot of the VM instance", instance=instance)
        # The snapshot moref is carried in the completed task's info.result.
        task_info = self._session._call_method(vutil,
                                               "get_object_property",
                                               snapshot_task,
                                               "info")
        snapshot = task_info.result
        return snapshot
@retry_if_task_in_progress
def _delete_vm_snapshot(self, instance, vm_ref, snapshot):
LOG.debug("Deleting Snapshot of the VM instance", instance=instance)
delete_snapshot_task = self._session._call_method(
self._session.vim,
"RemoveSnapshot_Task", snapshot,
removeChildren=False, consolidate=True)
self._session._wait_for_task(delete_snapshot_task)
LOG.debug("Deleted Snapshot of the VM instance", instance=instance)
    def _create_linked_clone_from_snapshot(self, instance,
                                           vm_ref, snapshot_ref, dc_info):
        """Create linked clone VM to be deployed to same ds as source VM

        Returns the moref of the cloned (template, powered-off) VM.
        """
        client_factory = self._session.vim.client.factory
        # createNewChildDiskBacking makes the clone's disks delta children
        # of the snapshot's disks rather than full copies.
        rel_spec = vm_util.relocate_vm_spec(
                client_factory,
                datastore=None,
                host=None,
                disk_move_type="createNewChildDiskBacking")
        clone_spec = vm_util.clone_vm_spec(client_factory, rel_spec,
                power_on=False, snapshot=snapshot_ref, template=True)
        vm_name = "%s_%s" % (constants.SNAPSHOT_VM_PREFIX,
                             uuidutils.generate_uuid())

        LOG.debug("Creating linked-clone VM from snapshot", instance=instance)
        vm_clone_task = self._session._call_method(
                                self._session.vim,
                                "CloneVM_Task",
                                vm_ref,
                                folder=dc_info.vmFolder,
                                name=vm_name,
                                spec=clone_spec)
        self._session._wait_for_task(vm_clone_task)
        LOG.info(_LI("Created linked-clone VM from snapshot"),
                 instance=instance)
        task_info = self._session._call_method(vutil,
                                               "get_object_property",
                                               vm_clone_task,
                                               "info")
        return task_info.result
    def snapshot(self, context, instance, image_id, update_task_state):
        """Create snapshot from a running VM instance.

        Steps followed are:

        1. Get the name of the vmdk file which the VM points to right now.
           Can be a chain of snapshots, so we need to know the last in the
           chain.
        2. Create the snapshot. A new vmdk is created which the VM points to
           now. The earlier vmdk becomes read-only.
        3. Creates a linked clone VM from the snapshot
        4. Exports the disk in the link clone VM as a streamOptimized disk.
        5. Delete the linked clone VM
        6. Deletes the snapshot in original instance.
        """
        vm_ref = vm_util.get_vm_ref(self._session, instance)

        def _get_vm_and_vmdk_attribs():
            # Get the vmdk info that the VM is pointing to
            vmdk = vm_util.get_vmdk_info(self._session, vm_ref,
                                         instance.uuid)
            if not vmdk.path:
                LOG.debug("No root disk defined. Unable to snapshot.",
                          instance=instance)
                raise error_util.NoRootDiskDefined()

            lst_properties = ["datastore", "summary.config.guestId"]
            props = self._session._call_method(vutil,
                                               "get_object_properties_dict",
                                               vm_ref,
                                               lst_properties)
            os_type = props['summary.config.guestId']
            datastores = props['datastore']
            return (vmdk, datastores, os_type)

        vmdk, datastores, os_type = _get_vm_and_vmdk_attribs()
        ds_ref = datastores.ManagedObjectReference[0]
        dc_info = self.get_datacenter_ref_and_name(ds_ref)

        update_task_state(task_state=task_states.IMAGE_PENDING_UPLOAD)

        # TODO(vui): convert to creating plain vm clone and uploading from it
        # instead of using live vm snapshot.

        snapshot_ref = self._create_vm_snapshot(instance, vm_ref)
        update_task_state(task_state=task_states.IMAGE_UPLOADING,
                          expected_state=task_states.IMAGE_PENDING_UPLOAD)
        snapshot_vm_ref = None

        try:
            # Create a temporary VM (linked clone from snapshot), then export
            # the VM's root disk to glance via HttpNfc API
            snapshot_vm_ref = self._create_linked_clone_from_snapshot(
                instance, vm_ref, snapshot_ref, dc_info)
            images.upload_image_stream_optimized(
                context, image_id, instance, self._session, vm=snapshot_vm_ref,
                vmdk_size=vmdk.capacity_in_bytes)
        finally:
            if snapshot_vm_ref:
                vm_util.destroy_vm(self._session, instance, snapshot_vm_ref)
            # Deleting the snapshot after destroying the temporary VM created
            # based on it allows the instance vm's disks to be consolidated.
            # TODO(vui) Add handling for when vmdk volume is attached.
            self._delete_vm_snapshot(instance, vm_ref, snapshot_ref)
    def reboot(self, instance, network_info, reboot_type="SOFT"):
        """Reboot a VM instance.

        A SOFT reboot asks the guest OS to restart (requires running VMware
        tools); otherwise the VM is hard reset.  Raises
        InstanceRebootFailure if the VM is not powered on.
        """
        vm_ref = vm_util.get_vm_ref(self._session, instance)
        lst_properties = ["summary.guest.toolsStatus", "runtime.powerState",
                          "summary.guest.toolsRunningStatus"]
        props = self._session._call_method(vutil,
                                           "get_object_properties_dict",
                                           vm_ref,
                                           lst_properties)
        pwr_state = props['runtime.powerState']
        tools_status = props['summary.guest.toolsStatus']
        tools_running_status = props['summary.guest.toolsRunningStatus']

        # Raise an exception if the VM is not powered On.
        if pwr_state not in ["poweredOn"]:
            reason = _("instance is not powered on")
            raise exception.InstanceRebootFailure(reason=reason)

        # If latest vmware tools are installed in the VM, and that the tools
        # are running, then only do a guest reboot. Otherwise do a hard reset.
        if (tools_status == "toolsOk" and
                tools_running_status == "guestToolsRunning" and
                reboot_type == "SOFT"):
            LOG.debug("Rebooting guest OS of VM", instance=instance)
            self._session._call_method(self._session.vim, "RebootGuest",
                                       vm_ref)
            LOG.debug("Rebooted guest OS of VM", instance=instance)
        else:
            LOG.debug("Doing hard reboot of VM", instance=instance)
            reset_task = self._session._call_method(self._session.vim,
                                                    "ResetVM_Task", vm_ref)
            self._session._wait_for_task(reset_task)
            LOG.debug("Did hard reboot of VM", instance=instance)
    def _destroy_instance(self, instance, destroy_disks=True):
        # Destroy a VM instance
        # Best-effort: every failure is logged and swallowed so the vm_ref
        # cache entry is always removed in the finally block.
        try:
            vm_ref = vm_util.get_vm_ref(self._session, instance)
            lst_properties = ["config.files.vmPathName", "runtime.powerState",
                              "datastore"]
            props = self._session._call_method(vutil,
                                               "get_object_properties_dict",
                                               vm_ref,
                                               lst_properties)
            pwr_state = props['runtime.powerState']

            vm_config_pathname = props.get('config.files.vmPathName')
            vm_ds_path = None
            if vm_config_pathname is not None:
                vm_ds_path = ds_obj.DatastorePath.parse(
                        vm_config_pathname)

            # Power off the VM if it is in PoweredOn state.
            if pwr_state == "poweredOn":
                vm_util.power_off_instance(self._session, instance, vm_ref)

            # Un-register the VM
            try:
                LOG.debug("Unregistering the VM", instance=instance)
                self._session._call_method(self._session.vim,
                                           "UnregisterVM", vm_ref)
                LOG.debug("Unregistered the VM", instance=instance)
            except Exception as excep:
                LOG.warning(_LW("In vmwareapi:vmops:_destroy_instance, got "
                                "this exception while un-registering the VM: "
                                "%s"), excep)

            # Delete the folder holding the VM related content on
            # the datastore.
            if destroy_disks and vm_ds_path:
                try:
                    dir_ds_compliant_path = vm_ds_path.parent
                    LOG.debug("Deleting contents of the VM from "
                              "datastore %(datastore_name)s",
                              {'datastore_name': vm_ds_path.datastore},
                              instance=instance)
                    ds_ref_ret = props['datastore']
                    ds_ref = ds_ref_ret.ManagedObjectReference[0]
                    dc_info = self.get_datacenter_ref_and_name(ds_ref)
                    ds_util.file_delete(self._session,
                                        dir_ds_compliant_path,
                                        dc_info.ref)
                    LOG.debug("Deleted contents of the VM from "
                              "datastore %(datastore_name)s",
                              {'datastore_name': vm_ds_path.datastore},
                              instance=instance)
                except Exception:
                    LOG.warning(_LW("In vmwareapi:vmops:_destroy_instance, "
                                    "exception while deleting the VM contents "
                                    "from the disk"), exc_info=True)
        except exception.InstanceNotFound:
            LOG.warning(_LW('Instance does not exist on backend'),
                        instance=instance)
        except Exception:
            LOG.exception(_LE('Destroy instance failed'),
                          instance=instance)
        finally:
            vm_util.vm_ref_cache_delete(instance.uuid)
def destroy(self, instance, destroy_disks=True):
"""Destroy a VM instance.
Steps followed for each VM are:
1. Power off, if it is in poweredOn state.
2. Un-register.
3. Delete the contents of the folder holding the VM related data.
"""
LOG.debug("Destroying instance", instance=instance)
self._destroy_instance(instance, destroy_disks=destroy_disks)
LOG.debug("Instance destroyed", instance=instance)
def pause(self, instance):
msg = _("pause not supported for vmwareapi")
raise NotImplementedError(msg)
def unpause(self, instance):
msg = _("unpause not supported for vmwareapi")
raise NotImplementedError(msg)
    def suspend(self, instance):
        """Suspend the specified instance.

        Only powered-on VMs are suspended; a powered-off VM raises
        InstanceSuspendFailure and an already-suspended VM is a no-op.
        """
        vm_ref = vm_util.get_vm_ref(self._session, instance)
        pwr_state = self._session._call_method(vutil,
                                               "get_object_property",
                                               vm_ref,
                                               "runtime.powerState")
        # Only PoweredOn VMs can be suspended.
        if pwr_state == "poweredOn":
            LOG.debug("Suspending the VM", instance=instance)
            suspend_task = self._session._call_method(self._session.vim,
                                                      "SuspendVM_Task", vm_ref)
            self._session._wait_for_task(suspend_task)
            LOG.debug("Suspended the VM", instance=instance)
        # Raise Exception if VM is poweredOff
        elif pwr_state == "poweredOff":
            reason = _("instance is powered off and cannot be suspended.")
            raise exception.InstanceSuspendFailure(reason=reason)
        else:
            LOG.debug("VM was already in suspended state. So returning "
                      "without doing anything", instance=instance)
    def resume(self, instance):
        """Resume the specified instance.

        :param instance: nova.objects.instance.Instance
        :raises: InstanceResumeFailure when the VM is not suspended
        """
        vm_ref = vm_util.get_vm_ref(self._session, instance)
        pwr_state = self._session._call_method(vutil,
                                               "get_object_property",
                                               vm_ref,
                                               "runtime.powerState")
        # NOTE(review): comparison is case-insensitive here, unlike
        # suspend() which compares the raw power-state string.
        if pwr_state.lower() == "suspended":
            LOG.debug("Resuming the VM", instance=instance)
            # PowerOnVM_Task resumes a suspended VM as well as powering on.
            suspend_task = self._session._call_method(
                                        self._session.vim,
                                        "PowerOnVM_Task", vm_ref)
            self._session._wait_for_task(suspend_task)
            LOG.debug("Resumed the VM", instance=instance)
        else:
            reason = _("instance is not in a suspended state")
            raise exception.InstanceResumeFailure(reason=reason)
def _get_rescue_device(self, instance, vm_ref):
hardware_devices = self._session._call_method(vutil,
"get_object_property",
vm_ref,
"config.hardware.device")
return vm_util.find_rescue_device(hardware_devices,
instance)
    def rescue(self, context, instance, network_info, image_meta):
        """Rescue the specified instance.

        Attach the image that the instance was created from and boot from it.

        :param context: request context
        :param instance: nova.objects.instance.Instance to rescue
        :param network_info: network details for the instance (unused here)
        :param image_meta: metadata of the image the instance was built from
        """
        vm_ref = vm_util.get_vm_ref(self._session, instance)
        # Get the root disk vmdk object
        vmdk = vm_util.get_vmdk_info(self._session, vm_ref,
                                     uuid=instance.uuid)
        ds_ref = vmdk.device.backing.datastore
        datastore = ds_obj.get_datastore_by_ref(self._session, ds_ref)
        dc_info = self.get_datacenter_ref_and_name(datastore.ref)
        # Get the image details of the instance
        image_info = images.VMwareImage.from_image(context,
                                                   image_meta.id,
                                                   image_meta)
        vi = VirtualMachineInstanceConfigInfo(instance,
                                              image_info,
                                              datastore,
                                              dc_info,
                                              self._imagecache)
        vm_util.power_off_instance(self._session, instance, vm_ref)
        # Fetch the image if it does not exist in the cache
        self._fetch_image_if_missing(context, vi)
        # Get the rescue disk path
        rescue_disk_path = datastore.build_path(instance.uuid,
                "%s-rescue.%s" % (image_info.image_id, image_info.file_type))
        # Copy the cached image to be the rescue disk. This will be used
        # as the rescue disk for the instance.
        ds_util.disk_copy(self._session, dc_info.ref,
                          vi.cache_image_path, rescue_disk_path)
        # Attach the rescue disk to the instance
        self._volumeops.attach_disk_to_vm(vm_ref, instance, vmdk.adapter_type,
                                          vmdk.disk_type, rescue_disk_path)
        # Get the rescue device and configure the boot order to
        # boot from this device
        rescue_device = self._get_rescue_device(instance, vm_ref)
        factory = self._session.vim.client.factory
        boot_spec = vm_util.get_vm_boot_spec(factory, rescue_device)
        # Update the VM with the new boot order and power on
        vm_util.reconfigure_vm(self._session, vm_ref, boot_spec)
        vm_util.power_on_instance(self._session, instance, vm_ref=vm_ref)
    def unrescue(self, instance, power_on=True):
        """Unrescue the specified instance.

        Detaches (and destroys) the rescue disk and optionally powers the
        instance back on.

        :param instance: nova.objects.instance.Instance
        :param power_on: whether to power the instance on again afterwards
        """
        vm_ref = vm_util.get_vm_ref(self._session, instance)
        # Get the rescue device and detach it from the instance.
        try:
            rescue_device = self._get_rescue_device(instance, vm_ref)
        except exception.NotFound:
            # Log and re-raise: the caller decides how to handle a missing
            # rescue disk.
            with excutils.save_and_reraise_exception():
                LOG.error(_LE('Unable to access the rescue disk'),
                          instance=instance)
        vm_util.power_off_instance(self._session, instance, vm_ref)
        self._volumeops.detach_disk_from_vm(vm_ref, instance, rescue_device,
                                            destroy_disk=True)
        if power_on:
            vm_util.power_on_instance(self._session, instance, vm_ref=vm_ref)
    def power_off(self, instance):
        """Power off the specified instance.

        :param instance: nova.objects.instance.Instance
        """
        vm_util.power_off_instance(self._session, instance)
    def power_on(self, instance):
        """Power on the specified instance.

        :param instance: nova.objects.instance.Instance
        """
        vm_util.power_on_instance(self._session, instance)
    def _update_instance_progress(self, context, instance, step, total_steps):
        """Update instance progress percent to reflect current step number

        :param context: request context (currently unused)
        :param instance: nova.objects.instance.Instance being updated
        :param step: number of the step just completed
        :param total_steps: total number of steps in the workflow
        """
        # Divide the action's workflow into discrete steps and "bump" the
        # instance's progress field as each step is completed.
        #
        # For a first cut this should be fine, however, for large VM images,
        # the clone disk step begins to dominate the equation. A
        # better approximation would use the percentage of the VM image that
        # has been streamed to the destination host.
        progress = round(float(step) / total_steps * 100)
        instance_uuid = instance.uuid
        LOG.debug("Updating instance '%(instance_uuid)s' progress to"
                  " %(progress)d",
                  {'instance_uuid': instance_uuid, 'progress': progress},
                  instance=instance)
        instance.progress = progress
        # Persist the updated progress to the database.
        instance.save()
    def _resize_vm(self, context, instance, vm_ref, flavor, image_meta):
        """Resizes the VM according to the flavor.

        Reconfigures vCPUs, memory and extra-spec driven settings; the root
        disk is resized separately by _resize_disk().
        """
        client_factory = self._session.vim.client.factory
        extra_specs = self._get_extra_specs(flavor, image_meta)
        metadata = self._get_instance_metadata(context, instance)
        vm_resize_spec = vm_util.get_vm_resize_spec(client_factory,
                                                    int(flavor.vcpus),
                                                    int(flavor.memory_mb),
                                                    extra_specs,
                                                    metadata=metadata)
        vm_util.reconfigure_vm(self._session, vm_ref, vm_resize_spec)
def _resize_disk(self, instance, vm_ref, vmdk, flavor):
if (flavor.root_gb > instance.root_gb and
flavor.root_gb > vmdk.capacity_in_bytes / units.Gi):
root_disk_in_kb = flavor.root_gb * units.Mi
ds_ref = vmdk.device.backing.datastore
dc_info = self.get_datacenter_ref_and_name(ds_ref)
folder = ds_obj.DatastorePath.parse(vmdk.path).dirname
datastore = ds_obj.DatastorePath.parse(vmdk.path).datastore
resized_disk = str(ds_obj.DatastorePath(datastore, folder,
'resized.vmdk'))
ds_util.disk_copy(self._session, dc_info.ref, vmdk.path,
str(resized_disk))
self._extend_virtual_disk(instance, root_disk_in_kb, resized_disk,
dc_info.ref)
self._volumeops.detach_disk_from_vm(vm_ref, instance, vmdk.device)
original_disk = str(ds_obj.DatastorePath(datastore, folder,
'original.vmdk'))
ds_util.disk_move(self._session, dc_info.ref, vmdk.path,
original_disk)
ds_util.disk_move(self._session, dc_info.ref, resized_disk,
vmdk.path)
self._volumeops.attach_disk_to_vm(vm_ref, instance,
vmdk.adapter_type,
vmdk.disk_type, vmdk.path)
def _remove_ephemerals_and_swap(self, vm_ref):
devices = vm_util.get_ephemerals(self._session, vm_ref)
swap = vm_util.get_swap(self._session, vm_ref)
if swap is not None:
devices.append(swap)
if devices:
vm_util.detach_devices_from_vm(self._session, vm_ref, devices)
    def _resize_create_ephemerals_and_swap(self, vm_ref, instance,
                                           block_device_info):
        """Recreate ephemeral and swap disks next to the root disk.

        No-op (with a debug log) when the VM has no root disk attached.
        """
        vmdk = vm_util.get_vmdk_info(self._session, vm_ref,
                                     uuid=instance.uuid)
        if not vmdk.device:
            LOG.debug("No root disk attached!", instance=instance)
            return
        ds_ref = vmdk.device.backing.datastore
        datastore = ds_obj.get_datastore_by_ref(self._session, ds_ref)
        dc_info = self.get_datacenter_ref_and_name(ds_ref)
        # Place new disks in the same datastore folder as the root disk.
        folder = ds_obj.DatastorePath.parse(vmdk.path).dirname
        self._create_ephemeral(block_device_info, instance, vm_ref,
                               dc_info, datastore, folder, vmdk.adapter_type)
        self._create_swap(block_device_info, instance, vm_ref, dc_info,
                          datastore, folder, vmdk.adapter_type)
    def migrate_disk_and_power_off(self, context, instance, dest,
                                   flavor):
        """Transfers the disk of a running instance in multiple phases, turning
        off the instance before the end.

        :param context: request context
        :param instance: nova.objects.instance.Instance being migrated
        :param dest: destination host (not handled yet, see TODO below)
        :param flavor: target flavor
        :raises: InstanceFaultRollback when the resize would shrink the disk
        """
        vm_ref = vm_util.get_vm_ref(self._session, instance)
        vmdk = vm_util.get_vmdk_info(self._session, vm_ref,
                                     uuid=instance.uuid)
        # Checks if the migration needs a disk resize down.
        if (flavor.root_gb < instance.root_gb or
            (flavor.root_gb != 0 and
             flavor.root_gb < vmdk.capacity_in_bytes / units.Gi)):
            reason = _("Unable to shrink disk.")
            raise exception.InstanceFaultRollback(
                exception.ResizeError(reason=reason))
        # TODO(garyk): treat dest parameter. Migration needs to be treated.
        # 0. Zero out the progress to begin
        self._update_instance_progress(context, instance,
                                       step=0,
                                       total_steps=RESIZE_TOTAL_STEPS)
        # 1. Power off the instance
        vm_util.power_off_instance(self._session, instance, vm_ref)
        self._update_instance_progress(context, instance,
                                       step=1,
                                       total_steps=RESIZE_TOTAL_STEPS)
        # 2. Reconfigure the VM properties
        self._resize_vm(context, instance, vm_ref, flavor, instance.image_meta)
        self._update_instance_progress(context, instance,
                                       step=2,
                                       total_steps=RESIZE_TOTAL_STEPS)
        # 3.Reconfigure the disk properties
        self._resize_disk(instance, vm_ref, vmdk, flavor)
        self._update_instance_progress(context, instance,
                                       step=3,
                                       total_steps=RESIZE_TOTAL_STEPS)
        # 4. Purge ephemeral and swap disks
        self._remove_ephemerals_and_swap(vm_ref)
        # Steps 5 and 6 are completed later by finish_migration().
        self._update_instance_progress(context, instance,
                                       step=4,
                                       total_steps=RESIZE_TOTAL_STEPS)
    def confirm_migration(self, migration, instance, network_info):
        """Confirms a resize, destroying the source VM.

        Deletes the 'original.vmdk' backup kept by _resize_disk(); once it
        is gone the resize can no longer be reverted.
        """
        vm_ref = vm_util.get_vm_ref(self._session, instance)
        vmdk = vm_util.get_vmdk_info(self._session, vm_ref,
                                     uuid=instance.uuid)
        if not vmdk.device:
            return
        ds_ref = vmdk.device.backing.datastore
        dc_info = self.get_datacenter_ref_and_name(ds_ref)
        folder = ds_obj.DatastorePath.parse(vmdk.path).dirname
        datastore = ds_obj.DatastorePath.parse(vmdk.path).datastore
        original_disk = ds_obj.DatastorePath(datastore, folder,
                                             'original.vmdk')
        ds_browser = self._get_ds_browser(ds_ref)
        # Only delete the backup if it actually exists on the datastore.
        if ds_util.file_exists(self._session, ds_browser,
                               original_disk.parent,
                               original_disk.basename):
            ds_util.disk_delete(self._session, dc_info.ref,
                                str(original_disk))
    def _revert_migration_update_disks(self, vm_ref, instance, vmdk,
                                       block_device_info):
        """Restore the pre-resize root disk and rebuild ephemeral/swap disks.

        If the 'original.vmdk' backup created by _resize_disk() still
        exists, the resized root disk is discarded and the backup is moved
        back into place before being re-attached.
        """
        ds_ref = vmdk.device.backing.datastore
        dc_info = self.get_datacenter_ref_and_name(ds_ref)
        folder = ds_obj.DatastorePath.parse(vmdk.path).dirname
        datastore = ds_obj.DatastorePath.parse(vmdk.path).datastore
        original_disk = ds_obj.DatastorePath(datastore, folder,
                                             'original.vmdk')
        ds_browser = self._get_ds_browser(ds_ref)
        if ds_util.file_exists(self._session, ds_browser,
                               original_disk.parent,
                               original_disk.basename):
            self._volumeops.detach_disk_from_vm(vm_ref, instance,
                                                vmdk.device)
            # Drop the resized disk and put the original one back.
            ds_util.disk_delete(self._session, dc_info.ref, vmdk.path)
            ds_util.disk_move(self._session, dc_info.ref,
                              str(original_disk), vmdk.path)
            self._volumeops.attach_disk_to_vm(vm_ref, instance,
                                              vmdk.adapter_type,
                                              vmdk.disk_type, vmdk.path)
        # Reconfigure ephemerals
        self._remove_ephemerals_and_swap(vm_ref)
        self._resize_create_ephemerals_and_swap(vm_ref, instance,
                                                block_device_info)
    def finish_revert_migration(self, context, instance, network_info,
                                block_device_info, power_on=True):
        """Finish reverting a resize.

        Powers the VM off, restores the original vCPU/memory configuration
        and root disk, and optionally powers the VM back on.
        """
        vm_ref = vm_util.get_vm_ref(self._session, instance)
        # Ensure that the VM is off
        vm_util.power_off_instance(self._session, instance, vm_ref)
        client_factory = self._session.vim.client.factory
        # Reconfigure the VM properties
        extra_specs = self._get_extra_specs(instance.flavor,
                                            instance.image_meta)
        metadata = self._get_instance_metadata(context, instance)
        vm_resize_spec = vm_util.get_vm_resize_spec(client_factory,
                                                    int(instance.vcpus),
                                                    int(instance.memory_mb),
                                                    extra_specs,
                                                    metadata=metadata)
        vm_util.reconfigure_vm(self._session, vm_ref, vm_resize_spec)
        vmdk = vm_util.get_vmdk_info(self._session, vm_ref,
                                     uuid=instance.uuid)
        # Restore the original disks only when a root disk is attached.
        if vmdk.device:
            self._revert_migration_update_disks(vm_ref, instance, vmdk,
                                                block_device_info)
        if power_on:
            vm_util.power_on_instance(self._session, instance)
    def finish_migration(self, context, migration, instance, disk_info,
                         network_info, image_meta, resize_instance=False,
                         block_device_info=None, power_on=True):
        """Completes a resize, turning on the migrated instance.

        Continues the step numbering started in migrate_disk_and_power_off
        (steps 5 and 6 of RESIZE_TOTAL_STEPS).
        """
        vm_ref = vm_util.get_vm_ref(self._session, instance)
        # 5. Update ephemerals if necessary
        self._resize_create_ephemerals_and_swap(vm_ref, instance,
                                                block_device_info)
        self._update_instance_progress(context, instance,
                                       step=5,
                                       total_steps=RESIZE_TOTAL_STEPS)
        # 6. Start VM
        if power_on:
            vm_util.power_on_instance(self._session, instance, vm_ref=vm_ref)
        self._update_instance_progress(context, instance,
                                       step=6,
                                       total_steps=RESIZE_TOTAL_STEPS)
def poll_rebooting_instances(self, timeout, instances):
"""Poll for rebooting instances."""
ctxt = nova_context.get_admin_context()
instances_info = dict(instance_count=len(instances),
timeout=timeout)
if instances_info["instance_count"] > 0:
LOG.info(_LI("Found %(instance_count)d hung reboots "
"older than %(timeout)d seconds"), instances_info)
for instance in instances:
LOG.info(_LI("Automatically hard rebooting"), instance=instance)
self.compute_api.reboot(ctxt, instance, "HARD")
    def get_info(self, instance):
        """Return data about the VM instance.

        :returns: hardware.InstanceInfo with power state, memory and vCPUs
        :raises: InstanceNotFound when the VM no longer exists in vCenter
        """
        vm_ref = vm_util.get_vm_ref(self._session, instance)
        lst_properties = ["summary.config.numCpu",
                          "summary.config.memorySizeMB",
                          "runtime.powerState"]
        try:
            vm_props = self._session._call_method(vutil,
                                                  "get_object_properties_dict",
                                                  vm_ref,
                                                  lst_properties)
        except vexc.ManagedObjectNotFoundException:
            raise exception.InstanceNotFound(instance_id=instance.uuid)
        # memorySizeMB is reported in MB; InstanceInfo expects KB.
        max_mem = int(vm_props.get('summary.config.memorySizeMB', 0)) * 1024
        num_cpu = int(vm_props.get('summary.config.numCpu', 0))
        return hardware.InstanceInfo(
            state=VMWARE_POWER_STATES[vm_props['runtime.powerState']],
            max_mem_kb=max_mem,
            mem_kb=max_mem,
            num_cpu=num_cpu)
def _get_diagnostics(self, instance):
"""Return data about VM diagnostics."""
vm_ref = vm_util.get_vm_ref(self._session, instance)
lst_properties = ["summary.config",
"summary.quickStats",
"summary.runtime"]
vm_props = self._session._call_method(vutil,
"get_object_properties_dict",
vm_ref,
lst_properties)
data = {}
# All of values received are objects. Convert them to dictionaries
for value in vm_props.values():
prop_dict = vim_util.object_to_dict(value, list_depth=1)
data.update(prop_dict)
return data
def get_diagnostics(self, instance):
"""Return data about VM diagnostics."""
data = self._get_diagnostics(instance)
# Add a namespace to all of the diagnostsics
return {'vmware:' + k: v for k, v in data.items()}
    def get_instance_diagnostics(self, instance):
        """Return data about VM diagnostics.

        Unlike get_diagnostics() this returns a structured
        diagnostics.Diagnostics object rather than a raw dict.
        """
        data = self._get_diagnostics(instance)
        state = data.get('powerState')
        if state:
            # Map the vSphere power state to the nova power_state constant.
            state = power_state.STATE_MAP[VMWARE_POWER_STATES[state]]
        uptime = data.get('uptimeSeconds', 0)
        config_drive = configdrive.required_by(instance)
        diags = diagnostics.Diagnostics(state=state,
                                        driver='vmwareapi',
                                        config_drive=config_drive,
                                        hypervisor_os='esxi',
                                        uptime=uptime)
        diags.memory_details.maximum = data.get('memorySizeMB', 0)
        diags.memory_details.used = data.get('guestMemoryUsage', 0)
        # TODO(garyk): add in cpu, nic and disk stats
        return diags
    def _get_vnc_console_connection(self, instance):
        """Return connection info for a vnc console.

        :raises: ConsoleTypeUnavailable when no VNC port is present in the
            VM's extra config.
        """
        vm_ref = vm_util.get_vm_ref(self._session, instance)
        opt_value = self._session._call_method(vutil,
                                               'get_object_property',
                                               vm_ref,
                                               vm_util.VNC_CONFIG_KEY)
        if opt_value:
            port = int(opt_value.value)
        else:
            raise exception.ConsoleTypeUnavailable(console_type='vnc')
        return {'port': port,
                'internal_access_path': None}
@staticmethod
def _get_machine_id_str(network_info):
machine_id_str = ''
for vif in network_info:
# TODO(vish): add support for dns2
# TODO(sateesh): add support for injection of ipv6 configuration
network = vif['network']
ip_v4 = netmask_v4 = gateway_v4 = broadcast_v4 = dns = None
subnets_v4 = [s for s in network['subnets'] if s['version'] == 4]
if len(subnets_v4) > 0:
if len(subnets_v4[0]['ips']) > 0:
ip_v4 = subnets_v4[0]['ips'][0]
if len(subnets_v4[0]['dns']) > 0:
dns = subnets_v4[0]['dns'][0]['address']
netmask_v4 = str(subnets_v4[0].as_netaddr().netmask)
gateway_v4 = subnets_v4[0]['gateway']['address']
broadcast_v4 = str(subnets_v4[0].as_netaddr().broadcast)
interface_str = ";".join([vif['address'],
ip_v4 and ip_v4['address'] or '',
netmask_v4 or '',
gateway_v4 or '',
broadcast_v4 or '',
dns or ''])
machine_id_str = machine_id_str + interface_str + '#'
return machine_id_str
    def _set_machine_id(self, client_factory, instance, network_info,
                        vm_ref=None):
        """Set the machine id of the VM for guest tools to pick up
        and reconfigure the network interfaces.

        :param client_factory: vim client factory used to build the spec
        :param instance: nova.objects.instance.Instance
        :param network_info: network model serialized by
            _get_machine_id_str()
        :param vm_ref: optional VM reference; looked up when not given
        """
        if vm_ref is None:
            vm_ref = vm_util.get_vm_ref(self._session, instance)
        machine_id_change_spec = vm_util.get_machine_id_change_spec(
                                 client_factory,
                                 self._get_machine_id_str(network_info))
        LOG.debug("Reconfiguring VM instance to set the machine id",
                  instance=instance)
        vm_util.reconfigure_vm(self._session, vm_ref, machine_id_change_spec)
        LOG.debug("Reconfigured VM instance to set the machine id",
                  instance=instance)
    @utils.synchronized('vmware.get_and_set_vnc_port')
    def _get_and_set_vnc_config(self, client_factory, instance, vm_ref):
        """Set the vnc configuration of the VM.

        Serialized via the synchronized decorator so two instances cannot
        be handed the same free VNC port concurrently.
        """
        port = vm_util.get_vnc_port(self._session)
        vnc_config_spec = vm_util.get_vnc_config_spec(
                                      client_factory, port)
        LOG.debug("Reconfiguring VM instance to enable vnc on "
                  "port - %(port)s", {'port': port},
                  instance=instance)
        vm_util.reconfigure_vm(self._session, vm_ref, vnc_config_spec)
        LOG.debug("Reconfigured VM instance to enable vnc on "
                  "port - %(port)s", {'port': port},
                  instance=instance)
def _get_ds_browser(self, ds_ref):
ds_browser = self._datastore_browser_mapping.get(ds_ref.value)
if not ds_browser:
ds_browser = self._session._call_method(vutil,
"get_object_property",
ds_ref,
"browser")
self._datastore_browser_mapping[ds_ref.value] = ds_browser
return ds_browser
def _get_host_ref_from_name(self, host_name):
"""Get reference to the host with the name specified."""
host_objs = self._session._call_method(vim_util, "get_objects",
"HostSystem", ["name"])
vm_util._cancel_retrieve_if_necessary(self._session, host_objs)
for host in host_objs:
if hasattr(host, 'propSet'):
if host.propSet[0].val == host_name:
return host.obj
return None
    def _create_folder_if_missing(self, ds_name, ds_ref, folder):
        """Create a folder if it does not exist.

        Currently there are two folders that are required on the datastore
         - base folder - the folder to store cached images
         - temp folder - the folder used for snapshot management and
                         image uploading
        This method is aimed to be used for the management of those
        folders to ensure that they are created if they are missing.
        The ds_util method mkdir will be used to check if the folder
        exists. If this throws an exception 'FileAlreadyExistsException'
        then the folder already exists on the datastore.
        """
        path = ds_obj.DatastorePath(ds_name, folder)
        dc_info = self.get_datacenter_ref_and_name(ds_ref)
        try:
            ds_util.mkdir(self._session, path, dc_info.ref)
            LOG.debug("Folder %s created.", path)
        except vexc.FileAlreadyExistsException:
            # NOTE(hartsocks): if the folder already exists, that
            # just means the folder was prepped by another process.
            pass
    def check_cache_folder(self, ds_name, ds_ref):
        """Check that the cache folder exists."""
        # Creates the folder when it is missing.
        self._create_folder_if_missing(ds_name, ds_ref, self._base_folder)
    def check_temp_folder(self, ds_name, ds_ref):
        """Check that the temp folder exists."""
        # Creates the folder when it is missing.
        self._create_folder_if_missing(ds_name, ds_ref, self._tmp_folder)
    def inject_network_info(self, instance, network_info):
        """Inject network info for the specified instance."""
        # Set the machine.id parameter of the instance to inject
        # the NIC configuration inside the VM
        client_factory = self._session.vim.client.factory
        self._set_machine_id(client_factory, instance, network_info)
def manage_image_cache(self, context, instances):
if not CONF.remove_unused_base_images:
LOG.debug("Image aging disabled. Aging will not be done.")
return
datastores = ds_util.get_available_datastores(self._session,
self._cluster,
self._datastore_regex)
datastores_info = []
for ds in datastores:
dc_info = self.get_datacenter_ref_and_name(ds.ref)
datastores_info.append((ds, dc_info))
self._imagecache.update(context, instances, datastores_info)
    def _get_valid_vms_from_retrieve_result(self, retrieve_result):
        """Returns list of valid vms from RetrieveResult object.

        A VM is valid when it has an 'nvp.vm-uuid' extra-config entry and
        is neither orphaned nor inaccessible.

        :param retrieve_result: paged RetrieveResult from a property query
        :returns: list of vm uuids (the nvp.vm-uuid values)
        """
        lst_vm_names = []
        # Walk every page of the retrieve result.
        while retrieve_result:
            for vm in retrieve_result.objects:
                vm_uuid = None
                conn_state = None
                for prop in vm.propSet:
                    if prop.name == "runtime.connectionState":
                        conn_state = prop.val
                    elif prop.name == 'config.extraConfig["nvp.vm-uuid"]':
                        vm_uuid = prop.val.value
                # Ignore VM's that do not have nvp.vm-uuid defined
                if not vm_uuid:
                    continue
                # Ignoring the orphaned or inaccessible VMs
                if conn_state not in ["orphaned", "inaccessible"]:
                    lst_vm_names.append(vm_uuid)
            retrieve_result = self._session._call_method(vutil,
                                                         'continue_retrieval',
                                                         retrieve_result)
        return lst_vm_names
def instance_exists(self, instance):
try:
vm_util.get_vm_ref(self._session, instance)
return True
except exception.InstanceNotFound:
return False
    def attach_interface(self, instance, image_meta, vif):
        """Attach an interface to the instance.

        :param instance: nova.objects.instance.Instance
        :param image_meta: image metadata, provides the vif model default
        :param vif: the vif to attach
        :raises: InterfaceAttachFailed when the VM reconfigure fails
        """
        vif_model = image_meta.properties.get('hw_vif_model',
                                              constants.DEFAULT_VIF_MODEL)
        vif_model = vm_util.convert_vif_model(vif_model)
        vif_info = vmwarevif.get_vif_dict(self._session, self._cluster,
                                          vif_model, utils.is_neutron(), vif)
        vm_ref = vm_util.get_vm_ref(self._session, instance)
        # Ensure that there is not a race with the port index management
        with lockutils.lock(instance.uuid,
                            lock_file_prefix='nova-vmware-hot-plug'):
            port_index = vm_util.get_attach_port_index(self._session, vm_ref)
            client_factory = self._session.vim.client.factory
            attach_config_spec = vm_util.get_network_attach_config_spec(
                                        client_factory, vif_info, port_index)
            LOG.debug("Reconfiguring VM to attach interface",
                      instance=instance)
            try:
                vm_util.reconfigure_vm(self._session, vm_ref,
                                       attach_config_spec)
            except Exception as e:
                LOG.error(_LE('Attaching network adapter failed. Exception: '
                              '%s'),
                          e, instance=instance)
                raise exception.InterfaceAttachFailed(
                        instance_uuid=instance.uuid)
            # Record the vnic index on the port so detach can find it later.
            context = nova_context.get_admin_context()
            self._network_api.update_instance_vnic_index(
                context, instance, vif, port_index)
        LOG.debug("Reconfigured VM to attach interface", instance=instance)
    def detach_interface(self, instance, vif):
        """Detach an interface from the instance.

        :param instance: nova.objects.instance.Instance
        :param vif: the vif to detach
        :raises: NotFound when no matching device exists on the VM
        :raises: InterfaceDetachFailed when the VM reconfigure fails
        """
        vm_ref = vm_util.get_vm_ref(self._session, instance)
        # Ensure that there is not a race with the port index management
        with lockutils.lock(instance.uuid,
                            lock_file_prefix='nova-vmware-hot-plug'):
            port_index = vm_util.get_vm_detach_port_index(self._session,
                                                          vm_ref,
                                                          vif['id'])
            if port_index is None:
                msg = _("No device with interface-id %s exists on "
                        "VM") % vif['id']
                raise exception.NotFound(msg)
            hardware_devices = self._session._call_method(
                vutil,
                "get_object_property",
                vm_ref,
                "config.hardware.device")
            device = vmwarevif.get_network_device(hardware_devices,
                                                  vif['address'])
            if device is None:
                msg = _("No device with MAC address %s exists on the "
                        "VM") % vif['address']
                raise exception.NotFound(msg)
            # Clear the vnic index that was stored on the port at attach.
            context = nova_context.get_admin_context()
            self._network_api.update_instance_vnic_index(
                context, instance, vif, None)
            client_factory = self._session.vim.client.factory
            detach_config_spec = vm_util.get_network_detach_config_spec(
                                        client_factory, device, port_index)
            LOG.debug("Reconfiguring VM to detach interface",
                      instance=instance)
            try:
                vm_util.reconfigure_vm(self._session, vm_ref,
                                       detach_config_spec)
            except Exception as e:
                LOG.error(_LE('Detaching network adapter failed. Exception: '
                              '%s'),
                          e, instance=instance)
                raise exception.InterfaceDetachFailed(
                        instance_uuid=instance.uuid)
        LOG.debug("Reconfigured VM to detach interface", instance=instance)
    def _use_disk_image_as_full_clone(self, vm_ref, vi):
        """Uses cached image disk by copying it into the VM directory.

        :param vm_ref: managed object reference of the VM
        :param vi: VirtualMachineInstanceConfigInfo for the spawn
        """
        instance_folder = vi.instance.uuid
        root_disk_name = "%s.vmdk" % vi.instance.uuid
        root_disk_ds_loc = vi.datastore.build_path(instance_folder,
                                                   root_disk_name)
        # Full clone: copy the cached image into the instance's folder.
        vm_util.copy_virtual_disk(
            self._session,
            vi.dc_info.ref,
            str(vi.cache_image_path),
            str(root_disk_ds_loc))
        # Grow the copy to the flavor's root size when required.
        self._extend_if_required(
            vi.dc_info, vi.ii, vi.instance, str(root_disk_ds_loc))
        self._volumeops.attach_disk_to_vm(
            vm_ref, vi.instance,
            vi.ii.adapter_type, vi.ii.disk_type,
            str(root_disk_ds_loc),
            vi.root_gb * units.Mi, False,
            disk_io_limits=vi._extra_specs.disk_io_limits)
def _sized_image_exists(self, sized_disk_ds_loc, ds_ref):
ds_browser = self._get_ds_browser(ds_ref)
return ds_util.file_exists(
self._session, ds_browser, sized_disk_ds_loc.parent,
sized_disk_ds_loc.basename)
    def _use_disk_image_as_linked_clone(self, vm_ref, vi):
        """Uses cached image as parent of a COW child in the VM directory.

        :param vm_ref: managed object reference of the VM
        :param vi: VirtualMachineInstanceConfigInfo for the spawn
        """
        # The resized parent disk is named per image id (and per size when
        # the flavor defines a root size).
        sized_image_disk_name = "%s.vmdk" % vi.ii.image_id
        if vi.root_gb > 0:
            sized_image_disk_name = "%s.%s.vmdk" % (vi.ii.image_id, vi.root_gb)
        sized_disk_ds_loc = vi.cache_image_folder.join(sized_image_disk_name)
        # Ensure only a single thread extends the image at once.
        # We do this by taking a lock on the name of the extended
        # image. This allows multiple threads to create resized
        # copies simultaneously, as long as they are different
        # sizes. Threads attempting to create the same resized copy
        # will be serialized, with only the first actually creating
        # the copy.
        #
        # Note that the object is in a per-nova cache directory,
        # so inter-nova locking is not a concern. Consequently we
        # can safely use simple thread locks.
        with lockutils.lock(str(sized_disk_ds_loc),
                            lock_file_prefix='nova-vmware-image'):
            if not self._sized_image_exists(sized_disk_ds_loc,
                                            vi.datastore.ref):
                LOG.debug("Copying root disk of size %sGb", vi.root_gb,
                          instance=vi.instance)
                try:
                    vm_util.copy_virtual_disk(
                        self._session,
                        vi.dc_info.ref,
                        str(vi.cache_image_path),
                        str(sized_disk_ds_loc))
                except Exception as e:
                    LOG.warning(_LW("Root disk file creation "
                                    "failed - %s"), e)
                    # Clean up any partially-copied disk, then re-raise the
                    # original exception.
                    with excutils.save_and_reraise_exception():
                        LOG.error(_LE('Failed to copy cached '
                                      'image %(source)s to '
                                      '%(dest)s for resize: '
                                      '%(error)s'),
                                  {'source': vi.cache_image_path,
                                   'dest': sized_disk_ds_loc,
                                   'error': e})
                        try:
                            ds_util.file_delete(self._session,
                                                sized_disk_ds_loc,
                                                vi.dc_info.ref)
                        except vexc.FileNotFoundException:
                            # File was never created: cleanup not
                            # required
                            pass
                # Resize the copy to the appropriate size. No need
                # for cleanup up here, as _extend_virtual_disk
                # already does it
                self._extend_if_required(
                    vi.dc_info, vi.ii, vi.instance, str(sized_disk_ds_loc))
        # Associate the sized image disk to the VM by attaching to the VM a
        # COW child of said disk.
        self._volumeops.attach_disk_to_vm(
            vm_ref, vi.instance,
            vi.ii.adapter_type, vi.ii.disk_type,
            str(sized_disk_ds_loc),
            vi.root_gb * units.Mi, vi.ii.linked_clone,
            disk_io_limits=vi._extra_specs.disk_io_limits)
    def _use_iso_image(self, vm_ref, vi):
        """Uses cached image as a bootable virtual cdrom.

        :param vm_ref: managed object reference of the VM
        :param vi: VirtualMachineInstanceConfigInfo for the spawn
        """
        self._attach_cdrom_to_vm(
            vm_ref, vi.instance, vi.datastore.ref,
            str(vi.cache_image_path))
        # Optionally create and attach blank disk
        if vi.root_gb > 0:
            instance_folder = vi.instance.uuid
            root_disk_name = "%s.vmdk" % vi.instance.uuid
            root_disk_ds_loc = vi.datastore.build_path(instance_folder,
                                                       root_disk_name)
            # It is pointless to COW a blank disk
            linked_clone = False
            vm_util.create_virtual_disk(
                self._session, vi.dc_info.ref,
                vi.ii.adapter_type,
                vi.ii.disk_type,
                str(root_disk_ds_loc),
                vi.root_gb * units.Mi)
            self._volumeops.attach_disk_to_vm(
                vm_ref, vi.instance,
                vi.ii.adapter_type, vi.ii.disk_type,
                str(root_disk_ds_loc),
                vi.root_gb * units.Mi, linked_clone,
                disk_io_limits=vi._extra_specs.disk_io_limits)
    def get_datacenter_ref_and_name(self, ds_ref):
        """Get the datacenter name and the reference.

        :param ds_ref: managed object reference of a datastore
        :returns: datacenter info as returned by ds_util.get_dc_info
        """
        return ds_util.get_dc_info(self._session, ds_ref)
    def list_instances(self):
        """Lists the VM instances that are registered with vCenter cluster.

        :returns: list of 'nvp.vm-uuid' values for all valid VMs under the
            cluster's root resource pool (empty when there is no pool)
        """
        properties = ['runtime.connectionState',
                      'config.extraConfig["nvp.vm-uuid"]']
        LOG.debug("Getting list of instances from cluster %s",
                  self._cluster)
        vms = []
        if self._root_resource_pool:
            vms = self._session._call_method(
                vim_util, 'get_inner_objects', self._root_resource_pool, 'vm',
                'VirtualMachine', properties)
        lst_vm_names = self._get_valid_vms_from_retrieve_result(vms)
        LOG.debug("Got total of %s instances", str(len(lst_vm_names)))
        return lst_vm_names
    def get_vnc_console(self, instance):
        """Return connection info for a vnc console using vCenter logic.

        :returns: ctype.ConsoleVNC pointing at the ESX host currently
            running the VM
        """
        # vCenter does not run virtual machines and does not run
        # a VNC proxy. Instead, you need to tell OpenStack to talk
        # directly to the ESX host running the VM you are attempting
        # to connect to via VNC.
        vnc_console = self._get_vnc_console_connection(instance)
        host_name = vm_util.get_host_name_for_vm(
            self._session,
            instance)
        vnc_console['host'] = host_name
        # NOTE: VM can move hosts in some situations. Debug for admins.
        LOG.debug("VM %(uuid)s is currently on host %(host_name)s",
                  {'uuid': instance.uuid, 'host_name': host_name},
                  instance=instance)
        return ctype.ConsoleVNC(**vnc_console)
    def get_mks_console(self, instance):
        """Return an MKS console connection for the instance.

        Acquires an MKS ticket from the backend and packs the auth details
        into the console's internal_access_path as JSON.
        """
        vm_ref = vm_util.get_vm_ref(self._session, instance)
        ticket = self._session._call_method(self._session.vim,
                                            'AcquireTicket',
                                            vm_ref,
                                            ticketType='mks')
        # Normalize the SSL thumbprint: strip colons and lower-case it.
        thumbprint = ticket.sslThumbprint.replace(':', '').lower()
        mks_auth = {'ticket': ticket.ticket,
                    'cfgFile': ticket.cfgFile,
                    'thumbprint': thumbprint}
        internal_access_path = jsonutils.dumps(mks_auth)
        return ctype.ConsoleMKS(ticket.host, ticket.port, internal_access_path)
| 47.18607 | 79 | 0.565476 |
9f94960d1662c85e8cd165f0804dda93be335f83 | 19,645 | py | Python | configs/visdrone/visdrone_cascade_FusionFactor_mask_arf_DSH.py | w-sugar/mmdetection | 3f263c496c99827e4c90eb2e2f2be92f061f3b66 | [
"Apache-2.0"
] | null | null | null | configs/visdrone/visdrone_cascade_FusionFactor_mask_arf_DSH.py | w-sugar/mmdetection | 3f263c496c99827e4c90eb2e2f2be92f061f3b66 | [
"Apache-2.0"
] | null | null | null | configs/visdrone/visdrone_cascade_FusionFactor_mask_arf_DSH.py | w-sugar/mmdetection | 3f263c496c99827e4c90eb2e2f2be92f061f3b66 | [
"Apache-2.0"
] | null | null | null | model = dict(
type='CascadeRCNN',
pretrained='torchvision://resnet50',
backbone=dict(
type='ResNet',
depth=50,
num_stages=4,
out_indices=(0, 1, 2, 3),
frozen_stages=1,
norm_cfg=dict(type='BN', requires_grad=True),
norm_eval=True,
style='pytorch'),
neck=dict(
type='FPN',
in_channels=[256, 512, 1024, 2048],
out_channels=256,
fusion_factors=[1.368, 0.727, 0.470],
with_ExtraMask=[256,5,True,True],
num_outs=5),
rpn_head=dict(
type='RPNHead',
in_channels=256,
feat_channels=256,
anchor_generator=dict(
type='AnchorGenerator',
scales=[4],
ratios=[0.5, 1.0, 2.0],
strides=[4, 8, 16, 32, 64]),
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
clip_border=False,
target_means=[0.0, 0.0, 0.0, 0.0],
target_stds=[1.0, 1.0, 1.0, 1.0]),
loss_cls=dict(
type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0),
loss_bbox=dict(
type='SmoothL1Loss', beta=0.1111111111111111, loss_weight=1.0)),
roi_head=dict(
type='TailCascadeRoIHead',
# labels_tail=[2, 5, 6, 7, 8, 10],
# labels=[0, 1, 3, 4, 9],
labels_tail=[2, 4, 5, 6, 7, 8, 9],
labels=[0, 1, 3],
num_stages=3,
stage_loss_weights=[1, 0.5, 0.25],
bbox_roi_extractor=dict(
# type='SingleRoIExtractor',
type='SoftRoIExtractor',
roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0),
out_channels=256,
featmap_strides=[4, 8, 16, 32]),
bbox_head=[
dict(
type='Shared2FCBBoxHead',
# type='GSBBoxHeadWith',
# gs_config=dict(
# loss_bg=dict(
# type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0
# ),
# loss_bin=dict(
# type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0
# ),
# ),
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=10,
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
target_means=[0.0, 0.0, 0.0, 0.0],
target_stds=[0.1, 0.1, 0.2, 0.2]),
reg_class_agnostic=True,
loss_cls=dict(
type='CrossEntropyLoss',
use_sigmoid=False,
loss_weight=1.0),
# loss_cls=dict(
# type='WordTreeFocalLoss',
# use_sigmoid=True,
# gamma=2.0,
# alpha=0.25,
# loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0,
loss_weight=1.0)),
dict(
type='Shared2FCBBoxHead',
# type='GSBBoxHeadWith',
# gs_config=dict(
# loss_bg=dict(
# type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0
# ),
# loss_bin=dict(
# type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0
# ),
# ),
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=10,
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
target_means=[0.0, 0.0, 0.0, 0.0],
target_stds=[0.05, 0.05, 0.1, 0.1]),
reg_class_agnostic=True,
loss_cls=dict(
type='CrossEntropyLoss',
use_sigmoid=False,
loss_weight=1.0),
# loss_cls=dict(
# type='WordTreeFocalLoss',
# use_sigmoid=True,
# gamma=2.0,
# alpha=0.25,
# loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0,
loss_weight=1.0)),
dict(
type='Shared2FCBBoxHead',
# type='GSBBoxHeadWith',
# gs_config=dict(
# loss_bg=dict(
# type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0
# ),
# loss_bin=dict(
# type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0
# ),
# ),
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=10,
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
target_means=[0.0, 0.0, 0.0, 0.0],
target_stds=[0.033, 0.033, 0.067, 0.067]),
reg_class_agnostic=True,
loss_cls=dict(
type='CrossEntropyLoss',
use_sigmoid=False,
loss_weight=1.0),
# loss_cls=dict(
# type='WordTreeFocalLoss',
# use_sigmoid=True,
# gamma=2.0,
# alpha=0.25,
# loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))
],
bbox_head_tail=[
dict(
type='Shared2FCBBoxHead',
# type='GSBBoxHeadWith',
# gs_config=dict(
# loss_bg=dict(
# type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0
# ),
# loss_bin=dict(
# type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0
# ),
# ),
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=10,
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
target_means=[0.0, 0.0, 0.0, 0.0],
target_stds=[0.1, 0.1, 0.2, 0.2]),
reg_class_agnostic=True,
loss_cls=dict(
type='CrossEntropyLoss',
use_sigmoid=False,
loss_weight=2.0),
# loss_cls=dict(
# type='WordTreeFocalLoss',
# use_sigmoid=True,
# gamma=2.0,
# alpha=0.25,
# loss_weight=2.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0,
loss_weight=2.0)),
dict(
type='Shared2FCBBoxHead',
# type='GSBBoxHeadWith',
# gs_config=dict(
# loss_bg=dict(
# type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0
# ),
# loss_bin=dict(
# type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0
# ),
# ),
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=10,
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
target_means=[0.0, 0.0, 0.0, 0.0],
target_stds=[0.05, 0.05, 0.1, 0.1]),
reg_class_agnostic=True,
loss_cls=dict(
type='CrossEntropyLoss',
use_sigmoid=False,
loss_weight=2.0),
# loss_cls=dict(
# type='WordTreeFocalLoss',
# use_sigmoid=True,
# gamma=2.0,
# alpha=0.25,
# loss_weight=2.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0,
loss_weight=2.0)),
dict(
type='Shared2FCBBoxHead',
# type='GSBBoxHeadWith',
# gs_config=dict(
# loss_bg=dict(
# type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0
# ),
# loss_bin=dict(
# type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0
# ),
# ),
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=10,
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
target_means=[0.0, 0.0, 0.0, 0.0],
target_stds=[0.033, 0.033, 0.067, 0.067]),
reg_class_agnostic=True,
loss_cls=dict(
type='CrossEntropyLoss',
use_sigmoid=False,
loss_weight=2.0),
# loss_cls=dict(
# type='WordTreeFocalLoss',
# use_sigmoid=True,
# gamma=2.0,
# alpha=0.25,
# loss_weight=2.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=2.0))
]
),
train_cfg=dict(
rpn=dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.7,
neg_iou_thr=0.3,
min_pos_iou=0.3,
match_low_quality=True,
ignore_iof_thr=0.5),
sampler=dict(
type='RandomSampler',
num=256,
pos_fraction=0.5,
neg_pos_ub=-1,
add_gt_as_proposals=False),
allowed_border=0,
pos_weight=-1,
debug=False),
rpn_proposal=dict(
nms_pre=2000,
max_per_img=2000,
nms=dict(type='nms', iou_threshold=0.7),
min_bbox_size=0),
rcnn=[
dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.5,
neg_iou_thr=0.5,
min_pos_iou=0.5,
match_low_quality=False,
ignore_iof_thr=0.5),
sampler=dict(
type='ClassBalancedPosSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
# labels=[0, 1, 3, 4, 9],
labels=[0, 1, 3],
add_gt_as_proposals=True),
assigner_tail=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.5,
neg_iou_thr=0.5,
min_pos_iou=0.5,
match_low_quality=False,
ignore_iof_thr=0.5),
sampler_tail=dict(
type='ClassBalancedPosSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
# labels=[2, 5, 6, 7, 8, 10],
labels=[2, 4, 5, 6, 7, 8, 9],
add_gt_as_proposals=True),
pos_weight=-1,
debug=False),
dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.6,
neg_iou_thr=0.6,
min_pos_iou=0.6,
match_low_quality=False,
ignore_iof_thr=0.5),
sampler=dict(
type='ClassBalancedPosSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
# labels=[0, 1, 3, 4, 9],
labels=[0, 1, 3],
add_gt_as_proposals=True),
assigner_tail=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.6,
neg_iou_thr=0.6,
min_pos_iou=0.6,
match_low_quality=False,
ignore_iof_thr=0.5),
sampler_tail=dict(
type='ClassBalancedPosSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
# labels=[2, 5, 6, 7, 8, 10],
labels=[2, 4, 5, 6, 7, 8, 9],
add_gt_as_proposals=True),
pos_weight=-1,
debug=False),
dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.7,
neg_iou_thr=0.7,
min_pos_iou=0.7,
match_low_quality=False,
ignore_iof_thr=0.5),
sampler=dict(
type='ClassBalancedPosSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
# labels=[0, 1, 3, 4, 9],
labels=[0, 1, 3],
add_gt_as_proposals=True),
assigner_tail=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.7,
neg_iou_thr=0.7,
min_pos_iou=0.7,
match_low_quality=False,
ignore_iof_thr=0.5),
sampler_tail=dict(
type='ClassBalancedPosSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
# labels=[2, 5, 6, 7, 8, 10],
labels=[2, 4, 5, 6, 7, 8, 9],
add_gt_as_proposals=True),
pos_weight=-1,
debug=False)
]),
test_cfg=dict(
rpn=dict(
nms_pre=1000,
max_per_img=1000,
nms=dict(type='nms', iou_threshold=0.7),
min_bbox_size=0),
rcnn=dict(
score_thr=0.05,
nms=dict(type='nms', iou_threshold=0.5),
max_per_img=500)))
# ---------------------------------------------------------------------------
# Dataset / schedule / runtime configuration (VisDrone detection).
# NOTE: this is an mmcv/mmdetection config *dump*, so the train/val/test
# pipelines are intentionally repeated verbatim inside `data` rather than
# referencing `train_pipeline` / `test_pipeline`.
# ---------------------------------------------------------------------------
dataset_type = 'VisDroneDataset'
data_root = 'data/visdrone/'
# ImageNet mean/std normalization, applied after converting BGR -> RGB.
img_norm_cfg = dict(
    mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
# Training pipeline: single-scale resize + random horizontal flip.
train_pipeline = [
    dict(type='LoadImageFromFile'),
    dict(type='LoadAnnotations', with_bbox=True),
    dict(
        type='Resize',
        img_scale=(1600, 1050),
        keep_ratio=True),
    dict(type='RandomFlip', flip_ratio=0.5),
    dict(
        type='Normalize',
        mean=[123.675, 116.28, 103.53],
        std=[58.395, 57.12, 57.375],
        to_rgb=True),
    dict(type='Pad', size_divisor=32),
    dict(type='DefaultFormatBundle'),
    dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels'])
]
# Test pipeline: multi-scale TTA (three large scales, no flip); boxes are not
# clipped to the image border (bbox_clip_border=False).
test_pipeline = [
    dict(type='LoadImageFromFile'),
    dict(
        type='MultiScaleFlipAug',
        img_scale=[(3000, 1969), (3200, 2100), (3400, 2231)],
        flip=False,
        transforms=[
            dict(type='Resize', keep_ratio=True, bbox_clip_border=False),
            dict(type='RandomFlip'),
            dict(
                type='Normalize',
                mean=[123.675, 116.28, 103.53],
                std=[58.395, 57.12, 57.375],
                to_rgb=True),
            dict(type='Pad', size_divisor=32),
            dict(type='ImageToTensor', keys=['img']),
            dict(type='Collect', keys=['img'])
        ])
]
data = dict(
    samples_per_gpu=2,
    workers_per_gpu=2,
    # Train on the pre-cut train+val images (classes merged, 'other' removed).
    train=dict(
        type='VisDroneDataset',
        ann_file='data/visdrone/annotations/coco-cut_train_val_NOother_new.json',
        img_prefix='data/visdrone/images/VisDrone2019-DET-train_val/images-cut-NOother-new',
        pipeline=[
            dict(type='LoadImageFromFile'),
            dict(type='LoadAnnotations', with_bbox=True),
            dict(
                type='Resize',
                img_scale=(1600, 1050),
                keep_ratio=True),
            dict(type='RandomFlip', flip_ratio=0.5),
            dict(
                type='Normalize',
                mean=[123.675, 116.28, 103.53],
                std=[58.395, 57.12, 57.375],
                to_rgb=True),
            dict(type='Pad', size_divisor=32),
            dict(type='DefaultFormatBundle'),
            dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels'])
        ]),
    # Validation uses a single, much smaller scale than the multi-scale test
    # pipeline below (faster periodic evaluation on test-dev).
    val=dict(
        type='VisDroneDataset',
        ann_file='data/visdrone/annotations/coco-test-dev.json',
        img_prefix='data/visdrone/images/VisDrone2019-DET-test-dev/images',
        pipeline=[
            dict(type='LoadImageFromFile'),
            dict(
                type='MultiScaleFlipAug',
                img_scale=(1333, 800),
                flip=False,
                transforms=[
                    dict(
                        type='Resize', keep_ratio=True,
                        bbox_clip_border=False),
                    dict(type='RandomFlip'),
                    dict(
                        type='Normalize',
                        mean=[123.675, 116.28, 103.53],
                        std=[58.395, 57.12, 57.375],
                        to_rgb=True),
                    dict(type='Pad', size_divisor=32),
                    dict(type='ImageToTensor', keys=['img']),
                    dict(type='Collect', keys=['img'])
                ])
        ]),
    # Test on the same test-dev split but with the full multi-scale TTA.
    test=dict(
        type='VisDroneDataset',
        ann_file='data/visdrone/annotations/coco-test-dev.json',
        img_prefix='data/visdrone/images/VisDrone2019-DET-test-dev/images',
        pipeline=[
            dict(type='LoadImageFromFile'),
            dict(
                type='MultiScaleFlipAug',
                img_scale=[(3000, 1969), (3200, 2100), (3400, 2231)],
                flip=False,
                transforms=[
                    dict(
                        type='Resize', keep_ratio=True,
                        bbox_clip_border=False),
                    dict(type='RandomFlip'),
                    dict(
                        type='Normalize',
                        mean=[123.675, 116.28, 103.53],
                        std=[58.395, 57.12, 57.375],
                        to_rgb=True),
                    dict(type='Pad', size_divisor=32),
                    dict(type='ImageToTensor', keys=['img']),
                    dict(type='Collect', keys=['img'])
                ])
        ]))
evaluation = dict(interval=1, metric='bbox')
# Standard mmdetection 1x schedule: SGD, step LR decay at epochs 8 and 11,
# 12 epochs total, linear warmup for the first 500 iterations.
# (lr=0.02 is presumably tuned for 4 GPUs x 2 samples/GPU -- see gpu_ids below;
# TODO confirm before changing batch size.)
optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001)
optimizer_config = dict(grad_clip=None)
lr_config = dict(
    policy='step',
    warmup='linear',
    warmup_iters=500,
    warmup_ratio=0.001,
    step=[8, 11])
runner = dict(type='EpochBasedRunner', max_epochs=12)
checkpoint_config = dict(interval=1)
log_config = dict(interval=50, hooks=[dict(type='TextLoggerHook')])
custom_hooks = [dict(type='NumClassCheckHook')]
dist_params = dict(backend='nccl')
log_level = 'INFO'
# Fine-tune from the official COCO-pretrained Cascade R-CNN R50-FPN weights.
load_from = '/home/sugar/workspace/mmdetection/checkpoints/cascade_rcnn_r50_fpn_1x_coco_20200316-3dc56deb.pth'
resume_from = None
workflow = [('train', 1)]
work_dir = '/data/sugar/checkpoints/mmdetection_work_dirs/cascade_rcnn_r50_fpn_1x_coco_cut_four_multiscale_dcn_softroi/'
gpu_ids = range(0, 4)
| 38.145631 | 120 | 0.445508 |
b96e9524eb11b5b8295c8b236e33463a77357ade | 3,717 | py | Python | dash_core_components/Slider.py | mako-npm/dash-core-components | 0cbc3d8093c678e59b5b4dfa3aa2637d071a5b33 | [
"MIT"
] | null | null | null | dash_core_components/Slider.py | mako-npm/dash-core-components | 0cbc3d8093c678e59b5b4dfa3aa2637d071a5b33 | [
"MIT"
] | null | null | null | dash_core_components/Slider.py | mako-npm/dash-core-components | 0cbc3d8093c678e59b5b4dfa3aa2637d071a5b33 | [
"MIT"
] | null | null | null | # AUTO GENERATED FILE - DO NOT EDIT
from dash.development.base_component import Component, _explicitize_args
class Slider(Component):
    """A Slider component.
A slider component with a single handle.
Keyword arguments:
- id (string; optional)
- marks (optional): Marks on the slider.
The key determines the position,
and the value determines what will show.
If you want to set the style of a specific mark point,
the value should be an object which
contains style and label properties.. marks has the following type: dict containing keys 'number'.
Those keys have the following types:
- number (optional): . number has the following type: string | dict containing keys 'style', 'label'.
Those keys have the following types:
- style (dict; optional)
- label (string; optional)
- value (number; optional): The value of the input
- className (string; optional): Additional CSS class for the root DOM node
- disabled (boolean; optional): If true, the handles can't be moved.
- dots (boolean; optional): When the step value is greater than 1,
you can set the dots to true if you want to
render the slider with dots.
- included (boolean; optional): If the value is true, it means a continuous
value is included. Otherwise, it is an independent value.
- min (number; optional): Minimum allowed value of the slider
- max (number; optional): Maximum allowed value of the slider
- step (number; optional): Value by which increments or decrements are made
- vertical (boolean; optional): If true, the slider will be vertical
- updatemode (a value equal to: 'mouseup', 'drag'; optional): Determines when the component should update
its value. If `mouseup`, then the slider
will only trigger its value when the user has
finished dragging the slider. If `drag`, then
the slider will update its value continuously
as it is being dragged.
Only use `drag` if your updates are fast.
- loading_state (optional): Object that holds the loading state object coming from dash-renderer. loading_state has the following type: dict containing keys 'is_loading', 'prop_name', 'component_name'.
Those keys have the following types:
- is_loading (boolean; optional): Determines if the component is loading or not
- prop_name (string; optional): Holds which property is loading
- component_name (string; optional): Holds the name of the component that is loading"""
    @_explicitize_args
    def __init__(self, id=Component.UNDEFINED, marks=Component.UNDEFINED, value=Component.UNDEFINED, className=Component.UNDEFINED, disabled=Component.UNDEFINED, dots=Component.UNDEFINED, included=Component.UNDEFINED, min=Component.UNDEFINED, max=Component.UNDEFINED, step=Component.UNDEFINED, vertical=Component.UNDEFINED, updatemode=Component.UNDEFINED, loading_state=Component.UNDEFINED, **kwargs):
        # Boilerplate emitted by the dash component generator: record the
        # component's declared props and identity for serialization.
        self._prop_names = ['id', 'marks', 'value', 'className', 'disabled', 'dots', 'included', 'min', 'max', 'step', 'vertical', 'updatemode', 'loading_state']
        self._type = 'Slider'
        self._namespace = 'dash_core_components'
        self._valid_wildcard_attributes = []
        self.available_properties = ['id', 'marks', 'value', 'className', 'disabled', 'dots', 'included', 'min', 'max', 'step', 'vertical', 'updatemode', 'loading_state']
        self.available_wildcard_properties = []
        # _explicit_args is injected by the @_explicitize_args decorator and
        # lists only the keyword arguments the caller actually passed.
        _explicit_args = kwargs.pop('_explicit_args')
        _locals = locals()
        _locals.update(kwargs)  # For wildcard attrs
        # Forward only explicitly-supplied props ('children' is handled by the
        # base Component, and Slider declares none).
        args = {k: _locals[k] for k in _explicit_args if k != 'children'}
        # No required arguments for this component, so this loop is a no-op;
        # the generator emits it unconditionally.
        for k in []:
            if k not in args:
                raise TypeError(
                    'Required argument `' + k + '` was not specified.')
        super(Slider, self).__init__(**args)
| 56.318182 | 401 | 0.723433 |
f3cc46059317f4a45dabe1d72893b73e1b4238d9 | 4,039 | py | Python | workspace_tools/settings.py | x893/mbed | 103a6a5bc6da7db33c723c278ef08ee491b92cb5 | [
"Apache-2.0"
] | 1 | 2015-01-02T06:55:31.000Z | 2015-01-02T06:55:31.000Z | workspace_tools/settings.py | x893/mbed | 103a6a5bc6da7db33c723c278ef08ee491b92cb5 | [
"Apache-2.0"
] | null | null | null | workspace_tools/settings.py | x893/mbed | 103a6a5bc6da7db33c723c278ef08ee491b92cb5 | [
"Apache-2.0"
] | null | null | null | """
mbed SDK
Copyright (c) 2011-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from os.path import join, abspath, dirname
import logging
# NOTE(review): this module is Python 2 (uses the `print` statement below).
ROOT = abspath(join(dirname(__file__), ".."))
# These default settings have two purposes:
# 1) Give a template for writing local "private_settings.py"
# 2) Give default initialization fields for the "toolchains.py" constructors
##############################################################################
# Build System Settings
##############################################################################
BUILD_DIR = abspath(join(ROOT, "build"))
# ARM toolchain flavor: selects which set of ARM_* paths is used below.
armcc = "standalone"  # "keil", or "standalone", or "ds-5"
if armcc == "keil":
    ARM_PATH = "C:/Keil_4_54/ARM"
    ARM_BIN = join(ARM_PATH, "BIN40")
    ARM_INC = join(ARM_PATH, "RV31", "INC")
    ARM_LIB = join(ARM_PATH, "RV31", "LIB")
elif armcc == "standalone":
    ARM_PATH = "C:/Program Files/ARM/armcc_4.1_791"
    ARM_BIN = join(ARM_PATH, "bin")
    ARM_INC = join(ARM_PATH, "include")
    ARM_LIB = join(ARM_PATH, "lib")
elif armcc == "ds-5":
    ARM_PATH = "C:/Program Files (x86)/DS-5"
    ARM_BIN = join(ARM_PATH, "bin")
    ARM_INC = join(ARM_PATH, "include")
    ARM_LIB = join(ARM_PATH, "lib")
# NOTE(review): if `armcc` is set to an unknown value, ARM_PATH/ARM_LIB are
# undefined and the two lines below raise NameError.
ARM_CPPLIB = join(ARM_LIB, "cpplib")
MY_ARM_CLIB = join(ARM_PATH, "lib", "microlib")
# GCC ARM
GCC_ARM_PATH = ""
# GCC CodeSourcery
GCC_CS_PATH = "C:/Program Files (x86)/CodeSourcery/Sourcery_CodeBench_Lite_for_ARM_EABI/bin"
# GCC CodeRed
GCC_CR_PATH = "C:/code_red/RedSuite_4.2.0_349/redsuite/Tools/bin"
# IAR
IAR_PATH = "C:/Program Files (x86)/IAR Systems/Embedded Workbench 6.0/arm"
# GCC Code Warrior
CW_GCC_PATH = "C:/Freescale/CW MCU v10.3/Cross_Tools/arm-none-eabi-gcc-4_6_2/bin"
CW_EWL_PATH = "C:/Freescale/CW MCU v10.3/MCU/ARM_GCC_Support/ewl/lib"
# Goanna static analyser. Please overload it in private_settings.py
GOANNA_PATH = "c:/Program Files (x86)/RedLizards/Goanna Central 3.2.3/bin"
# cppcheck path (command) and output message format
CPPCHECK_CMD = ["cppcheck", "--enable=all"]
CPPCHECK_MSG_FORMAT = ["--template=[{severity}] {file}@{line}: {id}:{message}"]
# SiliconLabs energyAware Commander 2.84 path
EACOMMANDER_CMD = 'c:/SiliconLabs/SimplicityStudio/v2/commander/eACommander.exe'
BUILD_OPTIONS = []
# mbed.org username
MBED_ORG_USER = ""
##############################################################################
# Test System Settings
##############################################################################
SERVER_PORT = 59432
SERVER_ADDRESS = "10.2.200.94"
LOCALHOST = "10.2.200.94"
# MUTs = "mbeds under test": serial port, mounted disk and attached
# peripherals for each physical test board.
MUTs = {
    "1" : {"mcu": "LPC1768",
           "port":"COM41", "disk":'E:\\',
           "peripherals": ["TMP102", "digital_loop", "port_loop", "analog_loop", "SD"]
          },
    "2": {"mcu": "LPC11U24",
          "port":"COM42", "disk":'F:\\',
          "peripherals": ["TMP102", "digital_loop", "port_loop", "SD"]
         },
    "3" : {"mcu": "KL25Z",
           "port":"COM43", "disk":'G:\\',
           "peripherals": ["TMP102", "digital_loop", "port_loop", "analog_loop", "SD"]
          },
}
##############################################################################
# Private Settings
##############################################################################
try:
    # Allow to overwrite the default settings without the need to edit the
    # settings file stored in the repository
    from workspace_tools.private_settings import *
except ImportError:
    print '[WARNING] Using default settings. Define you settings in the file "workspace_tools/private_settings.py" or in "./mbed_settings.py"'
3acefc3a69f542fbe69c1e3b354fe7a648abde3b | 2,244 | py | Python | lemur/migrations/env.py | charhate/lemur | 98668b09722695c5c4f353c122359383edb1c811 | [
"Apache-2.0"
] | null | null | null | lemur/migrations/env.py | charhate/lemur | 98668b09722695c5c4f353c122359383edb1c811 | [
"Apache-2.0"
] | 2 | 2021-02-10T02:29:45.000Z | 2021-04-30T21:40:40.000Z | lemur/migrations/env.py | charhate/lemur | 98668b09722695c5c4f353c122359383edb1c811 | [
"Apache-2.0"
] | null | null | null | from __future__ import with_statement
from alembic import context
from sqlalchemy import engine_from_config, pool
from logging.config import fileConfig
import alembic_autogenerate_enums
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
from flask import current_app
config.set_main_option(
"sqlalchemy.url", current_app.config.get("SQLALCHEMY_DATABASE_URI")
)
target_metadata = current_app.extensions["migrate"].db.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    Configures the Alembic context with only a database URL (no Engine,
    so no DBAPI needs to be installed); calls to context.execute() emit
    the generated SQL to the script output instead of a live connection.
    """
    context.configure(url=config.get_main_option("sqlalchemy.url"))
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online():
    """Run migrations in 'online' mode.
    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    # NullPool: each migration run opens exactly one connection and
    # discards it afterwards -- no pooling needed for a one-shot script.
    engine = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    connection = engine.connect()
    # Extra configure kwargs come from Flask-Migrate; compare_type=True makes
    # autogenerate also detect column type changes.
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
        **current_app.extensions["migrate"].configure_args,
        compare_type=True
    )
    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        # Always release the connection, even if a migration fails.
        connection.close()
# Entry point: Alembic executes this module and we dispatch on whether it was
# invoked with --sql (offline) or against a live database (online).
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
| 26.4 | 71 | 0.727273 |
3d5378e499be25b503f8279ea1c8b2933dc62eb4 | 1,395 | py | Python | src/models/quiche_nbody.py | HhotateA/quiche_pantie_patch | f50c4fd69bd43cccaeb38f026d486e3ccc3850d8 | [
"CC-BY-4.0"
] | 73 | 2019-01-26T02:57:24.000Z | 2022-02-15T08:45:11.000Z | src/models/quiche_nbody.py | HhotateA/quiche_pantie_patch | f50c4fd69bd43cccaeb38f026d486e3ccc3850d8 | [
"CC-BY-4.0"
] | 9 | 2019-04-09T10:53:41.000Z | 2020-09-11T13:18:26.000Z | src/models/quiche_nbody.py | HhotateA/quiche_pantie_patch | f50c4fd69bd43cccaeb38f026d486e3ccc3850d8 | [
"CC-BY-4.0"
] | 15 | 2019-04-07T11:28:57.000Z | 2022-03-29T04:35:48.000Z | from PIL import Image, ImageOps
from src.models.class_patcher import patcher
class patcher(patcher):
    # Model-specific patcher for the "quiche" base body ('キッシュ(素体)').

    def __init__(self, body='./body/body_quiche_nbody.png', **options):
        """Set up the quiche body patcher; optionally attach a bra patcher.

        `with_bra` is taken from options when present, otherwise asked
        interactively (defaults to True).
        """
        super().__init__('キッシュ(素体)', body=body, pantie_position=[403, 836], **options)
        try:
            self.with_bra = self.options['with_bra']
        # NOTE(review): bare except also hides AttributeError etc.;
        # `except KeyError` is presumably what is meant -- confirm.
        except:
            self.with_bra = self.ask(question='With bra?', default=True)
        if self.with_bra:
            import src.models.quiche_bra as bra
            self.bra_patcher = bra.patcher(options=options)

    def convert(self, image):
        """Mirror the pantie texture into a symmetric double-width image.

        Crops `cut` pixels off the left edge, then places the cropped half
        on the right and its horizontal mirror on the left of a new RGBA
        canvas twice as wide.
        """
        cut = 7  # pixels trimmed from the left edge before mirroring
        right_pantie = image.crop((cut, 0, image.size[0], image.size[1]))
        left_pantie = ImageOps.mirror(right_pantie)
        npantie = Image.new("RGBA", (right_pantie.size[0] * 2, right_pantie.size[1]))
        npantie.paste(right_pantie, (right_pantie.size[0], 0))
        npantie.paste(left_pantie, (0, 0))
        return npantie

    def patch(self, image, transparent=False):
        """Composite the converted pantie (and optionally the bra) onto the
        body image; with transparent=True, onto an empty RGBA canvas of the
        same size instead.
        """
        pantie = self.convert(image)
        if transparent:
            patched = Image.new("RGBA", self.body_size)
        else:
            patched = self.body.copy()
        if self.with_bra:
            # The bra is pasted first so the pantie layer ends up on top.
            bra = self.bra_patcher.convert(image)
            self.paste(patched, bra, self.bra_patcher.pantie_position)
        patched = self.paste(patched, pantie, self.pantie_position)
        return patched
| 38.75 | 86 | 0.625806 |
06b4a05d145501faecc5a48ebd9c87951089ed33 | 2,763 | py | Python | host/index.py | novacrazy/arduino-music-led-strip | 8d449bf5f4b0d51a5527985734f8c8ec44b009d6 | [
"MIT"
] | null | null | null | host/index.py | novacrazy/arduino-music-led-strip | 8d449bf5f4b0d51a5527985734f8c8ec44b009d6 | [
"MIT"
] | null | null | null | host/index.py | novacrazy/arduino-music-led-strip | 8d449bf5f4b0d51a5527985734f8c8ec44b009d6 | [
"MIT"
] | null | null | null | from listen_thread import ListenerThread
import commands
from command_protocol import CommandProtocol, Command
import serial
import time
import struct
import numpy as np
import numpy.fft as fft
from collections import deque
# Background thread that captures audio samples into m.data.
m = ListenerThread()
m.start()
# Frequency bands (Hz ranges) with their intended RGB colors in the trailing
# comments. NOTE(review): `bands` is not referenced by any code visible here;
# it appears to belong to the commented-out FFT experiment in callback().
bands = ((0, 60),        # 255, 0, 0
         (60, 250),      # 255, 171, 0
         (250, 500),     # 100, 255, 0
         (500, 1000),    # 50, 255, 150
         (1000, 4000),   # 0, 255, 255
         (4000, 10000),  # 0, 100, 255
         (10000, 20000)) # 0, 0, 255
def map_range(x, in_min, in_max, out_min, out_max):
    """Linearly rescale ``x`` from [in_min, in_max] onto [out_min, out_max].

    Values outside the input interval are extrapolated, not clamped.
    """
    in_span = in_max - in_min
    out_span = out_max - out_min
    return out_min + (x - in_min) * out_span / in_span
# NOTE(review): `zeroed` and `refresh_rate` are never read in the visible
# code -- presumably leftovers from an earlier revision.
zeroed = False
last_time = time.time()  # timestamp of the last non-silent frame sent
refresh_rate = 1.0 / 60.0
# Rolling window of recent peak levels for auto-ranging.
peaks = deque(maxlen=(100 * 10)) # at 100 requests per second, this would be 10 seconds
# Serial link to the Arduino; parity/stop bits must match the sketch.
ser = serial.Serial('COM5', 256000, parity=serial.PARITY_EVEN, stopbits=serial.STOPBITS_TWO)
time.sleep(2)  # give the Arduino time to reset after the port opens
protocol = CommandProtocol(ser)
def format_stereo(data):
    """Serialize three floats (left level, right level, peak) into the
    12-byte little-endian float32 payload expected by the Arduino.
    """
    payload = struct.pack("<fff", *data)
    return payload
def write_stereo(channels):
    """Send the current (left, right) levels plus the rolling peak maximum
    to the Arduino as a COMMAND_WRITE_STEREO command.

    Side effects: appends the frame's peak to the global ``peaks`` deque
    and writes to the serial link via the global ``protocol``.
    """
    global protocol, peaks
    # Track this frame's peak; np.max(peaks) below gives the recent maximum
    # the firmware can use for normalization.
    peaks.append(np.max(channels))
    stereo_cmd = Command(cmd=commands.COMMAND_WRITE_STEREO,
                         data=(channels[0], channels[1], np.max(peaks)),
                         fmt=format_stereo)
    # NOTE(review): the command's response is captured but never checked.
    res = protocol.send_command(stereo_cmd)
def callback(cmd):
    """Protocol callback: on a COMMAND_REQUEST_ACTION from the Arduino,
    compute per-channel audio levels from the listener's buffer and reply
    with a stereo write; silence is reported as (0, 0).

    NOTE(review): the indentation of this function was reconstructed from a
    whitespace-mangled source -- in particular whether the silence-timeout
    write_stereo((0, 0)) sits inside the 15 s check should be confirmed
    against the original file.
    """
    global peaks, last_time, protocol
    if cmd.cmd is commands.COMMAND_REQUEST_ACTION:
        # Interleaved 16-bit samples -> 2 x N array (row 0 left, row 1 right).
        # NOTE(review): np.fromstring is deprecated in modern numpy
        # (np.frombuffer), and ''.join assumes m.data holds str chunks.
        channels = np.fromstring(''.join(m.data), dtype=np.int16).reshape((2, -1), order='F')
        if channels.any():
            mono = channels.mean(axis=0)
            avg_level = np.absolute(mono).mean()
            if avg_level >= 1.0:
                # Audible signal: send mean absolute level per channel.
                mean = np.absolute(channels).mean(axis=1)
                write_stereo(mean)
                last_time = time.time()
                # print(len(mono))
                # w = fft.fft(mono)
                # freqs = fft.fftfreq(len(w))
                # print(len(w))
                # print(len(freqs))
                # idx = np.argmax(np.abs(w))
                # freq = freqs[idx]
                # freq_in_hertz = abs(freq * 44100)
                # print(freq_in_hertz)
                # ranges = []
                # current_band = 0
                # for freq in freqs:
                #     if freq >
                # plt.plot(freqs, w.real)
                # value = np.sum((min, max), axis=0).astype(np.uint8)
            else:
                # Near-silence: after 15 s of inactivity, throttle and zero out.
                if time.time() - last_time > 15.0:
                    time.sleep(0.5)
                    write_stereo((0, 0))
        else:
            # Empty capture buffer: report silence immediately.
            write_stereo((0, 0))
# Register the callback, wait for the device's first command, then pump the
# protocol at ~100 Hz forever.
protocol.start(callback)
print("Initialized")
protocol.await_command()
while True:
    protocol.run()
    time.sleep(1.0 / 100.0)
| 24.026087 | 93 | 0.549403 |
c0432715e149592f3cd06dc5a226ba6a18e6d513 | 50,688 | py | Python | aiida/transports/plugins/ssh.py | borellim/aiida_core | eebef392c81e8b130834a92e1d7abf5e2e30b3ce | [
"BSD-2-Clause"
] | 1 | 2019-03-15T10:37:53.000Z | 2019-03-15T10:37:53.000Z | aiida/transports/plugins/ssh.py | odarbelaeze/aiida_core | 934b4ccdc73a993f2a6656caf516500470e3da08 | [
"BSD-2-Clause"
] | null | null | null | aiida/transports/plugins/ssh.py | odarbelaeze/aiida_core | 934b4ccdc73a993f2a6656caf516500470e3da08 | [
"BSD-2-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
###########################################################################
# Copyright (c), The AiiDA team. All rights reserved. #
# This file is part of the AiiDA code. #
# #
# The code is hosted on GitHub at https://github.com/aiidateam/aiida_core #
# For further information on the license, see the LICENSE.txt file #
# For further information please visit http://www.aiida.net #
###########################################################################
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
import io
import os
import click
import glob
from stat import S_ISDIR, S_ISREG
import six
from six.moves import cStringIO as StringIO
from aiida.cmdline.params import options
from aiida.cmdline.params.types.path import AbsolutePathParamType
from aiida.common.escaping import escape_for_bash
from ..transport import Transport, TransportInternalError
__all__ = ('parse_sshconfig', 'convert_to_bool', 'SshTransport')
# TODO : callback functions in paramiko are currently not used much and probably broken
def parse_sshconfig(computername):
    """Return the ssh-config options that apply to ``computername``.

    Parses ``~/.ssh/config`` if it exists and is readable; otherwise the
    configuration is left empty, so ``lookup`` returns only defaults.

    :param computername: the host name to look up in the SSH configuration
    :return: a dict of (lowercased) ssh-config keywords to values, as
        returned by :meth:`paramiko.SSHConfig.lookup`
    """
    import paramiko
    config = paramiko.SSHConfig()
    try:
        # Use a context manager so the file handle is closed deterministically
        # instead of relying on garbage collection (the original leaked it).
        with io.open(os.path.expanduser('~/.ssh/config'), encoding='utf8') as fhandle:
            config.parse(fhandle)
    except IOError:
        # No file found, so empty configuration
        pass
    return config.lookup(computername)
def convert_to_bool(string):
    """Map a yes/no style token (any case, any type) to a bool.

    Accepts Y/YES/T/TRUE and N/NO/F/FALSE (case-insensitive, after str()
    conversion); any other value raises ValueError.
    """
    token = str(string).upper()
    if token in ('Y', 'YES', 'T', 'TRUE'):
        return True
    if token in ('N', 'NO', 'F', 'FALSE'):
        return False
    raise ValueError("Invalid boolean value provided")
class SshTransport(Transport):
"""
Support connection, command execution and data transfer to remote computers via SSH+SFTP.
"""
# Valid keywords accepted by the connect method of paramiko.SSHClient
# I disable 'password' and 'pkey' to avoid these data to get logged in the
# aiida log file.
_valid_connect_options = [
('username', {'prompt': 'User name', 'help': 'user name for the computer', 'non_interactive_default': True}),
('port', {'option': options.PORT, 'prompt': 'port Nr', 'non_interactive_default': True}),
('look_for_keys', {'switch': True, 'prompt': 'Look for keys', 'help': 'switch automatic key file discovery on / off', 'non_interactive_default': True}),
('key_filename', {'type': AbsolutePathParamType(dir_okay=False, exists=True), 'prompt': 'SSH key file', 'help': 'Manually pass a key file if default path is not set in ssh config', 'non_interactive_default': True}),
('timeout', {'type': int, 'prompt': 'Connection timeout in s', 'help': 'time in seconds to wait for connection before giving up', 'non_interactive_default': True}),
('allow_agent', {'switch': True, 'prompt': 'Allow ssh agent', 'help': 'switch to allow or disallow ssh agent', 'non_interactive_default': True}),
('proxy_command', {'prompt': 'SSH proxy command', 'help': 'SSH proxy command', 'non_interactive_default': True}), # Managed 'manually' in connect
('compress', {'switch': True, 'prompt': 'Compress file transfers', 'help': 'switch file transfer compression on / off', 'non_interactive_default': True}),
('gss_auth', {'type': bool, 'prompt': 'GSS auth', 'help': 'GSS auth for kerberos', 'non_interactive_default': True}),
('gss_kex', {'type': bool, 'prompt': 'GSS kex', 'help': 'GSS kex for kerberos', 'non_interactive_default': True}),
('gss_deleg_creds', {'type': bool, 'prompt': 'GSS deleg_creds', 'help': 'GSS deleg_creds for kerberos', 'non_interactive_default': True}),
('gss_host', {'prompt': 'GSS host', 'help': 'GSS host for kerberos', 'non_interactive_default': True}),
# for Kerberos support through python-gssapi
]
_valid_connect_params = [i[0] for i in _valid_connect_options]
# Valid parameters for the ssh transport
# For each param, a class method with name
# _convert_PARAMNAME_fromstring
# should be defined, that returns the value converted from a string to
# a correct type, or raise a ValidationError
#
# moreover, if you want to help in the default configuration, you can
# define a _get_PARAMNAME_suggestion_string
# to return a suggestion; it must accept only one parameter, being a Computer
# instance
_valid_auth_options = _valid_connect_options + [
('load_system_host_keys', {'switch': True, 'prompt': 'Load system host keys', 'help': 'switch loading system host keys on / off', 'non_interactive_default': True}),
('key_policy', {'type': click.Choice(['RejectPolicy', 'WarningPolicy', 'AutoAddPolicy']), 'prompt': 'Key policy', 'help': 'SSH key policy', 'non_interactive_default': True})
]
# I set the (default) value here to 5 secs between consecutive SSH checks.
# This should be incremented to 30, probably.
_DEFAULT_SAFE_OPEN_INTERVAL = 5
@classmethod
def _get_username_suggestion_string(cls, computer):
"""
Return a suggestion for the specific field.
"""
import getpass
config = parse_sshconfig(computer.hostname)
# Either the configured user in the .ssh/config, or the current username
return str(config.get('user', getpass.getuser()))
@classmethod
def _get_port_suggestion_string(cls, computer):
"""
Return a suggestion for the specific field.
"""
config = parse_sshconfig(computer.hostname)
# Either the configured user in the .ssh/config, or the default SSH port
return str(config.get('port', 22))
@classmethod
def _get_key_filename_suggestion_string(cls, computer):
"""
Return a suggestion for the specific field.
"""
config = parse_sshconfig(computer.hostname)
try:
identities = config['identityfile']
# In paramiko > 0.10, identity file is a list of strings.
if isinstance(identities, six.string_types):
identity = identities
elif isinstance(identities, (list, tuple)):
if not identities:
# An empty list should not be provided; to be sure,
# anyway, behave as if no identityfile were defined
raise KeyError
# By default we suggest only the first one
identity = identities[0]
else:
# If the parser provides an unknown type, just skip to
# the 'except KeyError' section, as if no identityfile
# were provided (hopefully, this should never happen)
raise KeyError
except KeyError:
# No IdentityFile defined: return an empty string
return ""
return os.path.expanduser(identity)
@classmethod
def _get_timeout_suggestion_string(cls, computer):
"""
Return a suggestion for the specific field.
Provide 60s as a default timeout for connections.
"""
config = parse_sshconfig(computer.hostname)
return str(config.get('connecttimeout', "60"))
@classmethod
def _get_allow_agent_suggestion_string(cls, computer):
"""
Return a suggestion for the specific field.
"""
config = parse_sshconfig(computer.hostname)
return convert_to_bool(str(config.get('allow_agent', "no")))
@classmethod
def _get_look_for_keys_suggestion_string(cls, computer):
"""
Return a suggestion for the specific field.
"""
config = parse_sshconfig(computer.hostname)
return convert_to_bool(str(config.get('look_for_keys', "no")))
@classmethod
def _get_proxy_command_suggestion_string(cls, computer):
"""
Return a suggestion for the specific field.
"""
config = parse_sshconfig(computer.hostname)
# Either the configured user in the .ssh/config, or the default SSH port
raw_string = str(config.get('proxycommand', ''))
# Note: %h and %p get already automatically substituted with
# hostname and port by the config parser!
pieces = raw_string.split()
new_pieces = []
for piece in pieces:
if '>' in piece:
# If there is a piece with > to readdress stderr or stdout,
# skip from here on (anything else can only be readdressing)
break
else:
new_pieces.append(piece)
return' '.join(new_pieces)
@classmethod
def _get_compress_suggestion_string(cls, computer):
"""
Return a suggestion for the specific field.
"""
return "True"
@classmethod
def _get_load_system_host_keys_suggestion_string(cls, computer):
"""
Return a suggestion for the specific field.
"""
return "True"
@classmethod
def _get_key_policy_suggestion_string(cls, computer):
"""
Return a suggestion for the specific field.
"""
return "RejectPolicy"
@classmethod
def _get_gss_auth_suggestion_string(cls, computer):
"""
Return a suggestion for the specific field.
"""
config = parse_sshconfig(computer.hostname)
return convert_to_bool(str(config.get('gssapiauthentication', "no")))
@classmethod
def _get_gss_kex_suggestion_string(cls, computer):
"""
Return a suggestion for the specific field.
"""
config = parse_sshconfig(computer.hostname)
return convert_to_bool(str(config.get('gssapikeyexchange', "no")))
@classmethod
def _get_gss_deleg_creds_suggestion_string(cls, computer):
"""
Return a suggestion for the specific field.
"""
config = parse_sshconfig(computer.hostname)
return convert_to_bool(str(config.get('gssapidelegatecredentials', "no")))
@classmethod
def _get_gss_host_suggestion_string(cls, computer):
"""
Return a suggestion for the specific field.
"""
config = parse_sshconfig(computer.hostname)
return str(config.get('gssapihostname', computer.hostname))
    @classmethod
    def _get_safe_interval_suggestion_string(cls, computer):
        """Return the class default minimum interval between connection openings."""
        return cls._DEFAULT_SAFE_OPEN_INTERVAL
def __init__(self, machine, **kwargs):
"""
Initialize the SshTransport class.
:param machine: the machine to connect to
:param load_system_host_keys: (optional, default False)
if False, do not load the system host keys
:param key_policy: (optional, default = paramiko.RejectPolicy())
the policy to use for unknown keys
Other parameters valid for the ssh connect function (see the
self._valid_connect_params list) are passed to the connect
function (as port, username, password, ...); taken from the
accepted paramiko.SSHClient.connect() params.
"""
import paramiko
super(SshTransport, self).__init__()
self._is_open = False
self._sftp = None
self._proxy = None
self._machine = machine
self._client = paramiko.SSHClient()
self._load_system_host_keys = kwargs.pop('load_system_host_keys', False)
if self._load_system_host_keys:
self._client.load_system_host_keys()
self._safe_open_interval = kwargs.pop('safe_interval', self._DEFAULT_SAFE_OPEN_INTERVAL)
self._missing_key_policy = kwargs.pop('key_policy', 'RejectPolicy') # This is paramiko default
if self._missing_key_policy == 'RejectPolicy':
self._client.set_missing_host_key_policy(paramiko.RejectPolicy())
elif self._missing_key_policy == 'WarningPolicy':
self._client.set_missing_host_key_policy(paramiko.WarningPolicy())
elif self._missing_key_policy == 'AutoAddPolicy':
self._client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
else:
raise ValueError("Unknown value of the key policy, allowed values "
"are: RejectPolicy, WarningPolicy, AutoAddPolicy")
self._connect_args = {}
for k in self._valid_connect_params:
try:
self._connect_args[k] = kwargs.pop(k)
except KeyError:
pass
if kwargs:
raise ValueError("The following parameters were not accepted by "
"the transport: {}".format(",".join(str(k) for k in kwargs)))
def open(self):
"""
Open a SSHClient to the machine possibly using the parameters given
in the __init__.
Also opens a sftp channel, ready to be used.
The current working directory is set explicitly, so it is not None.
:raise aiida.common.InvalidOperation: if the channel is already open
"""
from aiida.common.exceptions import InvalidOperation
from aiida.transports.util import _DetachedProxyCommand
if self._is_open:
raise InvalidOperation("Cannot open the transport twice")
# Open a SSHClient
connection_arguments = self._connect_args
proxystring = connection_arguments.pop('proxy_command', None)
if proxystring:
self._proxy = _DetachedProxyCommand(proxystring)
connection_arguments['sock'] = self._proxy
try:
self._client.connect(self._machine, **connection_arguments)
except Exception as exc:
self.logger.error("Error connecting through SSH: [{}] {}, "
"connect_args were: {}".format(exc.__class__.__name__, exc, self._connect_args))
raise
# Open also a SFTPClient
self._sftp = self._client.open_sftp()
# Set the current directory to a explicit path, and not to None
self._sftp.chdir(self._sftp.normalize('.'))
self._is_open = True
return self
def close(self):
"""
Close the SFTP channel, and the SSHClient.
:todo: correctly manage exceptions
:raise aiida.common.InvalidOperation: if the channel is already open
"""
from aiida.common.exceptions import InvalidOperation
if not self._is_open:
raise InvalidOperation("Cannot close the transport: it is already closed")
self._sftp.close()
self._client.close()
self._is_open = False
@property
def sshclient(self):
if not self._is_open:
raise TransportInternalError("Error, ssh method called for SshTransport without opening the channel first")
return self._client
@property
def sftp(self):
if not self._is_open:
raise TransportInternalError("Error, sftp method called for SshTransport without opening the channel first")
return self._sftp
def __str__(self):
"""
Return a useful string.
"""
conn_info = self._machine
try:
conn_info = "{}@{}".format(self._connect_args['username'], conn_info)
except KeyError:
# No username explicitly defined: ignore
pass
try:
conn_info += ':{}'.format(self._connect_args['port'])
except KeyError:
# No port explicitly defined: ignore
pass
return "{} [{}]".format("OPEN" if self._is_open else "CLOSED", conn_info)
def chdir(self, path):
"""
Change directory of the SFTP session. Emulated internally by paramiko.
Differently from paramiko, if you pass None to chdir, nothing
happens and the cwd is unchanged.
"""
from paramiko.sftp import SFTPError
old_path = self.sftp.getcwd()
if path is not None:
try:
self.sftp.chdir(path)
except SFTPError as e:
# e.args[0] is an error code. For instance,
# 20 is 'the object is not a directory'
# Here I just re-raise the message as IOError
raise IOError(e.args[1])
# Paramiko already checked that path is a folder, otherwise I would
# have gotten an exception. Now, I want to check that I have read
# permissions in this folder (nothing is said on write permissions,
# though).
# Otherwise, if I do _exec_command_internal, that as a first operation
# cd's in a folder, I get a wrong retval, that is an unwanted behavior.
#
# Note: I don't store the result of the function; if I have no
# read permissions, this will raise an exception.
try:
self.sftp.stat('.')
except IOError as exc:
if 'Permission denied' in str(exc):
self.chdir(old_path)
raise IOError(str(exc))
    def normalize(self, path):
        """
        Returns the normalized path (removing double slashes, etc...)

        :param path: the remote path to normalize; resolution is performed
            server-side by the SFTP server
        """
        return self.sftp.normalize(path)
    def getcwd(self):
        """
        Return the current working directory for this SFTP session, as
        emulated by paramiko. If no directory has been set with chdir,
        this method will return None. But in __enter__ this is set explicitly,
        so this should never happen within this class.

        :return: the current working directory (str), or None if never set
        """
        return self.sftp.getcwd()
    def makedirs(self, path, ignore_existing=False):
        """
        Super-mkdir; create a leaf directory and all intermediate ones.
        Works like mkdir, except that any intermediate path segment (not
        just the rightmost) will be created if it does not exist.

        NOTE: since os.path.split uses the separators as the host system
        (that could be windows), I assume the remote computer is Linux-based
        and use '/' as separators!

        :param path: directory to create (string)
        :param ignore_existing: if set to true, it doesn't give any error
            if the leaf directory does already exist (bool)

        :raise OSError: If the directory already exists.
        """
        # check to avoid creation of empty dirs
        path = os.path.normpath(path)

        # Split the path into its components; keep track of whether it was
        # absolute (leading '/') so the components can be re-joined below.
        if path.startswith('/'):
            to_create = path.strip().split('/')[1:]
            this_dir = '/'
        else:
            to_create = path.strip().split('/')
            this_dir = ''

        # Walk the components, rebuilding the path one segment at a time and
        # creating each missing intermediate directory.
        for count, element in enumerate(to_create):
            if count > 0:
                this_dir += '/'
            this_dir += element
            # Leaf already exists and we were told to ignore that: done.
            if count + 1 == len(to_create) and self.isdir(this_dir) and ignore_existing:
                return
            # Leaf already exists and ignore_existing is False: call mkdir
            # on the existing directory on purpose, so that it raises the
            # OSError documented above.
            if count + 1 == len(to_create) and self.isdir(this_dir) and not ignore_existing:
                self.mkdir(this_dir)
            # Create any (intermediate or leaf) directory that is missing.
            if not self.isdir(this_dir):
                self.mkdir(this_dir)
def mkdir(self, path, ignore_existing=False):
"""
Create a folder (directory) named path.
:param path: name of the folder to create
:param ignore_existing: if True, does not give any error if the directory
already exists
:raise OSError: If the directory already exists.
"""
if ignore_existing and self.isdir(path):
return
try:
self.sftp.mkdir(path)
except IOError as exc:
if os.path.isabs(path):
raise OSError("Error during mkdir of '{}', "
"maybe you don't have the permissions to do it, "
"or the directory already exists? ({})".format(path, exc))
else:
raise OSError("Error during mkdir of '{}' from folder '{}', "
"maybe you don't have the permissions to do it, "
"or the directory already exists? ({})".format(path, self.getcwd(), exc))
# TODO : implement rmtree
def rmtree(self, path):
"""
Remove a file or a directory at path, recursively
Flags used: -r: recursive copy; -f: force, makes the command non interactive;
:param path: remote path to delete
:raise IOError: if the rm execution failed.
"""
# Assuming linux rm command!
# TODO : do we need to avoid the aliases when calling rm_exe='rm'? Call directly /bin/rm?
rm_exe = 'rm'
rm_flags = '-r -f'
# if in input I give an invalid object raise ValueError
if not path:
raise ValueError('Input to rmtree() must be a non empty string. ' + 'Found instead %s as path' % path)
command = '{} {} {}'.format(rm_exe, rm_flags, escape_for_bash(path))
retval, stdout, stderr = self.exec_command_wait(command)
if retval == 0:
if stderr.strip():
self.logger.warning("There was nonempty stderr in the rm command: {}".format(stderr))
return True
else:
self.logger.error("Problem executing rm. Exit code: {}, stdout: '{}', "
"stderr: '{}'".format(retval, stdout, stderr))
raise IOError("Error while executing rm. Exit code: {}".format(retval))
    def rmdir(self, path):
        """
        Remove the folder named 'path' if empty.

        :param path: the (remote) folder to remove; the SFTP server will
            refuse to remove a non-empty directory
        """
        self.sftp.rmdir(path)
def isdir(self, path):
"""
Return True if the given path is a directory, False otherwise.
Return False also if the path does not exist.
"""
# Return False on empty string (paramiko would map this to the local
# folder instead)
if not path:
return False
try:
return S_ISDIR(self.sftp.stat(path).st_mode)
except IOError as e:
if getattr(e, "errno", None) == 2:
# errno=2 means path does not exist: I return False
return False
else:
raise # Typically if I don't have permissions (errno=13)
def chmod(self, path, mode):
"""
Change permissions to path
:param path: path to file
:param mode: new permission bits (integer)
"""
if not path:
raise IOError("Input path is an empty argument.")
return self.sftp.chmod(path, mode)
def _os_path_split_asunder(self, path):
"""
Used by makedirs. Takes path (a str)
and returns a list deconcatenating the path
"""
parts = []
while True:
newpath, tail = os.path.split(path)
if newpath == path:
assert not tail
if path: parts.append(path)
break
parts.append(tail)
path = newpath
parts.reverse()
return parts
def put(self, localpath, remotepath, callback=None, dereference=True, overwrite=True, ignore_nonexisting=False):
"""
Put a file or a folder from local to remote.
Redirects to putfile or puttree.
:param localpath: an (absolute) local path
:param remotepath: a remote path
:param dereference: follow symbolic links (boolean).
Default = True (default behaviour in paramiko). False is not implemented.
:param overwrite: if True overwrites files and folders (boolean).
Default = False.
:raise ValueError: if local path is invalid
:raise OSError: if the localpath does not exist
"""
# TODO: flag confirm exists since v1.7.7. What is the paramiko
# version supported?
# TODO : add dereference
if not dereference:
raise NotImplementedError
if not os.path.isabs(localpath):
raise ValueError("The localpath must be an absolute path")
if self.has_magic(localpath):
if self.has_magic(remotepath):
raise ValueError("Pathname patterns are not allowed in the destination")
# use the imported glob to analyze the path locally
to_copy_list = glob.glob(localpath)
rename_remote = False
if len(to_copy_list) > 1:
# I can't scp more than one file on a single file
if self.isfile(remotepath):
raise OSError("Remote destination is not a directory")
# I can't scp more than one file in a non existing directory
elif not self.path_exists(remotepath): # questo dovrebbe valere solo per file
raise OSError("Remote directory does not exist")
else: # the remote path is a directory
rename_remote = True
for s in to_copy_list:
if os.path.isfile(s):
if rename_remote: # copying more than one file in one directory
# here is the case isfile and more than one file
r = os.path.join(remotepath, os.path.split(s)[1])
self.putfile(s, r, callback, dereference, overwrite)
elif self.isdir(remotepath): # one file to copy in '.'
r = os.path.join(remotepath, os.path.split(s)[1])
self.putfile(s, r, callback, dereference, overwrite)
else: # one file to copy on one file
self.putfile(s, remotepath, callback, dereference, overwrite)
else:
self.puttree(s, remotepath, callback, dereference, overwrite)
else:
if os.path.isdir(localpath):
self.puttree(localpath, remotepath, callback, dereference, overwrite)
elif os.path.isfile(localpath):
if self.isdir(remotepath):
r = os.path.join(remotepath, os.path.split(localpath)[1])
self.putfile(localpath, r, callback, dereference, overwrite)
else:
self.putfile(localpath, remotepath, callback, dereference, overwrite)
else:
if ignore_nonexisting:
pass
else:
raise OSError("The local path {} does not exist".format(localpath))
def putfile(self, localpath, remotepath, callback=None, dereference=True, overwrite=True):
"""
Put a file from local to remote.
:param localpath: an (absolute) local path
:param remotepath: a remote path
:param overwrite: if True overwrites files and folders (boolean).
Default = True.
:raise ValueError: if local path is invalid
:raise OSError: if the localpath does not exist,
or unintentionally overwriting
"""
# TODO : add dereference
if not dereference:
raise NotImplementedError
# TODO : check what happens if I give in input a directory
if not os.path.isabs(localpath):
raise ValueError("The localpath must be an absolute path")
if self.isfile(remotepath) and not overwrite:
raise OSError('Destination already exists: not overwriting it')
return self.sftp.put(localpath, remotepath, callback=callback)
def puttree(self, localpath, remotepath, callback=None, dereference=True, overwrite=True): # by default overwrite
"""
Put a folder recursively from local to remote.
:param localpath: an (absolute) local path
:param remotepath: a remote path
:param dereference: follow symbolic links (boolean)
Default = True (default behaviour in paramiko). False is not implemented.
:param overwrite: if True overwrites files and folders (boolean).
Default = True
:raise ValueError: if local path is invalid
:raise OSError: if the localpath does not exist, or trying to overwrite
:raise IOError: if remotepath is invalid
.. note:: setting dereference equal to True could cause infinite loops.
see os.walk() documentation
"""
# TODO : add dereference
if not dereference:
raise NotImplementedError
if not os.path.isabs(localpath):
raise ValueError("The localpath must be an absolute path")
if not os.path.exists(localpath):
raise OSError("The localpath does not exists")
if not os.path.isdir(localpath):
raise ValueError("Input localpath is not a folder: {}".format(localpath))
if not remotepath:
raise IOError("remotepath must be a non empty string")
if self.path_exists(remotepath) and not overwrite:
raise OSError("Can't overwrite existing files")
if self.isfile(remotepath):
raise OSError("Cannot copy a directory into a file")
if not self.isdir(remotepath): # in this case copy things in the remotepath directly
self.mkdir(remotepath) # and make a directory at its place
else: # remotepath exists already: copy the folder inside of it!
remotepath = os.path.join(remotepath, os.path.split(localpath)[1])
self.mkdir(remotepath) # create a nested folder
# TODO, NOTE: we are not using 'onerror' because we checked above that
# the folder exists, but it would be better to use it
for this_source in os.walk(localpath):
# Get the relative path
this_basename = os.path.relpath(path=this_source[0], start=localpath)
try:
self.sftp.stat(os.path.join(remotepath, this_basename))
except IOError as e:
import errno
if e.errno == errno.ENOENT: # Missing file
self.mkdir(os.path.join(remotepath, this_basename))
else:
raise
for this_file in this_source[2]:
this_local_file = os.path.join(localpath, this_basename, this_file)
this_remote_file = os.path.join(remotepath, this_basename, this_file)
self.putfile(this_local_file, this_remote_file)
def get(self, remotepath, localpath, callback=None, dereference=True, overwrite=True, ignore_nonexisting=False):
"""
Get a file or folder from remote to local.
Redirects to getfile or gettree.
:param remotepath: a remote path
:param localpath: an (absolute) local path
:param dereference: follow symbolic links.
Default = True (default behaviour in paramiko).
False is not implemented.
:param overwrite: if True overwrites files and folders.
Default = False
:raise ValueError: if local path is invalid
:raise IOError: if the remotepath is not found
"""
# TODO : add dereference
if not dereference:
raise NotImplementedError
if not os.path.isabs(localpath):
raise ValueError("The localpath must be an absolute path")
if self.has_magic(remotepath):
if self.has_magic(localpath):
raise ValueError("Pathname patterns are not allowed in the destination")
# use the self glob to analyze the path remotely
to_copy_list = self.glob(remotepath)
rename_local = False
if len(to_copy_list) > 1:
# I can't scp more than one file on a single file
if os.path.isfile(localpath):
raise IOError("Remote destination is not a directory")
# I can't scp more than one file in a non existing directory
elif not os.path.exists(localpath): # this should hold only for files
raise OSError("Remote directory does not exist")
else: # the remote path is a directory
rename_local = True
for s in to_copy_list:
if self.isfile(s):
if rename_local: # copying more than one file in one directory
# here is the case isfile and more than one file
r = os.path.join(localpath, os.path.split(s)[1])
self.getfile(s, r, callback, dereference, overwrite)
else: # one file to copy on one file
self.getfile(s, localpath, callback, dereference, overwrite)
else:
self.gettree(s, localpath, callback, dereference, overwrite)
else:
if self.isdir(remotepath):
self.gettree(remotepath, localpath, callback, dereference, overwrite)
elif self.isfile(remotepath):
if os.path.isdir(localpath):
r = os.path.join(localpath, os.path.split(remotepath)[1])
self.getfile(remotepath, r, callback, dereference, overwrite)
else:
self.getfile(remotepath, localpath, callback, dereference, overwrite)
else:
if ignore_nonexisting:
pass
else:
raise IOError("The remote path {} does not exist".format(remotepath))
def getfile(self, remotepath, localpath, callback=None, dereference=True, overwrite=True):
"""
Get a file from remote to local.
:param remotepath: a remote path
:param localpath: an (absolute) local path
:param overwrite: if True overwrites files and folders.
Default = False
:raise ValueError: if local path is invalid
:raise OSError: if unintentionally overwriting
"""
# TODO : add dereference
if not os.path.isabs(localpath):
raise ValueError("localpath must be an absolute path")
if os.path.isfile(localpath) and not overwrite:
raise OSError('Destination already exists: not overwriting it')
if not dereference:
raise NotImplementedError
# Workaround for bug #724 in paramiko -- remove localpath on IOError
try:
return self.sftp.get(remotepath, localpath, callback)
except IOError:
try:
os.remove(localpath)
except OSError:
pass
raise
def gettree(self, remotepath, localpath, callback=None, dereference=True, overwrite=True):
"""
Get a folder recursively from remote to local.
:param remotepath: a remote path
:param localpath: an (absolute) local path
:param dereference: follow symbolic links.
Default = True (default behaviour in paramiko).
False is not implemented.
:param overwrite: if True overwrites files and folders.
Default = False
:raise ValueError: if local path is invalid
:raise IOError: if the remotepath is not found
:raise OSError: if unintentionally overwriting
"""
# TODO : add dereference
if not dereference:
raise NotImplementedError
if not remotepath:
raise IOError("Remotepath must be a non empty string")
if not localpath:
raise ValueError("Localpaths must be a non empty string")
if not os.path.isabs(localpath):
raise ValueError("Localpaths must be an absolute path")
if not self.isdir(remotepath):
raise IOError("Input remotepath is not a folder: {}".format(localpath))
if os.path.exists(localpath) and not overwrite:
raise OSError("Can't overwrite existing files")
if os.path.isfile(localpath):
raise OSError("Cannot copy a directory into a file")
if not os.path.isdir(localpath): # in this case copy things in the remotepath directly
os.mkdir(localpath) # and make a directory at its place
else: # localpath exists already: copy the folder inside of it!
localpath = os.path.join(localpath, os.path.split(remotepath)[1])
os.mkdir(localpath) # create a nested folder
item_list = self.listdir(remotepath)
dest = str(localpath)
for item in item_list:
item = str(item)
if self.isdir(os.path.join(remotepath, item)):
self.gettree(os.path.join(remotepath, item), os.path.join(dest, item))
else:
self.getfile(os.path.join(remotepath, item), os.path.join(dest, item))
def get_attribute(self, path):
"""
Returns the object Fileattribute, specified in aiida.transports
Receives in input the path of a given file.
"""
from aiida.transports.util import FileAttribute
paramiko_attr = self.sftp.lstat(path)
aiida_attr = FileAttribute()
# map the paramiko class into the aiida one
# note that paramiko object contains more informations than the aiida
for key in aiida_attr._valid_fields:
aiida_attr[key] = getattr(paramiko_attr, key)
return aiida_attr
def copyfile(self, remotesource, remotedestination, dereference=False):
return self.copy(remotesource, remotedestination, dereference)
def copytree(self, remotesource, remotedestination, dereference=False):
return self.copy(remotesource, remotedestination, dereference, recursive=True)
def copy(self, remotesource, remotedestination, dereference=False, recursive=True):
"""
Copy a file or a directory from remote source to remote destination.
Flags used: ``-r``: recursive copy; ``-f``: force, makes the command non interactive;
``-L`` follows symbolic links
:param remotesource: file to copy from
:param remotedestination: file to copy to
:param dereference: if True, copy content instead of copying the symlinks only
Default = False.
:param recursive: if True copy directories recursively, otherwise only copy the specified file(s)
:type recursive: bool
:raise IOError: if the cp execution failed.
.. note:: setting dereference equal to True could cause infinite loops.
"""
# In the majority of cases, we should deal with linux cp commands
# TODO : do we need to avoid the aliases when calling cp_exe='cp'? Call directly /bin/cp?
cp_flags = '-f'
if recursive:
cp_flags += ' -r'
# For the moment, this is hardcoded. May become a parameter
cp_exe = 'cp'
# To evaluate if we also want -p: preserves mode,ownership and timestamp
if dereference:
# use -L; --dereference is not supported on mac
cp_flags += ' -L'
# if in input I give an invalid object raise ValueError
if not remotesource:
raise ValueError(
'Input to copy() must be a non empty string. ' + 'Found instead %s as remotesource' % remotesource)
if not remotedestination:
raise ValueError('Input to copy() must be a non empty string. ' +
'Found instead %s as remotedestination' % remotedestination)
if self.has_magic(remotedestination):
raise ValueError("Pathname patterns are not allowed in the destination")
if self.has_magic(remotesource):
to_copy_list = self.glob(remotesource)
if len(to_copy_list) > 1:
if not self.path_exists(remotedestination) or self.isfile(remotedestination):
raise OSError("Can't copy more than one file in the same destination file")
for s in to_copy_list:
self._exec_cp(cp_exe, cp_flags, s, remotedestination)
else:
self._exec_cp(cp_exe, cp_flags, remotesource, remotedestination)
def _exec_cp(self, cp_exe, cp_flags, src, dst):
# to simplify writing the above copy function
command = '{} {} {} {}'.format(cp_exe, cp_flags, escape_for_bash(src), escape_for_bash(dst))
retval, stdout, stderr = self.exec_command_wait(command)
# TODO : check and fix below
if retval == 0:
if stderr.strip():
self.logger.warning("There was nonempty stderr in the cp command: {}".format(stderr))
else:
self.logger.error("Problem executing cp. Exit code: {}, stdout: '{}', "
"stderr: '{}', command: '{}'".format(retval, stdout, stderr, command))
raise IOError("Error while executing cp. Exit code: {}, "
"stdout: '{}', stderr: '{}', "
"command: '{}'".format(retval, stdout, stderr, command))
def _local_listdir(self, path, pattern=None):
"""
Acts on the local folder, for the rest, same as listdir
"""
if not pattern:
return os.listdir(path)
else:
import re
if path.startswith('/'): # always this is the case in the local case
base_dir = path
else:
base_dir = os.path.join(os.getcwd(), path)
filtered_list = glob.glob(os.path.join(base_dir, pattern))
if not base_dir.endswith(os.sep):
base_dir += os.sep
return [re.sub(base_dir, '', i) for i in filtered_list]
def listdir(self, path='.', pattern=None):
"""
Get the list of files at path.
:param path: default = '.'
:param pattern: returns the list of files matching pattern.
Unix only. (Use to emulate ``ls *`` for example)
"""
if not pattern:
return self.sftp.listdir(path)
else:
import re
if path.startswith('/'):
base_dir = path
else:
base_dir = os.path.join(self.getcwd(), path)
filtered_list = glob.glob(os.path.join(base_dir, pattern))
if not base_dir.endswith('/'):
base_dir += '/'
return [re.sub(base_dir, '', i) for i in filtered_list]
    def remove(self, path):
        """
        Remove a single file at 'path'

        :param path: remote path of the file to remove
        """
        return self.sftp.remove(path)
def rename(self, src, dst):
"""
Rename a file or folder from src to dst.
:param str oldpath: existing name of the file or folder
:param str newpath: new name for the file or folder
:raises IOError: if src/dst is not found
:raises ValueError: if src/dst is not a valid string
"""
if not src:
raise ValueError("Source {} is not a valid string".format(src))
if not dst:
raise ValueError("Destination {} is not a valid string".format(dst))
if not self.isfile(src):
if not self.isdir(src):
raise IOError("Source {} does not exist".format(src))
if not self.isfile(dst):
if not self.isdir(dst):
raise IOError("Destination {} does not exist".format(dst))
return self.sftp.rename(src, dst)
def isfile(self, path):
"""
Return True if the given path is a file, False otherwise.
Return False also if the path does not exist.
"""
# This should not be needed for files, since an empty string should
# be mapped by paramiko to the local directory - which is not a file -
# but this is just to be sure
if not path:
return False
try:
self.logger.debug("stat for path '{}' ('{}'): {} [{}]".format(path, self.sftp.normalize(path),
self.sftp.stat(path),
self.sftp.stat(path).st_mode))
return S_ISREG(self.sftp.stat(path).st_mode)
except IOError as e:
if getattr(e, "errno", None) == 2:
# errno=2 means path does not exist: I return False
return False
else:
raise # Typically if I don't have permissions (errno=13)
def _exec_command_internal(self, command, combine_stderr=False, bufsize=-1):
"""
Executes the specified command in bash login shell.
Before the command is executed, changes directory to the current
working directory as returned by self.getcwd().
For executing commands and waiting for them to finish, use
exec_command_wait.
:param command: the command to execute. The command is assumed to be
already escaped using :py:func:`aiida.common.escaping.escape_for_bash`.
:param combine_stderr: (default False) if True, combine stdout and
stderr on the same buffer (i.e., stdout).
Note: If combine_stderr is True, stderr will always be empty.
:param bufsize: same meaning of the one used by paramiko.
:return: a tuple with (stdin, stdout, stderr, channel),
where stdin, stdout and stderr behave as file-like objects,
plus the methods provided by paramiko, and channel is a
paramiko.Channel object.
"""
channel = self.sshclient.get_transport().open_session()
channel.set_combine_stderr(combine_stderr)
if self.getcwd() is not None:
escaped_folder = escape_for_bash(self.getcwd())
command_to_execute = ("cd {escaped_folder} && "
"{real_command}".format(escaped_folder=escaped_folder, real_command=command))
else:
command_to_execute = command
self.logger.debug("Command to be executed: {}".format(command_to_execute))
# Note: The default shell will eat one level of escaping, while
# 'bash -l -c ...' will eat another. Thus, we need to escape again.
channel.exec_command('bash -l -c ' + escape_for_bash(command_to_execute))
stdin = channel.makefile('wb', bufsize)
stdout = channel.makefile('rb', bufsize)
stderr = channel.makefile_stderr('rb', bufsize)
return stdin, stdout, stderr, channel
def exec_command_wait(self, command, stdin=None, combine_stderr=False, bufsize=-1):
"""
Executes the specified command and waits for it to finish.
:param command: the command to execute
:param stdin: (optional,default=None) can be a string or a
file-like object.
:param combine_stderr: (optional, default=False) see docstring of
self._exec_command_internal()
:param bufsize: same meaning of paramiko.
:return: a tuple with (return_value, stdout, stderr) where stdout and stderr
are strings.
"""
# TODO: To see if like this it works or hangs because of buffer problems.
ssh_stdin, stdout, stderr, channel = self._exec_command_internal(command, combine_stderr, bufsize=bufsize)
if stdin is not None:
if isinstance(stdin, six.string_types):
filelike_stdin = StringIO(stdin)
else:
filelike_stdin = stdin
try:
for l in filelike_stdin.readlines():
ssh_stdin.write(l)
except AttributeError:
raise ValueError("stdin can only be either a string of a file-like object!")
# I flush and close them anyway; important to call shutdown_write
# to avoid hangouts
ssh_stdin.flush()
ssh_stdin.channel.shutdown_write()
# I get the return code (blocking)
retval = channel.recv_exit_status()
# needs to be after 'recv_exit_status', otherwise it might hang
output_text = stdout.read().decode('utf-8')
stderr_text = stderr.read().decode('utf-8')
return retval, output_text, stderr_text
def gotocomputer_command(self, remotedir):
"""
Specific gotocomputer string to connect to a given remote computer via
ssh and directly go to the calculation folder.
"""
# TODO: add also ProxyCommand and Timeout support
further_params = []
if 'username' in self._connect_args:
further_params.append("-l {}".format(escape_for_bash(self._connect_args['username'])))
if 'port' in self._connect_args:
further_params.append("-p {}".format(self._connect_args['port']))
if 'key_filename' in self._connect_args:
further_params.append("-i {}".format(escape_for_bash(self._connect_args['key_filename'])))
further_params_str = ' '.join(further_params)
connect_string = """ssh -t {machine} {further_params} "if [ -d {escaped_remotedir} ] ; then cd {escaped_remotedir} ; bash -l ; else echo ' ** The directory' ; echo ' ** {remotedir}' ; echo ' ** seems to have been deleted, I logout...' ; fi" """.format(
further_params=further_params_str,
machine=self._machine,
escaped_remotedir="'{}'".format(remotedir),
remotedir=remotedir)
# print connect_string
return connect_string
def symlink(self, remotesource, remotedestination):
"""
Create a symbolic link between the remote source and the remote
destination.
:param remotesource: remote source. Can contain a pattern.
:param remotedestination: remote destination
"""
# paramiko gives some errors if path is starting with '.'
s = os.path.normpath(remotesource)
d = os.path.normpath(remotedestination)
if self.has_magic(s):
if self.has_magic(d):
# if there are patterns in dest, I don't know which name to assign
raise ValueError("Remotedestination cannot have patterns")
# find all files matching pattern
for this_s in self.glob(s):
# create the name of the link: take the last part of the path
this_d = os.path.join(remotedestination, os.path.split(this_s)[-1])
self.sftp.symlink(this_s, this_d)
else:
self.sftp.symlink(s, d)
def path_exists(self, path):
"""
Check if path exists
"""
import errno
try:
self.sftp.stat(path)
except IOError as e:
if e.errno == errno.ENOENT:
return False
raise
else:
return True
| 40.680578 | 263 | 0.603062 |
2582bf3ce78bc051ac66d9f9db516d05ca5d9417 | 2,611 | py | Python | pysteps/visualization/spectral.py | leabeusch/pysteps | 5f162d4b1155e4cfd894c9635eed3f0e823adedd | [
"BSD-3-Clause"
] | 285 | 2018-07-11T10:42:43.000Z | 2022-03-23T13:44:54.000Z | pysteps/visualization/spectral.py | leabeusch/pysteps | 5f162d4b1155e4cfd894c9635eed3f0e823adedd | [
"BSD-3-Clause"
] | 246 | 2018-07-16T06:17:12.000Z | 2022-03-22T15:45:08.000Z | pysteps/visualization/spectral.py | leabeusch/pysteps | 5f162d4b1155e4cfd894c9635eed3f0e823adedd | [
"BSD-3-Clause"
] | 97 | 2018-07-12T12:05:45.000Z | 2022-03-31T14:56:31.000Z | # -*- coding: utf-8 -*-
"""
pysteps.visualization.spectral
==============================
Methods for plotting Fourier spectra.
.. autosummary::
:toctree: ../generated/
plot_spectrum1d
"""
import matplotlib.pylab as plt
import numpy as np
def plot_spectrum1d(
    fft_freq,
    fft_power,
    x_units=None,
    y_units=None,
    wavelength_ticks=None,
    color="k",
    lw=1.0,
    label=None,
    ax=None,
    **kwargs,
):
    """
    Plot a radially averaged Fourier power spectrum with both axes in
    decibels (10*log10), so that power laws appear as straight lines.

    Parameters
    ----------
    fft_freq: array-like
        1d array of Fourier frequencies (e.g. from
        :py:func:`pysteps.utils.spectral.rapsd`).
    fft_power: array-like
        1d array with the radially averaged power spectrum matching
        ``fft_freq``.
    x_units: str, optional
        Units of the X variable (distance, e.g. "km").
    y_units: str, optional
        Units of the Y variable (amplitude, e.g. "dBR").
    wavelength_ticks: array-like, optional
        Wavelengths at which to place xtick labels.
    color: str, optional
        Line color.
    lw: float, optional
        Line width.
    label: str, optional
        Label (for legend).
    ax: Axes, optional
        Plot axes.

    Returns
    -------
    ax: Axes
        Plot axes
    """
    # The two inputs describe the same spectrum, so they must align.
    n_freq = len(fft_freq)
    n_pow = len(fft_power)
    if n_freq != n_pow:
        raise ValueError(
            f"Dimensions of the 1d input arrays must be equal. {n_freq} vs {n_pow}"
        )

    if ax is None:
        ax = plt.subplot(111)

    ax.plot(
        10 * np.log10(fft_freq),
        10 * np.log10(fft_power),
        color=color,
        linewidth=lw,
        label=label,
        **kwargs,
    )

    # X-axis: either mark user-selected wavelengths or label raw frequency.
    if wavelength_ticks is not None:
        wavelengths = np.array(wavelength_ticks)
        ax.set_xticks(10 * np.log10(1 / wavelengths))
        ax.set_xticklabels(wavelengths)
        if x_units is not None:
            ax.set_xlabel(f"Wavelength [{x_units}]")
    elif x_units is not None:
        ax.set_xlabel(f"Frequency [1/{x_units}]")

    # Y-axis: doubled braces render literal "{"/"}" inside the f-string.
    if y_units is not None:
        power_units = fr"$10log_{{ 10 }}(\frac{{ {y_units}^2 }}{{ {x_units} }})$"
        ax.set_ylabel(f"Power {power_units}")
    return ax
| 25.851485 | 84 | 0.571812 |
ae8b78672ba9cdc7f557488c13e42be940f3d387 | 1,744 | py | Python | starthinker/task/dcm_api/schema/mobileAppsListResponse.py | viohman/starthinker | 20bd2d7fd1e541eb8a2c9b7159941f667e22e38e | [
"Apache-2.0"
] | null | null | null | starthinker/task/dcm_api/schema/mobileAppsListResponse.py | viohman/starthinker | 20bd2d7fd1e541eb8a2c9b7159941f667e22e38e | [
"Apache-2.0"
] | 6 | 2021-03-19T12:00:18.000Z | 2022-02-10T09:43:42.000Z | starthinker/task/dcm_api/schema/mobileAppsListResponse.py | viohman/starthinker | 20bd2d7fd1e541eb8a2c9b7159941f667e22e38e | [
"Apache-2.0"
] | null | null | null | ###########################################################################
#
# Copyright 2019 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
###########################################################################
# BigQuery table schema describing the DCM/DV360 "mobileApps.list" API
# response: a top-level `kind`, a repeated RECORD for the mobileApps array,
# and a `nextPageToken` used to page through results.
mobileAppsListResponse_Schema = [
    {
        "description": "",
        "name": "kind",
        "type": "STRING",
        "mode": "NULLABLE"
    },
    {
        # One repeated record per mobile app returned by the API.
        "name": "mobileApps",
        "type": "RECORD",
        "mode": "REPEATED",
        "fields": [
            {
                "description": "APPLE_APP_STORE, GOOGLE_PLAY_STORE, UNKNOWN",
                "name": "directory",
                "type": "STRING",
                "mode": "NULLABLE"
            },
            {
                "description": "",
                "name": "id",
                "type": "STRING",
                "mode": "NULLABLE"
            },
            {
                "description": "",
                "name": "kind",
                "type": "STRING",
                "mode": "NULLABLE"
            },
            {
                "description": "",
                "name": "publisherName",
                "type": "STRING",
                "mode": "NULLABLE"
            },
            {
                "description": "",
                "name": "title",
                "type": "STRING",
                "mode": "NULLABLE"
            }
        ]
    },
    {
        "description": "",
        "name": "nextPageToken",
        "type": "STRING",
        "mode": "NULLABLE"
    }
]
| 24.914286 | 75 | 0.491972 |
6b227df79c8bcbfb373c290f1831243e513f1b2c | 342 | py | Python | app/verify.py | altg0x0/mail_service | a16adc10fa91f9bf51689eb74e352a273de69b1e | [
"Unlicense"
] | null | null | null | app/verify.py | altg0x0/mail_service | a16adc10fa91f9bf51689eb74e352a273de69b1e | [
"Unlicense"
] | null | null | null | app/verify.py | altg0x0/mail_service | a16adc10fa91f9bf51689eb74e352a273de69b1e | [
"Unlicense"
] | null | null | null | from hashlib import sha512
import crypt
import base64
from hmac import compare_digest as constant_time_compare
def verify(password, encoded):
    """Check *password* against a Dovecot-style ``{SHA512-CRYPT}`` hash.

    :param password: clear-text password to verify.
    :param encoded: stored hash, e.g. ``{SHA512-CRYPT}$6$salt$...``.
    :return: ``True`` if the password matches, ``False`` otherwise.
    """
    striped = encoded.replace('{SHA512-CRYPT}', '')
    # crypt.crypt() accepts a full crypt-format string as its "salt"
    # argument and parses the method, the optional "rounds=N$" spec and the
    # actual salt out of it itself.  The previous fixed striped[:19] slice
    # silently truncated hashes carrying a "rounds=" prefix, making such
    # passwords unverifiable; passing the whole string handles every salt
    # length and rounds variant.
    return constant_time_compare(striped, crypt.crypt(password, striped))
| 31.090909 | 71 | 0.751462 |
ae23f414a930cad7f71eda4feb7a420f8d281eb9 | 3,692 | py | Python | cloudmesh_client/shell/plugins/ClusterCommand.py | izelarabm/client | d257de3b5ded6714b7cb52765b4b7fd0bffecd0c | [
"Apache-2.0"
] | null | null | null | cloudmesh_client/shell/plugins/ClusterCommand.py | izelarabm/client | d257de3b5ded6714b7cb52765b4b7fd0bffecd0c | [
"Apache-2.0"
] | null | null | null | cloudmesh_client/shell/plugins/ClusterCommand.py | izelarabm/client | d257de3b5ded6714b7cb52765b4b7fd0bffecd0c | [
"Apache-2.0"
] | null | null | null | from __future__ import print_function
from cloudmesh_client.shell.command import command, PluginCommand, CloudPluginCommand
from cloudmesh_client.shell.console import Console
# Plugin command that (eventually) manages named virtual clusters of VMs.
# NOTE: the docstring of do_cluster below is *runtime behavior* — the
# @command decorator parses it (docopt-style) to build the CLI usage — so
# its text must not be reworded casually.
class ClusterCommand(PluginCommand, CloudPluginCommand):

    # Maps the command name to its help topic; "notimplemented" reflects
    # the stub body of do_cluster below.
    topics = {"cluster": "notimplemented"}

    def __init__(self, context):
        """Store the shell context; only used here for debug tracing."""
        self.context = context
        if self.context.debug:
            print("init command cluster ")

    # noinspection PyUnusedLocal
    @command
    def do_cluster(self, args, arguments):
        """
        ::

            Usage:
                cluster list [--format=FORMAT]
                cluster list NAME
                             [--format=FORMAT]
                             [--column=COLUMN]
                             [--detail]
                cluster create NAME
                               [--count=COUNT]
                               [--login=USERNAME]
                               [--cloud=CLOUD]
                               [--image=IMAGE]
                               [--flavor=FLAVOR]
                               [--add]
                cluster delete NAME

            Description:
                with the help of the cluster command you can create a number
                of virtual machines that are integrated in a named virtual cluster.
                You will be able to login between the nodes of the virtual cluster
                while using public keys.

            Examples:
                cluster list
                    list the clusters

                cluster create NAME --count=COUNT --login=USERNAME [options...]
                    Start a cluster of VMs, and each of them can log into each other.
                    CAUTION: you should specify defaults before using this command:
                    1. select cloud to work on, e.g. cloud select kilo
                       default cloud=kilo
                    2. test if you can create a single VM on the cloud to see if
                       everything is set up
                    3. set the default key to start VMs, e.g. key default [USERNAME-key]
                    5. set image of VMs, e.g. default image
                    6. set flavor of VMs, e.g. default flavor
                    7. Make sure to use a new unused group name

                cluster list NAME
                    show the detailed information about the cluster VMs

                cluster delete NAME
                    remove the cluster and its VMs

            Arguments:
                NAME    cluster name or group name

            Options:
                --count=COUNT     give the number of VMs to add into the cluster
                --login=USERNAME  give a login name for the VMs, e.g. ubuntu
                --cloud=CLOUD     give a cloud to work on
                --flavor=FLAVOR   give the name of the flavor or flavor id
                --image=IMAGE     give the name of the image or image id
                --add             if a group exists and there are VMs in it
                                  additional vms will be added to this cluster and the
                                  keys will be added to each other so one can login between
                                  them
                FORMAT            output format: table, json, csv
                COLUMN            customize what information to display, for example:
                                  --column=status,addresses prints the columns status
                                  and addresses
                --detail          for table print format, a brief version
                                  is used as default, use this flag to print
                                  detailed table
        """
        # Stub: the full cluster management logic has not been written yet.
        Console.error("NOT YET IMPLEMENTED")
        return ""
| 42.436782 | 89 | 0.513814 |
04ff760fe28ba4ba265cb30fa692efd8a420986c | 1,478 | py | Python | dvclive/__init__.py | pmrowla/dvclive | fe95917c965db210a6a11ff3d6f287c2df298330 | [
"Apache-2.0"
] | null | null | null | dvclive/__init__.py | pmrowla/dvclive | fe95917c965db210a6a11ff3d6f287c2df298330 | [
"Apache-2.0"
] | null | null | null | dvclive/__init__.py | pmrowla/dvclive | fe95917c965db210a6a11ff3d6f287c2df298330 | [
"Apache-2.0"
] | null | null | null | from typing import Optional, Union
from dvclive.version import __version__ # noqa: F401
from .metrics import MetricLogger
_metric_logger: Optional[MetricLogger] = None
def init(
    path: str = None, resume: bool = False, summary: bool = True,
) -> MetricLogger:
    """Create the module-level :class:`MetricLogger` and return it.

    Replaces any previously configured logger.
    """
    global _metric_logger  # pylint: disable=global-statement
    target = path if path else MetricLogger.DEFAULT_DIR
    _metric_logger = MetricLogger(path=target, resume=resume, summary=summary)
    return _metric_logger
def _lazy_init(_metric_logger):
if _metric_logger:
if not _metric_logger.matches_env_setup():
from .error import ConfigMismatchError
raise ConfigMismatchError(_metric_logger)
else:
_metric_logger = MetricLogger.from_env()
if not _metric_logger:
_metric_logger = MetricLogger()
return _metric_logger
def log(name: str, val: Union[int, float], step: int = None) -> None:
    """Record one metric value under *name*, optionally at a given *step*."""
    global _metric_logger  # pylint: disable=global-statement
    active = _lazy_init(_metric_logger)
    _metric_logger = active
    active.log(name=name, val=val, step=step)
def get_step() -> int:
    """Return the current step counter of the module-level logger.

    Fix: the original annotation said ``-> None`` although the function
    returns ``MetricLogger.step`` (assumed to be an int — verify against
    MetricLogger).
    """
    global _metric_logger  # pylint: disable=global-statement
    _metric_logger = _lazy_init(_metric_logger)
    return _metric_logger.step
def next_step() -> None:
    """Advance the module-level logger to the next step.

    Raises InitializationError if no logger has been configured yet
    (i.e. neither init() nor log() has been called).
    """
    global _metric_logger  # pylint: disable=global-statement
    if not _metric_logger:
        from .error import InitializationError
        raise InitializationError()
    _metric_logger.next_step()
| 27.886792 | 78 | 0.719892 |
f4b650695c3a6bd5732cf096a0d145ac6caf7ea4 | 8,425 | py | Python | Lib/site-packages/Cryptodome/SelfTest/Hash/test_CMAC.py | Kronos3/pyexec | c9e76a0302dee047ed137bc38aa669cec04c24cd | [
"bzip2-1.0.6"
] | 78 | 2017-08-19T03:46:13.000Z | 2020-02-19T04:29:45.000Z | Lib/site-packages/Cryptodome/SelfTest/Hash/test_CMAC.py | Kronos3/pyexec | c9e76a0302dee047ed137bc38aa669cec04c24cd | [
"bzip2-1.0.6"
] | 5 | 2017-08-21T16:33:08.000Z | 2018-06-21T18:37:18.000Z | Lib/site-packages/Cryptodome/SelfTest/Hash/test_CMAC.py | Kronos3/pyexec | c9e76a0302dee047ed137bc38aa669cec04c24cd | [
"bzip2-1.0.6"
] | 13 | 2017-08-19T16:46:08.000Z | 2018-11-05T23:11:34.000Z | #
# SelfTest/Hash/CMAC.py: Self-test for the CMAC module
#
# ===================================================================
#
# Copyright (c) 2014, Legrandin <helderijs@gmail.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ===================================================================
"""Self-test suite for Cryptodome.Hash.CMAC"""
import unittest
from Cryptodome.Util.py3compat import tobytes
from Cryptodome.Hash import CMAC
from Cryptodome.Cipher import AES, DES3
from Cryptodome.Hash import SHAKE128
# This is a list of (key, data, result, description, module) tuples.
# Each entry is a tuple:
#   (hex key, hex message, hex expected MAC, description, cipher module)
# get_tests() later wraps the cipher module into dict(ciphermod=...) for
# CMAC.new().
test_data = [

    ## Test vectors from RFC 4493 ##
    ## The are also in NIST SP 800 38B D.2 ##
    ( '2b7e151628aed2a6abf7158809cf4f3c',
      '',
      'bb1d6929e95937287fa37d129b756746',
      'RFC 4493 #1',
      AES
      ),

    ( '2b7e151628aed2a6abf7158809cf4f3c',
      '6bc1bee22e409f96e93d7e117393172a',
      '070a16b46b4d4144f79bdd9dd04a287c',
      'RFC 4493 #2',
      AES
      ),

    ( '2b7e151628aed2a6abf7158809cf4f3c',
      '6bc1bee22e409f96e93d7e117393172a'+
      'ae2d8a571e03ac9c9eb76fac45af8e51'+
      '30c81c46a35ce411',
      'dfa66747de9ae63030ca32611497c827',
      'RFC 4493 #3',
      AES
      ),

    ( '2b7e151628aed2a6abf7158809cf4f3c',
      '6bc1bee22e409f96e93d7e117393172a'+
      'ae2d8a571e03ac9c9eb76fac45af8e51'+
      '30c81c46a35ce411e5fbc1191a0a52ef'+
      'f69f2445df4f9b17ad2b417be66c3710',
      '51f0bebf7e3b9d92fc49741779363cfe',
      'RFC 4493 #4',
      AES
      ),

    ## The rest of Appendix D of NIST SP 800 38B
    ## was not totally correct.
    ## Values in Examples 14, 15, 18, and 19 were wrong.
    ## The updated test values are published in:
    ## http://csrc.nist.gov/publications/nistpubs/800-38B/Updated_CMAC_Examples.pdf

    ( '8e73b0f7da0e6452c810f32b809079e5'+
      '62f8ead2522c6b7b',
      '',
      'd17ddf46adaacde531cac483de7a9367',
      'NIST SP 800 38B D.2 Example 5',
      AES
      ),

    ( '8e73b0f7da0e6452c810f32b809079e5'+
      '62f8ead2522c6b7b',
      '6bc1bee22e409f96e93d7e117393172a',
      '9e99a7bf31e710900662f65e617c5184',
      'NIST SP 800 38B D.2 Example 6',
      AES
      ),

    ( '8e73b0f7da0e6452c810f32b809079e5'+
      '62f8ead2522c6b7b',
      '6bc1bee22e409f96e93d7e117393172a'+
      'ae2d8a571e03ac9c9eb76fac45af8e51'+
      '30c81c46a35ce411',
      '8a1de5be2eb31aad089a82e6ee908b0e',
      'NIST SP 800 38B D.2 Example 7',
      AES
      ),

    ( '8e73b0f7da0e6452c810f32b809079e5'+
      '62f8ead2522c6b7b',
      '6bc1bee22e409f96e93d7e117393172a'+
      'ae2d8a571e03ac9c9eb76fac45af8e51'+
      '30c81c46a35ce411e5fbc1191a0a52ef'+
      'f69f2445df4f9b17ad2b417be66c3710',
      'a1d5df0eed790f794d77589659f39a11',
      'NIST SP 800 38B D.2 Example 8',
      AES
      ),

    ( '603deb1015ca71be2b73aef0857d7781'+
      '1f352c073b6108d72d9810a30914dff4',
      '',
      '028962f61b7bf89efc6b551f4667d983',
      'NIST SP 800 38B D.3 Example 9',
      AES
      ),

    ( '603deb1015ca71be2b73aef0857d7781'+
      '1f352c073b6108d72d9810a30914dff4',
      '6bc1bee22e409f96e93d7e117393172a',
      '28a7023f452e8f82bd4bf28d8c37c35c',
      'NIST SP 800 38B D.3 Example 10',
      AES
      ),

    ( '603deb1015ca71be2b73aef0857d7781'+
      '1f352c073b6108d72d9810a30914dff4',
      '6bc1bee22e409f96e93d7e117393172a'+
      'ae2d8a571e03ac9c9eb76fac45af8e51'+
      '30c81c46a35ce411',
      'aaf3d8f1de5640c232f5b169b9c911e6',
      'NIST SP 800 38B D.3 Example 11',
      AES
      ),

    ( '603deb1015ca71be2b73aef0857d7781'+
      '1f352c073b6108d72d9810a30914dff4',
      '6bc1bee22e409f96e93d7e117393172a'+
      'ae2d8a571e03ac9c9eb76fac45af8e51'+
      '30c81c46a35ce411e5fbc1191a0a52ef'+
      'f69f2445df4f9b17ad2b417be66c3710',
      'e1992190549f6ed5696a2c056c315410',
      'NIST SP 800 38B D.3 Example 12',
      AES
      ),

    ( '8aa83bf8cbda1062'+
      '0bc1bf19fbb6cd58'+
      'bc313d4a371ca8b5',
      '',
      'b7a688e122ffaf95',
      'NIST SP 800 38B D.4 Example 13',
      DES3
      ),

    ( '8aa83bf8cbda1062'+
      '0bc1bf19fbb6cd58'+
      'bc313d4a371ca8b5',
      '6bc1bee22e409f96',
      '8e8f293136283797',
      'NIST SP 800 38B D.4 Example 14',
      DES3
      ),

    ( '8aa83bf8cbda1062'+
      '0bc1bf19fbb6cd58'+
      'bc313d4a371ca8b5',
      '6bc1bee22e409f96'+
      'e93d7e117393172a'+
      'ae2d8a57',
      '743ddbe0ce2dc2ed',
      'NIST SP 800 38B D.4 Example 15',
      DES3
      ),

    ( '8aa83bf8cbda1062'+
      '0bc1bf19fbb6cd58'+
      'bc313d4a371ca8b5',
      '6bc1bee22e409f96'+
      'e93d7e117393172a'+
      'ae2d8a571e03ac9c'+
      '9eb76fac45af8e51',
      '33e6b1092400eae5',
      'NIST SP 800 38B D.4 Example 16',
      DES3
      ),

    ( '4cf15134a2850dd5'+
      '8a3d10ba80570d38',
      '',
      'bd2ebf9a3ba00361',
      'NIST SP 800 38B D.7 Example 17',
      DES3
      ),

    ( '4cf15134a2850dd5'+
      '8a3d10ba80570d38',
      '6bc1bee22e409f96',
      '4ff2ab813c53ce83',
      'NIST SP 800 38B D.7 Example 18',
      DES3
      ),

    ( '4cf15134a2850dd5'+
      '8a3d10ba80570d38',
      '6bc1bee22e409f96'+
      'e93d7e117393172a'+
      'ae2d8a57',
      '62dd1b471902bd4e',
      'NIST SP 800 38B D.7 Example 19',
      DES3
      ),

    ( '4cf15134a2850dd5'+
      '8a3d10ba80570d38',
      '6bc1bee22e409f96'+
      'e93d7e117393172a'+
      'ae2d8a571e03ac9c'+
      '9eb76fac45af8e51',
      '31b1e431dabc4eb8',
      'NIST SP 800 38B D.7 Example 20',
      DES3
      ),

]
def get_tag_random(tag, length):
    """Derive *length* deterministic pseudo-random bytes from *tag*."""
    xof = SHAKE128.new(data=tobytes(tag))
    return xof.read(length)
class MultipleUpdates(unittest.TestCase):
    """Verify that internal caching is implemented correctly"""

    def runTest(self):
        payload = get_tag_random("data_to_mac", 128)
        key = get_tag_random("key", 16)
        reference = CMAC.new(key, msg=payload, ciphermod=AES).digest()
        # Feeding the same message in chunks of any size must yield the
        # same MAC as the one-shot computation above.
        for size in (1, 2, 3, 7, 10, 13, 16, 40, 80, 128):
            mac = CMAC.new(key, ciphermod=AES)
            for start in range(0, len(payload), size):
                mac.update(payload[start:start + size])
            self.assertEqual(reference, mac.digest())
def get_tests(config={}):
    """Return the list of self-test TestCase objects for the CMAC module.

    :param config: unused configuration dict, kept for signature
        compatibility with the other self-test modules.
    """
    from .common import make_mac_tests

    # `global test_data` was removed: the table is only read, never
    # rebound, so the declaration was a no-op.
    # Wrap the cipher module (5th tuple element) into the kwargs that
    # CMAC.new() expects (ciphermod=...).
    params_test_data = [
        list(row[:4]) + [dict(ciphermod=row[4])] for row in test_data
    ]

    tests = make_mac_tests(CMAC, "CMAC", params_test_data)
    tests.append(MultipleUpdates())
    return tests
if __name__ == '__main__':
    # unittest is already imported at module level, so the redundant local
    # "import unittest" was dropped; a named function replaces the lambda
    # assignment (PEP 8 E731).  unittest.main() resolves 'suite' by name.
    def suite():
        """Build the full CMAC self-test suite."""
        return unittest.TestSuite(get_tests())

    unittest.main(defaultTest='suite')
| 29.152249 | 83 | 0.629199 |
1948f230bc4601b419973340619a228fe0ceb85d | 654 | py | Python | app/db/models/__init__.py | maxzhenzhera/my_vocab_backend | 2e9f968374e0bc2fcc0ae40830ca40f3cf5754d1 | [
"MIT"
] | null | null | null | app/db/models/__init__.py | maxzhenzhera/my_vocab_backend | 2e9f968374e0bc2fcc0ae40830ca40f3cf5754d1 | [
"MIT"
] | null | null | null | app/db/models/__init__.py | maxzhenzhera/my_vocab_backend | 2e9f968374e0bc2fcc0ae40830ca40f3cf5754d1 | [
"MIT"
] | null | null | null | """
Some mixins are not used:
    at the time of writing, mypy does not handle SQLAlchemy mixin
    behaviour the way this project expects.
"""
from .auth import (
OAuthConnection,
RefreshSession
)
from .base import Base
from .entities import (
Tag,
User,
Vocab,
VocabTagsAssociation,
Word
)
# Public API of the models package: star-imports from here expose exactly
# these names (the declarative Base plus the auth and entity models).
__all__ = [
    # Base
    # -------------------------------------------
    'Base',
    # Auth
    # -------------------------------------------
    'OAuthConnection',
    'RefreshSession',
    # Entities
    # -------------------------------------------
    'User',
    'Tag',
    'Vocab',
    'VocabTagsAssociation',
    'Word'
]
| 17.675676 | 67 | 0.46789 |
e5dd95476a7e379ffdc427fff8a843d261da88b6 | 10,096 | py | Python | docs/conf.py | franwe/spd_trading | e59965b67c2273ec6c0e07ee986932b1548c8f2f | [
"MIT"
] | 2 | 2021-04-28T09:39:51.000Z | 2021-10-22T03:11:15.000Z | docs/conf.py | franwe/spd_trading | e59965b67c2273ec6c0e07ee986932b1548c8f2f | [
"MIT"
] | null | null | null | docs/conf.py | franwe/spd_trading | e59965b67c2273ec6c0e07ee986932b1548c8f2f | [
"MIT"
] | 2 | 2021-05-12T08:21:15.000Z | 2021-12-07T17:05:58.000Z | # -*- coding: utf-8 -*-
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import sys
import inspect
import shutil

# Get version information for the package documentation
# which is published on read the docs: https://spd_trading.readthedocs.io/en/latest/
# release = get_distribution("spd_trading").version
# version = ".".join(release.split(".")[:2])
# NOTE(review): this hard-coded version is overwritten below -- first by
# `version = ""` in the "General information" section and then (possibly)
# by `from spd_trading import __version__`.  Consider keeping only one of
# the three mechanisms.
version = "0.1.3"

__location__ = os.path.join(os.getcwd(), os.path.dirname(inspect.getfile(inspect.currentframe())))

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.join(__location__, ".."))
autodoc_mock_imports = [".", "utils"]
exclude_patterns = ["**setup**", "**config**"]

# -- Run sphinx-apidoc ------------------------------------------------------
# This hack is necessary since RTD does not issue `sphinx-apidoc` before running
# `sphinx-build -b html . _build/html`. See Issue:
# https://github.com/rtfd/readthedocs.org/issues/1139
# DON'T FORGET: Check the box "Install your project inside a virtualenv using
# setup.py install" in the RTD Advanced Settings.
# Additionally it helps us to avoid running apidoc manually
try:  # for Sphinx >= 1.7
    from sphinx.ext import apidoc
except ImportError:
    from sphinx import apidoc

output_dir = os.path.join(__location__, "../docs/api")
module_dir = os.path.normpath(__location__ + os.sep + os.pardir)
# Wipe any stale generated API docs; FileNotFoundError just means a clean tree.
try:
    shutil.rmtree(output_dir)
except FileNotFoundError:
    pass
try:
    import sphinx
    from pkg_resources import parse_version

    cmd_line_template = "sphinx-apidoc -f -o {outputdir} {moduledir} {excludepatterns}"
    cmd_line = cmd_line_template.format(outputdir=output_dir, moduledir=module_dir, excludepatterns=exclude_patterns)
    args = cmd_line.split(" ")
    # Sphinx >= 1.7 expects the argument list without the program name.
    if parse_version(sphinx.__version__) >= parse_version("1.7"):
        args = args[1:]
    apidoc.main(args)
except Exception as e:
    print("Running `sphinx-apidoc` failed!\n{}".format(e))

# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.intersphinx",
    "sphinx.ext.todo",
    "sphinx.ext.autosummary",
    "sphinx.ext.viewcode",
    "sphinx.ext.coverage",
    "sphinx.ext.doctest",
    "sphinx.ext.ifconfig",
    "sphinx.ext.mathjax",
    "sphinx.ext.napoleon",
    "sphinx_rtd_theme",
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]

# The suffix of source filenames.
source_suffix = ".rst"

# The encoding of source files.
# source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = "index"

# General information about the project.
project = u"spd_trading"
copyright = u"2021, franwe"

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = ""  # Is set by calling `setup.py docs`
# The full version, including alpha/beta/rc tags.
release = ""  # Is set by calling `setup.py docs`

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# exclude_patterns = ["_build"]

# The reST default role (used for this markup: `text`) to use for all documents.
# default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"

# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []

# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False

# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "sphinx_rtd_theme"

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
    "logo_only": False,
    "display_version": True,
    "prev_next_buttons_location": "bottom",
    "style_external_links": False,
    # There is still a bug which will probably get fixed soon
    # https://github.com/readthedocs/sphinx_rtd_theme/pull/1010
    # therefore the option vcs_pageview_mode is commented out
    # "vcs_pageview_mode": "blob",
    "style_nav_header_background": "white",
    # Toc options
    "collapse_navigation": True,
    "sticky_navigation": True,
    "navigation_depth": 4,
    "includehidden": True,
    "titles_only": False,
}

# NOTE(review): these GitHub settings point at 'buskill/buskill-app', which
# does not match this project (spd_trading) -- they look copied from a
# template; the "Edit on GitHub" links will point at the wrong repository.
html_context = {
    "display_github": True,
    "github_user": "buskill",
    "github_repo": "buskill-app",
    "github_version": "master/docs/",
}

# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
try:
    from spd_trading import __version__ as version
except ImportError:
    pass
else:
    release = version

# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = "./_static/spd_trading-python-logo.png"

# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = "./_static/spd_trading-python-favicon.png"

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}

# If false, no module index is generated.
# html_domain_indices = True

# If false, no index is generated.
# html_use_index = True

# If true, the index is split into individual pages for each letter.
# html_split_index = False

# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None

# Output file base name for HTML help builder.
htmlhelp_basename = "spd_trading-doc"

# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    # 'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    # 'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    # 'preamble': '',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
    (
        "index",
        "user_guide.tex",
        u"spd_trading Documentation",
        u"franwe",
        "manual",
    ),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = ""

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False

# If true, show page references after internal links.
# latex_show_pagerefs = False

# If true, show URL addresses after external links.
# latex_show_urls = False

# Documents to append as an appendix to all manuals.
# latex_appendices = []

# If false, no module index is generated.
# latex_domain_indices = True

# -- External mapping ---------------------------------------------------------
python_version = ".".join(map(str, sys.version_info[0:2]))
intersphinx_mapping = {
    "sphinx": ("http://www.sphinx-doc.org/en/stable", None),
    "python": ("https://docs.python.org/" + python_version, None),
}
| 32.779221 | 117 | 0.702159 |
6499f11651d8c314668095085006dee89898178f | 1,070 | py | Python | nanome/api/macro/macro.py | rramji/nanome-lib | 2806598af31cfb4bb6e16366f0b300d2ddcc9c13 | [
"MIT"
] | null | null | null | nanome/api/macro/macro.py | rramji/nanome-lib | 2806598af31cfb4bb6e16366f0b300d2ddcc9c13 | [
"MIT"
] | null | null | null | nanome/api/macro/macro.py | rramji/nanome-lib | 2806598af31cfb4bb6e16366f0b300d2ddcc9c13 | [
"MIT"
] | null | null | null | import nanome
from nanome._internal._macro._macro import _Macro
class Macro(_Macro):
    """Public wrapper around Nanome's internal ``_Macro`` implementation."""

    def __init__(self, title="", logic=""):
        self.title = title
        self.logic = logic
        super().__init__()

    @property
    def title(self):
        """The macro's display name."""
        return self._title

    @title.setter
    def title(self, new_title):
        self._title = new_title

    @property
    def logic(self):
        """The macro's logic body."""
        return self._logic

    @logic.setter
    def logic(self, new_logic):
        self._logic = new_logic

    @classmethod
    def get_plugin_identifier(cls):
        return _Macro._plugin_identifier

    @classmethod
    def set_plugin_identifier(cls, value):
        _Macro._plugin_identifier = value

    def save(self, all_users=False):
        """Persist the macro, optionally for all users."""
        self._save(all_users)

    def run(self):
        self._run()

    def delete(self, all_users=False):
        """Remove the macro, optionally for all users."""
        self._delete(all_users)

    @classmethod
    def stop(cls):
        cls._stop()

    @classmethod
    def get_live(cls, callback):
        return cls._get_live(callback)
_Macro._create = Macro | 20.980392 | 49 | 0.614953 |
478f4e6fc935289c4d389895670797b95676ad53 | 8,545 | py | Python | tools/logger/modules/driver/driver_framer.py | ti-simplelink/swol | 331fd1096c643297505da010aff979f6c7baf02b | [
"BSD-3-Clause"
] | 5 | 2020-04-16T00:38:14.000Z | 2022-02-10T12:47:14.000Z | tools/logger/modules/driver/driver_framer.py | ti-simplelink/swol | 331fd1096c643297505da010aff979f6c7baf02b | [
"BSD-3-Clause"
] | null | null | null | tools/logger/modules/driver/driver_framer.py | ti-simplelink/swol | 331fd1096c643297505da010aff979f6c7baf02b | [
"BSD-3-Clause"
] | 1 | 2020-04-27T21:47:17.000Z | 2020-04-27T21:47:17.000Z | import sys
import os
import enum
from dataclasses import *
from swo.swo_framer import *
from wireshark_output.wireshark_output import *
from trace_db.trace_db import *
import logging
# Module-level logger used by DriverFramer when a frame cannot be decoded.
logger = logging.getLogger("Driver Framer")

# Maps a substring of the originating driver source-file name to a human
# readable driver label; used by DriverEvent.__post_init__ to classify frames.
driver_map = {
    # Power driver
    "PowerCC26X": "Power Driver",
    "UARTCC26X": "UART Driver",
    "RFCC26X": "RF Driver",
}

# Template for the power-constraint bookkeeping kept by DriverFramer:
# constraint id -> [total active count, {source file: per-file count}].
reset_constraints = {
    0: [0, {}],
    1: [0, {}],
    2: [0, {}],
    3: [0, {}],
    4: [0, {}],
    5: [0, {}],
    6: [0, {}]
}

# Human readable names for the PowerCC26XX constraint ids (keys match
# reset_constraints above).
constraint_to_string = {
    0: "PowerCC26XX_RETAIN_VIMS_CACHE_IN_STANDBY",
    1: "PowerCC26XX_DISALLOW_SHUTDOWN",
    2: "PowerCC26XX_DISALLOW_STANDBY",
    3: "PowerCC26XX_DISALLOW_IDLE",
    4: "PowerCC26XX_NEED_FLASH_IN_IDLE",
    5: "PowerCC26XX_SWITCH_XOSC_HF_MANUALLY",
    6: "PowerCC26XX_DISALLOW_XOSC_HF_SWITCHING"
}
class DriverStatus(enum.Enum):
    """Health verdict attached to a decoded driver frame.

    Fix: the ``@dataclass`` decorator previously applied to this Enum was
    removed.  ``dataclass`` on a class with no annotated fields generates an
    ``__eq__`` that only compares types, which made every member compare
    equal to every other member (``DriverStatus.OK == DriverStatus.ERROR``
    was True) and replaced the Enum ``__repr__``.
    """
    OK = "Ok"
    ERROR = "Error"
    POSSIBLE_ERROR = "Possible Error"
@dataclass
class DriverEvent(FrameBase):
    """Base frame for decoded TI driver events, built from a raw SWO frame.

    Copies the common metadata (timestamps, opcode, origin file/line, level,
    module) off the wrapped SWO frame and classifies which driver produced it
    based on the originating source-file name.
    """

    swo_frame: SWOFrame = None
    wireshark_out: list = None

    def __post_init__(self):
        source = self.swo_frame
        self.rat_ts_s = source.rat_ts_s
        self.rtc_ts_s = source.rtc_ts_s
        self.opcode = source.opcode
        self.file = source.file
        self.line = source.line
        self.level = source.level
        self.module = source.module
        self.status = DriverStatus.OK.value
        # First driver_map marker found in the source-file name wins;
        # fall back to an empty label for unknown drivers.
        self.driver = next(
            (label for marker, label in driver_map.items() if marker in self.file),
            "",
        )

    def __str__(self):
        return "RAT: {:.7f} s, RTC: {:.7f} : {} --> ".format(self.rat_ts_s, self.rtc_ts_s, self.file)
@dataclass
class PowerEvent(DriverEvent):
    """Driver event decoded from a Power driver SWO frame.

    For "Power constraint event" frames this updates the shared
    power-constraint bookkeeping and builds the matching Wireshark output;
    a human readable summary is kept in ``info_string``.
    """

    # Shared bookkeeping (owned and passed in by the framer):
    # constraint id -> [total active count, {source file: per-file count}]
    constraints: dict = field(default_factory=dict)
    # Trace database used to resolve the caller address to function/file/line.
    traceDB: TraceDB = None

    def __post_init__(self):
        super().__post_init__()
        self.info_string = ""
        file = ""
        line = ""
        fxn = ""
        # If the event is a power constraint event
        if str("Power constraint event") == self.swo_frame.string:
            # Get the origin function, file and line using the "lr" argument
            # (values[1] carries the caller's link-register address).
            fxn, file, line = self.traceDB.get_info_for_address(self.swo_frame.values[1])
            # Store the total count reported by the target (values[2]) for
            # this constraint id (values[3]).
            self.constraints[self.swo_frame.values[3]][0] = self.swo_frame.values[2]
            # If first value is 1 it is a set event, else release
            if self.swo_frame.values[0]:
                # Increment the per-file counter for this constraint
                if file in self.constraints[self.swo_frame.values[3]][1]:
                    self.constraints[self.swo_frame.values[3]][1][file] += 1
                else:
                    self.constraints[self.swo_frame.values[3]][1][file] = 1
                # Human readable summary for the info column
                self.info_string = (constraint_to_string[self.swo_frame.values[3]] + "'" +
                                    " constraint was set inside %s by %s:%d.") % (
                    file.decode("utf-8"), fxn.decode("utf-8"), line)
            else:
                # Decrement the per-file counter for this constraint
                if file in self.constraints[self.swo_frame.values[3]][1]:
                    self.constraints[self.swo_frame.values[3]][1][file] -= 1
                else:
                    # If the file is not inside the dict (for some reason), add a dummy entry with 0 count
                    self.constraints[self.swo_frame.values[3]][1][file] = 0
                # Human readable summary for the info column
                self.info_string = (constraint_to_string[self.swo_frame.values[3]] + "'" +
                                    " constraint was released inside {} by {}:{}.").format(file.decode("utf-8"),
                                                                                           fxn.decode("utf-8"), line)
            # Construct the Wireshark output for this event
            # TODO: Remove this separator from wireshark output
            self.wireshark_out = [WSOutputElement(Protofields.COMMON_CUSTOM,
                                                  ": ========================" + \
                                                  " Power Constraint Event " + \
                                                  "========================"
                                                  , "")]
            # Which constraint is set/released
            self.wireshark_out += [
                WSOutputElement(Protofields.DRIVER_POWER_CONSTRAINT, constraint_to_string[self.swo_frame.values[3]])]
            # From which file does the action relate to ...
            if self.swo_frame.values[0]:
                self.wireshark_out += [
                    WSOutputElement(Protofields.COMMON_CUSTOM, "Set in file", file.decode("utf-8"))]
            else:
                self.wireshark_out += [
                    WSOutputElement(Protofields.COMMON_CUSTOM, "Released in file", file.decode("utf-8"))]
            # ... and at which line
            self.wireshark_out += [WSOutputElement(Protofields.COMMON_CUSTOM, "Line", str(line))]
            # If there are active power constraints, add these as part of the WS output
            if len(self.constraints):
                # Open tree (level 1)
                self.wireshark_out += [WSOutputElement(Protofields.COMMON_OPEN_TREE, "Active power constraints")]
                for key, val in self.constraints.items():
                    if val[0]:
                        # Open tree (level 2)
                        self.wireshark_out += [WSOutputElement(Protofields.COMMON_OPEN_TREE, constraint_to_string[key])]
                        counter = 0
                        # For each constraint, list each file holding constraints
                        for file, count in val[1].items():
                            counter = counter + count
                            if count != 0:
                                tmp = file.decode("utf-8")
                                # If the count is negative, the software could be having a bug, provide some printout on this
                                if count < 0:
                                    tmp += " [Negative count, possible software bug!]"
                                    self.status = DriverStatus.POSSIBLE_ERROR.value
                                self.wireshark_out += [WSOutputElement(Protofields.COMMON_CUSTOM, tmp, str(count))]
                        # If the per-file list is empty while the total says constraints are
                        # active, or some set events were never attributed to a file, report
                        # the difference as coming from unknown sources.
                        if (counter < val[0]) and (counter > -1):
                            dif = val[0] - counter
                            self.wireshark_out += [
                                WSOutputElement(Protofields.COMMON_CUSTOM, "[Unknown source(s)]", str(dif))]
                        # Close tree (level 2)
                        self.wireshark_out += [WSOutputElement(Protofields.COMMON_CLOSE_TREE)]
                # Close tree (level 1)
                self.wireshark_out += [WSOutputElement(Protofields.COMMON_CLOSE_TREE)]

    def __str__(self):
        # Decoded summary built in __post_init__ ("" for non-constraint frames).
        return self.info_string
class DriverFramer(FramerBase):
    """Framer that decodes SWO event frames produced by TI drivers.

    Currently only Power driver constraint events are decoded (into
    PowerEvent); every other frame is handed back untouched so another
    framer can process it.
    """

    def __init__(self, db):
        """``db`` is the trace database used to resolve code addresses."""
        self._constraints = self._fresh_constraints()
        self._traceDB = db

    @staticmethod
    def _fresh_constraints():
        """Return a brand-new, empty constraint bookkeeping dict.

        One ``[total_count, {file: count}]`` slot per PowerCC26XX constraint
        id 0-6 (see ``constraint_to_string`` / ``reset_constraints``).

        Fix: the previous code assigned the module-level ``reset_constraints``
        dict directly, so every framer instance (and every ``reset()`` call)
        shared one mutable dict -- accumulated counts were never cleared.
        """
        return {constraint_id: [0, {}] for constraint_id in range(7)}

    def reset(self):
        """Drop all accumulated power-constraint bookkeeping."""
        self._constraints = self._fresh_constraints()

    def parse(self, swo_frame=None):
        """Try to decode *swo_frame*.

        Returns a PowerEvent for Power driver EVENT frames; otherwise the
        unmodified input frame. Decoding errors are logged, not raised.
        """
        driver_frame = None
        try:
            # Is this a Power driver Event?
            if swo_frame.opcode == SWOOpcode.EVENT and ("PowerCC26X" in swo_frame.file):
                driver_frame = PowerEvent(swo_frame=swo_frame, constraints=self._constraints, traceDB=self._traceDB)
        except Exception as e:
            logger.error(e)
        # Note: the original used ``return`` inside a ``finally`` block, which
        # would also have silently swallowed non-Exception errors (e.g.
        # KeyboardInterrupt). Plain post-try control flow is equivalent for
        # all handled paths and does not mask such errors.
        if driver_frame is None:
            # This frame was not parsed; hand the original SWO frame back.
            return swo_frame
        self.completed(driver_frame)
        return driver_frame

    def completed(self, frame=None):
        """Finish building the Wireshark output for a decoded driver frame."""
        # Append open tree and decoded driver info around the SWO output.
        frame.wireshark_out = frame.swo_frame.wireshark_out + \
                              [WSOutputElement(Protofields.COMMON_OPEN_TREE, "Driver Logger Frame")] + \
                              [WSOutputElement(Protofields.DRIVER_FILE, frame.driver)] + \
                              [WSOutputElement(Protofields.DRIVER_STATUS, frame.status)] + \
                              frame.wireshark_out + \
                              [WSOutputElement(Protofields.COMMON_INFO, str(frame))] + \
                              [WSOutputElement(Protofields.COMMON_CLOSE_TREE)]
| 43.156566 | 124 | 0.557753 |
0ff36593e86940dd5ebbf2bf4b2605e0f14432ca | 712 | py | Python | python-files/guess-number-game.py | chirumist/Python-Practice | fc7d6447ca492989221904121321aaf762bb6b43 | [
"MIT"
] | null | null | null | python-files/guess-number-game.py | chirumist/Python-Practice | fc7d6447ca492989221904121321aaf762bb6b43 | [
"MIT"
] | null | null | null | python-files/guess-number-game.py | chirumist/Python-Practice | fc7d6447ca492989221904121321aaf762bb6b43 | [
"MIT"
] | null | null | null | """
Guess number count start
"""
# Simple number-guessing game: the player must find every value hidden in
# ``numbers`` before running out of wrong guesses.
guesses = 8  # maximum number of wrong guesses allowed
numbers = [2, 4, 5, 6, 9]  # the values the player has to find
guessCount = int(0)  # wrong guesses made so far
# NOTE(review): printing ``numbers`` reveals the answers -- presumably left
# in for debugging; confirm whether this should ship.
print(numbers)
while guessCount <= guesses:
    number = int(input("Guess number: \n"))
    if number in numbers:
        # Correct guess: remove it so it cannot be guessed again.
        numbers.remove(number)
        print("Remain number guess ", len(numbers))
        print("remain guesses count ", guesses - guessCount)
        if len(numbers) == 0:
            # Every hidden number found -- the player wins.
            print("All number guess with ", guessCount)
            break
    else:
        # Wrong guess: report remaining attempts and stop when exhausted.
        print("Wrong Number guess count remain ", guesses - guessCount)
        if guessCount == guesses - 1:
            print("sorry your guess has been over")
            break
        guessCount = guessCount + 1  # only wrong guesses are counted
"""
Guess number count end
"""
| 22.25 | 71 | 0.592697 |
7d3c056ef71f5ddd76f4e51420cead9074b5333e | 5,284 | py | Python | names_oracle/__init__.py | leoli51/Names-Oracle | 7ebde812ceb5c1e08ad57805c384eab3cd5dcd75 | [
"MIT"
] | null | null | null | names_oracle/__init__.py | leoli51/Names-Oracle | 7ebde812ceb5c1e08ad57805c384eab3cd5dcd75 | [
"MIT"
] | null | null | null | names_oracle/__init__.py | leoli51/Names-Oracle | 7ebde812ceb5c1e08ad57805c384eab3cd5dcd75 | [
"MIT"
] | null | null | null | import os
from os import path
import importlib.resources
data_handle = importlib.resources.files(__package__).joinpath("data")
with data_handle as p:
data_path = p
# common tags
NAME_TAG = 'name'
# name tags
NAME_COUNT_TAG = 'first_name_frequency'
NAME_NORM_COUNT_TAG = 'first_name_norm_frequency'
MALE_COUNT_TAG = 'male_frequency'
FEMALE_COUNT_TAG = 'female_frequency'
MALE_PROBABILITY_TAG = 'male_probability'
FEMALE_PROBABILITY_TAG = 'female_probability'
FIRST_NAME_PROBABILITY_TAG = 'first_name_probability'
# last name tags
LAST_NAME_COUNT_TAG = 'last_name_frequency'
LAST_NAME_NORM_COUNT_TAG = 'last_name_norm_frequency'
LAST_NAME_PROBABILITY_TAG = 'last_name_probability'
# country -> name -> sex -> count
names = dict()
# country -> surname -> count
last_names = dict()
max_occurrences = dict()
def get_available_countries():
with data_handle as data_path:
return os.listdir(data_path)
def load(country):
global names
global last_names
if country not in get_available_countries():
raise ValueError(f'Country: {country} is not supported or is not a valid country. Hint: use list_available_countries()')
names[country] = dict()
max_occurrences[country] = {'name' : 0, 'last_name' : 0}
max_name_occ = 0
with open(path.join(data_path, country, 'names.csv'), 'r', encoding='utf-8') as namesfile:
for i, line in enumerate(namesfile.readlines()):
if i == 0:
continue
line = line.strip().split(',')
name = line[0]
sex = line[1]
count = int(line[2])
if name not in names[country]:
names[country][name] = {'M' : 0, 'F' : 0}
names[country][name][sex] = count
max_name_occ = max(max_name_occ, sum(names[country][name].values()))
max_occurrences[country]['name'] = max_name_occ
last_names[country] = dict()
max_last_name_occ = 0
with open(path.join(data_path, country, 'last_names.csv'), 'r', encoding='utf-8') as lastnamesfile:
for i, line in enumerate(lastnamesfile.readlines()):
if i == 0:
continue
line = line.strip().split(',')
lastname = line[0]
count = int(line[1])
last_names[country][lastname] = count
max_last_name_occ = max(max_last_name_occ, count)
max_occurrences[country]['last_name'] = max_last_name_occ
# query name data:
def get_name_info(name, country):
global names
global last_names
if country not in last_names:
load(country)
first_name_counts = None
if name in names[country]:
first_name_counts = names[country][name]
last_name_count = 0
if name in last_names[country]:
last_name_count = last_names[country][name]
# returns none if there is no available data for this entry
if not (first_name_counts or last_name_count):
return None
name_count = first_name_counts if first_name_counts else {'M' : 0, 'F': 0}
total_name_count = sum(name_count.values())
total_count = total_name_count + last_name_count
info = {
NAME_TAG : name,
NAME_COUNT_TAG : total_name_count,
NAME_NORM_COUNT_TAG : total_name_count / max_occurrences[country]['name'],
MALE_COUNT_TAG : name_count['M'],
FEMALE_COUNT_TAG : name_count['F'],
MALE_PROBABILITY_TAG : name_count['M'] / (total_name_count if total_name_count else 1),
FEMALE_PROBABILITY_TAG : name_count['F'] / (total_name_count if total_name_count else 1),
FIRST_NAME_PROBABILITY_TAG : total_name_count / total_count,
LAST_NAME_COUNT_TAG : last_name_count,
LAST_NAME_NORM_COUNT_TAG : last_name_count / max_occurrences[country]['last_name'],
LAST_NAME_PROBABILITY_TAG : last_name_count / total_count
}
return info
def split_name_in_first_and_last(name, country):
name_parts = name.title().strip().split()
best_score = 0
best_names = []
for i in range(len(name_parts) + 1): # +1 to include the case in which it is only a name
first_name_guess = " ".join(name_parts[:i])
last_name_guess = " ".join(name_parts[i:])
first_name_data = get_name_info(first_name_guess, country)
last_name_data = get_name_info(last_name_guess, country)
if not (first_name_data or last_name_data):
continue
fn_fn_score = first_name_data[FIRST_NAME_PROBABILITY_TAG] if first_name_data else 0
fn_ln_score = first_name_data[LAST_NAME_PROBABILITY_TAG] if first_name_data else 0
ln_fn_score = last_name_data[FIRST_NAME_PROBABILITY_TAG] if last_name_data else 0
ln_ln_score = last_name_data[LAST_NAME_PROBABILITY_TAG] if last_name_data else 0
if fn_fn_score + ln_ln_score > ln_fn_score + fn_ln_score:
score = fn_fn_score + ln_ln_score
if score > best_score:
best_score = score
best_names = [first_name_guess, last_name_guess]
else:
score = ln_fn_score + fn_ln_score
if score > best_score:
best_score = score
best_names = [last_name_guess, first_name_guess]
return best_names
| 34.993377 | 128 | 0.667108 |
ec0634445ffa1e5372dee1d0937d780c23ca9ada | 276 | py | Python | header_tableau_materials.py | invisiblebob395/awefawe | 42daf9d3ae06bcdb3b91973d94eed8bed1303e2b | [
"BSD-3-Clause"
] | 17 | 2015-01-19T07:53:57.000Z | 2021-07-10T02:26:51.000Z | header_tableau_materials.py | qt911025/pw_module_system | dbd257e5231d16c47f17091a3ab18972be7687e5 | [
"BSD-3-Clause"
] | 1 | 2017-08-31T03:55:09.000Z | 2017-08-31T03:55:09.000Z | header_tableau_materials.py | qt911025/pw_module_system | dbd257e5231d16c47f17091a3ab18972be7687e5 | [
"BSD-3-Clause"
] | 14 | 2015-05-03T05:20:01.000Z | 2021-12-29T17:10:50.000Z | ###################################################
# header_tableau_materials.py
# This file contains declarations for tableau materials
# DO NOT EDIT THIS FILE!
###################################################
from header_common import *
from header_operations import *
| 30.666667 | 55 | 0.507246 |
ed1c987f0196566bd15bc8ce0dec4951c13917df | 456 | py | Python | fynance/algorithms/__init__.py | ArthurBernard/Fynance | efd9a2e6f8eddcff017d828972236312f6f24084 | [
"MIT"
] | 19 | 2018-12-13T18:52:51.000Z | 2021-09-03T00:33:47.000Z | fynance/algorithms/__init__.py | ArthurBernard/Fynance | efd9a2e6f8eddcff017d828972236312f6f24084 | [
"MIT"
] | null | null | null | fynance/algorithms/__init__.py | ArthurBernard/Fynance | efd9a2e6f8eddcff017d828972236312f6f24084 | [
"MIT"
] | 6 | 2019-05-31T16:51:51.000Z | 2021-07-29T21:31:25.000Z | #!/usr/bin/env python3
# coding: utf-8
# @Author: ArthurBernard
# @Email: arthur.bernard.92@gmail.com
# @Date: 2019-09-12 17:54:50
# @Last modified by: ArthurBernard
# @Last modified time: 2019-11-05 20:22:04
"""
.. currentmodule:: fynance.algorithms
.. toctree::
:maxdepth: 1
:caption: Contents:
algorithms.allocation
"""
# Built-in packages
# Third party packages
# Local packages
from .allocation import *
__all__ = allocation.__all__
| 15.724138 | 42 | 0.699561 |
7b2b9b21854b098e3ff789530de0b843acad6f14 | 1,848 | py | Python | triage.py | Rafiot/viper-modules | 812642effecf8ae64bce76fa0f72116e0c2c81cd | [
"BSD-3-Clause"
] | 5 | 2019-12-20T09:42:41.000Z | 2021-04-30T07:05:00.000Z | triage.py | SubSpaceManeuvers/viper-modules | c8f19c6d4e0e976e2ad8730e0862c2250e3acdd5 | [
"BSD-3-Clause"
] | 7 | 2019-11-25T13:13:15.000Z | 2020-09-09T09:04:46.000Z | triage.py | SubSpaceManeuvers/viper-modules | c8f19c6d4e0e976e2ad8730e0862c2250e3acdd5 | [
"BSD-3-Clause"
] | 10 | 2019-11-20T04:57:51.000Z | 2021-01-21T18:51:47.000Z | # -*- coding: utf-8 -*-
# This file is part of Viper - https://github.com/viper-framework/viper
# See the file 'LICENSE' for copying permission.
from viper.common.abstracts import Module
from viper.core.database import Database
from viper.core.session import __sessions__
class Triage(Module):
    """Viper command that derives tags from a sample's file type."""

    cmd = 'triage'
    description = "Perform some initial triaging and tagging of the file"
    authors = ['nex']

    def __init__(self):
        super(Triage, self).__init__()
        self.parser.add_argument('-a', '--all', action='store_true', help="Triage all files")

    def _triage_file_type(self, obj):
        """Return a list of tags derived from *obj*'s file-type string."""
        # TODO: extend this triaging with as many relevant tags as possible.
        # For example, avoid "exe" or other too common or obvious attributes.
        detected = []
        if 'PE32' not in obj.type:
            return detected
        if 'DLL' in obj.type:
            self.log('info', "{} is a DLL".format(obj.name))
            detected.append('dll')
        elif 'native' in obj.type:
            self.log('info', "{} is a Windows driver".format(obj.name))
            detected.append('driver')
        return detected

    def run(self):
        """Tag every stored sample (-a/--all) or just the open session file."""
        super(Triage, self).run()
        db = Database()

        if self.args and self.args.all:
            for sample in db.find(key='all'):
                db.add_tags(sample.sha256, list(self._triage_file_type(sample)))
        else:
            # We're running against the already opened file.
            if not __sessions__.is_set():
                self.log('error', "No open session. This command expects a file to be open.")
                return
            current_file = __sessions__.current.file
            db.add_tags(current_file.sha256, list(self._triage_file_type(current_file)))
| 32.421053 | 93 | 0.589286 |
17f00b215265772a02d2780901dacca87ed9e147 | 15,653 | py | Python | pikachu/general.py | FriederikeBiermann/pikachu | 8eb5a71a34c70adde4c14ee3717e0b53f27d09a7 | [
"MIT"
] | null | null | null | pikachu/general.py | FriederikeBiermann/pikachu | 8eb5a71a34c70adde4c14ee3717e0b53f27d09a7 | [
"MIT"
] | null | null | null | pikachu/general.py | FriederikeBiermann/pikachu | 8eb5a71a34c70adde4c14ee3717e0b53f27d09a7 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import time
import os
import matplotlib.pyplot as plt
from pikachu.smiles.smiles import Smiles
from pikachu.errors import StructureError, ColourError
from pikachu.smiles.graph_to_smiles import GraphToSmiles
from pikachu.drawing.drawing import Drawer, Options, draw_multiple
from pikachu.drawing.colours import *
from pikachu.chem.molfile.write_molfile import MolFileWriter
def smiles_from_file(smiles_file, all=False):
if not all:
with open(smiles_file, 'r') as smiles:
smiles_string = smiles.readline().strip()
return smiles_string
else:
smiles_strings = []
with open(smiles_file, 'r') as smiles:
for line in smiles:
smiles_string = line.strip()
smiles_strings.append(smiles_string)
return smiles_strings
def read_smiles(smiles_string):
"""
Return structure object from SMILES string
Input:
smiles_string: str, SMILES string
Output:
Structure object if correct SMILES string was parsed, None otherwise
"""
if smiles_string:
try:
smiles = Smiles(smiles_string)
structure = smiles.smiles_to_structure()
return structure
except StructureError as e:
print(f'Error parsing "{smiles_string}": {e.message}')
return
def structure_to_smiles(structure, kekule=False):
"""
Return SMILES string from structure object
Input:
structure: Structure object
kekule: bool, return kekulised SMILES string if True, unkekulised SMILES string if False
Output:
str, SMILES string
"""
if kekule:
structure = structure.kekulise()
return GraphToSmiles(structure).smiles
def draw_structure(structure, finetune=True):
"""
Display structure from structure object
Input:
structure: Structure object
"""
options = Options()
options.finetune = finetune
drawer = Drawer(structure, options=options)
drawer.show_molecule()
def position_smiles(smiles):
"""
Return structure with stored atom coordinates
Input
----------
smiles: str, SMILES string
Output
----------
structure: Structure object
"""
structure = read_smiles(smiles)
if '.' in smiles:
drawer = draw_multiple(structure, coords_only=True)
else:
drawer = Drawer(structure, coords_only=True)
return drawer
def draw_smiles(smiles, finetune=True):
"""
Display structure from SMILES string
Input:
smiles: str, SMILES string
"""
options = Options()
options.finetune = finetune
structure = read_smiles(smiles)
if '.' in smiles:
drawer = draw_multiple(structure, options=options)
else:
drawer = Drawer(structure, options=options)
drawer.show_molecule()
def smiles_to_molfile(smiles, molfile, options=None):
if not options:
options = Options()
structure = read_smiles(smiles)
if '.' in smiles:
MolFileWriter(structure, molfile, drawing_options=options, multiple=True).write_mol_file()
else:
MolFileWriter(structure, molfile, drawing_options=options).write_mol_file()
def svg_from_smiles_timed(smiles, svg_out):
start_time = time.time()
print("Start")
time_1 = time.time()
print(time_1 - start_time)
structure = read_smiles(smiles)
print("reading smiles")
time_2 = time.time()
print(time_2 - time_1)
structure = structure.kekulise()
print("Kekulising")
time_3 = time.time()
print(time_3 - time_2)
drawer = Drawer(structure)
print("Drawing")
time_4 = time.time()
print(time_4 - time_3)
drawer.save_svg(svg_out)
print("Saving")
time_5 = time.time()
print(time_5 - time_4)
def svg_from_structure(structure, svg_out, finetune=True):
"""
Save structure drawing of Structure object to .svg
Input:
structure: Structure object
svg_out: str, output file name, should end in .svg
"""
options = Options()
options.finetune = finetune
drawer = Drawer(structure, options=options)
drawer.save_svg(svg_out)
def svg_string_from_structure(structure, finetune=True):
options = Options()
options.finetune = finetune
drawer = Drawer(structure, options=options)
svg_string = drawer.save_svg_string()
return svg_string
def png_from_structure(structure, png_out, finetune=True):
"""
Save structure drawing of Structure object to .png
Input:
structure: Structure object
png_out: str, output file name, should end in .png
"""
options = Options()
options.finetune = finetune
drawer = Drawer(structure, options=options)
drawer.save_png(png_out)
def svg_from_smiles(smiles, svg_out, finetune=True):
"""
Save structure drawing of SMILES string to .svg
Input:
smiles: str, SMILES string
svg_out: str, output file name, should end in .svg
"""
structure = read_smiles(smiles)
options = Options()
options.finetune = finetune
drawer = Drawer(structure, options=options)
drawer.save_svg(svg_out)
def png_from_smiles(smiles, png_out, finetune=True):
"""
Save structure drawing of SMILES string to .png
Input:
smiles: str, SMILES string
png_out: str, output file name, should end in .png
"""
structure = read_smiles(smiles)
options = Options()
options.finetune = finetune
drawer = Drawer(structure, options=options)
drawer.save_png(png_out)
def highlight_substructure(substructure_smiles, parent_smiles, search_mode='all',
colour=None,
check_chiral_centres=True,
check_bond_chirality=True,
visualisation='show',
out_file=None):
"""
Find occurrences of (a) substructure(s) in a parent structure and highlight it in a drawing
Input:
substructure_smiles: str, SMILES string of substructure, OR list of str, with each str a SMILES string
parent_smiles: str, SMILES string of superstructure
search_mode: str, 'single', 'multiple' or 'all. If single, highlight only the first detected instance of a
substructure. If 'all', highlight all instances of a substructure. If 'multiple', highlight all instances of
all substructures, assigning one colour per substructure.
colour: str, hex colour code, ie #ffffff, colour in which substructure will be highlighted, OR list of str,
with each str a colour.
Default: None (RASPBERRY for single/ all matching, RANDOM_PALETTE_2 for multiple matching
check_chiral_centres: bool, if True, only matches substructure to superstructure if stereochemistry
of all stereocentres match; if False, matches substructure to superstructure regardless of
stereochemistry of stereocentres.
check_bond_chirality: bool, if True, only matches substructure to superstructure if stereochemistry
of all stereobonds match; if False, matches substructure to superstructure regardless of
stereochemistry of stereobonds.
visualisation: str, 'show', 'png', or 'svg'. If 'png' or 'svg', out_file is required.
out_file: str, output file of png or svg drawing
"""
assert search_mode in {'all', 'single', 'multiple'}
if search_mode == 'all' or search_mode == 'single':
assert type(substructure_smiles) == str
if colour:
assert type(colour) in {str}
else:
colour = RASPBERRY
elif search_mode == 'multiple':
assert type(substructure_smiles) in {list, tuple, set}
assert type(colour) in {list, tuple, set}
if search_mode == 'all':
highlight_subsmiles_all(substructure_smiles, parent_smiles, colour=colour,
check_chiral_centres=check_chiral_centres,
check_bond_chirality=check_bond_chirality,
visualisation=visualisation,
out_file=out_file)
elif search_mode == 'multiple':
highlight_subsmiles_multiple(substructure_smiles, parent_smiles, colours=colour,
check_chiral_centres=check_chiral_centres,
check_bond_chirality=check_bond_chirality,
visualisation=visualisation,
out_file=out_file)
elif search_mode == 'single':
highlight_subsmiles_single(substructure_smiles, parent_smiles, colour=colour,
check_chiral_centres=check_chiral_centres,
check_bond_chirality=check_bond_chirality,
visualisation=visualisation,
out_file=out_file)
def highlight_subsmiles_single(substructure_smiles, parent_smiles, colour=RASPBERRY,
check_chiral_centres=True,
check_bond_chirality=True,
visualisation='show',
out_file=None):
"""
Draw structure with a single occurrence of substructure_smiles highlighted with colour
Input:
substructure_smiles: str, SMILES string of substructure
parent_smiles: str, SMILES string of superstructure
colour: str, hex colour code, ie #ffffff, colour in which substructure will be highlighted
Default: inbuilt colour raspberry
check_chiral_centres: bool, if True, only matches substructure to superstructure if stereochemistry
of all stereocentres match; if False, matches substructure to superstructure regardless of
stereochemistry of stereocentres.
check_bond_chirality: bool, if True, only matches substructure to superstructure if stereochemistry
of all stereobonds match; if False, matches substructure to superstructure regardless of
stereochemistry of stereobonds.
visualisation: str, 'show', 'png', or 'svg'. If 'png' or 'svg', out_file is required.
out_file: str, output file of png or svg drawing
"""
child_structure = read_smiles(substructure_smiles)
parent_structure = read_smiles(parent_smiles)
if not colour.startswith('#'):
colour = get_hex(colour)
parent_structure.colour_substructure_single(child_structure, colour=colour,
check_chiral_centres=check_chiral_centres,
check_bond_chirality=check_bond_chirality)
drawer = Drawer(parent_structure)
if visualisation == 'show':
drawer.show_molecule()
elif visualisation == 'svg':
assert out_file
drawer.save_svg(out_file)
elif visualisation == 'png':
assert out_file
drawer.save_png(out_file)
def highlight_subsmiles_all(substructure_smiles, parent_smiles, colour=RASPBERRY,
check_chiral_centres=True,
check_bond_chirality=True,
visualisation='show',
out_file=None):
"""
Draw structure with all occurrences of substructure_smiles highlighted with colour
Input:
substructure_smiles: str, SMILES string of substructure
parent_smiles: str, SMILES string of superstructure
colour: str, hex colour code, ie #ffffff, colour in which substructure will be highlighted.
Default: inbuilt colour raspberry
check_chiral_centres: bool, if True, only matches substructure to superstructure if stereochemistry
of all stereocentres match; if False, matches substructure to superstructure regardless of
stereochemistry of stereocentres.
check_bond_chirality: bool, if True, only matches substructure to superstructure if stereochemistry
of all stereobonds match; if False, matches substructure to superstructure regardless of
stereochemistry of stereobonds.
visualisation: str, 'show', 'png', or 'svg'. If 'png' or 'svg', out_file is required.
out_file: str, output file of png or svg drawing
"""
child_structure = read_smiles(substructure_smiles)
parent_structure = read_smiles(parent_smiles)
if not colour.startswith('#'):
colour = get_hex(colour)
parent_structure.colour_substructure_all(child_structure, colour=colour,
check_chiral_centres=check_chiral_centres,
check_bond_chirality=check_bond_chirality)
drawer = Drawer(parent_structure)
if visualisation == 'show':
drawer.show_molecule()
elif visualisation == 'svg':
assert out_file
drawer.save_svg(out_file)
elif visualisation == 'png':
assert out_file
drawer.save_png(out_file)
def highlight_subsmiles_multiple(substructure_smiles_list, parent_smiles, colours=None,
check_chiral_centres=True,
check_bond_chirality=True,
visualisation='show',
out_file=None):
"""
Draw structure with all occurrences of all substructure_smiles highlighted in different colours
Input:
substructure_smiles_list: list of str, with each str a SMILES string of substructure. Length must be shorter
than or equal to the length of colours.
parent_smiles: str, SMILES string of superstructure
colours: list of str, with each str a hex colour code, ie #ffffff, colours in which substructures will be
highlighted in order of occurrence. Length must be longer than or equal to the length of
substructure_smiles_list
check_chiral_centres: bool, if True, only matches substructure to superstructure if stereochemistry
of all stereocentres match; if False, matches substructure to superstructure regardless of
stereochemistry of stereocentres.
check_bond_chirality: bool, if True, only matches substructure to superstructure if stereochemistry
of all stereobonds match; if False, matches substructure to superstructure regardless of
stereochemistry of stereobonds.
visualisation: str, 'show', 'png', or 'svg'. If 'png' or 'svg', out_file is required.
out_file: str, output file of png or svg drawing
"""
parent_structure = read_smiles(parent_smiles)
smiles_nr = len(substructure_smiles_list)
if not colours:
colour_list = RANDOM_PALETTE_2[:smiles_nr]
else:
colour_list = []
for colour in colours:
hex_colour = get_hex(colour)
colour_list.append(hex_colour)
colour_list = colour_list[:smiles_nr]
try:
assert len(colour_list) == smiles_nr
except AssertionError:
raise ColourError('too few colours')
for i, smiles in enumerate(substructure_smiles_list):
child_structure = read_smiles(smiles)
colour = colour_list[i]
parent_structure.colour_substructure_all(child_structure, colour=colour,
check_chiral_centres=check_chiral_centres,
check_bond_chirality=check_bond_chirality)
drawer = Drawer(parent_structure)
if visualisation == 'show':
drawer.show_molecule()
elif visualisation == 'svg':
assert out_file
drawer.save_svg(out_file)
elif visualisation == 'png':
assert out_file
drawer.save_png(out_file)
| 35.017897 | 116 | 0.660448 |
e290d2e1bc07488c84824e9d07c9e96254d127f2 | 1,313 | py | Python | spreadsplug/web/tasks.py | atomotic/spreads | 754a5b21ec6d1bf83bd0137195a468b387670d16 | [
"MIT"
] | 1 | 2019-05-03T11:58:10.000Z | 2019-05-03T11:58:10.000Z | spreadsplug/web/tasks.py | atomotic/spreads | 754a5b21ec6d1bf83bd0137195a468b387670d16 | [
"MIT"
] | null | null | null | spreadsplug/web/tasks.py | atomotic/spreads | 754a5b21ec6d1bf83bd0137195a468b387670d16 | [
"MIT"
] | null | null | null | import json
import logging
import shutil
import requests
from spreads.vendor.pathlib import Path
from spreadsplug.web import task_queue
from util import find_stick, mount_stick
from persistence import get_workflow, save_workflow
logger = logging.getLogger('spreadsplug.web.tasks')
@task_queue.task()
def transfer_to_stick(workflow_id):
    """Background task: copy a workflow's files onto an attached USB stick.

    workflow_id -- database id of the workflow to transfer.

    The workflow's step/step_done flags are updated so callers can observe
    transfer progress.
    """
    stick = find_stick()
    workflow = get_workflow(workflow_id)
    with mount_stick(stick) as p:
        # Mark the transfer as in progress.
        workflow.step = 'transfer'
        workflow.step_done = False
        # Filter out problematic characters (':' and '/' are not valid in
        # file names on typical stick filesystems).
        clean_name = (workflow.path.name.replace(':', '_')
                      .replace('/', '_'))
        target_path = Path(p)/clean_name
        # Replace any stale copy of the same workflow already on the stick.
        if target_path.exists():
            shutil.rmtree(unicode(target_path))
        try:
            shutil.copytree(unicode(workflow.path), unicode(target_path))
        except shutil.Error as e:
            # Error 38 means that some permissions could not be copied, this is
            # expected behaviour for filesystems like FAT32 or exFAT, so we
            # silently ignore it here, since the actual data will have been
            # copied nevertheless.
            if any("[Errno 38]" not in exc for src, dst, exc in e[0]):
                raise e
        workflow.step_done = True
| 34.552632 | 79 | 0.648134 |
033183e50b666ca55cb9873b046159b476b9aa2e | 857 | py | Python | badgyal/bgnet.py | kennyfrc/a0lite | a3b69ce6bc059be93c9b62fd7577360c07b98523 | [
"MIT"
] | null | null | null | badgyal/bgnet.py | kennyfrc/a0lite | a3b69ce6bc059be93c9b62fd7577360c07b98523 | [
"MIT"
] | null | null | null | badgyal/bgnet.py | kennyfrc/a0lite | a3b69ce6bc059be93c9b62fd7577360c07b98523 | [
"MIT"
] | null | null | null | import torch
import badgyal.model as model
import badgyal.net as proto_net
import badgyal.proto.net_pb2 as pb
import chess
from badgyal.board2planes import board2planes, policy2moves, bulk_board2planes
import pylru
import sys
import os.path
from badgyal import AbstractNet
# Architecture hyperparameters matching the bundled "badgyal-9" weights:
# residual-tower width, number of residual blocks, and squeeze-excitation ratio.
CHANNELS=128
BLOCKS=10
SE=4
class BGNet(AbstractNet):
    """Badgyal network wrapper that loads the bundled "badgyal-9" weights."""

    def __init__(self, cuda=True, torchScript=False):
        super().__init__(cuda=cuda, torchScript=torchScript)

    def load_net(self):
        """Build the classical-head network and load its protobuf weights."""
        package_dir = os.path.abspath(os.path.dirname(__file__))
        weights_path = os.path.join(package_dir, "badgyal-9.pb.gz")
        network = model.Net(CHANNELS, BLOCKS, CHANNELS, SE, classical=True)
        network.import_proto_classical(weights_path)
        # Scale the rule50 input-plane weights (channel 109) by 1/99 due to
        # legacy reasons.
        network.conv_block[0].weight.data[:, 109, :, :] /= 99
        return network
| 28.566667 | 103 | 0.719953 |
0e8ca831e7a1889f61f00351dfd6d8b37070c8a0 | 22,625 | py | Python | sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2017_03_30/aio/operations/_images_operations.py | vbarbaresi/azure-sdk-for-python | 397ba46c51d001ff89c66b170f5576cf8f49c05f | [
"MIT"
] | 8 | 2021-01-13T23:44:08.000Z | 2021-03-17T10:13:36.000Z | sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2017_03_30/aio/operations/_images_operations.py | vbarbaresi/azure-sdk-for-python | 397ba46c51d001ff89c66b170f5576cf8f49c05f | [
"MIT"
] | null | null | null | sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2017_03_30/aio/operations/_images_operations.py | vbarbaresi/azure-sdk-for-python | 397ba46c51d001ff89c66b170f5576cf8f49c05f | [
"MIT"
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ImagesOperations:
"""ImagesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.compute.v2017_03_30.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = models
    def __init__(self, client, config, serializer, deserializer) -> None:
        # Wired up by the generated service client; `client` is the HTTP
        # pipeline client, `config` carries subscription id and polling settings.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    async def _create_or_update_initial(
        self,
        resource_group_name: str,
        image_name: str,
        parameters: "models.Image",
        **kwargs
    ) -> "models.Image":
        """Send the initial PUT of the create-or-update LRO and deserialize the
        immediate response body (the poller built in ``begin_create_or_update``
        drives the rest of the operation)."""
        cls = kwargs.pop('cls', None)  # type: ClsType["models.Image"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2017-03-30"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL
        url = self._create_or_update_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'imageName': self._serialize.url("image_name", image_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'Image')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        # 200 (updated) and 201 (created) both carry an Image body.
        if response.status_code == 200:
            deserialized = self._deserialize('Image', pipeline_response)
        if response.status_code == 201:
            deserialized = self._deserialize('Image', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/images/{imageName}'}  # type: ignore
    async def begin_create_or_update(
        self,
        resource_group_name: str,
        image_name: str,
        parameters: "models.Image",
        **kwargs
    ) -> AsyncLROPoller["models.Image"]:
        """Create or update an image.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param image_name: The name of the image.
        :type image_name: str
        :param parameters: Parameters supplied to the Create Image operation.
        :type parameters: ~azure.mgmt.compute.v2017_03_30.models.Image
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either Image or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2017_03_30.models.Image]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["models.Image"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Only fire the initial PUT when not resuming from a saved poller state.
        if cont_token is None:
            raw_result = await self._create_or_update_initial(
                resource_group_name=resource_group_name,
                image_name=image_name,
                parameters=parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            # Final deserialization once the LRO reaches a terminal state.
            deserialized = self._deserialize('Image', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/images/{imageName}'}  # type: ignore
    async def _delete_initial(
        self,
        resource_group_name: str,
        image_name: str,
        **kwargs
    ) -> Optional["models.OperationStatusResponse"]:
        """Send the initial DELETE of the delete LRO; returns the status body on
        200 and ``None`` on 202/204 (accepted / nothing to delete)."""
        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["models.OperationStatusResponse"]]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2017-03-30"
        accept = "application/json"
        # Construct URL
        url = self._delete_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'imageName': self._serialize.url("image_name", image_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = None
        # Only a 200 carries a response body.
        if response.status_code == 200:
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/images/{imageName}'}  # type: ignore
    async def begin_delete(
        self,
        resource_group_name: str,
        image_name: str,
        **kwargs
    ) -> AsyncLROPoller["models.OperationStatusResponse"]:
        """Deletes an Image.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param image_name: The name of the image.
        :type image_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either OperationStatusResponse or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2017_03_30.models.OperationStatusResponse]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["models.OperationStatusResponse"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Only fire the initial DELETE when not resuming from a saved poller state.
        if cont_token is None:
            raw_result = await self._delete_initial(
                resource_group_name=resource_group_name,
                image_name=image_name,
                cls=lambda x,y,z: x,
                **kwargs
            )
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            # Final deserialization once the LRO reaches a terminal state.
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/images/{imageName}'}  # type: ignore
    async def get(
        self,
        resource_group_name: str,
        image_name: str,
        expand: Optional[str] = None,
        **kwargs
    ) -> "models.Image":
        """Gets an image.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param image_name: The name of the image.
        :type image_name: str
        :param expand: The expand expression to apply on the operation.
        :type expand: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: Image, or the result of cls(response)
        :rtype: ~azure.mgmt.compute.v2017_03_30.models.Image
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.Image"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2017-03-30"
        accept = "application/json"
        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'imageName': self._serialize.url("image_name", image_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        # $expand is optional and only sent when the caller provided it.
        if expand is not None:
            query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('Image', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/images/{imageName}'}  # type: ignore
    def list_by_resource_group(
        self,
        resource_group_name: str,
        **kwargs
    ) -> AsyncIterable["models.ImageListResult"]:
        """Gets the list of images under a resource group.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ImageListResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.compute.v2017_03_30.models.ImageListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.ImageListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2017-03-30"
        accept = "application/json"
        def prepare_request(next_link=None):
            # Build the request for the first page (templated URL) or a
            # continuation page (the service-provided nextLink).
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # Construct URL
                url = self.list_by_resource_group.metadata['url']  # type: ignore
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        async def extract_data(pipeline_response):
            # Split one page into (link to next page, items of this page).
            deserialized = self._deserialize('ImageListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)
        async def get_next(next_link=None):
            # Fetch a single page and surface HTTP errors.
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return AsyncItemPaged(
            get_next, extract_data
        )
    list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/images'}  # type: ignore
    def list(
        self,
        **kwargs
    ) -> AsyncIterable["models.ImageListResult"]:
        """Gets the list of Images in the subscription. Use nextLink property in the response to get the
        next page of Images. Do this till nextLink is null to fetch all the Images.
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ImageListResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.compute.v2017_03_30.models.ImageListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.ImageListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2017-03-30"
        accept = "application/json"
        def prepare_request(next_link=None):
            # Build the request for the first page (templated URL) or a
            # continuation page (the service-provided nextLink).
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        async def extract_data(pipeline_response):
            # Split one page into (link to next page, items of this page).
            deserialized = self._deserialize('ImageListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)
        async def get_next(next_link=None):
            # Fetch a single page and surface HTTP errors.
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/images'}  # type: ignore
| 47.431866 | 181 | 0.658298 |
6e79ce60fde056c59046f3c2068ab627140a70f7 | 7,439 | py | Python | test_multiback.py | sirrah23/MultiBack | 1622075bbb936cdfeda461fee70119030382556e | [
"MIT"
] | null | null | null | test_multiback.py | sirrah23/MultiBack | 1622075bbb936cdfeda461fee70119030382556e | [
"MIT"
] | null | null | null | test_multiback.py | sirrah23/MultiBack | 1622075bbb936cdfeda461fee70119030382556e | [
"MIT"
import os
import shutil
from core import *
def setup_module():
    # Root scratch directory shared by every test class in this module.
    os.mkdir("./test")
def create_file_with_content(fname, content=""):
    """Create (or truncate) *fname* and write *content* into it."""
    handle = open(fname, "w+")
    try:
        handle.write(content)
    finally:
        handle.close()
def teardown_module():
    # Remove the shared scratch directory and anything the tests left in it.
    shutil.rmtree("./test")
class TestFileDirExist:
    """Exercise the is_file()/is_dir() existence helpers against real paths."""
    @classmethod
    def setup_class(cls):
        os.mkdir("./test/exist")
        for fname in ("exist1.txt", "exist2.txt"):
            create_file_with_content("./test/exist/" + fname)
    @classmethod
    def teardown_class(cls):
        shutil.rmtree("./test/exist")
    def test_is_file_not_exist(self):
        assert is_file("./notexist.txt") == False
    def test_is_file_exists(self):
        assert is_file("./test/exist/exist1.txt") == True
        assert is_file("./test/exist/exist2.txt") == True
    def test_is_dir_not_exists(self):
        assert is_dir("./notexist") == False
    def test_is_dir_exists(self):
        assert is_dir("./test/exist") == True
class TestTimestamp:
    """fname_timestamp() should splice the timestamp between name and extension."""
    def test_filename_timestamp(self):
        stamped = fname_timestamp("test.txt", "20180629.111516")
        assert stamped == "test.20180629.111516.txt"
class TestFileManipulation:
    """Tests for rename_file(), copy_file() and path_filename()."""
    @classmethod
    def setup_class(cls):
        os.mkdir("./test/rename")
        os.mkdir("./test/copy")
        create_file_with_content("./test/rename/r1.txt")
        # Single source file for the copy test.  (The original fixture
        # created this exact file twice in a row — redundant duplicate
        # removed.)
        create_file_with_content("./test/copy/c1.txt")
    @classmethod
    def teardown_class(cls):
        shutil.rmtree("./test/rename")
        shutil.rmtree("./test/copy")
    def test_rename(self):
        # After renaming, only the new name should exist.
        rename_file("./test/rename/r1.txt", "./test/rename/r2.txt")
        assert is_file("./test/rename/r1.txt") == False
        assert is_file("./test/rename/r2.txt") == True
    def test_copy_file(self):
        # After copying, both the source and the copy should exist.
        copy_file("./test/copy/c1.txt", "./test/copy/c2.txt")
        assert is_file("./test/copy/c1.txt") == True
        assert is_file("./test/copy/c2.txt") == True
    def test_path_filename(self):
        fpath = "/home/john/test.txt"
        assert path_filename(fpath) == "test.txt"
class TestBackup:
    """End-to-end checks for backup_file()/backup_files().

    Two fixture trees are prepared: ``backup`` (single-file cases) and
    ``backups`` (multi-file cases), each with an empty destination and a
    destination that already contains same-named files so the backup has
    to rename (timestamp) its copies.
    """
    @classmethod
    def setup_class(cls):
        os.mkdir("./test/backup")
        os.mkdir("./test/backup/src")
        os.mkdir("./test/backup/dest_empty")
        os.mkdir("./test/backup/dest_populated")
        os.mkdir("./test/backups")
        os.mkdir("./test/backups/src")
        os.mkdir("./test/backups/dest_empty")
        os.mkdir("./test/backups/dest_populated")
        create_file_with_content("./test/backup/src/test.txt")
        create_file_with_content("./test/backup/dest_populated/test.txt")
        create_file_with_content("./test/backups/src/test.txt")
        create_file_with_content("./test/backups/src/test2.txt")
        create_file_with_content("./test/backups/src/test3.txt")
        create_file_with_content("./test/backups/dest_populated/test.txt")
        create_file_with_content("./test/backups/dest_populated/test2.txt")
        create_file_with_content("./test/backups/dest_populated/test3.txt")
    @classmethod
    def teardown_class(cls):
        shutil.rmtree("./test/backup")
        shutil.rmtree("./test/backups")
    def test_backup_file_no_rename(self):
        # Empty destination: the file is copied under its original name.
        src = os.path.abspath("./test/backup/src/test.txt")
        dest = [os.path.abspath("./test/backup/dest_empty")]
        backup_file(src, dest)
        assert is_file(src) == True
        assert is_file(os.path.join(dest[0], "test.txt")) == True
    def test_backup_file_rename(self):
        # Name clash in the destination: both files must survive with
        # distinct names that keep the "test*.txt" pattern.
        src = os.path.abspath("./test/backup/src/test.txt")
        dest = [os.path.abspath("./test/backup/dest_populated")]
        backup_file(src, dest)
        files = os.listdir(dest[0])
        assert len(files) == 2
        assert files[0].startswith("test") == True
        assert files[0].endswith(".txt") == True
        assert files[1].startswith("test") == True
        # BUG FIX: this assertion re-checked files[0] instead of files[1].
        assert files[1].endswith(".txt") == True
        assert files[0] != files[1]
    def test_backup_files_no_rename(self):
        srcs = [
            os.path.abspath("./test/backups/src/test.txt"),
            os.path.abspath("./test/backups/src/test2.txt"),
            os.path.abspath("./test/backups/src/test3.txt")
        ]
        dest = [os.path.abspath("./test/backups/dest_empty")]
        backup_files(srcs, dest)
        files = os.listdir(dest[0])
        assert len(files) == 3
        assert "test.txt" in files
        assert "test2.txt" in files
        assert "test3.txt" in files
    def test_backup_files_rename(self):
        srcs = [
            os.path.abspath("./test/backups/src/test.txt"),
            os.path.abspath("./test/backups/src/test2.txt"),
            os.path.abspath("./test/backups/src/test3.txt")
        ]
        dest = [os.path.abspath("./test/backups/dest_populated")]
        backup_files(srcs, dest)
        files = os.listdir(dest[0])
        assert len(files) == 6
        # Count originals vs renamed copies per prefix.  test2/test3 are
        # checked first because bare "test" is a prefix of both.
        counts = [0, 0, 0]
        for file in files:
            if file.startswith("test2"):
                counts[1] += 1
            elif file.startswith("test3"):
                counts[2] += 1
            elif file.startswith("test"):
                counts[0] += 1
        assert counts == [2, 2, 2]
class TestConfig:
    """Tests for the JSON config template writer and reader.

    NOTE(review): test_template_config asserts exactly one file in the
    directory, so it only passes if it runs before test_config_read (which
    writes a second template) — the tests are order-dependent; confirm
    whether that is acceptable for this suite.
    """
    @classmethod
    def setup_class(cls):
        os.mkdir("./test/template")
    @classmethod
    def teardown_class(cls):
        shutil.rmtree("./test/template")
    def test_template_config(self):
        # Write a timestamped template and check it landed with the
        # expected name pattern.
        loc = os.path.abspath("./test/template/")
        loc = os.path.join(loc, fname_timestamp("config.json", timestamp_str()))
        template_config(loc)
        files = os.listdir("./test/template")
        assert len(files) == 1
        assert files[0].startswith("config") == True
        assert files[0].endswith(".json") == True
    def test_config_read(self):
        # Round-trip: write a template, read it back and check the
        # default sources/destinations it ships with.
        loc = os.path.abspath("./test/template/")
        loc = os.path.join(loc, fname_timestamp("config.json", timestamp_str()))
        template_config(loc)
        valid, cfg = read_config(loc)
        assert valid == True
        assert "/home/anon/test1.txt" in cfg["sources"]
        assert "/home/anon/test2.txt" in cfg["sources"]
        assert "/home/anon/backup/" in cfg["destinations"]
        assert "/home/anon/backup2/" in cfg["destinations"]
class TestUserInputValidation:
    """validate_user_input() should report one error per missing source file
    or destination directory, and none when everything exists."""
    @classmethod
    def setup_class(cls):
        os.mkdir("./test/validate")
        os.mkdir("./test/validate/dest")
        create_file_with_content("./test/validate/test.txt")
    @classmethod
    def teardown_class(cls):
        shutil.rmtree("./test/validate")
    def test_user_input_validation_errs(self):
        # Both the source file and the destination directory are missing,
        # so validation must fail with exactly two messages.
        root = os.path.abspath("./test/validate")
        bad_file = os.path.join(root, "nosrc1.txt")
        bad_dir = os.path.join(root, "nosrc2/")
        valid, errors = validate_user_input([bad_file], [bad_dir])
        assert valid == False
        assert len(errors) == 2
        assert "The source file `{}` does not exist".format(bad_file) in errors
        assert "The destination directory `{}` does not exist".format(bad_dir) in errors
    def test_user_input_validation_no_errs(self):
        root = os.path.abspath("./test/validate")
        good_file = os.path.join(root, "test.txt")
        good_dir = os.path.join(root, "dest/")
        valid, errors = validate_user_input([good_file], [good_dir])
        assert valid == True
        assert len(errors) == 0
| 33.509009 | 88 | 0.612851 |
08db42f0cd3bad6708f2b0d07a4e487302d54c4d | 752 | py | Python | Chapter14/Scripts/cartoframes_dataedit.py | monocilindro/Mastering-Geospatial-Analysis-with-Python | 2cee571403aa0d96f6c2eb7400792286a81dc7e9 | [
"MIT"
] | 64 | 2018-05-04T16:54:59.000Z | 2022-03-22T11:26:21.000Z | Chapter14/Scripts/cartoframes_dataedit.py | monocilindro/Mastering-Geospatial-Analysis-with-Python | 2cee571403aa0d96f6c2eb7400792286a81dc7e9 | [
"MIT"
] | 1 | 2020-05-31T00:45:28.000Z | 2020-05-31T18:29:07.000Z | Chapter14/Scripts/cartoframes_dataedit.py | monocilindro/Mastering-Geospatial-Analysis-with-Python | 2cee571403aa0d96f6c2eb7400792286a81dc7e9 | [
"MIT"
import geopandas as gdp
import cartoframes
import pandas as pd
from shapely.geometry import Point
from shapely.wkb import loads

# SECURITY(review): hard-coded CARTO API key checked into source control —
# revoke it and load the credential from an environment variable instead.
APIKEY = "1353407a098fef50ec1b6324c437d6d52617b890"
cc = cartoframes.CartoContext(base_url='https://lokiintelligent.carto.com/',
                              api_key=APIKEY)

# Pull the NBA arenas table from CARTO and the US state polygons from disk.
arenas_df = cc.read('arenas_nba')
shp = r"C:\Data\US_States\US_States.shp"
states_df = gdp.read_file(shp)

# For each arena, find the state polygon it falls in.  `the_geom` comes back
# as hex-encoded WKB, hence loads(..., hex=True).
data = []
for index, ref in arenas_df.iterrows():
    # Parse the arena geometry once per arena instead of once per state.
    arena_geom = loads(ref['the_geom'], hex=True)
    match_found = False
    for index2, orig in states_df.iterrows():
        if arena_geom.intersects(orig['geometry']):
            print(orig['STATE'], ref['team'])
            data.append(orig['STATE'])
            match_found = True
            # BUG FIX: stop after the first matching state so `data` keeps
            # exactly one entry per arena — an arena touching two state
            # polygons previously produced two entries and broke the
            # column assignment below with a length mismatch.
            break
    if not match_found:
        data.append(None)

# Write the enriched table back to CARTO.
arenas_df['state'] = data
cc.write(arenas_df, 'arenas_nba', overwrite=True)
68d0a1f51c3ba1c1a4111b878bf6d1d3fd04f873 | 7,007 | py | Python | mopidy/local/search.py | rzr/mopidy | f6556ffafce34aebbc43ca266f69ac0068edc31d | [
"Apache-2.0"
] | 2 | 2015-07-09T09:36:26.000Z | 2019-10-05T04:13:19.000Z | mopidy/local/search.py | rzr/mopidy | f6556ffafce34aebbc43ca266f69ac0068edc31d | [
"Apache-2.0"
] | 1 | 2015-01-05T10:40:38.000Z | 2015-03-21T15:42:16.000Z | mopidy/local/search.py | rzr/mopidy | f6556ffafce34aebbc43ca266f69ac0068edc31d | [
"Apache-2.0"
from __future__ import unicode_literals
from mopidy.models import Album, SearchResult
def find_exact(tracks, query=None, uris=None):
    """Return a SearchResult with the tracks whose fields exactly equal the
    queried values.

    `query` maps a field name ('uri', 'track_name', 'album', 'artist',
    'albumartist', 'composer', 'performer', 'track_no', 'genre', 'date',
    'comment' or 'any') to a value or iterable of values; every value must
    match (the track list is narrowed once per value).
    NOTE(review): Python 2 only — relies on dict.iteritems() and on the
    eager, list-returning builtin filter(); under Python 3's lazy filter
    the per-iteration rebinding of `q` would change behaviour.
    """
    # TODO Only return results within URI roots given by ``uris``
    if query is None:
        query = {}
    _validate_query(query)
    for (field, values) in query.iteritems():
        # Accept a single value as shorthand for a one-element list.
        if not hasattr(values, '__iter__'):
            values = [values]
        # FIXME this is bound to be slow for large libraries
        for value in values:
            if field == 'track_no':
                q = _convert_to_int(value)
            else:
                q = value.strip()
            # One predicate per searchable field; each closes over `q`
            # and is applied eagerly below before `q` is rebound.
            uri_filter = lambda t: q == t.uri
            track_name_filter = lambda t: q == t.name
            album_filter = lambda t: q == getattr(t, 'album', Album()).name
            # Returns the (possibly empty) list of matching artists;
            # truthy iff at least one artist name equals q.
            artist_filter = lambda t: filter(
                lambda a: q == a.name, t.artists)
            albumartist_filter = lambda t: any([
                q == a.name
                for a in getattr(t.album, 'artists', [])])
            composer_filter = lambda t: any([
                q == a.name
                for a in getattr(t, 'composers', [])])
            performer_filter = lambda t: any([
                q == a.name
                for a in getattr(t, 'performers', [])])
            track_no_filter = lambda t: q == t.track_no
            genre_filter = lambda t: t.genre and q == t.genre
            date_filter = lambda t: q == t.date
            comment_filter = lambda t: q == t.comment
            any_filter = lambda t: (
                uri_filter(t) or
                track_name_filter(t) or
                album_filter(t) or
                artist_filter(t) or
                albumartist_filter(t) or
                composer_filter(t) or
                performer_filter(t) or
                track_no_filter(t) or
                genre_filter(t) or
                date_filter(t) or
                comment_filter(t))
            if field == 'uri':
                tracks = filter(uri_filter, tracks)
            elif field == 'track_name':
                tracks = filter(track_name_filter, tracks)
            elif field == 'album':
                tracks = filter(album_filter, tracks)
            elif field == 'artist':
                tracks = filter(artist_filter, tracks)
            elif field == 'albumartist':
                tracks = filter(albumartist_filter, tracks)
            elif field == 'composer':
                tracks = filter(composer_filter, tracks)
            elif field == 'performer':
                tracks = filter(performer_filter, tracks)
            elif field == 'track_no':
                tracks = filter(track_no_filter, tracks)
            elif field == 'genre':
                tracks = filter(genre_filter, tracks)
            elif field == 'date':
                tracks = filter(date_filter, tracks)
            elif field == 'comment':
                tracks = filter(comment_filter, tracks)
            elif field == 'any':
                tracks = filter(any_filter, tracks)
            else:
                raise LookupError('Invalid lookup field: %s' % field)
    # TODO: add local:search:<query>
    return SearchResult(uri='local:search', tracks=tracks)
def search(tracks, query=None, uris=None):
    """Case-insensitive substring search over ``tracks``.

    ``query`` maps a field name ('uri', 'track_name', 'album', 'artist',
    'albumartist', 'composer', 'performer', 'track_no', 'genre', 'date',
    'comment', 'any') to a value or list of values.  Every field/value
    pair must match (logical AND).  Raises :exc:`LookupError` for an
    empty query value or an unknown field, and returns a
    ``SearchResult`` with the matching tracks.
    """
    # TODO Only return results within URI roots given by ``uris``
    if query is None:
        query = {}
    _validate_query(query)
    for (field, values) in query.items():  # items() works on py2 and py3
        # Wrap a lone value in a list. The explicit str check matters on
        # Python 3, where strings have __iter__ and would otherwise be
        # iterated character by character.
        if isinstance(values, str) or not hasattr(values, '__iter__'):
            values = [values]
        # FIXME this is bound to be slow for large libraries
        for value in values:
            if field == 'track_no':
                q = _convert_to_int(value)
            else:
                q = value.strip().lower()
            # These lambdas close over q, so they are only valid within
            # this loop iteration, where they are applied immediately.
            uri_filter = lambda t: bool(t.uri and q in t.uri.lower())
            track_name_filter = lambda t: bool(t.name and q in t.name.lower())
            album_filter = lambda t: bool(
                t.album and t.album.name and q in t.album.name.lower())
            # any() replaces bool(filter(...)), which would always be
            # truthy on Python 3 where filter() returns a lazy iterator.
            artist_filter = lambda t: any(
                a.name and q in a.name.lower() for a in t.artists)
            albumartist_filter = lambda t: any(
                a.name and q in a.name.lower()
                for a in getattr(t.album, 'artists', []))
            composer_filter = lambda t: any(
                a.name and q in a.name.lower()
                for a in getattr(t, 'composers', []))
            performer_filter = lambda t: any(
                a.name and q in a.name.lower()
                for a in getattr(t, 'performers', []))
            track_no_filter = lambda t: q == t.track_no
            genre_filter = lambda t: bool(t.genre and q in t.genre.lower())
            date_filter = lambda t: bool(t.date and t.date.startswith(q))
            comment_filter = lambda t: bool(
                t.comment and q in t.comment.lower())
            any_filter = lambda t: (
                uri_filter(t) or
                track_name_filter(t) or
                album_filter(t) or
                artist_filter(t) or
                albumartist_filter(t) or
                composer_filter(t) or
                performer_filter(t) or
                track_no_filter(t) or
                genre_filter(t) or
                date_filter(t) or
                comment_filter(t))
            # list() keeps tracks re-filterable on Python 3, where a bare
            # filter() result would be a one-shot iterator.
            if field == 'uri':
                tracks = list(filter(uri_filter, tracks))
            elif field == 'track_name':
                tracks = list(filter(track_name_filter, tracks))
            elif field == 'album':
                tracks = list(filter(album_filter, tracks))
            elif field == 'artist':
                tracks = list(filter(artist_filter, tracks))
            elif field == 'albumartist':
                tracks = list(filter(albumartist_filter, tracks))
            elif field == 'composer':
                tracks = list(filter(composer_filter, tracks))
            elif field == 'performer':
                tracks = list(filter(performer_filter, tracks))
            elif field == 'track_no':
                tracks = list(filter(track_no_filter, tracks))
            elif field == 'genre':
                tracks = list(filter(genre_filter, tracks))
            elif field == 'date':
                tracks = list(filter(date_filter, tracks))
            elif field == 'comment':
                tracks = list(filter(comment_filter, tracks))
            elif field == 'any':
                tracks = list(filter(any_filter, tracks))
            else:
                raise LookupError('Invalid lookup field: %s' % field)
    # TODO: add local:search:<query>
    return SearchResult(uri='local:search', tracks=tracks)
def _validate_query(query):
for (_, values) in query.iteritems():
if not values:
raise LookupError('Missing query')
for value in values:
if not value:
raise LookupError('Missing query')
def _convert_to_int(string):
try:
return int(string)
except ValueError:
return object()
| 38.712707 | 78 | 0.524761 |
c5e424b16a0ab89f825443d5065ddea96d29e3f1 | 5,483 | py | Python | examples/MultiServer/gross_te43.py | peterlharding/PDQ | b6ff8dd958dbae85b4402745539898b711760713 | [
"MIT"
] | 5 | 2015-08-12T16:22:11.000Z | 2019-06-05T05:57:35.000Z | examples/MultiServer/gross_te43.py | peterlharding/PDQ | b6ff8dd958dbae85b4402745539898b711760713 | [
"MIT"
] | null | null | null | examples/MultiServer/gross_te43.py | peterlharding/PDQ | b6ff8dd958dbae85b4402745539898b711760713 | [
"MIT"
] | 3 | 2018-03-12T12:25:28.000Z | 2019-06-05T05:57:44.000Z | #!/usr/bin/env python
###############################################################################
# Copyright (C) 1994 - 2007, Performance Dynamics Company #
# #
# This software is licensed as described in the file COPYING, which #
# you should have received as part of this distribution. The terms #
# are also available at http://www.perfdynamics.com/Tools/copyright.html. #
# #
# You may opt to use, copy, modify, merge, publish, distribute and/or sell #
# copies of the Software, and permit persons to whom the Software is #
# furnished to do so, under the terms of the COPYING file. #
# #
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY #
# KIND, either express or implied. #
###############################################################################
#
# gross_te43.py
#
# Exercise 4.3 on p. 182 of Gross & Harris
# Same parameters as Ex. 4.2 but with 2 workload classes
# Solve traffic eqns using NumPy, rather than PDQ-MSQ
#
# Created by NJG on Sun, Aug 26, 2007
# Updated by NJG on Mon, Aug 27, 2007
import sys
from numpy import *
from numpy.linalg import solve
def ErlangC(servers, erlangs):
    """Return the Erlang C probability that an arrival must queue (M/M/c).

    Computed via the numerically stable Erlang B recurrence over the
    server count, then converted with the standard B-to-C relation.
    Calls sys.exit() when the offered load meets or exceeds capacity
    (behavior preserved from the original script).
    """
    if erlangs >= servers:
        # print() call form is valid on both Python 2 and 3
        # (was a Python 2 print statement)
        print("Error: %4.2f Erlangs > %d servers" % (erlangs, servers))
        sys.exit()
    # float() guards against Python 2 integer division for int inputs
    rho = erlangs / float(servers)
    erlangB = erlangs / (1 + erlangs)
    for mm in range(2, servers + 1):
        eb = erlangB
        erlangB = eb * erlangs / (mm + (eb * erlangs))
    erlangC = erlangB / (1 - rho + (rho * erlangB))
    return erlangC
# Traffic equations
"""
Let subscript 'a' denote type-1 customers and 'b' type-2 customers.
L = 35 ... external arrival rate per HOUR
The traffic equations can be obtained from the R matrices in G&H by
reading each column vertically in the order 1,2,3.
From the R_(1) matrix we get:
La1 = 0.00 La1 + 0.00 La2 + 0.00 La3 = 0.55 L
La2 = 1.00 La1 + 0.00 La2 + 0.00 La3
La3 = 0.00 La1 + 0.02 La2 + 0.00 La3
Rearrange terms to produce the Aa coefficient matrix below:
1.00 La1 + 0.00 La2 + 0.00 La3 = 0.55 L
1.00 La1 - 1.00 La2 + 0.00 La3 = 0.0
0.00 La1 + 0.02 La2 - 1.00 La3 = 0.0
Similarly, from the R_(2) matrix we get:
La1 = 0.00 La1 + 0.00 La2 + 0.00 La3 = 0.45 L
La2 = 0.00 La1 + 0.00 La2 + 0.01 La3
La3 = 1.00 La1 + 0.02 La2 + 0.00 La3
which, on rearrangement gives for Ab below:
1.00 La1 + 0.00 La2 + 0.00 La3 = 0.45 L
0.00 La1 - 1.00 La2 + 0.01 La3 = 0.0
1.00 La1 + 0.02 La2 - 1.00 La3 = 0.0
"""
# NOTE(review): the text above shows 0.02 La2 in Ab's third equation, but
# the Ab matrix below uses 0.00 in that position -- confirm against G&H.
# All print statements below were converted from Python 2 print-statement
# syntax to the call form, which produces identical output on Python 2
# (single parenthesized argument) and is required on Python 3.
# Matrices of coeffs
Aa = array([[1.00, 0.00, 0.00],
            [1.00, -1.00, 0.00],
            [0.00, 0.02, -1.00]])
Ab = array([[1.00, 0.00, 0.00],
            [0.00, -1.00, 0.01],
            [1.00, 0.00, -1.00]])
# Fraction of total traffic L going to 'a' and 'b' streams
fLa = 0.55 * 35
fLb = 0.45 * 35
# RHS of the traffic eqns
Ba = array([fLa, 0.0, 0.0])
Bb = array([fLb, 0.0, 0.0])
# Solve the traffic eqns for the local arrivals
La = solve(Aa, Ba)
Lb = solve(Ab, Bb)
print("Arrival ratesA: %7.4f %7.4f %7.4f" % (La[0], La[1], La[2]))
print("Arrival ratesB: %7.4f %7.4f %7.4f" % (Lb[0], Lb[1], Lb[2]))
# Server capacity
m = array([1, 3, 7])
print("Server cap: %7d %7d %7d" % (m[0], m[1], m[2]))
# Visit ratios (v_kc = L_kc / Lc)
va = array([La[0]/fLa, La[1]/fLa, La[2]/fLa])
vb = array([Lb[0]/fLb, Lb[1]/fLb, Lb[2]/fLb])
print("Visit ratioA: %7.4f %7.4f %7.4f" % (va[0], va[1], va[2]))
print("Expected V_a: %7.4f %7.4f %7.4f" % (19.25/fLa, 19.25/fLa, 0.385/fLa))
print("Visit ratioB: %7.4f %7.4f %7.4f" % (vb[0], vb[1], vb[2]))
print("Expected V_b: %7.4f %7.4f %7.4f" % (15.75/fLb, 0.1575/fLb, 15.75/fLb))
# Service demands in HOURS (same for both classes at each node)
S = array([0.5/60, 6.0/60, 20.0/60])
Da = array([va[0] * S[0], va[1] * S[1], va[2] * S[2]])
Db = array([vb[0] * S[0], vb[1] * S[1], vb[2] * S[2]])
# Total utilization per server
rho = array([La[0]*Da[0] + Lb[0]*Db[0], (La[1]*Da[1] + Lb[1]*Db[1])/m[1], (La[2]*Da[2] + Lb[2]*Db[2])/m[2]])
print("Utilizations: %7.4f %7.4f %7.4f" % (rho[0], rho[1], rho[2]))
# Queue lengths via Erlang C (M/M/c mean queue formula)
Q0 = m[0]*rho[0] + ErlangC(m[0], m[0]*rho[0]) * (rho[0]/(1 - rho[0]))
Q1 = m[1]*rho[1] + ErlangC(m[1], m[1]*rho[1]) * (rho[1]/(1 - rho[1]))
Q2 = m[2]*rho[2] + ErlangC(m[2], m[2]*rho[2]) * (rho[2]/(1 - rho[2]))
#print "Queue length1 : %7.4f (Expected: 0.412)" % (rho[0] / (1 - rho[0]))
#print "Queue length1a: %7.4f (Expected: 0.227)" % (La[0] * Da[0] / (1 - rho[0]))
print("Queue length1 : %7.4f (Expected: 0.412)" % (Q0))
print("Queue length2 : %7.4f (Expected: 2.705)" % (Q1))
print("Queue length3 : %7.4f (Expected: 6.777)" % (Q2))
print("Queue length1a: %7.4f (Expected: 0.227)" % (Q0 * (La[0]/(La[0]+Lb[0]))))
print("Queue length2a: %7.4f (Expected: 2.683)" % (Q1 * (La[1]/(La[1]+Lb[1]))))
print("Queue length3a: %7.4f (Expected: 0.162)" % (Q2 * (La[2]/(La[2]+Lb[2]))))
print("Queue length1b: %7.4f (Expected: 0.185)" % (Q0 * (Lb[0]/(La[0]+Lb[0]))))
print("Queue length2b: %7.4f (Expected: 0.022)" % (Q1 * (Lb[1]/(La[1]+Lb[1]))))
print("Queue length3b: %7.4f (Expected: 6.616)" % (Q2 * (Lb[2]/(La[2]+Lb[2]))))
| 38.342657 | 109 | 0.52526 |
97f395bac308df779ed8ffaf65c6a987838fc46b | 24,424 | py | Python | bin/basenji_motifs.py | polyaB/basenji | 53949cade0966c25fc946761d55092e1851b6908 | [
"Apache-2.0"
] | null | null | null | bin/basenji_motifs.py | polyaB/basenji | 53949cade0966c25fc946761d55092e1851b6908 | [
"Apache-2.0"
] | null | null | null | bin/basenji_motifs.py | polyaB/basenji | 53949cade0966c25fc946761d55092e1851b6908 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# Copyright 2020 Calico LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =========================================================================
from __future__ import print_function
from optparse import OptionParser
import json
import multiprocessing
import os
import pdb
import subprocess
import time
import h5py
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from scipy.stats import spearmanr
import seaborn as sns
from sklearn import preprocessing
import tensorflow as tf
if tf.__version__[0] == '1':
tf.compat.v1.enable_eager_execution()
from basenji import dataset
from basenji import dna_io
from basenji import seqnn
'''
basenji_motifs.py
Collect statistics and make plots to explore the first convolution layer
of the given model using the given sequences.
'''
# weblogo CLI flags used by plot_filter_logo: hide the axes, error bars
# and fineprint, and color the four bases with a fixed palette.
weblogo_opts = '-X NO -Y NO --errorbars NO --fineprint ""'
weblogo_opts += ' -C "#CB2026" A A'
weblogo_opts += ' -C "#34459C" C C'
weblogo_opts += ' -C "#FBB116" G G'
weblogo_opts += ' -C "#0C8040" T T'
################################################################################
# main
################################################################################
def main():
  """Explore and annotate the first convolution layer of a Basenji model.

  Computes first-layer activations over the dataset sequences, writes a
  MEME motif file plus per-filter heatmaps and weblogos, annotates each
  filter with its best Tomtom match against a MEME database, and emits
  a summary table (and optional global heatmaps).
  """
  usage = 'usage: %prog [options] <params_file> <model_file> <data_dir>'
  parser = OptionParser(usage)
  parser.add_option('-a', dest='act_t',
      default=0.5, type='float',
      help='Activation threshold (as proportion of max) to consider for PWM [Default: %default]')
  parser.add_option('-c', dest='post_conv',
      default=False, action='store_true',
      help='Embed first layer post-convolution rather than batch norm [Default: %default]')
  parser.add_option('-d', dest='plot_density',
      default=False, action='store_true',
      help='Plot filter activation density [Default: %default]')
  parser.add_option('--heat', dest='plot_heats',
      default=False, action='store_true',
      help='Plot heat maps describing filter activations in the test sequences [Default: %default]')
  parser.add_option('-l', dest='seq_length_crop',
      default=None, type='int',
      help='Crop sequences to shorter length [Default: %default]')
  parser.add_option('-o', dest='out_dir',
      default='basenji_motifs')
  parser.add_option('-m', dest='meme_db',
      default='%s/cisbp/Homo_sapiens.meme' % os.environ['HG38'],
      help='MEME database used to annotate motifs')
  parser.add_option('-p', dest='parallel_threads',
      default=1, type='int',
      help='Generate weblogos in parallel threads [Default: %default]')
  parser.add_option('-s', dest='sample',
      default=None, type='int',
      help='Sample sequences from the test set [Default:%default]')
  # BUGFIX: options.split_label is consumed below (dataset.SeqDataset),
  # but no option previously defined it, raising AttributeError at runtime.
  parser.add_option('--split', dest='split_label',
      default='test',
      help='Dataset split label for the TFRecords to analyze [Default: %default]')
  parser.add_option('-t', dest='trim_filters',
      default=False, action='store_true',
      help='Trim uninformative positions off the filter ends [Default: %default]')
  parser.add_option('--tfr', dest='tfr_pattern',
      default=None,
      help='TFR pattern string appended to data_dir/tfrecords for subsetting [Default: %default]')
  parser.add_option('-v', dest='high_var_pct',
      default=1.0, type='float',
      help='Highly variable site proportion to take [Default: %default]')
  (options, args) = parser.parse_args()

  if len(args) != 3:
    parser.error('Must provide Basenji params and model files and data directory')
  else:
    params_file = args[0]
    model_file = args[1]
    data_dir = args[2]

  if not os.path.isdir(options.out_dir):
    os.mkdir(options.out_dir)

  #######################################################
  # inputs

  # read model parameters
  with open(params_file) as params_open:
    params = json.load(params_open)
  params_model = params['model']
  params_train = params['train']

  if options.seq_length_crop is not None:
    params_model['seq_length'] = options.seq_length_crop

  # construct data
  eval_data = dataset.SeqDataset(data_dir,
    split_label=options.split_label,
    batch_size=params_train['batch_size'],
    mode=tf.estimator.ModeKeys.EVAL,
    tfr_pattern=options.tfr_pattern)

  # obtain sequences
  eval_seqs_1hot = eval_data.numpy(return_inputs=True, return_outputs=False)
  eval_seqs_dna = dna_io.hot1_dna(eval_seqs_1hot)
  del eval_seqs_1hot

  #################################################################
  # model

  # initialize model
  seqnn_model = seqnn.SeqNN(params_model)
  seqnn_model.restore(model_file)

  # first layer embedding
  # BUGFIX: was batch_norm=(~options.post_conv); bitwise NOT of a bool
  # yields -1 or -2, both truthy, so batch_norm was always enabled.
  seqnn_model.build_embed(0, batch_norm=not options.post_conv)
  _, preds_length, preds_depth = seqnn_model.embed.output.shape

  # get weights
  filter_weights = seqnn_model.get_conv_weights()
  print(filter_weights.shape)
  num_filters, _, filter_size = filter_weights.shape

  # compute filter activations
  filter_outs = seqnn_model.predict(eval_data)
  print(filter_outs.shape)

  #################################################################
  # individual filter plots

  # save information contents
  filters_ic = []
  meme_out = meme_intro('%s/filters_meme.txt' % options.out_dir, eval_seqs_dna)

  # plot weblogo of high scoring outputs (in parallel)
  if options.parallel_threads > 1:
    pfl_args = []
    for f in range(num_filters):
      pfl_args.append((filter_outs[:, :, f], filter_size,
        eval_seqs_dna, '%s/filter%d_logo'%(options.out_dir,f),
        options.act_t))
    with multiprocessing.get_context('spawn').Pool(options.parallel_threads) as pool:
      pool.starmap(plot_filter_logo, pfl_args)

  for f in range(num_filters):
    print('Filter %d' % f)

    # plot filter parameters as a heatmap
    plot_filter_heat(filter_weights[f, :, :],
                     '%s/filter%d_heat.pdf' % (options.out_dir, f))

    if options.parallel_threads == 1:
      plot_filter_logo(filter_outs[:, :, f], filter_size,
        eval_seqs_dna, '%s/filter%d_logo'%(options.out_dir,f),
        options.act_t)

    # write possum motif file
    # filter_possum(filter_weights[f, :, :], 'filter%d' % f,
    #               '%s/filter%d_possum.txt' % (options.out_dir,
    #                                           f), options.trim_filters)

    # make a PWM for the filter from its weblogo FASTA
    filter_pwm, nsites = make_filter_pwm('%s/filter%d_logo.fa' %
                                         (options.out_dir, f))

    if nsites < 10:
      # no information
      filters_ic.append(0)
    else:
      # compute and save information content
      filters_ic.append(info_content(filter_pwm))

      # add to the meme motif file
      meme_add(meme_out, f, filter_pwm, nsites, options.trim_filters)

  meme_out.close()

  #################################################################
  # annotate filters
  #################################################################
  # run tomtom
  subprocess.call(
      'tomtom -dist pearson -thresh 0.1 -oc %s/tomtom %s/filters_meme.txt %s' %
      (options.out_dir, options.out_dir, options.meme_db),
      shell=True)

  # read in annotations
  filter_names = name_filters(
      num_filters, '%s/tomtom/tomtom.tsv' % options.out_dir, options.meme_db)

  #################################################################
  # print a table of information
  #################################################################
  table_out = open('%s/table.txt' % options.out_dir, 'w')

  # print header for later panda reading
  header_cols = ('', 'consensus', 'annotation', 'ic', 'mean', 'std')
  print('%3s %19s %10s %5s %6s %6s' % header_cols, file=table_out)

  for f in range(num_filters):
    # collapse to a consensus motif
    consensus = filter_motif(filter_weights[f, :, :])

    # grab annotation
    annotation = '.'
    name_pieces = filter_names[f].split('_')
    if len(name_pieces) > 1:
      annotation = name_pieces[1]

    f_scores = np.ravel(filter_outs[:, :, f])
    fmean, fstd = f_scores.mean(), f_scores.std()

    if options.plot_density:
      # plot density of filter output scores
      plot_score_density(f_scores,
                         '%s/filter%d_dens.pdf' % (options.out_dir, f))

    row_cols = (f, consensus, annotation, filters_ic[f], fmean, fstd)
    print('%-3d %19s %10s %5.2f %6.4f %6.4f' % row_cols, file=table_out)

  table_out.close()

  #################################################################
  # global filter plots
  #################################################################
  # these methods make less sense for longer sequences;
  # I should fragment the sequences first.
  if options.plot_heats:
    # plot filter-sequence heatmap
    plot_filter_seq_heat(filter_outs, '%s/filter_seqs.pdf' % options.out_dir)

    # plot filter-segment heatmap
    plot_filter_seg_heat(filter_outs, '%s/filter_segs.pdf' % options.out_dir)
    plot_filter_seg_heat(
        filter_outs, '%s/filter_segs_raw.pdf' % options.out_dir, whiten=False)

    # NOTE(review): seq_targets and target_names are not defined anywhere
    # in this function, so the --heat path raises NameError here -- the
    # intended source of target values needs to be confirmed.
    plot_target_corr(filter_outs, seq_targets, filter_names, target_names,
                     '%s/filter_target_cors_mean.pdf' % options.out_dir, 'mean')
    plot_target_corr(filter_outs, seq_targets, filter_names, target_names,
                     '%s/filter_target_cors_max.pdf' % options.out_dir, 'max')
def get_motif_proteins(meme_db_file):
  """ Hash motif_id's to protein names using the MEME DB file.

  MOTIF lines look like 'MOTIF <id> <name>' or 'MOTIF <id> (<name>)';
  the parenthesized form is unwrapped.
  """
  motif_protein = {}
  # context manager closes the DB file (the handle was previously leaked)
  with open(meme_db_file) as meme_db_in:
    for line in meme_db_in:
      a = line.split()
      if len(a) > 0 and a[0] == 'MOTIF':
        if a[2][0] == '(':
          motif_protein[a[1]] = a[2][1:a[2].find(')')]
        else:
          motif_protein[a[1]] = a[2]
  return motif_protein
def info_content(pwm, transpose=False, bg_gc=0.415):
  """ Compute PWM information content against a GC-adjusted background.

  In the original analysis, I used a bg_gc=0.5. For any
  future analysis, I ought to switch to the true hg19
  value of 0.415.
  """
  eps = 1e-9  # pseudocount keeps log2 finite for zero probabilities
  if transpose:
    pwm = np.transpose(pwm)
  bg = [1 - bg_gc, bg_gc, bg_gc, 1 - bg_gc]
  total_ic = 0
  for row in pwm:
    for j in range(4):
      # background entropy term plus observed log-probability term
      total_ic += -bg[j] * np.log2(bg[j]) + row[j] * np.log2(eps + row[j])
  return total_ic
def make_filter_pwm(filter_fasta):
  """ Make a PWM for this filter from its top hits.

  Reads the FASTA of activating subsequences, counts bases per position
  with one pseudocount each (4 pseudo-sites total), and returns
  (pwm_freqs array of shape (width, 4), number of real sites).
  Ambiguous characters contribute 0.25 to each base.
  """
  nts = {'A': 0, 'C': 1, 'G': 2, 'T': 3}
  pwm_counts = []
  nsites = 4  # pseudocounts
  # context manager closes the FASTA (the handle was previously leaked)
  with open(filter_fasta) as fasta_in:
    for line in fasta_in:
      if line[0] != '>':
        seq = line.rstrip()
        nsites += 1
        if len(pwm_counts) == 0:
          # initialize with the length
          for i in range(len(seq)):
            pwm_counts.append(np.array([1.0] * 4))

        # count
        for i in range(len(seq)):
          try:
            pwm_counts[i][nts[seq[i]]] += 1
          except KeyError:
            # non-ACGT character: spread the count evenly
            pwm_counts[i] += np.array([0.25] * 4)

  # normalize
  pwm_freqs = []
  for i in range(len(pwm_counts)):
    pwm_freqs.append([pwm_counts[i][j] / float(nsites) for j in range(4)])

  return np.array(pwm_freqs), nsites - 4
def meme_add(meme_out, f, filter_pwm, nsites, trim_filters=False):
  """ Append one filter's PWM to an open MEME-format file.

  Attrs:
    meme_out : open file handle for the growing MEME file
    f (int) : filter index #
    filter_pwm (array) : PWM array, shape (width, 4)
    nsites (int) : number of filter sites
    trim_filters (bool) : drop low-information columns from both ends

  Note: the write is guarded by ic_start < ic_end, so motifs trimmed
  (or sized) down to a single column are skipped.
  """
  width = filter_pwm.shape[0]
  if trim_filters:
    ic_t = 0.2

    # advance past uninformative prefix columns
    ic_start = 0
    while ic_start < width and info_content(
        filter_pwm[ic_start:ic_start + 1]) < ic_t:
      ic_start += 1

    # retreat past uninformative suffix columns
    ic_end = width - 1
    while ic_end >= 0 and info_content(filter_pwm[ic_end:ic_end + 1]) < ic_t:
      ic_end -= 1
  else:
    ic_start = 0
    ic_end = width - 1

  if ic_start < ic_end:
    print('MOTIF filter%d' % f, file=meme_out)
    header = 'letter-probability matrix: alength= 4 w= %d nsites= %d' % (
        ic_end - ic_start + 1, nsites)
    print(header, file=meme_out)

    for i in range(ic_start, ic_end + 1):
      print('%.4f %.4f %.4f %.4f' % tuple(filter_pwm[i]), file=meme_out)
    print('', file=meme_out)
def meme_intro(meme_file, seqs):
  """ Open MEME motif format file and print intro.

  Attrs:
    meme_file (str) : filename
    seqs [str] : list of strings for obtaining background freqs

  Returns:
    meme_out : open MEME file handle (caller closes)
  """
  base_index = {'A': 0, 'C': 1, 'G': 2, 'T': 3}

  # count bases across all sequences, starting from one pseudocount each
  nt_counts = [1, 1, 1, 1]
  for seq in seqs:
    for nt in seq:
      idx = base_index.get(nt)
      if idx is not None:
        nt_counts[idx] += 1

  # normalize to frequencies
  nt_sum = float(sum(nt_counts))
  nt_freqs = [count / nt_sum for count in nt_counts]

  # open file for writing
  meme_out = open(meme_file, 'w')

  # print intro material
  print('MEME version 4', file=meme_out)
  print('', file=meme_out)
  print('ALPHABET= ACGT', file=meme_out)
  print('', file=meme_out)
  print('Background letter frequencies:', file=meme_out)
  print('A %.4f C %.4f G %.4f T %.4f' % tuple(nt_freqs), file=meme_out)
  print('', file=meme_out)

  return meme_out
def name_filters(num_filters, tomtom_file, meme_db_file):
  """ Name the filters using Tomtom matches.

  Attrs:
    num_filters (int) : total number of filters
    tomtom_file (str) : filename of Tomtom output table.
    meme_db_file (str) : filename of MEME db

  Returns:
    np.array of names 'f<i>', extended with '_<protein>' for filters
    that have at least one Tomtom hit (best hit = smallest q-value).
  """
  # default: name by number
  names = ['f%d' % fi for fi in range(num_filters)]

  # name by protein when annotation inputs are available
  if tomtom_file is not None and meme_db_file is not None:
    motif_protein = get_motif_proteins(meme_db_file)

    # collect (qval, motif_id) hits per filter index
    hits = {}
    tt_in = open(tomtom_file)
    tt_in.readline()  # skip header
    for line in tt_in:
      cols = line.split()
      if line[0] != '#' and len(cols) > 0:
        fi = int(cols[0][6:])  # strip the 'filter' prefix
        hits.setdefault(fi, []).append((float(cols[5]), cols[1]))
    tt_in.close()

    # append each filter's best match (minimum q-value)
    for fi in hits:
      best_motif = sorted(hits[fi])[0][1]
      names[fi] += '_%s' % motif_protein[best_motif]

  return np.array(names)
################################################################################
# plot_target_corr
#
# Plot a clustered heatmap of correlations between filter activations and
# targets.
#
# Input
# filter_outs:
# filter_names:
# target_names:
# out_pdf:
################################################################################
def plot_target_corr(filter_outs, seq_targets, filter_names, target_names, out_pdf, seq_op='mean'):
  """Clustered heatmap of Spearman correlations between per-sequence
  filter activations (mean or max over positions) and target values.
  Zero-variance filters are dropped before correlating."""
  num_seqs = filter_outs.shape[0]
  num_targets = len(target_names)

  # reduce positions to one score per (sequence, filter)
  if seq_op == 'mean':
    seq_scores = filter_outs.mean(axis=2)
  else:
    seq_scores = filter_outs.max(axis=2)

  # drop filters with no variance across sequences
  live_mask = seq_scores.std(axis=0) > 0
  seq_scores = seq_scores[:, live_mask]
  live_names = filter_names[live_mask]

  cors = np.zeros((len(live_names), num_targets))
  for fi in range(len(live_names)):
    for ti in range(num_targets):
      cor, _ = spearmanr(seq_scores[:, fi], seq_targets[:num_seqs, ti])
      cors[fi, ti] = cor

  cor_df = pd.DataFrame(cors, index=live_names, columns=target_names)

  sns.set(font_scale=0.3)
  plt.figure()
  sns.clustermap(cor_df, cmap='BrBG', center=0, figsize=(8, 10))
  plt.savefig(out_pdf)
  plt.close()
################################################################################
# plot_filter_seq_heat
#
# Plot a clustered heatmap of filter activations in
#
# Input
# param_matrix: np.array of the filter's parameter matrix
# out_pdf:
################################################################################
def plot_filter_seq_heat(filter_outs, out_pdf, whiten=True, drop_dead=True):
  """Clustered heatmap of mean filter activations per sequence, with a
  random 500-sequence downsample for plotting."""
  # mean activation per (sequence, filter)
  seq_means = filter_outs.mean(axis=2)

  # optionally whiten per filter
  if whiten:
    seq_means = preprocessing.scale(seq_means)

  # filters become rows
  heat = np.transpose(seq_means)

  # drop filters with no variance across sequences
  if drop_dead:
    heat = heat[heat.std(axis=1) > 0]

  # downsample sequences for a plottable matrix
  sample_i = np.random.randint(0, heat.shape[1], 500)

  hmin = np.percentile(heat[:, sample_i], 0.1)
  hmax = np.percentile(heat[:, sample_i], 99.9)

  sns.set(font_scale=0.3)
  plt.figure()
  sns.clustermap(
      heat[:, sample_i],
      row_cluster=True,
      col_cluster=True,
      linewidths=0,
      xticklabels=False,
      vmin=hmin,
      vmax=hmax)
  plt.savefig(out_pdf)
  plt.close()
################################################################################
# plot_filter_seq_heat
#
# Plot a clustered heatmap of filter activations in sequence segments.
#
# Mean doesn't work well for the smaller segments for some reason, but taking
# the max looks OK. Still, similar motifs don't cluster quite as well as you
# might expect.
#
# Input
# filter_outs
################################################################################
def plot_filter_seg_heat(filter_outs, out_pdf, whiten=True, drop_dead=True):
  """Clustered heatmap of max filter activations over sequence segments.

  NOTE(review): this indexes filter_outs as (batch, filters, length),
  while main() produces activations indexed [:, :, f] -- confirm the
  expected layout before use.
  """
  b = filter_outs.shape[0]
  f = filter_outs.shape[1]
  l = filter_outs.shape[2]

  # find the smallest segment count >= 5 that evenly divides the length
  # BUGFIX: the original test 'l / float(s) - (l / s) > 0' relied on
  # Python 2 integer division; under Python 3 it never advances s and
  # the reshape below can fail. l % s is correct on both versions.
  s = 5
  while l % s != 0:
    s += 1
  print('%d segments of length %d' % (s, l // s))

  # split into multiple segments (// keeps the reshape dims integral)
  filter_outs_seg = np.reshape(filter_outs, (b, f, s, l // s))

  # max across the segments
  filter_outs_mean = filter_outs_seg.max(axis=3)

  # break each segment into a new instance
  filter_seqs = np.reshape(np.swapaxes(filter_outs_mean, 2, 1), (s * b, f))

  # whiten
  if whiten:
    filter_seqs = preprocessing.scale(filter_seqs)

  # transpose so filters become rows
  filter_seqs = np.transpose(filter_seqs)

  if drop_dead:
    filter_stds = filter_seqs.std(axis=1)
    filter_seqs = filter_seqs[filter_stds > 0]

  # downsample sequences
  seqs_i = np.random.randint(0, filter_seqs.shape[1], 500)

  hmin = np.percentile(filter_seqs[:, seqs_i], 0.1)
  hmax = np.percentile(filter_seqs[:, seqs_i], 99.9)

  sns.set(font_scale=0.3)
  if whiten:
    dist = 'euclidean'
  else:
    dist = 'cosine'

  plt.figure()
  sns.clustermap(
      filter_seqs[:, seqs_i],
      metric=dist,
      row_cluster=True,
      col_cluster=True,
      linewidths=0,
      xticklabels=False,
      vmin=hmin,
      vmax=hmax)
  plt.savefig(out_pdf)
  plt.close()
################################################################################
# filter_motif
#
# Collapse the filter parameter matrix to a single DNA motif.
#
# Input
# param_matrix: np.array of the filter's parameter matrix
# out_pdf:
################################################################################
def filter_motif(param_matrix):
  """Collapse a filter's weight matrix to a consensus DNA string.

  Each column contributes the base (among the first four rows, A/C/G/T)
  with the largest weight, or 'N' when no weight is positive.
  """
  nts = 'ACGT'
  consensus = []
  for v in range(param_matrix.shape[1]):
    col = param_matrix[:4, v]
    best = int(np.argmax(col))  # first maximum, matching a linear scan
    consensus.append(nts[best] if col[best] > 0 else 'N')
  return ''.join(consensus)
################################################################################
# filter_possum
#
# Write a Possum-style motif
#
# Input
# param_matrix: np.array of the filter's parameter matrix
# out_pdf:
################################################################################
def filter_possum(param_matrix, motif_id, possum_file, trim_filters=False, mult=200):
  """Write a filter's weight matrix as a Possum-style motif file.

  Columns whose weight range is below 0.3 are trimmed from both ends
  when trim_filters is set; weights are scaled by mult. Motifs trimmed
  to a single column are skipped (guarded by trim_start < trim_end).
  """
  trim_t = 0.3
  trim_start = 0
  trim_end = param_matrix.shape[1] - 1

  if trim_filters:
    # advance past low-range prefix columns
    while trim_start < param_matrix.shape[1]:
      col = param_matrix[:, trim_start]
      if np.max(col) - np.min(col) >= trim_t:
        break
      trim_start += 1

    # retreat past low-range suffix columns
    while trim_end >= 0:
      col = param_matrix[:, trim_end]
      if np.max(col) - np.min(col) >= trim_t:
        break
      trim_end -= 1

  if trim_start < trim_end:
    with open(possum_file, 'w') as possum_out:
      print('BEGIN GROUP', file=possum_out)
      print('BEGIN FLOAT', file=possum_out)
      print('ID %s' % motif_id, file=possum_out)
      print('AP DNA', file=possum_out)
      print('LE %d' % (trim_end + 1 - trim_start), file=possum_out)
      for ci in range(trim_start, trim_end + 1):
        col_text = ' '.join(['%.2f' % (mult * n)
                             for n in param_matrix[:, ci]])
        print('MA %s' % col_text, file=possum_out)
      print('END', file=possum_out)
      print('END', file=possum_out)
################################################################################
# plot_filter_heat
#
# Plot a heatmap of the filter's parameters.
#
# Input
# param_matrix: np.array of the filter's parameter matrix
# out_pdf:
################################################################################
def plot_filter_heat(param_matrix, out_pdf):
  """Heatmap of a filter's weight matrix, symmetric about zero."""
  weight_range = abs(param_matrix).max()

  sns.set(font_scale=2)
  plt.figure(figsize=(param_matrix.shape[1], 4))
  sns.heatmap(
      param_matrix,
      cmap='PRGn',
      linewidths=0.2,
      vmin=-weight_range,
      vmax=weight_range)
  ax = plt.gca()
  ax.set_xticklabels(range(1, param_matrix.shape[1] + 1))
  ax.set_yticklabels('TGCA', rotation='horizontal')
  plt.savefig(out_pdf)
  plt.close()
################################################################################
# plot_filter_logo
#
# Plot a weblogo of the filter's occurrences
#
# Input
# param_matrix: np.array of the filter's parameter matrix
# out_pdf:
################################################################################
def plot_filter_logo(filter_outs, filter_size, seqs, out_prefix, maxpct_t=None, raw_t=0):
  """Write a FASTA of strongly activating subsequences and render a weblogo.

  Positions whose activation exceeds the threshold (raw_t, or maxpct_t
  of the mean-centered max) contribute a filter_size-wide kmer centered
  on the position, N-padded at sequence boundaries. The weblogo command
  only runs when at least one kmer was written.
  """
  print(out_prefix)

  if maxpct_t is not None:
    all_outs = np.ravel(filter_outs)
    all_outs_mean = all_outs.mean()
    all_outs_norm = all_outs - all_outs_mean
    raw_t = maxpct_t * all_outs_norm.max() + all_outs_mean

  left_pad = (filter_size - 1) // 2
  # (an unused right_pad local was removed)

  # print fasta file of positive outputs
  # context manager closes the FASTA (the handle was previously leaked)
  filter_count = 0
  with open('%s.fa' % out_prefix, 'w') as filter_fasta_out:
    for i in range(filter_outs.shape[0]):
      for j in np.where(filter_outs[i] > raw_t)[0]:
        # construct kmer
        kmer = ''

        # determine boundaries, considering padding
        fstart = j - left_pad
        fend = fstart + filter_size

        # if it starts in left_pad
        if fstart < 0:
          kmer += 'N' * (-fstart)
          fstart = 0

        # add primary sequence
        kmer += seqs[i][fstart:fend]

        # if it ends in right_pad
        if fend > len(seqs[i]):
          kmer += 'N' * (fend - len(seqs[i]))

        # output
        print('>%d_%d' % (i, j), file=filter_fasta_out)
        print(kmer, file=filter_fasta_out)
        filter_count += 1

  # make weblogo
  if filter_count > 0:
    weblogo_cmd = 'weblogo %s < %s.fa > %s.eps' % (weblogo_opts, out_prefix,
                                                   out_prefix)
    subprocess.call(weblogo_cmd, shell=True)
################################################################################
# plot_score_density
#
# Plot the score density and print to the stats table.
#
# Input
# param_matrix: np.array of the filter's parameter matrix
# out_pdf:
################################################################################
def plot_score_density(f_scores, out_pdf):
  """Plot a histogram of filter activation scores and save to out_pdf."""
  sns.set(font_scale=1.3)
  plt.figure()
  # kde=False: raw counts only, no kernel density overlay
  sns.distplot(f_scores, kde=False)
  plt.xlabel('ReLU output')
  plt.savefig(out_pdf)
  plt.close()
################################################################################
# __main__
################################################################################
if __name__ == '__main__':
  # script entry point
  main()
  # pdb.runcall(main)
| 30.722013 | 105 | 0.598796 |
cc003bdaa9004be119e24938c3f5502de2563b0f | 347 | py | Python | devscripts/import_all.py | RealNethical/python3-android | 203247f3adaee49fb867c2dbe07704443f043e31 | [
"0BSD"
] | 75 | 2015-11-26T13:20:04.000Z | 2022-03-31T04:02:19.000Z | devscripts/import_all.py | HACKKIMSE/python3-android | a3f6c420ecf91e330a78506edd30a5e37217d1a0 | [
"WTFPL"
] | 48 | 2017-05-18T17:18:52.000Z | 2022-02-25T12:36:01.000Z | devscripts/import_all.py | HACKKIMSE/python3-android | a3f6c420ecf91e330a78506edd30a5e37217d1a0 | [
"WTFPL"
] | 29 | 2016-08-26T09:36:49.000Z | 2022-03-01T02:14:12.000Z | import sys
import os.path
# Directory holding the interpreter's compiled extension modules.
mod_path = os.path.join(
    sys.prefix,
    'lib/python%d.%d/lib-dynload' % sys.version_info[:2])

# Try importing every extension module; report the ones that fail.
for mod_filename in os.listdir(mod_path):
    mod_name = mod_filename.split('.')[0]
    try:
        __import__(mod_name)
    except ImportError as err:
        print(mod_name)
        print(err)
| 23.133333 | 79 | 0.645533 |
318fe1f115b34b5bf50fc46bc7dd5d7cbdaf276b | 2,645 | py | Python | tests/unit/docs/test_client.py | ScriptSmith/boto3 | 3dfa3d7d5eeb9649243a4e2a43c1e78501872ae7 | [
"Apache-2.0"
] | 1 | 2019-04-19T07:11:42.000Z | 2019-04-19T07:11:42.000Z | tests/unit/docs/test_client.py | ScriptSmith/boto3 | 3dfa3d7d5eeb9649243a4e2a43c1e78501872ae7 | [
"Apache-2.0"
] | null | null | null | tests/unit/docs/test_client.py | ScriptSmith/boto3 | 3dfa3d7d5eeb9649243a4e2a43c1e78501872ae7 | [
"Apache-2.0"
] | 2 | 2020-04-14T17:26:55.000Z | 2020-06-19T18:40:47.000Z | # Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from tests.unit.docs import BaseDocsTest
from boto3.docs.client import Boto3ClientDocumenter
class TestBoto3ClientDocumenter(BaseDocsTest):
    """Exercises Boto3ClientDocumenter against the fixture MyService client."""

    def setUp(self):
        super(TestBoto3ClientDocumenter, self).setUp()
        # Documenter under test, bound to the fixture client from BaseDocsTest.
        self.client_documenter = Boto3ClientDocumenter(self.client)

    def test_document_client(self):
        # Render the client documentation, then verify that the generated
        # reST contains every expected line, in order.
        self.client_documenter.document_client(self.doc_structure)
        expected_lines = [
            '======',
            'Client',
            '======',
            '.. py:class:: MyService.Client',
            ' A low-level client representing AWS MyService::',
            ' import boto3',
            ' client = boto3.client(\'myservice\')',
            ' These are the available methods:',
            ' * :py:meth:`~MyService.Client.can_paginate`',
            ' * :py:meth:`~MyService.Client.get_paginator`',
            ' * :py:meth:`~MyService.Client.get_waiter`',
            ' * :py:meth:`~MyService.Client.sample_operation`',
            ' .. py:method:: can_paginate(operation_name)',
            ' .. py:method:: get_paginator(operation_name)',
            ' .. py:method:: get_waiter(waiter_name)',
            ' .. py:method:: sample_operation(**kwargs)',
            ' **Request Syntax**',
            ' ::',
            ' response = client.sample_operation(',
            ' Foo=\'string\'',
            ' Bar=\'string\'',
            ' )',
            ' :type Foo: string',
            ' :param Foo: Documents Foo',
            ' :type Bar: string',
            ' :param Bar: Documents Bar',
            ' :rtype: dict',
            ' :returns:',
            ' **Response Syntax**',
            ' ::',
            ' {',
            ' \'Foo\': \'string\'',
            ' \'Bar\': \'string\'',
            ' }',
            ' **Response Structure**',
            ' - *(dict) --*',
            ' - **Foo** *(string) --*',
            ' - **Bar** *(string) --*'
        ]
        self.assert_contains_lines_in_order(expected_lines)
| 39.477612 | 73 | 0.519093 |
14e969147889319f74087b8fd552750a7ee53edd | 1,405 | py | Python | setup.py | zoidbergwill/RegistroBR | c867d7e17692471a2c3ec38fbfbd449112d63d03 | [
"BSD-2-Clause"
] | 1 | 2017-04-30T06:33:23.000Z | 2017-04-30T06:33:23.000Z | setup.py | zoidbergwill/RegistroBR | c867d7e17692471a2c3ec38fbfbd449112d63d03 | [
"BSD-2-Clause"
] | null | null | null | setup.py | zoidbergwill/RegistroBR | c867d7e17692471a2c3ec38fbfbd449112d63d03 | [
"BSD-2-Clause"
] | null | null | null | import os
from setuptools import setup
# Utility function to read the README file.
# Used for the long_description. It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...
def read(fname):
    """Return the contents of *fname*, resolved relative to this file.

    The file is opened with a context manager so the handle is closed
    even if reading raises; the original implementation leaked the
    open file object.
    """
    with open(os.path.join(os.path.dirname(__file__), fname)) as f:
        return f.read()
# Collect the distribution metadata first, then hand everything to
# setuptools in a single call.
metadata = dict(
    name='RegistroBR',
    version='0.5',
    author='Code originally by RegistroBR. Modified by William Stewart',
    author_email='zoidbergwill@gmail.com',
    description=('A custom library for contacting the RegistroBR API'),
    license='Custom Registro.br License',
    keywords='registro.br .com.br domains',
    url='https://github.com/zoidbergwill/RegistroBR',
    packages=['RegistroBR'],
    long_description=read('README.md'),
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Environment :: Console',
        'Intended Audience :: Customer Service',
        'Intended Audience :: Developers',
        'Intended Audience :: End Users/Desktop',
        'Intended Audience :: Information Technology',
        'License :: OSI Approved :: MIT License',
        'Natural Language :: English',
        'Programming Language :: Python :: 2.4',
        'Programming Language :: Python :: 2.5',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
    ],
)
setup(**metadata)
| 36.025641 | 79 | 0.654093 |
6466fa7b9551be75d7514d499bd348b08ed0e6b3 | 1,272 | py | Python | websockify/setup.py | rgschmitz1/BioDepot-workflow-builder | f74d904eeaf91ec52ec9b703d9fb38e9064e5a66 | [
"MIT"
] | 54 | 2017-01-08T17:21:49.000Z | 2021-11-02T08:46:07.000Z | websockify/setup.py | Synthia-3/BioDepot-workflow-builder | 4ee93abe2d79465755e82a145af3b6a6e1e79fd4 | [
"MIT"
] | 22 | 2017-03-28T06:03:14.000Z | 2021-07-28T05:43:55.000Z | websockify/setup.py | Synthia-3/BioDepot-workflow-builder | 4ee93abe2d79465755e82a145af3b6a6e1e79fd4 | [
"MIT"
] | 21 | 2017-01-26T21:12:09.000Z | 2022-01-31T21:34:59.000Z | from setuptools import setup, find_packages
# Distribution metadata for the websockify package.
version = '0.9.0'
name = 'websockify'

# Build the long description from the README plus the changelog.
# Context managers ensure the file handles are closed (the previous
# one-liner leaked both handles).
with open("README.md") as readme_file:
    long_description = readme_file.read() + "\n"
with open("CHANGES.txt") as changes_file:
    long_description += changes_file.read() + "\n"

setup(name=name,
      version=version,
      description="Websockify.",
      long_description=long_description,
      long_description_content_type="text/markdown",
      # NOTE: the explicit trailing commas below fix an implicit string
      # concatenation bug that previously collapsed the six minor-version
      # classifiers into a single bogus entry.
      classifiers=[
          "Programming Language :: Python",
          "Programming Language :: Python :: 3",
          "Programming Language :: Python :: 3 :: Only",
          "Programming Language :: Python :: 3.4",
          "Programming Language :: Python :: 3.5",
          "Programming Language :: Python :: 3.6",
          "Programming Language :: Python :: 3.7",
          "Programming Language :: Python :: 3.8",
          "Programming Language :: Python :: 3.9",
      ],
      keywords='noVNC websockify',
      license='LGPLv3',
      url="https://github.com/novnc/websockify",
      author="Joel Martin",
      author_email="github@martintribe.org",
      packages=['websockify'],
      include_package_data=True,
      install_requires=['numpy'],
      zip_safe=False,
      entry_points={
          'console_scripts': [
              'websockify = websockify.websocketproxy:websockify_init',
          ]
      },
      )
| 31.8 | 69 | 0.595126 |
f231c23980e358e7ad31e77472a7b65528b51fac | 14,297 | py | Python | third_party/maya/lib/usdMaya/testenv/testUsdExportRfMLight.py | dongyifan/USD | 61c6685d05d777a973539ba2a28f6f8deb5c042f | [
"Unlicense"
] | null | null | null | third_party/maya/lib/usdMaya/testenv/testUsdExportRfMLight.py | dongyifan/USD | 61c6685d05d777a973539ba2a28f6f8deb5c042f | [
"Unlicense"
] | null | null | null | third_party/maya/lib/usdMaya/testenv/testUsdExportRfMLight.py | dongyifan/USD | 61c6685d05d777a973539ba2a28f6f8deb5c042f | [
"Unlicense"
] | null | null | null | #!/pxrpythonsubst
#
# Copyright 2017 Pixar
#
# Licensed under the Apache License, Version 2.0 (the "Apache License")
# with the following modification; you may not use this file except in
# compliance with the Apache License and the following modification to it:
# Section 6. Trademarks. is deleted and replaced with:
#
# 6. Trademarks. This License does not grant permission to use the trade
# names, trademarks, service marks, or product names of the Licensor
# and its affiliates, except as required to comply with Section 4(c) of
# the License and to reproduce the content of the NOTICE file.
#
# You may obtain a copy of the Apache License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the Apache License with the above modification is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the Apache License for the specific
# language governing permissions and limitations under the Apache License.
#
import os
import unittest
from pxr import Gf
from pxr import Usd
from pxr import UsdGeom
from pxr import UsdLux
from pxr import UsdRi
from maya import cmds
from maya import standalone
class testUsdExportRfMLight(unittest.TestCase):
    """Verifies that RenderMan for Maya lights export as UsdLux/UsdRi prims.

    setUpClass exports the Maya test scene once; each _Validate* helper
    checks one exported prim's attributes against the values authored in
    the scene (each light type encodes a distinct "test number" in its
    attribute values).
    """

    START_TIMECODE = 1.0
    END_TIMECODE = 5.0

    @classmethod
    def setUpClass(cls):
        """Open the Maya test scene, export it to USD, and open the stage."""
        standalone.initialize('usd')

        mayaFile = os.path.abspath('RfMLightsTest.ma')
        cmds.file(mayaFile, open=True, force=True)

        # Export to USD.
        usdFilePath = os.path.abspath('RfMLightsTest.usda')

        cmds.loadPlugin('pxrUsd')

        cmds.usdExport(mergeTransformAndShape=True, file=usdFilePath,
            shadingMode='pxrRis',
            frameRange=(cls.START_TIMECODE, cls.END_TIMECODE))

        cls._stage = Usd.Stage.Open(usdFilePath)

    @classmethod
    def tearDownClass(cls):
        standalone.uninitialize()

    def testStageOpens(self):
        """
        Tests that the USD stage was opened successfully.
        """
        self.assertTrue(self._stage)

        self.assertEqual(self._stage.GetStartTimeCode(), self.START_TIMECODE)
        self.assertEqual(self._stage.GetEndTimeCode(), self.END_TIMECODE)

    def _ValidateUsdLuxLight(self, lightTypeName):
        """Check schema type and common light attributes for one light prim."""
        primPathFormat = '/RfMLightsTest/Lights/%s'

        lightPrimPath = primPathFormat % lightTypeName
        lightPrim = self._stage.GetPrimAtPath(lightPrimPath)
        self.assertTrue(lightPrim)

        # Each light type authors values derived from its test number below.
        testNumber = None
        if lightTypeName == 'DiskLight':
            self.assertTrue(lightPrim.IsA(UsdLux.DiskLight))
            testNumber = 1
        elif lightTypeName == 'DistantLight':
            self.assertTrue(lightPrim.IsA(UsdLux.DistantLight))
            testNumber = 2
        elif lightTypeName == 'DomeLight':
            self.assertTrue(lightPrim.IsA(UsdLux.DomeLight))
            testNumber = 3
        elif lightTypeName == 'MeshLight':
            self.assertTrue(lightPrim.IsA(UsdLux.GeometryLight))
            testNumber = 4
        elif lightTypeName == 'RectLight':
            self.assertTrue(lightPrim.IsA(UsdLux.RectLight))
            testNumber = 5
        elif lightTypeName == 'SphereLight':
            self.assertTrue(lightPrim.IsA(UsdLux.SphereLight))
            testNumber = 6
        elif lightTypeName == 'AovLight':
            self.assertTrue(lightPrim.IsA(UsdRi.PxrAovLight))
            testNumber = 7
        elif lightTypeName == 'EnvDayLight':
            self.assertTrue(lightPrim.IsA(UsdRi.PxrEnvDayLight))
            testNumber = 8
        else:
            raise NotImplementedError('Invalid light type %s' % lightTypeName)

        lightSchema = UsdLux.Light(lightPrim)
        self.assertTrue(lightSchema)

        if lightTypeName == 'AovLight':
            # PxrAovLight doesn't have any of the below attributes.
            return

        expectedIntensity = 1.0 + (testNumber * 0.1)
        self.assertTrue(Gf.IsClose(lightSchema.GetIntensityAttr().Get(),
            expectedIntensity, 1e-6))

        expectedExposure = 0.1 * testNumber
        self.assertTrue(Gf.IsClose(lightSchema.GetExposureAttr().Get(),
            expectedExposure, 1e-6))

        expectedDiffuse = 1.0 + (testNumber * 0.1)
        self.assertTrue(Gf.IsClose(lightSchema.GetDiffuseAttr().Get(),
            expectedDiffuse, 1e-6))

        expectedSpecular = 1.0 + (testNumber * 0.1)
        self.assertTrue(Gf.IsClose(lightSchema.GetSpecularAttr().Get(),
            expectedSpecular, 1e-6))

        if lightTypeName == 'EnvDayLight':
            # PxrEnvDayLight doesn't have any of the below attributes.
            return

        if lightTypeName == 'DomeLight':
            # PxrDomeLight has no normalize attribute
            self.assertFalse(
                lightSchema.GetNormalizeAttr().HasAuthoredValue())
        else:
            expectedNormalize = True
            self.assertEqual(lightSchema.GetNormalizeAttr().Get(),
                expectedNormalize)

        expectedColor = Gf.Vec3f(0.1 * testNumber)
        self.assertTrue(Gf.IsClose(lightSchema.GetColorAttr().Get(),
            expectedColor, 1e-6))

        expectedEnableTemperature = True
        self.assertEqual(lightSchema.GetEnableColorTemperatureAttr().Get(),
            expectedEnableTemperature)

        expectedTemperature = 6500.0 + testNumber
        self.assertTrue(Gf.IsClose(lightSchema.GetColorTemperatureAttr().Get(),
            expectedTemperature, 1e-6))

    def _ValidateDiskLightXformAnimation(self):
        """Check the disk light's animated translate op across the frame range."""
        lightPrimPath = '/RfMLightsTest/Lights/DiskLight'
        lightPrim = self._stage.GetPrimAtPath(lightPrimPath)
        self.assertTrue(lightPrim)

        diskLight = UsdLux.DiskLight(lightPrim)
        self.assertTrue(diskLight)

        xformOps = diskLight.GetOrderedXformOps()
        self.assertEqual(len(xformOps), 1)

        translateOp = xformOps[0]

        self.assertEqual(translateOp.GetOpName(), 'xformOp:translate')
        self.assertEqual(translateOp.GetOpType(), UsdGeom.XformOp.TypeTranslate)

        # range (not Python-2-only xrange) keeps this compatible with both
        # Python 2 and Python 3; the loop is small so the py2 list is cheap.
        for frame in range(int(self.START_TIMECODE), int(self.END_TIMECODE + 1.0)):
            expectedTranslation = Gf.Vec3d(1.0, float(frame), 1.0)
            self.assertTrue(
                Gf.IsClose(translateOp.Get(frame), expectedTranslation, 1e-6))

    def _ValidateUsdLuxDistantLightAngle(self):
        """Check the distant light's authored angle attribute."""
        lightPrimPath = '/RfMLightsTest/Lights/DistantLight'
        lightPrim = self._stage.GetPrimAtPath(lightPrimPath)
        self.assertTrue(lightPrim)

        distantLight = UsdLux.DistantLight(lightPrim)
        self.assertTrue(distantLight)

        expectedAngle = 0.73
        self.assertTrue(Gf.IsClose(distantLight.GetAngleAttr().Get(),
            expectedAngle, 1e-6))

    def _ValidateUsdLuxRectLightTextureFile(self):
        """Check the rect light's texture file path."""
        lightPrimPath = '/RfMLightsTest/Lights/RectLight'
        lightPrim = self._stage.GetPrimAtPath(lightPrimPath)
        self.assertTrue(lightPrim)

        rectLight = UsdLux.RectLight(lightPrim)
        self.assertTrue(rectLight)

        expectedTextureFile = './RectLight_texture.tex'
        self.assertEqual(rectLight.GetTextureFileAttr().Get(),
            expectedTextureFile)

    def _ValidateUsdLuxDomeLightTextureFile(self):
        """Check the dome light's texture file path."""
        lightPrimPath = '/RfMLightsTest/Lights/DomeLight'
        lightPrim = self._stage.GetPrimAtPath(lightPrimPath)
        self.assertTrue(lightPrim)

        domeLight = UsdLux.DomeLight(lightPrim)
        self.assertTrue(domeLight)

        expectedTextureFile = './DomeLight_texture.tex'
        self.assertEqual(domeLight.GetTextureFileAttr().Get(),
            expectedTextureFile)

    def _ValidateUsdRiPxrAovLight(self):
        """Check the PxrAovLight-specific attributes."""
        lightPrimPath = '/RfMLightsTest/Lights/AovLight'
        lightPrim = self._stage.GetPrimAtPath(lightPrimPath)
        self.assertTrue(lightPrim)

        aovLight = UsdRi.PxrAovLight(lightPrim)
        self.assertTrue(aovLight)

        expectedAovName = 'testAovName'
        self.assertEqual(aovLight.GetAovNameAttr().Get(), expectedAovName)

        expectedInPrimaryHit = False
        self.assertEqual(aovLight.GetInPrimaryHitAttr().Get(),
            expectedInPrimaryHit)

        expectedInReflection = True
        self.assertEqual(aovLight.GetInReflectionAttr().Get(),
            expectedInReflection)

        expectedInRefraction = True
        self.assertEqual(aovLight.GetInRefractionAttr().Get(),
            expectedInRefraction)

        expectedInvert = True
        self.assertEqual(aovLight.GetInvertAttr().Get(), expectedInvert)

        expectedOnVolumeBoundaries = False
        self.assertEqual(aovLight.GetOnVolumeBoundariesAttr().Get(),
            expectedOnVolumeBoundaries)

        expectedUseColor = True
        self.assertEqual(aovLight.GetUseColorAttr().Get(), expectedUseColor)

        expectedUseThroughput = False
        self.assertEqual(aovLight.GetUseThroughputAttr().Get(),
            expectedUseThroughput)

    def _ValidateUsdRiPxrEnvDayLight(self):
        """Check the PxrEnvDayLight-specific attributes."""
        lightPrimPath = '/RfMLightsTest/Lights/EnvDayLight'
        lightPrim = self._stage.GetPrimAtPath(lightPrimPath)
        self.assertTrue(lightPrim)

        envDayLight = UsdRi.PxrEnvDayLight(lightPrim)
        self.assertTrue(envDayLight)

        expectedDay = 8
        self.assertEqual(envDayLight.GetDayAttr().Get(), expectedDay)

        expectedHaziness = 1.8
        self.assertTrue(Gf.IsClose(envDayLight.GetHazinessAttr().Get(),
            expectedHaziness, 1e-6))

        expectedHour = 8.8
        self.assertTrue(Gf.IsClose(envDayLight.GetHourAttr().Get(),
            expectedHour, 1e-6))

        expectedLatitude = 80.0
        self.assertTrue(Gf.IsClose(envDayLight.GetLatitudeAttr().Get(),
            expectedLatitude, 1e-6))

        expectedLongitude = -80.0
        self.assertTrue(Gf.IsClose(envDayLight.GetLongitudeAttr().Get(),
            expectedLongitude, 1e-6))

        expectedMonth = 8
        self.assertEqual(envDayLight.GetMonthAttr().Get(), expectedMonth)

        expectedSkyTint = Gf.Vec3f(0.8)
        self.assertTrue(Gf.IsClose(envDayLight.GetSkyTintAttr().Get(),
            expectedSkyTint, 1e-6))

        expectedSunDirection = Gf.Vec3f(0.0, 0.0, 0.8)
        self.assertTrue(Gf.IsClose(envDayLight.GetSunDirectionAttr().Get(),
            expectedSunDirection, 1e-6))

        expectedSunSize = 0.8
        self.assertTrue(Gf.IsClose(envDayLight.GetSunSizeAttr().Get(),
            expectedSunSize, 1e-6))

        expectedSunTint = Gf.Vec3f(0.8)
        self.assertTrue(Gf.IsClose(envDayLight.GetSunTintAttr().Get(),
            expectedSunTint, 1e-6))

        expectedYear = 2018
        self.assertEqual(envDayLight.GetYearAttr().Get(), expectedYear)

        expectedZone = 8.0
        self.assertTrue(Gf.IsClose(envDayLight.GetZoneAttr().Get(),
            expectedZone, 1e-6))

    def _ValidateUsdLuxShapingAPI(self):
        """Check the shaping (spotlight/IES profile) attributes on the disk light."""
        lightPrimPath = '/RfMLightsTest/Lights/DiskLight'
        lightPrim = self._stage.GetPrimAtPath(lightPrimPath)
        self.assertTrue(lightPrim)

        self.assertTrue(lightPrim.HasAPI(UsdLux.ShapingAPI))

        shapingAPI = UsdLux.ShapingAPI(lightPrim)
        self.assertTrue(shapingAPI)

        expectedFocus = 0.1
        self.assertTrue(Gf.IsClose(shapingAPI.GetShapingFocusAttr().Get(),
            expectedFocus, 1e-6))

        expectedFocusTint = Gf.Vec3f(0.1)
        self.assertTrue(Gf.IsClose(shapingAPI.GetShapingFocusTintAttr().Get(),
            expectedFocusTint, 1e-6))

        expectedConeAngle = 91.0
        self.assertTrue(Gf.IsClose(shapingAPI.GetShapingConeAngleAttr().Get(),
            expectedConeAngle, 1e-6))

        expectedConeSoftness = 0.1
        self.assertTrue(Gf.IsClose(shapingAPI.GetShapingConeSoftnessAttr().Get(),
            expectedConeSoftness, 1e-6))

        expectedProfilePath = './DiskLight_profile.ies'
        self.assertEqual(shapingAPI.GetShapingIesFileAttr().Get(),
            expectedProfilePath)

        expectedProfileScale = 1.1
        self.assertTrue(Gf.IsClose(shapingAPI.GetShapingIesAngleScaleAttr().Get(),
            expectedProfileScale, 1e-6))

    def _ValidateUsdLuxShadowAPI(self):
        """Check the shadow attributes on the rect light."""
        lightPrimPath = '/RfMLightsTest/Lights/RectLight'
        lightPrim = self._stage.GetPrimAtPath(lightPrimPath)
        self.assertTrue(lightPrim)

        self.assertTrue(lightPrim.HasAPI(UsdLux.ShadowAPI))

        shadowAPI = UsdLux.ShadowAPI(lightPrim)
        self.assertTrue(shadowAPI)

        # Shadows are enabled by default, and we author sparsely, so there
        # should NOT be an opinion.
        self.assertFalse(
            shadowAPI.GetShadowEnableAttr().HasAuthoredValue())

        expectedShadowColor = Gf.Vec3f(0.5)
        self.assertTrue(Gf.IsClose(shadowAPI.GetShadowColorAttr().Get(),
            expectedShadowColor, 1e-6))

        expectedShadowDistance = -0.5
        self.assertTrue(Gf.IsClose(shadowAPI.GetShadowDistanceAttr().Get(),
            expectedShadowDistance, 1e-6))

        expectedShadowFalloff = -0.5
        self.assertTrue(Gf.IsClose(shadowAPI.GetShadowFalloffAttr().Get(),
            expectedShadowFalloff, 1e-6))

        expectedShadowFalloffGamma = 0.5
        self.assertTrue(Gf.IsClose(shadowAPI.GetShadowFalloffGammaAttr().Get(),
            expectedShadowFalloffGamma, 1e-6))

    def testExportRenderManForMayaLights(self):
        """
        Tests that RenderMan for Maya lights export as UsdLux schema USD prims
        correctly.
        """
        self._ValidateUsdLuxLight('DiskLight')
        self._ValidateDiskLightXformAnimation()
        self._ValidateUsdLuxLight('DistantLight')
        self._ValidateUsdLuxLight('DomeLight')
        self._ValidateUsdLuxLight('MeshLight')
        self._ValidateUsdLuxLight('RectLight')
        self._ValidateUsdLuxLight('SphereLight')
        self._ValidateUsdLuxLight('AovLight')
        self._ValidateUsdLuxLight('EnvDayLight')

        self._ValidateUsdLuxDistantLightAngle()

        self._ValidateUsdLuxRectLightTextureFile()
        self._ValidateUsdLuxDomeLightTextureFile()

        self._ValidateUsdRiPxrAovLight()
        self._ValidateUsdRiPxrEnvDayLight()

        self._ValidateUsdLuxShapingAPI()
        self._ValidateUsdLuxShadowAPI()
if __name__ == '__main__':
    # Verbosity 2 prints each test method name as it runs.
    unittest.main(verbosity=2)
| 35.83208 | 84 | 0.671539 |
af32f5341c662cd64072c21c18c744de939a994a | 1,276 | py | Python | examples/client.py | desty2k/QtPyNetwork | 63e892370a0a1648646bdfed57fea9689d927494 | [
"MIT"
] | null | null | null | examples/client.py | desty2k/QtPyNetwork | 63e892370a0a1648646bdfed57fea9689d927494 | [
"MIT"
] | null | null | null | examples/client.py | desty2k/QtPyNetwork | 63e892370a0a1648646bdfed57fea9689d927494 | [
"MIT"
] | null | null | null | from qtpy.QtWidgets import QApplication
from qtpy.QtCore import QObject, Slot, QCoreApplication
import sys
import logging
from QtPyNetwork.client import QThreadedClient
IP = "127.0.0.1"
PORT = 12500
class Main(QObject):
    """Drives a demo QThreadedClient session: connect, echo once, then quit."""

    def __init__(self):
        super(Main, self).__init__(None)

    def setup(self):
        """Create the client, wire its signals, and connect to IP:PORT."""
        self.logger = logging.getLogger(self.__class__.__name__)
        self.cln = QThreadedClient()
        # Incoming messages are logged and answered; losing the connection
        # (or failing to establish it) shuts the application down.
        self.cln.message.connect(self.client_data_received)
        self.cln.failed_to_connect.connect(self.close)
        self.cln.disconnected.connect(self.close)
        self.cln.start(IP, PORT)

    @Slot(bytes)
    def client_data_received(self, data: bytes):
        """Log the server's message and send a fixed reply back."""
        self.logger.info(data)
        self.cln.write(b"Kick me plz")

    @Slot()
    def close(self):
        """Stop the client thread, wait for it to finish, then quit Qt."""
        self.cln.close()
        while self.cln.is_running():
            self.cln.wait()
        QApplication.instance().quit()
if __name__ == '__main__':
    # Log everything (NOTSET = all levels) to stderr before starting Qt.
    logging.basicConfig(
        level=logging.NOTSET,
        format="%(asctime)s [%(threadName)s] [%(name)s] [%(levelname)s] %(message)s",
        handlers=[logging.StreamHandler()])
    logging.getLogger().debug("Logger enabled")

    app = QCoreApplication(sys.argv)
    main = Main()
    main.setup()
    sys.exit(app.exec_())
| 24.538462 | 85 | 0.65047 |
ecb919dbda16a63eba609faa589766eff7668a1a | 3,062 | py | Python | TwitterFacebookAPI/TwitterAPI.py | AbdelrahmanTealab/SocialDataMiningTechniques_Assignment2 | 293b2585004853d6b6dae9d52e33fdae7bb6d894 | [
"Apache-2.0"
] | null | null | null | TwitterFacebookAPI/TwitterAPI.py | AbdelrahmanTealab/SocialDataMiningTechniques_Assignment2 | 293b2585004853d6b6dae9d52e33fdae7bb6d894 | [
"Apache-2.0"
] | null | null | null | TwitterFacebookAPI/TwitterAPI.py | AbdelrahmanTealab/SocialDataMiningTechniques_Assignment2 | 293b2585004853d6b6dae9d52e33fdae7bb6d894 | [
"Apache-2.0"
] | null | null | null | from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
import tweepy
import time
import twitter_credentials
import json
import pandas
from pandas.io.json import json_normalize
import csv
class TweetStreamer():
    """Owns a tweepy Stream and collects its tweets into a DataFrame."""

    def __init__(self):
        # Filled in by startStream() once the listener has gathered tweets.
        # (A stray dead ``pass`` after this assignment was removed.)
        self.dataframe = pandas.DataFrame()

    def startStream(self, fetched_tweets_filename, hash_tag_list):
        """Authenticate, print the home timeline, then stream hash_tag_list.

        Blocks until the Listener stops accepting data, then copies the
        listener's accumulated DataFrame onto this object.
        """
        listen = Listener(fetched_tweets_filename)
        auth = OAuthHandler(twitter_credentials.CONSUMER_KEY, twitter_credentials.CONSUMER_SECRET)
        auth.set_access_token(twitter_credentials.ACCESS_TOKEN, twitter_credentials.ACCESS_TOKEN_SECRET)

        api = tweepy.API(auth)

        # Sanity check: echo the authenticated user's home timeline.
        public_tweets = api.home_timeline()
        for tweet in public_tweets:
            print(tweet.text)

        stream = Stream(auth, listen)
        stream.filter(track=hash_tag_list)
        self.dataframe = listen.dataframe
class Listener(StreamListener):
    """Stream listener that collects up to ``countLimit`` tweets.

    Each tweet is flattened with ``json_normalize``, stripped of most
    user-profile columns, appended to ``self.dataframe``, and the whole
    frame is rewritten to ``tweets.csv`` after every tweet.
    """

    def __init__(self, tweetsFilename, time_limit=10):
        self.tweetsFilename = tweetsFilename
        # NOTE(review): start_time/limit are recorded but never checked;
        # collection is bounded by countLimit (tweet count), not time.
        self.start_time = time.time()
        self.limit = time_limit
        self.counter = 0
        self.countLimit = 10
        self.dataframe = pandas.DataFrame()

    def on_data(self, data):
        """Handle one raw tweet; returning False stops the stream."""
        self.counter += 1
        if self.counter <= self.countLimit:
            try:
                jsonData = json.loads(data)
                # Keep only the top-level fields we care about.
                keys = ("id", "user", "text", "created_at", "source")
                jsonData = {k: jsonData[k] for k in keys}
                jsonData = json_normalize(jsonData)
                # Drop the bulky user-profile columns produced by flattening
                # the nested "user" object.
                jsonData = jsonData.drop(
                    ['user.id', 'user.id_str', 'user.name', 'user.location',
                     'user.url', 'user.description', 'user.translator_type',
                     'user.protected', 'user.verified', 'user.followers_count',
                     'user.friends_count', 'user.listed_count',
                     'user.favourites_count', 'user.statuses_count',
                     'user.created_at', 'user.utc_offset', 'user.time_zone',
                     'user.geo_enabled', 'user.lang',
                     'user.contributors_enabled', 'user.is_translator',
                     'user.profile_banner_url', 'user.profile_background_color',
                     'user.profile_background_image_url',
                     'user.profile_background_image_url_https',
                     'user.profile_background_tile', 'user.profile_link_color',
                     'user.profile_sidebar_border_color',
                     'user.profile_sidebar_fill_color',
                     'user.profile_text_color',
                     'user.profile_use_background_image',
                     'user.profile_image_url', 'user.profile_image_url_https',
                     'user.default_profile', 'user.default_profile_image',
                     'user.following', 'user.follow_request_sent',
                     'user.notifications'], axis=1)
                self.dataframe = self.dataframe.append(jsonData)
                print(self.dataframe)
                self.dataframe.to_csv('tweets.csv', index=False)
                return True
            except BaseException as e:
                # Best-effort: report malformed payloads and keep streaming.
                print("Error on_data: %s" % str(e))
                return True
        else:
            return False
        # (An unreachable ``return self.dataframe`` after the exhaustive
        # if/else above was removed.)

    def on_error(self, status):
        print(status)
| 45.701493 | 931 | 0.668844 |
6895962e8b184508b2e45cf403607b131de2554b | 408 | py | Python | build/path_planning/catkin_generated/pkg.develspace.context.pc.py | Tejal-19/Obstacle_avoidance-bot-using-ROS-and-GAZEBO | 91d9d366ff013623774efa24d9ad4be09d90b20b | [
"MIT"
] | null | null | null | build/path_planning/catkin_generated/pkg.develspace.context.pc.py | Tejal-19/Obstacle_avoidance-bot-using-ROS-and-GAZEBO | 91d9d366ff013623774efa24d9ad4be09d90b20b | [
"MIT"
] | null | null | null | build/path_planning/catkin_generated/pkg.develspace.context.pc.py | Tejal-19/Obstacle_avoidance-bot-using-ROS-and-GAZEBO | 91d9d366ff013623774efa24d9ad4be09d90b20b | [
"MIT"
] | null | null | null | # generated from catkin/cmake/template/pkg.context.pc.in
# Auto-generated catkin package context: values are substituted into the
# template at configure time and consumed when the package's pkg-config
# (.pc) file is generated. Edit the CMake configuration, not this file.
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []  # no exported include dirs
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')  # no catkin dependencies declared
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []  # no exported libraries
PROJECT_NAME = "path_planning"
PROJECT_SPACE_DIR = "/home/tejal/Obstacle_avoidance-bot-using-ROS-and-GAZEBO/devel"
PROJECT_VERSION = "0.0.0"
| 45.333333 | 83 | 0.718137 |
5797fb95142f556bade163051f4f51a40e32cda7 | 2,697 | py | Python | youtube_dl/extractor/carambatv.py | LyleH/youtube-dl | 7564b09ef5c09454908f78cb91c3bd2d6daacac5 | [
"Unlicense"
] | null | null | null | youtube_dl/extractor/carambatv.py | LyleH/youtube-dl | 7564b09ef5c09454908f78cb91c3bd2d6daacac5 | [
"Unlicense"
] | null | null | null | youtube_dl/extractor/carambatv.py | LyleH/youtube-dl | 7564b09ef5c09454908f78cb91c3bd2d6daacac5 | [
"Unlicense"
] | null | null | null | # coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from ..compat import compat_str
from ..utils import (
float_or_none,
int_or_none,
try_get,
)
class CarambaTVIE(InfoExtractor):
_VALID_URL = r'(?:carambatv:|https?://video1\.carambatv\.ru/v/)(?P<id>\d+)'
_TESTS = [{
'url': 'http://video1.carambatv.ru/v/191910501',
'md5': '2f4a81b7cfd5ab866ee2d7270cb34a2a',
'info_dict': {
'id': '191910501',
'ext': 'mp4',
'title': '[BadComedian] - Разборка в Маниле (Абсолютный обзор)',
'thumbnail': 're:^https?://.*\.jpg',
'duration': 2678.31,
},
}, {
'url': 'carambatv:191910501',
'only_matching': True,
}]
def _real_extract(self, url):
video_id = self._match_id(url)
video = self._download_json(
'http://video1.carambatv.ru/v/%s/videoinfo.js' % video_id,
video_id)
title = video['title']
base_url = video.get('video') or 'http://video1.carambatv.ru/v/%s/' % video_id
formats = [{
'url': base_url + f['fn'],
'height': int_or_none(f.get('height')),
'format_id': '%sp' % f['height'] if f.get('height') else None,
} for f in video['qualities'] if f.get('fn')]
self._sort_formats(formats)
thumbnail = video.get('splash')
duration = float_or_none(try_get(
video, lambda x: x['annotations'][0]['end_time'], compat_str))
return {
'id': video_id,
'title': title,
'thumbnail': thumbnail,
'duration': duration,
'formats': formats,
}
class CarambaTVPageIE(InfoExtractor):
_VALID_URL = r'https?://carambatv\.ru/(?:[^/]+/)+(?P<id>[^/?#&]+)'
_TEST = {
'url': 'http://carambatv.ru/movie/bad-comedian/razborka-v-manile/',
'md5': '',
'info_dict': {
'id': '191910501',
'ext': 'mp4',
'title': '[BadComedian] - Разборка в Маниле (Абсолютный обзор)',
'thumbnail': 're:^https?://.*\.jpg$',
'duration': 2678.31,
},
}
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
video_url = self._og_search_property('video:iframe', webpage, default=None)
if not video_url:
video_id = self._search_regex(
r'(?:video_id|crmb_vuid)\s*[:=]\s*["\']?(\d+)',
webpage, 'video id')
video_url = 'carambatv:%s' % video_id
return self.url_result(video_url, CarambaTVIE.ie_key())
| 30.303371 | 86 | 0.539488 |
c68e6ec4bee5667cf629efa3fbaca052f6f0d8e0 | 385 | py | Python | api/next/wsgi.py | UilSiqueira/django-rest-api | 1a91a80f32b73ecebf3ac5f7a1c87c45e67832ea | [
"MIT"
] | null | null | null | api/next/wsgi.py | UilSiqueira/django-rest-api | 1a91a80f32b73ecebf3ac5f7a1c87c45e67832ea | [
"MIT"
] | null | null | null | api/next/wsgi.py | UilSiqueira/django-rest-api | 1a91a80f32b73ecebf3ac5f7a1c87c45e67832ea | [
"MIT"
] | null | null | null | """
WSGI config for next project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/wsgi/
"""
import os

from django.core.wsgi import get_wsgi_application

# The settings module must be configured before the WSGI application is
# built; setdefault lets a deployment environment override it.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'next.settings')

# Module-level ``application`` is the entry point WSGI servers look for.
application = get_wsgi_application()
| 22.647059 | 78 | 0.781818 |
601f54d1983a09cf4417a2783ceae01f82fa817b | 3,162 | py | Python | feed_hunter/feed_hunter/settings.py | perna/feed-hunter | e7649d091095797d0869101a418b71cf83a25708 | [
"MIT"
] | 4 | 2018-05-03T20:53:13.000Z | 2021-03-15T05:29:36.000Z | feed_hunter/feed_hunter/settings.py | perna/feed-hunter | e7649d091095797d0869101a418b71cf83a25708 | [
"MIT"
] | 3 | 2021-03-31T18:41:33.000Z | 2022-02-11T03:39:00.000Z | feed_hunter/feed_hunter/settings.py | perna/feed-hunter | e7649d091095797d0869101a418b71cf83a25708 | [
"MIT"
] | 1 | 2018-06-30T13:57:40.000Z | 2018-06-30T13:57:40.000Z | # -*- coding: utf-8 -*-
# Scrapy settings for feed_hunter project
#
# For simplicity, this file contains only settings considered important or
# commonly used. You can find more settings consulting the documentation:
#
# http://doc.scrapy.org/en/latest/topics/settings.html
# http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
# http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
BOT_NAME = 'feed_hunter'

# Scrapy discovers spiders in SPIDER_MODULES and writes newly generated
# spiders into NEWSPIDER_MODULE.
SPIDER_MODULES = ['feed_hunter.spiders']
NEWSPIDER_MODULE = 'feed_hunter.spiders'
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'feed_hunter (+http://www.yourdomain.com)'
# Obey robots.txt rules
ROBOTSTXT_OBEY = True  # honor each site's robots.txt before crawling
# Configure maximum concurrent requests performed by Scrapy (default: 16)
#CONCURRENT_REQUESTS = 6
# Configure a delay for requests for the same website (default: 0)
# See http://scrapy.readthedocs.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
#DOWNLOAD_DELAY = 3
# The download delay setting will honor only one of:
#CONCURRENT_REQUESTS_PER_DOMAIN = 2
#CONCURRENT_REQUESTS_PER_IP = 2
# Disable cookies (enabled by default)
#COOKIES_ENABLED = False
# Disable Telnet Console (enabled by default)
#TELNETCONSOLE_ENABLED = False
# Override the default request headers:
#DEFAULT_REQUEST_HEADERS = {
# 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
# 'Accept-Language': 'en',
#}
# Enable or disable spider middlewares
# See http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
#SPIDER_MIDDLEWARES = {
# 'feed_hunter.middlewares.FeedEaterSpiderMiddleware': 543,
#}
# Enable or disable downloader middlewares
# See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
#DOWNLOADER_MIDDLEWARES = {
# 'feed_hunter.middlewares.MyCustomDownloaderMiddleware': 543,
#}
# Enable or disable extensions
# See http://scrapy.readthedocs.org/en/latest/topics/extensions.html
#EXTENSIONS = {
# 'scrapy.extensions.telnet.TelnetConsole': None,
#}
# Configure item pipelines
# See http://scrapy.readthedocs.org/en/latest/topics/item-pipeline.html
# Route every scraped item through the project's pipeline; 300 is the
# pipeline's priority (lower numbers run earlier).
ITEM_PIPELINES = {
    'feed_hunter.pipelines.FeedEaterPipeline': 300,
}
# Enable and configure the AutoThrottle extension (disabled by default)
# See http://doc.scrapy.org/en/latest/topics/autothrottle.html
# AutoThrottle adapts the crawl rate to each server's observed latency.
AUTOTHROTTLE_ENABLED = True
# The initial download delay
AUTOTHROTTLE_START_DELAY = 5
# The maximum download delay to be set in case of high latencies
AUTOTHROTTLE_MAX_DELAY = 60
# The average number of requests Scrapy should be sending in parallel to
# each remote server
AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
# Enable showing throttling stats for every response received:
AUTOTHROTTLE_DEBUG = True  # verbose; consider disabling outside development
# Enable and configure HTTP caching (disabled by default)
# See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
#HTTPCACHE_ENABLED = True
#HTTPCACHE_EXPIRATION_SECS = 0
#HTTPCACHE_DIR = 'httpcache'
#HTTPCACHE_IGNORE_HTTP_CODES = []
#HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'
| 34.747253 | 109 | 0.781467 |
c2a9dd0489266d5354e48dff421fbd2f32d66990 | 3,577 | py | Python | bindings/python/ensmallen/datasets/string/methanomassiliicoccalesarchaeonrumenm2.py | AnacletoLAB/ensmallen_graph | b2c1b18fb1e5801712852bcc239f239e03076f09 | [
"MIT"
] | 5 | 2021-02-17T00:44:45.000Z | 2021-08-09T16:41:47.000Z | bindings/python/ensmallen/datasets/string/methanomassiliicoccalesarchaeonrumenm2.py | AnacletoLAB/ensmallen_graph | b2c1b18fb1e5801712852bcc239f239e03076f09 | [
"MIT"
] | 18 | 2021-01-07T16:47:39.000Z | 2021-08-12T21:51:32.000Z | bindings/python/ensmallen/datasets/string/methanomassiliicoccalesarchaeonrumenm2.py | AnacletoLAB/ensmallen | b2c1b18fb1e5801712852bcc239f239e03076f09 | [
"MIT"
] | 3 | 2021-01-14T02:20:59.000Z | 2021-08-04T19:09:52.000Z | """
This file offers the methods to automatically retrieve the graph Methanomassiliicoccales archaeon RumEn M2.
The graph is automatically retrieved from the STRING repository.
References
---------------------
Please cite the following if you use the data:
```bib
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
```
"""
from typing import Dict
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen import Graph # pylint: disable=import-error
def MethanomassiliicoccalesArchaeonRumenM2(
    directed: bool = False,
    preprocess: bool = True,
    load_nodes: bool = True,
    verbose: int = 2,
    cache: bool = True,
    cache_path: str = "graphs/string",
    version: str = "links.v11.5",
    **additional_graph_kwargs: Dict
) -> Graph:
    """Return the Methanomassiliicoccales archaeon RumEn M2 graph.

    The graph is automatically retrieved from the STRING repository.

    Parameters
    -------------------
    directed: bool = False
        Whether to load the graph as directed or undirected.
        By default false.
    preprocess: bool = True
        Whether to preprocess the graph to be loaded in
        optimal time and memory.
    load_nodes: bool = True
        Whether to load the nodes vocabulary or treat the nodes
        simply as a numeric range.
    verbose: int = 2
        Whether to show loading bars during the retrieval and building
        of the graph.
    cache: bool = True
        Whether to use cache, i.e. download files only once
        and preprocess them only once.
    cache_path: str = "graphs"
        Where to store the downloaded graphs.
    version: str = "links.v11.5"
        The version of the graph to retrieve.
        The available versions are:
            - homology.v11.5
            - physical.links.v11.5
            - links.v11.5
    additional_graph_kwargs: Dict
        Additional graph kwargs forwarded to the retriever.

    Returns
    -----------------------
    Instance of the Methanomassiliicoccales archaeon RumEn M2 graph.

    References
    ---------------------
    Please cite the following if you use the data:

    ```bib
    @article{szklarczyk2019string,
        title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
        author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
        journal={Nucleic acids research},
        volume={47},
        number={D1},
        pages={D607--D613},
        year={2019},
        publisher={Oxford University Press}
    }
    ```
    """
    # Build the retriever first, then invoke it: the retrieval object is a
    # callable that downloads/preprocesses the data and yields the Graph.
    retriever = AutomaticallyRetrievedGraph(
        graph_name="MethanomassiliicoccalesArchaeonRumenM2",
        repository="string",
        version=version,
        directed=directed,
        preprocess=preprocess,
        load_nodes=load_nodes,
        verbose=verbose,
        cache=cache,
        cache_path=cache_path,
        additional_graph_kwargs=additional_graph_kwargs,
    )
    return retriever()
| 34.066667 | 223 | 0.685491 |
e9c52578cceebabb33befac2b6da72db513e7d8e | 350,531 | py | Python | Python/isp-load-statistics.py | perara/gotham | dc1335a6ece1be8c0771d3474061d44b19810fa8 | [
"Unlicense",
"MIT"
] | null | null | null | Python/isp-load-statistics.py | perara/gotham | dc1335a6ece1be8c0771d3474061d44b19810fa8 | [
"Unlicense",
"MIT"
] | null | null | null | Python/isp-load-statistics.py | perara/gotham | dc1335a6ece1be8c0771d3474061d44b19810fa8 | [
"Unlicense",
"MIT"
] | null | null | null | [{"countryCode": "US", "test_count": "10413572", "ip_addresses": "94920788", "country": "United States", "isps": [{"ip_addresses": "3108448", "label": "Verizon FiOS", "test_count": "485520", "aggregate_date": "2015-05-01", "id": "79831", "index_value": "44582", "rating": "3.8"}, {"ip_addresses": "17247314", "label": "Comcast", "test_count": "2483821", "aggregate_date": "2015-05-01", "id": "20", "index_value": "43968", "rating": "2.9"}, {"ip_addresses": "4400668", "label": "Cox", "test_count": "657771", "aggregate_date": "2015-05-01", "id": "9", "index_value": "43897", "rating": "3.1"}, {"ip_addresses": "6137720", "label": "Time Warner Cable", "test_count": "1969420", "aggregate_date": "2015-05-01", "id": "23508", "index_value": "43839", "rating": "2.5"}, {"ip_addresses": "4117422", "label": "Charter Communications", "test_count": "480617", "aggregate_date": "2015-05-01", "id": "14", "index_value": "42073", "rating": "2.9"}, {"ip_addresses": "2009530", "label": "Optimum Online", "test_count": "216670", "aggregate_date": "2015-05-01", "id": "47677", "index_value": "38963", "rating": "3.2"}, {"ip_addresses": "434460", "label": "Mediacom Cable", "test_count": "152071", "aggregate_date": "2015-05-01", "id": "86125", "index_value": "38493", "rating": "2.3"}, {"ip_addresses": "959190", "label": "Suddenlink Communications", "test_count": "189876", "aggregate_date": "2015-05-01", "id": "14774", "index_value": "37693", "rating": "2.6"}, {"ip_addresses": "448021", "label": "Cable One", "test_count": "73492", "aggregate_date": "2015-05-01", "id": "154", "index_value": "34730", "rating": "2.6"}, {"ip_addresses": "551596", "label": "EarthLink", "test_count": "28268", "aggregate_date": "2015-05-01", "id": "166", "index_value": "22381", "rating": "2.8"}, {"ip_addresses": "444792", "label": "WOW! 
Internet", "test_count": "55460", "aggregate_date": "2015-05-01", "id": "143", "index_value": "21124", "rating": "3.1"}, {"ip_addresses": "4829699", "label": "AT&T U-verse", "test_count": "1151610", "aggregate_date": "2015-05-01", "id": "79832", "index_value": "20439", "rating": "2.4"}, {"ip_addresses": "3330809", "label": "CenturyLink", "test_count": "428063", "aggregate_date": "2015-05-01", "id": "86113", "index_value": "15127", "rating": "2.0"}, {"ip_addresses": "769923", "label": "Verizon Wireless", "test_count": "112082", "aggregate_date": "2015-05-01", "id": "5729", "index_value": "12788", "rating": "2.9"}, {"ip_addresses": "1792674", "label": "Frontier Communications", "test_count": "150821", "aggregate_date": "2015-05-01", "id": "34", "index_value": "11085", "rating": "1.8"}, {"ip_addresses": "1537415", "label": "Windstream Communications", "test_count": "106730", "aggregate_date": "2015-05-01", "id": "14788", "index_value": "8901", "rating": "1.8"}, {"ip_addresses": "8147514", "label": "Sprint PCS", "test_count": "33236", "aggregate_date": "2015-05-01", "id": "102", "index_value": "8705", "rating": "2.8"}, {"ip_addresses": "5897873", "label": "Verizon DSL", "test_count": "133361", "aggregate_date": "2015-05-01", "id": "11", "index_value": "8583", "rating": "2.7"}, {"ip_addresses": "341351", "label": "Clear Wireless", "test_count": "52597", "aggregate_date": "2015-05-01", "id": "89512", "index_value": "4512", "rating": "2.0"}, {"ip_addresses": "3461288", "label": "Bell South", "test_count": "69433", "aggregate_date": "2015-05-01", "id": "26", "index_value": "4248", "rating": "2.1"}], "bandwidth": "35795"}, {"countryCode": "PL", "test_count": "948491", "ip_addresses": "7273491", "country": "Poland", "isps": [{"ip_addresses": "124894", "label": "UPC Polska", "test_count": "72542", "aggregate_date": "2015-05-01", "id": "102896", "index_value": "56348", "rating": "3.6"}, {"ip_addresses": "639891", "label": "UPC Polska", "test_count": "79", "aggregate_date": 
"2015-05-01", "id": "30675", "index_value": "53583", "rating": "3.7"}, {"ip_addresses": "51997", "label": "Inea", "test_count": "14057", "aggregate_date": "2015-05-01", "id": "87754", "index_value": "51442", "rating": "3.3"}, {"ip_addresses": "49412", "label": "ASTER Sp. z.o.o.", "test_count": "8619", "aggregate_date": "2015-05-01", "id": "53970", "index_value": "47811", "rating": "3.5"}, {"ip_addresses": "57089", "label": "Toya sp.z.o.o", "test_count": "8641", "aggregate_date": "2015-05-01", "id": "4034", "index_value": "41290", "rating": "3.8"}, {"ip_addresses": "148055", "label": "Vectra Broadband", "test_count": "37064", "aggregate_date": "2015-05-01", "id": "92110", "index_value": "39769", "rating": "3.2"}, {"ip_addresses": "119041", "label": "Telefonia Dialog sp.z.o.o.", "test_count": "24358", "aggregate_date": "2015-05-01", "id": "79835", "index_value": "30342", "rating": "3.4"}, {"ip_addresses": "32333", "label": "Exatel S.A.", "test_count": "5639", "aggregate_date": "2015-05-01", "id": "54", "index_value": "27828", "rating": "3.6"}, {"ip_addresses": "32204", "label": "Stream Communications Sp. z o.o.", "test_count": "2208", "aggregate_date": "2015-05-01", "id": "4780", "index_value": "26204", "rating": "3.1"}, {"ip_addresses": "45617", "label": "Internetia Sp.z o.o.", "test_count": "6751", "aggregate_date": "2015-05-01", "id": "37695", "index_value": "24600", "rating": "3.2"}, {"ip_addresses": "230286", "label": "Multimedia Polska S.A.", "test_count": "23256", "aggregate_date": "2015-05-01", "id": "73519", "index_value": "22800", "rating": "3.0"}, {"ip_addresses": "43227", "label": "GTS Poland Sp. 
z o.o.", "test_count": "6600", "aggregate_date": "2015-05-01", "id": "58558", "index_value": "21519", "rating": "3.6"}, {"ip_addresses": "588427", "label": "Netia SA", "test_count": "53576", "aggregate_date": "2015-05-01", "id": "41", "index_value": "16177", "rating": "3.0"}, {"ip_addresses": "1309504", "label": "Neostrada Plus", "test_count": "57725", "aggregate_date": "2015-05-01", "id": "43703", "index_value": "15044", "rating": "2.7"}, {"ip_addresses": "32198", "label": "TK Telekom", "test_count": "16287", "aggregate_date": "2015-05-01", "id": "53983", "index_value": "14526", "rating": "3.3"}, {"ip_addresses": "611263", "label": "Polkomtel Sp. z o.o.", "test_count": "50352", "aggregate_date": "2015-05-01", "id": "79231", "index_value": "14503", "rating": "3.0"}, {"ip_addresses": "270648", "label": "Orange Polska", "test_count": "100193", "aggregate_date": "2015-05-01", "id": "102895", "index_value": "13823", "rating": "2.7"}, {"ip_addresses": "285891", "label": "Play", "test_count": "24723", "aggregate_date": "2015-05-01", "id": "95467", "index_value": "13539", "rating": "3.0"}, {"ip_addresses": "144206", "label": "T-mobile Polska Spolka Akcyjna", "test_count": "21470", "aggregate_date": "2015-05-01", "id": "96036", "index_value": "12709", "rating": "2.9"}, {"ip_addresses": "19813", "label": "MNI Telecom S.A.", "test_count": "2203", "aggregate_date": "2015-05-01", "id": "38776", "index_value": "7512", "rating": "2.7"}], "bandwidth": "24739"}, {"countryCode": "IT", "test_count": "2627634", "ip_addresses": "15152831", "country": "Italy", "isps": [{"ip_addresses": "8205", "label": "UNIDATA S.p.A.", "test_count": "2246", "aggregate_date": "2015-05-01", "id": "10989", "index_value": "27850", "rating": "3.3"}, {"ip_addresses": "1580863", "label": "Fastweb", "test_count": "430583", "aggregate_date": "2015-05-01", "id": "486", "index_value": "18114", "rating": "2.7"}, {"ip_addresses": "1072631", "label": "Vodafone Italia DSL", "test_count": "249275", "aggregate_date": 
"2015-05-01", "id": "84978", "index_value": "13897", "rating": "2.2"}, {"ip_addresses": "178204", "label": "Vodafone Italia", "test_count": "9131", "aggregate_date": "2015-05-01", "id": "96309", "index_value": "11524", "rating": "2.4"}, {"ip_addresses": "8888", "label": "Intred", "test_count": "4072", "aggregate_date": "2015-05-01", "id": "30684", "index_value": "10361", "rating": "3.4"}, {"ip_addresses": "25211", "label": "MC-link", "test_count": "5597", "aggregate_date": "2015-05-01", "id": "43719", "index_value": "10072", "rating": "3.1"}, {"ip_addresses": "8724", "label": "TWT S.p.A.", "test_count": "1971", "aggregate_date": "2015-05-01", "id": "14864", "index_value": "9195", "rating": "3.1"}, {"ip_addresses": "5968983", "label": "Telecom Italia", "test_count": "886487", "aggregate_date": "2015-05-01", "id": "1096", "index_value": "9140", "rating": "2.8"}, {"ip_addresses": "78781", "label": "NGI", "test_count": "29557", "aggregate_date": "2015-05-01", "id": "1090", "index_value": "8641", "rating": "3.6"}, {"ip_addresses": "12968", "label": "Skylogic", "test_count": "953", "aggregate_date": "2015-05-01", "id": "19801", "index_value": "8594", "rating": "2.6"}, {"ip_addresses": "1007032", "label": "3 Italia", "test_count": "650", "aggregate_date": "2015-05-01", "id": "28976", "index_value": "7996", "rating": "2.4"}, {"ip_addresses": "919021", "label": "Tiscali", "test_count": "79306", "aggregate_date": "2015-05-01", "id": "326", "index_value": "7919", "rating": "2.9"}, {"ip_addresses": "965787", "label": "TIM", "test_count": "22092", "aggregate_date": "2015-05-01", "id": "1121", "index_value": "7547", "rating": "2.7"}, {"ip_addresses": "10307", "label": "Acantho", "test_count": "4592", "aggregate_date": "2015-05-01", "id": "51895", "index_value": "7281", "rating": "3.3"}, {"ip_addresses": "912370", "label": "Wind/Infostrada", "test_count": "9510", "aggregate_date": "2015-05-01", "id": "35347", "index_value": "7255", "rating": "2.6"}, {"ip_addresses": "15676", 
"label": "Convergenze", "test_count": "5337", "aggregate_date": "2015-05-01", "id": "4023", "index_value": "7049", "rating": "3.5"}, {"ip_addresses": "1354725", "label": "INFOSTRADA", "test_count": "357151", "aggregate_date": "2015-05-01", "id": "68674", "index_value": "6651", "rating": "2.5"}, {"ip_addresses": "8371", "label": "Alternatyva", "test_count": "5511", "aggregate_date": "2015-05-01", "id": "84938", "index_value": "5014", "rating": "2.8"}, {"ip_addresses": "37542", "label": "Linkem", "test_count": "55170", "aggregate_date": "2015-05-01", "id": "28821", "index_value": "4511", "rating": "2.5"}, {"ip_addresses": "47878", "label": "Aria", "test_count": "15336", "aggregate_date": "2015-05-01", "id": "33246", "index_value": "2945", "rating": "2.3"}], "bandwidth": "10645"}, {"countryCode": "GB", "test_count": "2786744", "ip_addresses": "25577723", "country": "United Kingdom", "isps": [{"ip_addresses": "4810174", "label": "Virgin Media", "test_count": "752532", "aggregate_date": "2015-05-01", "id": "25020", "index_value": "56517", "rating": "3.0"}, {"ip_addresses": "90094", "label": "Kcom", "test_count": "14497", "aggregate_date": "2015-05-01", "id": "64766", "index_value": "27232", "rating": "1.9"}, {"ip_addresses": "48228", "label": "Zen Internet Ltd", "test_count": "18351", "aggregate_date": "2015-05-01", "id": "90069", "index_value": "26750", "rating": "4.2"}, {"ip_addresses": "3991119", "label": "BT", "test_count": "618576", "aggregate_date": "2015-05-01", "id": "93031", "index_value": "26104", "rating": "2.5"}, {"ip_addresses": "20268", "label": "Ask4 Limited", "test_count": "3456", "aggregate_date": "2015-05-01", "id": "11404", "index_value": "26082", "rating": "3.1"}, {"ip_addresses": "698059", "label": "PlusNet Technologies", "test_count": "52572", "aggregate_date": "2015-05-01", "id": "51", "index_value": "24310", "rating": "3.2"}, {"ip_addresses": "14786", "label": "Keycom Plc", "test_count": "2991", "aggregate_date": "2015-05-01", "id": "63491", 
"index_value": "21229", "rating": "2.3"}, {"ip_addresses": "15226", "label": "Gamma Telecom Holdings Ltd", "test_count": "8018", "aggregate_date": "2015-05-01", "id": "67723", "index_value": "20574", "rating": "2.7"}, {"ip_addresses": "31094", "label": "Griffin Information Systems Limited", "test_count": "6021", "aggregate_date": "2015-05-01", "id": "51954", "index_value": "20473", "rating": "2.1"}, {"ip_addresses": "52505", "label": "Cablecom Networking Limited", "test_count": "4931", "aggregate_date": "2015-05-01", "id": "54105", "index_value": "19816", "rating": "2.3"}, {"ip_addresses": "135202", "label": "Eclipse Internet", "test_count": "7316", "aggregate_date": "2015-05-01", "id": "18", "index_value": "19202", "rating": "2.9"}, {"ip_addresses": "15809", "label": "Timico Limited", "test_count": "1859", "aggregate_date": "2015-05-01", "id": "42027", "index_value": "18839", "rating": "3.0"}, {"ip_addresses": "2492157", "label": "TalkTalk", "test_count": "310103", "aggregate_date": "2015-05-01", "id": "61490", "index_value": "16032", "rating": "2.2"}, {"ip_addresses": "80596", "label": "Demon", "test_count": "4459", "aggregate_date": "2015-05-01", "id": "17", "index_value": "14539", "rating": "2.6"}, {"ip_addresses": "4331516", "label": "SKY Broadband", "test_count": "367243", "aggregate_date": "2015-05-01", "id": "30679", "index_value": "14261", "rating": "2.4"}, {"ip_addresses": "1135567", "label": "EE", "test_count": "80466", "aggregate_date": "2015-05-01", "id": "22941", "index_value": "13987", "rating": "2.1"}, {"ip_addresses": "91474", "label": "Tesco", "test_count": "9369", "aggregate_date": "2015-05-01", "id": "54187", "index_value": "11016", "rating": "2.3"}, {"ip_addresses": "357568", "label": "Three", "test_count": "11541", "aggregate_date": "2015-05-01", "id": "47185", "index_value": "8475", "rating": "2.8"}, {"ip_addresses": "42270", "label": "The Cloud Networks Limited", "test_count": "501", "aggregate_date": "2015-05-01", "id": "67618", 
"index_value": "7797", "rating": "2.6"}, {"ip_addresses": "35101", "label": "Supanet Limited", "test_count": "147", "aggregate_date": "2015-05-01", "id": "824", "index_value": "6242", "rating": "2.1"}], "bandwidth": "30720"}, {"countryCode": "NZ", "test_count": "234729", "ip_addresses": "1474381", "country": "New Zealand", "isps": [{"ip_addresses": "13233", "label": "Snap Limited", "test_count": "9939", "aggregate_date": "2015-05-01", "id": "102905", "index_value": "61323", "rating": "4.1"}, {"ip_addresses": "2687", "label": "Voyager", "test_count": "2356", "aggregate_date": "2015-05-01", "id": "65201", "index_value": "36542", "rating": "3.8"}, {"ip_addresses": "2302", "label": "Lightwire", "test_count": "1157", "aggregate_date": "2015-05-01", "id": "89069", "index_value": "32409", "rating": "4.4"}, {"ip_addresses": "4287", "label": "Inspire Net", "test_count": "1365", "aggregate_date": "2015-05-01", "id": "71909", "index_value": "32176", "rating": "3.7"}, {"ip_addresses": "95896", "label": "Orcon", "test_count": "18039", "aggregate_date": "2015-05-01", "id": "21", "index_value": "31121", "rating": "3.0"}, {"ip_addresses": "194100", "label": "Vodafone New Zealand", "test_count": "34464", "aggregate_date": "2015-05-01", "id": "608", "index_value": "26079", "rating": "2.5"}, {"ip_addresses": "17888", "label": "TrustPower", "test_count": "3580", "aggregate_date": "2015-05-01", "id": "66378", "index_value": "25445", "rating": "2.5"}, {"ip_addresses": "143143", "label": "Vodafone New Zealand Broadband", "test_count": "26617", "aggregate_date": "2015-05-01", "id": "75071", "index_value": "21467", "rating": "2.5"}, {"ip_addresses": "149147", "label": "Spark New Zealand", "test_count": "60339", "aggregate_date": "2015-05-01", "id": "100678", "index_value": "20301", "rating": "2.3"}, {"ip_addresses": "3365", "label": "NOW", "test_count": "2417", "aggregate_date": "2015-05-01", "id": "76453", "index_value": "19037", "rating": "3.7"}, {"ip_addresses": "95198", "label": 
"Slingshot", "test_count": "14792", "aggregate_date": "2015-05-01", "id": "859", "index_value": "16633", "rating": "2.5"}, {"ip_addresses": "5421", "label": "The Internet Group", "test_count": "1838", "aggregate_date": "2015-05-01", "id": "972", "index_value": "16534", "rating": "2.4"}, {"ip_addresses": "4444", "label": "Two Degrees Mobile Limited", "test_count": "402", "aggregate_date": "2015-05-01", "id": "99070", "index_value": "14771", "rating": "3.8"}, {"ip_addresses": "46715", "label": "WorldxChange", "test_count": "1043", "aggregate_date": "2015-05-01", "id": "66", "index_value": "13819", "rating": "3.1"}, {"ip_addresses": "6209", "label": "Compass Communications", "test_count": "541", "aggregate_date": "2015-05-01", "id": "390", "index_value": "13448", "rating": "2.4"}, {"ip_addresses": "3406", "label": "Actrix", "test_count": "875", "aggregate_date": "2015-05-01", "id": "18633", "index_value": "12306", "rating": "3.0"}, {"ip_addresses": "1753", "label": "Amuri", "test_count": "717", "aggregate_date": "2015-05-01", "id": "47838", "index_value": "12030", "rating": "4.0"}, {"ip_addresses": "3377", "label": "Megatel", "test_count": "1053", "aggregate_date": "2015-05-01", "id": "48782", "index_value": "11230", "rating": "2.1"}, {"ip_addresses": "28113", "label": "Woosh", "test_count": "937", "aggregate_date": "2015-05-01", "id": "20594", "index_value": "7039", "rating": "2.2"}, {"ip_addresses": "2116", "label": "NetSmart Ltd, ISP, Tauranga, New Zealand.", "test_count": "569", "aggregate_date": "2015-05-01", "id": "33728", "index_value": "4840", "rating": "4.4"}], "bandwidth": "25776"}, {"countryCode": "SE", "test_count": "96072", "ip_addresses": "1516402", "country": "Sweden", "isps": [{"ip_addresses": "7824", "label": "Net at Once Sweden", "test_count": "809", "aggregate_date": "2015-05-01", "id": "47190", "index_value": "142403", "rating": "3.5"}, {"ip_addresses": "15916", "label": "Ownit", "test_count": "2248", "aggregate_date": "2015-05-01", "id": "3490", 
"index_value": "125419", "rating": "4.2"}, {"ip_addresses": "26731", "label": "SunNet NORDUnet", "test_count": "722", "aggregate_date": "2015-05-01", "id": "1442", "index_value": "101068", "rating": "3.9"}, {"ip_addresses": "61571", "label": "Bredband2", "test_count": "5359", "aggregate_date": "2015-05-01", "id": "44503", "index_value": "86628", "rating": "3.7"}, {"ip_addresses": "274243", "label": "Com Hem", "test_count": "18385", "aggregate_date": "2015-05-01", "id": "604", "index_value": "82971", "rating": "3.2"}, {"ip_addresses": "24016", "label": "Telenor", "test_count": "791", "aggregate_date": "2015-05-01", "id": "806", "index_value": "79296", "rating": "2.6"}, {"ip_addresses": "85520", "label": "Bahnhof Internet", "test_count": "7458", "aggregate_date": "2015-05-01", "id": "2799", "index_value": "74155", "rating": "4.1"}, {"ip_addresses": "9426", "label": "Ratt Internet Kapacitet i Sverige AB", "test_count": "867", "aggregate_date": "2015-05-01", "id": "1493", "index_value": "67590", "rating": "3.9"}, {"ip_addresses": "9199", "label": "Telecom3 Sverige AB", "test_count": "1703", "aggregate_date": "2015-05-01", "id": "94549", "index_value": "65388", "rating": "3.5"}, {"ip_addresses": "38345", "label": "All Tele", "test_count": "2464", "aggregate_date": "2015-05-01", "id": "25471", "index_value": "61767", "rating": "3.4"}, {"ip_addresses": "316121", "label": "Bredbandsbolaget", "test_count": "13017", "aggregate_date": "2015-05-01", "id": "552", "index_value": "61734", "rating": "3.3"}, {"ip_addresses": "12344", "label": "Tyfon", "test_count": "952", "aggregate_date": "2015-05-01", "id": "4243", "index_value": "52321", "rating": "3.3"}, {"ip_addresses": "5406", "label": "Perspektiv Bredband", "test_count": "314", "aggregate_date": "2015-05-01", "id": "1583", "index_value": "45181", "rating": "3.3"}, {"ip_addresses": "211269", "label": "Tele2", "test_count": "2391", "aggregate_date": "2015-05-01", "id": "34200", "index_value": "44689", "rating": "2.9"}, 
{"ip_addresses": "12322", "label": "TDC Sverige ", "test_count": "852", "aggregate_date": "2015-05-01", "id": "27347", "index_value": "36388", "rating": "3.1"}, {"ip_addresses": "169464", "label": "Hi3G Access 3", "test_count": "499", "aggregate_date": "2015-05-01", "id": "4270", "index_value": "21180", "rating": "2.8"}, {"ip_addresses": "25365", "label": "Tele2 Mobile", "test_count": "1488", "aggregate_date": "2015-05-01", "id": "96849", "index_value": "16360", "rating": "2.8"}, {"ip_addresses": "9684", "label": "Telenor Business Solutions AB", "test_count": "435", "aggregate_date": "2015-05-01", "id": "67649", "index_value": "14915", "rating": "2.3"}, {"ip_addresses": "149336", "label": "Telenor Sverige", "test_count": "3319", "aggregate_date": "2015-05-01", "id": "26702", "index_value": "14395", "rating": "2.3"}, {"ip_addresses": "9310", "label": "Via Europa", "test_count": "102", "aggregate_date": "2015-05-01", "id": "35370", "index_value": "7767", "rating": "4.2"}], "bandwidth": "59013"}, {"countryCode": "CA", "test_count": "1417255", "ip_addresses": "12616096", "country": "Canada", "isps": [{"ip_addresses": "1915934", "label": "Rogers Cable", "test_count": "260022", "aggregate_date": "2015-05-01", "id": "164", "index_value": "67000", "rating": "2.7"}, {"ip_addresses": "313896", "label": "Bell Aliant", "test_count": "32952", "aggregate_date": "2015-05-01", "id": "66759", "index_value": "43949", "rating": "2.6"}, {"ip_addresses": "502778", "label": "Cogeco Cable", "test_count": "63267", "aggregate_date": "2015-05-01", "id": "282", "index_value": "33688", "rating": "3.3"}, {"ip_addresses": "1787574", "label": "Shaw Communications", "test_count": "154552", "aggregate_date": "2015-05-01", "id": "8", "index_value": "29954", "rating": "3.1"}, {"ip_addresses": "241192", "label": "EastLink", "test_count": "26629", "aggregate_date": "2015-05-01", "id": "19734", "index_value": "29128", "rating": "3.0"}, {"ip_addresses": "1006451", "label": "Videotron", "test_count": 
"68913", "aggregate_date": "2015-05-01", "id": "70", "index_value": "26156", "rating": "3.8"}, {"ip_addresses": "37226", "label": "Start Communications", "test_count": "15079", "aggregate_date": "2015-05-01", "id": "16391", "index_value": "25635", "rating": "4.3"}, {"ip_addresses": "63306", "label": "CIK Telecom", "test_count": "33605", "aggregate_date": "2015-05-01", "id": "49771", "index_value": "21345", "rating": "2.2"}, {"ip_addresses": "1656601", "label": "Telus", "test_count": "208270", "aggregate_date": "2015-05-01", "id": "7", "index_value": "20855", "rating": "2.5"}, {"ip_addresses": "2349702", "label": "Bell Canada", "test_count": "216082", "aggregate_date": "2015-05-01", "id": "95", "index_value": "18134", "rating": "2.4"}, {"ip_addresses": "289082", "label": "TekSavvy", "test_count": "71625", "aggregate_date": "2015-05-01", "id": "2324", "index_value": "16946", "rating": "3.7"}, {"ip_addresses": "248624", "label": "MTS Allstream", "test_count": "37974", "aggregate_date": "2015-05-01", "id": "676", "index_value": "15844", "rating": "2.4"}, {"ip_addresses": "139354", "label": "SaskTel", "test_count": "13169", "aggregate_date": "2015-05-01", "id": "431", "index_value": "14842", "rating": "2.6"}, {"ip_addresses": "101338", "label": "Primus Canada", "test_count": "25269", "aggregate_date": "2015-05-01", "id": "861", "index_value": "12370", "rating": "2.1"}, {"ip_addresses": "63394", "label": "ACN DSL", "test_count": "9173", "aggregate_date": "2015-05-01", "id": "85653", "index_value": "11782", "rating": "2.3"}, {"ip_addresses": "44226", "label": "Bell Mobility", "test_count": "7811", "aggregate_date": "2015-05-01", "id": "27713", "index_value": "10048", "rating": "2.5"}, {"ip_addresses": "83386", "label": "Acanac Inc.", "test_count": "8230", "aggregate_date": "2015-05-01", "id": "3372", "index_value": "9983", "rating": "2.9"}, {"ip_addresses": "397552", "label": "Sympatico", "test_count": "255", "aggregate_date": "2015-05-01", "id": "25028", "index_value": 
"9904", "rating": "2.4"}, {"ip_addresses": "128917", "label": "Xplornet Communications", "test_count": "32512", "aggregate_date": "2015-05-01", "id": "70988", "index_value": "4777", "rating": "1.8"}, {"ip_addresses": "132710", "label": "Distributel Communications", "test_count": "4", "aggregate_date": "2015-05-01", "id": "2916", "index_value": "1487", "rating": "2.6"}], "bandwidth": "29954"}, {"countryCode": "UA", "test_count": "1993639", "ip_addresses": "5426760", "country": "Ukraine", "isps": [{"ip_addresses": "66269", "label": "Lanet Network Ltd.", "test_count": "17114", "aggregate_date": "2015-05-01", "id": "3266", "index_value": "81611", "rating": "4.2"}, {"ip_addresses": "66983", "label": "Triolan", "test_count": "92929", "aggregate_date": "2015-05-01", "id": "102563", "index_value": "61479", "rating": "4.0"}, {"ip_addresses": "66521", "label": "Freenet Ltd.", "test_count": "18636", "aggregate_date": "2015-05-01", "id": "14953", "index_value": "48916", "rating": "4.0"}, {"ip_addresses": "41715", "label": "LLC McLaut-Invest", "test_count": "11569", "aggregate_date": "2015-05-01", "id": "79269", "index_value": "46189", "rating": "4.0"}, {"ip_addresses": "68150", "label": "ISP Fregat Ltd.", "test_count": "21132", "aggregate_date": "2015-05-01", "id": "47204", "index_value": "39839", "rating": "3.5"}, {"ip_addresses": "496663", "label": "Volia", "test_count": "109484", "aggregate_date": "2015-05-01", "id": "76423", "index_value": "39384", "rating": "2.7"}, {"ip_addresses": "54476", "label": "LLC AB Ukraine", "test_count": "10084", "aggregate_date": "2015-05-01", "id": "33066", "index_value": "37082", "rating": "4.0"}, {"ip_addresses": "32846", "label": "TOV TRK Briz", "test_count": "7325", "aggregate_date": "2015-05-01", "id": "27360", "index_value": "36694", "rating": "4.0"}, {"ip_addresses": "63781", "label": "TeNeT Scientific Production Enterprise LLC", "test_count": "15572", "aggregate_date": "2015-05-01", "id": "95235", "index_value": "36475", "rating": 
"3.8"}, {"ip_addresses": "141657", "label": "Golden Telecom LLC", "test_count": "56299", "aggregate_date": "2015-05-01", "id": "29451", "index_value": "34114", "rating": "3.5"}, {"ip_addresses": "47512", "label": "WildPark Co", "test_count": "11454", "aggregate_date": "2015-05-01", "id": "1473", "index_value": "33086", "rating": "3.9"}, {"ip_addresses": "431070", "label": "Kyivstar PJSC", "test_count": "71661", "aggregate_date": "2015-05-01", "id": "85643", "index_value": "31300", "rating": "3.4"}, {"ip_addresses": "32461", "label": "Online Technologies Ltd.", "test_count": "11091", "aggregate_date": "2015-05-01", "id": "90113", "index_value": "30207", "rating": "3.7"}, {"ip_addresses": "43339", "label": "Cifrovye Dispetcherskie Sistemy", "test_count": "3074", "aggregate_date": "2015-05-01", "id": "25890", "index_value": "30201", "rating": "4.0"}, {"ip_addresses": "49581", "label": "PRIVATE JOINT STOCK COMPANY DATAGROUP", "test_count": "11667", "aggregate_date": "2015-05-01", "id": "49661", "index_value": "21383", "rating": "3.6"}, {"ip_addresses": "146872", "label": "PRIVATE JOINT-STOCK COMPANY FARLEP-INVEST", "test_count": "32383", "aggregate_date": "2015-05-01", "id": "68671", "index_value": "19601", "rating": "3.4"}, {"ip_addresses": "64130", "label": "PrJSC MTS UKRAINE", "test_count": "17193", "aggregate_date": "2015-05-01", "id": "73478", "index_value": "7849", "rating": "2.8"}, {"ip_addresses": "81321", "label": "Telesystems of Ukraine", "test_count": "6365", "aggregate_date": "2015-05-01", "id": "47213", "index_value": "7592", "rating": "2.9"}, {"ip_addresses": "149005", "label": "Intertelecom Ltd.", "test_count": "5621", "aggregate_date": "2015-05-01", "id": "91951", "index_value": "7280", "rating": "2.5"}, {"ip_addresses": "1398378", "label": "JSC Ukrtelecom", "test_count": "144971", "aggregate_date": "2015-05-01", "id": "483", "index_value": "5609", "rating": "2.9"}], "bandwidth": "27434"}, {"countryCode": "RO", "test_count": "354177", "ip_addresses": 
"3527647", "country": "Romania", "isps": [{"ip_addresses": "1156410", "label": "RCS & RDS", "test_count": "152411", "aggregate_date": "2015-05-01", "id": "53993", "index_value": "110905", "rating": "3.9"}, {"ip_addresses": "6570", "label": "SC MILLENNIUM IT SRL", "test_count": "952", "aggregate_date": "2015-05-01", "id": "36028", "index_value": "87264", "rating": "4.2"}, {"ip_addresses": "31168", "label": "Jump Management", "test_count": "390", "aggregate_date": "2015-05-01", "id": "43109", "index_value": "80649", "rating": "3.7"}, {"ip_addresses": "6427", "label": "iNes Telecom", "test_count": "1345", "aggregate_date": "2015-05-01", "id": "4195", "index_value": "75857", "rating": "4.0"}, {"ip_addresses": "301505", "label": "UPC Romania", "test_count": "35443", "aggregate_date": "2015-05-01", "id": "37687", "index_value": "74388", "rating": "3.0"}, {"ip_addresses": "3702", "label": "S.C. EXPERTNET S.R.L.", "test_count": "717", "aggregate_date": "2015-05-01", "id": "36031", "index_value": "69317", "rating": "3.9"}, {"ip_addresses": "3369", "label": "Prime Telecom SRL", "test_count": "332", "aggregate_date": "2015-05-01", "id": "44978", "index_value": "55001", "rating": "3.7"}, {"ip_addresses": "73001", "label": "NEXTGEN COMMUNICATIONS SRL", "test_count": "7344", "aggregate_date": "2015-05-01", "id": "58577", "index_value": "52467", "rating": "3.8"}, {"ip_addresses": "5617", "label": "Canals", "test_count": "236", "aggregate_date": "2015-05-01", "id": "27723", "index_value": "51704", "rating": "3.2"}, {"ip_addresses": "5995", "label": "SC AMBRA SRL", "test_count": "948", "aggregate_date": "2015-05-01", "id": "36004", "index_value": "51368", "rating": "4.4"}, {"ip_addresses": "7124", "label": "TV SAT 2002 SRL", "test_count": "1016", "aggregate_date": "2015-05-01", "id": "38820", "index_value": "50632", "rating": "4.2"}, {"ip_addresses": "3222", "label": "SC TV Adler-Trading SRL", "test_count": "318", "aggregate_date": "2015-05-01", "id": "43721", "index_value": 
"47311", "rating": "3.7"}, {"ip_addresses": "3334", "label": "SC Baleanu ServCom SRL", "test_count": "449", "aggregate_date": "2015-05-01", "id": "35951", "index_value": "46729", "rating": "3.6"}, {"ip_addresses": "7691", "label": "Orange Romania", "test_count": "5854", "aggregate_date": "2015-05-01", "id": "80158", "index_value": "39413", "rating": "2.9"}, {"ip_addresses": "5987", "label": "Digital Cable Systems S.A.", "test_count": "1408", "aggregate_date": "2015-05-01", "id": "101638", "index_value": "31331", "rating": "3.5"}, {"ip_addresses": "57682", "label": "DIGITAL CABLE SYSTEMS SA", "test_count": "8641", "aggregate_date": "2015-05-01", "id": "46384", "index_value": "30586", "rating": "3.5"}, {"ip_addresses": "829410", "label": "ROMTelecom", "test_count": "41189", "aggregate_date": "2015-05-01", "id": "39233", "index_value": "25161", "rating": "2.9"}, {"ip_addresses": "3613", "label": "SC PAN ELECTRO SRL", "test_count": "496", "aggregate_date": "2015-05-01", "id": "35970", "index_value": "20936", "rating": "4.2"}, {"ip_addresses": "8401", "label": "Vodafone Romania S.A.", "test_count": "8396", "aggregate_date": "2015-05-01", "id": "54809", "index_value": "14952", "rating": "2.8"}, {"ip_addresses": "14004", "label": "Radiocom", "test_count": "746", "aggregate_date": "2015-05-01", "id": "7712", "index_value": "4781", "rating": "3.3"}], "bandwidth": "71540"}, {"countryCode": "PH", "test_count": "2005062", "ip_addresses": "2702640", "country": "Philippines", "isps": [{"ip_addresses": "1915", "label": "PHILCOM CORPORATION INTERNET SERVICE", "test_count": "1157", "aggregate_date": "2015-05-01", "id": "10847", "index_value": "16539", "rating": "2.6"}, {"ip_addresses": "17421", "label": "Comclark", "test_count": "8359", "aggregate_date": "2015-05-01", "id": "39646", "index_value": "10477", "rating": "2.1"}, {"ip_addresses": "1802", "label": "Tri-Isys Internet", "test_count": "807", "aggregate_date": "2015-05-01", "id": "4364", "index_value": "7973", "rating": 
"3.0"}, {"ip_addresses": "1579", "label": "Philippine Telegraph and Telephone Corporation", "test_count": "1833", "aggregate_date": "2015-05-01", "id": "1262", "index_value": "7164", "rating": "2.7"}, {"ip_addresses": "8961", "label": "Eastern Telecoms Philippines, Inc.", "test_count": "945", "aggregate_date": "2015-05-01", "id": "3373", "index_value": "6959", "rating": "2.6"}, {"ip_addresses": "13375", "label": "Eastern Telecom Philippines Inc.", "test_count": "7023", "aggregate_date": "2015-05-01", "id": "23573", "index_value": "6444", "rating": "2.6"}, {"ip_addresses": "13415", "label": "ComClark Network & Technology Corp", "test_count": "35755", "aggregate_date": "2015-05-01", "id": "66363", "index_value": "3991", "rating": "2.1"}, {"ip_addresses": "15080", "label": "SKY Broadband", "test_count": "32013", "aggregate_date": "2015-05-01", "id": "30679", "index_value": "3917", "rating": "2.4"}, {"ip_addresses": "1822450", "label": "Phillipine Telephone", "test_count": "840759", "aggregate_date": "2015-05-01", "id": "15", "index_value": "3779", "rating": "2.4"}, {"ip_addresses": "23103", "label": "SkyBroadband", "test_count": "14306", "aggregate_date": "2015-05-01", "id": "46022", "index_value": "3472", "rating": "2.2"}, {"ip_addresses": "3542", "label": "Smart Broadband", "test_count": "109764", "aggregate_date": "2015-05-01", "id": "103369", "index_value": "3237", "rating": "2.0"}, {"ip_addresses": "108136", "label": "Bayan", "test_count": "83245", "aggregate_date": "2015-05-01", "id": "37702", "index_value": "2908", "rating": "2.3"}, {"ip_addresses": "209738", "label": "Globe Telecom", "test_count": "379143", "aggregate_date": "2015-05-01", "id": "900", "index_value": "2668", "rating": "2.3"}, {"ip_addresses": "2423", "label": "Destiny Ip Pool", "test_count": "997", "aggregate_date": "2015-05-01", "id": "99523", "index_value": "2147", "rating": "2.0"}, {"ip_addresses": "2135", "label": "Sky Internet", "test_count": "13580", "aggregate_date": "2015-05-01", "id": 
"2278", "index_value": "2038", "rating": "2.0"}, {"ip_addresses": "2314", "label": "CLICK Broadband", "test_count": "2393", "aggregate_date": "2015-05-01", "id": "99531", "index_value": "1963", "rating": "2.4"}, {"ip_addresses": "2430", "label": "Clear Path Networks Inc", "test_count": "3886", "aggregate_date": "2015-05-01", "id": "38777", "index_value": "1892", "rating": "2.4"}, {"ip_addresses": "2695", "label": "Cablelink Internet Sservices Inc.", "test_count": "1395", "aggregate_date": "2015-05-01", "id": "39655", "index_value": "1356", "rating": "2.3"}, {"ip_addresses": "70449", "label": "DigitelOne", "test_count": "3661", "aggregate_date": "2015-05-01", "id": "1119", "index_value": "1310", "rating": "1.9"}, {"ip_addresses": "11571", "label": "Liberty Broadcasting Network Inc", "test_count": "6224", "aggregate_date": "2015-05-01", "id": "42873", "index_value": "1034", "rating": "2.3"}], "bandwidth": "3592"}, {"countryCode": "IN", "test_count": "4365091", "ip_addresses": "9476774", "country": "India", "isps": [{"ip_addresses": "253957", "label": "Hathway Cable Internet", "test_count": "119679", "aggregate_date": "2015-05-01", "id": "724", "index_value": "20111", "rating": "2.6"}, {"ip_addresses": "114665", "label": "Hathway Cable and Datacom Pvt Ltd", "test_count": "45380", "aggregate_date": "2015-05-01", "id": "3805", "index_value": "19616", "rating": "2.5"}, {"ip_addresses": "182441", "label": "Beam Telecom", "test_count": "162603", "aggregate_date": "2015-05-01", "id": "102897", "index_value": "19220", "rating": "3.5"}, {"ip_addresses": "95437", "label": "YOU Telecom India Pvt Ltd", "test_count": "63604", "aggregate_date": "2015-05-01", "id": "71775", "index_value": "13476", "rating": "2.5"}, {"ip_addresses": "341450", "label": "Tata Communications", "test_count": "90527", "aggregate_date": "2015-05-01", "id": "37733", "index_value": "8900", "rating": "2.5"}, {"ip_addresses": "1315887", "label": "Bharti Airtel Limited", "test_count": "81602", 
"aggregate_date": "2015-05-01", "id": "46101", "index_value": "8314", "rating": "3.0"}, {"ip_addresses": "261575", "label": "Vodafone Spain", "test_count": "35879", "aggregate_date": "2015-05-01", "id": "46053", "index_value": "7579", "rating": "2.9"}, {"ip_addresses": "429414", "label": "Bharti Broadband", "test_count": "230712", "aggregate_date": "2015-05-01", "id": "115", "index_value": "5886", "rating": "3.0"}, {"ip_addresses": "67513", "label": "Tata Teleservices (Maharashtra) Ltd", "test_count": "26638", "aggregate_date": "2015-05-01", "id": "54166", "index_value": "5205", "rating": "2.8"}, {"ip_addresses": "1098534", "label": "Airtel Broadband", "test_count": "472234", "aggregate_date": "2015-05-01", "id": "82287", "index_value": "4760", "rating": "2.9"}, {"ip_addresses": "94107", "label": "Sify", "test_count": "11637", "aggregate_date": "2015-05-01", "id": "53", "index_value": "4554", "rating": "2.6"}, {"ip_addresses": "395021", "label": "Aircel Ltd.", "test_count": "9877", "aggregate_date": "2015-05-01", "id": "66758", "index_value": "4415", "rating": "3.3"}, {"ip_addresses": "72010", "label": "National Internet Backbone", "test_count": "25441", "aggregate_date": "2015-05-01", "id": "958", "index_value": "3991", "rating": "3.0"}, {"ip_addresses": "1065471", "label": "Reliance Communications", "test_count": "174753", "aggregate_date": "2015-05-01", "id": "24249", "index_value": "3583", "rating": "2.5"}, {"ip_addresses": "278991", "label": "Idea Cellular", "test_count": "19111", "aggregate_date": "2015-05-01", "id": "75034", "index_value": "2834", "rating": "3.1"}, {"ip_addresses": "697693", "label": "Mahanagar Telephone", "test_count": "88933", "aggregate_date": "2015-05-01", "id": "14801", "index_value": "2737", "rating": "2.7"}, {"ip_addresses": "2839419", "label": "BSNL", "test_count": "454159", "aggregate_date": "2015-05-01", "id": "73461", "index_value": "2524", "rating": "2.8"}, {"ip_addresses": "72702", "label": "Quadrant Televentures Limited", 
"test_count": "19580", "aggregate_date": "2015-05-01", "id": "86182", "index_value": "2470", "rating": "3.0"}, {"ip_addresses": "193680", "label": "MTS", "test_count": "131687", "aggregate_date": "2015-05-01", "id": "11756", "index_value": "1209", "rating": "2.3"}, {"ip_addresses": "260064", "label": "Tata Indicom", "test_count": "74127", "aggregate_date": "2015-05-01", "id": "73469", "index_value": "1087", "rating": "2.0"}], "bandwidth": "6853"}, {"countryCode": "DK", "test_count": "213606", "ip_addresses": "1516838", "country": "Denmark", "isps": [{"ip_addresses": "4865", "label": "jay.net a/s", "test_count": "515", "aggregate_date": "2015-05-01", "id": "6020", "index_value": "118432", "rating": "4.0"}, {"ip_addresses": "5892", "label": "Nianet A/S", "test_count": "3429", "aggregate_date": "2015-05-01", "id": "8770", "index_value": "95364", "rating": "3.8"}, {"ip_addresses": "3669", "label": "Zen Systems A/S", "test_count": "2960", "aggregate_date": "2015-05-01", "id": "80353", "index_value": "87336", "rating": "4.0"}, {"ip_addresses": "5976", "label": "Fiberby ApS", "test_count": "1566", "aggregate_date": "2015-05-01", "id": "42694", "index_value": "67461", "rating": "4.4"}, {"ip_addresses": "29834", "label": "Syd Energi Bredbaand A/S", "test_count": "12155", "aggregate_date": "2015-05-01", "id": "43742", "index_value": "65410", "rating": "4.3"}, {"ip_addresses": "6409", "label": "Sydfyns-Intranet A/S", "test_count": "752", "aggregate_date": "2015-05-01", "id": "9499", "index_value": "58276", "rating": "3.9"}, {"ip_addresses": "27203", "label": "SEAS-NVE Holding A/S", "test_count": "7219", "aggregate_date": "2015-05-01", "id": "28682", "index_value": "57935", "rating": "4.1"}, {"ip_addresses": "6571", "label": "NRGI Fibernet A/S", "test_count": "2085", "aggregate_date": "2015-05-01", "id": "24525", "index_value": "57737", "rating": "4.3"}, {"ip_addresses": "33684", "label": "TRE-FOR Bredbaand A/S", "test_count": "8058", "aggregate_date": "2015-05-01", "id": 
"7606", "index_value": "50911", "rating": "4.2"}, {"ip_addresses": "3320", "label": "NetGroup A/S", "test_count": "1403", "aggregate_date": "2015-05-01", "id": "40249", "index_value": "49718", "rating": "3.7"}, {"ip_addresses": "8519", "label": "Altibox Danmark AS", "test_count": "2127", "aggregate_date": "2015-05-01", "id": "38908", "index_value": "47387", "rating": "4.0"}, {"ip_addresses": "13098", "label": "Bolignet-Aarhus", "test_count": "1355", "aggregate_date": "2015-05-01", "id": "2812", "index_value": "47127", "rating": "3.5"}, {"ip_addresses": "20429", "label": "ComX Networks A/S", "test_count": "2297", "aggregate_date": "2015-05-01", "id": "4866", "index_value": "46802", "rating": "4.1"}, {"ip_addresses": "125870", "label": "Telia Stofa", "test_count": "12821", "aggregate_date": "2015-05-01", "id": "1271", "index_value": "45602", "rating": "2.9"}, {"ip_addresses": "5467", "label": "Verdo Tele A/S", "test_count": "945", "aggregate_date": "2015-05-01", "id": "54344", "index_value": "41711", "rating": "4.2"}, {"ip_addresses": "3183", "label": "HEF Fibernet A/S", "test_count": "277", "aggregate_date": "2015-05-01", "id": "78313", "index_value": "36356", "rating": "4.4"}, {"ip_addresses": "739486", "label": "Tele Danmark", "test_count": "84059", "aggregate_date": "2015-05-01", "id": "22", "index_value": "34717", "rating": "3.1"}, {"ip_addresses": "5443", "label": "Dansk Net A/S", "test_count": "1210", "aggregate_date": "2015-05-01", "id": "90114", "index_value": "22254", "rating": "3.3"}, {"ip_addresses": "133979", "label": "Hi3G Access 3", "test_count": "6048", "aggregate_date": "2015-05-01", "id": "4270", "index_value": "18028", "rating": "2.8"}, {"ip_addresses": "86044", "label": "Telenor A/S", "test_count": "9989", "aggregate_date": "2015-05-01", "id": "45285", "index_value": "13927", "rating": "2.7"}], "bandwidth": "46106"}, {"countryCode": "BR", "test_count": "2846836", "ip_addresses": "18394588", "country": "Brazil", "isps": [{"ip_addresses": "549175", 
"label": "TIM Celular", "test_count": "59341", "aggregate_date": "2015-05-01", "id": "30680", "index_value": "30787", "rating": "2.9"}, {"ip_addresses": "2286147", "label": "Vivo", "test_count": "359433", "aggregate_date": "2015-05-01", "id": "86119", "index_value": "24971", "rating": "2.5"}, {"ip_addresses": "56816", "label": "Cabo Telecom", "test_count": "4834", "aggregate_date": "2015-05-01", "id": "30694", "index_value": "18768", "rating": "3.2"}, {"ip_addresses": "2770662", "label": "NET", "test_count": "619740", "aggregate_date": "2015-05-01", "id": "87729", "index_value": "18697", "rating": "2.8"}, {"ip_addresses": "58488", "label": "Sercomtel", "test_count": "3557", "aggregate_date": "2015-05-01", "id": "386", "index_value": "18360", "rating": "2.7"}, {"ip_addresses": "18270", "label": "On Telecom", "test_count": "7388", "aggregate_date": "2015-05-01", "id": "83393", "index_value": "14832", "rating": "3.7"}, {"ip_addresses": "17760", "label": "Gigalink", "test_count": "3631", "aggregate_date": "2015-05-01", "id": "2938", "index_value": "13725", "rating": "2.6"}, {"ip_addresses": "332809", "label": "Embratel", "test_count": "33240", "aggregate_date": "2015-05-01", "id": "14791", "index_value": "13300", "rating": "3.2"}, {"ip_addresses": "2445984", "label": "Global Village Telecom", "test_count": "359596", "aggregate_date": "2015-05-01", "id": "1487", "index_value": "12946", "rating": "3.6"}, {"ip_addresses": "19137", "label": "Telecom S/a", "test_count": "2637", "aggregate_date": "2015-05-01", "id": "86538", "index_value": "12226", "rating": "2.4"}, {"ip_addresses": "32598", "label": "Multiplay Telecom", "test_count": "12760", "aggregate_date": "2015-05-01", "id": "39606", "index_value": "10228", "rating": "2.7"}, {"ip_addresses": "447088", "label": "Claro", "test_count": "3337", "aggregate_date": "2015-05-01", "id": "30677", "index_value": "7887", "rating": "2.7"}, {"ip_addresses": "197175", "label": "CTBC/Algar Telecom", "test_count": "18032", 
"aggregate_date": "2015-05-01", "id": "78641", "index_value": "7832", "rating": "2.2"}, {"ip_addresses": "37469", "label": "ViaCabo", "test_count": "4875", "aggregate_date": "2015-05-01", "id": "70284", "index_value": "7782", "rating": "2.1"}, {"ip_addresses": "24625", "label": "CILNET Comunicacao e Informatica LTDA.", "test_count": "2335", "aggregate_date": "2015-05-01", "id": "8684", "index_value": "7351", "rating": "3.3"}, {"ip_addresses": "30286", "label": "Televisao Cidade S/A", "test_count": "4939", "aggregate_date": "2015-05-01", "id": "48467", "index_value": "5511", "rating": "2.2"}, {"ip_addresses": "2098110", "label": "Oi Internet", "test_count": "467251", "aggregate_date": "2015-05-01", "id": "86115", "index_value": "5050", "rating": "2.3"}, {"ip_addresses": "86811", "label": "MasterCabo - RBC", "test_count": "50812", "aggregate_date": "2015-05-01", "id": "33222", "index_value": "4295", "rating": "2.2"}, {"ip_addresses": "26959", "label": "Brisanet", "test_count": "4171", "aggregate_date": "2015-05-01", "id": "37035", "index_value": "3829", "rating": "2.7"}, {"ip_addresses": "23027", "label": "LPNet", "test_count": "5715", "aggregate_date": "2015-05-01", "id": "30761", "index_value": "3247", "rating": "2.9"}], "bandwidth": "13354"}, {"countryCode": "Unknown | Russia", "test_count": "4202215", "ip_addresses": "16912645", "country": "Russia", "isps": [{"ip_addresses": "92200", "label": "Good Line", "test_count": "15874", "aggregate_date": "2015-05-01", "id": "20455", "index_value": "47638", "rating": "3.5"}, {"ip_addresses": "189384", "label": "Ufanet", "test_count": "24995", "aggregate_date": "2015-05-01", "id": "24712", "index_value": "44420", "rating": "3.8"}, {"ip_addresses": "86402", "label": "Electronic City", "test_count": "20376", "aggregate_date": "2015-05-01", "id": "1421", "index_value": "43843", "rating": "3.8"}, {"ip_addresses": "63147", "label": "Seven Sky", "test_count": "25288", "aggregate_date": "2015-05-01", "id": "1863", "index_value": 
"42775", "rating": "3.7"}, {"ip_addresses": "1514942", "label": "DOM.RU", "test_count": "191209", "aggregate_date": "2015-05-01", "id": "66756", "index_value": "42221", "rating": "4.9"}, {"ip_addresses": "226959", "label": "InterSvyaz", "test_count": "24594", "aggregate_date": "2015-05-01", "id": "37691", "index_value": "40984", "rating": "3.7"}, {"ip_addresses": "547280", "label": "NETBYNET", "test_count": "86090", "aggregate_date": "2015-05-01", "id": "73475", "index_value": "37780", "rating": "3.6"}, {"ip_addresses": "72690", "label": "Omsk Cable Networks", "test_count": "8216", "aggregate_date": "2015-05-01", "id": "29120", "index_value": "36614", "rating": "3.6"}, {"ip_addresses": "50671", "label": "IC-VORONEZH", "test_count": "6976", "aggregate_date": "2015-05-01", "id": "1757", "index_value": "33839", "rating": "4.1"}, {"ip_addresses": "87066", "label": "Central Telegraph", "test_count": "17367", "aggregate_date": "2015-05-01", "id": "71787", "index_value": "33818", "rating": "4.0"}, {"ip_addresses": "54621", "label": "Tis Dialog", "test_count": "11203", "aggregate_date": "2015-05-01", "id": "1461", "index_value": "33491", "rating": "3.5"}, {"ip_addresses": "1977208", "label": "VimpelCom", "test_count": "319469", "aggregate_date": "2015-05-01", "id": "70159", "index_value": "29108", "rating": "3.7"}, {"ip_addresses": "69606", "label": "Flex", "test_count": "10691", "aggregate_date": "2015-05-01", "id": "89518", "index_value": "28878", "rating": "3.7"}, {"ip_addresses": "65789", "label": "AIST", "test_count": "9136", "aggregate_date": "2015-05-01", "id": "85648", "index_value": "28751", "rating": "3.4"}, {"ip_addresses": "6378392", "label": "Rostelecom", "test_count": "1279137", "aggregate_date": "2015-05-01", "id": "67610", "index_value": "28560", "rating": "3.1"}, {"ip_addresses": "212478", "label": "Bashtel", "test_count": "25176", "aggregate_date": "2015-05-01", "id": "84997", "index_value": "24269", "rating": "3.0"}, {"ip_addresses": "530097", "label": 
"MTS", "test_count": "336624", "aggregate_date": "2015-05-01", "id": "69154", "index_value": "21205", "rating": "3.2"}, {"ip_addresses": "218150", "label": "Tattelecom", "test_count": "39527", "aggregate_date": "2015-05-01", "id": "2506", "index_value": "17517", "rating": "2.9"}, {"ip_addresses": "110133", "label": "MegaFon", "test_count": "331252", "aggregate_date": "2015-05-01", "id": "1538", "index_value": "8202", "rating": "2.9"}, {"ip_addresses": "61166", "label": "Moscow Cellular Communications", "test_count": "2424", "aggregate_date": "2015-05-01", "id": "1196", "index_value": "1235", "rating": "2.8"}], "bandwidth": "29582"}, {"countryCode": "ES", "test_count": "850889", "ip_addresses": "7327956", "country": "Spain", "isps": [{"ip_addresses": "8788", "label": "Adamo", "test_count": "5917", "aggregate_date": "2015-05-01", "id": "61534", "index_value": "270702", "rating": "4.3"}, {"ip_addresses": "114371", "label": "Euskaltel", "test_count": "11599", "aggregate_date": "2015-05-01", "id": "49009", "index_value": "62863", "rating": "3.7"}, {"ip_addresses": "33634", "label": "TeleCable", "test_count": "4760", "aggregate_date": "2015-05-01", "id": "85649", "index_value": "62214", "rating": "3.6"}, {"ip_addresses": "33817", "label": "R", "test_count": "9747", "aggregate_date": "2015-05-01", "id": "77789", "index_value": "54069", "rating": "3.9"}, {"ip_addresses": "24394", "label": "PTV Telecom", "test_count": "11071", "aggregate_date": "2015-05-01", "id": "3768", "index_value": "38541", "rating": "3.7"}, {"ip_addresses": "899068", "label": "ONO", "test_count": "160896", "aggregate_date": "2015-05-01", "id": "120", "index_value": "37734", "rating": "3.5"}, {"ip_addresses": "17852", "label": "cableworld", "test_count": "3682", "aggregate_date": "2015-05-01", "id": "17914", "index_value": "37276", "rating": "3.6"}, {"ip_addresses": "1139083", "label": "Jazztel", "test_count": "117850", "aggregate_date": "2015-05-01", "id": "68", "index_value": "35042", "rating": 
"3.0"}, {"ip_addresses": "11974", "label": "Vodafonespainnetwork", "test_count": "12136", "aggregate_date": "2015-05-01", "id": "102436", "index_value": "29283", "rating": "3.1"}, {"ip_addresses": "3402893", "label": "Movistar", "test_count": "243123", "aggregate_date": "2015-05-01", "id": "48", "index_value": "26968", "rating": "2.5"}, {"ip_addresses": "13858", "label": "Yoigo", "test_count": "73", "aggregate_date": "2015-05-01", "id": "19461", "index_value": "20491", "rating": "3.0"}, {"ip_addresses": "3894", "label": "Onlycable Comunicaciones", "test_count": "1087", "aggregate_date": "2015-05-01", "id": "79474", "index_value": "19656", "rating": "3.2"}, {"ip_addresses": "404971", "label": "Orange Espana", "test_count": "74765", "aggregate_date": "2015-05-01", "id": "86124", "index_value": "16091", "rating": "2.2"}, {"ip_addresses": "419080", "label": "Vodafone Spain", "test_count": "39707", "aggregate_date": "2015-05-01", "id": "2930", "index_value": "11323", "rating": "2.3"}, {"ip_addresses": "7355", "label": "Airenetworks", "test_count": "5489", "aggregate_date": "2015-05-01", "id": "98330", "index_value": "9809", "rating": "2.7"}, {"ip_addresses": "3629", "label": "Wimax On Line", "test_count": "1952", "aggregate_date": "2015-05-01", "id": "77779", "index_value": "8300", "rating": "2.8"}, {"ip_addresses": "3022", "label": "Telecable Santa Pola", "test_count": "1844", "aggregate_date": "2015-05-01", "id": "71574", "index_value": "6237", "rating": "2.3"}, {"ip_addresses": "8759", "label": "Ole Comunicacion", "test_count": "2925", "aggregate_date": "2015-05-01", "id": "75765", "index_value": "4527", "rating": "2.4"}, {"ip_addresses": "10637", "label": "Eurona", "test_count": "5206", "aggregate_date": "2015-05-01", "id": "64795", "index_value": "3934", "rating": "2.2"}, {"ip_addresses": "33385", "label": "Iberbanda", "test_count": "4263", "aggregate_date": "2015-05-01", "id": "383", "index_value": "2001", "rating": "2.5"}], "bandwidth": "29961"}, {"countryCode": 
"MX", "test_count": "2342556", "ip_addresses": "10441124", "country": "Mexico", "isps": [{"ip_addresses": "305727", "label": "Axtel", "test_count": "156886", "aggregate_date": "2015-05-01", "id": "67626", "index_value": "43207", "rating": "3.1"}, {"ip_addresses": "40549", "label": "Iusacell", "test_count": "176146", "aggregate_date": "2015-05-01", "id": "98018", "index_value": "30590", "rating": "3.3"}, {"ip_addresses": "137312", "label": "Cablevision", "test_count": "396147", "aggregate_date": "2015-05-01", "id": "75216", "index_value": "20350", "rating": "2.5"}, {"ip_addresses": "6363", "label": "Bestel", "test_count": "10413", "aggregate_date": "2015-05-01", "id": "35310", "index_value": "17556", "rating": "2.9"}, {"ip_addresses": "147169", "label": "Telecable", "test_count": "67804", "aggregate_date": "2015-05-01", "id": "14889", "index_value": "15256", "rating": "2.9"}, {"ip_addresses": "3937", "label": "Gigacable", "test_count": "2105", "aggregate_date": "2015-05-01", "id": "58744", "index_value": "12865", "rating": "2.7"}, {"ip_addresses": "1615", "label": "Movistar M\u00e9xico", "test_count": "2808", "aggregate_date": "2015-05-01", "id": "101227", "index_value": "11943", "rating": "2.9"}, {"ip_addresses": "529", "label": "Kiwi Networks", "test_count": "1976", "aggregate_date": "2015-05-01", "id": "93906", "index_value": "10702", "rating": "3.3"}, {"ip_addresses": "5301", "label": "TvRey", "test_count": "2437", "aggregate_date": "2015-05-01", "id": "45405", "index_value": "10506", "rating": "2.2"}, {"ip_addresses": "447570", "label": "Cablem\u00e1s", "test_count": "187370", "aggregate_date": "2015-05-01", "id": "33937", "index_value": "9004", "rating": "2.8"}, {"ip_addresses": "887934", "label": "Megacable", "test_count": "283874", "aggregate_date": "2015-05-01", "id": "69153", "index_value": "8795", "rating": "2.5"}, {"ip_addresses": "174076", "label": "Telnor", "test_count": "38105", "aggregate_date": "2015-05-01", "id": "71023", "index_value": "6815", 
"rating": "2.4"}, {"ip_addresses": "467", "label": "ServNet Mexico", "test_count": "2043", "aggregate_date": "2015-05-01", "id": "32601", "index_value": "6417", "rating": "3.4"}, {"ip_addresses": "14132", "label": "Maxcom", "test_count": "23035", "aggregate_date": "2015-05-01", "id": "69166", "index_value": "6164", "rating": "2.4"}, {"ip_addresses": "1212479", "label": "Telmex", "test_count": "786020", "aggregate_date": "2015-05-01", "id": "89642", "index_value": "5845", "rating": "2.3"}, {"ip_addresses": "1050", "label": "Gemtel", "test_count": "3330", "aggregate_date": "2015-05-01", "id": "75179", "index_value": "5150", "rating": "2.4"}, {"ip_addresses": "4865", "label": "Nextel", "test_count": "741", "aggregate_date": "2015-05-01", "id": "93662", "index_value": "4420", "rating": "3.1"}], "bandwidth": "13059"}, {"countryCode": "SG", "test_count": "312034", "ip_addresses": "1486501", "country": "Singapore", "isps": [{"ip_addresses": "715", "label": "Viewqwest Fibernet", "test_count": "894", "aggregate_date": "2015-05-01", "id": "81166", "index_value": "470169", "rating": "4.2"}, {"ip_addresses": "8135", "label": "Viewqwest Pte Ltd", "test_count": "10302", "aggregate_date": "2015-05-01", "id": "72022", "index_value": "361312", "rating": "4.4"}, {"ip_addresses": "2637", "label": "Myhosting", "test_count": "20037", "aggregate_date": "2015-05-01", "id": "95321", "index_value": "283347", "rating": "3.8"}, {"ip_addresses": "7277", "label": "M1 Connect Pte Ltd", "test_count": "11277", "aggregate_date": "2015-05-01", "id": "51976", "index_value": "245887", "rating": "3.2"}, {"ip_addresses": "3104", "label": "MyRepublic", "test_count": "5374", "aggregate_date": "2015-05-01", "id": "77753", "index_value": "161413", "rating": "3.6"}, {"ip_addresses": "96012", "label": "MobileOne", "test_count": "43221", "aggregate_date": "2015-05-01", "id": "97011", "index_value": "140496", "rating": "2.7"}, {"ip_addresses": "647264", "label": "SingNet", "test_count": "105042", 
"aggregate_date": "2015-05-01", "id": "429", "index_value": "103390", "rating": "2.3"}, {"ip_addresses": "53525", "label": "StarHub Internet", "test_count": "28216", "aggregate_date": "2015-05-01", "id": "98303", "index_value": "103213", "rating": "2.3"}, {"ip_addresses": "23323", "label": "Starhub Ltd", "test_count": "10656", "aggregate_date": "2015-05-01", "id": "99041", "index_value": "91229", "rating": "2.3"}, {"ip_addresses": "408376", "label": "StarHub Cable Vision", "test_count": "50393", "aggregate_date": "2015-05-01", "id": "76", "index_value": "81195", "rating": "2.2"}, {"ip_addresses": "2066", "label": "M1 Connect", "test_count": "1703", "aggregate_date": "2015-05-01", "id": "98338", "index_value": "33926", "rating": "2.9"}, {"ip_addresses": "5393", "label": "Singapore Telecommunications", "test_count": "2977", "aggregate_date": "2015-05-01", "id": "4857", "index_value": "20824", "rating": "2.6"}, {"ip_addresses": "4882", "label": "Starhub Mobile Ltd", "test_count": "1575", "aggregate_date": "2015-05-01", "id": "99550", "index_value": "19968", "rating": "1.6"}, {"ip_addresses": "44398", "label": "SingTel Mobile", "test_count": "2127", "aggregate_date": "2015-05-01", "id": "28486", "index_value": "18184", "rating": "1.7"}, {"ip_addresses": "724", "label": "Pacnet Services (Japan) Corp.", "test_count": "1780", "aggregate_date": "2015-05-01", "id": "70195", "index_value": "15048", "rating": "2.9"}, {"ip_addresses": "1395", "label": "SoftLayer Dutch Holdings B.V.", "test_count": "104", "aggregate_date": "2015-05-01", "id": "77530", "index_value": "9774", "rating": "4.4"}], "bandwidth": "121626"}, {"countryCode": "AU", "test_count": "1590546", "ip_addresses": "8917032", "country": "Australia", "isps": [{"ip_addresses": "236555", "label": "Internode", "test_count": "273", "aggregate_date": "2015-05-01", "id": "1225", "index_value": "99772", "rating": "4.1"}, {"ip_addresses": "5694", "label": "SkyMesh", "test_count": "2645", "aggregate_date": "2015-05-01", 
"id": "89535", "index_value": "43579", "rating": "2.9"}, {"ip_addresses": "14226", "label": "e-wire", "test_count": "3425", "aggregate_date": "2015-05-01", "id": "7155", "index_value": "25749", "rating": "2.4"}, {"ip_addresses": "995297", "label": "Optus", "test_count": "190456", "aggregate_date": "2015-05-01", "id": "71769", "index_value": "22184", "rating": "2.4"}, {"ip_addresses": "3908709", "label": "Telstra", "test_count": "567425", "aggregate_date": "2015-05-01", "id": "39", "index_value": "17708", "rating": "2.3"}, {"ip_addresses": "76744", "label": "Adam Internet", "test_count": "364", "aggregate_date": "2015-05-01", "id": "100", "index_value": "16220", "rating": "3.3"}, {"ip_addresses": "1051662", "label": "iiNet", "test_count": "272192", "aggregate_date": "2015-05-01", "id": "32", "index_value": "14332", "rating": "3.3"}, {"ip_addresses": "78678", "label": "Exetel", "test_count": "42751", "aggregate_date": "2015-05-01", "id": "908", "index_value": "14318", "rating": "3.1"}, {"ip_addresses": "150886", "label": "Vodafone Australia", "test_count": "23142", "aggregate_date": "2015-05-01", "id": "83837", "index_value": "14308", "rating": "2.6"}, {"ip_addresses": "281380", "label": "TPG Internet", "test_count": "212312", "aggregate_date": "2015-05-01", "id": "98023", "index_value": "12252", "rating": "2.6"}, {"ip_addresses": "7990", "label": "Amnet", "test_count": "8845", "aggregate_date": "2015-05-01", "id": "1666", "index_value": "11350", "rating": "3.6"}, {"ip_addresses": "265090", "label": "iPrimus", "test_count": "28530", "aggregate_date": "2015-05-01", "id": "1022", "index_value": "9782", "rating": "2.3"}, {"ip_addresses": "5334", "label": "Telcoinabox Pty Ltd", "test_count": "2385", "aggregate_date": "2015-05-01", "id": "42071", "index_value": "9135", "rating": "2.6"}, {"ip_addresses": "7895", "label": "M2 Telecommunications Group", "test_count": "5944", "aggregate_date": "2015-05-01", "id": "99652", "index_value": "7385", "rating": "2.0"}, 
{"ip_addresses": "241103", "label": "Dodo", "test_count": "92137", "aggregate_date": "2015-05-01", "id": "23621", "index_value": "7006", "rating": "2.0"}, {"ip_addresses": "14656", "label": "iPrimus", "test_count": "8934", "aggregate_date": "2015-05-01", "id": "99331", "index_value": "7000", "rating": "1.9"}, {"ip_addresses": "12264", "label": "Eftel", "test_count": "3328", "aggregate_date": "2015-05-01", "id": "71054", "index_value": "6604", "rating": "2.5"}, {"ip_addresses": "6006", "label": "Engin", "test_count": "1990", "aggregate_date": "2015-05-01", "id": "48440", "index_value": "6504", "rating": "2.2"}, {"ip_addresses": "12574", "label": "SpinTel", "test_count": "9552", "aggregate_date": "2015-05-01", "id": "71839", "index_value": "4994", "rating": "2.4"}, {"ip_addresses": "38747", "label": "vividwireless", "test_count": "10766", "aggregate_date": "2015-05-01", "id": "47191", "index_value": "3358", "rating": "2.2"}], "bandwidth": "16034"}, {"countryCode": "EE", "test_count": "114847", "ip_addresses": "443560", "country": "Estonia", "isps": [{"ip_addresses": "744", "label": "Hariduse Infotehnoloogia Sihtasutus", "test_count": "1671", "aggregate_date": "2015-05-01", "id": "95245", "index_value": "94792", "rating": "3.7"}, {"ip_addresses": "66263", "label": "Starman AS", "test_count": "19732", "aggregate_date": "2015-05-01", "id": "746", "index_value": "52673", "rating": "3.3"}, {"ip_addresses": "393", "label": "Fill Ltd.", "test_count": "571", "aggregate_date": "2015-05-01", "id": "85864", "index_value": "40770", "rating": "4.1"}, {"ip_addresses": "3696", "label": "UUS PROGRAMM", "test_count": "1332", "aggregate_date": "2015-05-01", "id": "946", "index_value": "38510", "rating": "4.1"}, {"ip_addresses": "1539", "label": "Linx Telecommunications B.V.", "test_count": "387", "aggregate_date": "2015-05-01", "id": "34275", "index_value": "30819", "rating": "4.1"}, {"ip_addresses": "33263", "label": "AS INFONET", "test_count": "6445", "aggregate_date": "2015-05-01", 
"id": "479", "index_value": "29079", "rating": "3.7"}, {"ip_addresses": "22609", "label": "Televorgu AS", "test_count": "1775", "aggregate_date": "2015-05-01", "id": "10529", "index_value": "26770", "rating": "2.4"}, {"ip_addresses": "13181", "label": "AS STV", "test_count": "2070", "aggregate_date": "2015-05-01", "id": "1143", "index_value": "26664", "rating": "3.3"}, {"ip_addresses": "5170", "label": "telset ltd", "test_count": "1436", "aggregate_date": "2015-05-01", "id": "2424", "index_value": "20983", "rating": "3.2"}, {"ip_addresses": "42505", "label": "Eesti Telekom", "test_count": "52887", "aggregate_date": "2015-05-01", "id": "103101", "index_value": "19433", "rating": "2.9"}, {"ip_addresses": "1014", "label": "SONICTEST Ltd.", "test_count": "799", "aggregate_date": "2015-05-01", "id": "75863", "index_value": "18346", "rating": "3.7"}, {"ip_addresses": "1053", "label": "Osauhing Teetormaja", "test_count": "329", "aggregate_date": "2015-05-01", "id": "2950", "index_value": "17414", "rating": "3.7"}, {"ip_addresses": "1348", "label": "Elisa Eesti", "test_count": "7345", "aggregate_date": "2015-05-01", "id": "104431", "index_value": "13776", "rating": "2.1"}, {"ip_addresses": "58786", "label": "Tele2", "test_count": "1257", "aggregate_date": "2015-05-01", "id": "34200", "index_value": "12936", "rating": "2.9"}], "bandwidth": "26331"}, {"countryCode": "AT", "test_count": "319684", "ip_addresses": "2272225", "country": "Austria", "isps": [{"ip_addresses": "242210", "label": "UPC", "test_count": "71579", "aggregate_date": "2015-05-01", "id": "15012", "index_value": "54215", "rating": "3.0"}, {"ip_addresses": "47635", "label": "LIWEST Kabelfernsehen Errichtungs- und Betriebs Ge", "test_count": "5768", "aggregate_date": "2015-05-01", "id": "1977", "index_value": "38879", "rating": "4.1"}, {"ip_addresses": "5349", "label": "Ainet Telekommunikations-Netzwerk Betriebs GmbH", "test_count": "985", "aggregate_date": "2015-05-01", "id": "8204", "index_value": "38271", 
"rating": "4.0"}, {"ip_addresses": "3420", "label": "Innsbrucker Kommunalbetriebe AG", "test_count": "944", "aggregate_date": "2015-05-01", "id": "2120", "index_value": "34909", "rating": "4.3"}, {"ip_addresses": "2411", "label": "Stadtwerke Kufstein GmbH", "test_count": "424", "aggregate_date": "2015-05-01", "id": "4715", "index_value": "32557", "rating": "3.9"}, {"ip_addresses": "25373", "label": "Kabelplus GmbH", "test_count": "7708", "aggregate_date": "2015-05-01", "id": "85002", "index_value": "30557", "rating": "4.0"}, {"ip_addresses": "82415", "label": "Hutchison Drei Austria GmbH", "test_count": "10891", "aggregate_date": "2015-05-01", "id": "95716", "index_value": "29610", "rating": "3.0"}, {"ip_addresses": "3586", "label": "Multikom Austria Telekom GmbH", "test_count": "886", "aggregate_date": "2015-05-01", "id": "8214", "index_value": "27118", "rating": "2.3"}, {"ip_addresses": "6483", "label": "ASCUS Telecom GmbH", "test_count": "500", "aggregate_date": "2015-05-01", "id": "42885", "index_value": "22941", "rating": "3.5"}, {"ip_addresses": "2662", "label": "Stadtwerke Kapfenberg", "test_count": "484", "aggregate_date": "2015-05-01", "id": "7895", "index_value": "22188", "rating": "4.2"}, {"ip_addresses": "20597", "label": "SALZBURG AG", "test_count": "2945", "aggregate_date": "2015-05-01", "id": "80", "index_value": "21987", "rating": "4.2"}, {"ip_addresses": "3959", "label": "Russmedia IT GmbH", "test_count": "2159", "aggregate_date": "2015-05-01", "id": "99560", "index_value": "21870", "rating": "3.2"}, {"ip_addresses": "28349", "label": "T-Mobile Austria GmbH", "test_count": "1784", "aggregate_date": "2015-05-01", "id": "548", "index_value": "17952", "rating": "2.7"}, {"ip_addresses": "8279", "label": "JM-DATA GmbH", "test_count": "1158", "aggregate_date": "2015-05-01", "id": "37698", "index_value": "16482", "rating": "3.9"}, {"ip_addresses": "3599", "label": "myNet Internet Solutions", "test_count": "685", "aggregate_date": "2015-05-01", "id": 
"4615", "index_value": "15509", "rating": "3.7"}, {"ip_addresses": "22539", "label": "Tele2 Telecommunication GmbH", "test_count": "3940", "aggregate_date": "2015-05-01", "id": "42068", "index_value": "14266", "rating": "3.1"}, {"ip_addresses": "4835", "label": "Infotech EDV-Systeme GmbH", "test_count": "545", "aggregate_date": "2015-05-01", "id": "8507", "index_value": "13341", "rating": "3.6"}, {"ip_addresses": "1042282", "label": "Telekom Austria", "test_count": "82469", "aggregate_date": "2015-05-01", "id": "53972", "index_value": "10981", "rating": "2.7"}, {"ip_addresses": "3094", "label": "iplace Internet Services Wellinger Harald", "test_count": "365", "aggregate_date": "2015-05-01", "id": "45295", "index_value": "9777", "rating": "3.7"}, {"ip_addresses": "72902", "label": "Tele2", "test_count": "2005", "aggregate_date": "2015-05-01", "id": "34200", "index_value": "5417", "rating": "2.9"}], "bandwidth": "28464"}, {"countryCode": "PK", "test_count": "305659", "ip_addresses": "2369771", "country": "Pakistan", "isps": [{"ip_addresses": "5465", "label": "Multinet Pakistan Pvt. Ltd.", "test_count": "7041", "aggregate_date": "2015-05-01", "id": "85658", "index_value": "11273", "rating": "3.6"}, {"ip_addresses": "9205", "label": "Transworld Associates (Pvt.) Ltd.", "test_count": "2748", "aggregate_date": "2015-05-01", "id": "15186", "index_value": "9055", "rating": "3.4"}, {"ip_addresses": "6990", "label": "Wateen Telecom", "test_count": "6443", "aggregate_date": "2015-05-01", "id": "103743", "index_value": "7285", "rating": "3.0"}, {"ip_addresses": "7113", "label": "Cyber Internet Services Pakistan", "test_count": "1181", "aggregate_date": "2015-05-01", "id": "66348", "index_value": "6460", "rating": "3.4"}, {"ip_addresses": "7438", "label": "CYBER INTERNET SERVICES (PVT.) 
LTD.", "test_count": "1126", "aggregate_date": "2015-05-01", "id": "3497", "index_value": "5839", "rating": "3.3"}, {"ip_addresses": "246373", "label": "PTCL", "test_count": "26481", "aggregate_date": "2015-05-01", "id": "71764", "index_value": "5436", "rating": "2.9"}, {"ip_addresses": "13392", "label": "Fiberlink Pvt.Ltd", "test_count": "2318", "aggregate_date": "2015-05-01", "id": "39350", "index_value": "5387", "rating": "3.3"}, {"ip_addresses": "16329", "label": "Fiberlink", "test_count": "2245", "aggregate_date": "2015-05-01", "id": "42220", "index_value": "5245", "rating": "3.6"}, {"ip_addresses": "7905", "label": "Fariya Networks", "test_count": "1424", "aggregate_date": "2015-05-01", "id": "49034", "index_value": "4463", "rating": "3.7"}, {"ip_addresses": "34411", "label": "Micronet Broadband (Pvt) Ltd.", "test_count": "6056", "aggregate_date": "2015-05-01", "id": "962", "index_value": "4245", "rating": "3.7"}, {"ip_addresses": "6258", "label": "WorldCALL Multimedia Ltd", "test_count": "898", "aggregate_date": "2015-05-01", "id": "4649", "index_value": "4220", "rating": "3.0"}, {"ip_addresses": "39603", "label": "Worldcall Telecom Ltd", "test_count": "11447", "aggregate_date": "2015-05-01", "id": "23027", "index_value": "3576", "rating": "2.9"}, {"ip_addresses": "7707", "label": "National Telecommunication Corporation", "test_count": "694", "aggregate_date": "2015-05-01", "id": "43305", "index_value": "3285", "rating": "2.9"}, {"ip_addresses": "7195", "label": "Fariya Networks Pvt.", "test_count": "967", "aggregate_date": "2015-05-01", "id": "94747", "index_value": "3102", "rating": "3.7"}, {"ip_addresses": "42827", "label": "LINKdotNET Telecom Limited", "test_count": "5948", "aggregate_date": "2015-05-01", "id": "29418", "index_value": "3023", "rating": "3.0"}, {"ip_addresses": "1298266", "label": "Pakistan Telecommuication company limited", "test_count": "140393", "aggregate_date": "2015-05-01", "id": "67907", "index_value": "2925", "rating": "3.0"}, 
{"ip_addresses": "49136", "label": "Qubee", "test_count": "14205", "aggregate_date": "2015-05-01", "id": "42016", "index_value": "2757", "rating": "2.9"}, {"ip_addresses": "15811", "label": "Metro Ethernet Network", "test_count": "3717", "aggregate_date": "2015-05-01", "id": "24405", "index_value": "2396", "rating": "3.7"}, {"ip_addresses": "7090", "label": "MAGSNET LIMITED", "test_count": "2390", "aggregate_date": "2015-05-01", "id": "77747", "index_value": "2088", "rating": "3.4"}, {"ip_addresses": "96631", "label": "Wi-Tribe Pakistan Limited", "test_count": "15770", "aggregate_date": "2015-05-01", "id": "34729", "index_value": "1146", "rating": "2.6"}], "bandwidth": "3880"}, {"countryCode": "GR", "test_count": "376608", "ip_addresses": "3132352", "country": "Greece", "isps": [{"ip_addresses": "640", "label": "JM-DATA GmbH", "test_count": "1352", "aggregate_date": "2015-05-01", "id": "37698", "index_value": "33600", "rating": "3.9"}, {"ip_addresses": "1198890", "label": "OTEnet S.A.", "test_count": "157322", "aggregate_date": "2015-05-01", "id": "173", "index_value": "11600", "rating": "2.9"}, {"ip_addresses": "16159", "label": "Vodafone-panafon Hellenic Telecommunications Compa", "test_count": "1192", "aggregate_date": "2015-05-01", "id": "81085", "index_value": "11283", "rating": "2.8"}, {"ip_addresses": "6186", "label": "Vivodi Telecom", "test_count": "488", "aggregate_date": "2015-05-01", "id": "94187", "index_value": "10313", "rating": "2.6"}, {"ip_addresses": "152455", "label": "Cosmote", "test_count": "1456", "aggregate_date": "2015-05-01", "id": "35621", "index_value": "10156", "rating": "2.7"}, {"ip_addresses": "291543", "label": "WIND Hellas", "test_count": "41715", "aggregate_date": "2015-05-01", "id": "14784", "index_value": "10057", "rating": "2.6"}, {"ip_addresses": "921", "label": "Cyprus Telecommuncations Authority", "test_count": "116", "aggregate_date": "2015-05-01", "id": "93040", "index_value": "9259", "rating": "2.5"}, {"ip_addresses": 
"512866", "label": "Hellas On Line", "test_count": "40246", "aggregate_date": "2015-05-01", "id": "328", "index_value": "9086", "rating": "2.7"}, {"ip_addresses": "514021", "label": "FOURTHnet", "test_count": "53617", "aggregate_date": "2015-05-01", "id": "513", "index_value": "8737", "rating": "2.6"}, {"ip_addresses": "282670", "label": "Cyta Hellas", "test_count": "10703", "aggregate_date": "2015-05-01", "id": "31862", "index_value": "7987", "rating": "2.9"}, {"ip_addresses": "9819", "label": "On Telecoms", "test_count": "1281", "aggregate_date": "2015-05-01", "id": "89530", "index_value": "6926", "rating": "2.9"}], "bandwidth": "10560"}, {"countryCode": "TH", "test_count": "530299", "ip_addresses": "3363315", "country": "Thailand", "isps": [{"ip_addresses": "9037", "label": "CHULAL", "test_count": "1487", "aggregate_date": "2015-05-01", "id": "9064", "index_value": "41857", "rating": "4.1"}, {"ip_addresses": "7857", "label": "KMIT", "test_count": "376", "aggregate_date": "2015-05-01", "id": "8557", "index_value": "41427", "rating": "4.2"}, {"ip_addresses": "7742", "label": "KSC Commercial Internet Co. 
Ltd.", "test_count": "601", "aggregate_date": "2015-05-01", "id": "1601", "index_value": "35852", "rating": "3.2"}, {"ip_addresses": "10393", "label": "Internet Thailand Company Limited", "test_count": "1906", "aggregate_date": "2015-05-01", "id": "62", "index_value": "31132", "rating": "3.1"}, {"ip_addresses": "3166", "label": "Loxley Information Company Ltd.", "test_count": "500", "aggregate_date": "2015-05-01", "id": "992", "index_value": "26135", "rating": "3.5"}, {"ip_addresses": "8812", "label": "CS LoxInfo", "test_count": "3012", "aggregate_date": "2015-05-01", "id": "85661", "index_value": "19233", "rating": "3.4"}, {"ip_addresses": "256043", "label": "3BB Broadband", "test_count": "62500", "aggregate_date": "2015-05-01", "id": "42011", "index_value": "18910", "rating": "3.2"}, {"ip_addresses": "798368", "label": "True Internet", "test_count": "199358", "aggregate_date": "2015-05-01", "id": "70160", "index_value": "18667", "rating": "2.9"}, {"ip_addresses": "4709", "label": "KSC Internet Commercial Co., Ltd.", "test_count": "590", "aggregate_date": "2015-05-01", "id": "815", "index_value": "18280", "rating": "3.0"}, {"ip_addresses": "5241", "label": "MaxNet", "test_count": "761", "aggregate_date": "2015-05-01", "id": "2331", "index_value": "17868", "rating": "3.4"}, {"ip_addresses": "6157", "label": "CAT Telecom Public Company Limited", "test_count": "3448", "aggregate_date": "2015-05-01", "id": "99548", "index_value": "17710", "rating": "3.4"}, {"ip_addresses": "83155", "label": "CAT Telecom public company Ltd", "test_count": "30204", "aggregate_date": "2015-05-01", "id": "2353", "index_value": "17351", "rating": "3.1"}, {"ip_addresses": "14693", "label": "TOT Mobile Co LTD", "test_count": "2715", "aggregate_date": "2015-05-01", "id": "70981", "index_value": "14940", "rating": "2.9"}, {"ip_addresses": "7705", "label": "BB BROADBAND CO., LTD.", "test_count": "2203", "aggregate_date": "2015-05-01", "id": "25289", "index_value": "14417", "rating": "3.3"}, 
{"ip_addresses": "307197", "label": "TOT", "test_count": "77655", "aggregate_date": "2015-05-01", "id": "99514", "index_value": "10641", "rating": "2.6"}, {"ip_addresses": "350278", "label": "Triple T Internet Company Limited", "test_count": "54187", "aggregate_date": "2015-05-01", "id": "67646", "index_value": "8378", "rating": "3.2"}, {"ip_addresses": "32935", "label": "True Move Company Limited", "test_count": "5542", "aggregate_date": "2015-05-01", "id": "61492", "index_value": "7141", "rating": "3.0"}, {"ip_addresses": "64727", "label": "Total Access Communication", "test_count": "1484", "aggregate_date": "2015-05-01", "id": "32378", "index_value": "6384", "rating": "3.4"}, {"ip_addresses": "6695", "label": "Ministry of Education - EMISC", "test_count": "316", "aggregate_date": "2015-05-01", "id": "47222", "index_value": "6129", "rating": "3.5"}, {"ip_addresses": "10522", "label": "DTAC", "test_count": "1358", "aggregate_date": "2015-05-01", "id": "98845", "index_value": "4550", "rating": "3.0"}], "bandwidth": "20045"}, {"countryCode": "CO", "test_count": "754760", "ip_addresses": "3696651", "country": "Colombia", "isps": [{"ip_addresses": "10798", "label": "UNITEL S.A E.S.P", "test_count": "244", "aggregate_date": "2015-05-01", "id": "3663", "index_value": "14641", "rating": "3.3"}, {"ip_addresses": "282011", "label": "ETB", "test_count": "56627", "aggregate_date": "2015-05-01", "id": "87746", "index_value": "11713", "rating": "2.8"}, {"ip_addresses": "9304", "label": "Empresa De Telecomunicaciones De Pereira S.A.", "test_count": "2597", "aggregate_date": "2015-05-01", "id": "97009", "index_value": "8501", "rating": "3.2"}, {"ip_addresses": "94615", "label": "UNE", "test_count": "73135", "aggregate_date": "2015-05-01", "id": "103132", "index_value": "7739", "rating": "2.8"}, {"ip_addresses": "8610", "label": "IFX NETWORKS COLOMBIA", "test_count": "2848", "aggregate_date": "2015-05-01", "id": "568", "index_value": "7518", "rating": "3.0"}, {"ip_addresses": 
"49756", "label": "Empresa de Telecomunicaciones de Pereira S.A. E.S.", "test_count": "2199", "aggregate_date": "2015-05-01", "id": "3384", "index_value": "6750", "rating": "3.2"}, {"ip_addresses": "13064", "label": "TIGO COLOMBIA", "test_count": "596", "aggregate_date": "2015-05-01", "id": "103373", "index_value": "6564", "rating": "3.7"}, {"ip_addresses": "12117", "label": "INTERNEXA S.A. E.S.P", "test_count": "10225", "aggregate_date": "2015-05-01", "id": "17052", "index_value": "6244", "rating": "3.2"}, {"ip_addresses": "741584", "label": "Telmex Colombia S.A.", "test_count": "295053", "aggregate_date": "2015-05-01", "id": "3932", "index_value": "6146", "rating": "2.8"}, {"ip_addresses": "8194", "label": "Claro Colombia", "test_count": "73", "aggregate_date": "2015-05-01", "id": "100662", "index_value": "5209", "rating": "2.9"}, {"ip_addresses": "79905", "label": "EDATEL S.A. E.S.P", "test_count": "9619", "aggregate_date": "2015-05-01", "id": "828", "index_value": "4898", "rating": "3.1"}, {"ip_addresses": "8116", "label": "Comcel", "test_count": "70", "aggregate_date": "2015-05-01", "id": "87771", "index_value": "4596", "rating": "2.0"}, {"ip_addresses": "101767", "label": "Telefonica Moviles Colombia", "test_count": "3335", "aggregate_date": "2015-05-01", "id": "32429", "index_value": "4566", "rating": "2.5"}, {"ip_addresses": "38683", "label": "METROTEL REDES S.A.", "test_count": "1862", "aggregate_date": "2015-05-01", "id": "153", "index_value": "4224", "rating": "3.0"}, {"ip_addresses": "8454", "label": "Tv Azteca Sucursal Colombia", "test_count": "33175", "aggregate_date": "2015-05-01", "id": "83950", "index_value": "4204", "rating": "3.2"}, {"ip_addresses": "55303", "label": "TELEBUCARAMANGA S.A. 
E.S.P.", "test_count": "3957", "aggregate_date": "2015-05-01", "id": "3625", "index_value": "3899", "rating": "2.6"}, {"ip_addresses": "735116", "label": "Colombia Telecommunicaciones", "test_count": "84290", "aggregate_date": "2015-05-01", "id": "1118", "index_value": "3778", "rating": "3.0"}, {"ip_addresses": "15236", "label": "SUPERCABLE TELECOMUNICACIONES", "test_count": "484", "aggregate_date": "2015-05-01", "id": "1342", "index_value": "3775", "rating": "2.6"}, {"ip_addresses": "73380", "label": "Emcatel", "test_count": "6270", "aggregate_date": "2015-05-01", "id": "1045", "index_value": "2969", "rating": "3.0"}, {"ip_addresses": "8618", "label": "Metrotel SA ESP", "test_count": "3997", "aggregate_date": "2015-05-01", "id": "100667", "index_value": "2130", "rating": "2.5"}], "bandwidth": "6615"}, {"countryCode": "SK", "test_count": "131941", "ip_addresses": "873370", "country": "Slovakia", "isps": [{"ip_addresses": "1412", "label": "e-Net, s.r.o.", "test_count": "333", "aggregate_date": "2015-05-01", "id": "35537", "index_value": "83456", "rating": "4.4"}, {"ip_addresses": "1975", "label": "ANTIK Telecom s.r.o", "test_count": "4731", "aggregate_date": "2015-05-01", "id": "79851", "index_value": "71392", "rating": "4.3"}, {"ip_addresses": "102296", "label": "Orange Slovensko", "test_count": "11377", "aggregate_date": "2015-05-01", "id": "4438", "index_value": "70962", "rating": "4.1"}, {"ip_addresses": "77768", "label": "UPC Slovensko s.r.o", "test_count": "13123", "aggregate_date": "2015-05-01", "id": "67633", "index_value": "51598", "rating": "3.6"}, {"ip_addresses": "4088", "label": "RUPKKI s.r.o.", "test_count": "322", "aggregate_date": "2015-05-01", "id": "34891", "index_value": "34361", "rating": "3.8"}, {"ip_addresses": "1904", "label": "DataNetworks s.r.o.", "test_count": "1168", "aggregate_date": "2015-05-01", "id": "33407", "index_value": "32803", "rating": "3.2"}, {"ip_addresses": "4444", "label": "DIGI SLOVAKIA, s.r.o.", "test_count": "594", 
"aggregate_date": "2015-05-01", "id": "37124", "index_value": "31702", "rating": "4.1"}, {"ip_addresses": "13961", "label": "DSi data s.r.o.", "test_count": "2015", "aggregate_date": "2015-05-01", "id": "4179", "index_value": "27194", "rating": "3.3"}, {"ip_addresses": "4293", "label": "Satro, s.r.o.", "test_count": "410", "aggregate_date": "2015-05-01", "id": "54045", "index_value": "24899", "rating": "4.0"}, {"ip_addresses": "8520", "label": "SWAN a.s.", "test_count": "6240", "aggregate_date": "2015-05-01", "id": "3446", "index_value": "23525", "rating": "3.5"}, {"ip_addresses": "2477", "label": "ARTOS a.s.", "test_count": "2924", "aggregate_date": "2015-05-01", "id": "58660", "index_value": "23104", "rating": "3.7"}, {"ip_addresses": "191105", "label": "Slovak Telecom", "test_count": "24509", "aggregate_date": "2015-05-01", "id": "89038", "index_value": "22205", "rating": "3.0"}, {"ip_addresses": "2123", "label": "Minet s.r.o.", "test_count": "784", "aggregate_date": "2015-05-01", "id": "34369", "index_value": "19618", "rating": "3.5"}, {"ip_addresses": "7537", "label": "Martico s.r.o.", "test_count": "710", "aggregate_date": "2015-05-01", "id": "16771", "index_value": "19347", "rating": "3.7"}, {"ip_addresses": "63210", "label": "Slovanet a.s.", "test_count": "3207", "aggregate_date": "2015-05-01", "id": "4819", "index_value": "17931", "rating": "3.1"}, {"ip_addresses": "3410", "label": "SATRO s.r.o.", "test_count": "726", "aggregate_date": "2015-05-01", "id": "4060", "index_value": "15614", "rating": "3.8"}, {"ip_addresses": "2940", "label": "BENESTRA, s.r.o.", "test_count": "7330", "aggregate_date": "2015-05-01", "id": "104035", "index_value": "13463", "rating": "3.4"}, {"ip_addresses": "5055", "label": "ENERGOTEL a.s.", "test_count": "3688", "aggregate_date": "2015-05-01", "id": "3758", "index_value": "12924", "rating": "3.8"}, {"ip_addresses": "12457", "label": "Orange", "test_count": "240", "aggregate_date": "2015-05-01", "id": "48496", "index_value": 
"11389", "rating": "2.7"}, {"ip_addresses": "848", "label": "RadioLAN", "test_count": "1566", "aggregate_date": "2015-05-01", "id": "103746", "index_value": "10867", "rating": "3.2"}], "bandwidth": "29032"}, {"countryCode": "MY", "test_count": "522225", "ip_addresses": "2628062", "country": "Malaysia", "isps": [{"ip_addresses": "720735", "label": "Telekom Malaysia", "test_count": "70", "aggregate_date": "2015-05-01", "id": "892", "index_value": "31073", "rating": "2.0"}, {"ip_addresses": "3218", "label": "Optical Communication Engineering Sdn Bhd", "test_count": "416", "aggregate_date": "2015-05-01", "id": "27302", "index_value": "17392", "rating": "3.0"}, {"ip_addresses": "34144", "label": "TIME dotCom Berhad", "test_count": "6538", "aggregate_date": "2015-05-01", "id": "83844", "index_value": "13272", "rating": "3.2"}, {"ip_addresses": "172886", "label": "Mais Communications", "test_count": "10160", "aggregate_date": "2015-05-01", "id": "17961", "index_value": "12616", "rating": "2.0"}, {"ip_addresses": "17542", "label": "Celcom Internet Service Provider", "test_count": "11142", "aggregate_date": "2015-05-01", "id": "12941", "index_value": "9760", "rating": "3.0"}, {"ip_addresses": "87801", "label": "TMnet Telekom Malaysia", "test_count": "37902", "aggregate_date": "2015-05-01", "id": "1765", "index_value": "9527", "rating": "2.3"}, {"ip_addresses": "6469", "label": "Maxis Broadband Sdn.Bhd", "test_count": "18113", "aggregate_date": "2015-05-01", "id": "54285", "index_value": "9127", "rating": "2.3"}, {"ip_addresses": "20496", "label": "JARING Communications Sdn Bhd", "test_count": "2176", "aggregate_date": "2015-05-01", "id": "22940", "index_value": "8532", "rating": "2.8"}, {"ip_addresses": "98355", "label": "TM Net", "test_count": "28373", "aggregate_date": "2015-05-01", "id": "86120", "index_value": "8306", "rating": "2.3"}, {"ip_addresses": "55421", "label": "TMNET, TELEKOM MALAYSIA", "test_count": "16044", "aggregate_date": "2015-05-01", "id": "2411", 
"index_value": "8228", "rating": "2.3"}, {"ip_addresses": "1920", "label": "MyKRIS Asia Sdn Bhd", "test_count": "1227", "aggregate_date": "2015-05-01", "id": "2712", "index_value": "6607", "rating": "2.5"}, {"ip_addresses": "137595", "label": "DiGi", "test_count": "4350", "aggregate_date": "2015-05-01", "id": "31816", "index_value": "6301", "rating": "2.6"}, {"ip_addresses": "7477", "label": "Maxis", "test_count": "9461", "aggregate_date": "2015-05-01", "id": "99040", "index_value": "6265", "rating": "2.4"}, {"ip_addresses": "695655", "label": "Telekom Malaysia", "test_count": "252749", "aggregate_date": "2015-05-01", "id": "99511", "index_value": "5587", "rating": "2.0"}, {"ip_addresses": "36220", "label": "TM ADSL", "test_count": "5897", "aggregate_date": "2015-05-01", "id": "1365", "index_value": "4908", "rating": "2.0"}, {"ip_addresses": "43390", "label": "TM NET", "test_count": "1900", "aggregate_date": "2015-05-01", "id": "20591", "index_value": "4341", "rating": "2.1"}, {"ip_addresses": "47027", "label": "YTL Communications Sdn Bhd", "test_count": "41819", "aggregate_date": "2015-05-01", "id": "51882", "index_value": "4024", "rating": "3.7"}, {"ip_addresses": "56673", "label": "Maxis Broadband Sdn Bhd", "test_count": "171", "aggregate_date": "2015-05-01", "id": "39759", "index_value": "3603", "rating": "2.9"}, {"ip_addresses": "294234", "label": "Packet One Networks", "test_count": "9456", "aggregate_date": "2015-05-01", "id": "25031", "index_value": "2680", "rating": "2.3"}, {"ip_addresses": "3883", "label": "Maxis 3G", "test_count": "478", "aggregate_date": "2015-05-01", "id": "71856", "index_value": "1377", "rating": "2.4"}], "bandwidth": "6994"}, {"countryCode": "DE", "test_count": "1150187", "ip_addresses": "15763561", "country": "Germany", "isps": [{"ip_addresses": "21355", "label": "UPC", "test_count": "7457", "aggregate_date": "2015-05-01", "id": "15012", "index_value": "65255", "rating": "3.0"}, {"ip_addresses": "652011", "label": "Unitymedia", 
"test_count": "107429", "aggregate_date": "2015-05-01", "id": "39245", "index_value": "60764", "rating": "4.0"}, {"ip_addresses": "20963", "label": "wilhelm.tel", "test_count": "12927", "aggregate_date": "2015-05-01", "id": "91706", "index_value": "57477", "rating": "4.2"}, {"ip_addresses": "259531", "label": "Kabel BW", "test_count": "85878", "aggregate_date": "2015-05-01", "id": "84980", "index_value": "52710", "rating": "3.7"}, {"ip_addresses": "26004", "label": "Primacom", "test_count": "14841", "aggregate_date": "2015-05-01", "id": "88534", "index_value": "44717", "rating": "3.3"}, {"ip_addresses": "1030881", "label": "Kabel Deutschland", "test_count": "262388", "aggregate_date": "2015-05-01", "id": "75022", "index_value": "41075", "rating": "3.4"}, {"ip_addresses": "33616", "label": "Cablesurf.de", "test_count": "6250", "aggregate_date": "2015-05-01", "id": "4368", "index_value": "39020", "rating": "4.0"}, {"ip_addresses": "57096", "label": "Tele Columbus", "test_count": "4559", "aggregate_date": "2015-05-01", "id": "76439", "index_value": "36823", "rating": "3.1"}, {"ip_addresses": "261481", "label": "NetCologne", "test_count": "12623", "aggregate_date": "2015-05-01", "id": "369", "index_value": "24775", "rating": "3.5"}, {"ip_addresses": "23419", "label": "QUiX", "test_count": "883", "aggregate_date": "2015-05-01", "id": "38473", "index_value": "20518", "rating": "3.1"}, {"ip_addresses": "7345623", "label": "Deutsche Telekom", "test_count": "294436", "aggregate_date": "2015-05-01", "id": "252", "index_value": "20499", "rating": "3.1"}, {"ip_addresses": "223081", "label": "EWE Tel", "test_count": "12163", "aggregate_date": "2015-05-01", "id": "54119", "index_value": "18667", "rating": "3.1"}, {"ip_addresses": "146995", "label": "M-net", "test_count": "4562", "aggregate_date": "2015-05-01", "id": "256", "index_value": "17702", "rating": "4.0"}, {"ip_addresses": "229866", "label": "Versatel", "test_count": "11395", "aggregate_date": "2015-05-01", "id": 
"42020", "index_value": "17322", "rating": "2.8"}, {"ip_addresses": "48900", "label": "htp", "test_count": "2031", "aggregate_date": "2015-05-01", "id": "3894", "index_value": "16339", "rating": "3.7"}, {"ip_addresses": "1655756", "label": "O2 Telefonica Germany", "test_count": "89583", "aggregate_date": "2015-05-01", "id": "73468", "index_value": "13792", "rating": "2.8"}, {"ip_addresses": "906354", "label": "Vodafone", "test_count": "84524", "aggregate_date": "2015-05-01", "id": "87743", "index_value": "12411", "rating": "2.7"}, {"ip_addresses": "177556", "label": "QSC", "test_count": "4413", "aggregate_date": "2015-05-01", "id": "1458", "index_value": "11634", "rating": "3.2"}, {"ip_addresses": "63685", "label": "Vodafone GmbH", "test_count": "6943", "aggregate_date": "2015-05-01", "id": "98537", "index_value": "11147", "rating": "2.8"}, {"ip_addresses": "96074", "label": "E-Plus Mobilfunk", "test_count": "269", "aggregate_date": "2015-05-01", "id": "634", "index_value": "10792", "rating": "3.0"}], "bandwidth": "30675"}, {"countryCode": "FR", "test_count": "1249994", "ip_addresses": "11079128", "country": "France", "isps": [{"ip_addresses": "34961", "label": "Tiscali France", "test_count": "106", "aggregate_date": "2015-05-01", "id": "25029", "index_value": "274201", "rating": "3.3"}, {"ip_addresses": "15744", "label": "OVH Telecom", "test_count": "7736", "aggregate_date": "2015-05-01", "id": "93058", "index_value": "130829", "rating": "3.9"}, {"ip_addresses": "9314", "label": "Wibox", "test_count": "2801", "aggregate_date": "2015-05-01", "id": "52184", "index_value": "70637", "rating": "2.8"}, {"ip_addresses": "3546", "label": "K-Net", "test_count": "2455", "aggregate_date": "2015-05-01", "id": "94972", "index_value": "58394", "rating": "4.5"}, {"ip_addresses": "777963", "label": "Numericable", "test_count": "133919", "aggregate_date": "2015-05-01", "id": "415", "index_value": "55435", "rating": "3.3"}, {"ip_addresses": "2874867", "label": "Orange", 
"test_count": "449195", "aggregate_date": "2015-05-01", "id": "48496", "index_value": "49087", "rating": "2.7"}, {"ip_addresses": "3444", "label": "CityPlay", "test_count": "377", "aggregate_date": "2015-05-01", "id": "68704", "index_value": "38413", "rating": "3.4"}, {"ip_addresses": "589773", "label": "Bouygues Telecom", "test_count": "118996", "aggregate_date": "2015-05-01", "id": "12170", "index_value": "34092", "rating": "2.6"}, {"ip_addresses": "8219", "label": "Calixo par Vialis", "test_count": "1992", "aggregate_date": "2015-05-01", "id": "4268", "index_value": "32742", "rating": "3.5"}, {"ip_addresses": "1709065", "label": "SFR", "test_count": "148966", "aggregate_date": "2015-05-01", "id": "17894", "index_value": "29461", "rating": "2.5"}, {"ip_addresses": "270", "label": "OPDOP SCIC", "test_count": "379", "aggregate_date": "2015-05-01", "id": "103440", "index_value": "25896", "rating": "3.5"}, {"ip_addresses": "271", "label": "Eweka Internet Services B.V.", "test_count": "394", "aggregate_date": "2015-05-01", "id": "57265", "index_value": "23660", "rating": "4.2"}, {"ip_addresses": "1021", "label": "HUB TELECOM SA", "test_count": "450", "aggregate_date": "2015-05-01", "id": "71785", "index_value": "20070", "rating": "2.9"}, {"ip_addresses": "1547929", "label": "Free", "test_count": "212422", "aggregate_date": "2015-05-01", "id": "27704", "index_value": "19334", "rating": "3.3"}, {"ip_addresses": "5592", "label": "Skylogic", "test_count": "931", "aggregate_date": "2015-05-01", "id": "19801", "index_value": "9161", "rating": "2.6"}, {"ip_addresses": "1369", "label": "OpenIP", "test_count": "389", "aggregate_date": "2015-05-01", "id": "33906", "index_value": "7843", "rating": "2.7"}, {"ip_addresses": "7587", "label": "Alsatis", "test_count": "2075", "aggregate_date": "2015-05-01", "id": "28844", "index_value": "6952", "rating": "2.1"}, {"ip_addresses": "22470", "label": "NordNet", "test_count": "451", "aggregate_date": "2015-05-01", "id": "3827", 
"index_value": "5717", "rating": "2.5"}, {"ip_addresses": "5416", "label": "Ozone", "test_count": "2185", "aggregate_date": "2015-05-01", "id": "81129", "index_value": "4013", "rating": "2.3"}], "bandwidth": "40562"}, {"countryCode": "AD", "test_count": "1404", "ip_addresses": "14246", "country": "Andorra", "isps": [{"ip_addresses": "14239", "label": "Servei de Telecomunicacions d'Andorra", "test_count": "1352", "aggregate_date": "2015-05-01", "id": "3332", "index_value": "36731", "rating": "2.8"}], "bandwidth": "37618"}, {"countryCode": "HR", "test_count": "162292", "ip_addresses": "1207044", "country": "Croatia", "isps": [{"ip_addresses": "84425", "label": "VIPnet d.o.o.", "test_count": "128", "aggregate_date": "2015-05-01", "id": "3703", "index_value": "32894", "rating": "3.1"}, {"ip_addresses": "25023", "label": "Amis Telekom d.o.o.", "test_count": "8400", "aggregate_date": "2015-05-01", "id": "15753", "index_value": "24375", "rating": "3.7"}, {"ip_addresses": "23148", "label": "Metronet telekomunikacije d.d.", "test_count": "3287", "aggregate_date": "2015-05-01", "id": "7627", "index_value": "20229", "rating": "3.4"}, {"ip_addresses": "84329", "label": "VIPNET", "test_count": "22197", "aggregate_date": "2015-05-01", "id": "88059", "index_value": "13878", "rating": "3.1"}, {"ip_addresses": "2278", "label": "Magic Net D.o.o.", "test_count": "676", "aggregate_date": "2015-05-01", "id": "76462", "index_value": "13760", "rating": "3.3"}, {"ip_addresses": "99458", "label": "ISKON INTERNET d.d. za informatiku i telekomunikac", "test_count": "27913", "aggregate_date": "2015-05-01", "id": "61503", "index_value": "12785", "rating": "2.8"}, {"ip_addresses": "2525", "label": "BT NET d.o.o. 
za trgovinu i usluge", "test_count": "463", "aggregate_date": "2015-05-01", "id": "67936", "index_value": "12684", "rating": "3.1"}, {"ip_addresses": "1998", "label": "Terrakom d.o.o.", "test_count": "708", "aggregate_date": "2015-05-01", "id": "85013", "index_value": "12407", "rating": "3.8"}, {"ip_addresses": "347852", "label": "Hrvatski Telekom fixed broadband", "test_count": "49147", "aggregate_date": "2015-05-01", "id": "96283", "index_value": "11236", "rating": "2.2"}, {"ip_addresses": "39590", "label": "Hrvatski Telekom mobile broadband", "test_count": "2240", "aggregate_date": "2015-05-01", "id": "96341", "index_value": "9005", "rating": "2.5"}, {"ip_addresses": "32390", "label": "H1 TELEKOM d.d.", "test_count": "3374", "aggregate_date": "2015-05-01", "id": "67729", "index_value": "8000", "rating": "3.0"}, {"ip_addresses": "21796", "label": "Optima Telekom", "test_count": "12530", "aggregate_date": "2015-05-01", "id": "104031", "index_value": "7058", "rating": "2.8"}, {"ip_addresses": "1671", "label": "PRO-PING d.o.o.", "test_count": "803", "aggregate_date": "2015-05-01", "id": "81155", "index_value": "3734", "rating": "3.5"}, {"ip_addresses": "17871", "label": "Tele2", "test_count": "94", "aggregate_date": "2015-05-01", "id": "34200", "index_value": "3386", "rating": "2.9"}], "bandwidth": "12234"}, {"countryCode": "ID", "test_count": "1425972", "ip_addresses": "4580339", "country": "Indonesia", "isps": [{"ip_addresses": "4227", "label": "PT. LINKNET", "test_count": "2383", "aggregate_date": "2015-05-01", "id": "10184", "index_value": "14607", "rating": "3.5"}, {"ip_addresses": "3729", "label": "PT. 
IndoInternet", "test_count": "1372", "aggregate_date": "2015-05-01", "id": "1098", "index_value": "13823", "rating": "3.2"}, {"ip_addresses": "5478", "label": "PT Remala Abadi", "test_count": "13575", "aggregate_date": "2015-05-01", "id": "13486", "index_value": "13009", "rating": "3.6"}, {"ip_addresses": "11415", "label": "Indonesia Online Access", "test_count": "3621", "aggregate_date": "2015-05-01", "id": "7248", "index_value": "12915", "rating": "3.4"}, {"ip_addresses": "5808", "label": "PT. Media Antar Nusa", "test_count": "4440", "aggregate_date": "2015-05-01", "id": "11501", "index_value": "11722", "rating": "3.7"}, {"ip_addresses": "4884", "label": "PT Telekomunikasi Indonesia", "test_count": "2523", "aggregate_date": "2015-05-01", "id": "928", "index_value": "11089", "rating": "2.9"}, {"ip_addresses": "20620", "label": "PT. Cyberindo Aditama", "test_count": "3607", "aggregate_date": "2015-05-01", "id": "6208", "index_value": "9341", "rating": "2.9"}, {"ip_addresses": "334970", "label": "FASTNET", "test_count": "155000", "aggregate_date": "2015-05-01", "id": "14702", "index_value": "8687", "rating": "3.0"}, {"ip_addresses": "57350", "label": "Telkom", "test_count": "18874", "aggregate_date": "2015-05-01", "id": "881", "index_value": "7457", "rating": "2.6"}, {"ip_addresses": "3644", "label": "PT INET GLOBAL INDO", "test_count": "2656", "aggregate_date": "2015-05-01", "id": "15616", "index_value": "6493", "rating": "3.2"}, {"ip_addresses": "19012", "label": "PT INDOSAT MEGA MEDIA", "test_count": "2652", "aggregate_date": "2015-05-01", "id": "9532", "index_value": "6407", "rating": "3.2"}, {"ip_addresses": "43603", "label": "Biznet ISP", "test_count": "22507", "aggregate_date": "2015-05-01", "id": "2306", "index_value": "5956", "rating": "3.3"}, {"ip_addresses": "107568", "label": "First Media", "test_count": "35325", "aggregate_date": "2015-05-01", "id": "38778", "index_value": "5416", "rating": "3.1"}, {"ip_addresses": "5537", "label": "netZAP Wireless 
Broadband Provider", "test_count": "541", "aggregate_date": "2015-05-01", "id": "14053", "index_value": "5410", "rating": "2.4"}, {"ip_addresses": "4559", "label": "Centrin Internet Service Provider", "test_count": "511", "aggregate_date": "2015-05-01", "id": "9410", "index_value": "5138", "rating": "2.5"}, {"ip_addresses": "1795660", "label": "PT Telkom Indonesia", "test_count": "748699", "aggregate_date": "2015-05-01", "id": "37723", "index_value": "5002", "rating": "2.8"}, {"ip_addresses": "816328", "label": "Telkomsel", "test_count": "30915", "aggregate_date": "2015-05-01", "id": "67639", "index_value": "4937", "rating": "3.3"}, {"ip_addresses": "57524", "label": "PT Indosat Tbk.", "test_count": "22238", "aggregate_date": "2015-05-01", "id": "11151", "index_value": "4389", "rating": "3.1"}, {"ip_addresses": "4908", "label": "Jasa Telematika Terpadu ( Jasatel )", "test_count": "1264", "aggregate_date": "2015-05-01", "id": "99712", "index_value": "2456", "rating": "2.8"}, {"ip_addresses": "17910", "label": "PT. 
Wireless Indonesia", "test_count": "9571", "aggregate_date": "2015-05-01", "id": "10791", "index_value": "1791", "rating": "2.8"}], "bandwidth": "6496"}, {"countryCode": "IL", "test_count": "227783", "ip_addresses": "2727988", "country": "Israel", "isps": [{"ip_addresses": "10284", "label": "ilan", "test_count": "431", "aggregate_date": "2015-05-01", "id": "86293", "index_value": "63656", "rating": "3.8"}, {"ip_addresses": "118917", "label": "Hot-Net internet services Ltd.", "test_count": "21984", "aggregate_date": "2015-05-01", "id": "66375", "index_value": "40871", "rating": "3.0"}, {"ip_addresses": "2932", "label": "Elron Technologies", "test_count": "8", "aggregate_date": "2015-05-01", "id": "1764", "index_value": "37248", "rating": "3.6"}, {"ip_addresses": "26826", "label": "XFone", "test_count": "8731", "aggregate_date": "2015-05-01", "id": "54112", "index_value": "35085", "rating": "3.5"}, {"ip_addresses": "637884", "label": "Bezeq International", "test_count": "63398", "aggregate_date": "2015-05-01", "id": "42898", "index_value": "28351", "rating": "3.3"}, {"ip_addresses": "8726", "label": "Internet Society of Israel", "test_count": "1336", "aggregate_date": "2015-05-01", "id": "1395", "index_value": "27889", "rating": "3.4"}, {"ip_addresses": "127305", "label": "012 Smile", "test_count": "41347", "aggregate_date": "2015-05-01", "id": "99521", "index_value": "27337", "rating": "3.1"}, {"ip_addresses": "47869", "label": "Orange Israel", "test_count": "4626", "aggregate_date": "2015-05-01", "id": "93655", "index_value": "20767", "rating": "3.3"}, {"ip_addresses": "16629", "label": "Triple C Computation Ltd.", "test_count": "3192", "aggregate_date": "2015-05-01", "id": "43125", "index_value": "20339", "rating": "3.6"}, {"ip_addresses": "638", "label": "Amit Net Telecom LTD", "test_count": "1147", "aggregate_date": "2015-05-01", "id": "98155", "index_value": "14691", "rating": "2.4"}, {"ip_addresses": "169003", "label": "Cellcom Israel", "test_count": "330", 
"aggregate_date": "2015-05-01", "id": "17488", "index_value": "6381", "rating": "2.2"}, {"ip_addresses": "1994", "label": "HOT Mobile Ltd.", "test_count": "93", "aggregate_date": "2015-05-01", "id": "96609", "index_value": "6236", "rating": "2.7"}, {"ip_addresses": "670", "label": "Golan Telecom Ltd.", "test_count": "170", "aggregate_date": "2015-05-01", "id": "76123", "index_value": "5577", "rating": "3.6"}, {"ip_addresses": "2978", "label": "Cellcom/Netvision", "test_count": "413", "aggregate_date": "2015-05-01", "id": "101232", "index_value": "3146", "rating": "2.8"}], "bandwidth": "29444"}, {"countryCode": "FI", "test_count": "277500", "ip_addresses": "1898459", "country": "Finland", "isps": [{"ip_addresses": "24683", "label": "CSC - Tieteen tietotekniikan keskus Oy", "test_count": "2123", "aggregate_date": "2015-05-01", "id": "38747", "index_value": "147763", "rating": "4.5"}, {"ip_addresses": "79140", "label": "Anvia", "test_count": "14266", "aggregate_date": "2015-05-01", "id": "39621", "index_value": "71483", "rating": "3.0"}, {"ip_addresses": "5705", "label": "Netplaza Oy", "test_count": "2391", "aggregate_date": "2015-05-01", "id": "12153", "index_value": "60859", "rating": "4.0"}, {"ip_addresses": "5354", "label": "Vakka-Suomen Puhelin Oy", "test_count": "338", "aggregate_date": "2015-05-01", "id": "9371", "index_value": "57424", "rating": "3.0"}, {"ip_addresses": "747500", "label": "DNA Oy", "test_count": "49855", "aggregate_date": "2015-05-01", "id": "39622", "index_value": "55300", "rating": "3.0"}, {"ip_addresses": "14620", "label": "Mikkelin Puhelin Oyj", "test_count": "1528", "aggregate_date": "2015-05-01", "id": "3304", "index_value": "55153", "rating": "3.1"}, {"ip_addresses": "3892", "label": "Paraisten Puhelin Oy", "test_count": "397", "aggregate_date": "2015-05-01", "id": "25509", "index_value": "54295", "rating": "3.0"}, {"ip_addresses": "21125", "label": "Kaisanet Oy", "test_count": "4824", "aggregate_date": "2015-05-01", "id": "54050", 
"index_value": "53491", "rating": "3.4"}, {"ip_addresses": "7085", "label": "FSP Net", "test_count": "400", "aggregate_date": "2015-05-01", "id": "9443", "index_value": "52177", "rating": "3.2"}, {"ip_addresses": "7296", "label": "Karjaan Puhelin Oy", "test_count": "802", "aggregate_date": "2015-05-01", "id": "26613", "index_value": "41317", "rating": "2.9"}, {"ip_addresses": "776156", "label": "TeliaSonera Finland", "test_count": "57786", "aggregate_date": "2015-05-01", "id": "751", "index_value": "34418", "rating": "2.8"}, {"ip_addresses": "15188", "label": "Jakobstadsnejdens Telefon Ab", "test_count": "1489", "aggregate_date": "2015-05-01", "id": "30118", "index_value": "33779", "rating": "3.0"}, {"ip_addresses": "19892", "label": "SSP Yhtiot Oy", "test_count": "2328", "aggregate_date": "2015-05-01", "id": "58557", "index_value": "33394", "rating": "3.1"}, {"ip_addresses": "21549", "label": "AinaCom Oy", "test_count": "305", "aggregate_date": "2015-05-01", "id": "14941", "index_value": "31707", "rating": "3.0"}, {"ip_addresses": "7071", "label": "Blue Lake Communications Oy", "test_count": "716", "aggregate_date": "2015-05-01", "id": "64799", "index_value": "30889", "rating": "3.5"}, {"ip_addresses": "6071", "label": "Pohjois-Hameen Puhelin Oy", "test_count": "599", "aggregate_date": "2015-05-01", "id": "23447", "index_value": "30381", "rating": "2.7"}, {"ip_addresses": "18350", "label": "Nebula Oy", "test_count": "3199", "aggregate_date": "2015-05-01", "id": "2095", "index_value": "25957", "rating": "3.6"}, {"ip_addresses": "6097", "label": "Suomi Communications Oy", "test_count": "579", "aggregate_date": "2015-05-01", "id": "6304", "index_value": "21804", "rating": "3.6"}, {"ip_addresses": "3789", "label": "Etela-Satakunnan Puhelin Oy", "test_count": "518", "aggregate_date": "2015-05-01", "id": "48507", "index_value": "19165", "rating": "2.5"}, {"ip_addresses": "126311", "label": "Elisa Oyj Mobile", "test_count": "1048", "aggregate_date": "2015-05-01", "id": 
"80331", "index_value": "11724", "rating": "2.6"}], "bandwidth": "36399"}, {"countryCode": "HU", "test_count": "412067", "ip_addresses": "2768281", "country": "Hungary", "isps": [{"ip_addresses": "215607", "label": "DIGI Tavkozlesi es Szolgaltato Kft.", "test_count": "53363", "aggregate_date": "2015-05-01", "id": "98017", "index_value": "77422", "rating": "4.1"}, {"ip_addresses": "290804", "label": "UPC Hungary", "test_count": "74011", "aggregate_date": "2015-05-01", "id": "90074", "index_value": "60425", "rating": "3.6"}, {"ip_addresses": "10130", "label": "Parisat Kft.", "test_count": "1326", "aggregate_date": "2015-05-01", "id": "38800", "index_value": "48008", "rating": "3.9"}, {"ip_addresses": "52621", "label": "Tarr Kft.", "test_count": "10868", "aggregate_date": "2015-05-01", "id": "16822", "index_value": "23093", "rating": "3.6"}, {"ip_addresses": "281311", "label": "Magyar Telekom", "test_count": "109863", "aggregate_date": "2015-05-01", "id": "101583", "index_value": "23088", "rating": "3.0"}, {"ip_addresses": "18546", "label": "Dravanet Co Ltd.", "test_count": "1706", "aggregate_date": "2015-05-01", "id": "67627", "index_value": "22578", "rating": "3.1"}, {"ip_addresses": "8864", "label": "Naracom Kft.", "test_count": "1868", "aggregate_date": "2015-05-01", "id": "41938", "index_value": "22450", "rating": "3.0"}, {"ip_addresses": "9699", "label": "VidaNet Cable Television Provider Ltd.", "test_count": "1075", "aggregate_date": "2015-05-01", "id": "38771", "index_value": "22303", "rating": "3.3"}, {"ip_addresses": "25615", "label": "ViDaNet Cabletelevision Provider Ltd.", "test_count": "3263", "aggregate_date": "2015-05-01", "id": "38758", "index_value": "22068", "rating": "3.3"}, {"ip_addresses": "52362", "label": "PR-TELECOM Rt.", "test_count": "6960", "aggregate_date": "2015-05-01", "id": "8477", "index_value": "21955", "rating": "3.2"}, {"ip_addresses": "35993", "label": "GTS Hungary Telecommunications Limited Liability C", "test_count": "2139", 
"aggregate_date": "2015-05-01", "id": "51884", "index_value": "20035", "rating": "3.4"}, {"ip_addresses": "268288", "label": "Invitel Tavkozlesi", "test_count": "33473", "aggregate_date": "2015-05-01", "id": "47197", "index_value": "19389", "rating": "3.0"}, {"ip_addresses": "40271", "label": "Telenor Hungary plc", "test_count": "2055", "aggregate_date": "2015-05-01", "id": "95727", "index_value": "19248", "rating": "2.5"}, {"ip_addresses": "25714", "label": "Deninet KFT", "test_count": "5934", "aggregate_date": "2015-05-01", "id": "28979", "index_value": "18400", "rating": "3.7"}, {"ip_addresses": "52342", "label": "Telenor Hungary", "test_count": "809", "aggregate_date": "2015-05-01", "id": "80337", "index_value": "18025", "rating": "2.9"}, {"ip_addresses": "15279", "label": "ANTENNA HUNGARIA Magyar Musorszoro es Radiohirkozl", "test_count": "3396", "aggregate_date": "2015-05-01", "id": "54124", "index_value": "16966", "rating": "3.2"}, {"ip_addresses": "17882", "label": "OPTICON Telekommunikacios Halozati Szolgaltato", "test_count": "2901", "aggregate_date": "2015-05-01", "id": "3554", "index_value": "15001", "rating": "3.5"}, {"ip_addresses": "7457", "label": "Externet Nyrt", "test_count": "1201", "aggregate_date": "2015-05-01", "id": "30374", "index_value": "14991", "rating": "3.0"}, {"ip_addresses": "125004", "label": "Vodafone Hungary", "test_count": "1814", "aggregate_date": "2015-05-01", "id": "11858", "index_value": "12392", "rating": "2.7"}, {"ip_addresses": "244075", "label": "UPC Magyarorszag", "test_count": "2499", "aggregate_date": "2015-05-01", "id": "527", "index_value": "7149", "rating": "3.5"}], "bandwidth": "36177"}, {"countryCode": "CR", "test_count": "102312", "ip_addresses": "272458", "country": "Costa Rica", "isps": [{"ip_addresses": "1457", "label": "American Data Networks", "test_count": "917", "aggregate_date": "2015-05-01", "id": "29374", "index_value": "9747", "rating": "3.7"}, {"ip_addresses": "13664", "label": "RADIOGRAFICA 
COSTARRICENSE", "test_count": "3505", "aggregate_date": "2015-05-01", "id": "49714", "index_value": "8320", "rating": "2.9"}, {"ip_addresses": "51002", "label": "Cable Tica", "test_count": "10063", "aggregate_date": "2015-05-01", "id": "32568", "index_value": "6856", "rating": "2.7"}, {"ip_addresses": "7340", "label": "Millicom Cable Costa Rica S.A.", "test_count": "7000", "aggregate_date": "2015-05-01", "id": "102908", "index_value": "6829", "rating": "2.2"}, {"ip_addresses": "97583", "label": "Amnet Cable Costa Rica", "test_count": "20481", "aggregate_date": "2015-05-01", "id": "43095", "index_value": "5456", "rating": "2.4"}, {"ip_addresses": "25360", "label": "Grupo ICE", "test_count": "24536", "aggregate_date": "2015-05-01", "id": "104033", "index_value": "5329", "rating": "2.6"}, {"ip_addresses": "5547", "label": "Cooperativa de Electrificaci\u00f3n Rural de San Carlos", "test_count": "2024", "aggregate_date": "2015-05-01", "id": "70309", "index_value": "4682", "rating": "3.2"}, {"ip_addresses": "6007", "label": "Telecable Economico S.A.", "test_count": "8307", "aggregate_date": "2015-05-01", "id": "42151", "index_value": "4315", "rating": "2.7"}, {"ip_addresses": "899", "label": "Telecable Residencial", "test_count": "1227", "aggregate_date": "2015-05-01", "id": "87510", "index_value": "4205", "rating": "2.8"}, {"ip_addresses": "11047", "label": "SAN JOSE", "test_count": "1104", "aggregate_date": "2015-05-01", "id": "75051", "index_value": "4165", "rating": "3.0"}, {"ip_addresses": "255", "label": "Star Fire", "test_count": "850", "aggregate_date": "2015-05-01", "id": "87933", "index_value": "3679", "rating": "3.2"}, {"ip_addresses": "2941", "label": "HEREDIA", "test_count": "205", "aggregate_date": "2015-05-01", "id": "75137", "index_value": "3294", "rating": "3.0"}, {"ip_addresses": "3054", "label": "CURRIDABAT", "test_count": "558", "aggregate_date": "2015-05-01", "id": "75170", "index_value": "3276", "rating": "3.0"}, {"ip_addresses": "2311", "label": 
"EL ALTO", "test_count": "471", "aggregate_date": "2015-05-01", "id": "75099", "index_value": "3020", "rating": "2.6"}, {"ip_addresses": "1772", "label": "Tres Rios", "test_count": "317", "aggregate_date": "2015-05-01", "id": "75095", "index_value": "2918", "rating": "3.0"}, {"ip_addresses": "872", "label": "Claro CR Telecomunicaciones S.A.", "test_count": "4883", "aggregate_date": "2015-05-01", "id": "76432", "index_value": "2654", "rating": "2.8"}, {"ip_addresses": "1362", "label": "IBW Comunicaciones S.A", "test_count": "2134", "aggregate_date": "2015-05-01", "id": "70285", "index_value": "2012", "rating": "2.6"}, {"ip_addresses": "2424", "label": "NORTE", "test_count": "1823", "aggregate_date": "2015-05-01", "id": "75084", "index_value": "1887", "rating": "2.9"}], "bandwidth": "5435"}, {"countryCode": "GT", "test_count": "86004", "ip_addresses": "271128", "country": "Guatemala", "isps": [{"ip_addresses": "12912", "label": "TELEFONICA MOVILES GUATEMALA S.A.", "test_count": "2414", "aggregate_date": "2015-05-01", "id": "47195", "index_value": "8109", "rating": "2.1"}, {"ip_addresses": "8386", "label": "COMCEL GUATEMALA S.A.", "test_count": "12442", "aggregate_date": "2015-05-01", "id": "234", "index_value": "6325", "rating": "2.8"}, {"ip_addresses": "4987", "label": "Navega.com S.A.", "test_count": "2910", "aggregate_date": "2015-05-01", "id": "823", "index_value": "5999", "rating": "3.2"}, {"ip_addresses": "592", "label": "Columbus Networks Guatemala", "test_count": "641", "aggregate_date": "2015-05-01", "id": "81198", "index_value": "5866", "rating": "3.3"}, {"ip_addresses": "5501", "label": "CyberNet de Guatemala S.A.", "test_count": "1988", "aggregate_date": "2015-05-01", "id": "687", "index_value": "4571", "rating": "2.6"}, {"ip_addresses": "230735", "label": "Telgua", "test_count": "54880", "aggregate_date": "2015-05-01", "id": "925", "index_value": "4404", "rating": "2.7"}], "bandwidth": "4881"}, {"countryCode": "IR", "test_count": "100181", 
"ip_addresses": "1540956", "country": "Iran, Islamic Republic of", "isps": [{"ip_addresses": "8937", "label": "Research Center of Theoretical Physics & Mathemati", "test_count": "399", "aggregate_date": "2015-05-01", "id": "84307", "index_value": "13315", "rating": "4.0"}, {"ip_addresses": "8461", "label": "Afranet", "test_count": "49", "aggregate_date": "2015-05-01", "id": "12544", "index_value": "7850", "rating": "3.1"}, {"ip_addresses": "15854", "label": "PJSC Fars Telecommunication Company", "test_count": "1408", "aggregate_date": "2015-05-01", "id": "80679", "index_value": "6828", "rating": "3.0"}, {"ip_addresses": "27960", "label": "Pars Online PJS", "test_count": "7894", "aggregate_date": "2015-05-01", "id": "102899", "index_value": "5174", "rating": "3.1"}, {"ip_addresses": "52511", "label": "Dadeh Gostar Asr Novin P.J.S. Co.", "test_count": "3052", "aggregate_date": "2015-05-01", "id": "65382", "index_value": "4694", "rating": "3.6"}, {"ip_addresses": "41608", "label": "Asiatech Data Transfer Inc. PLC", "test_count": "935", "aggregate_date": "2015-05-01", "id": "97530", "index_value": "4683", "rating": "3.2"}, {"ip_addresses": "12728", "label": "Andishe Sabz Khazar Co. 
P.J.S.", "test_count": "237", "aggregate_date": "2015-05-01", "id": "18368", "index_value": "4272", "rating": "2.5"}, {"ip_addresses": "10985", "label": "Pishgaman Toseeh Ertebatat Company (Private Joint-", "test_count": "933", "aggregate_date": "2015-05-01", "id": "81174", "index_value": "3991", "rating": "3.0"}, {"ip_addresses": "15141", "label": "Parsonline", "test_count": "2565", "aggregate_date": "2015-05-01", "id": "93997", "index_value": "3958", "rating": "2.9"}, {"ip_addresses": "16492", "label": "Bozorg Net-e Aria", "test_count": "456", "aggregate_date": "2015-05-01", "id": "34540", "index_value": "3927", "rating": "3.1"}, {"ip_addresses": "23344", "label": "Respina Networks & Beyond PJSC", "test_count": "2036", "aggregate_date": "2015-05-01", "id": "39678", "index_value": "3907", "rating": "3.2"}, {"ip_addresses": "42377", "label": "Esfahan Telecommunication Company (P.J.S.)", "test_count": "1380", "aggregate_date": "2015-05-01", "id": "79458", "index_value": "3869", "rating": "2.9"}, {"ip_addresses": "177460", "label": "Aria Shatel Company Ltd", "test_count": "849", "aggregate_date": "2015-05-01", "id": "90084", "index_value": "3525", "rating": "3.2"}, {"ip_addresses": "27597", "label": "Shahrad Net Company Ltd.", "test_count": "731", "aggregate_date": "2015-05-01", "id": "38769", "index_value": "3314", "rating": "3.4"}, {"ip_addresses": "6462", "label": "Pishgaman Kavir Yazd Cooperative", "test_count": "272", "aggregate_date": "2015-05-01", "id": "2655", "index_value": "3278", "rating": "2.8"}, {"ip_addresses": "33190", "label": "GOSTARESH-E-ERTEBATAT-E MABNA COMPANY (Private Joi", "test_count": "306", "aggregate_date": "2015-05-01", "id": "79301", "index_value": "3087", "rating": "2.9"}, {"ip_addresses": "32446", "label": "Asre Enteghal Dadeha", "test_count": "135", "aggregate_date": "2015-05-01", "id": "35743", "index_value": "2883", "rating": "3.4"}, {"ip_addresses": "12040", "label": "Sepanta Communication Development Co. 
Ltd", "test_count": "132", "aggregate_date": "2015-05-01", "id": "2863", "index_value": "2626", "rating": "3.0"}, {"ip_addresses": "97204", "label": "Iran Telecommunication Company PJS", "test_count": "6134", "aggregate_date": "2015-05-01", "id": "48113", "index_value": "2356", "rating": "2.5"}, {"ip_addresses": "144994", "label": "Mobin Net Communication Company (Private Joint Sto", "test_count": "60", "aggregate_date": "2015-05-01", "id": "45413", "index_value": "1297", "rating": "2.6"}], "bandwidth": "5675"}, {"countryCode": "SI", "test_count": "91535", "ip_addresses": "826868", "country": "Slovenia", "isps": [{"ip_addresses": "26098", "label": "Telemach Rotovz d.d.", "test_count": "8308", "aggregate_date": "2015-05-01", "id": "42036", "index_value": "45578", "rating": "3.3"}, {"ip_addresses": "46666", "label": "Telemach Communication Services d.o.o.", "test_count": "10000", "aggregate_date": "2015-05-01", "id": "3283", "index_value": "36772", "rating": "3.3"}, {"ip_addresses": "66192", "label": "Telekom Slovenije, d.d.", "test_count": "396", "aggregate_date": "2015-05-01", "id": "69161", "index_value": "35703", "rating": "2.8"}, {"ip_addresses": "117468", "label": "T-2 Access Network", "test_count": "12594", "aggregate_date": "2015-05-01", "id": "32359", "index_value": "33618", "rating": "3.9"}, {"ip_addresses": "9135", "label": "Telemach d.o.o.", "test_count": "4845", "aggregate_date": "2015-05-01", "id": "63452", "index_value": "30519", "rating": "3.2"}, {"ip_addresses": "50967", "label": "Si.Mobil d.d.", "test_count": "407", "aggregate_date": "2015-05-01", "id": "11930", "index_value": "27683", "rating": "2.9"}, {"ip_addresses": "5781", "label": "TELEING d.o.o.", "test_count": "816", "aggregate_date": "2015-05-01", "id": "52402", "index_value": "17133", "rating": "3.4"}, {"ip_addresses": "30107", "label": "TUSMOBIL d.o.o.", "test_count": "900", "aggregate_date": "2015-05-01", "id": "17363", "index_value": "17100", "rating": "3.2"}, {"ip_addresses": "16179", 
"label": "SOFTNET d.o.o.", "test_count": "1349", "aggregate_date": "2015-05-01", "id": "37747", "index_value": "15117", "rating": "3.2"}, {"ip_addresses": "3237", "label": "KRS Networks", "test_count": "546", "aggregate_date": "2015-05-01", "id": "42321", "index_value": "14979", "rating": "3.3"}, {"ip_addresses": "86389", "label": "Amis", "test_count": "7625", "aggregate_date": "2015-05-01", "id": "6538", "index_value": "14808", "rating": "3.4"}, {"ip_addresses": "280953", "label": "Telekom Slovenije", "test_count": "21274", "aggregate_date": "2015-05-01", "id": "25024", "index_value": "12650", "rating": "3.0"}, {"ip_addresses": "1282", "label": "TELESAT, d.o.o., Jesenice, operater kabelsko distr", "test_count": "243", "aggregate_date": "2015-05-01", "id": "57013", "index_value": "10815", "rating": "3.0"}, {"ip_addresses": "3268", "label": "Zavod Kabelska televizija Nova Gorica", "test_count": "712", "aggregate_date": "2015-05-01", "id": "49328", "index_value": "7722", "rating": "2.8"}, {"ip_addresses": "1169", "label": "POLANS, telekomunikacijske storitve d.o.o.", "test_count": "304", "aggregate_date": "2015-05-01", "id": "68728", "index_value": "4324", "rating": "2.9"}, {"ip_addresses": "3873", "label": "Ario, d.o.o.", "test_count": "353", "aggregate_date": "2015-05-01", "id": "29736", "index_value": "2288", "rating": "3.4"}], "bandwidth": "23140"}, {"countryCode": "LV", "test_count": "110813", "ip_addresses": "697339", "country": "Latvia", "isps": [{"ip_addresses": "37060", "label": "JSC BALTICOM", "test_count": "6173", "aggregate_date": "2015-05-01", "id": "279", "index_value": "91713", "rating": "4.0"}, {"ip_addresses": "37605", "label": "SIA Baltcom TV", "test_count": "11243", "aggregate_date": "2015-05-01", "id": "83197", "index_value": "88981", "rating": "3.3"}, {"ip_addresses": "4749", "label": "Elektrons K", "test_count": "748", "aggregate_date": "2015-05-01", "id": "29840", "index_value": "82566", "rating": "3.4"}, {"ip_addresses": "2649", "label": 
"Telenet SIA", "test_count": "1212", "aggregate_date": "2015-05-01", "id": "71854", "index_value": "79224", "rating": "4.1"}, {"ip_addresses": "11627", "label": "DauTKom TV", "test_count": "1244", "aggregate_date": "2015-05-01", "id": "1496", "index_value": "69385", "rating": "3.6"}, {"ip_addresses": "4563", "label": "Sia Mits Lv", "test_count": "1013", "aggregate_date": "2015-05-01", "id": "73259", "index_value": "64384", "rating": "3.6"}, {"ip_addresses": "6638", "label": "OSTKOM SIA", "test_count": "1663", "aggregate_date": "2015-05-01", "id": "16212", "index_value": "57206", "rating": "3.3"}, {"ip_addresses": "6735", "label": "LATNET SERVISS Ltd.", "test_count": "1292", "aggregate_date": "2015-05-01", "id": "49620", "index_value": "54107", "rating": "3.4"}, {"ip_addresses": "297932", "label": "Lattelecom", "test_count": "40676", "aggregate_date": "2015-05-01", "id": "40234", "index_value": "53685", "rating": "3.3"}, {"ip_addresses": "1880", "label": "NEOLAIN IT SIA", "test_count": "430", "aggregate_date": "2015-05-01", "id": "75691", "index_value": "52635", "rating": "3.6"}, {"ip_addresses": "8168", "label": "Sia Pronets", "test_count": "1292", "aggregate_date": "2015-05-01", "id": "38836", "index_value": "52456", "rating": "3.5"}, {"ip_addresses": "13879", "label": "LATNET ISP", "test_count": "1903", "aggregate_date": "2015-05-01", "id": "17962", "index_value": "42841", "rating": "3.6"}, {"ip_addresses": "11014", "label": "LIVAS NET", "test_count": "2553", "aggregate_date": "2015-05-01", "id": "1519", "index_value": "40157", "rating": "2.9"}, {"ip_addresses": "26489", "label": "Telia Latvija SIA", "test_count": "2600", "aggregate_date": "2015-05-01", "id": "747", "index_value": "34196", "rating": "3.4"}, {"ip_addresses": "4860", "label": "SIA Stream Networks", "test_count": "445", "aggregate_date": "2015-05-01", "id": "32199", "index_value": "28115", "rating": "3.6"}, {"ip_addresses": "2703", "label": "SIA Skatvis", "test_count": "352", "aggregate_date": 
"2015-05-01", "id": "1481", "index_value": "24087", "rating": "3.8"}, {"ip_addresses": "2474", "label": "Microlines", "test_count": "426", "aggregate_date": "2015-05-01", "id": "25781", "index_value": "22151", "rating": "3.3"}, {"ip_addresses": "7211", "label": "Latvenergo", "test_count": "1008", "aggregate_date": "2015-05-01", "id": "1419", "index_value": "19449", "rating": "3.3"}, {"ip_addresses": "2781", "label": "Latvian Mobile Telephone Co.", "test_count": "16513", "aggregate_date": "2015-05-01", "id": "67619", "index_value": "14297", "rating": "3.2"}, {"ip_addresses": "85366", "label": "Tele2", "test_count": "1153", "aggregate_date": "2015-05-01", "id": "34200", "index_value": "6837", "rating": "2.9"}], "bandwidth": "48400"}, {"countryCode": "CL", "test_count": "537933", "ip_addresses": "2849256", "country": "Chile", "isps": [{"ip_addresses": "8337", "label": "CTC. CORP S.A. (TELEFONICA EMPRESAS)", "test_count": "4514", "aggregate_date": "2015-05-01", "id": "3343", "index_value": "30427", "rating": "2.8"}, {"ip_addresses": "1064179", "label": "VTR Banda Ancha", "test_count": "196629", "aggregate_date": "2015-05-01", "id": "871", "index_value": "22850", "rating": "2.6"}, {"ip_addresses": "23151", "label": "Telmex Chile Internet S.A.", "test_count": "6655", "aggregate_date": "2015-05-01", "id": "22950", "index_value": "20039", "rating": "2.8"}, {"ip_addresses": "61268", "label": "Gtd Internet S.A.", "test_count": "12615", "aggregate_date": "2015-05-01", "id": "3613", "index_value": "19937", "rating": "2.9"}, {"ip_addresses": "33532", "label": "Manquehuenet", "test_count": "4267", "aggregate_date": "2015-05-01", "id": "1034", "index_value": "18545", "rating": "2.9"}, {"ip_addresses": "3609", "label": "IFX NETWORKS COLOMBIA", "test_count": "733", "aggregate_date": "2015-05-01", "id": "568", "index_value": "17666", "rating": "3.0"}, {"ip_addresses": "9441", "label": "Chile S.A.", "test_count": "3991", "aggregate_date": "2015-05-01", "id": "86245", "index_value": 
"17050", "rating": "2.8"}, {"ip_addresses": "41439", "label": "ENTEL CHILE S.A.", "test_count": "18440", "aggregate_date": "2015-05-01", "id": "3340", "index_value": "14794", "rating": "2.8"}, {"ip_addresses": "15884", "label": "Entel Internet", "test_count": "5488", "aggregate_date": "2015-05-01", "id": "3336", "index_value": "14048", "rating": "2.7"}, {"ip_addresses": "331839", "label": "Telmex", "test_count": "57831", "aggregate_date": "2015-05-01", "id": "22942", "index_value": "13343", "rating": "2.7"}, {"ip_addresses": "70481", "label": "CLARO CHILE S.A.", "test_count": "3334", "aggregate_date": "2015-05-01", "id": "33051", "index_value": "12262", "rating": "2.2"}, {"ip_addresses": "213552", "label": "Telefonica Empresas", "test_count": "32175", "aggregate_date": "2015-05-01", "id": "867", "index_value": "11537", "rating": "2.5"}, {"ip_addresses": "23911", "label": "Pacifico Cable S.A.", "test_count": "10427", "aggregate_date": "2015-05-01", "id": "23835", "index_value": "8774", "rating": "2.2"}, {"ip_addresses": "122872", "label": "Telefonica Chile", "test_count": "6672", "aggregate_date": "2015-05-01", "id": "15054", "index_value": "8744", "rating": "2.2"}, {"ip_addresses": "646181", "label": "Movistar Chile", "test_count": "113554", "aggregate_date": "2015-05-01", "id": "94547", "index_value": "7759", "rating": "2.2"}, {"ip_addresses": "4172", "label": "CMET SACI", "test_count": "3439", "aggregate_date": "2015-05-01", "id": "4136", "index_value": "4963", "rating": "2.0"}, {"ip_addresses": "3707", "label": "Stel Chile S.A.", "test_count": "3521", "aggregate_date": "2015-05-01", "id": "30603", "index_value": "4020", "rating": "2.4"}, {"ip_addresses": "183097", "label": "Entel", "test_count": "7355", "aggregate_date": "2015-05-01", "id": "32355", "index_value": "3774", "rating": "2.3"}, {"ip_addresses": "2516", "label": "TV Cable Loncomilla S.A.", "test_count": "703", "aggregate_date": "2015-05-01", "id": "73551", "index_value": "3769", "rating": "2.1"}, 
{"ip_addresses": "3877", "label": "Plug and play Net S.A.", "test_count": "259", "aggregate_date": "2015-05-01", "id": "18080", "index_value": "3612", "rating": "2.2"}], "bandwidth": "15292"}, {"countryCode": "AR", "test_count": "630406", "ip_addresses": "4598483", "country": "Argentina", "isps": [{"ip_addresses": "16801", "label": "NSS S.A.", "test_count": "9102", "aggregate_date": "2015-05-01", "id": "2807", "index_value": "11561", "rating": "3.6"}, {"ip_addresses": "600030", "label": "Cablevision", "test_count": "152793", "aggregate_date": "2015-05-01", "id": "87740", "index_value": "9969", "rating": "2.9"}, {"ip_addresses": "205781", "label": "Telecentro", "test_count": "36623", "aggregate_date": "2015-05-01", "id": "3348", "index_value": "9878", "rating": "2.7"}, {"ip_addresses": "217230", "label": "Prima S.A.", "test_count": "3542", "aggregate_date": "2015-05-01", "id": "872", "index_value": "9850", "rating": "3.2"}, {"ip_addresses": "17124", "label": "BVNET S.A.", "test_count": "2102", "aggregate_date": "2015-05-01", "id": "4237", "index_value": "9400", "rating": "3.5"}, {"ip_addresses": "92416", "label": "Techtel LMDS Comunicaciones Interactivas S.A.", "test_count": "16184", "aggregate_date": "2015-05-01", "id": "3362", "index_value": "6944", "rating": "3.0"}, {"ip_addresses": "111201", "label": "AMX Argentina S.A.", "test_count": "8544", "aggregate_date": "2015-05-01", "id": "34748", "index_value": "6344", "rating": "2.4"}, {"ip_addresses": "40367", "label": "Telecom Personal", "test_count": "2571", "aggregate_date": "2015-05-01", "id": "87747", "index_value": "5976", "rating": "2.5"}, {"ip_addresses": "35588", "label": "Ver TV S.A.", "test_count": "5056", "aggregate_date": "2015-05-01", "id": "33038", "index_value": "5968", "rating": "2.3"}, {"ip_addresses": "10602", "label": "Telef\u00f3nica M\u00f3viles Argentina S.A. 
(Movistar Argent", "test_count": "1598", "aggregate_date": "2015-05-01", "id": "69796", "index_value": "5459", "rating": "2.6"}, {"ip_addresses": "1289310", "label": "Telecom Argentina", "test_count": "107856", "aggregate_date": "2015-05-01", "id": "242", "index_value": "5316", "rating": "2.3"}, {"ip_addresses": "32380", "label": "Gigared S.A.", "test_count": "12283", "aggregate_date": "2015-05-01", "id": "3349", "index_value": "4906", "rating": "2.7"}, {"ip_addresses": "19408", "label": "MERCO COMUNICACIONES", "test_count": "6367", "aggregate_date": "2015-05-01", "id": "33059", "index_value": "4378", "rating": "2.6"}, {"ip_addresses": "15083", "label": "ARLINK S.A.", "test_count": "5037", "aggregate_date": "2015-05-01", "id": "38677", "index_value": "4126", "rating": "2.5"}, {"ip_addresses": "10746", "label": "Teledifusora S.A.", "test_count": "6029", "aggregate_date": "2015-05-01", "id": "15048", "index_value": "3938", "rating": "2.5"}, {"ip_addresses": "1228849", "label": "Telefonica de Argentina", "test_count": "90086", "aggregate_date": "2015-05-01", "id": "163", "index_value": "3442", "rating": "2.3"}, {"ip_addresses": "8616", "label": "Red Intercable Digital S.A.", "test_count": "3482", "aggregate_date": "2015-05-01", "id": "33298", "index_value": "3130", "rating": "3.0"}, {"ip_addresses": "17188", "label": "Alpha Tel S.A.", "test_count": "1819", "aggregate_date": "2015-05-01", "id": "46075", "index_value": "2169", "rating": "2.0"}, {"ip_addresses": "16332", "label": "SkyOnline de Argentina S.A.", "test_count": "2640", "aggregate_date": "2015-05-01", "id": "75082", "index_value": "1695", "rating": "1.9"}, {"ip_addresses": "9170", "label": "Telefon\u00eda P\u00fablica y Privada S.A.", "test_count": "2256", "aggregate_date": "2015-05-01", "id": "28349", "index_value": "1157", "rating": "2.5"}], "bandwidth": "6390"}, {"countryCode": "TR", "test_count": "587461", "ip_addresses": "6204890", "country": "Turkey", "isps": [{"ip_addresses": "3262", "label": "Grid 
Bilisim Teknolojileri A.S.", "test_count": "886", "aggregate_date": "2015-05-01", "id": "12964", "index_value": "34607", "rating": "3.8"}, {"ip_addresses": "374169", "label": "TELLCOM ILETISIM HIZMETLERI A.S.", "test_count": "38439", "aggregate_date": "2015-05-01", "id": "23007", "index_value": "22080", "rating": "3.3"}, {"ip_addresses": "4088", "label": "SolNet", "test_count": "870", "aggregate_date": "2015-05-01", "id": "4815", "index_value": "19118", "rating": "3.4"}, {"ip_addresses": "120479", "label": "Superonline Iletisim Hizmetleri A.S.", "test_count": "34816", "aggregate_date": "2015-05-01", "id": "96850", "index_value": "18857", "rating": "3.3"}, {"ip_addresses": "5883", "label": "Kibris Mobile Telekomunikasyon Ltd.", "test_count": "677", "aggregate_date": "2015-05-01", "id": "87783", "index_value": "18364", "rating": "2.6"}, {"ip_addresses": "158539", "label": "Turksat Uydu-Net Internet", "test_count": "52862", "aggregate_date": "2015-05-01", "id": "93036", "index_value": "17523", "rating": "3.0"}, {"ip_addresses": "18405", "label": "Global Iletisim Hizmetleri A.S.", "test_count": "2203", "aggregate_date": "2015-05-01", "id": "67721", "index_value": "15671", "rating": "2.8"}, {"ip_addresses": "152404", "label": "Vodafone Telekomunikasyon", "test_count": "3165", "aggregate_date": "2015-05-01", "id": "14778", "index_value": "15449", "rating": "2.9"}, {"ip_addresses": "6604", "label": "Vodafone Net Iletisim Hizmetleri Anonim Sirketi", "test_count": "5446", "aggregate_date": "2015-05-01", "id": "95253", "index_value": "13495", "rating": "3.0"}, {"ip_addresses": "4199415", "label": "Turk Telekom", "test_count": "300812", "aggregate_date": "2015-05-01", "id": "189", "index_value": "10268", "rating": "2.2"}, {"ip_addresses": "53646", "label": "Superonline ADSL", "test_count": "5035", "aggregate_date": "2015-05-01", "id": "82471", "index_value": "7676", "rating": "2.7"}, {"ip_addresses": "67678", "label": "MILLENI.COM", "test_count": "3184", "aggregate_date": 
"2015-05-01", "id": "23046", "index_value": "7374", "rating": "2.7"}, {"ip_addresses": "78010", "label": "TurkNet Iletisim Hizmetleri A.S", "test_count": "10086", "aggregate_date": "2015-05-01", "id": "34760", "index_value": "7153", "rating": "2.4"}, {"ip_addresses": "51294", "label": "BiRi ADSL", "test_count": "7107", "aggregate_date": "2015-05-01", "id": "83194", "index_value": "7041", "rating": "2.7"}, {"ip_addresses": "58605", "label": "Avea Iletisim Hizmetleri A.S", "test_count": "2441", "aggregate_date": "2015-05-01", "id": "93050", "index_value": "6847", "rating": "2.3"}, {"ip_addresses": "152643", "label": "Dogan Tv Digital Platform Isletmeciligi A.s", "test_count": "19052", "aggregate_date": "2015-05-01", "id": "88521", "index_value": "6454", "rating": "2.6"}, {"ip_addresses": "184715", "label": "Turkcell", "test_count": "5615", "aggregate_date": "2015-05-01", "id": "76431", "index_value": "5743", "rating": "2.9"}, {"ip_addresses": "5420", "label": "Nethouse Bilgi Islem Merkezi Ltd", "test_count": "106", "aggregate_date": "2015-05-01", "id": "90079", "index_value": "3503", "rating": "2.4"}, {"ip_addresses": "8774", "label": "Kibrisonline Ltd.", "test_count": "406", "aggregate_date": "2015-05-01", "id": "8802", "index_value": "3056", "rating": "2.6"}, {"ip_addresses": "2534", "label": "Enson Net Ltd Sti", "test_count": "1", "aggregate_date": "2015-05-01", "id": "40592", "index_value": "1141", "rating": "2.7"}], "bandwidth": "13461"}, {"countryCode": "Unknown | Venezuela", "test_count": "307052", "ip_addresses": "1654476", "country": "Venezuela", "isps": [{"ip_addresses": "575", "label": "Gandalf Comunicaciones C.A.", "test_count": "1486", "aggregate_date": "2015-05-01", "id": "73765", "index_value": "9875", "rating": "3.4"}, {"ip_addresses": "1286", "label": "IFX NETWORKS COLOMBIA", "test_count": "950", "aggregate_date": "2015-05-01", "id": "568", "index_value": "5250", "rating": "3.0"}, {"ip_addresses": "80222", "label": "Telcel, C.A", "test_count": 
"2093", "aggregate_date": "2015-05-01", "id": "26112", "index_value": "4657", "rating": "3.4"}, {"ip_addresses": "4008", "label": "Telefonica Venezolana, C.a.", "test_count": "758", "aggregate_date": "2015-05-01", "id": "87906", "index_value": "4469", "rating": "3.1"}, {"ip_addresses": "1459", "label": "Internet Cable Plus, Maracay", "test_count": "591", "aggregate_date": "2015-05-01", "id": "4217", "index_value": "3997", "rating": "2.6"}, {"ip_addresses": "11375", "label": "Net Uno, C.A.", "test_count": "4545", "aggregate_date": "2015-05-01", "id": "590", "index_value": "3788", "rating": "2.4"}, {"ip_addresses": "2157", "label": "Internet Cable Plus, Maracaibo", "test_count": "873", "aggregate_date": "2015-05-01", "id": "4403", "index_value": "3737", "rating": "2.1"}, {"ip_addresses": "98895", "label": "Internet Cable Plus", "test_count": "7103", "aggregate_date": "2015-05-01", "id": "3009", "index_value": "3271", "rating": "2.5"}, {"ip_addresses": "1354", "label": "Gold Data C.A.", "test_count": "1039", "aggregate_date": "2015-05-01", "id": "32593", "index_value": "2818", "rating": "3.4"}, {"ip_addresses": "2787", "label": "Internet Cable Plus, Valencia", "test_count": "528", "aggregate_date": "2015-05-01", "id": "9393", "index_value": "2794", "rating": "2.6"}, {"ip_addresses": "146427", "label": "Corporaci\u00f3n Telemic C.A.", "test_count": "27419", "aggregate_date": "2015-05-01", "id": "86145", "index_value": "2781", "rating": "2.3"}, {"ip_addresses": "3443", "label": "Omnivision C.A.", "test_count": "665", "aggregate_date": "2015-05-01", "id": "32347", "index_value": "2716", "rating": "3.2"}, {"ip_addresses": "659074", "label": "CANTV", "test_count": "202099", "aggregate_date": "2015-05-01", "id": "93035", "index_value": "1940", "rating": "2.3"}, {"ip_addresses": "6619", "label": "IFX Networks Venezuela C.A.", "test_count": "2264", "aggregate_date": "2015-05-01", "id": "47851", "index_value": "1798", "rating": "2.5"}, {"ip_addresses": "804", "label": "NetLink 
Am\u00e9rica C.A.", "test_count": "665", "aggregate_date": "2015-05-01", "id": "95215", "index_value": "1776", "rating": "2.6"}, {"ip_addresses": "3091", "label": "Internet Cable Plus, Barquisimeto", "test_count": "515", "aggregate_date": "2015-05-01", "id": "4766", "index_value": "1715", "rating": "2.7"}, {"ip_addresses": "22833", "label": "Telecomunicaciones MOVILNET", "test_count": "261", "aggregate_date": "2015-05-01", "id": "32967", "index_value": "1570", "rating": "3.2"}, {"ip_addresses": "12336", "label": "Supercable", "test_count": "728", "aggregate_date": "2015-05-01", "id": "193", "index_value": "987", "rating": "2.1"}], "bandwidth": "2173"}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "CH", "test_count": "171242", "ip_addresses": "1586933", "country": "Switzerland", "isps": [{"ip_addresses": "446544", "label": "Cablecom GmbH", "test_count": "64079", "aggregate_date": "2015-05-01", "id": "1251", "index_value": "71332", "rating": "3.5"}, {"ip_addresses": "15178", "label": "WWZ Telekom AG", "test_count": "2414", "aggregate_date": "2015-05-01", "id": "66771", "index_value": "53590", "rating": "3.7"}, {"ip_addresses": "7499", "label": "GA Weissenstein GmbH", "test_count": "945", "aggregate_date": "2015-05-01", "id": "21306", "index_value": "49744", "rating": "4.1"}, {"ip_addresses": "5387", "label": "Swisscom", "test_count": "182", "aggregate_date": "2015-05-01", "id": "93666", "index_value": "49505", "rating": "3.7"}, {"ip_addresses": "15168", "label": "Improware AG", "test_count": "2152", "aggregate_date": "2015-05-01", "id": "4292", "index_value": "47330", "rating": "4.0"}, {"ip_addresses": "6045", "label": "sasag Kabelkommunikation AG", "test_count": "1193", "aggregate_date": "2015-05-01", "id": "4679", "index_value": "44668", "rating": "4.0"}, {"ip_addresses": "6574", "label": "GIB-Solutions AG", "test_count": "1168", "aggregate_date": "2015-05-01", "id": 
"3916", "index_value": "44236", "rating": "3.8"}, {"ip_addresses": "20739", "label": "netplus.ch SA", "test_count": "3310", "aggregate_date": "2015-05-01", "id": "51883", "index_value": "34489", "rating": "3.4"}, {"ip_addresses": "741860", "label": "Bluewin", "test_count": "28843", "aggregate_date": "2015-05-01", "id": "25032", "index_value": "33493", "rating": "3.4"}, {"ip_addresses": "4132", "label": "Datapark AG", "test_count": "249", "aggregate_date": "2015-05-01", "id": "49479", "index_value": "28776", "rating": "3.9"}, {"ip_addresses": "20423", "label": "Green Connection AG", "test_count": "632", "aggregate_date": "2015-05-01", "id": "2718", "index_value": "23549", "rating": "3.6"}, {"ip_addresses": "8221", "label": "BSE Software GmbH", "test_count": "223", "aggregate_date": "2015-05-01", "id": "47700", "index_value": "23435", "rating": "3.5"}, {"ip_addresses": "5616", "label": "Cyberlink AG", "test_count": "173", "aggregate_date": "2015-05-01", "id": "44604", "index_value": "22306", "rating": "4.1"}, {"ip_addresses": "213977", "label": "Sunrise Communications", "test_count": "10632", "aggregate_date": "2015-05-01", "id": "33662", "index_value": "20446", "rating": "2.8"}, {"ip_addresses": "5865", "label": "Iway AG", "test_count": "98", "aggregate_date": "2015-05-01", "id": "51943", "index_value": "19981", "rating": "4.5"}, {"ip_addresses": "7358", "label": "Ticinocom SA", "test_count": "114", "aggregate_date": "2015-05-01", "id": "54029", "index_value": "19973", "rating": "3.6"}, {"ip_addresses": "24925", "label": "VTX Services SA", "test_count": "352", "aggregate_date": "2015-05-01", "id": "53988", "index_value": "18072", "rating": "3.3"}, {"ip_addresses": "5028", "label": "Private Layer INC", "test_count": "8519", "aggregate_date": "2015-05-01", "id": "52637", "index_value": "14105", "rating": "3.8"}, {"ip_addresses": "84477", "label": "Swisscom (Schweiz) AG", "test_count": "3907", "aggregate_date": "2015-05-01", "id": "53977", "index_value": "13845", 
"rating": "3.4"}, {"ip_addresses": "4964", "label": "Monzoon Networks AG", "test_count": "21", "aggregate_date": "2015-05-01", "id": "16468", "index_value": "13424", "rating": "3.8"}], "bandwidth": "44636"}, {"countryCode": "ZA", "test_count": "347572", "ip_addresses": "2655294", "country": "South Africa", "isps": [{"ip_addresses": "563", "label": "Cool-ideas", "test_count": "803", "aggregate_date": "2015-05-01", "id": "97163", "index_value": "29980", "rating": "4.2"}, {"ip_addresses": "795", "label": "RSAWEB Internet Services", "test_count": "796", "aggregate_date": "2015-05-01", "id": "27506", "index_value": "22781", "rating": "3.7"}, {"ip_addresses": "186565", "label": "Vodacom", "test_count": "13313", "aggregate_date": "2015-05-01", "id": "1338", "index_value": "12810", "rating": "2.2"}, {"ip_addresses": "34390", "label": "Neotel", "test_count": "26557", "aggregate_date": "2015-05-01", "id": "35073", "index_value": "9850", "rating": "2.3"}, {"ip_addresses": "941", "label": "BitCo", "test_count": "1755", "aggregate_date": "2015-05-01", "id": "83869", "index_value": "9007", "rating": "3.9"}, {"ip_addresses": "18869", "label": "CELL-C", "test_count": "523", "aggregate_date": "2015-05-01", "id": "87116", "index_value": "6960", "rating": "2.1"}, {"ip_addresses": "13190", "label": "Vox Telecom", "test_count": "5144", "aggregate_date": "2015-05-01", "id": "102256", "index_value": "6444", "rating": "3.1"}, {"ip_addresses": "178752", "label": "Internet Solutions", "test_count": "33657", "aggregate_date": "2015-05-01", "id": "405", "index_value": "6325", "rating": "2.5"}, {"ip_addresses": "504528", "label": "Telkom Internet", "test_count": "74811", "aggregate_date": "2015-05-01", "id": "94550", "index_value": "6065", "rating": "1.8"}, {"ip_addresses": "760", "label": "Fusion Wireless (PTY) LTD t/a Sonic Wireless Solu", "test_count": "2006", "aggregate_date": "2015-05-01", "id": "94569", "index_value": "5919", "rating": "3.1"}, {"ip_addresses": "52225", "label": 
"Cybersmart", "test_count": "4849", "aggregate_date": "2015-05-01", "id": "22468", "index_value": "5778", "rating": "2.6"}, {"ip_addresses": "59627", "label": "MTN", "test_count": "74379", "aggregate_date": "2015-05-01", "id": "82290", "index_value": "4787", "rating": "2.3"}, {"ip_addresses": "729", "label": "WIRUlink", "test_count": "824", "aggregate_date": "2015-05-01", "id": "86577", "index_value": "4169", "rating": "4.6"}, {"ip_addresses": "274265", "label": "MWEB", "test_count": "48103", "aggregate_date": "2015-05-01", "id": "86121", "index_value": "3838", "rating": "2.3"}, {"ip_addresses": "2774", "label": "SAINET Internet", "test_count": "375", "aggregate_date": "2015-05-01", "id": "67995", "index_value": "3658", "rating": "3.2"}, {"ip_addresses": "42835", "label": "iBurst", "test_count": "4043", "aggregate_date": "2015-05-01", "id": "43717", "index_value": "3592", "rating": "2.2"}, {"ip_addresses": "8082", "label": "FNB Connect", "test_count": "1174", "aggregate_date": "2015-05-01", "id": "94570", "index_value": "2427", "rating": "3.4"}], "bandwidth": "7026"}, {"countryCode": "PR", "test_count": "90183", "ip_addresses": "555439", "country": "Puerto Rico", "isps": [{"ip_addresses": "86405", "label": "Liberty PR", "test_count": "37570", "aggregate_date": "2015-05-01", "id": "214", "index_value": "18400", "rating": "3.1"}, {"ip_addresses": "42", "label": "AT&T Wireless", "test_count": "1091", "aggregate_date": "2015-05-01", "id": "83188", "index_value": "14825", "rating": "2.6"}, {"ip_addresses": "80860", "label": "Choice Cable", "test_count": "13742", "aggregate_date": "2015-05-01", "id": "303", "index_value": "13869", "rating": "2.7"}, {"ip_addresses": "1086", "label": "OSnet Wireless", "test_count": "1570", "aggregate_date": "2015-05-01", "id": "53997", "index_value": "6883", "rating": "3.2"}, {"ip_addresses": "4319", "label": "Verizon Wireless Puerto Rico", "test_count": "761", "aggregate_date": "2015-05-01", "id": "23645", "index_value": "6701", "rating": 
"2.8"}, {"ip_addresses": "19489", "label": "Open Mobile", "test_count": "704", "aggregate_date": "2015-05-01", "id": "64510", "index_value": "6519", "rating": "2.9"}, {"ip_addresses": "212193", "label": "Claro Puerto Rico", "test_count": "16333", "aggregate_date": "2015-05-01", "id": "308", "index_value": "6113", "rating": "2.3"}], "bandwidth": "14524"}, {"countryCode": "NL", "test_count": "617145", "ip_addresses": "4409164", "country": "Netherlands", "isps": [{"ip_addresses": "5652", "label": "CAIW Internet", "test_count": "4478", "aggregate_date": "2015-05-01", "id": "102871", "index_value": "94186", "rating": "3.8"}, {"ip_addresses": "83815", "label": "Caiway", "test_count": "14926", "aggregate_date": "2015-05-01", "id": "35062", "index_value": "87765", "rating": "3.3"}, {"ip_addresses": "500875", "label": "UPC Internet", "test_count": "113721", "aggregate_date": "2015-05-01", "id": "574", "index_value": "64820", "rating": "3.5"}, {"ip_addresses": "872285", "label": "Ziggo", "test_count": "163626", "aggregate_date": "2015-05-01", "id": "49013", "index_value": "64266", "rating": "3.7"}, {"ip_addresses": "41026", "label": "OnsBrabentNet", "test_count": "5544", "aggregate_date": "2015-05-01", "id": "2535", "index_value": "57151", "rating": "4.3"}, {"ip_addresses": "42959", "label": "ZeelandNet", "test_count": "6371", "aggregate_date": "2015-05-01", "id": "4028", "index_value": "54834", "rating": "3.7"}, {"ip_addresses": "200945", "label": "XS4ALL", "test_count": "17741", "aggregate_date": "2015-05-01", "id": "1008", "index_value": "50606", "rating": "3.8"}, {"ip_addresses": "12141", "label": "KPN Mobile", "test_count": "71", "aggregate_date": "2015-05-01", "id": "98598", "index_value": "48696", "rating": "2.5"}, {"ip_addresses": "4066", "label": "Signet", "test_count": "1552", "aggregate_date": "2015-05-01", "id": "73587", "index_value": "47176", "rating": "3.6"}, {"ip_addresses": "31179", "label": "Solcon", "test_count": "3894", "aggregate_date": "2015-05-01", 
"id": "3709", "index_value": "44349", "rating": "3.5"}, {"ip_addresses": "24954", "label": "InterNLnet BV", "test_count": "209", "aggregate_date": "2015-05-01", "id": "3896", "index_value": "40424", "rating": "4.3"}, {"ip_addresses": "82653", "label": "T-Mobile Netherlands", "test_count": "491", "aggregate_date": "2015-05-01", "id": "268", "index_value": "35256", "rating": "2.5"}, {"ip_addresses": "35572", "label": "Vodafone NL", "test_count": "649", "aggregate_date": "2015-05-01", "id": "3700", "index_value": "35024", "rating": "2.9"}, {"ip_addresses": "28056", "label": "RoutIT", "test_count": "2617", "aggregate_date": "2015-05-01", "id": "4113", "index_value": "34571", "rating": "3.4"}, {"ip_addresses": "206152", "label": "KPN", "test_count": "120587", "aggregate_date": "2015-05-01", "id": "94681", "index_value": "33710", "rating": "3.0"}, {"ip_addresses": "5459", "label": "InfoPact Netwerkdiensten B.V.", "test_count": "172", "aggregate_date": "2015-05-01", "id": "2202", "index_value": "30790", "rating": "2.9"}, {"ip_addresses": "3962", "label": "XenoSite B.V.", "test_count": "620", "aggregate_date": "2015-05-01", "id": "61537", "index_value": "20836", "rating": "3.6"}, {"ip_addresses": "116578", "label": "Tele2", "test_count": "19686", "aggregate_date": "2015-05-01", "id": "70166", "index_value": "15542", "rating": "2.6"}, {"ip_addresses": "7994", "label": "Wanadoo Cable", "test_count": "625", "aggregate_date": "2015-05-01", "id": "2190", "index_value": "12359", "rating": "2.6"}, {"ip_addresses": "158671", "label": "Online.nl", "test_count": "7522", "aggregate_date": "2015-05-01", "id": "30115", "index_value": "12144", "rating": "2.8"}], "bandwidth": "50831"}, {"countryCode": "IE", "test_count": "232233", "ip_addresses": "1593397", "country": "Ireland", "isps": [{"ip_addresses": "2500", "label": "Local Ethernet", "test_count": "158", "aggregate_date": "2015-05-01", "id": "7019", "index_value": "168358", "rating": "4.7"}, {"ip_addresses": "21532", "label": 
"HEAnet Limited", "test_count": "1361", "aggregate_date": "2015-05-01", "id": "42876", "index_value": "82375", "rating": "3.6"}, {"ip_addresses": "221189", "label": "UPC Ireland", "test_count": "52227", "aggregate_date": "2015-05-01", "id": "28333", "index_value": "57452", "rating": "3.3"}, {"ip_addresses": "323296", "label": "Vodafone Ireland", "test_count": "1307", "aggregate_date": "2015-05-01", "id": "3509", "index_value": "44577", "rating": "2.0"}, {"ip_addresses": "96765", "label": "BT Communications Ireland Limited", "test_count": "7607", "aggregate_date": "2015-05-01", "id": "89702", "index_value": "25808", "rating": "2.3"}, {"ip_addresses": "528428", "label": "Eircom", "test_count": "38583", "aggregate_date": "2015-05-01", "id": "652", "index_value": "25288", "rating": "2.1"}, {"ip_addresses": "24887", "label": "Magnet Networks Limited", "test_count": "2693", "aggregate_date": "2015-05-01", "id": "4295", "index_value": "21912", "rating": "3.2"}, {"ip_addresses": "3870", "label": "North West Electronics", "test_count": "3166", "aggregate_date": "2015-05-01", "id": "23857", "index_value": "19084", "rating": "2.4"}, {"ip_addresses": "29574", "label": "Digiweb ltd", "test_count": "2910", "aggregate_date": "2015-05-01", "id": "45274", "index_value": "16741", "rating": "2.8"}, {"ip_addresses": "3721", "label": "Adelphi Net1", "test_count": "832", "aggregate_date": "2015-05-01", "id": "8223", "index_value": "9008", "rating": "3.4"}, {"ip_addresses": "24217", "label": "Meteor", "test_count": "1927", "aggregate_date": "2015-05-01", "id": "15817", "index_value": "8843", "rating": "2.5"}, {"ip_addresses": "3624", "label": "SKYTEL NETWORKS IRELAND LTD", "test_count": "1561", "aggregate_date": "2015-05-01", "id": "39265", "index_value": "7553", "rating": "2.3"}, {"ip_addresses": "40340", "label": "Imagine Telecommunications Ltd", "test_count": "2885", "aggregate_date": "2015-05-01", "id": "1403", "index_value": "7005", "rating": "2.4"}, {"ip_addresses": "13621", 
"label": "Three Ireland", "test_count": "1183", "aggregate_date": "2015-05-01", "id": "104032", "index_value": "7002", "rating": "1.8"}, {"ip_addresses": "4004", "label": "Rapid Broadband Ltd", "test_count": "666", "aggregate_date": "2015-05-01", "id": "15546", "index_value": "6815", "rating": "3.0"}, {"ip_addresses": "3545", "label": "FastCom Broadband Limited", "test_count": "1325", "aggregate_date": "2015-05-01", "id": "11068", "index_value": "6230", "rating": "2.4"}, {"ip_addresses": "5109", "label": "Permanet Limited", "test_count": "1064", "aggregate_date": "2015-05-01", "id": "15529", "index_value": "4265", "rating": "3.0"}, {"ip_addresses": "2956", "label": "Lighthouse Networks Limited", "test_count": "1107", "aggregate_date": "2015-05-01", "id": "39297", "index_value": "4263", "rating": "2.9"}, {"ip_addresses": "3808", "label": "Ripple Communications Ltd", "test_count": "2386", "aggregate_date": "2015-05-01", "id": "89529", "index_value": "3601", "rating": "2.5"}, {"ip_addresses": "2866", "label": "Western Broadband Networks Limited", "test_count": "519", "aggregate_date": "2015-05-01", "id": "6326", "index_value": "2999", "rating": "3.3"}], "bandwidth": "27112"}, {"countryCode": "KN", "test_count": "756", "ip_addresses": "5716", "country": "Saint Kitts and Nevis", "isps": [{"ip_addresses": "4256", "label": "Cable & Wireless Antigua and Barbuda", "test_count": "583", "aggregate_date": "2015-05-01", "id": "69238", "index_value": "4024", "rating": "2.0"}], "bandwidth": "4839"}, {"countryCode": "PE", "test_count": "1062461", "ip_addresses": "1084963", "country": "Peru", "isps": [{"ip_addresses": "774", "label": "Optical Technologies S.a.c.", "test_count": "3335", "aggregate_date": "2015-05-01", "id": "100315", "index_value": "12964", "rating": "3.6"}, {"ip_addresses": "22438", "label": "Telmex infinitum", "test_count": "11585", "aggregate_date": "2015-05-01", "id": "75068", "index_value": "11513", "rating": "2.8"}, {"ip_addresses": "46465", "label": "Telmex 
Peru", "test_count": "34301", "aggregate_date": "2015-05-01", "id": "94551", "index_value": "9669", "rating": "2.9"}, {"ip_addresses": "947", "label": "Red Cientifica Peruana", "test_count": "8757", "aggregate_date": "2015-05-01", "id": "4120", "index_value": "7023", "rating": "3.4"}, {"ip_addresses": "987933", "label": "Telefonica del Peru", "test_count": "669001", "aggregate_date": "2015-05-01", "id": "255", "index_value": "6899", "rating": "2.3"}, {"ip_addresses": "17098", "label": "NEXTEL DEL PERU S.A.", "test_count": "973", "aggregate_date": "2015-05-01", "id": "23770", "index_value": "6836", "rating": "2.7"}, {"ip_addresses": "34944", "label": "Claro Peru", "test_count": "171953", "aggregate_date": "2015-05-01", "id": "94176", "index_value": "6619", "rating": "2.5"}, {"ip_addresses": "3855", "label": "VIETTEL PER\u00da S.A.C.", "test_count": "3979", "aggregate_date": "2015-05-01", "id": "78798", "index_value": "3554", "rating": "3.8"}, {"ip_addresses": "11901", "label": "AMERICATEL PERU S.A.", "test_count": "4921", "aggregate_date": "2015-05-01", "id": "3697", "index_value": "2757", "rating": "3.0"}, {"ip_addresses": "417", "label": "Olo del Peru S.A.C", "test_count": "7668", "aggregate_date": "2015-05-01", "id": "81230", "index_value": "2561", "rating": "2.2"}], "bandwidth": "6625"}, {"countryCode": "Unknown | Macedonia", "test_count": "54000", "ip_addresses": "419928", "country": "Macedonia", "isps": [{"ip_addresses": "32216", "label": "Blizoo DOOEL Skopje", "test_count": "14343", "aggregate_date": "2015-05-01", "id": "87742", "index_value": "28076", "rating": "3.2"}, {"ip_addresses": "1181", "label": "Company for Computer Services and Trade MOL KOMUNI", "test_count": "300", "aggregate_date": "2015-05-01", "id": "85587", "index_value": "21981", "rating": "3.4"}, {"ip_addresses": "881", "label": "DOCSIS clients in Prilep", "test_count": "965", "aggregate_date": "2015-05-01", "id": "104580", "index_value": "20606", "rating": "2.5"}, {"ip_addresses": "10592", 
"label": "TELESMART TELEKOM DOO", "test_count": "1488", "aggregate_date": "2015-05-01", "id": "34829", "index_value": "19462", "rating": "3.5"}, {"ip_addresses": "5237", "label": "Neotel Macedonia", "test_count": "2472", "aggregate_date": "2015-05-01", "id": "101590", "index_value": "18756", "rating": "3.1"}, {"ip_addresses": "211207", "label": "Makedonski Telekom", "test_count": "14592", "aggregate_date": "2015-05-01", "id": "37682", "index_value": "17853", "rating": "3.0"}, {"ip_addresses": "9745", "label": "Inel Internacional Dooel Kavadarci", "test_count": "1606", "aggregate_date": "2015-05-01", "id": "35611", "index_value": "14690", "rating": "3.2"}, {"ip_addresses": "6200", "label": "Miksnet", "test_count": "420", "aggregate_date": "2015-05-01", "id": "30499", "index_value": "14278", "rating": "3.8"}, {"ip_addresses": "2242", "label": "PET NET DOO Gevgelija", "test_count": "374", "aggregate_date": "2015-05-01", "id": "82137", "index_value": "13165", "rating": "3.6"}, {"ip_addresses": "3415", "label": "Commercial radio-broadcasting company Cable operat", "test_count": "711", "aggregate_date": "2015-05-01", "id": "69253", "index_value": "11385", "rating": "3.6"}, {"ip_addresses": "13443", "label": "Telekabel Ltd.", "test_count": "4044", "aggregate_date": "2015-05-01", "id": "71083", "index_value": "11265", "rating": "2.7"}, {"ip_addresses": "297", "label": "VIP Operator dooel Skopje", "test_count": "295", "aggregate_date": "2015-05-01", "id": "39687", "index_value": "7696", "rating": "3.3"}, {"ip_addresses": "5774", "label": "NEOTEL-MKD Autonomous System", "test_count": "929", "aggregate_date": "2015-05-01", "id": "54033", "index_value": "7173", "rating": "2.9"}, {"ip_addresses": "21250", "label": "One Macedonia", "test_count": "4495", "aggregate_date": "2015-05-01", "id": "101941", "index_value": "6367", "rating": "2.7"}], "bandwidth": "17865"}, {"countryCode": "SA", "test_count": "797420", "ip_addresses": "4122539", "country": "Saudi Arabia", "isps": 
[{"ip_addresses": "20236", "label": "GulfNet KSA", "test_count": "4772", "aggregate_date": "2015-05-01", "id": "1350", "index_value": "32884", "rating": "3.0"}, {"ip_addresses": "1088", "label": "King Abdul Aziz City for Science and Technology", "test_count": "2357", "aggregate_date": "2015-05-01", "id": "63506", "index_value": "28025", "rating": "3.7"}, {"ip_addresses": "342914", "label": "Bayanat", "test_count": "9116", "aggregate_date": "2015-05-01", "id": "2476", "index_value": "23663", "rating": "2.9"}, {"ip_addresses": "147463", "label": "Mobily", "test_count": "66101", "aggregate_date": "2015-05-01", "id": "101935", "index_value": "17542", "rating": "2.8"}, {"ip_addresses": "18233", "label": "Integrated Telecom Co. Ltd", "test_count": "5085", "aggregate_date": "2015-05-01", "id": "1270", "index_value": "14696", "rating": "3.0"}, {"ip_addresses": "45868", "label": "Arab Company For Internet & Communications Service", "test_count": "5187", "aggregate_date": "2015-05-01", "id": "42018", "index_value": "14624", "rating": "3.1"}, {"ip_addresses": "1863815", "label": "SaudiNet", "test_count": "602989", "aggregate_date": "2015-05-01", "id": "87730", "index_value": "9443", "rating": "2.9"}, {"ip_addresses": "38028", "label": "Middle East Internet Company Limited", "test_count": "2762", "aggregate_date": "2015-05-01", "id": "850", "index_value": "7559", "rating": "3.0"}, {"ip_addresses": "8527", "label": "Shabakah Net", "test_count": "622", "aggregate_date": "2015-05-01", "id": "903", "index_value": "6852", "rating": "3.1"}, {"ip_addresses": "3730", "label": "Nour Communication Co.Ltd - Nournet", "test_count": "4125", "aggregate_date": "2015-05-01", "id": "42026", "index_value": "6355", "rating": "3.2"}, {"ip_addresses": "106748", "label": "GO Telecom", "test_count": "4876", "aggregate_date": "2015-05-01", "id": "37680", "index_value": "6213", "rating": "2.6"}, {"ip_addresses": "23585", "label": "Sahara Network", "test_count": "2471", "aggregate_date": "2015-05-01", 
"id": "793", "index_value": "4895", "rating": "3.2"}, {"ip_addresses": "16791", "label": "ODS.ORBITNET", "test_count": "332", "aggregate_date": "2015-05-01", "id": "269", "index_value": "2609", "rating": "2.1"}], "bandwidth": "10492"}, {"countryCode": "PT", "test_count": "414468", "ip_addresses": "3179993", "country": "Portugal", "isps": [{"ip_addresses": "9129", "label": "Fundacao para a Ciencia e a Tecnologia, I.P.", "test_count": "4867", "aggregate_date": "2015-05-01", "id": "94970", "index_value": "63423", "rating": "3.9"}, {"ip_addresses": "5612", "label": "ONITELECOM - INFOCOMUNICACOES, S.A.", "test_count": "602", "aggregate_date": "2015-05-01", "id": "1927", "index_value": "51702", "rating": "2.8"}, {"ip_addresses": "5912", "label": "NOS Madeira Comunicacoes, S.A.", "test_count": "3260", "aggregate_date": "2015-05-01", "id": "101945", "index_value": "49315", "rating": "2.6"}, {"ip_addresses": "249816", "label": "Vodafone Portugal", "test_count": "45145", "aggregate_date": "2015-05-01", "id": "54007", "index_value": "41133", "rating": "2.9"}, {"ip_addresses": "112865", "label": "Cabovisao, televisao por cabovisao, sa", "test_count": "24458", "aggregate_date": "2015-05-01", "id": "67637", "index_value": "37055", "rating": "2.8"}, {"ip_addresses": "2911", "label": "NFSi Telecom, Lda.", "test_count": "295", "aggregate_date": "2015-05-01", "id": "15690", "index_value": "24997", "rating": "4.1"}, {"ip_addresses": "226534", "label": "Novis Telecom", "test_count": "15374", "aggregate_date": "2015-05-01", "id": "556", "index_value": "24160", "rating": "2.7"}, {"ip_addresses": "11758", "label": "Meo - Servicos De Comunicacoes E Multimedia S.A.", "test_count": "4627", "aggregate_date": "2015-05-01", "id": "104037", "index_value": "20938", "rating": "2.5"}, {"ip_addresses": "745", "label": "ONITELECOM - INFOCOMUNICACOES, SA", "test_count": "1514", "aggregate_date": "2015-05-01", "id": "67724", "index_value": "17040", "rating": "2.6"}, {"ip_addresses": "18007", "label": 
"Meo", "test_count": "617", "aggregate_date": "2015-05-01", "id": "100652", "index_value": "10744", "rating": "2.4"}, {"ip_addresses": "83660", "label": "OPTIMUS Portugal", "test_count": "8790", "aggregate_date": "2015-05-01", "id": "93041", "index_value": "9220", "rating": "2.1"}, {"ip_addresses": "7024", "label": "G9SA, Telecomunicacoes, S.A", "test_count": "184", "aggregate_date": "2015-05-01", "id": "71862", "index_value": "4481", "rating": "2.4"}], "bandwidth": "31352"}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "EC", "test_count": "195036", "ip_addresses": "643537", "country": "Ecuador", "isps": [{"ip_addresses": "40073", "label": "NETLIFE", "test_count": "20526", "aggregate_date": "2015-05-01", "id": "78724", "index_value": "16734", "rating": "3.7"}, {"ip_addresses": "25107", "label": "Telconet S.A", "test_count": "24767", "aggregate_date": "2015-05-01", "id": "447", "index_value": "6858", "rating": "3.2"}, {"ip_addresses": "84088", "label": "SATNET", "test_count": "15026", "aggregate_date": "2015-05-01", "id": "1373", "index_value": "5339", "rating": "2.8"}, {"ip_addresses": "5436", "label": "Otecel S.A.", "test_count": "7802", "aggregate_date": "2015-05-01", "id": "11143", "index_value": "4963", "rating": "3.2"}, {"ip_addresses": "72619", "label": "Ecuadortelecom S.A.", "test_count": "13054", "aggregate_date": "2015-05-01", "id": "30153", "index_value": "3902", "rating": "2.9"}, {"ip_addresses": "31770", "label": "ETAPA EP", "test_count": "4847", "aggregate_date": "2015-05-01", "id": "70348", "index_value": "3826", "rating": "2.4"}, {"ip_addresses": "188359", "label": "CORPORACION NACIONAL DE TELECOMUNICACIONES - CNT E", "test_count": "56660", "aggregate_date": "2015-05-01", "id": "51934", "index_value": "3250", "rating": "2.5"}, {"ip_addresses": "5565", "label": "CONECEL", "test_count": "13550", "aggregate_date": "2015-05-01", "id": "3821", "index_value": 
"3122", "rating": "2.7"}, {"ip_addresses": "8481", "label": "PUNTONET S.A.", "test_count": "9693", "aggregate_date": "2015-05-01", "id": "3414", "index_value": "3075", "rating": "2.7"}, {"ip_addresses": "54246", "label": "EasyNet S.A.", "test_count": "14412", "aggregate_date": "2015-05-01", "id": "13339", "index_value": "2754", "rating": "2.9"}, {"ip_addresses": "847", "label": "Nacional De Telecomunicaciones - Cnt Ep", "test_count": "6642", "aggregate_date": "2015-05-01", "id": "97534", "index_value": "2059", "rating": "3.0"}], "bandwidth": "5256"}, {"countryCode": "NO", "test_count": "244022", "ip_addresses": "2063898", "country": "Norway", "isps": [{"ip_addresses": "19361", "label": "UNINETT, The Norwegian University & Research Netwo", "test_count": "1835", "aggregate_date": "2015-05-01", "id": "86505", "index_value": "148732", "rating": "4.3"}, {"ip_addresses": "8744", "label": "UNINETT AS", "test_count": "546", "aggregate_date": "2015-05-01", "id": "35122", "index_value": "97729", "rating": "4.2"}, {"ip_addresses": "8673", "label": "PowerTech Information Systems AS", "test_count": "1181", "aggregate_date": "2015-05-01", "id": "14876", "index_value": "69700", "rating": "3.6"}, {"ip_addresses": "8528", "label": "Lynet Internett AS", "test_count": "1157", "aggregate_date": "2015-05-01", "id": "76273", "index_value": "69087", "rating": "4.1"}, {"ip_addresses": "192859", "label": "Get", "test_count": "29913", "aggregate_date": "2015-05-01", "id": "26097", "index_value": "55886", "rating": "3.1"}, {"ip_addresses": "115821", "label": "Altibox AS", "test_count": "27617", "aggregate_date": "2015-05-01", "id": "39625", "index_value": "48429", "rating": "4.1"}, {"ip_addresses": "7723", "label": "BKK Fiber", "test_count": "2860", "aggregate_date": "2015-05-01", "id": "86397", "index_value": "48282", "rating": "3.7"}, {"ip_addresses": "16201", "label": "NTE Bredband AS", "test_count": "5063", "aggregate_date": "2015-05-01", "id": "37028", "index_value": "44261", "rating": 
"3.7"}, {"ip_addresses": "8988", "label": "Loqal", "test_count": "3100", "aggregate_date": "2015-05-01", "id": "94554", "index_value": "42301", "rating": "3.0"}, {"ip_addresses": "270018", "label": "Canal Digital", "test_count": "28144", "aggregate_date": "2015-05-01", "id": "2184", "index_value": "40755", "rating": "3.2"}, {"ip_addresses": "9769", "label": "TDC AS", "test_count": "1536", "aggregate_date": "2015-05-01", "id": "42854", "index_value": "37224", "rating": "3.1"}, {"ip_addresses": "34239", "label": "Eidsiva Bredband AS", "test_count": "4573", "aggregate_date": "2015-05-01", "id": "6170", "index_value": "32035", "rating": "2.8"}, {"ip_addresses": "9384", "label": "Eltele AS", "test_count": "1278", "aggregate_date": "2015-05-01", "id": "58551", "index_value": "30244", "rating": "3.5"}, {"ip_addresses": "11401", "label": "Enivest AS", "test_count": "2340", "aggregate_date": "2015-05-01", "id": "4789", "index_value": "29102", "rating": "3.1"}, {"ip_addresses": "71910", "label": "Broadnet AS", "test_count": "15192", "aggregate_date": "2015-05-01", "id": "82295", "index_value": "25490", "rating": "2.7"}, {"ip_addresses": "61147", "label": "TeliaSonera Norge AS", "test_count": "3038", "aggregate_date": "2015-05-01", "id": "90078", "index_value": "18192", "rating": "3.3"}, {"ip_addresses": "533470", "label": "Telenor Norge AS", "test_count": "45247", "aggregate_date": "2015-05-01", "id": "53978", "index_value": "18055", "rating": "2.4"}, {"ip_addresses": "7295", "label": "StayOn AS", "test_count": "2756", "aggregate_date": "2015-05-01", "id": "73448", "index_value": "15403", "rating": "2.3"}, {"ip_addresses": "195071", "label": "NextGenTel AS", "test_count": "16357", "aggregate_date": "2015-05-01", "id": "629", "index_value": "12232", "rating": "2.6"}, {"ip_addresses": "38371", "label": "Net 1 Sweden", "test_count": "58", "aggregate_date": "2015-05-01", "id": "61509", "index_value": "2381", "rating": "2.3"}], "bandwidth": "42566"}, {"countryCode": "Unknown | 
Tanzania", "test_count": "9314", "ip_addresses": "26767", "country": "Tanzania", "isps": [{"ip_addresses": "441", "label": "MIC Tanzania LTD", "test_count": "165", "aggregate_date": "2015-05-01", "id": "35362", "index_value": "10060", "rating": "3.0"}, {"ip_addresses": "2317", "label": "Smile Communications Tanzania Ltd", "test_count": "253", "aggregate_date": "2015-05-01", "id": "96352", "index_value": "7646", "rating": "4.0"}, {"ip_addresses": "1784", "label": "Cats-Net Limited", "test_count": "1015", "aggregate_date": "2015-05-01", "id": "9850", "index_value": "7276", "rating": "2.7"}, {"ip_addresses": "1188", "label": "ZanZibar Telecom", "test_count": "214", "aggregate_date": "2015-05-01", "id": "86555", "index_value": "5584", "rating": "3.3"}, {"ip_addresses": "1237", "label": "Startel (T) Ltd", "test_count": "652", "aggregate_date": "2015-05-01", "id": "17726", "index_value": "4343", "rating": "4.1"}, {"ip_addresses": "1463", "label": "Simbanet (T) Ltd", "test_count": "764", "aggregate_date": "2015-05-01", "id": "35854", "index_value": "3328", "rating": "3.1"}, {"ip_addresses": "1135", "label": "Vodacom Tanzania", "test_count": "272", "aggregate_date": "2015-05-01", "id": "40008", "index_value": "2103", "rating": "3.1"}], "bandwidth": "5376"}, {"countryCode": "NG", "test_count": "19215", "ip_addresses": "136217", "country": "Nigeria", "isps": [{"ip_addresses": "3320", "label": "MTN Nigeria", "test_count": "503", "aggregate_date": "2015-05-01", "id": "23703", "index_value": "14457", "rating": "3.1"}, {"ip_addresses": "576", "label": "MainOne Cable Company", "test_count": "495", "aggregate_date": "2015-05-01", "id": "67825", "index_value": "9924", "rating": "3.9"}, {"ip_addresses": "13780", "label": "SPECTRANET LIMITED", "test_count": "1574", "aggregate_date": "2015-05-01", "id": "77946", "index_value": "8294", "rating": "3.1"}, {"ip_addresses": "2248", "label": "Spectranet", "test_count": "348", "aggregate_date": "2015-05-01", "id": "62522", "index_value": 
"6958", "rating": "3.0"}, {"ip_addresses": "35722", "label": "SWIFT NETWORKS LIMITED", "test_count": "1475", "aggregate_date": "2015-05-01", "id": "18505", "index_value": "5330", "rating": "3.0"}, {"ip_addresses": "6078", "label": "IPNX Nigeria Ltd", "test_count": "707", "aggregate_date": "2015-05-01", "id": "32877", "index_value": "4934", "rating": "2.8"}, {"ip_addresses": "1651", "label": "EMTS Limited / Etisalat Nigeria", "test_count": "525", "aggregate_date": "2015-05-01", "id": "38447", "index_value": "4812", "rating": "3.6"}, {"ip_addresses": "1329", "label": "Globacom Ltd", "test_count": "431", "aggregate_date": "2015-05-01", "id": "36617", "index_value": "3528", "rating": "3.0"}, {"ip_addresses": "8845", "label": "Cobranet Limited", "test_count": "1406", "aggregate_date": "2015-05-01", "id": "10355", "index_value": "2527", "rating": "3.3"}, {"ip_addresses": "1572", "label": "Gateway Telecoms Integrated Services Limited", "test_count": "424", "aggregate_date": "2015-05-01", "id": "42317", "index_value": "2409", "rating": "3.3"}], "bandwidth": "5606"}, {"countryCode": "PA", "test_count": "11459", "ip_addresses": "260963", "country": "Panama", "isps": [{"ip_addresses": "162976", "label": "Cable Onda", "test_count": "5257", "aggregate_date": "2015-05-01", "id": "2101", "index_value": "6607", "rating": "2.7"}, {"ip_addresses": "74991", "label": "Cable & Wireless Panama", "test_count": "1393", "aggregate_date": "2015-05-01", "id": "346", "index_value": "4300", "rating": "2.6"}, {"ip_addresses": "18764", "label": "Claro Panam\u00e1 S.A.", "test_count": "78", "aggregate_date": "2015-05-01", "id": "79273", "index_value": "1474", "rating": "2.9"}], "bandwidth": "7179"}, {"countryCode": "DO", "test_count": "71441", "ip_addresses": "301609", "country": "Dominican Republic", "isps": [{"ip_addresses": "216197", "label": "Claro Dominican Republic", "test_count": "56242", "aggregate_date": "2015-05-01", "id": "93892", "index_value": "7885", "rating": "2.8"}, 
{"ip_addresses": "49049", "label": "ORANGE DOMINICANA", "test_count": "3309", "aggregate_date": "2015-05-01", "id": "68682", "index_value": "3959", "rating": "2.8"}, {"ip_addresses": "36316", "label": "WIND Telecom S.A.", "test_count": "8405", "aggregate_date": "2015-05-01", "id": "61497", "index_value": "3526", "rating": "2.5"}], "bandwidth": "7248"}, {"countryCode": "Unknown | Bolivia", "test_count": "96170", "ip_addresses": "157525", "country": "Bolivia", "isps": [{"ip_addresses": "5917", "label": "Telef\u00f3nica Celular de Bolivia S.A.", "test_count": "8555", "aggregate_date": "2015-05-01", "id": "30191", "index_value": "5508", "rating": "2.4"}, {"ip_addresses": "28181", "label": "Nuevatel PCS de Bolivia S.A.", "test_count": "6513", "aggregate_date": "2015-05-01", "id": "33704", "index_value": "4082", "rating": "2.3"}, {"ip_addresses": "22345", "label": "COTAS", "test_count": "19218", "aggregate_date": "2015-05-01", "id": "93044", "index_value": "3355", "rating": "2.1"}, {"ip_addresses": "18493", "label": "AXS Bolivia S. A.", "test_count": "15635", "aggregate_date": "2015-05-01", "id": "75055", "index_value": "2681", "rating": "2.3"}, {"ip_addresses": "5666", "label": "DiViNetworks LTD.", "test_count": "6527", "aggregate_date": "2015-05-01", "id": "94910", "index_value": "2385", "rating": "2.2"}, {"ip_addresses": "31913", "label": "Entel S.A. 
- EntelNet", "test_count": "16620", "aggregate_date": "2015-05-01", "id": "2090", "index_value": "2327", "rating": "2.1"}, {"ip_addresses": "44496", "label": "Comteco Ltda", "test_count": "14863", "aggregate_date": "2015-05-01", "id": "1282", "index_value": "1726", "rating": "1.8"}, {"ip_addresses": "4893", "label": "Digital TV Cable de Edmund Daher", "test_count": "1535", "aggregate_date": "2015-05-01", "id": "63535", "index_value": "1557", "rating": "2.7"}, {"ip_addresses": "3584", "label": "Cotel Ltda.", "test_count": "2091", "aggregate_date": "2015-05-01", "id": "32598", "index_value": "1220", "rating": "2.2"}, {"ip_addresses": "2084", "label": "COTES Ltda.", "test_count": "742", "aggregate_date": "2015-05-01", "id": "28067", "index_value": "888", "rating": "1.7"}], "bandwidth": "2824"}, {"countryCode": "EG", "test_count": "150233", "ip_addresses": "1719163", "country": "Egypt", "isps": [{"ip_addresses": "434", "label": "Egyptian Universities Network (EUN)", "test_count": "239", "aggregate_date": "2015-05-01", "id": "80399", "index_value": "21786", "rating": "4.1"}, {"ip_addresses": "1913", "label": "Nile Online (EG)", "test_count": "1091", "aggregate_date": "2015-05-01", "id": "43260", "index_value": "7232", "rating": "3.0"}, {"ip_addresses": "114410", "label": "Vodafone Egypt", "test_count": "6181", "aggregate_date": "2015-05-01", "id": "31126", "index_value": "3751", "rating": "2.9"}, {"ip_addresses": "15803", "label": "The Noor Group", "test_count": "2195", "aggregate_date": "2015-05-01", "id": "5887", "index_value": "3739", "rating": "2.9"}, {"ip_addresses": "1011170", "label": "TE Data", "test_count": "84502", "aggregate_date": "2015-05-01", "id": "1089", "index_value": "2691", "rating": "2.7"}, {"ip_addresses": "228565", "label": "Link Egypt", "test_count": "14579", "aggregate_date": "2015-05-01", "id": "653", "index_value": "2664", "rating": "2.9"}, {"ip_addresses": "66972", "label": "Etisalat MISR", "test_count": "2202", "aggregate_date": "2015-05-01", 
"id": "28424", "index_value": "2560", "rating": "2.7"}, {"ip_addresses": "94358", "label": "Nile Online", "test_count": "4839", "aggregate_date": "2015-05-01", "id": "1304", "index_value": "2314", "rating": "3.1"}, {"ip_addresses": "2138", "label": "YallaOnline", "test_count": "106", "aggregate_date": "2015-05-01", "id": "86429", "index_value": "1947", "rating": "2.6"}, {"ip_addresses": "90012", "label": "Link Egypt (Link.NET)", "test_count": "9838", "aggregate_date": "2015-05-01", "id": "70135", "index_value": "1845", "rating": "2.6"}, {"ip_addresses": "6274", "label": "AFMIC", "test_count": "927", "aggregate_date": "2015-05-01", "id": "49308", "index_value": "982", "rating": "2.9"}], "bandwidth": "2730"}, {"countryCode": "FJ", "test_count": "8977", "ip_addresses": "27642", "country": "Fiji", "isps": [{"ip_addresses": "8732", "label": "Vodafone Fiji Limited", "test_count": "980", "aggregate_date": "2015-05-01", "id": "43769", "index_value": "10683", "rating": "4.1"}, {"ip_addresses": "15567", "label": "Connect Internet Services Limited", "test_count": "5498", "aggregate_date": "2015-05-01", "id": "4223", "index_value": "6030", "rating": "2.2"}], "bandwidth": "6903"}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "JM", "test_count": "20365", "ip_addresses": "95801", "country": "Jamaica", "isps": [{"ip_addresses": "13546", "label": "FLOW", "test_count": "5314", "aggregate_date": "2015-05-01", "id": "27725", "index_value": "13326", "rating": "3.1"}, {"ip_addresses": "58707", "label": "Cable and Wireless Jamaica", "test_count": "8669", "aggregate_date": "2015-05-01", "id": "460", "index_value": "5218", "rating": "2.3"}, {"ip_addresses": "29556", "label": "Digicel Jamaica", "test_count": "3455", "aggregate_date": "2015-05-01", "id": "19187", "index_value": "3840", "rating": "2.4"}], "bandwidth": "6505"}, {"countryCode": "TT", "test_count": "29935", "ip_addresses": "255687", 
"country": "Trinidad and Tobago", "isps": [{"ip_addresses": "617", "label": "Digicel Jamaica", "test_count": "566", "aggregate_date": "2015-05-01", "id": "19187", "index_value": "55584", "rating": "2.4"}, {"ip_addresses": "592", "label": "Digicel Trinidad and Tobago Ltd.", "test_count": "872", "aggregate_date": "2015-05-01", "id": "18122", "index_value": "22307", "rating": "3.8"}, {"ip_addresses": "87865", "label": "Columbus Communications Trinidad Limited.", "test_count": "18106", "aggregate_date": "2015-05-01", "id": "20649", "index_value": "16635", "rating": "3.2"}, {"ip_addresses": "155213", "label": "Telecommunication Services of Trinidad and Tobago", "test_count": "3793", "aggregate_date": "2015-05-01", "id": "1057", "index_value": "4959", "rating": "2.2"}, {"ip_addresses": "10798", "label": "GREENDOT", "test_count": "2141", "aggregate_date": "2015-05-01", "id": "6967", "index_value": "2823", "rating": "2.1"}], "bandwidth": "14489"}, {"countryCode": "BB", "test_count": "8572", "ip_addresses": "70873", "country": "Barbados", "isps": [{"ip_addresses": "4606", "label": "Tele (Barbados)", "test_count": "1795", "aggregate_date": "2015-05-01", "id": "94291", "index_value": "33443", "rating": "3.4"}, {"ip_addresses": "63322", "label": "Lime Cable & Wireless Barbados", "test_count": "6353", "aggregate_date": "2015-05-01", "id": "528", "index_value": "8708", "rating": "1.7"}], "bandwidth": "13996"}, {"countryCode": "SV", "test_count": "36405", "ip_addresses": "266649", "country": "El Salvador", "isps": [{"ip_addresses": "929", "label": "TIGO El Salvador", "test_count": "397", "aggregate_date": "2015-05-01", "id": "101218", "index_value": "19584", "rating": "2.3"}, {"ip_addresses": "4150", "label": "SVNet", "test_count": "807", "aggregate_date": "2015-05-01", "id": "539", "index_value": "13297", "rating": "3.0"}, {"ip_addresses": "1356", "label": "Navega.com El Salvador", "test_count": "1375", "aggregate_date": "2015-05-01", "id": "34862", "index_value": "8214", 
"rating": "3.3"}, {"ip_addresses": "4139", "label": "Telefonica Moviles El Salvador S.A. de C.V.", "test_count": "703", "aggregate_date": "2015-05-01", "id": "291", "index_value": "5845", "rating": "2.5"}, {"ip_addresses": "17681", "label": "Telgua", "test_count": "4452", "aggregate_date": "2015-05-01", "id": "925", "index_value": "4514", "rating": "2.7"}, {"ip_addresses": "29508", "label": "MILLICOM CABLE EL SALVADOR S.A. DE C.V.", "test_count": "6701", "aggregate_date": "2015-05-01", "id": "44808", "index_value": "3300", "rating": "2.9"}, {"ip_addresses": "13723", "label": "AMNET TEL y cia S en C de C.V.", "test_count": "2764", "aggregate_date": "2015-05-01", "id": "75024", "index_value": "3291", "rating": "2.9"}, {"ip_addresses": "169329", "label": "CTE", "test_count": "16761", "aggregate_date": "2015-05-01", "id": "938", "index_value": "2674", "rating": "3.0"}], "bandwidth": "3903"}, {"countryCode": "BG", "test_count": "202631", "ip_addresses": "1527079", "country": "Bulgaria", "isps": [{"ip_addresses": "13198", "label": "ESCOM Ltd. 
- Haskovo", "test_count": "1178", "aggregate_date": "2015-05-01", "id": "29068", "index_value": "49542", "rating": "4.3"}, {"ip_addresses": "28350", "label": "Net1", "test_count": "6720", "aggregate_date": "2015-05-01", "id": "34834", "index_value": "48676", "rating": "3.9"}, {"ip_addresses": "183301", "label": "Blizoo Media and Broadband", "test_count": "25151", "aggregate_date": "2015-05-01", "id": "70990", "index_value": "47254", "rating": "3.2"}, {"ip_addresses": "36106", "label": "ITD Network SA", "test_count": "3586", "aggregate_date": "2015-05-01", "id": "3505", "index_value": "45033", "rating": "4.0"}, {"ip_addresses": "12747", "label": "Angelsoft ET", "test_count": "1458", "aggregate_date": "2015-05-01", "id": "3173", "index_value": "43989", "rating": "4.3"}, {"ip_addresses": "42535", "label": "Global Communication Net Plc", "test_count": "2455", "aggregate_date": "2015-05-01", "id": "44514", "index_value": "43181", "rating": "3.5"}, {"ip_addresses": "77494", "label": "Mobiltel Ead", "test_count": "16705", "aggregate_date": "2015-05-01", "id": "83843", "index_value": "41454", "rating": "4.0"}, {"ip_addresses": "23963", "label": "Comnet Bulgaria Holding Ltd.", "test_count": "3354", "aggregate_date": "2015-05-01", "id": "35938", "index_value": "40549", "rating": "4.3"}, {"ip_addresses": "11732", "label": "Telehouse EAD", "test_count": "3807", "aggregate_date": "2015-05-01", "id": "70535", "index_value": "39315", "rating": "3.9"}, {"ip_addresses": "13541", "label": "Telnet Limited", "test_count": "1658", "aggregate_date": "2015-05-01", "id": "11377", "index_value": "38421", "rating": "4.1"}, {"ip_addresses": "9984", "label": "Speedy net EAD", "test_count": "835", "aggregate_date": "2015-05-01", "id": "30173", "index_value": "37914", "rating": "3.6"}, {"ip_addresses": "14590", "label": "Optisprint", "test_count": "1812", "aggregate_date": "2015-05-01", "id": "27539", "index_value": "35777", "rating": "4.4"}, {"ip_addresses": "9810", "label": "Lirex net EOOD", 
"test_count": "880", "aggregate_date": "2015-05-01", "id": "40250", "index_value": "35543", "rating": "3.9"}, {"ip_addresses": "90107", "label": "Neterra", "test_count": "10838", "aggregate_date": "2015-05-01", "id": "1320", "index_value": "35323", "rating": "3.8"}, {"ip_addresses": "93460", "label": "Bulsatcom AD", "test_count": "20126", "aggregate_date": "2015-05-01", "id": "47797", "index_value": "33912", "rating": "3.7"}, {"ip_addresses": "14470", "label": "Vida optics TVV Ltd.", "test_count": "2327", "aggregate_date": "2015-05-01", "id": "84992", "index_value": "33311", "rating": "3.7"}, {"ip_addresses": "18006", "label": "Networx-Bulgaria Ltd.", "test_count": "4875", "aggregate_date": "2015-05-01", "id": "685", "index_value": "31617", "rating": "4.1"}, {"ip_addresses": "15830", "label": "NOVATEL EOOD", "test_count": "1940", "aggregate_date": "2015-05-01", "id": "48478", "index_value": "29635", "rating": "3.8"}, {"ip_addresses": "241552", "label": "Vivacom", "test_count": "34899", "aggregate_date": "2015-05-01", "id": "86133", "index_value": "22760", "rating": "2.9"}, {"ip_addresses": "17634", "label": "Max Telecom Ltd.", "test_count": "1284", "aggregate_date": "2015-05-01", "id": "19210", "index_value": "11451", "rating": "3.1"}], "bandwidth": "37585"}, {"countryCode": "BE", "test_count": "265096", "ip_addresses": "3076612", "country": "Belgium", "isps": [{"ip_addresses": "633258", "label": "Telenet N.V.", "test_count": "69849", "aggregate_date": "2015-05-01", "id": "51881", "index_value": "77406", "rating": "3.5"}, {"ip_addresses": "3987", "label": "UPC", "test_count": "725", "aggregate_date": "2015-05-01", "id": "15012", "index_value": "75569", "rating": "3.0"}, {"ip_addresses": "31292", "label": "Coditel", "test_count": "7766", "aggregate_date": "2015-05-01", "id": "16749", "index_value": "52150", "rating": "3.1"}, {"ip_addresses": "104347", "label": "Brutele", "test_count": "1849", "aggregate_date": "2015-05-01", "id": "4064", "index_value": "42744", 
"rating": "3.3"}, {"ip_addresses": "152124", "label": "VOO", "test_count": "38921", "aggregate_date": "2015-05-01", "id": "86144", "index_value": "42259", "rating": "3.4"}, {"ip_addresses": "6074", "label": "BASE Company NV/SA", "test_count": "180", "aggregate_date": "2015-05-01", "id": "95722", "index_value": "24127", "rating": "4.0"}, {"ip_addresses": "1328382", "label": "Belgacom Skynet", "test_count": "82899", "aggregate_date": "2015-05-01", "id": "43704", "index_value": "23570", "rating": "2.7"}, {"ip_addresses": "4483", "label": "Destiny N.V", "test_count": "580", "aggregate_date": "2015-05-01", "id": "33720", "index_value": "21865", "rating": "3.0"}, {"ip_addresses": "150095", "label": "Scarlet", "test_count": "12860", "aggregate_date": "2015-05-01", "id": "1399", "index_value": "19766", "rating": "2.9"}, {"ip_addresses": "31419", "label": "EDPNET", "test_count": "1485", "aggregate_date": "2015-05-01", "id": "2167", "index_value": "19253", "rating": "3.5"}, {"ip_addresses": "4419", "label": "e-leven sa", "test_count": "448", "aggregate_date": "2015-05-01", "id": "12766", "index_value": "18697", "rating": "3.0"}, {"ip_addresses": "72662", "label": "Mobistar SA", "test_count": "3204", "aggregate_date": "2015-05-01", "id": "67628", "index_value": "14147", "rating": "2.1"}, {"ip_addresses": "4507", "label": "Alpha Networks S.P.R.L.", "test_count": "264", "aggregate_date": "2015-05-01", "id": "35005", "index_value": "7368", "rating": "2.6"}], "bandwidth": "41340"}, {"countryCode": "BS", "test_count": "6241", "ip_addresses": "37935", "country": "Bahamas", "isps": [{"ip_addresses": "26814", "label": "Cable Bahamas", "test_count": "3664", "aggregate_date": "2015-05-01", "id": "1638", "index_value": "19454", "rating": "2.6"}, {"ip_addresses": "15810", "label": "The Bahamas Telecommunications Company", "test_count": "2572", "aggregate_date": "2015-05-01", "id": "19556", "index_value": "8814", "rating": "2.2"}], "bandwidth": "15083"}, {"countryCode": "HN", 
"test_count": "52589", "ip_addresses": "104702", "country": "Honduras", "isps": [{"ip_addresses": "2736", "label": "Redes y Telecomunicaciones", "test_count": "2068", "aggregate_date": "2015-05-01", "id": "26715", "index_value": "11844", "rating": "3.5"}, {"ip_addresses": "973", "label": "Telef\u00f3nica Celular S.A", "test_count": "1168", "aggregate_date": "2015-05-01", "id": "48442", "index_value": "8681", "rating": "2.9"}, {"ip_addresses": "3124", "label": "Navega.com S.A.", "test_count": "1055", "aggregate_date": "2015-05-01", "id": "823", "index_value": "6476", "rating": "3.2"}, {"ip_addresses": "5312", "label": "AMNET US", "test_count": "1204", "aggregate_date": "2015-05-01", "id": "697", "index_value": "5136", "rating": "3.1"}, {"ip_addresses": "2345", "label": "Navega.com Honduras", "test_count": "1865", "aggregate_date": "2015-05-01", "id": "35175", "index_value": "5123", "rating": "3.3"}, {"ip_addresses": "1002", "label": "Multidata", "test_count": "977", "aggregate_date": "2015-05-01", "id": "87775", "index_value": "4887", "rating": "3.3"}, {"ip_addresses": "6494", "label": "SERCOM de Honduras", "test_count": "5664", "aggregate_date": "2015-05-01", "id": "20204", "index_value": "4660", "rating": "3.1"}, {"ip_addresses": "2475", "label": "SULANET SA / INSETEC GROUP", "test_count": "2488", "aggregate_date": "2015-05-01", "id": "14835", "index_value": "4584", "rating": "2.8"}, {"ip_addresses": "790", "label": "AsiNetwork", "test_count": "2593", "aggregate_date": "2015-05-01", "id": "90097", "index_value": "4475", "rating": "4.0"}, {"ip_addresses": "2521", "label": "Amnet Cable San Pedro Sula", "test_count": "1003", "aggregate_date": "2015-05-01", "id": "75035", "index_value": "4131", "rating": "3.3"}, {"ip_addresses": "22301", "label": "Millicom Cable Honduras S.A. 
de C.V.", "test_count": "7837", "aggregate_date": "2015-05-01", "id": "76562", "index_value": "4014", "rating": "2.8"}, {"ip_addresses": "10018", "label": "CABLECOLOR S.A.", "test_count": "9652", "aggregate_date": "2015-05-01", "id": "22298", "index_value": "3758", "rating": "2.5"}, {"ip_addresses": "6614", "label": "Amnet Cable Tegucigalpa", "test_count": "1352", "aggregate_date": "2015-05-01", "id": "75111", "index_value": "3735", "rating": "2.6"}, {"ip_addresses": "5535", "label": "TEVISAT, S.A.", "test_count": "1379", "aggregate_date": "2015-05-01", "id": "28330", "index_value": "2962", "rating": "3.6"}, {"ip_addresses": "517", "label": "Metropolis", "test_count": "1804", "aggregate_date": "2015-05-01", "id": "94560", "index_value": "2562", "rating": "3.0"}, {"ip_addresses": "16204", "label": "HONDUTEL", "test_count": "3380", "aggregate_date": "2015-05-01", "id": "30155", "index_value": "1999", "rating": "3.0"}], "bandwidth": "4669"}, {"countryCode": "LK", "test_count": "13603", "ip_addresses": "303261", "country": "Sri Lanka", "isps": [{"ip_addresses": "185713", "label": "Sri Lanka Telecom", "test_count": "27324", "aggregate_date": "2015-05-01", "id": "555", "index_value": "6681", "rating": "2.5"}, {"ip_addresses": "29753", "label": "Dialog Axiata PLC.", "test_count": "7", "aggregate_date": "2015-05-01", "id": "72051", "index_value": "2842", "rating": "3.0"}], "bandwidth": "6755"}, {"countryCode": "NI", "test_count": "37028", "ip_addresses": "114834", "country": "Nicaragua", "isps": [{"ip_addresses": "1378", "label": "EQUIPOS Y SISTEMAS S.A.", "test_count": "621", "aggregate_date": "2015-05-01", "id": "33287", "index_value": "23790", "rating": "3.8"}, {"ip_addresses": "761", "label": "Telefonia Celular de Nicaragua SA.", "test_count": "119", "aggregate_date": "2015-05-01", "id": "98760", "index_value": "4860", "rating": "4.1"}, {"ip_addresses": "5295", "label": "IBW Communications", "test_count": "4356", "aggregate_date": "2015-05-01", "id": "6522", 
"index_value": "4395", "rating": "2.8"}, {"ip_addresses": "40765", "label": "TELEMATIX/ ENITEL", "test_count": "12189", "aggregate_date": "2015-05-01", "id": "558", "index_value": "4319", "rating": "2.7"}, {"ip_addresses": "1027", "label": "Alfanumeric S.A.", "test_count": "968", "aggregate_date": "2015-05-01", "id": "87811", "index_value": "3651", "rating": "3.0"}, {"ip_addresses": "37714", "label": "Telgua", "test_count": "9377", "aggregate_date": "2015-05-01", "id": "925", "index_value": "3445", "rating": "2.7"}, {"ip_addresses": "11970", "label": "Yota De Nicaragua", "test_count": "7207", "aggregate_date": "2015-05-01", "id": "46033", "index_value": "2996", "rating": "3.0"}], "bandwidth": "4863"}, {"countryCode": "AW", "test_count": "4415", "ip_addresses": "23338", "country": "Aruba", "isps": [{"ip_addresses": "21255", "label": "Setarnet", "test_count": "4201", "aggregate_date": "2015-05-01", "id": "4126", "index_value": "21570", "rating": "2.3"}], "bandwidth": "21024"}, {"countryCode": "LT", "test_count": "80618", "ip_addresses": "696219", "country": "Lithuania", "isps": [{"ip_addresses": "26693", "label": "Penki Kontinentai", "test_count": "5955", "aggregate_date": "2015-05-01", "id": "1469", "index_value": "95262", "rating": "4.5"}, {"ip_addresses": "8006", "label": "KAVA", "test_count": "1728", "aggregate_date": "2015-05-01", "id": "66419", "index_value": "64388", "rating": "4.0"}, {"ip_addresses": "34252", "label": "Meganet", "test_count": "3212", "aggregate_date": "2015-05-01", "id": "1102", "index_value": "61169", "rating": "4.0"}, {"ip_addresses": "31619", "label": "UAB Cgates", "test_count": "5790", "aggregate_date": "2015-05-01", "id": "71788", "index_value": "58085", "rating": "4.1"}, {"ip_addresses": "9507", "label": "UAB Kauno interneto sistemos", "test_count": "2202", "aggregate_date": "2015-05-01", "id": "19092", "index_value": "50780", "rating": "3.6"}, {"ip_addresses": "2236", "label": "Splius", "test_count": "3125", "aggregate_date": 
"2015-05-01", "id": "103377", "index_value": "50045", "rating": "3.7"}, {"ip_addresses": "1605", "label": "Airnet", "test_count": "326", "aggregate_date": "2015-05-01", "id": "47718", "index_value": "49459", "rating": "4.0"}, {"ip_addresses": "37013", "label": "TEO LT", "test_count": "30855", "aggregate_date": "2015-05-01", "id": "103742", "index_value": "47457", "rating": "3.3"}, {"ip_addresses": "4087", "label": "Fast Link", "test_count": "541", "aggregate_date": "2015-05-01", "id": "26763", "index_value": "44202", "rating": "4.4"}, {"ip_addresses": "22269", "label": "Nacionalinis Telekomunikaciju Tinklas", "test_count": "1017", "aggregate_date": "2015-05-01", "id": "48449", "index_value": "43744", "rating": "3.7"}, {"ip_addresses": "3473", "label": "Verslo Tiltas", "test_count": "351", "aggregate_date": "2015-05-01", "id": "20989", "index_value": "41161", "rating": "3.7"}, {"ip_addresses": "30622", "label": "Balticum", "test_count": "3023", "aggregate_date": "2015-05-01", "id": "1466", "index_value": "36266", "rating": "3.5"}, {"ip_addresses": "3355", "label": "NNT", "test_count": "153", "aggregate_date": "2015-05-01", "id": "10765", "index_value": "35262", "rating": "3.4"}, {"ip_addresses": "5160", "label": "Baltneta", "test_count": "1092", "aggregate_date": "2015-05-01", "id": "42030", "index_value": "33890", "rating": "3.5"}, {"ip_addresses": "1238", "label": "KLI LT, UAB", "test_count": "1234", "aggregate_date": "2015-05-01", "id": "70217", "index_value": "32390", "rating": "3.1"}, {"ip_addresses": "6568", "label": "UAB Bite Lietuva", "test_count": "2908", "aggregate_date": "2015-05-01", "id": "90132", "index_value": "20461", "rating": "2.6"}, {"ip_addresses": "1697", "label": "Etanetas", "test_count": "556", "aggregate_date": "2015-05-01", "id": "47228", "index_value": "16153", "rating": "4.1"}, {"ip_addresses": "9217", "label": "Omnitel", "test_count": "1630", "aggregate_date": "2015-05-01", "id": "7517", "index_value": "14210", "rating": "2.8"}, 
{"ip_addresses": "29232", "label": "Tele2", "test_count": "199", "aggregate_date": "2015-05-01", "id": "34200", "index_value": "10168", "rating": "2.9"}, {"ip_addresses": "2875", "label": "TELE2 Telecom Services GmbH", "test_count": "51", "aggregate_date": "2015-05-01", "id": "85011", "index_value": "7275", "rating": "3.0"}], "bandwidth": "48092"}, {"countryCode": "JP", "test_count": "219722", "ip_addresses": "2159098", "country": "Japan", "isps": [{"ip_addresses": "29849", "label": "ctc", "test_count": "2460", "aggregate_date": "2015-05-01", "id": "8391", "index_value": "142937", "rating": "3.8"}, {"ip_addresses": "42355", "label": "Asahi Net", "test_count": "7984", "aggregate_date": "2015-05-01", "id": "8003", "index_value": "120115", "rating": "3.7"}, {"ip_addresses": "22764", "label": "So-net Corporation", "test_count": "11531", "aggregate_date": "2015-05-01", "id": "98637", "index_value": "106824", "rating": "3.1"}, {"ip_addresses": "13054", "label": "TOKAI", "test_count": "2939", "aggregate_date": "2015-05-01", "id": "88640", "index_value": "104552", "rating": "3.2"}, {"ip_addresses": "14418", "label": "BIGLOBE Inc.", "test_count": "6038", "aggregate_date": "2015-05-01", "id": "99561", "index_value": "101017", "rating": "3.2"}, {"ip_addresses": "39895", "label": "Internet Initiative Japan", "test_count": "9563", "aggregate_date": "2015-05-01", "id": "34731", "index_value": "93772", "rating": "3.5"}, {"ip_addresses": "151338", "label": "BIGLOBE", "test_count": "5487", "aggregate_date": "2015-05-01", "id": "16797", "index_value": "93425", "rating": "3.6"}, {"ip_addresses": "5936", "label": "ARTERIA Networks Corporation", "test_count": "3045", "aggregate_date": "2015-05-01", "id": "99556", "index_value": "83273", "rating": "3.4"}, {"ip_addresses": "9826", "label": "Arteria", "test_count": "2614", "aggregate_date": "2015-05-01", "id": "93059", "index_value": "82024", "rating": "4.0"}, {"ip_addresses": "641093", "label": "NTT Open Computer Network", "test_count": 
"59705", "aggregate_date": "2015-05-01", "id": "2402", "index_value": "77897", "rating": "3.3"}, {"ip_addresses": "295807", "label": "SoftBank", "test_count": "21937", "aggregate_date": "2015-05-01", "id": "1612", "index_value": "74945", "rating": "3.0"}, {"ip_addresses": "69413", "label": "FreeBit", "test_count": "3160", "aggregate_date": "2015-05-01", "id": "7036", "index_value": "58066", "rating": "3.0"}, {"ip_addresses": "21299", "label": "So-net Service", "test_count": "551", "aggregate_date": "2015-05-01", "id": "3436", "index_value": "57814", "rating": "3.4"}, {"ip_addresses": "14537", "label": "Dream Train Internet Inc.", "test_count": "582", "aggregate_date": "2015-05-01", "id": "3706", "index_value": "57563", "rating": "3.5"}, {"ip_addresses": "5688", "label": "iTSCOM", "test_count": "1677", "aggregate_date": "2015-05-01", "id": "90116", "index_value": "55766", "rating": "3.2"}, {"ip_addresses": "8569", "label": "JAPAN CABLENET LIMITED", "test_count": "995", "aggregate_date": "2015-05-01", "id": "9551", "index_value": "52129", "rating": "3.0"}, {"ip_addresses": "13227", "label": "EDION Corporation", "test_count": "738", "aggregate_date": "2015-05-01", "id": "43757", "index_value": "29165", "rating": "2.3"}, {"ip_addresses": "174999", "label": "NTT Communications", "test_count": "755", "aggregate_date": "2015-05-01", "id": "1213", "index_value": "27329", "rating": "3.7"}, {"ip_addresses": "48980", "label": "eMobile", "test_count": "1488", "aggregate_date": "2015-05-01", "id": "76434", "index_value": "16020", "rating": "2.5"}, {"ip_addresses": "29882", "label": "eAccess Ltd.", "test_count": "685", "aggregate_date": "2015-05-01", "id": "10899", "index_value": "6216", "rating": "3.0"}], "bandwidth": "78374"}, {"countryCode": "IS", "test_count": "6755", "ip_addresses": "121130", "country": "Iceland", "isps": [{"ip_addresses": "453", "label": "Rannsokna- og haskolanet Islands hf.", "test_count": "72", "aggregate_date": "2015-05-01", "id": "102936", 
"index_value": "67184", "rating": "4.3"}, {"ip_addresses": "1211", "label": "Simafelagid ehf", "test_count": "183", "aggregate_date": "2015-05-01", "id": "63905", "index_value": "64027", "rating": "4.2"}, {"ip_addresses": "3989", "label": "Hringdu ehf", "test_count": "778", "aggregate_date": "2015-05-01", "id": "79275", "index_value": "55052", "rating": "3.4"}, {"ip_addresses": "1152", "label": "365 midlar ehf.", "test_count": "260", "aggregate_date": "2015-05-01", "id": "88385", "index_value": "53688", "rating": "4.4"}, {"ip_addresses": "1224", "label": "Hringidan ehf / Vortex Inc", "test_count": "223", "aggregate_date": "2015-05-01", "id": "6906", "index_value": "47546", "rating": "3.9"}, {"ip_addresses": "28376", "label": "Fjarskipti ehf", "test_count": "2560", "aggregate_date": "2015-05-01", "id": "69176", "index_value": "47024", "rating": "2.9"}, {"ip_addresses": "7353", "label": "IP fjarskipti ehf", "test_count": "286", "aggregate_date": "2015-05-01", "id": "2271", "index_value": "45039", "rating": "2.8"}, {"ip_addresses": "38203", "label": "Siminn", "test_count": "1119", "aggregate_date": "2015-05-01", "id": "78646", "index_value": "32584", "rating": "2.8"}, {"ip_addresses": "1400", "label": "Backbone ehf", "test_count": "193", "aggregate_date": "2015-05-01", "id": "51634", "index_value": "20203", "rating": "3.6"}], "bandwidth": "42245"}, {"countryCode": "BY", "test_count": "146237", "ip_addresses": "1048106", "country": "Belarus", "isps": [{"ip_addresses": "18311", "label": "FE ALTERNATIVNAYA ZIFROVAYA SET", "test_count": "13000", "aggregate_date": "2015-05-01", "id": "48878", "index_value": "37798", "rating": "3.3"}, {"ip_addresses": "12722", "label": "ALFA TELECOM s.r.o.", "test_count": "4193", "aggregate_date": "2015-05-01", "id": "33486", "index_value": "35160", "rating": "3.8"}, {"ip_addresses": "3314", "label": "FLYNET.BY", "test_count": "415", "aggregate_date": "2015-05-01", "id": "88642", "index_value": "30434", "rating": "4.7"}, {"ip_addresses": 
"106618", "label": "Mobile TeleSystems JLLC", "test_count": "16056", "aggregate_date": "2015-05-01", "id": "45160", "index_value": "29584", "rating": "3.3"}, {"ip_addresses": "2037", "label": "PE NETBERRY", "test_count": "416", "aggregate_date": "2015-05-01", "id": "63233", "index_value": "28197", "rating": "3.6"}, {"ip_addresses": "268", "label": "WIKILINK", "test_count": "652", "aggregate_date": "2015-05-01", "id": "103122", "index_value": "27481", "rating": "3.8"}, {"ip_addresses": "14901", "label": "ISP GARANT-CATV-GOMEL", "test_count": "3954", "aggregate_date": "2015-05-01", "id": "99619", "index_value": "13915", "rating": "3.3"}, {"ip_addresses": "12351", "label": "Belinfonet Ltd.", "test_count": "993", "aggregate_date": "2015-05-01", "id": "1506", "index_value": "12438", "rating": "3.8"}, {"ip_addresses": "983", "label": "Elnet", "test_count": "745", "aggregate_date": "2015-05-01", "id": "102743", "index_value": "12431", "rating": "4.6"}, {"ip_addresses": "2279", "label": "Telecom Media Systems JLLC", "test_count": "509", "aggregate_date": "2015-05-01", "id": "47680", "index_value": "12013", "rating": "4.3"}, {"ip_addresses": "355525", "label": "Belpak", "test_count": "87788", "aggregate_date": "2015-05-01", "id": "101584", "index_value": "11464", "rating": "2.8"}, {"ip_addresses": "3220", "label": "IP TelCom", "test_count": "728", "aggregate_date": "2015-05-01", "id": "25745", "index_value": "10514", "rating": "3.2"}, {"ip_addresses": "23228", "label": "FE VELCOM", "test_count": "2936", "aggregate_date": "2015-05-01", "id": "31978", "index_value": "9707", "rating": "3.3"}], "bandwidth": "17195"}, {"countryCode": "Unknown | Kazakstan", "test_count": "279706", "ip_addresses": "1322978", "country": "Kazakstan", "isps": [{"ip_addresses": "641", "label": "X-COMMUNICATION Ltd.", "test_count": "580", "aggregate_date": "2015-05-01", "id": "73512", "index_value": "45450", "rating": "3.9"}, {"ip_addresses": "1550", "label": "Modern telecommunication systems LLP", 
"test_count": "903", "aggregate_date": "2015-05-01", "id": "37790", "index_value": "40026", "rating": "4.2"}, {"ip_addresses": "2890", "label": "LLP Asket", "test_count": "234", "aggregate_date": "2015-05-01", "id": "55852", "index_value": "37402", "rating": "3.8"}, {"ip_addresses": "1792", "label": "Too b-tel", "test_count": "310", "aggregate_date": "2015-05-01", "id": "88797", "index_value": "34288", "rating": "3.8"}, {"ip_addresses": "118823", "label": "2Day Telecom LLP", "test_count": "30253", "aggregate_date": "2015-05-01", "id": "75023", "index_value": "30807", "rating": "3.2"}, {"ip_addresses": "1358", "label": "ALFA TELECOM s.r.o.", "test_count": "828", "aggregate_date": "2015-05-01", "id": "33486", "index_value": "21407", "rating": "3.8"}, {"ip_addresses": "17364", "label": "JSC AlmaTV", "test_count": "1612", "aggregate_date": "2015-05-01", "id": "1753", "index_value": "20497", "rating": "2.8"}, {"ip_addresses": "6468", "label": "Eurasia Star Ltd.", "test_count": "1684", "aggregate_date": "2015-05-01", "id": "89013", "index_value": "19295", "rating": "2.6"}, {"ip_addresses": "1067000", "label": "JSC Kazakhtelecom", "test_count": "185100", "aggregate_date": "2015-05-01", "id": "42851", "index_value": "18339", "rating": "2.9"}, {"ip_addresses": "3319", "label": "JSC ALTEL", "test_count": "10846", "aggregate_date": "2015-05-01", "id": "19287", "index_value": "11614", "rating": "3.2"}, {"ip_addresses": "1609", "label": "SMARTNET TOO", "test_count": "870", "aggregate_date": "2015-05-01", "id": "26035", "index_value": "11189", "rating": "3.3"}, {"ip_addresses": "5636", "label": "JSC Kaztranscom", "test_count": "1585", "aggregate_date": "2015-05-01", "id": "1760", "index_value": "9612", "rating": "3.2"}, {"ip_addresses": "1828", "label": "ASTEL JSC", "test_count": "874", "aggregate_date": "2015-05-01", "id": "49047", "index_value": "9506", "rating": "3.4"}, {"ip_addresses": "7097", "label": "JSC Transtelecom", "test_count": "5090", "aggregate_date": "2015-05-01", 
"id": "2769", "index_value": "9429", "rating": "2.9"}, {"ip_addresses": "31927", "label": "Kcell JSC", "test_count": "328", "aggregate_date": "2015-05-01", "id": "82305", "index_value": "8587", "rating": "3.6"}, {"ip_addresses": "3019", "label": "JSC Sky Silk", "test_count": "297", "aggregate_date": "2015-05-01", "id": "10805", "index_value": "7884", "rating": "3.4"}, {"ip_addresses": "691", "label": "TNS-Plus LLP", "test_count": "1796", "aggregate_date": "2015-05-01", "id": "65606", "index_value": "4986", "rating": "4.0"}, {"ip_addresses": "2695", "label": "Baynur and P Ltd.", "test_count": "118", "aggregate_date": "2015-05-01", "id": "80667", "index_value": "3115", "rating": "3.0"}, {"ip_addresses": "4683", "label": "Tele2 Kazakhstan", "test_count": "3945", "aggregate_date": "2015-05-01", "id": "71762", "index_value": "2860", "rating": "2.7"}, {"ip_addresses": "1624", "label": "Aksoran LLP", "test_count": "657", "aggregate_date": "2015-05-01", "id": "78901", "index_value": "2049", "rating": "3.4"}], "bandwidth": "19027"}, {"countryCode": "Unknown | Republic of Moldova", "test_count": "44440", "ip_addresses": "503009", "country": "Republic of Moldova", "isps": [{"ip_addresses": "959", "label": "SC Nordlinks SRL", "test_count": "671", "aggregate_date": "2015-05-01", "id": "89548", "index_value": "158303", "rating": "4.1"}, {"ip_addresses": "61593", "label": "STARNET S.R.L", "test_count": "7285", "aggregate_date": "2015-05-01", "id": "952", "index_value": "73830", "rating": "4.1"}, {"ip_addresses": "3983", "label": "DANIS SRL", "test_count": "428", "aggregate_date": "2015-05-01", "id": "25185", "index_value": "65375", "rating": "4.0"}, {"ip_addresses": "1935", "label": "Larom TV SRL", "test_count": "337", "aggregate_date": "2015-05-01", "id": "75722", "index_value": "58226", "rating": "3.6"}, {"ip_addresses": "9164", "label": "S.C. 
GLOBNET S.R.L.", "test_count": "1917", "aggregate_date": "2015-05-01", "id": "36182", "index_value": "53376", "rating": "2.8"}, {"ip_addresses": "8143", "label": "Sc Starnet Srl", "test_count": "1789", "aggregate_date": "2015-05-01", "id": "88997", "index_value": "52825", "rating": "4.2"}, {"ip_addresses": "7770", "label": "Arax-Impex s.r.l.", "test_count": "1016", "aggregate_date": "2015-05-01", "id": "950", "index_value": "51158", "rating": "3.7"}, {"ip_addresses": "15112", "label": "JV Sun Communications S.R.L.", "test_count": "2083", "aggregate_date": "2015-05-01", "id": "945", "index_value": "47682", "rating": "2.6"}, {"ip_addresses": "48868", "label": "Moldtelecom SA", "test_count": "7440", "aggregate_date": "2015-05-01", "id": "82299", "index_value": "33226", "rating": "3.3"}, {"ip_addresses": "25978", "label": "JSC Moldtelecom S.A.", "test_count": "2039", "aggregate_date": "2015-05-01", "id": "70620", "index_value": "27465", "rating": "3.1"}, {"ip_addresses": "38821", "label": "Societatea pe Actiuni MOLDTELECOM", "test_count": "6208", "aggregate_date": "2015-05-01", "id": "96071", "index_value": "27383", "rating": "3.4"}, {"ip_addresses": "57192", "label": "JSCC Interdnestrcom", "test_count": "6007", "aggregate_date": "2015-05-01", "id": "1981", "index_value": "24780", "rating": "3.0"}, {"ip_addresses": "590", "label": "Monitoring S.R.L.", "test_count": "311", "aggregate_date": "2015-05-01", "id": "84304", "index_value": "23292", "rating": "3.8"}, {"ip_addresses": "757", "label": "Linkservice LLC", "test_count": "101", "aggregate_date": "2015-05-01", "id": "97743", "index_value": "21485", "rating": "4.1"}, {"ip_addresses": "39996", "label": "ORANGE MOLDOVA S.A.", "test_count": "1463", "aggregate_date": "2015-05-01", "id": "32386", "index_value": "19766", "rating": "3.0"}, {"ip_addresses": "3461", "label": "Linkservice, Ltd", "test_count": "674", "aggregate_date": "2015-05-01", "id": "97034", "index_value": "19415", "rating": "4.2"}, {"ip_addresses": 
"12483", "label": "Moldcell S.A.", "test_count": "280", "aggregate_date": "2015-05-01", "id": "25601", "index_value": "8192", "rating": "3.0"}], "bandwidth": "42088"}, {"countryCode": "CZ", "test_count": "303657", "ip_addresses": "1225880", "country": "Czech Republic", "isps": [{"ip_addresses": "9415", "label": "CentroNet a.s.", "test_count": "2086", "aggregate_date": "2015-05-01", "id": "37099", "index_value": "137399", "rating": "4.3"}, {"ip_addresses": "11182", "label": "CESNET z.s.p.o.", "test_count": "1977", "aggregate_date": "2015-05-01", "id": "87761", "index_value": "132945", "rating": "4.6"}, {"ip_addresses": "281711", "label": "UPC Ceska Republica, s.r.o.", "test_count": "61582", "aggregate_date": "2015-05-01", "id": "73481", "index_value": "45252", "rating": "3.7"}, {"ip_addresses": "6027", "label": "Casablanca INT", "test_count": "2752", "aggregate_date": "2015-05-01", "id": "3908", "index_value": "43304", "rating": "3.5"}, {"ip_addresses": "4718", "label": "Ipex Ltd.", "test_count": "2282", "aggregate_date": "2015-05-01", "id": "90131", "index_value": "27755", "rating": "3.9"}, {"ip_addresses": "28827", "label": "RIO Media a.s.", "test_count": "4025", "aggregate_date": "2015-05-01", "id": "40253", "index_value": "25777", "rating": "3.7"}, {"ip_addresses": "11360", "label": "Dial Telecom, a.s.", "test_count": "7141", "aggregate_date": "2015-05-01", "id": "8639", "index_value": "23652", "rating": "3.8"}, {"ip_addresses": "4628", "label": "Softex NCP, s.r.o.", "test_count": "224", "aggregate_date": "2015-05-01", "id": "34720", "index_value": "22939", "rating": "3.2"}, {"ip_addresses": "4431", "label": "M-SOFT, spol. 
s r.o.", "test_count": "3028", "aggregate_date": "2015-05-01", "id": "67617", "index_value": "22487", "rating": "3.4"}, {"ip_addresses": "5742", "label": "ha-vel internet s.r.o.", "test_count": "3719", "aggregate_date": "2015-05-01", "id": "3256", "index_value": "21225", "rating": "3.8"}, {"ip_addresses": "8279", "label": "CD-Telematika a.s.", "test_count": "6370", "aggregate_date": "2015-05-01", "id": "2016", "index_value": "21194", "rating": "3.5"}, {"ip_addresses": "6474", "label": "Internethome, s.r.o.", "test_count": "3066", "aggregate_date": "2015-05-01", "id": "76472", "index_value": "20647", "rating": "3.6"}, {"ip_addresses": "5332", "label": "COPROSYS a.s.", "test_count": "2304", "aggregate_date": "2015-05-01", "id": "15359", "index_value": "20508", "rating": "3.7"}, {"ip_addresses": "8287", "label": "Vodafone Czech Republic a.s.", "test_count": "3670", "aggregate_date": "2015-05-01", "id": "45297", "index_value": "19760", "rating": "3.3"}, {"ip_addresses": "66087", "label": "T-Mobile Czech Republic a.s.", "test_count": "14776", "aggregate_date": "2015-05-01", "id": "42877", "index_value": "19518", "rating": "2.9"}, {"ip_addresses": "5748", "label": "NWT a.s", "test_count": "2625", "aggregate_date": "2015-05-01", "id": "69184", "index_value": "15468", "rating": "4.2"}, {"ip_addresses": "5187", "label": "METRONET s.r.o.", "test_count": "1291", "aggregate_date": "2015-05-01", "id": "51924", "index_value": "14584", "rating": "3.7"}, {"ip_addresses": "11957", "label": "COMA s.r.o.", "test_count": "1264", "aggregate_date": "2015-05-01", "id": "9985", "index_value": "13176", "rating": "3.4"}, {"ip_addresses": "93326", "label": "Eurotel", "test_count": "1792", "aggregate_date": "2015-05-01", "id": "1151", "index_value": "12280", "rating": "2.8"}, {"ip_addresses": "8872", "label": "MobilKom, a.s.", "test_count": "109", "aggregate_date": "2015-05-01", "id": "67695", "index_value": "2789", "rating": "2.8"}], "bandwidth": "29336"}, {"countryCode": "CY", "test_count": 
"47545", "ip_addresses": "305975", "country": "Cyprus", "isps": [{"ip_addresses": "1268", "label": "Cynet", "test_count": "189", "aggregate_date": "2015-05-01", "id": "3433", "index_value": "50465", "rating": "3.4"}, {"ip_addresses": "22754", "label": "Wavespeed Ltd.", "test_count": "11595", "aggregate_date": "2015-05-01", "id": "90127", "index_value": "16959", "rating": "3.3"}, {"ip_addresses": "2858", "label": "Multimax Iletisim Limited", "test_count": "199", "aggregate_date": "2015-05-01", "id": "67787", "index_value": "11504", "rating": "3.0"}, {"ip_addresses": "1864", "label": "LOGOSNET Services Limited", "test_count": "130", "aggregate_date": "2015-05-01", "id": "21704", "index_value": "9736", "rating": "3.2"}, {"ip_addresses": "3853", "label": "MTN Cyprus Limited", "test_count": "3115", "aggregate_date": "2015-05-01", "id": "61546", "index_value": "9146", "rating": "2.9"}, {"ip_addresses": "1515", "label": "D.y. Worldnet Ltd", "test_count": "1070", "aggregate_date": "2015-05-01", "id": "92540", "index_value": "8731", "rating": "3.0"}, {"ip_addresses": "6600", "label": "DSP NetWay LTD", "test_count": "1214", "aggregate_date": "2015-05-01", "id": "1521", "index_value": "8037", "rating": "3.2"}, {"ip_addresses": "24026", "label": "Primetel PLC", "test_count": "3459", "aggregate_date": "2015-05-01", "id": "42010", "index_value": "7172", "rating": "2.8"}, {"ip_addresses": "104377", "label": "Cyprus Telecommuncations Authority", "test_count": "11626", "aggregate_date": "2015-05-01", "id": "93040", "index_value": "5784", "rating": "2.5"}, {"ip_addresses": "402", "label": "Cypking Network And Communication Ltd", "test_count": "4", "aggregate_date": "2015-05-01", "id": "103387", "index_value": "3326", "rating": "2.4"}], "bandwidth": "9890"}, {"countryCode": "GE", "test_count": "81460", "ip_addresses": "287098", "country": "Georgia", "isps": [{"ip_addresses": "6238", "label": "Delta-net Ltd", "test_count": "6181", "aggregate_date": "2015-05-01", "id": "79254", 
"index_value": "22728", "rating": "4.0"}, {"ip_addresses": "102882", "label": "Caucasus Online Ltd.", "test_count": "19769", "aggregate_date": "2015-05-01", "id": "54457", "index_value": "20781", "rating": "3.9"}, {"ip_addresses": "7928", "label": "LTD CGC Co", "test_count": "591", "aggregate_date": "2015-05-01", "id": "37113", "index_value": "20743", "rating": "4.1"}, {"ip_addresses": "924", "label": "Caucasus digital network", "test_count": "319", "aggregate_date": "2015-05-01", "id": "7411", "index_value": "15110", "rating": "3.6"}, {"ip_addresses": "94447", "label": "JSC Silknet", "test_count": "25155", "aggregate_date": "2015-05-01", "id": "61507", "index_value": "12485", "rating": "3.3"}, {"ip_addresses": "9517", "label": "System Net Ltd", "test_count": "5925", "aggregate_date": "2015-05-01", "id": "98019", "index_value": "11140", "rating": "3.6"}, {"ip_addresses": "1373", "label": "Railway Telecom, Ltd", "test_count": "559", "aggregate_date": "2015-05-01", "id": "10237", "index_value": "9182", "rating": "3.9"}, {"ip_addresses": "938", "label": "Magticom Ltd.", "test_count": "1796", "aggregate_date": "2015-05-01", "id": "17610", "index_value": "6523", "rating": "3.3"}], "bandwidth": "14903"}, {"countryCode": "KW", "test_count": "61814", "ip_addresses": "442416", "country": "Kuwait", "isps": [{"ip_addresses": "250072", "label": "Mobile Telecommunications Company", "test_count": "12324", "aggregate_date": "2015-05-01", "id": "25883", "index_value": "13032", "rating": "2.7"}, {"ip_addresses": "53572", "label": "Viva Kuwait", "test_count": "14262", "aggregate_date": "2015-05-01", "id": "97012", "index_value": "11941", "rating": "2.6"}, {"ip_addresses": "41384", "label": "Fast Telecommunications Company W.L.L.", "test_count": "6272", "aggregate_date": "2015-05-01", "id": "38748", "index_value": "9557", "rating": "3.1"}, {"ip_addresses": "70831", "label": "QualityNet", "test_count": "8876", "aggregate_date": "2015-05-01", "id": "1616", "index_value": "8784", 
"rating": "2.8"}, {"ip_addresses": "28558", "label": "Zajil International Telecom Company W.L.L.", "test_count": "5486", "aggregate_date": "2015-05-01", "id": "67611", "index_value": "8760", "rating": "2.9"}, {"ip_addresses": "6412", "label": "Wireless Mobile Data Company", "test_count": "2698", "aggregate_date": "2015-05-01", "id": "45652", "index_value": "8574", "rating": "3.8"}, {"ip_addresses": "33968", "label": "Gulfnet Kuwait", "test_count": "4122", "aggregate_date": "2015-05-01", "id": "11219", "index_value": "6215", "rating": "3.0"}, {"ip_addresses": "28604", "label": "Mada Communications", "test_count": "2642", "aggregate_date": "2015-05-01", "id": "29469", "index_value": "3871", "rating": "2.7"}], "bandwidth": "10302"}, {"countryCode": "Unknown | Palestinian Territory", "test_count": "30649", "ip_addresses": "336466", "country": "Palestinian Territory", "isps": [{"ip_addresses": "97515", "label": "PalTel", "test_count": "2707", "aggregate_date": "2015-05-01", "id": "843", "index_value": "9551", "rating": "2.9"}, {"ip_addresses": "2811", "label": "City Net Informatics, Internet and Communication T", "test_count": "455", "aggregate_date": "2015-05-01", "id": "65462", "index_value": "6576", "rating": "3.7"}, {"ip_addresses": "4855", "label": "ZONE Technologies Ltd", "test_count": "373", "aggregate_date": "2015-05-01", "id": "67726", "index_value": "6527", "rating": "3.8"}, {"ip_addresses": "4275", "label": "Coolnet New Communication Provider", "test_count": "797", "aggregate_date": "2015-05-01", "id": "37778", "index_value": "6124", "rating": "3.1"}, {"ip_addresses": "2380", "label": "SpeedClick for Information Technology and Communic", "test_count": "809", "aggregate_date": "2015-05-01", "id": "73230", "index_value": "5816", "rating": "3.7"}, {"ip_addresses": "4599", "label": "Netstream Technology Joint-Stock Private Ltd.", "test_count": "724", "aggregate_date": "2015-05-01", "id": "66745", "index_value": "5112", "rating": "3.2"}, {"ip_addresses": "4060", 
"label": "fusion services", "test_count": "353", "aggregate_date": "2015-05-01", "id": "15717", "index_value": "4927", "rating": "3.7"}, {"ip_addresses": "5824", "label": "Gemzo information technology Private Joint-Stock c", "test_count": "239", "aggregate_date": "2015-05-01", "id": "85022", "index_value": "4796", "rating": "2.5"}, {"ip_addresses": "1844", "label": "BCI Telecommunication & Advanced Technology Compan", "test_count": "376", "aggregate_date": "2015-05-01", "id": "30020", "index_value": "4440", "rating": "4.0"}, {"ip_addresses": "14114", "label": "Call U Communications Ltd.", "test_count": "1953", "aggregate_date": "2015-05-01", "id": "48406", "index_value": "4311", "rating": "3.4"}, {"ip_addresses": "36357", "label": "Mada ALArab LTD", "test_count": "1944", "aggregate_date": "2015-05-01", "id": "48828", "index_value": "4008", "rating": "2.8"}, {"ip_addresses": "91005", "label": "Hadara", "test_count": "6621", "aggregate_date": "2015-05-01", "id": "76425", "index_value": "3880", "rating": "2.7"}, {"ip_addresses": "1250", "label": "Ultranet for Communication and Information Technol", "test_count": "241", "aggregate_date": "2015-05-01", "id": "81542", "index_value": "3867", "rating": "3.6"}, {"ip_addresses": "2509", "label": "Jinan modern Techniques and communication Ltd.", "test_count": "912", "aggregate_date": "2015-05-01", "id": "71571", "index_value": "3677", "rating": "3.4"}, {"ip_addresses": "4373", "label": "Gemzo Information Technology", "test_count": "95", "aggregate_date": "2015-05-01", "id": "52745", "index_value": "3336", "rating": "2.6"}, {"ip_addresses": "2310", "label": "GlobalCom Telecommunications PLC", "test_count": "227", "aggregate_date": "2015-05-01", "id": "41633", "index_value": "3259", "rating": "3.4"}, {"ip_addresses": "2753", "label": "Mada", "test_count": "192", "aggregate_date": "2015-05-01", "id": "97022", "index_value": "3182", "rating": "2.2"}, {"ip_addresses": "18304", "label": "AL Zaytona Company For Communication Ltd.", 
"test_count": "424", "aggregate_date": "2015-05-01", "id": "51574", "index_value": "2987", "rating": "3.0"}, {"ip_addresses": "4506", "label": "Orange Palestine Group Co. for Technological Inves", "test_count": "579", "aggregate_date": "2015-05-01", "id": "73440", "index_value": "2623", "rating": "3.5"}, {"ip_addresses": "21553", "label": "Super Link Communications Co. Ltd", "test_count": "744", "aggregate_date": "2015-05-01", "id": "67706", "index_value": "2460", "rating": "3.2"}], "bandwidth": "4944"}, {"countryCode": "CN", "test_count": "264463", "ip_addresses": "4011746", "country": "China", "isps": [{"ip_addresses": "199478", "label": "China Telecom SHANGHAI", "test_count": "27299", "aggregate_date": "2015-05-01", "id": "88504", "index_value": "38641", "rating": "3.0"}, {"ip_addresses": "182904", "label": "China Telecom Jiangsu", "test_count": "15150", "aggregate_date": "2015-05-01", "id": "88510", "index_value": "38401", "rating": "3.5"}, {"ip_addresses": "34054", "label": "China Telecom Anhui", "test_count": "3663", "aggregate_date": "2015-05-01", "id": "88558", "index_value": "35886", "rating": "3.8"}, {"ip_addresses": "379323", "label": "China Telecom Guangdong", "test_count": "32360", "aggregate_date": "2015-05-01", "id": "88512", "index_value": "31239", "rating": "3.1"}, {"ip_addresses": "133804", "label": "China Telecom", "test_count": "19227", "aggregate_date": "2015-05-01", "id": "7209", "index_value": "30969", "rating": "3.3"}, {"ip_addresses": "131781", "label": "China Unicom Beijing", "test_count": "11669", "aggregate_date": "2015-05-01", "id": "88514", "index_value": "30764", "rating": "3.0"}, {"ip_addresses": "96842", "label": "China Telecom Zhejiang", "test_count": "12899", "aggregate_date": "2015-05-01", "id": "88539", "index_value": "28647", "rating": "3.5"}, {"ip_addresses": "35221", "label": "China Unicom Henan", "test_count": "4898", "aggregate_date": "2015-05-01", "id": "88552", "index_value": "28372", "rating": "3.7"}, {"ip_addresses": 
"79047", "label": "China Telecom Sichuan", "test_count": "8961", "aggregate_date": "2015-05-01", "id": "88535", "index_value": "25402", "rating": "3.8"}, {"ip_addresses": "45534", "label": "China Telecom HUBEI", "test_count": "5074", "aggregate_date": "2015-05-01", "id": "88551", "index_value": "25395", "rating": "3.2"}, {"ip_addresses": "114405", "label": "China Mobile", "test_count": "7893", "aggregate_date": "2015-05-01", "id": "88508", "index_value": "25223", "rating": "3.4"}, {"ip_addresses": "43384", "label": "China Telecom FUJIAN", "test_count": "4347", "aggregate_date": "2015-05-01", "id": "88533", "index_value": "24390", "rating": "3.5"}, {"ip_addresses": "28064", "label": "CHINA UNICOM Shanghai network", "test_count": "1918", "aggregate_date": "2015-05-01", "id": "35383", "index_value": "23760", "rating": "3.1"}, {"ip_addresses": "53729", "label": "China Unicom Shandong", "test_count": "7955", "aggregate_date": "2015-05-01", "id": "88507", "index_value": "22062", "rating": "3.6"}, {"ip_addresses": "34466", "label": "China Telecom HUNAN", "test_count": "4949", "aggregate_date": "2015-05-01", "id": "88536", "index_value": "18140", "rating": "3.5"}, {"ip_addresses": "28272", "label": "China Unicom Tianjin", "test_count": "2825", "aggregate_date": "2015-05-01", "id": "88557", "index_value": "17423", "rating": "3.2"}, {"ip_addresses": "27078", "label": "China TieTong", "test_count": "2220", "aggregate_date": "2015-05-01", "id": "88517", "index_value": "16333", "rating": "3.1"}, {"ip_addresses": "41110", "label": "China Unicom Liaoning", "test_count": "3307", "aggregate_date": "2015-05-01", "id": "88531", "index_value": "15246", "rating": "3.0"}, {"ip_addresses": "57880", "label": "China Unicom Guangdong", "test_count": "7125", "aggregate_date": "2015-05-01", "id": "88505", "index_value": "14455", "rating": "3.0"}, {"ip_addresses": "26970", "label": "China Telecom GUANGXI", "test_count": "2162", "aggregate_date": "2015-05-01", "id": "88506", "index_value": 
"14386", "rating": "3.6"}], "bandwidth": "27238"}, {"countryCode": "Unknown | South Korea", "test_count": "94174", "ip_addresses": "1275100", "country": "South Korea", "isps": [{"ip_addresses": "2044", "label": "Korea Cable Television Suwon Broadcating Co.", "test_count": "196", "aggregate_date": "2015-05-01", "id": "11041", "index_value": "82141", "rating": "2.6"}, {"ip_addresses": "591789", "label": "KT", "test_count": "32270", "aggregate_date": "2015-05-01", "id": "1261", "index_value": "66627", "rating": "3.9"}, {"ip_addresses": "5068", "label": "TBroad", "test_count": "1310", "aggregate_date": "2015-05-01", "id": "38792", "index_value": "64572", "rating": "2.4"}, {"ip_addresses": "12945", "label": "CJ HelloVision", "test_count": "1263", "aggregate_date": "2015-05-01", "id": "48571", "index_value": "63867", "rating": "2.7"}, {"ip_addresses": "89566", "label": "LG U+", "test_count": "12789", "aggregate_date": "2015-05-01", "id": "20094", "index_value": "59932", "rating": "3.5"}, {"ip_addresses": "32737", "label": "SK Broadband", "test_count": "9628", "aggregate_date": "2015-05-01", "id": "102903", "index_value": "57603", "rating": "3.1"}, {"ip_addresses": "20585", "label": "C&M", "test_count": "1838", "aggregate_date": "2015-05-01", "id": "26058", "index_value": "53009", "rating": "2.8"}, {"ip_addresses": "1832", "label": "HAIonNet", "test_count": "116", "aggregate_date": "2015-05-01", "id": "18085", "index_value": "29424", "rating": "3.4"}, {"ip_addresses": "24358", "label": "SK Telecom", "test_count": "495", "aggregate_date": "2015-05-01", "id": "16125", "index_value": "19135", "rating": "3.0"}], "bandwidth": "60427"}, {"countryCode": "Unknown | Vietnam", "test_count": "1006135", "ip_addresses": "3841253", "country": "Vietnam", "isps": [{"ip_addresses": "184997", "label": "FPT Telecom", "test_count": "77083", "aggregate_date": "2015-05-01", "id": "23623", "index_value": "24293", "rating": "2.7"}, {"ip_addresses": "745804", "label": "FPT Telecom Company", 
"test_count": "183354", "aggregate_date": "2015-05-01", "id": "43723", "index_value": "23196", "rating": "2.9"}, {"ip_addresses": "20173", "label": "Dai IP dong su dung cho ket noi xDSL", "test_count": "7021", "aggregate_date": "2015-05-01", "id": "99527", "index_value": "22148", "rating": "3.0"}, {"ip_addresses": "19458", "label": "CMC Telecom Infrastructure Company", "test_count": "15978", "aggregate_date": "2015-05-01", "id": "46039", "index_value": "21422", "rating": "3.0"}, {"ip_addresses": "1179", "label": "Vietnam Technology and Telecommunication JSC", "test_count": "1430", "aggregate_date": "2015-05-01", "id": "49039", "index_value": "21275", "rating": "2.6"}, {"ip_addresses": "13237", "label": "Vietnam Posts and Telecommunications (VNPT)", "test_count": "1917", "aggregate_date": "2015-05-01", "id": "884", "index_value": "19530", "rating": "3.3"}, {"ip_addresses": "10941", "label": "Cty co phan ha tang vien thong CMC", "test_count": "8736", "aggregate_date": "2015-05-01", "id": "73479", "index_value": "17662", "rating": "2.5"}, {"ip_addresses": "958871", "label": "Viettel Corporation", "test_count": "324214", "aggregate_date": "2015-05-01", "id": "48490", "index_value": "16801", "rating": "3.4"}, {"ip_addresses": "1596113", "label": "VDC", "test_count": "350381", "aggregate_date": "2015-05-01", "id": "77737", "index_value": "16522", "rating": "3.4"}, {"ip_addresses": "1645", "label": "VTC Wireless Broadband Company", "test_count": "376", "aggregate_date": "2015-05-01", "id": "77855", "index_value": "16020", "rating": "2.8"}, {"ip_addresses": "68028", "label": "VNPT", "test_count": "9970", "aggregate_date": "2015-05-01", "id": "880", "index_value": "9979", "rating": "3.1"}, {"ip_addresses": "780", "label": "Vietnam Telecom and Technology Solution Joint Stoc", "test_count": "400", "aggregate_date": "2015-05-01", "id": "100357", "index_value": "4243", "rating": "3.6"}, {"ip_addresses": "94159", "label": "SaiGon Tourist Cable Television", "test_count": "20674", 
"aggregate_date": "2015-05-01", "id": "38786", "index_value": "4087", "rating": "2.5"}], "bandwidth": "18532"}, {"countryCode": "HK", "test_count": "163597", "ip_addresses": "2063911", "country": "Hong Kong", "isps": [{"ip_addresses": "1980", "label": "PCCW IMS Limited", "test_count": "943", "aggregate_date": "2015-05-01", "id": "102901", "index_value": "204114", "rating": "3.6"}, {"ip_addresses": "271279", "label": "Hong Kong Broadband Network", "test_count": "47351", "aggregate_date": "2015-05-01", "id": "77744", "index_value": "149166", "rating": "3.4"}, {"ip_addresses": "697829", "label": "Netvigator", "test_count": "61351", "aggregate_date": "2015-05-01", "id": "70167", "index_value": "112962", "rating": "2.7"}, {"ip_addresses": "316187", "label": "3HK", "test_count": "14753", "aggregate_date": "2015-05-01", "id": "1124", "index_value": "94671", "rating": "2.8"}, {"ip_addresses": "77706", "label": "i-CABLE", "test_count": "8758", "aggregate_date": "2015-05-01", "id": "1828", "index_value": "47930", "rating": "2.0"}, {"ip_addresses": "78412", "label": "SmarTone", "test_count": "2008", "aggregate_date": "2015-05-01", "id": "2404", "index_value": "10850", "rating": "3.5"}, {"ip_addresses": "1046", "label": "CSL Mobile", "test_count": "2412", "aggregate_date": "2015-05-01", "id": "101231", "index_value": "10523", "rating": "3.5"}, {"ip_addresses": "714", "label": "Pacnet Services (Japan) Corp.", "test_count": "201", "aggregate_date": "2015-05-01", "id": "70195", "index_value": "10485", "rating": "2.9"}, {"ip_addresses": "4368", "label": "China Mobile", "test_count": "416", "aggregate_date": "2015-05-01", "id": "18922", "index_value": "5421", "rating": "3.0"}, {"ip_addresses": "248", "label": "Speednet Communications (HK) Ltd.", "test_count": "338", "aggregate_date": "2015-05-01", "id": "103876", "index_value": "3451", "rating": "3.4"}], "bandwidth": "101660"}, {"countryCode": "OM", "test_count": "44743", "ip_addresses": "257719", "country": "Oman", "isps": 
[{"ip_addresses": "151408", "label": "Omani Qatari Telecommunications Company SAOC", "test_count": "20472", "aggregate_date": "2015-05-01", "id": "43515", "index_value": "18727", "rating": "2.7"}, {"ip_addresses": "29031", "label": "OmanMobile Telecommunication company LLC", "test_count": "3971", "aggregate_date": "2015-05-01", "id": "63104", "index_value": "9381", "rating": "2.3"}, {"ip_addresses": "97920", "label": "Omantel", "test_count": "12000", "aggregate_date": "2015-05-01", "id": "39254", "index_value": "7688", "rating": "2.3"}], "bandwidth": "14753"}, {"countryCode": "IQ", "test_count": "58823", "ip_addresses": "222027", "country": "Iraq", "isps": [{"ip_addresses": "10294", "label": "Goran Net ISP Ltd.", "test_count": "1941", "aggregate_date": "2015-05-01", "id": "39871", "index_value": "17206", "rating": "3.7"}, {"ip_addresses": "1447", "label": "Al-Jazeera Al-Arabiya Company for Communication an", "test_count": "2396", "aggregate_date": "2015-05-01", "id": "72959", "index_value": "9308", "rating": "3.8"}, {"ip_addresses": "1661", "label": "FastLine For Communication And Information Technol", "test_count": "449", "aggregate_date": "2015-05-01", "id": "98032", "index_value": "7595", "rating": "3.8"}, {"ip_addresses": "5407", "label": "IQ Networks", "test_count": "5757", "aggregate_date": "2015-05-01", "id": "44053", "index_value": "7468", "rating": "3.9"}, {"ip_addresses": "95053", "label": "EarthLink Iraq", "test_count": "6363", "aggregate_date": "2015-05-01", "id": "88515", "index_value": "6365", "rating": "3.2"}, {"ip_addresses": "34140", "label": "Newroz Telecom Ltd.", "test_count": "8631", "aggregate_date": "2015-05-01", "id": "67629", "index_value": "6253", "rating": "3.3"}, {"ip_addresses": "1452", "label": "Hilal Al-Rafidain for Computer and Internet Servic", "test_count": "306", "aggregate_date": "2015-05-01", "id": "76810", "index_value": "6188", "rating": "3.5"}, {"ip_addresses": "674", "label": "Hayat for Internet & communication LLC", 
"test_count": "487", "aggregate_date": "2015-05-01", "id": "71279", "index_value": "4442", "rating": "3.7"}, {"ip_addresses": "2288", "label": "Al-Sard Company for Trading agencies Ltd.", "test_count": "1888", "aggregate_date": "2015-05-01", "id": "49138", "index_value": "4406", "rating": "3.4"}, {"ip_addresses": "2694", "label": "Tarin General Trading and Setting Up Internet Devi", "test_count": "7255", "aggregate_date": "2015-05-01", "id": "61684", "index_value": "4378", "rating": "3.7"}, {"ip_addresses": "433", "label": "Rey Telecom Company, for Telecommunication, LTD", "test_count": "1041", "aggregate_date": "2015-05-01", "id": "93667", "index_value": "3720", "rating": "3.8"}, {"ip_addresses": "1837", "label": "Midya Telecom for telecommunications-LTD", "test_count": "137", "aggregate_date": "2015-05-01", "id": "82031", "index_value": "1178", "rating": "3.4"}], "bandwidth": "6250"}, {"countryCode": "JO", "test_count": "82807", "ip_addresses": "322417", "country": "Jordan", "isps": [{"ip_addresses": "1263", "label": "AL-HADATHEH LIL-ITISALAT WA AL-TECHNOLOGIA CO.", "test_count": "2070", "aggregate_date": "2015-05-01", "id": "68002", "index_value": "32449", "rating": "3.7"}, {"ip_addresses": "23337", "label": "Linkdotnet-Jordan", "test_count": "17238", "aggregate_date": "2015-05-01", "id": "20599", "index_value": "17088", "rating": "3.0"}, {"ip_addresses": "538", "label": "Jordan Telecommunications Company", "test_count": "1129", "aggregate_date": "2015-05-01", "id": "8627", "index_value": "11325", "rating": "3.0"}, {"ip_addresses": "5809", "label": "Jordan Cable Services", "test_count": "3290", "aggregate_date": "2015-05-01", "id": "70995", "index_value": "8046", "rating": "3.5"}, {"ip_addresses": "677", "label": "VTEL HOLDINGS LIMITED/JORDAN CO.", "test_count": "678", "aggregate_date": "2015-05-01", "id": "44553", "index_value": "7555", "rating": "3.6"}, {"ip_addresses": "193971", "label": "Jordan Data Communications", "test_count": "34863", "aggregate_date": 
"2015-05-01", "id": "1305", "index_value": "6045", "rating": "2.7"}, {"ip_addresses": "6660", "label": "FIRSTNET (First Data Communication Group)", "test_count": "1112", "aggregate_date": "2015-05-01", "id": "1152", "index_value": "5565", "rating": "2.8"}, {"ip_addresses": "15608", "label": "The Blue Zone East / Jordan", "test_count": "4201", "aggregate_date": "2015-05-01", "id": "40079", "index_value": "4141", "rating": "2.8"}, {"ip_addresses": "18380", "label": "Batelco Jordan", "test_count": "3193", "aggregate_date": "2015-05-01", "id": "16853", "index_value": "3307", "rating": "2.9"}, {"ip_addresses": "22168", "label": "Umniah Mobile Company PLC", "test_count": "7684", "aggregate_date": "2015-05-01", "id": "93038", "index_value": "2280", "rating": "2.5"}, {"ip_addresses": "4249", "label": "Orbit Telecom Technology Co. Ltd", "test_count": "692", "aggregate_date": "2015-05-01", "id": "32492", "index_value": "1859", "rating": "2.7"}, {"ip_addresses": "13344", "label": "Umniah Lil-Hawatef Al-Mutanaqelah Co.", "test_count": "1583", "aggregate_date": "2015-05-01", "id": "49598", "index_value": "1549", "rating": "2.6"}], "bandwidth": "8597"}, {"countryCode": "AE", "test_count": "121196", "ip_addresses": "1193858", "country": "United Arab Emirates", "isps": [{"ip_addresses": "231250", "label": "Emirates Integrated Telecommunications", "test_count": "26238", "aggregate_date": "2015-05-01", "id": "14836", "index_value": "19086", "rating": "2.4"}, {"ip_addresses": "939230", "label": "Emirates Telecommunications", "test_count": "86034", "aggregate_date": "2015-05-01", "id": "927", "index_value": "18531", "rating": "2.6"}], "bandwidth": "18652"}, {"countryCode": "BH", "test_count": "5106", "ip_addresses": "192735", "country": "Bahrain", "isps": [{"ip_addresses": "48659", "label": "Mena Broadband Services WLL", "test_count": "67", "aggregate_date": "2015-05-01", "id": "24514", "index_value": "11040", "rating": "2.9"}, {"ip_addresses": "51488", "label": "Batelco", 
"test_count": "95", "aggregate_date": "2015-05-01", "id": "935", "index_value": "5790", "rating": "2.4"}, {"ip_addresses": "53353", "label": "MTC Vodafone Bahrain (B.S.C.)", "test_count": "170", "aggregate_date": "2015-05-01", "id": "969", "index_value": "3593", "rating": "2.7"}], "bandwidth": "11405"}, {"countryCode": "SD", "test_count": "7761", "ip_addresses": "55451", "country": "Sudan", "isps": [{"ip_addresses": "17710", "label": "Sudatel", "test_count": "850", "aggregate_date": "2015-05-01", "id": "2461", "index_value": "2457", "rating": "2.6"}, {"ip_addresses": "8304", "label": "Sudan Telecom", "test_count": "104", "aggregate_date": "2015-05-01", "id": "38759", "index_value": "801", "rating": "2.6"}], "bandwidth": "2355"}, {"countryCode": "Unknown | Taiwan", "test_count": "253862", "ip_addresses": "3411731", "country": "Taiwan", "isps": [{"ip_addresses": "948", "label": "Da Da Broadband Ltd.", "test_count": "648", "aggregate_date": "2015-05-01", "id": "102904", "index_value": "55248", "rating": "3.4"}, {"ip_addresses": "228929", "label": "HiNet", "test_count": "162563", "aggregate_date": "2015-05-01", "id": "104044", "index_value": "54067", "rating": "3.3"}, {"ip_addresses": "48284", "label": "TBC", "test_count": "2469", "aggregate_date": "2015-05-01", "id": "9460", "index_value": "52413", "rating": "2.9"}, {"ip_addresses": "57180", "label": "TWM Broadband", "test_count": "5176", "aggregate_date": "2015-05-01", "id": "42519", "index_value": "50607", "rating": "3.3"}, {"ip_addresses": "132911", "label": "Taiwan Mobile", "test_count": "16468", "aggregate_date": "2015-05-01", "id": "17696", "index_value": "49922", "rating": "2.6"}, {"ip_addresses": "22720", "label": "CNS", "test_count": "1776", "aggregate_date": "2015-05-01", "id": "15978", "index_value": "49675", "rating": "3.0"}, {"ip_addresses": "23559", "label": "Asia Pacific", "test_count": "1762", "aggregate_date": "2015-05-01", "id": "6903", "index_value": "46135", "rating": "3.4"}, {"ip_addresses": 
"2967", "label": "Savecom International Inc.", "test_count": "695", "aggregate_date": "2015-05-01", "id": "15640", "index_value": "41801", "rating": "3.8"}, {"ip_addresses": "129931", "label": "Seednet", "test_count": "15108", "aggregate_date": "2015-05-01", "id": "3558", "index_value": "41583", "rating": "3.0"}, {"ip_addresses": "15163", "label": "VeeTIME", "test_count": "1240", "aggregate_date": "2015-05-01", "id": "9713", "index_value": "36594", "rating": "3.2"}, {"ip_addresses": "1668", "label": "SHYH-SHIN CATV GROUP CO.LTD.", "test_count": "419", "aggregate_date": "2015-05-01", "id": "19667", "index_value": "32555", "rating": "3.3"}, {"ip_addresses": "3139", "label": "Asia Pacific On-Line Service Inc.", "test_count": "1199", "aggregate_date": "2015-05-01", "id": "30084", "index_value": "27255", "rating": "3.2"}, {"ip_addresses": "8407", "label": "So-net Taiwan", "test_count": "1428", "aggregate_date": "2015-05-01", "id": "48568", "index_value": "25131", "rating": "2.7"}, {"ip_addresses": "53522", "label": "Chunghwa Telecom Co., Ltd.", "test_count": "11368", "aggregate_date": "2015-05-01", "id": "101936", "index_value": "20480", "rating": "3.3"}, {"ip_addresses": "117045", "label": "FETnet", "test_count": "5201", "aggregate_date": "2015-05-01", "id": "12707", "index_value": "12476", "rating": "2.4"}, {"ip_addresses": "37659", "label": "Taiwan Star Telecom", "test_count": "3406", "aggregate_date": "2015-05-01", "id": "13119", "index_value": "9212", "rating": "2.5"}, {"ip_addresses": "16733", "label": "Global Mobile", "test_count": "907", "aggregate_date": "2015-05-01", "id": "46024", "index_value": "3194", "rating": "2.7"}], "bandwidth": "49979"}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "UZ", "test_count": "16312", "ip_addresses": "73654", "country": "Uzbekistan", "isps": [{"ip_addresses": "1778", "label": "Foreign Enterprise COSCOM Liability Limited Compan", 
"test_count": "359", "aggregate_date": "2015-05-01", "id": "40527", "index_value": "17745", "rating": "3.3"}, {"ip_addresses": "824", "label": "Unitel LLC", "test_count": "475", "aggregate_date": "2015-05-01", "id": "58131", "index_value": "14389", "rating": "3.2"}, {"ip_addresses": "5437", "label": "Uzbektelecom Joint-Stock Company", "test_count": "63", "aggregate_date": "2015-05-01", "id": "1878", "index_value": "8024", "rating": "3.1"}, {"ip_addresses": "13089", "label": "Sarkor-Telecom", "test_count": "1693", "aggregate_date": "2015-05-01", "id": "989", "index_value": "6698", "rating": "3.3"}, {"ip_addresses": "759", "label": "Turon Telecom", "test_count": "158", "aggregate_date": "2015-05-01", "id": "93071", "index_value": "4932", "rating": "4.0"}, {"ip_addresses": "1532", "label": "Inform-Service TV Ltd.", "test_count": "131", "aggregate_date": "2015-05-01", "id": "69991", "index_value": "4780", "rating": "3.3"}, {"ip_addresses": "490", "label": "NANO TELECOM Ltd", "test_count": "454", "aggregate_date": "2015-05-01", "id": "80193", "index_value": "3166", "rating": "4.7"}, {"ip_addresses": "6381", "label": "Sharq Telekom CJSC", "test_count": "475", "aggregate_date": "2015-05-01", "id": "47692", "index_value": "2624", "rating": "3.3"}, {"ip_addresses": "10957", "label": "TEXNOPROSISTEM", "test_count": "765", "aggregate_date": "2015-05-01", "id": "8308", "index_value": "2156", "rating": "3.6"}, {"ip_addresses": "6825", "label": "Super iMax", "test_count": "1119", "aggregate_date": "2015-05-01", "id": "33687", "index_value": "1972", "rating": "3.5"}, {"ip_addresses": "7344", "label": "Uzbektelekom Joint Stock Company", "test_count": "4934", "aggregate_date": "2015-05-01", "id": "87804", "index_value": "1944", "rating": "2.8"}], "bandwidth": "3825"}, {"countryCode": "MA", "test_count": "120194", "ip_addresses": "1001614", "country": "Morocco", "isps": [{"ip_addresses": "44785", "label": "Meditelecom", "test_count": "325", "aggregate_date": "2015-05-01", "id": 
"32388", "index_value": "8836", "rating": "2.9"}, {"ip_addresses": "9624", "label": "Wana Corporate", "test_count": "1979", "aggregate_date": "2015-05-01", "id": "29419", "index_value": "7942", "rating": "2.9"}, {"ip_addresses": "642001", "label": "IAM Maroc Telecom", "test_count": "68609", "aggregate_date": "2015-05-01", "id": "35557", "index_value": "5732", "rating": "2.4"}, {"ip_addresses": "6694", "label": "Maroc Telecom 3G", "test_count": "174", "aggregate_date": "2015-05-01", "id": "100674", "index_value": "2643", "rating": "2.7"}], "bandwidth": "5761"}, {"countryCode": "CM", "test_count": "4196", "ip_addresses": "21849", "country": "Cameroon", "isps": [{"ip_addresses": "1399", "label": "CAMTEL", "test_count": "280", "aggregate_date": "2015-05-01", "id": "73576", "index_value": "3756", "rating": "3.1"}, {"ip_addresses": "2317", "label": "Creolink", "test_count": "677", "aggregate_date": "2015-05-01", "id": "90203", "index_value": "2064", "rating": "2.4"}, {"ip_addresses": "1552", "label": "MTN NS Cameroon", "test_count": "441", "aggregate_date": "2015-05-01", "id": "72373", "index_value": "791", "rating": "3.1"}, {"ip_addresses": "906", "label": "MTN Network Solutions (Cameroon)", "test_count": "335", "aggregate_date": "2015-05-01", "id": "98384", "index_value": "585", "rating": "2.5"}], "bandwidth": "2411"}, {"countryCode": "UY", "test_count": "16706", "ip_addresses": "398021", "country": "Uruguay", "isps": [{"ip_addresses": "377902", "label": "Administracion Nacional de Telecomunicaciones", "test_count": "14531", "aggregate_date": "2015-05-01", "id": "1104", "index_value": "20480", "rating": "2.4"}, {"ip_addresses": "13905", "label": "Telefonica Moviles del Uruguay SA", "test_count": "218", "aggregate_date": "2015-05-01", "id": "22330", "index_value": "13236", "rating": "2.6"}, {"ip_addresses": "2735", "label": "AMX Argentina S.A.", "test_count": "167", "aggregate_date": "2015-05-01", "id": "34748", "index_value": "4984", "rating": "2.4"}, {"ip_addresses": 
"10188", "label": "Tecnowind S.A.", "test_count": "1658", "aggregate_date": "2015-05-01", "id": "3359", "index_value": "4172", "rating": "2.7"}], "bandwidth": "18868"}, {"countryCode": "LB", "test_count": "39871", "ip_addresses": "203208", "country": "Lebanon", "isps": [{"ip_addresses": "16681", "label": "Moscanet SAL", "test_count": "1612", "aggregate_date": "2015-05-01", "id": "16692", "index_value": "12067", "rating": "2.4"}, {"ip_addresses": "156", "label": "Mobile Interim Company 1 S.A.L.", "test_count": "545", "aggregate_date": "2015-05-01", "id": "74591", "index_value": "11658", "rating": "4.3"}, {"ip_addresses": "537", "label": "T.H. Global Vision SARL", "test_count": "1929", "aggregate_date": "2015-05-01", "id": "95380", "index_value": "11543", "rating": "3.3"}, {"ip_addresses": "634", "label": "Broadband Plus", "test_count": "1621", "aggregate_date": "2015-05-01", "id": "102739", "index_value": "3236", "rating": "2.8"}, {"ip_addresses": "17205", "label": "terranet sal", "test_count": "2995", "aggregate_date": "2015-05-01", "id": "5140", "index_value": "3019", "rating": "2.5"}, {"ip_addresses": "964", "label": "MedSystems s.a.l.", "test_count": "276", "aggregate_date": "2015-05-01", "id": "69697", "index_value": "2632", "rating": "2.9"}, {"ip_addresses": "2702", "label": "IncoNet-Data Management s.a.l.", "test_count": "1848", "aggregate_date": "2015-05-01", "id": "10181", "index_value": "2213", "rating": "2.7"}, {"ip_addresses": "99474", "label": "LIBANTELECOM", "test_count": "6910", "aggregate_date": "2015-05-01", "id": "20909", "index_value": "2168", "rating": "2.2"}, {"ip_addresses": "35366", "label": "SODETEL S.A.L.", "test_count": "4729", "aggregate_date": "2015-05-01", "id": "15878", "index_value": "2160", "rating": "2.4"}, {"ip_addresses": "5892", "label": "Transmog Inc S.A.L", "test_count": "2087", "aggregate_date": "2015-05-01", "id": "7070", "index_value": "1931", "rating": "2.5"}, {"ip_addresses": "6128", "label": "Reliance Globalcom Limited", 
"test_count": "258", "aggregate_date": "2015-05-01", "id": "39378", "index_value": "1732", "rating": "2.5"}, {"ip_addresses": "1945", "label": "IncoNet Data Management sal", "test_count": "2332", "aggregate_date": "2015-05-01", "id": "8161", "index_value": "1583", "rating": "2.3"}, {"ip_addresses": "5686", "label": "Virtual ISP s.a.l.", "test_count": "1788", "aggregate_date": "2015-05-01", "id": "12927", "index_value": "1476", "rating": "2.5"}], "bandwidth": "3519"}, {"countryCode": "AM", "test_count": "75433", "ip_addresses": "376471", "country": "Armenia", "isps": [{"ip_addresses": "2214", "label": "Interactive TV LLC", "test_count": "1078", "aggregate_date": "2015-05-01", "id": "68284", "index_value": "32687", "rating": "3.9"}, {"ip_addresses": "2694", "label": "WEB Ltd", "test_count": "3750", "aggregate_date": "2015-05-01", "id": "89517", "index_value": "22309", "rating": "4.0"}, {"ip_addresses": "24009", "label": "GNC-ALFA CJSC", "test_count": "10409", "aggregate_date": "2015-05-01", "id": "41427", "index_value": "19187", "rating": "4.1"}, {"ip_addresses": "47910", "label": "UCOM LLC", "test_count": "14109", "aggregate_date": "2015-05-01", "id": "63453", "index_value": "18547", "rating": "4.1"}, {"ip_addresses": "6600", "label": "Icon Communications CJSC", "test_count": "761", "aggregate_date": "2015-05-01", "id": "34618", "index_value": "11546", "rating": "3.1"}, {"ip_addresses": "3329", "label": "Armenian Datacom Company", "test_count": "1111", "aggregate_date": "2015-05-01", "id": "27709", "index_value": "11236", "rating": "3.9"}, {"ip_addresses": "1444", "label": "Apaga Technologies CJSC", "test_count": "609", "aggregate_date": "2015-05-01", "id": "37180", "index_value": "10869", "rating": "3.8"}, {"ip_addresses": "31086", "label": "K-Telecom CJSC", "test_count": "2304", "aggregate_date": "2015-05-01", "id": "49622", "index_value": "10820", "rating": "3.6"}, {"ip_addresses": "248", "label": "Real Time CJSC", "test_count": "962", "aggregate_date": 
"2015-05-01", "id": "92098", "index_value": "8060", "rating": "3.3"}, {"ip_addresses": "2432", "label": "Aranea LLC", "test_count": "585", "aggregate_date": "2015-05-01", "id": "46692", "index_value": "7917", "rating": "3.2"}, {"ip_addresses": "60712", "label": "Orange Armenia", "test_count": "1947", "aggregate_date": "2015-05-01", "id": "37416", "index_value": "6739", "rating": "3.1"}, {"ip_addresses": "157427", "label": "ArmenTel CJSC", "test_count": "16819", "aggregate_date": "2015-05-01", "id": "70164", "index_value": "3869", "rating": "3.1"}], "bandwidth": "12893"}, {"countryCode": "SY", "test_count": "8079", "ip_addresses": "168991", "country": "Syrian Arab Republic", "isps": [{"ip_addresses": "9516", "label": "Syriatel Mobile Telecom", "test_count": "109", "aggregate_date": "2015-05-01", "id": "32883", "index_value": "2508", "rating": "2.8"}, {"ip_addresses": "92370", "label": "Syrian Telecommunications Establishment", "test_count": "317", "aggregate_date": "2015-05-01", "id": "1229", "index_value": "1738", "rating": "2.6"}, {"ip_addresses": "5623", "label": "Syriatech", "test_count": "119", "aggregate_date": "2015-05-01", "id": "26013", "index_value": "1378", "rating": "2.7"}, {"ip_addresses": "15776", "label": "AYA Internet Service Provider", "test_count": "70", "aggregate_date": "2015-05-01", "id": "17236", "index_value": "925", "rating": "2.4"}], "bandwidth": "1727"}, {"countryCode": "MT", "test_count": "20289", "ip_addresses": "154527", "country": "Malta", "isps": [{"ip_addresses": "8045", "label": "Vodafone Malta Limited", "test_count": "446", "aggregate_date": "2015-05-01", "id": "18619", "index_value": "18632", "rating": "3.1"}, {"ip_addresses": "61974", "label": "GO P.L.C.", "test_count": "5863", "aggregate_date": "2015-05-01", "id": "51877", "index_value": "16764", "rating": "2.6"}], "bandwidth": "24748"}, {"countryCode": "DZ", "test_count": "63986", "ip_addresses": "788724", "country": "Algeria", "isps": [{"ip_addresses": "1864", "label": 
"Wataniya Telecom Algerie", "test_count": "113", "aggregate_date": "2015-05-01", "id": "71939", "index_value": "15977", "rating": "3.2"}, {"ip_addresses": "588058", "label": "Telecom Algeria", "test_count": "24498", "aggregate_date": "2015-05-01", "id": "9444", "index_value": "2646", "rating": "2.5"}], "bandwidth": "3107"}, {"countryCode": "ML", "test_count": "1288", "ip_addresses": "7559", "country": "Mali", "isps": [{"ip_addresses": "2612", "label": "Orange Mali SA", "test_count": "459", "aggregate_date": "2015-05-01", "id": "45467", "index_value": "3317", "rating": "2.0"}], "bandwidth": "2755"}, {"countryCode": "AZ", "test_count": "135649", "ip_addresses": "422706", "country": "Azerbaijan", "isps": [{"ip_addresses": "4636", "label": "AZERTELECOM", "test_count": "1433", "aggregate_date": "2015-05-01", "id": "44160", "index_value": "12261", "rating": "3.2"}, {"ip_addresses": "67349", "label": "Delta Telecom Inc.", "test_count": "20119", "aggregate_date": "2015-05-01", "id": "14900", "index_value": "11337", "rating": "3.4"}, {"ip_addresses": "21904", "label": "UNINET", "test_count": "8350", "aggregate_date": "2015-05-01", "id": "1268", "index_value": "8635", "rating": "3.3"}, {"ip_addresses": "11060", "label": "Az.StarNet LLC", "test_count": "2914", "aggregate_date": "2015-05-01", "id": "16108", "index_value": "7659", "rating": "3.5"}, {"ip_addresses": "4970", "label": "Eurosel LLC", "test_count": "1221", "aggregate_date": "2015-05-01", "id": "48954", "index_value": "7337", "rating": "3.5"}, {"ip_addresses": "3829", "label": "SOL", "test_count": "1063", "aggregate_date": "2015-05-01", "id": "24968", "index_value": "6357", "rating": "3.1"}, {"ip_addresses": "17232", "label": "AG Telecom LTD.", "test_count": "3781", "aggregate_date": "2015-05-01", "id": "89031", "index_value": "5696", "rating": "3.9"}, {"ip_addresses": "13255", "label": "Ultel LLC", "test_count": "1723", "aggregate_date": "2015-05-01", "id": "11883", "index_value": "5520", "rating": "3.1"}, 
{"ip_addresses": "2765", "label": "TransEuroCom LLC", "test_count": "291", "aggregate_date": "2015-05-01", "id": "35056", "index_value": "5466", "rating": "3.4"}, {"ip_addresses": "8369", "label": "SUPERONLAYN LTD", "test_count": "2773", "aggregate_date": "2015-05-01", "id": "68683", "index_value": "5416", "rating": "2.6"}, {"ip_addresses": "4811", "label": "AzEuroTel J.V.", "test_count": "1244", "aggregate_date": "2015-05-01", "id": "67703", "index_value": "4978", "rating": "3.1"}, {"ip_addresses": "4599", "label": "PIXEL LLC", "test_count": "1345", "aggregate_date": "2015-05-01", "id": "47299", "index_value": "4521", "rating": "3.3"}, {"ip_addresses": "11348", "label": "Aztelekom", "test_count": "1220", "aggregate_date": "2015-05-01", "id": "3091", "index_value": "4309", "rating": "3.2"}, {"ip_addresses": "13546", "label": "Stellford LLC.", "test_count": "5269", "aggregate_date": "2015-05-01", "id": "18713", "index_value": "4117", "rating": "3.9"}, {"ip_addresses": "11538", "label": "SMART SISTEMZ TECHNOLOJI MMM", "test_count": "2443", "aggregate_date": "2015-05-01", "id": "34531", "index_value": "3927", "rating": "3.1"}, {"ip_addresses": "24476", "label": "Azqtel, Limited", "test_count": "13750", "aggregate_date": "2015-05-01", "id": "77354", "index_value": "3655", "rating": "3.3"}, {"ip_addresses": "103606", "label": "Baktelekom", "test_count": "23369", "aggregate_date": "2015-05-01", "id": "10240", "index_value": "3209", "rating": "3.1"}, {"ip_addresses": "26409", "label": "Azeronline Information Services", "test_count": "3924", "aggregate_date": "2015-05-01", "id": "7462", "index_value": "3106", "rating": "3.1"}, {"ip_addresses": "3689", "label": "CNC.AZ MMC", "test_count": "488", "aggregate_date": "2015-05-01", "id": "80056", "index_value": "2874", "rating": "3.3"}, {"ip_addresses": "7499", "label": "AZERIN", "test_count": "986", "aggregate_date": "2015-05-01", "id": "68358", "index_value": "2329", "rating": "3.3"}], "bandwidth": "6651"}, {"countryCode": 
"PY", "test_count": "11241", "ip_addresses": "213981", "country": "Paraguay", "isps": [{"ip_addresses": "2516", "label": "Get Line Internet S.A.", "test_count": "1270", "aggregate_date": "2015-05-01", "id": "35834", "index_value": "5480", "rating": "2.5"}, {"ip_addresses": "1606", "label": "N\u00facleo S.A.", "test_count": "124", "aggregate_date": "2015-05-01", "id": "15917", "index_value": "3043", "rating": "2.4"}, {"ip_addresses": "100823", "label": "Telecel S.A.", "test_count": "6259", "aggregate_date": "2015-05-01", "id": "4336", "index_value": "2851", "rating": "2.4"}, {"ip_addresses": "714", "label": "Sur Multimedia S.A.", "test_count": "327", "aggregate_date": "2015-05-01", "id": "99055", "index_value": "2304", "rating": "2.4"}, {"ip_addresses": "78384", "label": "CO.PA.CO.", "test_count": "1926", "aggregate_date": "2015-05-01", "id": "1274", "index_value": "2167", "rating": "3.1"}], "bandwidth": "3241"}, {"countryCode": "DM", "test_count": "2511", "ip_addresses": "8504", "country": "Dominica", "isps": [{"ip_addresses": "2399", "label": "Cable & Wireless Antigua and Barbuda", "test_count": "542", "aggregate_date": "2015-05-01", "id": "69238", "index_value": "13095", "rating": "2.0"}, {"ip_addresses": "2072", "label": "Sat Telecommunications", "test_count": "1232", "aggregate_date": "2015-05-01", "id": "76198", "index_value": "8099", "rating": "2.6"}, {"ip_addresses": "2199", "label": "Lime Cable & Wireless Barbados", "test_count": "563", "aggregate_date": "2015-05-01", "id": "528", "index_value": "2589", "rating": "1.7"}], "bandwidth": "7897"}, {"countryCode": "KH", "test_count": "43309", "ip_addresses": "95286", "country": "Cambodia", "isps": [{"ip_addresses": "1629", "label": "MEKONGNET INTERNET SERVICE PROVIDER", "test_count": "978", "aggregate_date": "2015-05-01", "id": "37756", "index_value": "20018", "rating": "3.3"}, {"ip_addresses": "368", "label": "Opennet", "test_count": "7172", "aggregate_date": "2015-05-01", "id": "90223", "index_value": "14083", 
"rating": "2.6"}, {"ip_addresses": "476", "label": "CIDC Information Technology (Sabay)", "test_count": "135", "aggregate_date": "2015-05-01", "id": "99528", "index_value": "11357", "rating": "3.9"}, {"ip_addresses": "376", "label": "S.I Group", "test_count": "761", "aggregate_date": "2015-05-01", "id": "85333", "index_value": "10816", "rating": "3.4"}, {"ip_addresses": "1580", "label": "S.I Building", "test_count": "410", "aggregate_date": "2015-05-01", "id": "39668", "index_value": "9289", "rating": "3.2"}, {"ip_addresses": "7358", "label": "EZECOM limited", "test_count": "4118", "aggregate_date": "2015-05-01", "id": "28961", "index_value": "7755", "rating": "2.9"}, {"ip_addresses": "1852", "label": "DTV-STAR Co. Ltd", "test_count": "1623", "aggregate_date": "2015-05-01", "id": "88583", "index_value": "6348", "rating": "3.2"}, {"ip_addresses": "1233", "label": "DTV-STar Co.,Ltd.Phnom Penh, Cambodia", "test_count": "312", "aggregate_date": "2015-05-01", "id": "39902", "index_value": "6238", "rating": "3.1"}, {"ip_addresses": "29080", "label": "Viettel (Cambodia) Pte., Ltd", "test_count": "6541", "aggregate_date": "2015-05-01", "id": "26849", "index_value": "6171", "rating": "3.0"}, {"ip_addresses": "9751", "label": "Cogetel Online", "test_count": "4444", "aggregate_date": "2015-05-01", "id": "2755", "index_value": "6130", "rating": "3.2"}, {"ip_addresses": "614", "label": "Xinwei (Cambodia) Telecom Co. 
Ltd", "test_count": "1394", "aggregate_date": "2015-05-01", "id": "99575", "index_value": "5767", "rating": "3.1"}, {"ip_addresses": "387", "label": "COGETEL Co., Ltd", "test_count": "372", "aggregate_date": "2015-05-01", "id": "101521", "index_value": "4051", "rating": "2.6"}, {"ip_addresses": "1044", "label": "Cambodian ISP, Country Wide, Wireless IAP", "test_count": "218", "aggregate_date": "2015-05-01", "id": "11981", "index_value": "3866", "rating": "3.0"}], "bandwidth": "8951"}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "MC", "test_count": "1295", "ip_addresses": "11989", "country": "Monaco", "isps": [{"ip_addresses": "10971", "label": "Monaco telecom", "test_count": "872", "aggregate_date": "2015-05-01", "id": "9685", "index_value": "19462", "rating": "2.4"}], "bandwidth": "35043"}, {"countryCode": "GU", "test_count": "4288", "ip_addresses": "38997", "country": "Guam", "isps": [{"ip_addresses": "16350", "label": "TeleGuam Holdings, LLC", "test_count": "182", "aggregate_date": "2015-05-01", "id": "6768", "index_value": "16965", "rating": "2.8"}, {"ip_addresses": "5503", "label": "Kuentos Communications, Inc.", "test_count": "1952", "aggregate_date": "2015-05-01", "id": "90173", "index_value": "14567", "rating": "2.1"}], "bandwidth": "13509"}, {"countryCode": "AF", "test_count": "7301", "ip_addresses": "16606", "country": "Afghanistan", "isps": [{"ip_addresses": "838", "label": "GCN/DCN Networks", "test_count": "1570", "aggregate_date": "2015-05-01", "id": "42989", "index_value": "2140", "rating": "2.8"}, {"ip_addresses": "6740", "label": "Instatelecom Limited", "test_count": "2096", "aggregate_date": "2015-05-01", "id": "86176", "index_value": "1109", "rating": "2.7"}], "bandwidth": "2468"}, {"countryCode": "LU", "test_count": "19824", "ip_addresses": "198355", "country": "Luxembourg", "isps": [{"ip_addresses": "6059", "label": "Visual Online S.A.", 
"test_count": "497", "aggregate_date": "2015-05-01", "id": "2883", "index_value": "61945", "rating": "3.8"}, {"ip_addresses": "5615", "label": "Coditel", "test_count": "875", "aggregate_date": "2015-05-01", "id": "16749", "index_value": "53959", "rating": "3.1"}, {"ip_addresses": "110019", "label": "POST Luxembourg", "test_count": "9930", "aggregate_date": "2015-05-01", "id": "1430", "index_value": "46073", "rating": "3.6"}, {"ip_addresses": "16599", "label": "Luxembourg Online S.A.", "test_count": "1508", "aggregate_date": "2015-05-01", "id": "3747", "index_value": "36301", "rating": "3.1"}, {"ip_addresses": "2886", "label": "CEGECOM S.A.", "test_count": "188", "aggregate_date": "2015-05-01", "id": "4039", "index_value": "33795", "rating": "3.4"}, {"ip_addresses": "8671", "label": "Tango", "test_count": "2121", "aggregate_date": "2015-05-01", "id": "50622", "index_value": "31851", "rating": "2.5"}, {"ip_addresses": "3279", "label": "root SA", "test_count": "121", "aggregate_date": "2015-05-01", "id": "43781", "index_value": "28765", "rating": "4.0"}, {"ip_addresses": "5841", "label": "EDPNET", "test_count": "745", "aggregate_date": "2015-05-01", "id": "2167", "index_value": "25738", "rating": "3.5"}, {"ip_addresses": "5799", "label": "LuXcommunications S.A.", "test_count": "306", "aggregate_date": "2015-05-01", "id": "26098", "index_value": "16405", "rating": "2.6"}], "bandwidth": "40392"}, {"countryCode": "KG", "test_count": "18001", "ip_addresses": "102998", "country": "Kyrgyzstan", "isps": [{"ip_addresses": "3049", "label": "City Telecom", "test_count": "147", "aggregate_date": "2015-05-01", "id": "34147", "index_value": "28769", "rating": "3.4"}, {"ip_addresses": "30432", "label": "Mega-Line LTD.", "test_count": "1745", "aggregate_date": "2015-05-01", "id": "43798", "index_value": "24694", "rating": "3.3"}, {"ip_addresses": "2063", "label": "AKNET Ltd.", "test_count": "2021", "aggregate_date": "2015-05-01", "id": "1729", "index_value": "20613", "rating": 
"3.3"}, {"ip_addresses": "7418", "label": "ElCat Ltd.", "test_count": "690", "aggregate_date": "2015-05-01", "id": "89002", "index_value": "20230", "rating": "3.1"}, {"ip_addresses": "1972", "label": "LLC I-space", "test_count": "106", "aggregate_date": "2015-05-01", "id": "82033", "index_value": "16300", "rating": "3.8"}, {"ip_addresses": "11564", "label": "Saimanet Telecomunications", "test_count": "1074", "aggregate_date": "2015-05-01", "id": "1751", "index_value": "15600", "rating": "3.2"}, {"ip_addresses": "38621", "label": "OJSC Kyrgyztelecom", "test_count": "1110", "aggregate_date": "2015-05-01", "id": "73493", "index_value": "8558", "rating": "2.9"}], "bandwidth": "13722"}, {"countryCode": "Unknown | Cote D'Ivoire", "test_count": "4050", "ip_addresses": "33061", "country": "Cote D'Ivoire", "isps": [{"ip_addresses": "5485", "label": "MTN COTE D'IVOIRE S.A", "test_count": "573", "aggregate_date": "2015-05-01", "id": "33217", "index_value": "6906", "rating": "2.8"}, {"ip_addresses": "22649", "label": "Cote d'Ivoire Telecom", "test_count": "2283", "aggregate_date": "2015-05-01", "id": "1594", "index_value": "4745", "rating": "2.8"}], "bandwidth": "6174"}, {"countryCode": "MN", "test_count": "23936", "ip_addresses": "75079", "country": "Mongolia", "isps": [{"ip_addresses": "1539", "label": "Mobinet LLC", "test_count": "1529", "aggregate_date": "2015-05-01", "id": "71963", "index_value": "38283", "rating": "3.3"}, {"ip_addresses": "1068", "label": "OrbitNet LLC", "test_count": "946", "aggregate_date": "2015-05-01", "id": "99563", "index_value": "35247", "rating": "2.9"}, {"ip_addresses": "2962", "label": "MCS Com Co Ltd", "test_count": "2357", "aggregate_date": "2015-05-01", "id": "72108", "index_value": "34815", "rating": "3.2"}, {"ip_addresses": "1018", "label": "Data Center, Viva City town, Khan-Uul District", "test_count": "842", "aggregate_date": "2015-05-01", "id": "100174", "index_value": "34506", "rating": "2.9"}, {"ip_addresses": "18358", "label": 
"CITINET LLC", "test_count": "1360", "aggregate_date": "2015-05-01", "id": "45288", "index_value": "20151", "rating": "3.2"}, {"ip_addresses": "17350", "label": "YOKOZUNANET", "test_count": "667", "aggregate_date": "2015-05-01", "id": "25578", "index_value": "18541", "rating": "3.2"}, {"ip_addresses": "1120", "label": "Sansar-Internet", "test_count": "476", "aggregate_date": "2015-05-01", "id": "75244", "index_value": "9764", "rating": "2.9"}, {"ip_addresses": "503", "label": "SKYCC", "test_count": "2350", "aggregate_date": "2015-05-01", "id": "101942", "index_value": "8878", "rating": "3.1"}, {"ip_addresses": "594", "label": "Kewiko LLC", "test_count": "5110", "aggregate_date": "2015-05-01", "id": "86156", "index_value": "6801", "rating": "3.1"}, {"ip_addresses": "4385", "label": "Mongolia Telecom", "test_count": "2329", "aggregate_date": "2015-05-01", "id": "71970", "index_value": "3668", "rating": "3.2"}], "bandwidth": "17314"}, {"countryCode": "VI", "test_count": "4588", "ip_addresses": "7421", "country": "Virgin Islands, U.S.", "isps": [{"ip_addresses": "1300", "label": "VI POWERNET, LLC", "test_count": "230", "aggregate_date": "2015-05-01", "id": "53981", "index_value": "17038", "rating": "2.1"}, {"ip_addresses": "396", "label": "CenturyLink", "test_count": "1956", "aggregate_date": "2015-05-01", "id": "86113", "index_value": "11500", "rating": "2.0"}, {"ip_addresses": "1929", "label": "Broadband VI, LLC", "test_count": "780", "aggregate_date": "2015-05-01", "id": "66369", "index_value": "4688", "rating": "3.1"}], "bandwidth": "13200"}, {"countryCode": "BA", "test_count": "99762", "ip_addresses": "560256", "country": "Bosnia and Herzegovina", "isps": [{"ip_addresses": "55486", "label": "Telemach d.o.o. Sarajevo", "test_count": "14727", "aggregate_date": "2015-05-01", "id": "67645", "index_value": "21314", "rating": "3.5"}, {"ip_addresses": "5181", "label": "GLOBAL INTERNET d.o.o. 
Novi Travnik", "test_count": "747", "aggregate_date": "2015-05-01", "id": "45303", "index_value": "20280", "rating": "3.4"}, {"ip_addresses": "11984", "label": "Logosoft , information engineering and Internet pr", "test_count": "2296", "aggregate_date": "2015-05-01", "id": "73291", "index_value": "19337", "rating": "4.2"}, {"ip_addresses": "3958", "label": "M&H Company d.o.o Sarajevo", "test_count": "1165", "aggregate_date": "2015-05-01", "id": "69165", "index_value": "19313", "rating": "3.6"}, {"ip_addresses": "23971", "label": "Kablovska Televizija HS d.o.o Sarajevo", "test_count": "5391", "aggregate_date": "2015-05-01", "id": "20665", "index_value": "18871", "rating": "3.4"}, {"ip_addresses": "3804", "label": "europroNET Bosnia d.o.o.", "test_count": "394", "aggregate_date": "2015-05-01", "id": "5358", "index_value": "16309", "rating": "2.7"}, {"ip_addresses": "9493", "label": "Telrad doo", "test_count": "1416", "aggregate_date": "2015-05-01", "id": "19738", "index_value": "9998", "rating": "3.7"}, {"ip_addresses": "18883", "label": "ELTA KABEL d.o.o.", "test_count": "7054", "aggregate_date": "2015-05-01", "id": "26080", "index_value": "9678", "rating": "3.0"}, {"ip_addresses": "2926", "label": "AVAX Net doo za telekomunikacije, racunarski inzen", "test_count": "770", "aggregate_date": "2015-05-01", "id": "79868", "index_value": "9467", "rating": "4.7"}, {"ip_addresses": "7969", "label": "TXTV d.o.o. 
Tuzla", "test_count": "1855", "aggregate_date": "2015-05-01", "id": "61508", "index_value": "9308", "rating": "2.9"}, {"ip_addresses": "2975", "label": "Privredno trgovacko drustvo Velnet d.o.o Mostar", "test_count": "908", "aggregate_date": "2015-05-01", "id": "61725", "index_value": "8381", "rating": "3.2"}, {"ip_addresses": "15713", "label": "Blicnet d.o.o.", "test_count": "2575", "aggregate_date": "2015-05-01", "id": "94185", "index_value": "8161", "rating": "2.6"}, {"ip_addresses": "3725", "label": "DSL-Elektronika d.o.o.", "test_count": "703", "aggregate_date": "2015-05-01", "id": "40024", "index_value": "7190", "rating": "3.9"}, {"ip_addresses": "144605", "label": "BH Telecom", "test_count": "23364", "aggregate_date": "2015-05-01", "id": "1785", "index_value": "7027", "rating": "2.8"}, {"ip_addresses": "57851", "label": "HT d.o.o. Mostar", "test_count": "5162", "aggregate_date": "2015-05-01", "id": "4271", "index_value": "6850", "rating": "2.6"}, {"ip_addresses": "106262", "label": "Telekom Srpske", "test_count": "8232", "aggregate_date": "2015-05-01", "id": "4807", "index_value": "6282", "rating": "3.0"}, {"ip_addresses": "5996", "label": "Team Consulting d.o.o.", "test_count": "2250", "aggregate_date": "2015-05-01", "id": "54701", "index_value": "5876", "rating": "3.5"}, {"ip_addresses": "6942", "label": "Telinea d.o.o.", "test_count": "1101", "aggregate_date": "2015-05-01", "id": "34185", "index_value": "4601", "rating": "2.6"}, {"ip_addresses": "26201", "label": "DASTO semtel d.o.o.", "test_count": "1923", "aggregate_date": "2015-05-01", "id": "4056", "index_value": "3922", "rating": "3.0"}, {"ip_addresses": "3459", "label": "MREZA d.o.o.", "test_count": "152", "aggregate_date": "2015-05-01", "id": "35167", "index_value": "2815", "rating": "2.7"}], "bandwidth": "10418"}, {"countryCode": "GH", "test_count": "14297", "ip_addresses": "45720", "country": "Ghana", "isps": [{"ip_addresses": "32610", "label": "Ghana Telecom", "test_count": "3832", 
"aggregate_date": "2015-05-01", "id": "14907", "index_value": "7267", "rating": "2.6"}, {"ip_addresses": "15197", "label": "Vodafone Ghana", "test_count": "1798", "aggregate_date": "2015-05-01", "id": "87790", "index_value": "5064", "rating": "2.4"}, {"ip_addresses": "876", "label": "Scancom Ltd", "test_count": "828", "aggregate_date": "2015-05-01", "id": "21097", "index_value": "4376", "rating": "3.2"}], "bandwidth": "6812"}, {"countryCode": "NP", "test_count": "18799", "ip_addresses": "148786", "country": "Nepal", "isps": [{"ip_addresses": "477", "label": "Classic Tech Pvt. Ltd.", "test_count": "219", "aggregate_date": "2015-05-01", "id": "99557", "index_value": "22392", "rating": "3.4"}, {"ip_addresses": "578", "label": "ClassicTech Pvt. Ltd.", "test_count": "404", "aggregate_date": "2015-05-01", "id": "84847", "index_value": "21342", "rating": "2.8"}, {"ip_addresses": "1214", "label": "Subisu CableNet(Pvt) Ltd", "test_count": "354", "aggregate_date": "2015-05-01", "id": "20879", "index_value": "21333", "rating": "3.0"}, {"ip_addresses": "1320", "label": "SingNet", "test_count": "1293", "aggregate_date": "2015-05-01", "id": "429", "index_value": "18397", "rating": "2.3"}, {"ip_addresses": "724", "label": "Ncell Private Ltd.", "test_count": "446", "aggregate_date": "2015-05-01", "id": "74013", "index_value": "17835", "rating": "4.2"}, {"ip_addresses": "1808", "label": "Pulchowk", "test_count": "1241", "aggregate_date": "2015-05-01", "id": "38795", "index_value": "16488", "rating": "3.3"}, {"ip_addresses": "2398", "label": "Nepalese Service Provider", "test_count": "129", "aggregate_date": "2015-05-01", "id": "10764", "index_value": "9538", "rating": "3.1"}, {"ip_addresses": "5031", "label": "Websurfer Nepal", "test_count": "479", "aggregate_date": "2015-05-01", "id": "71873", "index_value": "9438", "rating": "2.6"}, {"ip_addresses": "7368", "label": "Subisu CableNet (Pvt) Ltd", "test_count": "1633", "aggregate_date": "2015-05-01", "id": "36771", "index_value": 
"5714", "rating": "2.9"}, {"ip_addresses": "769", "label": "Otel Communication Pvt. Ltd", "test_count": "600", "aggregate_date": "2015-05-01", "id": "99857", "index_value": "4708", "rating": "2.7"}, {"ip_addresses": "740", "label": "Subisu Cablenet", "test_count": "117", "aggregate_date": "2015-05-01", "id": "99842", "index_value": "4229", "rating": "2.4"}, {"ip_addresses": "2905", "label": "Communications and communicate Nepal (P)Ltd.", "test_count": "651", "aggregate_date": "2015-05-01", "id": "14898", "index_value": "3932", "rating": "2.5"}, {"ip_addresses": "22567", "label": "Worldlink Communications", "test_count": "4435", "aggregate_date": "2015-05-01", "id": "5933", "index_value": "3876", "rating": "2.7"}, {"ip_addresses": "4428", "label": "Nepal Telecom", "test_count": "2493", "aggregate_date": "2015-05-01", "id": "98048", "index_value": "1871", "rating": "2.3"}], "bandwidth": "8420"}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "AG", "test_count": "3182", "ip_addresses": "16949", "country": "Antigua and Barbuda", "isps": [{"ip_addresses": "12873", "label": "Cable & Wireless Antigua and Barbuda", "test_count": "1431", "aggregate_date": "2015-05-01", "id": "69238", "index_value": "4664", "rating": "2.0"}], "bandwidth": "4497"}, {"countryCode": "KE", "test_count": "67082", "ip_addresses": "137222", "country": "Kenya", "isps": [{"ip_addresses": "1391", "label": "Simbaet Com(K) Ltd", "test_count": "605", "aggregate_date": "2015-05-01", "id": "88559", "index_value": "19445", "rating": "2.9"}, {"ip_addresses": "1899", "label": "Kenya Education Network", "test_count": "461", "aggregate_date": "2015-05-01", "id": "58622", "index_value": "16733", "rating": "3.6"}, {"ip_addresses": "3762", "label": "Wananchi-ke", "test_count": "1259", "aggregate_date": "2015-05-01", "id": "86192", "index_value": "13312", "rating": "2.5"}, {"ip_addresses": "6061", "label": "One 
Communications Ltd", "test_count": "1959", "aggregate_date": "2015-05-01", "id": "29764", "index_value": "11329", "rating": "2.5"}, {"ip_addresses": "882", "label": "Liquid Telecommunications Operations Limited", "test_count": "6530", "aggregate_date": "2015-05-01", "id": "100690", "index_value": "9581", "rating": "3.1"}, {"ip_addresses": "36054", "label": "Wananchi Online Limited", "test_count": "7230", "aggregate_date": "2015-05-01", "id": "19504", "index_value": "9418", "rating": "2.4"}, {"ip_addresses": "2475", "label": "Wananchi Group (Kenya) Limited", "test_count": "1395", "aggregate_date": "2015-05-01", "id": "101057", "index_value": "9270", "rating": "2.3"}, {"ip_addresses": "387", "label": "KDN", "test_count": "3498", "aggregate_date": "2015-05-01", "id": "86340", "index_value": "7709", "rating": "2.8"}, {"ip_addresses": "837", "label": "ACCESSKENYA GROUP LTD is an ISP serving", "test_count": "839", "aggregate_date": "2015-05-01", "id": "86396", "index_value": "6620", "rating": "2.5"}, {"ip_addresses": "7017", "label": "Jamii Telecommunications Limited", "test_count": "13697", "aggregate_date": "2015-05-01", "id": "17174", "index_value": "5981", "rating": "3.2"}, {"ip_addresses": "12916", "label": "Safaricom Limited", "test_count": "196", "aggregate_date": "2015-05-01", "id": "15396", "index_value": "4501", "rating": "2.8"}, {"ip_addresses": "7078", "label": "Access Kenya Group Ltd", "test_count": "6578", "aggregate_date": "2015-05-01", "id": "73487", "index_value": "4081", "rating": "2.3"}, {"ip_addresses": "2018", "label": "Kenyan Post & Telecommunications Company / Telkom", "test_count": "1944", "aggregate_date": "2015-05-01", "id": "73549", "index_value": "3246", "rating": "2.3"}, {"ip_addresses": "2079", "label": "ONECOM", "test_count": "1365", "aggregate_date": "2015-05-01", "id": "86291", "index_value": "2918", "rating": "2.6"}, {"ip_addresses": "455", "label": "Second segment of Broadband IPs", "test_count": "186", "aggregate_date": "2015-05-01", 
"id": "103396", "index_value": "2885", "rating": "3.5"}, {"ip_addresses": "14692", "label": "Orange Cellular Kenya", "test_count": "1567", "aggregate_date": "2015-05-01", "id": "84999", "index_value": "2627", "rating": "2.4"}, {"ip_addresses": "1515", "label": "Africa Online Kenya", "test_count": "577", "aggregate_date": "2015-05-01", "id": "23643", "index_value": "1676", "rating": "2.4"}], "bandwidth": "7460"}, {"countryCode": "GI", "test_count": "1748", "ip_addresses": "16785", "country": "Gibraltar", "isps": [{"ip_addresses": "14040", "label": "Gibtelecom Ltd.", "test_count": "1285", "aggregate_date": "2015-05-01", "id": "25721", "index_value": "11387", "rating": "2.1"}], "bandwidth": "19162"}, {"countryCode": "AO", "test_count": "6666", "ip_addresses": "45949", "country": "Angola", "isps": [{"ip_addresses": "2972", "label": "Movicel Telecomunicacoes S.A.", "test_count": "287", "aggregate_date": "2015-05-01", "id": "87764", "index_value": "9268", "rating": "3.1"}, {"ip_addresses": "9182", "label": "UNITEL SA", "test_count": "772", "aggregate_date": "2015-05-01", "id": "43788", "index_value": "8392", "rating": "3.1"}, {"ip_addresses": "3938", "label": "Angola Telecom", "test_count": "363", "aggregate_date": "2015-05-01", "id": "30770", "index_value": "4927", "rating": "2.4"}, {"ip_addresses": "9825", "label": "TVCABO ANGOLA", "test_count": "2463", "aggregate_date": "2015-05-01", "id": "82313", "index_value": "4375", "rating": "2.8"}], "bandwidth": "6902"}, {"countryCode": "Unknown | St. Vincent and Grenadines", "test_count": "2141", "ip_addresses": "8961", "country": "St. Vincent and Grenadines", "isps": [{"ip_addresses": "3774", "label": "Karib Cable", "test_count": "329", "aggregate_date": "2015-05-01", "id": "26311", "index_value": "14674", "rating": "2.7"}, {"ip_addresses": "543", "label": "Columbus Communication St. 
Vincent and the Grenadi", "test_count": "523", "aggregate_date": "2015-05-01", "id": "101985", "index_value": "12066", "rating": "2.7"}, {"ip_addresses": "4537", "label": "Lime Cable & Wireless Barbados", "test_count": "1122", "aggregate_date": "2015-05-01", "id": "528", "index_value": "5860", "rating": "1.7"}], "bandwidth": "9078"}, {"countryCode": "SC", "test_count": "933", "ip_addresses": "5948", "country": "Seychelles", "isps": [], "bandwidth": "17482"}, {"countryCode": "TN", "test_count": "85202", "ip_addresses": "793118", "country": "Tunisia", "isps": [{"ip_addresses": "179180", "label": "TOPNET", "test_count": "2507", "aggregate_date": "2015-05-01", "id": "3138", "index_value": "14313", "rating": "2.1"}, {"ip_addresses": "347082", "label": "ATI", "test_count": "4163", "aggregate_date": "2015-05-01", "id": "2782", "index_value": "8820", "rating": "2.3"}, {"ip_addresses": "46940", "label": "Hexabyte", "test_count": "987", "aggregate_date": "2015-05-01", "id": "82294", "index_value": "7842", "rating": "2.2"}, {"ip_addresses": "65707", "label": "GlobalNet", "test_count": "2118", "aggregate_date": "2015-05-01", "id": "19722", "index_value": "5548", "rating": "2.4"}, {"ip_addresses": "12926", "label": "Tunet", "test_count": "385", "aggregate_date": "2015-05-01", "id": "82342", "index_value": "5258", "rating": "2.6"}, {"ip_addresses": "123611", "label": "Orange Internet", "test_count": "4037", "aggregate_date": "2015-05-01", "id": "82296", "index_value": "4835", "rating": "2.3"}], "bandwidth": "5315"}, {"countryCode": "MU", "test_count": "26738", "ip_addresses": "219948", "country": "Mauritius", "isps": [{"ip_addresses": "38776", "label": "Telecom Plus Limited", "test_count": "3700", "aggregate_date": "2015-05-01", "id": "35099", "index_value": "9564", "rating": "1.9"}, {"ip_addresses": "13438", "label": "Mauritius Telecom", "test_count": "860", "aggregate_date": "2015-05-01", "id": "14696", "index_value": "5015", "rating": "1.9"}, {"ip_addresses": "131190", 
"label": "Telecom Plus Ltd", "test_count": "8359", "aggregate_date": "2015-05-01", "id": "14909", "index_value": "4927", "rating": "2.9"}, {"ip_addresses": "992", "label": "Emtel Ltd", "test_count": "183", "aggregate_date": "2015-05-01", "id": "89104", "index_value": "4827", "rating": "2.7"}, {"ip_addresses": "7098", "label": "MauritiusTelecom", "test_count": "564", "aggregate_date": "2015-05-01", "id": "93075", "index_value": "3128", "rating": "1.9"}, {"ip_addresses": "1870", "label": "Bharat Telecom", "test_count": "499", "aggregate_date": "2015-05-01", "id": "88649", "index_value": "1290", "rating": "2.2"}, {"ip_addresses": "1257", "label": "Bharat-tel", "test_count": "1202", "aggregate_date": "2015-05-01", "id": "88219", "index_value": "1260", "rating": "2.3"}], "bandwidth": "13160"}, {"countryCode": "AL", "test_count": "93570", "ip_addresses": "108126", "country": "Albania", "isps": [{"ip_addresses": "31073", "label": "Ipko Telecommunications", "test_count": "649", "aggregate_date": "2015-05-01", "id": "47200", "index_value": "15019", "rating": "3.6"}, {"ip_addresses": "5122", "label": "Nisatel LTD", "test_count": "946", "aggregate_date": "2015-05-01", "id": "53180", "index_value": "11383", "rating": "3.3"}, {"ip_addresses": "2404", "label": "4ALB shpk", "test_count": "1301", "aggregate_date": "2015-05-01", "id": "91969", "index_value": "10862", "rating": "3.4"}, {"ip_addresses": "17155", "label": "Abissnet", "test_count": "7339", "aggregate_date": "2015-05-01", "id": "11265", "index_value": "9349", "rating": "3.3"}, {"ip_addresses": "1883", "label": "SATLINK SH.P.K.", "test_count": "549", "aggregate_date": "2015-05-01", "id": "68392", "index_value": "9347", "rating": "4.5"}, {"ip_addresses": "2649", "label": "Pronet sh.p.k.", "test_count": "387", "aggregate_date": "2015-05-01", "id": "8234", "index_value": "8095", "rating": "3.4"}, {"ip_addresses": "4939", "label": "ABCom", "test_count": "9954", "aggregate_date": "2015-05-01", "id": "4723", "index_value": 
"6913", "rating": "3.3"}, {"ip_addresses": "1330", "label": "Fibernet", "test_count": "445", "aggregate_date": "2015-05-01", "id": "93102", "index_value": "6886", "rating": "3.6"}, {"ip_addresses": "7062", "label": "Albtelecom Sh.a.", "test_count": "17824", "aggregate_date": "2015-05-01", "id": "2816", "index_value": "6855", "rating": "3.3"}, {"ip_addresses": "1458", "label": "VIVO Communications Sh p k", "test_count": "1720", "aggregate_date": "2015-05-01", "id": "103406", "index_value": "6328", "rating": "3.3"}, {"ip_addresses": "1266", "label": "Bleta Sh.p.k", "test_count": "1114", "aggregate_date": "2015-05-01", "id": "40687", "index_value": "2403", "rating": "3.5"}], "bandwidth": "9422"}, {"countryCode": "QA", "test_count": "69398", "ip_addresses": "365861", "country": "Qatar", "isps": [{"ip_addresses": "2480", "label": "Vodafone Qatar Q.S.C.", "test_count": "2894", "aggregate_date": "2015-05-01", "id": "37676", "index_value": "25938", "rating": "2.9"}, {"ip_addresses": "251619", "label": "OOREDOO", "test_count": "65190", "aggregate_date": "2015-05-01", "id": "96838", "index_value": "16382", "rating": "2.6"}], "bandwidth": "17179"}, {"countryCode": "BJ", "test_count": "1034", "ip_addresses": "2311", "country": "Benin", "isps": [], "bandwidth": "2126"}, {"countryCode": "BD", "test_count": "115676", "ip_addresses": "185463", "country": "Bangladesh", "isps": [{"ip_addresses": "2708", "label": "DhakaCom Limited", "test_count": "3006", "aggregate_date": "2015-05-01", "id": "12548", "index_value": "16297", "rating": "3.1"}, {"ip_addresses": "633", "label": "Next Online", "test_count": "4224", "aggregate_date": "2015-05-01", "id": "82411", "index_value": "15781", "rating": "3.5"}, {"ip_addresses": "534", "label": "Delta Infocom Limited", "test_count": "1053", "aggregate_date": "2015-05-01", "id": "96639", "index_value": "13615", "rating": "3.7"}, {"ip_addresses": "1050", "label": "Bangladesh Online Ltd", "test_count": "277", "aggregate_date": "2015-05-01", "id": 
"32364", "index_value": "13110", "rating": "3.8"}, {"ip_addresses": "942", "label": "Earth Telecommunication ( pvt ) Limited", "test_count": "6119", "aggregate_date": "2015-05-01", "id": "100293", "index_value": "11999", "rating": "3.5"}, {"ip_addresses": "3972", "label": "Link3 Technologies Ltd.", "test_count": "776", "aggregate_date": "2015-05-01", "id": "12540", "index_value": "6960", "rating": "3.7"}, {"ip_addresses": "5153", "label": "BTTB", "test_count": "636", "aggregate_date": "2015-05-01", "id": "20623", "index_value": "6927", "rating": "3.3"}, {"ip_addresses": "2973", "label": "Agni Systems Limited", "test_count": "2136", "aggregate_date": "2015-05-01", "id": "20778", "index_value": "6898", "rating": "3.2"}, {"ip_addresses": "2669", "label": "Aamra Networks", "test_count": "2853", "aggregate_date": "2015-05-01", "id": "47264", "index_value": "6815", "rating": "3.4"}, {"ip_addresses": "8541", "label": "BDCOM Online Limited", "test_count": "2177", "aggregate_date": "2015-05-01", "id": "75070", "index_value": "6685", "rating": "3.3"}, {"ip_addresses": "1782", "label": "Grameen CyberNet", "test_count": "697", "aggregate_date": "2015-05-01", "id": "10726", "index_value": "6526", "rating": "2.8"}, {"ip_addresses": "1806", "label": "Mango Teleservices, IIG of Bangladesh", "test_count": "1138", "aggregate_date": "2015-05-01", "id": "34784", "index_value": "6227", "rating": "3.1"}, {"ip_addresses": "12243", "label": "Link3 Technologies Limited", "test_count": "3344", "aggregate_date": "2015-05-01", "id": "67883", "index_value": "5101", "rating": "3.7"}, {"ip_addresses": "2201", "label": "OptiMax COmmunication Ltd", "test_count": "1130", "aggregate_date": "2015-05-01", "id": "13100", "index_value": "4285", "rating": "2.7"}, {"ip_addresses": "688", "label": "BRACNet Limited", "test_count": "786", "aggregate_date": "2015-05-01", "id": "83965", "index_value": "4130", "rating": "3.2"}, {"ip_addresses": "704", "label": "GSM Service Provider in Bangladesh", "test_count": 
"319", "aggregate_date": "2015-05-01", "id": "19982", "index_value": "3635", "rating": "3.3"}, {"ip_addresses": "2488", "label": "Grameenphone Ltd.", "test_count": "2760", "aggregate_date": "2015-05-01", "id": "30594", "index_value": "2694", "rating": "3.2"}, {"ip_addresses": "1173", "label": "IS Pros Limited", "test_count": "286", "aggregate_date": "2015-05-01", "id": "83196", "index_value": "2269", "rating": "3.2"}, {"ip_addresses": "57851", "label": "Augere Wireless Broadband Bangladesh Limited", "test_count": "9210", "aggregate_date": "2015-05-01", "id": "43730", "index_value": "2119", "rating": "3.1"}, {"ip_addresses": "34769", "label": "Banglalion WiMAX", "test_count": "4284", "aggregate_date": "2015-05-01", "id": "49610", "index_value": "1477", "rating": "2.9"}], "bandwidth": "10390"}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "HT", "test_count": "8190", "ip_addresses": "22938", "country": "Haiti", "isps": [{"ip_addresses": "1106", "label": "Haiti S.A.", "test_count": "555", "aggregate_date": "2015-05-01", "id": "86167", "index_value": "8208", "rating": "2.8"}, {"ip_addresses": "2739", "label": "Haiti Networking Group S.A.", "test_count": "436", "aggregate_date": "2015-05-01", "id": "12814", "index_value": "5444", "rating": "2.6"}, {"ip_addresses": "1144", "label": "Alpha Communications Network", "test_count": "2373", "aggregate_date": "2015-05-01", "id": "18770", "index_value": "5299", "rating": "3.4"}, {"ip_addresses": "6034", "label": "T\u00e9l\u00e9communications de Hait\u00ed (Teleco)", "test_count": "2203", "aggregate_date": "2015-05-01", "id": "58720", "index_value": "4819", "rating": "3.1"}, {"ip_addresses": "1693", "label": "Telecommunications", "test_count": "1253", "aggregate_date": "2015-05-01", "id": "36928", "index_value": "2146", "rating": "3.5"}, {"ip_addresses": "8954", "label": "ACCESS HAITI", "test_count": "980", "aggregate_date": 
"2015-05-01", "id": "3219", "index_value": "1568", "rating": "2.3"}], "bandwidth": "4359"}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | Laos", "test_count": "12565", "ip_addresses": "21102", "country": "Laos", "isps": [{"ip_addresses": "616", "label": "Skytelecom , Transit provider and ISP in Vientiene", "test_count": "784", "aggregate_date": "2015-05-01", "id": "86282", "index_value": "5446", "rating": "3.7"}, {"ip_addresses": "10992", "label": "Telecommunication Service", "test_count": "2644", "aggregate_date": "2015-05-01", "id": "4428", "index_value": "5227", "rating": "2.8"}, {"ip_addresses": "333", "label": "Vimpelcom Lao Co., Ltd.", "test_count": "674", "aggregate_date": "2015-05-01", "id": "99631", "index_value": "4006", "rating": "2.8"}, {"ip_addresses": "590", "label": "Star Telecom", "test_count": "86", "aggregate_date": "2015-05-01", "id": "93101", "index_value": "3933", "rating": "3.7"}, {"ip_addresses": "2401", "label": "Enterprise of Telecommunications Lao", "test_count": "418", "aggregate_date": "2015-05-01", "id": "3455", "index_value": "1813", "rating": "3.2"}], "bandwidth": "4600"}, {"countryCode": "TJ", "test_count": "800", "ip_addresses": "9058", "country": "Tajikistan", "isps": [{"ip_addresses": "333", "label": "Cjsc Indigo Tajikistan", "test_count": "156", "aggregate_date": "2015-05-01", "id": "95287", "index_value": "10444", "rating": "3.9"}], "bandwidth": "14703"}, {"countryCode": "BN", "test_count": "15929", "ip_addresses": "30986", "country": "Brunei Darussalam", "isps": [{"ip_addresses": "683", "label": "SIMPUR ISP", "test_count": "1506", "aggregate_date": "2015-05-01", "id": "12983", "index_value": "14375", "rating": "2.6"}, {"ip_addresses": "7581", "label": "BruNet, Jabatan Telekom Brunei Darussalam", 
"test_count": "2464", "aggregate_date": "2015-05-01", "id": "3622", "index_value": "8462", "rating": "1.8"}, {"ip_addresses": "2639", "label": "Jabatan Telekom Brunei", "test_count": "8950", "aggregate_date": "2015-05-01", "id": "7080", "index_value": "6372", "rating": "1.8"}, {"ip_addresses": "7254", "label": "BruNet, Telekom Brunei Berhad (Telbru)", "test_count": "2831", "aggregate_date": "2015-05-01", "id": "27624", "index_value": "6244", "rating": "1.8"}], "bandwidth": "7522"}, {"countryCode": "CU", "test_count": "1130", "ip_addresses": "2179", "country": "Cuba", "isps": [{"ip_addresses": "2039", "label": "Empresa de Telecomunicaciones de Cuba, S.A.", "test_count": "961", "aggregate_date": "2015-05-01", "id": "3815", "index_value": "1653", "rating": "2.1"}], "bandwidth": "1665"}, {"countryCode": "BM", "test_count": "3590", "ip_addresses": "25099", "country": "Bermuda", "isps": [{"ip_addresses": "5549", "label": "TransACT", "test_count": "658", "aggregate_date": "2015-05-01", "id": "26708", "index_value": "11588", "rating": "2.6"}, {"ip_addresses": "2689", "label": "Internet Bermuda Limited", "test_count": "309", "aggregate_date": "2015-05-01", "id": "7273", "index_value": "9124", "rating": "2.6"}, {"ip_addresses": "1028", "label": "Bermuda Cablevision", "test_count": "954", "aggregate_date": "2015-05-01", "id": "101240", "index_value": "8223", "rating": "2.4"}, {"ip_addresses": "2461", "label": "Telebermuda International Limited", "test_count": "664", "aggregate_date": "2015-05-01", "id": "7748", "index_value": "7566", "rating": "3.1"}, {"ip_addresses": "9758", "label": "North Rock Communications", "test_count": "810", "aggregate_date": "2015-05-01", "id": "6475", "index_value": "5900", "rating": "2.5"}], "bandwidth": "8583"}, {"countryCode": "MG", "test_count": "11565", "ip_addresses": "26016", "country": "Madagascar", "isps": [{"ip_addresses": "4713", "label": "TELECOM MALAGASY", "test_count": "3146", "aggregate_date": "2015-05-01", "id": "21367", 
"index_value": "25910", "rating": "2.2"}, {"ip_addresses": "2965", "label": "TGN", "test_count": "1437", "aggregate_date": "2015-05-01", "id": "97538", "index_value": "20869", "rating": "2.5"}, {"ip_addresses": "11877", "label": "Data Telecom Service", "test_count": "793", "aggregate_date": "2015-05-01", "id": "20971", "index_value": "9090", "rating": "2.1"}, {"ip_addresses": "2871", "label": "Orange Madagascar", "test_count": "163", "aggregate_date": "2015-05-01", "id": "37615", "index_value": "1785", "rating": "2.6"}], "bandwidth": "18225"}, {"countryCode": "Unknown | Macau", "test_count": "29382", "ip_addresses": "155549", "country": "Macau", "isps": [{"ip_addresses": "29900", "label": "CTM", "test_count": "8726", "aggregate_date": "2015-05-01", "id": "63039", "index_value": "52163", "rating": "1.8"}, {"ip_addresses": "72104", "label": "CTM Internet Services", "test_count": "17368", "aggregate_date": "2015-05-01", "id": "4014", "index_value": "51794", "rating": "2.0"}], "bandwidth": "51486"}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "KY", "test_count": "4517", "ip_addresses": "24146", "country": "Cayman Islands", "isps": [{"ip_addresses": "12485", "label": "Cable & Wireless (Cayman Islands)", "test_count": "3102", "aggregate_date": "2015-05-01", "id": "32418", "index_value": "13655", "rating": "2.1"}, {"ip_addresses": "3008", "label": "Cable & Wireless Americas Operations", "test_count": "686", "aggregate_date": "2015-05-01", "id": "1109", "index_value": "9832", "rating": "2.1"}, {"ip_addresses": "5295", "label": "WestTel", "test_count": "380", "aggregate_date": "2015-05-01", "id": "9384", "index_value": "9708", "rating": "1.9"}, {"ip_addresses": "985", "label": "WestStar TV", "test_count": "154", "aggregate_date": "2015-05-01", "id": "96705", "index_value": "8855", "rating": "1.9"}], "bandwidth": "12441"}, {"countryCode": "Unknown | Reunion", "test_count": 
"7961", "ip_addresses": "99114", "country": "Reunion", "isps": [{"ip_addresses": "4172", "label": "Reunicable SAS", "test_count": "805", "aggregate_date": "2015-05-01", "id": "73540", "index_value": "33560", "rating": "3.3"}, {"ip_addresses": "32441", "label": "Orange", "test_count": "3670", "aggregate_date": "2015-05-01", "id": "48496", "index_value": "10574", "rating": "2.7"}, {"ip_addresses": "6382", "label": "Mediaserv", "test_count": "569", "aggregate_date": "2015-05-01", "id": "16860", "index_value": "5383", "rating": "2.1"}, {"ip_addresses": "3820", "label": "SFR", "test_count": "282", "aggregate_date": "2015-05-01", "id": "17894", "index_value": "4639", "rating": "2.5"}, {"ip_addresses": "8971", "label": "Societe Reunionnaise Du Radiotelephone Scs", "test_count": "726", "aggregate_date": "2015-05-01", "id": "94184", "index_value": "4064", "rating": "2.8"}, {"ip_addresses": "9323", "label": "Outremer Telecom", "test_count": "151", "aggregate_date": "2015-05-01", "id": "4099", "index_value": "3575", "rating": "2.0"}], "bandwidth": "11156"}, {"countryCode": "LY", "test_count": "14289", "ip_addresses": "4318", "country": "Libya", "isps": [{"ip_addresses": "1134", "label": "Aljeel Aljadeed for Technology", "test_count": "9374", "aggregate_date": "2015-05-01", "id": "47414", "index_value": "4482", "rating": "3.5"}], "bandwidth": "5134"}, {"countryCode": "CV", "test_count": "1095", "ip_addresses": "9566", "country": "Cape Verde", "isps": [], "bandwidth": "7679"}, {"countryCode": "LI", "test_count": "1135", "ip_addresses": "10550", "country": "Liechtenstein", "isps": [{"ip_addresses": "6975", "label": "Telecom Liechtenstein AG", "test_count": "461", "aggregate_date": "2015-05-01", "id": "32375", "index_value": "20792", "rating": "3.0"}], "bandwidth": "22233"}, {"countryCode": "SN", "test_count": "3355", "ip_addresses": "38697", "country": "Senegal", "isps": [{"ip_addresses": "34902", "label": "SONATEL", "test_count": "2065", "aggregate_date": "2015-05-01", "id": 
"31002", "index_value": "3102", "rating": "2.3"}, {"ip_addresses": "2859", "label": "SONATEL Societe Nationale Des Telecommunications D", "test_count": "146", "aggregate_date": "2015-05-01", "id": "69169", "index_value": "3012", "rating": "1.9"}], "bandwidth": "3165"}, {"countryCode": "GD", "test_count": "1897", "ip_addresses": "11832", "country": "Grenada", "isps": [{"ip_addresses": "4550", "label": "Columbus Communications Grenada", "test_count": "1289", "aggregate_date": "2015-05-01", "id": "35207", "index_value": "13754", "rating": "3.2"}], "bandwidth": "10626"}, {"countryCode": "BZ", "test_count": "10764", "ip_addresses": "28294", "country": "Belize", "isps": [{"ip_addresses": "21806", "label": "Belize Telemedia Limited", "test_count": "7141", "aggregate_date": "2015-05-01", "id": "75183", "index_value": "3236", "rating": "2.0"}, {"ip_addresses": "1252", "label": "Southern Cable Network", "test_count": "1153", "aggregate_date": "2015-05-01", "id": "85014", "index_value": "2442", "rating": "3.2"}], "bandwidth": "3143"}, {"countryCode": "NC", "test_count": "1435", "ip_addresses": "29243", "country": "New Caledonia", "isps": [{"ip_addresses": "4673", "label": "OFFRATEL", "test_count": "247", "aggregate_date": "2015-05-01", "id": "43042", "index_value": "7601", "rating": "2.4"}, {"ip_addresses": "5680", "label": "Micro Logic Systems", "test_count": "628", "aggregate_date": "2015-05-01", "id": "20671", "index_value": "7503", "rating": "3.4"}, {"ip_addresses": "5844", "label": "CANL", "test_count": "253", "aggregate_date": "2015-05-01", "id": "33048", "index_value": "7368", "rating": "2.1"}], "bandwidth": "7391"}, {"countryCode": "GY", "test_count": "8244", "ip_addresses": "19753", "country": "Guyana", "isps": [{"ip_addresses": "3466", "label": "E-Networks Inc.", "test_count": "696", "aggregate_date": "2015-05-01", "id": "47266", "index_value": "4874", "rating": "2.2"}, {"ip_addresses": "15894", "label": "Guyana Telephone & Telegraph Co.", "test_count": "7275", 
"aggregate_date": "2015-05-01", "id": "15749", "index_value": "3312", "rating": "2.2"}], "bandwidth": "3495"}, {"countryCode": "FO", "test_count": "1993", "ip_addresses": "10258", "country": "Faroe Islands", "isps": [{"ip_addresses": "2165", "label": "P/F Telefonverkid", "test_count": "1342", "aggregate_date": "2015-05-01", "id": "101248", "index_value": "12183", "rating": "2.7"}, {"ip_addresses": "2564", "label": "P/F Kall", "test_count": "532", "aggregate_date": "2015-05-01", "id": "11214", "index_value": "11288", "rating": "2.7"}], "bandwidth": "11680"}, {"countryCode": "VG", "test_count": "900", "ip_addresses": "4944", "country": "Virgin Islands, British", "isps": [{"ip_addresses": "2903", "label": "Cable & Wireless Antigua and Barbuda", "test_count": "388", "aggregate_date": "2015-05-01", "id": "69238", "index_value": "6201", "rating": "2.0"}], "bandwidth": "5494"}, {"countryCode": "MZ", "test_count": "6650", "ip_addresses": "30472", "country": "Mozambique", "isps": [{"ip_addresses": "1910", "label": "TVCabo Maputo", "test_count": "1684", "aggregate_date": "2015-05-01", "id": "100705", "index_value": "4259", "rating": "2.3"}], "bandwidth": "3168"}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "MP", "test_count": "710", "ip_addresses": "4463", "country": "Northern Mariana Islands", "isps": [{"ip_addresses": "848", "label": "Micronesian", "test_count": "581", "aggregate_date": "2015-05-01", "id": "99726", "index_value": "3493", "rating": "2.2"}], "bandwidth": "3757"}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "LC", "test_count": "1902", "ip_addresses": "16571", "country": "Saint Lucia", "isps": [{"ip_addresses": "6513", "label": "Karib Cable", "test_count": "711", "aggregate_date": "2015-05-01", "id": "26311", "index_value": "14399", "rating": "2.7"}, {"ip_addresses": 
"2781", "label": "KARIB-CABLE-KELCOM-INTERNATIONAL-SLU", "test_count": "419", "aggregate_date": "2015-05-01", "id": "67642", "index_value": "10667", "rating": "2.7"}, {"ip_addresses": "5374", "label": "Lime Cable & Wireless Barbados", "test_count": "491", "aggregate_date": "2015-05-01", "id": "528", "index_value": "1674", "rating": "1.7"}], "bandwidth": "8798"}, {"countryCode": "MV", "test_count": "5730", "ip_addresses": "28132", "country": "Maldives", "isps": [{"ip_addresses": "9088", "label": "Dhiraagu Internet Services", "test_count": "4165", "aggregate_date": "2015-05-01", "id": "102252", "index_value": "7275", "rating": "3.4"}, {"ip_addresses": "6707", "label": "Focus Infocom Private Limited", "test_count": "698", "aggregate_date": "2015-05-01", "id": "8078", "index_value": "6076", "rating": "2.2"}], "bandwidth": "7091"}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "MM", "test_count": "19601", "ip_addresses": "5887", "country": "Myanmar", "isps": [{"ip_addresses": "1038", "label": "Myanma Post and Telecommunication", "test_count": "8423", "aggregate_date": "2015-05-01", "id": "7658", "index_value": "7009", "rating": "2.6"}, {"ip_addresses": "844", "label": "Telenor Myanmar Ltd.", "test_count": "749", "aggregate_date": "2015-05-01", "id": "103370", "index_value": "3218", "rating": "3.4"}, {"ip_addresses": "2406", "label": "Yatanarpon Teleport, Internet Service Provider", "test_count": "6145", "aggregate_date": "2015-05-01", "id": "71797", "index_value": "2974", "rating": "2.0"}], "bandwidth": "6438"}, {"countryCode": "PG", "test_count": "2140", "ip_addresses": "4175", "country": "Papua New Guinea", "isps": [], "bandwidth": "2929"}, {"countryCode": "GA", "test_count": "1312", "ip_addresses": "11699", "country": "Gabon", "isps": [{"ip_addresses": "1993", "label": "Gabon-Telecom", "test_count": "176", "aggregate_date": "2015-05-01", "id": "86466", "index_value": 
"2627", "rating": "2.1"}, {"ip_addresses": "4984", "label": "Gabon Telecom", "test_count": "105", "aggregate_date": "2015-05-01", "id": "21958", "index_value": "2142", "rating": "2.3"}], "bandwidth": "4164"}, {"countryCode": "UG", "test_count": "7118", "ip_addresses": "12873", "country": "Uganda", "isps": [{"ip_addresses": "195", "label": "Smile Communications Ltd", "test_count": "657", "aggregate_date": "2015-05-01", "id": "81088", "index_value": "7722", "rating": "4.1"}, {"ip_addresses": "2504", "label": "MTN Uganda", "test_count": "468", "aggregate_date": "2015-05-01", "id": "18481", "index_value": "5597", "rating": "2.7"}, {"ip_addresses": "1862", "label": "Uganda Telecom", "test_count": "394", "aggregate_date": "2015-05-01", "id": "16643", "index_value": "4395", "rating": "2.4"}, {"ip_addresses": "915", "label": "Orange Uganda Ltd", "test_count": "664", "aggregate_date": "2015-05-01", "id": "49650", "index_value": "3605", "rating": "2.3"}], "bandwidth": "6480"}, {"countryCode": "RW", "test_count": "6606", "ip_addresses": "6340", "country": "Rwanda", "isps": [{"ip_addresses": "1863", "label": "MTN RwandaCell", "test_count": "823", "aggregate_date": "2015-05-01", "id": "13988", "index_value": "2982", "rating": "2.6"}], "bandwidth": "8798"}, {"countryCode": "CG", "test_count": "433", "ip_addresses": "1223", "country": "Congo", "isps": [], "bandwidth": "2085"}, {"countryCode": "SR", "test_count": "3728", "ip_addresses": "33109", "country": "Suriname", "isps": [{"ip_addresses": "31923", "label": "Telecommunicationcompany Suriname - TeleSur", "test_count": "3157", "aggregate_date": "2015-05-01", "id": "12936", "index_value": "5142", "rating": "2.0"}], "bandwidth": "4957"}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "AI", "test_count": "359", "ip_addresses": "3013", "country": "Anguilla", "isps": [{"ip_addresses": "1573", "label": "Cable & Wireless Antigua and Barbuda", 
"test_count": "204", "aggregate_date": "2015-05-01", "id": "69238", "index_value": "22438", "rating": "2.0"}], "bandwidth": "16327"}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "VU", "test_count": "139", "ip_addresses": "4158", "country": "Vanuatu", "isps": [], "bandwidth": "10520"}, {"countryCode": "TG", "test_count": "991", "ip_addresses": "1277", "country": "Togo", "isps": [], "bandwidth": "4293"}, {"countryCode": "PM", "test_count": "524", "ip_addresses": "2330", "country": "Saint Pierre and Miquelon", "isps": [], "bandwidth": "114524"}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "BW", "test_count": "3598", "ip_addresses": "6889", "country": "Botswana", "isps": [], "bandwidth": "5119"}, {"countryCode": "GM", "test_count": "834", "ip_addresses": "2797", "country": "Gambia", "isps": [], "bandwidth": "1288"}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "BF", "test_count": "829", "ip_addresses": "5939", "country": "Burkina Faso", "isps": [{"ip_addresses": "2606", "label": "ONATEL/FasoNet's", "test_count": "331", "aggregate_date": "2015-05-01", "id": "86454", "index_value": "2488", "rating": "2.1"}], "bandwidth": "2019"}, {"countryCode": "NA", "test_count": "6917", "ip_addresses": "68873", "country": "Namibia", "isps": [{"ip_addresses": "6803", "label": "Powercom Pty Ltd", "test_count": "767", "aggregate_date": "2015-05-01", "id": "32294", "index_value": "14364", "rating": "4.9"}, {"ip_addresses": "7337", "label": "Mobile Telecommunications, LTD", "test_count": "604", "aggregate_date": "2015-05-01", "id": "86233", "index_value": "8425", "rating": "3.5"}, 
{"ip_addresses": "27938", "label": "MTC - Mobile Telecommunications, Ltd.", "test_count": "1446", "aggregate_date": "2015-05-01", "id": "32399", "index_value": "8068", "rating": "2.2"}, {"ip_addresses": "42193", "label": "Telecom Namibia", "test_count": "2967", "aggregate_date": "2015-05-01", "id": "11106", "index_value": "4360", "rating": "2.0"}], "bandwidth": "7743"}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "SM", "test_count": "964", "ip_addresses": "7151", "country": "San Marino", "isps": [{"ip_addresses": "6867", "label": "Telecom Italia San Marino S.p.A", "test_count": "673", "aggregate_date": "2015-05-01", "id": "19816", "index_value": "11059", "rating": "2.4"}], "bandwidth": "10685"}, {"countryCode": "GP", "test_count": "5972", "ip_addresses": "64984", "country": "Guadeloupe", "isps": [{"ip_addresses": "7768", "label": "Outremer Telecom", "test_count": "885", "aggregate_date": "2015-05-01", "id": "4099", "index_value": "13007", "rating": "2.0"}, {"ip_addresses": "21775", "label": "Orange", "test_count": "2573", "aggregate_date": "2015-05-01", "id": "48496", "index_value": "7348", "rating": "2.7"}, {"ip_addresses": "5771", "label": "Mediaserv", "test_count": "944", "aggregate_date": "2015-05-01", "id": "16860", "index_value": "4270", "rating": "2.1"}], "bandwidth": "10102"}, {"countryCode": "BI", "test_count": "1502", "ip_addresses": "1070", "country": "Burundi", "isps": [], "bandwidth": "4299"}, {"countryCode": "Unknown | DR Congo", "test_count": "1694", "ip_addresses": "2327", "country": "DR Congo", "isps": [], "bandwidth": "1676"}, {"countryCode": "MQ", "test_count": "4650", "ip_addresses": "59548", "country": "Martinique", "isps": [{"ip_addresses": "10796", "label": "Outremer Telecom", "test_count": "974", "aggregate_date": "2015-05-01", "id": "4099", "index_value": "16538", "rating": "2.0"}, {"ip_addresses": "16491", "label": "Orange", "test_count": 
"1969", "aggregate_date": "2015-05-01", "id": "48496", "index_value": "11168", "rating": "2.7"}, {"ip_addresses": "5970", "label": "Mediaserv", "test_count": "531", "aggregate_date": "2015-05-01", "id": "16860", "index_value": "5134", "rating": "2.1"}], "bandwidth": "13682"}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "GF", "test_count": "1925", "ip_addresses": "25755", "country": "French Guiana", "isps": [{"ip_addresses": "2168", "label": "Outremer Telecom", "test_count": "105", "aggregate_date": "2015-05-01", "id": "4099", "index_value": "7808", "rating": "2.0"}, {"ip_addresses": "9526", "label": "Orange", "test_count": "948", "aggregate_date": "2015-05-01", "id": "48496", "index_value": "3448", "rating": "2.7"}], "bandwidth": "3823"}, {"countryCode": "ZW", "test_count": "8608", "ip_addresses": "15617", "country": "Zimbabwe", "isps": [{"ip_addresses": "1553", "label": "Yo! Africa", "test_count": "598", "aggregate_date": "2015-05-01", "id": "78656", "index_value": "13382", "rating": "2.7"}, {"ip_addresses": "1288", "label": "Liquid Telecommunications Ltd", "test_count": "82", "aggregate_date": "2015-05-01", "id": "23637", "index_value": "12139", "rating": "2.7"}, {"ip_addresses": "1944", "label": "Telone PVT Ltd", "test_count": "367", "aggregate_date": "2015-05-01", "id": "66394", "index_value": "1464", "rating": "2.3"}], "bandwidth": "8718"}, {"countryCode": "ZM", "test_count": "5159", "ip_addresses": "12349", "country": "Zambia", "isps": [{"ip_addresses": "3072", "label": "ZAMTEL", "test_count": "902", "aggregate_date": "2015-05-01", "id": "44214", "index_value": "10378", "rating": "2.2"}, {"ip_addresses": "4259", "label": "AfriConnect Zambia Ltd", "test_count": "739", "aggregate_date": "2015-05-01", "id": "14910", "index_value": "7821", "rating": "2.4"}], "bandwidth": "5415"}, {"countryCode": "LS", "test_count": "794", "ip_addresses": "2257", "country": "Lesotho", 
"isps": [{"ip_addresses": "1662", "label": "Telecom Lesotho", "test_count": "275", "aggregate_date": "2015-05-01", "id": "30709", "index_value": "5682", "rating": "1.6"}], "bandwidth": "7845"}, {"countryCode": "MW", "test_count": "1918", "ip_addresses": "6263", "country": "Malawi", "isps": [], "bandwidth": "1844"}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "ET", "test_count": "6977", "ip_addresses": "5331", "country": "Ethiopia", "isps": [{"ip_addresses": "1365", "label": "Ethio Telecom", "test_count": "463", "aggregate_date": "2015-05-01", "id": "73572", "index_value": "12624", "rating": "2.0"}, {"ip_addresses": "3515", "label": "Ethiopian Telecommuncation Corporation", "test_count": "3612", "aggregate_date": "2015-05-01", "id": "14613", "index_value": "7267", "rating": "1.9"}], "bandwidth": "9854"}, {"countryCode": "GN", "test_count": "1183", "ip_addresses": "1391", "country": "Guinea", "isps": [], "bandwidth": "1626"}, {"countryCode": "BT", "test_count": "1572", "ip_addresses": "3511", "country": "Bhutan", "isps": [{"ip_addresses": "3151", "label": "DrukNet, Bhutan Telecom", "test_count": "623", "aggregate_date": "2015-05-01", "id": "19796", "index_value": "9793", "rating": "2.3"}], "bandwidth": "6477"}, {"countryCode": "GQ", "test_count": "455", "ip_addresses": "1514", "country": "Equatorial Guinea", "isps": [], "bandwidth": "978"}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "SZ", "test_count": 
"1079", "ip_addresses": "3740", "country": "Swaziland", "isps": [{"ip_addresses": "2940", "label": "SWAZILAND PTC", "test_count": "883", "aggregate_date": "2015-05-01", "id": "19317", "index_value": "2306", "rating": "1.9"}], "bandwidth": "2347"}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "DJ", "test_count": "1796", "ip_addresses": "4502", "country": "Djibouti", "isps": [], "bandwidth": "4631"}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "MR", "test_count": "1075", "ip_addresses": "6618", "country": "Mauritania", "isps": [{"ip_addresses": "5288", "label": "Mauritanian Telecommunication Company", "test_count": "675", "aggregate_date": "2015-05-01", "id": "16185", "index_value": "3201", "rating": "2.4"}], "bandwidth": "4916"}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "TC", "test_count": "1446", "ip_addresses": "7119", "country": "Turks and Caicos Islands", "isps": [{"ip_addresses": "3375", "label": "Cable and Wireless, Turks and Caicos", "test_count": "804", "aggregate_date": "2015-05-01", "id": "25475", "index_value": "14916", "rating": "2.1"}], "bandwidth": "10907"}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "NE", "test_count": "12", "ip_addresses": "2355", "country": "Niger", "isps": [], "bandwidth": "788"}, 
{"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "ME", "test_count": "16956", "ip_addresses": "93765", "country": "Montenegro", "isps": [{"ip_addresses": "695", "label": "Promonte GSM", "test_count": "667", "aggregate_date": "2015-05-01", "id": "26900", "index_value": "10092", "rating": "2.7"}, {"ip_addresses": "3943", "label": "M-Kabl doo", "test_count": "2018", "aggregate_date": "2015-05-01", "id": "42566", "index_value": "8995", "rating": "3.1"}, {"ip_addresses": "78794", "label": "Crnogorski Telekom", "test_count": "10345", "aggregate_date": "2015-05-01", "id": "42861", "index_value": "7587", "rating": "2.6"}, {"ip_addresses": "8628", "label": "Drustvo za telekomunikacije MTEL DOO", "test_count": "321", "aggregate_date": "2015-05-01", "id": "25074", "index_value": "3573", "rating": "3.5"}], "bandwidth": "7548"}, {"countryCode": "RS", "test_count": "413164", "ip_addresses": "1346042", "country": "Serbia", "isps": [{"ip_addresses": "3674", "label": "TARGO TELEKOM DOO Belgrade", "test_count": "1709", "aggregate_date": "2015-05-01", "id": "77397", "index_value": "25054", "rating": "4.5"}, {"ip_addresses": "307288", "label": "Serbia BroadBand", "test_count": "95761", "aggregate_date": "2015-05-01", "id": "44506", "index_value": "23137", "rating": "3.5"}, {"ip_addresses": "31479", "label": "Interaktivne Kablovske Objedinjene Mreze - I.KOM D", "test_count": "6789", "aggregate_date": "2015-05-01", "id": "34929", "index_value": "17093", "rating": "3.5"}, {"ip_addresses": "29158", "label": "RADIJUS VEKTOR DOO", "test_count": "11399", "aggregate_date": "2015-05-01", "id": "17192", "index_value": "13840", "rating": "3.1"}, {"ip_addresses": "15222", "label": "Preduzece za proizvodnju, promet i inzenjering Kop", "test_count": "9042", "aggregate_date": "2015-05-01", "id": "67634", "index_value": "11569", "rating": "3.9"}, {"ip_addresses": "24670", "label": "Drustvo za telekomunikacije 
Orion telekom doo Beog", "test_count": "4794", "aggregate_date": "2015-05-01", "id": "86254", "index_value": "11281", "rating": "2.8"}, {"ip_addresses": "9226", "label": "AVCOM d.o.o.", "test_count": "4399", "aggregate_date": "2015-05-01", "id": "11852", "index_value": "10048", "rating": "3.0"}, {"ip_addresses": "3962", "label": "Ipko Telecommunications", "test_count": "331", "aggregate_date": "2015-05-01", "id": "47200", "index_value": "9849", "rating": "3.6"}, {"ip_addresses": "45053", "label": "Telenor d.o.o. Beograd", "test_count": "10283", "aggregate_date": "2015-05-01", "id": "26719", "index_value": "8434", "rating": "3.3"}, {"ip_addresses": "7523", "label": "Exe Net d.o.o. Nis", "test_count": "7250", "aggregate_date": "2015-05-01", "id": "103380", "index_value": "8254", "rating": "4.0"}, {"ip_addresses": "523553", "label": "Telekom Srbija", "test_count": "120665", "aggregate_date": "2015-05-01", "id": "47194", "index_value": "7974", "rating": "2.9"}, {"ip_addresses": "5491", "label": "TippNet Ltd.", "test_count": "665", "aggregate_date": "2015-05-01", "id": "30113", "index_value": "7774", "rating": "3.9"}, {"ip_addresses": "3744", "label": "Krajnalic Komunikacije d.o.o.", "test_count": "3741", "aggregate_date": "2015-05-01", "id": "76427", "index_value": "7360", "rating": "4.4"}, {"ip_addresses": "10196", "label": "TRUF d.o.o.", "test_count": "5354", "aggregate_date": "2015-05-01", "id": "45282", "index_value": "7082", "rating": "3.4"}, {"ip_addresses": "51556", "label": "Orion Telekom Tim d.o.o.Beograd", "test_count": "30811", "aggregate_date": "2015-05-01", "id": "100347", "index_value": "6723", "rating": "3.0"}, {"ip_addresses": "5769", "label": "BEOTELNET d.o.o. 
ZRENJANIN", "test_count": "804", "aggregate_date": "2015-05-01", "id": "42943", "index_value": "6544", "rating": "3.3"}, {"ip_addresses": "37135", "label": "BeotelNet-ISP d.o.o", "test_count": "3726", "aggregate_date": "2015-05-01", "id": "35063", "index_value": "6521", "rating": "3.0"}, {"ip_addresses": "40063", "label": "YUnet International d.o.o.", "test_count": "3088", "aggregate_date": "2015-05-01", "id": "39639", "index_value": "6493", "rating": "3.0"}, {"ip_addresses": "10736", "label": "Jotel d.o.o.", "test_count": "1693", "aggregate_date": "2015-05-01", "id": "35089", "index_value": "6426", "rating": "3.0"}, {"ip_addresses": "18596", "label": "JP Posta Srbije Beograd", "test_count": "5503", "aggregate_date": "2015-05-01", "id": "96332", "index_value": "6224", "rating": "2.8"}], "bandwidth": "12451"}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "ST", "test_count": "406", "ip_addresses": "1463", "country": "Sao Tome and Principe", "isps": [], "bandwidth": "16279"}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | Aland Islands", "test_count": "728", "ip_addresses": "8777", "country": "Aland Islands", "isps": [{"ip_addresses": "5828", "label": 
"Alands Datakommunikation Ab", "test_count": "500", "aggregate_date": "2015-05-01", "id": "3066", "index_value": "29126", "rating": "2.7"}], "bandwidth": "42987"}, {"countryCode": "IM", "test_count": "4331", "ip_addresses": "29386", "country": "Isle of Man", "isps": [{"ip_addresses": "475", "label": "BlueWave Communications Limited", "test_count": "279", "aggregate_date": "2015-05-01", "id": "99004", "index_value": "46887", "rating": "4.5"}, {"ip_addresses": "2281", "label": "Wi-Manx Limited", "test_count": "1321", "aggregate_date": "2015-05-01", "id": "4263", "index_value": "22404", "rating": "3.7"}, {"ip_addresses": "22177", "label": "Manx Telecom", "test_count": "1830", "aggregate_date": "2015-05-01", "id": "25033", "index_value": "22222", "rating": "2.3"}, {"ip_addresses": "3052", "label": "Cable and Wireless Isle of Man Limited", "test_count": "690", "aggregate_date": "2015-05-01", "id": "49315", "index_value": "20978", "rating": "3.0"}], "bandwidth": "23190"}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "JE", "test_count": "5734", "ip_addresses": "38549", "country": "Jersey", "isps": [{"ip_addresses": "22168", "label": "JTGlobal", "test_count": "2501", "aggregate_date": "2015-05-01", "id": "79837", "index_value": "47448", "rating": "2.1"}, {"ip_addresses": "5402", "label": "Sure Jersey Limited", "test_count": "1823", "aggregate_date": "2015-05-01", "id": "93130", "index_value": "26156", "rating": "3.0"}, {"ip_addresses": "7366", "label": "Newtel Limited", "test_count": "1197", "aggregate_date": "2015-05-01", "id": "1246", "index_value": "8572", "rating": "2.3"}], "bandwidth": "32717"}, {"countryCode": "GG", 
"test_count": "3612", "ip_addresses": "23494", "country": "Guernsey", "isps": [{"ip_addresses": "2961", "label": "JT (Guernsey) Limited", "test_count": "823", "aggregate_date": "2015-05-01", "id": "98037", "index_value": "15099", "rating": "2.5"}, {"ip_addresses": "13363", "label": "Sure Guernsey Limited", "test_count": "2786", "aggregate_date": "2015-05-01", "id": "93048", "index_value": "14288", "rating": "2.1"}], "bandwidth": "14394"}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | Curacao", "test_count": "5948", "ip_addresses": "46059", "country": "Curacao", "isps": [{"ip_addresses": "6986", "label": "Columbus Communications Curacao NV", "test_count": "2108", "aggregate_date": "2015-05-01", "id": "58674", "index_value": "20396", "rating": "3.8"}, {"ip_addresses": "30722", "label": "United Telecommunication Services (UTS)", "test_count": "2626", "aggregate_date": "2015-05-01", "id": "42302", "index_value": "18494", "rating": "2.3"}, {"ip_addresses": "3426", "label": "Scarlet B.V.", "test_count": 
"529", "aggregate_date": "2015-05-01", "id": "32417", "index_value": "3097", "rating": "2.4"}], "bandwidth": "17817"}, {"countryCode": "Unknown | Saint Martin", "test_count": "982", "ip_addresses": "1969", "country": "Saint Martin", "isps": [{"ip_addresses": "1891", "label": "Dauphin Telecom", "test_count": "957", "aggregate_date": "2015-05-01", "id": "20309", "index_value": "7585", "rating": "2.3"}], "bandwidth": "7785"}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | Bonaire, Statia & Saba", "test_count": "257", "ip_addresses": "3441", "country": "Bonaire, Statia & Saba", "isps": [], "bandwidth": "10332"}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | 
None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | 
None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | 
None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | 
None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | 
None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | 
None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | 
None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | 
None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | 
None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | 
None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | 
None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | 
None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | 
None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | 
None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | 
None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}, {"countryCode": "Unknown | None", "test_count": null, "ip_addresses": null, "country": null, "isps": [], "bandwidth": null}] | 350,531 | 350,531 | 0.623146 |
1ddc0d0da443fee6b7d4987a8c52f0d1e2d9a26f | 21,443 | py | Python | crabageprediction/venv/Lib/site-packages/sklearn/cross_decomposition/tests/test_pls.py | 13rianlucero/CrabAgePrediction | 92bc7fbe1040f49e820473e33cc3902a5a7177c7 | [
"MIT"
] | 5 | 2022-01-05T00:41:46.000Z | 2022-03-21T07:22:58.000Z | crabageprediction/venv/Lib/site-packages/sklearn/cross_decomposition/tests/test_pls.py | 13rianlucero/CrabAgePrediction | 92bc7fbe1040f49e820473e33cc3902a5a7177c7 | [
"MIT"
] | 3 | 2022-03-18T06:16:45.000Z | 2022-03-23T14:26:58.000Z | crabageprediction/venv/Lib/site-packages/sklearn/cross_decomposition/tests/test_pls.py | 13rianlucero/CrabAgePrediction | 92bc7fbe1040f49e820473e33cc3902a5a7177c7 | [
"MIT"
] | 3 | 2020-08-04T02:48:32.000Z | 2020-08-17T01:20:09.000Z | import pytest
import numpy as np
from numpy.testing import assert_array_almost_equal, assert_array_equal, assert_allclose
from sklearn.datasets import load_linnerud
from sklearn.cross_decomposition._pls import (
_center_scale_xy,
_get_first_singular_vectors_power_method,
_get_first_singular_vectors_svd,
_svd_flip_1d,
)
from sklearn.cross_decomposition import CCA
from sklearn.cross_decomposition import PLSSVD, PLSRegression, PLSCanonical
from sklearn.datasets import make_regression
from sklearn.utils import check_random_state
from sklearn.utils.extmath import svd_flip
from sklearn.exceptions import ConvergenceWarning
from sklearn.utils._testing import ignore_warnings
def assert_matrix_orthogonal(M):
    """Assert that the columns of ``M`` are mutually orthogonal.

    A matrix has orthogonal columns iff its Gram matrix ``M.T @ M`` is
    diagonal, so we compare the Gram matrix against its own diagonal part.
    """
    gram = np.dot(M.T, M)
    diagonal_part = np.diag(np.diag(gram))
    assert_array_almost_equal(gram, diagonal_part)
def test_pls_canonical_basics():
    """Basic structural checks for PLSCanonical on the linnerud data."""
    dataset = load_linnerud()
    X, Y = dataset.data, dataset.target

    pls = PLSCanonical(n_components=X.shape[1])
    pls.fit(X, Y)

    # Weights and scores are orthogonal by construction of the algorithm.
    assert_matrix_orthogonal(pls.x_weights_)
    assert_matrix_orthogonal(pls.y_weights_)
    assert_matrix_orthogonal(pls._x_scores)
    assert_matrix_orthogonal(pls._y_scores)

    # The factorizations X = T P' and Y = U Q' must hold on the
    # centered and scaled training data.
    T, P = pls._x_scores, pls.x_loadings_
    U, Q = pls._y_scores, pls.y_loadings_
    Xc, Yc, x_mean, y_mean, x_std, y_std = _center_scale_xy(
        X.copy(), Y.copy(), scale=True
    )
    assert_array_almost_equal(Xc, np.dot(T, P.T))
    assert_array_almost_equal(Yc, np.dot(U, Q.T))

    # Applying the learned rotations to the training data recovers the scores.
    Xt = pls.transform(X)
    assert_array_almost_equal(Xt, pls._x_scores)
    Xt, Yt = pls.transform(X, Y)
    assert_array_almost_equal(Xt, pls._x_scores)
    assert_array_almost_equal(Yt, pls._y_scores)

    # Round-tripping through inverse_transform recovers the original X.
    X_back = pls.inverse_transform(Xt)
    assert_array_almost_equal(X_back, X)
def test_sanity_check_pls_regression():
    # Sanity check for PLSRegression
    # The results were checked against the R-packages plspm, misOmics and pls
    d = load_linnerud()
    X = d.data
    Y = d.target

    pls = PLSRegression(n_components=X.shape[1])
    pls.fit(X, Y)

    # Reference values obtained from the R implementations; comparisons
    # below are on absolute values because the sign of each component is
    # arbitrary (may differ between implementations).
    expected_x_weights = np.array(
        [
            [-0.61330704, -0.00443647, 0.78983213],
            [-0.74697144, -0.32172099, -0.58183269],
            [-0.25668686, 0.94682413, -0.19399983],
        ]
    )

    expected_x_loadings = np.array(
        [
            [-0.61470416, -0.24574278, 0.78983213],
            [-0.65625755, -0.14396183, -0.58183269],
            [-0.51733059, 1.00609417, -0.19399983],
        ]
    )

    expected_y_weights = np.array(
        [
            [+0.32456184, 0.29892183, 0.20316322],
            [+0.42439636, 0.61970543, 0.19320542],
            [-0.13143144, -0.26348971, -0.17092916],
        ]
    )

    expected_y_loadings = np.array(
        [
            [+0.32456184, 0.29892183, 0.20316322],
            [+0.42439636, 0.61970543, 0.19320542],
            [-0.13143144, -0.26348971, -0.17092916],
        ]
    )

    assert_array_almost_equal(np.abs(pls.x_loadings_), np.abs(expected_x_loadings))
    assert_array_almost_equal(np.abs(pls.x_weights_), np.abs(expected_x_weights))
    assert_array_almost_equal(np.abs(pls.y_loadings_), np.abs(expected_y_loadings))
    assert_array_almost_equal(np.abs(pls.y_weights_), np.abs(expected_y_weights))

    # The R / Python difference in the signs should be consistent across
    # loadings, weights, etc.
    x_loadings_sign_flip = np.sign(pls.x_loadings_ / expected_x_loadings)
    x_weights_sign_flip = np.sign(pls.x_weights_ / expected_x_weights)
    y_weights_sign_flip = np.sign(pls.y_weights_ / expected_y_weights)
    y_loadings_sign_flip = np.sign(pls.y_loadings_ / expected_y_loadings)
    assert_array_almost_equal(x_loadings_sign_flip, x_weights_sign_flip)
    assert_array_almost_equal(y_loadings_sign_flip, y_weights_sign_flip)
def test_sanity_check_pls_regression_constant_column_Y():
    # Check behavior when the first column of Y is constant
    # The results are checked against a modified version of plsreg2
    # from the R-package plsdepot
    d = load_linnerud()
    X = d.data
    Y = d.target
    Y[:, 0] = 1  # make the first target column constant

    pls = PLSRegression(n_components=X.shape[1])
    pls.fit(X, Y)

    # Reference values from the (modified) R implementation; absolute-value
    # comparisons below because component signs are arbitrary.
    expected_x_weights = np.array(
        [
            [-0.6273573, 0.007081799, 0.7786994],
            [-0.7493417, -0.277612681, -0.6011807],
            [-0.2119194, 0.960666981, -0.1794690],
        ]
    )

    expected_x_loadings = np.array(
        [
            [-0.6273512, -0.22464538, 0.7786994],
            [-0.6643156, -0.09871193, -0.6011807],
            [-0.5125877, 1.01407380, -0.1794690],
        ]
    )

    # The constant target column yields an all-zeros first row of loadings.
    expected_y_loadings = np.array(
        [
            [0.0000000, 0.0000000, 0.0000000],
            [0.4357300, 0.5828479, 0.2174802],
            [-0.1353739, -0.2486423, -0.1810386],
        ]
    )

    assert_array_almost_equal(np.abs(expected_x_weights), np.abs(pls.x_weights_))
    assert_array_almost_equal(np.abs(expected_x_loadings), np.abs(pls.x_loadings_))
    # For the PLSRegression with default parameters, y_loadings == y_weights
    assert_array_almost_equal(np.abs(pls.y_loadings_), np.abs(expected_y_loadings))
    assert_array_almost_equal(np.abs(pls.y_weights_), np.abs(expected_y_loadings))

    x_loadings_sign_flip = np.sign(expected_x_loadings / pls.x_loadings_)
    x_weights_sign_flip = np.sign(expected_x_weights / pls.x_weights_)
    # we ignore the first full-zeros row for y
    y_loadings_sign_flip = np.sign(expected_y_loadings[1:] / pls.y_loadings_[1:])

    assert_array_equal(x_loadings_sign_flip, x_weights_sign_flip)
    assert_array_equal(x_loadings_sign_flip[1:], y_loadings_sign_flip)
def test_sanity_check_pls_canonical():
    # Sanity check for PLSCanonical
    # The results were checked against the R-package plspm
    d = load_linnerud()
    X = d.data
    Y = d.target

    pls = PLSCanonical(n_components=X.shape[1])
    pls.fit(X, Y)

    # Reference values from the R implementation; absolute-value comparisons
    # below because component signs are arbitrary between implementations.
    expected_x_weights = np.array(
        [
            [-0.61330704, 0.25616119, -0.74715187],
            [-0.74697144, 0.11930791, 0.65406368],
            [-0.25668686, -0.95924297, -0.11817271],
        ]
    )

    expected_x_rotations = np.array(
        [
            [-0.61330704, 0.41591889, -0.62297525],
            [-0.74697144, 0.31388326, 0.77368233],
            [-0.25668686, -0.89237972, -0.24121788],
        ]
    )

    expected_y_weights = np.array(
        [
            [+0.58989127, 0.7890047, 0.1717553],
            [+0.77134053, -0.61351791, 0.16920272],
            [-0.23887670, -0.03267062, 0.97050016],
        ]
    )

    expected_y_rotations = np.array(
        [
            [+0.58989127, 0.7168115, 0.30665872],
            [+0.77134053, -0.70791757, 0.19786539],
            [-0.23887670, -0.00343595, 0.94162826],
        ]
    )

    assert_array_almost_equal(np.abs(pls.x_rotations_), np.abs(expected_x_rotations))
    assert_array_almost_equal(np.abs(pls.x_weights_), np.abs(expected_x_weights))
    assert_array_almost_equal(np.abs(pls.y_rotations_), np.abs(expected_y_rotations))
    assert_array_almost_equal(np.abs(pls.y_weights_), np.abs(expected_y_weights))

    # Sign flips relative to the reference must be consistent between
    # rotations and weights.
    x_rotations_sign_flip = np.sign(pls.x_rotations_ / expected_x_rotations)
    x_weights_sign_flip = np.sign(pls.x_weights_ / expected_x_weights)
    y_rotations_sign_flip = np.sign(pls.y_rotations_ / expected_y_rotations)
    y_weights_sign_flip = np.sign(pls.y_weights_ / expected_y_weights)
    assert_array_almost_equal(x_rotations_sign_flip, x_weights_sign_flip)
    assert_array_almost_equal(y_rotations_sign_flip, y_weights_sign_flip)

    # Weights and scores are orthogonal by construction.
    assert_matrix_orthogonal(pls.x_weights_)
    assert_matrix_orthogonal(pls.y_weights_)

    assert_matrix_orthogonal(pls._x_scores)
    assert_matrix_orthogonal(pls._y_scores)
def test_sanity_check_pls_canonical_random():
    # Sanity check for PLSCanonical on random data
    # The results were checked against the R-package plspm
    n = 500
    p_noise = 10
    q_noise = 5
    # 2 latents vars:
    rng = check_random_state(11)
    l1 = rng.normal(size=n)
    l2 = rng.normal(size=n)
    latents = np.array([l1, l1, l2, l2]).T
    # X and Y share the same latent structure plus independent noise,
    # then pure-noise columns are appended to each.
    X = latents + rng.normal(size=4 * n).reshape((n, 4))
    Y = latents + rng.normal(size=4 * n).reshape((n, 4))
    X = np.concatenate((X, rng.normal(size=p_noise * n).reshape(n, p_noise)), axis=1)
    Y = np.concatenate((Y, rng.normal(size=q_noise * n).reshape(n, q_noise)), axis=1)

    pls = PLSCanonical(n_components=3)
    pls.fit(X, Y)

    # Reference values from the R implementation; absolute-value comparisons
    # below because component signs are arbitrary between implementations.
    expected_x_weights = np.array(
        [
            [0.65803719, 0.19197924, 0.21769083],
            [0.7009113, 0.13303969, -0.15376699],
            [0.13528197, -0.68636408, 0.13856546],
            [0.16854574, -0.66788088, -0.12485304],
            [-0.03232333, -0.04189855, 0.40690153],
            [0.1148816, -0.09643158, 0.1613305],
            [0.04792138, -0.02384992, 0.17175319],
            [-0.06781, -0.01666137, -0.18556747],
            [-0.00266945, -0.00160224, 0.11893098],
            [-0.00849528, -0.07706095, 0.1570547],
            [-0.00949471, -0.02964127, 0.34657036],
            [-0.03572177, 0.0945091, 0.3414855],
            [0.05584937, -0.02028961, -0.57682568],
            [0.05744254, -0.01482333, -0.17431274],
        ]
    )

    expected_x_loadings = np.array(
        [
            [0.65649254, 0.1847647, 0.15270699],
            [0.67554234, 0.15237508, -0.09182247],
            [0.19219925, -0.67750975, 0.08673128],
            [0.2133631, -0.67034809, -0.08835483],
            [-0.03178912, -0.06668336, 0.43395268],
            [0.15684588, -0.13350241, 0.20578984],
            [0.03337736, -0.03807306, 0.09871553],
            [-0.06199844, 0.01559854, -0.1881785],
            [0.00406146, -0.00587025, 0.16413253],
            [-0.00374239, -0.05848466, 0.19140336],
            [0.00139214, -0.01033161, 0.32239136],
            [-0.05292828, 0.0953533, 0.31916881],
            [0.04031924, -0.01961045, -0.65174036],
            [0.06172484, -0.06597366, -0.1244497],
        ]
    )

    expected_y_weights = np.array(
        [
            [0.66101097, 0.18672553, 0.22826092],
            [0.69347861, 0.18463471, -0.23995597],
            [0.14462724, -0.66504085, 0.17082434],
            [0.22247955, -0.6932605, -0.09832993],
            [0.07035859, 0.00714283, 0.67810124],
            [0.07765351, -0.0105204, -0.44108074],
            [-0.00917056, 0.04322147, 0.10062478],
            [-0.01909512, 0.06182718, 0.28830475],
            [0.01756709, 0.04797666, 0.32225745],
        ]
    )

    expected_y_loadings = np.array(
        [
            [0.68568625, 0.1674376, 0.0969508],
            [0.68782064, 0.20375837, -0.1164448],
            [0.11712173, -0.68046903, 0.12001505],
            [0.17860457, -0.6798319, -0.05089681],
            [0.06265739, -0.0277703, 0.74729584],
            [0.0914178, 0.00403751, -0.5135078],
            [-0.02196918, -0.01377169, 0.09564505],
            [-0.03288952, 0.09039729, 0.31858973],
            [0.04287624, 0.05254676, 0.27836841],
        ]
    )

    assert_array_almost_equal(np.abs(pls.x_loadings_), np.abs(expected_x_loadings))
    assert_array_almost_equal(np.abs(pls.x_weights_), np.abs(expected_x_weights))
    assert_array_almost_equal(np.abs(pls.y_loadings_), np.abs(expected_y_loadings))
    assert_array_almost_equal(np.abs(pls.y_weights_), np.abs(expected_y_weights))

    # Sign flips relative to the reference must be consistent between
    # loadings and weights.
    x_loadings_sign_flip = np.sign(pls.x_loadings_ / expected_x_loadings)
    x_weights_sign_flip = np.sign(pls.x_weights_ / expected_x_weights)
    y_weights_sign_flip = np.sign(pls.y_weights_ / expected_y_weights)
    y_loadings_sign_flip = np.sign(pls.y_loadings_ / expected_y_loadings)
    assert_array_almost_equal(x_loadings_sign_flip, x_weights_sign_flip)
    assert_array_almost_equal(y_loadings_sign_flip, y_weights_sign_flip)

    # Weights and scores are orthogonal by construction.
    assert_matrix_orthogonal(pls.x_weights_)
    assert_matrix_orthogonal(pls.y_weights_)

    assert_matrix_orthogonal(pls._x_scores)
    assert_matrix_orthogonal(pls._y_scores)
def test_convergence_fail():
    """A too-small ``max_iter`` must raise a ConvergenceWarning on fit."""
    dataset = load_linnerud()
    X, Y = dataset.data, dataset.target

    # Two NIPALS iterations are not enough to converge on this data.
    pls_nipals = PLSCanonical(n_components=X.shape[1], max_iter=2)
    with pytest.warns(ConvergenceWarning):
        pls_nipals.fit(X, Y)
@pytest.mark.filterwarnings("ignore:.*`scores_` was deprecated")  # 1.1
@pytest.mark.parametrize("Est", (PLSSVD, PLSRegression, PLSCanonical))
def test_attibutes_shapes(Est):
    """Fitted weight/score attributes must have ``n_components`` columns."""
    dataset = load_linnerud()
    X, Y = dataset.data, dataset.target

    n_components = 2
    estimator = Est(n_components=n_components)
    estimator.fit(X, Y)

    assert all(
        attr.shape[1] == n_components
        for attr in (estimator.x_weights_, estimator.y_weights_)
    )

    # TODO: remove in 1.1
    with ignore_warnings(category=FutureWarning):
        assert all(
            attr.shape[1] == n_components
            for attr in (estimator.x_scores_, estimator.y_scores_)
        )
@pytest.mark.parametrize("Est", (PLSRegression, PLSCanonical, CCA))
def test_univariate_equivalence(Est):
    """Fitting on 1D y must be equivalent to fitting on the (n, 1) 2D Y."""
    dataset = load_linnerud()
    X, Y = dataset.data, dataset.target

    estimator = Est(n_components=1)
    coef_from_1d = estimator.fit(X, Y[:, 0]).coef_
    coef_from_2d = estimator.fit(X, Y[:, :1]).coef_

    assert coef_from_1d.shape == coef_from_2d.shape
    assert_array_almost_equal(coef_from_1d, coef_from_2d)
@pytest.mark.parametrize("Est", (PLSRegression, PLSCanonical, CCA, PLSSVD))
def test_copy(Est):
    """The `copy` keyword controls whether fit/transform/predict mutate X."""
    dataset = load_linnerud()
    X, Y = dataset.data, dataset.target
    X_orig = X.copy()

    # copy=True: fitting must leave the input untouched.
    model = Est(copy=True).fit(X, Y)
    assert_array_equal(X, X_orig)

    # copy=False: fitting is allowed to modify X in place.
    with pytest.raises(AssertionError):
        Est(copy=False).fit(X, Y)
        assert_array_almost_equal(X, X_orig)

    if Est is PLSSVD:
        return  # PLSSVD does not support copy param in predict or transform

    X_orig = X.copy()
    with pytest.raises(AssertionError):
        model.transform(X, Y, copy=False)
        assert_array_almost_equal(X, X_orig)

    X_orig = X.copy()
    with pytest.raises(AssertionError):
        model.predict(X, copy=False)
        assert_array_almost_equal(X, X_orig)

    # copy=True must match copy=False applied to defensive copies.
    assert_array_almost_equal(
        model.transform(X, Y, copy=True),
        model.transform(X.copy(), Y.copy(), copy=False),
    )
    assert_array_almost_equal(
        model.predict(X, copy=True),
        model.predict(X.copy(), copy=False),
    )
def _generate_test_scale_and_stability_datasets():
"""Generate dataset for test_scale_and_stability"""
# dataset for non-regression 7818
rng = np.random.RandomState(0)
n_samples = 1000
n_targets = 5
n_features = 10
Q = rng.randn(n_targets, n_features)
Y = rng.randn(n_samples, n_targets)
X = np.dot(Y, Q) + 2 * rng.randn(n_samples, n_features) + 1
X *= 1000
yield X, Y
# Data set where one of the features is constraint
X, Y = load_linnerud(return_X_y=True)
# causes X[:, -1].std() to be zero
X[:, -1] = 1.0
yield X, Y
X = np.array([[0.0, 0.0, 1.0], [1.0, 0.0, 0.0], [2.0, 2.0, 2.0], [3.0, 5.0, 4.0]])
Y = np.array([[0.1, -0.2], [0.9, 1.1], [6.2, 5.9], [11.9, 12.3]])
yield X, Y
# Seeds that provide a non-regression test for #18746, where CCA fails
seeds = [530, 741]
for seed in seeds:
rng = np.random.RandomState(seed)
X = rng.randn(4, 3)
Y = rng.randn(4, 2)
yield X, Y
@pytest.mark.parametrize("Est", (CCA, PLSCanonical, PLSRegression, PLSSVD))
@pytest.mark.parametrize("X, Y", _generate_test_scale_and_stability_datasets())
def test_scale_and_stability(Est, X, Y):
    """scale=True on raw data must match scale=False on pre-scaled data.

    This doubles as a numerical-stability check across platforms.
    """
    X_scaled, Y_scaled, *_ = _center_scale_xy(X, Y)
    scores_raw = Est(scale=True).fit_transform(X, Y)
    scores_pre = Est(scale=False).fit_transform(X_scaled, Y_scaled)
    assert_allclose(scores_pre[0], scores_raw[0], atol=1e-4)
    assert_allclose(scores_pre[1], scores_raw[1], atol=1e-4)
@pytest.mark.parametrize("Est", (PLSSVD, PLSCanonical, CCA))
@pytest.mark.parametrize("n_components", (0, 4))
def test_n_components_bounds(Est, n_components):
    """Out-of-range n_components must warn and fall back to the rank bound."""
    # n_components should be in [1, min(n_samples, n_features, n_targets)]
    # TODO: catch error instead of warning in 1.1
    rng = np.random.RandomState(0)
    X, Y = rng.randn(10, 5), rng.randn(10, 3)
    model = Est(n_components=n_components)
    with pytest.warns(FutureWarning, match="n_components=3 will be used instead"):
        model.fit(X, Y)
    # The effective number of components must be the fallback upper bound.
    assert model.transform(X).shape[1] == 3
@pytest.mark.parametrize("n_components", (0, 6))
def test_n_components_bounds_pls_regression(n_components):
    """PLSRegression's n_components upper bound is n_features."""
    # TODO: catch error instead of warning in 1.1
    rng = np.random.RandomState(0)
    X, Y = rng.randn(10, 5), rng.randn(10, 3)
    model = PLSRegression(n_components=n_components)
    with pytest.warns(FutureWarning, match="n_components=5 will be used instead"):
        model.fit(X, Y)
    # The effective number of components must be the fallback upper bound.
    assert model.transform(X).shape[1] == 5
@pytest.mark.parametrize("Est", (PLSSVD, CCA, PLSCanonical))
def test_scores_deprecations(Est):
    """x_scores_ / y_scores_ must emit a deprecation FutureWarning.

    PLSRegression is excluded because its y_scores_ differs from
    transform(Y_train).
    """
    # TODO: remove attributes and test in 1.1
    rng = np.random.RandomState(0)
    X, Y = rng.randn(10, 5), rng.randn(10, 3)
    model = Est().fit(X, Y)
    with pytest.warns(FutureWarning, match="`x_scores_` was deprecated"):
        assert_allclose(model.x_scores_, model.transform(X))
    with pytest.warns(FutureWarning, match="`y_scores_` was deprecated"):
        assert_allclose(model.y_scores_, model.transform(X, Y)[1])
@pytest.mark.parametrize("Est", (PLSRegression, PLSCanonical, CCA))
def test_norm_y_weights_deprecation(Est):
    """Accessing `norm_y_weights` must emit a deprecation FutureWarning."""
    rng = np.random.RandomState(0)
    X, Y = rng.randn(10, 5), rng.randn(10, 3)
    model = Est().fit(X, Y)
    with pytest.warns(FutureWarning, match="`norm_y_weights` was deprecated"):
        model.norm_y_weights
# TODO: Remove test in 1.1
@pytest.mark.parametrize("Estimator", (PLSRegression, PLSCanonical, CCA, PLSSVD))
@pytest.mark.parametrize("attribute", ("x_mean_", "y_mean_", "x_std_", "y_std_"))
def test_mean_and_std_deprecation(Estimator, attribute):
    """Reading the deprecated mean/std attributes must emit FutureWarning."""
    rng = np.random.RandomState(0)
    X, Y = rng.randn(10, 5), rng.randn(10, 3)
    fitted = Estimator().fit(X, Y)
    with pytest.warns(FutureWarning, match=f"`{attribute}` was deprecated"):
        getattr(fitted, attribute)
@pytest.mark.parametrize("n_samples, n_features", [(100, 10), (100, 200)])
@pytest.mark.parametrize("seed", range(10))
def test_singular_value_helpers(n_samples, n_features, seed):
    """Power method and SVD must agree on the first singular vectors."""
    X, Y = make_regression(n_samples, n_features, n_targets=5, random_state=seed)
    u_power, v_power, _ = _get_first_singular_vectors_power_method(
        X, Y, norm_y_weights=True)
    u_svd, v_svd = _get_first_singular_vectors_svd(X, Y)
    # Align signs before comparing the two estimates.
    _svd_flip_1d(u_power, v_power)
    _svd_flip_1d(u_svd, v_svd)
    assert_allclose(u_power, u_svd, rtol=1e-1)
    assert_allclose(v_power, v_svd, rtol=1e-1)
def test_one_component_equivalence():
    """With n_components=1, PLSSVD, PLSRegression and PLSCanonical agree."""
    X, Y = make_regression(100, 10, n_targets=5, random_state=0)
    svd_scores = PLSSVD(n_components=1).fit(X, Y).transform(X)
    reg_scores = PLSRegression(n_components=1).fit(X, Y).transform(X)
    can_scores = PLSCanonical(n_components=1).fit(X, Y).transform(X)
    assert_allclose(svd_scores, reg_scores, rtol=1e-2)
    assert_allclose(svd_scores, can_scores, rtol=1e-2)
def test_svd_flip_1d():
    """_svd_flip_1d must match svd_flip applied to the 2D reshapes."""
    u = np.array([1, -4, 2])
    v = np.array([1, 2, 3])
    u_expected, v_expected = svd_flip(u.reshape(-1, 1), v.reshape(1, -1))
    _svd_flip_1d(u, v)  # operates in place
    # Both against the reference implementation and the literal values.
    assert_allclose(u, u_expected.ravel())
    assert_allclose(u, [-1, 4, -2])
    assert_allclose(v, v_expected.ravel())
    assert_allclose(v, [-1, -2, -3])
def test_loadings_converges():
    """CCA must fit without ConvergenceWarning. Non-regression for #19549."""
    X, y = make_regression(n_samples=200, n_features=20, n_targets=20, random_state=20)
    model = CCA(n_components=10, max_iter=500)
    with pytest.warns(None) as captured:
        model.fit(X, y)
    # No warning (in particular no ConvergenceWarning) may be raised.
    if len(captured) > 0:
        pytest.fail(f"Unexpected warning: {str(captured[0].message)}")
    # Loadings must converge to reasonable magnitudes.
    assert np.all(np.abs(model.x_loadings_) < 1)
def test_pls_constant_y():
    """A constant y must trigger a UserWarning. Non-regression for #19831."""
    rng = np.random.RandomState(42)
    features = rng.rand(100, 3)
    targets = np.zeros(100)
    model = PLSRegression()
    with pytest.warns(UserWarning, match="Y residual is constant at iteration"):
        model.fit(features, targets)
    assert_allclose(model.x_rotations_, 0)
| 35.268092 | 88 | 0.661102 |
0144c9c630010faf2c603a3a314d2ace17f530de | 2,121 | py | Python | StatsFasta.py | Siddhi-Nargund/Biological-File-Format | 3ead1dd8883e96404f76ceeab5ff5be01b7f6be5 | [
"MIT"
] | null | null | null | StatsFasta.py | Siddhi-Nargund/Biological-File-Format | 3ead1dd8883e96404f76ceeab5ff5be01b7f6be5 | [
"MIT"
] | null | null | null | StatsFasta.py | Siddhi-Nargund/Biological-File-Format | 3ead1dd8883e96404f76ceeab5ff5be01b7f6be5 | [
"MIT"
] | null | null | null | #/usr/bin/python3
#!/usr/bin/env python3
"""Report sequence and residue counts for a FASTA (.fa / .fa.gz) file."""
import argparse
import re
import os
import gzip
import shutil

# CLI: --file is the input FASTA path, --output is the report path.
parser = argparse.ArgumentParser(description='Enter File name with location')
parser.add_argument('--file', type=str, dest="filepaths")
parser.add_argument('--output', type=str, dest="outputFileName")
args = parser.parse_args()

# Split the input path into its base name and extension.
stem, ext = os.path.splitext(args.filepaths)
input_path = args.filepaths
report_path = args.outputFileName

# Only plain FASTA or gzip-compressed input is accepted.
if ext not in ['.gz', '.fa']:
    print("Invalid file format")
    exit()

# Decompress .gz input into a plain file first, then read that instead.
if ext == '.gz':
    with gzip.open(input_path, 'rb') as gz_src:
        with open(stem, 'wb') as dst:
            shutil.copyfileobj(gz_src, dst)
    input_path = stem

with open(input_path, 'r') as handle:
    fasta_text = handle.read()

# Every 'read_id=NNN' annotation marks one sequence record.
read_ids = re.findall("read_id=[0-9]*", fasta_text)
sequence_total = len(read_ids)
print("Number of sequences are: " + str(sequence_total))

# Sum the 'length=NNN' annotations to obtain the residue total.
length_tags = re.findall("length=[0-9]*", fasta_text)
lengths = [int(tag.split('=')[1]) for tag in length_tags]
total = sum(lengths)
print("Number of residues are :", total)

# Write the same two figures to the report file.
with open(report_path, 'w') as report:
    report.write("Number of sequences are: " + str(sequence_total))
    report.write("\nNumber of residues are :" + str(total))
| 26.848101 | 87 | 0.70297 |
6e203e8ac1777267bd68ea8fa7d1330a501ad551 | 2,644 | py | Python | basicsr/archs/memnet_arch.py | jungheil/BasicSR | 95642e745690523918788a8b3d20c381af6780c8 | [
"Apache-2.0"
] | null | null | null | basicsr/archs/memnet_arch.py | jungheil/BasicSR | 95642e745690523918788a8b3d20c381af6780c8 | [
"Apache-2.0"
] | null | null | null | basicsr/archs/memnet_arch.py | jungheil/BasicSR | 95642e745690523918788a8b3d20c381af6780c8 | [
"Apache-2.0"
] | null | null | null | import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
from basicsr.utils.registry import ARCH_REGISTRY
@ARCH_REGISTRY.register()
class MemNet(nn.Module):
    """MemNet: feature extractor -> chain of memory blocks -> reconstructor,
    with a global residual connection from the input to the output.
    """

    def __init__(self, in_channels=3, channels=64, num_memblock=6, num_resblock=6):
        super(MemNet, self).__init__()
        self.feature_extractor = BNReLUConv(in_channels, channels)
        self.reconstructor = BNReLUConv(channels, in_channels)
        # Memory block i receives i long-term memory tensors (1-based index).
        self.dense_memory = nn.ModuleList(
            [MemoryBlock(channels, num_resblock, idx + 1) for idx in range(num_memblock)])

    def forward(self, x):
        feat = self.feature_extractor(x)
        long_term = [feat]  # grows as each memory block appends its output
        out = feat
        for block in self.dense_memory:
            out = block(out, long_term)
        return self.reconstructor(out) + x
class MemoryBlock(nn.Module):
    """Memory block: a recursive unit (stack of residual blocks) followed by
    a gate unit fusing short-term memory (per-resblock outputs) with the
    long-term memory accumulated from earlier memory blocks.

    Args:
        channels: feature channels flowing through the block.
        num_resblock: number of residual blocks in the recursive unit.
        num_memblock: 1-based index of this memory block; it determines how
            many long-term memory tensors the gate unit concatenates.
    """

    def __init__(self, channels, num_resblock, num_memblock):
        super(MemoryBlock, self).__init__()
        self.recursive_unit = nn.ModuleList(
            [ResidualBlock(channels) for _ in range(num_resblock)])
        # 1x1 conv gate over the concatenated short- and long-term memories.
        self.gate_unit = BNReLUConv((num_resblock + num_memblock) * channels, channels, 1, 1, 0)

    def forward(self, x, ys):
        """`ys` holds long-term memory from previous memory blocks; it is
        extended in place with this block's gated output.
        """
        xs = []  # short-term memory produced by the recursive unit
        for layer in self.recursive_unit:
            x = layer(x)
            xs.append(x)
        gate_out = self.gate_unit(torch.cat(xs + ys, 1))
        ys.append(gate_out)
        return gate_out
class ResidualBlock(torch.nn.Module):
    """Pre-activation residual block (https://arxiv.org/abs/1512.03385):

    x -> (BN/ReLU/Conv) -> (BN/ReLU/Conv) -> + x
    """

    def __init__(self, channels, k=3, s=1, p=1):
        super(ResidualBlock, self).__init__()
        self.relu_conv1 = BNReLUConv(channels, channels, k, s, p)
        self.relu_conv2 = BNReLUConv(channels, channels, k, s, p)

    def forward(self, x):
        shortcut = x
        hidden = self.relu_conv2(self.relu_conv1(x))
        return hidden + shortcut
class BNReLUConv(nn.Sequential):
    """BatchNorm -> ReLU -> bias-free Conv2d, packaged as an nn.Sequential."""

    def __init__(self, in_channels, channels, k=3, s=1, p=1, inplace=True):
        super(BNReLUConv, self).__init__()
        for name, module in (
            ('bn', nn.BatchNorm2d(in_channels)),
            ('relu', nn.ReLU(inplace=inplace)),
            ('conv', nn.Conv2d(in_channels, channels, k, s, p, bias=False)),
        ):
            self.add_module(name, module)
ec8128151a8654fbb08c21e8dd8826f1083831b8 | 537 | py | Python | atlantisbot_api/api/permissions.py | johnvictorfs/atlantisbot-api | 1a00ae33497b5c5bf51d7bac154e96d4d9ab534b | [
"MIT"
] | null | null | null | atlantisbot_api/api/permissions.py | johnvictorfs/atlantisbot-api | 1a00ae33497b5c5bf51d7bac154e96d4d9ab534b | [
"MIT"
] | null | null | null | atlantisbot_api/api/permissions.py | johnvictorfs/atlantisbot-api | 1a00ae33497b5c5bf51d7bac154e96d4d9ab534b | [
"MIT"
] | null | null | null | from rest_framework.permissions import SAFE_METHODS, BasePermission
class AdminOrReadOnly(BasePermission):
    """
    Read-only (safe) methods are open to everyone; any other method is
    reserved for staff members and superusers.
    """

    def has_permission(self, request, view):
        if request.method in SAFE_METHODS:
            return True
        return request.user.is_superuser or request.user.is_staff
class IsSuperUser(BasePermission):
    """
    Grants access exclusively to superusers.
    """

    def has_permission(self, request, view):
        return request.user.is_superuser
| 24.409091 | 99 | 0.726257 |
ab9532b00243ae2b3105ff273ce050bf3b845907 | 2,559 | py | Python | epiocms/main.py | ojii/django-cms-epio-quickstart | 359d655dd1157bd416b50f9d07ac0785391bcd5e | [
"BSD-3-Clause"
] | 2 | 2016-03-01T22:15:57.000Z | 2016-07-17T18:09:33.000Z | epiocms/main.py | ojii/django-cms-epio-quickstart | 359d655dd1157bd416b50f9d07ac0785391bcd5e | [
"BSD-3-Clause"
] | null | null | null | epiocms/main.py | ojii/django-cms-epio-quickstart | 359d655dd1157bd416b50f9d07ac0785391bcd5e | [
"BSD-3-Clause"
] | null | null | null | from __future__ import with_statement
import os
import shutil
import subprocess
import sys
EPIO_APP_NOT_FOUND = 1
SUBPROCESS_FAILED = 2
DATA_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), 'data'))
def _run_command(bits):
process = subprocess.Popen(bits, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, shell=False)
stdout, stderr = process.communicate()
return (process.returncode, stdout, stderr)
def main():
cwd = os.getcwd()
epio_appfile = os.path.join(cwd, '.epio-app')
with open(epio_appfile, 'r') as fobj:
appname = fobj.read()
print "Checking environment"
if not os.path.exists(epio_appfile):
print "Please run `epio create` first and run this command from within"
print "the epio app directory."
print >> sys.stderr, "File .epio-app not found"
sys.exit(EPIO_APP_NOT_FOUND)
print "Copying data"
shutil.copytree(os.path.join(DATA_DIR, 'media'), os.path.join(cwd, 'media'))
shutil.copytree(os.path.join(DATA_DIR, 'templates'), os.path.join(cwd, 'templates'))
shutil.copy(os.path.join(DATA_DIR, 'epio.ini'), cwd)
shutil.copy(os.path.join(DATA_DIR, 'requirements.txt'), cwd)
shutil.copy(os.path.join(DATA_DIR, 'settings.py'), cwd)
shutil.copy(os.path.join(DATA_DIR, 'urls.py'), cwd)
print "Uploading app"
print "This will take a couple of minutes, go grab a coffee!"
retcode, stdout, stderr = _run_command(["epio", "upload"])
if retcode != 0:
print "Subprocess (epio upload) failed with status code %s" % retcode
print >> sys.stderr, stdout
print >> sys.stderr, stderr
sys.exit(SUBPROCESS_FAILED)
print "Syncing database"
retcode, stdout, stderr = _run_command(["epio", "django", "--", "syncdb", "--all"])
if retcode != 0:
print "Subprocess (epio django -- syncdb --all) failed with status code %s" % retcode
print >> sys.stderr, stdout
print >> sys.stderr, stderr
sys.exit(SUBPROCESS_FAILED)
retcode, stdout, stderr = _run_command(["epio", "django", "--", "migrate", "--fake"])
if retcode != 0:
print "Subprocess (epio django -- migrate --fake) failed with status code %s" % retcode
print >> sys.stderr, stdout
print >> sys.stderr, stderr
sys.exit(SUBPROCESS_FAILED)
print "All set up and ready. Please run `epio django createsuperuser` to"
print "create a superuser to log in and go to %s.ep.io to add some pages!" % appname
if __name__ == '__main__':
main() | 41.95082 | 95 | 0.655725 |
4f17c4914f4ef202cad6bb1b14caf5ed6b000622 | 107 | py | Python | enthought/plugins/remote_editor/actions.py | enthought/etsproxy | 4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347 | [
"BSD-3-Clause"
] | 3 | 2016-12-09T06:05:18.000Z | 2018-03-01T13:00:29.000Z | enthought/plugins/remote_editor/actions.py | enthought/etsproxy | 4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347 | [
"BSD-3-Clause"
] | 1 | 2020-12-02T00:51:32.000Z | 2020-12-02T08:48:55.000Z | enthought/plugins/remote_editor/actions.py | enthought/etsproxy | 4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347 | [
"BSD-3-Clause"
] | null | null | null | # proxy module
from __future__ import absolute_import
from envisage.plugins.remote_editor.actions import *
| 26.75 | 52 | 0.850467 |
4f79547fa20d6ec2e547b60c24d310d47b40ade0 | 4,576 | py | Python | data/korean.py | hccho2/Korean-FastSpeech2-Pytorch | 9b8f09021474fd99c4766b66cccf23c7ecdd9258 | [
"MIT"
] | 1 | 2020-08-21T02:05:40.000Z | 2020-08-21T02:05:40.000Z | data/korean.py | hccho2/Korean-FastSpeech2-Pytorch | 9b8f09021474fd99c4766b66cccf23c7ecdd9258 | [
"MIT"
] | null | null | null | data/korean.py | hccho2/Korean-FastSpeech2-Pytorch | 9b8f09021474fd99c4766b66cccf23c7ecdd9258 | [
"MIT"
] | 1 | 2020-08-21T02:05:41.000Z | 2020-08-21T02:05:41.000Z | import numpy as np
import os
import tgt
from scipy.io.wavfile import read
import pyworld as pw
import torch
import audio as Audio
from utils import get_alignment
import hparams as hp
from jamo import h2j
import codecs
def prepare_align(in_dir, meta):
    """Write each transcript from the metadata file next to its wav file.

    Each line of *meta* is '|'-separated: field 0 is the wav filename and
    field 3 is the transcript; the transcript is written to
    <in_dir>/wavs/<name>.txt.
    """
    meta_path = os.path.join(in_dir, meta)
    with open(meta_path, encoding='utf-8') as meta_file:
        for record in meta_file:
            fields = record.strip().split('|')
            txt_name = fields[0].replace('.wav', '.txt')
            with open(os.path.join(in_dir, 'wavs', txt_name), 'w') as out:
                out.write(fields[3])
def build_from_path(in_dir, out_dir, meta):
    """Preprocess every utterance listed in *meta*.

    Splits the resulting metadata entries into train/val lists (basenames
    starting with '1' go to val), writes corpus-wide statistics to
    <out_dir>/stat.txt, and returns (train, val).
    """
    train, val = list(), list()
    # Running extrema: maxima start low and minima start high so that the
    # first observed value always updates them.
    f0_max, f0_min = energy_max, energy_min = 0, 1000000
    n_frames = 0
    with open(os.path.join(in_dir, meta), encoding='utf-8') as f:
        for index, line in enumerate(f):
            # Metadata line format: basename|...|...|text ('|'-separated).
            parts = line.strip().split('|')
            basename, text = parts[0], parts[3]
            ret = process_utterance(in_dir, out_dir, basename)
            # None means the utterance was rejected (bad alignment or too long).
            if ret is None:
                continue
            else:
                info, f_max, f_min, e_max, e_min, n = ret
            if basename[0] == '1':
                val.append(info)
            else:
                train.append(info)
            if index % 100 == 0:
                print("Done %d" % index)
            f0_max, f0_min = max(f0_max, f_max), min(f0_min, f_min)
            energy_max, energy_min = max(energy_max, e_max), min(energy_min, e_min)
            n_frames += n
    # Persist corpus statistics (also echoed to stdout).
    # NOTE(review): total time assumes n_frames * hop_length / sampling_rate
    # converts frames to seconds -- confirm against hparams.
    with open(os.path.join(out_dir, 'stat.txt'), 'w', encoding='utf-8') as f:
        strs = ['Total time: {} hours'.format(n_frames*hp.hop_length/hp.sampling_rate/3600),
                'Total frames: {}'.format(n_frames),
                'Min F0: {}'.format(f0_min),
                'Max F0: {}'.format(f0_max),
                'Min energy: {}'.format(energy_min),
                'Max energy: {}'.format(energy_max)]
        for s in strs:
            print(s)
            f.write(s+'\n')
    return [r for r in train if r is not None], [r for r in val if r is not None]
def process_utterance(in_dir, out_dir, basename):
    """Preprocess a single utterance: wav conversion, alignment, f0, energy
    and mel extraction, saving each feature as a .npy file under *out_dir*.

    Returns ('basename|text', f0 max, nonzero f0 min, energy max, energy min,
    n mel frames), or None when the alignment is empty or the mel exceeds
    hp.max_seq_len.
    """
    wav_bak_basename=basename.replace('.wav','')
    # Drop the 2-character prefix from the original basename.
    basename = wav_bak_basename[2:]
    wav_bak_path = os.path.join(in_dir, "wavs_bak", "{}.wav".format(wav_bak_basename))
    wav_path = os.path.join(in_dir, 'wavs', '{}.wav'.format(basename))
    # Convert kss data into PCM encoded wavs (mono, 22050 Hz) if not done yet.
    if not os.path.isfile(wav_path):
        os.system("ffmpeg -i {} -ac 1 -ar 22050 {}".format(wav_bak_path, wav_path))
    tg_path = os.path.join(out_dir, 'TextGrid', '{}.TextGrid'.format(basename))
    # Get alignments from the Montreal-style TextGrid 'phones' tier.
    textgrid = tgt.io.read_textgrid(tg_path)
    phone, duration, start, end = get_alignment(textgrid.get_tier_by_name('phones'))
    text = '{'+ '}{'.join(phone) + '}' # '{A}{B}{$}{C}', $ represents silent phones
    text = text.replace('{$}', ' ') # '{A}{B} {C}'
    text = text.replace('}{', ' ') # '{A B} {C}'
    # Empty or inverted alignment window: reject the utterance.
    if start >= end:
        return None
    # Read and trim wav files to the aligned region.
    _, wav = read(wav_path)
    wav = wav[int(hp.sampling_rate*start):int(hp.sampling_rate*end)].astype(np.float32)
    # Compute fundamental frequency with pyworld DIO, clipped to the
    # total aligned duration in frames.
    f0, _ = pw.dio(wav.astype(np.float64), hp.sampling_rate, frame_period=hp.hop_length/hp.sampling_rate*1000)
    f0 = f0[:sum(duration)]
    # Compute mel-scale spectrogram and energy
    mel_spectrogram, energy = Audio.tools.get_mel_from_wav(torch.FloatTensor(wav))
    mel_spectrogram = mel_spectrogram.numpy().astype(np.float32)[:, :sum(duration)]
    energy = energy.numpy().astype(np.float32)[:sum(duration)]
    # Reject utterances longer than the model's maximum sequence length.
    if mel_spectrogram.shape[1] >= hp.max_seq_len:
        return None
    # Save alignment
    ali_filename = '{}-ali-{}.npy'.format(hp.dataset, basename)
    np.save(os.path.join(out_dir, 'alignment', ali_filename), duration, allow_pickle=False)
    # Save fundamental prequency
    f0_filename = '{}-f0-{}.npy'.format(hp.dataset, basename)
    np.save(os.path.join(out_dir, 'f0', f0_filename), f0, allow_pickle=False)
    # Save energy
    energy_filename = '{}-energy-{}.npy'.format(hp.dataset, basename)
    np.save(os.path.join(out_dir, 'energy', energy_filename), energy, allow_pickle=False)
    # Save spectrogram
    mel_filename = '{}-mel-{}.npy'.format(hp.dataset, basename)
    np.save(os.path.join(out_dir, 'mel', mel_filename), mel_spectrogram.T, allow_pickle=False)
    return '|'.join([basename, text]), max(f0), min([f for f in f0 if f != 0]), max(energy), min(energy), mel_spectrogram.shape[1]
| 37.203252 | 130 | 0.607299 |
2eb7e4aec9f2e49e2a93f4e9217e098e25e3ce37 | 9,488 | py | Python | GTSRB/VI/Train.py | matthewwicker/StatisticalGuarenteesForBNNs | 1f585636c152b8489e331641c743ff628c2b7cc7 | [
"BSD-3-Clause"
] | 13 | 2019-03-09T21:31:10.000Z | 2022-03-14T13:51:09.000Z | GTSRB/VI/Train.py | matthewwicker/StatisticalGuarenteesForBNNs | 1f585636c152b8489e331641c743ff628c2b7cc7 | [
"BSD-3-Clause"
] | null | null | null | GTSRB/VI/Train.py | matthewwicker/StatisticalGuarenteesForBNNs | 1f585636c152b8489e331641c743ff628c2b7cc7 | [
"BSD-3-Clause"
] | null | null | null |
# coding: utf-8
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten
from keras.layers import Conv2D, MaxPooling2D
from keras.utils import np_utils
from keras.models import model_from_json
import pickle
import numpy as np
import matplotlib.pyplot as plt
import cv2
import pandas as pd
import os
import shutil
# Load the pickled GTSRB splits produced by the preprocessing step.
training_file = '../data/train.p'
validation_file= '../data/validate.p'
testing_file = '../data/test.p'
with open(training_file, mode='rb') as f:
    train = pickle.load(f)
with open(validation_file, mode='rb') as f:
    valid = pickle.load(f)
with open(testing_file, mode='rb') as f:
    test = pickle.load(f)
# Each pickle holds a dict with image arrays under 'features' and integer
# class ids under 'labels'.
X_train, y_train = train['features'], train['labels']
X_valid, y_valid = valid['features'], valid['labels']
X_test, y_test = test['features'], test['labels']
n_train = len(X_train)
# TODO: Number of validation examples
n_validation = len(X_valid)
# TODO: Number of testing examples.
n_test = len(X_test)
# TODO: What's the shape of an traffic sign image?
image_shape = X_train[0].shape
n_classes = len(np.unique(y_train))
channels = 3
# NOTE(review): this is a Python 2 script (bare `print` statements appear
# below), so these parenthesised prints emit tuple reprs, not plain text.
print("Number of training examples =", n_train)
print("Number of testing examples =", n_test)
print("Image data shape =", image_shape)
print("Number of classes =", n_classes)
# In[2]:
def normalize_image(image):
    """Map 8-bit pixel values [0, 255] onto the zero-centred range [-0.5, 0.5]."""
    return (image * 1.0) / 255 - 0.5
def Gray_image(image):
    """Collapse an RGB image to its 28x28x1 luma (Y) plane when the model is
    configured for a single input channel; otherwise pass it through."""
    if channels != 1:
        return image
    luma = cv2.cvtColor(image, cv2.COLOR_RGB2YCrCb)[:, :, 0]
    return np.resize(luma, (28, 28, 1))
def preprocess(image):
    """Grayscale (when configured) and normalise every image in the batch."""
    return np.array([normalize_image(Gray_image(frame)) for frame in image])
# Run the grayscale/normalisation pipeline over every split.
X_train = preprocess(X_train)
X_valid = preprocess(X_valid)
X_test = preprocess(X_test)
# 6. Preprocess class labels
y_train = np_utils.to_categorical(y_train, n_classes)
y_test = np_utils.to_categorical(y_test, n_classes)
y_valid = np_utils.to_categorical(y_valid, n_classes)
print "imageshape after grayscale",X_train[0].shape
# In[3]:
# We are going to be limiting the classes so we do that here
desired_classes = 10
numeric = np.argmax(y_train,axis=1)
counts = np.bincount(numeric)
# The `desired_classes` most frequent labels, most common first.
classes_to_use = counts.argsort()[-desired_classes:][::-1]
print classes_to_use
classes_to_use = [2,25] # We preselect classes so that they are the most different
desired_classes = 2 # We reset the class counter
def filter_by_class(x_data, y_data):
    # Keep only the examples whose (argmax) label is in the module-level
    # `classes_to_use` list, and re-encode labels as one-hot vectors over
    # the positions *within* that list.
    X = []
    Y = []
    for i in range(len(x_data)):
        if(np.argmax(y_data[i]) in classes_to_use):
            X.append(x_data[i])
            # NOTE(review): `classes_to_use == np.argmax(...)` compares a
            # Python list to a numpy scalar; numpy broadcasts this to an
            # element-wise boolean array, so np.where yields the index of
            # the matching class. Fragile -- works only because argmax
            # returns a numpy scalar; confirm before touching.
            Y.append(np.where(classes_to_use == np.argmax(y_data[i])))
    X = np.asarray(X)
    Y = np.asarray(np_utils.to_categorical(Y))
    return X, Y
# Restrict every split to the two preselected classes and report shapes.
X_train, y_train = filter_by_class(X_train, y_train)
X_valid, y_valid = filter_by_class(X_valid, y_valid)
X_test, y_test = filter_by_class(X_test, y_test)
image_shape = X_train[0].shape
print "X_train",np.shape(X_train)
print "y_train",np.shape(y_train)
print("Image data shape =", image_shape)
print("Number of classes =", desired_classes)
import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
import tensorflow as tf
from edward.models import Categorical, Normal
import edward as ed
import pandas as pd
from tqdm import tqdm
from tqdm import trange
# In[5]:
# Reproducibility seed and model hyper-parameters.
ed.set_seed(980297)
N = 128   # number of images in a minibatch.
D = 784   # number of features.
K = 10    # number of classes.
number_of_filters = 25 # 5 also works # 1 works
filter_size = 3 # 1 works
width = 256
# In[6]:
# Placeholders for a minibatch of images and their integer labels.
x = tf.placeholder(tf.float32, shape = [N,28,28,channels], name = "x_placeholder")
#y_ = tf.placeholder("float", shape = [None, 10])
y_ = tf.placeholder(tf.int32, [N], name = "y_placeholder")
#x_image = tf.reshape(x, [-1,28,28,1])
x_image = x
# Priors: Normal over the conv filters (narrow 0.01 scale) and biases.
W_conv1 = Normal(loc=tf.ones([filter_size,filter_size,channels,number_of_filters]),
                 scale=tf.ones([filter_size,filter_size,channels,number_of_filters])*0.01, name="conv1_W")
b_conv1 = b_fc1 = Normal(loc=tf.zeros([number_of_filters]), scale=tf.ones([number_of_filters]), name="b_fc1")
h_conv1 = tf.nn.conv2d(x_image, W_conv1, strides=[1,1,1,1], padding='VALID') + b_conv1
conv1 = tf.nn.relu(h_conv1)
#conv1 = tf.nn.max_pool(conv1, ksize=[1,2,2,1], strides=[1,2,2,1], padding='VALID')
features = tf.contrib.layers.flatten(conv1)
# Fully connected layers; 16900 = 26*26*25 flattened conv outputs
# (28 - 3 + 1 = 26 with VALID padding and 25 filters).
W_fc1 = Normal(loc=tf.zeros([16900, width]), scale=tf.ones([16900, width]), name="W_fc1")
b_fc1 = Normal(loc=tf.zeros([width]), scale=tf.ones([width]), name="b_fc1")
h_fc1 = tf.nn.relu(tf.matmul(features, W_fc1) + b_fc1)
W_fc2 = Normal(loc=tf.zeros([width, desired_classes]), scale=tf.ones([width, desired_classes]), name="W_fc2")
b_fc2 = Normal(loc=tf.zeros([desired_classes]), scale=tf.ones([desired_classes]), name="b_fc2")
#y_conv = tf.nn.softmax(tf.matmul(h_fc1_drop, W_fc2) + b_fc2)
y = Categorical(tf.matmul(h_fc1, W_fc2) + b_fc2)
# In[7]:
# number of samples
# we set it to 20 because of the memory constrain in the GPU.
# My GPU can take upto about 200 samples at once.
# INFERENCE
# Variational families q(W), q(b): Normals with learnable loc and
# softplus-positive scale.
qW_conv1 = Normal(loc=tf.Variable(tf.random_normal([filter_size,filter_size,channels,number_of_filters])),
                  scale=tf.nn.softplus(tf.Variable(tf.random_normal([filter_size,filter_size,channels,number_of_filters]))))
qb_conv1 = Normal(loc=tf.Variable(tf.random_normal([number_of_filters])),
                  scale=tf.nn.softplus(tf.Variable(tf.random_normal([number_of_filters]))))
qW_fc1 = Normal(loc=tf.Variable(tf.random_normal([16900, width])),
                scale=tf.nn.softplus(tf.Variable(tf.random_normal([16900, width]))))
qb_fc1 = Normal(loc=tf.Variable(tf.random_normal([width])),
                scale=tf.nn.softplus(tf.Variable(tf.random_normal([width]))))
# Contruct the q(w) and q(b). in this case we assume Normal distributions.
qW_fc2 = Normal(loc=tf.Variable(tf.random_normal([width, desired_classes])),
                scale=tf.nn.softplus(tf.Variable(tf.random_normal([width, desired_classes]))))
qb_fc2 = Normal(loc=tf.Variable(tf.random_normal([desired_classes])),
                scale=tf.nn.softplus(tf.Variable(tf.random_normal([desired_classes]))))
# In[8]:
# KLqp variational inference pairing each prior with its posterior family.
inference = ed.KLqp({W_conv1: qW_conv1,
                     b_conv1: qb_conv1,
                     W_fc1: qW_fc1,
                     b_fc1: qb_fc1,
                     W_fc2: qW_fc2,
                     b_fc2: qb_fc2 }, data={y: y_})
#inference.initialize(step_size=0.01, n_steps=10)
optimizer = tf.train.AdamOptimizer(0.001)
inference.initialize(n_iter=7500, optimizer=optimizer)
# We will use an interactive session.
sess = tf.InteractiveSession()
# Initialise all the vairables in the session.
tf.global_variables_initializer().run()
# In[9]:
counter = 0
# In[10]:
# Mini-batch slicer over the module-level X_train / y_train arrays.
def get_batch(n, counter):
    # Return n training examples starting at `counter`. If the window would
    # overrun X_train, restart from index 0. NOTE(review): only the *local*
    # `counter` is reset here; the caller keeps its own cursor and repeats
    # the same wrap-around check -- confirm the duplication is intentional.
    if(counter+n+1 >= len(X_train)):
        counter = 0
    retval = X_train[counter:counter+n], y_train[counter:counter+n]
    return retval
# Variational training loop: one KLqp update per iteration on a minibatch.
for _ in range(inference.n_iter):
    X_batch, Y_batch = get_batch(N,counter)
    counter+=N
    if(counter+N+1 >= len(X_train)):
        counter = 0
    # TensorFlow method gives the label data in a one hot vetor format. We convert that into a single label.
    Y_batch = np.argmax(Y_batch,axis=1)
    info_dict_hmc = inference.update(feed_dict= {x:X_batch, y_: Y_batch})
    inference.print_progress(info_dict_hmc)
# In[11]:
def test_using_last_sample(x_test, y_test):
x_image = tf.reshape(x_test, [-1,28,28,channels])
W_conv1 = qW_conv1.eval()
b_conv1 = qb_conv1.eval()
W_fc1 = qW_fc1.eval() #qW_fc1.params[-2]
b_fc1 = qb_fc1.eval() #qb_fc1.params[-2]
W_fc2 = qW_fc2.eval() #.params[-2]
b_fc2 = qb_fc2.eval() #.params[-2]
h_conv1 = tf.nn.conv2d(x_image, W_conv1, strides=[1,1,1,1], padding='VALID') + b_conv1
conv1 = tf.nn.relu(h_conv1)
#conv1 = tf.nn.max_pool(conv1, ksize=[1,2,2,1], strides=[1,2,2,1], padding='VALID')
features = tf.contrib.layers.flatten(conv1)
h_fc1 = tf.nn.relu(tf.matmul(features, W_fc1) + b_fc1)
y_conv = tf.nn.softmax(tf.matmul(h_fc1, W_fc2) + b_fc2)
y_pred = tf.argmax(y_conv, 1)
# plt.hist(y_pred.eval())
# plt.show()
correct_prediction = tf.equal(y_pred , y_test )
accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float") )
return accuracy
# Evaluate one posterior draw on each split (train, test, validation).
X_train = X_train.astype('float32')
Y_train = np.argmax(y_train,axis=1)
accuracy = test_using_last_sample(X_train ,Y_train)
test_res = accuracy.eval()
print test_res
X_test = X_test.astype('float32')
Y_test = np.argmax(y_test,axis=1)
accuracy = test_using_last_sample(X_test ,Y_test)
test_res = accuracy.eval()
print test_res
X_valid = X_valid.astype('float32')
Y_valid = np.argmax(y_valid,axis=1)
accuracy = test_using_last_sample(X_valid ,Y_valid)
test_res = accuracy.eval()
print test_res
# Accuracies observed on a previous run:
#0.6031851
#0.56532663
#0.6130952
# In[ ]:
import os
if not os.path.exists("SampledModels"):
    os.makedirs("SampledModels")
from tqdm import trange
# Persist 400 independent posterior weight samples for later analysis.
for _ in trange(400):
    np.savez_compressed("SampledModels/sample_weights_%s"%(_), [qW_conv1.eval(),
                                                               qb_conv1.eval(),
                                                               qW_fc1.eval(),
                                                               qb_fc1.eval(),
                                                               qW_fc2.eval(),
                                                               qb_fc2.eval()],
                                                              ['convw1', 'convb1', 'wfc1', 'bfc1', 'w', 'b'])
| 29.65 | 117 | 0.672639 |
28a0dbab8aba79b5861405178db3f4c9962dfb27 | 1,576 | py | Python | tests/links_tests/array_tests/test_shape_transformer_to_2d.py | pfnet/chainerchem | efe323aa21f63a815130d673781e7cca1ccb72d2 | [
"MIT"
] | 184 | 2019-11-27T12:59:01.000Z | 2022-03-29T19:18:54.000Z | tests/links_tests/array_tests/test_shape_transformer_to_2d.py | pfnet/chainerchem | efe323aa21f63a815130d673781e7cca1ccb72d2 | [
"MIT"
] | 21 | 2019-12-08T01:53:33.000Z | 2020-10-23T01:19:56.000Z | tests/links_tests/array_tests/test_shape_transformer_to_2d.py | pfnet/chainerchem | efe323aa21f63a815130d673781e7cca1ccb72d2 | [
"MIT"
] | 45 | 2019-11-28T09:59:54.000Z | 2022-02-07T02:42:46.000Z | import numpy
import pytest
from chainer_chemistry.links.array.shape_transformer_to_2d import ShapeTransformerTo2D # NOQA
@pytest.mark.parametrize('axis', [0, 1, -1])
def test_shape_transformer_2d_2d_array(axis):
    """transform/inverse_transform must round-trip a 2D array."""
    transformer = ShapeTransformerTo2D(axis=axis)
    original = numpy.arange(6).reshape((2, 3))
    forward = transformer.transform(original)
    roundtrip = transformer.inverse_transform(forward)
    if axis == 0:
        # Moving axis 0 last is a transpose for 2D input.
        expected = numpy.array([[0, 3], [1, 4], [2, 5]])
        assert numpy.allclose(forward.array, expected)
    elif axis in (1, -1):
        # The target axis is already last: no change.
        assert numpy.allclose(original, forward.array)
    assert numpy.allclose(original, roundtrip.array)
@pytest.mark.parametrize('axis', [0, 1, 2, -1])
def test_shape_transformer_2d_3d_array(axis):
    """transform/inverse_transform must round-trip a 3D array."""
    transformer = ShapeTransformerTo2D(axis=axis)
    original = numpy.arange(12).reshape((2, 3, 2))
    forward = transformer.transform(original)
    roundtrip = transformer.inverse_transform(forward)
    if axis == 0:
        expected = numpy.array(
            [[0, 6], [1, 7], [2, 8], [3, 9], [4, 10], [5, 11]])
        assert numpy.allclose(forward.array, expected)
    elif axis == 1:
        expected = numpy.array(
            [[0, 2, 4], [1, 3, 5], [6, 8, 10], [7, 9, 11]])
        assert numpy.allclose(forward.array, expected)
    elif axis in (2, -1):
        # The trailing axis stays put, so this is a plain reshape.
        assert numpy.allclose(forward.array, original.reshape(6, 2))
    assert numpy.allclose(original, roundtrip.array)
def test_shape_transformer_2d_error():
    """Calling `inverse_transform` before `transform` raises AttributeError."""
    transformer = ShapeTransformerTo2D(axis=1)
    data = numpy.arange(6).reshape(2, 3)
    with pytest.raises(AttributeError):
        # No shape has been cached yet, so the inverse must fail.
        transformer.inverse_transform(data)
if __name__ == '__main__':
    # Allow running this test module directly: verbose (-v), no capture (-s).
    pytest.main([__file__, '-v', '-s'])
| 30.901961 | 94 | 0.607234 |
6ebe623d5e71ccfc2df0d5d99653224fcfa79c38 | 1,817 | py | Python | tensorflow/python/ops/rnn_grad.py | uve/tensorflow | e08079463bf43e5963acc41da1f57e95603f8080 | [
"Apache-2.0"
] | null | null | null | tensorflow/python/ops/rnn_grad.py | uve/tensorflow | e08079463bf43e5963acc41da1f57e95603f8080 | [
"Apache-2.0"
] | null | null | null | tensorflow/python/ops/rnn_grad.py | uve/tensorflow | e08079463bf43e5963acc41da1f57e95603f8080 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Gradients for (block) GRU/LSTM operators."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.ops import gen_rnn_ops
@ops.RegisterGradient("BlockLSTM")
def _block_lstm_grad(op, *grads):
  """Gradient for the BlockLSTM op.

  Args:
    op: the forward BlockLSTM op being differentiated.
    *grads: gradients with respect to each forward output, in output order;
      only the cell-state (index 1) and hidden-state (index 6) gradients are
      consumed by the backward kernel.

  Returns:
    A tuple of gradients aligned with the forward op's inputs; `None` for
    `seq_len_max`, which is an integer length input with no gradient.
  """
  (seq_len_max, x, cs_prev, h_prev, w, wci, wcf, wco, b) = op.inputs
  (i, cs, f, o, ci, co, h) = op.outputs
  # grads mirrors op.outputs = (i, cs, f, o, ci, co, h).
  cs_grad = grads[1]
  h_grad = grads[6]
  backward = gen_rnn_ops.block_lstm_grad(
      seq_len_max=seq_len_max, x=x, cs_prev=cs_prev, h_prev=h_prev, w=w,
      wci=wci, wcf=wcf, wco=wco, b=b, i=i, cs=cs, f=f, o=o, ci=ci, co=co,
      h=h, cs_grad=cs_grad, h_grad=h_grad,
      use_peephole=op.get_attr("use_peephole"))
  (x_grad, cs_prev_grad, h_prev_grad, w_grad, wci_grad, wcf_grad, wco_grad,
   b_grad) = backward
  return (None, x_grad, cs_prev_grad, h_prev_grad, w_grad, wci_grad,
          wcf_grad, wco_grad, b_grad)
| 34.283019 | 81 | 0.636764 |
086f84f6eae948965ee5022a04f917e450458d66 | 9,942 | py | Python | Old/pickleshare.py | cbates8/CompostMonitoringSystem | 765fbf60e3d684c83c8fda6d9f18b5de2b7e03ab | [
"MIT"
] | 6,989 | 2017-07-18T06:23:18.000Z | 2022-03-31T15:58:36.000Z | Old/pickleshare.py | cbates8/CompostMonitoringSystem | 765fbf60e3d684c83c8fda6d9f18b5de2b7e03ab | [
"MIT"
] | 1,978 | 2017-07-18T09:17:58.000Z | 2022-03-31T14:28:43.000Z | Old/pickleshare.py | cbates8/CompostMonitoringSystem | 765fbf60e3d684c83c8fda6d9f18b5de2b7e03ab | [
"MIT"
] | 1,228 | 2017-07-18T09:03:13.000Z | 2022-03-29T05:57:40.000Z | #!/usr/bin/env python
""" PickleShare - a small 'shelve' like datastore with concurrency support
Like shelve, a PickleShareDB object acts like a normal dictionary. Unlike
shelve, many processes can access the database simultaneously. Changing a
value in database is immediately visible to other processes accessing the
same database.
Concurrency is possible because the values are stored in separate files. Hence
the "database" is a directory where *all* files are governed by PickleShare.
Example usage::
from pickleshare import *
db = PickleShareDB('~/testpickleshare')
db.clear()
print "Should be empty:",db.items()
db['hello'] = 15
db['aku ankka'] = [1,2,313]
db['paths/are/ok/key'] = [1,(5,46)]
print db.keys()
del db['aku ankka']
This module is certainly not ZODB, but can be used for low-load
(non-mission-critical) situations where tiny code size trumps the
advanced features of a "real" object database.
Installation guide: pip install pickleshare
Author: Ville Vainio <vivainio@gmail.com>
License: MIT open source license.
"""
from __future__ import print_function
__version__ = "0.7.5"
try:
from pathlib import Path
except ImportError:
# Python 2 backport
from pathlib2 import Path
import os,stat,time
try:
import collections.abc as collections_abc
except ImportError:
import collections as collections_abc
try:
import cPickle as pickle
except ImportError:
import pickle
import errno
import sys
if sys.version_info[0] >= 3:
string_types = (str,)
else:
string_types = (str, unicode)
def gethashfile(key):
    """Return the two-hex-digit bucket filename for *key*.

    ``hash(key) % 256`` is always in [0, 255] because Python's ``%`` takes
    the sign of the divisor, so ``"%02x"`` yields exactly two hex digits.
    The original ``abs(...)`` and ``[-2:]`` were therefore redundant and
    have been dropped; results are unchanged for every key.
    """
    return "%02x" % (hash(key) % 256)
_sentinel = object()
class PickleShareDB(collections_abc.MutableMapping):
    """The main 'connection' object for a PickleShare database.

    Acts like a dict whose entries are individual pickle files under a root
    directory, so multiple processes can read/write concurrently. An
    in-memory cache keyed by file path is invalidated via file mtimes.
    """
    def __init__(self,root):
        """Return a db object that will manage the specified directory."""
        if not isinstance(root, string_types):
            root = str(root)
        root = os.path.abspath(os.path.expanduser(root))
        self.root = Path(root)
        if not self.root.is_dir():
            # catching the exception is necessary if multiple processes are concurrently trying to create a folder
            # exists_ok keyword argument of mkdir does the same but only from Python 3.5
            try:
                self.root.mkdir(parents=True)
            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise
        # cache has { 'key' : (obj, orig_mod_time) }
        self.cache = {}

    def __getitem__(self,key):
        """db['key'] reading.

        Raises KeyError if the backing file is missing or unreadable.
        """
        fil = self.root / key
        try:
            # stat() doubles as the existence check; mtime drives the cache.
            mtime = (fil.stat()[stat.ST_MTIME])
        except OSError:
            raise KeyError(key)

        if fil in self.cache and mtime == self.cache[fil][1]:
            return self.cache[fil][0]
        try:
            # The cached item has expired, need to read
            with fil.open("rb") as f:
                obj = pickle.loads(f.read())
        except:
            # NOTE: any failure (corrupt pickle, permission error, ...) is
            # deliberately mapped to KeyError — best-effort concurrency.
            raise KeyError(key)

        self.cache[fil] = (obj,mtime)
        return obj

    def __setitem__(self,key,value):
        """db['key'] = 5 -- writes the pickled value to its own file."""
        fil = self.root / key
        parent = fil.parent
        if parent and not parent.is_dir():
            parent.mkdir(parents=True)
        # We specify protocol 2, so that we can mostly go between Python 2
        # and Python 3. We can upgrade to protocol 3 when Python 2 is obsolete.
        with fil.open('wb') as f:
            pickle.dump(value, f, protocol=2)
        try:
            self.cache[fil] = (value, fil.stat().st_mtime)
        except OSError as e:
            # Another process may have already deleted the file; ignore.
            if e.errno != errno.ENOENT:
                raise

    def hset(self, hashroot, key, value):
        """Hashed set: store key/value inside a per-hash bucket file."""
        hroot = self.root / hashroot
        if not hroot.is_dir():
            hroot.mkdir()
        hfile = hroot / gethashfile(key)
        d = self.get(hfile, {})
        d.update( {key : value})
        self[hfile] = d

    def hget(self, hashroot, key, default = _sentinel, fast_only = True):
        """Hashed get: read key from its bucket file under hashroot.

        With fast_only=False, falls back to scanning every bucket (works
        even after hcompress()); otherwise a missing bucket is a miss.
        """
        hroot = self.root / hashroot
        hfile = hroot / gethashfile(key)

        d = self.get(hfile, _sentinel )
        #print "got dict",d,"from",hfile
        if d is _sentinel:
            if fast_only:
                if default is _sentinel:
                    raise KeyError(key)
                return default

            # slow mode ok, works even after hcompress()
            d = self.hdict(hashroot)

        return d.get(key, default)

    def hdict(self, hashroot):
        """Get all data contained in hashed category 'hashroot' as dict."""
        hfiles = self.keys(hashroot + "/*")
        hfiles.sort()
        last = len(hfiles) and hfiles[-1] or ''
        if last.endswith('xx'):
            # The 'xx' file written by hcompress() is merged first so that
            # newer per-bucket entries override the compressed snapshot.
            hfiles = [last] + hfiles[:-1]

        all = {}

        for f in hfiles:
            # print "using",f
            try:
                all.update(self[f])
            except KeyError:
                print("Corrupt",f,"deleted - hset is not threadsafe!")
                del self[f]

            self.uncache(f)

        return all

    def hcompress(self, hashroot):
        """Compress category 'hashroot', so hset is fast again.

        hget will fail if fast_only is True for compressed items (that were
        hset before hcompress).
        """
        hfiles = self.keys(hashroot + "/*")
        all = {}
        for f in hfiles:
            # print "using",f
            all.update(self[f])
            self.uncache(f)

        self[hashroot + '/xx'] = all
        for f in hfiles:
            p = self.root / f
            if p.name == 'xx':
                continue
            p.unlink()

    def __delitem__(self,key):
        """del db["key"]"""
        fil = self.root / key
        self.cache.pop(fil,None)
        try:
            fil.unlink()
        except OSError:
            # notfound and permission denied are ok - we
            # lost, the other process wins the conflict
            pass

    def _normalized(self, p):
        """Make a key suitable for user's eyes (relative, forward slashes)."""
        return str(p.relative_to(self.root)).replace('\\','/')

    def keys(self, globpat = None):
        """All keys in DB, or all keys matching a glob."""
        if globpat is None:
            files = self.root.rglob('*')
        else:
            files = self.root.glob(globpat)
        return [self._normalized(p) for p in files if p.is_file()]

    def __iter__(self):
        return iter(self.keys())

    def __len__(self):
        return len(self.keys())

    def uncache(self,*items):
        """Removes all, or specified items from cache.

        Use this after reading a large amount of large objects
        to free up memory, when you won't be needing the objects
        for a while.
        """
        if not items:
            self.cache = {}
        for it in items:
            self.cache.pop(it,None)

    def waitget(self,key, maxwaittime = 60 ):
        """Wait (poll) for a key to get a value.

        Will wait for `maxwaittime` seconds before raising a KeyError.
        The call exits normally if the `key` field in db gets a value
        within the timeout period.

        Use this for synchronizing different processes or for ensuring
        that an unfortunately timed "db['key'] = newvalue" operation
        in another process (which causes all 'get' operation to cause a
        KeyError for the duration of pickling) won't screw up your program
        logic.
        """
        # Poll with increasing backoff: 0.2s x3, 0.5s x2, then 1s forever.
        wtimes = [0.2] * 3 + [0.5] * 2 + [1]
        tries = 0
        waited = 0
        while 1:
            try:
                val = self[key]
                return val
            except KeyError:
                pass

            if waited > maxwaittime:
                raise KeyError(key)

            time.sleep(wtimes[tries])
            waited+=wtimes[tries]
            if tries < len(wtimes) -1:
                tries+=1

    def getlink(self,folder):
        """Get a convenient link for accessing items."""
        return PickleShareLink(self, folder)

    def __repr__(self):
        return "PickleShareDB('%s')" % self.root
class PickleShareLink:
    """A shorthand for accessing nested PickleShare data conveniently.

    Created through PickleShareDB.getlink(), example::

        lnk = db.getlink('myobjects/test')
        lnk.foo = 2
        lnk.bar = lnk.foo + 5
    """
    def __init__(self, db, keydir ):
        # Populate __dict__ directly to bypass __setattr__ (which would
        # otherwise write 'db'/'keydir' into the database). locals() also
        # inserts a harmless self-reference under 'self', as before.
        self.__dict__.update(locals())

    def __getattr__(self,key):
        # Only called when normal lookup fails, so 'db' and 'keydir'
        # resolve from __dict__; everything else is read from the database.
        return self.__dict__['db'][self.__dict__['keydir']+'/' + key]

    def __setattr__(self,key,val):
        # Attribute writes are persisted straight into the database.
        self.db[self.keydir+'/' + key] = val

    def __repr__(self):
        db = self.__dict__['db']
        keys = db.keys( self.__dict__['keydir'] +"/*")
        # Bug fix: pathlib.Path has no .basename() (that was path.py API),
        # so repr() used to raise AttributeError; .name is the equivalent.
        return "<PickleShareLink '%s': %s>" % (
            self.__dict__['keydir'],
            ";".join([Path(k).name for k in keys]))
def main():
    """Tiny command-line front-end: dump, load, testwait and test commands."""
    import textwrap
    usage = textwrap.dedent("""\
    pickleshare - manage PickleShare databases
    Usage:
        pickleshare dump /path/to/db > dump.txt
        pickleshare load /path/to/db < dump.txt
        pickleshare test /path/to/db
    """)
    DB = PickleShareDB
    import sys
    if len(sys.argv) < 2:
        print(usage)
        return

    cmd = sys.argv[1]
    args = sys.argv[2:]
    if cmd == 'dump':
        if not args: args = ['.']
        db = DB(args[0])
        import pprint
        pprint.pprint(db.items())
    elif cmd == 'load':
        cont = sys.stdin.read()
        db = DB(args[0])
        # WARNING: eval() of stdin executes arbitrary code; only feed this
        # command dump files you trust.
        data = eval(cont)
        db.clear()
        # Bug fix: iterate the parsed dump, not the freshly-cleared db —
        # the old loop over db.items() was always empty, so nothing loaded.
        for k, v in data.items():
            db[k] = v
    elif cmd == 'testwait':
        db = DB(args[0])
        db.clear()
        print(db.waitget('250'))
    elif cmd == 'test':
        # NOTE(review): test() and stress() are not defined anywhere in this
        # module; this branch raises NameError, exactly as it did before.
        test()
        stress()

if __name__== "__main__":
    main()
| 28.164306 | 114 | 0.565379 |
ec3eb2a75093130cf7b36ef072b42779369b54ff | 2,702 | py | Python | save_model.py | gaowei0518/tensorflow-yolov4-tflite | fe4e0ffc1f3fc93dd8ba170de75c75dd5dbbda0c | [
"MIT"
] | null | null | null | save_model.py | gaowei0518/tensorflow-yolov4-tflite | fe4e0ffc1f3fc93dd8ba170de75c75dd5dbbda0c | [
"MIT"
] | null | null | null | save_model.py | gaowei0518/tensorflow-yolov4-tflite | fe4e0ffc1f3fc93dd8ba170de75c75dd5dbbda0c | [
"MIT"
] | null | null | null | import tensorflow as tf
from absl import app, flags, logging
from absl.flags import FLAGS
from core.yolov4 import YOLO, decode, filter_boxes
import core.utils as utils
from core.config import cfg
flags.DEFINE_string('weights', './data/yolov4.weights', 'path to weights file')
flags.DEFINE_string('output', './checkpoints/yolov4-416', 'path to output')
flags.DEFINE_boolean('tiny', False, 'is yolo-tiny or not')
flags.DEFINE_integer('input_size', 416, 'define input size of export model')
flags.DEFINE_float('score_thres', 0.2, 'define score threshold')
flags.DEFINE_string('framework', 'tf', 'define what framework do you want to convert (tf, trt, tflite)')
flags.DEFINE_string('model', 'yolov4', 'yolov3 or yolov4')
flags.DEFINE_string('class_file_name', './data/classes/coco.names', 'file contain the class names')
def save_tf():
  """Build the YOLO graph, load Darknet weights, and export the model.

  All settings (weights path, output path, input size, tiny/full variant,
  framework, score threshold) are read from absl FLAGS defined at module
  level. For the 'tflite' framework the raw (boxes, probabilities) pair is
  exported; otherwise boxes are pre-filtered by score threshold.
  """
  STRIDES, ANCHORS, NUM_CLASS, XYSCALE = utils.load_config(FLAGS)

  input_layer = tf.keras.layers.Input([FLAGS.input_size, FLAGS.input_size, 3])
  feature_maps = YOLO(input_layer, NUM_CLASS, FLAGS.model, FLAGS.tiny)

  # Per-scale input-size divisors, finest scale first: 16/32 for the tiny
  # variants, 8/16/32 otherwise. Any extra feature maps fall back to the
  # coarsest divisor (32), matching the original per-branch if/else logic.
  divisors = [16, 32] if FLAGS.tiny else [8, 16, 32]

  bbox_tensors = []
  prob_tensors = []
  for idx, feature_map in enumerate(feature_maps):
    divisor = divisors[idx] if idx < len(divisors) else divisors[-1]
    decoded = decode(feature_map, FLAGS.input_size // divisor, NUM_CLASS,
                     STRIDES, ANCHORS, idx, XYSCALE, FLAGS.framework)
    bbox_tensors.append(decoded[0])
    prob_tensors.append(decoded[1])

  pred_bbox = tf.concat(bbox_tensors, axis=1)
  pred_prob = tf.concat(prob_tensors, axis=1)

  if FLAGS.framework == 'tflite':
    # TFLite export leaves box filtering to the caller.
    pred = (pred_bbox, pred_prob)
  else:
    boxes, pred_conf = filter_boxes(
        pred_bbox, pred_prob, score_threshold=FLAGS.score_thres,
        input_shape=tf.constant([FLAGS.input_size, FLAGS.input_size]))
    pred = tf.concat([boxes, pred_conf], axis=-1)

  model = tf.keras.Model(input_layer, pred)
  utils.load_weights(model, FLAGS.weights, FLAGS.model, FLAGS.tiny)
  model.summary()
  model.save(FLAGS.output)
def main(_argv):
  """absl entry point; `_argv` receives leftover argv entries and is unused."""
  save_tf()

if __name__ == '__main__':
    try:
        app.run(main)
    except SystemExit:
        # app.run raises SystemExit on normal completion; swallow it so the
        # script exits cleanly when run directly.
        pass
| 43.580645 | 155 | 0.714656 |
00057fc627262f47aae65cefe6bb7a2dc3dd6424 | 5,469 | py | Python | conf.py | fernet-ws/docs | 3e2dbe79453739d5d430cb43aeed24ff4a4e6fe3 | [
"MIT"
] | null | null | null | conf.py | fernet-ws/docs | 3e2dbe79453739d5d430cb43aeed24ff4a4e6fe3 | [
"MIT"
] | null | null | null | conf.py | fernet-ws/docs | 3e2dbe79453739d5d430cb43aeed24ff4a4e6fe3 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/master/config

# -- Path setup --------------------------------------------------------------

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))

# -- Project information -----------------------------------------------------

project = u'Fernet'
copyright = u'2021, Rodrigo "Albo" Arce'
author = u'Rodrigo "Albo" Arce'

# The short X.Y version
version = u''
# The full version, including alpha/beta/rc tags
release = u''

# -- General configuration ---------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.todo',
    'sphinx_rtd_theme',
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
# NOTE(review): Sphinx >= 5 warns on `language = None`; 'en' is the modern
# equivalent — confirm the Sphinx version pinned for this project.
language = None

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = [u'_build', 'Thumbs.db', '.DS_Store']

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = None

# -- Options for HTML output -------------------------------------------------

# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself.  Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}

# -- Options for HTMLHelp output ---------------------------------------------

# Output file base name for HTML help builder.
htmlhelp_basename = 'Fernetdoc'

# -- Options for LaTeX output ------------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',

    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'Fernet.tex', u'Fernet Documentation',
     u'Rodrigo "Albo" Arce', 'manual'),
]

# -- Options for manual page output ------------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'fernet', u'Fernet Documentation',
     [author], 1)
]

# -- Options for Texinfo output ----------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'Fernet', u'Fernet Documentation',
     author, 'Fernet', 'One line description of project.',
     'Miscellaneous'),
]

# -- Options for Epub output -------------------------------------------------

# Bibliographic Dublin Core info.
epub_title = project

# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''

# A unique identification for the text.
#
# epub_uid = ''

# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']

# -- Extension configuration -------------------------------------------------

# -- Options for todo extension ----------------------------------------------

# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
| 29.722826 | 79 | 0.642896 |
850eb7d86567d2f3fdee44fcc1de5f516cc6c24e | 7,196 | py | Python | official/vision/image_classification/configs/base_configs.py | vishal2612200/models | fc02382c9f2ffa58dbd2541b62bcfdaf23fc631e | [
"Apache-2.0"
] | 2 | 2020-03-31T18:54:04.000Z | 2020-03-31T18:54:08.000Z | official/vision/image_classification/configs/base_configs.py | yajinwuzl/models | 01d1931f6a2f09c93dd1a007c33af8d1a902eff7 | [
"Apache-2.0"
] | 1 | 2020-03-30T06:12:11.000Z | 2020-03-30T06:12:11.000Z | official/vision/image_classification/configs/base_configs.py | yajinwuzl/models | 01d1931f6a2f09c93dd1a007c33af8d1a902eff7 | [
"Apache-2.0"
] | null | null | null | # Lint as: python3
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Definitions for high level configuration groups.."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from typing import Any, List, Mapping, Optional
import dataclasses
from official.modeling.hyperparams import base_config
# Re-export the generic config classes so downstream code can import them
# from this module alongside the vision-specific configs defined below.
CallbacksConfig = base_config.CallbacksConfig
TensorboardConfig = base_config.TensorboardConfig
RuntimeConfig = base_config.RuntimeConfig
@dataclasses.dataclass
class ExportConfig(base_config.Config):
  """Configuration for exports.

  Both fields default to None, meaning no export is performed.

  Attributes:
    checkpoint: the path to the checkpoint to export.
    destination: the path to where the checkpoint should be exported.
  """
  checkpoint: str = None
  destination: str = None
@dataclasses.dataclass
class MetricsConfig(base_config.Config):
  """Configuration for Metrics.

  Attributes:
    accuracy: Whether or not to track accuracy as a Callback. Defaults to None.
    top_5: Whether or not to track top_5_accuracy as a Callback. Defaults to
      None.
  """
  accuracy: bool = None
  top_5: bool = None
@dataclasses.dataclass
class TrainConfig(base_config.Config):
  """Configuration for training.

  Attributes:
    resume_checkpoint: Whether or not to enable load checkpoint loading.
      Defaults to None.
    epochs: The number of training epochs to run. Defaults to None.
    steps: The number of steps to run per epoch. If None, then this will be
      inferred based on the number of images and batch size. Defaults to None.
    callbacks: An instance of CallbacksConfig.
    metrics: An optional list of metric names to track. Defaults to None.
      (Note: the field is a list of strings, not a MetricsConfig instance.)
    tensorboard: An instance of TensorboardConfig.
  """
  resume_checkpoint: bool = None
  epochs: int = None
  steps: int = None
  callbacks: CallbacksConfig = CallbacksConfig()
  metrics: List[str] = None
  tensorboard: TensorboardConfig = TensorboardConfig()
@dataclasses.dataclass
class EvalConfig(base_config.Config):
  """Configuration for evaluation.

  Attributes:
    epochs_between_evals: The number of train epochs to run between evaluations.
      Defaults to None.
    steps: The number of eval steps to run during evaluation. If None, this will
      be inferred based on the number of images and batch size. Defaults to
      None.
  """
  epochs_between_evals: int = None
  steps: int = None
@dataclasses.dataclass
class LossConfig(base_config.Config):
  """Configuration for Loss.

  Attributes:
    name: The name of the loss. Defaults to None.
    loss_scale: The type of loss scale. Defaults to None.
    label_smoothing: Label smoothing factor to apply to the loss. This
      only applies to 'categorical_cross_entropy'. Defaults to None.
  """
  name: str = None
  loss_scale: str = None
  label_smoothing: float = None
@dataclasses.dataclass
class OptimizerConfig(base_config.Config):
  """Configuration for Optimizers.

  Attributes:
    name: The name of the optimizer. Defaults to None.
    decay: Decay or rho, discounting factor for gradient. Defaults to None.
    epsilon: Small value used to avoid 0 denominator. Defaults to None.
    momentum: Plain momentum constant. Defaults to None.
    nesterov: Whether or not to apply Nesterov momentum. Defaults to None.
    moving_average_decay: The amount of decay to apply. If 0 or None, then
      exponential moving average is not used. Defaults to None.
    lookahead: Whether or not to apply the lookahead optimizer. Defaults to
      None.
    beta_1: The exponential decay rate for the 1st moment estimates. Used in
      the Adam optimizers. Defaults to None.
    beta_2: The exponential decay rate for the 2nd moment estimates. Used in
      the Adam optimizers. Defaults to None.
  """
  name: str = None
  decay: float = None
  # `epsilon` is declared exactly once here; the original re-declared it
  # after `beta_2`, which dataclasses silently collapse into this single
  # field (same type and default), so removing the duplicate is a no-op.
  epsilon: float = None
  momentum: float = None
  nesterov: bool = None
  moving_average_decay: Optional[float] = None
  lookahead: Optional[bool] = None
  beta_1: float = None
  beta_2: float = None
@dataclasses.dataclass
class LearningRateConfig(base_config.Config):
  """Configuration for learning rates.

  Attributes:
    name: The name of the learning rate schedule. Defaults to None.
    initial_lr: The initial learning rate. Defaults to None.
    decay_epochs: The number of decay epochs. Defaults to None.
    decay_rate: The rate of decay. Defaults to None.
    warmup_epochs: The number of warmup epochs. Defaults to None.
    examples_per_epoch: the number of examples in a single epoch.
      Defaults to None.
    boundaries: boundaries used in piecewise constant decay with warmup.
    multipliers: multipliers used in piecewise constant decay with warmup.
    scale_by_batch_size: Scale the learning rate by a fraction of the batch
      size. Set to 0 for no scaling (default).
  """
  name: str = None
  initial_lr: float = None
  decay_epochs: float = None
  decay_rate: float = None
  warmup_epochs: int = None
  examples_per_epoch: int = None
  boundaries: List[int] = None
  multipliers: List[float] = None
  scale_by_batch_size: float = 0.
@dataclasses.dataclass
class ModelConfig(base_config.Config):
  """Configuration for Models.

  Attributes:
    name: The name of the model. Defaults to None.
    model_params: The parameters used to create the model. Defaults to None.
    num_classes: The number of classes in the model. Defaults to None.
    loss: A `LossConfig` instance. Defaults to None.
    optimizer: An `OptimizerConfig` instance. Defaults to None.
  """
  name: str = None
  model_params: Mapping[str, Any] = None
  num_classes: int = None
  loss: LossConfig = None
  optimizer: OptimizerConfig = None
@dataclasses.dataclass
class ExperimentConfig(base_config.Config):
  """Base configuration for an image classification experiment.

  Attributes:
    model_dir: The directory to use when running an experiment.
    model_name: The name of the model to run. Presumably selects a registered
      model configuration — confirm against the trainer that consumes it.
    mode: e.g. 'train_and_eval', 'export'
    runtime: A `RuntimeConfig` instance.
    train_dataset: Train dataset configuration (typed `Any` here; the concrete
      schema is defined by the dataset factory — TODO confirm).
    validation_dataset: Validation dataset configuration (see train_dataset).
    test_dataset: Test dataset configuration (see train_dataset).
    train: A `TrainConfig` instance.
    evaluation: An `EvalConfig` instance.
    model: A `ModelConfig` instance.
    export: An `ExportConfig` instance.
  """
  model_dir: str = None
  model_name: str = None
  mode: str = None
  runtime: RuntimeConfig = None
  train_dataset: Any = None
  validation_dataset: Any = None
  test_dataset: Any = None
  train: TrainConfig = None
  evaluation: EvalConfig = None
  model: ModelConfig = None
  export: ExportConfig = None
| 32.125 | 80 | 0.731101 |
dd88754ab4da30218ec590d26fe703b13b673051 | 6,289 | py | Python | peppercompiler/design/spurious_design.py | DNA-and-Natural-Algorithms-Group/peppercompiler | effbcdedfb17534300fb3504a552e46c1ead41e4 | [
"MIT"
] | 3 | 2019-06-10T18:44:03.000Z | 2021-11-17T10:57:09.000Z | peppercompiler/design/spurious_design.py | DNA-and-Natural-Algorithms-Group/peppercompiler | effbcdedfb17534300fb3504a552e46c1ead41e4 | [
"MIT"
] | 2 | 2017-12-15T01:09:49.000Z | 2021-03-25T20:42:23.000Z | peppercompiler/design/spurious_design.py | DNA-and-Natural-Algorithms-Group/peppercompiler | effbcdedfb17534300fb3504a552e46c1ead41e4 | [
"MIT"
] | 4 | 2017-08-21T03:32:51.000Z | 2019-10-18T04:09:38.000Z | #!/usr/bin/env python
"""
Designs sequences using Winfree's SpuriousDesign/spuriousSSM algorithm.
Uses PIL input and Zadeh's .mfe output formats for compatibility with compiler.
"""
import os
import string
import subprocess
import sys
from .constraint_load import Convert
from ..utils import error
DEBUG = False
def print_list(xs, filename, format):
    """Print each element of `xs` to `filename` using the given %-format.

    Args:
        xs: iterable of items (each is formatted as `format % x`).
        filename: path of the output file (overwritten).
        format: %-style format string applied per element, e.g. "%d ".
            (Parameter name kept for API compatibility, although it
            shadows the `format` builtin.)
    """
    # `with` guarantees the file is closed even if formatting raises,
    # fixing the handle leak in the original open()/close() pair.
    with open(filename, "w") as f:
        for x in xs:
            f.write(format % x)
def design(basename, infilename, outfilename, cleanup=True, verbose=False, reuse=False, just_files=False, struct_orient=False, old_output=False, tempname=None, extra_pars="", findmfe=True, spuriousbinary="spuriousSSM"):
    """Design sequences with spuriousSSM and post-process the results.

    Writes .st/.wc/.eq constraint files derived from the PIL input, invokes
    the spuriousSSM binary (teeing its stdout into a .sp file), and converts
    the final sequence line into the .mfe output format.

    Args:
        basename: base path for temporary files when `tempname` is None.
        infilename: PIL input file describing the design.
        outfilename: destination for the processed results.
        cleanup: delete the temporary .st/.wc/.eq/.sp files when done.
        verbose: echo spuriousSSM output to stdout while it runs.
        reuse: unsupported; raises NotImplementedError if set.
        just_files: stop after writing the constraint files.
        struct_orient: list constraints structure-oriented instead of
            strand-oriented.
        old_output: reuse an existing .sp output instead of running the binary.
        tempname: base path for temporary files (defaults to `basename`).
        extra_pars: extra command-line parameters passed to spuriousSSM.
        findmfe: forwarded to Convert.output().
        spuriousbinary: name/path of the spuriousSSM executable.
    """
    if not tempname:
        tempname = basename
    stname = tempname + ".st"
    wcname = tempname + ".wc"
    eqname = tempname + ".eq"
    sp_outname = tempname + ".sp"

    if reuse:
        # Reading previously-written constraint files is not supported; the
        # legacy implementation that (unreachably) followed this raise has
        # been removed.
        raise NotImplementedError
    else:
        # Prepare the constraints
        print("Reading design from file '%s'" % infilename)
        print("Preparing constraints files for spuriousSSM.")
        convert = Convert(infilename, struct_orient)
        eq, wc, st = convert.get_constraints()
        # Convert specifications into spuriousSSM's numeric/character forms.
        def eq_map(x):
            # Equality classes become 1-based; 0 means unconstrained.
            return x + 1 if x != None else 0
        eq = list(map(eq_map, eq))
        def wc_map(x):
            # Complement partners become 1-based; -1 means no partner.
            return x + 1 if x != None else -1
        wc = list(map(wc_map, wc))
        def st_map(x):
            # Sequence template: blank out unconstrained positions.
            return x if x != None else " "
        st = list(map(st_map, st))
        # Print them to files
        print_list(eq, eqname, "%d ")
        print_list(wc, wcname, "%d ")
        print_list(st, stname, "%c")
        # '_' in the template marks an unsatisfiable position.
        if "_" in st:
            print("System over-constrained.")
            sys.exit(1)

    # Run spuriousSSM or read old data.
    # TODO: take care of prevents.
    if not old_output:
        if verbose:
            quiet = "quiet=ALL"
        else:
            quiet = "quiet=TRUE"
        command = "%s score=automatic template=%s wc=%s eq=%s %s %s" % (spuriousbinary, stname, wcname, eqname, extra_pars, quiet)
        print(command)
        if just_files:
            # Fix: return before opening the .sp file, so --just-files no
            # longer leaks an open handle to a spurious empty output file.
            return
        # Fix: `with` closes the .sp file even if the subprocess loop or
        # error() raises.
        with open(sp_outname, 'wb') as spo:
            spurious_proc = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
            # Tee the binary's stdout into the .sp file (and our stdout if
            # verbose), line by line as it is produced.
            data = spurious_proc.stdout.readline()
            while data:
                if verbose:
                    sys.stdout.write(data.decode())
                spo.write(data)
                data = spurious_proc.stdout.readline()
            if spurious_proc.wait() != 0:
                error("SpuriousSSM failed with return code {}. Output is in {}.".format(spurious_proc.returncode, sp_outname))
    else:
        print("Loading old spuriousSSM output from '%s'" % sp_outname)
        assert os.path.isfile(sp_outname), "Error: requested --use-old-output, but file '%s' doesn't exist" % sp_outname

    # Load results (fix: close the file handle instead of leaking it).
    with open(sp_outname, "r") as f:
        nts = f.read()
    # The file has all sorts of runtime info; the final sequences are stored
    # on the last full (newline-terminated) line.
    nts = nts.split("\n")[-2]

    print("Processing results of spuriousSSM.")
    convert.process_results(nts)
    convert.output(outfilename, findmfe=findmfe)
    print("Done, results saved to '%s'" % outfilename)

    if cleanup:
        print("Deleting temporary files")
        os.remove(stname)
        os.remove(wcname)
        os.remove(eqname)
        os.remove(sp_outname)
def main():
    """Command-line entry point: parse options, locate the input .pil file,
    and hand everything off to design()."""
    import re
    from optparse import OptionParser
    from .find_file import find_file, BadFilename
    # Build the option parser; every flag maps directly onto a design() argument.
    usage = "usage: %prog [options] infilename [spuriousSSM_parameters ...]"
    cli = OptionParser(usage=usage)
    cli.set_defaults(verbose=False, struct_orient=False, cleanup=True, old_output=False, reuse=False, just_files=False)
    cli.add_option("-v", "--verbose", action="store_true", dest="verbose", help="Verbose output from spuriousSSM")
    cli.add_option("-q", "--quiet", action="store_false", dest="verbose", help="No output from spuriousSSM [Default]")
    cli.add_option("-o", "--output", help="Output file [defaults to BASENAME.mfe]", metavar="FILE")
    cli.add_option("-t", "--tempname", help="Base name for temporary files (for multiple simultaneous runs)", metavar="TEMPBASE")
    cli.add_option("--strand", action="store_false", dest="struct_orient", help="List constraints in strand-oriented manner [Default]")
    cli.add_option("--struct", action="store_true", dest="struct_orient", help="List constraints in structure-oriented manner")
    cli.add_option("--keep-temp", action="store_false", dest="cleanup", help="Keep temporary files (.st, .wc, .eq, .sp)")
    cli.add_option("--just-files", action="store_true", dest="just_files", help="Just create input files for spuriousSSM")
    cli.add_option("--cleanup", action="store_true", dest="cleanup", help="Remove temporary files after use [Default]")
    cli.add_option("--reuse", action="store_true", help="Reuse the .st, .wc and .eq files if they already exist (Saves time if a session was terminated, or if you want to rerun a design)")
    cli.add_option("--use-old-output", action="store_true", dest="old_output", help="Use old spurious output if it already exists (Useful primarily if spuriousSSM finished successfully, but spurious_design crashed)")
    options, args = cli.parse_args()
    if not args:
        cli.error("missing required argument infilename")
    try:
        infilename = find_file(args[0], ".pil")
    except BadFilename:
        cli.error("File not found: neither %s nor %s.pil exist. Please supply correct infilename." % (args[0], args[0]))
    # Infer the basename by stripping a trailing ".pil" from the filename, if present.
    basename = infilename
    matched = re.match(r"(.*)\.pil\Z", basename)
    if matched:
        basename = matched.group(1)
    # Default output name is derived from the basename.
    if not options.output:
        options.output = basename + ".mfe"
    # Everything after the input filename is forwarded verbatim to spuriousSSM.
    spurious_pars = " ".join(args[1:])
    design(basename, infilename, options.output, options.cleanup, options.verbose, options.reuse, options.just_files, options.struct_orient, options.old_output, options.tempname, spurious_pars)
| 37.434524 | 219 | 0.682461 |
fafcf727c8c4fb6b54aa6e5ceb3f87cb6844313e | 71 | py | Python | pythonium/debugger.py | cacrespo/pythonium | 74cc5d4333212adfb6eedade8fcd8dfe86d221d5 | [
"MIT"
] | 13 | 2020-12-05T23:05:39.000Z | 2022-03-29T22:43:33.000Z | pythonium/debugger.py | cacrespo/pythonium | 74cc5d4333212adfb6eedade8fcd8dfe86d221d5 | [
"MIT"
] | 40 | 2020-10-28T22:16:18.000Z | 2022-03-26T22:50:37.000Z | pythonium/debugger.py | cacrespo/pythonium | 74cc5d4333212adfb6eedade8fcd8dfe86d221d5 | [
"MIT"
] | 6 | 2022-03-24T18:47:27.000Z | 2022-03-27T16:05:46.000Z | import os
def terminate():
    """Forcibly kill the current process with SIGKILL (does not return).

    Uses os.kill directly instead of shelling out to `kill -9`, which
    avoids spawning a shell; the effect is identical on POSIX systems
    (SIGKILL cannot be caught or ignored).
    """
    import signal  # local import: this module only imports os at top level
    os.kill(os.getpid(), signal.SIGKILL)
| 11.833333 | 41 | 0.591549 |
b6493ee37e9cf46d40197086739a2dea4a9ac655 | 464 | py | Python | bbywebsite.py | elithaxxor/gpu_scraper | 065f33facadaf0bce9e6b5a4b95d1895b340058f | [
"MIT"
] | null | null | null | bbywebsite.py | elithaxxor/gpu_scraper | 065f33facadaf0bce9e6b5a4b95d1895b340058f | [
"MIT"
] | null | null | null | bbywebsite.py | elithaxxor/gpu_scraper | 065f33facadaf0bce9e6b5a4b95d1895b340058f | [
"MIT"
] | null | null | null | URL00= "https://www.bestbuy.com/identity/signin?token=tid%3A0f8e7941-289b-11ec-a2b7-06d188f825e106d188f825e1LEAR"
# Best Buy product pages polled by the GPU-stock scraper.
URL01 = "https://www.bestbuy.com/site/nvidia-geforce-rtx-3070-8gb-gddr6-pci-express-4-0-graphics-card-dark-platinum-and-black/6429442.p?skuId=6429442"  # NVIDIA RTX 3070 FE (SKU 6429442)
URL02= "https://www.bestbuy.com/site/gigabyte-geforce-rtx-3070-8g-gddr6-pci-express-4-0-graphics-card-black/6437912.p?skuId=6437912"  # Gigabyte RTX 3070 (SKU 6437912)
URL = "https://www.bestbuy.com/"  # site root, used as a base URL
# RTX3070LINK1
# RTX3070LINK2
| 46.4 | 150 | 0.784483 |
27b2f7802b306a03eb7383c046b1f1903f6736a2 | 7,763 | py | Python | python/_bonedeform_fix.py | khanghugo/PMX-VMD-Scripting-Tools | bc978e7f8685ba39c2682aed6bb06bbe53f5bb4b | [
"MIT"
] | null | null | null | python/_bonedeform_fix.py | khanghugo/PMX-VMD-Scripting-Tools | bc978e7f8685ba39c2682aed6bb06bbe53f5bb4b | [
"MIT"
] | null | null | null | python/_bonedeform_fix.py | khanghugo/PMX-VMD-Scripting-Tools | bc978e7f8685ba39c2682aed6bb06bbe53f5bb4b | [
"MIT"
] | null | null | null | # Nuthouse01 - 08/24/2020 - v5.00
# This code is free to use and re-distribute, but I cannot be held responsible for damages that it may or may not cause.
#####################
# second, wrap custom imports with a try-except to catch it if files are missing
try:
from . import nuthouse01_core as core
from . import nuthouse01_pmx_parser as pmxlib
from . import nuthouse01_pmx_struct as pmxstruct
except ImportError as eee:
try:
import nuthouse01_core as core
import nuthouse01_pmx_parser as pmxlib
import nuthouse01_pmx_struct as pmxstruct
except ImportError as eee:
print(eee.__class__.__name__, eee)
print("ERROR: failed to import some of the necessary files, all my scripts must be together in the same folder!")
print("...press ENTER to exit...")
input()
exit()
core = pmxlib = pmxstruct = None
# when debug=True, disable the catchall try-except block. this means the full stack trace gets printed when it crashes,
# but if launched in a new window it exits immediately so you can't read it.
DEBUG = False
# Long-form help text printed by showhelp(); explains what this script fixes.
helptext = '''====================
bonedeform_fix:
This function fixes bone deform order issues. This frequently occurs with "finger curl"-type bones.
Ensure that each bone will have its position/rotation calculated after all bones it inherits from.
This can usually be fixed by reordering bones but this script fixes it by modifying the bone deform layers instead.
Specifically this looks at parents, parial-inherit, and IK target/chains, and ensures that either the downstream bone is lower in the list or has a higher deform layer than its parents.
'''
# One-line summary of input/output filenames, printed by showprompt().
iotext = '''Inputs: PMX file "[model].pmx"\nOutputs: PMX file "[model]_bonedeform.pmx"
'''
def showhelp():
	"""Print the long-form help text explaining the purpose of this script."""
	# print info to explain the purpose of this file
	core.MY_PRINT_FUNC(helptext)
def showprompt():
	"""Prompt the user for a PMX filename, load the model, and return (pmx, filename)."""
	# print info to explain what inputs/outputs it needs/creates
	core.MY_PRINT_FUNC(iotext)
	# prompt PMX name
	core.MY_PRINT_FUNC("Please enter name of PMX model file:")
	input_filename_pmx = core.prompt_user_filename(".pmx")
	pmx = pmxlib.read_pmx(input_filename_pmx, moreinfo=True)
	return pmx, input_filename_pmx
def bonedeform_fix(pmx: pmxstruct.Pmx, moreinfo=False):
	"""Raise bone deform layers until every bone deforms after everything it depends on.

	Dependencies considered: parent bones, partial-inherit sources, and IK
	target/chain relationships.  Runs a fixpoint loop, bumping the deform
	layer of any bone that would deform too early; gives up after 1000
	iterations, which indicates a recursive inheritance relationship.

	:param pmx: parsed PMX model object (modified in place on success)
	:param moreinfo: if True, print each bone whose deform layer changed
	:return: tuple (pmx, bool) where the bool is True if anything changed
	"""
	# make a parallel list of the deform layers for each bone so I can work there
	# if I encounter a recursive relationship I will have not touched the actual PMX and can err and return it unchanged
	deforms = [p.deform_layer for p in pmx.bones]
	# make a list of the "ik master" for each bone
	# it is possible for a bone to be controlled by multiple IK masters, actually every foot bone of every model is this way
	ikmasters = [set() for x in pmx.bones]
	for d,bone in enumerate(pmx.bones):
		# find IK bones
		if bone.has_ik:
			# target uses me as master
			ikmasters[bone.ik_target_idx].add(d)
			for link in bone.ik_links:
				# links use me as master
				ikmasters[link.idx].add(d)
	modified_bones = set()
	# ASK: does "me" deform after "parent"?
	def good_deform_relationship(me_idx, parent_idx):
		# anything that inherits from an IKCHAIN bone has to be >= that bone's ik master, EXCEPT for bones actually in that ik group
		# if parent has a master AND (parent master != my master): parent=max(parent,master), but need to expand this for sets:
		# if parent has a master AND no overlap between my master and parent master: parent=max(parent,master)=master cuz master >= parent
		# else: parent=parent
		if ikmasters[parent_idx]:
			# is me in the IK group of the parent? me is the ikmaster or me shares an ikmaster with parent
			# if this IS in the ik group then DON'T overwrite parent_idx
			if not (me_idx in ikmasters[parent_idx] or ikmasters[me_idx].intersection(ikmasters[parent_idx])):
				l = list(ikmasters[parent_idx]) # turn set into list
				l.sort() # sort by bone order, tiebreaker
				l.sort(key=lambda x: deforms[x]) # sort by deform level, primary sort
				parent_idx = l[-1] # this means the parent is the last-deforming master of any masters of the bone
		# "after" means idx>source and layer >= source or idx<source and layer > source
		# note: if somehow me_idx == parent_idx this returns true to prevent infinite looping
		if me_idx < parent_idx:
			if deforms[me_idx] > deforms[parent_idx]:
				return True
		else: # if me_idx >= parent_idx
			if deforms[me_idx] >= deforms[parent_idx]:
				return True
		return False
	# loop until nothing changes, or until 1000 iterations (indicates recursive relationship)
	loops = 0
	while loops < 1000:
		loops += 1
		has_changed = False
		for d,bone in enumerate(pmx.bones):
			# decide if this bone has a good deform layer!
			is_good = True
			# each bone must deform after its parent
			if bone.parent_idx != -1: # -1 is not a valid parent to check
				is_good &= good_deform_relationship(d, bone.parent_idx)
			# each bone must deform after its partial inherit source, if it uses it
			if (bone.inherit_trans or bone.inherit_rot) and bone.inherit_ratio != 0 and bone.inherit_parent_idx != -1:
				is_good &= good_deform_relationship(d, bone.inherit_parent_idx)
			# each ik bone must deform after its target and IK chain
			if bone.has_ik:
				# target
				is_good &= good_deform_relationship(d, bone.ik_target_idx)
				for link in bone.ik_links:
					# links
					is_good &= good_deform_relationship(d, link.idx)
			# if the relationship is NOT good, then raise the deform layer of this bone
			if not is_good:
				has_changed = True
				modified_bones.add(d)
				deforms[d] += 1
			pass # end for-loop
		# this is the while-loop exit condition
		if not has_changed:
			break
		pass # end while-loop
	# did it break because of recursion error?
	if loops == 1000:
		# if yes, warn & return without changes
		core.MY_PRINT_FUNC("ERROR: recursive inheritance relationship among bones!! You must manually investigate and resolve this issue.")
		suspects = [i for i,de in enumerate(deforms) if de > 50]
		core.MY_PRINT_FUNC("Suspicious bones: " + str(suspects))
		core.MY_PRINT_FUNC("Bone deform order not changed")
		return pmx, False
	if not modified_bones:
		core.MY_PRINT_FUNC("No changes are required")
		return pmx, False
	# if something did change,
	if moreinfo:
		deforms_orig = [p.deform_layer for p in pmx.bones]
		for d, (o, n) in enumerate(zip(deforms_orig, deforms)):
			if o != n:
				core.MY_PRINT_FUNC("bone #{:<3} JP='{}' / EN='{}', deform: {} --> {}".format(
					d, pmx.bones[d].name_jp, pmx.bones[d].name_en, o, n))
	core.MY_PRINT_FUNC("Modified deform order for {} / {} = {:.1%} bones".format(
		len(modified_bones), len(pmx.bones), len(modified_bones) / len(pmx.bones)))
	# now actually apply the changes stored in deforms
	for d,v in enumerate(deforms):
		pmx.bones[d].deform_layer = v
	return pmx, True
def end(pmx, input_filename_pmx):
	"""Write the modified model out as '<input>_bonedeform.pmx' (never overwrites)."""
	# derive the output name from the input name, then bump it until it is unused
	out_name = core.get_unused_file_name(input_filename_pmx[0:-4] + "_bonedeform.pmx")
	pmxlib.write_pmx(out_name, pmx, moreinfo=True)
	return None
def main():
	"""Interactive flow: show help, load a PMX, fix deform order, save if changed."""
	showhelp()
	pmx, name = showprompt()
	pmx, is_changed = bonedeform_fix(pmx)
	# only write an output file if something was actually modified
	if is_changed:
		end(pmx, name)
	core.pause_and_quit("Done with everything! Goodbye!")
if __name__ == '__main__':
	core.MY_PRINT_FUNC("Nuthouse01 - 08/24/2020 - v5.00")
	if DEBUG:
		# debug mode: let exceptions propagate so the full traceback is visible
		main()
	else:
		try:
			main()
		except (KeyboardInterrupt, SystemExit):
			# this is normal and expected, do nothing and die normally
			pass
		except Exception as ee:
			# if an unexpected error occurs, catch it and print it and call pause_and_quit so the window stays open for a bit
			core.MY_PRINT_FUNC(ee)
			core.pause_and_quit("ERROR: something truly strange and unexpected has occurred, sorry, good luck figuring out what tho")
| 40.222798 | 185 | 0.729357 |
b68cfb7d6f6e33e06a0308f040cc345a1e2b8a8a | 8,284 | py | Python | bot.py | lukebemish/DiceRollApp | b8d037321e540d47ca4e0b34ded3d6f8f2a08cb4 | [
"MIT"
] | null | null | null | bot.py | lukebemish/DiceRollApp | b8d037321e540d47ca4e0b34ded3d6f8f2a08cb4 | [
"MIT"
] | null | null | null | bot.py | lukebemish/DiceRollApp | b8d037321e540d47ca4e0b34ded3d6f8f2a08cb4 | [
"MIT"
] | null | null | null | # bot.py
import os
import discord
from dotenv import load_dotenv
import re
import random
# Load the bot token from the local .envvars file (not committed to VCS).
load_dotenv(dotenv_path=".envvars")
TOKEN = os.getenv('DISCORD_TOKEN')
client = discord.Client()
# Error-message prefix; the offending token plus a closing quote is appended.
inc_form_mess = 'Input is in incorrect format, somewhere near "'
# Command prefixes: "!r " = normal roll, "!rd " = damage roll (dice can explode).
listen_val = "!r "
explode_val = "!rd "
def getRealName(event):
    """Return the author's display name: the guild nickname if set, else username.

    DM messages have a User (no .nick attribute) instead of a Member; using
    getattr avoids the original bare try/except that swallowed every error.
    """
    nick = getattr(event.author, 'nick', None)
    return nick if nick is not None else event.author.name
@client.event
async def on_ready():
    # fires once the gateway connection is established and the bot is logged in
    print(f'{client.user} has connected to Discord!')
@client.event
async def on_message(message):
    """Handle chat messages in the #rolls channel.

    `!rhelp` DMs usage info to the author.  `!r <spec>` rolls dice; `!rd <spec>`
    rolls "exploding" dice (a max-face roll is rerolled and added).  The spec is
    an optional advantage/disadvantage word, optional flavor text, then dice
    terms like `2d10` and integer modifiers joined by `+`/`-`.

    Fixes over the original version:
    - "!r adv" with nothing after it no longer raises IndexError
    - plain integer modifiers (the "2" in the documented "!r 2d10+2") are no
      longer rejected as a format error
    """
    if message.author == client.user:
        # never react to our own messages
        return
    if message.channel.name != "rolls":
        return
    if (message.content.startswith("!rhelp")):
        await message.author.create_dm()
        await message.author.dm_channel.send("To roll dice, type something like `!r 2d10+2`")
        await message.author.dm_channel.send("Use `+`, `-`, `adv`, `dis`, `advantage`, or `disadvantage` to add advantage or disadvantage, as in `!r + d20 + 2`")
        await message.author.dm_channel.send("For damage rolls, use `!rd` instead or `!r`, which allows for explosions")
    if (message.content.startswith(listen_val) or (message.content.startswith(explode_val))):
        rest = ""
        toBreak = False
        # a bare "!r " / "!rd " with nothing after the prefix is ignored
        if message.content.startswith(listen_val) and (len(message.content) <= len(listen_val)):
            toBreak = True
        if message.content.startswith(explode_val) and (len(message.content) <= len(explode_val)):
            toBreak = True
        if (not toBreak):
            isGoodToCalc = True
            explodes = False
            if message.content.startswith(listen_val):
                rest = message.content[len(listen_val):]
            if message.content.startswith(explode_val):
                rest = message.content[len(explode_val):]
                explodes = True
            rest = re.sub(' +', ' ', rest)
            splits = rest.split(" ", 1)
            allRest = ""
            adv = False
            dis = False
            # the first word may be an advantage/disadvantage marker
            if (splits[0].strip() == "adv" or splits[0].strip() == "+" or splits[0].strip() == "advantage"):
                adv = True
                # BUGFIX: "!r adv" alone used to raise IndexError here
                allRest = splits[1].strip() if len(splits) > 1 else ""
            elif (splits[0].strip() == "dis" or splits[0].strip() == "-" or splits[0].strip() == "disadvantage"):
                dis = True
                allRest = splits[1].strip() if len(splits) > 1 else ""
            else:
                allRest = rest
            allRest = re.sub(' +', ' ', allRest)
            if (len(allRest) > 0):
                # tokenize: '+' and space end a token; '-' ends a token and
                # starts a new negative one
                strParts = []
                currentString = ""
                for ch in allRest:
                    if (ch == "+"):
                        strParts.append(currentString)
                        currentString = ""
                    elif (ch == " "):
                        if (currentString != ""):
                            strParts.append(currentString)
                            currentString = ""
                    elif (ch == "-"):
                        strParts.append(currentString)
                        currentString = "-"
                    else:
                        currentString += ch
                strParts.append(currentString)
                tVal = 0
                rolls = []
                isInText = True
                text = ""
                isCrit = []
                isCritFail = []
                for p in strParts:
                    val = 0
                    # leading tokens that are neither dice nor numbers are flavor text
                    if (not bool(re.match("([0-9]*d[0-9]+)|((\\+)*[0-9]+)|((\\-)*[0-9]+)", p)) and isInText):
                        text += p + " "
                    else:
                        isInText = False
                        text = text.strip()
                        isNumber = True
                        try:
                            val = int(p)
                        except ValueError:
                            val = 0
                            isNumber = False
                        if bool(re.match("([0-9])*d([0-9])+", p)):
                            # dice term: [count]d<sides>
                            ps = p.split("d")
                            n = 1
                            if (ps[0] == ""):
                                n = 1
                            else:
                                n = int(ps[0])
                            s = int(ps[1])
                            if (n > 0 and s > 0):
                                t = 0
                                for _ in range(n):
                                    thisroll = random.randint(1, s)
                                    if adv:
                                        roll2 = random.randint(1, s)
                                        if (roll2 > thisroll):
                                            thisroll = roll2
                                    if dis:
                                        roll2 = random.randint(1, s)
                                        if (roll2 < thisroll):
                                            thisroll = roll2
                                    if (explodes and (thisroll == s)):
                                        # exploding dice: keep rerolling while the max face comes up
                                        keepGoing = True
                                        while keepGoing:
                                            roll2 = random.randint(1, s)
                                            thisroll += roll2
                                            if (roll2 != s):
                                                keepGoing = False
                                    t += thisroll
                                    rolls.append(thisroll)
                                    # nat 20 / nat 1 highlighting only applies to d20s
                                    if (s == 20 and thisroll == 20):
                                        isCrit.append(True)
                                        isCritFail.append(False)
                                    elif (s == 20 and thisroll == 1):
                                        isCritFail.append(True)
                                        isCrit.append(False)
                                    else:
                                        isCritFail.append(False)
                                        isCrit.append(False)
                                val += t
                            else:
                                isGoodToCalc = False
                                await message.channel.send("Cannot roll dice containing 0s")
                        elif not isNumber:
                            # BUGFIX: only report a format error when the token is
                            # neither a dice term nor a plain integer modifier
                            isGoodToCalc = False
                            await message.channel.send(inc_form_mess + p + '"')
                    tVal += val
                if (isGoodToCalc):
                    uName = getRealName(message)
                    if (not explodes):
                        tTotal = "*" + uName + " roll"
                    else:
                        tTotal = "*" + uName + " roll (exploding)"
                    if text != "":
                        tTotal += " for " + text
                    if adv:
                        tTotal += " (with adv)"
                    elif dis:
                        tTotal += " (with dis)"
                    tTotal += ":* " + str(tVal)
                    # individual rolls, with crits/crit-fails bolded+underlined
                    rollT = "*["
                    for i in range(len(rolls)):
                        if isCrit[i] or isCritFail[i]:
                            rollT += "__**"
                        rollT += (str(rolls[i]))
                        if isCrit[i] or isCritFail[i]:
                            rollT += "**__"
                        rollT += ", "
                    tTotal += " " + rollT[:-2] + "]*"
                    await message.channel.send(tTotal)
            else:
                isGoodToCalc = False
                await message.channel.send(inc_form_mess + message.content + '"')
# blocks forever, running the bot's event loop
client.run(TOKEN)
| 42.482051 | 162 | 0.360092 |
458a33783bf849858950d34a0188759ba629bc6a | 24,427 | py | Python | seqpos/lib/python2.7/site-packages/hgext/largefiles/lfutil.py | guanjue/seqpos | ab9308ad128547ca968a1d944490710e583703bc | [
"MIT"
] | null | null | null | seqpos/lib/python2.7/site-packages/hgext/largefiles/lfutil.py | guanjue/seqpos | ab9308ad128547ca968a1d944490710e583703bc | [
"MIT"
] | null | null | null | seqpos/lib/python2.7/site-packages/hgext/largefiles/lfutil.py | guanjue/seqpos | ab9308ad128547ca968a1d944490710e583703bc | [
"MIT"
] | null | null | null | # Copyright 2009-2010 Gregory P. Ward
# Copyright 2009-2010 Intelerad Medical Systems Incorporated
# Copyright 2010-2011 Fog Creek Software
# Copyright 2010-2011 Unity Technologies
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
'''largefiles utility code: must not import other modules in this package.'''
from __future__ import absolute_import
import copy
import hashlib
import os
import stat
from mercurial.i18n import _
from mercurial.node import hex
from mercurial import (
dirstate,
encoding,
error,
httpconnection,
match as matchmod,
node,
pycompat,
scmutil,
sparse,
util,
vfs as vfsmod,
)
# Working-copy directory that holds the standin files, and the name of the
# subdirectory (under .hg/) where actual largefile contents are stored.
shortname = '.hglf'
shortnameslash = shortname + '/'
longname = 'largefiles'
# -- Private worker functions ------------------------------------------
def getminsize(ui, assumelfiles, opt, default=10):
    """Resolve the minimum largefile size from `opt` or the configuration.

    `opt` wins when set; otherwise, when `assumelfiles` is true, the
    `largefiles.minsize` config value is consulted (defaulting to `default`).
    Aborts when the value is not a number, or no value could be determined.
    """
    size = opt
    if assumelfiles and not size:
        # no explicit value given: fall back to the repo/user configuration
        size = ui.config(longname, 'minsize', default=default)
    if size:
        try:
            size = float(size)
        except ValueError:
            raise error.Abort(_('largefiles: size must be number (not %s)\n')
                              % size)
    if size is None:
        raise error.Abort(_('minimum size for largefiles must be specified'))
    return size
def link(src, dest):
    """Hardlink src to dest; on failure, fall back to an atomic chunked copy."""
    # the destination directory may not exist yet
    util.makedirs(os.path.dirname(dest))
    try:
        util.oslink(src, dest)
    except OSError:
        # hardlink failed (cross-device link, unsupported FS, ...): copy the
        # data instead, atomically so readers never see a partial file
        with open(src, 'rb') as infp, util.atomictempfile(dest) as outfp:
            for block in util.filechunkiter(infp):
                outfp.write(block)
        # preserve the source's permission bits on the copy
        os.chmod(dest, os.stat(src).st_mode)
def usercachepath(ui, hash):
    '''Return the correct location in the "global" largefiles cache for a file
    with the given hash.

    This cache is used for sharing of largefiles across repositories - both
    to preserve download bandwidth and storage space.'''
    # the cache layout is flat: one file per hash, directly under the cache dir
    return os.path.join(_usercachedir(ui), hash)
def _usercachedir(ui, name=longname):
    '''Return the location of the "global" largefiles cache.'''
    # an explicitly configured path always wins
    configured = ui.configpath(name, 'usercache')
    if configured:
        return configured
    # otherwise pick the conventional per-platform cache directory
    if pycompat.iswindows:
        appdata = encoding.environ.get(
            'LOCALAPPDATA', encoding.environ.get('APPDATA'))
        if appdata:
            return os.path.join(appdata, name)
    elif pycompat.isdarwin:
        home = encoding.environ.get('HOME')
        if home:
            return os.path.join(home, 'Library', 'Caches', name)
    elif pycompat.isposix:
        # honor the XDG base-directory spec, then fall back to ~/.cache
        xdgcache = encoding.environ.get('XDG_CACHE_HOME')
        if xdgcache:
            return os.path.join(xdgcache, name)
        home = encoding.environ.get('HOME')
        if home:
            return os.path.join(home, '.cache', name)
    else:
        raise error.Abort(_('unknown operating system: %s\n')
                          % pycompat.osname)
    raise error.Abort(_('unknown %s usercache location') % name)
def inusercache(ui, hash):
    """Report whether the user cache already holds a file with `hash`."""
    return os.path.exists(usercachepath(ui, hash))
def findfile(repo, hash):
    '''Return store path of the largefile with the specified hash.

    As a side effect, the file might be linked from user cache.
    Return None if the file can't be found locally.'''
    path, exists = findstorepath(repo, hash)
    if exists:
        repo.ui.note(_('found %s in store\n') % hash)
        return path
    elif inusercache(repo.ui, hash):
        repo.ui.note(_('found %s in system cache\n') % hash)
        # hardlink/copy from the shared user cache into this repo's store
        path = storepath(repo, hash)
        link(usercachepath(repo.ui, hash), path)
        return path
    return None
class largefilesdirstate(dirstate.dirstate):
    """Dirstate subclass tracking largefiles; all paths are normalized
    through unixpath() before being handed to the base class, so callers
    may pass OS-native paths.  Persisted by openlfdirstate() under
    .hg/largefiles/ rather than .hg/.
    """
    def __getitem__(self, key):
        return super(largefilesdirstate, self).__getitem__(unixpath(key))
    def normal(self, f):
        return super(largefilesdirstate, self).normal(unixpath(f))
    def remove(self, f):
        return super(largefilesdirstate, self).remove(unixpath(f))
    def add(self, f):
        return super(largefilesdirstate, self).add(unixpath(f))
    def drop(self, f):
        return super(largefilesdirstate, self).drop(unixpath(f))
    def forget(self, f):
        return super(largefilesdirstate, self).forget(unixpath(f))
    def normallookup(self, f):
        return super(largefilesdirstate, self).normallookup(unixpath(f))
    def _ignore(self, f):
        # largefiles are never considered ignored
        return False
    def write(self, tr=False):
        # (1) disable PENDING mode always
        # (lfdirstate isn't yet managed as a part of the transaction)
        # (2) avoid develwarn 'use dirstate.write with ....'
        super(largefilesdirstate, self).write(None)
def openlfdirstate(ui, repo, create=True):
    '''
    Return a dirstate object that tracks largefiles: i.e. its root is
    the repo root, but it is saved in .hg/largefiles/dirstate.
    '''
    vfs = repo.vfs
    lfstoredir = longname
    opener = vfsmod.vfs(vfs.join(lfstoredir))
    lfdirstate = largefilesdirstate(opener, ui, repo.root,
                                    repo.dirstate._validate,
                                    lambda: sparse.matcher(repo))
    # If the largefiles dirstate does not exist, populate and create
    # it. This ensures that we create it on the first meaningful
    # largefiles operation in a new clone.
    if create and not vfs.exists(vfs.join(lfstoredir, 'dirstate')):
        matcher = getstandinmatcher(repo)
        standins = repo.dirstate.walk(matcher, subrepos=[], unknown=False,
                                      ignored=False)
        if len(standins) > 0:
            vfs.makedirs(lfstoredir)
        # seed the new lfdirstate from the standins tracked by the main
        # dirstate; normallookup forces a re-check on the next status
        for standin in standins:
            lfile = splitstandin(standin)
            lfdirstate.normallookup(lfile)
    return lfdirstate
def lfdirstatestatus(lfdirstate, repo):
    """Return the dirstate status of all largefiles relative to '.'.

    Files the dirstate is unsure about are resolved by comparing the
    standin's recorded hash with the current hash of the working-copy file;
    confirmed-clean files are marked normal in `lfdirstate` as a side effect.
    """
    pctx = repo['.']
    match = matchmod.always(repo.root, repo.getcwd())
    unsure, s = lfdirstate.status(match, subrepos=[], ignored=False,
                                  clean=False, unknown=False)
    modified, clean = s.modified, s.clean
    for lfile in unsure:
        try:
            fctx = pctx[standin(lfile)]
        except LookupError:
            fctx = None
        if not fctx or readasstandin(fctx) != hashfile(repo.wjoin(lfile)):
            modified.append(lfile)
        else:
            clean.append(lfile)
            lfdirstate.normal(lfile)
    return s
def listlfiles(repo, rev=None, matcher=None):
    '''return a list of largefiles in the working copy or the
    specified changeset (paths are returned without the standin prefix)'''
    if matcher is None:
        matcher = getstandinmatcher(repo)
    # ignore unknown files in working directory
    return [splitstandin(f)
            for f in repo[rev].walk(matcher)
            if rev is not None or repo.dirstate[f] != '?']
def instore(repo, hash, forcelocal=False):
    '''Return true if a largefile with the given hash exists in the store

    With `forcelocal`, only this repository's own store is consulted, even
    when the repo shares its store (see storepath()).
    '''
    return os.path.exists(storepath(repo, hash, forcelocal))
def storepath(repo, hash, forcelocal=False):
    '''Return the correct location in the repository largefiles store for a
    file with the given hash.'''
    # a shared repo keeps largefiles in the share source's store, unless
    # forcelocal requests this repo's own store
    if not forcelocal and repo.shared():
        return repo.vfs.reljoin(repo.sharedpath, longname, hash)
    return repo.vfs.join(longname, hash)
def findstorepath(repo, hash):
    '''Search through the local store path(s) to find the file for the given
    hash.  If the file is not found, its path in the primary store is returned.
    The return value is a tuple of (path, exists(path)).
    '''
    # For shared repos, the primary store is in the share source.  But for
    # backward compatibility, force a lookup in the local store if it wasn't
    # found in the share source.
    path = storepath(repo, hash, False)
    if instore(repo, hash):
        return (path, True)
    elif repo.shared() and instore(repo, hash, True):
        return storepath(repo, hash, True), True
    return (path, False)
def copyfromcache(repo, hash, filename):
    '''Copy the specified largefile from the repo or system cache to
    filename in the repository. Return true on success or false if the
    file was not found in either cache (which should not happen:
    this is meant to be called only after ensuring that the needed
    largefile exists in the cache).'''
    wvfs = repo.wvfs
    path = findfile(repo, hash)
    if path is None:
        return False
    wvfs.makedirs(wvfs.dirname(wvfs.join(filename)))
    # The write may fail before the file is fully written, but we
    # don't use atomic writes in the working copy.
    with open(path, 'rb') as srcfd, wvfs(filename, 'wb') as destfd:
        gothash = copyandhash(
            util.filechunkiter(srcfd), destfd)
    # verify data integrity: the copied content must hash back to `hash`
    if gothash != hash:
        repo.ui.warn(_('%s: data corruption in %s with hash %s\n')
                     % (filename, path, gothash))
        wvfs.unlink(filename)
        return False
    return True
def copytostore(repo, ctx, file, fstandin):
    """Copy the largefile `file` (hash read from its standin in `ctx`) into
    the repository store, unless the store already has that hash."""
    wvfs = repo.wvfs
    hash = readasstandin(ctx[fstandin])
    if instore(repo, hash):
        return
    if wvfs.exists(file):
        copytostoreabsolute(repo, wvfs.join(file), hash)
    else:
        repo.ui.warn(_("%s: largefile %s not available from local store\n") %
                     (file, hash))
def copyalltostore(repo, node):
    '''Copy all largefiles in a given revision to the store'''
    ctx = repo[node]
    for fstandin in ctx.files():
        lfile = splitstandin(fstandin)
        # only act on standins that still exist in this revision's manifest
        if lfile is not None and fstandin in ctx.manifest():
            copytostore(repo, ctx, lfile, fstandin)
def copytostoreabsolute(repo, file, hash):
    """Copy the file at absolute path `file` into the store under `hash`."""
    if inusercache(repo.ui, hash):
        # cheap path: hardlink/copy straight from the user cache
        link(usercachepath(repo.ui, hash), storepath(repo, hash))
    else:
        util.makedirs(os.path.dirname(storepath(repo, hash)))
        # copy atomically so a concurrent reader never sees a partial file
        with open(file, 'rb') as srcf:
            with util.atomictempfile(storepath(repo, hash),
                                     createmode=repo.store.createmode) as dstf:
                for chunk in util.filechunkiter(srcf):
                    dstf.write(chunk)
        linktousercache(repo, hash)
def linktousercache(repo, hash):
    '''Link / copy the largefile with the specified hash from the store
    to the cache.'''
    link(storepath(repo, hash), usercachepath(repo.ui, hash))
def getstandinmatcher(repo, rmatcher=None):
    '''Return a match object that applies rmatcher to the standin directory'''
    wvfs = repo.wvfs
    standindir = shortname
    # no warnings about missing files or directories
    badfn = lambda f, msg: None
    if rmatcher and not rmatcher.always():
        # translate the user's patterns into patterns under the standin dir
        pats = [wvfs.join(standindir, pat) for pat in rmatcher.files()]
        if not pats:
            pats = [wvfs.join(standindir)]
        match = scmutil.match(repo[None], pats, badfn=badfn)
    else:
        # no patterns: relative to repo root
        match = scmutil.match(repo[None], [wvfs.join(standindir)], badfn=badfn)
    return match
def composestandinmatcher(repo, rmatcher):
    '''Return a matcher that accepts standins corresponding to the
    files accepted by rmatcher. Pass the list of files in the matcher
    as the paths specified by the user.'''
    smatcher = getstandinmatcher(repo, rmatcher)
    isstandin = smatcher.matchfn
    def composedmatchfn(f):
        # f is a standin path; also check the corresponding largefile path
        return isstandin(f) and rmatcher.matchfn(splitstandin(f))
    # patch the matcher's matchfn in place rather than building a new matcher
    smatcher.matchfn = composedmatchfn
    return smatcher
def standin(filename):
    '''Return the repo-relative path to the standin for the specified big
    file (e.g. 'foo/bar' -> '.hglf/foo/bar').'''
    # Notes:
    # 1) Some callers want an absolute path, but for instance addlargefiles
    #    needs it repo-relative so it can be passed to repo[None].add().  So
    #    leave it up to the caller to use repo.wjoin() to get an absolute path.
    # 2) Join with '/' because that's what dirstate always uses, even on
    #    Windows. Change existing separator to '/' first in case we are
    #    passed filenames from an external source (like the command line).
    return shortnameslash + util.pconvert(filename)
def isstandin(filename):
    '''Return true if filename is a big file standin. filename must be
    in Mercurial's internal form (slash-separated).'''
    # i.e. the path lives under the '.hglf/' directory
    return filename.startswith(shortnameslash)
def splitstandin(filename):
    """Return the largefile path for a standin path, or None if it is not one.

    Split on '/' because that's what dirstate always uses, even on Windows;
    util.pconvert() normalizes any local separators first, in case the name
    came from an external source (like the command line).
    """
    head, sep, tail = util.pconvert(filename).partition('/')
    if sep and head == shortname:
        return tail
    return None
def updatestandin(repo, lfile, standin):
    """Re-calculate hash value of lfile and write it into standin

    This assumes that "lfutil.standin(lfile) == standin", for efficiency.
    """
    file = repo.wjoin(lfile)
    if repo.wvfs.exists(lfile):
        hash = hashfile(file)
        executable = getexecutable(file)
        writestandin(repo, standin, hash, executable)
    else:
        # the largefile is gone from the working copy: nothing to hash
        raise error.Abort(_('%s: file not found!') % lfile)
def readasstandin(fctx):
    '''read hex hash from given filectx of standin file

    This encapsulates how "standin" data is stored into storage layer.'''
    # a standin's content is just the hex hash (plus surrounding whitespace)
    return fctx.data().strip()
def writestandin(repo, standin, hash, executable):
    '''write hash to <repo.root>/<standin>'''
    # the flag string records whether the real largefile is executable
    flags = 'x' if executable else ''
    repo.wwrite(standin, hash + '\n', flags)
def copyandhash(instream, outfile):
    '''Read bytes from instream (iterable) and write them to outfile,
    computing the SHA-1 hash of the data along the way. Return the hash.'''
    # seed with a bytes literal: hashlib on Python 3 rejects unicode input,
    # so sha1('') would raise TypeError there
    hasher = hashlib.sha1(b'')
    for data in instream:
        hasher.update(data)
        outfile.write(data)
    # hex() here is mercurial.node.hex, not the builtin
    return hex(hasher.digest())
def hashfile(file):
    """Return the hex SHA-1 of `file`'s contents, or '' if it does not exist."""
    if os.path.exists(file):
        with open(file, 'rb') as fp:
            return hexsha1(fp)
    return ''
def getexecutable(filename):
    """Return truthy iff `filename` is executable by user, group, and other.

    The result is truthy/falsy rather than strictly a bool; callers only
    ever test it.
    """
    st_mode = os.stat(filename).st_mode
    # all three execute bits must be set
    return (st_mode & stat.S_IXUSR
            and st_mode & stat.S_IXGRP
            and st_mode & stat.S_IXOTH)
def urljoin(first, second, *arg):
    """Join URL components, ensuring exactly one '/' between each pair."""
    def _glue(left, right):
        # normalize the boundary between the two pieces to a single slash
        left = left if left.endswith('/') else left + '/'
        right = right[1:] if right.startswith('/') else right
        return left + right
    url = first
    for piece in (second,) + arg:
        url = _glue(url, piece)
    return url
def hexsha1(fileobj):
    """hexsha1 returns the hex-encoded sha1 sum of the data in the file-like
    object data"""
    h = hashlib.sha1()
    # stream in chunks so arbitrarily large files use bounded memory
    for chunk in util.filechunkiter(fileobj):
        h.update(chunk)
    # hex() is mercurial.node.hex, not the builtin
    return hex(h.digest())
def httpsendfile(ui, filename):
    # wrap the file (opened binary) in an object usable as an HTTP request body
    return httpconnection.httpsendfile(ui, filename, 'rb')
def unixpath(path):
    '''Return a version of path normalized for use with the lfdirstate.'''
    # normpath collapses '.'/'..' segments; pconvert forces '/' separators
    return util.pconvert(os.path.normpath(path))
def islfilesrepo(repo):
    '''Return true if the repo is a largefile repo.'''
    # fast path: requirement flag set and at least one standin in the store
    if ('largefiles' in repo.requirements and
        any(shortnameslash in f[0] for f in repo.store.datafiles())):
        return True
    # slow path: any entry in the largefiles dirstate (without creating it)
    return any(openlfdirstate(repo.ui, repo, False))
class storeprotonotcapable(Exception):
    """Raised when no store supports any of the requested protocols."""
    def __init__(self, storetypes):
        # the store types that were attempted, for the caller's error message
        self.storetypes = storetypes
def getstandinsstate(repo):
    """Return [(lfile, hash-or-None), ...] for every standin in the dirstate."""
    standins = []
    matcher = getstandinmatcher(repo)
    wctx = repo[None]
    for standin in repo.dirstate.walk(matcher, subrepos=[], unknown=False,
                                      ignored=False):
        lfile = splitstandin(standin)
        try:
            hash = readasstandin(wctx[standin])
        except IOError:
            # standin is tracked but missing from the working directory
            hash = None
        standins.append((lfile, hash))
    return standins
def synclfdirstate(repo, lfdirstate, lfile, normallookup):
    """Copy the dirstate state of lfile's standin onto lfile in lfdirstate.

    With `normallookup`, a clean ('n') standin is still recorded as
    "possibly dirty" so the next status check re-hashes the largefile.
    """
    lfstandin = standin(lfile)
    if lfstandin in repo.dirstate:
        stat = repo.dirstate._map[lfstandin]
        state, mtime = stat[0], stat[3]
    else:
        # standin untracked: treat as unknown
        state, mtime = '?', -1
    if state == 'n':
        if (normallookup or mtime < 0 or
            not repo.wvfs.exists(lfile)):
            # state 'n' doesn't ensure 'clean' in this case
            lfdirstate.normallookup(lfile)
        else:
            lfdirstate.normal(lfile)
    elif state == 'm':
        lfdirstate.normallookup(lfile)
    elif state == 'r':
        lfdirstate.remove(lfile)
    elif state == 'a':
        lfdirstate.add(lfile)
    elif state == '?':
        lfdirstate.drop(lfile)
def markcommitted(orig, ctx, node):
    """Wrapper around markcommitted: sync lfdirstate and fill the store after."""
    repo = ctx.repo()
    orig(node)
    # ATTENTION: "ctx.files()" may differ from "repo[node].files()"
    # because files coming from the 2nd parent are omitted in the latter.
    #
    # The former should be used to get targets of "synclfdirstate",
    # because such files:
    # - are marked as "a" by "patch.patch()" (e.g. via transplant), and
    # - have to be marked as "n" after commit, but
    # - aren't listed in "repo[node].files()"
    lfdirstate = openlfdirstate(repo.ui, repo)
    for f in ctx.files():
        lfile = splitstandin(f)
        if lfile is not None:
            synclfdirstate(repo, lfdirstate, lfile, False)
    lfdirstate.write()
    # As part of committing, copy all of the largefiles into the cache.
    #
    # Using "node" instead of "ctx" implies additional "repo[node]"
    # lookup while copyalltostore(), but can omit redundant check for
    # files coming from the 2nd parent, which should exist in store
    # at merging.
    copyalltostore(repo, node)
def getlfilestoupdate(oldstandins, newstandins):
    '''Return the largefile names whose standins differ between
    ``oldstandins`` and ``newstandins`` (each a list of (lfile, hash)
    pairs), with duplicates removed.

    The original implementation tested membership with a linear scan of
    the result list (O(n^2)); a companion ``seen`` set makes the dedupe
    O(n) while preserving the exact output order.
    '''
    changedstandins = set(oldstandins).symmetric_difference(newstandins)
    filelist = []
    seen = set()
    for f in changedstandins:
        if f[0] not in seen:
            seen.add(f[0])
            filelist.append(f[0])
    return filelist
def getlfilestoupload(repo, missing, addfunc):
    '''Call ``addfunc(standin-path, largefile-hash)`` for every standin
    touched by the revisions in ``missing``, reporting progress on the ui.
    '''
    makeprogress = repo.ui.makeprogress
    with makeprogress(_('finding outgoing largefiles'),
                      unit=_('revisions'), total=len(missing)) as progress:
        for i, n in enumerate(missing):
            progress.update(i)
            # real (non-null) parents of the revision; "node" here is the
            # mercurial.node module, not a changeset id
            parents = [p for p in repo[n].parents() if p != node.nullid]
            # temporarily disable largefile status so repo[n] yields the
            # plain changectx, then restore the previous flag
            oldlfstatus = repo.lfstatus
            repo.lfstatus = False
            try:
                ctx = repo[n]
            finally:
                repo.lfstatus = oldlfstatus
            files = set(ctx.files())
            if len(parents) == 2:
                # for merges, ctx.files() is not exhaustive: also include
                # files present in only one parent, and files whose
                # manifest entry differs from either parent
                mc = ctx.manifest()
                mp1 = ctx.parents()[0].manifest()
                mp2 = ctx.parents()[1].manifest()
                for f in mp1:
                    if f not in mc:
                        files.add(f)
                for f in mp2:
                    if f not in mc:
                        files.add(f)
                for f in mc:
                    if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
                        files.add(f)
            # report every standin that still exists in this revision
            for fn in files:
                if isstandin(fn) and fn in ctx:
                    addfunc(fn, readasstandin(ctx[fn]))
def updatestandinsbymatch(repo, match):
    '''Update standins in the working directory according to the given match.

    Returns a (possibly modified) ``match`` object to be used for the
    subsequent commit process.  ``match`` may be ``None``, meaning
    "commit everything".
    '''
    ui = repo.ui

    # Case 1: user calls commit with no specific files or
    # include/exclude patterns: refresh and commit all files that
    # are "dirty".
    if match is None or match.always():
        # Spend a bit of time here to get a list of files we know
        # are modified so we can compare only against those.
        # It can cost a lot of time (several seconds)
        # otherwise to update all standins if the largefiles are
        # large.
        lfdirstate = openlfdirstate(ui, repo)
        dirtymatch = matchmod.always(repo.root, repo.getcwd())
        unsure, s = lfdirstate.status(dirtymatch, subrepos=[], ignored=False,
                                      clean=False, unknown=False)
        modifiedfiles = unsure + s.modified + s.added + s.removed
        lfiles = listlfiles(repo)
        # this only loops through largefiles that exist (not
        # removed/renamed)
        for lfile in lfiles:
            if lfile in modifiedfiles:
                fstandin = standin(lfile)
                if repo.wvfs.exists(fstandin):
                    # this handles the case where a rebase is being
                    # performed and the working copy is not updated
                    # yet.
                    if repo.wvfs.exists(lfile):
                        updatestandin(repo, lfile, fstandin)
        return match

    lfiles = listlfiles(repo)
    match._files = repo._subdirlfs(match.files(), lfiles)

    # Case 2: user calls commit with specified patterns: refresh
    # any matching big files.
    smatcher = composestandinmatcher(repo, match)
    standins = repo.dirstate.walk(smatcher, subrepos=[], unknown=False,
                                  ignored=False)

    # No matching big files: get out of the way and pass control to
    # the usual commit() method.
    if not standins:
        return match

    # Refresh all matching big files. It's possible that the
    # commit will end up failing, in which case the big files will
    # stay refreshed. No harm done: the user modified them and
    # asked to commit them, so sooner or later we're going to
    # refresh the standins. Might as well leave them refreshed.
    lfdirstate = openlfdirstate(ui, repo)
    for fstandin in standins:
        lfile = splitstandin(fstandin)
        if lfdirstate[lfile] != 'r':
            updatestandin(repo, lfile, fstandin)

    # Cook up a new matcher that only matches regular files or
    # standins corresponding to the big files requested by the
    # user. Have to modify _files to prevent commit() from
    # complaining "not tracked" for big files.
    match = copy.copy(match)
    origmatchfn = match.matchfn

    # Check both the list of largefiles and the list of
    # standins because if a largefile was removed, it
    # won't be in the list of largefiles at this point
    match._files += sorted(standins)

    actualfiles = []
    for f in match._files:
        fstandin = standin(f)

        # For largefiles, only one of the normal and standin should be
        # committed (except if one of them is a remove). In the case of a
        # standin removal, drop the normal file if it is unknown to dirstate.
        # Thus, skip plain largefile names but keep the standin.
        if f in lfiles or fstandin in standins:
            if repo.dirstate[fstandin] != 'r':
                if repo.dirstate[f] != 'r':
                    continue
            elif repo.dirstate[f] == '?':
                continue

        actualfiles.append(f)
    match._files = actualfiles

    # matchfn: match regular files that are NOT largefiles, plus the
    # standins of the requested largefiles
    def matchfn(f):
        if origmatchfn(f):
            return f not in lfiles
        else:
            return f in standins

    match.matchfn = matchfn
    return match
class automatedcommithook(object):
    '''Stateful hook that updates standins at the first commit of resuming.

    While an automated committer (rebase, transplant, and so on) is
    running, standins in the working directory should already be up to
    date, so refreshing them is skipped for efficiency.  The first
    commit after resuming (e.g. ``rebase --continue``), however, must
    refresh them, because largefiles may have been modified manually.
    '''
    def __init__(self, resuming):
        self.resuming = resuming

    def __call__(self, repo, match):
        if not self.resuming:
            return match
        # update exactly once; subsequent commits in this run skip it
        self.resuming = False
        return updatestandinsbymatch(repo, match)
def getstatuswriter(ui, repo, forcibly=None):
    '''Return the function used to write out largefiles-specific status.

    With ``forcibly=None`` (and largefiles enabled on ``repo``), the
    last element of ``repo._lfstatuswriters`` is used as the default
    writer.  Otherwise, a writer that always writes (truthy
    ``forcibly``) or always ignores (falsy ``forcibly``) is returned.
    '''
    if forcibly is None and util.safehasattr(repo, '_largefilesenabled'):
        return repo._lfstatuswriters[-1]
    if forcibly:
        return ui.status  # forcibly WRITE OUT
    return lambda *msg, **opts: None  # forcibly IGNORE
| 36.134615 | 79 | 0.631842 |
9ad806fc3f6f31821609ade103f4dabc8f82b55a | 5,081 | py | Python | tests/spec/test_enum.py | recht/thriftrw-python | aad5ee4e9ca21fe59c9bea479465615ef3825dec | [
"MIT"
] | null | null | null | tests/spec/test_enum.py | recht/thriftrw-python | aad5ee4e9ca21fe59c9bea479465615ef3825dec | [
"MIT"
] | null | null | null | tests/spec/test_enum.py | recht/thriftrw-python | aad5ee4e9ca21fe59c9bea479465615ef3825dec | [
"MIT"
] | null | null | null | # Copyright (c) 2016 Uber Technologies, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import, unicode_literals, print_function
import pytest
from thriftrw.errors import ThriftCompilerError
from thriftrw.spec.enum import EnumTypeSpec
from thriftrw.idl import Parser
from ..util.value import vi32
@pytest.fixture
def parse():
    """Return a parser for standalone ``enum`` definitions.

    The returned callable takes a Thrift IDL snippet and yields its AST.
    """
    return Parser(start='enum', silent=True).parse
def test_compile_implicit_values(parse):
    """Items without explicit values are numbered 0, 1, 2, ..."""
    spec = EnumTypeSpec.compile(parse('enum ImplicitEnum { A, B, C }'))
    assert spec.name == 'ImplicitEnum'
    assert spec.items == {'A': 0, 'B': 1, 'C': 2}
def test_compile_explicit_values(parse):
    """Explicitly assigned values are used verbatim."""
    spec = EnumTypeSpec.compile(parse('enum ExplicitEnum { A = 1, B = 5, C = 3 }'))
    assert spec.name == 'ExplicitEnum'
    assert spec.items == {'A': 1, 'B': 5, 'C': 3}
def test_compile_implicit_and_explicit_values(parse):
    """Implicit values continue counting from the last explicit one."""
    spec = EnumTypeSpec.compile(
        parse('enum CombinationEnum { A = 1, B, C = 5, D, E }'))
    assert spec.name == 'CombinationEnum'
    assert spec.items == {'A': 1, 'B': 2, 'C': 5, 'D': 6, 'E': 7}
def test_compile_duplicate_names(parse):
    """Compiling an enum with a repeated item name raises a compiler error."""
    enum_ast = parse('enum DupeEnum { A, B, A }')
    with pytest.raises(ThriftCompilerError) as exc_info:
        EnumTypeSpec.compile(enum_ast)
    # Inspect the exception itself: str(exc_info) is deprecated in modern
    # pytest and may include traceback/location noise.
    message = str(exc_info.value)
    assert 'DupeEnum.A' in message
    assert 'has duplicates' in message
def test_compile_values_collide(parse):
    """Colliding values are permitted; reverse lookup yields every name."""
    spec = EnumTypeSpec.compile(parse('enum Foo { A, B, C = 0, D }'))
    assert spec.items == {'A': 0, 'B': 1, 'C': 0, 'D': 1}
    for value, names in ((0, {'A', 'C'}), (1, {'B', 'D'})):
        assert set(spec.values_to_names[value]) == names
def test_link(loads):
    """A linked enum exposes values, reverse lookup, and item listings."""
    TestEnum = loads('''
    enum TestEnum {
        A = 1, B, C
    }
    ''').TestEnum

    expected = {'A': 1, 'B': 2, 'C': 3}
    for item_name, item_value in expected.items():
        assert getattr(TestEnum, item_name) == item_value
        assert TestEnum.name_of(item_value) == item_name
    # unknown values map to a falsy result
    assert not TestEnum.name_of(4)
    assert set(TestEnum.values) == set(expected.values())
    assert set(TestEnum.items) == set(expected)
def test_to_wire(loads):
    """Enum members serialize to their i32 wire values."""
    Enum = loads('enum ToWireEnum { A = 2, B = 3, C = -42 }').ToWireEnum
    spec = Enum.type_spec
    for member, wire in ((Enum.A, 2), (Enum.B, 3), (Enum.C, -42)):
        assert spec.to_wire(member) == vi32(wire)
def test_round_trip(loads):
    """Wire and primitive conversions both round-trip every member."""
    Enum = loads('enum RoundTripEnum { A = 2, B = 3, C = -42 }').RoundTripEnum
    spec = Enum.type_spec
    for member in (Enum.A, Enum.B, Enum.C):
        assert spec.from_wire(spec.to_wire(member)) == member
        assert spec.from_primitive(spec.to_primitive(member)) == member
def test_validate(loads):
    """validate() accepts defined values and rejects undefined ones."""
    Enum = loads('enum ToWireEnum { A = 2, B = 3, C = -42 }').ToWireEnum
    spec = Enum.type_spec
    spec.validate(2)  # defined value: no exception
    with pytest.raises(ValueError):
        spec.validate(4)  # undefined value: rejected
def test_enums_are_constants(loads):
    """Enum members can serve as default values for struct fields."""
    mod = loads('''
    struct Bar {
        1: required Foo foo = Foo.A
    }
    enum Foo {
        A, B, C
    }
    ''')
    bar = mod.Bar()
    # the default for Bar.foo is Foo.A, which is the implicit value 0
    assert bar.foo == mod.Foo.A
    assert bar.foo == 0
@pytest.mark.parametrize('s', [
    '''
    enum X { A = 1, B = 2, C = 4 }
    const X x = 3
    ''',
    '''
    enum X { A = 1, B = 2, C = 4 }
    struct Y { 1: required X x = 3 }
    ''',
])
def test_enum_constant_invalid_default(loads, s):
    """A constant/default outside the enum's value set is a compile error."""
    with pytest.raises(ThriftCompilerError) as exc_info:
        loads(s)
    # Inspect the exception itself: str(exc_info) is deprecated in modern
    # pytest and may include traceback/location noise.
    assert 'is not a valid value for enum "X"' in str(exc_info.value)
def test_has_thrift_module(loads):
    """Generated types link back to their defining module."""
    generated = loads('''
    enum Foo {
        A = 1, B, C
    }
    ''')
    assert generated.Foo.__thrift_module__ is generated
| 29.540698 | 79 | 0.650659 |
bb6cb7e77c65b3f45b9cf04ef0666e6eb28fa97a | 674 | py | Python | ops/culldeslack/culldeslack.py | unchaoss/unchaoss | a55b440fea9bf574f670d2a545e820275025d509 | [
"Apache-2.0"
] | null | null | null | ops/culldeslack/culldeslack.py | unchaoss/unchaoss | a55b440fea9bf574f670d2a545e820275025d509 | [
"Apache-2.0"
] | null | null | null | ops/culldeslack/culldeslack.py | unchaoss/unchaoss | a55b440fea9bf574f670d2a545e820275025d509 | [
"Apache-2.0"
] | null | null | null | __author__ = 'chitrabhanu'
import csv
import os, stat
import sys
import datetime
import time
import json
# Prefixes used to tag error messages by category when reported to the user.
USAGE_ERROR_PREFIX = "USAGE ERROR: "
RUNTIME_ERROR_PREFIX = "RUNTIME ERROR: "
class UsageError(Exception):
    """Raised for command-line usage mistakes.

    The prefixed message is kept on ``self.msg`` for existing callers, and
    is now also passed to ``Exception.__init__`` so that ``str(err)``,
    tracebacks, and logging actually show it (the original stored it only
    on the instance, leaving the exception itself message-less).
    """

    def __init__(self, msg):
        self.msg = USAGE_ERROR_PREFIX + msg
        super(UsageError, self).__init__(self.msg)
class RuntimeError(Exception):
    """Raised for errors encountered during processing.

    NOTE(review): this deliberately keeps the original name, but it
    shadows the builtin ``RuntimeError`` within this module; consider
    renaming (e.g. ``CullRuntimeError``) in a follow-up.

    The prefixed message is kept on ``self.msg`` for existing callers, and
    is now also passed to ``Exception.__init__`` so that ``str(err)`` and
    tracebacks show it.
    """

    def __init__(self, msg):
        self.msg = RUNTIME_ERROR_PREFIX + msg
        super(RuntimeError, self).__init__(self.msg)
def Usage(valid_args_list):
    """Print command-line usage help for this script.

    ``valid_args_list`` is currently unused -- presumably the intended
    list of accepted arguments to print in place of "TBD"; confirm
    before wiring it in.
    """
    print("USAGE:\n\tpython %s followed by one out of:" % (sys.argv[0]))
    print("TBD")
class culldeslack:
    """Entry point for the cull-de-slack operation (logic not yet implemented)."""
    def cli(self, args):
        """Handle command-line invocation; ``args`` is expected to be sys.argv."""
        pass
def main():
    """Script entry point: run the CLI against the process arguments."""
    culldeslack().cli(sys.argv)
# Run the CLI only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    main()
| 17.282051 | 72 | 0.675074 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.