code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
from yaml import safe_load
from hashlib import md5
from enum import Enum
from box import Box
class YamlType(Enum):
BASE = 0
PIPELINE = 1
SERVICE = 2
def Yaml(path):
"""
Sudo class for managing a yaml as a python object.
:param path: path to .yaml file
"""
__type__ = None
__text__ = open(path).read()
yaml = safe_load(__text__)
for yaml_type in YamlType:
if yaml_type.name.lower() in yaml:
__type__ = yaml_type
if __type__ is None:
raise ValueError('Invalid yaml type for %s' % path)
box = Box(yaml[__type__.name.lower()])
box.__path__ = path
box.__text__ = __text__
box.__type__ = __type__
box.hash = md5(__text__.encode()).hexdigest()
return box
| [
"yaml.safe_load"
] | [((353, 372), 'yaml.safe_load', 'safe_load', (['__text__'], {}), '(__text__)\n', (362, 372), False, 'from yaml import safe_load\n')] |
from __future__ import absolute_import
from __future__ import print_function
import numpy as np
import re
from scipy import linalg
import scipy.ndimage as ndi
from six.moves import range
import os
import sys
import threading
import copy
import inspect
import types
from keras import backend as K
from keras.utils.generic_utils import Progbar
import tensorflow as tf
import cv2
class ImageDataGenerator(object):
'''Generate minibatches with
real-time data augmentation.
# Arguments
featurewise_center: set input mean to 0 over the dataset.
samplewise_center: set each sample mean to 0.
featurewise_std_normalization: divide inputs by std of the dataset.
samplewise_std_normalization: divide each input by its std.
featurewise_standardize_axis: axis along which to perform feature-wise center and std normalization.
samplewise_standardize_axis: axis along which to to perform sample-wise center and std normalization.
zca_whitening: apply ZCA whitening.
rotation_range: degrees (0 to 180).
width_shift_range: fraction of total width.
height_shift_range: fraction of total height.
shear_range: shear intensity (shear angle in radians).
zoom_range: amount of zoom. if scalar z, zoom will be randomly picked
in the range [1-z, 1+z]. A sequence of two can be passed instead
to select this range.
channel_shift_range: shift range for each channels.
fill_mode: points outside the boundaries are filled according to the
given mode ('constant', 'nearest', 'reflect' or 'wrap'). Default
is 'nearest'.
cval: value used for points outside the boundaries when fill_mode is
'constant'. Default is 0.
horizontal_flip: whether to randomly flip images horizontally.
vertical_flip: whether to randomly flip images vertically.
rescale: rescaling factor. If None or 0, no rescaling is applied,
otherwise we multiply the data by the value provided (before applying
any other transformation).
dim_ordering: 'th' or 'tf'. In 'th' mode, the channels dimension
(the depth) is at index 1, in 'tf' mode it is at index 3.
It defaults to the `image_dim_ordering` value found in your
Keras config file at `~/.keras/keras.json`.
If you never set it, then it will be "th".
seed: random seed for reproducible pipeline processing. If not None, it will also be used by `flow` or
`flow_from_directory` to generate the shuffle index in case of no seed is set.
'''
def __init__(self,
featurewise_center=False,
samplewise_center=False,
featurewise_std_normalization=False,
samplewise_std_normalization=False,
featurewise_standardize_axis=None,
samplewise_standardize_axis=None,
zca_whitening=False,
rotation_range=0.,
width_shift_range=0.,
height_shift_range=0.,
shear_range=0.,
zoom_range=0.,
channel_shift_range=0.,
fill_mode='nearest',
cval=0.,
horizontal_flip=False,
vertical_flip=False,
rescale=None,
dim_ordering=K.image_dim_ordering(),
seed=None,
verbose=1):
self.config = copy.deepcopy(locals())
self.config['config'] = self.config
self.config['mean'] = None
self.config['std'] = None
self.config['principal_components'] = None
self.config['rescale'] = rescale
if dim_ordering not in {'tf', 'th'}:
raise Exception('dim_ordering should be "tf" (channel after row and '
'column) or "th" (channel before row and column). '
'Received arg: ', dim_ordering)
self.__sync_seed = self.config['seed'] or np.random.randint(0, 4294967295)
self.default_pipeline = []
self.default_pipeline.append(random_transform)
self.default_pipeline.append(standardize)
self.set_pipeline(self.default_pipeline)
self.__fitting = False
self.fit_lock = threading.Lock()
@property
def sync_seed(self):
return self.__sync_seed
@property
def fitting(self):
return self.__fitting
@property
def pipeline(self):
return self.__pipeline
def sync(self, image_data_generator):
self.__sync_seed = image_data_generator.sync_seed
return (self, image_data_generator)
def set_pipeline(self, p):
if p is None:
self.__pipeline = self.default_pipeline
elif type(p) is list:
self.__pipeline = p
else:
raise Exception('invalid pipeline.')
def flow(self, X, y=None, batch_size=32, shuffle=True, seed=None,
save_to_dir=None, save_prefix='', save_mode=None, save_format='jpeg'):
return NumpyArrayIterator(
X, y, self,
batch_size=batch_size, shuffle=shuffle, seed=seed,
dim_ordering=self.config['dim_ordering'],
save_to_dir=save_to_dir, save_prefix=save_prefix,
save_mode=save_mode, save_format=save_format)
def flow_from_list(self, X, y=None, batch_size=32, shuffle=True, seed=None,
save_to_dir=None, save_prefix='', save_mode=None, save_format='jpeg'):
return ListArrayIterator(
X, y, self,
batch_size=batch_size, shuffle=shuffle, seed=seed,
dim_ordering=self.config['dim_ordering'],
save_to_dir=save_to_dir, save_prefix=save_prefix,
save_mode=save_mode, save_format=save_format)
# def flow_with_mask(self, X, y=None, batch_size=32, shuffle=True, seed=None,
# save_to_dir=None, save_prefix='', save_mode=None, save_format='jpeg'):
# return ListArrayIteratorWithMask(
# X, y, self,
# batch_size=batch_size, shuffle=shuffle, seed=seed,
# dim_ordering=self.config['dim_ordering'],
# save_to_dir=save_to_dir, save_prefix=save_prefix,
# save_mode=save_mode, save_format=save_format)
def flow_from_directory(self, directory,
color_mode=None, target_size=None,
image_reader='pil', reader_config=None,
read_formats=None,
classes=None, class_mode='categorical',
batch_size=32, shuffle=True, seed=None,
save_to_dir=None, save_prefix='',
save_mode=None, save_format='jpeg'):
if reader_config is None:
reader_config = {'target_mode': 'RGB', 'target_size': (256, 256)}
if read_formats is None:
read_formats = {'png', 'jpg', 'jpeg', 'bmp'}
return DirectoryIterator(
directory, self,
color_mode=color_mode, target_size=target_size,
image_reader=image_reader, reader_config=reader_config,
read_formats=read_formats,
classes=classes, class_mode=class_mode,
dim_ordering=self.config['dim_ordering'],
batch_size=batch_size, shuffle=shuffle, seed=seed,
save_to_dir=save_to_dir, save_prefix=save_prefix,
save_mode=save_mode, save_format=save_format)
def process(self, x):
# get next sync_seed
np.random.seed(self.__sync_seed)
self.__sync_seed = np.random.randint(0, 4294967295)
self.config['fitting'] = self.__fitting
self.config['sync_seed'] = self.__sync_seed
for p in self.__pipeline:
x = p(x, **self.config)
return x
def fit_generator(self, generator, nb_iter):
'''Fit a generator
# Arguments
generator: Iterator, generate data for fitting.
nb_iter: Int, number of iteration to fit.
'''
with self.fit_lock:
try:
self.__fitting = nb_iter*generator.batch_size
for i in range(nb_iter):
next(generator)
finally:
self.__fitting = False
def fit(self, X, rounds=1):
'''Fit the pipeline on a numpy array
# Arguments
X: Numpy array, the data to fit on.
rounds: how many rounds of fit to do over the data
'''
# X = np.copy(X)
with self.fit_lock:
try:
# self.__fitting = rounds*X.shape[0]
self.__fitting = rounds * len(X)
for r in range(rounds):
# for i in range(X.shape[0]):
for i in range(len(X)):
self.process(X[i])
finally:
self.__fitting = False
if __name__ == '__main__':
pass
| [
"six.moves.range",
"keras.backend.image_dim_ordering",
"threading.Lock",
"numpy.random.randint",
"numpy.random.seed"
] | [((3432, 3454), 'keras.backend.image_dim_ordering', 'K.image_dim_ordering', ([], {}), '()\n', (3452, 3454), True, 'from keras import backend as K\n'), ((4362, 4378), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (4376, 4378), False, 'import threading\n'), ((7642, 7674), 'numpy.random.seed', 'np.random.seed', (['self.__sync_seed'], {}), '(self.__sync_seed)\n', (7656, 7674), True, 'import numpy as np\n'), ((7702, 7734), 'numpy.random.randint', 'np.random.randint', (['(0)', '(4294967295)'], {}), '(0, 4294967295)\n', (7719, 7734), True, 'import numpy as np\n'), ((4083, 4115), 'numpy.random.randint', 'np.random.randint', (['(0)', '(4294967295)'], {}), '(0, 4294967295)\n', (4100, 4115), True, 'import numpy as np\n'), ((8278, 8292), 'six.moves.range', 'range', (['nb_iter'], {}), '(nb_iter)\n', (8283, 8292), False, 'from six.moves import range\n'), ((8809, 8822), 'six.moves.range', 'range', (['rounds'], {}), '(rounds)\n', (8814, 8822), False, 'from six.moves import range\n')] |
# -*- coding: utf-8 -*-
"""
====================================
Plotting simple 3D graph with Mayavi
====================================
A simple example of the plot of a 3D graph with Mayavi
in order to test the autonomy of the gallery.
"""
# Code source: <NAME>
# License: BSD 3 clause
from mayavi import mlab
mlab.test_plot3d()
mlab.figure()
mlab.test_contour3d()
| [
"mayavi.mlab.test_plot3d",
"mayavi.mlab.test_contour3d",
"mayavi.mlab.figure"
] | [((318, 336), 'mayavi.mlab.test_plot3d', 'mlab.test_plot3d', ([], {}), '()\n', (334, 336), False, 'from mayavi import mlab\n'), ((338, 351), 'mayavi.mlab.figure', 'mlab.figure', ([], {}), '()\n', (349, 351), False, 'from mayavi import mlab\n'), ((352, 373), 'mayavi.mlab.test_contour3d', 'mlab.test_contour3d', ([], {}), '()\n', (371, 373), False, 'from mayavi import mlab\n')] |
__author__ = "<NAME>"
__license__ = 'MIT'
# -------------------------------------------------------------------------------------------------------------------- #
# IMPORTS
# Modules
import os
# RiBuild Modules
from delphin_6_automation.database_interactions.db_templates import delphin_entry
from delphin_6_automation.database_interactions import general_interactions
from delphin_6_automation.database_interactions.auth import validation as auth_dict
from delphin_6_automation.database_interactions import mongo_setup
# -------------------------------------------------------------------------------------------------------------------- #
# RIBuild
server = mongo_setup.global_init(auth_dict)
folder = r'C:\ribuild'
for project in delphin_entry.Delphin.objects(simulated__exists=True).only('id', 'results_raw')[:2]:
project_id = str(project.id)
project_folder = os.path.join(folder, str(project_id))
os.mkdir(project_folder)
general_interactions.download_full_project_from_database(project_id, project_folder)
general_interactions.download_sample_data(project_id, project_folder)
result_doc = project.results_raw
general_interactions.download_raw_result(str(result_doc.id), project_folder)
mongo_setup.global_end_ssh(server)
| [
"delphin_6_automation.database_interactions.mongo_setup.global_end_ssh",
"delphin_6_automation.database_interactions.mongo_setup.global_init",
"delphin_6_automation.database_interactions.general_interactions.download_sample_data",
"delphin_6_automation.database_interactions.db_templates.delphin_entry.Delphin.... | [((664, 698), 'delphin_6_automation.database_interactions.mongo_setup.global_init', 'mongo_setup.global_init', (['auth_dict'], {}), '(auth_dict)\n', (687, 698), False, 'from delphin_6_automation.database_interactions import mongo_setup\n'), ((1230, 1264), 'delphin_6_automation.database_interactions.mongo_setup.global_end_ssh', 'mongo_setup.global_end_ssh', (['server'], {}), '(server)\n', (1256, 1264), False, 'from delphin_6_automation.database_interactions import mongo_setup\n'), ((921, 945), 'os.mkdir', 'os.mkdir', (['project_folder'], {}), '(project_folder)\n', (929, 945), False, 'import os\n'), ((951, 1039), 'delphin_6_automation.database_interactions.general_interactions.download_full_project_from_database', 'general_interactions.download_full_project_from_database', (['project_id', 'project_folder'], {}), '(project_id,\n project_folder)\n', (1007, 1039), False, 'from delphin_6_automation.database_interactions import general_interactions\n'), ((1040, 1109), 'delphin_6_automation.database_interactions.general_interactions.download_sample_data', 'general_interactions.download_sample_data', (['project_id', 'project_folder'], {}), '(project_id, project_folder)\n', (1081, 1109), False, 'from delphin_6_automation.database_interactions import general_interactions\n'), ((739, 792), 'delphin_6_automation.database_interactions.db_templates.delphin_entry.Delphin.objects', 'delphin_entry.Delphin.objects', ([], {'simulated__exists': '(True)'}), '(simulated__exists=True)\n', (768, 792), False, 'from delphin_6_automation.database_interactions.db_templates import delphin_entry\n')] |
#!/usr/bin/env python
'''
MonkeyTest -- test your hard drive read-write speed in Python
A simplistic script to show that such system programming
tasks are possible and convenient to be solved in Python
The file is being created, then written with random data, randomly read
and deleted, so the script doesn't waste your drive
(!) Be sure, that the file you point to is not something
you need, cause it'll be overwritten during test
'''
# Miscellaneous operating system interfaces
# System-specific parameters and functions
import os, sys
# The shuffle() method takes a sequence, like a list, and reorganize the order of the items.
from random import shuffle
# Lib for cli interface
import argparse
# Pythons lib for json
import json
# Import new high-res counter (Python >= 3.3)
try:
from time import perf_counter as time
# Else print error
except ImportError as e:
print(e)
# Functions that holds all arguments
def get_args():
parser = argparse.ArgumentParser(description='Arguments', formatter_class = argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-f', '--file',
required=False,
action='store',
default='/tmp/monkeytest',
help='The file to read/write to')
parser.add_argument('-s', '--size',
required=False,
action='store',
type=int,
default=128,
help='Total MB to write')
parser.add_argument('-w', '--write-block-size',
required=False,
action='store',
type=int,
default=1024,
help='The block size for writing in bytes')
parser.add_argument('-r', '--read-block-size',
required=False,
action='store',
type=int,
default=512,
help='The block size for reading in bytes')
parser.add_argument('-j', '--json',
required=False,
action='store',
help='Output to json file')
args = parser.parse_args()
return args
class Benchmark:
'''
# Init method
# Initialize(assign values) to the data members of the class when an object of class is created
# The method is useful to do any initialization you want to do with your object.
'''
def __init__(self, file,write_mb, write_block_kb, read_block_b):
self.file = file
self.write_mb = write_mb
self.write_block_kb = write_block_kb
self.read_block_b = read_block_b
'''
# wr_blocks
# int( = The int() method returns an integer object from any number or string.
# self.write_mb * 1024 = Multiplication of variable
# / self.write_block_kb = Divided by Self.write_block_kb. The result always has type float.
'''
wr_blocks = int(self.write_mb * 1024 / self.write_block_kb)
rd_blocks = int(self.write_mb * 1024 * 1024 / self.read_block_b)
self.write_results = self.write_test( 1024 * self.write_block_kb, wr_blocks)
self.read_results = self.read_test(self.read_block_b, rd_blocks)
def write_test(self, block_size, blocks_count, show_progress=True):
'''
Tests write speed by writing random blocks, at total quantity
of blocks_count, each at size of block_size bytes to disk.
Function returns a list of write times in sec of each block.
'''
'''
# os.open = Open the file path and set various flags according to flags and possibly its mode according to mode.
# self.file = path
# os.O.CREAT = If the specified file does not exist, it may optionally (if O_CREAT is specified in flags) be created by open().
# os.O_WRONLY = The argument flags must include one of the following access modes: O_RDONLY, O_WRONLY, or O_RDWR.
# These request opening the file read-only, write-only, or read/write, respectively.
# 0o777 = A numeric value representing the mode of the newly opened file. The default value of this parameter is 0o777 (octal). (511)
'''
f = os.open(self.file, os.O_CREAT | os.O_WRONLY, 0o777) # low-level I/O
# list for time
took = []
# loop every int
for i in range(blocks_count):
if show_progress:
# dirty trick to actually print progress on each iteration
sys.stdout.write('\rWriting: {:.2f} %'.format(
(i + 1) * 100 / blocks_count))
sys.stdout.flush()
buff = os.urandom(block_size)
start = time()
os.write(f, buff)
os.fsync(f) # force write to disk
t = time() - start
took.append(t)
os.close(f)
return took
def read_test(self, block_size, blocks_count, show_progress=True):
'''
Performs read speed test by reading random offset blocks from
file, at maximum of blocks_count, each at size of block_size
bytes until the End Of File reached.
Returns a list of read times in sec of each block.
'''
f = os.open(self.file, os.O_RDONLY, 0o777) # low-level I/O
# generate random read positions
offsets = list(range(0, blocks_count * block_size, block_size))
shuffle(offsets)
took = []
for i, offset in enumerate(offsets, 1):
if show_progress and i % int(self.write_block_kb * 1024 / self.read_block_b) == 0:
# read is faster than write, so try to equalize print period
sys.stdout.write('\rReading: {:.2f} %'.format(
(i + 1) * 100 / blocks_count))
sys.stdout.flush()
start = time()
os.lseek(f, offset, os.SEEK_SET) # set position
buff = os.read(f, block_size) # read from position
t = time() - start
if not buff: break # if EOF reached
took.append(t)
os.close(f)
return took
def print_result(self):
result = ('\n\nWritten {} MB in {:.4f} s\nWrite speed is {:.2f} MB/s'
'\n max: {max:.2f}, min: {min:.2f}\n'.format(
self.write_mb, sum(self.write_results), self.write_mb / sum(self.write_results),
max=self.write_block_kb / (1024 * min(self.write_results)),
min=self.write_block_kb / (1024 * max(self.write_results))))
result += ('\nRead {} x {} B blocks in {:.4f} s\nRead speed is {:.2f} MB/s'
'\n max: {max:.2f}, min: {min:.2f}\n'.format(
len(self.read_results), self.read_block_b,
sum(self.read_results), self.write_mb / sum(self.read_results),
max=self.read_block_b / (1024 * 1024 * min(self.read_results)),
min=self.read_block_b / (1024 * 1024 * max(self.read_results))))
print(result)
def get_json_result(self,output_file):
results_json = {}
results_json["Written MB"] = self.write_mb
results_json["Write time (sec)"] = round(sum(self.write_results),2)
results_json["Write speed in MB/s"] = round(self.write_mb / sum(self.write_results),2)
results_json["Read blocks"] = len(self.read_results)
results_json["Read time (sec)"] = round(sum(self.read_results),2)
results_json["Read speed in MB/s"] = round(self.write_mb / sum(self.read_results),2)
with open(output_file,'w') as f:
json.dump(results_json,f)
def main():
args = get_args()
benchmark = Benchmark(args.file, args.size, args.write_block_size, args.read_block_size)
if args.json is not None:
benchmark.get_json_result(args.json)
else:
benchmark.print_result()
os.remove(args.file)
if __name__ == "__main__":
main() | [
"sys.stdout.flush",
"random.shuffle",
"argparse.ArgumentParser",
"os.close",
"os.urandom",
"os.write",
"os.open",
"os.lseek",
"time.perf_counter",
"os.fsync",
"os.read",
"json.dump",
"os.remove"
] | [((959, 1068), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Arguments"""', 'formatter_class': 'argparse.ArgumentDefaultsHelpFormatter'}), "(description='Arguments', formatter_class=argparse.\n ArgumentDefaultsHelpFormatter)\n", (982, 1068), False, 'import argparse\n'), ((7965, 7985), 'os.remove', 'os.remove', (['args.file'], {}), '(args.file)\n', (7974, 7985), False, 'import os, sys\n'), ((4321, 4370), 'os.open', 'os.open', (['self.file', '(os.O_CREAT | os.O_WRONLY)', '(511)'], {}), '(self.file, os.O_CREAT | os.O_WRONLY, 511)\n', (4328, 4370), False, 'import os, sys\n'), ((4981, 4992), 'os.close', 'os.close', (['f'], {}), '(f)\n', (4989, 4992), False, 'import os, sys\n'), ((5364, 5400), 'os.open', 'os.open', (['self.file', 'os.O_RDONLY', '(511)'], {}), '(self.file, os.O_RDONLY, 511)\n', (5371, 5400), False, 'import os, sys\n'), ((5541, 5557), 'random.shuffle', 'shuffle', (['offsets'], {}), '(offsets)\n', (5548, 5557), False, 'from random import shuffle\n'), ((6214, 6225), 'os.close', 'os.close', (['f'], {}), '(f)\n', (6222, 6225), False, 'import os, sys\n'), ((4787, 4809), 'os.urandom', 'os.urandom', (['block_size'], {}), '(block_size)\n', (4797, 4809), False, 'import os, sys\n'), ((4830, 4836), 'time.perf_counter', 'time', ([], {}), '()\n', (4834, 4836), True, 'from time import perf_counter as time\n'), ((4849, 4866), 'os.write', 'os.write', (['f', 'buff'], {}), '(f, buff)\n', (4857, 4866), False, 'import os, sys\n'), ((4879, 4890), 'os.fsync', 'os.fsync', (['f'], {}), '(f)\n', (4887, 4890), False, 'import os, sys\n'), ((5966, 5972), 'time.perf_counter', 'time', ([], {}), '()\n', (5970, 5972), True, 'from time import perf_counter as time\n'), ((5985, 6017), 'os.lseek', 'os.lseek', (['f', 'offset', 'os.SEEK_SET'], {}), '(f, offset, os.SEEK_SET)\n', (5993, 6017), False, 'import os, sys\n'), ((6053, 6075), 'os.read', 'os.read', (['f', 'block_size'], {}), '(f, block_size)\n', (6060, 6075), False, 'import os, sys\n'), ((7688, 
7714), 'json.dump', 'json.dump', (['results_json', 'f'], {}), '(results_json, f)\n', (7697, 7714), False, 'import json\n'), ((4749, 4767), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (4765, 4767), False, 'import os, sys\n'), ((4930, 4936), 'time.perf_counter', 'time', ([], {}), '()\n', (4934, 4936), True, 'from time import perf_counter as time\n'), ((5927, 5945), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (5943, 5945), False, 'import os, sys\n'), ((6114, 6120), 'time.perf_counter', 'time', ([], {}), '()\n', (6118, 6120), True, 'from time import perf_counter as time\n')] |
from django.test import TestCase, RequestFactory
from authors.apps.authentication.views import RegistrationAPIView, VerificationAPIView
from authors.apps.articles.views import UpdateArticleAPIView, ArticleCreateAPIView
import json
from minimock import Mock
import smtplib
from rest_framework import exceptions, authentication
from authors.apps.utils.app_util import UtilClass
class UpdateArticleTestCase(TestCase):
def setUp(self):
self.factory = RequestFactory()
self.article = {
'title': 'original title',
'description': 'original description',
'body': 'original body'
}
self.update_article = {
'title': 'updated title',
'description': 'updated description',
'body': 'updated body'
}
self.user = {'user': {'email': '<EMAIL>',
'username': 'testusername7',
'password': '<PASSWORD>#',
'callbackurl': ''
}
}
self.obj = UtilClass()
registered_user = self.obj.get_reg_data(self.user)
self.obj.verify_user({"token": registered_user.data["token"]})
logged_in_user = self.obj.get_login_data(self.user)
self.headers = {
'HTTP_AUTHORIZATION': 'Token ' + logged_in_user.data["token"]
}
self.slug = self.create_article()
def create_article(self):
request = self.factory.post(
'/api/articles/', **self.headers, data=json.dumps(self.article), content_type='application/json')
response = ArticleCreateAPIView.as_view()(request)
return response.data['slug']
def test_update_article(self):
request = self.factory.put(
'articles/<slug>/update/', data=json.dumps(self.update_article), **self.headers, content_type='application/json')
response = UpdateArticleAPIView.as_view()(
request, **{'slug': self.slug})
self.assertEqual(self.update_article, response.data)
def test_no_title(self):
update_article = {
'description': 'updated description',
'body': 'updated body'
}
request = self.factory.put('articles/<slug>/update/', data=json.dumps(
update_article), **self.headers, content_type='application/json')
response = UpdateArticleAPIView.as_view()(
request, **{'slug': self.slug})
self.assertEqual(response.data['detail'],
'Please give the article a title.')
def test_no_description(self):
update_article = {
'title': 'updated title',
'body': 'updated body'
}
request = self.factory.put('articles/<slug>/update/', data=json.dumps(
update_article), **self.headers, content_type='application/json')
response = UpdateArticleAPIView.as_view()(
request, **{'slug': self.slug})
print(response.data)
self.assertEqual(response.data['detail'],
'Please give the article a description.')
def test_no_body(self):
update_article = {
'title': 'updated title',
'description': 'updated description',
}
request = self.factory.put('articles/<slug>/update/', data=json.dumps(
update_article), **self.headers, content_type='application/json')
response = UpdateArticleAPIView.as_view()(
request, **{'slug': self.slug})
print(response.data)
self.assertEqual(response.data['detail'],
'Please give the article a body.')
def test_non_article(self):
request = self.factory.put('articles/<slug>/update/', data=json.dumps(
self.update_article), **self.headers, content_type='application/json')
response = UpdateArticleAPIView.as_view()(
request, **{'slug': 'hgsfkjgygfhjdgfhjbrgfgdfhghj'})
print(response.data)
self.assertEqual(response.data['detail'], 'list index out of range')
| [
"django.test.RequestFactory",
"authors.apps.utils.app_util.UtilClass",
"json.dumps",
"authors.apps.articles.views.ArticleCreateAPIView.as_view",
"authors.apps.articles.views.UpdateArticleAPIView.as_view"
] | [((461, 477), 'django.test.RequestFactory', 'RequestFactory', ([], {}), '()\n', (475, 477), False, 'from django.test import TestCase, RequestFactory\n'), ((1092, 1103), 'authors.apps.utils.app_util.UtilClass', 'UtilClass', ([], {}), '()\n', (1101, 1103), False, 'from authors.apps.utils.app_util import UtilClass\n'), ((1644, 1674), 'authors.apps.articles.views.ArticleCreateAPIView.as_view', 'ArticleCreateAPIView.as_view', ([], {}), '()\n', (1672, 1674), False, 'from authors.apps.articles.views import UpdateArticleAPIView, ArticleCreateAPIView\n'), ((1938, 1968), 'authors.apps.articles.views.UpdateArticleAPIView.as_view', 'UpdateArticleAPIView.as_view', ([], {}), '()\n', (1966, 1968), False, 'from authors.apps.articles.views import UpdateArticleAPIView, ArticleCreateAPIView\n'), ((2403, 2433), 'authors.apps.articles.views.UpdateArticleAPIView.as_view', 'UpdateArticleAPIView.as_view', ([], {}), '()\n', (2431, 2433), False, 'from authors.apps.articles.views import UpdateArticleAPIView, ArticleCreateAPIView\n'), ((2913, 2943), 'authors.apps.articles.views.UpdateArticleAPIView.as_view', 'UpdateArticleAPIView.as_view', ([], {}), '()\n', (2941, 2943), False, 'from authors.apps.articles.views import UpdateArticleAPIView, ArticleCreateAPIView\n'), ((3465, 3495), 'authors.apps.articles.views.UpdateArticleAPIView.as_view', 'UpdateArticleAPIView.as_view', ([], {}), '()\n', (3493, 3495), False, 'from authors.apps.articles.views import UpdateArticleAPIView, ArticleCreateAPIView\n'), ((3894, 3924), 'authors.apps.articles.views.UpdateArticleAPIView.as_view', 'UpdateArticleAPIView.as_view', ([], {}), '()\n', (3922, 3924), False, 'from authors.apps.articles.views import UpdateArticleAPIView, ArticleCreateAPIView\n'), ((1566, 1590), 'json.dumps', 'json.dumps', (['self.article'], {}), '(self.article)\n', (1576, 1590), False, 'import json\n'), ((1837, 1868), 'json.dumps', 'json.dumps', (['self.update_article'], {}), '(self.update_article)\n', (1847, 1868), False, 'import json\n'), 
((2294, 2320), 'json.dumps', 'json.dumps', (['update_article'], {}), '(update_article)\n', (2304, 2320), False, 'import json\n'), ((2804, 2830), 'json.dumps', 'json.dumps', (['update_article'], {}), '(update_article)\n', (2814, 2830), False, 'import json\n'), ((3356, 3382), 'json.dumps', 'json.dumps', (['update_article'], {}), '(update_article)\n', (3366, 3382), False, 'import json\n'), ((3780, 3811), 'json.dumps', 'json.dumps', (['self.update_article'], {}), '(self.update_article)\n', (3790, 3811), False, 'import json\n')] |
# -*- coding: utf-8 -*-
from lxml import etree
from optparse import OptionParser
from zeit.care import add_file_logging
from zeit.connector.resource import Resource
import StringIO
import logging
import sys
import zeit.care.crawl
import zeit.connector.connector
logger = logging.getLogger(__name__)
_DAV_PROP_NAME_ = ('file-name', 'http://namespaces.zeit.de/CMS/document')
_PARAMS_PER_PAGE_ = 7
class Converter(object):
def __init__(self, xml_str):
self.xml = xml_str
self.body_elems = ['title', 'subtitle', 'byline', 'supertitle', 'bu']
self.div_elems = ['p', 'video', 'audio', 'raw', 'intertitle',
'article_extra']
def _build_new_body(self, elements, divisons):
'''builds a new body node with the standard elements and divisions'''
body_elements = []
for d in elements:
body_elements.append(d)
body_divisions = []
for d in divisons:
new_div = etree.Element("division", type="page")
new_div.extend(d)
body_divisions.append(new_div)
new_body = etree.Element("body")
new_body.extend(body_elements)
new_body.extend(body_divisions)
return new_body
def _get_params_per_page(self, tree):
p = tree.xpath(
"//head/attribute[@ns='http://namespaces.zeit.de/CMS/document' \
and @name='paragraphsperpage']")
if p:
paras_per_page = int(p[0].text)
else:
paras_per_page = _PARAMS_PER_PAGE_
return paras_per_page
def convert(self):
tree = etree.parse(StringIO.StringIO(self.xml))
# only articles
if not tree.xpath('//article'):
return self.xml
elif tree.xpath('//body/division'):
return self.xml
paras_per_page = self._get_params_per_page(tree)
div_list = []
xp = 0
div = []
body_children = []
body = tree.xpath('//body')[0]
for e in list(body):
# some elements are not part of the continuous text
# (title, byline, etc)
if e.tag in self.body_elems:
body_children.append(e)
continue
div.append(e)
if e.tag in self.div_elems:
xp += 1
if paras_per_page == xp:
div_list.append(div)
div = []
xp = 0
# append remaining elements
if div:
div_list.append(div)
# replace body
new_body = self._build_new_body(body_children, div_list)
tree.xpath('//article')[0].replace(body, new_body)
return etree.tostring(tree, encoding="UTF-8", xml_declaration=True)
def division_worker(resource, connector):
if resource.type == "article":
try:
new_xml = Converter(resource.data.read()).convert()
new_resource = Resource(
resource.id,
resource.__name__,
resource.type,
StringIO.StringIO(new_xml),
resource.properties,
resource.contentType)
connector[resource.id] = new_resource
logger.info(resource.id)
except KeyboardInterrupt:
logger.info('SCRIPT STOPPED')
sys.exit()
except:
logger.exception(resource.id)
def main():
usage = "usage: %prog [options] arg"
parser = OptionParser(usage)
parser.add_option("-c", "--collection", dest="collection",
help="entry collection for starting the conversion")
parser.add_option("-w", "--webdav", dest="webdav",
help="webdav server uri")
parser.add_option("-l", "--log", dest="logfile",
help="logfile for errors")
parser.add_option("-f", "--force", action="store_true", dest="force",
help="no reinsurance question, for batch mode e.g.")
(options, args) = parser.parse_args()
if not options.collection:
parser.error("missing entry point for conversion")
if not options.webdav:
parser.error("missing webdav uri")
if options.logfile:
add_file_logging(logger, options.logfile)
if not options.force:
user_ok = raw_input(
'\nConversion will start at %s.\nAre you sure? [y|n]: ' %
options.collection)
else:
user_ok = "y"
if user_ok == "y":
connector = zeit.connector.connector.Connector(roots=dict(
default=options.webdav))
crawler = zeit.care.crawl.Crawler(connector, division_worker)
crawler.run(options.collection)
| [
"logging.getLogger",
"lxml.etree.Element",
"StringIO.StringIO",
"zeit.care.add_file_logging",
"optparse.OptionParser",
"sys.exit",
"lxml.etree.tostring"
] | [((273, 300), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (290, 300), False, 'import logging\n'), ((3473, 3492), 'optparse.OptionParser', 'OptionParser', (['usage'], {}), '(usage)\n', (3485, 3492), False, 'from optparse import OptionParser\n'), ((1108, 1129), 'lxml.etree.Element', 'etree.Element', (['"""body"""'], {}), "('body')\n", (1121, 1129), False, 'from lxml import etree\n'), ((2693, 2753), 'lxml.etree.tostring', 'etree.tostring', (['tree'], {'encoding': '"""UTF-8"""', 'xml_declaration': '(True)'}), "(tree, encoding='UTF-8', xml_declaration=True)\n", (2707, 2753), False, 'from lxml import etree\n'), ((4223, 4264), 'zeit.care.add_file_logging', 'add_file_logging', (['logger', 'options.logfile'], {}), '(logger, options.logfile)\n', (4239, 4264), False, 'from zeit.care import add_file_logging\n'), ((976, 1014), 'lxml.etree.Element', 'etree.Element', (['"""division"""'], {'type': '"""page"""'}), "('division', type='page')\n", (989, 1014), False, 'from lxml import etree\n'), ((1623, 1650), 'StringIO.StringIO', 'StringIO.StringIO', (['self.xml'], {}), '(self.xml)\n', (1640, 1650), False, 'import StringIO\n'), ((3058, 3084), 'StringIO.StringIO', 'StringIO.StringIO', (['new_xml'], {}), '(new_xml)\n', (3075, 3084), False, 'import StringIO\n'), ((3336, 3346), 'sys.exit', 'sys.exit', ([], {}), '()\n', (3344, 3346), False, 'import sys\n')] |
import traceback
from pycompss.api.task import task
from pycompss.api.constraint import constraint
from pycompss.api.parameter import FILE_IN, FILE_OUT
from biobb_common.tools import file_utils as fu
from biobb_chemistry.acpype import acpype_params_gmx
@task(input_path=FILE_IN, output_path_gro=FILE_OUT, output_path_itp=FILE_OUT, output_path_top=FILE_OUT)
def acpype_params_gmx_pc(input_path, output_path_gro, output_path_itp, output_path_top, properties, **kwargs):
    """PyCOMPSs task wrapping AcpypeParamsGMX.launch().

    On any failure the traceback is printed and placeholder "failed"
    files are written for every declared output so the workflow engine
    still finds its FILE_OUT artifacts.
    """
    try:
        builder = acpype_params_gmx.AcpypeParamsGMX(
            input_path=input_path,
            output_path_gro=output_path_gro,
            output_path_itp=output_path_itp,
            output_path_top=output_path_top,
            properties=properties,
            **kwargs)
        builder.launch()
    except Exception:
        traceback.print_exc()
        for failed_path in (output_path_gro, output_path_itp, output_path_top):
            fu.write_failed_output(failed_path)
| [
"pycompss.api.task.task",
"biobb_common.tools.file_utils.write_failed_output",
"traceback.print_exc",
"biobb_chemistry.acpype.acpype_params_gmx.AcpypeParamsGMX"
] | [((255, 361), 'pycompss.api.task.task', 'task', ([], {'input_path': 'FILE_IN', 'output_path_gro': 'FILE_OUT', 'output_path_itp': 'FILE_OUT', 'output_path_top': 'FILE_OUT'}), '(input_path=FILE_IN, output_path_gro=FILE_OUT, output_path_itp=FILE_OUT,\n output_path_top=FILE_OUT)\n', (259, 361), False, 'from pycompss.api.task import task\n'), ((714, 735), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (733, 735), False, 'import traceback\n'), ((744, 783), 'biobb_common.tools.file_utils.write_failed_output', 'fu.write_failed_output', (['output_path_gro'], {}), '(output_path_gro)\n', (766, 783), True, 'from biobb_common.tools import file_utils as fu\n'), ((792, 831), 'biobb_common.tools.file_utils.write_failed_output', 'fu.write_failed_output', (['output_path_itp'], {}), '(output_path_itp)\n', (814, 831), True, 'from biobb_common.tools import file_utils as fu\n'), ((840, 879), 'biobb_common.tools.file_utils.write_failed_output', 'fu.write_failed_output', (['output_path_top'], {}), '(output_path_top)\n', (862, 879), True, 'from biobb_common.tools import file_utils as fu\n'), ((486, 684), 'biobb_chemistry.acpype.acpype_params_gmx.AcpypeParamsGMX', 'acpype_params_gmx.AcpypeParamsGMX', ([], {'input_path': 'input_path', 'output_path_gro': 'output_path_gro', 'output_path_itp': 'output_path_itp', 'output_path_top': 'output_path_top', 'properties': 'properties'}), '(input_path=input_path, output_path_gro=\n output_path_gro, output_path_itp=output_path_itp, output_path_top=\n output_path_top, properties=properties, **kwargs)\n', (519, 684), False, 'from biobb_chemistry.acpype import acpype_params_gmx\n')] |
import os
import statistics
import librosa
import sox
from concurrent.futures import ThreadPoolExecutor
directory = "flac"
trimmed_directory = "75%"

# Durations (seconds) of the original clips and of the trimmed copies.
full_lengths = []
trimmed_lengths = []

for name in os.listdir(directory):
    print(name)
    source_path = os.path.join(directory, name)
    trimmed_path = os.path.join(trimmed_directory, name)

    length = librosa.get_duration(filename=source_path)
    full_lengths.append(length)

    # Cut away the first 25% of the clip so that 75% of it remains.
    transform = sox.Transformer()
    transform.trim(length * 0.25)
    transform.build_file(source_path, trimmed_path)
    trimmed_lengths.append(librosa.get_duration(filename=trimmed_path))

# BUG FIX: the trimmed durations used to be appended to the *same* list as
# the full durations, so the value reported as "100%" was the median of a
# mixed population.  Keep the two populations separate and report both.
# (os.path.join also replaces the hard-coded "dir\\file" Windows paths.)
with open("median_length.txt", "a") as info:
    info.write(f"100%: {statistics.median(full_lengths)}\n")
    info.write(f"75%: {statistics.median(trimmed_lengths)}\n")
| [
"statistics.median",
"os.listdir",
"sox.Transformer",
"librosa.get_duration"
] | [((150, 171), 'os.listdir', 'os.listdir', (['directory'], {}), '(directory)\n', (160, 171), False, 'import os\n'), ((203, 249), 'librosa.get_duration', 'librosa.get_duration', ([], {'filename': 'f"""flac\\\\{file}"""'}), "(filename=f'flac\\\\{file}')\n", (223, 249), False, 'import librosa\n'), ((293, 310), 'sox.Transformer', 'sox.Transformer', ([], {}), '()\n', (308, 310), False, 'import sox\n'), ((422, 467), 'librosa.get_duration', 'librosa.get_duration', ([], {'filename': 'f"""75%\\\\{file}"""'}), "(filename=f'75%\\\\{file}')\n", (442, 467), False, 'import librosa\n'), ((547, 573), 'statistics.median', 'statistics.median', (['lengths'], {}), '(lengths)\n', (564, 573), False, 'import statistics\n')] |
from django.contrib import admin
from ems.crm.models import (
Attribute, Business, Project, TeamMemberRole, TeamMember,
ProjectTeam, ProjectAssessment, ProjectUserStory, ProjectEpic,
ProjectTask, UserProfile)
class AttributeAdmin(admin.ModelAdmin):
    # Change list for Attribute: searchable by label/type, filterable by
    # type and by the time-tracking / billing flags.
    search_fields = ('label', 'type')
    list_display = ('label', 'type', 'enable_timetracking', 'billable')
    list_filter = ('type', 'enable_timetracking', 'billable')
    ordering = ('type', 'sort_order')  # Django honors only first field.
class BusinessAdmin(admin.ModelAdmin):
    # Business change list shows and searches both the full and short name.
    list_display = ['name', 'short_name']
    search_fields = ['name', 'short_name']
class ProjectTaskInline(admin.StackedInline):
    # Tasks edited inline on their parent user story.
    model = ProjectTask
class ProjectUserStoryAdmin(admin.ModelAdmin):
    # Standalone user-story admin with its tasks editable inline.
    model = ProjectUserStory
    inlines = (ProjectTaskInline,)
class ProjectUserStoryInline(admin.StackedInline):
    # User stories edited inline on their parent epic.
    model = ProjectUserStory
class ProjectEpicAdmin(admin.ModelAdmin):
    # Standalone epic admin with its user stories editable inline.
    model = ProjectEpic
    inlines = (ProjectUserStoryInline, )
class ProjectEpicInline(admin.StackedInline):
    # Epics edited inline on their parent project.
    model = ProjectEpic
class ProjectAssessmentAdmin(admin.ModelAdmin):
    # Plain admin for assessments (no customisation beyond the model).
    model = ProjectAssessment
class ProjectAssessmentInline(admin.StackedInline):
    # Assessments edited inline on their parent project.
    model = ProjectAssessment
class ProjectAdmin(admin.ModelAdmin):
    # Use a raw-id widget for the business FK (avoids loading every
    # Business into a select box).
    raw_id_fields = ('business',)
    list_display = ('name', 'business', 'point_person', 'status', 'type')
    list_filter = ('type', 'status')
    # Search spans the project itself plus the related business name and
    # the point person's username / first / last name.
    search_fields = ('name', 'business__name', 'point_person__username',
                     'point_person__first_name', 'point_person__last_name',
                     'description')
    inlines = (ProjectAssessmentInline, ProjectEpicInline)
class UserProfileAdmin(admin.ModelAdmin):
    # Profile change list: owning user and their weekly hour allocation.
    list_display = ('user', 'hours_per_week')
# Register every CRM model; ``None`` means the default ModelAdmin is used.
for model, model_admin in (
    (Attribute, AttributeAdmin),
    (Business, BusinessAdmin),
    (Project, ProjectAdmin),
    (UserProfile, UserProfileAdmin),
    (TeamMemberRole, None),
    (TeamMember, None),
    (ProjectTeam, None),
    (ProjectAssessment, ProjectAssessmentAdmin),
    (ProjectUserStory, ProjectUserStoryAdmin),
    (ProjectEpic, ProjectEpicAdmin),
):
    admin.site.register(model, model_admin)
| [
"django.contrib.admin.site.register"
] | [((1766, 1812), 'django.contrib.admin.site.register', 'admin.site.register', (['Attribute', 'AttributeAdmin'], {}), '(Attribute, AttributeAdmin)\n', (1785, 1812), False, 'from django.contrib import admin\n'), ((1813, 1857), 'django.contrib.admin.site.register', 'admin.site.register', (['Business', 'BusinessAdmin'], {}), '(Business, BusinessAdmin)\n', (1832, 1857), False, 'from django.contrib import admin\n'), ((1858, 1900), 'django.contrib.admin.site.register', 'admin.site.register', (['Project', 'ProjectAdmin'], {}), '(Project, ProjectAdmin)\n', (1877, 1900), False, 'from django.contrib import admin\n'), ((1901, 1951), 'django.contrib.admin.site.register', 'admin.site.register', (['UserProfile', 'UserProfileAdmin'], {}), '(UserProfile, UserProfileAdmin)\n', (1920, 1951), False, 'from django.contrib import admin\n'), ((1952, 1987), 'django.contrib.admin.site.register', 'admin.site.register', (['TeamMemberRole'], {}), '(TeamMemberRole)\n', (1971, 1987), False, 'from django.contrib import admin\n'), ((1988, 2019), 'django.contrib.admin.site.register', 'admin.site.register', (['TeamMember'], {}), '(TeamMember)\n', (2007, 2019), False, 'from django.contrib import admin\n'), ((2020, 2052), 'django.contrib.admin.site.register', 'admin.site.register', (['ProjectTeam'], {}), '(ProjectTeam)\n', (2039, 2052), False, 'from django.contrib import admin\n'), ((2053, 2115), 'django.contrib.admin.site.register', 'admin.site.register', (['ProjectAssessment', 'ProjectAssessmentAdmin'], {}), '(ProjectAssessment, ProjectAssessmentAdmin)\n', (2072, 2115), False, 'from django.contrib import admin\n'), ((2116, 2176), 'django.contrib.admin.site.register', 'admin.site.register', (['ProjectUserStory', 'ProjectUserStoryAdmin'], {}), '(ProjectUserStory, ProjectUserStoryAdmin)\n', (2135, 2176), False, 'from django.contrib import admin\n'), ((2177, 2227), 'django.contrib.admin.site.register', 'admin.site.register', (['ProjectEpic', 'ProjectEpicAdmin'], {}), '(ProjectEpic, 
ProjectEpicAdmin)\n', (2196, 2227), False, 'from django.contrib import admin\n')] |
from string import Template
from requests import get, post
userInfoQuery = """
{
viewer {
login
id
}
}
"""
createContributedRepoQuery = Template("""
query {
user(login: "$username") {
repositoriesContributedTo(last: 100, includeUserRepositories: true) {
nodes {
isFork
name
owner {
login
}
}
}
}
}
""")
createCommittedDateQuery = Template("""
query {
repository(owner: "$owner", name: "$name") {
ref(qualifiedName: "master") {
target {
... on Commit {
history(first: 100, author: { id: "$id" }) {
edges {
node {
committedDate
}
}
}
}
}
}
}
}
""")
repositoryListQuery = Template("""
{
user(login: "$username") {
repositories(orderBy: {field: CREATED_AT, direction: ASC}, last: 100, affiliations: [OWNER, COLLABORATOR, ORGANIZATION_MEMBER], isFork: false) {
totalCount
edges {
node {
object(expression:"master") {
... on Commit {
history (author: { id: "$id" }){
totalCount
}
}
}
primaryLanguage {
color
name
id
}
stargazers {
totalCount
}
collaborators {
totalCount
}
createdAt
name
owner {
id
login
}
nameWithOwner
}
}
}
location
createdAt
name
}
}
""")
getLinesOfCodeQuery = Template("""/repos/$owner/$repo/stats/code_frequency""")
getProfileViewQuery = Template(
"""/repos/$owner/$repo/traffic/views""")
getProfileTrafficQuery = Template(
"""/repos/$owner/$repo/traffic/popular/referrers""")
class RunQuery():
    """Thin wrapper around the GitHub REST and GraphQL APIs.

    Each helper returns the parsed JSON body on HTTP 200 and raises an
    ``Exception`` describing the failing query otherwise.
    """

    def __init__(self, headers):
        # Headers (typically the Authorization token) sent with every request.
        self.headers = headers

    def runGithubAPIQuery(self, query):
        """GET a REST endpoint path (e.g. ``/repos/o/r/...``); return JSON."""
        request = get("https://api.github.com" + query, headers=self.headers)
        if request.status_code == 200:
            return request.json()
        else:
            raise Exception(
                "Query failed to run by returning code of {}. {},... {}".format(
                    request.status_code, query, str(request.json())))

    def runGithubGraphqlQuery(self, query) -> dict:
        """POST a GraphQL query string; return the parsed JSON response."""
        request = post("https://api.github.com/graphql",
                       json={"query": query}, headers=self.headers)
        if request.status_code == 200:
            return request.json()
        else:
            raise Exception("Query failed to run by returning code of {}. {}".format(
                request.status_code, query))

    def runGithubContributionsQuery(self, username):
        """Fetch the public contribution calendar for *username*."""
        request = get(
            "https://github-contributions.now.sh/api/v1/" + username)
        if request.status_code == 200:
            return request.json()
        # BUG FIX: this method previously fell through and returned None on
        # any non-200 response; raise instead, consistent with the other
        # query helpers so callers never silently receive None.
        raise Exception("Query failed to run by returning code of {}. {}".format(
            request.status_code, username))
| [
"requests.post",
"string.Template",
"requests.get"
] | [((160, 426), 'string.Template', 'Template', (['"""\nquery {\n user(login: "$username") {\n repositoriesContributedTo(last: 100, includeUserRepositories: true) {\n nodes {\n isFork\n name\n owner {\n login\n }\n }\n }\n }\n }\n"""'], {}), '(\n """\nquery {\n user(login: "$username") {\n repositoriesContributedTo(last: 100, includeUserRepositories: true) {\n nodes {\n isFork\n name\n owner {\n login\n }\n }\n }\n }\n }\n"""\n )\n', (168, 426), False, 'from string import Template\n'), ((445, 831), 'string.Template', 'Template', (['"""\nquery {\n repository(owner: "$owner", name: "$name") {\n ref(qualifiedName: "master") {\n target {\n ... on Commit {\n history(first: 100, author: { id: "$id" }) {\n edges {\n node {\n committedDate\n }\n }\n }\n }\n }\n }\n }\n }\n"""'], {}), '(\n """\nquery {\n repository(owner: "$owner", name: "$name") {\n ref(qualifiedName: "master") {\n target {\n ... on Commit {\n history(first: 100, author: { id: "$id" }) {\n edges {\n node {\n committedDate\n }\n }\n }\n }\n }\n }\n }\n }\n"""\n )\n', (453, 831), False, 'from string import Template\n'), ((846, 1702), 'string.Template', 'Template', (['"""\n{\n user(login: "$username") {\n repositories(orderBy: {field: CREATED_AT, direction: ASC}, last: 100, affiliations: [OWNER, COLLABORATOR, ORGANIZATION_MEMBER], isFork: false) {\n totalCount\n edges {\n node {\n object(expression:"master") {\n ... on Commit {\n history (author: { id: "$id" }){\n totalCount\n }\n }\n }\n primaryLanguage {\n color\n name\n id\n }\n stargazers {\n totalCount\n }\n collaborators {\n totalCount\n }\n createdAt\n name\n owner {\n id\n login\n }\n nameWithOwner\n }\n }\n }\n location\n createdAt\n name\n }\n}\n"""'], {}), '(\n """\n{\n user(login: "$username") {\n repositories(orderBy: {field: CREATED_AT, direction: ASC}, last: 100, affiliations: [OWNER, COLLABORATOR, ORGANIZATION_MEMBER], isFork: false) {\n totalCount\n edges {\n node {\n object(expression:"master") {\n ... 
on Commit {\n history (author: { id: "$id" }){\n totalCount\n }\n }\n }\n primaryLanguage {\n color\n name\n id\n }\n stargazers {\n totalCount\n }\n collaborators {\n totalCount\n }\n createdAt\n name\n owner {\n id\n login\n }\n nameWithOwner\n }\n }\n }\n location\n createdAt\n name\n }\n}\n"""\n )\n', (854, 1702), False, 'from string import Template\n'), ((1717, 1769), 'string.Template', 'Template', (['"""/repos/$owner/$repo/stats/code_frequency"""'], {}), "('/repos/$owner/$repo/stats/code_frequency')\n", (1725, 1769), False, 'from string import Template\n'), ((1797, 1842), 'string.Template', 'Template', (['"""/repos/$owner/$repo/traffic/views"""'], {}), "('/repos/$owner/$repo/traffic/views')\n", (1805, 1842), False, 'from string import Template\n'), ((1878, 1935), 'string.Template', 'Template', (['"""/repos/$owner/$repo/traffic/popular/referrers"""'], {}), "('/repos/$owner/$repo/traffic/popular/referrers')\n", (1886, 1935), False, 'from string import Template\n'), ((2089, 2148), 'requests.get', 'get', (["('https://api.github.com' + query)"], {'headers': 'self.headers'}), "('https://api.github.com' + query, headers=self.headers)\n", (2092, 2148), False, 'from requests import get, post\n'), ((2487, 2575), 'requests.post', 'post', (['"""https://api.github.com/graphql"""'], {'json': "{'query': query}", 'headers': 'self.headers'}), "('https://api.github.com/graphql', json={'query': query}, headers=self.\n headers)\n", (2491, 2575), False, 'from requests import get, post\n'), ((2884, 2945), 'requests.get', 'get', (["('https://github-contributions.now.sh/api/v1/' + username)"], {}), "('https://github-contributions.now.sh/api/v1/' + username)\n", (2887, 2945), False, 'from requests import get, post\n')] |
"""
This script tunes XGBoost regression model using Bayesian Optimization.
"""
import os
from typing import Union, Tuple, Dict
import numpy as np
import pandas as pd
import xgboost as xgb
from munch import munchify
from scipy.stats import pearsonr
from bayes_opt import BayesianOptimization
from sklearn.model_selection import train_test_split
from constants import mlp_features
from utilities import load_data, open_log, clip_features_inplace, transform_data, generate_scatter_plot
def train_model_bayes_opt(train_dmatrix, model_settings):
    """
    Trains the model using bayesian optimization.
    :param train_dmatrix: Training matrix generated using the xgb.DMatrix data type.
    :param model_settings: Dictionary ("munchified") containing all the relevant parameters, both the parameter ranges
    to be explored and training ranges, as well as parameters for the Bayesian optimization itself regarding the number
    of iterations to be performed.
    :return: The final model, corresponding parameters, and the cross validation results.
    """
    def model_function(max_depth, gamma, num_boost_round, eta, subsample, max_delta_step, alpha, reg_lambda):
        """
        Blackbox function that is optimized via Bayesian Optimization. Defined inside 'train_model_bayes_opt' as we
        require some entries from the model_settings that cannot be passed as arguments to this function, given that the
        inputs can only be ranges over which we wish to optimize.
        Please refer to the documentation for explanations of each hyperparameter:
        https://xgboost.readthedocs.io/en/latest/parameter.html
        :return: Returns the approximate 95% confidence interval of the upper bound of the RMSE (but the negative value
        thereof, as this library tries to maximize the objective).
        """
        # max_depth must be an integer for xgboost; the optimizer samples floats.
        params = {
            'booster': 'gbtree',
            'max_depth': int(max_depth),
            'gamma': gamma,
            'eta': eta,
            'subsample': subsample,
            'max_delta_step': max_delta_step,
            'alpha': alpha,
            'reg_lambda': reg_lambda,
            'seed': model_settings.seed,
            'eval_metric': model_settings.eval_metric,
            'objective': model_settings.objective
        }
        cv_result = xgb.cv(params, train_dmatrix, num_boost_round=int(num_boost_round), nfold=model_settings.n_fold,
                           early_stopping_rounds=model_settings.early_stopping_rounds)
        # We return the negative value of confidence interval for RMSE as the Bayesian Optimization library attempts to
        # maximize the objective
        mean_rmse = cv_result[f'test-{model_settings.eval_metric}-mean'].iloc[-1]
        sd_rmse = cv_result[f'test-{model_settings.eval_metric}-std'].iloc[-1]
        # mean + 2*std ~ upper bound of an approximate 95% confidence interval.
        return -1 * (mean_rmse + 2 * sd_rmse)
    # Allow attribute-style access (model_settings.seed etc.) on plain dicts.
    model_settings = munchify(model_settings)
    # Search the configured hyperparameter ranges; 'ei' = expected improvement.
    xgb_bayesian_optimisation = BayesianOptimization(model_function, {
        'max_depth': model_settings.max_depth_range,
        'gamma': model_settings.gamma_range,
        'num_boost_round': model_settings.num_boost_rounds_range,
        'eta': model_settings.learning_rate_range,
        'subsample': model_settings.subsample_range,
        'max_delta_step': model_settings.max_delta_step_range,
        'alpha': model_settings.alpha_range,
        'reg_lambda': model_settings.lambda_range,
    }, random_state=model_settings.seed)
    xgb_bayesian_optimisation.maximize(n_iter=model_settings.n_bayesian_optimization_iterations,
                                       init_points=model_settings.n_init_points_bayesian_optimization, acq='ei')
    # Best parameter set found; coerce / complete it for a final CV + fit.
    params = munchify(xgb_bayesian_optimisation.max['params'])
    params['max_depth'] = int(params.max_depth)
    params['objective'] = model_settings.objective
    params['eval_metric'] = model_settings.eval_metric
    params.update({'seed': model_settings.seed})
    # Re-run CV with the winning configuration; len(cv_result) gives the
    # effective number of boosting rounds after early stopping.
    cv_result = xgb.cv(params, train_dmatrix, num_boost_round=int(params.num_boost_round),
                       nfold=model_settings.n_fold,
                       early_stopping_rounds=model_settings.early_stopping_rounds)
    xgboost_model = xgb.train(params, train_dmatrix, num_boost_round=len(cv_result))
    return xgboost_model, params, cv_result
if __name__ == "__main__":
    # Abort early unless we are running inside a conda virtual environment.
    try:
        os.environ["CONDA_DEFAULT_ENV"]
    except KeyError:
        print("\tPlease init the conda environment!\n")
        exit(1)

    X, y = load_data()
    x_train, x_test, y_train, y_test = train_test_split(X, y, test_size=.2, random_state=42)
    x_train, x_test = transform_data(x_train, x_test, degree=1, log=True, cross=True)
    x_train, x_test = x_train.astype(np.float32), x_test.astype(np.float32)
    print('x_train: ', x_train.shape, 'x_test: ', x_test.shape, '\n')

    d_train_mat = xgb.DMatrix(x_train, y_train)
    d_test_mat = xgb.DMatrix(x_test, y_test)

    # parameter ranges for bayesian optimization
    bayes_dictionary = {
        'n_fold': 10,
        'early_stopping_rounds': 20,
        'max_depth_range': (3, 10),
        'gamma_range': (0, 15),
        'num_boost_rounds_range': (100, 150),
        'learning_rate_range': (0.01, 0.3),
        'subsample_range': (0.8, 1),
        'max_delta_step_range': (0, 30),
        'lambda_range': (0, 5),
        'alpha_range': (0, 5),
        'seed': 1,
        # objective dictates learning task (and is used to obtain evaluation metric)
        # refer to learning task parameters under https://xgboost.readthedocs.io/en/latest/parameter.html
        'objective': 'reg:squarederror',
        'eval_metric': 'rmse',
        # the following two params are parameters for bayesian optimization, not for actual xgboost
        'n_bayesian_optimization_iterations': 400,
        'n_init_points_bayesian_optimization': 40
    }

    model, paras, cv_result = train_model_bayes_opt(d_train_mat, bayes_dictionary)
    print('model: ', model, '\n', 'paras: ', paras, '\n ', 'cv_result: ', cv_result)

    # BUG FIX: d_test_mat was rebuilt here with an identical second
    # xgb.DMatrix(x_test, y_test) call, and the evaluation result was
    # silently discarded.  Reuse the existing matrix and print the metric.
    print(model.eval(d_test_mat))
| [
"utilities.load_data",
"bayes_opt.BayesianOptimization",
"munch.munchify",
"sklearn.model_selection.train_test_split",
"utilities.transform_data",
"xgboost.DMatrix"
] | [((2874, 2898), 'munch.munchify', 'munchify', (['model_settings'], {}), '(model_settings)\n', (2882, 2898), False, 'from munch import munchify\n'), ((2932, 3393), 'bayes_opt.BayesianOptimization', 'BayesianOptimization', (['model_function', "{'max_depth': model_settings.max_depth_range, 'gamma': model_settings.\n gamma_range, 'num_boost_round': model_settings.num_boost_rounds_range,\n 'eta': model_settings.learning_rate_range, 'subsample': model_settings.\n subsample_range, 'max_delta_step': model_settings.max_delta_step_range,\n 'alpha': model_settings.alpha_range, 'reg_lambda': model_settings.\n lambda_range}"], {'random_state': 'model_settings.seed'}), "(model_function, {'max_depth': model_settings.\n max_depth_range, 'gamma': model_settings.gamma_range, 'num_boost_round':\n model_settings.num_boost_rounds_range, 'eta': model_settings.\n learning_rate_range, 'subsample': model_settings.subsample_range,\n 'max_delta_step': model_settings.max_delta_step_range, 'alpha':\n model_settings.alpha_range, 'reg_lambda': model_settings.lambda_range},\n random_state=model_settings.seed)\n", (2952, 3393), False, 'from bayes_opt import BayesianOptimization\n'), ((3668, 3717), 'munch.munchify', 'munchify', (["xgb_bayesian_optimisation.max['params']"], {}), "(xgb_bayesian_optimisation.max['params'])\n", (3676, 3717), False, 'from munch import munchify\n'), ((4508, 4519), 'utilities.load_data', 'load_data', ([], {}), '()\n', (4517, 4519), False, 'from utilities import load_data, open_log, clip_features_inplace, transform_data, generate_scatter_plot\n'), ((4560, 4614), 'sklearn.model_selection.train_test_split', 'train_test_split', (['X', 'y'], {'test_size': '(0.2)', 'random_state': '(42)'}), '(X, y, test_size=0.2, random_state=42)\n', (4576, 4614), False, 'from sklearn.model_selection import train_test_split\n'), ((4637, 4700), 'utilities.transform_data', 'transform_data', (['x_train', 'x_test'], {'degree': '(1)', 'log': '(True)', 'cross': '(True)'}), '(x_train, x_test, 
degree=1, log=True, cross=True)\n', (4651, 4700), False, 'from utilities import load_data, open_log, clip_features_inplace, transform_data, generate_scatter_plot\n'), ((4867, 4896), 'xgboost.DMatrix', 'xgb.DMatrix', (['x_train', 'y_train'], {}), '(x_train, y_train)\n', (4878, 4896), True, 'import xgboost as xgb\n'), ((4914, 4941), 'xgboost.DMatrix', 'xgb.DMatrix', (['x_test', 'y_test'], {}), '(x_test, y_test)\n', (4925, 4941), True, 'import xgboost as xgb\n'), ((6053, 6080), 'xgboost.DMatrix', 'xgb.DMatrix', (['x_test', 'y_test'], {}), '(x_test, y_test)\n', (6064, 6080), True, 'import xgboost as xgb\n')] |
from django.conf import settings
from django.core.mail import send_mail
from django.core.urlresolvers import reverse
def send_validation(strategy, code):
    """Email the account-validation link for *code* to its address."""
    complete_url = reverse('social:complete',
                           args=(strategy.backend_name,))
    url = complete_url + '?code=' + code.code
    subject = 'Validate your account'
    body = 'Validate your account {0}'.format(url)
    send_mail(subject, body, settings.EMAIL_FROM, [code.email],
              fail_silently=False)
| [
"django.core.urlresolvers.reverse"
] | [((166, 223), 'django.core.urlresolvers.reverse', 'reverse', (['"""social:complete"""'], {'args': '(strategy.backend_name,)'}), "('social:complete', args=(strategy.backend_name,))\n", (173, 223), False, 'from django.core.urlresolvers import reverse\n')] |
from django.db import models
# Create your models here.
from fastrunner.models import Project
from fastuser.models import BaseTable
class Schedule(BaseTable):
    """
    Scheduled (timed) task record.

    Holds one periodic test task: its serialized configuration, the
    report-mail strategy and recipients, and the project it belongs to.
    """
    # Report delivery strategy: 1 = always send, 2 = send only on failure,
    # 3 = never send (labels are the Chinese UI strings).
    send_strategy = (
        (1, "始终发送"),
        (2, "仅失败发送"),
        (3, "从不发送")
    )

    class Meta:
        verbose_name = "定时任务"
        db_table = "Schedule"

    # Unique human-readable task name.
    name = models.CharField("任务名称", unique=True, null=False, max_length=100)
    # Unique task identifier.  NOTE(review): presumably the id used by the
    # scheduler backend (e.g. a crontab/periodic-task id) -- confirm.
    identity = models.CharField("任务ID", null=False, unique=True, max_length=100)
    # Report-mail strategy; defaults to 3 (never send).
    send_type = models.IntegerField("发送策略", choices=send_strategy, default=3)
    # Serialized task configuration.
    config = models.TextField("任务配置", null=False)
    # Report recipients and CC recipients.
    receiver = models.CharField("接收者", null=True, max_length=2048)
    copy = models.CharField("抄送者", null=True, max_length=2048)
    # Enabled flag; new tasks start enabled.
    status = models.BooleanField("状态", default=True)
    # Owning project; deleting the project deletes its schedules.
    project = models.ForeignKey(Project, verbose_name='所属项目', null=False, on_delete=models.CASCADE)
| [
"django.db.models.TextField",
"django.db.models.ForeignKey",
"django.db.models.IntegerField",
"django.db.models.BooleanField",
"django.db.models.CharField"
] | [((371, 436), 'django.db.models.CharField', 'models.CharField', (['"""任务名称"""'], {'unique': '(True)', 'null': '(False)', 'max_length': '(100)'}), "('任务名称', unique=True, null=False, max_length=100)\n", (387, 436), False, 'from django.db import models\n'), ((452, 517), 'django.db.models.CharField', 'models.CharField', (['"""任务ID"""'], {'null': '(False)', 'unique': '(True)', 'max_length': '(100)'}), "('任务ID', null=False, unique=True, max_length=100)\n", (468, 517), False, 'from django.db import models\n'), ((534, 595), 'django.db.models.IntegerField', 'models.IntegerField', (['"""发送策略"""'], {'choices': 'send_strategy', 'default': '(3)'}), "('发送策略', choices=send_strategy, default=3)\n", (553, 595), False, 'from django.db import models\n'), ((609, 645), 'django.db.models.TextField', 'models.TextField', (['"""任务配置"""'], {'null': '(False)'}), "('任务配置', null=False)\n", (625, 645), False, 'from django.db import models\n'), ((661, 712), 'django.db.models.CharField', 'models.CharField', (['"""接收者"""'], {'null': '(True)', 'max_length': '(2048)'}), "('接收者', null=True, max_length=2048)\n", (677, 712), False, 'from django.db import models\n'), ((724, 775), 'django.db.models.CharField', 'models.CharField', (['"""抄送者"""'], {'null': '(True)', 'max_length': '(2048)'}), "('抄送者', null=True, max_length=2048)\n", (740, 775), False, 'from django.db import models\n'), ((789, 828), 'django.db.models.BooleanField', 'models.BooleanField', (['"""状态"""'], {'default': '(True)'}), "('状态', default=True)\n", (808, 828), False, 'from django.db import models\n'), ((843, 933), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Project'], {'verbose_name': '"""所属项目"""', 'null': '(False)', 'on_delete': 'models.CASCADE'}), "(Project, verbose_name='所属项目', null=False, on_delete=\n models.CASCADE)\n", (860, 933), False, 'from django.db import models\n')] |
from django import template
from django.contrib.admin.util import quote
from django.core.urlresolvers import reverse
from pbs.risk.models import Treatment
register = template.Library()


@register.inclusion_tag('admin/risk/treatments.html', takes_context=True)
def show_treatments(context, location):
    """Render the treatments table for *location* on the current prescription."""
    prescription = context['current']
    matching_treatments = Treatment.objects.filter(
        register__prescription=prescription, location__name=location)
    complete_url = reverse('admin:risk_treatment_complete',
                           args=(quote(prescription.pk),))
    return {
        'current': prescription,
        'treatments': matching_treatments,
        'url': complete_url
    }
| [
"django.contrib.admin.util.quote",
"django.template.Library",
"pbs.risk.models.Treatment.objects.filter"
] | [((168, 186), 'django.template.Library', 'template.Library', ([], {}), '()\n', (184, 186), False, 'from django import template\n'), ((353, 439), 'pbs.risk.models.Treatment.objects.filter', 'Treatment.objects.filter', ([], {'register__prescription': 'current', 'location__name': 'location'}), '(register__prescription=current, location__name=\n location)\n', (377, 439), False, 'from pbs.risk.models import Treatment\n'), ((519, 536), 'django.contrib.admin.util.quote', 'quote', (['current.pk'], {}), '(current.pk)\n', (524, 536), False, 'from django.contrib.admin.util import quote\n')] |
# -*- coding: utf-8 -*-
"""
Created: on 2018-04-10
@author: Four
Project: config\readConfig.py
"""
import os
import configparser
# Directory containing this module; cfg.ini is expected alongside it.
cur_path = os.path.dirname(os.path.realpath(__file__))
configPath = os.path.join(cur_path, "cfg.ini")
conf = configparser.ConfigParser()
conf.read(configPath)
# Mail settings read from the [email] section of cfg.ini.
smtp_server = conf.get("email", "smtp_server")
sender = conf.get("email", "sender")
# NOTE(review): "psw" is presumably the sender's SMTP password -- confirm.
psw = conf.get("email", "psw")
receiver = conf.get("email", "receiver")
# Kept as a string (conf.get); cast to int where the port is actually used.
port = conf.get("email", "port")
| [
"os.path.realpath",
"os.path.join",
"configparser.ConfigParser"
] | [((198, 231), 'os.path.join', 'os.path.join', (['cur_path', '"""cfg.ini"""'], {}), "(cur_path, 'cfg.ini')\n", (210, 231), False, 'import os\n'), ((239, 266), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (264, 266), False, 'import configparser\n'), ((157, 183), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (173, 183), False, 'import os\n')] |
import os
import setuptools
from pathlib import Path
package_root = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'peg_in_hole')
asset_directory = os.path.join(package_root, 'envs', 'assets')

# Collect every asset file as a path *relative to the package root*, which
# is what setuptools' ``package_data`` expects.
data_files = []
for root, dirs, files in os.walk(asset_directory):
    for file_name in files:
        data_files.append(os.path.relpath(os.path.join(root, file_name), package_root))

setuptools.setup(
    name='peg-in-hole-gym',
    version='0.1.0',
    description='An gym env for simulating flexible tube grasp.',
    long_description=Path('README.md').read_text(),
    long_description_content_type='text/markdown',
    # BUG FIX: ``include`` must be an iterable of glob patterns -- a bare
    # string was splatted into the single-character patterns 'e','n','v','s'
    # and matched no packages at all.
    packages=setuptools.find_packages(include=['peg_in_hole', 'peg_in_hole.*']),
    # BUG FIX: the setuptools keyword is ``package_data`` -- the previous
    # ``packages_data`` spelling was silently ignored, and it must map a
    # package name to relative paths, not arbitrary keys to absolute paths.
    package_data={'peg_in_hole': data_files},
    install_requires=['gym', 'pybullet', 'numpy'],
    url='https://github.com/guodashun/peg-in-hole-gym',
    author='luckky',
    author_email='<EMAIL>',
    license='MIT',
)
"pathlib.Path",
"setuptools.find_packages",
"os.path.join",
"os.path.realpath",
"os.walk"
] | [((198, 216), 'os.walk', 'os.walk', (['directory'], {}), '(directory)\n', (205, 216), False, 'import os\n'), ((95, 121), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (111, 121), False, 'import os\n'), ((543, 583), 'setuptools.find_packages', 'setuptools.find_packages', ([], {'include': '"""envs"""'}), "(include='envs')\n", (567, 583), False, 'import setuptools\n'), ((267, 291), 'os.path.join', 'os.path.join', (['root', 'file'], {}), '(root, file)\n', (279, 291), False, 'import os\n'), ((448, 465), 'pathlib.Path', 'Path', (['"""README.md"""'], {}), "('README.md')\n", (452, 465), False, 'from pathlib import Path\n')] |
#!/usr/bin/env python3.8
'Serial device.'
from time import time
import serial
from settings import Settings
class SerialDevice():
    'Communicate with a device over serial.'
    def __init__(self, settings):
        # Internal verbosity is one level below the global log verbosity.
        self.verbosity = settings['log_verbosity'] - 1
        self.log('Setting up serial connection...', verbosity=1)
        port = settings['serial_port']
        baud = settings['serial_baud_rate']
        # Timeout (seconds) for both pyserial reads and our own get() loop.
        self.timeout = 10
        self.serial = serial.Serial(port, baud, timeout=self.timeout)
        # Accumulates raw bytes read from the device until a full response
        # line (terminated by \r\n) has arrived.
        self.buffer = b''
        # Block until the firmware announces it is ready.
        self.get(['ARDUINO STARTUP COMPLETE'])
        self.speed = {}
        self.get_speeds()
        # NOTE(review): 'F22 P53 V1' presumably inverts the Z axis and
        # 'F84 X1 Y1 Z1' resets the position -- confirm with firmware docs.
        if settings['serial_z_negative']:
            self.send('F22 P53 V1')
        if settings['serial_reset_position']:
            self.send('F84 X1 Y1 Z1')
        self.validate_params()
    def log(self, message, **kwargs):
        'Print a message if its verbosity is enabled or it is an error.'
        error = kwargs.get('message_type') == 'error'
        if self.verbosity >= kwargs.get('verbosity', 2) or error:
            print(message)
    def send(self, command, wait_for_response=True):
        'Send a command and (optionally) wait for its matching R-code reply.'
        self.log(f'Sending {command}...')
        self.serial.write(bytes(command + '\r\n', 'utf-8'))
        # The device first echoes the command back.
        self.get([command])
        if wait_for_response:
            # Response code mirrors the command code (Fxx -> Rxx), with
            # special cases: F22 answers as R21; F00/F41 answer with R02.
            code = command.split(' ')[0][1:]
            if code == '22':
                code = '21'
            if code in ['00', '41']:
                self.get(['R02'])
            else:
                self.get([f'R{code}'])
    def get(self, responses, ignore_repeat=False):
        'Fetch a response from serial.  Returns None on timeout.'
        self.log(f'Waiting for {responses}...')
        bytes_responses = [bytes(response, 'utf-8') for response in responses]
        found = False
        start = time()
        last_dot = start - 0.11
        while True:
            # At high verbosity, print progress dots (~10 per second).
            if self.verbosity > 2:
                since_last = time() - last_dot
                if since_last > 0.1:
                    print('.' * int(since_last * 10), end='', flush=True)
                    last_dot = time()
            if (time() - start) > self.timeout:
                print('timeout')
                return None
            # Read one byte at a time into the line buffer.
            self.buffer += self.serial.read()
            if any(self.buffer.endswith(resp) for resp in bytes_responses):
                found = True
            if found and self.buffer.endswith(b'\r\n'):
                # Last complete line received before the terminator.
                last = self.buffer.split(b'\r\n')[-2]
                # Optionally skip R08 lines and keep waiting.
                if ignore_repeat and b'R08' in last:
                    found = False
                    self.buffer = b''
                    continue
                # Strip the leading code and trailing token, keep the
                # space-separated payload fields.
                value = last.split(b' ')[1:-1]
                self.log(f'received {value}')
                self.buffer = b''
                return value
    def validate_params(self):
        'Validate firmware parameters.'
        self.log('Validating firmware parameters...')
        self.send('F83')
        self.get(['R00', 'R88'])
        self.send('F22 P2 V1')
    def get_speeds(self):
        'Get axis speed values.'
        self.log('Fetching firmware parameters...')
        # Firmware parameters 71/72/73 hold the x/y/z axis speeds; replies
        # look like 'R21 ... V<speed> ...'.
        for axis, param in {'x': 71, 'y': 72, 'z': 73}.items():
            self.send(f'F21 P{param}', wait_for_response=False)
            self.speed[axis] = int(self.get(['R21'])[1].strip(b'V'))
    def wait_for_idle(self):
        'Wait for idle response.'
        self.log('Waiting for idle...')
        self.get(['R00'])
    @staticmethod
    def read_status():
        'Read status.'
        # NOTE(review): intentionally a stub -- no status command is issued.
    def get_current_position(self):
        'Get current device coordinates.'
        self.send('F82', wait_for_response=False)
        # Reply fields look like 'X<val> Y<val> Z<val>'; drop the axis letter.
        position = [float(r[1:]) for r in self.get(['R82'])]
        coordinate = {'x': position[0], 'y': position[1], 'z': position[2]}
        self.log(f'current position: {coordinate}')
        return coordinate
    def move_relative(self, x, y, z, speed):
        'Move by (x, y, z) at *speed* percent of the configured axis speeds.'
        position = self.get_current_position()
        position['x'] += x
        position['y'] += y
        position['z'] += z
        self.log(f'Moving to {position}', verbosity=1)
        x_spd = self.speed['x'] * speed / 100.
        y_spd = self.speed['y'] * speed / 100.
        z_spd = self.speed['z'] * speed / 100.
        # G00: absolute move to the computed target, per-axis speeds A/B/C.
        command = f"G00 X{position['x']} Y{position['y']} Z{position['z']}"
        command += f' A{x_spd} B{y_spd} C{z_spd}'
        self.send(command)
    def write_pin(self, pin_number, pin_value, pin_mode):
        'Write pin value.'
        self.log(f'Setting pin {pin_number} to {pin_value}')
        self.send(f'F41 P{pin_number} V{pin_value} M{pin_mode}')
if __name__ == '__main__':
    # Smoke test: connect using saved settings, report the current position,
    # then issue a zero-distance move at 50% speed.
    device = SerialDevice(Settings().settings)
    print(device.get_current_position())
    device.move_relative(0, 0, 0, 50)
| [
"settings.Settings",
"serial.Serial",
"time.time"
] | [((465, 512), 'serial.Serial', 'serial.Serial', (['port', 'baud'], {'timeout': 'self.timeout'}), '(port, baud, timeout=self.timeout)\n', (478, 512), False, 'import serial\n'), ((1768, 1774), 'time.time', 'time', ([], {}), '()\n', (1772, 1774), False, 'from time import time\n'), ((4620, 4630), 'settings.Settings', 'Settings', ([], {}), '()\n', (4628, 4630), False, 'from settings import Settings\n'), ((1891, 1897), 'time.time', 'time', ([], {}), '()\n', (1895, 1897), False, 'from time import time\n'), ((2051, 2057), 'time.time', 'time', ([], {}), '()\n', (2055, 2057), False, 'from time import time\n'), ((2074, 2080), 'time.time', 'time', ([], {}), '()\n', (2078, 2080), False, 'from time import time\n')] |
import cv2
'''
OpenCV comes with a trainer as well as detector.
If you want to train your own classifier for any object like car, planes etc.
you can use OpenCV to create one.
Its full details are given here: Cascade Classifier Training.
OpenCV already contains many pre-trained classifiers for face, eyes, smile etc.
Those XML files are stored in opencv/data/haarcascades/ folder.
Let’s create face and eye detector with OpenCV.
'''
def detection(image):
    """Annotate *image* with rectangles around detected faces and eyes.

    Faces are outlined in blue, eyes in green, using OpenCV's pre-trained
    Haar cascades (XML files must be in the working directory). Returns
    the annotated image.
    """
    face_cascade = cv2.CascadeClassifier('haarcascade_frontalface_default.xml')
    eye_cascade = cv2.CascadeClassifier('haarcascade_eye.xml')
    gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    for (x, y, w, h) in face_cascade.detectMultiScale(gray, 1.3, 5):
        image = cv2.rectangle(image, (x, y), (x + w, y + h), (255, 0, 0), 2)
        # Search for eyes only inside the detected face region.
        face_gray = gray[y:y + h, x:x + w]
        face_color = image[y:y + h, x:x + w]
        for (ex, ey, ew, eh) in eye_cascade.detectMultiScale(face_gray):
            cv2.rectangle(face_color, (ex, ey), (ex + ew, ey + eh), (0, 255, 0), 2)
    return image
| [
"cv2.rectangle",
"cv2.CascadeClassifier",
"cv2.cvtColor"
] | [((484, 544), 'cv2.CascadeClassifier', 'cv2.CascadeClassifier', (['"""haarcascade_frontalface_default.xml"""'], {}), "('haarcascade_frontalface_default.xml')\n", (505, 544), False, 'import cv2\n'), ((563, 607), 'cv2.CascadeClassifier', 'cv2.CascadeClassifier', (['"""haarcascade_eye.xml"""'], {}), "('haarcascade_eye.xml')\n", (584, 607), False, 'import cv2\n'), ((619, 658), 'cv2.cvtColor', 'cv2.cvtColor', (['image', 'cv2.COLOR_BGR2GRAY'], {}), '(image, cv2.COLOR_BGR2GRAY)\n', (631, 658), False, 'import cv2\n'), ((762, 822), 'cv2.rectangle', 'cv2.rectangle', (['image', '(x, y)', '(x + w, y + h)', '(255, 0, 0)', '(2)'], {}), '(image, (x, y), (x + w, y + h), (255, 0, 0), 2)\n', (775, 822), False, 'import cv2\n'), ((1013, 1083), 'cv2.rectangle', 'cv2.rectangle', (['roi_color', '(ex, ey)', '(ex + ew, ey + eh)', '(0, 255, 0)', '(2)'], {}), '(roi_color, (ex, ey), (ex + ew, ey + eh), (0, 255, 0), 2)\n', (1026, 1083), False, 'import cv2\n')] |
import json
import psycopg2
import os
from psycopg2._psycopg import IntegrityError
from psycopg2.errorcodes import UNIQUE_VIOLATION
from logging import getLogger
def create_db_connection():
    """Open a PostgreSQL connection from the DB_CONNECTION_STRING env var."""
    connection_string = os.environ['DB_CONNECTION_STRING']
    return psycopg2.connect(connection_string)
class RunInTransaction:
    """Context manager yielding a cursor for *connection*.

    Commits on a clean exit. BUG FIX: the original committed even when the
    managed block raised, persisting partial work; we now roll back instead
    (the exception still propagates to the caller).
    """
    def __init__(self, connection):
        self.__connection = connection

    def __enter__(self):
        # Hand the caller a fresh cursor bound to the managed connection.
        return self.__connection.cursor()

    def __exit__(self, type, value, traceback):
        if type is None:
            self.__connection.commit()
        else:
            # Do not persist partial work from a failed block.
            self.__connection.rollback()
def write_to_database(event, db_connection):
    """Persist *event* as a row in the events table.

    A duplicate event_id (unique-constraint violation) is treated as
    already recorded: it is logged and ignored. Any other integrity
    error propagates to the caller.
    """
    try:
        with RunInTransaction(db_connection) as cursor:
            cursor.execute("""
                INSERT INTO events
                (event_id, event_type, timestamp, details)
                VALUES
                (%s, %s, %s, %s);
            """, [
                event.event_id,
                event.event_type,
                event.timestamp,
                json.dumps(event.details)
            ])
    except IntegrityError as integrity_error:
        if integrity_error.pgcode != UNIQUE_VIOLATION:
            raise integrity_error
        # The event was already recorded — no point retrying this message,
        # so just log a notification and move on.
        getLogger('event-recorder').warning('Failed to store message. The Event ID {0} already exists in the database'.format(event.event_id))
| [
"psycopg2.connect",
"json.dumps",
"logging.getLogger"
] | [((204, 256), 'psycopg2.connect', 'psycopg2.connect', (["os.environ['DB_CONNECTION_STRING']"], {}), "(os.environ['DB_CONNECTION_STRING'])\n", (220, 256), False, 'import psycopg2\n'), ((939, 964), 'json.dumps', 'json.dumps', (['event.details'], {}), '(event.details)\n', (949, 964), False, 'import json\n'), ((1252, 1279), 'logging.getLogger', 'getLogger', (['"""event-recorder"""'], {}), "('event-recorder')\n", (1261, 1279), False, 'from logging import getLogger\n')] |
import os
import sys
def pytest_addoption(parser):
    """Register the selenium-related command line flags with pytest."""
    flags = [
        (('--selenium', '-S'),
         dict(action='store_true', dest='enable_selenium',
              default=False, help='enable selenium tests')),
        (('--show-browser', '-B'),
         dict(action='store_true', dest='show_browser',
              default=False, help='will display start browsers in selenium tests')),
    ]
    for names, options in flags:
        parser.addoption(*names, **options)
def pytest_configure(config):
    """Configure the session: force Chrome, honour the selenium flags,
    register custom markers and bootstrap the demo Django project."""
    sys._called_from_pytest = True  # lets application code detect a test run
    setattr(config.option, 'driver', 'chrome')
    if config.option.show_browser:
        # Showing the browser implies running the selenium suite.
        setattr(config.option, 'enable_selenium', True)
    if not config.option.enable_selenium:
        # Deselect all tests marked `selenium`.
        setattr(config.option, 'markexpr', 'not selenium')
    config.addinivalue_line('markers', 'skip_if_ci: this mark skips the tests on GitlabCI')
    config.addinivalue_line('markers', 'skip_test_if_env(env): this mark skips the tests for the given env')
    # Make the bundled demo app importable, then initialise Django against it.
    sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'demoapp'))
    os.environ['DJANGO_SETTINGS_MODULE'] = 'demo.settings'
    import django
    django.setup()
| [
"os.path.dirname",
"django.setup"
] | [((1050, 1064), 'django.setup', 'django.setup', ([], {}), '()\n', (1062, 1064), False, 'import django\n'), ((930, 955), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (945, 955), False, 'import os\n')] |
import pytest
import numpy as np
from sklearn.ensemble import RandomForestClassifier
from ..sequential import sequential
import pkg_resources
PATH = pkg_resources.resource_filename(__name__, 'test_data/')
def test_sequential():
    """Test sequential feature selection in both directions."""
    # load data
    X = np.load(PATH + 'features_largeN.npy')
    X = X[:, :20]
    y = np.load(PATH + 'features_largeN_labels.npy')
    # perform SFS
    clf = RandomForestClassifier(n_estimators=100)
    X_fwd = sequential(X, y, estimator=clf)
    X_bwd = sequential(X, y, estimator=clf, direction='backward')
    # BUG FIX: the original bare comparisons (`X_fwd.shape == (700, 10)`)
    # were discarded expressions, so the test could never fail on shape.
    assert X_fwd.shape == (700, 10)
    assert X_bwd.shape == (700, 10)
"numpy.load",
"sklearn.ensemble.RandomForestClassifier",
"pkg_resources.resource_filename"
] | [((151, 206), 'pkg_resources.resource_filename', 'pkg_resources.resource_filename', (['__name__', '"""test_data/"""'], {}), "(__name__, 'test_data/')\n", (182, 206), False, 'import pkg_resources\n'), ((300, 337), 'numpy.load', 'np.load', (["(PATH + 'features_largeN.npy')"], {}), "(PATH + 'features_largeN.npy')\n", (307, 337), True, 'import numpy as np\n'), ((361, 405), 'numpy.load', 'np.load', (["(PATH + 'features_largeN_labels.npy')"], {}), "(PATH + 'features_largeN_labels.npy')\n", (368, 405), True, 'import numpy as np\n'), ((433, 473), 'sklearn.ensemble.RandomForestClassifier', 'RandomForestClassifier', ([], {'n_estimators': '(100)'}), '(n_estimators=100)\n', (455, 473), False, 'from sklearn.ensemble import RandomForestClassifier\n')] |
from __future__ import annotations
import os
import typing
from jj2.client import Client, log
from jj2.connection import ConnectionEvent
from jj2.exceptions import ArgumentError
from jj2.game.connection import GameConnection, GameVersion, ConnectionFlag
from jj2.game.objects import PartialRabbit, Rabbit, Message
from jj2.game.protocol import TCP
if typing.TYPE_CHECKING:
from typing import Any
from jj2.game.connection import GameConnection
class GameClient(Client):
    """Client that joins JJ2 game servers through :class:`GameConnection`.

    Manages 1-4 rabbits, an optional server-group IP list (SGIP) for
    name-based lookups, and an optional reconnect strategy.
    """
    connection_class = GameConnection
    version = GameVersion.from_str('1.24+')  # default protocol version

    def __init__(
            self,
            *rabbits: PartialRabbit,
            version: GameVersion | str = version,
            reconnect_strategy=None,
            reconnect_time: int | None = None,
            new_sgip: bool = True,
            use_sgip: Any | None = None,  # fixme
            sgip_timeout: int | float = 0.5,
            **connection_params
    ):
        super().__init__(reconnect_time, **connection_params)
        if use_sgip is not None:
            new_sgip = False
        # BUG FIX: the original test `0 >= len(rabbits) > 4` is a chained
        # comparison (len <= 0 AND len > 4) which can never be true, so the
        # 1-4 rabbit limit was silently unenforced.
        if not 1 <= len(rabbits) <= 4:
            raise ArgumentError(
                f'{GameClient.__name__!r} consists of from 1 to 4 rabbits',
                method=f'{type(self).__name__}.__init__()'
            )
        for rabbit in rabbits:
            if not isinstance(rabbit, (PartialRabbit, Rabbit)):
                raise TypeError(
                    f'Rabbit instance expected, got '
                    f'{type(rabbit).__name__!r}'
                )
        self.rabbits = rabbits
        self.version_info = GameVersion.from_str(version)
        self.reconnect_strategy = reconnect_strategy
        self._sgip = use_sgip
        self._sgip_event = ConnectionEvent()
        if new_sgip:
            # Build a fresh SGIP list in the background; the event is set
            # once the list is usable (see lookup_connection).
            from jj2.sgip import sgip, SGIPList
            self._sgip = SGIPList()
            sgip(self._sgip, event=self._sgip_event, timeout=sgip_timeout)
        else:
            self._sgip_event.set()

    @property
    def sgip(self):
        """The SGIP list used for server-name lookups."""
        return self._sgip

    @property
    def is_plus(self) -> bool:
        """Whether the configured protocol version is a JJ2+ version."""
        return self.version_info.is_plus

    async def connection_scheduled(self, connection):
        await self.join(connection)

    async def lookup_connection(self, lookup):
        """Resolve a server name to a connection via the SGIP list."""
        await self._sgip_event.wait()
        game = self.sgip.lookup(lookup.target)
        if game is not None:
            return self.create_connection(game.address)
        raise LookupError(f'Couldn\'t lookup a game named {lookup.target!r}')

    @staticmethod
    async def join(connection: GameConnection) -> None:
        await connection.join()

    @staticmethod
    async def rejoin(connection: GameConnection) -> None:
        await connection.rejoin()

    @staticmethod
    async def quit(connection, result: int = 0) -> None:
        await connection.quit(result)

    @staticmethod
    async def send(connection: GameConnection, text: Message | str, **params) -> Message:
        return await connection.send(text, **params)

    @staticmethod
    async def update(connection):
        await connection.update()

    @staticmethod
    async def spectate(connection, spectating=None):
        await connection.spectate(spectating)

    @staticmethod
    async def query(connection):
        await connection.query()

    write = send  # alias for API symmetry

    async def _on_data_receive(self, connection, data, protocol):
        log.debug(f'{self} RECV[{protocol}]<-{connection.address}: '.ljust(58) + data.hex(' '))

    async def _on_data_send(self, connection, data, protocol):
        log.debug(f'{self} SEND[{protocol}]->{connection.address}: '.ljust(58) + data.hex(' '))

    async def _on_disconnect(self, connection, reason):
        self.vanish(connection)
        connection.clear_event('join')
        if reason == 'Banned':
            connection.set_event('banned')
        if (
            reason in ('Error downloading level', 'Connection timed out')
            and self.reconnect_strategy is not None
        ):
            # Recoverable disconnects: delegate to the reconnect strategy.
            await self.reconnect_strategy.reconnect(self)
        else:
            await connection.quit()

    @staticmethod
    async def _on_server_details(connection, server_cache):
        data = connection.serializer.build_player_details(connection)
        await connection.send_raw(data, TCP)
        level = server_cache.level
        if connection.flags & ConnectionFlag.DOWNLOAD_LEVELS:
            # Fetch the level file only if it is not already cached locally.
            from jj2.config import CACHE_PATHS
            if not os.path.isfile(CACHE_PATHS['levels'].joinpath(level.file_name)):
                await connection.download(level.file_name)

    @staticmethod
    async def _on_level_cycle(connection, _level):
        await connection.update()

    @staticmethod
    async def _on_join(connection):
        if connection.is_plus and connection.is_udp:
            await connection.keep_alive()

    @staticmethod
    async def _on_update_ready(connection):
        connection.clear_event('banned')
        await connection.update()

    @staticmethod
    async def _on_scripts(connection, scripts):
        await connection.update()
        if connection.flags & ConnectionFlag.DOWNLOAD_SCRIPTS:
            for script in scripts:
                # Fetch each script file only if it is not already cached.
                from jj2.config import CACHE_PATHS
                if not os.path.isfile(CACHE_PATHS['scripts'].joinpath(script.file_name)):
                    await connection.download(script.file_name)

    @staticmethod
    async def _on_keep_alive(connection):
        await connection.keep_alive()

    def __copy__(self):
        raise TypeError('can\'t copy a Client instance')

    __deepcopy__ = __copy__
| [
"jj2.sgip.sgip",
"jj2.sgip.SGIPList",
"jj2.game.connection.GameVersion.from_str",
"jj2.connection.ConnectionEvent"
] | [((534, 563), 'jj2.game.connection.GameVersion.from_str', 'GameVersion.from_str', (['"""1.24+"""'], {}), "('1.24+')\n", (554, 563), False, 'from jj2.game.connection import GameConnection, GameVersion, ConnectionFlag\n'), ((1589, 1618), 'jj2.game.connection.GameVersion.from_str', 'GameVersion.from_str', (['version'], {}), '(version)\n', (1609, 1618), False, 'from jj2.game.connection import GameConnection, GameVersion, ConnectionFlag\n'), ((1729, 1746), 'jj2.connection.ConnectionEvent', 'ConnectionEvent', ([], {}), '()\n', (1744, 1746), False, 'from jj2.connection import ConnectionEvent\n'), ((1841, 1851), 'jj2.sgip.SGIPList', 'SGIPList', ([], {}), '()\n', (1849, 1851), False, 'from jj2.sgip import sgip, SGIPList\n'), ((1864, 1926), 'jj2.sgip.sgip', 'sgip', (['self._sgip'], {'event': 'self._sgip_event', 'timeout': 'sgip_timeout'}), '(self._sgip, event=self._sgip_event, timeout=sgip_timeout)\n', (1868, 1926), False, 'from jj2.sgip import sgip, SGIPList\n')] |
import regex
class LexToken:
    """A lexical token: a type plus an optional tag (identifier name or numeric value)."""

    def __init__(self, type, tag=None):
        self.type = type
        self.tag = tag

    def __eq__(self, other):
        # BUG FIX: only compare against other LexToken instances; deferring
        # with NotImplemented makes `token == 5` evaluate to False instead
        # of raising AttributeError on `other.type`.
        if not isinstance(other, LexToken):
            return NotImplemented
        return self.type == other.type and self.tag == other.tag

    def __repr__(self):
        # Unambiguous debug form, e.g. LexToken(type='ID', tag='foo').
        return "LexToken(type={!r}, tag={!r})".format(self.type, self.tag)

    def __str__(self):
        return "Type:'{}' Tag:'{}'".format(self.type, self.tag)
def lex(content: str):
    """Tokenize C-minus-style source text into a list of LexToken objects.

    Invalid characters are reported to stdout (with line/column) and skipped.
    """
    line_n = 1
    result = list()
    # Build match patterns.
    # NOTE: (?p) is a `regex`-module flag (POSIX longest-match); this
    # pattern requires the third-party `regex` package, not stdlib `re`.
    pattern = regex.compile(r"""
    (?p)(?P<whitespace>\s)
    |(?P<if>if)
    |(?P<else>else)
    |(?P<int>int)
    |(?P<void>void)
    |(?P<return>return)
    |(?P<while>while)
    |(?P<operator>[\+\-\*/<>=;,\(\)\{\}{\\\*}{/\*}]|(?:==)|(?:<=)|(?:>=)|(?:!=))
    |(?P<ID>[a-zA-Z]+)
    |(?P<NUM>[0-9]+)
    """, regex.VERBOSE)
    # Split file contents by its lines.
    lines = content.splitlines()
    # For each line, scan
    for line in lines:
        column_n = 1  # 1-based column for error reporting
        while(column_n <= len(line)):
            # Find the longest match.
            match = pattern.match(line, column_n - 1)
            # If no match was found, next character must be invalid. Raise a error message and move towards the next character.
            if not match:
                print("Error: invalid character at line ",
                      line_n, ", column ", column_n, ".", sep='')
                column_n += 1
                continue
            # If whitespace was found, ignore it.
            elif match.lastgroup == "whitespace":
                pass
            # If an operator was found, add the identified operator into the list.
            elif match.lastgroup == "operator":
                result.append(LexToken(match.group()))
            # If an ID was found, create a token associated with the idenfier name.
            elif match.lastgroup == "ID":
                result.append(LexToken("ID", match.group()))
            # If a NUM was found, create a token associated with the numeric value as int.
            elif match.lastgroup == "NUM":
                result.append(LexToken("NUM", int(match.group())))
            # Otherwise, just add the keyword name.
            else:
                result.append(LexToken(match.lastgroup))
            # Advance the stream towards the character after the matched token.
            column_n += len(match.group())
        line_n += 1
    return result
| [
"regex.compile"
] | [((405, 740), 'regex.compile', 'regex.compile', (['"""\n (?p)(?P<whitespace>\\\\s)\n |(?P<if>if)\n |(?P<else>else)\n |(?P<int>int)\n |(?P<void>void)\n |(?P<return>return)\n |(?P<while>while)\n |(?P<operator>[\\\\+\\\\-\\\\*/<>=;,\\\\(\\\\)\\\\{\\\\}{\\\\\\\\\\\\*}{/\\\\*}]|(?:==)|(?:<=)|(?:>=)|(?:!=))\n |(?P<ID>[a-zA-Z]+)\n |(?P<NUM>[0-9]+)\n """', 'regex.VERBOSE'], {}), '(\n """\n (?p)(?P<whitespace>\\\\s)\n |(?P<if>if)\n |(?P<else>else)\n |(?P<int>int)\n |(?P<void>void)\n |(?P<return>return)\n |(?P<while>while)\n |(?P<operator>[\\\\+\\\\-\\\\*/<>=;,\\\\(\\\\)\\\\{\\\\}{\\\\\\\\\\\\*}{/\\\\*}]|(?:==)|(?:<=)|(?:>=)|(?:!=))\n |(?P<ID>[a-zA-Z]+)\n |(?P<NUM>[0-9]+)\n """\n , regex.VERBOSE)\n', (418, 740), False, 'import regex\n')] |
from django.urls import path
from .views import (
change_password,
login,
logout,
profile,
register,
register_email,
reset_password,
send_reset_password_link,
verify_email,
verify_registration
)
app_name = 'rest_registration'

# Endpoint table for the rest_registration app: account registration and
# verification, session handling (login/logout), profile, and
# password/e-mail management flows.
urlpatterns = [
    path('register/', register, name='register'),
    path('verify-registration/', verify_registration, name='verify-registration'),
    path(
        'send-reset-password-link/', send_reset_password_link,
        name='send-reset-password-link',
    ),
    path('reset-password/', reset_password, name='reset-password'),
    path('login/', login, name='login'),
    path('logout/', logout, name='logout'),
    path('profile/', profile, name='profile'),
    path('change-password/', change_password, name='change-password'),
    path('register-email/', register_email, name='register-email'),
    path('verify-email/', verify_email, name='verify-email'),
]
| [
"django.urls.path"
] | [((288, 332), 'django.urls.path', 'path', (['"""register/"""', 'register'], {'name': '"""register"""'}), "('register/', register, name='register')\n", (292, 332), False, 'from django.urls import path\n'), ((338, 415), 'django.urls.path', 'path', (['"""verify-registration/"""', 'verify_registration'], {'name': '"""verify-registration"""'}), "('verify-registration/', verify_registration, name='verify-registration')\n", (342, 415), False, 'from django.urls import path\n'), ((422, 519), 'django.urls.path', 'path', (['"""send-reset-password-link/"""', 'send_reset_password_link'], {'name': '"""send-reset-password-link"""'}), "('send-reset-password-link/', send_reset_password_link, name=\n 'send-reset-password-link')\n", (426, 519), False, 'from django.urls import path\n'), ((543, 605), 'django.urls.path', 'path', (['"""reset-password/"""', 'reset_password'], {'name': '"""reset-password"""'}), "('reset-password/', reset_password, name='reset-password')\n", (547, 605), False, 'from django.urls import path\n'), ((612, 647), 'django.urls.path', 'path', (['"""login/"""', 'login'], {'name': '"""login"""'}), "('login/', login, name='login')\n", (616, 647), False, 'from django.urls import path\n'), ((653, 691), 'django.urls.path', 'path', (['"""logout/"""', 'logout'], {'name': '"""logout"""'}), "('logout/', logout, name='logout')\n", (657, 691), False, 'from django.urls import path\n'), ((698, 739), 'django.urls.path', 'path', (['"""profile/"""', 'profile'], {'name': '"""profile"""'}), "('profile/', profile, name='profile')\n", (702, 739), False, 'from django.urls import path\n'), ((746, 811), 'django.urls.path', 'path', (['"""change-password/"""', 'change_password'], {'name': '"""change-password"""'}), "('change-password/', change_password, name='change-password')\n", (750, 811), False, 'from django.urls import path\n'), ((818, 880), 'django.urls.path', 'path', (['"""register-email/"""', 'register_email'], {'name': '"""register-email"""'}), "('register-email/', 
register_email, name='register-email')\n", (822, 880), False, 'from django.urls import path\n'), ((886, 942), 'django.urls.path', 'path', (['"""verify-email/"""', 'verify_email'], {'name': '"""verify-email"""'}), "('verify-email/', verify_email, name='verify-email')\n", (890, 942), False, 'from django.urls import path\n')] |
import tkinter as tk
from tkinter import *
from PIL import ImageTk, Image
import model
# Main (splash) window: fullscreen, with a background image and two buttons:
# PROCEED opens the detection window built by create(), Exit closes the app.
Height=735
Width=1375
root1=tk.Tk()
root1.attributes("-fullscreen", True)
root1.geometry('1366x768')
root1.title("COVID 19")
background_image1=tk.PhotoImage(file=('suru.png'))
background_label1=tk.Label(root1,image=background_image1)
background_label1.place(relwidth=1,relheight=1)
f1=Frame(root1)
f1.grid(row=0,column=0)
#Button(f1, text='Proceed',command=lambda:swap(f2)).pack()
frame4=tk.Frame(root1,bd=5,bg='gray')
frame4.place(relx=0.516, rely=0.8, relwidth=0.2, relheight=0.1 , anchor='n')
# PROCEED button — opens the detection window.
Button2 = tk.Button(frame4,text='PROCEED',font=("bold", 30),bg='skyBlue1',command=lambda:create())
Button2.place(relx=0,rely=0,relwidth=1, relheight=1)
frame_5=tk.Frame(root1,bd=5,bg='gray')
frame_5.place(relx=0.93, rely=0.88, relwidth=0.08, relheight=0.08 , anchor='n')
button_3 = tk.Button(frame_5, text="Exit",font=("bold", 30),command=root1.destroy)
button_3.place(relx=0,rely=0,relwidth=1, relheight=1)
def create():
    """Open the detection window: image picker, Predict button and result panel."""
    def change(*args):
        # Trace callback: remember the currently selected image file name.
        global home
        home = var.get()
    def frame11():
        # Secondary (hidden, 0x0) window showing the background image.
        root2=Toplevel(root1)
        canvs=tk.Canvas(root2,height=Height,width=Width)
        canvs.pack()
        root2.title(" Corona Detection ")
        root2.geometry('0x0')
        background_image=tk.PhotoImage(file='corona.png')
        background_label=tk.Label(root2,image=background_image)
        background_label.place(relwidth=1,relheight=1)
        root2.mainloop()
    root=Toplevel(root1)
    canvs=tk.Canvas(root,height=Height,width=Width)
    canvs.pack()
    root.title(" Corona Detection ")
    root.attributes("-fullscreen", True)
    background_image=tk.PhotoImage(file='corona.png')
    background_label=tk.Label(root,image=background_image)
    background_label.place(relwidth=1,relheight=1)
    # Candidate X-ray image files selectable from the drop-down.
    OPTIONS=[
    "0.jpeg",
    "1.jpeg",
    "2.jpeg",
    '3.jpeg',
    "4.jpeg",
    "5.jpeg",
    "6.jpeg",
    "7.jpeg",
    "8.jpeg",
    "9.jpg",
    ]
    var=tk.StringVar(root)
    var.set(OPTIONS[0])
    var.trace("w",change)
    frame2=tk.Frame(root,bd=5,bg='peach puff')
    frame2.place(relx=0.2252, rely=0.28, relwidth=0.263, relheight=0.08 , anchor='n')
    dropDownMenu=tk.OptionMenu(frame2,var,OPTIONS[0],OPTIONS[1],OPTIONS[2],OPTIONS[3],OPTIONS[4],OPTIONS[5],OPTIONS[6],OPTIONS[7],OPTIONS[8],OPTIONS[9])
    dropDownMenu.config(width=6,)
    dropDownMenu.place(relx=0,rely=0,relwidth=1, relheight=1)
    frame=tk.Frame(root,bd=5,bg='green')
    frame.place(relx=0.502, rely=0.48, relwidth=0.198, relheight=0.1 , anchor='n')
    label_7 = Label(root, text="DETECT CORONA PATIENT", width=24, font=("bold", 15),bg='skyBlue1')
    label_7.place(x=550,y=340)
    photo22=tk.PhotoImage(file='np.png')
    # Predict button — runs the model on the selected image.
    button2 = tk.Button(frame, text="Predict",image=photo22,command=lambda:result_function(home))
    button2.place(relx=0,rely=0,relwidth=1, relheight=1)
    label_7 = Label(root, text="SELECT IMAGE ", width=20, font=("bold", 20),bg='peach puff')
    label_7.place(x=130, y=185)
    frame3=tk.Frame(root,bd=5,bg='gray')
    frame3.place(relx=0.93, rely=0.88, relwidth=0.08, relheight=0.08 , anchor='n')
    button1 = tk.Button(frame3, text="Exit",font=("bold", 30),command=root.destroy)
    button1.place(relx=0,rely=0,relwidth=1, relheight=1)
    def result_function(home):
        # Run the classifier on the chosen image and show the verdict panel.
        predict = model.predictwin(home)
        if predict==1:
            frame=tk.Frame(root,bd=5,bg='gray')
            frame.place(relx=0.502, rely=0.65, relwidth=0.325, relheight=0.21 , anchor='n')
            label_1 = Label(root, text="RESULT ", width=30, font=("bold", 15),bg='peach puff')
            label_1.place(x=463, y=480)
            photo2=tk.PhotoImage(file='positive.png')
            button = tk.Button(frame, text="result",image=photo2)
            button.place(relx=0,rely=0,relwidth=1, relheight=1)
            print("You tested corona Positive")
            label_2=Label(root,width=25, font=("bold", 20),bg='Azure3')
            label_2['text']="You tested corona Positive"
            label_2.place(x=463, y=510)
        else:
            frame=tk.Frame(root,bd=5,bg='gray')
            frame.place(relx=0.502, rely=0.65, relwidth=0.325, relheight=0.21 , anchor='n')
            label_1 = Label(root, text="RESULT ", width=40, font=("bold", 15),bg='peach puff')
            label_1.place(x=463, y=480)
            photo2=tk.PhotoImage(file='n.png')
            button = tk.Button(frame, text="result",image=photo2)
            button.place(relx=0,rely=0,relwidth=1, relheight=1)
            print("You tested corona Negative")
            label_2=Label(root,width=25, font=("bold",20),bg='Azure3')
            label_2['text']="You tested corona Negative"
            label_2.place(x=463, y=510)
    frame11()
    root.mainloop()
root1.mainloop()
| [
"tkinter.Button",
"tkinter.Canvas",
"tkinter.StringVar",
"tkinter.Tk",
"tkinter.Label",
"tkinter.OptionMenu",
"tkinter.PhotoImage",
"model.predictwin",
"tkinter.Frame"
] | [((119, 126), 'tkinter.Tk', 'tk.Tk', ([], {}), '()\n', (124, 126), True, 'import tkinter as tk\n'), ((235, 265), 'tkinter.PhotoImage', 'tk.PhotoImage', ([], {'file': '"""suru.png"""'}), "(file='suru.png')\n", (248, 265), True, 'import tkinter as tk\n'), ((286, 326), 'tkinter.Label', 'tk.Label', (['root1'], {'image': 'background_image1'}), '(root1, image=background_image1)\n', (294, 326), True, 'import tkinter as tk\n'), ((485, 517), 'tkinter.Frame', 'tk.Frame', (['root1'], {'bd': '(5)', 'bg': '"""gray"""'}), "(root1, bd=5, bg='gray')\n", (493, 517), True, 'import tkinter as tk\n'), ((759, 791), 'tkinter.Frame', 'tk.Frame', (['root1'], {'bd': '(5)', 'bg': '"""gray"""'}), "(root1, bd=5, bg='gray')\n", (767, 791), True, 'import tkinter as tk\n'), ((884, 957), 'tkinter.Button', 'tk.Button', (['frame_5'], {'text': '"""Exit"""', 'font': "('bold', 30)", 'command': 'root1.destroy'}), "(frame_5, text='Exit', font=('bold', 30), command=root1.destroy)\n", (893, 957), True, 'import tkinter as tk\n'), ((1489, 1532), 'tkinter.Canvas', 'tk.Canvas', (['root'], {'height': 'Height', 'width': 'Width'}), '(root, height=Height, width=Width)\n', (1498, 1532), True, 'import tkinter as tk\n'), ((1638, 1670), 'tkinter.PhotoImage', 'tk.PhotoImage', ([], {'file': '"""corona.png"""'}), "(file='corona.png')\n", (1651, 1670), True, 'import tkinter as tk\n'), ((1689, 1727), 'tkinter.Label', 'tk.Label', (['root'], {'image': 'background_image'}), '(root, image=background_image)\n', (1697, 1727), True, 'import tkinter as tk\n'), ((1920, 1938), 'tkinter.StringVar', 'tk.StringVar', (['root'], {}), '(root)\n', (1932, 1938), True, 'import tkinter as tk\n'), ((1992, 2029), 'tkinter.Frame', 'tk.Frame', (['root'], {'bd': '(5)', 'bg': '"""peach puff"""'}), "(root, bd=5, bg='peach puff')\n", (2000, 2029), True, 'import tkinter as tk\n'), ((2128, 2278), 'tkinter.OptionMenu', 'tk.OptionMenu', (['frame2', 'var', 'OPTIONS[0]', 'OPTIONS[1]', 'OPTIONS[2]', 'OPTIONS[3]', 'OPTIONS[4]', 'OPTIONS[5]', 
'OPTIONS[6]', 'OPTIONS[7]', 'OPTIONS[8]', 'OPTIONS[9]'], {}), '(frame2, var, OPTIONS[0], OPTIONS[1], OPTIONS[2], OPTIONS[3],\n OPTIONS[4], OPTIONS[5], OPTIONS[6], OPTIONS[7], OPTIONS[8], OPTIONS[9])\n', (2141, 2278), True, 'import tkinter as tk\n'), ((2363, 2395), 'tkinter.Frame', 'tk.Frame', (['root'], {'bd': '(5)', 'bg': '"""green"""'}), "(root, bd=5, bg='green')\n", (2371, 2395), True, 'import tkinter as tk\n'), ((2609, 2637), 'tkinter.PhotoImage', 'tk.PhotoImage', ([], {'file': '"""np.png"""'}), "(file='np.png')\n", (2622, 2637), True, 'import tkinter as tk\n'), ((2922, 2953), 'tkinter.Frame', 'tk.Frame', (['root'], {'bd': '(5)', 'bg': '"""gray"""'}), "(root, bd=5, bg='gray')\n", (2930, 2953), True, 'import tkinter as tk\n'), ((3046, 3117), 'tkinter.Button', 'tk.Button', (['frame3'], {'text': '"""Exit"""', 'font': "('bold', 30)", 'command': 'root.destroy'}), "(frame3, text='Exit', font=('bold', 30), command=root.destroy)\n", (3055, 3117), True, 'import tkinter as tk\n'), ((1153, 1197), 'tkinter.Canvas', 'tk.Canvas', (['root2'], {'height': 'Height', 'width': 'Width'}), '(root2, height=Height, width=Width)\n', (1162, 1197), True, 'import tkinter as tk\n'), ((1296, 1328), 'tkinter.PhotoImage', 'tk.PhotoImage', ([], {'file': '"""corona.png"""'}), "(file='corona.png')\n", (1309, 1328), True, 'import tkinter as tk\n'), ((1348, 1387), 'tkinter.Label', 'tk.Label', (['root2'], {'image': 'background_image'}), '(root2, image=background_image)\n', (1356, 1387), True, 'import tkinter as tk\n'), ((3211, 3233), 'model.predictwin', 'model.predictwin', (['home'], {}), '(home)\n', (3227, 3233), False, 'import model\n'), ((3260, 3291), 'tkinter.Frame', 'tk.Frame', (['root'], {'bd': '(5)', 'bg': '"""gray"""'}), "(root, bd=5, bg='gray')\n", (3268, 3291), True, 'import tkinter as tk\n'), ((3501, 3535), 'tkinter.PhotoImage', 'tk.PhotoImage', ([], {'file': '"""positive.png"""'}), "(file='positive.png')\n", (3514, 3535), True, 'import tkinter as tk\n'), ((3548, 3593), 'tkinter.Button', 
'tk.Button', (['frame'], {'text': '"""result"""', 'image': 'photo2'}), "(frame, text='result', image=photo2)\n", (3557, 3593), True, 'import tkinter as tk\n'), ((3846, 3877), 'tkinter.Frame', 'tk.Frame', (['root'], {'bd': '(5)', 'bg': '"""gray"""'}), "(root, bd=5, bg='gray')\n", (3854, 3877), True, 'import tkinter as tk\n'), ((4087, 4114), 'tkinter.PhotoImage', 'tk.PhotoImage', ([], {'file': '"""n.png"""'}), "(file='n.png')\n", (4100, 4114), True, 'import tkinter as tk\n'), ((4127, 4172), 'tkinter.Button', 'tk.Button', (['frame'], {'text': '"""result"""', 'image': 'photo2'}), "(frame, text='result', image=photo2)\n", (4136, 4172), True, 'import tkinter as tk\n')] |
# Melhore o jogo do DESAFIO 028 onde o computador vai "pensar" em um número entre 0 e 10. Só que agora o jogador vai tentar adivinhar até acertar, mostrando no final quantos palpites foram necessários para vencer.
from random import randint

contchute = 0
print('¬¬¬'*7)
print('    JOGO DE ADVINHAÇÃO')
print('¬¬¬'*7)
print('Irei pensar em um número de 1 até 10, e você tentará acertar')
num = randint(1, 10)
chute = int(input('chute um número: '))
contchute += 1
if chute == num:
    print('uau acertou de primeira')
else:
    print('infelizmente errou, dessa vez vou dá uma dica')
    while chute != num:
        chute = int(input('tente novamente,qual número eu estou pensando?: '))
        contchute += 1
        # BUG FIX: only hint while the guess is still wrong — the original
        # `else` printed 'chute mais baixo' even on a correct guess.
        if chute < num:
            print('chute mais alto')
        elif chute > num:
            print('chute mais baixo')
print('você chutou {} vezes, o número que eu pensei foi {}'.format(contchute, num))
| [
"random.randint"
] | [((386, 400), 'random.randint', 'randint', (['(1)', '(10)'], {}), '(1, 10)\n', (393, 400), False, 'from random import randint\n')] |
import numpy as np
from keras.models import Model
from keras.layers import Input, Conv2D, DepthwiseConv2D
# Sanity-check a single DepthwiseConv2D layer against saved reference data:
# rebuild the one-layer model, load its saved weights, then compare the
# prediction on a stored input against the stored expected output.
# model
inputs = Input(shape=(4, 4, 3))
x = DepthwiseConv2D((3, 3), strides=(
    1, 1), depth_multiplier=1, padding='same')(inputs)
model = Model(inputs, x)
model.load_weights('model.h5')
print(model.summary())
# data: reference input/output pair produced elsewhere
input_x = np.load('input_x.npy')
output_x = np.load('output_x.npy')
o = model.predict(input_x)
# True when the prediction matches the stored reference output
print(np.allclose(output_x, o))
| [
"numpy.allclose",
"keras.layers.DepthwiseConv2D",
"keras.layers.Input",
"keras.models.Model",
"numpy.load"
] | [((124, 146), 'keras.layers.Input', 'Input', ([], {'shape': '(4, 4, 3)'}), '(shape=(4, 4, 3))\n', (129, 146), False, 'from keras.layers import Input, Conv2D, DepthwiseConv2D\n'), ((248, 264), 'keras.models.Model', 'Model', (['inputs', 'x'], {}), '(inputs, x)\n', (253, 264), False, 'from keras.models import Model\n'), ((337, 359), 'numpy.load', 'np.load', (['"""input_x.npy"""'], {}), "('input_x.npy')\n", (344, 359), True, 'import numpy as np\n'), ((371, 394), 'numpy.load', 'np.load', (['"""output_x.npy"""'], {}), "('output_x.npy')\n", (378, 394), True, 'import numpy as np\n'), ((151, 226), 'keras.layers.DepthwiseConv2D', 'DepthwiseConv2D', (['(3, 3)'], {'strides': '(1, 1)', 'depth_multiplier': '(1)', 'padding': '"""same"""'}), "((3, 3), strides=(1, 1), depth_multiplier=1, padding='same')\n", (166, 226), False, 'from keras.layers import Input, Conv2D, DepthwiseConv2D\n'), ((429, 453), 'numpy.allclose', 'np.allclose', (['output_x', 'o'], {}), '(output_x, o)\n', (440, 453), True, 'import numpy as np\n')] |
from pymesh.TestCase import TestCase
from pymesh import distance_to_mesh, BVH
from pymesh.meshutils import generate_box_mesh
import numpy as np
class DistanceToMeshTest(TestCase):
    """Tests for pymesh.distance_to_mesh using the corners of a unit box."""

    def test_boundary_pts_cgal(self):
        """Box corner points must have zero squared distance (cgal engine)."""
        mesh = generate_box_mesh(
                np.array([0, 0, 0]), np.array([1, 1, 1]))
        pts = np.array([
            [0.0, 0.0, 0.0],
            [1.0, 1.0, 1.0] ])
        sq_dist, face_idx, closest_pts = distance_to_mesh(mesh, pts, "cgal")
        self.assert_array_equal(sq_dist, np.zeros(2))

    def test_boundary_pts_geogram(self):
        """Same check with the geogram engine, skipped when unavailable."""
        mesh = generate_box_mesh(
                np.array([0, 0, 0]), np.array([1, 1, 1]))
        pts = np.array([
            [0.0, 0.0, 0.0],
            [1.0, 1.0, 1.0] ])
        # geogram is an optional BVH backend; only run when it is compiled in.
        if "geogram" in BVH.available_engines:
            sq_dist, face_idx, closest_pts = distance_to_mesh(mesh, pts, "geogram")
            self.assert_array_equal(sq_dist, np.zeros(2))
| [
"numpy.array",
"numpy.zeros",
"pymesh.distance_to_mesh"
] | [((336, 380), 'numpy.array', 'np.array', (['[[0.0, 0.0, 0.0], [1.0, 1.0, 1.0]]'], {}), '([[0.0, 0.0, 0.0], [1.0, 1.0, 1.0]])\n', (344, 380), True, 'import numpy as np\n'), ((453, 488), 'pymesh.distance_to_mesh', 'distance_to_mesh', (['mesh', 'pts', '"""cgal"""'], {}), "(mesh, pts, 'cgal')\n", (469, 488), False, 'from pymesh import distance_to_mesh, BVH\n'), ((697, 741), 'numpy.array', 'np.array', (['[[0.0, 0.0, 0.0], [1.0, 1.0, 1.0]]'], {}), '([[0.0, 0.0, 0.0], [1.0, 1.0, 1.0]])\n', (705, 741), True, 'import numpy as np\n'), ((279, 298), 'numpy.array', 'np.array', (['[0, 0, 0]'], {}), '([0, 0, 0])\n', (287, 298), True, 'import numpy as np\n'), ((300, 319), 'numpy.array', 'np.array', (['[1, 1, 1]'], {}), '([1, 1, 1])\n', (308, 319), True, 'import numpy as np\n'), ((531, 542), 'numpy.zeros', 'np.zeros', (['(2)'], {}), '(2)\n', (539, 542), True, 'import numpy as np\n'), ((640, 659), 'numpy.array', 'np.array', (['[0, 0, 0]'], {}), '([0, 0, 0])\n', (648, 659), True, 'import numpy as np\n'), ((661, 680), 'numpy.array', 'np.array', (['[1, 1, 1]'], {}), '([1, 1, 1])\n', (669, 680), True, 'import numpy as np\n'), ((866, 904), 'pymesh.distance_to_mesh', 'distance_to_mesh', (['mesh', 'pts', '"""geogram"""'], {}), "(mesh, pts, 'geogram')\n", (882, 904), False, 'from pymesh import distance_to_mesh, BVH\n'), ((951, 962), 'numpy.zeros', 'np.zeros', (['(2)'], {}), '(2)\n', (959, 962), True, 'import numpy as np\n')] |
"""
Read configurations from a file
"""
import json
import os
from typing import Any, List
from json_minify import json_minify
from yellowbot.globalbag import GlobalBag
from yellowbot.loggingservice import LoggingService
class ConfigService:
def __init__(
self,
config_file: str = GlobalBag.CONFIG_FILE
) -> None:
"""Initialize this class.
:param config_file: JSON file with app configuration.
:type config_file: str
"""
# Create the logger and initialise it
self._logger = LoggingService.get_logger(__name__)
self._logger.info("Config service is starting")
# Load the config file
self._load_config_file(config_file)
def _load_config_file(self, config_file: str) -> None:
"""Load config key/value pairs from a file
:param config_file: name of the config file.
When only the filename is passed, this method searches the file
inside the root folder where the app was started.
In case of GAE environment launch, it is the same folder of app.yaml.
In case of a python launch, it is the same folder where python
executable was launched (generally, the src folder).
If the file is not found, this method looks inside the same folder
where this class file is stored.
Alternatively, It could also be a full path. In this case, it depends
on the hosting filesystem structure, and I don't know what's the
expected behavior under GAE.
:type config_file: str
:returns:
"""
self._config = {}
if not os.path.isfile(config_file):
# Folder where this file is, can work also without the abspath,
# but better for debug so full path is traced in the error
self._logger.info("Exact config file was not found, falling back to this class folder")
base_folder = os.path.abspath(os.path.dirname(__file__))
full_config_path = os.path.join(base_folder, config_file) # combine with the config file name
else:
full_config_path = config_file
# Now it has the file and full path with configurations
self._logger.info("Loading configurating from {}".format(full_config_path))
if os.path.isfile(full_config_path):
with open(full_config_path, 'r') as f:
json_with_comment = open(full_config_path).read()
# An error could be generated here if the config file is not
# OK (a missing , for example). I leave as it is, so a clear output
# is produced server side
self._config = json.loads(json_minify(json_with_comment))
else:
raise ValueError("Cannot find configuration file {}".format(full_config_path))
# Checks if the config files has real values
if len(self._config.keys()) == 0:
raise ValueError("Empty configuration file {}".format(full_config_path))
def get_config(
self,
key_to_read: str,
throw_error: bool = True
) -> Any:
"""Read a value from the configuration, throwing an error if it doesn't exist
:param key_to_read: the key to read
:type key_to_read: str
:param throw_error: if False, doesn't throw an error, but return None instead
:type throw_error: bool
:returns: the object associated wit the config key
"""
try:
return self._config[key_to_read]
except KeyError as e:
if throw_error:
raise ValueError(
"Non existing {} value in the config, please add it".format(key_to_read))
else:
return None
def change_authorized_keys(
self,
new_keys: List[str]
) -> None:
"""Substitutes old authorization keys with new ones. Useful for testing
purposes
:param new_keys: new keys to use
:type new_keys: str
"""
self._config["authorized_keys"] = new_keys
| [
"yellowbot.loggingservice.LoggingService.get_logger",
"os.path.join",
"os.path.isfile",
"os.path.dirname",
"json_minify.json_minify"
] | [((552, 587), 'yellowbot.loggingservice.LoggingService.get_logger', 'LoggingService.get_logger', (['__name__'], {}), '(__name__)\n', (577, 587), False, 'from yellowbot.loggingservice import LoggingService\n'), ((2335, 2367), 'os.path.isfile', 'os.path.isfile', (['full_config_path'], {}), '(full_config_path)\n', (2349, 2367), False, 'import os\n'), ((1666, 1693), 'os.path.isfile', 'os.path.isfile', (['config_file'], {}), '(config_file)\n', (1680, 1693), False, 'import os\n'), ((2043, 2081), 'os.path.join', 'os.path.join', (['base_folder', 'config_file'], {}), '(base_folder, config_file)\n', (2055, 2081), False, 'import os\n'), ((1985, 2010), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (2000, 2010), False, 'import os\n'), ((2733, 2763), 'json_minify.json_minify', 'json_minify', (['json_with_comment'], {}), '(json_with_comment)\n', (2744, 2763), False, 'from json_minify import json_minify\n')] |
# Generated by Django 3.2.9 on 2021-12-11 02:07
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('course', '0017_alter_recording_lesson'),
]
operations = [
migrations.AlterField(
model_name='recording',
name='lesson',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='course.lesson'),
),
]
| [
"django.db.models.ForeignKey"
] | [((375, 462), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""course.lesson"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'course.lesson')\n", (392, 462), False, 'from django.db import migrations, models\n')] |
import torch
import torch.nn as nn
import numpy as np
class IndexTranslator(object):
def __init__(self, state):
self.state = state
self.px = self.state[:, 0].reshape(-1, 1)
self.py = self.state[:, 1].reshape(-1, 1)
self.vx = self.state[:, 2].reshape(-1, 1)
self.vy = self.state[:, 3].reshape(-1, 1)
self.radius = self.state[:, 4].reshape(-1, 1)
self.pgx = self.state[:, 5].reshape(-1, 1)
self.pgy = self.state[:, 6].reshape(-1, 1)
self.v_pref = self.state[:, 7].reshape(-1, 1)
self.theta = self.state[:, 8].reshape(-1, 1)
self.px1 = self.state[:, 9].reshape(-1, 1)
self.py1 = self.state[:, 10].reshape(-1, 1)
self.vx1 = self.state[:, 11].reshape(-1, 1)
self.vy1 = self.state[:, 12].reshape(-1, 1)
self.radius1 = self.state[:, 13].reshape(-1, 1)
class ValueNetwork(nn.Module):
def __init__(self, state_dim, fc_layers, kinematic, reparametrization=True):
super(ValueNetwork, self).__init__()
self.reparametrization = reparametrization
if reparametrization:
state_dim = 15
self.kinematic = kinematic
self.value_network = nn.Sequential(nn.Linear(state_dim, fc_layers[0]), nn.ReLU(),
nn.Linear(fc_layers[0], fc_layers[1]), nn.ReLU(),
nn.Linear(fc_layers[1], fc_layers[2]), nn.ReLU(),
nn.Linear(fc_layers[2], 1))
def rotate(self, state, device):
# first translate the coordinate then rotate around the origin
# 'px', 'py', 'vx', 'vy', 'radius', 'pgx', 'pgy', 'v_pref', 'theta', 'px1', 'py1', 'vx1', 'vy1', 'radius1'
# 0 1 2 3 4 5 6 7 8 9 10 11 12 13
state = IndexTranslator(state.cpu().numpy())
dx = state.pgx - state.px
dy = state.pgy - state.py
rot = np.arctan2(state.pgy-state.py, state.pgx-state.px)
dg = np.linalg.norm(np.concatenate([dx, dy], axis=1), axis=1, keepdims=True)
v_pref = state.v_pref
vx = state.vx * np.cos(rot) + state.vy * np.sin(rot)
vy = state.vy * np.cos(rot) - state.vx * np.sin(rot)
radius = state.radius
if self.kinematic:
theta = state.theta - rot
else:
theta = state.theta
vx1 = state.vx1 * np.cos(rot) + state.vy1 * np.sin(rot)
vy1 = state.vy1 * np.cos(rot) - state.vx1 * np.sin(rot)
px1 = (state.px1 - state.px) * np.cos(rot) + (state.py1 - state.py) * np.sin(rot)
py1 = (state.py1 - state.py) * np.cos(rot) - (state.px1 - state.px) * np.sin(rot)
radius1 = state.radius1
radius_sum = radius + radius1
cos_theta = np.cos(theta)
sin_theta = np.sin(theta)
da = np.linalg.norm(np.concatenate([state.px - state.px1, state.py - state.py1], axis=1), axis=1, keepdims=True)
new_state = np.concatenate([dg, v_pref, vx, vy, radius, theta, vx1, vy1, px1, py1,
radius1, radius_sum, cos_theta, sin_theta, da], axis=1)
return torch.Tensor(new_state).to(device)
def forward(self, state, device):
if self.reparametrization:
state = self.rotate(state, device)
temp_value_network = self.value_network;
value = temp_value_network(state)
return value
| [
"torch.nn.ReLU",
"torch.Tensor",
"numpy.arctan2",
"numpy.cos",
"torch.nn.Linear",
"numpy.concatenate",
"numpy.sin"
] | [((1990, 2044), 'numpy.arctan2', 'np.arctan2', (['(state.pgy - state.py)', '(state.pgx - state.px)'], {}), '(state.pgy - state.py, state.pgx - state.px)\n', (2000, 2044), True, 'import numpy as np\n'), ((2818, 2831), 'numpy.cos', 'np.cos', (['theta'], {}), '(theta)\n', (2824, 2831), True, 'import numpy as np\n'), ((2852, 2865), 'numpy.sin', 'np.sin', (['theta'], {}), '(theta)\n', (2858, 2865), True, 'import numpy as np\n'), ((3008, 3138), 'numpy.concatenate', 'np.concatenate', (['[dg, v_pref, vx, vy, radius, theta, vx1, vy1, px1, py1, radius1, radius_sum,\n cos_theta, sin_theta, da]'], {'axis': '(1)'}), '([dg, v_pref, vx, vy, radius, theta, vx1, vy1, px1, py1,\n radius1, radius_sum, cos_theta, sin_theta, da], axis=1)\n', (3022, 3138), True, 'import numpy as np\n'), ((1216, 1250), 'torch.nn.Linear', 'nn.Linear', (['state_dim', 'fc_layers[0]'], {}), '(state_dim, fc_layers[0])\n', (1225, 1250), True, 'import torch.nn as nn\n'), ((1252, 1261), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (1259, 1261), True, 'import torch.nn as nn\n'), ((1306, 1343), 'torch.nn.Linear', 'nn.Linear', (['fc_layers[0]', 'fc_layers[1]'], {}), '(fc_layers[0], fc_layers[1])\n', (1315, 1343), True, 'import torch.nn as nn\n'), ((1345, 1354), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (1352, 1354), True, 'import torch.nn as nn\n'), ((1399, 1436), 'torch.nn.Linear', 'nn.Linear', (['fc_layers[1]', 'fc_layers[2]'], {}), '(fc_layers[1], fc_layers[2])\n', (1408, 1436), True, 'import torch.nn as nn\n'), ((1438, 1447), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (1445, 1447), True, 'import torch.nn as nn\n'), ((1492, 1518), 'torch.nn.Linear', 'nn.Linear', (['fc_layers[2]', '(1)'], {}), '(fc_layers[2], 1)\n', (1501, 1518), True, 'import torch.nn as nn\n'), ((2070, 2102), 'numpy.concatenate', 'np.concatenate', (['[dx, dy]'], {'axis': '(1)'}), '([dx, dy], axis=1)\n', (2084, 2102), True, 'import numpy as np\n'), ((2894, 2962), 'numpy.concatenate', 'np.concatenate', (['[state.px - state.px1, 
state.py - state.py1]'], {'axis': '(1)'}), '([state.px - state.px1, state.py - state.py1], axis=1)\n', (2908, 2962), True, 'import numpy as np\n'), ((2181, 2192), 'numpy.cos', 'np.cos', (['rot'], {}), '(rot)\n', (2187, 2192), True, 'import numpy as np\n'), ((2206, 2217), 'numpy.sin', 'np.sin', (['rot'], {}), '(rot)\n', (2212, 2217), True, 'import numpy as np\n'), ((2242, 2253), 'numpy.cos', 'np.cos', (['rot'], {}), '(rot)\n', (2248, 2253), True, 'import numpy as np\n'), ((2267, 2278), 'numpy.sin', 'np.sin', (['rot'], {}), '(rot)\n', (2273, 2278), True, 'import numpy as np\n'), ((2446, 2457), 'numpy.cos', 'np.cos', (['rot'], {}), '(rot)\n', (2452, 2457), True, 'import numpy as np\n'), ((2472, 2483), 'numpy.sin', 'np.sin', (['rot'], {}), '(rot)\n', (2478, 2483), True, 'import numpy as np\n'), ((2510, 2521), 'numpy.cos', 'np.cos', (['rot'], {}), '(rot)\n', (2516, 2521), True, 'import numpy as np\n'), ((2536, 2547), 'numpy.sin', 'np.sin', (['rot'], {}), '(rot)\n', (2542, 2547), True, 'import numpy as np\n'), ((2587, 2598), 'numpy.cos', 'np.cos', (['rot'], {}), '(rot)\n', (2593, 2598), True, 'import numpy as np\n'), ((2626, 2637), 'numpy.sin', 'np.sin', (['rot'], {}), '(rot)\n', (2632, 2637), True, 'import numpy as np\n'), ((2677, 2688), 'numpy.cos', 'np.cos', (['rot'], {}), '(rot)\n', (2683, 2688), True, 'import numpy as np\n'), ((2716, 2727), 'numpy.sin', 'np.sin', (['rot'], {}), '(rot)\n', (2722, 2727), True, 'import numpy as np\n'), ((3186, 3209), 'torch.Tensor', 'torch.Tensor', (['new_state'], {}), '(new_state)\n', (3198, 3209), False, 'import torch\n')] |
#!/bin/python
import sys,os,json,random
ls=os.listdir()
dirs=[]
for i in ls:
if os.path.isdir(i):
dirs.append(i)
combined=[]
for i in dirs:
f=open(i+"/quotes.json",'r')
quotes=json.load(f)
for j in quotes:
combined.append(j)
random.shuffle(combined)
json_data=json.dumps(combined,indent=4,sort_keys=True)
try :
os.remove('quotes.json')
except:
0
c=open('quotes.json','w')
c.write(json_data)
| [
"os.listdir",
"random.shuffle",
"json.dumps",
"os.path.isdir",
"json.load",
"os.remove"
] | [((44, 56), 'os.listdir', 'os.listdir', ([], {}), '()\n', (54, 56), False, 'import sys, os, json, random\n'), ((261, 285), 'random.shuffle', 'random.shuffle', (['combined'], {}), '(combined)\n', (275, 285), False, 'import sys, os, json, random\n'), ((297, 343), 'json.dumps', 'json.dumps', (['combined'], {'indent': '(4)', 'sort_keys': '(True)'}), '(combined, indent=4, sort_keys=True)\n', (307, 343), False, 'import sys, os, json, random\n'), ((86, 102), 'os.path.isdir', 'os.path.isdir', (['i'], {}), '(i)\n', (99, 102), False, 'import sys, os, json, random\n'), ((199, 211), 'json.load', 'json.load', (['f'], {}), '(f)\n', (208, 211), False, 'import sys, os, json, random\n'), ((353, 377), 'os.remove', 'os.remove', (['"""quotes.json"""'], {}), "('quotes.json')\n", (362, 377), False, 'import sys, os, json, random\n')] |
# /usr/bin/env python3.5
# -*- mode: python -*-
# =============================================================================
# @@-COPYRIGHT-START-@@
#
# Copyright (c) 2020, Qualcomm Innovation Center, Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# SPDX-License-Identifier: BSD-3-Clause
#
# @@-COPYRIGHT-END-@@
# =============================================================================
""" Implements straight through gradient computation for Quant op"""
from dataclasses import dataclass
import torch
@dataclass
class LearnedGridParams:
"""
Data carrier containing parameters for learned grid
"""
scaling: torch.Tensor
offset: torch.Tensor
n: torch.Tensor
p: torch.Tensor
@dataclass
class IntermediateResultForLearnedGrid:
"""
Data carrier containing intermediate result for learned grid backward computation
forward_result: Round(x / scaling) + Round(offset)
rounding_error_q: Round(x / scaling) - (x / scaling)
rounding_error_o: Round(offset) - offset
"""
forward_result: torch.Tensor
rounding_error_q: torch.Tensor
rounding_error_o: torch.Tensor
def broadcast_to_tensor(tensor, encoding, ch_axis):
"""
This helper method takes n-dimension tensor and a 1-dimension encoding. And the encoding is broad-casted to
match the n-dimensional tensor
:param tensor: Tensor to use as target for the broadcasting operation
:param encoding: Encoding 1-dimensional tensor to broadcast
:param ch_axis: Channel axis along which broadcasting happens
:return: Broad-casted tensor
"""
if not isinstance(encoding, torch.Tensor):
encoding = torch.Tensor(encoding).to(tensor.device) # convert encoding to a tensor
# Original tensor shape is OIHW/IOHW, we change the shape to IHWO. Encoding (which is of shape O) can naturally
# broadcast to this shape
# This will work if the original tensor shape was any dimensions as long as the first dimension matches the
# encoding tensor shape
shape = list(tensor.shape)
num_channels = shape.pop(ch_axis)
encoding = encoding * torch.ones(shape + [num_channels]).to(tensor.device)
# we permute the resulting tensor back to OIHW/IOHW shape
permute_dims = list(range(len(shape)))
permute_dims.insert(ch_axis, len(shape))
encoding = encoding.permute(permute_dims)
return encoding
def compute_dloss_by_dx(x, grad, encoding_min, encoding_max, ch_axis=0):
"""
compute derivative w.r.t input using straight through estimator.
:param x: input tensor
:param grad: gradient flowing
:param encoding_min: encoding min grid param used on forward pass
:param encoding_max: encoding max grid param used on forward pass
:param ch_axis: Channel axis to use for per-channel quant
:return: gradient w.r.t input
"""
# Broadcast the encoding min and max tensors if they were single dimensioned. If they were scalars, the
# broadcast is automatic and more optimal in runtime, so we skip calling the helper above
if isinstance(encoding_max, list) and len(x.shape) > 1:
encoding_max = broadcast_to_tensor(x, encoding_max, ch_axis)
if isinstance(encoding_min, list) and len(x.shape) > 1:
encoding_min = broadcast_to_tensor(x, encoding_min, ch_axis)
else:
encoding_min = torch.Tensor([encoding_min]).to(x.device)
# compute dloss_by_dx = dq_by_dx * grad
inner_cond = torch.where(torch.le(x, encoding_max), # condition to check per value
torch.ones_like(x), # execute if true
torch.zeros_like(x)) # execute if false
dloss_by_dx = torch.where(torch.le(encoding_min, x), # condition to check per value
inner_cond, # execute if true
torch.zeros_like(x)) * grad
return dloss_by_dx
def _compute_derivative_of_loss_function(x: torch.Tensor,
derivative_of_quantizer: torch.Tensor,
grad: torch.Tensor,
scaling: torch.Tensor,
ch_axis: int) -> torch.Tensor:
"""
Compute derivative of the loss function like dloss_by_dmin or dloss_by_dmax
:param x: input
:param derivative_of_quantizer: derivative of the quantizer function like dq_by_dmin or dq_by_dmax
:param grad: gradient
:param scaling: scaling factor computed for given encoding min/max
:param ch_axis: channel axis along which sum is computed for gradient calculation
:return: computed derivative of loss w.r.t derivative of quantizer
"""
derivative_of_loss_function = derivative_of_quantizer * grad
if len(scaling) > 1 and len(x.shape) > 1:
dim = list(range(len(x.shape)))
# Remove the output axis
dim.pop(ch_axis)
derivative_of_loss_function = torch.sum(derivative_of_loss_function, dim=dim)
elif len(scaling) == 1:
derivative_of_loss_function = torch.sum(derivative_of_loss_function.flatten(), dim=0, keepdim=True)
return derivative_of_loss_function
def compute_intermediate_result_for_learned_grid(x: torch.Tensor,
scaling: torch.Tensor,
offset: torch.Tensor) -> IntermediateResultForLearnedGrid:
"""
helper function to compute forward result and rounding error before derivative
:param x: input
:param scaling: scaling factor computed for given encoding min/max
:param offset: offset computed
:return: forward result, rounding error of quantizer, rounding error of offset tuple
"""
forward_result = torch.round(x / scaling) + torch.round(offset)
rounding_error_q = torch.round(x / scaling) - (x / scaling)
rounding_error_o = torch.round(offset) - offset
return IntermediateResultForLearnedGrid(forward_result, rounding_error_q, rounding_error_o)
def compute_dloss_by_dmin(x: torch.Tensor,
grad: torch.Tensor,
intermediate_result: IntermediateResultForLearnedGrid,
grid_params: LearnedGridParams,
ch_axis: int = 0) -> torch.Tensor:
"""
helper function to compute derivative of loss w.r.t encoding min
Implementation based on LSQ+ ( https://arxiv.org/pdf/2004.09576.pdf )
Inner condition ( n <= fw <= p ):
dq_by_dmin = (round(x/s) - x/s) / -p
Outer condition ( fw < n ):
dq_by_dmin = -n/p + 1 + (round(o) - o)/p
Outer condition ( p < fw ):
dq_by_dmin = (round(o) - o)/p
:param x: input
:param grad: gradient
:param intermediate_result: data carrier containing intermediate result (forward result, rounding error q and o)
:param grid_params: data carrier containing parameters for learned grid (scale, offset, n, p)
:param ch_axis: channel axis along which sum is computed for gradient calculation
:return: computed derivative of loss w.r.t encoding min
"""
scaling, _, n, p = grid_params.scaling, grid_params.offset, grid_params.n, grid_params.p
forward_result = intermediate_result.forward_result
rounding_error_q = intermediate_result.rounding_error_q
rounding_error_o = intermediate_result.rounding_error_o
dq_by_dmin = torch.where(
torch.le(forward_result.data, p), -rounding_error_q / p, rounding_error_o / p
)
dq_by_dmin = torch.where(
torch.le(n, forward_result.data), dq_by_dmin, -n/p + 1 + rounding_error_o / p
)
dloss_by_dmin = _compute_derivative_of_loss_function(x, dq_by_dmin, grad, scaling, ch_axis)
return dloss_by_dmin
def compute_dloss_by_dmax(x: torch.Tensor,
grad: torch.Tensor,
intermediate_result: IntermediateResultForLearnedGrid,
grid_params: LearnedGridParams,
ch_axis: int = 0) -> torch.Tensor:
"""
helper function to compute derivative of loss w.r.t encoding max
Implementation based on LSQ+ ( https://arxiv.org/pdf/2004.09576.pdf )
Inner condition ( n <= fw <= p ):
dq_by_dmax = (round(x/s) - x/s) / p
Outer condition ( fw < n ):
dq_by_dmax = n/p - (round(o) - o)/p
Outer condition ( p < fw ):
dq_by_dmax = 1 - (round(o) - o)/p
:param x: input
:param grad: gradient
:param intermediate_result: data carrier containing intermediate result tensors (forward result, rounding errors)
:param grid_params: data carrier containing parameters for learned grid (scale, offset, n, p)
:param ch_axis: channel axis along which sum is computed for gradient calculation
:return: computed derivative of loss w.r.t encoding max
"""
scaling, _, n, p = grid_params.scaling, grid_params.offset, grid_params.n, grid_params.p
forward_result = intermediate_result.forward_result
rounding_error_q = intermediate_result.rounding_error_q
rounding_error_o = intermediate_result.rounding_error_o
dq_by_dmax = torch.where(
torch.le(forward_result.data, p), rounding_error_q / p, torch.ones_like(p) - rounding_error_o / p,
)
dq_by_dmax = torch.where(
torch.le(n, forward_result.data), dq_by_dmax, n / p - rounding_error_o / p,
)
dloss_by_dmax = _compute_derivative_of_loss_function(x, dq_by_dmax, grad, scaling, ch_axis)
return dloss_by_dmax
def compute_dloss_by_dx_using_scale_offset(x: torch.Tensor,
grad: torch.Tensor,
grid_params: LearnedGridParams) -> torch.Tensor:
"""
compute derivative w.r.t input
:param x: input
:param grad: gradient
:param grid_params: data carrier containing parameters for learned grid (scale, offset, n, p)
:return: gradient w.r.t input
"""
scaling, offset, n, p = grid_params.scaling, grid_params.offset, grid_params.n, grid_params.p
# R(x/s) + R(o)
r_x_by_s_plus_round_o = torch.round(x / scaling) + offset
# compute dloss_by_dx = dq_by_dx * grad
inner_cond = torch.where(torch.le(r_x_by_s_plus_round_o.data, p.data), # condition to check per value
torch.ones_like(r_x_by_s_plus_round_o), # execute if true
torch.zeros_like(r_x_by_s_plus_round_o)) # execute if false
dloss_by_dx = torch.where(torch.le(n.data, r_x_by_s_plus_round_o.data), # condition to check per value
inner_cond, # execute if true
torch.zeros_like(r_x_by_s_plus_round_o.data)) * grad
return dloss_by_dx
class RoundStraightThrough(torch.autograd.Function):
"""
Defining gradient of rounding function as passthrogh since round is a non-linearity
"""
@staticmethod
# pylint: disable=arguments-differ
def forward(ctx, *x):
return torch.round(*x)
@staticmethod
def backward(ctx, *output_grad):
return output_grad
| [
"torch.ones_like",
"torch.le",
"torch.Tensor",
"torch.round",
"torch.sum",
"torch.zeros_like",
"torch.ones"
] | [((4956, 4981), 'torch.le', 'torch.le', (['x', 'encoding_max'], {}), '(x, encoding_max)\n', (4964, 4981), False, 'import torch\n'), ((5044, 5062), 'torch.ones_like', 'torch.ones_like', (['x'], {}), '(x)\n', (5059, 5062), False, 'import torch\n'), ((5112, 5131), 'torch.zeros_like', 'torch.zeros_like', (['x'], {}), '(x)\n', (5128, 5131), False, 'import torch\n'), ((6444, 6491), 'torch.sum', 'torch.sum', (['derivative_of_loss_function'], {'dim': 'dim'}), '(derivative_of_loss_function, dim=dim)\n', (6453, 6491), False, 'import torch\n'), ((7251, 7275), 'torch.round', 'torch.round', (['(x / scaling)'], {}), '(x / scaling)\n', (7262, 7275), False, 'import torch\n'), ((7278, 7297), 'torch.round', 'torch.round', (['offset'], {}), '(offset)\n', (7289, 7297), False, 'import torch\n'), ((7321, 7345), 'torch.round', 'torch.round', (['(x / scaling)'], {}), '(x / scaling)\n', (7332, 7345), False, 'import torch\n'), ((7385, 7404), 'torch.round', 'torch.round', (['offset'], {}), '(offset)\n', (7396, 7404), False, 'import torch\n'), ((8912, 8944), 'torch.le', 'torch.le', (['forward_result.data', 'p'], {}), '(forward_result.data, p)\n', (8920, 8944), False, 'import torch\n'), ((9034, 9066), 'torch.le', 'torch.le', (['n', 'forward_result.data'], {}), '(n, forward_result.data)\n', (9042, 9066), False, 'import torch\n'), ((10640, 10672), 'torch.le', 'torch.le', (['forward_result.data', 'p'], {}), '(forward_result.data, p)\n', (10648, 10672), False, 'import torch\n'), ((10783, 10815), 'torch.le', 'torch.le', (['n', 'forward_result.data'], {}), '(n, forward_result.data)\n', (10791, 10815), False, 'import torch\n'), ((11579, 11603), 'torch.round', 'torch.round', (['(x / scaling)'], {}), '(x / scaling)\n', (11590, 11603), False, 'import torch\n'), ((11687, 11731), 'torch.le', 'torch.le', (['r_x_by_s_plus_round_o.data', 'p.data'], {}), '(r_x_by_s_plus_round_o.data, p.data)\n', (11695, 11731), False, 'import torch\n'), ((11794, 11832), 'torch.ones_like', 'torch.ones_like', 
(['r_x_by_s_plus_round_o'], {}), '(r_x_by_s_plus_round_o)\n', (11809, 11832), False, 'import torch\n'), ((11882, 11921), 'torch.zeros_like', 'torch.zeros_like', (['r_x_by_s_plus_round_o'], {}), '(r_x_by_s_plus_round_o)\n', (11898, 11921), False, 'import torch\n'), ((12478, 12493), 'torch.round', 'torch.round', (['*x'], {}), '(*x)\n', (12489, 12493), False, 'import torch\n'), ((5184, 5209), 'torch.le', 'torch.le', (['encoding_min', 'x'], {}), '(encoding_min, x)\n', (5192, 5209), False, 'import torch\n'), ((5334, 5353), 'torch.zeros_like', 'torch.zeros_like', (['x'], {}), '(x)\n', (5350, 5353), False, 'import torch\n'), ((10696, 10714), 'torch.ones_like', 'torch.ones_like', (['p'], {}), '(p)\n', (10711, 10714), False, 'import torch\n'), ((11974, 12018), 'torch.le', 'torch.le', (['n.data', 'r_x_by_s_plus_round_o.data'], {}), '(n.data, r_x_by_s_plus_round_o.data)\n', (11982, 12018), False, 'import torch\n'), ((12143, 12187), 'torch.zeros_like', 'torch.zeros_like', (['r_x_by_s_plus_round_o.data'], {}), '(r_x_by_s_plus_round_o.data)\n', (12159, 12187), False, 'import torch\n'), ((3146, 3168), 'torch.Tensor', 'torch.Tensor', (['encoding'], {}), '(encoding)\n', (3158, 3168), False, 'import torch\n'), ((3617, 3651), 'torch.ones', 'torch.ones', (['(shape + [num_channels])'], {}), '(shape + [num_channels])\n', (3627, 3651), False, 'import torch\n'), ((4840, 4868), 'torch.Tensor', 'torch.Tensor', (['[encoding_min]'], {}), '([encoding_min])\n', (4852, 4868), False, 'import torch\n')] |
import os
from os import path
from typing import Callable, Iterable
import cv2.cv2 as cv2
import numpy as np
from facepy import config
class DataSample:
"""Models a single LFW dataset image."""
@property
def person_name(self) -> str:
return person_name_from_dir(self.dir_path)
@property
def file_name(self) -> str:
return path.basename(self.file_path)
@property
def dir_path(self) -> str:
return path.dirname(self.file_path)
@property
def image(self) -> np.array:
if self.__image is None:
self.__image = cv2.imread(self.file_path)
return self.__image
@image.setter
def image(self, image: np.array) -> None:
self.__image = image
def __init__(self, file_path: str) -> None:
self.file_path = file_path
self.__image: np.array = None
def person_name_from_dir(dir_path: str) -> str:
"""Converts a LFW dir name to a person name."""
return path.basename(dir_path).replace('_', ' ')
def all_dirs() -> Iterable[str]:
"""Returns all the dirs in the dataset."""
data_dir = config.Paths.DATASET_DIR
dirs = [path.join(data_dir, d) for d in os.listdir(data_dir)]
dirs = [d for d in dirs if path.isdir(d)]
dirs.sort()
return dirs
def samples_in_dir(dir_path: str, sample_filter: Callable = None) -> Iterable[DataSample]:
"""
Returns the data samples present in the specified dir.
'sample_filter' is a function accepting a DataSample and returning either a DataSample or None.
If specified, it is used to filter and/or process data samples yielded by the iterator.
"""
paths = [path.join(dir_path, f) for f in os.listdir(dir_path) if f.endswith('.jpg')]
paths.sort()
for file_path in paths:
sample = DataSample(file_path)
if sample_filter:
sample = sample_filter(sample)
if sample:
yield sample
def samples_for_person(person_name: str, sample_filter: Callable = None) -> Iterable[DataSample]:
"""Returns the data samples for the specified person."""
person_dir = path.join(config.Paths.DATASET_DIR, person_name.replace(' ', '_'))
return samples_in_dir(person_dir, sample_filter=sample_filter)
def all_samples(sample_filter: Callable = None) -> Iterable[DataSample]:
"""Returns all the samples in the dataset."""
for dir_path in all_dirs():
for sample in samples_in_dir(dir_path, sample_filter=sample_filter):
yield sample
| [
"os.listdir",
"cv2.cv2.imread",
"os.path.join",
"os.path.dirname",
"os.path.isdir",
"os.path.basename"
] | [((364, 393), 'os.path.basename', 'path.basename', (['self.file_path'], {}), '(self.file_path)\n', (377, 393), False, 'from os import path\n'), ((455, 483), 'os.path.dirname', 'path.dirname', (['self.file_path'], {}), '(self.file_path)\n', (467, 483), False, 'from os import path\n'), ((1152, 1174), 'os.path.join', 'path.join', (['data_dir', 'd'], {}), '(data_dir, d)\n', (1161, 1174), False, 'from os import path\n'), ((1657, 1679), 'os.path.join', 'path.join', (['dir_path', 'f'], {}), '(dir_path, f)\n', (1666, 1679), False, 'from os import path\n'), ((592, 618), 'cv2.cv2.imread', 'cv2.imread', (['self.file_path'], {}), '(self.file_path)\n', (602, 618), True, 'import cv2.cv2 as cv2\n'), ((976, 999), 'os.path.basename', 'path.basename', (['dir_path'], {}), '(dir_path)\n', (989, 999), False, 'from os import path\n'), ((1184, 1204), 'os.listdir', 'os.listdir', (['data_dir'], {}), '(data_dir)\n', (1194, 1204), False, 'import os\n'), ((1237, 1250), 'os.path.isdir', 'path.isdir', (['d'], {}), '(d)\n', (1247, 1250), False, 'from os import path\n'), ((1689, 1709), 'os.listdir', 'os.listdir', (['dir_path'], {}), '(dir_path)\n', (1699, 1709), False, 'import os\n')] |
# -----------------------------------------------------------------------------
# Copyright * 2014, United States Government, as represented by the
# Administrator of the National Aeronautics and Space Administration. All
# rights reserved.
#
# The Crisis Mapping Toolkit (CMT) v1 platform is licensed under the Apache
# License, Version 2.0 (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
# -----------------------------------------------------------------------------
from PyQt4 import QtGui, QtCore
import sys
from threading import Thread
from LLAMA import Ui_Lake_Level_UI
from plot_water_levelui import *
from lake_measure import *
class ProgressPopup(QtGui.QWidget):
    """Popup window showing aggregate progress across multiple lake-processing jobs.

    Worker threads report progress through ``update_function``; the Qt signal
    (connected with a queued connection) hands each update off to the GUI
    thread, where ``apply_update`` refreshes the progress bar and status label.
    Closing the window (or pressing Cancel) invokes the supplied cancel callback.
    """

    # Signature: (lakes_number, lakes_total, lake_name, lake_date,
    #             lake_image, lake_image_total)
    update_signal = QtCore.pyqtSignal(int, int, str, str, int, int)

    def __init__(self, cancel_function):
        """Build the popup widgets.

        :param cancel_function: callable invoked when the window is closed or
                                the Cancel button is pressed; may be ``None``.
        """
        QtGui.QWidget.__init__(self)
        # Queued connection so apply_update always executes on the GUI thread,
        # even when update_function is called from a worker thread.
        self.update_signal.connect(self.apply_update, QtCore.Qt.QueuedConnection)
        self.cancel_function = cancel_function
        # Per-lake totals/counts are allocated lazily on the first update,
        # once the actual number of lakes is known.
        self.lake_totals = None
        self.lake_counts = None

        self.progressBar = QtGui.QProgressBar(self)
        self.progressBar.setMinimumSize(500, 50)
        self.progressBar.setMaximumSize(500, 50)
        self.progressBar.setRange(0, 100)  # value is a percentage
        self.progressBar.setValue(0)

        self.status = QtGui.QLabel(self)
        self.status.setText("")

        self.cancelButton = QtGui.QPushButton('Cancel', self)
        self.cancelButton.setMinimumSize(50, 30)
        self.cancelButton.setMaximumSize(100, 50)
        self.cancelButton.clicked[bool].connect(self._cancel)

        vbox = QtGui.QVBoxLayout(self)
        vbox.addWidget(self.progressBar)
        vbox.addWidget(self.status)
        vbox.addWidget(self.cancelButton)
        vbox.addStretch(1)
        self.setLayout(vbox)

    def update_function(self, lakes_number, lakes_total, lake_name, lake_date, lake_image, lake_image_total):
        """Thread-safe progress callback: forwards the update to the GUI thread."""
        self.update_signal.emit(lakes_number, lakes_total, lake_name, lake_date, lake_image, lake_image_total)

    def apply_update(self, lakes_number, lakes_total, lake_name, lake_date, lake_image, lake_image_total):
        """Record per-lake progress and refresh the bar/status (GUI thread only).

        :param lakes_number:     index of the lake that produced this update
        :param lakes_total:      total number of lakes being processed
        :param lake_name:        name of the lake (shown in the status label)
        :param lake_date:        date just processed (shown in the status label)
        :param lake_image:       images completed so far for this lake
        :param lake_image_total: total images expected for this lake
        """
        if self.lake_totals is None:  # first update: allocate per-lake bookkeeping
            # Assume 10 images per lake until each lake reports its real total.
            self.lake_totals = [10] * lakes_total
            self.lake_counts = [0] * lakes_total
        self.lake_totals[lakes_number] = lake_image_total
        self.lake_counts[lakes_number] = lake_image
        total = sum(self.lake_totals)
        progress = sum(self.lake_counts)
        self.status.setText('Completed processing %s on %s.' % (lake_name, lake_date))
        # Guard against division by zero, and pass an int: QProgressBar.setValue
        # takes an integer and strict PyQt bindings reject floats.
        if total > 0:
            self.progressBar.setValue(int(float(progress) / total * 100))

    def closeEvent(self, event):
        """Invoke the cancel callback (if any) before the window closes."""
        if self.cancel_function is not None:
            self.cancel_function()
        event.accept()

    def _cancel(self):
        """Cancel-button handler: closing the window triggers closeEvent."""
        self.close()
class Lake_Level_App(QtGui.QMainWindow, Ui_Lake_Level_UI):
    """Main window: collects lake/date selections and launches processing."""
    def __init__(self):
        super(self.__class__, self).__init__()
        self.setupUi(self)
        # Earliest selectable start date for the imagery archive.
        self.start_date = '1984-04-25'
        # Sets end date to current date.
        self.end_date = str((QtCore.QDate.currentDate()).toString('yyyy-MM-dd'))
        self.selected_lake = 'Lake Tahoe'
        self.selectlakeDropMenu.activated[str].connect(self.selectLakeHandle)
        self.okBtn.clicked.connect(self.okHandle)
        # Sets end date as current date. Couldn't set this option in QT Designer
        self.endDate.setDate(QtCore.QDate.currentDate())
        self.endDate.dateChanged[QtCore.QDate].connect(self.endHandle)
        self.startDate.dateChanged[QtCore.QDate].connect(self.startHandle)
        # Checkbox-driven processing flags (floating algae / turbidity).
        self.faiState = False
        self.ndtiState = False
        # Queued connection: the worker thread emits completedSignal, the
        # slot must run on the GUI thread.
        self.completedSignal.connect(self.completeLakeThread, QtCore.Qt.QueuedConnection)
    def selectLakeHandle(self, text):
        # Drop-down selection changed.
        self.selected_lake = str(text)
    def startHandle(self, date):
        # Start-date picker changed.
        self.start_date = str(date.toString('yyyy-MM-dd'))
    def endHandle(self, date):
        # End-date picker changed.
        self.end_date = str(date.toString('yyyy-MM-dd'))
    # Emitted by the worker thread when lake processing finishes.
    completedSignal = QtCore.pyqtSignal()
    @QtCore.pyqtSlot()
    def completeLakeThread(self):
        """Post-processing: write table/plot outputs, then close the popup."""
        if self.tableCheckbox.isChecked():
            table_water_level(self.selected_lake, self.start_date, self.end_date, result_dir='results', output_file=self.table_output_file)
        if self.graphCheckbox.isChecked():
            plot_water_level(self.selected_lake, self.start_date, self.end_date, result_dir='results')
        self.popup.close()
    def okHandle(self):
        """Read checkbox state and start lake processing on a worker thread."""
        if self.algaeCheckbox.isChecked():
            self.faiState = True
        else:
            self.faiState = False
        if self.turbidityCheckbox.isChecked():
            self.ndtiState = True
        else:
            self.ndtiState = False
        # Heat map checkbox is not functioning. Add under here:
        # if self.lake_areaCheckbox.isChecked():
        if self.tableCheckbox.isChecked():
            self.table_output_file = QtGui.QFileDialog.getSaveFileName(self, 'Choose Output File', 'results/' + self.selected_lake + '.csv', 'CSV File (*.csv *.txt)')
        self.popup = ProgressPopup(Lake_Level_Cancel)
        self.lake_thread = Thread(target=Lake_Level_Run, args=(self.selected_lake, self.start_date, self.end_date, \
                'results', self.faiState, self.ndtiState, self.popup.update_function, self.completedSignal.emit))
        self.popup.show()
        self.lake_thread.start()
        # CHANGE THIS. NEED TO MAKE THESE PARTS WAIT UNTIL LAKE_THREAD IS FINISHED.
# CHANGE THIS. NEED TO MAKE THESE PARTS WAIT UNTIL LAKE_THREAD IS FINISHED.
def main():
    """Create the Qt application, show the main window and run the event loop."""
    application = QtGui.QApplication(sys.argv)
    window = Lake_Level_App()
    window.show()
    application.exec_()


if __name__ == '__main__':
    main()
| [
"PyQt4.QtGui.QApplication",
"PyQt4.QtCore.pyqtSlot",
"PyQt4.QtCore.pyqtSignal",
"PyQt4.QtGui.QProgressBar",
"PyQt4.QtGui.QLabel",
"PyQt4.QtGui.QPushButton",
"PyQt4.QtCore.QDate.currentDate",
"PyQt4.QtGui.QFileDialog.getSaveFileName",
"PyQt4.QtGui.QVBoxLayout",
"PyQt4.QtGui.QWidget.__init__",
"th... | [((1136, 1183), 'PyQt4.QtCore.pyqtSignal', 'QtCore.pyqtSignal', (['int', 'int', 'str', 'str', 'int', 'int'], {}), '(int, int, str, str, int, int)\n', (1153, 1183), False, 'from PyQt4 import QtGui, QtCore\n'), ((4394, 4413), 'PyQt4.QtCore.pyqtSignal', 'QtCore.pyqtSignal', ([], {}), '()\n', (4411, 4413), False, 'from PyQt4 import QtGui, QtCore\n'), ((4419, 4436), 'PyQt4.QtCore.pyqtSlot', 'QtCore.pyqtSlot', ([], {}), '()\n', (4434, 4436), False, 'from PyQt4 import QtGui, QtCore\n'), ((5892, 5920), 'PyQt4.QtGui.QApplication', 'QtGui.QApplication', (['sys.argv'], {}), '(sys.argv)\n', (5910, 5920), False, 'from PyQt4 import QtGui, QtCore\n'), ((1233, 1261), 'PyQt4.QtGui.QWidget.__init__', 'QtGui.QWidget.__init__', (['self'], {}), '(self)\n', (1255, 1261), False, 'from PyQt4 import QtGui, QtCore\n'), ((1485, 1509), 'PyQt4.QtGui.QProgressBar', 'QtGui.QProgressBar', (['self'], {}), '(self)\n', (1503, 1509), False, 'from PyQt4 import QtGui, QtCore\n'), ((1710, 1728), 'PyQt4.QtGui.QLabel', 'QtGui.QLabel', (['self'], {}), '(self)\n', (1722, 1728), False, 'from PyQt4 import QtGui, QtCore\n'), ((1790, 1823), 'PyQt4.QtGui.QPushButton', 'QtGui.QPushButton', (['"""Cancel"""', 'self'], {}), "('Cancel', self)\n", (1807, 1823), False, 'from PyQt4 import QtGui, QtCore\n'), ((2001, 2024), 'PyQt4.QtGui.QVBoxLayout', 'QtGui.QVBoxLayout', (['self'], {}), '(self)\n', (2018, 2024), False, 'from PyQt4 import QtGui, QtCore\n'), ((5514, 5708), 'threading.Thread', 'Thread', ([], {'target': 'Lake_Level_Run', 'args': "(self.selected_lake, self.start_date, self.end_date, 'results', self.\n faiState, self.ndtiState, self.popup.update_function, self.\n completedSignal.emit)"}), "(target=Lake_Level_Run, args=(self.selected_lake, self.start_date,\n self.end_date, 'results', self.faiState, self.ndtiState, self.popup.\n update_function, self.completedSignal.emit))\n", (5520, 5708), False, 'from threading import Thread\n'), ((3786, 3812), 'PyQt4.QtCore.QDate.currentDate', 
'QtCore.QDate.currentDate', ([], {}), '()\n', (3810, 3812), False, 'from PyQt4 import QtGui, QtCore\n'), ((5303, 5436), 'PyQt4.QtGui.QFileDialog.getSaveFileName', 'QtGui.QFileDialog.getSaveFileName', (['self', '"""Choose Output File"""', "('results/' + self.selected_lake + '.csv')", '"""CSV File (*.csv *.txt)"""'], {}), "(self, 'Choose Output File', 'results/' +\n self.selected_lake + '.csv', 'CSV File (*.csv *.txt)')\n", (5336, 5436), False, 'from PyQt4 import QtGui, QtCore\n'), ((3454, 3480), 'PyQt4.QtCore.QDate.currentDate', 'QtCore.QDate.currentDate', ([], {}), '()\n', (3478, 3480), False, 'from PyQt4 import QtGui, QtCore\n')] |
from asyncio import FastChildWatcher
import os
from unicodedata import category
from flask import request, current_app, url_for
from flask_restful import Resource
from datetime import datetime
from flask_jwt_extended import (
jwt_required, current_user
)
from werkzeug.utils import secure_filename
from sqlalchemy.orm import aliased
from bigeye.models.user import UserRoles
from ..models.base import db
from ..models.challenge import ChallengeCategory, Challenge, ChallengeResolve
from ..schemas.schemas import challengecategory_schema, challenge_schema, challengeresolve_schema, challenges_schema, challengecategorysingle_schema
def allowed_file(filename):
    """Return True when *filename* carries an extension whitelisted in the app config."""
    if '.' not in filename:
        return False
    extension = filename.rsplit('.', 1)[1].lower()
    return extension in current_app.config['ALLOWED_EXTENSIONS']
class ChallengeCategoryListResource(Resource):
    """List challenge categories and (admin only) create new ones."""
    @jwt_required()
    def get(self):
        """Return every category, annotated with the caller's solve count."""
        challenge_categories = ChallengeCategory.query.all()
        categories = []
        for category in challenge_categories:
            # Count how many challenges of this category the current user
            # has already resolved (join resolve -> challenge, filter by category).
            category.total_challenges_resolved = ChallengeResolve.query.filter(ChallengeResolve.user_id == current_user.id).\
                join(ChallengeResolve.challenge).\
                filter(Challenge.category_id == category.id).count()
            categories.append(category)
        challenge_categories_dump = challengecategory_schema.dump(categories)
        return challenge_categories_dump, 200
    @jwt_required()
    def post(self):
        """Create a new category from form data; requires ADMIN role."""
        if current_user.role != UserRoles.ADMIN:
            return {'error': 'You cannot create a category. Insufisant privilege.'}, 403
        dataForm = request.form
        if not dataForm:
            return {'error': 'No content provided'}, 400
        errors = challengecategorysingle_schema.validate(dataForm)
        if len(errors) > 0:
            return {'error': errors}, 400
        data = challengecategorysingle_schema.load(dataForm)
        category = ChallengeCategory(
            name=data['name']
        )
        db.session.add(category)
        db.session.commit()
        category = challengecategorysingle_schema.dump(category)
        return category, 201
class ChallengeListResource(Resource):
    """List and (admin only) create challenges within one category."""
    @jwt_required()
    def get(self, category_id):
        """Return the category's challenges, easiest first, with solve status."""
        category = ChallengeCategory.query.get(category_id)
        if not category:
            return {'error': 'Category not found'}, 404
        challenges = Challenge.query.filter_by(category=category).order_by(Challenge.difficulty.asc())
        challenge_list = []
        for challenge in challenges:
            # Flag whether the current user already solved this challenge.
            challenge.is_resolved = ChallengeResolve.query.filter_by(user_id=current_user.id, challenge_id=challenge.id).count() > 0
            challenge_list.append(challenge)
        challenges_dump = challenges_schema.dump(challenge_list)
        return challenges_dump, 200
    @jwt_required()
    def post(self, category_id):
        """Create a challenge from form data plus either an upload or a link."""
        if current_user.role != UserRoles.ADMIN:
            return {'error': 'You cannot create a challenge. Insufisant privilege.'}, 403
        dataForm = request.form
        if not dataForm:
            return {'error': 'No content provided'}, 400
        errors = challenge_schema.validate(dataForm)
        if len(errors) > 0:
            return {'error': errors}, 400
        # Exactly one of 'file' (multipart upload) or 'link' must be supplied.
        if 'file' not in request.files and 'link' not in dataForm:
            return {'error': 'Please send a file or a link!'}, 400
        if 'file' in request.files and 'link' in dataForm:
            return {'error': 'Please send a file or a link, not both!'}, 400
        data = challenge_schema.load(dataForm)
        if 'file' in request.files:
            file = request.files['file']
            if file.filename == '':
                return {'error': 'No file has been sent!'}, 400
            if file and allowed_file(file.filename):
                # Sanitise the name before writing it under UPLOAD_FOLDER.
                filename = secure_filename(file.filename)
                file.save(os.path.join(current_app.config['UPLOAD_FOLDER'], filename))
                # Build an absolute download URL depending on the environment.
                if current_app.config['ENV'] == 'development':
                    resource_link = f'http://localhost:8000'
                else:
                    resource_link = f'https://api.bigeye.codexus.fr'
                resource_link += url_for('static', filename='challenges/' + filename)
            else:
                return {'error': 'The file extension is not allowed! Allowed: ' + ','.join(current_app.config['ALLOWED_EXTENSIONS'])}, 400
        else:
            resource_link = data['link']
        category = ChallengeCategory.query.get(category_id)
        if category is None:
            return {'error': 'The given category is not found!'}, 404
        challenge = Challenge(
            title=data['title'],
            description=data['description'],
            difficulty=data['difficulty'],
            flag=data['flag'],
            category=category,
            points=data['points'],
            created_at=datetime.now(),
            resource_link=resource_link,
            hint=data['hint']
        )
        # A freshly-created challenge cannot have been solved yet.
        challenge.is_resolved = False
        db.session.add(challenge)
        db.session.commit()
        challenge = challenge_schema.dump(challenge)
        return challenge, 201
class ChallengeResource(Resource):
    """Read-only endpoint exposing a single challenge."""

    @jwt_required()
    def get(self, challenge_id):
        """Return one challenge, flagged with the current user's solve status."""
        challenge = Challenge.query.get(challenge_id)
        if not challenge:
            return {'error': 'Challenge not found'}, 404
        solves = ChallengeResolve.query.filter_by(
            user_id=current_user.id, challenge_id=challenge.id)
        challenge.is_resolved = solves.count() > 0
        return challenge_schema.dump(challenge), 200
class ChallengeResolveResource(Resource):
    """Accept flag submissions and record successful solves."""
    @jwt_required()
    def post(self, challenge_id):
        """Check the submitted flag; on success, persist a ChallengeResolve row."""
        challenge = Challenge.query.get(challenge_id)
        if not challenge:
            return {'error': 'Challenge not found'}, 404
        # Reject duplicate solves by the same user.
        resolved = ChallengeResolve.query.filter_by(user_id=current_user.id, challenge_id=challenge.id).first()
        if resolved is not None:
            return {'error': 'You already solved this challenge!'}, 400
        data = request.form
        if not data:
            return {'error': 'No flag provided'}, 400
        flag = data.get('flag', None)
        if not flag:
            return {'error': 'No flag provided'}, 400
        if flag != challenge.flag:
            return {'error': 'Wrong flag! Don\'t get discouraged and persevere!'}, 400
        # Snapshot the challenge's point value at solve time.
        resolved = ChallengeResolve(
            user_id=current_user.id,
            challenge_id=challenge.id,
            points=challenge.points,
            resolved_at=datetime.now()
        )
        db.session.add(resolved)
        db.session.commit()
        resolved = challengeresolve_schema.dump(resolved)
        return resolved, 201
| [
"os.path.join",
"flask.url_for",
"datetime.datetime.now",
"flask_jwt_extended.jwt_required",
"werkzeug.utils.secure_filename"
] | [((844, 858), 'flask_jwt_extended.jwt_required', 'jwt_required', ([], {}), '()\n', (856, 858), False, 'from flask_jwt_extended import jwt_required, current_user\n'), ((1541, 1555), 'flask_jwt_extended.jwt_required', 'jwt_required', ([], {}), '()\n', (1553, 1555), False, 'from flask_jwt_extended import jwt_required, current_user\n'), ((2311, 2325), 'flask_jwt_extended.jwt_required', 'jwt_required', ([], {}), '()\n', (2323, 2325), False, 'from flask_jwt_extended import jwt_required, current_user\n'), ((2958, 2972), 'flask_jwt_extended.jwt_required', 'jwt_required', ([], {}), '()\n', (2970, 2972), False, 'from flask_jwt_extended import jwt_required, current_user\n'), ((5354, 5368), 'flask_jwt_extended.jwt_required', 'jwt_required', ([], {}), '()\n', (5366, 5368), False, 'from flask_jwt_extended import jwt_required, current_user\n'), ((5815, 5829), 'flask_jwt_extended.jwt_required', 'jwt_required', ([], {}), '()\n', (5827, 5829), False, 'from flask_jwt_extended import jwt_required, current_user\n'), ((3963, 3993), 'werkzeug.utils.secure_filename', 'secure_filename', (['file.filename'], {}), '(file.filename)\n', (3978, 3993), False, 'from werkzeug.utils import secure_filename\n'), ((4331, 4383), 'flask.url_for', 'url_for', (['"""static"""'], {'filename': "('challenges/' + filename)"}), "('static', filename='challenges/' + filename)\n", (4338, 4383), False, 'from flask import request, current_app, url_for\n'), ((5031, 5045), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (5043, 5045), False, 'from datetime import datetime\n'), ((6738, 6752), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (6750, 6752), False, 'from datetime import datetime\n'), ((4020, 4079), 'os.path.join', 'os.path.join', (["current_app.config['UPLOAD_FOLDER']", 'filename'], {}), "(current_app.config['UPLOAD_FOLDER'], filename)\n", (4032, 4079), False, 'import os\n')] |
from Data import Data
import itertools
import joblib
import numpy as np
import pandas as pd
import pickle
import re
import statsmodels.api as sm
import sys
from sklearn.linear_model import LinearRegression
from sklearn.ensemble import RandomForestRegressor, GradientBoostingRegressor
from sklearn.metrics import mean_squared_error
from sklearn.model_selection import train_test_split
from sklearn.neural_network import MLPClassifier
from sklearn import svm
from sklearn.preprocessing import OneHotEncoder
import os
class Predict(object):
    """Fit and evaluate house-price prediction models.

    Trains (or hot-loads from disk) a linear regression, random forest
    and/or gradient boosting model on the feature set built from ``Data``,
    reports out-of-sample RMSE and R2, and optionally writes a
    geo-referenced prediction file.
    """
    def __init__(self, features=None, LR=True,
                 RF=False, GB=False, test_prop=0.2, regions=None, seed=1704, outcome=None,
                 hot_load_models=True, save_models=True, model_append=None, output_geo=True,
                 merge_lsoa=False, gb_model=None):
        # BUGFIX: the original signature used mutable/stateful defaults
        # (lists and a GradientBoostingRegressor() instance). Defaults are
        # evaluated once at definition time, so every Predict instance would
        # share the same list objects and the very same estimator object,
        # including its fitted state. Use None sentinels and build fresh
        # objects here instead; behavior for callers is unchanged.
        if features is None:
            features = ['tfarea', 'numberrooms', 'propertytype', 'oldnew']
        if regions is None:
            regions = []
        if outcome is None:
            outcome = ['ln_y']
        if model_append is None:
            model_append = ['']
        if gb_model is None:
            gb_model = GradientBoostingRegressor()
        self.model_dir = os.path.join(os.path.abspath(''), 'models')
        self.data_dir = os.path.join(os.path.abspath(''), 'data')
        self.features = features
        self.LR = LR
        self.RF = RF
        self.GB = GB
        self.gb_model = gb_model
        self.test_prop = test_prop
        self.regions = regions
        self.seed = seed
        self.outcome = outcome
        self.hot_load_models = hot_load_models
        self.save_models = save_models
        self.merge_lsoa = merge_lsoa
        self.model_append = model_append
        # Three-letter feature acronyms keep saved model filenames short.
        self.feature_acronyms = [i[0:3] for i in self.features]
        if self.model_append == ['']:
            self.model_append = '_'.join(self.regions + self.feature_acronyms)
        else:
            self.model_append = '_'.join(self.regions + self.feature_acronyms + self.model_append)
        self.output_geo = output_geo
        self.data = Data(regions=self.regions, merge_lsoa=self.merge_lsoa).data
        # Full pipeline: outcome + features, split, fit, evaluate, export.
        self.generate_outcome()
        self.generate_features()
        self.train_test_split()
        self.estimate_model(LR=self.LR, RF=self.RF, GB=self.GB)
        self.oos_r2()
        if self.output_geo:
            self.output_geo_df()
    def train_test_split(self):
        """Split features/outcome into train and test sets (seeded, reproducible)."""
        self.X_train, self.X_test, self.y_train, self.y_test =\
            train_test_split(self.data[self.features], self.data['outcome'],
                             test_size=self.test_prop, random_state=self.seed)
        print("Training set dimensions: {}".format(self.X_train.shape))
    def generate_outcome(self):
        """Construct candidate outcome columns and select the configured one."""
        self.data['y'] = self.data['price']
        self.data['ln_y'] = self.data['price'].apply(np.log)
        self.data['rel_y'] = self.data['priceper']
        self.data['outcome'] = self.data[self.outcome]
    def generate_features(self):
        """Generate features to include into the predictions (one-hot encoding)."""
        # identify categorical versus continuous features
        self.cat_features =\
            list(itertools.compress(self.features, [i == 'object' for i in self.data[self.features].dtypes]))
        self.other_features =\
            list(itertools.compress(self.features, [i != 'object' for i in self.data[self.features].dtypes]))
        print("Categorical features identified: {}".format(self.cat_features))
        print("Continous features identified: {}".format(self.other_features))
        # one-hot encode all categorical observations
        enc = OneHotEncoder(handle_unknown='ignore')
        enc.fit(self.data[self.cat_features])
        self.data[enc.get_feature_names(self.cat_features)] = enc.\
            transform(self.data[self.cat_features]).toarray()
        # new features: continuous columns plus the encoded dummies
        self.features = list(itertools.chain(*[self.other_features,
                                           list(enc.get_feature_names(self.cat_features))]))
    def estimate_model(self, LR, RF, GB):
        """Fit the requested model families; NaN placeholders keep oos_r2 usable."""
        if LR:
            self.lr()
        else:
            self.lr_predictions = np.nan
        if RF:
            self.rf()
        else:
            self.rf_predictions = np.nan
        if GB:
            self.gb(gb_model=self.gb_model)
        else:
            self.gb_predictions = np.nan
    def output_geo_df(self):
        """Write per-observation test-set predictions joined with geography codes."""
        assert pd.Series(self.X_test.index).isin(pd.Series(self.data.index)).mean() == 1
        assert pd.Series(self.y_test.index).isin(pd.Series(self.data.index)).mean() == 1
        geo_output = pd.DataFrame({'true': self.y_test.values,
                                   'lr_pred': self.lr_predictions,
                                   'rf_pred': self.rf_predictions,
                                   'gb_pred': self.gb_predictions,
                                   },
                                  index=self.y_test.index)
        geo_df = self.data[['lsoa11', 'msoa11', 'laua', 'lad11nm', 'gor', 'rgn11nm']]
        full_geo = pd.merge(geo_output, geo_df, left_index=True, right_index=True)
        filename = 'geo_output_' + '_'.join(self.regions) + '.csv'
        print("Writing " + filename)
        full_geo.to_csv(os.path.join(self.data_dir, 'edit', filename))
    def oos_r2(self):
        """Compute out-of-sample R2 for each model family from its test residuals."""
        TSS = np.square(self.y_test - self.y_test.mean()).sum()
        ESS_lr = np.square(self.y_test - self.lr_predictions).sum()
        ESS_rf = np.square(self.y_test - self.rf_predictions).sum()
        ESS_gb = np.square(self.y_test - self.gb_predictions).sum()
        self.LR_oos_r2 = (TSS - ESS_lr)/TSS
        self.RF_oos_r2 = (TSS - ESS_rf)/TSS
        self.GB_oos_r2 = (TSS - ESS_gb)/TSS
    def lr(self, predict_linreg=True, verbose=True):
        """Run a standard OLS, hot-loading or saving the pickled model as configured."""
        model_path = os.path.join(self.model_dir, 'LR' + self.model_append + '.sav')
        # setup model, either hot load or estimate directly
        if self.hot_load_models:
            print("Hotloading model")
            try:
                self.reg = pickle.load(open(model_path, 'rb'))
            except FileNotFoundError:
                print("Could not find saved model for hot loading")
                sys.exit(1)
        else:
            self.reg = LinearRegression()
            self.reg.fit(self.X_train, self.y_train)  # train
            if self.save_models:
                print("Saving LR model {}".format(model_path))
                pickle.dump(self.reg, open(model_path, 'wb'))
        self.lr_coeff = self.reg.coef_
        self.lr_predictions = self.reg.predict(self.X_test)
        self.lr_rmse = mean_squared_error(self.lr_predictions, self.y_test)
        if verbose:
            print('LR RMSE: {:3f}'.format(self.lr_rmse))
    def rf(self, rf=None, verbose=True):
        """Estimate Random Forest model on the pre-specified feature space.

        Hot-loads a pickled model or fits a fresh one, then predicts on the
        test set and records the RMSE.
        """
        # BUGFIX: the original default argument RandomForestRegressor(...)
        # was a single shared instance created at import time; all Predict
        # objects trained and reused the same estimator. Build a fresh one
        # per call when none is supplied.
        if rf is None:
            rf = RandomForestRegressor(n_estimators=1000)
        # NOTE: this rebinds self.rf from the bound method to the estimator
        # object; kept as-is because downstream code reads self.rf as the model.
        self.rf = rf
        # setup models
        model_path = os.path.join(self.model_dir, 'RF' + self.model_append + '.sav')
        # setup model, either hot load or estimate directly
        if self.hot_load_models:
            print("Hotloading model")
            try:
                self.rf = pickle.load(open(model_path, 'rb'))
            except FileNotFoundError:
                print("Could not find saved model for hot loading")
                sys.exit(1)
        else:
            self.rf.fit(self.X_train.to_numpy(), self.y_train.ravel())
            if self.save_models:
                print("Saving RF model {}".format(model_path))
                pickle.dump(self.rf, open(model_path, 'wb'))
        # estimate RF model on train set and evaluate performance on test set
        self.rf_predictions = self.rf.predict(self.X_test)
        self.rf_rmse = mean_squared_error(self.rf_predictions, self.y_test)
        if verbose:
            print('RF RMSE: {}'.format(self.rf_rmse))
    def gb(self, verbose=True, gb_model=None):
        """Estimate Gradient Boosting model on the pre-specified feature space.

        Hot-loads a pickled model or fits a fresh one, then predicts on the
        test set and records the RMSE.
        """
        # BUGFIX: same shared-default-instance hazard as rf(); create a
        # fresh estimator when the caller does not supply one.
        if gb_model is None:
            gb_model = GradientBoostingRegressor()
        # setup models
        model_path = os.path.join(self.model_dir, 'GB' + self.model_append + '.sav')
        # NOTE: rebinds self.gb from the bound method to the estimator object.
        self.gb = gb_model
        # setup model, either hot load or estimate directly
        if self.hot_load_models:
            print("Hotloading model")
            try:
                self.gb = pickle.load(open(model_path, 'rb'))
            except FileNotFoundError:
                print("Could not find saved model for hot loading")
                sys.exit(1)
        else:
            self.gb.fit(self.X_train.to_numpy(), self.y_train.ravel())
            if self.save_models:
                print("Saving GB model {}".format(model_path))
                pickle.dump(self.gb, open(model_path, 'wb'))
        self.gb_predictions = self.gb.predict(self.X_test)
        self.gb_rmse = mean_squared_error(self.gb_predictions, self.y_test)
        if verbose:
            print('GB RMSE: {}'.format(self.gb_rmse))
| [
"Data.Data",
"pandas.Series",
"sklearn.ensemble.RandomForestRegressor",
"itertools.compress",
"sklearn.model_selection.train_test_split",
"sklearn.preprocessing.OneHotEncoder",
"pandas.merge",
"os.path.join",
"sklearn.metrics.mean_squared_error",
"numpy.square",
"sys.exit",
"pandas.DataFrame",... | [((959, 986), 'sklearn.ensemble.GradientBoostingRegressor', 'GradientBoostingRegressor', ([], {}), '()\n', (984, 986), False, 'from sklearn.ensemble import RandomForestRegressor, GradientBoostingRegressor\n'), ((2368, 2487), 'sklearn.model_selection.train_test_split', 'train_test_split', (['self.data[self.features]', "self.data['outcome']"], {'test_size': 'self.test_prop', 'random_state': 'self.seed'}), "(self.data[self.features], self.data['outcome'], test_size=\n self.test_prop, random_state=self.seed)\n", (2384, 2487), False, 'from sklearn.model_selection import train_test_split\n'), ((3550, 3588), 'sklearn.preprocessing.OneHotEncoder', 'OneHotEncoder', ([], {'handle_unknown': '"""ignore"""'}), "(handle_unknown='ignore')\n", (3563, 3588), False, 'from sklearn.preprocessing import OneHotEncoder\n'), ((4537, 4709), 'pandas.DataFrame', 'pd.DataFrame', (["{'true': self.y_test.values, 'lr_pred': self.lr_predictions, 'rf_pred':\n self.rf_predictions, 'gb_pred': self.gb_predictions}"], {'index': 'self.y_test.index'}), "({'true': self.y_test.values, 'lr_pred': self.lr_predictions,\n 'rf_pred': self.rf_predictions, 'gb_pred': self.gb_predictions}, index=\n self.y_test.index)\n", (4549, 4709), True, 'import pandas as pd\n'), ((4952, 5015), 'pandas.merge', 'pd.merge', (['geo_output', 'geo_df'], {'left_index': '(True)', 'right_index': '(True)'}), '(geo_output, geo_df, left_index=True, right_index=True)\n', (4960, 5015), True, 'import pandas as pd\n'), ((5758, 5821), 'os.path.join', 'os.path.join', (['self.model_dir', "('LR' + self.model_append + '.sav')"], {}), "(self.model_dir, 'LR' + self.model_append + '.sav')\n", (5770, 5821), False, 'import os\n'), ((6586, 6638), 'sklearn.metrics.mean_squared_error', 'mean_squared_error', (['self.lr_predictions', 'self.y_test'], {}), '(self.lr_predictions, self.y_test)\n', (6604, 6638), False, 'from sklearn.metrics import mean_squared_error\n'), ((6741, 6781), 'sklearn.ensemble.RandomForestRegressor', 
'RandomForestRegressor', ([], {'n_estimators': '(1000)'}), '(n_estimators=1000)\n', (6762, 6781), False, 'from sklearn.ensemble import RandomForestRegressor, GradientBoostingRegressor\n'), ((7043, 7106), 'os.path.join', 'os.path.join', (['self.model_dir', "('RF' + self.model_append + '.sav')"], {}), "(self.model_dir, 'RF' + self.model_append + '.sav')\n", (7055, 7106), False, 'import os\n'), ((7885, 7937), 'sklearn.metrics.mean_squared_error', 'mean_squared_error', (['self.rf_predictions', 'self.y_test'], {}), '(self.rf_predictions, self.y_test)\n', (7903, 7937), False, 'from sklearn.metrics import mean_squared_error\n'), ((8061, 8088), 'sklearn.ensemble.GradientBoostingRegressor', 'GradientBoostingRegressor', ([], {}), '()\n', (8086, 8088), False, 'from sklearn.ensemble import RandomForestRegressor, GradientBoostingRegressor\n'), ((8391, 8454), 'os.path.join', 'os.path.join', (['self.model_dir', "('GB' + self.model_append + '.sav')"], {}), "(self.model_dir, 'GB' + self.model_append + '.sav')\n", (8403, 8454), False, 'import os\n'), ((9180, 9232), 'sklearn.metrics.mean_squared_error', 'mean_squared_error', (['self.gb_predictions', 'self.y_test'], {}), '(self.gb_predictions, self.y_test)\n', (9198, 9232), False, 'from sklearn.metrics import mean_squared_error\n'), ((1028, 1047), 'os.path.abspath', 'os.path.abspath', (['""""""'], {}), "('')\n", (1043, 1047), False, 'import os\n'), ((1097, 1116), 'os.path.abspath', 'os.path.abspath', (['""""""'], {}), "('')\n", (1112, 1116), False, 'import os\n'), ((1932, 1986), 'Data.Data', 'Data', ([], {'regions': 'self.regions', 'merge_lsoa': 'self.merge_lsoa'}), '(regions=self.regions, merge_lsoa=self.merge_lsoa)\n', (1936, 1986), False, 'from Data import Data\n'), ((3081, 3179), 'itertools.compress', 'itertools.compress', (['self.features', "[(i == 'object') for i in self.data[self.features].dtypes]"], {}), "(self.features, [(i == 'object') for i in self.data[self.\n features].dtypes])\n", (3099, 3179), False, 'import 
itertools\n'), ((3223, 3321), 'itertools.compress', 'itertools.compress', (['self.features', "[(i != 'object') for i in self.data[self.features].dtypes]"], {}), "(self.features, [(i != 'object') for i in self.data[self.\n features].dtypes])\n", (3241, 3321), False, 'import itertools\n'), ((5157, 5202), 'os.path.join', 'os.path.join', (['self.data_dir', '"""edit"""', 'filename'], {}), "(self.data_dir, 'edit', filename)\n", (5169, 5202), False, 'import os\n'), ((6216, 6234), 'sklearn.linear_model.LinearRegression', 'LinearRegression', ([], {}), '()\n', (6232, 6234), False, 'from sklearn.linear_model import LinearRegression\n'), ((5316, 5360), 'numpy.square', 'np.square', (['(self.y_test - self.lr_predictions)'], {}), '(self.y_test - self.lr_predictions)\n', (5325, 5360), True, 'import numpy as np\n'), ((5385, 5429), 'numpy.square', 'np.square', (['(self.y_test - self.rf_predictions)'], {}), '(self.y_test - self.rf_predictions)\n', (5394, 5429), True, 'import numpy as np\n'), ((5454, 5498), 'numpy.square', 'np.square', (['(self.y_test - self.gb_predictions)'], {}), '(self.y_test - self.gb_predictions)\n', (5463, 5498), True, 'import numpy as np\n'), ((6165, 6176), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (6173, 6176), False, 'import sys\n'), ((7449, 7460), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (7457, 7460), False, 'import sys\n'), ((8835, 8846), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (8843, 8846), False, 'import sys\n'), ((4385, 4411), 'pandas.Series', 'pd.Series', (['self.data.index'], {}), '(self.data.index)\n', (4394, 4411), True, 'import pandas as pd\n'), ((4475, 4501), 'pandas.Series', 'pd.Series', (['self.data.index'], {}), '(self.data.index)\n', (4484, 4501), True, 'import pandas as pd\n'), ((4351, 4379), 'pandas.Series', 'pd.Series', (['self.X_test.index'], {}), '(self.X_test.index)\n', (4360, 4379), True, 'import pandas as pd\n'), ((4441, 4469), 'pandas.Series', 'pd.Series', (['self.y_test.index'], {}), '(self.y_test.index)\n', 
(4450, 4469), True, 'import pandas as pd\n')] |
import asyncio
import json
import os
import ssl
from mock import Mock
from aiohttp.client import ClientSession
from serversion.kubernetes import Kubernetes
def mock_is_file(filename):
    """os.path.isfile stand-in that pretends the in-cluster CA cert exists."""
    ca_path = "/var/run/secrets/kubernetes.io/serviceaccount/ca.crt"
    return True if filename == ca_path else os.path.isfile(filename)
def test_api_server_url():
    """The constructor must keep a custom API server URL verbatim."""
    custom_url = "https://unique-kubernetes.default.svc"
    client = Kubernetes(custom_url)
    assert client.api_server == custom_url
def test_set_ssl_context(monkeypatch):
    """_set_ssl_context should run when the service-account CA file appears to exist."""
    kubernetes = Kubernetes()
    # Pretend the in-cluster CA certificate file exists on disk.
    monkeypatch.setattr(os.path, 'isfile', mock_is_file)
    # NOTE(review): this patches ssl.create_default_context globally for the
    # whole test process (not via monkeypatch) — presumably intentional, but
    # it is not restored afterwards; confirm.
    ssl.create_default_context = Mock(return_value=None)
    kubernetes._set_ssl_context()
async def mock_get(self, url: str, headers, ssl):  # pylint: disable=unused-argument,redefined-outer-name
    """Stub for aiohttp ClientSession.get returning canned Kubernetes payloads.

    The returned object exposes a ``json()`` classmethod that yields a
    pre-resolved future, mimicking the awaitable response body: a single
    pod for ``.../pods`` URLs, a single Helm release secret for
    ``.../secrets`` URLs, and an empty dict otherwise.
    """
    class MockJson:
        @classmethod
        def json(cls):
            # Resolve the canned payload immediately so callers can await it.
            future = asyncio.Future()
            if url.endswith("pods"):
                result = {
                    "items": [{
                        "metadata": {
                            "generateName": "pod-name-1-1",
                        },
                        "spec": {
                            "containers": [{
                                "name": "container-name",
                                "image": "image-name",
                            }],
                        },
                    }],
                }
            elif url.endswith("secrets"):
                result = {
                    "items": [{
                        "type": "helm.sh/release.v1",
                    }],
                }
            else:
                result = {}
            future.set_result(result)
            return future
    return MockJson()
async def test_get_namespaces(monkeypatch):
    """With the stubbed HTTP client, get_namespaces returns an empty set."""
    kubernetes = Kubernetes()
    # Route all ClientSession.get calls to the canned mock_get responses.
    monkeypatch.setattr(ClientSession, 'get', mock_get)
    namespaces = await kubernetes.get_namespaces()
    assert namespaces == set()
def test_set_headers():
    """_set_headers must embed the current token in the Authorization header."""
    client = Kubernetes()
    # With no token set, the header is still built (interpolating None).
    client._set_headers()
    assert client.headers == {"Authorization": "Bearer None"}
    # Once a token is assigned, rebuilding the headers must pick it up.
    client.token = "test"
    client._set_headers()
    assert client.headers == {"Authorization": "Bearer test"}
async def test_get_images_with_tags(monkeypatch):
    """Pod data from the stubbed API is folded into the namespace/pod/container map."""
    kubernetes = Kubernetes()
    namespaces = set(["kube-system"])
    # Route all ClientSession.get calls to the canned mock_get responses.
    monkeypatch.setattr(ClientSession, 'get', mock_get)
    images_with_tags = await kubernetes.get_images_with_tags(namespaces)
    # The pod's generateName "pod-name-1-1" is reduced to "pod-name".
    assert images_with_tags == {
        "kube-system": {
            "pod-name": {
                "containers": {
                    "container-name": "image-name",
                },
            },
        },
    }
async def test_get_helm_data(monkeypatch):
    """Helm release secrets are decoded into the namespace/release/version map."""
    kubernetes = Kubernetes()
    # Route all ClientSession.get calls to the canned mock_get responses.
    monkeypatch.setattr(ClientSession, 'get', mock_get)
    # Bypass the real base64/gzip decoding of the Helm secret payload by
    # making json.loads return a fixed release manifest.
    monkeypatch.setattr(json, 'loads', lambda _: {
        "chart": {
            "metadata": {
                "name": "release-name",
                "version": "1.0.0",
                "appVersion": "2.0.0",
            },
        },
        "namespace": "kube-system",
    })
    helm_data = await kubernetes.get_helm_data()
    assert helm_data == {
        "kube-system": {
            "release-name": {
                "appVersion": "2.0.0",
                "version": "1.0.0",
            },
        },
    }
| [
"os.path.isfile",
"serversion.kubernetes.Kubernetes",
"mock.Mock",
"asyncio.Future"
] | [((292, 316), 'os.path.isfile', 'os.path.isfile', (['filename'], {}), '(filename)\n', (306, 316), False, 'import os\n'), ((431, 464), 'serversion.kubernetes.Kubernetes', 'Kubernetes', (['unique_api_server_url'], {}), '(unique_api_server_url)\n', (441, 464), False, 'from serversion.kubernetes import Kubernetes\n'), ((581, 593), 'serversion.kubernetes.Kubernetes', 'Kubernetes', ([], {}), '()\n', (591, 593), False, 'from serversion.kubernetes import Kubernetes\n'), ((684, 707), 'mock.Mock', 'Mock', ([], {'return_value': 'None'}), '(return_value=None)\n', (688, 707), False, 'from mock import Mock\n'), ((1858, 1870), 'serversion.kubernetes.Kubernetes', 'Kubernetes', ([], {}), '()\n', (1868, 1870), False, 'from serversion.kubernetes import Kubernetes\n'), ((2052, 2064), 'serversion.kubernetes.Kubernetes', 'Kubernetes', ([], {}), '()\n', (2062, 2064), False, 'from serversion.kubernetes import Kubernetes\n'), ((2356, 2368), 'serversion.kubernetes.Kubernetes', 'Kubernetes', ([], {}), '()\n', (2366, 2368), False, 'from serversion.kubernetes import Kubernetes\n'), ((2817, 2829), 'serversion.kubernetes.Kubernetes', 'Kubernetes', ([], {}), '()\n', (2827, 2829), False, 'from serversion.kubernetes import Kubernetes\n'), ((935, 951), 'asyncio.Future', 'asyncio.Future', ([], {}), '()\n', (949, 951), False, 'import asyncio\n')] |
#!/bin/env python
#
# Copyright (C) 2014 eNovance SAS <<EMAIL>>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import config
from utils import Base
from utils import GerritGitUtils
from pysflib.sfgerrit import GerritUtils
class TestConfigRepo(Base):
    """Checks that the 'config' repository was bootstrapped on the Gerrit host."""

    @classmethod
    def setUpClass(cls):
        pass

    @classmethod
    def tearDownClass(cls):
        pass

    def test_check_config_repo_exists(self):
        """ Validate config repo has been bootstraped
        """
        project = 'config'
        admin = config.ADMIN_USER
        gerrit = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[admin]['auth_cookie'])
        self.assertTrue(gerrit.project_exists(project))
        git_helper = GerritGitUtils(admin,
                                    config.ADMIN_PRIV_KEY_PATH,
                                    config.USERS[admin]['email'])
        clone_url = "ssh://%s@%s:29418/%s" % (admin,
                                              config.GATEWAY_HOST, project)
        workdir = git_helper.clone(clone_url, project)
        # The clone must have produced a directory ...
        self.assertTrue(os.path.isdir(workdir))
        # ... containing the job/project definitions file.
        projects_file = os.path.join(workdir, "jobs/projects.yaml")
        self.assertTrue(os.path.isfile(projects_file))
| [
"os.path.join",
"os.path.isdir",
"pysflib.sfgerrit.GerritUtils",
"utils.GerritGitUtils"
] | [((1029, 1125), 'pysflib.sfgerrit.GerritUtils', 'GerritUtils', (['config.GATEWAY_URL'], {'auth_cookie': "config.USERS[config.ADMIN_USER]['auth_cookie']"}), "(config.GATEWAY_URL, auth_cookie=config.USERS[config.ADMIN_USER]\n ['auth_cookie'])\n", (1040, 1125), False, 'from pysflib.sfgerrit import GerritUtils\n'), ((1211, 1319), 'utils.GerritGitUtils', 'GerritGitUtils', (['config.ADMIN_USER', 'config.ADMIN_PRIV_KEY_PATH', "config.USERS[config.ADMIN_USER]['email']"], {}), "(config.ADMIN_USER, config.ADMIN_PRIV_KEY_PATH, config.USERS[\n config.ADMIN_USER]['email'])\n", (1225, 1319), False, 'from utils import GerritGitUtils\n'), ((1609, 1633), 'os.path.isdir', 'os.path.isdir', (['clone_dir'], {}), '(clone_dir)\n', (1622, 1633), False, 'import os\n'), ((1725, 1770), 'os.path.join', 'os.path.join', (['clone_dir', '"""jobs/projects.yaml"""'], {}), "(clone_dir, 'jobs/projects.yaml')\n", (1737, 1770), False, 'import os\n')] |
#!/usr/bin/env python
import sys
from os import getcwd
sys.path.append(getcwd() + "/pymodules")
import plotter
from json import dumps
from flask import Flask, render_template, request, session, redirect, flash, url_for
from nseLookup import MapFormat
from finLogin import Connector
import datetime
app = Flask(__name__)
# NOTE(review): hardcoded secret key — should come from config/env in production.
app.secret_key = "<KEY>"
# In-memory per-symbol cache: {SYMBOL: {"data": ..., "time": datetime, "info": ...}}.
cacher = {}
# JSON-encoded list of stock symbols, used to populate the client-side search options.
ListOfOption = dumps([i[-1] for i in MapFormat().Stocks()])
def authenticate(request):
    """Validate the submitted credentials against the login database.

    Returns the backend's login-result dict (callers read keys such as
    'Authenticated', 'state', 'email' and 'f_name').
    """
    db = Connector("127.0.0.1", "xxxx", "xxxx", "xxxxx")
    username = request.form["username"]
    password = request.form["password"]
    return db.checkLogin(username, password)
def generate_graph(search="NIFTY"):
    """Build (and cache) the option-chain graph payload for a stock symbol.

    Results are cached per symbol for 60 seconds to avoid re-scraping on
    every request.  Returns a dict {"data": [...], "time": datetime,
    "info": ...} or None when the symbol is invalid / scraping fails.
    """
    try:
        stock = search.upper()
        # Serve from cache while the entry is less than a minute old.
        cached = cacher.get(stock)
        if cached and (datetime.datetime.now() - cached["time"]).seconds < 60:
            return cached
        data = MapFormat(offset=8)
        data.url = stock
        stock_data = data.parse()
        # BUG FIX: the original tested `data == None`, which can never be true
        # at that point — the intent was to bail out when parsing produced
        # nothing.
        if stock_data is None:
            return None
        oi_chg_graph, oi_chg_raw = data.oi_change_graph()
        oi_graph, oi_raw = data.oi_numbers()
        max_pain = data.max_pain()
        grapher = [{"graph": oi_chg_graph, "raw": oi_chg_raw},
                   {"graph": oi_graph, "raw": oi_raw},
                   {"graph": max_pain}]
        cacher[stock] = {"data": grapher,
                         "time": datetime.datetime.now(),
                         "info": stock_data["info"]}
        return cacher[stock]
    except Exception:
        # Any scraping/parsing failure is reported to callers as "no data".
        return None
#=============================================================
@app.route("/secret")
def secret():
    """Debug endpoint: render the raw plot data for NIFTY."""
    payload = plotter.get_data("nifty")
    return render_template('secret.html', data=payload)
@app.route("/user")
def user():
    """Render the user page, or bounce anonymous visitors to the landing page."""
    if "username" not in session:
        flash("You are currently not logged in")
        return redirect(url_for("landing"))
    return render_template('user.html', logged_in=True,
                           user=session["f_name"],
                           graph_data=generate_graph())
@app.route("/logout")
def logout():
    """Drop the session and send the visitor back to the landing page."""
    if "username" in session:
        session.pop("username")
        # NOTE(review): "f_name" is left in the session; it is only read when
        # "username" is present, so this appears harmless — confirm.
        flash("You have been logged out successfully")
    else:
        flash("You are currently not logged in")
    return redirect(url_for("landing"))
@app.route("/register", methods=["POST"])
def register():
    # TODO: registration is not implemented; POSTs are currently accepted and
    # ignored (the view returns None).
    pass
@app.route("/login", methods=["POST"])
@app.route("/home")
@app.route("/search", methods=["POST"])
def home():
    # Single view serving three routes: POST /login (credential check),
    # POST /search (symbol lookup) and GET /home (dashboard refresh).
    if request.method =="POST" and request.path == "/login":
        data = authenticate(request)
        if data["Authenticated"] == True and data["state"] == "enabled":
            # Successful login: remember the user and render the default graphs.
            session["username"] = data["email"]
            session["f_name"] = data["f_name"]
            return render_template('home.html', logged_in=True, user=session["f_name"], graph_data = generate_graph())
        elif data["Authenticated"] == True and data["state"] != "enabled":
            # Correct password, but the account has not finished email verification.
            flash("Error: Please complete email address verification")
            return redirect(url_for(("landing")))
        else:
            flash("Error: Incorrect username/password")
            return redirect(url_for(("landing")))
    elif request.method == "POST" and request.path == "/search":
        if "username" in session:
            graph_data = generate_graph(request.form["search"])
            if not graph_data:
                # generate_graph() returns None for unknown/unparseable symbols.
                flash("Error: "+ request.form["search"] + " is not a valid search parameter")
                return redirect(url_for(("landing")))
            else:
                return render_template('home.html', logged_in=True, user=session["f_name"], graph_data = generate_graph(request.form["search"]))
        else:
            flash("Error: You are not currently logged in")
            return redirect(url_for(("landing")))
    else:
        # Plain GET /home: require a session, then render the default dashboard.
        if "username" in session:
            return render_template('home.html', logged_in=True, user=session["f_name"], graph_data = generate_graph())
        else:
            flash("Error: You are not currently logged in")
            return redirect(url_for(("landing")))
@app.route('/')
def landing():
    """Landing page: logged-in users go straight to /home."""
    if "username" in session:
        return redirect(url_for("home"))
    return render_template('main.html')
if __name__ == "__main__":
    # Dev server: listen on all interfaces with the debugger enabled and
    # threaded request handling.
    app.run(host="0.0.0.0", debug=True, threaded=True)
| [
"flask.render_template",
"nseLookup.MapFormat",
"plotter.get_data",
"flask.flash",
"flask.Flask",
"os.getcwd",
"flask.url_for",
"datetime.datetime.now",
"finLogin.Connector",
"flask.session.pop"
] | [((306, 321), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (311, 321), False, 'from flask import Flask, render_template, request, session, redirect, flash, url_for\n'), ((465, 512), 'finLogin.Connector', 'Connector', (['"""127.0.0.1"""', '"""xxxx"""', '"""xxxx"""', '"""xxxxx"""'], {}), "('127.0.0.1', 'xxxx', 'xxxx', 'xxxxx')\n", (474, 512), False, 'from finLogin import Connector\n'), ((1528, 1553), 'plotter.get_data', 'plotter.get_data', (['"""nifty"""'], {}), "('nifty')\n", (1544, 1553), False, 'import plotter\n'), ((1565, 1603), 'flask.render_template', 'render_template', (['"""secret.html"""'], {'data': 'a'}), "('secret.html', data=a)\n", (1580, 1603), False, 'from flask import Flask, render_template, request, session, redirect, flash, url_for\n'), ((72, 80), 'os.getcwd', 'getcwd', ([], {}), '()\n', (78, 80), False, 'from os import getcwd\n'), ((690, 709), 'nseLookup.MapFormat', 'MapFormat', ([], {'offset': '(8)'}), '(offset=8)\n', (699, 709), False, 'from nseLookup import MapFormat\n'), ((1806, 1846), 'flask.flash', 'flash', (['"""You are currently not logged in"""'], {}), "('You are currently not logged in')\n", (1811, 1846), False, 'from flask import Flask, render_template, request, session, redirect, flash, url_for\n'), ((1966, 1989), 'flask.session.pop', 'session.pop', (['"""username"""'], {}), "('username')\n", (1977, 1989), False, 'from flask import Flask, render_template, request, session, redirect, flash, url_for\n'), ((1998, 2044), 'flask.flash', 'flash', (['"""You have been logged out successfully"""'], {}), "('You have been logged out successfully')\n", (2003, 2044), False, 'from flask import Flask, render_template, request, session, redirect, flash, url_for\n'), ((2107, 2147), 'flask.flash', 'flash', (['"""You are currently not logged in"""'], {}), "('You are currently not logged in')\n", (2112, 2147), False, 'from flask import Flask, render_template, request, session, redirect, flash, url_for\n'), ((4118, 4146), 
'flask.render_template', 'render_template', (['"""main.html"""'], {}), "('main.html')\n", (4133, 4146), False, 'from flask import Flask, render_template, request, session, redirect, flash, url_for\n'), ((1871, 1889), 'flask.url_for', 'url_for', (['"""landing"""'], {}), "('landing')\n", (1878, 1889), False, 'from flask import Flask, render_template, request, session, redirect, flash, url_for\n'), ((2069, 2087), 'flask.url_for', 'url_for', (['"""landing"""'], {}), "('landing')\n", (2076, 2087), False, 'from flask import Flask, render_template, request, session, redirect, flash, url_for\n'), ((2172, 2190), 'flask.url_for', 'url_for', (['"""landing"""'], {}), "('landing')\n", (2179, 2190), False, 'from flask import Flask, render_template, request, session, redirect, flash, url_for\n'), ((4076, 4091), 'flask.url_for', 'url_for', (['"""home"""'], {}), "('home')\n", (4083, 4091), False, 'from flask import Flask, render_template, request, session, redirect, flash, url_for\n'), ((1300, 1323), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1321, 1323), False, 'import datetime\n'), ((2845, 2903), 'flask.flash', 'flash', (['"""Error: Please complete email address verification"""'], {}), "('Error: Please complete email address verification')\n", (2850, 2903), False, 'from flask import Flask, render_template, request, session, redirect, flash, url_for\n'), ((2980, 3023), 'flask.flash', 'flash', (['"""Error: Incorrect username/password"""'], {}), "('Error: Incorrect username/password')\n", (2985, 3023), False, 'from flask import Flask, render_template, request, session, redirect, flash, url_for\n'), ((3605, 3652), 'flask.flash', 'flash', (['"""Error: You are not currently logged in"""'], {}), "('Error: You are not currently logged in')\n", (3610, 3652), False, 'from flask import Flask, render_template, request, session, redirect, flash, url_for\n'), ((3892, 3939), 'flask.flash', 'flash', (['"""Error: You are not currently logged in"""'], {}), "('Error: You 
are not currently logged in')\n", (3897, 3939), False, 'from flask import Flask, render_template, request, session, redirect, flash, url_for\n'), ((397, 408), 'nseLookup.MapFormat', 'MapFormat', ([], {}), '()\n', (406, 408), False, 'from nseLookup import MapFormat\n'), ((2932, 2950), 'flask.url_for', 'url_for', (['"""landing"""'], {}), "('landing')\n", (2939, 2950), False, 'from flask import Flask, render_template, request, session, redirect, flash, url_for\n'), ((3052, 3070), 'flask.url_for', 'url_for', (['"""landing"""'], {}), "('landing')\n", (3059, 3070), False, 'from flask import Flask, render_template, request, session, redirect, flash, url_for\n'), ((3284, 3362), 'flask.flash', 'flash', (["('Error: ' + request.form['search'] + ' is not a valid search parameter')"], {}), "('Error: ' + request.form['search'] + ' is not a valid search parameter')\n", (3289, 3362), False, 'from flask import Flask, render_template, request, session, redirect, flash, url_for\n'), ((3681, 3699), 'flask.url_for', 'url_for', (['"""landing"""'], {}), "('landing')\n", (3688, 3699), False, 'from flask import Flask, render_template, request, session, redirect, flash, url_for\n'), ((3968, 3986), 'flask.url_for', 'url_for', (['"""landing"""'], {}), "('landing')\n", (3975, 3986), False, 'from flask import Flask, render_template, request, session, redirect, flash, url_for\n'), ((767, 790), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (788, 790), False, 'import datetime\n'), ((3394, 3412), 'flask.url_for', 'url_for', (['"""landing"""'], {}), "('landing')\n", (3401, 3412), False, 'from flask import Flask, render_template, request, session, redirect, flash, url_for\n')] |
import random
import math
import pyglet
from .letter import Letter
def random_color():
    """Return a random RGB colour as an (r, g, b) tuple of ints in 0-255."""
    return tuple(random.randint(0, 255) for _ in range(3))
def spawn_letters(bitmap_font, text, start_x, start_y, batch, boundaries):
    """Spawn one Letter sprite per character of *text*, radiating out from
    (start_x, start_y) in evenly spaced directions around a full circle.

    Returns the list of created Letter objects (empty for empty text).
    """
    points = len(text)
    if points <= 0:
        return []
    letters = []
    angle = 0
    angle_diff = 360 / points
    for letter in text:
        letter_image = bitmap_font.get_image(letter)
        # BUG FIX: math.cos/math.sin take radians; the original fed them the
        # angle in degrees, which scrambled the even spread of directions.
        theta = math.radians(angle)
        direction = [math.cos(theta), math.sin(theta)]
        l = Letter(img=letter_image,
                   batch=batch,
                   life_milliseconds=5.0, boundaries=boundaries,
                   direction=direction)
        l.x = start_x
        l.y = start_y
        letters.append(l)
        angle += angle_diff
    return letters
def load_gif_animation(path, tex_bin=None):
    """Load an animated GIF and pack its frames into a texture bin.

    When *tex_bin* is falsy, a fresh TextureBin is created for the animation.
    """
    if not tex_bin:
        tex_bin = pyglet.image.atlas.TextureBin()
    anim = pyglet.image.load_animation(path)
    anim.add_to_texture_bin(tex_bin)
    return anim
def load_grid_animation(path, rows, cols, frame_duration=0.1):
    """Slice a sprite-sheet resource into a looping animation.

    The image at *path* is cut into rows x cols cells, each shown for
    *frame_duration* seconds.
    """
    sheet = pyglet.resource.image(path)
    cells = pyglet.image.ImageGrid(sheet, rows, cols)
    return pyglet.image.Animation.from_image_sequence(cells,
                                                      frame_duration,
                                                      True)
| [
"pyglet.resource.image",
"pyglet.image.ImageGrid",
"pyglet.image.load_animation",
"math.cos",
"pyglet.image.Animation.from_image_sequence",
"math.sin",
"random.randint",
"pyglet.image.atlas.TextureBin"
] | [((946, 979), 'pyglet.image.load_animation', 'pyglet.image.load_animation', (['path'], {}), '(path)\n', (973, 979), False, 'import pyglet\n'), ((1115, 1142), 'pyglet.resource.image', 'pyglet.resource.image', (['path'], {}), '(path)\n', (1136, 1142), False, 'import pyglet\n'), ((1154, 1193), 'pyglet.image.ImageGrid', 'pyglet.image.ImageGrid', (['img', 'rows', 'cols'], {}), '(img, rows, cols)\n', (1176, 1193), False, 'import pyglet\n'), ((1205, 1275), 'pyglet.image.Animation.from_image_sequence', 'pyglet.image.Animation.from_image_sequence', (['grid', 'frame_duration', '(True)'], {}), '(grid, frame_duration, True)\n', (1247, 1275), False, 'import pyglet\n'), ((111, 133), 'random.randint', 'random.randint', (['(0)', '(255)'], {}), '(0, 255)\n', (125, 133), False, 'import random\n'), ((143, 165), 'random.randint', 'random.randint', (['(0)', '(255)'], {}), '(0, 255)\n', (157, 165), False, 'import random\n'), ((175, 197), 'random.randint', 'random.randint', (['(0)', '(255)'], {}), '(0, 255)\n', (189, 197), False, 'import random\n'), ((898, 929), 'pyglet.image.atlas.TextureBin', 'pyglet.image.atlas.TextureBin', ([], {}), '()\n', (927, 929), False, 'import pyglet\n'), ((504, 519), 'math.cos', 'math.cos', (['angle'], {}), '(angle)\n', (512, 519), False, 'import math\n'), ((521, 536), 'math.sin', 'math.sin', (['angle'], {}), '(angle)\n', (529, 536), False, 'import math\n')] |
import cv2
import dlib
import numpy as np
import pyautogui
import imutils
import time
from imutils import face_utils
# BGR colour constants used for the on-screen overlays.
WHITE_COLOR = (255, 255, 255)
YELLOW_COLOR = (0, 255, 255)
RED_COLOR = (0, 0, 255)
GREEN_COLOR = (0, 255, 0)
BLUE_COLOR = (255, 0, 0)
BLACK_COLOR = (0, 0, 0)
# Mouth-aspect-ratio above which the mouth counts as "open" (triggers a click).
MOUTH_AR_THRESH = 0.6
# Pre-trained dlib 68-point facial landmark model.
shape_predictor = "./shape_predictor_68_face_landmarks.dat"
detector = dlib.get_frontal_face_detector()
predictor = dlib.shape_predictor(shape_predictor)
# Landmark index ranges for the facial regions tracked below.
(l_eye_start, l_eye_end) = face_utils.FACIAL_LANDMARKS_IDXS['left_eye']
(r_eye_start, r_eye_end) = face_utils.FACIAL_LANDMARKS_IDXS['right_eye']
(mouth_start, mouth_end) = face_utils.FACIAL_LANDMARKS_IDXS['mouth']
(nose_start, nose_end) = face_utils.FACIAL_LANDMARKS_IDXS['nose']
# webcam capture (default device 0)
vid = cv2.VideoCapture(0)
resolution_w = 1366
resolution_h = 768
cam_w = 640
cam_h = 480
# Current virtual mouse position driven by head movement.
mouse_x = 0
mouse_y = 0
# Scale factors from camera pixels to screen pixels.
# NOTE(review): computed but not referenced in the loop below — confirm intent.
unit_w = resolution_w / cam_w
unit_h = resolution_h / cam_h
# Half-lengths of the crosshair drawn around the calibrated nose position.
padding_x, padding_y = 50, 50
# Dead-zone (in pixels) around the calibration point before the cursor moves.
control_padding = 20
# set guide rect: calibration rectangle centred in the camera frame
rect_start = (cam_w//2-100, cam_h//2-100)
rect_end = (cam_w//2+100, cam_h//2+100)
# process flips to True once calibration completes (face held inside the rect).
process = False
counter = 0
cursor_coordinates = ()
# Allow pyautogui to move the cursor into screen corners without aborting.
pyautogui.FAILSAFE = False
def mouth_aspect_ratio(mouth):
    """Return the mouth-aspect ratio (MAR) for the 20 mouth landmarks.

    The MAR relates the vertical lip opening to the horizontal mouth width;
    larger values correspond to a more open mouth.
    """
    # Vertical distances between the three opposing inner-lip landmark pairs.
    vertical_pairs = ((13, 19), (14, 18), (15, 17))
    vertical = sum(np.linalg.norm(mouth[i] - mouth[j]) for i, j in vertical_pairs)
    # Horizontal mouth width (left corner to right corner).
    width = np.linalg.norm(mouth[12] - mouth[16])
    return vertical / (2 * width)
# Main capture loop: first a calibration phase (hold the face inside the guide
# rectangle), then head tracking drives the OS mouse cursor.
while True:
    _, frame = vid.read()
    # Mirror the frame so on-screen movement matches head movement.
    frame = cv2.flip(frame, 1)
    frame = imutils.resize(frame, width=cam_w, height=cam_h)
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    # Detect faces
    rects = detector(gray, 0)
    # if face detected, track only the first one
    if len(rects) > 0:
        rect = rects[0]
    else:
        # No face in view: just show the frame and try again.
        cv2.imshow("Frame", frame)
        key = cv2.waitKey(1) & 0xFF
        continue
    shape = predictor(gray, rect)
    shape = face_utils.shape_to_np(shape)
    if(process == True):
        # --- Control phase: nose position steers the cursor. ---
        mouth = shape[mouth_start:mouth_end]
        nose = shape[nose_start: nose_end]
        # Draw the nose tip, the guide rectangle and a crosshair at the
        # calibrated neutral position.
        cv2.circle(frame, (nose[3, 0], nose[3, 1]), 5, BLUE_COLOR, 1)
        cv2.rectangle(frame, rect_start, rect_end, RED_COLOR, 2)
        cv2.line(frame, (cursor_coordinates[0]-padding_x, cursor_coordinates[1]), (cursor_coordinates[0]+padding_x, cursor_coordinates[1]), YELLOW_COLOR, 2)
        cv2.line(frame, (cursor_coordinates[0], cursor_coordinates[1]-padding_y), (cursor_coordinates[0], cursor_coordinates[1]+padding_y), YELLOW_COLOR, 2)
        cv2.imshow("Frame", frame)
        # Move the cursor 5 px per frame once the nose leaves the dead-zone,
        # clamped to roughly a 1920x1080 screen.
        if nose[3,0] > cursor_coordinates[0]+control_padding:
            if mouse_x <= 1910:
                mouse_x += 5
        elif nose[3,0] < cursor_coordinates[0]-control_padding:
            if mouse_x >= 10:
                mouse_x -= 5
        if nose[3,1] > cursor_coordinates[1]+control_padding:
            if mouse_y <= 1080:
                mouse_y += 5
        elif nose[3,1] < cursor_coordinates[1]-control_padding:
            if mouse_y >= 10:
                mouse_y -= 5
        # if mouth open, click at the current cursor position
        mar = mouth_aspect_ratio(mouth)
        if(mar>MOUTH_AR_THRESH):
            pyautogui.click(mouse_x, mouse_y)
        pyautogui.moveTo(mouse_x, mouse_y)
        key = cv2.waitKey(1) & 0xFF
    else:
        # --- Calibration phase ---
        # get eyes
        left_eye = shape[l_eye_start:l_eye_end]
        right_eye = shape[r_eye_start:r_eye_end]
        nose = shape[nose_start: nose_end]
        # swap left and right (presumably to compensate for the mirrored frame)
        temp = left_eye
        left_eye = right_eye
        right_eye = temp
        # is face inside of rectangle
        if(left_eye[3,0]>rect_start[0] and left_eye[3,0]<rect_end[0]
            and right_eye[3,0]>rect_start[0] and right_eye[3,0]<rect_end[0]
            and left_eye[3,1]>rect_start[1] and left_eye[3,1]<rect_end[1]
            and right_eye[3,1]>rect_start[1] and right_eye[3,1]<rect_end[1]):
            # Show countdown progress (counter//10); once counter exceeds 100
            # frames, lock in the nose position as the neutral cursor origin
            # and switch to the control phase.
            cv2.putText(frame, str(counter//10), (cam_w//2-100, cam_h//2+100), cv2.FONT_HERSHEY_SIMPLEX, 1, GREEN_COLOR)
            counter += 1
            if(counter/10 > 10):
                cursor_coordinates = nose[3]
                process = True
        else:
            # Face left the rectangle: restart the countdown.
            counter = 0
        cv2.rectangle(frame, rect_start, rect_end, WHITE_COLOR, 2)
        cv2.putText(frame, "Hold your face inside of rectangle for 10 sec", (cam_w//2-100, cam_h//2+200), cv2.FONT_HERSHEY_PLAIN, 1, GREEN_COLOR)
        cv2.imshow("Frame", frame)
    key = cv2.waitKey(10) & 0xFF
| [
"cv2.rectangle",
"cv2.flip",
"pyautogui.moveTo",
"cv2.line",
"dlib.shape_predictor",
"cv2.imshow",
"cv2.putText",
"dlib.get_frontal_face_detector",
"imutils.resize",
"cv2.circle",
"pyautogui.click",
"cv2.VideoCapture",
"cv2.cvtColor",
"numpy.linalg.norm",
"imutils.face_utils.shape_to_np"... | [((370, 402), 'dlib.get_frontal_face_detector', 'dlib.get_frontal_face_detector', ([], {}), '()\n', (400, 402), False, 'import dlib\n'), ((415, 452), 'dlib.shape_predictor', 'dlib.shape_predictor', (['shape_predictor'], {}), '(shape_predictor)\n', (435, 452), False, 'import dlib\n'), ((748, 767), 'cv2.VideoCapture', 'cv2.VideoCapture', (['(0)'], {}), '(0)\n', (764, 767), False, 'import cv2\n'), ((1297, 1334), 'numpy.linalg.norm', 'np.linalg.norm', (['(mouth[13] - mouth[19])'], {}), '(mouth[13] - mouth[19])\n', (1311, 1334), True, 'import numpy as np\n'), ((1343, 1380), 'numpy.linalg.norm', 'np.linalg.norm', (['(mouth[14] - mouth[18])'], {}), '(mouth[14] - mouth[18])\n', (1357, 1380), True, 'import numpy as np\n'), ((1389, 1426), 'numpy.linalg.norm', 'np.linalg.norm', (['(mouth[15] - mouth[17])'], {}), '(mouth[15] - mouth[17])\n', (1403, 1426), True, 'import numpy as np\n'), ((1537, 1574), 'numpy.linalg.norm', 'np.linalg.norm', (['(mouth[12] - mouth[16])'], {}), '(mouth[12] - mouth[16])\n', (1551, 1574), True, 'import numpy as np\n'), ((1748, 1766), 'cv2.flip', 'cv2.flip', (['frame', '(1)'], {}), '(frame, 1)\n', (1756, 1766), False, 'import cv2\n'), ((1779, 1827), 'imutils.resize', 'imutils.resize', (['frame'], {'width': 'cam_w', 'height': 'cam_h'}), '(frame, width=cam_w, height=cam_h)\n', (1793, 1827), False, 'import imutils\n'), ((1839, 1878), 'cv2.cvtColor', 'cv2.cvtColor', (['frame', 'cv2.COLOR_BGR2GRAY'], {}), '(frame, cv2.COLOR_BGR2GRAY)\n', (1851, 1878), False, 'import cv2\n'), ((2143, 2172), 'imutils.face_utils.shape_to_np', 'face_utils.shape_to_np', (['shape'], {}), '(shape)\n', (2165, 2172), False, 'from imutils import face_utils\n'), ((2016, 2042), 'cv2.imshow', 'cv2.imshow', (['"""Frame"""', 'frame'], {}), "('Frame', frame)\n", (2026, 2042), False, 'import cv2\n'), ((2296, 2357), 'cv2.circle', 'cv2.circle', (['frame', '(nose[3, 0], nose[3, 1])', '(5)', 'BLUE_COLOR', '(1)'], {}), '(frame, (nose[3, 0], nose[3, 1]), 5, 
BLUE_COLOR, 1)\n', (2306, 2357), False, 'import cv2\n'), ((2366, 2422), 'cv2.rectangle', 'cv2.rectangle', (['frame', 'rect_start', 'rect_end', 'RED_COLOR', '(2)'], {}), '(frame, rect_start, rect_end, RED_COLOR, 2)\n', (2379, 2422), False, 'import cv2\n'), ((2431, 2592), 'cv2.line', 'cv2.line', (['frame', '(cursor_coordinates[0] - padding_x, cursor_coordinates[1])', '(cursor_coordinates[0] + padding_x, cursor_coordinates[1])', 'YELLOW_COLOR', '(2)'], {}), '(frame, (cursor_coordinates[0] - padding_x, cursor_coordinates[1]),\n (cursor_coordinates[0] + padding_x, cursor_coordinates[1]), YELLOW_COLOR, 2\n )\n', (2439, 2592), False, 'import cv2\n'), ((2588, 2749), 'cv2.line', 'cv2.line', (['frame', '(cursor_coordinates[0], cursor_coordinates[1] - padding_y)', '(cursor_coordinates[0], cursor_coordinates[1] + padding_y)', 'YELLOW_COLOR', '(2)'], {}), '(frame, (cursor_coordinates[0], cursor_coordinates[1] - padding_y),\n (cursor_coordinates[0], cursor_coordinates[1] + padding_y), YELLOW_COLOR, 2\n )\n', (2596, 2749), False, 'import cv2\n'), ((2745, 2771), 'cv2.imshow', 'cv2.imshow', (['"""Frame"""', 'frame'], {}), "('Frame', frame)\n", (2755, 2771), False, 'import cv2\n'), ((3422, 3456), 'pyautogui.moveTo', 'pyautogui.moveTo', (['mouse_x', 'mouse_y'], {}), '(mouse_x, mouse_y)\n', (3438, 3456), False, 'import pyautogui\n'), ((4406, 4464), 'cv2.rectangle', 'cv2.rectangle', (['frame', 'rect_start', 'rect_end', 'WHITE_COLOR', '(2)'], {}), '(frame, rect_start, rect_end, WHITE_COLOR, 2)\n', (4419, 4464), False, 'import cv2\n'), ((4473, 4622), 'cv2.putText', 'cv2.putText', (['frame', '"""Hold your face inside of rectangle for 10 sec"""', '(cam_w // 2 - 100, cam_h // 2 + 200)', 'cv2.FONT_HERSHEY_PLAIN', '(1)', 'GREEN_COLOR'], {}), "(frame, 'Hold your face inside of rectangle for 10 sec', (cam_w //\n 2 - 100, cam_h // 2 + 200), cv2.FONT_HERSHEY_PLAIN, 1, GREEN_COLOR)\n", (4484, 4622), False, 'import cv2\n'), ((4619, 4645), 'cv2.imshow', 'cv2.imshow', (['"""Frame"""', 'frame'], {}), 
"('Frame', frame)\n", (4629, 4645), False, 'import cv2\n'), ((2057, 2071), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (2068, 2071), False, 'import cv2\n'), ((3379, 3412), 'pyautogui.click', 'pyautogui.click', (['mouse_x', 'mouse_y'], {}), '(mouse_x, mouse_y)\n', (3394, 3412), False, 'import pyautogui\n'), ((3471, 3485), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (3482, 3485), False, 'import cv2\n'), ((4661, 4676), 'cv2.waitKey', 'cv2.waitKey', (['(10)'], {}), '(10)\n', (4672, 4676), False, 'import cv2\n')] |
#!/usr/bin/env python
from nodes import Node
class Less(Node):
    # Operator character this node is bound to in the language.
    char = "<"
    # Pops two values, pushes one result.
    args = 2
    results = 1
    @Node.test_func([4,2], [0])
    @Node.test_func([0,0], [0])
    @Node.test_func([4,5], [1])
    def func(self, a,b):
        """a<b"""
        # Comparison result coerced to 0/1 by adding 0.
        return (a<b)+0
    @Node.test_func(["test",2], ["te"])
    def indexable_1(self, a:Node.indexable, b:int):
        """a[:b]"""
        # Keep the first b elements of the sequence.
        return [a[:b]]
    @Node.test_func([1,"test"], ["tes"])
    def indexable_2(self, a:int, b:Node.indexable):
        """b[:-a]"""
        # Drop the last a elements of the sequence.
        return [b[:-a]]
    def inf_over_n(self, n: int, inf: Node.infinite):
        # Restrict the infinite sequence with a generated "<n>>" filter
        # program; the exact comparison semantics live in inf.modify.
        return inf.modify(inf.filter_code, "{}>".format(n))
    def inf_under_n(self, inf: Node.infinite, n: int):
        # Restrict the infinite sequence with a generated "<n><" filter
        # program; the exact comparison semantics live in inf.modify.
        return inf.modify(inf.filter_code, "{}<".format(n))
| [
"nodes.Node.test_func"
] | [((119, 146), 'nodes.Node.test_func', 'Node.test_func', (['[4, 2]', '[0]'], {}), '([4, 2], [0])\n', (133, 146), False, 'from nodes import Node\n'), ((153, 180), 'nodes.Node.test_func', 'Node.test_func', (['[0, 0]', '[0]'], {}), '([0, 0], [0])\n', (167, 180), False, 'from nodes import Node\n'), ((187, 214), 'nodes.Node.test_func', 'Node.test_func', (['[4, 5]', '[1]'], {}), '([4, 5], [1])\n', (201, 214), False, 'from nodes import Node\n'), ((292, 327), 'nodes.Node.test_func', 'Node.test_func', (["['test', 2]", "['te']"], {}), "(['test', 2], ['te'])\n", (306, 327), False, 'from nodes import Node\n'), ((434, 470), 'nodes.Node.test_func', 'Node.test_func', (["[1, 'test']", "['tes']"], {}), "([1, 'test'], ['tes'])\n", (448, 470), False, 'from nodes import Node\n')] |
from random import randint
from time import sleep
from operator import itemgetter
# Roll one six-sided die for each of the four players.
jogo = {'Jogador 1': randint(1,6), 'Jogador 2': randint(1,6), 'Jogador 3': randint(1,6), 'Jogador 4': randint(1,6)}
ranking = []
print('Valores sorteados: ')
# Announce each player's roll with a one-second dramatic pause.
for k, v in jogo.items():
    print(f'O {k} tirou {v}')
    sleep(1)
print('='*32)
print(' -= RANKING DOS JOGADORES =- ')
# Sort players by their roll, highest first; ties keep insertion order
# (sorted() is stable).
ranking = sorted(jogo.items(), key=itemgetter(1), reverse=True)
for i, v in enumerate(ranking):
    print(f'{i+1}º lugar: {v[0]} com {v[1]}!')
    sleep(1)
| [
"operator.itemgetter",
"random.randint",
"time.sleep"
] | [((103, 116), 'random.randint', 'randint', (['(1)', '(6)'], {}), '(1, 6)\n', (110, 116), False, 'from random import randint\n'), ((130, 143), 'random.randint', 'randint', (['(1)', '(6)'], {}), '(1, 6)\n', (137, 143), False, 'from random import randint\n'), ((157, 170), 'random.randint', 'randint', (['(1)', '(6)'], {}), '(1, 6)\n', (164, 170), False, 'from random import randint\n'), ((184, 197), 'random.randint', 'randint', (['(1)', '(6)'], {}), '(1, 6)\n', (191, 197), False, 'from random import randint\n'), ((300, 308), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (305, 308), False, 'from time import sleep\n'), ((511, 519), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (516, 519), False, 'from time import sleep\n'), ((399, 412), 'operator.itemgetter', 'itemgetter', (['(1)'], {}), '(1)\n', (409, 412), False, 'from operator import itemgetter\n')] |
import unittest
from random import randint
from Compare_powers_6_kyu import compare_powers
class Powers(unittest.TestCase):
    """Unit tests for compare_powers: returns -1, 0 or 1 when comparing two
    [base, exponent] pairs."""

    def test_1(self):
        self.assertEqual(compare_powers([2, 5], [3, 9]), 1)

    def test_2(self):
        self.assertEqual(compare_powers([33, 99], [22, 99]), -1)

    def test_3(self):
        self.assertEqual(compare_powers([2, 5], [2, 5]), 0)


if __name__ == "__main__":
    unittest.main()
| [
"unittest.main",
"Compare_powers_6_kyu.compare_powers"
] | [((584, 599), 'unittest.main', 'unittest.main', ([], {}), '()\n', (597, 599), False, 'import unittest\n'), ((230, 250), 'Compare_powers_6_kyu.compare_powers', 'compare_powers', (['a', 'b'], {}), '(a, b)\n', (244, 250), False, 'from Compare_powers_6_kyu import compare_powers\n'), ((369, 389), 'Compare_powers_6_kyu.compare_powers', 'compare_powers', (['a', 'b'], {}), '(a, b)\n', (383, 389), False, 'from Compare_powers_6_kyu import compare_powers\n'), ((504, 524), 'Compare_powers_6_kyu.compare_powers', 'compare_powers', (['a', 'b'], {}), '(a, b)\n', (518, 524), False, 'from Compare_powers_6_kyu import compare_powers\n')] |
"""
IDE: PyCharm
Project: corpus-analysis
Author: Robin
Filename: generate_docs
Date: 12.01.2020
"""
import pandas as pd
from tqdm import tqdm
# Input/output locations for the Leipzig corpus extraction.
root_folder = 'data/leipzig-corpora/'
filename: str = root_folder + 'deu_wikipedia_2016_10K-sentences.txt'
doc_folder = root_folder + 'docs'
# Tab-separated corpus: one row per sentence, columns = (id, text).
# NOTE(review): loaded eagerly at import time from a hard-coded path.
dataframe = pd.read_csv(filename, sep='\t', header=None, names=["id", "text"])
def create_doc(filename: str, text: str):
    """Write *text* to *filename* as UTF-8, overwriting any existing file."""
    with open(filename, 'w+', encoding='utf-8') as handle:
        handle.write(text)
# Write each sentence to its own <id>.txt file under doc_folder.
# NOTE(review): "id" shadows the builtin of the same name — harmless here.
for index, row in tqdm(dataframe.iterrows(), total=len(dataframe)):
    id, text = row[0], row[1]
    create_doc(doc_folder + '/%i.txt' % id, text)
| [
"pandas.read_csv"
] | [((300, 366), 'pandas.read_csv', 'pd.read_csv', (['filename'], {'sep': '"""\t"""', 'header': 'None', 'names': "['id', 'text']"}), "(filename, sep='\\t', header=None, names=['id', 'text'])\n", (311, 366), True, 'import pandas as pd\n')] |
import subprocess
import os
"""
Much of the generated code is based on code internal to quickjs with minor changes.
Generated IDL files are header files that provide basic declarations and wrappers.
The normal function signature for quickjs is...
JSValue js_std_gc(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
"""
# Global registry mapping IDL type names to their JS<->C++ conversion recipes.
TYPES = {}


def declare_type(type_name, cpp_type, js_to_cpp=None, js_to_cpp_cleanup=None, cpp_to_js=None, is_numeric=False):
    """Register a JS<->C++ type mapping in the global TYPES registry.

    The js_to_cpp / cpp_to_js values are str.format templates where {0} is
    the C++ variable name, {1} the argv expression and {2} the error bail-out.
    """
    global TYPES
    entry = {
        "type_name": type_name,
        "cpp_type": cpp_type,
        "js_to_cpp": js_to_cpp,
        "js_to_cpp_cleanup": js_to_cpp_cleanup,
        "cpp_to_js": cpp_to_js,
        "is_numeric": is_numeric,
    }
    TYPES[type_name] = entry


def declare_numeric_type(type_name, cpp_type_name, js_to_cpp_name, cpp_to_js_name):
    """Register a numeric type whose conversions follow the JS_To*/JS_New* pattern."""
    conversion = (
        f"{cpp_type_name} {{0}} = 0;\n"
        f"    if ({js_to_cpp_name}(ctx, &{{0}}, {{1}}) != 0) {{{{\n"
        "        {2}\n"
        "    }}"
    )
    declare_type(type_name,
                 cpp_type=cpp_type_name,
                 js_to_cpp=conversion,
                 cpp_to_js=f"{cpp_to_js_name}(ctx, {{0}})",
                 is_numeric=True)
TODO = "TODO()"
# Number Handling
declare_numeric_type("int32", "int32_t", "JS_ToInt32", "JS_NewInt32")
declare_numeric_type("int64", "int64_t", "JS_ToInt64", "JS_NewInt64")
declare_numeric_type("uint32", "uint32_t", "JS_ToUint32", "JS_NewUint32")
declare_numeric_type("double", "double", "JS_ToFloat64", "JS_NewFloat64")
declare_type("boolean",
cpp_type="bool",
js_to_cpp="""bool {0};
int {0}_r = JS_ToBool(ctx, {1});
if ({0}_r == -1) {{
{2}
}} else if ({0}_r == 0) {{
{0} = false;
}} else if ({0}_r == 1) {{
{0} = true;
}}
""",
cpp_to_js="""JS_NewBool(ctx, {0})""")
# String Handling
declare_type("const char*",
cpp_type="const char*",
js_to_cpp="""const char* {0} = JS_ToCString(ctx, {1});
if ({0} == NULL) {{
{2}
}}
""",
js_to_cpp_cleanup="""JS_FreeCString(ctx, {0});""",
cpp_to_js="""JS_NewString(ctx, {0})""")
# Complex Types
declare_type("Callback",
cpp_type="Callback*",
js_to_cpp="""if (!JS_IsFunction(ctx, {1})) {{
{2}
}}
Callback* {0} = new Callback(ctx, {1});
""",
js_to_cpp_cleanup="""delete {0};""",
cpp_to_js=TODO)
declare_type("JSValue",
cpp_type="JSValue",
js_to_cpp="""JSValue {0} = {1};""",
cpp_to_js="""{0}""")
declare_type("ArrayBuffer",
cpp_type="ArrayBuffer",
js_to_cpp="""size_t {0}_size = 0;
uint8_t* {0}_arr = JS_GetArrayBuffer(ctx, &{0}_size, {1});
if ({0}_arr == NULL) {{
{2}
}}
ArrayBuffer {0}({0}_arr, {0}_size);
""",
cpp_to_js=TODO)
declare_type("ArrayBufferView",
cpp_type="ArrayBufferView",
js_to_cpp="""size_t {0}_offset = 0;
size_t {0}_length = 0;
size_t {0}_bytes_per_element = 0;
JSValue {0}_arr = JS_GetTypedArrayBuffer(ctx, {1}, &{0}_offset, &{0}_length, &{0}_bytes_per_element);
if (JS_IsException({0}_arr)) {{
{2}
}}
ArrayBufferView {0}(ctx, {0}_arr, {0}_offset, {0}_length, {0}_bytes_per_element);
""",
cpp_to_js=TODO)
declare_type("void", cpp_type="void", cpp_to_js="JS_UNDEFINED")
def get_cpp_type(type_name):
    """Return the C++ type string registered for *type_name*.

    Raises a descriptive Exception for unregistered type names.
    """
    global TYPES
    # BUG FIX: the original indexed TYPES[type_name] before checking, so an
    # unknown name raised KeyError and the "Unknown type" branch was
    # unreachable.
    if type_name in TYPES:
        return TYPES[type_name]["cpp_type"]
    raise Exception("Unknown type: " + type_name)
def get_js_to_cpp_for_type(type_name, variable_name, argument_value):
    """Render the C++ snippet converting a JS argument into a local variable.

    The registered template is filled with the variable name, the argv
    expression and the standard error bail-out.  Returns None when the type
    has no js_to_cpp template; raises for unregistered type names.
    """
    global TYPES
    error_callback = "return JS_EXCEPTION;"
    # BUG FIX: membership test instead of indexing, so unknown names reach
    # the descriptive Exception rather than raising a bare KeyError.
    if type_name not in TYPES:
        raise Exception("Unknown type: " + type_name)
    format_string = TYPES[type_name]["js_to_cpp"]
    if format_string is None:
        return None
    return format_string.format(variable_name, argument_value, error_callback)
def get_js_to_cpp_cleanup_for_type(type_name, variable_name):
    """Render the C++ cleanup snippet for a converted argument, if any.

    Returns None when the type needs no cleanup; raises for unregistered
    type names.
    """
    global TYPES
    # BUG FIX: membership test instead of indexing, so unknown names reach
    # the descriptive Exception rather than raising a bare KeyError.
    if type_name not in TYPES:
        raise Exception("Unknown type: " + type_name)
    format_string = TYPES[type_name]["js_to_cpp_cleanup"]
    if format_string is None:
        return None
    return format_string.format(variable_name)
def get_cpp_to_js_for_type(type_name, variable_name):
    """Render the C++ expression converting a C++ value back into a JSValue.

    Returns None when the type has no cpp_to_js template; raises for
    unregistered type names.
    """
    global TYPES
    # BUG FIX: membership test instead of indexing, so unknown names reach
    # the descriptive Exception rather than raising a bare KeyError.
    if type_name not in TYPES:
        raise Exception("Unknown type: " + type_name)
    format_string = TYPES[type_name]["cpp_to_js"]
    if format_string is None:
        return None
    return format_string.format(variable_name)
def emit_constant(decl):
    """Translate a constant declaration into a JS property-list entry.

    Returns (declarations, signatures, function_list_entry, None) where only
    the list entry is populated; raises for unsupported constant types.
    """
    name = decl["name"]
    constant_type = decl["constantType"]
    constant_value = decl["constantValue"]
    if constant_type != "uint32":
        raise Exception(f"constants of type {constant_type} not implemented")
    entry = f'JS_PROP_INT32_DEF("{name}", {constant_value}, JS_PROP_CONFIGURABLE)'
    return ("", "", entry, None)
def emit_function(decl):
    """Emit the C wrapper, user signature and function-list entry for one
    IDL function declaration.

    decl keys used: "name", "arguments", and optionally "returnType",
    "constructor" (C++ pointer type to construct), "prototype"
    (registered receiver type for methods, requires "class_id") and
    "passContext" (prepend JSContext* to the user function's arguments).

    Returns (wrapper_function, user_signature, function_list_entry, None).
    Note: generated C fragments are clang-formatted later by do_codegen,
    so their internal whitespace is not significant.
    """
    name = decl["name"]
    return_type = decl.get("returnType", None)
    constructor = decl.get("constructor", None)
    prototype = decl.get("prototype", None)
    pass_context = decl.get("passContext", False)
    if return_type == None and constructor == None:
        raise Exception("No return type specified")
    function_name = f"js_{name}_wrap"
    user_function_name = f"js_{name}"
    wrapper_function_signature = f"static JSValue {function_name}(JSContext *ctx," \
        "JSValueConst this_val, int argc, JSValueConst *argv)"
    # TODO(joshua): Check argument length and argument types.
    wrapper_prechecks = ""
    argument_names = []
    argument_count = len(decl["arguments"])
    wrapper_argument_conversion = ""
    user_arguments = []
    cleanup_fragments = ""
    if pass_context:
        # The user function receives the raw JSContext* as its first argument.
        argument_names += ["ctx"]
        user_arguments += [f"JSContext* ctx"]
    if prototype != None:
        # Method on a class: recover the opaque C++ "this" pointer from the
        # receiver before any declared arguments.
        prototype_cpp_type = get_cpp_type(prototype)
        class_id = decl["class_id"]
        wrapper_argument_conversion += f"{prototype_cpp_type} _this = ({prototype_cpp_type}) JS_GetOpaque(this_val, {class_id});\n"
        argument_names += ["_this"]
        user_arguments += [f"{prototype_cpp_type} _this"]
    argument_number = 0
    for arg in decl["arguments"]:
        # Convert each JS argument (argv[i]) to its C++ counterpart and
        # collect any cleanup code the conversion requires.
        argument_name = arg["name"]
        argument_type = arg["argumentType"]
        argument_cpp_type = get_cpp_type(argument_type)
        argument_value = f"argv[{argument_number}]"
        argument_conversion = get_js_to_cpp_for_type(
            argument_type, argument_name, argument_value)
        if argument_conversion == None:
            raise Exception("Could not convert " + argument_type)
        wrapper_argument_conversion += f"{argument_conversion}\n"
        argument_names += [argument_name]
        user_arguments += [f"{argument_cpp_type} {argument_name}"]
        argument_cleanup = get_js_to_cpp_cleanup_for_type(
            argument_type, argument_name)
        if argument_cleanup != None:
            cleanup_fragments += argument_cleanup + "\n"
        argument_number += 1
    cpp_return_type = constructor if constructor != None else get_cpp_type(
        return_type)
    arguments_joined = ", ".join(argument_names)
    user_arguments_joined = ", ".join(user_arguments)
    user_call = f"{user_function_name}({arguments_joined})"
    full_user_call = f"{cpp_return_type} user_ret = {user_call};"
    if cpp_return_type == "void":
        # void functions have no value to capture.
        full_user_call = f"{user_call};"
    wrapper_return_conversion = ""
    if constructor == None:
        wrapper_return_conversion = get_cpp_to_js_for_type(
            return_type, "user_ret")
        wrapper_return_conversion = f"JSValue _r_value = {wrapper_return_conversion};"
    else:
        # Constructor: wrap the returned C++ pointer in a fresh JS object
        # carrying the class prototype, and stash the pointer as opaque data.
        class_id = decl["class_id"]
        wrapper_return_conversion = f"""
JSValue proto;
if (JS_IsUndefined(this_val)) {{
proto = JS_GetClassProto(ctx, {class_id});
}} else {{
proto = JS_GetPropertyStr(ctx, this_val, "prototype");
if (JS_IsException(proto)) {{
return JS_EXCEPTION;
}}
}}
JSValue _r_value = JS_NewObjectProtoClass(ctx, proto, {class_id});
JS_FreeValue(ctx, proto);
JS_SetOpaque(_r_value, user_ret);
"""
    wrapper_function = f"""{wrapper_function_signature} {{
{wrapper_prechecks}
{wrapper_argument_conversion}
{full_user_call}
{wrapper_return_conversion}
{cleanup_fragments}
return _r_value;
}}"""
    user_signature = f"{cpp_return_type} {user_function_name}({user_arguments_joined});"
    function_list_entry = f"JS_CFUNC_DEF(\"{name}\", {argument_count}, {function_name} )"
    return (wrapper_function, user_signature, function_list_entry, None)
def emit_class(decl):
    """Emit everything needed to expose one C++ class to JS.

    Generates: a JSClassID, a finalizer wrapper, the JSClassDef, the
    constructor wrapper, js_<name>_new / js_<name>_fromValue helpers,
    wrappers for every prototype member (functions, constants and
    properties), the prototype function list, and an init function that
    registers the class on the given global object.  Also registers the
    class itself as a convertible type via declare_type().

    Returns (declarations, signatures, None, initialization_call).
    NOTE: prototype member dicts in decl["prototype"] are mutated in
    place (their "name", "prototype" and "class_id" keys are rewritten).
    """
    name = decl["name"]
    cpp_name = decl["cppName"]
    cpp_name_ptr = cpp_name + "*"
    class_id_name = f"js_{name}_class_id"
    class_id = f"static JSClassID {class_id_name};"
    declarations = ""
    signatures = ""
    # Constructor: emitted through emit_function with a synthesized decl.
    constructor_decl = {
        "type": "function",
        "name": f"{name}_ctor",
        "constructor": cpp_name_ptr,
        "class_id": class_id_name,
        "arguments": decl["constructorArguments"]}
    ctor_wrapper_function, ctor_user_signature, _, _ = emit_function(
        constructor_decl)
    ctor_wrapper_name = f"js_{name}_ctor_wrap"
    ctor_argument_count = len(decl["constructorArguments"])
    # Finalizer: delegates the opaque pointer to a user-supplied function.
    finalizer_wrapper_name = f"js_{name}_finalizer_wrap"
    finalizer_name = f"js_{name}_finalizer"
    finalizer_signature = f"void {finalizer_name}({cpp_name_ptr} val);"
    finalizer_wrapper = f"""static void {finalizer_wrapper_name}(JSRuntime *rt, JSValue val)
{{
{cpp_name} *value = ({cpp_name_ptr}) JS_GetOpaque(val, {class_id_name});
if (value) {{
{finalizer_name}(value);
}}
}}"""
    # js_<name>_new: wrap an existing C++ pointer in a JS object.
    new_function_name = f"js_{name}_new"
    new_function_wrapper = f"""JSValue {new_function_name}(JSContext* ctx, {cpp_name_ptr} user_value) {{
JSValue proto = JS_GetClassProto(ctx, {class_id_name});
JSValue value = JS_NewObjectProtoClass(ctx, proto, {class_id_name});
JS_FreeValue(ctx, proto);
JS_SetOpaque(value, user_value);
return value;
}}"""
    new_function_signature = f"JSValue {new_function_name}(JSContext* ctx, {cpp_name_ptr} user_value);"
    # js_<name>_fromValue: recover the C++ pointer (nullptr for undefined/null).
    fromValue_function_name = f"js_{name}_fromValue"
    fromValue_function_wrapper = f"""{cpp_name_ptr} {fromValue_function_name}(JSContext* ctx, JSValue value) {{
if (JS_IsUndefined(value) || JS_IsNull(value)) {{
return nullptr;
}}
{cpp_name} *cpp_value = ({cpp_name_ptr}) JS_GetOpaque(value, {class_id_name});
return cpp_value;
}}"""
    # Prototype members.  A synthetic $$PROTO_<name> type is registered so
    # method receivers can be looked up through the normal type machinery.
    prototype_name = f"$$PROTO_{name}"
    declare_type(prototype_name, f"{cpp_name_ptr}",
                 f"{prototype_name}_TODO();", f"{prototype_name}_TODO()")
    prototype_signatures = ""
    prototype_declarations = ""
    prototype_function_list_entries = []
    for prototype_decl in decl["prototype"]:
        if prototype_decl["type"] == "function":
            # Namespace the member as <class>_<member> and mark it as a
            # method on this class before delegating to emit_function.
            prototype_decl_name = prototype_decl["name"]
            prototype_decl["name"] = f"{name}_{prototype_decl_name}"
            prototype_decl["prototype"] = prototype_name
            prototype_decl["class_id"] = class_id_name
            prototype_argument_count = len(prototype_decl["arguments"])
            prototype_function_name = f"js_{name}_{prototype_decl_name}_wrap"
            pt_wrapper_function, pt_user_signature, pt_function_list_entry, _ = emit_function(
                prototype_decl)
            prototype_signatures += pt_user_signature + "\n"
            prototype_declarations += pt_wrapper_function + "\n"
            prototype_function_list_entries += [
                f"JS_CFUNC_DEF(\"{prototype_decl_name}\", {prototype_argument_count}, {prototype_function_name} )"]
        elif prototype_decl["type"] == "constant":
            pt_wrapper_function, pt_user_signature, pt_function_list_entry, _ = emit_constant(
                prototype_decl)
            prototype_function_list_entries += [pt_function_list_entry]
        elif prototype_decl["type"] == "property":
            prototype_decl_name = prototype_decl["name"]
            prototype_decl["name"] = f"{name}_{prototype_decl_name}"
            prototype_decl["prototype"] = prototype_name
            prototype_decl["class_id"] = class_id_name
            pt_wrapper_function, pt_user_signature, pt_function_list_entry, _ = emit_property(
                prototype_decl)
            prototype_signatures += pt_user_signature + "\n"
            prototype_declarations += pt_wrapper_function + "\n"
            prototype_function_list_entries += [
                f"""JS_CGETSET_DEF("{prototype_decl_name}", js_{name}_{prototype_decl_name}_get_wrap, js_{name}_{prototype_decl_name}_set_wrap)"""]
        else:
            raise Exception(
                "Declaration type " + prototype_decl["type"] + " not supported for prototypes.")
    # Class declaration (JSClassDef) hooking in the finalizer.
    class_decl_name = f"js_{name}_class"
    class_decl = f"""static JSClassDef js_{name}_class = {{
"{name}",
.finalizer = {finalizer_wrapper_name},
}};"""
    # Prototype function list.
    prototype_function_list_entries_joined = ",\n".join(
        prototype_function_list_entries)
    prototype_function_list_name = f"js_{name}_prototype_funcs"
    prototype_function_list = f"""static const JSCFunctionListEntry {prototype_function_list_name}[] = {{
{prototype_function_list_entries_joined}
}};"""
    # Initialization function: registers the class ID, the class, its
    # prototype and the constructor on the supplied global object.
    initialization_function_name = f"js_{name}_init"
    initialization_function = f"""void {initialization_function_name}(JSContext* ctx, JSValue global) {{
JS_NewClassID(&{class_id_name});
JS_NewClass(JS_GetRuntime(ctx), {class_id_name}, &{class_decl_name});
JSValue proto = JS_NewObject(ctx);
JS_SetPropertyFunctionList(ctx, proto, {prototype_function_list_name}, countof({prototype_function_list_name}));
JSValue obj = JS_NewCFunction2(ctx, {ctor_wrapper_name}, "{name}", {ctor_argument_count},
JS_CFUNC_constructor, 0);
JS_SetConstructor(ctx, obj, proto);
JS_SetClassProto(ctx, {class_id_name}, proto);
JS_SetPropertyStr(ctx, global, "{name}", obj);
}}"""
    declarations += f"""
{class_id}
{finalizer_wrapper}
{class_decl}
{ctor_wrapper_function}
{new_function_wrapper}
{fromValue_function_wrapper}
{prototype_declarations}
{prototype_function_list}
{initialization_function}
"""
    signatures += f"""
struct {cpp_name};
{ctor_user_signature}
{new_function_signature}
{prototype_signatures}
{finalizer_signature}
"""
    initialization_call = f"{initialization_function_name}(ctx, global);"
    # Make the class usable as an argument/return type in later declarations.
    declare_type(name,
                 cpp_type=f"{cpp_name}*",
                 js_to_cpp=f"{cpp_name}* {{0}} = js_{name}_fromValue(ctx, {{1}});",
                 cpp_to_js=f"js_{name}_new(ctx, {{0}})")
    return (declarations, signatures, None, initialization_call)
def emit_property(decl):
    """Emit getter/setter wrappers and signatures for a property declaration.

    Produces js_<name>_get_wrap / js_<name>_set_wrap C wrappers that
    delegate to user-supplied js_<name>_get / js_<name>_set functions,
    plus a JS_CGETSET_DEF function-list entry.

    Returns (declarations, signatures, function_list_entry, None).
    """
    name = decl["name"]
    property_type = decl["propertyType"]
    prototype = decl.get("prototype", None)
    cpp_type = get_cpp_type(property_type)
    user_arguments = []
    argument_names = []
    prefix_fragment = ""
    if prototype != None:
        # Property on a class instance: recover the opaque "this" pointer
        # first; it is passed through to the user get/set functions.
        prototype_cpp_type = get_cpp_type(prototype)
        class_id = decl["class_id"]
        prefix_fragment += f"{prototype_cpp_type} _this = ({prototype_cpp_type}) JS_GetOpaque(this_val, {class_id});\n"
        argument_names += ["_this"]
        user_arguments += [f"{prototype_cpp_type} _this"]
    get_wrapper_name = f"js_{name}_get_wrap"
    set_wrapper_name = f"js_{name}_set_wrap"
    get_user_function_name = f"js_{name}_get"
    set_user_function_name = f"js_{name}_set"
    cpp_to_js_fragment = get_cpp_to_js_for_type(property_type, "user_value")
    get_argument_names_joined = ", ".join(argument_names)
    get_wrapper_function = f"""static JSValue {get_wrapper_name}(JSContext *ctx, JSValueConst this_val) {{
{prefix_fragment}
{cpp_type} user_value = {get_user_function_name}({get_argument_names_joined});
JSValue value = {cpp_to_js_fragment};
return value;
}}"""
    set_argument_names_joined = ", ".join(argument_names + ["user_value"])
    js_to_cpp_fragment = get_js_to_cpp_for_type(
        property_type, "user_value", "value")
    set_wrapper_function = f"""static JSValue {set_wrapper_name}(JSContext *ctx, JSValueConst this_val, JSValueConst value) {{
{prefix_fragment}
{js_to_cpp_fragment}
{set_user_function_name}({set_argument_names_joined});
return JS_UNDEFINED;
}}"""
    get_user_arguments_joined = ", ".join(user_arguments)
    set_user_arguments_joined = ", ".join(
        user_arguments + [f"{cpp_type} value"])
    get_user_signature = f"{cpp_type} {get_user_function_name}({get_user_arguments_joined});"
    set_user_signature = f"void {set_user_function_name}({set_user_arguments_joined});"
    # NOTE(review): local is spelled "declartions" in the original; kept
    # unchanged because a documentation pass must not rename code.
    declartions = f"""
{get_wrapper_function}
{set_wrapper_function}
"""
    signatures = f"""
{get_user_signature}
{set_user_signature}
"""
    function_list_entry = f"""JS_CGETSET_DEF("{name}", {get_wrapper_name}, {set_wrapper_name} )"""
    return (declartions, signatures, function_list_entry, None)
def emit_declaration(decl):
    """Dispatch a single IDL declaration to its emitter.

    Args:
        decl: parsed declaration dict; decl["type"] selects the emitter
            ("function", "class", "property" or "constant").

    Returns:
        The emitter's (declarations, signatures, function_list_entry,
        initialization_call) tuple.

    Raises:
        Exception: for declaration types without an emitter.
    """
    if decl["type"] == "function":
        return emit_function(decl)
    elif decl["type"] == "class":
        return emit_class(decl)
    elif decl["type"] == "property":
        return emit_property(decl)
    elif decl["type"] == "constant":
        return emit_constant(decl)
    else:
        # Fixed typo in the original error message ("Declartion").
        raise Exception("Declaration type " +
                        decl["type"] + " not Implemented.")
def do_codegen(codegen_name, idl_file, output_filename):
    """Generate a single-header QuickJS binding file from a parsed IDL.

    Args:
        codegen_name: name used for the implementation #define and the
            codegen_<name>_init function.
        idl_file: parsed IDL dict; must have "version" == 1 and a
            "declarations" list.
        output_filename: path of the header to write; the file is
            clang-formatted in place afterwards.

    NOTE(review): relies on the module importing `subprocess` at the top
    of the file (not visible in this chunk) -- confirm the import exists.
    """
    # Check the version number
    assert idl_file["version"] == 1
    decls = ""
    user_signatures = ""
    initialization_calls = ""
    function_list_entries = []
    for decl in idl_file["declarations"]:
        # Each emitter returns the same 4-tuple; entries/calls may be None.
        wrapper_function, user_signature, function_list_entry, initialization_call = emit_declaration(
            decl)
        decls += wrapper_function + "\n"
        user_signatures += user_signature + "\n"
        if function_list_entry != None:
            function_list_entries += [function_list_entry]
        if initialization_call != None:
            initialization_calls += initialization_call + "\n"
    impl_define = f"JSCODEGEN_{codegen_name}_IMPLEMENTATION"
    init_signature = f"void codegen_{codegen_name}_init(JSContext* context);"
    function_list_entries_joined = ",\n".join(function_list_entries)
    function_list_name = f"js_{codegen_name}_funcs"
    function_list = f"""static const JSCFunctionListEntry {function_list_name}[] = {{
{function_list_entries_joined}
}};"""
    init_function = f"""
void codegen_{codegen_name}_init(JSContext* ctx) {{
JSValue global = JS_GetGlobalObject(ctx);
JS_SetPropertyFunctionList(ctx, global, {function_list_name},
countof({function_list_name}));
{initialization_calls}
JS_FreeValue(ctx, global);
}}
"""
    # Single-header layout: declarations always, implementations only when
    # the caller defines JSCODEGEN_<name>_IMPLEMENTATION before including.
    codegen_file = f"""#pragma once
// AUTOGENERATED by scripts/codegen.py
# include <quickjs.h>
# ifndef countof
# define countof(x) (sizeof(x) / sizeof((x)[0]))
# endif
{user_signatures}
{init_signature}
# ifdef {impl_define}
{decls}
{function_list}
{init_function}
# endif
"""
    with open(output_filename, "w") as file_handle:
        file_handle.write(codegen_file)
    # Run clang-format so the emitted code is readable regardless of the
    # whitespace in the templates above.
    subprocess.check_call(
        ["clang-format", "-style=google", "-i", output_filename])
| [
"subprocess.check_call"
] | [((19938, 20017), 'subprocess.check_call', 'subprocess.check_call', (["['clang-format', '-style=google', '-i', output_filename]"], {}), "(['clang-format', '-style=google', '-i', output_filename])\n", (19959, 20017), False, 'import subprocess\n')] |
from django.test import TestCase
import logging
from users.models import CustomUser
from .models import ContractualParty, Contract, UserContractualPartyAssociation
logger = logging.getLogger(__name__)
class Explorations(TestCase):
    """Exploratory tests for contractual parties, contracts and user links."""

    def setUp(self):
        """Create an admin user, one contractual party and two contracts."""
        logger.info("setting up the test case")
        admin = CustomUser(username="Admin", password="<PASSWORD>")
        admin.save()
        apg = ContractualParty(legal_entity="APG/SGA", created_by=admin)
        apg.save()
        self.apg = apg
        c1_apg = Contract(other_party=apg, other_party_role=Contract.AGENCY)
        c2_apg = Contract(other_party=apg, other_party_role=Contract.SITE_OWNER)
        c1_apg.save()
        c2_apg.save()
        self.contracts = [c1_apg, c2_apg]

    def test_sth(self):
        """The party from setUp is retrievable and owns both contracts."""
        apg = ContractualParty.objects.all()[0]
        # assertEqual: assertEquals is a deprecated alias, removed in
        # Python 3.12's unittest.
        self.assertEqual(apg, self.apg)
        self.assertEqual(apg.contract_set.all().count(), 2)
        wolfie = CustomUser(username="Wolfie", password="<PASSWORD>")
        wolfie.save()
        w_apg = UserContractualPartyAssociation(cp=apg, user=wolfie)
        w_apg.save()
w_apg.save()
| [
"logging.getLogger",
"users.models.CustomUser"
] | [((175, 202), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (192, 202), False, 'import logging\n'), ((321, 372), 'users.models.CustomUser', 'CustomUser', ([], {'username': '"""Admin"""', 'password': '"""<PASSWORD>"""'}), "(username='Admin', password='<PASSWORD>')\n", (331, 372), False, 'from users.models import CustomUser\n'), ((945, 997), 'users.models.CustomUser', 'CustomUser', ([], {'username': '"""Wolfie"""', 'password': '"""<PASSWORD>"""'}), "(username='Wolfie', password='<PASSWORD>')\n", (955, 997), False, 'from users.models import CustomUser\n')] |
import sqlite3
from sqlite3 import Error
# TODO: remove the hard-coded absolute path
class RecipeDB:
    """Thin convenience wrapper around a sqlite3 recipe database.

    All lookup queries use parameterized SQL; only table names (which
    come from trusted code, never user input) are interpolated directly.
    """

    # Historical default directory the database file is resolved against.
    # TODO: drop this machine-specific path entirely (see module-level
    # TODO); kept as the default so existing callers keep working.
    DEFAULT_BASE_PATH = "C:\\Users\\Fabian\\PycharmProjects\\RecipeDB\\recipedb\\"

    def __init__(self, db_file, base_path=DEFAULT_BASE_PATH):
        """
        Establish a connection to an existing database or create a new one.

        :param db_file: name of the database file to open/create
        :param base_path: directory prefix for db_file; defaults to the
            historical hard-coded location for backward compatibility
        """
        self.db_file = db_file
        self.path = base_path
        self.connection = sqlite3.connect(self.path + self.db_file)
        print(self.path, self.db_file, sep="")

    def __enter__(self):
        # Context-manager entry hands out the raw connection object.
        print(f"opening db connection to {self.db_file}")
        return self.connection

    def __exit__(self, exc_type, exc_val, exc_tb):
        print(f"closing db connection to {self.db_file}")
        self.connection.close()

    def create_table(self, sql_statement: str):
        """Execute a CREATE TABLE statement (trusted SQL only)."""
        curs = self.connection.cursor()
        curs.execute(sql_statement)
        print("table created")

    def add_data(self, sql_cmd):
        """Execute a trusted INSERT/UPDATE statement and commit.

        :return: rowid of the last inserted row
        """
        curs = self.connection.cursor()
        curs.execute(sql_cmd)
        self.connection.commit()
        return curs.lastrowid

    def close_connection(self):
        """Close the underlying sqlite3 connection."""
        self.connection.close()
        print("connection to database closed.")

    def select_all(self, table):
        """
        Query and print all rows in the given table.

        :param table: table name (trusted -- table names cannot be bound
            as SQL parameters)
        """
        cur = self.connection.cursor()
        cur.execute(f"SELECT * FROM {table}")
        rows = cur.fetchall()
        for row in rows:
            print(row)
        print(rows)

    def search_ingr(self, ingredient):
        """
        Search for the given ingredient in the ingredients table.

        :param ingredient: ingredient substring to search for
        :return: dict {rez_id: ingredient} for all rows where the
            ingredient is found
        """
        cur = self.connection.cursor()
        # Parameterized LIKE: the original interpolated user input straight
        # into the SQL string, which allowed SQL injection.
        cur.execute("SELECT * FROM ingredients WHERE Zutaten LIKE ?",
                    (f"%{ingredient}%",))
        entries = cur.fetchall()
        return {entry[4]: ingredient for entry in entries}

    def get_recipe(self, id):
        """
        Retrieve the recipe row(s) with the given rec_id.

        :param id: rec_id
        :return: list of matching rows from the recipes table
        """
        cur = self.connection.cursor()
        # Parameterized instead of f-string interpolation (SQL injection).
        cur.execute("SELECT * FROM recipes WHERE id LIKE ?", (id,))
        return cur.fetchall()

    def get_ingr(self, id):
        """
        Retrieve the ingredients of the recipe with the given rec_id.

        :param id: rec_id
        :return: list of (Zutaten, Menge, Einheit, rez_id) rows
        """
        cur = self.connection.cursor()
        cur.execute(
            "SELECT Zutaten, Menge, Einheit, rez_id FROM ingredients "
            "WHERE rez_id LIKE ?", (id,))
        return cur.fetchall()
def create_db(rec_db=None):
    """
    Create and initialize the database tables for the recipes cookbook.

    This has to run once in the beginning to set up the db skeleton.

    :param rec_db: object exposing create_table(sql); when None, a
        RecipeDB for "../Recipe.db" is opened.  (The original code left
        the RecipeDB construction commented out and then crashed with
        NameError on the undefined ``rec_db``.)
    :return: None
    """
    if rec_db is None:
        rec_db = RecipeDB("../Recipe.db")
    sql_table_recipes = """ CREATE TABLE IF NOT EXISTS recipes (
                            id integer PRIMARY KEY,
                            Rezept_Name text NOT NULL,
                            Zubereitung text,
                            Kategorie text,
                            Küche text,
                            Link text,
                            Notizen text
                        ); """
    sql_table_ingredients = """CREATE TABLE IF NOT EXISTS ingredients (
                            ingr_id integer PRIMARY KEY,
                            Zutaten text NOT NULL,
                            Menge integer,
                            Einheit text,
                            rez_id integer NOT NULL,
                            FOREIGN KEY (rez_id) REFERENCES recipes (id)
                        );"""
    sql_table_nutrival = """ CREATE TABLE IF NOT EXISTS nutval (
                            nut_id integer PRIMARY KEY,
                            Nährstoffe text NOT NULL,
                            Menge text,
                            Einheit text,
                            rez_id integer NOT NULL,
                            FOREIGN KEY (rez_id) REFERENCES recipes (id)
                        );"""
    rec_db.create_table(sql_table_recipes)
    rec_db.create_table(sql_table_ingredients)
    rec_db.create_table(sql_table_nutrival)


if __name__ == "__main__":
    create_db()
| [
"sqlite3.connect"
] | [((416, 457), 'sqlite3.connect', 'sqlite3.connect', (['(self.path + self.db_file)'], {}), '(self.path + self.db_file)\n', (431, 457), False, 'import sqlite3\n')] |
"""Handle requests to support sequential navigation between arXiv IDs."""
from flask import url_for, escape
from typing import Tuple, Dict, Any
from werkzeug.exceptions import BadRequest
from browse.domain.identifier import Identifier, IdentifierException
from browse.services.database import get_sequential_id
from arxiv import status
from arxiv.taxonomy.definitions import ARCHIVES, CATEGORIES_ACTIVE
from arxiv.base import logging
Response = Tuple[Dict[str, Any], int, Dict[str, Any]]
logger = logging.getLogger(__name__)
def get_prevnext(id: str, function: str, context: str) -> Response:
    """Resolve the previous/next arXiv ID within a browse context.

    The 'site' parameter from the classic prevnext is no longer supported.

    Parameters
    ----------
    id
        arXiv identifier to start from.
    function
        Either 'prev' or 'next'.
    context
        Archive or category to browse within, or 'all'.

    Returns
    -------
    tuple
        (response data, HTTP status code, response headers) where the
        headers carry the redirect Location.

    Raises
    ------
    BadRequest
        When a request parameter is missing or invalid, or when no
        adjacent article exists in the given context.
    """
    # Validate the request parameters up front with guard clauses.
    if not id:
        raise BadRequest('Missing article identifier')
    if function not in ('prev', 'next'):
        raise BadRequest('Missing or invalid function request')
    if not context:
        raise BadRequest('Missing context')
    known_context = (context in CATEGORIES_ACTIVE
                     or context in ARCHIVES
                     or context == 'all')
    if not known_context:
        raise BadRequest('Invalid context')

    try:
        identifier = Identifier(id)
    except IdentifierException:
        raise BadRequest(escape(f"Invalid article identifier {id}"))

    sequential_id = get_sequential_id(paper_id=identifier,
                                      is_next=function == 'next',
                                      context=context)
    if not sequential_id:
        raise BadRequest(
            escape(f'No {function} article found for '
                   f'{identifier.id} in {context}'))

    location = url_for('browse.abstract', arxiv_id=sequential_id,
                       context=context)
    return {}, status.HTTP_301_MOVED_PERMANENTLY, {'Location': location}
| [
"arxiv.base.logging.getLogger",
"flask.escape",
"flask.url_for",
"browse.services.database.get_sequential_id",
"browse.domain.identifier.Identifier",
"werkzeug.exceptions.BadRequest"
] | [((501, 528), 'arxiv.base.logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (518, 528), False, 'from arxiv.base import logging\n'), ((1811, 1897), 'browse.services.database.get_sequential_id', 'get_sequential_id', ([], {'paper_id': 'arxiv_id', 'is_next': "(function == 'next')", 'context': 'context'}), "(paper_id=arxiv_id, is_next=function == 'next', context=\n context)\n", (1828, 1897), False, 'from browse.services.database import get_sequential_id\n'), ((2126, 2186), 'flask.url_for', 'url_for', (['"""browse.abstract"""'], {'arxiv_id': 'seq_id', 'context': 'context'}), "('browse.abstract', arxiv_id=seq_id, context=context)\n", (2133, 2186), False, 'from flask import url_for, escape\n'), ((1281, 1321), 'werkzeug.exceptions.BadRequest', 'BadRequest', (['"""Missing article identifier"""'], {}), "('Missing article identifier')\n", (1291, 1321), False, 'from werkzeug.exceptions import BadRequest\n'), ((1377, 1426), 'werkzeug.exceptions.BadRequest', 'BadRequest', (['"""Missing or invalid function request"""'], {}), "('Missing or invalid function request')\n", (1387, 1426), False, 'from werkzeug.exceptions import BadRequest\n'), ((1480, 1509), 'werkzeug.exceptions.BadRequest', 'BadRequest', (['"""Missing context"""'], {}), "('Missing context')\n", (1490, 1509), False, 'from werkzeug.exceptions import BadRequest\n'), ((1622, 1651), 'werkzeug.exceptions.BadRequest', 'BadRequest', (['"""Invalid context"""'], {}), "('Invalid context')\n", (1632, 1651), False, 'from werkzeug.exceptions import BadRequest\n'), ((1681, 1695), 'browse.domain.identifier.Identifier', 'Identifier', (['id'], {}), '(id)\n', (1691, 1695), False, 'from browse.domain.identifier import Identifier, IdentifierException\n'), ((2012, 2081), 'flask.escape', 'escape', (['f"""No {function} article found for {arxiv_id.id} in {context}"""'], {}), "(f'No {function} article found for {arxiv_id.id} in {context}')\n", (2018, 2081), False, 'from flask import url_for, escape\n'), ((1753, 
1795), 'flask.escape', 'escape', (['f"""Invalid article identifier {id}"""'], {}), "(f'Invalid article identifier {id}')\n", (1759, 1795), False, 'from flask import url_for, escape\n')] |
import unittest
import numpy as np
from chainer import testing
from chainercv.transforms import resize_bbox
class TestResizeBbox(unittest.TestCase):
    """resize_bbox scales box coordinates by the output/input size ratios."""

    def test_resize_bbox(self):
        src = np.random.uniform(
            low=0., high=32., size=(10, 4))
        resized = resize_bbox(src, in_size=(32, 32), out_size=(64, 128))
        # Going from (32, 32) to (64, 128): the y coordinates (columns 0
        # and 2) double, the x coordinates (columns 1 and 3) quadruple.
        scale = np.array([2., 4., 2., 4.])
        np.testing.assert_equal(resized, src * scale)
# Chainer's standard test-module boilerplate: runs the tests in this file
# when it is executed directly.
testing.run_module(__name__, __file__)
| [
"chainercv.transforms.resize_bbox",
"chainer.testing.run_module",
"numpy.testing.assert_equal",
"numpy.random.uniform"
] | [((605, 643), 'chainer.testing.run_module', 'testing.run_module', (['__name__', '__file__'], {}), '(__name__, __file__)\n', (623, 643), False, 'from chainer import testing\n'), ((201, 252), 'numpy.random.uniform', 'np.random.uniform', ([], {'low': '(0.0)', 'high': '(32.0)', 'size': '(10, 4)'}), '(low=0.0, high=32.0, size=(10, 4))\n', (218, 252), True, 'import numpy as np\n'), ((279, 334), 'chainercv.transforms.resize_bbox', 'resize_bbox', (['bbox'], {'in_size': '(32, 32)', 'out_size': '(64, 128)'}), '(bbox, in_size=(32, 32), out_size=(64, 128))\n', (290, 334), False, 'from chainercv.transforms import resize_bbox\n'), ((559, 602), 'numpy.testing.assert_equal', 'np.testing.assert_equal', (['out', 'bbox_expected'], {}), '(out, bbox_expected)\n', (582, 602), True, 'import numpy as np\n')] |
# This examples shows how to perform collision detection between the end-effector of a robot and a point cloud depicted as a Height Field
# Note: this feature requires Meshcat to be installed, this can be done using
# pip install --user meshcat
import pinocchio as pin
import hppfcl as fcl
import numpy as np
import sys
from os.path import dirname, join, abspath
from pinocchio.visualize import MeshcatVisualizer
# Load the URDF model.
# Conversion with str seems to be necessary when executing this file with ipython
pinocchio_model_dir = join(dirname(dirname(str(abspath(__file__)))), "models")

model_path = join(pinocchio_model_dir, "example-robot-data/robots")
mesh_dir = pinocchio_model_dir
urdf_filename = "panda.urdf"
urdf_model_path = join(join(model_path, "panda_description/urdf"), urdf_filename)

model, collision_model, visual_model = pin.buildModelsFromUrdf(urdf_model_path, mesh_dir)

# Add point clouds: random samples in the unit cube, one point per column.
num_points = 5000
points = np.random.rand(3, num_points)

point_cloud_placement = pin.SE3.Identity()  # Placement of the point cloud wrt the WORLD frame
point_cloud_placement.translation = np.array([0.2, 0.2, -0.5])
X = points[0, :]
Y = points[1, :]
Z = points[2, :]

# Bin the X coordinates onto a regular grid; the half-pad shifts the bin
# edges so each point is assigned to its nearest grid node.
nx = 20
x_grid = np.linspace(0., 1., nx)
x_half_pad = 0.5 * (x_grid[1] - x_grid[0])
x_bins = np.digitize(X, x_grid + x_half_pad)
x_dim = x_grid[-1] - x_grid[0]

# Same binning along Y.
ny = 20
y_grid = np.linspace(0., 1., ny)
y_half_pad = 0.5 * (y_grid[1] - y_grid[0])
y_bins = np.digitize(Y, y_grid + y_half_pad)
y_dim = y_grid[-1] - y_grid[0]

# For every (x, y) cell keep the highest Z among the points that fall in it.
point_bins = y_bins * nx + x_bins
heights = np.zeros((ny, nx))
np.maximum.at(heights.ravel(), point_bins, Z)

# Build the FCL geometries: a BVH over the raw points (for display) and the
# height field derived from them (for collision).
point_cloud = fcl.BVHModelOBBRSS()
point_cloud.beginModel(0, num_points)
point_cloud.addVertices(points.T)
height_field = fcl.HeightFieldOBBRSS(x_dim, y_dim, heights, min(Z))
# Center the height field on the grid's midpoint, relative to the cloud.
height_field_placement = point_cloud_placement * pin.SE3(np.eye(3), 0.5 * np.array([x_grid[0] + x_grid[-1], y_grid[0] + y_grid[-1], 0.]))

go_point_cloud = pin.GeometryObject("point_cloud", 0, point_cloud, point_cloud_placement)
go_point_cloud.meshColor = np.ones((4))
collision_model.addGeometryObject(go_point_cloud)
visual_model.addGeometryObject(go_point_cloud)

go_height_field = pin.GeometryObject("height_field", 0, height_field, height_field_placement)
go_height_field.meshColor = np.ones((4))
height_field_collision_id = collision_model.addGeometryObject(go_height_field)
visual_model.addGeometryObject(go_height_field)

# Add collision pair between the height field and the panda_hand geometry
panda_hand_collision_id = collision_model.getGeometryId("panda_hand_0")
go_panda_hand = collision_model.geometryObjects[panda_hand_collision_id]
go_panda_hand.geometry.buildConvexRepresentation(False)
go_panda_hand.geometry = go_panda_hand.geometry.convex  # We need to work with the convex hull of the real mesh

collision_pair = pin.CollisionPair(height_field_collision_id, panda_hand_collision_id)
collision_model.addCollisionPair(collision_pair)
viz = MeshcatVisualizer(model, collision_model, visual_model)

# Start a new MeshCat server and client.
# Note: the server can also be started separately using the "meshcat-server" command in a terminal:
# this enables the server to remain active after the current script ends.
#
# Option open=True opens the visualizer.
# Note: the visualizer can also be opened separately by visiting the provided URL.
try:
    viz.initViewer(open=True)
except ImportError as err:
    print("Error while initializing the viewer. It seems you should install Python meshcat")
    print(err)
    sys.exit(0)

# Load the robot in the viewer.
viz.loadViewerModel()

# Display a robot configuration.
q0 = pin.neutral(model)
viz.display(q0)

is_collision = False
data = model.createData()
collision_data = collision_model.createData()
# Rejection-sample random configurations until one collides with the
# height field (checked through the single registered collision pair).
while not is_collision:
    q = pin.randomConfiguration(model)
    is_collision = pin.computeCollisions(model, data, collision_model, collision_data, q, True)

print("Found a configuration in collision:", q)
viz.display(q)
| [
"numpy.random.rand",
"pinocchio.buildModelsFromUrdf",
"numpy.array",
"pinocchio.computeCollisions",
"sys.exit",
"numpy.linspace",
"pinocchio.randomConfiguration",
"pinocchio.SE3.Identity",
"numpy.eye",
"numpy.ones",
"numpy.digitize",
"pinocchio.visualize.MeshcatVisualizer",
"pinocchio.Collis... | [((613, 667), 'os.path.join', 'join', (['pinocchio_model_dir', '"""example-robot-data/robots"""'], {}), "(pinocchio_model_dir, 'example-robot-data/robots')\n", (617, 667), False, 'from os.path import dirname, join, abspath\n'), ((847, 897), 'pinocchio.buildModelsFromUrdf', 'pin.buildModelsFromUrdf', (['urdf_model_path', 'mesh_dir'], {}), '(urdf_model_path, mesh_dir)\n', (870, 897), True, 'import pinocchio as pin\n'), ((945, 974), 'numpy.random.rand', 'np.random.rand', (['(3)', 'num_points'], {}), '(3, num_points)\n', (959, 974), True, 'import numpy as np\n'), ((999, 1017), 'pinocchio.SE3.Identity', 'pin.SE3.Identity', ([], {}), '()\n', (1015, 1017), True, 'import pinocchio as pin\n'), ((1105, 1131), 'numpy.array', 'np.array', (['[0.2, 0.2, -0.5]'], {}), '([0.2, 0.2, -0.5])\n', (1113, 1131), True, 'import numpy as np\n'), ((1197, 1222), 'numpy.linspace', 'np.linspace', (['(0.0)', '(1.0)', 'nx'], {}), '(0.0, 1.0, nx)\n', (1208, 1222), True, 'import numpy as np\n'), ((1269, 1304), 'numpy.digitize', 'np.digitize', (['X', '(x_grid + x_half_pad)'], {}), '(X, x_grid + x_half_pad)\n', (1280, 1304), True, 'import numpy as np\n'), ((1354, 1379), 'numpy.linspace', 'np.linspace', (['(0.0)', '(1.0)', 'ny'], {}), '(0.0, 1.0, ny)\n', (1365, 1379), True, 'import numpy as np\n'), ((1426, 1461), 'numpy.digitize', 'np.digitize', (['Y', '(y_grid + y_half_pad)'], {}), '(Y, y_grid + y_half_pad)\n', (1437, 1461), True, 'import numpy as np\n'), ((1538, 1556), 'numpy.zeros', 'np.zeros', (['(ny, nx)'], {}), '((ny, nx))\n', (1546, 1556), True, 'import numpy as np\n'), ((1618, 1638), 'hppfcl.BVHModelOBBRSS', 'fcl.BVHModelOBBRSS', ([], {}), '()\n', (1636, 1638), True, 'import hppfcl as fcl\n'), ((1934, 2006), 'pinocchio.GeometryObject', 'pin.GeometryObject', (['"""point_cloud"""', '(0)', 'point_cloud', 'point_cloud_placement'], {}), "('point_cloud', 0, point_cloud, point_cloud_placement)\n", (1952, 2006), True, 'import pinocchio as pin\n'), ((2031, 2041), 'numpy.ones', 
'np.ones', (['(4)'], {}), '(4)\n', (2038, 2041), True, 'import numpy as np\n'), ((2160, 2235), 'pinocchio.GeometryObject', 'pin.GeometryObject', (['"""height_field"""', '(0)', 'height_field', 'height_field_placement'], {}), "('height_field', 0, height_field, height_field_placement)\n", (2178, 2235), True, 'import pinocchio as pin\n'), ((2261, 2271), 'numpy.ones', 'np.ones', (['(4)'], {}), '(4)\n', (2268, 2271), True, 'import numpy as np\n'), ((2807, 2876), 'pinocchio.CollisionPair', 'pin.CollisionPair', (['height_field_collision_id', 'panda_hand_collision_id'], {}), '(height_field_collision_id, panda_hand_collision_id)\n', (2824, 2876), True, 'import pinocchio as pin\n'), ((2933, 2988), 'pinocchio.visualize.MeshcatVisualizer', 'MeshcatVisualizer', (['model', 'collision_model', 'visual_model'], {}), '(model, collision_model, visual_model)\n', (2950, 2988), False, 'from pinocchio.visualize import MeshcatVisualizer\n'), ((3610, 3628), 'pinocchio.neutral', 'pin.neutral', (['model'], {}), '(model)\n', (3621, 3628), True, 'import pinocchio as pin\n'), ((750, 792), 'os.path.join', 'join', (['model_path', '"""panda_description/urdf"""'], {}), "(model_path, 'panda_description/urdf')\n", (754, 792), False, 'from os.path import dirname, join, abspath\n'), ((3771, 3801), 'pinocchio.randomConfiguration', 'pin.randomConfiguration', (['model'], {}), '(model)\n', (3794, 3801), True, 'import pinocchio as pin\n'), ((3822, 3898), 'pinocchio.computeCollisions', 'pin.computeCollisions', (['model', 'data', 'collision_model', 'collision_data', 'q', '(True)'], {}), '(model, data, collision_model, collision_data, q, True)\n', (3843, 3898), True, 'import pinocchio as pin\n'), ((1837, 1846), 'numpy.eye', 'np.eye', (['(3)'], {}), '(3)\n', (1843, 1846), True, 'import numpy as np\n'), ((3504, 3515), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (3512, 3515), False, 'import sys\n'), ((1852, 1915), 'numpy.array', 'np.array', (['[x_grid[0] + x_grid[-1], y_grid[0] + y_grid[-1], 0.0]'], {}), 
'([x_grid[0] + x_grid[-1], y_grid[0] + y_grid[-1], 0.0])\n', (1860, 1915), True, 'import numpy as np\n'), ((568, 585), 'os.path.abspath', 'abspath', (['__file__'], {}), '(__file__)\n', (575, 585), False, 'from os.path import dirname, join, abspath\n')] |
from collections import Counter, namedtuple
# Record type for an area's AP offline-trend statistics: the area id, the
# average AP count, and the avg/max/min of both the offline-AP count and
# the offline rate. Field names kept camelCase to match the source data keys.
Result_ApOfflineTrend = namedtuple(
    "Result_ApOfflineTrend",
    "areaid avgApNum "
    "avgOffLineApNum maxOffLineApNum minOffLineApNum "
    "avgOffLineRate maxOffLineRate minOffLineRate",
)
data= [
{
"deviceId":14063,
"sn":None,
"deviceName":"ss",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-09 11:02:49",
"remark":None,
"isOnline":False,
"deviceType":3,
"bindAreaIds":[
23151
],
"topId":13655,
"mac":"11:A1:11:11:11:11"
},
{
"deviceId":13164,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:50",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22946
],
"topId":22944,
"mac":"A1:A1:A5:A1:A1:02"
},
{
"deviceId":13165,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:50",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22949
],
"topId":22947,
"mac":"A1:A1:A5:A1:A1:03"
},
{
"deviceId":13166,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:50",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22952
],
"topId":22950,
"mac":"A1:A1:A5:A1:A1:04"
},
{
"deviceId":13167,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:50",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22955
],
"topId":22953,
"mac":"A1:A1:A5:A1:A1:05"
},
{
"deviceId":13168,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:50",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22958
],
"topId":22956,
"mac":"A1:A1:A5:A1:A1:06"
},
{
"deviceId":13169,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:51",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22961
],
"topId":22959,
"mac":"A1:A1:A5:A1:A1:07"
},
{
"deviceId":13170,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:51",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22964
],
"topId":22962,
"mac":"A1:A1:A5:A1:A1:08"
},
{
"deviceId":13171,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:51",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22967
],
"topId":22965,
"mac":"A1:A1:A5:A1:A1:09"
},
{
"deviceId":13172,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:51",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22970
],
"topId":22968,
"mac":"A1:A1:A5:A1:A1:10"
},
{
"deviceId":13174,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:51",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22974
],
"topId":22944,
"mac":"A1:A1:A5:A1:A1:12"
},
{
"deviceId":13175,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:51",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22976
],
"topId":22947,
"mac":"A1:A1:A5:A1:A1:13"
},
{
"deviceId":13176,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:51",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22978
],
"topId":22950,
"mac":"A1:A1:A5:A1:A1:14"
},
{
"deviceId":13177,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:52",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22980
],
"topId":22953,
"mac":"A1:A1:A5:A1:A1:15"
},
{
"deviceId":13178,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:52",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22982
],
"topId":22956,
"mac":"A1:A1:A5:A1:A1:16"
},
{
"deviceId":13179,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:52",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22984
],
"topId":22959,
"mac":"A1:A1:A5:A1:A1:17"
},
{
"deviceId":13180,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:52",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22986
],
"topId":22962,
"mac":"A1:A1:A5:A1:A1:18"
},
{
"deviceId":13181,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:52",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22988
],
"topId":22965,
"mac":"A1:A1:A5:A1:A1:19"
},
{
"deviceId":13182,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:52",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22990
],
"topId":22968,
"mac":"A1:A1:A5:A1:A1:20"
},
{
"deviceId":13184,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:52",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22994
],
"topId":22944,
"mac":"A1:A1:A5:A1:A1:22"
},
{
"deviceId":13185,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:53",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22996
],
"topId":22947,
"mac":"A1:A1:A5:A1:A1:23"
},
{
"deviceId":13186,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:53",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22998
],
"topId":22950,
"mac":"A1:A1:A5:A1:A1:24"
},
{
"deviceId":13187,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:53",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23000
],
"topId":22953,
"mac":"A1:A1:A5:A1:A1:25"
},
{
"deviceId":13188,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:53",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23002
],
"topId":22956,
"mac":"A1:A1:A5:A1:A1:26"
},
{
"deviceId":13189,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:53",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23004
],
"topId":22959,
"mac":"A1:A1:A5:A1:A1:27"
},
{
"deviceId":13190,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:53",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23006
],
"topId":22962,
"mac":"A1:A1:A5:A1:A1:28"
},
{
"deviceId":13191,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:53",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23008
],
"topId":22965,
"mac":"A1:A1:A5:A1:A1:29"
},
{
"deviceId":13192,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:54",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23010
],
"topId":22968,
"mac":"A1:A1:A5:A1:A1:30"
},
{
"deviceId":13194,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:54",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23014
],
"topId":22944,
"mac":"A1:A1:A5:A1:A1:32"
},
{
"deviceId":13195,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:54",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23016
],
"topId":22947,
"mac":"A1:A1:A5:A1:A1:33"
},
{
"deviceId":13196,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:54",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23018
],
"topId":22950,
"mac":"A1:A1:A5:A1:A1:34"
},
{
"deviceId":13197,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:54",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23020
],
"topId":22953,
"mac":"A1:A1:A5:A1:A1:35"
},
{
"deviceId":13198,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:54",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23022
],
"topId":22956,
"mac":"A1:A1:A5:A1:A1:36"
},
{
"deviceId":13205,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:55",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23036
],
"topId":22947,
"mac":"A1:A1:A5:A1:A1:43"
},
{
"deviceId":13206,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:55",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23038
],
"topId":22950,
"mac":"A1:A1:A5:A1:A1:44"
},
{
"deviceId":13207,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:55",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23040
],
"topId":22953,
"mac":"A1:A1:A5:A1:A1:45"
},
{
"deviceId":13208,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:55",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23042
],
"topId":22956,
"mac":"A1:A1:A5:A1:A1:46"
},
{
"deviceId":13209,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:56",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23044
],
"topId":22959,
"mac":"A1:A1:A5:A1:A1:47"
},
{
"deviceId":13210,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:56",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23046
],
"topId":22962,
"mac":"A1:A1:A5:A1:A1:48"
},
{
"deviceId":13211,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:56",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23048
],
"topId":22965,
"mac":"A1:A1:A5:A1:A1:49"
},
{
"deviceId":13212,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:56",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23050
],
"topId":22968,
"mac":"A1:A1:A5:A1:A1:50"
},
{
"deviceId":13214,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:56",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23054
],
"topId":22944,
"mac":"A1:A1:A5:A1:A1:52"
},
{
"deviceId":13215,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:56",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23056
],
"topId":22947,
"mac":"A1:A1:A5:A1:A1:53"
},
{
"deviceId":13216,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:57",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23058
],
"topId":22950,
"mac":"A1:A1:A5:A1:A1:54"
},
{
"deviceId":13217,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:57",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23060
],
"topId":22953,
"mac":"A1:A1:A5:A1:A1:55"
},
{
"deviceId":13218,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:57",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23062
],
"topId":22956,
"mac":"A1:A1:A5:A1:A1:56"
},
{
"deviceId":13219,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:57",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23064
],
"topId":22959,
"mac":"A1:A1:A5:A1:A1:57"
},
{
"deviceId":13220,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:57",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23066
],
"topId":22962,
"mac":"A1:A1:A5:A1:A1:58"
},
{
"deviceId":13221,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:57",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23068
],
"topId":22965,
"mac":"A1:A1:A5:A1:A1:59"
},
{
"deviceId":13222,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:57",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23070
],
"topId":22968,
"mac":"A1:A1:A5:A1:A1:60"
},
{
"deviceId":13224,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:58",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23074
],
"topId":22944,
"mac":"A1:A1:A5:A1:A1:62"
},
{
"deviceId":13225,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:58",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23076
],
"topId":22947,
"mac":"A1:A1:A5:A1:A1:63"
},
{
"deviceId":13226,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:58",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23078
],
"topId":22950,
"mac":"A1:A1:A5:A1:A1:64"
},
{
"deviceId":13227,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:58",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23080
],
"topId":22953,
"mac":"A1:A1:A5:A1:A1:65"
},
{
"deviceId":13228,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:58",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23082
],
"topId":22956,
"mac":"A1:A1:A5:A1:A1:66"
},
{
"deviceId":13229,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:58",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23084
],
"topId":22959,
"mac":"A1:A1:A5:A1:A1:67"
},
{
"deviceId":13230,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:59",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23086
],
"topId":22962,
"mac":"A1:A1:A5:A1:A1:68"
},
{
"deviceId":13231,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:59",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23088
],
"topId":22965,
"mac":"A1:A1:A5:A1:A1:69"
},
{
"deviceId":13232,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:59",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23090
],
"topId":22968,
"mac":"A1:A1:A5:A1:A1:70"
},
{
"deviceId":13234,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:59",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23094
],
"topId":22944,
"mac":"A1:A1:A5:A1:A1:72"
},
{
"deviceId":13235,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:59",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23096
],
"topId":22947,
"mac":"A1:A1:A5:A1:A1:73"
},
{
"deviceId":13236,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:59",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23098
],
"topId":22950,
"mac":"A1:A1:A5:A1:A1:74"
},
{
"deviceId":13237,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:59",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23100
],
"topId":22953,
"mac":"A1:A1:A5:A1:A1:75"
},
{
"deviceId":13238,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:59",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23102
],
"topId":22956,
"mac":"A1:A1:A5:A1:A1:76"
},
{
"deviceId":13239,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:00",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23104
],
"topId":22959,
"mac":"A1:A1:A5:A1:A1:77"
},
{
"deviceId":13240,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:00",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23106
],
"topId":22962,
"mac":"A1:A1:A5:A1:A1:78"
},
{
"deviceId":13241,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:00",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23108
],
"topId":22965,
"mac":"A1:A1:A5:A1:A1:79"
},
{
"deviceId":13242,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:00",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23110
],
"topId":22968,
"mac":"A1:A1:A5:A1:A1:80"
},
{
"deviceId":13199,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:54",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23024
],
"topId":22959,
"mac":"A1:A1:A5:A1:A1:37"
},
{
"deviceId":13200,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:55",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23026
],
"topId":22962,
"mac":"A1:A1:A5:A1:A1:38"
},
{
"deviceId":13201,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:55",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23028
],
"topId":22965,
"mac":"A1:A1:A5:A1:A1:39"
},
{
"deviceId":13202,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:55",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23030
],
"topId":22968,
"mac":"A1:A1:A5:A1:A1:40"
},
{
"deviceId":13204,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:29:55",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23034
],
"topId":22944,
"mac":"A1:A1:A5:A1:A1:42"
},
{
"deviceId":13346,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:09",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23118
],
"topId":22950,
"mac":"A1:A1:A5:A1:B1:85"
},
{
"deviceId":13347,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:09",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23120
],
"topId":22953,
"mac":"A1:A1:A5:A1:B1:86"
},
{
"deviceId":13348,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:09",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23122
],
"topId":22956,
"mac":"A1:A1:A5:A1:B1:87"
},
{
"deviceId":13349,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:09",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23124
],
"topId":22959,
"mac":"A1:A1:A5:A1:B1:88"
},
{
"deviceId":13350,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:09",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23126
],
"topId":22962,
"mac":"A1:A1:A5:A1:B1:89"
},
{
"deviceId":13351,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:10",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23128
],
"topId":22965,
"mac":"A1:A1:A5:A1:B1:90"
},
{
"deviceId":13352,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:10",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23130
],
"topId":22968,
"mac":"A1:A1:A5:A1:B1:91"
},
{
"deviceId":13354,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:10",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23134
],
"topId":22944,
"mac":"A1:A1:A5:A1:B1:93"
},
{
"deviceId":13355,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:10",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23136
],
"topId":22947,
"mac":"A1:A1:A5:A1:B1:94"
},
{
"deviceId":13356,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:10",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23138
],
"topId":22950,
"mac":"A1:A1:A5:A1:B1:95"
},
{
"deviceId":13357,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:10",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23140
],
"topId":22953,
"mac":"A1:A1:A5:A1:B1:96"
},
{
"deviceId":13358,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:10",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23142
],
"topId":22956,
"mac":"A1:A1:A5:A1:B1:97"
},
{
"deviceId":13359,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:10",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23144
],
"topId":22959,
"mac":"A1:A1:A5:A1:B1:98"
},
{
"deviceId":13360,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:10",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23146
],
"topId":22962,
"mac":"A1:A1:A5:A1:B1:99"
},
{
"deviceId":13361,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:10",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23148
],
"topId":22965,
"mac":"A1:A1:A5:A1:C1:01"
},
{
"deviceId":13362,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:10",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23150
],
"topId":22968,
"mac":"A1:A1:A5:A1:C1:02"
},
{
"deviceId":13364,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:10",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22946
],
"topId":22944,
"mac":"A1:A1:A5:A1:C1:04"
},
{
"deviceId":13365,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:11",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22949
],
"topId":22947,
"mac":"A1:A1:A5:A1:C1:05"
},
{
"deviceId":13366,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:11",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22952
],
"topId":22950,
"mac":"A1:A1:A5:A1:C1:06"
},
{
"deviceId":13367,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:11",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22955
],
"topId":22953,
"mac":"A1:A1:A5:A1:C1:07"
},
{
"deviceId":13368,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:11",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22958
],
"topId":22956,
"mac":"A1:A1:A5:A1:C1:08"
},
{
"deviceId":13369,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:11",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22961
],
"topId":22959,
"mac":"A1:A1:A5:A1:C1:09"
},
{
"deviceId":13370,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:11",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22964
],
"topId":22962,
"mac":"A1:A1:A5:A1:C1:10"
},
{
"deviceId":13371,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:11",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22967
],
"topId":22965,
"mac":"A1:A1:A5:A1:C1:11"
},
{
"deviceId":13372,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:11",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22970
],
"topId":22968,
"mac":"A1:A1:A5:A1:C1:12"
},
{
"deviceId":13374,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:11",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22974
],
"topId":22944,
"mac":"A1:A1:A5:A1:C1:14"
},
{
"deviceId":13375,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:11",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22976
],
"topId":22947,
"mac":"A1:A1:A5:A1:C1:15"
},
{
"deviceId":13376,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:11",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22978
],
"topId":22950,
"mac":"A1:A1:A5:A1:C1:16"
},
{
"deviceId":13377,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:11",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22980
],
"topId":22953,
"mac":"A1:A1:A5:A1:C1:17"
},
{
"deviceId":13378,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:11",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22982
],
"topId":22956,
"mac":"A1:A1:A5:A1:C1:18"
},
{
"deviceId":13379,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:12",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22984
],
"topId":22959,
"mac":"A1:A1:A5:A1:C1:19"
},
{
"deviceId":13380,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:12",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22986
],
"topId":22962,
"mac":"A1:A1:A5:A1:C1:20"
},
{
"deviceId":13381,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:12",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22988
],
"topId":22965,
"mac":"A1:A1:A5:A1:C1:21"
},
{
"deviceId":13382,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:12",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22990
],
"topId":22968,
"mac":"A1:A1:A5:A1:C1:22"
},
{
"deviceId":13384,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:12",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22994
],
"topId":22944,
"mac":"A1:A1:A5:A1:C1:24"
},
{
"deviceId":13385,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:12",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22996
],
"topId":22947,
"mac":"A1:A1:A5:A1:C1:25"
},
{
"deviceId":13386,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:12",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22998
],
"topId":22950,
"mac":"A1:A1:A5:A1:C1:26"
},
{
"deviceId":13387,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:12",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23000
],
"topId":22953,
"mac":"A1:A1:A5:A1:C1:27"
},
{
"deviceId":13388,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:12",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23002
],
"topId":22956,
"mac":"A1:A1:A5:A1:C1:28"
},
{
"deviceId":13389,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:12",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23004
],
"topId":22959,
"mac":"A1:A1:A5:A1:C1:29"
},
{
"deviceId":13437,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:16",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23100
],
"topId":22953,
"mac":"A1:A1:A5:A1:C1:77"
},
{
"deviceId":13438,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:16",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23102
],
"topId":22956,
"mac":"A1:A1:A5:A1:C1:78"
},
{
"deviceId":13439,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:16",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23104
],
"topId":22959,
"mac":"A1:A1:A5:A1:C1:79"
},
{
"deviceId":13440,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:16",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23106
],
"topId":22962,
"mac":"A1:A1:A5:A1:C1:80"
},
{
"deviceId":13441,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:16",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23108
],
"topId":22965,
"mac":"A1:A1:A5:A1:C1:81"
},
{
"deviceId":13442,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:16",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23110
],
"topId":22968,
"mac":"A1:A1:A5:A1:C1:82"
},
{
"deviceId":13444,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:16",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23114
],
"topId":22944,
"mac":"A1:A1:A5:A1:C1:84"
},
{
"deviceId":13445,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:17",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23116
],
"topId":22947,
"mac":"A1:A1:A5:A1:C1:85"
},
{
"deviceId":13446,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:17",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23118
],
"topId":22950,
"mac":"A1:A1:A5:A1:C1:86"
},
{
"deviceId":13459,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:17",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23144
],
"topId":22959,
"mac":"A1:A1:A5:A1:C1:99"
},
{
"deviceId":13460,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:18",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23146
],
"topId":22962,
"mac":"A1:A1:A5:A1:D1:01"
},
{
"deviceId":13461,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:18",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23148
],
"topId":22965,
"mac":"A1:A1:A5:A1:D1:02"
},
{
"deviceId":13462,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:18",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23150
],
"topId":22968,
"mac":"A1:A1:A5:A1:D1:03"
},
{
"deviceId":13464,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:18",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22946
],
"topId":22944,
"mac":"A1:A1:A5:A1:D1:05"
},
{
"deviceId":13465,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:18",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22949
],
"topId":22947,
"mac":"A1:A1:A5:A1:D1:06"
},
{
"deviceId":13466,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:18",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22952
],
"topId":22950,
"mac":"A1:A1:A5:A1:D1:07"
},
{
"deviceId":13467,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:18",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22955
],
"topId":22953,
"mac":"A1:A1:A5:A1:D1:08"
},
{
"deviceId":13468,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:18",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22958
],
"topId":22956,
"mac":"A1:A1:A5:A1:D1:09"
},
{
"deviceId":13469,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:18",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22961
],
"topId":22959,
"mac":"A1:A1:A5:A1:D1:10"
},
{
"deviceId":13470,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:18",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22964
],
"topId":22962,
"mac":"A1:A1:A5:A1:D1:11"
},
{
"deviceId":13471,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:18",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22967
],
"topId":22965,
"mac":"A1:A1:A5:A1:D1:12"
},
{
"deviceId":13472,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:18",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22970
],
"topId":22968,
"mac":"A1:A1:A5:A1:D1:13"
},
{
"deviceId":13244,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:00",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23114
],
"topId":22944,
"mac":"A1:A1:A5:A1:A1:82"
},
{
"deviceId":13245,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:01",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23116
],
"topId":22947,
"mac":"A1:A1:A5:A1:A1:83"
},
{
"deviceId":13246,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:01",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23118
],
"topId":22950,
"mac":"A1:A1:A5:A1:A1:84"
},
{
"deviceId":13247,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:01",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23120
],
"topId":22953,
"mac":"A1:A1:A5:A1:A1:85"
},
{
"deviceId":13248,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:01",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23122
],
"topId":22956,
"mac":"A1:A1:A5:A1:A1:86"
},
{
"deviceId":13249,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:01",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23124
],
"topId":22959,
"mac":"A1:A1:A5:A1:A1:87"
},
{
"deviceId":13250,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:01",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23126
],
"topId":22962,
"mac":"A1:A1:A5:A1:A1:88"
},
{
"deviceId":13251,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:02",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23128
],
"topId":22965,
"mac":"A1:A1:A5:A1:A1:89"
},
{
"deviceId":13252,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:02",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23130
],
"topId":22968,
"mac":"A1:A1:A5:A1:A1:90"
},
{
"deviceId":13254,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:02",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23134
],
"topId":22944,
"mac":"A1:A1:A5:A1:A1:92"
},
{
"deviceId":13255,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:02",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23136
],
"topId":22947,
"mac":"A1:A1:A5:A1:A1:93"
},
{
"deviceId":13256,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:02",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23138
],
"topId":22950,
"mac":"A1:A1:A5:A1:A1:94"
},
{
"deviceId":13257,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:02",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23140
],
"topId":22953,
"mac":"A1:A1:A5:A1:A1:95"
},
{
"deviceId":13258,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:03",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23142
],
"topId":22956,
"mac":"A1:A1:A5:A1:A1:96"
},
{
"deviceId":13259,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:03",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23144
],
"topId":22959,
"mac":"A1:A1:A5:A1:A1:97"
},
{
"deviceId":13260,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:03",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23146
],
"topId":22962,
"mac":"A1:A1:A5:A1:A1:98"
},
{
"deviceId":13261,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:03",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23148
],
"topId":22965,
"mac":"A1:A1:A5:A1:A1:99"
},
{
"deviceId":13262,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:03",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23150
],
"topId":22968,
"mac":"A1:A1:A5:A1:B1:01"
},
{
"deviceId":13264,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:03",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22946
],
"topId":22944,
"mac":"A1:A1:A5:A1:B1:03"
},
{
"deviceId":13265,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:03",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22949
],
"topId":22947,
"mac":"A1:A1:A5:A1:B1:04"
},
{
"deviceId":13266,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:03",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22952
],
"topId":22950,
"mac":"A1:A1:A5:A1:B1:05"
},
{
"deviceId":13267,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:03",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22955
],
"topId":22953,
"mac":"A1:A1:A5:A1:B1:06"
},
{
"deviceId":13268,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:03",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22958
],
"topId":22956,
"mac":"A1:A1:A5:A1:B1:07"
},
{
"deviceId":13269,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:04",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22961
],
"topId":22959,
"mac":"A1:A1:A5:A1:B1:08"
},
{
"deviceId":13270,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:04",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22964
],
"topId":22962,
"mac":"A1:A1:A5:A1:B1:09"
},
{
"deviceId":13271,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:04",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22967
],
"topId":22965,
"mac":"A1:A1:A5:A1:B1:10"
},
{
"deviceId":13272,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:04",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22970
],
"topId":22968,
"mac":"A1:A1:A5:A1:B1:11"
},
{
"deviceId":13274,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:04",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22974
],
"topId":22944,
"mac":"A1:A1:A5:A1:B1:13"
},
{
"deviceId":13275,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:04",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22976
],
"topId":22947,
"mac":"A1:A1:A5:A1:B1:14"
},
{
"deviceId":13276,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:04",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22978
],
"topId":22950,
"mac":"A1:A1:A5:A1:B1:15"
},
{
"deviceId":13277,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:04",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22980
],
"topId":22953,
"mac":"A1:A1:A5:A1:B1:16"
},
{
"deviceId":13278,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:04",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22982
],
"topId":22956,
"mac":"A1:A1:A5:A1:B1:17"
},
{
"deviceId":13279,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:04",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22984
],
"topId":22959,
"mac":"A1:A1:A5:A1:B1:18"
},
{
"deviceId":13280,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:04",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22986
],
"topId":22962,
"mac":"A1:A1:A5:A1:B1:19"
},
{
"deviceId":13281,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:04",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22988
],
"topId":22965,
"mac":"A1:A1:A5:A1:B1:20"
},
{
"deviceId":13282,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:05",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22990
],
"topId":22968,
"mac":"A1:A1:A5:A1:B1:21"
},
{
"deviceId":13284,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:05",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22994
],
"topId":22944,
"mac":"A1:A1:A5:A1:B1:23"
},
{
"deviceId":13285,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:05",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22996
],
"topId":22947,
"mac":"A1:A1:A5:A1:B1:24"
},
{
"deviceId":13286,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:05",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22998
],
"topId":22950,
"mac":"A1:A1:A5:A1:B1:25"
},
{
"deviceId":13287,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:05",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23000
],
"topId":22953,
"mac":"A1:A1:A5:A1:B1:26"
},
{
"deviceId":13288,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:05",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23002
],
"topId":22956,
"mac":"A1:A1:A5:A1:B1:27"
},
{
"deviceId":13289,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:05",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23004
],
"topId":22959,
"mac":"A1:A1:A5:A1:B1:28"
},
{
"deviceId":13290,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:05",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23006
],
"topId":22962,
"mac":"A1:A1:A5:A1:B1:29"
},
{
"deviceId":13291,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:05",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23008
],
"topId":22965,
"mac":"A1:A1:A5:A1:B1:30"
},
{
"deviceId":13292,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:05",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23010
],
"topId":22968,
"mac":"A1:A1:A5:A1:B1:31"
},
{
"deviceId":13294,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:05",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23014
],
"topId":22944,
"mac":"A1:A1:A5:A1:B1:33"
},
{
"deviceId":13295,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:05",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23016
],
"topId":22947,
"mac":"A1:A1:A5:A1:B1:34"
},
{
"deviceId":13296,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:06",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23018
],
"topId":22950,
"mac":"A1:A1:A5:A1:B1:35"
},
{
"deviceId":13297,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:06",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23020
],
"topId":22953,
"mac":"A1:A1:A5:A1:B1:36"
},
{
"deviceId":13298,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:06",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23022
],
"topId":22956,
"mac":"A1:A1:A5:A1:B1:37"
},
{
"deviceId":13299,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:06",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23024
],
"topId":22959,
"mac":"A1:A1:A5:A1:B1:38"
},
{
"deviceId":13300,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:06",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23026
],
"topId":22962,
"mac":"A1:A1:A5:A1:B1:39"
},
{
"deviceId":13301,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:06",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23028
],
"topId":22965,
"mac":"A1:A1:A5:A1:B1:40"
},
{
"deviceId":13302,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:06",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23030
],
"topId":22968,
"mac":"A1:A1:A5:A1:B1:41"
},
{
"deviceId":13304,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:06",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23034
],
"topId":22944,
"mac":"A1:A1:A5:A1:B1:43"
},
{
"deviceId":13305,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:06",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23036
],
"topId":22947,
"mac":"A1:A1:A5:A1:B1:44"
},
{
"deviceId":13306,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:06",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23038
],
"topId":22950,
"mac":"A1:A1:A5:A1:B1:45"
},
{
"deviceId":13307,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:06",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23040
],
"topId":22953,
"mac":"A1:A1:A5:A1:B1:46"
},
{
"deviceId":13308,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:06",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23042
],
"topId":22956,
"mac":"A1:A1:A5:A1:B1:47"
},
{
"deviceId":13309,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:06",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23044
],
"topId":22959,
"mac":"A1:A1:A5:A1:B1:48"
},
{
"deviceId":13310,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:07",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23046
],
"topId":22962,
"mac":"A1:A1:A5:A1:B1:49"
},
{
"deviceId":13311,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:07",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23048
],
"topId":22965,
"mac":"A1:A1:A5:A1:B1:50"
},
{
"deviceId":13312,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:07",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23050
],
"topId":22968,
"mac":"A1:A1:A5:A1:B1:51"
},
{
"deviceId":13314,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:07",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23054
],
"topId":22944,
"mac":"A1:A1:A5:A1:B1:53"
},
{
"deviceId":13315,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:07",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23056
],
"topId":22947,
"mac":"A1:A1:A5:A1:B1:54"
},
{
"deviceId":13316,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:07",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23058
],
"topId":22950,
"mac":"A1:A1:A5:A1:B1:55"
},
{
"deviceId":13317,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:07",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23060
],
"topId":22953,
"mac":"A1:A1:A5:A1:B1:56"
},
{
"deviceId":13318,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:07",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23062
],
"topId":22956,
"mac":"A1:A1:A5:A1:B1:57"
},
{
"deviceId":13319,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:07",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23064
],
"topId":22959,
"mac":"A1:A1:A5:A1:B1:58"
},
{
"deviceId":13320,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:07",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23066
],
"topId":22962,
"mac":"A1:A1:A5:A1:B1:59"
},
{
"deviceId":13321,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:07",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23068
],
"topId":22965,
"mac":"A1:A1:A5:A1:B1:60"
},
{
"deviceId":13322,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:07",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23070
],
"topId":22968,
"mac":"A1:A1:A5:A1:B1:61"
},
{
"deviceId":13324,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:08",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23074
],
"topId":22944,
"mac":"A1:A1:A5:A1:B1:63"
},
{
"deviceId":13325,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:08",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23076
],
"topId":22947,
"mac":"A1:A1:A5:A1:B1:64"
},
{
"deviceId":13326,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:08",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23078
],
"topId":22950,
"mac":"A1:A1:A5:A1:B1:65"
},
{
"deviceId":13327,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:08",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23080
],
"topId":22953,
"mac":"A1:A1:A5:A1:B1:66"
},
{
"deviceId":13328,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:08",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23082
],
"topId":22956,
"mac":"A1:A1:A5:A1:B1:67"
},
{
"deviceId":13329,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:08",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23084
],
"topId":22959,
"mac":"A1:A1:A5:A1:B1:68"
},
{
"deviceId":13330,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:08",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23086
],
"topId":22962,
"mac":"A1:A1:A5:A1:B1:69"
},
{
"deviceId":13331,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:08",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23088
],
"topId":22965,
"mac":"A1:A1:A5:A1:B1:70"
},
{
"deviceId":13332,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:08",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23090
],
"topId":22968,
"mac":"A1:A1:A5:A1:B1:71"
},
{
"deviceId":13334,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:08",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23094
],
"topId":22944,
"mac":"A1:A1:A5:A1:B1:73"
},
{
"deviceId":13335,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:08",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23096
],
"topId":22947,
"mac":"A1:A1:A5:A1:B1:74"
},
{
"deviceId":13336,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:09",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23098
],
"topId":22950,
"mac":"A1:A1:A5:A1:B1:75"
},
{
"deviceId":13337,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:09",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23100
],
"topId":22953,
"mac":"A1:A1:A5:A1:B1:76"
},
{
"deviceId":13338,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:09",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23102
],
"topId":22956,
"mac":"A1:A1:A5:A1:B1:77"
},
{
"deviceId":13339,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:09",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23104
],
"topId":22959,
"mac":"A1:A1:A5:A1:B1:78"
},
{
"deviceId":13340,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:09",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23106
],
"topId":22962,
"mac":"A1:A1:A5:A1:B1:79"
},
{
"deviceId":13341,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:09",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23108
],
"topId":22965,
"mac":"A1:A1:A5:A1:B1:80"
},
{
"deviceId":13342,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:09",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23110
],
"topId":22968,
"mac":"A1:A1:A5:A1:B1:81"
},
{
"deviceId":13344,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:09",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23114
],
"topId":22944,
"mac":"A1:A1:A5:A1:B1:83"
},
{
"deviceId":13345,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:09",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23116
],
"topId":22947,
"mac":"A1:A1:A5:A1:B1:84"
},
{
"deviceId":13390,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:12",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23006
],
"topId":22962,
"mac":"A1:A1:A5:A1:C1:30"
},
{
"deviceId":13391,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:12",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23008
],
"topId":22965,
"mac":"A1:A1:A5:A1:C1:31"
},
{
"deviceId":13392,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:13",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23010
],
"topId":22968,
"mac":"A1:A1:A5:A1:C1:32"
},
{
"deviceId":13394,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:13",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23014
],
"topId":22944,
"mac":"A1:A1:A5:A1:C1:34"
},
{
"deviceId":13395,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:13",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23016
],
"topId":22947,
"mac":"A1:A1:A5:A1:C1:35"
},
{
"deviceId":13396,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:13",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23018
],
"topId":22950,
"mac":"A1:A1:A5:A1:C1:36"
},
{
"deviceId":13397,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:13",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23020
],
"topId":22953,
"mac":"A1:A1:A5:A1:C1:37"
},
{
"deviceId":13398,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:13",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23022
],
"topId":22956,
"mac":"A1:A1:A5:A1:C1:38"
},
{
"deviceId":13399,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:13",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23024
],
"topId":22959,
"mac":"A1:A1:A5:A1:C1:39"
},
{
"deviceId":13400,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:13",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23026
],
"topId":22962,
"mac":"A1:A1:A5:A1:C1:40"
},
{
"deviceId":13401,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:13",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23028
],
"topId":22965,
"mac":"A1:A1:A5:A1:C1:41"
},
{
"deviceId":13402,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:13",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23030
],
"topId":22968,
"mac":"A1:A1:A5:A1:C1:42"
},
{
"deviceId":13404,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:13",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23034
],
"topId":22944,
"mac":"A1:A1:A5:A1:C1:44"
},
{
"deviceId":13405,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:14",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23036
],
"topId":22947,
"mac":"A1:A1:A5:A1:C1:45"
},
{
"deviceId":13406,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:14",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23038
],
"topId":22950,
"mac":"A1:A1:A5:A1:C1:46"
},
{
"deviceId":13407,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:14",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23040
],
"topId":22953,
"mac":"A1:A1:A5:A1:C1:47"
},
{
"deviceId":13408,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:14",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23042
],
"topId":22956,
"mac":"A1:A1:A5:A1:C1:48"
},
{
"deviceId":13409,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:14",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23044
],
"topId":22959,
"mac":"A1:A1:A5:A1:C1:49"
},
{
"deviceId":13410,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:14",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23046
],
"topId":22962,
"mac":"A1:A1:A5:A1:C1:50"
},
{
"deviceId":13411,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:14",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23048
],
"topId":22965,
"mac":"A1:A1:A5:A1:C1:51"
},
{
"deviceId":13412,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:14",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23050
],
"topId":22968,
"mac":"A1:A1:A5:A1:C1:52"
},
{
"deviceId":13414,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:14",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23054
],
"topId":22944,
"mac":"A1:A1:A5:A1:C1:54"
},
{
"deviceId":13415,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:14",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23056
],
"topId":22947,
"mac":"A1:A1:A5:A1:C1:55"
},
{
"deviceId":13416,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:14",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23058
],
"topId":22950,
"mac":"A1:A1:A5:A1:C1:56"
},
{
"deviceId":13417,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:14",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23060
],
"topId":22953,
"mac":"A1:A1:A5:A1:C1:57"
},
{
"deviceId":13418,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:15",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23062
],
"topId":22956,
"mac":"A1:A1:A5:A1:C1:58"
},
{
"deviceId":13419,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:15",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23064
],
"topId":22959,
"mac":"A1:A1:A5:A1:C1:59"
},
{
"deviceId":13420,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:15",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23066
],
"topId":22962,
"mac":"A1:A1:A5:A1:C1:60"
},
{
"deviceId":13421,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:15",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23068
],
"topId":22965,
"mac":"A1:A1:A5:A1:C1:61"
},
{
"deviceId":13422,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:15",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23070
],
"topId":22968,
"mac":"A1:A1:A5:A1:C1:62"
},
{
"deviceId":13424,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:15",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23074
],
"topId":22944,
"mac":"A1:A1:A5:A1:C1:64"
},
{
"deviceId":13425,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:15",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23076
],
"topId":22947,
"mac":"A1:A1:A5:A1:C1:65"
},
{
"deviceId":13426,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:15",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23078
],
"topId":22950,
"mac":"A1:A1:A5:A1:C1:66"
},
{
"deviceId":13427,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:15",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23080
],
"topId":22953,
"mac":"A1:A1:A5:A1:C1:67"
},
{
"deviceId":13428,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:15",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23082
],
"topId":22956,
"mac":"A1:A1:A5:A1:C1:68"
},
{
"deviceId":13429,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:15",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23084
],
"topId":22959,
"mac":"A1:A1:A5:A1:C1:69"
},
{
"deviceId":13430,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:15",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23086
],
"topId":22962,
"mac":"A1:A1:A5:A1:C1:70"
},
{
"deviceId":13431,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:15",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23088
],
"topId":22965,
"mac":"A1:A1:A5:A1:C1:71"
},
{
"deviceId":13432,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:16",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23090
],
"topId":22968,
"mac":"A1:A1:A5:A1:C1:72"
},
{
"deviceId":13434,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:16",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23094
],
"topId":22944,
"mac":"A1:A1:A5:A1:C1:74"
},
{
"deviceId":13435,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:16",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23096
],
"topId":22947,
"mac":"A1:A1:A5:A1:C1:75"
},
{
"deviceId":13436,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:16",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23098
],
"topId":22950,
"mac":"A1:A1:A5:A1:C1:76"
},
{
"deviceId":13447,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:17",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23120
],
"topId":22953,
"mac":"A1:A1:A5:A1:C1:87"
},
{
"deviceId":13448,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:17",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23122
],
"topId":22956,
"mac":"A1:A1:A5:A1:C1:88"
},
{
"deviceId":13449,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:17",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23124
],
"topId":22959,
"mac":"A1:A1:A5:A1:C1:89"
},
{
"deviceId":13450,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:17",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23126
],
"topId":22962,
"mac":"A1:A1:A5:A1:C1:90"
},
{
"deviceId":13451,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:17",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23128
],
"topId":22965,
"mac":"A1:A1:A5:A1:C1:91"
},
{
"deviceId":13452,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:17",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23130
],
"topId":22968,
"mac":"A1:A1:A5:A1:C1:92"
},
{
"deviceId":13454,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:17",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23134
],
"topId":22944,
"mac":"A1:A1:A5:A1:C1:94"
},
{
"deviceId":13455,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:17",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23136
],
"topId":22947,
"mac":"A1:A1:A5:A1:C1:95"
},
{
"deviceId":13456,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:17",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23138
],
"topId":22950,
"mac":"A1:A1:A5:A1:C1:96"
},
{
"deviceId":13457,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:17",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23140
],
"topId":22953,
"mac":"A1:A1:A5:A1:C1:97"
},
{
"deviceId":13458,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:17",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23142
],
"topId":22956,
"mac":"A1:A1:A5:A1:C1:98"
},
{
"deviceId":13490,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:20",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23006
],
"topId":22962,
"mac":"A1:A1:A5:A1:D1:31"
},
{
"deviceId":13491,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:20",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23008
],
"topId":22965,
"mac":"A1:A1:A5:A1:D1:32"
},
{
"deviceId":13492,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:20",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23010
],
"topId":22968,
"mac":"A1:A1:A5:A1:D1:33"
},
{
"deviceId":13496,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:20",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23018
],
"topId":22950,
"mac":"A1:A1:A5:A1:D1:37"
},
{
"deviceId":13497,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:20",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23020
],
"topId":22953,
"mac":"A1:A1:A5:A1:D1:38"
},
{
"deviceId":13498,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:20",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23022
],
"topId":22956,
"mac":"A1:A1:A5:A1:D1:39"
},
{
"deviceId":13499,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:20",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23024
],
"topId":22959,
"mac":"A1:A1:A5:A1:D1:40"
},
{
"deviceId":13500,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:20",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23026
],
"topId":22962,
"mac":"A1:A1:A5:A1:D1:41"
},
{
"deviceId":13501,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:20",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23028
],
"topId":22965,
"mac":"A1:A1:A5:A1:D1:42"
},
{
"deviceId":13502,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:21",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23030
],
"topId":22968,
"mac":"A1:A1:A5:A1:D1:43"
},
{
"deviceId":13504,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:21",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23034
],
"topId":22944,
"mac":"A1:A1:A5:A1:D1:45"
},
{
"deviceId":13505,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:21",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23036
],
"topId":22947,
"mac":"A1:A1:A5:A1:D1:46"
},
{
"deviceId":13506,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:21",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23038
],
"topId":22950,
"mac":"A1:A1:A5:A1:D1:47"
},
{
"deviceId":13507,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:21",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23040
],
"topId":22953,
"mac":"A1:A1:A5:A1:D1:48"
},
{
"deviceId":13508,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:21",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23042
],
"topId":22956,
"mac":"A1:A1:A5:A1:D1:49"
},
{
"deviceId":13474,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:19",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22974
],
"topId":22944,
"mac":"A1:A1:A5:A1:D1:15"
},
{
"deviceId":13475,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:19",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22976
],
"topId":22947,
"mac":"A1:A1:A5:A1:D1:16"
},
{
"deviceId":13476,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:19",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22978
],
"topId":22950,
"mac":"A1:A1:A5:A1:D1:17"
},
{
"deviceId":13477,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:19",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22980
],
"topId":22953,
"mac":"A1:A1:A5:A1:D1:18"
},
{
"deviceId":13478,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:19",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22982
],
"topId":22956,
"mac":"A1:A1:A5:A1:D1:19"
},
{
"deviceId":13479,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:19",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22984
],
"topId":22959,
"mac":"A1:A1:A5:A1:D1:20"
},
{
"deviceId":13480,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:19",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22986
],
"topId":22962,
"mac":"A1:A1:A5:A1:D1:21"
},
{
"deviceId":13481,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:19",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22988
],
"topId":22965,
"mac":"A1:A1:A5:A1:D1:22"
},
{
"deviceId":13482,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:19",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22990
],
"topId":22968,
"mac":"A1:A1:A5:A1:D1:23"
},
{
"deviceId":13484,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:19",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22994
],
"topId":22944,
"mac":"A1:A1:A5:A1:D1:25"
},
{
"deviceId":13485,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:19",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22996
],
"topId":22947,
"mac":"A1:A1:A5:A1:D1:26"
},
{
"deviceId":13486,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:19",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22998
],
"topId":22950,
"mac":"A1:A1:A5:A1:D1:27"
},
{
"deviceId":13487,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:19",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23000
],
"topId":22953,
"mac":"A1:A1:A5:A1:D1:28"
},
{
"deviceId":13488,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:19",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23002
],
"topId":22956,
"mac":"A1:A1:A5:A1:D1:29"
},
{
"deviceId":13489,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:20",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23004
],
"topId":22959,
"mac":"A1:A1:A5:A1:D1:30"
},
{
"deviceId":13494,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:20",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23014
],
"topId":22944,
"mac":"A1:A1:A5:A1:D1:35"
},
{
"deviceId":13495,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:20",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23016
],
"topId":22947,
"mac":"A1:A1:A5:A1:D1:36"
},
{
"deviceId":13509,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:21",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23044
],
"topId":22959,
"mac":"A1:A1:A5:A1:D1:50"
},
{
"deviceId":13510,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:21",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23046
],
"topId":22962,
"mac":"A1:A1:A5:A1:D1:51"
},
{
"deviceId":13511,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:21",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23048
],
"topId":22965,
"mac":"A1:A1:A5:A1:D1:52"
},
{
"deviceId":13512,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:21",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23050
],
"topId":22968,
"mac":"A1:A1:A5:A1:D1:53"
},
{
"deviceId":13514,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:21",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23054
],
"topId":22944,
"mac":"A1:A1:A5:A1:D1:55"
},
{
"deviceId":13515,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:21",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23056
],
"topId":22947,
"mac":"A1:A1:A5:A1:D1:56"
},
{
"deviceId":13516,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:21",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23058
],
"topId":22950,
"mac":"A1:A1:A5:A1:D1:57"
},
{
"deviceId":13517,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:21",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23060
],
"topId":22953,
"mac":"A1:A1:A5:A1:D1:58"
},
{
"deviceId":13518,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:22",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23062
],
"topId":22956,
"mac":"A1:A1:A5:A1:D1:59"
},
{
"deviceId":13519,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:22",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23064
],
"topId":22959,
"mac":"A1:A1:A5:A1:D1:60"
},
{
"deviceId":13520,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:22",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23066
],
"topId":22962,
"mac":"A1:A1:A5:A1:D1:61"
},
{
"deviceId":13521,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:22",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23068
],
"topId":22965,
"mac":"A1:A1:A5:A1:D1:62"
},
{
"deviceId":13522,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:22",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23070
],
"topId":22968,
"mac":"A1:A1:A5:A1:D1:63"
},
{
"deviceId":13524,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:22",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23074
],
"topId":22944,
"mac":"A1:A1:A5:A1:D1:65"
},
{
"deviceId":13525,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:22",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23076
],
"topId":22947,
"mac":"A1:A1:A5:A1:D1:66"
},
{
"deviceId":13526,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:22",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23078
],
"topId":22950,
"mac":"A1:A1:A5:A1:D1:67"
},
{
"deviceId":13527,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:22",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23080
],
"topId":22953,
"mac":"A1:A1:A5:A1:D1:68"
},
{
"deviceId":13528,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:22",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23082
],
"topId":22956,
"mac":"A1:A1:A5:A1:D1:69"
},
{
"deviceId":13529,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:22",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23084
],
"topId":22959,
"mac":"A1:A1:A5:A1:D1:70"
},
{
"deviceId":13530,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:22",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23086
],
"topId":22962,
"mac":"A1:A1:A5:A1:D1:71"
},
{
"deviceId":13531,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:22",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23088
],
"topId":22965,
"mac":"A1:A1:A5:A1:D1:72"
},
{
"deviceId":13532,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:23",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23090
],
"topId":22968,
"mac":"A1:A1:A5:A1:D1:73"
},
{
"deviceId":13534,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:23",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23094
],
"topId":22944,
"mac":"A1:A1:A5:A1:D1:75"
},
{
"deviceId":13535,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:23",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23096
],
"topId":22947,
"mac":"A1:A1:A5:A1:D1:76"
},
{
"deviceId":13536,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:23",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23098
],
"topId":22950,
"mac":"A1:A1:A5:A1:D1:77"
},
{
"deviceId":13537,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:23",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23100
],
"topId":22953,
"mac":"A1:A1:A5:A1:D1:78"
},
{
"deviceId":13538,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:23",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23102
],
"topId":22956,
"mac":"A1:A1:A5:A1:D1:79"
},
{
"deviceId":13539,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:23",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23104
],
"topId":22959,
"mac":"A1:A1:A5:A1:D1:80"
},
{
"deviceId":13540,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:23",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23106
],
"topId":22962,
"mac":"A1:A1:A5:A1:D1:81"
},
{
"deviceId":13541,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:23",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23108
],
"topId":22965,
"mac":"A1:A1:A5:A1:D1:82"
},
{
"deviceId":13542,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:23",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23110
],
"topId":22968,
"mac":"A1:A1:A5:A1:D1:83"
},
{
"deviceId":13544,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:23",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23114
],
"topId":22944,
"mac":"A1:A1:A5:A1:D1:85"
},
{
"deviceId":13545,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:23",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23116
],
"topId":22947,
"mac":"A1:A1:A5:A1:D1:86"
},
{
"deviceId":13546,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:24",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23118
],
"topId":22950,
"mac":"A1:A1:A5:A1:D1:87"
},
{
"deviceId":13547,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:24",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23120
],
"topId":22953,
"mac":"A1:A1:A5:A1:D1:88"
},
{
"deviceId":13548,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:24",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23122
],
"topId":22956,
"mac":"A1:A1:A5:A1:D1:89"
},
{
"deviceId":13549,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:24",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23124
],
"topId":22959,
"mac":"A1:A1:A5:A1:D1:90"
},
{
"deviceId":13550,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:24",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23126
],
"topId":22962,
"mac":"A1:A1:A5:A1:D1:91"
},
{
"deviceId":13551,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:24",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23128
],
"topId":22965,
"mac":"A1:A1:A5:A1:D1:92"
},
{
"deviceId":13552,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:24",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23130
],
"topId":22968,
"mac":"A1:A1:A5:A1:D1:93"
},
{
"deviceId":13554,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:24",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23134
],
"topId":22944,
"mac":"A1:A1:A5:A1:D1:95"
},
{
"deviceId":13555,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:24",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23136
],
"topId":22947,
"mac":"A1:A1:A5:A1:D1:96"
},
{
"deviceId":13556,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:24",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23138
],
"topId":22950,
"mac":"A1:A1:A5:A1:D1:97"
},
{
"deviceId":13557,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:24",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23140
],
"topId":22953,
"mac":"A1:A1:A5:A1:D1:98"
},
{
"deviceId":13558,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:24",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23142
],
"topId":22956,
"mac":"A1:A1:A5:A1:D1:99"
},
{
"deviceId":13559,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:25",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23144
],
"topId":22959,
"mac":"A1:A1:A5:A1:E1:01"
},
{
"deviceId":13560,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:25",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23146
],
"topId":22962,
"mac":"A1:A1:A5:A1:E1:02"
},
{
"deviceId":13561,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:25",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23148
],
"topId":22965,
"mac":"A1:A1:A5:A1:E1:03"
},
{
"deviceId":13562,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:25",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23150
],
"topId":22968,
"mac":"A1:A1:A5:A1:E1:04"
},
{
"deviceId":13564,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:25",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22946
],
"topId":22944,
"mac":"A1:A1:A5:A1:E1:06"
},
{
"deviceId":13565,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:25",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22949
],
"topId":22947,
"mac":"A1:A1:A5:A1:E1:07"
},
{
"deviceId":13566,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:25",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22952
],
"topId":22950,
"mac":"A1:A1:A5:A1:E1:08"
},
{
"deviceId":13567,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:25",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22955
],
"topId":22953,
"mac":"A1:A1:A5:A1:E1:09"
},
{
"deviceId":13568,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:25",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22958
],
"topId":22956,
"mac":"A1:A1:A5:A1:E1:10"
},
{
"deviceId":13569,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:25",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22961
],
"topId":22959,
"mac":"A1:A1:A5:A1:E1:11"
},
{
"deviceId":13570,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:25",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22964
],
"topId":22962,
"mac":"A1:A1:A5:A1:E1:12"
},
{
"deviceId":13571,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:25",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22967
],
"topId":22965,
"mac":"A1:A1:A5:A1:E1:13"
},
{
"deviceId":13572,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:25",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22970
],
"topId":22968,
"mac":"A1:A1:A5:A1:E1:14"
},
{
"deviceId":13574,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:26",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22974
],
"topId":22944,
"mac":"A1:A1:A5:A1:E1:16"
},
{
"deviceId":13575,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:26",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22976
],
"topId":22947,
"mac":"A1:A1:A5:A1:E1:17"
},
{
"deviceId":13576,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:26",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22978
],
"topId":22950,
"mac":"A1:A1:A5:A1:E1:18"
},
{
"deviceId":13577,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:26",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22980
],
"topId":22953,
"mac":"A1:A1:A5:A1:E1:19"
},
{
"deviceId":13578,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:26",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22982
],
"topId":22956,
"mac":"A1:A1:A5:A1:E1:20"
},
{
"deviceId":13579,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:26",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22984
],
"topId":22959,
"mac":"A1:A1:A5:A1:E1:21"
},
{
"deviceId":13580,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:26",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22986
],
"topId":22962,
"mac":"A1:A1:A5:A1:E1:22"
},
{
"deviceId":13581,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:26",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22988
],
"topId":22965,
"mac":"A1:A1:A5:A1:E1:23"
},
{
"deviceId":13582,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:26",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22990
],
"topId":22968,
"mac":"A1:A1:A5:A1:E1:24"
},
{
"deviceId":13584,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:26",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22994
],
"topId":22944,
"mac":"A1:A1:A5:A1:E1:26"
},
{
"deviceId":13585,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:26",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22996
],
"topId":22947,
"mac":"A1:A1:A5:A1:E1:27"
},
{
"deviceId":13586,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:27",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22998
],
"topId":22950,
"mac":"A1:A1:A5:A1:E1:28"
},
{
"deviceId":13587,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:27",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23000
],
"topId":22953,
"mac":"A1:A1:A5:A1:E1:29"
},
{
"deviceId":13588,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:27",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23002
],
"topId":22956,
"mac":"A1:A1:A5:A1:E1:30"
},
{
"deviceId":13589,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:27",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23004
],
"topId":22959,
"mac":"A1:A1:A5:A1:E1:31"
},
{
"deviceId":13590,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:27",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23006
],
"topId":22962,
"mac":"A1:A1:A5:A1:E1:32"
},
{
"deviceId":13591,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:27",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23008
],
"topId":22965,
"mac":"A1:A1:A5:A1:E1:33"
},
{
"deviceId":13592,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:27",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23010
],
"topId":22968,
"mac":"A1:A1:A5:A1:E1:34"
},
{
"deviceId":13594,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:27",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23014
],
"topId":22944,
"mac":"A1:A1:A5:A1:E1:36"
},
{
"deviceId":13595,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:27",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23016
],
"topId":22947,
"mac":"A1:A1:A5:A1:E1:37"
},
{
"deviceId":13596,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:27",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23018
],
"topId":22950,
"mac":"A1:A1:A5:A1:E1:38"
},
{
"deviceId":13597,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:27",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23020
],
"topId":22953,
"mac":"A1:A1:A5:A1:E1:39"
},
{
"deviceId":13598,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:27",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23022
],
"topId":22956,
"mac":"A1:A1:A5:A1:E1:40"
},
{
"deviceId":13599,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:27",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23024
],
"topId":22959,
"mac":"A1:A1:A5:A1:E1:41"
},
{
"deviceId":13600,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:27",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23026
],
"topId":22962,
"mac":"A1:A1:A5:A1:E1:42"
},
{
"deviceId":13601,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:28",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23028
],
"topId":22965,
"mac":"A1:A1:A5:A1:E1:43"
},
{
"deviceId":13682,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:33",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22990
],
"topId":22968,
"mac":"A1:A1:A5:A1:F1:25"
},
{
"deviceId":13684,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:33",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22994
],
"topId":22944,
"mac":"A1:A1:A5:A1:F1:27"
},
{
"deviceId":13685,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:34",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22996
],
"topId":22947,
"mac":"A1:A1:A5:A1:F1:28"
},
{
"deviceId":13686,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:34",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22998
],
"topId":22950,
"mac":"A1:A1:A5:A1:F1:29"
},
{
"deviceId":13687,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:34",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23000
],
"topId":22953,
"mac":"A1:A1:A5:A1:F1:30"
},
{
"deviceId":13688,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:34",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23002
],
"topId":22956,
"mac":"A1:A1:A5:A1:F1:31"
},
{
"deviceId":13689,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:34",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23004
],
"topId":22959,
"mac":"A1:A1:A5:A1:F1:32"
},
{
"deviceId":13690,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:34",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23006
],
"topId":22962,
"mac":"A1:A1:A5:A1:F1:33"
},
{
"deviceId":13691,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:34",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23008
],
"topId":22965,
"mac":"A1:A1:A5:A1:F1:34"
},
{
"deviceId":13692,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:34",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23010
],
"topId":22968,
"mac":"A1:A1:A5:A1:F1:35"
},
{
"deviceId":13694,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:34",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23014
],
"topId":22944,
"mac":"A1:A1:A5:A1:F1:37"
},
{
"deviceId":13695,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:34",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23016
],
"topId":22947,
"mac":"A1:A1:A5:A1:F1:38"
},
{
"deviceId":13696,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:34",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23018
],
"topId":22950,
"mac":"A1:A1:A5:A1:F1:39"
},
{
"deviceId":13697,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:34",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23020
],
"topId":22953,
"mac":"A1:A1:A5:A1:F1:40"
},
{
"deviceId":13698,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:34",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23022
],
"topId":22956,
"mac":"A1:A1:A5:A1:F1:41"
},
{
"deviceId":13699,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:35",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23024
],
"topId":22959,
"mac":"A1:A1:A5:A1:F1:42"
},
{
"deviceId":13700,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:35",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23026
],
"topId":22962,
"mac":"A1:A1:A5:A1:F1:43"
},
{
"deviceId":13701,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:35",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23028
],
"topId":22965,
"mac":"A1:A1:A5:A1:F1:44"
},
{
"deviceId":13702,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:35",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23030
],
"topId":22968,
"mac":"A1:A1:A5:A1:F1:45"
},
{
"deviceId":13704,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:35",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23034
],
"topId":22944,
"mac":"A1:A1:A5:A1:F1:47"
},
{
"deviceId":13705,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:35",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23036
],
"topId":22947,
"mac":"A1:A1:A5:A1:F1:48"
},
{
"deviceId":13706,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:35",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23038
],
"topId":22950,
"mac":"A1:A1:A5:A1:F1:49"
},
{
"deviceId":13707,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:35",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23040
],
"topId":22953,
"mac":"A1:A1:A5:A1:F1:50"
},
{
"deviceId":13708,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:35",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23042
],
"topId":22956,
"mac":"A1:A1:A5:A1:F1:51"
},
{
"deviceId":13709,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:35",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23044
],
"topId":22959,
"mac":"A1:A1:A5:A1:F1:52"
},
{
"deviceId":13710,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:35",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23046
],
"topId":22962,
"mac":"A1:A1:A5:A1:F1:53"
},
{
"deviceId":13711,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:35",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23048
],
"topId":22965,
"mac":"A1:A1:A5:A1:F1:54"
},
{
"deviceId":13712,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:36",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23050
],
"topId":22968,
"mac":"A1:A1:A5:A1:F1:55"
},
{
"deviceId":13714,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:36",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23054
],
"topId":22944,
"mac":"A1:A1:A5:A1:F1:57"
},
{
"deviceId":13715,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:36",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23056
],
"topId":22947,
"mac":"A1:A1:A5:A1:F1:58"
},
{
"deviceId":13716,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:36",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23058
],
"topId":22950,
"mac":"A1:A1:A5:A1:F1:59"
},
{
"deviceId":13717,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:36",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23060
],
"topId":22953,
"mac":"A1:A1:A5:A1:F1:60"
},
{
"deviceId":13718,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:36",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23062
],
"topId":22956,
"mac":"A1:A1:A5:A1:F1:61"
},
{
"deviceId":13719,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:36",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23064
],
"topId":22959,
"mac":"A1:A1:A5:A1:F1:62"
},
{
"deviceId":13720,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:36",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23066
],
"topId":22962,
"mac":"A1:A1:A5:A1:F1:63"
},
{
"deviceId":13721,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:36",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23068
],
"topId":22965,
"mac":"A1:A1:A5:A1:F1:64"
},
{
"deviceId":13722,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:36",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23070
],
"topId":22968,
"mac":"A1:A1:A5:A1:F1:65"
},
{
"deviceId":13724,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:36",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23074
],
"topId":22944,
"mac":"A1:A1:A5:A1:F1:67"
},
{
"deviceId":13725,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:36",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23076
],
"topId":22947,
"mac":"A1:A1:A5:A1:F1:68"
},
{
"deviceId":13726,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:37",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23078
],
"topId":22950,
"mac":"A1:A1:A5:A1:F1:69"
},
{
"deviceId":13727,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:37",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23080
],
"topId":22953,
"mac":"A1:A1:A5:A1:F1:70"
},
{
"deviceId":13797,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:42",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23020
],
"topId":22953,
"mac":"A1:A1:A5:B1:A1:41"
},
{
"deviceId":13798,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:42",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23022
],
"topId":22956,
"mac":"A1:A1:A5:B1:A1:42"
},
{
"deviceId":13602,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:28",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23030
],
"topId":22968,
"mac":"A1:A1:A5:A1:E1:44"
},
{
"deviceId":13604,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:28",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23034
],
"topId":22944,
"mac":"A1:A1:A5:A1:E1:46"
},
{
"deviceId":13605,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:28",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23036
],
"topId":22947,
"mac":"A1:A1:A5:A1:E1:47"
},
{
"deviceId":13606,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:28",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23038
],
"topId":22950,
"mac":"A1:A1:A5:A1:E1:48"
},
{
"deviceId":13607,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:28",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23040
],
"topId":22953,
"mac":"A1:A1:A5:A1:E1:49"
},
{
"deviceId":13608,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:28",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23042
],
"topId":22956,
"mac":"A1:A1:A5:A1:E1:50"
},
{
"deviceId":13609,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:28",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23044
],
"topId":22959,
"mac":"A1:A1:A5:A1:E1:51"
},
{
"deviceId":13610,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:28",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23046
],
"topId":22962,
"mac":"A1:A1:A5:A1:E1:52"
},
{
"deviceId":13611,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:28",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23048
],
"topId":22965,
"mac":"A1:A1:A5:A1:E1:53"
},
{
"deviceId":13612,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:28",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23050
],
"topId":22968,
"mac":"A1:A1:A5:A1:E1:54"
},
{
"deviceId":13614,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:28",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23054
],
"topId":22944,
"mac":"A1:A1:A5:A1:E1:56"
},
{
"deviceId":13615,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:29",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23056
],
"topId":22947,
"mac":"A1:A1:A5:A1:E1:57"
},
{
"deviceId":13616,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:29",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23058
],
"topId":22950,
"mac":"A1:A1:A5:A1:E1:58"
},
{
"deviceId":13617,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:29",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23060
],
"topId":22953,
"mac":"A1:A1:A5:A1:E1:59"
},
{
"deviceId":13618,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:29",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23062
],
"topId":22956,
"mac":"A1:A1:A5:A1:E1:60"
},
{
"deviceId":13619,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:29",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23064
],
"topId":22959,
"mac":"A1:A1:A5:A1:E1:61"
},
{
"deviceId":13620,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:29",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23066
],
"topId":22962,
"mac":"A1:A1:A5:A1:E1:62"
},
{
"deviceId":13621,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:29",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23068
],
"topId":22965,
"mac":"A1:A1:A5:A1:E1:63"
},
{
"deviceId":13622,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:29",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23070
],
"topId":22968,
"mac":"A1:A1:A5:A1:E1:64"
},
{
"deviceId":13624,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:29",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23074
],
"topId":22944,
"mac":"A1:A1:A5:A1:E1:66"
},
{
"deviceId":13625,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:29",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23076
],
"topId":22947,
"mac":"A1:A1:A5:A1:E1:67"
},
{
"deviceId":13626,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:29",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23078
],
"topId":22950,
"mac":"A1:A1:A5:A1:E1:68"
},
{
"deviceId":13627,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:29",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23080
],
"topId":22953,
"mac":"A1:A1:A5:A1:E1:69"
},
{
"deviceId":13628,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:29",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23082
],
"topId":22956,
"mac":"A1:A1:A5:A1:E1:70"
},
{
"deviceId":13629,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:30",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23084
],
"topId":22959,
"mac":"A1:A1:A5:A1:E1:71"
},
{
"deviceId":13630,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:30",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23086
],
"topId":22962,
"mac":"A1:A1:A5:A1:E1:72"
},
{
"deviceId":13631,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:30",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23088
],
"topId":22965,
"mac":"A1:A1:A5:A1:E1:73"
},
{
"deviceId":13632,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:30",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23090
],
"topId":22968,
"mac":"A1:A1:A5:A1:E1:74"
},
{
"deviceId":13634,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:30",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23094
],
"topId":22944,
"mac":"A1:A1:A5:A1:E1:76"
},
{
"deviceId":13635,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:30",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23096
],
"topId":22947,
"mac":"A1:A1:A5:A1:E1:77"
},
{
"deviceId":13636,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:30",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23098
],
"topId":22950,
"mac":"A1:A1:A5:A1:E1:78"
},
{
"deviceId":13637,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:30",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23100
],
"topId":22953,
"mac":"A1:A1:A5:A1:E1:79"
},
{
"deviceId":13638,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:30",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23102
],
"topId":22956,
"mac":"A1:A1:A5:A1:E1:80"
},
{
"deviceId":13639,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:30",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23104
],
"topId":22959,
"mac":"A1:A1:A5:A1:E1:81"
},
{
"deviceId":13640,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:30",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23106
],
"topId":22962,
"mac":"A1:A1:A5:A1:E1:82"
},
{
"deviceId":13641,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:30",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23108
],
"topId":22965,
"mac":"A1:A1:A5:A1:E1:83"
},
{
"deviceId":13642,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:30",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23110
],
"topId":22968,
"mac":"A1:A1:A5:A1:E1:84"
},
{
"deviceId":13644,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:31",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23114
],
"topId":22944,
"mac":"A1:A1:A5:A1:E1:86"
},
{
"deviceId":13645,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:31",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23116
],
"topId":22947,
"mac":"A1:A1:A5:A1:E1:87"
},
{
"deviceId":13646,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:31",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23118
],
"topId":22950,
"mac":"A1:A1:A5:A1:E1:88"
},
{
"deviceId":13647,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:31",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23120
],
"topId":22953,
"mac":"A1:A1:A5:A1:E1:89"
},
{
"deviceId":13648,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:31",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23122
],
"topId":22956,
"mac":"A1:A1:A5:A1:E1:90"
},
{
"deviceId":13649,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:31",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23124
],
"topId":22959,
"mac":"A1:A1:A5:A1:E1:91"
},
{
"deviceId":13650,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:31",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23126
],
"topId":22962,
"mac":"A1:A1:A5:A1:E1:92"
},
{
"deviceId":13651,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:31",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23128
],
"topId":22965,
"mac":"A1:A1:A5:A1:E1:93"
},
{
"deviceId":13652,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:31",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23130
],
"topId":22968,
"mac":"A1:A1:A5:A1:E1:94"
},
{
"deviceId":13654,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:31",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23134
],
"topId":22944,
"mac":"A1:A1:A5:A1:E1:96"
},
{
"deviceId":13655,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:31",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23136
],
"topId":22947,
"mac":"A1:A1:A5:A1:E1:97"
},
{
"deviceId":13656,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:31",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23138
],
"topId":22950,
"mac":"A1:A1:A5:A1:E1:98"
},
{
"deviceId":13657,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:32",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23140
],
"topId":22953,
"mac":"A1:A1:A5:A1:E1:99"
},
{
"deviceId":13658,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:32",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23142
],
"topId":22956,
"mac":"A1:A1:A5:A1:F1:01"
},
{
"deviceId":13659,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:32",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23144
],
"topId":22959,
"mac":"A1:A1:A5:A1:F1:02"
},
{
"deviceId":13660,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:32",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23146
],
"topId":22962,
"mac":"A1:A1:A5:A1:F1:03"
},
{
"deviceId":13661,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:32",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23148
],
"topId":22965,
"mac":"A1:A1:A5:A1:F1:04"
},
{
"deviceId":13662,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:32",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23150
],
"topId":22968,
"mac":"A1:A1:A5:A1:F1:05"
},
{
"deviceId":13664,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:32",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22946
],
"topId":22944,
"mac":"A1:A1:A5:A1:F1:07"
},
{
"deviceId":13665,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:32",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22949
],
"topId":22947,
"mac":"A1:A1:A5:A1:F1:08"
},
{
"deviceId":13666,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:32",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22952
],
"topId":22950,
"mac":"A1:A1:A5:A1:F1:09"
},
{
"deviceId":13667,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:32",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22955
],
"topId":22953,
"mac":"A1:A1:A5:A1:F1:10"
},
{
"deviceId":13668,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:32",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22958
],
"topId":22956,
"mac":"A1:A1:A5:A1:F1:11"
},
{
"deviceId":13669,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:32",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22961
],
"topId":22959,
"mac":"A1:A1:A5:A1:F1:12"
},
{
"deviceId":13670,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:33",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22964
],
"topId":22962,
"mac":"A1:A1:A5:A1:F1:13"
},
{
"deviceId":13671,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:33",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22967
],
"topId":22965,
"mac":"A1:A1:A5:A1:F1:14"
},
{
"deviceId":13672,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:33",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22970
],
"topId":22968,
"mac":"A1:A1:A5:A1:F1:15"
},
{
"deviceId":13674,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:33",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22974
],
"topId":22944,
"mac":"A1:A1:A5:A1:F1:17"
},
{
"deviceId":13675,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:33",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22976
],
"topId":22947,
"mac":"A1:A1:A5:A1:F1:18"
},
{
"deviceId":13676,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:33",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22978
],
"topId":22950,
"mac":"A1:A1:A5:A1:F1:19"
},
{
"deviceId":13677,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:33",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22980
],
"topId":22953,
"mac":"A1:A1:A5:A1:F1:20"
},
{
"deviceId":13678,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:33",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22982
],
"topId":22956,
"mac":"A1:A1:A5:A1:F1:21"
},
{
"deviceId":13679,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:33",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22984
],
"topId":22959,
"mac":"A1:A1:A5:A1:F1:22"
},
{
"deviceId":13680,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:33",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22986
],
"topId":22962,
"mac":"A1:A1:A5:A1:F1:23"
},
{
"deviceId":13681,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:33",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22988
],
"topId":22965,
"mac":"A1:A1:A5:A1:F1:24"
},
{
"deviceId":13728,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:37",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23082
],
"topId":22956,
"mac":"A1:A1:A5:A1:F1:71"
},
{
"deviceId":13729,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:37",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23084
],
"topId":22959,
"mac":"A1:A1:A5:A1:F1:72"
},
{
"deviceId":13730,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:37",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23086
],
"topId":22962,
"mac":"A1:A1:A5:A1:F1:73"
},
{
"deviceId":13731,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:37",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23088
],
"topId":22965,
"mac":"A1:A1:A5:A1:F1:74"
},
{
"deviceId":13732,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:37",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23090
],
"topId":22968,
"mac":"A1:A1:A5:A1:F1:75"
},
{
"deviceId":13734,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:37",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23094
],
"topId":22944,
"mac":"A1:A1:A5:A1:F1:77"
},
{
"deviceId":13735,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:37",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23096
],
"topId":22947,
"mac":"A1:A1:A5:A1:F1:78"
},
{
"deviceId":13736,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:37",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23098
],
"topId":22950,
"mac":"A1:A1:A5:A1:F1:79"
},
{
"deviceId":13737,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:37",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23100
],
"topId":22953,
"mac":"A1:A1:A5:A1:F1:80"
},
{
"deviceId":13738,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:37",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23102
],
"topId":22956,
"mac":"A1:A1:A5:A1:F1:81"
},
{
"deviceId":13739,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:37",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23104
],
"topId":22959,
"mac":"A1:A1:A5:A1:F1:82"
},
{
"deviceId":13740,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:38",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23106
],
"topId":22962,
"mac":"A1:A1:A5:A1:F1:83"
},
{
"deviceId":13741,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:38",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23108
],
"topId":22965,
"mac":"A1:A1:A5:A1:F1:84"
},
{
"deviceId":13742,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:38",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23110
],
"topId":22968,
"mac":"A1:A1:A5:A1:F1:85"
},
{
"deviceId":13744,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:38",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23114
],
"topId":22944,
"mac":"A1:A1:A5:A1:F1:87"
},
{
"deviceId":13745,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:38",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23116
],
"topId":22947,
"mac":"A1:A1:A5:A1:F1:88"
},
{
"deviceId":13746,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:38",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23118
],
"topId":22950,
"mac":"A1:A1:A5:A1:F1:89"
},
{
"deviceId":13747,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:38",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23120
],
"topId":22953,
"mac":"A1:A1:A5:A1:F1:90"
},
{
"deviceId":13748,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:38",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23122
],
"topId":22956,
"mac":"A1:A1:A5:A1:F1:91"
},
{
"deviceId":13749,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:38",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23124
],
"topId":22959,
"mac":"A1:A1:A5:A1:F1:92"
},
{
"deviceId":13750,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:38",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23126
],
"topId":22962,
"mac":"A1:A1:A5:A1:F1:93"
},
{
"deviceId":13751,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:38",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23128
],
"topId":22965,
"mac":"A1:A1:A5:A1:F1:94"
},
{
"deviceId":13752,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:38",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23130
],
"topId":22968,
"mac":"A1:A1:A5:A1:F1:95"
},
{
"deviceId":13754,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:39",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23134
],
"topId":22944,
"mac":"A1:A1:A5:A1:F1:97"
},
{
"deviceId":13755,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:39",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23136
],
"topId":22947,
"mac":"A1:A1:A5:A1:F1:98"
},
{
"deviceId":13756,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:39",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23138
],
"topId":22950,
"mac":"A1:A1:A5:A1:F1:99"
},
{
"deviceId":13757,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:39",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23140
],
"topId":22953,
"mac":"A1:A1:A5:B1:A1:01"
},
{
"deviceId":13758,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:39",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23142
],
"topId":22956,
"mac":"A1:A1:A5:B1:A1:02"
},
{
"deviceId":13759,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:39",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23144
],
"topId":22959,
"mac":"A1:A1:A5:B1:A1:03"
},
{
"deviceId":13760,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:39",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23146
],
"topId":22962,
"mac":"A1:A1:A5:B1:A1:04"
},
{
"deviceId":13761,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:39",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23148
],
"topId":22965,
"mac":"A1:A1:A5:B1:A1:05"
},
{
"deviceId":13762,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:39",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23150
],
"topId":22968,
"mac":"A1:A1:A5:B1:A1:06"
},
{
"deviceId":13764,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:39",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22946
],
"topId":22944,
"mac":"A1:A1:A5:B1:A1:08"
},
{
"deviceId":13765,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:39",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22949
],
"topId":22947,
"mac":"A1:A1:A5:B1:A1:09"
},
{
"deviceId":13766,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:39",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22952
],
"topId":22950,
"mac":"A1:A1:A5:B1:A1:10"
},
{
"deviceId":13767,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:40",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22955
],
"topId":22953,
"mac":"A1:A1:A5:B1:A1:11"
},
{
"deviceId":13768,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:40",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22958
],
"topId":22956,
"mac":"A1:A1:A5:B1:A1:12"
},
{
"deviceId":13769,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:40",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22961
],
"topId":22959,
"mac":"A1:A1:A5:B1:A1:13"
},
{
"deviceId":13770,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:40",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22964
],
"topId":22962,
"mac":"A1:A1:A5:B1:A1:14"
},
{
"deviceId":13771,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:40",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22967
],
"topId":22965,
"mac":"A1:A1:A5:B1:A1:15"
},
{
"deviceId":13772,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:40",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22970
],
"topId":22968,
"mac":"A1:A1:A5:B1:A1:16"
},
{
"deviceId":13774,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:40",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22974
],
"topId":22944,
"mac":"A1:A1:A5:B1:A1:18"
},
{
"deviceId":13775,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:40",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22976
],
"topId":22947,
"mac":"A1:A1:A5:B1:A1:19"
},
{
"deviceId":13776,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:40",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22978
],
"topId":22950,
"mac":"A1:A1:A5:B1:A1:20"
},
{
"deviceId":13777,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:40",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22980
],
"topId":22953,
"mac":"A1:A1:A5:B1:A1:21"
},
{
"deviceId":13778,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:40",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22982
],
"topId":22956,
"mac":"A1:A1:A5:B1:A1:22"
},
{
"deviceId":13779,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:40",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22984
],
"topId":22959,
"mac":"A1:A1:A5:B1:A1:23"
},
{
"deviceId":13780,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:41",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22986
],
"topId":22962,
"mac":"A1:A1:A5:B1:A1:24"
},
{
"deviceId":13781,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:41",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22988
],
"topId":22965,
"mac":"A1:A1:A5:B1:A1:25"
},
{
"deviceId":13782,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:41",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22990
],
"topId":22968,
"mac":"A1:A1:A5:B1:A1:26"
},
{
"deviceId":13784,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:41",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22994
],
"topId":22944,
"mac":"A1:A1:A5:B1:A1:28"
},
{
"deviceId":13785,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:41",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22996
],
"topId":22947,
"mac":"A1:A1:A5:B1:A1:29"
},
{
"deviceId":13786,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:41",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22998
],
"topId":22950,
"mac":"A1:A1:A5:B1:A1:30"
},
{
"deviceId":13787,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:41",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23000
],
"topId":22953,
"mac":"A1:A1:A5:B1:A1:31"
},
{
"deviceId":13788,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:41",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23002
],
"topId":22956,
"mac":"A1:A1:A5:B1:A1:32"
},
{
"deviceId":13789,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:41",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23004
],
"topId":22959,
"mac":"A1:A1:A5:B1:A1:33"
},
{
"deviceId":13790,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:41",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23006
],
"topId":22962,
"mac":"A1:A1:A5:B1:A1:34"
},
{
"deviceId":13791,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:41",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23008
],
"topId":22965,
"mac":"A1:A1:A5:B1:A1:35"
},
{
"deviceId":13792,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:41",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23010
],
"topId":22968,
"mac":"A1:A1:A5:B1:A1:36"
},
{
"deviceId":13794,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:42",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23014
],
"topId":22944,
"mac":"A1:A1:A5:B1:A1:38"
},
{
"deviceId":13795,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:42",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23016
],
"topId":22947,
"mac":"A1:A1:A5:B1:A1:39"
},
{
"deviceId":13796,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:42",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23018
],
"topId":22950,
"mac":"A1:A1:A5:B1:A1:40"
},
{
"deviceId":13799,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:42",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23024
],
"topId":22959,
"mac":"A1:A1:A5:B1:A1:43"
},
{
"deviceId":13800,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:42",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23026
],
"topId":22962,
"mac":"A1:A1:A5:B1:A1:44"
},
{
"deviceId":13801,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:42",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23028
],
"topId":22965,
"mac":"A1:A1:A5:B1:A1:45"
},
{
"deviceId":13802,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:42",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23030
],
"topId":22968,
"mac":"A1:A1:A5:B1:A1:46"
},
{
"deviceId":13804,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:42",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23034
],
"topId":22944,
"mac":"A1:A1:A5:B1:A1:48"
},
{
"deviceId":13805,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:42",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23036
],
"topId":22947,
"mac":"A1:A1:A5:B1:A1:49"
},
{
"deviceId":13806,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:42",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23038
],
"topId":22950,
"mac":"A1:A1:A5:B1:A1:50"
},
{
"deviceId":13807,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:42",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23040
],
"topId":22953,
"mac":"A1:A1:A5:B1:A1:51"
},
{
"deviceId":13808,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:43",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23042
],
"topId":22956,
"mac":"A1:A1:A5:B1:A1:52"
},
{
"deviceId":13809,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:43",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23044
],
"topId":22959,
"mac":"A1:A1:A5:B1:A1:53"
},
{
"deviceId":13810,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:43",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23046
],
"topId":22962,
"mac":"A1:A1:A5:B1:A1:54"
},
{
"deviceId":13811,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:43",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23048
],
"topId":22965,
"mac":"A1:A1:A5:B1:A1:55"
},
{
"deviceId":13812,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:43",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23050
],
"topId":22968,
"mac":"A1:A1:A5:B1:A1:56"
},
{
"deviceId":13814,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:43",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23054
],
"topId":22944,
"mac":"A1:A1:A5:B1:A1:58"
},
{
"deviceId":13815,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:43",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23056
],
"topId":22947,
"mac":"A1:A1:A5:B1:A1:59"
},
{
"deviceId":13816,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:43",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23058
],
"topId":22950,
"mac":"A1:A1:A5:B1:A1:60"
},
{
"deviceId":13817,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:43",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23060
],
"topId":22953,
"mac":"A1:A1:A5:B1:A1:61"
},
{
"deviceId":13818,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:43",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23062
],
"topId":22956,
"mac":"A1:A1:A5:B1:A1:62"
},
{
"deviceId":13819,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:43",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23064
],
"topId":22959,
"mac":"A1:A1:A5:B1:A1:63"
},
{
"deviceId":13820,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:43",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23066
],
"topId":22962,
"mac":"A1:A1:A5:B1:A1:64"
},
{
"deviceId":13821,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:44",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23068
],
"topId":22965,
"mac":"A1:A1:A5:B1:A1:65"
},
{
"deviceId":13822,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:44",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23070
],
"topId":22968,
"mac":"A1:A1:A5:B1:A1:66"
},
{
"deviceId":13824,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:44",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23074
],
"topId":22944,
"mac":"A1:A1:A5:B1:A1:68"
},
{
"deviceId":13825,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:44",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23076
],
"topId":22947,
"mac":"A1:A1:A5:B1:A1:69"
},
{
"deviceId":13826,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:44",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23078
],
"topId":22950,
"mac":"A1:A1:A5:B1:A1:70"
},
{
"deviceId":13827,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:44",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23080
],
"topId":22953,
"mac":"A1:A1:A5:B1:A1:71"
},
{
"deviceId":13828,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:44",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23082
],
"topId":22956,
"mac":"A1:A1:A5:B1:A1:72"
},
{
"deviceId":13829,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:44",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23084
],
"topId":22959,
"mac":"A1:A1:A5:B1:A1:73"
},
{
"deviceId":13830,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:44",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23086
],
"topId":22962,
"mac":"A1:A1:A5:B1:A1:74"
},
{
"deviceId":13831,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:44",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23088
],
"topId":22965,
"mac":"A1:A1:A5:B1:A1:75"
},
{
"deviceId":13832,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:44",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23090
],
"topId":22968,
"mac":"A1:A1:A5:B1:A1:76"
},
{
"deviceId":13834,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:44",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23094
],
"topId":22944,
"mac":"A1:A1:A5:B1:A1:78"
},
{
"deviceId":13835,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:44",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23096
],
"topId":22947,
"mac":"A1:A1:A5:B1:A1:79"
},
{
"deviceId":13836,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:45",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23098
],
"topId":22950,
"mac":"A1:A1:A5:B1:A1:80"
},
{
"deviceId":13837,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:45",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23100
],
"topId":22953,
"mac":"A1:A1:A5:B1:A1:81"
},
{
"deviceId":13838,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:45",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23102
],
"topId":22956,
"mac":"A1:A1:A5:B1:A1:82"
},
{
"deviceId":13839,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:45",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23104
],
"topId":22959,
"mac":"A1:A1:A5:B1:A1:83"
},
{
"deviceId":13840,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:45",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23106
],
"topId":22962,
"mac":"A1:A1:A5:B1:A1:84"
},
{
"deviceId":13841,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:45",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23108
],
"topId":22965,
"mac":"A1:A1:A5:B1:A1:85"
},
{
"deviceId":13842,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:45",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23110
],
"topId":22968,
"mac":"A1:A1:A5:B1:A1:86"
},
{
"deviceId":13844,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:45",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23114
],
"topId":22944,
"mac":"A1:A1:A5:B1:A1:88"
},
{
"deviceId":13845,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:45",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23116
],
"topId":22947,
"mac":"A1:A1:A5:B1:A1:89"
},
{
"deviceId":13846,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:45",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23118
],
"topId":22950,
"mac":"A1:A1:A5:B1:A1:90"
},
{
"deviceId":13847,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:45",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23120
],
"topId":22953,
"mac":"A1:A1:A5:B1:A1:91"
},
{
"deviceId":13848,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:45",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23122
],
"topId":22956,
"mac":"A1:A1:A5:B1:A1:92"
},
{
"deviceId":13849,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:46",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23124
],
"topId":22959,
"mac":"A1:A1:A5:B1:A1:93"
},
{
"deviceId":13850,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:46",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23126
],
"topId":22962,
"mac":"A1:A1:A5:B1:A1:94"
},
{
"deviceId":13851,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:46",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23128
],
"topId":22965,
"mac":"A1:A1:A5:B1:A1:95"
},
{
"deviceId":13852,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:46",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23130
],
"topId":22968,
"mac":"A1:A1:A5:B1:A1:96"
},
{
"deviceId":13854,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:46",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23134
],
"topId":22944,
"mac":"A1:A1:A5:B1:A1:98"
},
{
"deviceId":13855,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:46",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23136
],
"topId":22947,
"mac":"A1:A1:A5:B1:A1:99"
},
{
"deviceId":13856,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:47",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23136
],
"topId":22947,
"mac":"A1:A1:A5:C1:A1:01"
},
{
"deviceId":13857,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:47",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23138
],
"topId":22950,
"mac":"A1:A1:A5:C1:A1:02"
},
{
"deviceId":13858,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:47",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23140
],
"topId":22953,
"mac":"A1:A1:A5:C1:A1:03"
},
{
"deviceId":13859,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:47",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23142
],
"topId":22956,
"mac":"A1:A1:A5:C1:A1:04"
},
{
"deviceId":13860,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:48",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23144
],
"topId":22959,
"mac":"A1:A1:A5:C1:A1:05"
},
{
"deviceId":13861,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:48",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23146
],
"topId":22962,
"mac":"A1:A1:A5:C1:A1:06"
},
{
"deviceId":13862,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:48",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23148
],
"topId":22965,
"mac":"A1:A1:A5:C1:A1:07"
},
{
"deviceId":13863,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:48",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23150
],
"topId":22968,
"mac":"A1:A1:A5:C1:A1:08"
},
{
"deviceId":13865,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:48",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22946
],
"topId":22944,
"mac":"A1:A1:A5:C1:A1:10"
},
{
"deviceId":13866,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:48",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22949
],
"topId":22947,
"mac":"A1:A1:A5:C1:A1:11"
},
{
"deviceId":13867,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:48",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22952
],
"topId":22950,
"mac":"A1:A1:A5:C1:A1:12"
},
{
"deviceId":13868,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:48",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22955
],
"topId":22953,
"mac":"A1:A1:A5:C1:A1:13"
},
{
"deviceId":13869,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:48",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22958
],
"topId":22956,
"mac":"A1:A1:A5:C1:A1:14"
},
{
"deviceId":13870,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:48",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22961
],
"topId":22959,
"mac":"A1:A1:A5:C1:A1:15"
},
{
"deviceId":13871,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:48",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22964
],
"topId":22962,
"mac":"A1:A1:A5:C1:A1:16"
},
{
"deviceId":13872,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:48",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22967
],
"topId":22965,
"mac":"A1:A1:A5:C1:A1:17"
},
{
"deviceId":13873,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:48",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22970
],
"topId":22968,
"mac":"A1:A1:A5:C1:A1:18"
},
{
"deviceId":13875,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:49",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22974
],
"topId":22944,
"mac":"A1:A1:A5:C1:A1:20"
},
{
"deviceId":13876,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:49",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22976
],
"topId":22947,
"mac":"A1:A1:A5:C1:A1:21"
},
{
"deviceId":13877,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:49",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22978
],
"topId":22950,
"mac":"A1:A1:A5:C1:A1:22"
},
{
"deviceId":13878,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:49",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22980
],
"topId":22953,
"mac":"A1:A1:A5:C1:A1:23"
},
{
"deviceId":13879,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:49",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22982
],
"topId":22956,
"mac":"A1:A1:A5:C1:A1:24"
},
{
"deviceId":13880,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:49",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22984
],
"topId":22959,
"mac":"A1:A1:A5:C1:A1:25"
},
{
"deviceId":13881,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:49",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22986
],
"topId":22962,
"mac":"A1:A1:A5:C1:A1:26"
},
{
"deviceId":13882,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:49",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22988
],
"topId":22965,
"mac":"A1:A1:A5:C1:A1:27"
},
{
"deviceId":13883,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:49",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22990
],
"topId":22968,
"mac":"A1:A1:A5:C1:A1:28"
},
{
"deviceId":13885,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:49",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22994
],
"topId":22944,
"mac":"A1:A1:A5:C1:A1:30"
},
{
"deviceId":13886,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:49",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22996
],
"topId":22947,
"mac":"A1:A1:A5:C1:A1:31"
},
{
"deviceId":13887,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:49",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22998
],
"topId":22950,
"mac":"A1:A1:A5:C1:A1:32"
},
{
"deviceId":13888,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:49",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23000
],
"topId":22953,
"mac":"A1:A1:A5:C1:A1:33"
},
{
"deviceId":13889,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:49",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23002
],
"topId":22956,
"mac":"A1:A1:A5:C1:A1:34"
},
{
"deviceId":13890,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:50",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23004
],
"topId":22959,
"mac":"A1:A1:A5:C1:A1:35"
},
{
"deviceId":13891,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:50",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23006
],
"topId":22962,
"mac":"A1:A1:A5:C1:A1:36"
},
{
"deviceId":13892,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:50",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23008
],
"topId":22965,
"mac":"A1:A1:A5:C1:A1:37"
},
{
"deviceId":13893,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:50",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23010
],
"topId":22968,
"mac":"A1:A1:A5:C1:A1:38"
},
{
"deviceId":13895,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:50",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23014
],
"topId":22944,
"mac":"A1:A1:A5:C1:A1:40"
},
{
"deviceId":13896,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:50",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23016
],
"topId":22947,
"mac":"A1:A1:A5:C1:A1:41"
},
{
"deviceId":13897,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:50",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23018
],
"topId":22950,
"mac":"A1:A1:A5:C1:A1:42"
},
{
"deviceId":13898,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:50",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23020
],
"topId":22953,
"mac":"A1:A1:A5:C1:A1:43"
},
{
"deviceId":13899,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:50",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23022
],
"topId":22956,
"mac":"A1:A1:A5:C1:A1:44"
},
{
"deviceId":13900,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:50",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23024
],
"topId":22959,
"mac":"A1:A1:A5:C1:A1:45"
},
{
"deviceId":13901,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:50",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23026
],
"topId":22962,
"mac":"A1:A1:A5:C1:A1:46"
},
{
"deviceId":13902,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:50",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23028
],
"topId":22965,
"mac":"A1:A1:A5:C1:A1:47"
},
{
"deviceId":13903,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:51",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23030
],
"topId":22968,
"mac":"A1:A1:A5:C1:A1:48"
},
{
"deviceId":13905,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:51",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23034
],
"topId":22944,
"mac":"A1:A1:A5:C1:A1:50"
},
{
"deviceId":13906,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:51",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23036
],
"topId":22947,
"mac":"A1:A1:A5:C1:A1:51"
},
{
"deviceId":13907,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:51",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23038
],
"topId":22950,
"mac":"A1:A1:A5:C1:A1:52"
},
{
"deviceId":13908,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:51",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23040
],
"topId":22953,
"mac":"A1:A1:A5:C1:A1:53"
},
{
"deviceId":13909,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:51",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23042
],
"topId":22956,
"mac":"A1:A1:A5:C1:A1:54"
},
{
"deviceId":13910,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:51",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23044
],
"topId":22959,
"mac":"A1:A1:A5:C1:A1:55"
},
{
"deviceId":13911,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:51",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23046
],
"topId":22962,
"mac":"A1:A1:A5:C1:A1:56"
},
{
"deviceId":13912,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:51",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23048
],
"topId":22965,
"mac":"A1:A1:A5:C1:A1:57"
},
{
"deviceId":13913,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:51",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23050
],
"topId":22968,
"mac":"A1:A1:A5:C1:A1:58"
},
{
"deviceId":13915,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:51",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23054
],
"topId":22944,
"mac":"A1:A1:A5:C1:A1:60"
},
{
"deviceId":13916,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:52",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23056
],
"topId":22947,
"mac":"A1:A1:A5:C1:A1:61"
},
{
"deviceId":13917,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:52",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23058
],
"topId":22950,
"mac":"A1:A1:A5:C1:A1:62"
},
{
"deviceId":13918,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:52",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23060
],
"topId":22953,
"mac":"A1:A1:A5:C1:A1:63"
},
{
"deviceId":13919,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:52",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23062
],
"topId":22956,
"mac":"A1:A1:A5:C1:A1:64"
},
{
"deviceId":13920,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:52",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23064
],
"topId":22959,
"mac":"A1:A1:A5:C1:A1:65"
},
{
"deviceId":13921,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:52",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23066
],
"topId":22962,
"mac":"A1:A1:A5:C1:A1:66"
},
{
"deviceId":13922,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:52",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23068
],
"topId":22965,
"mac":"A1:A1:A5:C1:A1:67"
},
{
"deviceId":13923,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:52",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23070
],
"topId":22968,
"mac":"A1:A1:A5:C1:A1:68"
},
{
"deviceId":13925,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:52",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23074
],
"topId":22944,
"mac":"A1:A1:A5:C1:A1:70"
},
{
"deviceId":13926,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:52",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23076
],
"topId":22947,
"mac":"A1:A1:A5:C1:A1:71"
},
{
"deviceId":13927,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:52",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23078
],
"topId":22950,
"mac":"A1:A1:A5:C1:A1:72"
},
{
"deviceId":13928,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:52",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23080
],
"topId":22953,
"mac":"A1:A1:A5:C1:A1:73"
},
{
"deviceId":13929,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:52",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23082
],
"topId":22956,
"mac":"A1:A1:A5:C1:A1:74"
},
{
"deviceId":13930,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:53",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23084
],
"topId":22959,
"mac":"A1:A1:A5:C1:A1:75"
},
{
"deviceId":13931,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:53",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23086
],
"topId":22962,
"mac":"A1:A1:A5:C1:A1:76"
},
{
"deviceId":13932,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:53",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23088
],
"topId":22965,
"mac":"A1:A1:A5:C1:A1:77"
},
{
"deviceId":13933,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:53",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23090
],
"topId":22968,
"mac":"A1:A1:A5:C1:A1:78"
},
{
"deviceId":13935,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:53",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23094
],
"topId":22944,
"mac":"A1:A1:A5:C1:A1:80"
},
{
"deviceId":13936,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:53",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23096
],
"topId":22947,
"mac":"A1:A1:A5:C1:A1:81"
},
{
"deviceId":13937,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:53",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23098
],
"topId":22950,
"mac":"A1:A1:A5:C1:A1:82"
},
{
"deviceId":13938,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:53",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23100
],
"topId":22953,
"mac":"A1:A1:A5:C1:A1:83"
},
{
"deviceId":13939,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:53",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23102
],
"topId":22956,
"mac":"A1:A1:A5:C1:A1:84"
},
{
"deviceId":13940,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:53",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23104
],
"topId":22959,
"mac":"A1:A1:A5:C1:A1:85"
},
{
"deviceId":13941,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:53",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23106
],
"topId":22962,
"mac":"A1:A1:A5:C1:A1:86"
},
{
"deviceId":13942,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:53",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23108
],
"topId":22965,
"mac":"A1:A1:A5:C1:A1:87"
},
{
"deviceId":13943,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:53",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23110
],
"topId":22968,
"mac":"A1:A1:A5:C1:A1:88"
},
{
"deviceId":13945,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:54",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23114
],
"topId":22944,
"mac":"A1:A1:A5:C1:A1:90"
},
{
"deviceId":13946,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:54",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23116
],
"topId":22947,
"mac":"A1:A1:A5:C1:A1:91"
},
{
"deviceId":13947,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:54",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23118
],
"topId":22950,
"mac":"A1:A1:A5:C1:A1:92"
},
{
"deviceId":13948,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:54",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23120
],
"topId":22953,
"mac":"A1:A1:A5:C1:A1:93"
},
{
"deviceId":13949,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:54",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23122
],
"topId":22956,
"mac":"A1:A1:A5:C1:A1:94"
},
{
"deviceId":13950,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:54",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23124
],
"topId":22959,
"mac":"A1:A1:A5:C1:A1:95"
},
{
"deviceId":13951,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:54",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23126
],
"topId":22962,
"mac":"A1:A1:A5:C1:A1:96"
},
{
"deviceId":13952,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:54",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23128
],
"topId":22965,
"mac":"A1:A1:A5:C1:A1:97"
},
{
"deviceId":13953,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:54",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23130
],
"topId":22968,
"mac":"A1:A1:A5:C1:A1:98"
},
{
"deviceId":13955,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:54",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23134
],
"topId":22944,
"mac":"A1:A1:A5:D1:A1:01"
},
{
"deviceId":13956,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:55",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23136
],
"topId":22947,
"mac":"A1:A1:A5:D1:A1:02"
},
{
"deviceId":13957,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:55",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23138
],
"topId":22950,
"mac":"A1:A1:A5:D1:A1:03"
},
{
"deviceId":13958,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:55",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23140
],
"topId":22953,
"mac":"A1:A1:A5:D1:A1:04"
},
{
"deviceId":13959,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:55",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23142
],
"topId":22956,
"mac":"A1:A1:A5:D1:A1:05"
},
{
"deviceId":13960,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:55",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23144
],
"topId":22959,
"mac":"A1:A1:A5:D1:A1:06"
},
{
"deviceId":13961,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:55",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23146
],
"topId":22962,
"mac":"A1:A1:A5:D1:A1:07"
},
{
"deviceId":13962,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:55",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23148
],
"topId":22965,
"mac":"A1:A1:A5:D1:A1:08"
},
{
"deviceId":13963,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:55",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23150
],
"topId":22968,
"mac":"A1:A1:A5:D1:A1:09"
},
{
"deviceId":13965,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:55",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22946
],
"topId":22944,
"mac":"A1:A1:A5:D1:A1:11"
},
{
"deviceId":13966,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:55",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22949
],
"topId":22947,
"mac":"A1:A1:A5:D1:A1:12"
},
{
"deviceId":13967,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:55",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22952
],
"topId":22950,
"mac":"A1:A1:A5:D1:A1:13"
},
{
"deviceId":13968,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:56",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22955
],
"topId":22953,
"mac":"A1:A1:A5:D1:A1:14"
},
{
"deviceId":13969,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:56",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22958
],
"topId":22956,
"mac":"A1:A1:A5:D1:A1:15"
},
{
"deviceId":13970,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:56",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22961
],
"topId":22959,
"mac":"A1:A1:A5:D1:A1:16"
},
{
"deviceId":13971,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:56",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22964
],
"topId":22962,
"mac":"A1:A1:A5:D1:A1:17"
},
{
"deviceId":13972,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:56",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22967
],
"topId":22965,
"mac":"A1:A1:A5:D1:A1:18"
},
{
"deviceId":13973,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:56",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22970
],
"topId":22968,
"mac":"A1:A1:A5:D1:A1:19"
},
{
"deviceId":13975,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:56",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22974
],
"topId":22944,
"mac":"A1:A1:A5:D1:A1:21"
},
{
"deviceId":13976,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:56",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22976
],
"topId":22947,
"mac":"A1:A1:A5:D1:A1:22"
},
{
"deviceId":13977,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:56",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22978
],
"topId":22950,
"mac":"A1:A1:A5:D1:A1:23"
},
{
"deviceId":13978,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:56",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22980
],
"topId":22953,
"mac":"A1:A1:A5:D1:A1:24"
},
{
"deviceId":13979,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:56",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22982
],
"topId":22956,
"mac":"A1:A1:A5:D1:A1:25"
},
{
"deviceId":13980,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:56",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22984
],
"topId":22959,
"mac":"A1:A1:A5:D1:A1:26"
},
{
"deviceId":13981,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:56",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22986
],
"topId":22962,
"mac":"A1:A1:A5:D1:A1:27"
},
{
"deviceId":13982,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:57",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22988
],
"topId":22965,
"mac":"A1:A1:A5:D1:A1:28"
},
{
"deviceId":13983,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:57",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22990
],
"topId":22968,
"mac":"A1:A1:A5:D1:A1:29"
},
{
"deviceId":13985,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:57",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22994
],
"topId":22944,
"mac":"A1:A1:A5:D1:A1:31"
},
{
"deviceId":13986,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:57",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22996
],
"topId":22947,
"mac":"A1:A1:A5:D1:A1:32"
},
{
"deviceId":13987,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:57",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22998
],
"topId":22950,
"mac":"A1:A1:A5:D1:A1:33"
},
{
"deviceId":13988,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:57",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23000
],
"topId":22953,
"mac":"A1:A1:A5:D1:A1:34"
},
{
"deviceId":13989,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:57",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23002
],
"topId":22956,
"mac":"A1:A1:A5:D1:A1:35"
},
{
"deviceId":13990,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:57",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23004
],
"topId":22959,
"mac":"A1:A1:A5:D1:A1:36"
},
{
"deviceId":13991,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:57",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23006
],
"topId":22962,
"mac":"A1:A1:A5:D1:A1:37"
},
{
"deviceId":13992,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:57",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23008
],
"topId":22965,
"mac":"A1:A1:A5:D1:A1:38"
},
{
"deviceId":13993,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:57",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23010
],
"topId":22968,
"mac":"A1:A1:A5:D1:A1:39"
},
{
"deviceId":13995,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:58",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23014
],
"topId":22944,
"mac":"A1:A1:A5:D1:A1:41"
},
{
"deviceId":13996,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:58",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23016
],
"topId":22947,
"mac":"A1:A1:A5:D1:A1:42"
},
{
"deviceId":13997,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:58",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23018
],
"topId":22950,
"mac":"A1:A1:A5:D1:A1:43"
},
{
"deviceId":13998,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:58",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23020
],
"topId":22953,
"mac":"A1:A1:A5:D1:A1:44"
},
{
"deviceId":13999,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:58",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23022
],
"topId":22956,
"mac":"A1:A1:A5:D1:A1:45"
},
{
"deviceId":14000,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:58",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23024
],
"topId":22959,
"mac":"A1:A1:A5:D1:A1:46"
},
{
"deviceId":14001,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:58",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23026
],
"topId":22962,
"mac":"A1:A1:A5:D1:A1:47"
},
{
"deviceId":14002,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:58",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23028
],
"topId":22965,
"mac":"A1:A1:A5:D1:A1:48"
},
{
"deviceId":14003,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:58",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23030
],
"topId":22968,
"mac":"A1:A1:A5:D1:A1:49"
},
{
"deviceId":14005,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:58",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23034
],
"topId":22944,
"mac":"A1:A1:A5:D1:A1:51"
},
{
"deviceId":14006,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:59",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23036
],
"topId":22947,
"mac":"A1:A1:A5:D1:A1:52"
},
{
"deviceId":14007,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:59",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23038
],
"topId":22950,
"mac":"A1:A1:A5:D1:A1:53"
},
{
"deviceId":14008,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:59",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23040
],
"topId":22953,
"mac":"A1:A1:A5:D1:A1:54"
},
{
"deviceId":14009,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:59",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23042
],
"topId":22956,
"mac":"A1:A1:A5:D1:A1:55"
},
{
"deviceId":14010,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:59",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23044
],
"topId":22959,
"mac":"A1:A1:A5:D1:A1:56"
},
{
"deviceId":14011,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:59",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23046
],
"topId":22962,
"mac":"A1:A1:A5:D1:A1:57"
},
{
"deviceId":14012,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:59",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23048
],
"topId":22965,
"mac":"A1:A1:A5:D1:A1:58"
},
{
"deviceId":14013,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:59",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23050
],
"topId":22968,
"mac":"A1:A1:A5:D1:A1:59"
},
{
"deviceId":14015,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:59",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23054
],
"topId":22944,
"mac":"A1:A1:A5:D1:A1:61"
},
{
"deviceId":14016,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:59",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23056
],
"topId":22947,
"mac":"A1:A1:A5:D1:A1:62"
},
{
"deviceId":14017,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:59",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23058
],
"topId":22950,
"mac":"A1:A1:A5:D1:A1:63"
},
{
"deviceId":14018,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:59",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23060
],
"topId":22953,
"mac":"A1:A1:A5:D1:A1:64"
},
{
"deviceId":14019,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:30:59",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23062
],
"topId":22956,
"mac":"A1:A1:A5:D1:A1:65"
},
{
"deviceId":14020,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:31:00",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23064
],
"topId":22959,
"mac":"A1:A1:A5:D1:A1:66"
},
{
"deviceId":14021,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:31:00",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23066
],
"topId":22962,
"mac":"A1:A1:A5:D1:A1:67"
},
{
"deviceId":14022,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:31:00",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23068
],
"topId":22965,
"mac":"A1:A1:A5:D1:A1:68"
},
{
"deviceId":14023,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:31:00",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23070
],
"topId":22968,
"mac":"A1:A1:A5:D1:A1:69"
},
{
"deviceId":14025,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:31:00",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23074
],
"topId":22944,
"mac":"A1:A1:A5:D1:A1:71"
},
{
"deviceId":14026,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:31:00",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23076
],
"topId":22947,
"mac":"A1:A1:A5:D1:A1:72"
},
{
"deviceId":14027,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:31:00",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23078
],
"topId":22950,
"mac":"A1:A1:A5:D1:A1:73"
},
{
"deviceId":14028,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:31:00",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23080
],
"topId":22953,
"mac":"A1:A1:A5:D1:A1:74"
},
{
"deviceId":14029,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:31:00",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23082
],
"topId":22956,
"mac":"A1:A1:A5:D1:A1:75"
},
{
"deviceId":14030,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:31:00",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23084
],
"topId":22959,
"mac":"A1:A1:A5:D1:A1:76"
},
{
"deviceId":14031,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:31:00",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23086
],
"topId":22962,
"mac":"A1:A1:A5:D1:A1:77"
},
{
"deviceId":14032,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:31:00",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23088
],
"topId":22965,
"mac":"A1:A1:A5:D1:A1:78"
},
{
"deviceId":14033,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:31:01",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23090
],
"topId":22968,
"mac":"A1:A1:A5:D1:A1:79"
},
{
"deviceId":14035,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:31:01",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23094
],
"topId":22944,
"mac":"A1:A1:A5:D1:A1:81"
},
{
"deviceId":14036,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:31:01",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23096
],
"topId":22947,
"mac":"A1:A1:A5:D1:A1:82"
},
{
"deviceId":14037,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:31:01",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23098
],
"topId":22950,
"mac":"A1:A1:A5:D1:A1:83"
},
{
"deviceId":14038,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:31:01",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23100
],
"topId":22953,
"mac":"A1:A1:A5:D1:A1:84"
},
{
"deviceId":14039,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:31:01",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23102
],
"topId":22956,
"mac":"A1:A1:A5:D1:A1:85"
},
{
"deviceId":14040,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:31:01",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23104
],
"topId":22959,
"mac":"A1:A1:A5:D1:A1:86"
},
{
"deviceId":14041,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:31:01",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23106
],
"topId":22962,
"mac":"A1:A1:A5:D1:A1:87"
},
{
"deviceId":14042,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:31:01",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23108
],
"topId":22965,
"mac":"A1:A1:A5:D1:A1:88"
},
{
"deviceId":14043,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:31:01",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23110
],
"topId":22968,
"mac":"A1:A1:A5:D1:A1:89"
},
{
"deviceId":14045,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:31:01",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23114
],
"topId":22944,
"mac":"A1:A1:A5:D1:A1:91"
},
{
"deviceId":14046,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:31:01",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23116
],
"topId":22947,
"mac":"A1:A1:A5:D1:A1:92"
},
{
"deviceId":14047,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:31:01",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23118
],
"topId":22950,
"mac":"A1:A1:A5:D1:A1:93"
},
{
"deviceId":14048,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:31:02",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23120
],
"topId":22953,
"mac":"A1:A1:A5:D1:A1:94"
},
{
"deviceId":14049,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:31:02",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23122
],
"topId":22956,
"mac":"A1:A1:A5:D1:A1:95"
},
{
"deviceId":14050,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:31:02",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23124
],
"topId":22959,
"mac":"A1:A1:A5:D1:A1:96"
},
{
"deviceId":14051,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:31:02",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23126
],
"topId":22962,
"mac":"A1:A1:A5:D1:A1:97"
},
{
"deviceId":14052,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:31:02",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23128
],
"topId":22965,
"mac":"A1:A1:A5:D1:A1:98"
},
{
"deviceId":14053,
"sn":None,
"deviceName":"AP设备",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-08 15:31:02",
"remark":"",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23130
],
"topId":22968,
"mac":"A1:A1:A5:E1:A1:99"
},
{
"deviceId":4973,
"sn":None,
"deviceName":"ss",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-03-12 18:22:54",
"remark":"ssss",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
19905
],
"topId":19903,
"mac":"59:31:32:32:31:F2"
},
{
"deviceId":4974,
"sn":None,
"deviceName":"ss",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-03-12 18:22:54",
"remark":"ssss",
"isOnline":False,
"deviceType":3,
"bindAreaIds":[
19905
],
"topId":19903,
"mac":"59:31:32:32:31:F3"
},
{
"deviceId":4949,
"sn":None,
"deviceName":"ss",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-03-11 15:00:15",
"remark":"ssss",
"isOnline":False,
"deviceType":3,
"bindAreaIds":[
19881
],
"topId":19879,
"mac":"59:33:32:32:31:F3"
},
{
"deviceId":4941,
"sn":None,
"deviceName":"ss",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-03-08 14:45:20",
"remark":"ssss",
"isOnline":False,
"deviceType":3,
"bindAreaIds":[
19857
],
"topId":19855,
"mac":"49:33:32:32:31:F5"
},
{
"deviceId":4940,
"sn":None,
"deviceName":"ss",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-03-08 14:45:20",
"remark":"ssss",
"isOnline":False,
"deviceType":3,
"bindAreaIds":[
19858
],
"topId":19855,
"mac":"49:33:32:32:31:F3"
},
{
"deviceId":4948,
"sn":None,
"deviceName":"ss",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-03-11 15:00:15",
"remark":"ssss",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
19882
],
"topId":19879,
"mac":"59:33:32:32:31:F2"
},
{
"deviceId":4719,
"sn":None,
"deviceName":"ss",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-03-06 11:11:17",
"remark":None,
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
13657
],
"topId":13655,
"mac":"33:44:55:66:77:88"
},
{
"deviceId":4992,
"sn":None,
"deviceName":"ss",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-03-15 11:03:50",
"remark":"ssss",
"isOnline":False,
"deviceType":3,
"bindAreaIds":[
19927
],
"topId":19925,
"mac":"12:22:33:44:50:AA"
},
{
"deviceId":5128,
"sn":None,
"deviceName":"ss",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-03-23 14:49:38",
"remark":"sf",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
20138
],
"topId":20135,
"mac":"33:33:93:33:33:33"
},
{
"deviceId":5127,
"sn":None,
"deviceName":"ss",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-03-23 14:37:27",
"remark":"sf",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
20137
],
"topId":20135,
"mac":"EE:11:11:22:11:11"
},
{
"deviceId":5110,
"sn":None,
"deviceName":"ss",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-03-22 18:32:06",
"remark":None,
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
13657
],
"topId":13655,
"mac":"1E:11:11:11:11:11"
},
{
"deviceId":5108,
"sn":None,
"deviceName":"ss",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-03-22 18:18:30",
"remark":None,
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
13657
],
"topId":13655,
"mac":"11:A1:11:11:11:33"
},
{
"deviceId":5107,
"sn":None,
"deviceName":"ss",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-03-22 18:14:59",
"remark":None,
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
13657
],
"topId":13655,
"mac":"A1:11:11:11:11:33"
},
{
"deviceId":5106,
"sn":None,
"deviceName":"ss",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-03-22 17:06:21",
"remark":None,
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
19867
],
"topId":13655,
"mac":"33:33:33:33:33:31"
},
{
"deviceId":5105,
"sn":None,
"deviceName":"ss",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-03-22 16:51:48",
"remark":None,
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
13658
],
"topId":13655,
"mac":"99:32:34:32:32:66"
},
{
"deviceId":14062,
"sn":None,
"deviceName":"ss",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-09 11:01:18",
"remark":None,
"isOnline":False,
"deviceType":3,
"bindAreaIds":[
20145
],
"topId":13655,
"mac":"A1:11:11:11:11:11"
},
{
"deviceId":6325,
"sn":None,
"deviceName":"04:8B:FF:00:00:02",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-03-27 15:02:23",
"remark":None,
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
18384
],
"topId":18382,
"mac":"04:8B:FF:00:00:02"
},
{
"deviceId":5777,
"sn":None,
"deviceName":"04:8B:FF:00:00:01",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-03-27 15:00:21",
"remark":None,
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
18384
],
"topId":18382,
"mac":"04:8B:FF:00:00:01"
},
{
"deviceId":7242,
"sn":"8152017030900031",
"deviceName":"chenyan3320",
"softwareVersion":"AmOS-3.6.1.300P12C5",
"deviceModel":"WIA3300-20",
"createDate":"2019-03-27 17:41:07",
"remark":None,
"isOnline":False,
"deviceType":3,
"bindAreaIds":[
20055
],
"topId":19981,
"mac":"04:8B:42:30:00:A0"
},
{
"deviceId":3488,
"sn":None,
"deviceName":"testes11t1",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-02-20 21:16:51",
"remark":"aaaa11",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
13658
],
"topId":13655,
"mac":"44:22:11:AA:BB:CE"
},
{
"deviceId":3487,
"sn":None,
"deviceName":"testest",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-02-20 21:16:49",
"remark":"aaaa",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
13658
],
"topId":13655,
"mac":"44:22:11:AA:BB:CF"
},
{
"deviceId":3485,
"sn":None,
"deviceName":"testest",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-02-20 21:13:42",
"remark":"aaaa",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
13657
],
"topId":13655,
"mac":"44:22:11:AA:BB:CC"
},
{
"deviceId":3746,
"sn":None,
"deviceName":"ss",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-03-01 17:36:06",
"remark":"ssss",
"isOnline":False,
"deviceType":3,
"bindAreaIds":[
13658
],
"topId":13655,
"mac":"99:32:32:32:32:66"
},
{
"deviceId":10659,
"sn":None,
"deviceName":"44:55:66:77:88:99",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-02 10:11:38",
"remark":None,
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22484
],
"topId":18382,
"mac":"44:55:66:77:88:99"
},
{
"deviceId":14082,
"sn":None,
"deviceName":"111",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-09 13:49:28",
"remark":None,
"isOnline":False,
"deviceType":3,
"bindAreaIds":[
23169
],
"topId":22194,
"mac":"34:8B:48:99:88:85"
},
{
"deviceId":14083,
"sn":None,
"deviceName":"222",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-09 13:54:19",
"remark":None,
"isOnline":False,
"deviceType":3,
"bindAreaIds":[
23170
],
"topId":22194,
"mac":"04:4B:68:99:88:85"
},
{
"deviceId":14084,
"sn":None,
"deviceName":"33333",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-09 13:56:44",
"remark":None,
"isOnline":False,
"deviceType":3,
"bindAreaIds":[
23171
],
"topId":22194,
"mac":"04:8E:48:99:88:85"
},
{
"deviceId":14085,
"sn":None,
"deviceName":"4444",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-09 13:58:59",
"remark":None,
"isOnline":False,
"deviceType":3,
"bindAreaIds":[
23172
],
"topId":22194,
"mac":"04:3C:48:99:88:85"
},
{
"deviceId":5531,
"sn":"8152017060700509",
"deviceName":"04:8b:42:30:0b:a0",
"softwareVersion":"AmOS-3.6.1.200P05C5",
"deviceModel":"WIA3300-20",
"createDate":"2019-03-27 14:47:05",
"remark":"04:8b:42:30:0b:a0",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
22508
],
"topId":14807,
"mac":"04:8B:42:30:0B:A0"
},
{
"deviceId":7241,
"sn":"811201511180001L",
"deviceName":"WIA3300-80",
"softwareVersion":"AmOS-2.5.7.016",
"deviceModel":"WIA3200-80",
"createDate":"2019-03-27 17:02:49",
"remark":"wave1",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
19877
],
"topId":19875,
"mac":"04:8B:42:22:75:3C"
},
{
"deviceId":8166,
"sn":"8152017030900015",
"deviceName":"WIA3300-20",
"softwareVersion":"AmOS-3.6.2.021C5",
"deviceModel":"WIA3300-20",
"createDate":"2019-04-01 14:03:05",
"remark":None,
"isOnline":True,
"deviceType":3,
"bindAreaIds":[
19877
],
"topId":19875,
"mac":"04:8B:42:30:01:4A"
},
{
"deviceId":8138,
"sn":"8112016031400008",
"deviceName":"chenyan80s",
"softwareVersion":"AmOS-3.5.6.300P11C3",
"deviceModel":"WIA3200-80S",
"createDate":"2019-03-27 18:27:45",
"remark":None,
"isOnline":False,
"deviceType":3,
"bindAreaIds":[
20009
],
"topId":19981,
"mac":"04:8B:42:23:E5:9D"
},
{
"deviceId":14090,
"sn":"8152017030900016",
"deviceName":"04:8B:42:30:00:50",
"softwareVersion":"AmOS-3.6.2.2498C5",
"deviceModel":"WIA3300-20",
"createDate":"2019-04-12 20:57:48",
"remark":"WIA3300-20",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
23175
],
"topId":8655,
"mac":"04:8B:42:30:00:50"
},
{
"deviceId":8142,
"sn":"201711301554943",
"deviceName":"iAP60",
"softwareVersion":"AmOS-3.6.2.023C7",
"deviceModel":"WIA3300-60",
"createDate":"2019-03-28 10:38:48",
"remark":None,
"isOnline":False,
"deviceType":3,
"bindAreaIds":[
22192
],
"topId":22190,
"mac":"04:8B:32:11:10:60"
},
{
"deviceId":8167,
"sn":"048b21C79820",
"deviceName":"ap3320-liyi",
"softwareVersion":"AmOS-3.6.2.024C5",
"deviceModel":"WIA3300-20",
"createDate":"2019-04-01 14:36:12",
"remark":None,
"isOnline":False,
"deviceType":3,
"bindAreaIds":[
22261
],
"topId":2,
"mac":"04:8B:21:C7:98:20"
},
{
"deviceId":14088,
"sn":None,
"deviceName":"2222222",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-10 09:48:51",
"remark":None,
"isOnline":False,
"deviceType":3,
"bindAreaIds":[
23173
],
"topId":22194,
"mac":"04:8B:32:99:88:85"
},
{
"deviceId":8139,
"sn":"8152017030900047",
"deviceName":"00:f5",
"softwareVersion":"AmOS-3.6.2.023C5",
"deviceModel":"WIA3300-20",
"createDate":"2019-03-27 19:29:23",
"remark":None,
"isOnline":True,
"deviceType":1,
"bindAreaIds":[
647
],
"topId":8,
"mac":"04:8B:42:30:00:F5"
},
{
"deviceId":8157,
"sn":"8112014070900268",
"deviceName":"WIA3200-80",
"softwareVersion":"AmOS-2.5.6.301P10",
"deviceModel":"WIA3200-80",
"createDate":"2019-03-29 09:43:10",
"remark":"10.10.22.11",
"isOnline":True,
"deviceType":1,
"bindAreaIds":[
14812
],
"topId":10886,
"mac":"04:8C:42:21:4A:54"
},
{
"deviceId":8140,
"sn":"8112015060500010",
"deviceName":"04:8f",
"softwareVersion":"AmOS-2.5.7.012",
"deviceModel":"WIA3200-80D",
"createDate":"2019-03-27 19:30:24",
"remark":None,
"isOnline":True,
"deviceType":1,
"bindAreaIds":[
14369
],
"topId":8,
"mac":"04:8D:42:22:04:8F"
},
{
"deviceId":8159,
"sn":"8152017060700446",
"deviceName":"WIA3300-20",
"softwareVersion":"AmOS-3.6.1.300P15C5",
"deviceModel":"WIA3300-20",
"createDate":"2019-03-29 11:38:44",
"remark":"10.10.27.86",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
14812
],
"topId":10886,
"mac":"04:8B:42:30:0A:50"
},
{
"deviceId":543,
"sn":"8122016060300432",
"deviceName":"39783693X148B42225AAB",
"softwareVersion":"AmOS-2.5.7.017",
"deviceModel":"WOA5200-80",
"createDate":"2019-01-08 19:08:43",
"remark":None,
"isOnline":True,
"deviceType":1,
"bindAreaIds":[
8793
],
"topId":8,
"mac":"14:8B:42:22:5A:AB"
},
{
"deviceId":3294,
"sn":"J6371662642",
"deviceName":"iap3320-liyi",
"softwareVersion":"AmOS-3.6.2.023C5",
"deviceModel":"WIA3300-20",
"createDate":"2019-02-19 10:52:13",
"remark":None,
"isOnline":True,
"deviceType":3,
"bindAreaIds":[
22193
],
"topId":2,
"mac":"D0:05:2A:84:98:30"
},
{
"deviceId":14087,
"sn":"8152017060700030",
"deviceName":"04:8B:42:30:01:A5",
"softwareVersion":"AmOS-3.6.2.024C5",
"deviceModel":"WIA3300-20",
"createDate":"2019-04-09 15:04:48",
"remark":"peiping_iap_3320",
"isOnline":True,
"deviceType":3,
"bindAreaIds":[
14810
],
"topId":14807,
"mac":"04:8B:42:30:01:A5"
},
{
"deviceId":14089,
"sn":None,
"deviceName":"222",
"softwareVersion":None,
"deviceModel":None,
"createDate":"2019-04-10 17:02:40",
"remark":None,
"isOnline":False,
"deviceType":3,
"bindAreaIds":[
23174
],
"topId":22194,
"mac":"04:1B:48:99:88:85"
},
{
"deviceId":13062,
"sn":"8152018032000297",
"deviceName":"04:8B:42:31:3D:DA",
"softwareVersion":"AmOS-3.6.1.300P08C7",
"deviceModel":"WIA3300-60",
"createDate":"2019-04-03 17:38:35",
"remark":"04:8B:42:31:3D:DA",
"isOnline":True,
"deviceType":1,
"bindAreaIds":[
14809
],
"topId":14807,
"mac":"04:8B:42:31:3D:DA"
},
{
"deviceId":8160,
"sn":"8152017102605228",
"deviceName":"test",
"softwareVersion":"AmOS-3.6.2.2496C5",
"deviceModel":"WIA3300-20",
"createDate":"2019-03-29 14:21:41",
"remark":"12212",
"isOnline":False,
"deviceType":1,
"bindAreaIds":[
13665
],
"topId":8,
"mac":"04:8B:42:30:BF:5A"
},
{
"deviceId":12163,
"sn":"8152018032000457",
"deviceName":"iap3360",
"softwareVersion":"AmOS-3.6.2.023C7",
"deviceModel":"WIA3300-60",
"createDate":"2019-04-03 15:09:26",
"remark":None,
"isOnline":False,
"deviceType":3,
"bindAreaIds":[
13712
],
"topId":2,
"mac":"04:8B:42:31:41:30"
}
]
# Collect every offline AP (isOnline is falsy) together with its record.
offline_list = [ret for ret in data if not ret['isOnline']]
print('offline list : {}'.format(offline_list))  # fixed: was mislabeled "enable"
# Number of offline APs per area, keyed by topId.
offline_ret = Counter(d['topId'] for d in offline_list)
print('offline : {}'.format(offline_ret))
# Total number of registered (enabled) APs per area, keyed by topId.
enable_ret = Counter(d['topId'] for d in data)
print('enable : {}'.format(enable_ret))
data_ap_offline_trend = []
for area in offline_ret:
    ap_offline_num = offline_ret[area]  # offline AP count in this area
    enable_ap_num = enable_ret[area]    # total AP count in this area
    # Guard against a zero denominator; by construction enable >= offline here.
    offline_rate = ap_offline_num / enable_ap_num if enable_ap_num != 0 else 0
    data_ap_offline_trend.append(
        Result_ApOfflineTrend(area,
                              # online AP count; clamped at 0 defensively
                              max(enable_ap_num - ap_offline_num, 0),
                              ap_offline_num, ap_offline_num, ap_offline_num,
                              offline_rate, offline_rate, offline_rate))
print(data_ap_offline_trend) | [
"collections.Counter",
"collections.namedtuple"
] | [((69, 247), 'collections.namedtuple', 'namedtuple', (['"""Result_ApOfflineTrend"""', "('areaid', 'avgApNum', 'avgOffLineApNum', 'maxOffLineApNum',\n 'minOffLineApNum', 'avgOffLineRate', 'maxOffLineRate', 'minOffLineRate')"], {}), "('Result_ApOfflineTrend', ('areaid', 'avgApNum',\n 'avgOffLineApNum', 'maxOffLineApNum', 'minOffLineApNum',\n 'avgOffLineRate', 'maxOffLineRate', 'minOffLineRate'))\n", (79, 247), False, 'from collections import Counter, namedtuple\n'), ((371930, 371973), 'collections.Counter', 'Counter', (["[d['topId'] for d in offline_list]"], {}), "([d['topId'] for d in offline_list])\n", (371937, 371973), False, 'from collections import Counter, namedtuple\n'), ((372037, 372072), 'collections.Counter', 'Counter', (["[d['topId'] for d in data]"], {}), "([d['topId'] for d in data])\n", (372044, 372072), False, 'from collections import Counter, namedtuple\n')] |
import logging
import types
from collections import defaultdict
from typing import (
Any,
Dict,
Iterator,
List,
Mapping,
Tuple,
Type,
Union,
)
from .setting import Setting, PropertySetting
from .setting_registry import registry
from .docreader import extract_doc_comments_from_class_or_module
from .exceptions import StructureError, ValidationError, ValidationErrorDetails
from .sources import get_source, AnySource, Source, NotFound
from .sources.strategies import Strategy, default as default_update_strategy
from .types import GuessSettingType, type_hints_equal
from .validators import Validator, ValueTypeValidator
# Module-level logger for the settings machinery.
logger = logging.getLogger(__name__)
# Pseudo setting name under which class-level validate() errors are reported.
INVALID_SETTINGS = '__invalid__settings__'
class SettingsMeta(type):
    """Metaclass that turns UPPER_CASE class attributes into Setting objects.

    At class-creation time it:
    1. converts eligible attributes into Setting instances,
    2. guesses missing type hints,
    3. substitutes specialized Setting subclasses from the registry,
    4. applies attached behaviors, and
    5. attaches comment-style documentation parsed from the source module.
    """
    def __new__(mcs, name, bases, class_dict):
        # Rewrite the class dict before the class object is created.
        new_dict = mcs.class_dict_to_settings(class_dict, bases)
        mcs.add_settings_help(name, new_dict)
        return super().__new__(mcs, name, bases, new_dict)
    @classmethod
    def class_dict_to_settings(mcs, class_dict: dict, bases: List[type]):
        """Return a copy of class_dict with setting-like attributes converted to Settings."""
        new_dict = {}
        annotations = class_dict.get("__annotations__", {})
        for name, attr in class_dict.items():
            attr_is_setting = isinstance(attr, Setting)
            new_attr = attr
            # Make a Setting out of each UPPERCASE_ATTRIBUTE
            if (
                not attr_is_setting
                and mcs._is_setting_name(name)
                and mcs._can_be_converted_to_setting_automatically(attr)
            ):
                new_attr = mcs._make_setting_from_attribute(name, attr, annotations)
            new_attr_is_setting = isinstance(new_attr, Setting)
            # Should we guess a type_hint for the Setting?
            if new_attr_is_setting and new_attr.type_hint is GuessSettingType:
                new_attr.type_hint = mcs._guess_type_hint(
                    name, new_attr, annotations, bases
                )
            # If the Setting was created from an implicit definition (without behaviors!)
            # should we then try substituting the setting type with a one
            # from the registry?
            if not attr_is_setting and new_attr_is_setting:
                setting_class_from_registry = registry.get_setting_class_for_type(
                    new_attr.type_hint
                )
                if setting_class_from_registry is not Setting:
                    new_attr = mcs._substitute_by_setting_class_from_registry(
                        new_attr,
                        setting_class_from_registry
                    )
            # Final touch: apply behaviors
            if new_attr_is_setting:
                mcs._apply_behaviors(new_attr)
            new_dict[name] = new_attr
        return new_dict
    @classmethod
    def _make_setting_from_attribute(
        mcs, name, attr, annotations
    ) -> Union[PropertySetting, Setting]:
        """Wrap a plain attribute into an appropriate Setting instance."""
        # is it a class method?
        if isinstance(attr, types.FunctionType):
            return PropertySetting(attr)
        # Pick a Setting subclass matching the annotated type (falls back to Setting).
        type_hint = annotations.get(name, GuessSettingType)
        setting_class_from_registry = registry.get_setting_class_for_type(type_hint)
        return setting_class_from_registry(attr, doc="", type_hint=type_hint)
    @classmethod
    def _guess_type_hint(mcs, name, setting: Setting, annotations, bases: List[type]):
        """Resolve a type hint: annotation first, then base classes, then value-based guess."""
        # we still have to check annotations,
        # e.g. if the setting was instantiated by behavior
        annotation_type_hint = annotations.get(name, GuessSettingType)
        if annotation_type_hint is not GuessSettingType:
            return annotation_type_hint
        # try to get the type hint from the base classes
        for base in bases:
            try:
                base_type_hint = getattr(base, name).type_hint
                return base_type_hint
            except AttributeError:
                pass
        # Last resort: infer the hint from the setting's current value.
        guessed_setting_type = GuessSettingType.guess_type_hint(setting.value)
        return guessed_setting_type
    @classmethod
    def _is_setting_name(mcs, name: str) -> bool:
        """Return True if name is written in the upper case"""
        return not name.startswith('_') and name.upper() == name
    @classmethod
    def _can_be_converted_to_setting_automatically(mcs, attr: Any) -> bool:
        """Return False if attribute should not be converted
        to a Setting automatically"""
        callable_types = (property, classmethod, staticmethod)
        return not isinstance(attr, callable_types)
    @classmethod
    def add_settings_help(mcs, cls_name: str, class_dict: dict):
        """Fill missing setting docstrings from ``#`` comments in the source module."""
        if '__module__' not in class_dict:
            # class is not coming from a module
            return
        settings = {
            name: attr for name, attr in class_dict.items() if isinstance(attr, Setting)
        }
        if not settings:
            # class contains no settings
            return
        if all(setting.__doc__ for setting in settings.values()):
            # All settings of the class have been explicitly documented.
            # Since explicit documentation overrides comment-docs,
            # there is no need to proceed further
            return
        # read the contents of the module which contains the settings
        # and parse it via Sphinx parser
        cls_module_name = class_dict['__module__']
        comments = extract_doc_comments_from_class_or_module(cls_module_name, cls_name)
        for name, setting in settings.items():
            if setting.__doc__:
                # do not modify an explicitly-made setting documentation
                continue
            comment_key = (cls_name, name)
            try:
                setting.__doc__ = comments[comment_key]
            except KeyError:
                # no comment-style documentation exists
                pass
    @classmethod
    def _substitute_by_setting_class_from_registry(
        mcs,
        setting: Setting,
        substitue_setting_type: Type[Setting]
    ):
        # NOTE: parameter name carries a historic typo ("substitue");
        # it is internal and positional, so callers are unaffected.
        # Rebuild the setting as an instance of the registry-provided class,
        # preserving value, docs, validators, type hint and override flag.
        new_setting = substitue_setting_type(
            setting.value,
            doc=setting.__doc__,
            validators=setting.validators,
            type_hint=setting.type_hint,
            override=setting.override
        )
        # Behaviors are carried over verbatim and applied later.
        new_setting._behaviors = setting._behaviors
        return new_setting
    @classmethod
    def _apply_behaviors(mcs, setting: Setting):
        """Let each attached behavior decorate the setting in order."""
        for behavior in setting._behaviors:
            behavior.decorate(setting)
class Settings(Setting, metaclass=SettingsMeta):
default_validators: Tuple[Validator, ...] = ()
mandatory_validators: Tuple[Validator, ...] = (ValueTypeValidator(),)
_is_being_validated: bool
_errors: ValidationErrorDetails = {}
def __init__(self, **kwargs):
assert (
'value' not in kwargs
), '"value" argument should not be passed to Settings.__init__()'
assert (
'type_hint' not in kwargs
), '"type_hint" argument should not be passed to Settings.__init__()'
super().__init__(value=self, type_hint=self.__class__, **kwargs)
self._is_being_validated = False
self._verify_structure()
def _verify_structure(self):
# verify whether the setting on Nth level of the inheritance hierarchy
# corresponds to the setting on N-1th level of the hierarchy.
for name, classes in self._get_settings_classes().items():
for c0, c1 in zip(classes, classes[1:]):
# start with setting object of the first classes
s0 = c0.__dict__[name]
s1 = c1.__dict__[name]
differences = self._settings_diff(s0, s1)
if differences:
diff = '; '.join(differences)
raise StructureError(
f'in classes {c0} and {c1} setting {name} has'
f' the following difference(s): {diff}'
)
def _get_settings_classes(self) -> Dict[str, List[Type['Settings']]]:
# _settings_classes is helper list which can be used in
# settings reading and validation routines.
# 1. Iterate through __mro__ classes in reverse order - so that
# iteration happens from the most-base class to the current one.
# 2. Store found settings as {name: [cls, ...]} to settings_classes
settings_classes: Dict[str, List[Type['Settings']]] = defaultdict(list)
assert self.__class__.__mro__[-3] is Settings
# __mro__[:-2] - skip Settings and object bases
for cls in reversed(self.__class__.__mro__[:-3]):
for attr, val in cls.__dict__.items():
if isinstance(val, Setting):
settings_classes[attr].append(cls)
return dict(settings_classes)
def _settings_diff(self, s0: Setting, s1: Setting) -> List[str]:
NO_DIFF = [] # type: ignore
differences = []
# No checks are performed if setting is overridden
if s1.override:
return NO_DIFF
if not type_hints_equal(s0.type_hint, s1.type_hint):
differences.append(f'types differ: {s0.type_hint} != {s1.type_hint}')
return differences
@classmethod
def settings_attributes(cls) -> Iterator[Tuple[str, Setting]]:
for name in dir(cls):
attr = getattr(cls, name)
if isinstance(attr, Setting):
yield name, attr
def is_valid(self, raise_exception=False) -> bool:
self._errors = {}
self._errors = self._run_validation(raise_exception)
return self._errors == {}
def _run_validation(self, raise_exception=False) -> ValidationErrorDetails:
self._is_being_validated = True
errors = {}
# validate each setting individually
for name, setting in self.settings_attributes():
setting_errors = self._validate_setting(name, setting, raise_exception)
if setting_errors:
errors[name] = setting_errors
if errors == {}:
try:
self.validate()
except ValidationError as e:
if raise_exception:
raise e
else:
errors[INVALID_SETTINGS] = [str(e)]
self._is_being_validated = False
return errors
def _validate_setting(
self, name: str, setting: Setting, raise_exception=False
) -> ValidationErrorDetails:
value: Setting = getattr(self, name)
errors: List[ValidationErrorDetails] = []
validators = setting.validators or self.default_validators
validators += self.mandatory_validators
for validator in validators:
try:
validator(value, name=name, owner=self, setting=setting)
except ValidationError as e:
if raise_exception:
raise ValidationError({name: e.details}) from e
errors.append(str(e))
except Exception as e:
if raise_exception:
raise ValidationError({name: str(e)}) from e
else:
errors.append(str(e))
# nested Settings
if isinstance(value, Settings):
nested_settings = value
try:
nested_settings.is_valid(raise_exception=raise_exception)
except ValidationError as e:
assert raise_exception
e.prepend_source(name)
raise ValidationError({name: e.details}) from e
if nested_settings.errors:
errors.append(nested_settings.errors)
return errors
def validate(self):
pass
def update(self, source: AnySource, strategies: dict = None):
strategies = strategies if strategies is not None else {}
assert isinstance(strategies, Mapping), '`strategies` type should be `dict`'
source_obj = get_source(source)
self._update(self, source_obj, parents=(), strategies=strategies)
@staticmethod
def _update(
settings: 'Settings',
source: Source,
parents: Tuple[str, ...] = (),
strategies: Dict[str, Strategy] = None,
):
"""Recursively update settings object from dictionary"""
strategies = strategies or {}
for name, setting in settings.settings_attributes():
if isinstance(setting, Settings):
settings._update(setting, source, (*parents, name), strategies)
else:
full_setting_name = f'{".".join(parents) and "."}{name}'
if full_setting_name in strategies:
update_strategy = strategies[full_setting_name]
logger.debug(
'Updating setting %s with strategy %s',
full_setting_name,
getattr(update_strategy, '__qualname__', 'unknown strategy'),
)
else:
update_strategy = default_update_strategy
update_to_val = source.read(setting, parents)
if update_to_val is NotFound:
continue
current_val = getattr(settings, name)
new_val = update_strategy(current_val, update_to_val)
setattr(settings, name, new_val)
def extract_to(self, destination: Union[types.ModuleType, dict], prefix: str = ''):
if prefix != '':
prefix = prefix + '_'
if isinstance(destination, types.ModuleType):
destination = destination.__dict__
for name, attr in self.settings_attributes():
var_name = prefix + name
if isinstance(attr, Settings): # nested settings
attr.extract_to(destination, var_name)
else:
destination[var_name] = getattr(self, name)
    @property
    def errors(self) -> ValidationErrorDetails:
        """Collected validation error details.

        NOTE(review): presumably populated by the validation pass — confirm
        against the full class.
        """
        return self._errors
    @property
    def is_being_validated(self) -> bool:
        """Whether a validation pass on this object is currently in progress."""
        return self._is_being_validated
| [
"logging.getLogger",
"collections.defaultdict"
] | [((662, 689), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (679, 689), False, 'import logging\n'), ((8456, 8473), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (8467, 8473), False, 'from collections import defaultdict\n')] |
import sys, os, socket, time, xmlrpc.client, threading
from log.logger import *
from bean.urlbean import *
class Worker(threading.Thread):
    """Worker thread that pulls crawl jobs from an XML-RPC job master and
    pushes follow-up jobs back.

    Fixes vs. original: removed the dead ``ls = []`` assignment that was
    immediately overwritten, used idiomatic ``while True:``, and translated
    the Chinese comments. Behavior is otherwise unchanged.
    """

    def __init__(self):
        threading.Thread.__init__(self)
        # The job master is assumed to run on this host, port 6789.
        localip = socket.gethostbyname(socket.gethostname())
        self.s = xmlrpc.client.ServerProxy('http://%s:6789' % (localip), allow_none=True)

    def run(self):
        while True:
            try:
                ls = self.s.getJobs(20)
                for l in ls:
                    re = []
                    if "getpage" in l["message"]:
                        # Page job: generate follow-up "getitem" jobs.
                        for i in range(200):
                            re.append(UrlBean('http://bj.58.com/ershoufang/', 'www58com#getitem', key="www58com#getitem"+str(i), param=10, headers='北京', order='1602221013'))
                        print("getpage!")
                        time.sleep(1)
                    elif "getitem" in l["message"]:
                        print("getitem!")
                        time.sleep(1)
                    else:
                        # Root job: generate "getpage" jobs.
                        for i in range(200):
                            re.append(UrlBean('http://bj.58.com/ershoufang/', 'www58com#getpage', param=10, headers='北京', order='1602221013'))
                        print("root")
                        time.sleep(2)
                    # Push the generated batch three times (as the original did).
                    for i in range(3):
                        self.s.addJobs(re)
                        time.sleep(1)
                print("empty!")
                time.sleep(2)
            except Exception as e:
                # Best-effort loop: log and keep polling.
                print(str(e), type(e))
if __name__ == '__main__':
    # Launch the worker pool (currently a single thread) and wait for exit.
    workers = []
    for _ in range(1):
        thread = Worker()
        thread.start()
        workers.append(thread)
    for thread in workers:
        thread.join()
"threading.Thread.__init__",
"socket.gethostname",
"time.sleep"
] | [((180, 211), 'threading.Thread.__init__', 'threading.Thread.__init__', (['self'], {}), '(self)\n', (205, 211), False, 'import sys, os, socket, time, xmlrpc.client, threading\n'), ((252, 272), 'socket.gethostname', 'socket.gethostname', ([], {}), '()\n', (270, 272), False, 'import sys, os, socket, time, xmlrpc.client, threading\n'), ((1596, 1609), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (1606, 1609), False, 'import sys, os, socket, time, xmlrpc.client, threading\n'), ((948, 961), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (958, 961), False, 'import sys, os, socket, time, xmlrpc.client, threading\n'), ((1532, 1545), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (1542, 1545), False, 'import sys, os, socket, time, xmlrpc.client, threading\n'), ((1083, 1096), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (1093, 1096), False, 'import sys, os, socket, time, xmlrpc.client, threading\n'), ((1409, 1422), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (1419, 1422), False, 'import sys, os, socket, time, xmlrpc.client, threading\n')] |
#!/usr/bin/env python
#------------------------------------------------------------
# Purpose: Program to straight line parameters
# to data with errors in both coordinates. Compare
# the results with SciPy's ODR routine.
# Vog, 27 Nov, 2011
#------------------------------------------------------------
import numpy
from matplotlib.pyplot import figure, show, rc
from numpy.random import normal
from kapteyn import kmpfit
from scipy.odr import Data, Model, ODR, RealData, odr_stop
def model(p, x):
    """Straight-line model y = a + b*x for parameters p = (a, b)."""
    intercept, slope = p
    return intercept + slope * x
def residuals(p, data):
    """Weighted residuals for the effective-variance method.

    The weight per point is 1/sqrt(ey^2 + b^2*ex^2); points with zero
    combined variance get weight 0.
    """
    a, b = p
    x, y, ex, ey = data
    variance = ey*ey + b*b*ex*ex
    weight = numpy.sqrt(numpy.where(variance == 0.0, 0.0, 1.0/(variance)))
    return weight * (y - model(p, x))
def residuals2(p, data):
    """Residuals as weighted minimum distance from each point to the line."""
    a, b = p
    x, y, ex, ey = data
    wx = 1/(ex*ex)
    wy = 1/(ey*ey)
    df = b
    # Closest x-coordinate on the model line for each data point (weighted).
    xd = x + (wy*(y-model(p,x))*df)/(wx+wy*df*df)
    yd = model(p, xd)
    return numpy.sqrt(wx*(x-xd)**2 + wy*(y-yd)**2)
# Create the data: straight line y = a0 + b0*x with Gaussian noise in both
# coordinates (x errors sigma=0.4, y errors sigma=0.5).
N = 20
a0 = 2; b0 = 1.6
x = numpy.linspace(0.0, 12.0, N)
y = model((a0,b0),x) + normal(0.0, 1.5, N)  # Gaussian noise: mean 0, sigma 1.5
errx = normal(0.0, 0.4, N)
erry = normal(0.0, 0.5, N)
beta0 = [0,0]
# Reference fit: SciPy's Orthogonal Distance Regression.
print("\n========== Results SciPy's ODR ============")
linear = Model(model)
mydata = RealData(x, y, sx=errx, sy=erry)
myodr = ODR(mydata, linear, beta0=beta0, maxit=5000)
myoutput = myodr.run()
print("Fitted parameters: ", myoutput.beta)
print("Covariance errors: ", numpy.sqrt(myoutput.cov_beta.diagonal()))
print("Standard errors: ", myoutput.sd_beta)
print("Minimum (reduced)chi^2: ", myoutput.res_var)
beta = myoutput.beta
# Prepare fit routine: kmpfit with effective-variance residuals.
fitobj = kmpfit.Fitter(residuals=residuals, data=(x, y, errx, erry))
try:
    fitobj.fit(params0=beta0)
except Exception as mes:
    print("Something wrong with fit: ", mes)
    raise SystemExit
print("\n\n======== Results kmpfit: w1 = ey*ey + b*b*ex*ex =========")
print("Params: ", fitobj.params)
print("Covariance errors: ", fitobj.xerror)
print("Standard errors ", fitobj.stderr)
print("Chi^2 min: ", fitobj.chi2_min)
print("Reduced Chi^2: ", fitobj.rchi2_min)
print("Message: ", fitobj.message)
# Second kmpfit run: minimum-distance residuals (residuals2).
fitobj2 = kmpfit.Fitter(residuals=residuals2, data=(x, y, errx, erry))
try:
    fitobj2.fit(params0=beta0)
except Exception as mes:
    print("Something wrong with fit: ", mes)
    raise SystemExit
print("\n\n======== Results kmpfit: r = ex*ex/(ey*ey), xd = (x-a*r+y*b*r)/(1+r) =========")
print("Params: ", fitobj2.params)
print("Covariance errors: ", fitobj2.xerror)
print("Standard errors ", fitobj2.stderr)
print("Chi^2 min: ", fitobj2.chi2_min)
print("Reduced Chi^2: ", fitobj2.rchi2_min)
print("Message: ", fitobj2.message)
t = "\nTHE WILLAMSON APPROACH"
print(t, "\n", "="*len(t))
# Step 1: Get a and b for a, b with standard weighted least squares calculation
def lingres(xa, ya, w):
    """Weighted least-squares fit of the relation y = a + b*x.

    Parameters:
        xa, ya: data arrays.
        w: per-point weights.
    Returns:
        (a, b) — intercept and slope.

    Fixes vs. original: no longer shadows the builtin ``sum``, and the
    unused ``sumY2`` accumulator was removed.
    """
    w_sum = w.sum()
    sum_x = (w*xa).sum()
    sum_y = (w*ya).sum()
    sum_x2 = (w*xa*xa).sum()
    sum_xy = (w*xa*ya).sum()
    delta = w_sum * sum_x2 - sum_x * sum_x
    a = (sum_x2*sum_y - sum_x*sum_xy) / delta
    b = (sum_xy*w_sum - sum_x*sum_y) / delta
    return a, b
# Williamson iterative fit: start from a plain weighted least-squares result.
w = numpy.where(erry==0.0, 0.0, 1.0/(erry*erry))
a,b = lingres(x, y, w)
a_y = a; b_y = b   # Williamson initial Parameters
ui = errx**2
vi = erry**2
n = 0
cont = True
while cont:
    # Step 2: Use this slope to find weighting for each point
    wi = (vi+b*b*ui)**-1
    # Step 3: Calculate weighted averages
    w_sum = wi.sum()
    x_av = (wi*x).sum() / w_sum
    x_diff = x - x_av
    y_av = (wi*y).sum() / w_sum
    y_diff = y - y_av
    # Step 4: Calculate the 'improvement' vector zi
    zi = wi*(vi*x_diff + b*ui*y_diff)
    b_will = (wi*zi*y_diff).sum()/ (wi*zi*x_diff).sum()
    # Iterate until the slope is stable (tolerance 1e-12) or 100 iterations.
    cont = abs(b-b_will) > 1e-12 and n < 100
    n += 1
    b = b_will
# Step 5: Repeat steps 2-4 until convergence
# Step 6: Calculate 'a' using the averages of a and y
a_will = y_av - b_will*x_av   # Improved parameters
# Step 7: The variances
wi = (vi+b_will*b_will*ui)**-1
w_sum = wi.sum()
z_av = (wi*zi).sum() / w_sum
zi2 = zi - z_av
Q =1.0/(wi*(x_diff*y_diff/b_will + 4*zi2*(zi-x_diff))).sum()
sigb2 = Q*Q * (wi*wi*(x_diff**2*vi+y_diff**2*ui)).sum()
siga2 = 1.0/w_sum + 2*(x_av+2*z_av)*z_av*Q + (x_av+2*z_av)**2*sigb2
siga = numpy.sqrt(siga2)
sigb = numpy.sqrt(sigb2)
print("Williamson Fitted A, B: ", a_will, b_will)
print("Parameter errors: ", siga, sigb)
# Some plotting: overlay all fits and the true line on the data.
rc('font', size=9)
rc('legend', fontsize=8)
fig = figure(1)
frame = fig.add_subplot(1,1,1, aspect=1, adjustable='datalim')
frame.errorbar(x, y, xerr=errx, yerr=erry, fmt='bo')
# Plot first fit
frame.plot(x, model(beta,x), '-y', lw=4, label="ODR", alpha=0.6)
frame.plot(x, model(fitobj.params,x), 'c', ls='--', lw=2, label="kmpfit")
frame.plot(x, model(fitobj2.params,x), '#ffaa00', label="kmpfit correct")
frame.plot(x, model((a_will,b_will),x), 'g', label="Williamson")
frame.plot(x, model((a0,b0),x), '#ab12cc', label="True")
frame.set_xlabel("X")
frame.set_ylabel("Y")
frame.set_title("Weights in both coordinates. Model: $y=a+bx$")
frame.grid(True)
leg = frame.legend(loc=1)
show()
"numpy.random.normal",
"numpy.sqrt",
"numpy.where",
"scipy.odr.ODR",
"scipy.odr.Model",
"numpy.linspace",
"scipy.odr.RealData",
"matplotlib.pyplot.figure",
"kapteyn.kmpfit.Fitter",
"matplotlib.pyplot.rc",
"matplotlib.pyplot.show"
] | [((1153, 1181), 'numpy.linspace', 'numpy.linspace', (['(0.0)', '(12.0)', 'N'], {}), '(0.0, 12.0, N)\n', (1167, 1181), False, 'import numpy\n'), ((1251, 1270), 'numpy.random.normal', 'normal', (['(0.0)', '(0.4)', 'N'], {}), '(0.0, 0.4, N)\n', (1257, 1270), False, 'from numpy.random import normal\n'), ((1279, 1298), 'numpy.random.normal', 'normal', (['(0.0)', '(0.5)', 'N'], {}), '(0.0, 0.5, N)\n', (1285, 1298), False, 'from numpy.random import normal\n'), ((1379, 1391), 'scipy.odr.Model', 'Model', (['model'], {}), '(model)\n', (1384, 1391), False, 'from scipy.odr import Data, Model, ODR, RealData, odr_stop\n'), ((1401, 1433), 'scipy.odr.RealData', 'RealData', (['x', 'y'], {'sx': 'errx', 'sy': 'erry'}), '(x, y, sx=errx, sy=erry)\n', (1409, 1433), False, 'from scipy.odr import Data, Model, ODR, RealData, odr_stop\n'), ((1442, 1486), 'scipy.odr.ODR', 'ODR', (['mydata', 'linear'], {'beta0': 'beta0', 'maxit': '(5000)'}), '(mydata, linear, beta0=beta0, maxit=5000)\n', (1445, 1486), False, 'from scipy.odr import Data, Model, ODR, RealData, odr_stop\n'), ((1792, 1851), 'kapteyn.kmpfit.Fitter', 'kmpfit.Fitter', ([], {'residuals': 'residuals', 'data': '(x, y, errx, erry)'}), '(residuals=residuals, data=(x, y, errx, erry))\n', (1805, 1851), False, 'from kapteyn import kmpfit\n'), ((2358, 2418), 'kapteyn.kmpfit.Fitter', 'kmpfit.Fitter', ([], {'residuals': 'residuals2', 'data': '(x, y, errx, erry)'}), '(residuals=residuals2, data=(x, y, errx, erry))\n', (2371, 2418), False, 'from kapteyn import kmpfit\n'), ((3483, 3533), 'numpy.where', 'numpy.where', (['(erry == 0.0)', '(0.0)', '(1.0 / (erry * erry))'], {}), '(erry == 0.0, 0.0, 1.0 / (erry * erry))\n', (3494, 3533), False, 'import numpy\n'), ((4593, 4610), 'numpy.sqrt', 'numpy.sqrt', (['siga2'], {}), '(siga2)\n', (4603, 4610), False, 'import numpy\n'), ((4618, 4635), 'numpy.sqrt', 'numpy.sqrt', (['sigb2'], {}), '(sigb2)\n', (4628, 4635), False, 'import numpy\n'), ((4744, 4762), 'matplotlib.pyplot.rc', 'rc', (['"""font"""'], 
{'size': '(9)'}), "('font', size=9)\n", (4746, 4762), False, 'from matplotlib.pyplot import figure, show, rc\n'), ((4763, 4787), 'matplotlib.pyplot.rc', 'rc', (['"""legend"""'], {'fontsize': '(8)'}), "('legend', fontsize=8)\n", (4765, 4787), False, 'from matplotlib.pyplot import figure, show, rc\n'), ((4794, 4803), 'matplotlib.pyplot.figure', 'figure', (['(1)'], {}), '(1)\n', (4800, 4803), False, 'from matplotlib.pyplot import figure, show, rc\n'), ((5424, 5430), 'matplotlib.pyplot.show', 'show', ([], {}), '()\n', (5428, 5430), False, 'from matplotlib.pyplot import figure, show, rc\n'), ((1053, 1104), 'numpy.sqrt', 'numpy.sqrt', (['(wx * (x - xd) ** 2 + wy * (y - yd) ** 2)'], {}), '(wx * (x - xd) ** 2 + wy * (y - yd) ** 2)\n', (1063, 1104), False, 'import numpy\n'), ((1205, 1224), 'numpy.random.normal', 'normal', (['(0.0)', '(1.5)', 'N'], {}), '(0.0, 1.5, N)\n', (1211, 1224), False, 'from numpy.random import normal\n'), ((740, 775), 'numpy.where', 'numpy.where', (['(w == 0.0)', '(0.0)', '(1.0 / w)'], {}), '(w == 0.0, 0.0, 1.0 / w)\n', (751, 775), False, 'import numpy\n')] |
import os
import KratosMultiphysics
from KratosMultiphysics import Logger
# Keep Kratos console output at WARNING to reduce test noise.
Logger.GetDefaultOutput().SetSeverity(Logger.Severity.WARNING)
import KratosMultiphysics.KratosUnittest as KratosUnittest
import KratosMultiphysics.DEMApplication.DEM_analysis_stage
import numpy as np
import auxiliary_functions_for_tests
# Remember the starting working directory (not used in this chunk — TODO confirm).
this_working_dir_backup = os.getcwd()
def GetFilePath(fileName):
    """Return the absolute path of *fileName* relative to this script's directory."""
    script_dir = os.path.dirname(os.path.realpath(__file__))
    return os.path.join(script_dir, fileName)
class DEM3D_SearchToleranceMain(KratosMultiphysics.DEMApplication.DEM_analysis_stage.DEMAnalysisStage, KratosUnittest.TestCase):
    """DEM stage that asserts node 2's y-velocity against reference values.

    The references were recorded with search frequency 1 and search
    tolerance 0.0 (see TestSearchTolerance.test_SearchA).

    Improvement vs. original: the three duplicated time-check branches in
    FinalizeSolutionStep are replaced by a data-driven reference table.
    """

    # Reference y-velocities of node 2 at selected simulation times.
    REFERENCE_Y_VELOCITIES = {
        0.02: -5.86502139707038,
        0.115: -3.3859516373258987,
        0.22: -0.5929799879392164,
    }
    # Absolute tolerance used when comparing against the references.
    DELTA = 1.0e-15

    def Initialize(self):
        super().Initialize()
        for node in self.spheres_model_part.Nodes:
            self.initial_normal_vel = node.GetSolutionStepValue(KratosMultiphysics.VELOCITY_Z)

    @classmethod
    def GetMainPath(self):
        return os.path.join(os.path.dirname(os.path.realpath(__file__)), "test_search_tolerance")

    def GetProblemNameWithPath(self):
        return os.path.join(self.main_path, self.DEM_parameters["problem_name"].GetString())

    def FinalizeSolutionStep(self):
        super().FinalizeSolutionStep()
        self._check_reference_velocities()

    def _check_reference_velocities(self):
        """Assert node 2's y-velocity matches the reference at each check time."""
        for node in self.spheres_model_part.Nodes:
            if node.Id != 2:
                continue
            for ref_time, y_vel_ref in self.REFERENCE_Y_VELOCITIES.items():
                if np.isclose(self.time, ref_time, rtol=0.0, atol=1e-06):
                    y_vel = node.GetSolutionStepValue(KratosMultiphysics.VELOCITY_Y)
                    print(self.time, y_vel)
                    self.assertAlmostEqual(y_vel, y_vel_ref, delta=self.DELTA)

    def Finalize(self):
        # Clean up result folders produced by the run.
        self.procedures.RemoveFoldersWithResults(str(self.main_path), str(self.problem_name), '')
        super().Finalize()
class DEM3D_SearchTolerance1(DEM3D_SearchToleranceMain):
    """Variant with references recorded for search frequency 10, tolerance 0.0
    (see TestSearchTolerance.test_SearchB).

    Improvement vs. original: the three duplicated time-check branches are
    replaced by a data-driven reference table.
    """

    REFERENCE_Y_VELOCITIES = {
        0.02: -5.8654458179811835,
        0.115: -3.3861319639727263,
        0.22: -0.594495289987086,
    }

    def FinalizeSolutionStep(self):
        # Call the DEM stage directly, bypassing the parent class's own
        # checks, so only this class's reference values are asserted.
        KratosMultiphysics.DEMApplication.DEM_analysis_stage.DEMAnalysisStage.FinalizeSolutionStep(self)
        tol = 1.0e-15
        for node in self.spheres_model_part.Nodes:
            if node.Id != 2:
                continue
            for ref_time, y_vel_ref in self.REFERENCE_Y_VELOCITIES.items():
                if np.isclose(self.time, ref_time, rtol=0.0, atol=1e-06):
                    y_vel = node.GetSolutionStepValue(KratosMultiphysics.VELOCITY_Y)
                    print(self.time, y_vel)
                    self.assertAlmostEqual(y_vel, y_vel_ref, delta=tol)
class DEM3D_SearchTolerance2(DEM3D_SearchToleranceMain):
    """Variant with references recorded for search frequency 20, tolerance 1e-04
    (see TestSearchTolerance.test_SearchC).

    Improvement vs. original: the three duplicated time-check branches are
    replaced by a data-driven reference table.
    """

    REFERENCE_Y_VELOCITIES = {
        0.02: -5.865445816566027,
        0.115: -3.386128017385994,
        0.22: -0.5941551772701182,
    }

    def FinalizeSolutionStep(self):
        # Call the DEM stage directly, bypassing the parent class's own
        # checks, so only this class's reference values are asserted.
        KratosMultiphysics.DEMApplication.DEM_analysis_stage.DEMAnalysisStage.FinalizeSolutionStep(self)
        tol = 1.0e-15
        for node in self.spheres_model_part.Nodes:
            if node.Id != 2:
                continue
            for ref_time, y_vel_ref in self.REFERENCE_Y_VELOCITIES.items():
                if np.isclose(self.time, ref_time, rtol=0.0, atol=1e-06):
                    y_vel = node.GetSolutionStepValue(KratosMultiphysics.VELOCITY_Y)
                    print(self.time, y_vel)
                    self.assertAlmostEqual(y_vel, y_vel_ref, delta=tol)
class DEM3D_SearchTolerance3(DEM3D_SearchToleranceMain):
    """Variant run with search frequency 20, tolerance 1e-03
    (see TestSearchTolerance.test_SearchD). Its reference values coincide
    with DEM3D_SearchToleranceMain's.

    Improvement vs. original: the three duplicated time-check branches are
    replaced by a data-driven reference table.
    """

    REFERENCE_Y_VELOCITIES = {
        0.02: -5.86502139707038,
        0.115: -3.3859516373258987,
        0.22: -0.5929799879392164,
    }

    def FinalizeSolutionStep(self):
        # Call the DEM stage directly, bypassing the parent class's own
        # checks, so only this class's reference values are asserted.
        KratosMultiphysics.DEMApplication.DEM_analysis_stage.DEMAnalysisStage.FinalizeSolutionStep(self)
        tol = 1.0e-15
        for node in self.spheres_model_part.Nodes:
            if node.Id != 2:
                continue
            for ref_time, y_vel_ref in self.REFERENCE_Y_VELOCITIES.items():
                if np.isclose(self.time, ref_time, rtol=0.0, atol=1e-06):
                    y_vel = node.GetSolutionStepValue(KratosMultiphysics.VELOCITY_Y)
                    print(self.time, y_vel)
                    self.assertAlmostEqual(y_vel, y_vel_ref, delta=tol)
class TestSearchTolerance(KratosUnittest.TestCase):
    """Runs the DEM search-tolerance stages under different search settings.

    Improvement vs. original: the four test methods were nearly identical;
    the shared body is factored into ``_run_case`` so each case only states
    its (stage class, tolerance, frequency) combination.
    """

    @classmethod
    def _run_case(cls, stage_class, search_tolerance, frequency):
        """Load the project parameters, apply the case settings and run the stage."""
        path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "test_search_tolerance")
        parameters_file_name = os.path.join(path, "ProjectParametersDEM.json")
        with open(parameters_file_name, 'r') as parameter_file:
            project_parameters = KratosMultiphysics.Parameters(parameter_file.read())
        project_parameters["SearchTolerance"].SetDouble(search_tolerance)
        project_parameters["search_tolerance_against_walls"].SetDouble(search_tolerance)
        project_parameters["NeighbourSearchFrequency"].SetInt(frequency)
        model = KratosMultiphysics.Model()
        auxiliary_functions_for_tests.CreateAndRunStageInSelectedNumberOfOpenMPThreads(
            stage_class, model, project_parameters,
            auxiliary_functions_for_tests.GetHardcodedNumberOfThreads())

    @classmethod
    def test_SearchA(self):
        self._run_case(DEM3D_SearchToleranceMain, 0.0, 1)

    @classmethod
    def test_SearchB(self):
        self._run_case(DEM3D_SearchTolerance1, 0.0, 10)

    @classmethod
    def test_SearchC(self):
        self._run_case(DEM3D_SearchTolerance2, 1e-04, 20)

    @classmethod
    def test_SearchD(self):
        self._run_case(DEM3D_SearchTolerance3, 1e-03, 20)
if __name__ == "__main__":
Logger.GetDefaultOutput().SetSeverity(Logger.Severity.WARNING)
KratosUnittest.main()
| [
"numpy.isclose",
"KratosMultiphysics.DEMApplication.DEM_analysis_stage.DEMAnalysisStage.FinalizeSolutionStep",
"KratosMultiphysics.KratosUnittest.main",
"os.path.join",
"os.getcwd",
"os.path.realpath",
"KratosMultiphysics.Logger.GetDefaultOutput",
"KratosMultiphysics.Model",
"auxiliary_functions_for... | [((340, 351), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (349, 351), False, 'import os\n'), ((9710, 9731), 'KratosMultiphysics.KratosUnittest.main', 'KratosUnittest.main', ([], {}), '()\n', (9729, 9731), True, 'import KratosMultiphysics.KratosUnittest as KratosUnittest\n'), ((74, 99), 'KratosMultiphysics.Logger.GetDefaultOutput', 'Logger.GetDefaultOutput', ([], {}), '()\n', (97, 99), False, 'from KratosMultiphysics import Logger\n'), ((2537, 2638), 'KratosMultiphysics.DEMApplication.DEM_analysis_stage.DEMAnalysisStage.FinalizeSolutionStep', 'KratosMultiphysics.DEMApplication.DEM_analysis_stage.DEMAnalysisStage.FinalizeSolutionStep', (['self'], {}), '(\n self)\n', (2627, 2638), False, 'import KratosMultiphysics\n'), ((3818, 3919), 'KratosMultiphysics.DEMApplication.DEM_analysis_stage.DEMAnalysisStage.FinalizeSolutionStep', 'KratosMultiphysics.DEMApplication.DEM_analysis_stage.DEMAnalysisStage.FinalizeSolutionStep', (['self'], {}), '(\n self)\n', (3908, 3919), False, 'import KratosMultiphysics\n'), ((5098, 5199), 'KratosMultiphysics.DEMApplication.DEM_analysis_stage.DEMAnalysisStage.FinalizeSolutionStep', 'KratosMultiphysics.DEMApplication.DEM_analysis_stage.DEMAnalysisStage.FinalizeSolutionStep', (['self'], {}), '(\n self)\n', (5188, 5199), False, 'import KratosMultiphysics\n'), ((6503, 6550), 'os.path.join', 'os.path.join', (['path', '"""ProjectParametersDEM.json"""'], {}), "(path, 'ProjectParametersDEM.json')\n", (6515, 6550), False, 'import os\n'), ((6919, 6945), 'KratosMultiphysics.Model', 'KratosMultiphysics.Model', ([], {}), '()\n', (6943, 6945), False, 'import KratosMultiphysics\n'), ((7323, 7370), 'os.path.join', 'os.path.join', (['path', '"""ProjectParametersDEM.json"""'], {}), "(path, 'ProjectParametersDEM.json')\n", (7335, 7370), False, 'import os\n'), ((7740, 7766), 'KratosMultiphysics.Model', 'KratosMultiphysics.Model', ([], {}), '()\n', (7764, 7766), False, 'import KratosMultiphysics\n'), ((8141, 8188), 'os.path.join', 
'os.path.join', (['path', '"""ProjectParametersDEM.json"""'], {}), "(path, 'ProjectParametersDEM.json')\n", (8153, 8188), False, 'import os\n'), ((8562, 8588), 'KratosMultiphysics.Model', 'KratosMultiphysics.Model', ([], {}), '()\n', (8586, 8588), False, 'import KratosMultiphysics\n'), ((8963, 9010), 'os.path.join', 'os.path.join', (['path', '"""ProjectParametersDEM.json"""'], {}), "(path, 'ProjectParametersDEM.json')\n", (8975, 9010), False, 'import os\n'), ((9384, 9410), 'KratosMultiphysics.Model', 'KratosMultiphysics.Model', ([], {}), '()\n', (9408, 9410), False, 'import KratosMultiphysics\n'), ((420, 446), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (436, 446), False, 'import os\n'), ((7087, 7146), 'auxiliary_functions_for_tests.GetHardcodedNumberOfThreads', 'auxiliary_functions_for_tests.GetHardcodedNumberOfThreads', ([], {}), '()\n', (7144, 7146), False, 'import auxiliary_functions_for_tests\n'), ((7905, 7964), 'auxiliary_functions_for_tests.GetHardcodedNumberOfThreads', 'auxiliary_functions_for_tests.GetHardcodedNumberOfThreads', ([], {}), '()\n', (7962, 7964), False, 'import auxiliary_functions_for_tests\n'), ((8727, 8786), 'auxiliary_functions_for_tests.GetHardcodedNumberOfThreads', 'auxiliary_functions_for_tests.GetHardcodedNumberOfThreads', ([], {}), '()\n', (8784, 8786), False, 'import auxiliary_functions_for_tests\n'), ((9549, 9608), 'auxiliary_functions_for_tests.GetHardcodedNumberOfThreads', 'auxiliary_functions_for_tests.GetHardcodedNumberOfThreads', ([], {}), '()\n', (9606, 9608), False, 'import auxiliary_functions_for_tests\n'), ((9643, 9668), 'KratosMultiphysics.Logger.GetDefaultOutput', 'Logger.GetDefaultOutput', ([], {}), '()\n', (9666, 9668), False, 'from KratosMultiphysics import Logger\n'), ((881, 907), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (897, 907), False, 'import os\n'), ((1332, 1381), 'numpy.isclose', 'np.isclose', (['self.time', '(0.02)'], {'rtol': '(0.0)', 'atol': 
'(1e-06)'}), '(self.time, 0.02, rtol=0.0, atol=1e-06)\n', (1342, 1381), True, 'import numpy as np\n'), ((1654, 1704), 'numpy.isclose', 'np.isclose', (['self.time', '(0.115)'], {'rtol': '(0.0)', 'atol': '(1e-06)'}), '(self.time, 0.115, rtol=0.0, atol=1e-06)\n', (1664, 1704), True, 'import numpy as np\n'), ((1979, 2028), 'numpy.isclose', 'np.isclose', (['self.time', '(0.22)'], {'rtol': '(0.0)', 'atol': '(1e-06)'}), '(self.time, 0.22, rtol=0.0, atol=1e-06)\n', (1989, 2028), True, 'import numpy as np\n'), ((2763, 2812), 'numpy.isclose', 'np.isclose', (['self.time', '(0.02)'], {'rtol': '(0.0)', 'atol': '(1e-06)'}), '(self.time, 0.02, rtol=0.0, atol=1e-06)\n', (2773, 2812), True, 'import numpy as np\n'), ((3087, 3137), 'numpy.isclose', 'np.isclose', (['self.time', '(0.115)'], {'rtol': '(0.0)', 'atol': '(1e-06)'}), '(self.time, 0.115, rtol=0.0, atol=1e-06)\n', (3097, 3137), True, 'import numpy as np\n'), ((3412, 3461), 'numpy.isclose', 'np.isclose', (['self.time', '(0.22)'], {'rtol': '(0.0)', 'atol': '(1e-06)'}), '(self.time, 0.22, rtol=0.0, atol=1e-06)\n', (3422, 3461), True, 'import numpy as np\n'), ((4044, 4093), 'numpy.isclose', 'np.isclose', (['self.time', '(0.02)'], {'rtol': '(0.0)', 'atol': '(1e-06)'}), '(self.time, 0.02, rtol=0.0, atol=1e-06)\n', (4054, 4093), True, 'import numpy as np\n'), ((4367, 4417), 'numpy.isclose', 'np.isclose', (['self.time', '(0.115)'], {'rtol': '(0.0)', 'atol': '(1e-06)'}), '(self.time, 0.115, rtol=0.0, atol=1e-06)\n', (4377, 4417), True, 'import numpy as np\n'), ((4691, 4740), 'numpy.isclose', 'np.isclose', (['self.time', '(0.22)'], {'rtol': '(0.0)', 'atol': '(1e-06)'}), '(self.time, 0.22, rtol=0.0, atol=1e-06)\n', (4701, 4740), True, 'import numpy as np\n'), ((5324, 5373), 'numpy.isclose', 'np.isclose', (['self.time', '(0.02)'], {'rtol': '(0.0)', 'atol': '(1e-06)'}), '(self.time, 0.02, rtol=0.0, atol=1e-06)\n', (5334, 5373), True, 'import numpy as np\n'), ((5646, 5696), 'numpy.isclose', 'np.isclose', (['self.time', '(0.115)'], {'rtol': 
'(0.0)', 'atol': '(1e-06)'}), '(self.time, 0.115, rtol=0.0, atol=1e-06)\n', (5656, 5696), True, 'import numpy as np\n'), ((5971, 6020), 'numpy.isclose', 'np.isclose', (['self.time', '(0.22)'], {'rtol': '(0.0)', 'atol': '(1e-06)'}), '(self.time, 0.22, rtol=0.0, atol=1e-06)\n', (5981, 6020), True, 'import numpy as np\n'), ((6418, 6444), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (6434, 6444), False, 'import os\n'), ((7238, 7264), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (7254, 7264), False, 'import os\n'), ((8056, 8082), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (8072, 8082), False, 'import os\n'), ((8878, 8904), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (8894, 8904), False, 'import os\n')] |
# -*- coding: utf-8 -*-
import pywph as pw
import pywph_vanilla as pw2
import numpy as np
import matplotlib.pyplot as plt
# Compare the dev (pw) and vanilla (pw2) implementations of the WPH operator
# on two 256x256 crops of the same map, with and without normalization.
M, N = 256, 256
J = 6
L = 4
dn = 0
data_ini = np.load('data/I_1.npy')
data = data_ini[:256,:256]
datab = data_ini[256:,256:]
""" Without normalization """
# Version dev
wph_op = pw.WPHOp(M, N, J, L=L, dn=dn, device='cpu')
stats = wph_op(data, padding = True).cpu().numpy()
print(stats.shape, stats)
# Version vanilla
wph_op2 = pw2.WPHOp(M, N, J, L=L, dn=dn, device='cpu')
stats2 = wph_op2(data, padding = True).cpu().numpy()
print(stats2.shape, stats2)
# Comparison (note: `diff` is computed but never used — TODO confirm intent)
diff = (stats2-stats)
""" With normalization """
# Version dev
wph_op = pw.WPHOp(M, N, J, L=L, dn=dn, device='cpu')
stats = wph_op(data, padding = True, norm = 'auto').cpu().numpy()
print(stats.shape, stats)
# Version Vanilla
wph_op2 = pw2.WPHOp(M, N, J, L=L, dn=dn, device='cpu')
stats2 = wph_op2(data, padding = True, norm = 'auto').cpu().numpy()
print(stats2.shape, stats2)
plt.figure()
plt.plot(np.real(stats))
plt.plot(np.real(stats2))
""" Deuxième passage avec normalisation """
# Second pass with normalization, on the second crop (same operator objects,
# so presumably the norm computed on `data` is reused — confirm).
# Version dev
statsb = wph_op(datab, padding = True, norm = 'auto').cpu().numpy()
print(statsb.shape, statsb)
# Version Vanilla
statsb2 = wph_op2(datab, padding = True, norm = 'auto').cpu().numpy()
print(statsb2.shape, statsb2)
plt.figure()
plt.plot(np.real(statsb))
plt.plot(np.real(statsb2))
""" With normalization 2d map """
# Fresh operators, normalization computed directly on the second crop.
# Version dev
wph_op = pw.WPHOp(M, N, J, L=L, dn=dn, device='cpu')
stats = wph_op(datab, padding = True, norm = 'auto').cpu().numpy()
print(stats.shape, stats)
# Version Vanilla
wph_op2 = pw2.WPHOp(M, N, J, L=L, dn=dn, device='cpu')
stats2 = wph_op2(datab, padding = True, norm = 'auto').cpu().numpy()
print(stats2.shape, stats2)
| [
"pywph.WPHOp",
"numpy.real",
"matplotlib.pyplot.figure",
"numpy.load",
"pywph_vanilla.WPHOp"
] | [((171, 194), 'numpy.load', 'np.load', (['"""data/I_1.npy"""'], {}), "('data/I_1.npy')\n", (178, 194), True, 'import numpy as np\n'), ((306, 349), 'pywph.WPHOp', 'pw.WPHOp', (['M', 'N', 'J'], {'L': 'L', 'dn': 'dn', 'device': '"""cpu"""'}), "(M, N, J, L=L, dn=dn, device='cpu')\n", (314, 349), True, 'import pywph as pw\n'), ((457, 501), 'pywph_vanilla.WPHOp', 'pw2.WPHOp', (['M', 'N', 'J'], {'L': 'L', 'dn': 'dn', 'device': '"""cpu"""'}), "(M, N, J, L=L, dn=dn, device='cpu')\n", (466, 501), True, 'import pywph_vanilla as pw2\n'), ((671, 714), 'pywph.WPHOp', 'pw.WPHOp', (['M', 'N', 'J'], {'L': 'L', 'dn': 'dn', 'device': '"""cpu"""'}), "(M, N, J, L=L, dn=dn, device='cpu')\n", (679, 714), True, 'import pywph as pw\n'), ((836, 880), 'pywph_vanilla.WPHOp', 'pw2.WPHOp', (['M', 'N', 'J'], {'L': 'L', 'dn': 'dn', 'device': '"""cpu"""'}), "(M, N, J, L=L, dn=dn, device='cpu')\n", (845, 880), True, 'import pywph_vanilla as pw2\n'), ((978, 990), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (988, 990), True, 'import matplotlib.pyplot as plt\n'), ((1318, 1330), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (1328, 1330), True, 'import matplotlib.pyplot as plt\n'), ((1443, 1486), 'pywph.WPHOp', 'pw.WPHOp', (['M', 'N', 'J'], {'L': 'L', 'dn': 'dn', 'device': '"""cpu"""'}), "(M, N, J, L=L, dn=dn, device='cpu')\n", (1451, 1486), True, 'import pywph as pw\n'), ((1609, 1653), 'pywph_vanilla.WPHOp', 'pw2.WPHOp', (['M', 'N', 'J'], {'L': 'L', 'dn': 'dn', 'device': '"""cpu"""'}), "(M, N, J, L=L, dn=dn, device='cpu')\n", (1618, 1653), True, 'import pywph_vanilla as pw2\n'), ((1000, 1014), 'numpy.real', 'np.real', (['stats'], {}), '(stats)\n', (1007, 1014), True, 'import numpy as np\n'), ((1025, 1040), 'numpy.real', 'np.real', (['stats2'], {}), '(stats2)\n', (1032, 1040), True, 'import numpy as np\n'), ((1340, 1355), 'numpy.real', 'np.real', (['statsb'], {}), '(statsb)\n', (1347, 1355), True, 'import numpy as np\n'), ((1366, 1382), 'numpy.real', 'np.real', 
(['statsb2'], {}), '(statsb2)\n', (1373, 1382), True, 'import numpy as np\n')] |
# Copyright [2018-2020] <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ixian.task import Task
from ixian.config import CONFIG
from ixian_docker.modules.docker.tasks import run, Compose
class Manage(Task):
    """
    Shortcut to Django's ``manage.py`` script.

    Runs ``{PYTHON.BIN} manage.py`` inside the app container; volumes and
    environment variables for loaded modules are supplied automatically via
    docker-compose. All arguments are passed straight through to the script.

    Type `ix manage --help` for it's built-in help.
    """

    name = "manage"
    category = "django"
    short_description = "Django manage.py script."
    depends = ["compose_runtime"]

    def execute(self, *args):
        manage(*args)
MANAGE_CMD = "{PYTHON.BIN} manage.py"
def manage(*args):
"""Shim around `manage.py`"""
return run(MANAGE_CMD, *args)
class Shell(Task):
    """
    Shortcut to Django python shell.

    This shortcut runs within the context of the app container. Volumes and
    environment variables for loaded modules are loaded automatically via
    docker-compose.
    """

    name = "shell"
    category = "django"
    short_description = "open django python shell"
    depends = ["compose_runtime"]

    def execute(self, *args):
        # NOTE(review): this runs `shell_plus` rather than plain `shell`,
        # making it identical to the ShellPlus task — presumably intentional
        # (shell_plus auto-imports models); confirm.
        return manage("shell_plus", *args)
class ShellPlus(Task):
    """
    Shortcut to Django extensions shell_plus.

    Runs inside the app container; docker-compose supplies the volumes and
    environment variables of every loaded module automatically.
    """

    name = "shell_plus"
    category = "django"
    short_description = "open django shell_plus"
    depends = ["compose_runtime"]

    def execute(self, *args):
        command = ("shell_plus",) + args
        return manage(*command)
class DjangoTest(Task):
    """
    Shortcut to Django test runner.

    Runs inside the app container; docker-compose supplies the volumes and
    environment variables of every loaded module automatically.

    The command automatically sets these settings:
     --settings={DJANGO.SETTINGS_TEST}
     --exclude-dir={DJANGO.SETTINGS_MODULE}

    Arguments are passed through to the command.
    """

    name = "django_test"
    category = "testing"
    parent = ["test", "test_py"]
    depends = ["compose_runtime"]
    short_description = "django test runner"

    def execute(self, *args):
        settings_args = (
            "--settings={DJANGO.SETTINGS_TEST}",
            "--exclude-dir={DJANGO.SETTINGS_MODULE}",
        )
        # Default to testing the whole project module when no target given.
        if not args:
            args = [CONFIG.PYTHON.ROOT_MODULE]
        return manage("test", *settings_args, *args)
class Migrate(Task):
    """
    Run django migrations.

    Shortcut to `manage.py migrate`; extra args are passed through.
    """

    name = "migrate"
    category = "django"
    short_description = "run database migrations"
    depends = ["compose_runtime"]

    def execute(self, *args):
        return manage("migrate", *args)
class MakeMigrations(Task):
    """
    Generate missing django migrations. This is a shortcut to
    `manage.py makemigrations`.

    By default this will generate migrations only for {CONFIG.PROJECT_NAME}.
    This is overridden whenever args are passed to this task.
    """

    name = "makemigrations"
    category = "django"
    short_description = "generate missing database migrations"
    depends = ["compose_runtime"]

    def execute(self, *args):
        return manage("makemigrations", *args)
class DBShell(Task):
    """
    Shortcut to `manage.py dbshell`.

    Opens the database's native shell inside the app container.
    """

    name = "dbshell"
    category = "django"
    short_description = "open a database shell"
    depends = ["compose_runtime"]

    def execute(self, *args):
        return manage("dbshell", *args)
class Runserver(Task):
    """
    Shortcut to `manage.py runserver 0.0.0.0:8000`

    This command maps service ports so the server is accessible outside the
    container. Extra args are passed through to `runserver`; when no args are
    given the server binds to the default address `0.0.0.0:8000`.
    """

    name = "runserver"
    category = "django"
    short_description = "start django test server"
    depends = ["compose_runtime"]

    def execute(self, *args):
        return run(MANAGE_CMD, "--service-ports", "runserver", *(args or ["0.0.0.0:8000"]),)
| [
"ixian_docker.modules.docker.tasks.run"
] | [((1420, 1442), 'ixian_docker.modules.docker.tasks.run', 'run', (['MANAGE_CMD', '*args'], {}), '(MANAGE_CMD, *args)\n', (1423, 1442), False, 'from ixian_docker.modules.docker.tasks import run, Compose\n'), ((4736, 4812), 'ixian_docker.modules.docker.tasks.run', 'run', (['MANAGE_CMD', '"""--service-ports"""', '"""runserver"""', "*(args or ['0.0.0.0:8000'])"], {}), "(MANAGE_CMD, '--service-ports', 'runserver', *(args or ['0.0.0.0:8000']))\n", (4739, 4812), False, 'from ixian_docker.modules.docker.tasks import run, Compose\n')] |
'''
Utility methods for running jobs, changing parameters, and sending multiple
jobs at the same time to the dgx server.
'''
__author__ = "<NAME>"
__email__ = "<EMAIL>"
import os
import numpy as np
import sys
import subprocess
JSON_TEMPLATE1 = \
'{\
"jobDefinition": {\
"name": "%s",\
"description": "Processing i3d",\
"clusterId": %d,\
"dockerImage": "nvidian_general/iva:cfosco-action_recognition",\
"jobType": "BATCH",\
"command": "%s",\
"resources": {\
"gpus": %d,\
"systemMemory": %d,\
"cpuCores": %d\
},\
"jobDataLocations": [\
{\
"mountPoint": "/home/cfosco/nfs_share",\
"protocol": "NFSV3",\
"sharePath": "/export/iva_action_recognition.cosmos192",\
"shareHost": "dcg-zfs-04.nvidia.com"\
}\
],\
"portMappings": []\
}\
}'
COMMAND_TEMPLATE1 = \
"cd nfs_share/action_recognition/camilo_workspace/src;\
python3 keras_eval.py \
-eval_ckpt %s \
--dataset %s --model_type %s --split %d --verbose %d --num_classes %d \
--batch_size %d --gpus %d --workers %d --use_mp %d --do_per_class %d --type %s %s "
def evaluate_all_ckpts(ckpt_folder, ckpt_loc, req_str='HMDB51', model_type='i3d', split=1, dataset='HMDB51', num_classes=51, batch_size=5,
                       workers=10, use_mp=0, verbose=1, min_epoch=0, cpus=12, gpus=4, ram=64, cluster_id=425, do_per_class=0, extra_cmd='',
                       classes_to_process=None):
    """Submit one dgx evaluation job for every matching checkpoint.

    Scans *ckpt_folder* for ``.hdf5`` checkpoints whose name contains
    *req_str* and whose epoch (parsed from the ``_ep<N>`` tag) is at least
    *min_epoch*, renders a job-definition JSON per checkpoint and submits
    it with ``dgx job submit``. The temporary JSON files are removed at the
    end.
    """
    if classes_to_process:
        extra_cmd += ' --classes_to_process %s ' % classes_to_process

    ckpts = [f for f in os.listdir(ckpt_folder) if '.hdf5' in f]
    names = []
    for ck in ckpts:
        ep = int(ck.split('_ep')[1].split('_')[0])
        if ep < min_epoch or req_str not in ck:
            continue
        # Determine the input modality from the checkpoint filename.
        if 'flow' in ck:
            input_type = 'flow'
        elif 'rgb' in ck:
            input_type = 'rgb'
        else:
            # Previously an untagged checkpoint reused the modality of an
            # earlier one (or raised NameError on the first); skip it instead.
            print('Checkpoint', ck, 'has no rgb/flow tag. Skipping.')
            continue
        print('Checkpoint', ck, 'fulfills requirements. Starting eval job...')
        cmd = create_command(os.path.join(ckpt_loc, ck), dataset=dataset, model_type=model_type,
                             split=split, verbose=verbose, batch_size=batch_size, num_classes=num_classes,
                             gpus=gpus, workers=workers, use_mp=use_mp, type=input_type,
                             extra_cmd=extra_cmd, do_per_class=do_per_class)
        name = 'ckpt_' + req_str + '_ep' + str(ep) + '_eval'
        json_name = save_json(JSON_TEMPLATE1, cmd, name, cpus=cpus,
                              gpus=gpus, ram=ram, cluster_id=cluster_id)
        out = subprocess.check_output(['dgx', 'job', 'submit', '-f', json_name])
        # check_output returns bytes on Python 3 -- decode before splitting.
        for l in out.decode('utf-8', errors='replace').split('\n'):
            if 'Id:' in l:
                print(l)
                break
        names.append(json_name)

    delete_tmp_jsons(names)
def create_command(ckpt, dataset='HMDB51', model_type='i3d', num_classes=51, split=1, verbose=1, batch_size=5, gpus=4, workers=10, use_mp=0, type='rgb', do_per_class=0, extra_cmd=''):
    """Render the keras_eval shell command for one checkpoint by filling
    COMMAND_TEMPLATE1; `type` is the input modality ('rgb' or 'flow')."""
    return COMMAND_TEMPLATE1 % (ckpt, dataset, model_type, split, verbose, num_classes, batch_size, gpus, workers, use_mp, do_per_class, type, extra_cmd)
def delete_tmp_jsons(names):
    """Best-effort removal of the temporary job-definition files in *names*.

    A file that cannot be removed (already gone, permissions, ...) is
    reported and skipped so the remaining files are still cleaned up.
    """
    for name in names:
        try:
            os.remove(name)
        except OSError as err:
            # Narrowed from `Exception`: only filesystem errors are expected.
            print(err)
            print('Moving on')
def save_json(template, cmd, name='ckpt_eval', cpus=12, gpus=4, ram=64, cluster_id=425):
    """Render *template* with the job parameters and write it to disk.

    The file is written to the current directory under a name derived from
    *name* and the resource counts, and that file name is returned.
    """
    json_str = template % (name, cluster_id, cmd, gpus, ram, cpus)
    json_name = "%s_gpus%d_ram%d_cpus%d" % (name, gpus, ram, cpus)
    # 'w' (truncate-write) instead of the previous 'w+': the file is never
    # read back through this handle.
    with open(json_name, 'w') as f:
        f.write(json_str)
    return json_name
# def train_multiple(init_ckpt, param_dict_to_test):
# for k,v in param_dict_to_test:
# for param_value in v:
# json_name = save_json(ck, TEMPLATE1, )
# subprocess.call(['dgx','job','submit','-f',json_name])
| [
"subprocess.check_output",
"os.listdir",
"os.path.join",
"os.remove"
] | [((1688, 1711), 'os.listdir', 'os.listdir', (['ckpt_folder'], {}), '(ckpt_folder)\n', (1698, 1711), False, 'import os\n'), ((3373, 3385), 'os.remove', 'os.remove', (['n'], {}), '(n)\n', (3382, 3385), False, 'import os\n'), ((2688, 2754), 'subprocess.check_output', 'subprocess.check_output', (["['dgx', 'job', 'submit', '-f', json_name]"], {}), "(['dgx', 'job', 'submit', '-f', json_name])\n", (2711, 2754), False, 'import subprocess\n'), ((2128, 2154), 'os.path.join', 'os.path.join', (['ckpt_loc', 'ck'], {}), '(ckpt_loc, ck)\n', (2140, 2154), False, 'import os\n')] |
# import RPi.GPIO as GPIO
import datetime
class Servo:
    """Tracks a daily activation time and whether the servo is running."""

    def __init__(self, on_time=datetime.time(0, 0, 0)):  # pin_mode = GPIO.BOARD):
        # Remember the full time plus its hour/minute for quick comparison.
        self.on_time = on_time
        self.on_hour = on_time.hour
        self.on_minute = on_time.minute
        self.is_running = False

    def view_config(self):
        """Print the current configuration to stdout."""
        for attr in ('on_time', 'on_hour', 'on_minute', 'is_running'):
            print(f'{attr}: {getattr(self, attr)}')

    def set_is_running(self, status):
        """Record whether the servo is currently running."""
        self.is_running = status

    def should_run_servo(self, current_time):
        """True when current_time matches the on-time and we are idle."""
        at_on_time = (current_time.hour == self.on_hour
                      and current_time.minute == self.on_minute)
        return at_on_time and not self.is_running

    def should_stop_servo(self, current_time):
        """True once the on-minute has passed while the servo is running.

        NOTE(review): only the minute is compared -- presumably the caller
        polls within the same hour; verify.
        """
        return self.is_running and current_time.minute > self.on_minute
"datetime.time"
] | [((85, 107), 'datetime.time', 'datetime.time', (['(0)', '(0)', '(0)'], {}), '(0, 0, 0)\n', (98, 107), False, 'import datetime\n')] |
# -*- coding: UTF-8 -*-
from urllib.parse import urlparse
from flask import Flask, request, abort
app = Flask(__name__)

# Base domain; subdomains of the form <words>.<verb>.tellement.sexy are parsed.
BASE = "tellement.sexy"

# Accent-less verb slugs (usable in a hostname) mapped to their display form.
VERBS = {
    "est": "est",
    "sont": "sont",
    "is": "is",
    "are": "are",
    "cest": "c'est",
    "ete": "été",
    "etre": "être",
    "etait": "était",
    "etais": "étais",
}

# Words whose display form is not a simple capitalization.
ALIASES = {
    "css": "CSS",
    "html": "HTML",
    "javascript": "JavaScript",
    "lisp": "LISP",
    "php": "PHP",
}
def pretty_word(word):
    """Return the display form of *word*: its known alias, else capitalized."""
    return ALIASES.get(word, word.capitalize())
@app.route("/")
def root():
    """Turn a `<words>.<verb>.tellement.sexy` hostname into a compliment.

    Returns 404 when the hostname does not match any known verb suffix.
    """
    # Hostname without an optional :port.
    host = urlparse(request.base_url).netloc.split(":", 1)[0]
    for verb, text in VERBS.items():
        # Look for the rightmost ".<verb>.tellement.sexy" suffix.
        idx = host.rfind(".%s.%s" % (verb, BASE))
        if idx < 0:
            continue
        # Everything before the suffix: dot-separated words forming the name.
        parts = host[:idx].split(".")
        name = " ".join([pretty_word(w) for w in parts])
        return "%s %s tellement sexy ;)" % (name, text)
    abort(404)
if __name__ == "__main__":
    # Development entry point: run Flask's built-in server.
    app.run()
| [
"flask.abort",
"urllib.parse.urlparse",
"flask.Flask"
] | [((106, 121), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (111, 121), False, 'from flask import Flask, request, abort\n'), ((942, 952), 'flask.abort', 'abort', (['(404)'], {}), '(404)\n', (947, 952), False, 'from flask import Flask, request, abort\n'), ((604, 630), 'urllib.parse.urlparse', 'urlparse', (['request.base_url'], {}), '(request.base_url)\n', (612, 630), False, 'from urllib.parse import urlparse\n')] |
from flexinfer.misc import build_from_cfg, registry
def build_converter(cfg):
    """Instantiate the converter described by *cfg* from the global registry."""
    return build_from_cfg(cfg, registry, 'converter')
| [
"flexinfer.misc.build_from_cfg"
] | [((91, 133), 'flexinfer.misc.build_from_cfg', 'build_from_cfg', (['cfg', 'registry', '"""converter"""'], {}), "(cfg, registry, 'converter')\n", (105, 133), False, 'from flexinfer.misc import build_from_cfg, registry\n')] |
from anydoi2cff.main import norm_doi
def test_simple_doi():
    # A full doi.org URL must normalize to the bare DOI, and normalization
    # must be idempotent.
    doi = 'http://doi.org/10.5334/jors.161'
    assert norm_doi(doi) == '10.5334/jors.161'
    assert norm_doi(norm_doi(doi)) == '10.5334/jors.161'
| [
"anydoi2cff.main.norm_doi"
] | [((117, 130), 'anydoi2cff.main.norm_doi', 'norm_doi', (['doi'], {}), '(doi)\n', (125, 130), False, 'from anydoi2cff.main import norm_doi\n'), ((173, 186), 'anydoi2cff.main.norm_doi', 'norm_doi', (['doi'], {}), '(doi)\n', (181, 186), False, 'from anydoi2cff.main import norm_doi\n')] |
#c.execute("CREATE TABLE aud(RollNO text, date integer, starttime integer,endtime integer)")
def check(roll, date, starttime, endtime):
    """Try to book the auditorium for [starttime, endtime) on *date*.

    Hours are whole numbers in 24h format; the auditorium is open from
    9 (9AM) to 24 (midnight). Bookings are stored in the `aud` table of
    `aud.db`. Returns a human-readable status message.
    """
    import sqlite3

    audopen = 9    # opens at 9AM
    audclose = 24  # closes at midnight (12AM)
    start = int(starttime)
    end = int(endtime)

    if start < audopen or end < audopen or start > audclose or end > audclose:
        return "enter a valid time frame, Auditorium functions from 9AM to 12AM"
    if start > end:
        # The requested interval is reversed; message wording fixed (it used
        # to claim the opposite of the actual requirement).
        return "end time should be greater than start time, Please try again"

    conn = sqlite3.connect('aud.db')
    try:
        c = conn.cursor()
        c.execute("SELECT * FROM aud")
        # Collect every hour already booked on this date.
        booked = set()
        for row in c.fetchall():
            if row[1] == date:
                booked.update(range(int(row[2]), int(row[3])))
        if any(hour in booked for hour in range(start, end)):
            return "That slot is already booked please try another one"
        c.execute("INSERT INTO aud VALUES(?,?,?,?)", (roll, date, starttime, endtime))
        conn.commit()
        return "You have booked the slot successfully"
    finally:
        # Previously the connection leaked on the early-return paths.
        conn.close()
| [
"sqlite3.connect"
] | [((178, 203), 'sqlite3.connect', 'sqlite3.connect', (['"""aud.db"""'], {}), "('aud.db')\n", (193, 203), False, 'import sqlite3\n')] |
from django.test import TestCase
from graphql_extensions import types
class CamelJSONTypeTests(TestCase):
    """Tests for the CamelJSON GraphQL scalar type."""

    def test_camel_json(self):
        # Serialization must convert snake_case keys to camelCase.
        json_type = types.CamelJSON()
        self.assertIn('aB', json_type.serialize({'a_b': None}).keys())
"graphql_extensions.types.CamelJSON"
] | [((161, 178), 'graphql_extensions.types.CamelJSON', 'types.CamelJSON', ([], {}), '()\n', (176, 178), False, 'from graphql_extensions import types\n')] |
from pkg_resources import resource_string
import json
# Proto metadata shipped as package data; stays ``None`` when the file is
# missing or unparsable (e.g. in a partial install) -- callers must check.
proto_info = None
try:
    proto_json = resource_string(__name__, 'proto.json')
    proto_info = json.loads(proto_json)
except Exception:
    # Best effort on purpose: absence of proto.json is not fatal here.
    pass
| [
"json.loads",
"pkg_resources.resource_string"
] | [((102, 141), 'pkg_resources.resource_string', 'resource_string', (['__name__', '"""proto.json"""'], {}), "(__name__, 'proto.json')\n", (117, 141), False, 'from pkg_resources import resource_string\n'), ((160, 182), 'json.loads', 'json.loads', (['proto_json'], {}), '(proto_json)\n', (170, 182), False, 'import json\n')] |
import numpy as np
import scipy
import cv2
def get_pixel_neighbors(height, width):
    """
    Estimate the 4 neighbors of every pixel in an image.

    :param height: image height
    :param width: image width
    :return: pixel index - neighbor index lists (flattened row-major indices)
    """
    pix_id = []
    neighbor_id = []
    for i in range(height):
        for j in range(width):
            # Vertical neighbors first (down, up), then horizontal
            # (right, left), keeping only offsets inside the image -- the
            # same order as before, but this also fixes the out-of-range
            # indices previously produced when height == 1 or width == 1.
            for ni, nj in ((i + 1, j), (i - 1, j), (i, j + 1), (i, j - 1)):
                if 0 <= ni < height and 0 <= nj < width:
                    pix_id.append(i * width + j)
                    neighbor_id.append(ni * width + nj)
    return pix_id, neighbor_id
# Skeleton bone list: each row is a pair of keypoint indices connected by a
# bone when drawing a pose (18-keypoint layout -- presumably OpenPose; confirm).
limps = np.array(
    [[0, 1], [1, 2], [2, 3], [3, 4], [1, 5], [5, 6], [6, 7], [1, 11], [11, 12], [12, 13], [1, 8],
     [8, 9], [9, 10], [14, 15], [16, 17], [0, 14], [0, 15], [14, 16], [15, 17]])
def get_instance_skeleton_buffer(h, w, poses):
    """Rasterize every pose skeleton into an (h, w) instance-label buffer.

    Joints with positive confidence are drawn as 2px circles and bones as
    1px lines, using the pose's list index as the pixel value; untouched
    pixels stay -1.

    :param h: buffer height
    :param w: buffer width
    :param poses: list of (n_keypoints, 3) arrays of (x, y, confidence)
    :return: (h, w) float32 array of instance labels (-1 = background)
    """
    output = np.zeros((h, w, 3), dtype=np.float32) - 1
    for i in range(len(poses)):
        keypoints = poses[i]
        lbl = i
        for k in range(limps.shape[0]):
            kp1, kp2 = limps[k, :].astype(int)
            bone_start = keypoints[kp1, :]
            bone_end = keypoints[kp2, :]
            # NOTE: basic slicing returns views, so this clamping mutates the
            # caller's keypoint arrays in place.
            bone_start[0] = np.maximum(np.minimum(bone_start[0], w - 1), 0.)
            bone_start[1] = np.maximum(np.minimum(bone_start[1], h - 1), 0.)

            bone_end[0] = np.maximum(np.minimum(bone_end[0], w - 1), 0.)
            bone_end[1] = np.maximum(np.minimum(bone_end[1], h - 1), 0.)

            # Draw each endpoint only when its confidence is positive.
            if bone_start[2] > 0.0:
                output[int(bone_start[1]), int(bone_start[0])] = 1
                cv2.circle(output, (int(bone_start[0]), int(bone_start[1])), 2, (lbl, 0, 0), -1)

            if bone_end[2] > 0.0:
                output[int(bone_end[1]), int(bone_end[0])] = 1
                cv2.circle(output, (int(bone_end[0]), int(bone_end[1])), 2, (lbl, 0, 0), -1)

            # The bone itself needs both endpoints to be confident.
            if bone_start[2] > 0.0 and bone_end[2] > 0.0:
                cv2.line(output, (int(bone_start[0]), int(bone_start[1])), (int(bone_end[0]), int(bone_end[1])), (lbl, 0, 0), 1)

    return output[:, :, 0]
def get_poseimg_for_opt(sel_pose, poseimg, init_mask, n_bg=50):
    """Build a scribble image for mask optimization from a pose-label image.

    Pixels of the selected pose get label 2, pixels of any other pose get 0,
    and *n_bg* random points sampled outside a dilated version of
    *init_mask* get label 1 (background); everything else is -1.

    NOTE: mutates *init_mask* in place (selected-pose pixels are set to 1)
    and draws from numpy's global random state.
    """
    h, w = init_mask.shape[:2]
    bg_label = 1
    output = np.zeros((h, w, 3), dtype=np.float32) - 1
    II, JJ = (poseimg > 0).nonzero()
    Isel, J_sel = (poseimg == sel_pose).nonzero()

    output[II, JJ] = 0
    output[Isel, J_sel] = 2
    init_mask[Isel, J_sel] = 1
    # Sample also from points in the field
    init_mask = cv2.dilate(init_mask, np.ones((25, 25), np.uint8), iterations=1)
    I_bg, J_bg = (init_mask == 0).nonzero()
    rand_index = np.random.permutation(len(I_bg))[:n_bg]
    bg_points = np.array([J_bg[rand_index], I_bg[rand_index]]).T

    for k in range(bg_points.shape[0]):
        cv2.circle(output, (int(bg_points[k, 0]), int(bg_points[k, 1])), 2, (bg_label, 0, 0), -1)

    return output[:, :, 0]
def draw_poses_for_optimization(sel_pose, keypoints_list, init_mask, n_bg=50):
    """Draw pose skeletons as optimization scribbles.

    The selected pose is drawn with label 2, all other poses with label 1,
    and *n_bg* random background points (outside the dilated *init_mask*)
    with label 0; untouched pixels stay -1.

    NOTE: keypoint clamping writes through numpy views, mutating the arrays
    in *keypoints_list*; sampling uses numpy's global random state.
    """
    h, w = init_mask.shape[:2]
    bg_label = 0
    output = np.zeros((h, w, 3), dtype=np.float32)-1

    for i in range(len(keypoints_list)):
        keypoints = keypoints_list[i]
        if i == sel_pose:
            lbl = 2
        else:
            lbl = 1
        for k in range(limps.shape[0]):
            kp1, kp2 = limps[k, :].astype(int)
            bone_start = keypoints[kp1, :]
            bone_end = keypoints[kp2, :]
            # Clamp endpoints into the image (mutates keypoints in place).
            bone_start[0] = np.maximum(np.minimum(bone_start[0], w - 1), 0.)
            bone_start[1] = np.maximum(np.minimum(bone_start[1], h - 1), 0.)

            bone_end[0] = np.maximum(np.minimum(bone_end[0], w - 1), 0.)
            bone_end[1] = np.maximum(np.minimum(bone_end[1], h - 1), 0.)

            # Endpoints are drawn only when their confidence is positive.
            if bone_start[2] > 0.0:
                output[int(bone_start[1]), int(bone_start[0])] = 1
                cv2.circle(output, (int(bone_start[0]), int(bone_start[1])), 2, (lbl, 0, 0), -1)

            if bone_end[2] > 0.0:
                output[int(bone_end[1]), int(bone_end[0])] = 1
                cv2.circle(output, (int(bone_end[0]), int(bone_end[1])), 2, (lbl, 0, 0), -1)

            # The bone itself requires both endpoints to be confident.
            if bone_start[2] > 0.0 and bone_end[2] > 0.0:
                cv2.line(output, (int(bone_start[0]), int(bone_start[1])), (int(bone_end[0]), int(bone_end[1])), (lbl, 0, 0), 1)

    # Draw circles for the bg players keypoints
    # for k in range(bg_keypoints.shape[0]):
    #     cv2.circle(output, (int(bg_keypoints[k, 0]), int(bg_keypoints[k, 1])), 2, (bg_keypoint_lable, 0, 0), -1)

    # Sample also from points in the field
    init_mask = cv2.dilate(init_mask, np.ones((5, 5), np.uint8), iterations=1)
    I_bg, J_bg = (init_mask == 0).nonzero()
    rand_index = np.random.permutation(len(I_bg))[:n_bg]
    bg_points = np.array([J_bg[rand_index], I_bg[rand_index]]).T

    for k in range(bg_points.shape[0]):
        cv2.circle(output, (int(bg_points[k, 0]), int(bg_points[k, 1])), 2, (bg_label, 0, 0), -1)

    return output[:, :, 0]
def set_U(strokes, h, w, dim):
    """Build the user-constraint system for scribble propagation.

    Each stroke row is (x, y, value_0, ..., value_{dim-1}). ``U`` is an
    (h*w, h*w) sparse selector with a 1 on the diagonal of every stroked
    pixel, and ``y`` holds the per-pixel target values.
    """
    n_pixels = h * w
    y = np.zeros((n_pixels, dim))
    U = scipy.sparse.lil_matrix((n_pixels, n_pixels))
    for stroke in strokes:
        # Column 0 is x (image column), column 1 is y (image row).
        flat = int(stroke[1] * w + stroke[0])
        y[flat, :] = stroke[2:2 + dim]
        U[flat, flat] = 1
    return U, y
def set_DW(image, edges=None, sigma1=1000., sigma2=0.01):
    """Build the pairwise difference operator D and edge weight matrix W.

    D maps pixel values to (pixel - neighbor) differences over all 4-neighbor
    pairs; W is the diagonal matrix of per-pair weights.

    :param image: (h, w, c) image; neighbor color differences drive the
        fallback weights.
    :param edges: optional (h, w) edge-strength map. When given, the weights
        are exp(-edges^2 / sigma2); when None, the color-difference weights
        exp(-||dI||^2 / sigma1) are used. (Previously ``edges=None`` raised
        a TypeError even though the color weights were already computed.)
    """
    image = image.astype(float)
    h, w = image.shape[0:2]
    N = h * w
    pixd, neighborid = get_pixel_neighbors(h, w)
    i, j = np.unravel_index(pixd, (h, w))
    ii, jj = np.unravel_index(neighborid, (h, w))

    # Squared color difference between each pixel and its neighbor.
    pix_diff = np.squeeze((image[i, j, :] - image[ii, jj, :]) ** 2)
    if len(pix_diff.shape) == 1:
        pix_diff = pix_diff[:, np.newaxis]
    weight0 = np.exp(-(np.sum(pix_diff, axis=1)) / sigma1)
    if edges is None:
        weights = weight0
    else:
        weights = np.exp(-((edges[i, j]) ** 2) / sigma2)

    M = len(pixd)
    D = scipy.sparse.lil_matrix((M, N))
    W = scipy.sparse.lil_matrix((M, M))
    p = np.arange(0, M, 1)
    D[p, pixd] = 1
    D[p, neighborid] = -1
    W[p, p] = weights
    return D, W
| [
"scipy.sparse.lil_matrix",
"numpy.ones",
"numpy.minimum",
"numpy.squeeze",
"numpy.exp",
"numpy.array",
"numpy.zeros",
"numpy.sum",
"numpy.unravel_index",
"numpy.arange"
] | [((990, 1177), 'numpy.array', 'np.array', (['[[0, 1], [1, 2], [2, 3], [3, 4], [1, 5], [5, 6], [6, 7], [1, 11], [11, 12],\n [12, 13], [1, 8], [8, 9], [9, 10], [14, 15], [16, 17], [0, 14], [0, 15],\n [14, 16], [15, 17]]'], {}), '([[0, 1], [1, 2], [2, 3], [3, 4], [1, 5], [5, 6], [6, 7], [1, 11],\n [11, 12], [12, 13], [1, 8], [8, 9], [9, 10], [14, 15], [16, 17], [0, 14\n ], [0, 15], [14, 16], [15, 17]])\n', (998, 1177), True, 'import numpy as np\n'), ((5362, 5380), 'numpy.zeros', 'np.zeros', (['(N, dim)'], {}), '((N, dim))\n', (5370, 5380), True, 'import numpy as np\n'), ((5390, 5421), 'scipy.sparse.lil_matrix', 'scipy.sparse.lil_matrix', (['(N, N)'], {}), '((N, N))\n', (5413, 5421), False, 'import scipy\n'), ((5858, 5888), 'numpy.unravel_index', 'np.unravel_index', (['pixd', '(h, w)'], {}), '(pixd, (h, w))\n', (5874, 5888), True, 'import numpy as np\n'), ((5902, 5938), 'numpy.unravel_index', 'np.unravel_index', (['neighborid', '(h, w)'], {}), '(neighborid, (h, w))\n', (5918, 5938), True, 'import numpy as np\n'), ((5955, 6007), 'numpy.squeeze', 'np.squeeze', (['((image[i, j, :] - image[ii, jj, :]) ** 2)'], {}), '((image[i, j, :] - image[ii, jj, :]) ** 2)\n', (5965, 6007), True, 'import numpy as np\n'), ((6157, 6191), 'numpy.exp', 'np.exp', (['(-edges[i, j] ** 2 / sigma2)'], {}), '(-edges[i, j] ** 2 / sigma2)\n', (6163, 6191), True, 'import numpy as np\n'), ((6288, 6319), 'scipy.sparse.lil_matrix', 'scipy.sparse.lil_matrix', (['(M, N)'], {}), '((M, N))\n', (6311, 6319), False, 'import scipy\n'), ((6328, 6359), 'scipy.sparse.lil_matrix', 'scipy.sparse.lil_matrix', (['(M, M)'], {}), '((M, M))\n', (6351, 6359), False, 'import scipy\n'), ((6369, 6387), 'numpy.arange', 'np.arange', (['(0)', 'M', '(1)'], {}), '(0, M, 1)\n', (6378, 6387), True, 'import numpy as np\n'), ((1249, 1286), 'numpy.zeros', 'np.zeros', (['(h, w, 3)'], {'dtype': 'np.float32'}), '((h, w, 3), dtype=np.float32)\n', (1257, 1286), True, 'import numpy as np\n'), ((2577, 2614), 'numpy.zeros', 'np.zeros', 
(['(h, w, 3)'], {'dtype': 'np.float32'}), '((h, w, 3), dtype=np.float32)\n', (2585, 2614), True, 'import numpy as np\n'), ((2871, 2898), 'numpy.ones', 'np.ones', (['(25, 25)', 'np.uint8'], {}), '((25, 25), np.uint8)\n', (2878, 2898), True, 'import numpy as np\n'), ((3032, 3078), 'numpy.array', 'np.array', (['[J_bg[rand_index], I_bg[rand_index]]'], {}), '([J_bg[rand_index], I_bg[rand_index]])\n', (3040, 3078), True, 'import numpy as np\n'), ((3391, 3428), 'numpy.zeros', 'np.zeros', (['(h, w, 3)'], {'dtype': 'np.float32'}), '((h, w, 3), dtype=np.float32)\n', (3399, 3428), True, 'import numpy as np\n'), ((4934, 4959), 'numpy.ones', 'np.ones', (['(5, 5)', 'np.uint8'], {}), '((5, 5), np.uint8)\n', (4941, 4959), True, 'import numpy as np\n'), ((5093, 5139), 'numpy.array', 'np.array', (['[J_bg[rand_index], I_bg[rand_index]]'], {}), '([J_bg[rand_index], I_bg[rand_index]])\n', (5101, 5139), True, 'import numpy as np\n'), ((1579, 1611), 'numpy.minimum', 'np.minimum', (['bone_start[0]', '(w - 1)'], {}), '(bone_start[0], w - 1)\n', (1589, 1611), True, 'import numpy as np\n'), ((1656, 1688), 'numpy.minimum', 'np.minimum', (['bone_start[1]', '(h - 1)'], {}), '(bone_start[1], h - 1)\n', (1666, 1688), True, 'import numpy as np\n'), ((1732, 1762), 'numpy.minimum', 'np.minimum', (['bone_end[0]', '(w - 1)'], {}), '(bone_end[0], w - 1)\n', (1742, 1762), True, 'import numpy as np\n'), ((1805, 1835), 'numpy.minimum', 'np.minimum', (['bone_end[1]', '(h - 1)'], {}), '(bone_end[1], h - 1)\n', (1815, 1835), True, 'import numpy as np\n'), ((3801, 3833), 'numpy.minimum', 'np.minimum', (['bone_start[0]', '(w - 1)'], {}), '(bone_start[0], w - 1)\n', (3811, 3833), True, 'import numpy as np\n'), ((3878, 3910), 'numpy.minimum', 'np.minimum', (['bone_start[1]', '(h - 1)'], {}), '(bone_start[1], h - 1)\n', (3888, 3910), True, 'import numpy as np\n'), ((3954, 3984), 'numpy.minimum', 'np.minimum', (['bone_end[0]', '(w - 1)'], {}), '(bone_end[0], w - 1)\n', (3964, 3984), True, 'import numpy as np\n'), 
((4027, 4057), 'numpy.minimum', 'np.minimum', (['bone_end[1]', '(h - 1)'], {}), '(bone_end[1], h - 1)\n', (4037, 4057), True, 'import numpy as np\n'), ((6107, 6131), 'numpy.sum', 'np.sum', (['pix_diff'], {'axis': '(1)'}), '(pix_diff, axis=1)\n', (6113, 6131), True, 'import numpy as np\n')] |
import math
import torch
import numpy as np
import pandas as pd
import torch.nn as nn
def normal_pdf(x):
    """Standard normal density evaluated element-wise on tensor *x*."""
    # The redundant function-local `import math` was removed: math is
    # already imported at module level.
    return torch.exp(-0.5 * x**2) / math.sqrt(2 * math.pi)
def normal_cdf(y, h=0.01, tau=0.5):
    """Smoothed estimate of P(y > tau) averaged over the batch *y*.

    Uses the second-order exponential approximation of the Gaussian
    Q-function by López-Benítez & Casadevall (2011) together with the
    identity Q(x) = 1 - Q(-x).
    """
    def q_approx(x):
        return torch.exp(-0.4920 * x**2 - 0.2887 * x - 1.1893)

    z = (tau - y) / h
    positive = torch.sum(q_approx(z[z > 0]))
    negative = torch.sum(1 - q_approx(torch.abs(z[z < 0])))
    ties = 0.5 * len(z[z == 0])
    return (positive + negative + ties) / len(y)
def Huber_loss(x, delta):
    """Huber penalty: quadratic inside [-delta, delta], linear outside."""
    if abs(x) < delta:
        return 0.5 * x ** 2
    return delta * (x.abs() - delta / 2)
def Huber_loss_derivative(x, delta):
    """Derivative of the Huber penalty: *x* clipped to [-delta, delta]."""
    if x < -delta:
        return -delta
    if x > delta:
        return delta
    return x
def get_fairness_metrics(Y, Z, Ytilde, n_classes, n_sensitive_attrs):
    """Sum of demographic-parity (DDP) and equalized-odds (DEO) gaps.

    For every class / sensitive-group pair, accumulate the absolute gap
    between the group-conditional and the marginal prediction rate (DDP),
    and the same conditioned on the true label (DEO).
    """
    ddp_total = 0
    deo_total = 0
    for y in range(n_classes):
        pred_y = Ytilde == y
        correct_y = np.logical_and(pred_y, Y == y)
        base_dp = pred_y.mean()
        base_eo = correct_y.mean() / (Y == y).mean()
        for z in range(n_sensitive_attrs):
            in_group = Z == z
            group_dp = np.logical_and(pred_y, in_group).mean() / in_group.mean()
            group_eo = np.logical_and(correct_y, in_group).mean() / np.logical_and(Y == y, in_group).mean()
            ddp_total += abs(group_dp - base_dp)
            deo_total += abs(group_eo - base_eo)
    return ddp_total, deo_total
return DDP, DEO
class BCELossAccuracy():
    """Bundles binary cross-entropy loss with thresholded accuracy."""

    def __init__(self):
        self.loss_function = nn.BCELoss()

    @staticmethod
    def accuracy(y_hat, labels):
        """Fraction of probabilities that round (at 0.5) to the labels."""
        with torch.no_grad():
            predictions = (y_hat > 0.5).int()
            return (predictions == labels.int()).float().mean().item()

    def __call__(self, y_hat, labels):
        """Return (BCE loss, accuracy) for a batch of probabilities."""
        return self.loss_function(y_hat, labels), self.accuracy(y_hat, labels)
#
class CELossAccuracy():
def __init__(self):
self.loss_function = nn.CrossEntropyLoss()
@staticmethod
def accuracy(y_hat, labels):
with torch.no_grad():
y_tilde = y_hat.argmax(axis=1)
accuracy = (y_tilde == labels).float().mean().item()
return accuracy
def __call__(self, y_hat, labels):
loss = self.loss_function(y_hat, labels)
accuracy = self.accuracy(y_hat, labels)
return loss, accuracy
#
class FairnessLoss():
    """Differentiable surrogate for group-fairness penalties.

    Uses the kernel-smoothed CDF estimate (``normal_cdf``) of the classifier
    scores and a Huber penalty on the group-vs-population probability gaps.
    For each gap it accumulates both a hand-derived gradient surrogate
    (``backward_loss``) and the Huber penalty value itself (``logging_loss``).

    :param h: bandwidth of the smoothed CDF.
    :param tau: decision threshold (forced to 0.5 for > 2 classes).
    :param delta: Huber transition point.
    :param notion: fairness notion, 'DP' (demographic parity) or
        'EO' (equalized odds).
    :param n_classes: number of target classes.
    :param n_sensitive_attrs: number of sensitive-attribute values.
    :param sensitive_attrs: iterable of sensitive-attribute values to
        iterate over.
    """

    def __init__(self, h, tau, delta, notion, n_classes, n_sensitive_attrs, sensitive_attrs):
        self.h = h
        self.tau = tau
        self.delta = delta
        self.fairness_notion = notion
        self.n_classes = n_classes
        self.n_sensitive_attrs = n_sensitive_attrs
        self.sensitive_attrs = sensitive_attrs

        # Multi-class scores are per-class probabilities, so threshold at 0.5.
        if self.n_classes > 2:
            self.tau = 0.5

        assert self.fairness_notion in ['DP', 'EO']

    def DDP_loss(self, y_hat, Z):
        """Demographic-parity surrogate.

        :param y_hat: classifier scores; shape (m,) for binary or
            (m, n_classes) for multi-class.
        :param Z: per-sample sensitive-attribute values, shape (m,).
        :return: (backward_loss, logging_loss).
        """
        m = y_hat.shape[0]
        backward_loss = 0
        logging_loss = 0

        if self.n_classes == 2:
            Pr_Ytilde1 = normal_cdf(y_hat.detach(), self.h, self.tau)
            for z in self.sensitive_attrs:
                Pr_Ytilde1_Z = normal_cdf(y_hat.detach()[Z==z], self.h, self.tau)
                m_z = Z[Z==z].shape[0]

                # Gap between the group's and the population's smoothed
                # positive-prediction rate.
                Prob_diff_Z = Pr_Ytilde1_Z - Pr_Ytilde1
                # Hand-derived gradient surrogate: derivative of the smoothed
                # rates w.r.t. y_hat, re-attached to the graph via the
                # undetached y_hat factors.
                _dummy = \
                    torch.dot(
                        normal_pdf((self.tau - y_hat.detach()[Z==z]) / self.h).view(-1),
                        y_hat[Z==z].view(-1)
                    ) / (self.h * m_z) -\
                    torch.dot(
                        normal_pdf((self.tau - y_hat.detach()) / self.h).view(-1),
                        y_hat.view(-1)
                    ) / (self.h * m)
                _dummy *= Huber_loss_derivative(Prob_diff_Z, self.delta)

                backward_loss += _dummy
                logging_loss += Huber_loss(Prob_diff_Z, self.delta)
        else:
            # One-vs-rest over every class (or only column 0 for the
            # two-column binary case).
            idx_set = list(range(self.n_classes)) if self.n_classes > 2 else [0]
            for y in idx_set:
                Pr_Ytilde1 = normal_cdf(y_hat[:,y].detach(), self.h, self.tau)
                for z in self.sensitive_attrs:
                    Pr_Ytilde1_Z = normal_cdf(y_hat[:,y].detach()[Z==z], self.h, self.tau)
                    m_z = Z[Z==z].shape[0]

                    Prob_diff_Z = Pr_Ytilde1_Z - Pr_Ytilde1
                    _dummy = Huber_loss_derivative(Prob_diff_Z, self.delta)
                    _dummy *= \
                        torch.dot(
                            normal_pdf((self.tau - y_hat[:,y].detach()[Z==z]) / self.h).view(-1),
                            y_hat[:,y][Z==z].view(-1)
                        ) / (self.h * m_z) -\
                        torch.dot(
                            normal_pdf((self.tau - y_hat[:,y].detach()) / self.h).view(-1),
                            y_hat[:,y].view(-1)
                        ) / (self.h * m)

                    backward_loss += _dummy
                    logging_loss += Huber_loss(Prob_diff_Z, self.delta).item()

        return backward_loss, logging_loss

    def DEO_loss(self, y_hat, Y, Z):
        """Equalized-odds surrogate: DDP-style gaps conditioned on the
        true label Y.

        :param y_hat: classifier scores; shape (m,) for binary or
            (m, n_classes) for multi-class.
        :param Y: true labels, shape (m,).
        :param Z: per-sample sensitive-attribute values, shape (m,).
        :return: (backward_loss, logging_loss).
        """
        backward_loss = 0
        logging_loss = 0

        if self.n_classes == 2:
            for y in [0,1]:
                Pr_Ytilde1_Y = normal_cdf(y_hat[Y==y].detach(), self.h, self.tau)
                m_y = (Y==y).sum().item()
                for z in self.sensitive_attrs:
                    Pr_Ytilde1_YZ = normal_cdf(y_hat[torch.logical_and(Y==y, Z==z)].detach(), self.h, self.tau)
                    m_zy = torch.logical_and(Y==y, Z==z).sum().item()

                    # Gap between the (label, group)-conditional rate and the
                    # label-conditional rate.
                    Prob_diff_Z = Pr_Ytilde1_YZ - Pr_Ytilde1_Y
                    _dummy = Huber_loss_derivative(Prob_diff_Z, self.delta)
                    _dummy *= \
                        torch.dot(
                            normal_pdf((self.tau - y_hat[torch.logical_and(Y==y, Z==z)].detach()) / self.h).view(-1),
                            y_hat[torch.logical_and(Y==y, Z==z)].view(-1)
                        ) / (self.h * m_zy) -\
                        torch.dot(
                            normal_pdf((self.tau - y_hat[Y==y].detach()) / self.h).view(-1),
                            y_hat[Y==y].view(-1)
                        ) / (self.h * m_y)

                    backward_loss += _dummy
                    logging_loss += Huber_loss(Prob_diff_Z, self.delta).item()
        else:
            for y in range(self.n_classes):
                Pr_Ytilde1_Y = normal_cdf(y_hat[:,y][Y==y].detach(), self.h, self.tau)
                m_y = (Y==y).sum().item()
                for z in self.sensitive_attrs:
                    Pr_Ytilde1_YZ = normal_cdf(y_hat[:,y][torch.logical_and(Y==y, Z==z)].detach(), self.h, self.tau)
                    m_zy = torch.logical_and(Y==y, Z==z).sum().item()

                    Prob_diff_Z = Pr_Ytilde1_YZ - Pr_Ytilde1_Y
                    _dummy = Huber_loss_derivative(Prob_diff_Z, self.delta)
                    _dummy *= \
                        torch.dot(
                            normal_pdf((self.tau - y_hat[:,y][torch.logical_and(Y==y, Z==z)].detach()) / self.h).view(-1),
                            y_hat[:,y][torch.logical_and(Y==y, Z==z)].view(-1)
                        ) / (self.h * m_zy) -\
                        torch.dot(
                            normal_pdf((self.tau - y_hat[:,y][Y==y].detach()) / self.h).view(-1),
                            y_hat[:,y][Y==y].view(-1)
                        ) / (self.h * m_y)

                    backward_loss += _dummy
                    logging_loss += Huber_loss(Prob_diff_Z, self.delta).item()

        return backward_loss, logging_loss

    def __call__(self, y_hat, Y, Z):
        """Dispatch to the configured fairness notion ('DP' or 'EO')."""
        if self.fairness_notion == 'DP':
            return self.DDP_loss(y_hat, Z)
        else:
            return self.DEO_loss(y_hat, Y, Z)
"torch.abs",
"torch.nn.CrossEntropyLoss",
"numpy.logical_and",
"math.sqrt",
"torch.exp",
"torch.nn.BCELoss",
"torch.no_grad",
"torch.logical_and"
] | [((134, 158), 'torch.exp', 'torch.exp', (['(-0.5 * x ** 2)'], {}), '(-0.5 * x ** 2)\n', (143, 158), False, 'import torch\n'), ((159, 181), 'math.sqrt', 'math.sqrt', (['(2 * math.pi)'], {}), '(2 * math.pi)\n', (168, 181), False, 'import math\n'), ((388, 436), 'torch.exp', 'torch.exp', (['(-0.492 * x ** 2 - 0.2887 * x - 1.1893)'], {}), '(-0.492 * x ** 2 - 0.2887 * x - 1.1893)\n', (397, 436), False, 'import torch\n'), ((1111, 1146), 'numpy.logical_and', 'np.logical_and', (['(Ytilde == y)', '(Y == y)'], {}), '(Ytilde == y, Y == y)\n', (1125, 1146), True, 'import numpy as np\n'), ((1531, 1543), 'torch.nn.BCELoss', 'nn.BCELoss', ([], {}), '()\n', (1541, 1543), True, 'import torch.nn as nn\n'), ((2018, 2039), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {}), '()\n', (2037, 2039), True, 'import torch.nn as nn\n'), ((1613, 1628), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1626, 1628), False, 'import torch\n'), ((2109, 2124), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (2122, 2124), False, 'import torch\n'), ((559, 590), 'torch.abs', 'torch.abs', (['y_prime[y_prime < 0]'], {}), '(y_prime[y_prime < 0])\n', (568, 590), False, 'import torch\n'), ((1209, 1244), 'numpy.logical_and', 'np.logical_and', (['(Ytilde == y)', '(Z == z)'], {}), '(Ytilde == y, Z == z)\n', (1223, 1244), True, 'import numpy as np\n'), ((1302, 1344), 'numpy.logical_and', 'np.logical_and', (['Ytilde_y_given_Y_y', '(Z == z)'], {}), '(Ytilde_y_given_Y_y, Z == z)\n', (1316, 1344), True, 'import numpy as np\n'), ((1352, 1382), 'numpy.logical_and', 'np.logical_and', (['(Y == y)', '(Z == z)'], {}), '(Y == y, Z == z)\n', (1366, 1382), True, 'import numpy as np\n'), ((5614, 5647), 'torch.logical_and', 'torch.logical_and', (['(Y == y)', '(Z == z)'], {}), '(Y == y, Z == z)\n', (5631, 5647), False, 'import torch\n'), ((5700, 5733), 'torch.logical_and', 'torch.logical_and', (['(Y == y)', '(Z == z)'], {}), '(Y == y, Z == z)\n', (5717, 5733), False, 'import torch\n'), ((6827, 6860), 
'torch.logical_and', 'torch.logical_and', (['(Y == y)', '(Z == z)'], {}), '(Y == y, Z == z)\n', (6844, 6860), False, 'import torch\n'), ((6913, 6946), 'torch.logical_and', 'torch.logical_and', (['(Y == y)', '(Z == z)'], {}), '(Y == y, Z == z)\n', (6930, 6946), False, 'import torch\n'), ((6103, 6136), 'torch.logical_and', 'torch.logical_and', (['(Y == y)', '(Z == z)'], {}), '(Y == y, Z == z)\n', (6120, 6136), False, 'import torch\n'), ((7326, 7359), 'torch.logical_and', 'torch.logical_and', (['(Y == y)', '(Z == z)'], {}), '(Y == y, Z == z)\n', (7343, 7359), False, 'import torch\n'), ((6007, 6040), 'torch.logical_and', 'torch.logical_and', (['(Y == y)', '(Z == z)'], {}), '(Y == y, Z == z)\n', (6024, 6040), False, 'import torch\n'), ((7225, 7258), 'torch.logical_and', 'torch.logical_and', (['(Y == y)', '(Z == z)'], {}), '(Y == y, Z == z)\n', (7242, 7258), False, 'import torch\n')] |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
"""Functions that handle saving and loading of checkpoints."""
import os
import pickle
from collections import OrderedDict
import torch
from fvcore.common.file_io import PathManager
import net.utils.logging_tool as logging
# from slowfast.utils.c2_model_loading import get_name_convert_func
logger = logging.get_logger(__name__)
def make_checkpoint_dir(path_to_job):
    """
    Creates the checkpoint directory (if not present already).
    Args:
        path_to_job (string): the path to the folder of the current job.
    Returns:
        string: path of the checkpoint directory.
    """
    checkpoint_dir = os.path.join(path_to_job, "checkpoints")
    # Create the checkpoint dir from the master process
    if not PathManager.exists(checkpoint_dir):
        try:
            PathManager.mkdirs(checkpoint_dir)
        except Exception:
            # Best effort: creation may race with another process.
            pass
    return checkpoint_dir
def get_checkpoint_dir(path_to_job):
    """Return the path of the checkpoint folder for a job.

    Args:
        path_to_job (string): the path to the folder of the current job.
    """
    subdir = "checkpoints"
    return os.path.join(path_to_job, subdir)
def get_path_to_checkpoint(path_to_job, model_name, epoch):
    """Return the full path of a checkpoint file.

    Args:
        path_to_job (string): the path to the folder of the current job.
        model_name: model identifier embedded in the name (e.g. G or D).
        epoch (int): the number of epoch for the checkpoint.
    """
    file_name = "checkpoint_epoch_{}_{:05d}.pyth".format(model_name, epoch)
    # Equivalent to joining get_checkpoint_dir(path_to_job) with file_name.
    return os.path.join(path_to_job, "checkpoints", file_name)
def get_last_checkpoint(path_to_job):
    """Return the most recent checkpoint file for a job.

    Args:
        path_to_job (string): the path to the folder of the current job.
    """
    ckpt_dir = get_checkpoint_dir(path_to_job)
    entries = PathManager.ls(ckpt_dir) if PathManager.exists(ckpt_dir) else []
    candidates = [f for f in entries if "checkpoint" in f]
    assert len(candidates), "No checkpoints found in '{}'.".format(ckpt_dir)
    # File names embed a zero-padded epoch, so lexicographic order follows epochs.
    latest = sorted(candidates)[-1]
    return os.path.join(ckpt_dir, latest)
def has_checkpoint(path_to_job):
    """Return True if the job's checkpoint folder contains a checkpoint.

    Args:
        path_to_job (string): the path to the folder of the current job.
    """
    ckpt_dir = get_checkpoint_dir(path_to_job)
    entries = PathManager.ls(ckpt_dir) if PathManager.exists(ckpt_dir) else []
    for name in entries:
        if "checkpoint" in name:
            return True
    return False
def is_checkpoint_epoch(cur_epoch, checkpoint_period):
    """Return True when the current epoch should be checkpointed.

    Args:
        cur_epoch (int): current number of epoch of the model (0-based).
        checkpoint_period (int): the frequency of checkpointing.
    """
    completed_epochs = cur_epoch + 1
    return completed_epochs % checkpoint_period == 0
def save_checkpoint(path_to_job, model, model_name, optimizer, epoch, cfg):
    """Write a training checkpoint to disk and return its path.

    Args:
        path_to_job: job folder that contains the checkpoints directory.
        model (model): model to save the weight to the checkpoint.
        model_name: identifier embedded in the file name (G or D).
        optimizer (optim): optimizer to save the historical state.
        epoch (int): current number of epoch of the model.
        cfg (CfgNode): configs to save.

    Returns:
        string: path of the checkpoint file that was written.
    """
    # Ensure that the checkpoint dir exists.
    PathManager.mkdirs(get_checkpoint_dir(path_to_job))
    # Strip the DDP wrapper in the multi-gpu setting.
    if cfg.NUM_GPUS > 1:
        state = model.module.state_dict()
    else:
        state = model.state_dict()
    payload = {
        "epoch": epoch,
        "model_state": state,
        "optimizer_state": optimizer.state_dict(),
        "cfg": cfg.dump(),
    }
    # Checkpoint file is named after the *next* epoch number.
    out_path = get_path_to_checkpoint(path_to_job, model_name, epoch + 1)
    with PathManager.open(out_path, "wb") as f:
        torch.save(payload, f)
    return out_path
def load_checkpoint(
    path_to_checkpoint,
    model,
    data_parallel=True,
    optimizer=None,
    inflation=False,
    convert_from_caffe2=False,
):
    """
    Load the checkpoint from the given file. If inflation is True, inflate the
    2D Conv weights from the checkpoint to 3D Conv.
    Args:
        path_to_checkpoint (string): path to the checkpoint to load.
        model (model): model to load the weights from the checkpoint.
        data_parallel (bool): if true, model is wrapped by
            torch.nn.parallel.DistributedDataParallel.
        optimizer (optim): optimizer to load the historical state.
        inflation (bool): if True, inflate the weights from the checkpoint.
            NOTE(review): currently unused — the inflation code below is
            commented out, so this flag has no effect.
        convert_from_caffe2 (bool): if True, load the model from caffe2 and
            convert it to pytorch. NOTE(review): currently unused.
    Returns:
        (int): the number of training epoch of the checkpoint, or -1 when the
        checkpoint does not record an epoch.
    change: more conv layer above
    """
    assert PathManager.exists(
        path_to_checkpoint
    ), "Checkpoint '{}' not found".format(path_to_checkpoint)
    # Account for the DDP wrapper in the multi-gpu setting.
    ms = model.module if data_parallel else model
    # print("ms type ",type(ms))
    # print("convert_from_caffe2=",convert_from_caffe2)
    # Load the checkpoint on CPU to avoid GPU mem spike.
    # checkpoint name =path_to_checkpoint.split("/")[-1]
    with PathManager.open(path_to_checkpoint, "rb") as f:
        checkpoint = torch.load(f, map_location="cpu") # load checkpoint
    print("type checkpoint",type(checkpoint))
    # if inflation:
    #     # Try to inflate the model.
    #     model_state_dict_3d = (
    #         model.module.state_dict()
    #         if data_parallel
    #         else model.state_dict()
    #     )
    #     inflated_model_dict = inflate_weight(
    #         checkpoint["model_state"], model_state_dict_3d
    #     )
    #     ms.load_state_dict(inflated_model_dict, strict=False)
    """
    pretrained_dict=torch.load(model_weight)
    model_dict=myNet.state_dict()
    # 1. filter out unnecessary keys
    pretrained_dict = {k: v for k, v in pretrained_dict.items() if k in model_dict}
    # 2. overwrite entries in the existing state dict
    model_dict.update(pretrained_dict)
    """
    # Strict load: the checkpoint must match the model architecture exactly.
    ms.load_state_dict(checkpoint["model_state"])
    # Load the optimizer state (commonly not done when fine-tuning)
    if optimizer:
        optimizer.load_state_dict(checkpoint["optimizer_state"])
    if "epoch" in checkpoint.keys():
        epoch = checkpoint["epoch"]
    else:
        epoch = -1
    return epoch
| [
"fvcore.common.file_io.PathManager.open",
"torch.load",
"os.path.join",
"net.utils.logging_tool.get_logger",
"fvcore.common.file_io.PathManager.ls",
"torch.save",
"fvcore.common.file_io.PathManager.mkdirs",
"fvcore.common.file_io.PathManager.exists"
] | [((401, 429), 'net.utils.logging_tool.get_logger', 'logging.get_logger', (['__name__'], {}), '(__name__)\n', (419, 429), True, 'import net.utils.logging_tool as logging\n'), ((653, 693), 'os.path.join', 'os.path.join', (['path_to_job', '"""checkpoints"""'], {}), "(path_to_job, 'checkpoints')\n", (665, 693), False, 'import os\n'), ((1114, 1154), 'os.path.join', 'os.path.join', (['path_to_job', '"""checkpoints"""'], {}), "(path_to_job, 'checkpoints')\n", (1126, 1154), False, 'import os\n'), ((2077, 2098), 'os.path.join', 'os.path.join', (['d', 'name'], {}), '(d, name)\n', (2089, 2098), False, 'import os\n'), ((4833, 4871), 'fvcore.common.file_io.PathManager.exists', 'PathManager.exists', (['path_to_checkpoint'], {}), '(path_to_checkpoint)\n', (4851, 4871), False, 'from fvcore.common.file_io import PathManager\n'), ((762, 796), 'fvcore.common.file_io.PathManager.exists', 'PathManager.exists', (['checkpoint_dir'], {}), '(checkpoint_dir)\n', (780, 796), False, 'from fvcore.common.file_io import PathManager\n'), ((1853, 1874), 'fvcore.common.file_io.PathManager.exists', 'PathManager.exists', (['d'], {}), '(d)\n', (1871, 1874), False, 'from fvcore.common.file_io import PathManager\n'), ((1832, 1849), 'fvcore.common.file_io.PathManager.ls', 'PathManager.ls', (['d'], {}), '(d)\n', (1846, 1849), False, 'from fvcore.common.file_io import PathManager\n'), ((2367, 2388), 'fvcore.common.file_io.PathManager.exists', 'PathManager.exists', (['d'], {}), '(d)\n', (2385, 2388), False, 'from fvcore.common.file_io import PathManager\n'), ((2346, 2363), 'fvcore.common.file_io.PathManager.ls', 'PathManager.ls', (['d'], {}), '(d)\n', (2360, 2363), False, 'from fvcore.common.file_io import PathManager\n'), ((3778, 3820), 'fvcore.common.file_io.PathManager.open', 'PathManager.open', (['path_to_checkpoint', '"""wb"""'], {}), "(path_to_checkpoint, 'wb')\n", (3794, 3820), False, 'from fvcore.common.file_io import PathManager\n'), ((3835, 3860), 'torch.save', 'torch.save', (['checkpoint', 
'f'], {}), '(checkpoint, f)\n', (3845, 3860), False, 'import torch\n'), ((5266, 5308), 'fvcore.common.file_io.PathManager.open', 'PathManager.open', (['path_to_checkpoint', '"""rb"""'], {}), "(path_to_checkpoint, 'rb')\n", (5282, 5308), False, 'from fvcore.common.file_io import PathManager\n'), ((5336, 5369), 'torch.load', 'torch.load', (['f'], {'map_location': '"""cpu"""'}), "(f, map_location='cpu')\n", (5346, 5369), False, 'import torch\n'), ((823, 857), 'fvcore.common.file_io.PathManager.mkdirs', 'PathManager.mkdirs', (['checkpoint_dir'], {}), '(checkpoint_dir)\n', (841, 857), False, 'from fvcore.common.file_io import PathManager\n')] |
import sqlite3
import os
DB_SAVES_DIR = 'saves'


class DB:
    """Thin wrapper around a SQLite database stored under ``DB_SAVES_DIR``.

    SQL is read from ``.sql`` files whose statements are separated by ``;``.
    """

    def __init__(self, name):
        """Open (creating the folder/file if necessary) ``saves/<name>.db``.

        :param name: base name of the database file, without extension.
        """
        self.name = name
        self.path = '{}/{}.db'.format(DB_SAVES_DIR, self.name)
        if not os.path.isdir(DB_SAVES_DIR):
            os.makedirs(DB_SAVES_DIR)
        self.conn = sqlite3.connect(self.path)

    def save(self):
        """Reset the database file on disk and re-create the schema."""
        # Clear the old db: close, delete the file, then reconnect.
        self.conn.close()
        # Fix: use self.path instead of re-building 'saves/{}.db' by hand,
        # so the location stays consistent with __init__.
        os.remove(self.path)
        self.conn = sqlite3.connect(self.path)
        self.setup()

    def query(self, fname, params=None):
        """Run the statements in *fname* and return the rows as dicts.

        :param fname: path to a ``.sql`` file (statements separated by ``;``)
        :param params: optional parameter tuple applied to every statement
        :return: list of dicts mapping column name -> value
        """
        with open(fname) as f:
            contents = f.read()
        result = []
        cursor = self.conn.cursor()
        for query in contents.split(';'):
            # Skip empty fragments (e.g. produced by a trailing ';').
            if not query.strip():
                continue
            if params is None:
                cursor.execute(query)
            else:
                cursor.execute(query, params)
            res = cursor.fetchall()
            if len(res) > 0:
                for row in res:
                    result.append({})
                    for i, col in enumerate(cursor.description):
                        result[-1][col[0]] = row[i]
        return result

    def execute(self, fname, params=None):
        """Run the statements in *fname*, committing after each one.

        :param fname: path to a ``.sql`` file (statements separated by ``;``)
        :param params: optional parameter tuple applied to every statement
        """
        with open(fname) as f:
            contents = f.read()
        for statement in contents.split(';'):
            # Skip empty fragments (e.g. produced by a trailing ';').
            if not statement.strip():
                continue
            cursor = self.conn.cursor()
            if params is None:
                cursor.execute(statement)
            else:
                cursor.execute(statement, params)
            self.conn.commit()

    def setup(self):
        """Create all application tables by running the setup scripts."""
        setup_scripts = [
            'gen_log', 'nations', 'names', 'name_modifiers', 'name_places',
            'relations', 'nation_relationship', 'groups', 'weapons',
            'weapon_stats', 'armors', 'equipment_list', 'treaties', 'events',
            'event_types', 'event_data', 'cells', 'buildings',
        ]
        for script in setup_scripts:
            self.execute('db/setup/{}.sql'.format(script))
        self.conn.commit()
| [
"os.path.isdir",
"sqlite3.connect",
"os.makedirs"
] | [((282, 308), 'sqlite3.connect', 'sqlite3.connect', (['self.path'], {}), '(self.path)\n', (297, 308), False, 'import sqlite3\n'), ((455, 481), 'sqlite3.connect', 'sqlite3.connect', (['self.path'], {}), '(self.path)\n', (470, 481), False, 'import sqlite3\n'), ((194, 221), 'os.path.isdir', 'os.path.isdir', (['DB_SAVES_DIR'], {}), '(DB_SAVES_DIR)\n', (207, 221), False, 'import os\n'), ((235, 260), 'os.makedirs', 'os.makedirs', (['DB_SAVES_DIR'], {}), '(DB_SAVES_DIR)\n', (246, 260), False, 'import os\n')] |
import pandas as pd

# One-shot converter: read the aggregated CSV and re-save it as an
# Excel workbook next to it. Both paths are fixed relative to the CWD.
df = pd.read_csv('tmp/all.csv')
df.to_excel('tmp/all.xlsx')
"pandas.read_csv"
] | [((26, 52), 'pandas.read_csv', 'pd.read_csv', (['"""tmp/all.csv"""'], {}), "('tmp/all.csv')\n", (37, 52), True, 'import pandas as pd\n')] |
from google.cloud import storage
from todayi.remote.base import Remote
from todayi.util.fs import path
class GcsRemote(Remote):
    """Push/pull application resources to and from Google Cloud Storage.

    :param local_file_path: path to file to upload
    :type local_file_path: str
    :param remote_path: blob path to upload file to
    :type remote_path: str
    :param bucket_name: name of gcs bucket (a ``gs://`` prefix is tolerated)
    :type bucket_name: str
    """

    _bucket_prefix = "gs://"
    _bucket_suffix = "/"

    def __init__(self, local_file_path: str, remote_path: str, bucket_name: str):
        self._local_file_path = local_file_path
        self._remote_path = remote_path
        self._bucket_name = self._clean_bucket_name(bucket_name)
        self._bucket = None

    @property
    def bucket(self):
        # Lazily create the bucket handle on first access.
        if self._bucket is None:
            self._bucket = storage.Client().bucket(self._bucket_name)
        return self._bucket

    def push(self, backup: bool = False):
        """
        Pushes up current state to GCS remote. Optionally write
        backup file to remote.

        :param backup: whether or not to backup remote backend file
        :type backup: bool
        """
        if backup is True:
            # Preserve the current remote blob under a backup name first.
            self.bucket.rename_blob(
                self._blob(), self._backup_file_name(self._remote_path)
            )
        self._blob().upload_from_filename(self._local_file_path)

    def pull(self, backup: bool = False):
        """
        Updates current state from remote. Optionally write
        local backup file.

        :param backup: whether or not to backup local backend file
        :type backup: bool
        """
        if backup is True:
            # Move the current local file aside before overwriting it.
            path(self._local_file_path).rename(
                path(self._backup_file_name(self._local_file_path))
            )
        self._blob().download_to_filename(self._local_file_path)

    def _blob(self):
        # Blob handle for the configured remote path.
        return self.bucket.blob(self._remote_path)

    def _clean_bucket_name(self, bucket_name: str) -> str:
        # Drop a "gs://" scheme and a single trailing "/" if present.
        if self._bucket_prefix in bucket_name:
            bucket_name = bucket_name.replace(self._bucket_prefix, "")
        if bucket_name[-1] == self._bucket_suffix:
            bucket_name = bucket_name[:-1]
        return bucket_name
| [
"google.cloud.storage.Client",
"todayi.util.fs.path"
] | [((881, 897), 'google.cloud.storage.Client', 'storage.Client', ([], {}), '()\n', (895, 897), False, 'from google.cloud import storage\n'), ((1733, 1760), 'todayi.util.fs.path', 'path', (['self._local_file_path'], {}), '(self._local_file_path)\n', (1737, 1760), False, 'from todayi.util.fs import path\n')] |
import argparse
import re
from pathlib import Path

parser = argparse.ArgumentParser(description='Patches the rers code for libfuzzer')
parser.add_argument('file', type=str)
args = parser.parse_args()
path = args.file

# Scan the source for the alphabet declaration, e.g. "int inputs[] = {1,2,3};".
# If it occurs more than once the last occurrence wins (matches prior behavior).
alphabet = None
with open(path, 'r') as f:
    for line in f:
        match = re.match(r'\s*int inputs\[\] = {(.*)};', line)
        if match:
            alphabet = [int(x) for x in match.group(1).split(',')]

if alphabet is None:
    # Fail with a clear message instead of crashing with len(None).
    raise SystemExit('No "int inputs[] = {...};" declaration found in %s' % path)

print(len(alphabet))

# Write a libFuzzer dictionary: one escaped byte entry per alphabet symbol.
newpath = Path(path).parent.joinpath('dict')
with open(newpath, 'w') as newfile:
    for a in alphabet:
        newfile.write("\"\\x%0.2X\"\n" % a)
| [
"re.match",
"argparse.ArgumentParser",
"pathlib.Path"
] | [((61, 135), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Patches the rers code for libfuzzer"""'}), "(description='Patches the rers code for libfuzzer')\n", (84, 135), False, 'import argparse\n'), ((329, 377), 're.match', 're.match', (['"""\\\\s*int inputs\\\\[\\\\] = {(.*)};"""', 'line'], {}), "('\\\\s*int inputs\\\\[\\\\] = {(.*)};', line)\n", (337, 377), False, 'import re\n'), ((505, 515), 'pathlib.Path', 'Path', (['path'], {}), '(path)\n', (509, 515), False, 'from pathlib import Path\n')] |
from python_anticaptcha import AnticaptchaClient, NoCaptchaTaskProxylessTask, AnticaptchaException
from bs4 import BeautifulSoup
import requests
URL_DEC_PEC = "http://www.infoimprese.it/impr/ricerca/captcha.jsp?codiceCaptcha=%s&pecCriptata=%s"
def get_captcha(url, api_key, site_key):
    """Solve a reCAPTCHA via the Anti-Captcha service.

    Returns the solution token, or None when the service raises an error.
    """
    print("[CHECKING] captcha (siteKey = %s, apiKey = %s)" % (site_key, api_key))
    try:
        solver = AnticaptchaClient(api_key)
        captcha_task = NoCaptchaTaskProxylessTask(url, site_key)
        job = solver.createTask(captcha_task)
        job.join()  # block until the task completes
        return job.get_solution_response()
    except AnticaptchaException as ae:
        print("[ERROR] error = %s" % str(ae))
        return None
def get_pec(url, api_key, site_key):
    """Fetch and decrypt the PEC address from an infoimprese page.

    Scrapes the encrypted PEC and error URL from the page, solves its
    captcha via Anti-Captcha, then requests the decryption endpoint.

    :param url: infoimprese page URL containing the encrypted PEC
    :param api_key: Anti-Captcha API key
    :param site_key: reCAPTCHA site key of the page
    :return: response body of the PEC decryption endpoint
    """
    r = requests.get(url)
    soup = BeautifulSoup(r.text, "html.parser")
    dec_pec = soup.find('input', {'id': 'decPec'}).get('value')
    print("[FOUND] decPec = %s" % dec_pec)
    url_errore = soup.find('input', {'id': 'urlErrore'}).get('value')
    print("[FOUND] urlErrore = %s" % url_errore)
    captcha = get_captcha(url, api_key, site_key)
    # Bug fix: these prints passed the value as a second positional argument
    # instead of interpolating it, printing a literal "%s" plus the value.
    print("[FOUND] captcha = %s" % captcha)
    pec_req_url = URL_DEC_PEC % (captcha, dec_pec)
    print("URL for decrypted pec will be: %s" % pec_req_url)
    r = requests.get(pec_req_url)
    return r.text
| [
"bs4.BeautifulSoup",
"python_anticaptcha.AnticaptchaClient",
"python_anticaptcha.NoCaptchaTaskProxylessTask",
"requests.get"
] | [((733, 750), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (745, 750), False, 'import requests\n'), ((762, 798), 'bs4.BeautifulSoup', 'BeautifulSoup', (['r.text', '"""html.parser"""'], {}), "(r.text, 'html.parser')\n", (775, 798), False, 'from bs4 import BeautifulSoup\n'), ((1237, 1262), 'requests.get', 'requests.get', (['pec_req_url'], {}), '(pec_req_url)\n', (1249, 1262), False, 'import requests\n'), ((397, 423), 'python_anticaptcha.AnticaptchaClient', 'AnticaptchaClient', (['api_key'], {}), '(api_key)\n', (414, 423), False, 'from python_anticaptcha import AnticaptchaClient, NoCaptchaTaskProxylessTask, AnticaptchaException\n'), ((439, 480), 'python_anticaptcha.NoCaptchaTaskProxylessTask', 'NoCaptchaTaskProxylessTask', (['url', 'site_key'], {}), '(url, site_key)\n', (465, 480), False, 'from python_anticaptcha import AnticaptchaClient, NoCaptchaTaskProxylessTask, AnticaptchaException\n')] |
# ******************************************************************************
# Copyright 2017-2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ******************************************************************************
from __future__ import division
from __future__ import print_function
from math import floor, ceil
import ngraph as ng
from ngraph.frontends.onnx.onnx_importer.utils.axes import reorder_axes
from ngraph.frontends.onnx.onnx_importer.utils.misc import verify_symmetric_padding
from ngraph.frontends.tensorflow.tf_importer.utils_pos_axes import cast_to_pos_axes
def get_pads(onnx_node):  # type: (NodeWrapper) -> Tuple[int, int, int]
    """
    Get padding values for the operation described by an ONNX node.

    If `auto_pad` attribute is specified as SAME_UPPER or SAME_LOWER, or VALID values are
    calculated. Otherwise values are taken from the `pads` attribute.

    `pads` value should follow [x1_begin, x2_begin..., x1_end, x2_end,...]

    :param onnx_node: wrapped ONNX node for Conv or Pool operation
    :return: tuple of numbers of pixels to pad (height, width, depth)
    """
    auto_pad = onnx_node.get_attribute_value('auto_pad')
    pads = onnx_node.get_attribute_value('pads', ())  # Padding along each axis
    kernel_shape = onnx_node.get_attribute_value('kernel_shape')

    # Attribute 'auto_pad' is deprecated, but is currently used by CNTK
    if auto_pad:
        if auto_pad == 'VALID':
            # VALID means no padding at all.
            pads = [0, 0] * len(kernel_shape)
        else:
            # SAME_UPPER or SAME_LOWER mean pad the input so that the output size match the input.
            # In case of odd number add the extra padding at the end for SAME_UPPER and at the
            # beginning for SAME_LOWER.
            def pad_value(kernel_dim):  # type: (int) -> float
                # Total padding split evenly across both sides of the axis.
                return (kernel_dim - 1.0) / 2.0

            pads_starts = [floor(pad_value(dim)) if auto_pad == 'SAME_UPPER' else
                           ceil(pad_value(dim)) for dim in kernel_shape]
            pads_ends = [ceil(pad_value(dim)) if auto_pad == 'SAME_UPPER' else
                         floor(pad_value(dim)) for dim in kernel_shape]
            pads = pads_starts + pads_ends

    # ngraph only supports symmetric padding; raise otherwise.
    verify_symmetric_padding(onnx_node, pads)

    pad_h, pad_w, pad_d = 0, 0, 0
    # Only the "begin" half of `pads` is used; symmetry was verified above.
    if pads and len(pads) == 2:  # ONNX input axes NCHW
        pad_h, pad_w = pads
    if pads and len(pads) == 3:  # ONNX input axes NCHWD
        pad_h, pad_w, pad_d = pads
    if pads and len(pads) == 4:  # ONNX input axes NCHW
        pad_h, pad_w, _, _ = pads
    elif pads and len(pads) == 6:  # ONNX input axes NCHWD
        pad_h, pad_w, pad_d, _, _, _ = pads
    return pad_h, pad_w, pad_d
def get_strides(onnx_node):  # type: (NodeWrapper) -> Tuple[int, int, int]
    """
    Get number of pixels to stride operation by in each direction.

    Missing entries default to a stride of 1.

    :param onnx_node: wrapped ONNX node for Conv or Pool operation
    :return: tuple of numbers of pixels to stride by (height, width, depth)
    """
    strides = onnx_node.get_attribute_value('strides', ())
    if len(strides) == 2:  # ONNX input axes order NCHW
        return strides[0], strides[1], 1
    if len(strides) == 3:  # ONNX input axes order NCHWD
        return strides[0], strides[1], strides[2]
    return 1, 1, 1
def get_dilations(onnx_node):  # type: (NodeWrapper) -> Tuple[int, int, int]
    """
    Get number of pixels for filter dilation in each direction.

    Missing entries default to a dilation of 1.

    :param onnx_node: wrapped ONNX node for Conv or Pool operation
    :return: tuple of numbers of pixels for filter dilation (height, width, depth)
    """
    dilations = onnx_node.get_attribute_value('dilations', ())
    if len(dilations) == 2:  # ONNX input axes order NCHW
        return dilations[0], dilations[1], 1
    if len(dilations) == 3:  # ONNX input axes order NCHWD
        return dilations[0], dilations[1], dilations[2]
    return 1, 1, 1
def get_conv_params(onnx_node):  # type: (NodeWrapper) -> Dict
    """
    Parse ONNX Conv operation attributes and produce an ngraph compatible conv_params dict.

    :param onnx_node: wrapped ONNX node for Conv or ConvTranspose operation
    :return: dict of conv_params for ng.convolution
    """
    pad_h, pad_w, pad_d = get_pads(onnx_node)
    str_h, str_w, str_d = get_strides(onnx_node)
    dil_h, dil_w, dil_d = get_dilations(onnx_node)
    return dict(
        pad_d=pad_d, pad_h=pad_h, pad_w=pad_w,
        str_d=str_d, str_h=str_h, str_w=str_w,
        dil_d=dil_d, dil_h=dil_h, dil_w=dil_w,
    )
def make_conv_output_axes(input, filter, conv_params):
    # type: (TensorOp, TensorOp, Dict) -> Axes
    """
    Prepare axes for the output of an ng.convolution operation.

    :param input: ngraph tensor with convolution input data
    :param filter: ngraph tensor with convolution filter data
    :param conv_params: dict of conv_params for ng.convolution
    :return: ngraph Axes compatible with convolution operation
    """
    out_channels = filter.axes[-1].length
    batch_size = input.axes[-1].length
    in_d, in_h, in_w = input.axes.lengths[1:4]  # axes order C, D, H, W, N
    flt_d, flt_h, flt_w = filter.axes.lengths[1:4]  # axes order J, T(d), R(h), S(w), K

    def _out_dim(in_x, flt_x, pad_x, str_x, dil_x):
        # Standard convolution output-size formula with dilation.
        return floor((in_x + 2 * pad_x - flt_x - (flt_x - 1) * (dil_x - 1)) / str_x) + 1

    out_d = _out_dim(in_d, flt_d, conv_params['pad_d'], conv_params['str_d'], conv_params['dil_d'])
    out_h = _out_dim(in_h, flt_h, conv_params['pad_h'], conv_params['str_h'], conv_params['dil_h'])
    out_w = _out_dim(in_w, flt_w, conv_params['pad_w'], conv_params['str_w'], conv_params['dil_w'])

    return ng.make_axes(axes=(
        ng.make_axis(name='C', docstring='output features', length=int(out_channels)),
        ng.make_axis(name='D', docstring='depth', length=int(out_d)),
        ng.make_axis(name='H', docstring='height', length=int(out_h)),
        ng.make_axis(name='W', docstring='width', length=int(out_w)),
        ng.make_axis(name='N', docstring='mini-batch size', length=int(batch_size)),
    ))
def make_convolution_op(onnx_node, ng_inputs, transpose=False):
    # type: (NodeWrapper, List[TensorOp], bool) -> Op
    """
    Create an ngraph convolution or deconvolution Op based on an ONNX node.

    :param onnx_node: wrapped ONNX node for Conv of ConvTranspose op
    :param ng_inputs: ngraph TensorOp input tensors
    :param transpose: should this be a transposed convolution?
    :return: ngraph Op for convolution or deconvolution
    :raises ValueError: if the node does not have 2 or 3 inputs
    :raises NotImplementedError: for non-2D/3D convolutions or grouped conv
    """
    if len(ng_inputs) == 3:
        x, weights, bias = ng_inputs
    elif len(ng_inputs) == 2:
        x, weights = ng_inputs
        bias = ng.constant(0)
    else:
        # Bug fix: the placeholders were previously passed as extra exception
        # arguments instead of being interpolated into the message.
        raise ValueError('Conv node (%s): unexpected number of input values: %d.'
                         % (onnx_node.name, len(ng_inputs)))

    # Reorder x axes from ONNX convention (N, C, H, W, D) to ngraph (C, D, H, W, N)
    # Reorder weights axes from ONNX (K, J, R, S, T) to ngraph (J, T, R, S, K)
    # Axis names follow https://ngraph.nervanasys.com/index.html/axes.html
    if len(x.axes) == 4:  # 2D convolution
        x = reorder_axes(x, 'NCHW', 'CDHWN')
        weights = reorder_axes(weights, 'KJRS', 'JTRSK')
    elif len(x.axes) == 5:  # 3D convolution
        x = reorder_axes(x, 'NCHWD', 'CDHWN')
        weights = reorder_axes(weights, 'KJRST', 'JTRSK')
    else:
        raise NotImplementedError('Conv node (%s): only 2D and 3D convolutions are supported.'
                                  % onnx_node.name)

    groups = onnx_node.get_attribute_value('group', 1)
    if groups != 1:
        raise NotImplementedError('Conv node (%s): `group` attribute value %d not supported.'
                                  % (onnx_node.name, groups))

    # Prepare ngraph convolution operation
    conv_params = get_conv_params(onnx_node)
    output_axes = make_conv_output_axes(x, weights, conv_params)

    if transpose:
        conv = ng.deconvolution(conv_params, x, weights, axes=output_axes)
    else:
        conv = ng.convolution(conv_params, x, weights, axes=output_axes)

    conv = cast_to_pos_axes(conv) + bias

    # ONNX output should have axes in the order N, C, H, W, D
    conv = reorder_axes(conv, 'CDHWN', 'NCHWD')

    if len(ng_inputs[0].axes) == 4:  # 2D convolution, slice away the D axis from output
        conv = ng.tensor_slice(conv, [slice(None), slice(None), slice(None), slice(None), 0])
    return conv
| [
"ngraph.frontends.onnx.onnx_importer.utils.axes.reorder_axes",
"ngraph.convolution",
"ngraph.frontends.onnx.onnx_importer.utils.misc.verify_symmetric_padding",
"ngraph.frontends.tensorflow.tf_importer.utils_pos_axes.cast_to_pos_axes",
"math.floor",
"ngraph.constant",
"ngraph.deconvolution"
] | [((2728, 2769), 'ngraph.frontends.onnx.onnx_importer.utils.misc.verify_symmetric_padding', 'verify_symmetric_padding', (['onnx_node', 'pads'], {}), '(onnx_node, pads)\n', (2752, 2769), False, 'from ngraph.frontends.onnx.onnx_importer.utils.misc import verify_symmetric_padding\n'), ((8953, 8989), 'ngraph.frontends.onnx.onnx_importer.utils.axes.reorder_axes', 'reorder_axes', (['conv', '"""CDHWN"""', '"""NCHWD"""'], {}), "(conv, 'CDHWN', 'NCHWD')\n", (8965, 8989), False, 'from ngraph.frontends.onnx.onnx_importer.utils.axes import reorder_axes\n'), ((7881, 7913), 'ngraph.frontends.onnx.onnx_importer.utils.axes.reorder_axes', 'reorder_axes', (['x', '"""NCHW"""', '"""CDHWN"""'], {}), "(x, 'NCHW', 'CDHWN')\n", (7893, 7913), False, 'from ngraph.frontends.onnx.onnx_importer.utils.axes import reorder_axes\n'), ((7932, 7970), 'ngraph.frontends.onnx.onnx_importer.utils.axes.reorder_axes', 'reorder_axes', (['weights', '"""KJRS"""', '"""JTRSK"""'], {}), "(weights, 'KJRS', 'JTRSK')\n", (7944, 7970), False, 'from ngraph.frontends.onnx.onnx_importer.utils.axes import reorder_axes\n'), ((8693, 8752), 'ngraph.deconvolution', 'ng.deconvolution', (['conv_params', 'x', 'weights'], {'axes': 'output_axes'}), '(conv_params, x, weights, axes=output_axes)\n', (8709, 8752), True, 'import ngraph as ng\n'), ((8779, 8836), 'ngraph.convolution', 'ng.convolution', (['conv_params', 'x', 'weights'], {'axes': 'output_axes'}), '(conv_params, x, weights, axes=output_axes)\n', (8793, 8836), True, 'import ngraph as ng\n'), ((8849, 8871), 'ngraph.frontends.tensorflow.tf_importer.utils_pos_axes.cast_to_pos_axes', 'cast_to_pos_axes', (['conv'], {}), '(conv)\n', (8865, 8871), False, 'from ngraph.frontends.tensorflow.tf_importer.utils_pos_axes import cast_to_pos_axes\n'), ((5965, 6043), 'math.floor', 'floor', (['((input_x + 2 * pad_x - filter_x - (filter_x - 1) * (dil_x - 1)) / str_x)'], {}), '((input_x + 2 * pad_x - filter_x - (filter_x - 1) * (dil_x - 1)) / str_x)\n', (5970, 6043), False, 'from math 
import floor, ceil\n'), ((7422, 7436), 'ngraph.constant', 'ng.constant', (['(0)'], {}), '(0)\n', (7433, 7436), True, 'import ngraph as ng\n'), ((8028, 8061), 'ngraph.frontends.onnx.onnx_importer.utils.axes.reorder_axes', 'reorder_axes', (['x', '"""NCHWD"""', '"""CDHWN"""'], {}), "(x, 'NCHWD', 'CDHWN')\n", (8040, 8061), False, 'from ngraph.frontends.onnx.onnx_importer.utils.axes import reorder_axes\n'), ((8080, 8119), 'ngraph.frontends.onnx.onnx_importer.utils.axes.reorder_axes', 'reorder_axes', (['weights', '"""KJRST"""', '"""JTRSK"""'], {}), "(weights, 'KJRST', 'JTRSK')\n", (8092, 8119), False, 'from ngraph.frontends.onnx.onnx_importer.utils.axes import reorder_axes\n')] |
from floodsystem.geo import stations_by_distance, rivers_by_station_number
from haversine import haversine
from floodsystem.stationdata import build_station_list
from floodsystem.geo import rivers_with_station
from floodsystem.geo import stations_by_river
from floodsystem.geo import stations_within_radius
from floodsystem.station import inconsistent_typical_range_stations
# Build the shared station list once at import time; the tests below reuse it.
stations = build_station_list()
#Task 1B
def test_stations_by_distance():
    """Distances returned by stations_by_distance must be in ascending order."""
    ordered = stations_by_distance(stations, p=(0, 0))
    # Compare each consecutive pair; index 2 holds the distance.
    for prev, curr in zip(ordered, ordered[1:]):
        assert curr[2] >= prev[2]
# Testing Task C: Are all the outputted stations within 10 km of the city centre
def test_stations_within_radius():
    """Stations returned for a 10 km radius must lie within 10 km of the centre."""
    all_stations = build_station_list()
    centre = (52.2053, 0.1218)
    within = stations_within_radius(all_stations, centre, 10)
    for station in all_stations:
        # `within` holds station names; verify the distance for each match.
        if station.name in within:
            assert haversine(centre, station.coord) <= 10
#Testing Task D: Do all rivers outputted have at least 1 station?
def test_rivers_with_station():
    """Every river returned by rivers_with_station must have >= 1 station."""
    all_stations = build_station_list()
    rivers = rivers_with_station(all_stations)
    # The live dataset changes over time, so avoid the hard-coded count
    # (the original asserted len == 857, which breaks as stations change).
    assert len(rivers) > 0
    station_rivers = {station.river for station in all_stations}
    # The original version reset its counter inside the inner loop and
    # asserted inside the match branch, making the check vacuous.
    for river in rivers:
        assert river in station_rivers
#Testing Task D: Are all the stations outputted situated on the desired rivers?
def test_stations_by_river():
    """Stations grouped under a river must actually be situated on that river."""
    all_stations = build_station_list()
    by_river = stations_by_river(all_stations)
    # Bug fix: the original compared station names against the whole list
    # (`i.name == g1`), which is never true, so the assertions were dead code.
    for river in ("River Aire", "River Cam", "River Thames"):
        names = by_river[river]
        for station in all_stations:
            if station.name in names:
                assert station.river == river
#Task 1E
def test_rivers_by_station_number():
    """rivers_by_station_number(N) must return at least N rivers, sorted by
    descending station count, with any extra entries tied with the N-th."""
    N = 9
    result = rivers_by_station_number(stations, N)
    assert len(result) >= 9
    if len(result) == N:
        # Exactly N rivers: counts (index 1) must be non-increasing.
        for n in range(len(result)-1):
            x = result[n]
            y = result[n+1]
            assert x[1] >= y[1]
    elif len(result) > N:
        # More than N rivers: the first N must be sorted ...
        for n in range(N-1):
            x = result[n]
            y = result[n+1]
            assert x[1] >= y[1]
        # ... and every entry past the N-th must be an exact tie with it.
        for n in range(N, len(result)-1):
            x = result[n]
            y = result[n+1]
            assert x==y
# Testing Task F: Do all outputted stations have inconsistent range data
def test_inconsistent_typical_range_stations():
    """Every flagged station must have missing or inverted typical-range data."""
    all_stations = build_station_list()
    flagged = inconsistent_typical_range_stations(all_stations)
    for station in all_stations:
        # `flagged` holds station names; check the range of each match.
        if station.name in flagged:
            rng = station.typical_range
            assert rng is None or rng[1] - rng[0] < 0
| [
"floodsystem.geo.rivers_with_station",
"haversine.haversine",
"floodsystem.geo.stations_by_distance",
"floodsystem.geo.stations_within_radius",
"floodsystem.geo.stations_by_river",
"floodsystem.geo.rivers_by_station_number",
"floodsystem.stationdata.build_station_list",
"floodsystem.station.inconsiste... | [((413, 433), 'floodsystem.stationdata.build_station_list', 'build_station_list', ([], {}), '()\n', (431, 433), False, 'from floodsystem.stationdata import build_station_list\n'), ((491, 531), 'floodsystem.geo.stations_by_distance', 'stations_by_distance', (['stations'], {'p': '(0, 0)'}), '(stations, p=(0, 0))\n', (511, 531), False, 'from floodsystem.geo import stations_by_distance, rivers_by_station_number\n'), ((773, 793), 'floodsystem.stationdata.build_station_list', 'build_station_list', ([], {}), '()\n', (791, 793), False, 'from floodsystem.stationdata import build_station_list\n'), ((849, 907), 'floodsystem.geo.stations_within_radius', 'stations_within_radius', (['stations', 'reference_coordinate', '(10)'], {}), '(stations, reference_coordinate, 10)\n', (871, 907), False, 'from floodsystem.geo import stations_within_radius\n'), ((1163, 1183), 'floodsystem.stationdata.build_station_list', 'build_station_list', ([], {}), '()\n', (1181, 1183), False, 'from floodsystem.stationdata import build_station_list\n'), ((1192, 1221), 'floodsystem.geo.rivers_with_station', 'rivers_with_station', (['stations'], {}), '(stations)\n', (1211, 1221), False, 'from floodsystem.geo import rivers_with_station\n'), ((1515, 1535), 'floodsystem.stationdata.build_station_list', 'build_station_list', ([], {}), '()\n', (1533, 1535), False, 'from floodsystem.stationdata import build_station_list\n'), ((1545, 1572), 'floodsystem.geo.stations_by_river', 'stations_by_river', (['stations'], {}), '(stations)\n', (1562, 1572), False, 'from floodsystem.geo import stations_by_river\n'), ((2037, 2074), 'floodsystem.geo.rivers_by_station_number', 'rivers_by_station_number', (['stations', 'N'], {}), '(stations, N)\n', (2061, 2074), False, 'from floodsystem.geo import stations_by_distance, rivers_by_station_number\n'), ((2653, 2673), 'floodsystem.stationdata.build_station_list', 'build_station_list', ([], {}), '()\n', (2671, 2673), False, 'from 
floodsystem.stationdata import build_station_list\n'), ((2683, 2728), 'floodsystem.station.inconsistent_typical_range_stations', 'inconsistent_typical_range_stations', (['stations'], {}), '(stations)\n', (2718, 2728), False, 'from floodsystem.station import inconsistent_typical_range_stations\n'), ((1002, 1042), 'haversine.haversine', 'haversine', (['reference_coordinate', 'i.coord'], {}), '(reference_coordinate, i.coord)\n', (1011, 1042), False, 'from haversine import haversine\n')] |
import unittest
def setUpModule():
    # Module-level fixture: unittest calls this once, before any class in
    # the module runs.
    message = "in module {} - setUpModule()".format(__name__)
    print(message)
def tearDownModule():
    # Module-level fixture: unittest calls this once, after every test in
    # the module has finished.
    message = "in module {} - tearDownModule()".format(__name__)
    print(message)
class TextFixtures(unittest.TestCase):
    """Demonstrates the order in which unittest fixture hooks fire."""

    @classmethod
    def setUpClass(cls):
        # Runs once, before any test method in this class.
        message = 'in class {} - setUpClass()'.format(cls.__name__)
        print(message)

    @classmethod
    def tearDownClass(cls):
        # Runs once, after every test method in this class has finished.
        message = 'in class {} - tearDownClass()'.format(cls.__name__)
        print(message)

    def setUp(self):
        # Runs immediately before each individual test method.
        print('in setup()')

    def tearDown(self):
        # Runs immediately after each individual test method.
        print('in tearDown()')

    def test_1(self):
        print('in test_1()')

    def test_2(self):
        print('in test_2()')
# Run the fixture demonstration suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
| [
"unittest.main"
] | [((687, 702), 'unittest.main', 'unittest.main', ([], {}), '()\n', (700, 702), False, 'import unittest\n')] |
import os
import numpy as np
import torch
from torchvision import models, transforms

# Model constructors available for analysis, keyed by their public name.
MODELS = {'densenet121': models.densenet121,
          'resnet152': models.resnet152}

# Run on GPU when available, otherwise fall back to CPU.
DEVICE = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

# Supported attribution analyzers and Integrated-Gradients baselines.
ANALYZERS = ['grad', 'smooth-grad', 'smooth-taylor', 'ig', 'lrp']
IG_BASELINES = ['zero', 'noise']

# ImageNet per-channel statistics (RGB), shared by every normalization
# transform below so they cannot drift out of sync.
_IMAGENET_MEAN = [0.485, 0.456, 0.406]
_IMAGENET_STD = [0.229, 0.224, 0.225]

# Standard ImageNet preprocessing: resize, center-crop, tensorize, normalize.
DEFAULT_TRANSFORM = transforms.Compose([
    transforms.Resize(256),        # resize image to 256x256 pixels
    transforms.CenterCrop(224),    # crop the image to 224x224 pixels about the center
    transforms.ToTensor(),         # convert the image to PyTorch Tensor data type
    transforms.Normalize(mean=_IMAGENET_MEAN, std=_IMAGENET_STD),
])

# Normalization only — input is assumed to already be a 224x224 tensor.
NORMALIZE_TRANSFORM = transforms.Compose([
    transforms.Normalize(mean=_IMAGENET_MEAN, std=_IMAGENET_STD),
])

# Geometric preprocessing only (no tensor conversion or normalization).
RESIZE_TRANSFORM = transforms.Compose([
    transforms.Resize(256),        # resize image to 256x256 pixels
    transforms.CenterCrop(224),    # crop the image to 224x224 pixels about the center
])

# Inverse of the normalization above: x_norm * std + mean, expressed as a
# single Normalize with mean' = -mean/std and std' = 1/std, derived from the
# shared constants instead of being hand-computed.
INVERSE_TRANSFORM = transforms.Compose([
    transforms.Normalize(
        mean=[-m / s for m, s in zip(_IMAGENET_MEAN, _IMAGENET_STD)],
        std=[1 / s for s in _IMAGENET_STD],
    )
])
"torchvision.transforms.CenterCrop",
"torch.cuda.is_available",
"torchvision.transforms.Normalize",
"torchvision.transforms.Resize",
"torchvision.transforms.ToTensor"
] | [((205, 230), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (228, 230), False, 'import torch\n'), ((421, 443), 'torchvision.transforms.Resize', 'transforms.Resize', (['(256)'], {}), '(256)\n', (438, 443), False, 'from torchvision import models, transforms\n'), ((488, 514), 'torchvision.transforms.CenterCrop', 'transforms.CenterCrop', (['(224)'], {}), '(224)\n', (509, 514), False, 'from torchvision import models, transforms\n'), ((574, 595), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (593, 595), False, 'from torchvision import models, transforms\n'), ((656, 731), 'torchvision.transforms.Normalize', 'transforms.Normalize', ([], {'mean': '[0.485, 0.456, 0.406]', 'std': '[0.229, 0.224, 0.225]'}), '(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n', (676, 731), False, 'from torchvision import models, transforms\n'), ((874, 949), 'torchvision.transforms.Normalize', 'transforms.Normalize', ([], {'mean': '[0.485, 0.456, 0.406]', 'std': '[0.229, 0.224, 0.225]'}), '(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n', (894, 949), False, 'from torchvision import models, transforms\n'), ((1089, 1111), 'torchvision.transforms.Resize', 'transforms.Resize', (['(256)'], {}), '(256)\n', (1106, 1111), False, 'from torchvision import models, transforms\n'), ((1156, 1182), 'torchvision.transforms.CenterCrop', 'transforms.CenterCrop', (['(224)'], {}), '(224)\n', (1177, 1182), False, 'from torchvision import models, transforms\n'), ((1287, 1405), 'torchvision.transforms.Normalize', 'transforms.Normalize', ([], {'mean': '[-0.485 / 0.229, -0.456 / 0.224, -0.406 / 0.225]', 'std': '[1 / 0.229, 1 / 0.224, 1 / 0.225]'}), '(mean=[-0.485 / 0.229, -0.456 / 0.224, -0.406 / 0.225],\n std=[1 / 0.229, 1 / 0.224, 1 / 0.225])\n', (1307, 1405), False, 'from torchvision import models, transforms\n')] |
import warnings
import numpy as np
from einsteinpy.integrators import GeodesicIntegrator
from .utils import _P, _kerr, _kerrnewman, _sch
class Geodesic:
    """
    Base Class for defining Geodesics

    Working in Geometrized Units (M-Units),
    with :math:`c = G = M = k_e = 1`

    """

    def __init__(
        self,
        metric,
        metric_params,
        position,
        momentum,
        time_like=True,
        return_cartesian=True,
        **kwargs,
    ):
        """
        Constructor

        Parameters
        ----------
        metric : str
            Name of the metric. Currently, these metrics are supported:
            1. Schwarzschild
            2. Kerr
            3. KerrNewman
        metric_params : array_like
            Tuple of parameters to pass to the metric
            E.g., ``(a,)`` for Kerr
        position : array_like
            3-Position
            4-Position is initialized by taking ``t = 0.0``
        momentum : array_like
            3-Momentum
            4-Momentum is calculated automatically,
            considering the value of ``time_like``
        time_like : bool, optional
            Determines type of Geodesic
            ``True`` for Time-like geodesics
            ``False`` for Null-like geodesics
            Defaults to ``True``
        return_cartesian : bool, optional
            Whether to return calculated positions in Cartesian Coordinates
            This only affects the coordinates. Momenta are dimensionless
            quantities, and are returned in Spherical Polar Coordinates.
            Defaults to ``True``
        kwargs : dict
            Keyword parameters for the Geodesic Integrator

        Other Parameters
        ----------------
        **kwargs
            Forwarded to the integrator. See the 'Other Parameters' section
            of ``calculate_trajectory`` for the full list (``steps``,
            ``delta``, ``rtol``, ``atol``, ``order``, ``omega``,
            ``suppress_warnings``).

        """
        # Contravariant Metrics, defined so far
        _METRICS = {
            "Schwarzschild": _sch,
            "Kerr": _kerr,
            "KerrNewman": _kerrnewman,
        }

        if metric not in _METRICS:
            raise NotImplementedError(
                f"'{metric}' is unsupported. Currently, these metrics are supported:\
                \n1. Schwarzschild\n2. Kerr\n3. KerrNewman"
            )

        self.metric_name = metric
        self.metric = _METRICS[metric]
        self.metric_params = metric_params
        if metric == "Schwarzschild":
            # Schwarzschild has no free parameters; normalize to a zero spin
            # parameter so downstream code always sees a consistent tuple.
            self.metric_params = (0.0,)

        self.position = np.array([0.0, *position])
        # Use the *normalized* ``self.metric_params`` here. Previously, the
        # raw, user-supplied ``metric_params`` was passed, which could make
        # the initial 4-momentum inconsistent with the trajectory
        # computation for Schwarzschild geodesics (which uses ``(0.0,)``).
        self.momentum = _P(
            self.metric, self.metric_params, self.position, momentum, time_like
        )
        self.time_like = time_like

        self.kind = "Time-like" if time_like else "Null-like"
        self.coords = "Cartesian" if return_cartesian else "Spherical Polar"

        self._trajectory = self.calculate_trajectory(**kwargs)

    def __repr__(self):
        return f"""Geodesic Object:(\n\
            Type : ({self.kind}),\n\
            Metric : ({self.metric_name}),\n\
            Metric Parameters : ({self.metric_params}),\n\
            Initial 4-Position : ({self.position}),\n\
            Initial 4-Momentum : ({self.momentum}),\n\
            Trajectory = (\n\
                {self.trajectory}\n\
            ),\n\
            Output Position Coordinate System = ({self.coords})\n\
        ))"""

    def __str__(self):
        return self.__repr__()

    @property
    def trajectory(self):
        """
        Returns the trajectory of the test particle

        """
        return self._trajectory

    def calculate_trajectory(self, **kwargs):
        """
        Calculate trajectory in spacetime

        Parameters
        ----------
        kwargs : dict
            Keyword parameters for the Geodesic Integrator
            See 'Other Parameters' below.

        Returns
        -------
        ~numpy.ndarray
            N-element numpy array, containing step count
        ~numpy.ndarray
            Shape-(N, 8) numpy array, containing
            (4-Position, 4-Momentum) for each step

        Other Parameters
        ----------------
        steps : int
            Number of integration steps
            Defaults to ``50``
        delta : float
            Initial integration step-size
            Defaults to ``0.5``
        rtol : float
            Relative Tolerance
            Defaults to ``1e-2``
        atol : float
            Absolute Tolerance
            Defaults to ``1e-2``
        order : int
            Integration Order
            Defaults to ``2``
        omega : float
            Coupling between Hamiltonian Flows
            Smaller values imply smaller integration error, but too
            small values can make the equation of motion non-integrable.
            For non-capture trajectories, ``omega = 1.0`` is recommended.
            For trajectories, that either lead to a capture or a grazing
            geodesic, a decreased value of ``0.01`` or less is recommended.
            Defaults to ``1.0``
        suppress_warnings : bool
            Whether to suppress warnings during simulation
            Warnings are shown for every step, where numerical errors
            exceed specified tolerance (controlled by ``rtol`` and ``atol``)
            Defaults to ``False``

        """
        g, g_prms = self.metric, self.metric_params
        q0, p0 = self.position, self.momentum
        tl = self.time_like

        N = kwargs.get("steps", 50)
        dl = kwargs.get("delta", 0.5)
        rtol = kwargs.get("rtol", 1e-2)
        atol = kwargs.get("atol", 1e-2)
        order = kwargs.get("order", 2)
        omega = kwargs.get("omega", 1.0)
        sw = kwargs.get("suppress_warnings", False)

        steps = np.arange(N)

        geodint = GeodesicIntegrator(
            metric=g,
            metric_params=g_prms,
            q0=q0,
            p0=p0,
            time_like=tl,
            steps=N,
            delta=dl,
            rtol=rtol,
            atol=atol,
            order=order,
            omega=omega,
            suppress_warnings=sw,
        )

        # Advance the integrator N times; results accumulate on the
        # integrator object. The loop index itself is unused.
        for _ in range(N):
            geodint.step()

        vecs = np.array(geodint.results, dtype=float)

        q1 = vecs[:, 0]
        p1 = vecs[:, 1]
        results = np.hstack((q1, p1))
        # Ignoring
        # q2 = vecs[:, 2]
        # p2 = vecs[:, 3]

        if self.coords == "Cartesian":
            # Converting to Cartesian from Spherical Polar Coordinates
            # Note that momenta cannot be converted this way,
            # due to ambiguities in the signs of v_r and v_th (velocities)
            t, r, th, ph = q1.T
            pt, pr, pth, pph = p1.T
            x = r * np.sin(th) * np.cos(ph)
            y = r * np.sin(th) * np.sin(ph)
            z = r * np.cos(th)

            cart_results = np.vstack((t, x, y, z, pt, pr, pth, pph)).T

            return steps, cart_results

        return steps, results
class Nulllike(Geodesic):
    """
    Convenience subclass of ``Geodesic`` for Null-like geodesics,
    with ``time_like`` pinned to ``False``.

    """

    def __init__(
        self, metric, metric_params, position, momentum, return_cartesian=True, **kwargs
    ):
        """
        Constructor

        Parameters
        ----------
        metric : str
            Name of the metric. Supported metrics:
            1. Schwarzschild
            2. Kerr
            3. KerrNewman
        metric_params : array_like
            Tuple of parameters to pass to the metric,
            e.g., ``(a,)`` for Kerr
        position : array_like
            3-Position (the 4-Position is initialized with ``t = 0.0``)
        momentum : array_like
            3-Momentum (the 4-Momentum is computed automatically for a
            null geodesic)
        return_cartesian : bool, optional
            Whether to return calculated positions in Cartesian Coordinates.
            Momenta are dimensionless and stay in Spherical Polar
            Coordinates. Defaults to ``True``
        kwargs : dict
            Integrator options, forwarded to the base class. See the
            'Other Parameters' section of ``Geodesic.calculate_trajectory``
            (``steps``, ``delta``, ``rtol``, ``atol``, ``order``, ``omega``,
            ``suppress_warnings``).

        """
        # Delegate everything to the base class, fixing the geodesic type.
        super().__init__(
            metric,
            metric_params,
            position,
            momentum,
            time_like=False,
            return_cartesian=return_cartesian,
            **kwargs,
        )
class Timelike(Geodesic):
    """
    Convenience subclass of ``Geodesic`` for Time-like geodesics,
    with ``time_like`` pinned to ``True``.

    """

    def __init__(
        self, metric, metric_params, position, momentum, return_cartesian=True, **kwargs
    ):
        """
        Constructor

        Parameters
        ----------
        metric : str
            Name of the metric. Supported metrics:
            1. Schwarzschild
            2. Kerr
            3. KerrNewman
        metric_params : array_like
            Tuple of parameters to pass to the metric,
            e.g., ``(a,)`` for Kerr
        position : array_like
            3-Position (the 4-Position is initialized with ``t = 0.0``)
        momentum : array_like
            3-Momentum (the 4-Momentum is computed automatically for a
            time-like geodesic)
        return_cartesian : bool, optional
            Whether to return calculated positions in Cartesian Coordinates.
            Momenta are dimensionless and stay in Spherical Polar
            Coordinates. Defaults to ``True``
        kwargs : dict
            Integrator options, forwarded to the base class. See the
            'Other Parameters' section of ``Geodesic.calculate_trajectory``
            (``steps``, ``delta``, ``rtol``, ``atol``, ``order``, ``omega``,
            ``suppress_warnings``).

        """
        # Delegate everything to the base class, fixing the geodesic type.
        super().__init__(
            metric,
            metric_params,
            position,
            momentum,
            time_like=True,
            return_cartesian=return_cartesian,
            **kwargs,
        )
| [
"numpy.hstack",
"einsteinpy.integrators.GeodesicIntegrator",
"numpy.array",
"numpy.cos",
"numpy.vstack",
"numpy.sin",
"numpy.arange"
] | [((3607, 3633), 'numpy.array', 'np.array', (['[0.0, *position]'], {}), '([0.0, *position])\n', (3615, 3633), True, 'import numpy as np\n'), ((6840, 6852), 'numpy.arange', 'np.arange', (['N'], {}), '(N)\n', (6849, 6852), True, 'import numpy as np\n'), ((6872, 7048), 'einsteinpy.integrators.GeodesicIntegrator', 'GeodesicIntegrator', ([], {'metric': 'g', 'metric_params': 'g_prms', 'q0': 'q0', 'p0': 'p0', 'time_like': 'tl', 'steps': 'N', 'delta': 'dl', 'rtol': 'rtol', 'atol': 'atol', 'order': 'order', 'omega': 'omega', 'suppress_warnings': 'sw'}), '(metric=g, metric_params=g_prms, q0=q0, p0=p0, time_like=\n tl, steps=N, delta=dl, rtol=rtol, atol=atol, order=order, omega=omega,\n suppress_warnings=sw)\n', (6890, 7048), False, 'from einsteinpy.integrators import GeodesicIntegrator\n'), ((7263, 7301), 'numpy.array', 'np.array', (['geodint.results'], {'dtype': 'float'}), '(geodint.results, dtype=float)\n', (7271, 7301), True, 'import numpy as np\n'), ((7369, 7388), 'numpy.hstack', 'np.hstack', (['(q1, p1)'], {}), '((q1, p1))\n', (7378, 7388), True, 'import numpy as np\n'), ((7809, 7819), 'numpy.cos', 'np.cos', (['ph'], {}), '(ph)\n', (7815, 7819), True, 'import numpy as np\n'), ((7853, 7863), 'numpy.sin', 'np.sin', (['ph'], {}), '(ph)\n', (7859, 7863), True, 'import numpy as np\n'), ((7884, 7894), 'numpy.cos', 'np.cos', (['th'], {}), '(th)\n', (7890, 7894), True, 'import numpy as np\n'), ((7923, 7964), 'numpy.vstack', 'np.vstack', (['(t, x, y, z, pt, pr, pth, pph)'], {}), '((t, x, y, z, pt, pr, pth, pph))\n', (7932, 7964), True, 'import numpy as np\n'), ((7796, 7806), 'numpy.sin', 'np.sin', (['th'], {}), '(th)\n', (7802, 7806), True, 'import numpy as np\n'), ((7840, 7850), 'numpy.sin', 'np.sin', (['th'], {}), '(th)\n', (7846, 7850), True, 'import numpy as np\n')] |
import galaxy.model
import galaxy.webapps.community.model as model
from galaxy.model.orm import *
from galaxy.webapps.community.model.mapping import context as sa_session
from galaxy.model.mapping import context as ga_session
def delete_obj(obj):
    """Delete ``obj`` from the tool shed database and commit."""
    sa_session.delete(obj)
    sa_session.flush()

def delete_user_roles(user):
    """Remove every role association attached to ``user``."""
    for association in user.roles:
        sa_session.delete(association)
    sa_session.flush()

def flush(obj):
    """Add ``obj`` to the tool shed session and commit it."""
    sa_session.add(obj)
    sa_session.flush()
def get_all_repositories():
    """Return every repository row in the tool shed database."""
    return sa_session.query(model.Repository).all()

def get_all_installed_repositories(actually_installed=False):
    """Return tool shed repositories recorded in the Galaxy database.

    When ``actually_installed`` is True, restrict the result to rows that
    are not deleted, not uninstalled, and whose status is INSTALLED.
    """
    query = ga_session.query(galaxy.model.ToolShedRepository)
    if actually_installed:
        # ``== False`` (rather than ``is False``/``not``) is required for
        # SQLAlchemy column expressions.
        query = query.filter(
            and_(
                galaxy.model.ToolShedRepository.table.c.deleted == False,
                galaxy.model.ToolShedRepository.table.c.uninstalled == False,
                galaxy.model.ToolShedRepository.table.c.status == galaxy.model.ToolShedRepository.installation_status.INSTALLED,
            )
        )
    return query.all()

def get_category_by_name(name):
    """Look up a tool shed category by its exact name."""
    return (
        sa_session.query(model.Category)
        .filter(model.Category.table.c.name == name)
        .first()
    )

def get_default_user_permissions_by_role(role):
    """Return default-permission rows associated with ``role``."""
    return (
        sa_session.query(model.DefaultUserPermissions)
        .filter(model.DefaultUserPermissions.table.c.role_id == role.id)
        .all()
    )

def get_default_user_permissions_by_user(user):
    """Return default-permission rows associated with ``user``."""
    return (
        sa_session.query(model.DefaultUserPermissions)
        .filter(model.DefaultUserPermissions.table.c.user_id == user.id)
        .all()
    )
def get_galaxy_repository_by_name_owner_changeset_revision(repository_name, owner, changeset_revision):
    """Find an installed repository matching name, owner and changeset."""
    columns = galaxy.model.ToolShedRepository.table.c
    return (
        ga_session.query(galaxy.model.ToolShedRepository)
        .filter(
            and_(
                columns.name == repository_name,
                columns.owner == owner,
                columns.changeset_revision == changeset_revision,
            )
        )
        .first()
    )

def get_installed_repository_by_id(repository_id):
    """Fetch an installed repository by its primary key."""
    return (
        ga_session.query(galaxy.model.ToolShedRepository)
        .filter(galaxy.model.ToolShedRepository.table.c.id == repository_id)
        .first()
    )

def get_installed_repository_by_name_owner(repository_name, owner):
    """Fetch an installed repository by name and owner."""
    columns = galaxy.model.ToolShedRepository.table.c
    return (
        ga_session.query(galaxy.model.ToolShedRepository)
        .filter(and_(columns.name == repository_name, columns.owner == owner))
        .first()
    )

def get_private_role(user):
    """Return the user's private role, raising AssertionError if absent."""
    for role in user.all_roles():
        if role.name == user.email and role.description == 'Private Role for %s' % user.email:
            return role
    raise AssertionError("Private role not found for user '%s'" % user.email)

def get_repository_by_id(repository_id):
    """Fetch a tool shed repository by its primary key."""
    return (
        sa_session.query(model.Repository)
        .filter(model.Repository.table.c.id == repository_id)
        .first()
    )
def get_repository_review_by_user_id_changeset_revision(user_id, repository_id, changeset_revision):
    """Return the review a user left for a repository at a given revision."""
    columns = model.RepositoryReview.table.c
    return (
        sa_session.query(model.RepositoryReview)
        .filter(
            and_(
                columns.user_id == user_id,
                columns.repository_id == repository_id,
                columns.changeset_revision == changeset_revision,
            )
        )
        .first()
    )

def get_role_by_name(role_name):
    """Fetch a role by its name."""
    return (
        sa_session.query(model.Role)
        .filter(model.Role.table.c.name == role_name)
        .first()
    )

def get_user(email):
    """Fetch a tool shed user by email address."""
    return (
        sa_session.query(model.User)
        .filter(model.User.table.c.email == email)
        .first()
    )

def get_user_by_name(username):
    """Fetch a tool shed user by username."""
    return (
        sa_session.query(model.User)
        .filter(model.User.table.c.username == username)
        .first()
    )
def mark_obj_deleted(obj):
    """Flag ``obj`` as deleted and commit the change."""
    obj.deleted = True
    sa_session.add(obj)
    sa_session.flush()

def refresh(obj):
    """Reload ``obj`` from the tool shed database."""
    sa_session.refresh(obj)

def ga_refresh(obj):
    """Reload ``obj`` from the Galaxy database."""
    ga_session.refresh(obj)

def get_galaxy_private_role(user):
    """Return the Galaxy user's private role, raising AssertionError if absent."""
    for role in user.all_roles():
        if role.name == user.email and role.description == 'Private Role for %s' % user.email:
            return role
    raise AssertionError("Private role not found for user '%s'" % user.email)

def get_galaxy_user(email):
    """Fetch a Galaxy user by email address."""
    return (
        ga_session.query(galaxy.model.User)
        .filter(galaxy.model.User.table.c.email == email)
        .first()
    )
def get_repository_by_name_and_owner(name, owner_username):
    """Fetch a repository by name and the username of its owner."""
    owner = get_user_by_name(owner_username)
    return (
        sa_session.query(model.Repository)
        .filter(
            and_(
                model.Repository.table.c.name == name,
                model.Repository.table.c.user_id == owner.id,
            )
        )
        .first()
    )

def get_repository_metadata_by_repository_id_changeset_revision(repository_id, changeset_revision):
    """Fetch the metadata record for a repository at a changeset revision."""
    return (
        sa_session.query(model.RepositoryMetadata)
        .filter(
            and_(
                model.RepositoryMetadata.table.c.repository_id == repository_id,
                model.RepositoryMetadata.table.c.changeset_revision == changeset_revision,
            )
        )
        .first()
    )
| [
"galaxy.webapps.community.model.mapping.context.add",
"galaxy.model.mapping.context.refresh",
"galaxy.webapps.community.model.mapping.context.query",
"galaxy.webapps.community.model.mapping.context.flush",
"galaxy.webapps.community.model.mapping.context.refresh",
"galaxy.model.mapping.context.query",
"g... | [((254, 276), 'galaxy.webapps.community.model.mapping.context.delete', 'sa_session.delete', (['obj'], {}), '(obj)\n', (271, 276), True, 'from galaxy.webapps.community.model.mapping import context as sa_session\n'), ((283, 301), 'galaxy.webapps.community.model.mapping.context.flush', 'sa_session.flush', ([], {}), '()\n', (299, 301), True, 'from galaxy.webapps.community.model.mapping import context as sa_session\n'), ((397, 415), 'galaxy.webapps.community.model.mapping.context.flush', 'sa_session.flush', ([], {}), '()\n', (413, 415), True, 'from galaxy.webapps.community.model.mapping import context as sa_session\n'), ((438, 457), 'galaxy.webapps.community.model.mapping.context.add', 'sa_session.add', (['obj'], {}), '(obj)\n', (452, 457), True, 'from galaxy.webapps.community.model.mapping import context as sa_session\n'), ((464, 482), 'galaxy.webapps.community.model.mapping.context.flush', 'sa_session.flush', ([], {}), '()\n', (480, 482), True, 'from galaxy.webapps.community.model.mapping import context as sa_session\n'), ((4521, 4540), 'galaxy.webapps.community.model.mapping.context.add', 'sa_session.add', (['obj'], {}), '(obj)\n', (4535, 4540), True, 'from galaxy.webapps.community.model.mapping import context as sa_session\n'), ((4547, 4565), 'galaxy.webapps.community.model.mapping.context.flush', 'sa_session.flush', ([], {}), '()\n', (4563, 4565), True, 'from galaxy.webapps.community.model.mapping import context as sa_session\n'), ((4590, 4613), 'galaxy.webapps.community.model.mapping.context.refresh', 'sa_session.refresh', (['obj'], {}), '(obj)\n', (4608, 4613), True, 'from galaxy.webapps.community.model.mapping import context as sa_session\n'), ((4643, 4666), 'galaxy.model.mapping.context.refresh', 'ga_session.refresh', (['obj'], {}), '(obj)\n', (4661, 4666), True, 'from galaxy.model.mapping import context as ga_session\n'), ((368, 390), 'galaxy.webapps.community.model.mapping.context.delete', 'sa_session.delete', (['ura'], {}), '(ura)\n', (385, 390), 
True, 'from galaxy.webapps.community.model.mapping import context as sa_session\n'), ((522, 556), 'galaxy.webapps.community.model.mapping.context.query', 'sa_session.query', (['model.Repository'], {}), '(model.Repository)\n', (538, 556), True, 'from galaxy.webapps.community.model.mapping import context as sa_session\n'), ((1140, 1189), 'galaxy.model.mapping.context.query', 'ga_session.query', (['galaxy.model.ToolShedRepository'], {}), '(galaxy.model.ToolShedRepository)\n', (1156, 1189), True, 'from galaxy.model.mapping import context as ga_session\n'), ((1243, 1275), 'galaxy.webapps.community.model.mapping.context.query', 'sa_session.query', (['model.Category'], {}), '(model.Category)\n', (1259, 1275), True, 'from galaxy.webapps.community.model.mapping import context as sa_session\n'), ((1441, 1487), 'galaxy.webapps.community.model.mapping.context.query', 'sa_session.query', (['model.DefaultUserPermissions'], {}), '(model.DefaultUserPermissions)\n', (1457, 1487), True, 'from galaxy.webapps.community.model.mapping import context as sa_session\n'), ((1671, 1717), 'galaxy.webapps.community.model.mapping.context.query', 'sa_session.query', (['model.DefaultUserPermissions'], {}), '(model.DefaultUserPermissions)\n', (1687, 1717), True, 'from galaxy.webapps.community.model.mapping import context as sa_session\n'), ((1955, 2004), 'galaxy.model.mapping.context.query', 'ga_session.query', (['galaxy.model.ToolShedRepository'], {}), '(galaxy.model.ToolShedRepository)\n', (1971, 2004), True, 'from galaxy.model.mapping import context as ga_session\n'), ((2419, 2468), 'galaxy.model.mapping.context.query', 'ga_session.query', (['galaxy.model.ToolShedRepository'], {}), '(galaxy.model.ToolShedRepository)\n', (2435, 2468), True, 'from galaxy.model.mapping import context as ga_session\n'), ((2678, 2727), 'galaxy.model.mapping.context.query', 'ga_session.query', (['galaxy.model.ToolShedRepository'], {}), '(galaxy.model.ToolShedRepository)\n', (2694, 2727), True, 'from 
galaxy.model.mapping import context as ga_session\n'), ((3277, 3311), 'galaxy.webapps.community.model.mapping.context.query', 'sa_session.query', (['model.Repository'], {}), '(model.Repository)\n', (3293, 3311), True, 'from galaxy.webapps.community.model.mapping import context as sa_session\n'), ((3541, 3581), 'galaxy.webapps.community.model.mapping.context.query', 'sa_session.query', (['model.RepositoryReview'], {}), '(model.RepositoryReview)\n', (3557, 3581), True, 'from galaxy.webapps.community.model.mapping import context as sa_session\n'), ((3988, 4016), 'galaxy.webapps.community.model.mapping.context.query', 'sa_session.query', (['model.Role'], {}), '(model.Role)\n', (4004, 4016), True, 'from galaxy.webapps.community.model.mapping import context as sa_session\n'), ((4156, 4184), 'galaxy.webapps.community.model.mapping.context.query', 'sa_session.query', (['model.User'], {}), '(model.User)\n', (4172, 4184), True, 'from galaxy.webapps.community.model.mapping import context as sa_session\n'), ((4330, 4358), 'galaxy.webapps.community.model.mapping.context.query', 'sa_session.query', (['model.User'], {}), '(model.User)\n', (4346, 4358), True, 'from galaxy.webapps.community.model.mapping import context as sa_session\n'), ((4980, 5015), 'galaxy.model.mapping.context.query', 'ga_session.query', (['galaxy.model.User'], {}), '(galaxy.model.User)\n', (4996, 5015), True, 'from galaxy.model.mapping import context as ga_session\n'), ((5249, 5283), 'galaxy.webapps.community.model.mapping.context.query', 'sa_session.query', (['model.Repository'], {}), '(model.Repository)\n', (5265, 5283), True, 'from galaxy.webapps.community.model.mapping import context as sa_session\n'), ((5648, 5690), 'galaxy.webapps.community.model.mapping.context.query', 'sa_session.query', (['model.RepositoryMetadata'], {}), '(model.RepositoryMetadata)\n', (5664, 5690), True, 'from galaxy.webapps.community.model.mapping import context as sa_session\n'), ((671, 720), 'galaxy.model.mapping.context.query', 
'ga_session.query', (['galaxy.model.ToolShedRepository'], {}), '(galaxy.model.ToolShedRepository)\n', (687, 720), True, 'from galaxy.model.mapping import context as ga_session\n')] |
from payment_server import app
from flask import json
def test_payment_server():
    """POST a sample card to the payment endpoint and accept either of the
    two documented outcomes (processed, or the simulated error)."""
    payload = {
        "CreditCardNumber": "79927398713",
        "CardHolder": "Anonymous",
        "ExpirationDate": "2022-08-11T05:26:03.869245",
        "Amount": 9,
    }
    client = app.test_client()
    response = client.post(
        '/api-v1/process-payment/',
        data=json.dumps(payload),
        content_type='application/json',
    )
    body = response.get_data(as_text=True)
    processed = response.status_code == 200 and body == 'Payment is processed'
    errored = response.status_code == 500 and body == 'Any error'
    assert processed or errored
"payment_server.app.test_client",
"flask.json.dumps"
] | [((109, 126), 'payment_server.app.test_client', 'app.test_client', ([], {}), '()\n', (124, 126), False, 'from payment_server import app\n'), ((184, 323), 'flask.json.dumps', 'json.dumps', (["{'CreditCardNumber': '79927398713', 'CardHolder': 'Anonymous',\n 'ExpirationDate': '2022-08-11T05:26:03.869245', 'Amount': 9}"], {}), "({'CreditCardNumber': '79927398713', 'CardHolder': 'Anonymous',\n 'ExpirationDate': '2022-08-11T05:26:03.869245', 'Amount': 9})\n", (194, 323), False, 'from flask import json\n')] |
# coding: utf-8
from django.contrib.auth.models import User
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.text import slugify
class Profile(models.Model):
    """Extra per-user data attached one-to-one to Django's ``auth.User``."""

    # Explicit on_delete matches the implicit CASCADE default of legacy
    # Django versions and keeps the model valid on Django >= 2.0.
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    is_external = models.BooleanField(default=False)

    # Avatar background colors, keyed by the (slugified, lowercased) first
    # letter of the initials. Hoisted to a class constant so the dict is
    # built once instead of on every get_avatar() call.
    AVATAR_COLORS = {
        'a': '#8A2E60',
        'b': '#4B2D73',
        'c': '#AA5A39',
        'd': '#29516D',
        'e': '#6F256F',
        'f': '#993350',
        'g': '#2D882D',
        'h': '#AA8E39',
        'i': '#7F2A68',
        'j': '#403075',
        'k': '#AA5439',
        'l': '#7B9F35',
        'm': '#AA7939',
        'n': '#AA8539',
        'o': '#AAA839',
        'p': '#236467',
        'q': '#AA9739',
        'r': '#592A71',
        's': '#609732',
        't': '#277553',
        'u': '#AA9F39',
        'v': '#91A437',
        'w': '#343477',
        'x': '#2E4372',
        'y': '#AA6D39',
        'z': '#AA3C39'
    }

    class Meta:
        db_table = 'auth_user_profile'
        verbose_name = _('profile')
        verbose_name_plural = _('profiles')

    def __unicode__(self):
        return self.get_display_name()

    def get_display_name(self):
        """Return the friendliest available name for the user."""
        if self.user.first_name and self.user.last_name:
            return self.user.get_full_name()
        if self.user.first_name:
            return self.user.first_name
        if self.user.last_name:
            return self.user.last_name
        return self.user.username

    def get_display_roles(self):
        """Return a comma-separated list of the user's group names."""
        groups = self.user.groups.all().values_list('name', flat=True)
        return ', '.join(groups)

    def get_avatar(self):
        """Return ``(initials, color)`` for an initials-based avatar.

        Initials come from the first+last name when both are set; otherwise
        from the first two characters of whichever name field is non-empty,
        falling back to the username.
        """
        if self.user.first_name and self.user.last_name:
            initials = u'{0}{1}'.format(self.user.first_name[:1], self.user.last_name[:1])
        elif self.user.first_name and len(self.user.first_name) > 1:
            initials = self.user.first_name[:2]
        elif self.user.last_name and len(self.user.last_name) > 1:
            initials = self.user.last_name[:2]
        else:
            # Slicing is safe for short usernames: ``u[:2] == u`` when
            # ``len(u) <= 1``, so the two original username branches collapse.
            initials = self.user.username[:2]
        initials = slugify(initials)
        # Fall back to a neutral gray when slugify yields an empty string or
        # a leading character outside a-z (same behavior as the old
        # try/except KeyError).
        color = self.AVATAR_COLORS.get(initials[:1].lower(), '#E1E1E1')
        return (initials.upper(), color)
| [
"django.db.models.OneToOneField",
"django.utils.translation.ugettext_lazy",
"django.utils.text.slugify",
"django.db.models.BooleanField"
] | [((226, 252), 'django.db.models.OneToOneField', 'models.OneToOneField', (['User'], {}), '(User)\n', (246, 252), False, 'from django.db import models\n'), ((271, 305), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (290, 305), False, 'from django.db import models\n'), ((385, 397), 'django.utils.translation.ugettext_lazy', '_', (['"""profile"""'], {}), "('profile')\n", (386, 397), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((428, 441), 'django.utils.translation.ugettext_lazy', '_', (['"""profiles"""'], {}), "('profiles')\n", (429, 441), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1576, 1593), 'django.utils.text.slugify', 'slugify', (['initials'], {}), '(initials)\n', (1583, 1593), False, 'from django.utils.text import slugify\n')] |
"""
This module defines an abstract base formatter.
!!! question "Formats"
Refer to the [Formats documentation](../../formats/index.md)
to learn about the supported output formats.
"""
from abc import ABC, abstractmethod
import numpy as np
from numpy.lib.recfunctions import unstructured_to_structured
class BaseFormatter(ABC):
    """
    An abstract base formatter.

    Attributes:
        colorize (bool): Whether to color the text.
        vcolor (Callable): The vectorized implementation of the `color` method.

    Note:
        Subclasses must provide implementations for:

            - [`color`][picharsso.format.base.BaseFormatter.color]
            - [`translate`][picharsso.format.base.BaseFormatter.translate]
            - [`unify`][picharsso.format.base.BaseFormatter.unify]
    """

    def __init__(self, colorize=False):
        """Initialization method.

        Args:
            colorize (Option[bool]): Whether to color the text.
        """
        self.colorize = None
        BaseFormatter.set(self, colorize=colorize)

        # Vectorize `color` once so it can be broadcast over whole text matrices.
        self.vcolor = np.vectorize(self.color)

    def __call__(self, text_matrix, image, resample):
        """Applies formatting and colorization on the `text_matrix`
        and returns a single string.

        Args:
            text_matrix (numpy.ndarray): The subject text matrix,
                                        with `shape = (<height>, <width>)`,
                                        and `dtype = str`.
            image (PIL.Image.Image): The subject image.
            resample (int): The resampling filter.

        Returns:
            str: The formatted string of text with color (if specified).
        """
        grid_shape = text_matrix.shape

        # Apply any subclass-specific translations first.
        text_matrix = self.translate(text_matrix)

        if self.colorize:
            # Pool colors by shrinking the source image down to the text grid,
            # then pair every character with the RGB triple at its cell via
            # the vectorized `color` method.
            pooled = np.array(
                image.resize(grid_shape[::-1], resample=resample)
            ).astype(np.uint8)
            text_matrix = self.vcolor(
                text_matrix,
                unstructured_to_structured(pooled).astype("O"),
            )

        return self.unify(text_matrix)

    @staticmethod
    @abstractmethod
    def color(text, color):
        """Applies `color` to a string of `text`.

        Args:
            text (str): The subject text.
            color (Tuple[int, int, int]): The `RGB` value for the color.

        Returns:
            str: The colored text.
        """

    @staticmethod
    @abstractmethod
    def translate(text_matrix):
        """Applies translatations to `text_matrix`.

        Args:
            text_matrix (numpy.ndarray): The subject text matrix,
                                        with `shape = (<height>, <width>)`,
                                        and `dtype = str`.

        Returns:
            numpy.ndarray: The translated text_matrix.
        """

    @staticmethod
    @abstractmethod
    def unify(text_matrix):
        """Formats a `text_matrix` into a single string.

        Args:
            text_matrix (numpy.ndarray): The subject text matrix,
                                        with `shape = (<height>, <width>)`,
                                        and `dtype = str`.

        Returns:
            str: The formatted string of text art.
        """

    def set(self, colorize=None):
        """Sets attributes of the formatter instance.

        Args:
            colorize (Optional[bool]): Sets `colorize`.
        """
        if colorize is not None:
            self.colorize = colorize


__all__ = ["BaseFormatter"]
| [
"numpy.vectorize"
] | [((1067, 1091), 'numpy.vectorize', 'np.vectorize', (['self.color'], {}), '(self.color)\n', (1079, 1091), True, 'import numpy as np\n')] |
from typing import List, Optional, Dict, Any
import logging
import os
import json
import glob
from fastapi import FastAPI, HTTPException, Header, Response, Body
from fastapi.responses import FileResponse
from fastapi.encoders import jsonable_encoder
from app.metadata import PaperStatus, Allocation
from app.annotations import Annotation, RelationGroup, PdfAnnotation
from app.utils import StackdriverJsonFormatter
from app import pre_serve
# Deployment mode: "prod" on Skiff, "dev" locally (controls log formatting).
IN_PRODUCTION = os.getenv("IN_PRODUCTION", "dev")

CONFIGURATION_FILE = os.getenv(
    "PAWLS_CONFIGURATION_FILE", "/usr/local/src/skiff/app/api/config/configuration.json"
)

handlers = None

# In production, emit JSON-structured logs that Stackdriver can parse.
if IN_PRODUCTION == "prod":
    json_handler = logging.StreamHandler()
    json_handler.setFormatter(StackdriverJsonFormatter())
    handlers = [json_handler]

logging.basicConfig(
    level=os.environ.get("LOG_LEVEL", default=logging.INFO), handlers=handlers
)
# Reuse uvicorn's logger so app logs share its configuration.
logger = logging.getLogger("uvicorn")

# boto3 logging is _super_ verbose.
logging.getLogger("boto3").setLevel(logging.CRITICAL)
logging.getLogger("botocore").setLevel(logging.CRITICAL)
logging.getLogger("nose").setLevel(logging.CRITICAL)
logging.getLogger("s3transfer").setLevel(logging.CRITICAL)

# The annotation app requires a bit of set up.
configuration = pre_serve.load_configuration(CONFIGURATION_FILE)

app = FastAPI()
def get_user_from_header(user_email: Optional[str]) -> Optional[str]:
    """
    Call this function with the X-Auth-Request-Email header value. This must
    include an "@" in its value.

    * In production, this is provided by Skiff after the user authenticates.
    * In development, it is provided in the NGINX proxy configuration file local.conf.

    If the value isn't well formed, or the user isn't allowed, an exception is
    thrown.
    """
    # Reject malformed addresses before consulting the allow list.
    malformed = "@" not in user_email
    if malformed or not user_is_allowed(user_email):
        raise HTTPException(403, "Forbidden")
    return user_email
def user_is_allowed(user_email: str) -> bool:
    """
    Return True if the user_email is in the users file, False otherwise.
    """
    try:
        with open(configuration.users_file) as allow_list:
            for raw_line in allow_list:
                entry = raw_line.strip()
                if entry == user_email:
                    return True
                # A domain entry such as "@allenai.org" admits every user
                # whose address ends with that domain.
                if entry.startswith("@") and user_email.endswith(entry):
                    return True
    except FileNotFoundError:
        logger.warning("file not found: %s", configuration.users_file)
    return False
def all_pdf_shas() -> List[str]:
    """Return the sha of every PDF present in the output directory."""
    # PDFs live at <output_directory>/<sha>/<sha>.pdf, so the sha is the
    # second-to-last path component.
    pattern = f"{configuration.output_directory}/*/*.pdf"
    return [path.split("/")[-2] for path in glob.glob(pattern)]
def update_status_json(status_path: str, sha: str, data: Dict[str, Any]):
    """Merge `data` into the status entry for `sha` in the file at `status_path`.

    The status file is a JSON object mapping pdf shas to per-paper status
    dicts. The entry for `sha` is updated in place and created if it does
    not exist yet (previously a missing sha raised KeyError).
    """
    with open(status_path, "r+") as st:
        status_json = json.load(st)
        # Merge rather than replace, so unrelated keys for this sha survive.
        status_json[sha] = {**status_json.get(sha, {}), **data}
        st.seek(0)
        json.dump(status_json, st)
        # The rewritten payload may be shorter than the old one.
        st.truncate()
@app.get("/", status_code=204)
def read_root():
    """Health-check endpoint.

    Skiff's sonar, and the Kubernetes health check, require
    that the server returns a 2XX response from it's
    root URL, so it can tell the service is ready for requests.
    """
    empty = Response(status_code=204)
    return empty
@app.get("/api/doc/{sha}/pdf")
async def get_pdf(sha: str):
    """
    Fetches a PDF.

    sha: str
        The sha of the pdf to return.
    """
    pdf_path = os.path.join(configuration.output_directory, sha, f"{sha}.pdf")
    if not os.path.exists(pdf_path):
        raise HTTPException(status_code=404, detail=f"pdf {sha} not found.")
    return FileResponse(pdf_path, media_type="application/pdf")
@app.get("/api/doc/{sha}/title")
async def get_pdf_title(sha: str) -> Optional[str]:
    """
    Fetches a PDF's title.

    sha: str
        The sha of the pdf title to return.

    Returns None when no metadata exists for the requested sha.
    """
    pdf_info = os.path.join(configuration.output_directory, "pdf_metadata.json")

    with open(pdf_info, "r") as f:
        info = json.load(f)

    # BUG FIX: look up the requested sha, not the literal string "sha"
    # (the old code always missed, so every title came back None).
    data = info.get(sha, None)

    if data is None:
        return None

    return data.get("title", None)
@app.post("/api/doc/{sha}/comments")
def set_pdf_comments(
    sha: str, comments: str = Body(...), x_auth_request_email: str = Header(None)
):
    """Persist free-form reviewer comments for the PDF identified by `sha`."""
    user = get_user_from_header(x_auth_request_email)
    status_path = os.path.join(configuration.output_directory, "status", f"{user}.json")
    if not os.path.exists(status_path):
        # Not an allocated user. Do nothing.
        return {}
    update_status_json(status_path, sha, {"comments": comments})
    return {}
@app.post("/api/doc/{sha}/junk")
def set_pdf_junk(
    sha: str, junk: bool = Body(...), x_auth_request_email: str = Header(None)
):
    """Flag (or unflag) the PDF identified by `sha` as junk for this user."""
    user = get_user_from_header(x_auth_request_email)
    status_path = os.path.join(configuration.output_directory, "status", f"{user}.json")
    if not os.path.exists(status_path):
        # Not an allocated user. Do nothing.
        return {}
    update_status_json(status_path, sha, {"junk": junk})
    return {}
@app.post("/api/doc/{sha}/finished")
def set_pdf_finished(
    sha: str, finished: bool = Body(...), x_auth_request_email: str = Header(None)
):
    """Mark (or unmark) the PDF identified by `sha` as finished for this user."""
    user = get_user_from_header(x_auth_request_email)
    status_path = os.path.join(configuration.output_directory, "status", f"{user}.json")
    if not os.path.exists(status_path):
        # Not an allocated user. Do nothing.
        return {}
    update_status_json(status_path, sha, {"finished": finished})
    return {}
@app.get("/api/doc/{sha}/annotations")
def get_annotations(
    sha: str, x_auth_request_email: str = Header(None)
) -> PdfAnnotation:
    """Fetch the requesting user's saved annotations for the PDF `sha`."""
    user = get_user_from_header(x_auth_request_email)
    annotations_path = os.path.join(
        configuration.output_directory, sha, f"{user}_annotations.json"
    )
    if not os.path.exists(annotations_path):
        # Nothing saved yet for this user/pdf pair.
        return {"annotations": [], "relations": []}
    with open(annotations_path) as f:
        return json.load(f)
@app.post("/api/doc/{sha}/annotations")
def save_annotations(
    sha: str,
    annotations: List[Annotation],
    relations: List[RelationGroup],
    x_auth_request_email: str = Header(None),
):
    """
    sha: str
        PDF sha to save annotations for.
    annotations: List[Annotation]
        A json blob of the annotations to save.
    relations: List[RelationGroup]
        A json blob of the relations between the annotations to save.
    x_auth_request_email: str
        This is a header sent with the requests which specifies the user login.
        For local development, this will be None, because the authentication
        is controlled by the Skiff Kubernetes cluster.
    """
    user = get_user_from_header(x_auth_request_email)
    annotations_path = os.path.join(
        configuration.output_directory, sha, f"{user}_annotations.json"
    )

    # Serialize the pydantic models into plain json-compatible structures.
    json_annotations = [jsonable_encoder(annotation) for annotation in annotations]
    json_relations = [jsonable_encoder(relation) for relation in relations]

    # Only allocated users (those with a status file) may save.
    status_path = os.path.join(configuration.output_directory, "status", f"{user}.json")
    if not os.path.exists(status_path):
        # Not an allocated user. Do nothing.
        return {}

    with open(annotations_path, "w+") as f:
        json.dump({"annotations": json_annotations, "relations": json_relations}, f)

    # Keep the per-paper annotation counts in the status file up to date.
    update_status_json(
        status_path, sha, {"annotations": len(annotations), "relations": len(relations)}
    )

    return {}
@app.get("/api/doc/{sha}/tokens")
def get_tokens(sha: str):
    """
    sha: str
        PDF sha to retrieve tokens for.
    """
    structure_path = os.path.join(configuration.output_directory, sha, "pdf_structure.json")
    if not os.path.exists(structure_path):
        raise HTTPException(status_code=404, detail="No tokens for pdf.")
    with open(structure_path, "r") as f:
        return json.load(f)
@app.get("/api/annotation/labels")
def get_labels() -> List[Dict[str, str]]:
    """Return the annotation labels configured for this app."""
    return configuration.labels
@app.get("/api/annotation/relations")
def get_relations() -> List[Dict[str, str]]:
    """Return the annotation relations configured for this app."""
    return configuration.relations
@app.get("/api/annotation/allocation/info")
def get_allocation_info(x_auth_request_email: str = Header(None)) -> Allocation:
    # In development, the app isn't passed the x_auth_request_email header,
    # meaning this would always fail. Instead, to smooth local development,
    # we always return all pdfs, essentially short-circuiting the allocation
    # mechanism.
    user = get_user_from_header(x_auth_request_email)

    status_path = os.path.join(
        configuration.output_directory, "status", f"{user}.json"
    )
    if not os.path.exists(status_path):
        # If the user doesn't have allocated papers, they can see all the
        # pdfs but they can't save anything.
        papers = [PaperStatus.empty(sha, sha) for sha in all_pdf_shas()]
        return Allocation(papers=papers, hasAllocatedPapers=False)

    with open(status_path) as f:
        status_json = json.load(f)

    papers = [PaperStatus(**status) for status in status_json.values()]
    return Allocation(papers=papers, hasAllocatedPapers=True)
| [
"logging.getLogger",
"logging.StreamHandler",
"fastapi.Body",
"app.utils.StackdriverJsonFormatter",
"os.path.exists",
"app.metadata.PaperStatus.empty",
"glob.glob",
"fastapi.Response",
"fastapi.responses.FileResponse",
"app.pre_serve.load_configuration",
"fastapi.encoders.jsonable_encoder",
"f... | [((460, 493), 'os.getenv', 'os.getenv', (['"""IN_PRODUCTION"""', '"""dev"""'], {}), "('IN_PRODUCTION', 'dev')\n", (469, 493), False, 'import os\n'), ((516, 615), 'os.getenv', 'os.getenv', (['"""PAWLS_CONFIGURATION_FILE"""', '"""/usr/local/src/skiff/app/api/config/configuration.json"""'], {}), "('PAWLS_CONFIGURATION_FILE',\n '/usr/local/src/skiff/app/api/config/configuration.json')\n", (525, 615), False, 'import os\n'), ((907, 935), 'logging.getLogger', 'logging.getLogger', (['"""uvicorn"""'], {}), "('uvicorn')\n", (924, 935), False, 'import logging\n'), ((1260, 1308), 'app.pre_serve.load_configuration', 'pre_serve.load_configuration', (['CONFIGURATION_FILE'], {}), '(CONFIGURATION_FILE)\n', (1288, 1308), False, 'from app import pre_serve\n'), ((1316, 1325), 'fastapi.FastAPI', 'FastAPI', ([], {}), '()\n', (1323, 1325), False, 'from fastapi import FastAPI, HTTPException, Header, Response, Body\n'), ((683, 706), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (704, 706), False, 'import logging\n'), ((2692, 2746), 'glob.glob', 'glob.glob', (['f"""{configuration.output_directory}/*/*.pdf"""'], {}), "(f'{configuration.output_directory}/*/*.pdf')\n", (2701, 2746), False, 'import glob\n'), ((3330, 3355), 'fastapi.Response', 'Response', ([], {'status_code': '(204)'}), '(status_code=204)\n', (3338, 3355), False, 'from fastapi import FastAPI, HTTPException, Header, Response, Body\n'), ((3515, 3578), 'os.path.join', 'os.path.join', (['configuration.output_directory', 'sha', 'f"""{sha}.pdf"""'], {}), "(configuration.output_directory, sha, f'{sha}.pdf')\n", (3527, 3578), False, 'import os\n'), ((3596, 3615), 'os.path.exists', 'os.path.exists', (['pdf'], {}), '(pdf)\n', (3610, 3615), False, 'import os\n'), ((3728, 3775), 'fastapi.responses.FileResponse', 'FileResponse', (['pdf'], {'media_type': '"""application/pdf"""'}), "(pdf, media_type='application/pdf')\n", (3740, 3775), False, 'from fastapi.responses import FileResponse\n'), ((3979, 4044), 
'os.path.join', 'os.path.join', (['configuration.output_directory', '"""pdf_metadata.json"""'], {}), "(configuration.output_directory, 'pdf_metadata.json')\n", (3991, 4044), False, 'import os\n'), ((4312, 4321), 'fastapi.Body', 'Body', (['...'], {}), '(...)\n', (4316, 4321), False, 'from fastapi import FastAPI, HTTPException, Header, Response, Body\n'), ((4351, 4363), 'fastapi.Header', 'Header', (['None'], {}), '(None)\n', (4357, 4363), False, 'from fastapi import FastAPI, HTTPException, Header, Response, Body\n'), ((4439, 4509), 'os.path.join', 'os.path.join', (['configuration.output_directory', '"""status"""', 'f"""{user}.json"""'], {}), "(configuration.output_directory, 'status', f'{user}.json')\n", (4451, 4509), False, 'import os\n'), ((4523, 4550), 'os.path.exists', 'os.path.exists', (['status_path'], {}), '(status_path)\n', (4537, 4550), False, 'import os\n'), ((4794, 4803), 'fastapi.Body', 'Body', (['...'], {}), '(...)\n', (4798, 4803), False, 'from fastapi import FastAPI, HTTPException, Header, Response, Body\n'), ((4833, 4845), 'fastapi.Header', 'Header', (['None'], {}), '(None)\n', (4839, 4845), False, 'from fastapi import FastAPI, HTTPException, Header, Response, Body\n'), ((4921, 4991), 'os.path.join', 'os.path.join', (['configuration.output_directory', '"""status"""', 'f"""{user}.json"""'], {}), "(configuration.output_directory, 'status', f'{user}.json')\n", (4933, 4991), False, 'import os\n'), ((5005, 5032), 'os.path.exists', 'os.path.exists', (['status_path'], {}), '(status_path)\n', (5019, 5032), False, 'import os\n'), ((5279, 5288), 'fastapi.Body', 'Body', (['...'], {}), '(...)\n', (5283, 5288), False, 'from fastapi import FastAPI, HTTPException, Header, Response, Body\n'), ((5318, 5330), 'fastapi.Header', 'Header', (['None'], {}), '(None)\n', (5324, 5330), False, 'from fastapi import FastAPI, HTTPException, Header, Response, Body\n'), ((5406, 5476), 'os.path.join', 'os.path.join', (['configuration.output_directory', '"""status"""', 
'f"""{user}.json"""'], {}), "(configuration.output_directory, 'status', f'{user}.json')\n", (5418, 5476), False, 'import os\n'), ((5490, 5517), 'os.path.exists', 'os.path.exists', (['status_path'], {}), '(status_path)\n', (5504, 5517), False, 'import os\n'), ((5784, 5796), 'fastapi.Header', 'Header', (['None'], {}), '(None)\n', (5790, 5796), False, 'from fastapi import FastAPI, HTTPException, Header, Response, Body\n'), ((5889, 5966), 'os.path.join', 'os.path.join', (['configuration.output_directory', 'sha', 'f"""{user}_annotations.json"""'], {}), "(configuration.output_directory, sha, f'{user}_annotations.json')\n", (5901, 5966), False, 'import os\n'), ((5994, 6021), 'os.path.exists', 'os.path.exists', (['annotations'], {}), '(annotations)\n', (6008, 6021), False, 'import os\n'), ((6372, 6384), 'fastapi.Header', 'Header', (['None'], {}), '(None)\n', (6378, 6384), False, 'from fastapi import FastAPI, HTTPException, Header, Response, Body\n'), ((7023, 7100), 'os.path.join', 'os.path.join', (['configuration.output_directory', 'sha', 'f"""{user}_annotations.json"""'], {}), "(configuration.output_directory, sha, f'{user}_annotations.json')\n", (7035, 7100), False, 'import os\n'), ((7317, 7387), 'os.path.join', 'os.path.join', (['configuration.output_directory', '"""status"""', 'f"""{user}.json"""'], {}), "(configuration.output_directory, 'status', f'{user}.json')\n", (7329, 7387), False, 'import os\n'), ((7401, 7428), 'os.path.exists', 'os.path.exists', (['status_path'], {}), '(status_path)\n', (7415, 7428), False, 'import os\n'), ((7924, 7995), 'os.path.join', 'os.path.join', (['configuration.output_directory', 'sha', '"""pdf_structure.json"""'], {}), "(configuration.output_directory, sha, 'pdf_structure.json')\n", (7936, 7995), False, 'import os\n'), ((8669, 8681), 'fastapi.Header', 'Header', (['None'], {}), '(None)\n', (8675, 8681), False, 'from fastapi import FastAPI, HTTPException, Header, Response, Body\n'), ((9017, 9071), 'os.path.join', 'os.path.join', 
(['configuration.output_directory', '"""status"""'], {}), "(configuration.output_directory, 'status')\n", (9029, 9071), False, 'import os\n'), ((9090, 9130), 'os.path.join', 'os.path.join', (['status_dir', 'f"""{user}.json"""'], {}), "(status_dir, f'{user}.json')\n", (9102, 9130), False, 'import os\n'), ((9144, 9171), 'os.path.exists', 'os.path.exists', (['status_path'], {}), '(status_path)\n', (9158, 9171), False, 'import os\n'), ((737, 763), 'app.utils.StackdriverJsonFormatter', 'StackdriverJsonFormatter', ([], {}), '()\n', (761, 763), False, 'from app.utils import StackdriverJsonFormatter\n'), ((827, 876), 'os.environ.get', 'os.environ.get', (['"""LOG_LEVEL"""'], {'default': 'logging.INFO'}), "('LOG_LEVEL', default=logging.INFO)\n", (841, 876), False, 'import os\n'), ((973, 999), 'logging.getLogger', 'logging.getLogger', (['"""boto3"""'], {}), "('boto3')\n", (990, 999), False, 'import logging\n'), ((1027, 1056), 'logging.getLogger', 'logging.getLogger', (['"""botocore"""'], {}), "('botocore')\n", (1044, 1056), False, 'import logging\n'), ((1084, 1109), 'logging.getLogger', 'logging.getLogger', (['"""nose"""'], {}), "('nose')\n", (1101, 1109), False, 'import logging\n'), ((1137, 1168), 'logging.getLogger', 'logging.getLogger', (['"""s3transfer"""'], {}), "('s3transfer')\n", (1154, 1168), False, 'import logging\n'), ((1825, 1856), 'fastapi.HTTPException', 'HTTPException', (['(403)', '"""Forbidden"""'], {}), "(403, 'Forbidden')\n", (1838, 1856), False, 'from fastapi import FastAPI, HTTPException, Header, Response, Body\n'), ((1912, 1943), 'fastapi.HTTPException', 'HTTPException', (['(403)', '"""Forbidden"""'], {}), "(403, 'Forbidden')\n", (1925, 1943), False, 'from fastapi import FastAPI, HTTPException, Header, Response, Body\n'), ((2930, 2943), 'json.load', 'json.load', (['st'], {}), '(st)\n', (2939, 2943), False, 'import json\n'), ((3027, 3053), 'json.dump', 'json.dump', (['status_json', 'st'], {}), '(status_json, st)\n', (3036, 3053), False, 'import json\n'), 
((3653, 3715), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': 'f"""pdf {sha} not found."""'}), "(status_code=404, detail=f'pdf {sha} not found.')\n", (3666, 3715), False, 'from fastapi import FastAPI, HTTPException, Header, Response, Body\n'), ((4096, 4108), 'json.load', 'json.load', (['f'], {}), '(f)\n', (4105, 4108), False, 'import json\n'), ((7139, 7158), 'fastapi.encoders.jsonable_encoder', 'jsonable_encoder', (['a'], {}), '(a)\n', (7155, 7158), False, 'from fastapi.encoders import jsonable_encoder\n'), ((7203, 7222), 'fastapi.encoders.jsonable_encoder', 'jsonable_encoder', (['r'], {}), '(r)\n', (7219, 7222), False, 'from fastapi.encoders import jsonable_encoder\n'), ((7564, 7640), 'json.dump', 'json.dump', (["{'annotations': json_annotations, 'relations': json_relations}", 'f'], {}), "({'annotations': json_annotations, 'relations': json_relations}, f)\n", (7573, 7640), False, 'import json\n'), ((8007, 8033), 'os.path.exists', 'os.path.exists', (['pdf_tokens'], {}), '(pdf_tokens)\n', (8021, 8033), False, 'import os\n'), ((8049, 8108), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': '"""No tokens for pdf."""'}), "(status_code=404, detail='No tokens for pdf.')\n", (8062, 8108), False, 'from fastapi import FastAPI, HTTPException, Header, Response, Body\n'), ((8165, 8177), 'json.load', 'json.load', (['f'], {}), '(f)\n', (8174, 8177), False, 'import json\n'), ((9403, 9454), 'app.metadata.Allocation', 'Allocation', ([], {'papers': 'papers', 'hasAllocatedPapers': '(False)'}), '(papers=papers, hasAllocatedPapers=False)\n', (9413, 9454), False, 'from app.metadata import PaperStatus, Allocation\n'), ((9714, 9764), 'app.metadata.Allocation', 'Allocation', ([], {'papers': 'papers', 'hasAllocatedPapers': '(True)'}), '(papers=papers, hasAllocatedPapers=True)\n', (9724, 9764), False, 'from app.metadata import PaperStatus, Allocation\n'), ((6094, 6106), 'json.load', 'json.load', (['f'], {}), '(f)\n', (6103, 
6106), False, 'import json\n'), ((9329, 9356), 'app.metadata.PaperStatus.empty', 'PaperStatus.empty', (['sha', 'sha'], {}), '(sha, sha)\n', (9346, 9356), False, 'from app.metadata import PaperStatus, Allocation\n'), ((9563, 9575), 'json.load', 'json.load', (['f'], {}), '(f)\n', (9572, 9575), False, 'import json\n'), ((9671, 9692), 'app.metadata.PaperStatus', 'PaperStatus', ([], {}), '(**status)\n', (9682, 9692), False, 'from app.metadata import PaperStatus, Allocation\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import gi
gi.require_version("Gtk", "3.0")
from gi.repository import Gtk
from predict import Predict
class Clasify():
    # Class-level defaults; instances overwrite these once widgets exist.
    BOX = None
    WINDOW = None
    BOX_IMG_TO_PREDICT = None
    IMAGE = None

    def __init__(self):
        """Build the main window: a looping GIF plus a file-chooser button."""
        self.window = Gtk.Window()
        self.window.set_title("CNN")
        self.window.set_default_size(100, 75)
        self.window.set_position(Gtk.WindowPosition.CENTER)
        self.window.connect('destroy', self.destroy)

        self.box = Gtk.Box()
        self.box.set_spacing(10)
        self.box.set_orientation(Gtk.Orientation.VERTICAL)
        self.window.add(self.box)

        #GIF:
        image = Gtk.Image()
        image.set_from_file("Olaf.gif")
        self.box.add(image)

        #Bottom to choose image:
        button = Gtk.Button("Pls, Could you show me a photo (jpeg/jpg) of a cat or a dog?")
        button.connect("clicked", self.on_open_clicked)
        self.box.add(button)
        self.window.show_all()

    def on_open_clicked(self, button):
        """Let the user pick a jpeg/jpg, display it, and show the CNN's verdict."""
        # BUG FIX: the old guard also required `self.IMAGE` to be non-None, but
        # only `self.image` was ever assigned, so the previous result box was
        # never removed and images piled up in the window.
        if self.BOX_IMG_TO_PREDICT is not None:
            self.box.remove(self.BOX_IMG_TO_PREDICT)
        self.BOX_IMG_TO_PREDICT = Gtk.Box()

        dialog = Gtk.FileChooserDialog("Open Image", button.get_toplevel(), Gtk.FileChooserAction.OPEN)
        dialog.add_button(Gtk.STOCK_CANCEL, 0)
        dialog.add_button(Gtk.STOCK_OPEN, 1)
        dialog.set_default_response(1)

        filefilter = Gtk.FileFilter()
        filefilter.add_pixbuf_formats()
        dialog.set_filter(filefilter)

        # Short-circuit keeps get_filename() from being called on cancel.
        accepted = dialog.run() == 1 and (
            dialog.get_filename().endswith(".jpeg")
            or dialog.get_filename().endswith(".jpg")
        )
        if accepted:
            self.IMAGE = Gtk.Image()
            self.image = self.IMAGE
            self.image.set_from_file(dialog.get_filename())
            self.BOX_IMG_TO_PREDICT.add(self.image)
            self.BOX_IMG_TO_PREDICT.set_size_request(150, 150)
            #CNN:
            result = Predict.predict(dialog.get_filename())
            if result == 0:
                button.set_label("I see a CAT, Could you show me a photo (jpeg/jpg) of a cat or a dog? ")
            elif result == 1:
                button.set_label("I see a DOG, Could you show me a photo (jpeg/jpg) of a cat or a dog?")
            self.box.add(self.BOX_IMG_TO_PREDICT)
            self.window.show_all()
        else:
            button.set_label("It isn't a photo(jpeg/jpg), Could you show me a photo of a cat or a dog?")
        dialog.destroy()

    def destroy(self, window):
        """Quit the Gtk main loop when the window is closed."""
        Gtk.main_quit()
def main():
    """Instantiate the UI and hand control to the Gtk main loop."""
    ui = Clasify()  # keep a reference alive for the lifetime of the loop
    Gtk.main()


if __name__ == '__main__':
    main()
| [
"gi.repository.Gtk.Box",
"gi.repository.Gtk.FileFilter",
"gi.repository.Gtk.Image",
"gi.repository.Gtk.main_quit",
"gi.require_version",
"gi.repository.Gtk.Window",
"gi.repository.Gtk.Button",
"gi.repository.Gtk.main"
] | [((78, 110), 'gi.require_version', 'gi.require_version', (['"""Gtk"""', '"""3.0"""'], {}), "('Gtk', '3.0')\n", (96, 110), False, 'import gi\n'), ((2724, 2734), 'gi.repository.Gtk.main', 'Gtk.main', ([], {}), '()\n', (2732, 2734), False, 'from gi.repository import Gtk\n'), ((327, 339), 'gi.repository.Gtk.Window', 'Gtk.Window', ([], {}), '()\n', (337, 339), False, 'from gi.repository import Gtk\n'), ((556, 565), 'gi.repository.Gtk.Box', 'Gtk.Box', ([], {}), '()\n', (563, 565), False, 'from gi.repository import Gtk\n'), ((732, 743), 'gi.repository.Gtk.Image', 'Gtk.Image', ([], {}), '()\n', (741, 743), False, 'from gi.repository import Gtk\n'), ((863, 937), 'gi.repository.Gtk.Button', 'Gtk.Button', (['"""Pls, Could you show me a photo (jpeg/jpg) of a cat or a dog?"""'], {}), "('Pls, Could you show me a photo (jpeg/jpg) of a cat or a dog?')\n", (873, 937), False, 'from gi.repository import Gtk\n'), ((1284, 1293), 'gi.repository.Gtk.Box', 'Gtk.Box', ([], {}), '()\n', (1291, 1293), False, 'from gi.repository import Gtk\n'), ((1568, 1584), 'gi.repository.Gtk.FileFilter', 'Gtk.FileFilter', ([], {}), '()\n', (1582, 1584), False, 'from gi.repository import Gtk\n'), ((2671, 2686), 'gi.repository.Gtk.main_quit', 'Gtk.main_quit', ([], {}), '()\n', (2684, 2686), False, 'from gi.repository import Gtk\n'), ((1841, 1852), 'gi.repository.Gtk.Image', 'Gtk.Image', ([], {}), '()\n', (1850, 1852), False, 'from gi.repository import Gtk\n')] |
import os
import cloudpickle
import numpy as np
import logging
from modestga import operators
from modestga import population
from modestga import individual
from modestga.ga import norm
def parallel_pop(pipe,
                 pickled_fun,
                 args,
                 bounds,
                 pop_size,
                 trm_size,
                 xover_ratio,
                 mut_rate,
                 end_event):
    """Subpopulation used in parallel GA.

    Runs in a worker process: repeatedly receives the current genes and
    fitness values from the main process over `pipe`, breeds one generation
    of children (tournament selection -> crossover -> mutation -> evaluation),
    and sends the new genes/fitness back. Terminates when `end_event` is set
    or the parent closes its end of the pipe.

    :param pipe: multiprocessing connection used to exchange generation data
    :param pickled_fun: cost function serialized with cloudpickle
    :param args: extra positional arguments passed to the cost function
    :param bounds: parameter bounds for the population
    :param pop_size: number of individuals in this subpopulation
    :param trm_size: tournament size used for parent selection
    :param xover_ratio: crossover ratio
    :param mut_rate: mutation rate
    :param end_event: multiprocessing event signalling shutdown
    """
    log = logging.getLogger(name=f'parallel_pop[PID={os.getpid()}]')
    log.debug("Starting process")

    # Unpickle function (cloudpickle lets lambdas/closures cross processes)
    fun = cloudpickle.loads(pickled_fun)

    # Initialize population (evaluate=False: genes/fx arrive from the parent)
    pop = population.Population(pop_size, bounds, fun, args=args, evaluate=False)

    while not end_event.is_set():
        # Check if there's some data (short timeout keeps end_event responsive)
        if pipe.poll(0.01):
            # Get data
            try:
                data = pipe.recv()
            except EOFError:
                # Parent closed its end of the pipe -> shut down this worker.
                break
            scale = data['scale']
            pop.set_genes(data['genes'])
            pop.set_fx(data['fx'])

            # Generate children
            children = list()
            fx = list()
            while len(children) < pop_size:
                #Cross-over
                i1, i2 = operators.tournament(pop, trm_size)
                child = operators.crossover(i1, i2, xover_ratio)

                # Mutation
                child = operators.mutation(child, mut_rate, scale)

                # Evaluate f(x)
                child.evaluate()

                # Add to children
                children.append(child)
                fx.append(child.val)

            # Return data (new genes) to the main process
            pop.ind = children
            data = dict()
            data['genes'] = pop.get_genes()
            data['fx'] = fx
            pipe.send(data)

    pipe.close()
| [
"cloudpickle.loads",
"modestga.operators.tournament",
"modestga.population.Population",
"modestga.operators.crossover",
"os.getpid",
"modestga.operators.mutation"
] | [((614, 644), 'cloudpickle.loads', 'cloudpickle.loads', (['pickled_fun'], {}), '(pickled_fun)\n', (631, 644), False, 'import cloudpickle\n'), ((684, 755), 'modestga.population.Population', 'population.Population', (['pop_size', 'bounds', 'fun'], {'args': 'args', 'evaluate': '(False)'}), '(pop_size, bounds, fun, args=args, evaluate=False)\n', (705, 755), False, 'from modestga import population\n'), ((1277, 1312), 'modestga.operators.tournament', 'operators.tournament', (['pop', 'trm_size'], {}), '(pop, trm_size)\n', (1297, 1312), False, 'from modestga import operators\n'), ((1337, 1377), 'modestga.operators.crossover', 'operators.crossover', (['i1', 'i2', 'xover_ratio'], {}), '(i1, i2, xover_ratio)\n', (1356, 1377), False, 'from modestga import operators\n'), ((1430, 1472), 'modestga.operators.mutation', 'operators.mutation', (['child', 'mut_rate', 'scale'], {}), '(child, mut_rate, scale)\n', (1448, 1472), False, 'from modestga import operators\n'), ((529, 540), 'os.getpid', 'os.getpid', ([], {}), '()\n', (538, 540), False, 'import os\n')] |
import numpy as np
import matplotlib.pylab as plt
from math import pi,floor,atan2,atan
from scipy.interpolate import splprep, splev
plt.rcParams['pdf.fonttype'] = 42
plt.rcParams['ps.fonttype'] = 42
#########################################################################################
################################## FCT DEFINITION #######################################
#########################################################################################
def readModel(path, time):
    """Load a model trajectory file and resample it onto a 500-point spline.

    The file at `path` holds rows of (x, y, theta). The x/y path is
    re-interpolated with a cubic spline onto 500 evenly spaced samples, and
    theta is linearly resampled onto the requested `time` stamps.

    Returns (x, y, th_local, theta) where th_local[i] is the heading of step
    i expressed relative to the body orientation theta[i].
    """
    n_samples = 500
    raw_x, raw_y, raw_theta = np.transpose(np.loadtxt(path))

    # Time base of the raw model data (before any point filtering).
    model_time = np.linspace(0, n_samples, len(raw_x))

    # Drop consecutive duplicate points, which splprep cannot handle.
    keep = np.where(np.abs(np.diff(raw_x)) + np.abs(np.diff(raw_y)) > 0)
    tck, _ = splprep([raw_x[keep], raw_y[keep]], s=0)
    x, y = splev(np.linspace(0, 1, n_samples), tck)

    # Resample the body orientation onto the requested time stamps.
    theta = np.interp(time, model_time, raw_theta)

    # Heading of each step, relative to the body orientation at that step.
    dx = np.diff(x)
    dy = np.diff(y)
    th_local = [atan2(dy[i], dx[i]) - theta[i] for i in range(len(dx))]

    return (x, y, th_local, theta)
def distanceBetweenCurvs(x_real, x_sim, y_real, y_sim):
    """Average point-to-point distance between a reference and a simulated path.

    The reference path is resampled with a cubic spline onto len(x_real)
    evenly spaced points before comparison.

    Returns:
        (mean point-to-point distance, distance between the raw final points)
    """
    n_points = len(x_real)

    # Gap between the two final positions, using the raw (unresampled) ends.
    end_gap = np.sqrt((x_sim[-1] - x_real[-1]) ** 2 + (y_sim[-1] - y_real[-1]) ** 2)

    # Remove consecutive duplicate points before fitting the spline.
    keep = np.where(np.abs(np.diff(x_real)) + np.abs(np.diff(y_real)) > 0)
    tck, _ = splprep([x_real[keep], y_real[keep]], s=0)
    x_ref, y_ref = splev(np.linspace(0, 1, n_points), tck)

    total = 0
    for i in range(n_points):
        total += np.sqrt((x_sim[i] - x_ref[i]) ** 2 + (y_sim[i] - y_ref[i]) ** 2)

    return total / n_points, end_gap
def normalizeAngle(angle):
    """Wrap `angle` (radians) into the interval [-pi, pi] by steps of 2*pi."""
    wrapped = angle
    while not -pi <= wrapped <= pi:
        wrapped -= 2 * pi if wrapped > pi else -2 * pi
    return wrapped
def angularDistanceBetweenCurvs(th_real, th_sim):
    """Mean absolute angular error between two heading profiles.

    Both headings are wrapped to [-pi, pi] before comparison. Also returns
    the absolute gap between pi/2 and the wrapped final simulated heading.
    """
    final_gap = abs(pi / 2 - normalizeAngle(th_sim[-1]))

    total = 0
    for i in range(len(th_real) - 1):
        total += abs(normalizeAngle(th_real[i]) - normalizeAngle(th_sim[i]))

    return total / len(th_sim), final_gap
#########################################################################################
################################## MAIN #################################################
#########################################################################################
# Build the list of human-recording files: one per (start position, direction).
direction_list = ['N','E','S','O']
position_list = ['1500','4000','-0615','0615','1515','4015','-0640','0640','1540','4040']
path_human_list = []
for pos in position_list:
    for direction in direction_list:
        name_file = direction+pos+".dat"
        path_human_list.append('data/Human/'+name_file)

# Start positions (truncated to mm precision) for each trial.
init_pos_list = []
start_and_end = np.loadtxt("data/Human/StartAndEnd.dat")
for i in range(len(start_and_end)):
    init_pos_list.append([floor(start_and_end[i][0]*1000)/1000,floor(start_and_end[i][1]*1000)/1000])
# Goal pose (x, y, theta) shared by every trial.
fin_pos = [0,0,1.57]
# Initial heading for each direction N/E/S/O, cycled with i % 4.
orientation_list = [1.57,0.0,-1.58,3.14]

# Build the DDP (optimal control) result file names from the start poses.
path_clothoid_list,path_ddp_list = [],[]
i = 0
for pos in init_pos_list:
    # name_file = 'Clothoid_from_'+str(pos[0])+','+str(pos[1])+','+str(orientation_list[i%4])+\
    # '_to_'+str(fin_pos[0])+','+str(fin_pos[1])+','+str(fin_pos[2])+'_0.001_pos.dat'
    # path_clothoid_list.append('data/Clothoid/'+name_file)
    name_file = 'DdpResult_from_'+str(pos[0])+','+str(pos[1])+','+str(orientation_list[i%4])+\
    '_to_'+str(fin_pos[0])+','+str(fin_pos[1])+','+str(fin_pos[2])+'_pos.dat'
    path_ddp_list.append('data/DdpResult/'+name_file)
    i += 1

# Same construction for the IROS dataset (stored in path_clothoid_list).
init_pos_list = []
start_and_end = np.loadtxt("data/Human/DataIROS/StartAndEnd.dat")
for i in range(len(start_and_end)):
    init_pos_list.append([floor(start_and_end[i][0]*1000)/1000,floor(start_and_end[i][1]*1000)/1000])
fin_pos = [0,0,1.57]
orientation_list = [1.57,0.0,-1.58,3.14]
path_clothoid_list= []
i = 0
for pos in init_pos_list:
    name_file = 'DdpResult_from_'+str(pos[0])+','+str(pos[1])+','+str(orientation_list[i%4])+\
    '_to_'+str(fin_pos[0])+','+str(fin_pos[1])+','+str(fin_pos[2])+'_pos.dat'
    path_clothoid_list.append('data/DdpResult/DataIROS/'+name_file)
    i += 1

# Common 500-sample time base and accumulators for the comparison metrics.
time = np.arange(0,500,1)
fig = plt.figure()
count = 1
dist_clothoid_list, dist_ddp_list,angular_dist_clothoid_list, angular_dist_ddp_list = [],[],[],[]
dist_fin_ddp_list , angular_dist_fin_ddp_list = [],[]
dist_subjects_ddp_list , angular_dist_subjects_ddp_list = [],[]
for i in range (len(path_human_list)):
title = path_human_list[i][11:17]
#print(title)
#ax = plt.subplot(1,4,count)
ax = plt.subplot(4,10,count)
#if title == 'E1540.' or title == 'N-0615' or title == 'S4015.' or title == 'O0640.':
print(title,i,count)
human_data = np.loadtxt(path_human_list[i])
# (x_clothoid,y_clothoid,theta_clothoid) = readModel(path_clothoid_list[i],time)
(x_ddp,y_ddp,theta_local_ddp,theta_global_ddp) = readModel(path_ddp_list[i],time)
plt.plot(x_ddp,y_ddp,label='OC',color='red',linewidth=1.5)
plt.plot(human_data[6],human_data[7],label='Subjects',color='lime',linewidth=0.75,alpha = 0.4)
plt.plot(human_data[12],human_data[13],color='lime',linewidth=0.75,alpha = 0.4)
plt.plot(human_data[18],human_data[19],color='lime',linewidth=0.75,alpha = 0.4)
plt.plot(human_data[24],human_data[25],color='lime',linewidth=0.75,alpha = 0.4)
plt.plot(human_data[30],human_data[31],color='lime',linewidth=0.75,alpha = 0.4)
plt.plot(human_data[36],human_data[37],color='lime',linewidth=0.75,alpha = 0.4)
plt.plot(human_data[42],human_data[43],color='lime',linewidth=0.75,alpha = 0.4)
plt.plot(human_data[48],human_data[49],color='lime',linewidth=0.75,alpha = 0.4)
plt.plot(human_data[54],human_data[55],color='lime',linewidth=0.75,alpha = 0.4)
plt.plot(human_data[60],human_data[61],color='lime',linewidth=0.75,alpha = 0.4)
plt.plot(human_data[0],human_data[1],label='Human average',color='green',linewidth=1.5)
if np.sum(human_data[5]) != 0:
arrow_len = 0.2
for i in range (len(human_data[0])):
if i%50 == 0:
plt.arrow(human_data[0][i], human_data[1][i], np.cos(human_data[5][i])*arrow_len, np.sin(human_data[5][i])*arrow_len, head_width=.03,color='green')
plt.arrow(x_ddp[i], y_ddp[i], np.cos(theta_global_ddp[i])*arrow_len, np.sin(theta_global_ddp[i])*arrow_len, head_width=.03,color='red')
plt.arrow(x_ddp[-1], y_ddp[-1], np.cos(theta_global_ddp[-1])*arrow_len, np.sin(theta_global_ddp[-1])*arrow_len, head_width=.03,color='red')
# plt.plot(time,v,color='orange')
# plt.plot([time[end]]*len(time),np.linspace(0,6,len(time)),color ='black')
# plt.plot([time[begin]]*len(time),np.linspace(0,6,len(time)),color ='black')
# plt.plot(time,human_data[5],linestyle=':',color ='black')
# plt.plot(time,theta_clothoid,color='red')
# plt.plot(time,theta_ddp,color='blue')
# plt.plot(time,theta_trunc,color='green')
# plt.plot(time,human_data[11],color='lime',linewidth=0.75,alpha = 0.4)
# plt.plot(time,human_data[17],color='lime',linewidth=0.75,alpha = 0.4)
# plt.plot(time,human_data[23],color='lime',linewidth=0.75,alpha = 0.4)
# plt.plot(time,human_data[29],color='lime',linewidth=0.75,alpha = 0.4)
# plt.plot(time,human_data[35],color='lime',linewidth=0.75,alpha = 0.4)
# plt.plot(time,human_data[41],color='lime',linewidth=0.75,alpha = 0.4)
# plt.plot(time,human_data[47],color='lime',linewidth=0.75,alpha = 0.4)
# plt.plot(time,human_data[53],color='lime',linewidth=0.75,alpha = 0.4)
# plt.plot(time,human_data[59],color='lime',linewidth=0.75,alpha = 0.4)
# plt.plot(time,human_data[65],color='lime',linewidth=0.75,alpha = 0.4)
# dist_clotho = distanceBetweenCurvs(human_data[0],x_clothoid,human_data[1],y_clothoid)
dist_ddp,dist_fin_ddp = distanceBetweenCurvs(human_data[0],x_ddp,human_data[1],y_ddp)
# dist_clothoid_list.append(dist_clotho)
dist_ddp_list.append(dist_ddp)
dist_fin_ddp_list.append(dist_fin_ddp)
for i in range (10):
dist_subjects_ddp_list.append(distanceBetweenCurvs(human_data[6+i*6],x_ddp,human_data[7+i*6],y_ddp)[0])
if np.sum(human_data[5]) != 0:
print("yes")
# angular_dist_clotho = angularDistanceBetweenCurvs(human_data[5],theta_clothoid)
angular_dist_ddp,angular_dist_fin_ddp = angularDistanceBetweenCurvs(human_data[4],theta_local_ddp)
else:
# angular_dist_clotho = 0
angular_dist_ddp,angular_dist_fin_ddp = 0,0
print("no",title)
# angular_dist_clothoid_list.append(angular_dist_clotho)
angular_dist_ddp_list.append(angular_dist_ddp)
angular_dist_fin_ddp_list.append(angular_dist_fin_ddp)
#print(i,path_human_list[i][62:68],dist_clotho,dist_ddp)
# plt.legend(fontsize = 'xx-large')
# plt.title(title)
plt.title("d_xy = " + str(floor(dist_ddp*10000)/10000) + " & d_eta = "+str(floor(angular_dist_ddp*10000)/10000), fontsize=18)
# plt.title('clotho :'+str(floor(angular_dist_clotho*100)/100) + \
# ' VS ddp :'+str(floor(angular_dist_ddp*100)/100))
# plt.title('Clothoid-Human d_xy='+str(floor(dist_clotho*100)/100) + \
# ' & d_th='+str(floor(angular_dist_clotho*100)/100)+ \
# ', OC-Human d_xy='+str(floor(dist_ddp*100)/100)+ \
# ' & d_th='+str(floor(angular_dist_ddp*100)/100))
# ax.set_xticklabels([])
# ax.set_yticklabels([])
plt.ylabel("y (m)")
plt.xlabel("x (m)")
#if count < 4:
count += 1
plt.show()
# Persist the accumulated model-vs-human distance metrics for later analysis.
# path = "data/dist_clotho.dat"
# np.savetxt(path,dist_clothoid_list)
path = "data/dist_ddp.dat"
np.savetxt(path,dist_ddp_list)
path = "data/dist_fin_ddp.dat"
np.savetxt(path,dist_fin_ddp_list)
# path = "data/angular_dist_clotho.dat"
# np.savetxt(path,angular_dist_clothoid_list)
path = "data/angular_dist_ddp.dat"
np.savetxt(path,angular_dist_ddp_list)
path = "data/angular_dist_fin_ddp.dat"
np.savetxt(path,angular_dist_fin_ddp_list)
# The matching np.savetxt for this path follows on the next line.
path = "data/dist_subjects_ddp.dat"
np.savetxt(path,dist_subjects_ddp_list) | [
"numpy.sqrt",
"math.floor",
"matplotlib.pylab.show",
"numpy.sin",
"numpy.arange",
"matplotlib.pylab.figure",
"numpy.diff",
"numpy.linspace",
"scipy.interpolate.splev",
"matplotlib.pylab.plot",
"matplotlib.pylab.xlabel",
"math.atan2",
"numpy.savetxt",
"numpy.interp",
"numpy.cos",
"scipy... | [((3451, 3491), 'numpy.loadtxt', 'np.loadtxt', (['"""data/Human/StartAndEnd.dat"""'], {}), "('data/Human/StartAndEnd.dat')\n", (3461, 3491), True, 'import numpy as np\n'), ((4257, 4306), 'numpy.loadtxt', 'np.loadtxt', (['"""data/Human/DataIROS/StartAndEnd.dat"""'], {}), "('data/Human/DataIROS/StartAndEnd.dat')\n", (4267, 4306), True, 'import numpy as np\n'), ((4808, 4828), 'numpy.arange', 'np.arange', (['(0)', '(500)', '(1)'], {}), '(0, 500, 1)\n', (4817, 4828), True, 'import numpy as np\n'), ((4834, 4846), 'matplotlib.pylab.figure', 'plt.figure', ([], {}), '()\n', (4844, 4846), True, 'import matplotlib.pylab as plt\n'), ((9839, 9849), 'matplotlib.pylab.show', 'plt.show', ([], {}), '()\n', (9847, 9849), True, 'import matplotlib.pylab as plt\n'), ((9949, 9980), 'numpy.savetxt', 'np.savetxt', (['path', 'dist_ddp_list'], {}), '(path, dist_ddp_list)\n', (9959, 9980), True, 'import numpy as np\n'), ((10012, 10047), 'numpy.savetxt', 'np.savetxt', (['path', 'dist_fin_ddp_list'], {}), '(path, dist_fin_ddp_list)\n', (10022, 10047), True, 'import numpy as np\n'), ((10170, 10209), 'numpy.savetxt', 'np.savetxt', (['path', 'angular_dist_ddp_list'], {}), '(path, angular_dist_ddp_list)\n', (10180, 10209), True, 'import numpy as np\n'), ((10249, 10292), 'numpy.savetxt', 'np.savetxt', (['path', 'angular_dist_fin_ddp_list'], {}), '(path, angular_dist_fin_ddp_list)\n', (10259, 10292), True, 'import numpy as np\n'), ((10329, 10369), 'numpy.savetxt', 'np.savetxt', (['path', 'dist_subjects_ddp_list'], {}), '(path, dist_subjects_ddp_list)\n', (10339, 10369), True, 'import numpy as np\n'), ((736, 756), 'scipy.interpolate.splprep', 'splprep', (['[x, y]'], {'s': '(0)'}), '([x, y], s=0)\n', (743, 756), False, 'from scipy.interpolate import splprep, splev\n'), ((765, 790), 'numpy.linspace', 'np.linspace', (['(0)', '(1)', 'length'], {}), '(0, 1, length)\n', (776, 790), True, 'import numpy as np\n'), ((797, 813), 'scipy.interpolate.splev', 'splev', (['unew', 'tck'], {}), '(unew, 
tck)\n', (802, 813), False, 'from scipy.interpolate import splprep, splev\n'), ((847, 881), 'numpy.interp', 'np.interp', (['time', 'time_model', 'theta'], {}), '(time, time_model, theta)\n', (856, 881), True, 'import numpy as np\n'), ((892, 902), 'numpy.diff', 'np.diff', (['y'], {}), '(y)\n', (899, 902), True, 'import numpy as np\n'), ((914, 924), 'numpy.diff', 'np.diff', (['x'], {}), '(x)\n', (921, 924), True, 'import numpy as np\n'), ((1178, 1248), 'numpy.sqrt', 'np.sqrt', (['((x_sim[-1] - x_real[-1]) ** 2 + (y_sim[-1] - y_real[-1]) ** 2)'], {}), '((x_sim[-1] - x_real[-1]) ** 2 + (y_sim[-1] - y_real[-1]) ** 2)\n', (1185, 1248), True, 'import numpy as np\n'), ((1369, 1399), 'scipy.interpolate.splprep', 'splprep', (['[x_real, y_real]'], {'s': '(0)'}), '([x_real, y_real], s=0)\n', (1376, 1399), False, 'from scipy.interpolate import splprep, splev\n'), ((1408, 1433), 'numpy.linspace', 'np.linspace', (['(0)', '(1)', 'length'], {}), '(0, 1, length)\n', (1419, 1433), True, 'import numpy as np\n'), ((1440, 1456), 'scipy.interpolate.splev', 'splev', (['unew', 'tck'], {}), '(unew, tck)\n', (1445, 1456), False, 'from scipy.interpolate import splprep, splev\n'), ((5201, 5226), 'matplotlib.pylab.subplot', 'plt.subplot', (['(4)', '(10)', 'count'], {}), '(4, 10, count)\n', (5212, 5226), True, 'import matplotlib.pylab as plt\n'), ((5355, 5385), 'numpy.loadtxt', 'np.loadtxt', (['path_human_list[i]'], {}), '(path_human_list[i])\n', (5365, 5385), True, 'import numpy as np\n'), ((5554, 5616), 'matplotlib.pylab.plot', 'plt.plot', (['x_ddp', 'y_ddp'], {'label': '"""OC"""', 'color': '"""red"""', 'linewidth': '(1.5)'}), "(x_ddp, y_ddp, label='OC', color='red', linewidth=1.5)\n", (5562, 5616), True, 'import matplotlib.pylab as plt\n'), ((5617, 5718), 'matplotlib.pylab.plot', 'plt.plot', (['human_data[6]', 'human_data[7]'], {'label': '"""Subjects"""', 'color': '"""lime"""', 'linewidth': '(0.75)', 'alpha': '(0.4)'}), "(human_data[6], human_data[7], label='Subjects', color='lime',\n 
linewidth=0.75, alpha=0.4)\n", (5625, 5718), True, 'import matplotlib.pylab as plt\n'), ((5713, 5798), 'matplotlib.pylab.plot', 'plt.plot', (['human_data[12]', 'human_data[13]'], {'color': '"""lime"""', 'linewidth': '(0.75)', 'alpha': '(0.4)'}), "(human_data[12], human_data[13], color='lime', linewidth=0.75,\n alpha=0.4)\n", (5721, 5798), True, 'import matplotlib.pylab as plt\n'), ((5794, 5879), 'matplotlib.pylab.plot', 'plt.plot', (['human_data[18]', 'human_data[19]'], {'color': '"""lime"""', 'linewidth': '(0.75)', 'alpha': '(0.4)'}), "(human_data[18], human_data[19], color='lime', linewidth=0.75,\n alpha=0.4)\n", (5802, 5879), True, 'import matplotlib.pylab as plt\n'), ((5876, 5961), 'matplotlib.pylab.plot', 'plt.plot', (['human_data[24]', 'human_data[25]'], {'color': '"""lime"""', 'linewidth': '(0.75)', 'alpha': '(0.4)'}), "(human_data[24], human_data[25], color='lime', linewidth=0.75,\n alpha=0.4)\n", (5884, 5961), True, 'import matplotlib.pylab as plt\n'), ((5957, 6042), 'matplotlib.pylab.plot', 'plt.plot', (['human_data[30]', 'human_data[31]'], {'color': '"""lime"""', 'linewidth': '(0.75)', 'alpha': '(0.4)'}), "(human_data[30], human_data[31], color='lime', linewidth=0.75,\n alpha=0.4)\n", (5965, 6042), True, 'import matplotlib.pylab as plt\n'), ((6038, 6123), 'matplotlib.pylab.plot', 'plt.plot', (['human_data[36]', 'human_data[37]'], {'color': '"""lime"""', 'linewidth': '(0.75)', 'alpha': '(0.4)'}), "(human_data[36], human_data[37], color='lime', linewidth=0.75,\n alpha=0.4)\n", (6046, 6123), True, 'import matplotlib.pylab as plt\n'), ((6119, 6204), 'matplotlib.pylab.plot', 'plt.plot', (['human_data[42]', 'human_data[43]'], {'color': '"""lime"""', 'linewidth': '(0.75)', 'alpha': '(0.4)'}), "(human_data[42], human_data[43], color='lime', linewidth=0.75,\n alpha=0.4)\n", (6127, 6204), True, 'import matplotlib.pylab as plt\n'), ((6200, 6285), 'matplotlib.pylab.plot', 'plt.plot', (['human_data[48]', 'human_data[49]'], {'color': '"""lime"""', 'linewidth': 
'(0.75)', 'alpha': '(0.4)'}), "(human_data[48], human_data[49], color='lime', linewidth=0.75,\n alpha=0.4)\n", (6208, 6285), True, 'import matplotlib.pylab as plt\n'), ((6281, 6366), 'matplotlib.pylab.plot', 'plt.plot', (['human_data[54]', 'human_data[55]'], {'color': '"""lime"""', 'linewidth': '(0.75)', 'alpha': '(0.4)'}), "(human_data[54], human_data[55], color='lime', linewidth=0.75,\n alpha=0.4)\n", (6289, 6366), True, 'import matplotlib.pylab as plt\n'), ((6362, 6447), 'matplotlib.pylab.plot', 'plt.plot', (['human_data[60]', 'human_data[61]'], {'color': '"""lime"""', 'linewidth': '(0.75)', 'alpha': '(0.4)'}), "(human_data[60], human_data[61], color='lime', linewidth=0.75,\n alpha=0.4)\n", (6370, 6447), True, 'import matplotlib.pylab as plt\n'), ((6443, 6538), 'matplotlib.pylab.plot', 'plt.plot', (['human_data[0]', 'human_data[1]'], {'label': '"""Human average"""', 'color': '"""green"""', 'linewidth': '(1.5)'}), "(human_data[0], human_data[1], label='Human average', color='green',\n linewidth=1.5)\n", (6451, 6538), True, 'import matplotlib.pylab as plt\n'), ((9769, 9788), 'matplotlib.pylab.ylabel', 'plt.ylabel', (['"""y (m)"""'], {}), "('y (m)')\n", (9779, 9788), True, 'import matplotlib.pylab as plt\n'), ((9790, 9809), 'matplotlib.pylab.xlabel', 'plt.xlabel', (['"""x (m)"""'], {}), "('x (m)')\n", (9800, 9809), True, 'import matplotlib.pylab as plt\n'), ((533, 549), 'numpy.loadtxt', 'np.loadtxt', (['path'], {}), '(path)\n', (543, 549), True, 'import numpy as np\n'), ((981, 1010), 'math.atan2', 'atan2', (['delta_y[i]', 'delta_x[i]'], {}), '(delta_y[i], delta_x[i])\n', (986, 1010), False, 'from math import pi, floor, atan2, atan\n'), ((1531, 1597), 'numpy.sqrt', 'np.sqrt', (['((x_sim[i] - x_real[i]) ** 2 + (y_sim[i] - y_real[i]) ** 2)'], {}), '((x_sim[i] - x_real[i]) ** 2 + (y_sim[i] - y_real[i]) ** 2)\n', (1538, 1597), True, 'import numpy as np\n'), ((6536, 6557), 'numpy.sum', 'np.sum', (['human_data[5]'], {}), '(human_data[5])\n', (6542, 6557), True, 'import 
numpy as np\n'), ((8618, 8639), 'numpy.sum', 'np.sum', (['human_data[5]'], {}), '(human_data[5])\n', (8624, 8639), True, 'import numpy as np\n'), ((6963, 6991), 'numpy.cos', 'np.cos', (['theta_global_ddp[-1]'], {}), '(theta_global_ddp[-1])\n', (6969, 6991), True, 'import numpy as np\n'), ((7003, 7031), 'numpy.sin', 'np.sin', (['theta_global_ddp[-1]'], {}), '(theta_global_ddp[-1])\n', (7009, 7031), True, 'import numpy as np\n'), ((3551, 3584), 'math.floor', 'floor', (['(start_and_end[i][0] * 1000)'], {}), '(start_and_end[i][0] * 1000)\n', (3556, 3584), False, 'from math import pi, floor, atan2, atan\n'), ((3588, 3621), 'math.floor', 'floor', (['(start_and_end[i][1] * 1000)'], {}), '(start_and_end[i][1] * 1000)\n', (3593, 3621), False, 'from math import pi, floor, atan2, atan\n'), ((4366, 4399), 'math.floor', 'floor', (['(start_and_end[i][0] * 1000)'], {}), '(start_and_end[i][0] * 1000)\n', (4371, 4399), False, 'from math import pi, floor, atan2, atan\n'), ((4403, 4436), 'math.floor', 'floor', (['(start_and_end[i][1] * 1000)'], {}), '(start_and_end[i][1] * 1000)\n', (4408, 4436), False, 'from math import pi, floor, atan2, atan\n'), ((662, 672), 'numpy.diff', 'np.diff', (['x'], {}), '(x)\n', (669, 672), True, 'import numpy as np\n'), ((683, 693), 'numpy.diff', 'np.diff', (['y'], {}), '(y)\n', (690, 693), True, 'import numpy as np\n'), ((1265, 1280), 'numpy.diff', 'np.diff', (['x_real'], {}), '(x_real)\n', (1272, 1280), True, 'import numpy as np\n'), ((1291, 1306), 'numpy.diff', 'np.diff', (['y_real'], {}), '(y_real)\n', (1298, 1306), True, 'import numpy as np\n'), ((9307, 9338), 'math.floor', 'floor', (['(angular_dist_ddp * 10000)'], {}), '(angular_dist_ddp * 10000)\n', (9312, 9338), False, 'from math import pi, floor, atan2, atan\n'), ((6688, 6712), 'numpy.cos', 'np.cos', (['human_data[5][i]'], {}), '(human_data[5][i])\n', (6694, 6712), True, 'import numpy as np\n'), ((6724, 6748), 'numpy.sin', 'np.sin', (['human_data[5][i]'], {}), '(human_data[5][i])\n', (6730, 
6748), True, 'import numpy as np\n'), ((6824, 6851), 'numpy.cos', 'np.cos', (['theta_global_ddp[i]'], {}), '(theta_global_ddp[i])\n', (6830, 6851), True, 'import numpy as np\n'), ((6863, 6890), 'numpy.sin', 'np.sin', (['theta_global_ddp[i]'], {}), '(theta_global_ddp[i])\n', (6869, 6890), True, 'import numpy as np\n'), ((9258, 9281), 'math.floor', 'floor', (['(dist_ddp * 10000)'], {}), '(dist_ddp * 10000)\n', (9263, 9281), False, 'from math import pi, floor, atan2, atan\n')] |
import datetime
import asynctest.mock
import pytest
from exptools.time import (
utcnow, localnow,
as_local, as_utc,
format_utc, format_utc_short, format_local, format_local_short,
parse_utc, parse_local,
diff_sec,
format_sec, format_sec_fixed, format_sec_short,
job_elapsed_time,
format_job_count,
format_estimated_time,
)
class _mocked_datetime(datetime.datetime):
  """datetime.datetime replacement whose "now" is frozen.

  Both utcnow() and localnow() always report 2000-01-02 03:04:05.678901,
  which makes the time-dependent assertions below deterministic.
  """

  # The frozen instant, as datetime constructor arguments.
  _FROZEN_ARGS = (2000, 1, 2, 3, 4, 5, 678901)

  @classmethod
  def utcnow(cls):
    return cls(*cls._FROZEN_ARGS)

  @classmethod
  def localnow(cls):
    return cls(*cls._FROZEN_ARGS)
@asynctest.mock.patch('datetime.datetime', new=_mocked_datetime)
def test_utcnow():
  """With datetime frozen, utcnow() formats to the fixed timestamp."""
  stamp = utcnow()
  assert format_utc(stamp) == '2000-01-02 03:04:05.678901'
@asynctest.mock.patch('datetime.datetime', new=_mocked_datetime)
def test_localnow():
  """With datetime frozen, localnow() also renders the fixed timestamp."""
  # NOTE(review): mirrors the original assertion, which formats the
  # local-time value with format_utc — confirm format_local was not intended.
  stamp = localnow()
  assert format_utc(stamp) == '2000-01-02 03:04:05.678901'
def test_as_utc():
  """as_utc is the identity on a timestamp already parsed as UTC."""
  parsed = parse_utc('2000-01-02 03:04:05.678901')
  assert as_utc(parsed) == parsed
def test_as_local():
  """as_local is the identity on a timestamp already parsed as local."""
  parsed = parse_local('2000-01-02 03:04:05.678901')
  assert as_local(parsed) == parsed
def test_as_utc_as_local_as_utc():
  """Round-tripping through the opposite timezone conversion is lossless."""
  stamp = '2000-01-02 03:04:05.678901'
  utc_t = parse_utc(stamp)
  assert as_utc(as_local(utc_t)) == utc_t
  local_t = parse_local(stamp)
  assert as_local(as_utc(local_t)) == local_t
def test_parse_utc_format_utc():
  """format_utc inverts parse_utc exactly, microseconds included."""
  stamp = '2000-01-02 03:04:05.678901'
  assert format_utc(parse_utc(stamp)) == stamp
def test_parse_local_format_local():
  """format_local inverts parse_local exactly, microseconds included."""
  stamp = '2000-01-02 03:04:05.678901'
  assert format_local(parse_local(stamp)) == stamp
def test_parse_utc_format_utc_short():
  """The short UTC format drops the microsecond component."""
  parsed = parse_utc('2000-01-02 03:04:05.678901')
  assert format_utc_short(parsed) == '2000-01-02 03:04:05'
def test_parse_local_format_local_short():
  """The short local format drops the microsecond component."""
  parsed = parse_local('2000-01-02 03:04:05.678901')
  assert format_local_short(parsed) == '2000-01-02 03:04:05'
def test_diff_sec():
  """diff_sec returns the signed difference t1 - t2 in seconds."""
  later = parse_utc('2000-01-02 03:04:05.678901')
  earlier = parse_utc('2000-01-02 02:03:04.678900')
  # 1 hour + 1 minute + 1 second + 1 microsecond
  assert diff_sec(later, earlier) == 3600 + 60 + 1 + 0.000001
def test_format_sec():
  """format_sec spells durations out long-form, largest unit first."""
  expectations = [
      (0, '0 seconds'),
      (1, '1 second'),
      (2, '2 seconds'),
      (60, '1 minute'),
      (60 * 2, '2 minutes'),
      (60 * 2 + 1, '2 minutes 1 second'),
      (3600, '1 hour'),
      (3600 * 2, '2 hours'),
      (3600 * 2 + 60, '2 hours 1 minute'),
      (3600 * 2 + 60 + 1, '2 hours 1 minute 1 second'),
      (86400, '1 day'),
      (86400 * 2, '2 days'),
      (86400 * 2 + 3600, '2 days 1 hour'),
      (86400 * 2 + 3600 + 60, '2 days 1 hour 1 minute'),
      (86400 * 2 + 3600 + 60 + 1, '2 days 1 hour 1 minute 1 second'),
      (86400 * 7, '1 week'),
      (86400 * 7 + 1, '1 week 1 second'),
      (86400 * 7 + 60, '1 week 1 minute'),
      (86400 * 7 + 3600, '1 week 1 hour'),
      (86400 * 7 + 86400, '1 week 1 day'),
      ]
  for seconds, expected in expectations:
    assert format_sec(seconds) == expected
def test_format_sec_fixed():
  """format_sec_fixed renders H:MM:SS with zero-padded minutes/seconds."""
  expectations = [
      (2, '0:00:02'),
      (60 * 2 + 1, '0:02:01'),
      (3600 * 2 + 60 + 1, '2:01:01'),
      ]
  for seconds, expected in expectations:
    assert format_sec_fixed(seconds) == expected
def test_format_short():
  """format_sec_short keeps at most the two largest units, abbreviated."""
  expectations = [
      (0, '0s'),
      (1, '1s'),
      (2, '2s'),
      (60, '1m 0s'),
      (60 * 2, '2m 0s'),
      (60 * 2 + 1, '2m 1s'),
      (3600, '1h 0m'),
      (3600 * 2, '2h 0m'),
      (3600 * 2 + 60, '2h 1m'),
      (3600 * 2 + 60 + 1, '2h 1m'),
      (86400, '1d 0h'),
      (86400 * 2, '2d 0h'),
      (86400 * 2 + 3600, '2d 1h'),
      (86400 * 2 + 3600 + 60, '2d 1h'),
      (86400 * 2 + 3600 + 60 + 1, '2d 1h'),
      (86400 * 7, '1w 0d'),
      (86400 * 7 + 1, '1w 0d'),
      (86400 * 7 + 60, '1w 0d'),
      (86400 * 7 + 3600, '1w 0d'),
      (86400 * 7 + 86400, '1w 1d'),
      ]
  for seconds, expected in expectations:
    assert format_sec_short(seconds) == expected
def test_job_elapsed_time_finished_job():
  """A finished job's elapsed time is its recorded duration."""
  finished_job = {
      'finished': '2000-01-02 03:04:05.678901',
      'duration': 10.,
      }
  assert job_elapsed_time(finished_job) == 10.
@asynctest.mock.patch('exptools.time.utcnow')
def test_job_elapsed_time_started_job(mock_utcnow):
  """An unfinished job's elapsed time is measured against "now"."""
  mock_utcnow.return_value = parse_utc('2000-01-02 03:04:15.678901')
  running_job = {'finished': None, 'started': '2000-01-02 03:04:05.678901'}
  assert job_elapsed_time(running_job) == 10.
@asynctest.mock.patch('termcolor.colored')
def test_format_job_count(mock_colored):
  """format_job_count tallies succeeded/failed/active/queued jobs."""
  # Strip coloring so the plain text can be compared.
  mock_colored.side_effect = lambda s, *args, **kwargs: s

  def make_state(finished_jobs):
    # Started/queued jobs are only counted, so placeholders suffice.
    return {
        'finished_jobs': finished_jobs,
        'started_jobs': [3, 4, 5],
        'queued_jobs': [6, 7, 8, 9],
        }

  all_succeeded = make_state([{'succeeded': True}])
  assert format_job_count(all_succeeded, True) == 'S:1 F:0 A:3 Q:4'
  some_failed = make_state(
      [{'succeeded': True}, {'succeeded': False}, {'succeeded': False}])
  assert format_job_count(some_failed, False) == 'S:1 F:2 A:3 Q:4'
@pytest.mark.asyncio
@asynctest.mock.patch('datetime.datetime', new=_mocked_datetime)
@asynctest.mock.patch('termcolor.colored')
async def test_format_estimated_time(mock_colored):
  """format_estimated_time combines the job-count summary, the remaining
  seconds, the projected finish timestamp, and the queue concurrency."""
  # Strip coloring so plain strings can be compared.
  mock_colored.side_effect = lambda s, *args, **kwargs: s
  remaining_time = 10.
  queue_state = {
      'finished_jobs': [{'succeeded': True}, {'succeeded': False}, {'succeeded': False}],
      'started_jobs': [3, 4, 5],
      'queued_jobs': [6, 7, 8, 9],
      'concurrency': 1.1,
      }
  # datetime is frozen by _mocked_datetime, so utcnow() evaluated here is
  # the same "now" the function under test sees.
  assert (await format_estimated_time(remaining_time, queue_state, True) ==
          'S:1 F:2 A:3 Q:4 Remaining %ds Finish by %s Concurrency 1.1' % (
              remaining_time,
              format_local_short(utcnow() + datetime.timedelta(seconds=10))))
  assert (await format_estimated_time(10., queue_state, False) ==
          'S:1 F:2 A:3 Q:4 Remaining %ds Finish by %s Concurrency 1.1' % (
              remaining_time,
              format_local_short(utcnow() + datetime.timedelta(seconds=10))))
| [
"exptools.time.parse_local",
"exptools.time.format_estimated_time",
"exptools.time.format_sec_short",
"exptools.time.utcnow",
"exptools.time.localnow",
"exptools.time.parse_utc",
"exptools.time.as_local",
"exptools.time.format_job_count",
"exptools.time.job_elapsed_time",
"exptools.time.format_sec... | [((873, 912), 'exptools.time.parse_utc', 'parse_utc', (['"""2000-01-02 03:04:05.678901"""'], {}), "('2000-01-02 03:04:05.678901')\n", (882, 912), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((966, 1007), 'exptools.time.parse_local', 'parse_local', (['"""2000-01-02 03:04:05.678901"""'], {}), "('2000-01-02 03:04:05.678901')\n", (977, 1007), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((1077, 1116), 'exptools.time.parse_utc', 'parse_utc', (['"""2000-01-02 03:04:05.678901"""'], {}), "('2000-01-02 03:04:05.678901')\n", (1086, 1116), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((1158, 1199), 'exptools.time.parse_local', 'parse_local', (['"""2000-01-02 03:04:05.678901"""'], {}), "('2000-01-02 03:04:05.678901')\n", (1169, 1199), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((1802, 1841), 'exptools.time.parse_utc', 'parse_utc', (['"""2000-01-02 03:04:05.678901"""'], {}), "('2000-01-02 03:04:05.678901')\n", (1811, 1841), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, 
format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((1849, 1888), 'exptools.time.parse_utc', 'parse_utc', (['"""2000-01-02 02:03:04.678900"""'], {}), "('2000-01-02 02:03:04.678900')\n", (1858, 1888), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((4481, 4520), 'exptools.time.parse_utc', 'parse_utc', (['"""2000-01-02 03:04:15.678901"""'], {}), "('2000-01-02 03:04:15.678901')\n", (4490, 4520), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((927, 936), 'exptools.time.as_utc', 'as_utc', (['t'], {}), '(t)\n', (933, 936), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((1022, 1033), 'exptools.time.as_local', 'as_local', (['t'], {}), '(t)\n', (1030, 1033), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((1898, 1914), 'exptools.time.diff_sec', 'diff_sec', (['t1', 't2'], {}), '(t1, t2)\n', (1906, 1914), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, 
parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((1977, 1990), 'exptools.time.format_sec', 'format_sec', (['(0)'], {}), '(0)\n', (1987, 1990), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((2015, 2028), 'exptools.time.format_sec', 'format_sec', (['(1)'], {}), '(1)\n', (2025, 2028), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((2052, 2065), 'exptools.time.format_sec', 'format_sec', (['(2)'], {}), '(2)\n', (2062, 2065), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((2091, 2105), 'exptools.time.format_sec', 'format_sec', (['(60)'], {}), '(60)\n', (2101, 2105), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((2129, 2147), 'exptools.time.format_sec', 'format_sec', (['(60 * 2)'], {}), '(60 * 2)\n', (2139, 2147), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, 
format_estimated_time\n'), ((2172, 2194), 'exptools.time.format_sec', 'format_sec', (['(60 * 2 + 1)'], {}), '(60 * 2 + 1)\n', (2182, 2194), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((2229, 2245), 'exptools.time.format_sec', 'format_sec', (['(3600)'], {}), '(3600)\n', (2239, 2245), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((2267, 2287), 'exptools.time.format_sec', 'format_sec', (['(3600 * 2)'], {}), '(3600 * 2)\n', (2277, 2287), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((2310, 2335), 'exptools.time.format_sec', 'format_sec', (['(3600 * 2 + 60)'], {}), '(3600 * 2 + 60)\n', (2320, 2335), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((2367, 2396), 'exptools.time.format_sec', 'format_sec', (['(3600 * 2 + 60 + 1)'], {}), '(3600 * 2 + 60 + 1)\n', (2377, 2396), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((2438, 
2455), 'exptools.time.format_sec', 'format_sec', (['(86400)'], {}), '(86400)\n', (2448, 2455), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((2476, 2497), 'exptools.time.format_sec', 'format_sec', (['(86400 * 2)'], {}), '(86400 * 2)\n', (2486, 2497), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((2519, 2547), 'exptools.time.format_sec', 'format_sec', (['(86400 * 2 + 3600)'], {}), '(86400 * 2 + 3600)\n', (2529, 2547), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((2576, 2609), 'exptools.time.format_sec', 'format_sec', (['(86400 * 2 + 3600 + 60)'], {}), '(86400 * 2 + 3600 + 60)\n', (2586, 2609), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((2647, 2684), 'exptools.time.format_sec', 'format_sec', (['(86400 * 2 + 3600 + 60 + 1)'], {}), '(86400 * 2 + 3600 + 60 + 1)\n', (2657, 2684), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, 
format_estimated_time\n'), ((2732, 2753), 'exptools.time.format_sec', 'format_sec', (['(86400 * 7)'], {}), '(86400 * 7)\n', (2742, 2753), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((2775, 2800), 'exptools.time.format_sec', 'format_sec', (['(86400 * 7 + 1)'], {}), '(86400 * 7 + 1)\n', (2785, 2800), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((2831, 2857), 'exptools.time.format_sec', 'format_sec', (['(86400 * 7 + 60)'], {}), '(86400 * 7 + 60)\n', (2841, 2857), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((2888, 2916), 'exptools.time.format_sec', 'format_sec', (['(86400 * 7 + 3600)'], {}), '(86400 * 7 + 3600)\n', (2898, 2916), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((2945, 2974), 'exptools.time.format_sec', 'format_sec', (['(86400 * 7 + 86400)'], {}), '(86400 * 7 + 86400)\n', (2955, 2974), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, 
format_estimated_time\n'), ((3033, 3052), 'exptools.time.format_sec_fixed', 'format_sec_fixed', (['(2)'], {}), '(2)\n', (3049, 3052), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((3075, 3103), 'exptools.time.format_sec_fixed', 'format_sec_fixed', (['(60 * 2 + 1)'], {}), '(60 * 2 + 1)\n', (3091, 3103), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((3126, 3161), 'exptools.time.format_sec_fixed', 'format_sec_fixed', (['(3600 * 2 + 60 + 1)'], {}), '(3600 * 2 + 60 + 1)\n', (3142, 3161), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((3211, 3230), 'exptools.time.format_sec_short', 'format_sec_short', (['(0)'], {}), '(0)\n', (3227, 3230), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((3248, 3267), 'exptools.time.format_sec_short', 'format_sec_short', (['(1)'], {}), '(1)\n', (3264, 3267), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, 
format_estimated_time\n'), ((3285, 3304), 'exptools.time.format_sec_short', 'format_sec_short', (['(2)'], {}), '(2)\n', (3301, 3304), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((3323, 3343), 'exptools.time.format_sec_short', 'format_sec_short', (['(60)'], {}), '(60)\n', (3339, 3343), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((3365, 3389), 'exptools.time.format_sec_short', 'format_sec_short', (['(60 * 2)'], {}), '(60 * 2)\n', (3381, 3389), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((3411, 3439), 'exptools.time.format_sec_short', 'format_sec_short', (['(60 * 2 + 1)'], {}), '(60 * 2 + 1)\n', (3427, 3439), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((3462, 3484), 'exptools.time.format_sec_short', 'format_sec_short', (['(3600)'], {}), '(3600)\n', (3478, 3484), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), 
((3506, 3532), 'exptools.time.format_sec_short', 'format_sec_short', (['(3600 * 2)'], {}), '(3600 * 2)\n', (3522, 3532), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((3554, 3585), 'exptools.time.format_sec_short', 'format_sec_short', (['(3600 * 2 + 60)'], {}), '(3600 * 2 + 60)\n', (3570, 3585), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((3607, 3642), 'exptools.time.format_sec_short', 'format_sec_short', (['(3600 * 2 + 60 + 1)'], {}), '(3600 * 2 + 60 + 1)\n', (3623, 3642), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((3665, 3688), 'exptools.time.format_sec_short', 'format_sec_short', (['(86400)'], {}), '(86400)\n', (3681, 3688), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((3710, 3737), 'exptools.time.format_sec_short', 'format_sec_short', (['(86400 * 2)'], {}), '(86400 * 2)\n', (3726, 3737), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, 
format_estimated_time\n'), ((3759, 3793), 'exptools.time.format_sec_short', 'format_sec_short', (['(86400 * 2 + 3600)'], {}), '(86400 * 2 + 3600)\n', (3775, 3793), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((3815, 3854), 'exptools.time.format_sec_short', 'format_sec_short', (['(86400 * 2 + 3600 + 60)'], {}), '(86400 * 2 + 3600 + 60)\n', (3831, 3854), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((3876, 3919), 'exptools.time.format_sec_short', 'format_sec_short', (['(86400 * 2 + 3600 + 60 + 1)'], {}), '(86400 * 2 + 3600 + 60 + 1)\n', (3892, 3919), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((3942, 3969), 'exptools.time.format_sec_short', 'format_sec_short', (['(86400 * 7)'], {}), '(86400 * 7)\n', (3958, 3969), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((3991, 4022), 'exptools.time.format_sec_short', 'format_sec_short', (['(86400 * 7 + 1)'], {}), '(86400 * 7 + 1)\n', (4007, 4022), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, 
diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((4044, 4076), 'exptools.time.format_sec_short', 'format_sec_short', (['(86400 * 7 + 60)'], {}), '(86400 * 7 + 60)\n', (4060, 4076), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((4098, 4132), 'exptools.time.format_sec_short', 'format_sec_short', (['(86400 * 7 + 3600)'], {}), '(86400 * 7 + 3600)\n', (4114, 4132), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((4154, 4189), 'exptools.time.format_sec_short', 'format_sec_short', (['(86400 * 7 + 86400)'], {}), '(86400 * 7 + 86400)\n', (4170, 4189), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((4323, 4344), 'exptools.time.job_elapsed_time', 'job_elapsed_time', (['job'], {}), '(job)\n', (4339, 4344), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((4598, 4619), 'exptools.time.job_elapsed_time', 'job_elapsed_time', (['job'], {}), '(job)\n', (4614, 4619), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, 
parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((4911, 4946), 'exptools.time.format_job_count', 'format_job_count', (['queue_state', '(True)'], {}), '(queue_state, True)\n', (4927, 4946), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((5152, 5188), 'exptools.time.format_job_count', 'format_job_count', (['queue_state', '(False)'], {}), '(queue_state, False)\n', (5168, 5188), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((652, 660), 'exptools.time.utcnow', 'utcnow', ([], {}), '()\n', (658, 660), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((802, 812), 'exptools.time.localnow', 'localnow', ([], {}), '()\n', (810, 812), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((1138, 1149), 'exptools.time.as_local', 'as_local', (['t'], {}), '(t)\n', (1146, 1149), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, 
format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((1223, 1232), 'exptools.time.as_utc', 'as_utc', (['t'], {}), '(t)\n', (1229, 1232), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((1289, 1328), 'exptools.time.parse_utc', 'parse_utc', (['"""2000-01-02 03:04:05.678901"""'], {}), "('2000-01-02 03:04:05.678901')\n", (1298, 1328), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((1423, 1464), 'exptools.time.parse_local', 'parse_local', (['"""2000-01-02 03:04:05.678901"""'], {}), "('2000-01-02 03:04:05.678901')\n", (1434, 1464), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((1565, 1604), 'exptools.time.parse_utc', 'parse_utc', (['"""2000-01-02 03:04:05.678901"""'], {}), "('2000-01-02 03:04:05.678901')\n", (1574, 1604), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((1704, 1745), 'exptools.time.parse_local', 'parse_local', (['"""2000-01-02 03:04:05.678901"""'], {}), "('2000-01-02 03:04:05.678901')\n", (1715, 1745), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, 
format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((5691, 5747), 'exptools.time.format_estimated_time', 'format_estimated_time', (['remaining_time', 'queue_state', '(True)'], {}), '(remaining_time, queue_state, True)\n', (5712, 5747), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((5950, 5997), 'exptools.time.format_estimated_time', 'format_estimated_time', (['(10.0)', 'queue_state', '(False)'], {}), '(10.0, queue_state, False)\n', (5971, 5997), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((5888, 5896), 'exptools.time.utcnow', 'utcnow', ([], {}), '()\n', (5894, 5896), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((5899, 5929), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(10)'}), '(seconds=10)\n', (5917, 5929), False, 'import datetime\n'), ((6137, 6145), 'exptools.time.utcnow', 'utcnow', ([], {}), '()\n', (6143, 6145), False, 'from exptools.time import utcnow, localnow, as_local, as_utc, format_utc, format_utc_short, format_local, format_local_short, parse_utc, parse_local, diff_sec, format_sec, format_sec_fixed, format_sec_short, job_elapsed_time, format_job_count, format_estimated_time\n'), ((6148, 6178), 'datetime.timedelta', 
'datetime.timedelta', ([], {'seconds': '(10)'}), '(seconds=10)\n', (6166, 6178), False, 'import datetime\n')] |
import os
import struct
import sys
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from ply import lex, yacc
class EDR:
    """Reader for experimental data record (EDR) files from the PDS.
    This object will ingest and store data from the EDR files, to be
    processed later. It only ingests data into a convenient structure,
    and de-compresses the science data, no processing occurs.
    """
    def __init__(self, lbl=None):
        """Create an EDR object.
        Create an EDR object, read in an EDR file if given.
        Parameters
        ----------
        lbl: str, optional
            Path to an EDR label file which must be in the same directory as
            the corresponding science and geometry files
        Returns
        -------
        EDR
        Notes
        -----
        """
        if lbl is None:
            return
        self.load(lbl)
    def load(self, lbl):
        """Load a set of EDR files.
        Read in the label, science, and geometry EDR files for a given
        observation.
        Parameters
        ----------
        lbl: str
            Path to an EDR label file which must be in the same directory as
            the corresponding science and geometry files
        Returns
        -------
        None (results are stored on self.lbld, self.anc, self.ost,
        self.data, self.geo)
        Notes
        -----
        The science (``*_f.dat``) and geometry (``*_g.dat``) file names are
        derived from the label path by suffix substitution.
        """
        # Read label file
        self.lbld = self.parseLBL(lbl)
        # Science and aux file names
        sci = lbl.replace(".lbl", "_f.dat")
        geo = lbl.replace(".lbl", "_g.dat")
        # Read science file
        self.anc, self.ost, self.data = self.parseSci(sci, self.lbld)
        # Read geometry file
        self.geo = self.parseGeo(geo, self.lbld)
    def parseGeo(self, file, lbld):
        """Read in a geometry data file.
        Parameters
        ----------
        file: str
            Path to an geometry data file
        lbld: dict
            Dictionary containing data parsed from corresponding label
            file, created by the parseLBL method
        Returns
        -------
        numpy.ndarray
            Structured array with one entry per geometry record.
        Notes
        -----
        The record layout is hard-coded below; it presumably mirrors the
        layout described in the PDS label (big-endian fields, hence the
        ``>`` dtype prefixes) — TODO confirm against the label definition.
        ``lbld`` is currently unused here.
        """
        # Set up geometry data frame
        rec_t = np.dtype(
            [
                ("SCET_FRAME_WHOLE", ">u4"),
                ("SCET_FRAME_FRAC", ">u2"),
                ("GEOMETRY_EPHEMERIS_TIME", ">f8"),
                ("GEOMETRY_EPOCH", "V23"),
                ("MARS_SOLAR_LONGITUDE", ">f8"),
                ("MARS_SUN_DISTANCE", ">f8"),
                ("ORBIT_NUMBER", ">u4"),
                ("TARGET_NAME", "V6"),
                ("TARGET_SC_POSITION_VECTOR", ">f8", 3),
                ("SPACECRAFT_ALTITUDE", ">f8"),
                ("SUB_SC_LONGITUDE", ">f8"),
                ("SUB_SC_LATITUDE", ">f8"),
                ("TARGET_SC_VELOCITY_VECTOR", ">f8", 3),
                ("TARGET_SC_RADIAL_VELOCITY", ">f8"),
                ("TARGET_SC_TANG_VELOCITY", ">f8"),
                ("LOCAL_TRUE_SOLAR_TIME", ">f8"),
                ("SOLAR_ZENITH_ANGLE", ">f8"),
                ("DIPOLE_UNIT_VECTOR", ">f8", 3),
                ("MONOPOLE_UNIT_VECTOR", ">f8", 3),
            ]
        )
        geoData = np.fromfile(file, dtype=rec_t)
        return geoData
    def decompress(self, trace, exp):
        """Decompress 8-bit sign/mantissa samples into floats.

        ``trace`` holds a sign bit (MSB) and a 7-bit mantissa fraction;
        ``exp`` is a shared, biased (127) exponent for the trace.
        Works elementwise on numpy arrays.
        """
        sign = (-1) ** (trace >> 7) # Sign bit
        mantissa = 1+((trace & 0x7F)/(2.0**7))
        #mantissa = trace & 0x7F
        trace = sign * mantissa * (2 ** (exp - 127))
        return trace
    def deagc(self, data, agc):
        """Undo the automatic gain control applied to a radargram.

        Parameters
        ----------
        data: numpy.ndarray
            2-D block of samples, shape (samples, traces).
        agc: numpy.ndarray
            Per-trace AGC level words; only the low three bits are used.

        Returns
        -------
        numpy.ndarray
            ``data`` scaled back by the per-trace gain (gain in dB is
            ``agc*4 + 2`` per Orosei), broadcast across the sample axis.
        """
        # Take in an array of marsis data
        # and a vector of AGC settings,
        # then correct for agc
        agc = agc & 0x07 # Only last three bits matter
        agc = agc*4 + 2 # Gain in dB, per Orosei
        data = data * 10**(agc/20)[np.newaxis, :]
        return data
    def parseSci(self, file, lbld):
        """Read and decompress a science data file.

        Parameters
        ----------
        file: str
            Path to the science (``*_f.dat``) data file.
        lbld: dict
            Parsed label dictionary from parseLBL; used to look up
            INSTRUMENT_MODE_ID.

        Returns
        -------
        tuple
            (telTab, df, datad) where telTab is the raw structured record
            array, df is a DataFrame of (partially) decoded OST bit
            fields, and datad maps each Doppler/band radargram name to a
            complex64 array of shape (512, n_frames).

        Notes
        -----
        Only the "SS3_TRK_CMP" instrument mode is handled; any other mode
        terminates the process via sys.exit().
        """
        # Set up ancillary data dataframe
        # NOTE(review): layout is hard-coded; presumably mirrors the PDS
        # label's record definition (big-endian) — confirm against label.
        rec_t = np.dtype(
            [
                ("SCET_STAR_WHOLE", ">u4"),
                ("SCET_STAR_FRAC", ">u2"),
                ("OST_LINE_NUMBER", ">u2"),
                ("OST_LINE", "V12"),
                ("FRAME_NUMBER", ">u2"),
                ("ANCILLARY_DATA_HEADER", "V6"),
                ("FIRST_PRI_OF_FRAME", ">u4"),
                ("SCET_FRAME_WHOLE", ">u4"),
                ("SCET_FRAME_FRAC", ">u2"),
                ("SCET_PERICENTER_WHOLE", ">u4"),
                ("SCET_PERICENTER_FRAC", ">u2"),
                ("SCET_PAR_WHOLE", ">u4"),
                ("SCET_PAR_FRAC", ">u2"),
                ("H_SCET_PAR", ">f4"),
                ("VT_SCET_PAR", ">f4"),
                ("VR_SCET_PAR", ">f4"),
                ("N_0", ">u4"),
                ("DELTA_S_MIN", ">f4"),
                ("NB_MIN", ">u2"),
                ("M_OCOG", ">f4", 2),
                ("INDEX_OCOG", ">u2", 2),
                ("TRK_THRESHOLD", ">f4", 2),
                ("INI_IND_TRK_THRESHOLD", ">u2", 2),
                ("LAST_IND_TRK_THRESHOLD", ">u2", 2),
                ("INI_IND_FSRM", ">u2", 2),
                ("LAST_IND_FSRM", ">u2", 2),
                ("SPARE_4", ">u4", 3),
                ("DELTA_S_SCET_PAR", ">f4"),
                ("NB_SCET_PAR", ">u2"),
                ("NA_SCET_PAR", ">u2", 2),
                ("A2_INI_CM", ">f4", 2),
                ("A2_OPT", ">f4", 2),
                ("REF_CA_OPT", ">f4", 2),
                ("DELTA_T", ">u2", 2),
                ("SF", ">f4", 2),
                ("I_C", ">u2", 2),
                ("AGC_SA_FOR_NEXT_FRAME", ">f4", 2),
                ("AGC_SA_LEVELS_CURRENT_FRAME", ">u1", 2),
                ("RX_TRIG_SA_FOR_NEXT_FRAME", ">u2", 2),
                ("RX_TRIG_SA_PROGR", ">u2", 2),
                ("INI_IND_OCOG", ">u2"),
                ("LAST_IND_OCOG", ">u2"),
                ("OCOG", ">f4", 2),
                ("A", ">f4", 2),
                ("C_LOL", ">i2", 2),
                ("SPARE_5", ">u2", 3),
                ("MAX_RE_EXP_MINUS1_F1_DIP", ">u1"),
                ("MAX_IM_EXP_MINUS1_F1_DIP", ">u1"),
                ("MAX_RE_EXP_ZERO_F1_DIP", ">u1"),
                ("MAX_IM_EXP_ZERO_F1_DIP", ">u1"),
                ("MAX_RE_EXP_PLUS1_F1_DIP", ">u1"),
                ("MAX_IM_EXP_PLUS1_F1_DIP", ">u1"),
                ("MAX_RE_EXP_MINUS1_F2_DIP", ">u1"),
                ("MAX_IM_EXP_MINUS1_F2_DIP", ">u1"),
                ("MAX_RE_EXP_ZERO_F2_DIP", ">u1"),
                ("MAX_IM_EXP_ZERO_F2_DIP", ">u1"),
                ("MAX_RE_EXP_PLUS1_F2_DIP", ">u1"),
                ("MAX_IM_EXP_PLUS1_F2_DIP", ">u1"),
                ("SPARE_6", ">u1", 8),
                ("AGC_PIS_PT_VALUE", ">f4", 2),
                ("AGC_PIS_LEVELS", ">u1", 2),
                ("K_PIM", ">u1"),
                ("PIS_MAX_DATA_EXP", ">u1", 2),
                ("PROCESSING_PRF", ">f4"),
                ("SPARE_7", ">u1"),
                ("REAL_ECHO_MINUS1_F1_DIP", ">u1", 512),
                ("IMAG_ECHO_MINUS1_F1_DIP", ">u1", 512),
                ("REAL_ECHO_ZERO_F1_DIP", ">u1", 512),
                ("IMAG_ECHO_ZERO_F1_DIP", ">u1", 512),
                ("REAL_ECHO_PLUS1_F1_DIP", ">u1", 512),
                ("IMAG_ECHO_PLUS1_F1_DIP", ">u1", 512),
                ("REAL_ECHO_MINUS1_F2_DIP", ">u1", 512),
                ("IMAG_ECHO_MINUS1_F2_DIP", ">u1", 512),
                ("REAL_ECHO_ZERO_F2_DIP", ">u1", 512),
                ("IMAG_ECHO_ZERO_F2_DIP", ">u1", 512),
                ("REAL_ECHO_PLUS1_F2_DIP", ">u1", 512),
                ("IMAG_ECHO_PLUS1_F2_DIP", ">u1", 512),
                ("PIS_F1", ">i2", 128),
                ("PIS_F2", ">i2", 128),
            ]
        )
        telTab = np.fromfile(file, dtype=rec_t)
        # Decode OST line bit fields - this is incomplete
        df = pd.DataFrame()
        ost = telTab["OST_LINE"]
        ost = np.array(ost.tolist()) # weird but it works to get from void to bytes_
        df["SPARE_0"] = np.vectorize(lambda s: s[0])(ost)
        df["MODE_DURATION"] = np.vectorize(
            lambda s: np.frombuffer(s[0:4], dtype=">u4") & 0x00FFFFFF
        )(ost)
        df["SPARE_1"] = np.vectorize(
            lambda s: np.frombuffer(s[4:5], dtype=">u1") & 0xC0
        )(ost)
        df["MODE_SELECTION"] = np.vectorize(
            lambda s: np.frombuffer(s[4:5], dtype=">u1") >> 2 & 0x0F
        )(ost)
        df["DCG_CONFIGURATION_LO"] = np.vectorize(
            lambda s: np.frombuffer(s[4:5], dtype=">u1") & 0x03
        )(ost)
        df["DCG_CONFIGURATION_HI"] = np.vectorize(
            lambda s: np.frombuffer(s[5:6], dtype=">u1") >> 6
        )(ost)
        # Decompress data and make radargrams
        # Maps instrument mode -> list of radargram names (Doppler filter
        # MINUS1/ZERO/PLUS1 x frequency band F1/F2).
        moded = {
            "SS3_TRK_CMP": [
                "MINUS1_F1",
                "ZERO_F1",
                "PLUS1_F1",
                "MINUS1_F2",
                "ZERO_F2",
                "PLUS1_F2",
            ]
        }
        mode = lbld["INSTRUMENT_MODE_ID"].replace('"', "")
        if mode not in moded.keys():
            print("Unhandled mode, exiting")
            print(mode)
            sys.exit()
        datad = {}
        for rg in moded[mode]:
            # One complex sample block per radargram: 512 samples x n frames.
            block = np.zeros((512, len(telTab)), dtype=np.complex64)
            for i in range(len(telTab)):
                # Per-frame exponents and 512-sample compressed echoes.
                expIM = telTab["MAX_IM_EXP_" + rg + "_DIP"][i]
                expRE = telTab["MAX_RE_EXP_" + rg + "_DIP"][i]
                trIM = telTab["IMAG_ECHO_" + rg + "_DIP"][i]
                trRE = telTab["REAL_ECHO_" + rg + "_DIP"][i]
                trRE = self.decompress(trRE, expRE)
                trIM = self.decompress(trIM, expIM)
                trace = trRE + 1j * trIM
                # NOTE(review): band is computed but never used.
                band = int(rg.split("_")[1][1])
                block[:, i] = trace
            # AGC column 0 corresponds to band F1, column 1 to band F2.
            if("F1" in rg):
                block = self.deagc(block, telTab["AGC_SA_LEVELS_CURRENT_FRAME"][:,0])
            elif("F2" in rg):
                block = self.deagc(block, telTab["AGC_SA_LEVELS_CURRENT_FRAME"][:,1])
            datad[rg] = block
        return telTab, df, datad
    def buildDict(self, pdata, i):
        """Fold a flat list of (key, value) label records into a nested dict.

        OBJECT / END_OBJECT pairs open and close nested dictionaries;
        repeated OBJECT names are de-duplicated with a numeric suffix
        (e.g. "TABLE0", "TABLE1").

        Parameters
        ----------
        pdata: list
            List of (key, value) tuples as produced by parseLBL's parser.
        i: int
            Index into pdata to start from (0 for a top-level call).

        Returns
        -------
        dict or (int, dict)
            Recursive calls return via the END_OBJECT branch as
            (next_index, dict); the top-level call falls off the end of
            the list and returns just the dict.
            NOTE(review): a nested OBJECT without a matching END_OBJECT
            would hit the bare-dict return inside a recursive call and
            break the tuple unpacking below — assumed well-formed input.
        """
        dd = {}
        while i < len(pdata):
            key, val = pdata[i]
            if key == "OBJECT":
                c = 0
                name = val + str(c)
                while name in dd.keys():
                    c += 1
                    name = val + str(c)
                i, dd[name] = self.buildDict(pdata, i + 1)
                continue
            if key == "END_OBJECT":
                return i + 1, dd
            dd[key] = val
            i += 1
        return dd
    def parseLBL(self, lbl):
        """Parse a PDS3 label file into a nested dictionary.

        Builds a one-off ply lexer and parser for the PDS3 label syntax,
        parses the file, and folds the resulting (key, value) records
        into a nested dict via buildDict.

        NOTE(review): ply takes the token regexes and grammar rules from
        the docstrings of the t_* / p_* functions below — those
        docstrings are load-bearing and must not be edited casually.
        """
        # Parse the label file with lex and yacc
        # Heavily based on https://github.com/mkelley/pds3
        # lexer def ###
        tokens = [
            "DSID",
            "WORD",
            "STRING",
            "COMMENT",
            "POINTER",
            "DATE",
            "INT",
            "REAL",
            "UNIT",
            "END",
        ]
        literals = ["(", ")", ",", "=", "{", "}"]
        def t_DSID(t):
            r"MEX-M-MARSIS-2-EDR(-EXT[0-9])?-V[0-9].0" # Dataset ID
            return t
        def t_WORD(t):
            r"[A-Z][A-Z0-9:_]+"
            # The END keyword terminates the label; reclassify it.
            if t.value == "END":
                t.type = "END"
            return t
        t_STRING = r'"[^"]+"'
        def t_COMMENT(t):
            r"/\*.+\*/"
            # Comments are dropped (no token returned).
            pass
        t_POINTER = r"\^[A-Z0-9_]+"
        t_DATE = r"[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(.[0-9]{3})?"
        t_INT = r"[+-]?[0-9]+"
        t_REAL = r"[+-]?[0-9]+\.[0-9]+([Ee][+-]?[0-9]+)?"
        t_UNIT = r"<[\w*^\-/]+>"
        t_ignore = " \t\r\n"
        def t_error(t):
            # Skip over characters the lexer cannot match.
            print("Illegal character '%s'" % t.value[0])
            t.lexer.skip(1)
        lexer = lex.lex()
        # ## parser def ## #
        def p_label(p):
            """ label : record
                      | label record
                      | label END"""
            if len(p) == 2:
                # record
                p[0] = [p[1]]
            elif p[2] == "END":
                # label END
                p[0] = p[1]
            else:
                # label record
                p[0] = p[1] + [p[2]]
        def p_record(p):
            """record : WORD '=' value
                      | POINTER '=' INT
                      | POINTER '=' STRING
                      | POINTER '=' '(' STRING ',' INT ')'"""
            p[0] = (p[1], p[3])
        def p_value(p):
            """value : STRING
                     | DATE
                     | WORD
                     | DSID
                     | number
                     | number UNIT
                     | sequence"""
            # Just chuck the units for now
            p[0] = p[1]
        def p_number(p):
            """number : INT
                      | REAL"""
            p[0] = p[1]
        def p_sequence(p):
            """sequence : '(' value ')'
                        | '(' sequence_values ')'
                        | '{' value '}'
                        | '{' sequence_values '}'"""
            p[0] = p[2]
        def p_sequence_values(p):
            """sequence_values : value ','
                               | sequence_values value ','
                               | sequence_values value"""
            if p[2] == ",":
                p[0] = [p[1]]
            else:
                p[0] = p[1] + [p[2]]
        def p_error(p):
            # Syntax errors are reported but not raised.
            if p:
                print("Syntax error at '%s'" % p.value)
            else:
                print("Syntax error at EOF")
        parser = yacc.yacc()
        # ## parse the label ## #
        fd = open(lbl, "r")
        data = fd.read()
        fd.close()
        result = parser.parse(data, lexer=lexer, debug=False)
        return self.buildDict(result, 0)
| [
"ply.yacc.yacc",
"numpy.fromfile",
"ply.lex.lex",
"sys.exit",
"pandas.DataFrame",
"numpy.frombuffer",
"numpy.dtype",
"numpy.vectorize"
] | [((2156, 2818), 'numpy.dtype', 'np.dtype', (["[('SCET_FRAME_WHOLE', '>u4'), ('SCET_FRAME_FRAC', '>u2'), (\n 'GEOMETRY_EPHEMERIS_TIME', '>f8'), ('GEOMETRY_EPOCH', 'V23'), (\n 'MARS_SOLAR_LONGITUDE', '>f8'), ('MARS_SUN_DISTANCE', '>f8'), (\n 'ORBIT_NUMBER', '>u4'), ('TARGET_NAME', 'V6'), (\n 'TARGET_SC_POSITION_VECTOR', '>f8', 3), ('SPACECRAFT_ALTITUDE', '>f8'),\n ('SUB_SC_LONGITUDE', '>f8'), ('SUB_SC_LATITUDE', '>f8'), (\n 'TARGET_SC_VELOCITY_VECTOR', '>f8', 3), ('TARGET_SC_RADIAL_VELOCITY',\n '>f8'), ('TARGET_SC_TANG_VELOCITY', '>f8'), ('LOCAL_TRUE_SOLAR_TIME',\n '>f8'), ('SOLAR_ZENITH_ANGLE', '>f8'), ('DIPOLE_UNIT_VECTOR', '>f8', 3),\n ('MONOPOLE_UNIT_VECTOR', '>f8', 3)]"], {}), "([('SCET_FRAME_WHOLE', '>u4'), ('SCET_FRAME_FRAC', '>u2'), (\n 'GEOMETRY_EPHEMERIS_TIME', '>f8'), ('GEOMETRY_EPOCH', 'V23'), (\n 'MARS_SOLAR_LONGITUDE', '>f8'), ('MARS_SUN_DISTANCE', '>f8'), (\n 'ORBIT_NUMBER', '>u4'), ('TARGET_NAME', 'V6'), (\n 'TARGET_SC_POSITION_VECTOR', '>f8', 3), ('SPACECRAFT_ALTITUDE', '>f8'),\n ('SUB_SC_LONGITUDE', '>f8'), ('SUB_SC_LATITUDE', '>f8'), (\n 'TARGET_SC_VELOCITY_VECTOR', '>f8', 3), ('TARGET_SC_RADIAL_VELOCITY',\n '>f8'), ('TARGET_SC_TANG_VELOCITY', '>f8'), ('LOCAL_TRUE_SOLAR_TIME',\n '>f8'), ('SOLAR_ZENITH_ANGLE', '>f8'), ('DIPOLE_UNIT_VECTOR', '>f8', 3),\n ('MONOPOLE_UNIT_VECTOR', '>f8', 3)])\n", (2164, 2818), True, 'import numpy as np\n'), ((3138, 3168), 'numpy.fromfile', 'np.fromfile', (['file'], {'dtype': 'rec_t'}), '(file, dtype=rec_t)\n', (3149, 3168), True, 'import numpy as np\n'), ((3854, 6393), 'numpy.dtype', 'np.dtype', (["[('SCET_STAR_WHOLE', '>u4'), ('SCET_STAR_FRAC', '>u2'), ('OST_LINE_NUMBER',\n '>u2'), ('OST_LINE', 'V12'), ('FRAME_NUMBER', '>u2'), (\n 'ANCILLARY_DATA_HEADER', 'V6'), ('FIRST_PRI_OF_FRAME', '>u4'), (\n 'SCET_FRAME_WHOLE', '>u4'), ('SCET_FRAME_FRAC', '>u2'), (\n 'SCET_PERICENTER_WHOLE', '>u4'), ('SCET_PERICENTER_FRAC', '>u2'), (\n 'SCET_PAR_WHOLE', '>u4'), ('SCET_PAR_FRAC', '>u2'), ('H_SCET_PAR',\n '>f4'), 
('VT_SCET_PAR', '>f4'), ('VR_SCET_PAR', '>f4'), ('N_0', '>u4'),\n ('DELTA_S_MIN', '>f4'), ('NB_MIN', '>u2'), ('M_OCOG', '>f4', 2), (\n 'INDEX_OCOG', '>u2', 2), ('TRK_THRESHOLD', '>f4', 2), (\n 'INI_IND_TRK_THRESHOLD', '>u2', 2), ('LAST_IND_TRK_THRESHOLD', '>u2', 2\n ), ('INI_IND_FSRM', '>u2', 2), ('LAST_IND_FSRM', '>u2', 2), ('SPARE_4',\n '>u4', 3), ('DELTA_S_SCET_PAR', '>f4'), ('NB_SCET_PAR', '>u2'), (\n 'NA_SCET_PAR', '>u2', 2), ('A2_INI_CM', '>f4', 2), ('A2_OPT', '>f4', 2),\n ('REF_CA_OPT', '>f4', 2), ('DELTA_T', '>u2', 2), ('SF', '>f4', 2), (\n 'I_C', '>u2', 2), ('AGC_SA_FOR_NEXT_FRAME', '>f4', 2), (\n 'AGC_SA_LEVELS_CURRENT_FRAME', '>u1', 2), ('RX_TRIG_SA_FOR_NEXT_FRAME',\n '>u2', 2), ('RX_TRIG_SA_PROGR', '>u2', 2), ('INI_IND_OCOG', '>u2'), (\n 'LAST_IND_OCOG', '>u2'), ('OCOG', '>f4', 2), ('A', '>f4', 2), ('C_LOL',\n '>i2', 2), ('SPARE_5', '>u2', 3), ('MAX_RE_EXP_MINUS1_F1_DIP', '>u1'),\n ('MAX_IM_EXP_MINUS1_F1_DIP', '>u1'), ('MAX_RE_EXP_ZERO_F1_DIP', '>u1'),\n ('MAX_IM_EXP_ZERO_F1_DIP', '>u1'), ('MAX_RE_EXP_PLUS1_F1_DIP', '>u1'),\n ('MAX_IM_EXP_PLUS1_F1_DIP', '>u1'), ('MAX_RE_EXP_MINUS1_F2_DIP', '>u1'),\n ('MAX_IM_EXP_MINUS1_F2_DIP', '>u1'), ('MAX_RE_EXP_ZERO_F2_DIP', '>u1'),\n ('MAX_IM_EXP_ZERO_F2_DIP', '>u1'), ('MAX_RE_EXP_PLUS1_F2_DIP', '>u1'),\n ('MAX_IM_EXP_PLUS1_F2_DIP', '>u1'), ('SPARE_6', '>u1', 8), (\n 'AGC_PIS_PT_VALUE', '>f4', 2), ('AGC_PIS_LEVELS', '>u1', 2), ('K_PIM',\n '>u1'), ('PIS_MAX_DATA_EXP', '>u1', 2), ('PROCESSING_PRF', '>f4'), (\n 'SPARE_7', '>u1'), ('REAL_ECHO_MINUS1_F1_DIP', '>u1', 512), (\n 'IMAG_ECHO_MINUS1_F1_DIP', '>u1', 512), ('REAL_ECHO_ZERO_F1_DIP', '>u1',\n 512), ('IMAG_ECHO_ZERO_F1_DIP', '>u1', 512), ('REAL_ECHO_PLUS1_F1_DIP',\n '>u1', 512), ('IMAG_ECHO_PLUS1_F1_DIP', '>u1', 512), (\n 'REAL_ECHO_MINUS1_F2_DIP', '>u1', 512), ('IMAG_ECHO_MINUS1_F2_DIP',\n '>u1', 512), ('REAL_ECHO_ZERO_F2_DIP', '>u1', 512), (\n 'IMAG_ECHO_ZERO_F2_DIP', '>u1', 512), ('REAL_ECHO_PLUS1_F2_DIP', '>u1',\n 512), ('IMAG_ECHO_PLUS1_F2_DIP', '>u1', 512), 
('PIS_F1', '>i2', 128), (\n 'PIS_F2', '>i2', 128)]"], {}), "([('SCET_STAR_WHOLE', '>u4'), ('SCET_STAR_FRAC', '>u2'), (\n 'OST_LINE_NUMBER', '>u2'), ('OST_LINE', 'V12'), ('FRAME_NUMBER', '>u2'),\n ('ANCILLARY_DATA_HEADER', 'V6'), ('FIRST_PRI_OF_FRAME', '>u4'), (\n 'SCET_FRAME_WHOLE', '>u4'), ('SCET_FRAME_FRAC', '>u2'), (\n 'SCET_PERICENTER_WHOLE', '>u4'), ('SCET_PERICENTER_FRAC', '>u2'), (\n 'SCET_PAR_WHOLE', '>u4'), ('SCET_PAR_FRAC', '>u2'), ('H_SCET_PAR',\n '>f4'), ('VT_SCET_PAR', '>f4'), ('VR_SCET_PAR', '>f4'), ('N_0', '>u4'),\n ('DELTA_S_MIN', '>f4'), ('NB_MIN', '>u2'), ('M_OCOG', '>f4', 2), (\n 'INDEX_OCOG', '>u2', 2), ('TRK_THRESHOLD', '>f4', 2), (\n 'INI_IND_TRK_THRESHOLD', '>u2', 2), ('LAST_IND_TRK_THRESHOLD', '>u2', 2\n ), ('INI_IND_FSRM', '>u2', 2), ('LAST_IND_FSRM', '>u2', 2), ('SPARE_4',\n '>u4', 3), ('DELTA_S_SCET_PAR', '>f4'), ('NB_SCET_PAR', '>u2'), (\n 'NA_SCET_PAR', '>u2', 2), ('A2_INI_CM', '>f4', 2), ('A2_OPT', '>f4', 2),\n ('REF_CA_OPT', '>f4', 2), ('DELTA_T', '>u2', 2), ('SF', '>f4', 2), (\n 'I_C', '>u2', 2), ('AGC_SA_FOR_NEXT_FRAME', '>f4', 2), (\n 'AGC_SA_LEVELS_CURRENT_FRAME', '>u1', 2), ('RX_TRIG_SA_FOR_NEXT_FRAME',\n '>u2', 2), ('RX_TRIG_SA_PROGR', '>u2', 2), ('INI_IND_OCOG', '>u2'), (\n 'LAST_IND_OCOG', '>u2'), ('OCOG', '>f4', 2), ('A', '>f4', 2), ('C_LOL',\n '>i2', 2), ('SPARE_5', '>u2', 3), ('MAX_RE_EXP_MINUS1_F1_DIP', '>u1'),\n ('MAX_IM_EXP_MINUS1_F1_DIP', '>u1'), ('MAX_RE_EXP_ZERO_F1_DIP', '>u1'),\n ('MAX_IM_EXP_ZERO_F1_DIP', '>u1'), ('MAX_RE_EXP_PLUS1_F1_DIP', '>u1'),\n ('MAX_IM_EXP_PLUS1_F1_DIP', '>u1'), ('MAX_RE_EXP_MINUS1_F2_DIP', '>u1'),\n ('MAX_IM_EXP_MINUS1_F2_DIP', '>u1'), ('MAX_RE_EXP_ZERO_F2_DIP', '>u1'),\n ('MAX_IM_EXP_ZERO_F2_DIP', '>u1'), ('MAX_RE_EXP_PLUS1_F2_DIP', '>u1'),\n ('MAX_IM_EXP_PLUS1_F2_DIP', '>u1'), ('SPARE_6', '>u1', 8), (\n 'AGC_PIS_PT_VALUE', '>f4', 2), ('AGC_PIS_LEVELS', '>u1', 2), ('K_PIM',\n '>u1'), ('PIS_MAX_DATA_EXP', '>u1', 2), ('PROCESSING_PRF', '>f4'), (\n 'SPARE_7', '>u1'), 
('REAL_ECHO_MINUS1_F1_DIP', '>u1', 512), (\n 'IMAG_ECHO_MINUS1_F1_DIP', '>u1', 512), ('REAL_ECHO_ZERO_F1_DIP', '>u1',\n 512), ('IMAG_ECHO_ZERO_F1_DIP', '>u1', 512), ('REAL_ECHO_PLUS1_F1_DIP',\n '>u1', 512), ('IMAG_ECHO_PLUS1_F1_DIP', '>u1', 512), (\n 'REAL_ECHO_MINUS1_F2_DIP', '>u1', 512), ('IMAG_ECHO_MINUS1_F2_DIP',\n '>u1', 512), ('REAL_ECHO_ZERO_F2_DIP', '>u1', 512), (\n 'IMAG_ECHO_ZERO_F2_DIP', '>u1', 512), ('REAL_ECHO_PLUS1_F2_DIP', '>u1',\n 512), ('IMAG_ECHO_PLUS1_F2_DIP', '>u1', 512), ('PIS_F1', '>i2', 128), (\n 'PIS_F2', '>i2', 128)])\n", (3862, 6393), True, 'import numpy as np\n'), ((7556, 7586), 'numpy.fromfile', 'np.fromfile', (['file'], {'dtype': 'rec_t'}), '(file, dtype=rec_t)\n', (7567, 7586), True, 'import numpy as np\n'), ((7659, 7673), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (7671, 7673), True, 'import pandas as pd\n'), ((11637, 11646), 'ply.lex.lex', 'lex.lex', ([], {}), '()\n', (11644, 11646), False, 'from ply import lex, yacc\n'), ((13445, 13456), 'ply.yacc.yacc', 'yacc.yacc', ([], {}), '()\n', (13454, 13456), False, 'from ply import lex, yacc\n'), ((7817, 7845), 'numpy.vectorize', 'np.vectorize', (['(lambda s: s[0])'], {}), '(lambda s: s[0])\n', (7829, 7845), True, 'import numpy as np\n'), ((8949, 8959), 'sys.exit', 'sys.exit', ([], {}), '()\n', (8957, 8959), False, 'import sys\n'), ((7917, 7951), 'numpy.frombuffer', 'np.frombuffer', (['s[0:4]'], {'dtype': '""">u4"""'}), "(s[0:4], dtype='>u4')\n", (7930, 7951), True, 'import numpy as np\n'), ((8040, 8074), 'numpy.frombuffer', 'np.frombuffer', (['s[4:5]'], {'dtype': '""">u1"""'}), "(s[4:5], dtype='>u1')\n", (8053, 8074), True, 'import numpy as np\n'), ((8299, 8333), 'numpy.frombuffer', 'np.frombuffer', (['s[4:5]'], {'dtype': '""">u1"""'}), "(s[4:5], dtype='>u1')\n", (8312, 8333), True, 'import numpy as np\n'), ((8429, 8463), 'numpy.frombuffer', 'np.frombuffer', (['s[5:6]'], {'dtype': '""">u1"""'}), "(s[5:6], dtype='>u1')\n", (8442, 8463), True, 'import numpy as np\n'), ((8164, 
8198), 'numpy.frombuffer', 'np.frombuffer', (['s[4:5]'], {'dtype': '""">u1"""'}), "(s[4:5], dtype='>u1')\n", (8177, 8198), True, 'import numpy as np\n')] |
#! /usr/bin/env python3
# -*- coding:utf-8 -*-
import cv2
import xml.etree.ElementTree as ET
import glob
import sys
import pickle
from multiprocessing import Pool, Manager
def getXmlRoot(filename):
    """Parse the XML file at *filename* and return its root element."""
    return ET.parse(filename).getroot()
def getVideo(filename):
    """Open *filename* as an OpenCV video stream and return the capture.

    Fix: cv2.VideoCapture does not raise when the file is missing or
    unreadable — it returns a capture whose isOpened() is False, so the
    original try/except never fired.  Check isOpened() explicitly.
    """
    video = cv2.VideoCapture(filename)
    if not video.isOpened():
        print('cannot open video')
        exit(0)
    return video
def main(annotation_file, base_dir, set, out_data):
    """Extract 32x32 traffic-light crops and their states from one PIE clip.

    Reads the annotation XML, seeks the corresponding video frame for every
    `box` of every traffic_light track, crops the annotated bounding box,
    and appends {image, state, id, set, video} dicts to the shared
    `out_data` list (a multiprocessing.Manager list proxy — see __main__).
    """
    # Mapping of light state name to a one-hot [red, yellow, green] vector.
    label = {"red":[1,0,0], "yellow":[0,1,0], "green":[0,0,1], "__undefined__":[0,0,0]}
    # get annotation
    annt_root = getXmlRoot(annotation_file)
    # get video: clip name is the annotation file name minus "_annt.xml"
    clip_name = annotation_file.split("/")[-1].replace("_annt.xml", "")
    video_file = f"{base_dir}/PIE_clips/{set}/{clip_name}.mp4"
    video = getVideo(video_file)
    for track in annt_root.iter("track"):
        if track.get("label") != "traffic_light":
            continue
        for i, box in enumerate(track.iter("box")):
            print(box.get("frame"))
            # Seek the video to the annotated frame index before reading.
            video.set(cv2.CAP_PROP_POS_FRAMES, float(box.get("frame")))
            ret, frame = video.read()
            if not ret:
                print("failed to get frame")
                continue
            # Crop the traffic-light bounding box out of the frame
            # (rows = y range, columns = x range).
            tl_bb = frame[
                int(float(box.get("ytl"))) : int(float(box.get("ybr"))),
                int(float(box.get("xtl"))) : int(float(box.get("xbr")))
            ]
            std_img = cv2.resize(tl_bb.astype("uint8"), dsize=(32, 32))
            # Collect the per-box annotation attributes.
            state = None
            id = None
            type = None
            for attrib in box.iter("attribute"):
                if attrib.get("name") == "type":
                    type = attrib.text
                elif attrib.get("name") == "state":
                    state = label.get(attrib.text)
                elif attrib.get("name") == "id":
                    id = attrib.text
            # Only keep "regular" traffic lights.
            if type != "regular":
                continue
            buf_data = {
                "image" : std_img,
                "state" : state,
                "id" : id,
                "set" : set,
                "video" : clip_name,
            }
            out_data.append(buf_data)
            # NOTE(review): len(box) counts the child elements of this single
            # <box> element, not the number of boxes in the track — the
            # condition looks like it intended "last box of the track".
            # Confirm before relying on the duplicated "test" sample below.
            if set == "set03" and i == len(box)-1:
                buf_data = {
                    "image" : std_img,
                    "state" : state,
                    "id" : id,
                    "set" : "test",
                    "video" : clip_name,
                }
                print("test")
                out_data.append(buf_data)
    # cv2.imshow("tl", std_img)
    # cv2.waitKey(1)
    # print(state)
if __name__ == "__main__":
    base_dir = "/media/kuriatsu/InternalHDD/PIE"
    with Manager() as manager:
        p = Pool(8)
        out_data = manager.list()
        # `set_name` instead of `set`: the original shadowed the builtin.
        for set_name in ["set01", "set02", "set03", "set04", "set05", "set06"]:
            for annotation_file in glob.iglob(base_dir+"/annotations/"+set_name+"/*.xml"):
                p.apply_async(main, args=(annotation_file, base_dir, set_name, out_data))
        p.close()
        p.join()
        # Fix: snapshot the proxied list while the manager process is still
        # alive — the original materialized it after the `with` block, when
        # the proxy is already shut down.
        results = list(out_data)
    with open("/media/kuriatsu/InternalHDD/PIE/tlr/database.pickle", "wb") as f:
        pickle.dump(results, f)
| [
"xml.etree.ElementTree.parse",
"glob.iglob",
"cv2.VideoCapture",
"multiprocessing.Pool",
"multiprocessing.Manager"
] | [((223, 241), 'xml.etree.ElementTree.parse', 'ET.parse', (['filename'], {}), '(filename)\n', (231, 241), True, 'import xml.etree.ElementTree as ET\n'), ((320, 346), 'cv2.VideoCapture', 'cv2.VideoCapture', (['filename'], {}), '(filename)\n', (336, 346), False, 'import cv2\n'), ((2810, 2819), 'multiprocessing.Manager', 'Manager', ([], {}), '()\n', (2817, 2819), False, 'from multiprocessing import Pool, Manager\n'), ((2844, 2851), 'multiprocessing.Pool', 'Pool', (['(8)'], {}), '(8)\n', (2848, 2851), False, 'from multiprocessing import Pool, Manager\n'), ((2996, 3051), 'glob.iglob', 'glob.iglob', (["(base_dir + '/annotations/' + set + '/*.xml')"], {}), "(base_dir + '/annotations/' + set + '/*.xml')\n", (3006, 3051), False, 'import glob\n')] |
from typing import Tuple
import numpy as np
import pandas as pd
from sklearn.preprocessing import LabelEncoder
def label_encoder(c: str) -> np.ndarray:
    """Fit a fresh LabelEncoder on *c* and return its integer codes."""
    encoder = LabelEncoder()
    return encoder.fit_transform(c)
def load_dataset() -> Tuple[pd.DataFrame, pd.DataFrame, pd.DataFrame]:
    """Load the Tabular Playground Apr-2021 data and engineer features.

    The test set is pseudo-labelled from a previous submission, then train
    and test are concatenated so all features are computed consistently.

    Returns (X, y, test).  NOTE(review): X and y are built from the
    *concatenated* frame, so they include the test rows with their
    pseudo-labels — presumably intentional for pseudo-label training;
    confirm before reusing this loader elsewhere.
    """
    path = "../../input/tabular-playground-series-apr-2021/"
    train = pd.read_csv(path + "train.csv")
    test = pd.read_csv(path + "test.csv")
    # Pseudo-labels for the test rows from an earlier submission file.
    pseudo_label = pd.read_csv("../../res/AutoWoE_submission_combo.csv")
    test["Survived"] = [x for x in pseudo_label.Survived]
    # Calculate SameFirstName: for each passenger, how many rows share the
    # token before ", " in Name (computed separately for train and test).
    train["FirstName"] = train["Name"].apply(lambda x: x.split(", ")[0])
    train["n"] = 1
    gb = train.groupby("FirstName")
    df_names = gb["n"].sum()
    train["SameFirstName"] = train["FirstName"].apply(lambda x: df_names[x])
    test["FirstName"] = test["Name"].apply(lambda x: x.split(", ")[0])
    test["n"] = 1
    gb = test.groupby("FirstName")
    df_names = gb["n"].sum()
    test["SameFirstName"] = test["FirstName"].apply(lambda x: df_names[x])
    # To preprocess: stack train and test so feature engineering is shared.
    data = pd.concat([train, test], axis=0)
    # Before filling missing values, flag rows that had any NaN.
    data["AnyMissing"] = np.where(data.isnull().any(axis=1) == 1, 1, 0)
    # Family
    data["FamilySize"] = data["SibSp"] + data["Parch"] + 1
    data["IsAlone"] = np.where(data["FamilySize"] <= 1, 1, 0)
    # Cabin: NaN reads as float, hence the type check for Has_Cabin.
    data["Has_Cabin"] = data["Cabin"].apply(lambda x: 0 if type(x) == float else 1)
    data["Cabin"] = data["Cabin"].fillna("X").map(lambda x: x[0].strip())
    cabin_map = {"A": 1, "B": 2, "C": 3, "D": 4, "E": 5, "F": 6, "G": 7, "T": 1, "X": 8}
    data["Cabin"] = data["Cabin"].str[0].fillna("X").replace(cabin_map)
    # Embarked: missing ports become "No" and are coded -1 below.
    # map_Embarked = train.Embarked.mode().item()
    data["Embarked"] = data["Embarked"].fillna("No")
    conditions = [
        (data["Embarked"] == "S"),
        (data["Embarked"] == "Q"),
        (data["Embarked"] == "C"),
        (data["Embarked"] == "No"),
    ]
    choices = [0, 1, 2, -1]
    data["Embarked"] = np.select(conditions, choices)
    data["Embarked"] = data["Embarked"].astype(int)
    # Name-derived features
    data["SecondName"] = data.Name.str.split(", ", 1, expand=True)[1]  # to try
    data["IsFirstNameDublicated"] = np.where(data.FirstName.duplicated(), 1, 0)
    # Fare: fill missing with the train median, then bin into 4 buckets.
    data["Fare"] = data["Fare"].fillna(train["Fare"].median())
    # train['CategoricalFare'] = pd.qcut(train['Fare'], 4)
    # [(0.679, 10.04] < (10.04, 24.46] < (24.46, 33.5] < (33.5, 744.66]]
    # From original Titanic:
    conditions = [
        (data["Fare"] <= 7.91),
        ((data["Fare"] > 7.91) & (data["Fare"] <= 14.454)),
        ((data["Fare"] > 14.454) & (data["Fare"] <= 31)),
        (data["Fare"] > 31),
    ]
    choices = [0, 1, 2, 3]
    data["Fare"] = np.select(conditions, choices)
    data["Fare"] = data["Fare"].astype(int)
    # Ticket: strip dots, digits and spaces; empty remainders become "X".
    # data['TicketNum'] = data.Ticket.str.extract(r'(\d+)').\
    #     astype('float64', copy=False) # to_try
    data["Ticket"] = (
        data.Ticket.str.replace(r"\.", "", regex=True)
        .str.replace(r"(\d+)", "", regex=True)
        .str.replace(" ", "", regex=True)
        .replace(r"^\s*$", "X", regex=True)
        .fillna("X")
    )
    data["Ticket"] = data["Ticket"].astype("category").cat.codes # to_try
    # Age: impute missing ages with the (Pclass, Sex) group means; the
    # condition order matches the index order of the grouped means below.
    conditions = [
        ((data.Sex == "female") & (data.Pclass == 1) & (data.Age.isnull())),
        ((data.Sex == "male") & (data.Pclass == 1) & (data.Age.isnull())),
        ((data.Sex == "female") & (data.Pclass == 2) & (data.Age.isnull())),
        ((data.Sex == "male") & (data.Pclass == 2) & (data.Age.isnull())),
        ((data.Sex == "female") & (data.Pclass == 3) & (data.Age.isnull())),
        ((data.Sex == "male") & (data.Pclass == 3) & (data.Age.isnull())),
    ]
    choices = (
        data[["Age", "Pclass", "Sex"]].dropna().groupby(["Pclass", "Sex"]).mean()["Age"]
    )
    data["Age"] = np.select(conditions, choices)
    # Then bin Age into 5 buckets.
    conditions = [
        (data["Age"].le(16)),
        (data["Age"].gt(16) & data["Age"].le(32)),
        (data["Age"].gt(32) & data["Age"].le(48)),
        (data["Age"].gt(48) & data["Age"].le(64)),
        (data["Age"].gt(64)),
    ]
    choices = [0, 1, 2, 3, 4]
    data["Age"] = np.select(conditions, choices)
    # Sex: male -> 1, female -> 0
    data["Sex"] = np.where(data["Sex"] == "male", 1, 0)
    # Drop helper columns
    data = data.drop(["Name", "n"], axis=1)
    # Splitting back into train and test by original row counts.
    train = data.iloc[: train.shape[0]]
    test = data.iloc[train.shape[0] :].drop(columns=["Survived"])
    target = "Survived"
    features_selected = [
        "Pclass",
        "Sex",
        "Age",
        "Embarked",
        "Parch",
        "SibSp",
        "Fare",
        "Cabin",
        "Ticket",
        "IsAlone",
        "SameFirstName",
    ]
    X = data.drop(target, axis=1)
    X = X[features_selected]
    y = data[target]
    test = test[features_selected]
    return X, y, test
| [
"sklearn.preprocessing.LabelEncoder",
"numpy.select",
"pandas.read_csv",
"numpy.where",
"pandas.concat"
] | [((164, 178), 'sklearn.preprocessing.LabelEncoder', 'LabelEncoder', ([], {}), '()\n', (176, 178), False, 'from sklearn.preprocessing import LabelEncoder\n'), ((356, 387), 'pandas.read_csv', 'pd.read_csv', (["(path + 'train.csv')"], {}), "(path + 'train.csv')\n", (367, 387), True, 'import pandas as pd\n'), ((399, 429), 'pandas.read_csv', 'pd.read_csv', (["(path + 'test.csv')"], {}), "(path + 'test.csv')\n", (410, 429), True, 'import pandas as pd\n'), ((450, 503), 'pandas.read_csv', 'pd.read_csv', (['"""../../res/AutoWoE_submission_combo.csv"""'], {}), "('../../res/AutoWoE_submission_combo.csv')\n", (461, 503), True, 'import pandas as pd\n'), ((1087, 1119), 'pandas.concat', 'pd.concat', (['[train, test]'], {'axis': '(0)'}), '([train, test], axis=0)\n', (1096, 1119), True, 'import pandas as pd\n'), ((1314, 1353), 'numpy.where', 'np.where', (["(data['FamilySize'] <= 1)", '(1)', '(0)'], {}), "(data['FamilySize'] <= 1, 1, 0)\n", (1322, 1353), True, 'import numpy as np\n'), ((2022, 2052), 'numpy.select', 'np.select', (['conditions', 'choices'], {}), '(conditions, choices)\n', (2031, 2052), True, 'import numpy as np\n'), ((2764, 2794), 'numpy.select', 'np.select', (['conditions', 'choices'], {}), '(conditions, choices)\n', (2773, 2794), True, 'import numpy as np\n'), ((3920, 3950), 'numpy.select', 'np.select', (['conditions', 'choices'], {}), '(conditions, choices)\n', (3929, 3950), True, 'import numpy as np\n'), ((4239, 4269), 'numpy.select', 'np.select', (['conditions', 'choices'], {}), '(conditions, choices)\n', (4248, 4269), True, 'import numpy as np\n'), ((4299, 4336), 'numpy.where', 'np.where', (["(data['Sex'] == 'male')", '(1)', '(0)'], {}), "(data['Sex'] == 'male', 1, 0)\n", (4307, 4336), True, 'import numpy as np\n')] |
"""
This file is part of nucypher.
nucypher is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
nucypher is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with nucypher. If not, see <https://www.gnu.org/licenses/>.
"""
import pytest
from web3.contract import Contract
secret = (123456).to_bytes(32, byteorder='big')
@pytest.fixture()
def token(testerchain):
    """Deploy the NuCypherToken ERC20 contract and return it."""
    contract, _ = testerchain.interface.deploy_contract('NuCypherToken', int(2e9))
    return contract
@pytest.fixture()
def escrow(testerchain, token):
    """Deploy the miners-escrow mock and fund it with 10000 tokens."""
    deployer = testerchain.interface.w3.eth.accounts[0]
    escrow_contract, _ = testerchain.interface.deploy_contract(
        'MinersEscrowForUserEscrowMock', token.address)
    # Seed the freshly deployed escrow with some coins.
    funding_tx = token.functions.transfer(escrow_contract.address, 10000).transact({'from': deployer})
    testerchain.wait_for_receipt(funding_tx)
    return escrow_contract
@pytest.fixture()
def policy_manager(testerchain):
    """Deploy the policy-manager mock contract."""
    manager_contract, _ = testerchain.interface.deploy_contract('PolicyManagerForUserEscrowMock')
    return manager_contract
@pytest.fixture()
def proxy(testerchain, token, escrow, policy_manager):
    """Deploy the user escrow proxy wired to token, escrow and policy manager."""
    proxy_contract, _ = testerchain.interface.deploy_contract(
        'UserEscrowProxy', token.address, escrow.address, policy_manager.address)
    return proxy_contract
@pytest.fixture()
def linker(testerchain, proxy):
    """Deploy the library linker for the proxy, guarded by the secret hash."""
    secret_hash = testerchain.interface.w3.sha3(secret)
    linker_contract, _ = testerchain.interface.deploy_contract(
        'UserEscrowLibraryLinker', proxy.address, secret_hash)
    return linker_contract
@pytest.fixture()
def user_escrow(testerchain, token, linker):
    """Deploy a UserEscrow and hand its ownership to the user account."""
    accounts = testerchain.interface.w3.eth.accounts
    creator, user = accounts[0], accounts[1]
    escrow_contract, _ = testerchain.interface.deploy_contract('UserEscrow', linker.address, token.address)
    # Transfer ownership from the deployer to the user.
    ownership_tx = escrow_contract.functions.transferOwnership(user).transact({'from': creator})
    testerchain.wait_for_receipt(ownership_tx)
    return escrow_contract
@pytest.fixture()
def user_escrow_proxy(testerchain, proxy, user_escrow):
    """Expose the proxy's ABI at the user escrow's address."""
    return testerchain.interface.w3.eth.contract(
        ContractFactoryClass=Contract,
        abi=proxy.abi,
        address=user_escrow.address)
| [
"pytest.fixture"
] | [((743, 759), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (757, 759), False, 'import pytest\n'), ((912, 928), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (926, 928), False, 'import pytest\n'), ((1338, 1354), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (1352, 1354), False, 'import pytest\n'), ((1501, 1517), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (1515, 1517), False, 'import pytest\n'), ((1779, 1795), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (1793, 1795), False, 'import pytest\n'), ((2014, 2030), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (2028, 2030), False, 'import pytest\n'), ((2451, 2467), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (2465, 2467), False, 'import pytest\n')] |
import http.server
from os import linesep
class POST:
    """Parser for multipart/form-data POST bodies inside an http.server
    request handler.

    After construction, every form part is available in ``self.resdict``
    as ``name -> (filename, data)``: plain fields carry ``filename == ''``
    and str data, uploaded files carry their filename and raw bytes.
    """

    # Fix: multipart bodies are delimited by CRLF per RFC 2046.  The
    # original used os.linesep, which is '\n' on POSIX and silently broke
    # parsing of well-formed requests there.
    _CRLF = b'\r\n'

    def __init__(self, server_instance):
        selfserv = server_instance
        content_length = int(selfserv.headers['Content-Length'])
        # In the body the boundary carries two extra leading dashes
        # compared to the Content-Type header value.
        boundary = ('--' + selfserv.headers['Content-Type'].split('=')[1]).encode()
        # https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods/POST
        postb = selfserv.rfile.read(content_length)  # entire request body (bytes)
        self.resdict = dict()
        delimiter = self._CRLF + self._CRLF  # blank line between part headers and data
        # split() yields a leading empty chunk and the trailing "--" close
        # marker; both are skipped by [1:-1].
        for eachpart in postb.split(boundary)[1:-1]:
            split_at = eachpart.find(delimiter)
            headers = eachpart[:split_at]
            # Data runs up to the CRLF that precedes the next boundary.
            data = eachpart[split_at + len(delimiter):-len(self._CRLF)]
            paramname = self._disposition_param(headers, b'; name="')
            filename = self._disposition_param(headers, b'; filename="')
            if filename is None:
                # Plain form field: no filename, value decoded to text.
                self.resdict[paramname] = ('', data.decode())
            else:
                # Uploaded file: keep the payload as raw bytes.
                self.resdict[paramname] = (filename, data)

    @staticmethod
    def _disposition_param(headers, key):
        """Return the quoted Content-Disposition parameter after *key*,
        decoded to str, or None when the parameter is absent."""
        start = headers.find(key)
        if start == -1:
            return None
        start += len(key)
        return headers[start:headers.find(b'"', start)].decode()

    def _FILES(self):
        """Return only the uploaded files: name -> (filename, bytes)."""
        resdict = dict()
        for eachkey in self.resdict.keys():
            if self.resdict[eachkey][0] != '':
                resdict[eachkey] = self.resdict[eachkey]
        return resdict

    def _POST(self):
        """Return only the plain fields: name -> decoded value."""
        resdict = dict()
        for eachkey in self.resdict.keys():
            if self.resdict[eachkey][0] == '':
                resdict[eachkey] = self.resdict[eachkey][1]
        return resdict
# | [
"os.linesep.encode"
] | [((1064, 1080), 'os.linesep.encode', 'linesep.encode', ([], {}), '()\n', (1078, 1080), False, 'from os import linesep\n'), ((1224, 1240), 'os.linesep.encode', 'linesep.encode', ([], {}), '()\n', (1238, 1240), False, 'from os import linesep\n')] |
import psycopg2
import csv
# relative and fixed imports
from config.db.config import config
from config.db.connect import setup_db
from geodensity.tasks import query_data
from geodensity.querries import geonorge, enhetsreg
# from .models import YOUR_MODEL
def query_data(query, filepath):
    """Run *query* against the application database and stream the result
    rows into the CSV file at *filepath*.  Returns True on success.

    Fix: the original reopened the file in 'w' mode for every fetched
    batch of 200 rows, truncating it each time so only the final batch
    survived.  The file is now opened once and every batch is written to
    the same handle; the cursor is closed even on error.
    """
    conn = setup_db(config(section="DATABASE"))
    cursor = conn.cursor()  # cursor_factory=psycopg2.extras.DictCursor
    try:
        cursor.execute(query)
        # newline='' is the csv-module requirement to avoid blank rows.
        with open(filepath, 'w', newline='') as f:
            writer = csv.writer(f, delimiter=',')
            while True:
                rows = cursor.fetchmany(200)  # batch to bound memory use
                if not rows:
                    break
                writer.writerows(rows)
    finally:
        if cursor:
            cursor.close()
    return True
def example_store_data_to_model(query):
    """Example function to store query results into the project DB model.

    Template only: bind YOUR_MODEL to a real model class before calling —
    the placeholder string below has no `objects` manager.

    Fix: the original instantiated an undefined name `TaxAnomalies`
    (copy-paste leftover) instead of YOUR_MODEL, which raised NameError.
    """
    YOUR_MODEL = ""  # replace by proper model with aligned variables
    YOUR_MODEL.objects.all().delete()
    conn = setup_db(config(section="DATABASE"))
    cursor = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    try:
        cursor.execute(query)
        while True:
            rows = cursor.fetchmany(200)
            if not rows:
                break
            # One bulk insert per fetched batch.
            objs = [YOUR_MODEL(**row) for row in rows]
            YOUR_MODEL.objects.bulk_create(objs)
    finally:
        if cursor:
            cursor.close()
    return True
def sample_create_csvs():
    """
    Example function to get some data
    """
    geonorge_filepath = 'geodensity/data/geonorge.csv'
    # NOTE(review): enhetsreg_filepath is assigned but never used below —
    # presumably a matching query_data(query=enhetsreg, ...) call is
    # missing.  Confirm intent before relying on this sample.
    enhetsreg_filepath = 'geodensity/data/enhetsreg.csv'
    query_data(query=geonorge, filepath=geonorge_filepath)
| [
"csv.writer",
"config.db.config.config",
"geodensity.tasks.query_data"
] | [((1778, 1832), 'geodensity.tasks.query_data', 'query_data', ([], {'query': 'geonorge', 'filepath': 'geonorge_filepath'}), '(query=geonorge, filepath=geonorge_filepath)\n', (1788, 1832), False, 'from geodensity.tasks import query_data\n'), ((480, 506), 'config.db.config.config', 'config', ([], {'section': '"""DATABASE"""'}), "(section='DATABASE')\n", (486, 506), False, 'from config.db.config import config\n'), ((1154, 1180), 'config.db.config.config', 'config', ([], {'section': '"""DATABASE"""'}), "(section='DATABASE')\n", (1160, 1180), False, 'from config.db.config import config\n'), ((771, 799), 'csv.writer', 'csv.writer', (['f'], {'delimiter': '""","""'}), "(f, delimiter=',')\n", (781, 799), False, 'import csv\n')] |
import numpy as np
from scipy.stats import chi2
from tqdm import tqdm as tqdm
from ...common import (
Gaussian,
GaussianDensity,
HypothesisReduction,
normalize_log_weights,
)
from ...configs import SensorModelConfig
from ...measurement_models import MeasurementModel
from ...motion_models import MotionModel
from .base_single_object_tracker import SingleObjectTracker
class GaussSumTracker(SingleObjectTracker):
    """Single-object tracker based on Gaussian-sum (multi-hypothesis)
    filtering in clutter with missed detections."""

    def __init__(
        self,
        meas_model: MeasurementModel,
        sensor_model: SensorModelConfig,
        motion_model: MotionModel,
        M,
        merging_threshold,
        P_G,
        w_min,
        *args,
        **kwargs,
    ) -> None:
        self.meas_model = meas_model
        self.sensor_model = sensor_model
        self.motion_model = motion_model
        self.w_min = w_min  # log-weight pruning threshold
        self.P_G = P_G  # gating probability
        # Gate size from the chi-squared inverse CDF at probability P_G.
        self.gating_size = chi2.ppf(P_G, df=self.meas_model.d)
        self.M = M  # cap on the number of kept hypotheses
        self.merging_threshold = merging_threshold
        self.hypotheses_weight = None
        self.multi_hypotheses_bank = None
        # Fix: `super(GaussSumTracker).__init__()` built an *unbound* super
        # object and never invoked the parent initializer.
        super().__init__()

    def estimate(self, initial_state: Gaussian, measurements, verbose=False):
        """Tracks a single object using Gauss sum filtering

        For each filter recursion iteration implemented next steps:
        1) for each hypothesis, create missed detection hypothesis
        2) for each hypothesis, perform ellipsoidal gating
           and only create object detection hypotheses for detections
           inside the gate
        3) normalise hypotheses weights
        4) prune hypotheses with small weights and then re-normalise the weights
        5) hypothese merging
        6) cap the number of the hypotheses and then re-normalise the weights
        7) extract object state estimate using the most probably
           hypothesis estimation
        8) for each hypothesis, perform prediction
        """
        prev_state = initial_state
        estimations = [None] * len(measurements)
        self.hypotheses_weight = [np.log(1.0)]
        self.multi_hypotheses_bank = [initial_state]

        for timestep, measurements_in_scene in tqdm(enumerate(measurements)):
            estimations[timestep] = self.estimation_step(
                predicted_state=prev_state,
                current_measurements=np.array(measurements_in_scene),
            )
            prev_state = GaussianDensity.predict(state=estimations[timestep], motion_model=self.motion_model)
        return tuple(estimations)

    def estimation_step(self, predicted_state: Gaussian, current_measurements: np.ndarray):
        """Run one filter recursion over the measurements of one scene and
        return the state estimate for this timestep."""
        new_hypotheses, new_weights = [], []
        w_theta_factor = np.log(self.sensor_model.P_D / self.sensor_model.intensity_c)
        w_theta_0 = np.log(1 - self.sensor_model.P_D)  # misdetection

        for _old_idx, (curr_weight, curr_hypothesis) in enumerate(
            zip(self.hypotheses_weight, self.multi_hypotheses_bank)
        ):
            # 1) for each hypothesis, create missed detection hypothesis
            new_hypotheses.append(curr_hypothesis)
            new_weights.append(w_theta_0 + curr_weight)

            # 2) for each hypothesis, perform ellipsoidal gating
            # and only create object detection hypotheses for detection
            # inside the gate
            z_ingate, _ = GaussianDensity.ellipsoidal_gating(
                curr_hypothesis,
                current_measurements,
                self.meas_model,
                self.gating_size,
            )
            predicted_likelihood = GaussianDensity.predicted_likelihood(curr_hypothesis, z_ingate, self.meas_model)

            # For each gated measurement create a detection hypothesis.
            # Fix: the original iterated `for idx, meausurement in z_ingate`,
            # which unpacks each measurement vector itself, so `idx` was a
            # measurement component rather than an index into
            # predicted_likelihood.  enumerate() is what was intended.
            for meas_idx, measurement in enumerate(z_ingate):
                new_hypotheses.append(GaussianDensity.update(curr_hypothesis, measurement, self.meas_model))
                new_weights.append(predicted_likelihood[meas_idx] + w_theta_factor)

        self.hypotheses_weight.extend(new_weights)
        self.multi_hypotheses_bank.extend(new_hypotheses)
        assert len(self.hypotheses_weight) == len(self.multi_hypotheses_bank)

        # 3. normalise hypotheses weights
        self.hypotheses_weight, _ = normalize_log_weights(self.hypotheses_weight)

        # 4. Prune hypotheses with small weights and then re-normalise the weights
        self.hypotheses_weight, self.multi_hypotheses_bank = HypothesisReduction.prune(
            self.hypotheses_weight, self.multi_hypotheses_bank, threshold=self.w_min
        )
        self.hypotheses_weight, _ = normalize_log_weights(self.hypotheses_weight)

        # 5. Hypotheses merging and normalize
        self.hypotheses_weight, self.multi_hypotheses_bank = HypothesisReduction.merge(
            self.hypotheses_weight,
            self.multi_hypotheses_bank,
            threshold=self.merging_threshold,
        )
        self.hypotheses_weight, _ = normalize_log_weights(self.hypotheses_weight)

        # 6. Cap the number of the hypotheses and then re-normalise the weights
        self.hypotheses_weight, self.multi_hypotheses_bank = HypothesisReduction.cap(
            self.hypotheses_weight, self.multi_hypotheses_bank, top_k=self.M
        )
        self.hypotheses_weight, _ = normalize_log_weights(self.hypotheses_weight)

        # 7. Get object state from the most probable hypothesis
        if self.multi_hypotheses_bank:
            current_step_state = self.multi_hypotheses_bank[np.argmax(self.hypotheses_weight)]
            estimation = current_step_state
        else:
            estimation = predicted_state

        # 8. For each hypotheses do prediction
        self.updated_states = [
            GaussianDensity.predict(hypothesis, self.motion_model) for hypothesis in self.multi_hypotheses_bank
        ]
        self.multi_hypotheses_bank = self.updated_states
        return estimation

    @property
    def method(self):
        return "gauss sum filter"
| [
"numpy.log",
"numpy.array",
"scipy.stats.chi2.ppf",
"numpy.argmax"
] | [((887, 922), 'scipy.stats.chi2.ppf', 'chi2.ppf', (['P_G'], {'df': 'self.meas_model.d'}), '(P_G, df=self.meas_model.d)\n', (895, 922), False, 'from scipy.stats import chi2\n'), ((2707, 2768), 'numpy.log', 'np.log', (['(self.sensor_model.P_D / self.sensor_model.intensity_c)'], {}), '(self.sensor_model.P_D / self.sensor_model.intensity_c)\n', (2713, 2768), True, 'import numpy as np\n'), ((2789, 2822), 'numpy.log', 'np.log', (['(1 - self.sensor_model.P_D)'], {}), '(1 - self.sensor_model.P_D)\n', (2795, 2822), True, 'import numpy as np\n'), ((2069, 2080), 'numpy.log', 'np.log', (['(1.0)'], {}), '(1.0)\n', (2075, 2080), True, 'import numpy as np\n'), ((5475, 5508), 'numpy.argmax', 'np.argmax', (['self.hypotheses_weight'], {}), '(self.hypotheses_weight)\n', (5484, 5508), True, 'import numpy as np\n'), ((2353, 2384), 'numpy.array', 'np.array', (['measurements_in_scene'], {}), '(measurements_in_scene)\n', (2361, 2384), True, 'import numpy as np\n')] |
import SCRAM
from SCRAM.BuildSystem.ToolManager import ToolManager
from SCRAM.Core.Core import Core
from operator import itemgetter
def process(args):
    """Entry point for `scram tool <subcommand>`.

    Fix: dispatches through an explicit handler table instead of the
    original eval('tool_%s' % ...) on user-supplied input.
    """
    area = Core()
    area.checklocal()
    handlers = {'list': tool_list, 'info': tool_info,
                'tag': tool_tag, 'remove': tool_remove}
    if not args or args[0].lower() not in handlers:
        SCRAM.scramfatal("Error parsing arguments. See \"scram -help\" for usage info.")
    return handlers[args[0].lower()](args[1:], area.localarea())
def tool_list(args, area):
    """Print the name and version of every tool configured in *area*."""
    tools = ToolManager(area).toolsdata()
    if not tools:
        SCRAM.scramerror(">>>> No tools set up for current arch or area! <<<<")
    header = "Tool list for location %s" % area.location()
    underline = "+" * len(header)
    SCRAM.printmsg("\n%s\n%s\n" % (header, underline))
    for tool in sorted(tools, key=itemgetter('TOOLNAME')):
        line = "  {:40s} {:20s}".format(tool['TOOLNAME'], tool['TOOLVERSION'])
        SCRAM.printmsg(line)
    SCRAM.printmsg("")
    return True
def tool_info(args, area):
    """Print the detailed configuration of one tool in *area*."""
    if not args:
        SCRAM.scramfatal("No tool name given: see \"scram tool -help\" for usage info.")
    from SCRAM.BuildSystem.ToolFile import ToolFile
    toolname = args[0].lower()
    tool = ToolManager(area).gettool(toolname)
    if not tool:
        SCRAM.scramerror(">>>> Tool %s is not setup for this project area. <<<<" % toolname)
    header = "Tool info as configured in location %s" % area.location()
    msg = header
    msg += "\n%s\n" % ("+" * len(header))
    msg += "Name : %s\n" % toolname
    msg += "Version : %s\n" % tool['TOOLVERSION']
    msg += "%s\n" % ("+" * 20)
    SCRAM.printmsg(msg)
    tooldata, flags = ToolFile.summarize_tool(tool)
    for tag in sorted(tooldata):
        # "+=" marks multi-valued (flag) entries, "=" plain values.
        separator = "+=" if tag in flags else "="
        SCRAM.printmsg('%s%s%s' % (tag, separator, tooldata[tag]))
    SCRAM.printmsg("")
    return True
def tool_tag(args, area):
    """Print the value of one feature tag of a tool (all tags when no
    tag name is given)."""
    if len(args) < 1:
        SCRAM.scramfatal("No tool name given: see \"scram tool -help\" for usage info.")
    toolname = args[0].lower()
    tool = ToolManager(area).gettool(toolname)
    if not tool:
        SCRAM.scramerror(">>>> Tool %s is not setup for this project area. <<<<" % toolname)
    from SCRAM.BuildSystem.ToolFile import ToolFile
    tag = None if len(args) == 1 else args[1]
    msg = ToolFile.get_feature(tool, tag)
    if msg:
        SCRAM.printmsg(msg)
    return True
def tool_remove(args, area):
    """Remove a tool from the current project area configuration."""
    if len(args) < 1:
        SCRAM.scramfatal("No tool name given: see \"scram tool -help\" for usage info.")
    toolname = args[0].lower()
    manager = ToolManager(area)
    if not manager.hastool(toolname):
        SCRAM.scramerror(">>>> Tool %s is not defined for this project area. <<<<" % toolname)
    SCRAM.printmsg("Removing tool %s from current project area configuration." % toolname)
    manager.remove_tool(toolname)
    return True
| [
"SCRAM.BuildSystem.ToolManager.ToolManager",
"SCRAM.scramfatal",
"SCRAM.BuildSystem.ToolFile.ToolFile.get_feature",
"SCRAM.BuildSystem.ToolFile.ToolFile.summarize_tool",
"SCRAM.scramerror",
"SCRAM.printmsg",
"SCRAM.Core.Core.Core",
"operator.itemgetter"
] | [((164, 170), 'SCRAM.Core.Core.Core', 'Core', ([], {}), '()\n', (168, 170), False, 'from SCRAM.Core.Core import Core\n'), ((479, 496), 'SCRAM.BuildSystem.ToolManager.ToolManager', 'ToolManager', (['area'], {}), '(area)\n', (490, 496), False, 'from SCRAM.BuildSystem.ToolManager import ToolManager\n'), ((730, 758), 'SCRAM.printmsg', 'SCRAM.printmsg', (["('\\n%s' % msg)"], {}), "('\\n%s' % msg)\n", (744, 758), False, 'import SCRAM\n'), ((909, 927), 'SCRAM.printmsg', 'SCRAM.printmsg', (['""""""'], {}), "('')\n", (923, 927), False, 'import SCRAM\n'), ((1150, 1167), 'SCRAM.BuildSystem.ToolManager.ToolManager', 'ToolManager', (['area'], {}), '(area)\n', (1161, 1167), False, 'from SCRAM.BuildSystem.ToolManager import ToolManager\n'), ((1579, 1598), 'SCRAM.printmsg', 'SCRAM.printmsg', (['msg'], {}), '(msg)\n', (1593, 1598), False, 'import SCRAM\n'), ((1621, 1650), 'SCRAM.BuildSystem.ToolFile.ToolFile.summarize_tool', 'ToolFile.summarize_tool', (['tool'], {}), '(tool)\n', (1644, 1650), False, 'from SCRAM.BuildSystem.ToolFile import ToolFile\n'), ((1791, 1809), 'SCRAM.printmsg', 'SCRAM.printmsg', (['""""""'], {}), "('')\n", (1805, 1809), False, 'import SCRAM\n'), ((1984, 2001), 'SCRAM.BuildSystem.ToolManager.ToolManager', 'ToolManager', (['area'], {}), '(area)\n', (1995, 2001), False, 'from SCRAM.BuildSystem.ToolManager import ToolManager\n'), ((2292, 2323), 'SCRAM.BuildSystem.ToolFile.ToolFile.get_feature', 'ToolFile.get_feature', (['tool', 'tag'], {}), '(tool, tag)\n', (2312, 2323), False, 'from SCRAM.BuildSystem.ToolFile import ToolFile\n'), ((2572, 2589), 'SCRAM.BuildSystem.ToolManager.ToolManager', 'ToolManager', (['area'], {}), '(area)\n', (2583, 2589), False, 'from SCRAM.BuildSystem.ToolManager import ToolManager\n'), ((2731, 2821), 'SCRAM.printmsg', 'SCRAM.printmsg', (["('Removing tool %s from current project area configuration.' % toolname)"], {}), "('Removing tool %s from current project area configuration.' 
%\n toolname)\n", (2745, 2821), False, 'import SCRAM\n'), ((278, 356), 'SCRAM.scramfatal', 'SCRAM.scramfatal', (['"""Error parsing arguments. See "scram -help" for usage info."""'], {}), '(\'Error parsing arguments. See "scram -help" for usage info.\')\n', (294, 356), False, 'import SCRAM\n'), ((559, 630), 'SCRAM.scramerror', 'SCRAM.scramerror', (['""">>>> No tools set up for current arch or area! <<<<"""'], {}), "('>>>> No tools set up for current arch or area! <<<<')\n", (575, 630), False, 'import SCRAM\n'), ((998, 1076), 'SCRAM.scramfatal', 'SCRAM.scramfatal', (['"""No tool name given: see "scram tool -help" for usage info."""'], {}), '(\'No tool name given: see "scram tool -help" for usage info.\')\n', (1014, 1076), False, 'import SCRAM\n'), ((1265, 1353), 'SCRAM.scramerror', 'SCRAM.scramerror', (["('>>>> Tool %s is not setup for this project area. <<<<' % toolname)"], {}), "('>>>> Tool %s is not setup for this project area. <<<<' %\n toolname)\n", (1281, 1353), False, 'import SCRAM\n'), ((1735, 1786), 'SCRAM.printmsg', 'SCRAM.printmsg', (["('%s%s%s' % (tag, ch, tooldata[tag]))"], {}), "('%s%s%s' % (tag, ch, tooldata[tag]))\n", (1749, 1786), False, 'import SCRAM\n'), ((1884, 1962), 'SCRAM.scramfatal', 'SCRAM.scramfatal', (['"""No tool name given: see "scram tool -help" for usage info."""'], {}), '(\'No tool name given: see "scram tool -help" for usage info.\')\n', (1900, 1962), False, 'import SCRAM\n'), ((2099, 2187), 'SCRAM.scramerror', 'SCRAM.scramerror', (["('>>>> Tool %s is not setup for this project area. <<<<' % toolname)"], {}), "('>>>> Tool %s is not setup for this project area. 
<<<<' %\n toolname)\n", (2115, 2187), False, 'import SCRAM\n'), ((2344, 2363), 'SCRAM.printmsg', 'SCRAM.printmsg', (['msg'], {}), '(msg)\n', (2358, 2363), False, 'import SCRAM\n'), ((2441, 2519), 'SCRAM.scramfatal', 'SCRAM.scramfatal', (['"""No tool name given: see "scram tool -help" for usage info."""'], {}), '(\'No tool name given: see "scram tool -help" for usage info.\')\n', (2457, 2519), False, 'import SCRAM\n'), ((2640, 2730), 'SCRAM.scramerror', 'SCRAM.scramerror', (["('>>>> Tool %s is not defined for this project area. <<<<' % toolname)"], {}), "('>>>> Tool %s is not defined for this project area. <<<<' %\n toolname)\n", (2656, 2730), False, 'import SCRAM\n'), ((793, 815), 'operator.itemgetter', 'itemgetter', (['"""TOOLNAME"""'], {}), "('TOOLNAME')\n", (803, 815), False, 'from operator import itemgetter\n')] |
from flask import request
import app.api.routes.models.political
from app.api.routes.models.political import PoliticalParty as p, parties
from app.api.blueprints import version1
from app.api.responses import Responses
from app.api.utils import Validations
from app.api.valid import validate_update_all as v
from app.api.routes.models import *
@version1.route("/party", methods=['GET'])
def get_all_parties():
    """Return every registered party, or 404 when none exist yet."""
    party_list = app.api.routes.models.political.parties
    if not party_list:
        return Responses.not_found("No created parties yet"), 404
    return Responses.complete_response(party_list), 200
@version1.route("/party", methods=['POST'])
def create_political_party():
    """Create a new political party from the JSON request body.

    Fix: each validator is invoked once and its result reused; the
    original called every validator twice — once to test and once more
    to build the error response.
    """
    data = request.get_json()
    error = Validations.validate_json_inputs()
    if error:
        return error
    error = Validations.validate_extra_fields(data)
    if error:
        return error
    error = Validations.validate_strings(data)
    if error:
        return error
    error = Validations.verify_political_details()
    if error:
        return error
    error = Validations.validate_characters()
    if error:
        return error
    error = Validations.validate_logo()
    if error:
        return error
    # All checks passed: register the party with the next sequential id.
    new_party = {
        'id': len(parties) + 1,
        'name': request.json['name'],
        'hqAddress': request.json.get('hqAddress'),
        'logoUrl': request.json.get('logoUrl'),
    }
    parties.append(new_party)
    return Responses.created_response(new_party), 201
@version1.route("/party/<int:id>", methods=['GET'])
def get_specific_party(id):
    """Fetch a single party by its id, or 404 when no match exists."""
    match = next((party for party in parties if party['id'] == id), None)
    if match is not None:
        return Responses.complete_response(match), 200
    return Responses.not_found("Party not found"), 404
@version1.route("/party/<int:id>/name", methods=['PATCH'])
def update_specific_party(id):
    """Update the name of an existing party.

    Returns the validator's error response when the payload is invalid,
    otherwise delegates the update to the model layer.
    """
    # The validator returns a falsy value on success and an error response
    # on failure; the original code called v() a second time to build the
    # return value, which is redundant.
    error = v()
    if error:
        return error
    return app.api.routes.models.political.Update.update_party_details(id)
@version1.route("/party/<int:id>", methods=['DELETE'])
def delete_specific_party(id):
    """Remove a party by id, returning 404 when it does not exist."""
    store = app.api.routes.models.political.parties
    for record in store:
        if record["id"] == int(id):
            # Safe despite iterating: we return immediately after removal.
            store.remove(record)
            return Responses.complete_response("Party deleted successfully"), 200
    return Responses.not_found("Party does not exist"), 404
| [
"app.api.blueprints.version1.route",
"app.api.utils.Validations.validate_json_inputs",
"app.api.utils.Validations.validate_strings",
"app.api.utils.Validations.validate_logo",
"app.api.valid.validate_update_all",
"app.api.responses.Responses.created_response",
"app.api.utils.Validations.validate_charact... | [((347, 388), 'app.api.blueprints.version1.route', 'version1.route', (['"""/party"""'], {'methods': "['GET']"}), "('/party', methods=['GET'])\n", (361, 388), False, 'from app.api.blueprints import version1\n'), ((650, 692), 'app.api.blueprints.version1.route', 'version1.route', (['"""/party"""'], {'methods': "['POST']"}), "('/party', methods=['POST'])\n", (664, 692), False, 'from app.api.blueprints import version1\n'), ((1721, 1771), 'app.api.blueprints.version1.route', 'version1.route', (['"""/party/<int:id>"""'], {'methods': "['GET']"}), "('/party/<int:id>', methods=['GET'])\n", (1735, 1771), False, 'from app.api.blueprints import version1\n'), ((2019, 2076), 'app.api.blueprints.version1.route', 'version1.route', (['"""/party/<int:id>/name"""'], {'methods': "['PATCH']"}), "('/party/<int:id>/name', methods=['PATCH'])\n", (2033, 2076), False, 'from app.api.blueprints import version1\n'), ((2300, 2353), 'app.api.blueprints.version1.route', 'version1.route', (['"""/party/<int:id>"""'], {'methods': "['DELETE']"}), "('/party/<int:id>', methods=['DELETE'])\n", (2314, 2353), False, 'from app.api.blueprints import version1\n'), ((779, 797), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (795, 797), False, 'from flask import request\n'), ((810, 844), 'app.api.utils.Validations.validate_json_inputs', 'Validations.validate_json_inputs', ([], {}), '()\n', (842, 844), False, 'from app.api.utils import Validations\n'), ((921, 960), 'app.api.utils.Validations.validate_extra_fields', 'Validations.validate_extra_fields', (['data'], {}), '(data)\n', (954, 960), False, 'from app.api.utils import Validations\n'), ((1045, 1079), 'app.api.utils.Validations.validate_strings', 'Validations.validate_strings', (['data'], {}), '(data)\n', (1073, 1079), False, 'from app.api.utils import Validations\n'), ((1176, 1214), 'app.api.utils.Validations.verify_political_details', 'Validations.verify_political_details', ([], {}), 
'()\n', (1212, 1214), False, 'from app.api.utils import Validations\n'), ((1277, 1310), 'app.api.utils.Validations.validate_characters', 'Validations.validate_characters', ([], {}), '()\n', (1308, 1310), False, 'from app.api.utils import Validations\n'), ((1368, 1395), 'app.api.utils.Validations.validate_logo', 'Validations.validate_logo', ([], {}), '()\n', (1393, 1395), False, 'from app.api.utils import Validations\n'), ((1638, 1663), 'app.api.routes.models.political.parties.append', 'parties.append', (['new_party'], {}), '(new_party)\n', (1652, 1663), False, 'from app.api.routes.models.political import PoliticalParty as p, parties\n'), ((2168, 2171), 'app.api.valid.validate_update_all', 'v', ([], {}), '()\n', (2169, 2171), True, 'from app.api.valid import validate_update_all as v\n'), ((573, 641), 'app.api.responses.Responses.complete_response', 'Responses.complete_response', (['app.api.routes.models.political.parties'], {}), '(app.api.routes.models.political.parties)\n', (600, 641), False, 'from app.api.responses import Responses\n'), ((874, 908), 'app.api.utils.Validations.validate_json_inputs', 'Validations.validate_json_inputs', ([], {}), '()\n', (906, 908), False, 'from app.api.utils import Validations\n'), ((990, 1029), 'app.api.utils.Validations.validate_extra_fields', 'Validations.validate_extra_fields', (['data'], {}), '(data)\n', (1023, 1029), False, 'from app.api.utils import Validations\n'), ((1112, 1146), 'app.api.utils.Validations.validate_strings', 'Validations.validate_strings', (['data'], {}), '(data)\n', (1140, 1146), False, 'from app.api.utils import Validations\n'), ((1231, 1269), 'app.api.utils.Validations.verify_political_details', 'Validations.verify_political_details', ([], {}), '()\n', (1267, 1269), False, 'from app.api.utils import Validations\n'), ((1327, 1360), 'app.api.utils.Validations.validate_characters', 'Validations.validate_characters', ([], {}), '()\n', (1358, 1360), False, 'from app.api.utils import Validations\n'), ((1412, 
1439), 'app.api.utils.Validations.validate_logo', 'Validations.validate_logo', ([], {}), '()\n', (1437, 1439), False, 'from app.api.utils import Validations\n'), ((1549, 1578), 'flask.request.json.get', 'request.json.get', (['"""hqAddress"""'], {}), "('hqAddress')\n", (1565, 1578), False, 'from flask import request\n'), ((1599, 1626), 'flask.request.json.get', 'request.json.get', (['"""logoUrl"""'], {}), "('logoUrl')\n", (1615, 1626), False, 'from flask import request\n'), ((1675, 1712), 'app.api.responses.Responses.created_response', 'Responses.created_response', (['new_party'], {}), '(new_party)\n', (1701, 1712), False, 'from app.api.responses import Responses\n'), ((1972, 2010), 'app.api.responses.Responses.not_found', 'Responses.not_found', (['"""Party not found"""'], {}), "('Party not found')\n", (1991, 2010), False, 'from app.api.responses import Responses\n'), ((2204, 2207), 'app.api.valid.validate_update_all', 'v', ([], {}), '()\n', (2205, 2207), True, 'from app.api.valid import validate_update_all as v\n'), ((2677, 2720), 'app.api.responses.Responses.not_found', 'Responses.not_found', (['"""Party does not exist"""'], {}), "('Party does not exist')\n", (2696, 2720), False, 'from app.api.responses import Responses\n'), ((511, 556), 'app.api.responses.Responses.not_found', 'Responses.not_found', (['"""No created parties yet"""'], {}), "('No created parties yet')\n", (530, 556), False, 'from app.api.responses import Responses\n'), ((1921, 1955), 'app.api.responses.Responses.complete_response', 'Responses.complete_response', (['party'], {}), '(party)\n', (1948, 1955), False, 'from app.api.responses import Responses\n'), ((2603, 2660), 'app.api.responses.Responses.complete_response', 'Responses.complete_response', (['"""Party deleted successfully"""'], {}), "('Party deleted successfully')\n", (2630, 2660), False, 'from app.api.responses import Responses\n')] |
import unittest
import numpy as np
import tensorflow as tf
import lib.weighted_layers_v2 as wl
from lib.weighted_resblock import MixtureWeight
class WeightedConv2DTest(tf.test.TestCase):
  """Tests for the WeightedConv2D layer."""

  def setUp(self):
    """Initializes shared template weights and layer hyper-parameters."""
    super(WeightedConv2DTest, self).setUp()
    self.num_templates = 10
    self.input_channel = 20
    self.filters = 40
    self.kernel_size = 3
    self.activation = 'relu'
    self.padding = 'same'
    kernel_shape = (self.num_templates, self.kernel_size, self.kernel_size,
                    self.input_channel, self.filters)
    self.kernel = np.random.rand(*kernel_shape)
    self.bias = np.random.rand(self.num_templates, self.filters)
    self.kernel_init = tf.constant_initializer(self.kernel)
    self.bias_init = tf.constant_initializer(self.bias)
    self.xi = MixtureWeight(
        num_templates=self.num_templates,
        initializer=tf.random_uniform_initializer(minval=0.0, maxval=1.0))

  def _create_default_w_conv(self):
    """Builds a WeightedConv2D configured from the test defaults."""
    return wl.WeightedConv2D(
        kernel_size=self.kernel_size, filters=self.filters,
        num_templates=self.num_templates, activation=self.activation,
        padding=self.padding, kernel_initializer=self.kernel_init,
        bias_initializer=self.bias_init)

  def _get_default_inputs(self, in_shape):
    """Returns [feature_tensor, mixture_weights] for the given input shape."""
    features = tf.Variable(np.random.rand(*in_shape), dtype=tf.float32)
    return [features, self.xi(None)[0]]

  def test_output_shape(self):
    """The output keeps the spatial dims and gets `filters` channels."""
    layer = self._create_default_w_conv()
    inputs = self._get_default_inputs((32, 16, 16, self.input_channel))
    self.assertAllEqual((32, 16, 16, self.filters), layer(inputs).shape)

  def test_output_values(self):
    """Without activation the layer equals a mixture of template convs."""
    layer = self._create_default_w_conv()
    layer.activation = None
    input_shape = (32, 16, 16, self.input_channel)
    inputs = self._get_default_inputs(input_shape)
    actual = layer(inputs)
    # With no activation, the weighted convolution must match the linear
    # combination of per-template regular convolutions.
    reference = tf.keras.layers.Conv2D(
        filters=self.filters, activation=None, padding=self.padding,
        kernel_size=self.kernel_size)
    reference.build(input_shape)
    expected = tf.zeros_like(actual)
    for template in range(self.num_templates):
      reference.kernel = self.kernel[template]
      reference.bias = self.bias[template]
      expected += inputs[1][template] * reference(inputs[0])
    self.assertAllClose(expected, actual, rtol=1e-05)
class WeightedDepthwiseConv2DTest(tf.test.TestCase):
  """Tests for the WeightedDepthwiseConv2D layer."""

  def setUp(self):
    """Initializes shared template weights and layer hyper-parameters."""
    super(WeightedDepthwiseConv2DTest, self).setUp()
    self.num_templates = 10
    self.input_channel = 20
    self.depth_multiplier = 2
    self.kernel_size = 3
    self.activation = 'relu'
    self.padding = 'same'
    kernel_shape = (self.num_templates, self.kernel_size, self.kernel_size,
                    self.input_channel, self.depth_multiplier)
    self.kernel = np.random.rand(*kernel_shape).astype(np.float32)
    self.bias = np.random.rand(
        self.num_templates,
        self.input_channel * self.depth_multiplier).astype(np.float32)
    self.kernel_init = tf.constant_initializer(self.kernel)
    self.bias_init = tf.constant_initializer(self.bias)
    self.xi_initializer = tf.random_uniform_initializer(minval=0.0, maxval=1.0)
    self.xi = MixtureWeight(num_templates=self.num_templates,
                             initializer=self.xi_initializer)

  def _create_default_depth_conv(self):
    """Builds a WeightedDepthwiseConv2D configured from the test defaults."""
    return wl.WeightedDepthwiseConv2D(
        kernel_size=self.kernel_size, depth_multiplier=self.depth_multiplier,
        num_templates=self.num_templates, activation=self.activation,
        padding=self.padding, depthwise_initializer=self.kernel_init,
        bias_initializer=self.bias_init)

  def _get_default_inputs(self, in_shape):
    """Returns [feature_tensor, mixture_weights] for the given input shape."""
    features = tf.Variable(np.random.rand(*in_shape), dtype=tf.float32)
    return [features, self.xi(None)[0]]

  def test_output_shape(self):
    """Spatial dims are kept; channels are scaled by depth_multiplier."""
    layer = self._create_default_depth_conv()
    inputs = self._get_default_inputs((32, 64, 64, self.input_channel))
    expected = (32, 64, 64, self.input_channel * self.depth_multiplier)
    self.assertAllEqual(expected, layer(inputs).shape)

  def test_output_value(self):
    """Without activation the layer equals a mixture of template convs."""
    layer = self._create_default_depth_conv()
    layer.activation = None
    input_shape = (32, 16, 16, self.input_channel)
    inputs = self._get_default_inputs(input_shape)
    actual = layer(inputs)
    # With no activation, the weighted depthwise convolution must match the
    # linear combination of per-template regular depthwise convolutions.
    reference = tf.keras.layers.DepthwiseConv2D(
        depth_multiplier=self.depth_multiplier, activation=None,
        padding=self.padding, kernel_size=self.kernel_size)
    reference.build(input_shape)
    expected = tf.zeros_like(actual)
    for template in range(self.num_templates):
      reference.depthwise_kernel = self.kernel[template]
      reference.bias = self.bias[template]
      expected += inputs[1][template] * reference(inputs[0])
    self.assertAllClose(expected, actual, rtol=1e-05)
class WeightedBatchNormalizationTest(tf.test.TestCase):
  """WeightedBatchNormalizationSeparate test class."""

  def setUp(self):
    """Sets default parameters."""
    # Call the parent setUp so tf.test.TestCase fixtures are initialized,
    # matching the other test classes in this file (it was missing here).
    super(WeightedBatchNormalizationTest, self).setUp()
    self.num_templates = 10
    self.input_channels = 40
    self.gamma_template = np.random.rand(
        self.num_templates, self.input_channels).astype(np.float32)
    self.beta_template = np.random.rand(
        self.num_templates, self.input_channels).astype(np.float32)
    self.beta_init = tf.constant_initializer(self.beta_template)
    self.gamma_init = tf.constant_initializer(self.gamma_template)
    self.xi_initializer = tf.random_uniform_initializer(minval=0.0, maxval=1.0)
    self.xi = MixtureWeight(num_templates=self.num_templates,
                             initializer=self.xi_initializer)

  def test_output_shape(self):
    """checks if the output shape is same as input shape."""
    input_shape = (256, 16, 16, self.input_channels)
    inputs = tf.random.normal(input_shape)
    bn = wl.WeightedBatchNormalizationSeparate(
        num_templates=self.num_templates, gamma_initializer=self.gamma_init,
        beta_initializer=self.beta_init)
    outputs = bn([inputs, self.xi(None)[0]], training=True)
    self.assertAllEqual(input_shape, outputs.shape)

  def test_output_moments(self):
    """checks if the output moments match to the mixture of moments."""
    input_shape = (256, 16, 16, self.input_channels)
    inputs = tf.random.normal(input_shape, mean=2.5, stddev=8.0)
    bn = wl.WeightedBatchNormalizationSeparate(
        num_templates=self.num_templates, gamma_initializer=self.gamma_init,
        beta_initializer=self.beta_init)
    outputs = bn([inputs, self.xi(None)[0]], training=True)
    # After normalization the output mean/std should equal the xi-weighted
    # mixtures of the beta/gamma templates respectively.
    reduction_axes = range(len(input_shape) - 1)
    mean, var = tf.nn.moments(outputs, reduction_axes)
    reshaped_mix_w = tf.reshape(self.xi(None), [self.num_templates, 1])
    mix_gamma = tf.reduce_sum(reshaped_mix_w * self.gamma_template, axis=0)
    mix_beta = tf.reduce_sum(reshaped_mix_w * self.beta_template, axis=0)
    self.assertAllClose(mean, mix_beta, rtol=1e-03)
    self.assertAllClose(tf.math.sqrt(var), mix_gamma, rtol=1e-03)
if __name__ == '__main__':
  # A dummy argv prevents unittest from parsing interpreter/runner flags;
  # exit=False keeps unittest.main from calling sys.exit after the run.
  unittest.main(argv=['first-arg-is-ignored'], exit=False)
| [
"tensorflow.random.normal",
"lib.weighted_layers_v2.WeightedBatchNormalizationSeparate",
"lib.weighted_layers_v2.WeightedDepthwiseConv2D",
"numpy.random.rand",
"tensorflow.keras.layers.Conv2D",
"tensorflow.reduce_sum",
"tensorflow.nn.moments",
"tensorflow.math.sqrt",
"tensorflow.zeros_like",
"lib.... | [((8216, 8272), 'unittest.main', 'unittest.main', ([], {'argv': "['first-arg-is-ignored']", 'exit': '(False)'}), "(argv=['first-arg-is-ignored'], exit=False)\n", (8229, 8272), False, 'import unittest\n'), ((472, 581), 'numpy.random.rand', 'np.random.rand', (['self.num_templates', 'self.kernel_size', 'self.kernel_size', 'self.input_channel', 'self.filters'], {}), '(self.num_templates, self.kernel_size, self.kernel_size, self\n .input_channel, self.filters)\n', (486, 581), True, 'import numpy as np\n'), ((659, 707), 'numpy.random.rand', 'np.random.rand', (['self.num_templates', 'self.filters'], {}), '(self.num_templates, self.filters)\n', (673, 707), True, 'import numpy as np\n'), ((731, 767), 'tensorflow.constant_initializer', 'tf.constant_initializer', (['self.kernel'], {}), '(self.kernel)\n', (754, 767), True, 'import tensorflow as tf\n'), ((789, 823), 'tensorflow.constant_initializer', 'tf.constant_initializer', (['self.bias'], {}), '(self.bias)\n', (812, 823), True, 'import tensorflow as tf\n'), ((864, 917), 'tensorflow.random_uniform_initializer', 'tf.random_uniform_initializer', ([], {'minval': '(0.0)', 'maxval': '(1.0)'}), '(minval=0.0, maxval=1.0)\n', (893, 917), True, 'import tensorflow as tf\n'), ((932, 1000), 'lib.weighted_resblock.MixtureWeight', 'MixtureWeight', ([], {'num_templates': 'self.num_templates', 'initializer': 'xi_init'}), '(num_templates=self.num_templates, initializer=xi_init)\n', (945, 1000), False, 'from lib.weighted_resblock import MixtureWeight\n'), ((1150, 1388), 'lib.weighted_layers_v2.WeightedConv2D', 'wl.WeightedConv2D', ([], {'filters': 'self.filters', 'activation': 'self.activation', 'padding': 'self.padding', 'kernel_size': 'self.kernel_size', 'num_templates': 'self.num_templates', 'kernel_initializer': 'self.kernel_init', 'bias_initializer': 'self.bias_init'}), '(filters=self.filters, activation=self.activation, padding\n =self.padding, kernel_size=self.kernel_size, num_templates=self.\n num_templates, 
kernel_initializer=self.kernel_init, bias_initializer=\n self.bias_init)\n', (1167, 1388), True, 'import lib.weighted_layers_v2 as wl\n'), ((2478, 2501), 'tensorflow.zeros_like', 'tf.zeros_like', (['w_output'], {}), '(w_output)\n', (2491, 2501), True, 'import tensorflow as tf\n'), ((2513, 2631), 'tensorflow.keras.layers.Conv2D', 'tf.keras.layers.Conv2D', ([], {'filters': 'self.filters', 'activation': 'None', 'padding': 'self.padding', 'kernel_size': 'self.kernel_size'}), '(filters=self.filters, activation=None, padding=self.\n padding, kernel_size=self.kernel_size)\n', (2535, 2631), True, 'import tensorflow as tf\n'), ((3698, 3734), 'tensorflow.constant_initializer', 'tf.constant_initializer', (['self.kernel'], {}), '(self.kernel)\n', (3721, 3734), True, 'import tensorflow as tf\n'), ((3756, 3790), 'tensorflow.constant_initializer', 'tf.constant_initializer', (['self.bias'], {}), '(self.bias)\n', (3779, 3790), True, 'import tensorflow as tf\n'), ((3843, 3896), 'tensorflow.random_uniform_initializer', 'tf.random_uniform_initializer', ([], {'minval': '(0.0)', 'maxval': '(1.0)'}), '(minval=0.0, maxval=1.0)\n', (3872, 3896), True, 'import tensorflow as tf\n'), ((3911, 3996), 'lib.weighted_resblock.MixtureWeight', 'MixtureWeight', ([], {'num_templates': 'self.num_templates', 'initializer': 'self.xi_initializer'}), '(num_templates=self.num_templates, initializer=self.xi_initializer\n )\n', (3924, 3996), False, 'from lib.weighted_resblock import MixtureWeight\n'), ((4150, 4417), 'lib.weighted_layers_v2.WeightedDepthwiseConv2D', 'wl.WeightedDepthwiseConv2D', ([], {'depth_multiplier': 'self.depth_multiplier', 'activation': 'self.activation', 'padding': 'self.padding', 'kernel_size': 'self.kernel_size', 'num_templates': 'self.num_templates', 'bias_initializer': 'self.bias_init', 'depthwise_initializer': 'self.kernel_init'}), '(depth_multiplier=self.depth_multiplier,\n activation=self.activation, padding=self.padding, kernel_size=self.\n kernel_size, 
num_templates=self.num_templates, bias_initializer=self.\n bias_init, depthwise_initializer=self.kernel_init)\n', (4176, 4417), True, 'import lib.weighted_layers_v2 as wl\n'), ((5563, 5588), 'tensorflow.zeros_like', 'tf.zeros_like', (['w_d_output'], {}), '(w_d_output)\n', (5576, 5588), True, 'import tensorflow as tf\n'), ((5600, 5744), 'tensorflow.keras.layers.DepthwiseConv2D', 'tf.keras.layers.DepthwiseConv2D', ([], {'depth_multiplier': 'self.depth_multiplier', 'activation': 'None', 'padding': 'self.padding', 'kernel_size': 'self.kernel_size'}), '(depth_multiplier=self.depth_multiplier,\n activation=None, padding=self.padding, kernel_size=self.kernel_size)\n', (5631, 5744), True, 'import tensorflow as tf\n'), ((6511, 6554), 'tensorflow.constant_initializer', 'tf.constant_initializer', (['self.beta_template'], {}), '(self.beta_template)\n', (6534, 6554), True, 'import tensorflow as tf\n'), ((6577, 6621), 'tensorflow.constant_initializer', 'tf.constant_initializer', (['self.gamma_template'], {}), '(self.gamma_template)\n', (6600, 6621), True, 'import tensorflow as tf\n'), ((6648, 6701), 'tensorflow.random_uniform_initializer', 'tf.random_uniform_initializer', ([], {'minval': '(0.0)', 'maxval': '(1.0)'}), '(minval=0.0, maxval=1.0)\n', (6677, 6701), True, 'import tensorflow as tf\n'), ((6716, 6801), 'lib.weighted_resblock.MixtureWeight', 'MixtureWeight', ([], {'num_templates': 'self.num_templates', 'initializer': 'self.xi_initializer'}), '(num_templates=self.num_templates, initializer=self.xi_initializer\n )\n', (6729, 6801), False, 'from lib.weighted_resblock import MixtureWeight\n'), ((6984, 7013), 'tensorflow.random.normal', 'tf.random.normal', (['input_shape'], {}), '(input_shape)\n', (7000, 7013), True, 'import tensorflow as tf\n'), ((7023, 7166), 'lib.weighted_layers_v2.WeightedBatchNormalizationSeparate', 'wl.WeightedBatchNormalizationSeparate', ([], {'num_templates': 'self.num_templates', 'gamma_initializer': 'self.gamma_init', 'beta_initializer': 
'self.beta_init'}), '(num_templates=self.num_templates,\n gamma_initializer=self.gamma_init, beta_initializer=self.beta_init)\n', (7060, 7166), True, 'import lib.weighted_layers_v2 as wl\n'), ((7464, 7515), 'tensorflow.random.normal', 'tf.random.normal', (['input_shape'], {'mean': '(2.5)', 'stddev': '(8.0)'}), '(input_shape, mean=2.5, stddev=8.0)\n', (7480, 7515), True, 'import tensorflow as tf\n'), ((7525, 7668), 'lib.weighted_layers_v2.WeightedBatchNormalizationSeparate', 'wl.WeightedBatchNormalizationSeparate', ([], {'num_templates': 'self.num_templates', 'gamma_initializer': 'self.gamma_init', 'beta_initializer': 'self.beta_init'}), '(num_templates=self.num_templates,\n gamma_initializer=self.gamma_init, beta_initializer=self.beta_init)\n', (7562, 7668), True, 'import lib.weighted_layers_v2 as wl\n'), ((7807, 7845), 'tensorflow.nn.moments', 'tf.nn.moments', (['outputs', 'reduction_axes'], {}), '(outputs, reduction_axes)\n', (7820, 7845), True, 'import tensorflow as tf\n'), ((7934, 7993), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['(reshaped_mix_w * self.gamma_template)'], {'axis': '(0)'}), '(reshaped_mix_w * self.gamma_template, axis=0)\n', (7947, 7993), True, 'import tensorflow as tf\n'), ((8009, 8067), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['(reshaped_mix_w * self.beta_template)'], {'axis': '(0)'}), '(reshaped_mix_w * self.beta_template, axis=0)\n', (8022, 8067), True, 'import tensorflow as tf\n'), ((1514, 1539), 'numpy.random.rand', 'np.random.rand', (['*in_shape'], {}), '(*in_shape)\n', (1528, 1539), True, 'import numpy as np\n'), ((4552, 4577), 'numpy.random.rand', 'np.random.rand', (['*in_shape'], {}), '(*in_shape)\n', (4566, 4577), True, 'import numpy as np\n'), ((8144, 8161), 'tensorflow.math.sqrt', 'tf.math.sqrt', (['var'], {}), '(var)\n', (8156, 8161), True, 'import tensorflow as tf\n'), ((3344, 3462), 'numpy.random.rand', 'np.random.rand', (['self.num_templates', 'self.kernel_size', 'self.kernel_size', 'self.input_channel', 
'self.depth_multiplier'], {}), '(self.num_templates, self.kernel_size, self.kernel_size, self\n .input_channel, self.depth_multiplier)\n', (3358, 3462), True, 'import numpy as np\n'), ((3559, 3637), 'numpy.random.rand', 'np.random.rand', (['self.num_templates', '(self.input_channel * self.depth_multiplier)'], {}), '(self.num_templates, self.input_channel * self.depth_multiplier)\n', (3573, 3637), True, 'import numpy as np\n'), ((6297, 6352), 'numpy.random.rand', 'np.random.rand', (['self.num_templates', 'self.input_channels'], {}), '(self.num_templates, self.input_channels)\n', (6311, 6352), True, 'import numpy as np\n'), ((6406, 6461), 'numpy.random.rand', 'np.random.rand', (['self.num_templates', 'self.input_channels'], {}), '(self.num_templates, self.input_channels)\n', (6420, 6461), True, 'import numpy as np\n')] |