content stringlengths 1 1.04M | input_ids listlengths 1 774k | ratio_char_token float64 0.38 22.9 | token_count int64 1 774k |
|---|---|---|---|
import itertools
import logging
from bs4 import BeautifulSoup
from chibi.file.temp import Chibi_temp_path
from .episode import Episode
from chibi_dl.site.base.site import Site
logger = logging.getLogger( "chibi_dl.sites.tmo_fans.serie" )
| [
11748,
340,
861,
10141,
198,
11748,
18931,
198,
198,
6738,
275,
82,
19,
1330,
23762,
50,
10486,
198,
6738,
442,
27567,
13,
7753,
13,
29510,
1330,
609,
27567,
62,
29510,
62,
6978,
198,
198,
6738,
764,
38668,
1330,
7922,
198,
6738,
442,... | 2.927711 | 83 |
import matplotlib.pyplot as plt
import numpy as np
import math
import time
import sys
if __name__ == '__main__':
# this random seed provides better result
np.random.seed(1000384)
X, Y = input_coordinates("prefs.out")
#init_path = greedy_tsp(X, Y)
init_path = random_path(X, Y)
plt.title('Annealing result')
plt.xlabel('steps')
plt.ylabel('Tour length (m)')
path = anneal(init_path, X, Y, n_iter=100000, procplt=True)
plt.show()
plt.subplot(121)
plt.title('Initial(random) Route')
showmap(init_path, X, Y)
plt.subplot(122)
plt.title('Optimized Route')
showmap(path, X, Y)
plt.show()
distance = calc_distance(path, X, Y)
print("distance: {}".format(distance))
with open('result', 'w') as fout:
fout.write('path\n')
for city in path:
fout.write(str(city) + '\n')
fout.write('')
| [
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
10688,
198,
11748,
640,
198,
11748,
25064,
198,
198,
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834,
10354,
198,
220,
220,
220,
... | 2.194175 | 412 |
from cffconvert import CodemetaObject
import unittest
import os
import ruamel.yaml as yaml
| [
6738,
269,
487,
1102,
1851,
1330,
18720,
368,
17167,
10267,
198,
11748,
555,
715,
395,
198,
11748,
28686,
198,
11748,
7422,
17983,
13,
88,
43695,
355,
331,
43695,
628,
198
] | 3.1 | 30 |
# Copyright 2019 Verily Life Sciences Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests for the bq library."""
# Workaround for https://github.com/GoogleCloudPlatform/google-cloud-python/issues/2366
from __future__ import absolute_import
import cStringIO
import csv
import random
import uuid
from ddt import data, ddt, unpack
from google.cloud import storage
from google.cloud.bigquery import ExtractJob
from google.cloud.bigquery.schema import SchemaField
from mock import patch, PropertyMock
from verily.bigquery_wrapper import bq_shared_tests, bq_test_case
@ddt
if __name__ == '__main__':
bq_test_case.main()
| [
2,
15069,
13130,
4643,
813,
5155,
13473,
3457,
13,
1439,
6923,
33876,
13,
198,
2,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
198,
2,
345,
743,
407,
779,
428,
2393,
2845,
287,
11846,
... | 3.536585 | 328 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from tap.problemak.solucion import diferencia_hojas
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
6738,
11593,
37443,
834,
1330,
4112,
62,
11748,
198,
198,
6738,
9814,
13,
45573,
461,
13,
34453,
1229,
295,
1330,
288,
361,
567,
10782,
544,
62,
8873,
28121,
628
] | 2.681818 | 44 |
# -*- coding: utf-8 -*-
"""Non-player characters."""
# Part of Clockwork MUD Server (https://github.com/whutch/cwmud)
# :copyright: (c) 2008 - 2017 Will Hutcheson
# :license: MIT (https://github.com/whutch/cwmud/blob/master/LICENSE.txt)
import re
from .attributes import Attribute
from .characters import Character
from .entities import ENTITIES
from .logs import get_logger
from .utils import joins
log = get_logger("npcs")
@ENTITIES.register
class NPC(Character):
"""A non-player character."""
_uid_code = "N"
type = "npc"
def get_name(self):
"""Get this character's name."""
return self.name
def get_short_description(self):
"""Get this character's short description."""
return self.short
def get_long_description(self):
"""Get this character's long description."""
raise self.long
@NPC.register_attr("name")
class NPCName(Attribute):
"""An NPC's name."""
_min_len = 2
_max_len = 24
_valid_chars = re.compile(r"^[a-zA-Z ]+$")
default = "an NPC"
# Other modules can add any reservations they need to this list.
RESERVED = []
@classmethod
@classmethod
def check_reserved(cls, name):
"""Check if an NPC name is reserved.
:param str name: The NPC name to check
:returns bool: True if the name is reserved, else False
"""
name = name.lower()
for reserved in cls.RESERVED:
if reserved.lower() == name:
return True
return False
@NPC.register_attr("short")
class NPCShortDesc(Attribute):
"""An NPC's short description."""
default = "Some sort of NPC is here."
@NPC.register_attr("long")
class NPCLongDesc(Attribute):
"""An NPC's long description."""
default = "There's nothing particularly interesting about them."
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
37811,
15419,
12,
7829,
3435,
526,
15931,
198,
2,
2142,
286,
47120,
337,
8322,
9652,
357,
5450,
1378,
12567,
13,
785,
14,
1929,
7140,
14,
66,
26377,
463,
8,
198,
2,
... | 2.633001 | 703 |
import os
from rackattack.virtual import sh
| [
11748,
28686,
198,
6738,
19127,
20358,
13,
32844,
1330,
427,
628,
198
] | 3.833333 | 12 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model | [
2,
16529,
1783,
10541,
198,
2,
15069,
357,
66,
8,
5413,
10501,
13,
1439,
2489,
10395,
13,
198,
2,
49962,
739,
262,
17168,
13789,
13,
4091,
13789,
13,
14116,
287,
262,
1628,
6808,
329,
5964,
1321,
13,
198,
2,
16529,
1783,
10541,
198,... | 7.384615 | 52 |
try:
from Occiput_Interface_Biograph_mMR import Biograph_mMR_Physiology
except:
Biograph_mMR_Physiology = None
try:
from Occiput_Interface_Brain_PET import Brain_PET_Physiology
except:
Brain_PET_Physiology = None
| [
28311,
25,
198,
220,
220,
220,
422,
10775,
541,
315,
62,
39317,
62,
23286,
2384,
62,
76,
13599,
1330,
8436,
2384,
62,
76,
13599,
62,
43215,
12371,
198,
16341,
25,
198,
220,
220,
220,
8436,
2384,
62,
76,
13599,
62,
43215,
12371,
796,... | 2.705882 | 85 |
#sep8a: RH 65
#sep8b: rh65
#sep15: temp22
import serial
from datetime import datetime
import datetime
import time
import csv
import sys
start = time.time()
runtime = 310
serial_port = 'com3'
baud_rate = 9600
print(datetime.datetime.now())
ser = serial.Serial(serial_port, baud_rate)
with open(r"C:\\feb18.csv", "a+") as f:
while True:
if time.time() > start + runtime : sys.exit("end")
init = (datetime.datetime.now().time())
a = (init.hour * 60) + (init.minute) + (init.second / 60)
line = ser.readline();
line = line.decode("utf-8")
f.writelines(["1000,"])
end = (datetime.datetime.now().time())
a = ((init.hour * 60) + (init.minute) + ((init.second) / (60)))
b = ((end.hour * 60) + (end.minute) + (end.second / 60))
c = b - a
f.writelines([line.strip(), ",%s"%(a), ",%s"%(b), ",%s\n"%(c)])
print(line);
| [
2,
325,
79,
23,
64,
25,
35662,
6135,
201,
198,
2,
325,
79,
23,
65,
25,
9529,
2996,
201,
198,
2,
325,
79,
1314,
25,
20218,
1828,
201,
198,
11748,
11389,
201,
198,
6738,
4818,
8079,
1330,
4818,
8079,
201,
198,
11748,
4818,
8079,
2... | 1.891107 | 551 |
# urllib3/_collections.py
##
##
##
##
from collections import deque
from threading import RLock
__all__ = ['RecentlyUsedContainer']
class RecentlyUsedContainer(dict):
''''''
##
##
##
CLEANUP_FACTOR = 10
def _invalidate_entry(self, key):
''''''
old_entry = self.access_lookup.get(key)
if old_entry:
old_entry.is_valid = False
return old_entry
def _push_entry(self, key):
''''''
self._invalidate_entry(key)
new_entry = AccessEntry(key)
self.access_lookup[key] = new_entry
self.access_log_lock.acquire()
self.access_log.appendleft(new_entry)
self.access_log_lock.release()
def _prune_entries(self, num):
''''''
while num > 0:
self.access_log_lock.acquire()
p = self.access_log.pop()
self.access_log_lock.release()
if not p.is_valid:
continue ##
dict.pop(self, p.key, None)
self.access_lookup.pop(p.key, None)
num -= 1
def _prune_invalidated_entries(self):
''''''
self.access_log_lock.acquire()
self.access_log = deque(e for e in self.access_log if e.is_valid)
self.access_log_lock.release()
def _get_ordered_access_keys(self):
''''''
self.access_log_lock.acquire()
r = [e.key for e in self.access_log if e.is_valid]
self.access_log_lock.release()
return r
| [
2,
2956,
297,
571,
18,
47835,
4033,
26448,
13,
9078,
198,
2235,
198,
198,
2235,
198,
198,
2235,
198,
198,
2235,
628,
198,
6738,
17268,
1330,
390,
4188,
198,
198,
6738,
4704,
278,
1330,
371,
25392,
198,
198,
834,
439,
834,
796,
37250... | 2.036339 | 743 |
from pyspark.sql import SparkSession,SQLContext
from pyspark.sql.types import *
from pyspark import SparkConf
from pyspark.context import SparkContext
from pyspark.sql.functions import asc
from pyspark.sql.functions import col
from functools import reduce # For Python 3.x
from pyspark.sql import DataFrame
from pyspark.sql.functions import sha2, concat_ws
from pyspark.sql import readwriter
from pyspark.sql import utils
sc = SparkContext("local", "count app")
SQLContext = SQLContext(sc)
spark = SparkSession.builder. \
master('local'). \
appName('foo'). \
getOrCreate()
spark.sparkContext.setLogLevel('WARN')
#Reading source and lrf
df_source = spark.read.csv("gs://cicd-files/sample.csv", inferSchema= True , header= True)
df_source.show()
# Saving the data to BigQuery
df_source.write.format('bigquery') \
.option('table', 'employees.emp_data') \
.option("temporaryGcsBucket","cicd-files") \
.mode('append') \
.save()
| [
6738,
279,
893,
20928,
13,
25410,
1330,
17732,
36044,
11,
17861,
21947,
198,
6738,
279,
893,
20928,
13,
25410,
13,
19199,
1330,
1635,
198,
6738,
279,
893,
20928,
1330,
17732,
18546,
198,
6738,
279,
893,
20928,
13,
22866,
1330,
17732,
21... | 2.950464 | 323 |
from collections import OrderedDict
from color import ColorString
from prettytable import PrettyTable
| [
6738,
17268,
1330,
14230,
1068,
35,
713,
198,
198,
6738,
3124,
1330,
5315,
10100,
198,
6738,
2495,
11487,
1330,
20090,
10962,
628
] | 4.727273 | 22 |
import os
import sys
import numpy as np
from algorithm.errors import *
try:
import algorithm.internal as internal
from algorithm.dists import *
except ImportError:
raise EFSMCompilationError()
| [
11748,
28686,
198,
11748,
25064,
198,
11748,
299,
32152,
355,
45941,
198,
6738,
11862,
13,
48277,
1330,
1635,
198,
198,
28311,
25,
198,
220,
220,
220,
1330,
11862,
13,
32538,
355,
5387,
198,
220,
220,
220,
422,
11862,
13,
67,
1023,
13... | 3.071429 | 70 |
# -*- coding: utf-8 -*-
'''
Copyright (c) 2016, Virginia Tech
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
The views and conclusions contained in the software and documentation are those of the authors and should not be
interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
This material was prepared as an account of work sponsored by an agency of the United States Government. Neither the
United States Government nor the United States Department of Energy, nor Virginia Tech, nor any of their employees,
nor any jurisdiction or organization that has cooperated in the development of these materials, makes any warranty,
express or implied, or assumes any legal liability or responsibility for the accuracy, completeness, or usefulness or
any information, apparatus, product, software, or process disclosed, or represents that its use would not infringe
privately owned rights.
Reference herein to any specific commercial product, process, or service by trade name, trademark, manufacturer, or
otherwise does not necessarily constitute or imply its endorsement, recommendation, favoring by the United States
Government or any agency thereof, or Virginia Tech - Advanced Research Institute. The views and opinions of authors
expressed herein do not necessarily state or reflect those of the United States Government or any agency thereof.
VIRGINIA TECH – ADVANCED RESEARCH INSTITUTE
under Contract DE-EE0006352
#__author__ = "BEMOSS Team"
#__credits__ = ""
#__version__ = "2.0"
#__maintainer__ = "BEMOSS Team"
#__email__ = "aribemoss@gmail.com"
#__website__ = "www.bemoss.org"
#__created__ = "2014-09-12 12:04:50"
#__lastUpdated__ = "2016-03-14 11:23:33"
'''
import datetime
import sys
import time
import uuid
from cassandra import cluster
from bemoss_lib.communication.Email import EmailService
from bemoss_lib.communication.sms import SMSService
from bemoss_lib.databases.cassandraAPI import cassandraDB
from bemoss_lib.platform.BEMOSSAgent import BEMOSSAgent
from bemoss_lib.platform.agentstats import agentstats
from bemoss_lib.utils import date_converter
from bemoss_lib.utils import db_helper
from bemoss_lib.utils import find_own_ip
from bemoss_lib.utils.BEMOSS_globals import *
from bemoss_lib.utils.catcherror import catcherror
debug_agent = settings.DEBUG
from bemoss_lib.utils.offline_table_init import *
import pytz
#1.Basic variables initialized
Agents_Launch_DIR = settings.Agents_Launch_DIR
Agents_DIR = settings.Agents_DIR
db_database = settings.DATABASES['default']['NAME']
db_host = settings.DATABASES['default']['HOST']
db_port = settings.DATABASES['default']['PORT']
db_user = settings.DATABASES['default']['USER']
db_password = settings.DATABASES['default']['PASSWORD']
db_table_node_device = settings.DATABASES['default']['TABLE_node_device']
db_table_device_info = settings.DATABASES['default']['TABLE_device_info']
db_table_node_info = settings.DATABASES['default']['TABLE_node_info']
multinode_data = db_helper.get_multinode_data()
node_name = multinode_data['this_node']
myips = find_own_ip.getIPs()
_email_subject = node_name+'@'+str(myips[0])
emailService = EmailService()
#email settings
email_fromaddr = settings.NOTIFICATION['email']['fromaddr']
email_recipients = settings.NOTIFICATION['email']['recipients']
email_username = settings.NOTIFICATION['email']['username']
email_password = settings.NOTIFICATION['email']['password']
email_mailServer = settings.NOTIFICATION['email']['mailServer']
smsService = SMSService()
notify_heartbeat = settings.NOTIFICATION['heartbeat']
#Offline variables initialized
platform_table = 'platform_event'
platform_log_variables = {'agent_id':'text','start_time':'TIMESTAMP','event_id':'UUID','date_id':'text','end_time':'TIMESTAMP'}
platform_log_partition_keys = ['agent_id']
platform_log_clustering_keys = ['start_time','event_id']
#Start_time made the clustering key so that we can order the result by start_time and sort in desc order to find the latest entry
platform_variables=dict()
notification_table = 'email_sent'
notification_log_variables = {'agent_id': 'text', 'date_id': 'text', 'last_event_log_time': 'TIMESTAMP', 'email_sent_time': 'TIMESTAMP'}
notification_log_partition_keys = ['date_id']
notification_log_clustering_keys = ['last_event_log_time']
#Start_time made the clustering key so that we can order the result by start_time and sort in desc order to find the latest entry
notification_variables=dict()
class PlatformMonitorAgent(BEMOSSAgent):
"""Agent for querying WeatherUndergrounds API"""
#1. agent initialization
@catcherror('agentBackup Monitoring Failed')
#3. deviceMonitorBehavior (TickerBehavior)
#@catcherror('agentMonitoring Failed @ platformmonitoragent')
#@catcherror('email sending failed @ platformmonitoragent')
@catcherror('email send function failed')
@catcherror('SMS function failed')
if __name__ == '__main__':
# Entry point for script
try:
sys.exit(main())
except KeyboardInterrupt:
pass
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
7061,
6,
198,
15269,
357,
66,
8,
1584,
11,
6025,
9634,
198,
3237,
2489,
10395,
13,
198,
198,
7738,
396,
3890,
290,
779,
287,
2723,
290,
13934,
5107,
11,
351,
393,
12... | 3.326944 | 1,878 |
# -*- coding: utf-8 -*-
from flask_restful import Resource
from static.imports import *
from db import db
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
6738,
42903,
62,
2118,
913,
1330,
20857,
198,
198,
6738,
9037,
13,
320,
3742,
1330,
1635,
198,
6738,
20613,
1330,
20613,
628
] | 3 | 36 |
import unittest
import sys
import numpy as np
sys.path.append("../")
if __name__ == '__main__':
unittest.main()
| [
11748,
555,
715,
395,
198,
11748,
25064,
198,
11748,
299,
32152,
355,
45941,
198,
17597,
13,
6978,
13,
33295,
7203,
40720,
4943,
628,
198,
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834,
10354,
198,
220,
220,
220,
555,
715,
395,
13... | 2.622222 | 45 |
from cc3d import CompuCellSetup
from ReactionDiffusion_2D_FNSteppables import ReactionDiffusion_2D_FNSteppable
CompuCellSetup.register_steppable(steppable=ReactionDiffusion_2D_FNSteppable(frequency=1))
CompuCellSetup.run()
| [
6738,
36624,
18,
67,
1330,
3082,
84,
28780,
40786,
198,
198,
6738,
39912,
28813,
4241,
62,
17,
35,
62,
43221,
7447,
381,
2977,
1330,
39912,
28813,
4241,
62,
17,
35,
62,
43221,
7447,
381,
540,
198,
198,
7293,
84,
28780,
40786,
13,
30... | 2.860759 | 79 |
from flask_admin.contrib import sqla
from flask_security import current_user, utils
from wtforms.fields import PasswordField
##
## view models
##
# Customized User model for SQL-Admin
# Customized Role model for SQL-Admin
# Prevent administration of Roles unless the currently logged-in user has the "admin" role
| [
6738,
42903,
62,
28482,
13,
3642,
822,
1330,
19862,
5031,
198,
6738,
42903,
62,
12961,
1330,
1459,
62,
7220,
11,
3384,
4487,
198,
6738,
266,
83,
23914,
13,
25747,
1330,
30275,
15878,
198,
198,
2235,
198,
2235,
1570,
4981,
198,
2235,
1... | 3.701149 | 87 |
from xml.etree import ElementTree as ET
from graph_analyzer import create_graph_from_connection
if __name__ == '__main__':
main()
| [
6738,
35555,
13,
316,
631,
1330,
11703,
27660,
355,
12152,
198,
198,
6738,
4823,
62,
38200,
9107,
1330,
2251,
62,
34960,
62,
6738,
62,
38659,
628,
628,
198,
198,
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834,
10354,
198,
220,
220,... | 3.043478 | 46 |
#-*- coding: utf-8 -*-
import multiprocessing
import logging
try: # Python 2.7+
from logging import NullHandler
except ImportError:
LOG = logging.getLogger(__name__)
LOG.addHandler(NullHandler())
| [
2,
12,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
11748,
18540,
305,
919,
278,
198,
11748,
18931,
198,
28311,
25,
220,
1303,
11361,
362,
13,
22,
10,
198,
220,
220,
220,
422,
18931,
1330,
35886,
25060,
198,
16341,
... | 2.859155 | 71 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# FSRobo-R Package BSDL
# ---------
# Copyright (C) 2019 FUJISOFT. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation and/or
# other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ---------
from task_common.key import scenario_key
# from task_common.action_interface import action_manager
from task_common.action_interface import common
import math
import rospy
import tf
from geometry_msgs.msg import TransformStamped, Vector3
TOOL_MOVE_FIXED_Z = 100
def home(action_manager, arm_name, param):
"""
ホームに移動する
"""
result, _ = action_manager.arm_operation(arm_name, "move_home", param)
return result
def move(action_manager, arm_name, param):
"""
指定された座標に移動する
"""
result, _ = action_manager.arm_operation(arm_name, scenario_key.ARM_MOTION_MOVE, param)
return result
def move_approach_position(action_manager, arm_name, param):
"""
アプローチ座標に移動する
"""
appr_param = param.copy()
# 座標計算
position, _ = _get_approach_position(appr_param)
appr_param[scenario_key.PARAM_KEY_COMMON_X] = position[0]
appr_param[scenario_key.PARAM_KEY_COMMON_Y] = position[1]
appr_param[scenario_key.PARAM_KEY_COMMON_Z] = position[2]
# キー名変更
if scenario_key.PARAM_KEY_ARM_PLAN_KEY in param and param[scenario_key.PARAM_KEY_ARM_PLAN_KEY] is not None:
appr_param[scenario_key.PARAM_KEY_ARM_PLAN_KEY] = param[scenario_key.PARAM_KEY_ARM_PLAN_KEY] + "_AP"
result, _ = action_manager.arm_operation(arm_name, scenario_key.ARM_MOTION_MOVE, appr_param)
return result
def move_target_position(action_manager, arm_name, param):
"""
ピック位置に移動
"""
tar_param = param.copy()
tar_param[scenario_key.PARAM_KEY_COMMON_X] = 0
tar_param[scenario_key.PARAM_KEY_COMMON_Y] = 0
tar_param[scenario_key.PARAM_KEY_COMMON_RX] = 0
tar_param[scenario_key.PARAM_KEY_COMMON_RY] = 0
tar_param[scenario_key.PARAM_KEY_COMMON_RZ] = 0
# 座標計算
if scenario_key.PARAM_KEY_ARM_DOWN_DISTANCE in param:
tar_param[scenario_key.PARAM_KEY_COMMON_Z] = param[scenario_key.PARAM_KEY_ARM_DOWN_DISTANCE]
else:
tar_param[scenario_key.PARAM_KEY_COMMON_Z] = TOOL_MOVE_FIXED_Z
# キー名変更
if scenario_key.PARAM_KEY_ARM_PLAN_KEY in param and param[scenario_key.PARAM_KEY_ARM_PLAN_KEY] is not None:
tar_param[scenario_key.PARAM_KEY_ARM_PLAN_KEY] = param[scenario_key.PARAM_KEY_ARM_PLAN_KEY] + "_TP"
result, _ = action_manager.arm_operation(arm_name, scenario_key.ARM_MOTION_TOOL_MOVE, tar_param)
return result
def move_departure_position(action_manager, arm_name, param):
"""
離脱座標に移動する
"""
# ツール点から退避
dep_param = param.copy()
# if scenario_key.PARAM_KEY_ARM_DOWN_DISTANCE in param:
# dep_param[scenario_key.PARAM_KEY_COMMON_Z] = param[scenario_key.PARAM_KEY_COMMON_Z] + param[scenario_key.PARAM_KEY_ARM_DOWN_DISTANCE]
# else:
# dep_param[scenario_key.PARAM_KEY_COMMON_Z] = param[scenario_key.PARAM_KEY_COMMON_Z] + TOOL_MOVE_FIXED_Z
# result, _ = action_manager.arm_operation(arm_name, scenario_key.ARM_MOTION_LINE_MOVE, dep_param)
dep_param[scenario_key.PARAM_KEY_COMMON_X] = 0
dep_param[scenario_key.PARAM_KEY_COMMON_Y] = 0
dep_param[scenario_key.PARAM_KEY_COMMON_RX] = 0
dep_param[scenario_key.PARAM_KEY_COMMON_RY] = 0
dep_param[scenario_key.PARAM_KEY_COMMON_RZ] = 0
# 座標計算
if scenario_key.PARAM_KEY_ARM_DOWN_DISTANCE in param:
dep_param[scenario_key.PARAM_KEY_COMMON_Z] = -param[scenario_key.PARAM_KEY_ARM_DOWN_DISTANCE]
else:
dep_param[scenario_key.PARAM_KEY_COMMON_Z] = -TOOL_MOVE_FIXED_Z
# キー名変更
if scenario_key.PARAM_KEY_ARM_PLAN_KEY in param and param[scenario_key.PARAM_KEY_ARM_PLAN_KEY] is not None:
dep_param[scenario_key.PARAM_KEY_ARM_PLAN_KEY] = param[scenario_key.PARAM_KEY_ARM_PLAN_KEY] + "_DP"
result, _ = action_manager.arm_operation(arm_name, scenario_key.ARM_MOTION_TOOL_MOVE, dep_param)
return result
def get_current_joint(action_manager, arm_name, param):
"""
現在のジョイント情報を取得
"""
_, data = action_manager.arm_operation(arm_name, "get_current_joint", param)
return data["joint"]
def joint_move(action_manager, arm_name, param):
"""
指定された軸の位置に移動する
"""
result, _ = action_manager.arm_operation(arm_name, scenario_key.ARM_MOTION_JOINT_MOVE, param)
return result
def line_move(action_manager, arm_name, param):
"""
指定された座標に直線補間で移動する
"""
result, _ = action_manager.arm_operation(arm_name, scenario_key.ARM_MOTION_LINE_MOVE, param)
return result
def get_object_position(action_manager, arm_name, param):
"""
オブジェクト位置情報を取得
"""
_, data = action_manager.arm_operation(arm_name, "get_object_position", param)
return data["position"]
def relative_move(action_manager, arm_name, param):
"""
相対位置移動
"""
# 現在位置から相対的な座標に移動する
rel_param = param.copy()
_, data = action_manager.arm_operation(arm_name, "get_position", None)
pos = data["position"].position
rel_param[scenario_key.PARAM_KEY_COMMON_X] = rel_param[scenario_key.PARAM_KEY_COMMON_X] + (pos.x * 1000)
rel_param[scenario_key.PARAM_KEY_COMMON_Y] = rel_param[scenario_key.PARAM_KEY_COMMON_Y] + (pos.y * 1000)
rel_param[scenario_key.PARAM_KEY_COMMON_Z] = rel_param[scenario_key.PARAM_KEY_COMMON_Z] + (pos.z * 1000)
#rel_param["orientation"] = data["position"].orientation
e = common.quaternion_to_euler(data["position"].orientation)
rel_param[scenario_key.PARAM_KEY_COMMON_RX] = rel_param[scenario_key.PARAM_KEY_COMMON_RX] + math.degrees(e.x)
rel_param[scenario_key.PARAM_KEY_COMMON_RY] = rel_param[scenario_key.PARAM_KEY_COMMON_RY] + math.degrees(e.y)
rel_param[scenario_key.PARAM_KEY_COMMON_RZ] = rel_param[scenario_key.PARAM_KEY_COMMON_RZ] + math.degrees(e.z)
if rel_param[scenario_key.PARAM_KEY_ARM_MOVE_TYPE] == scenario_key.ARM_MOVE_TYPE_PTP:
# ptp移動
result, _ = action_manager.arm_operation(arm_name, scenario_key.ARM_MOTION_MOVE, rel_param)
else:
# line移動
result, _ = action_manager.arm_operation(arm_name, scenario_key.ARM_MOTION_LINE_MOVE, rel_param)
return result
def _get_approach_position(param):
"""
ターゲット座標からアプローチ座標を算出
"""
down_dis = TOOL_MOVE_FIXED_Z
if scenario_key.PARAM_KEY_ARM_DOWN_DISTANCE in param:
down_dis = param[scenario_key.PARAM_KEY_ARM_DOWN_DISTANCE]
t = tf.Transformer(True, rospy.Duration(10.0))
# target
tp = TransformStamped()
tp.header.frame_id = 'BASE_LINK'
tp.child_frame_id = 'OBJECT'
tp.transform.translation = Vector3(param[scenario_key.PARAM_KEY_COMMON_X], param[scenario_key.PARAM_KEY_COMMON_Y], param[scenario_key.PARAM_KEY_COMMON_Z])
tp.transform.rotation = common.degree_to_quaternion(param[scenario_key.PARAM_KEY_COMMON_RX], param[scenario_key.PARAM_KEY_COMMON_RY], param[scenario_key.PARAM_KEY_COMMON_RZ])
t.setTransform(tp)
# approach
ap = TransformStamped()
ap.header.frame_id = 'OBJECT'
ap.child_frame_id = 'APPROACH'
ap.transform.translation = Vector3(0, 0, -down_dis)
ap.transform.rotation = common.degree_to_quaternion(0, 0, 0)
t.setTransform(ap)
position, quaternion = t.lookupTransform('BASE_LINK', 'APPROACH', rospy.Time(0))
return position, quaternion
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
2,
376,
12562,
20391,
12,
49,
15717,
347,
10305,
43,
198,
2,
45337,
198,
2,
15069,
357,
34,
8,
13130,
376,
52,... | 2.296813 | 3,797 |
from metablock import Metablock
| [
6738,
10523,
5354,
1330,
3395,
397,
5354,
628
] | 4.125 | 8 |
"""
Ensures that the protocol handlers for the control protocol and the network
protocol work as expected.
"""
from collections import defaultdict
import socket
import threading
import traceback
import unittest
from lns import control_proto, reactor
# Change this to some port that is available on your machine, so that the
# control protocol handler and the control protocol client can communicate
TEST_CONTROL_PORT = 4097
# How long to check back with a threading.Event, so that the reactor runner
# thread can die within a reasonable time
RUNNER_CHECK_TIME = 2
def reactor_runner_thread(reactor, handler, event):
"Steps a reactor, until the given event is triggered."
while not event.isSet():
reactor.poll(RUNNER_CHECK_TIME)
handler.close()
class MockNetworkHandler:
"""
A network handler created to test the control protocol handler, which
provides static data for testing.
"""
if __name__ == '__main__':
unittest.main()
| [
37811,
198,
4834,
82,
942,
326,
262,
8435,
32847,
329,
262,
1630,
8435,
290,
262,
3127,
198,
11235,
4668,
670,
355,
2938,
13,
198,
37811,
198,
6738,
17268,
1330,
4277,
11600,
198,
11748,
17802,
198,
11748,
4704,
278,
198,
11748,
12854,
... | 3.492806 | 278 |
"""
ogn_lib.client
--------------
This module contains methods and classes related to opening and managing a
connection to OGN's APRS servers.
"""
import logging
import socket
import time
import ogn_lib
logger = logging.getLogger(__name__)
class OgnClient:
"""
Holds an APRS session.
Provides methods for listening to received messages and managing
the session.
"""
APRS_SERVER = 'aprs.glidernet.org'
APRS_PORT_FULL = 10152
APRS_PORT_FILTER = 14580
SOCKET_KEEPALIVE = 240
def __init__(self, username, passcode='-1', server=None, port=None,
filter_=None):
"""
Creates a new OgnClient instance.
:param str username: username used for logging in the APRS system
:param str passcode: a valid passcode for given `username`
:param server: an optional addres of an APRS server (defaults to
aprs.glidernet.org)
:type server: str or None
:param port: optional port of the APRS server (defaults to 10152 or
14580)
:type port: int or None
:param filter_: optional `filter` parameter to be passed to the APRS
server
:type filter_: str or None
"""
self.username = username
self.passcode = passcode
self.server = server or self.APRS_SERVER
self.port = port or (self.APRS_PORT_FILTER if filter_
else self.APRS_PORT_FULL)
self.filter_ = filter_
self._authenticated = False
self._kill = False
self._last_send = -1
self._connection_retries = 50
def connect(self):
"""
Opens a socket connection to the APRS server and authenticates the
client.
:raise ogn_lib.exceptions.LoginError: if an authentication error has
occured
"""
logger.info('Connecting to %s:%d as %s:%s. Filter: %s',
self.server, self.port, self.username, self.passcode,
self.filter_ if self.filter_ else 'not set')
self._socket = socket.create_connection((self.server, self.port))
self._socket.settimeout(15)
self._sock_file = self._socket.makefile()
conn_response = self._sock_file.readline().strip()
logger.debug('Connection response: %s', conn_response)
auth = self._gen_auth_message()
logger.debug('Sending authentication message: %s', auth)
self.send(auth)
login_status = self._sock_file.readline().strip()
logger.debug('Login status: %s', login_status.strip())
try:
self._authenticated = self._validate_login(login_status)
except (ogn_lib.exceptions.LoginError,
ogn_lib.exceptions.ParseError) as e:
logger.exception(e)
logger.fatal('Failed to authenticate')
self._sock_file.close()
self._socket.close()
logger.info('Socket closed')
raise
self._kill = False
def receive(self, callback, reconnect=True, parser=None):
    """
    Receives the messages received from the APRS stream and passes them to
    the callback function.

    :param callback: the callback function which takes one parameter
        (the received message)
    :type callback: callable
    :param bool reconnect: True if the client should automatically restart
        after the connection drops
    :param parser: function that parses the APRS messages or None if
        callback should receive raw messages
    :type parser: callable or None
    """
    # The client might be ran for extended periods of time. Although using
    # a recursive call to enter the inner for loop would be considered
    # a "nicer" solution, it would also have the potential to _eventually_
    # exceed the maximum recursion depth (in cPython, other implementations
    # might support tail optimized calls).
    # This is why this function is written with a double while loop.
    while not self._kill:
        try:
            self._receive_loop(callback, parser)
        except (BrokenPipeError, ConnectionResetError, socket.error,
                socket.timeout) as e:
            logger.error('Socket connection dropped')
            logger.exception(e)

        # _receive_loop() also returns normally on EOF or when a shutdown
        # was requested via self._kill.
        if self._kill or not reconnect:
            logger.info('Exiting OgnClient.receive()')
            return

        self._reconnect(retries=self._connection_retries, wait_period=15)
def _reconnect(self, retries=1, wait_period=15):
    """
    Attempts to recover a failed server connection.

    :param int retries: number of times reestablishing connection is
        attempted
    :param float wait_period: amount of seconds between two sequential
        retries
    :raises ConnectionError: if the connection could not be reestablished
        within the given number of retries
    """
    logger.info('Trying to reconnect...')
    while retries > 0:
        try:
            self.connect()
            # Successful recovery is an INFO-level event; the previous
            # version logged it via logger.error by mistake.
            logger.info('Successfully reconnected')
            break
        except (BrokenPipeError, ConnectionResetError, socket.error,
                socket.timeout) as e:
            logger.error('Reconnection attempt failed')
            logger.exception(e)
            retries -= 1
            # Only sleep if another attempt will actually be made; the
            # previous version also slept before raising on the last
            # failure.
            if retries > 0:
                time.sleep(wait_period)
    else:
        # while/else: reached only when the loop exhausted its retries
        # without hitting the `break` above.
        raise ConnectionError('failed to reconnect to the APRS server')
def _receive_loop(self, callback, parser):
    """
    The main loop of the receive function.

    :param callback: the callback function which takes one parameter
        (the received message)
    :type callback: callable
    :param parser: function that parses the APRS messages or None if
        callback should receive raw messages
    :type parser: callable or None
    """
    line = None
    # An empty string from readline() means the server closed the stream;
    # self._kill is the cooperative shutdown flag set elsewhere.
    while line != '' and not self._kill:
        line = self._sock_file.readline().strip()
        logger.debug('Received APRS message: %s', line)
        if line.startswith('#'):
            # Lines starting with '#' are server status/comment messages
            # and are not forwarded to the callback.
            logger.debug('Received server message: %s', line)
        elif parser:
            try:
                callback(parser(line))
            except ogn_lib.exceptions.ParseError as e:
                # A single malformed record is logged and dropped so it
                # cannot take the whole receive loop down.
                logger.exception(e)
        else:
            logger.debug('Returning raw APRS message to callback')
            callback(line)

        # Emit a '#keepalive' if nothing has been sent for a while.
        self._keepalive()
def send(self, message, retries=0, wait_period=0):
    """
    Sends the message to the APRS server.

    :param str message: message to be sent
    :param int retries: number of times this message has already been
        retried after a connection failure (internal; callers normally
        leave the default)
    :param float wait_period: seconds between reconnection attempts made
        when the send fails
    """
    try:
        # Strip any leading/trailing newlines, then append exactly one so
        # the server always sees a single, well-terminated line.
        message_nl = message.strip('\n') + '\n'
        logger.info('Sending: %s', message_nl)
        self._socket.sendall(message_nl.encode())
        # Record when data was last sent for the keepalive bookkeeping.
        self._last_send = time.time()
    except (BrokenPipeError, ConnectionResetError, socket.error,
            socket.timeout):
        if retries < 3:
            # Re-establish the connection and retry this message; gives up
            # (re-raises) after the third failed resend.
            self._reconnect(retries=3, wait_period=wait_period)
            self.send(message, retries=retries + 1)
        else:
            raise
def _keepalive(self):
    """
    Sends the keep alive message to the APRS server (if necessary).

    A '#keepalive' line is emitted whenever nothing has been sent for
    more than SOCKET_KEEPALIVE seconds.
    """
    idle_for = time.time() - self._last_send
    if idle_for <= self.SOCKET_KEEPALIVE:
        return

    logger.info('No messages sent for %.0f seconds; sending keepalive',
                idle_for)
    self.send('#keepalive')
def _gen_auth_message(self):
    """
    Generates an APRS authentication message.

    The line has the form 'user <name> pass <code> vers <title> <version>',
    optionally followed by ' filter <expr>' when a filter is set.

    :return: authentication message
    :rtype: str
    """
    fields = (self.username, self.passcode,
              ogn_lib.__title__, ogn_lib.__version__)
    message = 'user {} pass {} vers {} {}'.format(*fields)

    if self.filter_:
        message = message + ' filter {}'.format(self.filter_)

    return message
def _validate_login(self, message):
    """
    Verifies that the login to the APRS server was successful.

    Expected shape (per the sample below):
        # logresp <user> <status>, server <name>

    :param str message: authentication response from the server
    :return: True if user is authenticated to send messages
    :rtype: bool
    :raises ogn_lib.exceptions.LoginError: if the login was unsuccessful
    :raises ogn_lib.exceptions.ParseError: if the response cannot be parsed
    """
    # Sample response: # logresp user unverified, server GLIDERN3
    if not message.startswith('# logresp'):
        raise ogn_lib.exceptions.LoginError(
            'Not a login message: ' + message)

    try:
        user_info, serv_info = message.split(', ')
        # Skip the '# logresp ' prefix (10 chars) and the 'server ' prefix
        # (7 chars) respectively.
        username, status = user_info[10:].split(' ')
        server = serv_info[7:]
    except (IndexError, ValueError):
        raise ogn_lib.exceptions.ParseError(
            'Unable to parse login message: ' + message)

    if status == 'verified':
        logger.info('Successfully connected to %s as %s', server, username)
        return True

    if status == 'unverified':
        if self.passcode != '-1':
            # A real passcode was supplied but rejected.
            logger.info('Connected to %s', server)
            logger.warn('Wrong username/passcode, continuing in r/o mode')
        else:
            logger.info('Connected to %s as guest', server)
        return False

    raise ogn_lib.exceptions.LoginError('Login failed: ' + message)
| [
37811,
198,
2360,
62,
8019,
13,
16366,
198,
26171,
198,
198,
1212,
8265,
4909,
5050,
290,
6097,
3519,
284,
4756,
290,
11149,
257,
198,
38659,
284,
440,
16630,
338,
3486,
6998,
9597,
13,
198,
37811,
198,
198,
11748,
18931,
198,
11748,
... | 2.199727 | 4,396 |
''' Pointwise wrapper '''
import os
# --- Python/system level imports
from subprocess import call
from string import Template
# --- OpenMDAO main and library imports
from openmdao.api import Problem, Group, ExternalCode, IndepVarComp
class Pointwise(ExternalCode):
    ''' OpenMDAO component for executing Pointwise as an external code.

    NOTE(review): only the class shell is visible in this excerpt; the
    file wrapper/template referenced below is expected to be defined
    elsewhere in the full module.
    '''

    # -------------------------------------------
    # --- File Wrapper/Template for Pointwise ---
    # -------------------------------------------
if __name__ == "__main__":
    # -------------------------
    # --- Default Test Case ---
    # -------------------------
    # Smoke test: build a model containing a single Pointwise component
    # and execute it once.
    p = Problem(root=Group())
    p.root.add('pointwise', Pointwise())
    p.setup()
    p.run()
| [
7061,
6,
6252,
3083,
29908,
705,
7061,
198,
11748,
28686,
198,
198,
2,
11420,
11361,
14,
10057,
1241,
17944,
198,
6738,
850,
14681,
1330,
869,
198,
6738,
4731,
1330,
37350,
198,
198,
2,
11420,
4946,
44,
5631,
46,
1388,
290,
5888,
1794... | 3.507538 | 199 |
import pandas as pd
import numpy as np
from matplotlib import pyplot as plt
if __name__ == "__main__":
main()
| [
11748,
19798,
292,
355,
279,
67,
198,
11748,
299,
32152,
355,
45941,
198,
6738,
2603,
29487,
8019,
1330,
12972,
29487,
355,
458,
83,
628,
628,
628,
628,
628,
628,
628,
198,
361,
11593,
3672,
834,
6624,
366,
834,
12417,
834,
1298,
198,... | 2.666667 | 48 |
import logging
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
# torch.manual_seed(0)
# torch.backends.cudnn.deterministic = True
# torch.backends.cudnn.benchmark = False
from pyro.distributions import MultivariateNormal, Normal, Independent
from sklearn.cluster import KMeans, SpectralClustering
from sklearn.metrics import adjusted_rand_score
import scipy
from scipy.sparse import csgraph
from scipy.sparse.linalg import eigsh
import sys
sys.path.append('/home/REDACTED/chf-github/model/')
from utils import check_has_missing, quad_function, convert_XY_pack_pad
sys.path.append('../evaluation/')
from eval_utils import get_cluster_swap_metric, get_cluster_pear_metric
sys.path.append('../plot/')
from plot_utils import plot_latent_labels, plot_delta_comp
import matplotlib.pylab as pylab
params = {'legend.fontsize': 'x-large',
# 'figure.figsize': (10,6),
'axes.labelsize': 'x-large',
'axes.titlesize':'x-large',
'xtick.labelsize':'x-large',
'ytick.labelsize':'x-large'}
pylab.rcParams.update(params)
# f.write(' & '.join([args.model_name] + line_str) + '\\\\' + '\n')
# f.close()
if __name__=='__main__':
main() | [
11748,
18931,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
28034,
198,
11748,
28034,
13,
20471,
355,
299,
77,
198,
11748,
28034,
13,
20471,
13,
45124,
355,
376,
198,
198,
2,
28034,
13,
805,
723,
62,
28826,
7,
15,
8,
198,
2,
28034... | 2.545263 | 475 |
from django.apps import AppConfig
| [
6738,
42625,
14208,
13,
18211,
1330,
2034,
16934,
198
] | 3.777778 | 9 |
# Copyright 2020 Makani Technologies LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Automatically generated crosswind controllers.
This file was generated by: analysis/control/generate_crosswind_controllers.m.
"""
from makani.control import control_types as m
def GetControllers(wing_serial):
  """Returns PID gains for crosswind airspeed controllers.

  Args:
    wing_serial: One of the m.kWingSerial* enum values.

  Returns:
    A dict mapping 'airspeed' to a dict of PID gains ('kp', 'ki', 'kd').
  """
  gains_by_serial = {
      m.kWingSerialOktoberKite01: {
          'kp': 3.63e+03,
          'ki': 2.08e+03,
          'kd': 0.00
      },
  }
  assert wing_serial in gains_by_serial, (
      'wing_serial %d was not recognized' % wing_serial)
  return {
      'airspeed': gains_by_serial[wing_serial],
  }
| [
2,
15069,
12131,
15841,
3216,
21852,
11419,
198,
2,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
198,
2,
345,
743,
407,
779,
428,
2393,
2845,
287,
11846,
351,
262,
13789,
13,
198,
2,
... | 3.177966 | 354 |
# Kata url: https://www.codewars.com/kata/58bf9bd943fadb2a980000a7.
from typing import List
| [
2,
509,
1045,
19016,
25,
3740,
1378,
2503,
13,
19815,
413,
945,
13,
785,
14,
74,
1045,
14,
3365,
19881,
24,
17457,
24,
3559,
69,
324,
65,
17,
64,
4089,
2388,
64,
22,
13,
198,
198,
6738,
19720,
1330,
7343,
628
] | 2.292683 | 41 |
import json
import time
| [
11748,
33918,
198,
11748,
640,
628,
628,
628,
628
] | 3.444444 | 9 |
"""
This file is meant to be a fully working mining client to interact with the REST api defined in the node-files
directory. Other client software can be written to mine and transact with the API however.
"""
import json
import sys
import os
import requests
import time
import ecdsa
from hashlib import sha256
from pprint import pprint
from uuid import uuid4
NODE_URL = 'http://127.0.0.1:1337/'
CLIENT_MODE = ''
TRANSACTION_GOAL = 10
# Initialize client RSA credentials
"""
Transaction Submission
"""
# Creates a new transaction dict
# Add a user_data dict to the end of the transaction with a valid signature
# Creates a new transaction output
"""
Block Submission
"""
"""
Misc Functions
"""
# Returns the input and output sum of the transaction
# Returns a hash hex digest of any dict object
# Returns a hash digest of any dict object
initialize()
utxo_header = {
'pk': str(vk.to_string().hex()),
'mode': 'confirmed'
}
print("UTXO OF THIS CLIENT IS...")
print("[CONFIRMED] ", requests.post(f"{NODE_URL}/node/chain/utxo", json.dumps(utxo_header)).json())
utxo_header['mode'] = 'unconfirmed'
print("[UNCONFIRMED] ", requests.post(f"{NODE_URL}/node/chain/utxo", json.dumps(utxo_header)).json())
while CLIENT_MODE not in ['MINE', 'TRANSACT', 'CANCEL']:
CLIENT_MODE = str(input("Choose client mode: [TRANSACT/MINE] "))
print("Client mode: " + CLIENT_MODE)
if CLIENT_MODE == 'MINE':
block_loop = 0
while block_loop < 1:
try:
block_loop = int(input("Number of blocks to mine: "))
except:
print("Enter a whole number value greater than 0...")
i = block_loop
while i > 0:
print("Constructing Block...")
if create_block():
i -= 1
print(block_loop - i)
if CLIENT_MODE == 'TRANSACT':
output_count = None
output_list = []
transaction_fee = None
# Get the amount of outputs in this transaction
while type(output_count) is not int:
try:
output_count = int(input("Enter the number of outputs in your transaction: "))
except:
print("Enter a whole number...")
continue
break
# Add all outputs to output list
for j in range(output_count):
output_value = None
output_receiver = None
while type(output_value) is not float:
try:
output_value = float(input("Enter the value of this output up to the hundredths place: "))
except:
print("Enter a number...")
continue
output_receiver = str(input("Enter the recipient's address: "))
output_list.append(create_transaction_output(output_value, output_receiver))
# Get the desired mining fee
while type(transaction_fee) is not int:
try:
transaction_fee = int(input("Enter a whole number for the mining fee: "))
except:
print("Enter a whole number address...")
continue
break
final_transaction = create_transaction(output_list, transaction_fee)
if len(final_transaction) == 2 and type(final_transaction) is tuple:
final_transaction = sign_transaction(final_transaction[0], final_transaction[1])
print(f"Transaction submitted: {json.dumps(final_transaction, indent=4)}")
else:
print(f"[ERROR]: {final_transaction}")
print("Transaction Signed...")
res = requests.post(f"{NODE_URL}/node/tx/submit", json.dumps(final_transaction))
print("[RESPONSE]")
print(res.text)
print("CURRENT BALANCE")
utxo_header['mode'] = 'confirmed'
print("[CONFIRMED] ", requests.post(f"{NODE_URL}/node/chain/utxo", json.dumps(utxo_header)).json())
utxo_header['mode'] = 'unconfirmed'
print("[UNCONFIRMED] ", requests.post(f"{NODE_URL}/node/chain/utxo", json.dumps(utxo_header)).json())
print("UTXO OF ADDRESS (1): ")
utxo_header['pk'] = '1'
utxo_header['mode'] = 'confirmed'
print("[CONFIRMED] ", requests.post(f"{NODE_URL}/node/chain/utxo", json.dumps(utxo_header)).json())
utxo_header['mode'] = 'unconfirmed'
print("[UNCONFIRMED] ", requests.post(f"{NODE_URL}/node/chain/utxo", json.dumps(utxo_header)).json())
print("UTXO OF ADDRESS (2): ")
utxo_header['pk'] = '2'
utxo_header['mode'] = 'confirmed'
print("[CONFIRMED] ", requests.post(f"{NODE_URL}/node/chain/utxo", json.dumps(utxo_header)).json())
utxo_header['mode'] = 'unconfirmed'
print("[UNCONFIRMED] ", requests.post(f"{NODE_URL}/node/chain/utxo", json.dumps(utxo_header)).json())
| [
37811,
198,
1212,
2393,
318,
4001,
284,
307,
257,
3938,
1762,
9691,
5456,
284,
9427,
351,
262,
30617,
40391,
5447,
287,
262,
10139,
12,
16624,
198,
34945,
13,
3819,
5456,
3788,
460,
307,
3194,
284,
6164,
290,
48878,
351,
262,
7824,
21... | 2.547968 | 1,772 |
import numpy as np
def pade(time,signal,sigma=100.0,max_len=None,w_min=0.0,w_max=10.0,w_step=0.01,read_freq=None):
    """ Routine to take the Fourier transform of a time signal using the method
    of Pade approximants.

    Inputs:
        time:      (list or Numpy NDArray) signal sampling times
        signal:    (list or Numpy NDArray) signal values; converted to a
                   float array internally, so integer sequences are fine

    Optional Inputs:
        sigma:     (float) signal damp factor, yields peaks with
                   FWHM of 2/sigma
        max_len:   (int) maximum number of points to use in Fourier transform
        w_min:     (float) lower returned frequency bound
        w_max:     (float) upper returned frequency bound
        w_step:    (float) returned frequency bin width
        read_freq: (NDArray or None) explicit frequencies to evaluate at;
                   overrides w_min/w_max/w_step when given

    Returns:
        fsignal:   (complex NDArray) transformed signal
        frequency: (NDArray) transformed signal frequencies

    From: Bruner, Adam, Daniel LaMaster, and Kenneth Lopata. "Accelerated
        broadband spectra using transition signal decomposition and Pade
        approximants." Journal of chemical theory and computation 12.8
        (2016): 3741-3750.
    """
    # Work on a float copy: the original in-place `signal *= damp` below
    # raised a casting error whenever an integer sequence was passed in.
    signal = np.asarray(signal, dtype=float)
    # center signal about zero
    signal = signal - signal[0]

    stepsize = time[1] - time[0]

    # Damp the signal with an exponential decay.
    damp = np.exp(-(stepsize*np.arange(len(signal)))/float(sigma))
    signal *= damp

    M = len(signal)
    N = int(np.floor(M / 2))

    # Check signal length, and truncate if too long
    if max_len:
        if M > max_len:
            N = int(np.floor(max_len / 2))

    # G and d are (N-1) x (N-1)
    # d[k] = -signal[N+k] for k in range(1,N)
    d = -signal[N+1:2*N]

    try:
        from scipy.linalg import solve_toeplitz
        # Fast O(N^2) Levinson solver: G is Toeplitz with first column
        # signal[N:2*N-1] and first row signal[1], signal[N-1], ..., signal[2].
        b = solve_toeplitz((signal[N:2*N-1],
            np.hstack((signal[1],signal[N-1:1:-1]))),d,check_finite=False)
    except (ImportError,np.linalg.LinAlgError):
        # Fallback (no scipy, or the Levinson recursion failed): build the
        # full Toeplitz system explicitly and solve with LAPACK.
        # G[k,m] = signal[N - m + k] for m,k in range(1,N)
        G = signal[N + np.arange(1,N)[:,None] - np.arange(1,N)]
        b = np.linalg.solve(G,d)

    # Now make b Nx1 where b0 = 1
    b = np.hstack((1,b))

    # a[k] = sum_{m<=k} b[m]*signal[k-m]: multiply by the lower-triangular
    # Toeplitz matrix L[k,m] = signal[k-m].  Built with plain numpy indexing
    # so it also works without scipy; the previous version called scipy's
    # toeplitz() here, which is undefined on the ImportError fallback path.
    k = np.arange(N)
    a = np.dot(np.tril(signal[np.abs(k[:,None] - k)]), b)

    p = np.poly1d(np.flip(a))
    q = np.poly1d(np.flip(b))

    if read_freq is None:
        # choose frequencies to evaluate over
        frequency = np.arange(w_min,w_max,w_step)
    else:
        frequency = read_freq

    # Evaluate the rational approximant on the unit circle.
    W = np.exp(-1j*frequency*stepsize)
    fsignal = p(W)/q(W)

    return fsignal, frequency
| [
11748,
299,
32152,
355,
45941,
198,
198,
4299,
279,
671,
7,
2435,
11,
12683,
282,
11,
82,
13495,
28,
3064,
13,
15,
11,
9806,
62,
11925,
28,
14202,
11,
86,
62,
1084,
28,
15,
13,
15,
11,
86,
62,
9806,
28,
940,
13,
15,
11,
86,
... | 2.087706 | 1,334 |
#------------------------------------------------------------------------------
# Copyright (c) 2013, Nucleic Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#------------------------------------------------------------------------------
import wx
from atom.api import Int, Typed
from enaml.widgets.combo_box import ProxyComboBox
from .wx_control import WxControl
# cyclic notification guard flags
INDEX_GUARD = 0x1
class WxComboBox(WxControl, ProxyComboBox):
    """ A Wx implementation of an Enaml ProxyComboBox.

    """
    #: A reference to the widget created by the proxy.
    widget = Typed(wx.ComboBox)

    #: Cyclic notification guard. This is a bitfield of multiple guards;
    #: INDEX_GUARD is held while the index is changed programmatically so
    #: that on_index_changed does not echo the change back.
    _guard = Int(0)

    #--------------------------------------------------------------------------
    # Initialization API
    #--------------------------------------------------------------------------
    def create_widget(self):
        """ Create the wx.ComboBox widget (read-only style).

        """
        self.widget = wx.ComboBox(self.parent_widget(), style=wx.CB_READONLY)

    def init_widget(self):
        """ Create and initialize the underlying widget.

        """
        super(WxComboBox, self).init_widget()
        d = self.declaration
        self.set_items(d.items)
        self.set_index(d.index)
        self.set_editable(d.editable)
        # Forward wx selection events to the declaration object.
        self.widget.Bind(wx.EVT_COMBOBOX, self.on_index_changed)

    #--------------------------------------------------------------------------
    # Event Handlers
    #--------------------------------------------------------------------------
    def on_index_changed(self, event):
        """ The signal handler for the index changed signal.

        """
        # Ignore events caused by a programmatic set_index() call.
        if not self._guard & INDEX_GUARD:
            self.declaration.index = self.widget.GetCurrentSelection()

    #--------------------------------------------------------------------------
    # ProxyComboBox API
    #--------------------------------------------------------------------------
    def set_items(self, items):
        """ Set the items of the ComboBox.

        """
        widget = self.widget
        # Preserve the current selection index across the item reset.
        sel = widget.GetCurrentSelection()
        widget.SetItems(items)
        widget.SetSelection(sel)

    def set_index(self, index):
        """ Set the current index of the ComboBox.

        """
        # Hold the guard so on_index_changed ignores the resulting event.
        self._guard |= INDEX_GUARD
        try:
            self.widget.SetSelection(index)
        finally:
            self._guard &= ~INDEX_GUARD

    def set_editable(self, editable):
        """ Set whether the combo box is editable.

        This is not supported on wx.

        """
        pass
| [
2,
10097,
26171,
198,
2,
15069,
357,
66,
8,
2211,
11,
399,
14913,
291,
7712,
4816,
13,
198,
2,
198,
2,
4307,
6169,
739,
262,
2846,
286,
262,
40499,
347,
10305,
13789,
13,
198,
2,
198,
2,
383,
1336,
5964,
318,
287,
262,
2393,
279... | 3.019956 | 902 |
"""
elcaro-contract.py
MIT License
Copyright 2017 Splyse Inc.
"""
from boa.blockchain.vm.System.ExecutionEngine import GetScriptContainer,GetExecutingScriptHash
from boa.blockchain.vm.Neo.Transaction import *
from boa.blockchain.vm.Neo.Runtime import GetTrigger,CheckWitness,Notify,Log
from boa.blockchain.vm.Neo.TriggerType import Application,Verification
from boa.blockchain.vm.Neo.Output import GetScriptHash,GetValue,GetAssetId
from boa.blockchain.vm.Neo.Storage import GetContext,Get,Put
from boa.code.builtins import concat,take
# 20-byte script hash identifying the contract owner.
OWNER = b'\x8e\x5b\x17\x79\x3c\xa9\xf5\xd9\x13\x1d\x67\x4d\xfc\x00\x0f\x5a\x65\x58\xa4\x65'
# 32-byte asset id of the GAS system asset -- NOTE(review): verify this
# matches the target Neo network's asset registry.
GAS_ASSET_ID = b'\xe7\x2d\x28\x69\x79\xee\x6c\xb1\xb7\xe6\x5d\xfd\xdf\xb2\xe3\x84\x10\x0b\x8d\x14\x8e\x77\x58\xde\x42\xe4\x16\x8b\x71\x79\x2c\x60';
# Storage-key prefix given special handling; the name suggests keys with
# this prefix are rejected -- confirm against the (not visible) contract body.
BADPREFIX='price/'
| [
37811,
198,
417,
7718,
78,
12,
28484,
13,
9078,
198,
198,
36393,
13789,
198,
198,
15269,
2177,
1338,
306,
325,
3457,
13,
198,
37811,
198,
198,
6738,
1489,
64,
13,
9967,
7983,
13,
14761,
13,
11964,
13,
23002,
1009,
13798,
1330,
3497,
... | 2.136364 | 374 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-04-30 18:30
from __future__ import unicode_literals
from django.db import migrations
# Job-board sources introduced by this migration.
NEW_SOURCES = [
    {'code': 'trabajospython', 'name': 'Trabajos Python', 'url': 'http://www.trabajospython.com/'},
    {'code': 'workinstartups', 'name': 'Work In Startups', 'url': 'http://workinstartups.com/'},
    {'code': 'workingnomads', 'name': 'Working Nomads', 'url': 'https://www.workingnomads.co/'},
]


def load_sources(apps, schema_editor):
    '''Insert every entry of NEW_SOURCES as a Source row (forward step).'''
    Source = apps.get_model('remotes', 'Source')
    for entry in NEW_SOURCES:
        Source(**entry).save()


def delete_sources(apps, schema_editor):
    '''Remove the rows added by load_sources (reverse step).'''
    Source = apps.get_model('remotes', 'Source')
    for entry in NEW_SOURCES:
        Source.objects.filter(code=entry['code']).delete()
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
2,
2980,
515,
416,
37770,
352,
13,
1157,
319,
2177,
12,
3023,
12,
1270,
1248,
25,
1270,
198,
6738,
11593,
37443,
834,
1330,
28000,
1098,
62,
17201,
874,
198,
198,
6738... | 2.525568 | 352 |
from django.db import models
from intragram.users import models as user_model
# Model package for representing posts -- created by running
# 'django-admin startapp posts' in the terminal.
# When building the Django data models, do not add creation/update date
# fields to Post/Comment individually; inherit them from TimeStampedModel
# instead.
| [
6738,
42625,
14208,
13,
9945,
1330,
4981,
198,
6738,
9913,
6713,
13,
18417,
1330,
4981,
355,
2836,
62,
19849,
198,
2,
1281,
167,
98,
120,
220,
169,
239,
250,
169,
246,
226,
47991,
246,
166,
116,
108,
23821,
250,
226,
47991,
250,
316... | 1.235 | 200 |
from cool.modules.ASA.asa_0_login_info import ip, my_headers, auth_header
import requests
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
# 创建内部Object
# rest-api image flash:/asa-restapi-131-lfbff-k8.SPA
# rest-api agent
# 文档:https://192.168.20.4/doc/#
# 创建外部Object
if __name__ == "__main__":
    # Create the inside and outside network objects (id 57) on the ASA.
    # NOTE(review): create_in_obj/create_out_obj are expected to be defined
    # earlier in this module; they are not visible in this excerpt.
    create_in_obj(57, ip)
    create_out_obj(57, ip)
| [
6738,
3608,
13,
18170,
13,
1921,
32,
13,
15462,
62,
15,
62,
38235,
62,
10951,
1330,
20966,
11,
616,
62,
50145,
11,
6284,
62,
25677,
198,
11748,
7007,
198,
11748,
2956,
297,
571,
18,
198,
333,
297,
571,
18,
13,
40223,
62,
40539,
65... | 2.167598 | 179 |
import sys
import math
import random
# Smoke test: sort a small reversed list and print the result.
# NOTE(review): merge_sort() and control() are defined elsewhere in this
# module; they are not visible in this excerpt.
A = [7,6,5,4,3,2,1]
merge_sort(A)
print(A)

# TESTING
START = -99
STOP = 99
LIMIT = 1000
# Three independent random fixtures (note: `iter` shadows the builtin).
RANDOM1 = [random.randint(START, STOP) for iter in range(LIMIT)]
#print(RANDOM1)
RANDOM2 = [random.randint(START, STOP) for iter in range(LIMIT)]
#print(RANDOM2)
RANDOM3 = [random.randint(START, STOP) for iter in range(LIMIT)]
#print(RANDOM3)
A1 = [3,2,1,4,5,0,8,7]
A2 = [10,9,8,7,6,5,4,3,2,1]
A3 = [10,0,9,2,8,3,7,4,6,5]
# Compare against control(), presumably a trusted reference sort --
# confirm in the full module.
assert merge_sort(A1) == control(A1)
assert merge_sort(A2) == control(A2)
assert merge_sort(A3) == control(A3)
assert merge_sort(RANDOM1) == control(RANDOM1)
assert merge_sort(RANDOM2) == control(RANDOM2)
assert merge_sort(RANDOM3) == control(RANDOM3)
| [
11748,
25064,
198,
11748,
10688,
198,
11748,
4738,
198,
198,
32,
796,
685,
22,
11,
21,
11,
20,
11,
19,
11,
18,
11,
17,
11,
16,
60,
198,
647,
469,
62,
30619,
7,
32,
8,
198,
4798,
7,
32,
8,
198,
2,
43001,
2751,
198,
198,
2257,... | 2.049563 | 343 |
import logging
import torch
import torch.nn as nn
import torch.utils.checkpoint as cp
from mmcv.cnn import constant_init, kaiming_init
from mmcv.runner import load_checkpoint
from ...registry import BACKBONES
import torch.nn.functional as F
from torch.nn.modules.batchnorm import _BatchNorm
import numpy as np
def conv3x3(in_planes, out_planes, stride=1, dilation=1):
    """3x3 convolution with padding.

    Padding is set equal to the dilation rate, which preserves the spatial
    size for stride 1.  No bias is used.
    """
    conv_kwargs = {
        'kernel_size': 3,
        'stride': stride,
        'padding': dilation,
        'dilation': dilation,
        'bias': False,
    }
    return nn.Conv2d(in_planes, out_planes, **conv_kwargs)
@BACKBONES.register_module
class ResNet_DNR(nn.Module):
"""ResNet backbone.
Args:
depth (int): Depth of resnet, from {18, 34, 50, 101, 152}.
num_stages (int): Resnet stages, normally 4.
strides (Sequence[int]): Strides of the first block of each stage.
dilations (Sequence[int]): Dilation of each stage.
out_indices (Sequence[int]): Output from which stages.
style (str): `pytorch` or `caffe`. If set to "pytorch", the stride-two
layer is the 3x3 conv layer, otherwise the stride-two layer is
the first 1x1 conv layer.
frozen_stages (int): Stages to be frozen (all param fixed). -1 means
not freezing any parameters.
bn_eval (bool): Whether to set BN layers to eval mode, namely, freeze
running stats (mean and var).
bn_frozen (bool): Whether to freeze weight and bias of BN layers.
partial_bn (bool): Whether to freeze weight and bias of **all but the first** BN layers.
with_cp (bool): Use checkpoint or not. Using checkpoint will save some
memory while slowing down the training speed.
"""
arch_settings = {
18: (BasicBlock, (2, 2, 2, 2)),
34: (BasicBlock, (3, 4, 6, 3)),
50: (Bottleneck, (3, 4, 6, 3)),
101: (Bottleneck, (3, 4, 23, 3)),
152: (Bottleneck, (3, 8, 36, 3))
}
| [
11748,
18931,
198,
11748,
28034,
198,
11748,
28034,
13,
20471,
355,
299,
77,
198,
11748,
28034,
13,
26791,
13,
9122,
4122,
355,
31396,
198,
198,
6738,
8085,
33967,
13,
66,
20471,
1330,
6937,
62,
15003,
11,
479,
1385,
278,
62,
15003,
1... | 2.449257 | 808 |
"""Tests for the ee.computedobject module."""
import unittest
import ee
from ee import apitestcase
if __name__ == '__main__':
unittest.main()
| [
37811,
51,
3558,
329,
262,
304,
68,
13,
785,
17128,
15252,
8265,
526,
15931,
628,
198,
198,
11748,
555,
715,
395,
198,
198,
11748,
304,
68,
198,
198,
6738,
304,
68,
1330,
2471,
270,
395,
7442,
628,
198,
198,
361,
11593,
3672,
834,
... | 2.62069 | 58 |
from django.core.management.base import BaseCommand
from pyquery import PyQuery
from olympia.applications.models import AppVersion
from olympia.constants.applications import APP_GUIDS
| [
6738,
42625,
14208,
13,
7295,
13,
27604,
13,
8692,
1330,
7308,
21575,
198,
198,
6738,
12972,
22766,
1330,
9485,
20746,
198,
198,
6738,
267,
6760,
544,
13,
1324,
677,
602,
13,
27530,
1330,
2034,
14815,
198,
6738,
267,
6760,
544,
13,
99... | 3.528302 | 53 |
import asyncio
import json
import aiosqlite
from aiogram.types import Message
from config import DB_URL, dp, CHANNEL_BOT_ID
from core.db.utils import dict_factory
from core.models.group import GroupSettings
@dp.throttled(rate=2)
| [
11748,
30351,
952,
198,
11748,
33918,
198,
198,
11748,
257,
4267,
13976,
578,
198,
6738,
257,
72,
21857,
13,
19199,
1330,
16000,
198,
198,
6738,
4566,
1330,
20137,
62,
21886,
11,
288,
79,
11,
5870,
22846,
3698,
62,
33,
2394,
62,
2389,... | 2.987179 | 78 |
#Tester
n = Numbers(2, 5)
n.add() | [
198,
2,
51,
7834,
198,
77,
796,
27797,
7,
17,
11,
642,
8,
198,
77,
13,
2860,
3419
] | 1.888889 | 18 |
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
# pylint: disable=too-many-lines
from knack.help_files import helps
helps['crossdeviceexperiences_beta'] = '''
type: group
short-summary: Manage Cross Device Experiences
'''
helps['crossdeviceexperiences user'] = """
type: group
short-summary: Manage user with crossdeviceexperiences_beta
"""
helps['crossdeviceexperiences user create-activity'] = """
type: command
short-summary: "Create new navigation property to activities for users."
parameters:
- name: --attribution
short-summary: "imageInfo"
long-summary: |
Usage: --attribution add-image-query=XX alternate-text=XX alternative-text=XX icon-url=XX
add-image-query: Optional; parameter used to indicate the server is able to render image dynamically in \
response to parameterization. For example – a high contrast image
alternate-text: Optional; alt-text accessible content for the image
icon-url: Optional; URI that points to an icon which represents the application used to generate the \
activity
"""
helps['crossdeviceexperiences user create-device'] = """
type: command
short-summary: "Create new navigation property to devices for users."
parameters:
- name: --alternative-security-ids
short-summary: "For internal use only. Not nullable."
long-summary: |
Usage: --alternative-security-ids identity-provider=XX key=XX type=XX
identity-provider: For internal use only
key: For internal use only
type: For internal use only
Multiple actions can be specified by using more than one --alternative-security-ids argument.
- name: --extension-attributes
short-summary: "onPremisesExtensionAttributes"
long-summary: |
Usage: --extension-attributes extension-attribute1=XX extension-attribute10=XX extension-attribute11=XX \
extension-attribute12=XX extension-attribute13=XX extension-attribute14=XX extension-attribute15=XX \
extension-attribute2=XX extension-attribute3=XX extension-attribute4=XX extension-attribute5=XX \
extension-attribute6=XX extension-attribute7=XX extension-attribute8=XX extension-attribute9=XX
extension-attribute1: First customizable extension attribute.
extension-attribute10: Tenth customizable extension attribute.
extension-attribute11: Eleventh customizable extension attribute.
extension-attribute12: Twelfth customizable extension attribute.
extension-attribute13: Thirteenth customizable extension attribute.
extension-attribute14: Fourteenth customizable extension attribute.
extension-attribute15: Fifteenth customizable extension attribute.
extension-attribute2: Second customizable extension attribute.
extension-attribute3: Third customizable extension attribute.
extension-attribute4: Fourth customizable extension attribute.
extension-attribute5: Fifth customizable extension attribute.
extension-attribute6: Sixth customizable extension attribute.
extension-attribute7: Seventh customizable extension attribute.
extension-attribute8: Eighth customizable extension attribute.
extension-attribute9: Ninth customizable extension attribute.
- name: --member-of
short-summary: "Groups that this group is a member of. HTTP Methods: GET (supported for all groups). \
Read-only. Nullable."
long-summary: |
Usage: --member-of deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --member-of argument.
- name: --registered-owners
short-summary: "The user that cloud joined the device or registered their personal device. The registered \
owner is set at the time of registration. Currently, there can be only one owner. Read-only. Nullable."
long-summary: |
Usage: --registered-owners deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --registered-owners argument.
- name: --registered-users
short-summary: "Collection of registered users of the device. For cloud joined devices and registered personal \
devices, registered users are set to the same value as registered owners at the time of registration. Read-only. \
Nullable."
long-summary: |
Usage: --registered-users deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --registered-users argument.
- name: --transitive-member-of
long-summary: |
Usage: --transitive-member-of deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --transitive-member-of argument.
- name: --extensions
short-summary: "The collection of open extensions defined for the device. Read-only. Nullable."
long-summary: |
Usage: --extensions id=XX
id: Read-only.
Multiple actions can be specified by using more than one --extensions argument.
"""
helps['crossdeviceexperiences user delete-activity'] = """
type: command
short-summary: "Delete navigation property activities for users."
"""
helps['crossdeviceexperiences user delete-device'] = """
type: command
short-summary: "Delete navigation property devices for users."
"""
helps['crossdeviceexperiences user list-activity'] = """
type: command
short-summary: "Get activities from users."
"""
helps['crossdeviceexperiences user list-device'] = """
type: command
short-summary: "Get devices from users."
"""
helps['crossdeviceexperiences user show-activity'] = """
type: command
short-summary: "Get activities from users."
"""
helps['crossdeviceexperiences user show-device'] = """
type: command
short-summary: "Get devices from users."
"""
helps['crossdeviceexperiences user update-activity'] = """
type: command
short-summary: "Update the navigation property activities in users."
parameters:
- name: --attribution
short-summary: "imageInfo"
long-summary: |
Usage: --attribution add-image-query=XX alternate-text=XX alternative-text=XX icon-url=XX
add-image-query: Optional; parameter used to indicate the server is able to render image dynamically in \
response to parameterization. For example – a high contrast image
alternate-text: Optional; alt-text accessible content for the image
icon-url: Optional; URI that points to an icon which represents the application used to generate the \
activity
"""
helps['crossdeviceexperiences user update-device'] = """
type: command
short-summary: "Update the navigation property devices in users."
parameters:
- name: --alternative-security-ids
short-summary: "For internal use only. Not nullable."
long-summary: |
Usage: --alternative-security-ids identity-provider=XX key=XX type=XX
identity-provider: For internal use only
key: For internal use only
type: For internal use only
Multiple actions can be specified by using more than one --alternative-security-ids argument.
- name: --extension-attributes
short-summary: "onPremisesExtensionAttributes"
long-summary: |
Usage: --extension-attributes extension-attribute1=XX extension-attribute10=XX extension-attribute11=XX \
extension-attribute12=XX extension-attribute13=XX extension-attribute14=XX extension-attribute15=XX \
extension-attribute2=XX extension-attribute3=XX extension-attribute4=XX extension-attribute5=XX \
extension-attribute6=XX extension-attribute7=XX extension-attribute8=XX extension-attribute9=XX
extension-attribute1: First customizable extension attribute.
extension-attribute10: Tenth customizable extension attribute.
extension-attribute11: Eleventh customizable extension attribute.
extension-attribute12: Twelfth customizable extension attribute.
extension-attribute13: Thirteenth customizable extension attribute.
extension-attribute14: Fourteenth customizable extension attribute.
extension-attribute15: Fifteenth customizable extension attribute.
extension-attribute2: Second customizable extension attribute.
extension-attribute3: Third customizable extension attribute.
extension-attribute4: Fourth customizable extension attribute.
extension-attribute5: Fifth customizable extension attribute.
extension-attribute6: Sixth customizable extension attribute.
extension-attribute7: Seventh customizable extension attribute.
extension-attribute8: Eighth customizable extension attribute.
extension-attribute9: Ninth customizable extension attribute.
- name: --member-of
short-summary: "Groups that this group is a member of. HTTP Methods: GET (supported for all groups). \
Read-only. Nullable."
long-summary: |
Usage: --member-of deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --member-of argument.
- name: --registered-owners
short-summary: "The user that cloud joined the device or registered their personal device. The registered \
owner is set at the time of registration. Currently, there can be only one owner. Read-only. Nullable."
long-summary: |
Usage: --registered-owners deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --registered-owners argument.
- name: --registered-users
short-summary: "Collection of registered users of the device. For cloud joined devices and registered personal \
devices, registered users are set to the same value as registered owners at the time of registration. Read-only. \
Nullable."
long-summary: |
Usage: --registered-users deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --registered-users argument.
- name: --transitive-member-of
long-summary: |
Usage: --transitive-member-of deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --transitive-member-of argument.
- name: --extensions
short-summary: "The collection of open extensions defined for the device. Read-only. Nullable."
long-summary: |
Usage: --extensions id=XX
id: Read-only.
Multiple actions can be specified by using more than one --extensions argument.
"""
helps['crossdeviceexperiences user-activity'] = """
type: group
short-summary: Manage user activity with crossdeviceexperiences_beta
"""
helps['crossdeviceexperiences user-activity create-history-item'] = """
type: command
short-summary: "Create new navigation property to historyItems for users."
"""
helps['crossdeviceexperiences user-activity delete-history-item'] = """
type: command
short-summary: "Delete navigation property historyItems for users."
"""
helps['crossdeviceexperiences user-activity list-history-item'] = """
type: command
short-summary: "Get historyItems from users."
"""
helps['crossdeviceexperiences user-activity show-history-item'] = """
type: command
short-summary: "Get historyItems from users."
"""
helps['crossdeviceexperiences user-activity update-history-item'] = """
type: command
short-summary: "Update the navigation property historyItems in users."
"""
helps['crossdeviceexperiences user-activity-history-item'] = """
type: group
short-summary: Manage user activity history item with crossdeviceexperiences_beta
"""
helps['crossdeviceexperiences user-activity-history-item delete-ref-activity'] = """
type: command
short-summary: "Delete ref of navigation property activity for users."
"""
helps['crossdeviceexperiences user-activity-history-item set-ref-activity'] = """
type: command
short-summary: "Update the ref of navigation property activity in users."
"""
helps['crossdeviceexperiences user-activity-history-item show-activity'] = """
type: command
short-summary: "Get activity from users."
"""
helps['crossdeviceexperiences user-activity-history-item show-ref-activity'] = """
type: command
short-summary: "Get ref of activity from users."
"""
| [
2,
16529,
35937,
201,
198,
2,
15069,
357,
66,
8,
5413,
10501,
13,
1439,
2489,
10395,
13,
201,
198,
2,
49962,
739,
262,
17168,
13789,
13,
4091,
13789,
13,
14116,
287,
262,
1628,
6808,
329,
201,
198,
2,
5964,
1321,
13,
201,
198,
2,
... | 2.987872 | 4,535 |
import pika
import sys

# RabbitMQ "work queue" producer (based on the official pika tutorial):
# sends one message, taken from the command line, to the task_queue.
# Connect to a broker on the local machine (default port 5672).
connection = pika.BlockingConnection(pika.ConnectionParameters("localhost"))
channel = connection.channel()

# channel.queue_declare(queue="hello")
# durable=True makes the queue definition itself survive a broker restart.
channel.queue_declare(queue="task_queue", durable=True)

# Message body comes from the command-line arguments; fall back to a default.
message = " ".join(sys.argv[1:]) or "Hello World!"

# channel.basic_publish(exchange="", routing_key="hello", body=message)
# Publishing to the default ("") exchange routes directly to the queue
# whose name equals routing_key.
channel.basic_publish(
    exchange="",
    routing_key="task_queue",
    body=message,
    properties=pika.BasicProperties(
        delivery_mode=2,  # make message persistent
    ),
)
print(f" [x] Sent {message}")
connection.close()
| [
11748,
279,
9232,
198,
11748,
25064,
198,
198,
38659,
796,
279,
9232,
13,
3629,
8629,
32048,
7,
79,
9232,
13,
32048,
48944,
7203,
36750,
48774,
198,
17620,
796,
4637,
13,
17620,
3419,
198,
198,
2,
6518,
13,
36560,
62,
32446,
533,
7,
... | 2.910891 | 202 |
import os
import sys
import click
from ..client.api import api_client
from .plugins import COMMANDS
# cmd_folder = os.path.abspath(os.path.join(os.path.dirname(__file__), 'plugins'))
add_command = ["ps", "init", "start", "stop", "deploy", "update", "create", "status", "health", "fetch"]
@click.command(cls=PluginCommand)
@click.pass_context
| [
11748,
28686,
198,
11748,
25064,
198,
198,
11748,
3904,
198,
198,
6738,
11485,
16366,
13,
15042,
1330,
40391,
62,
16366,
198,
6738,
764,
37390,
1330,
9440,
10725,
5258,
198,
198,
2,
23991,
62,
43551,
796,
28686,
13,
6978,
13,
397,
2777,... | 2.844262 | 122 |
import pygame
from pygame.locals import *
from threading import Lock
from videoreceiver import VideoReceiver
from controlio import ControlIO
import sys
class Rover(object):
    """Primary control interface for Rover.

    Reads gamepad input through pygame, displays the rover's video stream
    (via VideoReceiver) plus telemetry text, and forwards motor/look
    commands over ControlIO.

    NOTE(review): self.ip and self.stop are read below but never assigned
    in this class -- presumably initialized by an __init__ defined outside
    this view; confirm before relying on this class standalone.
    """
    FPS = 15            # main-loop tick rate and video frame rate
    MLIMITLOW = 32      # dead-zone threshold for the motor (move) axes
    LLIMITLOW = 96      # dead-zone threshold for the look axes
    WIDTH = 1280        # window width in pixels
    HEIGHT = 960        # window height in pixels
    BLACK = (0,0,0)
    WHITE = (255,255,255)
    RED = (255,0,0)

    def setup(self):
        """Handles library initialization and additional thread setup.

        Initializes pygame and the first joystick, opens the display
        window, then starts the video-receiver and control-IO threads.
        Exits the process with status 2 if no gamepad is attached.
        """
        pygame.init()
        pygame.joystick.init()
        self.clock = pygame.time.Clock()
        self.font = pygame.font.Font('freesansbold.ttf', 16)
        if pygame.joystick.get_count() > 0:
            self.gamepad = pygame.joystick.Joystick(0)
            self.gamepad.init()
        else:
            print("Gamepad not found. Exiting.")
            sys.exit(2)
        self.screen = pygame.display.set_mode([self.WIDTH,self.HEIGHT])
        pygame.display.set_caption("Rover Control")
        # Both workers are threads (joined in run()); they receive the
        # rover's address from self.ip.
        self.video_receiver = VideoReceiver(self.ip, self.FPS, (self.WIDTH, self.HEIGHT))
        self.video_receiver.start()
        self.cio = ControlIO(self.ip)
        self.cio.start()

    def js_convert(self, pos_list):
        """Convert gamepad values from pygame format to motor control format.

        pygame axes are floats in [-1.0, 1.0]; the first two (move) axes are
        scaled to roughly [-255, 255] and the last two (look) axes to roughly
        [-512, 512], then clamped to 0 inside a per-pair dead zone so stick
        drift does not move the rover.
        NOTE(review): c2 is scaled with a positive factor while the others
        are negated -- presumably matching the hardware's axis orientation;
        confirm against the rover firmware.
        """
        c0 = int(pos_list[0] * -255)
        c1 = int(pos_list[1] * -255)
        c2 = int(pos_list[2] * 512)
        c3 = int(pos_list[3] * -512)
        # Zero out values inside the dead zone (|c| < limit).
        if c0 < self.MLIMITLOW and c0 > -1 * self.MLIMITLOW:
            c0 = 0
        if c1 < self.MLIMITLOW and c1 > -1 * self.MLIMITLOW:
            c1 = 0
        if c2 < self.LLIMITLOW and c2 > -1 * self.LLIMITLOW:
            c2 = 0
        if c3 < self.LLIMITLOW and c3 > -1 * self.LLIMITLOW:
            c3 = 0
        return (c0, c1, c2, c3)

    def text_objects(self, text):
        """Helper function for displaying text to screen.

        Renders `text` in white with the UI font and returns the
        (surface, bounding-rect) pair expected by pygame blitting.
        """
        textSurface = self.font.render(text, True, self.WHITE)
        return textSurface, textSurface.get_rect()

    def gamepad_position(self, clist):
        """Display gamepad analog stick positions on screen.

        `clist` is the 4-tuple produced by js_convert; drawn near the
        bottom-left of the window.
        """
        TextSurf, TextRect = self.text_objects("Move [%4d,%4d] Look[%4d,%4d]" % clist)
        TextRect.center = (self.WIDTH-1000, self.HEIGHT-100)
        self.screen.blit(TextSurf, TextRect)

    def battery_voltage(self, voltage):
        """Display battery voltage on screen (bottom-right corner)."""
        TextSurf, TextRect = self.text_objects("Voltage: %.2f" % voltage)
        TextRect.center = (self.WIDTH-280, self.HEIGHT-100)
        self.screen.blit(TextSurf, TextRect)

    def run(self):
        """Main control loop for rover.

        Each tick: sample the four gamepad axes, convert them to motor
        commands, redraw video + telemetry, send the command if the control
        link is ready, then handle window/gamepad events.  Button 0 takes a
        snapshot, button 1 toggles video recording; closing the window sets
        self.stop and triggers a clean shutdown of both worker threads.
        """
        self.setup()
        while not self.stop:
            # Axis mapping: 1/0 = move stick (y, x), 4/3 = look stick (y, x).
            m0 = self.gamepad.get_axis(1)
            m1 = self.gamepad.get_axis(0)
            l0 = self.gamepad.get_axis(4)
            l1 = self.gamepad.get_axis(3)
            values = ( m0, m1, l0, l1 )
            gamepad_values = self.js_convert(values)
            # Redraw: clear, current video frame, then telemetry overlays.
            self.screen.fill(self.BLACK)
            self.screen.blit(self.video_receiver.get_frame(), (0,0))
            self.gamepad_position(gamepad_values)
            self.battery_voltage(self.cio.get_voltage())
            pygame.display.flip()
            # Only transmit when the control channel can accept a command.
            if self.cio.is_ready():
                self.cio.send_command(gamepad_values)
            for event in pygame.event.get():
                if event.type == QUIT:
                    self.stop = True
                elif event.type == JOYBUTTONDOWN:
                    if event.button == 0:
                        print("Doing snapshot.")
                        self.video_receiver.snapshot()
                    elif event.button == 1:
                        print("Doing video.")
                        self.video_receiver.toggleVideo()
            # Limit the loop to FPS iterations per second.
            self.clock.tick(self.FPS)
        # Shutdown: signal both worker threads, wait for them, close pygame.
        self.video_receiver.active = False
        self.cio.active = False
        self.video_receiver.join()
        self.cio.join()
        pygame.quit()
| [
11748,
12972,
6057,
198,
6738,
12972,
6057,
13,
17946,
874,
1330,
1635,
198,
6738,
4704,
278,
1330,
13656,
198,
6738,
18784,
382,
39729,
1330,
7623,
3041,
39729,
198,
6738,
1630,
952,
1330,
6779,
9399,
198,
11748,
25064,
198,
198,
4871,
... | 1.978979 | 1,998 |
#!/usr/bin/python
#
# (c) 2017 Apstra Inc, <community@apstra.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['deprecated'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: aos_blueprint_param
author: jeremy@apstra.com (@jeremyschulman)
version_added: "2.3"
short_description: Manage AOS blueprint parameter values
deprecated:
removed_in: "2.9"
why: This module does not support AOS 2.1 or later
alternative: See new modules at U(https://www.ansible.com/ansible-apstra).
description:
- Apstra AOS Blueprint Parameter module let you manage your Blueprint Parameter easily.
You can create access, define and delete Blueprint Parameter. The list of
Parameters supported is different per Blueprint. The option I(get_param_list)
can help you to access the list of supported Parameters for your blueprint.
This module is idempotent and support the I(check) mode. It's using the AOS REST API.
requirements:
- "aos-pyez >= 0.6.0"
options:
session:
description:
- An existing AOS session as obtained by M(aos_login) module.
required: true
blueprint:
description:
- Blueprint Name or Id as defined in AOS.
required: True
name:
description:
- Name of blueprint parameter, as defined by AOS design template. You can
use the option I(get_param_list) to get the complete list of supported
parameters for your blueprint.
value:
description:
- Blueprint parameter value. This value may be transformed by using the
I(param_map) field; used when the blueprint parameter requires
an AOS unique ID value.
get_param_list:
description:
- Get the complete list of supported parameters for this blueprint and the
description of those parameters.
state:
description:
- Indicate what is the expected state of the Blueprint Parameter (present or not).
default: present
choices: ['present', 'absent']
param_map:
description:
- Defines the aos-pyez collection that will is used to map the user-defined
item name into the AOS unique ID value. For example, if the caller
provides an IP address pool I(param_value) called "Server-IpAddrs", then
the aos-pyez collection is 'IpPools'. Some I(param_map) are already defined
by default like I(logical_device_maps).
'''
EXAMPLES = '''
- name: Add Logical Device Maps information in a Blueprint
aos_blueprint_param:
session: "{{ aos_session }}"
blueprint: "my-blueprint-l2"
name: "logical_device_maps"
value:
spine_1: CumulusVX-Spine-Switch
spine_2: CumulusVX-Spine-Switch
leaf_1: CumulusVX-Leaf-Switch
leaf_2: CumulusVX-Leaf-Switch
leaf_3: CumulusVX-Leaf-Switch
state: present
- name: Access Logical Device Maps information from a Blueprint
aos_blueprint_param:
session: "{{ aos_session }}"
blueprint: "my-blueprint-l2"
name: "logical_device_maps"
state: present
- name: Reset Logical Device Maps information in a Blueprint
aos_blueprint_param:
session: "{{ aos_session }}"
blueprint: "my-blueprint-l2"
name: "logical_device_maps"
state: absent
- name: Get list of all supported Params for a blueprint
aos_blueprint_param:
session: "{{ aos_session }}"
blueprint: "my-blueprint-l2"
get_param_list: yes
register: params_list
- debug: var=params_list
- name: Add Resource Pools information in a Blueprint, by providing a param_map
aos_blueprint_param:
session: "{{ aos_session }}"
blueprint: "my-blueprint-l2"
name: "resource_pools"
value:
leaf_loopback_ips: ['Switches-IpAddrs']
spine_loopback_ips: ['Switches-IpAddrs']
spine_leaf_link_ips: ['Switches-IpAddrs']
spine_asns: ['Private-ASN-pool']
leaf_asns: ['Private-ASN-pool']
virtual_network_svi_subnets: ['Servers-IpAddrs']
param_map:
leaf_loopback_ips: IpPools
spine_loopback_ips: IpPools
spine_leaf_link_ips: IpPools
spine_asns: AsnPools
leaf_asns: AsnPools
virtual_network_svi_subnets: IpPools
state: present
'''
RETURNS = '''
blueprint:
description: Name of the Blueprint
returned: always
type: str
sample: Server-IpAddrs
name:
description: Name of the Blueprint Parameter
returned: always
type: str
sample: fcc4ac1c-e249-4fe7-b458-2138bfb44c06
value:
description: Value of the Blueprint Parameter as returned by the AOS Server
returned: always
type: dict
sample: {'...'}
params_list:
description: Value of the Blueprint Parameter as returned by the AOS Server
returned: when I(get_param_list) is defined.
type: dict
sample: {'...'}
'''
import json
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.aos.aos import get_aos_session, find_collection_item, check_aos_version
from ansible.module_utils._text import to_native
# Optional-dependency feature flags: the module can be imported without
# PyYAML or aos-pyez installed; the flags are checked before use.
try:
    import yaml
    HAS_YAML = True
except ImportError:
    HAS_YAML = False

try:
    from apstra.aosom.collection_mapper import CollectionMapper, MultiCollectionMapper
    HAS_AOS_PYEZ_MAPPER = True
except ImportError:
    HAS_AOS_PYEZ_MAPPER = False

# Default mapping from blueprint parameter name to the aos-pyez collection
# used to translate user-supplied item names into AOS unique IDs
# (see the param_map option in DOCUMENTATION above).
param_map_list = dict(
    logical_device_maps='LogicalDeviceMaps',
    resource_pools=dict(
        spine_asns="AsnPools",
        leaf_asns="AsnPools",
        virtual_network_svi_subnets="IpPools",
        spine_loopback_ips="IpPools",
        leaf_loopback_ips="IpPools",
        spine_leaf_link_ips="IpPools"
    )
)

# NOTE(review): main() is not defined in this excerpt -- it must be defined
# elsewhere in the module for this entry point to work; confirm.
if __name__ == '__main__':
    main()
| [
2,
48443,
14629,
14,
8800,
14,
29412,
198,
2,
198,
2,
357,
66,
8,
2177,
5949,
12044,
3457,
11,
1279,
28158,
31,
499,
12044,
13,
785,
29,
198,
2,
198,
2,
770,
2393,
318,
636,
286,
28038,
856,
198,
2,
198,
2,
28038,
856,
318,
14... | 2.716522 | 2,300 |
from typing import Callable
def get_sentry_extension() -> Callable[[str], None]:
    """Return ``set_transaction_id`` when sentry-sdk is installed, else a no-op."""
    try:
        import sentry_sdk  # noqa: F401, TC002
        from asgi_correlation_id.extensions.sentry import set_transaction_id
    except ImportError:  # pragma: no cover
        def _noop(correlation_id: str) -> None:
            return None

        return _noop
    return set_transaction_id
def set_transaction_id(correlation_id: str) -> None:
    """
    Store the current correlation ID as Sentry's transaction-ID tag.

    The tag appears in a Sentry event's detail view, which makes it easy
    to correlate application logs with a specific event.
    """
    import sentry_sdk

    with sentry_sdk.configure_scope() as current_scope:
        current_scope.set_tag('transaction_id', correlation_id)
| [
6738,
19720,
1330,
4889,
540,
628,
198,
4299,
651,
62,
82,
13000,
62,
2302,
3004,
3419,
4613,
4889,
540,
30109,
2536,
4357,
6045,
5974,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
8229,
900,
62,
7645,
2673,
62,
312,
11,
611,
262,... | 2.858621 | 290 |
from JobBrowserBFF.TestBase import TestBase
import JobBrowserBFF.Utils as utils
UPSTREAM_SERVICE = 'ee2'
ENV = 'ci'
USER_CLASS = 'user'
| [
6738,
15768,
46532,
33,
5777,
13,
14402,
14881,
1330,
6208,
14881,
198,
11748,
15768,
46532,
33,
5777,
13,
18274,
4487,
355,
3384,
4487,
198,
198,
8577,
2257,
32235,
62,
35009,
27389,
796,
705,
1453,
17,
6,
198,
1677,
53,
796,
705,
97... | 2.653846 | 52 |
from snuba.datasets.factory import get_dataset
from snuba.query import SelectedExpression
from snuba.query.conditions import OPERATOR_TO_FUNCTION, binary_condition, in_condition
from snuba.query.expressions import Column, FunctionCall, Literal
from snuba.query.parser import parse_query
from snuba.query.processors.tags_expander import TagsExpanderProcessor
from snuba.request.request_settings import HTTPRequestSettings
| [
6738,
3013,
22013,
13,
19608,
292,
1039,
13,
69,
9548,
1330,
651,
62,
19608,
292,
316,
198,
6738,
3013,
22013,
13,
22766,
1330,
41344,
16870,
2234,
198,
6738,
3013,
22013,
13,
22766,
13,
17561,
1756,
1330,
43521,
25633,
62,
10468,
62,
... | 3.637931 | 116 |
from rest_framework import status
from rest_framework.views import APIView
from rest_framework.response import Response
from ..models import *
from ..serializers import *
| [
6738,
1334,
62,
30604,
1330,
3722,
198,
6738,
1334,
62,
30604,
13,
33571,
1330,
3486,
3824,
769,
198,
6738,
1334,
62,
30604,
13,
26209,
1330,
18261,
198,
198,
6738,
11485,
27530,
1330,
1635,
198,
6738,
11485,
46911,
11341,
1330,
1635,
1... | 4.195122 | 41 |
from data_structures.stack import Stack
from trees_and_tree_algorithms.binary_tree import BinaryTree
import operator
| [
6738,
1366,
62,
7249,
942,
13,
25558,
1330,
23881,
198,
6738,
7150,
62,
392,
62,
21048,
62,
282,
7727,
907,
13,
39491,
62,
21048,
1330,
45755,
27660,
198,
198,
11748,
10088,
628,
628,
198
] | 3.588235 | 34 |
#!/usr/bin/python
import os, codecs, datetime
import jinja2, markdown
from email.utils import formatdate
from xml.sax.saxutils import escape
from ..utils.misc import json_dumps
__all__ = ["add_entries"]
config = {"__Instructions_filename_":"In the filename, 0 is the extension, specified by ext, 1 is the format name, 2 is the log name, and 3 is the date, specified by the format in date",
"__Instructions_master_feed_":"A master feed will link to all the other feeds. 0 is the extension, the format is 1, and the date is 2",
"__Instructions_feed_title_":"{} will be replaced with the log name",
"__Instructions_author_":"Author will be included in atom feed. email may also be specified",
"path": os.path.join(os.path.expanduser('~'),'Dropbox','Feed'),
"author":{"name":"Nobody","email":None},
"filename":"feed_{1}_{2}_{3}.{0}",
"master_feed":"all_feeds_{1}_{2}.{0}",
"date":"%Y-%m-%d",
"date_time":"%c", "formats": ['rss'],
"ext":{"rss":"xml","atom":"xml","json":"json","jsonp":"js"},
"jsonp_callback":"drop_feed",
"feed_link":"https://github.com/goodevilgenius/droplogger/",
"feed_title":"DropLogger feed for {}"}
| [
2,
48443,
14629,
14,
8800,
14,
29412,
198,
198,
11748,
28686,
11,
40481,
82,
11,
4818,
8079,
198,
11748,
474,
259,
6592,
17,
11,
1317,
2902,
198,
6738,
3053,
13,
26791,
1330,
5794,
4475,
198,
6738,
35555,
13,
82,
897,
13,
82,
897,
... | 2.532389 | 494 |
import pytest
from contessa.models import ResultTable, QualityCheck
from contessa.rules import GtRule, NotNullRule
@pytest.mark.parametrize(
"rule_def, rule_cls",
[({"type": "not_null"}, NotNullRule), ({"type": "gt"}, GtRule)],
)
| [
11748,
12972,
9288,
198,
198,
6738,
542,
21411,
13,
27530,
1330,
25414,
10962,
11,
14156,
9787,
198,
6738,
542,
21411,
13,
38785,
1330,
402,
83,
31929,
11,
1892,
35067,
31929,
628,
198,
198,
31,
9078,
9288,
13,
4102,
13,
17143,
316,
3... | 2.692308 | 91 |
from dbus_next.service import ServiceInterface, signal
from dbus_next.aio import MessageBus
from dbus_next import Message
import pytest
@pytest.mark.asyncio
| [
6738,
288,
10885,
62,
19545,
13,
15271,
1330,
4809,
39317,
11,
6737,
198,
6738,
288,
10885,
62,
19545,
13,
64,
952,
1330,
16000,
16286,
198,
6738,
288,
10885,
62,
19545,
1330,
16000,
198,
198,
11748,
12972,
9288,
628,
198,
198,
31,
90... | 3.22 | 50 |
'''
* Solution for Binary Tree Inorder Traversal (Leetcode 94)
* https://leetcode.com/problems/binary-tree-inorder-traversal/
* Time Complexity: O(n)
* Space Complexity: O(h)
* Idea: Recursion
'''
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
| [
7061,
6,
198,
1635,
28186,
329,
45755,
12200,
554,
2875,
4759,
690,
282,
357,
3123,
316,
8189,
10048,
8,
198,
1635,
3740,
1378,
293,
316,
8189,
13,
785,
14,
1676,
22143,
14,
39491,
12,
21048,
12,
259,
2875,
12,
9535,
690,
282,
14,
... | 2.453333 | 150 |
import glob
import os

import yaml

# Fix: yaml.load() without an explicit Loader can construct arbitrary Python
# objects from the file (unsafe) and has required a Loader argument since
# PyYAML 6.0, so the original yaml.load(f.read()) now raises TypeError.
# safe_load() parses plain YAML only and accepts the file object directly.
with open("server_config.yaml") as f:
    config = yaml.safe_load(f)
| [
198,
11748,
15095,
198,
11748,
28686,
198,
11748,
331,
43695,
628,
198,
4480,
1280,
7203,
15388,
62,
11250,
13,
88,
43695,
4943,
355,
277,
25,
198,
220,
220,
220,
4566,
796,
331,
43695,
13,
2220,
7,
69,
13,
961,
28955,
628,
628,
628... | 2.6 | 45 |
from chainer import Chain
import chainer.functions as F
import chainer.links as L
from pydlshogi.common import *
ch=192
| [
171,
119,
123,
6738,
6333,
263,
1330,
21853,
198,
11748,
6333,
263,
13,
12543,
2733,
355,
376,
198,
11748,
6333,
263,
13,
28751,
355,
406,
198,
198,
6738,
279,
5173,
75,
1477,
44381,
13,
11321,
1330,
1635,
198,
198,
354,
28,
17477,
... | 2.883721 | 43 |
# Generated by Django 2.2.13 on 2021-02-26 11:42
from django.core.exceptions import ObjectDoesNotExist
from django.db import migrations, models
| [
2,
2980,
515,
416,
37770,
362,
13,
17,
13,
1485,
319,
33448,
12,
2999,
12,
2075,
1367,
25,
3682,
198,
198,
6738,
42625,
14208,
13,
7295,
13,
1069,
11755,
1330,
9515,
13921,
3673,
3109,
396,
198,
6738,
42625,
14208,
13,
9945,
1330,
1... | 3.0625 | 48 |
import math
import numpy as np
import tensorflow as tf
from tensorflow.python.framework import ops
class BatchNorm(object):
"""Code modification of http://stackoverflow.com/a/33950177"""
def linear(inpt, output_dim, scope=None, stddev=1.0, with_w=False):
    """
    Fully-connected (linear) layer: inpt @ w + b.

    :param inpt: input tensor; its second dimension is used as the input
        feature size, so a 2-D [batch, features] tensor is assumed
        (tf.matmul requires rank 2) -- TODO confirm at call sites
    :param output_dim: int, number of output units
    :param scope: str, variable scope name (defaults to 'linear')
    :param stddev: float, stddev of the normal weight initializer
    :param with_w: whether the weight and bias variables should be
        returned alongside the output
    :return: output tensor, or (output, w, b) when with_w is True
    """
    normal = tf.random_normal_initializer(stddev=stddev)
    const = tf.constant_initializer(0.0)
    # Static shape is needed to size the weight matrix.
    shape = inpt.get_shape().as_list()

    with tf.variable_scope(scope or 'linear'):
        w = tf.get_variable('w', [shape[1], output_dim], tf.float32,
                            initializer=normal)
        b = tf.get_variable('b', [output_dim], initializer=const)
        if with_w:
            return tf.matmul(inpt, w) + b, w, b
        else:
            return tf.matmul(inpt, w) + b
def lrelu(x, leak=0.2, scope="lrelu"):
    """
    Leaky ReLU activation: x where x > 0, leak * x elsewhere, computed
    as relu(x) - leak * relu(-x).

    :param x: tensor
    :param leak: float, leak factor alpha >= 0
    :param scope: str, name of the operation
    :return: tensor, leaky relu of x
    """
    with tf.variable_scope(scope):
        positive_part = tf.nn.relu(x)
        negative_part = tf.nn.relu(-x)
        return positive_part - leak * negative_part
def lrelu_alternative(x, leak=0.2, name="lrelu"):
    """
    Alternative leaky-ReLU formulation based on abs():
    0.5*(1+leak)*x + 0.5*(1-leak)*|x| equals x for x > 0 and leak*x otherwise.

    :param x: tensor
    :param leak: float, leak factor alpha >= 0
    :param name: str, name of the operation
    :return: tensor, leaky relu operation
    """
    with tf.variable_scope(name):
        return 0.5 * (1 + leak) * x + 0.5 * (1 - leak) * abs(x)
def conv2d(inpt, output_dim, k_h=5, k_w=5, d_h=2, d_w=2, stddev=0.2,
           scope=None):
    """
    2-D convolution with SAME padding and a bias add.

    :param inpt: 4-D input tensor; the last dimension is used as the
        channel count (NHWC layout assumed -- TODO confirm at call sites)
    :param output_dim: int, number of output channels
    :param k_h: int, filter height
    :param k_w: int, filter width
    :param d_h: int, stride along the height axis
    :param d_w: int, stride along the width axis
    :param stddev: float, stddev of the normal weight initializer
    :param scope: str, variable scope name (defaults to 'conv2d')
    :return: 4-D output tensor with output_dim channels
    """
    normal = tf.random_normal_initializer(stddev=stddev)
    const = tf.constant_initializer(0.0)
    with tf.variable_scope(scope or 'conv2d'):
        # TODO give an own defined filter
        # Filter layout: [height, width, in_channels, out_channels].
        w = tf.get_variable('w', [k_h, k_w, inpt.get_shape()[-1], output_dim],
                            initializer=normal)
        conv = tf.nn.conv2d(inpt, w, strides=[1, d_h, d_w, 1], padding='SAME')
        b = tf.get_variable('b', [output_dim], initializer=const)
        # conv = tf.reshape(tf.nn.bias_add(conv, b), conv.get_shape())
        conv = tf.nn.bias_add(conv, b)
        return conv
def conv2d_transpose(inpt, output_dim, k_h=5, k_w=5, d_h=2, d_w=2, stddev=0.2,
                     scope=None, with_w=False):
    """
    Transposed 2-D convolution (a.k.a. deconvolution) with a bias add.

    :param inpt: 4-D input tensor; the last dimension is used as the
        input channel count
    :param output_dim: full output shape list/tensor (not just a channel
        count -- its last element is used as the output channel count and
        the whole value is passed as output_shape)
    :param k_h: int, filter height
    :param k_w: int, filter width
    :param d_h: int, stride along the height axis
    :param d_w: int, stride along the width axis
    :param stddev: float, stddev of the normal weight initializer
    :param scope: str, variable scope name (defaults to 'conv2d_transpose')
    :param with_w: whether the weight and bias variables should be
        returned alongside the output
    :return: output tensor, or (output, w, biases) when with_w is True
    """
    initializer = tf.random_normal_initializer(stddev=stddev)
    const = tf.constant_initializer(0.0)
    with tf.variable_scope(scope or 'conv2d_transpose'):
        # filter : [height, width, output_channels, in_channels]
        w = tf.get_variable('w', [k_h, k_w, output_dim[-1],
                                  inpt.get_shape()[-1]],
                            initializer=initializer)
        deconv = tf.nn.conv2d_transpose(inpt, w, output_shape=output_dim,
                                        strides=[1, d_h, d_w, 1])
        biases = tf.get_variable('b', [output_dim[-1]], initializer=const)
        # deconv = tf.reshape(tf.nn.bias_add(deconv, biases), deconv.get_shape())
        deconv = tf.nn.bias_add(deconv, biases)
        if with_w:
            return deconv, w, biases
        else:
            return deconv
def conv1d(inpt, output_dim, k_h=5, k_w=5, d_h=2, d_w=2, stddev=0.2,
scope=None):
"""
Convolution done for one binned spike train
:param inpt:
:param output_dim:
:param k_h:
:param k_w:
:param d_h:
:param d_w:
:param stddev:
:param scope:
:return:
"""
normal = tf.random_normal_initializer(stddev=stddev)
const = tf.constant_initializer(0.0)
with tf.variable_scope(scope or 'conv1d'):
# TODO give an own defined filter
w = tf.get_variable('w', [k_h, k_w, inpt.get_shape()[1], output_dim],
initializer=normal)
conv = tf.nn.conv1d(inpt, w, stride=[1, d_h, d_w, 1], padding='SAME')
b = tf.get_variable('b', [output_dim], initializer=const)
# conv = tf.reshape(tf.nn.bias_add(conv, b), conv.get_shape())
conv = tf.nn.bias_add(conv, b)
return conv
def binary_cross_entropy(preds, targets, scope=None):
"""Computes binary cross entropy given `preds`.
For brevity, let `x = `, `z = targets`. The logistic loss is
loss(x, z) = - sum_i (x[i] * log(z[i]) + (1 - x[i]) * log(1 - z[i]))
Args:
preds: A `Tensor` of type `float32` or `float64`.
targets: A `Tensor` of the same type and shape as `preds`.
"""
eps = 1e-12
with ops.op_scope([preds, targets], scope, "bce_loss") as scope:
preds = ops.convert_to_tensor(preds, name="preds")
targets = ops.convert_to_tensor(targets, name="targets")
return tf.reduce_mean(-(targets * tf.log(preds + eps) +
(1. - targets) * tf.log(1. - preds + eps)))
def conv_cond_concat(x, y):
"""Concatenate conditioning vector on feature map axis."""
x_shapes = x.get_shape()
y_shapes = y.get_shape()
return tf.concat(3, [x, y * tf.ones(
[x_shapes[0], x_shapes[1], x_shapes[2], y_shapes[3]])])
| [
11748,
10688,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
11192,
273,
11125,
355,
48700,
198,
198,
6738,
11192,
273,
11125,
13,
29412,
13,
30604,
1330,
39628,
628,
198,
4871,
347,
963,
35393,
7,
15252,
2599,
198,
220,
220,
220,
3722... | 2.067361 | 2,880 |
from .studiesIndex_whenDownloaded import studiesIndex_whenDownloaded
from .studiesIndex_whenDownloaded_withEmptyStudyList import studiesIndex_whenDownloaded_withEmptyStudyList
from .study_whenDownloaded import study_whenDownloaded
from .study_whenDownloaded_withEmptySeriesList import study_whenDownloaded_withEmptySeriesList
| [
6738,
764,
19149,
444,
15732,
62,
12518,
10002,
276,
1330,
3640,
15732,
62,
12518,
10002,
276,
198,
6738,
764,
19149,
444,
15732,
62,
12518,
10002,
276,
62,
4480,
40613,
39841,
8053,
1330,
3640,
15732,
62,
12518,
10002,
276,
62,
4480,
4... | 3.97561 | 82 |
"""Builds command for emulator."""
from __future__ import annotations
import abc
import argparse
import os
from dataclasses import dataclass
from enum import Enum
from typing import Dict
from emulation_system.consts import (
PRODUCTION_MODE_NAME,
DEVELOPMENT_MODE_NAME,
LATEST_KEYWORD,
ROOT_DIR,
)
from emulation_system.commands.command import CommandList, Command
from emulation_system.commands.abstract_command_creator import (
AbstractCommandCreator,
)
from emulation_system.opentrons_emulation_configuration import (
OpentronsEmulationConfiguration,
SourceDownloadLocations,
)
class CommonEmulationOptions(str, Enum):
"""Options shared by all sub-commands."""
DETACHED = "--detached"
class EmulationSubCommands(str, Enum):
"""Sub-Commands available to the `emulation` command."""
PROD_MODE = PRODUCTION_MODE_NAME
DEV_MODE = DEVELOPMENT_MODE_NAME
class ProductionEmulationOptions(str, Enum):
"""Options specific to `prod` sub-command."""
OT3_FIRMWARE_SHA = "--ot3-firmware-repo-sha"
MODULES_SHA = "--opentrons-modules-repo-sha"
MONOREPO_SHA = "--opentrons-repo-sha"
class DevelopmentEmulationOptions(str, Enum):
"""Options specfic to `dev` sub-command."""
MODULES_PATH = "--opentrons-modules-repo-path"
OT3_FIRMWARE_PATH = "--ot3-firmware-repo-path"
OPENTRONS_REPO = "--opentrons-repo-path"
class InvalidModeError(ValueError):
"""Thrown when an invalid emulation mode is provided. (Not `prod` or `dev`)."""
pass
class AbstractEmulationCommandCreator(AbstractCommandCreator):
"""Things common to both EmulationCreator classes."""
BUILD_COMMAND_NAME = "Build Emulation"
KILL_COMMAND_NAME = "Kill Emulation"
REMOVE_COMMAND_NAME = "Remove Emulation"
RUN_COMMAND_NAME = "Run Emulation"
DOCKER_RESOURCES_LOCATION = os.path.join(
ROOT_DIR, "emulation_system/resources/docker"
)
DOCKER_BUILD_ENV_VARS = {"COMPOSE_DOCKER_CLI_BUILD": "1", "DOCKER_BUILDKIT": "1"}
@property
@abc.abstractmethod
def compose_file_name(self) -> str:
"""Name of the compose file to use."""
...
@abc.abstractmethod
def build(self) -> Command:
"""Creates build command."""
...
@abc.abstractmethod
def run(self) -> Command:
"""Creates run command."""
...
@property
@abc.abstractmethod
def dry_run(self) -> bool:
"""Whether running as a dry run."""
...
def kill(self) -> Command:
"""Kill and remove any existing dev containers."""
return Command(
command_name=self.KILL_COMMAND_NAME,
command=f"docker-compose -f {self.compose_file_name} kill",
cwd=self.DOCKER_RESOURCES_LOCATION,
)
def remove(self) -> Command:
"""Kill and remove any existing dev containers."""
return Command(
command_name=self.REMOVE_COMMAND_NAME,
command=f"docker-compose -f {self.compose_file_name} rm -f",
cwd=self.DOCKER_RESOURCES_LOCATION,
)
def _get_commands(self) -> CommandList:
"""Returns list of commands that will be run with an emulation command."""
return CommandList(
command_list=[
self.kill(),
self.remove(),
self.build(),
self.run(),
],
dry_run=self.dry_run,
)
@dataclass
class ProdEmulationCommandCreator(AbstractEmulationCommandCreator):
"""Class to build docker commands for creating a Production Emulator.
Supports `build`, `clean`, and `run` commands.
"""
detached: bool = False
ot3_firmware_download_location: str = ""
modules_download_location: str = ""
opentrons_download_location: str = ""
dry_run: bool = False
# Pulled from Dockerfile in root of repo
OT3_FIRMWARE_DOCKER_BUILD_ARG_NAME = "FIRMWARE_SOURCE_DOWNLOAD_LOCATION"
MODULES_DOCKER_BUILD_ARG_NAME = "MODULE_SOURCE_DOWNLOAD_LOCATION"
OPENTRONS_DOCKER_BUILD_ARG_NAME = "OPENTRONS_SOURCE_DOWNLOAD_LOCATION"
@property
def compose_file_name(self) -> str:
"""Compose file name to use."""
return "docker-compose.yaml"
@classmethod
def from_cli_input(
cls, args: argparse.Namespace, settings: OpentronsEmulationConfiguration
) -> ProdEmulationCommandCreator:
"""Factory method to convert CLI input into a ProdEmulatorCreator object."""
download_locations = settings.emulation_settings.source_download_locations
return cls(
detached=args.detached,
ot3_firmware_download_location=cls._parse_download_location(
"ot3_firmware", args.ot3_firmware_repo_sha, download_locations
),
modules_download_location=cls._parse_download_location(
"modules", args.opentrons_modules_repo_sha, download_locations
),
opentrons_download_location=cls._parse_download_location(
"opentrons", args.opentrons_repo_sha, download_locations
),
dry_run=args.dry_run,
)
@staticmethod
def _parse_download_location(
key: str, location: str, download_locations: SourceDownloadLocations
) -> str:
"""Parse download location into a downloadable url."""
if location == LATEST_KEYWORD:
download_location = download_locations.heads.__getattribute__(key)
else:
download_location = download_locations.commits.__getattribute__(
key
).replace("{{commit-sha}}", location)
return download_location
def build(self) -> Command:
"""Construct a docker-compose build command."""
cmd = (
f"docker-compose -f {self.compose_file_name} build "
f"--build-arg {self.OT3_FIRMWARE_DOCKER_BUILD_ARG_NAME}="
f"{self.ot3_firmware_download_location} "
f"--build-arg {self.MODULES_DOCKER_BUILD_ARG_NAME}="
f"{self.modules_download_location} "
f"--build-arg {self.OPENTRONS_DOCKER_BUILD_ARG_NAME}="
f"{self.opentrons_download_location} "
)
return Command(
command_name=self.BUILD_COMMAND_NAME,
command=cmd,
cwd=self.DOCKER_RESOURCES_LOCATION,
env=self.DOCKER_BUILD_ENV_VARS,
)
def run(self) -> Command:
"""Construct a docker-compose up command."""
cmd = f"docker-compose -f {self.compose_file_name} up"
if self.detached:
cmd += " -d"
return Command(
command_name=self.RUN_COMMAND_NAME,
command=cmd,
cwd=self.DOCKER_RESOURCES_LOCATION,
)
def get_commands(self) -> CommandList:
"""Get a list of commands to create emulation."""
return self._get_commands()
@dataclass
class DevEmulationCommandCreator(AbstractEmulationCommandCreator):
"""Command creator for `dev` sub-command of `emulation` command.
Supports `build`, `clean`, and `run` commands.
"""
OT3_FIRMWARE_DOCKER_ENV_VAR_NAME = "OT3_FIRMWARE_DIRECTORY"
MODULES_DOCKER_ENV_VAR_NAME = "OPENTRONS_MODULES_DIRECTORY"
OPENTRONS_DOCKER_ENV_VAR_NAME = "OPENTRONS_DIRECTORY"
detached: bool = False
ot3_firmware_path: str = ""
modules_path: str = ""
opentrons_path: str = ""
dry_run: bool = False
def _get_run_env_vars(self) -> Dict[str, str]:
"""Returns env vars necessary for run command."""
return {
self.OT3_FIRMWARE_DOCKER_ENV_VAR_NAME: self.ot3_firmware_path,
self.MODULES_DOCKER_ENV_VAR_NAME: self.modules_path,
self.OPENTRONS_DOCKER_ENV_VAR_NAME: self.opentrons_path,
}
def _get_build_env_vars(self) -> Dict[str, str]:
"""Returns env vars necessary for build command."""
default_vars_copy = self.DOCKER_BUILD_ENV_VARS.copy()
default_vars_copy.update(self._get_run_env_vars())
return default_vars_copy
@property
def compose_file_name(self) -> str:
"""Compose file name to use."""
return "docker-compose-dev.yaml"
@classmethod
def from_cli_input(
cls, args: argparse.Namespace, settings: OpentronsEmulationConfiguration
) -> DevEmulationCommandCreator:
"""Factory method to convert CLI input into a DevEmulatorCreator object."""
return cls(
detached=args.detached,
ot3_firmware_path=args.ot3_firmware_repo_path,
modules_path=args.opentrons_modules_repo_path,
opentrons_path=args.opentrons_repo_path,
dry_run=args.dry_run,
)
def build(self) -> Command:
"""Construct a docker-compose build command."""
# Need to specify env vars to satisfy docker-compose file even though nothing
# is done with the env vars
return Command(
command_name=self.BUILD_COMMAND_NAME,
command=f"docker-compose -f {self.compose_file_name} build",
cwd=self.DOCKER_RESOURCES_LOCATION,
env=self._get_build_env_vars(),
)
def run(self) -> Command:
"""Construct a docker-compose up command."""
cmd = f"docker-compose -f {self.compose_file_name} up"
if self.detached:
cmd += " -d"
return Command(
command_name=self.RUN_COMMAND_NAME,
command=cmd,
cwd=self.DOCKER_RESOURCES_LOCATION,
env=self._get_run_env_vars(),
)
def get_commands(self) -> CommandList:
"""Get a list of commands to create emulation."""
return self._get_commands()
| [
37811,
15580,
82,
3141,
329,
38274,
526,
15931,
198,
198,
6738,
11593,
37443,
834,
1330,
37647,
198,
198,
11748,
450,
66,
198,
11748,
1822,
29572,
198,
11748,
28686,
198,
6738,
4818,
330,
28958,
1330,
4818,
330,
31172,
198,
6738,
33829,
... | 2.266308 | 4,277 |
from flask import render_template
from flask_restful import Api, reqparse
from models import app
from resources import UserAuthentication, UserRefreshToken, UserRegistration, UserLogout, Todos, Todo
from flask_moment import Moment
moment = Moment(app)
api = Api(app)
api.add_resource(UserRegistration, '/registration')
api.add_resource(UserAuthentication, '/authentication')
api.add_resource(UserRefreshToken, '/refresh')
api.add_resource(UserLogout, '/logout')
api.add_resource(Todos, '/todos')
api.add_resource(Todo, '/todo/<string:todo_id>')
@app.route('/', methods=['GET', 'POST'])
@app.route('/docs', methods=['GET'])
@app.route('/playground', methods=['GET','POST'])
# Start app
if __name__ == '__main__':
app.run(debug=True) | [
6738,
42903,
1330,
8543,
62,
28243,
198,
6738,
42903,
62,
2118,
913,
1330,
5949,
72,
11,
43089,
29572,
198,
6738,
4981,
1330,
598,
198,
6738,
4133,
1330,
11787,
47649,
3299,
11,
11787,
8134,
3447,
30642,
11,
11787,
47133,
11,
11787,
111... | 2.924901 | 253 |
from flask import Flask, jsonify, request
from flask_cors import CORS
from os import environ
import pandas as pd
# To do : Filter away very small amounts
app = Flask(__name__)
CORS(app, resources={r"/*": {"origins": "*"}})
@app.route('/compute_weights',methods=['POST'])
if __name__ == "__main__":
app.run(host="0.0.0.0",port=5001,debug=environ.get('PROD_ENV') != False) | [
6738,
42903,
1330,
46947,
11,
33918,
1958,
11,
2581,
198,
6738,
42903,
62,
66,
669,
1330,
327,
20673,
198,
6738,
28686,
1330,
551,
2268,
198,
11748,
19798,
292,
355,
279,
67,
198,
198,
2,
1675,
466,
1058,
25853,
1497,
845,
1402,
6867,... | 2.625 | 144 |
# -*- coding: utf-8 -*-
# <nbformat>3.0</nbformat>
# <codecell>
# bem: triangulation and fmm/bem electrostatics tools
#
# Copyright (C) 2011-2012 Robert Jordens <jordens@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# <headingcell level=1>
# `bem` 3D electrostatics example
# <codecell>
import logging, os
import numpy as np
import matplotlib.pyplot as plt
from bem import Electrodes, Sphere, Mesh, Grid, Configuration, Result
from bem.formats import stl
# <codecell>
# base file name for outputs and inputs is the script name
try:
# works only if we are a script
prefix = os.path.splitext(__file__)[0]
except NameError:
# fallback for notebooks
prefix = "SimpleTrap"
# <codecell>
# scale to natural units (ion height)
scale = 40e-6
use_stl = True
if not use_stl:
# load electrode faces from loops
ele = Electrodes.from_trap(open("%s.ele" % prefix), scale)
# initial triangulation, area 20, quiet
mesh = Mesh.from_electrodes(ele)
mesh.triangulate(opts="qa10Q")
else:
# load electrode faces from colored stl
s = stl.read_stl(open("%s.stl" % prefix, "rb"))
mesh = Mesh.from_mesh(stl.stl_to_mesh(*s, scale=scale/1e-6,
rename={9495: "DC1", 17962: "DC3", 18994: "DC5",
18869: "DC2", 20943: "RF", 18129: "DC4"}))
# <codecell>
# <codecell>
# set .1 max area within 3
mesh.areas_from_constraints(Sphere(center=np.array([0, 0, 1.]),
radius=2, inside=.2, outside=10.))
# retriangulate quality and quiet with areas
mesh.triangulate(opts="qQ", new=False)
# save base mesh to vtk
mesh.to_vtk(prefix)
# grid to evalute potential and fields at
n, s = 2*10, .1
grid = Grid(center=(0, 0, 1.5), step=(s, s, s), shape=(n, n, n))
# generate electrode potential configurations to simulate
# use regexps to match electrode names
jobs = list(Configuration.select(mesh, "DC.*", "RF"))
# run the different electrodes on the parallel pool
#pmap = Pool().map # parallel map
pmap = map # serial map
pmap(run_job, ((job, grid, prefix) for job in jobs))
# <codecell>
# isocontour plot of the RF pseudopotential radially
result = Result.from_vtk(prefix, "RF")
p = result.pseudo_potential
x = grid.to_mgrid()[:, p.shape[0]/2]
p = p[p.shape[0]/2]
fig, ax = plt.subplots()
ax.set_aspect("equal")
ax.contour(x[1], x[2], p, levels=np.linspace(0, 2e-2, 20), cmap=plt.cm.Reds)
# <codecell>
fig, ax = plt.subplots(subplot_kw=dict(aspect="equal"))
mesh.plot(ax)
# <codecell>
# explore it in fancy 3D
# fire up a mayavi2 window showing base mesh, charges on final mesh
# and isosurfaces of the pseudopotential
Result.view(prefix, "RF")
# need to start the full eventloop for the window.
# close it to return control to the notebook
from pyface.api import GUI
GUI().start_event_loop()
# <codecell>
from electrode import System, GridElectrode
# load the electrostatics results into a electrode.System()
s = System()
for name in "DC1 DC2 DC3 DC4 DC5 RF".split():
r = Result.from_vtk(prefix, name)
e = GridElectrode.from_result(r)
e.name = name
s.append(e)
s["RF"].rf = 1.
# <codecell>
from scipy.constants import atomic_mass
x0 = s.minimum((0, 0, 1.))
for _ in s.analyze_static(x0, m=25*atomic_mass, u=50.,
l=40e-6, o=100e6*2*np.pi):
print _
# <codecell>
n = 30
#xyz = np.mgrid[-.1:.1:1j*n, -.1:.1:1j*n, 1.12:2]
#xyz = np.mgrid[0:1, -.02:.02:1j*n, .5:1.5:1j*n]
xyz = grid.to_mgrid()
p = s.potential(xyz.reshape(3, -1).T, 0).reshape(xyz[0].shape)
v = np.linspace(0, 2e-2, 21)
fig, ax = plt.subplots()
ax.set_aspect("equal")
ax.contour(xyz[1, 10, :, :], xyz[2, 10, :, :], p[10, :, :], v, cmap=plt.cm.Reds_r)
# <codecell>
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
2,
1279,
46803,
18982,
29,
18,
13,
15,
3556,
46803,
18982,
29,
198,
198,
2,
1279,
8189,
3846,
29,
198,
198,
2,
220,
220,
307,
76,
25,
1333,
648,
1741,
290,
277,
30... | 2.509145 | 1,695 |
"""
MUV dataset loader.
"""
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import os
import numpy as np
import shutil
import deepchem as dc
def load_muv(featurizer='ECFP', split='index'):
"""Load MUV datasets. Does not do train/test split"""
# Load MUV dataset
print("About to load MUV dataset.")
current_dir = os.path.dirname(os.path.realpath(__file__))
dataset_file = os.path.join(
current_dir, "../../datasets/muv.csv.gz")
# Featurize MUV dataset
print("About to featurize MUV dataset.")
if featurizer == 'ECFP':
featurizer_func = dc.feat.CircularFingerprint(size=1024)
elif featurizer == 'GraphConv':
featurizer_func = dc.feat.ConvMolFeaturizer()
MUV_tasks = sorted(['MUV-692', 'MUV-689', 'MUV-846', 'MUV-859', 'MUV-644',
'MUV-548', 'MUV-852', 'MUV-600', 'MUV-810', 'MUV-712',
'MUV-737', 'MUV-858', 'MUV-713', 'MUV-733', 'MUV-652',
'MUV-466', 'MUV-832'])
loader = dc.data.CSVLoader(
tasks=MUV_tasks, smiles_field="smiles", featurizer=featurizer_func)
dataset = loader.featurize(dataset_file)
# Initialize transformers
transformers = [
dc.trans.BalancingTransformer(transform_w=True, dataset=dataset)]
print("About to transform data")
for transformer in transformers:
dataset = transformer.transform(dataset)
splitters = {'index': dc.splits.IndexSplitter(),
'random': dc.splits.RandomSplitter(),
'scaffold': dc.splits.ScaffoldSplitter()}
splitter = splitters[split]
train, valid, test = splitter.train_valid_test_split(dataset)
return MUV_tasks, (train, valid, test), transformers
| [
37811,
198,
44,
31667,
27039,
40213,
13,
198,
37811,
198,
6738,
11593,
37443,
834,
1330,
3601,
62,
8818,
198,
6738,
11593,
37443,
834,
1330,
7297,
198,
6738,
11593,
37443,
834,
1330,
28000,
1098,
62,
17201,
874,
198,
198,
11748,
28686,
... | 2.411683 | 719 |
__author__ = 'yinjun'
# @param s, a string
# @return an integer
| [
834,
9800,
834,
796,
705,
88,
259,
29741,
6,
198,
220,
220,
220,
1303,
2488,
17143,
264,
11,
257,
4731,
198,
220,
220,
220,
1303,
2488,
7783,
281,
18253,
628,
198
] | 2.387097 | 31 |
from django.db import models
from django.db.models.fields import BooleanField, CharField, FloatField, IntegerField, SmallIntegerField
from django.db.models.fields.files import ImageField
from .user import User
from .inmueble import Inmueble
| [
6738,
42625,
14208,
13,
9945,
1330,
4981,
198,
6738,
42625,
14208,
13,
9945,
13,
27530,
13,
25747,
1330,
41146,
15878,
11,
3178,
15878,
11,
48436,
15878,
11,
34142,
15878,
11,
10452,
46541,
15878,
198,
6738,
42625,
14208,
13,
9945,
13,
... | 3.597015 | 67 |
import json
from chargebee.model import Model
from chargebee import request
from chargebee import APIError
| [
11748,
33918,
198,
6738,
3877,
20963,
13,
19849,
1330,
9104,
198,
6738,
3877,
20963,
1330,
2581,
198,
6738,
3877,
20963,
1330,
7824,
12331,
628
] | 4.5 | 24 |
#Written by Gary Zeri
#Computer Science Major at Chapman University, Member of Dr. LaRue's CatLab
#Abstract Class to define the properties of all methods used to generate a Potential Energy Surface
from compChemGlobal import *
from graphableData import *
###################################################################################
###################################################################################
#Override the graph function to account for the fact that PESMethods will compute
#data all at once instead of in real time due to speed issues
###################################################################################
###################################################################################
###################################################################################
################################################################################### | [
2,
25354,
416,
10936,
1168,
33442,
198,
2,
34556,
5800,
8386,
379,
29045,
2059,
11,
10239,
286,
1583,
13,
4689,
49,
518,
338,
5181,
17822,
198,
198,
2,
23839,
5016,
284,
8160,
262,
6608,
286,
477,
5050,
973,
284,
7716,
257,
32480,
6... | 6.070968 | 155 |
"""The builtin int type (W_AbstractInt) and the base impl (W_IntObject)
based on rpython ints.
In order to have the same behavior running on CPython, and after RPython
translation this module uses rarithmetic.ovfcheck to explicitly check
for overflows, something CPython does not do anymore.
"""
import operator
import sys
from rpython.rlib import jit
from rpython.rlib.objectmodel import instantiate, enforceargs
from rpython.rlib.rarithmetic import (
LONG_BIT, intmask, is_valid_int, ovfcheck, r_longlong, r_uint,
string_to_int)
from rpython.rlib.rbigint import (
InvalidEndiannessError, InvalidSignednessError, rbigint)
from rpython.rlib.rfloat import DBL_MANT_DIG
from rpython.rlib.rstring import (
ParseStringError, ParseStringOverflowError)
from rpython.tool.sourcetools import func_renamer, func_with_new_name
from pypy.interpreter import typedef
from pypy.interpreter.baseobjspace import W_Root
from pypy.interpreter.error import OperationError, oefmt
from pypy.interpreter.gateway import (
WrappedDefault, applevel, interp2app, interpindirect2app, unwrap_spec)
from pypy.interpreter.typedef import TypeDef
from pypy.objspace.std import newformat
from pypy.objspace.std.util import (
BINARY_OPS, CMP_OPS, COMMUTATIVE_OPS, IDTAG_INT, IDTAG_SHIFT, wrap_parsestringerror)
SENTINEL = object()
HASH_BITS = 61 if sys.maxsize > 2 ** 31 - 1 else 31
HASH_MODULUS = 2 ** HASH_BITS - 1
def _pow(space, iv, iw, iz):
"""Helper for pow"""
if iz == 0:
return _pow_nomod(iv, iw)
else:
return _pow_mod(space, iv, iw, iz)
@jit.look_inside_iff(lambda iv, iw: jit.isconstant(iw))
@jit.look_inside_iff(lambda space, iv, iw, iz:
jit.isconstant(iw) and jit.isconstant(iz))
divmod_near = applevel('''
def divmod_near(a, b):
"""Return a pair (q, r) such that a = b * q + r, and abs(r)
<= abs(b)/2, with equality possible only if q is even. In
other words, q == a / b, rounded to the nearest integer using
round-half-to-even."""
q, r = divmod(a, b)
# round up if either r / b > 0.5, or r / b == 0.5 and q is
# odd. The expression r / b > 0.5 is equivalent to 2 * r > b
# if b is positive, 2 * r < b if b negative.
greater_than_half = 2*r > b if b > 0 else 2*r < b
exactly_half = 2*r == b
if greater_than_half or exactly_half and q % 2 == 1:
q += 1
r -= b
return q, r
''', filename=__file__).interphook('divmod_near')
def _recover_with_smalllong(space):
"""True if there is a chance that a SmallLong would fit when an Int
does not
"""
return (space.config.objspace.std.withsmalllong and
sys.maxint == 2147483647)
@enforceargs(None, None, W_AbstractIntObject, typecheck=False)
@enforceargs(None, W_AbstractIntObject, typecheck=False)
W_AbstractIntObject.typedef = TypeDef("int",
__doc__ = """int([x]) -> integer
int(x, base=10) -> integer
Convert a number or string to an integer, or return 0 if no arguments
are given. If x is a number, return x.__int__(). For floating point
numbers, this truncates towards zero.
If x is not a number or if base is given, then x must be a string,
bytes, or bytearray instance representing an integer literal in the
given base. The literal can be preceded by '+' or '-' and be surrounded
by whitespace. The base defaults to 10. Valid bases are 0 and 2-36.
Base 0 means to interpret the base from the string as an integer literal.
>>> int('0b100', base=0)
4""",
__new__ = interp2app(W_IntObject.descr_new),
numerator = typedef.GetSetProperty(
W_AbstractIntObject.descr_get_numerator,
doc="the numerator of a rational number in lowest terms"),
denominator = typedef.GetSetProperty(
W_AbstractIntObject.descr_get_denominator,
doc="the denominator of a rational number in lowest terms"),
real = typedef.GetSetProperty(
W_AbstractIntObject.descr_get_real,
doc="the real part of a complex number"),
imag = typedef.GetSetProperty(
W_AbstractIntObject.descr_get_imag,
doc="the imaginary part of a complex number"),
from_bytes = interp2app(W_AbstractIntObject.descr_from_bytes,
as_classmethod=True),
to_bytes = interpindirect2app(W_AbstractIntObject.descr_to_bytes),
__repr__ = interpindirect2app(W_AbstractIntObject.descr_repr),
__str__ = interpindirect2app(W_AbstractIntObject.descr_str),
conjugate = interpindirect2app(W_AbstractIntObject.descr_conjugate),
bit_length = interpindirect2app(W_AbstractIntObject.descr_bit_length),
__format__ = interpindirect2app(W_AbstractIntObject.descr_format),
__hash__ = interpindirect2app(W_AbstractIntObject.descr_hash),
__getnewargs__ = interpindirect2app(W_AbstractIntObject.descr_getnewargs),
__int__ = interpindirect2app(W_AbstractIntObject.int),
__index__ = interpindirect2app(W_AbstractIntObject.descr_index),
__trunc__ = interpindirect2app(W_AbstractIntObject.descr_trunc),
__float__ = interpindirect2app(W_AbstractIntObject.descr_float),
__round__ = interpindirect2app(W_AbstractIntObject.descr_round),
__pos__ = interpindirect2app(W_AbstractIntObject.descr_pos),
__neg__ = interpindirect2app(W_AbstractIntObject.descr_neg),
__abs__ = interpindirect2app(W_AbstractIntObject.descr_abs),
__bool__ = interpindirect2app(W_AbstractIntObject.descr_bool),
__invert__ = interpindirect2app(W_AbstractIntObject.descr_invert),
__floor__ = interpindirect2app(W_AbstractIntObject.descr_floor),
__ceil__ = interpindirect2app(W_AbstractIntObject.descr_ceil),
__lt__ = interpindirect2app(W_AbstractIntObject.descr_lt),
__le__ = interpindirect2app(W_AbstractIntObject.descr_le),
__eq__ = interpindirect2app(W_AbstractIntObject.descr_eq),
__ne__ = interpindirect2app(W_AbstractIntObject.descr_ne),
__gt__ = interpindirect2app(W_AbstractIntObject.descr_gt),
__ge__ = interpindirect2app(W_AbstractIntObject.descr_ge),
__add__ = interpindirect2app(W_AbstractIntObject.descr_add),
__radd__ = interpindirect2app(W_AbstractIntObject.descr_radd),
__sub__ = interpindirect2app(W_AbstractIntObject.descr_sub),
__rsub__ = interpindirect2app(W_AbstractIntObject.descr_rsub),
__mul__ = interpindirect2app(W_AbstractIntObject.descr_mul),
__rmul__ = interpindirect2app(W_AbstractIntObject.descr_rmul),
__and__ = interpindirect2app(W_AbstractIntObject.descr_and),
__rand__ = interpindirect2app(W_AbstractIntObject.descr_rand),
__or__ = interpindirect2app(W_AbstractIntObject.descr_or),
__ror__ = interpindirect2app(W_AbstractIntObject.descr_ror),
__xor__ = interpindirect2app(W_AbstractIntObject.descr_xor),
__rxor__ = interpindirect2app(W_AbstractIntObject.descr_rxor),
__lshift__ = interpindirect2app(W_AbstractIntObject.descr_lshift),
__rlshift__ = interpindirect2app(W_AbstractIntObject.descr_rlshift),
__rshift__ = interpindirect2app(W_AbstractIntObject.descr_rshift),
__rrshift__ = interpindirect2app(W_AbstractIntObject.descr_rrshift),
__floordiv__ = interpindirect2app(W_AbstractIntObject.descr_floordiv),
__rfloordiv__ = interpindirect2app(W_AbstractIntObject.descr_rfloordiv),
__truediv__ = interpindirect2app(W_AbstractIntObject.descr_truediv),
__rtruediv__ = interpindirect2app(W_AbstractIntObject.descr_rtruediv),
__mod__ = interpindirect2app(W_AbstractIntObject.descr_mod),
__rmod__ = interpindirect2app(W_AbstractIntObject.descr_rmod),
__divmod__ = interpindirect2app(W_AbstractIntObject.descr_divmod),
__rdivmod__ = interpindirect2app(W_AbstractIntObject.descr_rdivmod),
__pow__ = interpindirect2app(W_AbstractIntObject.descr_pow),
__rpow__ = interpindirect2app(W_AbstractIntObject.descr_rpow),
)
| [
37811,
464,
3170,
259,
493,
2099,
357,
54,
62,
23839,
5317,
8,
290,
262,
2779,
4114,
357,
54,
62,
5317,
10267,
8,
198,
3106,
319,
374,
29412,
493,
82,
13,
198,
198,
818,
1502,
284,
423,
262,
976,
4069,
2491,
319,
16932,
7535,
11,
... | 2.512584 | 3,139 |
#import libraries
import MySQLdb
import ConfigParser
from Tkinter import *
from tkMessageBox import *
emain = Tk()
emain.title('Input')
Label(emain, text="Enter table name to delete : ").grid(row=1,column=2,sticky=W,pady=4)
inp_tbl = Entry(emain)
inp_tbl.grid(row=1,column=4,padx=12)
inp_tbl.bind('<Return>', delete)
inp_tbl.focus()
emain.mainloop()
| [
2,
11748,
12782,
201,
198,
11748,
33476,
9945,
201,
198,
11748,
17056,
46677,
201,
198,
6738,
309,
74,
3849,
1330,
1635,
201,
198,
6738,
256,
74,
12837,
14253,
1330,
1635,
201,
198,
201,
198,
201,
198,
368,
391,
796,
309,
74,
3419,
... | 2.246988 | 166 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""SSH tunneling module.
Set up a forward tunnel across an SSH server, using paramiko. A local port
(given with -p) is forwarded across an SSH session to an address:port from
the SSH server. This is similar to the openssh -L option.
"""
try:
import eventlet
eventlet.monkey_patch()
from eventlet.green import threading
from eventlet.green import time
except ImportError:
import threading
import time
pass
import logging
import select
import socket
try:
import SocketServer
except ImportError:
import socketserver as SocketServer
import paramiko
LOG = logging.getLogger(__name__)
class TunnelServer(SocketServer.ThreadingTCPServer):
"""Serve on a local ephemeral port.
Clients will connect to that port/server.
"""
daemon_threads = True
allow_reuse_address = True
class TunnelHandler(SocketServer.BaseRequestHandler):
"""Handle forwarding of packets."""
def handle(self):
"""Do all the work required to service a request.
The request is available as self.request, the client address as
self.client_address, and the server instance as self.server, in
case it needs to access per-server information.
This implementation will forward packets.
"""
try:
chan = self.ssh_transport.open_channel('direct-tcpip',
self.target_address,
self.request.getpeername())
except Exception as exc:
LOG.error('Incoming request to %s:%s failed',
self.target_address[0],
self.target_address[1],
exc_info=exc)
return
if chan is None:
LOG.error('Incoming request to %s:%s was rejected '
'by the SSH server.',
self.target_address[0],
self.target_address[1])
return
while True:
r, w, x = select.select([self.request, chan], [], [])
if self.request in r:
data = self.request.recv(1024)
if len(data) == 0:
break
chan.send(data)
if chan in r:
data = chan.recv(1024)
if len(data) == 0:
break
self.request.send(data)
try:
peername = None
peername = str(self.request.getpeername())
except socket.error as exc:
LOG.warning("Couldn't fetch peername.", exc_info=exc)
chan.close()
self.request.close()
LOG.info("Tunnel closed from '%s'", peername or 'unnamed peer')
class Tunnel(object): # pylint: disable=R0902
"""Create a TCP server which will use TunnelHandler."""
def __init__(self, target_host, target_port,
sshclient, tunnel_host='localhost',
tunnel_port=0):
"""Constructor."""
if not isinstance(sshclient, paramiko.SSHClient):
raise TypeError("'sshclient' must be an instance of "
"paramiko.SSHClient.")
self.target_host = target_host
self.target_port = target_port
self.target_address = (target_host, target_port)
self.address = (tunnel_host, tunnel_port)
self._tunnel = None
self._tunnel_thread = None
self.sshclient = sshclient
self._ssh_transport = self.get_sshclient_transport(
self.sshclient)
TunnelHandler.target_address = self.target_address
TunnelHandler.ssh_transport = self._ssh_transport
self._tunnel = TunnelServer(self.address, TunnelHandler)
# reset attribute to the port it has actually been set to
self.address = self._tunnel.server_address
tunnel_host, self.tunnel_port = self.address
def get_sshclient_transport(self, sshclient):
"""Get the sshclient's transport.
Connect the sshclient, that has been passed in and return its
transport.
"""
sshclient.connect()
return sshclient.get_transport()
def serve_forever(self, async=True):
"""Serve the tunnel forever.
if async is True, this will be done in a background thread
"""
if not async:
self._tunnel.serve_forever()
else:
self._tunnel_thread = threading.Thread(
target=self._tunnel.serve_forever)
self._tunnel_thread.start()
# cooperative yield
time.sleep(0)
def shutdown(self):
"""Stop serving the tunnel.
Also close the socket.
"""
self._tunnel.shutdown()
self._tunnel.socket.close()
| [
2,
220,
220,
220,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
345,
743,
198,
2,
220,
220,
220,
407,
779,
428,
2393,
2845,
287,
11846,
351,
262,
13789,
13,
921,
743,
7330,
198,
2,
220,
220,
... | 2.310747 | 2,317 |
import subprocess
from pathlib import Path
from uuid import uuid4
from django.conf import settings
from django.http import FileResponse
from django.shortcuts import render
from ratelimit import UNSAFE as UNSAFE_METHODS
from ratelimit.decorators import ratelimit
from .forms import MetadataRemovalForm
@ratelimit(key="ip", rate="10/m", method=["GET", "HEAD", "OPTIONS", "CONNECT", "TRACE"])
@ratelimit(key="ip", rate="10/h", method=UNSAFE_METHODS, block=True)
| [
11748,
850,
14681,
198,
6738,
3108,
8019,
1330,
10644,
198,
6738,
334,
27112,
1330,
334,
27112,
19,
198,
198,
6738,
42625,
14208,
13,
10414,
1330,
6460,
198,
6738,
42625,
14208,
13,
4023,
1330,
9220,
31077,
198,
6738,
42625,
14208,
13,
... | 3.128378 | 148 |
from django.shortcuts import render, redirect
# Create your views here.
| [
6738,
42625,
14208,
13,
19509,
23779,
1330,
8543,
11,
18941,
198,
198,
2,
13610,
534,
5009,
994,
13,
198
] | 3.842105 | 19 |
"""Integration-ish tests for CLI endpoints.
We only mock subprocess calls since we cannot do multi-host testing.
Some of the test above don't verify much, but they at least ensure that all parts work well together.
"""
import os
import sys
from contextlib import contextmanager
from datetime import datetime
from unittest.mock import ANY, MagicMock, Mock, call, patch
import pytest
from click import BadParameter
from click.testing import CliRunner
from remote import entrypoints
from remote.configuration.classic import CONFIG_FILE_NAME, IGNORE_FILE_NAME, INDEX_FILE_NAME
from remote.configuration.toml import WORKSPACE_CONFIG
from remote.exceptions import RemoteExecutionError
TEST_HOST = "test-host1.example.com"
TEST_DIR = ".remotes/myproject"
TEST_CONFIG = f"{TEST_HOST}:{TEST_DIR}"
@contextmanager
@pytest.fixture
@pytest.mark.parametrize(
"connection, is_valid",
[
("host", True),
("host123", True),
("host.domain.com", True),
("ho-st.dom-ain.as1234", True),
("ho-st.dom-ain.as1234:/home/dir", True),
("ho-st.dom-ain.as1234:.home/dir.dir", True),
("ho-st.dom-ain.as1234:.home/dir.dir/123/", True),
("ho-st.dom-ain.as1234:.home/dir.dir/123/:something", False),
("ho-st.dom-ain.as1234::/home/dir", False),
],
)
@patch("remote.util.subprocess.run")
@patch(
"remote.configuration.toml.TomlConfigurationMedium.generate_remote_directory",
MagicMock(return_value=".remotes/myproject_foo"),
)
@patch("remote.util.subprocess.run")
@patch("remote.util.subprocess.run")
@patch("remote.util.subprocess.run")
@patch("remote.util.subprocess.run")
@patch("remote.util.subprocess.run")
@patch("remote.util.subprocess.run")
@patch("remote.util.subprocess.run")
@patch("remote.util.subprocess.run")
@patch(
"remote.entrypoints.datetime",
MagicMock(now=MagicMock(return_value=datetime(year=2020, month=7, day=13, hour=10, minute=11, second=12))),
)
@patch("remote.util.subprocess.run")
@patch(
"remote.entrypoints.datetime",
MagicMock(now=MagicMock(return_value=datetime(year=2020, month=7, day=13, hour=10, minute=11, second=12))),
)
@patch("remote.util.subprocess.run")
@pytest.mark.parametrize("label, host", [("usual", "host1"), ("unusual", "host2"), ("2", "host2"), ("3", "host3")])
@patch("remote.util.subprocess.run")
@patch("remote.util.subprocess.run")
@patch("remote.util.subprocess.run")
@patch("remote.util.subprocess.run")
@patch("remote.util.subprocess.run")
@patch("remote.util.subprocess.run")
@patch("remote.util.subprocess.run")
@patch("remote.util.subprocess.run")
@patch("remote.util.subprocess.run")
@patch("remote.util.subprocess.run")
@patch("remote.util.subprocess.run")
@patch("remote.util.subprocess.run")
@pytest.mark.parametrize(
"port_value, expected_output, expected_exit_code",
[
("bar:foo", "Please pass valid integer value for ports", 1),
("bar:5000", "Please pass valid integer value for ports", 1),
("bar:foo:foo", "Please pass a valid value to enable local port forwarding", 1),
("2.4:2.4", "Please pass valid integer value for ports", 1),
],
)
@patch("remote.util.subprocess.run")
@pytest.mark.parametrize(
"port_value, expected_port_forwarding, expected_exit_code",
[("5000", "5000:localhost:5000", 0), ("5000:5005", "5005:localhost:5000", 0)],
)
@pytest.mark.parametrize(
"entrypoint", [entrypoints.remote, entrypoints.remote_quick], ids=["remote", "remote-quick"],
)
@patch("remote.util.subprocess.run")
@patch("remote.util.subprocess.run")
def test_stream_changes(mock_run, tmp_workspace):
"""Ensure the execution with stream changes runs successfully"""
mock_run.return_value = Mock(returncode=0)
runner = CliRunner()
with cwd(tmp_workspace):
result = runner.invoke(entrypoints.remote, ["--stream-changes", "echo test"])
assert result.exit_code == 0
@patch("remote.explain.subprocess.run")
@patch("remote.util.subprocess.run")
| [
37811,
34500,
1358,
12,
680,
5254,
329,
43749,
886,
13033,
13,
198,
1135,
691,
15290,
850,
14681,
3848,
1201,
356,
2314,
466,
5021,
12,
4774,
4856,
13,
198,
198,
4366,
286,
262,
1332,
2029,
836,
470,
11767,
881,
11,
475,
484,
379,
1... | 2.665122 | 1,511 |
from pathlib import Path
from keras.layers import Conv2D, MaxPooling2D, Flatten, Dense
from keras.models import Sequential, model_from_json
from keras.datasets import mnist
from keras.utils import to_categorical
from keras.preprocessing import image
import matplotlib.pyplot as plt
import numpy as np
# %matplotlib inline
| [
6738,
3108,
8019,
1330,
10644,
198,
198,
6738,
41927,
292,
13,
75,
6962,
1330,
34872,
17,
35,
11,
5436,
27201,
278,
17,
35,
11,
1610,
41769,
11,
360,
1072,
198,
6738,
41927,
292,
13,
27530,
1330,
24604,
1843,
11,
2746,
62,
6738,
62,... | 3.114286 | 105 |
from __future__ import unicode_literals
from django.core.management import BaseCommand
from dbpedia_links_rating.rating.models import Link
| [
6738,
11593,
37443,
834,
1330,
28000,
1098,
62,
17201,
874,
198,
198,
6738,
42625,
14208,
13,
7295,
13,
27604,
1330,
7308,
21575,
198,
198,
6738,
20613,
50235,
62,
28751,
62,
8821,
13,
8821,
13,
27530,
1330,
7502,
198
] | 3.710526 | 38 |
import unittest
import pandas as pd
from utils import index as utils
from afo.driver import Driver
from test.test_fixtures_funcs import generate_list_str, get_test_file | [
11748,
555,
715,
395,
198,
198,
11748,
19798,
292,
355,
279,
67,
198,
198,
6738,
3384,
4487,
1330,
6376,
355,
3384,
4487,
198,
6738,
257,
6513,
13,
26230,
1330,
12434,
198,
6738,
1332,
13,
9288,
62,
69,
25506,
62,
12543,
6359,
1330,
... | 3.207547 | 53 |
# mask_decoder.py
#
# Sampson Mao, Anders Poirel
# 26-10-2019
#
# script for decoding the masks for each image in the training set from
# the .csv file, writing the output as a seperate .jpg file for each
# label-image pair
import pandas as pd
import numpy as np
from PIL import Image
def rle2mask(row, height = 1400, width = 2100):
"""
Parameters:
- row (Int): row being decoded in the csv file
- height (Int): height of image being decoded
- width (Int): width of image being decoded
Generates a mask from a row in the csv file for an image in the training
set, writing a .jpg file to the appropriate folder for the label
"""
if row['EncodedPixels'] == -1:
return
rows, cols = height, width
rle_string = row['EncodedPixels']
rle_numbers = [int(num_string) for num_string
in rle_string.split(' ')]
rle_pairs = np.array(rle_numbers).reshape(-1,2)
img = np.zeros(rows * cols, dtype = np.uint8)
for index, length in rle_pairs:
index -= 1
img[index : index + length] = 255
img = img.reshape(cols,rows)
img = img.T
pil_image = Image.fromarray(img)
label = row['Label']
mask_file = row['Image'] + '.jpg' + '_' + row['Label'] + '.jpg'
if label == 'Fish':
pil_image.save('../../data/processed/mask_fish/' + mask_file)
elif label == 'Flower':
pil_image.save('../../data/processed/mask_flower/' + mask_file)
elif label == 'Gravel':
pil_image.save('../../data/processed/mask_gravel/' + mask_file)
elif label == 'Sugar':
pil_image.save('../../data/processed/mask_sugar/' + mask_file)
if __name__ == 'main':
train = pd.read_csv('../../data/raw/train.csv')
train = train.fillna(-1)
train[['Image', 'Label']] = train['Image_Label'].str.split('_', expand=True)
train['Image'] = train['Image'].str.split('.').str[0]
train = train.drop('Image_Label', axis=1)
train.apply(rle2mask, axis = 1) | [
2,
9335,
62,
12501,
12342,
13,
9078,
198,
2,
220,
198,
2,
42168,
1559,
22828,
11,
25519,
7695,
557,
75,
198,
2,
2608,
12,
940,
12,
23344,
198,
2,
198,
2,
4226,
329,
39938,
262,
20680,
329,
1123,
2939,
287,
262,
3047,
900,
422,
1... | 2.432039 | 824 |
import os
import requests
import pandas as pd
from datetime import datetime
from easydict import EasyDict as edict
# ----------------------------------------------------------------------------------------------------------------------
# Global variables
# ----------------------------------------------------------------------------------------------------------------------
BASE_URL = "https://query1.finance.yahoo.com/v7/finance/download/{}?period1={}&period2={}&interval=1d&events=history"
# ----------------------------------------------------------------------------------------------------------------------
# Auxiliary functions
# ----------------------------------------------------------------------------------------------------------------------
def _make_request(url):
""" Make a request and return the table data as dataframe.
:param url: resource url
:return: pandas dataframe
"""
print("Pulling data from:\n{}".format(url))
req = requests.get(url)
url_content = req.content
# TODO: pass the content to a dataframe without saving .csv to disk
with open("tmp.csv", "wb") as fp:
fp.write(url_content)
df = pd.read_csv("tmp.csv")
os.remove("tmp.csv")
return df
def _download_index(index_name, start_date, end_date):
""" Pull date for a specific index.
:param index_name: Market index short name
:param start_date: string in YYYY-MM-DD format
:param end_date: string in YYYY-MM-DD format
:return: pandas dataframe
"""
start_timestamp = int(datetime.timestamp(datetime.strptime(start_date, "%Y-%m-%d")))
end_timstamp = int(datetime.timestamp(datetime.strptime(end_date, "%Y-%m-%d")))
url = BASE_URL.format(requests.utils.quote("^{}".format(index_name)), start_timestamp, end_timstamp)
df = _make_request(url)
if len(df) == 0:
# indices like "BTC-USD" and "DAX" don't need a prepended "^" in the request URL.
print("Failed to pull data data from:\n'{}'".format(url))
url = BASE_URL.format(index_name, start_timestamp, end_timstamp)
df = _make_request(url)
if len(df) == 0:
raise RuntimeError("Failed to pull date for index: ", index_name)
return df
# ----------------------------------------------------------------------------------------------------------------------
# Utility functions
# ----------------------------------------------------------------------------------------------------------------------
def download(start_date, end_date, indices=None):
""" Download all data for the predefined `indexes`
:param start_date: string in YYYY-MM-DD format
:param end_date: string in YYYY-MM-DD format
:param indices: list of market index names
:return: dictionary with dataframes
"""
if indices is None:
indices = ["AORD", "BTC-USD", "DAX", "DJI", "HSI", "N225", "NYA", "GSPC"]
if not isinstance(indices, list):
raise ValueError("Argument `indices` must be a list of strings.")
dfs = edict({})
for idx in indices:
idx_key = idx.lower().replace("-", "_")
idx_value = _download_index(idx, start_date, end_date)
dfs.update({idx_key:idx_value})
return dfs
| [
11748,
28686,
198,
11748,
7007,
198,
11748,
19798,
292,
355,
279,
67,
198,
6738,
4818,
8079,
1330,
4818,
8079,
198,
6738,
2562,
11600,
1330,
16789,
35,
713,
355,
1225,
713,
628,
198,
2,
16529,
3880,
19351,
438,
198,
2,
8060,
9633,
198... | 3.176355 | 1,015 |
#!/usr/bin/env python
from scapy.all import *
pkts = []
pkts += Ether(dst='ff:ff:ff:ff:ff:ff', src='00:01:02:03:04:05')/ \
Dot1Q(vlan=6)/ \
IP(dst='1.2.3.4', src='5.6.7.8')/UDP(dport=53)/DNS(id=1, rd=1, qd=DNSQR(qname='example.com'))
pkts += Ether(dst='ff:ff:ff:ff:ff:ff', src='00:01:02:03:04:05')/ \
Dot1Q(vlan=6)/ \
IP(dst='1.2.3.4', src='5.6.7.8')/TCP(sport=6666, dport=80, flags='P''A')/"GET / HTTP/1.1\r\nHost: example.com\r\n\r\n"
wrpcap('input.pcap', pkts)
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
6738,
629,
12826,
13,
439,
1330,
1635,
198,
198,
79,
74,
912,
796,
17635,
198,
198,
79,
74,
912,
15853,
20017,
7,
67,
301,
11639,
487,
25,
487,
25,
487,
25,
487,
25,
487,
25,
487... | 1.718861 | 281 |
import os
import re
import glob
from pathlib import Path
from tqdm import tqdm
import supernotelib as sn
SUPERNOTE_PATH = '/run/user/1000/gvfs/mtp:host=rockchip_Supernote_A5_X_SN100B10004997/Supernote/Note'
OUTPUT_PATH = '../Notes_synced'
assert os.path.exists(SUPERNOTE_PATH)
assert os.path.exists(OUTPUT_PATH)
for p in tqdm(glob.glob(f'{SUPERNOTE_PATH}/**/*.note', recursive=True)):
out_path = re.sub(SUPERNOTE_PATH, OUTPUT_PATH, p)
out_path = re.sub(r'.note$', '.pdf', out_path)
# make dirs if needed
os.makedirs(Path(out_path).parent, exist_ok=True)
# convert to pdf
convert_to_pdf(
notebook_path=p,
output_path=out_path
)
| [
11748,
28686,
198,
11748,
302,
198,
11748,
15095,
198,
6738,
3108,
8019,
1330,
10644,
198,
6738,
256,
80,
36020,
1330,
256,
80,
36020,
198,
11748,
2208,
1662,
417,
571,
355,
3013,
198,
198,
40331,
1137,
16580,
62,
34219,
796,
31051,
514... | 2.298305 | 295 |
if __name__ == '__main__':
from setuptools import setup, Extension
_synctex_parser = Extension('pysynctex._synctex_parser',
sources=['wrapper/synctex_parser.i',
'wrapper/synctex_package/synctex_parser.c',
'wrapper/synctex_package/synctex_parser_utils.c'],
include_dirs=['wrapper/synctex_package'])
setup(name='PySyncTeX',
version='0.2.0',
author='Jan Kumor',
author_email='jan.kumor@gmail.com',
description='Python wrapper for SyncTeX parser C library.',
long_description=open('README.rst').read(),
url='https://github.com/elohhim/PySyncTeX',
license="MIT",
platforms='ANY',
packages=['pysynctex'],
ext_modules=[_synctex_parser],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX :: Linux',
'Natural Language :: English',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Text Processing :: Markup :: LaTeX',
]
)
| [
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834,
10354,
198,
220,
220,
220,
422,
900,
37623,
10141,
1330,
9058,
11,
27995,
198,
220,
220,
220,
220,
198,
220,
220,
220,
4808,
28869,
310,
1069,
62,
48610,
796,
27995,
10786,
79,
893,
... | 2.022355 | 671 |
import os
import time
import errno
import socket
from nrepl._compat import urlparse
from nrepl.clj import get_ns
from nrepl.utils import console, get_ts, traverse_dir
from nrepl.client import Client
COLOR_GRAY = '2'
COLOR_RED = '0;31'
| [
11748,
28686,
198,
11748,
640,
198,
11748,
11454,
3919,
198,
11748,
17802,
198,
198,
6738,
299,
35666,
13557,
5589,
265,
1330,
19016,
29572,
198,
6738,
299,
35666,
13,
565,
73,
1330,
651,
62,
5907,
198,
6738,
299,
35666,
13,
26791,
1330... | 3.0125 | 80 |
import numpy as np
import os
import json
import tensorflow as tf
from tensorflow.keras.models import model_from_json
from tensorflow.keras.preprocessing import image
from tensorflow.compat.v1 import ConfigProto
from tqdm import tqdm
import shutil
from animal_classification_report_generator import Animal_Classification_Report
# root_dir = os.getcwd()
tf.keras.backend.clear_session()
config = tf.compat.v1.ConfigProto(gpu_options =
tf.compat.v1.GPUOptions(per_process_gpu_memory_fraction=0.9)
)
config.gpu_options.allow_growth = True
session = tf.compat.v1.Session(config=config)
tf.compat.v1.keras.backend.set_session(session)
# load the model from json file
json_file = open('animal_classifier_0.4dropoutxceptionnet.json','r')
loaded_model_json = json_file.read()
json_file.close()
model = model_from_json(loaded_model_json)
# Load the weights into a model
model.load_weights('animal_classifier_xceptionnet_05-0.925932.h5')
print("Model Loaded Successfully")
folder_path = input("Please provide the path where your folder containing images are present")
folder_name = folder_path.split('/')[-1]
if not os.path.exists(os.path.join(folder_path,folder_name + '_' + 'Animals')):
os.mkdir(os.path.join(folder_path,folder_name+'_'+'Animals'))
else:
pass
if not os.path.exists(os.path.join(folder_path, folder_name + '_' + 'No_Animals')):
os.mkdir(os.path.join(folder_path,folder_name+'_'+'No_Animals'))
else:
pass
animal_folder_path = os.path.join(folder_path,folder_name+'_'+'Animals')
no_animal_folder_path = os.path.join(folder_path,folder_name+'_'+'No_Animals')
csv_name = os.path.join(folder_path,folder_name + '_' + 'report' + '.csv')
animal_classification_report_obj = Animal_Classification_Report(csv_filename=csv_name)
animal_classification_report_obj.write_csv_header(image_name="Image_Name",original_image_path="Original_Image_Path",generated_image_path="Generated_Image_Path",prediction="Prediction",confidence="Confidience")
for img in tqdm(os.listdir(folder_path)):
if img.endswith('.jpg') or img.endswith('.png') or img.endswith('.JPG'):
img_path = os.path.join(folder_path,img)
try:
preds = model_predict(img_path,model)
if preds > 0.5:
pos_conf = preds * 100
neg_conf = 100 - pos_conf
pred_class = "No_Animal"
non_pred_class = "Animal"
shutil.copy(img_path,os.path.join(no_animal_folder_path,img))
animal_classification_record = [img,img_path,os.path.join(no_animal_folder_path,img),pred_class,pos_conf[0][0]]
animal_classification_report_obj.append_csv_rows(records=animal_classification_record)
else:
pos_conf = 50 + (0.5-preds) * 100
neg_conf = 100 - pos_conf
pred_class = "Animal"
non_pred_class = "No_Animal"
shutil.copy(img_path,os.path.join(animal_folder_path,img))
animal_classification_record = [img,img_path,os.path.join(animal_folder_path,img),pred_class,pos_conf[0][0]]
animal_classification_report_obj.append_csv_rows(records=animal_classification_record)
except Exception as e:
pass
| [
11748,
299,
32152,
355,
45941,
201,
198,
11748,
28686,
201,
198,
11748,
33918,
201,
198,
11748,
11192,
273,
11125,
355,
48700,
201,
198,
6738,
11192,
273,
11125,
13,
6122,
292,
13,
27530,
1330,
2746,
62,
6738,
62,
17752,
201,
198,
6738,... | 2.275441 | 1,474 |
#!/usr/bin/env python
# pylint: disable=C0111, C0325
import logging
import sys
import tempfile
from pprint import pprint
from cutlass import HostVariantCall
from cutlass import iHMPSession
username = "test"
password = "test"
def set_logging():
""" Setup logging. """
root = logging.getLogger()
root.setLevel(logging.DEBUG)
ch = logging.StreamHandler(sys.stdout)
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
ch.setFormatter(formatter)
root.addHandler(ch)
set_logging()
session = iHMPSession(username, password)
print("Required fields: ")
print(HostVariantCall.required_fields())
call = HostVariantCall()
print("Creating a temp file for example/testing purposes.")
temp_file = tempfile.NamedTemporaryFile(delete=False).name
print("Local file: %s" % temp_file)
call.comment = "test variant call comment"
call.checksums = {"md5": "72bdc024d83226ccc90fbd2177e78d56"}
call.format = "txt"
call.local_file = temp_file
call.reference = "test reference"
call.size = 131313
call.study = "prediabetes"
call.variant_calling_process = "test variant caller 1.0"
# Optional properties
call.format_doc = "the format url"
call.private_files = False
call.sop = "http://google.com"
# Host variant calls are 'computed_from' HostWgsRawSeqSets
call.links = {"computed_from": ["1aa48c782c9b176e9ab7b265a401431a"]}
call.tags = ["host_variant_call", "ihmp"]
call.add_tag("another")
call.add_tag("and_another")
print(call.to_json(indent=2))
if call.is_valid():
print("Valid!")
success = call.save()
if success:
call_id = call.id
print("Succesfully saved host variant call ID: %s" % call_id)
call2 = HostVariantCall.load(call_id)
print(call2.to_json(indent=2))
deletion_success = call.delete()
if deletion_success:
print("Deleted host_variant_call with ID %s" % call_id)
else:
print("Deletion of host_variant_call %s failed." % call_id)
else:
print("Save failed")
else:
print("Invalid...")
validation_errors = call.validate()
pprint(validation_errors)
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
198,
2,
279,
2645,
600,
25,
15560,
28,
34,
486,
1157,
11,
327,
3070,
1495,
198,
198,
11748,
18931,
198,
11748,
25064,
198,
11748,
20218,
7753,
198,
6738,
279,
4798,
1330,
279,
4798,
... | 2.575685 | 839 |
import argparse
# import sys
import os
from patient import Patient
from vitals import VitalSigns
from lab import Lab
from socialhistory import SocialHistory
from familyhistory import FamilyHistory
from immunization import Immunization
from procedure import Procedure
from condition import Condition
from med import Med
from refill import Refill
from document import Document
from allergy import Allergy
from clinicalnote import ClinicalNote
from practitioner import Practitioner
def initData():
"""Load data and mappings from Raw data files and mapping files"""
Patient.generate()
Patient.load()
VitalSigns.load()
Lab.load()
Procedure.load()
Immunization.load()
FamilyHistory.load()
SocialHistory.load()
Condition.load()
Med.load()
Refill.load()
Document.load()
Allergy.load()
ClinicalNote.load()
Practitioner.load()
def displayPatientSummary(pid):
"""writes a patient summary to stdout"""
if not pid in Patient.mpi: return
print (Patient.mpi[pid].asTabString())
print ("PROBLEMS: ")
# if not pid in Problem.problems: print "None",
# else:
# for prob in Problem.problems[pid]: print prob.name+"; ",
# print "\nMEDICATIONS: ",
# if not pid in Med.meds: print "None",
# else:
# for med in Med.meds[pid]:
# print med.name+"{%d}; "%len(Refill.refill_list(pid,med.rxn)),
# print "\nLABS: ",
# if not pid in Lab.results: print "None",
# else:
# print "%d results"%len(Lab.results[pid])
print (",\n")
if __name__ == '__main__':
# Create the parser
PARSER = argparse.ArgumentParser(description='SMART on FHIR Test Data Generator')
GROUP = PARSER.add_mutually_exclusive_group()
GROUP.add_argument(
'--summary',
metavar='pid',
nargs='?',
const="all",
help="displays patient summary (default is 'all')"
)
GROUP.add_argument(
'--write-fhir',
dest='writeFHIR',
metavar='dir',
nargs='?',
const='.',
help="writes patient XML files to an FHIR sample data directory dir (default='.')"
)
PARSER.add_argument(
'--id-prefix',
dest='prefix',
metavar='id_prefix',
nargs='?',
const='',
help="adds the given prefix to the FHIR resource IDs (default=none)"
)
PARSER.add_argument(
'--base-url',
dest='baseURL',
metavar='base_url',
nargs='?',
const='',
help="uses the supplied URL base to generate absolute resource references (default='')"
)
ARGS = PARSER.parse_args()
# print summary ------------------------------------------------------------
if ARGS.summary:
initData()
if ARGS.summary == "all": # Print a summary of all patients
for pid in Patient.mpi:
displayPatientSummary(pid)
else: # Just print a single patient's summary
displayPatientSummary(ARGS.summary)
PARSER.exit()
if ARGS.writeFHIR:
import fhir
print ("Writing files to %s:"%ARGS.writeFHIR)
initData()
path = ARGS.writeFHIR
baseURL = ARGS.baseURL or ""
if not os.path.exists(path):
PARSER.error("Invalid path: '%s'.Path must already exist."%path)
if ARGS.prefix:
prefix = ARGS.prefix
else:
prefix = None
for pid in Patient.mpi:
fhir.FHIRSamplePatient(pid, path, baseURL).writePatientData(prefix)
# Show progress with '.' characters
# print "%s %s - %s" % (baseURL, prefix, pid)
# sys.stdout.flush()
# PARSER.exit(0, "\nDone writing %d patient FHIR files!\n"%len(Patient.mpi))
PARSER.exit()
PARSER.error("No arguments given")
| [
11748,
1822,
29572,
198,
2,
1330,
25064,
198,
11748,
28686,
198,
6738,
5827,
220,
220,
220,
220,
220,
220,
1330,
35550,
198,
6738,
410,
8321,
1330,
28476,
11712,
82,
198,
6738,
2248,
1330,
3498,
198,
6738,
1919,
23569,
1330,
5483,
18122... | 2.37362 | 1,630 |
import unittest
import numpy as np
import sklearn
import sklearn.datasets
from incremental_trees.models.classification.streaming_extc import StreamingEXTC
from incremental_trees.models.regression.streaming_extr import StreamingEXTR
from incremental_trees.models.regression.streaming_rfr import StreamingRFR
from incremental_trees.trees import StreamingRFC
from tests.integration.base import PartialFitTests, FitTests
class TestStreamingRFC1(PartialFitTests, unittest.TestCase):
"""
Test SRFC with single estimator per chunk with "random forest style" max features. ie, subset.
No limit on the total number of trees.
"""
@classmethod
def setUpClass(cls):
"""Set up model to test."""
cls.n_samples = 1000
cls.x, cls.y = sklearn.datasets.make_blobs(n_samples=int(2e4),
random_state=0,
n_features=40,
centers=2,
cluster_std=100)
cls.mod = StreamingRFC(verbose=1,
n_estimators_per_chunk=1,
max_n_estimators=np.inf)
# Set expected number of estimators
cls.expected_n_estimators = 100
# Set helper values
super().setUpClass()
class TestStreamingRFC2(PartialFitTests, unittest.TestCase):
"""
Test SRFC with single estimator per chunk with "random forest style" max features. ie, subset.
Total models limited to 39.
"""
@classmethod
def setUpClass(cls):
"""Set up model to test."""
cls.n_samples = 1000
cls.x, cls.y = sklearn.datasets.make_blobs(n_samples=int(2e4),
random_state=0,
n_features=40,
centers=2,
cluster_std=100)
cls.mod = StreamingRFC(n_estimators_per_chunk=1,
max_n_estimators=39)
# Set expected number of estimators
cls.expected_n_estimators = 39
# Set helper values
super().setUpClass()
class TestStreamingRFC3(PartialFitTests, unittest.TestCase):
"""
Test SRFC with multiple estimators per chunk with "random forest style" max features. ie, subset.
No limit on total models, 3 estimators per row subset (each with different feature subset)
"""
@classmethod
def setUpClass(cls):
"""Set up model to test."""
cls.n_samples = 1000
cls.x, cls.y = sklearn.datasets.make_blobs(n_samples=int(2e4),
random_state=0,
n_features=40,
centers=2,
cluster_std=100)
cls.mod = StreamingRFC(n_estimators_per_chunk=3,
n_jobs=-1,
max_n_estimators=np.inf)
# Set expected number of estimators
cls.expected_n_estimators = 300
# Set helper values
super().setUpClass()
class TestStreamingRFC4(PartialFitTests, unittest.TestCase):
"""
Test SRFC with single estimator per chunk with "decision tree style" max features. ie, all available to each tree.
No limit on total models, 1 estimators per row subset.
"""
@classmethod
def setUpClass(cls):
"""Set up model to test."""
cls.n_samples = 1000
cls.x, cls.y = sklearn.datasets.make_blobs(n_samples=int(2e4),
random_state=0,
n_features=40,
centers=2,
cluster_std=100)
cls.mod = StreamingRFC(n_estimators_per_chunk=1,
max_features=cls.x.shape[1],
max_n_estimators=np.inf)
# Set expected number of estimators
cls.expected_n_estimators = 100
# Set helper values
super().setUpClass()
class TestStreamingRFC5(PartialFitTests, unittest.TestCase):
"""
Test SRFC with single estimator per chunk with "decision tree style" max features. ie, all available to each tree.
No limit on total models, 3 estimators per row subset.
"""
@classmethod
def setUpClass(cls):
"""Set up model to test."""
cls.n_samples = 1000
cls.x, cls.y = sklearn.datasets.make_blobs(n_samples=int(2e4),
random_state=0,
n_features=40,
centers=2,
cluster_std=100)
cls.mod = StreamingRFC(n_estimators_per_chunk=3,
n_jobs=-1,
max_features=cls.x.shape[1],
max_n_estimators=np.inf)
# Set expected number of estimators
cls.expected_n_estimators = 300
# Set helper values
super().setUpClass()
class TestStreamingRFC6(PartialFitTests, unittest.TestCase):
"""
Test SRFC with single estimator per chunk with "decision tree style" max features. ie, all available to each tree.
No limit on total models, 3 estimators per row subset.
"""
@classmethod
def setUpClass(cls):
"""Set up model to test."""
cls.n_samples = 1000
cls.x, cls.y = sklearn.datasets.make_blobs(n_samples=int(2e4),
random_state=0,
n_features=40,
centers=2,
cluster_std=100)
cls.mod = StreamingRFC(n_estimators_per_chunk=3,
n_jobs=-1,
max_features=cls.x.shape[1],
max_n_estimators=np.inf)
# Set expected number of estimators
cls.expected_n_estimators = 300
# Set helper values
super().setUpClass()
class TestStreamingRFR1(PartialFitTests, unittest.TestCase):
"""
Test SRFC with single estimator per chunk with "random forest style" max features. ie, subset.
No limit on the total number of trees.
"""
@classmethod
def setUpClass(cls):
"""Set up model to test."""
cls.n_samples = 1000
cls.x, cls.y = sklearn.datasets.make_regression(n_samples=int(2e4),
random_state=0,
n_features=40)
cls.mod = StreamingRFR(verbose=1,
n_estimators_per_chunk=1,
max_n_estimators=np.inf)
# Set expected number of estimators
cls.expected_n_estimators = 100
# Set helper values
super().setUpClass()
class TestStreamingRFR2(PartialFitTests, unittest.TestCase):
"""
Test SRFC with single estimator per chunk with "random forest style" max features. ie, subset.
Total models limited to 39.
"""
@classmethod
def setUpClass(cls):
"""Set up model to test."""
cls.n_samples = 1000
cls.x, cls.y = sklearn.datasets.make_regression(n_samples=int(2e4),
random_state=0,
n_features=40)
cls.mod = StreamingRFR(n_estimators_per_chunk=1,
max_n_estimators=39)
# Set expected number of estimators
cls.expected_n_estimators = 39
# Set helper values
super().setUpClass()
class TestStreamingRFR3(PartialFitTests, unittest.TestCase):
"""
Test SRFC with multiple estimators per chunk with "random forest style" max features. ie, subset.
No limit on total models, 3 estimators per row subset (each with different feature subset)
"""
@classmethod
def setUpClass(cls):
"""Set up model to test."""
cls.n_samples = 1000
cls.x, cls.y = sklearn.datasets.make_regression(n_samples=int(2e4),
random_state=0,
n_features=40)
cls.mod = StreamingRFR(n_estimators_per_chunk=3,
n_jobs=-1,
max_n_estimators=np.inf)
# Set expected number of estimators
cls.expected_n_estimators = 300
# Set helper values
super().setUpClass()
class TestStreamingRFR4(PartialFitTests, unittest.TestCase):
"""
Test SRFC with single estimator per chunk with "decision tree style" max features. ie, all available to each tree.
No limit on total models, 1 estimators per row subset.
"""
@classmethod
def setUpClass(cls):
"""Set up model to test."""
cls.n_samples = 1000
cls.x, cls.y = sklearn.datasets.make_regression(n_samples=int(2e4),
random_state=0,
n_features=40)
cls.mod = StreamingRFR(n_estimators_per_chunk=1,
max_features=cls.x.shape[1],
max_n_estimators=np.inf)
# Set expected number of estimators
cls.expected_n_estimators = 100
# Set helper values
super().setUpClass()
class TestStreamingRFR5(PartialFitTests, unittest.TestCase):
"""
Test SRFC with single estimator per chunk with "decision tree style" max features. ie, all available to each tree.
No limit on total models, 3 estimators per row subset.
"""
@classmethod
def setUpClass(cls):
"""Set up model to test."""
cls.n_samples = 1000
cls.x, cls.y = sklearn.datasets.make_regression(n_samples=int(2e4),
random_state=0,
n_features=40)
cls.mod = StreamingRFR(n_estimators_per_chunk=3,
n_jobs=-1,
max_features=cls.x.shape[1],
max_n_estimators=np.inf)
# Set expected number of estimators
cls.expected_n_estimators = 300
# Set helper values
super().setUpClass()
class TestStreamingRFR6(PartialFitTests, unittest.TestCase):
    """
    Test StreamingRFR (regressor) with multiple (3) estimators per chunk and
    "decision tree style" max features, i.e. all features available to each tree.

    No limit on total models, 3 estimators per row subset.

    NOTE(review): this class appears to be an exact duplicate of
    TestStreamingRFR5 -- confirm whether it is intentional.
    """
    @classmethod
    def setUpClass(cls):
        """Set up model to test."""
        cls.n_samples = 1000
        cls.x, cls.y = sklearn.datasets.make_regression(n_samples=int(2e4),
                                                        random_state=0,
                                                        n_features=40)
        cls.mod = StreamingRFR(n_estimators_per_chunk=3,
                               n_jobs=-1,
                               max_features=cls.x.shape[1],
                               max_n_estimators=np.inf)
        # Set expected number of estimators (3 per chunk, uncapped)
        cls.expected_n_estimators = 300
        # Set helper values
        super().setUpClass()
class TestStreamingEXTC1(PartialFitTests, unittest.TestCase):
    """
    Test StreamingEXTC (extra-trees classifier) with a single estimator per
    chunk and "random forest style" max features, i.e. a feature subset.

    No limit on the total number of trees.
    """
    @classmethod
    def setUpClass(cls):
        """Set up model to test."""
        # Two wide, overlapping blobs (cluster_std=100) -> a hard,
        # non-trivial binary classification problem.
        cls.n_samples = 1000
        cls.x, cls.y = sklearn.datasets.make_blobs(n_samples=int(2e4),
                                                   random_state=0,
                                                   n_features=40,
                                                   centers=2,
                                                   cluster_std=100)
        cls.mod = StreamingEXTC(n_estimators_per_chunk=1,
                                max_n_estimators=np.inf)
        # Set expected number of estimators
        cls.expected_n_estimators = 100
        # Set helper values
        super().setUpClass()
class TestStreamingEXTC2(PartialFitTests, unittest.TestCase):
    """
    Test StreamingEXTC (extra-trees classifier) with a single estimator per
    chunk and "random forest style" max features, i.e. a feature subset.

    Total models limited to 39.
    """
    @classmethod
    def setUpClass(cls):
        """Set up model to test."""
        cls.n_samples = 1000
        cls.x, cls.y = sklearn.datasets.make_blobs(n_samples=int(2e4),
                                                   random_state=0,
                                                   n_features=40,
                                                   centers=2,
                                                   cluster_std=100)
        # Cap on total estimators: growth should stop at 39.
        cls.mod = StreamingEXTC(n_estimators_per_chunk=1,
                                max_n_estimators=39)
        # Set expected number of estimators (the cap)
        cls.expected_n_estimators = 39
        # Set helper values
        super().setUpClass()
class TestStreamingEXTC3(PartialFitTests, unittest.TestCase):
    """
    Test StreamingEXTC (extra-trees classifier) with multiple estimators per
    chunk and "random forest style" max features, i.e. a feature subset.

    No limit on total models, 3 estimators per row subset (each with different feature subset)
    """
    @classmethod
    def setUpClass(cls):
        """Set up model to test."""
        cls.n_samples = 1000
        cls.x, cls.y = sklearn.datasets.make_blobs(n_samples=int(2e4),
                                                   random_state=0,
                                                   n_features=40,
                                                   centers=2,
                                                   cluster_std=100)
        cls.mod = StreamingEXTC(n_estimators_per_chunk=3,
                                n_jobs=-1,
                                max_n_estimators=np.inf)
        # Set expected number of estimators (3 per chunk, uncapped)
        cls.expected_n_estimators = 300
        # Set helper values
        super().setUpClass()
class TestStreamingEXTC4(PartialFitTests, unittest.TestCase):
    """
    Test StreamingEXTC (extra-trees classifier) with a single estimator per
    chunk and "decision tree style" max features, i.e. all features available
    to each tree.

    No limit on total models, 1 estimator per row subset.
    """
    @classmethod
    def setUpClass(cls):
        """Set up model to test."""
        cls.n_samples = 1000
        cls.x, cls.y = sklearn.datasets.make_blobs(n_samples=int(2e4),
                                                   random_state=0,
                                                   n_features=40,
                                                   centers=2,
                                                   cluster_std=100)
        # max_features = full feature count -> "decision tree style"
        cls.mod = StreamingEXTC(n_estimators_per_chunk=1,
                                max_features=cls.x.shape[1],
                                max_n_estimators=np.inf)
        # Set expected number of estimators
        cls.expected_n_estimators = 100
        # Set helper values
        super().setUpClass()
class TestStreamingEXTC5(PartialFitTests, unittest.TestCase):
    """
    Test StreamingEXTC (extra-trees classifier) with multiple (3) estimators
    per chunk and "decision tree style" max features, i.e. all features
    available to each tree.

    No limit on total models, 3 estimators per row subset.
    """
    @classmethod
    def setUpClass(cls):
        """Set up model to test."""
        cls.n_samples = 1000
        cls.x, cls.y = sklearn.datasets.make_blobs(n_samples=int(2e4),
                                                   random_state=0,
                                                   n_features=40,
                                                   centers=2,
                                                   cluster_std=100)
        cls.mod = StreamingEXTC(n_estimators_per_chunk=3,
                                n_jobs=-1,
                                max_features=cls.x.shape[1],
                                max_n_estimators=np.inf)
        # Set expected number of estimators (3 per chunk, uncapped)
        cls.expected_n_estimators = 300
        # Set helper values
        super().setUpClass()
class TestStreamingEXTC6(PartialFitTests, unittest.TestCase):
    """
    Test StreamingEXTC (extra-trees classifier) with multiple (3) estimators
    per chunk and "decision tree style" max features, i.e. all features
    available to each tree.

    No limit on total models, 3 estimators per row subset.

    NOTE(review): this class appears to be an exact duplicate of
    TestStreamingEXTC5 -- confirm whether it is intentional.
    """
    @classmethod
    def setUpClass(cls):
        """Set up model to test."""
        cls.n_samples = 1000
        cls.x, cls.y = sklearn.datasets.make_blobs(n_samples=int(2e4),
                                                   random_state=0,
                                                   n_features=40,
                                                   centers=2,
                                                   cluster_std=100)
        cls.mod = StreamingEXTC(n_estimators_per_chunk=3,
                                n_jobs=-1,
                                max_features=cls.x.shape[1],
                                max_n_estimators=np.inf)
        # Set expected number of estimators (3 per chunk, uncapped)
        cls.expected_n_estimators = 300
        # Set helper values
        super().setUpClass()
class TestStreamingEXTR1(PartialFitTests, unittest.TestCase):
    """
    Test StreamingEXTR (extra-trees regressor) with a single estimator per
    chunk and "random forest style" max features, i.e. a feature subset.

    No limit on the total number of trees.
    """
    @classmethod
    def setUpClass(cls):
        """Set up model to test."""
        cls.n_samples = 1000
        cls.x, cls.y = sklearn.datasets.make_regression(n_samples=int(2e4),
                                                        random_state=0,
                                                        n_features=40)
        cls.mod = StreamingEXTR(n_estimators_per_chunk=1,
                                max_n_estimators=np.inf)
        # Set expected number of estimators
        cls.expected_n_estimators = 100
        # Set helper values
        super().setUpClass()
class TestStreamingEXTR2(PartialFitTests, unittest.TestCase):
    """
    Test StreamingEXTR (extra-trees regressor) with a single estimator per
    chunk and "random forest style" max features, i.e. a feature subset.

    Total models limited to 39.
    """
    @classmethod
    def setUpClass(cls):
        """Set up model to test."""
        cls.n_samples = 1000
        # NOTE(review): 400 features here vs 40 in the sibling tests --
        # presumably an intentional wider-data case; confirm.
        cls.x, cls.y = sklearn.datasets.make_regression(n_samples=int(2e4),
                                                        random_state=0,
                                                        n_features=400)
        cls.mod = StreamingEXTR(n_estimators_per_chunk=1,
                                max_n_estimators=39)
        # Set expected number of estimators (the cap)
        cls.expected_n_estimators = 39
        # Set helper values
        super().setUpClass()
class TestStreamingEXTR3(PartialFitTests, unittest.TestCase):
    """
    Test StreamingEXTR (extra-trees regressor) with multiple estimators per
    chunk and "random forest style" max features, i.e. a feature subset.

    No limit on total models, 3 estimators per row subset (each with different feature subset)
    """
    @classmethod
    def setUpClass(cls):
        """Set up model to test."""
        cls.n_samples = 1000
        cls.x, cls.y = sklearn.datasets.make_regression(n_samples=int(2e4),
                                                        random_state=0,
                                                        n_features=40)
        cls.mod = StreamingEXTR(n_estimators_per_chunk=3,
                                n_jobs=-1,
                                max_n_estimators=np.inf)
        # Set expected number of estimators (3 per chunk, uncapped)
        cls.expected_n_estimators = 300
        # Set helper values
        super().setUpClass()
class TestStreamingEXTR4(PartialFitTests, unittest.TestCase):
    """
    Test StreamingEXTR (extra-trees regressor) with a single estimator per
    chunk and "decision tree style" max features, i.e. all features available
    to each tree.

    No limit on total models, 1 estimator per row subset.
    """
    @classmethod
    def setUpClass(cls):
        """Set up model to test."""
        cls.n_samples = 1000
        # NOTE(review): only 4 features here vs 40 in the sibling tests --
        # presumably an intentional narrow-data case; confirm.
        cls.x, cls.y = sklearn.datasets.make_regression(n_samples=int(2e4),
                                                        random_state=0,
                                                        n_features=4)
        # max_features = full feature count -> "decision tree style"
        cls.mod = StreamingEXTR(n_estimators_per_chunk=1,
                                max_features=cls.x.shape[1],
                                max_n_estimators=np.inf)
        # Set expected number of estimators
        cls.expected_n_estimators = 100
        # Set helper values
        super().setUpClass()
class TestStreamingEXTR5(PartialFitTests, unittest.TestCase):
    """
    Test StreamingEXTR (extra-trees regressor) with multiple (3) estimators
    per chunk and "decision tree style" max features, i.e. all features
    available to each tree.

    No limit on total models, 3 estimators per row subset.
    """
    @classmethod
    def setUpClass(cls):
        """Set up model to test."""
        cls.n_samples = 1000
        cls.x, cls.y = sklearn.datasets.make_regression(n_samples=int(2e4),
                                                        random_state=0,
                                                        n_features=40)
        cls.mod = StreamingEXTR(n_estimators_per_chunk=3,
                                n_jobs=-1,
                                max_features=cls.x.shape[1],
                                max_n_estimators=np.inf)
        # Set expected number of estimators (3 per chunk, uncapped)
        cls.expected_n_estimators = 300
        # Set helper values
        super().setUpClass()
class TestStreamingEXTR6(PartialFitTests, unittest.TestCase):
    """
    Test StreamingEXTR (extra-trees regressor) with multiple (3) estimators
    per chunk and "decision tree style" max features, i.e. all features
    available to each tree.

    No limit on total models, 3 estimators per row subset.

    NOTE(review): this class appears to be an exact duplicate of
    TestStreamingEXTR5 -- confirm whether it is intentional.
    """
    @classmethod
    def setUpClass(cls):
        """Set up model to test."""
        cls.n_samples = 1000
        cls.x, cls.y = sklearn.datasets.make_regression(n_samples=int(2e4),
                                                        random_state=0,
                                                        n_features=40)
        cls.mod = StreamingEXTR(n_estimators_per_chunk=3,
                                n_jobs=-1,
                                max_features=cls.x.shape[1],
                                max_n_estimators=np.inf)
        # Set expected number of estimators (3 per chunk, uncapped)
        cls.expected_n_estimators = 300
        # Set helper values
        super().setUpClass()
| [
11748,
555,
715,
395,
198,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
1341,
35720,
198,
11748,
1341,
35720,
13,
19608,
292,
1039,
198,
198,
6738,
29497,
62,
83,
6037,
13,
27530,
13,
4871,
2649,
13,
5532,
278,
62,
2302,
66,
1330,
... | 1.821113 | 12,684 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import socket
import os
import socks
import json
from core.targets import target_type
from core.alert import info
from core.alert import messages
from core.alert import warn
from lib.icmp.engine import send_one_ping, receive_one_ping
from lib.socks_resolver.engine import getaddrinfo
from core._time import now
from core.log import __log_into_file
def do_one_ping(dest_addr, timeout, psize):
    """
    Send a single ICMP echo request and wait for the echo reply.

    :param dest_addr: destination IP address to ping
    :param timeout: seconds to wait for the echo reply
    :param psize: payload size of the ping packet
    :return: round-trip delay in seconds, or None on timeout or if a raw
             socket cannot be created (raw sockets require elevated
             privileges).
    """
    icmp = socket.getprotobyname("icmp")
    try:
        my_socket = socket.socket(socket.AF_INET, socket.SOCK_RAW, icmp)
    except socket.error:
        # No privileges / no raw-socket support: report as "no answer".
        return None
    # Identifier used to match our replies among all ICMP traffic.
    my_id = os.getpid() & 0xFFFF
    try:
        send_one_ping(my_socket, dest_addr, my_id, psize)
        delay = receive_one_ping(my_socket, my_id, timeout)
    finally:
        # Previously the socket leaked if send/receive raised; always close.
        my_socket.close()
    return delay
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
11748,
17802,
198,
11748,
28686,
198,
11748,
24359,
198,
11748,
33918,
198,
6738,
4755,
13,
83,
853,
1039,
1330,
2... | 2.697531 | 324 |
""" CryptoPy - a pure python cryptographic libraries
cryptopy.passwords package
Copyright (c) 2002 by Paul A. Lambert
Read LICENSE.txt for license information.
"""
| [
37811,
36579,
20519,
532,
257,
5899,
21015,
40705,
12782,
628,
220,
220,
220,
8194,
11081,
13,
6603,
10879,
5301,
628,
220,
220,
220,
15069,
357,
66,
8,
6244,
416,
3362,
317,
13,
36978,
198,
220,
220,
220,
4149,
38559,
24290,
13,
1411... | 3.480769 | 52 |
from django.contrib import admin
from django.http import QueryDict
from django.urls import reverse
from django.utils.html import format_html
from .forms import (
AgencyForm,
CorrespondenceForm,
RecordRequestFileForm,
RecordRequestForm,
)
from .models import Agency, Correspondence, RecordRequest, RecordRequestFile
@admin.register(Agency)
@admin.register(RecordRequest)
@admin.register(Correspondence)
@admin.register(RecordRequestFile)
| [
6738,
42625,
14208,
13,
3642,
822,
1330,
13169,
198,
6738,
42625,
14208,
13,
4023,
1330,
43301,
35,
713,
198,
6738,
42625,
14208,
13,
6371,
82,
1330,
9575,
198,
6738,
42625,
14208,
13,
26791,
13,
6494,
1330,
5794,
62,
6494,
198,
198,
... | 3.323741 | 139 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .entity_health_state_chunk import EntityHealthStateChunk
class DeployedServicePackageHealthStateChunk(EntityHealthStateChunk):
    """Represents the health state chunk of a deployed service package, which
    contains the service manifest name and the service package aggregated
    health state.

    :param health_state: Possible values include: 'Invalid', 'Ok', 'Warning',
     'Error', 'Unknown'
    :type health_state: str
    :param service_manifest_name: The name of the service manifest.
    :type service_manifest_name: str
    :param service_package_activation_id: Id of the service package
     activation.
    :type service_package_activation_id: str
    """

    # Maps Python attribute names to their REST payload keys and types;
    # presumably consumed by the msrest serializer (AutoRest-generated code).
    _attribute_map = {
        'health_state': {'key': 'HealthState', 'type': 'str'},
        'service_manifest_name': {'key': 'ServiceManifestName', 'type': 'str'},
        'service_package_activation_id': {'key': 'ServicePackageActivationId', 'type': 'str'},
    }
2,
19617,
28,
40477,
12,
23,
198,
2,
16529,
35937,
198,
2,
15069,
357,
66,
8,
5413,
10501,
13,
1439,
2489,
10395,
13,
198,
2,
49962,
739,
262,
17168,
13789,
13,
4091,
13789,
13,
14116,
287,
262,
1628,
6808,
329,
198,
2,
5964,
1321... | 3.530612 | 392 |
import numpy as np
import getopt
import vars.utils as utils
import glob
import sys
import pdb
def parse_logs(files):
    """
    parses through log files to extract marginal
    likelihood estimates from executing the
    variational inference algorithm on a dataset.

    Arguments:

        files : list
            list of .log file names

    Returns:
        list of floats; one marginal likelihood per file in which a
        'Marginal Likelihood' line was found (files without one are skipped).
    """
    marginal_likelihood = []
    for file in files:
        # Context manager guarantees the handle is closed even if a
        # malformed line makes float() raise (the original leaked it).
        with open(file, 'r') as handle:
            for line in handle:
                if 'Marginal Likelihood' in line:
                    m = float(line.strip().split('=')[1])
                    marginal_likelihood.append(m)
                    # Only the first matching line per file is used.
                    break
    return marginal_likelihood
def parse_varQs(files):
    """
    parses through multiple .meanQ files to extract the mean
    admixture proportions estimated by executing the
    variational inference algorithm on a dataset. This is then used
    to identify the number of model components used to explain
    structure in the data, for each .meanQ file.

    Arguments:

        files : list
            list of .meanQ file names
    """
    bestKs = []
    for filename in files:
        # Read the whole admixture-proportion matrix for this run.
        with open(filename, 'r') as handle:
            Q = np.array([[float(value) for value in line.strip().split()]
                          for line in handle])
        # Normalise each row so proportions sum to one.
        Q = Q / utils.insum(Q, [1])
        N = Q.shape[0]
        # Cumulative mass of the per-component totals, largest first.
        C = np.cumsum(np.sort(Q.sum(0))[::-1])
        # Components needed before the cumulative mass reaches N-1.
        bestKs.append(np.sum(C < N - 1) + 1)
    return bestKs
def parseopts(opts):
    """
    parses the command-line flags and options passed to the script

    Arguments:

        opts : list
            (option, argument) pairs as returned by getopt.getopt

    Returns:
        the argument supplied with the --input flag

    Raises:
        ValueError, if --input was not provided (previously this fell
        through to an opaque UnboundLocalError).
    """
    filetag = None
    for opt, arg in opts:
        if opt == "--input":
            filetag = arg
    if filetag is None:
        raise ValueError("the --input option is required")
    return filetag
def usage():
    """
    Print a brief description of the flags and options for this script.
    """
    # NOTE: Python 2 print statements; the stdout text is part of the CLI
    # contract, so it is left untouched.
    print "\nHere is how you can use this script\n"
    print "Usage: python %s"%sys.argv[0]
    print "\t --input=<file>"
if __name__=="__main__":
# parse command-line options
argv = sys.argv[1:]
smallflags = ""
bigflags = ["input="]
try:
opts, args = getopt.getopt(argv, smallflags, bigflags)
if not opts:
usage()
sys.exit(2)
except getopt.GetoptError:
print "Incorrect options passed"
usage()
sys.exit(2)
filetag = parseopts(opts)
files = glob.glob('%s.*.log'%filetag)
Ks = np.array([int(file.split('.')[-2]) for file in files])
marginal_likelihoods = parse_logs(files)
files = glob.glob('%s.*.meanQ'%filetag)
bestKs = parse_varQs(files)
print "Model complexity that maximizes marginal likelihood = %d"%Ks[np.argmax(marginal_likelihoods)]
print "Model components used to explain structure in data = %d"%np.argmax(np.bincount(bestKs))
| [
11748,
299,
32152,
355,
45941,
198,
11748,
651,
8738,
198,
11748,
410,
945,
13,
26791,
355,
3384,
4487,
198,
11748,
15095,
198,
11748,
25064,
198,
11748,
279,
9945,
198,
198,
4299,
21136,
62,
6404,
82,
7,
16624,
2599,
628,
220,
220,
2... | 2.402839 | 1,127 |
import sys
import ast
from random import randint
from questions_backend import QuestionGeneration
import requests
from watson_developer_cloud import NaturalLanguageUnderstandingV1
from watson_developer_cloud.natural_language_understanding_v1 import Features, KeywordsOptions
import json
"""
argv[1] : keyword dictionary
argv[2] : corpus
argv[3] : user text
"""
def azure_extract_keywords(new_text):
    """Return a list of keywords for new user text using Microsoft's Azure
    Cognitive Services key-phrase endpoint.
    """
    # SECURITY NOTE(review): the subscription key is hard-coded; it should
    # be loaded from configuration or an environment variable.
    headers = {'Ocp-Apim-Subscription-Key': '59bf3be460ad434585a4b4143c470a92'}
    payload = {'documents': [{'id': '1', 'text': new_text}]}
    endpoint = "https://eastus.api.cognitive.microsoft.com/text/analytics/v2.0/KeyPhrases"
    reply = requests.post(endpoint, headers=headers, json=payload).json()
    key_phrases = reply['documents'][0]['keyPhrases']
    return [phrase.encode('utf-8') for phrase in key_phrases]
def watson_extract_keywords(new_text):
    """Return a list of keywords for new user text using IBM Watson Natural
    Language Understanding.
    """
    # SECURITY NOTE(review): service credentials are hard-coded; move them
    # to configuration or environment variables.
    service = NaturalLanguageUnderstandingV1(
        version='2018-03-16',
        username='be4694ef-b0b4-44cf-a2bf-de9e669525ef',
        password='XB1M0xRrLFQT'
    )
    wanted = Features(keywords=KeywordsOptions())
    response = service.analyze(text=new_text, features=wanted)
    result = response.get_result()
    return [entry['text'].encode('utf-8') for entry in result['keywords']]
def extract_date_location(keyword_entry, date_location):
    """Given a keywords entry in the keyword_dict, return either the most
    recent date/location or a randomly chosen one from the list.
    """
    values = keyword_entry[date_location]
    count = len(values)
    # A lone value is returned unconditionally.
    if count == 1:
        return values[0]
    # 70% of the time, prefer the most recent value.
    if randint(0, 9) < 7:
        return values[-1]
    # Otherwise pick any value at random (may still be the most recent).
    return values[randint(0, count - 1)]
def extract_comp(keyword_entry):
    """Given a keywords entry in the keyword_dict, return either the most
    recent date/location pair or a random pair from the 'comp' list.
    """
    pairs = keyword_entry['comp']
    count = len(pairs)
    # A lone pair is returned unconditionally.
    if count == 1:
        return pairs[0]
    # 70% of the time, prefer the most recent pair.
    if randint(0, 9) < 7:
        return pairs[-1]
    # Otherwise pick any pair at random (may still be the most recent).
    return pairs[randint(0, count - 1)]
if __name__ == '__main__':
main()
| [
11748,
25064,
201,
198,
11748,
6468,
201,
198,
6738,
4738,
1330,
43720,
600,
201,
198,
6738,
2683,
62,
1891,
437,
1330,
18233,
8645,
341,
201,
198,
11748,
7007,
201,
198,
201,
198,
6738,
266,
13506,
62,
16244,
263,
62,
17721,
1330,
12... | 2.499548 | 1,105 |
from .__init__helper import *
#clear warning from pylance
from .Exit import *
from .Trap import *
from .Chair import *
from .Monster import * | [
6738,
764,
834,
15003,
834,
2978,
525,
1330,
1635,
198,
2,
20063,
6509,
422,
279,
2645,
590,
198,
6738,
764,
30337,
1330,
1635,
198,
6738,
764,
51,
2416,
1330,
1635,
198,
6738,
764,
43189,
1330,
1635,
198,
6738,
764,
40872,
1330,
1635... | 3.357143 | 42 |
#
# @lc app=leetcode id=301 lang=python3
#
# [301] Remove Invalid Parentheses
#
# https://leetcode.com/problems/remove-invalid-parentheses/description/
#
# algorithms
# Hard (44.46%)
# Likes: 3255
# Dislikes: 151
# Total Accepted: 258.9K
# Total Submissions: 577.9K
# Testcase Example: '"()())()"'
#
# Given a string s that contains parentheses and letters, remove the minimum
# number of invalid parentheses to make the input string valid.
#
# Return all the possible results. You may return the answer in any order.
#
#
# Example 1:
#
#
# Input: s = "()())()"
# Output: ["(())()","()()()"]
#
#
# Example 2:
#
#
# Input: s = "(a)())()"
# Output: ["(a())()","(a)()()"]
#
#
# Example 3:
#
#
# Input: s = ")("
# Output: [""]
#
#
#
# Constraints:
#
#
# 1 <= s.length <= 25
# s consists of lowercase English letters and parentheses '(' and ')'.
# There will be at most 20 parentheses in s.
#
#
#
# @lc code=start
# DFS without pruning
# Runtime: 7544 ms
# Memory Usage: 14.6 MB
# DFS with pruning
# Runtime: 80 ms
# Memory Usage: 14.3 MB
# @lc code=end
| [
2,
198,
2,
2488,
44601,
598,
28,
293,
316,
8189,
4686,
28,
18938,
42392,
28,
29412,
18,
198,
2,
198,
2,
685,
18938,
60,
17220,
17665,
16774,
39815,
198,
2,
198,
2,
3740,
1378,
293,
316,
8189,
13,
785,
14,
1676,
22143,
14,
28956,
... | 2.589499 | 419 |