Column            Type     Range
content           string   lengths 1 – 1.05M
input_ids         list     lengths 1 – 883k
ratio_char_token  float64  1 – 22.9
token_count       int64    1 – 883k
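Each row pairs a source file (`content`) with its tokenization (`input_ids`); `token_count` is the length of `input_ids`, and `ratio_char_token` is characters per token. A minimal sketch of how these per-row statistics could be recomputed — the GPT-2 tokenizer is an assumption (the dump does not name one, though the token IDs are consistent with a GPT-2-style vocabulary):

```python
# Minimal sketch: recompute the per-row statistics shown in the rows below.
# Assumption: a GPT-2-style tokenizer; the dump itself does not name one.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("gpt2")

def row_stats(content: str) -> dict:
    """Return the content, token IDs, chars-per-token ratio, and token count."""
    input_ids = tokenizer(content)["input_ids"]
    token_count = len(input_ids)
    return {
        "content": content,
        "input_ids": input_ids,
        "ratio_char_token": len(content) / token_count,
        "token_count": token_count,
    }
```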
#!/usr/bin/python
#
# Copyright 2019 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# coding: utf-8
from __future__ import absolute_import, division, print_function

from distutils.version import LooseVersion  # pylint:disable=import-error

from polyaxon.managers.base import BaseConfigManager
from polyaxon.schemas.cli.cli_configuration import CliConfigurationConfig
[ 2, 48443, 14629, 14, 8800, 14, 29412, 198, 2, 198, 2, 15069, 13130, 12280, 897, 261, 11, 3457, 13, 198, 2, 198, 2, 49962, 739, 262, 24843, 13789, 11, 10628, 362, 13, 15, 357, 1169, 366, 34156, 15341, 198, 2, 345, 743, 407, 779, ...
3.612245
245
import pytest

from duckql.properties import Null
[ 11748, 12972, 9288, 198, 198, 6738, 22045, 13976, 13, 48310, 1330, 35886, 628, 628, 198 ]
3.6
15
# -*- Mode: Python -*-
# GObject-Introspection - a framework for introspecting GObject libraries
# Copyright (C) 2010 Red Hat, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
#

import os
from contextlib import contextmanager

from . import ast
[ 2, 532, 9, 12, 10363, 25, 11361, 532, 9, 12, 198, 2, 402, 10267, 12, 5317, 305, 31308, 532, 257, 9355, 329, 18951, 4443, 278, 402, 10267, 12782, 198, 2, 15069, 357, 34, 8, 3050, 220, 2297, 10983, 11, 3457, 13, 198, 2, 198, 2, ...
3.780488
246
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

import os
import sys
import shutil
import tempfile
import subprocess
import typing as tp
from pathlib import Path
from nevergrad.common import tools as ngtools


    # Method fragment; its enclosing class is not included in this sample row.
    def __enter__(self) -> Path:
        super().__enter__()
        return self.copyname


class FailedJobError(RuntimeError):
    """Job failed during processing
    """


class CommandFunction:
    """Wraps a command as a function in order to make sure it goes through the
    pipeline and notify when it is finished.
    The output is a string containing everything that has been sent to stdout

    Parameters
    ----------
    command: list
        command to run, as a list
    verbose: bool
        prints the command and stdout at runtime
    cwd: Path/str
        path to the location where the command must run from

    Returns
    -------
    str
        Everything that has been sent to stdout
    """

    def __call__(self, *args: tp.Any, **kwargs: tp.Any) -> str:
        """Call the command line with additional arguments
        The keyword arguments will be sent as --{key}={val}
        The logs are buffered. They will be printed if the job fails, or sent as output of the function
        Errors are provided with the internal stderr
        """
        # TODO make the following command more robust (probably fails in multiple cases)
        full_command = self.command + [str(x) for x in args] + ["--{}={}".format(x, y) for x, y in kwargs.items()]
        if self.verbose:
            print(f"The following command is sent: {full_command}")
        outlines: tp.List[str] = []
        with subprocess.Popen(full_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                              shell=False, cwd=self.cwd, env=self.env) as process:
            try:
                assert process.stdout is not None
                for line in iter(process.stdout.readline, b''):
                    if not line:
                        break
                    outlines.append(line.decode().strip())
                    if self.verbose:
                        print(outlines[-1], flush=True)
            except Exception:  # pylint: disable=broad-except
                process.kill()
                process.wait()
                raise FailedJobError("Job got killed for an unknown reason.")
            stderr = process.communicate()[1]  # we already got stdout
            stdout = "\n".join(outlines)
            retcode = process.poll()
            if stderr and (retcode or self.verbose):
                print(stderr.decode(), file=sys.stderr)
            if retcode:
                subprocess_error = subprocess.CalledProcessError(retcode, process.args,
                                                                 output=stdout, stderr=stderr)
                raise FailedJobError(stderr.decode()) from subprocess_error
        return stdout
[ 2, 15069, 357, 66, 8, 3203, 11, 3457, 13, 290, 663, 29116, 13, 1439, 6923, 33876, 13, 198, 2, 198, 2, 770, 2723, 2438, 318, 11971, 739, 262, 17168, 5964, 1043, 287, 262, 198, 2, 38559, 24290, 2393, 287, 262, 6808, 8619, 286, 428, ...
2.407736
1,241
import paramiko

ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(hostname='54.165.97.91', username='ec2-user', password='paramiko123', port=22)

sftp_client = ssh.open_sftp()

#sftp_client.get('/home/ec2-user/paramiko_download.txt', 'paramiko_downloaded_file.txt')
#sftp_client.chdir("/home/ec2-user")
#print(sftp_client.getcwd())
#sftp_client.get('demo.txt', 'C:\\Users\\Automation\\Desktop\\download_file.txt')
sftp_client.put("transfer_files.py", '/home/ec2-user/transfer_files.py')

sftp_client.close()
ssh.close()
[ 11748, 5772, 12125, 201, 198, 45824, 796, 5772, 12125, 13, 5432, 39, 11792, 3419, 201, 198, 45824, 13, 2617, 62, 45688, 62, 4774, 62, 2539, 62, 30586, 7, 17143, 12125, 13, 27722, 4550, 36727, 28955, 201, 198, 45824, 13, 8443, 7, 4774,...
2.40678
236
# coding=utf-8
"""
Internal tools for NimLime development & testing.
"""
from pprint import pprint

import sublime

try:
    from cProfile import Profile
except ImportError:
    from profile import Profile

from functools import wraps
from pstats import Stats

try:
    from StringIO import StringIO
except ImportError:
    from io import StringIO

debug_on = False

if debug_on:
    sublime.message_dialog("NimLime running in debug mode.")


# Debug printer
def print_debug(*args, **kwargs):
    """
    Print when debugging.
    :type args: Any
    :type kwargs: Any
    """
    if debug_on:
        pprint(*args, **kwargs)


# Profiling functions
profiler = Profile()
profiler_running = False


def profile_func(func):
    """
    Decorator which profiles a single function.
    Call print_profile_data to print the collected data.
    :type func: Callable
    :rtype: Callable
    """
    # _profile_wrapper is referenced here but its definition is not included
    # in this sample row.
    return _profile_wrapper


def print_profile_data():
    """
    Print the collected profile data.
    """
    stream = StringIO()
    statistics = Stats(profiler, stream=stream)
    statistics.sort_stats('cumulative')
    statistics.print_stats()
    print(stream.getvalue())
[ 2, 19617, 28, 40477, 12, 23, 198, 37811, 198, 37693, 4899, 329, 27168, 43, 524, 2478, 1222, 4856, 13, 198, 37811, 198, 6738, 279, 4798, 1330, 279, 4798, 198, 198, 11748, 41674, 198, 198, 28311, 25, 198, 220, 220, 220, 422, 269, 3704...
2.939547
397
import orjson
from asynctest import TestCase, Mock, patch
from freezegun import freeze_time

from driftage.monitor import Monitor
[ 11748, 393, 17752, 198, 6738, 355, 2047, 310, 395, 1330, 6208, 20448, 11, 44123, 11, 8529, 198, 6738, 1479, 89, 1533, 403, 1330, 16611, 62, 2435, 198, 6738, 24260, 496, 13, 41143, 1330, 18289, 628 ]
3.714286
35
import fastarg

import commands.todo as todo
import commands.user as user

app = fastarg.Fastarg(description="productivity app", prog="todo")

app.add_fastarg(todo.app, name="todo")
app.add_fastarg(user.app, name="user")

if __name__ == "__main__":
    app.run()
[ 11748, 3049, 853, 198, 11748, 9729, 13, 83, 24313, 355, 284, 4598, 198, 11748, 9729, 13, 7220, 355, 2836, 198, 198, 1324, 796, 3049, 853, 13, 22968, 853, 7, 11213, 2625, 11167, 3458, 598, 1600, 1172, 2625, 83, 24313, 4943, 198, 198, ...
2.747368
95
import asyncio
import uuid

import pytest
from aiomisc_pytest.pytest_plugin import TCPProxy

import aiormq
[ 11748, 30351, 952, 198, 11748, 334, 27112, 198, 198, 11748, 12972, 9288, 198, 6738, 257, 29005, 2304, 62, 9078, 9288, 13, 9078, 9288, 62, 33803, 1330, 23633, 44148, 198, 198, 11748, 257, 72, 579, 80, 628, 628, 628, 628, 628 ]
2.9
40
__all__ = ['mediaclassification_stats']
[ 834, 439, 834, 796, 37250, 2379, 330, 31172, 2649, 62, 34242, 20520 ]
3.25
12
'''
Problem description:
Given a string, determine whether or not the parentheses are balanced
'''


def balanced_parens(str):
    '''
    runtime: O(n)
    space  : O(1)
    '''
    if str is None:
        return True

    open_count = 0
    for char in str:
        if char == '(':
            open_count += 1
        elif char == ')':
            open_count -= 1
            if open_count < 0:
                return False
    return open_count == 0
[ 7061, 6, 198, 40781, 6764, 25, 198, 15056, 257, 4731, 11, 5004, 1771, 393, 407, 262, 46672, 389, 12974, 198, 7061, 6, 628, 198, 4299, 12974, 62, 11730, 82, 7, 2536, 2599, 198, 220, 220, 220, 705, 7061, 198, 220, 220, 220, 19124, 2...
2.146226
212
# -*- coding: utf-8 -*-
"""Parser for the CCleaner Registry key."""

import re

from dfdatetime import time_elements as dfdatetime_time_elements

from plaso.containers import events
from plaso.containers import time_events
from plaso.lib import definitions
from plaso.parsers import winreg_parser
from plaso.parsers.winreg_plugins import interface


winreg_parser.WinRegistryParser.RegisterPlugin(CCleanerPlugin)
[ 2, 532, 9, 12, 19617, 25, 3384, 69, 12, 23, 532, 9, 12, 198, 37811, 46677, 329, 262, 12624, 13087, 263, 33432, 1994, 526, 15931, 198, 198, 11748, 302, 198, 198, 6738, 288, 16344, 265, 8079, 1330, 640, 62, 68, 3639, 355, 288, 16344...
3.058824
136
from abc import ABCMeta, abstractmethod
from dataclasses import dataclass
from typing import Any, TypeVar

X = TypeVar('X')

# Unit is referenced here but its definition is not included in this sample row.
_UNIT_SINGLETON = Unit()
[ 6738, 450, 66, 1330, 9738, 48526, 11, 12531, 24396, 198, 6738, 4818, 330, 28958, 1330, 4818, 330, 31172, 198, 6738, 19720, 1330, 4377, 11, 5994, 19852, 628, 198, 55, 796, 5994, 19852, 10786, 55, 11537, 628, 628, 628, 198, 62, 4944, 20...
3.078431
51
import time

import pytest

from test import config
from test.cube_utils import CubeUtils

ITERATIONS_NUM = getattr(config, 'iterations_num', 1)
ROUNDS_NUM = getattr(config, 'rounds_num', 10)
[ 11748, 640, 198, 198, 11748, 12972, 9288, 198, 198, 6738, 1332, 1330, 4566, 198, 6738, 1332, 13, 40296, 62, 26791, 1330, 23315, 18274, 4487, 198, 198, 2043, 1137, 18421, 62, 41359, 796, 651, 35226, 7, 11250, 11, 705, 2676, 602, 62, 22...
2.924242
66
import pytest
from mindmeld.components import Conversation


def assert_reply(directives, templates, *, start_index=0, slots=None):
    """Asserts that the provided directives contain the specified reply

    Args:
        directives (list[dict[str, dict]]): list of directives returned by application
        templates (Union[str, Set[str]]): The reply must be a member of this set.
        start_index (int, optional): The index of the first client action
            associated with this reply.
        slots (dict, optional): The slots to fill the templates
    """
    slots = slots or {}
    if isinstance(templates, str):
        templates = [templates]
    texts = set(map(lambda x: x.format(**slots), templates))
    assert len(directives) >= start_index + 1
    assert directives[start_index]['name'] == 'reply'
    assert directives[start_index]['payload']['text'] in texts


def assert_dialogue_state(dm, dialogue_state):
    for rule in dm.rules:
        if rule.dialogue_state == dialogue_state:
            return True
    return False


# Note: some_handler is referenced in the tests below but its definition is
# not included in this sample row.
def test_dialogue_flow_async(async_kwik_e_mart_app):
    assert some_handler.flow_state == 'some_handler_flow'
    assert 'some_handler' in some_handler.all_flows

    dm = some_handler.dialogue_manager
    assert_dialogue_state(dm, 'some_handler')
    assert_dialogue_state(dm, 'some_handler_flow')

    assert len(some_handler.rules) == 0
    assert len(some_handler.rules) == 1
    assert len(some_handler.rules) == 2

    assert 'some_flow_handler_2' in some_handler.exit_flow_states


def test_dialogue_flow(kwik_e_mart_app):
    assert some_handler.flow_state == 'some_handler_flow'
    assert 'some_handler' in some_handler.all_flows

    dm = some_handler.dialogue_manager
    assert_dialogue_state(dm, 'some_handler')
    assert_dialogue_state(dm, 'some_handler_flow')

    assert len(some_handler.rules) == 0
    assert len(some_handler.rules) == 1
    assert len(some_handler.rules) == 2

    assert 'some_flow_handler_2' in some_handler.exit_flow_states
[ 11748, 12972, 9288, 198, 6738, 2000, 1326, 335, 13, 5589, 3906, 1330, 42427, 628, 198, 4299, 6818, 62, 47768, 7, 12942, 1083, 11, 24019, 11, 1635, 11, 923, 62, 9630, 28, 15, 11, 17314, 28, 14202, 2599, 198, 220, 220, 220, 37227, 802...
2.784636
729
import os, sys
import webbrowser

import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.cm as cm
import matplotlib.pylab as plt
from matplotlib import ticker

plt.rcParams['font.family'] = 'monospace'

fig = plt.figure()
rect = fig.add_subplot(111, aspect='equal')

data0 = np.loadtxt('data0.dat', delimiter=',')
data1 = np.loadtxt('data1.dat', delimiter=',')
dense = np.loadtxt('dense.dat', delimiter=',')
ID = sys.argv[1]

X = np.arange(-2.0, 2.05, 0.05)
Y = np.arange(-2.0, 2.05, 0.05)
Xm, Ym = np.meshgrid(X, Y)

vmin, vmax = dense.min(), dense.max()
if vmin * vmax < 0:
    # symmetrize the color range around zero; compute the bound once so the
    # second assignment does not read the already-updated vmin
    bound = abs(max(-vmin, vmax))
    vmin, vmax = -bound, +bound

cr = rect.imshow(dense.reshape((len(Y), len(X))),
                 extent=(X[0], X[-1], Y[0], Y[-1]),
                 vmin=vmin, vmax=vmax, cmap=cm.coolwarm, origin='lower')
plt.contour(Xm, Ym, dense, levels=[-1, 1], cmap=cm.bwr,
            linestyles='dashed', linewidths=[2, 2])
plt.contour(Xm, Ym, dense, levels=[0], colors='black',
            linestyles='dashed', linewidths=[2])

cb = plt.colorbar(cr, format='%+.1e')
cb.solids.set_edgecolor('face')
cb.set_ticks(ticker.LinearLocator(6))
cb.ax.tick_params(labelsize=12)

rect.scatter(data0[:, 0], data0[:, 1], marker='v',
             facecolor='red', edgecolor='black', s=30, lw=1)
rect.scatter(data1[:, 0], data1[:, 1], marker='^',
             facecolor='blue', edgecolor='black', s=30, lw=1)

plt.xlim(X[0], X[-1])
plt.ylim(Y[0], Y[-1])
plt.xlabel("")
plt.ylabel("")
plt.grid(ls='dotted')

plt.savefig('{}.svg'.format(ID), bbox_inches='tight', pad_inches=0.1)
plt.savefig('{}.eps'.format(ID), bbox_inches='tight', pad_inches=0.1)

os.remove('dense.dat')
os.remove('data0.dat')
os.remove('data1.dat')

webbrowser.open('file://{}'.format(os.path.realpath('{}.svg'.format(sys.argv[1]))))
[ 11748, 28686, 11, 17597, 198, 11748, 3992, 40259, 198, 11748, 299, 32152, 355, 45941, 198, 11748, 2603, 29487, 8019, 198, 6759, 29487, 8019, 13, 1904, 10786, 46384, 11537, 198, 11748, 2603, 29487, 8019, 13, 11215, 355, 12067, 198, 11748, ...
2.237221
763
from sarna.model.enums import Score, Language
from sarna.report_generator import make_run
from sarna.report_generator.locale_choice import locale_choice
from sarna.report_generator.style import RenderStyle
[ 6738, 264, 28610, 13, 19849, 13, 268, 5700, 1330, 15178, 11, 15417, 198, 6738, 264, 28610, 13, 13116, 62, 8612, 1352, 1330, 787, 62, 5143, 198, 6738, 264, 28610, 13, 13116, 62, 8612, 1352, 13, 17946, 1000, 62, 25541, 1330, 36693, 62, ...
3.568966
58
# Open mode AP tests
# Copyright (c) 2014, Qualcomm Atheros, Inc.
#
# This software may be distributed under the terms of the BSD license.
# See README for more details.

import logging
logger = logging.getLogger()
import struct
import subprocess
import time
import os

import hostapd
import hwsim_utils
from tshark import run_tshark
from utils import alloc_fail
from wpasupplicant import WpaSupplicant

def test_ap_open(dev, apdev):
    """AP with open mode (no security) configuration"""
    hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open" })
    dev[0].connect("open", key_mgmt="NONE", scan_freq="2412",
                   bg_scan_period="0")
    ev = hapd.wait_event([ "AP-STA-CONNECTED" ], timeout=5)
    if ev is None:
        raise Exception("No connection event received from hostapd")
    hwsim_utils.test_connectivity(dev[0], hapd)

    dev[0].request("DISCONNECT")
    ev = hapd.wait_event([ "AP-STA-DISCONNECTED" ], timeout=5)
    if ev is None:
        raise Exception("No disconnection event received from hostapd")

def test_ap_open_packet_loss(dev, apdev):
    """AP with open mode configuration and large packet loss"""
    params = { "ssid": "open",
               "ignore_probe_probability": "0.5",
               "ignore_auth_probability": "0.5",
               "ignore_assoc_probability": "0.5",
               "ignore_reassoc_probability": "0.5" }
    hapd = hostapd.add_ap(apdev[0]['ifname'], params)
    for i in range(0, 3):
        dev[i].connect("open", key_mgmt="NONE", scan_freq="2412",
                       wait_connect=False)
    for i in range(0, 3):
        dev[i].wait_connected(timeout=20)

def test_ap_open_unknown_action(dev, apdev):
    """AP with open mode configuration and unknown Action frame"""
    hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open" })
    dev[0].connect("open", key_mgmt="NONE", scan_freq="2412")
    bssid = apdev[0]['bssid']
    cmd = "MGMT_TX {} {} freq=2412 action=765432".format(bssid, bssid)
    if "FAIL" in dev[0].request(cmd):
        raise Exception("Could not send test Action frame")
    ev = dev[0].wait_event(["MGMT-TX-STATUS"], timeout=10)
    if ev is None:
        raise Exception("Timeout on MGMT-TX-STATUS")
    if "result=SUCCESS" not in ev:
        raise Exception("AP did not ack Action frame")

def test_ap_open_invalid_wmm_action(dev, apdev):
    """AP with open mode configuration and invalid WMM Action frame"""
    hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open" })
    dev[0].connect("open", key_mgmt="NONE", scan_freq="2412")
    bssid = apdev[0]['bssid']
    cmd = "MGMT_TX {} {} freq=2412 action=1100".format(bssid, bssid)
    if "FAIL" in dev[0].request(cmd):
        raise Exception("Could not send test Action frame")
    ev = dev[0].wait_event(["MGMT-TX-STATUS"], timeout=10)
    if ev is None or "result=SUCCESS" not in ev:
        raise Exception("AP did not ack Action frame")

def test_ap_open_reconnect_on_inactivity_disconnect(dev, apdev):
    """Reconnect to open mode AP after inactivity related disconnection"""
    hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open" })
    dev[0].connect("open", key_mgmt="NONE", scan_freq="2412")
    hapd.request("DEAUTHENTICATE " + dev[0].p2p_interface_addr() + " reason=4")
    dev[0].wait_disconnected(timeout=5)
    dev[0].wait_connected(timeout=2, error="Timeout on reconnection")

def test_ap_open_assoc_timeout(dev, apdev):
    """AP timing out association"""
    ssid = "test"
    hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open" })
    dev[0].scan(freq="2412")
    hapd.set("ext_mgmt_frame_handling", "1")
    dev[0].connect("open", key_mgmt="NONE", scan_freq="2412",
                   wait_connect=False)
    for i in range(0, 10):
        req = hapd.mgmt_rx()
        if req is None:
            raise Exception("MGMT RX wait timed out")
        if req['subtype'] == 11:
            break
        req = None
    if not req:
        raise Exception("Authentication frame not received")

    resp = {}
    resp['fc'] = req['fc']
    resp['da'] = req['sa']
    resp['sa'] = req['da']
    resp['bssid'] = req['bssid']
    resp['payload'] = struct.pack('<HHH', 0, 2, 0)
    hapd.mgmt_tx(resp)

    assoc = 0
    for i in range(0, 10):
        req = hapd.mgmt_rx()
        if req is None:
            raise Exception("MGMT RX wait timed out")
        if req['subtype'] == 0:
            assoc += 1
            if assoc == 3:
                break
    if assoc != 3:
        raise Exception("Association Request frames not received: assoc=%d" % assoc)
    hapd.set("ext_mgmt_frame_handling", "0")
    dev[0].wait_connected(timeout=15)

def test_ap_open_id_str(dev, apdev):
    """AP with open mode and id_str"""
    hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open" })
    dev[0].connect("open", key_mgmt="NONE", scan_freq="2412", id_str="foo",
                   wait_connect=False)
    ev = dev[0].wait_connected(timeout=10)
    if "id_str=foo" not in ev:
        raise Exception("CTRL-EVENT-CONNECT did not have matching id_str: " + ev)
    if dev[0].get_status_field("id_str") != "foo":
        raise Exception("id_str mismatch")

def test_ap_open_select_any(dev, apdev):
    """AP with open mode and select any network"""
    hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open" })
    id = dev[0].connect("unknown", key_mgmt="NONE", scan_freq="2412",
                        only_add_network=True)
    dev[0].connect("open", key_mgmt="NONE", scan_freq="2412",
                   only_add_network=True)
    dev[0].select_network(id)
    ev = dev[0].wait_event(["CTRL-EVENT-NETWORK-NOT-FOUND",
                            "CTRL-EVENT-CONNECTED"], timeout=10)
    if ev is None:
        raise Exception("No result reported")
    if "CTRL-EVENT-CONNECTED" in ev:
        raise Exception("Unexpected connection")

    dev[0].select_network("any")
    dev[0].wait_connected(timeout=10)

def test_ap_open_unexpected_assoc_event(dev, apdev):
    """AP with open mode and unexpected association event"""
    hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open" })
    dev[0].connect("open", key_mgmt="NONE", scan_freq="2412")
    dev[0].request("DISCONNECT")
    dev[0].wait_disconnected(timeout=15)
    dev[0].dump_monitor()
    # This will be accepted due to matching network
    subprocess.call(['iw', 'dev', dev[0].ifname, 'connect', 'open', "2412",
                     apdev[0]['bssid']])
    dev[0].wait_connected(timeout=15)
    dev[0].dump_monitor()

    dev[0].request("REMOVE_NETWORK all")
    dev[0].wait_disconnected(timeout=5)
    dev[0].dump_monitor()
    # This will result in disconnection due to no matching network
    subprocess.call(['iw', 'dev', dev[0].ifname, 'connect', 'open', "2412",
                     apdev[0]['bssid']])
    dev[0].wait_disconnected(timeout=15)

def test_ap_bss_load(dev, apdev):
    """AP with open mode (no security) configuration"""
    hapd = hostapd.add_ap(apdev[0]['ifname'],
                          { "ssid": "open",
                            "bss_load_update_period": "10" })
    dev[0].connect("open", key_mgmt="NONE", scan_freq="2412")
    # this does not really get much useful output with mac80211_hwsim currently,
    # but run through the channel survey update couple of times
    for i in range(0, 10):
        hwsim_utils.test_connectivity(dev[0], hapd)
        hwsim_utils.test_connectivity(dev[0], hapd)
        hwsim_utils.test_connectivity(dev[0], hapd)
        time.sleep(0.15)

def test_ap_open_out_of_memory(dev, apdev):
    """hostapd failing to setup interface due to allocation failure"""
    # hapd_out_of_mem() is referenced below but its definition is not included
    # in this sample row.
    hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open" })
    hapd_out_of_mem(hapd, apdev[1], 1, "hostapd_alloc_bss_data")

    for i in range(1, 3):
        hapd_out_of_mem(hapd, apdev[1], i, "hostapd_iface_alloc")

    for i in range(1, 5):
        hapd_out_of_mem(hapd, apdev[1], i,
                        "hostapd_config_defaults;hostapd_config_alloc")

    hapd_out_of_mem(hapd, apdev[1], 1, "hostapd_config_alloc")

    hapd_out_of_mem(hapd, apdev[1], 1, "hostapd_driver_init")

    for i in range(1, 4):
        hapd_out_of_mem(hapd, apdev[1], i, "=wpa_driver_nl80211_drv_init")

    # eloop_register_read_sock() call from i802_init()
    hapd_out_of_mem(hapd, apdev[1], 1,
                    "eloop_sock_table_add_sock;eloop_register_sock;?eloop_register_read_sock;=i802_init")

    # verify that a new interface can still be added when memory allocation does
    # not fail
    hostapd.add_ap(apdev[1]['ifname'], { "ssid": "open" })

def test_bssid_black_white_list(dev, apdev):
    """BSSID black/white list"""
    hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open" })
    hapd2 = hostapd.add_ap(apdev[1]['ifname'], { "ssid": "open" })

    dev[0].connect("open", key_mgmt="NONE", scan_freq="2412",
                   bssid_whitelist=apdev[1]['bssid'])
    dev[1].connect("open", key_mgmt="NONE", scan_freq="2412",
                   bssid_blacklist=apdev[1]['bssid'])
    dev[2].connect("open", key_mgmt="NONE", scan_freq="2412",
                   bssid_whitelist="00:00:00:00:00:00/00:00:00:00:00:00",
                   bssid_blacklist=apdev[1]['bssid'])
    if dev[0].get_status_field('bssid') != apdev[1]['bssid']:
        raise Exception("dev[0] connected to unexpected AP")
    if dev[1].get_status_field('bssid') != apdev[0]['bssid']:
        raise Exception("dev[1] connected to unexpected AP")
    if dev[2].get_status_field('bssid') != apdev[0]['bssid']:
        raise Exception("dev[2] connected to unexpected AP")
    dev[0].request("REMOVE_NETWORK all")
    dev[1].request("REMOVE_NETWORK all")
    dev[2].request("REMOVE_NETWORK all")

    dev[2].connect("open", key_mgmt="NONE", scan_freq="2412",
                   bssid_whitelist="00:00:00:00:00:00", wait_connect=False)
    dev[0].connect("open", key_mgmt="NONE", scan_freq="2412",
                   bssid_whitelist="11:22:33:44:55:66/ff:00:00:00:00:00 " +
                   apdev[1]['bssid'] + " aa:bb:cc:dd:ee:ff")
    dev[1].connect("open", key_mgmt="NONE", scan_freq="2412",
                   bssid_blacklist="11:22:33:44:55:66/ff:00:00:00:00:00 " +
                   apdev[1]['bssid'] + " aa:bb:cc:dd:ee:ff")
    if dev[0].get_status_field('bssid') != apdev[1]['bssid']:
        raise Exception("dev[0] connected to unexpected AP")
    if dev[1].get_status_field('bssid') != apdev[0]['bssid']:
        raise Exception("dev[1] connected to unexpected AP")
    dev[0].request("REMOVE_NETWORK all")
    dev[1].request("REMOVE_NETWORK all")
    ev = dev[2].wait_event(["CTRL-EVENT-CONNECTED"], timeout=0.1)
    if ev is not None:
        raise Exception("Unexpected dev[2] connection")
    dev[2].request("REMOVE_NETWORK all")

def test_ap_open_wpas_in_bridge(dev, apdev):
    """Open mode AP and wpas interface in a bridge"""
    br_ifname = 'sta-br0'
    ifname = 'wlan5'
    try:
        _test_ap_open_wpas_in_bridge(dev, apdev)
    finally:
        subprocess.call(['ip', 'link', 'set', 'dev', br_ifname, 'down'])
        subprocess.call(['brctl', 'delif', br_ifname, ifname])
        subprocess.call(['brctl', 'delbr', br_ifname])
        subprocess.call(['iw', ifname, 'set', '4addr', 'off'])

def test_ap_open_start_disabled(dev, apdev):
    """AP with open mode and beaconing disabled"""
    hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open",
                                                "start_disabled": "1" })
    bssid = apdev[0]['bssid']

    dev[0].flush_scan_cache()

    dev[0].scan(freq=2412, only_new=True)
    if dev[0].get_bss(bssid) is not None:
        raise Exception("AP was seen beaconing")
    if "OK" not in hapd.request("RELOAD"):
        raise Exception("RELOAD failed")
    dev[0].scan_for_bss(bssid, freq=2412)
    dev[0].connect("open", key_mgmt="NONE", scan_freq="2412")

def test_ap_open_start_disabled2(dev, apdev):
    """AP with open mode and beaconing disabled (2)"""
    hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open",
                                                "start_disabled": "1" })
    bssid = apdev[0]['bssid']

    dev[0].flush_scan_cache()

    dev[0].scan(freq=2412, only_new=True)
    if dev[0].get_bss(bssid) is not None:
        raise Exception("AP was seen beaconing")
    if "OK" not in hapd.request("UPDATE_BEACON"):
        raise Exception("UPDATE_BEACON failed")
    dev[0].scan_for_bss(bssid, freq=2412)
    dev[0].connect("open", key_mgmt="NONE", scan_freq="2412")
    if "OK" not in hapd.request("UPDATE_BEACON"):
        raise Exception("UPDATE_BEACON failed")
    dev[0].request("DISCONNECT")
    dev[0].wait_disconnected()
    dev[0].request("RECONNECT")
    dev[0].wait_connected()

def test_ap_open_ifdown(dev, apdev):
    """AP with open mode and external ifconfig down"""
    params = { "ssid": "open",
               "ap_max_inactivity": "1" }
    hapd = hostapd.add_ap(apdev[0]['ifname'], params)
    bssid = apdev[0]['bssid']

    dev[0].connect("open", key_mgmt="NONE", scan_freq="2412")
    dev[1].connect("open", key_mgmt="NONE", scan_freq="2412")
    subprocess.call(['ip', 'link', 'set', 'dev', apdev[0]['ifname'], 'down'])
    ev = hapd.wait_event(["AP-STA-DISCONNECTED"], timeout=10)
    if ev is None:
        raise Exception("Timeout on AP-STA-DISCONNECTED (1)")
    ev = hapd.wait_event(["AP-STA-DISCONNECTED"], timeout=5)
    if ev is None:
        raise Exception("Timeout on AP-STA-DISCONNECTED (2)")
    ev = hapd.wait_event(["INTERFACE-DISABLED"], timeout=5)
    if ev is None:
        raise Exception("No INTERFACE-DISABLED event")
    # The following wait tests beacon loss detection in mac80211 on dev0.
    # dev1 is used to test stopping of AP side functionality on client polling.
    dev[1].request("REMOVE_NETWORK all")
    subprocess.call(['ip', 'link', 'set', 'dev', apdev[0]['ifname'], 'up'])
    dev[0].wait_disconnected()
    dev[1].wait_disconnected()
    ev = hapd.wait_event(["INTERFACE-ENABLED"], timeout=10)
    if ev is None:
        raise Exception("No INTERFACE-ENABLED event")
    dev[0].wait_connected()
    hwsim_utils.test_connectivity(dev[0], hapd)

def test_ap_open_disconnect_in_ps(dev, apdev, params):
    """Disconnect with the client in PS to regression-test a kernel bug"""
    hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open" })
    dev[0].connect("open", key_mgmt="NONE", scan_freq="2412",
                   bg_scan_period="0")
    ev = hapd.wait_event([ "AP-STA-CONNECTED" ], timeout=5)
    if ev is None:
        raise Exception("No connection event received from hostapd")

    time.sleep(0.2)
    hwsim_utils.set_powersave(dev[0], hwsim_utils.PS_MANUAL_POLL)
    try:
        # inject some traffic
        sa = hapd.own_addr()
        da = dev[0].own_addr()
        hapd.request('DATA_TEST_CONFIG 1')
        hapd.request('DATA_TEST_TX {} {} 0'.format(da, sa))
        hapd.request('DATA_TEST_CONFIG 0')

        # let the AP send couple of Beacon frames
        time.sleep(0.3)

        # disconnect - with traffic pending - shouldn't cause kernel warnings
        dev[0].request("DISCONNECT")
    finally:
        hwsim_utils.set_powersave(dev[0], hwsim_utils.PS_DISABLED)

    time.sleep(0.2)
    out = run_tshark(os.path.join(params['logdir'], "hwsim0.pcapng"),
                     "wlan_mgt.tim.partial_virtual_bitmap",
                     ["wlan_mgt.tim.partial_virtual_bitmap"])
    if out is not None:
        state = 0
        for l in out.splitlines():
            pvb = int(l, 16)
            if pvb > 0 and state == 0:
                state = 1
            elif pvb == 0 and state == 1:
                state = 2
        if state != 2:
            raise Exception("Didn't observe TIM bit getting set and unset (state=%d)" % state)

def test_ap_open_select_network(dev, apdev):
    """Open mode connection and SELECT_NETWORK to change network"""
    hapd1 = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open" })
    bssid1 = apdev[0]['bssid']
    hapd2 = hostapd.add_ap(apdev[1]['ifname'], { "ssid": "open2" })
    bssid2 = apdev[1]['bssid']

    id1 = dev[0].connect("open", key_mgmt="NONE", scan_freq="2412",
                         only_add_network=True)
    id2 = dev[0].connect("open2", key_mgmt="NONE", scan_freq="2412")
    hwsim_utils.test_connectivity(dev[0], hapd2)

    dev[0].select_network(id1)
    dev[0].wait_connected()
    res = dev[0].request("BLACKLIST")
    if bssid1 in res or bssid2 in res:
        raise Exception("Unexpected blacklist entry")
    hwsim_utils.test_connectivity(dev[0], hapd1)

    dev[0].select_network(id2)
    dev[0].wait_connected()
    hwsim_utils.test_connectivity(dev[0], hapd2)
    res = dev[0].request("BLACKLIST")
    if bssid1 in res or bssid2 in res:
        raise Exception("Unexpected blacklist entry(2)")

def test_ap_open_disable_enable(dev, apdev):
    """AP with open mode getting disabled and re-enabled"""
    hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open" })
    dev[0].connect("open", key_mgmt="NONE", scan_freq="2412",
                   bg_scan_period="0")
    for i in range(2):
        hapd.request("DISABLE")
        dev[0].wait_disconnected()
        hapd.request("ENABLE")
        dev[0].wait_connected()
        hwsim_utils.test_connectivity(dev[0], hapd)

def test_ap_open_sta_enable_disable(dev, apdev):
    """AP with open mode and wpa_supplicant ENABLE/DISABLE_NETWORK"""
    hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open" })
    bssid = apdev[0]['bssid']

    sta_enable_disable(dev[0], bssid)

    wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')
    wpas.interface_add("wlan5", drv_params="force_connect_cmd=1")
    sta_enable_disable(wpas, bssid)
[ 2, 4946, 4235, 3486, 5254, 198, 2, 15069, 357, 66, 8, 1946, 11, 32903, 27751, 4951, 11, 3457, 13, 198, 2, 198, 2, 770, 3788, 743, 307, 9387, 739, 262, 2846, 286, 262, 347, 10305, 5964, 13, 198, 2, 4091, 20832, 11682, 329, 517, 3...
2.184765
8,021
#!/usr/bin/env python
# coding: utf-8

# pylint: disable-all
from __future__ import absolute_import
from sklearn.preprocessing import LabelEncoder
from pathlib import Path
import torch
from torch.autograd import Variable
import torch.nn as nn
import torch.optim as optim


def train_epoch(model, opt, criterion, X, y, batch_size=50):
    model.train()
    losses = []
    for beg_i in range(0, X.size(0), batch_size):
        x_batch = X[beg_i : beg_i + batch_size, :]
        # y_hat will be (batch_size, 1) dim, so coerce target to look the same
        y_batch = y[beg_i : beg_i + batch_size].reshape(-1, 1)
        x_batch = Variable(x_batch)
        y_batch = Variable(y_batch)

        opt.zero_grad()
        # (1) Forward
        y_hat = model(x_batch)
        # (2) Compute diff
        loss = criterion(y_hat, y_batch)
        # (3) Compute gradients
        loss.backward()
        # (4) update weights
        opt.step()
        losses.append(loss.data.numpy())
    return losses


# Note: BinModel, MultiModel and RegModel are referenced below but their
# definitions are not included in this sample row.
def build_classifier(X, num_labels):
    class_model = BinModel(X.shape[1]) if num_labels == 2 else MultiModel(X.shape[1], num_labels)
    class_opt = optim.Adam(class_model.parameters(), lr=0.001)
    class_criterion = nn.BCELoss() if num_labels == 2 else nn.CrossEntropyLoss()
    return class_model, class_opt, class_criterion


def build_regressor(X):
    reg_model = RegModel(X.shape[1])
    reg_opt = optim.Adam(reg_model.parameters(), lr=0.001)
    reg_criterion = nn.MSELoss()
    return reg_model, reg_opt, reg_criterion


def train_classifier(X, y, class_model, class_opt, class_criterion, n_epochs=5):
    target_encoder = LabelEncoder()
    target_encoder.fit(y)
    transformed_y = target_encoder.transform(y)
    bin_t_X = torch.from_numpy(X.values).type(torch.FloatTensor)
    bin_t_y = torch.from_numpy(transformed_y).type(class_model.expected_target_type)
    for e in range(n_epochs):
        train_epoch(class_model, class_opt, class_criterion, bin_t_X, bin_t_y)
[ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 198, 2, 19617, 25, 3384, 69, 12, 23, 198, 198, 2, 279, 2645, 600, 25, 15560, 12, 439, 198, 6738, 11593, 37443, 834, 1330, 4112, 62, 11748, 198, 6738, 1341, 35720, 13, 3866, 36948, 1330, ...
2.350296
845
"""Simulate stochastic observing weather conditions. The simulated conditions include seeing, transparency and the dome-open fraction. """ from __future__ import print_function, division, absolute_import from datetime import datetime import numpy as np import astropy.time import astropy.table import astropy.units as u import desiutil.log import desimodel.weather import desisurvey.config import desisurvey.ephem import desisurvey.utils
[ 37811, 8890, 5039, 3995, 354, 3477, 21769, 6193, 3403, 13, 198, 198, 464, 28590, 3403, 2291, 4379, 11, 13902, 290, 262, 29500, 12, 9654, 13390, 13, 198, 37811, 198, 6738, 11593, 37443, 834, 1330, 3601, 62, 8818, 11, 7297, 11, 4112, 62...
3.626016
123
# coding=utf-8
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Utility to convert a Data Export API response into TSV.

This provides utilities to both print TSV files to the standard output
as well as directly to a file. This logic handles all the utf-8 conversion.

  GetTsvFilePrinter: Returns an instantiated object to output to files.
  GetTsvScreenPrinter: Returns an instantiated object to output to the screen.
  UnicodeWriter(): Utf-8 encodes output.
  ExportPrinter(): Converts the Data Export API response into tabular data.
"""

__author__ = 'api.nickm@ (Nick Mihailovski)'

import codecs
import csv
import StringIO
import sys
import types


# A list of special characters that need to be escaped.
SPECIAL_CHARS = ('+', '-', '/', '*', '=')
# TODO(nm): Test leading numbers.


def GetTsvFilePrinter(file_name):
  """Returns an ExportPrinter object to output to file_name.

  Args:
    file_name: string The name of the file to output to.

  Returns:
    The newly created ExportPrinter object.
  """
  my_handle = open(file_name)
  writer = UnicodeWriter(my_handle, dialect='excel-tab')
  return ExportPrinter(writer)


def GetTsvScreenPrinter():
  """Returns an ExportPrinter object to output to sys.stdout."""
  writer = UnicodeWriter(sys.stdout, dialect='excel-tab')
  return ExportPrinter(writer)


def GetTsvStringPrinter(f):
  """Returns an ExportPrinter object to output to the given file object."""
  writer = UnicodeWriter(f, dialect='excel-tab')
  return ExportPrinter(writer)


# Wrapper to output to utf-8. Taken mostly / directly from Python docs:
# http://docs.python.org/library/csv.html


def ExcelEscape(input_value):
  """Escapes the first character of a string if it is special in Excel.

  Args:
    input_value: string The value to escape.

  Returns:
    A string that has the first character escaped if it is special.
  """
  if input_value and input_value[0] in SPECIAL_CHARS:
    return "'" + input_value
  return input_value
[ 2, 19617, 28, 40477, 12, 23, 628, 198, 2, 15069, 1853, 3012, 3457, 13, 1439, 2489, 10395, 13, 198, 2, 198, 2, 49962, 739, 262, 24843, 13789, 11, 10628, 362, 13, 15, 357, 1169, 366, 34156, 15341, 198, 2, 345, 743, 407, 779, 428, ...
3.385135
740
# -*- coding: utf-8 -*-
"""
@Project : RNN_Prediction
@Author  : Xu-Shan Zhao
@Filename: stockPrediction202005201318.py
@IDE     : PyCharm
@Time1   : 2020-05-20 13:18:46
@Time2   : 2020/5/20 13:18
@Month1  : 5
@Month2  :
"""
import tushare as ts
import tensorflow as tf
import pandas as pd
from sklearn.model_selection import train_test_split
import matplotlib.pyplot as plt

stock_catl = ts.get_hist_data('300750')
stock_catl = stock_catl.sort_index(ascending=True)
stock_catl = (stock_catl - stock_catl.mean()) / \
             (stock_catl.max() - stock_catl.min())

# train, val = train_test_split(stock_catl, test_size=0.5)
# train = train.sort_index(ascending=True)
# val = val.sort_index(ascending=True)
train = stock_catl.iloc[:-60, :]
val = stock_catl.iloc[-60:, :]

window_size = 30
column = 'high'
epoches = 300

# zip_ds is referenced here but its definition is not included in this sample row.
ds_train = zip_ds(train)
ds_val = zip_ds(val)

model = tf.keras.Sequential(
    [
        tf.keras.layers.LSTM(128, return_sequences=True, activation='relu'),
        tf.keras.layers.LSTM(128, activation='relu'),
        tf.keras.layers.Dense(13)
    ]
)

optimizer = tf.keras.optimizers.Adam(learning_rate=0.01)
model.compile(optimizer=optimizer, loss='mse')
history = model.fit(
    ds_train,
    epochs=epoches,
    steps_per_epoch=5,
    validation_data=ds_val,
    validation_steps=1
)
model.save('stockLSTM')

# Plot loss function
plt.figure(figsize=(19, 9))
ax = plt.gca()
plt.plot(range(len(history.history['loss'])), history.history['loss'])
plt.plot(range(len(history.history['val_loss'])), history.history['val_loss'])
ax.set_yscale('log')
plt.show()

# Compare fitting and real values.
dff = pd.DataFrame()
for i in range(len(stock_catl) - window_size):
    fits = model.predict(tf.constant(tf.expand_dims(stock_catl.values[i:i + window_size, :], axis=0)))
    dffits = pd.DataFrame(fits, columns=stock_catl.columns)
    dff = dff.append(dffits)
dff.index = stock_catl.index[window_size:]
plt.figure(figsize=(19, 9))
dff[column].plot()
stock_catl.iloc[window_size:, :][column].plot(style='-o')
plt.show()

# To predict future 100 business days.
dfp = stock_catl.copy()
for i in range(100):
    pres = model.predict(tf.constant(tf.expand_dims(dfp.values[-1 * window_size:], axis=0)))
    dfpres = pd.DataFrame(pres, columns=stock_catl.columns)
    dfp = dfp.append(dfpres, ignore_index=True)
dfp[column].plot()
plt.show()
[ 2, 532, 9, 12, 19617, 25, 3384, 69, 12, 23, 532, 9, 12, 198, 37811, 198, 31, 16775, 1058, 371, 6144, 62, 39156, 2867, 198, 31, 13838, 220, 1058, 33591, 12, 2484, 272, 29436, 198, 31, 35063, 25, 4283, 39156, 2867, 1238, 14315, 6390...
2.334322
1,011
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from src import app
import os
import shutil
from flask import Flask, render_template, session, request, flash, url_for, redirect
from Forms import ContactForm, LoginForm, editForm, ReportForm, CommentForm, searchForm, AddPlaylist
from flask.ext.mail import Message, Mail
from werkzeug import secure_filename
from werkzeug import SharedDataMiddleware
from api import API
from songs import SONG
from playlist import playlist
from admin import admin
from artist import artist
import pymysql
import hashlib
from flask import g

mail = Mail()
mail.init_app(app)

# For the collector script.
app.register_blueprint(API)
# For the songs
app.register_blueprint(SONG)
# For the playlist
app.register_blueprint(playlist)
# for the admin pages
app.register_blueprint(admin)
# for the artist pages
app.register_blueprint(artist)

UPLOAD_FOLDER = "img/ProfilePic/"
ALLOWED_EXTENSIONS = set(['png', 'jpg', 'jpeg', 'gif'])
app.config['UPLOAD_FOLDER'] = 'src/static/' + UPLOAD_FOLDER


# For database connections.
def flash_errors(form):
    for field, errors in form.errors.items():
        for error in errors:
            flash(u"Error in the %s field - %s" % (
                getattr(form, field).label.text,
                error
            ))


app.add_url_rule('/user/uploads/<filename>', 'uploaded_file', build_only=True)
app.wsgi_app = SharedDataMiddleware(app.wsgi_app, {
    '/user/uploads': 'src/static' + app.config['UPLOAD_FOLDER']
})


def requestvalidate(userfrom, userto):
    check = g.database.execute("""
        SELECT Status from requests where Request_to="%s" and Request_from="%s"
    """ % (userfrom, userto))
    if check and g.database.fetchone()[0] == '-1' and userfrom != userto:
        return False
    else:
        return True


# All your profile are belong to us.

# To handle 404 not found errors
if not app.debug:
    import logging
    from logging.handlers import SMTPHandler
    mail_handler = SMTPHandler('127.0.0.1',
                               'server-error@example.com',
                               app.config['DEFAULT_MAIL_SENDER'],
                               'YourApplication Failed')
    mail_handler.setLevel(logging.ERROR)
    app.logger.addHandler(mail_handler)

    from logging import FileHandler
    file_handler = FileHandler('log.txt')
    file_handler.setLevel(logging.WARNING)
    app.logger.addHandler(file_handler)

    from logging import Formatter
    mail_handler.setFormatter(Formatter('''
    Message type:       %(levelname)s
    Location:           %(pathname)s:%(lineno)d
    Module:             %(module)s
    Function:           %(funcName)s
    Time:               %(asctime)s

    Message:

    %(message)s
    '''))

if __name__ == """__main__""":
    # To allow aptana to receive errors, set use_debugger=False
    app = create_app(config="""config.yaml""")

    if app.debug:
        use_debugger = True
    try:
        # Disable Flask's debugger if external debugger is requested
        use_debugger = not (app.config.get('DEBUG_WITH_APTANA'))
    except:
        pass
    app.run(use_debugger=use_debugger, use_reloader=use_debugger,
            threaded=True, port=8080)
[ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 198, 2, 532, 9, 12, 19617, 25, 3384, 69, 12, 23, 532, 9, 12, 198, 6738, 12351, 1330, 598, 198, 11748, 28686, 198, 11748, 4423, 346, 198, 6738, 42903, 1330, 46947, 11, 8543, 62, 28243, 1...
2.605932
1,180
# coding=utf-8
# Copyright 2018 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Lint as: python3
"""Evaluate lazy slot filling results."""

import codecs
import collections
import gzip
import json
import random
import re
import string
import unicodedata

from absl import app
from absl import flags
from bert import tokenization
from language.labs.drkit import input_fns
import numpy as np
import tensorflow.compat.v1 as tf

# Note: several names referenced below (NumpyEncoder, lower, remove_punc,
# remove_articles, white_space_fix, metric_max_over_ground_truths, main) are
# not included in this sample row.

PUNCTUATION = frozenset(string.punctuation)

FLAGS = flags.FLAGS

## Required parameters
flags.DEFINE_string("ground_truth_file", None,
                    "File with ground truth answers.")

flags.DEFINE_string("predicted_answers_file", None,
                    "File with predicted answers from model.")

flags.DEFINE_string("relation_counts_file", None,
                    "JSON file with relation counts.")


def wikimovie_eval_fn(dataset, results, name_map, output_prediction_file,
                      **kwargs):
  """Compute evaluation metrics for OneHopDataset or TwoHopDataset.

  Args:
    dataset: An object of type OneHopDataset.
    results: A list of result dicts from running estimator.predict.
    name_map: A mapping from prediction indices to text strings.
    output_prediction_file: File to store predictions to.
    **kwargs: Variable keyword arguments.

  Returns:
    metrics: A dict mapping metric names to values.
  """
  del kwargs

  # Collect ground truth answers.
  gt_answer = {ex.qas_id: ex.answer_entity for ex in dataset.examples}
  gt_ques = {ex.qas_id: ex.question_text for ex in dataset.examples}
  gt_entity = {ex.qas_id: ex.subject_entity[0] for ex in dataset.examples}
  inf_chain = {ex.qas_id: ex.inference_chain for ex in dataset.examples}

  # Compute basic metrics.
  num_correct = 0.
  all_predictions = {}
  chain2stats = {ch: [0., 0.] for ch in inf_chain.values()}
  incorrect_results, correct_results = [], []
  for result in results:
    qas_id = result["qas_ids"]
    prediction = result["predictions"]
    if prediction in gt_answer[qas_id]:
      num_correct += 1
      chain2stats[inf_chain[qas_id]][0] += 1
      correct_results.append({
          "qas_id": result["qas_ids"],
          "question": gt_ques[qas_id],
          "answers": gt_answer[qas_id],
          "subject": gt_entity[qas_id],
          "inf-chain": inf_chain[qas_id],
          "predictions": result["predictions"],
      })
      for hop in range(3):
        if "sparse_%d" % hop in result:
          correct_results[-1].update({
              "sparse_%d" % hop: result["sparse_%d" % hop],
              "dense_%d" % hop: result["dense_%d" % hop],
              "mention_%d" % hop: result["mention_%d" % hop],
              "entity_%d" % hop: result["entity_%d" % hop],
              "sparse_scores_%d" % hop: result["sparse_scores_%d" % hop],
              "dense_scores_%d" % hop: result["dense_scores_%d" % hop],
              "mention_scores_%d" % hop: result["mention_scores_%d" % hop],
              "entity_scores_%d" % hop: result["entity_scores_%d" % hop],
          })
    else:
      incorrect_results.append({
          "qas_id": result["qas_ids"],
          "question": gt_ques[qas_id],
          "answers": gt_answer[qas_id],
          "subject": gt_entity[qas_id],
          "inf-chain": inf_chain[qas_id],
          "predictions": result["predictions"],
      })
      for hop in range(3):
        if "sparse_%d" % hop in result:
          incorrect_results[-1].update({
              "sparse_%d" % hop: result["sparse_%d" % hop],
              "dense_%d" % hop: result["dense_%d" % hop],
              "mention_%d" % hop: result["mention_%d" % hop],
              "entity_%d" % hop: result["entity_%d" % hop],
              "sparse_scores_%d" % hop: result["sparse_scores_%d" % hop],
              "dense_scores_%d" % hop: result["dense_scores_%d" % hop],
              "mention_scores_%d" % hop: result["mention_scores_%d" % hop],
              "entity_scores_%d" % hop: result["entity_scores_%d" % hop],
          })
    chain2stats[inf_chain[qas_id]][1] += 1
    all_predictions[qas_id] = name_map[str(prediction)]
  accuracy = num_correct / len(all_predictions)
  json.dump(all_predictions, tf.gfile.Open(output_prediction_file, "w"))
  json.dump(
      random.sample(incorrect_results, 100),
      tf.gfile.Open(output_prediction_file + ".incorrect", "w"),
      cls=NumpyEncoder)
  json.dump(
      random.sample(correct_results, 100),
      tf.gfile.Open(output_prediction_file + ".correct", "w"),
      cls=NumpyEncoder)

  # Return metrics.
  metrics = {
      "accuracy": accuracy,
  }
  for ch, stats in chain2stats.items():
    metrics["inference-chains-acc/" + ch] = stats[0] / stats[1]
  return metrics


def multihop_eval_fn(dataset, results, name_map, output_prediction_file,
                     supervision="mention", **kwargs):
  """Compute evaluation metrics for OneHopDataset or TwoHopDataset.

  Args:
    dataset: An object of type OneHopDataset.
    results: A list of result dicts from running estimator.predict.
    name_map: A mapping from prediction indices to text strings.
    output_prediction_file: File to store predictions to.
    supervision: Type of supervision used in the model.
    **kwargs: Variable keyword arguments.

  Returns:
    metrics: A dict mapping metric names to values.
  """
  del kwargs

  # Collect ground truth answers.
  gt_mentions = {ex.qas_id: ex.answer_mention[0] for ex in dataset.examples}
  if supervision == "mention":
    gt_answer = gt_mentions
  else:
    gt_answer = {ex.qas_id: ex.answer_entity[0] for ex in dataset.examples}

  # Compute basic metrics.
  num_correct = 0.
  all_predictions = {}
  for result in results:
    qas_id = result["qas_ids"]
    prediction = result["predictions"]
    if prediction == gt_answer[qas_id]:
      num_correct += 1
    all_predictions[qas_id] = name_map[str(prediction)]
  accuracy = num_correct / len(all_predictions)

  # Compute advanced metrics.
  json.dump(all_predictions, tf.gfile.Open(output_prediction_file, "w"))
  micro, macro, _, _ = compute_scores(dataset.gt_file, output_prediction_file)

  # Return metrics.
  metrics = {
      "accuracy": accuracy,
      "micro-p": micro[0],
      "micro-r": micro[1],
      "micro-f": micro[2],
      "macro-p": macro[0],
      "macro-r": macro[1],
      "macro-f": macro[2],
  }
  return metrics


def hotpot_eval_fn(dataset, results, name_map, output_prediction_file,
                   **kwargs):
  """Compute evaluation metrics for HotpotQADataset.

  Args:
    dataset: An object of type HotpotQADataset.
    results: A list of result dicts from running estimator.predict.
    name_map: A mapping from prediction indices to text strings.
    output_prediction_file: File to store predictions to.
    **kwargs: Variable keyword arguments.

  Returns:
    metrics: A dict mapping metric names to values.
  """
  del kwargs

  # Collect ground truth answers.
  gt_answer = {ex.qas_id: ex.answer_entity for ex in dataset.examples}
  gt_types = {ex.qas_id: ex.inference_chain for ex in dataset.examples}

  # Compute basic metrics.
  num_correct = {2: 0., 5: 0., 10: 0., 20: 0.}
  aps = []
  no_answer = 0.
  all_predictions = {}
  bridge_acc, comp_acc = 0., 0.
  bridge_tot, comp_tot = 0, 0
  single_acc = 0.
  layer_weights = np.zeros_like(results[0]["layer_probs"])
  num_layer_entities = {i: 0. for i in range(layer_weights.shape[0])}
  num_new_entities = {i: 0. for i in range(layer_weights.shape[0])}
  for result in results:
    qas_id = result["qas_ids"].decode("utf-8")
    preds = result["top_idx"]
    scores = result["top_vals"]
    ans = gt_answer[qas_id]
    my_type = gt_types[qas_id]
    if my_type == "bridge":
      bridge_tot += 1
    else:
      comp_tot += 1
    ranks = np.where(np.in1d(preds, ans))[0]
    ranks = np.sort(ranks)
    ap = 0.
    cnt = 0.
    if any(rr < 10 for rr in ranks):
      single_acc += 1
    if ranks.shape[0] == 0:
      no_answer += 1
    for rr in ranks:
      cnt += 1
      ap += cnt / (rr + 1)
    if ans:
      aps.append(ap / len(ans))
    else:
      aps.append(0.)
    found = False
    for key in [2, 5, 10, 20]:
      if found or np.in1d(ans, preds[:key]).all():
        num_correct[key] += 1
        found = True
        if key == 10:
          if my_type == "bridge":
            bridge_acc += 1
          else:
            comp_acc += 1
    # Non-accuracy stats
    layer_weights += result["layer_probs"]
    layer_entities = {i: set() for i in range(layer_weights.shape[0])}
    all_predictions[qas_id] = {}
    for i in range(layer_weights.shape[0]):
      layer_entities[i] = set(
          [ee for ee in result["layer_%d_ent" % i] if ee != -1])
      num_layer_entities[i] += len(layer_entities[i])
      num_new_entities[i] += len(layer_entities[i] - layer_entities[0])
      # all_predictions[qas_id]["layer_%d" % i] = [
      #     name_map[str(ee)] for ee in layer_entities[i]]
    all_predictions[qas_id]["predictions"] = [
        (name_map[str(pred)], str(scores[i])) for i, pred in enumerate(preds)
    ]
  tf.logging.info("Evaluated %d items", len(all_predictions))
  accuracy = {
      key: (num_correct[key] / len(all_predictions)) for key in num_correct
  }

  # Compute advanced metrics.
  json.dump(all_predictions, tf.gfile.Open(output_prediction_file, "w"))

  # Return metrics.
  metrics = {"eval/@%d" % key: accuracy[key] for key in accuracy}
  metrics["accuracy"] = accuracy[10]
  metrics["eval/map"] = sum(aps) / len(all_predictions)
  metrics["eval/bridge_accuracy"] = bridge_acc / bridge_tot
  metrics["eval/comparison_accuracy"] = comp_acc / comp_tot
  metrics["analysis/single_accuracy"] = single_acc / len(all_predictions)
  metrics["analysis/no_answers"] = no_answer / len(all_predictions)
  for i in range(layer_weights.shape[0]):
    metrics["analysis/layer_weight_%d" % i] = layer_weights[i] / len(all_predictions)
    metrics["analysis/num_entities_%d" % i] = num_layer_entities[i] / len(all_predictions)
    metrics["analysis/num_new_entities_%d" % i] = num_new_entities[i] / len(all_predictions)
  return metrics


def normalize_answer(s):
  """Lower text and remove punctuation, articles and extra whitespace."""
  return white_space_fix(remove_articles(remove_punc(lower(s))))


def f1_score(prediction, ground_truth):
  """Compute F1 score."""
  prediction_tokens = normalize_answer(prediction).split()
  ground_truth_tokens = normalize_answer(ground_truth).split()
  common = collections.Counter(prediction_tokens) & collections.Counter(
      ground_truth_tokens)
  num_same = sum(common.values())
  if num_same == 0:
    return 0
  precision = 1.0 * num_same / len(prediction_tokens)
  recall = 1.0 * num_same / len(ground_truth_tokens)
  f1 = (2 * precision * recall) / (precision + recall)
  return f1


def exact_match_score(prediction, ground_truth):
  """Compute EM score."""
  return normalize_answer(prediction) == normalize_answer(ground_truth)


def read_answers(gold_file):
  """Read ground truth answers."""
  answers = {}
  f = tf.gfile.Open(gold_file)
  if gold_file.endswith(".gz"):
    f = gzip.GzipFile(fileobj=f)
  for i, line in enumerate(f):
    example = json.loads(line)
    if i == 0 and "header" in example:
      continue
    for qa in example["qas"]:
      answers[qa["qid"]] = qa["answers"]
  f.close()
  return answers


def evaluate(answers, predictions, skip_no_answer=False):
  """Compute F1 and EM scores."""
  f1 = exact_match = total = 0
  for qid, ground_truths in answers.items():
    if qid not in predictions:
      if not skip_no_answer:
        message = ("Unanswered question %s will receive score 0." % qid)
        print(message)
        total += 1
      continue
    total += 1
    prediction = predictions[qid]
    exact_match += metric_max_over_ground_truths(exact_match_score, prediction,
                                                 ground_truths)
    f1 += metric_max_over_ground_truths(f1_score, prediction, ground_truths)

  exact_match = 100.0 * exact_match / total
  f1 = 100.0 * f1 / total

  return {"exact_match": exact_match, "f1": f1}


def compute_scores(ground_truth_file, predicted_answers_file):
  """Read predictions and ground truth and return P, R, F."""
  telemetry, incorrect = read_results(ground_truth_file,
                                      predicted_answers_file)
  micro = aprf(telemetry)
  relationwise = aprf_relationwise(telemetry)
  macro = sum([val[0] for _, val in relationwise.items()])
  macro = macro / len(relationwise)
  return micro, macro, relationwise, incorrect


def read_results(ground_truth_file, predicted_answers_file):
  """Read results and ground truth and return data structure with stats."""
  with codecs.getreader("utf-8")(tf.gfile.GFile(ground_truth_file,
                                                "r")) as read:
    data_ = {}
    for line in read:
      item = json.loads(line.strip())
      if isinstance(item["relation"], dict):
        relation = item["relation"]["wikidata_id"]
      elif isinstance(item["relation"], list):
        relation = (item["relation"][0]["wikidata_id"] + "_" +
                    item["relation"][1]["wikidata_id"])
      data_[item["id"]] = [relation, item["subject"]["wikidata_id"]]
      if "is_impossible" in item and item["is_impossible"]:
        continue
      if item["object"] is None:
        continue
      if isinstance(item["object"]["mention"], dict):
        data_[item["id"]] += [item["object"]["mention"]["text"]]
      if "name" in item["object"]:
        data_[item["id"]] += [item["object"]["name"]]
      if "aliases" in item["object"]:
        data_[item["id"]] += item["object"]["aliases"].keys()
  with codecs.getreader("utf-8")(tf.gfile.GFile(predicted_answers_file,
                                                "r")) as fin:
    predictions = json.load(fin)

  telemetry, incorrect = [], []
  n = 0
  for key in data_:
    if key not in predictions:
      continue
    g = data_[key][2:]
    a = predictions[key]
    m = data_[key][:2]
    stats = score(g, a)
    telemetry.append([m[0], m[1], g, a, stats])
    if stats[0] == 0. and stats[3] > 0.:
      incorrect.append(key)
    n += 1
  return telemetry, incorrect


def aprf_relationwise(g):
  """Returns precision, recall and F score for each relation."""
  rel_to_stats = collections.defaultdict(list)
  for item in g:
    rel_to_stats[item[0]].append(item)
  rel_to_scores = {}
  for rel, stats in rel_to_stats.items():
    rel_to_scores[rel] = [aprf(stats), len(stats)]
  return rel_to_scores


def aprf(g):
  """Returns precision, recall and F of the given statistics."""
  tp, _, sys_pos, real_pos = sum([x[-1] for x in g])
  if tp == 0:
    p = r = f = 0.0
  else:
    p = tp / float(sys_pos) if sys_pos > 0 else 0.
    r = tp / float(real_pos) if real_pos > 0 else 0.
    f = 2 * p * r / (p + r)
  return np.asarray([p, r, f])


def score(gold, answer):
  """Compares answer to ground truth to return TP / FP stats."""
  if gold:
    gold = set([simplify(g) for g in gold])
  answer = simplify(answer)
  result = np.zeros(4)
  if gold:
    result[3] += 1
    if answer in gold:
      result[0] += 1
  else:
    if not answer:
      result[1] += 1
  if answer:
    result[2] += 1
  return result


def strip_accents_and_punct(text):
  """Strips accents from a piece of text."""
  text = unicodedata.normalize("NFD", text)
  output = []
  for char in text:
    if char in PUNCTUATION:
      continue
    cat = unicodedata.category(char)
    if cat == "Mn":
      continue
    output.append(char)
  return "".join(output)


def simplify(answer):
  """Pre-process answer string."""
  toks = []
  articles = {"the", "a", "an", "and", ""}
  for t in answer.strip().lower().split():
    tok = strip_accents_and_punct(t)
    if tok not in articles:
      toks.append(tok)
  return "".join(toks)


def rare_relation_scores(relationwise, relation2counts):
  """Print statistics of rare relations for different thresholds."""
  for thresh in [5, 100, 500, 1000]:
    freq_stats, freq_total = np.array([0., 0., 0.]), 0
    rare_stats, rare_total = np.array([0., 0., 0.]), 0
    for relation, (stats, _) in relationwise.items():
      if relation2counts.get(relation, 0) < thresh:
        rare_stats += stats
        rare_total += 1
      else:
        freq_stats += stats
        freq_total += 1
    rare_stats /= rare_total
    freq_stats /= freq_total
    print(
        "Threshold =", thresh,
        "rare", rare_total,
        "Micro-P %.3f Micro-R %.3f Micro-F %.3f" %
        (rare_stats[0], rare_stats[1], rare_stats[2]),
        "freq", freq_total,
        "Micro-P %.3f Micro-R %.3f Micro-F %.3f" %
        (freq_stats[0], freq_stats[1], freq_stats[2]))


if __name__ == "__main__":
  app.run(main)
[ 2, 19617, 28, 40477, 12, 23, 198, 2, 15069, 2864, 383, 3012, 9552, 15417, 4816, 46665, 13, 198, 2, 198, 2, 49962, 739, 262, 24843, 13789, 11, 10628, 362, 13, 15, 357, 1169, 366, 34156, 15341, 198, 2, 345, 743, 407, 779, 428, 2393,...
2.372157
7,298
""" test_pop_models.py Author: Jordan Mirocha Affiliation: UCLA Created on: Fri Jul 15 15:23:11 PDT 2016 Description: """ import ares import matplotlib.pyplot as pl PB = ares.util.ParameterBundle if __name__ == '__main__': test()
# coding: utf-8

import time
import hashlib

import leancloud
from leancloud._compat import to_bytes

__author__ = 'asaka <lan@leancloud.rocks>'
""" mavsimPy - Chapter 4 assignment for Beard & McLain, PUP, 2012 - Update history: 12/27/2018 - RWB 1/17/2019 - RWB """ import sys sys.path.append('..') import numpy as np import parameters.simulation_parameters as SIM from chap2.mav_viewer import mav_viewer # from chap2.video_writer import video_writer from chap3.data_viewer import data_viewer from chap4.mav_dynamics import mav_dynamics from chap4.wind_simulation import wind_simulation from time import sleep # initialize the visualization VIDEO = False # True==write video, False==don't write video mav_view = mav_viewer() # initialize the mav viewer data_view = data_viewer() # initialize view of data plots if VIDEO == True: video = video_writer(video_name="chap4_video.avi", bounding_box=(0, 0, 1000, 1000), output_rate=SIM.ts_video) # initialize elements of the architecture wind = wind_simulation(SIM.ts_simulation) mav = mav_dynamics(SIM.ts_simulation) # initialize the simulation time sim_time = SIM.start_time # main simulation loop # sleep(5) print("Press Command-Q to exit...") while sim_time < SIM.end_time: #-------set control surfaces------------- if(sim_time<25): delta_e = -0.1 delta_t = 1.0 # 0.5 delta_a = 0.0 # 0.0 delta_r = 0.0 # 0.005 delta = np.array([[delta_e, delta_t, delta_a, delta_r]]).T # transpose to make it a column vector else: delta_e = -0.3 delta_t = 1.0#0.5 delta_a = 0.01#0.0 delta_r = 0.00025#0.005 delta = np.array([[delta_e, delta_t, delta_a, delta_r]]).T # transpose to make it a column vector #-------physical system------------- current_wind = wind.update() # get the new wind vector # print("current wind: ", current_wind) mav.update_state(delta, current_wind) # propagate the MAV dynamics #-------update viewer------------- mav_view.update(mav.msg_true_state) # plot body of MAV data_view.update(mav.msg_true_state, # true states mav.msg_true_state, # estimated states mav.msg_true_state, # commanded states SIM.ts_simulation) if VIDEO == True: video.update(sim_time) #-------increment time------------- sim_time += SIM.ts_simulation if VIDEO == True: video.close()
import os.path as osp
import sys

import numpy as np
import mmcv
from tqdm import tqdm
from functools import cmp_to_key

cur_dir = osp.dirname(osp.abspath(__file__))
PROJ_ROOT = osp.normpath(osp.join(cur_dir, "../../../../"))
sys.path.insert(0, PROJ_ROOT)

from lib.pysixd import inout, misc
from lib.utils.bbox_utils import xyxy_to_xywh
from lib.utils.utils import iprint, wprint

id2obj = {
    1: "002_master_chef_can",  # [1.3360, -0.5000, 3.5105]
    2: "003_cracker_box",  # [0.5575, 1.7005, 4.8050]
    3: "004_sugar_box",  # [-0.9520, 1.4670, 4.3645]
    4: "005_tomato_soup_can",  # [-0.0240, -1.5270, 8.4035]
    5: "006_mustard_bottle",  # [1.2995, 2.4870, -11.8290]
    6: "007_tuna_fish_can",  # [-0.1565, 0.1150, 4.2625]
    7: "008_pudding_box",  # [1.1645, -4.2015, 3.1190]
    8: "009_gelatin_box",  # [1.4460, -0.5915, 3.6085]
    9: "010_potted_meat_can",  # [2.4195, 0.3075, 8.0715]
    10: "011_banana",  # [-18.6730, 12.1915, -1.4635]
    11: "019_pitcher_base",  # [5.3370, 5.8855, 25.6115]
    12: "021_bleach_cleanser",  # [4.9290, -2.4800, -13.2920]
    13: "024_bowl",  # [-0.2270, 0.7950, -2.9675]
    14: "025_mug",  # [-8.4675, -0.6995, -1.6145]
    15: "035_power_drill",  # [9.0710, 20.9360, -2.1190]
    16: "036_wood_block",  # [1.4265, -2.5305, 17.1890]
    17: "037_scissors",  # [7.0535, -28.1320, 0.0420]
    18: "040_large_marker",  # [0.0460, -2.1040, 0.3500]
    19: "051_large_clamp",  # [10.5180, -1.9640, -0.4745]
    20: "052_extra_large_clamp",  # [-0.3950, -10.4130, 0.1620]
    21: "061_foam_brick",  # [-0.0805, 0.0805, -8.2435]
}
obj_num = len(id2obj)
obj2id = {_name: _id for _id, _name in id2obj.items()}

if __name__ == "__main__":
    new_res_path = osp.join(
        PROJ_ROOT,
        "datasets/BOP_DATASETS/ycbv/test/init_poses/",
        "resnest50d_online_AugCosyAAEGray_mlBCE_DoubleMask_ycbv_pbr_100e_so_GdrnPbrPose_withYolov4PbrBbox_wDeepimPbrPose_ycbv_train_real_uw.json",
    )
    if osp.exists(new_res_path):
        wprint("{} already exists! overriding!".format(new_res_path))

    res_root = "output/deepim/ycbvPbrSO/FlowNet512_1.5AugCosyAAEGray_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_ycbvPbr_SO/"
    iter_num_test = 4
    pkl_paths = [
        "01_02MasterChefCan/inference_model_final_wo_optim-2de2b4e3/ycbv_002_master_chef_can_train_real_uw/results.pkl",
        "02_03CrackerBox/inference_model_final_wo_optim-41082f8a/ycbv_003_cracker_box_train_real_uw/results.pkl",
        "03_04SugarBox/inference_model_final_wo_optim-e09dec3e/ycbv_004_sugar_box_train_real_uw/results.pkl",
        "04_05TomatoSoupCan/inference_model_final_wo_optim-5641f5d3/ycbv_005_tomato_soup_can_train_real_uw/results.pkl",
        "05_06MustardBottle/inference_model_final_wo_optim-6ce23e94/ycbv_006_mustard_bottle_train_real_uw/results.pkl",
        "06_07TunaFishCan/inference_model_final_wo_optim-0a768962/ycbv_007_tuna_fish_can_train_real_uw/results.pkl",
        "07_08PuddingBox/inference_model_final_wo_optim-f2f2cf73/ycbv_008_pudding_box_train_real_uw/results.pkl",
        "08_09GelatinBox/inference_model_final_wo_optim-a303aa1e/ycbv_009_gelatin_box_train_real_uw/results.pkl",
        "09_10PottedMeatCan/inference_model_final_wo_optim-84a56ffd/ycbv_010_potted_meat_can_train_real_uw/results.pkl",
        "10_11Banana/inference_model_final_wo_optim-83947126/ycbv_011_banana_train_real_uw/results.pkl",
        "11_19PitcherBase/inference_model_final_wo_optim-af1c7e62/ycbv_019_pitcher_base_train_real_uw/results.pkl",
        "12_21BleachCleanser/inference_model_final_wo_optim-5d740a46/ycbv_021_bleach_cleanser_train_real_uw/results.pkl",
        "13_24Bowl/inference_model_final_wo_optim-f11815d3/ycbv_024_bowl_train_real_uw/results.pkl",
        "14_25Mug/inference_model_final_wo_optim-e4824065/ycbv_025_mug_train_real_uw/results.pkl",
        "15_35PowerDrill/inference_model_final_wo_optim-30d7d1da/ycbv_035_power_drill_train_real_uw/results.pkl",
        "16_36WoodBlock/inference_model_final_wo_optim-fbb38751/ycbv_036_wood_block_train_real_uw/results.pkl",
        "17_37Scissors/inference_model_final_wo_optim-5068c6bb/ycbv_037_scissors_train_real_uw/results.pkl",
        "18_40LargeMarker/inference_model_final_wo_optim-e8d5867c/ycbv_040_large_marker_train_real_uw/results.pkl",
        "19_51LargeClamp/inference_model_final_wo_optim-1ea79b34/ycbv_051_large_clamp_train_real_uw/results.pkl",
        "20_52ExtraLargeClamp/inference_model_final_wo_optim-cb595297/ycbv_052_extra_large_clamp_train_real_uw/results.pkl",
        "21_61FoamBrick/inference_model_final_wo_optim-d3757ca1/ycbv_061_foam_brick_train_real_uw/results.pkl",
    ]
    obj_names = [obj for obj in obj2id]

    new_res_dict = {}
    for obj_name, pred_name in zip(obj_names, pkl_paths):
        assert obj_name in pred_name, "{} not in {}".format(obj_name, pred_name)
        pred_path = osp.join(res_root, pred_name)
        assert osp.exists(pred_path), pred_path
        iprint(obj_name, pred_path)

        # pkl: scene_im_id key, list of preds
        preds = mmcv.load(pred_path)

        for scene_im_id, pred_list in preds.items():
            for pred in pred_list:
                obj_id = pred["obj_id"]
                score = pred["score"]
                bbox_est = pred["bbox_det_xyxy"]  # xyxy
                bbox_est_xywh = xyxy_to_xywh(bbox_est)
                refined_pose = pred["pose_{}".format(iter_num_test)]
                pose_est = pred["pose_0"]
                cur_new_res = {
                    "obj_id": obj_id,
                    "score": float(score),
                    "bbox_est": bbox_est_xywh.tolist(),
                    "pose_est": pose_est.tolist(),
                    "pose_refine": refined_pose.tolist(),
                }
                if scene_im_id not in new_res_dict:
                    new_res_dict[scene_im_id] = []
                new_res_dict[scene_im_id].append(cur_new_res)

    inout.save_json(new_res_path, new_res_dict)
    iprint()
    iprint("new result path: {}".format(new_res_path))
from flask import abort
from guniflask.context import service

from ..config.jwt_config import jwt_manager
import pandas as pd

I = ["A", "B", "C", "D", "E"]

oneDigit = pd.Series([1, 2, 3, 4, 5], pd.Index(I))
twoDigit = pd.Series([10, 20, 30, 40, 50], pd.Index(I))

print "addends:"
print oneDigit
print twoDigit
print
print "sum:"
print oneDigit + twoDigit
print

I2 = ["A", "B", "C"]
I3 = ["B", "C", "D", "E"]

X = pd.Series([0, 1, 2], pd.Index(I2))
Y = pd.Series([10, 20, 0, 0], pd.Index(I3))

print "addends:"
print X
print Y
print
print "sum:"
print X + Y
print

A = pd.Series(["hello ", "my ", "name", "is", "brad"])
B = pd.Series(["world", "real"])

print "addends:"
print A
print B
print
print "sum: "
print A + B
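The mismatched-index sums above produce NaN for labels that appear in only one Series. A short Python 3 sketch of the standard workaround, Series.add with fill_value:

import pandas as pd

# Alignment demo: labels missing from one side are treated as 0 instead of NaN.
X = pd.Series([0, 1, 2], pd.Index(["A", "B", "C"]))
Y = pd.Series([10, 20, 0, 0], pd.Index(["B", "C", "D", "E"]))
print(X.add(Y, fill_value=0))  # A=0.0, B=11.0, C=22.0, D=0.0, E=0.0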
#
# (c) 2008-2020 Matthew Shaw
#

import sys
import os
import re
import logging

import nelly

from .scanner import Scanner
from .program import Program
from .types import *
""" This module contains helper functions that provide information about how QCoDeS is installed and about what other packages are installed along with QCoDeS """ import sys from typing import Dict, List, Optional import subprocess import json import logging import requirements if sys.version_info >= (3, 8): from importlib.metadata import distribution, version, PackageNotFoundError else: # 3.7 and earlier from importlib_metadata import distribution, version, PackageNotFoundError import qcodes log = logging.getLogger(__name__) def is_qcodes_installed_editably() -> Optional[bool]: """ Try to ask pip whether QCoDeS is installed in editable mode and return the answer a boolean. Returns None if pip somehow did not respond as expected. """ answer: Optional[bool] try: pipproc = subprocess.run(['python', '-m', 'pip', 'list', '-e', '--no-index', '--format=json'], check=True, stdout=subprocess.PIPE) e_pkgs = json.loads(pipproc.stdout.decode('utf-8')) answer = any([d["name"] == 'qcodes' for d in e_pkgs]) except Exception as e: # we actually do want a catch-all here log.warning(f'{type(e)}: {str(e)}') answer = None return answer def get_qcodes_version() -> str: """ Get the version of the currently installed QCoDeS """ return qcodes.version.__version__ def get_qcodes_requirements() -> List[str]: """ Return a list of the names of the packages that QCoDeS requires """ qc_pkg = distribution('qcodes').requires if qc_pkg is None: return [] package_names = [list(requirements.parse(req))[0].name for req in qc_pkg] return package_names def get_qcodes_requirements_versions() -> Dict[str, str]: """ Return a dictionary of the currently installed versions of the packages that QCoDeS requires. The dict maps package name to version string. If an (optional) dependency is not installed the name maps to "Not installed". """ req_names = get_qcodes_requirements() req_versions = {} for req in req_names: try: req_versions[req] = version(req) except PackageNotFoundError: req_versions[req] = "Not installed" return req_versions
from django.shortcuts import render
from django.http import JsonResponse

from .models import FieldCategory
# tests/test_provider_Mongey_kafka-connect.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:20:11 UTC)

# TODO: Shortcut imports without namespace for official and supported providers.
# TODO: This has to be moved into a required_providers block.

# def test_version_source():
#
#     import terrascript.provider.Mongey.kafka_connect
#
#     t = terrascript.provider.Mongey.kafka_connect.kafka_connect()
#     s = str(t)
#
#     assert 'https://github.com/Mongey/terraform-provider-kafka-connect' in s
#     assert '0.2.3' in s
from flask import Flask, render_template, request, redirect, jsonify, g
from flask import url_for, flash, make_response
from flask import session as login_session
from sqlalchemy import create_engine, asc
from sqlalchemy.orm import sessionmaker
from models import Base, Category, Item, User
from oauth2client.client import flow_from_clientsecrets
from oauth2client.client import FlowExchangeError
import httplib2
import json
import requests

app = Flask(__name__)

# Retrieves client ID's and secrets from the json files
CLIENT_ID = json.loads(open('client_secrets.json', 'r')
                       .read())['web']['client_id']
APP_ID = json.loads(open('fb_client_secrets.json', 'r')
                    .read())['web']['app_id']
APP_SECRET = json.loads(open('fb_client_secrets.json', 'r')
                        .read())['web']['app_secret']

# Connect to Database and create database session
engine = create_engine('sqlite:///catalog.db')
Base.metadata.bind = engine

DBSession = sessionmaker(bind=engine)
session = DBSession()


# Login handler

# Third Party Oauth callback

def createUser(login_session):
    newUser = User(username=login_session['username'],
                   email=login_session['email'],
                   picture=login_session['picture'])
    session.add(newUser)
    session.commit()


def getUserID(email):
    try:
        user = session.query(User).filter_by(email=email).one()
        return user.id
    except:
        return None


# Revoke current user's token and reset login_session

# JSON APIs to view Category Information.

# Show all Categories and the latest items

# Show Items in a category item

# Show an item in a category

# Create a new item

# Edit a category item

# Delete a category item

if __name__ == '__main__':
    app.secret_key = 'N10kuN!'
    app.debug = True
    app.run(host='0.0.0.0', port=5000)
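An illustrative sketch of how the two user helpers above are typically chained after an OAuth callback populates login_session; the dict values here are made up:

# Hypothetical post-login flow using createUser/getUserID defined above.
login_session = {'username': 'alice', 'email': 'alice@example.com',
                 'picture': ''}
if getUserID(login_session['email']) is None:
    createUser(login_session)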
from .echo import echo, set_quiet
from .errors import NooException, cancel
from .store import STORE, FileStore, Store

__all__ = (
    "FileStore",
    "NooException",
    "Store",
    "STORE",
    "cancel",
    "echo",
    "set_quiet",
)
# Copyright Allen Institute for Artificial Intelligence 2017
"""
ai2thor.server

Handles all communication with Unity through a Flask service.  Messages
are sent to the controller using a pair of request/response queues.
"""
import json
import logging
import sys
import os
import os.path

try:
    from queue import Empty
except ImportError:
    from Queue import Empty

import time
import warnings

from flask import Flask, request, make_response, abort
import werkzeug
import werkzeug.serving
import werkzeug.http
import numpy as np

from enum import Enum

from ai2thor.util.depth import apply_real_noise, generate_noise_indices

logging.getLogger('werkzeug').setLevel(logging.ERROR)
werkzeug.serving.WSGIRequestHandler.protocol_version = 'HTTP/1.1'

MAX_DEPTH = 5000


# get with timeout to allow quit

def read_buffer_image(buf, width, height, flip_y=True, flip_x=False,
                      dtype=np.uint8, flip_rb_colors=False):
    im_bytes = np.frombuffer(buf.tobytes(), dtype=dtype) \
        if sys.version_info.major < 3 else np.frombuffer(buf, dtype=dtype)
    im = im_bytes.reshape(height, width, -1)
    if flip_y:
        im = np.flip(im, axis=0)
    if flip_x:
        im = np.flip(im, axis=1)
    if flip_rb_colors:
        im = im[..., ::-1]
    return im


def unique_rows(arr, return_index=False, return_inverse=False):
    arr = np.ascontiguousarray(arr).copy()
    b = arr.view(np.dtype((np.void, arr.dtype.itemsize * arr.shape[1])))
    if return_inverse:
        _, idx, inv = np.unique(b, return_index=True, return_inverse=True)
    else:
        _, idx = np.unique(b, return_index=True)
    unique = arr[idx]
    if return_index and return_inverse:
        return unique, idx, inv
    elif return_index:
        return unique, idx
    elif return_inverse:
        return unique, inv
    else:
        return unique


class MultipartFormParser(object):


class DepthFormat(Enum):
    Meters = 0,
    Normalized = 1,
    Millimeters = 2


class Server(object):
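A short demo of unique_rows() above, which deduplicates a 2-D array row-wise by reinterpreting each row as a single void-typed byte string; assumes the function is in scope:

import numpy as np

arr = np.array([[1, 2], [3, 4], [1, 2]])
# Duplicates collapse; return_index=True also yields first-occurrence indices.
uniq, idx = unique_rows(arr, return_index=True)
print(uniq)  # [[1 2] [3 4]]
print(idx)   # [0 1]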
from setuptools import setup

setup(name='mydocstring',
      version='0.2.7',
      description="""A tool for extracting and converting Google-style
      docstrings to plain-text, markdown, and JSON.""",
      url='http://github.com/ooreilly/mydocstring',
      author="Ossian O'Reilly",
      license='MIT',
      packages=['mydocstring'],
      install_requires=['mako', 'docopt'],
      entry_points={
          'console_scripts': [
              'mydocstring=mydocstring.docstring:main',
          ],
      },
      package_data={'mydocstring': ['templates/google_docstring.md']},
      zip_safe=False)
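For context, the console_scripts entry point above makes pip generate an executable named mydocstring; conceptually it is equivalent to this tiny launcher (a sketch of the mechanism, not part of the package):

# Rough equivalent of the generated `mydocstring` console script:
# 'mydocstring=mydocstring.docstring:main' means "import this, call that".
from mydocstring.docstring import main

if __name__ == '__main__':
    main()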
# -*- coding: utf-8 -*-
#
# Copyright (c) 2020~2999 - Cologler <skyoflw@gmail.com>
# ----------
#
# ----------

import bson
import struct

from ..err import SerializeError
from ..abc import *
from ..core import register_format
# -*- encoding: utf-8 -*- import json import os import shutil import tempfile from collections import OrderedDict from datetime import timedelta from pyparsing import ParseBaseException, ParseException, ParseSyntaxException import mock import pytest from pyhocon import (ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree) from pyhocon.exceptions import (ConfigException, ConfigMissingException, ConfigWrongTypeException) try: from dateutil.relativedelta import relativedelta as period except Exception: from datetime import timedelta as period def test_parse_string_with_duration_with_long_unit_name(self): config = ConfigFactory.parse_string( """ a: foo b: 10 weeks c: bar """ ) assert config['b'] == period(weeks=10) def test_parse_with_list_mixed_types_with_durations_and_trailing_comma(self): config = ConfigFactory.parse_string( """ a: foo b: [a, 1, 10 weeks, 5 minutes,] c: bar """ ) assert config['b'] == ['a', 1, period(weeks=10), period(minutes=5)] def test_parse_with_enclosing_square_bracket(self): config = ConfigFactory.parse_string("[1, 2, 3]") assert config == [1, 2, 3] def test_quoted_key_with_dots(self): config = ConfigFactory.parse_string( """ "a.b.c.d": 3 t { "d": { "c": 5 } } k { "b.f.d": 7 } """ ) assert config['"a.b.c.d"'] == 3 assert config['t.d.c'] == 5 assert config['k."b.f.d"'] == 7 def test_dotted_notation_merge(self): config = ConfigFactory.parse_string( """ a { b = foo c = bar } a.c = ${a.b}" "${a.b} a.d = baz """ ) assert config['a.b'] == "foo" assert config['a.c'] == "foo foo" assert config['a.d'] == "baz" def test_comma_to_separate_expr(self): config = ConfigFactory.parse_string( """ a=1, b="abc", c=the man, d=woof, a-b-c-d=test, a b c d=test2, "a b c d e"=test3 """ ) assert config.get('a') == 1 assert config.get('b') == 'abc' assert config.get('c') == 'the man' assert config.get('d') == 'woof' assert config.get('a-b-c-d') == 'test' assert config.get('a b c d') == 'test2' assert config.get('a b c d e') == 'test3' def test_dict_merge(self): config = ConfigFactory.parse_string( """ a { d { g.h.j.u: 5 g { h.d: 4 } g.h.k: f d } h.i.m = 7 h.i { d: 5 } h.i { e:65 } } """) expected_result = { "a": { "d": { "g": { "h": { "j": { "u": 5 }, "d": 4, "k": "f d" } } }, "h": { "i": { "m": 7, "d": 5, "e": 65 } } } } assert expected_result == config def test_parse_with_comments(self): config = ConfigFactory.parse_string( """ // comment 1 # comment 2 { c = test // comment 0 g = 6 test # comment 0 # comment 3 a: { # comment 4 b: test, # comment 5 } # comment 6 t = [1, # comment 7 2, # comment 8 3, # comment 9 ] } # comment 10 // comment 11 // comment 12 """ ) assert config.get('c') == 'test' assert config.get('g') == '6 test' assert config.get('a.b') == 'test' assert config.get_string('a.b') == 'test' assert config.get('t') == [1, 2, 3] def test_missing_config(self): config = ConfigFactory.parse_string( """ a = 5 """ ) # b is not set so show raise an exception with pytest.raises(ConfigMissingException): config.get('b') def test_parse_null(self): config = ConfigFactory.parse_string( """ a = null b = [null] """ ) assert config.get('a') is None assert config.get('b')[0] is None def test_parse_override(self): config = ConfigFactory.parse_string( """ { a: { b: { c = 5 } } a.b { c = 7 d = 8 } } """ ) assert config.get('a.b.c') == 7 assert config.get('a.b.d') == 8 def test_concat_dict(self): config = ConfigFactory.parse_string( """ a: {b: 1} a: {c: 2} b: {c: 3} {d: 4} { c: 5 } """ ) assert config.get('a.b') == 1 assert config.get('a.c') == 2 assert config.get('b.c') == 5 assert 
config.get('b.d') == 4 def test_concat_string(self): config = ConfigFactory.parse_string( """ a = a b c b = 5 b c = b 7 """ ) assert config.get('a') == 'a b c' assert config.get('b') == '5 b' assert config.get('c') == 'b 7' def test_concat_list(self): config = ConfigFactory.parse_string( """ a = [1, 2] [3, 4] [ 5, 6 ] """ ) assert config.get('a') == [1, 2, 3, 4, 5, 6] assert config.get_list('a') == [1, 2, 3, 4, 5, 6] def test_bad_concat(self): ConfigFactory.parse_string('a = 45\n') with pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string('a = [4] "4"') with pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string('a = "4" [5]') with pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string('a = {b: 5} "4"') def test_string_substitutions(self): config1 = ConfigFactory.parse_string( """ { a: { b: { c = str e = "str " } } d = ${a.b.c} f = ${a.b.e} } """ ) assert config1.get('a.b.c') == 'str' assert config1.get('d') == 'str' assert config1.get('f') == 'str ' config2 = ConfigFactory.parse_string( """ { a: { b: { c = str e = "str " } } d = test ${a.b.c} f = test ${a.b.e} } """ ) assert config2.get('a.b.c') == 'str' assert config2.get('d') == 'test str' assert config2.get('f') == 'test str ' config3 = ConfigFactory.parse_string( u""" { a: { b: { c = str e = "str " } } d = test ${a.b.c} me f = test ${a.b.e} me } """ ) assert config3.get('a.b.c') == 'str' assert config3.get('d') == 'test str me' assert config3.get('f') == 'test str me' def test_string_substitutions_with_no_space(self): config = ConfigFactory.parse_string( """ app.heap_size = 128 app.java_opts = [ -Xms${app.heap_size}m -Xmx${app.heap_size}m ] """ ) assert config.get('app.java_opts') == [ '-Xms128m', '-Xmx128m' ] def test_int_substitutions(self): config1 = ConfigFactory.parse_string( """ { a: { b: { c = 5 } } d = ${a.b.c} } """ ) assert config1.get('a.b.c') == 5 assert config1.get('d') == 5 config2 = ConfigFactory.parse_string( """ { a: { b: { c = 5 } } d = test ${a.b.c} } """ ) assert config2.get('a.b.c') == 5 assert config2.get('d') == 'test 5' config3 = ConfigFactory.parse_string( """ { a: { b: { c = 5 } } d = test ${a.b.c} me } """ ) assert config3.get('a.b.c') == 5 assert config3.get('d') == 'test 5 me' def test_cascade_string_substitutions(self): config = ConfigFactory.parse_string( """ { a: { b: { c = ${e} } } d = test ${a.b.c} me e = 7 } """ ) assert config.get('a.b.c') == 7 assert config.get('d') == 'test 7 me' def test_multiple_substitutions(self): config = ConfigFactory.parse_string( """ a = 5 b=${a}${a} c=${a} ${a} """ ) assert config == { 'a': 5, 'b': '55', 'c': '5 5' } def test_dict_substitutions(self): config = ConfigFactory.parse_string( """ data-center-generic = { cluster-size = 6 } data-center-east = ${data-center-generic} {name = "east"} """ ) assert config.get('data-center-east.cluster-size') == 6 assert config.get('data-center-east.name') == 'east' config2 = ConfigFactory.parse_string( """ data-center-generic = { cluster-size = 6 } data-center-east = {name = "east"} ${data-center-generic} """ ) assert config2.get('data-center-east.cluster-size') == 6 assert config2.get('data-center-east.name') == 'east' config3 = ConfigFactory.parse_string( """ data-center-generic = { cluster-size = 6 } data-center-east = {name = "east"} ${data-center-generic} { cluster-size = 9, opts = "-Xmx4g" } """ ) assert config3.get('data-center-east.cluster-size') == 9 assert config3.get('data-center-east.name') == 'east' assert config3.get('data-center-east.opts') == '-Xmx4g' config4 = 
ConfigFactory.parse_string( """ data-center-generic = { cluster-size = 6 } data-center-east = {name = "east"} ${data-center-generic} data-center-east-prod = ${data-center-east} {tmpDir=/tmp} """ ) assert config4.get('data-center-east.cluster-size') == 6 assert config4.get('data-center-east.name') == 'east' assert config4.get('data-center-east-prod.cluster-size') == 6 assert config4.get('data-center-east-prod.tmpDir') == '/tmp' config5 = ConfigFactory.parse_string( """ data-center-generic = { cluster-size = 6 } data-center-east = ${data-center-generic} data-center-east = { name = "east" } """ ) assert config5['data-center-east'] == { 'name': 'east', 'cluster-size': 6 } config6 = ConfigFactory.parse_string( """ data-center-generic = { cluster-size = 6 } data-center-east = { name = "east" } data-center-east = ${data-center-generic} """ ) assert config6['data-center-east'] == { 'name': 'east', 'cluster-size': 6 } def test_dos_chars_with_unquoted_string_noeol(self): config = ConfigFactory.parse_string("foo = bar") assert config['foo'] == 'bar' def test_dos_chars_with_quoted_string_noeol(self): config = ConfigFactory.parse_string('foo = "5"') assert config['foo'] == '5' def test_dos_chars_with_triple_quoted_string_noeol(self): config = ConfigFactory.parse_string('foo = """5"""') assert config['foo'] == '5' def test_dos_chars_with_int_noeol(self): config = ConfigFactory.parse_string("foo = 5") assert config['foo'] == 5 def test_dos_chars_with_float_noeol(self): config = ConfigFactory.parse_string("foo = 5.0") assert config['foo'] == 5.0 def test_list_substitutions(self): config = ConfigFactory.parse_string( """ common_modules = [php, python] host_modules = ${common_modules} [java] """ ) assert config.get('host_modules') == ['php', 'python', 'java'] config2 = ConfigFactory.parse_string( """ common_modules = [php, python] host_modules = [java] ${common_modules} """ ) assert config2.get('host_modules') == ['java', 'php', 'python'] config3 = ConfigFactory.parse_string( """ common_modules = [php, python] host_modules = [java] ${common_modules} [perl] """ ) assert config3.get('common_modules') == ['php', 'python'] assert config3.get('host_modules') == ['java', 'php', 'python', 'perl'] config4 = ConfigFactory.parse_string( """ common_modules = [php, python] host_modules = [java] ${common_modules} [perl] full_modules = ${host_modules} [c, go] """ ) assert config4.get('common_modules') == ['php', 'python'] assert config4.get('host_modules') == ['java', 'php', 'python', 'perl'] assert config4.get('full_modules') == ['java', 'php', 'python', 'perl', 'c', 'go'] def test_list_element_substitution(self): config = ConfigFactory.parse_string( """ main_language = php languages = [java, ${main_language}] """ ) assert config.get('languages') == ['java', 'php'] def test_substitution_list_with_append(self): config = ConfigFactory.parse_string( """ application.foo = 128mm application.large-jvm-opts = ["-XX:+UseParNewGC"] [-Xm16g, ${application.foo}] application.large-jvm-opts2 = [-Xm16g, ${application.foo}] ["-XX:+UseParNewGC"] """) assert config["application.large-jvm-opts"] == [ '-XX:+UseParNewGC', '-Xm16g', '128mm' ] assert config["application.large-jvm-opts2"] == [ '-Xm16g', '128mm', '-XX:+UseParNewGC', ] def test_substitution_list_with_append_substitution(self): config = ConfigFactory.parse_string( """ application.foo = 128mm application.default-jvm-opts = ["-XX:+UseParNewGC"] application.large-jvm-opts = ${application.default-jvm-opts} [-Xm16g, ${application.foo}] application.large-jvm-opts2 = [-Xm16g, 
${application.foo}] ${application.default-jvm-opts} """) assert config["application.large-jvm-opts"] == [ '-XX:+UseParNewGC', '-Xm16g', '128mm' ] assert config["application.large-jvm-opts2"] == [ '-Xm16g', '128mm', '-XX:+UseParNewGC' ] def test_non_existent_substitution(self): with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( """ common_modules = ${non_existent} """ ) with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( """ common_modules = abc ${non_existent} """ ) with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( """ common_modules = ${non_existent} abc """ ) with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( """ common_modules = abc ${non_existent} def """ ) def test_non_compatible_substitution(self): with pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string( """ common_modules = [perl] host_modules = 55 ${common_modules} """ ) with pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string( """ common_modules = [perl] host_modules = ${common_modules} 55 """ ) with pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string( """ common_modules = [perl] host_modules = aa ${common_modules} bb """ ) with pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string( """ common_modules = [perl] host_modules = aa ${common_modules} """ ) with pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string( """ common_modules = [perl] host_modules = ${common_modules} aa """ ) with pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string( """ common_modules = [perl] host_modules = aa ${common_modules} bb """ ) def test_self_ref_substitution_array(self): config = ConfigFactory.parse_string( """ x = [1,2] x = ${x} [3,4] x = [-1, 0] ${x} [5, 6] x = [-3, -2] ${x} """ ) assert config.get("x") == [-3, -2, -1, 0, 1, 2, 3, 4, 5, 6] def test_self_append_array(self): config = ConfigFactory.parse_string( """ x = [1,2] x += [3,4] """ ) assert config.get("x") == [1, 2, 3, 4] def test_self_append_string(self): ''' Should be equivalent to x = abc x = ${?x} def ''' config = ConfigFactory.parse_string( """ x = abc x += def """ ) assert config.get("x") == "abc def" def test_self_append_non_existent_string(self): ''' Should be equivalent to x = ${?x} def ''' config = ConfigFactory.parse_string( """ x += def """ ) assert config.get("x") == " def" def test_self_append_nonexistent_array(self): config = ConfigFactory.parse_string( """ x += [1,2] """ ) assert config.get("x") == [1, 2] def test_self_append_object(self): config = ConfigFactory.parse_string( """ x = {a: 1} x += {b: 2} """ ) assert config.get("x") == {'a': 1, 'b': 2} def test_self_append_nonexistent_object(self): config = ConfigFactory.parse_string( """ x += {a: 1} """ ) assert config.get("x") == {'a': 1} def test_self_ref_substitution_array_to_dict(self): config = ConfigFactory.parse_string( """ x = [1,2] x = {x: [3,4]} x = {y: [5,6]} x = {z: ${x}} """ ) assert config.get("x.x") == [3, 4] assert config.get("x.y") == [5, 6] assert config.get("x.z") == {'x': [3, 4], 'y': [5, 6]} def test_self_ref_substitiotion_dict_in_array(self): config = ConfigFactory.parse_string( """ x = {x: [3,4]} x = [${x}, 2, 3] """ ) (one, two, three) = config.get("x") assert one == {'x': [3, 4]} assert two == 2 assert three == 3 def test_self_ref_substitution_dict_path(self): config = ConfigFactory.parse_string( """ x = {y: {z: 1}} x = ${x.y} """ ) assert config.get("x.y") == {'z': 1} assert config.get("x.z") == 1 assert 
set(config.get("x").keys()) == set(['y', 'z']) def test_self_ref_substitution_dict_path_hide(self): config = ConfigFactory.parse_string( """ x = {y: {y: 1}} x = ${x.y} """ ) assert config.get("x.y") == 1 assert set(config.get("x").keys()) == set(['y']) def test_self_ref_substitution_dict_recurse(self): with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( """ x = ${x} """ ) def test_self_ref_substitution_dict_recurse2(self): with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( """ x = ${x} x = ${x} """ ) def test_self_ref_substitution_dict_merge(self): ''' Example from HOCON spec ''' config = ConfigFactory.parse_string( """ foo : { a : { c : 1 } } foo : ${foo.a} foo : { a : 2 } """ ) assert config.get('foo') == {'a': 2, 'c': 1} assert set(config.keys()) == set(['foo']) def test_self_ref_substitution_dict_otherfield(self): ''' Example from HOCON spec ''' config = ConfigFactory.parse_string( """ bar : { foo : 42, baz : ${bar.foo} } """ ) assert config.get("bar") == {'foo': 42, 'baz': 42} assert set(config.keys()) == set(['bar']) def test_self_ref_substitution_dict_otherfield_merged_in(self): ''' Example from HOCON spec ''' config = ConfigFactory.parse_string( """ bar : { foo : 42, baz : ${bar.foo} } bar : { foo : 43 } """ ) assert config.get("bar") == {'foo': 43, 'baz': 43} assert set(config.keys()) == set(['bar']) def test_self_ref_substitution_dict_otherfield_merged_in_mutual(self): ''' Example from HOCON spec ''' config = ConfigFactory.parse_string( """ // bar.a should end up as 4 bar : { a : ${foo.d}, b : 1 } bar.b = 3 // foo.c should end up as 3 foo : { c : ${bar.b}, d : 2 } foo.d = 4 """ ) assert config.get("bar") == {'a': 4, 'b': 3} assert config.get("foo") == {'c': 3, 'd': 4} assert set(config.keys()) == set(['bar', 'foo']) def test_self_ref_substitution_string_opt_concat(self): ''' Example from HOCON spec ''' config = ConfigFactory.parse_string( """ a = ${?a}foo """ ) assert config.get("a") == 'foo' assert set(config.keys()) == set(['a']) def test_self_ref_substitution_dict_recurse_part(self): with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( """ x = ${x} {y: 1} x = ${x.y} """ ) def test_self_ref_substitution_object(self): config = ConfigFactory.parse_string( """ x = {a: 1, b: 2} x = ${x} {c: 3} x = {z: 0} ${x} x = {y: -1} ${x} {d: 4} """ ) assert config.get("x") == {'a': 1, 'b': 2, 'c': 3, 'z': 0, 'y': -1, 'd': 4} def test_self_ref_child(self): config = ConfigFactory.parse_string( """ a.b = 3 a.b = ${a.b} a.b = ${a.b} a.c = [1,2] a.c = ${a.c} a.d = {foo: bar} a.d = ${a.d} """ ) assert config.get("a") == {'b': 3, 'c': [1, 2], 'd': {'foo': 'bar'}} def test_concat_multi_line_string(self): config = ConfigFactory.parse_string( """ common_modules = perl \ java \ python """ ) assert [x.strip() for x in config['common_modules'].split() if x.strip(' ') != ''] == ['perl', 'java', 'python'] def test_concat_multi_line_list(self): config = ConfigFactory.parse_string( """ common_modules = [perl] \ [java] \ [python] """ ) assert config['common_modules'] == ['perl', 'java', 'python'] def test_concat_multi_line_dict(self): config = ConfigFactory.parse_string( """ common_modules = {a:perl} \ {b:java} \ {c:python} """ ) assert config['common_modules'] == {'a': 'perl', 'b': 'java', 'c': 'python'} def test_parse_URL_from_samples(self): config = ConfigFactory.parse_URL("file:samples/aws.conf") assert config.get('data-center-generic.cluster-size') == 6 assert config.get('large-jvm-opts') == ['-XX:+UseParNewGC', '-Xm16g'] def 
test_parse_URL_from_invalid(self): config = ConfigFactory.parse_URL("https://nosuchurl") assert config == [] def test_include_dict_from_samples(self): config = ConfigFactory.parse_file("samples/animals.conf") assert config.get('cat.garfield.say') == 'meow' assert config.get('dog.mutt.hates.garfield.say') == 'meow' def test_include_glob_dict_from_samples(self): config = ConfigFactory.parse_file("samples/all_animals.conf") assert config.get('animals.garfield.say') == 'meow' assert config.get('animals.mutt.hates.garfield.say') == 'meow' def test_include_glob_list_from_samples(self): config = ConfigFactory.parse_file("samples/all_bars.conf") bars = config.get_list('bars') assert len(bars) == 10 names = {bar['name'] for bar in bars} types = {bar['type'] for bar in bars if 'type' in bar} print(types, '(((((') assert 'Bloody Mary' in names assert 'Homer\'s favorite coffee' in names assert 'milk' in types def test_list_of_dicts(self): config = ConfigFactory.parse_string( """ a: [ {a: 1, b: 2}, {a: 3, c: 4}, ] """ ) assert config['a'] == [ {'a': 1, 'b': 2}, {'a': 3, 'c': 4} ] def test_list_of_lists(self): config = ConfigFactory.parse_string( """ a: [ [1, 2] [3, 4] ] """ ) assert config['a'] == [ [1, 2], [3, 4] ] def test_list_of_dicts_with_merge(self): config = ConfigFactory.parse_string( """ b = {f: 4} a: [ ${b} {a: 1, b: 2}, {a: 3, c: 4} ${b}, {a: 3} ${b} {c: 6}, ] """ ) assert config['a'] == [ {'a': 1, 'b': 2, 'f': 4}, {'a': 3, 'c': 4, 'f': 4}, {'a': 3, 'c': 6, 'f': 4} ] def test_list_of_lists_with_merge(self): config = ConfigFactory.parse_string( """ b = [5, 6] a: [ ${b} [1, 2] [3, 4] ${b} [1, 2] ${b} [7, 8] ] """ ) assert config['a'] == [ [5, 6, 1, 2], [3, 4, 5, 6], [1, 2, 5, 6, 7, 8] ] def test_invalid_assignment(self): with pytest.raises(ParseSyntaxException): ConfigFactory.parse_string('common_modules [perl]') with pytest.raises(ParseException): ConfigFactory.parse_string('common_modules {} {perl: 1}') with pytest.raises(ParseSyntaxException): ConfigFactory.parse_string( """ a = {f: 5} common_modules ${a} {perl: 1} """) def test_invalid_dict(self): with pytest.raises(ParseSyntaxException): ConfigFactory.parse_string( """ a = { f: 5 g } """) with pytest.raises(ParseSyntaxException): ConfigFactory.parse_string('a = {g}') def test_include_file(self): with tempfile.NamedTemporaryFile('w') as fdin: fdin.write('[1, 2]') fdin.flush() config1 = ConfigFactory.parse_string( """ a: [ include "{tmp_file}" ] """.format(tmp_file=fdin.name) ) assert config1['a'] == [1, 2] config2 = ConfigFactory.parse_string( """ a: [ include file("{tmp_file}") ] """.format(tmp_file=fdin.name) ) assert config2['a'] == [1, 2] config3 = ConfigFactory.parse_string( """ a: [ include url("file://{tmp_file}") ] """.format(tmp_file=fdin.name) ) assert config3['a'] == [1, 2] def test_include_missing_file(self): config1 = ConfigFactory.parse_string( """ a: [ include "dummy.txt" 3 4 ] """ ) assert config1['a'] == [3, 4] def test_include_required_file(self): config = ConfigFactory.parse_string( """ a { include required("samples/animals.d/cat.conf") t = 2 } """ ) expected = { 'a': { 'garfield': { 'say': 'meow' }, 't': 2 } } assert expected == config config2 = ConfigFactory.parse_string( """ a { include required(file("samples/animals.d/cat.conf")) t = 2 } """ ) assert expected == config2 def test_include_missing_required_file(self): with pytest.raises(IOError): ConfigFactory.parse_string( """ a: [ include required("dummy.txt") 3 4 ] """ ) def test_resolve_package_path(self): path = 
ConfigParser.resolve_package_path("pyhocon:config_parser.py") assert os.path.exists(path) def test_resolve_package_path_format(self): with pytest.raises(ValueError): ConfigParser.resolve_package_path("pyhocon/config_parser.py") def test_resolve_package_path_missing(self): with pytest.raises(ImportError): ConfigParser.resolve_package_path("non_existent_module:foo.py") def test_include_package_file(self, monkeypatch): temp_dir = tempfile.mkdtemp() try: module_dir = os.path.join(temp_dir, 'my_module') module_conf = os.path.join(module_dir, 'my.conf') # create the module folder and necessary files (__init__ and config) os.mkdir(module_dir) open(os.path.join(module_dir, '__init__.py'), 'a').close() with open(module_conf, 'w') as fdin: fdin.write("{c: 3}") # add the temp dir to sys.path so that 'my_module' can be discovered monkeypatch.syspath_prepend(temp_dir) # load the config and include the other config file from 'my_module' config = ConfigFactory.parse_string( """ a: 1 b: 2 include package("my_module:my.conf") """ ) # check that the contents of both config files are available assert dict(config.as_plain_ordered_dict()) == {'a': 1, 'b': 2, 'c': 3} finally: shutil.rmtree(temp_dir, ignore_errors=True) def test_include_dict(self): expected_res = { 'a': 1, 'b': 2, 'c': 3, 'd': 4 } with tempfile.NamedTemporaryFile('w') as fdin: fdin.write('{a: 1, b: 2}') fdin.flush() config1 = ConfigFactory.parse_string( """ a: {{ include "{tmp_file}" c: 3 d: 4 }} """.format(tmp_file=fdin.name) ) assert config1['a'] == expected_res config2 = ConfigFactory.parse_string( """ a: {{ c: 3 d: 4 include "{tmp_file}" }} """.format(tmp_file=fdin.name) ) assert config2['a'] == expected_res config3 = ConfigFactory.parse_string( """ a: {{ c: 3 include "{tmp_file}" d: 4 }} """.format(tmp_file=fdin.name) ) assert config3['a'] == expected_res def test_include_substitution(self): with tempfile.NamedTemporaryFile('w') as fdin: fdin.write('y = ${x}') fdin.flush() config = ConfigFactory.parse_string( """ include "{tmp_file}" x = 42 """.format(tmp_file=fdin.name) ) assert config['x'] == 42 assert config['y'] == 42 def test_sci_real(self): """ Test scientific expression of number """ config = ConfigFactory.parse_string( """ short = 12.12321 long1 = 121.22E3423432 neg_long1 = 121.22E-1 long2 = 121.22e3423432 neg_long2 = 121.22e-3 """ ) # on python 3 long will be an int but on python 2 long with be a long assert config['short'] == 12.12321 assert config['long1'] == 121.22E3423432 assert config['neg_long1'] == 121.22E-1 assert config['long2'] == 121.22E3423432 assert config['neg_long2'] == 121.22E-3 def test_unicode_dict_key(self): input_string = u""" www.sample.com { us { name = "first domain" } } www.example-.com { us { name = "second domain" } } """ config = ConfigFactory.parse_string(input_string) assert config.get_string(u'www.sample.com.us.name') == 'first domain' assert config.get_string(u'www.example-.com.us.name') == 'second domain' with pytest.raises(ConfigWrongTypeException): config.put(u'www.example-', 'append_failure', append=True) with pytest.raises(ConfigMissingException): config.get_string(u'missing_unicode_key_') with pytest.raises(ConfigException): config.get_bool(u'www.example-.com.us.name') with pytest.raises(ConfigException): config.get_list(u'www.example-.com.us.name') with pytest.raises(ConfigException): config.get_config(u'www.example-.com.us.name') with pytest.raises(ConfigWrongTypeException): config.get_string(u'www.example-.com.us.name.missing') def test_with_comment_on_last_line(self): # Adress issue #102 
config_tree = ConfigFactory.parse_string(""" foo: "1" bar: "2" # DO NOT CHANGE ANY OF THE ABOVE SETTINGS!""") assert config_tree == { 'foo': '1', 'bar': '2' } def test_triple_quotes_same_line(self): config_tree = ConfigFactory.parse_string('a:["""foo"""", "bar"]') assert config_tree == { 'a': ['foo"', "bar"] } def test_pop(self): config_tree = ConfigFactory.parse_string('a:{b: 3, d: 6}') assert 3 == config_tree.pop('a.b', 5) assert 5 == config_tree.pop('a.c', 5) expected = { 'a': {'d': 6} } assert expected == config_tree def test_merge_overriden(self): # Adress issue #110 # ConfigValues must merge with its .overriden_value # if both are ConfigTree config_tree = ConfigFactory.parse_string(""" foo: ${bar} foo: ${baz} bar: {r: 1, s: 2} baz: {s: 3, t: 4} """) assert 'r' in config_tree['foo'] and 't' in config_tree['foo'] and config_tree['foo']['s'] == 3 def test_attr_syntax(self): config = ConfigFactory.parse_string( """ a: 1 b: { pb: 5 } """) assert 5 == config.b.pb def test_escape_quote(self): config = ConfigFactory.parse_string( """ quoted: "abc\\"test" unquoted: abc\\"test """) assert 'abc"test' == config['quoted'] assert 'abc"test' == config['unquoted'] def test_escape_quote_complex(self): config = ConfigFactory.parse_string( """ value: "{\\"critical\\":\\"0.00\\",\\"warning\\":\\"99.99\\"}" """ ) assert '{"critical":"0.00","warning":"99.99"}' == config['value'] def test_keys_with_slash(self): config = ConfigFactory.parse_string( """ /abc/cde1: abc "/abc/cde2": "cde" /abc/cde3: "fgh" """) assert 'abc' == config['/abc/cde1'] assert 'cde' == config['/abc/cde2'] assert 'fgh' == config['/abc/cde3'] def test_mutation_values(self): config = ConfigFactory.parse_string( """ common : { } b1 = [] var = "wrong" compilerCommon : ${common} { VAR : ${var} } substrate-suite: { VAR : "right" } b1 = [ ${compilerCommon} ${substrate-suite} ${compilerCommon} ${substrate-suite} ] b2 = [ ${compilerCommon} ${substrate-suite} ${compilerCommon} ${substrate-suite} ] """) assert config.get("b1")[1]['VAR'] == 'right' assert config.get("b2")[1]['VAR'] == 'right' def test_escape_sequences_json_equivalence(self): """ Quoted strings are in the same format as JSON strings, See: https://github.com/lightbend/config/blob/master/HOCON.md#unchanged-from-json """ source = r""" { "plain-backslash": "\\", "tab": "\t", "no-tab": "\\t", "newline": "\n", "no-newline": "\\n", "cr": "\r", "no-cr": "\\r", "windows": "c:\\temp" } """ expected = { 'plain-backslash': '\\', 'tab': '\t', 'no-tab': '\\t', 'newline': '\n', 'no-newline': '\\n', 'cr': '\r', 'no-cr': '\\r', 'windows': 'c:\\temp', } config = ConfigFactory.parse_string(source) assert config == expected assert config == json.loads(source) try: from dateutil.relativedelta import relativedelta except Exception: pass
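The test file above exercises most of pyhocon's parsing behavior. A condensed, runnable distillation of the substitution-and-merge semantics those tests assert, assuming pyhocon is installed:

from pyhocon import ConfigFactory

# Object substitution merges the referenced tree with the inline overrides,
# mirroring test_dict_substitutions above.
config = ConfigFactory.parse_string("""
    data-center-generic = { cluster-size = 6 }
    data-center-east = ${data-center-generic} { name = "east" }
""")
assert config.get('data-center-east.cluster-size') == 6
assert config.get('data-center-east.name') == 'east'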
#!/usr/bin/env python

# Copyright (c) 2018-2020 Intel Corporation
#
# This work is licensed under the terms of the MIT license.
# For a copy, see <https://opensource.org/licenses/MIT>.

"""
This module provides the ScenarioManager implementation.
It must not be modified and is for reference only!
"""

from __future__ import print_function
import sys
import time

import py_trees

from srunner.autoagents.agent_wrapper import AgentWrapper
from srunner.scenariomanager.carla_data_provider import CarlaDataProvider
from srunner.scenariomanager.result_writer import ResultOutputProvider
from srunner.scenariomanager.timer import GameTime
from srunner.scenariomanager.watchdog import Watchdog
#
# This source file is part of the EdgeDB open source project.
#
# Copyright 2008-present MagicStack Inc. and the EdgeDB authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from __future__ import annotations
from typing import *

import hashlib

from edb import errors

from edb.common import struct
from edb.edgeql import ast as qlast

from . import delta as sd
from . import inheriting
from . import objects as so
from . import schema as s_schema
from . import name as sn
from . import utils


ReferencedT = TypeVar('ReferencedT', bound='ReferencedObject')
ReferencedInheritingObjectT = TypeVar('ReferencedInheritingObjectT',
                                      bound='ReferencedInheritingObject')


def _build_alter_cmd_stack(
    self,
    schema: s_schema.Schema,
    context: sd.CommandContext,
    scls: so.Object,
    *,
    referrer: Optional[so.Object] = None
) -> Tuple[sd.DeltaRoot, sd.Command]:

    delta = sd.DeltaRoot()

    if referrer is None:
        assert isinstance(scls, ReferencedObject)
        referrer = scls.get_referrer(schema)

    obj = referrer
    object_stack = []

    if type(self) != type(referrer):
        object_stack.append(referrer)

    while obj is not None:
        if isinstance(obj, ReferencedObject):
            obj = obj.get_referrer(schema)
            object_stack.append(obj)
        else:
            obj = None

    cmd: sd.Command = delta
    for obj in reversed(object_stack):
        assert obj is not None
        alter_cmd_cls = sd.ObjectCommandMeta.get_command_class_or_die(
            sd.AlterObject, type(obj))
        alter_cmd = alter_cmd_cls(classname=obj.get_name(schema))
        cmd.add(alter_cmd)
        cmd = alter_cmd

    return delta, cmd


class CreateReferencedObject(
    ReferencedObjectCommand[ReferencedT],
    sd.CreateObject[ReferencedT],
):

    referenced_astnode: ClassVar[Type[qlast.ObjectDDL]]

    def _create_innards(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
    ) -> s_schema.Schema:
        referrer_ctx = self.get_referrer_context(context)
        if referrer_ctx is None:
            return super()._create_innards(schema, context)
        else:
            referrer = referrer_ctx.scls
            schema = self._create_ref(schema, context, referrer)
            return super()._create_innards(schema, context)

    def _create_ref(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        referrer: so.Object,
    ) -> s_schema.Schema:
        referrer_cls = type(referrer)
        mcls = type(self.scls)
        refdict = referrer_cls.get_refdict_for_class(mcls)
        schema = referrer.add_classref(schema, refdict.attr, self.scls)
        return schema


class DeleteReferencedObjectCommand(
    ReferencedObjectCommand[ReferencedT],
    sd.DeleteObject[ReferencedT],
):


class ReferencedInheritingObjectCommand(
    ReferencedObjectCommand[ReferencedInheritingObjectT],
    inheriting.InheritingObjectCommand[ReferencedInheritingObjectT],
):


class CreateReferencedInheritingObject(
    CreateReferencedObject[ReferencedInheritingObjectT],
    inheriting.CreateInheritingObject[ReferencedInheritingObjectT],
    ReferencedInheritingObjectCommand[ReferencedInheritingObjectT],
):


class AlterReferencedInheritingObject(
    ReferencedInheritingObjectCommand[ReferencedInheritingObjectT],
    inheriting.AlterInheritingObject[ReferencedInheritingObjectT],
):


class RebaseReferencedInheritingObject(
    ReferencedInheritingObjectCommand[ReferencedInheritingObjectT],
    inheriting.RebaseInheritingObject[ReferencedInheritingObjectT],
):

    implicit = struct.Field(bool, default=False)


class RenameReferencedInheritingObject(
    ReferencedInheritingObjectCommand[ReferencedInheritingObjectT],
    sd.RenameObject,
):


class DeleteReferencedInheritingObject(
    DeleteReferencedObjectCommand[ReferencedInheritingObjectT],
    inheriting.DeleteInheritingObject[ReferencedInheritingObjectT],
    ReferencedInheritingObjectCommand[ReferencedInheritingObjectT],
):
from calendar import month_name
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#__Author__ =
#_PlugName_ = Shop7z /admin/lipinadd.asp

import re

if __name__ == '__main__':
    from dummy import *
    audit(assign('shop7z', 'http://www.99ysbjw.com/')[1])
"""Support for the Philips Hue lights.""" from __future__ import annotations from datetime import timedelta from functools import partial import logging import random import aiohue import async_timeout from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_TEMP, ATTR_EFFECT, ATTR_FLASH, ATTR_HS_COLOR, ATTR_TRANSITION, EFFECT_COLORLOOP, EFFECT_RANDOM, FLASH_LONG, FLASH_SHORT, SUPPORT_BRIGHTNESS, SUPPORT_COLOR, SUPPORT_COLOR_TEMP, SUPPORT_EFFECT, SUPPORT_FLASH, SUPPORT_TRANSITION, LightEntity, ) from homeassistant.core import callback from homeassistant.exceptions import PlatformNotReady from homeassistant.helpers.debounce import Debouncer from homeassistant.helpers.entity import DeviceInfo from homeassistant.helpers.update_coordinator import ( CoordinatorEntity, DataUpdateCoordinator, UpdateFailed, ) from homeassistant.util import color from .const import ( DOMAIN as HUE_DOMAIN, GROUP_TYPE_LIGHT_GROUP, GROUP_TYPE_LIGHT_SOURCE, GROUP_TYPE_LUMINAIRE, GROUP_TYPE_ROOM, REQUEST_REFRESH_DELAY, ) from .helpers import remove_devices SCAN_INTERVAL = timedelta(seconds=5) _LOGGER = logging.getLogger(__name__) SUPPORT_HUE_ON_OFF = SUPPORT_FLASH | SUPPORT_TRANSITION SUPPORT_HUE_DIMMABLE = SUPPORT_HUE_ON_OFF | SUPPORT_BRIGHTNESS SUPPORT_HUE_COLOR_TEMP = SUPPORT_HUE_DIMMABLE | SUPPORT_COLOR_TEMP SUPPORT_HUE_COLOR = SUPPORT_HUE_DIMMABLE | SUPPORT_EFFECT | SUPPORT_COLOR SUPPORT_HUE_EXTENDED = SUPPORT_HUE_COLOR_TEMP | SUPPORT_HUE_COLOR SUPPORT_HUE = { "Extended color light": SUPPORT_HUE_EXTENDED, "Color light": SUPPORT_HUE_COLOR, "Dimmable light": SUPPORT_HUE_DIMMABLE, "On/Off plug-in unit": SUPPORT_HUE_ON_OFF, "Color temperature light": SUPPORT_HUE_COLOR_TEMP, } ATTR_IS_HUE_GROUP = "is_hue_group" GAMUT_TYPE_UNAVAILABLE = "None" # Minimum Hue Bridge API version to support groups # 1.4.0 introduced extended group info # 1.12 introduced the state object for groups # 1.13 introduced "any_on" to group state objects GROUP_MIN_API_VERSION = (1, 13, 0) def create_light(item_class, coordinator, bridge, is_group, rooms, api, item_id): """Create the light.""" api_item = api[item_id] if is_group: supported_features = 0 for light_id in api_item.lights: if light_id not in bridge.api.lights: continue light = bridge.api.lights[light_id] supported_features |= SUPPORT_HUE.get(light.type, SUPPORT_HUE_EXTENDED) supported_features = supported_features or SUPPORT_HUE_EXTENDED else: supported_features = SUPPORT_HUE.get(api_item.type, SUPPORT_HUE_EXTENDED) return item_class( coordinator, bridge, is_group, api_item, supported_features, rooms ) def hue_brightness_to_hass(value): """Convert hue brightness 1..254 to hass format 0..255.""" return min(255, round((value / 254) * 255)) def hass_to_hue_brightness(value): """Convert hass brightness 0..255 to hue 1..254 scale.""" return max(1, round((value / 255) * 254)) class HueLight(CoordinatorEntity, LightEntity): """Representation of a Hue light.""" def __init__(self, coordinator, bridge, is_group, light, supported_features, rooms): """Initialize the light.""" super().__init__(coordinator) self.light = light self.bridge = bridge self.is_group = is_group self._supported_features = supported_features self._rooms = rooms if is_group: self.is_osram = False self.is_philips = False self.is_innr = False self.is_ewelink = False self.is_livarno = False self.gamut_typ = GAMUT_TYPE_UNAVAILABLE self.gamut = None else: self.is_osram = light.manufacturername == "OSRAM" self.is_philips = light.manufacturername == "Philips" self.is_innr = light.manufacturername == "innr" 
self.is_ewelink = light.manufacturername == "eWeLink" self.is_livarno = light.manufacturername.startswith("_TZ3000_") self.gamut_typ = self.light.colorgamuttype self.gamut = self.light.colorgamut _LOGGER.debug("Color gamut of %s: %s", self.name, str(self.gamut)) if self.light.swupdatestate == "readytoinstall": err = ( "Please check for software updates of the %s " "bulb in the Philips Hue App." ) _LOGGER.warning(err, self.name) if self.gamut and not color.check_valid_gamut(self.gamut): err = "Color gamut of %s: %s, not valid, setting gamut to None." _LOGGER.debug(err, self.name, str(self.gamut)) self.gamut_typ = GAMUT_TYPE_UNAVAILABLE self.gamut = None async def async_added_to_hass(self) -> None: """Handle entity being added to Home Assistant.""" self.async_on_remove( self.bridge.listen_updates( self.light.ITEM_TYPE, self.light.id, self.async_write_ha_state ) ) await super().async_added_to_hass()
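A quick round-trip check of the two brightness converters above; assumes they are in scope. Note the scales are asymmetric (Hue never reports 0), so hass 0 maps to hue 1:

for hass_val in (0, 1, 127, 255):
    hue_val = hass_to_hue_brightness(hass_val)
    assert 1 <= hue_val <= 254
    print(hass_val, '->', hue_val, '->', hue_brightness_to_hass(hue_val))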
# Copyright (c) 2018-2021 Manfred Moitzi # License: MIT License # source: http://www.lee-mac.com/bulgeconversion.html # source: http://www.afralisp.net/archive/lisp/Bulges1.htm from typing import Any, TYPE_CHECKING, Tuple import math from ezdxf.math import Vec2 if TYPE_CHECKING: from ezdxf.eztypes import Vertex __all__ = [ "bulge_to_arc", "bulge_3_points", "bulge_center", "bulge_radius", "arc_to_bulge" ] def polar(p: Any, angle: float, distance: float) -> Vec2: """ Returns the point at a specified `angle` and `distance` from point `p`. Args: p: point as :class:`Vec2` compatible object angle: angle in radians distance: distance """ return Vec2(p) + Vec2.from_angle(angle, distance) def angle(p1: Any, p2: Any) -> float: """ Returns angle a line defined by two endpoints and x-axis in radians. Args: p1: start point as :class:`Vec2` compatible object p2: end point as :class:`Vec2` compatible object """ return (Vec2(p2) - Vec2(p1)).angle def arc_to_bulge(center: 'Vertex', start_angle: float, end_angle: float, radius: float) -> Tuple['Vec2', 'Vec2', float]: """ Returns bulge parameters from arc parameters. Args: center: circle center point as :class:`Vec2` compatible object start_angle: start angle in radians end_angle: end angle in radians radius: circle radius Returns: tuple: (start_point, end_point, bulge) """ start_point = polar(center, start_angle, radius) end_point = polar(center, end_angle, radius) pi2 = math.pi * 2 a = math.fmod((pi2 + (end_angle - start_angle)), pi2) / 4. bulge = math.sin(a) / math.cos(a) return start_point, end_point, bulge def bulge_3_points(start_point: 'Vertex', end_point: 'Vertex', point: 'Vertex') -> float: """ Returns bulge value defined by three points. Based on 3-Points to Bulge by `Lee Mac`_. Args: start_point: start point as :class:`Vec2` compatible object end_point: end point as :class:`Vec2` compatible object point: arbitrary point as :class:`Vec2` compatible object """ a = (math.pi - angle(point, start_point) + angle(point, end_point)) / 2 return math.sin(a) / math.cos(a) def bulge_to_arc(start_point: 'Vertex', end_point: 'Vertex', bulge: float) -> Tuple['Vec2', float, float, float]: """ Returns arc parameters from bulge parameters. The arcs defined by bulge values of :class:`~ezdxf.entities.LWPolyline` and 2D :class:`~ezdxf.entities.Polyline` entities start at the vertex which includes the bulge value and ends at the following vertex. Based on Bulge to Arc by `Lee Mac`_. Args: start_point: start vertex as :class:`Vec2` compatible object end_point: end vertex as :class:`Vec2` compatible object bulge: bulge value Returns: Tuple: (center, start_angle, end_angle, radius) """ r = signed_bulge_radius(start_point, end_point, bulge) a = angle(start_point, end_point) + (math.pi / 2 - math.atan(bulge) * 2) c = polar(start_point, a, r) if bulge < 0: return c, angle(c, end_point), angle(c, start_point), abs(r) else: return c, angle(c, start_point), angle(c, end_point), abs(r) def bulge_center(start_point: 'Vertex', end_point: 'Vertex', bulge: float) -> 'Vec2': """ Returns center of arc described by the given bulge parameters. Based on Bulge Center by `Lee Mac`_. Args: start_point: start point as :class:`Vec2` compatible object end_point: end point as :class:`Vec2` compatible object bulge: bulge value as float """ start_point = Vec2(start_point) a = angle(start_point, end_point) + (math.pi / 2. - math.atan(bulge) * 2.) 
return start_point + Vec2.from_angle(a, signed_bulge_radius(start_point, end_point, bulge)) def bulge_radius(start_point: 'Vertex', end_point: 'Vertex', bulge: float) -> float: """ Returns radius of arc defined by the given bulge parameters. Based on Bulge Radius by `Lee Mac`_ Args: start_point: start point as :class:`Vec2` compatible object end_point: end point as :class:`Vec2` compatible object bulge: bulge value """ return abs(signed_bulge_radius(start_point, end_point, bulge))
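# Round-trip sanity check for the conversions above (a sketch; assumes the
# ezdxf package is installed, where these helpers live in ezdxf.math).
# A bulge value of 1.0 describes a semicircle.
import math
from ezdxf.math import Vec2, arc_to_bulge, bulge_to_arc

center, start_angle, end_angle, radius = bulge_to_arc((0, 0), (2, 0), 1.0)
assert math.isclose(radius, 1.0)
assert center.isclose(Vec2(1, 0))
# Converting the arc parameters back recovers the original bulge value.
_, _, bulge = arc_to_bulge(center, start_angle, end_angle, radius)
assert math.isclose(bulge, 1.0)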
# To change this license header, choose License Headers in Project Properties. # To change this template file, choose Tools | Templates # and open the template in the editor. #if __name__ == "__main__": # print "Hello World" from ProgrammingEmail import ManageAttachments import jpype import os.path asposeapispath = os.path.join(os.path.abspath("./../../../"), "lib/") dataDir = os.path.join(os.path.abspath("./"), "data/") print "You need to put your Aspose.Email for Java APIs .jars in this folder:\n"+asposeapispath #print dataDir jpype.startJVM(jpype.getDefaultJVMPath(), "-Djava.ext.dirs=%s" % asposeapispath) hw = ManageAttachments(dataDir) hw.main()
from prompt_toolkit.key_binding.bindings.named_commands import (accept_line, self_insert, backward_delete_char, beginning_of_line) from prompt_toolkit.key_binding.bindings.basic import if_no_repeat from prompt_toolkit.key_binding.bindings.basic import load_basic_bindings from prompt_toolkit.key_binding.bindings.emacs import load_emacs_bindings, load_emacs_search_bindings from prompt_toolkit.key_binding.bindings.mouse import load_mouse_bindings from prompt_toolkit.key_binding.bindings.cpr import load_cpr_bindings from prompt_toolkit.key_binding.bindings.page_navigation import load_emacs_page_navigation_bindings from prompt_toolkit.key_binding import KeyBindings, merge_key_bindings from prompt_toolkit.keys import Keys, ALL_KEYS from prompt_toolkit.filters import Condition, HasSelection, is_searching from prompt_toolkit.selection import SelectionState from prompt_toolkit.clipboard import ClipboardData from prompt_toolkit.input.vt100_parser import ANSI_SEQUENCES from prompt_toolkit.application.current import get_app from prompt_toolkit.application import run_in_terminal from prompt_toolkit import __version__ as prompt_toolkit_version from .multiline import (auto_newline, tab_should_insert_whitespace, document_is_multiline_python) from .tokenize import inside_string, matching_parens from .theme import emoji, emoji_pudb from .processors import get_pyflakes_warnings import re import subprocess import sys import textwrap import platform r = custom_key_bindings = KeyBindings() # This can be removed once # https://github.com/prompt-toolkit/python-prompt-toolkit/pull/857 is in a # released version of prompt-toolkit. ANSI_SEQUENCES['\x1b[1;9A'] = (Keys.Escape, Keys.Up) ANSI_SEQUENCES['\x1b[1;9B'] = (Keys.Escape, Keys.Down) # Document.start_of_paragraph/end_of_paragraph don't treat multiple blank # lines correctly. # Gives the positions right before one or more blank lines BLANK_LINES = re.compile(r'\S *(\n *\n)') WORD = re.compile(r'([a-z0-9]+|[A-Z]{2,}|[a-zA-Z0-9][a-z0-9]*)') def insert_text_ovewrite(buffer, data, move_cursor=True): """ Insert characters at cursor position. :param fire_event: Fire `on_text_insert` event. This is mainly used to trigger autocompletion while typing. """ # Original text & cursor position. otext = buffer.text ocpos = buffer.cursor_position # Don't overwrite the newline itself. Just before the line ending, # it should act like insert mode. overwritten_text = otext[ocpos:ocpos + len(data)] buffer.text = otext[:ocpos] + data + otext[ocpos + len(overwritten_text):] if move_cursor: buffer.cursor_position += len(data) is_returnable = Condition( lambda: get_app().current_buffer.is_returnable) # Always accept the line if the previous key was Up # Requires https://github.com/jonathanslenders/python-prompt-toolkit/pull/492. # We don't need a parallel for down because down is already at the end of the # prompt. # M-[ a g is set to S-Enter in iTerm2 settings Keys.ShiftEnter = "<Shift-Enter>" ALL_KEYS.append('<Shift-Enter>') ANSI_SEQUENCES['\x1b[ag'] = Keys.ShiftEnter ANSI_SEQUENCES['\x1bOM'] = Keys.ShiftEnter if prompt_toolkit_version[0] != '3': r.add_binding(Keys.ShiftEnter)(accept_line) LEADING_WHITESPACE = re.compile(r'( *)[^ ]?') def do_cycle_spacing(text, cursor_position, state=[]): rstripped = text[:cursor_position].rstrip() lstripped = text[cursor_position:].lstrip() text_before_cursor = text[:cursor_position] # The first element of state is the original text. The last element is the # buffer text and cursor position as we last left them. 
If either of those # have changed, reset. The state here is global, but that's fine, because # we consider any change to be enough clear the state. The worst that # happens here is that we resume when we shouldn't if things look exactly # as they did where we left off. # TODO: Use event.previous_key_sequence instead. if state and state[-1] != (text, cursor_position): state.clear() if len(state) == 0: # Replace all whitespace at the cursor (if any) with a single space. state.append((text, cursor_position)) cursor_position -= len(text_before_cursor) - len(rstripped) -1 text = rstripped + ' ' + lstripped state.append((text, cursor_position)) elif len(state) == 2: # Exactly one space at the cursor. Remove it. cursor_position -= 1 text = rstripped + lstripped state.append((text, cursor_position)) elif len(state) == 3: # Restore original text and cursor position text, cursor_position = state[0] state.clear() if cursor_position < 0: cursor_position = 0 if cursor_position > len(text): cursor_position = len(text) return text, cursor_position # Selection stuff # The default doesn't toggle correctly def system_copy(text): if "Linux" in platform.platform(): copy_command = ['xclip', '-selection', 'c'] else: copy_command = ['pbcopy'] try: # In Python 3.6 we can do this: # run(copy_command, input=text, encoding='utf-8', check=True) subprocess.run(copy_command, input=text.encode('utf-8'), check=True) except FileNotFoundError: print("Error: could not find", copy_command[0], file=sys.stderr) except subprocess.CalledProcessError as e: print(copy_command[0], "error:", e, file=sys.stderr) def system_paste(): if "Linux" in platform.platform(): paste_command = ['xsel', '-b'] else: paste_command = ['pbpaste'] try: # In Python 3.6 we can do this: # run(paste_command, input=text, encoding='utf-8') p = subprocess.run(paste_command, stdout=subprocess.PIPE, check=True) except FileNotFoundError: print("Error: could not find", paste_command[0], file=sys.stderr) except subprocess.CalledProcessError as e: print(paste_command[0], "error:", e, file=sys.stderr) return p.stdout.decode('utf-8') # M-[ a b is set to C-S-/ (C-?) in iTerm2 settings Keys.ControlQuestionmark = "<C-?>" ALL_KEYS.append("<C-?>") ANSI_SEQUENCES['\x1b[ab'] = Keys.ControlQuestionmark Keys.ControlSlash = "<C-/>" ALL_KEYS.append("<C-/>") ANSI_SEQUENCES['\x1b"5/'] = Keys.ControlSlash # This won't work until # https://github.com/jonathanslenders/python-prompt-toolkit/pull/484 is # merged. if prompt_toolkit_version[0] != '3': # Need to escape all spaces here because of verbose (x) option below ps1_prompts = [r'>>>\ '] + [re.escape(i) + r'\[\d+\]:\ ' for i, j in emoji + [emoji_pudb]] + [r'In\ \[\d+\]:\ '] ps2_prompts = [r'\ *\.\.\.:\ ?', r'\.\.\.\ ?', '\N{CLAPPING HANDS SIGN}+\\ ?\\ ?'] PS1_PROMPTS_RE = re.compile('|'.join(ps1_prompts)) PS2_PROMPTS_RE = re.compile('|'.join(ps2_prompts)) PROMPTED_TEXT_RE = re.compile(r'''(?x) # Multiline and verbose (?P<prompt> (?P<ps1prompt>{PS1_PROMPTS_RE.pattern}) # Match prompts at the front | (?P<ps2prompt>{PS2_PROMPTS_RE.pattern}))? # of the line. (?P<noprompt>(?(prompt)\r|))? # If the prompt is not # matched, this is a special # marker group that will match # the empty string. # Otherwise it will not # match (because all \r's # have been stripped from # the string). (?P<line>.*)\n # The actual line. '''.format(PS1_PROMPTS_RE=PS1_PROMPTS_RE, PS2_PROMPTS_RE=PS2_PROMPTS_RE)) def prompt_repl(match): r""" repl function for re.sub for clearing prompts Replaces PS1 prompts with \r and removes PS2 prompts. 
""" # TODO: Remove the lines with no prompt if match.group('ps1prompt') is not None: return '\r' + match.group('line') + '\n' elif match.group('ps2prompt') is not None: return match.group('line') + '\n' return '' def split_prompts(text, indent=''): r""" Takes text copied from mypython, Python, or IPython session and returns a list of inputs Outputs are stripped. If no prompts are found the text is left alone. The resulting text is indented by indent, except for the first line. It is assumed that the text contains no carriage returns (\r). Trailing whitespace and newlines is stripped from the outputs. Example: >>> split_prompts(''' ... In [1]: a = 1 ... ... In [2]: a ... Out[2]: 1 ... ... In [3]: def test(): ... ...: pass ... ...: ... ''') ['a = 1', 'a', 'def test():\n pass'] """ from .mypython import validate_text text = textwrap.dedent(text).strip() + '\n' text = textwrap.dedent(PROMPTED_TEXT_RE.sub(prompt_repl, text)).lstrip() lines = text.split('\r') # Make sure multilines end in two newlines for i, line in enumerate(lines): try: validate_text(line) except SyntaxError: # If there is a syntax error, we can't use the CMD_QUEUE (it # breaks things). lines = ['\n'.join(lines)] break if '\n' in line.rstrip(): lines[i] += '\n' lines[0] = textwrap.indent(lines[0], indent, # Don't indent the first line, it's already indented lambda line, _x=[]: bool(_x or _x.append(1))) for i in range(1, len(lines)): lines[i] = textwrap.indent(lines[i], indent) # Extraneous newlines at the end will be stripped by the prompt anyway. # This just makes this function easier to test. lines = [i.rstrip() for i in lines] return lines
from biogeme import * from headers import * from loglikelihood import * from statistics import * from nested import * #import random cons_work= Beta('cons for work', 0,-10,10,0) cons_edu = Beta('cons for education',0,-50,10,0) cons_shopping = Beta('cons for shopping',0,-10,10,0) cons_other = Beta('cons for other',0,-10,10,0) cons_Q = Beta('cons for quit',0,-10,10,1) first_stop_inbound= Beta('dummy for first stop of inbound half tour', 0,-10,10,1) second_stop_inbound= Beta('dummy for second stop of inbound half tour',0,-10,10,0) threeplus_stop_inbound=Beta('dummy for 3+ stop of inbound half tour',0,-10,10,0) first_stop_outbound= Beta('dummy for first stop of outbound half tour', 0,-10,10,0) second_stop_outbound= Beta('dummy for second stop of outbound half tour',0,-10,10,0) threeplus_stop_outbound=Beta('dummy for 3+ stop of outbound half tour',0,-10,10,0) work_tour_dummy_Q=Beta('work tour dummy in quit',0,-10,10,1) edu_tour_dummy_Q=Beta('edu tour dummy in quit',0,-10,10,1) shopping_tour_dummy_Q=Beta('shopping tour dummy in quit',0,-10,10,1) other_tour_dummy_Q=Beta('other tour dummy in quit',0,-10,10,1) first_tour_dummy_Q=Beta('first tour dummy in quit',0,-10,10,0) sub_tour_dummy_Q=Beta('has subtour dummy in quit',0,-10,10,0) zero_tour_remain_Q=Beta('zero tour remain dummy',0,-10,10,1) one_tour_remain_Q=Beta('one tour remain dummy',0,-10,10,0) twoplus_tour_remain_Q=Beta('2+ tour remain dummy',0,-10,10,1) work_tour_dummy_W=Beta('work tour dummy in work',0,-10,10,1) edu_tour_dummy_W=Beta('edu tour dummy in work',0,-10,10,1) shopping_tour_dummy_W=Beta('shopping tour dummy in work',0,-10,10,1) other_tour_dummy_W=Beta('other tour dummy in work',0,-10,10,1) female_dummy_W=Beta('female dummy in work',0,-10,10,0) student_dummy_W=Beta('student dummy in work',0,-10,10,1) worker_dummy_W=Beta('worker dummy in work',0,-10,10,1) driver_dummy_W=Beta('driver dummy in work',0,-10,10,0) passenger_dummy_W=Beta('passenger dummy in work',0,-10,10,0) public_dummy_W=Beta('PT dummy in work',0,-10,10,0) work_tour_dummy_E=Beta('work tour dummy in edu',0,-10,10,1) edu_tour_dummy_E=Beta('edu tour dummy in edu',0,-10,10,1) shopping_tour_dummy_E=Beta('shopping tour dummy in edu',0,-10,10,1) other_tour_dummy_E=Beta('other tour dummy in edu',0,-10,10,1) female_dummy_E=Beta('female dummy in edu',0,-10,10,0) student_dummy_E=Beta('student dummy in edu',0,-10,10,1) worker_dummy_E=Beta('worker dummy in edu',0,-10,10,1) driver_dummy_E=Beta('driver dummy in edu',0,-10,10,0) passenger_dummy_E=Beta('passenger dummy in edu',0,-10,10,0) public_dummy_E=Beta('PT dummy in edu',0,-10,10,0) work_tour_dummy_S=Beta('work tour dummy in shopping',0,-10,10,1) edu_tour_dummy_S=Beta('edu tour dummy in shopping',0,-10,10,1) shopping_tour_dummy_S=Beta('shopping tour dummy in shopping',0,-10,10,1) other_tour_dummy_S=Beta('other tour dummy in shopping',0,-10,10,0) female_dummy_S=Beta('female dummy in shopping',0,-10,10,0) student_dummy_S=Beta('student dummy in shopping',0,-10,10,1) worker_dummy_S=Beta('worker dummy in shopping',0,-10,10,0) driver_dummy_S=Beta('driver dummy in shopping',0,-10,10,0) passenger_dummy_S=Beta('passenger dummy in shopping',0,-10,10,0) public_dummy_S=Beta('PT dummy in shopping',0,-10,10,0) work_tour_dummy_O=Beta('work tour dummy in other',0,-10,10,0) edu_tour_dummy_O=Beta('edu tour dummy in other',0,-10,10,0) shopping_tour_dummy_O=Beta('shopping tour dummy in other',0,-10,10,0) other_tour_dummy_O=Beta('other tour dummy in other',0,-10,10,1) female_dummy_O=Beta('female dummy in other',0,-10,10,0) 
student_dummy_O=Beta('student dummy in other',0,-10,10,0) worker_dummy_O=Beta('worker dummy in other',0,-10,10,0) driver_dummy_O=Beta('driver dummy in other',0,-10,10,0) passenger_dummy_O=Beta('passenger dummy in other',0,-10,10,0) public_dummy_O=Beta('PT dummy in other',0,-10,10,0) work_logsum=Beta('work logsum in work',0,-10,10,1) edu_logsum=Beta('edu logsum in edu',0,-10,10,1) shop_logsum=Beta('shop logsum in shop',0,-10,10,1) other_logsum=Beta('other logsum in other',0,-10,10,1) time_window_work=Beta('time available in work',0,-10,10,1) time_window_edu= Beta('time available in edu',0,-10,10,1) time_window_shopping= Beta('time available in shopping',0,-10,10,1) time_window_other= Beta('time available in other',0,-10,10,1) tour_distance_work= Beta('log tour distance in work',0,-10,10,0) tour_distance_edu= Beta('log tour distance in edu',0,-10,10,0) tour_distance_shopping= Beta('log tour distance in shopping',0,-10,10,0) tour_distance_other=Beta('log tour distance in other',0,-10,10,0) a700_a930_work= Beta('period 7am to 9:30am in work',0,-10,10,0) a930_a1200_work=Beta('period 9:30am to 12pm in work',0,-10,10,0) p300_p530_work=Beta('period 3pm to 5:30pm in work',0,-10,10,0) p530_p730_work=Beta('period 5:30pm to 7:30 pm in work',0,-10,10,0) p730_p1000_work=Beta('period 7:30pm to 10pm in work',0,-10,10,0) p1000_a700_work=Beta('period 10pm to 7am in work',0,-10,10,0) a700_a930_edu= Beta('period 7am to 9:30am in edu',0,-10,10,0) a930_a1200_edu=Beta('period 9:30am to 12pm in edu',0,-10,10,0) p300_p530_edu=Beta('period 3pm to 5:30pm in edu',0,-10,10,0) p530_p730_edu=Beta('period 5:30pm to 7:30 pm in edu',0,-10,10,0) p730_p1000_edu=Beta('period 7:30pm to 10pm in edu',0,-10,10,0) p1000_a700_edu=Beta('period 10pm to 7am in edu',0,-10,10,0) a700_a930_shopping= Beta('period 7am to 9:30am in shopping',0,-10,10,0) a930_a1200_shopping=Beta('period 9:30am to 12pm in shopping',0,-10,10,0) p300_p530_shopping=Beta('period 3pm to 5:30pm in shopping',0,-10,10,0) p530_p730_shopping=Beta('period 5:30pm to 7:30 pm in shopping',0,-10,10,0) p730_p1000_shopping=Beta('period 7:30pm to 10pm in shopping',0,-10,10,0) p1000_a700_shopping=Beta('period 10pm to 7am in shopping',0,-10,10,0) a700_a930_other= Beta('period 7am to 9:30am in other',0,-10,10,0) a930_a1200_other=Beta('period 9:30am to 12pm in other',0,-10,10,0) p300_p530_other=Beta('period 3pm to 5:30pm in other',0,-10,10,0) p530_p730_other=Beta('period 5:30pm to 7:30 pm in other',0,-10,10,0) p730_p1000_other=Beta('period 7:30pm to 10pm in other',0,-10,10,0) p1000_a700_other=Beta('period 10pm to 7am in other',0,-10,10,0) MU1 = Beta('MU for quit',1,0,100,1) MU2 = Beta('MU for non-quit', 1.0,0,100,1) #V for work V_work= cons_work+\ work_tour_dummy_W*1*(tour_type==1)+\ edu_tour_dummy_W*1*(tour_type==2)+\ shopping_tour_dummy_W*1*(tour_type==3)+\ other_tour_dummy_W*1*(tour_type==4)+\ female_dummy_W*female_dummy+\ student_dummy_W*student_dummy+\ worker_dummy_W*worker_dummy+\ driver_dummy_W*driver_dummy+\ passenger_dummy_W*passenger_dummy+\ public_dummy_W*public_dummy+\ work_logsum * worklogsum+\ time_window_work*time_window_h+\ tour_distance_work*log(1+distance)+\ a700_a930_work*p_700a_930a+\ a930_a1200_work*p_930a_1200a+\ p300_p530_work*p_300p_530p+\ p530_p730_work*p_530p_730p+\ p730_p1000_work*p_730p_1000p+\ p1000_a700_work*p_1000p_700a #V for education V_edu = cons_edu+\ work_tour_dummy_E*1*(tour_type==1)+\ edu_tour_dummy_E*1*(tour_type==2)+\ shopping_tour_dummy_E*1*(tour_type==3)+\ other_tour_dummy_E*1*(tour_type==4)+\ female_dummy_E*female_dummy+\ 
student_dummy_E*student_dummy+\ worker_dummy_E*worker_dummy+\ driver_dummy_E*driver_dummy+\ passenger_dummy_E*passenger_dummy+\ public_dummy_E*public_dummy+\ edu_logsum * edulogsum+\ time_window_edu*time_window_h+\ tour_distance_edu*log(1+distance)+\ a700_a930_edu*p_700a_930a+\ a930_a1200_edu*p_930a_1200a+\ p300_p530_edu*p_300p_530p+\ p530_p730_edu*p_530p_730p+\ p730_p1000_edu*p_730p_1000p+\ p1000_a700_edu*p_1000p_700a #V for shopping V_shopping = cons_shopping+\ work_tour_dummy_S*1*(tour_type==1)+\ edu_tour_dummy_S*1*(tour_type==2)+\ shopping_tour_dummy_S*1*(tour_type==3)+\ other_tour_dummy_S*1*(tour_type==4)+\ female_dummy_S*female_dummy+\ student_dummy_S*student_dummy+\ worker_dummy_S*worker_dummy+\ driver_dummy_S*driver_dummy+\ passenger_dummy_S*passenger_dummy+\ public_dummy_S*public_dummy+\ shop_logsum * shoplogsum+\ time_window_shopping*time_window_h+\ tour_distance_shopping*log(1+distance)+\ a700_a930_shopping*p_700a_930a+\ a930_a1200_shopping*p_930a_1200a+\ p300_p530_shopping*p_300p_530p+\ p530_p730_shopping*p_530p_730p+\ p730_p1000_shopping*p_730p_1000p+\ p1000_a700_shopping*p_1000p_700a #V for other V_other=cons_other+\ work_tour_dummy_O*1*(tour_type==1)+\ edu_tour_dummy_O*1*(tour_type==2)+\ shopping_tour_dummy_O*1*(tour_type==3)+\ other_tour_dummy_O*1*(tour_type==4)+\ female_dummy_O*female_dummy+\ student_dummy_O*student_dummy+\ worker_dummy_O*worker_dummy+\ driver_dummy_O*driver_dummy+\ passenger_dummy_O*passenger_dummy+\ public_dummy_O*public_dummy+\ other_logsum * otherlogsum+\ time_window_other*time_window_h+\ tour_distance_other*log(1+distance)+\ a700_a930_other*p_700a_930a+\ a930_a1200_other*p_930a_1200a+\ p300_p530_other*p_300p_530p+\ p530_p730_other*p_530p_730p+\ p730_p1000_other*p_730p_1000p+\ p1000_a700_other*p_1000p_700a #V for quit V_quit= cons_Q+first_stop_inbound*first_stop*first_bound+\ second_stop_inbound*second_stop*first_bound+\ threeplus_stop_inbound*three_plus_stop*first_bound+\ first_stop_outbound*first_stop*second_bound+\ second_stop_outbound*second_stop*second_bound+\ threeplus_stop_outbound*three_plus_stop*second_bound+\ work_tour_dummy_Q*1*(tour_type==1)+\ edu_tour_dummy_Q*1*(tour_type==2)+\ shopping_tour_dummy_Q*1*(tour_type==3)+\ other_tour_dummy_Q*1*(tour_type==4)+\ first_tour_dummy_Q*first_tour_dummy+\ sub_tour_dummy_Q*has_subtour+zero_tour_remain_Q*1*(tour_remain==0)+\ one_tour_remain_Q*1*(tour_remain==1)+twoplus_tour_remain_Q*1*(tour_remain>=2) V = {0:V_quit,1: V_work,2:V_edu,3:V_shopping,4:V_other} av= {0:avail_quit,1:avail_workstop,2:avail_edustop,3:avail_shopstop,4:avail_otherstop} nest_quit = MU1 , [0] nest_nonquit = MU2 , [1,2,3,4] nests=nest_quit,nest_nonquit prob = nested(V,av,nests,stop_type) #prob = bioLogit(V,av,stop_type) rowIterator('obsIter') BIOGEME_OBJECT.ESTIMATE = Sum(log(prob),'obsIter') exclude = ((avail_violation==1)+(origin_mtz==0)+(destination_mtz==0)+(time_window_h>=10)) > 0 BIOGEME_OBJECT.EXCLUDE = exclude nullLoglikelihood(av,'obsIter') choiceSet = [0,1,2,3,4] cteLoglikelihood(choiceSet,stop_type,'obsIter') availabilityStatistics(av,'obsIter') BIOGEME_OBJECT.PARAMETERS['optimizationAlgorithm'] = "CFSQP" BIOGEME_OBJECT.PARAMETERS['checkDerivatives'] = "1" BIOGEME_OBJECT.PARAMETERS['numberOfThreads'] = "6"
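# For reference, the two-level nested logit that `nested(V, av, nests, ...)`
# evaluates above reduces to the textbook formula below (a sketch of the
# formula only, not Biogeme's implementation; utilities and scales are made up).
import math

def nested_logit_probs(V, nests):
    """V: dict alt -> utility; nests: list of (mu, [alts]). Returns alt -> probability."""
    # Inclusive value (logsum) of each nest, scaled by its mu parameter.
    logsums = [math.log(sum(math.exp(mu * V[j]) for j in alts)) / mu
               for mu, alts in nests]
    denom = sum(math.exp(ls) for ls in logsums)
    probs = {}
    for (mu, alts), ls in zip(nests, logsums):
        p_nest = math.exp(ls) / denom                          # nest choice
        within = sum(math.exp(mu * V[j]) for j in alts)
        for j in alts:
            probs[j] = p_nest * math.exp(mu * V[j]) / within   # choice within nest
    return probs

p = nested_logit_probs({0: -1.0, 1: 0.5, 2: 0.2, 3: -0.3, 4: 0.0},
                       [(1.0, [0]), (1.0, [1, 2, 3, 4])])
assert math.isclose(sum(p.values()), 1.0)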
#
from config import Config
from flask import Flask
from flask_sqlalchemy import SQLAlchemy

#
db = SQLAlchemy()


# app
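# A minimal sketch of the application-factory pattern this stub points toward;
# create_app and the Config usage are assumptions, not part of the original file.
def create_app(config_class=Config):
    app = Flask(__name__)
    app.config.from_object(config_class)
    db.init_app(app)  # bind the module-level SQLAlchemy handle to this app
    return app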
import json import web3
"""STOCHASTIC ROSS plotting module. This module returns graphs for each type of analyses in st_rotor_assembly.py. """ import numpy as np from plotly import express as px from plotly import graph_objects as go from plotly import io as pio from plotly.subplots import make_subplots from ross.plotly_theme import tableau_colors pio.renderers.default = "browser" # set Plotly palette of colors colors1 = px.colors.qualitative.Dark24 colors2 = px.colors.qualitative.Light24
import os import itertools import importlib import numpy as np import random STRATEGY_FOLDER = "exampleStrats" RESULTS_FILE = "results.txt" pointsArray = [[1,5],[0,3]] # The i-j-th element of this array is how many points you receive if you do play i, and your opponent does play j. moveLabels = ["D","C"] # D = defect, betray, sabotage, free-ride, etc. # C = cooperate, stay silent, comply, upload files, etc. # Returns a 2-by-n numpy array. The first axis is which player (0 = us, 1 = opponent) # The second axis is which turn. (0 = first turn, 1 = next turn, etc. # For example, it might return # # [[0 0 1] a.k.a. D D C # [1 1 1]] a.k.a. C C C # # if there have been 3 turns, and we have defected twice then cooperated once, # and our opponent has cooperated all three times. runFullPairingTournament(STRATEGY_FOLDER, RESULTS_FILE)
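# Worked example of the payoff convention described above (a sketch): score a
# three-turn history from each side. pointsArray[i][j] is *your* payoff when
# you play i and the opponent plays j.
history = np.array([[0, 0, 1],    # us:       D D C
                    [1, 1, 1]])   # opponent: C C C
my_score = sum(pointsArray[m][o] for m, o in zip(history[0], history[1]))
opp_score = sum(pointsArray[o][m] for m, o in zip(history[0], history[1]))
# Defecting on a cooperator pays 5 (and the victim gets 0); mutual cooperation pays 3 each.
assert (my_score, opp_score) == (5 + 5 + 3, 0 + 0 + 3)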
# Copyright (c) 2014, Stanford University # All rights reserved. # # Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: # 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. # 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ''' Created on Sep 24, 2013 @author: paepcke Modifications: - Dec 30, 2013: Added closing of connection to close() method ''' import re import subprocess import tempfile import pymysql #import MySQLdb
#!./venv/bin/python from lib.mbp32 import XKey from lib.utils import one_line_from_stdin xkey = XKey.from_xkey(one_line_from_stdin()) print(xkey) print("Version:", xkey.version) print("Depth:", xkey.depth) print("Parent FP:", xkey.parent_fp.hex()) print("Child number:", xkey.child_number_with_tick()) print("Chain code:", xkey.chain_code.hex()) print("Key:", xkey.key) if xkey.key.get_private_bytes(): print("Private bytes:", xkey.key.get_private_bytes().hex()) print("Public bytes:", xkey.key.get_public_bytes().hex()) print("Key ID:", xkey.keyid().hex()) print("XKey:", xkey.to_xkey().decode('ascii'))
from spherical_distortion.util import * sample_order = 9 # Input resolution to examine ang_fov(sample_order)
from polymath import UNSET_SHAPE, DEFAULT_SHAPES import builtins import operator from collections import OrderedDict, Mapping, Sequence, deque import functools from numbers import Integral, Rational, Real import contextlib import traceback import uuid import numpy as np import importlib from .graph import Graph from .domain import Domain from .util import _noop_callback, _flatten_iterable, node_hash, \ _is_node_type_instance, is_iterable def instantiate_node(self, node): # pylint:disable=W0621 """ Instantiate nodes by retrieving the node object associated with the node name. Parameters ---------- node : Node or str Node instance or name of an node. Returns ------- instantiated_node : Node Node instance. Raises ------ ValueError If `node` is not an `Node` instance or an node name. RuntimeError If `node` is an `Node` instance but does not belong to this graph. """ if isinstance(node, str): return self.nodes[node] if isinstance(node, Node): if node.name not in self.nodes and (node.graph != self): raise RuntimeError(f"node '{node}' does not belong to {self} graph, instead belongs to" f" {node.graph}") return node raise ValueError(f"'{node}' is not an `Node` instance or node name") def instantiate_graph(self, context, **kwargs): """ Instantiate a graph by replacing all node names with node instances. .. note:: This function modifies the context in place. Use :code:`context=context.copy()` to avoid the context being modified. Parameters ---------- context : dict[Node or str, object] Context whose keys are node instances or names. kwargs : dict[str, object] Additional context information keyed by variable name. Returns ------- normalized_context : dict[Node, object] Normalized context whose keys are node instances. Raises ------ ValueError If the context specifies more than one value for any node. ValueError If `context` is not a mapping. """ if context is None: context = {} elif not isinstance(context, Mapping): raise ValueError("`context` must be a mapping.") nodes = list(context) # Add the keyword arguments for node in nodes: # pylint:disable=W0621 value = context.pop(node) node = self.instantiate_node(node) if node in context: raise ValueError(f"duplicate unequal value for node '{node}'") context[node] = value if node.op_name in ["placeholder", "state", "input", "output", "temp"] and not node.is_shape_finalized(): context[node] = node.evaluate(context) for name, value in kwargs.items(): node = self.nodes[name] if node in context: raise ValueError(f"duplicate value for node '{node}'") context[node] = value if node.op_name in ["placeholder", "state", "input", "output", "temp"] and not node.is_shape_finalized(): context[node] = node.evaluate(context) return context def run(self, fetches, context=None, *, callback=None, **kwargs): """ Evaluate one or more nodes given a dictionary of node names with their values. .. note:: This function modifies the context in place. Use :code:`context=context.copy()` to avoid the context being modified. Parameters ---------- fetches : list[str or Node] or str or Node One or more `Node` instances or names to evaluate. context : dict or None Context in which to evaluate the nodes. callback : callable or None Callback to be evaluated when an node is evaluated. kwargs : dict Additional context information keyed by variable name. Returns ------- values : Node or tuple[object] Output of the nodes given the context. Raises ------ ValueError If `fetches` is not an `Node` instance, node name, or a sequence thereof. 
""" if isinstance(fetches, (str, Node)): fetches = [fetches] single = True elif isinstance(fetches, Sequence): single = False else: raise ValueError("`fetches` must be an `Node` instance, node name, or a " "sequence thereof.") fetches = [self.instantiate_node(node) for node in fetches] context = self.instantiate_graph(context, **kwargs) for c in context: if c in fetches and c.op_name in ["output", "state", "temp"]: write_name = "/".join([f"{i}{c.write_count-1}" for i in c.name.split("/")]) if c.write_count > 0 else c.name fetches[fetches.index(c)] = c.graph.nodes[write_name] values = [fetch.evaluate_node(fetch, context, callback=callback) for fetch in fetches] return values[0] if single else tuple(values) def set_name(self, name): """ Set the name of the node and update the graph. Parameters ---------- value : str Unique name of the node. Returns ------- self : Node This node. Raises ------ ValueError If an node with `value` already exists in the associated graph. KeyError If the current name of the node cannot be found in the associated graph. """ name = name or uuid.uuid4().hex # TODO: Need a way to check if the existing node is not equal to the current ndoe as ewll if self.graph and name in self.graph.nodes: raise ValueError(f"duplicate name '{name}' in {self.graph.name}:\n\t" f"Existing: {self.graph.nodes[name].args}\n\t" f"New: {self.args}") if self.graph: graph = self.graph if self._name and self._name in graph.nodes: graph.update_graph_key(self._name, name) else: graph.nodes[name] = self self._name = name return self def evaluate_dependencies(self, context, callback=None): """ Evaluate the dependencies of this node and discard the values. Parameters ---------- context : dict Normalised context in which to evaluate the node. callback : callable or None Callback to be evaluated when an node is evaluated. """ for node in self.dependencies: node.evaluate(context, callback) def evaluate(self, context, callback=None): """ Evaluate the node given a context. Parameters ---------- context : dict Normalised context in which to evaluate the node. callback : callable or None Callback to be evaluated when an node is evaluated. Returns ------- value : object Output of the node given the context. """ # Evaluate all explicit dependencies first self.evaluate_dependencies(context, callback) if self in context: return context[self] # Evaluate the parents partial = functools.partial(self.evaluate_node, context=context, callback=callback) args = [partial(arg) for arg in self.args] kwargs = {key: partial(value) for key, value in self.kwargs.items() if key not in self.added_attrs} # Evaluate the node callback = callback or _noop_callback with callback(self, context): if self.__class__.__name__ == "Node": context[self] = self.value = self._evaluate(*args, context=context, **kwargs) else: context[self] = self.value = self._evaluate(*args, **kwargs) return self.value def _evaluate(self, *args, context=None, **kwargs): """ Inheriting nodes should implement this function to evaluate the node. """ return self(*args, context, **kwargs) def __bool__(self): return True def __hash__(self): return id(self) def func_hash(self): """ This returns the functional hash of a particular node. The default hash returns an object id, whereas this function returns a hash of all attributes and subgraphs of a node. 
""" return node_hash(self) def find_node(self, name): g = self.graph while g is not None and name not in g.nodes: g = g.graph if name in g.nodes: return g.nodes[name] raise RuntimeError(f"Cannot find {name} in graph nodes. Graph: {self.graph}") def __len__(self): #TODO: Update this to check for finalzied shape if self.shape == UNSET_SHAPE: raise TypeError(f'`shape` must be specified explicitly for nodes {self}') return self.shape[0] def __iter__(self): num = len(self) for i in range(num): yield self[i] class EvaluationError(RuntimeError): """ Failed to evaluate an node. """ class var_index(Node): # pylint: disable=C0103,W0223 """ Node representing values of a variable corresponding to input index values. Parameters ---------- var : Node The multi-dimensional variable used for indexing into. idx : tuple Tuple of either integer values or index/index_op nodes. """ def set_name(self, name): """ Set the name for a variable index, making sure to replicate the new name with a unique stringwhich corresponds to the variable, index combination. Parameters ---------- value : str Unique name of the node. Returns ------- self : Node This node. Raises ------ ValueError If an node with `value` already exists in the associated graph. KeyError If the current name of the node cannot be found in the associated graph. """ # TODO: Need a way to check if the existing node is not equal to the current ndoe as ewll if self.graph and name in self.graph.nodes: raise ValueError(f"duplicate name '{name}' in {self.graph.name}:" f"Existing: {self.graph.nodes[name].args}\n" f"New: {self.args}") if self.graph: graph = self.graph if self._name is not None and self._name in graph.nodes: graph.update_graph_key(self._name, name) else: graph.nodes[name] = self self._name = name return self def __getitem__(self, key): if self.is_shape_finalized() and len(self.nodes) >= np.prod(self.shape): if isinstance(key, Integral): key = tuple([key]) idx = np.ravel_multi_index(key, dims=self.shape, order='C') ret = self.nodes.item_by_index(idx) return ret else: if isinstance(key, (list)): ret = var_index(self.var, tuple(key), graph=self) elif isinstance(key, tuple): ret = var_index(self.var, key, graph=self) else: ret = var_index(self.var, tuple([key]), graph=self) return ret def is_scalar(self, val=None): if val is not None and (not isinstance(val, np.ndarray) or (len(val.shape) == 1 and val.shape[0] == 1)): if self.var.shape != DEFAULT_SHAPES[0] and (len(self.var.shape) == 1 and not isinstance(self.var.shape[0],Node)): raise ValueError(f"Invalid shape var for var index {self} with variable shape {self.var.shape}") return True else: return self.var.shape == DEFAULT_SHAPES[0] def _evaluate(self, var, indices, **kwargs): if self.is_scalar(var): out_shape = (1,) indices = (0,) single = True else: out_shape = self.domain.shape_from_indices(indices) indices = self.domain.compute_pairs() single = False if isinstance(var, (Integral, Real, str)): var = np.asarray([var]) elif not isinstance(var, (np.ndarray, list)): raise TypeError(f"Variable {var} with type {type(var)} is not a list or numpy array, and cannot be sliced for {self.name}") elif isinstance(var, list): var = np.asarray(var) if len(var.shape) != len(out_shape) and np.prod(var.shape) == np.prod(out_shape): if len(out_shape) > len(var.shape): for i in range(len(out_shape)): if out_shape[i] == 1: var = np.expand_dims(var, axis=i) else: var = np.squeeze(var) if len(var.shape) != len(out_shape) and np.prod(var.shape) != np.prod(out_shape): raise ValueError(f"Index list does not match 
{var.shape} in {self.var.name} - {self.var.op_name}" f"dimensions for slice {self.args[0].name} with {out_shape}.\n" f"Domain: {self.domain}\n" f"Eval Stack: {Node._eval_stack}") if not single and not all([(idx_val - 1) >= indices[-1][idx] for idx, idx_val in enumerate(var.shape)]): raise ValueError(f"var_index {self.name} has indices which are greater than the variable shape:\n" f"\tArgs: {self.args}\n" f"\tVar shape: {var.shape}\n" f"\tNode shape: {self.var.shape}\n" f"\tIndex Upper bounds: {indices[-1]}") indices = list(map(lambda x: x.tolist() if isinstance(x, np.ndarray) else x, indices)) res = var[indices] if single else np.asarray([var[idx] for idx in indices]).reshape(out_shape) if out_shape == (1,) and len(indices) == 1: res = res[0] self.domain.set_computed(out_shape, indices) return res def __add__(self, other): return slice_op(operator.add, self, other, graph=self.graph) def __radd__(self, other): return slice_op(operator.add, other, self, graph=self.graph) def __sub__(self, other): return slice_op(operator.sub, self, other, graph=self.graph) def __rsub__(self, other): return slice_op(operator.sub, other, self, graph=self.graph) def __pow__(self, other): return slice_op(builtins.pow, self, other, graph=self.graph) def __rpow__(self, other): return slice_op(builtins.pow, other, self, graph=self.graph) def __mul__(self, other): return slice_op(operator.mul, self, other, graph=self.graph) def __rmul__(self, other): return slice_op(operator.mul, other, self, graph=self.graph) def __truediv__(self, other): return slice_op(operator.truediv, self, other, graph=self.graph) def __rtruediv__(self, other): return slice_op(operator.truediv, other, self, graph=self.graph) def __floordiv__(self, other): return slice_op(operator.floordiv, self, other, graph=self.graph) def __rfloordiv__(self, other): return slice_op(operator.floordiv, other, self, graph=self.graph) def __mod__(self, other): return slice_op(operator.mod, self, other, graph=self.graph) def __rmod__(self, other): return slice_op(operator.mod, other, self, graph=self.graph) def __lshift__(self, other): return slice_op(operator.lshift, self, other, graph=self.graph) def __rlshift__(self, other): return slice_op(operator.lshift, other, self, graph=self.graph) def __rshift__(self, other): return slice_op(operator.rshift, self, other, graph=self.graph) def __rrshift__(self, other): return slice_op(operator.rshift, other, self, graph=self.graph) def __and__(self, other): return slice_op(operator.and_, self, other, graph=self.graph) def __rand__(self, other): return slice_op(operator.and_, other, self, graph=self.graph) def __or__(self, other): return slice_op(operator.or_, self, other, graph=self.graph) def __ror__(self, other): return slice_op(operator.or_, other, self, graph=self.graph) def __xor__(self, other): return slice_op(operator.xor, self, other, graph=self.graph) def __rxor__(self, other): return slice_op(operator.xor, other, self, graph=self.graph) def __lt__(self, other): return slice_op(operator.lt, self, other, graph=self.graph) def __le__(self, other): return slice_op(operator.lt, other, self, graph=self.graph) def __ne__(self, other): return slice_op(operator.ne, self, other, graph=self.graph) def __gt__(self, other): return slice_op(operator.gt, self, other, graph=self.graph) def __ge__(self, other): return slice_op(operator.ge, self, other, graph=self.graph) def __repr__(self): return "<var_index name=%s, index=%s>" % (self.name, self.args) class slice_op(Node): """ Node representing multi-dimensional operations 
performed on a node. Parameters ---------- target : cal The multi-dimensional variable used for indexing into. idx : tuple Tuple of either integer values or index/index_op nodes. """ class func_op(Node): # pylint: disable=C0103,R0903 """ Node wrapper for stateless functions. Parameters ---------- target : callable function to evaluate the node args : tuple positional arguments passed to the target kwargs : dict keywoard arguments passed to the target """ def __getitem__(self, key): return self def nodeop(target=None, **kwargs): """ Decorator for creating nodes from functions. """ # This is called when the decorator is used with arguments if target is None: return functools.partial(nodeop, **kwargs) # This is called when the decorator is used without arguments return _wrapper #pylint: disable=C0103 abs_ = nodeop(builtins.abs) dict_ = nodeop(builtins.dict) help_ = nodeop(builtins.help) min_ = nodeop(builtins.min) setattr_ = nodeop(builtins.setattr) all_ = nodeop(builtins.all) dir_ = nodeop(builtins.dir) hex_ = nodeop(builtins.hex) next_ = nodeop(builtins.next) slice_ = nodeop(builtins.slice) any_ = nodeop(builtins.any) divmod_ = nodeop(builtins.divmod) id_ = nodeop(builtins.id) object_ = nodeop(builtins.object) sorted_ = nodeop(builtins.sorted) ascii_ = nodeop(builtins.ascii) enumerate_ = nodeop(builtins.enumerate) input_ = nodeop(builtins.input) oct_ = nodeop(builtins.oct) staticmethod_ = nodeop(builtins.staticmethod) bin_ = nodeop(builtins.bin) eval_ = nodeop(builtins.eval) int_ = nodeop(builtins.int) open_ = nodeop(builtins.open) str_ = nodeop(builtins.str) bool_ = nodeop(builtins.bool) exec_ = nodeop(builtins.exec) isinstance_ = nodeop(builtins.isinstance) ord_ = nodeop(builtins.ord) sum_ = nodeop(builtins.sum) bytearray_ = nodeop(builtins.bytearray) filter_ = nodeop(builtins.filter) issubclass_ = nodeop(builtins.issubclass) pow_ = nodeop(builtins.pow) super_ = nodeop(builtins.super) bytes_ = nodeop(builtins.bytes) float_ = nodeop(builtins.float) iter_ = nodeop(builtins.iter) print_ = nodeop(builtins.print) tuple_ = nodeop(builtins.tuple) callable_ = nodeop(builtins.callable) format_ = nodeop(builtins.format) len_ = nodeop(builtins.len) property_ = nodeop(builtins.property) type_ = nodeop(builtins.type) chr_ = nodeop(builtins.chr) frozenset_ = nodeop(builtins.frozenset) list_ = nodeop(builtins.list) range_ = nodeop(builtins.range) vars_ = nodeop(builtins.vars) classmethod_ = nodeop(builtins.classmethod) getattr_ = nodeop(builtins.getattr) locals_ = nodeop(builtins.locals) repr_ = nodeop(builtins.repr) zip_ = nodeop(builtins.zip) compile_ = nodeop(builtins.compile) globals_ = nodeop(builtins.globals) map_ = nodeop(builtins.map) reversed_ = nodeop(builtins.reversed) complex_ = nodeop(builtins.complex) hasattr_ = nodeop(builtins.hasattr) max_ = nodeop(builtins.max) round_ = nodeop(builtins.round) delattr_ = nodeop(builtins.delattr) hash_ = nodeop(builtins.hash) memoryview_ = nodeop(builtins.memoryview) set_ = nodeop(builtins.set) add = nodeop(operator.add) and_ = nodeop(operator.and_) attrgetter = nodeop(operator.attrgetter) concat = nodeop(operator.concat) contains = nodeop(operator.contains) countOf = nodeop(operator.countOf) delitem = nodeop(operator.delitem) eq = nodeop(operator.eq) floordiv = nodeop(operator.floordiv) ge = nodeop(operator.ge) getitem = nodeop(operator.getitem) gt = nodeop(operator.gt) index = nodeop(operator.index) indexOf = nodeop(operator.indexOf) inv = nodeop(operator.inv) invert = nodeop(operator.invert) ior = nodeop(operator.ior) ipow = nodeop(operator.ipow) 
irshift = nodeop(operator.irshift) is_ = nodeop(operator.is_) is_not = nodeop(operator.is_not) itemgetter = nodeop(operator.itemgetter) le = nodeop(operator.le) length_hint = nodeop(operator.length_hint) lshift = nodeop(operator.lshift) lt = nodeop(operator.lt) matmul = nodeop(operator.matmul) methodcaller = nodeop(operator.methodcaller) mod = nodeop(operator.mod) mul = nodeop(operator.mul) ne = nodeop(operator.ne) neg = nodeop(operator.neg) not_ = nodeop(operator.not_) or_ = nodeop(operator.or_) pos = nodeop(operator.pos) rshift = nodeop(operator.rshift) setitem = nodeop(operator.setitem) sub = nodeop(operator.sub) truediv = nodeop(operator.truediv) truth = nodeop(operator.truth) xor = nodeop(operator.xor) import_ = nodeop(importlib.import_module)
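# The evaluate()/context protocol above amounts to memoised, demand-driven
# evaluation: a node materialises its arguments through a shared context dict,
# then caches its own value there. A self-contained toy version of that idea
# (an illustration only, not polymath's API):
class TinyNode:
    def __init__(self, fn, *args):
        self.fn, self.args = fn, args

    def evaluate(self, context):
        if self in context:          # memoised, like `if self in context` above
            return context[self]
        vals = [a.evaluate(context) if isinstance(a, TinyNode) else a
                for a in self.args]  # evaluate parents first
        context[self] = self.fn(*vals)
        return context[self]

x = TinyNode(lambda: 3)
y = TinyNode(lambda a, b: a + b, x, 4)
assert y.evaluate({}) == 7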
from django.db import models from django.utils.translation import ugettext_lazy as _ # Create your models here.
"""Build the C client docs. """ from __future__ import with_statement import os import shutil import socket import subprocess import time import urllib2 def version(): """Get the driver version from doxygenConfig. """ with open("doxygenConfig") as f: for line in f.readlines(): if line.startswith("PROJECT_NUMBER"): return line.split("=")[1].strip() if __name__ == "__main__": main()
import glm import math from lib.opengl import RenderSettings
#!/usr/bin/env python3 import argparse import json import os import statistics from collections import defaultdict from tools.stats.s3_stat_parser import ( get_previous_reports_for_branch, Report, Version2Report, ) from typing import cast, DefaultDict, Dict, List, Any from urllib.request import urlopen SLOW_TESTS_FILE = ".pytorch-slow-tests.json" SLOW_TEST_CASE_THRESHOLD_SEC = 60.0 RELATIVE_DIFFERENCE_THRESHOLD = 0.1 IGNORED_JOBS = ["asan", "periodic"] if __name__ == "__main__": main()
#!/usr/bin/env python3 # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved. import itertools import logging import numpy as np import scipy as sp import torch from ml.rl.evaluation.cpe import CpeEstimate from ml.rl.evaluation.evaluation_data_page import EvaluationDataPage logger = logging.getLogger(__name__) logger.setLevel(logging.INFO)
# -*- coding: utf-8 -*- # @Author: # @Create Date: 2019-08-03 10:48:30 # @Last Modified by: # @Last Modified time: 2019-08-03 10:53:15 import copy import random from typing import List
import numpy as np import sklearn import pandas as pd import scipy.spatial.distance as ssd from scipy.cluster import hierarchy from scipy.stats import chi2_contingency from sklearn.base import BaseEstimator from sklearn.ensemble import RandomForestClassifier from sklearn.feature_extraction.text import CountVectorizer from sklearn.feature_selection import SelectKBest, SelectorMixin from sklearn.pipeline import Pipeline def get_fs_pipeline(k, threshold, random_state=0): """ Creates feature selection pipeline Parameters ---------- k - the k parameter for the SelectKBest features function threshold - clustering threshold for the Hierarchial clustering random_state - random state for the RandomForestClassifier. Deafult value: 0 Returns ---------- pipeline - feature selection pipeline """ pipeline = Pipeline(steps=[('vectorize', CountVectorizer(lowercase=False, binary=True)), ('k_best', SelectKBest(score_func=sklearn.feature_selection.chi2, k=k)), ('cluster', SelectHierarchicalClustering(threshold=threshold)), ('rf', RandomForestClassifier(random_state=random_state))]) return pipeline
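# Hypothetical usage of get_fs_pipeline (a sketch: the toy corpus, labels and
# parameter values are made up, and the SelectHierarchicalClustering
# transformer referenced above must be defined elsewhere in this module).
docs = ["f1 f2 f3", "f1 f3", "f4 f5", "f4 f5 f6"]
labels = [0, 0, 1, 1]
pipeline = get_fs_pipeline(k=4, threshold=0.9)
pipeline.fit(docs, labels)
print(pipeline.predict(["f1 f3"]))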
from bs4 import BeautifulSoup import requests import re from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions as EC from selenium.common.exceptions import TimeoutException from selenium.webdriver.common.by import By from selenium import webdriver from selenium.webdriver.common.keys import Keys import time from selenium.webdriver.common.action_chains import ActionChains from selenium.webdriver.common.touch_actions import TouchActions from selenium.common.exceptions import TimeoutException URL = 'https://shopping.thinkwithgoogle.com' EXAMPLES = ["Demonstrate unexpected use-case", "Demonstrate google search", "Demonstrate search on thinkwithgoogle", "Demonstrate search on WebDriverWait", "Demonstrate search on thinkwithgoogle search result", "Download and extract additional data", "Demonstrate maximizing screen", "Demonstrate mouse actions for Chrome", "Demonstrate navigation"] if __name__ == '__main__': while(True): printSelection() choice = input('Enter choice: ') try: choice = int(choice) except ValueError: print('Invalid input, stop program') break if(choice not in range(0,9)): print('Invalid input, stop program') break run(int(choice), URL)
#!/usr/bin/env python3 #****************************************************************************** # (C) 2018, Stefan Korner, Austria * # * # The Space Python Library is free software; you can redistribute it and/or * # modify it under under the terms of the MIT License as published by the * # Massachusetts Institute of Technology. * # * # The Space Python Library is distributed in the hope that it will be useful, * # but WITHOUT ANY WARRANTY; without even the implied warranty of * # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the MIT License * # for more details. * #****************************************************************************** # Unit Tests * #****************************************************************************** import sys from UTIL.SYS import Error, LOG, LOG_INFO, LOG_WARNING, LOG_ERROR import UTIL.SYS, UTIL.TASK, UTIL.TCP ############# # constants # ############# LINEBUFFERLEN = 256 ########### # classes # ########### # ============================================================================= ############# # functions # ############# # ----------------------------------------------------------------------------- def initConfiguration(): """initialise the system configuration""" UTIL.SYS.s_configuration.setDefaults([ ["HOST", "127.0.0.1"], ["SERVER_PORT", "1234"]]) # ----------------------------------------------------------------------------- def createServer(): """create the TCP server""" server = TCPserver(portNr=int(UTIL.SYS.s_configuration.SERVER_PORT)) if not server.openConnectPort(UTIL.SYS.s_configuration.HOST): sys.exit(-1) # activate zyclic idle function idleFunction() # ----------------------------------------------------------------------------- ######## # main # ######## if __name__ == "__main__": # initialise the system configuration initConfiguration() # initialise the console handler consoleHandler = UTIL.TASK.ConsoleHandler() # initialise the model modelTask = UTIL.TASK.ProcessingTask(isParent=True) # register the console handler modelTask.registerConsoleHandler(consoleHandler) # create the TCP server LOG("Open the TCP server") createServer() # start the tasks LOG("start modelTask...") modelTask.start()
from __future__ import unicode_literals import logging import cfgv import pytest import pre_commit.constants as C from pre_commit.clientlib import check_type_tag from pre_commit.clientlib import CONFIG_HOOK_DICT from pre_commit.clientlib import CONFIG_REPO_DICT from pre_commit.clientlib import CONFIG_SCHEMA from pre_commit.clientlib import DEFAULT_LANGUAGE_VERSION from pre_commit.clientlib import MANIFEST_SCHEMA from pre_commit.clientlib import MigrateShaToRev from pre_commit.clientlib import validate_config_main from pre_commit.clientlib import validate_manifest_main from testing.fixtures import sample_local_config def test_local_hooks_with_rev_fails(): config_obj = {'repos': [dict(sample_local_config(), rev='foo')]} with pytest.raises(cfgv.ValidationError): cfgv.validate(config_obj, CONFIG_SCHEMA) def test_config_with_local_hooks_definition_passes(): config_obj = {'repos': [sample_local_config()]} cfgv.validate(config_obj, CONFIG_SCHEMA) def test_config_schema_does_not_contain_defaults(): """Due to the way our merging works, if this schema has any defaults they will clobber potentially useful values in the backing manifest. #227 """ for item in CONFIG_HOOK_DICT.items: assert not isinstance(item, cfgv.Optional) def test_migrate_sha_to_rev_dont_specify_both(): with pytest.raises(cfgv.ValidationError) as excinfo: MigrateShaToRev().check({'repo': 'a', 'sha': 'b', 'rev': 'c'}) msg, = excinfo.value.args assert msg == 'Cannot specify both sha and rev' def test_minimum_pre_commit_version_failing(): with pytest.raises(cfgv.ValidationError) as excinfo: cfg = {'repos': [], 'minimum_pre_commit_version': '999'} cfgv.validate(cfg, CONFIG_SCHEMA) assert str(excinfo.value) == ( '\n' '==> At Config()\n' '==> At key: minimum_pre_commit_version\n' '=====> pre-commit version 999 is required but version {} is ' 'installed. Perhaps run `pip install --upgrade pre-commit`.'.format( C.VERSION, ) ) def test_minimum_pre_commit_version_passing(): cfg = {'repos': [], 'minimum_pre_commit_version': '0'} cfgv.validate(cfg, CONFIG_SCHEMA)
[ 6738, 11593, 37443, 834, 1330, 28000, 1098, 62, 17201, 874, 198, 198, 11748, 18931, 198, 198, 11748, 30218, 70, 85, 198, 11748, 12972, 9288, 198, 198, 11748, 662, 62, 41509, 13, 9979, 1187, 355, 327, 198, 6738, 662, 62, 41509, 13, 163...
2.611888
858
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # IkaLog # ====== # Copyright (C) 2015 Takeshi HASEGAWA # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import gettext import wx import wx.lib.scrolledpanel import ikalog.outputs from ikalog.ui.events import * from ikalog.ui.panel import * from ikalog.ui import VideoCapture from ikalog.utils import * _ = Localization.gettext_translation('IkaUI', fallback=True).gettext
[ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 18, 198, 2, 532, 9, 12, 19617, 25, 3384, 69, 12, 23, 532, 9, 12, 198, 2, 198, 2, 220, 314, 4914, 11187, 198, 2, 220, 29335, 28, 198, 2, 220, 15069, 357, 34, 8, 1853, 33687, 5303, ...
3.151515
297
from setuptools import setup, find_packages with open("README.md", 'r',encoding="utf-8") as f: long_description = f.read() setup( name='LineBot', version='0.1.0', description='Simple-LINELIB', long_description=long_description, author='Tolg KR', author_email='tolgkr@cybertkr.com', url='https://github.com/CyberTKR/Simple-LINELIB', packages=find_packages(include=['CyberTK', 'CyberTK.*']), install_requires=[ 'httpx==0.19.0', 'requests', 'thrift', 'CyberTKAPI' ], extras_require={'httpx': ['http2']} )
[ 6738, 900, 37623, 10141, 1330, 9058, 11, 1064, 62, 43789, 198, 198, 4480, 1280, 7203, 15675, 11682, 13, 9132, 1600, 705, 81, 3256, 12685, 7656, 2625, 40477, 12, 23, 4943, 355, 277, 25, 198, 220, 220, 220, 890, 62, 11213, 796, 277, 1...
2.220532
263
#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright(c) 2019 Nippon Telegraph and Telephone Corporation # Filename: CgwshDeviceDriverSetParameterECDB.py ''' Parameter module for Cgwsh driver configuration ''' import GlobalModule from EmCommonLog import decorater_log from DriverSetParameterECDB import DriverSetParameterECDB
[ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 201, 198, 2, 532, 9, 12, 19617, 25, 3384, 69, 12, 23, 532, 9, 12, 201, 198, 2, 15069, 7, 66, 8, 13130, 399, 3974, 261, 21821, 290, 44735, 10501, 201, 198, 2, 7066, 12453, 25, 327, 7...
3.099099
111
#!/usr/bin/env python3.6 import os import subprocess import json import argparse import zipfile import shutil import requests import datetime import re import operator import unicodedata # global list of error messages to keep track of all error msgs errorMessages = [] """ Collection of Common Functions used by Build Scripts A collection of common functions shared by each individual build scripts. """ def get(url, usr, pwd): """ HTTP/HTTPS GET requests using external Python module requests @param url the url of the REST call @param usr the functional username for the docker registry @param pwd the password for the docker registry functional user @return a JSON response """ headers = { 'Accept': 'application/vnd.docker.distribution.manifest.v1+json', } # TEMP: Remove the suppressed verification once the docker cert location # is figured out and we specify it in REQUESTS_CA_BUNDLE return requests.get(url, auth=(usr, pwd), headers=headers, verify=False) def get_latest_tag(registry_path, usr, pwd): """ Retrieve the latest version of an image based on its tags: vX-YYYYMMDD-HHmm. The latest, by definition, is defined to be the one with the highest version number (vX) and the latest timestamp (YYYYMMDD-HHmm). @param registry_path docker registry path @param usr the functional username for the docker registry @param pwd the password for the docker registry functional user @return the latest image tag """ tag_list_url = registry_path + '/tags/list' request = get(tag_list_url, usr, pwd) tag_list = json.loads(request.text) for tag in tag_list['tags']: if '-' not in tag: continue str_version, str_dash, str_timestamp = tag.partition('-') tag_format="%Y%m%d-%H%M" try: dt_timestamp = datetime.datetime.strptime(str_timestamp, tag_format) except ValueError: continue try: latest_version latest_timestamp latest_tag except NameError: latest_version = str_version latest_timestamp = dt_timestamp latest_tag = tag else: if latest_version > str_version: continue elif latest_version < str_version: latest_version = str_version latest_timestamp = dt_timestamp latest_tag = tag else: if latest_timestamp < dt_timestamp: latest_timestamp = dt_timestamp latest_tag = tag return latest_tag def unzip(zip_file, to_dir): """ Generic unzip function for extracting zip files @param zip_file the zip file to be extracted @param to_dir the destination directory to extract the zip file to """ with zipfile.ZipFile(zip_file, "r") as zip_ref: zip_ref.extractall(to_dir) zip_ref.close() def create_dockerfile(dockerfile_parent_dir, docker_url, image_namespace, image_name, image_tag_latest): """ Creates a dockerfile using the correct docker registry URL associated with the datacenter this script is being run on :param str dockerfile_parent_dir: path to the parent directory for the Dockerfile :param str docker_url: the docker registry VIP accessible from the mesos slaves :param str image_namespace: the name of the image :param str image_name: the name of the image :param str image_tag_latest: the latest version tag of the base image :returns: None """ # Form the path for the Dockerfile based on the parent of the caller script dockerfile_path = os.path.join(dockerfile_parent_dir, "Dockerfile") # Create the Dockerfile dockerfile = open(dockerfile_path, "w+") # Format the FROM command dockerfile_from_cmd = "FROM " + docker_url + image_namespace + "/" + image_name + ":" + image_tag_latest # Write the FROM command string to the Dockerfile dockerfile.write(dockerfile_from_cmd) # Close the open file instance dockerfile.close() def 
set_docker_client_timeout(): """ Sets the DOCKER_CLIENT_TIMEOUT environment variable to 300 """ os.environ['DOCKER_CLIENT_TIMEOUT'] = '300' print("The timeout set for docker client: " + os.environ['DOCKER_CLIENT_TIMEOUT'] + " seconds") # ======================= verify bundle Structure =============================================== def openJSONfile(jsonFile): """ Function to open a JSON file @param jsonFile path to the JSON file @return the loaded JSON file """ try: with open(jsonFile) as json_data_file: data = json.load(json_data_file) except: addToErrorMessages("The specified JSON file is not valid: " + jsonFile) raise return data def directoryToJSON(directory): """ Function to convert objects in a given directory into JSON form. The parent object is always a dict, it may contain children if type=directory. A directory is composed of a list and may contain files and/or directories. @param directory directory to convert @return JSON representation of a directory """ d = {'name': os.path.basename(directory)} # the parent object is dict if os.path.isdir(directory): d['type'] = "directory" # directory may have children # the children in a directory is a list composed of more files/directories d['children'] = [directoryToJSON(os.path.join(directory,x)) for x in os.listdir(directory)] else: d['type'] = "file" return d def verifyBundleStructure(expected, actual, currentPath): """ Function to verify if an uploaded bundle follows IBM defined structure @param expected the JSON representation of the IBM defined structure @param actual the JSON representation of the actual structure of the uploaded bundle @param currentPath the path currently being checked (used to build paths recursively for error msg) @return True if structure of the uploaded bundle follows IBM defined structure. False otherwise. """ isMatched = True if type(expected) is dict: if matches(expected,actual): # a matching file or directory was found if expected['type'] == 'directory': currentPath = currentPath + actual['name'] + "/" if expected['children'] == "_any": isMatched = isMatched & True # if the contents of the directory can be anything then do no further checking else: isMatched = isMatched & verifyBundleStructure(expected['children'], actual['children'], currentPath) # do further checking else: # a matching file or directory was not found if expected['fail-if-not-found'] == "yes": logBundleStructureErrorMessage(expected, currentPath) return False if type(expected) is list: for k in range(0,len(expected)): isMatched = isMatched & verifyActualContainsExpectedElement(actual, expected[k], currentPath, isMatched) return isMatched def logBundleStructureErrorMessage(expected, currentPath): """ Function to adds error messages to the global array. @param expected the expected element @param currentPath the current path we are on that has the missing file or directory """ addToErrorMessages("A "+ expected['type'] +" is missing from the path: \"" + currentPath + "\"") addToErrorMessages(expected['error-message-if-fails']) return def matches(expectedElement, actualElement): """ Function to check if files/directories match. They must have the same name and must both be the same type. @param expectedElement the expected element. 
           May be defined by regular expression
    @param actualElement the actual element
    """
    ret = False
    if re.fullmatch(expectedElement['name'], actualElement['name']) is not None and expectedElement['type'] == actualElement['type']:
        ret = True
    return ret

def verifyActualContainsExpectedElement(actual, expectedElement, currentPath, isMatched):
    """
    Function to verify if an actual list of objects contains an expected element.
    Helper method to verifyBundleStructure.
    @param actual list of the actual files and directories in the bundle
    @param expectedElement the expected element to find in the bundle
    @param currentPath the path currently being checked (used to build paths recursively for error msg)
    @param isMatched (only used for recursive calls)
    @return True if the list of actual objects contain the expected element
    """
    # if actual is a dict then verify it and its children
    if type(actual) is dict:
        isMatched = isMatched & verifyBundleStructure(expectedElement, actual, currentPath)
    # if actual is a list then find out if they match anywhere, if so get the matched position
    elif type(actual) is list:
        matchedPosition = -1
        for i in range(0, len(actual)):
            if matches(expectedElement, actual[i]):
                matchedPosition = i
                break
        if matchedPosition != -1:  # if they match then verify their children too
            isMatched = isMatched & verifyBundleStructure(expectedElement, actual[matchedPosition], currentPath)
        else:  # if they don't match then log the error msg and return false
            if expectedElement['fail-if-not-found'] == "yes":  # log error msg and return false if needed
                isMatched = False
                logBundleStructureErrorMessage(expectedElement, currentPath)
    return isMatched

def addToErrorMessages(errorMessage):
    """
    Function to add error messages to the global list of errorMessages
    @param errorMessage the error message to add
    """
    print(errorMessage)
    global errorMessages
    errorMessages.extend([errorMessage])
    return

def unzipRecursively(zipFileName, directoryToUnzipTo):
    """
    Function to unzip a ZIP file recursively
    @param zipFileName the zip file to be extracted
    @param directoryToUnzipTo the destination directory to extract the zip file to
    """
    if zipFileName.endswith(".zip"):  # check if it's a .zip
        unzip(zipFileName, directoryToUnzipTo)
        os.remove(zipFileName)
        for x in os.listdir(directoryToUnzipTo):
            subdirectory = os.path.join(directoryToUnzipTo, os.path.splitext(x)[0])
            subfile = os.path.join(directoryToUnzipTo, x)
            unzipRecursively(subfile, subdirectory)
    return

def zipFileIsGood(filePath):
    """
    Function to test if a ZIP file is good or bad
    @param filePath the zip file to be tested
    @return True if the ZIP file is good. False otherwise.
""" ret = True try: the_zip_file = zipfile.ZipFile(filePath) badFile = the_zip_file.testzip() if badFile is not None: ret = False else: ret = True except: ret = False return ret def verifyZipFile(zipDirectory, nameOfBundle): """ Function to verify if an uploaded bundle is: 1) a valid zip file 2) follows IBM defined structure @param zipDirectory where the bundle ZIP is located @param nameOfBundle name of the bundle ZIP file """ print ('Validating bundle structure...') bundleIsGood = True bundleZip = os.path.join(zipDirectory, nameOfBundle) if zipFileIsGood(bundleZip): try: # copy bundle into new working directory ----------------------------------------------------------- directoryToUnzipTo = os.path.join(zipDirectory, "temp") if not os.path.exists(directoryToUnzipTo): os.makedirs(directoryToUnzipTo) shutil.copy(bundleZip, os.path.join(directoryToUnzipTo, nameOfBundle)) # unzip the bundle ---------------------------------------------------------------------------------- unzipRecursively(os.path.join(directoryToUnzipTo, nameOfBundle), os.path.join(directoryToUnzipTo, os.path.splitext(nameOfBundle)[0])) # verify structure of bundle ------------------------------------------------------------------------ # check package stucture expectedPackageStructure = openJSONfile(os.path.join(zipDirectory, "bundle-definition.json")) actualBundleStructure = directoryToJSON(directoryToUnzipTo) # convert the unzipped directory to JSON file bundleIsGood = verifyBundleStructure(expectedPackageStructure, actualBundleStructure, "") if not bundleIsGood: addToErrorMessages("The uploaded bundle does not meet predefined structure. Could not proceed with deployment.") # clean up unzipped stuff and package structure Json ------------------------------------------------- shutil.rmtree(directoryToUnzipTo) except: addToErrorMessages("Exception occurred while verifying bundle structure. Could not proceed with deployment.") bundleIsGood = False else: bundleIsGood = False addToErrorMessages("The uploaded bundle could not be unzipped. Could not proceed with deployment.") # out put report value , join all the messages together print ("report=[" + ". ".join(str(x) for x in errorMessages) + "]") return bundleIsGood
[ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 18, 13, 21, 198, 198, 11748, 28686, 198, 11748, 850, 14681, 198, 11748, 33918, 198, 11748, 1822, 29572, 198, 11748, 19974, 7753, 198, 11748, 4423, 346, 198, 11748, 7007, 198, 11748, 4818, 807...
2.72452
5,053
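The tag-selection rule in get_latest_tag above is easy to misread, so here is a small illustration. This is a sketch, not part of the build script, and the tag values are made up:

# Made-up tags run through get_latest_tag's ordering rule:
tags = ['latest', 'v1-20200101-0900', 'v2-20190601-1200', 'v2-20190601-1530']
# 'latest' is skipped (no '-' separator); 'v2' beats 'v1' on the string
# comparison; within 'v2' the newer YYYYMMDD-HHmm timestamp wins.
# Expected result: 'v2-20190601-1530'.
# Note the version compare is lexicographic, so 'v10' would sort below 'v2'.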
from __future__ import absolute_import from __future__ import division from __future__ import print_function import _init_paths import os import json import cv2 import cv2.aruco as aruco import numpy as np import sys import rospy from std_msgs.msg import Bool from std_msgs.msg import Float64MultiArray from sensor_msgs.msg import Image, CameraInfo from cv_bridge import CvBridge, CvBridgeError import message_filters import torch from external.nms import soft_nms from opts import opts from logger import Logger from utils.utils import AverageMeter from datasets.dataset_factory import dataset_factory from detectors.detector_factory import detector_factory # transformation from the robot base to aruco tag M_BL = np.array([[1., 0., 0., 0.30000], [0., 1., 0., 0.32000], [0., 0., 1., -0.0450], [0., 0., 0., 1.00000]]) # default transformation from the camera to aruco tag default_M_CL = np.array([[-0.07134498, -0.99639369, 0.0459293, -0.13825178], [-0.8045912, 0.03027403, -0.59305689, 0.08434352], [ 0.58952768, -0.07926594, -0.8038495, 0.66103522], [ 0., 0., 0., 1. ]] ) # camera intrinsic matrix of Realsense D435 cameraMatrix = np.array([[607.47165, 0.0, 325.90064], [0.0, 606.30420, 240.91934], [0.0, 0.0, 1.0]]) # distortion of Realsense D435 distCoeffs = np.array([0.08847, -0.04283, 0.00134, -0.00102, 0.0]) # initialize GKNet Detector opt = opts().parse() Dataset = dataset_factory[opt.dataset] opt = opts().update_dataset_info_and_set_heads(opt, Dataset) print(opt) Detector = detector_factory[opt.task] detector = Detector(opt) # Publisher of perception result pub_res = rospy.Publisher('/result', Float64MultiArray, queue_size=10) def project(pixel, depth_image, M_CL, M_BL, cameraMatrix): ''' project 2d pixel on the image to 3d by depth info :param pixel: x, y :param M_CL: trans from camera to aruco tag :param cameraMatrix: camera intrinsic matrix :param depth_image: depth image :param depth_scale: depth scale that trans raw data to mm :return: q_B: 3d coordinate of pixel with respect to base frame ''' depth = depth_image[pixel[1], pixel[0]] # if the depth of the detected pixel is 0, check the depth of its neighbors # by counter-clock wise nei_range = 1 while depth == 0: for delta_x in range(-nei_range, nei_range + 1): for delta_y in range(-nei_range, nei_range + 1): nei = [pixel[0] + delta_x, pixel[1] + delta_y] depth = depth_image[nei[1], nei[0]] if depth != 0: break if depth != 0: break nei_range += 1 pxl = np.linalg.inv(cameraMatrix).dot( np.array([pixel[0] * depth, pixel[1] * depth, depth])) q_C = np.array([pxl[0], pxl[1], pxl[2], 1]) q_L = np.linalg.inv(M_CL).dot(q_C) q_B = M_BL.dot(q_L) return q_B def kinect_rgbd_callback(rgb_data, depth_data): """ Save raw RGB and depth input from Kinect V1 :param rgb_data: RGB image :param depth_data: raw depth image :return: None """ try: cv_rgb = cv_bridge.imgmsg_to_cv2(rgb_data, "bgr8") cv_depth = cv_bridge.imgmsg_to_cv2(depth_data, "32FC1") cv_rgb_arr = np.array(cv_rgb, dtype=np.uint8) cv_depth_arr = np.array(cv_depth, dtype=np.float32) # cv_depth_arr = np.nan_to_num(cv_depth_arr) cv2.imshow("Depth", cv_depth) cv2.imshow("RGB", cv_rgb) img = cv_rgb_arr.copy() depth_raw = cv_depth_arr.copy() gray = img.astype(np.uint8) depth = (depth_raw * 1000).astype(np.uint8) # get the current transformation from the camera to aruco tag M_CL, corners = get_M_CL_info(gray, img, False) # remove aruco tag from input image to avoid mis-detection if corners is not None: img_wo_at = aruco_tag_remove(img, corners) # replace blue channel with the depth channel inp_image = 
pre_process(img_wo_at, depth) # pass the image into the network ret = detector.run(inp_image[:, :, :]) ret = ret["results"] loc_ori = KpsToGrasppose(ret, img, depth_raw, M_CL, M_BL, cameraMatrix) pub_res.publish(loc_ori) except CvBridgeError as e: print(e) if __name__ == '__main__': # initialize ros node rospy.init_node("Static_grasping") # Bridge to convert ROS Image type to OpenCV Image type cv_bridge = CvBridge() cv2.WITH_QT = False # Get camera calibration parameters cam_param = rospy.wait_for_message('/camera/rgb/camera_info', CameraInfo, timeout=None) # Subscribe to rgb and depth channel image_sub = message_filters.Subscriber("/camera/rgb/image_rect_color", Image) depth_sub = message_filters.Subscriber("/camera/depth_registered/image", Image) ts = message_filters.ApproximateTimeSynchronizer([image_sub, depth_sub], 1, 0.1) ts.registerCallback(kinect_rgbd_callback) rospy.spin()
[ 6738, 11593, 37443, 834, 1330, 4112, 62, 11748, 198, 6738, 11593, 37443, 834, 1330, 7297, 198, 6738, 11593, 37443, 834, 1330, 3601, 62, 8818, 198, 198, 11748, 4808, 15003, 62, 6978, 82, 198, 198, 11748, 28686, 198, 11748, 33918, 198, 11...
2.203883
2,369
import numpy as np
import cv2
import random

def preprocess(img, img_size, padding=True):
    """Pad an image to a square and resize it to the target size.

    Args:
        img (np.ndarray): input image
        img_size (int, list, tuple): target size, e.g. 224, (224, 224) or [224, 224]
        padding (bool): pad the image to a square before resizing, to prevent
            distortion. Defaults to True.

    Returns:
        np.ndarray: image at the target size
    """
    if padding:
        height, width, _ = img.shape
        delta = height - width
        if delta > 0:
            # taller than wide: pad the width on both sides with white
            img = np.pad(img, [[0, 0], [delta // 2, delta // 2], [0, 0]],
                         mode='constant', constant_values=255)
        else:
            # wider than tall: pad the height on both sides with white
            img = np.pad(img, [[-delta // 2, -delta // 2], [0, 0], [0, 0]],
                         mode='constant', constant_values=255)
    if isinstance(img_size, int):
        img_size = (img_size, img_size)
    return cv2.resize(img, img_size)

if __name__ == '__main__':
    augmentation_test()
[ 11748, 299, 32152, 355, 45941, 220, 198, 11748, 269, 85, 17, 198, 11748, 4738, 198, 4299, 662, 14681, 7, 9600, 11, 9600, 62, 7857, 11, 39231, 28, 17821, 2599, 198, 220, 220, 220, 13538, 17912, 49736, 60, 628, 220, 220, 220, 943, 145...
2.214815
405
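A minimal usage sketch for preprocess above; the input array is synthetic and only the shapes matter:

import numpy as np

img = np.zeros((300, 200, 3), dtype=np.uint8)  # taller than wide
out = preprocess(img, 224)  # pads width to 300x300 with white, then resizes
assert out.shape == (224, 224, 3)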
## @file # generate capsule # # Copyright (c) 2007-2017, Intel Corporation. All rights reserved.<BR> # # This program and the accompanying materials # are licensed and made available under the terms and conditions of the BSD License # which accompanies this distribution. The full text of the license may be found at # http://opensource.org/licenses/bsd-license.php # # THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS, # WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. # ## # Import Modules # import Ffs from GenFdsGlobalVariable import GenFdsGlobalVariable import StringIO from struct import pack import os from Common.Misc import SaveFileOnChange import uuid ## base class for capsule data # # ## FFS class for capsule data # # ## FV class for capsule data # # ## FD class for capsule data # # ## AnyFile class for capsule data # # ## Afile class for capsule data # #
[ 2235, 2488, 7753, 198, 2, 7716, 27855, 198, 2, 198, 2, 220, 15069, 357, 66, 8, 4343, 12, 5539, 11, 8180, 10501, 13, 1439, 2489, 10395, 29847, 11473, 29, 198, 2, 198, 2, 220, 770, 1430, 290, 262, 19249, 5696, 198, 2, 220, 389, 11...
3.17608
301
import matplotlib.pyplot as plt import numpy as np # Read data size = [] time = [] with open("pi_linear.txt") as file: for line in file.readlines(): x, y = line.split(',') size.append(int(x.strip())) time.append(float(y.strip())) # Plot data fig, ax = plt.subplots() ax.plot(size, time) ax.set(xlabel='Num. processes', ylabel='Time (s)', title='Pi linear') #ax.grid() fig.savefig("pi_linear.png") plt.show()
[ 11748, 2603, 29487, 8019, 13, 9078, 29487, 355, 458, 83, 198, 11748, 299, 32152, 355, 45941, 198, 198, 2, 4149, 1366, 198, 7857, 796, 17635, 198, 2435, 796, 17635, 198, 4480, 1280, 7203, 14415, 62, 29127, 13, 14116, 4943, 355, 2393, 2...
2.253731
201
from typing import ClassVar, List, Optional from ...constants import ApiKey, ErrorCode from ..base import ResponseData
[ 6738, 19720, 1330, 5016, 19852, 11, 7343, 11, 32233, 198, 198, 6738, 2644, 9979, 1187, 1330, 5949, 72, 9218, 11, 13047, 10669, 198, 6738, 11485, 8692, 1330, 18261, 6601, 628, 628 ]
3.967742
31
import mock import pytest import py_zipkin.storage
[ 11748, 15290, 198, 11748, 12972, 9288, 198, 198, 11748, 12972, 62, 13344, 5116, 13, 35350, 628, 628, 628, 628, 628, 628, 628 ]
2.954545
22
from pywps import Process, LiteralInput, ComplexInput, ComplexOutput from pywps import Format import logging LOGGER = logging.getLogger('PYWPS') import matplotlib # no X11 server ... must be run first # https://github.com/matplotlib/matplotlib/issues/3466/ matplotlib.use('Agg') import matplotlib.pylab as plt import cartopy.crs as ccrs from netCDF4 import Dataset AIR_DS = 'https://www.esrl.noaa.gov/psd/thredds/fileServer/Datasets/ncep.reanalysis.derived/surface/air.mon.ltm.nc'
[ 198, 6738, 12972, 86, 862, 1330, 10854, 11, 25659, 1691, 20560, 11, 19157, 20560, 11, 19157, 26410, 198, 6738, 12972, 86, 862, 1330, 18980, 198, 198, 11748, 18931, 198, 25294, 30373, 796, 18931, 13, 1136, 11187, 1362, 10786, 47, 56, 54,...
2.757062
177
from .composed import List from .composed import IntList
[ 6738, 764, 5589, 1335, 1330, 7343, 198, 6738, 764, 5589, 1335, 1330, 2558, 8053 ]
4
14
# -*- coding: utf-8 -*- import json import os.path import random import re from flask import Flask, send_from_directory from flask import request, abort from flaskrun.flaskrun import flask_run import datab.social_database as db app = Flask(__name__) # Regular expression to only accept certain files fileChecker = re.compile(r"(.*\.js|.*\.html|.*\.png|.*\.css|.*\.map)$") numberOfAnswers = 4 random.seed(7) if __name__ == '__main__': flask_run(app)
[ 2, 532, 9, 12, 19617, 25, 3384, 69, 12, 23, 532, 9, 12, 198, 198, 11748, 33918, 198, 11748, 28686, 13, 6978, 198, 11748, 4738, 198, 11748, 302, 198, 198, 6738, 42903, 1330, 46947, 11, 3758, 62, 6738, 62, 34945, 198, 6738, 42903, 1...
2.80117
171
""" Copyright (c) 2020 Aiven Ltd See LICENSE for details """ from astacus.common import magic, utils from astacus.common.ipc import SnapshotFile, SnapshotHash, SnapshotState from astacus.common.progress import increase_worth_reporting, Progress from pathlib import Path from typing import Optional import base64 import hashlib import logging import os import threading logger = logging.getLogger(__name__) _hash = hashlib.blake2s
[ 37811, 198, 198, 15269, 357, 66, 8, 12131, 317, 1469, 12052, 198, 6214, 38559, 24290, 329, 3307, 198, 198, 37811, 198, 198, 6738, 6468, 48628, 13, 11321, 1330, 5536, 11, 3384, 4487, 198, 6738, 6468, 48628, 13, 11321, 13, 541, 66, 1330...
3.421875
128
# Copyright 2018 Esteve Fernandez # Licensed under the Apache License, Version 2.0 from distutils import dir_util import glob import os from pathlib import Path import shutil from colcon_core.environment import create_environment_scripts from colcon_core.logging import colcon_logger from colcon_core.plugin_system import satisfies_version from colcon_core.shell import create_environment_hook from colcon_core.shell import get_command_environment from colcon_core.task import run from colcon_core.task import TaskExtensionPoint from colcon_gradle.task.gradle import get_wrapper_executable from colcon_gradle.task.gradle import GRADLE_EXECUTABLE from colcon_gradle.task.gradle import has_wrapper_executable logger = colcon_logger.getChild(__name__)
[ 2, 15069, 2864, 412, 4169, 303, 39692, 198, 2, 49962, 739, 262, 24843, 13789, 11, 10628, 362, 13, 15, 198, 198, 6738, 1233, 26791, 1330, 26672, 62, 22602, 198, 11748, 15095, 198, 11748, 28686, 198, 6738, 3108, 8019, 1330, 10644, 198, ...
3.474654
217
# Copyright 2021 Sony Semiconductors Israel, Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== import tensorflow as tf from model_compression_toolkit.common.constants import MIN_THRESHOLD, THRESHOLD def ste_ceil(x: tf.Tensor) -> tf.Tensor: """ Return the ceil values of a tensor. """ error = tf.stop_gradient(tf.math.ceil(x) - x) return error + x def ste_round(x: tf.Tensor) -> tf.Tensor: """ Return the rounded values of a tensor. """ error = tf.stop_gradient(tf.math.round(x) - x) return error + x def log2(x: tf.Tensor) -> tf.Tensor: """ Compute log2 of a tensor. """ return tf.math.log(x) / tf.math.log(2.0) def power_of_two_max(max_tensor: tf.Tensor) -> tf.Tensor: """ Compute the power of two threshold for a tensor. """ return tf.math.pow(2.0, ste_ceil(log2(tf.maximum(max_tensor, MIN_THRESHOLD)))) def calculate_delta(max_tensor: tf.Tensor, num_bits: int, signed: bool) -> tf.Tensor: """ Compute the step size for the quantization. """ return max_tensor / (2 ** (num_bits - int(signed))) def adjustable_steps(x: tf.Variable, t: float) -> tf.Tensor: """ A function to gradually quantize a float variable to an integer of values [-1, 0 ,1] Args: x: input float variable t: temperature to control quantization Returns: semi-quantized variable """ return tf.sigmoid(tf.add(x, 1) / t) + tf.sigmoid(tf.add(x, -1) / t) - 1 def ste_clip(x: [tf.Tensor, tf.Variable], max_val=1, min_val=None) -> tf.Tensor: """ clip a variable between fixed values such that min_val<=output<=max_val Args: x: input variable max_val: maximum value for clipping min_val: minimum value for clipping (defaults to -max_val) Returns: clipped variable """ min_val = -max_val if min_val is None else min_val return tf.stop_gradient(tf.math.minimum(tf.math.maximum(x, min_val), max_val) - x) + x def symmetric_quantizer(input_tensor: tf.Tensor, max_tensor: tf.Tensor, num_bits: int, signed: bool, power_of_two: bool) -> tf.Tensor: """ Quantize a tensor symmetrically. Args: input_tensor: Tensor to quantize. max_tensor: Tensor with max values to compute the threshold. num_bits: Num of bits to use. signed: Signedness of the quantization range. power_of_two: Whether the threshold should be constrained or not. Returns: A quantized tensor. """ if power_of_two: max_tensor = power_of_two_max(max_tensor) delta = calculate_delta(max_tensor, num_bits, signed) tensor_q = ste_round(input_tensor / delta) min_int = -int(signed) * (2 ** (num_bits - int(signed))) max_int = (2 ** (num_bits - int(signed))) - 1 return delta * tf.math.minimum(tf.math.maximum(tensor_q, min_int), max_int) def symmetric_constrained_quantizer(input_tensor: tf.Tensor, auxvar_tensor: tf.Variable, max_tensor: tf.Tensor, num_bits: int, signed: bool, power_of_two: bool, max_lsbs_change: int = 1) -> tf.Tensor: """ Quantize a tensor symmetrically with maximum LSBs shift. Args: input_tensor: Tensor to quantize. values of this tensor are not changed during gptq. 
auxvar_tensor: Tensor that manifests the bit shift the weight due to gptq max_tensor: Tensor with max values to compute the threshold. num_bits: Num of bits to use. signed: Signedness of the quantization range. power_of_two: Whether the threshold should be constrained or not. max_lsbs_change: maximum number of LSBs that the auxvar is allowed to change Returns: A quantized tensor. """ if power_of_two: max_tensor = power_of_two_max(max_tensor) delta = calculate_delta(max_tensor, num_bits, signed) tensor_q = ste_round(tf.stop_gradient(tf.round(input_tensor / delta)) + ste_clip(auxvar_tensor, max_val=max_lsbs_change)) min_int = -int(signed) * (2 ** (num_bits - int(signed))) max_int = (2 ** (num_bits - int(signed))) - 1 return delta * ste_clip(tensor_q, max_val=max_int, min_val=min_int)
[ 2, 15069, 33448, 10184, 311, 5314, 12920, 669, 2692, 11, 3457, 13, 1439, 2489, 10395, 13, 198, 2, 198, 2, 49962, 739, 262, 24843, 13789, 11, 10628, 362, 13, 15, 357, 1169, 366, 34156, 15341, 198, 2, 345, 743, 407, 779, 428, 2393, ...
2.357407
2,160
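A quick sanity-check sketch for symmetric_quantizer above; the input values are arbitrary, chosen only to make the arithmetic visible:

import tensorflow as tf

x = tf.constant([-1.7, -0.4, 0.0, 0.9, 2.3])
max_t = tf.reduce_max(tf.abs(x))  # per-tensor threshold, here 2.3
x_q = symmetric_quantizer(x, max_t, num_bits=4, signed=True, power_of_two=True)
# The threshold is raised to 4.0 = 2**ceil(log2(2.3)), so delta = 4/2**3 = 0.5,
# and every quantized value is a multiple of 0.5 clipped to [-4.0, 3.5].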
# -*- coding: utf-8 -*- """ pygments.lexers.tnt ~~~~~~~~~~~~~~~~~~~ Lexer for Typographic Number Theory. :copyright: Copyright 2019-2020 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ import re from pygments.lexer import Lexer from pygments.token import Text, Comment, Operator, Keyword, Name, Number, \ Punctuation, Error __all__ = ['TNTLexer']
[ 2, 532, 9, 12, 19617, 25, 3384, 69, 12, 23, 532, 9, 12, 198, 37811, 198, 220, 220, 220, 12972, 11726, 13, 2588, 364, 13, 83, 429, 198, 220, 220, 220, 220, 27156, 4907, 93, 628, 220, 220, 220, 17210, 263, 329, 17134, 6826, 7913, ...
2.819444
144
from django.urls import path from contacts.views import ( ContactsListView, CreateContactView, ContactDetailView, UpdateContactView, RemoveContactView, GetContactsView, AddCommentView, UpdateCommentView, DeleteCommentView, AddAttachmentsView, DeleteAttachmentsView) app_name = 'contacts' urlpatterns = [ path('list/', ContactsListView.as_view(), name='list'), path('create/', CreateContactView.as_view(), name='add_contact'), path('<int:pk>/view/', ContactDetailView.as_view(), name="view_contact"), path('<int:pk>/edit/', UpdateContactView.as_view(), name="edit_contact"), path('<int:pk>/delete/', RemoveContactView.as_view(), name="remove_contact"), path('get/list/', GetContactsView.as_view(), name="get_contacts"), path('comment/add/', AddCommentView.as_view(), name="add_comment"), path('comment/edit/', UpdateCommentView.as_view(), name="edit_comment"), path('comment/remove/', DeleteCommentView.as_view(), name="remove_comment"), path('attachment/add/', AddAttachmentsView.as_view(), name="add_attachment"), path('attachment/remove/', DeleteAttachmentsView.as_view(), name="remove_attachment"), ]
[ 6738, 42625, 14208, 13, 6371, 82, 1330, 3108, 198, 6738, 13961, 13, 33571, 1330, 357, 198, 220, 220, 220, 2345, 8656, 8053, 7680, 11, 13610, 17829, 7680, 11, 14039, 11242, 603, 7680, 11, 198, 220, 220, 220, 10133, 17829, 7680, 11, 172...
2.698031
457
import windows import ctypes import socket import struct from windows import winproxy import windows.generated_def as gdef from windows.com import interfaces as cominterfaces from windows.generated_def.winstructs import * from windows.generated_def.windef import * class TCP6Connection(MIB_TCP6ROW_OWNER_PID): """A TCP6 socket (connected or listening)""" def close(self): raise NotImplementedError("Closing IPV6 connection non implemented") def __repr__(self): if not self.established: return "<TCP IPV6 Listening socket on {0}:{1}>".format(self.local_addr, self.local_port) return "<TCP IPV6 Connection {0}:{1} -> {2}:{3}>".format(self.local_addr, self.local_port, self.remote_addr, self.remote_port) def get_MIB_TCPTABLE_OWNER_PID_from_buffer(buffer): x = windows.generated_def.winstructs.MIB_TCPTABLE_OWNER_PID.from_buffer(buffer) nb_entry = x.dwNumEntries return _GENERATED_MIB_TCPTABLE_OWNER_PID.from_buffer(buffer) def get_MIB_TCP6TABLE_OWNER_PID_from_buffer(buffer): x = windows.generated_def.winstructs.MIB_TCP6TABLE_OWNER_PID.from_buffer(buffer) nb_entry = x.dwNumEntries # Struct _MIB_TCP6TABLE_OWNER_PID definitions return _GENERATED_MIB_TCP6TABLE_OWNER_PID.from_buffer(buffer) class Firewall(cominterfaces.INetFwPolicy2): """The windows firewall""" def enabled_for_profile_type(self, profile_type): enabled = gdef.VARIANT_BOOL() self.get_FirewallEnabled(profile_type, enabled) return enabled.value class FirewallRule(cominterfaces.INetFwRule): """A rule of the firewall""" def __repr__(self): return u'<{0} "{1}">'.format(type(self).__name__, self.name).encode("ascii", errors='backslashreplace') class Network(object): NetFwPolicy2 = windows.com.IID.from_string("E2B3C97F-6AE1-41AC-817A-F6F92166D7DD") ipv4 = property(lambda self: self._get_tcp_ipv4_sockets()) """List of TCP IPv4 socket (connection and listening) :type: [:class:`TCP4Connection`]""" ipv6 = property(lambda self: self._get_tcp_ipv6_sockets()) """List of TCP IPv6 socket (connection and listening) :type: [:class:`TCP6Connection`] """
[ 11748, 9168, 198, 11748, 269, 19199, 198, 11748, 17802, 198, 11748, 2878, 198, 198, 6738, 9168, 1330, 1592, 36436, 198, 11748, 9168, 13, 27568, 62, 4299, 355, 308, 4299, 198, 6738, 9168, 13, 785, 1330, 20314, 355, 401, 3849, 32186, 198,...
2.510832
877
#!/usr/bin/env python3 # Coded by Massimiliano Tomassoli, 2012. # # - Thanks to b49P23TIvg for suggesting that I should use a set operation # instead of repeated membership tests. # - Thanks to Ian Kelly for pointing out that # - "minArgs = None" is better than "minArgs = -1", # - "if args" is better than "if len(args)", and # - I should use "isdisjoint". # def genCur(func, unique = True, minArgs = None): """ Generates a 'curried' version of a function. """ return g if __name__ == "__main__": # Simple Function. # NOTE: '<====' means "this line prints to the screen". # Example 1. f = cur(func) # f is a "curried" version of func c1 = f(1) c2 = c1(2, d = 4) # Note that c is still unbound c3 = c2(3)(f = 6)(e = 5) # now c = 3 c3() # () forces the evaluation <==== # it prints "1 2 3 4 5 6 100" c4 = c2(30)(f = 60)(e = 50) # now c = 30 c4() # () forces the evaluation <==== # it prints "1 2 30 4 50 60 100" print("\n------\n") # Example 2. f = curr(func) # f is a "curried" version of func # curr = cur with possibly repeated # keyword args c1 = f(1, 2)(3, 4) c2 = c1(e = 5)(f = 6)(e = 10)() # ops... we repeated 'e' because we <==== # changed our mind about it! # again, () forces the evaluation # it prints "1 2 3 4 10 6 100" print("\n------\n") # Example 3. f = cur(func, 6) # forces the evaluation after 6 arguments c1 = f(1, 2, 3) # num args = 3 c2 = c1(4, f = 6) # num args = 5 c3 = c2(5) # num args = 6 ==> evalution <==== # it prints "1 2 3 4 5 6 100" c4 = c2(5, g = -1) # num args = 7 ==> evaluation <==== # we can specify more than 6 arguments, but # 6 are enough to force the evaluation # it prints "1 2 3 4 5 6 -1" print("\n------\n") # Example 4. printTree(func) print("\n------\n") stress(cur(f2), 100)
[ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 18, 198, 198, 2, 327, 9043, 416, 5674, 26641, 10115, 4186, 562, 11106, 11, 2321, 13, 198, 2, 198, 2, 532, 6930, 284, 275, 2920, 47, 1954, 25621, 45119, 329, 9524, 326, 314, 815, 779, 25...
1.795875
1,406
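The bodies of genCur, cur and curr are truncated above, so the examples cannot run as shown. As a rough sketch of the mechanism only (not the original implementation), a minimal curry helper looks like:

def curry(func, n_args):
    """Collect positional/keyword args until n_args are bound, then call func."""
    def collect(args, kwargs):
        def curried(*more, **more_kw):
            new_args = args + more
            new_kwargs = dict(kwargs, **more_kw)
            if len(new_args) + len(new_kwargs) >= n_args:
                return func(*new_args, **new_kwargs)
            return collect(new_args, new_kwargs)
        return curried
    return collect((), {})

def add3(a, b, c):
    return a + b + c

f = curry(add3, 3)
print(f(1)(2)(3))    # 6
print(f(1, 2)(c=3))  # 6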
greeting = """ --------------- BEGIN SESSION --------------- You have connected to a chat server. Welcome! :: About Chat is a small piece of server software written by Evan Pratten to allow people to talk to eachother from any computer as long as it has an internet connection. (Even an arduino!). Check out the project at: https://github.com/Ewpratten/chat :: Disclaimer While chatting, keep in mind that, if there is a rule or regulation about privacy, this server does not follow it. All data is sent to and from this server over a raw TCP socket and data is temporarily stored in plaintext while the server handles message broadcasting Now that's out of the way so, happy chatting! --------------------------------------------- """
[ 70, 2871, 278, 796, 37227, 198, 24305, 347, 43312, 311, 47621, 220, 24305, 198, 1639, 423, 5884, 284, 257, 8537, 4382, 13, 19134, 0, 198, 198, 3712, 7994, 198, 30820, 318, 257, 1402, 3704, 286, 4382, 3788, 220, 198, 15266, 416, 21523,...
4.145251
179
import cv2 import itertools, os, time import numpy as np from Model import get_Model from parameter import letters import argparse from keras import backend as K K.set_learning_phase(0) Region = {"A": " ", "B": " ", "C": " ", "D": " ", "E": " ", "F": " ", "G": " ", "H": " ", "I": " ", "J": " ", "K": " ", "L": " ", "M": " ", "N": " ", "O": " ", "P": " "} Hangul = {"dk": "", "dj": "", "dh": "", "dn": "", "qk": "", "qj": "", "qh": "", "qn": "", "ek": "", "ej": "", "eh": "", "en": "", "rk": "", "rj": "", "rh": "", "rn": "", "wk": "", "wj": "", "wh": "", "wn": "", "ak": "", "aj": "", "ah": "", "an": "", "sk": "", "sj": "", "sh": "", "sn": "", "fk": "", "fj": "", "fh": "", "fn": "", "tk": "", "tj": "", "th": "", "tn": "", "gj": ""} parser = argparse.ArgumentParser() parser.add_argument("-w", "--weight", help="weight file directory", type=str, default="models/weights.best.hdf5") parser.add_argument("-t", "--test_img", help="Test image directory", type=str, default="./DB/test/") args = parser.parse_args() # Get CRNN model model = get_Model(training=False) try: model.load_weights(args.weight) print("...Previous weight data...") except: raise Exception("No weight file!") test_dir =args.test_img test_imgs = os.listdir(args.test_img) total = 0 acc = 0 letter_total = 0 letter_acc = 0 start = time.time() for test_img in test_imgs: img = cv2.imread(test_dir + test_img, cv2.IMREAD_GRAYSCALE) img_pred = img.astype(np.float32) img_pred = cv2.resize(img_pred, (128, 64)) img_pred = (img_pred / 255.0) * 2.0 - 1.0 img_pred = img_pred.T img_pred = np.expand_dims(img_pred, axis=-1) img_pred = np.expand_dims(img_pred, axis=0) net_out_value = model.predict(img_pred) pred_texts = decode_label(net_out_value) for i in range(min(len(pred_texts), len(test_img[0:-4]))): if pred_texts[i] == test_img[i]: letter_acc += 1 letter_total += max(len(pred_texts), len(test_img[0:-4])) predOk = "True" if pred_texts == test_img[0:-4]: acc += 1 else: predOk = "False" total += 1 # print('Predicted: %s / True: %s / net_out_value: %s / ' % (label_to_hangul(pred_texts), label_to_hangul(test_img[0:-4]))) print('Predicted: %s / True: %s / predOk: %s ' % (pred_texts, test_img[0:-4], predOk )) # cv2.rectangle(img, (0,0), (150, 30), (0,0,0), -1) # cv2.putText(img, pred_texts, (5, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.8, (255,255,255),2) #cv2.imshow("q", img) #if cv2.waitKey(0) == 27: # break #cv2.destroyAllWindows() end = time.time() total_time = (end - start) print("Time : ",total_time / total) print("ACC : ", acc / total) print("letter ACC : ", letter_acc / letter_total)
[ 11748, 269, 85, 17, 198, 11748, 340, 861, 10141, 11, 28686, 11, 640, 198, 11748, 299, 32152, 355, 45941, 198, 6738, 9104, 1330, 651, 62, 17633, 198, 6738, 11507, 1330, 7475, 198, 11748, 1822, 29572, 198, 6738, 41927, 292, 1330, 30203, ...
2.12406
1,330
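decode_label is called above but its definition is not part of the excerpt. A typical greedy CTC decoder for CRNN output of shape (1, timesteps, classes) looks like the sketch below; it assumes the blank symbol is the last class index, which is a guess, not something the excerpt confirms:

import itertools
import numpy as np

def greedy_ctc_decode(net_out_value, alphabet):
    """Argmax per timestep, collapse repeated labels, drop blanks."""
    best_path = np.argmax(net_out_value[0], axis=-1)  # (timesteps,)
    blank = len(alphabet)  # assumed blank index (last class)
    collapsed = [k for k, _ in itertools.groupby(best_path) if k != blank]
    return ''.join(alphabet[i] for i in collapsed)

# e.g. pred_texts = greedy_ctc_decode(net_out_value, letters)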
# Copyright (c) Facebook, Inc. and its affiliates. from typing import List, Optional, cast # Skipping analyzing 'numpy': found module but no type hints or library stubs import numpy as np # type: ignore import numpy.ma as ma # type: ignore # Skipping analyzing 'pandas': found module but no type hints or library stubs import pandas as pd # type: ignore import pyarrow as pa # type: ignore import torcharrow.dtypes as dt from torcharrow import Scope def from_arrow_table( table, dtype: Optional[dt.DType] = None, columns: Optional[List[str]] = None, scope=None, device="", ): """ " Convert arrow table to a torcharrow dataframe. """ scope = scope or Scope.default device = device or scope.device assert isinstance(table, pa.Table) if dtype is not None: assert dt.is_struct(dtype) dtype = cast(dt.Struct, dtype) res = {} for f in dtype.fields: chunked_array = table.column(f.name) pydata = chunked_array.to_pylist() res[f.name] = scope.Column(pydata, f.dtype) return scope.DataFrame(res, device=device) else: res = {} table = table.select(columns) if columns is not None else table for n in table.column_names: chunked_array = table.column(n) pydata = chunked_array.to_pylist() res[n] = scope.Column( pydata, dtype=_arrowtype_to_dtype( table.schema.field(n).type, table.column(n).null_count > 0 ), ) return scope.DataFrame(res, device=device) def from_pandas_dataframe( df, dtype: Optional[dt.DType] = None, columns: Optional[List[str]] = None, scope=None, device="", ): """ Convert pandas dataframe to torcharrow dataframe (drops indices). Parameters ---------- df : Pandas dataframe dtype : dtype, default None Data type to force, if None will automatically infer. columns : array-like List of column names to extract from df. scope : Scope or None Scope to use, or None for default scope. device : str or "" Device to use, or default if blank. Examples -------- >>> import pandas as pd >>> import torcharrow as ta >>> pdf = pd.DataFrame({'a': [0, 1, 2, 3],'b': [0.1, 0.2, None, 0.3]}) >>> gdf = ta.from_pandas_dataframe(pdf) >>> gdf index a b ------- --- --- 0 0 0.1 1 1 0.2 2 2 3 3 0.3 dtype: Struct([Field('a', int64), Field('b', Float64(nullable=True))]), count: 4, null_count: 0 """ scope = scope or Scope.default device = device or scope.device if dtype is not None: assert dt.is_struct(dtype) dtype = cast(dt.Struct, dtype) res = {} for f in dtype.fields: # this shows that Column shoud also construct Dataframes! res[f.name] = from_pandas_series( pd.Series(df[f.name]), f.dtype, scope=scope ) return scope.Frame(res, dtype=dtype, device=device) else: res = {} for n in df.columns: if columns is None or n in columns: res[n] = from_pandas_series(pd.Series(df[n]), scope=scope) return scope.Frame(res, device=device) def from_arrow_array(array, dtype=None, scope=None, device=""): """ " Convert arrow array to a torcharrow column. """ scope = scope or Scope.default device = device or scope.device assert isinstance(array, pa.Array) pydata = _arrow_scalar_to_py(array) if dtype is not None: assert not dt.is_struct(dtype) return scope.Column(pydata, dtype, device=device) else: return scope.Column( pydata, dtype=_arrowtype_to_dtype(array.type, array.null_count > 0), device=device, ) def from_pandas_series(series, dtype=None, scope=None, device=""): """ " Convert pandas series array to a torcharrow column (drops indices). 
""" scope = scope or Scope.default device = device or scope.device return from_numpy(series.to_numpy(), dtype, scope, device) def from_numpy(array, dtype, scope=None, device=""): """ Convert 1dim numpy array to a torcharrow column (zero copy). """ scope = scope or Scope.default device = device or scope.device if isinstance(array, ma.core.MaskedArray) and array.ndim == 1: return _from_numpy_ma(array.data, array.mask, dtype, scope, device) elif isinstance(array, np.ndarray) and array.ndim == 1: return _from_numpy_nd(array, dtype, scope, device) else: raise TypeError(f"cannot convert numpy array of type {array.dtype}") # def _column_without_nan(series, dtype): # if dtype is None or is_floating(dtype): # for i in series: # if isinstance(i, float) and np.isnan(i): # yield None # else: # yield i # else: # for i in series: # yield i
[ 2, 15069, 357, 66, 8, 3203, 11, 3457, 13, 290, 663, 29116, 13, 198, 6738, 19720, 1330, 7343, 11, 32233, 11, 3350, 198, 198, 2, 3661, 4501, 22712, 705, 77, 32152, 10354, 1043, 8265, 475, 645, 2099, 20269, 393, 5888, 17071, 82, 198, ...
2.282005
2,234
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Evaluates a TFGAN trained compression model.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from absl import app import tensorflow as tf from research.gan.image_compression import data_provider from research.gan.image_compression import networks from research.gan.image_compression import summaries FLAGS = tf.app.flags.FLAGS flags = tf.app.flags flags.DEFINE_string('master', '', 'Name of the TensorFlow master to use.') flags.DEFINE_string('checkpoint_dir', '/tmp/compression/', 'Directory where the model was written to.') flags.DEFINE_string('eval_dir', '/tmp/compression/', 'Directory where the results are saved to.') flags.DEFINE_integer('max_number_of_evaluations', None, 'Number of times to run evaluation. If `None`, run ' 'forever.') flags.DEFINE_string('dataset_dir', 'testdata', 'Location of data.') # Compression-specific flags. flags.DEFINE_integer('batch_size', 32, 'The number of images in each batch.') flags.DEFINE_integer('patch_size', 32, 'The size of the patches to train on.') flags.DEFINE_integer('bits_per_patch', 1230, 'The number of bits to produce per patch.') flags.DEFINE_integer('model_depth', 64, 'Number of filters for compression model') if __name__ == '__main__': app.run(_)
[ 2, 15069, 2177, 383, 309, 22854, 37535, 46665, 13, 1439, 6923, 33876, 13, 198, 2, 198, 2, 49962, 739, 262, 24843, 13789, 11, 10628, 362, 13, 15, 357, 1169, 366, 34156, 15341, 198, 2, 345, 743, 407, 779, 428, 2393, 2845, 287, 11846, ...
3.093842
682
#!/usr/local/bin/python import os import mysql.connector as mysql metrics_mysql_password = os.environ['METRICS_MYSQL_PWD'] sql_host = os.environ['SQL_HOST'] metrics = os.environ['QUERY_ON'] def dump_query_results(): """ This is a simple SQL table dump of a given query so we can supply users with custom tables. Note that the SQL query itself and column headers portion need to be changed if you want to change the query/results. Otherwise it is good to go. It can be called simply with the bin shell script. Read the README at the top level for an example. """ #connect to mysql db_connection = mysql.connect( host = sql_host,#"mysql1", #"localhost", user = "metrics", #"root", passwd = metrics_mysql_password, database = "metrics" #"datacamp" ) cursor = db_connection.cursor() query = "use "+metrics cursor.execute(query) #CHANGE QUERY HERE query = "select username, display_name, email, orcid, kb_internal_user, institution, country, signup_date, last_signin_date from user_info order by signup_date" #CHANGE COLUMN HEADERS HERE TO MATCH QUERY HEADERS print("username\tdisplay_name\temail\torcid\tkb_internal_user\tinstitution\tcountry\tsignup_date\tlast_signin_date") cursor.execute(query) row_values = list() for (row_values) in cursor: temp_string = "" for i in range(len(row_values) - 1): if row_values[i] is not None: temp_string += str(row_values[i]) temp_string += "\t" if row_values[-1] is not None: temp_string += str(row_values[-1]) print(temp_string) return 1 dump_query_results()
[ 2, 48443, 14629, 14, 12001, 14, 8800, 14, 29412, 198, 198, 11748, 28686, 198, 11748, 48761, 13, 8443, 273, 355, 48761, 220, 220, 220, 220, 198, 198, 4164, 10466, 62, 28744, 13976, 62, 28712, 796, 28686, 13, 268, 2268, 17816, 47123, 49...
2.528804
677
from desktop_local_tests.local_packet_capture_test_case_with_disrupter import LocalPacketCaptureTestCaseWithDisrupter from desktop_local_tests.windows.windows_dns_force_public_dns_servers_disrupter import WindowsDNSForcePublicDNSServersDisrupter
[ 6738, 11364, 62, 12001, 62, 41989, 13, 12001, 62, 8002, 316, 62, 27144, 495, 62, 9288, 62, 7442, 62, 4480, 62, 6381, 12618, 353, 1330, 10714, 47, 8317, 49630, 14402, 20448, 3152, 7279, 12618, 353, 198, 6738, 11364, 62, 12001, 62, 4198...
3.194805
77
'''
Asynchronous data loader
========================

This is the Asynchronous Loader. You can use it to load an image
and use it, even if the data is not yet available. You must specify a default
loading image when using such a loader::

    from kivy import *
    image = Loader.image('mysprite.png')

You can also load an image from a url::

    image = Loader.image('http://mysite.com/test.png')

If you want to change the default loading image, you can do::

    Loader.loading_image = Image('another_loading.png')

Tweaking the asynchronous loader
--------------------------------

.. versionadded:: 1.6.0

You can now tweak the loader to have a better user experience or more
performance, depending on the images you're going to load. Take a look at the
parameters:

- :data:`Loader.num_workers` - define the number of threads to start for
  loading images
- :data:`Loader.max_upload_per_frame` - define the maximum number of GPU
  image uploads per frame.
'''

__all__ = ('Loader', 'LoaderBase', 'ProxyImage')

from kivy import kivy_data_dir
from kivy.logger import Logger
from kivy.clock import Clock
from kivy.cache import Cache
from kivy.core.image import ImageLoader, Image
from kivy.compat import PY2

from collections import deque
from time import sleep
from os.path import join
from os import write, close, unlink, environ
import threading

# Register a cache for loader
Cache.register('kv.loader', limit=500, timeout=60)

#
# Loader implementation
#

if 'KIVY_DOC' in environ:

    Loader = None

else:

    #
    # Try to use pygame as our first choice for loader
    #
    from kivy.compat import queue
    from threading import Thread

    Loader = LoaderThreadPool()
    Logger.info('Loader: using a thread pool of {} workers'.format(
        Loader.num_workers))
[ 7061, 6, 198, 1722, 31301, 1366, 40213, 198, 4770, 2559, 198, 198, 1212, 318, 262, 1081, 31301, 8778, 263, 13, 921, 460, 779, 340, 284, 3440, 281, 2939, 198, 392, 779, 340, 11, 772, 611, 1366, 389, 407, 1865, 1695, 13, 921, 1276, ...
3.232305
551
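The two knobs named in the docstring above can be set directly on the singleton; the values here are only an example of the trade-off, not recommendations:

from kivy.loader import Loader

Loader.num_workers = 4           # more threads for fetching/decoding images
Loader.max_upload_per_frame = 1  # fewer GPU texture uploads per frame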
# 13. Join
# join allows you to print a list a bit more readably.

friends = ['Pythobit', 'boy', 'Pythoman']
print(f'My friends are {friends}.')
# Output - My friends are ['Pythobit', 'boy', 'Pythoman'].

# So, the output needs to be a bit clearer.
friends = ['Pythobit', 'boy', 'Pythoman']
friend = ', '.join(friends)
print(f'My friends are {friend}')
# Output - My friends are Pythobit, boy, Pythoman
# Here a comma and a space (', ') are used as the separator, but you can use anything.
[ 2, 1511, 13, 15251, 198, 2, 340, 3578, 284, 3601, 1351, 257, 1643, 1365, 198, 198, 36154, 796, 37250, 47, 5272, 672, 270, 41707, 7081, 41707, 47, 5272, 5185, 20520, 198, 4798, 7, 69, 6, 3666, 2460, 389, 1391, 36154, 92, 2637, 8, 2...
2.91875
160
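Since any string works as the separator, two more join variants:

print(' and '.join(friends))  # Pythobit and boy and Pythoman
print('\n'.join(friends))     # one name per line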
# settings file for builds. # if you want to have custom builds, copy this file to "localbuildsettings.py" and make changes there. # possible fields: # resourceBaseUrl - optional - the URL base for external resources (all resources embedded in standard IITC) # distUrlBase - optional - the base URL to use for update checks # buildMobile - optional - if set, mobile builds are built with 'ant'. requires the Android SDK and appropriate mobile/local.properties file configured # preBuild - optional - an array of strings to run as commands, via os.system, before building the scripts # postBuild - optional - an array of string to run as commands, via os.system, after all builds are complete buildSettings = { # local: use this build if you're not modifying external resources # no external resources allowed - they're not needed any more 'randomizax': { 'resourceUrlBase': None, 'distUrlBase': 'https://randomizax.github.io/polygon-label', }, # local8000: if you need to modify external resources, this build will load them from # the web server at http://0.0.0.0:8000/dist # (This shouldn't be required any more - all resources are embedded. but, it remains just in case some new feature # needs external resources) 'local8000': { 'resourceUrlBase': 'http://0.0.0.0:8000/dist', 'distUrlBase': None, }, # mobile: default entry that also builds the mobile .apk # you will need to have the android-sdk installed, and the file mobile/local.properties created as required 'mobile': { 'resourceUrlBase': None, 'distUrlBase': None, 'buildMobile': 'debug', }, # if you want to publish your own fork of the project, and host it on your own web site # create a localbuildsettings.py file containing something similar to this # note: Firefox+Greasemonkey require the distUrlBase to be "https" - they won't check for updates on regular "http" URLs #'example': { # 'resourceBaseUrl': 'http://www.example.com/iitc/dist', # 'distUrlBase': 'https://secure.example.com/iitc/dist', #}, } # defaultBuild - the name of the default build to use if none is specified on the build.py command line # (in here as an example - it only works in localbuildsettings.py) #defaultBuild = 'local'
[ 2, 6460, 2393, 329, 12188, 13, 198, 198, 2, 611, 345, 765, 284, 423, 2183, 12188, 11, 4866, 428, 2393, 284, 366, 12001, 11249, 33692, 13, 9078, 1, 290, 787, 2458, 612, 13, 198, 198, 2, 1744, 7032, 25, 198, 2, 8271, 14881, 28165, ...
3.277465
710
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import

from builtins import int
from future import standard_library

standard_library.install_aliases()
import os
import os.path
import stat
import urllib.parse
import paramiko
import traceback
import osaka.utils

"""
A backend used to handle sftp using paramiko
@author starchmd
"""
[ 6738, 11593, 37443, 834, 1330, 3601, 62, 8818, 198, 6738, 11593, 37443, 834, 1330, 28000, 1098, 62, 17201, 874, 198, 6738, 11593, 37443, 834, 1330, 7297, 198, 6738, 11593, 37443, 834, 1330, 4112, 62, 11748, 628, 198, 6738, 3170, 1040, 1...
3.706897
116
import os os.environ["PYGAME_HIDE_SUPPORT_PROMPT"] = "hide" import pygame RENDER_RATIO = 2
[ 11748, 28686, 198, 198, 418, 13, 268, 2268, 14692, 47, 56, 47109, 62, 39, 14114, 62, 40331, 15490, 62, 4805, 2662, 11571, 8973, 796, 366, 24717, 1, 198, 11748, 12972, 6057, 198, 198, 49, 10619, 1137, 62, 49, 1404, 9399, 796, 362, 62...
2.186047
43
import random from internal_representation_analysis.network import ActorCriticFFNetwork from internal_representation_analysis.scene_loader import THORDiscreteEnvironment as Environment from internal_representation_analysis.constants import MINI_BATCH_SIZE
[ 11748, 4738, 198, 198, 6738, 5387, 62, 15603, 341, 62, 20930, 13, 27349, 1330, 27274, 18559, 291, 5777, 26245, 198, 6738, 5387, 62, 15603, 341, 62, 20930, 13, 29734, 62, 29356, 1330, 2320, 12532, 2304, 8374, 31441, 355, 9344, 198, 198, ...
4.316667
60
import numpy as np from models import dist_model as dm from data import data_loader as dl import argparse from IPython import embed parser = argparse.ArgumentParser() parser.add_argument("--dataset_mode", type=str, default="2afc", help="[2afc,jnd]") parser.add_argument( "--datasets", type=str, nargs="+", default=[ "val/traditional", "val/cnn", "val/superres", "val/deblur", "val/color", "val/frameinterp", ], help="datasets to test - for jnd mode: [val/traditional],[val/cnn]; for 2afc mode: [train/traditional],[train/cnn],[train/mix],[val/traditional],[val/cnn],[val/color],[val/deblur],[val/frameinterp],[val/superres]", ) parser.add_argument( "--model", type=str, default="net-lin", help="distance model type [net-lin] for linearly calibrated net, [net] for off-the-shelf network, [l2] for euclidean distance, [ssim] for Structured Similarity Image Metric", ) parser.add_argument( "--net", type=str, default="alex", help="[squeeze], [alex], or [vgg] for network architectures", ) parser.add_argument( "--colorspace", type=str, default="Lab", help="[Lab] or [RGB] for colorspace to use for l2, ssim model types", ) parser.add_argument( "--batch_size", type=int, default=50, help="batch size to test image patches in" ) parser.add_argument("--use_gpu", action="store_true", help="turn on flag to use GPU") parser.add_argument( "--model_path", type=str, default=None, help="location of model, will default to ./weights/v[version]/[net_name].pth", ) parser.add_argument( "--from_scratch", action="store_true", help="model was initialized from scratch" ) parser.add_argument( "--train_trunk", action="store_true", help="model trunk was trained/tuned" ) parser.add_argument( "--version", type=str, default="0.1", help="v0.1 is latest, v0.0 was original release", ) opt = parser.parse_args() if opt.model in ["l2", "ssim"]: opt.batch_size = 1 # initialize model model = dm.DistModel() # model.initialize(model=opt.model,net=opt.net,colorspace=opt.colorspace,model_path=opt.model_path,use_gpu=opt.use_gpu) model.initialize( model=opt.model, net=opt.net, colorspace=opt.colorspace, model_path=opt.model_path, use_gpu=opt.use_gpu, pnet_rand=opt.from_scratch, pnet_tune=opt.train_trunk, version=opt.version, ) if opt.model in ["net-lin", "net"]: print("Testing model [%s]-[%s]" % (opt.model, opt.net)) elif opt.model in ["l2", "ssim"]: print("Testing model [%s]-[%s]" % (opt.model, opt.colorspace)) # embed() # initialize data loader for dataset in opt.datasets: data_loader = dl.CreateDataLoader( dataset, dataset_mode=opt.dataset_mode, batch_size=opt.batch_size ) # evaluate model on data if opt.dataset_mode == "2afc": (score, results_verbose) = dm.score_2afc_dataset(data_loader, model.forward) elif opt.dataset_mode == "jnd": (score, results_verbose) = dm.score_jnd_dataset(data_loader, model.forward) # print results print(" Dataset [%s]: %.2f" % (dataset, 100.0 * score))
[ 11748, 299, 32152, 355, 45941, 198, 6738, 4981, 1330, 1233, 62, 19849, 355, 288, 76, 198, 6738, 1366, 1330, 1366, 62, 29356, 355, 288, 75, 198, 11748, 1822, 29572, 198, 6738, 6101, 7535, 1330, 11525, 198, 198, 48610, 796, 1822, 29572, ...
2.445221
1,287