content stringlengths 1 1.05M | input_ids listlengths 1 883k | ratio_char_token float64 1 22.9 | token_count int64 1 883k |
|---|---|---|---|
from datetime import datetime
from dateutil.relativedelta import relativedelta
| [
6738,
4818,
8079,
1330,
4818,
8079,
198,
198,
6738,
3128,
22602,
13,
2411,
265,
1572,
12514,
1330,
48993,
1572,
12514,
628
] | 3.857143 | 21 |
import argparse
import json
from os import openpty
if __name__ == "__main__":
args = get_parser()
main(args) | [
11748,
1822,
29572,
198,
11748,
33918,
198,
6738,
28686,
1330,
1280,
5835,
198,
198,
361,
11593,
3672,
834,
6624,
366,
834,
12417,
834,
1298,
198,
220,
220,
220,
26498,
796,
651,
62,
48610,
3419,
198,
220,
220,
220,
1388,
7,
22046,
8
... | 2.785714 | 42 |
from django import template
from django.conf import settings
from ..models import Bookmark
register = template.Library()
RESKIN_MENU_APP_ORDER = settings.RESKIN_MENU_APP_ORDER
RESKIN_MENU_MODEL_ORDER = settings.RESKIN_MENU_MODEL_ORDER
RESKIN_APP_ICON = settings.RESKIN_APP_ICON
| [
6738,
42625,
14208,
1330,
11055,
198,
6738,
42625,
14208,
13,
10414,
1330,
6460,
198,
198,
6738,
11485,
27530,
1330,
4897,
4102,
198,
198,
30238,
796,
11055,
13,
23377,
3419,
198,
198,
19535,
42,
1268,
62,
49275,
52,
62,
24805,
62,
1253... | 2.669811 | 106 |
import os
import io
import urllib.request
import zipfile
import pandas
import fda
| [
11748,
28686,
198,
11748,
33245,
198,
11748,
2956,
297,
571,
13,
25927,
198,
11748,
19974,
7753,
198,
198,
11748,
19798,
292,
198,
198,
11748,
277,
6814,
198,
220,
220,
220,
220,
198
] | 2.78125 | 32 |
import numpy as np
import matplotlib.pyplot as plt
import re as r
import easyocr
#import os
#os.environ['KMP_DUPLICATE_LIB_OK']='True'
re = easyocr.Reader(['en'])
#pl = []
chk = []
a = ''
a1 = ''
#pl = []
#sym = ['{', ']', '[', '}']
| [
11748,
299,
32152,
355,
45941,
198,
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
198,
11748,
302,
355,
374,
198,
11748,
2562,
1696,
198,
2,
11748,
28686,
198,
2,
418,
13,
268,
2268,
17816,
42,
7378,
62,
35,
52,
31484,
61... | 2.235849 | 106 |
# SPDX-FileCopyrightText: 2021 Carnegie Mellon University
#
# SPDX-License-Identifier: Apache-2.0
import logging
import cv2
from busedge_protocol import busedge_pb2
from gabriel_protocol import gabriel_pb2
from sign_filter import SignFilter
logger = logging.getLogger(__name__)
import argparse
import multiprocessing
import time
import rospy
from cv_bridge import CvBridge
from sensor_msgs.msg import CompressedImage, Image, NavSatFix
from std_msgs.msg import UInt8MultiArray
DEFAULT_SOURCE_NAME = "sign_filter3"
CUR_GPS = NavSatFix()
if __name__ == "__main__":
# run_node('camera3')
parser = argparse.ArgumentParser()
parser.add_argument(
"-n",
"--source-name",
nargs="+",
default=[DEFAULT_SOURCE_NAME],
help="Set source name for this pipeline",
)
args = parser.parse_args()
for source in args.source_name:
multiprocessing.Process(target=run_node, args=(source,)).start()
| [
2,
30628,
55,
12,
8979,
15269,
8206,
25,
33448,
33976,
49808,
2059,
198,
2,
198,
2,
30628,
55,
12,
34156,
12,
33234,
7483,
25,
24843,
12,
17,
13,
15,
198,
198,
11748,
18931,
198,
198,
11748,
269,
85,
17,
198,
6738,
1323,
14907,
62... | 2.675978 | 358 |
import click
from pathlib import Path
# Local imports
from .__init__ import *
from .utils import parse_time, create_dir, write_file, get_profiles, compress, INVALID_PROFILE, INVALID_DATES
from .lambda_log_collector import LambdaLogCollector
| [
11748,
3904,
198,
6738,
3108,
8019,
1330,
10644,
198,
198,
2,
10714,
17944,
198,
6738,
764,
834,
15003,
834,
1330,
1635,
198,
6738,
764,
26791,
1330,
21136,
62,
2435,
11,
2251,
62,
15908,
11,
3551,
62,
7753,
11,
651,
62,
5577,
2915,
... | 3.283784 | 74 |
import sys
sys.path.append('../../../optimus')
from optimus.server import app | [
11748,
25064,
198,
17597,
13,
6978,
13,
33295,
10786,
40720,
40720,
40720,
8738,
20704,
11537,
198,
198,
6738,
6436,
385,
13,
15388,
1330,
598
] | 3.25 | 24 |
"""Common MDPs in RL literature."""
from gym.envs.registration import register
from .baird_star import BairdStar
from .boyan_chain import BoyanChain
from .double_chain import DoubleChainProblem
from .grid_world import EasyGridWorld
from .random_mdp import RandomMDP
from .single_chain import SingleChainProblem
from .two_state import TwoStateProblem
register(id="BairdStar-v0", entry_point="rllib.environment.mdps.baird_star:BairdStar")
register(
id="BoyanChain-v0", entry_point="rllib.environment.mdps.boyan_chain:BoyanChain"
)
register(
id="DoubleChainProblem-v0",
entry_point="rllib.environment.mdps.double_chain:DoubleChainProblem",
)
register(
id="EasyGridWorld-v0", entry_point="rllib.environment.mdps.grid_world:EasyGridWorld"
)
register(id="RandomMDP-v0", entry_point="rllib.environment.mdps.random_mdp:RandomMDP")
register(
id="SingleChainProblem-v0",
entry_point="rllib.environment.mdps.single_chain:SingleChainProblem",
)
register(
id="TwoStateProblem-v0",
entry_point="rllib.environment.mdps.two_state:TwoStateProblem",
)
| [
37811,
17227,
337,
6322,
82,
287,
45715,
9285,
526,
15931,
198,
6738,
11550,
13,
268,
14259,
13,
2301,
33397,
1330,
7881,
198,
198,
6738,
764,
65,
41620,
62,
7364,
1330,
48503,
8248,
198,
6738,
764,
7081,
272,
62,
7983,
1330,
6387,
27... | 2.870968 | 372 |
import glob
import pickle
from shutil import copy
from tqdm import tqdm
#copy_specific_training_data_to_new_folder('F:/Project_Cars_Data/Raw',
#'F:/Project_Cars_Data/Watkins Glen International - Short Circuit',
# 'Watkins Glen International', 'Short Circuit')
# b'Watkins Glen International'
# b'Short Circuit'
# b'Watkins Glen International'
# b'Grand Prix' | [
11748,
15095,
198,
11748,
2298,
293,
198,
6738,
4423,
346,
1330,
4866,
198,
6738,
256,
80,
36020,
1330,
256,
80,
36020,
628,
198,
198,
2,
30073,
62,
11423,
62,
34409,
62,
7890,
62,
1462,
62,
3605,
62,
43551,
10786,
37,
14079,
16775,
... | 3.008264 | 121 |
from faults.faultmodel import FaultModel
from utils import *
| [
6738,
31025,
13,
69,
1721,
19849,
1330,
40050,
17633,
198,
6738,
3384,
4487,
1330,
1635,
628
] | 3.875 | 16 |
import pytest
from traitlets import TraitError
from ipygany import PolyMesh, Warp
from .utils import get_test_assets
| [
11748,
12972,
9288,
198,
198,
6738,
1291,
2578,
912,
1330,
4759,
270,
12331,
198,
198,
6738,
20966,
88,
1030,
88,
1330,
12280,
37031,
11,
31382,
198,
198,
6738,
764,
26791,
1330,
651,
62,
9288,
62,
19668,
628,
198
] | 3.210526 | 38 |
"""
The template of the script for playing the game in the ml mode
"""
| [
37811,
198,
464,
11055,
286,
262,
4226,
329,
2712,
262,
983,
287,
262,
25962,
4235,
198,
37811,
198
] | 3.944444 | 18 |
#!/usr/bin/env python3
# coding:utf-8
if __name__ == "__main__":
# data = [1, 3, 3, 3, 3, 4, 5]
# k = 2
# k = 3
# k = 4
# k = 6
data = [1, 2, 3, 3, 3, 3]
k = 3
s = Solution()
ans = s.GetNumberOfK(data, k)
print(ans)
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
2,
19617,
25,
40477,
12,
23,
628,
198,
198,
361,
11593,
3672,
834,
6624,
366,
834,
12417,
834,
1298,
198,
220,
220,
220,
1303,
1366,
796,
685,
16,
11,
513,
11,
513,
11,
513,
... | 1.825175 | 143 |
# Copyright 2022 Canonical Ltd.
# See LICENSE file for licensing details.
from typing import Union
from ops.model import Application, Relation, Unit
| [
2,
15069,
33160,
19507,
605,
12052,
13,
198,
2,
4091,
38559,
24290,
2393,
329,
15665,
3307,
13,
198,
6738,
19720,
1330,
4479,
198,
198,
6738,
39628,
13,
19849,
1330,
15678,
11,
4718,
341,
11,
11801,
628,
628,
628,
628,
628,
628,
628,
... | 3.813953 | 43 |
import torch
import torch.nn as nn
from torchvision import models
from torch.autograd import Variable
from torch.nn.parameter import Parameter
from DeepImageDenoiser import LR_THRESHOLD, DIMENSION, LEARNING_RATE
from NeuralModels import SpectralNorm
ITERATION_LIMIT = int(1e6)
SQUEEZENET_CONFIG = {'dnn' : models.squeezenet1_1(pretrained=True).features, 'features' : [2, 5, 8, 13]}
VGG_16_CONFIG = {'dnn' : models.vgg16(pretrained=True).features, 'features' : [4, 9, 16, 23]}
VGG_16_BN_CONFIG = {'dnn' : models.vgg16_bn(pretrained=True).features, 'features' : [6, 13, 23, 33] }
VGG_19_CONFIG = {'dnn' : models.vgg19(pretrained=True).features, 'features' : [ 4, 9, 18, 36] }
VGG_19_BN_CONFIG = {'dnn': models.vgg19_bn(pretrained=True).features, 'features' : [6, 13, 23, 52]}
| [
11748,
28034,
198,
11748,
28034,
13,
20471,
355,
299,
77,
198,
6738,
28034,
10178,
1330,
4981,
198,
6738,
28034,
13,
2306,
519,
6335,
1330,
35748,
198,
6738,
28034,
13,
20471,
13,
17143,
2357,
1330,
25139,
2357,
198,
198,
6738,
10766,
5... | 2.466049 | 324 |
from flask import render_template,request,redirect,url_for
from .import main
from ..request import get_sources,get_articles
from ..models import News_article,News_source | [
6738,
42903,
1330,
8543,
62,
28243,
11,
25927,
11,
445,
1060,
11,
6371,
62,
1640,
198,
6738,
764,
11748,
1388,
198,
6738,
11485,
25927,
1330,
651,
62,
82,
2203,
11,
1136,
62,
26845,
198,
6738,
11485,
27530,
1330,
3000,
62,
20205,
11,
... | 3.755556 | 45 |
from models import Mongua
| [
6738,
4981,
1330,
18428,
6413,
628
] | 4.5 | 6 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
File: Party.py
Author: Scott Yang(Scott)
Email: yangyingfa@skybility.com
Copyright: Copyright (c) 2021, Skybility Software Co.,Ltd. All rights reserved.
Description:
"""
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
37811,
198,
8979,
25,
3615,
13,
9078,
198,
13838,
25,
4746,
10998,
7,
19040,
8,
198,
15333,
25,
331,
648,
1112,
... | 2.775 | 80 |
#!/usr/bin/env python
import os
import shutil
import sqlite3
import unittest
import init_db
'''name of database to use as master'''
master_name = 'projects.db'
def setUpModule():
'''create and fill the database'''
conn = sqlite3.connect(master_name)
init_db.execute_file(conn, 'create_db.sql')
init_db.execute_file(conn, 'fill_db.sql')
def tearDownModule():
'''remove database file once testing is done'''
os.remove(master_name)
if __name__ == '__main__':
unittest.main()
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
198,
11748,
28686,
198,
11748,
4423,
346,
198,
11748,
44161,
578,
18,
198,
11748,
555,
715,
395,
198,
198,
11748,
2315,
62,
9945,
628,
198,
7061,
6,
3672,
286,
6831,
284,
779,
355,
4... | 2.680628 | 191 |
import json
from django.http.response import JsonResponse
from django.db.models import Q
from django.contrib.auth import authenticate
from rest_framework import viewsets, mixins
from rest_framework.permissions import IsAuthenticated
from rest_framework.exceptions import ValidationError, AuthenticationFailed
from rest_framework.decorators import action
from jsonschema.exceptions import ValidationError as JsonValidationError
from ids.models import Id
from ids.serializers.id.create import IdCreateSerializer
from ids.serializers.id.list import IdListSerializer
from ids.actions import create_verifiable_presentation
from ids.utils import verify_json_id
from lib.json_ids.validate import validate_json_id
from lib.drf.pagination import DefaultPageNumberPagination
| [
11748,
33918,
198,
198,
6738,
42625,
14208,
13,
4023,
13,
26209,
1330,
449,
1559,
31077,
198,
6738,
42625,
14208,
13,
9945,
13,
27530,
1330,
1195,
198,
6738,
42625,
14208,
13,
3642,
822,
13,
18439,
1330,
8323,
5344,
198,
6738,
1334,
62,... | 3.710145 | 207 |
wordinsquare('SURAJ W', 3)
| [
628,
198,
198,
4775,
1040,
421,
533,
10786,
12564,
3861,
41,
370,
3256,
513,
8,
198
] | 1.9375 | 16 |
import os
| [
11748,
28686,
201,
198,
201,
198,
201,
198
] | 1.875 | 8 |
from datetime import datetime
import logging
import json
import csv
from io import StringIO
import pymongo
from bson.objectid import ObjectId
from . import PUBLICATION_TYPES, PROJECTS, SITES
| [
6738,
4818,
8079,
1330,
4818,
8079,
198,
11748,
18931,
198,
11748,
33918,
198,
11748,
269,
21370,
198,
6738,
33245,
1330,
10903,
9399,
198,
198,
11748,
279,
4948,
25162,
198,
6738,
275,
1559,
13,
15252,
312,
1330,
9515,
7390,
198,
198,
... | 3.271186 | 59 |
#!/usr/bin/env python3
# Copyright (c) 2015-2021 Agalmic Ventures LLC (www.agalmicventures.com)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import argparse
import json
import sys
def main():
"""
Runs the main JTL program.
:return: int
"""
#Parse arguments
parser = argparse.ArgumentParser(description='JSON Transformation Language')
parser.add_argument('-i', '--indent', default=4, type=int, help='Indentation amount.')
parser.add_argument('-t', '--transform-file', help='The name of the JSON file containing the transformation to run.')
parser.add_argument('transform', nargs='?', help='The transformation to run.')
arguments = parser.parse_args(sys.argv[1:])
#Load the transformation
if arguments.transform is None and arguments.transform_file is not None:
#From a file
with open(arguments.transform_file, 'r') as f:
transformStr = f.read()
elif arguments.transform is not None and arguments.transform_file is None:
#From the command line
transformStr = arguments.transform
else:
print('ERROR: Specify either a transform file or a transform')
return 1
transformData = json.loads(transformStr)
#Read the JSON in from stdin
#TODO: error handling
data = json.loads(sys.stdin.read())
#Transform the JSON
#TODO: cleaner way to do this
sys.path.append('.')
import Interpreter
result = Interpreter.transformJson(data, transformData)
#Output the result
print(json.dumps(result, indent=arguments.indent, sort_keys=True))
return 0
if __name__ == '__main__':
sys.exit(main())
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
198,
2,
15069,
357,
66,
8,
1853,
12,
1238,
2481,
2449,
282,
9383,
41673,
11419,
357,
2503,
13,
363,
282,
9383,
10065,
13,
785,
8,
198,
2,
198,
2,
2448,
3411,
318,
29376,
7520,
... | 3.378667 | 750 |
import chainer
import pytest
| [
11748,
6333,
263,
198,
11748,
12972,
9288,
628,
198
] | 3.444444 | 9 |
#!/usr/bin/python
import BoostBuild
t = BoostBuild.Tester()
t.write("test.jam","""
actions unbuilt { } unbuilt all ;
ECHO "Hi" ;
""")
t.run_build_system("-ftest.jam", stdout="Hi\n")
t.pass_test()
| [
2,
48443,
14629,
14,
8800,
14,
29412,
198,
198,
11748,
19835,
15580,
198,
198,
83,
796,
19835,
15580,
13,
51,
7834,
3419,
198,
198,
83,
13,
13564,
7203,
9288,
13,
39159,
2430,
15931,
198,
4658,
555,
18780,
1391,
1782,
555,
18780,
477,... | 2.380952 | 84 |
a, b = MyClass(), MyClass2()
c, (d, e) = a.func1, (a.func2, a.func3)
c()
d()
e()
| [
198,
64,
11,
275,
796,
2011,
9487,
22784,
2011,
9487,
17,
3419,
198,
198,
66,
11,
357,
67,
11,
304,
8,
796,
257,
13,
20786,
16,
11,
357,
64,
13,
20786,
17,
11,
257,
13,
20786,
18,
8,
198,
198,
66,
3419,
198,
67,
3419,
198,
6... | 1.714286 | 49 |
from .language import Language
from .user_tweet_raw import UserTweetRaw
| [
6738,
764,
16129,
1330,
15417,
198,
6738,
764,
7220,
62,
83,
7277,
62,
1831,
1330,
11787,
47845,
27369,
198
] | 3.789474 | 19 |
import numpy as np
from math import ceil,floor
| [
11748,
299,
32152,
355,
45941,
198,
6738,
10688,
1330,
2906,
346,
11,
28300,
198,
220,
220,
220,
220,
220,
220,
220,
220,
198
] | 2.434783 | 23 |
"""
Encodes SPOT MILP as the structure of a CART tree in order to apply CART's pruning method
Also supports traverse() which traverses the tree
"""
import numpy as np
from mtp_SPO2CART import MTP_SPO2CART
from decision_problem_solver import*
from scipy.spatial import distance
| [
37811,
198,
27195,
4147,
6226,
2394,
31515,
47,
355,
262,
4645,
286,
257,
327,
7227,
5509,
287,
1502,
284,
4174,
327,
7227,
338,
778,
46493,
2446,
198,
7583,
6971,
38138,
3419,
543,
33038,
274,
262,
5509,
198,
37811,
198,
11748,
299,
... | 3.297619 | 84 |
# This package uses tk to create a simple graphical
# output representing the iDrive state
import tkinter as tk
import numpy as np
# why not use the numpy native? but whatever
# this class intializes the canvas and all geometrical
# objets drawn onto it. The method setState simply
# adjusts the color of the respective objects
| [
2,
770,
5301,
3544,
256,
74,
284,
2251,
257,
2829,
27831,
201,
198,
2,
220,
220,
5072,
10200,
262,
1312,
24825,
1181,
201,
198,
201,
198,
11748,
256,
74,
3849,
355,
256,
74,
201,
198,
11748,
299,
32152,
355,
45941,
201,
198,
201,
... | 3.342857 | 105 |
import numpy as np
from scipy import stats
import utils
def fit(xdata, ydata):
"""Calculate 2D regression.
Args:
xdata (numpy.ndarray): 1D array of independent data [ntim],
where ntim is the number of time points (or other independent
points).
ydata (numpy.ndarray): 2D array of dependent data [ntim, nspat],
where nspat is the number of spatial points (or other dependent
points).
Returns:
numpy.ndarray of dimension [5, nspat]. The 5 outputs are: slope,
intercept, Pearson's correlation coefficient, two-sided p-value for
a hypothesis test with null hypothesis that the slope is zero,
standard error for the slope estimate.
"""
# Small number to prevent divide-by-zero errors
TINY = 1.0e-20
# Dimensions
ntim = xdata.shape[0]
nspat = ydata.shape[1]
# Add a constant (1) to the xdata to allow for intercept calculation
xdata_plus_const = utils.add_constant(xdata)
# Calculate parameters of the regression by solving the OLS problem
# in its matrix form
mat1 = np.swapaxes(np.dot(xdata_plus_const.T,
(xdata_plus_const[np.newaxis, :, :])), 0, 1)
mat2 = np.dot(xdata_plus_const.T, ydata)
beta = np.linalg.solve(mat1, mat2.T)
output = beta.T
# Pearson correlation coefficient
xm, ym = xdata-xdata.mean(0), ydata-ydata.mean(0)
r_num = np.dot(xm, ym)
r_den = np.sqrt(stats.ss(xm) * stats.ss(ym))
pearson_r = r_num / r_den
# Two-sided p-value for a hypothesis test whose null hypothesis is that
# the slope is zero.
df = ntim - 2
tval = pearson_r * np.sqrt(df / ((1.0 - pearson_r + TINY) *
(1.0 + pearson_r + TINY)))
pval = stats.distributions.t.sf(np.abs(tval), df)*2
# Standard error of the slope estimate
sst = np.sum(ym ** 2, 0)
ssr = (output[0, :] ** 2) * np.sum(xm ** 2)
se = np.sqrt((1. / df) * (sst - ssr))
stderr = se / np.sqrt(np.sum(xm ** 2))
return np.vstack([output, pearson_r, pval, stderr])
| [
11748,
299,
32152,
355,
45941,
198,
6738,
629,
541,
88,
1330,
9756,
198,
198,
11748,
3384,
4487,
628,
198,
4299,
4197,
7,
87,
7890,
11,
331,
7890,
2599,
198,
220,
220,
220,
37227,
9771,
3129,
378,
362,
35,
20683,
13,
628,
220,
220,
... | 2.263102 | 935 |
from event import Event
| [
6738,
1785,
1330,
8558,
628
] | 5 | 5 |
import socket
import threading
import json
PORT = 5000
SERVER = socket.gethostbyname(socket.gethostname())
ADDRESS = ('', PORT)
FORMAT = 'utf-8'
clients, names = [], []
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.bind(ADDRESS)
StartChat()
| [
11748,
17802,
198,
11748,
4704,
278,
198,
11748,
33918,
198,
198,
15490,
796,
23336,
198,
35009,
5959,
796,
17802,
13,
1136,
4774,
1525,
3672,
7,
44971,
13,
1136,
4774,
3672,
28955,
198,
198,
2885,
7707,
7597,
796,
19203,
3256,
350,
986... | 2.676471 | 102 |
"""Modules related specifically to the handling of synchronous requests from a supplier system."""
| [
37811,
5841,
5028,
3519,
5734,
284,
262,
9041,
286,
18305,
516,
7007,
422,
257,
22693,
1080,
526,
15931,
198
] | 5.210526 | 19 |
import gi
import time
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk,GObject,Gdk,Pango,GLib
from wta_module import *
# Generated By WiredGTK for Python: by Rocky Nuarin, 2018 Phils
# #####################www.WireThemAll.com#####################
class Handler(usercontrol):
#WiredEvent def usercontrolevent(self,value) #add more events
#WiredProperty 'usercontrolproperty': 'sample only'
def __init__(self,*param):
initUI(self,param,w=400,h=400,title="WiredGTKV1.0",controlbox=True,startpos=(200,200),timeoutdestroy=-1)
self.GTKForms()
super().__init__(self.usercontrol)
self.sch=Scheduler(500)#500 ms
self.sch.Start()
self._text=''
self._usercontrolproperty=''
@property
def usercontrolproperty(self):
return self._usercontrolproperty
@usercontrolproperty.setter
def usercontrolproperty(self,value):
self._usercontrolproperty=value
def connect(self,ev,evusr):
self.wiredevents.update({ev:evusr})
def activeXcreated(self,*args):
pass
def unload(self,*args):
destroy=True
if destroy==True:
GLib.source_remove(self.timeout_id)
self._window.hide()
del self._window
#ExitApplication() #activate this if u want to destroy this window
return False
else:
self.window.Visible=False
return True
def loop(self, user_data):
if self.form_load==False:
self.form_load=True
if self.sch.Event():#timer routine
#code here
if self.timeoutdestroy!=-1:
self.timeoutdestroy-=1
if self.timeoutdestroy==0:
self.unload(None)
self.sch.Start()#restart scheduler
return True #return true so that main_loop can call it again
def create(self,prop,control,parent,event=[]):
createWidget(self,prop,control,parent,event)
def GTKForms(self):
self.create("{'BackColor': '(0, 0.45338815965065005, 0.401859108611177, 0.5)', 'Text': 'ctlServo1', 'Left': '135', 'Width': '30', 'ParentsType': '', 'Pin': '18', 'Name': 'ctlServo1', 'Tag': 'Activex', 'Top': '100', 'MinDutyCycle': '540', 'ForeColor': '(0,0,0,1)', 'Angle': '90', 'Events': '[]', 'Picture': '', 'Height': '30', 'Enable': 'True', 'Visible': 'True', 'Font': '', 'MaxDutyCycle': '2400'}","Servo","usercontrol","[]")
self.create("{'BackColor': '(0, 0.9944924427369468, 0.012752023212419639, 0.5)', 'Text': 'VScrollBarWTA1', 'Value': '90', 'Left': '50', 'Width': '20', 'ParentsType': '', 'Name': 'VScrollBarWTA1', 'Tag': 'Activex', 'Top': '40', 'ForeColor': '(0,0,0,1)', 'Events': '[change-value]', 'Picture': 'VScrollBarWTA.png', 'Height': '220', 'Enable': 'True', 'Visible': 'True', 'Font': '', 'Max': '180'}","VScrollBarWTA","usercontrol","[['change-value', 'self,value']]")
self.create("{'BackColor': '(1,1,1,1)', 'Text': 'Entry1', 'Left': '85', 'Width': '170', 'ParentsType': '', 'Alignment': '', 'Name': 'Entry1', 'Tag': '', 'Top': '195', 'ForeColor': '(0,0,0,1)', 'Events': '[]', 'Picture': '', 'Height': '30', 'Enable': 'True', 'Visible': 'True', 'Font': ''}","Entry","usercontrol","[]")
def Widget(self):
if self._usercontrol in self._mainlayout.get_children():
self._mainlayout.remove(self._usercontrol)
return self._usercontrol
def Hide(self):
self._window.hide()
def Show(self,modal=False,x=None,y=None):
if x!=None:
self._window.move(x,y)
if modal and self.caller!=None:
self._window.set_transient_for(self.caller._window)
self._window.set_modal(modal)
self._window.show()
Gtk.main()
return ""#put ur return value here upon closing this form
def VScrollBarWTA1_change_value(self,value):
self.Entry1.Text=int(float(value))
self.ctlServo1.write(self.Entry1.Text)
pass
if __name__ == "__main__":
_m = Handler()
_m._window.show()
Gtk.main() | [
11748,
308,
72,
201,
11748,
640,
201,
12397,
13,
46115,
62,
9641,
10786,
38,
30488,
3256,
705,
18,
13,
15,
11537,
201,
6738,
308,
72,
13,
260,
1930,
37765,
1330,
402,
30488,
11,
38,
10267,
11,
38,
34388,
11,
47,
14208,
11,
8763,
5... | 2.502409 | 1,453 |
from slam_recognition.constant_convolutions.center_surround import rgby_3
from slam_recognition.util.get_dimensions import get_dimensions
import tensorflow as tf
| [
6738,
21158,
62,
26243,
653,
13,
9979,
415,
62,
1102,
10396,
3508,
13,
16159,
62,
11793,
744,
1330,
48670,
1525,
62,
18,
198,
6738,
21158,
62,
26243,
653,
13,
22602,
13,
1136,
62,
27740,
5736,
1330,
651,
62,
27740,
5736,
198,
11748,
... | 3.395833 | 48 |
"""
Adapted from https://github.com/huggingface/transformers/blob/master/examples/run_generation.py
"""
import re
import torch
import logging
from typing import List
from collections import defaultdict
from transformers import GPT2Tokenizer, XLNetTokenizer, TransfoXLTokenizer, OpenAIGPTTokenizer
from transformers import GPT2LMHeadModel, XLNetLMHeadModel, TransfoXLLMHeadModel, OpenAIGPTLMHeadModel
logging.basicConfig(
format = '%(asctime)s - %(levelname)s - %(name)s - %(message)s',
datefmt = '%m/%d/%Y %H:%M:%S', level = logging.INFO)
logger = logging.getLogger(__name__)
# Padding text to help Transformer-XL and XLNet with short prompts as proposed by Aman Rusia
# in https://github.com/rusiaaman/XLNet-gen#methodology
# and https://medium.com/@amanrusia/xlnet-speaks-comparison-to-gpt-2-ea1a4e9ba39e
PADDING_TEXT = """ In 1991, the remains of Russian Tsar Nicholas II and his family
(except for Alexei and Maria) are discovered.
The voice of Nicholas's young son, Tsarevich Alexei Nikolaevich, narrates the
remainder of the story. 1883 Western Siberia,
a young Grigori Rasputin is asked by his father and a group of men to perform magic.
Rasputin has a vision and denounces one of the men as a horse thief. Although his
father initially slaps him for making such an accusation, Rasputin watches as the
man is chased outside and beaten. Twenty years later, Rasputin sees a vision of
the Virgin Mary, prompting him to become a priest. Rasputin quickly becomes famous,
with people, even a bishop, begging for his blessing. <eod> </s> <eos>"""
MODEL_CLASSES = {
'distilgpt2': (GPT2LMHeadModel, GPT2Tokenizer),
'gpt2': (GPT2LMHeadModel, GPT2Tokenizer),
'gpt2-medium': (GPT2LMHeadModel, GPT2Tokenizer),
'gpt2-large': (GPT2LMHeadModel, GPT2Tokenizer),
'gpt2-xl': (GPT2LMHeadModel, GPT2Tokenizer),
'openai-gpt': (OpenAIGPTLMHeadModel, OpenAIGPTTokenizer),
'xlnet-base-cased': (XLNetLMHeadModel, XLNetTokenizer),
'xlnet-large-cased': (XLNetLMHeadModel, XLNetTokenizer),
'transfo-xl-wt103': (TransfoXLLMHeadModel, TransfoXLTokenizer)
}
def init_model(model_name: str,
device: str):
"""
Initialize a pre-trained LM
:param model_name: from MODEL_CLASSES
:param device: CUDA / CPU device
:return: the model and tokenizer
"""
logger.info(f'Initializing {model_name}')
model_class, tokenizer_class = MODEL_CLASSES[model_name]
tokenizer = tokenizer_class.from_pretrained(model_name)
model = model_class.from_pretrained(model_name)
model.to(device)
model.eval()
return model, tokenizer
| [
37811,
198,
48003,
276,
422,
3740,
1378,
12567,
13,
785,
14,
71,
1018,
2667,
2550,
14,
35636,
364,
14,
2436,
672,
14,
9866,
14,
1069,
12629,
14,
5143,
62,
20158,
13,
9078,
198,
37811,
198,
11748,
302,
198,
11748,
28034,
198,
11748,
... | 2.775053 | 938 |
from .handlers import router as internal_router
__all__ = ["internal_router"]
| [
6738,
764,
4993,
8116,
1330,
20264,
355,
5387,
62,
472,
353,
198,
198,
834,
439,
834,
796,
14631,
32538,
62,
472,
353,
8973,
198
] | 3.291667 | 24 |
import pprint
# message
message = '''
Books and doors are the same thing books.
You open them, and you go through into another world.
'''
# split message to words into a list
words = message.split()
# define dictionary counter
count = {}
# traverse every word and accumulate
for word in words:
if not word[-1].isalpha():
word = word[:-1]
word = word.lower()
count.setdefault(word, 0)
count[word] +=1
# print
pprint.pprint(count) | [
11748,
279,
4798,
198,
198,
2,
3275,
198,
20500,
796,
705,
7061,
198,
30650,
290,
8215,
389,
262,
976,
1517,
3835,
13,
220,
198,
1639,
1280,
606,
11,
290,
345,
467,
832,
656,
1194,
995,
13,
198,
7061,
6,
198,
198,
2,
6626,
3275,
... | 3.098592 | 142 |
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""todo: add classes docstring."""
from __future__ import annotations
from dataclasses import dataclass
from clouddq.classes.rule_type import RuleType
| [
2,
15069,
33448,
3012,
11419,
198,
2,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
198,
2,
345,
743,
407,
779,
428,
2393,
2845,
287,
11846,
351,
262,
13789,
13,
198,
2,
921,
743,
733... | 3.762887 | 194 |
from dataclasses import dataclass
from typing import Iterable
from minsk.analysis.syntax.expression import ExpressionSyntax
from minsk.analysis.syntax.kind import SyntaxKind
from minsk.analysis.syntax.node import SyntaxNode
from minsk.analysis.syntax.token import SyntaxToken
| [
6738,
4818,
330,
28958,
1330,
4818,
330,
31172,
198,
6738,
19720,
1330,
40806,
540,
198,
198,
6738,
949,
8135,
13,
20930,
13,
1837,
41641,
13,
38011,
1330,
41986,
13940,
41641,
198,
6738,
949,
8135,
13,
20930,
13,
1837,
41641,
13,
11031... | 3.657895 | 76 |
#!/usr/bin/python
# -*- coding: iso-8859-1 -*-
import logging
from peachyprinter import config, PrinterAPI
import argparse
import os
import sys
import time
from Tkinter import *
from ui.main_ui import MainUI
if __name__ == "__main__":
if not os.path.exists(config.PEACHY_PATH):
os.makedirs(config.PEACHY_PATH)
parser = argparse.ArgumentParser("Configure and print with Peachy Printer")
parser.add_argument('-l', '--log', dest='loglevel', action='store', required=False, default="WARNING", help="Enter the loglevel [DEBUG|INFO|WARNING|ERROR] default: WARNING")
parser.add_argument('-c', '--console', dest='console', action='store_true', required=False, help="Logs to console not file")
parser.add_argument('-d', '--development', dest='devmode', action='store_true', required=False, help="Enable Developer Testing Mode")
args, unknown = parser.parse_known_args()
setup_logging(args)
if args.devmode:
config.devmode = True
if getattr(sys, 'frozen', False):
path = os.path.dirname(sys.executable)
else:
path = os.path.dirname(os.path.realpath(__file__))
app = PeachyPrinterTools(None, path)
app.title('Peachy Printer Tools')
app.mainloop()
| [
2,
48443,
14629,
14,
8800,
14,
29412,
198,
2,
532,
9,
12,
19617,
25,
47279,
12,
3459,
3270,
12,
16,
532,
9,
12,
198,
198,
11748,
18931,
198,
6738,
613,
35586,
1050,
3849,
1330,
4566,
11,
1736,
3849,
17614,
198,
11748,
1822,
29572,
... | 2.71116 | 457 |
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from env_variables import SQL_ALCHEMY_URL
_db_url_without_db = '/'.join(SQL_ALCHEMY_URL.split('/')[:-1])
engine = create_engine(f'{_db_url_without_db}', isolation_level='AUTOCOMMIT', echo=True)
Session = sessionmaker(engine)
if __name__ == '__main__':
create_database()
| [
6738,
44161,
282,
26599,
1330,
2251,
62,
18392,
198,
6738,
44161,
282,
26599,
13,
579,
1330,
6246,
10297,
198,
198,
6738,
17365,
62,
25641,
2977,
1330,
16363,
62,
1847,
3398,
3620,
56,
62,
21886,
198,
198,
62,
9945,
62,
6371,
62,
1941... | 2.75969 | 129 |
print(threeRailEncrypt("Vineet"))
| [
198,
4798,
7,
15542,
44631,
27195,
6012,
7203,
53,
500,
316,
48774,
198
] | 2.692308 | 13 |
import torch
from random import random
from torch.nn.utils.rnn import pad_sequence
from torch.utils.data import Dataset
def collate_fn(batch):
'''
Batch-wise preprocessing and padding.
:param batch: the current batch.
:returns: padded sources, targets, alignments
stacks and corresponding real lengths.
'''
sources, targets, alignments, \
stacks, stack_lens = zip(*batch)
src_lens = [len(src) for src in sources]
tgt_lens = [len(tgt) for tgt in targets]
source_pad = pad_sequence(sources, padding_value=0)
target_pad = pad_sequence(targets, padding_value=0)
align_pad = pad_sequence(alignments, padding_value=0)
max_stack_len = max(s.size(1) for s in stacks)
max_target_len = target_pad.size(0)
# Must be send to device.
stack_pad = torch.zeros(
[len(batch),
max_target_len,
max_stack_len]
).long()
for i in range(len(batch)):
stack = stacks[i]
stack_pad[i, :stack.size(0), :stack.size(1)] = stack
# Padding value is 1, for stacks that only contain
# start-of-sequence token. Ignored during forward pass
# since it corresponds to decoder padding targets.
stack_lens = pad_sequence(stack_lens, padding_value=1)
stack_lens = stack_lens.tolist()
return (source_pad,
target_pad,
src_lens,
tgt_lens,
align_pad,
stack_pad,
stack_lens)
| [
11748,
28034,
198,
198,
6738,
4738,
1330,
4738,
198,
6738,
28034,
13,
20471,
13,
26791,
13,
81,
20471,
1330,
14841,
62,
43167,
198,
6738,
28034,
13,
26791,
13,
7890,
1330,
16092,
292,
316,
628,
198,
4299,
2927,
378,
62,
22184,
7,
4350... | 2.3281 | 637 |
from . import StocklabObject
| [
6738,
764,
1330,
10500,
23912,
10267,
198
] | 4.142857 | 7 |
"""
A collection of routines for validating, santizing and otherwise messing
with content coming in from the web to be :py:class:`tiddlers
<tiddlyweb.model.tiddler.Tidder>`, :py:class:`bags
<tiddlyweb.model.bag.Bag>` or :py:class:`recipes
<tiddlyweb.model.recipe.Recipe>`.
The validators can be extended by adding functions to the ``BAG_VALIDATORS``,
``RECIPE_VALIDATORS`` and ``TIDDLER_VALIDATORS``. The functions take an
entity object, and an optional WSGI ``environ`` dict.
"""
def sanitize_desc(entity, environ):
"""
Strip any dangerous HTML which may be present in a :py:class:`bag
<tiddlyweb.model.bag.Bag>` or :py:class:`recipe
<tiddlyweb.model.recipe.Recipe>` description.
"""
desc = entity.desc
entity.desc = sanitize_html_fragment(desc)
BAG_VALIDATORS = [
sanitize_desc,
]
TIDDLER_VALIDATORS = []
RECIPE_VALIDATORS = [
sanitize_desc,
]
def validate_tiddler(tiddler, environ=None):
"""
Pass the :py:class:`tiddler <tiddlyweb.model.tiddler.Tiddler>`
to each of the functions in ``TIDDLER_VALIDATORS``, in order,
either changing the content of the tiddler's attributes, or if
some aspect of the tiddler can not be accepted raising
:py:class:`InvalidTiddlerError`.
``TIDDLER_VALIDATORS`` is an empty list which may be extended
by plugins.
``validate_tiddler`` is called from :py:mod:`web handlers
<tiddlyweb.web.handler>`, when the ``accept`` constraint on
the :py:class:`policy <tiddlyweb.model.policy.Policy>` of the
:py:class:`bag <tiddlyweb.model.bag.Bag>` containing the
tiddler does not pass.
"""
_validate(tiddler, environ, TIDDLER_VALIDATORS)
def validate_bag(bag, environ=None):
"""
Pass the :py:class:`bag <tiddlyweb.model.bag.Bag>` to each of
the functions in ``BAG_VALIDATORS``, in order, either changing
the content of the bags's attributes, or if some aspect of the
bag can not be accepted raising :py:class:`InvalidBagError`.
``BAG_VALIDATORS`` may be extended by plugins.
``validate_bag`` is called whenever a bag is ``PUT`` via HTTP.
"""
_validate(bag, environ, BAG_VALIDATORS)
def validate_recipe(recipe, environ=None):
"""
Pass the :py:class:`recipe <tiddlyweb.model.recipe.Recipe>` to
each of the functions in ``RECIPE_VALIDATORS``, in order, either
changing the content of the recipes's attributes, or if some aspect
of the recipe can not be accepted raising :py:class:`InvalidRecipeError`.
``RECIPE_VALIDATORS`` may be extended by plugins.
``validate_recipe`` is called whenever a recipe is ``PUT`` via HTTP.
"""
_validate(recipe, environ, RECIPE_VALIDATORS)
def _validate(entity, environ, validators):
"""
Validate the provided entity against the list of functions
in validators.
"""
if environ is None:
environ = {}
for validator in validators:
validator(entity, environ)
def sanitize_html_fragment(fragment):
"""
Santize an HTML ``fragment``, returning a copy of the fragment
that has been cleaned up.
"""
if fragment:
import html5lib
from html5lib.sanitizer import HTMLSanitizer
from html5lib.serializer.htmlserializer import HTMLSerializer
parser = html5lib.HTMLParser(tokenizer=HTMLSanitizer)
parsed = parser.parseFragment(fragment)
walker = html5lib.treewalkers.getTreeWalker('etree')
stream = walker(parsed)
serializer = HTMLSerializer(quote_attr_values=True,
omit_optional_tags=False)
output = serializer.render(stream)
return output
else:
return fragment
| [
37811,
198,
32,
4947,
286,
31878,
329,
4938,
803,
11,
264,
415,
2890,
290,
4306,
37241,
198,
4480,
2695,
2406,
287,
422,
262,
3992,
284,
307,
1058,
9078,
25,
4871,
25,
63,
83,
1638,
8116,
198,
27,
83,
1638,
306,
12384,
13,
19849,
... | 2.612259 | 1,403 |
# -*- coding: utf-8 -*-
"""
Created on Tue May 11 15:31:31 2021
:copyright:
Jared Peacock (jpeacock@usgs.gov)
:license: MIT
"""
from pathlib import Path
import pandas as pd
import numpy as np
import logging
from mth5.timeseries import ChannelTS, RunTS
from mt_metadata.timeseries import Station, Run
def to_run_ts(self, fn=None, e_channels=["e1", "e2"]):
"""
Return a RunTS object from the data
:param fn: DESCRIPTION, defaults to None
:type fn: TYPE, optional
:return: DESCRIPTION
:rtype: TYPE
"""
ch_list = []
for comp in (
["bx", "by", "bz"] + e_channels + ["temperature_e", "temperature_h"]
):
if comp[0] in ["h", "b"]:
ch = ChannelTS("magnetic")
elif comp[0] in ["e"]:
ch = ChannelTS("electric")
else:
ch = ChannelTS("auxiliary")
ch.sample_rate = self.sample_rate
ch.start = self.start
ch.ts = self._df[comp].values
ch.component = comp
ch_list.append(ch)
return RunTS(
array_list=ch_list,
station_metadata=self.station_metadata,
run_metadata=self.run_metadata,
)
# =============================================================================
# define the reader
# =============================================================================
def read_lemi424(fn, e_channels=["e1", "e2"], logger_file_handler=None):
"""
Read a LEMI 424 TXT file.
:param fn: input file name
:type fn: string or Path
:param e_channels: A list of electric channels to read,
defaults to ["e1", "e2"]
:type e_channels: list of strings, optional
:return: A RunTS object with appropriate metadata
:rtype: :class:`mth5.timeseries.RunTS`
"""
txt_obj = LEMI424()
if logger_file_handler:
txt_obj.logger.addHandler(logger_file_handler)
txt_obj.read(fn)
return txt_obj.to_run_ts(e_channels=e_channels)
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
37811,
198,
41972,
319,
30030,
1737,
1367,
1315,
25,
3132,
25,
3132,
33448,
198,
198,
25,
22163,
4766,
25,
220,
198,
220,
220,
220,
19116,
2631,
330,
735,
357,
73,
431... | 2.284761 | 899 |
import abc
import logging
import os
import pickle
from collections import Counter
from datetime import datetime
from typing import List, Union
import numpy as np
_logger = logging.getLogger(__name__)
class StatelessTransformation(Transformation, abc.ABC):
class WhitespaceTokenizer(StatelessTransformation):
class CategoricalFeature(Transformation):
UNK = "unk"
class ToList(StatelessTransformation):
class Lowercase(StatelessTransformation):
class DateTransformer(StatelessTransformation):
class Scale(StatelessTransformation):
class PositionEncoder(StatelessTransformation):
class Encoder(object):
| [
11748,
450,
66,
198,
11748,
18931,
198,
11748,
28686,
198,
11748,
2298,
293,
198,
6738,
17268,
1330,
15034,
198,
6738,
4818,
8079,
1330,
4818,
8079,
198,
6738,
19720,
1330,
7343,
11,
4479,
198,
198,
11748,
299,
32152,
355,
45941,
198,
1... | 3.654971 | 171 |
"""Initialize unittest."""
| [
37811,
24243,
1096,
555,
715,
395,
526,
15931,
198
] | 3 | 9 |
import pytest
from pytest import param as p
from anglicize import anglicize, build_mapping
| [
11748,
12972,
9288,
198,
6738,
12972,
9288,
1330,
5772,
355,
279,
198,
198,
6738,
3550,
677,
1096,
1330,
3550,
677,
1096,
11,
1382,
62,
76,
5912,
628,
198
] | 3.357143 | 28 |
import baopig as bp
import images as im
# TODO : a city defines the style, a district defines the content | [
198,
198,
11748,
26605,
404,
328,
355,
275,
79,
198,
11748,
4263,
355,
545,
628,
198,
2,
16926,
46,
1058,
257,
1748,
15738,
262,
3918,
11,
257,
4783,
15738,
262,
2695
] | 3.516129 | 31 |
from datetime import datetime
from flask import render_template, flash, redirect, url_for, request
from flask_login import login_user, logout_user, current_user, login_required
from werkzeug.urls import url_parse
from app import app, db
from app.forms import LoginForm, RegistrationForm, EditProfileForm, PostForm, \
ResetPasswordRequestForm, ResetPasswordForm, EditPostForm
from app.models import User, Post
from app.email import send_password_reset_email
| [
6738,
4818,
8079,
1330,
4818,
8079,
201,
198,
6738,
42903,
1330,
8543,
62,
28243,
11,
7644,
11,
18941,
11,
19016,
62,
1640,
11,
2581,
201,
198,
6738,
42903,
62,
38235,
1330,
17594,
62,
7220,
11,
2604,
448,
62,
7220,
11,
1459,
62,
72... | 3.067485 | 163 |
# -*- coding: utf-8 -*-
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
success = True
wd = webdriver.Firefox()
wait = WebDriverWait(wd, 22)
try:
wd.get("https://www.python.org/")
wd.find_element_by_css_selector("button").click()
wd.find_element_by_link_text("Success Stories").click()
wd.find_element_by_css_selector("button").click()
wd.find_element_by_link_text("About").click()
wd.find_element_by_css_selector("button").click()
wd.find_element_by_link_text("Docs").click()
wd.find_element_by_link_text("Audio/Visual Talks").click()
wd.find_element_by_css_selector("button").click()
wd.find_element_by_link_text("PyPI").click()
wd.find_element_by_link_text("Log in").click()
wd.find_element_by_id("username").click()
wd.find_element_by_id("username").clear()
wd.find_element_by_id("username").send_keys("oliver")
wd.find_element_by_id("password").click()
wd.find_element_by_id("password").clear()
wd.find_element_by_id("password").send_keys("Vbhjy_30")
wd.find_element_by_css_selector("input.button.button--primary").click()
wd.find_element_by_id("search").click()
wd.find_element_by_id("search").clear()
wd.find_element_by_id("search").send_keys("python")
wd.find_element_by_css_selector("button.search-form__button").click()
wd.find_element_by_xpath("//form[@id='classifiers']//button[.='Topic']").click()
wd.find_element_by_xpath("//form[@id='classifiers']//button[.='Topic']").click()
wd.find_element_by_id("search").click()
wd.find_element_by_id("search").clear()
wd.find_element_by_id("search").send_keys("pytest")
wd.find_element_by_css_selector("button.search-form__button").click()
wd.find_element_by_css_selector("button.horizontal-menu__link.dropdown__trigger").click()
wd.find_element_by_css_selector("button.dropdown__link").click()
print("Everything is Ok")
finally:
wd.quit()
if not success:
raise Exception("Test failed.")
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
6738,
384,
11925,
1505,
1330,
3992,
26230,
198,
6738,
384,
11925,
1505,
13,
12384,
26230,
13,
11284,
13,
9019,
1330,
5313,
32103,
21321,
198,
198,
13138,
796,
6407,
198,
... | 2.477356 | 817 |
import logging
import re
from datetime import timezone
import pendulum
from aioscheduler import TimedScheduler
from dateparser import parse
from discord.ext import commands
from discord.ext.menus import MenuPages
import db
from cogs import CustomCog, AinitMixin
from cogs.Logging import log_usage
from const import UNICODE_EMOJI
from menu import ReminderListSource, SimpleConfirm
from models import Reminder
from util import has_passed, auto_help, safe_send
logger = logging.getLogger(__name__)
| [
11748,
18931,
198,
11748,
302,
198,
6738,
4818,
8079,
1330,
640,
11340,
198,
198,
11748,
44017,
14452,
198,
6738,
257,
4267,
1740,
18173,
1330,
5045,
276,
50,
1740,
18173,
198,
6738,
3128,
48610,
1330,
21136,
198,
6738,
36446,
13,
2302,
... | 3.462069 | 145 |
import json
import base64
import os
import boto3
import zlib
# Used for decryption of the received payload
import aws_encryption_sdk
from aws_encryption_sdk import CommitmentPolicy
from aws_encryption_sdk.internal.crypto import WrappingKey
from aws_encryption_sdk.key_providers.raw import RawMasterKeyProvider
from aws_encryption_sdk.identifiers import WrappingAlgorithm, EncryptionKeyType
import processor.heartbeat_processor as heartbeat_processor
import processor.sqlevents_processor as sqlevents_processor
from processor import heartbeat_processor
from processor import sqlevents_processor
# Controls the filtering of Heartbean events
FILTER_HEARTBEAT_EVENTS = os.getenv('FILTER_HEARTBEAT_EVENTS', "false").lower() == "true"
# Setup the session | clients
REGION_NAME= os.environ['AWS_REGION']
session = boto3.session.Session()
kms = session.client('kms', region_name=REGION_NAME)
# Create the encryption client
enc_client = aws_encryption_sdk.EncryptionSDKClient(commitment_policy=CommitmentPolicy.REQUIRE_ENCRYPT_ALLOW_DECRYPT)
# Represents the Master Key Provider
# Decrypt the payload using the key and then decompress (zip to plaintext)
def decrypt_decompress(payload, key):
my_key_provider = MyRawMasterKeyProvider(key)
my_key_provider.add_master_key("DataKey")
decrypted_plaintext, header = enc_client.decrypt(
source=payload,
materials_manager=aws_encryption_sdk.materials_managers.default.DefaultCryptoMaterialsManager(master_key_provider=my_key_provider))
# print(decrypted)
return zlib.decompress(decrypted_plaintext, zlib.MAX_WBITS + 16)
# Lambda Handler function
| [
11748,
33918,
198,
11748,
2779,
2414,
198,
11748,
28686,
198,
11748,
275,
2069,
18,
198,
11748,
1976,
8019,
198,
198,
2,
16718,
329,
875,
13168,
286,
262,
2722,
21437,
198,
11748,
3253,
82,
62,
12685,
13168,
62,
21282,
74,
198,
6738,
... | 3.048417 | 537 |
from django.contrib import admin
from .models import Location
admin.site.register(Location, LocationAdmin)
| [
6738,
42625,
14208,
13,
3642,
822,
1330,
13169,
198,
198,
6738,
764,
27530,
1330,
13397,
628,
198,
198,
28482,
13,
15654,
13,
30238,
7,
14749,
11,
13397,
46787,
8,
198
] | 3.7 | 30 |
# -*- coding: utf-8 -*-
# @Date : 2022/4/13 12:07
# @Author : WangYihao
# @File : __init__.py.py
from SimCam.simcam import SimCam
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
2,
2488,
10430,
220,
220,
220,
1058,
33160,
14,
19,
14,
1485,
1105,
25,
2998,
198,
2,
2488,
13838,
220,
1058,
15233,
56,
72,
23778,
198,
2,
2488,
8979,
220,
220,
220... | 2.090909 | 66 |
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
| [
6738,
220,
844,
27349,
62,
2118,
9078,
13,
8692,
1330,
7308,
198,
6738,
220,
844,
27349,
62,
2118,
9078,
13,
16624,
1330,
13283,
628
] | 3.375 | 24 |
# Generated by Django 4.0.2 on 2022-02-16 14:11
from django.db import migrations
| [
2,
2980,
515,
416,
37770,
604,
13,
15,
13,
17,
319,
33160,
12,
2999,
12,
1433,
1478,
25,
1157,
198,
198,
6738,
42625,
14208,
13,
9945,
1330,
15720,
602,
628
] | 2.766667 | 30 |
import cv2
import numpy as np
from skimage import exposure as ex
from skimage import data
from PIL import Image
import skfuzzy as fuzz
import math
import timeit
import time
'''
Histogram equalization with colour YCR_CB and histogram equalization only on Y
@img: the image to modify
@return: the image with the histogram equalized
'''
'''
Histogram equalization with colour YCR_CB and histogram equalization only on Y
@img: the image to modify
@return: the image with the histogram equalized
'''
# Histrogram equalization from
# https://github.com/AndyHuang1995/Image-Contrast-Enhancement/blob/master/he.py
'''
Histogram equalization equalizing every colour
@img: the image to modify
@return: the image with the histogram equalized
'''
'''
Gamma correction
@imgage: the image to modify
@gamma: the gamma value, 1.0 does nothing
@return: the image with the gamma corrected
'''
# Adaptive gamma correction based on the reference.
# Reference:
# S. Huang, F. Cheng and Y. Chiu, "Efficient Contrast Enhancement Using Adaptive Gamma Correction With
# Weighting Distribution," in IEEE Transactions on Image Processing, vol. 22, no. 3, pp. 1032-1041,
# March 2013. doi: 10.1109/TIP.2012.2226047
# Revised from https://github.com/mss3331/AGCWD/blob/master/AGCWD.m
#from https://github.com/qyou/AGCWD/blob/master/agcwd.py
import numpy as np
import cv2
'''
Adaptive gamma correction with Weighting Distribution
@image: the image to modify
@w: the weight distribution
@return: the image with the gamma corrected
'''
# Then we sould have from https://github.com/AndyHuang1995/Image-Contrast-Enhancement/blob/master/ying.py
# from https://www.programcreek.com/python/example/89353/cv2.createCLAHE,
# CLAHE (Contrast-limited adaptive histogram equalization)
'''
Function that apply CLAHE (Contrast-limited adaptive histogram equalization)
to every channel of the image
@imgage: the image to modify
@return: the image with the histrogram corrected
'''
# RETINEX from https://github.com/dongb5/Retinex/blob/master/
'''
Function that apply MSRCP (Multi Scale Retinex
@img: the image to modify
@sigma_list: the list of the sigma, by default [15,80,250]
@return: the image with the histrogram corrected
'''
| [
11748,
269,
85,
17,
198,
11748,
299,
32152,
355,
45941,
198,
6738,
1341,
9060,
1330,
7111,
355,
409,
198,
6738,
1341,
9060,
1330,
1366,
198,
6738,
350,
4146,
1330,
7412,
198,
11748,
1341,
69,
4715,
88,
355,
26080,
198,
11748,
10688,
1... | 3.170697 | 703 |
#!/usr/bin/env python
"""
make_known_good_cice_masks.py
Copy known good CICE masks for use in fixing the HadGEM CICE masks.
"""
import os
import numpy as np
from netCDF4 import Dataset
OUTPUT_DIR = "/gws/nopw/j04/primavera1/masks/HadGEM3Ocean_fixes/cice_masks"
def main():
"""main entry"""
rootgrp = Dataset(os.path.join(OUTPUT_DIR, "primavera_cice_orca1_uv.nc"),
"w", format="NETCDF3_CLASSIC")
print(os.path.join(OUTPUT_DIR, "primavera_cice_orca1_uv.nc"))
mask = np.zeros((330, 360))
mask[-1, 180:] += 1
_i = rootgrp.createDimension('i', 360)
_j = rootgrp.createDimension('j', 330)
mask_variable = rootgrp.createVariable('mask', 'i4', ('j', 'i'))
mask_variable.units = '1'
mask_variable[:] = mask
rootgrp.close()
print(os.path.join(OUTPUT_DIR, "primavera_cice_orca025_t.nc"))
rootgrp = Dataset(os.path.join(OUTPUT_DIR, "primavera_cice_orca025_t.nc"),
"w", format="NETCDF3_CLASSIC")
mask = np.zeros((1205, 1440))
mask[-1, 720:] += 1
_i = rootgrp.createDimension('i', 1440)
_j = rootgrp.createDimension('j', 1205)
mask_variable = rootgrp.createVariable('mask', 'i4', ('j', 'i'))
mask_variable.units = '1'
mask_variable[:] = mask
rootgrp.close()
print(os.path.join(OUTPUT_DIR, "primavera_cice_orca12_t.nc"))
rootgrp = Dataset(os.path.join(OUTPUT_DIR, "primavera_cice_orca12_t.nc"),
"w", format="NETCDF3_CLASSIC")
mask = np.zeros((3604, 4320))
mask[-1, 2160:] += 1
_i = rootgrp.createDimension('i', 4320)
_j = rootgrp.createDimension('j', 3604)
mask_variable = rootgrp.createVariable('mask', 'i4', ('j', 'i'))
mask_variable.units = '1'
mask_variable[:] = mask
rootgrp.close()
if __name__ == "__main__":
main()
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
37811,
198,
15883,
62,
4002,
62,
11274,
62,
66,
501,
62,
5356,
591,
13,
9078,
198,
198,
29881,
1900,
922,
327,
8476,
20680,
329,
779,
287,
18682,
262,
11161,
38,
3620,
327,
8476,
206... | 2.097222 | 864 |
from datetime import timedelta | [
6738,
4818,
8079,
1330,
28805,
12514
] | 5 | 6 |
# -*- coding: utf-8 -*-
import logging
import configparser
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
11748,
18931,
198,
11748,
4566,
48610,
628
] | 2.857143 | 21 |
import os
import subprocess
from dotenv import load_dotenv
import pymongo
from pymongo import MongoClient
from pymongo.cursor import Cursor
from pymongo.errors import DuplicateKeyError, BulkWriteError
from util.args import Args
load_dotenv()
| [
11748,
28686,
198,
11748,
850,
14681,
198,
6738,
16605,
24330,
1330,
3440,
62,
26518,
24330,
198,
198,
11748,
279,
4948,
25162,
198,
6738,
279,
4948,
25162,
1330,
42591,
11792,
198,
6738,
279,
4948,
25162,
13,
66,
21471,
1330,
327,
21471,... | 3.369863 | 73 |
from enum import Enum
from typing import Union
from iqa_common.executor import Command, Execution, ExecutorAnsible, CommandAnsible, ExecutorContainer, \
CommandContainer, Executor
from iqa_common.utils.docker_util import DockerUtil
from messaging_abstract.component import Service, ServiceStatus
import logging
| [
6738,
33829,
1330,
2039,
388,
198,
6738,
19720,
1330,
4479,
198,
198,
6738,
1312,
20402,
62,
11321,
13,
18558,
38409,
1330,
9455,
11,
37497,
11,
8393,
38409,
2025,
82,
856,
11,
9455,
2025,
82,
856,
11,
8393,
38409,
29869,
11,
3467,
19... | 3.819277 | 83 |
import numpy as np
from ..mixins import Preprocessor, AlwaysPredictPlotter, AdvantageEstimator
from warnings import warn
| [
11748,
299,
32152,
355,
45941,
198,
6738,
11485,
19816,
1040,
1330,
3771,
41341,
11,
16622,
47,
17407,
43328,
353,
11,
45318,
22362,
320,
1352,
198,
6738,
14601,
1330,
9828,
628
] | 4.066667 | 30 |
import logging
logging.warning('warning message')
logging.error('This is an error message')
logging.critical('This is a critical error message')
| [
198,
11748,
18931,
198,
198,
6404,
2667,
13,
43917,
10786,
43917,
3275,
11537,
198,
6404,
2667,
13,
18224,
10786,
1212,
318,
281,
4049,
3275,
11537,
198,
6404,
2667,
13,
34666,
10786,
1212,
318,
257,
4688,
4049,
3275,
11537,
198
] | 3.769231 | 39 |
from textblob import TextBlob
a = str(input("enter your word to check spell")
_b = TextBlob(a)
print (_b.correct())
# from textblob import Textblob
#mylst = ["firt","clor"]
#correct_list = []
#for word in mylst:
# correct_list.append(TextBlob())
#
#for word in correct_list:
# print (word.correct())
| [
6738,
2420,
2436,
672,
1330,
8255,
3629,
672,
220,
198,
64,
796,
965,
7,
15414,
7203,
9255,
534,
1573,
284,
2198,
4822,
4943,
198,
62,
65,
796,
8255,
3629,
672,
7,
64,
8,
198,
4798,
44104,
65,
13,
30283,
28955,
198,
198,
2,
422,
... | 2.643478 | 115 |
from __future__ import division
import numpy as np
import scipy as sp
import matplotlib as mpl
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from scipy.special import jn, jn_zeros
import subprocess
# Define polar and cartesian coordinates for the drum.
theta = np.r_[0:2*np.pi:50j]
radius = np.r_[0:1:50j]
x = np.array([r*np.cos(theta) for r in radius])
y = np.array([r*np.sin(theta) for r in radius])
radial_nodes = 2
zeros = 2
# Define the base plot.
fig = plt.figure(num=None,figsize=(16,16),dpi=120,facecolor='w',edgecolor='k')
ax = list()
# Loop over the desired angular nodes.
cnt = 0
pixcnt = 0
plt.ion()
for t in np.r_[0:2*np.pi:40j]:
cnt = 0
pixcnt += 1
for i in np.r_[0:radial_nodes+1:1]:
for j in np.r_[1:zeros+1:1]:
cnt += 1;
ax.append(fig.add_subplot(radial_nodes+1,zeros,cnt,projection='3d'))
z = np.array([drumhead_height(i, j, r, theta, t) for r in radius])
ax[-1].set_xlabel('R@%d,A@%d' % (i,j))
ax[-1].plot_surface(x,y,z,rstride=1,cstride=1,cmap=mpl.cm.Accent,linewidth=0,vmin=-1,vmax=1)
ax[-1].set_zlim(-1,1)
plt.savefig('./drum-modes-%d.png' % pixcnt, format='png')
# Collate pictures to an animated GIF.
import os,string
cwd = os.getcwd()
cmd = 'cd %s; ls drum-modes*.png | sort -k1.12n'%cwd
png_files = os.popen(cmd)
png_files_list = string.join(png_files.readlines()).replace('\n',' ')
os.popen('convert -delay 10 -loop 1 %s ./drum-animate.gif'%png_files_list)
| [
6738,
11593,
37443,
834,
1330,
7297,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
629,
541,
88,
355,
599,
198,
11748,
2603,
29487,
8019,
355,
285,
489,
198,
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
198,
6738,
285,
4... | 2.06694 | 732 |
import scrapy
from bs4 import BeautifulSoup
import requests
from QB5.pipelines import dbHandle
from QB5.items import Qb5Item
| [
11748,
15881,
88,
198,
6738,
275,
82,
19,
1330,
23762,
50,
10486,
198,
11748,
7007,
198,
6738,
16135,
20,
13,
79,
541,
20655,
1330,
20613,
37508,
198,
6738,
16135,
20,
13,
23814,
1330,
1195,
65,
20,
7449,
198
] | 3.289474 | 38 |
"""
:mod: 'BookDatabaseUtility'
~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. py:module:: BookDatabaseUtility
:copyright: Copyright BitWorks LLC, All rights reserved.
:license: MIT
:synopsis: SQLAlchemy ORM engine, metadata, and utility functions for working with dynamic sqlite databases
:description: Contains the following functions:
makeEntityTable - creates table 'entities' - columns = entity_cik, parent_cik, entity_name
makeFilingsTable - creates table 'filings####' - columns = entity_cik, q1, q2, q3, q4
getAllTables - returns a list of all SQLAlchemy Table objects
tableExists - determines whether a given table name exists in the database
getEntityTreeInfo - returns list of tuples, where each tuple is a row [(entity_cik, parent_cik, entity_name)]
getNameFromCik - uses a given cik to get an entity_name from the database
updateEntityParent - updates the parent cik of a given child cik; used when user alters entity tree view hierarchy
getEntityDict - returns a dict of the format {entity_name:entity_cik}, for all entities in database
getFilingTreeInfo - returns list of strings, where each string corresponds to a filing available for viewing
selectFromDatabase - given a cik and filing period, selects a Filing object from the database
existsInDatabase - determines whether a given filing exists in the database
manualExistsInDatabase - determines whether a given filing exists in the database, with input from user
addToEntitiesTable - updates 'entities' table to include a given entity, if not present
addToFilingsTable - updates a 'filings####' table to include a given filing, if not present
addToDatabase - adds a given fact file to the database in the form of a pickled Filing object
manualAddToDatabase - adds a given fact file to the database in the form of a pickled Filing object, with input from user
countEntityAndChildren - determines the breadth and depth of an entity tree in the database, used for status bar updates
removeEntityFromDatabase - removes a given entity (and all its children) from the database; currently an expensive function
removeFilingFromDatabase - removes a given filing item (and all its children) from the database; currently also expensive
updateEntityName - updates the name of an entity to that disclosed in the latest available filing
getLastFiling - returns the latest filing for a particular entity
renameEntityInDatabase(target_cik, new_entity_name) - manual replacement of the entity name with new_entity_name in the database
"""
try:
import pickle, sys, os, datetime, logging
database_utility_logger = logging.getLogger()
from sqlalchemy import (create_engine, Table, Column, Integer, String, PickleType)
from sqlalchemy.schema import MetaData
from sqlalchemy.pool import NullPool
# Tiered
# from . import (BookFilingUtility)
# Flat
import BookFilingUtility
except Exception as err:
database_utility_logger.error("{0}:BookDatabaseUtility import error:{1}".format(str(datetime.datetime.now()), str(err)))
| [
37811,
198,
25,
4666,
25,
705,
10482,
38105,
18274,
879,
6,
198,
27156,
15116,
4907,
93,
198,
198,
492,
220,
12972,
25,
21412,
3712,
4897,
38105,
18274,
879,
198,
220,
220,
220,
1058,
22163,
4766,
25,
15069,
4722,
23044,
11419,
11,
14... | 3.396166 | 939 |
"""
Module for math and statistics related functions.
"""
| [
37811,
198,
26796,
329,
10688,
290,
7869,
3519,
5499,
13,
198,
37811,
198
] | 4.461538 | 13 |
import itertools
import numpy as np
import constants
from utils import file
if __name__ == '__main__':
train_and_evaluate(constants.DATASET_NAME_TPL.format('100_no_singles'), 100)
#train_and_evaluate(constants.DATASET_NAME_TPL.format('50_no_singles'), 50)
| [
11748,
340,
861,
10141,
198,
11748,
299,
32152,
355,
45941,
198,
198,
11748,
38491,
198,
6738,
3384,
4487,
1330,
2393,
628,
198,
198,
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834,
10354,
198,
220,
220,
220,
4512,
62,
392,
62,
497... | 2.601942 | 103 |
ACCURACY :::62.86377259982597 | [
198,
15859,
4261,
43300,
1058,
3712,
5237,
13,
4521,
26514,
1495,
34808,
1495,
5607
] | 2.214286 | 14 |
import bsddb.db as bdb
import os.path
import cPickle
from base64 import b64encode, b64decode
from struct import pack
| [
198,
11748,
275,
82,
1860,
65,
13,
9945,
355,
275,
9945,
198,
11748,
28686,
13,
6978,
198,
11748,
269,
31686,
293,
198,
6738,
2779,
2414,
1330,
275,
2414,
268,
8189,
11,
275,
2414,
12501,
1098,
198,
6738,
2878,
1330,
2353,
198
] | 2.878049 | 41 |
# Copyright 2022 AI Singapore
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from peekingduck.pipeline.nodes.dabble.check_large_groups import Node
| [
2,
15069,
33160,
9552,
12551,
198,
2,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
198,
2,
345,
743,
407,
779,
428,
2393,
2845,
287,
11846,
351,
262,
13789,
13,
198,
2,
921,
743,
733... | 3.694444 | 180 |
# =============================================================================
# Copyright (c) 2021 SeisSpark (https://github.com/kdeyev/SeisSpark).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
import os
from zipfile import ZipFile
import pyspark
from pyspark.sql import SparkSession
| [
2,
38093,
25609,
198,
2,
15069,
357,
66,
8,
33448,
1001,
271,
4561,
668,
357,
5450,
1378,
12567,
13,
785,
14,
74,
2934,
88,
1990,
14,
4653,
271,
4561,
668,
737,
198,
2,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
... | 4.243902 | 205 |
from unittest import TestCase
from unittest import TestSuite
from unittest import main
from unittest import makeSuite
from mwstools.parsers.notifications import Notification
__all__ = [
TestNotification
]
if __name__ == '__main__':
main(defaultTest='suite')
| [
6738,
555,
715,
395,
1330,
6208,
20448,
198,
6738,
555,
715,
395,
1330,
6208,
5606,
578,
198,
6738,
555,
715,
395,
1330,
1388,
198,
6738,
555,
715,
395,
1330,
787,
5606,
578,
198,
198,
6738,
285,
86,
301,
10141,
13,
79,
945,
364,
... | 3 | 91 |
import json
from pathlib import Path
from typing import Union
import jsonschema
import yaml
DEFAULT_CONFIG_PATH = Path(__file__).parent / "settings.yaml"
config = Configuration()
config.load(DEFAULT_CONFIG_PATH)
| [
11748,
33918,
198,
6738,
3108,
8019,
1330,
10644,
198,
6738,
19720,
1330,
4479,
198,
198,
11748,
44804,
684,
2395,
2611,
198,
11748,
331,
43695,
628,
198,
198,
7206,
38865,
62,
10943,
16254,
62,
34219,
796,
10644,
7,
834,
7753,
834,
737... | 3.176471 | 68 |
from .Event import Event
| [
6738,
764,
9237,
1330,
8558,
198
] | 4.166667 | 6 |
import nltk
from nltk.chunk.regexp import ChunkString, ChunkRule, ChinkRule
from nltk.tree import Tree
from nltk.chunk import RegexpParser
from nltk.corpus import conll2000
from nltk.tag import NgramTagger
#class for Unigram Chunking
#class for Bigram Chunking
#class for Ngram Chunking
#Rule-based chunking
def regexp_chunk():
#define rules here
grammar = r"""NP: {<DT|PDT|CD|PRP\$>?<JJ>*<N.*>+}
VP: {<V.*>+<TO>?<V.*>*}
PP: {<IN>+}
"""
cp = nltk.RegexpParser(grammar)
return(cp)
#train Unigram chunker on conll2000 dataset
#train Bigram chunker on conll2000 dataset
#train Ngram chunker on conll2000 dataset
#Call best performing chunker
if __name__ == '__main__':
regexp_chunker = regexp_chunk()
unigram_chunker = ngram_chunk(1)
bigram_chunker = ngram_chunk(2)
trigram_chunker = ngram_chunk(3)
fourgram_chunker = ngram_chunk(4)
fivegram_chunker = ngram_chunk(5)
"""
phrase = "My yellow dog has been asking to eat the whole day because of hunger"
text = nltk.word_tokenize(phrase)
tags = nltk.pos_tag(text)
print(regexp_chunker.parse(tags))
print(unigram_chunker.parse(tags))
print(bigram_chunker.parse(tags))
"""
test_sents = conll2000.chunked_sents('test.txt')
print(regexp_chunker.evaluate(test_sents))
print(unigram_chunker.evaluate(test_sents))
print(bigram_chunker.evaluate(test_sents))
print(trigram_chunker.evaluate(test_sents))
print(fourgram_chunker.evaluate(test_sents))
print(fivegram_chunker.evaluate(test_sents))
"""
phrase = "play football and watch netflix"
text = nltk.word_tokenize(phrase)
tags = nltk.pos_tag(text)
chunks = split_phrases(tags)
print(chunks)
"""
"""
for chunk in chunks:
if type(chunk) is nltk.Tree:
for word,tag in chunk:
print(word)
else:
print(chunk[0])
""" | [
11748,
299,
2528,
74,
198,
6738,
299,
2528,
74,
13,
354,
2954,
13,
260,
25636,
79,
1330,
609,
2954,
10100,
11,
609,
2954,
31929,
11,
609,
676,
31929,
198,
6738,
299,
2528,
74,
13,
21048,
1330,
12200,
198,
6738,
299,
2528,
74,
13,
... | 2.377483 | 755 |
# Imports
from os.path import join, isfile
from os import remove, rmdir
from pysav import Save, does_save_exist, does_app_dir_exist, generate_environment_path
from utils.random_data import generate_dict
# Test
def test_answer():
"""Does Save work as expected"""
# Test Importing
# Create file to be imported
save = Save("python_test", "test_file")
save.data = generate_dict()
save.save_data()
# Create save to import into
save2 = Save("python_test", "test_file_two")
# Test merge mode 0 (Replace all data)
save2.import_data(save._save_file_path)
save2.save_data()
try:
assert save2.data == save.data
# Ensure that save is deleted in event of error
except AssertionError as excpt:
save.annihilate() # Delete file
save2.annihilate(True) # Delete file and folder
raise AssertionError from excpt
save.annihilate()
save2.annihilate(True)
# Test Exporting
save = Save("python_test", "test_file")
save.data = generate_dict()
save.export_data(generate_environment_path("DESKTOP"), True)
try:
assert does_save_exist("python_test", "test_file", environment="DESKTOP")
assert does_app_dir_exist("python_test", environment="DESKTOP")
remove(
join(
generate_environment_path("DESKTOP"),
save._app_name,
save._save_name + "." + save._extension,
)
)
rmdir(join(generate_environment_path("DESKTOP"), save._app_name))
assert not does_save_exist("python_test", "test_file", environment="DESKTOP")
assert not does_app_dir_exist("python_test", environment="DESKTOP")
save.export_data(generate_environment_path("DESKTOP"), False)
assert isfile(join(generate_environment_path("DESKTOP"), "test_file.json"))
assert not does_app_dir_exist("python_test", environment="DESKTOP")
remove(
join(
generate_environment_path("DESKTOP"),
save._save_name + "." + save._extension,
)
)
# Ensure that save is deleted in event of error
except AssertionError as excpt:
# Remove export if any
try:
remove(
join(
generate_environment_path("DESKTOP"),
save._app_name,
save._save_name + "." + save._extension,
)
)
except: pass
try:
rmdir(join(generate_environment_path("DESKTOP"), save._app_name))
except: pass
try:
remove(
join(
generate_environment_path("DESKTOP"),
save._save_name + "." + save._extension,
)
)
except: pass
# Delete file and folder
save.annihilate(True)
raise AssertionError from excpt
save.annihilate(True) # Delete file
| [
2,
1846,
3742,
198,
6738,
28686,
13,
6978,
1330,
4654,
11,
318,
7753,
198,
6738,
28686,
1330,
4781,
11,
374,
9132,
343,
198,
6738,
279,
893,
615,
1330,
12793,
11,
857,
62,
21928,
62,
38476,
11,
857,
62,
1324,
62,
15908,
62,
38476,
... | 2.169906 | 1,389 |
import doctest
import pytest
from insights.parsers import ansible_tower_settings, SkipException
from insights.tests import context_wrap
ANSIBLE_TOWER_CONFIG_CUSTOM = '''
AWX_CLEANUP_PATHS = False
LOGGING['handlers']['tower_warnings']['level'] = 'DEBUG'
'''.strip()
ANSIBLE_TOWER_CONFIG_CUSTOM_INVALID1 = '''
'''.strip()
ANSIBLE_TOWER_CONFIG_CUSTOM_INVALID2 = '''
AWX_CLEANUP_PATHS
'''.strip()
| [
11748,
10412,
395,
198,
11748,
12972,
9288,
198,
6738,
17218,
13,
79,
945,
364,
1330,
9093,
856,
62,
36170,
62,
33692,
11,
32214,
16922,
198,
6738,
17218,
13,
41989,
1330,
4732,
62,
37150,
628,
198,
15037,
34563,
62,
51,
36048,
62,
10... | 2.525316 | 158 |
import os
import json
from app.config import DATA_PATH
"""
_id: ID
date: eg "2020-02-06T15:24:59.942Z"
msg: eg ""
status: eg "" a emoji)
"""
if __name__ == '__main__':
DATA_PATH = '../../data'
with open('../../../../Node/data/zoneMsg.json', 'r') as f:
data = json.load(f)
data = data['data']
for i in data[::-1]:
add_zone(i)
| [
11748,
28686,
198,
11748,
33918,
198,
198,
6738,
598,
13,
11250,
1330,
42865,
62,
34219,
198,
198,
37811,
198,
62,
312,
25,
220,
220,
220,
220,
4522,
198,
4475,
25,
220,
220,
220,
220,
220,
220,
220,
29206,
366,
42334,
12,
2999,
12,... | 2.010101 | 198 |
import random
import pprint
import matplotlib.pyplot as plt
import numpy as np
from cells import *
pp = pprint.PrettyPrinter(indent=2)
random.seed(5)
def get_image_from_state(cells, time, debug=False):
"""
Generates an image from the cell states
"""
# print("time: ", time)
img = []
for rix, row in enumerate(cells):
img_row = []
for cix, col in enumerate(row):
img_row.append(col.color)
img.append(img_row)
if debug == True:
plt.imshow(np.array(img), origin='lower')
plt.show()
return img
| [
11748,
4738,
198,
11748,
279,
4798,
198,
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
198,
11748,
299,
32152,
355,
45941,
198,
6738,
4778,
1330,
1635,
198,
198,
381,
796,
279,
4798,
13,
35700,
6836,
3849,
7,
521,
298,
28,
... | 2.330677 | 251 |
#! /usr/bin/python3
import time
from events import *
q = QueueExecutor()
q.addEvent(test1, time.time() + 3, 1, 5, "foo", "bar", "baz")
q.addEvent(test1, time.time() + .5, .3, 20, "foo2", "bar")
print("Main thread asleep at %s" % (time.time(),))
time.sleep(6)
print("Main thread awake, terminating...")
q.stop() | [
2,
0,
1220,
14629,
14,
8800,
14,
29412,
18,
198,
11748,
640,
198,
6738,
2995,
1330,
1635,
198,
198,
80,
796,
4670,
518,
23002,
38409,
3419,
198,
198,
80,
13,
2860,
9237,
7,
9288,
16,
11,
640,
13,
2435,
3419,
1343,
513,
11,
352,
... | 2.496 | 125 |
from typing import Any, Callable, Dict, Iterable, Mapping, Tuple, TypeVar, Union, cast, overload
__all__ = ("extract_iterable_from_tuple", "is_iterable", "item_to_tuple", "mapping_merge")
KT = TypeVar("KT")
VT = TypeVar("VT")
T = TypeVar("T")
def item_to_tuple(item: Union[T, Iterable[T]]) -> Tuple[T, ...]: # noqa
if is_iterable(item):
return tuple(cast(Iterable[T], item))
return (cast(T, item),)
def extract_iterable_from_tuple( # noqa
tuple_to_extract: Union[Tuple[Iterable[T]], Tuple[T, ...]],
check: Callable[[Any], bool] = is_iterable,
) -> Iterable[T]:
if len(tuple_to_extract) == 1:
maybe_return = tuple_to_extract[0]
if check(maybe_return):
return cast(Iterable[T], maybe_return)
return cast(Iterable[T], tuple_to_extract)
| [
6738,
19720,
1330,
4377,
11,
4889,
540,
11,
360,
713,
11,
40806,
540,
11,
337,
5912,
11,
309,
29291,
11,
5994,
19852,
11,
4479,
11,
3350,
11,
31754,
198,
198,
834,
439,
834,
796,
5855,
2302,
974,
62,
2676,
540,
62,
6738,
62,
83,
... | 2.337176 | 347 |
import numpy as np
from spacetime.potential import Potential
| [
11748,
299,
32152,
355,
45941,
198,
6738,
34752,
8079,
13,
13059,
1843,
1330,
32480,
198
] | 4.066667 | 15 |
from typing import Tuple, List
from algosdk.v2client.algod import AlgodClient
from algosdk.future import transaction
from algosdk.logic import get_application_address
from algosdk import account, encoding
from pyteal import compileTeal, Mode, Keccak256
from tellorflex.methods import report
from utils.account import Account
from tellorflex.contracts import approval_program, clear_state_program
from utils.helpers import add_standalone_account, fund_account
from utils.util import (
waitForTransaction,
fullyCompileContract,
getAppGlobalState,
)
APPROVAL_PROGRAM = b""
CLEAR_STATE_PROGRAM = b""
if __name__ == "__main__":
s = setup()
app_id = s.deploy_tellor_flex(
query_id="hi",
query_data="hi",
)
s.stake() | [
6738,
19720,
1330,
309,
29291,
11,
7343,
198,
198,
6738,
435,
70,
418,
34388,
13,
85,
17,
16366,
13,
14016,
375,
1330,
978,
25344,
11792,
198,
6738,
435,
70,
418,
34388,
13,
37443,
1330,
8611,
198,
6738,
435,
70,
418,
34388,
13,
640... | 2.794118 | 272 |
####
#### Convert a TSV into a fully parsed JSON list blob that could be
#### used by a mustache (or other logicless) template.
####
#### Example usage to analyze the usual suspects:
#### python3 parse.py --help
####
#### Get report of current problems:
#### python3 parse-vocab-list.py --tsv ~/Downloads/UCSC_\ -\ \(4\).tsv --output /tmp/parsed-vocab-list.json
####
#### As part of a pipeline for vocab list:
#### python3 parse-vocab-list.py --tsv ~/Downloads/UCSC_\ -\ \(13\).tsv --output /tmp/parsed-vocab-list.json && python3 chapter-bin.py -v --input /tmp/parsed-vocab-list.json --output /tmp/chapters.json && python3 apply-to-chapters.py --input /tmp/chapters.json --template ./word-html-frame.template.html --output /tmp/chapter
####
#### As part of a pipeline for glossary:
#### python3 parse-vocab-list.py --tsv ~/Downloads/UCSC_\ -\ \(13\).tsv --output /tmp/parsed-vocab-list.json && python3 jalphabetical-bin.py --pattern vocab-list --input /tmp/parsed-vocab-list.json --output /tmp/jalphed-vocab-list.json && python3 apply-globally.py --input /tmp/jalphed-vocab-list.json --template ./manual-glossary.template.html --output /tmp/glossary.html
####
import sys
import argparse
import logging
import csv
import pystache
import json
import os
## Logger basic setup.
logging.basicConfig(level=logging.INFO)
LOGGER = logging.getLogger('parse')
LOGGER.setLevel(logging.WARNING)
def die_screaming(string):
""" Die and take our toys home. """
LOGGER.error(string)
sys.exit(1)
## You saw it coming...
if __name__ == '__main__':
main()
| [
4242,
198,
4242,
38240,
257,
26136,
53,
656,
257,
3938,
44267,
19449,
1351,
44812,
326,
714,
307,
198,
4242,
973,
416,
257,
49303,
357,
273,
584,
9156,
1203,
8,
11055,
13,
198,
4242,
198,
4242,
17934,
8748,
284,
16602,
262,
6678,
1182... | 2.850638 | 549 |
import builtins, importlib
importer = Importer()
| [
11748,
3170,
1040,
11,
1330,
8019,
628,
198,
198,
320,
26634,
796,
1846,
26634,
3419,
198
] | 3.25 | 16 |
from app import app, db
from app.models import *
import datetime
import sys
sys.path.append('./sanitize')
from sanitize_utils import *
from trueskill import setup, Rating, quality_1vs1, rate_1vs1
from trueskill_functions import MU, SIGMA, CONS_MU, BETA, TAU, DRAW_PROBABILITY, populate_trueskills
from misc_utils import *
# Make some of these Class functions in app.models??
# Changes User's tag, given string new_tag. Also ensures that user's tag is changed in the Sets he has played
# transfers the data the User represented by joined_tag has to User root_tag, while deleting the User represented by joined_tag
# currently doesn't actually link the Users or tag in any way before deletion
# currently doesn't change Matches
# Given a User tag and region name, changes user.region and changes regional trueskill if region is valid, otherwise deletes it
# Given parameter tournament name and a list of integers representing year, month, and day, queries for Tournament object and assigns a date for it.
# given Tournament object, if tournament name already exists, if tournament is a pool of a larger one, add placements and sets to Tournament object and return it, else simply return original Tournament object
# given user tag, returns a simple dictionary with keys tournament_name and value placement for a tournament a User has attended
# deletes a Set given tournament name, set winner, and set loser
# reassigns Tournament Placement and Sets from one User to another
def delete_tournament(header_name):
'''
Give TournamentHeader name, query for and delete TournamentHeader
'''
header = TournamentHeader.query.filter(TournamentHeader.name==header_name).first()
if header is None:
return "TournamentHeader not found"
else:
db.session.delete(header)
db.session.commit()
deleted_header = TournamentHeader.query.filter(TournamentHeader.name==header_name).first()
if deleted_header:
return "Failure"
else:
return "Successful deletion"
# Doesn't actually delete | [
6738,
598,
1330,
598,
11,
20613,
198,
6738,
598,
13,
27530,
1330,
1635,
198,
11748,
4818,
8079,
198,
198,
11748,
25064,
198,
17597,
13,
6978,
13,
33295,
7,
4458,
14,
12807,
270,
1096,
11537,
198,
6738,
5336,
270,
1096,
62,
26791,
1330... | 3.786116 | 533 |
# coding=utf-8
#
# Copyright (c) 2015 EMC Corporation
# All Rights Reserved
#
import httplib
import cjson
import argparse
import sys
import os
import time
import json
import uuid
import base64
import urllib
import requests
import email
from email.Utils import formatdate
import cookielib
import telnetlib
import xml.etree.ElementTree as ET
#import xml2obj as x2o
import copy
import hmac
import re
import hashlib
import cookielib
import binascii
import datetime
import socket
import zlib
import struct
from time import sleep
try:
# OpenSUSE CoprHD kits tend to display certificate warnings which aren't
# relevant to running sanity tests
requests.packages.urllib3.disable_warnings()
except AttributeError:
# Swallow error, likely ViPR devkit
pass
URI_SERVICES_BASE = ''
URI_CATALOG = URI_SERVICES_BASE + '/catalog'
URI_CATALOG_SERVICES = URI_CATALOG + '/services'
URI_CATALOG_SERVICE = URI_CATALOG_SERVICES + '/{0}'
URI_CATALOG_SERVICE_SEARCH = URI_CATALOG_SERVICES + '/search'
URI_CATALOG_SERVICE_SEARCH_NAME = URI_CATALOG_SERVICE_SEARCH + '?name={0}'
URI_CATALOG_CATEGORIES = URI_CATALOG + '/categories'
URI_CATALOG_CATEGORY = URI_CATALOG_CATEGORIES + '/{0}'
URI_CATALOG_CATEGORY_UPGRADE = URI_CATALOG_CATEGORIES + '/upgrade?tenantId={0}'
URI_CATALOG_ORDERS = URI_CATALOG + '/orders'
URI_CATALOG_ORDER = URI_CATALOG_ORDERS + '/{0}'
URI_CATALOG_VPOOL = URI_CATALOG + '/vpools'
URI_CATALOG_VPOOL_FILE = URI_CATALOG_VPOOL + '/file'
URI_CATALOG_VPOOL_BLOCK = URI_CATALOG_VPOOL + '/block'
URI_CATALOG_VPOOL_OBJECT = URI_CATALOG_VPOOL + '/object'
URI_VPOOLS = URI_SERVICES_BASE + '/{0}/vpools'
URI_VPOOLS_MATCH = URI_SERVICES_BASE + '/{0}/vpools/matching-pools'
URI_OBJ_VPOOL = URI_SERVICES_BASE + '/{0}/data-services-vpools'
URI_VPOOL_INSTANCE = URI_VPOOLS + '/{1}'
URI_OBJ_VPOOL_INSTANCE = URI_OBJ_VPOOL + '/{1}'
URI_VPOOL_ACLS = URI_VPOOL_INSTANCE + '/acl'
URI_VPOOL_UPDATE = URI_VPOOL_INSTANCE + '/assign-matched-pools'
URI_VPOOL_DEACTIVATE = URI_VPOOL_INSTANCE + '/deactivate'
URI_VPOOL_REFRESH = URI_VPOOL_INSTANCE + '/refresh-matched-pools'
URI_BLOCKVPOOLS_BULKGET = URI_SERVICES_BASE + '/block/vpools/bulk'
URI_FILEVPOOLS_BULKGET = URI_SERVICES_BASE + '/file/vpools/bulk'
URI_SMISPROVIDER_BULKGET = URI_SERVICES_BASE + '/vdc/smis-providers/bulk'
URI_BLOCKSNAPSHOT_BULKGET = URI_SERVICES_BASE + '/block/snapshots/bulk'
URI_FILESNAPSHOT_BULKGET = URI_SERVICES_BASE + '/file/snapshots/bulk'
URI_EXPORTGROUP_BULKGET = URI_SERVICES_BASE + '/block/exports/bulk'
URI_LOGOUT = URI_SERVICES_BASE + '/logout'
URI_MY_PASSWORD_CHANGE = URI_SERVICES_BASE + '/password'
URI_USER_PASSWORD_CHANGE = URI_MY_PASSWORD_CHANGE + '/reset/'
URI_USER_PASSWORD_GET = URI_SERVICES_BASE + '/config/properties'
URI_USER_PASSWORD_PATTERN = 'system_{0}_encpassword","value":"(.+?)"'
URI_TENANT = URI_SERVICES_BASE + '/tenant'
URI_TENANTS = URI_SERVICES_BASE + '/tenants/{0}'
URI_TENANTS_DEACTIVATE = URI_TENANTS + '/deactivate'
URI_TENANTS_ROLES = URI_TENANTS + '/role-assignments'
URI_TENANTS_SUBTENANT = URI_TENANTS + '/subtenants'
URI_TENANTS_BULKGET = URI_SERVICES_BASE + '/tenants/bulk'
URI_TENANTS_HOSTS = URI_TENANTS + '/hosts'
URI_TENANTS_CLUSTERS = URI_TENANTS + '/clusters'
URI_TENANTS_VCENTERS = URI_TENANTS + '/vcenters'
URI_NODEOBJ = '/nodeobj/?name={0}'
URI_PROJECTS = URI_TENANTS + '/projects'
URI_PROJECT = URI_SERVICES_BASE + '/projects/{0}'
URI_PROJECT_ACLS = URI_PROJECT + '/acl'
URI_PROJECTS_BULKGET = URI_SERVICES_BASE + '/projects/bulk'
URI_FILESYSTEMS_LIST = URI_SERVICES_BASE + '/file/filesystems'
URI_FILESYSTEM = URI_SERVICES_BASE + '/file/filesystems/{0}'
URI_FILESHARE_BULKGET = URI_FILESYSTEMS_LIST + '/bulk'
URI_FILESYSTEMS_EXPORTS = URI_FILESYSTEM + '/exports'
URI_FILESYSTEMS_EXPORTS_UPDATE = URI_FILESYSTEM + '/export'
URI_FILESYSTEMS_UNEXPORT = URI_FILESYSTEM + '/export'
URI_FILESYSTEMS_EXPAND = URI_FILESYSTEM + '/expand'
URI_FILESYSTEMS_SHARES = URI_FILESYSTEM + '/shares'
URI_FILESYSTEMS_UNSHARE = URI_FILESYSTEMS_SHARES + '/{1}'
URI_FILESYSTEMS_SHARES_ACL = URI_FILESYSTEMS_SHARES + '/{1}/acl'
URI_FILESYSTEMS_SHARES_ACL_SHOW = URI_FILESYSTEMS_SHARES + '/{1}/acl'
URI_FILESYSTEMS_SHARES_ACL_DELETE = URI_FILESYSTEMS_SHARES + '/{1}/acl'
URI_FILESYSTEM_SNAPSHOT = URI_FILESYSTEM + '/protection/snapshots'
URI_FILESYSTEMS_SEARCH = URI_FILESYSTEMS_LIST + '/search'
URI_FILESYSTEMS_SEARCH_PROJECT = URI_FILESYSTEMS_SEARCH + '?project={0}'
URI_FILESYSTEMS_SEARCH_PROJECT_NAME = URI_FILESYSTEMS_SEARCH_PROJECT + '&name={1}'
URI_FILESYSTEMS_SEARCH_NAME = URI_FILESYSTEMS_SEARCH + '?name={0}'
URI_FILESYSTEMS_SEARCH_TAG = URI_FILESYSTEMS_SEARCH + '?tag={0}'
URI_FILE_SNAPSHOTS = URI_SERVICES_BASE + '/file/snapshots'
URI_FILE_SNAPSHOT = URI_FILE_SNAPSHOTS + '/{0}'
URI_FILE_SNAPSHOT_EXPORTS = URI_FILE_SNAPSHOT + '/exports'
URI_FILE_SNAPSHOT_UNEXPORT = URI_FILE_SNAPSHOT + '/export'
URI_FILE_SNAPSHOT_RESTORE = URI_FILE_SNAPSHOT + '/restore'
URI_FILE_SNAPSHOT_SHARES = URI_FILE_SNAPSHOT + '/shares'
URI_FILE_SNAPSHOT_SHARES_ACL = URI_FILE_SNAPSHOT_SHARES + '/{1}/acl'
URI_FILE_SNAPSHOT_SHARES_ACL_SHOW = URI_FILE_SNAPSHOT_SHARES + '/{1}/acl'
URI_FILE_SNAPSHOT_SHARES_ACL_DELETE = URI_FILE_SNAPSHOT_SHARES + '/{1}/acl'
URI_FILE_SNAPSHOT_UNSHARE = URI_FILE_SNAPSHOT_SHARES + '/{1}'
URI_FILE_SNAPSHOT_TASKS = URI_FILE_SNAPSHOT + '/tasks/{1}'
URI_FILE_QUOTA_DIR_LIST = URI_FILESYSTEM + '/quota-directories'
URI_FILE_QUOTA_DIR_BASE = URI_SERVICES_BASE + '/file/quotadirectories'
URI_FILE_QUOTA_DIR = URI_FILE_QUOTA_DIR_BASE + '/{0}'
URI_FILE_QUOTA_DIR_DELETE = URI_FILE_QUOTA_DIR + '/deactivate'
URI_DR = URI_SERVICES_BASE + '/site'
URI_DR_GET = URI_DR + '/{0}'
URI_DR_GET_DETAILS = URI_DR + '/{0}' + '/details'
URI_DR_DELETE = URI_DR + '/{0}'
URI_DR_PAUSE = URI_DR + '/{0}' + '/pause'
URI_DR_RESUME = URI_DR + '/{0}' + '/resume'
URI_DR_SWITCHOVER = URI_DR + '/{0}/switchover'
URI_DR_FAILOVER = URI_DR + '/{0}/failover'
URI_VDC = URI_SERVICES_BASE + '/vdc'
URI_VDC_GET = URI_VDC + '/{0}'
URI_VDC_DISCONNECT_POST = URI_VDC + '/{0}/disconnect'
URI_VDC_RECONNECT_POST = URI_VDC + '/{0}/reconnect'
URI_VDC_SECRETKEY = URI_VDC + '/secret-key'
URI_VDC_CERTCHAIN = URI_VDC + '/keystore'
URI_TASK = URI_VDC + "/tasks"
URI_TASK_GET = URI_TASK + '/{0}'
URI_TASK_LIST = URI_TASK
URI_TASK_LIST_SYSTEM = URI_TASK + "?tenant=system"
URI_TASK_DELETE = URI_TASK_GET + '/delete'
URI_EVENT = URI_VDC + "/events"
URI_EVENT_GET = URI_EVENT + '/{0}'
URI_EVENT_LIST = URI_EVENT + '?tenant={0}'
URI_EVENT_DELETE = URI_EVENT_GET + "/deactivate"
URI_EVENT_APPROVE = URI_EVENT_GET + "/approve"
URI_EVENT_DECLINE = URI_EVENT_GET + "/decline"
URI_IPSEC = '/ipsec'
URI_IPSEC_STATUS = '/ipsec?status={0}'
URI_IPSEC_KEY = '/ipsec/key'
URI_VDCINFO = '/object/vdcs'
URI_VDCINFO_GET = URI_VDCINFO + '/vdc' + '/{0}'
URI_VDCINFO_INSERT = URI_VDCINFO_GET
URI_VDCINFO_LOCAL = URI_VDCINFO + '/vdc/local'
URI_VDCINFO_LIST = URI_VDCINFO + '/vdc/list'
URI_CONTROL = URI_SERVICES_BASE + '/control'
URI_RECOVERY = URI_CONTROL + '/cluster/recovery'
URI_DB_REPAIR = URI_CONTROL + '/cluster/dbrepair-status'
URI_BACKUP = URI_SERVICES_BASE + '/backupset'
URI_BACKUP_CREATE = URI_BACKUP + '/backup?tag={0}'
URI_BACKUP_DELETE = URI_BACKUP + '/backup?tag={0}'
URI_BACKUP_LIST = URI_BACKUP
URI_BACKUP_LIST_EXTERNAL = URI_BACKUP + '/external'
URI_BACKUP_DOWNLOAD = URI_BACKUP + '/download?tag={0}'
URI_BACKUP_UPLOAD = URI_BACKUP + '/backup/upload?tag={0}'
URI_BACKUP_QUERY_UPLOAD = URI_BACKUP + '/backup?tag={0}'
URI_BACKUP_QUERY_INFO = URI_BACKUP + '/backup/info?backupname={0}&isLocal={1}'
URI_BACKUP_PULL = URI_BACKUP + '/pull?file={0}'
URI_BACKUP_QUERY_PULL = URI_BACKUP + '/restore/status?backupname={0}&isLocal={1}'
URI_BACKUP_RESTORE = URI_BACKUP + '/restore?backupname={0}&isLocal={1}&password={2}'
URI_VOLUME_LIST = URI_SERVICES_BASE + '/block/volumes'
URI_VOLUME_BULKGET = URI_VOLUME_LIST + '/bulk'
URI_VOLUME = URI_VOLUME_LIST + '/{0}'
URI_VOLUME_EXPAND = URI_VOLUME + '/expand'
URI_VOLUMES_EXPORTS = URI_VOLUME + '/exports'
URI_VOLUMES_UNEXPORTS = URI_VOLUME + '/exports/{1},{2},{3}'
URI_VOLUMES_DEACTIVATE = '/block/volumes/deactivate'
URI_BLOCK_SNAPSHOTS_LIST = URI_VOLUME + '/protection/snapshots'
URI_BLOCK_SNAPSHOTS = URI_SERVICES_BASE + '/block/snapshots/{0}'
URI_BLOCK_SNAPSHOTS_EXPORTS = URI_BLOCK_SNAPSHOTS + '/exports'
URI_BLOCK_SNAPSHOTS_UNEXPORTS = URI_BLOCK_SNAPSHOTS + '/exports/{1},{2},{3}'
URI_BLOCK_SNAPSHOTS_RESTORE = URI_BLOCK_SNAPSHOTS + '/restore'
URI_BLOCK_SNAPSHOTS_EXPAND = URI_BLOCK_SNAPSHOTS + '/expand'
URI_BLOCK_SNAPSHOTS_ACTIVATE = URI_BLOCK_SNAPSHOTS + '/activate'
URI_BLOCK_SNAPSHOTS_EXPOSE = URI_BLOCK_SNAPSHOTS + '/expose'
URI_BLOCK_SNAPSHOTS_TASKS = URI_BLOCK_SNAPSHOTS + '/tasks/{1}'
URI_VOLUME_CHANGE_VPOOL = URI_VOLUME_LIST + '/vpool-change'
URI_VOLUME_CHANGE_VPOOL_MATCH = URI_VOLUME + '/vpool-change/vpool'
URI_VOLUMES_SEARCH = URI_VOLUME_LIST + '/search'
URI_VOLUMES_SEARCH_PROJECT = URI_VOLUMES_SEARCH + '?project={0}'
URI_VOLUMES_SEARCH_PROJECT_NAME = URI_VOLUMES_SEARCH_PROJECT + '&name={1}'
URI_VOLUMES_SEARCH_NAME = URI_VOLUMES_SEARCH + '?name={0}'
URI_VOLUMES_SEARCH_TAG = URI_VOLUMES_SEARCH + '?tag={0}'
URI_VOLUMES_SEARCH_WWN = URI_VOLUMES_SEARCH + '?wwn={0}'
URI_VOLUME_CHANGE_VARRAY = URI_VOLUME + '/varray'
URI_VOLUME_CONTINUOUS = URI_VOLUME + '/protection/continuous-copies'
URI_VOLUME_CHANGE_LINK = URI_VOLUME_CONTINUOUS
URI_VOLUME_FULL_COPY = URI_VOLUME_LIST + '/{0}/protection/full-copies'
URI_VOLUME_FULL_COPY_ACTIVATE = URI_VOLUME_LIST + '/{0}/protection/full-copies/{1}/activate'
URI_VOLUME_FULL_COPY_DETACH = URI_VOLUME_LIST + '/{0}/protection/full-copies/{1}/detach'
URI_VOLUME_FULL_COPY_CHECK_PROGRESS = URI_VOLUME_LIST + '/{0}/protection/full-copies/{1}/check-progress'
URI_FULL_COPY = URI_SERVICES_BASE + '/block/full-copies'
URI_FULL_COPY_RESTORE = URI_FULL_COPY + '/{0}/restore'
URI_FULL_COPY_RESYNC = URI_FULL_COPY + '/{0}/resynchronize'
URI_ADD_JOURNAL = URI_VOLUME_LIST + '/protection/addJournalCapacity'
URI_BLOCK_SNAPSHOT_SESSION = URI_SERVICES_BASE + '/block/snapshot-sessions/{0}'
URI_BLOCK_SNAPSHOT_SESSION_TASK = URI_BLOCK_SNAPSHOT_SESSION + '/tasks/{1}'
URI_BLOCK_SNAPSHOT_SESSION_CREATE = URI_VOLUME + '/protection/snapshot-sessions'
URI_BLOCK_SNAPSHOT_SESSION_DELETE = URI_BLOCK_SNAPSHOT_SESSION + '/deactivate'
URI_BLOCK_SNAPSHOT_SESSION_RESTORE = URI_BLOCK_SNAPSHOT_SESSION + '/restore'
URI_BLOCK_SNAPSHOT_SESSION_LINK_TARGETS = URI_BLOCK_SNAPSHOT_SESSION + '/link-targets'
URI_BLOCK_SNAPSHOT_SESSION_RELINK_TARGETS = URI_BLOCK_SNAPSHOT_SESSION + '/relink-targets'
URI_BLOCK_SNAPSHOT_SESSION_UNLINK_TARGETS = URI_BLOCK_SNAPSHOT_SESSION + '/unlink-targets'
URI_BLOCK_SNAPSHOT_SESSIONS_LIST = URI_BLOCK_SNAPSHOT_SESSION_CREATE
URI_UNMANAGED = URI_VDC + '/unmanaged'
URI_UNMANAGED_UNEXPORTED_VOLUMES = URI_UNMANAGED + '/volumes/ingest'
URI_UNMANAGED_VOLUMES_SEARCH = URI_UNMANAGED + "/volumes/search"
URI_UNMANAGED_VOLUMES_SEARCH_NAME= URI_UNMANAGED_VOLUMES_SEARCH + "?name={0}"
URI_UNMANAGED_EXPORTED_VOLUMES = URI_UNMANAGED + '/volumes/ingest-exported'
URI_UNMANAGED_TASK = URI_VDC + '/tasks/{0}'
URI_BLOCK_MIRRORS_BASE = URI_VOLUME + '/protection/continuous-copies'
URI_BLOCK_MIRRORS_LIST = URI_BLOCK_MIRRORS_BASE
URI_BLOCK_MIRRORS_READ = URI_BLOCK_MIRRORS_BASE + '/{1}'
URI_BLOCK_MIRRORS_ATTACH = URI_BLOCK_MIRRORS_BASE + '/start'
URI_BLOCK_MIRRORS_DETACH_ALL = URI_BLOCK_MIRRORS_BASE + '/stop'
#URI_BLOCK_MIRRORS_DETACH = URI_BLOCK_MIRRORS_BASE + '/{1}/stop'
URI_BLOCK_MIRRORS_PAUSE_ALL = URI_BLOCK_MIRRORS_BASE + '/pause'
#URI_BLOCK_MIRRORS_PAUSE = URI_BLOCK_MIRRORS_BASE + '/{1}/pause'
URI_BLOCK_MIRRORS_RESUME_ALL = URI_BLOCK_MIRRORS_BASE + '/resume'
URI_BLOCK_MIRRORS_DEACTIVATE = URI_BLOCK_MIRRORS_BASE + '/deactivate'
#URI_BLOCK_MIRRORS_RESUME = URI_BLOCK_MIRRORS_BASE + '/{1}/resume'
#URI_BLOCK_SNAPSHOTS_RESTORE = URI_BLOCK_SNAPSHOTS + '/restore'
# REST endpoint templates used by the test client.
# Positional placeholders ({0}, {1}, ...) are filled via str.format with resource ids.

# Block consistency groups and their snapshot / snapshot-session /
# continuous-copy (protection) sub-resources.
URI_BLOCK_CONSISTENCY_GROUP_BASE = URI_SERVICES_BASE + '/block/consistency-groups'
URI_BLOCK_CONSISTENCY_GROUP_CREATE = URI_BLOCK_CONSISTENCY_GROUP_BASE
URI_BLOCK_CONSISTENCY_GROUP = URI_BLOCK_CONSISTENCY_GROUP_BASE + '/{0}'
URI_BLOCK_CONSISTENCY_GROUP_TASKS = URI_BLOCK_CONSISTENCY_GROUP + '/tasks/{1}'
URI_BLOCK_CONSISTENCY_GROUP_DELETE = URI_BLOCK_CONSISTENCY_GROUP + '/deactivate'
URI_BLOCK_CONSISTENCY_GROUP_BULK = URI_BLOCK_CONSISTENCY_GROUP_BASE + "/bulk"
URI_BLOCK_CONSISTENCY_GROUP_SNAPSHOT_BASE = URI_BLOCK_CONSISTENCY_GROUP + "/protection/snapshots"
URI_BLOCK_CONSISTENCY_GROUP_SNAPSHOT_CREATE = URI_BLOCK_CONSISTENCY_GROUP_SNAPSHOT_BASE
URI_BLOCK_CONSISTENCY_GROUP_SNAPSHOT_LIST = URI_BLOCK_CONSISTENCY_GROUP_SNAPSHOT_BASE
URI_BLOCK_CONSISTENCY_GROUP_SNAPSHOT = URI_BLOCK_CONSISTENCY_GROUP_SNAPSHOT_BASE + "/{1}"
URI_BLOCK_CONSISTENCY_GROUP_SNAPSHOT_TASKS = URI_BLOCK_CONSISTENCY_GROUP_SNAPSHOT + "/tasks/{2}"
URI_BLOCK_CONSISTENCY_GROUP_SNAPSHOT_ACTIVATE = URI_BLOCK_CONSISTENCY_GROUP_SNAPSHOT + "/activate"
URI_BLOCK_CONSISTENCY_GROUP_SNAPSHOT_DEACTIVATE = URI_BLOCK_CONSISTENCY_GROUP_SNAPSHOT + "/deactivate"
URI_BLOCK_CONSISTENCY_GROUP_SNAPSHOT_RESTORE = URI_BLOCK_CONSISTENCY_GROUP_SNAPSHOT + "/restore"
URI_BLOCK_CONSISTENCY_GROUP_SNAPSHOT_SESSION_BASE = URI_BLOCK_CONSISTENCY_GROUP + "/protection/snapshot-sessions"
URI_BLOCK_CONSISTENCY_GROUP_SNAPSHOT_SESSION_CREATE = URI_BLOCK_CONSISTENCY_GROUP_SNAPSHOT_SESSION_BASE
URI_BLOCK_CONSISTENCY_GROUP_SNAPSHOT_SESSION_LIST = URI_BLOCK_CONSISTENCY_GROUP_SNAPSHOT_SESSION_BASE
URI_BLOCK_CONSISTENCY_GROUP_PROTECTION_BASE = URI_BLOCK_CONSISTENCY_GROUP + "/protection/continuous-copies"
URI_BLOCK_CONSISTENCY_GROUP_SWAP = URI_BLOCK_CONSISTENCY_GROUP_PROTECTION_BASE + "/swap"
URI_BLOCK_CONSISTENCY_GROUP_ACCESS_MODE = URI_BLOCK_CONSISTENCY_GROUP_PROTECTION_BASE + "/accessmode"
URI_BLOCK_CONSISTENCY_GROUP_FAILOVER = URI_BLOCK_CONSISTENCY_GROUP_PROTECTION_BASE + "/failover"
URI_BLOCK_CONSISTENCY_GROUP_FAILOVER_CANCEL = URI_BLOCK_CONSISTENCY_GROUP_PROTECTION_BASE + "/failover-cancel"
#Object Platform ECS bucket definitions
URI_ECS_BUCKET_LIST = URI_SERVICES_BASE + '/object/buckets'
URI_ECS_BUCKET = URI_SERVICES_BASE + '/object/buckets/{0}'
# Network systems (SAN switches): discovery, fabrics, endpoints, aliases, zones.
URI_NETWORKSYSTEMS = URI_SERVICES_BASE + '/vdc/network-systems'
URI_NETWORKSYSTEM = URI_NETWORKSYSTEMS + '/{0}'
URI_NETWORKSYSTEM_DISCOVER = URI_NETWORKSYSTEMS + '/{0}/discover'
URI_NETWORKSYSTEM_FCENDPOINTS = URI_NETWORKSYSTEMS + '/{0}/fc-endpoints'
URI_NETWORKSYSTEM_FCENDPOINTS_FABRIC = URI_NETWORKSYSTEM_FCENDPOINTS + '?fabric-id={1}'
URI_NETWORKSYSTEM_VDCREFERENCES = URI_NETWORKSYSTEMS + '/san-references/{0},{1}'
URI_NETWORKSYSTEM_REGISTER = URI_NETWORKSYSTEMS + '/{0}/register'
URI_NETWORKSYSTEM_DEREGISTER = URI_NETWORKSYSTEMS + '/{0}/deregister'
URI_NETWORKSYSTEM_ALIASES = URI_NETWORKSYSTEM + '/san-aliases'
URI_NETWORKSYSTEM_ALIASES_FABRIC = URI_NETWORKSYSTEM_ALIASES + '?fabric-id={1}'
URI_NETWORKSYSTEM_ALIASES_REMOVE = URI_NETWORKSYSTEM_ALIASES + '/remove'
URI_NETWORKSYSTEM_ZONES = URI_NETWORKSYSTEM + '/san-fabrics/{1}/san-zones'
URI_NETWORKSYSTEM_ZONES_QUERY = URI_NETWORKSYSTEM_ZONES + '?zone-name={2}&exclude-members={3}&exclude-aliases={4}'
URI_NETWORKSYSTEM_ZONES_REMOVE = URI_NETWORKSYSTEM_ZONES + '/remove'
URI_NETWORKSYSTEM_ZONES_ACTIVATE = URI_NETWORKSYSTEM_ZONES + '/activate'
# Storage systems, pools and ports (discovery, (de)registration, bulk reads).
URI_DISCOVERED_STORAGEDEVICES = URI_SERVICES_BASE + '/vdc/storage-systems'
URI_DISCOVERED_STORAGEDEVICE = URI_DISCOVERED_STORAGEDEVICES + '/{0}'
URI_STORAGEDEVICES = URI_SERVICES_BASE + '/vdc/storage-systems'
URI_STORAGEDEVICE = URI_STORAGEDEVICES + '/{0}'
URI_STORAGEDEVICE_DISCOVERALL = URI_STORAGEDEVICES + '/discover'
URI_STORAGEDEVICE_DEREGISTER = URI_STORAGEDEVICE + '/deregister'
URI_STORAGESYSTEMS_BULKGET = URI_DISCOVERED_STORAGEDEVICES + '/bulk'
URI_DISCOVERED_STORAGEDEVICE_DISCOVER = URI_STORAGEDEVICE + '/discover'
URI_DISCOVERED_STORAGEDEVICE_NS = URI_DISCOVERED_STORAGEDEVICE_DISCOVER + '?namespace={1}'
URI_STORAGEPOOLS = URI_STORAGEDEVICE + '/storage-pools'
URI_STORAGEPOOL = URI_SERVICES_BASE + '/vdc/storage-pools/{0}'
URI_STORAGEPOOL_SHOW = URI_STORAGEPOOLS + '/{1}'
URI_STORAGEPOOL_REGISTER = URI_STORAGEPOOLS + '/{1}/register'
URI_STORAGEPOOL_DEREGISTER = URI_STORAGEPOOL + '/deregister'
URI_STORAGEPOOL_UPDATE = URI_STORAGEPOOL
URI_STORAGEPOOLS_BULKGET = URI_SERVICES_BASE + '/vdc/storage-pools/bulk'
URI_STORAGEPORTS = URI_STORAGEDEVICE + '/storage-ports'
URI_STORAGEPORT = URI_SERVICES_BASE + '/vdc/storage-ports/{0}'
URI_STORAGEPORT_SHOW = URI_STORAGEPORTS + '/{1}'
URI_STORAGEPORT_UPDATE = URI_STORAGEPORT
URI_STORAGEPORT_REGISTER = URI_STORAGEPORTS + '/{1}/register'
URI_STORAGEPORT_DEREGISTER = URI_STORAGEPORT + '/deregister'
URI_STORAGEPORTS_BULKGET = URI_SERVICES_BASE + '/vdc/storage-ports/bulk'
# Virtual arrays and networks.
URI_VARRAYS = URI_SERVICES_BASE + '/vdc/varrays'
URI_VARRAY = URI_VARRAYS + '/{0}'
URI_VARRAY_PORTS = URI_VARRAY + '/storage-ports'
URI_VARRAY_ACLS = URI_VARRAY + '/acl'
URI_VARRAYS_BULKGET = URI_VARRAYS + '/bulk'
URI_NETWORKS = URI_SERVICES_BASE + '/vdc/networks'
URI_VARRAY_NETWORKS = URI_VARRAY + '/networks'
URI_NETWORK = URI_NETWORKS + '/{0}'
URI_NETWORK_ENDPOINTS = URI_NETWORK + '/endpoints'
URI_NETWORK_ASSIGN = URI_NETWORK + ''
URI_NETWORK_UNASSIGN = URI_NETWORK + ''
URI_NETWORKS_BULKGET = URI_NETWORKS + '/bulk'
URI_NETWORK_DEACTIVATE = URI_NETWORK + '/deactivate?force={1}'
URI_NETWORK_REGISTER = URI_NETWORK + '/register'
URI_NETWORK_DEREGISTER = URI_NETWORK + '/deregister'
# SMI-S / storage providers, file policies and storage tiers.
URI_SMISPROVIDERS = URI_SERVICES_BASE + '/vdc/smis-providers'
URI_SMISPROVIDER = URI_SMISPROVIDERS + '/{0}'
URI_FILE_POLICIES = '/file/file-policies'
URI_FILE_POLICY_SHOW = URI_FILE_POLICIES + '/{0}'
URI_FILE_POLICY_DELETE = URI_FILE_POLICIES + '/{0}'
URI_FILE_POLICY_UPDATE = URI_FILE_POLICIES + '/{0}'
URI_FILE_POLICY_ASSIGN = URI_FILE_POLICIES + '/{0}/assign-policy'
URI_FILE_POLICY_UNASSIGN = URI_FILE_POLICIES + '/{0}/unassign-policy'
URI_STORAGEPROVIDERS = URI_SERVICES_BASE + '/vdc/storage-providers'
URI_STORAGEPROVIDER = URI_STORAGEPROVIDERS + '/{0}'
URI_STORAGETIER = URI_SERVICES_BASE + '/vdc/storage-tiers/{0}'
URI_STORAGETIERS = URI_SERVICES_BASE + '/vdc/storage-tiers'
# Block export groups (volumes, initiators, path adjustment, port groups).
URI_EXPORTGROUP_LIST = URI_SERVICES_BASE + '/block/exports'
URI_EXPORTGROUP_INSTANCE = URI_SERVICES_BASE + '/block/exports/{0}'
URI_EXPORTGROUP_VOLUMES = URI_SERVICES_BASE + '/block/exports/{0}/volumes'
URI_EXPORTGROUP_VOLUME_INSTANCE = URI_SERVICES_BASE + '/block/exports/{0}/volumes/{1}'
URI_EXPORTGROUP_VOLUMES_REMOVE = URI_SERVICES_BASE + '/block/exports/{0}/remove-volumes'
URI_EXPORTGROUP_INITS = URI_SERVICES_BASE + '/block/exports/{0}/initiators'
URI_EXPORTGROUP_INIT_DELETE = URI_SERVICES_BASE + '/block/exports/{0}/initiators/{1},{2}'
URI_EXPORTGROUP_INITS_REMOVE = URI_SERVICES_BASE + '/block/exports/{0}/remove-initiators'
URI_EXPORTGROUP_REALLOC = URI_SERVICES_BASE + '/block/exports/{0}/paths-adjustment-preview'
URI_EXPORTGROUP_REBALANCE = URI_SERVICES_BASE + '/block/exports/{0}/paths-adjustment'
URI_EXPORTGROUP_CHANGEPORTGROUP = URI_SERVICES_BASE + '/block/exports/{0}/change-port-group'
URI_EXPORTGROUP_SEARCH_PROJECT = URI_EXPORTGROUP_LIST + '/search?project={0}'
# Compute resources: hosts, initiators, IP interfaces, vCenters, clusters, datacenters.
URI_HOSTS = URI_SERVICES_BASE + '/compute/hosts'
URI_HOST = URI_SERVICES_BASE + '/compute/hosts/{0}'
URI_HOST_DEACTIVATE = URI_HOST + '/deactivate?detach_storage={1}'
URI_HOSTS_BULKGET = URI_HOSTS + '/bulk'
URI_HOST_INITIATORS = URI_SERVICES_BASE + '/compute/hosts/{0}/initiators'
URI_HOST_IPINTERFACES = URI_SERVICES_BASE + '/compute/hosts/{0}/ip-interfaces'
URI_INITIATORS = URI_SERVICES_BASE + '/compute/initiators'
URI_INITIATOR = URI_SERVICES_BASE + '/compute/initiators/{0}'
URI_INITIATOR_REGISTER = URI_SERVICES_BASE + '/compute/initiators/{0}/register'
URI_INITIATOR_DEREGISTER = URI_SERVICES_BASE + '/compute/initiators/{0}/deregister'
URI_INITIATOR_ALIASGET = URI_SERVICES_BASE + "/compute/initiators/{0}/alias/{1}"
URI_INITIATOR_ALIASSET = URI_SERVICES_BASE + "/compute/initiators/{0}/alias"
URI_INITIATORS_BULKGET = URI_SERVICES_BASE + '/compute/initiators/bulk'
URI_IPINTERFACES = URI_SERVICES_BASE + '/compute/ip-interfaces'
URI_IPINTERFACE = URI_SERVICES_BASE + '/compute/ip-interfaces/{0}'
URI_IPINTERFACE_REGISTER = URI_SERVICES_BASE + '/compute/ip-interfaces/{0}/register'
URI_IPINTERFACE_DEREGISTER = URI_SERVICES_BASE + '/compute/ip-interfaces/{0}/deregister'
URI_IPINTERFACES_BULKGET = URI_SERVICES_BASE + '/compute/ip-interfaces/bulk'
URI_VCENTERS = URI_SERVICES_BASE + '/compute/vcenters'
URI_VCENTER = URI_SERVICES_BASE + '/compute/vcenters/{0}'
URI_VCENTER_DISCOVER = URI_VCENTER + '/discover'
URI_VCENTERS_BULKGET = URI_VCENTERS + '/bulk'
URI_VCENTER_DATACENTERS = URI_VCENTER + '/vcenter-data-centers'
URI_CLUSTERS = URI_SERVICES_BASE + '/compute/clusters'
URI_CLUSTER = URI_SERVICES_BASE + '/compute/clusters/{0}'
URI_CLUSTER_DEACTIVATE = URI_CLUSTER + '/deactivate?detach-storage={1}'
URI_CLUSTERS_BULKGET = URI_CLUSTERS + '/bulk'
URI_DATACENTERS = URI_SERVICES_BASE + '/compute/vcenter-data-centers'
URI_DATACENTER = URI_SERVICES_BASE + '/compute/vcenter-data-centers/{0}'
URI_DATACENTERS_BULKGET = URI_SERVICES_BASE + '/compute/vcenter-data-centers/bulk'
# Object platform: data stores, key pools, Atmos import, ingestion, networks.
URI_DATA_STORE_LIST = URI_SERVICES_BASE + '/vdc/data-stores'
URI_DATA_STORE = URI_SERVICES_BASE + '/vdc/data-stores/{0}'
URI_DATA_STORE_BULKGET = URI_DATA_STORE_LIST + '/bulk'
URI_KEYPOOLS = URI_SERVICES_BASE + '/object/keypools'
URI_KEYPOOLS_INSTANCE = URI_KEYPOOLS + '/{0}'
URI_KEYPOOLS_ACCESSMODE_INSTANCE = URI_KEYPOOLS + '/access-mode' + '/{0}'
URI_KEYPOOLS_FILEACCESS_INSTANCE = URI_KEYPOOLS + '/fileaccess' + '/{0}'
URI_KEY_INSTANCE = URI_KEYPOOLS_INSTANCE + '/{1}'
URI_KEYS = URI_SERVICES_BASE + '/object/keypools'
URI_KEYS_INSTANCE = URI_KEYS + '/{0}'
URI_ATMOS_DEVICE_LIST = URI_SERVICES_BASE + '/object/atmos-importer'
URI_ATMOS_DEVICE_TASK = URI_SERVICES_BASE + '/object/atmos-importer/{0}/tasks/{1}'
URI_ATMOS_DEVICE = URI_SERVICES_BASE + '/object/atmos-importer/{0}'
URI_ATMOS_DEVICE_DELETE = URI_SERVICES_BASE + '/object/atmos-importer/{0}/deactivate'
URI_OBJECT_INGESTION_LIST = URI_SERVICES_BASE + '/object/ingestion'
URI_OBJECT_INGESTION = URI_SERVICES_BASE + '/object/ingestion/{0}'
URI_OBJECT_INGESTION_DELETE = URI_SERVICES_BASE + '/object/ingestion/{0}/deactivate'
URI_OBJECT_INGESTION_OP_STATUS = URI_SERVICES_BASE + '/object/ingestion/{0}/tasks/{1}'
URI_OBJECTTZ = URI_SERVICES_BASE + '/object/networks'
URI_OBJECTTZ_INSTANCE = URI_OBJECTTZ + '/{0}'
URI_OBJECTTZ_DELETE = URI_OBJECTTZ + '/{0}/deactivate'
# Protection systems and protection sets.
URI_DISCOVERED_PROTECTION_SYSTEMS = URI_SERVICES_BASE + '/vdc/protection-systems'
URI_DISCOVERED_PROTECTION_SYSTEM = URI_DISCOVERED_PROTECTION_SYSTEMS + '/{0}'
URI_PROTECTION_SYSTEM = URI_SERVICES_BASE + '/vdc/protection-systems/{0}'
URI_PROTECTION_SYSTEMS = URI_SERVICES_BASE + '/vdc/protection-systems'
URI_PROTECTION_SYSTEM_DISCOVER = URI_PROTECTION_SYSTEM + '/discover'
URI_PROTECTION_SYSTEM_UPDATE = URI_PROTECTION_SYSTEM
URI_DISCOVERED_PROTECTION_SYSTEM_DISCOVER = URI_PROTECTION_SYSTEM + '/discover'
URI_DISCOVERED_PROTECTION_SYSTEM_NS = URI_DISCOVERED_PROTECTION_SYSTEM_DISCOVER + '?namespace={1}'
URI_PROTECTIONSET = URI_SERVICES_BASE + '/block/protection-sets/{0}'
URI_PROTECTIONSETS = URI_SERVICES_BASE + '/block/protection-sets'
# VDC security/role assignment, auto-tiering, workflows, audit/monitoring.
URI_VDC_ROLES = URI_SERVICES_BASE + '/vdc/role-assignments'
URI_VDC_AUTHN_PROFILE = URI_SERVICES_BASE + '/vdc/admin/authnproviders'
URI_VDC_AUTHN_PROFILES = URI_SERVICES_BASE + '/vdc/admin/authnproviders/{0}'
URI_AUTO_TIER_POLICY = URI_SERVICES_BASE + '/vdc/auto-tier-policies/{0}'
URI_WORKFLOW_LIST = URI_SERVICES_BASE + '/vdc/workflows'
URI_WORKFLOW_RECENT = URI_WORKFLOW_LIST + '/recent'
URI_WORKFLOW_INSTANCE = URI_WORKFLOW_LIST + '/{0}'
URI_WORKFLOW_STEPS = URI_WORKFLOW_INSTANCE + '/steps'
URI_WORKFLOW_RESUME = URI_WORKFLOW_LIST + '/{0}/resume'
URI_WORKFLOW_ROLLBACK = URI_WORKFLOW_LIST + '/{0}/rollback'
URI_WORKFLOW_SUSPEND = URI_WORKFLOW_LIST + '/{0}/suspend/{1}'
URI_AUDIT_QUERY = URI_SERVICES_BASE + '/audit/logs/?time_bucket={0}&language={1}'
URI_MONITOR_QUERY = URI_SERVICES_BASE + '/monitoring/events/?time_bucket={0}'
URI_RESOURCE_DEACTIVATE = '{0}/deactivate'
# S3 data-path endpoints (bucket name may instead travel in the Host header).
URI_S3_SERVICE_BASE = ''
URI_S3_BUCKET_INSTANCE = URI_S3_SERVICE_BASE + '/{0}'
URI_S3_KEY_INSTANCE = URI_S3_SERVICE_BASE + '/{0}/{1}'
URI_S3_KEY_INSTANCE_ALTERNATE = URI_S3_SERVICE_BASE + '/{0}' #used when the bucket name is part of the Host header
URI_S3_PING = URI_S3_SERVICE_BASE + '/'
URI_S3_DATANODE = URI_S3_SERVICE_BASE + '/'
# Atmos and Swift data-path endpoints.
URI_ATMOS_SERVICE_BASE = '/rest'
URI_ATMOS_OBJECTS = URI_ATMOS_SERVICE_BASE + '/objects'
URI_ATMOS_OBJECTS_OID = URI_ATMOS_OBJECTS + '/{0}'
URI_ATMOS_NAMESPACE = URI_ATMOS_SERVICE_BASE + '/namespace'
URI_ATMOS_NAMESPACE_PATH = URI_ATMOS_NAMESPACE + '{0}'
URI_ATMOS_SUBTENANT_BASE = URI_ATMOS_SERVICE_BASE + '/subtenant'
URI_ATMOS_SUBTENANT_INSTANCE = URI_ATMOS_SUBTENANT_BASE + '/{0}'
URI_ATMOS_OBJECT_INSTANCE = URI_ATMOS_OBJECTS + '/{0}'
URI_ATMOS_NAMESPACE_INSTANCE = URI_ATMOS_NAMESPACE + '/{0}'
URI_SWIFT_SERVICE_BASE = '/v1'
URI_SWIFT_ACCOUNT_INSTANCE = URI_SWIFT_SERVICE_BASE + '/{0}'
URI_SWIFT_CONTAINER_INSTANCE = URI_SWIFT_SERVICE_BASE + '/{0}/{1}'
URI_SWIFT_KEY_INSTANCE = URI_SWIFT_SERVICE_BASE + '/{0}/{1}/{2}'
# Object namespaces, buckets, secret keys and web-storage users.
URI_NAMESPACE_COMMON = URI_SERVICES_BASE + '/object/namespaces'
URI_NAMESPACE_BASE = URI_NAMESPACE_COMMON + '/namespace'
URI_NAMESPACE_INSTANCE = URI_NAMESPACE_BASE + '/{0}'
URI_NAMESPACE_TENANT_BASE = URI_NAMESPACE_COMMON + '/tenant'
URI_NAMESPACE_TENANT_INSTANCE = URI_NAMESPACE_TENANT_BASE + '/{0}'
URI_NAMESPACE_RETENTION_BASE = URI_NAMESPACE_INSTANCE + '/retention'
URI_NAMESPACE_RETENTION_INSTANCE= URI_NAMESPACE_RETENTION_BASE + '/{1}'
URI_BUCKET_COMMON = '/object/bucket'
URI_BUCKET_INSTANCE = URI_BUCKET_COMMON + '/{0}'
URI_BUCKET_RETENTION = URI_BUCKET_INSTANCE + '/retention'
URI_BUCKET_UPDATE_OWNER = URI_BUCKET_INSTANCE + '/owner'
URI_SECRET_KEY = URI_SERVICES_BASE + '/object/secret-keys'
URI_SECRET_KEY_USER = URI_SERVICES_BASE + '/object/user-secret-keys/{0}'
URI_DELETE_SECRET_KEY_USER = URI_SERVICES_BASE + '/object/user-secret-keys/{0}/deactivate'
URI_WEBSTORAGE_USER = URI_SERVICES_BASE + '/object/users'
URI_WEBSTORAGE_USER_DEACTIVATE = URI_WEBSTORAGE_USER + '/deactivate'
URI_BASEURL_BASE = URI_SERVICES_BASE + '/object/baseurl'
URI_BASEURL_INSTANCE = URI_BASEURL_BASE + '/{0}'
URI_BASEURL_DEACTIVATE = URI_BASEURL_BASE + '/{0}/deactivate'
URI_PASSWORDGROUP = URI_SERVICES_BASE + '/object/user-password/{0}'
URI_PASSWORDGROUP_DEACTIVATE = URI_PASSWORDGROUP + '/deactivate'
# Migrations, zones, controller configuration, replication and vNAS.
URI_MIGRATIONS = URI_SERVICES_BASE + '/block/migrations'
URI_MIGRATION = URI_MIGRATIONS + '/{0}'
URI_ZONE = URI_SERVICES_BASE + '/zone/{0}'
URI_ZONES = URI_SERVICES_BASE + '/zone'
URI_ZONE_CAPACITY = URI_SERVICES_BASE + '/zone/capacity'
URI_CUSTOMCONFIGS = URI_SERVICES_BASE + '/config/controller'
URI_CUSTOMCONFIG = URI_CUSTOMCONFIGS + '/{0}'
URI_CUSTOMCONFIG_DELETE = URI_CUSTOMCONFIG + '/deactivate'
URI_REPLICATION_GROUP = URI_SERVICES_BASE + '/vdc/data-service/vpools/{0}'
URI_REPLICATION_GROUPS = URI_SERVICES_BASE + '/vdc/data-service/vpools'
URI_REPLICATION_EXTEND = URI_SERVICES_BASE + '/vdc/data-service/vpools/{0}/addvarrays'
URI_REPLICATION_COMPRESS = URI_SERVICES_BASE + '/vdc/data-service/vpools/{0}/removevarrays'
URI_VNAS_SERVERS = URI_SERVICES_BASE + '/vdc/vnas-servers'
URI_VNAS_SERVER = URI_SERVICES_BASE + '/vdc/vnas-servers/{0}'
URI_VNAS_SERVER_ASSIGN = URI_SERVICES_BASE + '/projects/{0}/assign-vnas-servers'
URI_VNAS_SERVER_UNASSIGN = URI_SERVICES_BASE + '/projects/{0}/unassign-vnas-servers'
# Geo services and object certificates.
URI_GEO_SERVICES_BASE = ''
URI_CHUNKINFO = URI_GEO_SERVICES_BASE + '/chunkinfo'
URI_CHUNKDATA = URI_GEO_SERVICES_BASE + '/chunkdata/{0}'
URI_OBJ_CERT = '/object-cert/keystore'
URI_OBJ_SECRET_KEY = '/object-cert/secret-key'
# Compute (UCS) systems, image servers, images and compute virtual pools.
URI_COMPUTE_SYSTEMS = URI_SERVICES_BASE + '/vdc/compute-systems'
URI_COMPUTE_SYSTEM = URI_COMPUTE_SYSTEMS + '/{0}'
URI_COMPUTE_SYSTEM_COMPUTEELEMENTS = URI_COMPUTE_SYSTEM + '/compute-elements'
URI_COMPUTE_SYSTEM_DEREGISTER = URI_COMPUTE_SYSTEM + '/deregister'
URI_COMPUTE_SYSTEM_DISCOVER = URI_COMPUTE_SYSTEM + '/discover'
URI_COMPUTE_IMAGESERVERS = URI_SERVICES_BASE + '/compute/imageservers'
URI_COMPUTE_IMAGESERVER = URI_COMPUTE_IMAGESERVERS + '/{0}'
URI_COMPUTE_IMAGES = URI_SERVICES_BASE + '/compute/images'
URI_COMPUTE_IMAGE = URI_COMPUTE_IMAGES + '/{0}'
URI_COMPUTE_VIRTUAL_POOLS = URI_SERVICES_BASE + '/compute/vpools'
URI_COMPUTE_VIRTUAL_POOL = URI_COMPUTE_VIRTUAL_POOLS + '/{0}'
URI_COMPUTE_VIRTUAL_POOL_ASSIGN = URI_COMPUTE_VIRTUAL_POOL + '/assign-matched-elements'
# Data-path service ports (insecure = plain HTTP, secure = TLS).
OBJCTRL_INSECURE_PORT = '9010'
OBJCTRL_PORT = '4443'
S3_INSECURE_PORT = '9020'
S3_PORT = '9021'
ATMOS_INSECURE_PORT = '9022'
ATMOS_PORT = '9023'
SWIFT_INSECURE_PORT = '9024'
SWIFT_PORT = '9025'
GEO_PORT = '9096'
GEO_INSECURE_PORT = '9096'
# Miscellaneous control-path endpoints and storage port groups.
URI_KICKSTART = URI_SERVICES_BASE + '/kickstart'
URI_WHOAMI = URI_SERVICES_BASE + '/user/whoami'
URI_OBJECT_PROPERTIES = URI_SERVICES_BASE + '/config/object/properties'
URI_PROXY_TOKEN = URI_SERVICES_BASE + '/proxytoken'
URI_STORAGEPORTGROUPS = URI_STORAGEDEVICE + '/storage-port-groups'
URI_STORAGEPORTGROUP = URI_STORAGEPORTGROUPS + '/{1}'
URI_STORAGEPORTGROUP_REGISTER = URI_STORAGEPORTGROUP + '/register'
URI_STORAGEPORTGROUP_DEREGISTER = URI_STORAGEPORTGROUP + '/deregister'
URI_STORAGEPORTGROUP_DELETE = URI_STORAGEPORTGROUP + '/deactivate'
# Client configuration: environment-driven settings, protocol headers and
# HTTP constants shared by all request helpers.
PROD_NAME = 'storageos'
TENANT_PROVIDER = 'urn:storageos:TenantOrg:provider:'
API_SYNC_TIMEOUT = os.getenv('BOURNE_API_SYNC_TIMEOUT', 120000)
USE_SSL = os.getenv('BOURNE_USE_SSL', 1)
PORT = os.getenv('BOURNE_PORT', '4443')
BOURNE_DEBUG = os.getenv('BOURNE_DEBUG', 0)
# Headers used by the object file-access API.
FILE_ACCESS_MODE_HEADER = "x-emc-file-access-mode"
FILE_ACCESS_DURATION_HEADER = "x-emc-file-access-duration"
HOST_LIST_HEADER = "x-emc-file-access-host-list"
USER_HEADER = "x-emc-file-access-uid"
TOKEN_HEADER = "x-emc-file-access-token"
START_TOKEN_HEADER = "x-emc-file-access-start-token"
END_TOKEN_HEADER = "x-emc-file-access-end-token"
FILE_ACCESS_PRESERVE_DIR_STRUCTURE_HEADER = "x-emc-file-access-preserve-directory-structure"
SKIP_SECURITY = os.getenv('BOURNE_SECURITY_DISABLED', 0)
# Swift protocol headers.
SWIFT_AUTH_TOKEN = 'X-Auth-Token'
SWIFT_AUTH_USER = 'X-Auth-User'
SWIFT_AUTH_KEY = 'X-Auth-Key'
SWIFT_DELETE_AT = 'X-Delete-At'
SWIFT_COPY_FROM = 'X-Copy-From'
SWIFT_DELETE_AFTER = 'X-Delete-After'
SWIFT_X_CONTAINER_READ = "X-Container-Read"
SWIFT_X_CONTAINER_WRITE = "X-Container-Write"
# HTTP status codes and XML namespaces used when parsing responses.
HTTP_OK = 200
HTTP_NO_CONTENT = 204
HTTP_NOT_FOUND=404
S3_XML_NS = 'http://s3.amazonaws.com/doc/2006-03-01/'
OPENSTACK_XML_NS = "http://docs.openstack.org/identity/api/v2.0"
SEC_REDIRECT = 302
SEC_TOKEN_FILE = os.getenv('BOURNE_TOKEN_FILE', 'token.txt')
SEC_AUTHTOKEN_HEADER = 'X-SDS-AUTH-TOKEN'
SEC_PROXYTOKEN_HEADER = 'X-SDS-AUTH-PROXY-TOKEN'
PROXY_USER_NAME = 'proxyuser'
PROXY_USER_PASSWORD = 'ChangeMe1!'
COOKIE_FILE = os.getenv('BOURNE_COOKIE_FILE', 'cookiejar')
# Number of seconds a request should wait for a response.
# It only affects the connection process itself, not the downloading of the response body.
REQUEST_TIMEOUT_SECONDS = 120
# Total time for server reconnection
MAX_WAIT_TIME_IN_SECONDS=480
CONTENT_TYPE_JSON='application/json'
CONTENT_TYPE_XML='application/xml'
CONTENT_TYPE_OCTET='application/octet-stream'
# Load-balancer / API service ports.
LB_GUI_PORT = '443'
LB_API_PORT = '4443'
APISVC_PORT = '8443'
# Default request headers and connection state shared by the client.
_headers = {'Content-Type': 'application/json', 'ACCEPT': 'application/json,text/html,application/octet-stream'}
_ipaddr = None
_port = LB_API_PORT
def _get_versioning_status(self, payload):
    """Return the <Status> text from an S3 VersioningConfiguration XML payload."""
    status_path = './{' + S3_XML_NS + '}Status'
    return ET.fromstring(payload).findtext(status_path)
def _build_lifecycle_payload(self, rules):
    """Build an S3 <LifecycleConfiguration> XML document from a JSON rules string.

    Each rule contributes ID/Prefix/Status plus an Expiration element
    carrying either Days, a Date, or both when present.
    """
    config = ET.Element('LifecycleConfiguration')
    config.set('xmlns', S3_XML_NS)
    for rule_spec in cjson.decode(rules).get('rules'):
        rule_elem = ET.SubElement(config, 'Rule')
        ET.SubElement(rule_elem, 'ID').text = rule_spec.get('id')
        ET.SubElement(rule_elem, 'Prefix').text = rule_spec.get('prefix')
        ET.SubElement(rule_elem, 'Status').text = rule_spec.get('status')
        exp_spec = rule_spec.get('expiration')
        exp_elem = ET.SubElement(rule_elem, 'Expiration')
        if exp_spec.get('days'):
            ET.SubElement(exp_elem, 'Days').text = str(exp_spec.get('days'))
        if exp_spec.get('date'):
            ET.SubElement(exp_elem, 'Date').text = exp_spec.get('date')
    return ET.tostring(config)
def _build_cors_payload(self, rules):
    """Build an S3 <CORSConfiguration> XML document from a JSON rules string.

    Each rule lists allowed origins, methods and headers, which become
    AllowedOrigin / AllowedMethod / AllowedHeader children of a CORSRule.
    """
    config = ET.Element('CORSConfiguration')
    config.set('xmlns', S3_XML_NS)
    for rule_spec in cjson.decode(rules).get('rules'):
        rule_elem = ET.SubElement(config, 'CORSRule')
        for allowed_origin in rule_spec.get('origin'):
            ET.SubElement(rule_elem, 'AllowedOrigin').text = allowed_origin
        for allowed_method in rule_spec.get('method'):
            ET.SubElement(rule_elem, 'AllowedMethod').text = allowed_method
        for allowed_header in rule_spec.get('header'):
            ET.SubElement(rule_elem, 'AllowedHeader').text = allowed_header
    return ET.tostring(config)
def node_create(self, name):
    """POST a node-object creation request for the given name."""
    target = URI_NODEOBJ.format(name)
    return self.api('POST', target)
def security_logout(self):
response = self.__api('GET', URI_LOGOUT)
if (response.status_code != 200):
print "logout failed with code: ", response.status_code
raise Exception('security logout: failed')
# NOTE(review): the lines below are fragments of two S3 key helpers
# (apparently bucket_key_create and bucket_key_update); their "def" lines
# are not visible in this chunk, so the nesting shown is reconstructed.
if headers:
    # Caller-supplied headers override/extend the shared header dict.
    for header_name,header_value in headers.items():
        self._headers[header_name] = header_value
if(bucketNameFormat == 2): #set the bucket name in the Host header and not in the path
    if(baseurl == None):
        raise Exception('Base URL should be specified if the alternate format of URI needs to be used')
    host = bucket + '.'
    if(namespaceFormat == 2):
        host = host + namespace + '.'
    else:
        self._headers['x-emc-namespace'] = namespace
    host = host + baseurl
    self._headers['Host'] = host
if value is None:
    print "empty object, setting Content-Length to 0"
    self._headers['Content-Length'] = str(0)
self._set_auth_and_ns_header('PUT', namespace, bucket, key, uid, secret, CONTENT_TYPE_OCTET)
print self._headers
# MD5 of the payload is compared against the returned ETag below.
md5str = self._computeMD5(value)
altUriFmt = False
if(bucketNameFormat == 2):
    altUriFmt = True
response = self.coreapi('PUT', self._get_s3_key_uri(bucket, key, altUriFmt), value, None, content_type=CONTENT_TYPE_OCTET)
#TODO: server returns
if (response.status_code != 200 and response.status_code != 204 ):
    print "bucket_key_create failed with code: ", response.status_code
    raise Exception('failed to create key')
if BOURNE_DEBUG == '1':
    print response.headers
self._checkETag(response, md5str)
# Second fragment: PUT the key's ACL (the '?acl' subresource must be signed).
qparms = {'acl':None}
self._set_auth_and_ns_header('PUT', namespace, bucket, key, uid, secret, CONTENT_TYPE_OCTET, parameters_to_sign = qparms)
if (bodyAclValue):
    md5str = self._computeMD5(bodyAclValue)
response = self.coreapi('PUT', self._get_s3_key_uri(bucket, key) + '?acl', bodyAclValue, None, content_type=CONTENT_TYPE_OCTET)
if (response.status_code != 200 and response.status_code != 204 ):
    print "bucket_key_update failed with code: ", response.status_code
    raise Exception('failed to update ACL')
return response
# build qparms for list
def _build_list_params(self, params, qparms = None):
if qparms is None:
qparms = {}
for (key, value) in params.iteritems():
if value is not None:
qparms[key] = value
return qparms
# atmos related operations --begin
def atmos_hmac_base64_sig(self, method, content_type, uri, date, secret):
    """Compute the Atmos REST HMAC-SHA1 request signature.

    Builds the canonical string from the HTTP method, content type, byte
    range, date, lowercased URI and the sorted x-emc-* headers, signs it
    with the base64-decoded shared secret, and returns the base64-encoded
    digest.
    """
    byteRangeStr = ""
    custom_headers = {}
    # Collect all x-emc-* headers (lowercased) for the canonical string;
    # a Range header, if present, is signed via byteRangeStr.
    for header in self._headers.iterkeys():
        if re.match('^x-emc-', header, re.IGNORECASE):
            custom_headers[header.lower()] = self._headers[header]
        if header == "Range":
            byteRangeStr = self._headers[header]
    # A signature from a previous request must not be signed into this one.
    if ('x-emc-signature' in custom_headers):
        del custom_headers['x-emc-signature']
    msg = method + '\n' + \
        content_type + '\n' + \
        byteRangeStr + '\n' + \
        date + '\n' + \
        uri.lower() + '\n'
    # Custom headers are appended in sorted order, as the protocol requires.
    sorted_headers = custom_headers.keys()
    sorted_headers.sort()
    for sorted_header in sorted_headers:
        msg += sorted_header + ':' + custom_headers[sorted_header] + '\n'
    msg = msg.rstrip()
    if(BOURNE_DEBUG == '1'):
        print 'message to sign:\n' + msg
    key = base64.b64decode(secret)
    macer = hmac.new(key, msg, hashlib.sha1)
    if(BOURNE_DEBUG == '1'):
        print "hmac string:"+base64.b64encode(macer.digest())
    return base64.b64encode(macer.digest())
def atmos_object_create(self, namespace, value, uid, secret):
    """Create an Atmos object via the namespace or object interface.

    Signs the request with the Atmos HMAC scheme, POSTs the value, and
    returns the new object id parsed from the Location response header.
    Raises Exception on any status other than 201.
    """
    uri = ""
    if (namespace):
        uri = URI_ATMOS_NAMESPACE_INSTANCE.format(namespace)
    else:
        uri = URI_ATMOS_OBJECTS
    method = 'POST'
    content_type = CONTENT_TYPE_OCTET
    # RFC 1123 GMT date; it is part of the signed canonical string.
    date = email.Utils.formatdate(timeval=None, localtime=False, usegmt=True)
    length = str(0)
    if value is not None:
        length = str(len(value))
    # The signing headers live in the shared header dict and are removed
    # again after the request (see cleanup below).
    self._headers['Content-Length'] = length
    self._headers['date'] = date
    #_headers['x-emc-date'] = date
    self._headers['x-emc-uid'] = uid
    self._headers['x-emc-meta'] = 'color=red,city=seattle,key='
    self._headers['x-emc-signature'] = self.atmos_hmac_base64_sig(method, content_type, uri, date, secret)
    response = self.coreapi(method, uri, value, None, None, content_type)
    #cleanup the global variable
    del self._headers['Content-Length']
    del self._headers['date']
    del self._headers['x-emc-uid']
    del self._headers['x-emc-signature']
    if (response.status_code != 201):
        print "atmos_object_create failed with code: ", response.status_code
        if(BOURNE_DEBUG == '1'):
            print 'response:\n' + response.content
        raise Exception('failed to create object')
    # The object id is the trailing path component of the Location header.
    location = response.headers['location']
    match = re.match(r"/rest/objects/(\w+)", location)
    if (not match):
        print "The location header doesn't contain a valid object id: ", location
        raise Exception('failed to create object')
    objectid = match.group(1)
    if(BOURNE_DEBUG == '1'):
        print 'object id:\n' + objectid
    return objectid
def atmos_object_read(self, oid, namespace, uid, secret):
    """Read an Atmos object by namespace path or by object id; return its content.

    Exactly one of namespace/oid must be provided. Raises Exception when
    neither is given or the response status is not 200.
    """
    uri = ""
    if (namespace):
        uri = URI_ATMOS_NAMESPACE_INSTANCE.format(namespace)
    elif (oid):
        uri = URI_ATMOS_OBJECT_INSTANCE.format(oid)
    else:
        print "Neither object id or namespace is provided"
        raise Exception('failed to read object')
    method = 'GET'
    content_type = CONTENT_TYPE_OCTET
    # RFC 1123 GMT date; it is part of the signed canonical string.
    date = email.Utils.formatdate(timeval=None, localtime=False, usegmt=True)
    self._headers['x-emc-date'] = date
    self._headers['date'] = date
    self._headers['x-emc-uid'] = uid
    self._headers['x-emc-signature'] = self.atmos_hmac_base64_sig(method, content_type, uri, date, secret)
    response = self.coreapi(method, uri, None, None, None, content_type)
    #cleanup the global variable
    del self._headers['date']
    del self._headers['x-emc-date']
    del self._headers['x-emc-uid']
    del self._headers['x-emc-signature']
    if (response.status_code != 200):
        print "atmos_object_read failed with code: ", response.status_code
        raise Exception('failed to read object')
    return response.content
def atmos_object_delete(self, oid, namespace, uid, secret):
uri = ""
if (namespace):
uri = URI_ATMOS_NAMESPACE_INSTANCE.format(namespace)
elif (oid):
uri = URI_ATMOS_OBJECT_INSTANCE.format(oid)
else:
print "Neither object id or namespace is provided"
raise Exception('failed to delete object')
method = 'DELETE'
content_type = CONTENT_TYPE_OCTET
date = email.Utils.formatdate(timeval=None, localtime=False, usegmt=True)
self._headers['x-emc-date'] = date
self._headers['date'] = date
self._headers['x-emc-uid'] = uid
self._headers['x-emc-signature'] = self.atmos_hmac_base64_sig(method, content_type, uri, date, secret)
response = self.coreapi(method, uri, None, None, None, content_type)
#cleanup the global variable
del self._headers['date']
del self._headers['x-emc-date']
del self._headers['x-emc-uid']
del self._headers['x-emc-signature']
if (response.status_code != 204):
print "atmos_object_read failed with code: ", response.status_code
raise Exception('failed to delete object')
# atmos related operation --end
# value starting with @ char is a file, e.g. @/etc/hosts
def security_add_zone_role(self, objecttype, objectname, role):
if( not objecttype in ['subject_id', 'group']):
raise Exception('type must be subject_id or group')
if( not role in ['SYSTEM_MONITOR','SYSTEM_AUDITOR','SYSTEM_ADMIN','SECURITY_ADMIN','TENANT_ADMIN',]):
raise Exception('role must be SYSTEM_MONITOR, SYSTEM_AUDITOR, SYSTEM_ADMIN, SECURITY_ADMIN, or TENANT_ADMIN')
parms = {
"add" : [ { "role" : [role], objecttype : objectname }]
}
print parms
response = self.__api('PUT', URI_VDC_ROLES, parms)
if (response.status_code != 200):
print "security assign role failed with code: ", response.status_code
raise Exception('security assign role: failed')
def _build_complete_mpu_payload(self, etagdict):
    """Build the <CompleteMultipartUpload> XML body from {part-number: etag}.

    S3 requires the Part elements in ascending part-number order.
    BUGFIX: the original relied on dict key order ("sorted_keys" was never
    sorted), which is arbitrary; the keys are now sorted explicitly.
    """
    root = ET.Element('CompleteMultipartUpload')
    root.set('xmlns', S3_XML_NS)
    # Note, the part list must be in ascending order
    for key in sorted(etagdict.keys()):
        partElem = ET.SubElement(root, 'Part')
        ET.SubElement(partElem, 'PartNumber').text = str(key)
        ET.SubElement(partElem, 'ETag').text = etagdict[key]
    return ET.tostring(root)
def _parse_complete_mpu_response(self, response):
version = None
if 'x-amz-version-id' in response.headers:
version = response.headers['x-amz-version-id']
payload = response.text
root = ET.fromstring(payload)
completetag = '{' + S3_XML_NS + '}CompleteMultipartUploadResult'
uritag = '{' + S3_XML_NS + '}Location'
buckettag = '{' + S3_XML_NS + '}Bucket'
keytag = '{' + S3_XML_NS + '}Key'
etagtag = '{' + S3_XML_NS + '}ETag'
if root.tag != completetag:
print "invalid response", response
raise Exception('Invalid response, no CompleteMultipartUploadResult')
bucket = root.find(buckettag).text
key = root.find(keytag).text
uri = root.find(uritag).text
etag = root.find(etagtag).text
return {'version':version, 'etag':etag, 'uri':uri, 'key':key, 'bucket':bucket}
def _parse_list_mpu_parts_response(self, payload):
result = {}
root = ET.fromstring(payload)
listtag = '{' + S3_XML_NS + '}ListPartsResult'
buckettag = '{' + S3_XML_NS + '}Bucket'
keytag = '{' + S3_XML_NS + '}Key'
if root.tag != listtag:
print "invalid response payload", payload
raise Exception('Invalid response, no ListPartsResult')
result['bucket'] = root.find(buckettag).text
result['key'] = root.find(keytag).text
initiatortag = '{' + S3_XML_NS + '}Initiator'
idtag = '{' + S3_XML_NS + '}ID'
nametag = '{' + S3_XML_NS + '}DisplayName'
ownertag= '{' + S3_XML_NS + '}Owner'
initiator = root.find(initiatortag)
print "debug initiator = ",initiator
result['initiator'] = {'id':initiator.find(idtag).text, 'name':initiator.find(nametag).text}
owner = root.find(ownertag)
result['owner'] = {'id':owner.find(idtag).text, 'name':owner.find(nametag).text}
maxtag = '{' + S3_XML_NS + '}MaxParts'
markertag = '{' + S3_XML_NS + '}PartNumberMarker'
nexttag = '{' + S3_XML_NS + '}NextPartNumberMarker'
trunctag = '{' + S3_XML_NS + '}IsTruncated'
result['maxparts'] = root.find(maxtag).text
if None != root.find(markertag):
result['marker'] = root.find(markertag).text
result['truncated'] = root.find(trunctag).text
if None != root.find(nexttag):
result['nextmarker'] = root.find(nexttag).text
parttag = '{' + S3_XML_NS + '}Part'
etagtag = '{' + S3_XML_NS + '}ETag'
sizetag = '{' + S3_XML_NS + '}Size'
mtimetag = '{' + S3_XML_NS + '}LastModified'
partnumtag = '{' + S3_XML_NS + '}PartNumber'
index = 1
parts = []
for part in root.findall(parttag):
partdict = {}
partdict['num'] = part.find(partnumtag).text
partdict['etag'] = part.find(etagtag).text
partdict['mtime'] = part.find(mtimetag).text
partdict['size'] = part.find(sizetag).text
parts.append(partdict)
result['parts'] = parts
return result
def _parse_list_mpu_uploads_response(self, payload):
result = {}
root = ET.fromstring(payload)
list_tag = '{' + S3_XML_NS + '}ListMultipartUploadsResult'
bucket_tag = '{' + S3_XML_NS + '}Bucket'
keymarker_tag = '{' + S3_XML_NS + '}KeyMarker'
uploadidmarker_tag = '{' + S3_XML_NS + '}UploadIdMarker'
nextkeymarker_tag = '{' + S3_XML_NS + '}NextKeyMarker'
nextuploadidmarker_tag = '{' + S3_XML_NS + '}NextUploadIdMarker'
maxuploads_tag = '{' + S3_XML_NS + '}MaxUploads'
delimiter_tag = '{' + S3_XML_NS + '}Delimiter'
prefix_tag = '{' + S3_XML_NS + '}Prefix'
commonprefixes_tag = '{' + S3_XML_NS + '}CommonPrefixes'
istruncated_tag = '{' + S3_XML_NS + '}IsTruncated'
upload_tag = '{' + S3_XML_NS + '}Upload'
if root.tag != list_tag:
print "invalid response payload", payload
raise Exception('Invalid response, no ListMultipartUploadsResult')
result['bucket'] = root.find(bucket_tag).text
if None != root.find(keymarker_tag):
result['keymarker'] = root.find(keymarker_tag).text
if None != root.find(uploadidmarker_tag):
result['uploadidmarker'] = root.find(uploadidmarker_tag).text
if None != root.find(nextkeymarker_tag):
result['nextkeymarker'] = root.find(nextkeymarker_tag).text
if None != root.find(nextuploadidmarker_tag):
result['nextuploadidmarker'] = root.find(nextuploadidmarker_tag).text
if None != root.find(maxuploads_tag):
result['maxuploads'] = root.find(maxuploads_tag).text
if None != root.find(delimiter_tag):
result['delimiter'] = root.find(delimiter_tag).text
if None != root.find(prefix_tag):
result['prefix'] = root.find(prefix_tag).text
if None != root.find(istruncated_tag):
result['istruncated'] = root.find(istruncated_tag).text
uploads = []
for upload in root.findall(upload_tag):
uploaddict = {}
key_tag = '{' + S3_XML_NS + '}Key'
uploadid_tag = '{' + S3_XML_NS + '}UploadId'
initiator_tag = '{' + S3_XML_NS + '}Initiator'
id_tag = '{' + S3_XML_NS + '}ID'
name_tag = '{' + S3_XML_NS + '}DisplayName'
owner_tag= '{' + S3_XML_NS + '}Owner'
initated_tag = '{' + S3_XML_NS + '}Initiated'
initiator = root.find(initiator_tag)
if None != initiator:
uploaddict['initiator'] = {'id':initiator.find(id_tag).text, 'name':initiator.find(name_tag).text}
owner = root.find(owner_tag)
if None != owner:
uploaddict['owner'] = {'id':owner.find(id_tag).text, 'name':owner.find(name_tag).text}
uploaddict['key'] = upload.find(key_tag).text
uploaddict['uploadid'] = upload.find(uploadid_tag).text
uploads.append(uploaddict)
result['uploads'] = uploads
commonPrefixes = []
for prefix in root.findall(commonprefixes_tag):
commonPrefixes.append({'prefix':prefix.find(prefix_tag).text})
result['commonPrefixes'] = commonPrefixes
return result
def objtz_list(self):
    """List all object transport zones (object networks)."""
    return self.api('GET', URI_OBJECTTZ)
def objtz_show(self, uri):
    """Show a single object transport zone by id."""
    target = URI_OBJECTTZ_INSTANCE.format(uri)
    return self.api('GET', target)
def objtz_create(self, name, tz):
    """Create an object transport zone; name and backing network are optional."""
    parms = {}
    if name:
        parms['name'] = name
    if tz:
        parms['network'] = tz
    return self.api('POST', URI_OBJECTTZ, parms)
def objtz_update(self, objtz, tz):
    """Update the backing network of an object transport zone."""
    parms = {}
    if tz:
        parms['network'] = tz
    target = URI_OBJECTTZ_INSTANCE.format(objtz)
    return self.api('PUT', target, parms)
def objtz_delete(self, objtz):
    """Deactivate (delete) an object transport zone."""
    target = URI_OBJECTTZ_DELETE.format(objtz)
    return self.api('POST', target)
def passwordgroup_create(self, uid, password, groups, namespace):
parms = dict()
if password:
parms['password'] = password
if groups:
parms['groups_list'] = groups
if namespace:
parms['namespace'] = namespace
response = self.__api('PUT', URI_PASSWORDGROUP.format(uid), parms)
if response.status_code != HTTP_OK:
print "failure:", response
raise Exception("failed to update password/groups")
# props = dict()
# entry = params['properties']['entry']
# if(isinstance(entry, dict)):
# key = entry['key']
# value = entry['value']
# props[key] = value
# else:
# for entry in params['properties']['entry']:
# print type(entry)
# #print key
# #print value
# key = entry['key']
# value = entry['value']
# props[key] = value
# return props
# This routine will raise an exception of the obj passed
# in is not a dictionary
#
# Compute Resources - Vcenter
#
#
# Compute Resources - Vcenter Data Center
#
#
# Compute Resources - Cluster
#
# Service Catalog
#
# Compute Resources - Host
#
#
# Actionable Events
#
#
# Compute Resources - host initiator
#
#
# Compute Resources - host ipinterface
#
#
# Retention classes
#
#
# ECS Bucket oprations
#
# input param to be changed to label
# Snapshot session operations
#
# ComputeSystem Resources - ComputeSystem
#
# APIs for computeSystem (UCS)
# List all compute systems GET /vdc/compute-systems
# Fetch/query compute systems by name/label
# Return service profile template id, for the given serviceprofile template name from the given computeSystem name
# Return compute element id, for the given compute element name from the given computeSystem name
# Show details of given computesystem uri GET /vdc/compute-systems/{0}
# Get task for a given computesystem uri and task uri
# Create a compute system POST /vdc/compute-systems
# update a compute system PUT /vdc/compute-systems/{0} with the specified imageserver
# Delete compute system
# Deregister a compute system
#
# Compute Image Server Resources - ComputeImageServer
# APIs for Compute Image Server
#
# List compute image servers
# Get specified image server by name
# Show details of compute image server
# Get task for a given compute imageserver uri and task uri
# create a compute image server
# update a compute image server
# delete compute image server
#
# Compute Image Resources - ComputeImage
# APIs for Compute Image
#
# List all computeImages
# Fetch a compute image
# show details of compute image
# Get task for a given compute image uri and task uri
# Create a compute image
# delete a compute image
#
#compute virtual pool APIs
#
# Create a compute virtual pool
# Assign compute elements to the compute virtual pool
# Fetch a compute virtual pool
# list all compute virtual pools
# lists all filepolicies
# queries filepolicy
# deletes the filepolicy
# creates the filepolicy
# assigns thefilepolicy to vPool
# assigns the filepolicy to project
# unassigns the filepolicy from vpool
# unassigns the filepolicy from project
| [
2,
19617,
28,
40477,
12,
23,
198,
2,
198,
2,
15069,
357,
66,
8,
1853,
412,
9655,
10501,
198,
2,
1439,
6923,
33876,
198,
2,
198,
198,
11748,
1841,
489,
571,
198,
11748,
269,
17752,
198,
11748,
1822,
29572,
198,
11748,
25064,
198,
1... | 1.988148 | 29,278 |
# -*- coding: utf-8 -*-
"""
@author: stoberblog
@detail: This is a configuration file for the Solar Modbus project.
"""
# MODBUS DETAILS
INVERTER_IP = "192.168.1.29"
MODBUS_PORT = 7502
METER_ADDR = 240
MODBUS_TIMEOUT = 30 #seconds to wait before failure
# METER INSTALLED
METER_INSTALLED = True
# DATABASE
DATABASE_TYPE = "mariadb" # Current options: mariadb
DATABASE_ADDR = "127.0.0.1"
DATABASE_USER = "sUser"
DATABASE_PASSWD = "sPasswd"
DATABASE_DB = "solarMB"
#SCHEDULER
SCHED_INTERVAL = 1 # Minutes between recollecting new data
# DATA
EPOCH_INVERTER = False # False = Use compueter time, True = get time off inverter (scheduler will still use compurter time)
POW_THERESHOLD = 10 # Watt threshold
LOG_LEVEL = "ERROR" # Levels: NONE, FATAL, ERROR, NOTICE, DEBUG
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
201,
198,
37811,
201,
198,
31,
9800,
25,
220,
220,
220,
220,
220,
220,
220,
220,
336,
2023,
14036,
201,
198,
31,
49170,
25,
220,
220,
220,
220,
220,
220,
220,
220,
770,
... | 2.233596 | 381 |
from async_asgi_testclient import TestClient
from myapp import main
import pytest
| [
6738,
30351,
62,
292,
12397,
62,
9288,
16366,
1330,
6208,
11792,
198,
6738,
616,
1324,
1330,
1388,
198,
11748,
12972,
9288,
628,
628,
628,
628,
628,
628
] | 3.444444 | 27 |
from .backlight_mode import BacklightMode
from .angle_unit import AngleUnit
from .measurement_unit import MeasurementUnit
| [
6738,
764,
1891,
2971,
62,
14171,
1330,
5157,
2971,
19076,
198,
6738,
764,
9248,
62,
20850,
1330,
42375,
26453,
198,
6738,
764,
1326,
5015,
434,
62,
20850,
1330,
24291,
434,
26453,
198
] | 3.8125 | 32 |
import random
from operations import abstract_operation
| [
11748,
4738,
198,
198,
6738,
4560,
1330,
12531,
62,
27184,
628
] | 5.272727 | 11 |