content stringlengths 1 1.04M | input_ids listlengths 1 774k | ratio_char_token float64 0.38 22.9 | token_count int64 1 774k |
|---|---|---|---|
# Generated by Django 3.1.3 on 2020-11-23 09:25
import django.core.files.storage
from django.db import migrations, models
| [
2,
2980,
515,
416,
37770,
513,
13,
16,
13,
18,
319,
12131,
12,
1157,
12,
1954,
7769,
25,
1495,
198,
198,
11748,
42625,
14208,
13,
7295,
13,
16624,
13,
35350,
198,
6738,
42625,
14208,
13,
9945,
1330,
15720,
602,
11,
4981,
628
] | 2.952381 | 42 |
from django.conf.urls import url
from django.conf import settings
from django.conf.urls.static import static
from apprentice_learner import views
app_name = 'apprentice_api'
urlpatterns = [
url(r'^create/$', views.create, name = "create"),
url(r'^request/(?P<agent_id>[0-9]+)/$', views.request, name="request"),
url(r'^train/(?P<agent_id>[0-9]+)/$', views.train, name="train"),
url(r'^check/(?P<agent_id>[0-9]+)/$', views.check, name="check"),
url(r'^report/(?P<agent_id>[0-9]+)/$', views.report, name="report"),
url(r'^request/(?P<agent_name>[a-zA-Z0-9_-]{1,200})/$', views.request_by_name, name="request_by_name"),
url(r'^train/(?P<agent_name>[a-zA-Z0-9_-]{1,200})/$', views.train_by_name, name="train_by_name"),
url(r'^check/(?P<agent_name>[a-zA-Z0-9_-]{1,200})/$', views.check_by_name, name="check_by_name"),
url(r'^report/(?P<agent_name>[a-zA-Z0-9_-]{1,200})/$', views.report_by_name, name="report_by_name"),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
| [
6738,
42625,
14208,
13,
10414,
13,
6371,
82,
1330,
19016,
198,
6738,
42625,
14208,
13,
10414,
1330,
6460,
198,
6738,
42625,
14208,
13,
10414,
13,
6371,
82,
13,
12708,
1330,
9037,
198,
198,
6738,
39252,
62,
3238,
1008,
1330,
5009,
198,
... | 2.290909 | 440 |
from config.Configuration import Configuration
import logging
class LoggingConfiguration(Configuration):
"""
Author: Ronny Friedland
Handles logging configuration
"""
CONFIG_DEFAULTS = {'loglevel':'INFO'}
def check_config(self):
"""
Check if config.ini contains logging settings
:return: true if configuration contains logging settings
"""
return super().check_config(section="logging")
def refresh_config(self):
"""
Refreshed logging configuration and (re-)-init logging
"""
super().refresh_config()
if self.check_config() is not False:
level = super().read_config("logging", "loglevel")
else:
level = "INFO"
if level == "DEBUG":
logging.basicConfig(filename='minioclient.log', level=logging.DEBUG)
elif level == "INFO":
logging.basicConfig(filename='minioclient.log', level=logging.INFO)
elif level == "WARN":
logging.basicConfig(filename='minioclient.log', level=logging.WARN)
else:
logging.basicConfig(filename='minioclient.log', level=logging.ERROR)
| [
6738,
4566,
13,
38149,
1330,
28373,
198,
198,
11748,
18931,
628,
198,
4871,
5972,
2667,
38149,
7,
38149,
2599,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
6434,
25,
6575,
3281,
15442,
1044,
628,
220,
220,
220,
7157,
829,
18931,
839... | 2.519149 | 470 |
import dataclasses
import pprint
import sys
import ccc.oci
import oci
__cmd_name__ = 'oci'
| [
11748,
4818,
330,
28958,
198,
11748,
279,
4798,
198,
11748,
25064,
198,
198,
11748,
269,
535,
13,
1733,
198,
11748,
267,
979,
198,
198,
834,
28758,
62,
3672,
834,
796,
705,
1733,
6,
628,
628,
628
] | 2.722222 | 36 |
# Generated by Django 3.1 on 2021-06-09 14:32
from django.db import migrations, models
| [
2,
2980,
515,
416,
37770,
513,
13,
16,
319,
33448,
12,
3312,
12,
2931,
1478,
25,
2624,
198,
198,
6738,
42625,
14208,
13,
9945,
1330,
15720,
602,
11,
4981,
628
] | 2.966667 | 30 |
#!/usr/bin/env python
__author__ = "bt3"
''' Queue acts as a container for nodes (objects) that are inserted and removed according FIFO'''
if __name__ == '__main__':
queue = LinkedQueue()
print("Is the queue empty? ", queue.isEmpty())
print("Adding 0 to 10 in the queue...")
for i in range(10):
queue.enqueue(i)
print("Is the queue empty? ", queue.isEmpty())
queue._print()
print("Queue size: ", queue.size())
print("Queue peek : ", queue.peek())
print("Dequeue...", queue.dequeue())
print("Queue peek: ", queue.peek())
queue._print()
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
198,
834,
9800,
834,
796,
366,
18347,
18,
1,
198,
198,
7061,
6,
4670,
518,
6529,
355,
257,
9290,
329,
13760,
357,
48205,
8,
326,
389,
18846,
290,
4615,
1864,
376,
5064,
46,
7061,
6... | 2.716216 | 222 |
"""add about_me, last_seen fields to users table.
Revision ID: 04f42852fee8
Revises: 60ae9f5ea6d5
Create Date: 2018-03-27 06:43:46.542593
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '04f42852fee8'
down_revision = '60ae9f5ea6d5'
branch_labels = None
depends_on = None
| [
37811,
2860,
546,
62,
1326,
11,
938,
62,
15898,
7032,
284,
2985,
3084,
13,
198,
198,
18009,
1166,
4522,
25,
8702,
69,
40173,
4309,
39071,
23,
198,
18009,
2696,
25,
3126,
3609,
24,
69,
20,
18213,
21,
67,
20,
198,
16447,
7536,
25,
2... | 2.515152 | 132 |
"""
This module contain various types of functions/classes to access and generate CAM.
"""
import warnings
from typing import List, Tuple, Union
import matplotlib.pyplot as plt
import nibabel as nib
import numpy as np
import torch
import torchcam
import torchvision
from src.display.cmap import parula_map
from src.files import preprocess
from src.files.preprocess import image2axial, to_grid
from src.types.string import split_custom_filename
class CAM:
""" """
def __init__(self, model, cam_type=torchcam.cams.GradCAMpp,target_layer:str="model.layer4", cam_kwargs:dict={}):
"""
Parameters
----------
model :
cam_type :
(Default value = torchcam.cams.GradCAMpp)
target_layer : str
(Default value = "model.layer4")
cam_kwargs : dict
(Default value = {})
Returns
-------
"""
self._CLASSES=[0,1,2]
self.CAM_TYPE = cam_type
self.TARGET_LAYER = target_layer
self.model = model
self.extractor = cam_type(model, target_layer=target_layer, **cam_kwargs)
def class_score(self, input_image:'np.ndarray', device='cuda', input_shape=(79,95,79)) -> 'Tuple[torch.Tensor, int]':
"""Calculate the class scores and the highest probability of the target class
Args:
input_image('np.ndarray'):
device: (Default value = 'cuda')
input_shape: (Default value = (79,95,79))
Returns:
'Tuple[torch.Tensor,int]'': All the probabilities and the best probability class
Raises:
"""
image = preprocess.preprocess_image(input_image)
image = preprocess.batchisize_to_5D(image)
image_tensor = torch.from_numpy(image).float()
model = self.model.to(device).eval()
image_tensor = image_tensor.to(device)
# Check that image have the correct shape
assert tuple(image_tensor.shape) == (1, 1, *input_shape), f"Got image shape: {image_tensor.shape} expected: {(1, 1, *input_shape)}"
assert model.device == image_tensor.device, f"Model and image are not on same device: Model: {model.device} Image: {image_tensor.device}"
class_scores = model(image_tensor)
return class_scores, class_scores.squeeze(0).argmax().item()
def activations(self, class_idx:int=None, class_scores:'torch.Tensor'=None) -> 'np.ndarray':
"""Retrieve the map based on the score from the model
Args:
class_idx(int, optional): (Default value = None)
class_scores('torch.Tensor', optional): (Default value = None)
Returns:
np.ndarray: Tensor with activations from image with shape tensor[D,H,W]
Raises:
"""
return self.extractor(class_idx, class_scores, normalized=False).detach().cpu()
@staticmethod
def plot(images:list=[], masks:list=[], labels=[],cmap:list=parula_map, alpha:float=0.7, class_label:str=None, predicted_override:bool=None, architecture:str=None) -> plt.Figure:
"""Create a plot from the given class activation map and input image. CAM is calculated from the models weights and the probability distribution of each class.
Args:
images(list, optional): (Default value = [])
masks(list, optional): (Default value = [])
labels: (Default value = [])
cmap(list, optional): Color object (Default value = parula_map)
alpha(float, optional): int (Default value = 0.7)
class_label(str, optional): str (Default value = None)
predicted_override(bool, optional): Define if the cam class is overwritten (Default value = None)
architecture(str, optional): Name of the architecture provided to add in plot title (Default value = None)
Returns:
type: output (Figure): Figure reference to plot
Raises:
"""
#class_idx = class_idx if isinstance(class_idx, list) else [class_idx]
if (max_length :=len(masks)) > len(images):
pass
else:
max_length = len(images)
if max_length == 0:
raise ValueError("Number of images/masks cant be zero!")
fig, axes = plt.subplots(ncols=max_length,nrows=1,figsize=(max_length*8,8))
if max_length > 1:
# Add images
for i, image in enumerate(images):
im = axes[i].imshow(image,cmap='Greys_r', vmin=image.min(), vmax=image.max())
# Add masks
for i, mask in enumerate(masks):
im = axes[i].imshow(mask,cmap=cmap, alpha=alpha,vmin=mask.min(), vmax=mask.max())
else:
for i, image in enumerate(images):
im = axes.imshow(image,cmap='Greys_r', vmin=image.min(), vmax=image.max())
# Add masks
for i, mask in enumerate(masks):
im = axes.imshow(mask,cmap=cmap, alpha=alpha,vmin=mask.min(), vmax=mask.max())
# Add labels
classes = {
0:'CN',
1:'MCI',
2:'AD'
}
for i, label in enumerate(labels):
title_list = [out for out, con in [
(f'{architecture}',architecture),
#(f'{type(self.extractor).__name__}',True),
(f'Patient: {class_label}',class_label),
(f'Predicted: {classes[label]}',label),
(f'Overrided',predicted_override)] if con != None
]
if max_length > 1:
axes[i].set_title(', '.join(title_list))
else:
axes.set_title(', '.join(title_list))
if max_length > 1:
for a in axes.flatten():
a.set_axis_off()
a.set_xticklabels([])
a.set_yticklabels([])
else:
axes.set_axis_off()
axes.set_xticklabels([])
axes.set_yticklabels([])
# Remove axis data to show colorbar more clean
ax = axes.ravel().tolist() if max_length > 1 else axes
plt.subplots_adjust(wspace=0.01, hspace=0)
cbar = fig.colorbar(im, ax=ax, shrink=1)
return fig
@staticmethod
def get_cam(model, cam_type:'torchcam.cams.gradcam._GradCAM', input_shape:Tuple=(79,95,79),target_layer:str=None,CAM_kwargs:dict={}) -> 'torchcam.cams.gradcam._GradCAM':
"""Generate CAM object
Args:
model:
cam_type('torchcam.cams.gradcam._GradCAM'):
input_shape(Tuple, optional): (Default value = (79,95,79))
target_layer(str, optional): (Default value = None)
CAM_kwargs(dict, optional): (Default value = {})
Returns:
Raises:
"""
extractor = cam_type(model, input_shape=(1,*input_shape), target_layer=target_layer, **CAM_kwargs)
return extractor
@staticmethod
def average_image(images:list) -> 'torch.Tensor':
"""Calculate average over multiple images
Args:
images(list):
Returns:
Raises:
"""
return torch.mean(torch.stack(images), axis=0)
@staticmethod
def repeat_stack(image:'torch.Tensor', repeat:int=1, grid_kwargs:dict={}) -> 'torch.Tensor':
"""Repeat am image in a grid N number of times.
Args:
image('torch.Tensor'):
repeat(int, optional): (Default value = 1)
grid_kwargs(dict, optional): (Default value = {})
Returns:
Raises:
"""
return torch.stack([to_grid(image, **grid_kwargs)]*repeat)
@staticmethod
def preprocess(filename:str) -> np.ndarray:
"""Preprocess image to a valid format
Args:
filename(str):
Returns:
Raises:
"""
class_label = split_custom_filename(filename,'/')[4]
image = image2axial(nib.load(filename).get_fdata())
image[image <= 0]=0
image = preprocess.preprocess_image(image)
return image | [
37811,
198,
1212,
8265,
3994,
2972,
3858,
286,
5499,
14,
37724,
284,
1895,
290,
7716,
32421,
13,
198,
198,
37811,
198,
11748,
14601,
198,
6738,
19720,
1330,
7343,
11,
309,
29291,
11,
4479,
198,
198,
11748,
2603,
29487,
8019,
13,
9078,
... | 2.16263 | 3,757 |
print "SELECT pair_email FROM users where email = %s" , '236294386@qq.com' | [
4798,
366,
46506,
5166,
62,
12888,
16034,
2985,
810,
3053,
796,
4064,
82,
1,
837,
705,
24940,
27696,
21734,
31,
38227,
13,
785,
6
] | 3.083333 | 24 |
import matplotlib.cm as cm
import matplotlib.pyplot as plt
import numpy as np
import scipy.stats as stats
import statsmodels.api as sm
from matplotlib.font_manager import FontProperties
from matplotlib.ticker import MaxNLocator
class GenerateTestPlots:
"""Generates plots using data collected during model run."""
def __init__(self, results):
"""Initialize the plot generator.
Args:
results (dict): Model run data.
"""
self.step_time = results["step_time"]
self.times = results["times"]
self.high_prob_blocks = results["high_prob_blocks"]
self.choices = results["choices"]
self.rewarded_sides = results["rewarded_sides"]
self.rewarded_trials = results["rewarded_trials"]
self.values = results["values"]
self.RPEs = results["RPEs"]
self.stimulated_trials = results["stimulated_trials"]
self.right_decision_value = results["right_decision_value"]
self.left_decision_value = results["left_decision_value"]
self.NAc_activity = results["NAc_activity"]
self.peak_reward_times = results["peak_reward_times"]
| [
11748,
2603,
29487,
8019,
13,
11215,
355,
12067,
198,
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
629,
541,
88,
13,
34242,
355,
9756,
198,
11748,
9756,
27530,
13,
15042,
355,
... | 2.54185 | 454 |
# arr=Arr([2,1,3,4,5,6,1,1,6,2,4,5,3,1])
# print(arr.reverse())
arr=Arr([3, -5,-1])
print(arr.minimize_flip_k_length_subarr([1,2,3]))
| [
2,
5240,
28,
3163,
81,
26933,
17,
11,
16,
11,
18,
11,
19,
11,
20,
11,
21,
11,
16,
11,
16,
11,
21,
11,
17,
11,
19,
11,
20,
11,
18,
11,
16,
12962,
198,
2,
3601,
7,
3258,
13,
50188,
28955,
198,
3258,
28,
3163,
81,
26933,
18... | 1.654321 | 81 |
# Copyright (c) 2021, Technische Universität Kaiserslautern (TUK) & National University of Sciences and Technology (NUST).
# All rights reserved.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import print_function
from __future__ import division
import sys
import pickle
import pandas as pd
import seaborn as sn
import matplotlib.pyplot as plt
if __name__ == '__main__':
confusion(file_path=sys.argv[1])
| [
2,
15069,
357,
66,
8,
33448,
11,
5429,
46097,
26986,
270,
11033,
83,
11611,
21572,
75,
2306,
1142,
357,
51,
15039,
8,
1222,
2351,
2059,
286,
13473,
290,
8987,
357,
45,
7759,
737,
198,
2,
1439,
2489,
10395,
13,
198,
198,
2,
770,
27... | 3.416107 | 149 |
import data.music_theory as music_data
from app.note import Note
from app.base import Base
class Chord(Base):
"""
"""
def invert_chord(self, chord, inversion = 'root'):
"""
Args:
chord (list): The notes of the chord
inversion (str): The inversion name (root, first, second; third for 4-note chords)
Returns:
list: The notes of the inverted chord
"""
if inversion == 'root':
return chord
elif len(chord) == 3:
if inversion == 'first':
return chord[1], chord[2], chord[0]
elif inversion == 'second':
return chord[2], chord[0], chord[1]
elif len(chord) == 4:
if inversion == 'first':
return chord[1], chord[2], chord[3], chord[0]
elif inversion == 'second':
return chord[2], chord[3], chord[0], chord[1]
elif inversion == 'third':
return chord[3], chord[0], chord[1], chord[2]
return chord
| [
11748,
1366,
13,
28965,
62,
1169,
652,
355,
2647,
62,
7890,
198,
6738,
598,
13,
11295,
1330,
5740,
198,
6738,
598,
13,
8692,
1330,
7308,
198,
198,
4871,
609,
585,
7,
14881,
2599,
198,
220,
220,
220,
37227,
628,
220,
220,
220,
37227,... | 2.066536 | 511 |
from lineflow.download import cached_download
from lineflow.datasets import Seq2SeqDataset
TRAIN_EN_URL = 'https://raw.githubusercontent.com/odashi/small_parallel_enja/master/train.en'
TRAIN_JA_URL = 'https://raw.githubusercontent.com/odashi/small_parallel_enja/master/train.ja'
DEV_EN_URL = 'https://raw.githubusercontent.com/odashi/small_parallel_enja/master/dev.en'
DEV_JA_URL = 'https://raw.githubusercontent.com/odashi/small_parallel_enja/master/dev.ja'
TEST_EN_URL = 'https://raw.githubusercontent.com/odashi/small_parallel_enja/master/test.en'
TEST_JA_URL = 'https://raw.githubusercontent.com/odashi/small_parallel_enja/master/test.ja'
| [
6738,
1627,
11125,
13,
15002,
1330,
39986,
62,
15002,
198,
6738,
1627,
11125,
13,
19608,
292,
1039,
1330,
1001,
80,
17,
4653,
80,
27354,
292,
316,
628,
198,
51,
3861,
1268,
62,
1677,
62,
21886,
796,
705,
5450,
1378,
1831,
13,
12567,
... | 2.7 | 240 |
from uykfe.sequence.db import DbControl
| [
198,
6738,
334,
48361,
5036,
13,
43167,
13,
9945,
1330,
360,
65,
15988,
628
] | 3 | 14 |
import warnings
warnings.simplefilter('ignore', Warning)
from whoosh_tests.tests.test_forms import *
from whoosh_tests.tests.test_inputs import *
from whoosh_tests.tests.test_whoosh_query import *
from whoosh_tests.tests.test_whoosh_backend import *
| [
11748,
14601,
198,
40539,
654,
13,
36439,
24455,
10786,
46430,
3256,
15932,
8,
198,
198,
6738,
508,
3768,
62,
41989,
13,
41989,
13,
9288,
62,
23914,
1330,
1635,
198,
6738,
508,
3768,
62,
41989,
13,
41989,
13,
9288,
62,
15414,
82,
1330... | 3.177215 | 79 |
# Generated by Django 3.2.8 on 2021-10-06 08:15
from django.db import migrations, models
| [
2,
2980,
515,
416,
37770,
513,
13,
17,
13,
23,
319,
33448,
12,
940,
12,
3312,
8487,
25,
1314,
198,
198,
6738,
42625,
14208,
13,
9945,
1330,
15720,
602,
11,
4981,
628
] | 2.84375 | 32 |
/home/wai/anaconda3/lib/python3.6/locale.py | [
14,
11195,
14,
86,
1872,
14,
272,
330,
13533,
18,
14,
8019,
14,
29412,
18,
13,
21,
14,
17946,
1000,
13,
9078
] | 1.954545 | 22 |
from setuptools import setup, find_packages
setup(name='pkg1',
packages=find_packages(),
test_suite='nose2.collector.collector')
| [
6738,
900,
37623,
10141,
1330,
9058,
11,
1064,
62,
43789,
628,
198,
40406,
7,
3672,
11639,
35339,
16,
3256,
198,
220,
220,
220,
220,
220,
10392,
28,
19796,
62,
43789,
22784,
198,
220,
220,
220,
220,
220,
1332,
62,
2385,
578,
11639,
... | 2.698113 | 53 |
import numpy as np
import argparse
import cv2
if __name__ == "__main__":
ap = argparse.ArgumentParser()
ap.add_argument('-i', '--image', required=False, help='path to the image',default='./image/lego_blocks_1.png')
ap.add_argument('-m', '--method', required=False, help='Sorting method',default='left-to-right')
args = vars(ap.parse_args())
image = cv2.imread(args['image'])
accumEdged = np.zeros(image.shape[:2], dtype='uint8')
for chan in cv2.split(image):
chan = cv2.medianBlur(chan, 11)
edged = cv2.Canny(chan, 50, 200)
accumEdged = cv2.bitwise_or(accumEdged, edged)
# find contours and keep the largest ones
cnts = cv2.findContours(accumEdged.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
cnts = grab_contours(cnts)
cnts = sorted(cnts, key=cv2.contourArea, reverse=True)[:5]
orig = image.copy()
# unsorted
for (i, c) in enumerate(cnts):
orig = draw_contour(orig, c, i)
cv2.imshow('Unsorted', orig)
# sorted
(cnts, boundingboxes) = sort_contours(cnts, method=args['method'])
image2 = image.copy()
for (i, c) in enumerate(cnts):
image2 = draw_contour(image2, c, i)
cv2.imshow('Sorted', image2)
out = cv2.hconcat([image,orig,image2])
cv2.imwrite("./result/out1.jpg",out)
cv2.waitKey(0)
| [
11748,
299,
32152,
355,
45941,
198,
11748,
1822,
29572,
198,
11748,
269,
85,
17,
628,
628,
198,
198,
361,
11593,
3672,
834,
6624,
366,
834,
12417,
834,
1298,
198,
220,
220,
220,
2471,
796,
1822,
29572,
13,
28100,
1713,
46677,
3419,
19... | 2.176948 | 616 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Small GUI for getting all the info for the Home Depot survey.
from tkinter import *
master = Tk()
Label(master, text="First Name").grid(row=0)
Label(master, text="Last Name").grid(row=1)
Label(master, text="E-mail").grid(row=2)
Label(master, text="Zip Code").grid(row=3)
e1 = Entry(master)
e2 = Entry(master)
e3 = Entry(master)
e4 = Entry(master)
e1.grid(row=0, column=1)
e2.grid(row=1, column=1)
e3.grid(row=2, column=1)
e4.grid(row=3, column=1)
Button(master, text='Quit', command=master.quit).grid(
row=7, column=0, sticky=W, pady=4)
Button(master, text='Enter', command=show_entry_fields).grid(
row=8, column=1, sticky=W, pady=4)
mainloop()
| [
2,
48443,
14629,
14,
8800,
14,
29412,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
2,
10452,
25757,
329,
1972,
477,
262,
7508,
329,
262,
5995,
30884,
5526,
13,
198,
6738,
256,
74,
3849,
1330,
1635,
628,
198,... | 2.417808 | 292 |
# Copyright (c) 2013, 2018 National Technology and Engineering Solutions of Sandia, LLC . Under the terms of Contract
# DE-NA0003525 with National Technology and Engineering Solutions of Sandia, LLC, the U.S. Government
# retains certain rights in this software.
import numpy
import slycat.web.server
| [
2,
15069,
357,
66,
8,
2211,
11,
2864,
2351,
8987,
290,
14044,
23555,
286,
3837,
544,
11,
11419,
764,
4698,
262,
2846,
286,
17453,
198,
2,
5550,
12,
4535,
830,
2327,
1495,
351,
2351,
8987,
290,
14044,
23555,
286,
3837,
544,
11,
11419... | 4.108108 | 74 |
#!/usr/bin/env python
from ctypes import *
from proteus import MeshTools
from proteus import cmeshTools
from proteus.MeshAdaptPUMI import MeshAdaptPUMI
from nose.tools import eq_ as eq
from nose.tools import ok_ as ok
import os
def test_meshLoadPUMI(verbose=0):
"""Test to load serial PUMI model and mesh"""
testDir=os.path.dirname(os.path.abspath(__file__))
cubeMdl=testDir + '/cube.dmg'
cube670p1=testDir + '/cube.smb'
meshAdaptInstance = MeshAdaptPUMI.MeshAdaptPUMI()
meshAdaptInstance.loadModelAndMesh(cubeMdl, cube670p1)
mesh = MeshTools.TetrahedralMesh()
mesh.cmesh = cmeshTools.CMesh()
meshAdaptInstance.constructFromSerialPUMIMesh(mesh.cmesh)
cmeshTools.allocateGeometricInfo_tetrahedron(mesh.cmesh)
cmeshTools.computeGeometricInfo_tetrahedron(mesh.cmesh)
mesh.buildFromC(mesh.cmesh)
eq(mesh.nElements_global,670)
eq(mesh.nNodes_global,190)
eq(mesh.nEdges_global,977)
eq(mesh.nElementBoundaries_global,1458)
if __name__ == '__main__':
import nose
nose.main(defaultTest='test_meshLoad:test_meshLoadPUMI')
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
6738,
269,
19199,
1330,
1635,
198,
6738,
5915,
385,
1330,
47529,
33637,
198,
6738,
5915,
385,
1330,
12067,
5069,
33637,
198,
6738,
5915,
385,
13,
37031,
48003,
5105,
8895,
1330,
47529,
4... | 2.498851 | 435 |
from django import forms
from .models import Meeting, Resource | [
6738,
42625,
14208,
1330,
5107,
198,
6738,
764,
27530,
1330,
22244,
11,
20857
] | 4.769231 | 13 |
from argparse import ArgumentParser
import numpy as np
import py360convert
import os
import cv2
from mmseg.apis import inference_segmentor, init_segmentor, show_result_pyplot, ret_result
from mmseg.core.evaluation import get_palette
from PIL import Image
import mmcv
if __name__ == '__main__':
main()
| [
6738,
1822,
29572,
1330,
45751,
46677,
198,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
12972,
15277,
1102,
1851,
198,
11748,
28686,
198,
11748,
269,
85,
17,
198,
198,
6738,
8085,
325,
70,
13,
499,
271,
1330,
32278,
62,
325,
5154,
... | 3 | 103 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
from netests.constants import NOT_SET
from netests.protocols.vrf import VRF, ListVRF
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
11748,
33918,
198,
6738,
2010,
3558,
13,
9979,
1187,
1330,
5626,
62,
28480,
198,
6738,
2010,
3558,
13,
11235,
... | 2.561404 | 57 |
"""The load unload problem. An agent is placed on a one dimensional grid world
and is tasked with loading itself up on the right side of the world and
unloading on the left. The agent can observe whether or not it is in the load or
unload block but can not tell its exact location of whether it is loaded or
unloaded. Therefore the agent must maintain belief about it's location and load
status.
States are defined by the location of the agent and whether or not it is loaded
Actions: "move-left", "move-right"
Rewards:
+100 for moving into the unload block while loaded
-1 otherwise
"""
import pomdp_py
import random
import numpy as np
import sys
import copy
import matplotlib.pyplot as plt
from matplotlib.animation import FuncAnimation
EPSILON = 1e-3
LOAD_LOCATION = 10
# Observation model
class LUObservationModel(pomdp_py.ObservationModel):
"""This problem is small enough for the probabilities to be directly given
externally"""
def argmax(self, next_state, action, normalized=False, **kwargs):
"""Returns the most likely observation"""
return self.sample(next_state, action)
# Transition Model
class LUTransitionModel(pomdp_py.TransitionModel):
"""This problem is small enough for the probabilities to be directly given
externally"""
def argmax(self, state, action, normalized=False, **kwargs):
"""Returns the most likely next state"""
return self.sample(state, action)
# Reward Model
# Policy Model
class LUPolicyModel(pomdp_py.RandomRollout):
"""This is an extremely dumb policy model; To keep consistent
with the framework."""
def argmax(self, state, normalized=False, **kwargs):
"""Returns the most likely reward"""
raise NotImplementedError
if __name__ == '__main__':
main()
| [
37811,
464,
3440,
555,
2220,
1917,
13,
1052,
5797,
318,
4624,
319,
257,
530,
38517,
10706,
995,
198,
392,
318,
23052,
351,
11046,
2346,
510,
319,
262,
826,
1735,
286,
262,
995,
290,
198,
403,
25138,
319,
262,
1364,
13,
383,
5797,
46... | 3.307692 | 546 |
from pathlib import Path
from fhir.resources.codesystem import CodeSystem
from oops_fhir.utils import CodeSystemConcept
__all__ = ["CodeSearchSupport"]
_resource = CodeSystem.parse_file(Path(__file__).with_suffix(".json"))
class CodeSearchSupport:
"""
CodeSearchSupport
The degree to which the server supports the code search parameter on
ValueSet, if it is supported.
Status: draft - Version: 4.0.1
Copyright None
http://hl7.org/fhir/code-search-support
"""
explicit = CodeSystemConcept(
{
"code": "explicit",
"definition": "The search for code on ValueSet only includes codes explicitly detailed on includes or expansions.",
"display": "Explicit Codes",
}
)
"""
Explicit Codes
The search for code on ValueSet only includes codes explicitly detailed on includes or expansions.
"""
all_ = CodeSystemConcept(
{
"code": "all",
"definition": "The search for code on ValueSet only includes all codes based on the expansion of the value set.",
"display": "Implicit Codes",
}
)
"""
Implicit Codes
The search for code on ValueSet only includes all codes based on the expansion of the value set.
"""
| [
6738,
3108,
8019,
1330,
10644,
198,
198,
6738,
277,
71,
343,
13,
37540,
13,
40148,
6781,
1330,
6127,
11964,
198,
198,
6738,
267,
2840,
62,
69,
71,
343,
13,
26791,
1330,
6127,
11964,
3103,
984,
628,
198,
834,
439,
834,
796,
14631,
10... | 2.822368 | 456 |
""" Some experiments with python ctypes """
# build the shared library using something like:
#
# ifort -shared -fPIC key_module.f90 linked_list_module.f90 tests/blah_module.f90 tests/c_pointer_test_module.f90 -o test.so
from ctypes import *
# load the shared library:
test = CDLL('test.so')
# define the procedure interfaces:
initialize_list = test.initialize_list
create_model = test.create_model
access_model = test.access_model
destroy_model = test.destroy_model
destroy_list = test.destroy_list
initialize_list.restype = None
access_model.restype = None
destroy_model.restype = None
destroy_list.restype = None
create_model.argtypes = [c_int]
create_model.restype = POINTER(c_int) # `c_long` also seems to work ?
# not really sure what is correct for this.
# Fortran returns a `type(c_ptr)`, maybe
# in this context it doesn't matter
print('')
print( 'calling initialize_list...')
initialize_list()
print( 'calling create_model...')
i = c_int(989) # some input to the model
cp = create_model(i)
#cp = byref(cp) # - don't need this if subroutine argument has `value` attribute
print( 'calling access_model...')
for j in range(10):
access_model(cp)
print( 'calling destroy_model...')
destroy_model(cp)
print( 'calling destroy_list...')
destroy_list()
| [
198,
37811,
2773,
10256,
351,
21015,
269,
19199,
37227,
198,
198,
2,
220,
1382,
262,
4888,
5888,
1262,
1223,
588,
25,
198,
2,
198,
2,
611,
419,
532,
28710,
532,
69,
47,
2149,
1994,
62,
21412,
13,
69,
3829,
6692,
62,
4868,
62,
2141... | 2.496503 | 572 |
class DocumintErrorCause(object):
"""
Specific error cause.
"""
class DocumintError(RuntimeError):
"""
Structured Documint error.
"""
class MalformedDocumintError(RuntimeError):
"""
An error, indicated by status code, was malformed.
:ivar bytes data: Error response data.
"""
__all__ = ['DocumintError', 'MalformedDocumintError']
| [
4871,
14432,
388,
600,
12331,
42323,
7,
15252,
2599,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
17377,
4049,
2728,
13,
198,
220,
220,
220,
37227,
628,
198,
4871,
14432,
388,
600,
12331,
7,
41006,
12331,
2599,
198,
220,
220,
220,
... | 2.834586 | 133 |
"""Tests for template tags of users."""
from django.test import TestCase
from geokey.categories.tests.model_factories import CategoryFactory
from ..templatetags import filter_tags
| [
37811,
51,
3558,
329,
11055,
15940,
286,
2985,
526,
15931,
198,
198,
6738,
42625,
14208,
13,
9288,
1330,
6208,
20448,
198,
198,
6738,
4903,
2088,
88,
13,
66,
26129,
13,
41989,
13,
19849,
62,
22584,
1749,
1330,
21743,
22810,
198,
198,
... | 3.471698 | 53 |
#-*- coding:utf-8 -*-
import numpy as np
import matplotlib.pyplot as plt
'''
最小二乘法回归
参考博客:http://blog.csdn.net/wangyangzhizhou/article/details/60133958
line 代数推导
matrixLine 矩阵推导
'''
if __name__ == '__main__':
# line()
matrixLine()
| [
2,
12,
9,
12,
19617,
25,
40477,
12,
23,
532,
9,
12,
198,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
198,
198,
7061,
6,
198,
17312,
222,
22887,
237,
12859,
234,
20046,
246,
373... | 1.742647 | 136 |
# -------------------------------------------------------------------------------
# Copyright IBM Corp. 2017
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -------------------------------------------------------------------------------
from six import iteritems
import pixiedust
from pixiedust.utils.shellAccess import ShellAccess
from pixiedust.utils.template import PixiedustTemplateEnvironment
from pixiedust.utils.environment import Environment,scalaGateway
import pandas as pd
import uuid
import tempfile
from collections import OrderedDict
from IPython.display import display, HTML, Javascript
try:
from urllib.request import Request, urlopen, URLError, HTTPError
except ImportError:
from urllib2 import Request, urlopen, URLError, HTTPError
dataDefs = OrderedDict([
("1", {
"displayName": "Car performance data",
"url": "https://github.com/ibm-watson-data-lab/open-data/raw/master/cars/cars.csv",
"topic": "transportation",
"publisher": "IBM",
"schema2": [('mpg','int'),('cylinders','int'),('engine','double'),('horsepower','int'),('weight','int'),
('acceleration','double'),('year','int'),('origin','string'),('name','string')]
}),
("2", {
"displayName": "Sample retail sales transactions, January 2009",
"url": "https://raw.githubusercontent.com/ibm-watson-data-lab/open-data/master/salesjan2009/salesjan2009.csv",
"topic": "Economy & Business",
"publisher": "IBM Cloud Data Services"
}),
("3", {
"displayName": "Total population by country",
"url": "https://apsportal.ibm.com/exchange-api/v1/entries/889ca053a19986a4445839358a91963e/data?accessKey=657b130d504ab539947e51b50f0e338e",
"topic": "Society",
"publisher": "IBM Cloud Data Services"
}),
("4", {
"displayName": "GoSales Transactions for Naive Bayes Model",
"url": "https://apsportal.ibm.com/exchange-api/v1/entries/8044492073eb964f46597b4be06ff5ea/data?accessKey=bec2ed69d9c84bed53826348cdc5690b",
"topic": "Leisure",
"publisher": "IBM"
}),
("5", {
"displayName": "Election results by County",
"url": "https://openobjectstore.mybluemix.net/Election/county_election_results.csv",
"topic": "Society",
"publisher": "IBM"
}),
("6", {
"displayName": "Million dollar home sales in NE Mass late 2016",
"url": "https://openobjectstore.mybluemix.net/misc/milliondollarhomes.csv",
"topic": "Economy & Business",
"publisher": "Redfin.com"
}),
("7", {
"displayName": "Boston Crime data, 2-week sample",
"url": "https://raw.githubusercontent.com/ibm-watson-data-lab/open-data/master/crime/boston_crime_sample.csv",
"topic": "Society",
"publisher": "City of Boston"
})
])
@scalaGateway
#Use of progress Monitor doesn't render correctly when previewed a saved notebook, turning it off until solution is found
useProgressMonitor = False | [
2,
16529,
24305,
198,
2,
15069,
19764,
11421,
13,
2177,
198,
2,
220,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
705,
34156,
24036,
198,
2,
345,
743,
407,
779,
428,
2393,
2845,
287,
11846,
351,
262,
1... | 2.758458 | 1,271 |
reset
set terminal postscript eps enhanced "Helvetica" 20 color
set output "velocity_field_cpu.eps"
unset logscale
set xrange [0 : 1]
set yrange [0 : 1]
set size square
set nokey
factor = 0.1
plot "velocity_cpu_128.dat" using 1:2:($3*factor):($4*factor) every 2 with vec lc 3
set output "velocity_field_gpu.eps"
unset logscale
set xrange [0 : 1]
set yrange [0 : 1]
set nokey
factor = 0.1
#plot "velocity_gpu.dat" using 1:2:($3*factor):($4*factor) every 2 with vec lc 3
plot "velocity_gpu_128_share.dat" using 1:2:($3*factor):($4*factor) every 2 with vec lc 3
| [
42503,
198,
2617,
12094,
1281,
12048,
304,
862,
13105,
366,
39,
32667,
3970,
1,
1160,
3124,
198,
2617,
5072,
366,
626,
11683,
62,
3245,
62,
36166,
13,
25386,
1,
198,
198,
403,
2617,
2604,
9888,
198,
198,
2617,
2124,
9521,
685,
15,
1... | 2.524444 | 225 |
#This is a mother class to get GBM and LLE data files (lle, pha and rsp)
#Author: giacomov@slac.stanford.edu
import os,sys,glob,string,errno,shutil
from GtBurst.my_fits_io import pyfits
from GtBurst.GtBurstException import GtBurstException
import ftplib, socket
import time
try:
from tkinter import *
except:
#Silently accept when tkinter import fail (no X server?)
pass
from GtBurst import downloadCallback
try:
from GtBurst.lleProgressBar import Meter
except:
#Silently accept when tkinter import fail (no X server?)
pass
pass
| [
2,
1212,
318,
257,
2802,
1398,
284,
651,
402,
12261,
290,
406,
2538,
1366,
3696,
357,
75,
293,
11,
872,
64,
290,
374,
2777,
8,
201,
198,
2,
13838,
25,
308,
9607,
296,
709,
31,
6649,
330,
13,
14192,
3841,
13,
15532,
201,
198,
201... | 2.621005 | 219 |
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$',
views.feed, {'page_nr': 1, 'sort': 'hot'}, name='root'),
url(r'^page/(?P<page_nr>\d+)$',
views.feed, {'sort': 'new'}, name='feed_page'),
url(r'^hot$',
views.feed, {'page_nr': 1, 'sort': 'hot'}, name='hot_feed'),
url(r'^hot/page/(?P<page_nr>\d+)$',
views.feed, {'sort': 'hot'}, name='hot_feed_page'),
url(r'^top$',
views.feed, {'page_nr': 1, 'sort': 'top'}, name='top_feed'),
url(r'^top/page/(?P<page_nr>\d+)$',
views.feed, {'sort': 'top'}, name='top_feed_page'),
url(r'^new$',
views.feed, {'page_nr': 1, 'sort': 'new'}, name='new_feed'),
url(r'^new/page/(?P<page_nr>\d+)$',
views.feed, {'sort': 'new'}, name='new_feed_page'),
url(r'^random$',
views.feed, {'page_nr': 1, 'sort': 'random'}, name='random_feed'),
url(r'^random/page/(?P<page_nr>\d+)$',
views.feed, {'sort': 'random'}, name='random_feed_page'),
url(r'^d/(?P<dweet_id>\d+)$',
views.dweet_show, name='dweet_show'),
url(r'^d/(?P<dweet_id>\d+)/reply$',
views.dweet_reply, name='dweet_reply'),
url(r'^d/(?P<dweet_id>\d+)/delete$',
views.dweet_delete, name='dweet_delete'),
url(r'^d/(?P<dweet_id>\d+)/like$', views.like, name='like'),
url(r'^e/(?P<dweet_id>\d+)$',
views.dweet_embed, name='dweet_embed'),
url(r'^h/(?P<hashtag_name>[\w._]+)$', views.view_hashtag, {'page_nr': 1}, name='view_hashtag'),
url(r'^h/(?P<hashtag_name>[\w._]+)/page/(?P<page_nr>\d+)$',
views.view_hashtag, name='view_hashtag_page'),
url(r'^dweet$', views.dweet, name='dweet'),
]
| [
6738,
42625,
14208,
13,
10414,
13,
6371,
82,
1330,
19016,
198,
6738,
764,
1330,
5009,
198,
198,
6371,
33279,
82,
796,
685,
198,
220,
220,
220,
19016,
7,
81,
6,
61,
3,
3256,
198,
220,
220,
220,
220,
220,
220,
220,
5009,
13,
12363,
... | 1.960603 | 863 |
from cony import wsgi
app = wsgi()
| [
6738,
369,
88,
1330,
266,
82,
12397,
198,
198,
1324,
796,
266,
82,
12397,
3419,
198
] | 2.25 | 16 |
import devon.maker, devon.make
from devon.tags import *
import re, os.path
# **************************************************************************************************
| [
198,
11748,
1614,
261,
13,
10297,
11,
1614,
261,
13,
15883,
198,
6738,
1614,
261,
13,
31499,
1330,
1635,
198,
11748,
302,
11,
28686,
13,
6978,
198,
198,
2,
41906,
17174,
17174,
1174,
198
] | 5.235294 | 34 |
import random
import math as m
def direct_surface(d):
""" d is the dimension of the sphere, sigma is the standard deviation of the gaussian"""
sigma=1./m.sqrt(d)
S=0
x=[0]*d
for k in range(d):
x[k]=gauss(sigma)[1]
S+=x[k]**2
for k in range(d):x[k]/=m.sqrt(S)
return x
X=[]
Y=[]
Z=[]
for i in range(5000):
l=direct_surface(3)
X+=[l[0], ]
Y+=[l[1], ]
Z+=[l[2], ]
fig=plt.figure()
ax=plt.axes(projection='3d')
ax.scatter3D(X, Y, Z, c=X, cmap="prism_r", s=10)
ax.set_title("direct_surface.png (d=3)")
ax.set_xlabel("X")
ax.set_xlim(-1, 1)
ax.set_ylim(-1, 1)
ax.set_zlim(-1, 1)
ax.set_ylabel("Y")
ax.set_zlabel("Z")
plt.show()
| [
11748,
4738,
198,
11748,
10688,
355,
285,
198,
198,
4299,
1277,
62,
42029,
7,
67,
2599,
198,
220,
220,
220,
220,
220,
220,
220,
37227,
288,
318,
262,
15793,
286,
262,
16558,
11,
264,
13495,
318,
262,
3210,
28833,
286,
262,
31986,
31... | 1.74478 | 431 |
# ----------------------------------------------------------------------------
# Copyright 2015 Nervana Systems Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
from rpc import RpcClient
import sys
if len(sys.argv) != 2:
print "Usage: python rpc_client <rpc_queue_name>"
sys.exit(1)
# declare an rpc client listening on queue specified by first arg
neon_rpc = RpcClient(sys.argv[1])
arg = int(raw_input("Give an integer to pow: "))
print " [x] Making request"
response = neon_rpc.call(arg)
print " [.] Got %r" % (response,)
| [
2,
16529,
10541,
198,
2,
15069,
1853,
399,
712,
2271,
11998,
3457,
13,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
198,
2,
345,
743,
407,
779,
428,
2393,
2845,
287,
11846,
351,
262,
... | 3.72 | 300 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Part of pymzml test cases
"""
import os
from pymzml.file_classes.standardGzip import StandardGzip
import unittest
import random
from pymzml.spec import Spectrum, Chromatogram
import re
import struct
import test_file_paths
class StandardGzipTest(unittest.TestCase):
""""
"""
def setUp(self):
"""
"""
paths = test_file_paths.paths
self.File = StandardGzip(paths[1], "latin-1")
def tearDown(self):
"""
"""
self.File.close()
def test_getitem_5(self):
"""
"""
ID = 5
spec = self.File[ID]
self.assertIsInstance(spec, Spectrum)
self.assertEqual(spec.ID, ID)
if __name__ == "__main__":
unittest.main(verbosity=3)
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
37811,
198,
7841,
286,
279,
4948,
89,
4029,
1332,
2663,
198,
37811,
198,
11748,
28686,
198,
6738,
279,
4948,
89,
... | 2.197222 | 360 |
import json
import time
from random import shuffle, randint
from tqdm import tqdm
| [
11748,
33918,
198,
11748,
640,
198,
198,
6738,
4738,
1330,
36273,
11,
43720,
600,
198,
6738,
256,
80,
36020,
1330,
256,
80,
36020,
198
] | 3.458333 | 24 |
import torch
import mctorch.nn as nn
import mctorch.optim as optim
import numpy as np
torch.manual_seed(0)
# Random data with high variance in first two dimension
X = torch.diag(torch.FloatTensor([3,2,1])).matmul(torch.randn(3,200))
X -= X.mean(axis=0)
# 1. Initialize Parameter
manifold_param = nn.Parameter(manifold=nn.Stiefel(3,2))
# 2. Define Cost - squared reconstruction error
# 3. Optimize
optimizer = optim.rAdagrad(params = [manifold_param], lr=1e-1)
# optimizer = optim.rSGD(params = [manifold_param], lr=1e-2)
cost_step = None
for epoch in range(1000):
cost_step = cost(X, manifold_param)
# print(cost_step)
cost_step.backward()
optimizer.step()
optimizer.zero_grad()
print(cost_step)
np_X = X.detach().numpy()
np_w = manifold_param.detach().numpy()
# 4. Test Results
estimated_projector = np_w @ np_w.T
eigenvalues, eigenvectors = np.linalg.eig(np_X @ np_X.T)
indices = np.argsort(eigenvalues)[::-1][:2]
span_matrix = eigenvectors[:, indices]
projector = span_matrix @ span_matrix.T
print("Frobenius norm error between estimated and closed-form projection "
"matrix:", np.linalg.norm(projector - estimated_projector)) | [
11748,
28034,
201,
198,
11748,
285,
2715,
354,
13,
20471,
355,
299,
77,
201,
198,
11748,
285,
2715,
354,
13,
40085,
355,
6436,
201,
198,
201,
198,
11748,
299,
32152,
355,
45941,
201,
198,
201,
198,
13165,
354,
13,
805,
723,
62,
2882... | 2.366472 | 513 |
import zlib
s = 'hello world!hello world!hello world!hello world!'
t = zlib.compress(s.encode())
print(t)
print(bytes(zlib.decompress(t)).decode()) | [
11748,
1976,
8019,
198,
82,
796,
705,
31373,
995,
0,
31373,
995,
0,
31373,
995,
0,
31373,
995,
13679,
198,
83,
796,
1976,
8019,
13,
5589,
601,
7,
82,
13,
268,
8189,
28955,
198,
4798,
7,
83,
8,
198,
4798,
7,
33661,
7,
89,
8019,
... | 2.672727 | 55 |
import os
from socket import *
import time
os.chdir("/storage/emulated/0/Project")
s = socket(AF_INET,SOCK_STREAM)
host = "192.168.64.1"
port = 9010
s.connect((host,port))
filename = 'Text.txt'
clock_start = time.clock()
time_start = time.time()
#rb- readonly : Binary
f = open(filename, 'rb')
#1024 BufferSize
l = f.read(1024)
while (l):
s.send(l)
print('Sent ', repr(l))
l = f.read(1024)
f.close()
#s.close() vs shutdown - shutdown allows receiving pending data from sender
s.shutdown(SHUT_WR)
clock_end = time.clock()
time_end = time.time()
duration_clock = clock_end - clock_start
print 'clock: start = ',clock_start, ' end = ',clock_end
print 'clock: duration_clock = ', duration_clock
duration_time = time_end - time_start
print 'time: start = ',time_start, ' end = ',time_end
print 'time: duration_time = ', duration_time
| [
11748,
28686,
201,
198,
6738,
17802,
1330,
1635,
201,
198,
11748,
640,
201,
198,
201,
198,
418,
13,
354,
15908,
7203,
14,
35350,
14,
368,
4817,
14,
15,
14,
16775,
4943,
201,
198,
201,
198,
82,
796,
17802,
7,
8579,
62,
1268,
2767,
... | 2.552023 | 346 |
import sys
sys.path.append('.')
from astrodbkit2.astrodb import create_database
from astrodbkit2.astrodb import Database
from simple.schema import *
from astropy.table import Table
import numpy as np
from scripts.ingests.utils import ingest_proper_motions
from astropy.coordinates import SkyCoord
import astropy.units as u
from astroquery.simbad import Simbad
import warnings
warnings.filterwarnings("ignore", module='astroquery.simbad')
import re
import os
from pathlib import Path
import pandas as pd
SAVE_DB = True # save the data files in addition to modifying the .db file
RECREATE_DB = True # recreates the .db file from the data files
VERBOSE = False
verboseprint = print if VERBOSE else lambda *a, **k: None
db = load_db()
# load table
ingest_table = Table.read('scripts/ingests/UltracoolSheet-Main.csv', data_start=1)
#Defining variables
sources = ingest_table['name']
#ra_lit = ingest_table['pmra_lit']
#ra_lit_err = ingest_table['pmraerr_lit']
#dec_lit = ingest_table['pmdec_lit']
#dec_lit_err = ingest_table['pmdecerr_lit']
#ref_pm_lit = ingest_table['ref_pm_lit']
ra_UKIRT = ingest_table['pmra_UKIRT']
ra_UKIRT_err = ingest_table['pmraerr_UKIRT']
dec_UKIRT = ingest_table['pmdec_UKIRT']
dec_UKIRT_err = ingest_table['pmdecerr_UKIRT']
ref_pm_UKIRT = ingest_table['ref_plx_UKIRT']
#ingest_table_df = pd.DataFrame({'sources': sources, 'pm_ra' : ra_UKIRT, 'pm_ra_err' : ra_UKIRT_err, 'pm_dec' : dec_UKIRT, 'pm_dec_err' : dec_UKIRT_err, 'pm_ref' : ref_pm_UKIRT})
df = pd.read_csv('scripts/ingests/UltracoolSheet-Main.csv', usecols=['name' ,'pmra_UKIRT', 'pmraerr_UKIRT', 'pmdec_UKIRT', 'pmdecerr_UKIRT', 'ref_plx_UKIRT']) .dropna()
df.reset_index(inplace=True, drop=True)
print(df)
#Ingesting lit pm into db
#ingest_proper_motions(db, sources, ra_lit, ra_lit_err, dec_lit, dec_lit_err, ref_pm_lit, save_db=False, verbose=False)
#Ingesting UKIRT pm into db
ingest_proper_motions(db, df.name, df.pmra_UKIRT, df.pmraerr_UKIRT, df.pmdec_UKIRT, df.pmdecerr_UKIRT, df.ref_plx_UKIRT, save_db=True, verbose=False )
| [
11748,
25064,
198,
17597,
13,
6978,
13,
33295,
10786,
2637,
8,
198,
6738,
6468,
305,
9945,
15813,
17,
13,
459,
305,
9945,
1330,
2251,
62,
48806,
198,
6738,
6468,
305,
9945,
15813,
17,
13,
459,
305,
9945,
1330,
24047,
198,
6738,
2829,
... | 2.586514 | 786 |
from typing import List, Union, Tuple, Optional
from labml.internal.util.colors import StyleCode
| [
6738,
19720,
1330,
7343,
11,
4479,
11,
309,
29291,
11,
32233,
198,
198,
6738,
2248,
4029,
13,
32538,
13,
22602,
13,
4033,
669,
1330,
17738,
10669,
628
] | 3.666667 | 27 |
from django.db import models
# Create your models here.
__all__ = [
'Categories',
'Article',
'Links',
'Tag'
]
| [
6738,
42625,
14208,
13,
9945,
1330,
4981,
198,
198,
2,
13610,
534,
4981,
994,
13,
198,
198,
834,
439,
834,
796,
685,
198,
220,
220,
220,
705,
34,
26129,
3256,
198,
220,
220,
220,
705,
14906,
3256,
198,
220,
220,
220,
705,
31815,
3... | 2.357143 | 56 |
from typing import Union # pylint: disable=unused-import
from tartiflette.constants import UNDEFINED_VALUE
from tartiflette.language.ast import StringValueNode
class USCurrency:
"""
Scalar which handles USD amounts (in format $XX.YY)
"""
@staticmethod
def parse_literal(ast: "ValueNode") -> Union[int, "UNDEFINED_VALUE"]:
"""
Loads the input value from an AST node
:param ast: ast node to coerce
:type ast: ValueNode
:return: the value in cents if it can be parsed, UNDEFINED_VALUE otherwise
:rtype: Union[int, UNDEFINED_VALUE]
"""
if isinstance(ast, StringValueNode):
try:
return _parse_us_currency(ast.value)
except (ValueError, TypeError):
return UNDEFINED_VALUE
return UNDEFINED_VALUE
@staticmethod
def coerce_input(value: str) -> int:
"""
Loads the input value
:param value: the value to coerce
:type value: str
:return: the value in cents if it can be parsed
:rtype: int
:raises TypeError: if the value isn't a string or int
:raises ValueError: if the value isn't convertible to an int
"""
return _parse_us_currency(value)
@staticmethod
def coerce_output(value: int) -> str:
"""
Dumps the output value
:param value: the value to coerce
:type value: int
:return: the value as a USD string if it can be parsed
:raises TypeError: if the value isn't an int
:rtype: str
"""
if isinstance(value, int):
return "$" + "{0:.2f}".format(value / 100.00)
raise TypeError(f"USCurrency cannot represent value: < {value} >")
| [
6738,
19720,
1330,
4479,
220,
1303,
279,
2645,
600,
25,
15560,
28,
403,
1484,
12,
11748,
198,
198,
6738,
35842,
361,
21348,
13,
9979,
1187,
1330,
4725,
7206,
20032,
1961,
62,
39488,
198,
6738,
35842,
361,
21348,
13,
16129,
13,
459,
13... | 2.34221 | 751 |
import komand
from .schema import GetRecordByIdInput, GetRecordByIdOutput, Input, Output, Component
# Custom imports below
from icon_kintone.util.kintone import get_record
| [
11748,
479,
296,
392,
198,
6738,
764,
15952,
2611,
1330,
3497,
23739,
48364,
20560,
11,
3497,
23739,
48364,
26410,
11,
23412,
11,
25235,
11,
35100,
198,
2,
8562,
17944,
2174,
198,
6738,
7196,
62,
74,
600,
505,
13,
22602,
13,
74,
600,
... | 3.604167 | 48 |
# -*- coding: utf-8 -*-
"""Generate all the scales in different notations.
This code has been written to test whether the user interface works.
The notations and names are approximated, as I am no expert of any other
way of spelling scales than spanish, and maybe english.
For the generation of these scales, an equal-tempered scale is assumed
and a reference frequency of A4 == 440.0Hz.
"""
import os
def correct_german_spellings(spelling):
"""Correcting the exceptions in German spellings.
Aeseses -> Aseses
Eeseses -> Eseses
Heseses -> Beses
Aeses -> Ases
Eeses -> Eses
Heses -> Bes
Hes -> B
Aes -> As
Ees -> Es
----------
H♭♭♭ -> B𝄫
H𝄫 -> B♭
H♭ -> B
"""
ret = spelling.replace("Aeseses", "Aseses")
ret = ret.replace("Eeseses", "Eseses")
ret = ret.replace("Heseses", "Beses")
ret = ret.replace("Aeses", "Ases")
ret = ret.replace("Eeses", "Eses")
ret = ret.replace("Heses", "Bes")
ret = ret.replace("Hes", "B")
ret = ret.replace("Aes", "As")
ret = ret.replace("Ees", "Es")
# The former are for keys, these deal with note spellings
ret = ret.replace("H♭♭♭", "B𝄫")
ret = ret.replace("H𝄫", "B♭")
ret = ret.replace("H♭", "B")
return ret
if __name__ == '__main__':
a4 = 440.0
major_scale_semitones_to_a4 = [-9, -7, -5, -4, -2, 0, 2]
minor_scale_semitones_to_a4 = [-9, -7, -6, -4, -2, -1, 1]
major_scale_alterations = [
# C Major
[0, 0, 0, 0, 0, 0, 0],
# C# Major
[1, 1, 1, 1, 1, 1, 1],
# D Major
[1, 0, 0, 1, 0, 0, 0],
# Eb Major
[0, 0, -1, 0, 0, -1, -1],
# E Major
[1, 1, 0, 1, 1, 0, 0],
# F Major
[0, 0, 0, 0, 0, 0, -1],
# F# Major
[1, 1, 1, 1, 1, 1, 0],
# G Major
[0, 0, 0, 1, 0, 0, 0],
# Ab Major
[0, -1, -1, 0, 0, -1, -1],
# A Major
[1, 0, 0, 1, 1, 0, 0],
# Bb Major
[0, 0, -1, 0, 0, 0, -1],
# B Major
[1, 1, 0, 1, 1, 1, 0],
]
minor_scale_alterations = [
# c minor
[0, 0, -1, 0, 0, -1, -1],
# c# minor
[1, 1, 0, 1, 1, 0, 0],
# d minor
[0, 0, 0, 0, 0, 0, -1],
# eb minor
[-1, -1, -1, 0, -1, -1, -1],
# e minor
[0, 0, 0, 1, 0, 0, 0],
# f minor
[0, -1, -1, 0, 0, -1, -1],
# f# minor
[1, 0, 0, 1, 1, 0, 0],
# g minor
[0, 0, -1, 0, 0, 0, -1],
# ab minor
[-1, -1, -1, -1, -1, -1, -1],
# a minor
[0, 0, 0, 0, 0, 0, 0],
# bb minor
[0, -1, -1, 0, -1, -1, -1],
# b minor
[1, 0, 0, 1, 0, 0, 0],
]
notations = {
'North America': {
'notes': ['C', 'D', 'E', 'F', 'G', 'A', 'B'],
'modes': ['Major', 'Minor'],
'scale_alterations': ['', '♯', '𝄪', '♯♯♯', '♭♭♭', '𝄫', '♭'],
'note_alterations': ['', '♯', '𝄪', '♯♯♯', '♭♭♭', '𝄫', '♭'],
},
'German': {
'notes': ['C', 'D', 'E', 'F', 'G', 'A', 'H'],
'modes': ['Dur', 'Moll'],
'scale_alterations': ['', 'is', 'isis', 'isisis', 'eseses', "eses", "es"],
'note_alterations': ['', '♯', '𝄪', '♯♯♯', '♭♭♭', '𝄫', '♭'],
},
'Spanish': {
'notes': ['Do', 'Re', 'Mi', 'Fa', 'Sol', 'La', 'Si'],
'modes': ['Mayor', 'Menor'],
'scale_alterations': ['', '♯', '𝄪', '♯♯♯', '♭♭♭', '𝄫', '♭'],
'note_alterations': ['', '♯', '𝄪', '♯♯♯', '♭♭♭', '𝄫', '♭'],
},
'French': {
'notes': ['Do', 'Re', 'Mi', 'Fa', 'Sol', 'La', 'Ti'],
'modes': ['Majeur', 'Mineur'],
'scale_alterations': ['', '♯', '𝄪', '♯♯♯', '♭♭♭', '𝄫', '♭'],
'note_alterations': ['', '♯', '𝄪', '♯♯♯', '♭♭♭', '𝄫', '♭'],
}
}
scales = [
# (note, alteration) pairs
(0, 0), (0, 1), (1, 0), (2, -1),
(2, 0), (3, 0), (3, 1), (4, 0),
(5, -1), (5, 0), (6, -1), (6, 0)
]
for notation, d in notations.items():
print(notation)
for mode_id, mode in enumerate(d['modes']):
chromatic_increase = 0
base_semitones_to_a4 = major_scale_semitones_to_a4 if mode_id == 0 else minor_scale_semitones_to_a4
scale_alterations = major_scale_alterations if mode_id == 0 else minor_scale_alterations
for scale_id, scale in enumerate(scales):
note, alteration = scale
scale_note = d['notes'][note]
alteration_name = d['scale_alterations'][alteration]
scale_name = '{}{} {}'.format(scale_note, alteration_name, mode)
if notation == "German":
scale_name = correct_german_spellings(scale_name)
scale_semitones_to_a4 = [s + chromatic_increase for s in base_semitones_to_a4]
note_alterations = scale_alterations[scale_id]
note_indexes = [n % 7 for n in range(note, note + 7)]
scale_dir = os.path.join('Scales', notation, scale_name)
if not os.path.exists(scale_dir):
os.makedirs(scale_dir)
csv_filepath = os.path.join(scale_dir, 'fundamental_frequencies.csv')
with open(csv_filepath, encoding='utf-8', mode='w') as csv:
print(scale_name)
for alt in [0, 1, 2, -2, -1]:
for idx, note_idx in enumerate(note_indexes):
note_name = d['notes'][note_idx]
note_alteration = note_alterations[note_idx] + alt
note_alteration_name = d['note_alterations'][note_alteration]
note_name = '{}{}'.format(note_name, note_alteration_name)
if notation == "German":
note_name = correct_german_spellings(note_name)
# print('{}, '.format(note_name), end='')
if idx < len(note_indexes) - 1:
csv.write('{}, '.format(note_name))
else:
csv.write('{}\n'.format(note_name))
for idx, note_idx in enumerate(note_indexes):
note_semitones_to_a4 = scale_semitones_to_a4[idx] + alt
freq = a4 * 2.0 ** (note_semitones_to_a4 / 12.0)
# print('{:.2f}Hz, '.format(freq), end='')
if idx < len(note_indexes) - 1:
csv.write('{:.2f}, '.format(freq))
else:
csv.write('{:.2f}\n'.format(freq))
chromatic_increase += 1
print() | [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
37811,
8645,
378,
477,
262,
16252,
287,
1180,
407,
602,
13,
198,
198,
1212,
2438,
468,
587,
3194,
284,
1332,
1771,
262,
2836,
7071,
2499,
13,
198,
464,
407,
602,
290,
... | 1.711873 | 4,026 |
# -*- coding: utf-8 -*-
# @Author: Xiaoyuan Yi
# @Last Modified by: Xiaoyuan Yi
# @Last Modified time: 2020-06-11 22:04:36
# @Email: yi-xy16@mails.tsinghua.edu.cn
# @Description:
'''
Copyright 2020 THUNLP Lab. All Rights Reserved.
This code is part of the online Chinese poetry generation system, Jiuge.
System URL: https://jiuge.thunlp.cn/ and https://jiuge.thunlp.org/.
Github: https://github.com/THUNLP-AIPoet.
'''
from matplotlib import pyplot as plt
plt.rcParams['font.family'] = ['simhei']
from matplotlib.colors import from_levels_and_colors
import numpy as np
import copy
import torch
class Visualization(object):
"""docstring for LogInfo"""
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
2,
2488,
13838,
25,
22450,
726,
7258,
26463,
198,
2,
2488,
5956,
40499,
416,
25,
220,
220,
22450,
726,
7258,
26463,
198,
2,
2488,
5956,
40499,
640,
25,
12131,
12,
3312... | 2.785714 | 238 |
import sys
sys.path.append('..')
from data.DBConnection import DBConnection
from data.user import User
| [
11748,
25064,
198,
17597,
13,
6978,
13,
33295,
10786,
492,
11537,
198,
6738,
1366,
13,
35,
2749,
261,
1606,
295,
1330,
360,
2749,
261,
1606,
295,
198,
6738,
1366,
13,
7220,
1330,
11787,
198,
197,
197,
197,
198,
197,
197,
197,
198,
1... | 1.985714 | 70 |
import datetime
from pathlib import Path
import subprocess
import tempfile
from typing import Tuple, Union
import openpyxl as xl
import pyarrow
from .util import assert_table_equals
# This is hard to test, since it's really an invalid Excel file
# def test_no_sheets_is_error():
# # https://openpyxl.readthedocs.io/en/stable/optimized.html#write-only-mode
# # ... to create a workbook with no worksheets
# workbook = xl.Workbook()
# workbook.remove(workbook.active)
# workbook.get_active_sheet = lambda: None
# result, stdout = do_convert_data(workbook, include_stdout=True)
# assert_table_equals(result, pyarrow.table({}))
# assert stdout == b"Excel file has no worksheets\n"
# openpyxl doesn't write shared strings
# def test_shared_string_column():
# workbook = xl.Workbook()
# sheet = workbook.active
# sheet.append(["a"])
# sheet.append(["b"])
# assert_table_equals(
# do_convert_data(workbook, header_rows=""),
# pyarrow.table({"A": ["a", "b"]})
# )
| [
11748,
4818,
8079,
198,
6738,
3108,
8019,
1330,
10644,
198,
11748,
850,
14681,
198,
11748,
20218,
7753,
198,
6738,
19720,
1330,
309,
29291,
11,
4479,
198,
11748,
1280,
9078,
87,
75,
355,
2124,
75,
198,
11748,
12972,
6018,
198,
6738,
764... | 2.535885 | 418 |
import os
import argparse
import numpy as np
from fr.tagc.rainet.core.util.exception.RainetException import RainetException
from fr.tagc.rainet.core.util.log.Logger import Logger
from fr.tagc.rainet.core.util.time.Timer import Timer
from fr.tagc.rainet.core.util.subprocess.SubprocessUtil import SubprocessUtil
#===============================================================================
# Started 20-May-2016
# Diogo Ribeiro
DESC_COMMENT = "Script to read, filter and process large catRAPID interaction files."
SCRIPT_NAME = "ReadCatrapid.py"
#===============================================================================
#===============================================================================
# General plan:
# 1) Parse catRAPID interaction file
# 2) apply interaction filters
# 3) write filtered interaction file, and other processed data files
#===============================================================================
#===============================================================================
# Processing notes:
# 1) To reduce memory consumption, the score values are rounded to 1 decimal.
# Thus, means are not precise
# 2) Filters are all applied on top of each other, first by score, then RNA, then protein, then interaction-based.
#===============================================================================
if __name__ == "__main__":
try:
# Start chrono
Timer.get_instance().start_chrono()
print "STARTING " + SCRIPT_NAME
#===============================================================================
# Get input arguments, initialise class
#===============================================================================
parser = argparse.ArgumentParser(description= DESC_COMMENT)
# positional args
parser.add_argument('catRAPIDFile', metavar='catRAPIDFile', type=str,
help='Output file from catRAPID library all vs all.')
parser.add_argument('outputFolder', metavar='outputFolder', type=str, help='Folder where to write output files.')
parser.add_argument('--interactionCutoff', metavar='interactionCutoff', type=str,
default = "OFF", help='Minimum catRAPID interaction propensity. Set as "OFF" if no filtering wanted.')
parser.add_argument('--interactionFilterFile', metavar='interactionFilterFile', type=str,
default = "", help='TSV file with list of interacting pairs we want to keep, one pair per line. UniprotAC\tEnsemblTxID. No header.')
parser.add_argument('--rnaFilterFile', metavar='rnaFilterFile', type=str,
default = "", help='File with list of RNAs we want to keep, one per line. No header.')
parser.add_argument('--proteinFilterFile', metavar='proteinFilterFile', type=str,
default = "", help='File with list of Proteins we want to keep, one per line. No header.')
parser.add_argument('--writeInteractions', metavar='writeInteractions', type=int,
default = 1, help='Whether to write interaction file after the filters.')
parser.add_argument('--batchSize', metavar='batchSize', type=int,
default = 1000000, help='How many lines to process before writing to file (to avoid excessive memory consumption).')
parser.add_argument('--writeNormalisedInteractions', metavar='writeNormalisedInteractions', type=int,
default = 0, help='Whether to write interaction file after the filters, normalised by max (unity-based normalisation) score for each RNA. --writeInteractions argument must also be 1.')
parser.add_argument('--writeInteractionMatrix', metavar='writeInteractionMatrix', type=int,
default = 0, help='Whether to write interaction matrix file after the filters. --writeInteractions argument must also be 1.')
parser.add_argument('--booleanInteraction', metavar='booleanInteraction', type=int,
default = 0, help='Whether to write interaction matrix file with 1 or 0 instead of score values. --writeInteractions and --writeInteractionMatrix argument must also be 1.')
parser.add_argument('--sampleInteractions', metavar='sampleInteractions', type=int,
default = 0, help='Whether to write file with at least one interactions for each RNA and each protein. Output file can have more than X interactions for a protein/RNA since they are co-dependent. Applied after all other filters. Default = 0 (OFF).')
#gets the arguments
args = parser.parse_args( )
# init
readCatrapid = ReadCatrapid( args.catRAPIDFile, args.outputFolder, args.interactionCutoff, args.interactionFilterFile,
args.rnaFilterFile, args.proteinFilterFile, args.writeInteractions, args.batchSize,
args.writeNormalisedInteractions, args.writeInteractionMatrix, args.booleanInteraction, args.sampleInteractions)
readCatrapid.run()
# Stop the chrono
Timer.get_instance().stop_chrono( "FINISHED " + SCRIPT_NAME )
# Use RainetException to catch errors
except RainetException as rainet:
Logger.get_instance().error( "Error during execution of %s. Aborting :\n" % SCRIPT_NAME + rainet.to_string())
| [
11748,
28686,
198,
11748,
1822,
29572,
198,
11748,
299,
32152,
355,
45941,
198,
198,
6738,
1216,
13,
12985,
66,
13,
3201,
316,
13,
7295,
13,
22602,
13,
1069,
4516,
13,
31443,
316,
16922,
1330,
10301,
316,
16922,
198,
6738,
1216,
13,
1... | 2.902567 | 1,909 |
#!/usr/bin/env python
import sys
import time
import logging
import boto3
from botocore.exceptions import ClientError
from watchdog.observers import Observer
from watchdog.events import PatternMatchingEventHandler
logging.basicConfig(level=logging.INFO,format='%(asctime)s - %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
logging.getLogger('boto3').setLevel(logging.CRITICAL)
logging.getLogger('botocore').setLevel(logging.CRITICAL)
s3 = boto3.client('s3')
S3_DST_BUCKET = "s3-sync-example"
if __name__ == "__main__":
path = sys.argv[1] if len(sys.argv) > 1 else './folder'
observer = Observer()
observer.schedule(S3Handler(), path, recursive=True)
observer.start()
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
observer.stop()
observer.join()
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
11748,
25064,
198,
11748,
640,
198,
11748,
18931,
198,
11748,
275,
2069,
18,
198,
6738,
10214,
420,
382,
13,
1069,
11755,
1330,
20985,
12331,
198,
6738,
26856,
13,
672,
2655,
690,
1330,
... | 2.509259 | 324 |
import argparse
from tiger_pl import Tiger
parser = argparse.ArgumentParser()
parser.add_argument("filename")
args = parser.parse_args()
print(Tiger(args.filename).execute())
| [
11748,
1822,
29572,
198,
198,
6738,
26241,
62,
489,
1330,
17030,
198,
198,
48610,
796,
1822,
29572,
13,
28100,
1713,
46677,
3419,
198,
48610,
13,
2860,
62,
49140,
7203,
34345,
4943,
198,
22046,
796,
30751,
13,
29572,
62,
22046,
3419,
19... | 3.339623 | 53 |
#!/usr/bin/env python3
from ctypes import *
import common_application_interface
import common_sensor_interface
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
198,
6738,
269,
19199,
1330,
1635,
198,
198,
11748,
2219,
62,
31438,
62,
39994,
198,
198,
11748,
2219,
62,
82,
22854,
62,
39994,
198
] | 3.352941 | 34 |
from setuptools import setup
readme = ''
with open('README.rst') as f:
readme = f.read()
requirements = [
'aiohttp>=3.7.0,<3.8.0'
]
extras_require = {
'docs': [
'sphinx>=4.1.2',
'sphinx_rtd_theme>=0.5.2',
]
}
packages = [
'aiowowapi',
'aiowowapi.retail',
]
setup(
name='aiowowapi',
author='Adalyia',
url='https://github.com/Adalyia/aiowowapi',
project_urls={
"Documentation": "https://docs.adalyia.com/wowapi",
"Issue tracker": "https://github.com/Adalyia/aiowowapi/issues",
},
version='1.0.3',
packages=packages,
license='MIT',
description='An async ready client library for the World of Warcraft APIs',
long_description=readme,
long_description_content_type="text/x-rst",
include_package_data=True,
install_requires=requirements,
extras_require=extras_require,
python_requires='>=3.8.0',
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Topic :: Internet',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities',
]
) | [
6738,
900,
37623,
10141,
1330,
9058,
628,
220,
220,
220,
220,
198,
961,
1326,
796,
10148,
198,
4480,
1280,
10786,
15675,
11682,
13,
81,
301,
11537,
355,
277,
25,
198,
220,
220,
220,
1100,
1326,
796,
277,
13,
961,
3419,
198,
220,
220... | 2.502664 | 563 |
# Django settings for redirect_demo project.
import os

PROJECT_DIR = os.path.abspath(os.path.dirname(__file__))

# Identity marker so makemessages picks up translatable strings in settings.
gettext = lambda s: s

# NOTE(review): DEBUG is enabled — must be False in production deployments.
DEBUG = True
TEMPLATE_DEBUG = DEBUG

ADMINS = (
    # ('Your Name', 'your_email@domain.com'),
)

MANAGERS = ADMINS

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': 'redirect_demo.db',             # Or path to database file if using sqlite3.
        'USER': '',                             # Not used with sqlite3.
        'PASSWORD': '',                         # Not used with sqlite3.
        'HOST': '',                             # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '',                             # Set to empty string for default. Not used with sqlite3.
    }
}

# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'

# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
# NOTE(review): this value is dead — LANGUAGE_CODE is re-assigned to 'en'
# further below; the duplicate assignment should be removed.
LANGUAGE_CODE = 'en-us'

SITE_ID = 1

# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True

# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True

# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = os.path.join(PROJECT_DIR, "media")

# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = '/media/'

# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/admin/'

# Make this unique, and don't share it with anybody.
# NOTE(review): committing a SECRET_KEY to version control is a security
# risk — load it from the environment or an untracked local settings file.
SECRET_KEY = '*^aor)8+d(lg_#ezg0&8sc&&pju^18#t=clw-2ief&q#+%(s*n'

LANGUAGE_CODE = 'en'  # Overrides the 'en-us' value assigned above.

CMS_TEMPLATES = (
    ('home.html', gettext('Homepage')),
)

CMS_PLACEHOLDER_CONF = {
    'footer-address-content': {
        'plugins': ('TextPlugin',),
        # NOTE(review): same display name as 'footer-link-list' below —
        # possibly a copy/paste slip; confirm the intended label.
        'name':gettext('Footer Link List'),
    },
    'footer-link-list': {
        'plugins': ('FilerImagePlugin',),
        'name':gettext('Footer Link List'),
    },
    'right-image': {
        'plugins': ('FilerImagePlugin',),
        'name':gettext('Right Image'),
    },
}

LANGUAGES = (
    ('en', gettext('English')),
)
CMS_LANGUAGES = LANGUAGES

GOOGLE_MAPS_API_KEY = ""

# django CMS feature toggles.
CMS_SHOW_END_DATE = True
CMS_SHOW_START_DATE = True
CMS_PERMISSION = True
CMS_MODERATOR = False
CMS_URL_OVERWRITE = True
CMS_MENU_TITLE_OVERWRITE = True
CMS_SEO_FIELDS = True
CMS_REDIRECTS = True
CMS_SOFTROOT = True

DEBUG_TOOLBAR_CONFIG = {
    "INTERCEPT_REDIRECTS" : False,
}

# Allowed IPs for the Django Debug Toolbar
INTERNAL_IPS = ('127.0.0.1',)

# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.load_template_source',
    'django.template.loaders.app_directories.load_template_source',
    'django.template.loaders.eggs.load_template_source',
)

TEMPLATE_CONTEXT_PROCESSORS = (
    "django.core.context_processors.auth",
    'django.core.context_processors.debug',
    "django.core.context_processors.i18n",
    "django.core.context_processors.request",
    "django.core.context_processors.media",
    "cms.context_processors.media",
    'django.contrib.messages.context_processors.messages',
)

MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'cms.middleware.page.CurrentPageMiddleware',
    'cms.middleware.user.CurrentUserMiddleware',
    'cms.middleware.toolbar.ToolbarMiddleware',
    'cms.middleware.media.PlaceholderMediaMiddleware',
    'cms_redirects.middleware.RedirectFallbackMiddleware',
)

ROOT_URLCONF = 'redirect_demo.urls'

TEMPLATE_DIRS = (
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
    os.path.join(PROJECT_DIR,'templates'),
)

FIXTURE_DIRS = (
    os.path.join(PROJECT_DIR, "fixtures"),
)

INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.admin',
    'cms',
    'menus',
    'cms.plugins.text',
    'mptt',
    'publisher',
    'south',
    'appmedia',
    'cms_redirects',
)

SOUTH_TESTS_MIGRATE = False

# Optional developer overrides; absent settings_dev module is tolerated.
try:
    from settings_dev import *
except ImportError:
    pass
| [
2,
37770,
6460,
329,
18941,
62,
9536,
78,
1628,
13,
198,
11748,
28686,
198,
31190,
23680,
62,
34720,
796,
28686,
13,
6978,
13,
397,
2777,
776,
7,
418,
13,
6978,
13,
15908,
3672,
7,
834,
7753,
834,
4008,
198,
198,
1136,
5239,
796,
... | 2.479758 | 2,149 |
"""
Hangman.
Authors: Tyler Thenell and Zachary Zdanavicius.
""" # DONE: 1. PUT YOUR NAME IN THE ABOVE LINE.
# DONE: 2. Implement Hangman using your Iterative Enhancement Plan.
import random
####### Do NOT attempt this assignment before class! #######
main()
| [
37811,
198,
39,
648,
805,
13,
198,
198,
30515,
669,
25,
14886,
3244,
695,
290,
18825,
560,
1168,
25604,
615,
291,
3754,
13,
198,
37811,
220,
1303,
360,
11651,
25,
352,
13,
350,
3843,
16592,
36751,
3268,
3336,
317,
8202,
6089,
48920,
... | 3.164706 | 85 |
from __future__ import annotations
from typing import Callable, Optional, Tuple, List
from enum import Enum, Flag, auto
from threading import Thread
import datetime
import json
import websocket
class SIStatus(Enum):
    """
    Status of operations on the OpenStuder gateway.

    - **SIStatus.SUCCESS**: Operation was successfully completed.
    - **SIStatus.IN_PROGRESS**: Operation is already in progress or another operation is occupying the resource.
    - **SIStatus.ERROR**: General (unspecified) error.
    - **SIStatus.NO_PROPERTY**: The property does not exist or the user's access level does not allow to access the property.
    - **SIStatus.NO_DEVICE**: The device does not exist.
    - **SIStatus.NO_DEVICE_ACCESS**: The device access instance does not exist.
    - **SIStatus.TIMEOUT**: A timeout occurred when waiting for the completion of the operation.
    - **SIStatus.INVALID_VALUE**: An invalid value was passed.
    """

    # Non-negative values indicate success or an operation still in progress;
    # negative values are error conditions.
    SUCCESS = 0
    IN_PROGRESS = 1
    ERROR = -1
    NO_PROPERTY = -2
    NO_DEVICE = -3
    NO_DEVICE_ACCESS = -4
    TIMEOUT = -5
    INVALID_VALUE = -6
@staticmethod
class SIConnectionState(Enum):
    """
    State of the connection to the OpenStuder gateway.

    - **SIConnectionState.DISCONNECTED**: The client is not connected.
    - **SIConnectionState.CONNECTING**: The client is establishing the WebSocket connection to the gateway.
    - **SIConnectionState.AUTHORIZING**: The WebSocket connection to the gateway has been established and the client is authorizing.
    - **SIConnectionState.CONNECTED**: The WebSocket connection is established and the client is authorized, ready to use.
    """

    # Normal lifecycle progresses DISCONNECTED -> CONNECTING -> AUTHORIZING
    # -> CONNECTED (see SIGatewayClient.connect()).
    DISCONNECTED = auto()
    CONNECTING = auto()
    AUTHORIZING = auto()
    CONNECTED = auto()
class SIAccessLevel(Enum):
    """
    Level of access granted to a client from the OpenStuder gateway.

    - **NONE**: No access at all.
    - **BASIC**: Basic access to device information properties (configuration excluded).
    - **INSTALLER**: Basic access + additional access to most common configuration properties.
    - **EXPERT**: Installer + additional advanced configuration properties.
    - **QUALIFIED_SERVICE_PERSONNEL**: Expert and all configuration and service properties only for qualified service personnel.
    """

    # Declared from least privileged (NONE) to most privileged
    # (QUALIFIED_SERVICE_PERSONNEL); each level includes the previous one.
    NONE = 0
    BASIC = auto()
    INSTALLER = auto()
    EXPERT = auto()
    QUALIFIED_SERVICE_PERSONNEL = auto()
@staticmethod
class SIDescriptionFlags(Flag):
    """
    Flags to control the format of the **DESCRIBE** functionality.

    - **SIDescriptionFlags.NONE**: No description flags.
    - **SIDescriptionFlags.INCLUDE_ACCESS_INFORMATION**: Includes device access instances information.
    - **SIDescriptionFlags.INCLUDE_DEVICE_INFORMATION**: Include device information.
    - **SIDescriptionFlags.INCLUDE_PROPERTY_INFORMATION**: Include device property information.
    - **SIDescriptionFlags.INCLUDE_DRIVER_INFORMATION**: Include device access driver information.
    """

    NONE = 0
    INCLUDE_ACCESS_INFORMATION = auto()
    INCLUDE_DEVICE_INFORMATION = auto()
    INCLUDE_PROPERTY_INFORMATION = auto()
    INCLUDE_DRIVER_INFORMATION = auto()
class SIWriteFlags(Flag):
    """
    Flags to control write property operation.

    - **SIWriteFlags.NONE**: No write flags.
    - **SIWriteFlags.PERMANENT**: Write the change to the persistent storage, e.g. the change persists across reboots.
    """

    NONE = 0
    PERMANENT = auto()
class SIProtocolError(IOError):
    """
    Exception type raised for every OpenStuder protocol failure.

    The human-readable cause is carried as the first positional exception
    argument, retrievable via :meth:`reason`.
    """

    def reason(self) -> str:
        """
        Return the human-readable cause of this error.

        :return: Reason for the error.
        """
        # args is populated by BaseException.__init__ with the constructor
        # arguments; the first entry holds the failure description.
        return self.args[0]
class SIDeviceMessage:
"""
The SIDeviceMessage class represents a message a device connected to the OpenStuder gateway has broadcast.
"""
@staticmethod
class SIPropertyReadResult:
"""
The SIDPropertyReadResult class represents the status of a property read result.
"""
@staticmethod
class SIPropertySubscriptionResult:
"""
The SIDPropertyReadResult class represents the status of a property subscription/unsubscription.
"""
@staticmethod
class SIGatewayClient(_SIAbstractGatewayClient):
    """
    Simple, synchronous (blocking) OpenStuder gateway client.
    This client uses a synchronous model which has the advantage to be much simpler to use than the asynchronous version SIAsyncGatewayClient. The drawback is that device message
    indications are ignored by this client and subscriptions to property changes are not possible.
    """

    # NOTE(review): this class relies on the _SIAbstractGatewayClient base class
    # (frame encode/decode helpers) and on private members (__state, __ws,
    # __access_level, __gateway_version, __ensure_in_state(),
    # __receive_frame_until_commands()) whose definitions are not visible in
    # this excerpt — confirm against the complete file.

    def connect(self, host: str, port: int = 1987, user: str = None, password: str = None) -> SIAccessLevel:
        """
        Establishes the WebSocket connection to the OpenStuder gateway and executes the user authorization process once the connection has been established. This method blocks the
        current thread until the operation (authorize) has been completed or an error occurred. The method returns the access level granted to the client during authorization on
        success or throws an **SIProtocolError** otherwise.

        :param host: Hostname or IP address of the OpenStuder gateway to connect to.
        :param port: TCP port used for the connection to the OpenStuder gateway, defaults to 1987.
        :param user: Username send to the gateway used for authorization.
        :param password: Password send to the gateway used for authorization.
        :return: Access Level granted to the client.
        :raises SIProtocolError: If the connection could not be established, or the authorization was refused.
        """

        # Ensure that the client is in the DISCONNECTED state.
        self.__ensure_in_state(SIConnectionState.DISCONNECTED)

        # Connect to WebSocket server.
        self.__state = SIConnectionState.CONNECTING
        self.__ws = websocket.create_connection('ws://{host}:{port}'.format(host=host, port=port))

        # Authorize client. Credentials are optional: without them the gateway
        # grants its default (guest) access level.
        self.__state = SIConnectionState.AUTHORIZING
        if user is None or password is None:
            self.__ws.send(super(SIGatewayClient, self).encode_authorize_frame_without_credentials())
        else:
            self.__ws.send(super(SIGatewayClient, self).encode_authorize_frame_with_credentials(user, password))
        try:
            self.__access_level, self.__gateway_version = super(SIGatewayClient, self).decode_authorized_frame(self.__ws.recv())
        except ConnectionRefusedError:
            # Roll back to DISCONNECTED so a later connect() attempt is legal.
            self.__state = SIConnectionState.DISCONNECTED
            raise SIProtocolError('WebSocket connection refused')

        # Change state to connected.
        self.__state = SIConnectionState.CONNECTED

        # Return access level.
        return self.__access_level

    def state(self) -> SIConnectionState:
        """
        Returns the current state of the client. See **SIConnectionState** for details.

        :return: Current state of the client.
        """
        return self.__state

    def access_level(self) -> SIAccessLevel:
        """
        Return the access level the client has gained on the gateway connected. See **SIAccessLevel** for details.

        :return: Access level granted to client.
        """
        return self.__access_level

    def gateway_version(self) -> str:
        """
        Returns the version of the OpenStuder gateway software running on the host the client is connected to.

        :return: Version of the gateway software.
        """
        return self.__gateway_version

    def enumerate(self) -> Tuple[SIStatus, int]:
        """
        Instructs the gateway to scan every configured and functional device access driver for new devices and remove devices that do not respond anymore. Returns the status of
        the operation, and the number of devices present.

        :return: Returns two values. 1: operation status, 2: the number of devices present.
        :raises SIProtocolError: On a connection, protocol of framing error.
        """

        # Ensure that the client is in the CONNECTED state.
        self.__ensure_in_state(SIConnectionState.CONNECTED)

        # Encode and send ENUMERATE message to gateway.
        self.__ws.send(super(SIGatewayClient, self).encode_enumerate_frame())

        # Wait for ENUMERATED message, decode it and return data.
        return super(SIGatewayClient, self).decode_enumerated_frame(self.__receive_frame_until_commands(['ENUMERATED', 'ERROR']))

    def describe(self, device_access_id: str = None, device_id: str = None, property_id: int = None, flags: SIDescriptionFlags = None) -> Tuple[SIStatus, Optional[str], object]:
        """
        This method can be used to retrieve information about the available devices and their properties from the connected gateway. Using the optional device_access_id,
        device_id and property_id parameters, the method can either request information about the whole topology, a particular device access instance, a device or a property.
        The flags control the level of detail in the gateway's response.

        :param device_access_id: Device access ID for which the description should be retrieved.
        :param device_id: Device ID for which the description should be retrieved. Note that device_access_id must be present too.
        :param property_id: Property ID for which the description should be retrieved. Note that device_access_id and device_id must be present too.
        :param flags: Flags to control level of detail of the response.
        :return: Returns three values. 1: Status of the operation, 2: the subject's id, 3: the description object.
        :raises SIProtocolError: On a connection, protocol of framing error.
        """

        # Ensure that the client is in the CONNECTED state.
        self.__ensure_in_state(SIConnectionState.CONNECTED)

        # Encode and send DESCRIBE message to gateway.
        self.__ws.send(super(SIGatewayClient, self).encode_describe_frame(device_access_id, device_id, property_id, flags))

        # Wait for DESCRIPTION message, decode it and return data.
        return super(SIGatewayClient, self).decode_description_frame(self.__receive_frame_until_commands(['DESCRIPTION', 'ERROR']))

    def find_properties(self, property_id: str) -> Tuple[SIStatus, str, int, List[str]]:
        """
        This method is used to retrieve a list of existing properties that match the given property ID in the form "<device access ID>.<device ID>.<property ID>". The wildcard
        character "*" is supported for <device access ID> and <device ID> fields.
        For example "*.inv.3136" represents all properties with ID 3136 on the device with ID "inv" connected through any device access, "demo.*.3136" represents all properties
        with ID 3136 on any device that disposes that property connected through the device access "demo" and finally "*.*.3136" represents all properties with ID 3136 on any
        device that disposes that property connected through any device access.

        :param property_id: The search wildcard ID.
        :return: Returns four values: 1: Status of the find operation, 2: the searched ID (including wildcard character), 3: the number of properties found,
                 4: List of the property IDs.
        :raises SIProtocolError: On a connection, protocol of framing error.
        """

        # Ensure that the client is in the CONNECTED state.
        self.__ensure_in_state(SIConnectionState.CONNECTED)

        # Encode and send FIND PROPERTIES message to gateway.
        self.__ws.send(super(SIGatewayClient, self).encode_find_properties_frame(property_id))

        # Wait for PROPERTIES FOUND message, decode it and return data.
        return super(SIGatewayClient, self).decode_properties_found_frame(self.__receive_frame_until_commands(['PROPERTIES FOUND', 'ERROR']))

    def read_property(self, property_id: str) -> Tuple[SIStatus, str, Optional[any]]:
        """
        This method is used to retrieve the actual value of a given property from the connected gateway. The property is identified by the property_id parameter.

        :param property_id: The ID of the property to read in the form '{device access ID}.{device ID}.{property ID}'.
        :return: Returns three values: 1: Status of the read operation, 2: the ID of the property read, 3: the value read.
        :raises SIProtocolError: On a connection, protocol of framing error.
        """

        # Ensure that the client is in the CONNECTED state.
        self.__ensure_in_state(SIConnectionState.CONNECTED)

        # Encode and send READ PROPERTY message to gateway.
        self.__ws.send(super(SIGatewayClient, self).encode_read_property_frame(property_id))

        # Wait for PROPERTY READ message, decode it and return data.
        return super(SIGatewayClient, self).decode_property_read_frame(self.__receive_frame_until_commands(['PROPERTY READ', 'ERROR'])).to_tuple()

    def read_properties(self, property_ids: List[str]) -> List[SIPropertyReadResult]:
        """
        This method is used to retrieve the actual value of multiple properties at the same time from the connected gateway. The properties are identified by the property_ids
        parameter.

        :param property_ids: The IDs of the properties to read in the form '{device access ID}.{device ID}.{property ID}'.
        :return: Returns one value: 1: List of statuses and values of all read properties.
        :raises SIProtocolError: On a connection, protocol of framing error.
        """

        # Ensure that the client is in the CONNECTED state.
        self.__ensure_in_state(SIConnectionState.CONNECTED)

        # Encode and send READ PROPERTIES message to gateway.
        self.__ws.send(super(SIGatewayClient, self).encode_read_properties_frame(property_ids))

        # Wait for PROPERTIES READ message, decode it and return data.
        return super(SIGatewayClient, self).decode_properties_read_frame(self.__receive_frame_until_commands(['PROPERTIES READ', 'ERROR']))

    def write_property(self, property_id: str, value: any = None, flags: SIWriteFlags = None) -> Tuple[SIStatus, str]:
        """
        The write_property method is used to change the actual value of a given property. The property is identified by the property_id parameter and the new value is passed by the
        optional value parameter.
        This value parameter is optional as it is possible to write to properties with the data type "Signal" where there is no actual value written, the write operation rather
        triggers an action on the device.

        :param property_id: The ID of the property to write in the form '{device access ID}.{device ID}.{property ID}'.
        :param value: Optional value to write.
        :param flags: Write flags, See SIWriteFlags for details, if not provided the flags are not send by the client, and the gateway uses the default flags
                      (SIWriteFlags.PERMANENT).
        :return: Returns two values: 1: Status of the write operation, 2: the ID of the property written.
        :raises SIProtocolError: On a connection, protocol of framing error.
        """

        # Ensure that the client is in the CONNECTED state.
        self.__ensure_in_state(SIConnectionState.CONNECTED)

        # Encode and send WRITE PROPERTY message to gateway.
        self.__ws.send(super(SIGatewayClient, self).encode_write_property_frame(property_id, value, flags))

        # Wait for PROPERTY WRITTEN message, decode it and return data.
        return super(SIGatewayClient, self).decode_property_written_frame(self.__receive_frame_until_commands(['PROPERTY WRITTEN', 'ERROR']))

    def read_datalog_properties(self, from_: datetime.datetime = None, to: datetime.datetime = None) -> Tuple[SIStatus, List[str]]:
        """
        This method is used to retrieve the list of IDs of all properties for whom data is logged on the gateway. If a time window is given using from and to, only data in this
        time windows is considered.

        :param from_: Optional date and time of the start of the time window to be considered.
        :param to: Optional date and time of the end of the time window to be considered.
        :return: Returns two values: 1: Status of the operation, 2: List of all properties for whom data is logged on the gateway in the optional time window.
        :raises SIProtocolError: On a connection, protocol of framing error.
        """

        # Ensure that the client is in the CONNECTED state.
        self.__ensure_in_state(SIConnectionState.CONNECTED)

        # Encode and send READ DATALOG message to gateway.
        # Passing property_id=None requests the property list instead of values.
        self.__ws.send(super(SIGatewayClient, self).encode_read_datalog_frame(None, from_, to, None))

        # Wait for DATALOG READ message, decode it and return data.
        # The payload is a newline-separated list of property IDs.
        status, _, _, parameters = super(SIGatewayClient, self).decode_datalog_read_frame(self.__receive_frame_until_commands(['DATALOG READ', 'ERROR']))
        return status, parameters.splitlines()

    def read_datalog_csv(self, property_id: str, from_: datetime.datetime = None, to: datetime.datetime = None, limit: int = None) -> Tuple[SIStatus, str, int, str]:
        """
        This method is used to retrieve all or a subset of logged data of a given property from the gateway.

        :param property_id: Global ID of the property for which the logged data should be retrieved. It has to be in the form '{device access ID}.{device ID}.{property ID}'.
        :param from_: Optional date and time from which the data has to be retrieved, defaults to the oldest value logged.
        :param to: Optional date and time to which the data has to be retrieved, defaults to the current time on the gateway.
        :param limit: Using this optional parameter you can limit the number of results retrieved in total.
        :return: Returns four values: 1: Status of the operation, 2: id of the property, 3: number of entries, 4: Properties data in CSV format whereas the first column is the
                 date and time in ISO 8601 extended format, and the second column contains the actual values.
        :raises SIProtocolError: On a connection, protocol of framing error.
        """

        # Ensure that the client is in the CONNECTED state.
        self.__ensure_in_state(SIConnectionState.CONNECTED)

        # Encode and send READ DATALOG message to gateway.
        self.__ws.send(super(SIGatewayClient, self).encode_read_datalog_frame(property_id, from_, to, limit))

        # Wait for DATALOG READ message, decode it and return data.
        return super(SIGatewayClient, self).decode_datalog_read_frame(self.__receive_frame_until_commands(['DATALOG READ', 'ERROR']))

    def read_messages(self, from_: datetime.datetime = None, to: datetime.datetime = None, limit: int = None) -> Tuple[SIStatus, int, List[SIDeviceMessage]]:
        """
        The read_messages() method can be used to retrieve all or a subset of stored messages send by devices on all buses in the past from the gateway.

        :param from_: Optional date and time from which the messages have to be retrieved, defaults to the oldest message saved.
        :param to: Optional date and time to which the messages have to be retrieved, defaults to the current time on the gateway.
        :param limit: Using this optional parameter you can limit the number of messages retrieved in total.
        :return: Returns three values. 1: the status of the operation, 2: the number of messages, 3: the list of retrieved messages.
        :raises SIProtocolError: On a connection, protocol of framing error.
        """

        # Ensure that the client is in the CONNECTED state.
        self.__ensure_in_state(SIConnectionState.CONNECTED)

        # Encode and send READ MESSAGES message to gateway.
        self.__ws.send(super(SIGatewayClient, self).encode_read_messages_frame(from_, to, limit))

        # Wait for MESSAGES READ message, decode it and return data.
        return super(SIGatewayClient, self).decode_messages_read_frame(self.__receive_frame_until_commands(['MESSAGES READ', 'ERROR']))

    def disconnect(self) -> None:
        """
        Disconnects the client from the gateway.
        """

        # Ensure that the client is in the CONNECTED state.
        self.__ensure_in_state(SIConnectionState.CONNECTED)

        # Change state to disconnected.
        self.__state = SIConnectionState.DISCONNECTED

        # Close the WebSocket
        self.__ws.close()
class SIAsyncGatewayClientCallbacks:
    """
    Base class containing all callback methods that can be called by the SIAsyncGatewayClient. You can use this as your base class and register it using
    SIAsyncGatewayClient.set_callbacks().
    """

    # All methods are intentionally no-ops: subclasses override only the
    # callbacks they are interested in.

    def on_connected(self, access_level: SIAccessLevel, gateway_version: str) -> None:
        """
        This method is called once the connection to the gateway could be established and the user has been successfully authorized.

        :param access_level: Access level that was granted to the user during authorization.
        :param gateway_version: Version of the OpenStuder software running on the gateway.
        """
        pass

    def on_disconnected(self) -> None:
        """
        Called when the connection to the OpenStuder gateway has been gracefully closed by either side or the connection was lost by any other reason.
        """
        pass

    def on_error(self, reason) -> None:
        """
        Called on severe errors.

        :param reason: Exception that caused the erroneous behavior.
        """
        pass

    def on_enumerated(self, status: SIStatus, device_count: int) -> None:
        """
        Called when the enumeration operation started using enumerate() has completed on the gateway.

        :param status: Operation status.
        :param device_count: Number of devices present.
        """
        pass

    def on_description(self, status: SIStatus, id_: Optional[str], description: object) -> None:
        """
        Called when the gateway returned the description requested using the describe() method.

        :param status: Status of the operation.
        :param id_: Subject's ID.
        :param description: Description object.
        """
        pass

    def on_properties_found(self, status: SIStatus, id_: str, count: int, properties: List[str]):
        """
        Called when the gateway returned the list of found properties requested using the find_properties() method.

        :param status: Status of the find operation.
        :param id_: The searched ID (including wildcard character).
        :param count: The number of properties found.
        :param properties: List of the property IDs.
        """
        pass

    def on_property_read(self, status: SIStatus, property_id: str, value: Optional[any]) -> None:
        """
        Called when the property read operation started using read_property() has completed on the gateway.

        :param status: Status of the read operation.
        :param property_id: ID of the property read.
        :param value: The value read.
        """
        pass

    def on_properties_read(self, results: List[SIPropertyReadResult]) -> None:
        """
        Called when the multiple properties read operation started using read_properties() has completed on the gateway.

        :param results: List of all results of the operation.
        """
        pass

    def on_property_written(self, status: SIStatus, property_id: str) -> None:
        """
        Called when the property write operation started using write_property() has completed on the gateway.

        :param status: Status of the write operation.
        :param property_id: ID of the property written.
        """
        pass

    def on_property_subscribed(self, status: SIStatus, property_id: str) -> None:
        """
        Called when the gateway returned the status of the property subscription requested using the subscribe_to_property() method.

        :param status: The status of the subscription.
        :param property_id: ID of the property.
        """
        pass

    def on_properties_subscribed(self, statuses: List[SIPropertySubscriptionResult]) -> None:
        """
        Called when the gateway returned the status of the properties subscription requested using the subscribe_to_properties() method.

        :param statuses: The statuses of the individual subscriptions.
        """
        pass

    def on_property_unsubscribed(self, status: SIStatus, property_id: str) -> None:
        """
        Called when the gateway returned the status of the property unsubscription requested using the unsubscribe_from_property() method.

        :param status: The status of the unsubscription.
        :param property_id: ID of the property.
        """
        pass

    def on_properties_unsubscribed(self, statuses: List[SIPropertySubscriptionResult]) -> None:
        """
        Called when the gateway returned the status of the properties unsubscription requested using the unsubscribe_from_properties() method.

        :param statuses: The statuses of the individual unsubscriptions.
        """
        pass

    def on_property_updated(self, property_id: str, value: any) -> None:
        """
        This callback is called whenever the gateway send a property update.

        :param property_id: ID of the updated property.
        :param value: The current value of the property.
        """
        pass

    def on_datalog_properties_read(self, status: SIStatus, properties: List[str]) -> None:
        """
        Called when the datalog property list operation started using read_datalog_properties() has completed on the gateway.

        :param status: Status of the operation.
        :param properties: List of the IDs of the properties for whom data is available in the data log.
        """
        pass

    def on_datalog_read_csv(self, status: SIStatus, property_id: str, count: int, values: str) -> None:
        """
        Called when the datalog read operation started using read_datalog() has completed on the gateway. This version of the method returns the data in CSV format suitable to
        be written to a file.

        :param status: Status of the operation.
        :param property_id: ID of the property.
        :param count: Number of entries.
        :param values: Properties data in CSV format whereas the first column is the date and time in ISO 8601 extended format and the second column contains the actual values.
        """
        pass

    def on_device_message(self, message: SIDeviceMessage) -> None:
        """
        This callback is called whenever the gateway send a device message indication.

        :param message: The device message received.
        """
        pass

    def on_messages_read(self, status: SIStatus, count: int, messages: List[SIDeviceMessage]) -> None:
        """
        Called when the gateway returned the status of the read messages operation using the read_messages() method.

        :param status: The status of the operation.
        :param count: Number of messages retrieved.
        :param messages: List of retrieved messages.
        """
        pass
class SIAsyncGatewayClient(_SIAbstractGatewayClient):
"""
Complete, asynchronous (non-blocking) OpenStuder gateway client.
This client uses an asynchronous model which has the disadvantage to be a bit harder to use than the synchronous version. The advantages are that long operations do not block
the main thread as all results are reported using callbacks, device message indications are supported and subscriptions to property changes are possible.
"""
def connect(self, host: str, port: int = 1987, user: str = None, password: str = None, background: bool = True) -> None:
"""
Establishes the WebSocket connection to the OpenStuder gateway and executes the user authorization process once the connection has been established in the background. This
method returns immediately and does not block the current thread.
The status of the connection attempt is reported either by the on_connected() callback on success or the on_error() callback if the connection could not be established
or the authorisation for the given user was rejected by the gateway.
:param host: Hostname or IP address of the OpenStuder gateway to connect to.
:param port: TCP port used for the connection to the OpenStuder gateway, defaults to 1987.
:param user: Username send to the gateway used for authorization.
:param password: Password send to the gateway used for authorization.
:param background: If true, the handling of the WebSocket connection is done in the background, if false the current thread is took over.
:raises SIProtocolError: If there was an error initiating the WebSocket connection.
"""
# Ensure that the client is in the DISCONNECTED state.
self.__ensure_in_state(SIConnectionState.DISCONNECTED)
# Save parameter for later use.
self.__user = user
self.__password = password
# Connect to WebSocket server.
self.__state = SIConnectionState.CONNECTING
self.__ws = websocket.WebSocketApp('ws://{host}:{port}'.format(host=host, port=port),
on_open=self.__on_open,
on_message=self.__on_message,
on_error=self.__on_error,
on_close=self.__on_close
)
# TODO: Start connection timeout.
# If background mode is selected, start a daemon thread for the connection handling, otherwise take over current thread.
if background:
self.__thread = Thread(target=self.__ws.run_forever)
self.__thread.setDaemon(True)
self.__thread.start()
else:
self.__ws.run_forever()
def set_callbacks(self, callbacks: SIAsyncGatewayClientCallbacks) -> None:
"""
Configures the client to use all callbacks of the passed abstract client callback class. Using this you can set all callbacks to be called on the given object and avoid
having to set each callback individually.
:param callbacks: Object derived from SIAsyncGatewayClientCallbacks to be used for all callbacks.
"""
if isinstance(callbacks, SIAsyncGatewayClientCallbacks):
self.on_connected = callbacks.on_connected
self.on_disconnected = callbacks.on_disconnected
self.on_error = callbacks.on_error
self.on_enumerated = callbacks.on_enumerated
self.on_description = callbacks.on_description
self.on_properties_found = callbacks.on_properties_found
self.on_property_read = callbacks.on_property_read
self.on_properties_read = callbacks.on_properties_read
self.on_property_written = callbacks.on_property_written
self.on_property_subscribed = callbacks.on_property_subscribed
self.on_properties_subscribed = callbacks.on_properties_subscribed
self.on_property_unsubscribed = callbacks.on_property_unsubscribed
self.on_properties_unsubscribed = callbacks.on_properties_unsubscribed
self.on_property_updated = callbacks.on_property_updated
self.on_datalog_properties_read = callbacks.on_datalog_properties_read
self.on_datalog_read_csv = callbacks.on_datalog_read_csv
self.on_device_message = callbacks.on_device_message
self.on_messages_read = callbacks.on_messages_read
    def state(self) -> SIConnectionState:
        """
        Returns the current state of the client. See **SIConnectionState** for details.
        :return: Current state of the client.
        """
        # Simple accessor; state transitions happen in the WebSocket event handlers.
        return self.__state
    def access_level(self) -> SIAccessLevel:
        """
        Return the access level the client has gained on the gateway connected. See **SIAccessLevel** for details.
        :return: Access level granted to client.
        """
        # Set during the authorization handshake; simple accessor here.
        return self.__access_level
    def gateway_version(self) -> str:
        """
        Returns the version of the OpenStuder gateway software running on the host the client is connected to.
        :return: Version of the gateway software.
        """
        # Reported by the gateway during authorization; simple accessor here.
        return self.__gateway_version
def enumerate(self) -> None:
"""
Instructs the gateway to scan every configured and functional device access driver for new devices and remove devices that do not respond anymore.
The status of the operation and the number of devices present are reported using the on_enumerated() callback.
:raises SIProtocolError: If the client is not connected or not yet authorized.
"""
# Ensure that the client is in the CONNECTED state.
self.__ensure_in_state(SIConnectionState.CONNECTED)
# Encode and send ENUMERATE message to gateway.
self.__ws.send(super(SIAsyncGatewayClient, self).encode_enumerate_frame())
def describe(self, device_access_id: str = None, device_id: str = None, property_id: int = None, flags: SIDescriptionFlags = None) -> None:
"""
This method can be used to retrieve information about the available devices and their properties from the connected gateway. Using the optional device_access_id,
device_id and property_id parameters, the method can either request information about the whole topology, a particular device access instance, a device or a property.
The flags control the level of detail in the gateway's response.
The description is reported using the on_description() callback.
:param device_access_id: Device access ID for which the description should be retrieved.
:param device_id: Device ID for which the description should be retrieved. Note that device_access_id must be present too.
:param property_id: Property ID for which the description should be retrieved. Note that device_access_id and device_id must be present too.
:param flags: Flags to control level of detail of the response.
:raises SIProtocolError: If the client is not connected or not yet authorized.
"""
# Ensure that the client is in the CONNECTED state.
self.__ensure_in_state(SIConnectionState.CONNECTED)
# Encode and send DESCRIBE message to gateway.
self.__ws.send(super(SIAsyncGatewayClient, self).encode_describe_frame(device_access_id, device_id, property_id, flags))
def find_properties(self, property_id: str) -> None:
"""
This method is used to retrieve a list of existing properties that match the given property ID in the form "<device access ID>.<device ID>.<property ID>". The wildcard
character "*" is supported for <device access ID> and <device ID> fields.
For example "*.inv.3136" represents all properties with ID 3136 on the device with ID "inv" connected through any device access, "demo.*.3136" represents all properties
with ID 3136 on any device that disposes that property connected through the device access "demo" and finally "*.*.3136" represents all properties with ID 3136 on any
device that disposes that property connected through any device access.
The status of the read operation and the actual value of the property are reported using the on_properties_found() callback.
:param property_id: The search wildcard ID.
:raises SIProtocolError: If the client is not connected or not yet authorized.
"""
# Ensure that the client is in the CONNECTED state.
self.__ensure_in_state(SIConnectionState.CONNECTED)
# Encode and send FIND PROPERTIES message to gateway.
self.__ws.send(super(SIAsyncGatewayClient, self).encode_find_properties_frame(property_id))
def read_property(self, property_id: str) -> None:
"""
This method is used to retrieve the actual value of a given property from the connected gateway. The property is identified by the property_id parameter.
The status of the read operation and the actual value of the property are reported using the on_property_read() callback.
:param property_id: The ID of the property to read in the form '{device access ID}.{device ID}.{property ID}'.
:raises SIProtocolError: If the client is not connected or not yet authorized.
"""
# Ensure that the client is in the CONNECTED state.
self.__ensure_in_state(SIConnectionState.CONNECTED)
# Encode and send READ PROPERTY message to gateway.
self.__ws.send(super(SIAsyncGatewayClient, self).encode_read_property_frame(property_id))
def read_properties(self, property_ids: List[str]) -> None:
"""
This method is used to retrieve the actual value of multiple property at the same time from the connected gateway. The properties are identified by the property_ids
parameter.
The status of the multiple read operations and the actual value of the properties are reported using the on_properties_read() callback.
:param property_ids: The IDs of the properties to read in the form '{device access ID}.{device ID}.{property ID}'.
:raises SIProtocolError: If the client is not connected or not yet authorized.
"""
# Ensure that the client is in the CONNECTED state.
self.__ensure_in_state(SIConnectionState.CONNECTED)
# Encode and send READ PROPERTIES message to gateway.
self.__ws.send(super(SIAsyncGatewayClient, self).encode_read_properties_frame(property_ids))
def write_property(self, property_id: str, value: any = None, flags: SIWriteFlags = None) -> None:
"""
The write_property method is used to change the actual value of a given property. The property is identified by the property_id parameter and the new value is passed by the
optional value parameter.
This value parameter is optional as it is possible to write to properties with the data type "Signal" where there is no actual value written, the write operation rather
triggers an action on the device.
The status of the write operation is reported using the on_property_written() callback.
:param property_id: The ID of the property to write in the form '{device access ID}.{<device ID}.{<property ID}'.
:param value: Optional value to write.
:param flags: Write flags, See SIWriteFlags for details, if not provided the flags are not send by the client and the gateway uses the default flags
(SIWriteFlags.PERMANENT).
:raises SIProtocolError: If the client is not connected or not yet authorized.
"""
# Ensure that the client is in the CONNECTED state.
self.__ensure_in_state(SIConnectionState.CONNECTED)
# Encode and send WRITE PROPERTY message to gateway.
self.__ws.send(super(SIAsyncGatewayClient, self).encode_write_property_frame(property_id, value, flags))
def subscribe_to_property(self, property_id: str) -> None:
"""
This method can be used to subscribe to a property on the connected gateway. The property is identified by the property_id parameter.
The status of the subscribe request is reported using the on_property_subscribed() callback.
:param property_id: The ID of the property to subscribe to in the form '{device access ID}.{device ID}.{property ID}'.
:raises SIProtocolError: If the client is not connected or not yet authorized.
"""
# Ensure that the client is in the CONNECTED state.
self.__ensure_in_state(SIConnectionState.CONNECTED)
# Encode and send SUBSCRIBE PROPERTY message to gateway.
self.__ws.send(super(SIAsyncGatewayClient, self).encode_subscribe_property_frame(property_id))
def subscribe_to_properties(self, property_ids: List[str]) -> None:
"""
This method can be used to subscribe to multiple properties on the connected gateway. The properties are identified by the property_ids parameter.
The status of the subscribe request is reported using the on_properties_subscribed() callback.
:param property_ids: The list of IDs of the properties to subscribe to in the form '{device access ID}.{device ID}.{property ID}'.
:raises SIProtocolError: If the client is not connected or not yet authorized.
"""
# Ensure that the client is in the CONNECTED state.
self.__ensure_in_state(SIConnectionState.CONNECTED)
# Encode and send SUBSCRIBE PROPERTIES message to gateway.
self.__ws.send(super(SIAsyncGatewayClient, self).encode_subscribe_properties_frame(property_ids))
def unsubscribe_from_property(self, property_id: str) -> None:
"""
This method can be used to unsubscribe from a property on the connected gateway. The property is identified by the property_id parameter.
The status of the unsubscribe request is reported using the on_property_unsubscribed() callback.
:param property_id: The ID of the property to unsubscribe from in the form '{device access ID}.{device ID}.{property ID}'.
:raises SIProtocolError: If the client is not connected or not yet authorized.
"""
# Ensure that the client is in the CONNECTED state.
self.__ensure_in_state(SIConnectionState.CONNECTED)
# Encode and send UNSUBSCRIBE PROPERTY message to gateway.
self.__ws.send(super(SIAsyncGatewayClient, self).encode_unsubscribe_property_frame(property_id))
def unsubscribe_from_properties(self, property_ids: List[str]) -> None:
"""
This method can be used to unsubscribe from multiple properties on the connected gateway. The properties are identified by the property_ids parameter.
The status of the unsubscribe request is reported using the on_properties_unsubscribed() callback.
:param property_ids: The list of IDs of the properties to unsubscribe from in the form '{device access ID}.{device ID}.{property ID}'.
:raises SIProtocolError: If the client is not connected or not yet authorized.
"""
# Ensure that the client is in the CONNECTED state.
self.__ensure_in_state(SIConnectionState.CONNECTED)
# Encode and send UNSUBSCRIBE PROPERTY message to gateway.
self.__ws.send(super(SIAsyncGatewayClient, self).encode_unsubscribe_properties_frame(property_ids))
def read_datalog_properties(self, from_: datetime.datetime = None, to: datetime.datetime = None) -> None:
"""
This method is used to retrieve the list of IDs of all properties for whom data is logged on the gateway. If a time window is given using from and to, only data in this
time windows is considered.
The status of the operation is the list of properties for whom logged data is available are reported using the on_datalog_properties_read() callback.
:param from_: Optional date and time of the start of the time window to be considered.
:param to: Optional date and time of the end of the time window to be considered.
:raises SIProtocolError: On a connection, protocol of framing error.
"""
# Ensure that the client is in the CONNECTED state.
self.__ensure_in_state(SIConnectionState.CONNECTED)
# Encode and send READ DATALOG message to gateway.
self.__ws.send(super(SIAsyncGatewayClient, self).encode_read_datalog_frame(None, from_, to, None))
def read_datalog(self, property_id: str, from_: datetime.datetime = None, to: datetime.datetime = None, limit: int = None) -> None:
"""
This method is used to retrieve all or a subset of logged data of a given property from the gateway.
The status of this operation and the respective values are reported using the on_datalog_read_csv() callback.
:param property_id: Global ID of the property for which the logged data should be retrieved. It has to be in the form '{device access ID}.{device ID}.{property ID}'.
:param from_: Optional date and time from which the data has to be retrieved, defaults to the oldest value logged.
:param to: Optional date and time to which the data has to be retrieved, defaults to the current time on the gateway.
:param limit: Using this optional parameter you can limit the number of results retrieved in total.
:raises SIProtocolError: If the client is not connected or not yet authorized.
"""
# Ensure that the client is in the CONNECTED state.
self.__ensure_in_state(SIConnectionState.CONNECTED)
# Encode and send READ DATALOG message to gateway.
self.__ws.send(super(SIAsyncGatewayClient, self).encode_read_datalog_frame(property_id, from_, to, limit))
def read_messages(self, from_: datetime.datetime = None, to: datetime.datetime = None, limit: int = None) -> None:
"""
The read_messages method can be used to retrieve all or a subset of stored messages send by devices on all buses in the past from the gateway.
The status of this operation and the retrieved messages are reported using the on_messages_read() callback.
:param from_: Optional date and time from which the messages have to be retrieved, defaults to the oldest message saved.
:param to: Optional date and time to which the messages have to be retrieved, defaults to the current time on the gateway.
:param limit: Using this optional parameter you can limit the number of messages retrieved in total.
:raises SIProtocolError: If the client is not connected or not yet authorized.
"""
# Ensure that the client is in the CONNECTED state.
self.__ensure_in_state(SIConnectionState.CONNECTED)
# Encode and send READ MESSAGES message to gateway.
self.__ws.send(super(SIAsyncGatewayClient, self).encode_read_messages_frame(from_, to, limit))
    def disconnect(self) -> None:
        """
        Disconnects the client from the gateway.
        """
        # Ensure that the client is in the CONNECTED state.
        self.__ensure_in_state(SIConnectionState.CONNECTED)
        # Close the WebSocket
        # The state change to DISCONNECTED presumably happens in the on_close handler — confirm.
        self.__ws.close()
| [
6738,
11593,
37443,
834,
1330,
37647,
198,
6738,
19720,
1330,
4889,
540,
11,
32233,
11,
309,
29291,
11,
7343,
198,
6738,
33829,
1330,
2039,
388,
11,
19762,
11,
8295,
198,
6738,
4704,
278,
1330,
14122,
198,
11748,
4818,
8079,
198,
11748,... | 2.96273 | 15,750 |
from scipy import fft
import numpy as np
def get_fft_features(features_, s, fs, data, KF_dict, ch, f_ranges, f_band_names):
    """Compute FFT band features for a single channel and store them in the feature dict.

    The incoming data is assumed to cover one second; only the most recent window
    (``s["fft_settings"]["windowlength"]`` in milliseconds) is analyzed.

    Parameters
    ----------
    features_ : dict
        feature dictionary, updated in place and returned
    s : dict
        settings dict
    fs : int/float
        sampling frequency
    data : np.array
        data for single channel, assumed to be one second
    KF_dict : dict
        Kalman filter dictionaries, keyed per channel and frequency band
    ch : string
        channel name
    f_ranges : list
        list of lists with the respective frequency band ranges
    f_band_names : list
        list of frequency band names
    """
    # Keep only the newest window of samples; window length is specified in ms.
    n_samples = int(s["fft_settings"]["windowlength"] * fs / 1000)
    segment = data[-n_samples:]
    # Magnitude spectrum of the real-valued segment.
    spectrum = np.abs(fft.rfft(segment))
    # NOTE(review): the bin axis assumes bin index == frequency in Hz, which holds for a
    # 1 s window at matching fs — confirm for other window lengths.
    freqs = np.arange(0, int(s["fft_settings"]["windowlength"] / 2) + 1, 1)
    for band_idx, f_range in enumerate(f_ranges):
        band_name = f_band_names[band_idx]
        # Indices of the spectrum bins falling inside the band (inclusive bounds).
        bin_idx = np.where((freqs >= f_range[0]) & (freqs <= f_range[1]))[0]
        feature_calc = np.mean(spectrum[bin_idx])
        if s["fft_settings"]["log_transform"]:
            feature_calc = np.log(feature_calc)
        if s["methods"]["kalman_filter"] is True:
            if band_name in s["kalman_filter_settings"]["frequency_bands"]:
                kf_key = '_'.join([ch, band_name])
                # Smooth the band power estimate with the per-band Kalman filter.
                KF_dict[kf_key].predict()
                KF_dict[kf_key].update(feature_calc)
                feature_calc = KF_dict[kf_key].x[0]  # filtered signal
        features_['_'.join([ch, 'fft', band_name])] = feature_calc
    return features_
| [
6738,
629,
541,
88,
1330,
277,
701,
198,
11748,
299,
32152,
355,
45941,
628,
198,
4299,
651,
62,
487,
83,
62,
40890,
7,
40890,
62,
11,
264,
11,
43458,
11,
1366,
11,
509,
37,
62,
11600,
11,
442,
11,
277,
62,
81,
6231,
11,
277,
... | 2.186684 | 766 |
from __future__ import annotations
from enum import Enum
from imports import k8s
from cdk8s import Chart
from kubeasy_sdk.deployment import Deployment
from kubeasy_sdk.utils.resource import Rendered
from kubeasy_sdk.utils.networking.service_port import ServicePort
from kubeasy_sdk.utils.collections.service_ports import ServicePorts
# Service Labels
# Service Selectors
| [
6738,
11593,
37443,
834,
1330,
37647,
198,
6738,
33829,
1330,
2039,
388,
198,
198,
6738,
17944,
1330,
479,
23,
82,
198,
6738,
22927,
74,
23,
82,
1330,
22086,
198,
198,
6738,
479,
3266,
4107,
62,
21282,
74,
13,
2934,
1420,
434,
1330,
... | 3.318966 | 116 |
import operator
import re
# Python 2 script (print statements): Advent-of-Code style reindeer race scoring.
data = open("data", "r").read()
seconds = 2503
# parseDataLine is defined elsewhere in this file; in Python 2 map() returns a list,
# which is why reindeers can be iterated multiple times below.
reindeers = map(parseDataLine, data.split("\n"))
# Score table: reindeer name -> accumulated lead points.
reindeerNames = {}
for reindeer in reindeers:
    reindeerNames[reindeer[0]] = 0
# Award one point per second to every reindeer in the lead at that second.
# inLeadAtSecond is defined elsewhere in this file.
for second in range(seconds + 1):
    inLeads = inLeadAtSecond(reindeers, second)
    for inLead in inLeads:
        reindeerNames[inLead[1][0]] += 1
a = inLeadAtSecond(reindeers, seconds)[0]
print "Part 1, in lead:", (a[1][0], a[0])
print "Part 2, in lead:", list(reversed(sorted(reindeerNames.items(), key=operator.itemgetter(1))))[0]
| [
11748,
10088,
198,
11748,
302,
198,
198,
7890,
796,
1280,
7203,
7890,
1600,
366,
81,
11074,
961,
3419,
628,
198,
43012,
796,
8646,
18,
198,
260,
521,
47619,
796,
3975,
7,
29572,
6601,
13949,
11,
1366,
13,
35312,
7203,
59,
77,
48774,
... | 2.557692 | 208 |
from pyalgotrade import strategy
from pyalgotrade import plotter
from pyalgotrade.tools import quandl
from pyalgotrade.feed import csvfeed
import datetime
if __name__ == "__main__":
    # Run the strategy with plotting enabled; main is defined elsewhere in this file
    # (presumably taking a plot flag — confirm against the full file).
    main(True)
| [
6738,
12972,
14016,
313,
27585,
1330,
4811,
198,
6738,
12972,
14016,
313,
27585,
1330,
7110,
353,
198,
6738,
12972,
14016,
313,
27585,
13,
31391,
1330,
627,
392,
75,
198,
6738,
12972,
14016,
313,
27585,
13,
12363,
1330,
269,
21370,
12363,... | 3.045455 | 66 |
#
# Tests jupyter notebooks
#
import os
import subprocess
import unittest
import nbconvert
import liionpack as lp
if __name__ == "__main__":
    # Discover and run the notebook tests defined in this module.
    unittest.main()
| [
2,
198,
2,
30307,
474,
929,
88,
353,
43935,
198,
2,
198,
11748,
28686,
198,
11748,
850,
14681,
198,
11748,
555,
715,
395,
198,
198,
11748,
299,
65,
1102,
1851,
198,
198,
11748,
7649,
295,
8002,
355,
300,
79,
628,
198,
198,
361,
11... | 2.634921 | 63 |
import frappe
from frappe.utils import now_datetime | [
198,
11748,
5306,
27768,
198,
6738,
5306,
27768,
13,
26791,
1330,
783,
62,
19608,
8079
] | 3.466667 | 15 |
from flask import Flask, render_template, request
import pickle
import pandas as pd
# app instantiation
APP = Flask(__name__)
# Load model
with open("xgb_class_1.pkl", "rb") as f:
model = pickle.load(f)
# Root route: renders the static landing page template.
@APP.route('/')
def Home_page():
    '''Landing page to the Kickstarter Prediction project'''
    return render_template('landing.html', title='Home')
@APP.route('/prediction', methods= ["POST"])
| [
6738,
42903,
1330,
46947,
11,
8543,
62,
28243,
11,
2581,
198,
11748,
2298,
293,
198,
11748,
19798,
292,
355,
279,
67,
198,
198,
2,
598,
9113,
3920,
198,
24805,
796,
46947,
7,
834,
3672,
834,
8,
198,
198,
2,
8778,
2746,
198,
4480,
... | 2.886525 | 141 |
#!/usr/bin/python3
# -*-coding:utf-8-*-
__author__ = "Bannings"
if __name__ == '__main__':
    # Smoke test for MyQueue (defined elsewhere in this file):
    # after pushing 1 then 2, the queue must behave FIFO.
    obj = MyQueue()
    obj.push(1)
    obj.push(2)
    assert obj.peek() == 1  # front element is the first one pushed
    assert obj.pop() == 1
    assert obj.empty() == False  # element 2 is still queued
2,
48443,
14629,
14,
8800,
14,
29412,
18,
198,
2,
532,
9,
12,
66,
7656,
25,
40477,
12,
23,
12,
9,
12,
198,
198,
834,
9800,
834,
796,
366,
33,
1236,
654,
1,
198,
198,
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834,
10354,
19... | 2.12037 | 108 |
#####################################################################################
#
# Copyright (c) Microsoft Corporation. All rights reserved.
#
# This source code is subject to terms and conditions of the Apache License, Version 2.0. A
# copy of the license can be found in the License.html file at the root of this distribution. If
# you cannot locate the Apache License, Version 2.0, please send an email to
# ironpy@microsoft.com. By using this source code in any fashion, you are agreeing to be bound
# by the terms of the Apache License, Version 2.0.
#
# You must not remove this notice, or any other, from this software.
#
#
#####################################################################################
'''
'''
#------------------------------------------------------------------------------
from iptest import *
from iptest.assert_util import *
# Field tests are not supported on Silverlight.
skiptest("silverlight")
add_clr_assemblies("fieldtests", "typesamples")

if options.RUN_TESTS: #TODO - bug when generating Pydoc
    from Merlin.Testing.FieldTest import *
    from Merlin.Testing.TypeSample import *

# The CLR types whose fields are exercised; index i below maps to types[i].
types = [
    Struct,
    GenericStruct[int],
    GenericStruct[SimpleClass],
    Class,
    GenericClass[SimpleStruct],
    GenericClass[SimpleClass],
    ]

# Generate one test function per (type, access pattern) combination via exec;
# the _test_* helpers are defined elsewhere in this file.
for i in range(len(types)):
    exec("def test_%s_get_by_instance(): _test_get_by_instance(types[%s])" % (i, i))
    exec("def test_%s_get_by_type(): _test_get_by_type(types[%s])" % (i, i))
    exec("def test_%s_get_by_descriptor(): _test_get_by_descriptor(types[%s])" % (i, i))

    exec("def test_%s_set_by_instance(): _test_set_by_instance(types[%s])" % (i, i))
    exec("def test_%s_set_by_type(): _test_set_by_type(types[%s])" % (i, i))
    exec("def test_%s_set_by_descriptor(): _test_set_by_descriptor(types[%s])" % (i, i))

    exec("def test_%s_delete_by_type(): _test_delete_by_type(types[%s])" % (i, i))
    exec("def test_%s_delete_by_instance(): _test_delete_by_instance(types[%s])" % (i, i))
    exec("def test_%s_delete_by_descriptor(): _test_delete_by_descriptor(types[%s])" % (i, i))

# NOTE(review): a decorator must precede a def/class; '@skip' directly above a plain
# call looks suspicious here — confirm against the iptest framework conventions.
@skip("multiple_execute")
run_test(__name__)
| [
29113,
29113,
14468,
4242,
2,
198,
2,
198,
2,
220,
15069,
357,
66,
8,
5413,
10501,
13,
1439,
2489,
10395,
13,
198,
2,
198,
2,
770,
2723,
2438,
318,
2426,
284,
2846,
290,
3403,
286,
262,
24843,
13789,
11,
10628,
362,
13,
15,
13,
... | 2.845845 | 746 |
import os
import torch
import torch.nn as nn
import h5py
from collections import OrderedDict
from torchvision.datasets.utils import download_url
__all__ = ["GoogleNet"]
| [
11748,
28686,
198,
11748,
28034,
198,
11748,
28034,
13,
20471,
355,
299,
77,
198,
11748,
289,
20,
9078,
198,
198,
6738,
17268,
1330,
14230,
1068,
35,
713,
198,
6738,
28034,
10178,
13,
19608,
292,
1039,
13,
26791,
1330,
4321,
62,
6371,
... | 3.264151 | 53 |
import constants

# URL and HTML element-ID constants for scraping the Manipal WebSIS student portal.
base_url = 'http://websismit.manipal.edu/websis/control/StudentAcademicProfile'
url = base_url + '?productCategoryId=0905-TERM-'
base_url_details = 'http://websismit.manipal.edu/websis/control/'
url_details = base_url_details + 'ListCTPEnrollment?customTimePeriodId='

#HTML IDs to be used in first url
form_id = 'ProgramAdmissionItemDetail'
credits_id = 'ProgramAdmissionItemDetail_pcredits_title'
gpa_id = 'ProgramAdmissionItemDetail_ptermResultScore_title'
# Per-course table cell IDs; each is suffixed with a row index by the scraper.
course_code_id = 'cc_TermGradeBookSummary_internalName_'
course_id = 'cc_TermGradeBookSummary_productName_'
course_credit_id = 'cc_TermGradeBookSummary_credit_'
course_grade_id = 'cc_TermGradeBookSummary_pfinalResult_'
course_session_id = 'cc_TermGradeBookSummary_customTimePeriodId_'

#HTML IDs to be used in second url
attendance_id = 'cc_ListAttendanceSummary_'
attendance_code_id = attendance_id + 'productId_'
attendance_name_id = attendance_id + 'productName_'
attendance_classes_id = attendance_id + 'attendanceTaken_'
attendance_attended_id = attendance_id + 'classesAttended_'
attendance_absent_id = attendance_id + 'classesAbsent_'
attendance_percent_id = attendance_id + 'attendancePercentage_'
attendance_last_updated = attendance_id + 'lastUpdatedStamp_'

# Internal-assessment (marks) table cell ID prefixes.
internal_id = 'cc_ListAssessmentScores_'
internal_code_id = internal_id + 'internalName_'
internal_subject_name = internal_id + 'productName_'
internal_marks_id = internal_id + 'obtainedMarks_'
| [
11748,
38491,
198,
198,
8692,
62,
6371,
796,
705,
4023,
1378,
732,
1443,
1042,
270,
13,
805,
8521,
13,
15532,
14,
732,
1443,
271,
14,
13716,
14,
38778,
12832,
49113,
37046,
6,
198,
6371,
796,
2779,
62,
6371,
1343,
705,
30,
11167,
27... | 2.907631 | 498 |
# Write a Python program to access and print a URL's content to the console.

from http.client import HTTPConnection

conn = HTTPConnection("example.com")
try:
    conn.request("GET", "/")
    result = conn.getresponse()
    # retrieves the entire contents.
    contents = result.read()
    print(contents)
finally:
    # Bug fix: the connection was never closed, leaking the underlying socket.
    conn.close()
| [
2,
19430,
257,
11361,
1430,
284,
1895,
290,
3601,
257,
10289,
338,
2695,
284,
262,
8624,
13,
198,
198,
6738,
2638,
13,
16366,
1330,
14626,
32048,
198,
37043,
796,
14626,
32048,
7203,
20688,
13,
785,
4943,
198,
37043,
13,
25927,
7203,
... | 3.662338 | 77 |
from .models import Category
| [
6738,
764,
27530,
1330,
21743,
628
] | 5 | 6 |
##Return sum of all unique elements of an array
if __name__ == "__main__":
    # Example input with one duplicate (2); unique elements are 1 and 3.
    nums = [1, 2, 3, 2]
    # unique_element is defined elsewhere in this file — presumably it sums the
    # values occurring exactly once (expected 4 here); confirm against the full file.
    print(unique_element(nums))
| [
2235,
13615,
2160,
286,
477,
3748,
4847,
286,
281,
7177,
628,
198,
198,
361,
11593,
3672,
834,
6624,
366,
834,
12417,
834,
1298,
198,
220,
220,
220,
997,
82,
796,
685,
16,
11,
362,
11,
513,
11,
362,
60,
198,
220,
220,
220,
3601,
... | 2.528302 | 53 |
from numpy import uint64 as ui, arange
START = 1050

# NOTE(review): ONE is defined but never used in the visible code — confirm it is needed.
ONE = ui(1)

n = START
# Walk every n from START up to 2**32 - 2, appending one tab-separated
# "n<TAB>carvedToWritten(n)" line per iteration; carvedToWritten is defined
# elsewhere in this file.
while n < 2**32 -1:
	print(n)
	with open("fromto.txt", "a") as f:
		f.write("%s\t%s\n" % (n, carvedToWritten(n)))
	n += 1
6738,
299,
32152,
1330,
20398,
2414,
355,
334,
72,
11,
610,
858,
198,
198,
2257,
7227,
796,
47235,
198,
198,
11651,
796,
334,
72,
7,
16,
8,
198,
198,
77,
796,
33303,
198,
4514,
299,
1279,
362,
1174,
2624,
532,
16,
25,
198,
197,
... | 2.084211 | 95 |
import multiprocessing
import threading
from functools import partial
from typing import Union, TYPE_CHECKING
from copy import deepcopy
from ... import __default_host__
from ...hubble.hubio import HubIO
from ...hubble.helper import is_valid_huburi
from ...enums import GatewayProtocolType, RuntimeBackendType
from ...parsers.hubble import set_hub_pull_parser
if TYPE_CHECKING:
from argparse import Namespace
# NOTE(review): only the class docstring is visible in this chunk — the
# implementation appears to live outside it; confirm against the full file.
class ConditionalEvent:
    """
    :class:`ConditionalEvent` provides a common interface to an event (multiprocessing or threading event)
    that gets triggered when any of the events provided in input is triggered (OR logic)

    :param backend_runtime: The runtime type to decide which type of Event to instantiate
    :param events_list: The list of events that compose this composable event
    """
def update_runtime_cls(args, copy=False) -> 'Namespace':
    """Resolve the runtime class name for the given pea/pod namespace and store it in `runtime_cls`.

    :param args: pea/pod namespace args
    :param copy: True if args shouldn't be modified in-place
    :return: the namespace (a deep copy when requested) with `runtime_cls` resolved
    """
    ns = deepcopy(args) if copy else args

    # Mapping from gateway protocol to the runtime class that serves it.
    protocol_to_runtime = {
        GatewayProtocolType.GRPC: 'GRPCRuntime',
        GatewayProtocolType.WEBSOCKET: 'WebSocketRuntime',
        GatewayProtocolType.HTTP: 'HTTPRuntime',
    }

    # Non-gateway runtime on a remote host: delegate to JinaD unless remoting is disabled.
    if (
        ns.runtime_cls not in protocol_to_runtime.values()
        and ns.host != __default_host__
        and not ns.disable_remote
    ):
        ns.runtime_cls = 'JinadRuntime'
        # A remote pea also creates a remote workspace, which can take a long time;
        # -1 keeps wait_start_success from failing on the timeout.
        ns.timeout_ready = -1

    # A docker:// executor runs inside a container runtime.
    if ns.runtime_cls == 'ZEDRuntime' and ns.uses.startswith('docker://'):
        ns.runtime_cls = 'ContainerRuntime'

    # A hub URI is first pulled; the pull may itself resolve to a docker:// image.
    if ns.runtime_cls == 'ZEDRuntime' and is_valid_huburi(ns.uses):
        ns.uses = HubIO(
            set_hub_pull_parser().parse_args([ns.uses, '--no-usage'])
        ).pull()
        if ns.uses.startswith('docker://'):
            ns.runtime_cls = 'ContainerRuntime'

    # An explicit gateway protocol always wins.
    if hasattr(ns, 'protocol'):
        ns.runtime_cls = protocol_to_runtime[ns.protocol]

    return ns
| [
11748,
18540,
305,
919,
278,
198,
11748,
4704,
278,
198,
6738,
1257,
310,
10141,
1330,
13027,
198,
6738,
19720,
1330,
4479,
11,
41876,
62,
50084,
2751,
198,
6738,
4866,
1330,
2769,
30073,
198,
198,
6738,
2644,
1330,
11593,
12286,
62,
47... | 2.757985 | 814 |
from ukw_intelli_store.cli import main
from ukw_intelli_store.endomaterial import EndoMaterial
# Paths to sample Excel workbooks used as test fixtures
# (presumably consumed by EndoMaterial — confirm against the test suite).
path_test_imd = "tests/data/imd.xlsx"
path_test_mat = "tests/data/mat.xlsx"
198,
6738,
334,
46265,
62,
600,
23225,
62,
8095,
13,
44506,
1330,
1388,
198,
6738,
334,
46265,
62,
600,
23225,
62,
8095,
13,
437,
296,
2273,
1330,
5268,
78,
17518,
198,
198,
6978,
62,
9288,
62,
320,
67,
796,
366,
41989,
14,
7890,
... | 2.492754 | 69 |
# Placeholder script: prints an identifying message for user 1.
print("user 1")
| [
4798,
7203,
7220,
352,
4943,
198
] | 2.666667 | 6 |
from panda3d.core import PTAVecBase2f, Vec2
from Code.DebugObject import DebugObject
from Code.Globals import Globals
from Code.RenderPasses.AntialiasingFXAAPass import AntialiasingFXAAPass
from Code.RenderPasses.AntialiasingSMAAPass import AntialiasingSMAAPass
class AntialiasingManager(DebugObject):
""" The Antialiasing Manager handles the setup of the antialiasing passes,
if antialiasing is defined in the settings. It also handles jittering when
using a temporal antialiasing technique like SMAA.
When jittering is enabled, the frame is moved by half a pixel up/down every
second frame, and then merged with the previous frame result, to achieve
better antialiasing. This is like MSAA but splitted over several frames """
availableTechniques = ["FXAA", "SMAA", "None"]
def __init__(self, pipeline):
""" Creates the manager and directly setups the passes """
DebugObject.__init__(self, "AntialiasingManager")
self.pipeline = pipeline
self.jitter = False
self.jitterOffsets = []
self.jitterIndex = 0
self.jitterPTA = PTAVecBase2f.emptyArray(1)
self.create()
def create(self):
""" Setups the antialiasing passes, and also computes the jitter offsets """
technique = self.pipeline.settings.antialiasingTechnique
if technique not in self.availableTechniques:
self.error("Unrecognized antialiasing technique: " + technique)
return
# No antialiasing
elif technique == "None":
return
# FXAA 3.11 by nvidia
elif technique == "FXAA":
self.antialiasingPass = AntialiasingFXAAPass()
# SMAA T2
elif technique == "SMAA":
self.antialiasingPass = AntialiasingSMAAPass()
self.jitter = True
# Extract smaa quality preset and define it in the shader
quality = self.pipeline.settings.smaaQuality.upper()
if quality in ["LOW", "MEDIUM", "HIGH", "ULTRA"]:
self.pipeline.getRenderPassManager().registerDefine("SMAA_PRESET_" + quality, 1)
else:
self.error("Unrecognized SMAA quality:", quality)
return
# When jittering is enabled, precompute the jitter offsets
if self.jitter:
# Compute how big a pixel is on screen
aspect = float(Globals.resolution.x) / float(Globals.resolution.y)
onePixelShift = Vec2(0.5 / float(Globals.resolution.x),
0.5 / float(Globals.resolution.y) / aspect) * self.pipeline.settings.jitterAmount
# Annoying that Vec2 has no multliply-operator for non-floats
multiplyVec2 = lambda a, b: Vec2(a.x*b.x, a.y*b.y)
# Multiply the pixel size with the offsets to compute the final jitter
self.jitterOffsets = [
multiplyVec2(onePixelShift, Vec2(-0.25, 0.25)),
multiplyVec2(onePixelShift, Vec2(0.25, -0.25))
]
# Finally register the antialiasing pass
self.pipeline.getRenderPassManager().registerPass(self.antialiasingPass)
def update(self):
    """ Updates the manager, setting the jitter offsets if enabled """
    if self.jitter:
        # Alternate between the two precomputed sub-pixel offsets and
        # apply the shift through the camera lens film offset.
        shift = self.jitterOffsets[self.jitterIndex]
        self.jitterIndex = 1 - self.jitterIndex
        Globals.base.camLens.setFilmOffset(shift.x, shift.y)
| [
6738,
279,
5282,
18,
67,
13,
7295,
1330,
350,
5603,
53,
721,
14881,
17,
69,
11,
38692,
17,
198,
198,
6738,
6127,
13,
27509,
10267,
1330,
31687,
10267,
198,
6738,
6127,
13,
9861,
672,
874,
1330,
40713,
874,
198,
198,
6738,
6127,
13,
... | 2.377397 | 1,460 |
#
# PySNMP MIB module CISCO-MGX82XX-RPM-RSRC-PART-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-MGX82XX-RPM-RSRC-PART-MIB
# Produced by pysmi-0.3.4 at Wed May 1 12:07:29 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, ConstraintsUnion, ValueRangeConstraint, SingleValueConstraint, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "ConstraintsUnion", "ValueRangeConstraint", "SingleValueConstraint", "ConstraintsIntersection")
rpmInterface, = mibBuilder.importSymbols("BASIS-MIB", "rpmInterface")
ciscoWan, = mibBuilder.importSymbols("CISCOWAN-SMI", "ciscoWan")
ObjectGroup, NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "ObjectGroup", "NotificationGroup", "ModuleCompliance")
ModuleIdentity, Counter64, iso, Unsigned32, MibScalar, MibTable, MibTableRow, MibTableColumn, MibIdentifier, Counter32, Bits, IpAddress, TimeTicks, ObjectIdentity, NotificationType, Integer32, Gauge32 = mibBuilder.importSymbols("SNMPv2-SMI", "ModuleIdentity", "Counter64", "iso", "Unsigned32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "MibIdentifier", "Counter32", "Bits", "IpAddress", "TimeTicks", "ObjectIdentity", "NotificationType", "Integer32", "Gauge32")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
ciscoMgx82xxRpmRsrcPartMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 351, 150, 61))
ciscoMgx82xxRpmRsrcPartMIB.setRevisions(('2002-09-17 00:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: ciscoMgx82xxRpmRsrcPartMIB.setRevisionsDescriptions(('Initial version of the MIB. The content of this MIB was originally available in CISCO-WAN-AXIPOP-MIB defined using SMIv1. The applicable objects from CISCO-WAN-AXIPOP-MIB are defined using SMIv2 in this MIB. Also the descriptions of some of the objects have been modified.',))
if mibBuilder.loadTexts: ciscoMgx82xxRpmRsrcPartMIB.setLastUpdated('200209170000Z')
if mibBuilder.loadTexts: ciscoMgx82xxRpmRsrcPartMIB.setOrganization('Cisco Systems, Inc.')
if mibBuilder.loadTexts: ciscoMgx82xxRpmRsrcPartMIB.setContactInfo('Cisco Systems Customer Service Postal: 170 W Tasman Drive San Jose, CA 95134 USA Tel: +1 800 553-NETS E-mail: cs-wanatm@cisco.com')
if mibBuilder.loadTexts: ciscoMgx82xxRpmRsrcPartMIB.setDescription('The MIB module to manage resource partition objects. A resource partition is configured on a RPM subinterface. An administrator can partition connection related resources like VPI, VCI ranges, bandwidth and total amount of available connection entries in the switch through these MIB tables.')
rpmIfCnfResPart = MibIdentifier((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 9, 2))
rpmIfCnfRscPartTable = MibTable((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 9, 2, 1), )
if mibBuilder.loadTexts: rpmIfCnfRscPartTable.setStatus('current')
if mibBuilder.loadTexts: rpmIfCnfRscPartTable.setDescription('The table is for RPM interface resource partition.')
rpmIfCnfRscPartEntry = MibTableRow((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 9, 2, 1, 1), ).setIndexNames((0, "CISCO-MGX82XX-RPM-RSRC-PART-MIB", "rpmIfRscSlotNum"), (0, "CISCO-MGX82XX-RPM-RSRC-PART-MIB", "rpmIfRscPartIfNum"), (0, "CISCO-MGX82XX-RPM-RSRC-PART-MIB", "rpmIfRscPartCtrlrNum"))
if mibBuilder.loadTexts: rpmIfCnfRscPartEntry.setStatus('current')
if mibBuilder.loadTexts: rpmIfCnfRscPartEntry.setDescription('An entry for resource partition configuration on a logical interface. A resource partition need to be configured before connections(or connection endpoints) can be added to the interface.')
rpmIfRscSlotNum = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 9, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 30))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rpmIfRscSlotNum.setStatus('current')
if mibBuilder.loadTexts: rpmIfRscSlotNum.setDescription('This object identifies the slot number of the RPM card in MGX shelf.')
rpmIfRscPartIfNum = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 9, 2, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rpmIfRscPartIfNum.setStatus('current')
if mibBuilder.loadTexts: rpmIfRscPartIfNum.setDescription('This is backplane interface number. Currently there is only one interface and value 1 is the only value supported.')
rpmIfRscPartCtrlrNum = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 9, 2, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("par", 1), ("pnni", 2), ("tag", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rpmIfRscPartCtrlrNum.setStatus('current')
if mibBuilder.loadTexts: rpmIfRscPartCtrlrNum.setDescription('This object identifies the type of the controller which owns the resource partition. par(1) : Portable Auto Route Controller. This is a controller software implementing Cisco Proprietary protocol for network routing and topology. pnni(2): Private Network-to-Network Interface(PNNI) controller. This is a controller implementing ATM Forum PNNI protocol for routing. tag(3) : Tag Switching(MPLS) controller. This is a controller supporting MPLS protocol.')
rpmIfRscPrtRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 9, 2, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("add", 1), ("del", 2), ("mod", 3))).clone('del')).setMaxAccess("readonly")
if mibBuilder.loadTexts: rpmIfRscPrtRowStatus.setStatus('current')
if mibBuilder.loadTexts: rpmIfRscPrtRowStatus.setDescription('This object contains the information on whether subinterface exists or being modified. add(1) : sub-interface exists del(2) : sub-interface deleted mod(2) : sub-interface being modified')
rpmIfRscPrtIngrPctBandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 9, 2, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rpmIfRscPrtIngrPctBandwidth.setStatus('current')
if mibBuilder.loadTexts: rpmIfRscPrtIngrPctBandwidth.setDescription('The percentage of logical interface bandwidth.')
rpmIfRscPrtEgrPctBandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 9, 2, 1, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rpmIfRscPrtEgrPctBandwidth.setStatus('current')
if mibBuilder.loadTexts: rpmIfRscPrtEgrPctBandwidth.setDescription('The percentage of logical interface bandwidth.')
rpmIfRscPrtVpiLow = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 9, 2, 1, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rpmIfRscPrtVpiLow.setStatus('current')
if mibBuilder.loadTexts: rpmIfRscPrtVpiLow.setDescription('The beginning of the VPI range reserved for this partition.')
rpmIfRscPrtVpiHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 9, 2, 1, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rpmIfRscPrtVpiHigh.setStatus('current')
if mibBuilder.loadTexts: rpmIfRscPrtVpiHigh.setDescription('The end of the VPI range reserved for this partition.')
rpmIfRscPrtVciLow = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 9, 2, 1, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rpmIfRscPrtVciLow.setStatus('current')
if mibBuilder.loadTexts: rpmIfRscPrtVciLow.setDescription('The beginning of the VCI range reserved for this partition. This field is only valid for logical interfaces configured with a single VPI.')
rpmIfRscPrtVciHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 9, 2, 1, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rpmIfRscPrtVciHigh.setStatus('current')
if mibBuilder.loadTexts: rpmIfRscPrtVciHigh.setDescription('The end of the VCI range reserved for this partition. This field is only valid for logical interfaces configured with a single VPI.')
rpmIfRscPrtMaxChans = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 9, 2, 1, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4047))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rpmIfRscPrtMaxChans.setStatus('current')
if mibBuilder.loadTexts: rpmIfRscPrtMaxChans.setDescription('This represents maximum number of channels(connections) that are available to the controller.')
cmrRsrcPartMIBConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 351, 150, 61, 3))
cmrRsrcPartMIBCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 351, 150, 61, 3, 1))
cmrRsrcPartMIBGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 351, 150, 61, 3, 2))
cmrRsrcPartMIBCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 351, 150, 61, 3, 1, 1)).setObjects(("CISCO-MGX82XX-RPM-RSRC-PART-MIB", "cmrRsrcPartMIBGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
cmrRsrcPartMIBCompliance = cmrRsrcPartMIBCompliance.setStatus('current')
if mibBuilder.loadTexts: cmrRsrcPartMIBCompliance.setDescription('The Compliance statement for Resource partition management group.')
cmrRsrcPartMIBGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 351, 150, 61, 3, 2, 1)).setObjects(("CISCO-MGX82XX-RPM-RSRC-PART-MIB", "rpmIfRscSlotNum"), ("CISCO-MGX82XX-RPM-RSRC-PART-MIB", "rpmIfRscPartIfNum"), ("CISCO-MGX82XX-RPM-RSRC-PART-MIB", "rpmIfRscPartCtrlrNum"), ("CISCO-MGX82XX-RPM-RSRC-PART-MIB", "rpmIfRscPrtRowStatus"), ("CISCO-MGX82XX-RPM-RSRC-PART-MIB", "rpmIfRscPrtIngrPctBandwidth"), ("CISCO-MGX82XX-RPM-RSRC-PART-MIB", "rpmIfRscPrtEgrPctBandwidth"), ("CISCO-MGX82XX-RPM-RSRC-PART-MIB", "rpmIfRscPrtVpiLow"), ("CISCO-MGX82XX-RPM-RSRC-PART-MIB", "rpmIfRscPrtVpiHigh"), ("CISCO-MGX82XX-RPM-RSRC-PART-MIB", "rpmIfRscPrtVciLow"), ("CISCO-MGX82XX-RPM-RSRC-PART-MIB", "rpmIfRscPrtVciHigh"), ("CISCO-MGX82XX-RPM-RSRC-PART-MIB", "rpmIfRscPrtMaxChans"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
cmrRsrcPartMIBGroup = cmrRsrcPartMIBGroup.setStatus('current')
if mibBuilder.loadTexts: cmrRsrcPartMIBGroup.setDescription('The collection of objects related to configuration of Resource partition.')
mibBuilder.exportSymbols("CISCO-MGX82XX-RPM-RSRC-PART-MIB", ciscoMgx82xxRpmRsrcPartMIB=ciscoMgx82xxRpmRsrcPartMIB, rpmIfCnfRscPartEntry=rpmIfCnfRscPartEntry, cmrRsrcPartMIBGroups=cmrRsrcPartMIBGroups, rpmIfRscPrtVpiLow=rpmIfRscPrtVpiLow, rpmIfRscPrtVciLow=rpmIfRscPrtVciLow, PYSNMP_MODULE_ID=ciscoMgx82xxRpmRsrcPartMIB, cmrRsrcPartMIBCompliance=cmrRsrcPartMIBCompliance, rpmIfRscPartIfNum=rpmIfRscPartIfNum, rpmIfRscPrtIngrPctBandwidth=rpmIfRscPrtIngrPctBandwidth, cmrRsrcPartMIBGroup=cmrRsrcPartMIBGroup, cmrRsrcPartMIBConformance=cmrRsrcPartMIBConformance, rpmIfCnfResPart=rpmIfCnfResPart, rpmIfRscPrtEgrPctBandwidth=rpmIfRscPrtEgrPctBandwidth, rpmIfRscPartCtrlrNum=rpmIfRscPartCtrlrNum, rpmIfRscPrtVpiHigh=rpmIfRscPrtVpiHigh, rpmIfRscPrtRowStatus=rpmIfRscPrtRowStatus, rpmIfCnfRscPartTable=rpmIfCnfRscPartTable, cmrRsrcPartMIBCompliances=cmrRsrcPartMIBCompliances, rpmIfRscPrtVciHigh=rpmIfRscPrtVciHigh, rpmIfRscPrtMaxChans=rpmIfRscPrtMaxChans, rpmIfRscSlotNum=rpmIfRscSlotNum)
| [
2,
198,
2,
9485,
15571,
7378,
337,
9865,
8265,
36159,
8220,
12,
20474,
55,
6469,
8051,
12,
49,
5868,
12,
6998,
7397,
12,
30709,
12,
8895,
33,
357,
4023,
1378,
16184,
76,
489,
8937,
13,
785,
14,
79,
893,
11632,
8,
198,
2,
7054,
4... | 2.658626 | 4,353 |
#!/usr/bin/env python3
import argparse
import os
import sys
import yaml
from pathlib import Path
if __name__ == '__main__':
main()
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
11748,
1822,
29572,
198,
11748,
28686,
198,
11748,
25064,
198,
11748,
331,
43695,
198,
6738,
3108,
8019,
1330,
10644,
628,
198,
198,
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834... | 2.816327 | 49 |
import random
import time
import pygame
import QuickJSON
import copy
from pathfinding.core.diagonal_movement import DiagonalMovement
from pathfinding.core.grid import Grid
from pathfinding.finder.a_star import AStarFinder
from octagon.utils import render_text, var, img, cout, mp_scene
from octagon.environment import hud, camera
from octagon.sprites import block
| [
11748,
4738,
198,
11748,
640,
198,
11748,
12972,
6057,
198,
11748,
12029,
40386,
198,
11748,
4866,
198,
6738,
3108,
41070,
13,
7295,
13,
10989,
27923,
62,
21084,
434,
1330,
6031,
27923,
21774,
434,
198,
6738,
3108,
41070,
13,
7295,
13,
... | 3.66 | 100 |
from setuptools import setup
import os
# Get version without importing, which avoids dependency issues
setup(
    name='imbox',
    # get_version() and read() are helpers defined elsewhere in this
    # setup script; they read the version and README from disk without
    # importing the imbox package (avoids dependency issues at build time).
    version=get_version(),
    description="Python IMAP for Human beings",
    long_description=read('README.md'),
    keywords='email, IMAP, parsing emails',
    author='Martin Rusev',
    author_email='martin@amon.cx',
    url='https://github.com/martinrusev/imbox',
    license='MIT',
    packages=['imbox', 'imbox.vendors'],
    package_dir={'imbox': 'imbox'},
    install_requires=[
        'chardet',
    ],
    python_requires='>=3.3',
    zip_safe=False,
    classifiers=[
        'Programming Language :: Python',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
    ],
    test_suite='tests',
)
| [
6738,
900,
37623,
10141,
1330,
9058,
198,
11748,
28686,
628,
198,
2,
3497,
2196,
1231,
33332,
11,
543,
30940,
20203,
2428,
628,
198,
198,
40406,
7,
198,
220,
220,
220,
1438,
11639,
320,
3524,
3256,
198,
220,
220,
220,
2196,
28,
1136,
... | 2.565934 | 364 |
#!/usr/bin/env python3
#
# Written in 2016 by Calvin Ardi <calvin@isi.edu>
#
# To the extent possible under law, the author(s) have dedicated all copyright
# and related and neighboring rights to this software to the public domain
# worldwide. This software is distributed without any warranty.
#
# You should have received a copy of the CC0 Public Domain Dedication along
# with this software.
# If not, see <http://creativecommons.org/publicdomain/zero/1.0/>.
#
"""This snippet prints out an unmodified proofpoint "protected" URL.
Usage:
./decode.py url
Args:
url: a proofpoint url (usually starts with urldefense.proofpoint.com)
Returns:
A decoded URL string.
"""
import sys
import urllib.request, urllib.parse, urllib.error
def decode_proofpoint_url(encoded_url):
    """Decode a proofpoint v2 "protected" URL back to the original URL.

    Proofpoint "protected" URLs take the form
    ``https://urldefense.proofpoint.com/v2/url?[params]`` where the ``u``
    parameter carries the original URL in a safe encoding: '%' is written
    as '-' and '/' is written as '_'.  The other parameters are not needed
    to recover the URL; from observation (undocumented by Proofpoint):
    c, d are per-organization constants, e is usually empty, r is a
    per-recipient identifier, and m, s look like hashes/signatures --
    TODO confirm against more samples.

    Args:
        encoded_url: the full proofpoint URL, as received.

    Returns:
        The decoded original URL string.

    Raises:
        ValueError: if the URL carries no 'u' query parameter, i.e. it
            does not look like a proofpoint-protected URL.
    """
    query = urllib.parse.urlparse(encoded_url).query
    param = urllib.parse.parse_qs(query)
    if 'u' not in param:
        raise ValueError('check if URL is a proofpoint URL')
    # Undo the safe encoding, then percent-decode the result.
    u = (param['u'][0].replace('-', '%')
                      .replace('_', '/'))
    return urllib.parse.unquote(u)


if __name__ == '__main__':
    if len(sys.argv) != 2:
        sys.exit('Usage: %s encoded_url' % sys.argv[0])
    try:
        print(decode_proofpoint_url(sys.argv[1]))
    except ValueError as err:
        sys.exit('ERROR: %s' % err)
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
198,
2,
198,
2,
22503,
287,
1584,
416,
25017,
943,
10989,
1279,
9948,
7114,
31,
23267,
13,
15532,
29,
198,
2,
198,
2,
1675,
262,
6287,
1744,
739,
1099,
11,
262,
1772,
7,
82,
... | 2.753719 | 605 |
#! /usr/bin/env python
"""
IBIS-AMI model source code, AMI file, and IBIS file configuration utility.
Original author: David Banas
Original date: February 26, 2016
This script gets called from a makefile, when any of the following need rebuilding:
* a C++ source code file
* a ``*.AMI`` file
* a ``*.IBS`` file
All three will be rebuilt.
(We rebuild all three, because it doesn't take very long, and we can
insure consistency this way.)
This gets triggered by one of two things:
#. The common model configuration information has changed, or
#. One of the EmPy template files was updated.
The idea, here, is that the ``*.IBS`` file, the ``*.AMI`` file, and the
C++ source file should be configured from a common model configuration
file, so as to ensure consistency between the three.
Copyright (c) 2016 David Banas; all rights reserved World wide.
"""
import importlib.util
from datetime import date
from pathlib import Path
import click
import em
# Lookup table mapping the configuration file's abstract parameter types to
# the concrete C type emitted in the C++ source, the type name written into
# the *.AMI file, and the C++ getter used to read the value back out of the
# parsed AMI parameter tree.
param_types = {
    "INT": {"c_type": "int", "ami_type": "Integer", "getter": "get_param_int"},
    "FLOAT": {"c_type": "float", "ami_type": "Float", "getter": "get_param_float"},
    "BOOL": {"c_type": "bool", "ami_type": "Boolean", "getter": "get_param_bool"},
    "STRING": {"c_type": "char *", "ami_type": "String", "getter": "get_param_str"},
}
def print_param(indent, name, param):
    """
    Print AMI parameter specification. Handle nested parameters, via recursion.

    Args:
        indent (str): String containing some number of spaces.
        name (str): Parameter name.
        param (dict): Dictionary containing parameter definition fields.
    """
    print(indent, f"({name}")
    if "subs" in param:
        # Branch node: recurse into each sub-parameter, then emit the
        # branch's own optional (Description ...) clause.
        for key in param["subs"]:
            print_param(indent + " ", key, param["subs"][key])
        if "description" in param:
            print(indent + " ", f"(Description {param['description']})")
    else:
        # Leaf node: emit the standard AMI fields in canonical order.
        for (fld_name, fld_key) in [
            ("Usage", "usage"),
            ("Type", "type"),
            ("Format", "format"),
            ("Default", "default"),
            ("Description", "description"),
        ]:
            # Trap the special cases.
            if fld_name == "Type":
                # Translate the internal type name to its AMI spelling.
                print(indent, " (Type", param_types[param["type"]]["ami_type"], ")")
            elif fld_name == "Default":
                # The (Value ...) clause already carries the default for
                # Value-format parameters, so no separate clause is emitted.
                if param["format"] == "Value":
                    pass
            elif fld_name == "Format":
                if param["format"] == "Value":
                    print(indent, " (Value", param["default"], ")")
                elif param["format"] == "List":
                    # List formats carry the choices and matching labels.
                    print(indent, " (List", end=" ")
                    for item in param["values"]:
                        print(item, end=" ")
                    print(")")
                    print(indent, " (List_Tip", end=" ")
                    for item in param["labels"]:
                        print(item, end=" ")
                    print(")")
                else:
                    # Range-style formats: <format> <default> <min> <max>.
                    print(indent, f" ({param['format']}", param["default"], param["min"], param["max"], ")")
            # Execute the default action.
            else:
                print(indent, f" ({fld_name}", param[fld_key], ")")
    print(indent, ")")
def print_code(pname, param):
    """
    Print C++ code needed to query AMI parameter tree for a particular leaf.

    Args:
        pname (str): Parameter name.
        param (dict): Dictionary containing parameter definition fields.
    """
    # Push this node's name so nested getters address the leaf by full path.
    print(" ", f'node_names.push_back("{pname}");')
    if "subs" in param:
        # Branch node: recurse into each sub-parameter.
        for key in param["subs"]:
            print_code(key, param["subs"][key])
    else:
        # Leaf node: only input-capable parameters generate getter code.
        if param["usage"] == "In" or param["usage"] == "InOut":
            ptype = param["type"]
            print(f" {param_types[ptype]['c_type']} {pname};")
            if ptype == "BOOL":
                # C++ bool literals must be lower-case ("true"/"false").
                print(f" {pname} = {param_types[ptype]['getter']}(node_names, {param['default'].lower()});")
            else:
                print(f" {pname} = {param_types[ptype]['getter']}(node_names, {param['default']});")
    print(" ", "node_names.pop_back();")
def ami_generator(py_file):
    """Read in the ``py_file`` and cpp.em file then generate a ibis, ami and cpp.

    Args:
        py_file: path to the model configuration file (``*.py``).

    Side effects:
        Writes ``<model>.cpp``, ``<model>.ami`` and ``<model>.ibs`` next to
        ``py_file`` by expanding the matching EmPy templates.  The generic
        ami/ibs templates live next to this script; the cpp template is
        expected next to the configuration file.
    """
    file_base_name = Path(py_file).stem

    # Read model configuration information.
    print(f"Reading model configuration information from file: {py_file}.")
    spec = importlib.util.spec_from_file_location(file_base_name, py_file)
    cfg = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(cfg)

    # Configure the 3 files.
    for ext in ["cpp", "ami", "ibs"]:
        out_file = Path(py_file).with_suffix(f".{ext}")
        if ext == "ami":
            em_file = Path(__file__).parent.joinpath("generic.ami.em")
        elif ext == "ibs":
            em_file = Path(__file__).parent.joinpath("generic.ibs.em")
        else:
            em_file = out_file.with_suffix(".cpp.em")

        print(f"Building '{out_file}' from '{em_file}'...")
        # Fix: the original rebound `out_file` to the open file object,
        # shadowing the Path; keep the handle under a separate name.
        with open(out_file, "w", encoding="UTF-8") as out_stream:
            interpreter = em.Interpreter(
                output=out_stream,
                globals={
                    "ami_params": cfg.ami_params,
                    "ibis_params": cfg.ibis_params,
                    "param_types": param_types,
                    "model_name": cfg.kFileBaseName,
                    "description": cfg.kDescription,
                    "date": str(date.today()),
                },
            )
            try:
                with open(em_file, encoding="UTF-8") as em_file_object:
                    interpreter.file(em_file_object)
            finally:
                # Always release the interpreter, even if expansion fails.
                interpreter.shutdown()
# NOTE: click renders the docstring below as the command's --help text,
# so it is user-facing and left unchanged.
@click.command(context_settings=dict(help_option_names=["-h", "--help"]))
@click.argument("template", type=click.Path(exists=True, resolve_path=True))
@click.version_option()
def main(template):
    """Configure IBIS-AMI model C++ source code, IBIS model, and AMI file.

    This command generates three files based off the input config file.
    It expects a .cpp.em file to be located in the same directory so that it can
    generate a cpp file from the config file and template file.

    template: name of model configuration file (*.py)
    """
    # click has already validated that `template` exists and resolved it
    # to an absolute path; delegate the actual generation work.
    ami_generator(template)
| [
2,
0,
1220,
14629,
14,
8800,
14,
24330,
21015,
198,
198,
37811,
198,
9865,
1797,
12,
43870,
2746,
2723,
2438,
11,
3001,
40,
2393,
11,
290,
34782,
1797,
2393,
8398,
10361,
13,
198,
198,
20556,
1772,
25,
3271,
10274,
292,
198,
198,
20... | 2.220238 | 2,856 |
'''
name.py
Common name functions
'''
import logging
from nameparser import HumanName
logging.getLogger(__name__).addHandler(logging.NullHandler())
def first_last(name):
    """Return ``name`` rendered as "First Last".

    Args:
        name (str): a full name in any format nameparser understands.

    Returns:
        str: the first and last name components joined by one space.
    """
    parsed = HumanName(name)
    return f"{parsed.first} {parsed.last}"
def first_last_pair(name):
    """Return ``name`` split into a (first, last) pair.

    Args:
        name (str): a full name in any format nameparser understands.

    Returns:
        tuple: (first, last) name components.
    """
    parsed = HumanName(name)
    return parsed.first, parsed.last
def last_first(name):
    """Return ``name`` rendered as "Last, First".

    Args:
        name (str): a full name in any format nameparser understands.

    Returns:
        str: the last name, a comma and space, then the first name.
    """
    parsed = HumanName(name)
    return f"{parsed.last}, {parsed.first}"
def namestrip(nm, tostrip=None):
    '''
    Strips various characters out of name. Used for better matching.

    Args:
        nm(str): name to clean up
        tostrip(list): of str; substrings to remove.  Defaults to common
            suffixes and punctuation ('Jr.', 'III', 'IV', 'II',
            apostrophe, period, and commas).

    Returns:
        str
    '''
    # A falsy (None or empty) tostrip falls back to the defaults.
    # Order matters: 'III' must be removed before 'II' would match it.
    if not tostrip:
        tostrip = ['Jr.', 'III', 'IV', 'II', "'", '.', ', ', ',']
    for fragment in tostrip:
        nm = nm.replace(fragment, '')
    # A 'V' suffix is handled positionally (last whole token) rather than
    # by substring replacement, which would mangle names containing 'V'.
    # Split once instead of twice as the original did.
    parts = nm.split()
    if parts and parts[-1] == 'V':
        nm = ' '.join(parts[:-1])
    return nm.strip()
if __name__ == '__main__':
pass
| [
7061,
6,
198,
3672,
13,
9078,
198,
17227,
1438,
5499,
198,
198,
7061,
6,
198,
198,
11748,
18931,
198,
6738,
1438,
48610,
1330,
5524,
5376,
198,
198,
6404,
2667,
13,
1136,
11187,
1362,
7,
834,
3672,
834,
737,
2860,
25060,
7,
6404,
26... | 2.170648 | 586 |
from distutils.util import convert_path
import setuptools
# Load the readme
with open("README.md", "r") as fh:
    long_description = fh.read()

# Load the version info
# Executing version.py into a scratch namespace avoids importing the
# package (and hence its dependencies) at build time.
version_namespace = {}
ver_path = convert_path("recommender_metrics/version.py")
with open(ver_path) as ver_file:
    exec(ver_file.read(), version_namespace)

# Execute the setup
setuptools.setup(
    name="recommender-metrics",
    version=version_namespace["__version__"],
    author="Niall Twomey",
    author_email="twomeynj@gmail.com",
    description="Recommender metric evaluation",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/niall-twomey/recommender_metrics",
    install_requires=["numpy", "scikit-learn", "tqdm"],
    packages=setuptools.find_packages(),
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
        "Topic :: Scientific/Engineering :: Artificial Intelligence",
    ],
    python_requires=">=3.6",  # Hasn't been tested below this
    test_suite="nose.collector",
    tests_require=["nose"],
)
| [
6738,
1233,
26791,
13,
22602,
1330,
10385,
62,
6978,
198,
198,
11748,
900,
37623,
10141,
198,
198,
2,
8778,
262,
1100,
1326,
198,
4480,
1280,
7203,
15675,
11682,
13,
9132,
1600,
366,
81,
4943,
355,
277,
71,
25,
198,
220,
220,
220,
8... | 2.78436 | 422 |
# -*- coding: utf-8 -*-
# Author: Jiajun Ren <jiajunren0522@gmail.com>
import numpy as np
from renormalizer.mps import Mpo, MpDm, ThermalProp
from renormalizer.spectra.base import SpectraTdMpsJobBase
from renormalizer.mps.mps import BraKetPair
from renormalizer.utils import CompressConfig, EvolveConfig
import os
import logging
logger = logging.getLogger(__name__)
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
2,
6434,
25,
449,
544,
29741,
7152,
1279,
73,
544,
29741,
918,
2713,
1828,
31,
14816,
13,
785,
29,
198,
11748,
299,
32152,
355,
45941,
198,
198,
6738,
8851,
6636,
7509... | 2.776119 | 134 |
"""Handle the raw data input/output and interface with external formats."""
from obspy.core import read
from obspy.core.utcdatetime import UTCDateTime
import pandas as pd
import datetime as dt
def load_stream(path):
    """Loads a Stream object from the file at path.

    Args:
        path: path to the input file, (for supported formats see,
            http://docs.obspy.org/tutorial/code_snippets/reading_seismograms.html)

    Returns:
        an obspy.core.Stream object
        (http://docs.obspy.org/packages/autogen/obspy.core.stream.Stream.html#obspy.core.stream.Stream)
    """
    stream = read(path)
    # Consolidate split traces in place (obspy Stream.merge) so each
    # channel is represented by a single continuous trace.
    stream.merge()
    # assert len(stream) == 3  # We need X,Y,Z traces
    # NOTE(review): the assert above was disabled; some inputs apparently
    # do not carry exactly three components -- confirm before re-enabling.
    return stream
def load_catalog(path):
    """Loads a event catalog from a .csv file.

    Each row in the catalog references a know seismic event.

    Args:
        path: path to the input .csv file.

    Returns:
        catalog: A Pandas dataframe, guaranteed to carry a
            'utc_timestamp' column.
    """
    catalog = pd.read_csv(path)
    # Derive utc_timestamp from origintime when the catalog does not
    # already provide it (comprehension replaces the original
    # build-by-append loop; behavior is unchanged).
    if 'utc_timestamp' not in catalog.columns:
        catalog['utc_timestamp'] = [
            UTCDateTime(origintime).timestamp
            for origintime in catalog.origintime.values
        ]
    return catalog
| [
37811,
37508,
262,
8246,
1366,
5128,
14,
22915,
290,
7071,
351,
7097,
17519,
526,
15931,
198,
198,
6738,
909,
2777,
88,
13,
7295,
1330,
1100,
198,
6738,
909,
2777,
88,
13,
7295,
13,
315,
10210,
265,
8079,
1330,
18119,
10430,
7575,
198... | 2.570577 | 503 |
from datastructures.stacks import Stack
from datastructures.trees.binary_tree_node import BinaryTreeNode
| [
6738,
4818,
459,
1356,
942,
13,
301,
4595,
1330,
23881,
198,
6738,
4818,
459,
1356,
942,
13,
83,
6037,
13,
39491,
62,
21048,
62,
17440,
1330,
45755,
27660,
19667,
628
] | 3.533333 | 30 |
import os
import git
import time
import unittest
from roundabout.config import Config
from roundabout.git_client import Git, GitException
from tests import utils
| [
11748,
28686,
198,
11748,
17606,
198,
11748,
640,
198,
11748,
555,
715,
395,
198,
198,
6738,
2835,
10755,
13,
11250,
1330,
17056,
198,
6738,
2835,
10755,
13,
18300,
62,
16366,
1330,
15151,
11,
15151,
16922,
198,
6738,
5254,
1330,
3384,
... | 3.837209 | 43 |
subjects=["I", "You"]
verbs=["Play", "Love"]
objects=["Hockey","Football"]
for i in range(len(subjects)):
for j in range(len(verbs)):
for k in range(len(objects)):
sentence = "%s %s %s." % (subjects[i], verbs[j], objects[k])
print(sentence)
| [
32796,
82,
28,
14692,
40,
1600,
366,
1639,
8973,
198,
46211,
28,
14692,
11002,
1600,
366,
18565,
8973,
198,
48205,
28,
14692,
39,
8337,
2430,
37316,
8973,
198,
1640,
1312,
287,
2837,
7,
11925,
7,
32796,
82,
8,
2599,
198,
220,
220,
2... | 2.216 | 125 |
__version__ = '1.28.0dev'
| [
834,
9641,
834,
796,
705,
16,
13,
2078,
13,
15,
7959,
6,
198
] | 2 | 13 |
from Xdmf import *
if __name__ == "__main__":
#//initialization begin
exampleMap = XdmfMap.New()
#//initialization end
#//initializationnode begin
#create attributes for each task id
#the index of the node id in the attribute is the local node id
map1Attribute = XdmfAttribute.New()
map1Attribute.setName("Test Attribute")
map1Attribute.setType(XdmfAttributeType.Scalar())
map1Attribute.setCenter(XdmfAttributeCenter.Node())
map1Vals = [1,2,3,4,5,7,9]
map1Attribute.insertAsInt32(0, map1Vals)
map2Attribute = XdmfAttribute.New()
map2Attribute.setName("Test Attribute")
map2Attribute.setType(XdmfAttributeType.Scalar())
map2Attribute.setCenter(XdmfAttributeCenter.Node())
map2Vals = [9,8,7,4,3]
map2Attribute.insertAsInt32(0, map2Vals)
#insert the attributes into a vector
#the id of the attribute in the vector is equal to the task id
testVector = AttributeVector()
testVector.push_back(map1Attribute)
testVector.push_back(map2Attribute)
exampleMapVector = XdmfMap.New(testVector)
#returns a vector of maps that holds the equivalencies for the nodes provided
#for example if Attribute 1 had globalNodeID 3 at localNodeID 2
#and Attribute 3 had globalNodeID 3 at localNodeID 5
#then map 1 would have an entry of (3, 5, 2)
#and map 3 would have an entry of (1, 2, 5)
#The entries are formatted (remoteTaskID, remoteLocalNodeID, localNodeID)
#//initializationnode end
#//inserttuple begin
newRemoteTaskID = 4
newLocalNodeID = 7
newRemoteLocalNodeID = 3
exampleMap.insert(newRemoteTaskID, newLocalNodeID, newRemoteLocalNodeID)
#This inserts an entry of (4, 7, 3) into the map
#//inserttuple end
#//setMap begin
newTaskMap = XdmfMapMap()
newNodeIdMap = XdmfMapNodeIdMap()
newNodeIdMap[2] = (3, 6, 8)
newNodeIdMap[3] = (3,)
newNodeIdMap[4] = (7,9)
#newNodeIdMap now contains the following
#(2, 3)
#(2, 6)
#(2, 8)
#(3, 3)
#(4, 7)
#(4, 9)
# Example walkthrough of the XdmfMap Python API: building a task->localNode->
# remoteNode map, reading it back, and attaching HDF5 heavy-data controllers.
# NOTE(review): Python 2 syntax (`print val`); `newTaskMap` and `newNodeIdMap`
# are created earlier in this example, outside this excerpt -- confirm upstream.
secondNodeIdMap = XdmfMapNodeIdMap()
secondNodeIdMap[5] = (3, 6, 8)
secondNodeIdMap[7] = (3,)
secondNodeIdMap[9] = (7,9)
#secondNodeIdMap now contains the following
#(5, 3)
#(5, 6)
#(5, 8)
#(7, 3)
#(9, 7)
#(9, 9)
newTaskMap[1] = newNodeIdMap
newTaskMap[2] = secondNodeIdMap
exampleMap = XdmfMap.New()
exampleMap.setMap(newTaskMap)
#(1, 2, 3)
#(1, 2, 6)
#(1, 2, 8)
#(1, 3, 3)
#(1, 4, 7)
#(1, 4, 9)
#(2, 5, 3)
#(2, 5, 6)
#(2, 5, 8)
#(2, 7, 3)
#(2, 9, 7)
#(2, 9, 9)
#Is now stored in exampleMap
#//setMap end
#//setName begin
newName = "New Name"
exampleMap.setName(newName)
#//setName end
#//getName begin
exampleName = exampleMap.getName()
#//getName end
#//getMap begin
#Assuming that exampleMap is a shared pointer to an XdmfMap object filled with the following tuples
#(1, 1, 9)
#(1, 2, 8)
#(2, 3, 7)
#(2, 4, 6)
#(3, 5, 5)
#(3, 6, 4)
taskIDMap = exampleMap.getMap()
# Iterate the SWIG-wrapped map manually; the counter-and-break pattern below
# stops exactly at size() because advancing the iterator past the end is what
# triggers the SWIG failure mentioned in the comments.
i = 0
for val in taskIDMap:
 print val
 i = i + 1
 if i == taskIDMap.size():
  break
#This prints out all the task IDs
#unless the break is called on the last iteration the program will fail because of an issue with SWIG
nodeIDMap = taskIDMap[1]
#nodeIDMap now contains the following tuples because it retrieved the tuples associated with taskID 1
#(1, 9)
#(2, 8)
i = 0
for val in nodeIDMap:
 print val
 i = i + 1
 if i == nodeIDMap.size():
  break
#This prints out all the local node IDs
#unless the break is called on the last iteration the program will fail because of an issue with SWIG
for val in nodeIDMap[1]:
 print val
#prints out all the remote node values associated with taskID 1 and localNode 1
#//getMap end
#//getRemoteNodeIds begin
# Same traversal as above, but fetching only the node map for task 1 directly.
nodeIDMap = exampleMap.getRemoteNodeIds(1)
#nodeIDMap now contains the following tuples because it retrieved the tuples associated with taskID 1
#(1, 9)
#(2, 8)
i = 0
for val in nodeIDMap:
 print val
 i = i + 1
 if i == nodeIDMap.size():
  break
#This prints out all the local node IDs
#unless the break is called on the last iteration the program will fail because of an issue with SWIG
for val in nodeIDMap[1]:
 print val
#prints out all the remote node values associated with taskID 1 and localNode 1
#//getRemoteNodeIds end
#//isInitialized begin
# Lazily-loaded maps must be read from disk before their values can be used.
if not(exampleMap.isInitialized()):
 exampleMap.read()
#//isInitialized end
#//release begin
# Frees the in-core values; the map can be re-read later if still needed.
exampleMap.release()
#//release end
#//setHeavyDataControllers begin
# Three controllers are required: remote task ids, local node ids, and remote
# local node ids, each pointing at its own HDF5 dataset.
hdf5FilePath = "The HDF5 file path goes here"
hdf5SetPath = "The HDF5 set path goes here"
startIndex = 0#start at the beginning
readStride = 1#read all values
readNumber = 10#read 10 values
newRemoteTaskController = XdmfHDF5Controller.New(
 hdf5FilePath, hdf5SetPath, XdmfArrayType.Int32(),
 startIndex, readStride, readNumber)
hdf5FilePath = "The HDF5 file path for the local nodes goes here"
hdf5SetPath = "The HDF5 set path for the local nodes goes here"
newLocalNodeController = XdmfHDF5Controller.New(
 hdf5FilePath, hdf5SetPath, XdmfArrayType.Int32(),
 startIndex, readStride, readNumber)
hdf5FilePath = "The HDF5 file path for the remote local nodes goes here"
hdf5SetPath = "The HDF5 set path for the remote local nodes goes here"
newRemoteLocalNodeController = XdmfHDF5Controller.New(
 hdf5FilePath, hdf5SetPath, XdmfArrayType.Int32(),
 startIndex, readStride, readNumber)
exampleMap = XdmfMap.New()
exampleMap.setHeavyDataControllers(newRemoteTaskController, newLocalNodeController, newRemoteLocalNodeController)
#//setHeavyDataControllers end
| [
6738,
1395,
36020,
69,
1330,
1635,
198,
198,
361,
11593,
3672,
834,
6624,
366,
834,
12417,
834,
1298,
628,
220,
220,
220,
220,
220,
220,
220,
1303,
1003,
36733,
1634,
2221,
628,
220,
220,
220,
220,
220,
220,
220,
1672,
13912,
796,
1... | 2.110763 | 3,196 |
import gpss_pb2_grpc
| [
11748,
27809,
824,
62,
40842,
17,
62,
2164,
14751,
198
] | 2.1 | 10 |
import json
import traceback
import boto3
from boto3.dynamodb.conditions import Attr
# DynamoDB table holding the quote records.
TABLE_NAME = 'Quotes'
# Name of the sub-resource for the "daily quote" endpoint.
# NOTE(review): presumably compared against the request path by a handler
# defined later in this file -- confirm against the routing code.
DAILY_RESOURCE_NAME = 'daily'
# Module-level handles so warm AWS Lambda invocations reuse the connection
# instead of re-creating it on every request.
dynamodb = boto3.resource('dynamodb')
table = dynamodb.Table(TABLE_NAME)
| [
11748,
33918,
198,
11748,
12854,
1891,
198,
11748,
275,
2069,
18,
198,
6738,
275,
2069,
18,
13,
67,
4989,
375,
65,
13,
17561,
1756,
1330,
3460,
81,
628,
198,
38148,
62,
20608,
796,
705,
23138,
6,
198,
5631,
33340,
62,
19535,
31033,
... | 2.666667 | 81 |
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from pandas_datareader import data as web
import yfinance as yf
yf.pdr_override()
import os
import dadosCart as c

# Interactive portfolio console.
#
# Relies on the local module `dadosCart` (aliased `c`) for the portfolio data:
# category totals (valorAcoes, valorFI, valorBDR, cartCaixa, cartCrypto),
# per-asset lists (cartAcao, cartFI, cartBDR -- each entry is
# [ticker, quantity, average purchase price]), the monthly history
# (meses, valorCartMensal) and today's date string (today, "YYYY-MM-DD").
# NOTE(review): `os.system('cls')` is Windows-only -- confirm target platform.


def _imprime_percentual(prefixo, razao):
    # Print a profitability ratio as a signed percentage: green for gain,
    # red for loss, white for break-even (ANSI escape codes).
    if razao > 1:
        print(f'{prefixo}\033[1;32m+{((razao-1)*100):.2f}%\033[0;0m\n')
    elif razao < 1:
        print(f'{prefixo}\033[1;31m{((razao-1)*100):.2f}%\033[0;0m\n')
    elif razao == 1:
        print(f'{prefixo}\033[1;37m{((razao-1)*100):.2f}%\033[0;0m\n')


decisao = 1
while decisao != 5:
    os.system('cls')
    decisao = int(input('Digite o que você deseja fazer:\n1-Calcular carteira\n2-Acompanhamento ao vivo\n3-Evolução mensal da carteira\n4-Análise individual de ativos da carteira\n5-Sair\n'))
    os.system('cls')
    if decisao == 1:
        # Portfolio breakdown by category, as percentages of the total value.
        total = c.valorAcoes + c.valorFI + c.valorBDR + c.cartCaixa
        print(
            f"""O valor da sua carteira é de R${total} e ela atualmente está distribuida da seguinte forma:\n
            Ações = {c.valorAcoes*100/total:.2f}% = R${c.valorAcoes}\n
            Fundos Imobiliários = {c.valorFI*100/total:.2f}% = R${c.valorFI}\n
            Internacional = {(c.valorBDR)*100/total:.2f}% = R${c.valorBDR}\n
            Caixa = {c.cartCaixa*100/total:.2f}% = R${c.cartCaixa}\n\n
            Além disso também possui R${c.cartCrypto} em Cryptomoedas\n"""
        )
        # int() truncation matches the original pie-chart labels.
        fatias = np.array([
            int(c.valorAcoes * 100 / total),
            int(c.valorFI * 100 / total),
            int((c.valorBDR) * 100 / total),
            int(c.cartCaixa * 100 / total),
        ])
        rotulos = ['Ação', 'FII', 'Internacional', 'Caixa']
        plt.pie(fatias, labels=rotulos, explode=[0, 0, 0, 0], shadow=True)
        plt.show()
    elif decisao == 2:
        # Live view: fetch today's close for each asset of the chosen category.
        opcao = 1
        while opcao:
            opcao = int(input("Digite o que você deseja ver:\n1-Carteira Completa\n2-Ações\n3-FII\n4-BDR\n5-Sair\n"))
            if opcao == 1:
                ativos = c.cartAcao + c.cartFI + c.cartBDR
            elif opcao == 2:
                ativos = c.cartAcao
            elif opcao == 3:
                ativos = c.cartFI
            elif opcao == 4:
                ativos = c.cartBDR
            elif opcao == 5:
                os.system('cls')
                break
            else:
                # BUG FIX: an invalid option previously fell through and
                # reused (or crashed on) the stale asset selection.
                continue
            os.system('cls')
            # BUG FIX: these accumulators were module-level and never reset,
            # so revisiting this menu mixed totals from earlier selections
            # and corrupted the overall rentabilidade figure.
            somaPal = 0
            somaEu = 0
            for ticker, quantidade, precoMedio in ativos:
                df = web.get_data_yahoo(ticker, start=c.today)["Adj Close"].to_frame()
                # BUG FIX: read the last close directly instead of parsing the
                # DataFrame's printed representation with an '@' sentinel,
                # which truncated the value and broke on formatting changes.
                pal = float(df.iloc[-1, 0])
                somaPal += pal
                somaEu += precoMedio
                print(f'\n{ticker.replace(".SA","")} {c.today[8:10]}/{c.today[5:7]}\nValor de fechamento do dia: {pal:.2f}\nPreço Médio de Aquisição: {precoMedio}\nValor Investido: {precoMedio*quantidade:.2f}\nValor Atual em Carteira: {(pal*quantidade):.2f}')
                _imprime_percentual('Rentabilidade: ', pal / precoMedio)
                print('-'*80+'\n')
            print('Rentabilidade atual: ')
            # Overall profitability: sum of current closes over sum of
            # average purchase prices for the selected assets.
            rentTotal = somaPal / somaEu
            _imprime_percentual('', rentTotal)
    elif decisao == 3:
        # Monthly evolution of the portfolio value.
        plt.plot(c.meses, c.valorCartMensal, 'r-',)
        plt.title('Evolução Mensal da Carteira')
        plt.show()
    elif decisao == 4:
        # Per-asset price history chart.
        opcao = 1
        while opcao:
            opcao = int(input("Digite o que você deseja ver:\n1-Ações\n2-FII\n3-BDR\n4-Sair\n"))
            if opcao == 1:
                ativos = c.cartAcao
            elif opcao == 2:
                ativos = c.cartFI
            elif opcao == 3:
                ativos = c.cartBDR
            elif opcao == 4:
                os.system('cls')
                break
            else:
                # BUG FIX: guard against invalid options (see menu 2).
                continue
            print("Qual ativo você deseja análisar?\n")
            for indice, ativo in enumerate(ativos, start=1):
                print(f'{indice}-{ativo[0].replace(".SA","")}\n')
            escolha = int(input())
            df = web.get_data_yahoo(ativos[escolha-1][0])["Close"]
            df.plot()
            plt.title("Histórico de fechamento (R$)")
            plt.show()
| [
11748,
299,
32152,
355,
45941,
26,
198,
11748,
19798,
292,
355,
279,
67,
198,
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
26,
198,
6738,
19798,
292,
62,
19608,
533,
5067,
1330,
1366,
355,
3992,
220,
220,
220,
198,
11748,
... | 1.663816 | 2,689 |
# List of names to display, one per line.
names = ['Juan','Pedro','Alejandro','Roberto','Enrique']
# Idiom fix: iterate the list instead of five copy-pasted indexed prints;
# output is identical and the loop keeps working if the list changes length.
for name in names:
    print(name)
14933,
796,
37250,
41,
7258,
41707,
43468,
305,
41707,
37474,
47983,
41707,
15924,
1462,
41707,
4834,
33865,
20520,
198,
4798,
7,
14933,
58,
15,
12962,
198,
4798,
7,
14933,
58,
16,
12962,
198,
4798,
7,
14933,
58,
17,
12962,
198,
4798,
... | 2.566038 | 53 |