| code stringlengths 22-1.05M | apis listlengths 1-3.31k | extract_api stringlengths 75-3.25M |
|---|---|---|
import sys
sys.path.append('/usr/users/oliverren/meng/check-worthy')
from keras.preprocessing.text import Tokenizer
from keras.preprocessing.sequence import pad_sequences
from keras.layers import Embedding, LSTM, Bidirectional, Dropout, Dense
from keras import Sequential
from src.data import debates
import numpy as np
MAX_NUM_WORDS = 1000
# data_sets[i] is the ith crossvalidation split; data_sets[i][0] says which debate is the test debate
# data_sets[i][1] are the sentences in the test set
# data_sets[i][2] are the sentences in the training set
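# e.g. unpacking one split (variable names here are illustrative only):
#   test_debate, test_sentences, train_sentences = data_sets[0]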
data_sets = debates.get_for_crossvalidation()
texts = [sentence.text for sentence in data_sets[0][1]]
texts.extend([sentence.text for sentence in data_sets[0][2]])
MAX_SEQUENCE_LENGTH = max([len(sentence.split()) for sentence in texts])
# print(MAX_SEQUENCE_LENGTH)
# the embedding is already pretrained, so whenever we go to a different dataset, we should reset the embedding layer
# so that the embedding layer uses the words in the vocab of the dataset being tested
tokenizer = Tokenizer(num_words=MAX_NUM_WORDS)
tokenizer.fit_on_texts(texts)
sequences = tokenizer.texts_to_sequences(texts)
# print(sequences)
# print(texts[0])
# print(tokenizer.word_index)
word_index = tokenizer.word_index
# print(word_index)
data = pad_sequences(sequences, maxlen=MAX_SEQUENCE_LENGTH)
# Create Embedding layer
embeddings_index = {}
f = open('/usr/users/oliverren/meng/check-worthy/data/glove/glove.6B.50d.txt')
count = 0
for line in f:
values = line.split()
if count == 0:
# print(values)
count += 1
word = values[0]
coefs = np.asarray(values[1:], dtype='float32')
embeddings_index[word] = coefs
f.close()
EMBEDDING_DIM = 50
# + 1 because indexes are positive integers
embedding_matrix = np.zeros((len(word_index) + 1, EMBEDDING_DIM))
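# (Keras' Tokenizer assigns word indexes starting from 1, so row 0 of the
# matrix stays all-zeros and doubles as the vector for padded positions.)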
for word, i in word_index.items():
embedding_vector = embeddings_index.get(word)
if embedding_vector is not None:
# words not in embedding index will be all-zeros
embedding_matrix[i] = embedding_vector
embedding_layer = Embedding(len(word_index) + 1,
                            EMBEDDING_DIM,
                            weights=[embedding_matrix],
                            input_length=MAX_SEQUENCE_LENGTH,
                            trainable=False)
# bi-directional
LSTM_OUTPOUT_DIM = 200
HIDDEN_LAYER_DIM = 200
BATCH_SIZE = 32
x_train = [sentence.text for sentence in data_sets[0][2]]
y_train = [sentence.label for sentence in data_sets[0][2]]
x_test = [sentence.text for sentence in data_sets[0][1]]
y_test = [sentence.label for sentence in data_sets[0][1]]
x_train = tokenizer.texts_to_sequences(x_train)
x_train = pad_sequences(x_train, maxlen=MAX_SEQUENCE_LENGTH)
x_test = tokenizer.texts_to_sequences(x_test)
x_test = pad_sequences(x_test, maxlen=MAX_SEQUENCE_LENGTH)
model = Sequential()
model.add(embedding_layer)
model.add(Bidirectional(LSTM(LSTM_OUTPOUT_DIM)))
model.add(Dense(1, activation='sigmoid'))
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()
model.fit(x_train, y_train,
          batch_size=BATCH_SIZE,
          epochs=200,
          validation_data=(x_test, y_test))
model_2 = Sequential()
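# NB: model_2 reuses the exact same (frozen) embedding_layer instance that
# model 1 was built with, so the two models share that layer object.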
model_2.add(embedding_layer)
model_2.add(Bidirectional(LSTM(LSTM_OUTPOUT_DIM)))
model_2.add(Dense(HIDDEN_LAYER_DIM * 4, activation='relu'))
model_2.add(Dropout(0.5))
model_2.add(Dense(HIDDEN_LAYER_DIM, activation='relu'))
model_2.add(Dropout(0.5))
model_2.add(Dense(1, activation='sigmoid'))
model_2.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
model_2.summary()
model_2.fit(x_train, y_train,
            batch_size=BATCH_SIZE,
            epochs=200,
            validation_data=(x_test, y_test))
from sklearn.metrics import (average_precision_score, precision_score,
recall_score, roc_auc_score)
def f1(y_true, y_pred):
    precision = precision_score(y_true, y_pred)
    recall = recall_score(y_true, y_pred)
    if precision + recall == 0:
        # avoid ZeroDivisionError when the model finds no true positives
        return 0.0
    return 2 * ((precision * recall) / (precision + recall))
def accuracy(y_true, y_pred):
    num_correct = len([1 for true, pred in zip(y_true, y_pred) if true == pred])
    return num_correct / len(y_true)
print('model 1')
print('f1')
print(f1(y_test, model.predict_classes(x_test).reshape(-1)))
print('accuracy')
print(accuracy(y_test, model.predict_classes(x_test).reshape(-1)))
print('model 2')
print('f1')
print(f1(y_test, model_2.predict_classes(x_test).reshape(-1)))
print('accuracy')
print(accuracy(y_test, model_2.predict_classes(x_test).reshape(-1)))
|
[
"sys.path.append",
"keras.preprocessing.sequence.pad_sequences",
"keras.Sequential",
"numpy.asarray",
"keras.layers.Dropout",
"keras.layers.LSTM",
"sklearn.metrics.recall_score",
"keras.preprocessing.text.Tokenizer",
"src.data.debates.get_for_crossvalidation",
"keras.layers.Dense",
"sklearn.metrics.precision_score"
] |
[((11, 68), 'sys.path.append', 'sys.path.append', (['"""/usr/users/oliverren/meng/check-worthy"""'], {}), "('/usr/users/oliverren/meng/check-worthy')\n", (26, 68), False, 'import sys\n'), ((561, 594), 'src.data.debates.get_for_crossvalidation', 'debates.get_for_crossvalidation', ([], {}), '()\n', (592, 594), False, 'from src.data import debates\n'), ((1033, 1067), 'keras.preprocessing.text.Tokenizer', 'Tokenizer', ([], {'num_words': 'MAX_NUM_WORDS'}), '(num_words=MAX_NUM_WORDS)\n', (1042, 1067), False, 'from keras.preprocessing.text import Tokenizer\n'), ((1276, 1328), 'keras.preprocessing.sequence.pad_sequences', 'pad_sequences', (['sequences'], {'maxlen': 'MAX_SEQUENCE_LENGTH'}), '(sequences, maxlen=MAX_SEQUENCE_LENGTH)\n', (1289, 1328), False, 'from keras.preprocessing.sequence import pad_sequences\n'), ((2679, 2729), 'keras.preprocessing.sequence.pad_sequences', 'pad_sequences', (['x_train'], {'maxlen': 'MAX_SEQUENCE_LENGTH'}), '(x_train, maxlen=MAX_SEQUENCE_LENGTH)\n', (2692, 2729), False, 'from keras.preprocessing.sequence import pad_sequences\n'), ((2785, 2834), 'keras.preprocessing.sequence.pad_sequences', 'pad_sequences', (['x_test'], {'maxlen': 'MAX_SEQUENCE_LENGTH'}), '(x_test, maxlen=MAX_SEQUENCE_LENGTH)\n', (2798, 2834), False, 'from keras.preprocessing.sequence import pad_sequences\n'), ((2845, 2857), 'keras.Sequential', 'Sequential', ([], {}), '()\n', (2855, 2857), False, 'from keras import Sequential\n'), ((3212, 3224), 'keras.Sequential', 'Sequential', ([], {}), '()\n', (3222, 3224), False, 'from keras import Sequential\n'), ((1602, 1641), 'numpy.asarray', 'np.asarray', (['values[1:]'], {'dtype': '"""float32"""'}), "(values[1:], dtype='float32')\n", (1612, 1641), True, 'import numpy as np\n'), ((2944, 2974), 'keras.layers.Dense', 'Dense', (['(1)'], {'activation': '"""sigmoid"""'}), "(1, activation='sigmoid')\n", (2949, 2974), False, 'from keras.layers import Embedding, LSTM, Bidirectional, Dropout, Dense\n'), ((3317, 3363), 'keras.layers.Dense', 'Dense', (['(HIDDEN_LAYER_DIM * 4)'], {'activation': '"""relu"""'}), "(HIDDEN_LAYER_DIM * 4, activation='relu')\n", (3322, 3363), False, 'from keras.layers import Embedding, LSTM, Bidirectional, Dropout, Dense\n'), ((3374, 3386), 'keras.layers.Dropout', 'Dropout', (['(0.5)'], {}), '(0.5)\n', (3381, 3386), False, 'from keras.layers import Embedding, LSTM, Bidirectional, Dropout, Dense\n'), ((3400, 3442), 'keras.layers.Dense', 'Dense', (['HIDDEN_LAYER_DIM'], {'activation': '"""relu"""'}), "(HIDDEN_LAYER_DIM, activation='relu')\n", (3405, 3442), False, 'from keras.layers import Embedding, LSTM, Bidirectional, Dropout, Dense\n'), ((3455, 3467), 'keras.layers.Dropout', 'Dropout', (['(0.5)'], {}), '(0.5)\n', (3462, 3467), False, 'from keras.layers import Embedding, LSTM, Bidirectional, Dropout, Dense\n'), ((3481, 3511), 'keras.layers.Dense', 'Dense', (['(1)'], {'activation': '"""sigmoid"""'}), "(1, activation='sigmoid')\n", (3486, 3511), False, 'from keras.layers import Embedding, LSTM, Bidirectional, Dropout, Dense\n'), ((3916, 3947), 'sklearn.metrics.precision_score', 'precision_score', (['y_true', 'y_pred'], {}), '(y_true, y_pred)\n', (3931, 3947), False, 'from sklearn.metrics import average_precision_score, precision_score, recall_score, roc_auc_score\n'), ((3961, 3989), 'sklearn.metrics.recall_score', 'recall_score', (['y_true', 'y_pred'], {}), '(y_true, y_pred)\n', (3973, 3989), False, 'from sklearn.metrics import average_precision_score, precision_score, recall_score, roc_auc_score\n'), ((2909, 2931), 'keras.layers.LSTM', 'LSTM', 
(['LSTM_OUTPOUT_DIM'], {}), '(LSTM_OUTPOUT_DIM)\n', (2913, 2931), False, 'from keras.layers import Embedding, LSTM, Bidirectional, Dropout, Dense\n'), ((3280, 3302), 'keras.layers.LSTM', 'LSTM', (['LSTM_OUTPOUT_DIM'], {}), '(LSTM_OUTPOUT_DIM)\n', (3284, 3302), False, 'from keras.layers import Embedding, LSTM, Bidirectional, Dropout, Dense\n')]
|
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import logging
import webapp2
from google.appengine.api import taskqueue
from appengine_module.test_results.handlers import util
from appengine_module.test_results.model.jsonresults import JsonResults
from appengine_module.test_results.model.testfile import TestFile
from infra_libs import ts_mon
from infra_libs import event_mon
class MonitoringUploadV2(webapp2.RequestHandler):
num_test_results = ts_mon.CounterMetric(
'test_results/num_test_results',
description='Number of reported test results')
def post(self):
data = json.loads(self.request.body)
master = data.get('master')
builder = data.get('builder')
build_number = data.get('build_number')
test_type = data.get('test_type')
if not master or not builder or not build_number or not test_type:
logging.error(
'Missing required parameters: (master=%s, builder=%s, '
'build_number=%s, test_type=%s)' %
(master, builder, build_number, test_type))
self.response.set_status(400)
return
# Create a proto event and send it to event_mon.
event = event_mon.Event('POINT')
test_results = event.proto.test_results
test_results.master_name = master
test_results.builder_name = builder
test_results.build_number = int(build_number)
test_results.test_type = test_type
if 'interrupted' in data:
test_results.interrupted = data['interrupted']
test_results.version = data['version']
test_results.usec_since_epoch = long(
float(data['seconds_since_epoch']) * 1000 * 1000)
def convert_test_result_type(json_val):
self.num_test_results.increment({
'result_type': json_val, 'master': master, 'builder': builder,
'test_type': test_type})
try:
return (event_mon.protos.chrome_infra_log_pb2.TestResultsEvent.
TestResultType.Value(json_val.upper().replace('+', '_')))
except ValueError:
return event_mon.protos.chrome_infra_log_pb2.TestResultsEvent.UNKNOWN
tests = data.get('flat_tests', {})
for name, test in tests.iteritems():
test_result = test_results.tests.add()
test_result.test_name = name
test_result.actual.extend(
convert_test_result_type(res) for res in test['actual'].split(' '))
test_result.expected.extend(
convert_test_result_type(res) for res in test['expected'].split(' '))
event.send()
class EventMonUploader(webapp2.RequestHandler):
num_test_results = ts_mon.CounterMetric(
'test_results/num_test_results',
description='Number of reported test results')
def post(self):
if not self.request.body:
logging.error('Missing request payload')
self.response.set_status(400)
return
try:
payload = json.loads(self.request.body)
except ValueError:
logging.error('Failed to parse request payload as JSON')
self.response.set_status(400)
return
# Retrieve test json from datastore based on task parameters.
master = payload.get('master')
builder = payload.get('builder')
build_number = payload.get('build_number')
test_type = payload.get('test_type')
if not master or not builder or not build_number or not test_type:
logging.error(
'Missing required parameters: (master=%s, builder=%s, '
'build_number=%s, test_type=%s)' %
(master, builder, build_number, test_type))
self.response.set_status(400)
return
files = TestFile.get_files(
master, builder, test_type, build_number, 'full_results.json',
load_data=True, limit=1)
if not files:
logging.error('Failed to find full_results.json for (%s, %s, %s, %s)' % (
master, builder, build_number, test_type))
self.response.set_status(404)
return
file_json = JsonResults.load_json(files[0].data)
# Create a proto event and send it to event_mon.
event = event_mon.Event('POINT')
test_results = event.proto.test_results
test_results.master_name = master
test_results.builder_name = builder
test_results.build_number = int(build_number)
test_results.test_type = test_type
if 'interrupted' in file_json:
test_results.interrupted = file_json['interrupted']
if 'version' in file_json:
test_results.version = file_json['version']
if 'seconds_since_epoch' in file_json:
test_results.usec_since_epoch = long(
float(file_json['seconds_since_epoch']) * 1000 * 1000)
def convert_test_result_type(json_val):
self.num_test_results.increment({
'result_type': json_val, 'master': master, 'builder': builder,
'test_type': test_type})
try:
return (event_mon.protos.chrome_infra_log_pb2.TestResultsEvent.
TestResultType.Value(json_val.upper().replace('+', '_')))
except ValueError:
return event_mon.protos.chrome_infra_log_pb2.TestResultsEvent.UNKNOWN
tests = util.flatten_tests_trie(
file_json.get('tests', {}), file_json.get('path_delimiter', '/'))
for name, test in tests.iteritems():
test_result = test_results.tests.add()
test_result.test_name = name
test_result.actual.extend(
convert_test_result_type(res) for res in test['actual'])
test_result.expected.extend(
convert_test_result_type(res) for res in test['expected'])
event.send()
@classmethod
def upload(cls, master, builder, build_number, test_type):
taskqueue.add(url='/internal/monitoring/upload', params={
'master': master,
'builder': builder,
'build_number': build_number,
'test_type': test_type,
})
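# Usage sketch (argument values are hypothetical): other handlers enqueue an
# upload task, and the task queue later POSTs back to this handler:
#   EventMonUploader.upload('chromium.mac', 'Mac Builder', 123, 'browser_tests')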
|
[
"logging.error",
"json.loads",
"google.appengine.api.taskqueue.add",
"infra_libs.event_mon.Event",
"appengine_module.test_results.model.testfile.TestFile.get_files",
"infra_libs.ts_mon.CounterMetric",
"appengine_module.test_results.model.jsonresults.JsonResults.load_json"
] |
[((579, 684), 'infra_libs.ts_mon.CounterMetric', 'ts_mon.CounterMetric', (['"""test_results/num_test_results"""'], {'description': '"""Number of reported test results"""'}), "('test_results/num_test_results', description=\n 'Number of reported test results')\n", (599, 684), False, 'from infra_libs import ts_mon\n'), ((2667, 2772), 'infra_libs.ts_mon.CounterMetric', 'ts_mon.CounterMetric', (['"""test_results/num_test_results"""'], {'description': '"""Number of reported test results"""'}), "('test_results/num_test_results', description=\n 'Number of reported test results')\n", (2687, 2772), False, 'from infra_libs import ts_mon\n'), ((723, 752), 'json.loads', 'json.loads', (['self.request.body'], {}), '(self.request.body)\n', (733, 752), False, 'import json\n'), ((1275, 1299), 'infra_libs.event_mon.Event', 'event_mon.Event', (['"""POINT"""'], {}), "('POINT')\n", (1290, 1299), False, 'from infra_libs import event_mon\n'), ((3663, 3773), 'appengine_module.test_results.model.testfile.TestFile.get_files', 'TestFile.get_files', (['master', 'builder', 'test_type', 'build_number', '"""full_results.json"""'], {'load_data': '(True)', 'limit': '(1)'}), "(master, builder, test_type, build_number,\n 'full_results.json', load_data=True, limit=1)\n", (3681, 3773), False, 'from appengine_module.test_results.model.testfile import TestFile\n'), ((4013, 4049), 'appengine_module.test_results.model.jsonresults.JsonResults.load_json', 'JsonResults.load_json', (['files[0].data'], {}), '(files[0].data)\n', (4034, 4049), False, 'from appengine_module.test_results.model.jsonresults import JsonResults\n'), ((4116, 4140), 'infra_libs.event_mon.Event', 'event_mon.Event', (['"""POINT"""'], {}), "('POINT')\n", (4131, 4140), False, 'from infra_libs import event_mon\n'), ((5667, 5820), 'google.appengine.api.taskqueue.add', 'taskqueue.add', ([], {'url': '"""/internal/monitoring/upload"""', 'params': "{'master': master, 'builder': builder, 'build_number': build_number,\n 'test_type': test_type}"}), "(url='/internal/monitoring/upload', params={'master': master,\n 'builder': builder, 'build_number': build_number, 'test_type': test_type})\n", (5680, 5820), False, 'from google.appengine.api import taskqueue\n'), ((980, 1135), 'logging.error', 'logging.error', (["('Missing required parameters: (master=%s, builder=%s, build_number=%s, test_type=%s)'\n % (master, builder, build_number, test_type))"], {}), "(\n 'Missing required parameters: (master=%s, builder=%s, build_number=%s, test_type=%s)'\n % (master, builder, build_number, test_type))\n", (993, 1135), False, 'import logging\n'), ((2836, 2876), 'logging.error', 'logging.error', (['"""Missing request payload"""'], {}), "('Missing request payload')\n", (2849, 2876), False, 'import logging\n'), ((2952, 2981), 'json.loads', 'json.loads', (['self.request.body'], {}), '(self.request.body)\n', (2962, 2981), False, 'import json\n'), ((3421, 3576), 'logging.error', 'logging.error', (["('Missing required parameters: (master=%s, builder=%s, build_number=%s, test_type=%s)'\n % (master, builder, build_number, test_type))"], {}), "(\n 'Missing required parameters: (master=%s, builder=%s, build_number=%s, test_type=%s)'\n % (master, builder, build_number, test_type))\n", (3434, 3576), False, 'import logging\n'), ((3811, 3931), 'logging.error', 'logging.error', (["('Failed to find full_results.json for (%s, %s, %s, %s)' % (master, builder,\n build_number, test_type))"], {}), "('Failed to find full_results.json for (%s, %s, %s, %s)' % (\n master, builder, build_number, test_type))\n", (3824, 
3931), False, 'import logging\n'), ((3011, 3067), 'logging.error', 'logging.error', (['"""Failed to parse request payload as JSON"""'], {}), "('Failed to parse request payload as JSON')\n", (3024, 3067), False, 'import logging\n')]
|
import requests
import os
from .ProvenanceObject import ProvenanceObject
from .dependency import Dependency
from .task_status import TaskStatus
from .dataset import DataSet
from .performance import Performance
from datetime import datetime
dfa_url = os.environ.get('DFA_URL', 'http://localhost:22000/')
class Task(ProvenanceObject):
"""
This class defines a dataflow task.
Attributes:
- id (:obj:`str`): Task Id.
- dataflow_tag (:obj:`str`): Dataflow tag.
- transformation_tag (:obj:`str`): Transformation tag.
- sub_id (:obj:`str`, optional): Task Sub Id.
    - dependency (:obj:`Task`): Task on which this task depends.
- workspace (:obj:`str`, optional): Task workspace.
- resource (:obj:`str`, optional): Task resource.
- output (:obj:`str`, optional): Task output.
- error (:obj:`str`, optional): Task error.
"""
def __init__(self, id, dataflow_tag, transformation_tag,
sub_id="", dependency=None, workspace="", resource="",
output="", error=""):
ProvenanceObject.__init__(self, transformation_tag)
self._workspace = workspace
self._resource = resource
self._dependency = ""
self._output = output
self._error = error
self._sets = []
self._status = TaskStatus.READY.value
self._dataflow = dataflow_tag.lower()
self._transformation = transformation_tag.lower()
self._id = str(id)
self._sub_id = sub_id
self._performances = []
self.dfa_url = dfa_url
self.start_time = None
self.end_time = None
if isinstance(dependency, Task):
dependency = Dependency([dependency._tag], [dependency._id])
self._dependency = dependency.get_specification()
def add_dependency(self, dependency):
""" Add a dependency to the Task.
Args:
- dependency (:obj:`Dependency`): A :obj:`Dependency` object.
"""
assert isinstance(dependency, Dependency), \
"The dependency must be valid."
self._dependency = dependency.get_specification()
def set_datasets(self, datasets):
""" Set the Task DataSets.
Args:
        - datasets (:obj:`list`): A :obj:`list` containing :obj:`DataSet` objects.
"""
assert isinstance(datasets, list), \
"The parameter must be a list."
for dataset in datasets:
self.add_dataset(dataset)
def add_dataset(self, dataset):
""" Add a dataset to the Task.
Args:
- dataset (:obj:`DataSet`): A :obj:`DataSet` object.
"""
assert isinstance(dataset, DataSet), "The dataset must be valid."
self._sets.append(dataset.get_specification())
def set_status(self, status):
""" Change the Task Status.
Args:
- status (:obj:`TaskStatus`): A :obj:`TaskStatus` object.
"""
assert isinstance(status, TaskStatus), \
"The task status must be valid."
self._status = status.value
def begin(self):
""" Send a post request to the Dataflow Analyzer API to store the Task.
"""
self.set_status(TaskStatus.RUNNING)
self.start_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
self.save()
def end(self):
""" Send a post request to the Dataflow Analyzer API to store the Task.
"""
self.set_status(TaskStatus.FINISHED)
self.end_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
performance = Performance(self.start_time, self.end_time)
self._performances.append(performance.get_specification())
self.save()
def save(self):
""" Send a post request to the Dataflow Analyzer API to store the Task.
"""
url = dfa_url + '/pde/task/json'
message = self.get_specification()
r = requests.post(url, json=message)
print(r.status_code)
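# Usage sketch (dataflow/transformation tags and id are hypothetical):
#   t = Task(1, "my_dataflow", "my_transformation")
#   t.begin()   # marks RUNNING and POSTs the task to the Dataflow Analyzer
#   # ... do the actual work ...
#   t.end()     # marks FINISHED, records a Performance entry, POSTs again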
|
[
"os.environ.get",
"requests.post",
"datetime.datetime.now"
] |
[((251, 303), 'os.environ.get', 'os.environ.get', (['"""DFA_URL"""', '"""http://localhost:22000/"""'], {}), "('DFA_URL', 'http://localhost:22000/')\n", (265, 303), False, 'import os\n'), ((3961, 3993), 'requests.post', 'requests.post', (['url'], {'json': 'message'}), '(url, json=message)\n', (3974, 3993), False, 'import requests\n'), ((3308, 3322), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3320, 3322), False, 'from datetime import datetime\n'), ((3554, 3568), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3566, 3568), False, 'from datetime import datetime\n')]
|
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import os
import copy
import math
from pathlib import Path
import warnings
from typing import Callable, Tuple, Union, List
import decord
from einops.layers.torch import Rearrange
import matplotlib.pyplot as plt
import numpy as np
from numpy.random import randint
import torch
from torch.utils.data import Dataset, Subset, DataLoader
from torchvision.transforms import Compose
from .references import transforms_video as transforms
from .references.functional_video import denormalize
from ..common.misc import Config
from ..common.gpu import num_devices, db_num_workers
Trans = Callable[[object, dict], Tuple[object, dict]]
DEFAULT_MEAN = (0.43216, 0.394666, 0.37645)
DEFAULT_STD = (0.22803, 0.22145, 0.216989)
class VideoRecord(object):
"""
This class is used for parsing split-files where each row contains a path
and a label:
Ex:
```
path/to/my/clip_1 3
path/to/another/clip_2 32
```
"""
def __init__(self, data: List[str]):
""" Initialized a VideoRecord
Ex.
data = ["path/to/video.mp4", 2, "cooking"]
Args:
row: a list where first element is the path and second element is
the label, and the third element (optional) is the label name
"""
assert len(data) >= 2 and len(data) <= 3
assert isinstance(data[0], str)
assert isinstance(int(data[1]), int)
if len(data) == 3:
assert isinstance(data[2], str)
self._data = data
self._num_frames = None
@property
def path(self) -> str:
return self._data[0]
@property
def num_frames(self) -> int:
if self._num_frames is None:
self._num_frames = int(
len([x for x in Path(self._data[0]).glob("img_*")]) - 1
)
return self._num_frames
@property
def label(self) -> int:
return int(self._data[1])
@property
def label_name(self) -> str:
return None if len(self._data) <= 2 else self._data[2]
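# Usage sketch (path and label are hypothetical): one record per split-file row.
#   rec = VideoRecord(["path/to/my/clip_1", "3", "jumping"])
#   rec.path, rec.label, rec.label_name  # -> ("path/to/my/clip_1", 3, "jumping")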
def get_transforms(train: bool = True, tfms_config: Config = None) -> Trans:
""" Get default transformations to apply depending on whether we're applying it to the training or the validation set. If no tfms configurations are passed in, use the defaults.
Args:
train: whether or not this is for training
tfms_config: Config object with tranforms-related configs
Returns:
A list of transforms to apply
"""
if tfms_config is None:
tfms_config = get_default_tfms_config(train=train)
# 1. resize
tfms = [
transforms.ToTensorVideo(),
transforms.ResizeVideo(
tfms_config.im_scale, tfms_config.resize_keep_ratio
),
]
# 2. crop
if tfms_config.random_crop:
if tfms_config.random_crop_scales:
crop = transforms.RandomResizedCropVideo(
tfms_config.input_size, tfms_config.random_crop_scales
)
else:
crop = transforms.RandomCropVideo(tfms_config.input_size)
else:
crop = transforms.CenterCropVideo(tfms_config.input_size)
tfms.append(crop)
# 3. flip
tfms.append(transforms.RandomHorizontalFlipVideo(tfms_config.flip_ratio))
# 4. normalize
tfms.append(transforms.NormalizeVideo(tfms_config.mean, tfms_config.std))
return Compose(tfms)
def get_default_tfms_config(train: bool) -> Config:
"""
Args:
train: whether or not this is for training
Settings:
input_size (int or tuple): Model input image size.
im_scale (int or tuple): Resize target size.
resize_keep_ratio (bool): If True, keep the original ratio when resizing.
mean (tuple): Normalization mean.
if train:
std (tuple): Normalization std.
flip_ratio (float): Horizontal flip ratio.
random_crop (bool): If False, do center-crop.
random_crop_scales (tuple): Range of size of the origin size random cropped.
"""
flip_ratio = 0.5 if train else 0.0
random_crop = True if train else False
random_crop_scales = (0.6, 1.0) if train else None
return Config(
dict(
input_size=112,
im_scale=128,
resize_keep_ratio=True,
mean=DEFAULT_MEAN,
std=DEFAULT_STD,
flip_ratio=flip_ratio,
random_crop=random_crop,
random_crop_scales=random_crop_scales,
)
)
class VideoDataset:
""" A video recognition dataset. """
def __init__(
self,
root: str,
seed: int = None,
train_pct: float = 0.75,
num_samples: int = 1,
sample_length: int = 8,
sample_step: int = 1,
temporal_jitter: bool = True,
temporal_jitter_step: int = 2,
random_shift: bool = True,
batch_size: int = 8,
video_ext: str = "mp4",
warning: bool = False,
train_split_file: str = None,
test_split_file: str = None,
train_transforms: Trans = get_transforms(train=True),
test_transforms: Trans = get_transforms(train=False),
) -> None:
""" initialize dataset
Arg:
root: Videos directory.
seed: random seed
train_pct: percentage of dataset to use for training
num_samples: Number of clips to sample from each video.
sample_length: Number of consecutive frames to sample from a video (i.e. clip length).
sample_step: Sampling step.
temporal_jitter: Randomly skip frames when sampling each frames.
temporal_jitter_step: temporal jitter in frames
random_shift: Random temporal shift when sample a clip.
video_ext: Video file extension.
warning: On or off warning.
train_split_file: Annotation file containing video filenames and labels.
test_split_file: Annotation file containing video filenames and labels.
train_transforms: transforms for training
test_transforms: transforms for testing
"""
assert sample_step > 0
assert num_samples > 0
if temporal_jitter:
assert temporal_jitter_step > 0
if train_split_file:
assert Path(train_split_file).exists()
assert (
test_split_file is not None and Path(test_split_file).exists()
)
if test_split_file:
assert Path(test_split_file).exists()
assert (
train_split_file is not None
and Path(train_split_file).exists()
)
self.root = root
self.seed = seed
self.num_samples = num_samples
self.sample_length = sample_length
self.sample_step = sample_step
self.presample_length = sample_length * sample_step
self.temporal_jitter_step = temporal_jitter_step
self.train_transforms = train_transforms
self.test_transforms = test_transforms
self.random_shift = random_shift
self.temporal_jitter = temporal_jitter
self.batch_size = batch_size
self.video_ext = video_ext
self.warning = warning
# create training and validation datasets
self.train_ds, self.test_ds = (
self.split_with_file(
train_split_file=train_split_file,
test_split_file=test_split_file,
)
if train_split_file
else self.split_by_folder(train_pct=train_pct)
)
# initialize dataloaders
self.init_data_loaders()
def split_by_folder(
self, train_pct: float = 0.8
) -> Tuple[Dataset, Dataset]:
""" Split this dataset into a training and testing set based on the
folders that the videos are in.
```
/data
+-- action_class_1
| +-- video_01.mp4
| +-- video_02.mp4
| +-- ...
+-- action_class_2
| +-- video_11.mp4
| +-- video_12.mp4
| +-- ...
+-- ...
```
Args:
train_pct: the ratio of images to use for training vs
testing
Return
A training and testing dataset in that order
"""
self.video_records = []
# get all dirs in root (and make sure they are dirs)
dirs = []
for entry in os.listdir(self.root):
if os.path.isdir(os.path.join(self.root, entry)):
dirs.append(os.path.join(self.root, entry))
# add each video in each dir as a video record
label = 0
self.classes = []
for action in dirs:
action = os.path.basename(os.path.normpath(action))
self.video_records.extend(
[
VideoRecord(
[
os.path.join(self.root, action, vid.split(".")[0]),
label,
action,
]
)
for vid in os.listdir(os.path.join(self.root, action))
]
)
label += 1
self.classes.append(action)
# random split
test_num = math.floor(len(self) * (1 - train_pct))
if self.seed:
torch.manual_seed(self.seed)
# set indices
indices = torch.randperm(len(self)).tolist()
train_range = indices[test_num:]
test_range = indices[:test_num]
return self.split_train_test(train_range, test_range)
def split_with_file(
self,
train_split_file: Union[Path, str],
test_split_file: Union[Path, str],
) -> Tuple[Dataset, Dataset]:
""" Split this dataset into a training and testing set using a split file.
Each line in the split file must use the form:
```
path/to/jumping/video_name_1 3
path/to/swimming/video_name_2 5
path/to/another/jumping/video_name_3 3
```
Args:
            train_split_file: annotation file for the training videos
            test_split_file: annotation file for the testing videos
Return:
A training and testing dataset in that order
"""
self.video_records = []
# add train records
self.video_records.extend(
[
VideoRecord(row.strip().split(" "))
for row in open(train_split_file)
]
)
train_len = len(self.video_records)
# add validation records
self.video_records.extend(
[
VideoRecord(row.strip().split(" "))
for row in open(test_split_file)
]
)
# create indices
indices = torch.arange(0, len(self.video_records))
train_range = indices[:train_len]
test_range = indices[train_len:]
return self.split_train_test(train_range, test_range)
def split_train_test(
self, train_range: torch.Tensor, test_range: torch.Tensor,
) -> Tuple[Dataset, Dataset]:
""" Split this dataset into a training and testing set
Args:
train_range: range of indices for training set
test_range: range of indices for testing set
Return
A training and testing dataset in that order
"""
# create train subset
train = copy.deepcopy(Subset(self, train_range))
train.dataset.transforms = self.train_transforms
train.dataset.sample_step = (
self.temporal_jitter_step
if self.temporal_jitter
else self.sample_step
)
train.dataset.presample_length = self.sample_length * self.sample_step
# create test subset
test = copy.deepcopy(Subset(self, test_range))
test.dataset.transforms = self.test_transforms
test.dataset.random_shift = False
test.dataset.temporal_jitter = False
return train, test
def init_data_loaders(self) -> None:
""" Create training and validation data loaders. """
devices = num_devices()
self.train_dl = DataLoader(
self.train_ds,
batch_size=self.batch_size * devices,
shuffle=True,
num_workers=db_num_workers(),
pin_memory=True,
)
self.test_dl = DataLoader(
self.test_ds,
batch_size=self.batch_size * devices,
shuffle=False,
num_workers=db_num_workers(),
pin_memory=True,
)
def __len__(self) -> int:
return len(self.video_records)
def _sample_indices(self, record: VideoRecord) -> List[int]:
"""
Create a list of frame-wise offsets into a video record. Depending on
whether or not 'random shift' is used, perform a uniform sample or a
random sample.
Args:
record (VideoRecord): A video record.
Return:
list: Segment offsets (start indices)
"""
if record.num_frames > self.presample_length:
if self.random_shift:
# Random sample
offsets = np.sort(
randint(
record.num_frames - self.presample_length + 1,
size=self.num_samples,
)
)
else:
# Uniform sample
distance = (
record.num_frames - self.presample_length + 1
) / self.num_samples
offsets = np.array(
[
int(distance / 2.0 + distance * x)
for x in range(self.num_samples)
]
)
else:
if self.warning:
warnings.warn(
f"num_samples and/or sample_length > num_frames in {record.path}"
)
offsets = np.zeros((self.num_samples,), dtype=int)
return offsets
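    # Worked example for the uniform branch (numbers are hypothetical):
    # num_frames=100, presample_length=16, num_samples=2 gives
    # distance = (100 - 16 + 1) / 2 = 42.5 and offsets [21, 63].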
def _get_frames(
self, video_reader: decord.VideoReader, offset: int,
) -> List[np.ndarray]:
""" Get frames at sample length.
Args:
video_reader: the decord tool for parsing videos
offset: where to start the reader from
Returns
Frames at sample length in a List
"""
clip = list()
# decord.seek() seems to have a bug. use seek_accurate().
video_reader.seek_accurate(offset)
# first frame
clip.append(video_reader.next().asnumpy())
# remaining frames
try:
for i in range(self.sample_length - 1):
step = (
randint(self.sample_step + 1)
if self.temporal_jitter
else self.sample_step
)
if step == 0 and self.temporal_jitter:
clip.append(clip[-1].copy())
else:
if step > 1:
video_reader.skip_frames(step - 1)
cur_frame = video_reader.next().asnumpy()
clip.append(cur_frame)
except StopIteration:
# pass when video has ended
pass
# if clip needs more frames, simply duplicate the last frame in the clip.
while len(clip) < self.sample_length:
clip.append(clip[-1].copy())
return clip
def __getitem__(self, idx: int) -> Tuple[torch.tensor, int]:
"""
Return:
(clips (torch.tensor), label (int))
"""
record = self.video_records[idx]
video_reader = decord.VideoReader(
"{}.{}".format(
os.path.join(self.root, record.path), self.video_ext
),
# TODO try to add `ctx=decord.ndarray.gpu(0) or .cuda(0)`
)
record._num_frames = len(video_reader)
offsets = self._sample_indices(record)
clips = np.array([self._get_frames(video_reader, o) for o in offsets])
if self.num_samples == 1:
return (
# [T, H, W, C] -> [C, T, H, W]
self.transforms(torch.from_numpy(clips[0])),
record.label,
)
else:
return (
# [S, T, H, W, C] -> [S, C, T, H, W]
torch.stack(
[self.transforms(torch.from_numpy(c)) for c in clips]
),
record.label,
)
def _show_batch(
self,
images: List[torch.tensor],
labels: List[int],
sample_length: int,
mean: Tuple[int, int, int] = DEFAULT_MEAN,
std: Tuple[int, int, int] = DEFAULT_STD,
) -> None:
"""
Display a batch of images.
Args:
images: List of sample (clip) tensors
labels: List of labels
sample_length: Number of frames to show for each sample
mean: Normalization mean
std: Normalization std-dev
"""
batch_size = len(images)
plt.tight_layout()
fig, axs = plt.subplots(
batch_size,
sample_length,
figsize=(4 * sample_length, 3 * batch_size),
)
for i, ax in enumerate(axs):
if batch_size == 1:
clip = images[0]
else:
clip = images[i]
clip = Rearrange("c t h w -> t c h w")(clip)
if not isinstance(ax, np.ndarray):
ax = [ax]
for j, a in enumerate(ax):
a.axis("off")
a.imshow(
np.moveaxis(denormalize(clip[j], mean, std).numpy(), 0, -1)
)
# display label/label_name on the first image
if j == 0:
a.text(
x=3,
y=15,
s=f"{labels[i]}",
fontsize=20,
bbox=dict(facecolor="white", alpha=0.80),
)
def show_batch(self, train_or_test: str = "train", rows: int = 2) -> None:
"""Plot first few samples in the datasets"""
if train_or_test == "train":
batch = [self.train_ds[i] for i in range(rows)]
elif train_or_test == "test":
batch = [self.test_ds[i] for i in range(rows)]
else:
            raise ValueError("Unknown data type {}".format(train_or_test))
images = [im[0] for im in batch]
labels = [im[1] for im in batch]
self._show_batch(images, labels, self.sample_length)
|
[
"torch.utils.data.Subset",
"os.path.join",
"torch.manual_seed",
"numpy.zeros",
"matplotlib.pyplot.subplots",
"pathlib.Path",
"torchvision.transforms.Compose",
"numpy.random.randint",
"os.path.normpath",
"warnings.warn",
"einops.layers.torch.Rearrange",
"matplotlib.pyplot.tight_layout",
"os.listdir",
"torch.from_numpy"
] |
[((3444, 3457), 'torchvision.transforms.Compose', 'Compose', (['tfms'], {}), '(tfms)\n', (3451, 3457), False, 'from torchvision.transforms import Compose\n'), ((8514, 8535), 'os.listdir', 'os.listdir', (['self.root'], {}), '(self.root)\n', (8524, 8535), False, 'import os\n'), ((17194, 17212), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (17210, 17212), True, 'import matplotlib.pyplot as plt\n'), ((17232, 17320), 'matplotlib.pyplot.subplots', 'plt.subplots', (['batch_size', 'sample_length'], {'figsize': '(4 * sample_length, 3 * batch_size)'}), '(batch_size, sample_length, figsize=(4 * sample_length, 3 *\n batch_size))\n', (17244, 17320), True, 'import matplotlib.pyplot as plt\n'), ((9453, 9481), 'torch.manual_seed', 'torch.manual_seed', (['self.seed'], {}), '(self.seed)\n', (9470, 9481), False, 'import torch\n'), ((11483, 11508), 'torch.utils.data.Subset', 'Subset', (['self', 'train_range'], {}), '(self, train_range)\n', (11489, 11508), False, 'from torch.utils.data import Dataset, Subset, DataLoader\n'), ((11861, 11885), 'torch.utils.data.Subset', 'Subset', (['self', 'test_range'], {}), '(self, test_range)\n', (11867, 11885), False, 'from torch.utils.data import Dataset, Subset, DataLoader\n'), ((14042, 14082), 'numpy.zeros', 'np.zeros', (['(self.num_samples,)'], {'dtype': 'int'}), '((self.num_samples,), dtype=int)\n', (14050, 14082), True, 'import numpy as np\n'), ((8566, 8596), 'os.path.join', 'os.path.join', (['self.root', 'entry'], {}), '(self.root, entry)\n', (8578, 8596), False, 'import os\n'), ((8825, 8849), 'os.path.normpath', 'os.path.normpath', (['action'], {}), '(action)\n', (8841, 8849), False, 'import os\n'), ((13901, 13986), 'warnings.warn', 'warnings.warn', (['f"""num_samples and/or sample_length > num_frames in {record.path}"""'], {}), "(f'num_samples and/or sample_length > num_frames in {record.path}'\n )\n", (13914, 13986), False, 'import warnings\n'), ((15823, 15859), 'os.path.join', 'os.path.join', (['self.root', 'record.path'], {}), '(self.root, record.path)\n', (15835, 15859), False, 'import os\n'), ((17537, 17568), 'einops.layers.torch.Rearrange', 'Rearrange', (['"""c t h w -> t c h w"""'], {}), "('c t h w -> t c h w')\n", (17546, 17568), False, 'from einops.layers.torch import Rearrange\n'), ((6377, 6399), 'pathlib.Path', 'Path', (['train_split_file'], {}), '(train_split_file)\n', (6381, 6399), False, 'from pathlib import Path\n'), ((6571, 6592), 'pathlib.Path', 'Path', (['test_split_file'], {}), '(test_split_file)\n', (6575, 6592), False, 'from pathlib import Path\n'), ((8627, 8657), 'os.path.join', 'os.path.join', (['self.root', 'entry'], {}), '(self.root, entry)\n', (8639, 8657), False, 'import os\n'), ((13278, 13355), 'numpy.random.randint', 'randint', (['(record.num_frames - self.presample_length + 1)'], {'size': 'self.num_samples'}), '(record.num_frames - self.presample_length + 1, size=self.num_samples)\n', (13285, 13355), False, 'from numpy.random import randint\n'), ((14804, 14833), 'numpy.random.randint', 'randint', (['(self.sample_step + 1)'], {}), '(self.sample_step + 1)\n', (14811, 14833), False, 'from numpy.random import randint\n'), ((16280, 16306), 'torch.from_numpy', 'torch.from_numpy', (['clips[0]'], {}), '(clips[0])\n', (16296, 16306), False, 'import torch\n'), ((6478, 6499), 'pathlib.Path', 'Path', (['test_split_file'], {}), '(test_split_file)\n', (6482, 6499), False, 'from pathlib import Path\n'), ((6688, 6710), 'pathlib.Path', 'Path', (['train_split_file'], {}), '(train_split_file)\n', (6692, 6710), False, 'from 
pathlib import Path\n'), ((9208, 9239), 'os.path.join', 'os.path.join', (['self.root', 'action'], {}), '(self.root, action)\n', (9220, 9239), False, 'import os\n'), ((16508, 16527), 'torch.from_numpy', 'torch.from_numpy', (['c'], {}), '(c)\n', (16524, 16527), False, 'import torch\n'), ((1841, 1860), 'pathlib.Path', 'Path', (['self._data[0]'], {}), '(self._data[0])\n', (1845, 1860), False, 'from pathlib import Path\n')]
|
"""
Usage:
tpch-fed.py --file=<file>
Options:
--file=<file> Query file.
-h --help Show this screen.
--version Show version.
"""
import os
import connectorx as cx
from contexttimer import Timer
from docopt import docopt
import pandas as pd
if __name__ == "__main__":
args = docopt(__doc__, version="Naval Fate 2.0")
query_file = args["--file"]
db_map = {
"db1": os.environ["DB1"],
"db2": os.environ["DB2"],
}
print(f"dbs: {db_map}")
with open(query_file, "r") as f:
sql = f.read()
print(f"file: {query_file}")
with Timer() as timer:
df = cx.read_sql(db_map, sql, return_type="pandas")
print("time in total:", timer.elapsed)
print(df)
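    # Example invocation (connection strings are hypothetical):
    #   DB1='postgresql://user:pass@host1:5432/tpch' \
    #   DB2='postgresql://user:pass@host2:5432/tpch' \
    #   python tpch-fed.py --file=q01.sql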
|
[
"contexttimer.Timer",
"connectorx.read_sql",
"docopt.docopt"
] |
[((316, 357), 'docopt.docopt', 'docopt', (['__doc__'], {'version': '"""Naval Fate 2.0"""'}), "(__doc__, version='Naval Fate 2.0')\n", (322, 357), False, 'from docopt import docopt\n'), ((612, 619), 'contexttimer.Timer', 'Timer', ([], {}), '()\n', (617, 619), False, 'from contexttimer import Timer\n'), ((643, 689), 'connectorx.read_sql', 'cx.read_sql', (['db_map', 'sql'], {'return_type': '"""pandas"""'}), "(db_map, sql, return_type='pandas')\n", (654, 689), True, 'import connectorx as cx\n')]
|
# -*- coding: utf-8 -*-
"""
Copyright ArxanFintech Technology Ltd. 2018 All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import base64
import json
import collections
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
import requests
from rest.api.common import APIKEYHEADER, FABIOROUTETAGHEADER, ROUTETAG, \
build_signature_body, build_signature_body_base
class WalletClient(object):
"""A wallet client implementation."""
def __init__(self, client):
"""Init wallet client with Client. """
self.__route_tag = "wallet-ng"
# self.__path = "wallet"
        self.__client = client
def __set_header(self, header):
"""Set wallet client header"""
if APIKEYHEADER not in header:
header[APIKEYHEADER] = self.__client.get_apikey()
if ROUTETAG not in header:
header[ROUTETAG] = self.__route_tag
if FABIOROUTETAGHEADER not in header:
header[FABIOROUTETAGHEADER] = self.__route_tag
return header
def __set_url(self, req_path, url_params={}):
request_url = ""
if req_path:
request_url = "/".join([
self.__client.get_ip(),
# self.__path,
req_path
])
else:
request_url = "/".join([
self.__client.get_ip(),
# self.__path,
])
if url_params:
params = "&".join("{}={}".format(x, url_params[x]) \
for x in url_params)
request_url = "?".join([request_url, params])
# self.__client.set_url(request_url)
return request_url
def __set_params(self, header, req_path, url_params={}, body={}):
header = self.__set_header(header)
request_url = self.__set_url(req_path, url_params)
# self.__client.set_url(request_url)
req_params = {
"url": request_url,
"body": body,
"headers": header,
}
if self.__client.ssl_enabled():
req_params["cert"] = self.__client.get_cert()
req_params["verify"] = self.__client.get_verify()
return req_params
def register(self, header, body):
"""Register user wallet."""
req_dir = "v1/wallet/register"
method = self.__client.do_post
req_params = self.__set_params(
header,
req_dir,
body=body
)
return self.__client.do_request(
req_params,
method,
)
def register_sub_wallet(self, header, body):
"""Register a sub wallet."""
req_dir = "v1/wallet/register/subwallet"
method = self.__client.do_post
req_params = self.__set_params(
header,
req_dir,
body=body
)
return self.__client.do_request(
req_params,
method,
)
def update_password(self, header, body):
"""Update wallet password."""
req_dir = "v1/wallet/password"
method = self.__client.do_put
req_params = self.__set_params(
header,
req_dir,
body=body
)
return self.__client.do_request(
req_params,
method,
)
def create_payment_password(self, header, body):
"""Create wallet payment password."""
req_dir = "v1/wallet/payment_passwd"
method = self.__client.do_post
req_params = self.__set_params(
header,
req_dir,
body=body
)
return self.__client.do_request(
req_params,
method,
)
def update_payment_password(self, header, body):
"""Update payment password."""
req_dir = "v1/wallet/payment_passwd"
method = self.__client.do_put
req_params = self.__set_params(
header,
req_dir,
body=body
)
return self.__client.do_request(
req_params,
method,
)
def query_wallet_infos(self, header, id_):
"""Query wallet infos."""
req_dir = "v1/wallet/info"
req_dir = "?".join([req_dir, "id={}".format(id_)])
method = self.__client.do_get
req_params = self.__set_params(header, req_dir)
return self.__client.do_request(
req_params,
method,
)
def query_wallet_balance(self, header, id_):
"""Query wallet balalce"""
req_dir = "v1/wallet/balance"
params = {"id": id_}
method = self.__client.do_get
req_params = self.__set_params(
header,
url_params=params,
req_path=req_dir
)
return self.__client.do_request(
req_params,
method,
)
def create_poe(self, header, payload, params):
"""Create a POE with ed25519 signed body. """
payload = json.dumps(payload)
req_path = "v1/poe/create"
method = self.__client.do_post
params["payload"] = payload
signature = build_signature_body(**params)
body = {
"payload": payload,
"signature": signature
}
req_params = self.__set_params(
header,
req_path,
body=body
)
return self.__client.do_request(
req_params,
method,
)
def update_poe(self, header, payload, params):
"""Update a POE with ed25519 signed body."""
payload = json.dumps(payload)
req_path = "v1/poe/update"
method = self.__client.do_put
params["payload"] = payload
signature = build_signature_body(**params)
body = {
"payload": payload,
"signature": signature
}
req_params = self.__set_params(
header,
req_path,
body=body
)
return self.__client.do_request(
req_params,
method,
)
def query_poe(self, header, id_):
"""Query a POE."""
        params = {"id": id_}
        req_path = "v1/poe"
method = self.__client.do_get
req_params = self.__set_params(
header,
req_path,
url_params=params
)
return self.__client.do_request(
req_params,
method,
)
def upload_poe(self, header, file_, poe_id, readonly):
"""Upload POE file. """
req_path = "v1/poe/upload"
poefile = "application/octet-stream"
filepart = (
file_,
open(file_, 'rb'),
poefile
)
files = {
"poe_file" : filepart
}
data = {
"poe_id": poe_id,
"read_only": readonly
}
req_url = self.__set_url(req_path)
prepared = requests.Request(
"POST",
url=req_url,
files=files,
data=data
).prepare()
header = self.__set_header(header)
header.update(prepared.headers)
prepared.headers = header
return self.__client.do_prepare(prepared)
def sign_txs(self, txs, params):
issuer = params["creator"]
for i, tx in enumerate(txs):
if tx["founder"] != issuer:
# sign fee by platform private key
params = self.__client.get_ent_params()
txs[i] = self.__sign_tx(tx, params)
return txs
def __sign_tx(self, tx, params):
if "txout" not in tx:
raise Exception("'txout' must be set in tx")
for i in range(len(tx["txout"])):
if "script" not in tx["txout"][i] or tx["txout"][i]["script"] is None:
raise Exception("no script field, no need to sign")
utxo_sig = json.loads(base64.b64decode(tx["txout"][i]["script"]))
if utxo_sig["publicKey"] is None:
continue
public_key = base64.b64decode(utxo_sig["publicKey"])
sig_body = build_signature_body_base(
params["creator"],
params["created"],
params["nonce"],
params["privateB64"],
public_key
)
utxo_sig["signature"] = base64.b64encode(sig_body["signature_value"])
utxo_sig["nonce"] = params["nonce"]
utxo_sig["creator"] = params["creator"]
b64_utxo_sig = json.dumps(utxo_sig)
tx["txout"][i]["script"] = base64.b64encode(b64_utxo_sig)
return tx
def issue_colored_token(self, header, payload, params):
"""Issue colored token with sign. """
# 1 send transfer proposal to get wallet.Tx
time_dur_p, issue_pre_resp = self.issue_ctoken_proposal(
header,
payload
)
if "txs" not in issue_pre_resp:
raise Exception("issue ctoken proposal failed: {}".format(issue_pre_resp))
txs = issue_pre_resp["txs"]
# 2 sign public key as signature
txs = self.sign_txs(txs, params)
# 3 call ProcessTx to transfer formally
time_dur_t, result = self.process_tx(header, txs)
payload = json.loads(result["Payload"])
payload["token_id"] = issue_pre_resp["token_id"]
result["Payload"] = json.dumps(payload)
return time_dur_p+time_dur_t, result
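    # Flow sketch (header/payload/params contents are hypothetical); every
    # issue/transfer call follows the same propose -> sign -> process pattern:
    #   dur, resp = wallet_client.issue_colored_token(header, payload, sign_params)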
def process_tx(self, header, txs):
""" process_tx transfer formally with signature TX. """
if txs is None or len(txs) <= 0:
raise Exception("txs should not be empty!")
req_path = "v2/transaction/process"
body = {"txs": txs}
method = self.__client.do_post
req_params = self.__set_params(
header,
req_path,
body=body
)
return self.__client.do_request(
req_params,
method
)
def issue_ctoken_proposal(self, header, payload):
""" Issue ctoken proposal."""
#payload = json.dumps(payload)
req_path = "v2/transaction/tokens/issue/prepare"
method = self.__client.do_post
req_params = self.__set_params(
header,
req_path,
body=payload
)
time_dur, result = self.__client.do_request(
req_params,
method
)
payload = json.loads(result["Payload"])
return time_dur, payload
def transfer_assets(self, header, payload, params):
"""Transfer assets."""
# 1 send transfer proposal to get wallet.Tx
time_dur_p, trans_pre_resp = self.transfer_assets_proposal(
header,
payload
)
# 2 sign public key as signature
txs = self.sign_txs(trans_pre_resp, params)
# 3 call ProcessTx to transfer formally
time_dur_t, result = self.process_tx(header, txs)
return time_dur_p+time_dur_t, result
def transfer_assets_proposal(self, header, payload):
""" Transfer assets proposal."""
#payload = json.dumps(payload)
req_path = "v2/transaction/assets/transfer/prepare"
method = self.__client.do_post
req_params = self.__set_params(
header,
req_path,
body=payload
)
time_dur, result = self.__client.do_request(
req_params,
method
)
payload = json.loads(result["Payload"])
return time_dur, payload
def transfer_colored_tokens(self, header, payload, params):
"""Transfer colored token. """
# 1 send transfer proposal to get wallet.Tx
time_dur_p, txs = self.transfer_ctoken_proposal(
header,
payload
)
# 2 sign public key as signature
txs = self.sign_txs(txs, params)
# 3 call ProcessTx to transfer formally
time_dur_t, result = self.process_tx(header, txs)
return time_dur_p+time_dur_t, result
def transfer_ctoken_proposal(self, header, payload):
""" Transfer ctoken proposal."""
#payload = json.dumps(payload)
req_path = "v2/transaction/tokens/transfer/prepare"
method = self.__client.do_post
req_params = self.__set_params(
header,
req_path,
body=payload
)
time_dur, result = self.__client.do_request(
req_params,
method
)
payload = json.loads(result["Payload"])
return time_dur, payload
def issue_asset(self, header, payload, params):
"""Issue asset. """
# 1 send transfer proposal to get wallet.Tx
time_dur_p, issue_pre_resp = self.issue_assets_proposal(
header,
payload
)
# 2 sign public key as signature
txs = self.sign_txs(issue_pre_resp, params)
# 3 call ProcessTx to transfer formally
time_dur_t, result = self.process_tx(header, txs)
return time_dur_p+time_dur_t, result
def issue_assets_proposal(self, header, payload):
""" Issue assets proposal."""
#payload = json.dumps(payload)
req_path = "v2/transaction/assets/issue/prepare"
method = self.__client.do_post
req_params = self.__set_params(
header,
req_path,
body=payload
)
time_dur, result = self.__client.do_request(
req_params,
method
)
payload = json.loads(result["Payload"])
return time_dur, payload
def set_index(self, header, id_, indexs):
"""Set the index for did """
req_path = "v1/index/set"
method = self.__client.do_post
body = {
"id": id_,
"indexs": indexs
}
req_params = self.__set_params(
header,
req_path,
body=body
)
return self.__client.do_request(
req_params,
method
)
def get_index(self, header, indexs):
"""Get the did by index"""
req_path = "v1/index/get"
method = self.__client.do_post
body = {
"indexs": indexs
}
req_params = self.__set_params(
header,
req_path,
body=body
)
return self.__client.do_request(
req_params,
method
)
def get_tx_logs(self, header, did, tx_type, num, page):
""" Get transaction logs"""
req_path = "v2/transaction/logs"
method = self.__client.do_get
params = {
"id": did,
"type": tx_type,
"num": num,
"page": page
}
req_params = self.__set_params(
header,
req_path,
params
)
return self.__client.do_request(
req_params,
method
)
def get_tx_utxo(self, header, did, num, page):
""" Get transaction utxo"""
req_path = "v2/transaction/utxo"
method = self.__client.do_get
params = {
"id": did,
"num": num,
"page": page
}
req_params = self.__set_params(
header,
req_path,
params
)
return self.__client.do_request(
req_params,
method
)
def get_tx_stxo(self, header, did, num, page):
""" Get transaction stxo"""
req_path = "v2/transaction/stxo"
method = self.__client.do_get
params = {
"id": did,
"num": num,
"page": page
}
req_params = self.__set_params(
header,
req_path,
params
)
return self.__client.do_request(
req_params,
method
)
|
[
"json.loads",
"rest.api.common.build_signature_body",
"base64.b64decode",
"json.dumps",
"base64.b64encode",
"sys.setdefaultencoding",
"requests.Request",
"rest.api.common.build_signature_body_base"
] |
[((705, 736), 'sys.setdefaultencoding', 'sys.setdefaultencoding', (['"""utf-8"""'], {}), "('utf-8')\n", (727, 736), False, 'import sys\n'), ((5787, 5806), 'json.dumps', 'json.dumps', (['payload'], {}), '(payload)\n', (5797, 5806), False, 'import json\n'), ((5937, 5967), 'rest.api.common.build_signature_body', 'build_signature_body', ([], {}), '(**params)\n', (5957, 5967), False, 'from rest.api.common import APIKEYHEADER, FABIOROUTETAGHEADER, ROUTETAG, build_signature_body, build_signature_body_base\n'), ((6448, 6467), 'json.dumps', 'json.dumps', (['payload'], {}), '(payload)\n', (6458, 6467), False, 'import json\n'), ((6597, 6627), 'rest.api.common.build_signature_body', 'build_signature_body', ([], {}), '(**params)\n', (6617, 6627), False, 'from rest.api.common import APIKEYHEADER, FABIOROUTETAGHEADER, ROUTETAG, build_signature_body, build_signature_body_base\n'), ((10354, 10383), 'json.loads', 'json.loads', (["result['Payload']"], {}), "(result['Payload'])\n", (10364, 10383), False, 'import json\n'), ((10469, 10488), 'json.dumps', 'json.dumps', (['payload'], {}), '(payload)\n', (10479, 10488), False, 'import json\n'), ((11608, 11637), 'json.loads', 'json.loads', (["result['Payload']"], {}), "(result['Payload'])\n", (11618, 11637), False, 'import json\n'), ((12711, 12740), 'json.loads', 'json.loads', (["result['Payload']"], {}), "(result['Payload'])\n", (12721, 12740), False, 'import json\n'), ((13817, 13846), 'json.loads', 'json.loads', (["result['Payload']"], {}), "(result['Payload'])\n", (13827, 13846), False, 'import json\n'), ((14911, 14940), 'json.loads', 'json.loads', (["result['Payload']"], {}), "(result['Payload'])\n", (14921, 14940), False, 'import json\n'), ((9058, 9097), 'base64.b64decode', 'base64.b64decode', (["utxo_sig['publicKey']"], {}), "(utxo_sig['publicKey'])\n", (9074, 9097), False, 'import base64\n'), ((9121, 9240), 'rest.api.common.build_signature_body_base', 'build_signature_body_base', (["params['creator']", "params['created']", "params['nonce']", "params['privateB64']", 'public_key'], {}), "(params['creator'], params['created'], params[\n 'nonce'], params['privateB64'], public_key)\n", (9146, 9240), False, 'from rest.api.common import APIKEYHEADER, FABIOROUTETAGHEADER, ROUTETAG, build_signature_body, build_signature_body_base\n'), ((9407, 9452), 'base64.b64encode', 'base64.b64encode', (["sig_body['signature_value']"], {}), "(sig_body['signature_value'])\n", (9423, 9452), False, 'import base64\n'), ((9580, 9600), 'json.dumps', 'json.dumps', (['utxo_sig'], {}), '(utxo_sig)\n', (9590, 9600), False, 'import json\n'), ((9640, 9670), 'base64.b64encode', 'base64.b64encode', (['b64_utxo_sig'], {}), '(b64_utxo_sig)\n', (9656, 9670), False, 'import base64\n'), ((7917, 7978), 'requests.Request', 'requests.Request', (['"""POST"""'], {'url': 'req_url', 'files': 'files', 'data': 'data'}), "('POST', url=req_url, files=files, data=data)\n", (7933, 7978), False, 'import requests\n'), ((8917, 8959), 'base64.b64decode', 'base64.b64decode', (["tx['txout'][i]['script']"], {}), "(tx['txout'][i]['script'])\n", (8933, 8959), False, 'import base64\n')]
|
#!/usr/bin/env python
import re
import sys
import argparse
ERRORMESSAGE = ("'ChromUsed' chromosome: '%s' chosen for GC-correction is not"
" available in the Mappability file (--MappabilityPath):"
"\n\t'%s'\nMake sure to just same prefixes in ChromUsed"
" and Mappability file\nIf you want to go through all"
" mappability regions, set --ChromUsed: all\n"
"ChromUsed prefixes can be changed in the makefile easily")
def unpack(chrom, *rest):
return str(chrom)
def parse_args(argv):
    ''' Parse command-line arguments. '''
parser = argparse.ArgumentParser(prog='Checking mappability chromosomes')
parser.add_argument('Mappability', type=str)
parser.add_argument('ChromUsed', type=str)
return parser.parse_known_args(argv)
def getmappachroms(args):
with open(args.Mappability, 'r') as fin:
mappa = set()
lastc = ''
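        # the mappability file is assumed to be grouped by chromosome, so a new name is recorded only when it changes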
for line in fin:
mappa_chrom = unpack(*re.split(r'\s+', line.rstrip()))
if mappa_chrom != lastc:
mappa.add(mappa_chrom)
lastc = mappa_chrom
return mappa
def run(args):
""" does not check if header is present in mappability file """
mappachroms = getmappachroms(args)
if args.ChromUsed.lower() == "all":
args.ChromUsed = "all"
mappachroms.add("all")
assert args.ChromUsed in mappachroms, ERRORMESSAGE % (args.ChromUsed,
' '.join(sorted(mappachroms)))
def main(argv):
args, unknown = parse_args(argv)
run(args)
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
[
"argparse.ArgumentParser"
] |
[((604, 668), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': '"""Checking mappability chromosomes"""'}), "(prog='Checking mappability chromosomes')\n", (627, 668), False, 'import argparse\n')]
|
from __future__ import print_function
import os
import cv2
import pickle
import argparse
import numpy as np
import pandas as pd
import xml.dom.minidom
import matplotlib.pyplot as plt
from PIL import Image, ImageDraw
root_dir = "./"
#root_dir = "/home/ksuresh/fpn.pytorch-master/data/uavdt/data/VOCdevkit2007/UAV2017/"
#ImgPath = root_dir+'JPEGImages_split_4_overlap_0/'
AnnoPath = root_dir+'Annotations/' #_10_classes_xml/'
scale = []
area = []
aspect_ratio = []
def scale_and_aspect(img_idx):
# imgfile = ImgPath + img_idx+'.jpg'
xmlfile = AnnoPath + img_idx + '.xml'
#imgfile = '0000097_09823_d_0000051_det.jpg'
#xmlfile = '0000097_09823_d_0000051.xml'
# img = Image.open(imgfile)
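    # parse the Pascal VOC-style annotation XML that accompanies this image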
DomTree = xml.dom.minidom.parse(xmlfile)
annotation = DomTree.documentElement
filenamelist = annotation.getElementsByTagName('filename')
filename = filenamelist[0].childNodes[0].data
objectlist = annotation.getElementsByTagName('object')
#bboxes.append(len(objectlist))
for objects in objectlist:
namelist = objects.getElementsByTagName('name')
objectname = namelist[0].childNodes[0].data
bndbox = objects.getElementsByTagName('bndbox')
for box in bndbox:
#if objectname == 'pedestrian':
x1_list = box.getElementsByTagName('xmin')
x1 = int(x1_list[0].childNodes[0].data)
y1_list = box.getElementsByTagName('ymin')
y1 = int(y1_list[0].childNodes[0].data)
x2_list = box.getElementsByTagName('xmax')
x2 = int(x2_list[0].childNodes[0].data)
y2_list = box.getElementsByTagName('ymax')
y2 = int(y2_list[0].childNodes[0].data)
#scale.append(get_scale([x1,y1,x2,y2]))
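            # pad degenerate (zero-width or zero-height) boxes so the aspect ratio below never divides by zero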
            if y2 == y1:
                y2 += 1
                print("degenerate box: zero height, padding by one pixel")
            if x2 == x1:
                x2 += 1
                print("degenerate box: zero width, padding by one pixel")
aspect_ratio.append(get_aspect_ratio([x1,y1,x2,y2]))
area.append((x2-x1)*(y2-y1))
    return aspect_ratio, area  # , scale
def get_scale(bbox):
pass
def get_aspect_ratio(bbox):
return((float(bbox[2])-bbox[0])/(float(bbox[3])-bbox[1]))
def plot(aspect_ratio, area, scale):
    plt.hist(np.array(aspect_ratio), bins=75, range=(0, 5))
# plt.xlabel("number of points")
# plt.ylabel("aspect ratio")
plt.show()
    plt.hist(area, bins=75, range=(0, 25000))
plt.show()
if __name__ == '__main__':
for img_idx in os.listdir(AnnoPath):
img_idx = img_idx.split(".")[0]
#9999966_00000_d_0000093_2': weird
#args = parser.parse_args()
asr, area= scale_and_aspect(img_idx)
# with open("./aspect_ratio.txt", "w") as f:
# print(asr, file = f)
# print(area)
plot(asr, area, [])
|
[
"numpy.array",
"matplotlib.pyplot.show",
"os.listdir",
"matplotlib.pyplot.hist"
] |
[((2469, 2479), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2477, 2479), True, 'import matplotlib.pyplot as plt\n'), ((2485, 2526), 'matplotlib.pyplot.hist', 'plt.hist', (['area'], {'bins': '(75)', 'range': '(0, 25000)'}), '(area, bins=75, range=(0, 25000))\n', (2493, 2526), True, 'import matplotlib.pyplot as plt\n'), ((2535, 2545), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2543, 2545), True, 'import matplotlib.pyplot as plt\n'), ((2596, 2616), 'os.listdir', 'os.listdir', (['AnnoPath'], {}), '(AnnoPath)\n', (2606, 2616), False, 'import os\n'), ((2342, 2364), 'numpy.array', 'np.array', (['aspect_ratio'], {}), '(aspect_ratio)\n', (2350, 2364), True, 'import numpy as np\n')]
|
import os
import platform
import re
import sys
from typing import Any, List, Callable
from functools import partial, wraps
from contextlib import contextmanager
from fabric.api import sudo, run, path, puts, quiet
from fabric.contrib.files import upload_template
from .git import get_active_branch_name
def su(user):
return partial(sudo, user=user)
def requires_branch(cls):
"""
Enforce function callers to provide a branch name
    unless the current working directory holds a git repository with a branch checked out.
    An exception is raised if the branch name cannot be obtained.
The decorator may be optionally passed a required branch name
limiting the function execution scope to the specified branch only.
"""
def decorator(arg, *required_branches):
def wrapper(branch=None, *args, **kwargs):
force_branch = kwargs.pop('force_branch', False)
if not isinstance(branch, cls):
ci_branch = os.environ.get('BUILD_BRANCH')
branch_name = branch if branch else (ci_branch or get_active_branch_name())
if not branch_name:
raise ValueError('Not in a git repository. Provide a branch name')
branch = cls(branch_name)
# check if the function is allowed to run with this particular branch
if not force_branch and required_branches and branch.name not in required_branches:
puts(f'{branch.name} does not match the required branches {", ".join(required_branches)}')
return
return arg(branch, *args, **kwargs)
if callable(arg):
return wraps(arg)(wrapper)
else:
def inner_dec(func):
return decorator(func, arg, *required_branches)
return inner_dec
return decorator
def requires_user(func):
"""
Require a function caller to supply user as an argument.
The decorated function is passed the provided value (or None)
and a call function (fabric.api.run if user is None).
"""
@wraps(func)
def wrapper(*args, **kwargs):
user = kwargs.pop('user', None)
call_func = su(user) if user else run
return func(user=user, call=call_func, *args, **kwargs)
return wrapper
@requires_user
def managepy(command, user, call):
return call(f'python manage.py {command}')
@contextmanager
def pyenv(python_path):
with path(python_path, 'prepend'):
yield
@contextmanager
def virtualenv(virtualenv_path):
with path(virtualenv_path, 'prepend'):
yield
@contextmanager
def checksum(filename, *files_or_dirs):
paths = ' '.join(files_or_dirs)
# check whether the files have changed (or the checksum file does not exist at all)
with quiet():
if not sudo(f'find {paths} -type f -print0 | sort -z | xargs -0 tar cf - | shasum -c {filename}').failed:
modified = False
else:
modified = True
yield modified
# compute checksum for specified paths
if modified:
sudo(f'find {paths} -type f -print0 | sort -z | xargs -0 tar cf - | shasum > {filename}')
def get_checksum(*files_or_dirs):
"""
Calculate sha checksum for list of given files or directories.
"""
paths = ' '.join(files_or_dirs)
    # what this command does is: stream the contents of every file in a
    # stable (sorted) order through tar and hash the result with shasum
shasum = sudo(f'find {paths} -type f -print0 | sort -z | xargs -0 tar cf - | tar xOf - | shasum')
if shasum.failed:
raise Exception('failed to get shasum for specified files')
return str(shasum).split(' ', 1)[0]
def readlink(path):
with quiet():
result = sudo(f'readlink {path}')
if not result.failed:
return str(result)
def slugify_version(version):
# Python 2.7.15 -> python_2_7_15
return re.sub(r'[^\d\w]+', '_', version).lower()
def slugify_command_version(command, user=None):
command_output = str(sudo(command, user=user))
return slugify_version(command_output)
def to_bool(value: Any) -> bool:
"""Convert a command line choice to a boolean value"""
true_values = ('yes', 'y', 'true', 't', '1')
if isinstance(value, (bool, int)):
return bool(value)
if isinstance(value, str) and value.lower() in true_values:
return True
return False
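# Parallel task execution is assumed to rely on fork(); Python 3.8 changed the
# default multiprocessing start method on macOS to "spawn", which breaks it.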
def is_parallel_supported():
return not (sys.version_info > (3, 8) and platform.system() == 'Darwin')
template = partial(upload_template, use_jinja=True, backup=False)
def with_random_node(nodes: List[str], randomizer: Callable) -> Callable:
def decorator(func: Callable) -> Callable:
@wraps(func)
def wrapper(*task_args: Any, **task_kwargs: Any) -> Any:
node_idx = randomizer(*task_args, **task_kwargs)
task_kwargs['node'] = nodes[node_idx]
return func(*task_args, **task_kwargs)
return wrapper
return decorator
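# Derive a deterministic node index from the branch name so the same branch always targets the same host.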
with_branch_node = partial(with_random_node,
randomizer=lambda b, *args, **kws: sum(map(ord, b.name)) % len(b.hosts))
|
[
"functools.partial",
"fabric.api.path",
"fabric.api.quiet",
"fabric.api.sudo",
"os.environ.get",
"functools.wraps",
"platform.system",
"re.sub"
] |
[((4414, 4468), 'functools.partial', 'partial', (['upload_template'], {'use_jinja': '(True)', 'backup': '(False)'}), '(upload_template, use_jinja=True, backup=False)\n', (4421, 4468), False, 'from functools import partial, wraps\n'), ((331, 355), 'functools.partial', 'partial', (['sudo'], {'user': 'user'}), '(sudo, user=user)\n', (338, 355), False, 'from functools import partial, wraps\n'), ((2077, 2088), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (2082, 2088), False, 'from functools import partial, wraps\n'), ((3357, 3455), 'fabric.api.sudo', 'sudo', (['f"""find {paths} -type f -print0 | sort -z | xargs -0 tar cf - | tar xOf - | shasum"""'], {}), "(\n f'find {paths} -type f -print0 | sort -z | xargs -0 tar cf - | tar xOf - | shasum'\n )\n", (3361, 3455), False, 'from fabric.api import sudo, run, path, puts, quiet\n'), ((2442, 2470), 'fabric.api.path', 'path', (['python_path', '"""prepend"""'], {}), "(python_path, 'prepend')\n", (2446, 2470), False, 'from fabric.api import sudo, run, path, puts, quiet\n'), ((2546, 2578), 'fabric.api.path', 'path', (['virtualenv_path', '"""prepend"""'], {}), "(virtualenv_path, 'prepend')\n", (2550, 2578), False, 'from fabric.api import sudo, run, path, puts, quiet\n'), ((2785, 2792), 'fabric.api.quiet', 'quiet', ([], {}), '()\n', (2790, 2792), False, 'from fabric.api import sudo, run, path, puts, quiet\n'), ((3066, 3165), 'fabric.api.sudo', 'sudo', (['f"""find {paths} -type f -print0 | sort -z | xargs -0 tar cf - | shasum > {filename}"""'], {}), "(\n f'find {paths} -type f -print0 | sort -z | xargs -0 tar cf - | shasum > {filename}'\n )\n", (3070, 3165), False, 'from fabric.api import sudo, run, path, puts, quiet\n'), ((3609, 3616), 'fabric.api.quiet', 'quiet', ([], {}), '()\n', (3614, 3616), False, 'from fabric.api import sudo, run, path, puts, quiet\n'), ((3635, 3659), 'fabric.api.sudo', 'sudo', (['f"""readlink {path}"""'], {}), "(f'readlink {path}')\n", (3639, 3659), False, 'from fabric.api import sudo, run, path, puts, quiet\n'), ((3911, 3935), 'fabric.api.sudo', 'sudo', (['command'], {'user': 'user'}), '(command, user=user)\n', (3915, 3935), False, 'from fabric.api import sudo, run, path, puts, quiet\n'), ((4601, 4612), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (4606, 4612), False, 'from functools import partial, wraps\n'), ((3793, 3827), 're.sub', 're.sub', (['"""[^\\\\d\\\\w]+"""', '"""_"""', 'version'], {}), "('[^\\\\d\\\\w]+', '_', version)\n", (3799, 3827), False, 'import re\n'), ((973, 1003), 'os.environ.get', 'os.environ.get', (['"""BUILD_BRANCH"""'], {}), "('BUILD_BRANCH')\n", (987, 1003), False, 'import os\n'), ((1663, 1673), 'functools.wraps', 'wraps', (['arg'], {}), '(arg)\n', (1668, 1673), False, 'from functools import partial, wraps\n'), ((2809, 2909), 'fabric.api.sudo', 'sudo', (['f"""find {paths} -type f -print0 | sort -z | xargs -0 tar cf - | shasum -c {filename}"""'], {}), "(\n f'find {paths} -type f -print0 | sort -z | xargs -0 tar cf - | shasum -c {filename}'\n )\n", (2813, 2909), False, 'from fabric.api import sudo, run, path, puts, quiet\n'), ((4370, 4387), 'platform.system', 'platform.system', ([], {}), '()\n', (4385, 4387), False, 'import platform\n')]
|
from cumulus.util.template_query import TemplateQuery
try:
# python 3
from unittest.mock import patch # noqa
from unittest.mock import MagicMock
except ImportError:  # noqa
# python 2
from mock import patch, MagicMock # noqa
import unittest
import troposphere
from troposphere import codepipeline # noqa
from cumulus.chain import chaincontext
from cumulus.steps.dev_tools import cloud_formation_action, META_PIPELINE_BUCKET_POLICY_REF
from cumulus.types.cloudformation import action_mode
class TestCloudFormationAction(unittest.TestCase):
def setUp(self):
self.context = chaincontext.ChainContext(
template=troposphere.Template(),
instance_name='justtestin'
)
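        # the action under test expects this metadata key to be populated; a placeholder value is enough here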
self.context.metadata[META_PIPELINE_BUCKET_POLICY_REF] = "blah"
self.pipeline_name = "ThatPipeline"
self.deploy_stage_name = "DeployIt"
TestCloudFormationAction._add_pipeline_and_stage_to_template(self.context.template, self.pipeline_name, self.deploy_stage_name)
def tearDown(self):
del self.context
@staticmethod
def _add_pipeline_and_stage_to_template(template, pipeline_name, deploy_stage_name):
pipeline = template.add_resource(troposphere.codepipeline.Pipeline(
pipeline_name,
Stages=[]
))
# Not worrying about adding a source stage right now, as the tests are not assumed to actually
# trigger a CloudFormation build of the pipeline
deploy_stage = template.add_resource(troposphere.codepipeline.Stages(
Name=deploy_stage_name,
Actions=[]
))
pipeline.properties['Stages'].append(deploy_stage)
def test_handle_adds_cloud_formation_action_to_stage(self):
action = cloud_formation_action.CloudFormationAction(
action_name="CloudFormation",
input_artifact_names=["InfraInput"],
input_template_path="InfraInput::template.json",
input_template_configuration="InfraInput::myenv.json",
stage_name_to_add=self.deploy_stage_name,
stack_name="my-microservice",
action_mode=action_mode.ActionMode.REPLACE_ON_FAILURE,
# cfn_action_config_role_arn=troposphere.NoValue,
# cfn_action_role_arn=troposphere.NoValue
)
action.handle(self.context)
deploy_stage = TemplateQuery.get_resource_by_type(self.context.template, codepipeline.Stages)[0]
self.assertEqual(len(deploy_stage.Actions), 1)
test_action = deploy_stage.Actions[0]
self.assertEqual(test_action.Name, "CloudFormation")
self.assertEqual(test_action.ActionTypeId.Category, "Deploy")
self.assertEqual(test_action.ActionTypeId.Provider, "CloudFormation")
self.assertEqual(test_action.Configuration['TemplatePath'], "InfraInput::template.json")
self.assertEqual(test_action.Configuration['TemplateConfiguration'], "InfraInput::myenv.json")
self.assertEqual(test_action.Configuration['ActionMode'], action_mode.ActionMode.REPLACE_ON_FAILURE.value)
        self.assertEqual(len(test_action.InputArtifacts), 1)
        self.assertEqual(test_action.InputArtifacts[0].Name, "InfraInput")
# By default no output artifact is created
self.assertFalse(hasattr(test_action, 'OutputArtifacts'))
self.assertFalse('OutputFileName' in test_action.Configuration)
def test_raises_error_if_target_stage_does_not_exist(self):
action = cloud_formation_action.CloudFormationAction(
action_name="CloudFormation",
input_artifact_names=["InfraInput"],
input_template_path="InfraInput::template.json",
input_template_configuration="InfraInput::myenv.json",
stage_name_to_add="ThisStageDoesNotExist",
stack_name="my-microservice",
action_mode=action_mode.ActionMode.REPLACE_ON_FAILURE
)
self.assertRaises(
ValueError,
action.handle,
self.context)
def test_can_add_multiple_input_artifacts(self):
action = cloud_formation_action.CloudFormationAction(
action_name="CloudFormation",
input_artifact_names=["InfraInput", "ParameterInput"],
input_template_path="InfraInput::template.json",
input_template_configuration="ParameterInput::myenv.json",
stage_name_to_add=self.deploy_stage_name,
stack_name="my-microservice",
action_mode=action_mode.ActionMode.REPLACE_ON_FAILURE
)
action.handle(self.context)
deploy_stage = TemplateQuery.get_resource_by_type(self.context.template, codepipeline.Stages)[0]
self.assertEqual(len(deploy_stage.Actions), 1)
test_action = deploy_stage.Actions[0]
self.assertEqual(test_action.Name, "CloudFormation")
self.assertEqual(test_action.Configuration['TemplatePath'], "InfraInput::template.json")
self.assertEqual(test_action.Configuration['TemplateConfiguration'], "ParameterInput::myenv.json")
        self.assertEqual(len(test_action.InputArtifacts), 2)
        self.assertEqual(test_action.InputArtifacts[0].Name, "InfraInput")
        self.assertEqual(test_action.InputArtifacts[1].Name, "ParameterInput")
def test_can_create_output_artifact(self):
action = cloud_formation_action.CloudFormationAction(
action_name="CloudFormation",
input_artifact_names=["InfraInput"],
input_template_path="InfraInput::template.json",
input_template_configuration="InfraInput::myenv.json",
output_artifact_name="AllOfTheThings",
stage_name_to_add=self.deploy_stage_name,
stack_name="my-microservice",
action_mode=action_mode.ActionMode.REPLACE_ON_FAILURE
)
action.handle(self.context)
deploy_stage = TemplateQuery.get_resource_by_type(self.context.template, codepipeline.Stages)[0]
self.assertEqual(len(deploy_stage.Actions), 1)
test_action = deploy_stage.Actions[0]
        self.assertEqual(len(test_action.OutputArtifacts), 1)
        self.assertEqual(test_action.OutputArtifacts[0].Name, "AllOfTheThings")
        self.assertEqual(test_action.Configuration['OutputFileName'], "StackOutputs.json")
def test_sets_role_arn_to_cfn_action(self):
expected_arn = "im_an_arn"
action = cloud_formation_action.CloudFormationAction(
action_name="CloudFormation",
input_artifact_names=["InfraInput"],
input_template_path="InfraInput::template.json",
input_template_configuration="InfraInput::myenv.json",
output_artifact_name="AllOfTheThings",
stage_name_to_add=self.deploy_stage_name,
stack_name="my-microservice",
action_mode=action_mode.ActionMode.REPLACE_ON_FAILURE,
cfn_action_role_arn=expected_arn
)
action.handle(self.context)
deploy_stage = TemplateQuery.get_resource_by_type(self.context.template, codepipeline.Stages)[0]
self.assertEqual(len(deploy_stage.Actions), 1)
test_action = deploy_stage.Actions[0]
        self.assertEqual(test_action.RoleArn, expected_arn)
self.assertTrue(hasattr(test_action, 'RoleArn'))
def test_does_not_set_role_arn_to_cfn_action(self):
action = cloud_formation_action.CloudFormationAction(
action_name="CloudFormation",
input_artifact_names=["InfraInput"],
input_template_path="InfraInput::template.json",
input_template_configuration="InfraInput::myenv.json",
output_artifact_name="AllOfTheThings",
stage_name_to_add=self.deploy_stage_name,
stack_name="my-microservice",
action_mode=action_mode.ActionMode.REPLACE_ON_FAILURE,
)
action.handle(self.context)
deploy_stage = TemplateQuery.get_resource_by_type(self.context.template, codepipeline.Stages)[0]
self.assertEqual(len(deploy_stage.Actions), 1)
test_action = deploy_stage.Actions[0]
self.assertFalse(hasattr(test_action, 'RoleArn'))
def test_sets_config_role_arn_to_cfn_action_configuration(self):
expected_arn = 'im_another_arn'
action = cloud_formation_action.CloudFormationAction(
action_name="CloudFormation",
input_artifact_names=["InfraInput"],
input_template_path="InfraInput::template.json",
input_template_configuration="InfraInput::myenv.json",
output_artifact_name="AllOfTheThings",
stage_name_to_add=self.deploy_stage_name,
stack_name="my-microservice",
action_mode=action_mode.ActionMode.REPLACE_ON_FAILURE,
cfn_action_config_role_arn=expected_arn,
)
action.handle(self.context)
deploy_stage = TemplateQuery.get_resource_by_type(self.context.template, codepipeline.Stages)[0]
self.assertEqual(len(deploy_stage.Actions), 1)
test_action = deploy_stage.Actions[0]
self.assertTrue("RoleArn" in test_action.Configuration)
self.assertEqual(test_action.Configuration['RoleArn'], expected_arn)
self.assertFalse(hasattr(test_action, 'RoleArn'))
def test_uses_default_config_role_arn_to_cfn_action_configuration(self):
unexpected_arn = 'im_another_arn'
action = cloud_formation_action.CloudFormationAction(
action_name="CloudFormation",
input_artifact_names=["InfraInput"],
input_template_path="InfraInput::template.json",
input_template_configuration="InfraInput::myenv.json",
output_artifact_name="AllOfTheThings",
stage_name_to_add=self.deploy_stage_name,
stack_name="my-microservice",
action_mode=action_mode.ActionMode.REPLACE_ON_FAILURE,
)
action.handle(self.context)
deploy_stage = TemplateQuery.get_resource_by_type(self.context.template, codepipeline.Stages)[0]
self.assertEqual(len(deploy_stage.Actions), 1)
test_action = deploy_stage.Actions[0]
self.assertTrue("RoleArn" in test_action.Configuration)
self.assertNotEqual(test_action.Configuration['RoleArn'], unexpected_arn)
self.assertFalse(hasattr(test_action, 'RoleArn'))
self.assertTrue(test_action.Configuration['RoleArn'].__class__ is troposphere.GetAtt)
|
[
"troposphere.codepipeline.Stages",
"cumulus.util.template_query.TemplateQuery.get_resource_by_type",
"cumulus.steps.dev_tools.cloud_formation_action.CloudFormationAction",
"troposphere.Template",
"troposphere.codepipeline.Pipeline"
] |
[((1762, 2126), 'cumulus.steps.dev_tools.cloud_formation_action.CloudFormationAction', 'cloud_formation_action.CloudFormationAction', ([], {'action_name': '"""CloudFormation"""', 'input_artifact_names': "['InfraInput']", 'input_template_path': '"""InfraInput::template.json"""', 'input_template_configuration': '"""InfraInput::myenv.json"""', 'stage_name_to_add': 'self.deploy_stage_name', 'stack_name': '"""my-microservice"""', 'action_mode': 'action_mode.ActionMode.REPLACE_ON_FAILURE'}), "(action_name='CloudFormation',\n input_artifact_names=['InfraInput'], input_template_path=\n 'InfraInput::template.json', input_template_configuration=\n 'InfraInput::myenv.json', stage_name_to_add=self.deploy_stage_name,\n stack_name='my-microservice', action_mode=action_mode.ActionMode.\n REPLACE_ON_FAILURE)\n", (1805, 2126), False, 'from cumulus.steps.dev_tools import cloud_formation_action, META_PIPELINE_BUCKET_POLICY_REF\n'), ((3493, 3858), 'cumulus.steps.dev_tools.cloud_formation_action.CloudFormationAction', 'cloud_formation_action.CloudFormationAction', ([], {'action_name': '"""CloudFormation"""', 'input_artifact_names': "['InfraInput']", 'input_template_path': '"""InfraInput::template.json"""', 'input_template_configuration': '"""InfraInput::myenv.json"""', 'stage_name_to_add': '"""ThisStageDoesNotExist"""', 'stack_name': '"""my-microservice"""', 'action_mode': 'action_mode.ActionMode.REPLACE_ON_FAILURE'}), "(action_name='CloudFormation',\n input_artifact_names=['InfraInput'], input_template_path=\n 'InfraInput::template.json', input_template_configuration=\n 'InfraInput::myenv.json', stage_name_to_add='ThisStageDoesNotExist',\n stack_name='my-microservice', action_mode=action_mode.ActionMode.\n REPLACE_ON_FAILURE)\n", (3536, 3858), False, 'from cumulus.steps.dev_tools import cloud_formation_action, META_PIPELINE_BUCKET_POLICY_REF\n'), ((4105, 4488), 'cumulus.steps.dev_tools.cloud_formation_action.CloudFormationAction', 'cloud_formation_action.CloudFormationAction', ([], {'action_name': '"""CloudFormation"""', 'input_artifact_names': "['InfraInput', 'ParameterInput']", 'input_template_path': '"""InfraInput::template.json"""', 'input_template_configuration': '"""ParameterInput::myenv.json"""', 'stage_name_to_add': 'self.deploy_stage_name', 'stack_name': '"""my-microservice"""', 'action_mode': 'action_mode.ActionMode.REPLACE_ON_FAILURE'}), "(action_name='CloudFormation',\n input_artifact_names=['InfraInput', 'ParameterInput'],\n input_template_path='InfraInput::template.json',\n input_template_configuration='ParameterInput::myenv.json',\n stage_name_to_add=self.deploy_stage_name, stack_name='my-microservice',\n action_mode=action_mode.ActionMode.REPLACE_ON_FAILURE)\n", (4148, 4488), False, 'from cumulus.steps.dev_tools import cloud_formation_action, META_PIPELINE_BUCKET_POLICY_REF\n'), ((5356, 5758), 'cumulus.steps.dev_tools.cloud_formation_action.CloudFormationAction', 'cloud_formation_action.CloudFormationAction', ([], {'action_name': '"""CloudFormation"""', 'input_artifact_names': "['InfraInput']", 'input_template_path': '"""InfraInput::template.json"""', 'input_template_configuration': '"""InfraInput::myenv.json"""', 'output_artifact_name': '"""AllOfTheThings"""', 'stage_name_to_add': 'self.deploy_stage_name', 'stack_name': '"""my-microservice"""', 'action_mode': 'action_mode.ActionMode.REPLACE_ON_FAILURE'}), "(action_name='CloudFormation',\n input_artifact_names=['InfraInput'], input_template_path=\n 'InfraInput::template.json', input_template_configuration=\n 'InfraInput::myenv.json', output_artifact_name='AllOfTheThings',\n stage_name_to_add=self.deploy_stage_name, stack_name='my-microservice',\n action_mode=action_mode.ActionMode.REPLACE_ON_FAILURE)\n", (5399, 5758), False, 'from cumulus.steps.dev_tools import cloud_formation_action, META_PIPELINE_BUCKET_POLICY_REF\n'), ((6425, 6865), 'cumulus.steps.dev_tools.cloud_formation_action.CloudFormationAction', 'cloud_formation_action.CloudFormationAction', ([], {'action_name': '"""CloudFormation"""', 'input_artifact_names': "['InfraInput']", 'input_template_path': '"""InfraInput::template.json"""', 'input_template_configuration': '"""InfraInput::myenv.json"""', 'output_artifact_name': '"""AllOfTheThings"""', 'stage_name_to_add': 'self.deploy_stage_name', 'stack_name': '"""my-microservice"""', 'action_mode': 'action_mode.ActionMode.REPLACE_ON_FAILURE', 'cfn_action_role_arn': 'expected_arn'}), "(action_name='CloudFormation',\n input_artifact_names=['InfraInput'], input_template_path=\n 'InfraInput::template.json', input_template_configuration=\n 'InfraInput::myenv.json', output_artifact_name='AllOfTheThings',\n stage_name_to_add=self.deploy_stage_name, stack_name='my-microservice',\n action_mode=action_mode.ActionMode.REPLACE_ON_FAILURE,\n cfn_action_role_arn=expected_arn)\n", (6468, 6865), False, 'from cumulus.steps.dev_tools import cloud_formation_action, META_PIPELINE_BUCKET_POLICY_REF\n'), ((7395, 7797), 'cumulus.steps.dev_tools.cloud_formation_action.CloudFormationAction', 'cloud_formation_action.CloudFormationAction', ([], {'action_name': '"""CloudFormation"""', 'input_artifact_names': "['InfraInput']", 'input_template_path': '"""InfraInput::template.json"""', 'input_template_configuration': '"""InfraInput::myenv.json"""', 'output_artifact_name': '"""AllOfTheThings"""', 'stage_name_to_add': 'self.deploy_stage_name', 'stack_name': '"""my-microservice"""', 'action_mode': 'action_mode.ActionMode.REPLACE_ON_FAILURE'}), "(action_name='CloudFormation',\n input_artifact_names=['InfraInput'], input_template_path=\n 'InfraInput::template.json', input_template_configuration=\n 'InfraInput::myenv.json', output_artifact_name='AllOfTheThings',\n stage_name_to_add=self.deploy_stage_name, stack_name='my-microservice',\n action_mode=action_mode.ActionMode.REPLACE_ON_FAILURE)\n", (7438, 7797), False, 'from cumulus.steps.dev_tools import cloud_formation_action, META_PIPELINE_BUCKET_POLICY_REF\n'), ((8313, 8760), 'cumulus.steps.dev_tools.cloud_formation_action.CloudFormationAction', 'cloud_formation_action.CloudFormationAction', ([], {'action_name': '"""CloudFormation"""', 'input_artifact_names': "['InfraInput']", 'input_template_path': '"""InfraInput::template.json"""', 'input_template_configuration': '"""InfraInput::myenv.json"""', 'output_artifact_name': '"""AllOfTheThings"""', 'stage_name_to_add': 'self.deploy_stage_name', 'stack_name': '"""my-microservice"""', 'action_mode': 'action_mode.ActionMode.REPLACE_ON_FAILURE', 'cfn_action_config_role_arn': 'expected_arn'}), "(action_name='CloudFormation',\n input_artifact_names=['InfraInput'], input_template_path=\n 'InfraInput::template.json', input_template_configuration=\n 'InfraInput::myenv.json', output_artifact_name='AllOfTheThings',\n stage_name_to_add=self.deploy_stage_name, stack_name='my-microservice',\n action_mode=action_mode.ActionMode.REPLACE_ON_FAILURE,\n cfn_action_config_role_arn=expected_arn)\n", (8356, 8760), False, 'from cumulus.steps.dev_tools import cloud_formation_action, META_PIPELINE_BUCKET_POLICY_REF\n'), ((9435, 9837), 'cumulus.steps.dev_tools.cloud_formation_action.CloudFormationAction', 'cloud_formation_action.CloudFormationAction', ([], {'action_name': '"""CloudFormation"""', 'input_artifact_names': "['InfraInput']", 'input_template_path': '"""InfraInput::template.json"""', 'input_template_configuration': '"""InfraInput::myenv.json"""', 'output_artifact_name': '"""AllOfTheThings"""', 'stage_name_to_add': 'self.deploy_stage_name', 'stack_name': '"""my-microservice"""', 'action_mode': 'action_mode.ActionMode.REPLACE_ON_FAILURE'}), "(action_name='CloudFormation',\n input_artifact_names=['InfraInput'], input_template_path=\n 'InfraInput::template.json', input_template_configuration=\n 'InfraInput::myenv.json', output_artifact_name='AllOfTheThings',\n stage_name_to_add=self.deploy_stage_name, stack_name='my-microservice',\n action_mode=action_mode.ActionMode.REPLACE_ON_FAILURE)\n", (9478, 9837), False, 'from cumulus.steps.dev_tools import cloud_formation_action, META_PIPELINE_BUCKET_POLICY_REF\n'), ((1216, 1275), 'troposphere.codepipeline.Pipeline', 'troposphere.codepipeline.Pipeline', (['pipeline_name'], {'Stages': '[]'}), '(pipeline_name, Stages=[])\n', (1249, 1275), False, 'import troposphere\n'), ((1518, 1585), 'troposphere.codepipeline.Stages', 'troposphere.codepipeline.Stages', ([], {'Name': 'deploy_stage_name', 'Actions': '[]'}), '(Name=deploy_stage_name, Actions=[])\n', (1549, 1585), False, 'import troposphere\n'), ((2376, 2454), 'cumulus.util.template_query.TemplateQuery.get_resource_by_type', 'TemplateQuery.get_resource_by_type', (['self.context.template', 'codepipeline.Stages'], {}), '(self.context.template, codepipeline.Stages)\n', (2410, 2454), False, 'from cumulus.util.template_query import TemplateQuery\n'), ((4624, 4702), 'cumulus.util.template_query.TemplateQuery.get_resource_by_type', 'TemplateQuery.get_resource_by_type', (['self.context.template', 'codepipeline.Stages'], {}), '(self.context.template, codepipeline.Stages)\n', (4658, 4702), False, 'from cumulus.util.template_query import TemplateQuery\n'), ((5904, 5982), 'cumulus.util.template_query.TemplateQuery.get_resource_by_type', 'TemplateQuery.get_resource_by_type', (['self.context.template', 'codepipeline.Stages'], {}), '(self.context.template, codepipeline.Stages)\n', (5938, 5982), False, 'from cumulus.util.template_query import TemplateQuery\n'), ((7019, 7097), 'cumulus.util.template_query.TemplateQuery.get_resource_by_type', 'TemplateQuery.get_resource_by_type', (['self.context.template', 'codepipeline.Stages'], {}), '(self.context.template, codepipeline.Stages)\n', (7053, 7097), False, 'from cumulus.util.template_query import TemplateQuery\n'), ((7944, 8022), 'cumulus.util.template_query.TemplateQuery.get_resource_by_type', 'TemplateQuery.get_resource_by_type', (['self.context.template', 'codepipeline.Stages'], {}), '(self.context.template, codepipeline.Stages)\n', (7978, 8022), False, 'from cumulus.util.template_query import TemplateQuery\n'), ((8915, 8993), 'cumulus.util.template_query.TemplateQuery.get_resource_by_type', 'TemplateQuery.get_resource_by_type', (['self.context.template', 'codepipeline.Stages'], {}), '(self.context.template, codepipeline.Stages)\n', (8949, 8993), False, 'from cumulus.util.template_query import TemplateQuery\n'), ((9984, 10062), 'cumulus.util.template_query.TemplateQuery.get_resource_by_type', 'TemplateQuery.get_resource_by_type', (['self.context.template', 'codepipeline.Stages'], {}), '(self.context.template, codepipeline.Stages)\n', (10018, 10062), False, 'from cumulus.util.template_query import TemplateQuery\n'), ((647, 669), 'troposphere.Template', 'troposphere.Template', ([], {}), '()\n', (667, 669), False, 'import troposphere\n')]
|
# -*- coding: utf-8 -*-
import mock
from pytube import cli
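# mock.patch decorators are applied bottom-up, so the patch nearest the
# function ('pytube.cli.sys') supplies the first mock argument.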
@mock.patch('pytube.cli.YouTube')
@mock.patch('pytube.cli.sys')
def test_download(mock_sys, MockYouTube):
instance = MockYouTube.return_value
instance.prefetch_init.return_value = None
instance.streams = mock.Mock()
cli.download('asdf', 'asdf')
|
[
"mock.patch",
"mock.Mock",
"pytube.cli.download"
] |
[((63, 95), 'mock.patch', 'mock.patch', (['"""pytube.cli.YouTube"""'], {}), "('pytube.cli.YouTube')\n", (73, 95), False, 'import mock\n'), ((97, 125), 'mock.patch', 'mock.patch', (['"""pytube.cli.sys"""'], {}), "('pytube.cli.sys')\n", (107, 125), False, 'import mock\n'), ((278, 289), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (287, 289), False, 'import mock\n'), ((294, 322), 'pytube.cli.download', 'cli.download', (['"""asdf"""', '"""asdf"""'], {}), "('asdf', 'asdf')\n", (306, 322), False, 'from pytube import cli\n')]
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: data.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='data.proto',
package='dataArray',
syntax='proto3',
serialized_options=None,
serialized_pb=_b('\n\ndata.proto\x12\tdataArray\"\x1e\n\x1cGetIntDataArrayStreamRequest\"\x14\n\x12ServerSendResponse\"\x1c\n\x0cIntDataArray\x12\x0c\n\x04\x64\x61ta\x18\x01 \x03(\x05\x32\xb5\x01\n\nDataServer\x12M\n\x11WriteIntDataArray\x12\x17.dataArray.IntDataArray\x1a\x1d.dataArray.ServerSendResponse\"\x00\x12X\n\x10GetIntDataArrays\x12\'.dataArray.GetIntDataArrayStreamRequest\x1a\x17.dataArray.IntDataArray\"\x00\x30\x01\x32U\n\nDataClient\x12G\n\x11WriteIntDataArray\x12\x17.dataArray.IntDataArray\x1a\x17.dataArray.IntDataArray\"\x00\x62\x06proto3')
)
_GETINTDATAARRAYSTREAMREQUEST = _descriptor.Descriptor(
name='GetIntDataArrayStreamRequest',
full_name='dataArray.GetIntDataArrayStreamRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=25,
serialized_end=55,
)
_SERVERSENDRESPONSE = _descriptor.Descriptor(
name='ServerSendResponse',
full_name='dataArray.ServerSendResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=57,
serialized_end=77,
)
_INTDATAARRAY = _descriptor.Descriptor(
name='IntDataArray',
full_name='dataArray.IntDataArray',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='data', full_name='dataArray.IntDataArray.data', index=0,
number=1, type=5, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=79,
serialized_end=107,
)
DESCRIPTOR.message_types_by_name['GetIntDataArrayStreamRequest'] = _GETINTDATAARRAYSTREAMREQUEST
DESCRIPTOR.message_types_by_name['ServerSendResponse'] = _SERVERSENDRESPONSE
DESCRIPTOR.message_types_by_name['IntDataArray'] = _INTDATAARRAY
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
GetIntDataArrayStreamRequest = _reflection.GeneratedProtocolMessageType('GetIntDataArrayStreamRequest', (_message.Message,), dict(
DESCRIPTOR = _GETINTDATAARRAYSTREAMREQUEST,
__module__ = 'data_pb2'
# @@protoc_insertion_point(class_scope:dataArray.GetIntDataArrayStreamRequest)
))
_sym_db.RegisterMessage(GetIntDataArrayStreamRequest)
ServerSendResponse = _reflection.GeneratedProtocolMessageType('ServerSendResponse', (_message.Message,), dict(
DESCRIPTOR = _SERVERSENDRESPONSE,
__module__ = 'data_pb2'
# @@protoc_insertion_point(class_scope:dataArray.ServerSendResponse)
))
_sym_db.RegisterMessage(ServerSendResponse)
IntDataArray = _reflection.GeneratedProtocolMessageType('IntDataArray', (_message.Message,), dict(
DESCRIPTOR = _INTDATAARRAY,
__module__ = 'data_pb2'
# @@protoc_insertion_point(class_scope:dataArray.IntDataArray)
))
_sym_db.RegisterMessage(IntDataArray)
_DATASERVER = _descriptor.ServiceDescriptor(
name='DataServer',
full_name='dataArray.DataServer',
file=DESCRIPTOR,
index=0,
serialized_options=None,
serialized_start=110,
serialized_end=291,
methods=[
_descriptor.MethodDescriptor(
name='WriteIntDataArray',
full_name='dataArray.DataServer.WriteIntDataArray',
index=0,
containing_service=None,
input_type=_INTDATAARRAY,
output_type=_SERVERSENDRESPONSE,
serialized_options=None,
),
_descriptor.MethodDescriptor(
name='GetIntDataArrays',
full_name='dataArray.DataServer.GetIntDataArrays',
index=1,
containing_service=None,
input_type=_GETINTDATAARRAYSTREAMREQUEST,
output_type=_INTDATAARRAY,
serialized_options=None,
),
])
_sym_db.RegisterServiceDescriptor(_DATASERVER)
DESCRIPTOR.services_by_name['DataServer'] = _DATASERVER
_DATACLIENT = _descriptor.ServiceDescriptor(
name='DataClient',
full_name='dataArray.DataClient',
file=DESCRIPTOR,
index=1,
serialized_options=None,
serialized_start=293,
serialized_end=378,
methods=[
_descriptor.MethodDescriptor(
name='WriteIntDataArray',
full_name='dataArray.DataClient.WriteIntDataArray',
index=0,
containing_service=None,
input_type=_INTDATAARRAY,
output_type=_INTDATAARRAY,
serialized_options=None,
),
])
_sym_db.RegisterServiceDescriptor(_DATACLIENT)
DESCRIPTOR.services_by_name['DataClient'] = _DATACLIENT
# @@protoc_insertion_point(module_scope)
|
[
"google.protobuf.symbol_database.Default",
"google.protobuf.descriptor.FieldDescriptor",
"google.protobuf.descriptor.Descriptor",
"google.protobuf.descriptor.MethodDescriptor"
] |
[((434, 460), 'google.protobuf.symbol_database.Default', '_symbol_database.Default', ([], {}), '()\n', (458, 460), True, 'from google.protobuf import symbol_database as _symbol_database\n'), ((1196, 1577), 'google.protobuf.descriptor.Descriptor', '_descriptor.Descriptor', ([], {'name': '"""GetIntDataArrayStreamRequest"""', 'full_name': '"""dataArray.GetIntDataArrayStreamRequest"""', 'filename': 'None', 'file': 'DESCRIPTOR', 'containing_type': 'None', 'fields': '[]', 'extensions': '[]', 'nested_types': '[]', 'enum_types': '[]', 'serialized_options': 'None', 'is_extendable': '(False)', 'syntax': '"""proto3"""', 'extension_ranges': '[]', 'oneofs': '[]', 'serialized_start': '(25)', 'serialized_end': '(55)'}), "(name='GetIntDataArrayStreamRequest', full_name=\n 'dataArray.GetIntDataArrayStreamRequest', filename=None, file=\n DESCRIPTOR, containing_type=None, fields=[], extensions=[],\n nested_types=[], enum_types=[], serialized_options=None, is_extendable=\n False, syntax='proto3', extension_ranges=[], oneofs=[],\n serialized_start=25, serialized_end=55)\n", (1218, 1577), True, 'from google.protobuf import descriptor as _descriptor\n'), ((1626, 1986), 'google.protobuf.descriptor.Descriptor', '_descriptor.Descriptor', ([], {'name': '"""ServerSendResponse"""', 'full_name': '"""dataArray.ServerSendResponse"""', 'filename': 'None', 'file': 'DESCRIPTOR', 'containing_type': 'None', 'fields': '[]', 'extensions': '[]', 'nested_types': '[]', 'enum_types': '[]', 'serialized_options': 'None', 'is_extendable': '(False)', 'syntax': '"""proto3"""', 'extension_ranges': '[]', 'oneofs': '[]', 'serialized_start': '(57)', 'serialized_end': '(77)'}), "(name='ServerSendResponse', full_name=\n 'dataArray.ServerSendResponse', filename=None, file=DESCRIPTOR,\n containing_type=None, fields=[], extensions=[], nested_types=[],\n enum_types=[], serialized_options=None, is_extendable=False, syntax=\n 'proto3', extension_ranges=[], oneofs=[], serialized_start=57,\n serialized_end=77)\n", (1648, 1986), True, 'from google.protobuf import descriptor as _descriptor\n'), ((2190, 2520), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', ([], {'name': '"""data"""', 'full_name': '"""dataArray.IntDataArray.data"""', 'index': '(0)', 'number': '(1)', 'type': '(5)', 'cpp_type': '(1)', 'label': '(3)', 'has_default_value': '(False)', 'default_value': '[]', 'message_type': 'None', 'enum_type': 'None', 'containing_type': 'None', 'is_extension': '(False)', 'extension_scope': 'None', 'serialized_options': 'None', 'file': 'DESCRIPTOR'}), "(name='data', full_name=\n 'dataArray.IntDataArray.data', index=0, number=1, type=5, cpp_type=1,\n label=3, has_default_value=False, default_value=[], message_type=None,\n enum_type=None, containing_type=None, is_extension=False,\n extension_scope=None, serialized_options=None, file=DESCRIPTOR)\n", (2217, 2520), True, 'from google.protobuf import descriptor as _descriptor\n'), ((4168, 4406), 'google.protobuf.descriptor.MethodDescriptor', '_descriptor.MethodDescriptor', ([], {'name': '"""WriteIntDataArray"""', 'full_name': '"""dataArray.DataServer.WriteIntDataArray"""', 'index': '(0)', 'containing_service': 'None', 'input_type': '_INTDATAARRAY', 'output_type': '_SERVERSENDRESPONSE', 'serialized_options': 'None'}), "(name='WriteIntDataArray', full_name=\n 'dataArray.DataServer.WriteIntDataArray', index=0, containing_service=\n None, input_type=_INTDATAARRAY, output_type=_SERVERSENDRESPONSE,\n serialized_options=None)\n", (4196, 4406), True, 'from google.protobuf import descriptor as _descriptor\n'), ((4429, 4676), 'google.protobuf.descriptor.MethodDescriptor', '_descriptor.MethodDescriptor', ([], {'name': '"""GetIntDataArrays"""', 'full_name': '"""dataArray.DataServer.GetIntDataArrays"""', 'index': '(1)', 'containing_service': 'None', 'input_type': '_GETINTDATAARRAYSTREAMREQUEST', 'output_type': '_INTDATAARRAY', 'serialized_options': 'None'}), "(name='GetIntDataArrays', full_name=\n 'dataArray.DataServer.GetIntDataArrays', index=1, containing_service=\n None, input_type=_GETINTDATAARRAYSTREAMREQUEST, output_type=\n _INTDATAARRAY, serialized_options=None)\n", (4457, 4676), True, 'from google.protobuf import descriptor as _descriptor\n'), ((5024, 5256), 'google.protobuf.descriptor.MethodDescriptor', '_descriptor.MethodDescriptor', ([], {'name': '"""WriteIntDataArray"""', 'full_name': '"""dataArray.DataClient.WriteIntDataArray"""', 'index': '(0)', 'containing_service': 'None', 'input_type': '_INTDATAARRAY', 'output_type': '_INTDATAARRAY', 'serialized_options': 'None'}), "(name='WriteIntDataArray', full_name=\n 'dataArray.DataClient.WriteIntDataArray', index=0, containing_service=\n None, input_type=_INTDATAARRAY, output_type=_INTDATAARRAY,\n serialized_options=None)\n", (5052, 5256), True, 'from google.protobuf import descriptor as _descriptor\n')]
|
#!/usr/bin/env python3
from setuptools import setup
setup(
name='pyverse',
version='0.3',
description='Stub description for pyverse.',
install_requires=[],
scripts=[],
packages=['pyverse'],
author='PyVerse contributors',
author_email='<EMAIL>',
maintainer="<NAME>",
maintainer_email="<EMAIL>",
url='https://github.com/FelixWolf/pyverse/',
download_url='https://cdn.softhyena.com/pip/pyverse.tar.gz',
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Communications :: Chat',
'Topic :: Internet',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
[
"setuptools.setup"
] |
[((53, 835), 'setuptools.setup', 'setup', ([], {'name': '"""pyverse"""', 'version': '"""0.3"""', 'description': '"""Stub description for pyverse."""', 'install_requires': '[]', 'scripts': '[]', 'packages': "['pyverse']", 'author': '"""PyVerse contributors"""', 'author_email': '"""<EMAIL>"""', 'maintainer': '"""<NAME>"""', 'maintainer_email': '"""<EMAIL>"""', 'url': '"""https://github.com/FelixWolf/pyverse/"""', 'download_url': '"""https://cdn.softhyena.com/pip/pyverse.tar.gz"""', 'classifiers': "['Development Status :: 4 - Beta', 'License :: OSI Approved :: MIT License',\n 'Natural Language :: English', 'Intended Audience :: Developers',\n 'Operating System :: OS Independent', 'Programming Language :: Python',\n 'Programming Language :: Python :: 3',\n 'Topic :: Communications :: Chat', 'Topic :: Internet',\n 'Topic :: Software Development :: Libraries :: Python Modules']"}), "(name='pyverse', version='0.3', description=\n 'Stub description for pyverse.', install_requires=[], scripts=[],\n packages=['pyverse'], author='PyVerse contributors', author_email=\n '<EMAIL>', maintainer='<NAME>', maintainer_email='<EMAIL>', url=\n 'https://github.com/FelixWolf/pyverse/', download_url=\n 'https://cdn.softhyena.com/pip/pyverse.tar.gz', classifiers=[\n 'Development Status :: 4 - Beta',\n 'License :: OSI Approved :: MIT License', 'Natural Language :: English',\n 'Intended Audience :: Developers', 'Operating System :: OS Independent',\n 'Programming Language :: Python', 'Programming Language :: Python :: 3',\n 'Topic :: Communications :: Chat', 'Topic :: Internet',\n 'Topic :: Software Development :: Libraries :: Python Modules'])\n", (58, 835), False, 'from setuptools import setup\n')]
|
"""
Main application
"""
import os
import wx
from events import EVT_FORWARD_MAIN_EVENT
from main import MainFrame
from wizard import SelectDBFileFrame
__VERSION__ = '1.0.0'
__APP_NAME__ = 'SQLiteClient'
def read_conf_file(fp):
"""
Read configuration from file
"""
try:
with open(fp) as f:
return f.readlines()
    except IOError:
return []
def write_conf_file(fp, content):
with open(fp, 'w') as f:
f.write(content)
class Application(wx.App):
def __init__(self):
super(Application, self).__init__()
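        # forwarded events carry the file selected in the wizard to the main-frame handler below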
self.Bind(EVT_FORWARD_MAIN_EVENT, self.on_forward_main)
sp = wx.StandardPaths.Get()
user_config_dir = sp.GetUserConfigDir()
self.app_config_dir = os.path.join(user_config_dir, __APP_NAME__)
if not os.path.exists(self.app_config_dir):
os.mkdir(self.app_config_dir)
def OnInit(self):
self.SetAppName(__APP_NAME__)
self.SetAppDisplayName(__APP_NAME__)
        return True
def on_forward_main(self, evt):
selected_file = evt.selected_file
main_frame = MainFrame(self, selected_file)
main_frame.Show()
# main_frame.Maximize()
    def write_file_history(self, fh):
        fp = os.path.join(self.app_config_dir, 'fh')
        if os.path.exists(fp):
            all_file_history = [line.rstrip('\n') for line in read_conf_file(fp)]
            if fh not in all_file_history:
                # append instead of overwriting so earlier history entries survive
                write_conf_file(fp, '\n'.join(all_file_history + [fh]))
        else:
            write_conf_file(fp, fh)
def read_file_history(self):
return read_conf_file(os.path.join(self.app_config_dir, 'fh'))
def del_file_history(self, fh):
pass
@staticmethod
def get_app_name():
return __APP_NAME__
if __name__ == '__main__':
app = Application()
frame = SelectDBFileFrame(app)
frame.Show()
app.MainLoop()
|
[
"os.mkdir",
"os.path.exists",
"wizard.SelectDBFileFrame",
"wx.StandardPaths.Get",
"os.path.join",
"main.MainFrame"
] |
[((1808, 1830), 'wizard.SelectDBFileFrame', 'SelectDBFileFrame', (['app'], {}), '(app)\n', (1825, 1830), False, 'from wizard import SelectDBFileFrame\n'), ((659, 681), 'wx.StandardPaths.Get', 'wx.StandardPaths.Get', ([], {}), '()\n', (679, 681), False, 'import wx\n'), ((760, 803), 'os.path.join', 'os.path.join', (['user_config_dir', '__APP_NAME__'], {}), '(user_config_dir, __APP_NAME__)\n', (772, 803), False, 'import os\n'), ((1122, 1152), 'main.MainFrame', 'MainFrame', (['self', 'selected_file'], {}), '(self, selected_file)\n', (1131, 1152), False, 'from main import MainFrame\n'), ((1263, 1302), 'os.path.join', 'os.path.join', (['self.app_config_dir', '"""fh"""'], {}), "(self.app_config_dir, 'fh')\n", (1275, 1302), False, 'import os\n'), ((1314, 1332), 'os.path.exists', 'os.path.exists', (['fp'], {}), '(fp)\n', (1328, 1332), False, 'import os\n'), ((819, 854), 'os.path.exists', 'os.path.exists', (['self.app_config_dir'], {}), '(self.app_config_dir)\n', (833, 854), False, 'import os\n'), ((868, 897), 'os.mkdir', 'os.mkdir', (['self.app_config_dir'], {}), '(self.app_config_dir)\n', (876, 897), False, 'import os\n'), ((1581, 1620), 'os.path.join', 'os.path.join', (['self.app_config_dir', '"""fh"""'], {}), "(self.app_config_dir, 'fh')\n", (1593, 1620), False, 'import os\n')]
|
# Copyright 1999-2021 Alibaba Group Holding Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
import pytest
from collections import defaultdict
from typing import Tuple, List
from ..... import oscar as mo
from .....resource import Resource
from ....cluster import ClusterAPI
from ....cluster.core import NodeRole, NodeStatus
from ....cluster.uploader import NodeInfoUploaderActor
from ....cluster.supervisor.locator import SupervisorPeerLocatorActor
from ....cluster.supervisor.node_info import NodeInfoCollectorActor
from ....subtask import Subtask
from ...supervisor import (
AssignerActor,
SubtaskManagerActor,
SubtaskQueueingActor,
GlobalResourceManagerActor,
)
class MockNodeInfoCollectorActor(NodeInfoCollectorActor):
def __init__(self, timeout=None, check_interval=None):
super().__init__(timeout=timeout, check_interval=check_interval)
self.ready_nodes = {
("address0", "numa-0"): 2,
("address1", "numa-0"): 2,
("address2", "numa-0"): 2,
}
async def update_node_info(
self, address, role, env=None, resource=None, detail=None, status=None
):
if "address" in address and status == NodeStatus.STOPPING:
del self.ready_nodes[(address, "numa-0")]
await super().update_node_info(address, role, env, resource, detail, status)
def get_all_bands(self, role=None, statuses=None):
if statuses == {NodeStatus.READY}:
return self.ready_nodes
else:
return {
("address0", "numa-0"): 2,
("address1", "numa-0"): 2,
("address2", "numa-0"): 2,
}
class FakeClusterAPI(ClusterAPI):
@classmethod
async def create(cls, address: str, **kw):
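        # create the locator, node-info collector, and uploader actors concurrently, tolerating ones that already exist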
dones, _ = await asyncio.wait(
[
mo.create_actor(
SupervisorPeerLocatorActor,
"fixed",
address,
uid=SupervisorPeerLocatorActor.default_uid(),
address=address,
),
mo.create_actor(
MockNodeInfoCollectorActor,
uid=NodeInfoCollectorActor.default_uid(),
address=address,
),
mo.create_actor(
NodeInfoUploaderActor,
NodeRole.WORKER,
interval=kw.get("upload_interval"),
band_to_resource=kw.get("band_to_resource"),
use_gpu=kw.get("use_gpu", False),
uid=NodeInfoUploaderActor.default_uid(),
address=address,
),
]
)
for task in dones:
try:
task.result()
except mo.ActorAlreadyExist: # pragma: no cover
pass
api = await super().create(address=address)
await api.mark_node_ready()
return api
class MockSlotsActor(mo.Actor):
@mo.extensible
def apply_subtask_resources(
self,
band: Tuple,
session_id: str,
subtask_ids: List[str],
subtask_slots: List[Resource],
):
return subtask_ids
def refresh_bands(self):
pass
def get_used_resources(self):
return {}
class MockAssignerActor(mo.Actor):
def assign_subtasks(
self, subtasks: List[Subtask], exclude_bands=None, random_when_unavailable=True
):
return [subtask.expect_bands[0] for subtask in subtasks]
def reassign_subtasks(self, band_num_queued_subtasks):
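        # fake rebalancing: move eight queued subtasks from address1 to address2 and leave address0 untouched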
if len(band_num_queued_subtasks.keys()) == 1:
[(band, _)] = band_num_queued_subtasks.items()
return {band: 0}
return {
("address1", "numa-0"): -8,
("address0", "numa-0"): 0,
("address2", "numa-0"): 8,
}
class MockSubtaskManagerActor(mo.Actor):
def __init__(self):
self._submitted_subtask_ids = defaultdict(list)
@mo.extensible
def submit_subtask_to_band(self, subtask_id: str, band: Tuple):
print(f"submit subtask {subtask_id} to band {band}")
self._submitted_subtask_ids[band].append(subtask_id)
def dump_data(self):
return self._submitted_subtask_ids
@pytest.fixture
async def actor_pool():
pool = await mo.create_actor_pool("127.0.0.1", n_process=0)
async with pool:
session_id = "test_session"
cluster_api = await FakeClusterAPI.create(pool.external_address)
# create assigner actor
await mo.create_actor(
MockAssignerActor,
uid=AssignerActor.gen_uid(session_id),
address=pool.external_address,
)
        # create subtask manager actor
manager_ref = await mo.create_actor(
MockSubtaskManagerActor,
uid=SubtaskManagerActor.gen_uid(session_id),
address=pool.external_address,
)
# create slots actor
slots_ref = await mo.create_actor(
MockSlotsActor,
uid=GlobalResourceManagerActor.default_uid(),
address=pool.external_address,
)
# create queueing actor
queueing_ref = await mo.create_actor(
SubtaskQueueingActor,
session_id,
1,
uid=SubtaskQueueingActor.gen_uid(session_id),
address=pool.external_address,
)
yield pool, session_id, cluster_api, queueing_ref, slots_ref, manager_ref
await mo.destroy_actor(queueing_ref)
async def _queue_subtasks(num_subtasks, expect_bands, queueing_ref):
if not num_subtasks:
return
subtasks = [Subtask(expect_bands[0] + "-" + str(i)) for i in range(num_subtasks)]
for subtask in subtasks:
subtask.expect_bands = [expect_bands]
subtask.required_resource = Resource(num_cpus=1)
priorities = [(i,) for i in range(num_subtasks)]
await queueing_ref.add_subtasks(subtasks, priorities)
@pytest.mark.asyncio
async def test_subtask_queueing(actor_pool):
_pool, session_id, cluster_api, queueing_ref, slots_ref, manager_ref = actor_pool
nums_subtasks = [9, 8, 1]
expects_bands = [
("address0", "numa-0"),
("address1", "numa-0"),
("address2", "numa-0"),
]
for num_subtasks, expect_bands in zip(nums_subtasks, expects_bands):
await _queue_subtasks(num_subtasks, expect_bands, queueing_ref)
await cluster_api.set_node_status(
node="address1", role=NodeRole.WORKER, status=NodeStatus.STOPPING
)
# 9 subtasks on ('address0', 'numa-0')
await queueing_ref.submit_subtasks(band=("address0", "numa-0"), limit=10)
    committed_subtask_ids = (await manager_ref.dump_data())[("address0", "numa-0")]
    assert (
        len(committed_subtask_ids) == 9
    ), f"committed_subtask_ids {committed_subtask_ids}"
    # 0 subtasks on ('address1', 'numa-0')
    await queueing_ref.submit_subtasks(band=("address1", "numa-0"), limit=10)
    committed_subtask_ids = (await manager_ref.dump_data())[("address0", "numa-0")]
    assert (
        len(committed_subtask_ids) == 9
    ), f"committed_subtask_ids {committed_subtask_ids}"
# 9 subtasks on ('address2', 'numa-0')
await queueing_ref.submit_subtasks(band=("address2", "numa-0"), limit=10)
submitted_subtask_ids = await manager_ref.dump_data()
assert sum(len(v) for v in submitted_subtask_ids.values()) == 18
|
[
"collections.defaultdict"
] |
[((4514, 4531), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (4525, 4531), False, 'from collections import defaultdict\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from flask_restful import Resource
from aiida.restapi.api import AiidaApi, App
from aiida.restapi.run_api import run_api
class NewResource(Resource):
"""
resource containing GET and POST methods. Description of each method
follows:
GET: returns id, ctime, and attributes of the latest created Dict.
POST: creates a Dict object, stores it in the database,
and returns its newly assigned id.
"""
def get(self):
from aiida.orm import Dict, QueryBuilder
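        # fetch the most recently created Dict node (ordered by ctime, newest first)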
qb = QueryBuilder()
qb.append(Dict,
project=['id', 'ctime', 'attributes'],
tag='pdata')
qb.order_by({'pdata': {'ctime': 'desc'}})
result = qb.first()
        # Results are returned as a dictionary; the datetime object is
        # serialized as an ISO 8601 string
return dict(id=result[0],
ctime=result[1].isoformat(),
attributes=result[2])
def post(self):
from aiida.orm import Dict
params = dict(property1='spam', property2='egg')
paramsData = Dict(dict=params).store()
return {'id': paramsData.pk}
class NewApi(AiidaApi):
def __init__(self, app=None, **kwargs):
"""
This init serves to add new endpoints to the basic AiiDA Api
"""
super().__init__(app=app, **kwargs)
self.add_resource(NewResource, '/new-endpoint/', strict_slashes=False)
# processing the options and running the app
from aiida import load_profile
import aiida.restapi.common as common
CONFIG_DIR = common.__path__[0]
import click
@click.command()
@click.option('-P', '--port', type=click.INT, default=5000,
help='Port number')
@click.option('-H', '--hostname', default='127.0.0.1',
help='Hostname')
@click.option('-c', '--config-dir', 'config', type=click.Path(exists=True), default=CONFIG_DIR,
help='the path of the configuration directory')
@click.option('--debug', 'debug', is_flag=True, default=False,
help='run app in debug mode')
@click.option('--wsgi-profile', 'wsgi_profile', is_flag=True, default=False,
help='to use WSGI profiler middleware for finding bottlenecks in web application')
def newendpoint(**kwargs):
"""
    Run the REST API.
"""
# Invoke the runner
run_api(App, NewApi, **kwargs)
# main program
if __name__ == '__main__':
"""
Run the app with the provided options. For example:
python example.py --hostname=127.0.0.2 --port=6000
"""
load_profile()
newendpoint()
|
[
"aiida.restapi.run_api.run_api",
"aiida.orm.QueryBuilder",
"aiida.orm.Dict",
"click.option",
"click.command",
"aiida.load_profile",
"click.Path"
] |
[((1642, 1657), 'click.command', 'click.command', ([], {}), '()\n', (1655, 1657), False, 'import click\n'), ((1659, 1737), 'click.option', 'click.option', (['"""-P"""', '"""--port"""'], {'type': 'click.INT', 'default': '(5000)', 'help': '"""Port number"""'}), "('-P', '--port', type=click.INT, default=5000, help='Port number')\n", (1671, 1737), False, 'import click\n'), ((1743, 1813), 'click.option', 'click.option', (['"""-H"""', '"""--hostname"""'], {'default': '"""127.0.0.1"""', 'help': '"""Hostname"""'}), "('-H', '--hostname', default='127.0.0.1', help='Hostname')\n", (1755, 1813), False, 'import click\n'), ((1964, 2060), 'click.option', 'click.option', (['"""--debug"""', '"""debug"""'], {'is_flag': '(True)', 'default': '(False)', 'help': '"""run app in debug mode"""'}), "('--debug', 'debug', is_flag=True, default=False, help=\n 'run app in debug mode')\n", (1976, 2060), False, 'import click\n'), ((2061, 2233), 'click.option', 'click.option', (['"""--wsgi-profile"""', '"""wsgi_profile"""'], {'is_flag': '(True)', 'default': '(False)', 'help': '"""to use WSGI profiler middleware for finding bottlenecks in web application"""'}), "('--wsgi-profile', 'wsgi_profile', is_flag=True, default=False,\n help=\n 'to use WSGI profiler middleware for finding bottlenecks in web application'\n )\n", (2073, 2233), False, 'import click\n'), ((2317, 2347), 'aiida.restapi.run_api.run_api', 'run_api', (['App', 'NewApi'], {}), '(App, NewApi, **kwargs)\n', (2324, 2347), False, 'from aiida.restapi.run_api import run_api\n'), ((2524, 2538), 'aiida.load_profile', 'load_profile', ([], {}), '()\n', (2536, 2538), False, 'from aiida import load_profile\n'), ((557, 571), 'aiida.orm.QueryBuilder', 'QueryBuilder', ([], {}), '()\n', (569, 571), False, 'from aiida.orm import Dict, QueryBuilder\n'), ((1866, 1889), 'click.Path', 'click.Path', ([], {'exists': '(True)'}), '(exists=True)\n', (1876, 1889), False, 'import click\n'), ((1124, 1141), 'aiida.orm.Dict', 'Dict', ([], {'dict': 'params'}), '(dict=params)\n', (1128, 1141), False, 'from aiida.orm import Dict\n')]
|
import os
from enum import Enum
from typing import Optional, Union
# pylint: disable=no-name-in-module
from pydantic import (
BaseSettings,
Field,
SecretStr,
validator,
)
from sqlalchemy.engine import URL as SQLAlchemy_DB_URL
class LogLevel(str, Enum):
# https://docs.python.org/3/library/logging.html#logging-levels
CRITICAL = "CRITICAL"
ERROR = "ERROR"
WARNING = "WARNING"
INFO = "INFO"
DEBUG = "DEBUG"
NOTESET = "NOTSET"
class RuntimeConfig(BaseSettings):
"""Configuration for Hetida Designer Runtime
There is an example .env file /runtime/settings/example.env
"""
log_level: LogLevel = Field(
LogLevel.INFO,
env="LOG_LEVEL",
description="Python logging level as string, i.e. one of "
+ ", ".join(['"' + x.value + '"' for x in list(LogLevel)]),
)
swagger_prefix: str = Field(
"",
env="OPENAPI_PREFIX",
description="root path (necessary for OpenAPI UI if behind proxy)",
)
model_repo_path: str = Field(
"/mnt/obj_repo",
env="MODEL_REPO_PATH",
description=(
"The path were serialized objects from the simple built-in object store"
" (e.g. trained models) will be stored."
),
)
is_backend_service: bool = Field(
True,
env="HD_IS_BACKEND_SERVICE",
description="Whether backend service endpoints should be active.",
)
is_runtime_service: bool = Field(
True,
env="HD_IS_RUNTIME_SERVICE",
description="Whether runtime service endpoints should be active.",
)
ensure_db_schema: bool = Field(
True,
env="HD_ENSURE_DB_SCHEMA",
description=(
"Whether DB and DB schema should be created if not present"
" and if running as backend."
),
)
allowed_origins: str = Field(
(
"http://localhost:4200,http://localhost:80,localhost"
",http://localhost,hetida-designer-demo-adapter-python"
),
description=(
"Comma separated allowed origins (CORS)"
" (relevant for adapters in runtime like local file adapter)"
),
env="ALLOWED_ORIGINS",
example="http://exampledomain.com,http://anotherexampledomain.de",
)
sqlalchemy_db_host: str = Field(
"hetida-designer-db", env="HD_DB_HOST", example="hetida-designer-db"
)
sqlalchemy_db_port: int = Field(5432, env="HD_DATABASE_PORT", example=5432)
sqlalchemy_db_database: str = Field(
"hetida_designer_db", env="HD_DB_DATABASE", example="hetida_designer_db"
)
sqlalchemy_db_drivername: str = Field(
"postgresql+psycopg2", env="HD_DB_DRIVERNAME", example="postgresql+psycopg2"
)
sqlalchemy_db_user: str = Field("hetida_designer_dbuser", env="HD_DB_PASSWORD")
sqlalchemy_db_password: SecretStr = Field(
SecretStr("hetida_designer_dbpasswd"), env="HD_DB_PASSWORD"
)
sqlalchemy_connection_string: Optional[Union[SecretStr, SQLAlchemy_DB_URL]] = Field(
None,
description=(
"Rfc 1738 database url. Not set by default."
" If set, takes precedence over sqlalchemy_db_* attributes!"
" Otherwise will be constructed from the sqlalchemy_db_* attributes"
),
env="HD_DATABASE_URL",
example=(
"postgresql+psycopg2://hetida_designer_dbuser:"
"hetida_designer_dbpasswd@hetida-designer-db:5432/hetida_designer_db"
),
)
sqlalchemy_pool_size: int = Field(
100, description="Database pool size", env="HD_DATABASE_POOL_SIZE", gt=0
)
# HD Keycloak auth
hd_auth_use_keycloak: bool = Field(
False,
env="HD_AUTH_USE_KEYCLOAK",
description="Whether Keycloak is used for verifying requests to runtime service endpoints",
)
hd_keycloak_auth_url: Optional[str] = Field(None, env="HD_KEYCLOAK_AUTH_URL")
hd_keycloak_realm: Optional[str] = Field("Hetida", env="HD_KEYCLOAK_REALM")
hd_keycloak_runtime_audience: Optional[str] = Field(
"account", env="HD_KEYCLOAK_RUNTIME_AUDIENCE"
)
hd_keycloak_runtime_client_id: Optional[str] = Field(
None, env="HD_KEYCLOAK_RUNTIME_CLIENT_ID"
)
hd_keycloak_runtime_username: Optional[str] = Field(
None, env="HD_KEYCLOAK_RUNTIME_USERNAME"
)
hd_keycloak_runtime_password: Optional[str] = Field(
None,
env="HD_KEYCLOAK_RUNTIME_PASSWORD",
description="the password of the service user",
)
# Keycloak Auth for generic rest adapters
hd_generic_rest_adapter_auth_use_keycloak: bool = Field(
False,
env="HD_GENERIC_REST_ADAPTER_AUTH_USE_KEYCLOAK",
description=(
"Whether Keycloak is used for requests from runtime to generic rest adapter endpoints"
),
)
hd_generic_rest_adapter_keycloak_auth_url: Optional[str] = Field(
None, env="HD_GENERIC_REST_ADAPTER_KEYCLOAK_AUTH_URL"
)
hd_generic_rest_adapter_keycloak_realm: Optional[str] = Field(
None, env="HD_GENERIC_REST_ADAPTER_KEYCLOAK_REALM"
)
hd_generic_rest_adapter_keycloak_runtime_client_id: Optional[str] = Field(
None, env="HD_GENERIC_REST_ADAPTER_KEYCLOAK_RUNTIME_CLIENT_ID"
)
hd_generic_rest_adapter_keycloak_runtime_username: Optional[str] = Field(
None, env="HD_GENERIC_REST_ADAPTER_KEYCLOAK_RUNTIME_USERNAME"
)
hd_generic_rest_adapter_keycloak_runtime_password: Optional[str] = Field(
None,
env="HD_GENERIC_REST_ADAPTER_KEYCLOAK_RUNTIME_PASSWORD",
description="the password of the service user",
)
hd_generic_rest_adapter_keycloak_runtime_audience: Optional[str] = Field(
"account", env="HD_GENERIC_REST_ADAPTER_KEYCLOAK_RUNTIME_AUDIENCE"
)
hd_adapters: str = Field(
"demo-adapter-python|Python-Demo-Adapter"
"|http://localhost:8092"
"|http://hetida-designer-demo-adapter-python:8092,"
"demo-adapter-java|Java-Demo-Adapter"
"|http://localhost:8091/adapter"
"|http://hetida-designer-demo-adapter-java:8091/adapter,"
"local-file-adapter|Local-File-Adapter"
"|http://localhost:8090/adapters/localfile"
"|http://hetida-designer-runtime:8090/adapters/localfile",
env="HETIDA_DESIGNER_ADAPTERS",
description="list of the installed adapters",
)
hd_runtime_engine_url: str = Field(
"http://hetida-designer-runtime:8090/engine/",
env="HETIDA_DESIGNER_RUNTIME_EGINE_URL",
description="URL to runtime",
)
hd_runtime_verify_certs: bool = Field(
True, env="HETIDA_DESIGNER_RUNTIME_VERIFY_CERTS"
)
# For scripts (e.g. component deployment)
hd_backend_api_url: str = Field(
"http://hetida-designer-backend:8090/api/",
env="HETIDA_DESIGNER_BACKEND_API_URL",
description=(
"URL to backend. Necessary for component deployment "
"and to allow runtime to access adapters endpoint."
),
)
hd_backend_use_basic_auth: bool = Field(
False,
env="HETIDA_DESIGNER_BACKEND_USE_BASIC_AUTH",
description=(
"Whether Backend is protected via Basic Auth."
" Only necessary for component deployment."
" If Backend is protected via Keycloak instead "
" use the corresponding keycloak environment variables!"
),
)
hd_backend_basic_auth_user: Optional[str] = Field(
None,
env="HETIDA_DESIGNER_BASIC_AUTH_USER",
description="Basic Auth User",
)
hd_backend_basic_auth_password: Optional[str] = Field(
None,
env="HETIDA_DESIGNER_BASIC_AUTH_PASSWORD",
description="Basic Auth User",
)
hd_backend_verify_certs: bool = Field(
True, env="HETIDA_DESIGNER_BACKEND_VERIFY_CERTS"
)
hd_adapters_verify_certs: bool = Field(
True, env="HETIDA_DESIGNER_ADAPTERS_VERIFY_CERTS"
)
hd_kafka_consumer_enabled: bool = Field(
False,
description="Whether a Kafka consumer for executing workflows/components is enabled",
env="HETIDA_DESIGNER_KAFKA_ENABLED",
)
hd_kafka_consumer_topic: str = Field(
"hd-execution-topic",
description="The topic to which the execution consumer will listen",
env="HETIDA_DESIGNER_KAFKA_CONSUMER_TOPIC",
)
hd_kafka_consumer_options: dict = Field(
{"bootstrap_servers": "kafka:19092", "group_id": "hd_kafka_consumer_group"},
description=(
"Intialization parameters for the aiokafka consumer class."
" The most important ones set here are probably bootstrap_servers"
" and group_id."
" These options will be passed directly to the class init method."
" The environment variable expects this to be a mapping as json string."
" Note that some of the available options need different code to work"
" properly, so not all available options / combinations are viable"
" for the hetida designer consumer."
),
example={
"bootstrap_servers": "kafka:19092",
"group_id": "hd_kafka_consumer_group",
"auto_commit_interval_ms": 1000,
"auto_offset_reset": "earliest",
},
env="HETIDA_DESIGNER_KAFKA_CONSUMER_OPTIONS",
)
hd_kafka_producer_options: dict = Field(
{"bootstrap_servers": "kafka:19092"},
description=(
"Intialization parameters for the aiokafka consumer class."
" The most important one set here is probably bootstrap_servers."
" These options will be passed directly to the class init method."
" The environment variable expects this to be a mapping as json string."
" Note that some of the available options need different code to work"
" properly, so not all available options / combinations are viable"
" for the hetida designer consumer."
),
example={"bootstrap_servers": "kafka:19092"},
env="HETIDA_DESIGNER_KAFKA_PRODUCER_OPTIONS",
)
hd_kafka_response_topic: str = Field(
"hd-execution-response-topic",
description="The topic to which the execution consumer send execution results",
env="HETIDA_DESIGNER_KAFKA_RESPONSE_TOPIC",
)
# pylint: disable=no-self-argument,no-self-use
@validator("is_runtime_service")
def must_be_at_least_backend_or_runtime(cls, v: bool, values: dict) -> bool:
is_backend_service = values["is_backend_service"]
if not (v or is_backend_service):
msg = (
"At least one of is_backend_service or is_runtime_service must be true. "
"It does not make sense to start the service with no active endpoints."
)
raise ValueError(msg)
return v
# pylint: disable=no-self-argument,no-self-use
@validator("hd_backend_api_url")
def backend_api_url_ends_with_slash(cls, v: str) -> str:
"""make it end with a slash"""
if not v.endswith("/"):
v += "/"
return v
# pylint: disable=no-self-argument,no-self-use
@validator("sqlalchemy_connection_string")
def database_url(
cls, v: Optional[Union[SecretStr, SQLAlchemy_DB_URL]], values: dict
) -> Optional[Union[SecretStr, SQLAlchemy_DB_URL]]:
if v is None:
return SQLAlchemy_DB_URL.create(
drivername=values["sqlalchemy_db_drivername"],
username=values["sqlalchemy_db_user"],
password=values["<PASSWORD>"],
host=values["sqlalchemy_db_host"],
port=values["sqlalchemy_db_port"],
database=values["sqlalchemy_db_database"],
)
return v
environment_file = os.environ.get("HD_RUNTIME_ENVIRONMENT_FILE", None)
runtime_config = RuntimeConfig(_env_file=environment_file if environment_file else None)
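# A minimal sketch of consuming this module (the import path below is an
# assumption; it depends on where this file lives in the package). Settings
# are read once at import time, with environment variables overriding the
# declared defaults:
#
#   import os
#   os.environ["HD_DATABASE_PORT"] = "5433"   # must be set before the import
#   from runtime_config_module import runtime_config   # hypothetical module name
#   assert runtime_config.sqlalchemy_db_port == 5433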
|
[
"pydantic.SecretStr",
"os.environ.get",
"pydantic.Field",
"pydantic.validator",
"sqlalchemy.engine.URL.create"
] |
[((11937, 11988), 'os.environ.get', 'os.environ.get', (['"""HD_RUNTIME_ENVIRONMENT_FILE"""', 'None'], {}), "('HD_RUNTIME_ENVIRONMENT_FILE', None)\n", (11951, 11988), False, 'import os\n'), ((880, 984), 'pydantic.Field', 'Field', (['""""""'], {'env': '"""OPENAPI_PREFIX"""', 'description': '"""root path (necessary for OpenAPI UI if behind proxy)"""'}), "('', env='OPENAPI_PREFIX', description=\n 'root path (necessary for OpenAPI UI if behind proxy)')\n", (885, 984), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((1038, 1217), 'pydantic.Field', 'Field', (['"""/mnt/obj_repo"""'], {'env': '"""MODEL_REPO_PATH"""', 'description': '"""The path were serialized objects from the simple built-in object store (e.g. trained models) will be stored."""'}), "('/mnt/obj_repo', env='MODEL_REPO_PATH', description=\n 'The path were serialized objects from the simple built-in object store (e.g. trained models) will be stored.'\n )\n", (1043, 1217), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((1310, 1422), 'pydantic.Field', 'Field', (['(True)'], {'env': '"""HD_IS_BACKEND_SERVICE"""', 'description': '"""Whether backend service endpoints should be active."""'}), "(True, env='HD_IS_BACKEND_SERVICE', description=\n 'Whether backend service endpoints should be active.')\n", (1315, 1422), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((1481, 1593), 'pydantic.Field', 'Field', (['(True)'], {'env': '"""HD_IS_RUNTIME_SERVICE"""', 'description': '"""Whether runtime service endpoints should be active."""'}), "(True, env='HD_IS_RUNTIME_SERVICE', description=\n 'Whether runtime service endpoints should be active.')\n", (1486, 1593), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((1650, 1798), 'pydantic.Field', 'Field', (['(True)'], {'env': '"""HD_ENSURE_DB_SCHEMA"""', 'description': '"""Whether DB and DB schema should be created if not present and if running as backend."""'}), "(True, env='HD_ENSURE_DB_SCHEMA', description=\n 'Whether DB and DB schema should be created if not present and if running as backend.'\n )\n", (1655, 1798), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((1887, 2228), 'pydantic.Field', 'Field', (['"""http://localhost:4200,http://localhost:80,localhost,http://localhost,hetida-designer-demo-adapter-python"""'], {'description': '"""Comma separated allowed origins (CORS) (relevant for adapters in runtime like local file adapter)"""', 'env': '"""ALLOWED_ORIGINS"""', 'example': '"""http://exampledomain.com,http://anotherexampledomain.de"""'}), "(\n 'http://localhost:4200,http://localhost:80,localhost,http://localhost,hetida-designer-demo-adapter-python'\n , description=\n 'Comma separated allowed origins (CORS) (relevant for adapters in runtime like local file adapter)'\n , env='ALLOWED_ORIGINS', example=\n 'http://exampledomain.com,http://anotherexampledomain.de')\n", (1892, 2228), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((2352, 2427), 'pydantic.Field', 'Field', (['"""hetida-designer-db"""'], {'env': '"""HD_DB_HOST"""', 'example': '"""hetida-designer-db"""'}), "('hetida-designer-db', env='HD_DB_HOST', example='hetida-designer-db')\n", (2357, 2427), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((2473, 2522), 'pydantic.Field', 'Field', (['(5432)'], {'env': '"""HD_DATABASE_PORT"""', 'example': '(5432)'}), "(5432, env='HD_DATABASE_PORT', example=5432)\n", (2478, 2522), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((2558, 2637), 'pydantic.Field', 'Field', (['"""hetida_designer_db"""'], {'env': '"""HD_DB_DATABASE"""', 'example': '"""hetida_designer_db"""'}), "('hetida_designer_db', env='HD_DB_DATABASE', example='hetida_designer_db')\n", (2563, 2637), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((2689, 2777), 'pydantic.Field', 'Field', (['"""postgresql+psycopg2"""'], {'env': '"""HD_DB_DRIVERNAME"""', 'example': '"""postgresql+psycopg2"""'}), "('postgresql+psycopg2', env='HD_DB_DRIVERNAME', example=\n 'postgresql+psycopg2')\n", (2694, 2777), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((2818, 2871), 'pydantic.Field', 'Field', (['"""hetida_designer_dbuser"""'], {'env': '"""HD_DB_PASSWORD"""'}), "('hetida_designer_dbuser', env='HD_DB_PASSWORD')\n", (2823, 2871), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((3077, 3437), 'pydantic.Field', 'Field', (['None'], {'description': '"""Rfc 1738 database url. Not set by default. If set, takes precedence over sqlalchemy_db_* attributes! Otherwise will be constructed from the sqlalchemy_db_* attributes"""', 'env': '"""HD_DATABASE_URL"""', 'example': '"""postgresql+psycopg2://hetida_designer_dbuser:hetida_designer_dbpasswd@hetida-designer-db:5432/hetida_designer_db"""'}), "(None, description=\n 'Rfc 1738 database url. Not set by default. If set, takes precedence over sqlalchemy_db_* attributes! Otherwise will be constructed from the sqlalchemy_db_* attributes'\n , env='HD_DATABASE_URL', example=\n 'postgresql+psycopg2://hetida_designer_dbuser:hetida_designer_dbpasswd@hetida-designer-db:5432/hetida_designer_db'\n )\n", (3082, 3437), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((3583, 3662), 'pydantic.Field', 'Field', (['(100)'], {'description': '"""Database pool size"""', 'env': '"""HD_DATABASE_POOL_SIZE"""', 'gt': '(0)'}), "(100, description='Database pool size', env='HD_DATABASE_POOL_SIZE', gt=0)\n", (3588, 3662), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((3734, 3876), 'pydantic.Field', 'Field', (['(False)'], {'env': '"""HD_AUTH_USE_KEYCLOAK"""', 'description': '"""Whether Keycloak is used for verifying requests to runtime service endpoints"""'}), "(False, env='HD_AUTH_USE_KEYCLOAK', description=\n 'Whether Keycloak is used for verifying requests to runtime service endpoints'\n )\n", (3739, 3876), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((3940, 3979), 'pydantic.Field', 'Field', (['None'], {'env': '"""HD_KEYCLOAK_AUTH_URL"""'}), "(None, env='HD_KEYCLOAK_AUTH_URL')\n", (3945, 3979), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((4019, 4059), 'pydantic.Field', 'Field', (['"""Hetida"""'], {'env': '"""HD_KEYCLOAK_REALM"""'}), "('Hetida', env='HD_KEYCLOAK_REALM')\n", (4024, 4059), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((4110, 4162), 'pydantic.Field', 'Field', (['"""account"""'], {'env': '"""HD_KEYCLOAK_RUNTIME_AUDIENCE"""'}), "('account', env='HD_KEYCLOAK_RUNTIME_AUDIENCE')\n", (4115, 4162), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((4228, 4276), 'pydantic.Field', 'Field', (['None'], {'env': '"""HD_KEYCLOAK_RUNTIME_CLIENT_ID"""'}), "(None, env='HD_KEYCLOAK_RUNTIME_CLIENT_ID')\n", (4233, 4276), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((4341, 4388), 'pydantic.Field', 'Field', (['None'], {'env': '"""HD_KEYCLOAK_RUNTIME_USERNAME"""'}), "(None, env='HD_KEYCLOAK_RUNTIME_USERNAME')\n", (4346, 4388), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((4453, 4553), 'pydantic.Field', 'Field', (['None'], {'env': '"""HD_KEYCLOAK_RUNTIME_PASSWORD"""', 'description': '"""the password of the service user"""'}), "(None, env='HD_KEYCLOAK_RUNTIME_PASSWORD', description=\n 'the password of the service user')\n", (4458, 4553), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((4681, 4852), 'pydantic.Field', 'Field', (['(False)'], {'env': '"""HD_GENERIC_REST_ADAPTER_AUTH_USE_KEYCLOAK"""', 'description': '"""Whether Keycloak is used for requests from runtime to generic rest adapter endpoints"""'}), "(False, env='HD_GENERIC_REST_ADAPTER_AUTH_USE_KEYCLOAK', description=\n 'Whether Keycloak is used for requests from runtime to generic rest adapter endpoints'\n )\n", (4686, 4852), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((4961, 5021), 'pydantic.Field', 'Field', (['None'], {'env': '"""HD_GENERIC_REST_ADAPTER_KEYCLOAK_AUTH_URL"""'}), "(None, env='HD_GENERIC_REST_ADAPTER_KEYCLOAK_AUTH_URL')\n", (4966, 5021), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((5096, 5153), 'pydantic.Field', 'Field', (['None'], {'env': '"""HD_GENERIC_REST_ADAPTER_KEYCLOAK_REALM"""'}), "(None, env='HD_GENERIC_REST_ADAPTER_KEYCLOAK_REALM')\n", (5101, 5153), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((5240, 5309), 'pydantic.Field', 'Field', (['None'], {'env': '"""HD_GENERIC_REST_ADAPTER_KEYCLOAK_RUNTIME_CLIENT_ID"""'}), "(None, env='HD_GENERIC_REST_ADAPTER_KEYCLOAK_RUNTIME_CLIENT_ID')\n", (5245, 5309), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((5395, 5463), 'pydantic.Field', 'Field', (['None'], {'env': '"""HD_GENERIC_REST_ADAPTER_KEYCLOAK_RUNTIME_USERNAME"""'}), "(None, env='HD_GENERIC_REST_ADAPTER_KEYCLOAK_RUNTIME_USERNAME')\n", (5400, 5463), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((5549, 5669), 'pydantic.Field', 'Field', (['None'], {'env': '"""HD_GENERIC_REST_ADAPTER_KEYCLOAK_RUNTIME_PASSWORD"""', 'description': '"""the password of the service user"""'}), "(None, env='HD_GENERIC_REST_ADAPTER_KEYCLOAK_RUNTIME_PASSWORD',\n description='the password of the service user')\n", (5554, 5669), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((5768, 5841), 'pydantic.Field', 'Field', (['"""account"""'], {'env': '"""HD_GENERIC_REST_ADAPTER_KEYCLOAK_RUNTIME_AUDIENCE"""'}), "('account', env='HD_GENERIC_REST_ADAPTER_KEYCLOAK_RUNTIME_AUDIENCE')\n", (5773, 5841), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((5880, 6345), 'pydantic.Field', 'Field', (['"""demo-adapter-python|Python-Demo-Adapter|http://localhost:8092|http://hetida-designer-demo-adapter-python:8092,demo-adapter-java|Java-Demo-Adapter|http://localhost:8091/adapter|http://hetida-designer-demo-adapter-java:8091/adapter,local-file-adapter|Local-File-Adapter|http://localhost:8090/adapters/localfile|http://hetida-designer-runtime:8090/adapters/localfile"""'], {'env': '"""HETIDA_DESIGNER_ADAPTERS"""', 'description': '"""list of the installed adapters"""'}), "(\n 'demo-adapter-python|Python-Demo-Adapter|http://localhost:8092|http://hetida-designer-demo-adapter-python:8092,demo-adapter-java|Java-Demo-Adapter|http://localhost:8091/adapter|http://hetida-designer-demo-adapter-java:8091/adapter,local-file-adapter|Local-File-Adapter|http://localhost:8090/adapters/localfile|http://hetida-designer-runtime:8090/adapters/localfile'\n , env='HETIDA_DESIGNER_ADAPTERS', description=\n 'list of the installed adapters')\n", (5885, 6345), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((6484, 6612), 'pydantic.Field', 'Field', (['"""http://hetida-designer-runtime:8090/engine/"""'], {'env': '"""HETIDA_DESIGNER_RUNTIME_EGINE_URL"""', 'description': '"""URL to runtime"""'}), "('http://hetida-designer-runtime:8090/engine/', env=\n 'HETIDA_DESIGNER_RUNTIME_EGINE_URL', description='URL to runtime')\n", (6489, 6612), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((6676, 6731), 'pydantic.Field', 'Field', (['(True)'], {'env': '"""HETIDA_DESIGNER_RUNTIME_VERIFY_CERTS"""'}), "(True, env='HETIDA_DESIGNER_RUNTIME_VERIFY_CERTS')\n", (6681, 6731), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((6823, 7042), 'pydantic.Field', 'Field', (['"""http://hetida-designer-backend:8090/api/"""'], {'env': '"""HETIDA_DESIGNER_BACKEND_API_URL"""', 'description': '"""URL to backend. Necessary for component deployment and to allow runtime to access adapters endpoint."""'}), "('http://hetida-designer-backend:8090/api/', env=\n 'HETIDA_DESIGNER_BACKEND_API_URL', description=\n 'URL to backend. Necessary for component deployment and to allow runtime to access adapters endpoint.'\n )\n", (6828, 7042), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((7136, 7405), 'pydantic.Field', 'Field', (['(False)'], {'env': '"""HETIDA_DESIGNER_BACKEND_USE_BASIC_AUTH"""', 'description': '"""Whether Backend is protected via Basic Auth. Only necessary for component deployment. If Backend is protected via Keycloak instead use the corresponding keycloak environment variables!"""'}), "(False, env='HETIDA_DESIGNER_BACKEND_USE_BASIC_AUTH', description=\n 'Whether Backend is protected via Basic Auth. Only necessary for component deployment. If Backend is protected via Keycloak instead use the corresponding keycloak environment variables!'\n )\n", (7141, 7405), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((7544, 7630), 'pydantic.Field', 'Field', (['None'], {'env': '"""HETIDA_DESIGNER_BASIC_AUTH_USER"""', 'description': '"""Basic Auth User"""'}), "(None, env='HETIDA_DESIGNER_BASIC_AUTH_USER', description=\n 'Basic Auth User')\n", (7549, 7630), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((7709, 7799), 'pydantic.Field', 'Field', (['None'], {'env': '"""HETIDA_DESIGNER_BASIC_AUTH_PASSWORD"""', 'description': '"""Basic Auth User"""'}), "(None, env='HETIDA_DESIGNER_BASIC_AUTH_PASSWORD', description=\n 'Basic Auth User')\n", (7714, 7799), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((7862, 7917), 'pydantic.Field', 'Field', (['(True)'], {'env': '"""HETIDA_DESIGNER_BACKEND_VERIFY_CERTS"""'}), "(True, env='HETIDA_DESIGNER_BACKEND_VERIFY_CERTS')\n", (7867, 7917), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((7969, 8025), 'pydantic.Field', 'Field', (['(True)'], {'env': '"""HETIDA_DESIGNER_ADAPTERS_VERIFY_CERTS"""'}), "(True, env='HETIDA_DESIGNER_ADAPTERS_VERIFY_CERTS')\n", (7974, 8025), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((8079, 8223), 'pydantic.Field', 'Field', (['(False)'], {'description': '"""Whether a Kafka consumer for executing workflows/components is enabled"""', 'env': '"""HETIDA_DESIGNER_KAFKA_ENABLED"""'}), "(False, description=\n 'Whether a Kafka consumer for executing workflows/components is enabled',\n env='HETIDA_DESIGNER_KAFKA_ENABLED')\n", (8084, 8223), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((8282, 8432), 'pydantic.Field', 'Field', (['"""hd-execution-topic"""'], {'description': '"""The topic to which the execution consumer will listen"""', 'env': '"""HETIDA_DESIGNER_KAFKA_CONSUMER_TOPIC"""'}), "('hd-execution-topic', description=\n 'The topic to which the execution consumer will listen', env=\n 'HETIDA_DESIGNER_KAFKA_CONSUMER_TOPIC')\n", (8287, 8432), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((8493, 9251), 'pydantic.Field', 'Field', (["{'bootstrap_servers': 'kafka:19092', 'group_id': 'hd_kafka_consumer_group'}"], {'description': '"""Intialization parameters for the aiokafka consumer class. The most important ones set here are probably bootstrap_servers and group_id. These options will be passed directly to the class init method. The environment variable expects this to be a mapping as json string. Note that some of the available options need different code to work properly, so not all available options / combinations are viable for the hetida designer consumer."""', 'example': "{'bootstrap_servers': 'kafka:19092', 'group_id': 'hd_kafka_consumer_group',\n 'auto_commit_interval_ms': 1000, 'auto_offset_reset': 'earliest'}", 'env': '"""HETIDA_DESIGNER_KAFKA_CONSUMER_OPTIONS"""'}), "({'bootstrap_servers': 'kafka:19092', 'group_id':\n 'hd_kafka_consumer_group'}, description=\n 'Intialization parameters for the aiokafka consumer class. The most important ones set here are probably bootstrap_servers and group_id. These options will be passed directly to the class init method. The environment variable expects this to be a mapping as json string. Note that some of the available options need different code to work properly, so not all available options / combinations are viable for the hetida designer consumer.'\n , example={'bootstrap_servers': 'kafka:19092', 'group_id':\n 'hd_kafka_consumer_group', 'auto_commit_interval_ms': 1000,\n 'auto_offset_reset': 'earliest'}, env=\n 'HETIDA_DESIGNER_KAFKA_CONSUMER_OPTIONS')\n", (8498, 9251), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((9491, 10078), 'pydantic.Field', 'Field', (["{'bootstrap_servers': 'kafka:19092'}"], {'description': '"""Intialization parameters for the aiokafka consumer class. The most important one set here is probably bootstrap_servers. These options will be passed directly to the class init method. The environment variable expects this to be a mapping as json string. Note that some of the available options need different code to work properly, so not all available options / combinations are viable for the hetida designer consumer."""', 'example': "{'bootstrap_servers': 'kafka:19092'}", 'env': '"""HETIDA_DESIGNER_KAFKA_PRODUCER_OPTIONS"""'}), "({'bootstrap_servers': 'kafka:19092'}, description=\n 'Intialization parameters for the aiokafka consumer class. The most important one set here is probably bootstrap_servers. These options will be passed directly to the class init method. The environment variable expects this to be a mapping as json string. Note that some of the available options need different code to work properly, so not all available options / combinations are viable for the hetida designer consumer.'\n , example={'bootstrap_servers': 'kafka:19092'}, env=\n 'HETIDA_DESIGNER_KAFKA_PRODUCER_OPTIONS')\n", (9496, 10078), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((10253, 10423), 'pydantic.Field', 'Field', (['"""hd-execution-response-topic"""'], {'description': '"""The topic to which the execution consumer send execution results"""', 'env': '"""HETIDA_DESIGNER_KAFKA_RESPONSE_TOPIC"""'}), "('hd-execution-response-topic', description=\n 'The topic to which the execution consumer send execution results', env\n ='HETIDA_DESIGNER_KAFKA_RESPONSE_TOPIC')\n", (10258, 10423), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((10502, 10533), 'pydantic.validator', 'validator', (['"""is_runtime_service"""'], {}), "('is_runtime_service')\n", (10511, 10533), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((11037, 11068), 'pydantic.validator', 'validator', (['"""hd_backend_api_url"""'], {}), "('hd_backend_api_url')\n", (11046, 11068), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((11296, 11337), 'pydantic.validator', 'validator', (['"""sqlalchemy_connection_string"""'], {}), "('sqlalchemy_connection_string')\n", (11305, 11337), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((2928, 2965), 'pydantic.SecretStr', 'SecretStr', (['"""hetida_designer_dbpasswd"""'], {}), "('hetida_designer_dbpasswd')\n", (2937, 2965), False, 'from pydantic import BaseSettings, Field, SecretStr, validator\n'), ((11533, 11799), 'sqlalchemy.engine.URL.create', 'SQLAlchemy_DB_URL.create', ([], {'drivername': "values['sqlalchemy_db_drivername']", 'username': "values['sqlalchemy_db_user']", 'password': "values['<PASSWORD>']", 'host': "values['sqlalchemy_db_host']", 'port': "values['sqlalchemy_db_port']", 'database': "values['sqlalchemy_db_database']"}), "(drivername=values['sqlalchemy_db_drivername'],\n username=values['sqlalchemy_db_user'], password=values['<PASSWORD>'],\n host=values['sqlalchemy_db_host'], port=values['sqlalchemy_db_port'],\n database=values['sqlalchemy_db_database'])\n", (11557, 11799), True, 'from sqlalchemy.engine import URL as SQLAlchemy_DB_URL\n')]
|
# Universidade Federal de Viçosa - Campus Rio Paranaíba
# Information Systems - Digital Image Processing
#
# Professor: <NAME>
# Authors:
#   - MatheusRV (3929)
#   - iguit0 (3902)
#   - ThiagoMunich (3628)
#
# Spatial filtering for smoothing - maximum and minimum filters
# How to run:
#   $ python maxmin.py <img_entrada> <img_saida_max> <img_saida_min> <mask_size>
#   <mask_size> is an integer. Example: if mask_size=3, the mask has size 3x3.
# Generates two output images.
import sys
from scipy.ndimage import filters
from scipy import misc
def loadImg(arg):
return misc.imread(arg)
img = loadImg(sys.argv[1])
saida1 = sys.argv[2] + '_max.tif'
saida2 = sys.argv[3] + '_min.tif'
ms = sys.argv[4]
ms = int(ms)
img_filtro_max = filters.maximum_filter(img, size=ms)
img_filtro_min = filters.minimum_filter(img, size=ms)
img_saida1 = misc.imsave(saida1, img_filtro_max)
img_saida2 = misc.imsave(saida2, img_filtro_min)
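# Note: scipy.misc.imread/imsave were deprecated in SciPy 1.0 and removed in
# SciPy 1.2; on newer installations the imageio package provides near drop-in
# replacements (imageio.imread / imageio.imwrite).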
|
[
"scipy.ndimage.filters.minimum_filter",
"scipy.ndimage.filters.maximum_filter",
"scipy.misc.imread",
"scipy.misc.imsave"
] |
[((887, 923), 'scipy.ndimage.filters.maximum_filter', 'filters.maximum_filter', (['img'], {'size': 'ms'}), '(img, size=ms)\n', (909, 923), False, 'from scipy.ndimage import filters\n'), ((941, 977), 'scipy.ndimage.filters.minimum_filter', 'filters.minimum_filter', (['img'], {'size': 'ms'}), '(img, size=ms)\n', (963, 977), False, 'from scipy.ndimage import filters\n'), ((992, 1027), 'scipy.misc.imsave', 'misc.imsave', (['saida1', 'img_filtro_max'], {}), '(saida1, img_filtro_max)\n', (1003, 1027), False, 'from scipy import misc\n'), ((1041, 1076), 'scipy.misc.imsave', 'misc.imsave', (['saida2', 'img_filtro_min'], {}), '(saida2, img_filtro_min)\n', (1052, 1076), False, 'from scipy import misc\n'), ((725, 741), 'scipy.misc.imread', 'misc.imread', (['arg'], {}), '(arg)\n', (736, 741), False, 'from scipy import misc\n')]
|
#!/usr/bin/python3
# Imports
import os
import gi
import sys
if os.geteuid() != 0:
exit("You need to have root privileges to run this.")
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk, Gio
class MainWindow(Gtk.Window):
def __init__(self):
Gtk.Window.__init__(self)
self.set_border_width(100)
#### HeaderBar Define
self.headerbar = Gtk.HeaderBar()
self.set_titlebar(self.headerbar)
self.headerbar.set_show_close_button(True)
self.headerbar.props.title = "Keyboard Color Switcher"
self.aboutbutton = Gtk.Button()
icon = Gio.ThemedIcon(name="open-menu-symbolic")
image = Gtk.Image.new_from_gicon(icon, Gtk.IconSize.BUTTON)
self.aboutbutton.add(image)
self.headerbar.pack_end(self.aboutbutton)
vbox = Gtk.Box(orientation=Gtk.Orientation.VERTICAL, spacing=12)
self.add(vbox)
        ### Label Definition
self.aboutlabel = Gtk.Label()
self.aboutlabel.set_text("GTK tool for changing keyboard region colors")
self.aboutcenterlabel = Gtk.Label()
# self.centerlabel.set_halign()
self.ledPath = "/"+ os.path.join('sys', 'class', 'leds', 'system76_acpi::kbd_backlight')
        if not os.path.exists(self.ledPath):
self.ledPath = "/" + os.path.join('sys', 'class', 'leds', 'system76::kbd_backlight')
# Detect if single color or multi color regions
self.colors = False
self.colorCenter = "/color"
if os.path.exists(os.path.join(self.ledPath, 'color_left')):
self.colors = True
self.colorCenter = "/color_center"
        if self.colors:
            ### Button Definition
self.leftbutton = Gtk.ColorButton()
self.leftlabel = Gtk.Label.new("Left")
self.leftbutton.set_halign(Gtk.Align.CENTER)
self.leftbutton.set_valign(Gtk.Align.CENTER)
self.leftbutton.set_size_request(75, 50)
self.centerbutton = Gtk.ColorButton()
self.centerlabel = Gtk.Label.new("Center")
self.centerbutton.set_halign(Gtk.Align.CENTER)
self.centerbutton.set_valign(Gtk.Align.CENTER)
self.centerbutton.set_size_request(75, 50)
        if self.colors:
self.rightbutton = Gtk.ColorButton()
self.rightlabel = Gtk.Label.new("Right")
self.rightbutton.set_halign(Gtk.Align.CENTER)
self.rightbutton.set_valign(Gtk.Align.CENTER)
self.rightbutton.set_size_request(75, 50)
### Grid Setup
self.grid = Gtk.Grid()
self.grid.set_column_spacing(6)
self.grid.set_halign(Gtk.Align.CENTER)
self.grid.set_valign(Gtk.Align.CENTER)
### Connect Signal handlers
        if self.colors:
self.leftbutton.connect("clicked", self.on_button_clicked)
self.leftbutton.connect("color-set", self.on_color_activated, "left")
self.rightbutton.connect("clicked", self.on_button_clicked)
self.rightbutton.connect("color-set", self.on_color_activated, "right")
self.centerbutton.connect("clicked", self.on_button_clicked)
self.centerbutton.connect("color-set", self.on_color_activated, "center")
### Grid Setup/2
        if self.colors:
self.grid.attach(self.leftlabel, 0, 2, 1, 1)
self.grid.attach(self.leftbutton, 0, 1, 1, 1)
self.grid.attach(self.rightlabel, 2, 2, 1, 1)
self.grid.attach(self.rightbutton, 2, 1, 1, 1)
self.grid.attach(self.centerlabel, 1, 2, 1, 1)
self.grid.attach(self.centerbutton, 1, 1, 1, 1)
vbox.pack_start(self.aboutlabel, True, True, 0)
vbox.pack_start(self.aboutcenterlabel, True, True, 0)
vbox.pack_start(self.grid, True, True, 0)
### Color Grab
def on_color_activated(self, widget, region):
print(region)
color = widget.get_rgba()
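        # each RGBA channel is a float in [0, 1]; scale it to 0-255 and format
        # as two uppercase hex digits so the three channels concatenate into
        # the RRGGBB string the sysfs color files expect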
red = "{0:0{1}X}".format(int(color.red*255),2)
green = "{0:0{1}X}".format(int(color.green*255),2)
blue = "{0:0{1}X}".format(int(color.blue*255),2)
color_string = red + green + blue
print(color_string)
if region == "left":
try:
with open(self.ledPath + '/color_left', 'w') as f_left:
f_left.write(color_string)
except:
print("Failed to set color")
if region == "center":
try:
with open(self.ledPath + self.colorCenter, 'w') as f_center:
f_center.write(color_string)
except:
print("Failed to set color")
if region == "right":
try:
with open(self.ledPath + '/color_right', 'w') as f_right:
f_right.write(color_string)
except:
print("Failed to set color")
def on_button_clicked(self, widget):
win.show_all()
win = MainWindow()
win.connect("destroy", Gtk.main_quit)
win.show_all()
Gtk.main()
|
[
"gi.require_version",
"gi.repository.Gtk.main",
"gi.repository.Gtk.ColorButton",
"gi.repository.Gtk.Box",
"gi.repository.Gtk.HeaderBar",
"gi.repository.Gtk.Button",
"os.path.exists",
"gi.repository.Gio.ThemedIcon",
"gi.repository.Gtk.Image.new_from_gicon",
"gi.repository.Gtk.Grid",
"os.geteuid",
"gi.repository.Gtk.Label",
"os.path.join",
"gi.repository.Gtk.Label.new",
"gi.repository.Gtk.Window.__init__"
] |
[((143, 175), 'gi.require_version', 'gi.require_version', (['"""Gtk"""', '"""3.0"""'], {}), "('Gtk', '3.0')\n", (161, 175), False, 'import gi\n'), ((5038, 5048), 'gi.repository.Gtk.main', 'Gtk.main', ([], {}), '()\n', (5046, 5048), False, 'from gi.repository import Gtk, Gio\n'), ((65, 77), 'os.geteuid', 'os.geteuid', ([], {}), '()\n', (75, 77), False, 'import os\n'), ((275, 300), 'gi.repository.Gtk.Window.__init__', 'Gtk.Window.__init__', (['self'], {}), '(self)\n', (294, 300), False, 'from gi.repository import Gtk, Gio\n'), ((392, 407), 'gi.repository.Gtk.HeaderBar', 'Gtk.HeaderBar', ([], {}), '()\n', (405, 407), False, 'from gi.repository import Gtk, Gio\n'), ((592, 604), 'gi.repository.Gtk.Button', 'Gtk.Button', ([], {}), '()\n', (602, 604), False, 'from gi.repository import Gtk, Gio\n'), ((620, 661), 'gi.repository.Gio.ThemedIcon', 'Gio.ThemedIcon', ([], {'name': '"""open-menu-symbolic"""'}), "(name='open-menu-symbolic')\n", (634, 661), False, 'from gi.repository import Gtk, Gio\n'), ((678, 729), 'gi.repository.Gtk.Image.new_from_gicon', 'Gtk.Image.new_from_gicon', (['icon', 'Gtk.IconSize.BUTTON'], {}), '(icon, Gtk.IconSize.BUTTON)\n', (702, 729), False, 'from gi.repository import Gtk, Gio\n'), ((832, 889), 'gi.repository.Gtk.Box', 'Gtk.Box', ([], {'orientation': 'Gtk.Orientation.VERTICAL', 'spacing': '(12)'}), '(orientation=Gtk.Orientation.VERTICAL, spacing=12)\n', (839, 889), False, 'from gi.repository import Gtk, Gio\n'), ((969, 980), 'gi.repository.Gtk.Label', 'Gtk.Label', ([], {}), '()\n', (978, 980), False, 'from gi.repository import Gtk, Gio\n'), ((1094, 1105), 'gi.repository.Gtk.Label', 'Gtk.Label', ([], {}), '()\n', (1103, 1105), False, 'from gi.repository import Gtk, Gio\n'), ((2010, 2027), 'gi.repository.Gtk.ColorButton', 'Gtk.ColorButton', ([], {}), '()\n', (2025, 2027), False, 'from gi.repository import Gtk, Gio\n'), ((2055, 2078), 'gi.repository.Gtk.Label.new', 'Gtk.Label.new', (['"""Center"""'], {}), "('Center')\n", (2068, 2078), False, 'from gi.repository import Gtk, Gio\n'), ((2584, 2594), 'gi.repository.Gtk.Grid', 'Gtk.Grid', ([], {}), '()\n', (2592, 2594), False, 'from gi.repository import Gtk, Gio\n'), ((1173, 1241), 'os.path.join', 'os.path.join', (['"""sys"""', '"""class"""', '"""leds"""', '"""system76_acpi::kbd_backlight"""'], {}), "('sys', 'class', 'leds', 'system76_acpi::kbd_backlight')\n", (1185, 1241), False, 'import os\n'), ((1253, 1281), 'os.path.exists', 'os.path.exists', (['self.ledPath'], {}), '(self.ledPath)\n', (1267, 1281), False, 'import os\n'), ((1535, 1575), 'os.path.join', 'os.path.join', (['self.ledPath', '"""color_left"""'], {}), "(self.ledPath, 'color_left')\n", (1547, 1575), False, 'import os\n'), ((1749, 1766), 'gi.repository.Gtk.ColorButton', 'Gtk.ColorButton', ([], {}), '()\n', (1764, 1766), False, 'from gi.repository import Gtk, Gio\n'), ((1795, 1816), 'gi.repository.Gtk.Label.new', 'Gtk.Label.new', (['"""Left"""'], {}), "('Left')\n", (1808, 1816), False, 'from gi.repository import Gtk, Gio\n'), ((2303, 2320), 'gi.repository.Gtk.ColorButton', 'Gtk.ColorButton', ([], {}), '()\n', (2318, 2320), False, 'from gi.repository import Gtk, Gio\n'), ((2350, 2372), 'gi.repository.Gtk.Label.new', 'Gtk.Label.new', (['"""Right"""'], {}), "('Right')\n", (2363, 2372), False, 'from gi.repository import Gtk, Gio\n'), ((1324, 1387), 'os.path.join', 'os.path.join', (['"""sys"""', '"""class"""', '"""leds"""', '"""system76::kbd_backlight"""'], {}), "('sys', 'class', 'leds', 'system76::kbd_backlight')\n", (1336, 1387), False, 'import os\n')]
|
__author__ = 'team12'
# This program uses Elasticsearch to retrieve data from CouchDB and the Google Maps API to find the suburb of each tweet.
# The output file is used in the data visualization section.
# This program serves the topic of racial issues.
import copy
import json
import time
import googlemaps
from elasticsearch import Elasticsearch
from melbourne_config import host_ip, host_port, e_index, topic_file, base_query_file, g_key, suburbs_file
def get_suburbs():
sub_dic = {}
with open(suburbs_file) as sf:
for suburb in sf:
sub_dic[suburb.rstrip("\n")] = [0, 0]
return sub_dic
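# Note: each suburb maps to a two-element counter initialised to [0, 0]; as
# written, both the 'pos' and 'neg' branches in main() increment index 1 (a
# per-suburb tweet count), so index 0 is left unused.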
#retrieve data from couchdb
def get_tweet_topic():
es = Elasticsearch([{'host': host_ip, 'port': host_port}])
elastic_index = e_index
baseQuery = open(base_query_file).read()
query_obj = json.loads(baseQuery)
phrase_obj = query_obj['query']['bool']['should'].pop()
with open(topic_file) as tf:
for term in tf:
if term.lstrip(' ')[0] == '#':
continue
phrase_obj['match_phrase']['text']['query'] = term.rstrip()
query_obj['query']['bool']['should'].append(copy.deepcopy(phrase_obj))
query = json.dumps(query_obj)
res = es.search(index=elastic_index, body=query, request_timeout=100)
return res
def main():
suburb_set = get_suburbs()
gmaps = googlemaps.Client(key=g_key)
start_time = time.time()
res = get_tweet_topic()
count, pos, neg, err = 0, 0, 0, 0
print(suburb_set)
for doc in res['hits']['hits']:
tweet = doc['_source']
text = tweet['text']
label = tweet['label']
coordinates = tweet['geo']['coordinates']
if coordinates == [0, 0]:
continue
coordinate_tup = tuple(coordinates)
#print(label)
reverse_geocode_result = gmaps.reverse_geocode(coordinate_tup)
for i in reverse_geocode_result[0]['address_components']:
if i['types'] == ['locality', 'political']:
subname = i['long_name']
subname = subname.upper()
#print(subname)
if label == 'pos':
pos += 1
try:
suburb_set[subname][1] += 1
                    except KeyError:
                        print("Suburb not in list:", subname)
continue
elif label == 'neg':
neg += 1
try:
suburb_set[subname][1] += 1
                    except KeyError:
                        print("Suburb not in list:", subname)
continue
else:
print("Error:", tweet)
err += 1
continue
count += 1
resultDict = {'polarity': label, 'coordinates': coordinates}
print(count)
newD = {}
print(suburb_set)
for item in suburb_set:
v = suburb_set.get(item)[1]
print(v)
newD.update({item:round((v/count),2)})
print(newD)
with open('racially.txt', 'w') as fpp:
fpp.write(str(newD))
end_time = time.time()
elapsed = end_time - start_time
print("Runtime:", elapsed, "seconds")
print("Positive num:", pos)
print("Negative num:", neg)
print("Error num:", err)
print(count)
print(suburb_set)
if __name__ == '__main__':
main()
|
[
"elasticsearch.Elasticsearch",
"googlemaps.Client",
"copy.deepcopy",
"json.loads",
"json.dumps",
"time.time"
] |
[((697, 750), 'elasticsearch.Elasticsearch', 'Elasticsearch', (["[{'host': host_ip, 'port': host_port}]"], {}), "([{'host': host_ip, 'port': host_port}])\n", (710, 750), False, 'from elasticsearch import Elasticsearch\n'), ((843, 864), 'json.loads', 'json.loads', (['baseQuery'], {}), '(baseQuery)\n', (853, 864), False, 'import json\n'), ((1225, 1246), 'json.dumps', 'json.dumps', (['query_obj'], {}), '(query_obj)\n', (1235, 1246), False, 'import json\n'), ((1401, 1429), 'googlemaps.Client', 'googlemaps.Client', ([], {'key': 'g_key'}), '(key=g_key)\n', (1418, 1429), False, 'import googlemaps\n'), ((1448, 1459), 'time.time', 'time.time', ([], {}), '()\n', (1457, 1459), False, 'import time\n'), ((3060, 3071), 'time.time', 'time.time', ([], {}), '()\n', (3069, 3071), False, 'import time\n'), ((1185, 1210), 'copy.deepcopy', 'copy.deepcopy', (['phrase_obj'], {}), '(phrase_obj)\n', (1198, 1210), False, 'import copy\n')]
|
#
# Copyright (c) Contributors to the Open 3D Engine Project.
# For complete copyright and license terms please see the LICENSE at the root of this distribution.
#
# SPDX-License-Identifier: Apache-2.0 OR MIT
#
#
import argparse
import fnmatch
import os
handled_file_patterns = [
'*.c', '*.cc', '*.cpp', '*.cxx', '*.h', '*.hpp', '*.hxx', '*.inl', '*.m', '*.mm', '*.cs', '*.java',
'*.py', '*.lua', '*.bat', '*.cmd', '*.sh', '*.js',
'*.cmake', 'CMakeLists.txt'
]
def fixTabs(input_file):
try:
basename = os.path.basename(input_file)
for pattern in handled_file_patterns:
if fnmatch.fnmatch(basename, pattern):
with open(input_file, 'r') as source_file:
fileContents = source_file.read()
if '\t' in fileContents:
newFileContents = fileContents.replace('\t', ' ')
with open(input_file, 'w') as destination_file:
destination_file.write(newFileContents)
print(f'[INFO] Patched {input_file}')
break
except (IOError, UnicodeDecodeError) as err:
print('[ERROR] reading {}: {}'.format(input_file, err))
return
def main():
"""script main function"""
parser = argparse.ArgumentParser(description='This script replaces tabs with spaces',
formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument('file_or_dir', type=str, nargs='+',
help='list of files or directories to search within for files to fix up tabs')
args = parser.parse_args()
for input_file in args.file_or_dir:
if os.path.isdir(input_file):
for dp, dn, filenames in os.walk(input_file):
for f in filenames:
fixTabs(os.path.join(dp, f))
else:
fixTabs(input_file)
#entrypoint
if __name__ == '__main__':
main()
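# Example invocation (the script name and paths are hypothetical; the script
# walks directories recursively and patches matching files in place):
#   python fix_tabs.py Code/Framework Tools/build_script.py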
|
[
"argparse.ArgumentParser",
"os.path.basename",
"os.path.isdir",
"os.walk",
"os.path.join",
"fnmatch.fnmatch"
] |
[((1279, 1406), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""This script replaces tabs with spaces"""', 'formatter_class': 'argparse.RawTextHelpFormatter'}), "(description='This script replaces tabs with spaces',\n formatter_class=argparse.RawTextHelpFormatter)\n", (1302, 1406), False, 'import argparse\n'), ((528, 556), 'os.path.basename', 'os.path.basename', (['input_file'], {}), '(input_file)\n', (544, 556), False, 'import os\n'), ((1642, 1667), 'os.path.isdir', 'os.path.isdir', (['input_file'], {}), '(input_file)\n', (1655, 1667), False, 'import os\n'), ((618, 652), 'fnmatch.fnmatch', 'fnmatch.fnmatch', (['basename', 'pattern'], {}), '(basename, pattern)\n', (633, 652), False, 'import fnmatch\n'), ((1706, 1725), 'os.walk', 'os.walk', (['input_file'], {}), '(input_file)\n', (1713, 1725), False, 'import os\n'), ((1791, 1810), 'os.path.join', 'os.path.join', (['dp', 'f'], {}), '(dp, f)\n', (1803, 1810), False, 'import os\n')]
|
from kivy.lang import Builder
from kivymd.app import MDApp
class SampleUpdater(MDApp):
def build(self):
kv = """
FloatLayout:
MDFlatButton:
id: chk_update_button
text: "Check for Updates"
pos_hint: {'center_x': 0.5, 'center_y': 0.5}
on_release: self.text = "Checking..."
on_release: app.open_dialog()
"""
return Builder.load_string(kv)
def open_dialog(self):
from kivyappupdater import AppUpdater
updater = AppUpdater.Updater()
updater.update_source = "GITHUB/darpan5552/KivyAppUpdater"
updater.check_for_update()
SampleUpdater().run()
|
[
"kivyappupdater.AppUpdater.Updater",
"kivy.lang.Builder.load_string"
] |
[((387, 410), 'kivy.lang.Builder.load_string', 'Builder.load_string', (['kv'], {}), '(kv)\n', (406, 410), False, 'from kivy.lang import Builder\n'), ((505, 525), 'kivyappupdater.AppUpdater.Updater', 'AppUpdater.Updater', ([], {}), '()\n', (523, 525), False, 'from kivyappupdater import AppUpdater\n')]
|
# Built in python libs
import sys
import os
from typing import Dict, List
# Additional libs
import cv2
import numpy as np
# Custom imports
from .astar import *
import matplotlib.pyplot as plt
# Source for visualization functions:
# https://www.analytics-link.com/post/2018/09/14/applying-the-a-path-finding-algorithm-in-python-part-1-2d-square-grid
def plot_graph(grid, start, goal, path=None):
fig, ax = plt.subplots(figsize=(12, 12))
ax.imshow(grid, cmap='Reds')
ax.scatter(start[1], start[0], marker="*", color="blue", s=30)
ax.scatter(goal[1], goal[0], marker="*", color="green", s=30)
    if path is not None and not isinstance(path, bool):
ax.plot(path[:, 1], path[:, 0], color="black")
plt.show()
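# Illustrative call (names are assumptions): `grid` is a 2D numpy array of
# cell costs, start and goal are (row, col) tuples, and `path` is either the
# (N, 2) array of row/col points returned by the A* solver or False when no
# path exists:
#   plot_graph(grid, (0, 0), (9, 9), path)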
|
[
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.show"
] |
[((413, 443), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(12, 12)'}), '(figsize=(12, 12))\n', (425, 443), True, 'import matplotlib.pyplot as plt\n'), ((701, 711), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (709, 711), True, 'import matplotlib.pyplot as plt\n')]
|
"""Support for KNX scenes."""
from __future__ import annotations
from typing import Any
from xknx import XKNX
from xknx.devices import Scene as XknxScene
from homeassistant.components.scene import Scene
from homeassistant.const import CONF_NAME
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from .const import DOMAIN, KNX_ADDRESS
from .knx_entity import KnxEntity
from .schema import SceneSchema
async def async_setup_platform(
hass: HomeAssistant,
config: ConfigType,
async_add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the scenes for KNX platform."""
if not discovery_info or not discovery_info["platform_config"]:
return
platform_config = discovery_info["platform_config"]
xknx: XKNX = hass.data[DOMAIN].xknx
entities = []
for entity_config in platform_config:
entities.append(KNXScene(xknx, entity_config))
async_add_entities(entities)
class KNXScene(KnxEntity, Scene):
"""Representation of a KNX scene."""
def __init__(self, xknx: XKNX, config: ConfigType) -> None:
"""Init KNX scene."""
self._device: XknxScene
super().__init__(
device=XknxScene(
xknx,
name=config[CONF_NAME],
group_address=config[KNX_ADDRESS],
scene_number=config[SceneSchema.CONF_SCENE_NUMBER],
)
)
self._unique_id = (
f"{self._device.scene_value.group_address}_{self._device.scene_number}"
)
async def async_activate(self, **kwargs: Any) -> None:
"""Activate the scene."""
await self._device.run()
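# For reference, a configuration entry along these lines (keys are
# assumptions based on CONF_NAME, KNX_ADDRESS and SceneSchema above) would be
# turned into a KNXScene by async_setup_platform:
#
#   knx:
#     scene:
#       - name: "Evening"
#         address: "0/4/10"
#         scene_number: 23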
|
[
"xknx.devices.Scene"
] |
[((1357, 1487), 'xknx.devices.Scene', 'XknxScene', (['xknx'], {'name': 'config[CONF_NAME]', 'group_address': 'config[KNX_ADDRESS]', 'scene_number': 'config[SceneSchema.CONF_SCENE_NUMBER]'}), '(xknx, name=config[CONF_NAME], group_address=config[KNX_ADDRESS],\n scene_number=config[SceneSchema.CONF_SCENE_NUMBER])\n', (1366, 1487), True, 'from xknx.devices import Scene as XknxScene\n')]
|
import serial
import time
import matplotlib.pyplot as plt
import numpy as np
from scipy import signal
from scipy.fft import fft, ifft, fftfreq
from math import pi
from scipy.signal import butter, lfilter
def avg(list):
return sum(list)/len(list)
def butter_bandpass(lowcut, highcut, fs, order=5):
nyq = 0.5 * fs
low = lowcut / nyq
high = highcut / nyq
b, a = butter(order, [low, high], btype='band')
return b, a
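# The band edges above are normalised by the Nyquist frequency (fs/2), since
# scipy's butter() expects critical frequencies in (0, 1). For example, with
# fs = 100 Hz, lowcut = 8 and highcut = 13 give low = 0.16 and high = 0.26.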
def butter_bandpass_filter(data, lowcut, highcut, fs, order=5):
b, a = butter_bandpass(lowcut, highcut, fs, order=order)
y = lfilter(b, a, data)
return y
def do():
    # make sure the serial port matches your system ('COM#' per the Windows
    # Device Manager, or a /dev/cu.usbmodem* device on macOS); the baud rate
    # must match the Arduino sketch
    ser = serial.Serial('/dev/cu.usbmodem101', 9800, timeout=1)
time.sleep(2)
timestamp = list() #get the time to figure out the frequency at which for gets data
data = []
counter = 0 #counts the data appended without error in order to make the time vector
num_samples = 200
for i in range(num_samples):
line = ser.readline() # read a byte string
if line:
timestamp.append(time.time())
string = line.decode() # convert the byte string to a unicode string
#try except to catch value error, counter for # of data appended
try:
num = int(string) # convert the unicode string to an int
                data.append(num*(3.3/682)) # scale the raw reading to volts (3.3 V per 682 counts) and store it
counter += 1;
except ValueError:
print('value Error \n')
pass
ser.close()
#analyse recording frequency
recording_intervals = list()
    # skip the first timestamp; each interval is the gap between consecutive reads
    for i in range(1, len(timestamp)):
        recording_intervals.append(timestamp[i] - timestamp[i-1])
avg_period = avg(recording_intervals)
avg_freq = 1/avg_period #freq is 1/sampling_period
fs = avg_freq
    t = np.linspace(0, num_samples * avg_period, num_samples, endpoint=False)
sample = data
#plotting voltage reading vs time
plt.figure(figsize=(18,4))
    plt.plot(t, sample)
    plt.title('Potentiometer Reading vs. Time')
    plt.xlabel('Time (s)')
    plt.ylabel('Signal Amplitude (V)')
plt.grid()
plt.show()
    #plotting the band-pass filtered signal (the input convolved with the Butterworth filter)
    convolution = butter_bandpass_filter(sample, 8, 13, fs, order=5)
plt.figure(figsize=(18,4))
    plt.plot(t, convolution, alpha=0.4)
    plt.title('Filtered signal vs. Time')
    plt.xlabel('Time (s)')
    plt.ylabel('Signal Amplitude (V)')
plt.grid()
plt.show()
#plotting power of the signal using the welch method
f, power_signal = signal.welch(convolution, fs)
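    # welch() also returns the bin frequencies; with scipy's default nperseg
    # of 256 the bins are spaced fs/256 Hz apart, so f must be consulted
    # instead of assuming one bin per hertz (as done below)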
plt.semilogy(f, power_signal)
plt.xlabel('frequency [Hz]')
plt.ylabel('PSD [V**2/Hz]')
plt.grid()
plt.show()
    #only keep the power in the 8-13Hz freq band, selecting bins by their
    #actual frequency from f rather than by array index
    out_freq_pow = list()
    for freq, p in zip(f, power_signal):
        if 8 <= freq <= 13:
            out_freq_pow.append(p)
    return out_freq_pow
#Curve analysis vs time performed in GUI
b = do()
print('Results: \n')
print(b)
#references:
#https://www.eecis.udel.edu/~boncelet/ipynb/Filtering.html
#https://pythonforundergradengineers.com/python-arduino-potentiometer.html
#https://www.daanmichiels.com/blog/2017/10/filtering-eeg-signals-using-scipy/
#https://www.tutorialspoint.com/python/python_gui_programming.htm
#https://stackoverflow.com/questions/12093594/how-to-implement-band-pass-butterworth-filter-with-scipy-signal-butter
|
[
"serial.Serial",
"matplotlib.pyplot.title",
"matplotlib.pyplot.show",
"scipy.signal.welch",
"matplotlib.pyplot.plot",
"scipy.signal.lfilter",
"time.sleep",
"time.time",
"matplotlib.pyplot.figure",
"numpy.linspace",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.semilogy",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.grid",
"scipy.signal.butter"
] |
[((381, 421), 'scipy.signal.butter', 'butter', (['order', '[low, high]'], {'btype': '"""band"""'}), "(order, [low, high], btype='band')\n", (387, 421), False, 'from scipy.signal import butter, lfilter\n'), ((572, 591), 'scipy.signal.lfilter', 'lfilter', (['b', 'a', 'data'], {}), '(b, a, data)\n', (579, 591), False, 'from scipy.signal import butter, lfilter\n'), ((698, 751), 'serial.Serial', 'serial.Serial', (['"""/dev/cu.usbmodem101"""', '(9800)'], {'timeout': '(1)'}), "('/dev/cu.usbmodem101', 9800, timeout=1)\n", (711, 751), False, 'import serial\n'), ((756, 769), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (766, 769), False, 'import time\n'), ((2100, 2155), 'numpy.linspace', 'np.linspace', (['(0)', 'avg_period', 'num_samples'], {'endpoint': '(False)'}), '(0, avg_period, num_samples, endpoint=False)\n', (2111, 2155), True, 'import numpy as np\n'), ((2221, 2248), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(18, 4)'}), '(figsize=(18, 4))\n', (2231, 2248), True, 'import matplotlib.pyplot as plt\n'), ((2252, 2271), 'matplotlib.pyplot.plot', 'plt.plot', (['t', 'sample'], {}), '(t, sample)\n', (2260, 2271), True, 'import matplotlib.pyplot as plt\n'), ((2298, 2341), 'matplotlib.pyplot.title', 'plt.title', (['"""Potentiometer Reading vs. Time"""'], {}), "('Potentiometer Reading vs. Time')\n", (2307, 2341), True, 'import matplotlib.pyplot as plt\n'), ((2347, 2370), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time (ms)"""'], {}), "('Time (ms)')\n", (2357, 2370), True, 'import matplotlib.pyplot as plt\n'), ((2375, 2410), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Signal Amplitude (mV)"""'], {}), "('Signal Amplitude (mV)')\n", (2385, 2410), True, 'import matplotlib.pyplot as plt\n'), ((2415, 2425), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (2423, 2425), True, 'import matplotlib.pyplot as plt\n'), ((2430, 2440), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2438, 2440), True, 'import matplotlib.pyplot as plt\n'), ((2614, 2641), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(18, 4)'}), '(figsize=(18, 4))\n', (2624, 2641), True, 'import matplotlib.pyplot as plt\n'), ((2645, 2680), 'matplotlib.pyplot.plot', 'plt.plot', (['t', 'convolution'], {'alpha': '(0.4)'}), '(t, convolution, alpha=0.4)\n', (2653, 2680), True, 'import matplotlib.pyplot as plt\n'), ((2699, 2736), 'matplotlib.pyplot.title', 'plt.title', (['"""Filtered signal vs. Time"""'], {}), "('Filtered signal vs. Time')\n", (2708, 2736), True, 'import matplotlib.pyplot as plt\n'), ((2742, 2765), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time (ms)"""'], {}), "('Time (ms)')\n", (2752, 2765), True, 'import matplotlib.pyplot as plt\n'), ((2770, 2805), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Signal Amplitude (mV)"""'], {}), "('Signal Amplitude (mV)')\n", (2780, 2805), True, 'import matplotlib.pyplot as plt\n'), ((2810, 2820), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (2818, 2820), True, 'import matplotlib.pyplot as plt\n'), ((2825, 2835), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2833, 2835), True, 'import matplotlib.pyplot as plt\n'), ((2920, 2949), 'scipy.signal.welch', 'signal.welch', (['convolution', 'fs'], {}), '(convolution, fs)\n', (2932, 2949), False, 'from scipy import signal\n'), ((2954, 2983), 'matplotlib.pyplot.semilogy', 'plt.semilogy', (['f', 'power_signal'], {}), '(f, power_signal)\n', (2966, 2983), True, 'import matplotlib.pyplot as plt\n'), ((2988, 3016), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""frequency [Hz]"""'], {}), "('frequency [Hz]')\n", (2998, 3016), True, 'import matplotlib.pyplot as plt\n'), ((3021, 3048), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""PSD [V**2/Hz]"""'], {}), "('PSD [V**2/Hz]')\n", (3031, 3048), True, 'import matplotlib.pyplot as plt\n'), ((3053, 3063), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (3061, 3063), True, 'import matplotlib.pyplot as plt\n'), ((3068, 3078), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3076, 3078), True, 'import matplotlib.pyplot as plt\n'), ((1124, 1135), 'time.time', 'time.time', ([], {}), '()\n', (1133, 1135), False, 'import time\n')]
import heapq
class MaxHeap:
"""
Max heap
"""
def __init__(self, capacity):
self.heap = []
self.capacity = capacity
def add(self, score, term):
item = (score, term)
if len(self.heap) >= self.capacity:
heapq.heappushpop(self.heap, item)
else:
heapq.heappush(self.heap, item)
def heapify(self):
heapq.heapify(self.heap)
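# Editor's sketch (values hypothetical): keep the 2 highest-scoring terms.
if __name__ == "__main__":
    top_terms = MaxHeap(capacity=2)
    for score, term in [(0.3, "a"), (0.9, "b"), (0.5, "c")]:
        top_terms.add(score, term)
    print(sorted(top_terms.heap, reverse=True))  # [(0.9, 'b'), (0.5, 'c')]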
|
[
"heapq.heappush",
"heapq.heapify",
"heapq.heappushpop"
] |
[((393, 417), 'heapq.heapify', 'heapq.heapify', (['self.heap'], {}), '(self.heap)\n', (406, 417), False, 'import heapq\n'), ((268, 302), 'heapq.heappushpop', 'heapq.heappushpop', (['self.heap', 'item'], {}), '(self.heap, item)\n', (285, 302), False, 'import heapq\n'), ((329, 360), 'heapq.heappush', 'heapq.heappush', (['self.heap', 'item'], {}), '(self.heap, item)\n', (343, 360), False, 'import heapq\n')]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-09-30 21:58
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
replaces = [('ghu_main', '0003_toolkit'), ('ghu_main', '0004_auto_20170916_1857'), ('ghu_main', '0005_auto_20170922_1836')]
dependencies = [
('ghu_main', '0003_Navbar'),
]
operations = [
migrations.CreateModel(
name='Toolkit',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=256)),
],
),
migrations.CreateModel(
name='ToolkitPage',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('order', models.PositiveIntegerField(db_index=True, editable=False)),
('page', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='ghu_main.Page')),
('toolkit', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='ghu_main.Toolkit')),
],
options={
'ordering': ('order',),
'abstract': False,
},
),
]
|
[
"django.db.models.CharField",
"django.db.models.PositiveIntegerField",
"django.db.models.ForeignKey",
"django.db.models.AutoField"
] |
[((547, 640), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (563, 640), False, 'from django.db import migrations, models\n'), ((664, 696), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(256)'}), '(max_length=256)\n', (680, 696), False, 'from django.db import migrations, models\n'), ((833, 926), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (849, 926), False, 'from django.db import migrations, models\n'), ((951, 1009), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'db_index': '(True)', 'editable': '(False)'}), '(db_index=True, editable=False)\n', (978, 1009), False, 'from django.db import migrations, models\n'), ((1037, 1124), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""ghu_main.Page"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'ghu_main.Page')\n", (1054, 1124), False, 'from django.db import migrations, models\n'), ((1150, 1240), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""ghu_main.Toolkit"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'ghu_main.Toolkit')\n", (1167, 1240), False, 'from django.db import migrations, models\n')]
|
from control import press
def test_tutorial_screen(running_app, root_manager):
from screens.tutorial import TutorialScreen
screen = TutorialScreen()
manager = screen.main_manager
assert manager.current == 'welcome'
assert manager.has_screen('start_test')
assert manager.has_screen('tasks_to_stats')
assert manager.has_screen('categories')
assert manager.has_screen('notifications')
assert manager.has_screen('tasks')
assert manager.has_screen('results')
start_test_screen = manager.get_screen('start_test')
press(start_test_screen.next_button)
assert root_manager.current == 'tasks'
def test_tutorial_mixin(running_app):
from screens.activity.content import FilterPanel
panel = FilterPanel()
panel.show_tutorial()
assert panel._top_rectangle
assert panel._bottom_rectangle
assert panel._left_rectangle
assert panel._right_rectangle
panel.hide_tutorial()
def test_tutorial_mixin_chain(running_app):
from screens.activity.content import FilterPanel
panel1 = FilterPanel()
panel2 = FilterPanel()
panel1.show_tutorial(next_widgets=[panel2])
panel1.hide_tutorial()
|
[
"screens.activity.content.FilterPanel",
"control.press",
"screens.tutorial.TutorialScreen"
] |
[((142, 158), 'screens.tutorial.TutorialScreen', 'TutorialScreen', ([], {}), '()\n', (156, 158), False, 'from screens.tutorial import TutorialScreen\n'), ((557, 593), 'control.press', 'press', (['start_test_screen.next_button'], {}), '(start_test_screen.next_button)\n', (562, 593), False, 'from control import press\n'), ((742, 755), 'screens.activity.content.FilterPanel', 'FilterPanel', ([], {}), '()\n', (753, 755), False, 'from screens.activity.content import FilterPanel\n'), ((1054, 1067), 'screens.activity.content.FilterPanel', 'FilterPanel', ([], {}), '()\n', (1065, 1067), False, 'from screens.activity.content import FilterPanel\n'), ((1081, 1094), 'screens.activity.content.FilterPanel', 'FilterPanel', ([], {}), '()\n', (1092, 1094), False, 'from screens.activity.content import FilterPanel\n')]
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('xblock_django', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='xblockdisableconfig',
name='disabled_create_blocks',
field=models.TextField(default='', help_text='Space-separated list of XBlock types whose creation to disable in Studio.', blank=True),
),
]
|
[
"django.db.models.TextField"
] |
[((309, 446), 'django.db.models.TextField', 'models.TextField', ([], {'default': '""""""', 'help_text': '"""Space-separated list of XBlock types whose creation to disable in Studio."""', 'blank': '(True)'}), "(default='', help_text=\n 'Space-separated list of XBlock types whose creation to disable in Studio.'\n , blank=True)\n", (325, 446), False, 'from django.db import migrations, models\n')]
|
# -*- coding: utf-8 -*-
from findy.interface import Region, Provider, EntityType
from findy.database.schema.misc.holder import HolderBase
from findy.database.plugins.register import register_schema
from findy.database.plugins.eastmoney.holder.top_ten_holder_recorder import *
from findy.database.plugins.eastmoney.holder.top_ten_tradable_holder_recorder import *
register_schema(Region.CHN,
Provider.EastMoney,
db_name='holder',
schema_base=HolderBase,
entity_type=EntityType.Stock)
|
[
"findy.database.plugins.register.register_schema"
] |
[((365, 488), 'findy.database.plugins.register.register_schema', 'register_schema', (['Region.CHN', 'Provider.EastMoney'], {'db_name': '"""holder"""', 'schema_base': 'HolderBase', 'entity_type': 'EntityType.Stock'}), "(Region.CHN, Provider.EastMoney, db_name='holder',\n schema_base=HolderBase, entity_type=EntityType.Stock)\n", (380, 488), False, 'from findy.database.plugins.register import register_schema\n')]
|
#!/usr/bin/env python
"""
Scan process for COM classes
"""
import os
import sys
import lib_util
import lib_common
from lib_properties import pc
import lib_com_type_lib
from sources_types import CIM_Process
from sources_types.CIM_Process import memory_regex_search
SlowScript = True
def Main():
cgiEnv = lib_common.CgiEnv()
pidint = int( cgiEnv.GetId() )
grph = cgiEnv.GetGraph()
node_process = lib_common.gUriGen.PidUri(pidint)
try:
rgxHttp = r"\{[0-9A-F]{8}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{12}\}"
resuComClasses = memory_regex_search.GetRegexMatches(pidint,rgxHttp)
resuClean = set()
propComClass = lib_common.MakeProp("COM class")
for classIdx in resuComClasses:
comClassId = resuComClasses[classIdx]
# On Python3, this is a bytes which must be converted to str.
comClassId = str(comClassId)
# comClass = "{DB7A405D-208F-4E88-BA0A-132ACFA0B5B6}" for example.
typelibNode = lib_common.gUriGen.ComRegisteredTypeLibUri( comClassId )
grph.add( ( node_process, propComClass, typelibNode ) )
except Exception:
exc = sys.exc_info()[1]
lib_common.ErrorMessageHtml("Error:%s. Protection ?"%str(exc))
cgiEnv.OutCgiRdf()
if __name__ == '__main__':
Main()
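# Editor's sketch: the CLSID pattern from Main(), checked standalone against
# the example GUID quoted in its comments (outside the CGI context):
#   import re
#   rgx = r"\{[0-9A-F]{8}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{12}\}"
#   re.findall(rgx, "clsid {DB7A405D-208F-4E88-BA0A-132ACFA0B5B6} found")
#   # -> ['{DB7A405D-208F-4E88-BA0A-132ACFA0B5B6}']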
|
[
"lib_common.gUriGen.PidUri",
"lib_common.CgiEnv",
"lib_common.gUriGen.ComRegisteredTypeLibUri",
"sys.exc_info",
"sources_types.CIM_Process.memory_regex_search.GetRegexMatches",
"lib_common.MakeProp"
] |
[((310, 329), 'lib_common.CgiEnv', 'lib_common.CgiEnv', ([], {}), '()\n', (327, 329), False, 'import lib_common\n'), ((406, 439), 'lib_common.gUriGen.PidUri', 'lib_common.gUriGen.PidUri', (['pidint'], {}), '(pidint)\n', (431, 439), False, 'import lib_common\n'), ((547, 599), 'sources_types.CIM_Process.memory_regex_search.GetRegexMatches', 'memory_regex_search.GetRegexMatches', (['pidint', 'rgxHttp'], {}), '(pidint, rgxHttp)\n', (582, 599), False, 'from sources_types.CIM_Process import memory_regex_search\n'), ((638, 670), 'lib_common.MakeProp', 'lib_common.MakeProp', (['"""COM class"""'], {}), "('COM class')\n", (657, 670), False, 'import lib_common\n'), ((931, 985), 'lib_common.gUriGen.ComRegisteredTypeLibUri', 'lib_common.gUriGen.ComRegisteredTypeLibUri', (['comClassId'], {}), '(comClassId)\n', (973, 985), False, 'import lib_common\n'), ((1075, 1089), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (1087, 1089), False, 'import sys\n')]
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
import pickle
from datetime import datetime
from typing import Any, Optional
from uuid import UUID
from flask_appbuilder.security.sqla.models import User
from sqlalchemy.exc import SQLAlchemyError
from superset import db
from superset.commands.base import BaseCommand
from superset.key_value.exceptions import KeyValueCreateFailedError
from superset.key_value.models import KeyValueEntry
from superset.key_value.types import KeyType
from superset.key_value.utils import extract_key
logger = logging.getLogger(__name__)
class CreateKeyValueCommand(BaseCommand):
actor: Optional[User]
resource: str
value: Any
key_type: KeyType
key: Optional[str]
expires_on: Optional[datetime]
def __init__(
self,
resource: str,
value: Any,
key_type: KeyType = "uuid",
actor: Optional[User] = None,
key: Optional[str] = None,
expires_on: Optional[datetime] = None,
):
"""
Create a new key-value pair
:param resource: the resource (dashboard, chart etc)
:param value: the value to persist in the key-value store
:param key_type: the type of the key to return
:param actor: the user performing the command
:param key: id of entry (autogenerated if undefined)
:param expires_on: entry expiration time
:return: the key associated with the persisted value
"""
self.resource = resource
self.actor = actor
self.value = value
self.key_type = key_type
self.key = key
self.expires_on = expires_on
def run(self) -> str:
try:
return self.create()
except SQLAlchemyError as ex:
db.session.rollback()
logger.exception("Error running create command")
raise KeyValueCreateFailedError() from ex
def validate(self) -> None:
pass
def create(self) -> str:
entry = KeyValueEntry(
resource=self.resource,
value=pickle.dumps(self.value),
created_on=datetime.now(),
created_by_fk=None
if self.actor is None or self.actor.is_anonymous
else self.actor.id,
expires_on=self.expires_on,
)
if self.key is not None:
try:
if self.key_type == "uuid":
entry.uuid = UUID(self.key)
else:
entry.id = int(self.key)
except ValueError as ex:
raise KeyValueCreateFailedError() from ex
db.session.add(entry)
db.session.commit()
return extract_key(entry, self.key_type)
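# Editor's sketch (resource/value hypothetical): the command is meant to run
# inside an active Flask app and SQLAlchemy session, e.g.:
#   key = CreateKeyValueCommand(resource="dashboard", value={"foo": "bar"}).run()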
|
[
"superset.key_value.utils.extract_key",
"superset.db.session.add",
"superset.db.session.commit",
"uuid.UUID",
"superset.key_value.exceptions.KeyValueCreateFailedError",
"datetime.datetime.now",
"superset.db.session.rollback",
"logging.getLogger",
"pickle.dumps"
] |
[((1294, 1321), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1311, 1321), False, 'import logging\n'), ((3364, 3385), 'superset.db.session.add', 'db.session.add', (['entry'], {}), '(entry)\n', (3378, 3385), False, 'from superset import db\n'), ((3394, 3413), 'superset.db.session.commit', 'db.session.commit', ([], {}), '()\n', (3411, 3413), False, 'from superset import db\n'), ((3429, 3462), 'superset.key_value.utils.extract_key', 'extract_key', (['entry', 'self.key_type'], {}), '(entry, self.key_type)\n', (3440, 3462), False, 'from superset.key_value.utils import extract_key\n'), ((2515, 2536), 'superset.db.session.rollback', 'db.session.rollback', ([], {}), '()\n', (2534, 2536), False, 'from superset import db\n'), ((2616, 2643), 'superset.key_value.exceptions.KeyValueCreateFailedError', 'KeyValueCreateFailedError', ([], {}), '()\n', (2641, 2643), False, 'from superset.key_value.exceptions import KeyValueCreateFailedError\n'), ((2813, 2837), 'pickle.dumps', 'pickle.dumps', (['self.value'], {}), '(self.value)\n', (2825, 2837), False, 'import pickle\n'), ((2862, 2876), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2874, 2876), False, 'from datetime import datetime\n'), ((3179, 3193), 'uuid.UUID', 'UUID', (['self.key'], {}), '(self.key)\n', (3183, 3193), False, 'from uuid import UUID\n'), ((3320, 3347), 'superset.key_value.exceptions.KeyValueCreateFailedError', 'KeyValueCreateFailedError', ([], {}), '()\n', (3345, 3347), False, 'from superset.key_value.exceptions import KeyValueCreateFailedError\n')]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-11-19 23:16
from __future__ import unicode_literals
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('applications', '0004_empty_resume'),
]
operations = [
migrations.RenameField(
model_name='application',
old_name='scholarship',
new_name='reimb',
),
migrations.AddField(
model_name='application',
name='reimb_amount',
field=models.FloatField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(0, 'Negative? Really? Please put a positive value')]),
),
migrations.AlterField(
model_name='application',
name='phone_number',
field=models.CharField(blank=True, max_length=16, null=True, validators=[django.core.validators.RegexValidator(message="Phone number must be entered in the format: '+#########'. Up to 15 digits allowed.", regex='^\\+?1?\\d{9,15}$')]),
),
]
|
[
"django.db.migrations.RenameField"
] |
[((344, 438), 'django.db.migrations.RenameField', 'migrations.RenameField', ([], {'model_name': '"""application"""', 'old_name': '"""scholarship"""', 'new_name': '"""reimb"""'}), "(model_name='application', old_name='scholarship',\n new_name='reimb')\n", (366, 438), False, 'from django.db import migrations, models\n')]
|
import discord
from discord.ext import commands, tasks
import requests
from bs4 import BeautifulSoup
import re
text_channels = {}
voice_channels = {}
class CustomChannelCog(commands.Cog):
def __init__(self, bot):
self.bot = bot
async def getVoiceChannel(self, ctx, user):
role = next((role for role in user.roles if "@owner" in role.name.lower()), None)
if role is None:
return None
return discord.utils.get(ctx.message.guild.channels, id=int(role.name.split('@')[0]))
@commands.command(name="createvoice", aliases=["cv"])
async def createVoiceRoomCmd(self, ctx, *, title) :
user = ctx.message.author
channel = await self.getVoiceChannel(ctx, user)
if channel is not None:
await ctx.send("You can only have 1 custom channel at the same time! !dv for delete.")
return
success = True
try:
guild = ctx.message.guild
channel = await guild.create_voice_channel(title, category = discord.utils.get(guild.categories, name="Custom"))
role = await guild.create_role(name=f"{channel.id}@owner")
await user.add_roles(role)
except:
success = False
if success:
await ctx.send("Room created!")
else:
await ctx.send("Sorry, there is something wrong :(")
@commands.command(name="limitvoice", aliases=["lv"])
async def setVoiceRoomLimitCmd(self, ctx, limit) :
user = ctx.message.author
channel = await self.getVoiceChannel(ctx, user)
if channel is None:
await ctx.send("You don't have any custom channel right now!")
return
try:
limit = int(limit)
except:
pass
if not isinstance(limit, int):
await ctx.send("Limit must be a number!")
return
if limit < 0 or limit > 99:
await ctx.send("Limit must be in range 0 and 99 (inclusive)!")
return
success = True
try:
await channel.edit(user_limit= int(limit))
except:
success = False
if success:
await ctx.send(f"Limit member set to {limit}!")
else:
await ctx.send("Sorry, there is something wrong :(")
@commands.command(name="deletevoice", aliases=["dv"])
async def deleteVoiceRoomCmd(self, ctx) :
user = ctx.message.author
channel = await self.getVoiceChannel(ctx, user)
if channel is None:
await ctx.send("You don't have any custom channel right now!")
return
success = True
try:
role = next((role for role in user.roles if "@owner" in role.name.lower()), None)
await user.remove_roles(role)
await role.delete()
await channel.delete()
except:
success = False
if success:
await ctx.send("Room deleted!")
else:
await ctx.send("Sorry, there is something wrong :(")
def setup(bot):
bot.add_cog(CustomChannelCog(bot))
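# Editor's sketch (token placeholder; discord.py 1.x assumed — 2.x also
# requires intents and an async setup hook):
#   bot = commands.Bot(command_prefix="!")
#   setup(bot)
#   bot.run("YOUR_BOT_TOKEN")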
|
[
"discord.utils.get",
"discord.ext.commands.command"
] |
[((535, 587), 'discord.ext.commands.command', 'commands.command', ([], {'name': '"""createvoice"""', 'aliases': "['cv']"}), "(name='createvoice', aliases=['cv'])\n", (551, 587), False, 'from discord.ext import commands, tasks\n'), ((1397, 1448), 'discord.ext.commands.command', 'commands.command', ([], {'name': '"""limitvoice"""', 'aliases': "['lv']"}), "(name='limitvoice', aliases=['lv'])\n", (1413, 1448), False, 'from discord.ext import commands, tasks\n'), ((2361, 2413), 'discord.ext.commands.command', 'commands.command', ([], {'name': '"""deletevoice"""', 'aliases': "['dv']"}), "(name='deletevoice', aliases=['dv'])\n", (2377, 2413), False, 'from discord.ext import commands, tasks\n'), ((1033, 1083), 'discord.utils.get', 'discord.utils.get', (['guild.categories'], {'name': '"""Custom"""'}), "(guild.categories, name='Custom')\n", (1050, 1083), False, 'import discord\n')]
|
import FWCore.ParameterSet.Config as cms
## baseline configuration in the class itself
from EventFilter.CSCRawToDigi.cscPackerDef_cfi import cscPackerDef
cscpacker = cscPackerDef.clone()
## In Run-2 common: update the format version for new OTMBs in ME1/1
## Note: in the past, the packing with triggers and pretriggers was disabled
## for Run-2, Run-3 and Phase-2 scenarios. This should no longer be the case
## as of CMSSW_12_0_0_pre5
from Configuration.Eras.Modifier_run2_common_cff import run2_common
run2_common.toModify( cscpacker,
formatVersion = 2013)
## in Run-3 scenarios with GEM: pack GEM clusters
from Configuration.Eras.Modifier_run3_GEM_cff import run3_GEM
run3_GEM.toModify( cscpacker,
useGEMs = True)
|
[
"Configuration.Eras.Modifier_run2_common_cff.run2_common.toModify",
"Configuration.Eras.Modifier_run3_GEM_cff.run3_GEM.toModify",
"EventFilter.CSCRawToDigi.cscPackerDef_cfi.cscPackerDef.clone"
] |
[((167, 187), 'EventFilter.CSCRawToDigi.cscPackerDef_cfi.cscPackerDef.clone', 'cscPackerDef.clone', ([], {}), '()\n', (185, 187), False, 'from EventFilter.CSCRawToDigi.cscPackerDef_cfi import cscPackerDef\n'), ((507, 558), 'Configuration.Eras.Modifier_run2_common_cff.run2_common.toModify', 'run2_common.toModify', (['cscpacker'], {'formatVersion': '(2013)'}), '(cscpacker, formatVersion=2013)\n', (527, 558), False, 'from Configuration.Eras.Modifier_run2_common_cff import run2_common\n'), ((697, 739), 'Configuration.Eras.Modifier_run3_GEM_cff.run3_GEM.toModify', 'run3_GEM.toModify', (['cscpacker'], {'useGEMs': '(True)'}), '(cscpacker, useGEMs=True)\n', (714, 739), False, 'from Configuration.Eras.Modifier_run3_GEM_cff import run3_GEM\n')]
|
from datetime import datetime
from django.conf import settings
from django.contrib.admin.sites import AdminSite
from django.core.files.storage import default_storage
from django.test import SimpleTestCase
from django.utils.dateformat import format
from .admin import BookmarkAdmin
from . import forms
from .models import Bookmark, Category
from .signals import category_post_save, bookmark_post_delete
class CategoryTest(Category):
def save(self, *args, **kwargs):
pass
class BookmarkForm(forms.BookmarkForm):
class Meta:
fields = ['title', 'urls', 'screenshot']
model = Bookmark
class BookmarksTest(SimpleTestCase):
def setUp(self):
self.category1 = CategoryTest(id=1, name='q0t')
self.category2 = CategoryTest(id=2, name='haokx9', parent=self.category1, parent_id=1)
self.bookmark1 = Bookmark(id=1, category=self.category2)
self.bookmark1.title = 'c5j'
self.bookmark1.urls = ['hda', 'n0k']
self.bookmark2 = Bookmark(category=self.category1)
def test_admin(self):
admin = BookmarkAdmin(Bookmark, AdminSite)
self.assertEqual(admin.title(self.bookmark1),
'<a href="hda">c5j</a><br><a href="n0k">02</a>')
self.assertEqual(admin.get_category(self.bookmark1), 'q0t/haokx9')
self.assertEqual(admin.get_category(self.bookmark2), 'q0t')
def test_forms(self):
post = {'urls': 'https://57st.net/articles/125\r\nv5i'}
form = BookmarkForm(post, instance=self.bookmark1)
self.assertEqual(form.initial['urls'], 'hda\r\nn0k')
self.assertTrue(form.is_valid())
form.save(commit=False)
self.assertEqual(self.bookmark1.urls, ['https://57st.net/articles/125', 'v5i'])
self.assertEqual(self.bookmark1.title, 'Retrowave')
post['title'] = 'poh'
form = BookmarkForm(post, instance=self.bookmark2)
self.assertTrue(form.is_valid())
form.save(commit=False)
self.assertEqual(self.bookmark2.date,
format(datetime.today(), settings.DATETIME_FORMAT))
form = BookmarkForm({})
self.assertFalse(form.is_valid())
self.xtest_forms_xclean_screenshot(post)
def test_models(self):
self.assertEqual(str(self.category1), 'q0t')
self.assertEqual(str(self.category2), '-- haokx9')
self.assertEqual(str(self.bookmark1), 'c5j')
def xtest_forms_xclean_screenshot(self, post):
post['screenshot'] = BookmarkForm.Screenshot.HEIGHT
form = BookmarkForm(post, instance=self.bookmark1)
self.assertTrue(form.is_valid())
form.save(commit=False)
self.assertEqual(len(self.bookmark1.images), 1)
self.assertTrue(default_storage.exists(self.bookmark1.images[0]))
post['urls'] = 'https://www.youtube.com/watch?v=ljvvHgb1h_4'
post['screenshot'] = BookmarkForm.Screenshot.YOUTUBE_DL
post['images'] = self.bookmark1.images[0]
form = BookmarkForm(post, instance=self.bookmark1)
self.assertTrue(form.is_valid())
form.save(commit=False)
self.assertEqual(len(self.bookmark1.images), 2)
self.assertTrue(default_storage.exists(self.bookmark1.images[-1]))
self.xtest_signals()
def xtest_signals(self):
bookmark_post_delete(self.bookmark1)
for name in self.bookmark1.images:
self.assertFalse(default_storage.exists(name))
category_post_save(self.category1)
self.assertEqual(self.category1.level, 1)
category_post_save(self.category2)
self.assertEqual(self.category2.level, '1-haokx')
|
[
"django.core.files.storage.default_storage.exists",
"datetime.datetime.today"
] |
[((2743, 2791), 'django.core.files.storage.default_storage.exists', 'default_storage.exists', (['self.bookmark1.images[0]'], {}), '(self.bookmark1.images[0])\n', (2765, 2791), False, 'from django.core.files.storage import default_storage\n'), ((3188, 3237), 'django.core.files.storage.default_storage.exists', 'default_storage.exists', (['self.bookmark1.images[-1]'], {}), '(self.bookmark1.images[-1])\n', (3210, 3237), False, 'from django.core.files.storage import default_storage\n'), ((2058, 2074), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (2072, 2074), False, 'from datetime import datetime\n'), ((3415, 3443), 'django.core.files.storage.default_storage.exists', 'default_storage.exists', (['name'], {}), '(name)\n', (3437, 3443), False, 'from django.core.files.storage import default_storage\n')]
|
import re
from core.msfActionModule import msfActionModule
from core.keystore import KeyStore as kb
from core.utils import Utils
class scan_msf_smbuserenum(msfActionModule):
def __init__(self, config, display, lock):
super(scan_msf_smbuserenum, self).__init__(config, display, lock)
self.title = "Get List of Users From SMB"
self.shortName = "MSFSMBUserEnum"
self.description = "execute [auxiliary/scanner/smb/smb_enumusers] on each target"
self.requirements = ["msfconsole"]
self.triggers = ["nullSession"]
self.safeLevel = 5
def getTargets(self):
# we are interested only in the hosts that had nullsessions
self.targets = kb.get('vuln/host/*/nullSession')
def process(self):
# load any targets we are interested in
self.getTargets()
if len(self.targets) > 0:
# loop over each target
for t in self.targets:
# verify we have not tested this host before
if not self.seentarget(t):
# add the new IP to the already seen list
self.addseentarget(t)
cmd = {
'config':[
"use auxiliary/scanner/smb/smb_enumusers",
"set RHOSTS %s" % t
],
'payload':'none'}
                    result, outfile = self.msfExec(t, cmd)
# MSF output format:[*] [timestamp] IP DOMAIN [user,users] ( extras)
parts = re.findall(".*" + t.replace(".", "\.") + ".*", result)
for part in parts:
if "RHOSTS" in part:
pass
else:
try:
pieces = part.split()
domain = pieces[3]
kb.add("host/" + t + "/domain/" + domain.strip())
extras = part.split('(')[1].split(')')[0]
users = part.split('[')[3].split(']')[0].split(',')
for user in users:
kb.add("creds/host/" + t + "/username/" + user.strip())
except:
pass
outfile = self.config["proofsDir"] + self.shortName + "_" + t + "_" + Utils.getRandStr(10)
Utils.writeFile(result, outfile)
kb.add("host/" + t + "/files/" + self.shortName + "/" + outfile.replace("/", "%2F"))
return
|
[
"core.utils.Utils.getRandStr",
"core.utils.Utils.writeFile",
"core.keystore.KeyStore.get"
] |
[((710, 743), 'core.keystore.KeyStore.get', 'kb.get', (['"""vuln/host/*/nullSession"""'], {}), "('vuln/host/*/nullSession')\n", (716, 743), True, 'from core.keystore import KeyStore as kb\n'), ((2545, 2577), 'core.utils.Utils.writeFile', 'Utils.writeFile', (['result', 'outfile'], {}), '(result, outfile)\n', (2560, 2577), False, 'from core.utils import Utils\n'), ((2504, 2524), 'core.utils.Utils.getRandStr', 'Utils.getRandStr', (['(10)'], {}), '(10)\n', (2520, 2524), False, 'from core.utils import Utils\n')]
|
#!/usr/bin/python
#
# Example program for TensorFlow
# Author: <NAME> (https://github.com/yungshenglu)
import tensorflow as tf
hello = tf.constant("Hello world, TensorFlow!")
sess = tf.Session()
print(sess.run(hello))
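# Editor's note: tf.Session exists only in TensorFlow 1.x. Under TensorFlow
# 2.x (eager by default) the equivalent would be:
#   hello = tf.constant("Hello world, TensorFlow!")
#   print(hello.numpy())
# or keep the graph/session style via tf.compat.v1.disable_eager_execution()
# and tf.compat.v1.Session().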
|
[
"tensorflow.Session",
"tensorflow.constant"
] |
[((137, 176), 'tensorflow.constant', 'tf.constant', (['"""Hello world, TensorFlow!"""'], {}), "('Hello world, TensorFlow!')\n", (148, 176), True, 'import tensorflow as tf\n'), ((184, 196), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (194, 196), True, 'import tensorflow as tf\n')]
|
import abc
import requests
import execjs
import os
class BasicMangaDownload:
__metaclass__ = abc.ABCMeta
def __init__(self):
self.headers = {
'Accept': '*/*',
'Accept-Language': 'en-US,en;q=0.8',
'Cache-Control': 'max-age=0',
'User-Agent':
'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.135 Safari/537.36',
'Connection': 'keep-alive',
}
self.site = ''
self.cookies = {}
self.book = {}
self.book_name = "unkonwn"
self.chapters_list = []
self.chapter2url = {}
@abc.abstractmethod
def parse(self):
pass
@abc.abstractmethod
def execute(self):
pass
def search(self):
pass
def switch_account(self):
pass
def download_single_image(self, image_url, chapter, num):
save_path = os.path.join(os.getcwd(),
'{0}/{1}'.format(self.book_name, chapter))
if not os.path.exists(save_path):
os.makedirs(save_path)
r = requests.get(image_url, headers=self.headers)
with open(os.path.join(save_path, "{0}.png".format(num)), 'wb') as f:
f.write(r.content)
def add_header(self, key, value):
self.headers[key] = value
def add_cookie(self, key, value):
self.cookies[key] = value
def set_title(self, name):
self.book_name = name
def add_chapter(self, name, index=-1):
self.chapters_list.insert(index, name)
def reset_chapter(self):
self.chapters_list = []
@property
def title(self):
return self.book_name
@property
def chapters(self):
return self.chapters_list
@staticmethod
def exec_js(file_path):
ctx = execjs.compile(open(file_path).read())
js = 'demo()'
params = ctx.eval(js)
return params
def __len__(self):
return len(self.chapters_list)
def __repr__(self):
origin_str = "《" + self.book_name + "》"
for chapter in self.chapters_list:
add_str = "\n |--" + chapter
origin_str += add_str
return origin_str
class BasicProxy:
__metaclass__ = abc.ABCMeta
def __init__(self):
self.verifyUrl = 'http://www.baidu.com'
self.headers = {
'User-Agent':
'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:34.0) Gecko/20100101 Firefox/34.0',
'Accept': '*/*',
'Connection': 'keep-alive',
'Accept-Language': 'zh-CN,zh;q=0.8'
}
    # note: verify reads self.verifyUrl and self.headers, so it must be an
    # instance method rather than a staticmethod
    def verify(self, ip, port):
proxy = ':'.join((ip, port))
proxies = {
"http": "http://{0}".format(proxy),
"https": "https://{0}".format(proxy)
}
try:
r = requests.head(self.verifyUrl,
headers=self.headers,
proxies=proxies,
timeout=3,
verify=False)
if r.status_code == 200:
return True
except:
pass
return False
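# Editor's sketch (address hypothetical): checking one proxy after the
# instance-method fix above.
#   checker = BasicProxy()
#   print(checker.verify("127.0.0.1", "8080"))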
|
[
"requests.head",
"os.makedirs",
"os.getcwd",
"os.path.exists",
"requests.get"
] |
[((1131, 1176), 'requests.get', 'requests.get', (['image_url'], {'headers': 'self.headers'}), '(image_url, headers=self.headers)\n', (1143, 1176), False, 'import requests\n'), ((953, 964), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (962, 964), False, 'import os\n'), ((1057, 1082), 'os.path.exists', 'os.path.exists', (['save_path'], {}), '(save_path)\n', (1071, 1082), False, 'import os\n'), ((1096, 1118), 'os.makedirs', 'os.makedirs', (['save_path'], {}), '(save_path)\n', (1107, 1118), False, 'import os\n'), ((2870, 2967), 'requests.head', 'requests.head', (['self.verifyUrl'], {'headers': 'self.headers', 'proxies': 'proxies', 'timeout': '(3)', 'verify': '(False)'}), '(self.verifyUrl, headers=self.headers, proxies=proxies,\n timeout=3, verify=False)\n', (2883, 2967), False, 'import requests\n')]
|
# file findingaids/content/sitemaps.py
#
# Copyright 2012 Emory University Library
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.contrib.sitemaps import Sitemap
from django.core.urlresolvers import reverse
class ContentSitemap(Sitemap):
changefreq = 'yearly' # this content changes very rarely
default_priority = 0.4 # lower priority than actual findingaid content
# names for pages within content url namespace
content_pages = ['request-materials', 'feedback', 'faq',
'search-tips', 'contributors']
def items(self):
# special case pages - index, static html site pages
items = [reverse('content:%s' % page) for page in self.content_pages]
items.append(reverse('site-index'))
# possibly also advanced search? (not really content to index)
# generate list of higher priority urls (can't be calculated at load
# time because urls haven't been defined yet)
self.higher_priority = [reverse('site-index'), reverse('content:faq'),
reverse('content:search-tips')]
return items
def location(self, item):
return item
def priority(self, item):
# set specific urls (such as home page) to higher priority
if item in self.higher_priority:
return 0.5
return self.default_priority
|
[
"django.core.urlresolvers.reverse"
] |
[((1180, 1208), 'django.core.urlresolvers.reverse', 'reverse', (["('content:%s' % page)"], {}), "('content:%s' % page)\n", (1187, 1208), False, 'from django.core.urlresolvers import reverse\n'), ((1262, 1283), 'django.core.urlresolvers.reverse', 'reverse', (['"""site-index"""'], {}), "('site-index')\n", (1269, 1283), False, 'from django.core.urlresolvers import reverse\n'), ((1521, 1542), 'django.core.urlresolvers.reverse', 'reverse', (['"""site-index"""'], {}), "('site-index')\n", (1528, 1542), False, 'from django.core.urlresolvers import reverse\n'), ((1544, 1566), 'django.core.urlresolvers.reverse', 'reverse', (['"""content:faq"""'], {}), "('content:faq')\n", (1551, 1566), False, 'from django.core.urlresolvers import reverse\n'), ((1591, 1621), 'django.core.urlresolvers.reverse', 'reverse', (['"""content:search-tips"""'], {}), "('content:search-tips')\n", (1598, 1621), False, 'from django.core.urlresolvers import reverse\n')]
|
import heapq
def kthSmallest(matrix: [[int]], k: int) -> int:
    """Return the k-th smallest value in an n x n row- and column-sorted matrix."""
    n = len(matrix)
    # seed a min-heap with the head of every row: (value, row, col)
    p = [(matrix[i][0], i, 0) for i in range(n)]
    heapq.heapify(p)
    # pop k-1 times, each time feeding in the popped row's next element
    for i in range(k - 1):
        num, i, j = heapq.heappop(p)
        if j < n - 1:
            heapq.heappush(p, (matrix[i][j + 1], i, j + 1))
    return heapq.heappop(p)[0]
if __name__ == "__main__":
matrix = [[1, 5, 9],
[10, 11, 13],
[12, 13, 15]]
result = kthSmallest(matrix, 8)
print(result)
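    # Editor's sketch: cross-check against a brute-force sort (test-only):
    #   import itertools
    #   assert result == sorted(itertools.chain(*matrix))[8 - 1]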
|
[
"heapq.heappush",
"heapq.heapify",
"heapq.heappop"
] |
[((135, 151), 'heapq.heapify', 'heapq.heapify', (['p'], {}), '(p)\n', (148, 151), False, 'import heapq\n'), ((199, 215), 'heapq.heappop', 'heapq.heappop', (['p'], {}), '(p)\n', (212, 215), False, 'import heapq\n'), ((309, 325), 'heapq.heappop', 'heapq.heappop', (['p'], {}), '(p)\n', (322, 325), False, 'import heapq\n'), ((250, 297), 'heapq.heappush', 'heapq.heappush', (['p', '(matrix[i][j + 1], i, j + 1)'], {}), '(p, (matrix[i][j + 1], i, j + 1))\n', (264, 297), False, 'import heapq\n')]
|
from datetime import datetime
import click
import numpy as np
import pandas
import pyarrow
import pyarrow.parquet as pq
import yaml
from cloudpathlib import AnyPath, CloudPath
from deepdiff import DeepDiff
from simple_term_menu import TerminalMenu
from tqdm import tqdm
import tempfile
TYPE_MAPPINGS = {"numeric": "bytes", "text": "string", "boolean": "bool"}
BLOCK_COLUMN = "_block_number"
BLOCK_COLUMN_TYPE = pyarrow.uint32()
def get_select_all_exclusive(
database_string,
subgraph_table_schema,
table_name,
start_partition,
end_partition,
):
return pandas.read_sql(
sql=f"""SELECT
'SELECT ' || STRING_AGG('"' || column_name || '"', ', ') || ', lower(block_range) as {BLOCK_COLUMN}
FROM {subgraph_table_schema}.{table_name}
WHERE
lower(block_range) >= %(start_partition)s
AND lower(block_range) < %(end_partition)s
ORDER BY lower(block_range) asc'
FROM information_schema.columns
WHERE table_name = %(table_name)s
AND table_schema = %(subgraph_table_schema)s
AND column_name NOT IN ('vid', 'block_range')
""",
params={
"start_partition": start_partition,
"end_partition": end_partition,
"table_name": table_name,
"subgraph_table_schema": subgraph_table_schema,
},
con=database_string,
).iloc[0][0]
def get_column_types(database_string, subgraph_table_schema, table_name):
df = pandas.read_sql(
sql=f"""SELECT column_name, data_type
FROM information_schema.columns
WHERE table_name = %(table_name)s
AND table_schema = %(subgraph_table_schema)s
""",
params={
"table_name": table_name,
"subgraph_table_schema": subgraph_table_schema,
},
con=database_string,
)
return {row["column_name"]: row["data_type"] for row in df.to_dict("records")}
def get_subgraph_table_schemas(database_string):
schema_data = pandas.read_sql(
"""
SELECT
ds.subgraph AS subgraph_deployment,
ds.name AS subgraph_table_schema,
sv.id,
s.name as label,
sd.earliest_ethereum_block_number::int as earliest_block,
sd.latest_ethereum_block_number::int as latest_block
FROM
deployment_schemas ds
LEFT JOIN subgraphs.subgraph_version sv ON (ds.subgraph = sv.deployment)
LEFT JOIN subgraphs.subgraph s ON (s.current_version = sv.id)
LEFT JOIN subgraphs.subgraph_deployment sd ON (sd.deployment = ds.subgraph)
WHERE
ds.active AND s.current_version is not NULL
""",
con=database_string,
).to_dict("records")
return {subgraph["label"]: subgraph for subgraph in schema_data}
def get_subgraph_table_schema(subgraph, database_string):
schemas = get_subgraph_table_schemas(database_string)
return schemas[subgraph]["subgraph_table_schema"]
def get_subgraph_deployment(subgraph, database_string):
schemas = get_subgraph_table_schemas(database_string)
return schemas[subgraph]["subgraph_deployment"]
def get_subgraph_block_range(subgraph, database_string):
schemas = get_subgraph_table_schemas(database_string)
return schemas[subgraph]["earliest_block"], schemas[subgraph]["latest_block"]
def convert_columns(df, database_types, table_config):
update_types = {BLOCK_COLUMN: BLOCK_COLUMN_TYPE}
new_columns = {}
for column, mappings in table_config.get("column_mappings", {}).items():
for new_column_name, new_column_config in mappings.items():
scale_factor = new_column_config.get("downscale")
if scale_factor:
new_column = df[column] // scale_factor
else:
new_column = df[column]
if new_column_config.get("max_value"):
max_value = new_column_config["max_value"]
validity_column = new_column_config["validity_column"]
default = new_column_config["default"]
new_columns[new_column_name] = np.where(
new_column <= max_value, new_column, default
)
new_columns[validity_column] = np.where(
new_column <= max_value, True, False
)
update_types[validity_column] = "boolean"
else:
new_columns[new_column_name] = new_column
update_types[new_column_name] = new_column_config["type"]
for column_name in df.columns:
if column_name != BLOCK_COLUMN:
database_type = database_types[column_name]
if database_type in TYPE_MAPPINGS:
update_types[column_name] = TYPE_MAPPINGS[database_type]
if database_type == "numeric":
df[column_name] = df[column_name].map(
lambda x: int(x).to_bytes(32, byteorder="big")
)
df = df.rename_axis(None)
df = df.assign(**new_columns)
table = pyarrow.Table.from_pandas(df, preserve_index=False)
schema = table.schema
types = {
"uint32": pyarrow.uint32(),
"uint64": pyarrow.uint64(),
"bytes": pyarrow.binary(),
"bool": pyarrow.bool_(),
"boolean": pyarrow.bool_(),
"string": pyarrow.string(),
"Numeric38": pyarrow.decimal128(precision=38),
}
for column_name, new_type in update_types.items():
field_index = schema.get_field_index(column_name)
field = schema.field(field_index)
new_field = field.with_type(types[new_type])
schema = schema.set(field_index, new_field)
table = table.cast(schema, safe=False)
return table
def get_partition_iterator(min_partition, max_partition, partition_sizes):
for partition_size in sorted(partition_sizes, reverse=True):
start_partition_allowed = (min_partition // partition_size) * partition_size
end_partition_allowed = (max_partition // partition_size) * partition_size
last_max_partition = None
for start_partition in range(
start_partition_allowed, end_partition_allowed, partition_size
):
last_max_partition = start_partition + partition_size
yield partition_size, start_partition, start_partition + partition_size
if last_max_partition is not None:
min_partition = last_max_partition
def get_partition_file_location(
table_dir, partition_size, start_partition, end_partition
):
return table_dir.joinpath(
f"partition_size={partition_size}",
f"start_partition={start_partition}",
f"end_partition={end_partition}",
"data.parquet",
)
def filter_existing_partitions(table_dir, partitions):
# Iterate in reverse until one exists, assume all previous exist
# We must iterate forwards for processing so return in the correct order.
new_partitions = []
for partition in sorted(partitions, reverse=True, key=lambda x: x[1]):
if get_partition_file_location(table_dir, *partition).exists():
return sorted(new_partitions, reverse=True)
else:
new_partitions.append(partition)
return new_partitions
def extract_from_config(subgraph_config, database_string, output_location):
config = yaml.safe_load(AnyPath(subgraph_config).open("r"))
return extract(config, database_string, output_location)
def write_config(config, root_output_location):
config_output_location = root_output_location.joinpath("config.yaml")
if config_output_location.exists():
existing_config = yaml.safe_load(config_output_location.open("r"))
config_difference = DeepDiff(existing_config, config)
if config_difference:
raise Exception(
f"Config provided does not match the previously seen version at {config_output_location}"
)
else:
config_output_location.parent.mkdir(parents=True, exist_ok=True)
with config_output_location.open("w") as f_out:
yaml.dump(config, f_out)
def extract(config, database_string, output_location):
"""Connects to your database and pulls all data from all subgraphs"""
subgraph = config["subgraph"]
subgraph_deployment = get_subgraph_deployment(subgraph, database_string)
subgraph_table_schema = get_subgraph_table_schema(subgraph, database_string)
earliest_block, latest_block = get_subgraph_block_range(subgraph, database_string)
root_output_location = AnyPath(output_location).joinpath(
config["name"], config["version"]
)
write_config(config, root_output_location)
for table_name, table_config in tqdm(
config["tables"].items(), leave=False, desc="Tables"
):
table_dir = root_output_location.joinpath(
"data", f"subgraph={subgraph_deployment}", f"table={table_name}"
)
partition_range = get_partition_iterator(
earliest_block, latest_block, table_config["partition_sizes"]
)
database_types = get_column_types(
database_string, subgraph_table_schema, table_name
)
unexported_partitions = filter_existing_partitions(table_dir, partition_range)
for partition_size, start_partition, end_partition in tqdm(
unexported_partitions, leave=False, desc="Paritions"
):
filepath = get_partition_file_location(
table_dir, partition_size, start_partition, end_partition
)
df = pandas.read_sql(
sql=get_select_all_exclusive(
database_string,
subgraph_table_schema,
table_name,
start_partition=start_partition,
end_partition=end_partition,
),
con=database_string,
coerce_float=False,
)
typed_df = convert_columns(df, database_types, table_config)
# Pyarrow can't take a file object so we have to write to a temp file
# and upload directly
filepath.parent.mkdir(parents=True, exist_ok=True)
if isinstance(filepath, CloudPath):
with tempfile.TemporaryDirectory() as temp_dir:
pq_file_location = AnyPath(temp_dir).joinpath("data.parquet")
pq.write_table(typed_df, pq_file_location)
filepath.upload_from(pq_file_location)
else:
pq.write_table(typed_df, filepath)
with root_output_location.joinpath("latest.yaml").open("w") as f_out:
yaml.dump(
{
"subgraph": subgraph,
"subgraph_deployment": subgraph_deployment,
"updated": datetime.now(),
"earliest_block": earliest_block,
"latest_block": latest_block,
},
f_out,
)
@click.command()
@click.option(
"--subgraph-config",
help="The config file specifying the data to extract",
required=True,
)
@click.option(
"--database-string",
default="postgresql://graph-node:let-me-in@localhost:5432/graph-node",
help="The database string for connections, defaults to a local graph-node",
)
@click.option(
"--output-location",
default="data",
help="The base output location, whether local or cloud",
)
def main(subgraph_config, database_string, output_location):
"""
Connects to your database and pulls all data from the tables
specified in the config file
"""
extract_from_config(subgraph_config, database_string, output_location)
def get_tables_in_schema(database_string, subgraph_table_schema):
""" "
Returns a list of all tables in the schema which have a block_range column.
This corresponds to tables which contain entities that we can extract
"""
all_tables = pandas.read_sql(
f"""
SELECT distinct table_name FROM information_schema.columns
WHERE table_schema = '{subgraph_table_schema}'
AND column_name = 'block_range'
ORDER BY table_name
""",
con=database_string,
)["table_name"].tolist()
return all_tables
@click.command()
@click.option(
"--config-location", help="The output file location for this config", required=True
)
@click.option(
"--database-string",
default="postgresql://graph-node:let-me-in@localhost:5432/graph-node",
help="The database string for connections, defaults to a local graph-node",
)
def config_generator(config_location, database_string):
# Minimise the width any particular column can use in the preview
pandas.set_option("display.max_colwidth", 8)
# Let pandas figure out the width of the terminal
pandas.set_option("display.width", None)
config = {"name": AnyPath(config_location).stem, "version": "0.0.1"}
subgraph_table_schemas = get_subgraph_table_schemas(database_string)
def preview_schema_data(label):
schema = subgraph_table_schemas[label]
table_spacer = "\n - "
table_list = get_tables_in_schema(
database_string, schema["subgraph_table_schema"]
)
table_list_formatted = table_spacer + table_spacer.join(table_list)
# Make nicer
return f"""
Subgraph: {schema["subgraph_deployment"]}
Tables ({len(table_list)}): {table_list_formatted}
"""
options = list(subgraph_table_schemas.keys())
terminal_menu = TerminalMenu(
options,
title="Please select the subgraph you want to extract",
preview_command=preview_schema_data,
preview_size=0.75,
)
menu_entry_index = terminal_menu.show()
schema_data = subgraph_table_schemas[options[menu_entry_index]]
subgraph_table_schema = schema_data["subgraph_table_schema"]
config["subgraph"] = schema_data["label"]
tables = get_tables_in_schema(database_string, subgraph_table_schema)
def preview_table_data(table):
subset = pandas.read_sql(
f"select * from {subgraph_table_schema}.{table} limit 10",
con=database_string,
)
return str(subset.head())
terminal_menu = TerminalMenu(
tables,
title="Please select the tables you want to extract",
preview_command=preview_table_data,
preview_size=0.75,
multi_select=True,
)
table_entry_index = terminal_menu.show()
selected_tables = [tables[index] for index in table_entry_index]
config["tables"] = {}
for table in selected_tables:
table_config = {}
column_types = get_column_types(database_string, subgraph_table_schema, table)
# These sizes are just a sensible default for gnosis chain
# With a block duration of about 5 seconds these correspond to (very roughly):
# 1024 blocks = 1 1/2 hours
# 1024*16 blocks = 1 day
# 1024*128 blocks = 1 week
# 1024*512 blocks = 1 month
table_config["partition_sizes"] = [1024 * 512, 1024 * 128, 1024 * 16, 1024]
numeric_columns = sorted(
[
column
for column, data_type in column_types.items()
if data_type == "numeric"
]
)
if len(numeric_columns) > 0:
terminal_menu = TerminalMenu(
numeric_columns,
title=f"These columns in table {table} are numeric and will be exported as bytes unless they are mapped, which should be mapped to another type?",
multi_select=True,
)
mapped_indices = terminal_menu.show()
selected_columns = [numeric_columns[index] for index in mapped_indices]
if len(selected_columns) > 0:
table_config["column_mappings"] = {}
for column in selected_columns:
table_config["column_mappings"][column] = {
f"{column}_uint64": {
"type": "uint64",
"max_value": 0xFFFFFFFFFFFFFFFF,
"default": 0,
"validity_column": f"{column}_uint64_valid",
}
}
config["tables"][table] = table_config
with open(config_location, "w") as f_out:
yaml.dump(config, f_out)
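# Editor's note: the module defines two click commands (`main` and
# `config_generator`) but no entry point; a minimal one (assumed) for running
# the extractor directly:
if __name__ == "__main__":
    main()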
|
[
"yaml.dump",
"click.option",
"pyarrow.Table.from_pandas",
"cloudpathlib.AnyPath",
"pandas.read_sql",
"pandas.set_option",
"tempfile.TemporaryDirectory",
"click.command",
"pyarrow.uint64",
"pyarrow.binary",
"pyarrow.bool_",
"datetime.datetime.now",
"simple_term_menu.TerminalMenu",
"tqdm.tqdm",
"pyarrow.uint32",
"pyarrow.parquet.write_table",
"pyarrow.decimal128",
"numpy.where",
"deepdiff.DeepDiff",
"pyarrow.string"
] |
[((414, 430), 'pyarrow.uint32', 'pyarrow.uint32', ([], {}), '()\n', (428, 430), False, 'import pyarrow\n'), ((10988, 11003), 'click.command', 'click.command', ([], {}), '()\n', (11001, 11003), False, 'import click\n'), ((11005, 11113), 'click.option', 'click.option', (['"""--subgraph-config"""'], {'help': '"""The config file specifying the data to extract"""', 'required': '(True)'}), "('--subgraph-config', help=\n 'The config file specifying the data to extract', required=True)\n", (11017, 11113), False, 'import click\n'), ((11125, 11315), 'click.option', 'click.option', (['"""--database-string"""'], {'default': '"""postgresql://graph-node:let-me-in@localhost:5432/graph-node"""', 'help': '"""The database string for connections, defaults to a local graph-node"""'}), "('--database-string', default=\n 'postgresql://graph-node:let-me-in@localhost:5432/graph-node', help=\n 'The database string for connections, defaults to a local graph-node')\n", (11137, 11315), False, 'import click\n'), ((11322, 11433), 'click.option', 'click.option', (['"""--output-location"""'], {'default': '"""data"""', 'help': '"""The base output location, whether local or cloud"""'}), "('--output-location', default='data', help=\n 'The base output location, whether local or cloud')\n", (11334, 11433), False, 'import click\n'), ((12248, 12263), 'click.command', 'click.command', ([], {}), '()\n', (12261, 12263), False, 'import click\n'), ((12265, 12367), 'click.option', 'click.option', (['"""--config-location"""'], {'help': '"""The output file location for this config"""', 'required': '(True)'}), "('--config-location', help=\n 'The output file location for this config', required=True)\n", (12277, 12367), False, 'import click\n'), ((12370, 12560), 'click.option', 'click.option', (['"""--database-string"""'], {'default': '"""postgresql://graph-node:let-me-in@localhost:5432/graph-node"""', 'help': '"""The database string for connections, defaults to a local graph-node"""'}), "('--database-string', default=\n 'postgresql://graph-node:let-me-in@localhost:5432/graph-node', help=\n 'The database string for connections, defaults to a local graph-node')\n", (12382, 12560), False, 'import click\n'), ((1585, 1924), 'pandas.read_sql', 'pandas.read_sql', ([], {'sql': 'f"""SELECT column_name, data_type\n FROM information_schema.columns\n WHERE table_name = %(table_name)s\n AND table_schema = %(subgraph_table_schema)s\n """', 'params': "{'table_name': table_name, 'subgraph_table_schema': subgraph_table_schema}", 'con': 'database_string'}), '(sql=\n f"""SELECT column_name, data_type\n FROM information_schema.columns\n WHERE table_name = %(table_name)s\n AND table_schema = %(subgraph_table_schema)s\n """\n , params={\'table_name\': table_name, \'subgraph_table_schema\':\n subgraph_table_schema}, con=database_string)\n', (1600, 1924), False, 'import pandas\n'), ((5058, 5109), 'pyarrow.Table.from_pandas', 'pyarrow.Table.from_pandas', (['df'], {'preserve_index': '(False)'}), '(df, preserve_index=False)\n', (5083, 5109), False, 'import pyarrow\n'), ((12696, 12740), 'pandas.set_option', 'pandas.set_option', (['"""display.max_colwidth"""', '(8)'], {}), "('display.max_colwidth', 8)\n", (12713, 12740), False, 'import pandas\n'), ((12799, 12839), 'pandas.set_option', 'pandas.set_option', (['"""display.width"""', 'None'], {}), "('display.width', None)\n", (12816, 12839), False, 'import pandas\n'), ((13502, 13645), 'simple_term_menu.TerminalMenu', 'TerminalMenu', (['options'], {'title': '"""Please select the subgraph you want to extract"""', 
'preview_command': 'preview_schema_data', 'preview_size': '(0.75)'}), "(options, title=\n 'Please select the subgraph you want to extract', preview_command=\n preview_schema_data, preview_size=0.75)\n", (13514, 13645), False, 'from simple_term_menu import TerminalMenu\n'), ((14212, 14364), 'simple_term_menu.TerminalMenu', 'TerminalMenu', (['tables'], {'title': '"""Please select the tables you want to extract"""', 'preview_command': 'preview_table_data', 'preview_size': '(0.75)', 'multi_select': '(True)'}), "(tables, title='Please select the tables you want to extract',\n preview_command=preview_table_data, preview_size=0.75, multi_select=True)\n", (14224, 14364), False, 'from simple_term_menu import TerminalMenu\n'), ((5168, 5184), 'pyarrow.uint32', 'pyarrow.uint32', ([], {}), '()\n', (5182, 5184), False, 'import pyarrow\n'), ((5204, 5220), 'pyarrow.uint64', 'pyarrow.uint64', ([], {}), '()\n', (5218, 5220), False, 'import pyarrow\n'), ((5239, 5255), 'pyarrow.binary', 'pyarrow.binary', ([], {}), '()\n', (5253, 5255), False, 'import pyarrow\n'), ((5273, 5288), 'pyarrow.bool_', 'pyarrow.bool_', ([], {}), '()\n', (5286, 5288), False, 'import pyarrow\n'), ((5309, 5324), 'pyarrow.bool_', 'pyarrow.bool_', ([], {}), '()\n', (5322, 5324), False, 'import pyarrow\n'), ((5344, 5360), 'pyarrow.string', 'pyarrow.string', ([], {}), '()\n', (5358, 5360), False, 'import pyarrow\n'), ((5383, 5415), 'pyarrow.decimal128', 'pyarrow.decimal128', ([], {'precision': '(38)'}), '(precision=38)\n', (5401, 5415), False, 'import pyarrow\n'), ((7731, 7764), 'deepdiff.DeepDiff', 'DeepDiff', (['existing_config', 'config'], {}), '(existing_config, config)\n', (7739, 7764), False, 'from deepdiff import DeepDiff\n'), ((9338, 9396), 'tqdm.tqdm', 'tqdm', (['unexported_partitions'], {'leave': '(False)', 'desc': '"""Paritions"""'}), "(unexported_partitions, leave=False, desc='Paritions')\n", (9342, 9396), False, 'from tqdm import tqdm\n'), ((14026, 14125), 'pandas.read_sql', 'pandas.read_sql', (['f"""select * from {subgraph_table_schema}.{table} limit 10"""'], {'con': 'database_string'}), "(f'select * from {subgraph_table_schema}.{table} limit 10',\n con=database_string)\n", (14041, 14125), False, 'import pandas\n'), ((16357, 16381), 'yaml.dump', 'yaml.dump', (['config', 'f_out'], {}), '(config, f_out)\n', (16366, 16381), False, 'import yaml\n'), ((2129, 2714), 'pandas.read_sql', 'pandas.read_sql', (['"""\n SELECT\n ds.subgraph AS subgraph_deployment,\n ds.name AS subgraph_table_schema,\n sv.id,\n s.name as label,\n sd.earliest_ethereum_block_number::int as earliest_block,\n sd.latest_ethereum_block_number::int as latest_block\nFROM\n deployment_schemas ds\n LEFT JOIN subgraphs.subgraph_version sv ON (ds.subgraph = sv.deployment)\n LEFT JOIN subgraphs.subgraph s ON (s.current_version = sv.id)\n LEFT JOIN subgraphs.subgraph_deployment sd ON (sd.deployment = ds.subgraph)\nWHERE\n ds.active AND s.current_version is not NULL\n """'], {'con': 'database_string'}), '(\n """\n SELECT\n ds.subgraph AS subgraph_deployment,\n ds.name AS subgraph_table_schema,\n sv.id,\n s.name as label,\n sd.earliest_ethereum_block_number::int as earliest_block,\n sd.latest_ethereum_block_number::int as latest_block\nFROM\n deployment_schemas ds\n LEFT JOIN subgraphs.subgraph_version sv ON (ds.subgraph = sv.deployment)\n LEFT JOIN subgraphs.subgraph s ON (s.current_version = sv.id)\n LEFT JOIN subgraphs.subgraph_deployment sd ON (sd.deployment = ds.subgraph)\nWHERE\n ds.active AND s.current_version is not NULL\n """\n , con=database_string)\n', (2144, 
2714), False, 'import pandas\n'), ((8095, 8119), 'yaml.dump', 'yaml.dump', (['config', 'f_out'], {}), '(config, f_out)\n', (8104, 8119), False, 'import yaml\n'), ((8559, 8583), 'cloudpathlib.AnyPath', 'AnyPath', (['output_location'], {}), '(output_location)\n', (8566, 8583), False, 'from cloudpathlib import AnyPath, CloudPath\n'), ((12863, 12887), 'cloudpathlib.AnyPath', 'AnyPath', (['config_location'], {}), '(config_location)\n', (12870, 12887), False, 'from cloudpathlib import AnyPath, CloudPath\n'), ((15340, 15545), 'simple_term_menu.TerminalMenu', 'TerminalMenu', (['numeric_columns'], {'title': 'f"""These columns in table {table} are numeric and will be exported as bytes unless they are mapped, which should be mapped to another type?"""', 'multi_select': '(True)'}), "(numeric_columns, title=\n f'These columns in table {table} are numeric and will be exported as bytes unless they are mapped, which should be mapped to another type?'\n , multi_select=True)\n", (15352, 15545), False, 'from simple_term_menu import TerminalMenu\n'), ((582, 1416), 'pandas.read_sql', 'pandas.read_sql', ([], {'sql': 'f"""SELECT \n \'SELECT \' || STRING_AGG(\'"\' || column_name || \'"\', \', \') || \', lower(block_range) as {BLOCK_COLUMN} \n FROM {subgraph_table_schema}.{table_name}\n WHERE\n lower(block_range) >= %(start_partition)s\n AND lower(block_range) < %(end_partition)s\n ORDER BY lower(block_range) asc\'\n FROM information_schema.columns\n WHERE table_name = %(table_name)s\n AND table_schema = %(subgraph_table_schema)s\n AND column_name NOT IN (\'vid\', \'block_range\')\n """', 'params': "{'start_partition': start_partition, 'end_partition': end_partition,\n 'table_name': table_name, 'subgraph_table_schema': subgraph_table_schema}", 'con': 'database_string'}), '(sql=\n f"""SELECT \n \'SELECT \' || STRING_AGG(\'"\' || column_name || \'"\', \', \') || \', lower(block_range) as {BLOCK_COLUMN} \n FROM {subgraph_table_schema}.{table_name}\n WHERE\n lower(block_range) >= %(start_partition)s\n AND lower(block_range) < %(end_partition)s\n ORDER BY lower(block_range) asc\'\n FROM information_schema.columns\n WHERE table_name = %(table_name)s\n AND table_schema = %(subgraph_table_schema)s\n AND column_name NOT IN (\'vid\', \'block_range\')\n """\n , params={\'start_partition\': start_partition, \'end_partition\':\n end_partition, \'table_name\': table_name, \'subgraph_table_schema\':\n subgraph_table_schema}, con=database_string)\n', (597, 1416), False, 'import pandas\n'), ((4119, 4173), 'numpy.where', 'np.where', (['(new_column <= max_value)', 'new_column', 'default'], {}), '(new_column <= max_value, new_column, default)\n', (4127, 4173), True, 'import numpy as np\n'), ((4259, 4305), 'numpy.where', 'np.where', (['(new_column <= max_value)', '(True)', '(False)'], {}), '(new_column <= max_value, True, False)\n', (4267, 4305), True, 'import numpy as np\n'), ((7367, 7391), 'cloudpathlib.AnyPath', 'AnyPath', (['subgraph_config'], {}), '(subgraph_config)\n', (7374, 7391), False, 'from cloudpathlib import AnyPath, CloudPath\n'), ((10562, 10596), 'pyarrow.parquet.write_table', 'pq.write_table', (['typed_df', 'filepath'], {}), '(typed_df, filepath)\n', (10576, 10596), True, 'import pyarrow.parquet as pq\n'), ((10829, 10843), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (10841, 10843), False, 'from datetime import datetime\n'), ((11951, 12186), 'pandas.read_sql', 'pandas.read_sql', (['f"""\n SELECT distinct table_name FROM information_schema.columns \n WHERE table_schema = \'{subgraph_table_schema}\'\n AND 
column_name = \'block_range\'\n ORDER BY table_name\n """'], {'con': 'database_string'}), '(\n f"""\n SELECT distinct table_name FROM information_schema.columns \n WHERE table_schema = \'{subgraph_table_schema}\'\n AND column_name = \'block_range\'\n ORDER BY table_name\n """\n , con=database_string)\n', (11966, 12186), False, 'import pandas\n'), ((10281, 10310), 'tempfile.TemporaryDirectory', 'tempfile.TemporaryDirectory', ([], {}), '()\n', (10308, 10310), False, 'import tempfile\n'), ((10426, 10468), 'pyarrow.parquet.write_table', 'pq.write_table', (['typed_df', 'pq_file_location'], {}), '(typed_df, pq_file_location)\n', (10440, 10468), True, 'import pyarrow.parquet as pq\n'), ((10363, 10380), 'cloudpathlib.AnyPath', 'AnyPath', (['temp_dir'], {}), '(temp_dir)\n', (10370, 10380), False, 'from cloudpathlib import AnyPath, CloudPath\n')]
|
from django.test import TestCase
from shop.models import *
from decimal import Decimal  # used for prices below; assumed not re-exported by shop.models
from django.contrib.auth.models import User, Group
class OrderTest(TestCase):
def setUp(self):
ProductClass.objects.create(name='коробка', slug='box')
ProductClass.objects.create(name='шнур', slug='cord')
Product.objects.create(name='коробка 10x10x10', slug='box10x10x10')
Product.objects.create(name='шнур 10см', slug='cord_10cm')
ProductVariant.objects.create(name='коробка 10x10x10 черная',
addition='черная',
slug='box10black',
product=Product.objects.get(pk=1),
price=Decimal('10'))
ProductVariant.objects.create(name='шнур 10см жёлтый',
addition='жёлтый',
slug='cord10yellow',
product=Product.objects.get(pk=2),
price=Decimal('1'))
Organisation.objects.create(inn='1234567890')
User.objects.create(username='<EMAIL>',
email='<EMAIL>', password='password')
User.objects.create(username='<EMAIL>',
email='<EMAIL>', password='password')
Group.objects.create(name='const customers')
User.objects.get(pk=1).groups.add(Group.objects.get(pk=1))
Order.objects.create(organisation=Organisation.objects.get(
pk=1), user=User.objects.get(pk=1))
OrderItem.objects.create(product=ProductVariant.objects.get(
pk=1), price=ProductVariant.objects.get(pk=1).price)
Order.objects.get(pk=1).items.add(OrderItem.objects.get(pk=1))
def test1(self):
box, cord = ProductVariant.objects.all()
order = Order.objects.all()[0]
self.assertEqual(order.getQuantity(box), 0)
self.assertEqual(order.getQuantity(cord), 0)
order.setQuantity(box, 10)
self.assertEqual(order.getQuantity(box), 10)
self.assertEqual(order.getQuantity(cord), 0)
order.setQuantity(cord, 4324)
self.assertEqual(order.getQuantity(box), 10)
self.assertEqual(order.getQuantity(cord), 4324)
order.activate()
order.finish()
order.cancel()
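        # Expected totals (illustrative check of the assertions below):
        # 10 boxes + 4324 cords = 4334 items, and
        # 10 * Decimal('10') + 4324 * Decimal('1') = Decimal('4424')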
self.assertEqual(order.getTotalQuantity(), 4334)
self.assertEqual(order.getTotalSum(), 4424)
|
[
"django.contrib.auth.models.User.objects.create",
"django.contrib.auth.models.User.objects.get",
"django.contrib.auth.models.Group.objects.get",
"django.contrib.auth.models.Group.objects.create"
] |
[((1118, 1195), 'django.contrib.auth.models.User.objects.create', 'User.objects.create', ([], {'username': '"""<EMAIL>"""', 'email': '"""<EMAIL>"""', 'password': '"""password"""'}), "(username='<EMAIL>', email='<EMAIL>', password='password')\n", (1137, 1195), False, 'from django.contrib.auth.models import User, Group\n'), ((1232, 1309), 'django.contrib.auth.models.User.objects.create', 'User.objects.create', ([], {'username': '"""<EMAIL>"""', 'email': '"""<EMAIL>"""', 'password': '"""password"""'}), "(username='<EMAIL>', email='<EMAIL>', password='password')\n", (1251, 1309), False, 'from django.contrib.auth.models import User, Group\n'), ((1346, 1390), 'django.contrib.auth.models.Group.objects.create', 'Group.objects.create', ([], {'name': '"""const customers"""'}), "(name='const customers')\n", (1366, 1390), False, 'from django.contrib.auth.models import User, Group\n'), ((1433, 1456), 'django.contrib.auth.models.Group.objects.get', 'Group.objects.get', ([], {'pk': '(1)'}), '(pk=1)\n', (1450, 1456), False, 'from django.contrib.auth.models import User, Group\n'), ((1551, 1573), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', ([], {'pk': '(1)'}), '(pk=1)\n', (1567, 1573), False, 'from django.contrib.auth.models import User, Group\n'), ((1399, 1421), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', ([], {'pk': '(1)'}), '(pk=1)\n', (1415, 1421), False, 'from django.contrib.auth.models import User, Group\n')]
|
# Copyright (C) 2016-present the asyncpg authors and contributors
# <see AUTHORS file>
#
# This module is part of asyncpg and is released under
# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0
import os
import subprocess
import sys
import unittest
def find_root():
return os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
class TestFlake8(unittest.TestCase):
def test_flake8(self):
root_path = find_root()
config_path = os.path.join(root_path, '.flake8')
if not os.path.exists(config_path):
raise RuntimeError('could not locate .flake8 file')
try:
import flake8 # NoQA
except ImportError:
raise unittest.SkipTest('flake8 module is missing')
try:
subprocess.run(
[sys.executable, '-m', 'flake8', '--config', config_path],
check=True,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
cwd=root_path)
except subprocess.CalledProcessError as ex:
output = ex.output.decode()
raise AssertionError(
'flake8 validation failed:\n{}'.format(output)) from None
|
[
"subprocess.run",
"os.path.abspath",
"os.path.exists",
"unittest.SkipTest",
"os.path.join"
] |
[((480, 514), 'os.path.join', 'os.path.join', (['root_path', '""".flake8"""'], {}), "(root_path, '.flake8')\n", (492, 514), False, 'import os\n'), ((331, 356), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (346, 356), False, 'import os\n'), ((530, 557), 'os.path.exists', 'os.path.exists', (['config_path'], {}), '(config_path)\n', (544, 557), False, 'import os\n'), ((789, 948), 'subprocess.run', 'subprocess.run', (["[sys.executable, '-m', 'flake8', '--config', config_path]"], {'check': '(True)', 'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.STDOUT', 'cwd': 'root_path'}), "([sys.executable, '-m', 'flake8', '--config', config_path],\n check=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=root_path\n )\n", (803, 948), False, 'import subprocess\n'), ((717, 762), 'unittest.SkipTest', 'unittest.SkipTest', (['"""flake8 module is missing"""'], {}), "('flake8 module is missing')\n", (734, 762), False, 'import unittest\n')]
|
"""
Copyright Government of Canada 2020
Written by:
<NAME>
National Microbiology Laboratory, Public Health Agency of Canada
Licensed under the Apache License, Version 2.0 (the "License"); you may not use
this work except in compliance with the License. You may obtain a copy of the
License at:
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import gzip
import os
import re
def has_valid_fastq_extension(file_location):
"""
Determines whether or not the passed file location has a valid FASTQ or GZIP'd FASTQ file extension.
ARGUMENTS
file_location (str): the location of the file to check
RETURNS
valid (bool): whether or not the file location has a valid FASTQ extension
"""
valid_extensions = ("fastq", "fq", "fastq.gz", "fq.gz")
valid = file_location.lower().endswith(valid_extensions) # Case insensitive matching
return valid
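# Illustrative examples (not part of the original module): matching is
# case-insensitive and accepts gzip'd names.
#   >>> has_valid_fastq_extension('reads.FASTQ.GZ')
#   True
#   >>> has_valid_fastq_extension('reads.fasta')
#   False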
def has_valid_fastq_encoding(file):
"""
Determines whether or not the passed file appears to be encoded as a FASTQ file.
ARGUMENTS
file (File): an open and readable file
RETURNS
valid (bool): whether or not the file is encoded as a FASTQ file
"""
# First line starts with '@' and has at least one other character following:
line = file.readline()
if not (len(line) > 0 and re.match(r'^@.+', line)):
return False
# Second line contains one or more A, C, G, T, and N characters:
sequence = file.readline()
if not (len(sequence) > 0 and re.match(r'^[ATGCN]+$', sequence)):
return False
# Third line begins with '+' and has any number of optional characters following:
line = file.readline()
if not (len(line) > 0 and re.match(r'^\+.*', line)):
return False
# Fourth line contains sequencing encoding characters:
encoding = file.readline()
if not (len(encoding) > 0 and re.match(r'^\S+$', encoding)):
return False
# The length of the sequence and encoding must be the same:
if not (len(sequence) == len(encoding)):
return False
return True
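# A minimal sketch of the four-line record this check accepts; the
# io.StringIO wrapper is illustrative, not part of the module:
#   >>> import io
#   >>> has_valid_fastq_encoding(io.StringIO('@read1\nACGTN\n+\nIIIII\n'))
#   True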
def is_valid_fastq(file_location):
"""
Determines whether or not the passed file location appears to be a valid FASTQ-formatted file.
ARGUMENTS
file_location (str): the location of the file to check
RETURNS
valid (bool): whether or not the passed file appears to be a valid FASTQ-formatted file
"""
if not os.path.isfile(file_location):
return False
if not has_valid_fastq_extension(file_location):
return False
if is_gzipped(file_location):
file = gzip.open(file_location, mode='rt')
else:
file = open(file_location, mode='r')
valid = has_valid_fastq_encoding(file)
file.close()
return valid
def are_valid_fastq(reads):
"""
Determines whether or not the passed reads appear to be valid FASTQ-formatted files.
ARGUMENTS
reads (Reads): the reads to check
RETURNS
valid (bool): whether or not the reads appear to be FASTQ-formatted
"""
for file_location in reads.get_file_locations():
if not is_valid_fastq(file_location):
return False
return True
def is_gzipped(file_location):
"""
Determines whether or not the passed file appears to be in GZIP format.
ARGUMENTS
file_location (str): the location of the file to check
RETURNS
gzipped (bool): whether or not the file appears to be in GZIP format
"""
GZIP = ".gz"
if file_location.endswith(GZIP):
gzipped = True
else:
gzipped = False
return gzipped
|
[
"os.path.isfile",
"gzip.open",
"re.match"
] |
[((2734, 2763), 'os.path.isfile', 'os.path.isfile', (['file_location'], {}), '(file_location)\n', (2748, 2763), False, 'import os\n'), ((2911, 2946), 'gzip.open', 'gzip.open', (['file_location'], {'mode': '"""rt"""'}), "(file_location, mode='rt')\n", (2920, 2946), False, 'import gzip\n'), ((1627, 1649), 're.match', 're.match', (['"""^@.+"""', 'line'], {}), "('^@.+', line)\n", (1635, 1649), False, 'import re\n'), ((1809, 1841), 're.match', 're.match', (['"""^[ATGCN]+$"""', 'sequence'], {}), "('^[ATGCN]+$', sequence)\n", (1817, 1841), False, 'import re\n'), ((2010, 2034), 're.match', 're.match', (['"""^\\\\+.*"""', 'line'], {}), "('^\\\\+.*', line)\n", (2018, 2034), False, 'import re\n'), ((2183, 2211), 're.match', 're.match', (['"""^\\\\S+$"""', 'encoding'], {}), "('^\\\\S+$', encoding)\n", (2191, 2211), False, 'import re\n')]
|
# -*- coding:utf-8 -*-
# Author: hankcs
# Date: 2021-02-14 18:01
import os
from typing import Dict, Any, Union, Iterable, Callable, Optional, Tuple
import matplotlib.pyplot as plt
import torch
from elit.common.dataset import MultiTaskDataLoader
from elit.components.mtl.gated.draw_attn import heatmap
from elit.components.mtl.gated.gated_self_attn import GatedBertSelfAttention, DictGatedBertSelfAttention
from elit.components.mtl.gated.history import HistoryWithSummary
from elit.components.mtl.multi_task_learning import MultiTaskLearning, MultiTaskModel
from elit.components.mtl.self_teaching import SelfTeaching
from elit.components.mtl.tasks import Task
from elit.layers.embeddings.embedding import Embedding
from elit.layers.transformers.pt_imports import BertModel
from elit.metrics.mtl import MetricDict
from hanlp_common.util import merge_locals_kwargs
from transformers import BertLayer, optimization
from transformers.models.bert.modeling_bert import BertAttention
class JointGatedMultiTaskLearning(MultiTaskLearning):
def fit(self, encoder: Embedding, tasks: Dict[str, Task], save_dir, epochs, patience=0.5, lr=1e-3, encoder_lr=5e-5,
adam_epsilon=1e-8, weight_decay=0.0, warmup_steps=0.1, gradient_accumulation=1, grad_norm=5.0,
encoder_grad_norm=None, decoder_grad_norm=None, tau: float = 0.8, transform=None, eval_trn=True,
prefetch=None, tasks_need_custom_eval=None, loss_balancer=None, encoder_trainable=True,
self_teaching: Union[int, bool, SelfTeaching] = False, temperature_function=None, kd_loss_function=None,
freeze_encoder_layers: Optional[Tuple[int, int]] = None, _device_placeholder=False,
gates_lr=1e-3, overlap_loss_coef=0,
gates_warm_up=None,
cache=False, devices=None, logger=None, seed=None, **kwargs):
return super().fit(**merge_locals_kwargs(locals(), kwargs))
def build_model(self, training=False, model_cls=MultiTaskModel, encoder_trainable=True, **kwargs) -> MultiTaskModel:
self.model = model = super().build_model(training, model_cls, **kwargs)
for task_name in list(self.tasks):
if task_name not in self.config.task_names:
self.config.task_names.append(task_name)
del self[task_name]
# noinspection PyTypeChecker
transformer: BertModel = model.encoder.transformer
for layer in transformer.encoder.layer:
layer: BertLayer = layer
if isinstance(layer.attention, BertAttention):
# noinspection PyTypeChecker
gated_attention = DictGatedBertSelfAttention(transformer.config, task_names=self.tasks)
gated_attention.load_state_dict(layer.attention.self.state_dict(), strict=False)
layer.attention.self = gated_attention
else:
raise NotImplementedError('Unsupported transformer')
model.encoder.requires_grad_(encoder_trainable)
if not encoder_trainable:
for k, v in self.model.encoder.named_parameters():
if k.endswith('.log_a'):
v.requires_grad = True
if self.config.finetune:
model.decoders.requires_grad_(False)
return model
def compute_loss(self, batch: Dict[str, Any],
output: Union[torch.Tensor, Dict[str, torch.Tensor], Iterable[torch.Tensor], Any],
criterion: Callable, task: Task, task_name: str,
history: HistoryWithSummary = None) -> torch.FloatTensor:
task_loss = super().compute_loss(batch, output, criterion, task, task_name, history)
writer = history.writer if history else None
if not self.model.training:
return task_loss
# noinspection PyTypeChecker
transformer: BertModel = self.model.encoder.transformer
regs = []
for layer in transformer.encoder.layer:
layer: BertLayer = layer
# noinspection PyTypeChecker
gated_attention: GatedBertSelfAttention = layer.attention.self
regs += gated_attention.cached_regs
gated_attention.cached_regs.clear()
# reg_loss = self.tasks[task_name].config.concrete_coef * torch.mean(torch.stack(regs))
# loss = task_loss + reg_loss
loss = task_loss
overlap_loss_coef = self.config['overlap_loss_coef']
if len(self.tasks) > 1 and overlap_loss_coef:
current_gates = self.get_gates()
overlap_loss = []
for other in self.tasks:
if other == task_name:
continue
gates = self.get_gates(other)
overlap_loss.append(current_gates * gates)
overlap_loss = overlap_loss_coef * torch.stack(overlap_loss).sum()
loss += overlap_loss
else:
overlap_loss = None
step = history.num_mini_batches
if writer and step % self.config.gradient_accumulation == 0:
step //= self.config.gradient_accumulation
# writer.add_scalar(f'{task_name}/reg', float(reg_loss), step)
writer.add_scalar(f'{task_name}/task', float(task_loss), step)
if overlap_loss is not None:
writer.add_scalar(f'{task_name}/overlap', float(overlap_loss), step)
return loss
def get_gates(self, task_name=None):
gates = []
# noinspection PyTypeChecker
transformer: BertModel = self.model.encoder.transformer
for layer in transformer.encoder.layer:
layer: BertLayer = layer
# noinspection PyTypeChecker
gated_attention: DictGatedBertSelfAttention = layer.attention.self
gate = gated_attention.gates[task_name] if task_name else gated_attention.gate
gates.append(gate.get_gates(False).squeeze())
return torch.stack(gates)
def get_sparsity_rate(self, task_name=None):
return torch.mean((self.get_gates(task_name) > 1e-3).to(torch.float))
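        # Illustrative reading (hedged, made-up counts): if 1536 of 2304 gate
        # activations exceed the 1e-3 threshold, the rate is 1536/2304 ≈ 66.7%.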
def save_weights(self, save_dir, filename='model.pt', trainable_only=False, **kwargs):
super().save_weights(save_dir, filename, trainable_only, **kwargs)
def build_history(self, save_dir: str):
return HistoryWithSummary(save_dir)
def evaluate_dataloader(self, data: MultiTaskDataLoader, criterion, metric: MetricDict, logger, ratio_width=None,
input: str = None, history: HistoryWithSummary = None, save_dir=None, **kwargs):
results = super().evaluate_dataloader(data, criterion, metric, logger, ratio_width, input, **kwargs)
writer = history.writer if history else None
if writer:
step = history.num_mini_batches // self.config.gradient_accumulation
for task_name, scores in metric.items():
writer.add_scalar(f'{task_name}/{input}_score', float(scores), global_step=step)
gates = self.get_gates(task_name).cpu().detach().numpy()
im, cb = heatmap(gates, cbar=True, cmap="binary",
row_labels=[f'{x + 1}' for x in range(gates.shape[0])],
col_labels=[f'{x + 1}' for x in range(gates.shape[1])],
show_axis_labels=True
)
im.set_clim(0, 1)
plt.xlabel('heads')
plt.ylabel('layers')
writer.add_figure(f'{input}_gates/{task_name}', plt.gcf(), global_step=step, close=False)
# save gates
folder = f'{save_dir}/gates/{task_name}'
os.makedirs(folder, exist_ok=True)
torch.save(gates, f'{folder}/{step}.pt')
plt.clf()
return results
def build_optimizer(self, trn: MultiTaskDataLoader, epochs, adam_epsilon, weight_decay, warmup_steps, lr,
encoder_lr, self_teaching: SelfTeaching = None, gates_lr=None,
gates_warm_up=None, **kwargs):
encoder_optimizer, encoder_scheduler, _ = super().build_optimizer(trn, epochs, adam_epsilon,
weight_decay, warmup_steps,
lr, encoder_lr,
self_teaching, **kwargs)
if not self.config.gates_lr:
return encoder_optimizer, encoder_optimizer, _
gates_optimizer = torch.optim.Adam(
[v for k, v in self.model.encoder.named_parameters() if k.endswith('.log_a')]
, lr=gates_lr)
decoder_optimizers = dict((k, gates_optimizer) for k in self.tasks)
if not gates_warm_up:
return encoder_optimizer, encoder_scheduler, decoder_optimizers
num_training_steps = len(trn) * epochs // self.config.get('gradient_accumulation', 1)
decoder_schedulers = dict((k,
optimization.get_linear_schedule_with_warmup(
decoder_optimizers[k],
num_training_steps * warmup_steps,
num_training_steps)) for k in self.tasks)
return encoder_optimizer, encoder_scheduler, dict(
(k, (decoder_optimizers[k], decoder_schedulers[k])) for k in self.tasks)
def _collect_encoder_parameters(self):
if self.config.gates_lr:
return [v for k, v in self.model.encoder.named_parameters() if not k.endswith('.log_a')]
else:
return super()._collect_encoder_parameters()
def report_metrics(self, loss, metrics: MetricDict):
reports = super().report_metrics(loss, metrics)
if self.model.training:
return reports + f' gates: {self.get_sparsity_rate():.2%}'
return reports
def _encode(self, batch, task_name, output_dict=None, cls_is_bos=False, sep_is_eos=False):
output_dict = None
transformer: BertModel = self.model.encoder.transformer
for layer in transformer.encoder.layer:
layer: BertLayer = layer
if isinstance(layer.attention, BertAttention):
layer.attention.self.enable_gate(task_name)
return super()._encode(batch, task_name, output_dict, cls_is_bos, sep_is_eos)
|
[
"torch.stack",
"os.makedirs",
"matplotlib.pyplot.clf",
"torch.save",
"transformers.optimization.get_linear_schedule_with_warmup",
"elit.components.mtl.gated.history.HistoryWithSummary",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.gcf",
"matplotlib.pyplot.xlabel",
"elit.components.mtl.gated.gated_self_attn.DictGatedBertSelfAttention"
] |
[((5906, 5924), 'torch.stack', 'torch.stack', (['gates'], {}), '(gates)\n', (5917, 5924), False, 'import torch\n'), ((6280, 6308), 'elit.components.mtl.gated.history.HistoryWithSummary', 'HistoryWithSummary', (['save_dir'], {}), '(save_dir)\n', (6298, 6308), False, 'from elit.components.mtl.gated.history import HistoryWithSummary\n'), ((2618, 2687), 'elit.components.mtl.gated.gated_self_attn.DictGatedBertSelfAttention', 'DictGatedBertSelfAttention', (['transformer.config'], {'task_names': 'self.tasks'}), '(transformer.config, task_names=self.tasks)\n', (2644, 2687), False, 'from elit.components.mtl.gated.gated_self_attn import GatedBertSelfAttention, DictGatedBertSelfAttention\n'), ((7406, 7425), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""heads"""'], {}), "('heads')\n", (7416, 7425), True, 'import matplotlib.pyplot as plt\n'), ((7442, 7462), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""layers"""'], {}), "('layers')\n", (7452, 7462), True, 'import matplotlib.pyplot as plt\n'), ((7671, 7705), 'os.makedirs', 'os.makedirs', (['folder'], {'exist_ok': '(True)'}), '(folder, exist_ok=True)\n', (7682, 7705), False, 'import os\n'), ((7722, 7762), 'torch.save', 'torch.save', (['gates', 'f"""{folder}/{step}.pt"""'], {}), "(gates, f'{folder}/{step}.pt')\n", (7732, 7762), False, 'import torch\n'), ((7779, 7788), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (7786, 7788), True, 'import matplotlib.pyplot as plt\n'), ((7527, 7536), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (7534, 7536), True, 'import matplotlib.pyplot as plt\n'), ((9063, 9190), 'transformers.optimization.get_linear_schedule_with_warmup', 'optimization.get_linear_schedule_with_warmup', (['decoder_optimizers[k]', '(num_training_steps * warmup_steps)', 'num_training_steps'], {}), '(decoder_optimizers[k], \n num_training_steps * warmup_steps, num_training_steps)\n', (9107, 9190), False, 'from transformers import BertLayer, optimization\n'), ((4804, 4829), 'torch.stack', 'torch.stack', (['overlap_loss'], {}), '(overlap_loss)\n', (4815, 4829), False, 'import torch\n')]
|
import pytest
from django.urls import reverse
from pontoon.checks.utils import bulk_run_checks
from pontoon.test.factories import TranslationFactory, ProjectLocaleFactory
@pytest.yield_fixture
def batch_action(client, admin_client):
"""
Shortcut function to make API-call more readable in tests.
"""
def _action(admin=False, **opts):
"""
:param bool admin: when true then uses admin_client to make api calls.
:param opts: options passed to the batch action view.
"""
if admin:
client_ = admin_client
else:
client_ = client
response = client_.post(
reverse('pontoon.batch.edit.translations'),
opts,
HTTP_X_REQUESTED_WITH='XMLHttpRequest',
)
return response
return _action
@pytest.yield_fixture
def translation_dtd_unapproved():
translation = TranslationFactory.create(
string='Test Translation',
active=True,
approved=False,
entity__key='test',
entity__resource__format='dtd',
entity__resource__path='test.dtd',
)
bulk_run_checks([translation])
ProjectLocaleFactory.create(
project=translation.entity.resource.project,
locale=translation.locale,
)
yield translation
@pytest.yield_fixture
def translation_dtd_invalid_unapproved():
# Provide invalid characters in translation to cause checks to fail
translation = TranslationFactory.create(
string='!@#$""\'',
active=True,
approved=False,
entity__key='test',
entity__resource__format='dtd',
entity__resource__path='test.dtd',
)
bulk_run_checks([translation])
ProjectLocaleFactory.create(
project=translation.entity.resource.project,
locale=translation.locale,
)
yield translation
@pytest.mark.django_db
def test_batch_edit_translations_no_user(client):
"""If there are no logged in users, the view redirects to the login page.
"""
response = client.post(reverse('pontoon.batch.edit.translations'))
assert response.status_code == 302
@pytest.mark.django_db
def test_batch_edit_translations_bad_request(batch_action, member, locale_a):
# No `locale` parameter.
response = batch_action(action='reject')
assert response.status_code == 400
assert 'locale' in response.content
# No `action` parameter.
response = batch_action(locale=locale_a.code)
assert response.status_code == 400
assert 'action' in response.content
# Incorrect `action` parameter.
response = batch_action(
action='unknown',
locale=locale_a.code,
)
assert response.status_code == 400
assert 'action' in response.content
# Incorrect `locale` parameter.
response = batch_action(
action='reject',
locale='unknown',
)
assert response.status_code == 404
assert 'action' in response.content
@pytest.mark.django_db
def test_batch_edit_translations_no_permissions(
batch_action, member, locale_a, entity_a, project_locale_a
):
response = batch_action(
action='reject',
locale=locale_a.code,
entities=entity_a.id,
)
assert response.status_code == 403
assert 'Forbidden' in response.content
@pytest.mark.django_db
def test_batch_approve_valid_translations(
batch_action,
member,
translation_dtd_unapproved,
):
"""
Approve translations without errors.
"""
response = batch_action(
admin=True,
action='approve',
locale=translation_dtd_unapproved.locale.code,
entities=translation_dtd_unapproved.entity.pk,
)
assert response.json() == {
'count': 1,
'invalid_translation_count': 0,
}
translation_dtd_unapproved.refresh_from_db()
assert translation_dtd_unapproved.approved
@pytest.mark.django_db
def test_batch_approve_invalid_translations(
batch_action,
member,
translation_dtd_invalid_unapproved,
):
"""
Translations with errors can't be approved.
"""
response = batch_action(
admin=True,
action='approve',
locale=translation_dtd_invalid_unapproved.locale.code,
entities=translation_dtd_invalid_unapproved.entity.pk
)
assert response.json() == {
'count': 0,
'invalid_translation_count': 1,
}
translation_dtd_invalid_unapproved.refresh_from_db()
assert not translation_dtd_invalid_unapproved.approved
@pytest.mark.django_db
def test_batch_find_and_replace_valid_translations(
batch_action,
member,
translation_dtd_unapproved,
):
response = batch_action(
admin=True,
action='replace',
locale=translation_dtd_unapproved.locale.code,
entities=translation_dtd_unapproved.entity.pk,
find='Translation',
replace='Replaced translation',
)
assert response.json() == {
'count': 1,
'invalid_translation_count': 0,
}
translation = translation_dtd_unapproved.entity.translation_set.last()
assert translation.string == 'Test Replaced translation'
assert translation.approved
@pytest.mark.django_db
def test_batch_find_and_replace_invalid_translations(
batch_action,
member,
translation_dtd_unapproved,
):
"""
The `find & replace` action can't produce invalid translations.
"""
response = batch_action(
admin=True,
action='replace',
locale=translation_dtd_unapproved.locale.code,
entities=translation_dtd_unapproved.entity.pk,
find='Translation',
replace='%$#%>',
)
assert response.json() == {
'count': 0,
'invalid_translation_count': 1,
}
translation = translation_dtd_unapproved.entity.translation_set.last()
assert translation.string == 'Test Translation'
assert not translation.approved
|
[
"pontoon.checks.utils.bulk_run_checks",
"pontoon.test.factories.ProjectLocaleFactory.create",
"django.urls.reverse",
"pontoon.test.factories.TranslationFactory.create"
] |
[((905, 1082), 'pontoon.test.factories.TranslationFactory.create', 'TranslationFactory.create', ([], {'string': '"""Test Translation"""', 'active': '(True)', 'approved': '(False)', 'entity__key': '"""test"""', 'entity__resource__format': '"""dtd"""', 'entity__resource__path': '"""test.dtd"""'}), "(string='Test Translation', active=True, approved=\n False, entity__key='test', entity__resource__format='dtd',\n entity__resource__path='test.dtd')\n", (930, 1082), False, 'from pontoon.test.factories import TranslationFactory, ProjectLocaleFactory\n'), ((1133, 1163), 'pontoon.checks.utils.bulk_run_checks', 'bulk_run_checks', (['[translation]'], {}), '([translation])\n', (1148, 1163), False, 'from pontoon.checks.utils import bulk_run_checks\n'), ((1169, 1272), 'pontoon.test.factories.ProjectLocaleFactory.create', 'ProjectLocaleFactory.create', ([], {'project': 'translation.entity.resource.project', 'locale': 'translation.locale'}), '(project=translation.entity.resource.project,\n locale=translation.locale)\n', (1196, 1272), False, 'from pontoon.test.factories import TranslationFactory, ProjectLocaleFactory\n'), ((1471, 1639), 'pontoon.test.factories.TranslationFactory.create', 'TranslationFactory.create', ([], {'string': '"""!@#$""\'"""', 'active': '(True)', 'approved': '(False)', 'entity__key': '"""test"""', 'entity__resource__format': '"""dtd"""', 'entity__resource__path': '"""test.dtd"""'}), '(string=\'!@#$""\\\'\', active=True, approved=False,\n entity__key=\'test\', entity__resource__format=\'dtd\',\n entity__resource__path=\'test.dtd\')\n', (1496, 1639), False, 'from pontoon.test.factories import TranslationFactory, ProjectLocaleFactory\n'), ((1691, 1721), 'pontoon.checks.utils.bulk_run_checks', 'bulk_run_checks', (['[translation]'], {}), '([translation])\n', (1706, 1721), False, 'from pontoon.checks.utils import bulk_run_checks\n'), ((1727, 1830), 'pontoon.test.factories.ProjectLocaleFactory.create', 'ProjectLocaleFactory.create', ([], {'project': 'translation.entity.resource.project', 'locale': 'translation.locale'}), '(project=translation.entity.resource.project,\n locale=translation.locale)\n', (1754, 1830), False, 'from pontoon.test.factories import TranslationFactory, ProjectLocaleFactory\n'), ((2060, 2102), 'django.urls.reverse', 'reverse', (['"""pontoon.batch.edit.translations"""'], {}), "('pontoon.batch.edit.translations')\n", (2067, 2102), False, 'from django.urls import reverse\n'), ((661, 703), 'django.urls.reverse', 'reverse', (['"""pontoon.batch.edit.translations"""'], {}), "('pontoon.batch.edit.translations')\n", (668, 703), False, 'from django.urls import reverse\n')]
|
# -*- coding: utf-8 -*-
import angus
conn = angus.connect()
service = conn.services.get_service('text_to_speech', version=1)
job = service.process({'text': "Hi guys, how are you today?", 'lang' : "en-US"})
### The output wav file is available as compressed (zlib), base64 string.
sound = job.result["sound"]
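### A minimal decoding sketch (assumes the zlib + base64 encoding described
### above; the output filename is illustrative, not prescribed by the API).
import base64
import zlib
with open('output.wav', 'wb') as f:
    f.write(zlib.decompress(base64.b64decode(sound)))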
|
[
"angus.connect"
] |
[((45, 60), 'angus.connect', 'angus.connect', ([], {}), '()\n', (58, 60), False, 'import angus\n')]
|
from __future__ import absolute_import, division, print_function, unicode_literals
version = '0.5.7'
from os import path
cwd = path.abspath(path.dirname(__file__))
_instance = None
def instance(new=None):
global _instance
if new:
_instance = new
return _instance
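# Hypothetical usage of the accessor above (constructor arguments are
# illustrative, not the package's actual signatures):
#   instance(Build(...))   # register the active build object
#   instance()             # later calls return the registered instance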
from . import yaml
from .config import Config
from .document import Document
from .resource import Resource
from .render import Render, jinja
from .processor import Processor
from .processor import before, after, document, resource, markup, render
from .build import Build
from . import plugins
|
[
"os.path.dirname"
] |
[((142, 164), 'os.path.dirname', 'path.dirname', (['__file__'], {}), '(__file__)\n', (154, 164), False, 'from os import path\n')]
|
import socket
s = socket.socket()
host = socket.gethostname()
port = 12345
s.connect((host, port))
print(s.recv(1024))
s.close()
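# For context, a hypothetical matching server sketch (not part of this
# snippet) that sends one message per connection:
#   import socket
#   server = socket.socket()
#   server.bind((socket.gethostname(), 12345))
#   server.listen(1)
#   conn, addr = server.accept()
#   conn.send(b'hello from server')
#   conn.close()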
|
[
"socket.gethostname",
"socket.socket"
] |
[((18, 33), 'socket.socket', 'socket.socket', ([], {}), '()\n', (31, 33), False, 'import socket\n'), ((42, 62), 'socket.gethostname', 'socket.gethostname', ([], {}), '()\n', (60, 62), False, 'import socket\n')]
|
import re
# unicodedata2 is the unicodedata backport to Python 2
try:
import unicodedata2 as unicodedata
except ImportError:
import unicodedata
import urlparse
def clean_url(url):
"""Takes a user-supplied url and cleans bits out
This removes:
1. nixes any non http/https/chrome/about urls
2. port numbers
3. query string variables
4. hashes
"""
if not url:
return url
# Don't mess with about: urls.
if url.startswith('about:'):
return url
parsed = urlparse.urlparse(url)
if parsed.scheme not in ('http', 'https', 'chrome'):
return u''
# Rebuild url to drop querystrings, hashes, etc
new_url = (parsed.scheme, parsed.hostname, parsed.path, None, None, None)
return urlparse.urlunparse(new_url)
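# Illustrative example (not from the original source): port, query string
# and fragment are all dropped:
#   >>> clean_url('https://example.com:8080/path?q=1#top')
#   'https://example.com/path'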
POSSIBLE_EMOJIS_RE = re.compile(u'[\U00010000-\U0010ffff]')
def convert_emoji(text):
"""Takes unicode text and converts emoji characters
Emoji break MySQL, so we convert them into ascii.
"""
    # Replace each matched character with its Unicode name via unicodedata.name().
def _convert(match_char):
c = match_char.group(0)
try:
return unicodedata.name(c)
except ValueError:
# Throws a ValueError if the name doesn't exist.
return ''
return POSSIBLE_EMOJIS_RE.sub(_convert, text)
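# Illustrative example (assumes a wide Unicode build): astral-plane emoji
# are replaced by their Unicode names, leaving plain ASCII:
#   >>> convert_emoji(u'ok \U0001F600')
#   u'ok GRINNING FACE'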
|
[
"unicodedata.name",
"urlparse.urlparse",
"urlparse.urlunparse",
"re.compile"
] |
[((819, 848), 're.compile', 're.compile', (['u"""[𐀀-\U0010ffff]"""'], {}), "(u'[𐀀-\\U0010ffff]')\n", (829, 848), False, 'import re\n'), ((524, 546), 'urlparse.urlparse', 'urlparse.urlparse', (['url'], {}), '(url)\n', (541, 546), False, 'import urlparse\n'), ((767, 795), 'urlparse.urlunparse', 'urlparse.urlunparse', (['new_url'], {}), '(new_url)\n', (786, 795), False, 'import urlparse\n'), ((1135, 1154), 'unicodedata.name', 'unicodedata.name', (['c'], {}), '(c)\n', (1151, 1154), False, 'import unicodedata\n')]
|
"""
Leetcode 1301
"""
from collections import defaultdict
import bisect
class Solution:
def isSubsequence(self, s: str, t: str) -> bool:
        if len(s) == 0:
            # An empty string is a subsequence of any string, including ''.
            return True
        if len(t) == 0:
            return False
i = 0
j = 0
while i < len(s) and j < len(t):
if s[i] == t[j]:
i += 1
j += 1
return True if i == len(s) else False
def isSubsequence_OPTI(self, s: str, t: str) -> bool:
store = defaultdict(list)
for i in range(len(t)):
store[t[i]].append(i)
start = 0
for c in s:
# if char c is not present in T
if len(store[c]) == 0:
return False
            # find the leftmost index in store[c] that is >= start;
            # initially start is 0, so this picks the leftmost occurrence of c,
            # and on later iterations it is one past the previous match's index
idx = bisect.bisect_left(store[c], start)
# if no index is found for current of start in store[c]
if idx == len(store[c]):
return False
# update start for next iteration
start = store[c][idx] + 1
return True
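    # Walkthrough (illustrative): for s="abc", t="ahbgdc" the index map is
    # {'a': [0], 'h': [1], 'b': [2], 'g': [3], 'd': [4], 'c': [5]}; the
    # bisect lookups hit positions 0, 2 and 5 in order, so the method
    # returns True in O(len(t) + len(s) * log(len(t))) time.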
if __name__ == "__main__":
solution = Solution()
assert solution.isSubsequence("abc", "ahbgdc") == True
assert solution.isSubsequence("axc", "ahbgdc") == False
assert solution.isSubsequence_OPTI("abc", "ahbgdc") == True
assert solution.isSubsequence_OPTI("axc", "ahbgdc") == False
|
[
"collections.defaultdict",
"bisect.bisect_left"
] |
[((509, 526), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (520, 526), False, 'from collections import defaultdict\n'), ((970, 1005), 'bisect.bisect_left', 'bisect.bisect_left', (['store[c]', 'start'], {}), '(store[c], start)\n', (988, 1005), False, 'import bisect\n')]
|
from collections import defaultdict
from tqdm import tqdm
import numpy as np
import gym
import wandb
import trueskill
import torch
from torch import nn
from torch.nn.utils import rnn
from ray import rllib
from pdb import set_trace as TT
import ray.rllib.agents.ppo.ppo as ppo
import ray.rllib.agents.ppo.appo as appo
import ray.rllib.agents.impala.impala as impala
from ray.rllib.agents.callbacks import DefaultCallbacks
from ray.tune.integration.wandb import WandbLoggerCallback
from ray.rllib.utils.spaces.flexdict import FlexDict
from ray.rllib.models.torch.recurrent_net import RecurrentNetwork
from neural_mmo.forge.blade.io.action.static import Action, Fixed
from neural_mmo.forge.blade.io.stimulus.static import Stimulus
from neural_mmo.forge.blade.lib import overlay
from neural_mmo.forge.blade.systems import ai
from neural_mmo.forge.ethyr.torch import policy
from neural_mmo.forge.ethyr.torch.policy import attention
from neural_mmo.forge.trinity import Env
from neural_mmo.forge.trinity.dataframe import DataType
from neural_mmo.forge.trinity.overlay import Overlay, OverlayRegistry
###############################################################################
### Pytorch model + IO. The pip package contains some submodules
class Input(nn.Module):
def __init__(self, config, embeddings, attributes):
'''Network responsible for processing observations
Args:
config : A configuration object
embeddings : An attribute embedding module
attributes : An attribute attention module
'''
super().__init__()
self.embeddings = nn.ModuleDict()
self.attributes = nn.ModuleDict()
for _, entity in Stimulus:
continuous = len([e for e in entity if e[1].CONTINUOUS])
discrete = len([e for e in entity if e[1].DISCRETE])
self.attributes[entity.__name__] = nn.Linear(
(continuous+discrete)*config.HIDDEN, config.HIDDEN)
self.embeddings[entity.__name__] = embeddings(
continuous=continuous, discrete=4096, config=config)
#Hackey obs scaling
self.tileWeight = torch.Tensor([1.0, 0.0, 0.02, 0.02, 0.0, 0.0, 0.0, 0.0])
self.entWeight = torch.Tensor([1.0, 0.0, 0.0, 0.05, 0.00, 0.02, 0.02, 0.1, 0.01, 0.1, 0.1, 0.1, 0.3, 0.0, 0,0,0,0,0,0,0,0]) # I added this to make lights observable hopefully
def forward(self, inp):
'''Produces tensor representations from an IO object
Args:
inp: An IO object specifying observations
Returns:
entityLookup: A fixed size representation of each entity
'''
#Pack entities of each attribute set
entityLookup = {}
device = inp['Tile']['Continuous'].device
inp['Tile']['Continuous'] *= self.tileWeight.to(device)
inp['Entity']['Continuous'] *= self.entWeight.to(device)
entityLookup['N'] = inp['Entity'].pop('N')
for name, entities in inp.items():
#Construct: Batch, ents, nattrs, hidden
embeddings = self.embeddings[name](entities)
B, N, _, _ = embeddings.shape
embeddings = embeddings.view(B, N, -1)
#Construct: Batch, ents, hidden
entityLookup[name] = self.attributes[name](embeddings)
return entityLookup
class Output(nn.Module):
def __init__(self, config):
'''Network responsible for selecting actions
Args:
config: A Config object
'''
super().__init__()
self.config = config
self.h = config.HIDDEN
self.net = DiscreteAction(self.config, self.h, self.h)
self.arg = nn.Embedding(Action.n, self.h)
def names(self, nameMap, args):
'''Lookup argument indices from name mapping'''
return np.array([nameMap.get(e) for e in args])
def forward(self, obs, lookup):
'''Populates an IO object with actions in-place
Args:
obs : An IO object specifying observations
lookup : A fixed size representation of each entity
'''
rets = defaultdict(dict)
for atn in Action.edges:
for arg in atn.edges:
lens = None
if arg.argType == Fixed:
batch = obs.shape[0]
idxs = [e.idx for e in arg.edges]
cands = self.arg.weight[idxs]
cands = cands.repeat(batch, 1, 1)
else:
cands = lookup['Entity']
lens = lookup['N']
logits = self.net(obs, cands, lens)
rets[atn][arg] = logits
return rets
class DiscreteAction(nn.Module):
'''Head for making a discrete selection from
a variable number of candidate actions'''
def __init__(self, config, xdim, h):
super().__init__()
self.net = attention.DotReluBlock(h)
def forward(self, stim, args, lens):
x = self.net(stim, args)
if lens is not None:
mask = torch.arange(x.shape[-1]).to(x.device).expand_as(x)
x[mask >= lens] = 0
return x
class Base(nn.Module):
def __init__(self, config):
'''Base class for baseline policies
Args:
config: A Configuration object
'''
super().__init__()
self.embed = config.EMBED
self.config = config
self.output = Output(config)
self.input = Input(config,
embeddings=policy.MixedDTypeInput,
attributes=policy.SelfAttention)
self.valueF = nn.Linear(config.HIDDEN, 1)
def hidden(self, obs, state=None, lens=None):
'''Abstract method for hidden state processing, recurrent or otherwise,
applied between the input and output modules
Args:
obs: An observation dictionary, provided by forward()
state: The previous hidden state, only provided for recurrent nets
lens: Trajectory segment lengths used to unflatten batched obs
'''
raise NotImplementedError('Implement this method in a subclass')
def forward(self, obs, state=None, lens=None):
'''Applies builtin IO and value function with user-defined hidden
state subnetwork processing. Arguments are supplied by RLlib
'''
entityLookup = self.input(obs)
hidden, state = self.hidden(entityLookup, state, lens)
self.value = self.valueF(hidden).squeeze(1)
actions = self.output(hidden, entityLookup)
return actions, state
class Encoder(Base):
def __init__(self, config):
'''Simple baseline model with flat subnetworks'''
super().__init__(config)
h = config.HIDDEN
self.ent = nn.Linear(2*h, h)
self.conv = nn.Conv2d(h, h, 3)
self.pool = nn.MaxPool2d(2)
self.fc = nn.Linear(h*6*6, h)
self.proj = nn.Linear(2*h, h)
self.attend = policy.SelfAttention(self.embed, h)
def hidden(self, obs, state=None, lens=None):
#Attentional agent embedding
agentEmb = obs['Entity']
selfEmb = agentEmb[:, 0:1].expand_as(agentEmb)
agents = torch.cat((selfEmb, agentEmb), dim=-1)
agents = self.ent(agents)
agents, _ = self.attend(agents)
#agents = self.ent(selfEmb)
#Convolutional tile embedding
tiles = obs['Tile']
self.attn = torch.norm(tiles, p=2, dim=-1)
w = self.config.WINDOW
batch = tiles.size(0)
hidden = tiles.size(2)
#Dims correct?
tiles = tiles.reshape(batch, w, w, hidden).permute(0, 3, 1, 2)
tiles = self.conv(tiles)
tiles = self.pool(tiles)
tiles = tiles.reshape(batch, -1)
tiles = self.fc(tiles)
hidden = torch.cat((agents, tiles), dim=-1)
hidden = self.proj(hidden)
return hidden, state
class Recurrent(Encoder):
def __init__(self, config):
'''Recurrent baseline model'''
super().__init__(config)
self.lstm = policy.BatchFirstLSTM(
input_size=config.HIDDEN,
hidden_size=config.HIDDEN)
#Note: seemingly redundant transposes are required to convert between
#Pytorch (seq_len, batch, hidden) <-> RLlib (batch, seq_len, hidden)
def hidden(self, obs, state, lens):
#Attentional input preprocessor and batching
lens = lens.cpu() if type(lens) == torch.Tensor else lens
hidden, _ = super().hidden(obs)
config = self.config
h, c = state
TB = hidden.size(0) #Padded batch of size (seq x batch)
      B = len(lens)            #Trajectory batch size (number of sequences)
      T = TB // B              #Sequence fragment time length
H = config.HIDDEN #Hidden state size
#Pack (batch x seq, hidden) -> (batch, seq, hidden)
hidden = rnn.pack_padded_sequence(
input=hidden.view(B, T, H),
lengths=lens,
enforce_sorted=False,
batch_first=True)
#Main recurrent network
oldHidden = hidden
hidden, state = self.lstm(hidden, state)
newHidden = hidden
#Unpack (batch, seq, hidden) -> (batch x seq, hidden)
hidden, _ = rnn.pad_packed_sequence(
sequence=hidden,
batch_first=True,
total_length=T)
return hidden.reshape(TB, H), state
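   # Shape walkthrough (illustrative sizes): with B=2 sequences of fragment
   # length T=8 and H=64, the flat (TB=16, 64) tensor is viewed as (2, 8, 64),
   # packed so the LSTM skips padding steps, unpacked back to (2, 8, 64), and
   # reshaped to the flat (16, 64) layout RLlib expects.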
###############################################################################
### RLlib Policy, Evaluator, Trainer
class RLlibPolicy(RecurrentNetwork, nn.Module):
'''Wrapper class for using our baseline models with RLlib'''
def __init__(self, *args, **kwargs):
self.config = kwargs.pop('config')
super().__init__(*args, **kwargs)
nn.Module.__init__(self)
self.space = actionSpace(self.config).spaces
self.model = Recurrent(self.config)
#Initial hidden state for RLlib Trainer
def get_initial_state(self):
return [self.model.valueF.weight.new(1, self.config.HIDDEN).zero_(),
self.model.valueF.weight.new(1, self.config.HIDDEN).zero_()]
def forward(self, input_dict, state, seq_lens):
logitDict, state = self.model(input_dict['obs'], state, seq_lens)
logits = []
#Flatten structured logits for RLlib
for atnKey, atn in sorted(self.space.items()):
for argKey, arg in sorted(atn.spaces.items()):
logits.append(logitDict[atnKey][argKey])
return torch.cat(logits, dim=1), state
def value_function(self):
return self.model.value
def attention(self):
return self.model.attn
###############################################################################
### RLlib Wrappers: Env, Overlays
class RLlibEnv(Env, rllib.MultiAgentEnv):
def __init__(self, config):
self.config = config['config']
super().__init__(self.config)
def reward(self, ent):
config = self.config
ACHIEVEMENT = config.REWARD_ACHIEVEMENT
SCALE = config.ACHIEVEMENT_SCALE
COOPERATIVE = config.COOPERATIVE
individual = 0 if ent.entID in self.realm.players else -1
team = 0
if ACHIEVEMENT:
individual += SCALE*ent.achievements.update(self.realm, ent, dry=True)
if COOPERATIVE:
nDead = len([p for p in self.dead.values() if p.population == ent.pop])
team = -nDead / config.TEAM_SIZE
if COOPERATIVE and ACHIEVEMENT:
pre, post = [], []
for p in self.realm.players.corporeal.values():
if p.population == ent.pop:
pre.append(p.achievements.score(aggregate=False))
post.append(p.achievements.update(
self.realm, ent, aggregate=False, dry=True))
pre = np.array(pre).max(0)
post = np.array(post).max(0)
team += SCALE*(post - pre).sum()
ent.achievements.update(self.realm, ent)
alpha = config.TEAM_SPIRIT
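      # Illustrative mixing (made-up numbers): with alpha=0.5, team=-0.25 and
      # individual=1.0 the blended reward is 0.5*(-0.25) + 0.5*1.0 = 0.375.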
return alpha*team + (1.0-alpha)*individual
def step(self, decisions, preprocess=None, omitDead=False):
preprocess = {entID for entID in decisions}
obs, rewards, dones, infos = super().step(decisions, preprocess, omitDead)
config = self.config
dones['__all__'] = False
test = config.EVALUATE or config.RENDER
if config.EVALUATE:
horizon = config.EVALUATION_HORIZON
else:
horizon = config.TRAIN_HORIZON
population = len(self.realm.players) == 0
hit_horizon = self.realm.tick >= horizon
if not config.RENDER and (hit_horizon or population):
dones['__all__'] = True
return obs, rewards, dones, infos
def observationSpace(config):
obs = FlexDict(defaultdict(FlexDict))
for entity in sorted(Stimulus.values()):
nRows = entity.N(config)
nContinuous = 0
nDiscrete = 0
for _, attr in entity:
if attr.DISCRETE:
nDiscrete += 1
if attr.CONTINUOUS:
nContinuous += 1
obs[entity.__name__]['Continuous'] = gym.spaces.Box(
low=-2**20, high=2**20, shape=(nRows, nContinuous),
dtype=DataType.CONTINUOUS)
obs[entity.__name__]['Discrete'] = gym.spaces.Box(
low=0, high=4096, shape=(nRows, nDiscrete),
dtype=DataType.DISCRETE)
obs['Entity']['N'] = gym.spaces.Box(
low=0, high=config.N_AGENT_OBS, shape=(1,),
dtype=DataType.DISCRETE)
return obs
def actionSpace(config):
atns = FlexDict(defaultdict(FlexDict))
for atn in sorted(Action.edges):
for arg in sorted(atn.edges):
n = arg.N(config)
atns[atn][arg] = gym.spaces.Discrete(n)
return atns
class RLlibOverlayRegistry(OverlayRegistry):
'''Host class for RLlib Map overlays'''
def __init__(self, realm):
super().__init__(realm.config, realm)
self.overlays['values'] = Values
self.overlays['attention'] = Attention
self.overlays['tileValues'] = TileValues
self.overlays['entityValues'] = EntityValues
class RLlibOverlay(Overlay):
'''RLlib Map overlay wrapper'''
def __init__(self, config, realm, trainer, model):
super().__init__(config, realm)
self.trainer = trainer
self.model = model
class Attention(RLlibOverlay):
def register(self, obs):
'''Computes local attentional maps with respect to each agent'''
tiles = self.realm.realm.map.tiles
players = self.realm.realm.players
attentions = defaultdict(list)
for idx, playerID in enumerate(obs):
if playerID not in players:
continue
player = players[playerID]
r, c = player.pos
rad = self.config.NSTIM
obTiles = self.realm.realm.map.tiles[r-rad:r+rad+1, c-rad:c+rad+1].ravel()
for tile, a in zip(obTiles, self.model.attention()[idx]):
attentions[tile].append(float(a))
sz = self.config.TERRAIN_SIZE
data = np.zeros((sz, sz))
for r, tList in enumerate(tiles):
for c, tile in enumerate(tList):
if tile not in attentions:
continue
data[r, c] = np.mean(attentions[tile])
colorized = overlay.twoTone(data)
self.realm.register(colorized)
class Values(RLlibOverlay):
def update(self, obs):
'''Computes a local value function by painting tiles as agents
walk over them. This is fast and does not require additional
network forward passes'''
players = self.realm.realm.players
for idx, playerID in enumerate(obs):
if playerID not in players:
continue
r, c = players[playerID].base.pos
self.values[r, c] = float(self.model.value_function()[idx])
def register(self, obs):
colorized = overlay.twoTone(self.values[:, :])
self.realm.register(colorized)
def zeroOb(ob, key):
for k in ob[key]:
ob[key][k] *= 0
class GlobalValues(RLlibOverlay):
'''Abstract base for global value functions'''
def init(self, zeroKey):
if self.trainer is None:
return
print('Computing value map...')
model = self.trainer.get_policy('policy_0').model
obs, ents = self.realm.dense()
values = 0 * self.values
#Compute actions to populate model value function
BATCH_SIZE = 128
batch = {}
final = list(obs.keys())[-1]
for agentID in tqdm(obs):
ob = obs[agentID]
batch[agentID] = ob
zeroOb(ob, zeroKey)
if len(batch) == BATCH_SIZE or agentID == final:
self.trainer.compute_actions(batch, state={}, policy_id='policy_0')
for idx, agentID in enumerate(batch):
r, c = ents[agentID].base.pos
values[r, c] = float(self.model.value_function()[idx])
batch = {}
print('Value map computed')
self.colorized = overlay.twoTone(values)
def register(self, obs):
print('Computing Global Values. This requires one NN pass per tile')
self.init()
self.realm.register(self.colorized)
class TileValues(GlobalValues):
def init(self, zeroKey='Entity'):
'''Compute a global value function map excluding other agents. This
requires a forward pass for every tile and will be slow on large maps'''
super().init(zeroKey)
class EntityValues(GlobalValues):
def init(self, zeroKey='Tile'):
'''Compute a global value function map excluding tiles. This
requires a forward pass for every tile and will be slow on large maps'''
super().init(zeroKey)
class RLlibTrainer(ppo.PPOTrainer):
def __init__(self, config, env=None, logger_creator=None):
super().__init__(config, env, logger_creator)
self.env_config = config['env_config']['config']
#1/sqrt(2)=76% win chance within beta, 95% win chance vs 3*beta=100 SR
trueskill.setup(mu=1000, sigma=2*100/3, beta=100/3, tau=2/3, draw_probability=0)
self.ratings = [{agent.__name__: trueskill.Rating(mu=1000, sigma=2*100/3)}
for agent in self.env_config.EVAL_AGENTS]
self.reset_scripted()
def reset_scripted(self):
for rating_dict in self.ratings:
for agent, rating in rating_dict.items():
if agent == 'Combat':
rating_dict[agent] = trueskill.Rating(mu=1500, sigma=1)
def post_mean(self, stats):
for key, vals in stats.items():
if type(vals) == list:
stats[key] = np.mean(vals)
def train(self):
stats = super().train()
self.post_mean(stats['custom_metrics'])
return stats
def evaluate(self):
stat_dict = super().evaluate()
stats = stat_dict['evaluation']['custom_metrics']
ranks = {agent.__name__: -1 for agent in self.env_config.EVAL_AGENTS}
for key in list(stats.keys()):
if key.startswith('Rank_'):
stat = stats[key]
del stats[key]
agent = key[5:]
ranks[agent] = stat
ranks = list(ranks.values())
nEnvs = len(ranks[0])
#Once RLlib adds better custom metric support,
#there should be a cleaner way to divide episodes into blocks
for i in range(nEnvs):
env_ranks = [e[i] for e in ranks]
self.ratings = trueskill.rate(self.ratings, env_ranks)
self.reset_scripted()
for rating in self.ratings:
key = 'SR_{}'.format(list(rating.keys())[0])
val = list(rating.values())[0]
stats[key] = val.mu
return stat_dict
###############################################################################
### Logging
class RLlibLogCallbacks(DefaultCallbacks):
def on_episode_end(self, *, worker, base_env, policies, episode, **kwargs):
assert len(base_env.envs) == 1, 'One env per worker'
env = base_env.envs[0]
logs = env.terminal()
for key, vals in logs['Stats'].items():
episode.custom_metrics[key] = np.mean(vals)
if not env.config.EVALUATE:
return
agents = defaultdict(list)
stats = logs['Stats']
policy_ids = stats['PolicyID']
scores = stats['Achievement']
invMap = {agent.policyID: agent for agent in env.config.AGENTS}
for policyID, score in zip(policy_ids, scores):
policy = invMap[policyID]
agents[policy].append(score)
for agent in agents:
agents[agent] = np.mean(agents[agent])
policies = list(agents.keys())
scores = list(agents.values())
idxs = np.argsort(-np.array(scores))
for rank, idx in enumerate(idxs):
key = 'Rank_{}'.format(policies[idx].__name__)
episode.custom_metrics[key] = rank
|
[
"trueskill.setup",
"torch.nn.Embedding",
"gym.spaces.Discrete",
"torch.cat",
"collections.defaultdict",
"neural_mmo.forge.ethyr.torch.policy.attention.DotReluBlock",
"torch.nn.ModuleDict",
"numpy.mean",
"torch.nn.utils.rnn.pad_packed_sequence",
"torch.arange",
"neural_mmo.forge.blade.io.stimulus.static.Stimulus.values",
"neural_mmo.forge.ethyr.torch.policy.SelfAttention",
"torch.Tensor",
"torch.nn.Linear",
"trueskill.Rating",
"trueskill.rate",
"tqdm.tqdm",
"torch.norm",
"torch.nn.Conv2d",
"torch.nn.Module.__init__",
"torch.nn.MaxPool2d",
"numpy.zeros",
"numpy.array",
"gym.spaces.Box",
"neural_mmo.forge.ethyr.torch.policy.BatchFirstLSTM",
"neural_mmo.forge.blade.lib.overlay.twoTone"
] |
[((13444, 13532), 'gym.spaces.Box', 'gym.spaces.Box', ([], {'low': '(0)', 'high': 'config.N_AGENT_OBS', 'shape': '(1,)', 'dtype': 'DataType.DISCRETE'}), '(low=0, high=config.N_AGENT_OBS, shape=(1,), dtype=DataType.\n DISCRETE)\n', (13458, 13532), False, 'import gym\n'), ((1606, 1621), 'torch.nn.ModuleDict', 'nn.ModuleDict', ([], {}), '()\n', (1619, 1621), False, 'from torch import nn\n'), ((1646, 1661), 'torch.nn.ModuleDict', 'nn.ModuleDict', ([], {}), '()\n', (1659, 1661), False, 'from torch import nn\n'), ((2123, 2179), 'torch.Tensor', 'torch.Tensor', (['[1.0, 0.0, 0.02, 0.02, 0.0, 0.0, 0.0, 0.0]'], {}), '([1.0, 0.0, 0.02, 0.02, 0.0, 0.0, 0.0, 0.0])\n', (2135, 2179), False, 'import torch\n'), ((2204, 2321), 'torch.Tensor', 'torch.Tensor', (['[1.0, 0.0, 0.0, 0.05, 0.0, 0.02, 0.02, 0.1, 0.01, 0.1, 0.1, 0.1, 0.3, 0.0, \n 0, 0, 0, 0, 0, 0, 0, 0]'], {}), '([1.0, 0.0, 0.0, 0.05, 0.0, 0.02, 0.02, 0.1, 0.01, 0.1, 0.1, \n 0.1, 0.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0])\n', (2216, 2321), False, 'import torch\n'), ((3709, 3739), 'torch.nn.Embedding', 'nn.Embedding', (['Action.n', 'self.h'], {}), '(Action.n, self.h)\n', (3721, 3739), False, 'from torch import nn\n'), ((4299, 4316), 'collections.defaultdict', 'defaultdict', (['dict'], {}), '(dict)\n', (4310, 4316), False, 'from collections import defaultdict\n'), ((5033, 5058), 'neural_mmo.forge.ethyr.torch.policy.attention.DotReluBlock', 'attention.DotReluBlock', (['h'], {}), '(h)\n', (5055, 5058), False, 'from neural_mmo.forge.ethyr.torch.policy import attention\n'), ((5700, 5727), 'torch.nn.Linear', 'nn.Linear', (['config.HIDDEN', '(1)'], {}), '(config.HIDDEN, 1)\n', (5709, 5727), False, 'from torch import nn\n'), ((6833, 6852), 'torch.nn.Linear', 'nn.Linear', (['(2 * h)', 'h'], {}), '(2 * h, h)\n', (6842, 6852), False, 'from torch import nn\n'), ((6871, 6889), 'torch.nn.Conv2d', 'nn.Conv2d', (['h', 'h', '(3)'], {}), '(h, h, 3)\n', (6880, 6889), False, 'from torch import nn\n'), ((6910, 6925), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)'], {}), '(2)\n', (6922, 6925), False, 'from torch import nn\n'), ((6946, 6969), 'torch.nn.Linear', 'nn.Linear', (['(h * 6 * 6)', 'h'], {}), '(h * 6 * 6, h)\n', (6955, 6969), False, 'from torch import nn\n'), ((6987, 7006), 'torch.nn.Linear', 'nn.Linear', (['(2 * h)', 'h'], {}), '(2 * h, h)\n', (6996, 7006), False, 'from torch import nn\n'), ((7025, 7060), 'neural_mmo.forge.ethyr.torch.policy.SelfAttention', 'policy.SelfAttention', (['self.embed', 'h'], {}), '(self.embed, h)\n', (7045, 7060), False, 'from neural_mmo.forge.ethyr.torch import policy\n'), ((7251, 7289), 'torch.cat', 'torch.cat', (['(selfEmb, agentEmb)'], {'dim': '(-1)'}), '((selfEmb, agentEmb), dim=-1)\n', (7260, 7289), False, 'import torch\n'), ((7482, 7512), 'torch.norm', 'torch.norm', (['tiles'], {'p': '(2)', 'dim': '(-1)'}), '(tiles, p=2, dim=-1)\n', (7492, 7512), False, 'import torch\n'), ((7847, 7881), 'torch.cat', 'torch.cat', (['(agents, tiles)'], {'dim': '(-1)'}), '((agents, tiles), dim=-1)\n', (7856, 7881), False, 'import torch\n'), ((8088, 8162), 'neural_mmo.forge.ethyr.torch.policy.BatchFirstLSTM', 'policy.BatchFirstLSTM', ([], {'input_size': 'config.HIDDEN', 'hidden_size': 'config.HIDDEN'}), '(input_size=config.HIDDEN, hidden_size=config.HIDDEN)\n', (8109, 8162), False, 'from neural_mmo.forge.ethyr.torch import policy\n'), ((9295, 9369), 'torch.nn.utils.rnn.pad_packed_sequence', 'rnn.pad_packed_sequence', ([], {'sequence': 'hidden', 'batch_first': '(True)', 'total_length': 'T'}), '(sequence=hidden, batch_first=True, total_length=T)\n', (9318, 9369), False, 'from torch.nn.utils import rnn\n'), ((9846, 9870), 'torch.nn.Module.__init__', 'nn.Module.__init__', (['self'], {}), '(self)\n', (9864, 9870), False, 'from torch import nn\n'), ((12811, 12832), 'collections.defaultdict', 'defaultdict', (['FlexDict'], {}), '(FlexDict)\n', (12822, 12832), False, 'from collections import defaultdict\n'), ((12858, 12875), 'neural_mmo.forge.blade.io.stimulus.static.Stimulus.values', 'Stimulus.values', ([], {}), '()\n', (12873, 12875), False, 'from neural_mmo.forge.blade.io.stimulus.static import Stimulus\n'), ((13145, 13246), 'gym.spaces.Box', 'gym.spaces.Box', ([], {'low': '(-2 ** 20)', 'high': '(2 ** 20)', 'shape': '(nRows, nContinuous)', 'dtype': 'DataType.CONTINUOUS'}), '(low=-2 ** 20, high=2 ** 20, shape=(nRows, nContinuous),\n dtype=DataType.CONTINUOUS)\n', (13159, 13246), False, 'import gym\n'), ((13308, 13396), 'gym.spaces.Box', 'gym.spaces.Box', ([], {'low': '(0)', 'high': '(4096)', 'shape': '(nRows, nDiscrete)', 'dtype': 'DataType.DISCRETE'}), '(low=0, high=4096, shape=(nRows, nDiscrete), dtype=DataType.\n DISCRETE)\n', (13322, 13396), False, 'import gym\n'), ((13606, 13627), 'collections.defaultdict', 'defaultdict', (['FlexDict'], {}), '(FlexDict)\n', (13617, 13627), False, 'from collections import defaultdict\n'), ((14616, 14633), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (14627, 14633), False, 'from collections import defaultdict\n'), ((15090, 15108), 'numpy.zeros', 'np.zeros', (['(sz, sz)'], {}), '((sz, sz))\n', (15098, 15108), True, 'import numpy as np\n'), ((15324, 15345), 'neural_mmo.forge.blade.lib.overlay.twoTone', 'overlay.twoTone', (['data'], {}), '(data)\n', (15339, 15345), False, 'from neural_mmo.forge.blade.lib import overlay\n'), ((15907, 15941), 'neural_mmo.forge.blade.lib.overlay.twoTone', 'overlay.twoTone', (['self.values[:, :]'], {}), '(self.values[:, :])\n', (15922, 15941), False, 'from neural_mmo.forge.blade.lib import overlay\n'), ((16527, 16536), 'tqdm.tqdm', 'tqdm', (['obs'], {}), '(obs)\n', (16531, 16536), False, 'from tqdm import tqdm\n'), ((17027, 17050), 'neural_mmo.forge.blade.lib.overlay.twoTone', 'overlay.twoTone', (['values'], {}), '(values)\n', (17042, 17050), False, 'from neural_mmo.forge.blade.lib import overlay\n'), ((18002, 18094), 'trueskill.setup', 'trueskill.setup', ([], {'mu': '(1000)', 'sigma': '(2 * 100 / 3)', 'beta': '(100 / 3)', 'tau': '(2 / 3)', 'draw_probability': '(0)'}), '(mu=1000, sigma=2 * 100 / 3, beta=100 / 3, tau=2 / 3,\n draw_probability=0)\n', (18017, 18094), False, 'import trueskill\n'), ((20175, 20192), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (20186, 20192), False, 'from collections import defaultdict\n'), ((1870, 1935), 'torch.nn.Linear', 'nn.Linear', (['((continuous + discrete) * config.HIDDEN)', 'config.HIDDEN'], {}), '((continuous + discrete) * config.HIDDEN, config.HIDDEN)\n', (1879, 1935), False, 'from torch import nn\n'), ((10555, 10579), 'torch.cat', 'torch.cat', (['logits'], {'dim': '(1)'}), '(logits, dim=1)\n', (10564, 10579), False, 'import torch\n'), ((13767, 13789), 'gym.spaces.Discrete', 'gym.spaces.Discrete', (['n'], {}), '(n)\n', (13786, 13789), False, 'import gym\n'), ((19412, 19451), 'trueskill.rate', 'trueskill.rate', (['self.ratings', 'env_ranks'], {}), '(self.ratings, env_ranks)\n', (19426, 19451), False, 'import trueskill\n'), ((20093, 20106), 'numpy.mean', 'np.mean', (['vals'], {}), '(vals)\n', (20100, 20106), True, 'import numpy as np\n'), ((20562, 20584), 'numpy.mean', 'np.mean', (['agents[agent]'], {}), '(agents[agent])\n', (20569, 20584), True, 'import numpy as np\n'), ((15279, 15304), 'numpy.mean', 'np.mean', (['attentions[tile]'], {}), '(attentions[tile])\n', (15286, 15304), True, 'import numpy as np\n'), ((18123, 18167), 'trueskill.Rating', 'trueskill.Rating', ([], {'mu': '(1000)', 'sigma': '(2 * 100 / 3)'}), '(mu=1000, sigma=2 * 100 / 3)\n', (18139, 18167), False, 'import trueskill\n'), ((18603, 18616), 'numpy.mean', 'np.mean', (['vals'], {}), '(vals)\n', (18610, 18616), True, 'import numpy as np\n'), ((20692, 20708), 'numpy.array', 'np.array', (['scores'], {}), '(scores)\n', (20700, 20708), True, 'import numpy as np\n'), ((11860, 11873), 'numpy.array', 'np.array', (['pre'], {}), '(pre)\n', (11868, 11873), True, 'import numpy as np\n'), ((11898, 11912), 'numpy.array', 'np.array', (['post'], {}), '(post)\n', (11906, 11912), True, 'import numpy as np\n'), ((18438, 18472), 'trueskill.Rating', 'trueskill.Rating', ([], {'mu': '(1500)', 'sigma': '(1)'}), '(mu=1500, sigma=1)\n', (18454, 18472), False, 'import trueskill\n'), ((5175, 5200), 'torch.arange', 'torch.arange', (['x.shape[-1]'], {}), '(x.shape[-1])\n', (5187, 5200), False, 'import torch\n')]
|
"""
"""
import functools
import xarray as xr
from .utils import (
    unzip,
    progbar,
    Timer,
    benchmark,
    Benchmarker,
    RunningStatistics,
    estimate_from_repeats,
)
from .gen.combo_runner import (
    combo_runner,
    combo_runner_to_ds,
)
from .gen.case_runner import (
    case_runner,
    find_union_coords,
    all_missing_ds,
    case_runner_to_ds,
    case_runner_to_df,
    find_missing_cases,
    fill_missing_cases
)
from .gen.batch import (
    Crop,
    grow,
)
from .gen.farming import (
    Runner,
    Harvester,
    label,
    Sampler,
)
from .manage import (
    cache_to_disk,
    save_ds,
    load_ds,
    save_df,
    load_df,
    trimna,
    sort_dims,
    check_runs,
    auto_xyz_ds,
    merge_sync_conflict_datasets,
    post_fix,
)
from .signal import (
    xr_diff_fornberg,
    xr_diff_u,
    xr_diff_u_err,
    xr_interp,
    xr_interp_pchip,
    xr_filter_wiener,
    xr_filtfilt_butter,
    xr_filtfilt_bessel,
    xr_unispline,
    xr_polyfit,
)
from .plot.color import (
    convert_colors,
)
# Making static plots with matplotlib
from .plot.plotter_matplotlib import (
    LinePlot,
    lineplot,
    AutoLinePlot,
    auto_lineplot,
    Scatter,
    scatter,
    AutoScatter,
    auto_scatter,
    Histogram,
    histogram,
    AutoHistogram,
    auto_histogram,
    HeatMap,
    heatmap,
    AutoHeatMap,
    auto_heatmap,
    visualize_matrix
)
# Making interactive plots with bokeh
from .plot.plotter_bokeh import (
    ilineplot,
    auto_ilineplot,
    iscatter,
    auto_iscatter,
    iheatmap,
    auto_iheatmap,
)

# versioneer
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions


__all__ = [
    "Runner",
    "Harvester",
    "Sampler",
    "label",
    "combo_runner",
    "combo_runner_to_ds",
    "case_runner",
    "find_union_coords",
    "all_missing_ds",
    "case_runner_to_ds",
    "case_runner_to_df",
    "find_missing_cases",
    "fill_missing_cases",
    "Crop",
    "grow",
    "cache_to_disk",
    "save_ds",
    "load_ds",
    "save_df",
    "load_df",
    "trimna",
    "sort_dims",
    "check_runs",
    "merge_sync_conflict_datasets",
    "auto_xyz_ds",
    "convert_colors",
    "LinePlot",
    "lineplot",
    "auto_lineplot",
    "AutoLinePlot",
    "Scatter",
    "scatter",
    "AutoScatter",
    "auto_scatter",
    "Histogram",
    "histogram",
    "AutoHistogram",
    "auto_histogram",
    "HeatMap",
    "heatmap",
    "AutoHeatMap",
    "auto_heatmap",
    "ilineplot",
    "auto_ilineplot",
    "iscatter",
    "auto_iscatter",
    "iheatmap",
    "auto_iheatmap",
    "visualize_matrix",
    "unzip",
    "progbar",
    "Timer",
    "benchmark",
    "Benchmarker",
    "RunningStatistics",
    "estimate_from_repeats",
    "xr_diff_fornberg",
    "xr_diff_u",
    "xr_diff_u_err",
    "xr_interp",
    "xr_interp_pchip",
    "xr_filter_wiener",
    "xr_filtfilt_butter",
    "xr_filtfilt_bessel",
    "xr_unispline",
    "xr_polyfit",
]


class XYZPY(object):

    def __init__(self, xarray_obj):
        self._obj = xarray_obj

    # ------------------------------- Plotting ------------------------------ #

    @functools.wraps(LinePlot)
    def LinePlot(self, *args, **kwargs):
        return LinePlot(self._obj, *args, **kwargs)

    @functools.wraps(lineplot)
    def lineplot(self, *args, **kwargs):
        return lineplot(self._obj, *args, **kwargs)

    @functools.wraps(Scatter)
    def Scatter(self, *args, **kwargs):
        return Scatter(self._obj, *args, **kwargs)

    @functools.wraps(scatter)
    def scatter(self, *args, **kwargs):
        return scatter(self._obj, *args, **kwargs)

    @functools.wraps(Histogram)
    def Histogram(self, *args, **kwargs):
        return Histogram(self._obj, *args, **kwargs)

    @functools.wraps(histogram)
    def histogram(self, *args, **kwargs):
        return histogram(self._obj, *args, **kwargs)

    @functools.wraps(HeatMap)
    def HeatMap(self, *args, **kwargs):
        return HeatMap(self._obj, *args, **kwargs)

    @functools.wraps(heatmap)
    def heatmap(self, *args, **kwargs):
        return heatmap(self._obj, *args, **kwargs)

    @functools.wraps(ilineplot)
    def ilineplot(self, *args, **kwargs):
        return ilineplot(self._obj, *args, **kwargs)

    @functools.wraps(iscatter)
    def iscatter(self, *args, **kwargs):
        return iscatter(self._obj, *args, **kwargs)

    @functools.wraps(iheatmap)
    def iheatmap(self, *args, **kwargs):
        return iheatmap(self._obj, *args, **kwargs)

    # ----------------------------- Processing ------------------------------ #

    @functools.wraps(trimna)
    def trimna(self):
        return trimna(self._obj)

    @functools.wraps(post_fix)
    def post_fix(self, postfix):
        return post_fix(self._obj, postfix)

    @functools.wraps(xr_diff_fornberg)
    def diff_fornberg(self, dim, ix=100, order=1, mode='points', window=5):
        return xr_diff_fornberg(self._obj, dim=dim, ix=ix, order=order,
                                 mode=mode, window=window)

    @functools.wraps(xr_diff_u)
    def diff_u(self, dim):
        return xr_diff_u(self._obj, dim=dim)

    @functools.wraps(xr_diff_u_err)
    def diff_u_err(self, dim):
        return xr_diff_u_err(self._obj, dim=dim)

    @functools.wraps(xr_interp)
    def interp(self, dim, ix=100, order=3):
        return xr_interp(self._obj, dim=dim, ix=ix, order=order)

    @functools.wraps(xr_interp_pchip)
    def interp_pchip(self, dim, ix=100):
        return xr_interp_pchip(self._obj, dim=dim, ix=ix)

    @functools.wraps(xr_filter_wiener)
    def filter_wiener(self, dim, mysize=5, noise=1e-2):
        return xr_filter_wiener(self._obj, dim=dim, mysize=mysize, noise=noise)

    @functools.wraps(xr_filtfilt_butter)
    def filtfilt_butter(self, dim, N=2, Wn=0.4):
        return xr_filtfilt_butter(self._obj, dim=dim, N=N, Wn=Wn)

    @functools.wraps(xr_filtfilt_bessel)
    def filtfilt_bessel(self, dim, N=2, Wn=0.4):
        return xr_filtfilt_bessel(self._obj, dim=dim, N=N, Wn=Wn)

    @functools.wraps(xr_unispline)
    def unispline(self, dim, err=None, num_knots=11, ix=None):
        return xr_unispline(self._obj, dim=dim, err=err,
                            num_knots=num_knots, ix=ix)

    @functools.wraps(xr_polyfit)
    def polyfit(self, dim, ix=None, deg=0.5, poly='chebyshev'):
        return xr_polyfit(self._obj, dim=dim, ix=ix, deg=deg, poly=poly)
xr.register_dataarray_accessor('xyz')(XYZPY)
xr.register_dataset_accessor('xyz')(XYZPY)
|
[
"xarray.register_dataarray_accessor",
"xarray.register_dataset_accessor",
"functools.wraps"
] |
[((3153, 3178), 'functools.wraps', 'functools.wraps', (['LinePlot'], {}), '(LinePlot)\n', (3168, 3178), False, 'import functools\n'), ((3278, 3303), 'functools.wraps', 'functools.wraps', (['lineplot'], {}), '(lineplot)\n', (3293, 3303), False, 'import functools\n'), ((3403, 3427), 'functools.wraps', 'functools.wraps', (['Scatter'], {}), '(Scatter)\n', (3418, 3427), False, 'import functools\n'), ((3525, 3549), 'functools.wraps', 'functools.wraps', (['scatter'], {}), '(scatter)\n', (3540, 3549), False, 'import functools\n'), ((3647, 3673), 'functools.wraps', 'functools.wraps', (['Histogram'], {}), '(Histogram)\n', (3662, 3673), False, 'import functools\n'), ((3775, 3801), 'functools.wraps', 'functools.wraps', (['histogram'], {}), '(histogram)\n', (3790, 3801), False, 'import functools\n'), ((3903, 3927), 'functools.wraps', 'functools.wraps', (['HeatMap'], {}), '(HeatMap)\n', (3918, 3927), False, 'import functools\n'), ((4025, 4049), 'functools.wraps', 'functools.wraps', (['heatmap'], {}), '(heatmap)\n', (4040, 4049), False, 'import functools\n'), ((4147, 4173), 'functools.wraps', 'functools.wraps', (['ilineplot'], {}), '(ilineplot)\n', (4162, 4173), False, 'import functools\n'), ((4275, 4300), 'functools.wraps', 'functools.wraps', (['iscatter'], {}), '(iscatter)\n', (4290, 4300), False, 'import functools\n'), ((4400, 4425), 'functools.wraps', 'functools.wraps', (['iheatmap'], {}), '(iheatmap)\n', (4415, 4425), False, 'import functools\n'), ((4606, 4629), 'functools.wraps', 'functools.wraps', (['trimna'], {}), '(trimna)\n', (4621, 4629), False, 'import functools\n'), ((4691, 4716), 'functools.wraps', 'functools.wraps', (['post_fix'], {}), '(post_fix)\n', (4706, 4716), False, 'import functools\n'), ((4800, 4833), 'functools.wraps', 'functools.wraps', (['xr_diff_fornberg'], {}), '(xr_diff_fornberg)\n', (4815, 4833), False, 'import functools\n'), ((5046, 5072), 'functools.wraps', 'functools.wraps', (['xr_diff_u'], {}), '(xr_diff_u)\n', (5061, 5072), False, 'import functools\n'), ((5151, 5181), 'functools.wraps', 'functools.wraps', (['xr_diff_u_err'], {}), '(xr_diff_u_err)\n', (5166, 5181), False, 'import functools\n'), ((5268, 5294), 'functools.wraps', 'functools.wraps', (['xr_interp'], {}), '(xr_interp)\n', (5283, 5294), False, 'import functools\n'), ((5410, 5442), 'functools.wraps', 'functools.wraps', (['xr_interp_pchip'], {}), '(xr_interp_pchip)\n', (5425, 5442), False, 'import functools\n'), ((5548, 5581), 'functools.wraps', 'functools.wraps', (['xr_filter_wiener'], {}), '(xr_filter_wiener)\n', (5563, 5581), False, 'import functools\n'), ((5724, 5759), 'functools.wraps', 'functools.wraps', (['xr_filtfilt_butter'], {}), '(xr_filtfilt_butter)\n', (5739, 5759), False, 'import functools\n'), ((5881, 5916), 'functools.wraps', 'functools.wraps', (['xr_filtfilt_bessel'], {}), '(xr_filtfilt_bessel)\n', (5896, 5916), False, 'import functools\n'), ((6038, 6067), 'functools.wraps', 'functools.wraps', (['xr_unispline'], {}), '(xr_unispline)\n', (6053, 6067), False, 'import functools\n'), ((6250, 6277), 'functools.wraps', 'functools.wraps', (['xr_polyfit'], {}), '(xr_polyfit)\n', (6265, 6277), False, 'import functools\n'), ((6417, 6454), 'xarray.register_dataarray_accessor', 'xr.register_dataarray_accessor', (['"""xyz"""'], {}), "('xyz')\n", (6447, 6454), True, 'import xarray as xr\n'), ((6462, 6497), 'xarray.register_dataset_accessor', 'xr.register_dataset_accessor', (['"""xyz"""'], {}), "('xyz')\n", (6490, 6497), True, 'import xarray as xr\n')]
|
# -*- coding: utf-8 -*-
# @Time : 2018/6/7
# @Author : geekspeng
# @Email : <EMAIL>
import os
from shutil import move
from flask import render_template, url_for, redirect, request, flash, current_app, send_from_directory
from flask_login import login_required, current_user
from flask_uploads import extension
from markupsafe import Markup
from app import uploads
from app.main import main
from app.main.forms import ConvertBookForm
from app.models.books import Books, Data
from app.utils import book_format, helper
from app.utils.email import send_book
@main.route("/", defaults={'page': 1})
@main.route('/page/<int:page>')
@login_required
def index(page):
pagination = Books.get_books_pagination(page=page, per_page=current_app.config['BOOKS_PER_PAGE'], db_filter=True,
order=Books.timestamp.desc())
books = pagination.items
return render_template('main/index.html', books=books, pagination=pagination, title="Home")
@main.route("/search", methods=["GET"])
@login_required
def search():
term = request.args.get("query").strip().lower()
books = []
if term:
books = Books.search_book(term)
return render_template('main/index.html', books=books, pagination=None, title="Results")
@main.route("/upload", methods=["POST"])
@login_required
def upload():
for file in request.files.getlist('btn-upload'):
if not uploads.extension_allowed(extension(file.filename)):
flash(Markup('File extension "%s" is not allowed to be uploaded to this server' % extension(file.filename)),
"warning")
return redirect(url_for('main.index'))
# save file
file_name, file_extension = os.path.splitext(file.filename)
folder = os.path.join(current_user.email, file_name)
file_path = helper.resolve_folder_conflict(current_app.config['UPLOAD_FOLDER'], folder)
saved_file_name = uploads.save(file, folder=file_path, name=file.filename)
# get book meta
meta = book_format.process(uploads.path(saved_file_name), file_path, file_name, file_extension[1:])
# move file
if meta.title != meta.file_name:
new_file_path = os.path.join(current_app.config['UPLOAD_FOLDER'], file_path,
meta.title + file_extension)
move(uploads.path(saved_file_name), new_file_path)
if meta.cover_path:
new_cover_path = os.path.join(current_app.config['UPLOAD_FOLDER'], file_path,
meta.title + '.jpg')
move(meta.cover_path, new_cover_path)
# add book
Books.add_book(meta)
flash(Markup('File upload completed.'), "info")
return redirect(url_for('main.index'))
@main.route("/delete/<int:book_id>/")
@login_required
def delete_book(book_id):
Books.delete_book(book_id)
flash(Markup('Book successfully deleted'), "info")
return redirect(url_for('main.index'))
@main.route('/send/<int:book_id>')
@login_required
def send_to_kindle(book_id):
if current_user.kindle_email:
book = Books.query.get_or_404(book_id)
res = send_book(current_user, book)
if res:
flash(Markup(res), "warning")
else:
flash(Markup("Book successfully queued for sending to %s" % current_user.kindle_mail), "success")
else:
flash(Markup("Please configure your kindle e-mail address first..."), "error")
return redirect(url_for('auth.change_kindle_email'))
return redirect(url_for('main.index'))
@main.route("/book/convert/<int:book_id>", methods=['GET', 'POST'])
@login_required
def convert_book(book_id):
book = Books.query.get_or_404(book_id)
form = ConvertBookForm(book)
if form.validate_on_submit():
book_format_from = form.convert_from.data
book_format_to = form.convert_to.data
if current_app.config['CONVERT_TOOL_PATH'] is None:
flash(Markup("ebook-converter failed, there is no conversion tool"), "success")
return redirect(url_for('main.index'))
result = Books.convert_book_format(book_id, current_app.config['CONVERT_TOOL_PATH'], book_format_from.upper(),
book_format_to.upper())
if result is None:
flash(Markup("Book successfully converted"), "success")
return redirect(url_for('main.index'))
else:
flash(Markup("There was an error converting this book: %s" % result), "error")
return render_template('main/convert_book.html', title='Convert Book', book=book, form=form)
@main.route("/read/<file_format>/<int:book_id>")
@login_required
def read_book(file_format, book_id):
book = Books.query.get_or_404(book_id)
file_name = book.title + '.' + file_format.lower()
if file_format.lower() == "pdf":
return render_template('main/readpdf.html', book_id=book_id, file_name=file_name)
elif file_format.lower() == "txt":
return render_template('main/readtxt.html', book_id=book_id, file_name=file_name)
elif file_format.lower() == "epub":
return render_template('main/readepub.html', book_id=book_id, file_name=file_name)
@main.route("/book/<int:book_id>/<file_name>")
@login_required
def get_book(book_id, file_name):
book = Books.query.get_or_404(book_id)
file_path = os.path.join(book.path, file_name)
return send_from_directory(current_app.config['UPLOAD_FOLDER'], file_path)
@main.route("/cover/<int:book_id>")
@login_required
def get_cover(book_id):
book = Books.query.get_or_404(book_id)
cover_path = os.path.join(book.path, book.title + '.jpg')
return send_from_directory(current_app.config['UPLOAD_FOLDER'], cover_path)
|
[
"flask.url_for",
"app.models.books.Books.search_book",
"app.main.forms.ConvertBookForm",
"os.path.join",
"app.models.books.Books.timestamp.desc",
"flask.request.args.get",
"flask.request.files.getlist",
"app.utils.helper.resolve_folder_conflict",
"flask_uploads.extension",
"app.main.main.route",
"flask.render_template",
"flask.send_from_directory",
"app.uploads.save",
"app.utils.email.send_book",
"app.uploads.path",
"app.models.books.Books.delete_book",
"app.models.books.Books.query.get_or_404",
"app.models.books.Books.add_book",
"markupsafe.Markup",
"os.path.splitext",
"shutil.move"
] |
[((565, 602), 'app.main.main.route', 'main.route', (['"""/"""'], {'defaults': "{'page': 1}"}), "('/', defaults={'page': 1})\n", (575, 602), False, 'from app.main import main\n'), ((604, 634), 'app.main.main.route', 'main.route', (['"""/page/<int:page>"""'], {}), "('/page/<int:page>')\n", (614, 634), False, 'from app.main import main\n'), ((988, 1026), 'app.main.main.route', 'main.route', (['"""/search"""'], {'methods': "['GET']"}), "('/search', methods=['GET'])\n", (998, 1026), False, 'from app.main import main\n'), ((1274, 1313), 'app.main.main.route', 'main.route', (['"""/upload"""'], {'methods': "['POST']"}), "('/upload', methods=['POST'])\n", (1284, 1313), False, 'from app.main import main\n'), ((2806, 2842), 'app.main.main.route', 'main.route', (['"""/delete/<int:book_id>/"""'], {}), "('/delete/<int:book_id>/')\n", (2816, 2842), False, 'from app.main import main\n'), ((3017, 3050), 'app.main.main.route', 'main.route', (['"""/send/<int:book_id>"""'], {}), "('/send/<int:book_id>')\n", (3027, 3050), False, 'from app.main import main\n'), ((3607, 3673), 'app.main.main.route', 'main.route', (['"""/book/convert/<int:book_id>"""'], {'methods': "['GET', 'POST']"}), "('/book/convert/<int:book_id>', methods=['GET', 'POST'])\n", (3617, 3673), False, 'from app.main import main\n'), ((4663, 4710), 'app.main.main.route', 'main.route', (['"""/read/<file_format>/<int:book_id>"""'], {}), "('/read/<file_format>/<int:book_id>')\n", (4673, 4710), False, 'from app.main import main\n'), ((5252, 5297), 'app.main.main.route', 'main.route', (['"""/book/<int:book_id>/<file_name>"""'], {}), "('/book/<int:book_id>/<file_name>')\n", (5262, 5297), False, 'from app.main import main\n'), ((5524, 5558), 'app.main.main.route', 'main.route', (['"""/cover/<int:book_id>"""'], {}), "('/cover/<int:book_id>')\n", (5534, 5558), False, 'from app.main import main\n'), ((900, 988), 'flask.render_template', 'render_template', (['"""main/index.html"""'], {'books': 'books', 'pagination': 'pagination', 'title': '"""Home"""'}), "('main/index.html', books=books, pagination=pagination,\n title='Home')\n", (915, 988), False, 'from flask import render_template, url_for, redirect, request, flash, current_app, send_from_directory\n'), ((1189, 1275), 'flask.render_template', 'render_template', (['"""main/index.html"""'], {'books': 'books', 'pagination': 'None', 'title': '"""Results"""'}), "('main/index.html', books=books, pagination=None, title=\n 'Results')\n", (1204, 1275), False, 'from flask import render_template, url_for, redirect, request, flash, current_app, send_from_directory\n'), ((1360, 1395), 'flask.request.files.getlist', 'request.files.getlist', (['"""btn-upload"""'], {}), "('btn-upload')\n", (1381, 1395), False, 'from flask import render_template, url_for, redirect, request, flash, current_app, send_from_directory\n'), ((2889, 2915), 'app.models.books.Books.delete_book', 'Books.delete_book', (['book_id'], {}), '(book_id)\n', (2906, 2915), False, 'from app.models.books import Books, Data\n'), ((3728, 3759), 'app.models.books.Books.query.get_or_404', 'Books.query.get_or_404', (['book_id'], {}), '(book_id)\n', (3750, 3759), False, 'from app.models.books import Books, Data\n'), ((3771, 3792), 'app.main.forms.ConvertBookForm', 'ConvertBookForm', (['book'], {}), '(book)\n', (3786, 3792), False, 'from app.main.forms import ConvertBookForm\n'), ((4574, 4663), 'flask.render_template', 'render_template', (['"""main/convert_book.html"""'], {'title': '"""Convert Book"""', 'book': 'book', 'form': 'form'}), "('main/convert_book.html', title='Convert Book', book=book,\n form=form)\n", (4589, 4663), False, 'from flask import render_template, url_for, redirect, request, flash, current_app, send_from_directory\n'), ((4775, 4806), 'app.models.books.Books.query.get_or_404', 'Books.query.get_or_404', (['book_id'], {}), '(book_id)\n', (4797, 4806), False, 'from app.models.books import Books, Data\n'), ((5359, 5390), 'app.models.books.Books.query.get_or_404', 'Books.query.get_or_404', (['book_id'], {}), '(book_id)\n', (5381, 5390), False, 'from app.models.books import Books, Data\n'), ((5407, 5441), 'os.path.join', 'os.path.join', (['book.path', 'file_name'], {}), '(book.path, file_name)\n', (5419, 5441), False, 'import os\n'), ((5453, 5520), 'flask.send_from_directory', 'send_from_directory', (["current_app.config['UPLOAD_FOLDER']", 'file_path'], {}), "(current_app.config['UPLOAD_FOLDER'], file_path)\n", (5472, 5520), False, 'from flask import render_template, url_for, redirect, request, flash, current_app, send_from_directory\n'), ((5610, 5641), 'app.models.books.Books.query.get_or_404', 'Books.query.get_or_404', (['book_id'], {}), '(book_id)\n', (5632, 5641), False, 'from app.models.books import Books, Data\n'), ((5659, 5703), 'os.path.join', 'os.path.join', (['book.path', "(book.title + '.jpg')"], {}), "(book.path, book.title + '.jpg')\n", (5671, 5703), False, 'import os\n'), ((5715, 5783), 'flask.send_from_directory', 'send_from_directory', (["current_app.config['UPLOAD_FOLDER']", 'cover_path'], {}), "(current_app.config['UPLOAD_FOLDER'], cover_path)\n", (5734, 5783), False, 'from flask import render_template, url_for, redirect, request, flash, current_app, send_from_directory\n'), ((1154, 1177), 'app.models.books.Books.search_book', 'Books.search_book', (['term'], {}), '(term)\n', (1171, 1177), False, 'from app.models.books import Books, Data\n'), ((1723, 1754), 'os.path.splitext', 'os.path.splitext', (['file.filename'], {}), '(file.filename)\n', (1739, 1754), False, 'import os\n'), ((1772, 1815), 'os.path.join', 'os.path.join', (['current_user.email', 'file_name'], {}), '(current_user.email, file_name)\n', (1784, 1815), False, 'import os\n'), ((1836, 1911), 'app.utils.helper.resolve_folder_conflict', 'helper.resolve_folder_conflict', (["current_app.config['UPLOAD_FOLDER']", 'folder'], {}), "(current_app.config['UPLOAD_FOLDER'], folder)\n", (1866, 1911), False, 'from app.utils import book_format, helper\n'), ((1938, 1994), 'app.uploads.save', 'uploads.save', (['file'], {'folder': 'file_path', 'name': 'file.filename'}), '(file, folder=file_path, name=file.filename)\n', (1950, 1994), False, 'from app import uploads\n'), ((2687, 2707), 'app.models.books.Books.add_book', 'Books.add_book', (['meta'], {}), '(meta)\n', (2701, 2707), False, 'from app.models.books import Books, Data\n'), ((2718, 2750), 'markupsafe.Markup', 'Markup', (['"""File upload completed."""'], {}), "('File upload completed.')\n", (2724, 2750), False, 'from markupsafe import Markup\n'), ((2780, 2801), 'flask.url_for', 'url_for', (['"""main.index"""'], {}), "('main.index')\n", (2787, 2801), False, 'from flask import render_template, url_for, redirect, request, flash, current_app, send_from_directory\n'), ((2926, 2961), 'markupsafe.Markup', 'Markup', (['"""Book successfully deleted"""'], {}), "('Book successfully deleted')\n", (2932, 2961), False, 'from markupsafe import Markup\n'), ((2991, 3012), 'flask.url_for', 'url_for', (['"""main.index"""'], {}), "('main.index')\n", (2998, 3012), False, 'from flask import render_template, url_for, redirect, request, flash, current_app, send_from_directory\n'), ((3145, 3176), 'app.models.books.Books.query.get_or_404', 'Books.query.get_or_404', (['book_id'], {}), '(book_id)\n', (3167, 3176), False, 'from app.models.books import Books, Data\n'), ((3191, 3220), 'app.utils.email.send_book', 'send_book', (['current_user', 'book'], {}), '(current_user, book)\n', (3200, 3220), False, 'from app.utils.email import send_book\n'), ((3581, 3602), 'flask.url_for', 'url_for', (['"""main.index"""'], {}), "('main.index')\n", (3588, 3602), False, 'from flask import render_template, url_for, redirect, request, flash, current_app, send_from_directory\n'), ((4914, 4988), 'flask.render_template', 'render_template', (['"""main/readpdf.html"""'], {'book_id': 'book_id', 'file_name': 'file_name'}), "('main/readpdf.html', book_id=book_id, file_name=file_name)\n", (4929, 4988), False, 'from flask import render_template, url_for, redirect, request, flash, current_app, send_from_directory\n'), ((836, 858), 'app.models.books.Books.timestamp.desc', 'Books.timestamp.desc', ([], {}), '()\n', (856, 858), False, 'from app.models.books import Books, Data\n'), ((2055, 2084), 'app.uploads.path', 'uploads.path', (['saved_file_name'], {}), '(saved_file_name)\n', (2067, 2084), False, 'from app import uploads\n'), ((2218, 2311), 'os.path.join', 'os.path.join', (["current_app.config['UPLOAD_FOLDER']", 'file_path', '(meta.title + file_extension)'], {}), "(current_app.config['UPLOAD_FOLDER'], file_path, meta.title +\n file_extension)\n", (2230, 2311), False, 'import os\n'), ((3427, 3489), 'markupsafe.Markup', 'Markup', (['"""Please configure your kindle e-mail address first..."""'], {}), "('Please configure your kindle e-mail address first...')\n", (3433, 3489), False, 'from markupsafe import Markup\n'), ((3524, 3559), 'flask.url_for', 'url_for', (['"""auth.change_kindle_email"""'], {}), "('auth.change_kindle_email')\n", (3531, 3559), False, 'from flask import render_template, url_for, redirect, request, flash, current_app, send_from_directory\n'), ((5043, 5117), 'flask.render_template', 'render_template', (['"""main/readtxt.html"""'], {'book_id': 'book_id', 'file_name': 'file_name'}), "('main/readtxt.html', book_id=book_id, file_name=file_name)\n", (5058, 5117), False, 'from flask import render_template, url_for, redirect, request, flash, current_app, send_from_directory\n'), ((1438, 1462), 'flask_uploads.extension', 'extension', (['file.filename'], {}), '(file.filename)\n', (1447, 1462), False, 'from flask_uploads import extension\n'), ((1643, 1664), 'flask.url_for', 'url_for', (['"""main.index"""'], {}), "('main.index')\n", (1650, 1664), False, 'from flask import render_template, url_for, redirect, request, flash, current_app, send_from_directory\n'), ((2366, 2395), 'app.uploads.path', 'uploads.path', (['saved_file_name'], {}), '(saved_file_name)\n', (2378, 2395), False, 'from app import uploads\n'), ((2477, 2562), 'os.path.join', 'os.path.join', (["current_app.config['UPLOAD_FOLDER']", 'file_path', "(meta.title + '.jpg')"], {}), "(current_app.config['UPLOAD_FOLDER'], file_path, meta.title +\n '.jpg')\n", (2489, 2562), False, 'import os\n'), ((2621, 2658), 'shutil.move', 'move', (['meta.cover_path', 'new_cover_path'], {}), '(meta.cover_path, new_cover_path)\n', (2625, 2658), False, 'from shutil import move\n'), ((3255, 3266), 'markupsafe.Markup', 'Markup', (['res'], {}), '(res)\n', (3261, 3266), False, 'from markupsafe import Markup\n'), ((3311, 3390), 'markupsafe.Markup', 'Markup', (["('Book successfully queued for sending to %s' % current_user.kindle_mail)"], {}), "('Book successfully queued for sending to %s' % current_user.kindle_mail)\n", (3317, 3390), False, 'from markupsafe import Markup\n'), ((4001, 4062), 'markupsafe.Markup', 'Markup', (['"""ebook-converter failed, there is no conversion tool"""'], {}), "('ebook-converter failed, there is no conversion tool')\n", (4007, 4062), False, 'from markupsafe import Markup\n'), ((4103, 4124), 'flask.url_for', 'url_for', (['"""main.index"""'], {}), "('main.index')\n", (4110, 4124), False, 'from flask import render_template, url_for, redirect, request, flash, current_app, send_from_directory\n'), ((4357, 4394), 'markupsafe.Markup', 'Markup', (['"""Book successfully converted"""'], {}), "('Book successfully converted')\n", (4363, 4394), False, 'from markupsafe import Markup\n'), ((4435, 4456), 'flask.url_for', 'url_for', (['"""main.index"""'], {}), "('main.index')\n", (4442, 4456), False, 'from flask import render_template, url_for, redirect, request, flash, current_app, send_from_directory\n'), ((4490, 4552), 'markupsafe.Markup', 'Markup', (["('There was an error converting this book: %s' % result)"], {}), "('There was an error converting this book: %s' % result)\n", (4496, 4552), False, 'from markupsafe import Markup\n'), ((5173, 5248), 'flask.render_template', 'render_template', (['"""main/readepub.html"""'], {'book_id': 'book_id', 'file_name': 'file_name'}), "('main/readepub.html', book_id=book_id, file_name=file_name)\n", (5188, 5248), False, 'from flask import render_template, url_for, redirect, request, flash, current_app, send_from_directory\n'), ((1068, 1093), 'flask.request.args.get', 'request.args.get', (['"""query"""'], {}), "('query')\n", (1084, 1093), False, 'from flask import render_template, url_for, redirect, request, flash, current_app, send_from_directory\n'), ((1559, 1583), 'flask_uploads.extension', 'extension', (['file.filename'], {}), '(file.filename)\n', (1568, 1583), False, 'from flask_uploads import extension\n')]
|
#!/usr/bin/env python
import re
from django.core.management.base import BaseCommand
from django.utils.translation import ugettext as _
from onadata.apps.restservice.models import RestService
from onadata.libs.utils.common_tags import TEXTIT
class Command(BaseCommand):
    help = _("Migrate TextIt/RapidPro v1 URLS to v2 URLS.")

    def add_arguments(self, parser):
        parser.add_argument(
            '--apply', default=False, help=_("Apply changes to database."))

    def handle(self, *args, **options):
        services = RestService.objects.filter(name=TEXTIT)
        force = options.get('apply')
        if force and force.lower() != 'true':
            self.stderr.write("--apply expects 'true' as a parameter value.")
            return

        v1 = re.compile(r'\/v1/runs')
        v2 = '/v2/flow_starts'
        for service in services:
            if v1.findall(service.service_url):
                original = service.service_url
                new_uri = re.sub(v1, v2, service.service_url)
                params = {'v1_url': original, 'v2_url': new_uri}
                if force.lower() == 'true':
                    service.service_url = new_uri
                    service.save()
                    self.stdout.write(
                        _("Changed %(v1_url)s to %(v2_url)s" % params))
                else:
                    self.stdout.write(
                        _("Will change %(v1_url)s to %(v2_url)s" % params))
|
[
"django.utils.translation.ugettext",
"re.sub",
"re.compile",
"onadata.apps.restservice.models.RestService.objects.filter"
] |
[((285, 333), 'django.utils.translation.ugettext', '_', (['"""Migrate TextIt/RapidPro v1 URLS to v2 URLS."""'], {}), "('Migrate TextIt/RapidPro v1 URLS to v2 URLS.')\n", (286, 333), True, 'from django.utils.translation import ugettext as _\n'), ((537, 576), 'onadata.apps.restservice.models.RestService.objects.filter', 'RestService.objects.filter', ([], {'name': 'TEXTIT'}), '(name=TEXTIT)\n', (563, 576), False, 'from onadata.apps.restservice.models import RestService\n'), ((772, 796), 're.compile', 're.compile', (['"""\\\\/v1/runs"""'], {}), "('\\\\/v1/runs')\n", (782, 796), False, 'import re\n'), ((444, 475), 'django.utils.translation.ugettext', '_', (['"""Apply changes to database."""'], {}), "('Apply changes to database.')\n", (445, 475), True, 'from django.utils.translation import ugettext as _\n'), ((983, 1018), 're.sub', 're.sub', (['v1', 'v2', 'service.service_url'], {}), '(v1, v2, service.service_url)\n', (989, 1018), False, 'import re\n'), ((1276, 1322), 'django.utils.translation.ugettext', '_', (["('Changed %(v1_url)s to %(v2_url)s' % params)"], {}), "('Changed %(v1_url)s to %(v2_url)s' % params)\n", (1277, 1322), True, 'from django.utils.translation import ugettext as _\n'), ((1409, 1459), 'django.utils.translation.ugettext', '_', (["('Will change %(v1_url)s to %(v2_url)s' % params)"], {}), "('Will change %(v1_url)s to %(v2_url)s' % params)\n", (1410, 1459), True, 'from django.utils.translation import ugettext as _\n')]
|
from unittest import mock
import graphene
from django.utils.functional import SimpleLazyObject
from freezegun import freeze_time
from .....page.error_codes import PageErrorCode
from .....page.models import PageType
from .....webhook.event_types import WebhookEventAsyncType
from .....webhook.payloads import generate_meta, generate_requestor
from ....tests.utils import assert_no_permission, get_graphql_content
PAGE_TYPE_CREATE_MUTATION = """
mutation PageTypeCreate($name: String, $slug: String, $addAttributes: [ID!]) {
pageTypeCreate(input: {
name: $name, slug: $slug, addAttributes: $addAttributes
}) {
pageType {
id
name
slug
attributes {
slug
}
}
errors {
code
field
message
attributes
}
}
}
"""
def test_page_type_create_as_staff(
staff_api_client,
tag_page_attribute,
author_page_attribute,
permission_manage_page_types_and_attributes,
):
# given
staff_user = staff_api_client.user
staff_user.user_permissions.add(permission_manage_page_types_and_attributes)
name = "Test page type"
slug = "test-page-type"
attributes = [author_page_attribute, tag_page_attribute]
variables = {
"name": name,
"slug": slug,
"addAttributes": [
graphene.Node.to_global_id("Attribute", attr.pk) for attr in attributes
],
}
# when
response = staff_api_client.post_graphql(PAGE_TYPE_CREATE_MUTATION, variables)
# then
content = get_graphql_content(response)
data = content["data"]["pageTypeCreate"]
page_type_data = data["pageType"]
errors = data["errors"]
assert not errors
assert page_type_data["name"] == name
assert page_type_data["slug"] == slug
assert len(page_type_data["attributes"]) == 2
assert {attr_data["slug"] for attr_data in page_type_data["attributes"]} == {
attr.slug for attr in attributes
}
@freeze_time("2022-05-12 12:00:00")
@mock.patch("saleor.plugins.webhook.plugin.get_webhooks_for_event")
@mock.patch("saleor.plugins.webhook.plugin.trigger_webhooks_async")
def test_page_type_create_trigger_webhook(
mocked_webhook_trigger,
mocked_get_webhooks_for_event,
any_webhook,
staff_api_client,
tag_page_attribute,
author_page_attribute,
permission_manage_page_types_and_attributes,
settings,
):
# given
mocked_get_webhooks_for_event.return_value = [any_webhook]
settings.PLUGINS = ["saleor.plugins.webhook.plugin.WebhookPlugin"]
staff_user = staff_api_client.user
staff_user.user_permissions.add(permission_manage_page_types_and_attributes)
name = "Test page type"
slug = "test-page-type"
attributes = [author_page_attribute, tag_page_attribute]
variables = {
"name": name,
"slug": slug,
"addAttributes": [
graphene.Node.to_global_id("Attribute", attr.pk) for attr in attributes
],
}
# when
response = staff_api_client.post_graphql(PAGE_TYPE_CREATE_MUTATION, variables)
page_type = PageType.objects.last()
# then
content = get_graphql_content(response)
data = content["data"]["pageTypeCreate"]
assert not data["errors"]
assert data["pageType"]
mocked_webhook_trigger.assert_called_once_with(
{
"id": graphene.Node.to_global_id("PageType", page_type.id),
"name": page_type.name,
"slug": page_type.slug,
"meta": generate_meta(
requestor_data=generate_requestor(
SimpleLazyObject(lambda: staff_api_client.user)
)
),
},
WebhookEventAsyncType.PAGE_TYPE_CREATED,
[any_webhook],
page_type,
SimpleLazyObject(lambda: staff_api_client.user),
)
def test_page_type_create_as_staff_no_perm(
staff_api_client, tag_page_attribute, author_page_attribute
):
# given
name = "<NAME>"
slug = "test-page-type"
attributes = [author_page_attribute, tag_page_attribute]
variables = {
"name": name,
"slug": slug,
"addAttributes": [
graphene.Node.to_global_id("Attribute", attr.pk) for attr in attributes
],
}
# when
response = staff_api_client.post_graphql(PAGE_TYPE_CREATE_MUTATION, variables)
# then
assert_no_permission(response)
def test_page_type_create_as_app(
app_api_client, tag_page_attribute, permission_manage_page_types_and_attributes
):
# given
app = app_api_client.app
app.permissions.add(permission_manage_page_types_and_attributes)
name = "<NAME>"
slug = "test-page-type"
variables = {
"name": name,
"slug": slug,
"addAttributes": [
graphene.Node.to_global_id("Attribute", tag_page_attribute.pk)
],
}
# when
response = app_api_client.post_graphql(PAGE_TYPE_CREATE_MUTATION, variables)
# then
content = get_graphql_content(response)
data = content["data"]["pageTypeCreate"]
page_type_data = data["pageType"]
errors = data["errors"]
assert not errors
assert page_type_data["name"] == name
assert page_type_data["slug"] == slug
assert len(page_type_data["attributes"]) == 1
assert page_type_data["attributes"][0]["slug"] == tag_page_attribute.slug
def test_page_type_create_as_app_no_perm(app_api_client, tag_page_attribute):
# given
name = "<NAME>"
slug = "test-page-type"
variables = {
"name": name,
"slug": slug,
"addAttributes": [
graphene.Node.to_global_id("Attribute", tag_page_attribute.pk)
],
}
# when
response = app_api_client.post_graphql(PAGE_TYPE_CREATE_MUTATION, variables)
# then
assert_no_permission(response)
def test_page_type_create_unique_slug_generated(
staff_api_client,
tag_page_attribute,
author_page_attribute,
permission_manage_page_types_and_attributes,
):
"""Ensure that unique slug is generated when slug is not given."""
# given
staff_user = staff_api_client.user
staff_user.user_permissions.add(permission_manage_page_types_and_attributes)
name_1 = "Test page type"
name_2 = "test page type"
slug = "test-page-type"
page_type = PageType.objects.create(name=name_1, slug=slug)
attributes = [author_page_attribute, tag_page_attribute]
variables = {
"name": name_2,
"addAttributes": [
graphene.Node.to_global_id("Attribute", attr.pk) for attr in attributes
],
}
# when
response = staff_api_client.post_graphql(PAGE_TYPE_CREATE_MUTATION, variables)
# then
content = get_graphql_content(response)
data = content["data"]["pageTypeCreate"]
page_type_data = data["pageType"]
errors = data["errors"]
assert not errors
assert PageType.objects.count() == 2
assert page_type_data["id"] != graphene.Node.to_global_id("PageType", page_type.pk)
assert page_type_data["name"] == name_2
assert page_type_data["slug"] == "test-page-type-2"
assert len(page_type_data["attributes"]) == 2
assert {attr_data["slug"] for attr_data in page_type_data["attributes"]} == {
attr.slug for attr in attributes
}
def test_page_type_create_duplicated_slug(
staff_api_client,
tag_page_attribute,
author_page_attribute,
permission_manage_page_types_and_attributes,
):
"""Ensure that unique errors is raised when page type with given slug exists."""
# given
staff_user = staff_api_client.user
staff_user.user_permissions.add(permission_manage_page_types_and_attributes)
name_1 = "Test page type"
name_2 = "test page type"
slug = "test-page-type"
PageType.objects.create(name=name_1, slug=slug)
attributes = [author_page_attribute, tag_page_attribute]
variables = {
"name": name_2,
"slug": slug,
"addAttributes": [
graphene.Node.to_global_id("Attribute", attr.pk) for attr in attributes
],
}
# when
response = staff_api_client.post_graphql(PAGE_TYPE_CREATE_MUTATION, variables)
# then
content = get_graphql_content(response)
data = content["data"]["pageTypeCreate"]
page_type_data = data["pageType"]
errors = data["errors"]
assert not page_type_data
assert len(errors) == 1
assert errors[0]["code"] == PageErrorCode.UNIQUE.name
assert errors[0]["field"] == "slug"
def test_page_type_create_not_valid_attributes(
staff_api_client,
tag_page_attribute,
color_attribute,
size_attribute,
permission_manage_page_types_and_attributes,
):
# given
staff_user = staff_api_client.user
staff_user.user_permissions.add(permission_manage_page_types_and_attributes)
name = "Test page type"
slug = "test-page-type"
attributes = [color_attribute, tag_page_attribute, size_attribute]
variables = {
"name": name,
"slug": slug,
"addAttributes": [
graphene.Node.to_global_id("Attribute", attr.pk) for attr in attributes
],
}
# when
response = staff_api_client.post_graphql(PAGE_TYPE_CREATE_MUTATION, variables)
# then
content = get_graphql_content(response)
data = content["data"]["pageTypeCreate"]
page_type_data = data["pageType"]
errors = data["errors"]
assert not page_type_data
assert len(errors) == 1
assert errors[0]["code"] == PageErrorCode.INVALID.name
assert errors[0]["field"] == "addAttributes"
assert set(errors[0]["attributes"]) == {
graphene.Node.to_global_id("Attribute", attr.pk)
for attr in [color_attribute, size_attribute]
}
|
[
"unittest.mock.patch",
"freezegun.freeze_time",
"django.utils.functional.SimpleLazyObject",
"graphene.Node.to_global_id"
] |
[((2118, 2152), 'freezegun.freeze_time', 'freeze_time', (['"""2022-05-12 12:00:00"""'], {}), "('2022-05-12 12:00:00')\n", (2129, 2152), False, 'from freezegun import freeze_time\n'), ((2154, 2220), 'unittest.mock.patch', 'mock.patch', (['"""saleor.plugins.webhook.plugin.get_webhooks_for_event"""'], {}), "('saleor.plugins.webhook.plugin.get_webhooks_for_event')\n", (2164, 2220), False, 'from unittest import mock\n'), ((2222, 2288), 'unittest.mock.patch', 'mock.patch', (['"""saleor.plugins.webhook.plugin.trigger_webhooks_async"""'], {}), "('saleor.plugins.webhook.plugin.trigger_webhooks_async')\n", (2232, 2288), False, 'from unittest import mock\n'), ((3926, 3974), 'django.utils.functional.SimpleLazyObject', 'SimpleLazyObject', (['(lambda : staff_api_client.user)'], {}), '(lambda : staff_api_client.user)\n', (3942, 3974), False, 'from django.utils.functional import SimpleLazyObject\n'), ((7098, 7150), 'graphene.Node.to_global_id', 'graphene.Node.to_global_id', (['"""PageType"""', 'page_type.pk'], {}), "('PageType', page_type.pk)\n", (7124, 7150), False, 'import graphene\n'), ((1478, 1526), 'graphene.Node.to_global_id', 'graphene.Node.to_global_id', (['"""Attribute"""', 'attr.pk'], {}), "('Attribute', attr.pk)\n", (1504, 1526), False, 'import graphene\n'), ((3039, 3087), 'graphene.Node.to_global_id', 'graphene.Node.to_global_id', (['"""Attribute"""', 'attr.pk'], {}), "('Attribute', attr.pk)\n", (3065, 3087), False, 'import graphene\n'), ((3503, 3555), 'graphene.Node.to_global_id', 'graphene.Node.to_global_id', (['"""PageType"""', 'page_type.id'], {}), "('PageType', page_type.id)\n", (3529, 3555), False, 'import graphene\n'), ((4318, 4366), 'graphene.Node.to_global_id', 'graphene.Node.to_global_id', (['"""Attribute"""', 'attr.pk'], {}), "('Attribute', attr.pk)\n", (4344, 4366), False, 'import graphene\n'), ((4933, 4995), 'graphene.Node.to_global_id', 'graphene.Node.to_global_id', (['"""Attribute"""', 'tag_page_attribute.pk'], {}), "('Attribute', tag_page_attribute.pk)\n", (4959, 4995), False, 'import graphene\n'), ((5750, 5812), 'graphene.Node.to_global_id', 'graphene.Node.to_global_id', (['"""Attribute"""', 'tag_page_attribute.pk'], {}), "('Attribute', tag_page_attribute.pk)\n", (5776, 5812), False, 'import graphene\n'), ((6648, 6696), 'graphene.Node.to_global_id', 'graphene.Node.to_global_id', (['"""Attribute"""', 'attr.pk'], {}), "('Attribute', attr.pk)\n", (6674, 6696), False, 'import graphene\n'), ((8126, 8174), 'graphene.Node.to_global_id', 'graphene.Node.to_global_id', (['"""Attribute"""', 'attr.pk'], {}), "('Attribute', attr.pk)\n", (8152, 8174), False, 'import graphene\n'), ((9186, 9234), 'graphene.Node.to_global_id', 'graphene.Node.to_global_id', (['"""Attribute"""', 'attr.pk'], {}), "('Attribute', attr.pk)\n", (9212, 9234), False, 'import graphene\n'), ((9757, 9805), 'graphene.Node.to_global_id', 'graphene.Node.to_global_id', (['"""Attribute"""', 'attr.pk'], {}), "('Attribute', attr.pk)\n", (9783, 9805), False, 'import graphene\n'), ((3735, 3783), 'django.utils.functional.SimpleLazyObject', 'SimpleLazyObject', (['(lambda : staff_api_client.user)'], {}), '(lambda : staff_api_client.user)\n', (3751, 3783), False, 'from django.utils.functional import SimpleLazyObject\n')]
|
#!/usr/bin/python
#-*- coding:utf-8 -*-
import sys
import struct
import numpy as np
import tensorflow as tf
def lrn_f32():
    para_int = []
    para_float = []

    # init the input data and parameters
    batch = int(np.random.randint(1, high=4, size=1))
    in_size_x = int(np.random.randint(16, high=32, size=1))
    in_size_y = int(np.random.randint(16, high=32, size=1))
    in_channel = int(np.random.randint(15, high=32, size=1))
    depth_radius = int(np.random.randint(1, high=6, size=1))
    bias = float(np.random.uniform(1, high=5, size=1))
    alpha = float(np.random.uniform(1e-5, high=1e-3, size=1))
    beta = float(np.random.uniform(0.5, high=1, size=1))
    zero_point = int(np.random.randint(0, high=1, size=1))
    std = int(np.random.randint(1, high=2, size=1))

    src_in = np.random.normal(zero_point, std, (batch, in_size_y, in_size_x, in_channel))
    src_in = src_in.astype(np.float32)

    out_calcu = tf.nn.local_response_normalization(src_in, depth_radius, bias, alpha, beta)

    with tf.Session() as sess:
        src_out = sess.run(out_calcu)

    src_in_nhwc = src_in
    out_nhwc = src_out

    src_in_nchw = np.transpose(src_in, [0, 3, 1, 2])
    out_nchw = np.transpose(src_out, [0, 3, 1, 2])

    src_in_1 = src_in_nchw.flatten()
    src_out_1 = out_nchw.flatten()

    total_size = (len(src_in_1) + len(src_out_1)) + 8

    para_int.append(total_size)
    para_int.append(batch)
    para_int.append(in_channel)
    para_int.append(in_size_y)
    para_int.append(in_size_x)
    para_int.append(depth_radius)

    para_float.append(bias)
    para_float.append(alpha)
    para_float.append(beta)

    print(para_int)
    print(para_float)

    with open("lrn_data_f32.bin", "wb") as fp:
        data = struct.pack(('%di' % len(para_int)), *para_int)
        fp.write(data)
        data = struct.pack(('%df' % len(para_float)), *para_float)
        fp.write(data)
        data = struct.pack(('%df' % len(src_in_1)), *src_in_1)
        fp.write(data)
        data = struct.pack(('%df' % len(src_out_1)), *src_out_1)
        fp.write(data)
        fp.close()

    return 0


if __name__ == '__main__':
    lrn_f32()
    print("end")
|
[
"numpy.random.uniform",
"numpy.transpose",
"tensorflow.nn.local_response_normalization",
"tensorflow.Session",
"numpy.random.randint",
"numpy.random.normal"
] |
[((819, 895), 'numpy.random.normal', 'np.random.normal', (['zero_point', 'std', '(batch, in_size_y, in_size_x, in_channel)'], {}), '(zero_point, std, (batch, in_size_y, in_size_x, in_channel))\n', (835, 895), True, 'import numpy as np\n'), ((953, 1028), 'tensorflow.nn.local_response_normalization', 'tf.nn.local_response_normalization', (['src_in', 'depth_radius', 'bias', 'alpha', 'beta'], {}), '(src_in, depth_radius, bias, alpha, beta)\n', (987, 1028), True, 'import tensorflow as tf\n'), ((1175, 1209), 'numpy.transpose', 'np.transpose', (['src_in', '[0, 3, 1, 2]'], {}), '(src_in, [0, 3, 1, 2])\n', (1187, 1209), True, 'import numpy as np\n'), ((1225, 1260), 'numpy.transpose', 'np.transpose', (['src_out', '[0, 3, 1, 2]'], {}), '(src_out, [0, 3, 1, 2])\n', (1237, 1260), True, 'import numpy as np\n'), ((226, 262), 'numpy.random.randint', 'np.random.randint', (['(1)'], {'high': '(4)', 'size': '(1)'}), '(1, high=4, size=1)\n', (243, 262), True, 'import numpy as np\n'), ((285, 323), 'numpy.random.randint', 'np.random.randint', (['(16)'], {'high': '(32)', 'size': '(1)'}), '(16, high=32, size=1)\n', (302, 323), True, 'import numpy as np\n'), ((346, 384), 'numpy.random.randint', 'np.random.randint', (['(16)'], {'high': '(32)', 'size': '(1)'}), '(16, high=32, size=1)\n', (363, 384), True, 'import numpy as np\n'), ((407, 445), 'numpy.random.randint', 'np.random.randint', (['(15)'], {'high': '(32)', 'size': '(1)'}), '(15, high=32, size=1)\n', (424, 445), True, 'import numpy as np\n'), ((471, 507), 'numpy.random.randint', 'np.random.randint', (['(1)'], {'high': '(6)', 'size': '(1)'}), '(1, high=6, size=1)\n', (488, 507), True, 'import numpy as np\n'), ((526, 562), 'numpy.random.uniform', 'np.random.uniform', (['(1)'], {'high': '(5)', 'size': '(1)'}), '(1, high=5, size=1)\n', (543, 562), True, 'import numpy as np\n'), ((582, 626), 'numpy.random.uniform', 'np.random.uniform', (['(1e-05)'], {'high': '(0.001)', 'size': '(1)'}), '(1e-05, high=0.001, size=1)\n', (599, 626), True, 'import numpy as np\n'), ((643, 681), 'numpy.random.uniform', 'np.random.uniform', (['(0.5)'], {'high': '(1)', 'size': '(1)'}), '(0.5, high=1, size=1)\n', (660, 681), True, 'import numpy as np\n'), ((705, 741), 'numpy.random.randint', 'np.random.randint', (['(0)'], {'high': '(1)', 'size': '(1)'}), '(0, high=1, size=1)\n', (722, 741), True, 'import numpy as np\n'), ((764, 800), 'numpy.random.randint', 'np.random.randint', (['(1)'], {'high': '(2)', 'size': '(1)'}), '(1, high=2, size=1)\n', (781, 800), True, 'import numpy as np\n'), ((1044, 1056), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (1054, 1056), True, 'import tensorflow as tf\n')]
|
import aiohttp
import asyncio
from datetime import datetime
async def main():
    async with aiohttp.ClientSession() as client:
        html = await client.get('https://www.baidu.com/')
        print(html)


loop = asyncio.get_event_loop()
tasks = []
for i in range(100):
    task = loop.create_task(main())
    tasks.append(task)
start = datetime.now()
loop.run_until_complete(main())
end = datetime.now()
print("time taken by aiohttp:", end - start)
|
[
"aiohttp.ClientSession",
"asyncio.get_event_loop",
"datetime.datetime.now"
] |
[((224, 248), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (246, 248), False, 'import asyncio\n'), ((357, 371), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (369, 371), False, 'from datetime import datetime\n'), ((416, 430), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (428, 430), False, 'from datetime import datetime\n'), ((99, 122), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {}), '()\n', (120, 122), False, 'import aiohttp\n')]
|
# Generated by Django 2.0.8 on 2018-11-21 23:49
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):

    dependencies = [
        ('ticket', '0003_remove_ticket_price'),
    ]

    operations = [
        migrations.AddField(
            model_name='ticket',
            name='price',
            field=models.FloatField(default=0, help_text='Preço', validators=[django.core.validators.MinValueValidator(0)], verbose_name='Preço'),
        ),
        migrations.AlterField(
            model_name='ticket',
            name='seat',
            field=models.CharField(help_text='Assento', max_length=50, null=True, verbose_name='Assento'),
        ),
    ]
|
[
"django.db.models.CharField"
] |
[((610, 701), 'django.db.models.CharField', 'models.CharField', ([], {'help_text': '"""Assento"""', 'max_length': '(50)', 'null': '(True)', 'verbose_name': '"""Assento"""'}), "(help_text='Assento', max_length=50, null=True,\n verbose_name='Assento')\n", (626, 701), False, 'from django.db import migrations, models\n')]
|
"""
Test example snippets from the lldb 'help expression' output.
"""
from __future__ import print_function
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class Radar9673644TestCase(TestBase):

    mydir = TestBase.compute_mydir(__file__)

    def setUp(self):
        # Call super's setUp().
        TestBase.setUp(self)
        # Find the line number to break inside main().
        self.main_source = "main.c"
        self.line = line_number(self.main_source, '// Set breakpoint here.')

    def test_expr_commands(self):
        """The following expression commands should just work."""
        self.build()

        self.runCmd("file " + self.getBuildArtifact("a.out"), CURRENT_EXECUTABLE_SET)

        lldbutil.run_break_set_by_file_and_line(
            self,
            self.main_source,
            self.line,
            num_expected_locations=1,
            loc_exact=True)

        self.runCmd("run", RUN_SUCCEEDED)

        # rdar://problem/9673664 lldb expression evaluation problem
        self.expect('expr char str[] = "foo"; str[0]',
                    substrs=["'f'"])

        # runCmd: expr char c[] = "foo"; c[0]
        # output: (char) $0 = 'f'
|
[
"lldbsuite.test.lldbutil.run_break_set_by_file_and_line"
] |
[((792, 912), 'lldbsuite.test.lldbutil.run_break_set_by_file_and_line', 'lldbutil.run_break_set_by_file_and_line', (['self', 'self.main_source', 'self.line'], {'num_expected_locations': '(1)', 'loc_exact': '(True)'}), '(self, self.main_source, self.line,\n num_expected_locations=1, loc_exact=True)\n', (831, 912), False, 'from lldbsuite.test import lldbutil\n')]
|
# -*- coding: utf-8 -*-
#
# parselglossy -- Generic input parsing library, speaking in tongues
# Copyright (C) 2020 <NAME>, <NAME>, and contributors.
#
# This file is part of parselglossy.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# For information on the complete list of contributors to the
# parselglossy library, see: <http://parselglossy.readthedocs.io/>
#
"""Tests for `parselglossy` package."""
from collections import namedtuple
from string import ascii_letters, digits
from hypothesis import strategies as st
Strategy = namedtuple("Strategy", ["string", "value"])
@st.composite
def floats(draw):
"""Generate floating point numbers in various formats.
A composite testing strategy to generate floats in the formats
understood by the `float_t` atom.
"""
number = draw(st.floats(allow_nan=False, allow_infinity=False))
fmt = draw(
st.sampled_from(
["{:.20f}", "{:.20e}", "{:.20E}", "{:+.20f}", "{:+.20e}", "{:+.20E}"]
)
)
return Strategy(fmt.format(number), number)
@st.composite
def complex_numbers(draw):
"""Generate complex numbers in various formats.
A composite testing strategy to generate complex numbers in the formats
understood by the `complex_t` atom.
"""
number = draw(st.complex_numbers(allow_nan=False, allow_infinity=False))
has_real = draw(st.booleans())
w_space = draw(st.booleans())
real_fmt = draw(st.sampled_from(["{:.20g}", "{:.20G}"]))
imag_fmt = draw(
st.sampled_from(["{:+.20g}*j", "{:+.20G}*j", "{:+.20g}*J", "{:+.20G}*J"])
)
number_as_string = ""
if has_real:
if w_space:
fmt = real_fmt + " " + imag_fmt
else:
fmt = real_fmt + imag_fmt
number_as_string = fmt.format(number.real, number.imag)
else:
number = complex(0.0, number.imag)
fmt = imag_fmt
number_as_string = fmt.format(number.imag)
return Strategy(number_as_string, number)
@st.composite
def unquoted_str(draw):
"""Generate strings compatible with our definition of an unquoted string."""
start = draw(st.text(alphabet=(ascii_letters + "_"), min_size=1))
body = draw(st.text(alphabet=(ascii_letters + digits + "_")))
return start + body
@st.composite
def list_of_floats(draw, *, start: str = "[", end: str = "]", delimiter: str = ","):
"""Generate list of floating point numbers in various formats.
A composite testing strategy to generate lists of floats in the formats
understood by the list atoms that can be generated by the `make_list_t`
function.
"""
lst = draw(st.lists(floats(), min_size=1))
list_as_string = start + delimiter.join((x.string for x in lst)) + end
numbers = [x.value for x in lst]
return Strategy(list_as_string, numbers)
@st.composite
def list_of_complex_numbers(
draw, *, start: str = "[", end: str = "]", delimiter: str = ","
):
"""Generate list of complex numbers in various formats.
A composite testing strategy to generate lists of complex numbers in the formats
understood by the list atoms that can be generated by the `make_list_t`
function.
"""
lst = draw(st.lists(complex_numbers(), min_size=1))
list_as_string = start + delimiter.join((x.string for x in lst)) + end
numbers = [x.value for x in lst]
return Strategy(list_as_string, numbers)
@st.composite
def list_of_unquoted_str(
draw, *, start: str = "[", end: str = "]", delimiter: str = ","
):
"""Generate list of unquoted strings."""
lst = draw(st.lists(unquoted_str(), min_size=1))
list_as_string = start + delimiter.join((x for x in lst)) + end
strings = [x for x in lst]
return Strategy(list_as_string, strings)
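# Illustrative usage sketch (not part of the original test module): the
# composite strategies above plug straight into ``hypothesis.given``. The
# round-trip check assumes only the built-in ``float``; ``pytest.approx``
# absorbs any rounding introduced by the 20-digit formatting.
from hypothesis import given
from pytest import approx
@given(floats())
def test_float_string_roundtrip(strategy):
    assert float(strategy.string) == approx(strategy.value)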
|
[
"hypothesis.strategies.sampled_from",
"hypothesis.strategies.booleans",
"hypothesis.strategies.complex_numbers",
"collections.namedtuple",
"hypothesis.strategies.text",
"hypothesis.strategies.floats"
] |
[((1550, 1593), 'collections.namedtuple', 'namedtuple', (['"""Strategy"""', "['string', 'value']"], {}), "('Strategy', ['string', 'value'])\n", (1560, 1593), False, 'from collections import namedtuple\n'), ((1819, 1867), 'hypothesis.strategies.floats', 'st.floats', ([], {'allow_nan': '(False)', 'allow_infinity': '(False)'}), '(allow_nan=False, allow_infinity=False)\n', (1828, 1867), True, 'from hypothesis import strategies as st\n'), ((1893, 1983), 'hypothesis.strategies.sampled_from', 'st.sampled_from', (["['{:.20f}', '{:.20e}', '{:.20E}', '{:+.20f}', '{:+.20e}', '{:+.20E}']"], {}), "(['{:.20f}', '{:.20e}', '{:.20E}', '{:+.20f}', '{:+.20e}',\n '{:+.20E}'])\n", (1908, 1983), True, 'from hypothesis import strategies as st\n'), ((2294, 2351), 'hypothesis.strategies.complex_numbers', 'st.complex_numbers', ([], {'allow_nan': '(False)', 'allow_infinity': '(False)'}), '(allow_nan=False, allow_infinity=False)\n', (2312, 2351), True, 'from hypothesis import strategies as st\n'), ((2373, 2386), 'hypothesis.strategies.booleans', 'st.booleans', ([], {}), '()\n', (2384, 2386), True, 'from hypothesis import strategies as st\n'), ((2407, 2420), 'hypothesis.strategies.booleans', 'st.booleans', ([], {}), '()\n', (2418, 2420), True, 'from hypothesis import strategies as st\n'), ((2442, 2481), 'hypothesis.strategies.sampled_from', 'st.sampled_from', (["['{:.20g}', '{:.20G}']"], {}), "(['{:.20g}', '{:.20G}'])\n", (2457, 2481), True, 'from hypothesis import strategies as st\n'), ((2512, 2585), 'hypothesis.strategies.sampled_from', 'st.sampled_from', (["['{:+.20g}*j', '{:+.20G}*j', '{:+.20g}*J', '{:+.20G}*J']"], {}), "(['{:+.20g}*j', '{:+.20G}*j', '{:+.20g}*J', '{:+.20G}*J'])\n", (2527, 2585), True, 'from hypothesis import strategies as st\n'), ((3127, 3176), 'hypothesis.strategies.text', 'st.text', ([], {'alphabet': "(ascii_letters + '_')", 'min_size': '(1)'}), "(alphabet=ascii_letters + '_', min_size=1)\n", (3134, 3176), True, 'from hypothesis import strategies as st\n'), ((3196, 3242), 'hypothesis.strategies.text', 'st.text', ([], {'alphabet': "(ascii_letters + digits + '_')"}), "(alphabet=ascii_letters + digits + '_')\n", (3203, 3242), True, 'from hypothesis import strategies as st\n')]
|
from sklearn.model_selection import cross_val_score
from sklearn.model_selection import train_test_split
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from lightgbm import LGBMRegressor
pd.options.display.max_columns = 50
pd.options.display.width = 1000
training_data = pd.read_csv('training_data/covid19_measure_assessment_dataset.csv')
X = training_data[training_data.columns[0:-1]]
y = training_data[training_data.columns[-1]]
X.drop(columns=['Obligatory medical tests not related to covid-19'], inplace=True)
print(y.describe(), X.day_zero.describe())
X_train, X_test, y_train, y_test = train_test_split(X, y, shuffle=True, random_state=0, test_size=0.2)
max_depth = -1
num_leaves = 150
learning_rate = 0.01
boosting_type, subsample, subsample_freq = 'rf', 0.8, 2
class_weight = None
objective = 'rmse'
colsample_bytree = 0.8
colsample_bynode = 0.8
min_child_samples = 180
max_bin = 256
Regressor = LGBMRegressor(n_estimators=100000, boosting_type=boosting_type, learning_rate=learning_rate, random_state=0,
subsample=subsample, subsample_freq=subsample_freq, colsample_bytree=colsample_bytree,
colsample_bynode=colsample_bynode, min_child_samples=min_child_samples,
max_depth=max_depth, num_leaves=num_leaves, class_weight=class_weight, max_bin=max_bin,
importance_type='split', objective=objective)
Regressor.fit(X_train, y_train, eval_set=[(X_train, y_train), (X_test, y_test)],
early_stopping_rounds=1000, )
best_iteration = Regressor.best_iteration_
print(Regressor.get_params())
Regressor = LGBMRegressor(n_estimators=best_iteration, boosting_type=boosting_type, learning_rate=learning_rate,
random_state=0, class_weight=class_weight, min_child_samples=min_child_samples,
colsample_bynode=colsample_bynode, colsample_bytree=colsample_bytree,
subsample=subsample, subsample_freq=subsample_freq, max_bin=max_bin,
max_depth=max_depth, num_leaves=num_leaves, top_k=1000, tree_learner='voting',
importance_type='split', objective=objective)
scores = cross_val_score(Regressor, X, y, cv=5,
scoring='neg_mean_squared_error')
print('Cross Validation Results: {} +- Std.Dev. {} '.format(scores.mean(), scores.std()))
Regressor.fit(X, y)
print("Iterations used: ", Regressor.n_estimators)
for c, i in zip(X.columns, Regressor.feature_importances_):
print(c, i)
fig, ax = plt.subplots(1, 1, figsize=(10, 5), facecolor=(0.9, 0.9, 0.9))
ax.set_title('Model predictions per solely activated measure')
ax.set_facecolor((0.8, 0.8, 0.8))
ax.set_xlabel('Days since day zero')
ax.set_ylabel('Exponent % increase since day zero')
days = X.day_zero.max()
period = np.arange(X.day_zero.min(), days, 1)
def prediction(measure_intensity=0, days=days, temp=X.avg_temp.mean(), population=X.population.mean(),
built=X.built_area.mean(), density=X.pop_density.mean(), measure_index=None, clf=Regressor, train_data=X):
days = np.arange(X.day_zero.min(), days, 1)
array = np.zeros((days.size, train_data.columns.size), 'float64')
array[:, 0], array[:, 1], array[:, 2], array[:, 3], array[:, 4] = days, temp, population, built, density
if measure_index:
array[:, 4+measure_index] = 1
else:
array[:, 5:] = measure_intensity
array = pd.DataFrame(array, columns=X.columns)
predictions = clf.predict(array, num_iteration=best_iteration, pred_contrib=False)
predictions = predictions.cumsum()
return predictions
intensity_0, intensity_0_1, intensity_1 = prediction(0), prediction(0.1), prediction(1)
measure_assessment = [prediction(days=days, measure_index=i) for i in range(1, 35)]  # pass days by keyword; positionally it would be consumed as measure_intensity
ax.plot(period, intensity_0, c=(0.1, 0.1, 0.1), lw=6, alpha=0.9, label='All measures: Intensity 0')
ax.plot(period, intensity_0_1, c=(0.3, 0.3, 0.3), lw=6, alpha=0.9, label='All measures: Intensity 0.1')
ax.plot(period, intensity_1, c=(0.5, 0.5, 0.5), lw=6, alpha=0.9, label='All measures: Intensity 1')
for name, measure in zip(X.columns[5:], measure_assessment):
rc = np.random.random(3,)
ax.plot(period, measure, c=rc, ls='--', lw=2, label=name)
ax.fill_between(period, measure-scores.mean()*(period-period[0]+1), measure+scores.mean()*(period-period[0]+1),
color=(*rc, 0.2))
ax.set_xlim(left=0, right=days)
ax.set_ylim(bottom=0, top=intensity_0.max())
ax.yaxis.set_ticklabels(['{}%'.format(int(x*100)) for x in ax.get_yticks()], rotation=90)
ax.grid(axis='both', linestyle=':', color=(1, 1, 1, 0.3))
ax.legend(bbox_to_anchor=(1, 1.01), fontsize=7.5, labelspacing=0.6, fancybox=True, title='Legend')
plt.subplots_adjust(wspace=0, hspace=0, right=0.7, left=0.05)
Regressor.booster_.save_model('Model/covid19_measure_assessment_model.txt', num_iteration=best_iteration)
results = pd.DataFrame(((round(((measure[-1] - intensity_0[-1]) / intensity_0[-1])*100, 1))
for measure in measure_assessment),
index=X.columns[5:],
columns=['Spread % decrease in {} days'.format(int(days))])
results['RMSE'] = '+- {}%'.format(abs(round((days*scores.mean()/intensity_0[-1])*100, 1)))
results['Feature importance (splits)'] = Regressor.feature_importances_[5:]
results['No. of non-zero values (dataset)'] = X[X > 0].count()[5:]
results.sort_values('Spread % decrease in {} days'.format(int(days)), inplace=True)
results[results.columns[0]] = results[results.columns[0]].apply(lambda x: '{}%'.format(x))
print(results)
plt.show()
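# Illustrative sketch (not part of the original script): the booster saved
# above with save_model() can be reloaded later and used for prediction
# without retraining.
import lightgbm as lgb
reloaded = lgb.Booster(model_file='Model/covid19_measure_assessment_model.txt')
reloaded_predictions = reloaded.predict(X, num_iteration=best_iteration)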
|
[
"pandas.DataFrame",
"matplotlib.pyplot.show",
"pandas.read_csv",
"sklearn.model_selection.train_test_split",
"sklearn.model_selection.cross_val_score",
"numpy.zeros",
"numpy.random.random",
"matplotlib.pyplot.subplots_adjust",
"lightgbm.LGBMRegressor",
"matplotlib.pyplot.subplots"
] |
[((296, 363), 'pandas.read_csv', 'pd.read_csv', (['"""training_data/covid19_measure_assessment_dataset.csv"""'], {}), "('training_data/covid19_measure_assessment_dataset.csv')\n", (307, 363), True, 'import pandas as pd\n'), ((622, 689), 'sklearn.model_selection.train_test_split', 'train_test_split', (['X', 'y'], {'shuffle': '(True)', 'random_state': '(0)', 'test_size': '(0.2)'}), '(X, y, shuffle=True, random_state=0, test_size=0.2)\n', (638, 689), False, 'from sklearn.model_selection import train_test_split\n'), ((937, 1358), 'lightgbm.LGBMRegressor', 'LGBMRegressor', ([], {'n_estimators': '(100000)', 'boosting_type': 'boosting_type', 'learning_rate': 'learning_rate', 'random_state': '(0)', 'subsample': 'subsample', 'subsample_freq': 'subsample_freq', 'colsample_bytree': 'colsample_bytree', 'colsample_bynode': 'colsample_bynode', 'min_child_samples': 'min_child_samples', 'max_depth': 'max_depth', 'num_leaves': 'num_leaves', 'class_weight': 'class_weight', 'max_bin': 'max_bin', 'importance_type': '"""split"""', 'objective': 'objective'}), "(n_estimators=100000, boosting_type=boosting_type,\n learning_rate=learning_rate, random_state=0, subsample=subsample,\n subsample_freq=subsample_freq, colsample_bytree=colsample_bytree,\n colsample_bynode=colsample_bynode, min_child_samples=min_child_samples,\n max_depth=max_depth, num_leaves=num_leaves, class_weight=class_weight,\n max_bin=max_bin, importance_type='split', objective=objective)\n", (950, 1358), False, 'from lightgbm import LGBMRegressor\n'), ((1657, 2127), 'lightgbm.LGBMRegressor', 'LGBMRegressor', ([], {'n_estimators': 'best_iteration', 'boosting_type': 'boosting_type', 'learning_rate': 'learning_rate', 'random_state': '(0)', 'class_weight': 'class_weight', 'min_child_samples': 'min_child_samples', 'colsample_bynode': 'colsample_bynode', 'colsample_bytree': 'colsample_bytree', 'subsample': 'subsample', 'subsample_freq': 'subsample_freq', 'max_bin': 'max_bin', 'max_depth': 'max_depth', 'num_leaves': 'num_leaves', 'top_k': '(1000)', 'tree_learner': '"""voting"""', 'importance_type': '"""split"""', 'objective': 'objective'}), "(n_estimators=best_iteration, boosting_type=boosting_type,\n learning_rate=learning_rate, random_state=0, class_weight=class_weight,\n min_child_samples=min_child_samples, colsample_bynode=colsample_bynode,\n colsample_bytree=colsample_bytree, subsample=subsample, subsample_freq=\n subsample_freq, max_bin=max_bin, max_depth=max_depth, num_leaves=\n num_leaves, top_k=1000, tree_learner='voting', importance_type='split',\n objective=objective)\n", (1670, 2127), False, 'from lightgbm import LGBMRegressor\n'), ((2243, 2315), 'sklearn.model_selection.cross_val_score', 'cross_val_score', (['Regressor', 'X', 'y'], {'cv': '(5)', 'scoring': '"""neg_mean_squared_error"""'}), "(Regressor, X, y, cv=5, scoring='neg_mean_squared_error')\n", (2258, 2315), False, 'from sklearn.model_selection import cross_val_score\n'), ((2594, 2656), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {'figsize': '(10, 5)', 'facecolor': '(0.9, 0.9, 0.9)'}), '(1, 1, figsize=(10, 5), facecolor=(0.9, 0.9, 0.9))\n', (2606, 2656), True, 'import matplotlib.pyplot as plt\n'), ((4798, 4859), 'matplotlib.pyplot.subplots_adjust', 'plt.subplots_adjust', ([], {'wspace': '(0)', 'hspace': '(0)', 'right': '(0.7)', 'left': '(0.05)'}), '(wspace=0, hspace=0, right=0.7, left=0.05)\n', (4817, 4859), True, 'import matplotlib.pyplot as plt\n'), ((5676, 5686), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (5684, 5686), True, 'import matplotlib.pyplot as plt\n'), ((3203, 3260), 'numpy.zeros', 'np.zeros', (['(days.size, train_data.columns.size)', '"""float64"""'], {}), "((days.size, train_data.columns.size), 'float64')\n", (3211, 3260), True, 'import numpy as np\n'), ((3498, 3536), 'pandas.DataFrame', 'pd.DataFrame', (['array'], {'columns': 'X.columns'}), '(array, columns=X.columns)\n', (3510, 3536), True, 'import pandas as pd\n'), ((4234, 4253), 'numpy.random.random', 'np.random.random', (['(3)'], {}), '(3)\n', (4250, 4253), True, 'import numpy as np\n')]
|
# Copyright 2011 <NAME>. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY MATT CHAPUT ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL MATT CHAPUT OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are
# those of the authors and should not be interpreted as representing official
# policies, either expressed or implied, of Matt Chaput.
from __future__ import with_statement
import os
from multiprocessing import Process, Queue, cpu_count
from whoosh.compat import xrange, iteritems, pickle
from whoosh.codec import base
from whoosh.writing import PostingPool, SegmentWriter
from whoosh.externalsort import imerge
from whoosh.util import random_name
def finish_subsegment(writer, k=64):
# Tell the pool to finish up the current file
writer.pool.save()
# Tell the pool to merge any and all runs in the pool until there
# is only one run remaining. "k" is an optional parameter passed
# from the parent which sets the maximum number of files to open
# while reducing.
writer.pool.reduce_to(1, k)
# The filename of the single remaining run
runname = writer.pool.runs[0]
# The indexed field names
fieldnames = writer.pool.fieldnames
# The segment object (parent can use this to re-open the files created
# by the sub-writer)
segment = writer._partial_segment()
return runname, fieldnames, segment
# Multiprocessing Writer
class SubWriterTask(Process):
# This is a Process object that takes "jobs" off a job Queue, processes
# them, and when it's done, puts a summary of its work on a results Queue
def __init__(self, storage, indexname, jobqueue, resultqueue, kwargs,
multisegment):
Process.__init__(self)
self.storage = storage
self.indexname = indexname
self.jobqueue = jobqueue
self.resultqueue = resultqueue
self.kwargs = kwargs
self.multisegment = multisegment
self.running = True
def run(self):
# This is the main loop of the process. OK, so the way this works is
# kind of brittle and stupid, but I had to figure out how to use the
# multiprocessing module, work around bugs, and address performance
# issues, so there is at least some reasoning behind some of this
# The "parent" task farms individual documents out to the subtasks for
# indexing. You could pickle the actual documents and put them in the
# queue, but that is not very performant. Instead, we assume the tasks
# share a filesystem and use that to pass the information around. The
# parent task writes a certain number of documents to a file, then puts
# the filename on the "job queue". A subtask gets the filename off the
# queue and reads through the file processing the documents.
jobqueue = self.jobqueue
resultqueue = self.resultqueue
multisegment = self.multisegment
# Open a placeholder object representing the index
ix = self.storage.open_index(self.indexname)
# Open a writer for the index. The _lk=False parameter means to not try
# to lock the index (the parent object that started me takes care of
# locking the index)
writer = self.writer = SegmentWriter(ix, _lk=False, **self.kwargs)
# If the parent task calls cancel() on me, it will set self.running to
# False, so I'll notice the next time through the loop
while self.running:
# Take an object off the job queue
jobinfo = jobqueue.get()
# If the object is None, it means the parent task wants me to
# finish up
if jobinfo is None:
break
# The object from the queue is a tuple of (filename,
# number_of_docs_in_file). Pass those two pieces of information as
# arguments to _process_file().
self._process_file(*jobinfo)
if not self.running:
# I was cancelled, so I'll cancel my underlying writer
writer.cancel()
else:
if multisegment:
# Actually finish the segment and return it with no run
runname = None
fieldnames = writer.pool.fieldnames
segment = writer._finalize_segment()
else:
# Merge all runs in the writer's pool into one run, close the
# segment, and return the run name and the segment
k = self.kwargs.get("k", 64)
runname, fieldnames, segment = finish_subsegment(writer, k)
# Put the results (the run filename and the segment object) on the
# result queue
resultqueue.put((runname, fieldnames, segment), timeout=5)
def _process_file(self, filename, doc_count):
# This method processes a "job file" written out by the parent task. A
# job file is a series of pickled (code, arguments) tuples. Currently
        # the only command code is 0=add_document
writer = self.writer
tempstorage = writer.temp_storage()
load = pickle.load
with tempstorage.open_file(filename).raw_file() as f:
for _ in xrange(doc_count):
# Load the next pickled tuple from the file
code, args = load(f)
assert code == 0
writer.add_document(**args)
# Remove the job file
tempstorage.delete_file(filename)
def cancel(self):
self.running = False
class MpWriter(SegmentWriter):
def __init__(self, ix, procs=None, batchsize=100, subargs=None,
multisegment=False, **kwargs):
# This is the "main" writer that will aggregate the results created by
# the sub-tasks
SegmentWriter.__init__(self, ix, **kwargs)
self.procs = procs or cpu_count()
# The maximum number of documents in each job file submitted to the
# sub-tasks
self.batchsize = batchsize
# You can use keyword arguments or the "subargs" argument to pass
# keyword arguments to the sub-writers
self.subargs = subargs if subargs else kwargs
# If multisegment is True, don't merge the segments created by the
# sub-writers, just add them directly to the TOC
self.multisegment = multisegment
# A list to hold the sub-task Process objects
self.tasks = []
# A queue to pass the filenames of job files to the sub-tasks
self.jobqueue = Queue(self.procs * 4)
# A queue to get back the final results of the sub-tasks
self.resultqueue = Queue()
# A buffer for documents before they are flushed to a job file
self.docbuffer = []
self._grouping = 0
self._added_sub = False
def _new_task(self):
task = SubWriterTask(self.storage, self.indexname,
self.jobqueue, self.resultqueue, self.subargs,
self.multisegment)
self.tasks.append(task)
task.start()
return task
def _enqueue(self):
# Flush the documents stored in self.docbuffer to a file and put the
# filename on the job queue
docbuffer = self.docbuffer
dump = pickle.dump
length = len(docbuffer)
filename = "%s.doclist" % random_name()
with self.temp_storage().create_file(filename).raw_file() as f:
for item in docbuffer:
dump(item, f, -1)
if len(self.tasks) < self.procs:
self._new_task()
jobinfo = (filename, length)
self.jobqueue.put(jobinfo)
self.docbuffer = []
def cancel(self):
try:
for task in self.tasks:
task.cancel()
finally:
SegmentWriter.cancel(self)
def start_group(self):
self._grouping += 1
def end_group(self):
if not self._grouping:
raise Exception("Unbalanced end_group")
self._grouping -= 1
def add_document(self, **fields):
# Add the document to the docbuffer
self.docbuffer.append((0, fields))
# If the buffer is full, flush it to the job queue
if not self._grouping and len(self.docbuffer) >= self.batchsize:
self._enqueue()
self._added_sub = True
def _read_and_renumber_run(self, path, offset):
# Note that SortingPool._read_run() automatically deletes the run file
# when it's finished
gen = self.pool._read_run(path)
# If offset is 0, just return the items unchanged
if not offset:
return gen
else:
# Otherwise, add the offset to each docnum
return ((fname, text, docnum + offset, weight, value)
for fname, text, docnum, weight, value in gen)
def commit(self, mergetype=None, optimize=None, merge=None):
if self._added_sub:
# If documents have been added to sub-writers, use the parallel
# merge commit code
self._commit(mergetype, optimize, merge)
else:
# Otherwise, just do a regular-old commit
SegmentWriter.commit(self, mergetype=mergetype, optimize=optimize,
merge=merge)
def _commit(self, mergetype, optimize, merge):
# Index the remaining documents in the doc buffer
if self.docbuffer:
self._enqueue()
# Tell the tasks to finish
for task in self.tasks:
self.jobqueue.put(None)
# Merge existing segments
finalsegments = self._merge_segments(mergetype, optimize, merge)
# Wait for the subtasks to finish
for task in self.tasks:
task.join()
# Pull a (run_file_name, fieldnames, segment) tuple off the result
# queue for each sub-task, representing the final results of the task
results = []
for task in self.tasks:
results.append(self.resultqueue.get(timeout=5))
if self.multisegment:
# If we're not merging the segments, we don't care about the runname
# and fieldnames in the results... just pull out the segments and
# add them to the list of final segments
finalsegments += [s for _, _, s in results]
if self._added:
finalsegments.append(self._finalize_segment())
else:
self._close_segment()
assert self.perdocwriter.is_closed
else:
# Merge the posting sources from the sub-writers and my
# postings into this writer
self._merge_subsegments(results, mergetype)
self._close_segment()
self._assemble_segment()
finalsegments.append(self.get_segment())
assert self.perdocwriter.is_closed
self._commit_toc(finalsegments)
self._finish()
def _merge_subsegments(self, results, mergetype):
schema = self.schema
schemanames = set(schema.names())
storage = self.storage
codec = self.codec
sources = []
        # If information was added to this writer in the conventional way (e.g.
# through add_reader or merging segments), add it as an extra source
if self._added:
sources.append(self.pool.iter_postings())
pdrs = []
for runname, fieldnames, segment in results:
fieldnames = set(fieldnames) | schemanames
pdr = codec.per_document_reader(storage, segment)
pdrs.append(pdr)
basedoc = self.docnum
docmap = self.write_per_doc(fieldnames, pdr)
assert docmap is None
items = self._read_and_renumber_run(runname, basedoc)
sources.append(items)
# Create a MultiLengths object combining the length files from the
# subtask segments
self.perdocwriter.close()
pdrs.insert(0, self.per_document_reader())
mpdr = base.MultiPerDocumentReader(pdrs)
try:
# Merge the iterators into the field writer
self.fieldwriter.add_postings(schema, mpdr, imerge(sources))
finally:
mpdr.close()
self._added = True
class SerialMpWriter(MpWriter):
# A non-parallel version of the MpWriter for testing purposes
def __init__(self, ix, procs=None, batchsize=100, subargs=None, **kwargs):
SegmentWriter.__init__(self, ix, **kwargs)
self.procs = procs or cpu_count()
self.batchsize = batchsize
self.subargs = subargs if subargs else kwargs
self.tasks = [SegmentWriter(ix, _lk=False, **self.subargs)
for _ in xrange(self.procs)]
self.pointer = 0
self._added_sub = False
def add_document(self, **fields):
self.tasks[self.pointer].add_document(**fields)
self.pointer = (self.pointer + 1) % len(self.tasks)
self._added_sub = True
def _commit(self, mergetype, optimize, merge):
# Pull a (run_file_name, segment) tuple off the result queue for each
# sub-task, representing the final results of the task
# Merge existing segments
finalsegments = self._merge_segments(mergetype, optimize, merge)
results = []
for writer in self.tasks:
results.append(finish_subsegment(writer))
self._merge_subsegments(results, mergetype)
self._close_segment()
self._assemble_segment()
finalsegments.append(self.get_segment())
self._commit_toc(finalsegments)
self._finish()
# For compatibility with old multiproc module
class MultiSegmentWriter(MpWriter):
def __init__(self, *args, **kwargs):
MpWriter.__init__(self, *args, **kwargs)
self.multisegment = True
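# Illustrative usage sketch (not part of the whoosh sources): driving the
# MpWriter defined above against a throwaway on-disk index. The schema and
# documents are invented for the example; the parent and sub-writer processes
# pass job files through a shared filesystem, hence a real directory rather
# than RAM storage.
if __name__ == "__main__":
    import tempfile
    from whoosh import index
    from whoosh.fields import Schema, TEXT, ID
    schema = Schema(path=ID(stored=True), body=TEXT)
    ix = index.create_in(tempfile.mkdtemp(), schema)
    writer = MpWriter(ix, procs=2, batchsize=10)
    for i in xrange(100):
        writer.add_document(path=u"/doc/%d" % i, body=u"hello world %d" % i)
    writer.commit()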
|
[
"whoosh.writing.SegmentWriter.cancel",
"whoosh.compat.xrange",
"whoosh.externalsort.imerge",
"whoosh.writing.SegmentWriter",
"whoosh.writing.SegmentWriter.commit",
"whoosh.writing.SegmentWriter.__init__",
"multiprocessing.Queue",
"whoosh.codec.base.MultiPerDocumentReader",
"multiprocessing.Process.__init__",
"whoosh.util.random_name",
"multiprocessing.cpu_count"
] |
[((2873, 2895), 'multiprocessing.Process.__init__', 'Process.__init__', (['self'], {}), '(self)\n', (2889, 2895), False, 'from multiprocessing import Process, Queue, cpu_count\n'), ((4443, 4486), 'whoosh.writing.SegmentWriter', 'SegmentWriter', (['ix'], {'_lk': '(False)'}), '(ix, _lk=False, **self.kwargs)\n', (4456, 4486), False, 'from whoosh.writing import PostingPool, SegmentWriter\n'), ((6982, 7024), 'whoosh.writing.SegmentWriter.__init__', 'SegmentWriter.__init__', (['self', 'ix'], {}), '(self, ix, **kwargs)\n', (7004, 7024), False, 'from whoosh.writing import PostingPool, SegmentWriter\n'), ((7720, 7741), 'multiprocessing.Queue', 'Queue', (['(self.procs * 4)'], {}), '(self.procs * 4)\n', (7725, 7741), False, 'from multiprocessing import Process, Queue, cpu_count\n'), ((7834, 7841), 'multiprocessing.Queue', 'Queue', ([], {}), '()\n', (7839, 7841), False, 'from multiprocessing import Process, Queue, cpu_count\n'), ((13229, 13262), 'whoosh.codec.base.MultiPerDocumentReader', 'base.MultiPerDocumentReader', (['pdrs'], {}), '(pdrs)\n', (13256, 13262), False, 'from whoosh.codec import base\n'), ((13663, 13705), 'whoosh.writing.SegmentWriter.__init__', 'SegmentWriter.__init__', (['self', 'ix'], {}), '(self, ix, **kwargs)\n', (13685, 13705), False, 'from whoosh.writing import PostingPool, SegmentWriter\n'), ((6405, 6422), 'whoosh.compat.xrange', 'xrange', (['doc_count'], {}), '(doc_count)\n', (6411, 6422), False, 'from whoosh.compat import xrange, iteritems, pickle\n'), ((7056, 7067), 'multiprocessing.cpu_count', 'cpu_count', ([], {}), '()\n', (7065, 7067), False, 'from multiprocessing import Process, Queue, cpu_count\n'), ((8550, 8563), 'whoosh.util.random_name', 'random_name', ([], {}), '()\n', (8561, 8563), False, 'from whoosh.util import random_name\n'), ((9007, 9033), 'whoosh.writing.SegmentWriter.cancel', 'SegmentWriter.cancel', (['self'], {}), '(self)\n', (9027, 9033), False, 'from whoosh.writing import PostingPool, SegmentWriter\n'), ((10387, 10466), 'whoosh.writing.SegmentWriter.commit', 'SegmentWriter.commit', (['self'], {'mergetype': 'mergetype', 'optimize': 'optimize', 'merge': 'merge'}), '(self, mergetype=mergetype, optimize=optimize, merge=merge)\n', (10407, 10466), False, 'from whoosh.writing import PostingPool, SegmentWriter\n'), ((13737, 13748), 'multiprocessing.cpu_count', 'cpu_count', ([], {}), '()\n', (13746, 13748), False, 'from multiprocessing import Process, Queue, cpu_count\n'), ((13860, 13904), 'whoosh.writing.SegmentWriter', 'SegmentWriter', (['ix'], {'_lk': '(False)'}), '(ix, _lk=False, **self.subargs)\n', (13873, 13904), False, 'from whoosh.writing import PostingPool, SegmentWriter\n'), ((13389, 13404), 'whoosh.externalsort.imerge', 'imerge', (['sources'], {}), '(sources)\n', (13395, 13404), False, 'from whoosh.externalsort import imerge\n'), ((13936, 13954), 'whoosh.compat.xrange', 'xrange', (['self.procs'], {}), '(self.procs)\n', (13942, 13954), False, 'from whoosh.compat import xrange, iteritems, pickle\n')]
|
from abc import ABC, abstractmethod
from datetime import datetime
from enum import Enum
from harvestmouseapp.mvc_model.model import MouseList, ColumnList
class FilterType(Enum):
"""
    This is the enumeration of the filter type, representing the different methods of filtering the mouse list
"""
GTE = 0
GT = 1
LTE = 2
LT = 3
EQ = 4
CONTAINS = 5
def get_enum_by_value(num):
"""
    This public function maps an integer index to the corresponding FilterType, giving clients an easier and
    more convenient way to specify the type of filter they wish to apply
"""
if num == 0:
return FilterType.GTE
elif num == 1:
return FilterType.GT
elif num == 2:
return FilterType.LTE
elif num == 3:
        return FilterType.LT  # the enum defines LT; there is no LE member
elif num == 4:
return FilterType.EQ
else:
return FilterType.CONTAINS
class FilterOption:
"""
    This class represents the filter option specified by the client, including:
    1. The specific column the client wishes the filter to depend on.
    2. The value of the criteria.
    3. The way the criteria is compared with the data, e.g. GT: greater than the value of the criteria.
"""
def __init__(self, column_name, value, filter_type=FilterType.CONTAINS):
self.__column_name = ColumnList.header_name.value + column_name
self.__value = value
self.__filter_type = filter_type
@property
def column_name(self):
return self.__column_name
@column_name.setter
def column_name(self, column_name):
header_name = ColumnList.header_name.value
self.__column_name = header_name + column_name
@property
def value(self):
return self.__value
@value.setter
def value(self, value):
self.__value = value
@property
def filter_type(self):
return self.__filter_type
@filter_type.setter
def filter_type(self, filter_type):
self.__filter_type = filter_type
class GenericMouseFilter(ABC):
"""
    Expects the subclass to implement the filter method to filter the mouse_input based on the
filter option
"""
@abstractmethod
def filter(self, mouse_input, filter_option):
pass
class MouseFilter(GenericMouseFilter):
"""
This method filters the mouse list based on the filter option provided by the client.
    It first fetches the entire mouse list and then filters it one filter option at
    a time
"""
def filter(self, mouse_input, filter_option):
return_mouse_list = MouseList()
value = filter_option.value
col_name = filter_option.column_name
if col_name == ColumnList.BIRTH_DATE.value or \
col_name == ColumnList.END_DATE.value:
value = datetime.strptime(value, "%Y-%m-%d").date()
try:
if filter_option.filter_type == FilterType.CONTAINS:
list_mouse = [m for m in mouse_input if value in m.get_attribute_by_str(col_name)]
elif filter_option.filter_type == FilterType.EQ:
list_mouse = [m for m in mouse_input if value == m.get_attribute_by_str(col_name)]
elif filter_option.filter_type == FilterType.GT:
list_mouse = [m for m in mouse_input if value < m.get_attribute_by_str(col_name)]
elif filter_option.filter_type == FilterType.GTE:
list_mouse = [m for m in mouse_input if value <= m.get_attribute_by_str(col_name)]
elif filter_option.filter_type == FilterType.LT:
list_mouse = [m for m in mouse_input if value > m.get_attribute_by_str(col_name)]
elif filter_option.filter_type == FilterType.LTE:
list_mouse = [m for m in mouse_input if value >= m.get_attribute_by_str(col_name)]
else:
list_mouse = mouse_input
return_mouse_list.add_mouse(list_mouse)
        except TypeError:
list_mouse = mouse_input
return_mouse_list.add_mouse(list_mouse)
return return_mouse_list
def construct_filter_string(self, filter_option):
if filter_option.filter_type == FilterType.CONTAINS:
return filter_option.column_name[1:].replace('Mouse__','') + '__contains'
elif filter_option.filter_type == FilterType.EQ:
return filter_option.column_name[1:].replace('Mouse__','') + '__exact'
elif filter_option.filter_type == FilterType.GT:
return filter_option.column_name[1:].replace('Mouse__','') + '__gt'
elif filter_option.filter_type == FilterType.GTE:
return filter_option.column_name[1:].replace('Mouse__','') + '__gte'
elif filter_option.filter_type == FilterType.LT:
return filter_option.column_name[1:].replace('Mouse__','') + '__lt'
elif filter_option.filter_type == FilterType.LTE:
return filter_option.column_name[1:].replace('Mouse__','') + '__lte'
else:
return filter_option.column_name[1:].replace('Mouse__','') + '__contains'
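# Illustrative sketch (not part of the original module): wiring a numeric
# filter code from a client request into the helpers above. The column name
# "physical_id" is assumed for the example; the printed lookup string also
# depends on ColumnList.header_name.value, which FilterOption prepends and
# construct_filter_string() strips off again.
if __name__ == "__main__":
    option = FilterOption("physical_id", "M001", get_enum_by_value(5))
    print(MouseFilter().construct_filter_string(option))  # e.g. "physical_id__contains"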
|
[
"harvestmouseapp.mvc_model.model.MouseList",
"datetime.datetime.strptime"
] |
[((2586, 2597), 'harvestmouseapp.mvc_model.model.MouseList', 'MouseList', ([], {}), '()\n', (2595, 2597), False, 'from harvestmouseapp.mvc_model.model import MouseList, ColumnList\n'), ((2811, 2847), 'datetime.datetime.strptime', 'datetime.strptime', (['value', '"""%Y-%m-%d"""'], {}), "(value, '%Y-%m-%d')\n", (2828, 2847), False, 'from datetime import datetime\n')]
|
from datetime import datetime
import pandas as pd
from airflow.models import Variable
from airflow.operators.bash import BashOperator
from airflow.operators.python_operator import PythonOperator
from minio import Minio
from airflow import DAG
DEFAULT_ARGS = {
"owner": "Airflow",
"depends_on_past": False,
"start_date": datetime(2021, 1, 13),
}
dag = DAG(
"etl_satisfaction_evaluation_att",
default_args=DEFAULT_ARGS,
schedule_interval="@once",
)
data_lake_server = Variable.get("data_lake_server")
data_lake_login = Variable.get("data_lake_login")
data_lake_password = Variable.get("data_lake_password")
client = Minio(
data_lake_server,
access_key=data_lake_login,
secret_key=data_lake_password,
secure=False,
)
def extract():
    # extract the data from the Data Lake.
obj = client.get_object(
"landing",
"performance-evaluation/employee_performance_evaluation.json",
)
data = obj.read()
df_ = pd.read_json(data, lines=True)
    # persist the files to the Staging area.
df_.to_json(
"/tmp/employee_performance_evaluation.json",
orient="records",
lines=True,
)
def load():
    # read the data from the Staging area.
    df_ = pd.read_json(
        "/tmp/employee_performance_evaluation.json",
        orient="records",
        lines=True,
)
    # convert the data to parquet format and persist it to the staging area.
df_[["satisfaction_level", "last_evaluation"]].to_parquet(
"/tmp/satisfaction_evaluation.parquet", index=False
)
    # load the data into the Data Lake.
client.fput_object(
"processing",
"satisfaction_evaluation.parquet",
"/tmp/satisfaction_evaluation.parquet",
)
extract_task = PythonOperator(
task_id="extract_file_from_data_lake",
provide_context=True,
python_callable=extract,
dag=dag,
)
load_task = PythonOperator(
task_id="load_file_to_data_lake",
provide_context=True,
python_callable=load,
dag=dag,
)
clean_task = BashOperator(
task_id="clean_files_on_staging",
bash_command="rm -f /tmp/*.csv;rm -f /tmp/*.json;rm -f /tmp/*.parquet;",
dag=dag,
)
extract_task >> load_task >> clean_task
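# Illustrative note (not part of the original DAG file): the three Variables
# read above must already exist in Airflow's metadata database before this
# module is parsed. One way to seed them from the CLI (values are
# placeholders):
#
#   airflow variables set data_lake_server localhost:9000
#   airflow variables set data_lake_login minio_access_key
#   airflow variables set data_lake_password minio_secret_key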
|
[
"airflow.DAG",
"airflow.operators.bash.BashOperator",
"minio.Minio",
"airflow.operators.python_operator.PythonOperator",
"airflow.models.Variable.get",
"pandas.read_json",
"datetime.datetime"
] |
[((367, 463), 'airflow.DAG', 'DAG', (['"""etl_satisfaction_evaluation_att"""'], {'default_args': 'DEFAULT_ARGS', 'schedule_interval': '"""@once"""'}), "('etl_satisfaction_evaluation_att', default_args=DEFAULT_ARGS,\n schedule_interval='@once')\n", (370, 463), False, 'from airflow import DAG\n'), ((495, 527), 'airflow.models.Variable.get', 'Variable.get', (['"""data_lake_server"""'], {}), "('data_lake_server')\n", (507, 527), False, 'from airflow.models import Variable\n'), ((546, 577), 'airflow.models.Variable.get', 'Variable.get', (['"""data_lake_login"""'], {}), "('data_lake_login')\n", (558, 577), False, 'from airflow.models import Variable\n'), ((599, 633), 'airflow.models.Variable.get', 'Variable.get', (['"""data_lake_password"""'], {}), "('data_lake_password')\n", (611, 633), False, 'from airflow.models import Variable\n'), ((644, 745), 'minio.Minio', 'Minio', (['data_lake_server'], {'access_key': 'data_lake_login', 'secret_key': 'data_lake_password', 'secure': '(False)'}), '(data_lake_server, access_key=data_lake_login, secret_key=\n data_lake_password, secure=False)\n', (649, 745), False, 'from minio import Minio\n'), ((1784, 1897), 'airflow.operators.python_operator.PythonOperator', 'PythonOperator', ([], {'task_id': '"""extract_file_from_data_lake"""', 'provide_context': '(True)', 'python_callable': 'extract', 'dag': 'dag'}), "(task_id='extract_file_from_data_lake', provide_context=True,\n python_callable=extract, dag=dag)\n", (1798, 1897), False, 'from airflow.operators.python_operator import PythonOperator\n'), ((1926, 2031), 'airflow.operators.python_operator.PythonOperator', 'PythonOperator', ([], {'task_id': '"""load_file_to_data_lake"""', 'provide_context': '(True)', 'python_callable': 'load', 'dag': 'dag'}), "(task_id='load_file_to_data_lake', provide_context=True,\n python_callable=load, dag=dag)\n", (1940, 2031), False, 'from airflow.operators.python_operator import PythonOperator\n'), ((2061, 2194), 'airflow.operators.bash.BashOperator', 'BashOperator', ([], {'task_id': '"""clean_files_on_staging"""', 'bash_command': '"""rm -f /tmp/*.csv;rm -f /tmp/*.json;rm -f /tmp/*.parquet;"""', 'dag': 'dag'}), "(task_id='clean_files_on_staging', bash_command=\n 'rm -f /tmp/*.csv;rm -f /tmp/*.json;rm -f /tmp/*.parquet;', dag=dag)\n", (2073, 2194), False, 'from airflow.operators.bash import BashOperator\n'), ((335, 356), 'datetime.datetime', 'datetime', (['(2021)', '(1)', '(13)'], {}), '(2021, 1, 13)\n', (343, 356), False, 'from datetime import datetime\n'), ((980, 1010), 'pandas.read_json', 'pd.read_json', (['data'], {'lines': '(True)'}), '(data, lines=True)\n', (992, 1010), True, 'import pandas as pd\n'), ((1254, 1347), 'pandas.read_json', 'pd.read_json', (['"""/tmp/employee_performance_evaluation.json"""'], {'orient': '"""records"""', 'lines': '"""True"""'}), "('/tmp/employee_performance_evaluation.json', orient='records',\n lines='True')\n", (1266, 1347), True, 'import pandas as pd\n')]
|
import tkinter as tk
janela = tk.Tk()
mensagem_para_usuario = "a message that may be useful for a user"
msg = tk.Message(janela, text=mensagem_para_usuario)
msg.config(bg='#f5fffa', font=('times', 24, 'italic'))
msg.pack()
janela.mainloop()
|
[
"tkinter.Message",
"tkinter.Tk"
] |
[((30, 37), 'tkinter.Tk', 'tk.Tk', ([], {}), '()\n', (35, 37), True, 'import tkinter as tk\n'), ((110, 156), 'tkinter.Message', 'tk.Message', (['janela'], {'text': 'mensagem_para_usuario'}), '(janela, text=mensagem_para_usuario)\n', (120, 156), True, 'import tkinter as tk\n')]
|
"""Tests for loqusdb extension"""
import subprocess
import pytest
from flask import Flask
from scout.server.extensions.loqus_extension import LoqusDB
def test_init_loqusextension(loqus_exe):
"""Test a init a loqus extension object"""
# GIVEN a loqusdb binary
# WHEN initialising a loqusdb extension
loqus_obj = LoqusDB(loqusdb_binary=loqus_exe)
# THEN assert that the binary is correct
assert loqus_obj.loqusdb_binary == loqus_exe
# THEN assert that the base call is correct
assert loqus_obj.base_call == [loqus_exe]
# THEN assert that the version is 0
assert loqus_obj.version == 0
# THEN assert that there is no config
assert loqus_obj.loqusdb_config is None
def test_init_loqusextension_version(loqus_exe, loqus_version):
"""Test a init a loqus extension object with a specified version"""
# GIVEN a loqusdb binary and a version
# WHEN initialising a loqusdb extension
loqus_obj = LoqusDB(loqusdb_binary=loqus_exe, version=loqus_version)
# THEN assert that the binary is correct
assert loqus_obj.loqusdb_binary == loqus_exe
# THEN assert that the base call is correct
assert loqus_obj.base_call == [loqus_exe]
# THEN assert that the version is correct
assert loqus_obj.version == loqus_version
# THEN assert that there is no config
assert loqus_obj.loqusdb_config is None
def test_init_loqusextension_config(loqus_exe, loqus_config, loqus_version):
"""Test a init a loqus extension object with a specified version"""
# GIVEN a loqusdb binary, a version and a config
# WHEN initialising a loqusdb extension
loqus_obj = LoqusDB(
loqusdb_binary=loqus_exe, loqusdb_config=loqus_config, version=loqus_version
)
# THEN assert that the binary is correct
assert loqus_obj.loqusdb_binary == loqus_exe
# THEN assert that the base call is correct
assert loqus_obj.base_call == [loqus_exe, "--config", loqus_config]
# THEN assert that the version is correct
assert loqus_obj.version == loqus_version
# THEN assert that there is no config
assert loqus_obj.loqusdb_config == loqus_config
def test_init_loqusextension_init_app(loqus_exe, loqus_version):
"""Test a init a loqus extension object with flask app with version"""
# GIVEN a loqusdb binary
configs = {"LOQUSDB_SETTINGS": {"binary_path": loqus_exe, "version": loqus_version}}
# WHEN initialising a loqusdb extension with init app
app = Flask(__name__)
loqus_obj = LoqusDB()
with app.app_context():
app.config = configs
loqus_obj.init_app(app)
# THEN assert that the binary is correct
assert loqus_obj.loqusdb_binary == loqus_exe
# THEN assert that the version is correct
assert loqus_obj.version == loqus_version
# THEN assert that there is no config
assert loqus_obj.loqusdb_config is None
def test_init_loqusextension_init_app_no_version(mocker, loqus_exe, loqus_version):
"""Test a init a loqus extension object with flask app"""
# GIVEN a loqusdb binary
configs = {"LOQUSDB_SETTINGS": {"binary_path": loqus_exe}}
mocker.patch.object(subprocess, "check_output")
subprocess.check_output.return_value = b"loqusdb, version %f" % loqus_version
# WHEN initialising a loqusdb extension with init app
app = Flask(__name__)
loqus_obj = LoqusDB()
with app.app_context():
app.config = configs
loqus_obj.init_app(app)
# THEN assert that the binary is correct
assert loqus_obj.loqusdb_binary == loqus_exe
assert loqus_obj.version == loqus_version
# THEN assert that there is no config
assert loqus_obj.loqusdb_config is None
def test_init_loqusextension_init_app_wrong_version(loqus_exe):
"""Test a init a loqus extension object with flask app"""
# GIVEN a loqusdb binary
configs = {"LOQUSDB_SETTINGS": {"binary_path": loqus_exe, "version": 1.0}}
# WHEN initialising a loqusdb extension with init app
app = Flask(__name__)
loqus_obj = LoqusDB()
with pytest.raises(SyntaxError):
with app.app_context():
app.config = configs
loqus_obj.init_app(app)
def test_init_loqusextension_init_app_with_config(loqus_exe, loqus_config):
"""Test a init a loqus extension object with flask app with version and config"""
# GIVEN a loqusdb binary
version = 2.5
configs = {
"LOQUSDB_SETTINGS": {
"binary_path": loqus_exe,
"version": version,
"config_path": loqus_config,
}
}
# WHEN initialising a loqusdb extension with init app
app = Flask(__name__)
loqus_obj = LoqusDB()
with app.app_context():
app.config = configs
loqus_obj.init_app(app)
# THEN assert that the binary is correct
assert loqus_obj.loqusdb_binary == loqus_exe
# THEN assert that the version is correct
assert loqus_obj.version == version
# THEN assert that the config is correct
assert loqus_obj.loqusdb_config == loqus_config
|
[
"scout.server.extensions.loqus_extension.LoqusDB",
"pytest.raises",
"flask.Flask"
] |
[((331, 364), 'scout.server.extensions.loqus_extension.LoqusDB', 'LoqusDB', ([], {'loqusdb_binary': 'loqus_exe'}), '(loqusdb_binary=loqus_exe)\n', (338, 364), False, 'from scout.server.extensions.loqus_extension import LoqusDB\n'), ((954, 1010), 'scout.server.extensions.loqus_extension.LoqusDB', 'LoqusDB', ([], {'loqusdb_binary': 'loqus_exe', 'version': 'loqus_version'}), '(loqusdb_binary=loqus_exe, version=loqus_version)\n', (961, 1010), False, 'from scout.server.extensions.loqus_extension import LoqusDB\n'), ((1641, 1731), 'scout.server.extensions.loqus_extension.LoqusDB', 'LoqusDB', ([], {'loqusdb_binary': 'loqus_exe', 'loqusdb_config': 'loqus_config', 'version': 'loqus_version'}), '(loqusdb_binary=loqus_exe, loqusdb_config=loqus_config, version=\n loqus_version)\n', (1648, 1731), False, 'from scout.server.extensions.loqus_extension import LoqusDB\n'), ((2469, 2484), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (2474, 2484), False, 'from flask import Flask\n'), ((2501, 2510), 'scout.server.extensions.loqus_extension.LoqusDB', 'LoqusDB', ([], {}), '()\n', (2508, 2510), False, 'from scout.server.extensions.loqus_extension import LoqusDB\n'), ((3338, 3353), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (3343, 3353), False, 'from flask import Flask\n'), ((3370, 3379), 'scout.server.extensions.loqus_extension.LoqusDB', 'LoqusDB', ([], {}), '()\n', (3377, 3379), False, 'from scout.server.extensions.loqus_extension import LoqusDB\n'), ((4019, 4034), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (4024, 4034), False, 'from flask import Flask\n'), ((4051, 4060), 'scout.server.extensions.loqus_extension.LoqusDB', 'LoqusDB', ([], {}), '()\n', (4058, 4060), False, 'from scout.server.extensions.loqus_extension import LoqusDB\n'), ((4651, 4666), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (4656, 4666), False, 'from flask import Flask\n'), ((4683, 4692), 'scout.server.extensions.loqus_extension.LoqusDB', 'LoqusDB', ([], {}), '()\n', (4690, 4692), False, 'from scout.server.extensions.loqus_extension import LoqusDB\n'), ((4070, 4096), 'pytest.raises', 'pytest.raises', (['SyntaxError'], {}), '(SyntaxError)\n', (4083, 4096), False, 'import pytest\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from multiprocessing import Process
from lattes.pages import Curriculum, Xml
from lattes.config import BaseLogger
logger = BaseLogger.from_file('simple_client', file_name='simple_client.log')
ZIP_PATH = '/home/elodin/Workspace/Python/cnpq/xmls'
def single_cored_example(short_ids):
worker(short_ids)
def multi_cored_example(short_ids):
"""Simple enough multicore example."""
chunk = 1
short_ids = [short_ids[x:x+chunk] for x in range(0, len(short_ids), chunk)]
logger.info('Spawning processes')
for split_list in short_ids:
p = Process(target=worker, args=(split_list,))
p.start()
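def multi_cored_example_joined(short_ids):
    """Illustrative variant (not in the original client): same fan-out as
    multi_cored_example, but keeps the Process handles and joins them so the
    caller blocks until every download has finished."""
    processes = [Process(target=worker, args=([short_id],)) for short_id in short_ids]
    for p in processes:
        p.start()
    for p in processes:
        p.join()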
def worker(short_ids):
"""Run through a list of short_ids downloading it's respective xmls."""
for short_id in short_ids:
logger.info('Getting curriculum for {}'.format(short_id))
curriculum = Curriculum(short_id)
Xml(curriculum.long_id, ZIP_PATH)
logger.info('Curriculum for {} has been downloaded'.format(short_id))
if __name__ == "__main__":
short_ids = ['K8185478E7', 'K4246690H2', 'K4138636E6']
long_ids = ['6380212729787758', '7639569152487589', '1024601314143406']
logger.info('Starting single_core...')
single_cored_example(short_ids)
logger.info('Starting multi_core...')
multi_cored_example(short_ids)
|
[
"multiprocessing.Process",
"lattes.config.BaseLogger.from_file",
"lattes.pages.Curriculum",
"lattes.pages.Xml"
] |
[((171, 239), 'lattes.config.BaseLogger.from_file', 'BaseLogger.from_file', (['"""simple_client"""'], {'file_name': '"""simple_client.log"""'}), "('simple_client', file_name='simple_client.log')\n", (191, 239), False, 'from lattes.config import BaseLogger\n'), ((612, 654), 'multiprocessing.Process', 'Process', ([], {'target': 'worker', 'args': '(split_list,)'}), '(target=worker, args=(split_list,))\n', (619, 654), False, 'from multiprocessing import Process\n'), ((892, 912), 'lattes.pages.Curriculum', 'Curriculum', (['short_id'], {}), '(short_id)\n', (902, 912), False, 'from lattes.pages import Curriculum, Xml\n'), ((921, 954), 'lattes.pages.Xml', 'Xml', (['curriculum.long_id', 'ZIP_PATH'], {}), '(curriculum.long_id, ZIP_PATH)\n', (924, 954), False, 'from lattes.pages import Curriculum, Xml\n')]
|
import datetime
import os
from clover.netcdf.utilities import get_fill_value_for_variable
from clover.netcdf.variable import SpatialCoordinateVariable, SpatialCoordinateVariables, DateVariable
import clover.netcdf.describe
from netCDF4 import Dataset
import datetime as dt
import wx
from vistas.core.gis.extent import Extent
from vistas.core.plugins.data import RasterDataPlugin, TemporalInfo, VariableStats
from vistas.core.timeline import Timeline
from vistas.ui.app import App
class NetCDF4DataPlugin(RasterDataPlugin):
id = 'netcdf'
name = 'NetCDF'
description = 'A plugin to read NetCDF (.nc) files.'
author = 'Conservation Biology Institute'
version = '1.0'
extensions = [('nc', 'NetCDF')]
extent = None
time_info = None
variables = None
data_name = None
affine = None
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.time_info = None
self.data_name = None
self.extent = None
self.variables = []
# NetCDF-specific variables
self.x_dim = None
self.y_dim = None
self.t_dim = None
self.y_increasing = False
self.x = None
self.y = None
self.x_length = None
self.y_length = None
self.affine = None
self._resolution = None
# Grid caching
self._current_variable = None
self._current_grid = None
self.var_shape = None
def load_data(self):
self.data_name = self.path.split(os.sep)[-1].split('.')[0]
with Dataset(self.path, 'r') as ds:
# This should be: self.variables = clover.netcdf.utilities.data_variables(ds)
# but unfortunately clover's data_variables() method is broken under python3
# currently this may cause problems with grid_mapping or *_bnds variables
dimensions = list(ds.dimensions.keys())
self.variables = [var for var in ds.variables if var not in dimensions]
recognized_dims = {'x': 'y', 'lon': 'lat', 'longitude': 'latitude'}
for x, y in recognized_dims.items():
if x in dimensions:
self.x_dim, self.y_dim = x, y
break
if self.x_dim is None:
raise KeyError("NetCDF file doesn't have recognizable dimension names (x, lat, latitude, etc.)")
# we need a time_info attribute even if there's no time info
self.time_info = TemporalInfo()
recognized_time_dims = ['time', 'year', 'month', 'date']
for timedim in recognized_time_dims:
if timedim in dimensions:
self.t_dim = timedim
if timedim in ds.variables:
timevar = DateVariable(ds.variables[timedim])
self.time_info.timestamps = timevar.datetimes.tolist()
# we don't want timezones, but sometimes clover adds them
if self.time_info.timestamps[0].tzinfo is not None:
self.time_info.timestamps = [d.replace(tzinfo=None) for d in self.time_info.timestamps]
else: # no time variable
wx.MessageDialog(App.get().app_controller.main_window,
caption='Missing Time Data',
message=('{} has a time dimension "{}" but no corresponding coordinate ' +
'variable. Using only final timestep.').format(self.data_name, timedim), style=wx.OK).ShowModal()
break
# Determine spatial extent; if multiple vars, they must all have the same extent
self.x = SpatialCoordinateVariable(ds.variables[self.x_dim])
self.y = SpatialCoordinateVariable(ds.variables[self.y_dim])
self.y_increasing = self.y.values[0] < self.y.values[-1]
grid = clover.netcdf.describe.describe(ds)['variables'][self.variables[0]]['spatial_grid']
ext = grid['extent']
self.extent = Extent(*[ext[d] for d in ['xmin', 'ymin', 'xmax', 'ymax']])
self._resolution = grid['x_resolution']
self.affine = SpatialCoordinateVariables(self.x, self.y, None).affine
self.var_shape = ds.variables[self.variables[0]].shape
# if a var has a temporal dimension but no temporal data, we treat it as non-temporal:
if not self.time_info.is_temporal and len(self.var_shape) == 3:
self.var_shape = self.var_shape[1:]
@staticmethod
def is_valid_file(path):
try:
with Dataset(path, 'r') as ds:
if len(ds.variables.keys()) > 0:
return True
return False
except:
return False
def get_data(self, variable, date=None):
if variable != self._current_variable: # read data from disk
with Dataset(self.path, 'r') as ds:
slice_to_read = slice(None)
# If it has a time dimension but no coord var we treat it as non-temporal
if len(ds.variables[variable].shape) == 3 and not self.time_info.is_temporal:
slice_to_read = -1
self._current_grid = ds.variables[variable][slice_to_read]
self._current_variable = variable
slice_to_return = slice(None)
if self.time_info.is_temporal:
if date is None:
date = Timeline.app().current
slice_to_return = min([i for i in enumerate(self.time_info.timestamps)],
key=lambda d: abs(d[1] - date))[0]
return self._current_grid[slice_to_return]
@property
def shape(self):
return self.var_shape
@property
def resolution(self):
return self._resolution
def calculate_stats(self):
with Dataset(self.path, 'r') as ds:
desc = clover.netcdf.describe.describe(ds)
for var in self.variables:
v_desc = desc['variables'][var]
self.stats[var] = VariableStats(v_desc['min'], v_desc['max'],
get_fill_value_for_variable(ds.variables[var]))
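# Standalone sketch (not part of the plugin): the nearest-timestamp selection
# used in get_data() above, isolated with plain datetimes for clarity.
if __name__ == "__main__":
    stamps = [dt.datetime(2000, 1, 1), dt.datetime(2000, 6, 1), dt.datetime(2001, 1, 1)]
    target = dt.datetime(2000, 5, 1)
    nearest = min([i for i in enumerate(stamps)], key=lambda d: abs(d[1] - target))[0]
    assert nearest == 1  # 2000-06-01 is the closest stamp to 2000-05-01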
|
[
"netCDF4.Dataset",
"clover.netcdf.utilities.get_fill_value_for_variable",
"vistas.core.timeline.Timeline.app",
"vistas.core.plugins.data.TemporalInfo",
"vistas.ui.app.App.get",
"vistas.core.gis.extent.Extent",
"clover.netcdf.variable.DateVariable",
"clover.netcdf.variable.SpatialCoordinateVariable",
"clover.netcdf.variable.SpatialCoordinateVariables"
] |
[((1567, 1590), 'netCDF4.Dataset', 'Dataset', (['self.path', '"""r"""'], {}), "(self.path, 'r')\n", (1574, 1590), False, 'from netCDF4 import Dataset\n'), ((2492, 2506), 'vistas.core.plugins.data.TemporalInfo', 'TemporalInfo', ([], {}), '()\n', (2504, 2506), False, 'from vistas.core.plugins.data import RasterDataPlugin, TemporalInfo, VariableStats\n'), ((3730, 3781), 'clover.netcdf.variable.SpatialCoordinateVariable', 'SpatialCoordinateVariable', (['ds.variables[self.x_dim]'], {}), '(ds.variables[self.x_dim])\n', (3755, 3781), False, 'from clover.netcdf.variable import SpatialCoordinateVariable, SpatialCoordinateVariables, DateVariable\n'), ((3803, 3854), 'clover.netcdf.variable.SpatialCoordinateVariable', 'SpatialCoordinateVariable', (['ds.variables[self.y_dim]'], {}), '(ds.variables[self.y_dim])\n', (3828, 3854), False, 'from clover.netcdf.variable import SpatialCoordinateVariable, SpatialCoordinateVariables, DateVariable\n'), ((4086, 4145), 'vistas.core.gis.extent.Extent', 'Extent', (["*[ext[d] for d in ['xmin', 'ymin', 'xmax', 'ymax']]"], {}), "(*[ext[d] for d in ['xmin', 'ymin', 'xmax', 'ymax']])\n", (4092, 4145), False, 'from vistas.core.gis.extent import Extent\n'), ((5900, 5923), 'netCDF4.Dataset', 'Dataset', (['self.path', '"""r"""'], {}), "(self.path, 'r')\n", (5907, 5923), False, 'from netCDF4 import Dataset\n'), ((4224, 4272), 'clover.netcdf.variable.SpatialCoordinateVariables', 'SpatialCoordinateVariables', (['self.x', 'self.y', 'None'], {}), '(self.x, self.y, None)\n', (4250, 4272), False, 'from clover.netcdf.variable import SpatialCoordinateVariable, SpatialCoordinateVariables, DateVariable\n'), ((4652, 4670), 'netCDF4.Dataset', 'Dataset', (['path', '"""r"""'], {}), "(path, 'r')\n", (4659, 4670), False, 'from netCDF4 import Dataset\n'), ((4957, 4980), 'netCDF4.Dataset', 'Dataset', (['self.path', '"""r"""'], {}), "(self.path, 'r')\n", (4964, 4980), False, 'from netCDF4 import Dataset\n'), ((5506, 5520), 'vistas.core.timeline.Timeline.app', 'Timeline.app', ([], {}), '()\n', (5518, 5520), False, 'from vistas.core.timeline import Timeline\n'), ((6171, 6217), 'clover.netcdf.utilities.get_fill_value_for_variable', 'get_fill_value_for_variable', (['ds.variables[var]'], {}), '(ds.variables[var])\n', (6198, 6217), False, 'from clover.netcdf.utilities import get_fill_value_for_variable\n'), ((2790, 2825), 'clover.netcdf.variable.DateVariable', 'DateVariable', (['ds.variables[timedim]'], {}), '(ds.variables[timedim])\n', (2802, 2825), False, 'from clover.netcdf.variable import SpatialCoordinateVariable, SpatialCoordinateVariables, DateVariable\n'), ((3265, 3274), 'vistas.ui.app.App.get', 'App.get', ([], {}), '()\n', (3272, 3274), False, 'from vistas.ui.app import App\n')]
|
import tensorflow as tf
from tensorflow.keras import layers
from segmentation_models import PSPNet
from segmentation_models.backbones.backbones_factory import Backbones
from segmentation_models.models.pspnet import SpatialContextBlock
from private_models.models.utils.custom_layers import convblock, atrous_convblock
# copied from qubvel's segmentation_models; oddly it is not shipped in the installed package
def filter_keras_submodules(kwargs):
"""Selects only arguments that define keras_application submodules. """
submodule_keys = kwargs.keys() & {'backend', 'layers', 'models', 'utils'}
return {key: kwargs[key] for key in submodule_keys}
def build_PSP(x, conv_filters=512, pooling_type="avg", use_batchnorm=True):
    # legacy helper that builds the PSP module by hand
    x1 = SpatialContextBlock(1, conv_filters, pooling_type, use_batchnorm)(x)
    x2 = SpatialContextBlock(2, conv_filters, pooling_type, use_batchnorm)(x)
    x3 = SpatialContextBlock(3, conv_filters, pooling_type, use_batchnorm)(x)
    x6 = SpatialContextBlock(6, conv_filters, pooling_type, use_batchnorm)(x)
    x = layers.Concatenate(axis=3, name="psp_concat")([x1, x2, x3, x6])
    # 1x1 convolution to mix the pooled context maps; the original used
    # layers.Conv1D here, which cannot be applied to a 4-D feature map
    x = layers.Conv2D(filters=conv_filters, kernel_size=1, activation="relu")(x)
    return x
def decode_up(x, skip, nfilters):
    # upsample the feature map by 2x and concatenate the skip connection
x = layers.UpSampling2D()(x)
x = layers.Concatenate(axis=3)([x, skip])
x = convblock(x, nfilters)
return x
def decode_atrous(x, skip, nfilters):
    # upsample the feature map by 2x and concatenate the skip connection,
    # then apply the atrous (dilated) convolution block
x = layers.UpSampling2D()(x)
x = layers.Concatenate(axis=3)([x, skip])
x = atrous_convblock(x, nfilters)
return x
def build_pspunet(backbone_name='resnet50',
in_shape=(96, 96, 3),
classes=1,
encoder_weights='imagenet',
encoder_freeze=False,
decoder_block_type='upsampling',
decoder_filters=(256, 128, 64, 32, 16),
decoder_use_batchnorm=True,
**kwargs
):
""" Custom Net from combining Unet and a PSP convolutional block, together with more blocks
Args:
backbone_name: name of classification model (without last dense layers) used as feature
extractor to build segmentation model.
        in_shape: shape of input data/image ``(H, W, C)``. In the general
            case you do not need to set the ``H`` and ``W`` shapes; just pass ``(None, None, C)`` to make your model
            able to process images of any size, but ``H`` and ``W`` of input images should be divisible by factor ``32``.
classes: a number of classes for output (output shape - ``(h, w, classes)``).
activation: name of one of ``keras.activations`` for last model layer
(e.g. ``sigmoid``, ``softmax``, ``linear``).
weights: optional, path to model weights.
encoder_weights: one of ``None`` (random initialization), ``imagenet`` (pre-training on ImageNet).
encoder_freeze: if ``True`` set all layers of encoder (backbone model) as non-trainable.
encoder_features: a list of layer numbers or names starting from top of the model.
Each of these layers will be concatenated with corresponding decoder block. If ``default`` is used
layer names are taken from ``DEFAULT_SKIP_CONNECTIONS``.
decoder_block_type: one of blocks with following layers structure:
- `upsampling`: ``UpSampling2D`` -> ``Conv2D`` -> ``Conv2D``
- `transpose`: ``Transpose2D`` -> ``Conv2D``
decoder_filters: list of numbers of ``Conv2D`` layer filters in decoder blocks
decoder_use_batchnorm: if ``True``, ``BatchNormalisation`` layer between ``Conv2D`` and ``Activation`` layers
is used.
Returns:
``keras.models.Model``: **PSPUnet**
"""
    # added lines adapted from the qubvel package
if classes == 2:
activation = "sigmoid"
outchannels = 1
else:
outchannels = classes
activation = "softmax"
model = PSPNet(backbone_name=backbone_name,
input_shape=in_shape,
classes=classes,
activation=activation,
encoder_weights=encoder_weights,
encoder_freeze=encoder_freeze,
downsample_factor=16,
psp_conv_filters=512,
psp_pooling_type='avg',
psp_use_batchnorm=True,
psp_dropout=None)
    # extract the skip-connection layers from the backbone
fet_layers = Backbones.get_feature_layers(backbone_name)
skips = []
for lay in fet_layers:
if isinstance(lay, str):
skips.append(model.get_layer(name=lay).output)
else:
skips.append(model.get_layer(index=lay).output)
# start the upsampling
if "vgg" in backbone_name:
# in the vgg case, we need to add an additional layer
skip_0 = skips[4]
else:
skip_0 = convblock(model.input, filters=32, names="encoder-384")
    # we build the net from bottom to top, each stage doubling the spatial
    # size and applying two convolutions after each concatenation
pspout = convblock(skips[0], filters=512)
dec_24 = layers.UpSampling2D((2, 2), name="up_24")(pspout)
dec_24 = layers.Concatenate(name="conc_24")([dec_24, skips[1]])
dec_24 = convblock(dec_24, filters=decoder_filters[0], names="conv_24")
dec_48 = layers.UpSampling2D((2, 2), name="up_48")(dec_24)
dec_48 = layers.Concatenate(name="conc_48")([skips[2], dec_48])
# dec_48 = convblock(dec_48, filters=decoder_filters[1], names="conv_48")
dec_48 = atrous_convblock(dec_48, filters=decoder_filters[1], names="conv_48")
    # TODO: test whether the dilated convolution really offers better results
dec_96 = layers.UpSampling2D((2, 2), name="up_96")(dec_48)
dec_96 = layers.Concatenate(name="conc_96")([skips[3], dec_96])
# dec_96 = convblock(dec_96, filters=decoder_filters[2], names="conv_96")
dec_96 = atrous_convblock(dec_96, filters=decoder_filters[2], names="conv_96")
dec_192 = layers.UpSampling2D((2, 2), name="up_192")(dec_96)
dec_192 = layers.Concatenate(name="conc_192")([skip_0, dec_192])
dec_192 = convblock(dec_192, filters=decoder_filters[3], names="conv_192")
# output layer
out_layer = layers.Conv2D(filters=outchannels, kernel_size=3, padding="same", name="output_conv")(dec_192)
out_layer = layers.Activation(activation, name="out_act")(out_layer)
model = tf.keras.Model(inputs=model.input, outputs=out_layer)
return model
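
# Usage sketch (illustrative; assumes TF 2.x and the qubvel segmentation_models
# package are installed; the default 96x96 input satisfies PSPNet's requirement
# that H and W be divisible by 6 * downsample_factor):
if __name__ == "__main__":
    net = build_pspunet(backbone_name="resnet50", in_shape=(96, 96, 3), classes=2)
    net.summary()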
|
[
"tensorflow.keras.layers.Conv2D",
"tensorflow.keras.layers.Concatenate",
"tensorflow.keras.layers.Conv1D",
"tensorflow.keras.Model",
"segmentation_models.models.pspnet.SpatialContextBlock",
"segmentation_models.PSPNet",
"segmentation_models.backbones.backbones_factory.Backbones.get_feature_layers",
"tensorflow.keras.layers.Activation",
"tensorflow.keras.layers.UpSampling2D",
"private_models.models.utils.custom_layers.convblock",
"private_models.models.utils.custom_layers.atrous_convblock"
] |
[((1520, 1542), 'private_models.models.utils.custom_layers.convblock', 'convblock', (['x', 'nfilters'], {}), '(x, nfilters)\n', (1529, 1542), False, 'from private_models.models.utils.custom_layers import convblock, atrous_convblock\n'), ((1795, 1824), 'private_models.models.utils.custom_layers.atrous_convblock', 'atrous_convblock', (['x', 'nfilters'], {}), '(x, nfilters)\n', (1811, 1824), False, 'from private_models.models.utils.custom_layers import convblock, atrous_convblock\n'), ((4250, 4534), 'segmentation_models.PSPNet', 'PSPNet', ([], {'backbone_name': 'backbone_name', 'input_shape': 'in_shape', 'classes': 'classes', 'activation': 'activation', 'encoder_weights': 'encoder_weights', 'encoder_freeze': 'encoder_freeze', 'downsample_factor': '(16)', 'psp_conv_filters': '(512)', 'psp_pooling_type': '"""avg"""', 'psp_use_batchnorm': '(True)', 'psp_dropout': 'None'}), "(backbone_name=backbone_name, input_shape=in_shape, classes=classes,\n activation=activation, encoder_weights=encoder_weights, encoder_freeze=\n encoder_freeze, downsample_factor=16, psp_conv_filters=512,\n psp_pooling_type='avg', psp_use_batchnorm=True, psp_dropout=None)\n", (4256, 4534), False, 'from segmentation_models import PSPNet\n'), ((4772, 4815), 'segmentation_models.backbones.backbones_factory.Backbones.get_feature_layers', 'Backbones.get_feature_layers', (['backbone_name'], {}), '(backbone_name)\n', (4800, 4815), False, 'from segmentation_models.backbones.backbones_factory import Backbones\n'), ((5404, 5436), 'private_models.models.utils.custom_layers.convblock', 'convblock', (['skips[0]'], {'filters': '(512)'}), '(skips[0], filters=512)\n', (5413, 5436), False, 'from private_models.models.utils.custom_layers import convblock, atrous_convblock\n'), ((5582, 5644), 'private_models.models.utils.custom_layers.convblock', 'convblock', (['dec_24'], {'filters': 'decoder_filters[0]', 'names': '"""conv_24"""'}), "(dec_24, filters=decoder_filters[0], names='conv_24')\n", (5591, 5644), False, 'from private_models.models.utils.custom_layers import convblock, atrous_convblock\n'), ((5868, 5937), 'private_models.models.utils.custom_layers.atrous_convblock', 'atrous_convblock', (['dec_48'], {'filters': 'decoder_filters[1]', 'names': '"""conv_48"""'}), "(dec_48, filters=decoder_filters[1], names='conv_48')\n", (5884, 5937), False, 'from private_models.models.utils.custom_layers import convblock, atrous_convblock\n'), ((6217, 6286), 'private_models.models.utils.custom_layers.atrous_convblock', 'atrous_convblock', (['dec_96'], {'filters': 'decoder_filters[2]', 'names': '"""conv_96"""'}), "(dec_96, filters=decoder_filters[2], names='conv_96')\n", (6233, 6286), False, 'from private_models.models.utils.custom_layers import convblock, atrous_convblock\n'), ((6436, 6500), 'private_models.models.utils.custom_layers.convblock', 'convblock', (['dec_192'], {'filters': 'decoder_filters[3]', 'names': '"""conv_192"""'}), "(dec_192, filters=decoder_filters[3], names='conv_192')\n", (6445, 6500), False, 'from private_models.models.utils.custom_layers import convblock, atrous_convblock\n'), ((6718, 6771), 'tensorflow.keras.Model', 'tf.keras.Model', ([], {'inputs': 'model.input', 'outputs': 'out_layer'}), '(inputs=model.input, outputs=out_layer)\n', (6732, 6771), True, 'import tensorflow as tf\n'), ((848, 913), 'segmentation_models.models.pspnet.SpatialContextBlock', 'SpatialContextBlock', (['(1)', 'conv_filters', 'pooling_type', 'use_batchnorm'], {}), '(1, conv_filters, pooling_type, use_batchnorm)\n', (867, 913), False, 'from segmentation_models.models.pspnet import SpatialContextBlock\n'), ((926, 991), 'segmentation_models.models.pspnet.SpatialContextBlock', 'SpatialContextBlock', (['(2)', 'conv_filters', 'pooling_type', 'use_batchnorm'], {}), '(2, conv_filters, pooling_type, use_batchnorm)\n', (945, 991), False, 'from segmentation_models.models.pspnet import SpatialContextBlock\n'), ((1004, 1069), 'segmentation_models.models.pspnet.SpatialContextBlock', 'SpatialContextBlock', (['(3)', 'conv_filters', 'pooling_type', 'use_batchnorm'], {}), '(3, conv_filters, pooling_type, use_batchnorm)\n', (1023, 1069), False, 'from segmentation_models.models.pspnet import SpatialContextBlock\n'), ((1082, 1147), 'segmentation_models.models.pspnet.SpatialContextBlock', 'SpatialContextBlock', (['(6)', 'conv_filters', 'pooling_type', 'use_batchnorm'], {}), '(6, conv_filters, pooling_type, use_batchnorm)\n', (1101, 1147), False, 'from segmentation_models.models.pspnet import SpatialContextBlock\n'), ((1160, 1205), 'tensorflow.keras.layers.Concatenate', 'layers.Concatenate', ([], {'axis': '(3)', 'name': '"""psp_concat"""'}), "(axis=3, name='psp_concat')\n", (1178, 1205), False, 'from tensorflow.keras import layers\n'), ((1232, 1301), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', ([], {'filters': 'conv_filters', 'kernel_size': '(1)', 'activation': '"""relu"""'}), "(filters=conv_filters, kernel_size=1, activation='relu')\n", (1245, 1301), False, 'from tensorflow.keras import layers\n'), ((1441, 1462), 'tensorflow.keras.layers.UpSampling2D', 'layers.UpSampling2D', ([], {}), '()\n', (1460, 1462), False, 'from tensorflow.keras import layers\n'), ((1474, 1500), 'tensorflow.keras.layers.Concatenate', 'layers.Concatenate', ([], {'axis': '(3)'}), '(axis=3)\n', (1492, 1500), False, 'from tensorflow.keras import layers\n'), ((1716, 1737), 'tensorflow.keras.layers.UpSampling2D', 'layers.UpSampling2D', ([], {}), '()\n', (1735, 1737), False, 'from tensorflow.keras import layers\n'), ((1749, 1775), 'tensorflow.keras.layers.Concatenate', 'layers.Concatenate', ([], {'axis': '(3)'}), '(axis=3)\n', (1767, 1775), False, 'from tensorflow.keras import layers\n'), ((5198, 5253), 'private_models.models.utils.custom_layers.convblock', 'convblock', (['model.input'], {'filters': '(32)', 'names': '"""encoder-384"""'}), "(model.input, filters=32, names='encoder-384')\n", (5207, 5253), False, 'from private_models.models.utils.custom_layers import convblock, atrous_convblock\n'), ((5451, 5492), 'tensorflow.keras.layers.UpSampling2D', 'layers.UpSampling2D', (['(2, 2)'], {'name': '"""up_24"""'}), "((2, 2), name='up_24')\n", (5470, 5492), False, 'from tensorflow.keras import layers\n'), ((5514, 5548), 'tensorflow.keras.layers.Concatenate', 'layers.Concatenate', ([], {'name': '"""conc_24"""'}), "(name='conc_24')\n", (5532, 5548), False, 'from tensorflow.keras import layers\n'), ((5659, 5700), 'tensorflow.keras.layers.UpSampling2D', 'layers.UpSampling2D', (['(2, 2)'], {'name': '"""up_48"""'}), "((2, 2), name='up_48')\n", (5678, 5700), False, 'from tensorflow.keras import layers\n'), ((5722, 5756), 'tensorflow.keras.layers.Concatenate', 'layers.Concatenate', ([], {'name': '"""conc_48"""'}), "(name='conc_48')\n", (5740, 5756), False, 'from tensorflow.keras import layers\n'), ((6008, 6049), 'tensorflow.keras.layers.UpSampling2D', 'layers.UpSampling2D', (['(2, 2)'], {'name': '"""up_96"""'}), "((2, 2), name='up_96')\n", (6027, 6049), False, 'from tensorflow.keras import layers\n'), ((6071, 6105), 'tensorflow.keras.layers.Concatenate', 'layers.Concatenate', ([], {'name': '"""conc_96"""'}), "(name='conc_96')\n", (6089, 6105), False, 'from tensorflow.keras import layers\n'), ((6302, 6344), 'tensorflow.keras.layers.UpSampling2D', 'layers.UpSampling2D', (['(2, 2)'], {'name': '"""up_192"""'}), "((2, 2), name='up_192')\n", (6321, 6344), False, 'from tensorflow.keras import layers\n'), ((6367, 6402), 'tensorflow.keras.layers.Concatenate', 'layers.Concatenate', ([], {'name': '"""conc_192"""'}), "(name='conc_192')\n", (6385, 6402), False, 'from tensorflow.keras import layers\n'), ((6537, 6627), 'tensorflow.keras.layers.Conv2D', 'layers.Conv2D', ([], {'filters': 'outchannels', 'kernel_size': '(3)', 'padding': '"""same"""', 'name': '"""output_conv"""'}), "(filters=outchannels, kernel_size=3, padding='same', name=\n 'output_conv')\n", (6550, 6627), False, 'from tensorflow.keras import layers\n'), ((6648, 6693), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['activation'], {'name': '"""out_act"""'}), "(activation, name='out_act')\n", (6665, 6693), False, 'from tensorflow.keras import layers\n')]
|
from __future__ import print_function
import tensorflow as tf
import os
from six.moves import cPickle
from model import Model
from tkinter import *
# directory to load the trained model from
# (alternatives: save_Math_Final, save_Math)
save_dir_arg = "save_Math_Final"
# load saved arguments, word/vocab lists, and model to use
with open(os.path.join(save_dir_arg, 'config.pkl'), 'rb') as f:
saved_args = cPickle.load(f)
with open(os.path.join(save_dir_arg, 'words_vocab.pkl'), 'rb') as f:
words, vocab = cPickle.load(f)
model = Model(saved_args, True)
# generate "solutions" to given number
def sample2():
# default parameters
#save_dir_arg = "save_WP2" # default: "save"
n_arg = 3 # default: 200
prime_arg = " " # default: " "
pick_arg = 1 # default: 1
sample_arg = 1 # default: 1
# retrieve user input, number to generate solution for
content = entry_1.get() + " ="
# was going to use this to break down solutions,
# but generated solutions are wrong most of the time
#sliderVal = slider_1.get()
# print to terminal for debugging
print("["+content+"]")
# start session
with tf.Session() as sess:
# initialize variables
tf.global_variables_initializer().run()
# create a saver
saver = tf.train.Saver(tf.global_variables())
# get checkpoint
ckpt = tf.train.get_checkpoint_state(save_dir_arg)
if ckpt and ckpt.model_checkpoint_path:
# restore checkpoint
saver.restore(sess, ckpt.model_checkpoint_path)
# sample number, receive generated "solution"
generatedSolution = model.sample(sess, words, vocab, n_arg, content, sample_arg, pick_arg)
# print to terminal for debugging
print(generatedSolution)
# append problem to the text module in GUI
text_1.insert(END, generatedSolution+"\n")
# set up the root GUI window using Tkinter
root = Tk()
# name of GUI
root.title("Math Generator")
# size of GUI
root.geometry("360x650")
# entry module to type number that you want to generate a solution for
entry_1 = Entry(root)
entry_1.place(x = 20, y = 40)
# button to "solve" addition problem based on user input
button_1 = Button(root, text = "Solve", command = sample2, width = 10)
button_1.place(x = 215, y = 40)
# text field module to display generated solutions
text_1 = Text(root, width = 44, height = 35, wrap=WORD, bg="gray")
text_1.place(x = 20, y = 80)
# slider
#slider_1 = Scale(root, from_=1, to=32, orient=HORIZONTAL, length=123)
#slider_1.place(x = 600, y = 40)
# start main loop for GUI
root.mainloop()
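
# Headless variant (a sketch, not in the original): the same restore-and-sample
# flow can be driven without the GUI as
#   with tf.Session() as sess:
#       tf.global_variables_initializer().run()
#       saver = tf.train.Saver(tf.global_variables())
#       ckpt = tf.train.get_checkpoint_state(save_dir_arg)
#       if ckpt and ckpt.model_checkpoint_path:
#           saver.restore(sess, ckpt.model_checkpoint_path)
#           print(model.sample(sess, words, vocab, 3, "1 + 1 =", 1, 1))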
|
[
"tensorflow.global_variables_initializer",
"model.Model",
"tensorflow.Session",
"tensorflow.global_variables",
"six.moves.cPickle.load",
"os.path.join",
"tensorflow.train.get_checkpoint_state"
] |
[((599, 622), 'model.Model', 'Model', (['saved_args', '(True)'], {}), '(saved_args, True)\n', (604, 622), False, 'from model import Model\n'), ((474, 489), 'six.moves.cPickle.load', 'cPickle.load', (['f'], {}), '(f)\n', (486, 489), False, 'from six.moves import cPickle\n'), ((575, 590), 'six.moves.cPickle.load', 'cPickle.load', (['f'], {}), '(f)\n', (587, 590), False, 'from six.moves import cPickle\n'), ((406, 446), 'os.path.join', 'os.path.join', (['save_dir_arg', '"""config.pkl"""'], {}), "(save_dir_arg, 'config.pkl')\n", (418, 446), False, 'import os\n'), ((500, 545), 'os.path.join', 'os.path.join', (['save_dir_arg', '"""words_vocab.pkl"""'], {}), "(save_dir_arg, 'words_vocab.pkl')\n", (512, 545), False, 'import os\n'), ((1173, 1185), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (1183, 1185), True, 'import tensorflow as tf\n'), ((1357, 1400), 'tensorflow.train.get_checkpoint_state', 'tf.train.get_checkpoint_state', (['save_dir_arg'], {}), '(save_dir_arg)\n', (1386, 1400), True, 'import tensorflow as tf\n'), ((1306, 1327), 'tensorflow.global_variables', 'tf.global_variables', ([], {}), '()\n', (1325, 1327), True, 'import tensorflow as tf\n'), ((1222, 1255), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (1253, 1255), True, 'import tensorflow as tf\n')]
|
from PyQt5.QtWidgets import *
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg
from matplotlib.figure import Figure
class mplw(QWidget):
    """Minimal Qt widget that embeds a Matplotlib canvas."""
    def __init__(self, parent=None):
        QWidget.__init__(self, parent)
        self.canvas = FigureCanvasQTAgg(Figure())
        layout = QHBoxLayout()  # note: QHBoxLayout is horizontal, despite the original name vertical_layout
        layout.addWidget(self.canvas)
        self.canvas.axes = self.canvas.figure.add_subplot(111)
        self.setLayout(layout)
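
# Usage sketch (hypothetical driver, not in the original; requires a Qt event loop):
if __name__ == "__main__":
    import sys
    app = QApplication(sys.argv)
    w = mplw()
    w.canvas.axes.plot([0, 1, 2], [2, 1, 3])
    w.show()
    sys.exit(app.exec_())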
|
[
"matplotlib.figure.Figure"
] |
[((271, 279), 'matplotlib.figure.Figure', 'Figure', ([], {}), '()\n', (277, 279), False, 'from matplotlib.figure import Figure\n')]
|
import sys
import os
import matplotlib.pyplot as plt
import config
def main():
    if len(sys.argv) > 1:
diagram_path = sys.argv[1]
else:
diagram_path = ""
fabricprov_label = config.make_label(config.ldb_prov_label)
fabricsharp_label = config.make_label(config.forkbase_label)
series_names = [fabricprov_label, fabricsharp_label]
series = {}
series[fabricprov_label] = [x / 1000000.0 for x in [1045175, # 1000
1568381, # 2000
2757734, # 3000
3248646, # 4000,
4042472, # 5000,
5143231, # 6000
6341226, # 7000
6294415, # 8000
7384893, # 9000
10380770] # 10000
]
series[fabricsharp_label] = [x / 1000000.0 for x in [323971, # 1000
718925, # 2000
1061239, # 3000
1258163, # 4000,
1565504, # 5000,
1876250, # 6000
2164747, # 7000
3130134, # 8000
3855851, # 9000
4891380] # 10000
]
    xlabels = list(range(1, 11))
    f, ax = plt.subplots()
# # f.set_size_inches(, 4)
for series_name in series_names:
series_data = series[series_name]
xticks = range(len(series_data))
ax.plot(xticks, series_data, config.fmts[series_name], **config.line_opts[series_name])
# ax.set_title("Throughput")
ax.set(xlabel='# of blocks (x1000)', ylabel='ms')
ax.set_xticks(xticks)
ax.set_xticklabels(xlabels)
ax.set_ylim([0.0, 12.5])
handles, labels = ax.get_legend_handles_labels()
f.legend(handles, labels,
loc='upper center', ncol=1, bbox_to_anchor=(0.47, 0.90),
columnspacing=1, handletextpad=1, fontsize=20)
if diagram_path == "":
plt.tight_layout()
plt.show()
else:
f.tight_layout()
f.savefig(diagram_path, bbox_inches='tight')
if __name__ == "__main__":
sys.exit(main())
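
# Example invocation (the script name is illustrative):
#   python plot_blocks.py out.pdf   saves the figure to a file
#   python plot_blocks.py           shows it interactively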
|
[
"matplotlib.pyplot.tight_layout",
"config.make_label",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.show"
] |
[((268, 308), 'config.make_label', 'config.make_label', (['config.ldb_prov_label'], {}), '(config.ldb_prov_label)\n', (285, 308), False, 'import config\n'), ((333, 373), 'config.make_label', 'config.make_label', (['config.forkbase_label'], {}), '(config.forkbase_label)\n', (350, 373), False, 'import config\n'), ((1566, 1580), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (1578, 1580), True, 'import matplotlib.pyplot as plt\n'), ((2254, 2272), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (2270, 2272), True, 'import matplotlib.pyplot as plt\n'), ((2281, 2291), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2289, 2291), True, 'import matplotlib.pyplot as plt\n')]
|
from rest_framework import status
from rest_framework.reverse import reverse
from api.tests.utils import CRUDMixin
from users.tests.utils import make_user
from .utils import create_default_roles
class TestRoleAPI(CRUDMixin):
@classmethod
def setUpTestData(cls):
create_default_roles()
cls.user = make_user()
cls.url = reverse(viewname="roles")
def test_allows_authenticated_user_to_get_roles(self):
self.assert_fetch(self.user, status.HTTP_200_OK)
def test_denies_unauthenticated_user_to_get_roles(self):
self.assert_fetch(expected=status.HTTP_403_FORBIDDEN)
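
# For reference, a minimal assert_fetch compatible with the calls above could
# look like this (a sketch; the real helper lives in api.tests.utils):
#
#     def assert_fetch(self, user=None, expected=status.HTTP_403_FORBIDDEN):
#         if user:
#             self.client.force_login(user)
#         response = self.client.get(self.url)
#         self.assertEqual(response.status_code, expected)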
|
[
"rest_framework.reverse.reverse",
"users.tests.utils.make_user"
] |
[((323, 334), 'users.tests.utils.make_user', 'make_user', ([], {}), '()\n', (332, 334), False, 'from users.tests.utils import make_user\n'), ((353, 378), 'rest_framework.reverse.reverse', 'reverse', ([], {'viewname': '"""roles"""'}), "(viewname='roles')\n", (360, 378), False, 'from rest_framework.reverse import reverse\n')]
|
#!/usr/bin/env python3
import h5py
import numpy as np
import pandas as pd
import sys
mtd_prefix = """
{
"data_type": "matrix",
"value_type": "double",
"""
mtd_suffix = """
"format": "csv",
"header": false,
"sep": ","
}
"""
if len(sys.argv) < 3:
print("usage: " + sys.argv[0] + " <in-file> <dataset> [NCHW] [nomean]")
sys.exit()
convert_to_nchw = False
if "NCHW" in sys.argv:
convert_to_nchw = True
remove_mean = False
if "nomean" in sys.argv:
remove_mean = True
input_file = sys.argv[1]
dataset = sys.argv[2]
out_file = input_file[:-3] + "_" + dataset
if dataset != "label":
if convert_to_nchw:
out_file += "_NCHW"
    else:
        out_file += "_NHWC"
if remove_mean:
out_file += "_nomean"
out_file += ".csv"
fid = h5py.File(input_file,'r')
print('Datasets in \''+input_file+'\':')
print(fid.keys())
print('Loading ' + dataset + '...')
ds = np.array(fid[dataset])
print(ds.shape)
if remove_mean:
ds_mean = np.mean(ds, axis=0)
ds -= ds_mean
if ds.ndim > 2:
if convert_to_nchw:
print("converting to NCHW")
ds = np.moveaxis(ds, -1, 1)
ds = ds.reshape(ds.shape[0], ds.shape[1] * ds.shape[2] * ds.shape[3])
print("reshaped to " + str(ds.shape))
else:
print("ndim: " + str(ds.ndim))
df = pd.DataFrame(ds)
df.to_csv(out_file, index=False, header=False)
meta = str(ds.shape[0]) + "," + str(ds.shape[1]) + ",1,f32,\n"
mtd = mtd_prefix + " \"rows\": " + str(ds.shape[0]) + ",\n \"cols\": " + str(ds.shape[1]) + "," + mtd_suffix
with open(out_file + ".meta", 'w') as f:
f.write(meta)
with open(out_file + ".mtd", 'w') as f:
f.write(mtd)
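
# Example invocation (file names are illustrative):
#   python3 h5_to_csv.py train.h5 images NCHW nomean
# writes train_images_NCHW_nomean.csv plus .csv.meta and .csv.mtd sidecars.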
|
[
"pandas.DataFrame",
"h5py.File",
"numpy.moveaxis",
"numpy.mean",
"numpy.array",
"sys.exit"
] |
[((810, 836), 'h5py.File', 'h5py.File', (['input_file', '"""r"""'], {}), "(input_file, 'r')\n", (819, 836), False, 'import h5py\n'), ((937, 959), 'numpy.array', 'np.array', (['fid[dataset]'], {}), '(fid[dataset])\n', (945, 959), True, 'import numpy as np\n'), ((1328, 1344), 'pandas.DataFrame', 'pd.DataFrame', (['ds'], {}), '(ds)\n', (1340, 1344), True, 'import pandas as pd\n'), ((365, 375), 'sys.exit', 'sys.exit', ([], {}), '()\n', (373, 375), False, 'import sys\n'), ((1006, 1025), 'numpy.mean', 'np.mean', (['ds'], {'axis': '(0)'}), '(ds, axis=0)\n', (1013, 1025), True, 'import numpy as np\n'), ((1138, 1160), 'numpy.moveaxis', 'np.moveaxis', (['ds', '(-1)', '(1)'], {}), '(ds, -1, 1)\n', (1149, 1160), True, 'import numpy as np\n')]
|
import riak
from django.conf import settings
RIAK_PORT = getattr(settings, 'RIAK_PORT', 8087)
RIAK_TRANSPORT_CLASS = getattr(settings, 'RIAK_TRANSPORT_CLASS', riak.RiakPbcTransport)
RIAK_BUCKET = getattr(settings, 'RIAK_BUCKET', 'django-riak-sessions')
RIAK_HOST = getattr(settings, 'RIAK_HOST', '127.0.0.1')
client = riak.RiakClient(port=RIAK_PORT, host=RIAK_HOST, transport_class=RIAK_TRANSPORT_CLASS)
bucket = client.bucket(RIAK_BUCKET)
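
# Minimal usage sketch against a running Riak node (keys and values are
# illustrative, using the legacy client API assumed above):
#   obj = bucket.new('session:abc123', data={'user_id': 42})
#   obj.store()
#   print(bucket.get('session:abc123').get_data())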
|
[
"riak.RiakClient"
] |
[((320, 410), 'riak.RiakClient', 'riak.RiakClient', ([], {'port': 'RIAK_PORT', 'host': 'RIAK_HOST', 'transport_class': 'RIAK_TRANSPORT_CLASS'}), '(port=RIAK_PORT, host=RIAK_HOST, transport_class=\n RIAK_TRANSPORT_CLASS)\n', (335, 410), False, 'import riak\n')]
|
from bottle import route, run

# Serves a JavaScript payload that reads the victim's PHPSESSID cookie and
# exfiltrates it back to the /show/<cookie_val> endpoint below.
@route('/')
def index():
return "function readCookie(name) {" \
" var nameEQ = name + \"=\";"\
" var ca = document.cookie.split(';');"\
" for (var i = 0; i < ca.length; i++) {"\
" var c = ca[i];"\
" while (c.charAt(0) == ' ') c = c.substring(1, c.length);"\
" if (c.indexOf(nameEQ) == 0) return c.substring(nameEQ.length, c.length);"\
" }"\
" return null;"\
" };var cookieId = readCookie('PHPSESSID'); "\
"$('body').append($('<script />', {src:'http://10.10.10.142:8082/show/' + cookieId}))"
@route('/show/<cookie_val>')
def show(cookie_val):
print(cookie_val)
run(host='10.10.10.142', port=8082)
|
[
"bottle.run",
"bottle.route"
] |
[((43, 53), 'bottle.route', 'route', (['"""/"""'], {}), "('/')\n", (48, 53), False, 'from bottle import route, run, template\n'), ((767, 794), 'bottle.route', 'route', (['"""/show/<cookie_val>"""'], {}), "('/show/<cookie_val>')\n", (772, 794), False, 'from bottle import route, run, template\n'), ((840, 875), 'bottle.run', 'run', ([], {'host': '"""10.10.10.142"""', 'port': '(8082)'}), "(host='10.10.10.142', port=8082)\n", (843, 875), False, 'from bottle import route, run, template\n')]
|
# Minimal Flask Application.
# See http://flask.pocoo.org/docs/0.12/quickstart/#a-minimal-application
from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello World!"
|
[
"flask.Flask"
] |
[((132, 147), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (137, 147), False, 'from flask import Flask\n')]
|
from io import IOBase, TextIOBase
from multiprocessing import Process
import os
import sys
import re
import time
import json
import subprocess
import random
import logging
logger = logging.getLogger(__name__)
from paragen.utils.runtime import Environment
SPACE_NORMALIZER = re.compile(r"\s+")
TEMP_IO_SAVE_PATH = ""
def init_io():
global TEMP_IO_SAVE_PATH
try:
TEMP_IO_SAVE_PATH = os.path.join(os.getenv('HOME'), '.cache/uio/')
except Exception:
TEMP_IO_SAVE_PATH = os.path.join(os.getcwd(), '.cache_uio/')
if not os.path.exists(TEMP_IO_SAVE_PATH):
os.makedirs(TEMP_IO_SAVE_PATH, exist_ok=True)
def clear_cache():
global TEMP_IO_SAVE_PATH
output = subprocess.run('lsof +d {}'.format(TEMP_IO_SAVE_PATH).split(), capture_output=True)
occupied = str(output.stdout, encoding='utf8').split('\n')
occupied = set([filepath for filepath in occupied if filepath])
for name in os.listdir(TEMP_IO_SAVE_PATH):
filename = os.path.join(TEMP_IO_SAVE_PATH, name)
if filename not in occupied:
            try:
                os.remove(filename)
            except OSError:
                # the file may already be gone or still in use
                pass
init_io()
def _run_cmd(args_list):
"""
run linux commands
"""
proc = subprocess.Popen(args_list, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
s_output, s_err = proc.communicate()
s_return = proc.returncode
return s_return, s_output, s_err
def parse_single_path(path):
"""
Parse path with regular expression
Args:
path: input path
Returns:
- parse path list
"""
def _get_files(path):
return [f for f in listdir(path, return_files=True, return_dirs=False)]
if path.endswith('*'):
path = path.split('/')
pathdir, pathprefix = '/'.join(path[:-1]), path[-1][:-1]
files = ['{}/{}'.format(pathdir, f) for f in _get_files(pathdir) if f.startswith(pathprefix)]
elif isdir(path):
files = ['{}/{}'.format(path, f) for f in _get_files(path)]
else:
files = [path]
random.shuffle(files)
return files
def parse_path(path):
files = []
for singlepath in path.strip().split(','):
if singlepath:
files += parse_single_path(singlepath)
return files
def read_vocab(path):
"""
Read a vocab
Args:
path: path to restore vocab
Returns:
- a dict of frequency table
"""
freq = []
with UniIO(path, 'r') as fin:
for line in fin:
line = line.strip('\n')
line = SPACE_NORMALIZER.split(line)
freq.append((' '.join(line[:-1]), int(line[-1])))
return freq
def read_table(path):
"""
Read a table
Args:
path: path to restore table
Returns:
- a dict of table
"""
table = {}
with UniIO(path, 'r') as fin:
for line in fin:
line = line.strip('\n')
line = SPACE_NORMALIZER.split(line)
table[' '.join(line[:-1])] = line[-1]
return table
def read_list(path):
"""
Read a list
Args:
path: path to restore list
Returns:
- a list
"""
with UniIO(path, 'r') as fin:
freq = [line.strip('\n') for line in fin]
return freq
def jsonable(x):
"""
Check if x is suit json.dumps
"""
try:
json.dumps(x)
return True
except (TypeError, OverflowError):
return False
def listdir(path, return_files=True, return_dirs=False, retry=5):
"""
Given a path, return a list of files under this path
:param path: directory
:return: a list of files / dirs
"""
def _listdir(path):
retval = list()
returncode = 1
for i in range(retry):
if path.startswith('hdfs:'):
output = subprocess.run('hadoop fs -ls {}'.format(path).split(), capture_output=True)
returncode = output.returncode
output = output.stdout
output = str(output, encoding='utf8').split('\n')
getname = lambda x: x.split('/')[-1]
if return_files:
retval += [getname(f) for f in output if f.startswith('-')]
if return_dirs:
retval += [getname(f) for f in output if f.startswith('d')]
else:
output = subprocess.run('ls -A -H -l {}'.format(path).split(), capture_output=True)
returncode = output.returncode
output = output.stdout
output = str(output, encoding='utf8').split('\n')
getname = lambda x: x.split(' ')[-1]
if return_files:
retval += [getname(f) for f in output if f.startswith('-')]
if return_dirs:
retval += [getname(f) for f in output if f.startswith('d')]
if returncode == 0:
break
if returncode != 0:
logger.warning(f'fail to listdir {path}')
return retval
if path:
return _listdir(path)
else:
raise ValueError
def isdir(path):
"""
Check if a path if a directory
:param path: path to check
:return:
"""
if path.startswith('hdfs:'):
output = subprocess.run('hadoop fs -test -d {}'.format(path).split(), capture_output=True)
return output.returncode == 0
else:
return os.path.isdir(path)
def wait_until_exist(path, timeout=10000):
start = time.time()
while True:
if exists(path):
return True
if time.time() - start > timeout:
logger.warning(f"timeout: {path} not found!")
return False
time.sleep(5)
def cp(src, tgt, retry=5, wait=False):
"""
Copy a file from src to tgt
:param src: source file / directory
:param tgt: target file / directory
:return:
"""
def _cp(src, tgt):
if not wait_until_exist(src):
logger.info(f'timeout: {src} not found')
return
returncode = 1
for i in range(retry):
if exists(tgt):
remove(tgt, wait=True)
if src.startswith('hdfs:') and tgt.startswith('hdfs:'):
output = subprocess.run(["hadoop", "fs", "-cp", src, tgt], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
elif src.startswith('hdfs:') and not tgt.startswith('hdfs:'):
output = subprocess.run(["hadoop", "fs", "-get", src, tgt], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
elif not src.startswith('hdfs:') and tgt.startswith('hdfs:'):
output = subprocess.run(["hadoop", "fs", "-put", src, tgt], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
else:
output = subprocess.run(["cp", src, tgt], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
returncode = output.returncode
if returncode == 0:
logger.info(f'successfully copy from {src} to {tgt}')
break
if returncode != 0:
logger.warning(f'copy from {src} to {tgt} fails')
env = Environment()
if env.is_master():
if wait:
_cp(src, tgt)
else:
Process(target=_cp, args=(src, tgt)).start()
def mkdir(path, retry=5, wait=True):
"""
Create a directory at path
:param path: path to directory
:return:
"""
def _mkdir(path):
returncode = 1
for i in range(retry):
if path.startswith('hdfs:'):
output = subprocess.run(["hadoop", "fs", "-mkdir", "-p", path], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
else:
output = subprocess.run(["mkdir", "-p", path], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
returncode = output.returncode
if returncode == 0:
logger.info(f'successfully make directory: {path}')
break
if returncode != 0:
logger.warning(f'mkdir {path} fails')
env = Environment()
if env.is_master() and path:
if wait:
_mkdir(path)
else:
Process(target=_mkdir, args=(path,)).start()
def remove(path, retry=5, wait=False):
"""
Remove a directory or file
:param path: path to remove
:return:
"""
def _remove(path):
if exists(path):
returncode = 1
for i in range(retry):
if path.startswith('hdfs:'):
output = subprocess.run(['hadoop', 'fs', '-rm', path], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
else:
output = subprocess.run(['rm', path], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
returncode = output.returncode
if returncode == 0:
logger.info(f'successfully remove file: {path}')
break
if returncode != 0:
logger.warning(f'remove file {path} fails')
env = Environment()
if env.is_master() and path:
if wait:
_remove(path)
else:
Process(target=_remove, args=(path,)).start()
def exists(path):
"""
check if path exists
:param path: path to check
:return:
"""
if path.startswith('hdfs:'):
r = subprocess.run(['hadoop', 'fs', '-stat', path], capture_output=True)
return True if r.returncode == 0 else False
else:
return os.path.exists(path)
def not_exist(paths):
for p in paths:
if not exists(p):
return p
return None
def remove_tree(path, retry=5, wait=True):
"""
remove directory recursively
:param path: path to remove
:return
"""
def _rmtree(path):
returncode = 1
for i in range(retry):
if path.startswith('hdfs:'):
output = subprocess.run(['hadoop', 'fs', '-rm', '-r', path], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
else:
output = subprocess.run(['rm', '-r', path], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
returncode = output.returncode
if returncode == 0:
logger.info(f'successfully remove directory: {path}')
break
if returncode != 0:
logger.warning(f'remove directory {path} fails')
env = Environment()
if env.is_master() and path:
if wait:
_rmtree(path)
else:
Process(target=_rmtree, args=(path,)).start()
def create_data_map(path):
"""
read a data map from path
"""
data_map = []
with UniIO(path) as fin:
data_position = 0
for i, line in enumerate(fin):
d = json.loads(line)
token_num = d['token_num'] if 'token_num' in d else 1
data_map.append((i, data_position, token_num))
data_position += len(line)
return data_map
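
# Each mapped line is expected to be a JSON object, optionally carrying a
# 'token_num' field, e.g. (illustrative): {"src": "hello world", "token_num": 2}
# which yields data_map entries of (line_index, byte_offset, token_num).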
def utf8len(s):
"""
Get the byte number of the utf-8 sentence.
"""
return len(s.encode('utf-8'))
def _InputFileOpen(path, mode='r', encoding='utf8', timeout=-1, poll_interval=0.1, *args, **kwargs):
try:
if path.startswith('hdfs:'):
if 'localpath' in kwargs:
localpath = kwargs['localpath']
else:
localpath = TEMP_IO_SAVE_PATH + re.sub(r'[^\w]', '', path)
            # multiple processes may read the same cached file; a lock file makes
            # later readers wait until the download has completed
            lockfilename = localpath + '.lock'
if not os.path.exists(lockfilename): # acquire lock
fd = os.open(lockfilename, os.O_WRONLY | os.O_CREAT | os.O_TRUNC) # lock
if os.path.exists(localpath):
os.remove(localpath)
p = subprocess.run("hadoop fs -get {} {}".format(path, localpath).split(),
capture_output=True)
if p.returncode:
logger.warning(f'failed to open {path}, hadoop fs return code: {p.returncode}')
os.close(fd)
os.remove(lockfilename) # release lock
else:
start = time.time()
while True: # Wait until the file is released (finished downloading)
if not os.path.exists(lockfilename):
break
if timeout >= 0 and time.time() - start > timeout:
logger.warning(f'failed to open {path}, file is locked, timeout')
break
time.sleep(poll_interval)
else:
localpath = path
if 'b' in mode.lower():
istream = open(localpath, mode=mode)
else:
istream = open(localpath, mode=mode, encoding=encoding)
# logger.info(f'successfully open file: {path}')
return istream
except Exception as e:
logger.warning(f'open file {path} fails: {e}')
return None
class _InputStream(TextIOBase):
"""
A InputSteam wrapper to tackle with multiple files input
"""
def __init__(self, path, encoding='utf8'):
super().__init__()
self._paths = parse_path(path)
_hash = hash(''.join(self._paths + [str(os.getpid())]))
_hash &= sys.maxsize
self._localpath = os.path.join(TEMP_IO_SAVE_PATH, str(_hash))
self._encoding = encoding
self._idx = -1
self._fin = None
self._next_file()
def _next_file(self):
if self._fin is not None:
self._fin.close()
self._idx += 1
if 0 <= self._idx < len(self._paths):
self._fin = _InputFileOpen(self._paths[self._idx], mode='r', encoding=self._encoding, localpath=self._localpath)
if self._fin is None:
self._next_file()
else:
raise StopIteration
def reset(self):
self._idx = -1
self._next_file()
def close(self):
if self._fin is not None:
self._fin.close()
super().close()
def __iter__(self):
return self
def __next__(self):
try:
if self._idx >= len(self._paths):
raise IndexError
return next(self._fin)
except StopIteration:
try:
self._next_file()
return self.__next__()
except Exception as e:
raise e
except IndexError:
raise StopIteration
def readline(self, size=-1):
if self._fin is None or self._fin.closed:
return ''
sample = self._fin.readline(size)
if sample:
return sample
try:
self._next_file()
return self.readline(size)
except StopIteration:
return ''
def readlines(self, hint=-1):
retval = []
total_size = 0
while hint is None or hint <= 0 or total_size <= hint:
line = self.readline()
if line:
retval.append(line)
total_size += len(line)
else:
break
return retval
def read(self, size=-1):
if self._fin is None or self._fin.closed:
return ''
if size == -1:
buffer = ''
while True:
buffer += self._fin.read()
try:
self._next_file()
except StopIteration:
break
return buffer
else:
buffer = ['' for i in range(size)]
offset = 0
while size > 0:
filesize = self._size(self._fin)
if filesize <= size:
buffer[offset : offset + filesize] = self._fin.read()
offset += filesize
size -= filesize
try:
self._next_file()
except StopIteration:
break
else:
buffer[offset : ] = self._fin.read(size)
size = 0
buffer = ''.join(buffer)
return buffer
def seek(self, offset, whence=os.SEEK_SET):
if whence == os.SEEK_SET:
if offset < 0:
raise OSError(22, 'Invalid argument')
self.reset()
_offset = offset
while offset > 0:
size = self._size(self._fin)
if offset <= size:
self._fin.seek(offset, os.SEEK_SET)
offset = 0
else:
offset -= size
try:
self._next_file()
except StopIteration:
break
return _offset
elif whence == os.SEEK_CUR:
if offset:
raise ValueError(f'invalid offset {offset}, offset must be zero')
else:
pass # do nothing, according to TextIOBase.seek()
return self.tell()
elif whence == os.SEEK_END:
if offset:
raise ValueError(f'invalid offset {offset}, offset must be zero')
else:
while True:
try:
self._next_file()
except StopIteration:
break
return self.tell()
else:
raise ValueError(f'invalid whence ({whence}, should be 0, 1 or 2)')
def tell(self):
return self._fin.tell() # Not a proper implementation
def _size(self, fin):
cur = fin.tell()
tail = fin.seek(0, os.SEEK_END)
size = max(0, tail - cur)
fin.seek(cur, os.SEEK_SET)
return size
def _OutputFileOpen(path, localpath, mode='w', encoding='utf8'):
try:
if path.startswith('hdfs:'):
if not os.path.exists(TEMP_IO_SAVE_PATH):
os.mkdir(TEMP_IO_SAVE_PATH)
else:
localpath = path
if 'b' in mode.lower():
ostream = open(localpath, mode=mode)
else:
ostream = open(localpath, mode=mode, encoding=encoding)
return ostream
except Exception as e:
logger.warning(f'open file {path} fails: {e}')
class _OutputStream(TextIOBase):
"""
OutputStream is an io wrapper to tackle with multiple kinds of path
Args:
path: output file path
"""
def __init__(self, path, encoding='utf8'):
super().__init__()
self._path = path
if self._path.startswith('hdfs:'):
self._localpath = TEMP_IO_SAVE_PATH + re.sub(r'[^\w]', '', '{}_{}_w'.format(path, os.getpid()))
else:
self._localpath = path
self._encoding = encoding
self._fout = _OutputFileOpen(path, self._localpath, encoding=encoding)
def reset(self):
"""
Reset output stream
"""
self._fout.seek(0)
def close(self):
"""
Close output stream
"""
self._fout.close()
if self._path.startswith('hdfs:'):
cp(self._localpath, self._path, wait=True)
wait_until_exist(self._path)
super().close()
def write(self, content):
"""
Write to output stream
Args:
content: str to write
"""
self._fout.write(content)
def writelines(self, content):
"""
Write to output InputStream
Args:
content: list of str
"""
self._fout.writelines(content)
def seek(self, offset, whence=os.SEEK_SET):
"""
The same as TextIOBase.seek()
"""
return self._fout.seek(offset, whence)
def tell(self):
"""
The same as TextIOBase.tell()
"""
return self._fout.tell()
class _InputBytes(IOBase):
"""
InputBytes is an io wrapper to tackle with multiple kinds of path
Args:
path: input file path
"""
def __init__(self, path, mode='rb'):
super().__init__()
self._paths = parse_path(path)
self._fins = [_InputFileOpen(path, mode=mode) for path in self._paths]
self._fins = [item for item in self._fins if item is not None]
self._sizes = [self._size(fin) for fin in self._fins]
self._idx = 0
def __iter__(self):
return self
def __next__(self):
"""
Fetch next line from file. The line terminator is b'\n'.
Returns:
- next line
"""
try:
if self._idx >= len(self._fins):
raise IndexError
sample = next(self._fins[self._idx])
return sample
except StopIteration:
self._idx += 1
sample = self.__next__()
return sample
except IndexError:
raise StopIteration
def reset(self):
"""
Reset input stream
"""
self._idx = 0
for fin in self._fins:
fin.seek(0)
def readline(self, size=-1):
"""
Read the next line. Return b'' at EOF. The line terminator is b'\n'.
Args:
size: read at most `size` bytes
Returns:
- next line
"""
try:
if size == 0:
return b''
if self._idx >= len(self._fins):
raise StopIteration
sample = self._fins[self._idx].readline(size)
if sample:
return sample
self._idx += 1
return self.readline(size)
except StopIteration:
return b''
def readlines(self, hint=-1):
"""
Read all lines and return in a list
Args:
hint: read at most `hint` bytes
Returns:
- list of lines
"""
retval = []
total_size = 0
while hint is None or hint <= 0 or total_size <= hint:
line = self.readline()
if line:
retval.append(line)
total_size += len(line)
else:
break
return retval
def read(self, size=-1):
"""
Read the rest of file
Args:
size: read at most `size` bytes
Returns:
- the rest of file
"""
if size == -1:
buffer = b''
while self._idx < len(self._fins):
buffer += self._fins[self._idx].read()
self._idx += 1
return buffer
else:
buffer = bytearray(size)
offset = 0
while self._idx < len(self._fins) and size > 0:
filesize = self._size(self._fins[self._idx])
if filesize <= size:
buffer[offset : offset + filesize] = self._fins[self._idx].read()
offset += filesize
self._idx += 1
size -= filesize
else:
buffer[offset : ] = self._fins[self._idx].read(size)
size = 0
buffer = bytes(buffer)
return buffer
def _size(self, fin):
# Given a file descriptor, calculate its size
cur = fin.tell()
tail = fin.seek(0, os.SEEK_END)
size = max(0, tail - cur)
fin.seek(cur, os.SEEK_SET)
return size
def tell(self):
"""
Return the absolute current stream position
Returns:
- current stream position
"""
position = 0
if self._idx < len(self._fins):
position += self._fins[self._idx].tell()
for i in range(min(self._idx, len(self._fins))):
position += self._sizes[i]
return position
def seek(self, offset, whence=os.SEEK_SET):
"""
Change the stream position to the given byte offset.
Args:
offset: byte offset
whence: Values for whence are SEEK_SET (0), SEEK_CUR (1) or SEEK_END (2)
Returns:
Stream position after seek
"""
if whence == os.SEEK_SET:
if offset < 0:
raise OSError(22, 'Invalid argument')
return self.seek(offset - self.tell(), whence=os.SEEK_CUR)
if whence == os.SEEK_CUR:
self._idx = max(0, min(len(self._fins) - 1, self._idx))
while self._idx < len(self._fins) and offset > 0:
filesize = self._size(self._fins[self._idx])
if filesize < offset:
self._fins[self._idx].seek(0, os.SEEK_END)
self._idx += 1
offset -= filesize
else:
self._fins[self._idx].seek(offset, os.SEEK_CUR)
offset = 0
while self._idx >= 0 and offset < 0:
filesize = self._fins[self._idx].tell()
if offset + filesize < 0:
self._fins[self._idx].seek(0, os.SEEK_SET)
self._idx -= 1
offset += filesize
else:
self._fins[self._idx].seek(offset, os.SEEK_CUR)
offset = 0
self._idx = max(0, min(len(self._fins) - 1, self._idx))
return self.tell()
if whence == os.SEEK_END:
for i in range(len(self._fins)):
offset += self._sizes[i]
return self.seek(offset, whence=os.SEEK_SET)
raise ValueError(f'invalid whence ({whence}, should be 0, 1 or 2)')
def close(self):
"""
Close the input stream
"""
for fin in self._fins:
fin.close()
super().close()
class _OutputBytes(IOBase):
"""
OutputBytes is an io wrapper to tackle with multiple kinds of path
Args:
path: output file path
"""
def __init__(self, path, mode='wb'):
super().__init__()
self._path = path
self._localpath = TEMP_IO_SAVE_PATH + re.sub(r'[^\w]', '', '{}_{}_w'.format(path, os.getpid()))
self._fout = _OutputFileOpen(path, self._localpath, mode=mode)
def reset(self):
"""
Reset output stream
"""
self._fout.seek(0)
def close(self):
"""
Close output stream
"""
self._fout.close()
if self._path.startswith('hdfs:'):
cp(self._localpath, self._path, wait=True)
wait_until_exist(self._path)
super().close()
def write(self, content):
"""
Write to output Stream
Args:
content: bytes to write
"""
self._fout.write(content)
def seek(self, offset, whence=os.SEEK_SET):
"""
The same as IOBase.seek()
"""
return self._fout.seek(offset, whence)
def tell(self):
"""
The same as IOBase.tell()
"""
return self._fout.tell()
class UniIO(_InputStream, _OutputStream, _InputBytes, _OutputBytes):
"""
A universal IO with the same functions as python:open
"""
def __init__(self, path, mode='r', encoding='utf8'):
pass
def __new__(cls, path, mode='r', encoding='utf8'):
if 'r' in mode.lower():
if 'b' in mode.lower():
return _InputBytes(path, mode=mode)
return _InputStream(path, encoding=encoding)
elif 'w' in mode.lower():
if 'b' in mode.lower():
return _OutputBytes(path, mode=mode)
return _OutputStream(path, encoding=encoding)
logger.warning(f'Not support file mode: {mode}')
raise ValueError
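
# Round-trip usage sketch (local path shown; 'hdfs:' paths additionally require
# the hadoop CLI, which the helpers above shell out to):
if __name__ == "__main__":
    with UniIO('/tmp/uniio_demo.txt', 'w') as fout:
        fout.write('hello world\n')
    with UniIO('/tmp/uniio_demo.txt', 'r') as fin:
        print(fin.read())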
|
[
"os.mkdir",
"os.remove",
"random.shuffle",
"paragen.utils.runtime.Environment",
"json.dumps",
"os.close",
"os.path.join",
"json.loads",
"os.path.exists",
"re.sub",
"subprocess.Popen",
"os.open",
"time.sleep",
"os.getenv",
"os.listdir",
"re.compile",
"subprocess.run",
"os.getpid",
"os.makedirs",
"os.path.isdir",
"os.getcwd",
"time.time",
"multiprocessing.Process",
"logging.getLogger"
] |
[((181, 208), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (198, 208), False, 'import logging\n'), ((277, 295), 're.compile', 're.compile', (['"""\\\\s+"""'], {}), "('\\\\s+')\n", (287, 295), False, 'import re\n'), ((933, 962), 'os.listdir', 'os.listdir', (['TEMP_IO_SAVE_PATH'], {}), '(TEMP_IO_SAVE_PATH)\n', (943, 962), False, 'import os\n'), ((1241, 1316), 'subprocess.Popen', 'subprocess.Popen', (['args_list'], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), '(args_list, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n', (1257, 1316), False, 'import subprocess\n'), ((2047, 2068), 'random.shuffle', 'random.shuffle', (['files'], {}), '(files)\n', (2061, 2068), False, 'import random\n'), ((5485, 5496), 'time.time', 'time.time', ([], {}), '()\n', (5494, 5496), False, 'import time\n'), ((7149, 7162), 'paragen.utils.runtime.Environment', 'Environment', ([], {}), '()\n', (7160, 7162), False, 'from paragen.utils.runtime import Environment\n'), ((8076, 8089), 'paragen.utils.runtime.Environment', 'Environment', ([], {}), '()\n', (8087, 8089), False, 'from paragen.utils.runtime import Environment\n'), ((9069, 9082), 'paragen.utils.runtime.Environment', 'Environment', ([], {}), '()\n', (9080, 9082), False, 'from paragen.utils.runtime import Environment\n'), ((10443, 10456), 'paragen.utils.runtime.Environment', 'Environment', ([], {}), '()\n', (10454, 10456), False, 'from paragen.utils.runtime import Environment\n'), ((550, 583), 'os.path.exists', 'os.path.exists', (['TEMP_IO_SAVE_PATH'], {}), '(TEMP_IO_SAVE_PATH)\n', (564, 583), False, 'import os\n'), ((593, 638), 'os.makedirs', 'os.makedirs', (['TEMP_IO_SAVE_PATH'], {'exist_ok': '(True)'}), '(TEMP_IO_SAVE_PATH, exist_ok=True)\n', (604, 638), False, 'import os\n'), ((983, 1020), 'os.path.join', 'os.path.join', (['TEMP_IO_SAVE_PATH', 'name'], {}), '(TEMP_IO_SAVE_PATH, name)\n', (995, 1020), False, 'import os\n'), ((3339, 3352), 'json.dumps', 'json.dumps', (['x'], {}), '(x)\n', (3349, 3352), False, 'import json\n'), ((5408, 5427), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (5421, 5427), False, 'import os\n'), ((5695, 5708), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (5705, 5708), False, 'import time\n'), ((9382, 9450), 'subprocess.run', 'subprocess.run', (["['hadoop', 'fs', '-stat', path]"], {'capture_output': '(True)'}), "(['hadoop', 'fs', '-stat', path], capture_output=True)\n", (9396, 9450), False, 'import subprocess\n'), ((9528, 9548), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (9542, 9548), False, 'import os\n'), ((414, 431), 'os.getenv', 'os.getenv', (['"""HOME"""'], {}), "('HOME')\n", (423, 431), False, 'import os\n'), ((10808, 10824), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (10818, 10824), False, 'import json\n'), ((511, 522), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (520, 522), False, 'import os\n'), ((1091, 1110), 'os.remove', 'os.remove', (['filename'], {}), '(filename)\n', (1100, 1110), False, 'import os\n'), ((5573, 5584), 'time.time', 'time.time', ([], {}), '()\n', (5582, 5584), False, 'import time\n'), ((6239, 6346), 'subprocess.run', 'subprocess.run', (["['hadoop', 'fs', '-cp', src, tgt]"], {'stdout': 'subprocess.DEVNULL', 'stderr': 'subprocess.DEVNULL'}), "(['hadoop', 'fs', '-cp', src, tgt], stdout=subprocess.DEVNULL,\n stderr=subprocess.DEVNULL)\n", (6253, 6346), False, 'import subprocess\n'), ((7578, 7691), 'subprocess.run', 'subprocess.run', (["['hadoop', 'fs', '-mkdir', '-p', path]"], {'stdout': 'subprocess.DEVNULL', 'stderr': 'subprocess.DEVNULL'}), "(['hadoop', 'fs', '-mkdir', '-p', path], stdout=subprocess.\n DEVNULL, stderr=subprocess.DEVNULL)\n", (7592, 7691), False, 'import subprocess\n'), ((7730, 7826), 'subprocess.run', 'subprocess.run', (["['mkdir', '-p', path]"], {'stdout': 'subprocess.DEVNULL', 'stderr': 'subprocess.DEVNULL'}), "(['mkdir', '-p', path], stdout=subprocess.DEVNULL, stderr=\n subprocess.DEVNULL)\n", (7744, 7826), False, 'import subprocess\n'), ((9938, 10048), 'subprocess.run', 'subprocess.run', (["['hadoop', 'fs', '-rm', '-r', path]"], {'stdout': 'subprocess.DEVNULL', 'stderr': 'subprocess.DEVNULL'}), "(['hadoop', 'fs', '-rm', '-r', path], stdout=subprocess.\n DEVNULL, stderr=subprocess.DEVNULL)\n", (9952, 10048), False, 'import subprocess\n'), ((10087, 10180), 'subprocess.run', 'subprocess.run', (["['rm', '-r', path]"], {'stdout': 'subprocess.DEVNULL', 'stderr': 'subprocess.DEVNULL'}), "(['rm', '-r', path], stdout=subprocess.DEVNULL, stderr=\n subprocess.DEVNULL)\n", (10101, 10180), False, 'import subprocess\n'), ((11689, 11717), 'os.path.exists', 'os.path.exists', (['lockfilename'], {}), '(lockfilename)\n', (11703, 11717), False, 'import os\n'), ((11756, 11816), 'os.open', 'os.open', (['lockfilename', '(os.O_WRONLY | os.O_CREAT | os.O_TRUNC)'], {}), '(lockfilename, os.O_WRONLY | os.O_CREAT | os.O_TRUNC)\n', (11763, 11816), False, 'import os\n'), ((11844, 11869), 'os.path.exists', 'os.path.exists', (['localpath'], {}), '(localpath)\n', (11858, 11869), False, 'import os\n'), ((12208, 12220), 'os.close', 'os.close', (['fd'], {}), '(fd)\n', (12216, 12220), False, 'import os\n'), ((12237, 12260), 'os.remove', 'os.remove', (['lockfilename'], {}), '(lockfilename)\n', (12246, 12260), False, 'import os\n'), ((12319, 12330), 'time.time', 'time.time', ([], {}), '()\n', (12328, 12330), False, 'import time\n'), ((18096, 18129), 'os.path.exists', 'os.path.exists', (['TEMP_IO_SAVE_PATH'], {}), '(TEMP_IO_SAVE_PATH)\n', (18110, 18129), False, 'import os\n'), ((18147, 18174), 'os.mkdir', 'os.mkdir', (['TEMP_IO_SAVE_PATH'], {}), '(TEMP_IO_SAVE_PATH)\n', (18155, 18174), False, 'import os\n'), ((6442, 6551), 'subprocess.run', 'subprocess.run', (["['hadoop', 'fs', '-get', src, tgt]"], {'stdout': 'subprocess.DEVNULL', 'stderr': 'subprocess.DEVNULL'}), "(['hadoop', 'fs', '-get', src, tgt], stdout=subprocess.\n DEVNULL, stderr=subprocess.DEVNULL)\n", (6456, 6551), False, 'import subprocess\n'), ((7256, 7292), 'multiprocessing.Process', 'Process', ([], {'target': '_cp', 'args': '(src, tgt)'}), '(target=_cp, args=(src, tgt))\n', (7263, 7292), False, 'from multiprocessing import Process\n'), ((8191, 8227), 'multiprocessing.Process', 'Process', ([], {'target': '_mkdir', 'args': '(path,)'}), '(target=_mkdir, args=(path,))\n', (8198, 8227), False, 'from multiprocessing import Process\n'), ((8554, 8657), 'subprocess.run', 'subprocess.run', (["['hadoop', 'fs', '-rm', path]"], {'stdout': 'subprocess.DEVNULL', 'stderr': 'subprocess.DEVNULL'}), "(['hadoop', 'fs', '-rm', path], stdout=subprocess.DEVNULL,\n stderr=subprocess.DEVNULL)\n", (8568, 8657), False, 'import subprocess\n'), ((8705, 8792), 'subprocess.run', 'subprocess.run', (["['rm', path]"], {'stdout': 'subprocess.DEVNULL', 'stderr': 'subprocess.DEVNULL'}), "(['rm', path], stdout=subprocess.DEVNULL, stderr=subprocess.\n DEVNULL)\n", (8719, 8792), False, 'import subprocess\n'), ((9185, 9222), 'multiprocessing.Process', 'Process', ([], {'target': '_remove', 'args': '(path,)'}), '(target=_remove, args=(path,))\n', (9192, 9222), False, 'from multiprocessing import Process\n'), ((10559, 10596), 'multiprocessing.Process', 'Process', ([], {'target': '_rmtree', 'args': '(path,)'}), '(target=_rmtree, args=(path,))\n', (10566, 10596), False, 'from multiprocessing import Process\n'), ((11425, 11451), 're.sub', 're.sub', (['"""[^\\\\w]"""', '""""""', 'path'], {}), "('[^\\\\w]', '', path)\n", (11431, 11451), False, 'import re\n'), ((11891, 11911), 'os.remove', 'os.remove', (['localpath'], {}), '(localpath)\n', (11900, 11911), False, 'import os\n'), ((12715, 12740), 'time.sleep', 'time.sleep', (['poll_interval'], {}), '(poll_interval)\n', (12725, 12740), False, 'import time\n'), ((26355, 26366), 'os.getpid', 'os.getpid', ([], {}), '()\n', (26364, 26366), False, 'import os\n'), ((6646, 6755), 'subprocess.run', 'subprocess.run', (["['hadoop', 'fs', '-put', src, tgt]"], {'stdout': 'subprocess.DEVNULL', 'stderr': 'subprocess.DEVNULL'}), "(['hadoop', 'fs', '-put', src, tgt], stdout=subprocess.\n DEVNULL, stderr=subprocess.DEVNULL)\n", (6660, 6755), False, 'import subprocess\n'), ((6794, 6885), 'subprocess.run', 'subprocess.run', (["['cp', src, tgt]"], {'stdout': 'subprocess.DEVNULL', 'stderr': 'subprocess.DEVNULL'}), "(['cp', src, tgt], stdout=subprocess.DEVNULL, stderr=\n subprocess.DEVNULL)\n", (6808, 6885), False, 'import subprocess\n'), ((12444, 12472), 'os.path.exists', 'os.path.exists', (['lockfilename'], {}), '(lockfilename)\n', (12458, 12472), False, 'import os\n'), ((18888, 18899), 'os.getpid', 'os.getpid', ([], {}), '()\n', (18897, 18899), False, 'import os\n'), ((13401, 13412), 'os.getpid', 'os.getpid', ([], {}), '()\n', (13410, 13412), False, 'import os\n'), ((12544, 12555), 'time.time', 'time.time', ([], {}), '()\n', (12553, 12555), False, 'import time\n')]
|
#
# cogs/optional/statbot/sql.py
#
# futaba - A Discord Mod bot for the Programming server
# Copyright (c) 2017-2020 <NAME>, <NAME>, jackylam5
#
# futaba is available free of charge under the terms of the MIT
# License. You are free to redistribute and/or modify it under those
# terms. It is distributed in the hopes that it will be useful, but
# WITHOUT ANY WARRANTY. See the LICENSE file for more details.
#
"""
Module for abstractly interfacing with Statbot's RDBMS.
"""
import logging
from sqlalchemy import create_engine
from sqlalchemy.sql import text
from .utils import int_hash
logger = logging.getLogger(__name__)
__all__ = ["StatbotSqlHandler"]
class StatbotSqlHandler:
__slots__ = ("db", "conn")
def __init__(self, db_path: str):
self.db = create_engine(db_path)
self.conn = self.db.connect()
logger.info("Connected to database...")
def __del__(self):
self.conn.close()
def execute(self, *args, **kwargs):
return self.conn.execute(*args, **kwargs)
# Specific queries
def message_count(self, guild, user, excluded_channels=()):
"""
Determines how many messages a user has in the guild.
A list of excluded channels can be specified.
"""
logger.info(
"Querying message count for user '%s' (%d) in guild '%s' (%d)",
user.name,
user.id,
guild.name,
guild.id,
)
stmt = text(
"""
SELECT
COUNT(message_id) AS messages,
COUNT(edited_at) AS edited,
COUNT(deleted_at) AS deleted
FROM messages
WHERE guild_id = :guild_id
AND int_user_id = :user_id
AND channel_id NOT IN :excluded_channel_ids
"""
)
excluded_channel_ids = tuple(channel.id for channel in excluded_channels)
result = self.conn.execute(
stmt,
guild_id=guild.id,
user_id=int_hash(user.id),
excluded_channel_ids=excluded_channel_ids or (0,),
)
message_count, edited_count, deleted_count = result.fetchone()
return message_count, edited_count, deleted_count
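
# A hedged usage sketch (not part of the original module); the connection URL
# and the guild/user objects are illustrative stand-ins for what the statbot
# cog would normally supply:
#
#     handler = StatbotSqlHandler("postgresql://statbot@localhost/statbot")
#     messages, edited, deleted = handler.message_count(guild, user)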
|
[
"sqlalchemy.create_engine",
"sqlalchemy.sql.text",
"logging.getLogger"
] |
[((601, 628), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (618, 628), False, 'import logging\n'), ((777, 799), 'sqlalchemy.create_engine', 'create_engine', (['db_path'], {}), '(db_path)\n', (790, 799), False, 'from sqlalchemy import create_engine\n'), ((1469, 1823), 'sqlalchemy.sql.text', 'text', (['"""\n SELECT\n COUNT(message_id) AS messages,\n COUNT(edited_at) AS edited,\n COUNT(deleted_at) AS deleted\n FROM messages\n WHERE guild_id = :guild_id\n AND int_user_id = :user_id\n AND channel_id NOT IN :excluded_channel_ids\n """'], {}), '(\n """\n SELECT\n COUNT(message_id) AS messages,\n COUNT(edited_at) AS edited,\n COUNT(deleted_at) AS deleted\n FROM messages\n WHERE guild_id = :guild_id\n AND int_user_id = :user_id\n AND channel_id NOT IN :excluded_channel_ids\n """\n )\n', (1473, 1823), False, 'from sqlalchemy.sql import text\n')]
|
import inspect
from typing import Union, Any, Callable, Iterable, Tuple, Sequence
import torch
import numpy as np
def bifurcate(x: Iterable, lhs: Callable[[Any], bool]) -> Tuple[list, list]:
"""
Split an iterable into two lists depending on a condition.
:param x: An iterable.
:param lhs: A function that takes an element of x; when this returns True, the element is added to the left output,
when this returns False, the element is added to the right output.
:return: Two lists.
"""
l, r = [], []
for el in x:
if lhs(el):
l.append(el)
else:
r.append(el)
return l, r
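
# Illustrative example (not in the original): partition integers by parity.
#     evens, odds = bifurcate(range(6), lambda n: n % 2 == 0)
#     # evens == [0, 2, 4], odds == [1, 3, 5]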
def zpad(x: Any, n: int) -> str:
return str(x).rjust(n, "0")
def split_flat(tens: torch.Tensor, dim: int, clone: bool = False):
if clone:
return [tens.select(dim, i).clone() for i in range(tens.shape[dim])]
else:
assert not tens.requires_grad
return [tens.select(dim, i) for i in range(tens.shape[dim])]
def identity(x: Any) -> Any:
return x
def is_slow_grad(tens: torch.Tensor) -> bool:
if tens.requires_grad:
avoid_funs = {'CopyBackwards', 'SelectBackward'}
next_fun = tens.grad_fn.next_functions[0][0]
if (tens.grad_fn.__class__.__name__ in avoid_funs) or (next_fun.__class__.__name__ in avoid_funs):
return True
return False
def ragged_cat(tensors: Sequence[torch.Tensor], ragged_dim: int, cat_dim: int = 0) -> torch.Tensor:
max_dim_len = max(tensor.shape[ragged_dim] for tensor in tensors)
out = []
num_dims = len(tensors[0].shape)
for tensor in tensors:
this_tens_dim_len = tensor.shape[ragged_dim]
shape = list(tensor.shape)
assert len(shape) == num_dims
shape[ragged_dim] = max_dim_len
padded = torch.empty(shape)
padded[:] = float('nan')
idx = tuple(slice(0, this_tens_dim_len) if i == ragged_dim else slice(None) for i in range(num_dims))
padded[idx] = tensor
out.append(padded)
return torch.cat(out, cat_dim)
def true1d_idx(arr: Union[np.ndarray, torch.Tensor]) -> np.ndarray:
if isinstance(arr, torch.Tensor):
arr = arr.detach().numpy()
arr = arr.astype('bool')
if len(arr.shape) > 1:
raise ValueError("Expected 1d array.")
return np.where(arr)[0]
def infer_forward_kwargs(nn: Union[torch.nn.Module, Callable], method_name: 'str' = 'forward') -> Sequence[str]:
if hasattr(nn, '_forward_kwargs'):
return nn._forward_kwargs
method = getattr(nn, method_name, None)
try:
params = [kwarg for kwarg in inspect.signature(method).parameters if kwarg not in {'self', 'kwargs'}]
except TypeError as e:
if e.args[0].endswith('None is not a callable object'):
params = []
else:
raise e
if not params:
if method_name == '__call__':
raise TypeError(
f"Unable to infer arguments for {nn}. Make sure the `forward` method uses named keyword-arguments."
)
return infer_forward_kwargs(nn, method_name='__call__')
if 'args' in params:
raise TypeError(
f"Unable to infer arguments for {nn}. Make sure it does not use `*args, **kwargs`"
)
return params
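
# A small self-check (not part of the original module) exercising a few of
# the helpers above; shapes and values are illustrative.
if __name__ == "__main__":
    left, right = bifurcate([1, 2, 3, 4], lambda n: n % 2 == 0)
    assert (left, right) == ([2, 4], [1, 3])
    assert zpad(7, 3) == "007"
    # ragged_cat pads the narrower tensor with NaN up to the widest ragged dim
    stacked = ragged_cat([torch.zeros(2, 3), torch.zeros(2, 5)], ragged_dim=1)
    assert stacked.shape == (4, 5)
    assert true1d_idx(np.array([True, False, True])).tolist() == [0, 2]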
|
[
"numpy.where",
"torch.empty",
"torch.cat",
"inspect.signature"
] |
[((2036, 2059), 'torch.cat', 'torch.cat', (['out', 'cat_dim'], {}), '(out, cat_dim)\n', (2045, 2059), False, 'import torch\n'), ((1807, 1825), 'torch.empty', 'torch.empty', (['shape'], {}), '(shape)\n', (1818, 1825), False, 'import torch\n'), ((2317, 2330), 'numpy.where', 'np.where', (['arr'], {}), '(arr)\n', (2325, 2330), True, 'import numpy as np\n'), ((2612, 2637), 'inspect.signature', 'inspect.signature', (['method'], {}), '(method)\n', (2629, 2637), False, 'import inspect\n')]
|
import logging
import asyncio
from hbmqtt.client import MQTTClient, ConnectException
from hbmqtt.mqtt.constants import QOS_0, QOS_1, QOS_2
#
# This sample shows how to publish messages to a broker using different QOS levels.
# Debug output shows the message flows.
logger = logging.getLogger(__name__)
@asyncio.coroutine
def test_coro():
C = MQTTClient()
yield from C.connect('mqtt://localhost:1883/')
tasks = [
asyncio.ensure_future(C.publish('test', b'TEST MESSAGE WITH QOS_1', qos=QOS_1)),
]
yield from asyncio.wait(tasks)
logger.info("messages published")
yield from C.disconnect()
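
# A hedged variant (not in the original sample) exercising the other imported
# QOS levels; the topic and payloads are illustrative. Swap it into
# run_until_complete below to try it.
@asyncio.coroutine
def test_coro_multi_qos():
    C = MQTTClient()
    yield from C.connect('mqtt://localhost:1883/')
    tasks = [
        asyncio.ensure_future(C.publish('test', b'TEST MESSAGE WITH QOS_0', qos=QOS_0)),
        asyncio.ensure_future(C.publish('test', b'TEST MESSAGE WITH QOS_2', qos=QOS_2)),
    ]
    yield from asyncio.wait(tasks)
    logger.info("messages published")
    yield from C.disconnect()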
if __name__ == '__main__':
formatter = "[%(asctime)s] %(name)s {%(filename)s:%(lineno)d} %(levelname)s - %(message)s"
formatter = "%(message)s"
logging.basicConfig(level=logging.DEBUG, format=formatter)
asyncio.get_event_loop().run_until_complete(test_coro())
|
[
"asyncio.get_event_loop",
"logging.basicConfig",
"hbmqtt.client.MQTTClient",
"asyncio.wait",
"logging.getLogger"
] |
[((268, 295), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (285, 295), False, 'import logging\n'), ((341, 353), 'hbmqtt.client.MQTTClient', 'MQTTClient', ([], {}), '()\n', (351, 353), False, 'from hbmqtt.client import MQTTClient, ConnectException\n'), ((775, 833), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG', 'format': 'formatter'}), '(level=logging.DEBUG, format=formatter)\n', (794, 833), False, 'import logging\n'), ((530, 549), 'asyncio.wait', 'asyncio.wait', (['tasks'], {}), '(tasks)\n', (542, 549), False, 'import asyncio\n'), ((838, 862), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (860, 862), False, 'import asyncio\n')]
|
import copy
import os
import numpy as np
from hexrd.config.root import RootConfig
from hexrd.config.material import MaterialConfig
from hexrd.config.instrument import Instrument as InstrumentConfig
from hexrd.ui.create_hedm_instrument import create_hedm_instrument
from hexrd.ui.hexrd_config import HexrdConfig
from hexrd.ui.utils import is_omega_imageseries
def create_indexing_config():
# Creates a hexrd.config class from the indexing configuration
# Make a copy to modify
indexing_config = copy.deepcopy(HexrdConfig().indexing_config)
available_materials = list(HexrdConfig().materials.keys())
selected_material = indexing_config.get('_selected_material')
if selected_material not in available_materials:
raise Exception(f'Selected material {selected_material} not available')
material = HexrdConfig().material(selected_material)
omaps = indexing_config['find_orientations']['orientation_maps']
omaps['active_hkls'] = list(range(len(material.planeData.getHKLs())))
# Set the active material on the config
tmp = indexing_config.setdefault('material', {})
tmp['active'] = material.name
# Create the root config from the indexing config dict
config = RootConfig(indexing_config)
# Create and set instrument config
iconfig = InstrumentConfig(config)
iconfig._hedm = create_hedm_instrument()
config.instrument = iconfig
# Create and set material config
mconfig = MaterialConfig(config)
mconfig.materials = HexrdConfig().materials
config.material = mconfig
# Set this so the config won't over-write our tThWidth
config.set('material:tth_width', np.degrees(material.planeData.tThWidth))
# Use unaggregated images if possible
ims_dict = HexrdConfig().unagg_images
if ims_dict is None:
# This probably means the image series was never aggregated.
# Try using the imageseries dict.
ims_dict = HexrdConfig().imageseries_dict
if any(not is_omega_imageseries(ims) for ims in ims_dict.values()):
# Add an early error that is easier to understand...
raise Exception('Omegas not found!')
config.image_series = ims_dict
validate_config(config)
return config
def validate_config(config):
# Perform any modifications to make sure this is a valid config
try:
config.working_dir
except IOError:
# This working directory does not exist. Set it to the cwd.
print(f'Warning: {config.get("working_dir")} does not exist.',
f'Changing working directory to {os.getcwd()}')
config.set('working_dir', os.getcwd())
# Make sure future configs use the new working dir as well...
HexrdConfig().indexing_config['working_dir'] = os.getcwd()
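
# A hedged usage sketch (not part of the original module); it assumes a
# HexrdConfig session with materials and an image series already loaded:
#
#     config = create_indexing_config()
#     print(config.get('working_dir'))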
|
[
"hexrd.ui.create_hedm_instrument.create_hedm_instrument",
"hexrd.config.material.MaterialConfig",
"numpy.degrees",
"os.getcwd",
"hexrd.ui.utils.is_omega_imageseries",
"hexrd.ui.hexrd_config.HexrdConfig",
"hexrd.config.root.RootConfig",
"hexrd.config.instrument.Instrument"
] |
[((1227, 1254), 'hexrd.config.root.RootConfig', 'RootConfig', (['indexing_config'], {}), '(indexing_config)\n', (1237, 1254), False, 'from hexrd.config.root import RootConfig\n'), ((1309, 1333), 'hexrd.config.instrument.Instrument', 'InstrumentConfig', (['config'], {}), '(config)\n', (1325, 1333), True, 'from hexrd.config.instrument import Instrument as InstrumentConfig\n'), ((1354, 1378), 'hexrd.ui.create_hedm_instrument.create_hedm_instrument', 'create_hedm_instrument', ([], {}), '()\n', (1376, 1378), False, 'from hexrd.ui.create_hedm_instrument import create_hedm_instrument\n'), ((1463, 1485), 'hexrd.config.material.MaterialConfig', 'MaterialConfig', (['config'], {}), '(config)\n', (1477, 1485), False, 'from hexrd.config.material import MaterialConfig\n'), ((1510, 1523), 'hexrd.ui.hexrd_config.HexrdConfig', 'HexrdConfig', ([], {}), '()\n', (1521, 1523), False, 'from hexrd.ui.hexrd_config import HexrdConfig\n'), ((1661, 1700), 'numpy.degrees', 'np.degrees', (['material.planeData.tThWidth'], {}), '(material.planeData.tThWidth)\n', (1671, 1700), True, 'import numpy as np\n'), ((1760, 1773), 'hexrd.ui.hexrd_config.HexrdConfig', 'HexrdConfig', ([], {}), '()\n', (1771, 1773), False, 'from hexrd.ui.hexrd_config import HexrdConfig\n'), ((526, 539), 'hexrd.ui.hexrd_config.HexrdConfig', 'HexrdConfig', ([], {}), '()\n', (537, 539), False, 'from hexrd.ui.hexrd_config import HexrdConfig\n'), ((836, 849), 'hexrd.ui.hexrd_config.HexrdConfig', 'HexrdConfig', ([], {}), '()\n', (847, 849), False, 'from hexrd.ui.hexrd_config import HexrdConfig\n'), ((1942, 1955), 'hexrd.ui.hexrd_config.HexrdConfig', 'HexrdConfig', ([], {}), '()\n', (1953, 1955), False, 'from hexrd.ui.hexrd_config import HexrdConfig\n'), ((2765, 2776), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2774, 2776), False, 'import os\n'), ((1989, 2014), 'hexrd.ui.utils.is_omega_imageseries', 'is_omega_imageseries', (['ims'], {}), '(ims)\n', (2009, 2014), False, 'from hexrd.ui.utils import is_omega_imageseries\n'), ((2626, 2637), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2635, 2637), False, 'import os\n'), ((588, 601), 'hexrd.ui.hexrd_config.HexrdConfig', 'HexrdConfig', ([], {}), '()\n', (599, 601), False, 'from hexrd.ui.hexrd_config import HexrdConfig\n'), ((2718, 2731), 'hexrd.ui.hexrd_config.HexrdConfig', 'HexrdConfig', ([], {}), '()\n', (2729, 2731), False, 'from hexrd.ui.hexrd_config import HexrdConfig\n'), ((2577, 2588), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2586, 2588), False, 'import os\n')]
|
# Generated by Django 3.2.5 on 2021-07-13 04:10
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('resumes', '0023_educationdetails'),
]
operations = [
migrations.RenameField(
model_name='softskills',
old_name='Soft_Skill_Name',
new_name='SoftSkill_Name',
),
]
|
[
"django.db.migrations.RenameField"
] |
[((225, 331), 'django.db.migrations.RenameField', 'migrations.RenameField', ([], {'model_name': '"""softskills"""', 'old_name': '"""Soft_Skill_Name"""', 'new_name': '"""SoftSkill_Name"""'}), "(model_name='softskills', old_name='Soft_Skill_Name',\n new_name='SoftSkill_Name')\n", (247, 331), False, 'from django.db import migrations\n')]
|
import functools
import inspect
import json
import os
import warnings
# State numbers:
#
# 0: `state` is None. The stream has no more elements. `step` can't be called.
#
# 1: `state` is a function. Calling `step` with a 2-tuple of positional and
# keyword arguments passes those arguments to the function, and then
# transitions to state 0 or 2 depending on whether the function is a
# generator. It returns the first value yielded by the generator, or
# raises `StopIteration` with a payload if the function or generator
# returns.
#
# 2: `state` is the iterator returned by the generator function. Calling `step`
# returns the next value yielded by the generator (called with `param`)
# or, if the generator returns, raises `StopIteration` with the returned
# value and transitions to state 0.
#
# This is essentially a tool for getting plain functions to behave like
# generators that return a value immediately without yielding anything.
class _Stream:
def __init__(self, f):
self.state_no = 1
self.state = f
def _finish(self):
self.state_no = 0
self.state = None
def step(self, param):
if self.state_no == 1:
f = self.state
(args, kwargs) = param
res = f(*args, **kwargs)
if inspect.isgeneratorfunction(f):
self.state_no = 2
self.state = res
return next(res)
else:
self._finish()
raise StopIteration(res)
elif self.state_no == 2:
try:
return self.state.send(param)
except StopIteration:
self._finish()
raise
else:
assert False, "called step from invalid state"
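
# Illustrative behavior of the state machine above (not in the original):
# for a plain function f, step((args, kwargs)) raises StopIteration carrying
# f(*args, **kwargs) and moves to state 0; for a generator function it
# returns the first yielded value, and each later step(x) sends x back into
# the generator until it returns.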
class _Promise:
def __init__(self, src, tracked_id, do_compute, evict_key, f, f_args, f_kwargs):
self.resolved = False
assert isinstance(src, str)
assert isinstance(tracked_id, int)
assert isinstance(do_compute, bool)
assert isinstance(evict_key, str)
self.src = src
self.tracked_id = tracked_id
self._do_compute = do_compute
self._evict_key = evict_key
self._stream_param = (f_args, f_kwargs)
self._cache_args = (f_args, tuple(sorted(f_kwargs.items())))
self._stream = _Stream(f)
def _resolve(self):
self.resolved = True
self._stream_param = None
self._cache_args = None
self._stream = None
def step(self, cache):
assert not self.resolved
if self._cache_args:
try:
res = cache.lookup(self.src, *self._cache_args, self._evict_key)
self._resolve()
return res
except KeyError:
pass
assert self._do_compute, \
"computation of function '{}' was disabled, " \
"but it was not found in the cache" .format(self.src)
# The value of `resolved` after calling this function determines
# whether the value returned by this function was yielded or returned
# by the `_stream`. If the former, the caller should supply a new value
# to pass to the stream next step by overwriting `input_for_next`.
try:
res = self._stream.step(self._stream_param)
return res
except StopIteration as e:
if len(e.args) == 0:
res = None
else:
res = e.args[0]
cache.insert(self.src, *self._cache_args, res, self._evict_key)
self._resolve()
return res
def supply_input(self, x):
self._stream_param = x
def __del__(self):
if not self.resolved:
warnings.warn(
"promise was never resolved - did you forget to yield it?",
ResourceWarning
)
class _Cache:
def __init__(self, names, to_json_fn):
self._names = names
self._names_to_ids = {}
self._to_json_fn = to_json_fn
def empty(to_json_fn):
return _Cache({}, to_json_fn)
def from_json(json_value, *, from_json_fn, to_json_fn):
return _Cache(
{
name: _NameSubcache.from_json(
subcache_json,
name=name,
from_json_fn=from_json_fn,
) for name, subcache_json in json_value["names"].items()
},
to_json_fn,
)
def to_json(self):
return {"names": {
name: subcache.to_json(
name=name,
to_json_fn=self._to_json_fn
)
for name, subcache in self._names.items()
if not subcache.is_empty()
}}
def lookup(self, name, args, kwargs, evict_key=None):
assert isinstance(kwargs, tuple)
return self._names[name].lookup(args, kwargs, evict_key)
def _get_subcache(self, name):
if name not in self._names:
self._names[name] = _NameSubcache.empty()
return self._names[name]
def insert(self, name, args, kwargs, res, evict_key=None):
assert isinstance(kwargs, tuple)
self._get_subcache(name).insert(args, kwargs, res, evict_key)
class _NameSubcache:
def __init__(self, entries, evict_key):
self.entries = entries
self._evict_key = evict_key
def empty():
return _NameSubcache({}, None)
def is_empty(self):
return len(self.entries) == 0 and not self._evict_key
def from_json(json_value, *, name, from_json_fn):
entries = {}
for entry_json in json_value["entries"]:
entry = _Entry.from_json(
entry_json,
name=name,
from_json_fn=from_json_fn
)
k = (entry.args, entry.kwargs)
assert k not in entries, \
"duplicate cache entries for '{}': args={}, kwargs={}" \
.format(name, k[0], k[1])
entries[k] = entry.res
evict_key = json_value.get("ev", "")
return _NameSubcache(entries, evict_key)
def to_json(self, *, name, to_json_fn):
res = {"entries": [
_Entry(args, kwargs, res).to_json(name=name, to_json_fn=to_json_fn)
for (args, kwargs), res in self.entries.items()
]}
if self._evict_key:
res["ev"] = self._evict_key
return res
def _check_evict(self, evict_key):
if self._evict_key is None:
            self._evict_key = evict_key
elif not (evict_key is None or evict_key == self._evict_key):
self._evict_key = evict_key
self.entries = {}
def lookup(self, args, kwargs, evict_key=None):
assert isinstance(kwargs, tuple)
self._check_evict(evict_key)
return self.entries[(args, kwargs)]
def insert(self, args, kwargs, res, evict_key=None):
assert isinstance(kwargs, tuple)
self._check_evict(evict_key)
k = (args, kwargs)
assert k not in self.entries, "value {} is already in cache".format(k)
self.entries[k] = res
# Represents data associated with a specific cache entry. Note that these
# objects are not actually used to store the cache entries in memory (since
# we need to store the inputs as dictionary keys); it's just an intermediate
# data structure used during JSON encoding/decoding.
class _Entry:
def __init__(self, args, kwargs, res):
assert isinstance(args, tuple)
assert isinstance(kwargs, tuple)
self.args = args
self.kwargs = kwargs
self.res = res
def from_json(json_value, *, name, from_json_fn):
*args_json, kwargs_json, res_json = json_value
args = tuple(
from_json_fn(arg_json, name=name, pos="arg")
for arg_json in args_json
)
kwargs = tuple(sorted(
(k, from_json_fn(arg_json, name=name, pos="kwarg"))
for k, arg_json in kwargs_json.items()
))
res = from_json_fn(res_json, name=name, pos="res")
return _Entry(args, kwargs, res)
def to_json(self, *, name, to_json_fn):
return [
*(to_json_fn(arg, name=name, pos="arg") for arg in self.args),
{
k: to_json_fn(arg, name=name, pos="kwarg")
for k, arg in self.kwargs
},
to_json_fn(self.res, name=name, pos="res")
]
class Context:
def __init__(self, cache, *, serialize_fn):
assert isinstance(cache, _Cache)
assert callable(serialize_fn)
self._cache = cache
self._names_to_ids = {}
self._serialize_fn = serialize_fn
def compute(self, promise):
assert isinstance(promise, _Promise)
blocked_promises = []
current_promise = promise
while True:
# Check that we aren't calling multiple functions that are tracked
# using the same name.
name = current_promise.src
if name in self._names_to_ids:
assert self._names_to_ids[name] == current_promise.tracked_id, \
"multiple functions are tracked under the name '{}'" \
.format(name)
else:
self._names_to_ids[name] = current_promise.tracked_id
result = current_promise.step(self._cache)
# If the current promise is not yet resolved, then it is blocked on
# the promise it yielded. This means we should add it to the stack
# of blocked promises and focus on resolving the one it yielded.
if not current_promise.resolved:
# TODO: these should just act as normal generators
assert isinstance(result, _Promise), \
"tracked function yielded something other than a promise"
blocked_promises.append(current_promise)
current_promise = result
# If the promise did resolve, and it wasn't the root, then its
# result should be supplied as input to the promise that called it.
elif len(blocked_promises) > 0:
current_promise = blocked_promises.pop()
current_promise.supply_input(result)
# When the root promise resolves, its result is the result of the
# final computation.
else:
return result
def _sync(self):
self._serialize_fn(self._cache.to_json())
def __enter__(self):
return self
def __exit__(self, _exc_ty, _exc, _traceback):
self._sync()
_num_tracked = 0
def _track(name=None, compute=True, evict_key=""):
def decorate(f):
global _num_tracked
tracked_id = _num_tracked
_num_tracked += 1
@functools.wraps(f)
def wrapper(*args, **kwargs):
src = f.__name__ if name is None else name
return _Promise(src, tracked_id, compute, evict_key, f, args, kwargs)
return wrapper
return decorate
def track(*args, **kwargs):
# A special case that makes this function directly usable as a decorator.
if len(args) == 1 and len(kwargs) == 0:
f = args[0]
assert callable(f)
return _track(name=f.__name__)(f)
else:
return _track(*args, **kwargs)
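
# Illustrative decorator usage (hypothetical function name, not in the
# original):
#
#     @track
#     def slow_add(x, y):
#         return x + y
#
# slow_add(1, 2) now returns a _Promise rather than 3; Context.compute
# resolves it, caching the result under the name "slow_add".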
def open(filename="_inax.json"):
# since we're introducing a new function called `open`
from builtins import open as file_open
def to_json_fn(value, name, pos):
# assume the value is valid JSON
return value
def from_json_fn(value, name, pos):
# assume the JSON is a valid value
return value
try:
f = file_open(filename, "r")
except FileNotFoundError:
cache = _Cache.empty(to_json_fn)
else:
cache = _Cache.from_json(
json.load(f),
from_json_fn=from_json_fn,
to_json_fn=to_json_fn
)
f.close()
# in case the cwd has changed when serialization happens
path = os.path.abspath(filename)
def serialize_fn(cache):
with file_open(path, "w") as f:
json.dump(cache, f, separators=",:")
f.write('\n')
return Context(cache, serialize_fn=serialize_fn)
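
# A minimal end-to-end sketch under stated assumptions (not in the original):
# results are JSON-serializable and tracked generators yield promises for
# their tracked sub-calls. The names "square" and "total" are hypothetical.
if __name__ == "__main__":
    @track
    def square(x):
        return x * x

    @track
    def total(*xs):
        acc = 0
        for x in xs:
            acc += yield square(x)  # resolved by Context.compute
        return acc

    with open("_inax_demo.json") as ctx:
        print(ctx.compute(total(1, 2, 3)))  # 14; served from cache on later runs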
|
[
"json.dump",
"os.path.abspath",
"json.load",
"builtins.open",
"inspect.isgeneratorfunction",
"functools.wraps",
"warnings.warn"
] |
[((12124, 12149), 'os.path.abspath', 'os.path.abspath', (['filename'], {}), '(filename)\n', (12139, 12149), False, 'import os\n'), ((10892, 10910), 'functools.wraps', 'functools.wraps', (['f'], {}), '(f)\n', (10907, 10910), False, 'import functools\n'), ((11784, 11808), 'builtins.open', 'file_open', (['filename', '"""r"""'], {}), "(filename, 'r')\n", (11793, 11808), True, 'from builtins import open as file_open\n'), ((1320, 1350), 'inspect.isgeneratorfunction', 'inspect.isgeneratorfunction', (['f'], {}), '(f)\n', (1347, 1350), False, 'import inspect\n'), ((3788, 3882), 'warnings.warn', 'warnings.warn', (['"""promise was never resolved - did you forget to yield it?"""', 'ResourceWarning'], {}), "('promise was never resolved - did you forget to yield it?',\n ResourceWarning)\n", (3801, 3882), False, 'import warnings\n'), ((11936, 11948), 'json.load', 'json.load', (['f'], {}), '(f)\n', (11945, 11948), False, 'import json\n'), ((12192, 12212), 'builtins.open', 'file_open', (['path', '"""w"""'], {}), "(path, 'w')\n", (12201, 12212), True, 'from builtins import open as file_open\n'), ((12231, 12267), 'json.dump', 'json.dump', (['cache', 'f'], {'separators': '""",:"""'}), "(cache, f, separators=',:')\n", (12240, 12267), False, 'import json\n')]
|
# Imports here
import numpy as np
import pandas as pd
import time
import argparse
import torch
from torch import nn
from torch import optim
import torch.nn.functional as F
from torch.autograd import Variable
from collections import OrderedDict
import torchvision
from torchvision import datasets, transforms, models
import json
import helper
# Creates Argument Parser object named parser
parser = argparse.ArgumentParser()
# Argument 1: that's a path to a folder
parser.add_argument('--data_dir', type = str, default = 'flowers', help = 'path to the folder of flower images')
parser.add_argument('--arch', type = str, default = 'vgg', help = 'The CNN model architecture to use')
parser.add_argument('--save_dir', type = str, default = '', help = 'path to the folder of saved model ')
parser.add_argument('--learning_rate', type = float, default = .001, help = 'learning rate value ')
parser.add_argument('--hidden_units', type = int, default = 4096, help = 'hidden_units number ')
parser.add_argument('--epochs', type = int, default = 11, help = 'number of epochs ')
parser.add_argument('--gpu', type = bool, default = True, help = 'training device ')
in_args = parser.parse_args()
data_dir = in_args.data_dir
train_dir = data_dir + '/train'
valid_dir = data_dir + '/valid'
test_dir = data_dir + '/test'
# TODO: Define your transforms for the training, validation, and testing sets
train_transforms = transforms.Compose([transforms.RandomRotation(30),
transforms.RandomResizedCrop(224),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize([0.485, 0.456, 0.406],
[0.229, 0.224, 0.225])
])
cost_transforms = transforms.Compose([transforms.Resize(256),
transforms.CenterCrop(224),
transforms.ToTensor(),
transforms.Normalize([0.485, 0.456, 0.406],
[0.229, 0.224, 0.225])
])
test_transforms = transforms.Compose([transforms.Resize(256),
transforms.CenterCrop(224),
transforms.ToTensor(),
transforms.Normalize([0.485, 0.456, 0.406],
[0.229, 0.224, 0.225])
])
# Load the datasets with ImageFolder
train_data = datasets.ImageFolder(train_dir, transform=train_transforms)
valid_data = datasets.ImageFolder(valid_dir, transform=cost_transforms)
test_data = datasets.ImageFolder(test_dir, transform=test_transforms)
# Using the image datasets and the transforms, define the dataloaders
train_loader = torch.utils.data.DataLoader(train_data, batch_size=64, shuffle=True)
valid_loader = torch.utils.data.DataLoader(valid_data, batch_size=32)
test_loader = torch.utils.data.DataLoader(test_data, batch_size=32)
image_datasets = [train_data, valid_data, test_data]
dataloaders = [train_loader, valid_loader, test_loader]
with open('cat_to_name.json', 'r') as f:
cat_to_name = json.load(f)
arch = in_args.arch
arch = arch.lower()
resnet18 = models.resnet18(pretrained=True)
alexnet = models.alexnet(pretrained=True)
vgg16 = models.vgg16(pretrained=True)
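# Note: the dict below shadows the torchvision `models` import above; all
# three pretrained networks were already constructed, so the rebinding is
# safe at this point.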
models = {'resnet': resnet18, 'alexnet': alexnet, 'vgg': vgg16}
model = models[arch]
# Freeze parameters so we don't backprop through them
for param in model.parameters():
param.requires_grad = False
classifier = nn.Sequential(OrderedDict([
('fc1', nn.Linear(25088, in_args.hidden_units)),
('relu', nn.ReLU()),
('drop1',nn.Dropout(.6)),
('fc2', nn.Linear(in_args.hidden_units, 102)),
('output', nn.LogSoftmax(dim=1))
]))
model.classifier = classifier
criterion = nn.CrossEntropyLoss()
optimizer = optim.Adam(model.classifier.parameters(), lr= in_args.learning_rate)
epochs = in_args.epochs
steps = 0
running_loss = 0
print_every = 103
cuda = torch.cuda.is_available()
if cuda and in_args.gpu:
model.cuda()
print('cuda')
gpu_usage = True
else:
model.cpu()
print('cpu')
gpu_usage = False
start = time.time()
for e in range(epochs):
model.train()
for data in dataloaders[0]:
images, labels = data
steps += 1
optimizer.zero_grad()
if gpu_usage == True:
images, labels = Variable(images.cuda()), Variable(labels.cuda())
else:
images, labels = Variable(images), Variable(labels)
output = model.forward(images)
loss = criterion(output, labels)
loss.backward()
optimizer.step()
running_loss += loss.item()
if steps % print_every == 0:
# Make sure network is in eval mode for inference
model.eval()
# Turn off gradients for validation, saves memory and computations
with torch.no_grad():
test_loss, accuracy = helper.validation(model, dataloaders[1], criterion, gpu_usage)
print("Epoch: {}/{}.. ".format(e+1, epochs),
"Training Loss: {:.3f}.. ".format(running_loss/print_every),
"Validation Loss: {:.3f}.. ".format(test_loss/len(dataloaders[1])),
"Validation Accuracy: {:.3f}".format(accuracy/len(dataloaders[1])))
running_loss = 0
# Make sure training is back on
model.train()
total_time = time.time() - start
print("\nTotal time: {:.0f}m {:.0f}s".format(total_time//60, total_time % 60))
# Do validation on the test set
model.eval()
# Turn off gradients for validation, saves memory and computations
with torch.no_grad():
test_loss, accuracy = helper.validation(model, dataloaders[2], criterion, gpu_usage)
print("Test Loss: {:.3f}.. ".format(test_loss/len(dataloaders[2])),
"Test Accuracy: {:.3f}".format(accuracy/len(dataloaders[2])))
# TODO: Save the checkpoint
model.class_to_idx = image_datasets[0].class_to_idx
checkpoint = {'input_size': 25088,
'output_size': 102,
'arch': 'vgg16',
'learning_rate': 0.001,
'batch_size': 64,
'classifier' : classifier,
'epochs': epochs,
'optimizer': optimizer.state_dict(),
'state_dict': model.state_dict(),
'class_to_idx': model.class_to_idx}
if(in_args.save_dir==''):
torch.save(checkpoint, 'checkpoint.pth')
else:
torch.save(checkpoint, in_args.save_dir + 'checkpoint.pth')
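
# A hedged sketch (not part of the original script) of restoring the
# checkpoint saved above in a fresh process; 'checkpoint.pth' matches the
# default path used when --save_dir is empty:
#
#     checkpoint = torch.load('checkpoint.pth')
#     model = torchvision.models.vgg16(pretrained=True)
#     model.classifier = checkpoint['classifier']
#     model.load_state_dict(checkpoint['state_dict'])
#     model.class_to_idx = checkpoint['class_to_idx']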
|
[
"torch.nn.Dropout",
"argparse.ArgumentParser",
"torchvision.transforms.Normalize",
"torch.no_grad",
"torch.utils.data.DataLoader",
"torchvision.transforms.RandomRotation",
"torch.nn.Linear",
"torchvision.transforms.CenterCrop",
"torchvision.models.vgg16",
"helper.validation",
"torchvision.models.resnet18",
"torchvision.transforms.RandomHorizontalFlip",
"torch.nn.LogSoftmax",
"torch.autograd.Variable",
"torchvision.models.alexnet",
"torchvision.datasets.ImageFolder",
"torch.cuda.is_available",
"torchvision.transforms.Resize",
"json.load",
"torch.nn.ReLU",
"torch.nn.CrossEntropyLoss",
"time.time",
"torch.save",
"torchvision.transforms.RandomResizedCrop",
"torchvision.transforms.ToTensor"
] |
[((399, 424), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (422, 424), False, 'import argparse\n'), ((2713, 2772), 'torchvision.datasets.ImageFolder', 'datasets.ImageFolder', (['train_dir'], {'transform': 'train_transforms'}), '(train_dir, transform=train_transforms)\n', (2733, 2772), False, 'from torchvision import datasets, transforms, models\n'), ((2786, 2844), 'torchvision.datasets.ImageFolder', 'datasets.ImageFolder', (['valid_dir'], {'transform': 'cost_transforms'}), '(valid_dir, transform=cost_transforms)\n', (2806, 2844), False, 'from torchvision import datasets, transforms, models\n'), ((2858, 2915), 'torchvision.datasets.ImageFolder', 'datasets.ImageFolder', (['test_dir'], {'transform': 'test_transforms'}), '(test_dir, transform=test_transforms)\n', (2878, 2915), False, 'from torchvision import datasets, transforms, models\n'), ((3002, 3070), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['train_data'], {'batch_size': '(64)', 'shuffle': '(True)'}), '(train_data, batch_size=64, shuffle=True)\n', (3029, 3070), False, 'import torch\n'), ((3086, 3140), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['valid_data'], {'batch_size': '(32)'}), '(valid_data, batch_size=32)\n', (3113, 3140), False, 'import torch\n'), ((3156, 3209), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['test_data'], {'batch_size': '(32)'}), '(test_data, batch_size=32)\n', (3183, 3209), False, 'import torch\n'), ((3445, 3477), 'torchvision.models.resnet18', 'models.resnet18', ([], {'pretrained': '(True)'}), '(pretrained=True)\n', (3460, 3477), False, 'from torchvision import datasets, transforms, models\n'), ((3488, 3519), 'torchvision.models.alexnet', 'models.alexnet', ([], {'pretrained': '(True)'}), '(pretrained=True)\n', (3502, 3519), False, 'from torchvision import datasets, transforms, models\n'), ((3528, 3557), 'torchvision.models.vgg16', 'models.vgg16', ([], {'pretrained': '(True)'}), '(pretrained=True)\n', (3540, 3557), False, 'from torchvision import datasets, transforms, models\n'), ((4194, 4215), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {}), '()\n', (4213, 4215), False, 'from torch import nn\n'), ((4375, 4400), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (4398, 4400), False, 'import torch\n'), ((4552, 4563), 'time.time', 'time.time', ([], {}), '()\n', (4561, 4563), False, 'import time\n'), ((3380, 3392), 'json.load', 'json.load', (['f'], {}), '(f)\n', (3389, 3392), False, 'import json\n'), ((5918, 5929), 'time.time', 'time.time', ([], {}), '()\n', (5927, 5929), False, 'import time\n'), ((6149, 6164), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (6162, 6164), False, 'import torch\n'), ((6192, 6254), 'helper.validation', 'helper.validation', (['model', 'dataloaders[2]', 'criterion', 'gpu_usage'], {}), '(model, dataloaders[2], criterion, gpu_usage)\n', (6209, 6254), False, 'import helper\n'), ((6922, 6962), 'torch.save', 'torch.save', (['checkpoint', '"""checkpoint.pth"""'], {}), "(checkpoint, 'checkpoint.pth')\n", (6932, 6962), False, 'import torch\n'), ((6973, 7032), 'torch.save', 'torch.save', (['checkpoint', "(in_args.save_dir + 'checkpoint.pth')"], {}), "(checkpoint, in_args.save_dir + 'checkpoint.pth')\n", (6983, 7032), False, 'import torch\n'), ((1432, 1461), 'torchvision.transforms.RandomRotation', 'transforms.RandomRotation', (['(30)'], {}), '(30)\n', (1457, 1461), False, 'from torchvision import datasets, transforms, models\n'), ((1501, 1534), 'torchvision.transforms.RandomResizedCrop', 'transforms.RandomResizedCrop', (['(224)'], {}), '(224)\n', (1529, 1534), False, 'from torchvision import datasets, transforms, models\n'), ((1574, 1607), 'torchvision.transforms.RandomHorizontalFlip', 'transforms.RandomHorizontalFlip', ([], {}), '()\n', (1605, 1607), False, 'from torchvision import datasets, transforms, models\n'), ((1647, 1668), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (1666, 1668), False, 'from torchvision import datasets, transforms, models\n'), ((1708, 1774), 'torchvision.transforms.Normalize', 'transforms.Normalize', (['[0.485, 0.456, 0.406]', '[0.229, 0.224, 0.225]'], {}), '([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])\n', (1728, 1774), False, 'from torchvision import datasets, transforms, models\n'), ((1914, 1936), 'torchvision.transforms.Resize', 'transforms.Resize', (['(256)'], {}), '(256)\n', (1931, 1936), False, 'from torchvision import datasets, transforms, models\n'), ((1976, 2002), 'torchvision.transforms.CenterCrop', 'transforms.CenterCrop', (['(224)'], {}), '(224)\n', (1997, 2002), False, 'from torchvision import datasets, transforms, models\n'), ((2042, 2063), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (2061, 2063), False, 'from torchvision import datasets, transforms, models\n'), ((2103, 2169), 'torchvision.transforms.Normalize', 'transforms.Normalize', (['[0.485, 0.456, 0.406]', '[0.229, 0.224, 0.225]'], {}), '([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])\n', (2123, 2169), False, 'from torchvision import datasets, transforms, models\n'), ((2309, 2331), 'torchvision.transforms.Resize', 'transforms.Resize', (['(256)'], {}), '(256)\n', (2326, 2331), False, 'from torchvision import datasets, transforms, models\n'), ((2370, 2396), 'torchvision.transforms.CenterCrop', 'transforms.CenterCrop', (['(224)'], {}), '(224)\n', (2391, 2396), False, 'from torchvision import datasets, transforms, models\n'), ((2435, 2456), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (2454, 2456), False, 'from torchvision import datasets, transforms, models\n'), ((2495, 2561), 'torchvision.transforms.Normalize', 'transforms.Normalize', (['[0.485, 0.456, 0.406]', '[0.229, 0.224, 0.225]'], {}), '([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])\n', (2515, 2561), False, 'from torchvision import datasets, transforms, models\n'), ((3844, 3882), 'torch.nn.Linear', 'nn.Linear', (['(25088)', 'in_args.hidden_units'], {}), '(25088, in_args.hidden_units)\n', (3853, 3882), False, 'from torch import nn\n'), ((3920, 3929), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (3927, 3929), False, 'from torch import nn\n'), ((3967, 3982), 'torch.nn.Dropout', 'nn.Dropout', (['(0.6)'], {}), '(0.6)\n', (3977, 3982), False, 'from torch import nn\n'), ((4018, 4054), 'torch.nn.Linear', 'nn.Linear', (['in_args.hidden_units', '(102)'], {}), '(in_args.hidden_units, 102)\n', (4027, 4054), False, 'from torch import nn\n'), ((4094, 4114), 'torch.nn.LogSoftmax', 'nn.LogSoftmax', ([], {'dim': '(1)'}), '(dim=1)\n', (4107, 4114), False, 'from torch import nn\n'), ((4886, 4902), 'torch.autograd.Variable', 'Variable', (['images'], {}), '(images)\n', (4894, 4902), False, 'from torch.autograd import Variable\n'), ((4904, 4920), 'torch.autograd.Variable', 'Variable', (['labels'], {}), '(labels)\n', (4912, 4920), False, 'from torch.autograd import Variable\n'), ((5337, 5352), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (5350, 5352), False, 'import torch\n'), ((5392, 5454), 'helper.validation', 'helper.validation', (['model', 'dataloaders[1]', 'criterion', 'gpu_usage'], {}), '(model, dataloaders[1], criterion, gpu_usage)\n', (5409, 5454), False, 'import helper\n')]
|
import utils
from traceback import format_exc, print_exc
from tempfile import gettempdir
from linebot import LineBotApi, WebhookParser
from linebot.exceptions import LineBotApiError, InvalidSignatureError
from linebot.models import MessageEvent, TextMessage as _TextMessage, TextSendMessage, TemplateSendMessage, TemplateAction, PostbackTemplateAction, MessageTemplateAction, URITemplateAction, ButtonsTemplate, PostbackEvent, CarouselTemplate, CarouselColumn, LeaveEvent, FollowEvent, UnfollowEvent, JoinEvent, ImageSendMessage, ImageMessage as _ImageMessage
from .api.line_0_10_0.ttypes import TalkException, MIDType, OpType, IdentityProvider, Message as _Message, ContentType
from requests.exceptions import (ReadTimeout, Timeout)
from .api.api import LineAPI, needAuthErrors
from .models.types import Receiver, MessageType, ChatroomType, EventType, UserStatus, UserRelation, WhenOALeave, CommandType, CommandResultType, CommandContinuousCallType
from .models.events import Event, Joined, Invited, Left, Followed, Unfollowed, Message, TextMessage, ImageMessage, Update, Button, Buttons, StickerMessage, LocationMessage, AudioMessage, VideoMessage, FileMessage, ContactMessage
from .models.chatrooms import User, Room, Group, StartRObj
from .models.database import Database
from .models.command import CommandResult, CommandStack
from .models.content import Image
from .utils import IsEmpty, emailRegex, Lock, Alphanumeric, EscapeNames, Acquire, InitAtExit, CreateBody, Clean
from .oaclient import OAClient
from .userclient import UserClient
from requests import get as RequestsGet, post as RequestsPost
from time import time, sleep
from socket import timeout
from threading import Thread, current_thread, Condition
from ssl import SSLError
from thrift.protocol.TProtocol import TProtocolException
from wsgiref.simple_server import make_server
from re import compile, DOTALL
from random import randint
import os
from pyimgur import Imgur
from .models.content import UploadPyImgur, Download, ScaleyUrl
from json import loads, dumps
from shutil import copyfileobj
from codecs import encode, decode
class Client(object):
def __init__(self, channelAccessToken = None, channelSecret = None, email = "", password = "", authToken = "", certificate = "", tries = 5, reauthDelay = 7200, adminIds = [], adminMids = [], dbUrl = None, handler = None, isMac = True, initAll = False, autoAcceptInvitation = True, oAAutoLeaveWhenUserLeave = True, whenOALeave = WhenOALeave.reinvite, oAMid = None, userId = None, userMid = None, comName = "USER", port=None, pyImgurKey = None, oAName="our OA bot", userName="our User bot", pingAddress = None, pingInterval = 180):
object.__init__(self)
InitAtExit()
self.port = port
self._1adminCommands = {}
self._1commands = {}
self._1continuousTextCommands = []
self._1continuousImageCommands = []
self._1continuousAudioCommands = []
self._1continuousVideoCommands = []
self._1continuousFileCommands = []
self._1continuousStickerCommands = []
self._1imageWaitingCommands = {}
self.lock = Lock()
self.handler = handler
self.oAClient = None
self.userClient = None
self.db = None
self.main = 0
self.hasCommands = False
self.hasAdminCommands = False
self.hasContinuousTextCommands = False
self.hasContinuousImageCommands = False
self.hasContinuousAudioCommands = False
self.hasContinuousVideoCommands = False
self.hasContinuousFileCommands = False
self.hasContinuousStickerCommands = False
self.groups = []
self.rooms = []
self.users = []
self._1objectsUserByMsgId = {}
self._1objectsOAByMsgId = {}
self._1senderObjectsUserByMsgId = {}
self._1senderObjectsOAByMsgId = {}
self._1objectsByLineId = {}
self._1objectsByLineMid = {}
self._1objectsByRId = {}
self._1objectsByUId = {}
self._1headers = {}
self._1waitingRoom = {}
self.getUserClientThread = None
self.oAObj = None
self.userObj = None
self._1code = None
self.eCond = Condition()
self.eventQueue = []
self.mLock = Lock()
self.messages = {}
self.pingAddress = pingAddress
self.pingInterval = pingInterval
self._1dbUrl = dbUrl
if not IsEmpty(dbUrl):
self.CreateDatabase(dbUrl)
if not isinstance(adminIds, list):
adminIds = []
if not isinstance(adminMids, list):
adminMids = []
self.oAName = oAName
self.userName = userName
self._1channelAccessToken = channelAccessToken
self._1channelSecret = channelSecret
self._1tries = int(self.TryDBVar("tries", tries))
#self._1email = self.TryDBVar("email", email)
#self._1password = self.TryDBVar("password", password)
self._1email = email
self._1password = password
self._1authToken = self.TryDBVar("authToken", authToken)
self._1certificate = self.TryDBVar("certificate", certificate)
self._1reauthDelay = int(self.TryDBVar("reauthDelay", reauthDelay))
self._1isMac = bool(self.TryDBVar("isMac", isMac))
self._1autoAcceptInvitation = bool(self.TryDBVar("autoAcceptInvitation", autoAcceptInvitation))
        self._1whenOALeave = self.TryDBVar("whenOALeave", whenOALeave)
        self._1oAAutoLeaveWhenUserLeave = bool(self.TryDBVar("oAAutoLeaveWhenUserLeave", oAAutoLeaveWhenUserLeave))
self._1oAMid = self.TryDBVar("oAMid", oAMid)
self._1userId = self.TryDBVar("userId", userId)
self._1userMid = self.TryDBVar("userMid", userMid)
self._1comName = self.TryDBVar("comName", comName)
self._1pyImgurKey = self.TryDBVar("pyImgurKey", pyImgurKey)
self.adminIds = adminIds
self.adminMids = adminMids
self.adminObjs = []
self.hasAdmins = len(adminIds) > 0 or len(adminMids) > 0
self.hasAdminCommands = False
self.inited = 5
self.code = None
self._1started = False
self.startCond = Condition()
self.Init(initAll)
@property
def started(self):
return self._1started
@started.setter
def started(self, value):
with self.startCond:
self._1started = value
if value:
print("STARTED")
self.startCond.notifyAll()
def GetObjByRId(self, rId):
if rId is None:
return
rId = int(rId)
if rId in self._1objectsByRId:
return self._1objectsByRId[rId]
if self.db is None:
return
f = None
with self.GetCursor() as cur:
cur.Execute("SELECT lineId, lineMid FROM ChatRooms WHERE id=%s", (rId,))
f = cur.FetchOne()
if f is None:
return
if f[1] and self.hasUser:
return self.userClient._1GetObjByLineMid(f[1])
if f[0] and self.hasOA:
return self.oAClient._1GetObjByLineId(f[0])
def GetObjByUId(self, uId):
if uId is None:
return
uId = int(uId)
if uId in self._1objectsByUId:
return self._1objectsByUId[uId]
if self.db is None or not self.hasUser:
return
f = None
with self.GetCursor() as cur:
cur.Execute("SELECT lineMid FROM Users WHERE id=%s", (uId,))
f = cur.FetchOne()
if f is None:
return
return self.userClient._1GetObjByLineMid(f[0])
@property
def oAAutoLeaveWhenUserLeave(self):
return self._1oAAutoLeaveWhenUserLeave
def SetTextVar(self, name, value):
if self.db is not None:
with self.GetCursor() as cur:
cur.SetTextVar(name, value)
@oAAutoLeaveWhenUserLeave.setter
def oAAutoLeaveWhenUserLeave(self, value):
self.SetTextVar("oAAutoLeaveWhenUserLeave", value)
self._1oAAutoLeaveWhenUserLeave = value
@property
def dbUrl(self):
return self._1dbUrl
@dbUrl.setter
def dbUrl(self, value):
if value != self._1dbUrl:
self.SetTextVar("dbUrl", value)
self._1dbUrl = value
if not IsEmpty(value):
self.CreateDatabase(value)
def TryDBVar(self, name, value, cur=None, commit=True):
if cur is None:
if self.db is None:
return value
cur = self.GetCursor()
commit = True
if IsEmpty(value):
return cur.GetTextVar(name)
else:
cur.SetTextVar(name, str(value), commit=commit)
return value
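    # The properties below share one pattern: the getter returns the cached
    # _1-prefixed attribute, while the setter persists the value through
    # SetTextVar before updating that cache, so settings survive restarts
    # whenever a database is configured.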
@property
def hasOA(self):
return self.oAClient is not None
@property
def hasUser(self):
return self.userClient is not None and self.userClient.ready
@property
def whenOALeave(self):
return self._1whenOALeave
@whenOALeave.setter
def whenOALeave(self, value):
self.SetTextVar("whenOALeave", value)
self._1whenOALeave = value
def GetPyImgurClient(self):
if self.pyImgurKey is None:
return
return Imgur(self.pyImgurKey)
def UploadPyImgur(self, bytes):
if self.pyImgurKey is None:
return
return UploadPyImgur(self.GetPyImgurClient(), bytes)
@property
def pyImgurKey(self):
return self._1pyImgurKey
@pyImgurKey.setter
def pyImgurKey(self, value):
self.SetTextVar("pyImgurKey", value)
self._1pyImgurKey = value
@property
def comName(self):
return self._1comName
@comName.setter
def comName(self, value):
self.SetTextVar("comName", value)
self._1comName = value
@property
def isMac(self):
return self._1isMac
@isMac.setter
def isMac(self, value):
self.SetTextVar("isMac", value)
self._1isMac = value
@property
def autoAcceptInvitation(self):
return self._1autoAcceptInvitation
@autoAcceptInvitation.setter
def autoAcceptInvitation(self, value):
self.SetTextVar("autoAcceptInvitation", value)
self._1autoAcceptInvitation = value
@property
def reauthDelay(self):
return self._1reauthDelay
@reauthDelay.setter
def reauthDelay(self, value):
self.SetTextVar("reauthDelay", value)
self._1reauthDelay = value
@property
def certificate(self):
return self._1certificate
@certificate.setter
def certificate(self, value):
print("TRYING TO SET CERTIFICATE WITH " + str(value))
self.SetTextVar("certificate", value)
self._1certificate = value
@property
def authToken(self):
return self._1authToken
@authToken.setter
def authToken(self, value):
print("TRYING TO SET AUTHTOKEN WITH " + str(value))
self.SetTextVar("authToken", value)
self._1authToken = value
@property
def password(self):
return self._1password
@password.setter
def password(self, value):
self.SetTextVar("password", value)
self._1password = value
@property
def email(self):
return self._1email
@email.setter
def email(self, value):
self.SetTextVar("email", value)
self._1email = value
@property
def oAMid(self):
return self._1oAMid
@oAMid.setter
def oAMid(self, value):
self.SetTextVar("oAMid", value)
self._1oAMid = value
@property
def userId(self):
return self._1userId
@userId.setter
def userId(self, value):
self.SetTextVar("userId", value)
self._1userId = value
@property
def userMid(self):
return self._1userMid
@userMid.setter
def userMid(self, value):
self.SetTextVar("userMid", value)
self._1userMid = value
@property
def tries(self):
return self._1tries
@tries.setter
def tries(self, value):
self.SetTextVar("tries", value)
self._1tries = value
@property
def channelSecret(self):
return self._1channelSecret
@channelSecret.setter
def channelSecret(self, value):
self.SetTextVar("channelSecret", value)
self._1channelSecret = value
@property
def channelAccessToken(self):
return self._1channelAccessToken
@channelAccessToken.setter
def channelAccessToken(self, value):
self.SetTextVar("channelAccessToken", value)
self._1channelAccessToken = value
def Init(self, initAll = False):
with Acquire(self.lock):
if self.db is None and self.dbUrl is not None:
self.CreateDatabase(self.dbUrl)
if self.oAClient is None and not IsEmpty(self.channelAccessToken) and not IsEmpty(self.channelSecret):
self.oAClient = OAClient(self)
canGetUserClient = self.userClient is None and (self.getUserClientThread is None or not self.getUserClientThread.isAlive()) and ((not IsEmpty(self.authToken) and not IsEmpty(self.certificate)) or (not IsEmpty(self.email) and not IsEmpty(self.password)))
print("INITB1")
if self.oAClient is None and canGetUserClient:
#self.getUserClientThread = Thread(target=self.ThreadExceptionCatcher, args=[self.GetUserClient, [self.email, self.password, self.authToken, self.certificate, self.tries, self.reauthDelay, self.adminMids]])
#self.getUserClientThread.start()
self.GetUserClient()
print("INITB2")
if canGetUserClient:
if self.oAClient is not None:
self.oAClient.Start(thread=True, port=self.port)
with Acquire(self.lock):
self.GetUserClient()
print("INITA1")
if self.oAClient is not None:
with Acquire(self.oAClient.lock):
for id in self.adminIds:
o = self.oAClient._1GetObjByLineId(id)
self.adminObjs.append(o)
self.oAClient.adminObjs.append(o)
print("INITA2")
if self.userClient is not None:
with Acquire(self.userClient.lock):
for mid in self.adminMids:
o = self.userClient._1GetObjByLineMid(mid)
self.adminObjs.append(o)
self.userClient.adminObjs.append(o)
if self.oAClient is None or self.userClient is None:
self.inited = 5
elif self.oAClient.mid is None:
self.inited = 0
else:
self.oAClient.obj = self.userClient._1GetObjByLineMid(self.oAClient.mid)
if self.userClient.id is None:
self.inited = 2
else:
self.userClient.obj = self.oAClient._1GetObjByLineId(self.userClient.id)
if self.userClient.mid is None:
self.inited = 4
else:
self.inited = 5
if initAll:
self.Thread(self.RefreshAll)
#self.RefreshAll()
elif self.userClient is not None:
if self.autoAcceptInvitation:
self.Thread(self.userClient._1AcceptAllGroupInvitations)
else:
self.Thread(self.userClient._1GetAllGroupsInvited)
#self.userClient._1GetAllGroupsInvited()
StartRObj(self)
print("INIT DONE")
@property
def cursor(self):
return self.GetCursor()
def GetCursor(self):
if True:#with self.lock:
return self.db.GetCursor()
def GetUserClient(self):
uc = UserClient(self)
if True:#with self.lock:
self.userClient = uc
self.getUserClientThread = None
self.authToken = self.userClient.authToken
self.certificate = self.userClient.certificate
def HandleWebhook(self, environ, start_response):
if self.hasOA:
return self.oAClient.HandleWebhook(environ, start_response)
start_response('503 Service Unavailable', [])
return CreateBody('Service Unavailable')
def Thread(self, method, args=[], kwargs = {}, start = True):
t = Thread(target=self.ThreadExceptionCatcher, args=[method, args, kwargs])
if start:
t.start()
return t
def ThreadExceptionCatcher(self, method, args=[], kwargs={}):
try:
return (True, method(*args, **kwargs))
except TalkException as e:
print_exc()
self.Report("[Client.ThreadExceptionCatcher]\n" + str(e))
if e.code in needAuthErrors:
self.userClient._1UpdateAuthToken()
except Exception:
self.Report("[Client.ThreadExceptionCatcher]\n" + format_exc())
return (False, None)
    _1init1Regex = compile(r"\[Init1\] ([^ ]+)")
    _1init3Regex = compile(r"\[Init3\] ([^ ]+)")
def GetObjByLineId(self, id):
if self.hasOA:
return self.oAClient._1GetObjByLineId(id)
def GetObjByLineMid(self, mid):
if self.hasUser:
return self.userClient._1GetObjByLineMid(mid)
def RefreshAll(self):
if self.hasUser:
return self.userClient._1RefreshAll()
return False
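    # Init2 drives a bootstrap handshake between the OA bot and the user bot,
    # tracked in self.inited: the OA posts "[Init1] <code>" into a shared
    # chatroom so the user client can learn the OA's mid from that message's
    # sender; the user bot then sends "[Init3] <code>" to the OA so the
    # webhook can learn the user bot's LINE id; state 4 asks to be kicked and
    # reinvited so the remaining mid can be observed; state 5 means fully
    # initialized.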
def Init2(self, event):
if self.inited == 0 and event.chatroom.chatroomType != ChatroomType.user and event.chatroom.hasUser and event.chatroom.hasOA:
self._1code = str(randint(1000, 9999))
self.inited = 1
self._1expectedChatroomMid = event.chatroom.mid
self.oAClient._1SendText(event.chatroom, "[Init1] " + self._1code + " \nPlease ignore this message.")
if self.inited == 1 and event.chatroom.mid == self._1expectedChatroomMid:
if event.eventType == EventType.message and event.messageType == MessageType.text:
mI1 = self._1init1Regex.match(event.text)
if mI1 is not None and mI1.group(1) == self._1code:
self.oAClient.mid = event.sender.mid
self.oAClient.obj = self.userClient._1GetObjByLineMid(self.oAClient.mid)
if self.userClient.id is None:
self.inited = 2
elif self.userClient.mid is None:
self.inited = 4
else:
self.inited = 5
if self.db is not None:
with self.GetCursor() as cur:
cur.SetTextVar("oAMid", self.oAClient.mid)
if self.inited == 2 and self.hasOA:
while True:
code = str(randint(1000, 9999))
if code != self._1code:
self._1code = code
break
self.inited = 3
s = "[Init3] " + self._1code
print("INIT3")
if self.oAClient.obj is None:
self.oAClient.obj = self.userClient._1GetObjByLineMid(self.oAClient.mid)
self.userClient._2SendText(self.oAClient.mid, s)
else:
self.userClient._1SendText(self.oAClient.obj, s)
if self.inited == 3 and event.chatroom.chatroomType == ChatroomType.user:
print("INIT4a")
if event.eventType == EventType.message and event.messageType == MessageType.text:
print("INIT4b")
mI3 = self._1init3Regex.match(event.text)
if mI3 is not None and mI3.group(1) == self._1code:
print("INIT4c")
self.userClient.id = event.sender.id
self.userClient.obj = self.oAClient._1GetObjByLineId(self.userClient.id)
if self.userClient.mid is None:
self.inited = 4
else:
self.inited = 5
if self.db is not None:
print("INIT4d")
with self.GetCursor() as cur:
cur.SetTextVar("userId", self.userClient.id)
if self.inited == 4 and event.chatroom.chatroomType == ChatroomType.group and event.eventType == EventType.message:
#event.ReplyText("[Init4] Please kick me out and reinvite me later.")
self.userClient._1SendText(event.chatroom, "[Init4] Please kick me out and reinvite me later.")
def AddContinuousTextCommand(self, cmd):
with Acquire(self.lock):
self._1continuousTextCommands.append(cmd)
self.hasContinuousTextCommands = True
def HandleContinuousTextCommands(self, event):
if self.hasContinuousTextCommands:
            args = {'continuous':CommandContinuousCallType.text, 'text':event.text, 'message':event, 'options':''}
ret = CommandResult.Failed()
for cmd in self._1continuousTextCommands:
ret = cmd.Call(msg=event, args=args)
if ret.type == CommandResultType.done:
self.CommandDone(event.chatroom, ret, event=event)
return True
return False
def AddContinuousImageCommand(self, cmd):
with Acquire(self.lock):
self._1continuousImageCommands.append(cmd)
self.hasContinuousImageCommands = True
def AddContinuousAudioCommand(self, cmd):
with Acquire(self.lock):
self._1continuousAudioCommands.append(cmd)
self.hasContinuousAudioCommands = True
def AddContinuousVideoCommand(self, cmd):
with Acquire(self.lock):
self._1continuousVideoCommands.append(cmd)
self.hasContinuousVideoCommands = True
def AddContinuousFileCommand(self, cmd):
with Acquire(self.lock):
self._1continuousFileCommands.append(cmd)
self.hasContinuousFileCommands = True
def AddContinuousStickerCommand(self, cmd):
with Acquire(self.lock):
self._1continuousStickerCommands.append(cmd)
self.hasContinuousStickerCommands = True
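    # The HandleContinuous*Commands methods (text above, the rest below)
    # share one dispatch pattern: build an args dict tagged with the matching
    # CommandContinuousCallType, offer the event to every registered
    # continuous command, and hand the first CommandResultType.done result to
    # CommandDone.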
def HandleContinuousImageCommands(self, event):
if self.hasContinuousImageCommands:
args = {'continuous':CommandContinuousCallType.image, 'images':[event.image], 'message':event, 'options':''}
ret = CommandResult.Failed()
for cmd in self._1continuousImageCommands:
ret = cmd.Call(msg=event, args=args)
if ret.type == CommandResultType.done:
self.CommandDone(event.chatroom, ret, event=event)
return True
return False
def HandleContinuousAudioCommands(self, event):
if self.hasContinuousAudioCommands:
args = {'continuous':CommandContinuousCallType.audio, 'audios':[event.audio], 'message':event, 'options':''}
ret = CommandResult.Failed()
for cmd in self._1continuousAudioCommands:
ret = cmd.Call(msg=event, args=args)
if ret.type == CommandResultType.done:
self.CommandDone(event.chatroom, ret, event=event)
return True
return False
def HandleContinuousFileCommands(self, event):
if self.hasContinuousFileCommands:
args = {'continuous':CommandContinuousCallType.file, 'files':[event.file], 'message':event, 'options':''}
ret = CommandResult.Failed()
for cmd in self._1continuousFileCommands:
ret = cmd.Call(msg=event, args=args)
if ret.type == CommandResultType.done:
self.CommandDone(event.chatroom, ret, event=event)
return True
return False
def HandleContinuousVideoCommands(self, event):
if self.hasContinuousVideoCommands:
args = {'continuous':CommandContinuousCallType.video, 'videos':[event.video], 'message':event, 'options':''}
ret = CommandResult.Failed()
for cmd in self._1continuousVideoCommands:
ret = cmd.Call(msg=event, args=args)
if ret.type == CommandResultType.done:
self.CommandDone(event.chatroom, ret, event=event)
return True
return False
def HandleContinuousStickerCommands(self, event):
if self.hasContinuousStickerCommands:
args = {'continuous':CommandContinuousCallType.sticker, 'message':event, 'options':''}
ret = CommandResult.Failed()
for cmd in self._1continuousStickerCommands:
ret = cmd.Call(msg=event, args=args)
if ret.type == CommandResultType.done:
self.CommandDone(event.chatroom, ret, event=event)
return True
return False
def HandleCommands(self, event, continuous = True):
if event.eventType == EventType.message:
if event.messageType == MessageType.content:
if event.contentType == ContentType.IMAGE:
                    if self.HandleImageCommands(event):
                        return True
                    if continuous:
                        return self.HandleContinuousImageCommands(event)
elif event.contentType == ContentType.AUDIO:
return self.HandleContinuousAudioCommands(event)
elif event.contentType == ContentType.VIDEO:
return self.HandleContinuousVideoCommands(event)
elif event.contentType == ContentType.FILE:
return self.HandleContinuousFileCommands(event)
elif event.contentType == ContentType.STICKER:
return self.HandleContinuousStickerCommands(event)
elif event.messageType == MessageType.text:
if event.text == '/commandlist' or event.text == '/commands':
event.ReplyText(self.commandList)
return True
if Alphanumeric(event.text[:8])[0] == '/':
if self.HandleTextCommands(event):
return True
if continuous:
return self.HandleContinuousTextCommands(event)
return False
def HandleImageCommands(self, event):
chatroom = event.chatroom
if chatroom not in self._1imageWaitingCommands:
#print("chatroom not in _1imageWaitingCommands")
return False
sender = event.sender
k = sender
iWCchatroom = self._1imageWaitingCommands[chatroom]
if sender in iWCchatroom:
stack = iWCchatroom[sender]
elif None in iWCchatroom:
stack = iWCchatroom[None]
k = None
else:
#print("sender not in _1imageWaitingCommands")
return False
try:
ret = stack.CallImage(event)
chatroom = event.chatroom
sender = event.sender
if ret.type != CommandResultType.expectImage:
del self._1imageWaitingCommands[chatroom][k]
if ret.type == CommandResultType.done:
self.CommandDone(chatroom, ret, event=event)
return True
except Exception as e:
if len(stack.stack) > 0:
x = "(" + stack.stack[-1].cmd.name + ")"
else:
x = ""
s = "[HandleImageCommands" + x + "] Unhandled Exception\n"
self.Report(s+format_exc())
event.ReplyText(s+str(e))
return False
_1commandRegex = compile("^([^ ]+)( ?)")
@property
def commandList(self):
if self.hasCommands:
l = sorted([x for x in self._1commands.items()])
s = 'Commands :'
for k, v in l:
si = "\n%s" % k
if k != v.name:
si = si + (" (%s)" % v.name)
s = s + si
return s
return 'No command registered'
@property
def adminCommandList(self):
if self.hasAdminCommands:
l = sorted([x for x in self._1adminCommands.items()])
s = 'Admin commands :'
for k, v in l:
si = "\n%s" % k
if k != v.name:
si = si + (" (%s)" % v.name)
s = s + si
return s
return 'No admin command registered'
def HandleTextCommands(self, event):
alphanumericed = False
name, x, text = event.text.partition(' ')
name = name.strip()[1:]
if name != 'admin' and name not in self._1commands:
name, x, text = Alphanumeric(event.text).partition(' ')
alphanumericed = True
name = name.strip()[1:]
cmd = None
args = {'admin':False}
if name == 'admin':
if self.hasAdmins and self.hasAdminCommands and event.sender is not None and (event.sender in self.adminObjs or ((not IsEmpty(event.sender.id)) and event.sender.id in self.adminIds) or ((not IsEmpty(event.sender.mid)) and event.sender.mid in self.adminMids)):
args['admin'] = True
name = None
name, x, text2 = event.text.partition(' ')
name = name.strip()
if name in self._1adminCommands:
text = text2
elif not alphanumericed:
name, x, text = Alphanumeric(text.strip()).partition(' ')
name = name.strip()
alphanumericed = True
if name in self._1adminCommands:
cmd = self._1adminCommands[name]
else:
name = name.lower()
if name in self._1adminCommands:
cmd = self._1adminCommands[name]
elif self.hasCommands:
cmd = None
if name in self._1commands:
cmd = self._1commands[name]
else:
name = name.lower()
if name in self._1commands:
cmd = self._1commands[name]
else:
#print("COMMAND %s NOT FOUND" % name)
pass
if cmd is not None:
if cmd.escapeNames:
text = EscapeNames(event.chatroom, event.sender, text)
if cmd.Desc(event, text).type == CommandResultType.description:
return True
else:
try:
stack = CommandStack(msg=event, baseArgs=args)
stack.Add(cmd=cmd, msg=event, args=cmd.ParseArgs(msg=event, text=text, stack=stack))
ret = stack.Call(event)
chatroom = event.chatroom
sender = event.sender
if ret.type == CommandResultType.expectImage:
if chatroom not in self._1imageWaitingCommands:
self._1imageWaitingCommands[chatroom] = {}
self._1imageWaitingCommands[chatroom][sender] = stack
else:
if chatroom in self._1imageWaitingCommands and sender in self._1imageWaitingCommands[chatroom]:
del self._1imageWaitingCommands[chatroom][sender]
if ret.type == CommandResultType.done:
self.CommandDone(event.chatroom, ret, event=event)
return True
except Exception as e:
s = "[HandleTextCommands(" + name + ")] Unhandled Exception\n"
self.Report(s+format_exc())
event.ReplyText(s+str(e))
return False
def CommandDone(self, chatroom, ret, event=None):
texts = ret.texts
if isinstance(texts, str):
chatroom.SendText(EscapeNames(chatroom, event.sender, texts), event=event)
elif isinstance(texts, list):
for x in texts:
chatroom.SendText(EscapeNames(chatroom, event.sender, x), event=event)
images = ret.images
if isinstance(images, ImageMessage):
chatroom.SendImage(images.image)
elif isinstance(images, Image):
chatroom.SendImage(images)
elif isinstance(images, list):
for x in images:
chatroom.SendImage(x)
def GetGroupName(self, group):
if group.hasUser:
return self.userClient._1GetGroupName(group)
def GetMembers(self, room):
if not room.hasUser:
return room._1members
if room.chatroomType == ChatroomType.room:
return self.userClient._1GetRoomMembers(room)
if room.chatroomType == ChatroomType.group:
return self.userClient._1GetGroupMembers(room)
raise Exception("[Client.GetMembers] 'room' is a User")
def GetGroupInvitees(self, group):
if group.chatroomType != ChatroomType.group:
raise Exception("[Client.GetInvitees] 'group' is not a Group")
return self.userClient._1GetGroupInvitees(group)
def GetProfile(self, user):
if isinstance(user, list):
return [self.GetProfile(x) for x in user]
if user.chatroomType != ChatroomType.user:
raise Exception("[Client.GetProfile] 'user' is not User")
if user.hasOA:
try:
self.oAClient._1GetProfile(user)
except Exception as e:
print_exc()
raise
if user.hasUser:
try:
self.userClient._1GetProfile(user)
except Exception as e:
print_exc()
raise
return user._1profile
def Handle(self, event, thread = True):
if self.inited < 5:
self.Init2(event)
if self.hasCommands or self.hasAdminCommands:
if self.HandleCommands(event):
return
if self.handler is not None:
if thread:
self.Thread(self.handler, [event])
else:
self.handler(event)
if self.oAAutoLeaveWhenUserLeave and event.eventType == EventType.left and event.receiver == Receiver.user:
if thread:
Timer(5, target=self.Thread, args=[event.chatroom.Leave, []]).start()
else:
event.chatroom.Leave()
def HandleMany(self, events, thread=True):
for e in events:
            self.Thread(self.Handle, [e, thread])
def CreateDatabase(self, url):
self.db = Database(self, url)
with self.GetCursor() as cur:
cur.Execute('CREATE TABLE IF NOT EXISTS TextVars(name TEXT UNIQUE, value TEXT)')
cur.Execute('CREATE TABLE IF NOT EXISTS ChatRooms(id SERIAL PRIMARY KEY, lineId TEXT UNIQUE, lineMid TEXT UNIQUE, type INTEGER, hasOA BOOLEAN DEFAULT FALSE, hasUser BOOLEAN DEFAULT FALSE, uId INTEGER)')
cur.Execute('CREATE TABLE IF NOT EXISTS Users(id SERIAL PRIMARY KEY, lineId TEXT UNIQUE, lineMid TEXT UNIQUE, rId INTEGER, dummy BOOLEAN)')
cur.Execute('CREATE TABLE IF NOT EXISTS LineIdByMsgId(msgId TEXT UNIQUE, lineId TEXT)')
cur.Execute('CREATE TABLE IF NOT EXISTS LineMidByMsgId(msgId TEXT UNIQUE, lineMid TEXT)')
cur.Execute('CREATE TABLE IF NOT EXISTS SenderLineIdByMsgId(msgId TEXT UNIQUE, lineId TEXT)')
cur.Execute('CREATE TABLE IF NOT EXISTS SenderLineMidByMsgId(msgId TEXT UNIQUE, lineMid TEXT)')
cur.SetTextVar('dbUrl', url)
cur.Commit()
return self.db
def SendText(self, to, text, event=None):
if type(text) is list:
ret = True
for t in text:
ret = ret and self.SendText(to, t)
return ret
if not isinstance(to, list):
sender = None
msgId = None
receiver = Receiver.none
if event is not None:
receiver = event.receiver
if event.eventType == EventType.message:
msgId = event.id
sender = event.sender
elif to.hasOA:
sender = self.oAClient.obj
elif to.hasUser:
sender = self.userClient.obj
msg = self._1TextMessage(msgId, text, to, receiver, sender=sender)
if self.HandleCommands(msg, continuous = False):
return
return self._1SendText(to, text)
def _1SendText(self, to, text):
text = Clean(text)
if isinstance(to, list):
toa = []
tob = []
for t in to:
if t.hasOA:
toa.append(t)
elif t.hasUser:
tob.append(t)
ret = True
e = ''
if len(toa) > 0:
try:
ret = ret and self.oAClient._1SendText(toa, text)
except Exception as ex:
for t in toa:
if t.hasUser:
tob.append(t)
print_exc()
if len(tob) < len(to):
e = "Message '" + text + "' not sent to " + str([x for x in to if x not in tob]) + "\n"
e = e + "[Client._1SendText:list:OA]\n" + format_exc()
try:
ret = ret and self.userClient._1SendText(tob, text)
except Exception as ex:
print_exc()
e = e + '\n[Client._1SendText:list:User]\n' + format_exc()
if IsEmpty(e):
return ret
else:
raise Exception(e)
e = ''
if to.hasOA:
try:
return self.oAClient._1SendText(to, text)
except Exception as ex:
e = e + ("[Client._1SendText:single:OA]\nto=%s\nhasOA=%s\n_1hasOA=%s\n_2hasOA=%s\nid=%s\n" % (str(to), str(to.hasOA), str(to._1hasOA), str(to._2hasOA), str(to.id))) + format_exc()
if to.hasUser:
try:
ret = self.userClient._1SendText(to, text)
if not IsEmpty(e):
self.Report(e)
return ret
except Exception as ex:
e = e + '\n[Client._1SendText:single:User]\n' + format_exc()
if IsEmpty(e):
raise Exception("UNKNOWN ERROR \nto=%s\nhasOA=\n%s\nhasUser=%s\nlineId=%s\nlineMid=%s\nclientHasOA=%s\nclientHasUser=%s\n_1hasUser=%s\n_2hasUser=%s\n_1hasOA=%s\n_2hasOA=%s" % (str(to), str(to.hasOA), str(to.hasUser), str(to.id), str(to.mid), str(self.hasOA), str(self.hasUser), str(to._1hasUser), str(to._2hasUser), str(to._1hasOA), str(to._2hasOA)))
raise Exception(e)
def SendTextOA(self, to, text, event=None):
        if isinstance(text, list):
            ret = True
            for x in text:
                ret = ret and self.SendTextOA(to, x, event)
            return ret
        text = text.decode('utf-8', 'ignore').encode('utf-8')
return self.oAClient._1SendText(to, text)
def SendTextUser(self, to, text, event=None):
        if isinstance(text, list):
            ret = True
            for x in text:
                ret = ret and self.SendTextUser(to, x, event)
            return ret
        text = text.decode('utf-8', 'ignore').encode('utf-8')
return self.userClient._1SendText(to, text)
def AddCommand(self, cmd, name=None):
if not name:
name = cmd.name
self._1commands[name] = cmd
self.hasCommands = True
if cmd.initFunction is not None:
cmd.initFunction(client=self)
def AddAdminCommand(self, cmd, name=None):
if not self.hasAdmins:
raise Exception("[Client.AddAdminCommand] This client doesn't have any admins specified")
if not name:
name = cmd.name
self._1adminCommands[name] = cmd
self.hasAdminCommands = True
if cmd.initFunction is not None:
cmd.initFunction(client=self)
def SendImageWithBytes(self, to, bytes):
if type(bytes) is list:
ret = True
            for b in bytes:
ret = ret and self.SendImageWithBytes(to, b)
return ret
return self._1SendImageWithBytes(to, bytes)
def _1SendImageWithBytes(self, to, bytes):
if isinstance(to, list):
toa = []
tob = []
for t in to:
if t.hasUser:
toa.append(t)
elif t.hasOA:
tob.append(t)
ret = True
e = ''
if len(toa) > 0:
try:
ret = ret and self.userClient._1SendImageWithBytes(toa, bytes)
except Exception as ex:
for t in toa:
                    if t.hasOA:
tob.append(t)
if len(tob) < len(to):
e = "Image '" + url + "' not sent to " + str([x for x in to if x not in tob]) + "\n"
e = e + "[Client._1SendImage:list:User]\n" + format_exc()
if len(tob) > 0:
if self.pyImgurKey is None:
raise Exception("[Client._1SendImage:list:OA:1] Must have 'pyImgurKey' set")
try:
ret = ret and self.oAClient._1SendImageWithBytes(tob, bytes)
except Exception as ex:
e = e + '\n[Client._1SendImage:list:OA]\n' + format_exc()
if IsEmpty(e):
return ret
else:
raise Exception(e)
e = ''
if to.hasUser:
try:
return self.userClient._1SendImageWithBytes(to, bytes)
except Exception as ex:
s = format_exc()
#print(s)
e = e + '\n[Client._1SendImage:single:User]\n' + s
if to.hasOA:
if self.pyImgurKey is None:
s = "[Client._1SendImage:single:OA:1] Must have 'pyImgurKey' set"
if IsEmpty(e):
raise Exception(s)
else:
e = e + s
try:
ret = self.oAClient._1SendImageWithBytes(to, bytes)
if not IsEmpty(e):
self.Report(e)
return ret
except Exception as ex:
e = e + "[Client._1SendImage:single:OA]\n" + format_exc()
raise Exception(e)
def SendImage(self, to, image):
if isinstance(image, ImageMessage):
            image = image.image
if isinstance(image, list):
ret = True
for x in image:
                ret = ret and self.SendImage(to, x)
return ret
return self.SendImageWithBytes(to, image.bytes)
def SendImageWithUrl(self, to, url):
if type(url) is list:
ret = True
for u in url:
ret = ret and self.SendImageWithUrl(to, u)
return ret
return self._1SendImageWithUrl(to, url)
def _1SendImageWithUrl(self, to, url):
if isinstance(to, list):
toa = []
tob = []
for t in to:
if t.hasUser:
toa.append(t)
elif t.hasOA:
tob.append(t)
ret = True
e = ''
if len(toa) > 0:
try:
ret = ret and self.userClient._1SendImageWithUrl(toa, url)
except Exception as ex:
for t in toa:
                    if t.hasOA:
tob.append(t)
if len(tob) < len(to):
e = "Image '" + url + "' not sent to " + str([x for x in to if x not in tob]) + "\n"
e = e + "[Client._1SendImageWithUrl:list:User]\n" + format_exc()
if len(tob) > 0:
try:
ret = ret and self.oAClient._1SendImageWithUrl(tob, url)
except Exception as ex:
e = e + '\n[Client._1SendImageWithUrl:list:OA]\n' + format_exc()
if IsEmpty(e):
return ret
else:
raise Exception(e)
e = ''
if to.hasUser:
try:
return self.userClient._1SendImageWithUrl(to, url)
except Exception as ex:
e = e + '\n[Client._1SendImageWithUrl:single:User]\n' + format_exc()
if to.hasOA:
try:
ret = self.oAClient._1SendImageWithUrl(to, url)
if not IsEmpty(e):
self.Report(e)
return ret
except Exception as ex:
e = e + "[Client._1SendImageWithUrl:single:OA]\n" + format_exc()
raise Exception(e)
def SendButtons(self, to, buttons):
if isinstance(to, list):
for t in to:
if not t.hasOA:
if isinstance(t, Room) and t.uId:
t.InitRole(True)
raise Exception("[Client.SendButtons:list] No OAClient")
elif not to.hasOA:
            if isinstance(to, Room) and to.uId:
to.InitRole(True)
raise Exception("[Client.SendButtons:single] No OAClient")
return self.oAClient._1SendButtons(to, buttons)
def Report(self, msg = None):
if msg is None:
msg = format_exc()
msg = "[Report] " + msg
print(msg)
ex = ''
if len(self.adminObjs) > 0:
try:
return self.adminObjs[0].SendText(msg)
except Exception as e:
ex = ex + "\n[Client.Report.Obj]\n" + format_exc()
if self.hasOA and len(self.adminIds) > 0:
try:
return self.oAClient._1Report(msg)
except Exception as e:
ex = ex + "\n[Client.Report.OA]\n" + format_exc()
if self.hasUser and len(self.adminMids) > 0:
try:
return self.userClient._1Report(msg)
except Exception as e:
ex = ex + "\n[Client.Report.User]\n" + format_exc()
raise Exception(ex)
def AddUser(self, user):
if isinstance(user, list):
return self.AddUsers(user)
if not user.hasUser:
return False
if user._1profile.status == UserStatuses.friend:
return True
return self.userClient._1AddUser(user)
def AddUsers(self, users):
        users = [x for x in users if x._1profile.status != UserStatuses.friend]
return self.userClient._1AddUsers(users)
def CreateGroup(self, name="No Name", users=[]):
if not isinstance(users, list):
users = [users]
if len(users) == 0:
return False
noUsers = [x for x in users if not x.hasUser]
l = len(noUsers)
if l > 0:
if l == len(users):
return False
raise Exception("[Client.CreateGroup] hasUser is False on some of the users\n" + str(noUsers))
return self.userClient._1CreateGroup(name, users)
def CreateRoom(self, users):
if not isinstance(users, list):
users = [users]
noUsers = [x for x in users if not x.hasUser]
l = len(noUsers)
if l > 0:
if l == len(users):
return False
raise Exception("[Client.CreateRoom] hasUser is False on some of the users\n" + str(noUsers))
return self.userClient._1CreateRoom(users)
def InviteInto(self, room, users):
if not isinstance(users, list):
users = [users]
noUsers = [x for x in users if not x.hasUser]
l = len(noUsers)
if l > 0:
if l == len(users):
return False
raise Exception("[Client.InviteInto] hasUser is False on some of the users\n" + str(noUsers))
return self.userClient._1InviteInto(room, users)
def Leave(self, room):
if room.chatroomType == ChatroomType.user:
raise Exception("[Client.Leave] 'room' is a User")
ret = True
if room.hasUser:
ret = ret and self.userClient._1Leave(room)
if room.hasOA:
ret = ret and self.oAClient._1Leave(room)
return ret
def JoinGroup(self, group):
if not group.hasUser:
return False
return self.userClient._1JoinGroup(group)
def KickFromGroup(self, group, users):
if not isinstance(users, list):
users = [users]
if not group.hasUser:
return False
noUsers = [x for x in users if not x.hasUser]
l = len(noUsers)
if l > 0:
if l == len(users):
return False
raise Exception("[Client.KickFromGroup] hasUser is False on some of the users\n" + str(noUsers))
return self.userClient._1KickFromGroup(group, users)
def ReportAll(self, msg):
if msg is None:
msg = format_exc()
msg = "[Report] " + msg
print(msg)
ex = ''
if len(self.adminObjs) > 0:
try:
return self.SendText(self.adminObjs, msg)
except Exception as e:
s = "[Client.ReportAll.Obj]\n" + format_exc()
print(s)
ex = ex + "\n" + s
if self.hasOA:
try:
return self.oAClient._1ReportAll(msg)
except Exception as e:
s = "[Client.ReportAll.OA]\n" + format_exc()
print(s)
ex = ex + "\n" + s
if self.hasUser:
try:
return self.userClient._1ReportAll(msg)
except Exception as e:
s = "[Client.ReportAll.User]\n" + format_exc()
print(s)
ex = ex + "\n" + s
raise Exception(ex)
def _1Update(self, chatroom, receiver):
if receiver == Receiver.oA:
chatroom.hasOA = True
elif receiver == Receiver.user:
chatroom.hasUser = True
return Update(self, chatroom, receiver)
def _1TextMessage(self, id, text, chatroom, receiver, sender = None):
if receiver == Receiver.oA:
chatroom.hasOA = True
elif receiver == Receiver.user:
chatroom.hasUser = True
return TextMessage(self, id, text, chatroom, receiver, sender=sender)
def _1StickerMessage(self, id, packageId, stickerId, chatroom, receiver, sender = None):
if receiver == Receiver.oA:
chatroom.hasOA = True
elif receiver == Receiver.user:
chatroom.hasUser = True
return StickerMessage(self, id, packageId, stickerId, chatroom, receiver, sender=sender)
def _1ContactMessage(self, id, displayName, mid, chatroom, receiver, sender = None):
if receiver == Receiver.oA:
chatroom.hasOA = True
elif receiver == Receiver.user:
chatroom.hasUser = True
return ContactMessage(self, id, displayName, mid, chatroom, receiver, sender=sender)
def _1LocationMessage(self, id, title, address, latitude, longitude, chatroom, receiver, sender = None):
if receiver == Receiver.oA:
chatroom.hasOA = True
elif receiver == Receiver.user:
chatroom.hasUser = True
return LocationMessage(self, id, title, address, latitude, longitude, chatroom, receiver, sender=sender)
def _1ImageMessage(self, id, chatroom, receiver, sender = None, url=None, bytes=None):
if receiver == Receiver.oA:
chatroom.hasOA = True
elif receiver == Receiver.user:
chatroom.hasUser = True
return ImageMessage(self, id, chatroom, receiver, sender=sender, url=url, bytes=bytes)
def _1AudioMessage(self, id, chatroom, receiver, sender = None, url=None, bytes=None):
if receiver == Receiver.oA:
chatroom.hasOA = True
elif receiver == Receiver.user:
chatroom.hasUser = True
return AudioMessage(self, id, chatroom, receiver, sender=sender, url=url, bytes=bytes)
def _1VideoMessage(self, id, chatroom, receiver, sender = None, url=None, bytes=None):
if receiver == Receiver.oA:
chatroom.hasOA = True
elif receiver == Receiver.user:
chatroom.hasUser = True
return VideoMessage(self, id, chatroom, receiver, sender=sender, url=url, bytes=bytes)
def _1FileMessage(self, id, chatroom, receiver, sender = None, url=None, bytes=None):
if receiver == Receiver.oA:
chatroom.hasOA = True
elif receiver == Receiver.user:
chatroom.hasUser = True
return FileMessage(self, id, chatroom, receiver, sender=sender, url=url, bytes=bytes)
def _1Unfollowed(self, chatroom, receiver):
if receiver == Receiver.oA:
chatroom.hasOA = False
elif receiver == Receiver.user:
chatroom.hasUser = False
return Unfollowed(self, chatroom, receiver)
def _1Followed(self, chatroom, receiver):
if receiver == Receiver.oA:
chatroom.hasOA = True
elif receiver == Receiver.user:
chatroom.hasUser = True
return Followed(self, chatroom, receiver)
def _1Joined(self, chatroom, receiver):
if receiver == Receiver.oA:
chatroom.hasOA = True
elif receiver == Receiver.user:
chatroom.hasUser = True
chatroom.InitRoom(False)
return Joined(self, chatroom, receiver)
def _1Left(self, chatroom, receiver):
if receiver == Receiver.oA:
chatroom.hasOA = False
elif receiver == Receiver.user:
chatroom.hasUser = False
return Left(self, chatroom, receiver)
def _1Invited(self, chatroom, receiver):
if receiver == Receiver.oA:
chatroom.hasOA = False
elif receiver == Receiver.user:
chatroom.hasUser = False
return Invited(self, chatroom, receiver)
def StartOA(self, thread=True, port=None):
if port is None:
port = self.port
else:
self.port = port
if self.oAClient:
if not self.userClient:
self.started = True
self.oAClient.Start(thread=thread, port=port)
return True
return False
def StartUser(self, thread=0, mode=None):
if self.userClient:
self.userClient.Start(thread=thread, mode=mode)
return True
return False
def StartMain(self, thread=True, main=1, timeout=1):
if main<1:
return False
if thread:
self.Thread(self.Main, [main, timeout])
else:
self.Main(main, timeout)
return True
def AddEvent(self, event, force=False):
if isinstance(event, Message):
id = event.id
with self.mLock:
if force or id not in self.messages:
self.messages[id] = event
self.eventQueue.append(id)
else:
self.eventQueue.append(event)
with self.eCond:
self.eCond.notifyAll()
def Main(self, main=1, timeout=1):
self.main = main
while True:
e = None
with self.eCond:
while not e:
try:
e = self.eventQueue.pop(0)
except IndexError:
self.eCond.wait(1)
if not isinstance(e, Event):
with self.mLock:
e = self.messages.pop(e, None)
if not e:
continue
try:
if main == 1:
self.Handle(e, False)
else:
t = self.Thread(self.Handle, [e, False])
if main == 2:
t.join(timeout)
except Exception as ex:
self.Report(format_exc())
self.main = 0
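# Illustrative usage sketch, not part of the original source. It assumes a
# Client class exposing the methods defined above (AddCommand, StartOA,
# StartMain) and a command object matching the interface consumed by
# HandleTextCommands; the constructor arguments are hypothetical.
#
#   client = Client(...)                       # construct with your credentials
#   client.AddCommand(echo_cmd, name='echo')   # register a '/echo' text command
#   client.StartOA(thread=True, port=8000)     # start the OA listener
#   client.StartMain(thread=False, main=1)     # drain the event queue serially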
|
[
"threading.Thread",
"traceback.print_exc",
"random.randint",
"threading.Condition",
"traceback.format_exc",
"pyimgur.Imgur",
"re.compile"
] |
[((17576, 17606), 're.compile', 'compile', (['"""\\\\[Init1\\\\] ([^ ]+)"""'], {}), "('\\\\[Init1\\\\] ([^ ]+)')\n", (17583, 17606), False, 'from re import compile, DOTALL\n'), ((17624, 17654), 're.compile', 'compile', (['"""\\\\[Init3\\\\] ([^ ]+)"""'], {}), "('\\\\[Init3\\\\] ([^ ]+)')\n", (17631, 17654), False, 'from re import compile, DOTALL\n'), ((28455, 28478), 're.compile', 'compile', (['"""^([^ ]+)( ?)"""'], {}), "('^([^ ]+)( ?)')\n", (28462, 28478), False, 'from re import compile, DOTALL\n'), ((4227, 4238), 'threading.Condition', 'Condition', ([], {}), '()\n', (4236, 4238), False, 'from threading import Thread, current_thread, Condition\n'), ((6260, 6271), 'threading.Condition', 'Condition', ([], {}), '()\n', (6269, 6271), False, 'from threading import Thread, current_thread, Condition\n'), ((9468, 9490), 'pyimgur.Imgur', 'Imgur', (['self.pyImgurKey'], {}), '(self.pyImgurKey)\n', (9473, 9490), False, 'from pyimgur import Imgur\n'), ((16922, 16993), 'threading.Thread', 'Thread', ([], {'target': 'self.ThreadExceptionCatcher', 'args': '[method, args, kwargs]'}), '(target=self.ThreadExceptionCatcher, args=[method, args, kwargs])\n', (16928, 16993), False, 'from threading import Thread, current_thread, Condition\n'), ((46616, 46628), 'traceback.format_exc', 'format_exc', ([], {}), '()\n', (46626, 46628), False, 'from traceback import format_exc, print_exc\n'), ((50293, 50305), 'traceback.format_exc', 'format_exc', ([], {}), '()\n', (50303, 50305), False, 'from traceback import format_exc, print_exc\n'), ((17246, 17257), 'traceback.print_exc', 'print_exc', ([], {}), '()\n', (17255, 17257), False, 'from traceback import format_exc, print_exc\n'), ((18225, 18244), 'random.randint', 'randint', (['(1000)', '(9999)'], {}), '(1000, 9999)\n', (18232, 18244), False, 'from random import randint\n'), ((19417, 19436), 'random.randint', 'randint', (['(1000)', '(9999)'], {}), '(1000, 9999)\n', (19424, 19436), False, 'from random import randint\n'), ((34431, 34442), 'traceback.print_exc', 'print_exc', ([], {}), '()\n', (34440, 34442), False, 'from traceback import format_exc, print_exc\n'), ((34609, 34620), 'traceback.print_exc', 'print_exc', ([], {}), '()\n', (34618, 34620), False, 'from traceback import format_exc, print_exc\n'), ((38465, 38476), 'traceback.print_exc', 'print_exc', ([], {}), '()\n', (38474, 38476), False, 'from traceback import format_exc, print_exc\n'), ((42880, 42892), 'traceback.format_exc', 'format_exc', ([], {}), '()\n', (42890, 42892), False, 'from traceback import format_exc, print_exc\n'), ((17509, 17521), 'traceback.format_exc', 'format_exc', ([], {}), '()\n', (17519, 17521), False, 'from traceback import format_exc, print_exc\n'), ((28347, 28359), 'traceback.format_exc', 'format_exc', ([], {}), '()\n', (28357, 28359), False, 'from traceback import format_exc, print_exc\n'), ((38086, 38097), 'traceback.print_exc', 'print_exc', ([], {}), '()\n', (38095, 38097), False, 'from traceback import format_exc, print_exc\n'), ((38539, 38551), 'traceback.format_exc', 'format_exc', ([], {}), '()\n', (38549, 38551), False, 'from traceback import format_exc, print_exc\n'), ((39006, 39018), 'traceback.format_exc', 'format_exc', ([], {}), '()\n', (39016, 39018), False, 'from traceback import format_exc, print_exc\n'), ((39315, 39327), 'traceback.format_exc', 'format_exc', ([], {}), '()\n', (39325, 39327), False, 'from traceback import format_exc, print_exc\n'), ((43530, 43542), 'traceback.format_exc', 'format_exc', ([], {}), '()\n', (43540, 43552), False, 'from traceback import format_exc, print_exc\n'), ((45618, 45630), 'traceback.format_exc', 'format_exc', ([], {}), '()\n', (45628, 45630), False, 'from traceback import format_exc, print_exc\n'), ((45934, 45946), 'traceback.format_exc', 'format_exc', ([], {}), '()\n', (45944, 45946), False, 'from traceback import format_exc, print_exc\n'), ((46893, 46905), 'traceback.format_exc', 'format_exc', ([], {}), '()\n', (46903, 46905), False, 'from traceback import format_exc, print_exc\n'), ((47112, 47124), 'traceback.format_exc', 'format_exc', ([], {}), '()\n', (47122, 47124), False, 'from traceback import format_exc, print_exc\n'), ((47338, 47350), 'traceback.format_exc', 'format_exc', ([], {}), '()\n', (47348, 47350), False, 'from traceback import format_exc, print_exc\n'), ((50568, 50580), 'traceback.format_exc', 'format_exc', ([], {}), '()\n', (50578, 50580), False, 'from traceback import format_exc, print_exc\n'), ((50818, 50830), 'traceback.format_exc', 'format_exc', ([], {}), '()\n', (50828, 50830), False, 'from traceback import format_exc, print_exc\n'), ((51074, 51086), 'traceback.format_exc', 'format_exc', ([], {}), '()\n', (51084, 51086), False, 'from traceback import format_exc, print_exc\n'), ((57446, 57458), 'traceback.format_exc', 'format_exc', ([], {}), '()\n', (57456, 57458), False, 'from traceback import format_exc, print_exc\n'), ((38315, 38327), 'traceback.format_exc', 'format_exc', ([], {}), '()\n', (38325, 38327), False, 'from traceback import format_exc, print_exc\n'), ((42172, 42184), 'traceback.format_exc', 'format_exc', ([], {}), '()\n', (42182, 42184), False, 'from traceback import format_exc, print_exc\n'), ((42562, 42574), 'traceback.format_exc', 'format_exc', ([], {}), '()\n', (42572, 42574), False, 'from traceback import format_exc, print_exc\n'), ((45000, 45012), 'traceback.format_exc', 'format_exc', ([], {}), '()\n', (45010, 45012), False, 'from traceback import format_exc, print_exc\n'), ((45252, 45264), 'traceback.format_exc', 'format_exc', ([], {}), '()\n', (45262, 45264), False, 'from traceback import format_exc, print_exc\n'), ((32546, 32558), 'traceback.format_exc', 'format_exc', ([], {}), '()\n', (32556, 32558), False, 'from traceback import format_exc, print_exc\n')]
|
import torch
import torch.nn as nn
"""### DenseNet"""
####network
class conv_blk(nn.Module):
def __init__(self,in_channel,num_channel):
super(conv_blk, self).__init__()
self.blk=nn.Sequential(nn.BatchNorm2d(in_channel,eps=1e-3),
nn.ReLU(),
nn.Conv2d(in_channels=in_channel,out_channels=num_channel,kernel_size=3,padding=1))
def forward(self, x):
return self.blk(x)
class DenseBlock(nn.Module):
def __init__(self,in_channel,num_convs,num_channels):
super(DenseBlock,self).__init__()
layers=[]
for i in range(num_convs):
layers+=[conv_blk(in_channel,num_channels)]
in_channel=in_channel+num_channels
self.net=nn.Sequential(*layers)
def forward(self,x):
for blk in self.net:
y=blk(x)
x=torch.cat((x,y),dim=1)
return x
def transition_blk(in_channel,num_channels):
blk=nn.Sequential(nn.BatchNorm2d(in_channel,eps=1e-3),
nn.ReLU(),
nn.Conv2d(in_channels=in_channel,out_channels=num_channels,kernel_size=1),
nn.AvgPool2d(kernel_size=2,stride=2))
return blk
class DenseNet(nn.Module):
def __init__(self,in_channel = 1,num_classes = 10, tmp_scale = True):
super(DenseNet,self).__init__()
self.block1=nn.Sequential(nn.Conv2d(in_channels=in_channel,out_channels=64,kernel_size=7,stride=2,padding=3),
nn.BatchNorm2d(64,eps=1e-3),
nn.ReLU(),
nn.MaxPool2d(kernel_size=3,stride=2,padding=1))
self.tmp_scale = tmp_scale
if tmp_scale:
self.temperature = torch.nn.Parameter(torch.ones(1), requires_grad = False)
num_channels, growth_rate = 64, 32 # num_channels
num_convs_in_dense_blocks = [4, 4, 4, 4]
layers=[]
for i ,num_convs in enumerate(num_convs_in_dense_blocks):
layers+=[DenseBlock(num_channels,num_convs,growth_rate)]
num_channels+=num_convs*growth_rate
if i!=len(num_convs_in_dense_blocks)-1:
layers+=[transition_blk(num_channels,num_channels//2)]
num_channels=num_channels//2
layers+=[nn.BatchNorm2d(num_channels),nn.ReLU(),nn.AvgPool2d(kernel_size=3)]
self.block2=nn.Sequential(*layers)
    self.dense=nn.Linear(248,num_classes)
def forward(self,x):
y=self.block1(x)
y=self.block2(y)
y=y.view(-1,248)
y=self.dense(y)
if self.tmp_scale:
y /= self.temperature
return y
def densetNet(tmp_scale = True, num_classes = 10):
return DenseNet(tmp_scale = tmp_scale, num_classes = num_classes)
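# Minimal smoke test, not part of the original source. The hardcoded
# y.view(-1,248) requires 248 feature maps at 1x1 spatial size after the
# final pooling, which works out to single-channel 96x96 inputs; that input
# size is inferred from the layer arithmetic, not stated in the original code.
if __name__ == "__main__":
  model = densetNet(tmp_scale=True, num_classes=10)
  x = torch.randn(2, 1, 96, 96)  # batch of 2 grayscale 96x96 images
  logits = model(x)
  print(logits.shape)  # expected: torch.Size([2, 10])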
|
[
"torch.ones",
"torch.nn.ReLU",
"torch.nn.Sequential",
"torch.nn.Conv2d",
"torch.cat",
"torch.nn.BatchNorm2d",
"torch.nn.Linear",
"torch.nn.MaxPool2d",
"torch.nn.AvgPool2d"
] |
[((764, 786), 'torch.nn.Sequential', 'nn.Sequential', (['*layers'], {}), '(*layers)\n', (777, 786), True, 'import torch.nn as nn\n'), ((986, 1023), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['in_channel'], {'eps': '(0.001)'}), '(in_channel, eps=0.001)\n', (1000, 1023), True, 'import torch.nn as nn\n'), ((1045, 1054), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (1052, 1054), True, 'import torch.nn as nn\n'), ((1078, 1153), 'torch.nn.Conv2d', 'nn.Conv2d', ([], {'in_channels': 'in_channel', 'out_channels': 'num_channels', 'kernel_size': '(1)'}), '(in_channels=in_channel, out_channels=num_channels, kernel_size=1)\n', (1087, 1153), True, 'import torch.nn as nn\n'), ((1175, 1212), 'torch.nn.AvgPool2d', 'nn.AvgPool2d', ([], {'kernel_size': '(2)', 'stride': '(2)'}), '(kernel_size=2, stride=2)\n', (1187, 1212), True, 'import torch.nn as nn\n'), ((2405, 2427), 'torch.nn.Sequential', 'nn.Sequential', (['*layers'], {}), '(*layers)\n', (2418, 2427), True, 'import torch.nn as nn\n'), ((2447, 2465), 'torch.nn.Linear', 'nn.Linear', (['(248)', '(10)'], {}), '(248, 10)\n', (2456, 2465), True, 'import torch.nn as nn\n'), ((215, 252), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['in_channel'], {'eps': '(0.001)'}), '(in_channel, eps=0.001)\n', (229, 252), True, 'import torch.nn as nn\n'), ((278, 287), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (285, 287), True, 'import torch.nn as nn\n'), ((315, 404), 'torch.nn.Conv2d', 'nn.Conv2d', ([], {'in_channels': 'in_channel', 'out_channels': 'num_channel', 'kernel_size': '(3)', 'padding': '(1)'}), '(in_channels=in_channel, out_channels=num_channel, kernel_size=3,\n padding=1)\n', (324, 404), True, 'import torch.nn as nn\n'), ((877, 901), 'torch.cat', 'torch.cat', (['(x, y)'], {'dim': '(1)'}), '((x, y), dim=1)\n', (886, 901), False, 'import torch\n'), ((1405, 1495), 'torch.nn.Conv2d', 'nn.Conv2d', ([], {'in_channels': 'in_channel', 'out_channels': '(64)', 'kernel_size': '(7)', 'stride': '(2)', 'padding': '(3)'}), '(in_channels=in_channel, out_channels=64, kernel_size=7, stride=2,\n padding=3)\n', (1414, 1495), True, 'import torch.nn as nn\n'), ((1523, 1552), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(64)'], {'eps': '(0.001)'}), '(64, eps=0.001)\n', (1537, 1552), True, 'import torch.nn as nn\n'), ((1586, 1595), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (1593, 1595), True, 'import torch.nn as nn\n'), ((1631, 1679), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(3)', 'stride': '(2)', 'padding': '(1)'}), '(kernel_size=3, stride=2, padding=1)\n', (1643, 1679), True, 'import torch.nn as nn\n'), ((2317, 2345), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['num_channels'], {}), '(num_channels)\n', (2331, 2345), True, 'import torch.nn as nn\n'), ((2346, 2355), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (2353, 2355), True, 'import torch.nn as nn\n'), ((2356, 2383), 'torch.nn.AvgPool2d', 'nn.AvgPool2d', ([], {'kernel_size': '(3)'}), '(kernel_size=3)\n', (2368, 2383), True, 'import torch.nn as nn\n'), ((1784, 1797), 'torch.ones', 'torch.ones', (['(1)'], {}), '(1)\n', (1794, 1797), False, 'import torch\n')]
|
#!/usr/bin/python
# @file check_code.py
# @brief This program applies a flawfinder program to TrickHLA source code.
#
# This is a Python program used to check the TrickHLA source code using the
# flawfinder utility. https://dwheeler.com/flawfinder/
#
# @revs_title
# @revs_begin
# @rev_entry{ <NAME>, NASA ER6, TrickHLA, June 2020, --, Initial implementation, based on check_code.py.}
# @revs_end
#
import time
import os
import subprocess
import argparse
from trickhla_message import *
from trickhla_environment import *
# Main routine.
def main():
# Set defaults for TrickHLA and TRICK_HOME.
trickhla_home = '.'
# Initialize the lists that go into the flawfinder command argument list.
trickhla_source_dirs = []
flawfinder_args = []
#
# Setup command line argument parsing.
#
parser = argparse.ArgumentParser( prog = 'find_code_flaws', \
formatter_class = argparse.RawDescriptionHelpFormatter, \
description = 'Check the TrickHLA source code using flawfinder.' )
parser.add_argument( '-m0', \
help = 'Maximum flaw risk checking. Check source with minimum risk level 0 for inclusion in hitlist.', \
action = 'store_true', dest = 'm0' )
parser.add_argument( '-m1', \
help = 'Check source with minimum risk level 1 for inclusion in hitlist.', \
action = 'store_true', dest = 'm1' )
parser.add_argument( '-m2', \
help = 'Check source with minimum risk level 2 for inclusion in hitlist.', \
action = 'store_true', dest = 'm2' )
parser.add_argument( '-m3', \
help = 'Check source with minimum risk level 3 for inclusion in hitlist.', \
action = 'store_true', dest = 'm3' )
parser.add_argument( '-m4', \
help = 'Check source with minimum risk level 4 for inclusion in hitlist.', \
action = 'store_true', dest = 'm4' )
parser.add_argument( '-m5', \
help = 'Minimal flaw risk checking. Check source with minimum risk level 5 for inclusion in hitlist.', \
action = 'store_true', dest = 'm5' )
# Parse the command line arguments.
args = parser.parse_args()
#
# Let's do some command line option sanity checks.
#
arg_error = False
# User must specify one of -m0, -m1, -m2, -m3, -m4, -m5.
required_arg_cnt = 0
if args.m0:
# -m0
required_arg_cnt += 1
if args.m1:
# -m1
required_arg_cnt += 1
if args.m2:
# -m2
required_arg_cnt += 1
if args.m3:
# -m3
required_arg_cnt += 1
if args.m4:
# -m4
required_arg_cnt += 1
if args.m5:
# -m5
required_arg_cnt += 1
if required_arg_cnt == 0:
arg_error = True
      TrickHLAMessage.warning( 'You must specify one of \'-m0\', \'-m1\', \'-m2\', \'-m3\', \'-m4\' or \'-m5\'!' )
elif required_arg_cnt >= 2:
arg_error = True
      TrickHLAMessage.warning( 'Only specify one of \'-m0\', \'-m1\', \'-m2\', \'-m3\', \'-m4\' or \'-m5\'!' )
if arg_error:
TrickHLAMessage.failure( 'Error detected in parsing command arguments!' )
#
# Now let's check for paths to commands and directories.
#
# Check for TrickHLA.
trickhla_home = os.environ.get( 'TRICKHLA_HOME' )
if trickhla_home is None:
trickhla_home = os.environ.get( 'TRICK_HLA_HOME' )
if trickhla_home is None:
TrickHLAMessage.failure( 'TRICKHLA_HOME not set!' )
# Move into the TrickHLA home directory.
# But first make sure that the directory actually exists before moving there.
if os.path.isdir( trickhla_home ):
os.chdir( trickhla_home )
else:
TrickHLAMessage.failure( 'TrickHLA Home directory not found!' )
# Now let's check to make sure that the source and models directories exist.
if not os.path.isdir( 'source' ):
TrickHLAMessage.failure( 'Could not find the \'source\' directory!' )
if not os.path.isdir( 'models' ):
TrickHLAMessage.failure( 'Could not find the \'models\' directory!' )
# Find the flawfinder command and get the flawfinder version number.
flawfinder_cmd, flawfinder_ver = find_flawfinder( None, False )
if flawfinder_cmd is None:
TrickHLAMessage.failure( 'No flawfinder command found!' )
# Form relative paths to all the source directories used by TrickHLA.
trickhla_source_dirs.extend ( ['./source'] )
trickhla_source_dirs.extend ( ['./models'] )
# Set the flawfinder arguments based on the type of code checking the user wants to do.
if args.m0:
flawfinder_args.append( '--minlevel=0' )
elif args.m1:
flawfinder_args.append( '--minlevel=1' )
elif args.m2:
flawfinder_args.append( '--minlevel=2' )
elif args.m3:
flawfinder_args.append( '--minlevel=3' )
elif args.m4:
flawfinder_args.append( '--minlevel=4' )
elif args.m5:
flawfinder_args.append( '--minlevel=5' )
else:
flawfinder_args.append( '--minlevel=1' )
# Add remaining args.
flawfinder_args.append( '--allowlink' )
flawfinder_args.append( '--context' )
# Add the source code paths.
flawfinder_args.extend( trickhla_source_dirs )
# Form the flawfinder command with command-line arguments.
shell_command = [ flawfinder_cmd ]
shell_command.extend( flawfinder_args )
# Show a summary of the settings.
TrickHLAMessage.status( '---------------------------------------------------------------------' )
TrickHLAMessage.status( 'trickhla_home = ' + trickhla_home )
TrickHLAMessage.status( 'trickhla_source_dirs = ' + ' '.join( trickhla_source_dirs ) )
TrickHLAMessage.status( 'flawfinder_cmd = ' + flawfinder_cmd )
TrickHLAMessage.status( 'flawfinder_ver = ' + flawfinder_ver )
TrickHLAMessage.status( 'flawfinder_args = ' + ' '.join( flawfinder_args ) )
TrickHLAMessage.status( '---------------------------------------------------------------------' )
# Execute the flawfinder command.
try:
# Spawn off the flawfinder process using Popen.
flawfinder_proc = subprocess.Popen( shell_command )
while flawfinder_proc.poll() is None:
# Check process activity 10 times a second.
time.sleep( 0.1 )
   except subprocess.CalledProcessError as error:
      TrickHLAMessage.error( str( error ) )
TrickHLAMessage.failure( '\'flawfinder\' command failed!' )
# Let the user know that we are done.
TrickHLAMessage.success( 'Finished checking TrickHLA source code.' )
return
# Function to find the flawfinder command.
#
# This function searches common locations for the flawfinder command.
#
# @return flawfinder_command The path to the flawfinder command.
# @return flawfinder_version The version of flawfinder.
# @param flawfinder_bin The path to the flawfinder programs directory.
# @param verbose Flag to set if verbose outputs are on.
#
def find_flawfinder( flawfinder_bin, verbose = True ):
# Initialize the flawfinder command path.
flawfinder_command = None
flawfinder_version = None
# Check to see if the path to the flawfinder binaries are set.
if flawfinder_bin:
# Use the command line path to set the command.
flawfinder_command = flawfinder_bin + '/flawfinder'
# flawfinder_bin is not set so lets check other options.
else:
# Check to see if the $FLAWFINDER_HOME environment variable is defined and set.
flawfinder_home = os.environ.get( 'FLAWFINDER_HOME' )
if flawfinder_home:
# Check to see if FLAWFINDER_HOME exists.
if os.path.isdir( flawfinder_home ):
# Form the flawfinder command based on FLAWFINDER_HOME.
flawfinder_command = flawfinder_home + '/flawfinder'
if verbose:
# Let the user know that we have FLAWFINDER_HOME and where it is.
TrickHLAMessage.status( 'FLAWFINDER_HOME: ' + flawfinder_home )
else:
TrickHLAMessage.failure( 'FLAWFINDER_HOME not found: ' + flawfinder_home )
else:
# FLAWFINDER_HOME is not set so look in the standard locations for flawfinder.
if os.path.isfile( '/usr/bin/flawfinder' ):
flawfinder_command = '/usr/bin/flawfinder'
elif os.path.isfile( '/usr/local/bin/flawfinder' ):
flawfinder_command = '/usr/local/bin/flawfinder'
# We're finished hunting. Now let's check for the flawfinder command.
if flawfinder_command is None:
TrickHLAMessage.failure( 'Could not find the flawfinder command!' )
else:
if not os.path.isfile( flawfinder_command ):
TrickHLAMessage.failure( 'Could not find the flawfinder command!: '\
+flawfinder_command )
else:
if verbose:
TrickHLAMessage.status( 'Using flawfinder command: ' + flawfinder_command )
#
# Now get the flawfinder version ID tag.
#
try:
flawfinder_version = str( subprocess.check_output( [flawfinder_command, '--version'] ) ).strip()
         except subprocess.CalledProcessError as error:
            TrickHLAMessage.error( str( error ) )
TrickHLAMessage.failure( '\'flawfinder --version\' command failed!: '
+flawfinder_command )
return flawfinder_command, flawfinder_version
#
# Call the main function.
#
main()
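# Example invocation, for illustration only (the TrickHLA path below is a
# placeholder, not a prescribed location):
#
#   TRICKHLA_HOME=/path/to/TrickHLA ./check_code.py -m1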
|
[
"subprocess.Popen",
"argparse.ArgumentParser",
"os.path.isdir",
"subprocess.check_output",
"time.sleep",
"os.environ.get",
"os.path.isfile",
"os.chdir"
] |
[((819, 994), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': '"""find_code_flaws"""', 'formatter_class': 'argparse.RawDescriptionHelpFormatter', 'description': '"""Check the TrickHLA source code using flawfinder."""'}), "(prog='find_code_flaws', formatter_class=argparse.\n RawDescriptionHelpFormatter, description=\n 'Check the TrickHLA source code using flawfinder.')\n", (842, 994), False, 'import argparse\n'), ((3414, 3445), 'os.environ.get', 'os.environ.get', (['"""TRICKHLA_HOME"""'], {}), "('TRICKHLA_HOME')\n", (3428, 3445), False, 'import os\n'), ((3759, 3787), 'os.path.isdir', 'os.path.isdir', (['trickhla_home'], {}), '(trickhla_home)\n', (3772, 3787), False, 'import os\n'), ((3499, 3531), 'os.environ.get', 'os.environ.get', (['"""TRICK_HLA_HOME"""'], {}), "('TRICK_HLA_HOME')\n", (3513, 3531), False, 'import os\n'), ((3797, 3820), 'os.chdir', 'os.chdir', (['trickhla_home'], {}), '(trickhla_home)\n', (3805, 3820), False, 'import os\n'), ((3993, 4016), 'os.path.isdir', 'os.path.isdir', (['"""source"""'], {}), "('source')\n", (4006, 4016), False, 'import os\n'), ((4106, 4129), 'os.path.isdir', 'os.path.isdir', (['"""models"""'], {}), "('models')\n", (4119, 4129), False, 'import os\n'), ((6235, 6266), 'subprocess.Popen', 'subprocess.Popen', (['shell_command'], {}), '(shell_command)\n', (6251, 6266), False, 'import subprocess\n'), ((7624, 7657), 'os.environ.get', 'os.environ.get', (['"""FLAWFINDER_HOME"""'], {}), "('FLAWFINDER_HOME')\n", (7638, 7657), False, 'import os\n'), ((6376, 6391), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (6386, 6391), False, 'import time\n'), ((7750, 7780), 'os.path.isdir', 'os.path.isdir', (['flawfinder_home'], {}), '(flawfinder_home)\n', (7763, 7780), False, 'import os\n'), ((8320, 8357), 'os.path.isfile', 'os.path.isfile', (['"""/usr/bin/flawfinder"""'], {}), "('/usr/bin/flawfinder')\n", (8334, 8357), False, 'import os\n'), ((8742, 8776), 'os.path.isfile', 'os.path.isfile', (['flawfinder_command'], {}), '(flawfinder_command)\n', (8756, 8776), False, 'import os\n'), ((8430, 8473), 'os.path.isfile', 'os.path.isfile', (['"""/usr/local/bin/flawfinder"""'], {}), "('/usr/local/bin/flawfinder')\n", (8444, 8473), False, 'import os\n'), ((9130, 9188), 'subprocess.check_output', 'subprocess.check_output', (["[flawfinder_command, '--version']"], {}), "([flawfinder_command, '--version'])\n", (9153, 9188), False, 'import subprocess\n')]
|
from django import forms
class EmailForm(forms.Form):
your_email = forms.CharField(label='Email', max_length=100)
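# Hedged usage sketch, not part of the original source; it relies only on
# Django's standard bound-form API (the email address is a placeholder):
#
#   form = EmailForm(data={'your_email': 'user@example.com'})
#   if form.is_valid():
#       email = form.cleaned_data['your_email']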
|
[
"django.forms.CharField"
] |
[((72, 118), 'django.forms.CharField', 'forms.CharField', ([], {'label': '"""Email"""', 'max_length': '(100)'}), "(label='Email', max_length=100)\n", (87, 118), False, 'from django import forms\n')]
|
import random
class Dice:
def __init__(self):
pass
def roll(self, num_dice):
outcome = list()
for i in range(num_dice):
outcome.append(random.randrange(1,7))
return outcome
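# Small demo, not part of the original source: roll three six-sided dice and
# print the outcome list, e.g. [4, 1, 6].
if __name__ == "__main__":
    print(Dice().roll(3))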
|
[
"random.randrange"
] |
[((182, 204), 'random.randrange', 'random.randrange', (['(1)', '(7)'], {}), '(1, 7)\n', (198, 204), False, 'import random\n')]
|
#!/usr/bin/env python3
import sys
import os
def print_args(args):
    print(args[-1] + ' ' + '_'.join(args[:-1]))
def main(argv):
path = "%HHOMP/HHOMPDB"
# If directory does not exist, print random stuff
if not os.path.exists(path):
print("foo bar")
return
args = []
with open(path, 'r') as infile:
for line in infile:
line = line.strip()
if not line or line.startswith('#'):
continue
if line.startswith('$'):
print_args(args)
args = []
else:
args.append(line)
if __name__ == '__main__':
main(sys.argv)
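# Expected input format, inferred from the parsing logic above (not from any
# spec): blank lines and '#' comments are skipped, other lines accumulate
# until a '$'-prefixed line flushes them through print_args. For example:
#
#   name_part_1
#   name_part_2
#   suffix
#   $end-of-record
#
# prints: "suffix name_part_1_name_part_2"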
|
[
"os.path.exists"
] |
[((223, 243), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (237, 243), False, 'import os\n')]
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from collections import namedtuple
from unittest import mock, skipUnless
import pandas as pd
from sqlalchemy import types
from sqlalchemy.engine.result import RowProxy
from sqlalchemy.sql import select
from superset.db_engine_specs.presto import PrestoEngineSpec
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
from superset.sql_parse import ParsedQuery
from superset.utils.core import DatasourceName, GenericDataType
from tests.integration_tests.db_engine_specs.base_tests import TestDbEngineSpec
class TestPrestoDbEngineSpec(TestDbEngineSpec):
@skipUnless(TestDbEngineSpec.is_module_installed("pyhive"), "pyhive not installed")
def test_get_datatype_presto(self):
self.assertEqual("STRING", PrestoEngineSpec.get_datatype("string"))
def test_presto_get_view_names_return_empty_list(
self,
): # pylint: disable=invalid-name
self.assertEqual(
[], PrestoEngineSpec.get_view_names(mock.ANY, mock.ANY, mock.ANY)
)
@mock.patch("superset.db_engine_specs.presto.is_feature_enabled")
def test_get_view_names(self, mock_is_feature_enabled):
mock_is_feature_enabled.return_value = True
mock_execute = mock.MagicMock()
mock_fetchall = mock.MagicMock(return_value=[["a", "b,", "c"], ["d", "e"]])
database = mock.MagicMock()
database.get_sqla_engine.return_value.raw_connection.return_value.cursor.return_value.execute = (
mock_execute
)
database.get_sqla_engine.return_value.raw_connection.return_value.cursor.return_value.fetchall = (
mock_fetchall
)
result = PrestoEngineSpec.get_view_names(database, mock.Mock(), None)
mock_execute.assert_called_once_with(
"SELECT table_name FROM information_schema.views", {}
)
assert result == ["a", "d"]
@mock.patch("superset.db_engine_specs.presto.is_feature_enabled")
def test_get_view_names_with_schema(self, mock_is_feature_enabled):
mock_is_feature_enabled.return_value = True
mock_execute = mock.MagicMock()
mock_fetchall = mock.MagicMock(return_value=[["a", "b,", "c"], ["d", "e"]])
database = mock.MagicMock()
database.get_sqla_engine.return_value.raw_connection.return_value.cursor.return_value.execute = (
mock_execute
)
database.get_sqla_engine.return_value.raw_connection.return_value.cursor.return_value.fetchall = (
mock_fetchall
)
schema = "schema"
result = PrestoEngineSpec.get_view_names(database, mock.Mock(), schema)
mock_execute.assert_called_once_with(
"SELECT table_name FROM information_schema.views "
"WHERE table_schema=%(schema)s",
{"schema": schema},
)
assert result == ["a", "d"]
def verify_presto_column(self, column, expected_results):
inspector = mock.Mock()
inspector.engine.dialect.identifier_preparer.quote_identifier = mock.Mock()
keymap = {
"Column": (None, None, 0),
"Type": (None, None, 1),
"Null": (None, None, 2),
}
row = RowProxy(mock.Mock(), column, [None, None, None, None], keymap)
inspector.bind.execute = mock.Mock(return_value=[row])
results = PrestoEngineSpec.get_columns(inspector, "", "")
self.assertEqual(len(expected_results), len(results))
for expected_result, result in zip(expected_results, results):
self.assertEqual(expected_result[0], result["name"])
self.assertEqual(expected_result[1], str(result["type"]))
def test_presto_get_column(self):
presto_column = ("column_name", "boolean", "")
expected_results = [("column_name", "BOOLEAN")]
self.verify_presto_column(presto_column, expected_results)
@mock.patch.dict(
"superset.extensions.feature_flag_manager._feature_flags",
{"PRESTO_EXPAND_DATA": True},
clear=True,
)
def test_presto_get_simple_row_column(self):
presto_column = ("column_name", "row(nested_obj double)", "")
expected_results = [("column_name", "ROW"), ("column_name.nested_obj", "FLOAT")]
self.verify_presto_column(presto_column, expected_results)
@mock.patch.dict(
"superset.extensions.feature_flag_manager._feature_flags",
{"PRESTO_EXPAND_DATA": True},
clear=True,
)
def test_presto_get_simple_row_column_with_name_containing_whitespace(self):
presto_column = ("column name", "row(nested_obj double)", "")
expected_results = [("column name", "ROW"), ("column name.nested_obj", "FLOAT")]
self.verify_presto_column(presto_column, expected_results)
@mock.patch.dict(
"superset.extensions.feature_flag_manager._feature_flags",
{"PRESTO_EXPAND_DATA": True},
clear=True,
)
def test_presto_get_simple_row_column_with_tricky_nested_field_name(self):
presto_column = ("column_name", 'row("Field Name(Tricky, Name)" double)', "")
expected_results = [
("column_name", "ROW"),
('column_name."Field Name(Tricky, Name)"', "FLOAT"),
]
self.verify_presto_column(presto_column, expected_results)
@mock.patch.dict(
"superset.extensions.feature_flag_manager._feature_flags",
{"PRESTO_EXPAND_DATA": True},
clear=True,
)
def test_presto_get_simple_array_column(self):
presto_column = ("column_name", "array(double)", "")
expected_results = [("column_name", "ARRAY")]
self.verify_presto_column(presto_column, expected_results)
@mock.patch.dict(
"superset.extensions.feature_flag_manager._feature_flags",
{"PRESTO_EXPAND_DATA": True},
clear=True,
)
def test_presto_get_row_within_array_within_row_column(self):
presto_column = (
"column_name",
"row(nested_array array(row(nested_row double)), nested_obj double)",
"",
)
expected_results = [
("column_name", "ROW"),
("column_name.nested_array", "ARRAY"),
("column_name.nested_array.nested_row", "FLOAT"),
("column_name.nested_obj", "FLOAT"),
]
self.verify_presto_column(presto_column, expected_results)
@mock.patch.dict(
"superset.extensions.feature_flag_manager._feature_flags",
{"PRESTO_EXPAND_DATA": True},
clear=True,
)
def test_presto_get_array_within_row_within_array_column(self):
presto_column = (
"column_name",
"array(row(nested_array array(double), nested_obj double))",
"",
)
expected_results = [
("column_name", "ARRAY"),
("column_name.nested_array", "ARRAY"),
("column_name.nested_obj", "FLOAT"),
]
self.verify_presto_column(presto_column, expected_results)
def test_presto_get_fields(self):
cols = [
{"name": "column"},
{"name": "column.nested_obj"},
{"name": 'column."quoted.nested obj"'},
]
actual_results = PrestoEngineSpec._get_fields(cols)
expected_results = [
{"name": '"column"', "label": "column"},
{"name": '"column"."nested_obj"', "label": "column.nested_obj"},
{
"name": '"column"."quoted.nested obj"',
"label": 'column."quoted.nested obj"',
},
]
for actual_result, expected_result in zip(actual_results, expected_results):
self.assertEqual(actual_result.element.name, expected_result["name"])
self.assertEqual(actual_result.name, expected_result["label"])
@mock.patch.dict(
"superset.extensions.feature_flag_manager._feature_flags",
{"PRESTO_EXPAND_DATA": True},
clear=True,
)
def test_presto_expand_data_with_simple_structural_columns(self):
cols = [
{"name": "row_column", "type": "ROW(NESTED_OBJ VARCHAR)"},
{"name": "array_column", "type": "ARRAY(BIGINT)"},
]
data = [
{"row_column": ["a"], "array_column": [1, 2, 3]},
{"row_column": ["b"], "array_column": [4, 5, 6]},
]
actual_cols, actual_data, actual_expanded_cols = PrestoEngineSpec.expand_data(
cols, data
)
expected_cols = [
{"name": "row_column", "type": "ROW(NESTED_OBJ VARCHAR)"},
{"name": "row_column.nested_obj", "type": "VARCHAR"},
{"name": "array_column", "type": "ARRAY(BIGINT)"},
]
expected_data = [
{"array_column": 1, "row_column": ["a"], "row_column.nested_obj": "a"},
{"array_column": 2, "row_column": "", "row_column.nested_obj": ""},
{"array_column": 3, "row_column": "", "row_column.nested_obj": ""},
{"array_column": 4, "row_column": ["b"], "row_column.nested_obj": "b"},
{"array_column": 5, "row_column": "", "row_column.nested_obj": ""},
{"array_column": 6, "row_column": "", "row_column.nested_obj": ""},
]
expected_expanded_cols = [{"name": "row_column.nested_obj", "type": "VARCHAR"}]
self.assertEqual(actual_cols, expected_cols)
self.assertEqual(actual_data, expected_data)
self.assertEqual(actual_expanded_cols, expected_expanded_cols)
@mock.patch.dict(
"superset.extensions.feature_flag_manager._feature_flags",
{"PRESTO_EXPAND_DATA": True},
clear=True,
)
def test_presto_expand_data_with_complex_row_columns(self):
cols = [
{
"name": "row_column",
"type": "ROW(NESTED_OBJ1 VARCHAR, NESTED_ROW ROW(NESTED_OBJ2 VARCHAR))",
}
]
data = [{"row_column": ["a1", ["a2"]]}, {"row_column": ["b1", ["b2"]]}]
actual_cols, actual_data, actual_expanded_cols = PrestoEngineSpec.expand_data(
cols, data
)
expected_cols = [
{
"name": "row_column",
"type": "ROW(NESTED_OBJ1 VARCHAR, NESTED_ROW ROW(NESTED_OBJ2 VARCHAR))",
},
{"name": "row_column.nested_obj1", "type": "VARCHAR"},
{"name": "row_column.nested_row", "type": "ROW(NESTED_OBJ2 VARCHAR)"},
{"name": "row_column.nested_row.nested_obj2", "type": "VARCHAR"},
]
expected_data = [
{
"row_column": ["a1", ["a2"]],
"row_column.nested_obj1": "a1",
"row_column.nested_row": ["a2"],
"row_column.nested_row.nested_obj2": "a2",
},
{
"row_column": ["b1", ["b2"]],
"row_column.nested_obj1": "b1",
"row_column.nested_row": ["b2"],
"row_column.nested_row.nested_obj2": "b2",
},
]
expected_expanded_cols = [
{"name": "row_column.nested_obj1", "type": "VARCHAR"},
{"name": "row_column.nested_row", "type": "ROW(NESTED_OBJ2 VARCHAR)"},
{"name": "row_column.nested_row.nested_obj2", "type": "VARCHAR"},
]
self.assertEqual(actual_cols, expected_cols)
self.assertEqual(actual_data, expected_data)
self.assertEqual(actual_expanded_cols, expected_expanded_cols)
@mock.patch.dict(
"superset.extensions.feature_flag_manager._feature_flags",
{"PRESTO_EXPAND_DATA": True},
clear=True,
)
def test_presto_expand_data_with_complex_row_columns_and_null_values(self):
cols = [
{"name": "row_column", "type": "ROW(NESTED_ROW ROW(NESTED_OBJ VARCHAR))",}
]
data = [
{"row_column": '[["a"]]'},
{"row_column": "[[null]]"},
{"row_column": "[null]"},
{"row_column": "null"},
]
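        # Null handling: JSON-encoded nulls at any nesting level should expand
        # to None, while fully absent parents degrade to empty strings.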
actual_cols, actual_data, actual_expanded_cols = PrestoEngineSpec.expand_data(
cols, data
)
expected_cols = [
{"name": "row_column", "type": "ROW(NESTED_ROW ROW(NESTED_OBJ VARCHAR))",},
{"name": "row_column.nested_row", "type": "ROW(NESTED_OBJ VARCHAR)"},
{"name": "row_column.nested_row.nested_obj", "type": "VARCHAR"},
]
expected_data = [
{
"row_column": [["a"]],
"row_column.nested_row": ["a"],
"row_column.nested_row.nested_obj": "a",
},
{
"row_column": [[None]],
"row_column.nested_row": [None],
"row_column.nested_row.nested_obj": None,
},
{
"row_column": [None],
"row_column.nested_row": None,
"row_column.nested_row.nested_obj": "",
},
{
"row_column": None,
"row_column.nested_row": "",
"row_column.nested_row.nested_obj": "",
},
]
expected_expanded_cols = [
{"name": "row_column.nested_row", "type": "ROW(NESTED_OBJ VARCHAR)"},
{"name": "row_column.nested_row.nested_obj", "type": "VARCHAR"},
]
self.assertEqual(actual_cols, expected_cols)
self.assertEqual(actual_data, expected_data)
self.assertEqual(actual_expanded_cols, expected_expanded_cols)
@mock.patch.dict(
"superset.extensions.feature_flag_manager._feature_flags",
{"PRESTO_EXPAND_DATA": True},
clear=True,
)
def test_presto_expand_data_with_complex_array_columns(self):
cols = [
{"name": "int_column", "type": "BIGINT"},
{
"name": "array_column",
"type": "ARRAY(ROW(NESTED_ARRAY ARRAY(ROW(NESTED_OBJ VARCHAR))))",
},
]
data = [
{"int_column": 1, "array_column": [[[["a"], ["b"]]], [[["c"], ["d"]]]]},
{"int_column": 2, "array_column": [[[["e"], ["f"]]], [[["g"], ["h"]]]]},
]
actual_cols, actual_data, actual_expanded_cols = PrestoEngineSpec.expand_data(
cols, data
)
expected_cols = [
{"name": "int_column", "type": "BIGINT"},
{
"name": "array_column",
"type": "ARRAY(ROW(NESTED_ARRAY ARRAY(ROW(NESTED_OBJ VARCHAR))))",
},
{
"name": "array_column.nested_array",
"type": "ARRAY(ROW(NESTED_OBJ VARCHAR))",
},
{"name": "array_column.nested_array.nested_obj", "type": "VARCHAR"},
]
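        # Nested arrays multiply out: each input row yields one output row per
        # innermost element, with already-emitted parent values blanked to "".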
expected_data = [
{
"array_column": [[["a"], ["b"]]],
"array_column.nested_array": ["a"],
"array_column.nested_array.nested_obj": "a",
"int_column": 1,
},
{
"array_column": "",
"array_column.nested_array": ["b"],
"array_column.nested_array.nested_obj": "b",
"int_column": "",
},
{
"array_column": [[["c"], ["d"]]],
"array_column.nested_array": ["c"],
"array_column.nested_array.nested_obj": "c",
"int_column": "",
},
{
"array_column": "",
"array_column.nested_array": ["d"],
"array_column.nested_array.nested_obj": "d",
"int_column": "",
},
{
"array_column": [[["e"], ["f"]]],
"array_column.nested_array": ["e"],
"array_column.nested_array.nested_obj": "e",
"int_column": 2,
},
{
"array_column": "",
"array_column.nested_array": ["f"],
"array_column.nested_array.nested_obj": "f",
"int_column": "",
},
{
"array_column": [[["g"], ["h"]]],
"array_column.nested_array": ["g"],
"array_column.nested_array.nested_obj": "g",
"int_column": "",
},
{
"array_column": "",
"array_column.nested_array": ["h"],
"array_column.nested_array.nested_obj": "h",
"int_column": "",
},
]
expected_expanded_cols = [
{
"name": "array_column.nested_array",
"type": "ARRAY(ROW(NESTED_OBJ VARCHAR))",
},
{"name": "array_column.nested_array.nested_obj", "type": "VARCHAR"},
]
self.assertEqual(actual_cols, expected_cols)
self.assertEqual(actual_data, expected_data)
self.assertEqual(actual_expanded_cols, expected_expanded_cols)
def test_presto_extra_table_metadata(self):
db = mock.Mock()
db.get_indexes = mock.Mock(return_value=[{"column_names": ["ds", "hour"]}])
db.get_extra = mock.Mock(return_value={})
df = pd.DataFrame({"ds": ["01-01-19"], "hour": [1]})
db.get_df = mock.Mock(return_value=df)
PrestoEngineSpec.get_create_view = mock.Mock(return_value=None)
result = PrestoEngineSpec.extra_table_metadata(db, "test_table", "test_schema")
self.assertEqual({"ds": "01-01-19", "hour": 1}, result["partitions"]["latest"])
def test_presto_where_latest_partition(self):
db = mock.Mock()
db.get_indexes = mock.Mock(return_value=[{"column_names": ["ds", "hour"]}])
db.get_extra = mock.Mock(return_value={})
df = pd.DataFrame({"ds": ["01-01-19"], "hour": [1]})
db.get_df = mock.Mock(return_value=df)
columns = [{"name": "ds"}, {"name": "hour"}]
result = PrestoEngineSpec.where_latest_partition(
"test_table", "test_schema", db, select(), columns
)
query_result = str(result.compile(compile_kwargs={"literal_binds": True}))
self.assertEqual("SELECT \nWHERE ds = '01-01-19' AND hour = 1", query_result)
def test_convert_dttm(self):
dttm = self.get_dttm()
self.assertEqual(
PrestoEngineSpec.convert_dttm("DATE", dttm),
"from_iso8601_date('2019-01-02')",
)
self.assertEqual(
PrestoEngineSpec.convert_dttm("TIMESTAMP", dttm),
"from_iso8601_timestamp('2019-01-02T03:04:05.678900')",
)
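    # query_cost_formatter turns Presto's raw cost-estimate JSON into
    # human-readable figures ("904 M rows", "354 GB"), as asserted below.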
def test_query_cost_formatter(self):
raw_cost = [
{
"inputTableColumnInfos": [
{
"table": {
"catalog": "hive",
"schemaTable": {
"schema": "default",
"table": "fact_passenger_state",
},
},
"columnConstraints": [
{
"columnName": "ds",
"typeSignature": "varchar",
"domain": {
"nullsAllowed": False,
"ranges": [
{
"low": {
"value": "2019-07-10",
"bound": "EXACTLY",
},
"high": {
"value": "2019-07-10",
"bound": "EXACTLY",
},
}
],
},
}
],
"estimate": {
"outputRowCount": 9.04969899e8,
"outputSizeInBytes": 3.54143678301e11,
"cpuCost": 3.54143678301e11,
"maxMemory": 0.0,
"networkCost": 0.0,
},
}
],
"estimate": {
"outputRowCount": 9.04969899e8,
"outputSizeInBytes": 3.54143678301e11,
"cpuCost": 3.54143678301e11,
"maxMemory": 0.0,
"networkCost": 3.54143678301e11,
},
}
]
formatted_cost = PrestoEngineSpec.query_cost_formatter(raw_cost)
expected = [
{
"Output count": "904 M rows",
"Output size": "354 GB",
"CPU cost": "354 G",
"Max memory": "0 B",
"Network cost": "354 G",
}
]
self.assertEqual(formatted_cost, expected)
@mock.patch.dict(
"superset.extensions.feature_flag_manager._feature_flags",
{"PRESTO_EXPAND_DATA": True},
clear=True,
)
def test_presto_expand_data_array(self):
cols = [
{"name": "event_id", "type": "VARCHAR", "is_date": False},
{"name": "timestamp", "type": "BIGINT", "is_date": False},
{
"name": "user",
"type": "ROW(ID BIGINT, FIRST_NAME VARCHAR, LAST_NAME VARCHAR)",
"is_date": False,
},
]
data = [
{
"event_id": "abcdef01-2345-6789-abcd-ef0123456789",
"timestamp": "1595895506219",
"user": '[1, "JOHN", "DOE"]',
}
]
actual_cols, actual_data, actual_expanded_cols = PrestoEngineSpec.expand_data(
cols, data
)
expected_cols = [
{"name": "event_id", "type": "VARCHAR", "is_date": False},
{"name": "timestamp", "type": "BIGINT", "is_date": False},
{
"name": "user",
"type": "ROW(ID BIGINT, FIRST_NAME VARCHAR, LAST_NAME VARCHAR)",
"is_date": False,
},
{"name": "user.id", "type": "BIGINT"},
{"name": "user.first_name", "type": "VARCHAR"},
{"name": "user.last_name", "type": "VARCHAR"},
]
expected_data = [
{
"event_id": "abcdef01-2345-6789-abcd-ef0123456789",
"timestamp": "1595895506219",
"user": [1, "JOHN", "DOE"],
"user.id": 1,
"user.first_name": "JOHN",
"user.last_name": "DOE",
}
]
expected_expanded_cols = [
{"name": "user.id", "type": "BIGINT"},
{"name": "user.first_name", "type": "VARCHAR"},
{"name": "user.last_name", "type": "VARCHAR"},
]
self.assertEqual(actual_cols, expected_cols)
self.assertEqual(actual_data, expected_data)
self.assertEqual(actual_expanded_cols, expected_expanded_cols)
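    # get_column_spec maps Presto type strings onto SQLAlchemy types plus a
    # Superset GenericDataType; temporal types are wrapped in a
    # TemporalWrapperType so they can serve as time columns.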
def test_get_sqla_column_type(self):
column_spec = PrestoEngineSpec.get_column_spec("varchar(255)")
assert isinstance(column_spec.sqla_type, types.VARCHAR)
assert column_spec.sqla_type.length == 255
self.assertEqual(column_spec.generic_type, GenericDataType.STRING)
column_spec = PrestoEngineSpec.get_column_spec("varchar")
assert isinstance(column_spec.sqla_type, types.String)
assert column_spec.sqla_type.length is None
self.assertEqual(column_spec.generic_type, GenericDataType.STRING)
column_spec = PrestoEngineSpec.get_column_spec("char(10)")
assert isinstance(column_spec.sqla_type, types.CHAR)
assert column_spec.sqla_type.length == 10
self.assertEqual(column_spec.generic_type, GenericDataType.STRING)
column_spec = PrestoEngineSpec.get_column_spec("char")
assert isinstance(column_spec.sqla_type, types.CHAR)
assert column_spec.sqla_type.length is None
self.assertEqual(column_spec.generic_type, GenericDataType.STRING)
column_spec = PrestoEngineSpec.get_column_spec("integer")
assert isinstance(column_spec.sqla_type, types.Integer)
self.assertEqual(column_spec.generic_type, GenericDataType.NUMERIC)
column_spec = PrestoEngineSpec.get_column_spec("time")
assert isinstance(column_spec.sqla_type, types.Time)
assert type(column_spec.sqla_type).__name__ == "TemporalWrapperType"
self.assertEqual(column_spec.generic_type, GenericDataType.TEMPORAL)
column_spec = PrestoEngineSpec.get_column_spec("timestamp")
assert isinstance(column_spec.sqla_type, types.TIMESTAMP)
assert type(column_spec.sqla_type).__name__ == "TemporalWrapperType"
self.assertEqual(column_spec.generic_type, GenericDataType.TEMPORAL)
sqla_type = PrestoEngineSpec.get_sqla_column_type(None)
assert sqla_type is None
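    # The next three tests toggle the feature flag governing whether view
    # names are handled separately when listing tables; note the fixtures
    # expect the same combined set of names either way.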
@mock.patch(
"superset.utils.feature_flag_manager.FeatureFlagManager.is_feature_enabled"
)
@mock.patch("superset.db_engine_specs.base.BaseEngineSpec.get_table_names")
@mock.patch("superset.db_engine_specs.presto.PrestoEngineSpec.get_view_names")
def test_get_table_names_no_split_views_from_tables(
self, mock_get_view_names, mock_get_table_names, mock_is_feature_enabled
):
mock_get_view_names.return_value = ["view1", "view2"]
table_names = ["table1", "table2", "view1", "view2"]
mock_get_table_names.return_value = table_names
mock_is_feature_enabled.return_value = False
tables = PrestoEngineSpec.get_table_names(mock.Mock(), mock.Mock(), None)
assert tables == table_names
@mock.patch(
"superset.utils.feature_flag_manager.FeatureFlagManager.is_feature_enabled"
)
@mock.patch("superset.db_engine_specs.base.BaseEngineSpec.get_table_names")
@mock.patch("superset.db_engine_specs.presto.PrestoEngineSpec.get_view_names")
def test_get_table_names_split_views_from_tables(
self, mock_get_view_names, mock_get_table_names, mock_is_feature_enabled
):
mock_get_view_names.return_value = ["view1", "view2"]
table_names = ["table1", "table2", "view1", "view2"]
mock_get_table_names.return_value = table_names
mock_is_feature_enabled.return_value = True
tables = PrestoEngineSpec.get_table_names(mock.Mock(), mock.Mock(), None)
assert sorted(tables) == sorted(table_names)
@mock.patch(
"superset.utils.feature_flag_manager.FeatureFlagManager.is_feature_enabled"
)
@mock.patch("superset.db_engine_specs.base.BaseEngineSpec.get_table_names")
@mock.patch("superset.db_engine_specs.presto.PrestoEngineSpec.get_view_names")
def test_get_table_names_split_views_from_tables_no_tables(
self, mock_get_view_names, mock_get_table_names, mock_is_feature_enabled
):
mock_get_view_names.return_value = []
table_names = []
mock_get_table_names.return_value = table_names
mock_is_feature_enabled.return_value = True
tables = PrestoEngineSpec.get_table_names(mock.Mock(), mock.Mock(), None)
assert tables == []
def test_get_full_name(self):
names = [
("part1", "part2"),
("part11", "part22"),
]
result = PrestoEngineSpec._get_full_name(names)
assert result == "part1.part11"
def test_get_full_name_empty_tuple(self):
names = [
("part1", "part2"),
("", "part3"),
("part4", "part5"),
("", "part6"),
]
result = PrestoEngineSpec._get_full_name(names)
assert result == "part1.part4"
def test_split_data_type(self):
data_type = "value1 value2"
result = PrestoEngineSpec._split_data_type(data_type, " ")
assert result == ["value1", "value2"]
data_type = "value1,value2"
result = PrestoEngineSpec._split_data_type(data_type, ",")
assert result == ["value1", "value2"]
data_type = '"value,1",value2'
result = PrestoEngineSpec._split_data_type(data_type, ",")
assert result == ['"value,1"', "value2"]
def test_show_columns(self):
inspector = mock.MagicMock()
inspector.engine.dialect.identifier_preparer.quote_identifier = (
lambda x: f'"{x}"'
)
mock_execute = mock.MagicMock(return_value=["a", "b"])
inspector.bind.execute = mock_execute
table_name = "table_name"
result = PrestoEngineSpec._show_columns(inspector, table_name, None)
assert result == ["a", "b"]
mock_execute.assert_called_once_with(f'SHOW COLUMNS FROM "{table_name}"')
def test_show_columns_with_schema(self):
inspector = mock.MagicMock()
inspector.engine.dialect.identifier_preparer.quote_identifier = (
lambda x: f'"{x}"'
)
mock_execute = mock.MagicMock(return_value=["a", "b"])
inspector.bind.execute = mock_execute
table_name = "table_name"
schema = "schema"
result = PrestoEngineSpec._show_columns(inspector, table_name, schema)
assert result == ["a", "b"]
mock_execute.assert_called_once_with(
f'SHOW COLUMNS FROM "{schema}"."{table_name}"'
)
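    # A column name counts as quoted only when it both starts and ends with
    # a double quote; stray or unbalanced quotes do not qualify.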
def test_is_column_name_quoted(self):
column_name = "mock"
assert PrestoEngineSpec._is_column_name_quoted(column_name) is False
column_name = '"mock'
assert PrestoEngineSpec._is_column_name_quoted(column_name) is False
column_name = '"moc"k'
assert PrestoEngineSpec._is_column_name_quoted(column_name) is False
column_name = '"moc"k"'
assert PrestoEngineSpec._is_column_name_quoted(column_name) is True
@mock.patch("superset.db_engine_specs.base.BaseEngineSpec.select_star")
def test_select_star_no_presto_expand_data(self, mock_select_star):
database = mock.Mock()
table_name = "table_name"
engine = mock.Mock()
cols = [
{"col1": "val1"},
{"col2": "val2"},
]
PrestoEngineSpec.select_star(database, table_name, engine, cols=cols)
mock_select_star.assert_called_once_with(
database, table_name, engine, None, 100, False, True, True, cols
)
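    # With PRESTO_EXPAND_DATA enabled, select_star must drop pseudo-columns
    # whose names contain dots, since those denote expanded children rather
    # than real table columns.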
@mock.patch("superset.db_engine_specs.presto.is_feature_enabled")
@mock.patch("superset.db_engine_specs.base.BaseEngineSpec.select_star")
def test_select_star_presto_expand_data(
self, mock_select_star, mock_is_feature_enabled
):
mock_is_feature_enabled.return_value = True
database = mock.Mock()
table_name = "table_name"
engine = mock.Mock()
cols = [
{"name": "val1"},
{"name": "val2<?!@#$312,/'][p098"},
{"name": ".val2"},
{"name": "val2."},
{"name": "val.2"},
{"name": ".val2."},
]
PrestoEngineSpec.select_star(
database, table_name, engine, show_cols=True, cols=cols
)
mock_select_star.assert_called_once_with(
database,
table_name,
engine,
None,
100,
True,
True,
True,
[{"name": "val1"}, {"name": "val2<?!@#$312,/'][p098"},],
)
def test_estimate_statement_cost(self):
mock_cursor = mock.MagicMock()
estimate_json = {"a": "b"}
mock_cursor.fetchone.return_value = [
'{"a": "b"}',
]
result = PrestoEngineSpec.estimate_statement_cost(
"SELECT * FROM brth_names", mock_cursor
)
assert result == estimate_json
def test_estimate_statement_cost_invalid_syntax(self):
mock_cursor = mock.MagicMock()
mock_cursor.execute.side_effect = Exception()
with self.assertRaises(Exception):
PrestoEngineSpec.estimate_statement_cost(
"DROP TABLE brth_names", mock_cursor
)
def test_get_all_datasource_names(self):
df = pd.DataFrame.from_dict(
{"table_schema": ["schema1", "schema2"], "table_name": ["name1", "name2"]}
)
database = mock.MagicMock()
database.get_df.return_value = df
result = PrestoEngineSpec.get_all_datasource_names(database, "table")
expected_result = [
DatasourceName(schema="schema1", table="name1"),
DatasourceName(schema="schema2", table="name2"),
]
assert result == expected_result
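    # get_create_view issues "SHOW CREATE VIEW schema.table" through a raw
    # cursor; the deeply chained MagicMocks below stub that cursor pipeline.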
def test_get_create_view(self):
mock_execute = mock.MagicMock()
mock_fetchall = mock.MagicMock(return_value=[["a", "b,", "c"], ["d", "e"]])
database = mock.MagicMock()
database.get_sqla_engine.return_value.raw_connection.return_value.cursor.return_value.execute = (
mock_execute
)
database.get_sqla_engine.return_value.raw_connection.return_value.cursor.return_value.fetchall = (
mock_fetchall
)
database.get_sqla_engine.return_value.raw_connection.return_value.cursor.return_value.poll.return_value = (
False
)
schema = "schema"
table = "table"
result = PrestoEngineSpec.get_create_view(database, schema=schema, table=table)
assert result == "a"
mock_execute.assert_called_once_with(f"SHOW CREATE VIEW {schema}.{table}")
def test_get_create_view_exception(self):
mock_execute = mock.MagicMock(side_effect=Exception())
database = mock.MagicMock()
database.get_sqla_engine.return_value.raw_connection.return_value.cursor.return_value.execute = (
mock_execute
)
schema = "schema"
table = "table"
with self.assertRaises(Exception):
PrestoEngineSpec.get_create_view(database, schema=schema, table=table)
def test_get_create_view_database_error(self):
from pyhive.exc import DatabaseError
mock_execute = mock.MagicMock(side_effect=DatabaseError())
database = mock.MagicMock()
database.get_sqla_engine.return_value.raw_connection.return_value.cursor.return_value.execute = (
mock_execute
)
schema = "schema"
table = "table"
result = PrestoEngineSpec.get_create_view(database, schema=schema, table=table)
assert result is None
def test_extract_error_message_orig(self):
DatabaseError = namedtuple("DatabaseError", ["error_dict"])
db_err = DatabaseError(
{"errorName": "name", "errorLocation": "location", "message": "msg"}
)
exception = Exception()
exception.orig = db_err
result = PrestoEngineSpec._extract_error_message(exception)
assert result == "name at location: msg"
    def test_extract_error_message_db_error(self):
from pyhive.exc import DatabaseError
exception = DatabaseError({"message": "Err message"})
result = PrestoEngineSpec._extract_error_message(exception)
assert result == "Err message"
def test_extract_error_message_general_exception(self):
exception = Exception("Err message")
result = PrestoEngineSpec._extract_error_message(exception)
assert result == "Err message"
def test_extract_errors(self):
msg = "Generic Error"
result = PrestoEngineSpec.extract_errors(Exception(msg))
assert result == [
SupersetError(
message="Generic Error",
error_type=SupersetErrorType.GENERIC_DB_ENGINE_ERROR,
level=ErrorLevel.ERROR,
extra={
"engine_name": "Presto",
"issue_codes": [
{
"code": 1002,
"message": "Issue 1002 - The database returned an unexpected error.",
}
],
},
)
]
msg = "line 1:8: Column 'bogus' cannot be resolved"
result = PrestoEngineSpec.extract_errors(Exception(msg))
assert result == [
SupersetError(
message='We can\'t seem to resolve the column "bogus" at line 1:8.',
error_type=SupersetErrorType.COLUMN_DOES_NOT_EXIST_ERROR,
level=ErrorLevel.ERROR,
extra={
"engine_name": "Presto",
"issue_codes": [
{
"code": 1003,
"message": "Issue 1003 - There is a syntax error in the SQL query. Perhaps there was a misspelling or a typo.",
},
{
"code": 1004,
"message": "Issue 1004 - The column was deleted or renamed in the database.",
},
],
},
)
]
msg = "line 1:15: Table 'tpch.tiny.region2' does not exist"
result = PrestoEngineSpec.extract_errors(Exception(msg))
assert result == [
SupersetError(
message="The table \"'tpch.tiny.region2'\" does not exist. A valid table must be used to run this query.",
error_type=SupersetErrorType.TABLE_DOES_NOT_EXIST_ERROR,
level=ErrorLevel.ERROR,
extra={
"engine_name": "Presto",
"issue_codes": [
{
"code": 1003,
"message": "Issue 1003 - There is a syntax error in the SQL query. Perhaps there was a misspelling or a typo.",
},
{
"code": 1005,
"message": "Issue 1005 - The table was deleted or renamed in the database.",
},
],
},
)
]
msg = "line 1:15: Schema 'tin' does not exist"
result = PrestoEngineSpec.extract_errors(Exception(msg))
assert result == [
SupersetError(
message='The schema "tin" does not exist. A valid schema must be used to run this query.',
error_type=SupersetErrorType.SCHEMA_DOES_NOT_EXIST_ERROR,
level=ErrorLevel.ERROR,
extra={
"engine_name": "Presto",
"issue_codes": [
{
"code": 1003,
"message": "Issue 1003 - There is a syntax error in the SQL query. Perhaps there was a misspelling or a typo.",
},
{
"code": 1016,
"message": "Issue 1005 - The schema was deleted or renamed in the database.",
},
],
},
)
]
msg = b"Access Denied: Invalid credentials"
result = PrestoEngineSpec.extract_errors(Exception(msg), {"username": "alice"})
assert result == [
SupersetError(
message='Either the username "alice" or the password is incorrect.',
error_type=SupersetErrorType.CONNECTION_ACCESS_DENIED_ERROR,
level=ErrorLevel.ERROR,
extra={
"engine_name": "Presto",
"issue_codes": [
{
"code": 1014,
"message": "Issue 1014 - Either the username or the password is wrong.",
}
],
},
)
]
msg = "Failed to establish a new connection: [Errno 8] nodename nor servname provided, or not known"
result = PrestoEngineSpec.extract_errors(
Exception(msg), {"hostname": "badhost"}
)
assert result == [
SupersetError(
message='The hostname "badhost" cannot be resolved.',
error_type=SupersetErrorType.CONNECTION_INVALID_HOSTNAME_ERROR,
level=ErrorLevel.ERROR,
extra={
"engine_name": "Presto",
"issue_codes": [
{
"code": 1007,
"message": "Issue 1007 - The hostname provided can't be resolved.",
}
],
},
)
]
msg = "Failed to establish a new connection: [Errno 60] Operation timed out"
result = PrestoEngineSpec.extract_errors(
Exception(msg), {"hostname": "badhost", "port": 12345}
)
assert result == [
SupersetError(
message='The host "badhost" might be down, and can\'t be reached on port 12345.',
error_type=SupersetErrorType.CONNECTION_HOST_DOWN_ERROR,
level=ErrorLevel.ERROR,
extra={
"engine_name": "Presto",
"issue_codes": [
{
"code": 1009,
"message": "Issue 1009 - The host might be down, and can't be reached on the provided port.",
}
],
},
)
]
msg = "Failed to establish a new connection: [Errno 61] Connection refused"
result = PrestoEngineSpec.extract_errors(
Exception(msg), {"hostname": "badhost", "port": 12345}
)
assert result == [
SupersetError(
message='Port 12345 on hostname "badhost" refused the connection.',
error_type=SupersetErrorType.CONNECTION_PORT_CLOSED_ERROR,
level=ErrorLevel.ERROR,
extra={
"engine_name": "Presto",
"issue_codes": [
{"code": 1008, "message": "Issue 1008 - The port is closed."}
],
},
)
]
msg = "line 1:15: Catalog 'wrong' does not exist"
result = PrestoEngineSpec.extract_errors(Exception(msg))
assert result == [
SupersetError(
message='Unable to connect to catalog named "wrong".',
error_type=SupersetErrorType.CONNECTION_UNKNOWN_DATABASE_ERROR,
level=ErrorLevel.ERROR,
extra={
"engine_name": "Presto",
"issue_codes": [
{
"code": 1015,
"message": "Issue 1015 - Either the database is spelled incorrectly or does not exist.",
}
],
},
)
]
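# Read-only classification: SHOW/EXPLAIN/SELECT (including CTEs) pass,
# while SET, UPDATE and INSERT OVERWRITE are treated as mutating.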
def test_is_readonly():
def is_readonly(sql: str) -> bool:
return PrestoEngineSpec.is_readonly_query(ParsedQuery(sql))
assert not is_readonly("SET hivevar:desc='Legislators'")
assert not is_readonly("UPDATE t1 SET col1 = NULL")
assert not is_readonly("INSERT OVERWRITE TABLE tabB SELECT a.Age FROM TableA")
assert is_readonly("SHOW LOCKS test EXTENDED")
assert is_readonly("EXPLAIN SELECT 1")
assert is_readonly("SELECT 1")
assert is_readonly("WITH (SELECT 1) bla SELECT * from bla")
|
[
"superset.db_engine_specs.presto.PrestoEngineSpec._get_full_name",
"superset.db_engine_specs.presto.PrestoEngineSpec._get_fields",
"superset.db_engine_specs.presto.PrestoEngineSpec.estimate_statement_cost",
"superset.db_engine_specs.presto.PrestoEngineSpec.get_column_spec",
"superset.db_engine_specs.presto.PrestoEngineSpec.get_all_datasource_names",
"pandas.DataFrame",
"unittest.mock.MagicMock",
"superset.db_engine_specs.presto.PrestoEngineSpec.select_star",
"superset.db_engine_specs.presto.PrestoEngineSpec.expand_data",
"pyhive.exc.DatabaseError",
"superset.db_engine_specs.presto.PrestoEngineSpec.get_columns",
"superset.sql_parse.ParsedQuery",
"sqlalchemy.sql.select",
"pandas.DataFrame.from_dict",
"superset.errors.SupersetError",
"superset.db_engine_specs.presto.PrestoEngineSpec._split_data_type",
"superset.db_engine_specs.presto.PrestoEngineSpec.query_cost_formatter",
"superset.db_engine_specs.presto.PrestoEngineSpec._is_column_name_quoted",
"tests.integration_tests.db_engine_specs.base_tests.TestDbEngineSpec.is_module_installed",
"unittest.mock.patch.dict",
"unittest.mock.patch",
"superset.db_engine_specs.presto.PrestoEngineSpec.get_datatype",
"superset.db_engine_specs.presto.PrestoEngineSpec._show_columns",
"superset.db_engine_specs.presto.PrestoEngineSpec.get_sqla_column_type",
"unittest.mock.Mock",
"superset.db_engine_specs.presto.PrestoEngineSpec.get_create_view",
"superset.db_engine_specs.presto.PrestoEngineSpec.get_view_names",
"collections.namedtuple",
"superset.utils.core.DatasourceName",
"superset.db_engine_specs.presto.PrestoEngineSpec.convert_dttm",
"superset.db_engine_specs.presto.PrestoEngineSpec.extra_table_metadata",
"superset.db_engine_specs.presto.PrestoEngineSpec._extract_error_message"
] |
[((1792, 1856), 'unittest.mock.patch', 'mock.patch', (['"""superset.db_engine_specs.presto.is_feature_enabled"""'], {}), "('superset.db_engine_specs.presto.is_feature_enabled')\n", (1802, 1856), False, 'from unittest import mock, skipUnless\n'), ((2655, 2719), 'unittest.mock.patch', 'mock.patch', (['"""superset.db_engine_specs.presto.is_feature_enabled"""'], {}), "('superset.db_engine_specs.presto.is_feature_enabled')\n", (2665, 2719), False, 'from unittest import mock, skipUnless\n'), ((4645, 4765), 'unittest.mock.patch.dict', 'mock.patch.dict', (['"""superset.extensions.feature_flag_manager._feature_flags"""', "{'PRESTO_EXPAND_DATA': True}"], {'clear': '(True)'}), "('superset.extensions.feature_flag_manager._feature_flags',\n {'PRESTO_EXPAND_DATA': True}, clear=True)\n", (4660, 4765), False, 'from unittest import mock, skipUnless\n'), ((5074, 5194), 'unittest.mock.patch.dict', 'mock.patch.dict', (['"""superset.extensions.feature_flag_manager._feature_flags"""', "{'PRESTO_EXPAND_DATA': True}"], {'clear': '(True)'}), "('superset.extensions.feature_flag_manager._feature_flags',\n {'PRESTO_EXPAND_DATA': True}, clear=True)\n", (5089, 5194), False, 'from unittest import mock, skipUnless\n'), ((5535, 5655), 'unittest.mock.patch.dict', 'mock.patch.dict', (['"""superset.extensions.feature_flag_manager._feature_flags"""', "{'PRESTO_EXPAND_DATA': True}"], {'clear': '(True)'}), "('superset.extensions.feature_flag_manager._feature_flags',\n {'PRESTO_EXPAND_DATA': True}, clear=True)\n", (5550, 5655), False, 'from unittest import mock, skipUnless\n'), ((6061, 6181), 'unittest.mock.patch.dict', 'mock.patch.dict', (['"""superset.extensions.feature_flag_manager._feature_flags"""', "{'PRESTO_EXPAND_DATA': True}"], {'clear': '(True)'}), "('superset.extensions.feature_flag_manager._feature_flags',\n {'PRESTO_EXPAND_DATA': True}, clear=True)\n", (6076, 6181), False, 'from unittest import mock, skipUnless\n'), ((6448, 6568), 'unittest.mock.patch.dict', 'mock.patch.dict', (['"""superset.extensions.feature_flag_manager._feature_flags"""', "{'PRESTO_EXPAND_DATA': True}"], {'clear': '(True)'}), "('superset.extensions.feature_flag_manager._feature_flags',\n {'PRESTO_EXPAND_DATA': True}, clear=True)\n", (6463, 6568), False, 'from unittest import mock, skipUnless\n'), ((7133, 7253), 'unittest.mock.patch.dict', 'mock.patch.dict', (['"""superset.extensions.feature_flag_manager._feature_flags"""', "{'PRESTO_EXPAND_DATA': True}"], {'clear': '(True)'}), "('superset.extensions.feature_flag_manager._feature_flags',\n {'PRESTO_EXPAND_DATA': True}, clear=True)\n", (7148, 7253), False, 'from unittest import mock, skipUnless\n'), ((8555, 8675), 'unittest.mock.patch.dict', 'mock.patch.dict', (['"""superset.extensions.feature_flag_manager._feature_flags"""', "{'PRESTO_EXPAND_DATA': True}"], {'clear': '(True)'}), "('superset.extensions.feature_flag_manager._feature_flags',\n {'PRESTO_EXPAND_DATA': True}, clear=True)\n", (8570, 8675), False, 'from unittest import mock, skipUnless\n'), ((10238, 10358), 'unittest.mock.patch.dict', 'mock.patch.dict', (['"""superset.extensions.feature_flag_manager._feature_flags"""', "{'PRESTO_EXPAND_DATA': True}"], {'clear': '(True)'}), "('superset.extensions.feature_flag_manager._feature_flags',\n {'PRESTO_EXPAND_DATA': True}, clear=True)\n", (10253, 10358), False, 'from unittest import mock, skipUnless\n'), ((12207, 12327), 'unittest.mock.patch.dict', 'mock.patch.dict', (['"""superset.extensions.feature_flag_manager._feature_flags"""', "{'PRESTO_EXPAND_DATA': True}"], {'clear': '(True)'}), 
"('superset.extensions.feature_flag_manager._feature_flags',\n {'PRESTO_EXPAND_DATA': True}, clear=True)\n", (12222, 12327), False, 'from unittest import mock, skipUnless\n'), ((14241, 14361), 'unittest.mock.patch.dict', 'mock.patch.dict', (['"""superset.extensions.feature_flag_manager._feature_flags"""', "{'PRESTO_EXPAND_DATA': True}"], {'clear': '(True)'}), "('superset.extensions.feature_flag_manager._feature_flags',\n {'PRESTO_EXPAND_DATA': True}, clear=True)\n", (14256, 14361), False, 'from unittest import mock, skipUnless\n'), ((21884, 22004), 'unittest.mock.patch.dict', 'mock.patch.dict', (['"""superset.extensions.feature_flag_manager._feature_flags"""', "{'PRESTO_EXPAND_DATA': True}"], {'clear': '(True)'}), "('superset.extensions.feature_flag_manager._feature_flags',\n {'PRESTO_EXPAND_DATA': True}, clear=True)\n", (21899, 22004), False, 'from unittest import mock, skipUnless\n'), ((25955, 26052), 'unittest.mock.patch', 'mock.patch', (['"""superset.utils.feature_flag_manager.FeatureFlagManager.is_feature_enabled"""'], {}), "(\n 'superset.utils.feature_flag_manager.FeatureFlagManager.is_feature_enabled'\n )\n", (25965, 26052), False, 'from unittest import mock, skipUnless\n'), ((26062, 26136), 'unittest.mock.patch', 'mock.patch', (['"""superset.db_engine_specs.base.BaseEngineSpec.get_table_names"""'], {}), "('superset.db_engine_specs.base.BaseEngineSpec.get_table_names')\n", (26072, 26136), False, 'from unittest import mock, skipUnless\n'), ((26142, 26219), 'unittest.mock.patch', 'mock.patch', (['"""superset.db_engine_specs.presto.PrestoEngineSpec.get_view_names"""'], {}), "('superset.db_engine_specs.presto.PrestoEngineSpec.get_view_names')\n", (26152, 26219), False, 'from unittest import mock, skipUnless\n'), ((26722, 26819), 'unittest.mock.patch', 'mock.patch', (['"""superset.utils.feature_flag_manager.FeatureFlagManager.is_feature_enabled"""'], {}), "(\n 'superset.utils.feature_flag_manager.FeatureFlagManager.is_feature_enabled'\n )\n", (26732, 26819), False, 'from unittest import mock, skipUnless\n'), ((26829, 26903), 'unittest.mock.patch', 'mock.patch', (['"""superset.db_engine_specs.base.BaseEngineSpec.get_table_names"""'], {}), "('superset.db_engine_specs.base.BaseEngineSpec.get_table_names')\n", (26839, 26903), False, 'from unittest import mock, skipUnless\n'), ((26909, 26986), 'unittest.mock.patch', 'mock.patch', (['"""superset.db_engine_specs.presto.PrestoEngineSpec.get_view_names"""'], {}), "('superset.db_engine_specs.presto.PrestoEngineSpec.get_view_names')\n", (26919, 26986), False, 'from unittest import mock, skipUnless\n'), ((27501, 27598), 'unittest.mock.patch', 'mock.patch', (['"""superset.utils.feature_flag_manager.FeatureFlagManager.is_feature_enabled"""'], {}), "(\n 'superset.utils.feature_flag_manager.FeatureFlagManager.is_feature_enabled'\n )\n", (27511, 27598), False, 'from unittest import mock, skipUnless\n'), ((27608, 27682), 'unittest.mock.patch', 'mock.patch', (['"""superset.db_engine_specs.base.BaseEngineSpec.get_table_names"""'], {}), "('superset.db_engine_specs.base.BaseEngineSpec.get_table_names')\n", (27618, 27682), False, 'from unittest import mock, skipUnless\n'), ((27688, 27765), 'unittest.mock.patch', 'mock.patch', (['"""superset.db_engine_specs.presto.PrestoEngineSpec.get_view_names"""'], {}), "('superset.db_engine_specs.presto.PrestoEngineSpec.get_view_names')\n", (27698, 27765), False, 'from unittest import mock, skipUnless\n'), ((30814, 30884), 'unittest.mock.patch', 'mock.patch', 
(['"""superset.db_engine_specs.base.BaseEngineSpec.select_star"""'], {}), "('superset.db_engine_specs.base.BaseEngineSpec.select_star')\n", (30824, 30884), False, 'from unittest import mock, skipUnless\n'), ((31359, 31423), 'unittest.mock.patch', 'mock.patch', (['"""superset.db_engine_specs.presto.is_feature_enabled"""'], {}), "('superset.db_engine_specs.presto.is_feature_enabled')\n", (31369, 31423), False, 'from unittest import mock, skipUnless\n'), ((31429, 31499), 'unittest.mock.patch', 'mock.patch', (['"""superset.db_engine_specs.base.BaseEngineSpec.select_star"""'], {}), "('superset.db_engine_specs.base.BaseEngineSpec.select_star')\n", (31439, 31499), False, 'from unittest import mock, skipUnless\n'), ((1376, 1422), 'tests.integration_tests.db_engine_specs.base_tests.TestDbEngineSpec.is_module_installed', 'TestDbEngineSpec.is_module_installed', (['"""pyhive"""'], {}), "('pyhive')\n", (1412, 1422), False, 'from tests.integration_tests.db_engine_specs.base_tests import TestDbEngineSpec\n'), ((1992, 2008), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (2006, 2008), False, 'from unittest import mock, skipUnless\n'), ((2033, 2092), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {'return_value': "[['a', 'b,', 'c'], ['d', 'e']]"}), "(return_value=[['a', 'b,', 'c'], ['d', 'e']])\n", (2047, 2092), False, 'from unittest import mock, skipUnless\n'), ((2112, 2128), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (2126, 2128), False, 'from unittest import mock, skipUnless\n'), ((2867, 2883), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (2881, 2883), False, 'from unittest import mock, skipUnless\n'), ((2908, 2967), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {'return_value': "[['a', 'b,', 'c'], ['d', 'e']]"}), "(return_value=[['a', 'b,', 'c'], ['d', 'e']])\n", (2922, 2967), False, 'from unittest import mock, skipUnless\n'), ((2987, 3003), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (3001, 3003), False, 'from unittest import mock, skipUnless\n'), ((3709, 3720), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (3718, 3720), False, 'from unittest import mock, skipUnless\n'), ((3793, 3804), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (3802, 3804), False, 'from unittest import mock, skipUnless\n'), ((4058, 4087), 'unittest.mock.Mock', 'mock.Mock', ([], {'return_value': '[row]'}), '(return_value=[row])\n', (4067, 4087), False, 'from unittest import mock, skipUnless\n'), ((4106, 4153), 'superset.db_engine_specs.presto.PrestoEngineSpec.get_columns', 'PrestoEngineSpec.get_columns', (['inspector', '""""""', '""""""'], {}), "(inspector, '', '')\n", (4134, 4153), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((7963, 7997), 'superset.db_engine_specs.presto.PrestoEngineSpec._get_fields', 'PrestoEngineSpec._get_fields', (['cols'], {}), '(cols)\n', (7991, 7997), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((9142, 9182), 'superset.db_engine_specs.presto.PrestoEngineSpec.expand_data', 'PrestoEngineSpec.expand_data', (['cols', 'data'], {}), '(cols, data)\n', (9170, 9182), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((10769, 10809), 'superset.db_engine_specs.presto.PrestoEngineSpec.expand_data', 'PrestoEngineSpec.expand_data', (['cols', 'data'], {}), '(cols, data)\n', (10797, 10809), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((12786, 12826), 'superset.db_engine_specs.presto.PrestoEngineSpec.expand_data', 
'PrestoEngineSpec.expand_data', (['cols', 'data'], {}), '(cols, data)\n', (12814, 12826), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((14942, 14982), 'superset.db_engine_specs.presto.PrestoEngineSpec.expand_data', 'PrestoEngineSpec.expand_data', (['cols', 'data'], {}), '(cols, data)\n', (14970, 14982), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((17759, 17770), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (17768, 17770), False, 'from unittest import mock, skipUnless\n'), ((17796, 17854), 'unittest.mock.Mock', 'mock.Mock', ([], {'return_value': "[{'column_names': ['ds', 'hour']}]"}), "(return_value=[{'column_names': ['ds', 'hour']}])\n", (17805, 17854), False, 'from unittest import mock, skipUnless\n'), ((17878, 17904), 'unittest.mock.Mock', 'mock.Mock', ([], {'return_value': '{}'}), '(return_value={})\n', (17887, 17904), False, 'from unittest import mock, skipUnless\n'), ((17918, 17965), 'pandas.DataFrame', 'pd.DataFrame', (["{'ds': ['01-01-19'], 'hour': [1]}"], {}), "({'ds': ['01-01-19'], 'hour': [1]})\n", (17930, 17965), True, 'import pandas as pd\n'), ((17986, 18012), 'unittest.mock.Mock', 'mock.Mock', ([], {'return_value': 'df'}), '(return_value=df)\n', (17995, 18012), False, 'from unittest import mock, skipUnless\n'), ((18056, 18084), 'unittest.mock.Mock', 'mock.Mock', ([], {'return_value': 'None'}), '(return_value=None)\n', (18065, 18084), False, 'from unittest import mock, skipUnless\n'), ((18102, 18172), 'superset.db_engine_specs.presto.PrestoEngineSpec.extra_table_metadata', 'PrestoEngineSpec.extra_table_metadata', (['db', '"""test_table"""', '"""test_schema"""'], {}), "(db, 'test_table', 'test_schema')\n", (18139, 18172), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((18325, 18336), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (18334, 18336), False, 'from unittest import mock, skipUnless\n'), ((18362, 18420), 'unittest.mock.Mock', 'mock.Mock', ([], {'return_value': "[{'column_names': ['ds', 'hour']}]"}), "(return_value=[{'column_names': ['ds', 'hour']}])\n", (18371, 18420), False, 'from unittest import mock, skipUnless\n'), ((18444, 18470), 'unittest.mock.Mock', 'mock.Mock', ([], {'return_value': '{}'}), '(return_value={})\n', (18453, 18470), False, 'from unittest import mock, skipUnless\n'), ((18484, 18531), 'pandas.DataFrame', 'pd.DataFrame', (["{'ds': ['01-01-19'], 'hour': [1]}"], {}), "({'ds': ['01-01-19'], 'hour': [1]})\n", (18496, 18531), True, 'import pandas as pd\n'), ((18552, 18578), 'unittest.mock.Mock', 'mock.Mock', ([], {'return_value': 'df'}), '(return_value=df)\n', (18561, 18578), False, 'from unittest import mock, skipUnless\n'), ((21518, 21565), 'superset.db_engine_specs.presto.PrestoEngineSpec.query_cost_formatter', 'PrestoEngineSpec.query_cost_formatter', (['raw_cost'], {}), '(raw_cost)\n', (21555, 21565), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((22694, 22734), 'superset.db_engine_specs.presto.PrestoEngineSpec.expand_data', 'PrestoEngineSpec.expand_data', (['cols', 'data'], {}), '(cols, data)\n', (22722, 22734), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((24074, 24122), 'superset.db_engine_specs.presto.PrestoEngineSpec.get_column_spec', 'PrestoEngineSpec.get_column_spec', (['"""varchar(255)"""'], {}), "('varchar(255)')\n", (24106, 24122), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((24336, 24379), 
'superset.db_engine_specs.presto.PrestoEngineSpec.get_column_spec', 'PrestoEngineSpec.get_column_spec', (['"""varchar"""'], {}), "('varchar')\n", (24368, 24379), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((24593, 24637), 'superset.db_engine_specs.presto.PrestoEngineSpec.get_column_spec', 'PrestoEngineSpec.get_column_spec', (['"""char(10)"""'], {}), "('char(10)')\n", (24625, 24637), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((24847, 24887), 'superset.db_engine_specs.presto.PrestoEngineSpec.get_column_spec', 'PrestoEngineSpec.get_column_spec', (['"""char"""'], {}), "('char')\n", (24879, 24887), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((25099, 25142), 'superset.db_engine_specs.presto.PrestoEngineSpec.get_column_spec', 'PrestoEngineSpec.get_column_spec', (['"""integer"""'], {}), "('integer')\n", (25131, 25142), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((25306, 25346), 'superset.db_engine_specs.presto.PrestoEngineSpec.get_column_spec', 'PrestoEngineSpec.get_column_spec', (['"""time"""'], {}), "('time')\n", (25338, 25346), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((25585, 25630), 'superset.db_engine_specs.presto.PrestoEngineSpec.get_column_spec', 'PrestoEngineSpec.get_column_spec', (['"""timestamp"""'], {}), "('timestamp')\n", (25617, 25630), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((25872, 25915), 'superset.db_engine_specs.presto.PrestoEngineSpec.get_sqla_column_type', 'PrestoEngineSpec.get_sqla_column_type', (['None'], {}), '(None)\n', (25909, 25915), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((28353, 28391), 'superset.db_engine_specs.presto.PrestoEngineSpec._get_full_name', 'PrestoEngineSpec._get_full_name', (['names'], {}), '(names)\n', (28384, 28391), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((28642, 28680), 'superset.db_engine_specs.presto.PrestoEngineSpec._get_full_name', 'PrestoEngineSpec._get_full_name', (['names'], {}), '(names)\n', (28673, 28680), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((28810, 28859), 'superset.db_engine_specs.presto.PrestoEngineSpec._split_data_type', 'PrestoEngineSpec._split_data_type', (['data_type', '""" """'], {}), "(data_type, ' ')\n", (28843, 28859), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((28960, 29009), 'superset.db_engine_specs.presto.PrestoEngineSpec._split_data_type', 'PrestoEngineSpec._split_data_type', (['data_type', '""","""'], {}), "(data_type, ',')\n", (28993, 29009), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((29113, 29162), 'superset.db_engine_specs.presto.PrestoEngineSpec._split_data_type', 'PrestoEngineSpec._split_data_type', (['data_type', '""","""'], {}), "(data_type, ',')\n", (29146, 29162), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((29266, 29282), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (29280, 29282), False, 'from unittest import mock, skipUnless\n'), ((29421, 29460), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {'return_value': "['a', 'b']"}), "(return_value=['a', 'b'])\n", (29435, 29460), False, 'from unittest import mock, skipUnless\n'), ((29558, 29617), 'superset.db_engine_specs.presto.PrestoEngineSpec._show_columns', 'PrestoEngineSpec._show_columns', (['inspector', 'table_name', 'None'], {}), '(inspector, table_name, 
None)\n', (29588, 29617), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((29802, 29818), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (29816, 29818), False, 'from unittest import mock, skipUnless\n'), ((29957, 29996), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {'return_value': "['a', 'b']"}), "(return_value=['a', 'b'])\n", (29971, 29996), False, 'from unittest import mock, skipUnless\n'), ((30120, 30181), 'superset.db_engine_specs.presto.PrestoEngineSpec._show_columns', 'PrestoEngineSpec._show_columns', (['inspector', 'table_name', 'schema'], {}), '(inspector, table_name, schema)\n', (30150, 30181), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((30976, 30987), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (30985, 30987), False, 'from unittest import mock, skipUnless\n'), ((31039, 31050), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (31048, 31050), False, 'from unittest import mock, skipUnless\n'), ((31146, 31215), 'superset.db_engine_specs.presto.PrestoEngineSpec.select_star', 'PrestoEngineSpec.select_star', (['database', 'table_name', 'engine'], {'cols': 'cols'}), '(database, table_name, engine, cols=cols)\n', (31174, 31215), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((31679, 31690), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (31688, 31690), False, 'from unittest import mock, skipUnless\n'), ((31742, 31753), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (31751, 31753), False, 'from unittest import mock, skipUnless\n'), ((31992, 32081), 'superset.db_engine_specs.presto.PrestoEngineSpec.select_star', 'PrestoEngineSpec.select_star', (['database', 'table_name', 'engine'], {'show_cols': '(True)', 'cols': 'cols'}), '(database, table_name, engine, show_cols=True,\n cols=cols)\n', (32020, 32081), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((32451, 32467), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (32465, 32467), False, 'from unittest import mock, skipUnless\n'), ((32602, 32687), 'superset.db_engine_specs.presto.PrestoEngineSpec.estimate_statement_cost', 'PrestoEngineSpec.estimate_statement_cost', (['"""SELECT * FROM brth_names"""', 'mock_cursor'], {}), "('SELECT * FROM brth_names',\n mock_cursor)\n", (32642, 32687), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((32827, 32843), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (32841, 32843), False, 'from unittest import mock, skipUnless\n'), ((33121, 33223), 'pandas.DataFrame.from_dict', 'pd.DataFrame.from_dict', (["{'table_schema': ['schema1', 'schema2'], 'table_name': ['name1', 'name2']}"], {}), "({'table_schema': ['schema1', 'schema2'],\n 'table_name': ['name1', 'name2']})\n", (33143, 33223), True, 'import pandas as pd\n'), ((33261, 33277), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (33275, 33277), False, 'from unittest import mock, skipUnless\n'), ((33337, 33397), 'superset.db_engine_specs.presto.PrestoEngineSpec.get_all_datasource_names', 'PrestoEngineSpec.get_all_datasource_names', (['database', '"""table"""'], {}), "(database, 'table')\n", (33378, 33397), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((33659, 33675), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (33673, 33675), False, 'from unittest import mock, skipUnless\n'), ((33700, 33759), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {'return_value': "[['a', 'b,', 'c'], ['d', 
'e']]"}), "(return_value=[['a', 'b,', 'c'], ['d', 'e']])\n", (33714, 33759), False, 'from unittest import mock, skipUnless\n'), ((33779, 33795), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (33793, 33795), False, 'from unittest import mock, skipUnless\n'), ((34291, 34361), 'superset.db_engine_specs.presto.PrestoEngineSpec.get_create_view', 'PrestoEngineSpec.get_create_view', (['database'], {'schema': 'schema', 'table': 'table'}), '(database, schema=schema, table=table)\n', (34323, 34361), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((34603, 34619), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (34617, 34619), False, 'from unittest import mock, skipUnless\n'), ((35121, 35137), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (35135, 35137), False, 'from unittest import mock, skipUnless\n'), ((35346, 35416), 'superset.db_engine_specs.presto.PrestoEngineSpec.get_create_view', 'PrestoEngineSpec.get_create_view', (['database'], {'schema': 'schema', 'table': 'table'}), '(database, schema=schema, table=table)\n', (35378, 35416), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((35519, 35562), 'collections.namedtuple', 'namedtuple', (['"""DatabaseError"""', "['error_dict']"], {}), "('DatabaseError', ['error_dict'])\n", (35529, 35562), False, 'from collections import namedtuple\n'), ((35580, 35667), 'pyhive.exc.DatabaseError', 'DatabaseError', (["{'errorName': 'name', 'errorLocation': 'location', 'message': 'msg'}"], {}), "({'errorName': 'name', 'errorLocation': 'location', 'message':\n 'msg'})\n", (35593, 35667), False, 'from pyhive.exc import DatabaseError\n'), ((35767, 35817), 'superset.db_engine_specs.presto.PrestoEngineSpec._extract_error_message', 'PrestoEngineSpec._extract_error_message', (['exception'], {}), '(exception)\n', (35806, 35817), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((35984, 36025), 'pyhive.exc.DatabaseError', 'DatabaseError', (["{'message': 'Err message'}"], {}), "({'message': 'Err message'})\n", (35997, 36025), False, 'from pyhive.exc import DatabaseError\n'), ((36043, 36093), 'superset.db_engine_specs.presto.PrestoEngineSpec._extract_error_message', 'PrestoEngineSpec._extract_error_message', (['exception'], {}), '(exception)\n', (36082, 36093), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((36256, 36306), 'superset.db_engine_specs.presto.PrestoEngineSpec._extract_error_message', 'PrestoEngineSpec._extract_error_message', (['exception'], {}), '(exception)\n', (36295, 36306), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((1523, 1562), 'superset.db_engine_specs.presto.PrestoEngineSpec.get_datatype', 'PrestoEngineSpec.get_datatype', (['"""string"""'], {}), "('string')\n", (1552, 1562), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((1714, 1775), 'superset.db_engine_specs.presto.PrestoEngineSpec.get_view_names', 'PrestoEngineSpec.get_view_names', (['mock.ANY', 'mock.ANY', 'mock.ANY'], {}), '(mock.ANY, mock.ANY, mock.ANY)\n', (1745, 1775), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((2472, 2483), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (2481, 2483), False, 'from unittest import mock, skipUnless\n'), ((3373, 3384), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (3382, 3384), False, 'from unittest import mock, skipUnless\n'), ((3970, 3981), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (3979, 3981), 
False, 'from unittest import mock, skipUnless\n'), ((18735, 18743), 'sqlalchemy.sql.select', 'select', ([], {}), '()\n', (18741, 18743), False, 'from sqlalchemy.sql import select\n'), ((19037, 19080), 'superset.db_engine_specs.presto.PrestoEngineSpec.convert_dttm', 'PrestoEngineSpec.convert_dttm', (['"""DATE"""', 'dttm'], {}), "('DATE', dttm)\n", (19066, 19080), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((19178, 19226), 'superset.db_engine_specs.presto.PrestoEngineSpec.convert_dttm', 'PrestoEngineSpec.convert_dttm', (['"""TIMESTAMP"""', 'dttm'], {}), "('TIMESTAMP', dttm)\n", (19207, 19226), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((26647, 26658), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (26656, 26658), False, 'from unittest import mock, skipUnless\n'), ((26660, 26671), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (26669, 26671), False, 'from unittest import mock, skipUnless\n'), ((27410, 27421), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (27419, 27421), False, 'from unittest import mock, skipUnless\n'), ((27423, 27434), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (27432, 27434), False, 'from unittest import mock, skipUnless\n'), ((28147, 28158), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (28156, 28158), False, 'from unittest import mock, skipUnless\n'), ((28160, 28171), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (28169, 28171), False, 'from unittest import mock, skipUnless\n'), ((30420, 30472), 'superset.db_engine_specs.presto.PrestoEngineSpec._is_column_name_quoted', 'PrestoEngineSpec._is_column_name_quoted', (['column_name'], {}), '(column_name)\n', (30459, 30472), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((30528, 30580), 'superset.db_engine_specs.presto.PrestoEngineSpec._is_column_name_quoted', 'PrestoEngineSpec._is_column_name_quoted', (['column_name'], {}), '(column_name)\n', (30567, 30580), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((30637, 30689), 'superset.db_engine_specs.presto.PrestoEngineSpec._is_column_name_quoted', 'PrestoEngineSpec._is_column_name_quoted', (['column_name'], {}), '(column_name)\n', (30676, 30689), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((30747, 30799), 'superset.db_engine_specs.presto.PrestoEngineSpec._is_column_name_quoted', 'PrestoEngineSpec._is_column_name_quoted', (['column_name'], {}), '(column_name)\n', (30786, 30799), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((32953, 33031), 'superset.db_engine_specs.presto.PrestoEngineSpec.estimate_statement_cost', 'PrestoEngineSpec.estimate_statement_cost', (['"""DROP TABLE brth_names"""', 'mock_cursor'], {}), "('DROP TABLE brth_names', mock_cursor)\n", (32993, 33031), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((33438, 33485), 'superset.utils.core.DatasourceName', 'DatasourceName', ([], {'schema': '"""schema1"""', 'table': '"""name1"""'}), "(schema='schema1', table='name1')\n", (33452, 33485), False, 'from superset.utils.core import DatasourceName, GenericDataType\n'), ((33499, 33546), 'superset.utils.core.DatasourceName', 'DatasourceName', ([], {'schema': '"""schema2"""', 'table': '"""name2"""'}), "(schema='schema2', table='name2')\n", (33513, 33546), False, 'from superset.utils.core import DatasourceName, GenericDataType\n'), ((34866, 34936), 'superset.db_engine_specs.presto.PrestoEngineSpec.get_create_view', 
'PrestoEngineSpec.get_create_view', (['database'], {'schema': 'schema', 'table': 'table'}), '(database, schema=schema, table=table)\n', (34898, 34936), False, 'from superset.db_engine_specs.presto import PrestoEngineSpec\n'), ((44116, 44132), 'superset.sql_parse.ParsedQuery', 'ParsedQuery', (['sql'], {}), '(sql)\n', (44127, 44132), False, 'from superset.sql_parse import ParsedQuery\n'), ((35085, 35100), 'pyhive.exc.DatabaseError', 'DatabaseError', ([], {}), '()\n', (35098, 35100), False, 'from pyhive.exc import DatabaseError\n'), ((36516, 36781), 'superset.errors.SupersetError', 'SupersetError', ([], {'message': '"""Generic Error"""', 'error_type': 'SupersetErrorType.GENERIC_DB_ENGINE_ERROR', 'level': 'ErrorLevel.ERROR', 'extra': "{'engine_name': 'Presto', 'issue_codes': [{'code': 1002, 'message':\n 'Issue 1002 - The database returned an unexpected error.'}]}"}), "(message='Generic Error', error_type=SupersetErrorType.\n GENERIC_DB_ENGINE_ERROR, level=ErrorLevel.ERROR, extra={'engine_name':\n 'Presto', 'issue_codes': [{'code': 1002, 'message':\n 'Issue 1002 - The database returned an unexpected error.'}]})\n", (36529, 36781), False, 'from superset.errors import ErrorLevel, SupersetError, SupersetErrorType\n'), ((37211, 37678), 'superset.errors.SupersetError', 'SupersetError', ([], {'message': '"""We can\'t seem to resolve the column "bogus" at line 1:8."""', 'error_type': 'SupersetErrorType.COLUMN_DOES_NOT_EXIST_ERROR', 'level': 'ErrorLevel.ERROR', 'extra': "{'engine_name': 'Presto', 'issue_codes': [{'code': 1003, 'message':\n 'Issue 1003 - There is a syntax error in the SQL query. Perhaps there was a misspelling or a typo.'\n }, {'code': 1004, 'message':\n 'Issue 1004 - The column was deleted or renamed in the database.'}]}"}), '(message=\n \'We can\\\'t seem to resolve the column "bogus" at line 1:8.\', error_type\n =SupersetErrorType.COLUMN_DOES_NOT_EXIST_ERROR, level=ErrorLevel.ERROR,\n extra={\'engine_name\': \'Presto\', \'issue_codes\': [{\'code\': 1003,\n \'message\':\n \'Issue 1003 - There is a syntax error in the SQL query. Perhaps there was a misspelling or a typo.\'\n }, {\'code\': 1004, \'message\':\n \'Issue 1004 - The column was deleted or renamed in the database.\'}]})\n', (37224, 37678), False, 'from superset.errors import ErrorLevel, SupersetError, SupersetErrorType\n'), ((38206, 38711), 'superset.errors.SupersetError', 'SupersetError', ([], {'message': '"""The table "\'tpch.tiny.region2\'" does not exist. A valid table must be used to run this query."""', 'error_type': 'SupersetErrorType.TABLE_DOES_NOT_EXIST_ERROR', 'level': 'ErrorLevel.ERROR', 'extra': "{'engine_name': 'Presto', 'issue_codes': [{'code': 1003, 'message':\n 'Issue 1003 - There is a syntax error in the SQL query. Perhaps there was a misspelling or a typo.'\n }, {'code': 1005, 'message':\n 'Issue 1005 - The table was deleted or renamed in the database.'}]}"}), '(message=\n \'The table "\\\'tpch.tiny.region2\\\'" does not exist. A valid table must be used to run this query.\'\n , error_type=SupersetErrorType.TABLE_DOES_NOT_EXIST_ERROR, level=\n ErrorLevel.ERROR, extra={\'engine_name\': \'Presto\', \'issue_codes\': [{\n \'code\': 1003, \'message\':\n \'Issue 1003 - There is a syntax error in the SQL query. Perhaps there was a misspelling or a typo.\'\n }, {\'code\': 1005, \'message\':\n \'Issue 1005 - The table was deleted or renamed in the database.\'}]})\n', (38219, 38711), False, 'from superset.errors import ErrorLevel, SupersetError, SupersetErrorType\n'), ((39224, 39715), 'superset.errors.SupersetError', 'SupersetError', ([], {'message': '"""The schema "tin" does not exist. A valid schema must be used to run this query."""', 'error_type': 'SupersetErrorType.SCHEMA_DOES_NOT_EXIST_ERROR', 'level': 'ErrorLevel.ERROR', 'extra': "{'engine_name': 'Presto', 'issue_codes': [{'code': 1003, 'message':\n 'Issue 1003 - There is a syntax error in the SQL query. Perhaps there was a misspelling or a typo.'\n }, {'code': 1016, 'message':\n 'Issue 1005 - The schema was deleted or renamed in the database.'}]}"}), '(message=\n \'The schema "tin" does not exist. A valid schema must be used to run this query.\'\n , error_type=SupersetErrorType.SCHEMA_DOES_NOT_EXIST_ERROR, level=\n ErrorLevel.ERROR, extra={\'engine_name\': \'Presto\', \'issue_codes\': [{\n \'code\': 1003, \'message\':\n \'Issue 1003 - There is a syntax error in the SQL query. Perhaps there was a misspelling or a typo.\'\n }, {\'code\': 1016, \'message\':\n \'Issue 1005 - The schema was deleted or renamed in the database.\'}]})\n', (39237, 39715), False, 'from superset.errors import ErrorLevel, SupersetError, SupersetErrorType\n'), ((40248, 40573), 'superset.errors.SupersetError', 'SupersetError', ([], {'message': '"""Either the username "alice" or the password is incorrect."""', 'error_type': 'SupersetErrorType.CONNECTION_ACCESS_DENIED_ERROR', 'level': 'ErrorLevel.ERROR', 'extra': "{'engine_name': 'Presto', 'issue_codes': [{'code': 1014, 'message':\n 'Issue 1014 - Either the username or the password is wrong.'}]}"}), '(message=\n \'Either the username "alice" or the password is incorrect.\', error_type\n =SupersetErrorType.CONNECTION_ACCESS_DENIED_ERROR, level=ErrorLevel.\n ERROR, extra={\'engine_name\': \'Presto\', \'issue_codes\': [{\'code\': 1014,\n \'message\': \'Issue 1014 - Either the username or the password is wrong.\'}]})\n', (40261, 40573), False, 'from superset.errors import ErrorLevel, SupersetError, SupersetErrorType\n'), ((41093, 41400), 'superset.errors.SupersetError', 'SupersetError', ([], {'message': '"""The hostname "badhost" cannot be resolved."""', 'error_type': 'SupersetErrorType.CONNECTION_INVALID_HOSTNAME_ERROR', 'level': 'ErrorLevel.ERROR', 'extra': '{\'engine_name\': \'Presto\', \'issue_codes\': [{\'code\': 1007, \'message\':\n "Issue 1007 - The hostname provided can\'t be resolved."}]}'}), '(message=\'The hostname "badhost" cannot be resolved.\',\n error_type=SupersetErrorType.CONNECTION_INVALID_HOSTNAME_ERROR, level=\n ErrorLevel.ERROR, extra={\'engine_name\': \'Presto\', \'issue_codes\': [{\n \'code\': 1007, \'message\':\n "Issue 1007 - The hostname provided can\'t be resolved."}]})\n', (41106, 41400), False, 'from superset.errors import ErrorLevel, SupersetError, SupersetErrorType\n'), ((41912, 42276), 'superset.errors.SupersetError', 'SupersetError', ([], {'message': '"""The host "badhost" might be down, and can\'t be reached on port 12345."""', 'error_type': 'SupersetErrorType.CONNECTION_HOST_DOWN_ERROR', 'level': 'ErrorLevel.ERROR', 'extra': '{\'engine_name\': \'Presto\', \'issue_codes\': [{\'code\': 1009, \'message\':\n "Issue 1009 - The host might be down, and can\'t be reached on the provided port."\n }]}'}), '(message=\n \'The host "badhost" might be down, and can\\\'t be reached on port 12345.\',\n error_type=SupersetErrorType.CONNECTION_HOST_DOWN_ERROR, level=\n ErrorLevel.ERROR, extra={\'engine_name\': \'Presto\', \'issue_codes\': [{\n \'code\': 1009, \'message\':\n "Issue 1009 - The host might be down, and can\'t be reached on the provided port."\n }]})\n', (41925, 42276), False, 'from superset.errors import ErrorLevel, SupersetError, SupersetErrorType\n'), ((42777, 43072), 'superset.errors.SupersetError', 'SupersetError', ([], {'message': '"""Port 12345 on hostname "badhost" refused the connection."""', 'error_type': 'SupersetErrorType.CONNECTION_PORT_CLOSED_ERROR', 'level': 'ErrorLevel.ERROR', 'extra': "{'engine_name': 'Presto', 'issue_codes': [{'code': 1008, 'message':\n 'Issue 1008 - The port is closed.'}]}"}), '(message=\n \'Port 12345 on hostname "badhost" refused the connection.\', error_type=\n SupersetErrorType.CONNECTION_PORT_CLOSED_ERROR, level=ErrorLevel.ERROR,\n extra={\'engine_name\': \'Presto\', \'issue_codes\': [{\'code\': 1008,\n \'message\': \'Issue 1008 - The port is closed.\'}]})\n', (42790, 43072), False, 'from superset.errors import ErrorLevel, SupersetError, SupersetErrorType\n'), ((43412, 43746), 'superset.errors.SupersetError', 'SupersetError', ([], {'message': '"""Unable to connect to catalog named "wrong"."""', 'error_type': 'SupersetErrorType.CONNECTION_UNKNOWN_DATABASE_ERROR', 'level': 'ErrorLevel.ERROR', 'extra': "{'engine_name': 'Presto', 'issue_codes': [{'code': 1015, 'message':\n 'Issue 1015 - Either the database is spelled incorrectly or does not exist.'\n }]}"}), '(message=\'Unable to connect to catalog named "wrong".\',\n error_type=SupersetErrorType.CONNECTION_UNKNOWN_DATABASE_ERROR, level=\n ErrorLevel.ERROR, extra={\'engine_name\': \'Presto\', \'issue_codes\': [{\n \'code\': 1015, \'message\':\n \'Issue 1015 - Either the database is spelled incorrectly or does not exist.\'\n }]})\n', (43425, 43746), False, 'from superset.errors import ErrorLevel, SupersetError, SupersetErrorType\n')]
|
from flask import Flask
from service import service
from service_label import service_label
from service_nodeselector import service_nodeselector
from service_schedulehint import service_schedulehint
from hpa import hpa
from vpa import vpa
from limitrange import limitrange
from serviceinfo import serviceinfo
from imageinfo import imageinfo
from node import node
from namespace import namespace
from utility import utility
app = Flask(__name__, static_url_path='/static')
app.register_blueprint(utility, url_prefix='/gse/utility')
app.register_blueprint(service, url_prefix='/gse/service')
app.register_blueprint(service_label, url_prefix='/gse/service/label')
app.register_blueprint(service_nodeselector, url_prefix='/gse/service/nodeselector')
app.register_blueprint(service_schedulehint, url_prefix='/gse/service/schedulehint')
app.register_blueprint(hpa, url_prefix='/gse/hpa')
app.register_blueprint(vpa, url_prefix='/gse/vpa')
app.register_blueprint(limitrange, url_prefix='/gse/limitrange')
app.register_blueprint(serviceinfo, url_prefix='/gse/serviceinfo')
app.register_blueprint(imageinfo, url_prefix='/gse/imageinfo')
app.register_blueprint(node, url_prefix='/gse/node')
app.register_blueprint(namespace, url_prefix='/gse/namespace')
if __name__ == '__main__':
app.run(host='0.0.0.0', port=8888)
'''
How to run:
gunicorn app:app --bind=0.0.0.0:8888 --daemon --reload
--daemon: run as a daemon process
--reload: restart when source files change
'''
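# Hedged sketch, not part of the original app: the imported blueprints
# (e.g. `utility`) are assumed to be ordinary Flask Blueprints along these
# lines; the route and function names below are illustrative only.
#
# from flask import Blueprint, jsonify
# utility = Blueprint('utility', __name__)
#
# @utility.route('/health')
# def health():
#     return jsonify(status='ok')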
|
[
"flask.Flask"
] |
[((431, 473), 'flask.Flask', 'Flask', (['__name__'], {'static_url_path': '"""/static"""'}), "(__name__, static_url_path='/static')\n", (436, 473), False, 'from flask import Flask\n')]
|
import itertools
from amara3.uxml.tree import treebuilder, element
from amara3.util import coroutine
doc = '<a>1<aa a="1">2<aaa>3</aaa>4<aab>5</aab>6</aa>7<ab a="2">8</ab></a>'
tb = treebuilder()
root = tb.parse(doc)
def descendants(elem):
for child in elem.xml_children:
yield child
if isinstance(child, element):
yield from descendants(child)
print('descendants')
for e in descendants(root):
print (e)
def select_elements(source):
if isinstance(source, element):
source = source.xml_children
return filter(lambda x: isinstance(x, element), source)
def select_name(source, name):
return filter(lambda x: x.xml_name == name, select_elements(source))
def select_name_pattern(source, pat):
return filter(lambda x: pat.match(x.xml_name) is not None, select_elements(source))
print('select_name')
for e in select_name(descendants(root), 'aaa'):
print (e)
print('select_name_pattern')
import re
HAS_B_PAT = re.compile('.*b.*')
for e in select_name_pattern(descendants(root), HAS_B_PAT):
print (e)
def select_value(source, val):
if isinstance(source, element):
source = source.xml_children
return filter(lambda x: x.xml_value == val, source)
print('select_value')
for e in select_value(descendants(root), '3'):
print (e)
def select_attribute(source, name, val=None):
def check(x):
if val is None:
return name in x.xml_attributes
else:
return name in x.xml_attributes and x.xml_attributes[name] == val
return filter(check, select_elements(source))
print('select_attribute 1')
for e in select_attribute(descendants(root), 'a'): print (e)
print('select_attribute 2')
for e in select_attribute(descendants(root), 'a', '1'): print (e)
def following_siblings(e):
it = itertools.dropwhile(lambda x: x != e, e.xml_parent.xml_children)
next(it) #Skip the element itself
return it
print('following_siblings')
for e in following_siblings(next(select_name(descendants(root), 'aa'))): print (e)
print('etc.')
print (root.xml_value)
for e in itertools.takewhile(
lambda x: x.xml_name != 'aaa',
filter(
lambda x: isinstance(x, element),
descendants(root)
)
):
print (e)
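# Hedged extra example (not in the original script): each selector above
# returns a lazy iterator, so they compose freely. This picks 'aa' elements
# that also carry an 'a' attribute, reusing only functions defined above.
print('composed selectors')
for e in select_attribute(select_name(descendants(root), 'aa'), 'a'):
    print (e)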
|
[
"amara3.uxml.tree.treebuilder",
"itertools.dropwhile",
"re.compile"
] |
[((183, 196), 'amara3.uxml.tree.treebuilder', 'treebuilder', ([], {}), '()\n', (194, 196), False, 'from amara3.uxml.tree import treebuilder, element\n'), ((980, 999), 're.compile', 're.compile', (['""".*b.*"""'], {}), "('.*b.*')\n", (990, 999), False, 'import re\n'), ((1827, 1891), 'itertools.dropwhile', 'itertools.dropwhile', (['(lambda x: x != e)', 'e.xml_parent.xml_children'], {}), '(lambda x: x != e, e.xml_parent.xml_children)\n', (1846, 1891), False, 'import itertools\n')]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2019-02-28 20:19
from __future__ import unicode_literals
import contentcuration.models
from django.conf import settings
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import uuid
class Migration(migrations.Migration):
dependencies = [
('contentcuration', '0096_merge_20181222_0008'),
]
operations = [
migrations.CreateModel(
name='Task',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('task_type', models.CharField(max_length=50)),
('created', models.DateTimeField(default=django.utils.timezone.now)),
('status', models.CharField(max_length=10)),
('is_progress_tracking', models.BooleanField(default=False)),
('metadata', django.contrib.postgres.fields.jsonb.JSONField()),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='task', to=settings.AUTH_USER_MODEL)),
('task_id', contentcuration.models.UUIDField(db_index=True, default=uuid.uuid4, max_length=32)),
],
),
migrations.AlterField(
model_name='formatpreset',
name='id',
field=models.CharField(choices=[(b'high_res_video', b'High Resolution'), (b'low_res_video', b'Low Resolution'), (b'video_thumbnail', b'Thumbnail'), (b'video_subtitle', b'Subtitle'), (b'video_dependency', b'Video (dependency)'), (b'audio', b'Audio'), (b'audio_thumbnail', b'Thumbnail'), (b'document', b'Document'), (b'epub', b'ePub Document'), (b'document_thumbnail', b'Thumbnail'), (b'exercise', b'Exercise'), (b'exercise_thumbnail', b'Thumbnail'), (b'exercise_image', b'Exercise Image'), (b'exercise_graphie', b'Exercise Graphie'), (b'channel_thumbnail', b'Channel Thumbnail'), (b'topic_thumbnail', b'Thumbnail'), (b'html5_zip', b'HTML5 Zip'), (b'html5_dependency', b'HTML5 Dependency (Zip format)'), (b'html5_thumbnail', b'HTML5 Thumbnail')], max_length=150, primary_key=True, serialize=False),
),
]
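# Hedged usage note (not part of the generated file): a migration like this
# is applied with Django's standard management command, e.g.
#   python manage.py migrate contentcuration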
|
[
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.db.models.BooleanField",
"django.db.models.AutoField",
"django.db.models.DateTimeField"
] |
[((1434, 2271), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[(b'high_res_video', b'High Resolution'), (b'low_res_video',\n b'Low Resolution'), (b'video_thumbnail', b'Thumbnail'), (\n b'video_subtitle', b'Subtitle'), (b'video_dependency',\n b'Video (dependency)'), (b'audio', b'Audio'), (b'audio_thumbnail',\n b'Thumbnail'), (b'document', b'Document'), (b'epub', b'ePub Document'),\n (b'document_thumbnail', b'Thumbnail'), (b'exercise', b'Exercise'), (\n b'exercise_thumbnail', b'Thumbnail'), (b'exercise_image',\n b'Exercise Image'), (b'exercise_graphie', b'Exercise Graphie'), (\n b'channel_thumbnail', b'Channel Thumbnail'), (b'topic_thumbnail',\n b'Thumbnail'), (b'html5_zip', b'HTML5 Zip'), (b'html5_dependency',\n b'HTML5 Dependency (Zip format)'), (b'html5_thumbnail', b'HTML5 Thumbnail')\n ]", 'max_length': '(150)', 'primary_key': '(True)', 'serialize': '(False)'}), "(choices=[(b'high_res_video', b'High Resolution'), (\n b'low_res_video', b'Low Resolution'), (b'video_thumbnail', b'Thumbnail'\n ), (b'video_subtitle', b'Subtitle'), (b'video_dependency',\n b'Video (dependency)'), (b'audio', b'Audio'), (b'audio_thumbnail',\n b'Thumbnail'), (b'document', b'Document'), (b'epub', b'ePub Document'),\n (b'document_thumbnail', b'Thumbnail'), (b'exercise', b'Exercise'), (\n b'exercise_thumbnail', b'Thumbnail'), (b'exercise_image',\n b'Exercise Image'), (b'exercise_graphie', b'Exercise Graphie'), (\n b'channel_thumbnail', b'Channel Thumbnail'), (b'topic_thumbnail',\n b'Thumbnail'), (b'html5_zip', b'HTML5 Zip'), (b'html5_dependency',\n b'HTML5 Dependency (Zip format)'), (b'html5_thumbnail',\n b'HTML5 Thumbnail')], max_length=150, primary_key=True, serialize=False)\n", (1450, 2271), False, 'from django.db import migrations, models\n'), ((583, 676), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (599, 676), False, 'from django.db import migrations, models\n'), ((705, 736), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (721, 736), False, 'from django.db import migrations, models\n'), ((767, 822), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'django.utils.timezone.now'}), '(default=django.utils.timezone.now)\n', (787, 822), False, 'from django.db import migrations, models\n'), ((852, 883), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(10)'}), '(max_length=10)\n', (868, 883), False, 'from django.db import migrations, models\n'), ((927, 961), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (946, 961), False, 'from django.db import migrations, models\n'), ((1069, 1186), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""task"""', 'to': 'settings.AUTH_USER_MODEL'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='task', to=settings.AUTH_USER_MODEL)\n", (1086, 1186), False, 'from django.db import migrations, models\n')]
|
# Copyright 2015 <NAME> <<EMAIL>>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Parse async client.
"""
import asyncio
import json
from .. import base
class ParsePushService(base.BasePushService):
"""Push notifications sender, which uses `Parse`_ service.
Args:
loop: asyncio event loop or Tornado IOLoop
app_id: Parse application ID
api_key: Parse Rest API key for application
.. _Parse: https://parse.com
"""
base_url = 'https://api.parse.com/1/'
def __init__(self, *, loop, app_id, api_key, gcm_sender_id):
super().__init__(loop=loop)
self._http = self.new_http_client()
self._headers = {
'X-Parse-Application-Id': app_id,
'X-Parse-REST-API-Key': api_key,
'Content-Type': 'application/json'
}
self._gcm_sender_id = gcm_sender_id and str(gcm_sender_id)
@asyncio.coroutine
def add_target(self, *, token, device_type, tags=None):
"""Register device token at `Parse`_ service.
See :meth:`.BasePushService.add_target` method docs for
parameters reference.
.. _Parse: https://parse.com
"""
url = ''.join((self.base_url, 'installations'))
data = {
'deviceType': device_type,
'deviceToken': token,
'channels': tags if tags else [],
}
if device_type == 'android':
if self._gcm_sender_id:
data.update({
'pushType': 'gcm',
'GCMSenderId': self._gcm_sender_id,
})
else:
raise ValueError("Provide `gcm_sender_id` to enable "
"Android devices support.")
return (yield from self._http.post(url, data=json.dumps(data),
headers=self._headers))
@asyncio.coroutine
def send_push(self, *, alert, device_type, token=None, tags=None,
badge=None, sound=None, title=None):
"""Send push notification via `Parse`_ service.
See :meth:`.BasePushService.send_push` method docs
for parameters reference.
.. _Parse: https://parse.com
"""
url = ''.join((self.base_url, 'push'))
data = {'data': {'alert': alert}}
# Message
msg = data['data']
if title:
msg['title'] = title
if sound:
msg['sound'] = sound
if badge:
msg['badge'] = badge
# Device query
if token:
data['where'] = {'deviceToken': token}
elif tags:
data['channels'] = tags
else:
data['where'] = {'deviceType': device_type}
# Request
return (yield from self._http.post(url, data=json.dumps(data),
headers=self._headers))
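# Hedged usage sketch (not part of the original module); 'APP_ID'/'API_KEY'
# are placeholders, and `new_http_client` is assumed to be provided by
# `base.BasePushService`, as the constructor above implies.
#
# loop = asyncio.get_event_loop()
# service = ParsePushService(loop=loop, app_id='APP_ID',
#                           api_key='API_KEY', gcm_sender_id=None)
# loop.run_until_complete(service.send_push(
#     alert='Hello!', device_type='ios', tags=['news']))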
|
[
"json.dumps"
] |
[((2295, 2311), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (2305, 2311), False, 'import json\n'), ((3307, 3323), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (3317, 3323), False, 'import json\n')]
|
# -*- coding: utf-8 -*-
"""
Created on Wed Apr 25 15:34:50 2018
@author: kncv078
"""
def do_maths(assay, fp, j, m):
print('CV Fold: {}'.format(j))
df = pd.read_csv(file_path.format(assay,assay,fp,j), sep='\t').sort_values('PredProb(A)', ascending=False).reset_index(drop=True)
fpr1, tpr1, thresholds1 = metrics.roc_curve(df['label'], df['PredProb(A)'], pos_label='A')
roc_auc1 = metrics.auc(fpr1, tpr1)
roc_auc1 = round(roc_auc1,3)
prf1 = metrics.precision_recall_fscore_support(df['label'], df['pred'])
CM1 = metrics.confusion_matrix(df['label'], df['pred'])
kappa1 = metrics.cohen_kappa_score(df['label'], df['pred'])
MCC = metrics.matthews_corrcoef(df['label'], df['pred'])
A_count1 = prf1[3][0]
N_count1 = prf1[3][1]
P1 = prf1[0][0]
R1 = prf1[1][0]
F11 = prf1[2][0]
tp1 = CM1[0][0]
fn1 = CM1[0][1]
fp1 = CM1[1][0]
tn1 = CM1[1][1]
measures = [fp, assay, j, roc_auc1, MCC, kappa1, P1, R1, F11, A_count1, N_count1, tp1, fn1, fp1, tn1]
with open(outdir, 'a') as outf:
outf.write('{}'.format(m))
for val in measures:
outf.write('\t{}'.format(val))
outf.write('\n')
#import os
import time
import pandas as pd
from sklearn import metrics
start = time.time()
#input data dir
file_path = 'C:/CESFP_project/CrossValidation/Assay_{}/{}_{}_cvfold{}_predictions.txt'
sel_assays = ['522', '527', '555', '560', '746', '798', '1006', '1273', '1515', '2129', '2280', '2540', '2544', '2553',
'2606', '463104', '504406', '504454', '504812', '588497', '602363', '623901', '624414', '686964', '720700']
#sel_assays = ['527']
outdir = 'CrossValidation/All_metrics.csv'
cvflds = 6
##load list of assays
#assay_list=[]
#with open(sel8_IDs, 'r') as f:
# for line in f:
# assay_list.append(line.strip())
#print(assay_list)
fp_types = ['htsfp', 'ecfp', 'cesfp']
#colors_list = ['orange','royalblue','forestgreen']
m = 0
#Metric_dict = {}
headers = ['idx','fptype', 'assay', 'CV run','roc_auc', 'MCC', 'kappa', 'Precision', 'Recall', 'F1', 'A_count', 'N_count', 'tp', 'fn', 'fp', 'tn']
with open(outdir, 'w') as outf:
outf.write('{}\n'.format('\t'.join(headers)))
for fp in fp_types:
print('Doing FP: {}'.format(fp))
for assay in sel_assays:
print('Assay: {}'.format(assay))
for j in range(1,cvflds+1):
do_maths(assay, fp, j, m)
# Metric_dict[m] = [fp, assay, j, roc_auc1, MCC, kappa1, P1, R1, F11, A_count1, N_count1, tp1, fn1, fp1, tn1]
m += 1
#print('saving dataframes containing all scores')
#df = pd.DataFrame.from_dict(Metric_dict, orient='index')
#df.columns = ['fptype', 'assay', 'CV run','roc_auc', 'MCC', 'kappa', 'Precision', 'Recall', 'F1', 'A_count', 'N_count', 'tp', 'fn', 'fp', 'tn']
##dfav = df.mean()
#df.to_csv(outdir+'All_metrics.csv', sep='\t')
#dfav.to_csv(outdir+'{}_{}_metrics_average.csv'.format(fp,assay,j), sep='\t')
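# Hedged follow-up sketch (not in the original script): the per-fold rows
# written above can be averaged per fingerprint/assay pair with pandas,
# mirroring the commented-out aggregation idea above.
# df_all = pd.read_csv(outdir, sep='\t')
# print(df_all.groupby(['fptype', 'assay'])[['roc_auc', 'MCC', 'F1']].mean())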
|
[
"sklearn.metrics.roc_curve",
"time.time",
"sklearn.metrics.auc",
"sklearn.metrics.cohen_kappa_score",
"sklearn.metrics.matthews_corrcoef",
"sklearn.metrics.confusion_matrix",
"sklearn.metrics.precision_recall_fscore_support"
] |
[((1307, 1318), 'time.time', 'time.time', ([], {}), '()\n', (1316, 1318), False, 'import time\n'), ((333, 397), 'sklearn.metrics.roc_curve', 'metrics.roc_curve', (["df['label']", "df['PredProb(A)']"], {'pos_label': '"""A"""'}), "(df['label'], df['PredProb(A)'], pos_label='A')\n", (350, 397), False, 'from sklearn import metrics\n'), ((414, 437), 'sklearn.metrics.auc', 'metrics.auc', (['fpr1', 'tpr1'], {}), '(fpr1, tpr1)\n', (425, 437), False, 'from sklearn import metrics\n'), ((484, 548), 'sklearn.metrics.precision_recall_fscore_support', 'metrics.precision_recall_fscore_support', (["df['label']", "df['pred']"], {}), "(df['label'], df['pred'])\n", (523, 548), False, 'from sklearn import metrics\n'), ((560, 609), 'sklearn.metrics.confusion_matrix', 'metrics.confusion_matrix', (["df['label']", "df['pred']"], {}), "(df['label'], df['pred'])\n", (584, 609), False, 'from sklearn import metrics\n'), ((624, 674), 'sklearn.metrics.cohen_kappa_score', 'metrics.cohen_kappa_score', (["df['label']", "df['pred']"], {}), "(df['label'], df['pred'])\n", (649, 674), False, 'from sklearn import metrics\n'), ((686, 736), 'sklearn.metrics.matthews_corrcoef', 'metrics.matthews_corrcoef', (["df['label']", "df['pred']"], {}), "(df['label'], df['pred'])\n", (711, 736), False, 'from sklearn import metrics\n')]
|
from django.contrib import admin
from .email import EmailAdmin
from .instrument import InstrumentAdmin
from .request import RequestAdmin
from .slot import SlotAdmin
from .user import CustomUserAdmin, FacultyAdmin, StudentAdmin
from .announcement import AnnouncementAdmin
from ..models import (Announcement, CustomUser, EmailModel, Faculty,
Instrument, LabAssistant, Request, Slot, Student)
from ..models.instrument.requests import *
admin.site.register(Student, StudentAdmin)
admin.site.register(Faculty, FacultyAdmin)
admin.site.register(EmailModel, EmailAdmin)
admin.site.register(LabAssistant, CustomUserAdmin)
admin.site.register(Instrument, InstrumentAdmin)
admin.site.register(Request, RequestAdmin)
admin.site.register(Slot, SlotAdmin)
admin.site.register(CustomUser, CustomUserAdmin)
admin.site.register(UserDetail)
admin.site.register(FTIR)
admin.site.register(FESEM)
admin.site.register(LCMS)
admin.site.register(TCSPC)
admin.site.register(Rheometer)
admin.site.register(AAS)
admin.site.register(TGA)
admin.site.register(BET)
admin.site.register(CDSpectrophotometer)
admin.site.register(LSCM)
admin.site.register(DSC)
admin.site.register(GC)
admin.site.register(EDXRF)
admin.site.register(HPLC)
admin.site.register(HPLC_FD)
admin.site.register(NMR)
admin.site.register(PXRD)
admin.site.register(SAXS_WAXS)
admin.site.register(SCXRD)
admin.site.register(XPS)
admin.site.register(UVSpectrophotometer)
admin.site.register(UTM)
admin.site.register(Announcement, AnnouncementAdmin)
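# Hedged sketch (not part of the original module): the *Admin classes
# imported above are assumed to be ordinary ModelAdmin subclasses, e.g.
# class AnnouncementAdmin(admin.ModelAdmin):
#     list_display = ('__str__',)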
|
[
"django.contrib.admin.site.register"
] |
[((457, 499), 'django.contrib.admin.site.register', 'admin.site.register', (['Student', 'StudentAdmin'], {}), '(Student, StudentAdmin)\n', (476, 499), False, 'from django.contrib import admin\n'), ((500, 542), 'django.contrib.admin.site.register', 'admin.site.register', (['Faculty', 'FacultyAdmin'], {}), '(Faculty, FacultyAdmin)\n', (519, 542), False, 'from django.contrib import admin\n'), ((543, 586), 'django.contrib.admin.site.register', 'admin.site.register', (['EmailModel', 'EmailAdmin'], {}), '(EmailModel, EmailAdmin)\n', (562, 586), False, 'from django.contrib import admin\n'), ((587, 637), 'django.contrib.admin.site.register', 'admin.site.register', (['LabAssistant', 'CustomUserAdmin'], {}), '(LabAssistant, CustomUserAdmin)\n', (606, 637), False, 'from django.contrib import admin\n'), ((638, 686), 'django.contrib.admin.site.register', 'admin.site.register', (['Instrument', 'InstrumentAdmin'], {}), '(Instrument, InstrumentAdmin)\n', (657, 686), False, 'from django.contrib import admin\n'), ((687, 729), 'django.contrib.admin.site.register', 'admin.site.register', (['Request', 'RequestAdmin'], {}), '(Request, RequestAdmin)\n', (706, 729), False, 'from django.contrib import admin\n'), ((730, 766), 'django.contrib.admin.site.register', 'admin.site.register', (['Slot', 'SlotAdmin'], {}), '(Slot, SlotAdmin)\n', (749, 766), False, 'from django.contrib import admin\n'), ((767, 815), 'django.contrib.admin.site.register', 'admin.site.register', (['CustomUser', 'CustomUserAdmin'], {}), '(CustomUser, CustomUserAdmin)\n', (786, 815), False, 'from django.contrib import admin\n'), ((816, 847), 'django.contrib.admin.site.register', 'admin.site.register', (['UserDetail'], {}), '(UserDetail)\n', (835, 847), False, 'from django.contrib import admin\n'), ((848, 873), 'django.contrib.admin.site.register', 'admin.site.register', (['FTIR'], {}), '(FTIR)\n', (867, 873), False, 'from django.contrib import admin\n'), ((874, 900), 'django.contrib.admin.site.register', 'admin.site.register', (['FESEM'], {}), '(FESEM)\n', (893, 900), False, 'from django.contrib import admin\n'), ((901, 926), 'django.contrib.admin.site.register', 'admin.site.register', (['LCMS'], {}), '(LCMS)\n', (920, 926), False, 'from django.contrib import admin\n'), ((927, 953), 'django.contrib.admin.site.register', 'admin.site.register', (['TCSPC'], {}), '(TCSPC)\n', (946, 953), False, 'from django.contrib import admin\n'), ((954, 984), 'django.contrib.admin.site.register', 'admin.site.register', (['Rheometer'], {}), '(Rheometer)\n', (973, 984), False, 'from django.contrib import admin\n'), ((985, 1009), 'django.contrib.admin.site.register', 'admin.site.register', (['AAS'], {}), '(AAS)\n', (1004, 1009), False, 'from django.contrib import admin\n'), ((1010, 1034), 'django.contrib.admin.site.register', 'admin.site.register', (['TGA'], {}), '(TGA)\n', (1029, 1034), False, 'from django.contrib import admin\n'), ((1035, 1059), 'django.contrib.admin.site.register', 'admin.site.register', (['BET'], {}), '(BET)\n', (1054, 1059), False, 'from django.contrib import admin\n'), ((1060, 1100), 'django.contrib.admin.site.register', 'admin.site.register', (['CDSpectrophotometer'], {}), '(CDSpectrophotometer)\n', (1079, 1100), False, 'from django.contrib import admin\n'), ((1101, 1126), 'django.contrib.admin.site.register', 'admin.site.register', (['LSCM'], {}), '(LSCM)\n', (1120, 1126), False, 'from django.contrib import admin\n'), ((1127, 1151), 'django.contrib.admin.site.register', 'admin.site.register', (['DSC'], {}), '(DSC)\n', (1146, 1151), False, 'from django.contrib import admin\n'), ((1152, 1175), 'django.contrib.admin.site.register', 'admin.site.register', (['GC'], {}), '(GC)\n', (1171, 1175), False, 'from django.contrib import admin\n'), ((1176, 1202), 'django.contrib.admin.site.register', 'admin.site.register', (['EDXRF'], {}), '(EDXRF)\n', (1195, 1202), False, 'from django.contrib import admin\n'), ((1203, 1228), 'django.contrib.admin.site.register', 'admin.site.register', (['HPLC'], {}), '(HPLC)\n', (1222, 1228), False, 'from django.contrib import admin\n'), ((1229, 1257), 'django.contrib.admin.site.register', 'admin.site.register', (['HPLC_FD'], {}), '(HPLC_FD)\n', (1248, 1257), False, 'from django.contrib import admin\n'), ((1258, 1282), 'django.contrib.admin.site.register', 'admin.site.register', (['NMR'], {}), '(NMR)\n', (1277, 1282), False, 'from django.contrib import admin\n'), ((1283, 1308), 'django.contrib.admin.site.register', 'admin.site.register', (['PXRD'], {}), '(PXRD)\n', (1302, 1308), False, 'from django.contrib import admin\n'), ((1309, 1339), 'django.contrib.admin.site.register', 'admin.site.register', (['SAXS_WAXS'], {}), '(SAXS_WAXS)\n', (1328, 1339), False, 'from django.contrib import admin\n'), ((1340, 1366), 'django.contrib.admin.site.register', 'admin.site.register', (['SCXRD'], {}), '(SCXRD)\n', (1359, 1366), False, 'from django.contrib import admin\n'), ((1367, 1391), 'django.contrib.admin.site.register', 'admin.site.register', (['XPS'], {}), '(XPS)\n', (1386, 1391), False, 'from django.contrib import admin\n'), ((1392, 1432), 'django.contrib.admin.site.register', 'admin.site.register', (['UVSpectrophotometer'], {}), '(UVSpectrophotometer)\n', (1411, 1432), False, 'from django.contrib import admin\n'), ((1433, 1457), 'django.contrib.admin.site.register', 'admin.site.register', (['UTM'], {}), '(UTM)\n', (1452, 1457), False, 'from django.contrib import admin\n'), ((1458, 1510), 'django.contrib.admin.site.register', 'admin.site.register', (['Announcement', 'AnnouncementAdmin'], {}), '(Announcement, AnnouncementAdmin)\n', (1477, 1510), False, 'from django.contrib import admin\n')]
|