text string | size int64 | token_count int64 |
|---|---|---|
"""
Utility Functions For Transactions
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
..contents:: Table of Contents
:backlinks: none
:local:
Introduction
------------
Transactions specific utility functions used in this application.
"""
from typing import Any, Dict, Tuple
from ethereum.base_types import Bytes0
from ethereum.frontier.eth_types import Transaction
from ethereum.frontier.utils.hexadecimal import hex_to_address
from ethereum.utils.hexadecimal import hex_to_bytes, hex_to_u256
def json_to_transactions(json_data: Dict[Any, Any]) -> Tuple[Transaction, ...]:
    """
    Convert json data to tuple of transaction objects.
    Parameters
    ----------
    json_data :
        The transactions data where the values are hexadecimals.
    Returns
    -------
    transactions : `Tuple[Transaction, ...]`
        The transaction objects obtained from the json data.
    """
    def _to_transaction(raw: Dict[Any, Any]) -> Transaction:
        # Build a single Transaction from one hex-encoded json entry.
        # An empty "to" means contract creation, encoded as Bytes0.
        to_field = Bytes0(b"") if raw["to"] == "" else hex_to_address(raw["to"])
        return Transaction(
            nonce=hex_to_u256(raw["nonce"]),
            gas_price=hex_to_u256(raw["gasPrice"]),
            gas=hex_to_u256(raw["gas"]),
            to=to_field,
            value=hex_to_u256(raw["value"]),
            data=hex_to_bytes(raw["input"]),
            v=hex_to_u256(raw["v"]),
            r=hex_to_u256(raw["r"]),
            s=hex_to_u256(raw["s"]),
        )

    return tuple(_to_transaction(t) for t in json_data["transactions"])
| 1,622 | 496 |
import os
import subprocess
import sys
# Directory holding the .srt subtitle files this script manages.
folderSRT = 'C:\\Users\\repos\\7910_Subliminal\\SRT\\'
# Episode count per season; the trailing comments give the running
# absolute-episode ranges across the whole show.
lenS1 = 22 # 1 - 22
lenS2 = 22 # 23 - 44
lenS3 = 25 # 45 - 69
lenS4 = 26 # 70 - 95
lenS5 = 26 # 96 - 121
lenS6 = 25 # 122 - 146
lenS7 = 22 # 147 - 168
lenS8 = 26 # 169 - 194
lenS9 = 27 # 195 - 221 *double episode*
lenS10= 25 # 222 - 247 * double episode* 25/26
lenS11= 25 # 248 - 275 *triple episode* 26/27/28
# Explicit per-season episode-number lists (s1 .. s11).
s1 = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22]
s2 = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22]
s3 = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25]
s4 = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26]
s5 = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26]
s6 = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25]
s7 = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22]
s8 = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26]
s9 = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27]
s10 = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26]
# NOTE(review): lenS9/lenS10/lenS11 disagree with len(s9)/len(s10)/len(s11)
# in places (e.g. s11 has 28 entries but lenS11 == 25) -- presumably because
# of the double/triple episodes noted above; confirm intended counts.
s11 = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28]
def numFormat( inNum ):
    """Return inNum zero-padded to at least two digits, e.g. 5 -> "05", 12 -> "12".

    Uses the standard format mini-language instead of manual string
    concatenation. Intended for positive episode/season numbers; note the
    previous manual version produced e.g. "0-5" for negative input.
    """
    return format(inNum, "02d")
def epFormat( inEp, inSea):
    """Return a "SxxEyy" episode tag, e.g. epFormat(3, 2) -> "S02E03".

    Formats directly (self-contained) instead of delegating to numFormat,
    so the function stands alone; the output is identical.
    """
    return "S" + format(inSea, "02d") + "E" + format(inEp, "02d")
# All per-season episode lists, indexed [season-1] (season 1 .. 11).
listSs = [s1,s2,s3,s4,s5,s6,s7,s8,s9,s10,s11]
# Per-season accumulators for formatted "SxxEyy" tags.
# NOTE(review): only listS1 is ever filled by the loop further down;
# the rest appear to be scaffolding for unfinished code.
listS1 = []
listS2 = []
listS3 = []
listS4 = []
listS5 = []
listS6 = []
listS7 = []
listS8 = []
listS9 = []
listS10 = []
listS11 = []
# Command fragments: subliminal prefix, example episode tag, file suffix.
p1 = "subliminal download -l en Cheers."
p2 = "S02E03"
p3 = ".mp4"
def makeCall(inThing):
    # NOTE(review): unfinished stub -- it only rebinds three local strings
    # and implicitly returns None; `inThing` is unused. It looks like it was
    # meant to build/run a subliminal command -- TODO confirm intent before
    # relying on it.
    p1 = "subliminal download -l en Cheers."
    p2 = "S02E03"
    p3 = ".mp4"
# Build the season-1 "S01Exx" tags (other seasons are never accumulated).
for i in s1:
    listS1.append(epFormat(i, 1))
listSeasons = []
for i in [1,2,3,4,5,6,7,8,9,10,11]:
    # NOTE(review): thisSeason is never used after this assignment.
    thisSeason = []
    # NOTE(review): eval() on a built name just looks up the sN lists;
    # listSs[i-1] would do the same without eval (avoid eval in general).
    for j in eval("s"+str(i)):
        # Prints the would-be subliminal command twice (once inline, once
        # via pO); the actual subprocess call is still commented out below.
        print(p1+epFormat(j,i)+p3)
        pO = p1+epFormat(j,i)+p3
        print(pO)
        #subprocess.call(dictO)
        #listSeasons.append(
| 2,383 | 1,752 |
from django import forms
from .models import Event
class EventForm(forms.ModelForm):
    """ModelForm exposing every field of the Event model."""
    class Meta:
        model = Event
        # NOTE(review): fields = '__all__' exposes every model field to the
        # form; Django recommends an explicit field list so that fields
        # added to the model later are not accidentally made editable.
        fields = '__all__'
| 153 | 44 |
""" Optimize a base allennlp configuration with a gaussian process by providing
a hyperparam search file.
"""
from argparse import ArgumentParser
import os
import numpy as np
import json
import _jsonnet
import subprocess
from datetime import datetime
from collections import OrderedDict, defaultdict
import skopt
from skopt.space.space import Categorical, Integer, Real, Space
from skopt.utils import normalize_dimensions
from allennlp.common.params import parse_overrides
from allenopt.util import *
from allenopt.plot import *
logger = init_logger(__name__, logging.DEBUG)
def parse_args(args=None):
    """Parse command-line arguments for the hyperparameter search.

    Parameters
    ----------
    args :
        Optional explicit argument list (mainly for tests). When omitted or
        empty, arguments are read from sys.argv, exactly as before.

    Notes
    -----
    The previous signature used the mutable default ``args=[]`` (a Python
    anti-pattern: the list object is shared across calls). The truthiness
    behavior is preserved: an empty/absent list falls through to sys.argv.
    """
    parser = ArgumentParser(description="Optimize allennlp model hyperparams with random, gaussian process, or tree-based process")
    parser.add_argument('base_config_path', help="Base config path")
    parser.add_argument('search_config_path', help="Search space config path")
    parser.add_argument('--include-package', help='Source package to pass to allennlp')
    parser.add_argument('-s', '--serialization-dir', type=str, help="Base directory to save trials in." )
    parser.add_argument('-o', '--overrides', type=str, default=None, help="If provided, we will override the base config with these")
    parser.add_argument('-e', '--evaluate-on-test', type=str, default=None, help="If provided, we will evaluate the best model on this test set.")
    parser.add_argument('-n', '--n-calls', type=int, default=10, help="Number of trials")
    parser.add_argument('-r', '--random-seed', type=int, default=None, help="Set a random state.")
    parser.add_argument('-m', '--mode', type=str, default='gp', choices=['random', 'tree', 'gp'], help="Minimizer type. 'gp' is gaussian process, 'random' is random search, 'tree' is extra trees search.")
    parser.add_argument('--n-random-starts', type=int, default=1, help="If provided, seed process with n random function evals in addition to the defaul x0")
    parser.add_argument('--xi', type=float, default=0.01, help="Exploration/expoitation param")
    parser.add_argument('--kappa', type=float, default=1.96, help="Exploration/expoitation param")
    parser.add_argument('--no-delete-worse', action='store_true', help='By default we delete heavy ".th" and ".gz" files for worse trials as we go to save disk space. This disables that.')
    # argparse reads sys.argv when given None, matching the old `if args` check.
    return parser.parse_args(args if args else None)
def run(args):
    """Run the hyperparameter search described by *args*.

    Builds the blackbox objective via setup(), minimizes it with the chosen
    skopt strategy ('gp', 'random' or 'tree'), then optionally evaluates the
    best trial on a test set and writes plots of the search process.
    """
    # Create base serialization dir
    if not os.path.exists(args.serialization_dir):
        os.makedirs(args.serialization_dir)
    # Read in search configuration and create the blackbox function to optimize
    f, dimensions, x0, trial_paths, delete_worse_files_cb = setup(args)
    # skopt needs at least one random start when there is no initial point x0.
    n_random_starts = max(1,args.n_random_starts) if x0 is None else args.n_random_starts
    # The callback prunes heavy model files of worse trials unless disabled.
    callback = None if args.no_delete_worse else delete_worse_files_cb
    # Run the actual optimization
    if args.mode == 'gp':
        results = skopt.gp_minimize(
            f, dimensions,
            x0=x0,
            n_calls=args.n_calls,
            n_random_starts=n_random_starts,
            random_state=args.random_seed,
            verbose=True,
            acq_optimizer='sampling',
            xi=args.xi,
            kappa=args.kappa,
            callback=callback,
        )
    elif args.mode == 'random':
        # NOTE(review): dummy_minimize has no acquisition function, so
        # xi/kappa/n_random_starts do not apply here.
        results = skopt.dummy_minimize(
            f, dimensions,
            x0=x0,
            n_calls=args.n_calls,
            random_state=args.random_seed,
            verbose=True,
            callback=callback,
        )
    elif args.mode == 'tree':
        results = skopt.forest_minimize(
            f, dimensions,
            x0=x0,
            n_calls=args.n_calls,
            n_random_starts=n_random_starts,
            random_state=args.random_seed,
            verbose=True,
            xi=args.xi,
            kappa=args.kappa,
            callback=callback,
        )
    # `results` is always bound: argparse restricts --mode to these choices.
    # Maybe evaluate the best model on the test dataset
    if args.evaluate_on_test:
        logger.info('EVALUATE ON TEST')
        evaluate_on_test(args, results, trial_paths)
    # Save a bunch of visualizations of the search process
    logger.info('PLOTTING RESULTS')
    plot_results(args.serialization_dir, results)
    logger.info('ALL DONE')
def setup(args):
    """ Create the blackbox function to optimize.
    This is a complex function that wraps the true parameter setting and training
    in subprocess calls to allennlp.

    Returns a 5-tuple:
      f -- objective mapping a search-space point to the (possibly negated)
           best validation metric, obtained by launching `allennlp train`
      dimensions -- skopt dimension objects, in search-space order
      x0 -- initial point (currently always None, see comment below)
      trial_paths -- dict of trial number -> serialization path, filled in as
           f is called
      delete_worse_files_cb -- skopt callback that deletes heavy artifacts
           (*.th, *.gz) of every trial except the best so far
    """
    base_config = json.loads(_jsonnet.evaluate_file(args.base_config_path))
    search_config = json.loads(_jsonnet.evaluate_file(args.search_config_path))
    arg_overrides = parse_overrides(args.overrides)
    # Flatten configs and get shorthand mappings
    flat_base_config = flatten(base_config)
    flat_search_config = flatten(search_config)
    shorthands = get_shorthands(flat_search_config)
    # Extract any variable dimensions and the mapping to their keys
    search_space = extract_search_space(flat_search_config)
    lambdas = extract_lambdas(flat_search_config)
    dimensions = list(search_space.values())
    # We no longer use the base config as an initial point because the base config
    # needs to be minimal -- cannot contain fields which aren't used by certain hp
    # configurations since overrides cannot "delete" a field in the base config.
    x0 = None # get_x0(flat_base_config, search_space)
    # Shared mutable state captured by both closures below.
    trial_num = 0
    trial_paths = dict()
    # Construct f
    def f(x):
        nonlocal trial_num
        nonlocal trial_paths
        # Map the x to the config keys that need updated
        newx = []
        for d,p in zip(dimensions, x):
            print(d.name, d, p, type(p))
            # skopt may hand back numpy scalars; unwrap them to native Python
            # types so json.dumps on the overrides works.
            if 'numpy' in str(type(p)):
                p = p.item()
            newx.append(p)
        x = newx
        overrides = skopt.utils.point_asdict(search_space, x)
        overrides = fill_search_constants(overrides, flat_search_config)
        overrides = restrict_type_overrides(overrides, flat_search_config)
        # print(f'Overrides after fill and restrict: {json.dumps(overrides, indent=2)}')
        # Construct the trial serialization path
        trial_str = construct_trial_name(overrides, shorthands, trial_num)
        trial_path = os.path.join(args.serialization_dir, trial_str)
        trial_paths[trial_num] = trial_path
        # Construct the overrides string
        processed_overrides = format_overrides(overrides, lambdas, base_config, arg_overrides)
        print(f'Sampled config: {json.dumps(processed_overrides, indent=2)}')
        override_str = json.dumps(processed_overrides, indent=None)
        # Run Allennlp train subprocess
        cmd = f"allennlp train {args.base_config_path} -f -s {trial_path} -o '{override_str}' --file-friendly-logging --include-package {args.include_package}"
        print(f'CMD: {cmd}')
        try:
            subprocess.check_call(cmd, shell=True)
        except Exception as e:
            # Log with traceback, then let the failure abort the search.
            logger.error(e, exc_info=True)
            raise e
        trial_num += 1
        # Retrieve the best validation metric and return that value
        metrics = json.load(open(os.path.join(trial_path, 'metrics.json')))
        validation_metric = base_config['trainer']['validation_metric']
        # A '+' prefix means "higher is better": negate so the minimizer
        # effectively maximizes it.
        negate = validation_metric.startswith('+')
        validation_metric = validation_metric.lstrip('+-')
        y = metrics[f'best_validation_{validation_metric}']
        if negate:
            y = -y
        return y
    # Construct a callback which maintains only the best weights/archive
    def delete_worse_files_cb(results):
        """ Remove .th and .gz files for any trials that aren't the best so far.
        """
        nonlocal trial_num
        nonlocal trial_paths
        logger.info(f'DELETE WORSE FILES, trial num:{trial_num}')
        best_trial_num = np.argmin(results.func_vals).item()
        logger.info(f'Func values: {results.func_vals}, best is {best_trial_num} with path {trial_paths[best_trial_num]}')
        for i in range(trial_num):
            if i != best_trial_num:
                logger.info(f'Deleting .th and .gz files at {trial_paths[i]}')
                th_path = os.path.join(trial_paths[i], '*.th')
                gz_path = os.path.join(trial_paths[i], '*.gz')
                # Glob expansion is done by the shell (shell=True below).
                cmd = f"rm -f {th_path} && rm -f {gz_path}"
                try:
                    subprocess.check_call(cmd, shell=True)
                except Exception as e:
                    logger.error(e, exc_info=True)
                    raise e
    return f, dimensions, x0, trial_paths, delete_worse_files_cb
def evaluate_on_test(args, results, trial_paths):
    """ Look at all models in serialization dir for the argmaximizer
    of the 'best_validation_metric', then evaluate that model on the test set.

    Parameters
    ----------
    args : parsed CLI namespace (needs evaluate_on_test, serialization_dir,
        base_config_path, include_package)
    results : skopt OptimizeResult with per-trial objective values in func_vals
    trial_paths : dict of trial number -> serialization path

    Writes best_trial_test_metrics.json into the serialization dir, annotated
    with the winning trial's path.
    """
    # Find the best trial model
    # (objective values were minimized, so argmin selects the best trial)
    best_trial_num = np.argmin(results.func_vals).item()
    best_trial_path = trial_paths[best_trial_num]
    model_path = os.path.join(best_trial_path, 'model.tar.gz')
    # Evaluate that model on the test dataset, dumping to best_trial_test_results.jsons
    output_path = os.path.join(args.serialization_dir, 'best_trial_test_metrics.json')
    # Reuse the base config's cuda device (default CPU) for evaluation.
    cuda_device = json.loads(_jsonnet.evaluate_file(args.base_config_path))['trainer'].get('cuda_device', -1)
    cmd = f"allennlp evaluate {model_path} {args.evaluate_on_test} --output-file {output_path} --cuda-device {cuda_device} --include-package {args.include_package}"
    logger.info(f'EVALUATE CMD: {cmd}')
    try:
        subprocess.check_call(cmd, shell=True)
    except Exception as e:
        logger.error(e, exc_info=True)
        raise e
    # Open the results and add the path of the best model so we know who won.
    test_metrics = json.load(open(output_path))
    test_metrics['best_trial_path'] = best_trial_path
    logger.info(f'Best trial path was {best_trial_path} with test metrics:{json.dumps(test_metrics, indent=2)}')
    with open(output_path, 'w') as outf:
        json.dump(test_metrics, outf)
if __name__ == '__main__':
    # CLI entry point: parse args from sys.argv and launch the search.
    args = parse_args()
    run(args)
| 10,031 | 3,104 |
#print_backwards
# @brupoon 2014
def print_backwards(string):
    """Print each character of *string* on its own line, last character first.

    Replaces the manual index-walking while-loop with the idiomatic
    reversed() iterator; output is identical (nothing printed for "").
    """
    for ch in reversed(string):
        print(ch)
if __name__ == '__main__':
    # Demo: print the word reversed, one character per line.
    print_backwards("antidisestablishmentarianism")
# Copyright 2014 Klaudiusz Staniek
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
fiblary.common.net
~~~~~~~~~~~~~~~~~~
Net Utilities Implementation
"""
from netaddr import EUI
from netaddr import mac_unix
from uuid import getnode
def get_mac():
    """Return this device's MAC address formatted in unix style.

    @rtype: string
    @return: The mac address in unix format
    """
    # uuid.getnode() yields the hardware address as a 48-bit integer;
    # netaddr renders it with the unix dialect (lowercase, colon-separated).
    mac_address = EUI(getnode())
    mac_address.dialect = mac_unix
    return str(mac_address)
| 970 | 302 |
from typing import Callable, Dict, Tuple, TYPE_CHECKING, Union
from requests import get
from telegram import MessageEntity
if TYPE_CHECKING:
import telegram
import telegram.ext
def animal(update: 'telegram.Update', _context: 'telegram.ext.CallbackContext') -> None:
    """Reply with a random animal image/fact for the issued bot command.

    Fixes over the previous version:
    * guards against no BOT_COMMAND entity being present (previously an
      UnboundLocalError when the message had neither caption nor text, and
      an IndexError when no command entity was parsed)
    * strips the '@botname' suffix for caption commands too, not only for
      text commands (the partition was applied in one branch only)
    * silently ignores unknown commands instead of raising KeyError
    """
    message = update.message
    if not message:
        return
    # Prefer caption entities (media posts); fall back to text entities.
    if message.caption:
        entities = message.parse_caption_entities([MessageEntity.BOT_COMMAND])
    elif message.text:
        entities = message.parse_entities([MessageEntity.BOT_COMMAND])
    else:
        return
    commands = list(entities.values())
    if not commands:
        return
    # '/cat@MyBot' -> '/cat'
    animal_choice: str = commands[0].partition('@')[0]
    urls: Dict[str, Tuple[str, Callable]] = {
        "/shiba": (
            'http://shibe.online/api/shibes?count=1&urls=true&httpsUrls=false',
            lambda resp: message.reply_photo(resp[0])
        ),
        "/fox": (
            'https://randomfox.ca/floof/',
            lambda resp: message.reply_photo(resp['image'])
        ),
        "/cat": (
            'https://api.thecatapi.com/v1/images/search',
            lambda resp: message.reply_photo(resp[0]['url'])
        ),
        "/catfact": (
            'https://cat-fact.herokuapp.com/facts/random',
            lambda resp: message.reply_text(resp['text'])
        ),
    }
    entry = urls.get(animal_choice)
    if entry is None:
        return
    response: Union[list, dict] = get(entry[0]).json()
    entry[1](response)
| 1,498 | 478 |
import base64
import binascii
import datetime
import hashlib
import re
from typing import Optional, Any, Dict, List, Callable
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.serialization import Encoding, PublicFormat
from cryptography.x509 import load_pem_x509_certificate
from appgate.customloaders import CustomFieldsEntityLoader
from appgate.openapi.attribmaker import SimpleAttribMaker
from appgate.openapi.types import OpenApiDict, AttribType, AttributesDict, \
K8S_LOADERS_FIELD_NAME, InstanceMakerConfig, Entity_T, LoaderFunc
__all__ = [
'checksum_attrib_maker',
'size_attrib_maker',
'certificate_attrib_maker',
]
def datetime_utc(d: datetime.datetime) -> datetime.datetime:
    """Return *d* unchanged when it carries a non-zero UTC offset;
    otherwise (naive datetime, or zero offset) convert it via astimezone().

    NOTE(review): despite the name, astimezone() with no argument attaches
    the *local* timezone, not UTC -- behavior preserved as-is; confirm intent.
    """
    return d if d.utcoffset() else d.astimezone()
def create_certificate_loader(loader: LoaderFunc, entity_type: type) -> Callable[..., Any]:
    """Return a closure that parses a PEM certificate string into an
    entity of *entity_type* via *loader*."""
    def certificate_bytes(value: Any, data: str) -> Entity_T:
        """
        Creates an Entity_T with the details of a PEM certificate.
        NOTE: Entity_T must be compatible with the fields in the dict returned here
        NOTE: We need to increase version one since:
            Version ::= INTEGER { v1(0), v2(1), v3(2) }
        """
        cert = load_pem_x509_certificate(data.encode())  # type: ignore
        # Render validity timestamps as ISO-8601 with a literal 'Z' suffix
        # instead of a '+HH:MM' offset (API-style timestamp format).
        # NOTE(review): not_valid_before/after are deprecated in newer
        # `cryptography` releases in favor of *_utc variants -- confirm the
        # pinned version before upgrading.
        valid_from = re.sub(r'\+\d\d:\d\d', 'Z',
                            datetime_utc(cert.not_valid_before).isoformat(timespec='milliseconds'))
        valid_to = re.sub(r'\+\d\d:\d\d', 'Z',
                          datetime_utc(cert.not_valid_after).isoformat(timespec='milliseconds'))
        # Strip the BEGIN/END PEM armour lines, keeping only the base64 body.
        public_key = cert.public_key().public_bytes(
            Encoding.PEM,
            PublicFormat.SubjectPublicKeyInfo).decode().splitlines()
        del public_key[0]
        del public_key[-1]
        cert_data = {
            'version': cert.version.value + 1,
            'serial': str(cert.serial_number),
            'issuer': ', '.join(cert.issuer.rfc4514_string().split(',')),
            'subject': ', '.join(cert.subject.rfc4514_string().split(',')),
            'validFrom': valid_from,
            'validTo': valid_to,
            'fingerprint': binascii.hexlify(cert.fingerprint(hashes.SHA256())).decode(),
            'certificate': base64.b64encode(cert.public_bytes(Encoding.PEM)).decode(),
            'subjectPublicKey': ''.join(public_key),
        }
        return loader(cert_data, None, entity_type)
    return certificate_bytes
def checksum_bytes(value: Any, data: str) -> str:
    """Return the SHA-256 hex digest of the base64-decoded *data*."""
    return hashlib.sha256(base64.b64decode(data)).hexdigest()
def size_bytes(value: Any, data: str) -> int:
    """Return the length in bytes of the base64-decoded *data*."""
    return len(base64.b64decode(data))
class BytesFieldAttribMaker(SimpleAttribMaker):
    """AttribMaker for attributes derived from a base64 *source_field*.

    Registers a CustomFieldsEntityLoader so that, at entity-load time,
    `loader` is applied to the value of `source_field` to populate this
    attribute (e.g. checksum, size, or parsed-certificate details).
    """
    def __init__(self, name: str, tpe: type, base_tpe: type, default: Optional[AttribType],
                 factory: Optional[type], definition: OpenApiDict,
                 source_field: str,
                 loader: Callable[..., Any]) -> None:
        super().__init__(name, tpe, base_tpe, default, factory, definition)
        # Field whose contents feed `loader` to compute this attribute.
        self.source_field = source_field
        self.loader = loader

    def values(self, attributes: Dict[str, 'SimpleAttribMaker'], required_fields: List[str],
               instance_maker_config: 'InstanceMakerConfig') -> AttributesDict:
        """Extend the base attribute kwargs with the custom k8s loader."""
        values = super().values(attributes, required_fields, instance_maker_config)
        # Derived field participates in equality comparisons.
        values['eq'] = True
        if 'metadata' not in values:
            values['metadata'] = {}
        values['metadata'][K8S_LOADERS_FIELD_NAME] = [CustomFieldsEntityLoader(
            loader=self.loader,
            dependencies=[self.source_field],
            field=self.name,
        )]
        return values
def checksum_attrib_maker(name: str, tpe: type, base_tpe: type, default: Optional[AttribType],
                          factory: Optional[type], definition: OpenApiDict,
                          source_field: str) -> BytesFieldAttribMaker:
    """Build a BytesFieldAttribMaker that stores the SHA-256 of `source_field`."""
    return BytesFieldAttribMaker(
        name=name,
        tpe=tpe,
        base_tpe=base_tpe,
        default=default,
        factory=factory,
        definition=definition,
        source_field=source_field,
        loader=checksum_bytes,
    )
def size_attrib_maker(name: str, tpe: type, base_tpe: type, default: Optional[AttribType],
                      factory: Optional[type], definition: OpenApiDict,
                      source_field: str) -> BytesFieldAttribMaker:
    """Build a BytesFieldAttribMaker that stores the decoded size of `source_field`."""
    return BytesFieldAttribMaker(
        name=name,
        tpe=tpe,
        base_tpe=base_tpe,
        default=default,
        factory=factory,
        definition=definition,
        source_field=source_field,
        loader=size_bytes,
    )
def certificate_attrib_maker(name: str, tpe: type, base_tpe: type, default: Optional[AttribType],
                             factory: Optional[type], definition: OpenApiDict,
                             source_field: str,
                             loader: LoaderFunc) -> BytesFieldAttribMaker:
    """Build a BytesFieldAttribMaker that parses `source_field` as a PEM certificate."""
    return BytesFieldAttribMaker(
        name=name,
        tpe=tpe,
        base_tpe=base_tpe,
        default=default,
        factory=factory,
        definition=definition,
        source_field=source_field,
        loader=create_certificate_loader(loader, base_tpe),
    )
| 5,030 | 1,514 |
# Copyright 2019 SAP SE
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import unittest
import os
from rate_limit.rate_limit import OpenStackRateLimitMiddleware
from . import fake
WORKDIR = os.path.dirname(os.path.realpath(__file__))
CONFIGPATH = WORKDIR + '/fixtures/groups.yaml'
class TestActionGroups(unittest.TestCase):
    """Tests for rate-limit action-group configuration and mapping."""

    @classmethod
    def setUpClass(cls):
        # The previous setUp() guarded on an `is_setup` flag, but unittest
        # creates a fresh TestCase instance per test method, so the instance
        # flag never prevented re-initialization. setUpClass provides the
        # intended build-once behavior for the (read-only) middleware.
        cls.app = OpenStackRateLimitMiddleware(
            app=fake.FakeApp(),
            config_file=CONFIGPATH
        )

    def test_groups(self):
        """The middleware should expose the rate limit groups from the fixture."""
        rl_groups = self.app.rate_limit_groups
        self.assertIsNotNone(
            rl_groups,
            "expected rate limit groups to be '{0}' but got '{1}'".format(
                """
                groups:
                  write:
                    - update
                    - delete
                  read:
                    - read
                    - read/list
                """,
                rl_groups
            )
        )

    def test_mapping(self):
        """Each raw action should map to its configured group (or itself)."""
        stimuli = [
            {
                'action': 'create',
                'expected': 'create'
            },
            {
                'action': 'update',
                'expected': 'write'
            },
            {
                'action': 'delete',
                'expected': 'write'
            },
            {
                'action': 'read',
                'expected': 'read'
            },
            {
                'action': 'read/list',
                'expected': 'read'
            },
        ]
        for stim in stimuli:
            action = stim.get('action')
            expected_action = stim.get('expected')
            got_action = self.app.get_action_from_rate_limit_groups(action)
            self.assertEqual(
                got_action,
                expected_action,
                "action should be '{0}' but got '{1}'".format(expected_action, got_action)
            )
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
| 2,422 | 691 |
#input the string
# input the string
n = input().strip()
# Repeatedly delete adjacent equal pairs. A stack does this in a single
# O(n) pass -- the previous slice-and-backtrack loop rebuilt the string on
# every deletion (O(n^2) on long inputs) -- and yields the identical result.
stack = []
for ch in n:
    if stack and stack[-1] == ch:
        # Current char matches the top of the stack: the pair cancels out.
        stack.pop()
    else:
        stack.append(ch)
n = "".join(stack)
# output the reduced string or Empty String
if n == "":
    print("Empty String")
else:
    print(n)
from django import forms
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext as _
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from corehq.apps.userreports.ui.fields import JsonField
from corehq.motech.openmrs.const import (
ADDRESS_PROPERTIES,
IMPORT_FREQUENCY_CHOICES,
LOG_LEVEL_CHOICES,
NAME_PROPERTIES,
PERSON_PROPERTIES,
)
class OpenmrsConfigForm(forms.Form):
    """Form for editing an OpenMRS integration configuration.

    `case_config` and `form_configs` are raw JSON blobs; clean_case_config
    validates that the keys used in the case config are drawn from the known
    OpenMRS person/name/address properties.
    """
    openmrs_provider = forms.CharField(label=_("Provider UUID"), required=False)
    case_config = JsonField(expected_type=dict)
    form_configs = JsonField(expected_type=list)

    def __init__(self, *args, **kwargs):
        super(OpenmrsConfigForm, self).__init__(*args, **kwargs)
        # crispy-forms helper renders the form with a single submit button.
        self.helper = FormHelper()
        self.helper.add_input(Submit('submit', _('Save Changes')))

    def clean_case_config(self):
        """Validate property keys and ID references in the case config.

        Raises ValidationError for unknown person/name/address property keys,
        and for match_on_ids entries missing from patient_identifiers.
        NOTE(review): assumes keys like 'person_properties' exist in the
        blob -- a missing key raises KeyError rather than a form error;
        confirm the expected schema upstream.
        """
        for key in self.cleaned_data['case_config']['person_properties']:
            if key not in PERSON_PROPERTIES:
                raise ValidationError(
                    _('person property key "%(key)s" is not valid.'),
                    code='invalid',
                    params={'key': key}
                )
        for key in self.cleaned_data['case_config']['person_preferred_name']:
            if key not in NAME_PROPERTIES:
                raise ValidationError(
                    _('person preferred name key "%(key)s" is not valid.'),
                    code='invalid',
                    params={'key': key}
                )
        for key in self.cleaned_data['case_config']['person_preferred_address']:
            if key not in ADDRESS_PROPERTIES:
                raise ValidationError(
                    _('person preferred address key "%(key)s" is not valid.'),
                    code='invalid',
                    params={'key': key}
                )
        for id_ in self.cleaned_data['case_config']['match_on_ids']:
            if id_ not in self.cleaned_data['case_config']['patient_identifiers']:
                raise ValidationError(
                    _('ID "%(id_)s" used in "match_on_ids" is missing from "patient_identifiers".'),
                    code='invalid',
                    params={'id_': id_}
                )
        return self.cleaned_data['case_config']
# Labels shared by OpenmrsImporterForm fields below.
_owner_id_label = _('Owner ID')
_location_type_name_label = _('Organization Level')
class OpenmrsImporterForm(forms.Form):
    """Form for configuring a periodic import of OpenMRS patients as cases.

    Captures server credentials, the OpenMRS report to pull, and how report
    columns map onto CommCare case properties/ownership.
    """
    server_url = forms.CharField(label=_('OpenMRS URL'), required=True,
                                 help_text=_('e.g. "http://www.example.com/openmrs"'))
    username = forms.CharField(label=_('Username'), required=True)
    # Password optional on edit so an existing secret need not be re-entered.
    password = forms.CharField(label=_('Password'), widget=forms.PasswordInput, required=False)
    notify_addresses_str = forms.CharField(label=_('Addresses to send notifications'), required=False,
                                           help_text=_('A comma-separated list of email addresses to send error '
                                                       'notifications'))
    location_id = forms.CharField(label=_('Location ID'), required=False,
                                  help_text=_('If a project space has multiple OpenMRS servers to import from, '
                                              'for which CommCare location is this OpenMRS server authoritative?'))
    import_frequency = forms.ChoiceField(label=_('Import Frequency'), choices=IMPORT_FREQUENCY_CHOICES,
                                         help_text=_('How often should cases be imported?'), required=False)
    log_level = forms.TypedChoiceField(label=_('Log Level'), required=False, choices=LOG_LEVEL_CHOICES, coerce=int)
    timezone = forms.CharField(label=_('Timezone'), required=False,
                               help_text=_("Timezone name. If not specified, the domain's timezone will be used."))
    report_uuid = forms.CharField(label=_('Report UUID'), required=True,
                                  help_text=_('The OpenMRS UUID of the report of patients to be imported'))
    report_params = JsonField(label=_('Report Parameters'), required=False, expected_type=dict)
    case_type = forms.CharField(label=_('Case Type'), required=True)
    owner_id = forms.CharField(label=_owner_id_label, required=False,
                               help_text=_('The ID of the mobile worker or location who will own new cases'))
    location_type_name = forms.CharField(label=_location_type_name_label, required=False,
                                         help_text=_('The Organization Level whose mobile worker will own new '
                                                     'cases'))
    external_id_column = forms.CharField(label=_('External ID Column'), required=True,
                                         help_text=_("The column that contains the OpenMRS UUID of the patient"))
    name_columns = forms.CharField(label=_('Name Columns'), required=True,
                                   help_text=_('Space-separated column(s) to be concatenated to create the case '
                                               'name (e.g. "givenName familyName")'))
    column_map = JsonField(label=_('Map columns to properties'), required=True, expected_type=list,
                           help_text=_('e.g. [{"column": "givenName", "property": "first_name"}, ...]'))
| 5,350 | 1,472 |
def relation_to_Luke(name):
    """Return the famous line matching *name*'s relation to Luke.

    Unknown names return None (same implicit behavior as before).
    """
    relations = {
        'Darv': 'Luke, I am your father',
        'Leila': ' Luke, i am your sister',
        'Han': ' Luke, i am your brother',
    }
    return relations.get(name)


print(relation_to_Luke('Leila'))
import logging
import tflite
from tflite2onnx.layout import Layout
from tflite2onnx.op.activation import handleFusedActivation
from tflite2onnx.op.common import Operator
from tflite2onnx.op.padding import computePaddingSize
logger = logging.getLogger('tflite2onnx')
class Conv(Operator):
    """Converts TFLite CONV_2D / DEPTHWISE_CONV_2D operators to ONNX Conv."""
    TypeMapping = {
        tflite.BuiltinOperator.CONV_2D: 'Conv',
        tflite.BuiltinOperator.DEPTHWISE_CONV_2D: 'Conv',
    }

    def __init__(self, TFactory, index):
        super().__init__(TFactory, index)
        self.attrs['kernel_shape'] = []
        self.attrs['strides'] = []
        # ONNX: This attribute cannot be used simultaneously with `auto_pad` attribute.
        # re-initialize during self.parse(), as it needs the shape of input.
        # We prefer `auto_pad`, however ONNXRuntime doesn't support
        # `dilation` + `auto_pad`, such that we use `pads` to workaround it.
        self.attrs['pads'] = [0, 0, 0, 0]
        # XXX Not enabled as ONNXRuntime has limitation to infer pads for non-1 dilation
        # self.attrs['auto_pad'] = 'SAME_UPPER' # See ComputePaddingHeightWidth() of TFLite
        self.attrs['dilations'] = []
        self.attrs['group'] = -1  # filled during parse(); 1 for regular conv
        self.setInited()

    @property
    def type(self):
        """ONNX operator type this converter emits."""
        return 'Conv'

    @property
    def isDepthwise(self):
        """True when the wrapped TFLite op is DEPTHWISE_CONV_2D."""
        op = self.tflite
        opcode = self.model.OperatorCodes(op.OpcodeIndex()).BuiltinCode()
        return (opcode is tflite.BuiltinOperator.DEPTHWISE_CONV_2D)

    def parse(self):
        """Parse tensors/options of the TFLite op into ONNX Conv attributes.

        Sets up NHWC->NCHW layout transforms for input/output and the
        matching weight layout transform (depthwise uses a CHWM source).
        """
        logger.debug("Parsing %s...", self.type)
        op = self.tflite
        opcode = self.model.OperatorCodes(op.OpcodeIndex()).BuiltinCode()
        assert(opcode in self.TypeMapping)
        assert(op.InputsLength() == 3), "TFLite Conv always has bias"
        assert(op.OutputsLength() == 1)
        # input
        ilayout = Layout('NHWC', 'NCHW')
        it = self.parseInput(0, ilayout)
        # weight
        wlayout = Layout('CHWM', 'MCHW') if self.isDepthwise else Layout('OHWI', 'OIHW')
        wt = self.parseInput(1, wlayout)
        # bias
        self.parseInput(2, is_bias=True)
        # output
        olayout = Layout('NHWC', 'NCHW')
        ot = self.parseOutput(0, olayout)
        # options
        op_opt = op.BuiltinOptions()
        option = tflite.DepthwiseConv2DOptions() if self.isDepthwise else tflite.Conv2DOptions()
        option.Init(op_opt.Bytes, op_opt.Pos)
        self.attrs['dilations'] = [option.DilationHFactor(), option.DilationWFactor()]
        # Depthwise conv maps to ONNX grouped conv with group == input channels.
        self.attrs['group'] = wt.shape[3] if self.isDepthwise else 1
        self.attrs['kernel_shape'] = wt.shape[1:3]
        self.attrs['strides'] = [option.StrideH(), option.StrideW()]
        # XXX Not enabled as ONNXRuntime has limitation to infer pads for non-1 dilation
        # self.attrs['auto_pad'] = PaddingMapping[option.Padding()]
        if self.isDepthwise:
            # Only depth multiplier 1 is supported by this converter.
            assert(option.DepthMultiplier() == 1)
        # Explicit pads computed from TFLite's padding mode (see __init__ note).
        self.attrs['pads'] = computePaddingSize(option.Padding(), it.shape[1:3],
                                                self.attrs['kernel_shape'],
                                                self.attrs['strides'], self.attrs['dilations'])
        # Fused ReLU/ReLU6 etc. becomes a separate ONNX activation node.
        handleFusedActivation(self, option, ot)
        self.setParsed()

    def propagatableTensors(self):
        """No tensors propagate layout through this op."""
        return list()

    def transform(self):
        """Nothing to transform post-parse for Conv."""
        pass
class TransposeConv(Operator):
    """Converts TFLite TRANSPOSE_CONV operators to ONNX ConvTranspose."""
    TypeMapping = {
        tflite.BuiltinOperator.TRANSPOSE_CONV: 'ConvTranspose',
    }

    # FIXME: cases that untested yet (we are not fully understand the semantic gap)
    # 1. Special output shape for VALID padding
    # 2. Different input/output shape for SAME padding
    def __init__(self, TFactory, index):
        super().__init__(TFactory, index)
        self.attrs['dilations'] = [1, 1] # TFLite TransposeConv doesn't have dilation
        self.attrs['group'] = 1 # TFLite TransposeConv doesn't have group
        self.attrs['kernel_shape'] = []
        # self.attrs['output_padding'] = []
        self.attrs['output_shape'] = []
        # pads are overwrited by output_shape
        # self.attrs['auto_pad'] = 'NOTSET'
        # self.attrs['pads'] = []
        self.attrs['strides'] = []
        self.setInited()

    @property
    def type(self):
        """ONNX operator type this converter emits."""
        return 'ConvTranspose'

    def parse(self):
        """Parse tensors/options of the TFLite op into ConvTranspose attributes.

        TFLite input 0 is the desired output shape (a constant tensor),
        input 1 the weights, input 2 the actual feature-map input.
        """
        logger.debug("Parsing %s...", self.type)
        op = self.tflite
        opcode = self.model.OperatorCodes(op.OpcodeIndex()).BuiltinCode()
        assert(opcode in self.TypeMapping)
        assert(op.InputsLength() == 3)
        assert(op.OutputsLength() == 1)
        # oshape
        osi = op.Inputs(0)
        oshape = self.TFactory.getData(self.model, self.graph, osi, 'int32')
        # X
        ilayout = Layout('NHWC', 'NCHW')
        self.parseInput(2, ilayout)
        # weight
        wlayout = Layout('OHWI', 'IOHW')
        wt = self.parseInput(1, wlayout)
        # FIXME: we don't have a model containing bias.
        # output
        olayout = Layout('NHWC', 'NCHW')
        ot = self.parseOutput(0, olayout)
        # Sanity check: declared output shape matches the output tensor.
        assert((ot.shape == oshape).all())
        # options
        op_opt = op.BuiltinOptions()
        option = tflite.TransposeConvOptions()
        option.Init(op_opt.Bytes, op_opt.Pos)
        self.attrs['kernel_shape'] = wt.shape[1:3]
        self.attrs['strides'] = [option.StrideH(), option.StrideW()]
        # Re-express the requested NHWC output shape in NCHW for ONNX.
        oslayout = Layout('NHWC', 'NCHW')
        self.attrs['output_shape'] = oslayout.transform(oshape)
        self.setParsed()

    def propagatableTensors(self):
        """No tensors propagate layout through this op."""
        return list()

    def transform(self):
        """Nothing to transform post-parse for TransposeConv."""
        pass
| 5,583 | 1,882 |
#!/usr/bin/python
#
# Channel.py
#
# Created on: 2 Feb, 2011
# Author: Skufka - adapted by black - adapted by PW
#
# class definition for the channel object not using sockets
#
# This material is based on research sponsored by DARPA under agreement
# number FA8750-10-2-0165. The U.S. Government is authorized to
# reproduce and distribute reprints for Governmental purposes
# notwithstanding any copyright notation thereon.
#
# The views and conclusions contained herein are those of the authors
# and should not be interpreted as necessarily representing the official
# policies or endorsements, either expressed or implied, of DARPA or the
# U.S. Government.
#
# ==========================================================================
#
# For use by entities other than the U.S. Government the following
# additional limitations apply:
#
# Copyright (c) 2011, Clarkson University
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# * Neither the name of the Clarkson University nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# (license copied from http://www.opensource.org/licenses/bsd-license)
#
# ==========================================================================#
#
# Helpful References:
# http://docs.python.org/library/socketserver.html
#
#
from numpy import *
from numpy.linalg import *
#from World import World
#from Vacuum import Vacuum
from Router import Router
from SocketRouter import SocketRouter
# The xml classes used to define the messages being passed.
from XML.XMLParser import XMLParser
from XML.XMLIncomingDIF import XMLIncomingDIF
from XML.XMLMessageNetwork import XMLMessageNetwork
from XML.XMLMessageVacuumIDPosBase import XMLMessageVacuumIDPosBase
from XML.XMLMessageGetReportVacuumCommander import \
XMLMessageGetReportVacuumCommander
from XML.XMLMessageWorldStatus import \
XMLMessageWorldStatus
from XML.XMLMessageWorldWetness import \
XMLMessageWorldWetness
from XML.XMLMessageUpdateWorldPlanner import \
XMLMessageUpdateWorldPlanner
from XML.XMLMessageUpdatePlannerSensor import \
XMLMessageUpdatePlannerSensor
from XML.XMLMessageSensorWetness import \
XMLMessageSensorWetness
from XML.XMLMessageSensorWetness import \
XMLMessageSensorWetness
from XML.XMLMessageSensorStatus import \
XMLMessageSensorStatus
from XML.XMLMessageWorldVacuumCurrentTime import \
XMLMessageWorldVacuumCurrentTime
from XML.XMLMessageVacuumAddExpenditureWorld import \
XMLMessageVacuumAddExpenditureWorld
from XML.XMLMessageExternalCommand import \
XMLMessageExternalCommand
from XML.XMLMessageExternalParameter import \
XMLMessageExternalParameter
# The xml classes used to define the messages being passed.
from XML.XMLParser import XMLParser
from XML.XMLIncomingDIF import XMLIncomingDIF
from XML.XMLMessageNetwork import XMLMessageNetwork
#from XML.XMLMessagePlannerReportVacuumOrders import \
# XMLMessagePlannerReportVacuumOrders
#from XML.XMLMessageRecommendOrderCommander2Planner import \
# XMLMessageRecommendOrderCommander2Planner
#from XML.XMLMessageRecommendOrderPlanner2Commander import \
# XMLMessageRecommendOrderPlanner2Commander
from XML.XMLMessageVacuumIDPosBase import XMLMessageVacuumIDPosBase
from XML.XMLMessageMoveOrderCommanderVacuum import \
XMLMessageMoveOrderCommanderVacuum
from XML.XMLMessageMoveOrderCommanderPlanner import \
XMLMessageMoveOrderCommanderPlanner
from XML.XMLMessageGetReportVacuumCommander import \
XMLMessageGetReportVacuumCommander
from XML.XMLMessageWorldStatus import \
XMLMessageWorldStatus
from XML.XMLMessageWorldWetness import \
XMLMessageWorldWetness
from XML.XMLMessageUpdateWorldPlanner import \
XMLMessageUpdateWorldPlanner
from XML.XMLMessageUpdatePlannerSensor import \
XMLMessageUpdatePlannerSensor
from XML.XMLMessageSensorWetness import \
XMLMessageSensorWetness
from XML.XMLMessageSensorWetness import \
XMLMessageSensorWetness
from XML.XMLMessageSensorStatus import \
XMLMessageSensorStatus
from XML.XMLMessageVaccumMovedReportToPlanner import \
XMLMessageVaccumMovedReportToPlanner
from XML.XMLMessageWorldVacuumCurrentTime import \
XMLMessageWorldVacuumCurrentTime
from XML.XMLMessageVacuumAddExpenditureWorld import \
XMLMessageVacuumAddExpenditureWorld
from XML.XMLMessageVacuumCleanWorld import \
XMLMessageVacuumCleanWorld
## Channel
#
# Creates a channel, which is a medium through which simulated agents communicate.
#
# This channel uses local function calls, not sockets, for communication.
class Channel:
checkInfoType = False
DEBUG = False
def __init__(self,world=None,sensor=None,planner=None,commander=None) :
self.setWorking(True)
self.delay = 0.0 # transmission delay - not yet implemented
self.setWorld(world)
self.vacuumArray = [] # array of object handles
self.setSensor(sensor)
self.setPlanner(planner)
self.setCommander(commander)
self.router = SocketRouter(self)
self.vacuum = None
self.myAgent = None
# Utility routines.
#
# These methods are used for setting the values of certain
# parameters and is primarily used for outside objects when it is
# necessary to make a change to a Channel object.
def setWorking(self,value) :
self.isWorking = value
def getWorking(self) :
return(self.isWorking)
def setSensor(self,sensor) :
self.sensor = sensor
def getSensor(self) :
return(self.sensor)
def setCommander(self,value):
self.commander = value
def getCommander(self) :
return(self.commander)
def setPlanner(self,planner) :
self.planner = planner
def getPlanner(self) :
return(self.planner)
def getRouter(self) :
return(self.router)
def setVacuum(self,vacuum) :
self.vacuum = vacuum
def getVacuum(self):
return(self.vacuum)
def setDebug(self,value) :
Channel.DEBUG = value
self.router.setDebug(value)
def getDebug(self) :
return(Channel.DEBUG)
def setRouterChannel(self,type,channel) :
self.router.setChannel(type,channel)
def setMyAgent(self,myAgent) :
self.myAgent = myAgent
def getMyAgent(self) :
return(self.myAgent)
def printChannelInformation(self,toPrint) :
print("Channel information {0}: {1} - {2}".format(toPrint,self,self.vacuumArray))
def sendString(self,type,message,id=-1,debug=False) :
if(self.router) :
self.router.sendString(type,message,id,debug)
def addVacuum(self,vacuum,id,xpos,ypos,debug=False) :
if(vacuum != None):
for definedVacuum in self.vacuumArray :
# Check to see if this vacuum is already defined. We can
# get into this routine from a variety of places. It might
# be possible to have already called this routine.
if(vacuum == definedVacuum) :
if(debug):
print("Channel.addVacuum, Found this one...")
return
while(id>=len(self.vacuumArray)) :
# There are not enough vacuum objects defined. Create
# place holders.
self.vacuumArray.append(None)
self.vacuumArray[id] = vacuum
#self.sendPlannerVacuumMovedPosition(id,xpos,ypos) #TODO - should this be commented out?
if(debug):
print("Channel.addVacuum - vacuum array: {0}".format(self.vacuumArray))
if (vacuum and self.world):
self.world.addVacuum(vacuum,debug)
def setNumberVacuums(self,number,x=0,y=0) :
self.router.setNumberVacuums(number) # set the number of vacuums for the router.
#print("Channel.setNumbervacuums: {0} - {1}, {2}".format(len(self.vacuumArray),self.vacuumArray,self))
# Routine to set the number of vacuums that are being tracked.
if(number > len(self.vacuumArray)) :
# There are more vacuums to be used than currently
# defined. Add the extras to the list.
for i in range(number-len(self.vacuumArray)):
#vacuum = Vacuum(len(self.vacuumArray))
self.addVacuum(None,len(self.vacuumArray),x,y)
elif (number < len(self.vacuumArray)) :
# Need to have fewer vacuums than what are currently
# defined. Delete the extras.
while(len(self.vacuumArray)>number) :
vacuum = self.vacuumArray.pop()
if (self.world):
self.world.deleteVacuum(vacuum)
def setWorld(self,value) :
self.world = value
def getWorld(self) :
return(self.world)
## receiveXMLReportParseAndDecide
#
# This is a generic routine. It receives an xml report and decides
# what it is and who it is for. It then calls the specific routine
# necessary to pass along the information.
def receiveXMLReportParseAndDecide(self,xmlString) :
dif = XMLIncomingDIF()
info = dif.determineXMLInformation(xmlString)
if(Channel.checkInfoType) :
print("Got information: {0}".format(info.getMyInformationType()))
Channel.checkInfoType = False
if(info.getMyInformationType() ==
XMLParser.MESSAGE_PLANNER_REPORT_VACUUM_ORDERS) :
if(self.planner) :
pos = info.getPos()
#print("sending report to planner for {0} - {1},{2}".format(
# info.getVacuumID(),pos[0],pos[1]))
self.planner.receiveReport(pos[0],pos[1]) #,info.getVacuumID())
elif(info.getMyInformationType() ==
XMLParser.MESSAGE_RECOMMEND_ORDER_COMMANDER_PLANNER) :
#print("Channel.recieveXMLReportParseAndDecide - send!")
if(self.planner) :
pos = info.getPos()
#print("sending report to planner for {0} - {1},{2}".format(
# info.getVacuumID(),pos[0],pos[1]))
self.planner.recommendOrder(info.getVacuumID(),pos[0],pos[1])
elif(info.getMyInformationType() ==
XMLParser.MESSAGE_RECOMMEND_ORDER_PLANNER_COMMANDER) :
if(self.commander) :
pos = info.getPos()
#print("Channel.receiveXMLReportParseAndDecide sending report to commander for {0} - {1},{2}".format(
# info.getVacuumID(),pos[0],pos[1]))
self.commander.receiveReport(pos[0],pos[1],info.getVacuumID())
elif(info.getMyInformationType() ==
XMLParser.MESSAGE_MOVE_ORDER_COMMANDER_VACUUM) :
pos = info.getPos()
vacuumID = info.getVacuumID()
#print("Channel: sending report to vacuum for {0} - {1},{2}".format(
# info.getVacuumID(),pos[0],pos[1]))
#if(vacuumID < len(self.vacuumArray)) :
if(self.vacuum) :
#print("Moving this vacuum {0} - {1},{2}".format(vacuumID,pos[0],pos[1]))
self.vacuum.moveord(pos[0],pos[1])
#self.vacuumArray[vacuumID].moveord(pos[0],pos[1])
elif(info.getMyInformationType() ==
XMLParser.MESSAGE_VACUUM_WORLD_CLEAN_GRID) :
if(self.world) :
pos = info.getPos()
vacuumID = info.getVacuumID()
#print("sending cleaning report to world from vacuum for {0} - {1},{2}".format(
# info.getVacuumID(),pos[0],pos[1]))
self.world.clean(pos[0],pos[1])
elif(info.getMyInformationType() ==
XMLParser.MESSAGE_WORLD_VACUUM_CURRENT_TIME) :
time = info.getTime()
vacuumID = info.getVacuumID()
#print("sending report to vacuum for {0}".format(vacuumID))
#if(vacuumID < len(self.vacuumArray)) :
if(self.vacuum) :
#print("sending to vacuum.")
self.vacuum.timeStep(time,info.getMatrixFromArray())
#self.vacuumArray[vacuumID].timeStep(time,info.getMatrixFromArray())
elif(info.getMyInformationType() ==
XMLParser.MESSAGE_VACUUM_WORLD_ADD_EXPENDITURE) :
if(self.world) :
expenditure = info.getExpenditure()
vacuumID = info.getVacuumID()
#print("sending expenditure report to world for {0} - {1}".format(
# info.getVacuumID(),expenditure))
self.world.addExpenditure(expenditure)
elif(info.getMyInformationType() ==
XMLParser.MESSAGE_MOVE_ORDER_COMMANDER_PLANNER) :
if(self.planner) :
pos = info.getPos()
#print("sending report to planner for {0} - {1},{2}".format(
# info.getVacuumID(),pos[0],pos[1]))
self.planner.receiveOrder(info.getVacuumID(),pos[0],pos[1])
elif(info.getMyInformationType() ==
XMLParser.MESSAGE_VACUUM_NEW_POSITION_PLANNER) :
if(self.planner) :
pos = info.getPos()
#print("sending vacuum position to planner for {0} - {1},{2}".format(
# info.getVacuumID(),pos[0],pos[1]))
self.planner.setVacuumLocation(info.getVacuumID(),pos[0],pos[1])
elif(info.getMyInformationType() ==
XMLParser.MESSAGE_GET_REPORT_VACUUM_COMMANDER) :
if(self.commander) :
pos = info.getPos()
#print("sending report to commander for {0} - {1},{2} - {3}".format(
# info.getVacuumID(),pos[0],pos[1],info.getStatus()))
self.commander.getReport(pos[0],pos[1],info.getStatus(),info.getVacuumID())
elif(info.getMyInformationType() == XMLParser.MESSAGE_WORLD_STATUS) :
#print("Send world status to the sensor.")
if(self.sensor) :
# let the sensor know the world status.
# print("Channel.receiveXMLReportParseAndDecide-XMLParser.MESSAGE_WORLD_STATUS")
self.sensor.setArray(info.getMatrixFromArray())
elif(info.getMyInformationType() == XMLParser.MESSAGE_WORLD_WETNESS) :
if(self.sensor) :
# Let the sensor know the wetness levels of the world.
#print("Channel.receiveXMLReportParseAndDecide - XMLParser.MESSAGE_WORLD_WETNESS")
self.sensor.setWet(info.getMatrixFromArray())
elif(info.getMyInformationType() == XMLParser.MESSAGE_UPDATE_WORLD_PLANNER) :
#print("Send world update to planner");
if(self.planner):
# Request that the planner make an update to its view.
#print("Update planner")
self.planner.updateView()
elif(info.getMyInformationType() == XMLParser.MESSAGE_UPDATE_REQUEST_PLANNER_SENSOR) :
if(self.sensor) :
# Request that the sensor make a request to measure
# the world.
#print("asking sensor to measure.")
#print("Channel.receiveXMLReportParseAndDecide - XMLParser.MESSAGE_UPDATE_REQUEST_PLANNER_SENSOR")
self.sensor.measure()
elif(info.getMyInformationType() == XMLParser.MESSAGE_STATUS_SENSOR_PLANNER) :
if(self.planner) :
# Send the planner what the sensor things the world status is.
#print("Send planner dirt levels.")
self.planner.setDirtLevels(info.getMatrixFromArray())
elif(info.getMyInformationType() == XMLParser.MESSAGE_WETNESS_SENSOR_PLANNER) :
if(self.planner) :
# Send the planner what the sensor things is the world
# wetness levels.
#print("send planner wet levels")
self.planner.setWet(info.getMatrixFromArray())
elif(info.getMyInformationType() == XMLParser.MESSAGE_EXTERNAL_PARAMETER) :
# This is a message from the outside with information
# about a parameter to set.
# print("External message")
host = ''
port = -1
hostType = -1
vacuumID = -1
for item in info.parameterList:
if(item[0] == XMLMessageExternalParameter.DUST_RATE) :
#print("dust rate: {0}".format(item[1]))
if(self.planner) :
#print("send planner dirt rate")
self.planner.setUnnormalizedDirtRate(float(item[1]))
if(self.world) :
self.world.setDirtRate(float(item[1]))
elif(item[0] == XMLMessageExternalParameter.DUST_SIZE) :
if(self.planner) :
#print("send planner dirt size")
self.planner.setUnnormalizedDirtSize(float(item[1]))
if(self.world) :
self.world.setDirtSize(float(item[1]))
elif(item[0] == XMLMessageExternalParameter.RAIN_RATE):
if(self.world) :
self.world.setRainRate(float(item[1]))
elif(item[0] == XMLMessageExternalParameter.RAIN_SIZE):
if(self.world) :
self.world.setRainSize(float(item[1]))
elif(item[0] == XMLMessageExternalParameter.GRID_SIZE):
if(self.world) :
self.world.setGridSize(int(item[1]))
if(self.sensor):
#print("Channel.receiveXMLReportParseAndDecide - XMLParser.GRID_SIZE")
self.sensor.setGridSize(int(item[1]))
if(self.planner):
#print("send planner grid size")
self.planner.setGridSize(int(item[1]))
elif(item[0] == XMLMessageExternalParameter.NUMBER_OF_VACUUMS):
#print("number vacs: {0}".format(int(item[1])))
self.setNumberVacuums(int(item[1]))
if(self.world) :
self.world.setNumberVacuums(int(item[1]))
elif(item[0] == XMLMessageExternalParameter.HOST_ADDRESS):
#print("set host: {0}".format(item[1]))
#self.router.setHost(item[1])
host = item[1]
elif(item[0] == XMLMessageExternalParameter.HOST_PORT):
#print("set port: {0}".format(item[1]))
#self.router.setPort(item[1])
port = int(item[1])
elif(item[0] == XMLMessageExternalParameter.HOST_TYPE) :
#print("host type: {0}".format(item[1]))
hostType = int(item[1])
elif(item[0] == XMLMessageExternalParameter.VACUUM_ID) :
#print("vacuum id: {0}".format(item[1]))
vacuumID = int(item[1])
if(host or (port>-1) or (hostType>-1) or (vacuumID>-1)):
# information was passed that provides information
# about the setup of the simulation.
if(host and (port>-1) and (hostType>-1)):
# There is enough information to define another
# agent in the system. If it is a vacuum, though
# we will need the vacuum id which has to be
# checked.
if(hostType == Router.VACUUM) :
if(vacuumID>-1):
#print("Set up vacuum's host information: {0} - {1} - {2} - {3}".format(hostType,host,port,vacuumID))
self.router.setHostInformation(hostType,host,port,vacuumID)
#else :
# print("Error - Badly formed message came in. Message with vacuum information did not include a vacuum id.")
else:
# This is information for an agent that is not
# a vacuum.
#print("Set up agent's host information: {0} - {1} - {2} - {3}".format(hostType,host,port,vacuumID))
self.router.setHostInformation(hostType,host,port,vacuumID)
else:
# If you get down here then incomplete information
# was given. Assume that the file had details
# about this particular agent.
if(host) :
self.router.setHost(host)
if(port > -1) :
self.router.setPort(port)
#if(hostType > -1) :
# print("Error - badly formed message came in. The host type was specified but the other required information was incomplete.")
elif(info.getMyInformationType() == XMLParser.MESSAGE_EXTERNAL_COMMAND) :
# This is a message from the outside with information
# about a command request
#print("External Command")
for item in info.parameterList:
if(item == XMLMessageExternalCommand.STOP) :
print("stop: {0}".format(item))
if(self.world) :
pass
elif(item == XMLMessageExternalCommand.START) :
print("start: {0}".format(item))
if(self.world) :
pass
elif(item == XMLMessageExternalCommand.RESTART) :
#print("restart: {0}".format(item))
if(self.world) :
pass
if(self.sensor) :
#self.sensor.shutdownServer()
pass
if(self.planner):
#self.planner.shutdownServer()
pass
if(self.commander) :
#self.commander.shutdownServer()
pass
if(self.vacuum) :
#self.vacuum.shutdownServer()
self.vacuum.setWorking(True)
self.vacuum.setStatus(3)
self.vacuum.initializeTime(0.0)
if(self.world) :
#self.world.shutdownServer()
pass
elif(item == XMLMessageExternalCommand.RESET) :
print("reset: {0}".format(item))
if(self.world) :
pass
elif(item == XMLMessageExternalCommand.POLL) :
print("poll: {0}".format(item))
if(self.world) :
pass
elif(item == XMLMessageExternalCommand.EXIT) :
#print("exit: {0}".format(item))
if(self.sensor) :
#print("Shutting down the server")
self.sensor.shutdownServer()
if(self.planner):
#print("Shutting down the planner")
self.planner.shutdownServer()
if(self.commander) :
#print("Shutting down the commander")
self.commander.shutdownServer()
if(self.vacuum) :
#print("Shutting down the vacuum")
self.vacuum.shutdownServer()
if(self.world) :
#print("Shutting down the world")
self.world.shutdownServer()
## sendVacuumReportFromCommander2Planner
#
# Routine that takes a report from the commander that identifies a
# particular vacuum and converts it into XML and passes it along
# to the planner so it will know where the vacuum was sent.
#
def sendVacuumReportFromCommander2Planner(self,xPos,yPos,IDnum) :
#print("Sending to id: {0} pos: {1},{2}".format(IDnum,xPos,yPos))
#network = XMLMessagePlannerReportVacuumOrders()
network = XMLMessageVacuumIDPosBase()
network.setVacuumID(IDnum)
network.setPos(xPos,yPos)
network.createRootNode()
network.specifyInformationType(XMLParser.MESSAGE_PLANNER_REPORT_VACUUM_ORDERS)
self.sendString(Router.PLANNER,network.xml2Char())
#self.receiveXMLReportParseAndDecide(network.xml2Char())
## sendRecommendOrderFromCommander2Planner
#
# Routine that takes a recommendation order from the commander
# that identifies a particular vacuum and converts it into XML and
# passes the XML tree on to the planner.
def sendRecommendOrderFromCommander2Planner(self,vacuumID,xPos,yPos) :
#print("Sending to id: {0} pos: {1},{2}".format(vacuumID,xPos,yPos))
#orders = XMLMessageRecommendOrderCommander2Planner()
orders = XMLMessageVacuumIDPosBase()
orders.setVacuumID(vacuumID)
orders.setPos(xPos,yPos)
orders.createRootNode()
orders.specifyInformationType(XMLParser.MESSAGE_RECOMMEND_ORDER_COMMANDER_PLANNER)
self.sendString(Router.PLANNER,orders.xml2Char())
#self.receiveXMLReportParseAndDecide(orders.xml2Char())
## sendRecommendOrderFromPlanner2Commander
#
# Routine that takes a recomendation order from the planner that
# identifies a particular vacuum and converts it into XML and
# passes the XML tree on to the commander.
def sendRecommendOrderFromPlanner2Commander(self,xPos,yPos,IDnum) :
#print("Sending to id: {0} pos: {1},{2}".format(IDnum,xPos,yPos))
#orders = XMLMessageRecommendOrderPlanner2Commander()
orders = XMLMessageVacuumIDPosBase()
orders.setVacuumID(IDnum)
orders.setPos(xPos,yPos)
orders.createRootNode()
orders.specifyInformationType(XMLParser.MESSAGE_RECOMMEND_ORDER_PLANNER_COMMANDER)
self.sendString(Router.COMMANDER,orders.xml2Char())
#self.receiveXMLReportParseAndDecide(orders.xml2Char())
## sendMoveOrderFromCommander2Vacuum
#
# Routine that takes an order from the commander and converts it
# into XML and passed the XML to the vacuum.
def sendMoveOrderFromCommander2Vacuum(self,xPos,yPos,vacuumID) :
#print("Sending to id: {0} pos: {1},{2}".format(vacuumID,xPos,yPos))
#orders = XMLMessageMoveOrderCommanderVacuum()
orders = XMLMessageVacuumIDPosBase()
orders.setVacuumID(vacuumID)
orders.setPos(xPos,yPos)
orders.createRootNode()
orders.specifyInformationType(XMLParser.MESSAGE_MOVE_ORDER_COMMANDER_VACUUM)
self.sendString(Router.VACUUM,orders.xml2Char(),vacuumID)
#self.receiveXMLReportParseAndDecide(orders.xml2Char())
## sendReportFromVacuum2Commander
#
# Routine to take a message from the vacuum that is a report for
# the commander. This routine relays that report to the commander.
def sendReportFromVacuum2Commander(self,xPos,yPos,status,IDnum) :
#print("Sending status to id: {0} pos: {1},{2} - {3}".format(
# IDnum,xPos,yPos,status))
report = XMLMessageGetReportVacuumCommander()
report.setVacuumID(IDnum)
report.setPos(xPos,yPos)
report.setStatus(status)
report.createRootNode()
#Channel.checkInfoType = True
#print("sending vacuum to commander")
self.sendString(Router.COMMANDER,report.xml2Char(),-1,False)
#self.receiveXMLReportParseAndDecide(report.xml2Char())
## sendMoveOrderFromCommander2Planner
#
# Routine to take a message from the commander that is an order to
# move a vacuum and relay it to the planner.
def sendMoveOrderFromCommander2Planner(self,xPos,yPos,IDnum) :
#print("Sending to id: {0} pos: {1},{2}".format(IDnum,xPos,yPos))
#orders = XMLMessageMoveOrderCommanderPlanner()
orders = XMLMessageVacuumIDPosBase()
orders.setVacuumID(IDnum)
orders.setPos(xPos,yPos)
orders.createRootNode()
orders.specifyInformationType(XMLParser.MESSAGE_MOVE_ORDER_COMMANDER_PLANNER)
self.sendString(Router.PLANNER,orders.xml2Char(),IDnum)
#self.sendString(Router.PLANNER,orders.xml2Char())
#self.receiveXMLReportParseAndDecide(orders.xml2Char())
## sendMeasuredFromPlanner2Sensor
#
# Routine to take a request from the planner to get information
# from the sensor and send it to the sensor.
def sendMeasuredFromPlanner2Sensor(self) :
sensorData = XMLMessageUpdatePlannerSensor()
#print("Channel.sendMeasuredFromPlanner2Sensor: {0}".format(sensorData.getMyInformationType()))
sensorData.createRootNode()
#Channel.checkInfoType = True
self.sendString(Router.SENSORARRAY,sensorData.xml2Char()) #,-1,True)
#self.receiveXMLReportParseAndDecide(sensorData.xml2Char())
def sendStatusSensor2Planner(self,noisyView) :
sensorData = XMLMessageSensorStatus(noisyView)
#print("Channel.sendStatusSensor2Planner: {0}".format(sensorData.getMyInformationType()))
sensorData.createRootNode()
#Channel.checkInfoType = True
self.sendString(Router.PLANNER,sensorData.xml2Char()) #,-1,True)
#self.receiveXMLReportParseAndDecide(sensorData.xml2Char())
## sendWorldStatusToSensor
#
# Routine to send the world's status to a sensor.
def sendWorldStatusToSensor(self,A) :
worldData = XMLMessageWorldStatus(A)
worldData.createRootNode()
#print("Channel.sendWorldStatusToSensor: sending data")
self.sendString(Router.SENSORARRAY,worldData.xml2Char(),-1)
#self.receiveXMLReportParseAndDecide(worldData.xml2Char())
## sendWorldWetnessToSensor
#
# Routine to send the world's wetness levels to a sensor.
def sendWorldWetnessToSensor(self,Moisture):
worldWetness = XMLMessageWorldWetness(Moisture)
worldWetness.createRootNode()
self.sendString(Router.SENSORARRAY,worldWetness.xml2Char())
#self.receiveXMLReportParseAndDecide(worldWetness.xml2Char())
## sendPlannerUpdateRequest
#
# Routine to send a request for an update to the planner. This
# tells the planner that it needs to take whatever actions are
# necessary during a world time step.
def sendPlannerUpdateRequest(self) :
update = XMLMessageUpdateWorldPlanner()
#print("Channel.sendPlannerUpdateRequest: {0}".format(update.getMyInformationType()))
update.createRootNode()
#Channel.checkInfoType = True
self.sendString(Router.PLANNER,update.xml2Char()) #,-1,True)
#self.receiveXMLReportParseAndDecide(update.xml2Char())
## sendPlannerVacuumMovedPosition
#
# Routine to send the new position of a vacuum. This comes from a
# vacuum and is sent to a planner.
def sendPlannerVacuumMovedPosition(self,idnum,xpos,ypos) :
#update = XMLMessageVaccumMovedReportToPlanner()
update = XMLMessageVacuumIDPosBase()
update.setVacuumID(idnum)
update.setPos(xpos,ypos)
update.createRootNode()
update.specifyInformationType(XMLParser.MESSAGE_VACUUM_NEW_POSITION_PLANNER)
self.sendString(Router.PLANNER,update.xml2Char())
#self.receiveXMLReportParseAndDecide(update.xml2Char())
## sendVacuumWorldTime
#
# Routine to send the current world time from the world to a
# vacuum. This tells the vacuum that it needs to take whatever
# actions are appropriate for a given time step.
def sendVacuumWorldTime(self,T,id,wetness) :
newTime = XMLMessageWorldVacuumCurrentTime(T,wetness)
newTime.setVacuumID(id)
newTime.createRootNode()
#print(newTime.xml2Char())
#print("Channel.sendVacuumWorldTime - {0}".format(id))
self.sendString(Router.VACUUM,newTime.xml2Char(),id,False)
#self.receiveXMLReportParseAndDecide(newTime.xml2Char())
## sendVacuumWorldExpenditure
#
# Routine to send an expenditure from a vacuum to the world.
def sendVacuumWorldExpenditure(self,expenditure,id) :
newExpenditure = XMLMessageVacuumAddExpenditureWorld(expenditure)
newExpenditure.setVacuumID(id)
newExpenditure.createRootNode()
#print(newExpenditure.xml2Char())
self.sendString(Router.WORLD,newExpenditure.xml2Char(),id)
#self.receiveXMLReportParseAndDecide(newExpenditure.xml2Char())
## sendWorldCleanedGrid
#
# Routine to let a vacuum send an update to the world to let it
# know that a grid area has been cleaned.
def sendWorldCleanedGrid(self,idnum,xpos,ypos) :
#update = XMLMessageVacuumCleanWorld()
update = XMLMessageVacuumIDPosBase()
update.setVacuumID(idnum)
update.setPos(xpos,ypos)
update.createRootNode()
update.specifyInformationType(XMLParser.MESSAGE_VACUUM_WORLD_CLEAN_GRID)
self.sendString(Router.WORLD,update.xml2Char(),idnum)
#self.receiveXMLReportParseAndDecide(update.xml2Char())
if (__name__ =='__main__') :
from XML.XMLMessageExternalCommand import XMLMessageExternalCommand
import sys
#parameter = XMLMessageExternalCommand()
#parameter.setParameterValue(XMLMessageExternalCommand.STOP)
#parameter.setParameterValue(XMLMessageExternalCommand.START)
#parameter.setParameterValue(XMLMessageExternalCommand.RESTART)
#parameter.setParameterValue(XMLMessageExternalCommand.RESET)
#parameter.setParameterValue(XMLMessageExternalCommand.POLL)
#parameter.createRootNode()
#print(parameter.xml2Char(True))
channel = Channel()
channel.getRouter().setNumberVacuums(7)
#channel.receiveXMLReportParseAndDecide(parameter.xml2Char(False))
#sys.exit(0)
from XML.XMLMessageExternalParameter import XMLMessageExternalParameter
from XML.XMLIncomingDIF import XMLIncomingDIF
parameter = XMLMessageExternalParameter()
parameter.setParameterValue(XMLMessageExternalParameter.DUST_RATE,0.2)
parameter.setParameterValue(XMLMessageExternalParameter.RAIN_RATE,0.4)
parameter.setParameterValue(XMLMessageExternalParameter.GRID_SIZE,5)
parameter.setParameterValue(XMLMessageExternalParameter.DUST_SIZE,0.3)
parameter.setParameterValue(XMLMessageExternalParameter.RAIN_SIZE,2.0)
parameter.setParameterValue(XMLMessageExternalParameter.GRID_SIZE,6)
#parameter.setParameterValue(XMLMessageExternalParameter.NUMBER_OF_VACUUMS,10)
parameter.setParameterValue(XMLMessageExternalParameter.HOST_ADDRESS,'192.168.0.1')
parameter.setParameterValue(XMLMessageExternalParameter.HOST_PORT,'43811')
parameter.setParameterValue(XMLMessageExternalParameter.HOST_TYPE,Router.VACUUM)
parameter.setParameterValue(XMLMessageExternalParameter.VACUUM_ID,2)
parameter.createRootNode()
message = parameter.xml2Char(False)
print("\n\n{0}".format(parameter.xml2Char(True)))
channel.receiveXMLReportParseAndDecide(message)
#dif = XMLIncomingDIF()
#incoming = dif.determineXMLInformation(message)
sys.exit(0)
from Planner import Planner
channel = Channel()
planner = Planner(1.0,1.0,1.0,1.0,4)
channel.setPlanner(planner)
channel.receiveXMLReportParseAndDecide(message)
print(message)
print("one: {1}\n{0}".format(channel.vacuumArray,len(channel.vacuumArray)))
channel.setNumberVacuums(7)
print("two: {1}\n{0}".format(channel.vacuumArray,len(channel.vacuumArray)))
channel.setNumberVacuums(3)
print("three: {1}\n{0}".format(channel.vacuumArray,len(channel.vacuumArray)))
| 34,349 | 11,603 |
# Generated by Django 2.0.9 on 2018-10-23 05:38
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('registers', '0006_auto_20181023_1117'),
]
operations = [
migrations.AlterField(
model_name='changerequest',
name='description',
field=models.TextField(blank=True, help_text='A brief description of what the change is for and why it is being undertaken', null=True),
),
]
| 498 | 167 |
# Copyright 2019, Offchain Labs, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from . import instructions
from . import value
# Estimated instruction-length cost of pushing a label/code point; used by
# the AST nodes' __len__ methods when sizing generated code.
PUSH_WEIGHT = 5

# Integer tags identifying each AST node kind (stored in ASTNode.asttype).
IF_ELSE_STATEMENT = 1
IF_STATEMENT = 2
WHILE_STATEMENT = 3
BLOCK_STATEMENT = 4
CALL_STATEMENT = 5
IMMEDIATE_OP = 6
INDIRECT_PUSH_STATEMENT = 7
BASIC_OP = 8
AVM_LABEL = 9
AVM_UNIQUE_LABEL = 10
FUNC_DEFINITION = 11
CAST_STATEMENT = 12
SET_ERROR_HANDLER_STATEMENT = 13
AVM_LABELED_POS = 14
AVM_LABELED_CODE_POINT = 15
class ASTNode:
    """Base class for all AST nodes.

    Carries the node-kind tag (``asttype``, one of the module-level
    ``*_STATEMENT``/``*_OP`` constants) and a mutable ``path`` list of
    labels attached to the node.
    """

    def __init__(self, asttype, path):
        """Store the node tag; ``path`` may be None, meaning an empty path."""
        self.asttype = asttype
        self.path = [] if path is None else path

    def add_node(self, typ):
        """Append one label/tag to this node's path."""
        self.path.append(typ)
def add_label_to_ast(node, label):
    """Attach *label* to *node*'s path and return the same node."""
    node.add_node(label)
    return node
class BlockStatement(ASTNode):
    """AST node holding an ordered sequence of operations.

    The combined stack effect of the sequence is computed lazily and
    memoized in ``stack_mod_saved``.
    """

    def __init__(self, code, path=None):
        super(BlockStatement, self).__init__(BLOCK_STATEMENT, path)
        assert isinstance(code, list)
        self.code = code
        # Memoized (mods, expectations) pair; None until first stack_mod().
        self.stack_mod_saved = None

    def clone(self):
        """Deep-copy the block: every op is cloned and the path is copied."""
        return BlockStatement([stmt.clone() for stmt in self.code], list(self.path))

    def __repr__(self):
        return "[{0}]".format(", ".join(str(stmt) for stmt in self.code))

    def __len__(self):
        # Total emitted-code length is the sum over the contained ops.
        return sum(len(stmt) for stmt in self.code)

    def stack_mod(self):
        """Return ({"pop": n, "push": m}, expectations) for the whole block.

        ``n`` is the deepest the block ever reaches below its starting
        stack level; ``m`` is what remains pushed above that after the
        block completes. The result is cached.
        """
        if self.stack_mod_saved is not None:
            return self.stack_mod_saved

        expectations = []
        depth = 0
        # Net depth observed just after each op's pops (before its pushes);
        # the minimum tells us how many values the block needs on entry.
        low_points = [0]
        for stmt in self.code:
            mods, expect = stmt.stack_mod()
            expectations.extend(expect)
            depth -= mods["pop"]
            low_points.append(depth)
            depth += mods["push"]
        pops_needed = -min(low_points)
        pushes_left = max(depth + pops_needed, 0)
        self.stack_mod_saved = (
            {"pop": pops_needed, "push": pushes_left},
            expectations,
        )
        return self.stack_mod_saved

    def typecheck(self, stack):
        """Typecheck each contained op in order against *stack*."""
        for stmt in self.code:
            stmt.typecheck(stack)

    def traverse_ast(self, func):
        """Visit this node, then every contained op, depth-first."""
        func(self)
        for stmt in self.code:
            stmt.traverse_ast(func)

    def modify_ast(self, func):
        """Rewrite children bottom-up with *func*, then this node itself."""
        rewritten = []
        for stmt in self.code:
            rewritten.append(stmt.modify_ast(func))
        self.code = rewritten
        return func(self)
class IfElseStatement(ASTNode):
    """Conditional AST node with both a true branch and a false branch."""

    def __init__(self, true_code, false_code, path=None):
        super(IfElseStatement, self).__init__(IF_ELSE_STATEMENT, path)
        self.true_code = true_code
        self.false_code = false_code

    def clone(self):
        """Deep-copy both branches and the label path."""
        true_copy = self.true_code.clone()
        false_copy = self.false_code.clone()
        return IfElseStatement(true_copy, false_copy, list(self.path))

    def __repr__(self):
        return "IfElse({0}, {1})".format(self.true_code, self.false_code)

    def __len__(self):
        # Branch bodies + 2 control instructions + 2 label pushes.
        return 2 + 2 * PUSH_WEIGHT + len(self.true_code) + len(self.false_code)

    def stack_mod(self):
        """Combine the branches' stack effects; pops one extra (the condition).

        Adds an "eq" expectation that both branches have the same net
        stack change, since either may execute.
        """
        t_mods, t_expect = self.true_code.stack_mod()
        f_mods, f_expect = self.false_code.stack_mod()
        expectations = t_expect + f_expect
        expectations.append(
            (
                "eq",
                t_mods["push"] - t_mods["pop"],
                f_mods["push"] - f_mods["pop"],
            )
        )
        mods = {
            "pop": 1 + max(t_mods["pop"], f_mods["pop"]),
            "push": max(t_mods["push"], f_mods["push"]),
        }
        return mods, expectations

    def typecheck(self, stack):
        """Pop the int condition, check each branch on its own copy, merge."""
        stack.pop(value.IntType())
        other_branch = stack.clone()
        self.true_code.typecheck(stack)
        self.false_code.typecheck(other_branch)
        stack.merge(other_branch)

    def traverse_ast(self, func):
        func(self)
        self.true_code.traverse_ast(func)
        self.false_code.traverse_ast(func)

    def modify_ast(self, func):
        self.true_code = self.true_code.modify_ast(func)
        self.false_code = self.false_code.modify_ast(func)
        return func(self)
class CastStatement(ASTNode):
    """Reinterpret the value on top of the stack as ``typ``.

    A cast emits no AVM instructions (``len(self)`` is 0); it only
    changes the type checker's view of the top of stack.
    """

    def __init__(self, typ, path=None):
        super(CastStatement, self).__init__(CAST_STATEMENT, path)
        self.typ = typ

    def clone(self):
        return CastStatement(self.typ, list(self.path))

    def __repr__(self):
        return "CastStatement({})".format(self.typ)

    def __len__(self):
        # Purely a typechecking construct; generates no code.
        return 0

    def stack_mod(self):
        # Consumes and re-produces the same stack slot under a new type.
        return {"pop": 1, "push": 1}, []

    def typecheck(self, stack):
        stack.pop()
        stack.push(self.typ)

    def traverse_ast(self, func):
        # Fix: this was previously `pass`, which made traverse_ast silently
        # skip cast nodes. Every sibling node class visits itself with
        # func(self); a cast has no children, so that is all that's needed.
        func(self)

    def modify_ast(self, func):
        return func(self)
class IfStatement(ASTNode):
    """One-armed conditional: pops an int and runs the body if it is true."""

    def __init__(self, true_code, path=None):
        super(IfStatement, self).__init__(IF_STATEMENT, path)
        self.true_code = true_code

    def clone(self):
        return IfStatement(self.true_code.clone(), list(self.path))

    def __repr__(self):
        return "If({})".format(self.true_code)

    def __len__(self):
        # body plus fixed overhead (see PUSH_WEIGHT)
        return PUSH_WEIGHT + 2 + len(self.true_code)

    def stack_mod(self):
        body_mod, body_exp = self.true_code.stack_mod()
        constraints = list(body_exp)
        # The body must be stack-neutral, since it may be skipped entirely.
        constraints.append(("eq", body_mod["push"] - body_mod["pop"], 0))
        mods = {"pop": body_mod["pop"] + 1, "push": body_mod["push"]}
        return mods, constraints

    def typecheck(self, stack):
        stack.pop(value.IntType())
        snapshot = stack.clone()
        self.true_code.typecheck(stack)
        # Merge the taken and not-taken paths.
        stack.merge(snapshot)

    def traverse_ast(self, func):
        func(self)
        self.true_code.traverse_ast(func)

    def modify_ast(self, func):
        self.true_code = self.true_code.modify_ast(func)
        return func(self)
class WhileStatement(ASTNode):
    """Loop node: run cond_code, pop an int, and run body_code while true."""
    def __init__(self, cond_code, body_code, path=None):
        super(WhileStatement, self).__init__(WHILE_STATEMENT, path)
        self.cond_code = cond_code
        self.body_code = body_code
    def clone(self):
        return WhileStatement(
            self.cond_code.clone(), self.body_code.clone(), list(self.path)
        )
    def __repr__(self):
        return "WhileStatement({}, {})".format(self.cond_code, self.body_code)
    def __len__(self):
        # cond and body plus fixed overhead (see PUSH_WEIGHT)
        return len(self.cond_code) + len(self.body_code) + 3 + 2 * PUSH_WEIGHT
    def stack_mod(self):
        """Net stack effect plus an 'eq' expectation that one full iteration
        (cond, minus the int it consumes, plus body) is stack-neutral."""
        cmod, cond_expectations = self.cond_code.stack_mod()
        bmod, body_expectation = self.body_code.stack_mod()
        expectations = cond_expectations + body_expectation
        expectations.append(
            (
                "eq",
                cmod["push"] - cmod["pop"] - 1 + bmod["push"] - bmod["pop"],
                0,
                "while_loop({}, {}, {}, {})".format(
                    cmod["pop"], cmod["push"], bmod["pop"], bmod["push"]
                ),
            )
        )
        # Deepest reach into the existing stack: either cond's own pops, or
        # the body's pops after accounting for what cond left behind.
        pop_count = max(cmod["pop"], bmod["pop"] + 1 - cmod["push"] + cmod["pop"])
        mods = {"push": pop_count + cmod["push"] - cmod["pop"] - 1, "pop": pop_count}
        return mods, expectations
    def typecheck(self, stack):
        # Clone the pre-loop stack so the exit path can be merged with the
        # state after one iteration.
        temp = stack.clone()
        self.cond_code.typecheck(stack)
        stack.pop(value.IntType())
        self.body_code.typecheck(stack)
        temp.merge(stack)
    def traverse_ast(self, func):
        func(self)
        self.cond_code.traverse_ast(func)
        self.body_code.traverse_ast(func)
    def modify_ast(self, func):
        self.cond_code = self.cond_code.modify_ast(func)
        self.body_code = self.body_code.modify_ast(func)
        return func(self)
class FuncDefinition(ASTNode):
    """A function definition: its name, the Python-level func object
    (carrying declared stack effects), and the compiled body code."""
    def __init__(self, name, func, code, is_callable, path=None):
        super(FuncDefinition, self).__init__(FUNC_DEFINITION, path)
        self.name = name
        self.func = func
        self.code = code
        self.is_callable = is_callable
        if not isinstance(code, ASTNode):
            raise Exception("Func definition expects ASTNode for code")
    def clone(self):
        raise Exception("Func definitions aren't clonable")
    def __repr__(self):
        return "FuncDefinition({})".format(self.code)
    def __len__(self):
        return len(self.code)
    def can_typecheck(self):
        # Only typecheckable when the func declares its stack effects and
        # opts in via a truthy `typecheck` attribute.
        return (
            hasattr(self.func, "pops")
            and hasattr(self.func, "pushes")
            and self.func.typecheck
        )
    def typecheck(self):
        """Typecheck the body against the func's declared pops/pushes."""
        # Seed the stack with argument types, reversed so the first pop in
        # the body yields the first declared type.
        stack = value.TypeStack()
        for typ in self.func.pops[::-1]:
            stack.push(typ)
        try:
            self.code.typecheck(stack)
        except Exception as err:
            raise Exception("Error typechecking {} body: {}".format(self.name, err))
        try:
            # The body must leave exactly the declared return types.
            for typ in self.func.pushes:
                stack.pop(typ)
        except Exception as err:
            raise Exception(
                "Error typechecking {} return vals: {}".format(self.name, err)
            )
    def traverse_ast(self, func):
        func(self)
        self.code.traverse_ast(func)
    def modify_ast(self, func):
        self.code = self.code.modify_ast(func)
        return func(self)
class CallStatement(ASTNode):
    """Call to a Python-level function with declared stack effects."""

    def __init__(self, func, path=None):
        super(CallStatement, self).__init__(CALL_STATEMENT, path)
        self.func = func
        self.func_name = "{}.{}".format(func.__module__, func.__name__)
        self.is_callable = True
        if hasattr(self.func, "uncountable"):
            # Stack effect cannot be determined statically.
            self.mods = {"pop": 0, "push": 0}, [("invalid",)]
        elif hasattr(self.func, "pushes") and hasattr(self.func, "pops"):
            self.mods = (
                {"pop": len(self.func.pops), "push": len(self.func.pushes)},
                [],
            )
            self.pops = self.func.pops
            self.pushes = self.func.pushes
        else:
            raise Exception("Call {} has unknown stack mods".format(self.func_name))

    def clone(self):
        return CallStatement(self.func, list(self.path))

    def __repr__(self):
        return "Call({})".format(self.func_name)

    def __len__(self):
        return 12  # rough estimate of emitted instructions

    def stack_mod(self):
        mods, constraints = self.mods
        return mods, constraints

    def typecheck(self, stack):
        try:
            for popped in self.func.pops:
                stack.pop(popped)
            # Pushes are declared outermost-first; push in reverse so the
            # first declared type ends up on top.
            for pushed in self.func.pushes[::-1]:
                stack.push(pushed)
        except Exception as err:
            raise Exception(
                "Type error calling func {}: {}".format(self.func_name, err)
            )

    def traverse_ast(self, func):
        func(self)

    def modify_ast(self, func):
        return func(self)
class SetErrorHandlerFunctionStatement(ASTNode):
    """Statement that registers a Python function as the error handler."""

    def __init__(self, func, path=None):
        super(SetErrorHandlerFunctionStatement, self).__init__(
            SET_ERROR_HANDLER_STATEMENT, path
        )
        self.func = func
        self.func_name = "{}.{}".format(func.__module__, func.__name__)
        self.is_callable = False

    def clone(self):
        return SetErrorHandlerFunctionStatement(self.func, list(self.path))

    def __repr__(self):
        return "SetErrorHandlerFunction({})".format(self.func_name)

    def __len__(self):
        return 1  # rough estimate

    def stack_mod(self):
        # Registering a handler leaves the stack untouched.
        return {"pop": 0, "push": 0}, []

    def typecheck(self, stack):
        pass

    def traverse_ast(self, func):
        func(self)

    def modify_ast(self, func):
        return func(self)
class IndirectPushStatement(ASTNode):
    """Push a value (a code point, for labels) onto the stack indirectly."""

    def __init__(self, val, path=None):
        super(IndirectPushStatement, self).__init__(INDIRECT_PUSH_STATEMENT, path)
        self.val = val

    def clone(self):
        return IndirectPushStatement(self.val, list(self.path))

    def __repr__(self):
        return "Push({})".format(self.val)

    def __len__(self):
        return 6  # rough estimate

    def stack_mod(self):
        return {"pop": 0, "push": 1}, []

    def typecheck(self, stack):
        # Labels become code pointers at runtime; other values keep their type.
        pushed = value.CodePointType() if isinstance(self.val, AVMLabel) else self.val
        stack.push(pushed)

    def traverse_ast(self, func):
        func(self)

    def modify_ast(self, func):
        return func(self)
class AVMLabel(ASTNode):
    """A named code location. Compares, sorts and hashes by name."""

    def __init__(self, name, path=None):
        super(AVMLabel, self).__init__(AVM_LABEL, path)
        self.name = name

    def clone(self):
        raise Exception("You can't clone a label '{}'".format(self.name))

    def __len__(self):
        # Labels mark positions; they emit no code.
        return 0

    def __lt__(self, other):
        return self.name < other.name

    def stack_mod(self):
        return {"pop": 0, "push": 0}, []

    def typecheck(self, stack):
        pass

    def traverse_ast(self, func):
        func(self)

    def modify_ast(self, func):
        return func(self)

    def __repr__(self):
        return "AVMLabel({})".format(self.name)

    def __eq__(self, other):
        if not isinstance(other, AVMLabel):
            return False
        return self.name == other.name

    def __ne__(self, other):
        # BUGFIX: previously `self.name != other.name`, which raised
        # AttributeError for non-AVMLabel operands while __eq__ returned
        # False. Keep __ne__ consistent with __eq__.
        return not self.__eq__(other)

    def __hash__(self):
        assert isinstance(self.name, str)
        return self.name.__hash__()
class AVMUniqueLabel(ASTNode):
    """A globally unique named code location. Compares and hashes by name."""

    def __init__(self, name, path=None):
        super(AVMUniqueLabel, self).__init__(AVM_UNIQUE_LABEL, path)
        self.name = name

    def clone(self):
        raise Exception("You can't clone a label '{}'".format(self.name))

    def __len__(self):
        # Labels mark positions; they emit no code.
        return 0

    def stack_mod(self):
        return {"pop": 0, "push": 0}, []

    def typecheck(self, stack):
        pass

    def traverse_ast(self, func):
        func(self)

    def modify_ast(self, func):
        return func(self)

    def __repr__(self):
        return "AVMUniqueLabel({})".format(self.name)

    def __eq__(self, other):
        if not isinstance(other, AVMUniqueLabel):
            return False
        return self.name == other.name

    def __ne__(self, other):
        # BUGFIX: previously `self.name != other.name`, which raised
        # AttributeError for non-AVMUniqueLabel operands while __eq__
        # returned False. Keep __ne__ consistent with __eq__.
        return not self.__eq__(other)

    def __hash__(self):
        assert isinstance(self.name, str)
        return self.name.__hash__()
class AVMLabeledPos(ASTNode):
    """A label resolved to a program counter; equality compares pc only."""

    def __init__(self, name, pc, path=None):
        super(AVMLabeledPos, self).__init__(AVM_LABELED_POS, path)
        self.name = name
        self.pc = pc

    def clone(self):
        return AVMLabeledPos(self.name, self.pc, list(self.path))

    def __len__(self):
        return 0

    def stack_mod(self):
        return {"pop": 0, "push": 0}, []

    def typecheck(self, stack):
        pass

    def traverse_ast(self, func):
        func(self)

    def modify_ast(self, func):
        return func(self)

    def __repr__(self):
        return "AVMLabeledPos({}, {})".format(self.name, self.pc)

    def __eq__(self, other):
        return isinstance(other, AVMLabeledPos) and self.pc == other.pc
class AVMLabeledCodePoint(ASTNode):
    """A label resolved to a code point; equality compares pc only."""

    def __init__(self, name, pc, path=None):
        super(AVMLabeledCodePoint, self).__init__(AVM_LABELED_CODE_POINT, path)
        self.name = name
        self.pc = pc

    def clone(self):
        return AVMLabeledCodePoint(self.name, self.pc, list(self.path))

    def __len__(self):
        return 0

    def stack_mod(self):
        return {"pop": 0, "push": 0}, []

    def typecheck(self, stack):
        pass

    def traverse_ast(self, func):
        func(self)

    def modify_ast(self, func):
        return func(self)

    def __repr__(self):
        return "AVMLabeledCodePoint({}, {})".format(self.name, self.pc)

    def __eq__(self, other):
        return isinstance(other, AVMLabeledCodePoint) and self.pc == other.pc
def typecheck_tget(stack):
    """Typecheck a tget op: pops an index and a tuple, pushes the element type."""
    idx = stack.pop(value.IntType())
    tup = stack.pop(value.TupleType())
    # A concrete (int) index must be in range for the tuple type.
    if isinstance(idx, int) and not tup.has_member_at_index(idx):
        raise Exception("Tried to get index {} from tuple {}".format(idx, tup))
    stack.push(tup.get_tup(idx))
# Per-opcode typecheck handlers that need custom stack manipulation beyond
# the generic pop/push type lists (used by BasicOp.typecheck below).
OP_HANDLER = {
    "auxpush": lambda stack: stack.push_aux(stack.pop()),
    "auxpop": lambda stack: stack.push(stack.pop_aux()),
    "dup0": instructions.dup0,
    "dup1": instructions.dup1,
    "dup2": instructions.dup2,
    "swap1": instructions.swap1,
    "swap2": instructions.swap2,
    "tget": typecheck_tget,
    "tset": instructions.tset,
}
class BasicOp(ASTNode):
    """A single AVM instruction identified by its numeric op code."""

    def __init__(self, op_code, path=None):
        super(BasicOp, self).__init__(BASIC_OP, path)
        self.op_code = op_code

    def clone(self):
        return BasicOp(self.op_code, list(self.path))

    def __len__(self):
        return 1

    def __repr__(self):
        if self.op_code not in instructions.OP_NAMES:
            return "Unhandled OpCode"
        return instructions.OP_NAMES[self.op_code]

    def get_op(self):
        return self.op_code

    def stack_mod(self):
        info = instructions.OF_INFO[self.op_code]
        mod = {"pop": len(info["pop"]), "push": len(info["push"])}
        # Control transfers make static stack tracking invalid.
        if instructions.OP_NAMES[self.op_code] in ("jump", "cjump", "halt"):
            return mod, [("invalid",)]
        return mod, []

    def typecheck(self, stack):
        try:
            name = instructions.OP_NAMES[self.op_code]
            handler = OP_HANDLER.get(name)
            if handler is not None:
                handler(stack)
            else:
                # Generic case: pop/push the declared type lists.
                info = instructions.OF_INFO[self.op_code]
                for popped in info["pop"]:
                    stack.pop(popped)
                for pushed in info["push"]:
                    stack.push(pushed)
        except Exception as err:
            raise Exception(
                "Exception typechecking {}: {}".format(
                    instructions.OP_NAMES[self.op_code], err
                )
            )

    def __eq__(self, other):
        return isinstance(other, BasicOp) and self.op_code == other.op_code

    def traverse_ast(self, func):
        func(self)

    def modify_ast(self, func):
        return func(self)
class ImmediateOp(ASTNode):
    """An op fused with an immediate value pushed just before it executes."""

    def __init__(self, op, val, path=None):
        super(ImmediateOp, self).__init__(IMMEDIATE_OP, path)
        self.op = op
        self.val = val

    def clone(self):
        return ImmediateOp(self.op, self.val, list(self.path))

    def __len__(self):
        return 1  # rough estimate

    def __repr__(self):
        return "Immediate({}, {})".format(self.op, self.val)

    def get_op(self):
        return self.op.get_op()

    def stack_mod(self):
        mods, constraints = self.op.stack_mod()
        # The immediate supplies one operand itself: either one fewer value
        # is popped from the stack, or one extra value ends up pushed.
        if mods["pop"] > 0:
            mods["pop"] -= 1
        else:
            mods["push"] += 1
        return mods, constraints

    def typecheck(self, stack):
        # Labels become code pointers at runtime; other values keep their type.
        pushed = value.CodePointType() if isinstance(self.val, AVMLabel) else self.val
        stack.push(pushed)
        self.op.typecheck(stack)

    def traverse_ast(self, func):
        func(self)

    def modify_ast(self, func):
        return func(self)
| 19,826 | 6,419 |
load("//bazel/linux:providers.bzl", "KernelBundleInfo", "KernelImageInfo", "RuntimeBundleInfo", "RuntimeInfo")
load("//bazel/linux:utils.bzl", "expand_deps", "get_compatible")
load("//bazel/utils:messaging.bzl", "location", "package")
load("//bazel/utils:files.bzl", "files_to_dir")
load("@bazel_skylib//lib:shell.bzl", "shell")
def _kunit_bundle(ctx):
    """Implementation of kunit_bundle: generates the init script that loads
    the kernel modules, the check script that parses KUnit output, and the
    runfiles each of them needs."""
    ki = ctx.attr.image[KernelImageInfo]
    # Pick module variants compatible with the image's arch/package, then
    # expand their dependency closure up to ctx.attr.depth levels.
    mods = get_compatible(ctx, ki.arch, ki.package, ctx.attr.module)
    alldeps = expand_deps(ctx, mods, ctx.attr.depth)
    commands = [
        # modprobe does not work correctly without /sys
        "mount -t sysfs sysfs /sys",
    ]
    inputs = []
    for kmod in alldeps:
        commands += ["", "# module " + package(kmod.label)]
        if kmod.setup:
            commands += kmod.setup
        for mod in kmod.files:
            # Only .ko files are loadable kernel modules.
            if mod.extension != "ko":
                continue
            commands.append("load " + mod.short_path)
            inputs.append(mod)
    # Script run inside the test environment: loads modules, runs KUnit.
    init = ctx.actions.declare_file(ctx.attr.name + "-kunit.sh")
    ctx.actions.expand_template(
        template = ctx.file._template_kunit,
        output = init,
        substitutions = {
            "{target}": package(ctx.label),
            "{message}": "KUNIT TESTS",
            "{commands}": "\n".join(commands),
        },
        is_executable = True,
    )
    # Script run outside: feeds the captured TAP output to the parser.
    check = ctx.actions.declare_file(ctx.attr.name + "-check.sh")
    ctx.actions.expand_template(
        template = ctx.file._template_check,
        output = check,
        substitutions = {
            "{target}": package(ctx.label),
            "{parser}": ctx.executable._parser.short_path,
        },
        is_executable = True,
    )
    # The parser runs on the host (check step); the modules are needed
    # inside the test environment (run step).
    outside_runfiles = ctx.runfiles(files = ctx.attr._parser.files.to_list())
    outside_runfiles = outside_runfiles.merge(ctx.attr._parser.default_runfiles)
    inside_runfiles = ctx.runfiles(inputs)
    return [
        DefaultInfo(files = depset([init, check]), runfiles = inside_runfiles.merge(outside_runfiles)),
        RuntimeBundleInfo(
            run = RuntimeInfo(binary = init, runfiles = inside_runfiles),
            check = RuntimeInfo(binary = check, runfiles = outside_runfiles),
        ),
    ]
# Public rule; the heavy lifting is in _kunit_bundle above.
kunit_bundle = rule(
    doc = """\
Generates a directory containing the kernel modules, all their dependencies,
and an init script to run them as a kunit test.""",
    implementation = _kunit_bundle,
    attrs = {
        "module": attr.label(
            mandatory = True,
            providers = [KernelBundleInfo],
            doc = "The label of the KUnit linux kernel module to be used for testing. It must define a kunit_test_suite so that when loaded, KUnit will start executing its tests.",
        ),
        "image": attr.label(
            mandatory = True,
            providers = [KernelImageInfo],
            doc = "The label of a kernel image this test will run against. Important to select the correct architecture and package module.",
        ),
        "depth": attr.int(
            default = 5,
            doc = "Maximum recursive depth when expanding a list of kernel module dependencies.",
        ),
        "_template_kunit": attr.label(
            allow_single_file = True,
            default = Label("//bazel/linux:templates/kunit.template.sh"),
            doc = "The template to generate the bash script used to run the tests.",
        ),
        "_template_check": attr.label(
            allow_single_file = True,
            default = Label("//bazel/linux:templates/check_kunit.template.sh"),
            doc = "The template to generate the bash script used to run the tests.",
        ),
        "_parser": attr.label(
            default = Label("//bazel/linux/kunit:kunit_zip"),
            doc = "KUnit TAP output parser.",
            executable = True,
            cfg = "host",
        ),
    },
)
| 3,850 | 1,124 |
import sys
from PyQt5.QtWidgets import (
QApplication,
QMainWindow,
QWidget,
QVBoxLayout,
QStackedLayout,
QPushButton,
)
from PyQt5.QtGui import (
QPalette,
QColor,
)
class Color(QWidget):
    """A widget that paints itself with a single solid color."""

    def __init__(self, color, *args, **kwargs):
        super(Color, self).__init__(*args, **kwargs)
        # Without auto-fill, the palette's window color is never painted.
        self.setAutoFillBackground(True)
        pal = self.palette()
        pal.setColor(QPalette.Window, QColor(color))
        self.setPalette(pal)
class MainWindow(QMainWindow):
    """Main window: a stack of colored pages plus a button that cycles them."""

    def __init__(self, *args, **kwargs):
        super(MainWindow, self).__init__(*args, **kwargs)
        self.color_index = 3
        self.setWindowTitle("Jayone's Awesome App")

        outer = QVBoxLayout()
        stack = QStackedLayout()
        for name in ('red', 'green', 'blue', 'yellow'):
            stack.addWidget(Color(name))
        stack.setCurrentIndex(self.color_index)
        outer.addLayout(stack)
        self.stack_layout = stack

        button = QPushButton('change')
        button.clicked.connect(self.button_click)
        outer.addWidget(button)

        container = QWidget()
        container.setLayout(outer)
        self.setCentralWidget(container)

    def button_click(self):
        """Show the next color page, wrapping back to the first after the last."""
        self.color_index = (self.color_index + 1) % 4
        self.stack_layout.setCurrentIndex(self.color_index)
# Application bootstrap: build the window and hand control to the Qt loop.
app = QApplication(sys.argv)
window = MainWindow()
window.show()
app.exec_()
| 1,562 | 507 |
"""
http://arxiv.org/abs/1412.7449 and https://arxiv.org/abs/1508.04025 (mostly the latter)
TODO: read from model path if provided
beam search decoding
unk replacement from attention scores
"""
import encoders
import decoders
import tensorflow as tf
import numpy as np
import os
from msc.constants import Constants
class BaseModel():
    """Abstract interface for trainable sequence models; subclasses override."""
    def save(self, path):
        """Persist model parameters to `path`."""
        raise NotImplementedError("Override me")
    def load(self, ckpt_dir):
        """Restore model parameters from a checkpoint directory."""
        raise NotImplementedError("Override me")
    def train_on_batch(self, x_batch, x_lens, y_batch, y_lens, learning_rate=1.0):
        """Run one optimization step on a minibatch."""
        raise NotImplementedError("Override me")
    def run_on_batch(self, x_batch, x_lens, y_batch, y_lens, learning_rate=1.0):
        """Run a forward pass (no weight update) on a minibatch."""
        raise NotImplementedError("Override me")
class Seq2SeqV3(object):
    """Attention-based seq2seq translation model (see module docstring).

    Builds the whole TF graph at construction time: placeholders, an
    encoder/decoder pair (either the library seq2seq graphs or the custom
    encoders/decoders modules), a length-masked cross-entropy loss, and a
    gradient-clipped training op.

    NOTE(review): this file uses Python 2 syntax (print statement in
    `load` below).
    """
    def __init__(self, config, dataset, sess, testing=False):
        self.sess = sess
        # Copy hyperparameters out of the config object.
        self.src_vocab_size = config.src_vocab_size
        self.target_vocab_size = config.target_vocab_size
        self.max_source_len = config.max_source_len
        self.max_target_len = config.max_target_len
        self.embedding_size = config.embedding_size
        self.num_layers = config.num_layers
        self.network_type = config.network_type
        self.attention = config.attention
        self.encoder_type = config.encoder_type
        self.decoder_type = config.decoder_type
        self.hidden_size = config.hidden_size
        self.optimizer = config.optimizer
        self.batch_size = config.batch_size
        self.train_dropout = config.dropout_rate
        self.max_grad_norm = config.max_grad_norm
        self.dataset = dataset
        self.testing = testing
        # Graph inputs: fixed-size [batch, max_len] int tensors plus the
        # true (pre-padding) lengths of each sequence.
        self.learning_rate = tf.placeholder(tf.float32, shape=(), name="lr")
        self.source = tf.placeholder(tf.int32, [self.batch_size, self.max_source_len], name="source")
        self.source_len = tf.placeholder(tf.int32, [self.batch_size], name="source_len")
        self.target = tf.placeholder(tf.int32, [self.batch_size, self.max_target_len], name="target")
        self.target_len = tf.placeholder(tf.int32, [self.batch_size], name="target_len")
        self.dropout = tf.placeholder(tf.float32, name="dropout")
        self.global_step = tf.Variable(0, dtype=tf.int32, trainable=False, name='global_step')
        if self.network_type == 'default':
            self.decoder_output = self.build_default_seq2seq()
        else:
            self.decoder_output = self.build_custom_seq2seq()
        # compute average per-word loss across all batches (log perplexity)
        self.loss = self.cross_entropy_sequence_loss(logits=self.decoder_output,
                                                     targets=self.target,
                                                     seq_len=self.target_len)
        # compute and apply gradients
        self.train_step = self.backward_pass(self.loss)
        # tf boilerplate
        # os.environ['CUDA_VISIBLE_DEVICES'] = '0'
        # gpu_options = tf.GPUOptions(allow_growth=True)
        # self.sess = tf.Session(config=tf.ConfigProto(gpu_options=gpu_options, allow_soft_placement=True))
        self.saver = tf.train.Saver()
    def build_default_seq2seq(self):
        """Build the model with tf.nn.seq2seq's prebuilt embedding graphs.

        Returns decoder logits of shape [batch, max_target_len, vocab].
        """
        with tf.variable_scope('network'):
            # projection to output embeddings
            out_embed_W = tf.get_variable("o_embed_W",
                                          shape=[self.hidden_size, self.target_vocab_size],
                                          initializer=tf.contrib.layers.xavier_initializer())
            out_embed_b = tf.get_variable("o_embed_b",
                                          shape=[self.target_vocab_size],
                                          initializer=tf.contrib.layers.xavier_initializer())
            # projection to logits
            out_W = tf.get_variable("Wo", shape=[self.hidden_size, self.target_vocab_size],
                                    initializer=tf.contrib.layers.xavier_initializer())
            out_b = tf.get_variable("bo", shape=[self.target_vocab_size],
                                    initializer=tf.contrib.layers.xavier_initializer())
            cell = self.build_rnn_cell()
            source = tf.unstack(self.source, axis=1)
            # if testing, only the first token will be taken. so fill up the target with starts
            if self.testing:
                target = tf.constant(np.full((self.batch_size, self.max_target_len), Constants.START_I))
            else:
                target = self.target
            target = tf.unstack(target, axis=1)
            if self.attention == 'off':
                decoder_output, state = \
                    tf.nn.seq2seq.embedding_rnn_seq2seq(
                        source,
                        target,
                        cell,
                        num_encoder_symbols=self.src_vocab_size,
                        num_decoder_symbols=self.target_vocab_size,
                        embedding_size=self.embedding_size,
                        feed_previous=self.testing,
                        output_projection=(out_embed_W, out_embed_b))
            else:
                decoder_output, state = \
                    tf.nn.seq2seq.embedding_attention_seq2seq(
                        source,
                        target,
                        cell,
                        num_encoder_symbols=self.src_vocab_size,
                        num_decoder_symbols=self.target_vocab_size,
                        embedding_size=self.embedding_size,
                        feed_previous=self.testing,
                        output_projection=(out_embed_W, out_embed_b))
            # project each timestep to vocab logits, then stack back into
            # [batch, time, vocab]
            decoder_output = [tf.matmul(x, out_W) + out_b for x in decoder_output]
            decoder_output = tf.transpose(tf.stack(decoder_output), (1, 0, 2))
        return decoder_output
    def build_custom_seq2seq(self):
        """Build the model from this project's encoders/decoders modules."""
        # get word vectors for the source and target
        source_embedded, target_embedded = self.get_embeddings(self.source, self.target)
        # run everything through the encoder and decoder
        decoder_output = self.encode_decode(source=source_embedded,
                                            source_len=self.source_len,
                                            target=target_embedded,
                                            target_len=self.target_len)
        return decoder_output
    def save(self, path):
        """Write a checkpoint at `path`, tagged with the global step."""
        self.saver.save(self.sess, path, global_step=self.global_step)
    def load(self, filepath=None, dir=None):
        """Restore weights from an explicit checkpoint file or a directory."""
        print("\t Reading checkpoints...")
        if dir is not None:
            ckpt = tf.train.get_checkpoint_state(dir)
            if ckpt and ckpt.model_checkpoint_path:
                self.saver.restore(self.sess, ckpt.model_checkpoint_path)
                # NOTE(review): Python 2 print statement; "SUCESS" typo kept as-is.
                print '\t SUCESS: model restored from %s' % ckpt.model_checkpoint_path
            else:
                raise Exception("\t No checkpoint found")
        elif filepath is not None:
            self.saver.restore(self.sess, filepath)
        else:
            raise Exception('\tERROR: must provide a checkpoint filepath or directory')
    def get_embeddings(self, source, target):
        """ source: [batch size, max length] = source-side one-hot vectors
            target: [batch size, max length] = target-side one-hot vectors

            returns word embeddings for each item in the source/target sequence
        """
        source_embedding = tf.get_variable('source_embeddings',
                                           shape=[self.src_vocab_size, self.embedding_size])
        source_embedded = tf.nn.embedding_lookup(source_embedding, source)
        # kept on self: the decoder reuses the target embedding matrix
        self.target_embedding = tf.get_variable('target_embeddings',
                                                shape=[self.target_vocab_size, self.embedding_size])
        target_embedded = tf.nn.embedding_lookup(self.target_embedding, target)
        return source_embedded, target_embedded
    def backward_pass(self, loss):
        """ use the given loss to construct a training step
            NOTE: Using SGD instead of adagrad or adam because those don't seem to work
        """
        train_step = tf.contrib.layers.optimize_loss(self.loss,
                                                     self.global_step,
                                                     learning_rate=self.learning_rate,
                                                     optimizer=self.optimizer,
                                                     clip_gradients=5.0)
        return train_step
    def cross_entropy_sequence_loss(self, logits, targets, seq_len):
        """ logits: [batch size, sequence len, vocab size]
            targets: [batch size, sequence len]
            lengths: [batch size] = length of each target sequence before padding

            computes and returns per-timestep cross-entropy, then averages across sequences and batches
        """
        targets = targets[:, 1:]   # shift targets forward by 1: ignore start symbol
        logits = logits[:, :-1, :] # take off last group of logits so dimensions match up
        seq_len = seq_len - 1      # update sequence lengths to reflect this
        # cross entropy
        losses = tf.nn.sparse_softmax_cross_entropy_with_logits(
            logits=logits,
            labels=targets)
        # Mask out the losses we don't care about
        loss_mask = tf.sequence_mask(seq_len, targets.get_shape()[1], dtype=tf.float32)
        losses = losses * loss_mask
        # get mean log perplexity across all batches
        loss = tf.reduce_sum(losses) / tf.to_float(tf.reduce_sum(seq_len))
        return loss
    def encode_decode(self, source, source_len, target, target_len):
        """ source: [batch size, sequence len]
            source_len: [batch size] = pre-padding lengths of source sequences
            target: [batch size, sequence len]
            target_len: [batch size] = pre-padding lengths of targets

            runs the source through an encoder, then runs decoder on final hidden state
        """
        with tf.variable_scope('encoder'):
            encoder_cell = self.build_rnn_cell()
            outputs, final_state = self.run_encoder(source, source_len, encoder_cell)
        with tf.variable_scope('decoder'):
            decoder_cell = self.build_rnn_cell()
            # attention needs all encoder outputs; otherwise the final state suffices
            decoder_output = self.run_decoder(target,
                                              target_len,
                                              decoder_cell,
                                              final_state if self.attention == 'off' else outputs)
        return decoder_output
    def run_encoder(self, source, source_len, cell):
        """ source: [batch size, seq len]
            source_len: [batch size] = pre-padding lengths
            cell: RNNCell

            runs the cell inputs for source-len timesteps
        """
        # NOTE(review): an unrecognized encoder_type leaves `encoder` unbound
        # and raises NameError below.
        if self.encoder_type == 'default':
            encoder = encoders.DefaultEncoder(cell)
        elif self.encoder_type == 'bidirectional' and self.num_layers == 1:
            encoder = encoders.DefaultBidirectionalEncoder(cell)
        elif self.encoder_type == 'bidirectional' and self.num_layers > 1:
            encoder = encoders.StackedBidirectionalEncoder(cell)
        elif self.encoder_type == 'handmade':
            encoder = encoders.HandmadeEncoder(cell,
                                               self.embedding_size,
                                               self.hidden_size,
                                               self.max_source_len,
                                               self.batch_size)
        elif self.encoder_type == 'handmade_bidirectional':
            encoder = encoders.HandmadeBidirectionalEncoder(cell,
                                                            self.embedding_size,
                                                            self.hidden_size,
                                                            self.max_source_len,
                                                            self.batch_size)
        outputs, final_state = encoder(source, source_len)
        return outputs, final_state
    def run_decoder(self, target, target_len, cell, encoder_result):
        """ target: [batch size, sequence len]
            target_len: [batch_size] = pre-padded target lengths
            cell: RNNCell
            encoder_result:
              [batch_size, max_time, hidden state size] (encoder outputs) if attention
              [batch size, hidden state size] (encoder final state) otherwise

            runs a decoder on target and returns its predictions at each timestep
        """
        if self.decoder_type == 'argmax':
            self.decoder = decoders.ArgmaxDecoder(cell,
                                                  self.embedding_size,
                                                  self.hidden_size,
                                                  self.target_vocab_size,
                                                  self.batch_size,
                                                  self.max_target_len,
                                                  self.target_embedding,
                                                  self.testing,
                                                  self.attention,
                                                  self.encoder_type)
        logits = self.decoder(target, target_len, encoder_result)
        return logits
    def build_rnn_cell(self):
        """ builds a stacked RNNCell according to specs defined in the model's config
        """
        cell = tf.nn.rnn_cell.BasicLSTMCell(self.hidden_size, state_is_tuple=True)
        cell = tf.nn.rnn_cell.DropoutWrapper(cell,
                                             # input_keep_prob=(1-self.dropout),
                                             output_keep_prob=(1-self.dropout))
        stacked_cell = tf.nn.rnn_cell.MultiRNNCell([cell]*self.num_layers, state_is_tuple=True)
        return stacked_cell
    def train_on_batch(self, x_batch, x_lens, y_batch, y_lens, learning_rate=1.0):
        """ train on a minibatch of data. x and y are assumed to be
            padded to length max_seq_len, with [x/y]_lens reflecting
            the original lengths of each sequence
        """
        _, logits, loss, step = self.sess.run([self.train_step, self.decoder_output, self.loss, self.global_step],
                                        feed_dict={
                                            self.source: x_batch,
                                            self.source_len: x_lens,
                                            self.target: y_batch,
                                            self.target_len: y_lens,
                                            self.dropout: self.train_dropout,
                                            self.learning_rate: learning_rate
                                        })
        return np.argmax(logits, axis=2), loss, step
    def run_on_batch(self, x_batch, x_lens, y_batch, y_lens, learning_rate=1.0):
        """ "predict" on a batch while the model is in training mode (for validation purposes)
        """
        logits, loss = self.sess.run([self.decoder_output, self.loss],
                                        feed_dict={
                                            self.source: x_batch,
                                            self.source_len: x_lens,
                                            self.target: y_batch,
                                            self.target_len: y_lens,
                                            self.dropout: 0.0,
                                            self.learning_rate: learning_rate
                                        })
        return np.argmax(logits, axis=2), loss
    def predict_on_batch(self, x_batch, x_lens):
        """ predict translation for a batch of inputs. for testing mode only.
        """
        assert self.testing, 'ERROR: model must be in test mode to make predictions!'
        logits = self.sess.run(self.decoder_output, feed_dict={
                                    self.source: x_batch,
                                    self.source_len: x_lens,
                                    self.dropout: 0.0,
                                })
        return np.argmax(logits, axis=2)
    def sample_attentional_scores(self, x_batch, x_lens):
        """ get distribution of attention scores for each translated word
            NOTE: only works if attention is handmade

            returns:
               tuple( [sequence], [ [dist for x1], [dist for x2], ...] )
        """
        assert self.attention != 'off', 'attention must be turned on!'
        logits, scores = self.sess.run([self.decoder_output, self.decoder.attention_probs], feed_dict={
                                    self.source: x_batch,
                                    self.source_len: x_lens,
                                    self.dropout: 0.0,
                                })
        scores = np.squeeze(scores)             # squeeze out last dim (not needed)
        scores = np.transpose(scores, [1, 0, 2])  # get into [batch, timestep, distribution]
        # cut out padding symbols and pair up sequences with their distributions
        out = []
        for l, seq, dists in zip(x_lens, x_batch, scores):
            out.append( (seq, dists) )
        return out
| 17,581 | 4,800 |
"""Check option dictionaries for minimize, maximize and first_derivative."""
def check_optimization_options(options, usage, algorithm_mandatory=True):
    """Check optimize_options or maximize_options for usage in estimation functions.

    Args:
        options (dict or None): user-provided optimize/maximize options.
        usage (str): name of the calling context, used in error messages.
        algorithm_mandatory (bool): if True, options must be a dict that
            contains an "algorithm" entry.

    Raises:
        ValueError: if options has the wrong type, is missing "algorithm"
            (when mandatory), or contains entries that are managed
            internally by the estimation functions.
    """
    options = {} if options is None else options

    if algorithm_mandatory:
        if not isinstance(options, dict) or "algorithm" not in options:
            raise ValueError(
                "optimize_options or maximize_options must be a dict containing at "
                "least the entry 'algorithm'"
            )
    else:
        if not isinstance(options, dict):
            raise ValueError(
                "optimize_options or maximize_options must be a dict or None."
            )

    # These are supplied internally by the estimation functions.
    criterion_options = {
        "criterion",
        "criterion_kwargs",
        "derivative",
        "derivative_kwargs",
        "criterion_and_derivative",
        "criterion_and_derivative_kwargs",
    }
    invalid_criterion = criterion_options.intersection(options)
    if invalid_criterion:
        msg = (
            "Entries related to the criterion function, its derivatives or keyword "
            "arguments of those functions are not valid entries of optimize_options "
            f"or maximize_options for {usage}. Remove: {invalid_criterion}"
        )
        raise ValueError(msg)

    general_options = {"logging", "log_options", "constraints"}
    invalid_general = general_options.intersection(options)
    if invalid_general:
        # BUGFIX: this string was missing the f-prefix, so the literal text
        # "{invalid_general}" appeared in the error message.
        msg = (
            "The following are not valid entries of optimize_options because they are "
            "not only relevant for minimization but also for inference: "
            f"{invalid_general}"
        )
        raise ValueError(msg)
def check_numdiff_options(numdiff_options, usage):
    """Check numdiff_options for usage in estimation and optimization functions.

    Raises ValueError if the user supplied any option that is set internally.
    """
    numdiff_options = {} if numdiff_options is None else numdiff_options
    # Options the calling functions fill in themselves.
    reserved = {"func", "func_kwargs", "lower_bounds", "upper_bounds", "f0", "key"}
    invalid = reserved.intersection(numdiff_options)
    if invalid:
        raise ValueError(
            "The following options are set internally and are not allowed in "
            f"numdiff_options for {usage}: {invalid}"
        )
| 2,376 | 627 |
from sklearn.model_selection import train_test_split
from sklearn.neural_network import MLPClassifier
from sklearn.metrics import recall_score, f1_score, precision_score
# X and y should be defined from the data.
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=3)

# Fixed-architecture MLP; random_state pinned for reproducibility.
mlp = MLPClassifier(max_iter=1000, hidden_layer_sizes=(100, 50), alpha=0.0001, solver='adam', random_state=3)
mlp.fit(X_train, y_train)
y_res = mlp.predict(X_test)

print("accuracy:", mlp.score(X_test, y_test))
# BUGFIX: sklearn metrics take (y_true, y_pred) in that order. The previous
# calls passed the predictions first, which silently swapped the reported
# precision and recall values.
print("recall: ", recall_score(y_test, y_res))
print("f1: ", f1_score(y_test, y_res))
print("precision: ", precision_score(y_test, y_res))
# This part of fitting and predicting will be used later with our database to make predictions for the optimization model.
| 760 | 281 |
import re
def normalize(name):
    """Normalize a name: collapse runs of '-', '_' and '.' into a single
    hyphen and lowercase the result (PEP 503-style normalization)."""
    collapsed = re.sub(r"[-_.]+", "-", name)
    return collapsed.lower()
| 80 | 31 |
from typing import Optional, Dict
import pandas as pd
# from supply_chain.water.supply_chain_schema import SCNFO_SCHEMA
# from supply_chain.abbott.abbottdatamanager import AbbottDataManager
from supply_chain.pharma.pharmadatamanager import PharmaDataManager
from utils.dash_common_utils import ScenarioTableSchema
# from supply_chain.water.waterdatamanager import WaterDataManager
class DEPharmaDataManager(PharmaDataManager):
    """Pharma data manager that adds schema-driven table lookup:
    retrieving input/output tables by name, optionally indexed and/or
    expanded by joining their foreign-key tables."""

    def __init__(self, inputs=None, outputs=None, table_schema: Dict[str, ScenarioTableSchema] = None):
        super().__init__(inputs, outputs)
        # Maps table name -> ScenarioTableSchema (index columns + foreign keys).
        self.table_schema = table_schema

    ############################################
    # Hack: move to DataManager in dse-do-utils
    ############################################
    def get_raw_table_by_name(self, table_name: str) -> Optional[pd.DataFrame]:
        """Get the 'raw' (non-indexed) table from inputs or outputs."""
        if table_name in self.inputs:
            return self.inputs[table_name]
        if table_name in self.outputs:
            return self.outputs[table_name]
        return None

    def get_table_by_name(self, table_name: str, index: bool = False, expand: bool = False) -> Optional[pd.DataFrame]:
        """Return input or output table by name.

        :param table_name: can be the name of an input or an output table
        :param index: index the DataFrame
        :param expand: join tables from foreign-keys (recursively expanded)
        :return: the DataFrame, or None if the table does not exist
        """
        df = self.get_raw_table_by_name(table_name)
        if df is None:
            return None
        if expand and table_name in self.table_schema:
            for fkt in self.table_schema[table_name].foreign_tables:
                foreign_df = self.get_table_by_name(fkt.table_name, expand=True)
                if foreign_df is None:
                    print(f"Error: could not find foreign-key table {fkt.table_name}")
                else:
                    df = pd.merge(df, foreign_df, on=fkt.foreign_keys, how='inner')
        if index and table_name in self.table_schema:
            df = df.set_index(self.table_schema[table_name].index_columns, verify_integrity=True)
        return df

    def get_table_schema(self, table_name: str) -> Optional[ScenarioTableSchema]:
        """Return the schema for `table_name`, or None if unknown."""
        if self.table_schema is None:
            return None
        return self.table_schema.get(table_name)
############################################
# Hack: move to DataManager in dse-do-utils
############################################
# def get_raw_table_by_name(self, table_name):
# """Get the 'raw' (non-indexed) table from inputs or outputs."""
# if table_name in self.inputs:
# df = self.inputs[table_name]
# elif table_name in self.outputs:
# df = self.outputs[table_name]
# else:
# df = None
# return df
# DataManager.get_raw_table_by_name = get_raw_table_by_name | 3,392 | 982 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import StringIO
from fabric.api import env, put, run
from fabric.contrib.files import append, contains, exists
# supervisord daemon configuration; {work_dir} is substituted by
# supervisord_config() with the deployment working directory.
supervisord_config_template = '''\
[unix_http_server]
file={work_dir}/run/supervisor.sock
[supervisord]
logfile={work_dir}/log/supervisord.log
logfile_maxbytes=10MB
logfile_backups=10
loglevel=info
pidfile={work_dir}/run/supervisord.pid
nodaemon=false
minfds=65536
minprocs=200
[rpcinterface:supervisor]
supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface
[supervisorctl]
serverurl=unix://{work_dir}/run/supervisor.sock
'''
# [program:...] section for services launched as "python -m package.module".
program_template = '''\
[program:{package}_{module}]
command = {python_prefix}/bin/python -m {package}.{module}
directory = {work_dir}
autostart = true
autorestart = true
redirect_stderr = true
stdout_logfile = {work_dir}/log/{module}.log
stdout_logfile_maxbytes = 100MB
stdout_logfile_backups = 10
environment = PYTHONPATH="{work_dir}/ttt", PACKAGE_CONFIG="{work_dir}/etc/{package}.config"
'''
# [program:...] section for services launched from a script path plus
# optional arguments (no importable module).
program_template_1 = '''\
[program:{package}_{module}]
command = {python_prefix}/bin/python {path}{args}
directory = {work_dir}
autostart = true
autorestart = true
redirect_stderr = true
stdout_logfile = {work_dir}/log/{module}.log
stdout_logfile_maxbytes = 100MB
stdout_logfile_backups = 10
'''
# task_les service configuration: redis/postgres/mysql backing stores and
# the task server listen address; {task_port} is filled by task_les_config().
task_les_config_template = '''\
[redis]
host = 127.0.0.1
port = 6379
db = 0
[postgres]
host = 127.0.0.1
port = 5432
database = mm-log
user = mm-log
password = ******
[mysql]
host = 127.0.0.1
port = 3306
db = mm-ms
user = edm_web
passwd = ******
[task]
server_address = 0.0.0.0
server_port = {task_port}
message_dir = /usr/local/mm-bs/data/mails-default
'''
# Deployed programs as "<package>/<relative path>[ args]"; the first path
# segment is the package/log directory and the last segment the program
# name — see how supervisord_config() splits these entries.
program_list = [
    'edm_app/src/wc_dispatcher',
    'edm_app/web_api/server',
    'mm-bs/bin/channel_chk.py',
    'mm-bs/src/assign_address',
    'mm-bs/src/bs_esmtpd',
    'mm-bs/src/handle_mails',
    'mm-bs/src/smtpsender',
    'mm-bs/src/split_mails',
    'mm-bs/src/sync_redis',
    'mm-bs/src/testallocator',
    'mm-bs/web_api/server',
    'mm-log/bin/logmonitor',
    'zhi_meng/manage runserver 0.0.0.0:8888'
]
# ------------------------------------------------
def install_package():
    """Install the EPEL repo (once) plus the build toolchain and admin
    utilities via yum on the remote host."""
    if not exists('/etc/yum.repos.d/epel.repo'):
        run('rpm -ivh http://dl.fedoraproject.org/pub/epel/6/x86_64/epel-release-6-8.noarch.rpm')
    yum_commands = (
        'yum groupinstall -y Development',
        'yum install -y bzip2-devel expat-devel gdbm-devel libffi-devel openssl-devel readline-devel sqlite-devel'
        ' zlib-devel postgresql-devel zeromq3-devel',
        'yum install -y denyhosts dnsmasq redis bash-completion bind-utils nc ntpdate openssh-clients rlwrap strace'
        ' telnet tmux emacs-nox vim-minimal',
    )
    for command in yum_commands:
        run(command)
def set_datetime():
    """Best-effort clock setup: switch the timezone to Asia/Shanghai, sync
    via NTP and write the time back to the hardware clock. Failures (e.g.
    blocked NTP) are ignored so deployment continues."""
    try:
        run('cp -f /usr/share/zoneinfo/Asia/Shanghai /etc/localtime')
        run('ntpdate pool.ntp.org')
        run('hwclock -w')
    except Exception:
        # Narrowed from a bare 'except:' which would also swallow
        # SystemExit and KeyboardInterrupt.
        pass
def start_service():
    """Enable denyhosts, dnsmasq and redis at boot, then start them."""
    services = ('denyhosts', 'dnsmasq', 'redis')
    # Register all services first, then start them (same order as before).
    for name in services:
        run('chkconfig {} on'.format(name))
    for name in services:
        run('service {} start'.format(name))
def create_pyenv(pyenv):
    """Idempotently install pyenv + pyenv-virtualenv under
    /usr/local/pyenv, build CPython 2.7.11 and create the named virtualenv
    from it, then rehash the shims.

    :param pyenv: name of the virtualenv to create
    """
    pyenv_root = '/usr/local/pyenv'
    if not exists(pyenv_root):
        run('git clone https://github.com/yyuu/pyenv.git /usr/local/pyenv')
    if not exists(pyenv_root + '/plugins/pyenv-virtualenv'):
        run('git clone https://github.com/yyuu/pyenv-virtualenv.git /usr/local/pyenv/plugins/pyenv-virtualenv')
    if not exists(pyenv_root + '/versions/2.7.11/bin/python'):
        run('PYENV_ROOT=/usr/local/pyenv /usr/local/pyenv/bin/pyenv install -f 2.7.11')
    if not exists('{}/versions/{}/bin/python'.format(pyenv_root, pyenv)):
        run('PYENV_ROOT=/usr/local/pyenv /usr/local/pyenv/bin/pyenv virtualenv -f 2.7.11 {}'.format(pyenv))
    run('PYENV_ROOT=/usr/local/pyenv /usr/local/pyenv/bin/pyenv rehash')
# ------------------------------------------------
def make_dir(work_dir):
    """Create the etc/log/message/run directory skeleton under work_dir."""
    for subdir in ('etc', 'log', 'message', 'run'):
        run('mkdir -p {}/{}'.format(work_dir, subdir))
def copy_source(base_dir, source_path):
    """Upload the source tarball to base_dir and unpack it in place."""
    remote_tarball = '{}/lo.tar.gz'.format(base_dir)
    put(source_path, remote_tarball)
    run('tar -C {0} -xf {0}/lo.tar.gz'.format(base_dir))
def install_requirement(pyenv, file):
    """pip-install a requirements file into the named pyenv virtualenv.

    :param pyenv: virtualenv name under /usr/local/pyenv/versions
    :param file: remote path of the requirements file
        (NOTE: the parameter name shadows the Python 2 builtin 'file')
    """
    run('/usr/local/pyenv/versions/{}/bin/pip install -r {}'.format(pyenv, file))
def supervisord_config(base_dir, work_dir, pyenv):
    """Assemble the full supervisord.conf (daemon section, the two task_les
    programs, and one program per program_list entry), upload it to
    work_dir/etc, and register the daemon in /etc/rc.local.

    :param base_dir: remote root that holds the deployed packages
    :param work_dir: supervisord working directory (config/log/run files)
    :param pyenv: virtualenv name whose python/supervisord binaries are used
    """
    # Start with the daemon section, then append one [program:...] each.
    l = [supervisord_config_template.format(work_dir=work_dir)]
    for module in ['dispatch', 'server']:
        l.append(program_template.format(
            package='task_les',
            module=module,
            python_prefix='/usr/local/pyenv/versions/{}'.format(pyenv),
            work_dir=work_dir
        ))
    for p in program_list:
        # Split an entry "path[ args]" into the script path and its args.
        i = p.find(' ')
        if i >= 0:
            path = p[:i]
            args = p[i:]
        else:
            path = p
            args = ''
        # First path segment = package dir (gets its own log dir),
        # last segment = program/log name.
        sp = path.split('/')
        run('mkdir -p {}/{}/log'.format(base_dir, sp[0]))
        l.append(program_template_1.format(
            package=sp[0],
            module=sp[-1],
            python_prefix='/usr/local/pyenv/versions/{}'.format(pyenv),
            work_dir='{}/{}'.format(base_dir, sp[0]),
            # Runs the compiled .pyc, not the .py source.
            path='{}/{}.pyc'.format(base_dir, path),
            args=args))
    put(StringIO.StringIO('\n'.join(l)), '{}/etc/supervisord.conf'.format(work_dir))
    # Start supervisord at boot via rc.local (added only once).
    c = '/usr/local/pyenv/versions/{}/bin/supervisord -c {}/etc/supervisord.conf'.format(pyenv, work_dir)
    if not contains('/etc/rc.local', c):
        append('/etc/rc.local', c)
def task_les_config(work_dir, task_port):
    """Render the task_les configuration template and upload it to
    work_dir/etc/task_les.config."""
    rendered = task_les_config_template.format(work_dir=work_dir, task_port=task_port)
    put(StringIO.StringIO(rendered), '{}/etc/task_les.config'.format(work_dir))
# ------------------------------------------------
def supervisord_start(work_dir, pyenv):
    """Start supervisord, or reload it when its control socket already
    exists (i.e. the daemon is running)."""
    if exists('{}/run/supervisor.sock'.format(work_dir)):
        run('/usr/local/pyenv/versions/{}/bin/supervisorctl -c {}/etc/supervisord.conf reload'.format(pyenv, work_dir))
    else:
        run('/usr/local/pyenv/versions/{}/bin/supervisord -c {}/etc/supervisord.conf'.format(pyenv, work_dir))
# ------------------------------------------------
def deploy_task(host, port, user, password, base_dir, source_path):
    """Provision a remote host over SSH (packages, clock, services, pyenv)
    and install the Python requirements for the mail-relay deployment.

    NOTE(review): source_path is accepted but never used, and make_dir /
    copy_source / supervisord_config / task_les_config / supervisord_start
    are defined above but not invoked here — so work_dir is never created
    or populated before install_requirement reads from it. Looks like a
    partial deploy; confirm intent before relying on this.
    """
    env.abort_on_prompts = True
    env.host_string = '{}@{}:{}'.format(user, host, port)
    env.password = password
    pyenv = 'relay'
    work_dir = '{}/mail-relay'.format(base_dir)
    install_package()
    set_datetime()
    start_service()
    create_pyenv(pyenv)
    install_requirement(pyenv, '{}/requirements_all.txt'.format(work_dir))
# NOTE(review): runs the deployment at import time with hard-coded host,
# root user and placeholder password — consider guarding with
# `if __name__ == '__main__':` and real credentials.
deploy_task('127.0.0.1', 22, 'root', '***', '/usr/local', '/opt/lo/lo.tar.gz')
| 7,077 | 2,541 |
def add_general_arguments(parser):
    """Register run-level options: run title, data files, data limit,
    model save/load locations and performance logging."""
    specs = (
        ('--title', str, 'Title for this run. Used in tensorboard and in saving of models.'),
        ('--train_file', str, 'File with training data'),
        ('--eval_file', str, 'File with eval data'),
        ('--data_limit', int, 'How much lines of data to process (only for fast checking)'),
        ('--model_save_dir', str, 'Where to save trained models'),
        ('--saved_model', str, 'File with trained model if not fresh train'),
    )
    for flag, value_type, help_text in specs:
        parser.add_argument(flag, type=value_type, help=help_text)
    # Boolean switch: present -> True, absent -> False.
    parser.add_argument('--log', action='store_true', help='Log performance?')
def add_batching_data_args(parser):
    """Register batching options: time-unroll length and batch size."""
    for flag, help_text in (
        ('--seq_len', 'Recurrent layer time unrolling'),
        ('--batch_size', 'Size of batch'),
    ):
        parser.add_argument(flag, type=int, help=help_text)
def add_optimization_args(parser):
    """Register optimizer options: learning rate, epoch count, lr decay
    schedule and weight decay."""
    for flag, value_type, help_text in (
        ('--learning_rate', float, 'Learning rate'),
        ('--epochs', int, 'Number of epochs to run model'),
        ('--decay_after_epoch', int, 'Multiply lr by decay_multiplier each epoch'),
        ('--decay_multiplier', float, 'Multiply lr by this number after decay_after_epoch'),
        ('--weight_decay', float, 'Weight decay for l2 regularization'),
    ):
        parser.add_argument(flag, type=value_type, help=help_text)
def add_recurrent_core_args(parser):
    """Register recurrent-core options; the layered-LSTM options are
    ignored when the model is not layered."""
    for flag, value_type, help_text in (
        ('--hidden_size', int, 'Hidden size of recurrent part of model'),
        ('--num_layers', int, 'Number of recurrent layers'),
        ('--dropout', float, 'Dropout to apply to recurrent layer'),
        # Layered LSTM args, ignored if not layered
        ('--layered_hidden_size', int, 'Size of hidden state in layered lstm'),
        ('--num_tree_layers', int, 'Number of layers to distribute hidden size'),
    ):
        parser.add_argument(flag, type=value_type, help=help_text)
def add_non_terminal_args(parser):
    """Register non-terminal vocabulary and embedding options."""
    for flag, value_type, help_text in (
        ('--non_terminals_num', int, 'Number of different non-terminals'),
        ('--non_terminal_embedding_dim', int, 'Dimension of non-terminal embeddings'),
        ('--non_terminals_file', str, 'Json file with all non-terminals'),
        ('--non_terminal_embeddings_file', str, 'File with pretrained non-terminal embeddings'),
    ):
        parser.add_argument(flag, type=value_type, help=help_text)
def add_terminal_args(parser):
    """Register terminal vocabulary and embedding options."""
    for flag, value_type, help_text in (
        ('--terminals_num', int, 'Number of different terminals'),
        ('--terminal_embedding_dim', int, 'Dimension of terminal embeddings'),
        ('--terminals_file', str, 'Json file with all terminals'),
        ('--terminal_embeddings_file', str, 'File with pretrained terminal embeddings'),
    ):
        parser.add_argument(flag, type=value_type, help=help_text)
def add_tokens_args(parser):
    """Register token vocabulary size and embedding dimension options."""
    for flag, help_text in (
        ('--tokens_num', 'Number of different tokens in train file'),
        ('--token_embedding_dim', 'Size of continuous token representation'),
    ):
        parser.add_argument(flag, type=int, help=help_text)
#!/usr/bin/python #Linux shebang plus chmod to make executable
# ------------------------------------------------------------
# FILENAME: dipmath.py
# VERSION: 1.0 - Python 3.6
# PURPOSE:
# AUTHOR: MVS
# LAST CHANGE: 04/09/2018
# ------------------------------------------------------------
# tools for manipulating dipmeter interpretation files
import math
import sys
from modules import boreholemath
from modules import fileio
class DipPoint(object):
    """A geological dip / dip-azimuth pair together with the unit
    tangential vector pointing in the direction of maximum falling dip,
    plus rotations of that vector about the coordinate axes.
    """
    @staticmethod
    def get_matrix_x(angle):
        """Return the 3x3 rotation matrix about the X axis.
        :param angle: rotation angle (radian)
        :return: 3x3 list of flt
        """
        c, s = math.cos(angle), math.sin(angle)
        return [[1.0, 0.0, 0.0],
                [0.0, c, -s],
                [0.0, s, c]]
    @staticmethod
    def get_matrix_y(angle):
        """Return the 3x3 rotation matrix about the Y axis.
        :param angle: rotation angle (radian)
        :return: 3x3 list of flt
        """
        c, s = math.cos(angle), math.sin(angle)
        return [[c, 0.0, s],
                [0.0, 1.0, 0.0],
                [-s, 0.0, c]]
    @staticmethod
    def get_matrix_z(angle):
        """Return the 3x3 rotation matrix about the Z axis.
        :param angle: rotation angle (radian)
        :return: 3x3 list of flt
        """
        c, s = math.cos(angle), math.sin(angle)
        return [[c, -s, 0.0],
                [s, c, 0.0],
                [0.0, 0.0, 1.0]]
    def __init__(self, dip=0.0, dazim=0.0):
        """Build the dip instance and its unit vector pointing in the
        direction of maximum falling dip.
        :param dip: dip of geological bed from horizontal in falling direction (grad)
        :param dazim: dip azimuth of geological bed measured clockwise from grid north (grad)
        """
        # debug output
        self.verbose = True
        # normalize: 0 <= dip < pi (inclination from the XY reference plane)
        self.dip = (math.pi + math.radians(dip)) % math.pi
        # normalize: 0 <= dazim < 2*pi (azimuth in the XY plane from X axis)
        self.dazim = (math.pi * 2.0 + math.radians(dazim)) % (math.pi * 2.0)
        # unit vector of maximum falling dip; defaults to (1, 0, 0)
        cos_dip = math.cos(self.dip)
        self.dipN = cos_dip * math.cos(self.dazim)
        self.dipE = cos_dip * math.sin(self.dazim)
        self.dipV = math.sin(self.dip)
    def _rotate(self, matrix_func, angle):
        """Return the tangential vector rotated by angle; the instance
        itself is left unchanged.
        :param matrix_func: functional object - either X,Y,Z rotation matrix
        :param angle: angle of rotation (grad)
        :return: list(3) of flt
        """
        matrix = matrix_func(math.radians(angle))
        vec = (self.dipN, self.dipE, self.dipV)
        # plain 3x3 matrix * vector product
        return [sum(m * v for m, v in zip(row, vec)) for row in matrix]
    def _update_angles(self, newdips):
        """Back-calculate dip and dip azimuth from a rotated tangential
        vector, rescaling it to unit length first.
        :param newdips: list(3) of flt - the three-component tangential vector
        """
        try:
            if len(newdips) != 3:
                raise ValueError('Exception: Dip vector has wrong number of components: ', len(newdips))
            vx, vy, vz = newdips
            length = math.sqrt(vx * vx + vy * vy + vz * vz)
            if length <= 0:
                raise ValueError('Exception: Dip vector has zero length')
            # rescale to a unit vector
            self.dipN = vx / length
            self.dipE = vy / length
            self.dipV = vz / length
            horiz = math.sqrt(self.dipN * self.dipN + self.dipE * self.dipE)
            self.dip = math.acos(horiz)
            self.dazim = (math.pi * 2.0 + math.atan2(self.dipE, self.dipN)) % (math.pi * 2.0)
            if self.verbose:
                print(' X: {0:7.2f}, Y: {1:7.2f}, Z: {2:7.2f}'.format(*newdips))
                print(' Dip: {0:8.3f}, Azimuth: {1:8.3f}'.format(math.degrees(self.dip),
                                                                 math.degrees(self.dazim)))
        except ValueError as err:
            print(err.args)
            sys.exit(1)
    def __str__(self):
        """overloaded string operator"""
        return 'Dip: {0:8.3f}, Azimuth: {1:8.3f}'.format(math.degrees(self.dip), math.degrees(self.dazim))
    def rotate_x(self, angle):
        """Rotate the dip vector about the X axis and refresh the angles.
        :param angle: angle of rotation (grad)
        """
        self._update_angles(self._rotate(DipPoint.get_matrix_x, angle))
    def rotate_y(self, angle):
        """Rotate the dip vector about the Y axis and refresh the angles.
        :param angle: angle of rotation (grad)
        """
        self._update_angles(self._rotate(DipPoint.get_matrix_y, angle))
    def rotate_z(self, angle):
        """Rotate the dip vector about the Z axis and refresh the angles.
        :param angle: angle of rotation (grad)
        """
        self._update_angles(self._rotate(DipPoint.get_matrix_z, angle))
class DipMarker(DipPoint):
    """A DipPoint tied to a measured depth; when borehole geometry is
    supplied, the raw dip is reoriented using the borehole inclination and
    azimuth at that depth.
    """
    def __init__(self, md, dip=None, dazim=None, wellgeometry_in=None, verbose=False):
        """
        :param md: measured depth of the marker
        :param dip: raw dip (grad) or None
        :param dazim: raw dip azimuth (grad) or None
        :param wellgeometry_in: object providing calculate_cl_point(md), or None
        :param verbose: print progress of the correction
        """
        self.md = md
        have_dip = dip is not None and dazim is not None
        if have_dip:
            # keep the raw input angles (radians) before any correction
            self.in_dip = math.radians(dip)
            self.in_dazim = math.radians(dazim)
            # DipPoint converts to radians itself
            super(DipMarker, self).__init__(dip, dazim)
        else:
            # initialize as zero
            self.in_dip = 0.0
            self.in_dazim = 0.0
            super(DipMarker, self).__init__(0.0, 0.0)
        self.verbose = verbose
        if self.verbose:
            print(self)
        if wellgeometry_in is not None and have_dip:
            self.clpoint = wellgeometry_in.calculate_cl_point(self.md)
            if self.verbose:
                print('Dipmarker correction:')
                print('MD: {0:8.2f}, '.format(self.md) + super(DipMarker, self).__str__())
            self.reorient_dip()
    def __str__(self):
        """overloaded string operator"""
        return 'MD: {0:8.3f}, Dip: {1:8.3f}, Azimuth: {2:8.3f}'.format(self.md, math.degrees(self.dip),
                                                                       math.degrees(self.dazim))
    def output_list(self, mymode=0):
        """Return the marker as formatted strings.
        :param mymode: 0 -> basic record; anything else -> detailed record
            (requires clpoint, i.e. geometry was supplied)
        :return: list of str
        """
        if mymode != 0:
            return [f'{self.md:{10}.{2}f}', f'{math.degrees(self.in_dip):{10}.{5}f}',
                    f'{math.degrees(self.in_dazim):{10}.{5}f}',
                    f'{math.degrees(self.dip):{10}.{5}f}', f'{math.degrees(self.dazim):{10}.{5}f}',
                    f'{math.degrees(self.clpoint.incl):{10}.{5}f}', f'{math.degrees(self.clpoint.azim):{10}.{5}f}']
        return [f'{self.md:{10}.{2}f}', f'{math.degrees(self.dip):{10}.{5}f}',
                f'{math.degrees(self.dazim):{10}.{5}f}']
    def reorient_dip(self):
        """Rotate the measured dip by the borehole inclination (Y axis)
        and then the borehole azimuth (Z axis) at this depth."""
        incl_deg = math.degrees(self.clpoint.incl)
        azim_deg = math.degrees(self.clpoint.azim)
        if self.verbose:
            print(' Borehole INCL: {0:8.3f}, Borehole AZIM: {1:8.3f}'.format(incl_deg, azim_deg))
            print(' Rotation on Y-Axis with borehole inclination: {0:8.3f}'.format(incl_deg))
        self.rotate_y(incl_deg)
        if self.verbose:
            print(' Rotation on Z-Axis with borehole azimuth : {0:8.3f}'.format(azim_deg))
        self.rotate_z(azim_deg)
if __name__ == '__main__':  # call test environment only if module is called standalone
    # Manual smoke test: exercise DipPoint rotations, correct a single
    # DipMarker against the borehole geometry, then batch-process a file.
    TWIDTH = 79  # terminal width excluding EOL
    print(TWIDTH*'=')
    print('module test: dipmath'.ljust(TWIDTH, '-'))
    print(TWIDTH*'=')
    print('Testing: Class DipPoint')
    point = DipPoint(45, 0)
    print('Input:')
    print(point)
    print('Rotation starts:')
    # full turn about Y in 15-degree steps
    for ang in range(0, 360, 15):
        point.rotate_y(15)
        # print('Rot: ', ang, ' Result:', point)
    print(TWIDTH*'=')
    print('Testing: Class DipMarker')
    # generate a borehole deviation survey / interpolation object
    wellgeometry = boreholemath.TransformBoreHoleSurvey(datadir='..\\data', mode=0, relativeCoords=True, verbose=False)
    # correct one marker by extracting corresponding borehole inclination/azim
    print('Apply correction on one DipMarker point:')
    dmarker = DipMarker(5000, 45, 10, wellgeometry, verbose=True)
    print(dmarker)
    # repeat the same for data read from a file
    print('Opening dipmarker file:')
    inargs = {'datadir': '..\\data', 'filename_in': 'sample-dipmarker.txt',
              'headerlines_in': 1, 'columns_in': (1, 2, 3)}
    reader = fileio.BHReaderWriter(**inargs)
    lines = reader.read_data()
    result = []
    for line in lines:
        try:
            # convert data to numbers and check for depth-sorting
            line = [float(i) for i in line]
            result.append(DipMarker(*line, wellgeometry, verbose=False))
        except ValueError:
            print('Exception: Error during conversion of survey data')
            sys.exit()
        print('Before - MD: {0:8.3f}, Dip: {1:8.3f}, Azimuth: {2:8.3f}'.format(*line))
        print('After - ' + str(result[-1]))
    print('Writing dipmarker file:')
    # mode = basic(0) or detailed(1) output
    mode = 1
    outdata = []
    for item in result:
        outdata.append(item.output_list(mode))
    if mode == 1:
        outheader = ('Well: UNKNOWN', 'MD [depthunit]', 'DIP_ORIG [deg]',
                     'DAZI_ORIG [deg]', 'DIP [deg]', 'DAZI [deg]', 'INCL [deg]', 'AZIM [deg]')
    else:
        outheader = ('Well: UNKNOWN', 'MD [depthunit]', 'DIP [deg]', 'DAZI [deg]')
    outargs = {'datadir': '..\\data', 'filename_out': 'out_sample-dipmarker.txt',
               'header_out': outheader, 'data_out': outdata, 'verbose': True}
    writer = fileio.BHReaderWriter(**outargs)
    writer.write_data()
    print(TWIDTH*'=')
else:
    print('Importing ' + __name__)
| 11,178 | 3,714 |
from scrapy.item import Item, Field
from scrapy.loader import ItemLoader
from scrapy.loader.processors import Identity, MapCompose, TakeFirst, Join
from scraper.helpers.vendor_helpers import *
from slugify import slugify
from datetime import datetime
import re
def format_date(date_string):
    """Parse dates such as 'Mar 17, 2018' into datetime objects."""
    parsed = datetime.strptime(date_string, "%b %d, %Y")
    return parsed
class VendorItem(Item):
    """Scrapy item for a scraped vendor record: identity, location,
    inspection summary and geocoding bookkeeping. Most fields keep only
    the first extracted value (TakeFirst)."""
    locality_id = Field(
        output_processor=TakeFirst()
    )
    locality = Field(
        output_processor=TakeFirst()
    )
    locality_url = Field(
        output_processor=TakeFirst()
    )
    vendor_id = Field(
        output_processor=TakeFirst()
    )
    # Stable identifier derived by the vendor_guid helper.
    guid = Field(
        input_processor=MapCompose(vendor_guid),
        output_processor=TakeFirst()
    )
    name = Field(
        output_processor=TakeFirst()
    )
    url = Field(
        output_processor=TakeFirst()
    )
    address = Field(
        input_processor=MapCompose(vendor_address),
        output_processor=TakeFirst()
    )
    city = Field(
        input_processor=MapCompose(vendor_city),
        output_processor=TakeFirst()
    )
    vendor_location = Field(
        output_processor=TakeFirst()
    )
    # Parsed from strings like 'Mar 17, 2018' by format_date.
    last_inspection_date = Field(
        input_processor=MapCompose(format_date),
        output_processor=TakeFirst()
    )
    search_name = Field(
        input_processor=MapCompose(vendor_search_name),
        output_processor=TakeFirst()
    )
    type = Field(
        output_processor=TakeFirst()
    )
    status = Field(
        output_processor=TakeFirst()
    )
    phone = Field(
        output_processor=TakeFirst()
    )
    # URL-safe slug derived from the extracted value.
    slug = Field(
        input_processor=MapCompose(slugify),
        output_processor=TakeFirst()
    )
    category = Field(
        input_processor=MapCompose(vendor_category),
        output_processor=TakeFirst()
    )
    geo = Field(
        # disable geocoding until SmartyStreets replacement is found
        #input_processor=MapCompose(get_lat_lng),
        #output_processor=TakeFirst()
    )
    needs_geocoding = Field(
        input_processor=MapCompose(needs_geocoding),
        output_processor=TakeFirst()
    )
    needs_geocoding_date = Field(
        input_processor=MapCompose(needs_geocoding_date),
        output_processor=TakeFirst()
    )
    # Nested list of inspection records; no processors applied.
    inspections = Field()
class VendorItemLoader(ItemLoader):
    """ItemLoader that builds VendorItem instances by default."""
    default_item_class = VendorItem
class InspectionItem(Item):
    """Scrapy item for a single inspection, linked back to its vendor via
    vendor_guid."""
    vendor_guid = Field(
        output_processor=TakeFirst()
    )
    # Parsed from strings like 'Mar 17, 2018' by format_date.
    date = Field(
        input_processor=MapCompose(format_date),
        output_processor=TakeFirst()
    )
    type = Field(
        output_processor=TakeFirst()
    )
    risk_rating = Field(
        output_processor=TakeFirst()
    )
    followup_required = Field(
        output_processor=TakeFirst()
    )
    # Comment fragments are concatenated into one string before TakeFirst.
    comments = Field(
        input_processor=Join(''),
        output_processor=TakeFirst()
    )
    # List of violation records; no processors applied.
    violations = Field()
class InspectionItemLoader(ItemLoader):
    """ItemLoader that builds InspectionItem instances by default."""
    default_item_class = InspectionItem
| 2,579 | 928 |
#!/usr/bin/env python3
import urllib.request
import os
import zipfile
import sys
import argparse
import re
import subprocess
###############################################################################
## Classes
class TextReplacer:
    """An ordered list of plain-text substitutions applied in sequence."""
    def __init__(self):
        self.res = []
    def add(self, reg: str, rep: str):
        """Queue a substitution; returns self so calls can be chained."""
        self.res.append((reg, rep))
        return self
    def replace(self, text: str) -> str:
        """Apply every queued substitution to text, in insertion order."""
        for old, new in self.res:
            text = text.replace(old, new)
        return text
class Settings:
    """Paths, MSBuild logger flag and target platform for the build."""
    def __init__(self, root: str, install_dist: str, install: str, wx_root: str, build: str, appveyor_msbuild: str, platform: str):
        self.root = root
        self.install_dist = install_dist
        self.install = install
        self.wx_root = wx_root
        self.build = build
        self.appveyor_msbuild = appveyor_msbuild
        self.platform = platform
    def print(self):
        """Dump every setting to stdout, one per line."""
        for label, value in (
                ('root:', self.root),
                ('install_dist:', self.install_dist),
                ('install:', self.install),
                ('wx_root:', self.wx_root),
                ('build:', self.build),
                ('appveyor_msbuild:', self.appveyor_msbuild),
                ('platform:', self.platform)):
            print(label, value)
    def is_appveyor(self) -> bool:
        """True when the APPVEYOR environment variable says 'true'."""
        return os.environ.get('APPVEYOR', '').lower().strip() == 'true'
    def append_appveyor(self, args):
        """Append the AppVeyor MSBuild logger flag when running on CI."""
        if self.is_appveyor():
            args.append(self.appveyor_msbuild)
###############################################################################
## Functions
def setup() -> Settings:
    """Derive all build paths from the current working directory and pick
    the platform from the PLATFORM environment variable (x86 -> Win32,
    anything else -> x64)."""
    root = os.getcwd()
    install_dist = os.path.join(root, 'dependencies')
    target = 'Win32' if os.environ.get('PLATFORM', 'unknown') == 'x86' else 'x64'
    return Settings(
        root,
        install_dist,
        os.path.join(root, 'dist'),
        os.path.join(install_dist, 'wx'),
        os.path.join(root, 'build'),
        r'/logger:C:\Program Files\AppVeyor\BuildAgent\Appveyor.MSBuildLogger.dll',
        target,
    )
def verify_dir_exist(path):
    """Create path (and any missing parents) if it does not exist yet.

    Uses exist_ok so a directory created between check and creation cannot
    raise, removing the isdir()+makedirs() TOCTOU race of the original.
    Still raises if path exists but is a regular file, as before.
    """
    os.makedirs(path, exist_ok=True)
def download_file(url, path):
    """Fetch url into path unless the file is already present on disk."""
    if os.path.isfile(path):
        print("Already downloaded", path)
        return
    urllib.request.urlretrieve(url, path)
def list_projects_in_solution(path):
    """Return the project file paths referenced by a Visual Studio .sln,
    resolved relative to the solution's directory."""
    sln_dir = os.path.dirname(path)
    # Project("{GUID}") = "name", "relative\path.vcxproj", "{GUID}"
    project_line = re.compile(r'Project\("[^"]+"\) = "[^"]+", "([^"]+)"')
    found = []
    with open(path) as sln:
        for line in sln:
            match = project_line.match(line)
            if match:
                found.append(os.path.join(sln_dir, match.group(1)))
    return found
def add_definition_to_project(path, define):
    """Prepend a preprocessor define to every <PreprocessorDefinitions>
    entry of a project file, rewriting the file in place."""
    # <PreprocessorDefinitions>WIN32;...;%(PreprocessorDefinitions)</PreprocessorDefinitions>
    preproc = re.compile(r'([ ]*<PreprocessorDefinitions>)([^<]*</PreprocessorDefinitions>)')
    rewritten = []
    with open(path) as project:
        for line in project:
            match = preproc.match(line)
            if match:
                rewritten.append('{0}{1};{2}'.format(match.group(1), define, match.group(2)))
            else:
                rewritten.append(line.rstrip())
    with open(path, mode='w') as project:
        for line in rewritten:
            project.write(line + '\n')
# change from:
# <RuntimeLibrary>MultiThreadedDebugDLL</RuntimeLibrary> to <RuntimeLibrary>MultiThreadedDebug</RuntimeLibrary>
# <RuntimeLibrary>MultiThreadedDLL</RuntimeLibrary> to <RuntimeLibrary>MultiThreaded</RuntimeLibrary>
def change_to_static_link(path):
    """Switch a project's runtime library from the DLL CRT to the static
    CRT (both Debug and Release variants), rewriting the file in place."""
    mtdebug = re.compile(r'([ ]*)<RuntimeLibrary>MultiThreadedDebugDLL')
    mtrelease = re.compile(r'([ ]*)<RuntimeLibrary>MultiThreadedDLL')
    rewritten = []
    with open(path) as project:
        for line in project:
            debug_match = mtdebug.match(line)
            release_match = mtrelease.match(line)
            if debug_match:
                print('in {project} changed to static debug'.format(project=path))
                rewritten.append('{spaces}<RuntimeLibrary>MultiThreadedDebug</RuntimeLibrary>'.format(spaces=debug_match.group(1)))
            elif release_match:
                print('in {project} changed to static release'.format(project=path))
                rewritten.append('{spaces}<RuntimeLibrary>MultiThreaded</RuntimeLibrary>'.format(spaces=release_match.group(1)))
            else:
                rewritten.append(line.rstrip())
    with open(path, mode='w') as project:
        for line in rewritten:
            project.write(line + '\n')
def change_all_projects_to_static(sln):
    """Apply change_to_static_link to every project in the solution."""
    for project_path in list_projects_in_solution(sln):
        change_to_static_link(project_path)
def add_definition_to_solution(sln, definition):
    """Add a preprocessor define to every project in the solution."""
    for project_path in list_projects_in_solution(sln):
        add_definition_to_project(project_path, definition)
def make_single_project_64(project_path, rep):
    """Rewrite one project file line-by-line through the replacer rep;
    missing files are reported and skipped."""
    if not os.path.isfile(project_path):
        print('missing ' + project_path)
        return
    converted = []
    with open(project_path) as project:
        for line in project:
            converted.append(rep.replace(line.rstrip()))
    with open(project_path, 'w') as project:
        for line in converted:
            project.write(line + '\n')
def make_projects_64(sln):
    """Convert every project in the solution from Win32 to x64 settings."""
    rep = (TextReplacer()
           .add('Win32', 'x64')
           .add('<DebugInformationFormat>EditAndContinue</DebugInformationFormat>', '<DebugInformationFormat>ProgramDatabase</DebugInformationFormat>')
           .add('<TargetMachine>MachineX86</TargetMachine>', '<TargetMachine>MachineX64</TargetMachine>'))
    # protobuf specific hack since cmake looks in x64 folder
    rep.add(r'<OutDir>Release\</OutDir>', r'<OutDir>x64\Release\</OutDir>')
    rep.add(r'<OutDir>Debug\</OutDir>', r'<OutDir>x64\Debug\</OutDir>')
    for project in list_projects_in_solution(sln):
        make_single_project_64(project, rep)
def make_solution_64(solution_path):
    """Replace Win32 with x64 throughout the solution file, in place."""
    rep = TextReplacer().add('Win32', 'x64')
    converted = []
    with open(solution_path) as handle:
        for line in handle:
            converted.append(rep.replace(line.rstrip()))
    with open(solution_path, 'w') as handle:
        for line in converted:
            handle.write(line + '\n')
def convert_sln_to_64(sln):
    """Upgrade a solution and all of its projects from Win32 to x64.
    The solution file is rewritten first; make_projects_64 then reads the
    (rewritten) solution to find the project files to convert."""
    make_solution_64(sln)
    make_projects_64(sln)
def extract_zip_to(path_to_zip, target):
    """Unpack every member of the zip archive into the target directory."""
    with zipfile.ZipFile(path_to_zip, 'r') as archive:
        archive.extractall(target)
###############################################################################
## Commands
def handle_make_solution_64_cmd(args):
    """CLI handler for 'to64': convert args.sln and its projects to x64."""
    convert_sln_to_64(args.sln)
def handle_change_all_projects_to_static_cmd(args):
    """CLI handler for 'static_sln': static-CRT all projects in args.sln."""
    change_all_projects_to_static(args.sln)
def handle_list_projects_cmd(args):
    """CLI handler for 'listprojects': print each project in args.sln.

    Parameter renamed from 'cmd' to 'args' for consistency with the other
    handlers; it is only ever called positionally via args.func(args).
    """
    projects = list_projects_in_solution(args.sln)
    for proj in projects:
        print("project", proj)
def handle_add_definition_cmd(args):
    """CLI handler for 'add_define': add args.define to args.project."""
    add_definition_to_project(args.project, args.define)
def handle_change_to_static_cmd(args):
    """CLI handler for 'static_project': static-CRT a single project."""
    change_to_static_link(args.project)
def handle_install_cmd(args):
    """CLI handler for 'install': download wxWidgets 3.1.4, unpack it,
    switch its projects to the static CRT and (unless --nobuild) build it
    with MSBuild."""
    settings = setup()
    build = args.build
    wx_url = "https://github.com/wxWidgets/wxWidgets/releases/download/v3.1.4/wxWidgets-3.1.4.zip"
    wx_zip = os.path.join(settings.install_dist, "wx.zip")
    wx_sln = os.path.join(settings.wx_root, 'build', 'msw', 'wx_vc16.sln')
    print('Root:', settings.root)
    print('wxWidgets solution: ', wx_sln)
    verify_dir_exist(settings.install_dist)
    verify_dir_exist(settings.wx_root)
    print("downloading wx...")
    # NOTE: wx_zip is already absolute, so this join returns wx_zip as-is.
    download_file(wx_url, os.path.join(settings.install_dist, wx_zip))
    print("extracting wx")
    extract_zip_to(wx_zip, settings.wx_root)
    print("changing wx to static")
    change_all_projects_to_static(wx_sln)
    print("building wxwidgets")
    print("-----------------------------------")
    wx_msbuild_cmd = [
        'msbuild',
        '/p:Configuration=Release',
        '/p:Platform={}'.format(settings.platform)
    ]
    # AppVeyor logger flag is appended only when running on CI.
    settings.append_appveyor(wx_msbuild_cmd)
    wx_msbuild_cmd.append(wx_sln)
    if build:
        # Flush so our progress output appears before msbuild's.
        sys.stdout.flush()
        subprocess.check_call(wx_msbuild_cmd)
    else:
        # Dry run: show the command instead of executing it.
        print(wx_msbuild_cmd)
def handle_cmake_cmd(_):
    """CLI handler for 'cmake': create the build/install directories and
    run the CMake configure step for VS 2019.

    NOTE(review): os.makedirs without exist_ok raises if the directories
    already exist, so this sub-command is not re-runnable — confirm that
    is intended (fresh CI checkouts only).
    """
    settings = setup()
    subinstall = os.path.join(settings.install, 'windows', settings.platform)
    os.makedirs(settings.build)
    os.makedirs(settings.install)
    os.makedirs(subinstall)
    generator = 'Visual Studio 16 2019'
    # NOTE(review): the %VAR% tokens are passed to cmake literally
    # (no shell); presumably cmake/AppVeyor resolves them later — confirm.
    cmakecmd = [
        'cmake',
        "-DCMAKE_INSTALL_PREFIX={}".format(subinstall),
        "-DwxWidgets_ROOT_DIR={}".format(settings.wx_root),
        "-DRIDE_BUILD_COMMIT=%APPVEYOR_REPO_COMMIT%",
        "-DRIDE_BUILD_NUMBER=%APPVEYOR_BUILD_NUMBER%",
        "-DRIDE_BUILD_BRANCH=%APPVEYOR_REPO_BRANCH%",
        "-DRIDE_BUILD_REPO=%APPVEYOR_REPO_NAME%",
        '-G', generator,
        '-A', settings.platform,
        settings.root
    ]
    sys.stdout.flush()
    subprocess.check_call(cmakecmd, cwd=settings.build)
def handle_build_cmd(_):
    """CLI handler for 'build': build the CPack PACKAGE target with
    MSBuild.

    NOTE(review): unlike the install path, the AppVeyor logger flag is
    appended unconditionally here, so this fails off-CI unless the logger
    DLL exists — confirm whether append_appveyor() should be used instead.
    """
    settings = setup()
    ride_sln = os.path.join(settings.build, 'PACKAGE.vcxproj')
    ride_msbuild_cmd = [
        'msbuild',
        '/p:Configuration=Release',
        '/p:Platform={}'.format(settings.platform),
        settings.appveyor_msbuild,
        ride_sln
    ]
    sys.stdout.flush()
    subprocess.check_call(ride_msbuild_cmd)
def handle_print_cmd(_):
    """CLI handler for 'print': dump the derived build settings."""
    settings = setup()
    settings.print()
###############################################################################
## Main
def main():
    """Parse the command line and dispatch to the selected subcommand."""
    parser = argparse.ArgumentParser(description='Does the windows build')
    subparsers = parser.add_subparsers()

    install_parser = subparsers.add_parser('install')
    install_parser.set_defaults(func=handle_install_cmd)
    install_parser.add_argument('--nobuild', dest='build', action='store_const', const=False, default=True)

    # One distinct variable per subparser (the original reused
    # `install_parser`/`static_project_parser` for unrelated commands).
    list_projects_parser = subparsers.add_parser('listprojects')
    list_projects_parser.set_defaults(func=handle_list_projects_cmd)
    list_projects_parser.add_argument('sln', help='solution file')

    static_project_parser = subparsers.add_parser('static_project')
    static_project_parser.set_defaults(func=handle_change_to_static_cmd)
    static_project_parser.add_argument('project', help='make a project staticly link to the CRT')

    to64_parser = subparsers.add_parser('to64')
    to64_parser.set_defaults(func=handle_make_solution_64_cmd)
    to64_parser.add_argument('sln', help='the solution to upgrade')

    static_solution_parser = subparsers.add_parser('static_sln')
    static_solution_parser.set_defaults(func=handle_change_all_projects_to_static_cmd)
    static_solution_parser.add_argument('sln', help='make all the projects in the specified solution staticly link to the CRT')

    add_define_parser = subparsers.add_parser('add_define')
    add_define_parser.set_defaults(func=handle_add_definition_cmd)
    add_define_parser.add_argument('project', help='project file')
    add_define_parser.add_argument('define', help='preprocessor to add')

    cmake_parser = subparsers.add_parser('cmake')
    cmake_parser.set_defaults(func=handle_cmake_cmd)

    build_parser = subparsers.add_parser('build')
    build_parser.set_defaults(func=handle_build_cmd)

    print_parser = subparsers.add_parser('print')
    print_parser.set_defaults(func=handle_print_cmd)

    args = parser.parse_args()
    if not hasattr(args, 'func'):
        # On Python 3 running with no subcommand leaves the namespace without
        # `func`; print usage instead of crashing with AttributeError.
        parser.print_help()
        return
    args.func(args)
# Script entry point: only run when executed directly, not when imported.
if __name__ == "__main__":
    main()
| 12,000 | 4,028 |
# zhangshulin
# 2018-3-17
# e-mail: zhangslwork@yeah.net
# Raw couplet corpus paths ("in" = first/upper line, "out" = second/lower line).
TRAIN_IN_PATH = './datasets/rawdata/train/in.txt'
TRAIN_OUT_PATH = './datasets/rawdata/train/out.txt'
TEST_IN_PATH = './datasets/rawdata/test/in.txt'
TEST_OUT_PATH = './datasets/rawdata/test/out.txt'
# Merged output file combining all train and test pairs.
TOTAL_PATH = './datasets/all_couplets.txt'
def create_data_file(train_in_path, train_out_path, test_in_path, test_out_path, total_path):
    """Merge the raw train/test couplet halves into one combined corpus file.

    Each output line is an upper couplet joined with its lower couplet after
    whitespace removal and separator insertion; pairs where either half is
    blank (after normalization) are dropped.
    """
    with open(train_in_path, 'r', encoding='utf8') as f:
        train_in_arr = f.readlines()
    with open(train_out_path, 'r', encoding='utf8') as f:
        train_out_arr = f.readlines()
    with open(test_in_path, 'r', encoding='utf8') as f:
        test_in_arr = f.readlines()
    with open(test_out_path, 'r', encoding='utf8') as f:
        test_out_arr = f.readlines()
    # Lazily normalize every line (drop spaces, append separators).
    train_in_arr = map(process_in_couplet, train_in_arr)
    train_out_arr = map(process_out_couplet, train_out_arr)
    test_in_arr = map(process_in_couplet, test_in_arr)
    test_out_arr = map(process_out_couplet, test_out_arr)
    # NOTE(review): the blank-line filter runs AFTER normalization, so an
    # originally empty line (now ';' or '。\n') is never filtered out — confirm
    # this is the intended behavior.
    train_in_out_arr = [up + down for up, down in zip(train_in_arr, train_out_arr)
                        if len(up.strip()) != 0 and len(down.strip()) != 0]
    test_in_out_arr = [up + down for up, down in zip(test_in_arr, test_out_arr)
                       if len(up.strip()) != 0 and len(down.strip()) != 0]
    total_arr = train_in_out_arr + test_in_out_arr
    with open(total_path, 'w', encoding='utf8') as f:
        f.writelines(total_arr)
    print('data file creating complete ^_^')
def process_in_couplet(couplet):
    """Strip spaces and turn each newline of the upper couplet into ';'."""
    compact = couplet.replace(' ', '')
    return ';'.join(compact.split('\n'))
def process_out_couplet(couplet):
    """Strip spaces and end each line of the lower couplet with '。' before the newline."""
    compact = couplet.replace(' ', '')
    return '。\n'.join(compact.split('\n'))
# Build the merged corpus when run as a script.
if __name__ == '__main__':
    create_data_file(TRAIN_IN_PATH, TRAIN_OUT_PATH, TEST_IN_PATH, TEST_OUT_PATH, TOTAL_PATH)
# -*- coding: utf-8 -*-
"""A interface for c4d."""
# Import built-in models
from __future__ import print_function
from __future__ import unicode_literals
import base64
import hashlib
import logging
import os
import sys
import time
from builtins import str
from rayvision_c4d.constants import PACKAGE_NAME
from rayvision_c4d.get_preferences import GetInstallPath
from rayvision_log import init_logger
from rayvision_utils import constants
from rayvision_utils import utils
from rayvision_utils.cmd import Cmd
from rayvision_utils.exception import tips_code
from rayvision_utils.exception.exception import AnalyseFailError, CGFileNotExistsError
VERSION = sys.version_info[0]
class AnalyzeC4d(object):
    """Analyze a CINEMA 4D scene and emit render-farm JSON result files.

    Runs the RBAnalyzer.pyp plugin inside a headless Cinema 4D instance and
    collects task.json / asset.json / tips.json / upload.json in a
    timestamped workspace directory.
    """

    def __init__(self, cg_file, software_version, project_name,
                 plugin_config, render_software="CINEMA 4D", render_layer_type="0",
                 input_project_path=None, local_os=None, workspace=None,
                 custom_exe_path=None,
                 platform="2",
                 logger=None,
                 log_folder=None,
                 log_name=None,
                 log_level="DEBUG"
                 ):
        """Initialize and examine the analysis information.

        Args:
            cg_file (str): Scene file path.
            software_version (str): Software version.
            project_name (str): The project name.
            plugin_config (dict): Plugin information.
            render_software (str): Software name, CINEMA 4D by default.
            render_layer_type (str): 0 is render layer, 1 is render setup.
            input_project_path (str): The working path of the scenario.
            local_os (str): System name, linux or windows.
            workspace (str): Analysis out of the result file storage path.
            custom_exe_path (str): Customize the exe path for the analysis.
            platform (str): Platform num.
            logger (object, optional): Custom log object.
            log_folder (str, optional): Custom log save location.
            log_name (str, optional): Custom log file name.
            log_level (string): Set log level, example: "DEBUG","INFO","WARNING","ERROR".
        """
        self.logger = logger
        if not self.logger:
            # No logger supplied: initialize the package logger.
            init_logger(PACKAGE_NAME, log_folder, log_name)
            self.logger = logging.getLogger(__name__)
            self.logger.setLevel(level=log_level.upper())
        self.check_path(cg_file)
        self.cg_file = cg_file
        self.render_software = render_software
        self.input_project_path = input_project_path or ""
        self.render_layer_type = render_layer_type
        self.software_version = software_version
        self.project_name = project_name
        self.plugin_config = plugin_config
        local_os = self.check_local_os(local_os)
        self.local_os = local_os
        # Unix timestamp string used as a per-run workspace subfolder name.
        self.tmp_mark = str(int(time.time()))
        workspace = os.path.join(self.check_workspace(workspace),
                                 self.tmp_mark)
        if not os.path.exists(workspace):
            os.makedirs(workspace)
        self.workspace = workspace
        if custom_exe_path:
            self.check_path(custom_exe_path)
        self.custom_exe_path = custom_exe_path
        self.platform = platform
        # Output locations for the analysis result files.
        self.task_json = os.path.join(workspace, "task.json")
        self.tips_json = os.path.join(workspace, "tips.json")
        self.asset_json = os.path.join(workspace, "asset.json")
        self.upload_json = os.path.join(workspace, "upload.json")
        self.analyse_log_path = os.path.join(workspace, "analyze.log")
        self.tips_info = {}
        self.task_info = {}
        self.asset_info = {}
        self.upload_info = {}

    @staticmethod
    def check_path(tmp_path):
        """Check if the path exists; raise CGFileNotExistsError otherwise."""
        if not os.path.exists(tmp_path):
            raise CGFileNotExistsError("{} is not found".format(tmp_path))

    def add_tip(self, code, info):
        """Add error message.

        Args:
            code (str): error code.
            info (str or list): Error message description.
        """
        if isinstance(info, str):
            self.tips_info[code] = [info]
        elif isinstance(info, list):
            self.tips_info[code] = info
        else:
            raise Exception("info must a list or str.")

    def save_tips(self):
        """Write the error message to tips.json."""
        utils.json_save(self.tips_json, self.tips_info, ensure_ascii=False)

    @staticmethod
    def check_local_os(local_os):
        """Check the system name.

        Args:
            local_os (str): System name.

        Returns:
            str
        """
        if not local_os:
            # Auto-detect from sys.platform when not given.
            if "win" in sys.platform.lower():
                local_os = "windows"
            else:
                local_os = "linux"
        return local_os

    def check_workspace(self, workspace):
        """Check the working environment.

        Falls back to ~/renderfarm_sdk when no workspace is supplied.

        Args:
            workspace (str): Workspace path.

        Returns:
            str: Workspace path.
        """
        if not workspace:
            if self.local_os == "windows":
                workspace = os.path.join(os.environ["USERPROFILE"], "renderfarm_sdk")
            else:
                workspace = os.path.join(os.environ["HOME"], "renderfarm_sdk")
        else:
            self.check_path(workspace)
        return workspace

    def analyse_cg_file(self):
        """Analyse cg file.

        Analyze the scene file to get the path to the startup file of the CG
        software.
        """
        # Find the version from the cg file
        # NOTE(review): check_version2/check_version3 and find_location are
        # defined elsewhere in this module (not visible here).
        if VERSION == 3:
            version = self.check_version3(self.cg_file)
        else:
            version = self.check_version2(self.cg_file)
        # Record a tip when the scene version differs from the declared one.
        if int(float(version)) != int(float(self.software_version)):
            self.add_tip(tips_code.CG_NOTMATCH, "{0} {1}".format(
                self.render_software, self.software_version))
            self.save_tips()
        # Find the installation path with the version
        if self.custom_exe_path is not None:
            exe_path = self.custom_exe_path
        else:
            exe_path = self.find_location()
        return exe_path

    def write_task_json(self):
        """The initialization task.json."""
        constants.TASK_INFO["task_info"]["input_cg_file"] = self.cg_file.replace("\\", "/")
        constants.TASK_INFO["task_info"]["input_project_path"] = self.input_project_path.replace("\\", "/")
        constants.TASK_INFO["task_info"]["render_layer_type"] = self.render_layer_type
        constants.TASK_INFO["task_info"]["project_name"] = self.project_name
        constants.TASK_INFO["task_info"]["cg_id"] = "2005"
        constants.TASK_INFO["task_info"]["os_name"] = "1" if self.local_os == "windows" else "0"
        constants.TASK_INFO["task_info"]["platform"] = self.platform
        constants.TASK_INFO["software_config"] = {
            "plugins": self.plugin_config,
            "cg_version": self.software_version,
            "cg_name": self.render_software
        }
        utils.json_save(self.task_json, constants.TASK_INFO)

    def check_result(self):
        """Check that the analysis results file exists.

        Returns:
            tuple: (True, None) on success, (False, message) otherwise.
        """
        for json_path in [self.task_json, self.asset_json,
                          self.tips_json]:
            if not os.path.exists(json_path):
                msg = "Json file is not generated: {0}".format(json_path)
                return False, msg
        return True, None

    def get_file_md5(self, file_path):
        """Generate the md5 values for the scenario."""
        hash_md5 = hashlib.md5()
        if os.path.exists(file_path):
            # Hash the file in fixed-size chunks to bound memory use.
            with open(file_path, 'rb') as file_path_f:
                while True:
                    data_flow = file_path_f.read(8096)
                    if not data_flow:
                        break
                    hash_md5.update(data_flow)
        return hash_md5.hexdigest()

    def write_upload_json(self):
        """Generate the upload.json."""
        assets = self.asset_info["asset"]
        upload_asset = []
        self.upload_info["scene"] = [
            {
                "local": self.cg_file.replace("\\", "/"),
                "server": utils.convert_path(self.cg_file),
                "hash": self.get_file_md5(self.cg_file)
            }
        ]
        for path in assets:
            resources = {}
            # Asset entries may carry an " (mtime..." suffix; keep only the path.
            local = path.split(" (mtime")[0]
            server = utils.convert_path(local)
            resources["local"] = local.replace("\\", "/")
            resources["server"] = server
            upload_asset.append(resources)
        # Add the cg file to upload.json
        upload_asset.append({
            "local": self.cg_file.replace("\\", "/"),
            "server": utils.convert_path(self.cg_file)
        })
        self.upload_info["asset"] = upload_asset
        utils.json_save(self.upload_json, self.upload_info)

    def __copy_file(self, src, dst):
        # Windows-only recursive copy via xcopy; errors surface on stdout only.
        copy_cmd = 'xcopy /s /y /f /e "%s" "%s"' % (src, dst)
        print('Copy command: [%s]' % copy_cmd)
        os.system(copy_cmd)

    def update_pyp_script(self, exe_path, cg_ver):
        """Deploy the RBAnalyzer.pyp analyzer plugin into the MAXON plugins dir.

        Returns:
            bool: True when a matching MAXON preferences directory was found
                (and the plugin copied there), False otherwise.
        """
        print('Update analyze pyp...')
        curr_dir = os.path.dirname(__file__)
        base_dir = os.path.abspath(curr_dir)
        src_plugin = os.path.join(base_dir, 'tool')
        maxon_temp_path = os.path.join(os.getenv('APPDATA'), 'MAXON')
        if not os.path.exists(maxon_temp_path):
            os.makedirs(maxon_temp_path)
        flag = False
        for dir in os.listdir(maxon_temp_path):
            lower_dir = dir.lower()
            lower_inst = os.path.basename(os.path.dirname(exe_path)).lower()
            lower_ver = cg_ver.lower()
            print(lower_dir, lower_inst, lower_ver)
            # Match the prefs folder either by version string ("cinema 4d r19")
            # or by the installation directory name.
            if lower_dir.startswith(lower_ver) or lower_dir.startswith(lower_inst):
                flag = True
                maxon_plugin_path = os.path.join(maxon_temp_path, dir, 'plugins')
                if not os.path.exists(maxon_plugin_path):
                    os.makedirs(maxon_plugin_path)
                else:
                    # Best-effort cleanup of a previously deployed plugin.
                    try:
                        os.remove(os.path.join(maxon_plugin_path, 'RBAnalyzer.pyp'))
                        os.system('del /q /s %s\\python26\\*' % maxon_plugin_path)
                        os.system('del /q /s %s\\python27\\*' % maxon_plugin_path)
                        os.system('del /q /s %s\\python37\\*' % maxon_plugin_path)
                    except:
                        pass
                print('Copy pyp: from [%s] to [%s]' % (src_plugin, maxon_plugin_path))
                try:
                    self.__copy_file(src_plugin, maxon_plugin_path)
                    print('RBAnalyzer.pyp was updated...')
                except:
                    pass
        if not flag:
            # Create the preferences folder for this installation, then retry.
            # NOTE(review): this recursion has no depth limit — if the created
            # folder still does not match, it could recurse indefinitely; confirm.
            path_finder = GetInstallPath()
            pref_path_inst = path_finder.install_path(os.path.dirname(exe_path))
            if not os.path.exists(pref_path_inst):
                os.makedirs(pref_path_inst)
            flag = self.update_pyp_script(exe_path, cg_ver)
        return flag

    def analyse(self, exe_path):
        """Build a cmd command to perform an analysis scenario.

        Args:
            exe_path (str): Absolute path to the Cinema 4D executable used to
                run the analysis.

        Raises:
            AnalyseFailError: Analysis scenario failed.
        """
        if not os.path.exists(exe_path):
            self.logger.error("Please enter the c4d software absolute path")
            raise AnalyseFailError
        cg_ver = '{} {}'.format(self.render_software, self.software_version)  # Cinema 4D R19
        if not self.update_pyp_script(exe_path, cg_ver):
            print('[ERROR] MAXON appdata "%appdata%/MAXON" not found')
            raise ValueError('MAXON appdata not found')
        self.write_task_json()
        print('Analyze cg file: [%s]' % self.cg_file)
        # The scene path is passed base64-encoded to survive shell quoting.
        if sys.version_info.major == 2:
            cg_file = base64.b64encode(bytes(self.cg_file)).decode("utf-8")
        else:
            cg_file = base64.b64encode(bytes(self.cg_file, 'utf-8')).decode("utf-8")
        print('Encoded cg file: [%s]' % cg_file)
        if self.local_os == 'windows':
            cmd = ('"{exe_path}" -cg_file="{cg_file}" -task_json="{task_json}" '
                   '-asset_json="{asset_json}" -tips_json="{tips_json}" -upload_json="{upload_json}" '
                   '-log_path="{log_path}" '
                   '-parallel -nogui').format(
                exe_path=exe_path,
                cg_file=cg_file,
                task_json=self.task_json,
                asset_json=self.asset_json,
                tips_json=self.tips_json,
                upload_json=self.upload_json,
                log_path=self.analyse_log_path
            )
        else:
            self.logger.error("c4d does not support linux rendering")
        # NOTE(review): `cmd` is only bound on windows; on any other OS the
        # next line raises NameError — confirm non-windows never reaches here.
        self.logger.debug(cmd)
        code, _, _ = Cmd.run(cmd, shell=True)
        if code not in [0, 1]:
            self.add_tip(tips_code.UNKNOW_ERR, "")
            self.save_tips()
            raise AnalyseFailError
        # Determine whether the analysis is successful by
        # determining whether a json file is generated.
        status, msg = self.check_result()
        if status is False:
            self.add_tip(tips_code.UNKNOW_ERR, msg)
            self.save_tips()
            raise AnalyseFailError(msg)
        # Load the freshly generated result files back into memory.
        self.tips_info = utils.json_load(self.tips_json)
        self.asset_info = utils.json_load(self.asset_json)
        self.task_info = utils.json_load(self.task_json)
| 13,623 | 4,167 |
#
# Copyright (c) 2014-present, Facebook, Inc.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
#
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from openr.Decision import ttypes as decision_types
from openr.utils import socket, consts
import zmq
class DecisionClient():
    """Thin ZMQ REQ client for querying the Open/R Decision module."""
    def __init__(self, zmq_ctx, decision_cmd_url, timeout=consts.Consts.TIMEOUT_MS,
                 proto_factory=consts.Consts.PROTO_FACTORY):
        self._decision_cmd_socket = socket.Socket(zmq_ctx, zmq.REQ, timeout,
                                                  proto_factory)
        self._decision_cmd_socket.connect(decision_cmd_url)

    def _get_db(self, db_type, node_name=''):
        # Build a DecisionRequest, send it over the REQ socket, and block for
        # the DecisionReply (subject to the configured socket timeout).
        req_msg = decision_types.DecisionRequest()
        req_msg.cmd = db_type
        req_msg.nodeName = node_name
        self._decision_cmd_socket.send_thrift_obj(req_msg)
        return self._decision_cmd_socket.recv_thrift_obj(decision_types.DecisionReply)

    def get_route_db(self, node_name=''):
        """Return the route database, optionally scoped to one node."""
        return self._get_db(decision_types.DecisionCommand.ROUTE_DB_GET,
                            node_name).routeDb

    def get_adj_dbs(self):
        """Return the adjacency databases of all nodes."""
        return self._get_db(decision_types.DecisionCommand.ADJ_DB_GET).adjDbs

    def get_prefix_dbs(self):
        """Return the prefix databases of all nodes."""
        return self._get_db(decision_types.DecisionCommand.PREFIX_DB_GET).prefixDbs
| 1,502 | 492 |
# cports build template for xdg-utils.
pkgname = "xdg-utils"
pkgver = "1.1.3"
pkgrel = 0
# Pinned upstream git commit the source tarball is built from.
_commit = "d11b33ec7f24cfb1546f6b459611d440013bdc72"
build_style = "gnu_configure"
make_cmd = "gmake"
make_dir = "."
hostmakedepends = ["xmlto", "lynx", "gmake"]
depends = ["xset"]
pkgdesc = "Basic desktop integration scripts"
maintainer = "q66 <q66@chimera-linux.org>"
license = "MIT"
url = "https://www.freedesktop.org/wiki/Software/xdg-utils"
source = f"https://gitlab.freedesktop.org/xdg/{pkgname}/-/archive/{_commit}.tar.gz"
sha256 = "cc7f8b1292a4c1fa2054594642ff90e3740269033a32d97bcf9bd04322d5555c"
# no check target
options = ["!check"]
def post_install(self):
    # The MIT license text is not installed by `make install`; ship it here.
    self.install_license("LICENSE")
| 656 | 322 |
from setuptools import setup, find_packages

# Read the long description and license text with explicit UTF-8 so the build
# does not depend on the machine's locale encoding. `license_text` avoids
# shadowing the `license` builtin (the original bound it to `license`).
with open('README.md', encoding='utf-8') as f:
    readme = f.read()

with open('LICENSE', encoding='utf-8') as f:
    license_text = f.read()

setup(
    name='file-manip-toolkit',
    version='1.1',
    description='collection of tools for low level binary manipulations of files',
    long_description=readme,
    author='M B',
    author_email='dont@me',
    license=license_text,
    packages=find_packages(),
    entry_points={
        'console_scripts': [
            'unfman=file_manip_toolkit.unfman.cli:main',
            'eswap=file_manip_toolkit.eswap.cli:main'],
    }
)
| 591 | 202 |
import bt
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from datetime import datetime
from pyfinlab import data_api as api
"""
These functions backtest the efficient frontier portfolios.
"""
class OrderedWeights(bt.Algo):
    """bt Algo that assigns fixed target weights to the selected tickers.

    Weights are zipped positionally with ``target.temp['selected']``, so the
    ordering of ``weights`` must match the selection order.
    """
    def __init__(self, weights):
        self.target_weights = weights

    def __call__(self, target):
        target.temp['weights'] = dict(zip(target.temp['selected'], self.target_weights))
        return True
def backtest_parameters(portfolio, weightings, prices):
    """
    Creates Backtest object combining Strategy object with price data.

    :param portfolio: (int) Choose any portfolio from 1-20.
    :param weightings: (pd.DataFrame) Weightings for efficient frontier portfolios.
    :param prices: (pd.DataFrame) Dataframe where each column is a series of prices for an asset.
    :return: (obj) Backtest object combining Strategy object with price data.
    """
    target_weights = weightings[portfolio]
    # Keep only assets with a non-zero allocation for this portfolio.
    target_weights = target_weights[target_weights!=0].to_frame()
    tickers = list(target_weights.index)
    weights_dict = target_weights.to_dict().get(portfolio)
    prices_df = prices[tickers]
    # Quarterly rebalance back to the fixed target weights.
    strategy = bt.Strategy('{}'.format(portfolio), [bt.algos.RunQuarterly(),
                                                    bt.algos.SelectAll(tickers),
                                                    OrderedWeights(list(weights_dict.values())),
                                                    bt.algos.Rebalance()])
    return bt.Backtest(strategy, prices_df)
def compile_backtests(weightings, prices):
    """
    Compiles multiple backtest objects.

    :param weightings: (pd.DataFrame) Weightings for efficient frontier portfolios.
    :param prices: (pd.DataFrame) Dataframe where each column is a series of prices for an asset.
    :return: (list) List of Backtest objects, one for each efficient frontier portfolio.
    """
    return [
        backtest_parameters(portfolio, weightings, prices)
        for portfolio in list(weightings.columns)
    ]
def benchmark_strategy(benchmark_ticker='SPY'):
    """
    Creates a Strategy object for the benchmark ticker.

    :param benchmark_ticker: (str) Optional, benchmark ticker. Defaults to 'SPY'.
    :return: (obj) Strategy object assigned to the benchmark.
    """
    # Buy-and-hold the single benchmark ticker, rebalanced quarterly.
    return bt.Strategy(
        benchmark_ticker,
        algos = [bt.algos.RunQuarterly(),
                 bt.algos.SelectAll(),
                 bt.algos.SelectThese([benchmark_ticker]),
                 bt.algos.WeighEqually(),
                 bt.algos.Rebalance()],
    )
def benchmark_backtest(benchmark_ticker, start_date, end_date, api_source):
    """
    Creates Backtest object combining Strategy object with price data from the benchmark.

    :param benchmark_ticker: (str) Optional, benchmark ticker. Defaults to 'SPY'.
    :param start_date: (str) Start date of requested time series. Must be in 'YYYY-MM-DD' (i.e. '2021-06-21') if
                             api_source is yfinance. Must be in 'MM/DD/YYYY' (i.e. '2021-06-21') format if api_source is
                             bloomberg.
    :param end_date: (str) End date of requested time series. Must be in 'YYYY-MM-DD' (i.e. '2021-06-21') if
                           api_source is yfinance. Must be in 'MM/DD/YYYY' (i.e. '2021-06-21') format if api_source is
                           bloomberg.
    :param api_source: (str) API source to pull data from. Choose from 'yfinance' or 'bloomberg'. Default is yfinance.
    :return: (obj) Backtest object combining Strategy object with price data.
    """
    benchmark_prices = api.price_history([benchmark_ticker], start_date, end_date, api_source)
    benchmark_prices.columns = [benchmark_ticker]
    # Removed the unused `benchmark_name = api.name(...)` lookup: its result
    # was never used and it cost an extra API round-trip.
    return bt.Backtest(benchmark_strategy(benchmark_ticker), benchmark_prices)
def run_backtest(backtests, benchmark):
    """
    Runs the backtest.

    :param backtests: (list) List of Backtest objects, one for each efficient frontier portfolio.
    :param benchmark: (obj) Backtest object for the benchmark_strategy.
    :return: (obj) Result object containing backtest results.
    """
    np.seterr(divide='ignore')
    # Unpack the whole list instead of hard-coding indices 0..19 so the
    # function works for any number of portfolios (the original required
    # exactly twenty and raised IndexError otherwise).
    return bt.run(*backtests, benchmark)
def linechart(Results, title='Backtest Results', figsize=(15, 9), save=False, show=True, colormap='jet'):
    """
    Plots the performance for all efficient frontier portfolios.

    :param Results: (object) Results object from bt.backtest.Result(*backtests). Refer to the following documentation
                             https://pmorissette.github.io/bt/bt.html?highlight=display#bt.backtest.Result
    :param title: (str) Optional, plot title, also used to name the image file if saved. Defaults to 'Backtest Results'.
    :param figsize: (float, float) Optional, figure width and height in inches. Defaults to (15, 9).
    :param save: (bool) Optional, saves the figure as a dated PNG under ../charts. Defaults to False.
    :param show: (bool) Optional, displays plot. Defaults to True.
    :param colormap: (str or matplotlib colormap object) Colormap to select colors from. If string, load colormap with
                     that name from matplotlib. Defaults to 'jet'.
    :return: (fig) Plot of performance for all efficient frontier portfolios.
    """
    plot = Results.plot(title=title, figsize=figsize, colormap=colormap)
    fig = plot.get_figure()
    plt.legend(loc="upper left")
    # Idiomatic truthiness checks (the original compared `== True`/`== False`).
    if save:
        plt.savefig(
            '../charts/linechart_{}.png'.format(datetime.today().strftime('%m-%d-%Y')),
            bbox_inches='tight')
    if not show:
        plt.close()
    # The docstring always promised the figure; the original returned None.
    return fig
def backtest_timeseries(Results, freq='d'):
    """
    Plots the performance for all efficient frontier portfolios.

    :param Results: (object) Results object from bt.backtest.Result(*backtests). Refer to the following documentation
                             https://pmorissette.github.io/bt/bt.html?highlight=display#bt.backtest.Result
    :param freq: (str) Data frequency used for display purposes. Refer to pandas docs for valid freq strings.
    :return: (pd.DataFrame) Time series of each portfolio's value over time according to the backtest Results object.
    """
    # NOTE(review): relies on bt's private Result._get_series API — may break
    # across bt versions; the first (duplicated) row is dropped via iloc[1:].
    return Results._get_series(freq).drop_duplicates().iloc[1:]
| 6,603 | 1,988 |
"""
Sentiment prediction module
"""
import nltk
import numpy as np
from cPickle import load
def get_word_index_array(words, word2index):
    u"""Map each word to its vocabulary index, silently dropping unknowns.

    >>> words = [u"I", u"love", u"you", u"RANDOM STUFF"]
    >>> word2index = {u"I": 0, u"love": 1, u"you": 2}
    >>> get_word_index_array(words, word2index)
    [0, 1, 2]
    """
    indices = []
    for token in words:
        index = word2index.get(token)
        if index is not None:  # skip out-of-vocabulary tokens
            indices.append(index)
    return indices
def pad_sents(sents, padding_token_index):
    """
    Pad the sents(in word index form) into same length so they can form a matrix

    Returns [] for empty input (the original raised ValueError via max()).

    >>> sents = [[1,2,3], [1,2], [1,2,3,4,5]]
    >>> pad_sents(sents, padding_token_index = -1)
    [[1, 2, 3, -1, -1], [1, 2, -1, -1, -1], [1, 2, 3, 4, 5]]
    """
    if not sents:
        return []
    max_len = max(len(sent) for sent in sents)
    # Right-pad every sentence up to the longest one.
    return [sent + [padding_token_index] * (max_len - len(sent))
            for sent in sents]
# Vocabulary mapping (word -> index); element 3 of the pickled twitter dataset.
WORD2INDEX = load(open("sent/data/twitter.pkl"))[3]
PADDING_INDEX = WORD2INDEX[u"<PADDING>"]

from param_util import load_dcnn_model_params
from dcnn import DCNN

# NOTE(review): model hyper-parameters are encoded in the pickle filename.
params = load_dcnn_model_params("sent/models/filter_widths=8,6,,batch_size=10,,ks=20,8,,fold=1,1,,conv_layer_n=2,,ebd_dm=48,,l2_regs=1e-06,1e-06,1e-06,0.0001,,dr=0.5,0.5,,nkerns=7,12.pkl")
# Module-level singleton: the DCNN model is loaded once at import time.
MODEL = DCNN(params)
def sentiment_scores_of_sents(sents):
    """
    Predict the sentiment positive scores for a bunch of sentences

    >>> sentiment_scores_of_sents([u'simultaneously heart breaking and very funny , the last kiss is really all about performances .', u'( u ) stupid .'])
    array([ 0.78528505, 0.0455901 ])
    """
    word_indices = [get_word_index_array(nltk.word_tokenize(sent), WORD2INDEX)
                    for sent in sents]
    # Pad to a rectangular int32 matrix, the DCNN's expected input shape.
    x = np.asarray(
        pad_sents(word_indices, PADDING_INDEX),
        dtype = np.int32
    )
    scores = MODEL._p_y_given_x(x)
    return scores[:, 1] # return `positiveness`
def sentiment_score(sent):
    """simple wrapper around the more general case"""
    # Single-sentence convenience wrapper; returns one positiveness score.
    return sentiment_scores_of_sents([sent])[0]
| 2,318 | 922 |
from pathlib import Path
import numpy as np
import pandas as pd
import torch
from matplotlib import pyplot as plt
from src.tools.metrics import evaluate
def load_dataframes(din, wildcard="*fusion*.csv"):
    """Load every CSV in *din* matching *wildcard* into a dict of DataFrames.

    Keys are derived from the file stem: the last three '_'-separated tokens
    joined with '-' with the trailing '-fusion' marker removed.

    :param din: (Path) directory to scan.
    :param wildcard: (str) glob pattern selecting the fusion result files.
    :return: (dict) mapping reference id -> pd.DataFrame.
    """
    data = {}
    for f in din.glob(wildcard):
        print(f)
        # The original used f.with_name("").name, which raises ValueError in
        # pathlib; f.stem is the evident intent. TODO(review): confirm key format.
        ref = "-".join(f.stem.split("_")[-3:]).replace("-fusion", "")
        print(ref)
        data[ref] = pd.read_csv(f)
    # Bug fix: the original built `data` but never returned it, so the caller
    # (`df_list = load_dataframes(...)`) always received None.
    return data
# Evaluate image-based gallery/probe retrieval: per-query CMC curve and mAP.
if __name__ == "__main__":
    dir_results = Path(
        Path.home().joinpath(
            "Dropbox/FIW_Video/results/search_retrieval_evaluation/results/image_based"
        )
    )
    df_list = load_dataframes(dir_results)
    # Pickled dict holding gallery/query features and their labels.
    result = np.load(
        dir_results.joinpath("gallery_probe_features.npy"), allow_pickle=True
    )
    gallery_feature = torch.FloatTensor(result.item().get("gallery_f"))
    gallery_label = result.item().get("gallery_label")
    print("gallery size:", gallery_feature.size(), gallery_label.shape)
    query_feature = torch.FloatTensor(result.item().get("query_f"))
    query_label = result.item().get("query_label")
    print("query size:", query_feature.size(), query_label.shape)
    # Drop the singleton middle dimension — assumes (N, 1, D) features;
    # TODO(review): confirm stored feature shape.
    query_feature = query_feature.squeeze(1)
    gallery_feature = gallery_feature.squeeze(1)
    ## query-gallery
    # CMC accumulator: one slot per gallery rank position.
    CMC = torch.IntTensor(gallery_label.shape[0]).zero_()
    ap = 0.0
    all_scores = []
    all_predicts = []
    for i in range(query_label.shape[0]):
        scores, predicts, (ap_tmp, CMC_tmp) = evaluate(
            query_feature[i], query_label[i], gallery_feature, gallery_label
        )
        all_scores.append(scores.squeeze())
        all_predicts.append(predicts)
        if CMC_tmp[0] == -1:
            # evaluate() signals an invalid query with -1; skip it in averages.
            continue
        CMC = CMC + CMC_tmp
        ap += ap_tmp
    CMC = CMC.float()
    CMC = CMC / query_label.shape[0]  # average CMC
    print("Rank@1:%f Rank@5:%f Rank@10:%f" % (CMC[0], CMC[4], CMC[9]))
    print("Rank@10:%f Rank@20:%f Rank@50:%f" % (CMC[9], CMC[19], CMC[49]))
    print("mAP:%f" % (ap / query_label.shape[0]))
    # save all_scores to npy
    predict_result = {
        "score": np.asarray(all_scores),
        "predict": np.asarray(all_predicts),
    }
    np.save("predict_result.npy", predict_result)
    CMC = CMC.numpy()
    fig, ax = plt.subplots()
    plt.plot(CMC)
    ax.set(xscale="log")
    plt.xlim(0, 1000)
    plt.show()
    fig.savefig("CMC_result.png")
| 2,362 | 877 |
"""Templatetags for the ``calendarium`` project."""
try:
from django.core.urlresolvers import reverse
except ImportError: # >= django 2.0
from django.urls import reverse
from django import template
from django.utils.timezone import datetime, now, timedelta, utc
from ..models import Event, EventCategory
register = template.Library()
# `assignment_tag` was removed in Django 2.0; there `simple_tag` also supports
# assignment, so pick whichever the installed Django provides.
register_tag = register.assignment_tag if hasattr(
    register, 'assignment_tag') else register.simple_tag
@register.filter
def get_week_URL(date, day=0):
    """
    Returns the week view URL for a given date.

    :param date: A date instance.
    :param day: Day number in a month.
    """
    # Clamp to the first day of the month; datetime() rejects day=0.
    if day < 1:
        day = 1
    date = datetime(year=date.year, month=date.month, day=day, tzinfo=utc)
    # ISO calendar gives the (year, week) pair the week view expects.
    return reverse(
        'calendarium:calendar_week',
        kwargs={
            'year': date.isocalendar()[0],
            'week': date.isocalendar()[1]})
def _get_upcoming_events(amount=5, category=None):
    """Return up to *amount* occurrences within the next year, optionally
    filtered by an ``EventCategory`` instance (any other value disables
    filtering)."""
    if not isinstance(category, EventCategory):
        category = None
    # 365 days = one year; the original used 356, an apparent digit-swap typo.
    return Event.objects.get_occurrences(
        now(), now() + timedelta(days=365), category)[:amount]
@register.inclusion_tag('calendarium/upcoming_events.html')
def render_upcoming_events(event_amount=5, category=None):
    """Template tag to render a list of upcoming events."""
    return {
        'occurrences': _get_upcoming_events(
            amount=event_amount, category=category),
    }
@register_tag
def get_upcoming_events(amount=5, category=None):
    """Returns a list of upcoming events."""
    return _get_upcoming_events(amount=amount, category=category)
| 1,615 | 501 |
"""Integration test suite for the playlist_along package."""
| 61 | 17 |
# Source : https://leetcode.com/problems/number-of-islands/
# Author : yhwhu
# Date : 2020-07-29
#####################################################################################################
#
# Given a 2d grid map of '1's (land) and '0's (water), count the number of islands. An island is
# surrounded by water and is formed by connecting adjacent lands horizontally or vertically. You may
# assume all four edges of the grid are all surrounded by water.
#
# Example 1:
#
# Input: grid = [
# ["1","1","1","1","0"],
# ["1","1","0","1","0"],
# ["1","1","0","0","0"],
# ["0","0","0","0","0"]
# ]
# Output: 1
#
# Example 2:
#
# Input: grid = [
# ["1","1","0","0","0"],
# ["1","1","0","0","0"],
# ["0","0","1","0","0"],
# ["0","0","0","1","1"]
# ]
# Output: 3
#####################################################################################################
from collections import deque
from typing import List
class UF:
    """Union-find (disjoint set) with union by size and path halving.

    `cnt` tracks the number of disjoint components still remaining.
    """

    def __init__(self, n):
        self.parent = {}
        self.size = [0] * n
        self.cnt = n
        for node in range(n):
            self.parent[node] = node
            self.size[node] = 1

    def find(self, x):
        """Return the root of x, halving the path along the way."""
        root = x
        while root != self.parent[root]:
            # Point the node at its grandparent (path halving).
            self.parent[root] = self.parent[self.parent[root]]
            root = self.parent[root]
        return root

    def union(self, x, y):
        """Merge the components containing x and y (union by size)."""
        root_x, root_y = self.find(x), self.find(y)
        if root_x == root_y:
            return
        if self.size[root_y] > self.size[root_x]:
            root_x, root_y = root_y, root_x
        # root_x now owns the larger (or equal) tree; attach root_y under it.
        self.parent[root_y] = root_x
        self.size[root_x] += self.size[root_y]
        self.cnt -= 1
class Solution:
    """Count islands of '1' cells via union-find, DFS, or BFS (all O(m*n))."""

    def numIslands_uf(self, grid: List[List[str]]) -> int:
        """Union-find: union adjacent land cells, union all water cells with a
        virtual node, then the answer is the component count minus the
        water/virtual component."""
        if not grid or not grid[0]:
            # Consistency fix: only the BFS variant guarded empty input; the
            # union-find and DFS variants crashed on `grid[0]`.
            return 0
        m = len(grid)
        n = len(grid[0])

        def get_index(x, y):
            # Flatten (row, col) to a single union-find node id.
            return x * n + y

        uf = UF(m * n + 1)
        # Right/down is enough: each adjacent pair is visited exactly once.
        positions = [(0, 1), (1, 0)]
        for i in range(m):
            for j in range(n):
                if grid[i][j] == "0":
                    # Node m*n is the shared virtual "water" node.
                    uf.union(get_index(i, j), m * n)
                elif grid[i][j] == "1":
                    for ii, jj in positions:
                        new_i = i + ii
                        new_j = j + jj
                        if 0 <= new_i < m and 0 <= new_j < n and grid[new_i][new_j] == "1":
                            uf.union(get_index(i, j), get_index(new_i, new_j))
        # Subtract the single water/virtual component.
        return uf.cnt - 1

    def numIslands_dfs(self, grid: List[List[str]]) -> int:
        """DFS flood fill starting from every unvisited land cell."""
        if not grid or not grid[0]:
            return 0
        m = len(grid)
        n = len(grid[0])
        count = 0
        visited = [[0 for _ in range(n)] for _ in range(m)]
        positions = [(0, -1), (1, 0), (0, 1), (-1, 0)]
        for i in range(m):
            for j in range(n):
                if grid[i][j] == "1" and not visited[i][j]:
                    count += 1
                    self._dfs(i, j, grid, m, n, visited, positions)
        return count

    def _dfs(self, i, j, grid, m, n, visited, positions):
        """Recursively mark every land cell reachable from (i, j)."""
        visited[i][j] = 1
        for ii, jj in positions:
            new_i = i + ii
            new_j = j + jj
            if 0 <= new_i < m and 0 <= new_j < n and grid[new_i][new_j] == "1" and not visited[new_i][new_j]:
                self._dfs(new_i, new_j, grid, m, n, visited, positions)

    def numIslands_bfs(self, grid: List[List[str]]) -> int:
        """BFS flood fill; deque gives O(1) pops (the original list.pop(0)
        was O(n) per pop, and it allocated a queue for every single cell)."""
        if not grid or not grid[0]:
            return 0
        m = len(grid)
        n = len(grid[0])
        count = 0
        visited = [[0 for _ in range(n)] for _ in range(m)]
        positions = [(0, -1), (1, 0), (0, 1), (-1, 0)]
        for i in range(m):
            for j in range(n):
                if grid[i][j] == "1" and not visited[i][j]:
                    visited[i][j] = 1
                    count += 1
                    # Seed the queue only when a fresh island is discovered.
                    queue = deque([(i, j)])
                    while queue:
                        cur_i, cur_j = queue.popleft()
                        for ii, jj in positions:
                            new_i = cur_i + ii
                            new_j = cur_j + jj
                            if 0 <= new_i < m and 0 <= new_j < n and grid[new_i][new_j] == "1" \
                                    and not visited[new_i][new_j]:
                                queue.append((new_i, new_j))
                                visited[new_i][new_j] = 1
        return count
# Smoke test on a large mostly-land grid using the union-find variant.
if __name__ == '__main__':
    grid = [["1","1","1","1","1","0","1","1","1","1","1","1","1","1","1","0","1","0","1","1"],["0","1","1","1","1","1","1","1","1","1","1","1","1","0","1","1","1","1","1","0"],["1","0","1","1","1","0","0","1","1","0","1","1","1","1","1","1","1","1","1","1"],["1","1","1","1","0","1","1","1","1","1","1","1","1","1","1","1","1","1","1","1"],["1","0","0","1","1","1","1","1","1","1","1","1","1","1","1","1","1","1","1","1"],["1","0","1","1","1","1","1","1","0","1","1","1","0","1","1","1","0","1","1","1"],["0","1","1","1","1","1","1","1","1","1","1","1","0","1","1","0","1","1","1","1"],["1","1","1","1","1","1","1","1","1","1","1","1","0","1","1","1","1","0","1","1"],["1","1","1","1","1","1","1","1","1","1","0","1","1","1","1","1","1","1","1","1"],["1","1","1","1","1","1","1","1","1","1","1","1","1","1","1","1","1","1","1","1"],["0","1","1","1","1","1","1","1","0","1","1","1","1","1","1","1","1","1","1","1"],["1","1","1","1","1","1","1","1","1","1","1","1","1","1","1","1","1","1","1","1"],["1","1","1","1","1","1","1","1","1","1","1","1","1","1","1","1","1","1","1","1"],["1","1","1","1","1","0","1","1","1","1","1","1","1","0","1","1","1","1","1","1"],["1","0","1","1","1","1","1","0","1","1","1","0","1","1","1","1","0","1","1","1"],["1","1","1","1","1","1","1","1","1","1","1","1","0","1","1","1","1","1","1","0"],["1","1","1","1","1","1","1","1","1","1","1","1","1","0","1","1","1","1","0","0"],["1","1","1","1","1","1","1","1","1","1","1","1","1","1","1","1","1","1","1","1"],["1","1","1","1","1","1","1","1","1","1","1","1","1","1","1","1","1","1","1","1"],["1","1","1","1","1","1","1","1","1","1","1","1","1","1","1","1","1","1","1","1"]]
    solution = Solution()
    result = solution.numIslands_uf(grid)
    print(result)
| 6,104 | 2,342 |
import sys
import os
import json
import unittest
from mock import call, patch, MagicMock, Mock, ANY
# TODO: Check get params of request to ensure those match what is expected
class TestAnimeList(unittest.TestCase):
    """Tests for the anime-list route: route registration, query-parameter
    extraction, and population of the Kodi directory from the list API."""

    def setUp(self):
        """Patch the Kodi / requests modules into sys.modules so the route
        module can be imported without a Kodi runtime."""
        self.dir_path = os.path.dirname(os.path.realpath(__file__))
        self.mock_requests = MagicMock()
        self.mock_xbmc_plugin = MagicMock()
        self.mock_xbmc_gui = MagicMock()
        self.mock_route_factory = MagicMock()
        modules = {
            "requests": self.mock_requests,
            "xbmcplugin": self.mock_xbmc_plugin,
            "xbmcgui": self.mock_xbmc_gui,
            # Fix: the real Kodi module is "xbmcaddon"; the previous key
            # "xbmcadddon" (triple d) mocked a module that is never imported.
            "xbmcaddon": MagicMock(),
            "resolveurl": MagicMock(),
            "resources.lib.router_factory": self.mock_route_factory
        }
        self.module_patcher = patch.dict('sys.modules', modules)
        self.module_patcher.start()

    def tearDown(self):
        """Undo the sys.modules patching applied in setUp."""
        self.module_patcher.stop()

    def test_generate_routes(self):
        """generate_routes registers the anime_list handler at /anime-list."""
        from resources.lib.routes.animelist import generate_routes, anime_list

        mock_plugin = MagicMock()
        generate_routes(mock_plugin)

        mock_plugin.add_route.assert_has_calls([
            call(anime_list, '/anime-list'),
        ])

    def test_get_current_params_returns_values_if_passed_in(self):
        """_get_current_params unwraps each single-element plugin.args list."""
        from resources.lib.routes.animelist import _get_current_params

        expected_year = "2000"
        expected_season = "Winter"
        expected_genre = "Test,Test2"
        expected_page = "Page"

        mock_plugin = type('', (), {})
        mock_plugin.args = {
            "year": [expected_year],
            "season": [expected_season],
            "genres": [expected_genre],
            "page": [expected_page],
        }

        args = _get_current_params(mock_plugin)

        self.assertDictEqual(args, {
            "year": expected_year,
            "season": expected_season,
            "genres": expected_genre,
            "page": expected_page
        }, "Returned parameter list does not match plugin.arg values")

    def test_get_current_params_returns_empty_if_none(self):
        """_get_current_params yields an empty dict when no args are set."""
        from resources.lib.routes.animelist import _get_current_params

        mock_plugin = type('', (), {})
        mock_plugin.args = {}

        args = _get_current_params(mock_plugin)

        self.assertDictEqual(args, {}, "Returned parameter list does not match plugin.arg values")

    def test_successful_retrieval_page_one_none_page(self):
        """With no page argument, list items for page one plus a Next Page
        entry are created."""
        fixture_path = self.dir_path + "/fixtures/animeList/list_success.json"
        with open(fixture_path, "r") as fixture:
            mock_response = fixture.read()

        res_mock = MagicMock()
        res_mock.json.return_value = json.loads(mock_response)
        self.mock_requests.get.return_value = res_mock

        from resources.lib.routes.animelist import anime_list
        anime_list()

        self.mock_xbmc_gui.ListItem.assert_has_calls([
            call('Gintama.: Shirogane no Tamashii-hen 2'),
            call().setArt({'icon': 'https://myanimelist.cdn-dena.com/images/anime/1518/95051.jpg'}),
            call().setInfo(infoLabels={'plot': 'Second Season of the final arc of Gintama.'}, type='video'),
            call('Gintama.: Silver Soul Arc - Second Half War'),
            call().setArt({'icon': 'https://myanimelist.cdn-dena.com/images/anime/1518/95051.jpg'}),
            call().setInfo(infoLabels={'plot': 'Second Season of the final arc of Gintama.'}, type='video'),
            call('Gintama.: Shirogane no Tamashii-hen - Kouhan-sen'),
            call().setArt({'icon': 'https://myanimelist.cdn-dena.com/images/anime/1518/95051.jpg'}),
            call().setInfo(infoLabels={'plot': 'Second Season of the final arc of Gintama.'}, type='video'),
            call('Next Page')
        ])

    def test_successful_retrieval_page_one_with_selected(self):
        """Supplied season/year/genres/page args are forwarded to the API and
        the directory is populated, including a Next Page item."""
        handle_val = "Random"
        mock_url = "Random-url"

        mock_plugin = type('', (), {})
        mock_plugin.args = {
            "season": ["Summer"],
            "year": ["2018"],
            "genres": ["Test1,Test2"],
            "page": ["1"]
        }
        mock_plugin.handle = handle_val
        mock_plugin.url_for = Mock(return_value=mock_url)

        mock_route_factory = MagicMock()
        mock_route_factory.get_router_instance = mock_plugin
        sys.modules['resources.lib.router_factory'] = mock_route_factory

        fixture_path = self.dir_path + "/fixtures/animeList/list_success.json"
        with open(fixture_path, "r") as fixture:
            mock_response = fixture.read()

        res_mock = MagicMock()
        res_mock.json = Mock(return_value=json.loads(mock_response))
        self.mock_requests.get = Mock(return_value=res_mock)

        from resources.lib.routes.animelist import anime_list
        anime_list()

        self.mock_requests.get.assert_called_once_with(
            'https://api.animepie.to/Anime/AnimeMain/List',
            params={
                'sort': 1,
                'website': '',
                'genres': 'Test1,Test2',
                'season': 'Summer',
                'limit': 15,
                'year': 2018,
                'sort2': '',
                'page': 1
            }
        )

        self.mock_xbmc_gui.ListItem.assert_has_calls([
            call('Gintama.: Shirogane no Tamashii-hen 2'),
            call().setArt({'icon': 'https://myanimelist.cdn-dena.com/images/anime/1518/95051.jpg'}),
            call().setInfo(infoLabels={'plot': 'Second Season of the final arc of Gintama.'}, type='video'),
            call('Gintama.: Silver Soul Arc - Second Half War'),
            call().setArt({'icon': 'https://myanimelist.cdn-dena.com/images/anime/1518/95051.jpg'}),
            call().setInfo(infoLabels={'plot': 'Second Season of the final arc of Gintama.'}, type='video'),
            call('Gintama.: Shirogane no Tamashii-hen - Kouhan-sen'),
            call().setArt({'icon': 'https://myanimelist.cdn-dena.com/images/anime/1518/95051.jpg'}),
            call().setInfo(infoLabels={'plot': 'Second Season of the final arc of Gintama.'}, type='video'),
            call('Next Page')
        ])
        # Need to check for order of list items added
        self.mock_xbmc_plugin.addDirectoryItem.assert_has_calls([
            call(
                handle_val,
                mock_url,
                ANY,
                True
            ),
            call(
                handle_val,
                mock_url,
                ANY,
                True
            ),
            call(
                handle_val,
                mock_url,
                ANY,
                True
            ),
            call(
                handle_val,
                mock_url,
                ANY,
                True
            ),
        ]
        )

    def test_successful_retrieval_no_next_on_last_page(self):
        """On the last page no Next Page item is added (exactly three items)."""
        handle_val = "Random"
        mock_url = "Random-url"

        mock_plugin = type('', (), {})
        mock_plugin.args = {
            "season": ["Summer"],
            "year": ["2018"],
            "genres": ["Test1,Test2"],
            "page": ["8"]
        }
        mock_plugin.handle = handle_val
        mock_plugin.url_for = Mock(return_value=mock_url)

        mock_route_factory = MagicMock()
        mock_route_factory.get_router_instance = mock_plugin
        sys.modules['resources.lib.router_factory'] = mock_route_factory

        fixture_path = self.dir_path + "/fixtures/animeList/list_success.json"
        with open(fixture_path, "r") as fixture:
            mock_response = fixture.read()

        res_mock = MagicMock()
        res_mock.json = Mock(return_value=json.loads(mock_response))
        self.mock_requests.get = Mock(return_value=res_mock)

        from resources.lib.routes.animelist import anime_list
        anime_list()

        expected_list_item_calls = [
            call('Gintama.: Shirogane no Tamashii-hen 2'),
            call().setArt({'icon': 'https://myanimelist.cdn-dena.com/images/anime/1518/95051.jpg'}),
            call().setInfo(infoLabels={'plot': 'Second Season of the final arc of Gintama.'}, type='video'),
            call('Gintama.: Silver Soul Arc - Second Half War'),
            call().setArt({'icon': 'https://myanimelist.cdn-dena.com/images/anime/1518/95051.jpg'}),
            call().setInfo(infoLabels={'plot': 'Second Season of the final arc of Gintama.'}, type='video'),
            call('Gintama.: Shirogane no Tamashii-hen - Kouhan-sen'),
            call().setArt({'icon': 'https://myanimelist.cdn-dena.com/images/anime/1518/95051.jpg'}),
            call().setInfo(infoLabels={'plot': 'Second Season of the final arc of Gintama.'}, type='video'),
        ]
        # assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(self.mock_xbmc_gui.ListItem.call_count, 3)
        self.mock_xbmc_gui.ListItem.assert_has_calls(expected_list_item_calls)

        self.mock_requests.get.assert_called_once_with(
            'https://api.animepie.to/Anime/AnimeMain/List',
            params={
                'sort': 1,
                'website': '',
                'genres': 'Test1,Test2',
                'season': 'Summer',
                'limit': 15,
                'year': 2018,
                'sort2': '',
                'page': 8
            }
        )
        # Need to check for order of list items added
        expected_calls = [
            call(
                handle_val,
                mock_url,
                ANY,
                True
            ),
            call(
                handle_val,
                mock_url,
                ANY,
                True
            ),
            call(
                handle_val,
                mock_url,
                ANY,
                True
            ),
        ]
        self.assertEqual(self.mock_xbmc_plugin.addDirectoryItem.call_count, 3)
        self.mock_xbmc_plugin.addDirectoryItem.assert_has_calls(expected_calls)
| 10,135 | 3,154 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
import os
import unittest
from typing import TYPE_CHECKING
from cdm.enums import CdmStatusLevel
from cdm.objectmodel import CdmCorpusDefinition
from cdm.storage import LocalAdapter
from cdm.utilities import ResolveOptions, AttributeResolutionDirectiveSet
from tests.common import TestHelper
from tests.utilities.object_validator import ObjectValidator
if TYPE_CHECKING:
from cdm.objectmodel import CdmEntityDefinition
from tests.utilities.object_validator import AttributeContextExpectedValue, AttributeExpectedValue
class CommonTest(unittest.TestCase):
    """Base class for resolution-guidance tests.

    Resolves a source entity once per combination of resolution directives
    and validates the resolved attributes and attribute context against the
    caller-supplied expected values.
    """

    # The path of the SchemaDocs project.
    schema_docs_path = TestHelper.get_schema_docs_root()

    # The test's data path.
    tests_subpath = os.path.join('Cdm', 'ResolutionGuidance')

    async def run_test_with_values(
            self,
            test_name: str,
            source_entity_name: str,
            expected_context_default: 'AttributeContextExpectedValue',
            expected_context_normalized: 'AttributeContextExpectedValue',
            expected_context_reference_only: 'AttributeContextExpectedValue',
            expected_context_structured: 'AttributeContextExpectedValue',
            expected_context_normalized_structured: 'AttributeContextExpectedValue',
            expected_context_reference_only_normalized: 'AttributeContextExpectedValue',
            expected_context_reference_only_structured: 'AttributeContextExpectedValue',
            expected_context_reference_only_normalized_structured: 'AttributeContextExpectedValue',
            expected_default: 'List[AttributeExpectedValue]',
            expected_normalized: 'List[AttributeExpectedValue]',
            expected_reference_only: 'List[AttributeExpectedValue]',
            expected_structured: 'List[AttributeExpectedValue]',
            expected_normalized_structured: 'List[AttributeExpectedValue]',
            expected_reference_only_normalized: 'List[AttributeExpectedValue]',
            expected_reference_only_structured: 'List[AttributeExpectedValue]',
            expected_reference_only_normalized_structured: 'List[AttributeExpectedValue]'
    ) -> None:
        """This method runs the tests with a set of expected attribute and
        attribute-context values and validates the actual result.

        Each directive combination is only exercised when both its expected
        context and expected attribute list are provided.
        """
        try:
            test_input_path = TestHelper.get_input_folder_path(self.tests_subpath, test_name)

            corpus = CdmCorpusDefinition()
            corpus.ctx.report_at_level = CdmStatusLevel.WARNING
            corpus.storage.mount('localInput', LocalAdapter(test_input_path))
            corpus.storage.mount('cdm', LocalAdapter(self.schema_docs_path))
            corpus.storage.default_namespace = 'localInput'

            src_entity_def = await corpus.fetch_object_async('localInput:/{}.cdm.json/{}'.format(source_entity_name, source_entity_name))
            self.assertTrue(src_entity_def is not None)

            res_opt = ResolveOptions(wrt_doc=src_entity_def.in_document)

            # (resolved-name suffix, directives, expected context, expected attributes)
            # — replaces eight copy-pasted resolve-and-validate blocks.
            cases = [
                ('default', set(), expected_context_default, expected_default),
                ('normalized', {'normalized'}, expected_context_normalized, expected_normalized),
                ('referenceOnly', {'referenceOnly'}, expected_context_reference_only, expected_reference_only),
                ('structured', {'structured'}, expected_context_structured, expected_structured),
                ('normalized_structured', {'normalized', 'structured'}, expected_context_normalized_structured, expected_normalized_structured),
                ('referenceOnly_normalized', {'referenceOnly', 'normalized'}, expected_context_reference_only_normalized, expected_reference_only_normalized),
                ('referenceOnly_structured', {'referenceOnly', 'structured'}, expected_context_reference_only_structured, expected_reference_only_structured),
                ('referenceOnly_normalized_structured', {'referenceOnly', 'normalized', 'structured'}, expected_context_reference_only_normalized_structured, expected_reference_only_normalized_structured),
            ]

            for entity_file_name, directives, expected_context, expected_attributes in cases:
                if not (expected_context and expected_attributes):
                    continue
                res_opt.directives = AttributeResolutionDirectiveSet(set(directives))
                output_entity_name = '{}_Resolved_{}'.format(source_entity_name, entity_file_name)
                resolved_entity_def = await src_entity_def.create_resolved_entity_async(output_entity_name, res_opt)  # type: CdmEntityDefinition
                self.validate_output_with_values(expected_context, expected_attributes, resolved_entity_def)
        except Exception as e:
            # Report any unexpected exception as a test failure.
            self.fail(e)

    def validate_output_with_values(self, expected_context: 'AttributeContextExpectedValue', expected_attributes: 'List[AttributeExpectedValue]', actual_resolved_entity_def: 'CdmEntityDefinition') -> None:
        """Runs validation to test actual output vs expected output for attributes collection vs attribute context."""
        ObjectValidator.validate_attributes_collection(self, expected_attributes, actual_resolved_entity_def.attributes)
        ObjectValidator.validate_attribute_context(self, expected_context, actual_resolved_entity_def.attribute_context)
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Gnet define"""
from mindspore import ops
from mindspore import nn
from mindspore.ops import constexpr
import mindspore
import numpy as np
@constexpr
def generate_tensor(batch_size):
    """Return a (batch_size, 1, 1, 1) float32 tensor of standard-normal noise.

    Returns:
        mindspore.Tensor of shape (batch_size, 1, 1, 1).
    """
    noise = np.random.randn(batch_size, 1, 1, 1)
    return mindspore.Tensor(noise, mindspore.float32)
class GradientWithInput(nn.Cell):
    """Cell wrapping a discriminator so its batch-summed decision can be
    differentiated with respect to the interpolated input."""

    def __init__(self, discrimator):
        super(GradientWithInput, self).__init__()
        self.reduce_sum = ops.ReduceSum()
        self.discrimator = discrimator

    def construct(self, interpolates, alpha):
        """Return the discriminator's decision summed over axis 0.

        Returns:
            output.
        """
        decision = self.discrimator(interpolates, alpha)
        return self.reduce_sum(decision, 0)
class WGANGPGradientPenalty(nn.Cell):
    """WGAN-GP gradient penalty: penalizes the squared deviation of the
    input-gradient norm from 1, scaled by lambdaGP."""

    def __init__(self, discrimator, lambdaGP=10):
        super(WGANGPGradientPenalty, self).__init__()
        self.reduce_sum = ops.ReduceSum()
        self.reduce_sum_keep_dim = ops.ReduceSum(keep_dims=True)
        self.sqrt = ops.Sqrt()
        self.discrimator = discrimator
        self.gradientWithInput = GradientWithInput(discrimator)
        self.lambdaGP = mindspore.Tensor(lambdaGP, mindspore.float32)
        self.gradient_op = ops.GradOperation()

    def construct(self, input_x, fake, input_alpha):
        """Compute the gradient penalty for a batch of real/fake pairs.

        Returns:
            output.
        """
        batch_size = input_x.shape[0]
        # Random per-sample mixing coefficient, broadcast to the input shape.
        mix = generate_tensor(batch_size)
        mix = mix.expand_as(input_x)
        interpolates = mix * input_x + ((1 - mix) * fake)
        # Gradient of the summed discriminator decision w.r.t. the interpolates.
        grad = self.gradient_op(self.gradientWithInput)(interpolates, input_alpha)
        grad = ops.reshape(grad, (batch_size, -1))
        grad_norm = self.sqrt(self.reduce_sum(grad * grad, 1))
        return self.reduce_sum_keep_dim((grad_norm - 1.0) ** 2) * self.lambdaGP
class AllLossD(nn.Cell):
    """Total discriminator loss: Wasserstein term plus gradient penalty and
    epsilon drift regularization."""

    def __init__(self, netD):
        super(AllLossD, self).__init__()
        self.netD = netD
        self.wGANGPGradientPenalty = WGANGPGradientPenalty(self.netD)
        self.reduce_sum = ops.ReduceSum()
        self.epsilonLoss = EpsilonLoss(0.001)
        self.scalr_summary = ops.ScalarSummary()
        self.summary = ops.TensorSummary()

    def construct(self, real, fake, alpha):
        """Combine the discriminator loss terms for one batch.

        Returns:
            output.
        """
        predict_real = self.netD(real, alpha)
        predict_fake = self.netD(fake, alpha)
        # Wasserstein terms: maximize D(real), minimize D(fake).
        loss_real = -self.reduce_sum(predict_real, 0)
        loss_fake = self.reduce_sum(predict_fake, 0)
        drift = self.epsilonLoss(predict_real)
        penalty = self.wGANGPGradientPenalty(real, fake, alpha)
        return loss_real + loss_fake + penalty + drift
class AllLossG(nn.Cell):
    """Generator loss: negative batch-summed discriminator score on the
    generated samples."""

    def __init__(self, netG, netD):
        super(AllLossG, self).__init__()
        self.netG = netG
        self.netD = netD
        self.reduce_sum = ops.ReduceSum()

    def construct(self, inputNoise, alpha):
        """Generate samples from noise and score them with the discriminator.

        Returns:
            output.
        """
        generated = self.netG(inputNoise, alpha)
        score = self.netD(generated, alpha)
        return -self.reduce_sum(score, 0)
class EpsilonLoss(nn.Cell):
    """Epsilon drift penalty: epsilonD * sum(D(real)^2), keeping the
    discriminator output close to zero."""

    def __init__(self, epsilonD):
        super(EpsilonLoss, self).__init__()
        self.reduce_sum = ops.ReduceSum()
        self.epsilonD = mindspore.Tensor(epsilonD, mindspore.float32)

    def construct(self, predRealD):
        """Compute the drift term from the real-sample predictions.

        Returns:
            output.
        """
        squared = predRealD ** 2
        return self.reduce_sum(squared) * self.epsilonD
| 4,592 | 1,523 |
# Test fixtures: pre-parsed API responses (field names suggest a Fronius
# solar inverter API — confirm against the client under test).

# Power-flow snapshot; battery and PV power are None (values unavailable).
GET_POWER_FLOW_REALTIME_DATA = {
    "timestamp": {"value": "2019-01-10T23:33:12+01:00"},
    "status": {"Code": 0, "Reason": "", "UserMessage": ""},
    "energy_day": {"value": 0, "unit": "Wh"},
    "energy_total": {"value": 26213502, "unit": "Wh"},
    "energy_year": {"value": 12400.100586, "unit": "Wh"},
    "meter_location": {"value": "load"},
    "meter_mode": {"value": "vague-meter"},
    "power_battery": {"value": None, "unit": "W"},
    "power_grid": {"value": 367.722145, "unit": "W"},
    "power_load": {"value": -367.722145, "unit": "W"},
    "power_photovoltaics": {"value": None, "unit": "W"},
}

# System-scope meter data: meters keyed by device id ("0").
GET_METER_REALTIME_DATA_SYSTEM = {
    "timestamp": {"value": "2019-01-10T23:33:13+01:00"},
    "status": {"Code": 0, "Reason": "", "UserMessage": ""},
    "meters": {
        "0": {
            "power_real": {"value": -367.722145, "unit": "W"},
            "meter_location": {"value": 1},
            "enable": {"value": 1},
            "visible": {"value": 1},
            "manufacturer": {"value": "Fronius"},
            "model": {"value": ""},
            "serial": {"value": ""},
        }
    },
}

# Device-scope meter data: same fields as above but flattened (no "meters" map).
GET_METER_REALTIME_DATA_SCOPE_DEVICE = {
    "timestamp": {"value": "2019-01-10T23:33:14+01:00"},
    "status": {"Code": 0, "Reason": "", "UserMessage": ""},
    "power_real": {"value": -367.722145, "unit": "W"},
    "meter_location": {"value": 1},
    "enable": {"value": 1},
    "visible": {"value": 1},
    "manufacturer": {"value": "Fronius"},
    "model": {"value": ""},
    "serial": {"value": ""},
}

# Device-scope inverter data: energy counters only.
GET_INVERTER_REALTIME_DATA_SCOPE_DEVICE = {
    "timestamp": {"value": "2019-01-10T23:33:15+01:00"},
    "status": {"Code": 0, "Reason": "", "UserMessage": ""},
    "energy_day": {"value": 0, "unit": "Wh"},
    "energy_total": {"value": 26213502, "unit": "Wh"},
    "energy_year": {"value": 12400.1, "unit": "Wh"},
}

# Error response: status code 255 signals storages are unsupported.
GET_STORAGE_REALTIME_DATA_SCOPE_DEVICE = {
    "timestamp": {"value": "2019-01-10T23:33:14+01:00"},
    "status": {"Code": 255, "Reason": "Storages are not supported", "UserMessage": ""},
}

# System-scope inverter data: totals plus per-inverter breakdown keyed by id ("1").
GET_INVERTER_REALTIME_DATA_SYSTEM = {
    "timestamp": {"value": "2019-01-10T23:33:16+01:00"},
    "status": {"Code": 0, "Reason": "", "UserMessage": ""},
    "energy_day": {"value": 0, "unit": "Wh"},
    "energy_total": {"value": 26213502, "unit": "Wh"},
    "energy_year": {"value": 12400, "unit": "Wh"},
    "power_ac": {"value": 0, "unit": "W"},
    "inverters": {
        "1": {
            "energy_day": {"value": 0, "unit": "Wh"},
            "energy_total": {"value": 26213502, "unit": "Wh"},
            "energy_year": {"value": 12400, "unit": "Wh"},
            "power_ac": {"value": 0, "unit": "W"},
        }
    },
}

# Datalogger LED states: color and on/off state per LED.
GET_LOGGER_LED_INFO_STATE = {
    "timestamp": {"value": "2019-06-23T23:50:16+02:00"},
    "status": {"Code": 0, "Reason": "", "UserMessage": ""},
    "power_led": {"color": "green", "state": "on"},
    "solar_net_led": {"color": "green", "state": "on"},
    "solar_web_led": {"color": "none", "state": "off"},
    "wlan_led": {"color": "green", "state": "on"},
}
| 3,032 | 1,320 |
import pandas as pd
import numpy as np
from gensim.models.wrappers import LdaMallet
from sklearn.metrics.pairwise import cosine_similarity
from gensim.corpora import Dictionary
from gensim import corpora
import pickle
import os
"""This class trains the Latent Dirichlet Allocation (LDA) Model on
painting description corpus.,
we want to compare the paintings by computing a similarity measure : cosine similarity"""
class LdaTraining:
    """Train a Latent Dirichlet Allocation (LDA, Mallet) topic model on the
    painting-description corpus and compare paintings pairwise via cosine
    similarity of their topic distributions."""

    path_to_mallet_bin = "/resources/mallet-2.0.6/bin/mallet"  # path has to be absolute
    os.environ['MALLET_HOME'] = "/resources/mallet-2.0.6/"  # path has to be absolute
    path_save_score = 'resources/datasets/'
    path_save_outputs = 'resources/matrices/lda/'
    path_save_model = 'resources/models/'
    path_to_listwords = 'resources/datasets/preprocessed/list_words.txt'
    path_to_dict = 'resources/datasets/preprocessed/dict'
    path_to_corpus = 'resources/datasets/preprocessed/corpus'
    # NOTE(review): loaded at class-definition time; importing this module
    # requires the CSV to exist.
    painting_df = pd.read_csv('resources/datasets/ng-dataset.csv')

    def __init__(self, num_topics):
        """num_topics: number of topics to train the LDA model with."""
        self.num_topics = num_topics

    def load_list_words(self, path_to_listwords):
        """Load the pickled list of words."""
        with open(path_to_listwords, "rb") as fp:  # Unpickling
            list_words = pickle.load(fp)
        return list_words

    def load_dictionary(self, path_to_dict):
        """Load the gensim dictionary."""
        dictionary = Dictionary.load(path_to_dict)
        return dictionary

    def load_corpus(self, path_to_corpus):
        """Load the gensim corpus (Matrix Market format)."""
        corpus = corpora.MmCorpus(path_to_corpus)
        return corpus

    def LdaModel(self, num_topics, corpus, dictionary):
        """Create a LDA topic model.

        Input:
            num_topics: number of topics for the model
            corpus: gensim corpus
            dictionary: gensim dictionary
        Output:
            lda_model: a topic model using Latent Dirichlet Allocation (LDA)
        """
        lda_model = LdaMallet(mallet_path=self.path_to_mallet_bin, num_topics=num_topics, corpus=corpus, id2word=dictionary, random_seed=123)
        return lda_model

    def transform_output(self, lda_model, corpus):
        """Transform the topic/document matrix into an ordered array of topic distributions.

        Input:
            lda_model: LDA model
            corpus: gensim corpus
        Output:
            lda_output: numpy array, one row of topic weights per document
        """
        topdoc_mat = lda_model[corpus]
        topdoc_sorted = self.sort_tuples(topdoc_mat)
        lda_output = np.asarray(topdoc_sorted)
        return lda_output

    def sort_tuples(self, topdoc_mat):
        """Sort the (topic, weight) tuples by topic index and drop the index.

        [(3,0.02), (1, 0.1), (2,0.03), ...] => [(1, 0.1), (2, 0.03), (3,0.02), ...] => [0.1, 0.03, 0.02]

        Input:
            topdoc_mat: matrix of topic distribution per document
        Output:
            sorted per-document weight lists with topic indices removed
        """
        # Reordering the topics in ascending order (0,1,2,3...) so documents
        # can be compared position-by-position with a similarity metric.
        for i in range(len(topdoc_mat)):
            topdoc_mat[i] = sorted(topdoc_mat[i], key=lambda tup: (tup[0], tup[1]))
            for j in range(len(topdoc_mat[i])):
                topdoc_mat[i][j] = topdoc_mat[i][j][1]
        return topdoc_mat

    def save_output(self, lda_output, path_save_outputs):
        """Persist the topic-distribution matrix as a .npy file."""
        np.save(path_save_outputs + 'lda-output', lda_output)

    def save_cosine(self, cos_mat, path_save_outputs):
        """Persist the cosine-similarity matrix as a .npy file."""
        np.save(path_save_outputs + 'cosine-mat', cos_mat)

    def save_pairwise_score(self, painting_df, cos_mat, path_save_score):
        """Write pairwise (painting_1, painting_2, score) rows to CSV,
        excluding self-pairs."""
        list_tuples = []
        for i, list_score in enumerate(cos_mat):
            for k, score in enumerate(list_score):
                list_tuples.append((i, k, score))
        sim_df = pd.DataFrame(list_tuples).rename(columns={0: 'painting_1', 1: 'painting_2', 2: 'score'})
        # Map matrix indices back to painting ids.
        sim_df['painting_1'] = sim_df['painting_1'].apply(lambda x: painting_df.iloc[x].painting_id)
        sim_df['painting_2'] = sim_df['painting_2'].apply(lambda x: painting_df.iloc[x].painting_id)
        sim_df = sim_df.loc[sim_df['painting_1'] != sim_df['painting_2']]
        # Fix: write to the configured score directory instead of a
        # hard-coded absolute user path.
        sim_df.to_csv(path_save_score + 'lda-scores.csv')

    def main(self):
        """Run the full pipeline: load inputs, train, save model, outputs and scores."""
        # NOTE(review): list_words is loaded but never used afterwards — kept
        # for parity with the original pipeline; confirm before removing.
        list_words = self.load_list_words(self.path_to_listwords)
        dictionary = self.load_dictionary(self.path_to_dict)
        corpus = self.load_corpus(self.path_to_corpus)
        lda_model = self.LdaModel(self.num_topics, corpus, dictionary)
        lda_model.save(self.path_save_model + 'lda.model')
        lda_output = self.transform_output(lda_model, corpus)
        self.save_output(lda_output, self.path_save_outputs)
        cos_mat = cosine_similarity(lda_output)
        self.save_cosine(cos_mat, self.path_save_outputs)
        self.save_pairwise_score(self.painting_df, cos_mat, self.path_save_score)
if __name__=='__main__':
    # Train a 10-topic model and run the whole pipeline end to end.
    lda = LdaTraining(10)
    lda.main()
| 5,774 | 1,803 |
import os
import argparse
import json
import webptools
# Grant permissions to Webptools
webptools.grant_permission()
def main():
    """Convert every server logo PNG to WebP.

    Reads the server list JSON, then for each server emits a default-size
    (512px) WebP plus one WebP per size in --sizes, named
    ``<id>.webp`` and ``<id>-<size>.webp`` respectively.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--servers', required=True, type=str)
    parser.add_argument('--servers_logos_source', required=True, type=str)
    parser.add_argument('--servers_logos_output', required=True, type=str)
    parser.add_argument('--sizes', nargs='+', type=int, default=[256])
    parser.add_argument('--lossless', default=False, action='store_true')
    args = parser.parse_args()

    # Load server mappings JSON
    with open(args.servers) as servers_file:
        servers = json.load(servers_file)

    print(f'Converting {len(servers)} server logos.')

    # Create server logos output directory
    os.makedirs(args.servers_logos_output, exist_ok=True)

    for server in servers:
        server_id = server['id']
        server_name = server['name']
        logo_path = f'{args.servers_logos_source}/{server_id}.png'

        # Base 512 size (convert_and_resize's default).
        convert_and_resize(
            logo_path,
            f'{args.servers_logos_output}/{server_id}.webp',
            lossless=args.lossless
        )

        # Size-based destination name
        for size in args.sizes:
            convert_and_resize(
                logo_path,
                f'{args.servers_logos_output}/{server_id}-{size}.webp',
                lossless=args.lossless,
                size=size,
            )

        print(f'Successfully converted {server_name}\'s logo.')

    # Fix: "Sucessfully" typo in the summary message.
    print(f'Successfully converted {len(servers)} server logos.')
# Utility to convert and resize images
def convert_and_resize(source, destination, lossless=False, size=512):
    """Convert *source* (PNG) to *destination* (WebP), resized to size x size.

    :param source: input image path
    :param destination: output .webp path
    :param lossless: use lossless WebP encoding when True
    :param size: target width and height in pixels
    :raises OSError: if cwebp reports a non-zero exit code
    """
    # Plain strings here — the f-prefix on '-metadata none' was a no-op.
    options = [
        '-metadata none',
        f'-resize {size} {size}'
    ]
    if lossless:
        options.append('-lossless')

    output = webptools.cwebp(
        input_image=source,
        output_image=destination,
        option=' '.join(options)
    )

    if output.get('exit_code'):
        raise OSError(f'Failed to run Webptools ({source})')
if __name__ == '__main__':
    # Script entry point.
    main()
from mycroft import MycroftSkill, intent_file_handler
from mycroft.skills.context import adds_context, removes_context
from mycroft import intent_handler
from adapt.intent import IntentBuilder
import json
import requests
import base64
class Createservice(MycroftSkill):
def __init__(self):
MycroftSkill.__init__(self)
self.s_messages = True
self.d={}
def initialize(self):
def on_utterance(message):
self.audio=message.data['signal']
decode_string = base64.b64decode(self.audio.encode('utf-8'))
send=base64.b64encode(decode_string).decode('ascii')
self.d['audio']=json.dumps(send)
self.d['tag'] = 'CreateService'
self.add_event('recognizer_loop:utterance', on_utterance)
@intent_handler('createservice.intent')
@adds_context('NameContext')
def handle_createservice(self, message):
self.name=""
self.hosts=[]
self.internet=False
self.performance=[]
response = requests.post('http://localhost:5550/sr/identification', json=self.d)
res=response.json()
id_=res['id']
name_user=res['user_name']
if(id_==1):
self.speak('Sure '+name_user+'. What is the name of the service?',expect_response=True)
elif(id_==0):
self.speak(name_user+", you dont have permissions for that")
else:
self.speak("User not recognize")
@intent_handler(IntentBuilder('NameServiceIntent').require("Type").require('NameContext').build())
@adds_context('HostsContext')
def handle_name_service(self, message):
utterance = message.data.get('Type')
self.name=utterance
self.log.info(utterance)
self.speak('Sure. Please name the machines that will have access to the service',expect_response=True)
@intent_handler(IntentBuilder('HostsIntent').require('HostsContext').build())
@adds_context('InternetAccessContext')
def handle_hosts_service(self, message):
utterance = message.data.get('utterance')
res = [int(i) for i in utterance.split() if i.isdigit()]
res=[str(x) for x in res]
self.log.info(res)
self.hosts=res
self.speak('Regarding internet access, does the service need internet access?',expect_response=True)
@intent_handler(IntentBuilder('YesInternetIntent').require("Yes").require('InternetAccessContext').build())
@adds_context('PerformanceContext')
@removes_context('InternetAccessContext')
def handle_yes_internet_access(self, message):
self.internet=True
self.log.info(self.internet)
self.speak('One last question, do you want to define the performance of the service?',expect_response=True)
@intent_handler(IntentBuilder('NoInternetIntent').require("No").require('InternetAccessContext').build())
@adds_context('PerformanceContext')
@removes_context('InternetAccessContext')
def handle_no_internet_access(self, message):
self.internet=False
self.log.info(self.internet)
self.speak('One last question, do you want to define the performance of the service?',expect_response=True)
@intent_handler(IntentBuilder('YesPerformanceIntent').require("Yes").require('PerformanceContext').build())
@removes_context('NameContext')
@removes_context('HostsContext')
@removes_context('InternetAccessContext')
def handle_yes_performance(self, message):
utterance = message.data.get('utterance')
res = [int(i) for i in utterance.split() if i.isdigit()]
self.performance=res[0]
self.log.info(res)
self.speak('Thanks for the information, wait a bit while I implement the service')
json_={"IntentType": "CreateService","Intent_Target": "Service","Intent_State": "new intent","Conditions": [{"Policy": "CreateService","Constraints": [{ "Domains":[{"Name": self.name,"Bool": self.internet,"Acess": self.hosts,"Performance": self.performance}]}]}]}
json_ = json.dumps(json_, indent = 4)
self.log.info(json_)
response = requests.post('http://localhost:5500/sr/intents', json=json_)
self.log.info(response.text)
dictFromServer = response.json()
@intent_handler(IntentBuilder('NoPerformanceIntent').require("No").require('PerformanceContext').build())
@removes_context('NameContext')
@removes_context('HostsContext')
@removes_context('InternetAccessContext')
def handle_no_performance(self, message):
    """Handle a "no" answer to the performance question.

    Uses 0 as a sentinel meaning "no performance requirement", then
    builds the service-creation intent and POSTs it to the local
    intent endpoint (same payload shape as handle_yes_performance).
    """
    self.performance=0
    self.log.info(self.performance)
    self.speak('Thanks for the information, wait a bit while I implement the service')
    # "Acess" is presumably a misspelling of "Access" -- kept as-is because
    # the receiving API may depend on this exact key; confirm before renaming.
    json_={"IntentType": "CreateService","Intent_Target": "Service","Intent_State": "new intent","Conditions": [{"Policy": "CreateService","Constraints": [{ "Domains":[{"Name": self.name,"Bool": self.internet,"Acess": self.hosts,"Performance": self.performance}]}]}]}
    # NOTE(review): json.dumps + `json=` double-encodes the payload (server
    # receives a JSON string, not an object) -- confirm the server expects this.
    json_ = json.dumps(json_, indent = 4)
    self.log.info(json_)
    response = requests.post('http://localhost:5500/sr/intents', json=json_)
    self.log.info(response.text)
    dictFromServer = response.json()  # parsed but currently unused
def create_skill():
    """Entry point used by the Mycroft skill loader to instantiate this skill."""
    return Createservice()
| 5,396 | 1,607 |
import logging
import keras
import os
from Public.path import path_log_dir
def create_log(path, stream=False):
    """Return the root logger configured to write to a file.

    :param path: path of the log file to write to
    :param stream: when True, also echo log records to the console;
        when False (default) log only to the file
    :return: the configured root logger
    """
    root = logging.getLogger()
    root.setLevel(logging.DEBUG)
    formatter = logging.Formatter('[%(asctime)s] [%(levelname)s] %(message)s', '%Y-%m-%d %H:%M:%S')
    if stream:
        # Optional console (stderr) output.
        console_handler = logging.StreamHandler()
        console_handler.setFormatter(formatter)
        console_handler.setLevel(logging.DEBUG)
        root.addHandler(console_handler)
    # File output is always attached.
    file_handler = logging.FileHandler(path, encoding='utf-8')
    file_handler.setFormatter(formatter)
    file_handler.setLevel(logging.DEBUG)
    root.addHandler(file_handler)
    return root
class TrainHistory(keras.callbacks.Callback):
    """Keras callback that logs training progress and collects per-epoch metrics.

    Every finished epoch is appended to ``self.info`` as a dict so the
    metric history can be inspected after training.
    """

    def __init__(self, log=None, model_name=None):
        super(TrainHistory, self).__init__()
        if not log:
            # No logger supplied: fall back to a file-only logger in the
            # project log directory.
            fallback_path = os.path.join(path_log_dir, 'callback.log')
            log = create_log(path=fallback_path, stream=False)
        self.log = log
        self.model_name = model_name
        self.epoch = 0
        self.info = []

    def on_epoch_begin(self, epoch, logs=None):
        # Remember the current epoch so batch-level logging can report it.
        self.epoch = epoch
        self.log.info(f"begin epoch: {self.epoch}")

    def on_epoch_end(self, epoch, logs={}):
        self.log.info(f'end epoch: {epoch} loss:{logs["loss"]} val_loss:{logs["val_loss"]} acc:{logs["crf_viterbi_accuracy"]} val_acc:{logs["val_crf_viterbi_accuracy"]}')
        # Record the epoch's metrics (epoch reported 1-based).
        record = {
            'model_name': self.model_name,
            'epoch': self.epoch + 1,
            'loss': logs["loss"],
            'acc': logs['crf_viterbi_accuracy'],
            'val_loss': logs["val_loss"],
            'val_acc': logs['val_crf_viterbi_accuracy'],
        }
        self.info.append(record)

    def on_batch_end(self, batch, logs={}):
        self.log.info(f'{self.model_name} epoch: {self.epoch} batch:{batch} loss:{logs["loss"]} acc:{logs["crf_viterbi_accuracy"]}')
| 2,076 | 766 |
"""
Datasets are used for data and metadata loading.
"""
from .base import Dataset
from .csv import CSV
| 104 | 31 |
# Demonstrate iterating a dict's key/value pairs.
a = {2: 4}
for key, value in a.items():
    print(key)
    print(value)
| 61 | 33 |
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import copy
import logging
import time
from common.pipeline_wrapper import BasePipeline
from gae_libs.http.http_client_appengine import HttpClientAppengine
from libs import time_util
from model import analysis_status
from waterfall import monitoring
from waterfall import swarming_util
from waterfall import waterfall_config
class TriggerBaseSwarmingTaskPipeline(BasePipeline):  # pragma: no cover.
  """A pipeline to trigger a Swarming task to re-run selected tests of a step.

  This pipeline only supports test steps that run on Swarming and support the
  gtest filter.
  """

  def _GetSwarmingTaskName(self, ref_task_id):  # pragma: no cover.
    # Task name embeds the referred task id plus a timestamp so each
    # triggered re-run is uniquely identifiable and searchable.
    return 'findit/ref_task_id/%s/%s' % (
        ref_task_id, time_util.GetUTCNow().strftime('%Y-%m-%d %H:%M:%S %f'))

  def _CreateNewSwarmingTaskRequest(self, ref_task_id, ref_request, master_name,
                                    builder_name, build_number, step_name,
                                    tests, iterations):
    """Returns a SwarmingTaskRequest instance to run the given tests only."""
    # Make a copy of the referred request and drop or overwrite some fields.
    new_request = copy.deepcopy(ref_request)
    new_request.name = self._GetSwarmingTaskName(ref_task_id)
    new_request.parent_task_id = ''
    new_request.user = ''

    # To force a fresh re-run and ignore cached result of any equivalent run.
    new_request.idempotent = False

    # Set the gtest_filter to run the given tests only.
    # Remove existing test filter first.
    new_request.extra_args = [
        a for a in new_request.extra_args if (
            not a.startswith('--gtest_filter') and
            not a.startswith('--test-launcher-filter-file'))
    ]
    new_request.extra_args.append('--gtest_filter=%s' % ':'.join(tests))

    # On Android, --gtest_repeat is only supported for gtest, but not for other
    # test types. E.g. instrumentation tests currently support it via
    # --test-repeat.
    #
    # Here we blindly treat all tests on Android as gtest, and let other test
    # types fail out, because it is hard to distinguish them programmatically
    # while the majority is gtest.
    #
    # https://crbug.com/669632 tracks the effort to unify the command switches
    # of the Android test runner that are used here.
    new_request.extra_args.append('--gtest_repeat=%s' % iterations)
    ref_os = swarming_util.GetTagValue(ref_request.tags, 'os') or ''
    if ref_os.lower() == 'android':  # Workaround. pragma: no cover.
      new_request.extra_args.append('--num_retries=0')
    else:
      new_request.extra_args.append('--test-launcher-retry-limit=0')

    # Also rerun disabled tests. Scenario: the test was disabled before Findit
    # runs any analysis. One possible case:
    #   1. A gtest became flaky on CQ, but Findit was not automatically
    #      triggered to run any analysis because:
    #      * the test is not flaky enough
    #      * chromium-try-flakes has filed/updated too many bugs
    #   2. The test got disabled, but no culprit was identified.
    #   3. Some developer starts the investigation and requests Findit to
    #      analyze the flaky test.
    #   4. Findit picks the latest Waterfall build of the matching configuration
    #      for the CQ build in which the flaky test is found.
    #   5. In the picked Waterfall build, the test is already disabled.
    #
    # Note: test runner on Android ignores this flag because it is not supported
    # yet even though it exists.
    new_request.extra_args.append('--gtest_also_run_disabled_tests')

    # Remove the env setting for sharding.
    sharding_settings = ['GTEST_SHARD_INDEX', 'GTEST_TOTAL_SHARDS']
    new_request.env = [
        e for e in new_request.env if e['key'] not in sharding_settings
    ]

    # Reset tags for searching and monitoring.
    ref_name = swarming_util.GetTagValue(ref_request.tags, 'name')
    new_request.tags = []
    new_request.tags.append('ref_master:%s' % master_name)
    new_request.tags.append('ref_buildername:%s' % builder_name)
    new_request.tags.append('ref_buildnumber:%s' % build_number)
    new_request.tags.append('ref_stepname:%s' % step_name)
    new_request.tags.append('ref_task_id:%s' % ref_task_id)
    new_request.tags.append('ref_name:%s' % ref_name)
    # Add additional tags.
    for tag in self._GetAdditionalTags():
      new_request.tags.append(tag)

    return new_request

  def _GetAdditionalTags(self):
    """Returns additional tags for the Swarming task."""
    return []

  def _GetArgs(self, master_name, builder_name, build_number, step_name, tests):
    # Returns an array you can pass into _GetSwarmingTask, _CreateSwarmingTask,
    # _NeedANewSwarmingTask as the arguments.
    # Should be overwritten in child method.
    raise NotImplementedError(
        '_GetArgs should be implemented in child class')

  def _GetSwarmingTask(self):
    # Get the appropriate kind of Swarming Task (Wf or Flake).
    # Should be overwritten in child method.
    raise NotImplementedError(
        '_GetSwarmingTask should be implemented in child class')

  def _CreateSwarmingTask(self):
    # Create the appropriate kind of Swarming Task (Wf or Flake)
    # Should be overwritten in child method.
    raise NotImplementedError(
        '_CreateSwarmingTask should be implemented in child class')

  def _OnTaskTriggered(self):
    """A hook function called after the Swarming task is actually triggered."""
    pass

  def _NeedANewSwarmingTask(self, *args):
    """Returns True (and records a PENDING entity) if no task exists yet."""
    swarming_task = self._GetSwarmingTask(*args)
    if not swarming_task:
      swarming_task = self._CreateSwarmingTask(*args)
      swarming_task.status = analysis_status.PENDING
      swarming_task.put()
      return True
    else:
      # TODO(http://crbug.com/585676): Rerun the Swarming task if it runs into
      # unexpected infra errors.
      return False

  def _GetSwarmingTaskId(self, *args):
    """Polls the datastore until an existing pipeline assigns a task id."""
    swarming_settings = waterfall_config.GetSwarmingSettings()
    wait_seconds = swarming_settings.get('get_swarming_task_id_wait_seconds')
    timeout_seconds = swarming_settings.get(
        'get_swarming_task_id_timeout_seconds')
    deadline = time.time() + timeout_seconds

    while time.time() < deadline:
      swarming_task = self._GetSwarmingTask(*args)

      if not swarming_task:  # pragma: no cover. Pipeline will retry.
        raise Exception('Swarming task was deleted unexpectedly!')

      if swarming_task.task_id:
        return swarming_task.task_id

      # Wait for the existing pipeline to start the Swarming task.
      time.sleep(wait_seconds)

    raise Exception('Time out!')  # pragma: no cover. Pipeline will retry.

  def _GetIterationsToRerun(self):
    # How many times we want to run the swarming rerun
    # By default, it's what's in wf_config
    raise NotImplementedError(
        '_GetIterationsToRerun should be implemented in child class')

  # Arguments number differs from overridden method - pylint: disable=W0221
  def run(self, master_name, builder_name, build_number, step_name, tests):
    """Triggers a new Swarming task to run the given tests.

    Args:
      master_name (str): The master name.
      builder_name (str): The builder name.
      build_number (str): The build number.
      step_name (str): The failed test step name.
      tests (list): A list of test cases, eg: ['suite1.test1', 'suite2.testw2']

    Returns:
      task_id (str): The new Swarming task that re-run the given tests.
    """
    call_args = self._GetArgs(master_name, builder_name,
                              build_number, step_name, tests)
    # Check if a new Swarming Task is really needed.
    if not self._NeedANewSwarmingTask(*call_args):
      return self._GetSwarmingTaskId(*call_args)

    assert tests

    http_client = HttpClientAppengine()

    # 0. Retrieve existing Swarming task ids for the given step.
    swarming_task_items = swarming_util.ListSwarmingTasksDataByTags(
        master_name, builder_name, build_number, http_client,
        {'stepname': step_name})
    if len(swarming_task_items) < 1:
      monitoring.swarming_tasks.increment(
          {'operation': 'refer', 'category': 'copy-settings-and-parameters'})
      raise Exception('No Swarming task was run at %s, %s, %s' % (
          master_name, builder_name, build_number))
    ref_task_id = swarming_task_items[0]['task_id']

    # 1. Retrieve Swarming task parameters from a given Swarming task id.
    ref_request = swarming_util.GetSwarmingTaskRequest(
        ref_task_id, http_client)

    # 2. Update/Overwrite parameters for the re-run.
    iterations_to_rerun = self._GetIterationsToRerun()
    new_request = self._CreateNewSwarmingTaskRequest(
        ref_task_id, ref_request, master_name, builder_name, build_number,
        step_name, tests, iterations_to_rerun)

    # 3. Trigger a new Swarming task to re-run the failed tests.
    task_id, error = swarming_util.TriggerSwarmingTask(new_request, http_client)

    # Update swarming task info.
    swarming_task = self._GetSwarmingTask(*call_args)
    swarming_task.task_id = task_id
    swarming_task.parameters['tests'] = tests
    swarming_task.parameters['iterations_to_rerun'] = iterations_to_rerun
    swarming_task.parameters['ref_name'] = swarming_util.GetTagValue(
        new_request.tags, 'ref_name')
    if error:
      swarming_task.error = error
    else:
      logging.info('A Swarming task was triggered:%s', task_id)
    swarming_task.put()

    # Call the hook function after the task is triggered.
    self._OnTaskTriggered()

    return task_id
| 9,726 | 3,043 |
#!/bin/python2
import sys
import socket
import getopt
import threading
import subprocess
# global variables
listen = False
command = False
upload = False
execute = ""
target = ""
upload_destination = ""
port = 0
def usage:
print "BHP Net Tool"
print
print "Usage: bhpnet.py -t target_host -p port"
print "-l --listen - listen on [host]:[port] \
for incoming connections"
print "-e --execute=file_to_run - execute the given file upon \
receiving a connection"
print "-c --command - initialize a command shell"
print "-u --upload=destination - upon receiving connection upload \
a file and write to [destination]"
print
print
print "Examples: "
print "bhpnet.py -t 192.168.0.1 -p 5555 -l -c"
print "bhpnet.py -t 192.168.0.1 -p 5555 -l -u=c://target.exe
print "bhpnet.py -t 192.168.0.1 -p 5555 -l -e=\"cat /etc/passwd\" "
print "echo 'ABCDEFGHI' | ./bhpnet.py -t 192.168.11.12 -p 135"
sys.exit(0)
def main():
global listen
global port
global execute
global command
global upload_destination
global target
if not len(sys.argv[1:]):
usage()
#read the commandline options
try:
opts, args = getopt.getopt(sys.argv[1:],"hle:t:p:c:u", \
["help","listen","execute","target","port","command","upload"])
except getopt.GetoptError as err:
print str(err)
usage()
for o,a in opts:
if o in ("-h", "--help"):
usage()
elif o in ("-l", "--listen"):
listen = True
elif o in ("-e", "--execute"):
execute = a
elif o in ("-c", "--commandshell"):
command = True
elif o in ("-u", "--upload"):
upload_destination = a
elif o in ("-t", "--target"):
target = a
elif o in ("-p", "--port"):
port = int(a)
else:
assert False, "Unhandled Option"
# are we going to listen or just send data from stdin?
if not listen and len(target) and port > 0:
# read buffer from commandline
# this will block, so send CTRL-D if not sending input
buffer = sys.stdin.read()
# send data off
client_sender(buffer)
#if listen, potentially upload things, etc
if listen:
server_loop()
# Fixed: the original `def __init__:` is invalid syntax at module level;
# the evident intent is to run main() when the script is executed directly.
if __name__ == '__main__':
    main()
| 2,636 | 869 |
import enum
#a tracking class for our numpy data model
class DataModel(enum.Enum):
    """Column layout of the numpy step-data array used throughout HotStepper.

    Each member's value is a column index within a step record:

    * ``START``     -- the step key value.
    * ``DIRECTION`` -- the step delta direction value.
    * ``WEIGHT``    -- the step amount-of-strength value.
    * ``VALUE``     -- the step cumulative value.
    """

    START = 0
    DIRECTION = 1
    WEIGHT = 2
    VALUE = 3
#!/usr/bin/env python
import os
import re
import optparse
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
try:
import config, config_defaults
except ImportError:
config = None
TEMPLATES_DIR = 'new_templates'
HTDOCS_HARDCODED_PATH = '/home/desuchan/public_html/desuchan.net/htdocs/'
FUTABA_STYLE_DEBUG = 0
EXPRESSION_DEBUG = 0
EXPRESSION_TRANSLATOR_DEBUG = 0
LOOP_TAG_DEBUG = 0
VARIABLES_DEBUG = 0
TEMPLATE_RE = re.compile(r'^use constant ([A-Z_]+) => (.*?;)\s*\n\n', re.M | re.S)
TEMPLATE_SECTION_RE = re.compile(
r'('
r'q{((?:[^{}]|{[^{}]*})*)}|' # allow non-nested braces inside the q{}
r'include\("([a-z_/\.]*?)"\)|'
r'"(.*?)"|'
r'([A-Z][A-Z_]+)|'
r'sprintf\(S_ABBRTEXT,([\'"].*?[\'"])\)'
r')[\.;] *',
re.S | re.M)
COMPILE_TEMPLATE_RE = re.compile(
r'^compile_template ?\((.*?)\);$',
re.S)
# regex copied from wakautils.pl
TAG_RE = re.compile(
r'(.*?)(<(/?)(var|const|if|loop)(?:|\s+(.*?[^\\]))>|$)',
re.S)
# i should write a decent tokenizer/parser instead
PERL_EXP_RE = re.compile(
# 1-2 board option, path
r'\$board->option\((\'[A-Z_]+\'|"[A-Z_]+")\)|'
r'\$board->(path\(?\)?)|'
# 3 advanced include (ignore most of it)
r'encode_string\(\(compile_template\(include\(\$board->path\(\).\'/\'.'
'"([a-z/\.]+?)"\)\)\)->\(board=>\$board\)\)|'
# 4-5 function call (evaluate recursively)
r'([a-z_]+)\(|'
r'(\))|'
# 6-7 variables and constants
r'\$([A-Za-z0-9_{}]+)|'
r'([A-Z_]+)|'
# 8 sprintf without brackets
r'sprintf (.+)$|'
# 9 regex
r'([!=]~ /.+?/[i]*)|'
# 10-11 operators and comma
r'(\+|-|/|\*|<=|>=|<|>|==|eq|!=|ne|&&|and|\|\||or|!|\?|:|\.)|'
r'(,)|'
# 12-13 values (string/number), whitespace
r'(".*?"|\'.*?\'|[0-9]+)|'
r'(\s+)|'
# 14 single opening bracket (turns into void function)
r'(\()',
re.S | re.M)
# post and admin table columns
_POST_TABLE = ['num', 'parent', 'timestamp', 'lasthit', 'ip', 'date', 'name',
'trip', 'email', 'subject', 'password', 'comment', 'image', 'size', 'md5',
'width', 'height', 'thumbnail', 'tn_width', 'tn_height', 'lastedit',
'lastedit_ip', 'admin_post', 'stickied', 'locked']
_ADMIN_TABLE = ['username', 'num', 'type', 'comment', 'ival1', 'ival2',
'sval1', 'total', 'expiration', 'divider']
# oh god what is this
KNOWN_LOOPS = {
'stylesheets': ('stylesheet', ['filename', 'title', 'default']),
'inputs': ('input', ['name', 'value']),
'S_OEKPAINTERS': ('painters', ['painter', 'name']),
'threads': ('currentthread', ['posts', 'omit', 'omitimages']),
'posts': ('post', _POST_TABLE + ['abbrev', 'postnum']),
'pages': ('page', ['page', 'filename', 'current']),
'loop': ('post', _POST_TABLE),
'boards_select': ('board', ['board_entry']),
'reportedposts': ('rpost', ['reporter', 'offender', 'postnum', 'comment',
'date', 'resolved']),
'users': ('user', ['username', 'account', 'password', 'reign', 'disabled']),
'boards': ('board', ['board_entry', 'underpower']),
'staff': ('account', ['username']),
# this is actually three different loops
'entries': ('entry', ['num', 'username', 'action', 'info', 'date', 'ip',
'admin_id', 'timestamp', 'rowtype', 'disabled',
'account', 'expiration', 'id', 'host', 'task',
'boardname', 'post', 'timestamp', 'passfail']),
'edits': ('edit', ['username', 'date', 'info', 'num']),
'bans': ('ban', _ADMIN_TABLE + ['rowtype', 'expirehuman', 'browsingban']),
'hash': ('row', _ADMIN_TABLE),
'scanned': ('proxy', ['num', 'type', 'ip', 'timestamp',
'date', 'divider', 'rowtype']),
'errors': ('error', ['error']),
'items': ('post', _POST_TABLE + ['mime_type']),
'reports' : ('report', ['reporter', 'offender', 'postnum', 'comment',
'date', 'resolved', 'board_name'])
}
RENAME = {
'sprintf': 'format',
'OEKAKI_DEFAULT_PAINTER': 'board.options.OEKAKI_DEFAULT_PAINTER',
'ENV{SERVER_NAME}': "environ['SERVER_NAME']",
'ENV{HTTP_REFERER}': "environ['HTTP_REFERER']",
'self': "get_script_name()",
'escamp': 'escape',
'expand_filename': 'expand_url',
'expand_image_filename': 'expand_image_url',
'round_decimal': 'round',
'get_filename': 'basename',
'include/boards/announcements_global.html': 'announcements_global.html',
'include/announcements.html': 'announcements.html',
'../include/boards/rules.html': 'rules.html',
}
TOUCHUPS = {
# Fix savelogin checkbox in admin login.
'"savelogin"': '"savelogin" value="1"',
# wakaba.pl -> wakarimasen.py
'wakaba.pl': '{{ get_script_name() }}',
# Replace references to wakaba with wakarimasen.
'wakaba': 'wakarimasen',
# Extend post.comment expression with tag filter for abbreviated pages.
'{{ post.comment }}': '{% if omit %}'
'{{ post.comment|redirect_reply_links(min_res) }}'
'{% else %}{{ post.comment }}{% endif %}',
# Fix for abbreviated thread message.
'if thread and currentthread.omit': 'if thread and omit',
'For the other {{ currentthread.omit }}': 'For the other {{ omit }}',
}
# Matches a backslash followed by any non-backslash character.
REMOVE_BACKSLASHES_RE = re.compile(r'\\([^\\])')

def remove_backslashes(string):
    """Strip single escaping backslashes, keeping the escaped character."""
    return REMOVE_BACKSLASHES_RE.sub(lambda m: m.group(1), string)
def debug_item(name, value='', match=None, span=''):
    # Debug helper: print one parsed item together with its source span.
    # A regex `match` object, when given, takes precedence over `span`.
    span = match and match.span() or span
    if value:
        value = repr(value)[1:-1]
        # Truncate long values so the debug output stays on one line.
        if len(value) > 50:
            value = value[:50] + "[...]"
    print ' %14s %-8s %s' % (span, name, value)
class FutabaStyleParser(object):
    """Parses futaba_style.pl and writes each template constant out as a
    Jinja2 template file.

    Parsing runs entirely from __init__: TEMPLATE_RE.sub() drives
    do_constant() once per `use constant NAME => ...;` block.
    """

    def __init__(self, filename="futaba_style.pl", only=None, dry_run=False):
        # only: restrict parsing to a single constant name; dry_run: skip writes.
        self.only = only
        self.dry_run = dry_run
        self.lastend = 0
        self.current = None
        if not os.path.exists(TEMPLATES_DIR) and not self.dry_run:
            os.mkdir(TEMPLATES_DIR)
        self.tl = Jinja2Translator(self)
        # The .sub() is used purely to iterate matches; the result is discarded.
        TEMPLATE_RE.sub(self.do_constant, open(filename).read())

    def debug_item(self, *args, **kwds):
        # Channel-gated wrapper around the module-level debug_item().
        if not FUTABA_STYLE_DEBUG:
            return
        debug_item(*args, **kwds)

    def do_constant(self, match):
        # Handle one `use constant NAME => template;` block.
        name, template = match.groups()
        if self.only and self.only != name:
            return
        if FUTABA_STYLE_DEBUG or LOOP_TAG_DEBUG or VARIABLES_DEBUG:
            print name
        # remove compile_template(...)
        compile = COMPILE_TEMPLATE_RE.match(template)
        if compile:
            self.debug_item('compiled', '1')
            template = compile.group(1) + ';'
        # init variables for the self.do_section loop
        self.lastend = 0
        self.current = StringIO()
        TEMPLATE_SECTION_RE.sub(self.do_section, template)
        # after the self.do_section loop
        current = self.current.getvalue()
        current = self.parse_template_tags(current)
        current = self.do_touchups(current)
        if not self.dry_run:
            file = open(template_filename(name), 'w')
            file.write(current)
        if len(template) != self.lastend:
            self.debug_item("NOT MATCHED (end)", template[self.lastend:],
                span=(self.lastend, len(template)))

    def do_section(self, match):
        # Handle one concatenated section (q{}, include, string, const, ...)
        # of a constant's value, appending its translation to self.current.
        if not match.group():
            return
        if match.start() > self.lastend:
            span = (self.lastend, match.start())
            self.debug_item("NOT MATCHED", match.string[span[0]:span[1]],
                span=span)
        names = ['html', 'include', 'string', 'const', 'abbrtext']
        groups = list(match.groups())[1:]
        # Python 2 map(None, ...) zips with None-padding.
        for groupname, value in map(None, names, groups):
            if value:
                self.debug_item(groupname, value, match)
                self.current.write(self.tl.handle_item(groupname, value))
        self.lastend = match.end()

    def parse_template_tags(self, template):
        # Second pass: translate wakaba <var>/<if>/<loop>/... tags.
        return TemplateTagsParser(self.tl).run(template)

    def do_touchups(self, template):
        # Final literal string replacements (see TOUCHUPS table).
        for old, new in TOUCHUPS.items():
            template = template.replace(old, new)
        return template
class TemplateTagsParser(object):
    """Translates wakaba-style <var>/<const>/<if>/<loop> tags (and the Perl
    expressions inside them) into Jinja2 syntax via the given translator."""

    def __init__(self, tl):
        self.tl = tl
        self.output = None
        # Stack of currently-open loop iterable names (for variable scoping).
        self.loops = []

    def run(self, template):
        # Scan the template, copying plain HTML and translating each tag.
        self.output = StringIO()
        for match in TAG_RE.finditer(template):
            html, tag, closing, name, args = match.groups()
            if html:
                self.output.write(html)
            if args:
                args = remove_backslashes(args)
            if tag:
                if closing:
                    self.end_tag(name)
                else:
                    self.start_tag(tag, name, args)
        return self.output.getvalue()

    def start_tag(self, tag, name, args):
        template = self.tl.TAGS[name][0]
        try:
            args = self.tl.translate_expression(self.parse_expression(args),
                name, self.loops)
        # Python 2 except syntax; AdvInclude diverts a <var> tag that is
        # really an "advanced include" into an {% include %} instead.
        except AdvInclude, e:
            template = self.tl.TAGS['include']
            args = self.tl.handle_include(e.value)
        if name == 'loop':
            if LOOP_TAG_DEBUG:
                print "Enter loop", args
            self.loops.append(args[1].split('.')[-1])
        self.output.write(template % args)

    def end_tag(self, name):
        if name == 'loop':
            loop = self.loops.pop()
            if LOOP_TAG_DEBUG:
                print "Exit loop", loop
        self.output.write(self.tl.TAGS[name][1])

    def parse_expression(self, exp):
        # Tokenize a full Perl expression into (type, value) pairs.
        lastend = 0
        if EXPRESSION_DEBUG or EXPRESSION_TRANSLATOR_DEBUG:
            print "Expression\t", exp
        result = self.parse_subexpression(exp)[0]
        if EXPRESSION_DEBUG:
            print ' ', result
        return result

    def parse_subexpression(self, exp, tmp=None):
        '''return value: tuple
        [0] list of tokens
        [1] the remaining

        if tmp is set, results are appended to that list instead of returning
        a new one (useful when parsing the remaining)
        '''
        lastend = 0
        if tmp is None:
            result = []
        else:
            result = tmp
        for match in PERL_EXP_RE.finditer(exp):
            if not match.group():
                continue
            if EXPRESSION_DEBUG and match.start() > lastend:
                span = (lastend, match.start())
                debug_item("unknown token", match.string[span[0]:span[1]],
                    span=span)
            names = ['option', 'path', 'advinclude', 'function', 'funcend',
                'var', 'const', 'sprintf', 'regex', 'operator', 'comma',
                'value', 'whitespace', 'void']
            groups = match.groups()
            # Find which alternative of PERL_EXP_RE matched.
            for groupname, value in map(None, names, groups):
                if value:
                    break
            retval = self.handle_token(groupname, value, match, result)
            # A non-None retval means a nested call consumed the remainder.
            if retval is not None:
                return retval
            lastend = match.end()
        if EXPRESSION_DEBUG and len(exp) != lastend:
            debug_item("unknown token", exp[lastend:],
                span=(lastend, len(exp)))
        return (result, '')

    def call_function(self, name, args, result):
        # Recursively parse a function's argument list, then continue with
        # whatever text follows the closing bracket.
        function, remaining = self.parse_subexpression(args)
        result.append(('function', (name, function)))
        return self.parse_subexpression(remaining, result)

    def handle_token(self, type, value, match, result):
        if type == 'sprintf':
            # bracket-less sprintf: treat the rest of the line as its args.
            return self.call_function('sprintf', value + ')', result)
        elif type == 'void':
            # a lone "(" becomes a call to the pseudo-function "void".
            type, value = 'function', 'void'
        if type == 'function':
            return self.call_function(value, match.string[match.end():],
                result)
        elif type == 'funcend':
            remaining = match.string[match.end():]
            return (result, remaining)
        if type == 'option':
            value = value.strip('\'"')
        if type == 'regex':
            # split a leading "!~" into an explicit negation token.
            if value.startswith("!"):
                result.append(('operator', '!'))
                value = value[2:].strip(' ')
        if type != 'whitespace':
            result.append((type, value))
class Jinja2Translator(object):
    '''Just to keep jinja2-specific code separate.

    Renders the tokenized Perl expressions and template sections produced
    by the parsers above into Jinja2 template text.
    '''

    # name -> (opening template, closing template) for each supported tag.
    TAGS = {
        'var': ('{{ %s }}', ''),
        'const': ('{{ %s }}', ''),
        'if': ('{%% if %s %%}', '{% endif %}'),
        'loop': ('{%% for %s in %s %%}', '{% endfor %}'),
        'include': "{%% include '%s' %%}",
        'filter': '{%% filter %s %%}%s{%% endfilter %%}',
    }
    # Perl operator -> Jinja2 equivalent ('?'/':' are approximated).
    OPERATORS = {
        '!': 'not',
        'eq': '==',
        'ne': '!=',
        '||': 'or',
        '&&': 'and',
        '?': 'and',  # h4x
        ':': 'or',   # ^
        '.': '+',
    }

    def __init__(self, parent):
        # not sure if needed
        self.parent = parent
        self.loops = None

    def handle_item(self, type, value):
        # Render one section of a constant's value (see do_section).
        if type == 'string':
            return value.decode('string-escape')
        elif type == 'html':
            return value
        elif type == 'include':
            return self.TAGS['include'] % self.handle_include(value)
        elif type == 'const':
            return self.TAGS['include'] % (value.lower() + '.html')
        elif type == 'abbrtext':
            if value.startswith('"'):
                value = remove_backslashes(value)
            return self.TAGS['filter'] % ('reverse_format(strings.ABBRTEXT)',
                value.strip('\'"'))
        return value

    def handle_include(self, value):
        # Strip the hardcoded htdocs prefix and apply renames.
        value = value.replace(HTDOCS_HARDCODED_PATH, '')
        if value in RENAME:
            value = RENAME[value]
        return value

    def translate_expression(self, exp, tagname, loops):
        mode = None
        if tagname == 'loop':
            mode = 'loop'
        self.loops = loops
        result = self._translate_expression(exp, mode=mode)
        if LOOP_TAG_DEBUG and loops:
            print " > exp(%s) :: %s" % (', '.join(loops), result)
        if EXPRESSION_TRANSLATOR_DEBUG:
            print "->", repr(result)
        return result

    def _translate_expression(self, exp, mode=None):
        # mode is None (plain), 'function' (argument list) or 'loop'
        # (returns the (itervar, iterable) pair for a for-tag).
        parts = []
        result = []
        for type, value in exp:
            if type == 'option':
                value = 'board.options.%s' % value
            elif type == 'path':
                value = 'board.name'
            elif type == 'advinclude':
                raise AdvInclude(value)
            elif type == 'function':
                # Function calls become Jinja2 filters: f(x, y) -> (x)|f(y).
                name, subexp = value
                if name in RENAME:
                    name = RENAME[name]
                parsed = self._translate_expression(subexp, mode='function')
                if name == 'void':
                    value = '(%s)' % ', '.join(parsed)
                elif len(parsed) > 1:
                    value = '(%s)|%s(%s)'\
                        % (parsed[0], name, ', '.join(parsed[1:]))
                elif len(parsed) == 1 and ''.join(parsed):
                    value = '(%s)|%s' % (parsed[0], name)
                else:
                    value = '%s()' % name
                if VARIABLES_DEBUG and name != 'void':
                    print " filter", name
            elif type == 'var':
                if value in RENAME:
                    value = RENAME[value]
                # Qualify the variable with the innermost matching loop var.
                for loop in self.loops[::-1]:
                    if loop in KNOWN_LOOPS and value in KNOWN_LOOPS[loop][1]:
                        value = '%s.%s' % (KNOWN_LOOPS[loop][0], value)
                if VARIABLES_DEBUG:
                    print " var", value
            elif type == 'const':
                if value in RENAME:
                    value = RENAME[value]
                if value.startswith("S_"):
                    value = 'strings.%s' % value[2:]
                elif config and hasattr(config, value):
                    value = 'config.%s' % value
                if VARIABLES_DEBUG:
                    print " const", value
            elif type == 'regex':
                # Rewrite =~ matches as .startswith()/.count() on the
                # previously emitted operand.
                do_lower = value.endswith('i')
                action = value.startswith('/^') and 'startswith' or 'count'
                value = value.strip('/i^')
                variable = result.pop()
                if variable == 'not':
                    variable = result.pop()
                    result.append('not')
                result.append('%s.%s("%s")' % (variable, action, value))
                value = None
            elif type == 'operator':
                value = self.OPERATORS.get(value, value)
            elif type == 'comma':
                parts.append(result)
                result = []
                value = None
            if value:
                result.append(value)
        if mode == 'function':
            parts.append(result)
            return [' '.join(x) for x in parts]
        elif mode == 'loop':
            # Derive a readable iteration variable name for the for-tag.
            itervarname = 'i'
            if len(exp) == 1:
                type, value = exp[0]
                if type in ('var', 'const'):
                    if value in KNOWN_LOOPS:
                        itervarname = KNOWN_LOOPS[value][0]
                    elif value.lower().endswith('s'):
                        itervarname = value.lower().rstrip('s')
                    else:
                        itervarname = value.lower() + '_item'
            return (itervarname, ' '.join(result))
        else:
            return ' '.join(result)
class AdvInclude(Exception):
    """Signal that a <var> tag actually holds an advanced include.

    Not a real error condition: advanced includes are complete includes
    (with template-tag parsing and everything) that appear inside a
    <var> tag, so raising out of the expression translator was the most
    sensible way to divert them.
    """

    def __init__(self, value):
        self.value = value
def template_filename(constname):
    """Map a futaba_style constant name to its output template path."""
    return os.path.join(TEMPLATES_DIR, constname.lower() + '.html')
def main():
    """Command-line entry point: parse options, set the global debug
    channels, and run the parser (its __init__ does all the work)."""
    parser = optparse.OptionParser()
    parser.add_option("-f", "--filename", default="futaba_style.pl",
        help="Location of the futaba_style.pl file")
    parser.add_option("-o", "--only", default=None, metavar="CONST",
        help="Parse only one constant in futaba_style.pl")
    parser.add_option("-n", "--dry-run", action="store_true",
        help="Don't write templates to disk")
    group = optparse.OptionGroup(parser, "Debug channels")
    group.add_option("--futaba-style-debug", action="store_true")
    group.add_option("--expression-debug", action="store_true")
    group.add_option("--translator-debug", action="store_true")
    group.add_option("--loop-debug", action="store_true")
    group.add_option("--variables-debug", action="store_true")
    parser.add_option_group(group)
    (options, args) = parser.parse_args()

    # set debug channels. oh god h4x
    global FUTABA_STYLE_DEBUG, EXPRESSION_DEBUG, EXPRESSION_TRANSLATOR_DEBUG
    global LOOP_TAG_DEBUG, VARIABLES_DEBUG
    FUTABA_STYLE_DEBUG = options.futaba_style_debug
    EXPRESSION_DEBUG = options.expression_debug
    EXPRESSION_TRANSLATOR_DEBUG = options.translator_debug
    LOOP_TAG_DEBUG = options.loop_debug
    VARIABLES_DEBUG = options.variables_debug

    # Constructing the parser runs the whole conversion.
    FutabaStyleParser(filename=options.filename,
        only=options.only,
        dry_run=options.dry_run)
if __name__ == '__main__':
main()
| 19,740 | 6,243 |
import logging
logging.basicConfig(level=logging.INFO)
import subprocess
import os
import shutil
import re
from extract.common import config_dict as config
logger = logging.getLogger(__name__)
#List of all the news websites stored in extract/config.yaml
news_sites_uids = [site for site in config()['news_sites']]
def main():
    """Run the full ETL pipeline: extract, then transform, then load."""
    _extract()
    _transform()
    _load()
def _extract():
    """Run every site scraper and move its output file into ./transform.

    For each configured news site the extract script is executed, then the
    first .csv/.json file it produced is moved to the transform directory.
    Sites that produced no data file are logged and skipped.
    """
    logger.info('Starting extract process')
    # Compile once; matches any filename ending in .csv or .json.
    data_file_re = re.compile(r'.*\.(csv|json)')
    for news_site_uid in news_sites_uids:
        ## Execute the extractions of all the news sites
        # NOTE(review): the uid is passed twice -- confirm extract/main.py
        # really takes two positional arguments.
        os.system(f'python ./extract/main.py {news_site_uid} {news_site_uid}')
        ## Move the .csv/.json file generated with the scraper to ./transform.
        # (The original also computed an unused `extension` here via
        # re.search(...).group(1), which crashed with AttributeError whenever
        # no data file existed; that dead line is removed.)
        try:
            source = list(filter(data_file_re.match, os.listdir(path='./extract')))[0]
            if source.endswith('.json'):
                shutil.move(f'./extract/{source}', f'./transform/{news_site_uid}_.json')
            elif source.endswith('.csv'):
                shutil.move(f'./extract/{source}', f'./transform/{news_site_uid}_.csv')
        except (IndexError, OSError, shutil.Error):
            # IndexError: the scraper produced no data file.
            # OSError/shutil.Error: the move itself failed.
            logger.warning(f'There is not csv or json file asociated to {news_site_uid}')
def _transform():
    """Clean every dirty data file in ./transform and move results to ./load.

    Determines the extension produced by the extract step, runs the cleaning
    script per site, then removes the dirty file and hands the clean csv to
    the load directory. Missing files are logged and skipped.
    """
    logger.info('Starting transform process')
    # Work out which extension ('csv' or 'json') the extract step produced.
    # Guarded: the original called .group(1) directly, which crashed with
    # AttributeError when the directory held no data files at all.
    match = re.search(r'(\.csv|\.json)', str(os.listdir(path='./transform')))
    if match is None:
        logger.warning('There is not csv or json file in the "transform" directory')
        return
    extension = match.group(1)[1:]
    for news_site_uid in news_sites_uids:
        try:
            ## Execute main.py to clean the data and create clean data files
            subprocess.run(['python', 'main.py', f'{news_site_uid}_.{extension}'], cwd='./transform')
            ## Remove the dirty data file
            os.remove(f'./transform/{news_site_uid}_.{extension}')
            ## Move the clean data file into the 'load' directory
            shutil.move(f'./transform/clean_{news_site_uid}_.csv', f'./load/{news_site_uid}_.csv')
        except (OSError, shutil.Error):
            # The dirty file was missing or the clean file was never produced.
            logger.warning(f'There is not csv file asociated to {news_site_uid} in the "transform" directory')
def _load():
    """Load every clean csv in ./load into the database, then delete it.

    Runs the load script per site and removes the csv once consumed.
    Missing files are logged and skipped.
    """
    logger.info('Starting load process')
    for news_site_uid in news_sites_uids:
        try:
            ## Execute the script 'main.py' to load the data into SQLite
            subprocess.run(['python', 'main.py', f'{news_site_uid}_.csv'], cwd='./load')
            ## Remove the csv file once it has been loaded
            os.remove(f'./load/{news_site_uid}_.csv')
        except OSError:
            # The csv was never produced by the transform step.
            logger.warning(f'There is not csv file asociated to {news_site_uid} in the "load" directory')
if __name__ == "__main__":
main()
| 3,253 | 1,087 |
from setuptools import setup

# Load __version__ from the package's version module without importing the
# package itself (an import could fail before dependencies are installed).
__version__ = None
with open('mendeley/version.py') as f:
    exec(f.read())

setup(
    name='mendeley',
    version=__version__,
    packages=['mendeley', 'mendeley.models', 'mendeley.resources'],
    url='http://dev.mendeley.com',
    license='Apache',
    author='Mendeley',
    author_email='api@mendeley.com',
    description='Python SDK for the Mendeley API',
    # Runtime dependencies are pinned to exact versions.
    install_requires=[
        'arrow==0.5.0',
        'future==0.14.3',
        'memoized-property==1.0.2',
        'requests==2.5.1',
        'requests-oauthlib==0.4.2',
        'oauthlib==0.7.2'
    ],
    tests_require=[
        'pytest==2.6.4',
        'vcrpy==1.2.0'
    ],
    classifiers=[
        "Development Status :: 4 - Beta",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: Apache Software License",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 3",
        "Topic :: Scientific/Engineering",
        "Topic :: Software Development :: Libraries :: Python Modules"
    ]
)
| 1,149 | 384 |
# Copyright [2021] [Red Hat, Inc.]
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os
from tempfile import TemporaryFile, NamedTemporaryFile
from select import select
from abc import ABCMeta, abstractmethod
import tarfile
# from ansible_collections.kubernetes.core.plugins.module_utils.ansiblemodule import AnsibleModule
from ansible.module_utils._text import to_native
try:
from kubernetes.client.api import core_v1_api
from kubernetes.stream import stream
from kubernetes.stream.ws_client import (
STDOUT_CHANNEL,
STDERR_CHANNEL,
ERROR_CHANNEL,
ABNF,
)
except ImportError:
pass
try:
import yaml
except ImportError:
# ImportError are managed by the common module already.
pass
class K8SCopy(metaclass=ABCMeta):
    """Base class for pod copy operations.

    Extracts the shared module parameters and builds the CoreV1 API
    client used by the concrete copy implementations.
    """

    def __init__(self, module, client):
        self.module = module
        self.client = client
        self.api_instance = core_v1_api.CoreV1Api(client.client)

        params = module.params
        self.local_path = params.get("local_path")
        self.name = params.get("pod")
        self.namespace = params.get("namespace")
        self.remote_path = params.get("remote_path")
        self.content = params.get("content")
        self.no_preserve = params.get("no_preserve")

        # Only forward the container argument when one was requested.
        container = params.get("container")
        self.container_arg = {"container": container} if container else {}

    @abstractmethod
    def run(self):
        pass
class K8SCopyFromPod(K8SCopy):
    """
    Copy files/directory from Pod into local filesystem
    """

    def __init__(self, module, client):
        super(K8SCopyFromPod, self).__init__(module, client)
        self.is_remote_path_dir = None
        # Flat list of remote file paths found under remote_path.
        self.files_to_copy = list()

    def list_remote_files(self):
        """
        This method will check if the remote path is a dir or file
        if it is a directory the file list will be updated accordingly
        """
        try:
            # `find <path> -type f` lists remote_path itself when it is a
            # regular file, or every file beneath it when it is a directory.
            find_cmd = ["find", self.remote_path, "-type", "f", "-name", "*"]
            response = stream(
                self.api_instance.connect_get_namespaced_pod_exec,
                self.name,
                self.namespace,
                command=find_cmd,
                stdout=True,
                stderr=True,
                stdin=False,
                tty=False,
                _preload_content=False,
                **self.container_arg
            )
        except Exception as e:
            self.module.fail_json(
                msg="Failed to execute on pod {0}/{1} due to : {2}".format(
                    self.namespace, self.name, to_native(e)
                )
            )
        stderr = []
        # Drain the exec websocket until the remote command finishes.
        while response.is_open():
            response.update(timeout=1)
            if response.peek_stdout():
                self.files_to_copy.extend(
                    response.read_stdout().rstrip("\n").split("\n")
                )
            if response.peek_stderr():
                err = response.read_stderr()
                if "No such file or directory" in err:
                    self.module.fail_json(
                        msg="{0} does not exist in remote pod filesystem".format(
                            self.remote_path
                        )
                    )
                stderr.append(err)
        # The error channel carries the exec exit status as YAML.
        error = response.read_channel(ERROR_CHANNEL)
        response.close()
        error = yaml.safe_load(error)
        if error["status"] != "Success":
            self.module.fail_json(
                msg="Failed to execute on Pod due to: {0}".format(error)
            )

    def read(self):
        """Read one websocket frame from self.response into stdout/stderr.

        Sets self.stdout to raw bytes (written verbatim to the local file)
        and self.stderr to decoded text; either may stay None for a frame.
        """
        self.stdout = None
        self.stderr = None
        if self.response.is_open():
            if not self.response.sock.connected:
                self.response._connected = False
            else:
                # Non-blocking poll of the underlying socket.
                ret, out, err = select((self.response.sock.sock,), (), (), 0)
                if ret:
                    code, frame = self.response.sock.recv_data_frame(True)
                    if code == ABNF.OPCODE_CLOSE:
                        self.response._connected = False
                    elif (
                        code in (ABNF.OPCODE_BINARY, ABNF.OPCODE_TEXT)
                        and len(frame.data) > 1
                    ):
                        # First byte of an exec frame is the channel id.
                        channel = frame.data[0]
                        content = frame.data[1:]
                        if content:
                            if channel == STDOUT_CHANNEL:
                                self.stdout = content
                            elif channel == STDERR_CHANNEL:
                                self.stderr = content.decode("utf-8", "replace")

    def copy(self):
        """Stream every file in files_to_copy to the local filesystem."""
        # remote_path was a directory if `find` returned several files or
        # a single file different from remote_path itself.
        is_remote_path_dir = (
            len(self.files_to_copy) > 1 or self.files_to_copy[0] != self.remote_path
        )
        relpath_start = self.remote_path
        if is_remote_path_dir and os.path.isdir(self.local_path):
            relpath_start = os.path.dirname(self.remote_path)
        for remote_file in self.files_to_copy:
            dest_file = self.local_path
            if is_remote_path_dir:
                # Mirror the remote tree layout under local_path.
                dest_file = os.path.join(
                    self.local_path, os.path.relpath(remote_file, start=relpath_start)
                )
                # create directory to copy file in
                os.makedirs(os.path.dirname(dest_file), exist_ok=True)
            # Stream the file content via `cat` on the pod.
            pod_command = ["cat", remote_file]
            self.response = stream(
                self.api_instance.connect_get_namespaced_pod_exec,
                self.name,
                self.namespace,
                command=pod_command,
                stderr=True,
                stdin=True,
                stdout=True,
                tty=False,
                _preload_content=False,
                **self.container_arg
            )
            errors = []
            with open(dest_file, "wb") as fh:
                while self.response._connected:
                    self.read()
                    if self.stdout:
                        fh.write(self.stdout)
                    if self.stderr:
                        errors.append(self.stderr)
            if errors:
                self.module.fail_json(
                    msg="Failed to copy file from Pod: {0}".format("".join(errors))
                )
        self.module.exit_json(
            changed=True,
            result="{0} successfully copied locally into {1}".format(
                self.remote_path, self.local_path
            ),
        )

    def run(self):
        """Entry point: enumerate remote files, then copy them locally."""
        self.list_remote_files()
        if self.files_to_copy == []:
            self.module.exit_json(
                changed=False,
                warning="No file found from directory '{0}' into remote Pod.".format(
                    self.remote_path
                ),
            )
        self.copy()
class K8SCopyToPod(K8SCopy):
    """
    Copy files/directory from local filesystem into remote Pod
    """

    def __init__(self, module, client):
        super(K8SCopyToPod, self).__init__(module, client)
        self.files_to_copy = list()

    def run_from_pod(self, command):
        """Run `command` in the pod and fail the module if it errors."""
        response = stream(
            self.api_instance.connect_get_namespaced_pod_exec,
            self.name,
            self.namespace,
            command=command,
            stderr=True,
            stdin=False,
            stdout=True,
            tty=False,
            _preload_content=False,
            **self.container_arg
        )
        errors = []
        while response.is_open():
            response.update(timeout=1)
            if response.peek_stderr():
                errors.append(response.read_stderr())
        response.close()
        # Exec exit status is delivered on the error channel as YAML.
        err = response.read_channel(ERROR_CHANNEL)
        err = yaml.safe_load(err)
        # NOTE(review): close() is called both before read_channel and here;
        # confirm the second close is intentional.
        response.close()
        if err["status"] != "Success":
            self.module.fail_json(
                msg="Failed to run {0} on Pod.".format(command), errors=errors
            )

    def is_remote_path_dir(self):
        """Return True when remote_path is an existing directory in the pod."""
        # `test -d` exits 0 only for directories; the error channel then
        # reports status "Success".
        pod_command = ["test", "-d", self.remote_path]
        response = stream(
            self.api_instance.connect_get_namespaced_pod_exec,
            self.name,
            self.namespace,
            command=pod_command,
            stdout=True,
            stderr=True,
            stdin=False,
            tty=False,
            _preload_content=False,
            **self.container_arg
        )
        while response.is_open():
            response.update(timeout=1)
        err = response.read_channel(ERROR_CHANNEL)
        err = yaml.safe_load(err)
        response.close()
        if err["status"] == "Success":
            return True
        return False

    def close_temp_file(self):
        # Temp file only exists when `content` was supplied.
        if self.named_temp_file:
            self.named_temp_file.close()

    def run(self):
        """Entry point: tar the local source and untar it inside the pod."""
        # remove trailing slash from destination path
        dest_file = self.remote_path.rstrip("/")
        src_file = self.local_path
        self.named_temp_file = None
        if self.content:
            # Inline content is written to a temp file so it can be tarred
            # like a regular source file.
            self.named_temp_file = NamedTemporaryFile(mode="w")
            self.named_temp_file.write(self.content)
            self.named_temp_file.flush()
            src_file = self.named_temp_file.name
        else:
            if not os.path.exists(self.local_path):
                self.module.fail_json(
                    msg="{0} does not exist in local filesystem".format(self.local_path)
                )
            if not os.access(self.local_path, os.R_OK):
                self.module.fail_json(msg="{0} not readable".format(self.local_path))
        if self.is_remote_path_dir():
            if self.content:
                self.module.fail_json(
                    msg="When content is specified, remote path should not be an existing directory"
                )
            else:
                # Copy *into* the existing remote directory.
                dest_file = os.path.join(dest_file, os.path.basename(src_file))
        if self.no_preserve:
            tar_command = [
                "tar",
                "--no-same-permissions",
                "--no-same-owner",
                "-xmf",
                "-",
            ]
        else:
            tar_command = ["tar", "-xmf", "-"]
        if dest_file.startswith("/"):
            # Absolute destinations are extracted relative to /; tar itself
            # strips the leading slash from archive member names.
            tar_command.extend(["-C", "/"])
        response = stream(
            self.api_instance.connect_get_namespaced_pod_exec,
            self.name,
            self.namespace,
            command=tar_command,
            stderr=True,
            stdin=True,
            stdout=True,
            tty=False,
            _preload_content=False,
            **self.container_arg
        )
        with TemporaryFile() as tar_buffer:
            # Archive the source under its destination name so extraction
            # lands it at dest_file directly.
            with tarfile.open(fileobj=tar_buffer, mode="w") as tar:
                tar.add(src_file, dest_file)
            tar_buffer.seek(0)
            commands = []
            # push command in chunk mode
            size = 1024 * 1024
            while True:
                data = tar_buffer.read(size)
                if not data:
                    break
                commands.append(data)
            stderr, stdout = [], []
            while response.is_open():
                if response.peek_stdout():
                    stdout.append(response.read_stdout().rstrip("\n"))
                if response.peek_stderr():
                    stderr.append(response.read_stderr().rstrip("\n"))
                if commands:
                    cmd = commands.pop(0)
                    response.write_stdin(cmd)
                else:
                    break
            response.close()
            if stderr:
                self.close_temp_file()
                self.module.fail_json(
                    command=tar_command,
                    msg="Failed to copy local file/directory into Pod due to: {0}".format(
                        "".join(stderr)
                    ),
                )
        self.close_temp_file()
        if self.content:
            self.module.exit_json(
                changed=True,
                result="Content successfully copied into {0} on remote Pod".format(
                    self.remote_path
                ),
            )
        self.module.exit_json(
            changed=True,
            result="{0} successfully copied into remote Pod into {1}".format(
                self.local_path, self.remote_path
            ),
        )
def check_pod(k8s_ansible_mixin, module):
    """Verify the target pod (and optional container) exists.

    Returns the list of container names in the pod, or fails the module.
    """
    resource = k8s_ansible_mixin.find_resource("Pod", None, True)
    namespace = module.params.get("namespace")
    name = module.params.get("pod")
    container = module.params.get("container")

    def _fail(exc):
        # Translate a Kubernetes API exception into a fail_json payload.
        arg = {}
        if hasattr(exc, "body"):
            msg = "Namespace={0} Kind=Pod Name={1}: Failed requested object: {2}".format(
                namespace, name, exc.body
            )
        else:
            msg = to_native(exc)
        for attr in ["status", "reason"]:
            if hasattr(exc, attr):
                arg[attr] = getattr(exc, attr)
        module.fail_json(msg=msg, **arg)

    try:
        result = resource.get(name=name, namespace=namespace)
        # NOTE(review): assumes status.containerStatuses is populated —
        # confirm behavior for pods that have not started yet.
        containers = [
            c["name"] for c in result.to_dict()["status"]["containerStatuses"]
        ]
        if container and container not in containers:
            module.fail_json(msg="Pod has no container {0}".format(container))
        return containers
    except Exception as exc:
        _fail(exc)
| 14,002 | 3,864 |
import re
import uuid
import netaddr
import mongoengine as me
from mist.api.exceptions import RequiredParameterMissingError
from mist.api.clouds.models import Cloud
from mist.api.clouds.models import CLOUDS
from mist.api.networks.controllers import SubnetController
from mist.api.networks.controllers import NetworkController
# Automatically populated mappings of all Network and Subnet subclasses,
# keyed by their provider name.
NETWORKS, SUBNETS = {}, {}
def _populate_class_mapping(mapping, class_suffix, base_class):
    """Populate `mapping` with provider-name -> model-class entries.

    Scans module globals for strict subclasses of `base_class` whose
    name ends with `class_suffix`, and assigns them to every provider in
    `CLOUDS` whose cloud class repr mentions the matching name prefix.
    """
    for name, candidate in globals().items():
        if not name.endswith(class_suffix) or name == class_suffix:
            continue
        if not (issubclass(candidate, base_class) and candidate is not base_class):
            continue
        prefix = name.replace(class_suffix, '')
        for provider, cloud_cls in CLOUDS.items():
            if prefix in repr(cloud_cls):
                mapping[provider] = candidate
class Network(me.Document):
    """The basic Network model.

    This class is only meant to be used as a basic class for cloud-specific
    `Network` subclasses.

    `Network` contains all common, provider-independent fields and handlers.
    """

    # Random UUID hex used as the document's primary key.
    id = me.StringField(primary_key=True, default=lambda: uuid.uuid4().hex)
    cloud = me.ReferenceField(Cloud, required=True)
    # Provider-side identifier of the network.
    network_id = me.StringField()  # required=True)
    name = me.StringField()
    cidr = me.StringField()
    description = me.StringField()
    extra = me.DictField()  # The `extra` dictionary returned by libcloud.

    meta = {
        'allow_inheritance': True,
        'collection': 'networks',
        'indexes': [
            # A network is unique per (cloud, provider network id).
            {
                'fields': ['cloud', 'network_id'],
                'sparse': False,
                'unique': True,
                'cls': False,
            },
        ],
    }

    def __init__(self, *args, **kwargs):
        super(Network, self).__init__(*args, **kwargs)
        # Set `ctl` attribute.
        self.ctl = NetworkController(self)
        # Calculate and store network type specific fields (those declared
        # by the subclass but not by this base class).
        self._network_specific_fields = [field for field in type(self)._fields
                                         if field not in Network._fields]

    @classmethod
    def add(cls, cloud, cidr=None, name='', description='', id='', **kwargs):
        """Add a Network.

        This is a class method, meaning that it is meant to be called on the
        class itself and not on an instance of the class.

        You're not meant to be calling this directly, but on a network subclass
        instead like this:

            network = AmazonNetwork.add(cloud=cloud, name='Ec2Network')

        :param cloud: the Cloud on which the network is going to be created.
        :param cidr:
        :param name: the name to be assigned to the new network.
        :param description: an optional description.
        :param id: a custom object id, passed in case of a migration.
        :param kwargs: the kwargs to be passed to the corresponding controller.
        """
        assert isinstance(cloud, Cloud)
        network = cls(cloud=cloud, cidr=cidr, name=name,
                      description=description)
        if id:
            network.id = id
        # Delegate the provider-side creation to the controller.
        network.ctl.create(**kwargs)
        return network

    def clean(self):
        """Checks the CIDR to determine if it maps to a valid IPv4 network."""
        if self.cidr:
            try:
                # cidr_to_glob raises for anything that is not valid IPv4 CIDR.
                netaddr.cidr_to_glob(self.cidr)
            except (TypeError, netaddr.AddrFormatError) as err:
                raise me.ValidationError(err)

    def as_dict(self):
        """Returns the API representation of the `Network` object."""
        net_dict = {
            'id': self.id,
            'cloud': self.cloud.id,
            'network_id': self.network_id,
            'name': self.name,
            'cidr': self.cidr,
            'description': self.description,
            'extra': self.extra,
        }
        # Include subclass-specific fields in the representation.
        net_dict.update(
            {key: getattr(self, key) for key in self._network_specific_fields}
        )
        return net_dict

    def __str__(self):
        return '%s "%s" (%s)' % (self.__class__.__name__, self.name, self.id)
class AmazonNetwork(Network):
    # Allowed values are constrained by `choices`.
    instance_tenancy = me.StringField(default='default', choices=('default',
                                                                  'private'))

    def clean(self):
        """Extended validation for EC2 Networks to ensure CIDR assignment."""
        # EC2 VPCs require an explicit IPv4 CIDR block.
        if not self.cidr:
            raise me.ValidationError('Missing IPv4 range in CIDR notation')
        super(AmazonNetwork, self).clean()
class GoogleNetwork(Network):
    mode = me.StringField(default='legacy', choices=('legacy', 'auto',
                                                     'custom'))

    def clean(self):
        """Custom validation for GCE Networks.

        GCE enforces:
          - Regex constrains on network names.
          - CIDR assignment only if `legacy` mode has been selected.
        """
        if self.mode == 'legacy':
            super(GoogleNetwork, self).clean()
        elif self.cidr is not None:
            raise me.ValidationError('CIDR cannot be set for modes other than '
                                     '"legacy" - Current mode: %s' % self.mode)
        # GCE names must be lowercase RFC1035-style labels.
        if not re.match('^(?:[a-z](?:[-a-z0-9]{0,61}[a-z0-9])?)$', self.name):
            raise me.ValidationError('A **lowercase** name must be specified')
class OpenStackNetwork(Network):
    # Neutron-specific flags.
    shared = me.BooleanField(default=False)
    admin_state_up = me.BooleanField(default=True)
    router_external = me.BooleanField(default=False)
class Subnet(me.Document):
    """The basic Subnet model.

    This class is only meant to be used as a basic class for cloud-specific
    `Subnet` subclasses.

    `Subnet` contains all common, provider-independent fields and handlers.
    """

    # Random UUID hex used as the document's primary key.
    id = me.StringField(primary_key=True, default=lambda: uuid.uuid4().hex)
    # Deleting a Network cascades to its Subnets.
    network = me.ReferenceField('Network', required=True,
                                reverse_delete_rule=me.CASCADE)
    # Provider-side identifier of the subnet.
    subnet_id = me.StringField()
    name = me.StringField()
    cidr = me.StringField(required=True)
    description = me.StringField()
    extra = me.DictField()  # The `extra` dictionary returned by libcloud.

    meta = {
        'allow_inheritance': True,
        'collection': 'subnets',
        'indexes': [
            # A subnet is unique per (network, provider subnet id).
            {
                'fields': ['network', 'subnet_id'],
                'sparse': False,
                'unique': True,
                'cls': False,
            },
        ],
    }

    def __init__(self, *args, **kwargs):
        super(Subnet, self).__init__(*args, **kwargs)
        # Set `ctl` attribute.
        self.ctl = SubnetController(self)
        # Calculate and store subnet type specific fields (those declared
        # by the subclass but not by this base class).
        self._subnet_specific_fields = [field for field in type(self)._fields
                                        if field not in Subnet._fields]

    @classmethod
    def add(cls, network, cidr, name='', description='', id='', **kwargs):
        """Add a Subnet.

        This is a class method, meaning that it is meant to be called on the
        class itself and not on an instance of the class.

        You're not meant to be calling this directly, but on a network subclass
        instead like this:

            subnet = AmazonSubnet.add(network=network,
                                      name='Ec2Subnet',
                                      cidr='172.31.10.0/24')

        :param network: the Network nn which the subnet is going to be created.
        :param cidr: the CIDR to be assigned to the new subnet.
        :param name: the name to be assigned to the new subnet.
        :param description: an optional description.
        :param id: a custom object id, passed in case of a migration.
        :param kwargs: the kwargs to be passed to the corresponding controller.
        """
        assert isinstance(network, Network)
        if not cidr:
            raise RequiredParameterMissingError('cidr')
        subnet = cls(network=network, cidr=cidr, name=name,
                     description=description)
        if id:
            subnet.id = id
        # Delegate the provider-side creation to the controller.
        subnet.ctl.create(**kwargs)
        return subnet

    def clean(self):
        """Checks the CIDR to determine if it maps to a valid IPv4 network."""
        try:
            netaddr.cidr_to_glob(self.cidr)
        except (TypeError, netaddr.AddrFormatError) as err:
            raise me.ValidationError(err)

    def as_dict(self):
        """Returns the API representation of the `Subnet` object."""
        subnet_dict = {
            'id': self.id,
            'cloud': self.network.cloud.id,
            'network': self.network.id,
            'subnet_id': self.subnet_id,
            'name': self.name,
            'cidr': self.cidr,
            'description': self.description,
            'extra': self.extra,
        }
        # Include subclass-specific fields in the representation.
        subnet_dict.update(
            {key: getattr(self, key) for key in self._subnet_specific_fields}
        )
        return subnet_dict

    def __str__(self):
        return '%s "%s" (%s)' % (self.__class__.__name__, self.name, self.id)
class AmazonSubnet(Subnet):
    # EC2 subnets are always tied to a specific availability zone.
    availability_zone = me.StringField(required=True)
class GoogleSubnet(Subnet):
    # GCE subnets are regional resources.
    region = me.StringField(required=True)

    def clean(self):
        """Extended validation for GCE Subnets."""
        # GCE names must be lowercase RFC1035-style labels.
        if not re.match('^(?:[a-z](?:[-a-z0-9]{0,61}[a-z0-9])?)$', self.name):
            raise me.ValidationError('A **lowercase** name must be specified')
        super(GoogleSubnet, self).clean()
class OpenStackSubnet(Subnet):
    # Neutron-specific subnet attributes.
    gateway_ip = me.StringField()
    ip_version = me.IntField(default=4)
    enable_dhcp = me.BooleanField(default=True)
    dns_nameservers = me.ListField(default=lambda: [])
    allocation_pools = me.ListField(default=lambda: [])
# Build the provider -> model-class registries now that all subclasses
# above have been defined.
_populate_class_mapping(NETWORKS, 'Network', Network)
_populate_class_mapping(SUBNETS, 'Subnet', Subnet)
| 10,002 | 2,871 |
# Copyright (C) 2021 Satoshi Konno. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http:#www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from uecho import Property
from uecho.std import Database
def test_manufacture_database():
    """Known manufacturer codes resolve to the expected vendor names."""
    db = Database()
    cases = [
        ("Panasonic", 0x00000B),
        ("Panasonic", bytes([0x00, 0x00, 0x0B])),
        ("Panasonic", bytearray([0x00, 0x00, 0x0B])),
        ("Sharp", 0x000005),
        ("Sharp", bytes([0x00, 0x00, 0x05])),
        ("Sharp", bytearray([0x00, 0x00, 0x05])),
    ]
    for vendor_prefix, code in cases:
        manufacturer = db.get_manufacturer(code)
        assert manufacturer
        assert manufacturer.name.startswith(vendor_prefix)
def test_object_database():
    """Object (0x00, 0x01) exposes the expected per-property access rules."""
    db = Database()
    obj = db.get_object((0x00, 0x01))
    assert (obj)
    assert obj.get_property(0x00) is None
    cases = [
        (0x80, Property.REQUIRED, Property.OPTIONAL, Property.REQUIRED),
        (0xB0, Property.OPTIONAL, Property.OPTIONAL, Property.PROHIBITED),
        (0xB1, Property.REQUIRED, Property.PROHIBITED, Property.REQUIRED),
        (0xBF, Property.PROHIBITED, Property.OPTIONAL, Property.PROHIBITED),
    ]
    for code, get_rule, set_rule, anno_rule in cases:
        prop = obj.get_property(code)
        assert prop
        assert prop.get_attribute(Property.GET) == get_rule
        assert prop.get_attribute(Property.SET) == set_rule
        assert prop.get_attribute(Property.ANNO) == anno_rule
def test_mra_object_database():
    """MRA object (0x02, 0x91) exposes the expected property access rules."""
    db = Database()
    obj = db.get_object((0x02, 0x91))
    assert (obj)
    assert obj.get_property(0x00) is None
    cases = [
        (0x80, Property.REQUIRED, Property.REQUIRED, Property.REQUIRED),
        (0xB0, Property.OPTIONAL, Property.OPTIONAL, Property.OPTIONAL),
    ]
    for code, get_rule, set_rule, anno_rule in cases:
        prop = obj.get_property(code)
        assert prop
        assert prop.get_attribute(Property.GET) == get_rule
        assert prop.get_attribute(Property.SET) == set_rule
        assert prop.get_attribute(Property.ANNO) == anno_rule
| 2,518 | 882 |
# Copyright (C) 2018 Intel Corporation
# SPDX-License-Identifier: BSD-3-Clause
"""Module description here"""
# Import built in modules
# Import 3rd party modules
# Import local modules
# Module authorship metadata
__author__ = "Erik W Berg"
__copyright__ = "Copyright 2018, Intel Corporation"
__credits__ = [""]
__license__ = "BSD-3-Clause"
__version__ = "1.0"
__maintainer__ = "Erik W Berg"
__email__ = ""
__status__ = "Production" # Prototype, Development, Production
# Code starts here
class NodeMetricsData(object):
    """Placeholder container for per-node metrics; no behavior yet."""
| 541 | 194 |
import torch
from mlagents.trainers.torch.layers import (
Swish,
linear_layer,
lstm_layer,
Initialization,
)
def test_swish():
    """Swish(x) must equal x * sigmoid(x) elementwise."""
    activation = Swish()
    inputs = torch.Tensor([[1, 2, 3], [4, 5, 6]])
    expected = torch.mul(inputs, torch.sigmoid(inputs))
    assert torch.all(torch.eq(activation(inputs), expected))
def test_initialization_layer():
    """Zero initialization must yield all-zero weights and biases."""
    torch.manual_seed(0)
    # Test Zero
    zero_layer = linear_layer(
        3, 4, kernel_init=Initialization.Zero, bias_init=Initialization.Zero
    )
    for tensor in (zero_layer.weight.data, zero_layer.bias.data):
        assert torch.all(torch.eq(tensor, torch.zeros_like(tensor)))
def test_lstm_layer():
    """Zero-initialized LSTM: zero weights; bias slice [4:8] forced to one
    (presumably the forget gate for hidden size 4 — per lstm_layer)."""
    torch.manual_seed(0)
    lstm = lstm_layer(
        4, 4, kernel_init=Initialization.Zero, bias_init=Initialization.Zero
    )
    parameters = dict(lstm.named_parameters())
    for name, value in parameters.items():
        if "weight" in name:
            assert torch.all(torch.eq(value.data, torch.zeros_like(value.data)))
        elif "bias" in name:
            segment = value.data[4:8]
            assert torch.all(torch.eq(segment, torch.ones_like(segment)))
| 1,207 | 440 |
from jsinclude.templatetags.pkg.ArgumentValue import ArgumentValue
class TestArgumentValue:
    """ArgumentValue should normalize raw values for template usage."""

    def test_true_number_arg(self):
        # A real integer compares equal to itself.
        assert ArgumentValue(1234) == 1234

    def test_string_number_arg(self):
        # Numeric strings stay unquoted.
        assert ArgumentValue('1234') == '1234'

    def test_string_arg(self):
        # Plain strings get wrapped in single quotes.
        assert ArgumentValue('abc123') == "'abc123'"

    def test_boolean_arg(self):
        # Boolean keywords pass through unquoted.
        assert ArgumentValue('true') == 'true'
        assert ArgumentValue('false') == 'false'
| 563 | 180 |
import discord
from discord.ext import commands
import os
import random
import cv2
import numpy as np
import re
import pandas as pd
import codecs as cd
token = "Njg1MTgxMTU3NTc0MzExOTc0.XmE6rg.5Gyfl0WZSsVa8UEw14qmLinQpyg"
prefix = '$'
client = discord.Client()
'''
df0 = pd.read_csv('0.csv')
df0 = pd.read_csv('0.csv')
df0 = pd.read_csv('0.csv')
df0 = pd.read_csv('0.csv')
df0 = pd.read_csv('0.csv')
df0 = pd.read_csv('0.csv')
'''
helpmessage = ( '```\n'
'0.ヘルプ:\n'
' $help コマンド一覧を表示\n'
'1.モミール:\n'
' $dm{N} 無作為に選ばれたコスト{N}のクリーチャー(NEO, サイキック, 禁断を含む)を表示\n'
' $gr{N} 無作為に選ばれたGRクリーチャーを{N}枚表示({N}は省略可)\n'
' $st{N} 無作為に選ばれた「S・トリガー」を持つ呪文を{N}枚表示({N}は省略可)\n'
' $rule デュエマモミールのルール(暫定)を表示\n'
'2.TRPG:\n'
' ${N}d{M} {M}面ダイスを{N}回振る\n'
' $fumble ファンブル表を振る\n'
' $hencho 変調表を振る\n'
' $kanjo 感情表を振る\n'
' $scene シーン表を振る\n'
' $senjo 戦場表を振る\n'
'```\n'
)
def dice(dice_size):
    """Roll a single die and return a value in [1, dice_size].

    numpy.random.randint's upper bound is *exclusive*, so the previous
    `randint(1, dice_size)` could never roll the highest face (a d6 only
    produced 1-5). Adding 1 makes every face reachable.
    """
    return np.random.randint(1, int(dice_size) + 1)
def simple_dice(dice_size, dice_num):
    """Roll a dice_size-sided die dice_num times; return results as int64 array."""
    rolls = [dice(dice_size) for _ in range(dice_num)]
    return np.array(rolls, dtype=np.int64)
def imread(filename, flags=cv2.IMREAD_COLOR, dtype=np.uint8):
    """Decode an image via np.fromfile + imdecode.

    This route works for paths cv2.imread cannot handle (e.g. non-ASCII
    paths on Windows). Returns None and prints the error on failure.
    """
    try:
        raw = np.fromfile(filename, dtype)
        return cv2.imdecode(raw, flags)
    except Exception as err:
        print(err)
        return None
def hconcat_resize_min(im_list, interpolation=cv2.INTER_CUBIC):
    """Resize all images to the smallest height, then concatenate horizontally."""
    target_h = min(im.shape[0] for im in im_list)
    resized = []
    for im in im_list:
        # Scale the width to keep each image's aspect ratio.
        new_w = int(im.shape[1] * target_h / im.shape[0])
        resized.append(cv2.resize(im, (new_w, target_h), interpolation=interpolation))
    return cv2.hconcat(resized)
def dmomir(arg):
    """Draw one random creature of cost `arg` from <arg>.csv
    (NEO, psychic and forbidden creatures included).

    Returns a (formatted info text, image path) tuple.
    """
    cards = pd.read_csv(str(arg) + '.csv', encoding='utf_8_sig')
    pick = cards.sample()
    print(pick)

    def cell(column):
        return str(pick[column].values[0])

    abl = pick['abl'].values[0]
    info = '{0} [{1}] ({2}) {3}\n{4} -- {5}\n```{6}```'.format(
        cell('name'), cell('civ'), cell('cost'), cell('type'),
        cell('race'), cell('pow'), abl)
    return info, cell('img')
def trigger():
    """Draw one random 'S・トリガー' spell from st.csv.

    Returns a (formatted info text, image path) tuple.
    """
    spells = pd.read_csv('st.csv', encoding='utf_8_sig')
    pick = spells.sample()
    print(pick)

    def cell(column):
        return str(pick[column].values[0])

    # 'race' is not shown in the message, but keep the column access so a
    # malformed csv fails exactly as before.
    race = cell('race')
    abl = pick['abl'].values[0]
    info = '{0} [{1}] ({2})\n```{3}```'.format(cell('name'), cell('civ'),
                                               cell('cost'), abl)
    return info, cell('img')
def gr():
    """Draw one random GR creature from gr.csv.

    Returns a (formatted info text, image path) tuple.
    """
    creatures = pd.read_csv('gr.csv', encoding='utf_8_sig')
    pick = creatures.sample()
    print(pick)

    def cell(column):
        return str(pick[column].values[0])

    abl = pick['abl'].values[0]
    info = '{0} [{1}] ({2}) {3}\n{4} -- {5}\n```{6}```'.format(
        cell('name'), cell('civ'), cell('cost'), cell('type'),
        cell('race'), cell('pow'), abl)
    return info, cell('img')
def rule():
    """Return the provisional Duel Masters "Momir" rules as one code block."""
    parts = [
        "```",
        "■プレイヤーは自分のメインステップ中に一度,カードを1枚捨て,マナゾーンのカードを好きな数タップしてもよい。\n",
        " そうしたら,コストがそれと同じ数の進化でないクリーチャーを無作為に選び,コストを支払ったものとして召喚する。\n",
        " このようにしてバトルゾーンに出たサイキック・クリーチャーを裏返すことはできない。\n",
        "■プレイヤーがGR召喚をする時,かわりにすべてのGRクリーチャーから無作為に選び,召喚する(GR召喚として扱う)。\n",
        "■バトルゾーンのクリーチャーがゲーム中にバトルゾーン以外のゾーンに行った場合,消滅する。これはルール上の処理として行う。\n",
        "■手札と山札とマナゾーンと墓地とシールドゾーンにあるカードのコストと効果とカードタイプと名前は無視される(コストを参照する場合は0とする)。\n",
        "■ゲーム開始時,山札の上から5枚をシールドとして置く時,かわりに3枚(←要調整)をシールドとして置く。\n",
        " ただし、シールドゾーンにあるカードを手札に加える時、かわりに無作為に選ばれたS・トリガーを持つ呪文として扱ってもよい。\n",
        "```",
    ]
    return "".join(parts)
def rush():
    """Pick a random card image from ./rush and return its relative path."""
    path = './rush'
    chosen = random.choice(os.listdir(path))
    return '{0}/{1}'.format(path, chosen)
def kanjo():
    """Roll 1d6 on the emotion table and return the result string."""
    table = ("1: 共感/不信", "2: 友情/怒り", "3: 愛情/妬み",
             "4: 忠誠/侮蔑", "5: 憧憬/劣等感", "6: 狂信/殺意")
    roll = random.randrange(1, 7, 1)
    return table[roll - 1]
def senjo():
    """Roll 1d6 on the battlefield table (effect in the second line)."""
    table = ("1: 平地\n特になし",
             "2: 水中\n回避判定-2",
             "3: 高所\nファンブル時接近戦ダメージ1点",
             "4: 悪天候\n攻撃忍法の間合+1",
             "5: 雑踏\nファンブル値+1",
             "6: 極地\nラウンド終了時GMは1D6を振る。戦闘開始時からの経過ラウンド以下の目が出たとき接近戦ダメージを1点受ける。この戦場から脱落したときランダムに変調を受ける)")
    roll = random.randrange(1, 7, 1)
    return table[roll - 1]
def hencho():
    """Roll 1d6 on the status-change ("hencho") table."""
    table = ("1: 故障\n忍具が使用不能になる(累積しない)\n各サイクル終了時《絡繰術》で判定し成功で解除",
             "2: マヒ\n修得している特技一つをランダムに選び,使用不能にする(特技の数だけ累積)\n各サイクル終了時《身体操術》で判定し成功ですべて解除",
             "3: 重傷\n命中・情報・感情判定を行うたび接近戦ダメージ1点(累積しない)\n各サイクル終了時《生存術》で判定し成功で解除",
             "4: 行方不明\nメインフェイズ中自分以外のシーンに登場不可(累積しない)\n各サイクル終了時《経済力》で判定し成功で解除",
             "5: 忘却\n獲得している【感情】一つをランダムに選び,持っていないものとして扱う(【感情】の数だけ累積)\n各サイクル終了時《記憶術》で判定し成功ですべて解除",
             "6: 呪い\n修得している忍法一つをランダムに選び,修得していないものとして扱う(忍法の数だけ累積)\n各サイクル終了時《呪術》で判定し成功ですべて解除")
    roll = random.randrange(1, 7, 1)
    return table[roll - 1]
def fumble():
    """Roll 1d6 on the fumble table."""
    table = ("1: 何か調子がおかしい。そのサイクルの間、すべての行為判定にマイナス1の修正がつく。",
             "2: しまった! 好きな忍具を1つ失ってしまう。",
             "3: 情報が漏れる! このゲームであなたが獲得した【秘密】は、他のキャラクター全員の知るところとなる。",
             "4: 油断した! 術の制御に失敗し、好きな【生命力】を1点失う。",
             "5: 敵の陰謀か? 罠にかかり、ランダムに選んだ変調1つを受ける。変調は、変調表で決定すること。",
             "6: ふう。危ないところだった。特に何も起こらない。")
    roll = random.randrange(1, 7, 1)
    return table[roll - 1]
def scene():
    """Roll 2d6 on the scene table and return the matching entry."""
    table = {
        2: "2: 血の匂いがあたりに充満している。何者かの戦いがあった気配。いや?まだ戦いは続いているのだろうか?",
        3: "3: これは……夢か? もう終わったはずの過去。しかし、それを忘れることはできない。",
        4: "4: 眼下に広がる街並みを眺める。ここからなら街を一望できるが……。",
        5: "5: 世界の終わりのような暗黒。暗闇の中、お前達は密やかに囁く。",
        6: "6: 優しい時間が過ぎていく。影の世界のことを忘れてしまいそうだ。",
        7: "7: 清廉な気配が漂う森の中。鳥の囀りや、そよ風が樹々を通り過ぎる音が聞こえる。",
        8: "8: 凄まじい人混み。喧噪。影の世界のことを知らない無邪気な人々の手柄話や無駄話が騒がしい。",
        9: "9: 強い雨が降り出す。人々は、軒を求めて、大慌てて駆けだしていく。",
        10: "10: 大きな風が吹き荒ぶ。髪の毛や衣服が大きく揺れる。何かが起こりそうな予感……",
        11: "11: 酔っぱらいの怒号。客引きたちの呼び声。女たちの嬌声。いつもの繁華街の一幕だが。",
        12: "12: 太陽の微笑みがあなたを包み込む。影の世界の住人には、あまりにまぶしすぎる。",
    }
    # 2d6 always sums to a key in [2, 12].
    roll = int(np.sum(simple_dice(6, 2)))
    return table[roll]
@client.event
async def on_ready():
    """Log a marker once the Discord connection is established."""
    print('Logged in')
    print('-----')
@client.event
async def on_message(message):
    """Parse `$`-prefixed commands and dispatch to the game helpers."""
    # Only react to messages starting with the command prefix.
    if message.content.startswith(prefix):
        # Ignore the bot's own messages.
        if client.user != message.author:
            # NOTE(review): str.lstrip removes a *character set*, not a
            # prefix; the same applies to the lstrip('dm ')-style calls
            # below, which can over-strip — confirm intended.
            msg = message.content.lstrip(prefix)
            # dmomir: show one random creature of the given cost.
            if msg.startswith('dm'):
                info = msg.lstrip('dm ')
                if info.isdecimal():
                    data = dmomir(info)
                    await message.channel.send(message.author.mention + '\n' + re.sub(r'\\n','\n',data[0]), file=discord.File(data[1]))
            # st: show one or N random S-Trigger spells (images concatenated).
            elif msg.startswith('st'):
                info = msg.lstrip('st ')
                if info == '':
                    data = trigger()
                    await message.channel.send(message.author.mention + '\n' + re.sub(r'\\n','\n',data[0]), file=discord.File(data[1]))
                elif info.isdecimal():
                    data = trigger()
                    s = data[0]
                    im1 = imread(data[1])
                    for i in range(int(info)-1):
                        data = trigger()
                        s = s + data[0]
                        im2 = imread(data[1])
                        im1 = hconcat_resize_min([im1, im2])
                    cv2.imwrite('data/triggers.jpg', im1)
                    await message.channel.send(message.author.mention + '\n' + re.sub(r'\\n','\n',s), file=discord.File('data/triggers.jpg'))
            # gr: show one or N random GR creatures.
            elif msg.startswith('gr'):
                info = msg.lstrip('gr ')
                if info == '':
                    data = gr()
                    await message.channel.send(message.author.mention + '\n' + re.sub(r'\\n','\n',data[0]), file=discord.File(data[1]))
                elif info.isdecimal():
                    data = gr()
                    s = data[0]
                    im1 = imread(data[1])
                    for i in range(int(info)-1):
                        data = gr()
                        s = s + data[0]
                        im2 = imread(data[1])
                        im1 = hconcat_resize_min([im1, im2])
                    cv2.imwrite('data/grs.jpg', im1)
                    await message.channel.send(message.author.mention + '\n' + re.sub(r'\\n','\n',s), file=discord.File('data/grs.jpg'))
            # rule: post the game rules text.
            elif msg.startswith('rule'):
                await message.channel.send(rule())
            # rush: post one or N random card images from ./rush.
            elif msg.startswith('rush'):
                info = msg.lstrip('rush ')
                if info == '':
                    await message.channel.send(message.author.mention, file=discord.File(rush()))
                elif info.isdecimal():
                    im1 = cv2.imread(rush())
                    for i in range(int(info)-1):
                        im2 = cv2.imread(rush())
                        im1 = hconcat_resize_min([im1, im2])
                    cv2.imwrite('data/rushs.jpg', im1)
                    await message.channel.send(message.author.mention, file=discord.File('data/rushs.jpg'))
            # TRPG table rolls.
            elif msg.startswith('kanjo'):
                await message.channel.send(message.author.mention + ' ' + kanjo())
            elif msg.startswith('senjo'):
                await message.channel.send(message.author.mention + ' ' + senjo())
            elif msg.startswith('hencho'):
                await message.channel.send(message.author.mention + ' ' + hencho())
            elif msg.startswith('fumble'):
                await message.channel.send(message.author.mention + ' ' + fumble())
            elif msg.startswith('scene'):
                await message.channel.send(scene())
            # help
            elif msg.startswith('help'):
                await message.channel.send(helpmessage)
            # Fallback: dice roll syntax like "$2d6".
            else:
                info = re.split('\D+', message.content)
                print(info)
                # NOTE(review): info[1]/info[2] raise IndexError for inputs
                # with fewer than two number groups (e.g. "$foo") — confirm.
                if info:
                    if info[1].isdecimal() and info[2].isdecimal():
                        dice_num = int(info[1])
                        dice_size = int(info[2])
                        val = simple_dice(dice_size, dice_num)
                        await message.channel.send(message.author.mention + ' ' + str(dice_num) + 'd' + str(dice_size) + ': ' + str(val) + ' = ' + '**[' + str(np.sum(val)) + ']**')
# Start the bot; blocks until the process is stopped.
client.run(token)
| 11,562 | 5,832 |
from session import dra
from info import self_id

# Message ids that trigger a milestone celebration: powers of ten plus
# their single-digit multiples, plus a few meme numbers.
special_ids = [
    100, 1000, 10000, 100000, 1000000
]
for i in special_ids.copy():
    for j in range(1, 10):
        special_ids.append(i*j)
# NOTE(review): j starts at 1, so i*1 re-appends each base value; the
# duplicates are harmless because special_ids is only used for membership
# tests, but range(2, 10) would avoid them — confirm intent.
special_ids.extend([114514, 1919, 810, 1919810])

# Keyword fragments that process_keyword echoes back.
done = ['好了', '可以']
# '\u8C22' = 谢 ("thanks"); '\u5C04' = 射 (homophone meme spelling).
thanks = ['\u8C22', '\u5C04']

# Preferred ('good') and discouraged ('bad') file extensions per media kind.
extension = {
    'image': {
        'good': '.webp',
        'bad': ['.jpg', '.bmp']
    },
    # audio
    'video': {
        'good': '.mp4',
        'bad': ['.avi', '.rm']
    },
}
def process_id(chat_id, message_id):
    """Celebrate (and, when permitted, pin) a milestone message id.

    Returns True when message_id is a milestone, otherwise None.
    """
    if message_id in special_ids:
        dra.send_message(chat_id, f'祝贺本群第**{message_id}**条消息达成! 🎉', parse_mode='Markdown')
        # Pin only when the bot itself holds the pin permission in this chat.
        if dra.get_chat_member(chat_id, self_id).can_pin_messages:
            dra.pin_chat_message(chat_id, message_id, disable_notification=True)
        # NOTE(review): nesting reconstructed from flattened source —
        # `return True` assumed to sit inside the milestone branch; confirm.
        return True
def process_keyword(message):
    """Reply in kind to "done"/"thanks" style keywords in a message.

    Falls back to the caption when the message has no text. Returns the
    result of message.reply(...) on a match, otherwise None.
    """
    text = message.text
    if message.caption and not text:
        text = message.caption
    if not text:
        # Neither text nor caption (e.g. a bare sticker/photo): the original
        # would raise TypeError on `in None`; nothing to match here.
        return None
    for word in done:
        if f'我{word}' in text:
            return message.reply(f'我也{word}')
    for word in thanks:
        if f'已经{word}了' in text or f'我{word}了' in text:
            return message.reply(f'我也{word}了')
    return None
def process_msg(client, message):
    """Dispatch an incoming message to the milestone and keyword handlers."""
    process_id(message.chat.id, message.message_id)

    # Captioned media carry their text in `caption` rather than `text`.
    body = message.text
    if message.caption and not body:
        body = message.caption

    return process_keyword(message) if body else None
| 1,488 | 577 |
#!/usr/bin/env python
# coding: utf-8
import xs
import numpy as np
from scipy.integrate import odeint
from math import isclose
from constants import *
class Global_model:
    def __init__(self, p, input_power, duty, period, time_resolution=1e-8):
        """Set up a pulsed hydrogen-plasma global model.

        Parameters
        ----------
        p : gas pressure [mTorr]
        input_power : absorbed power [W]; stored internally in [eV/s]
        duty : pulse duty cycle (fraction of the period the power is on)
        period : pulse period [s]
        time_resolution : ODE output time step [s]
        """
        self.p = p
        self.input_power = input_power*6.241509e18 # [J/s] to [eV/s]
        self.duty = duty
        self.period = period
        self.time_resolution = time_resolution
        # Neutral gas density via ideal-gas law; p/7.5 converts mTorr to Pa
        # (1 Pa ≈ 7.5 mTorr), and 1e-6 converts m^-3 to cm^-3.
        self.ng = (p/7.5)/(Tg*kB)*1e-6 #[cm^-3]
        lambda_i = 1/(self.ng*sigma_i) #[cm] ion-neutral mean free path
        # Edge-to-center density ratio heuristics (axial hl, radial hR);
        # l and ro are the chamber length and radius from `constants`.
        hl = 0.86*(3+l/2/lambda_i)**-0.5
        hR = 0.8*(4+ro/lambda_i)**-0.5
        self.Aeff = 2*np.pi*ro*(l*hR+ro*hl) #[cm^2] effective area
        self.deff = V/self.Aeff #[cm]
        print('Condition : {}mTorr, {}W, {}ms, {}'.format(self.p, self.input_power/6.241509e18, self.period*1000, self.duty))
def balance_equations(self, calculation_array, t, power):
Te, nH, nH_2s, nH2_v1, nH2_v2, nH2_v3, nH2_v4, nH2_v5, nH2_v6, nH2_v7, nH2_v8, nH2_v9, nHp, nH2p, nH3p, nHm = calculation_array
uB = np.sqrt(e*Te/M)*100 #[cm/s]
uB2 = np.sqrt(e*Te/2/M)*100
uB3 = np.sqrt(e*Te/3/M)*100
#Vs = -Te*np.log(4/ne/np.sqrt(8*e*Te/np.pi/m)*(nHp*uB+nH2p*uB2+nH3p*uB3))
Vs = Te*np.log(np.sqrt(M/(2*np.pi*m)))
t0 = V/self.Aeff*np.sqrt(M/(e*Te))/100 #[s] Characteristic transit time of H+ ion
#k8,k9,k11의 Te가 매우 작을때의 Cross section값을 구해야한다. (k2는 괜찮음)
##### Rate coefficient calculation #####
k1_0 = xs.rate_constant_with_analytic_xs(Te, 'reaction1_0')
k1_1 = xs.rate_constant_with_analytic_xs(Te, 'reaction1_1')
k1_2 = xs.rate_constant_with_analytic_xs(Te, 'reaction1_2')
k1_3 = xs.rate_constant_with_analytic_xs(Te, 'reaction1_3')
k1_4 = xs.rate_constant_with_analytic_xs(Te, 'reaction1_4')
k2 = np.exp(-2.858072836568e+01+1.038543976082e+01*np.log(Te)-5.383825026583e+00*(np.log(Te))**2+1.950636494405e+00*(np.log(Te))**3-5.393666392407e-01*(np.log(Te))**4+1.006916814453e-01*(np.log(Te))**5-1.160758573972e-02*(np.log(Te))**6+7.411623859122e-04*(np.log(Te))**7-2.001369618807e-05*(np.log(Te))**8)
k3_1 = xs.rate_constant_with_point_xs(Te, 'reaction3_1')
k3_2 = xs.rate_constant_with_point_xs(Te, 'reaction3_2')
k3_3 = xs.rate_constant_with_point_xs(Te, 'reaction3_3')
k3_4 = xs.rate_constant_with_point_xs(Te, 'reaction3_4')
k3_5 = xs.rate_constant_with_point_xs(Te, 'reaction3_5')
k3_6 = xs.rate_constant_with_point_xs(Te, 'reaction3_6')
k3_1_inv =
k3_2_inv =
k3_3_inv =
k3_4_inv =
k3_5_inv =
k3_6_inv =
k4_0 =
k4_1 =
k4_2 =
k4_3 =
k4_4 =
k4_5 =
k4_6 =
k4_7 =
k4_8 =
k4_0_inv =
k4_1_inv =
k4_2_inv =
k4_3_inv =
k4_4_inv =
k4_5_inv =
k4_6_inv =
k4_7_inv =
k4_8_inv =
k5_0 = xs.rate_constant_with_analytic_xs(Te, 'reaction5_0')
k5_1 = xs.rate_constant_with_analytic_xs(Te, 'reaction5_1')
k5_2 = xs.rate_constant_with_analytic_xs(Te, 'reaction5_2')
k5_3 = xs.rate_constant_with_analytic_xs(Te, 'reaction5_3')
k5_4 = xs.rate_constant_with_analytic_xs(Te, 'reaction5_4')
k5_5 = xs.rate_constant_with_analytic_xs(Te, 'reaction5_5')
k5_6 = xs.rate_constant_with_analytic_xs(Te, 'reaction5_6')
k5_7 = xs.rate_constant_with_analytic_xs(Te, 'reaction5_7')
k5_8 = xs.rate_constant_with_analytic_xs(Te, 'reaction5_8')
k5_9 = xs.rate_constant_with_analytic_xs(Te, 'reaction5_9')
k6_0 = xs.rate_constant_with_point_xs(Te, 'reaction6_0')
k6_1 = xs.rate_constant_with_point_xs(Te, 'reaction6_1')
k6_2 = xs.rate_constant_with_point_xs(Te, 'reaction6_2')
k6_3 = xs.rate_constant_with_point_xs(Te, 'reaction6_2') #xs 데이터 보완 必
k6_4 = xs.rate_constant_with_point_xs(Te, 'reaction6_5') #xs 데이터 보완 必
k6_5 = xs.rate_constant_with_point_xs(Te, 'reaction6_5')
k6_6 = xs.rate_constant_with_point_xs(Te, 'reaction6_6')
k6_7 = xs.rate_constant_with_point_xs(Te, 'reaction6_7')
k6_8 = xs.rate_constant_with_point_xs(Te, 'reaction6_8')
k6_9 = xs.rate_constant_with_point_xs(Te, 'reaction6_9')
k7 = np.exp(-3.834597006782e+01+1.426322356722e+01*np.log(Te)-5.826468569506e+00*(np.log(Te))**2+1.727940947913e+00*(np.log(Te))**3-3.598120866343e-01*(np.log(Te))**4+4.822199350494e-02*(np.log(Te))**5-3.909402993006e-03*(np.log(Te))**6+1.738776657690e-04*(np.log(Te))**7-3.252844486351e-06*(np.log(Te))**8)
k8 = np.exp(-3.271396786375e+01+1.353655609057e+01*np.log(Te)-5.739328757388e+00*(np.log(Te))**2+1.563154982022e+00*(np.log(Te))**3-2.877056004391e-01*(np.log(Te))**4+3.482559773737e-02*(np.log(Te))**5-2.631976175590e-03*(np.log(Te))**6+1.119543953861e-04*(np.log(Te))**7-2.039149852002e-06*(np.log(Te))**8)
k9 = np.exp(-1.781416067709e+01+2.277799785711e+00*np.log(Te)-1.266868411626e+00*(np.log(Te))**2+4.296170447419e-01*(np.log(Te))**3-9.609908013189e-02*(np.log(Te))**4+1.387958040699e-02*(np.log(Te))**5-1.231349039470e-03*(np.log(Te))**6+6.042383126281e-05*(np.log(Te))**7-1.247521040900e-06*(np.log(Te))**8)
k10 = 2.1e-9
k11 = np.exp(-1.700270758355e+01-4.050073042947e-01*np.log(Te)+1.018733477232e-08*(np.log(Te))**2-1.695586285687e-08*(np.log(Te))**3+1.564311217508e-10*(np.log(Te))**4+1.979725412288e-09*(np.log(Te))**5-4.395545994733e-10*(np.log(Te))**6+3.584926377078e-11*(np.log(Te))**7-1.024189019465e-12*(np.log(Te))**8)
k12 = np.exp(-3.078408636631e+01+1.509421488513e+01*np.log(Te)-7.349167207324e+00*(np.log(Te))**2+2.320966107642e+00*(np.log(Te))**3-4.818077551719e-01*(np.log(Te))**4+6.389229162737e-02*(np.log(Te))**5-5.161880953089e-03*(np.log(Te))**6+2.303985092606e-04*(np.log(Te))**7-4.344846146197e-06*(np.log(Te))**8)
k13 = xs.rate_constant_with_point_xs(Te, 'reaction13')
k14 = np.exp(-1.801849334273e+01+2.360852208681e+00*np.log(Te)-2.827443061704e-01*(np.log(Te))**2+1.623316639567e-02*(np.log(Te))**3-3.365012031363e-02*(np.log(Te))**4+1.178329782711e-02*(np.log(Te))**5-1.656194699504e-03*(np.log(Te))**6+1.068275202678e-04*(np.log(Te))**7-2.631285809207e-06*(np.log(Te))**8)
k15 = 1.7e-9
k16 =
k17 = 4.4e-16 # at ion 0.02eV
k18 =
k19 = np.exp(-3.454175591367e+01+1.412655911280e+01*np.log(Te)-6.004466156761e+00*(np.log(Te))**2+1.589476697488e+00*(np.log(Te))**3-2.775796909649e-01*(np.log(Te))**4+3.152736888124e-02*(np.log(Te))**5-2.229578042005e-03*(np.log(Te))**6+8.890114963166e-05*(np.log(Te))**7-1.523912962346e-06*(np.log(Te))**8)
k20 = np.exp(-2.833259375256e+01+9.587356325603e+00*np.log(Te)-4.833579851041e+00*(np.log(Te))**2+1.415863373520e+00*(np.log(Te))**3-2.537887918825e-01*(np.log(Te))**4+2.800713977946e-02*(np.log(Te))**5-1.871408172571e-03*(np.log(Te))**6+6.986668318407e-05*(np.log(Te))**7-1.123758504195e-06*(np.log(Te))**8)
k21 = np.exp(-1.973476726029e+01+3.992702671457e+00*np.log(Te)-1.773436308973e+00*(np.log(Te))**2+5.331949621358e-01*(np.log(Te))**3-1.181042453190e-01*(np.log(Te))**4+1.763136575032e-02*(np.log(Te))**5-1.616005335321e-03*(np.log(Te))**6+8.093908992682e-05*(np.log(Te))**7-1.686664454913e-06*(np.log(Te))**8)
k22_1_0 = 0.42e-13 #non-reactive assumption
k22_2_0 = 0.59e-12
k22_2_1 = 0.30e-12
k22_3_0 = 0.15e-11
k22_3_1 = 0.16e-11
k22_3_2 = 0.20e-11
k22_4_0 = 0.43e-11
k22_4_1 = 0.42e-11
k22_4_2 = 0.49e-11
k22_4_3 = 0.55e-11
k22_5_0 = 0.16e-11
k22_5_1 = 0.37e-11
k22_5_2 = 0.69e-11
k22_5_3 = 0.74e-11
k22_5_4 = 0.89e-11
k22_6_0 = 0.33e-11
k22_6_1 = 0.51e-11
k22_6_2 = 0.53e-11
k22_6_3 = 0.69e-11
k22_6_4 = 0.11e-10
k22_6_5 = 0.12e-10
k22_7_0 = 0.24e-11
k22_7_1 = 0.38e-11
k22_7_2 = 0.68e-11
k22_7_3 = 0.57e-11
k22_7_4 = 0.70e-11
k22_7_5 = 0.11e-10
k22_7_6 = 0.12e-10
k22_8_0 = 0.30e-11
k22_8_1 = 0.29e-11
k22_8_2 = 0.29e-11
k22_8_3 = 0.35e-11
k22_8_4 = 0.56e-11
k22_8_5 = 0.82e-11
k22_8_6 = 0.12e-10
k22_8_7 = 0.14e-10
k22_9_0 = 0.52e-12
k22_9_1 = 0.14e-11
k22_9_2 = 0.30e-11
k22_9_3 = 0.37e-11
k22_9_4 = 0.48e-11
k22_9_5 = 0.53e-11
k22_9_6 = 0.92e-11
k22_9_7 = 0.13e-10
k22_9_8 = 0.14e-10
k23 =
k24 =
k25 =
k26 =
k27 =
k28_1_0 = 1
k28_2_0 = 0.6535
k28_2_1 = 0.35
k28_3_0 = 0.30023
k28_3_1 = 0.40221
k28_3_2 = 0.30023
k28_4_0 = 0.17949
k28_4_1 = 0.25373
k28_4_2 = 0.32389
k28_4_3 = 0.24312
k28_5_0 = 0.15093
k28_5_1 = 0.17867
k28_5_2 = 0.22844
k28_5_3 = 0.23986
k28_5_4 = 0.19662
k28_6_0 = 0.12483
k28_6_1 = 0.13462
k28_6_2 = 0.16399
k28_6_3 = 0.1958
k28_6_4 = 0.20478
k28_6_5 = 0.17541
k28_7_0 = 0.10035
k28_7_1 = 0.11096
k28_7_2 = 0.13054
k28_7_3 = 0.15991
k28_7_4 = 0.17949
k28_7_5 = 0.17051
k28_7_6 = 0.15093
k28_8_0 = 0.08648
k28_8_1 = 0.09056
k28_8_2 = 0.10688
k28_8_3 = 0.12483
k28_8_4 = 0.16888
k28_8_5 = 0.15991
k28_8_6 = 0.14033
k28_8_7 = 0.12564
k28_9_0 = 0.07506
k28_9_1 = 0.07832
k28_9_2 = 0.08974
k28_9_3 = 0.11014
k28_9_4 = 0.13951
k28_9_5 = 0.14359
k28_9_6 = 0.12483
k28_9_7 = 0.12238
k28_9_8 = 0.11503
##### Energy Loss per Reaction #####
E1_0 = 15.42
E1_1 = 15.42
E1_2 = 15.42
E1_3 = 15.42
E1_4 = 15.42
E2 = 8.5
E3_1 =
E3_2 =
E3_3 =
E3_4 =
E3_5 =
E3_6 =
E4_0 =
E4_1 =
E4_2 =
E4_3 =
E4_4 =
E4_5 =
E4_6 =
E4_7 =
E4_8 =
E5_0 = Te
E5_1 = Te
E5_2 = Te
E5_3 = Te
E5_4 = Te
E5_5 = Te
E5_6 = Te
E5_7 = Te
E5_8 = Te
E5_9 = Te
E6_0 = 20 # XS데이터가 다 20부터 시작임
E6_1 = 20
E6_2 = 20
E6_3 = 20
E6_4 = 20
E6_5 = 20
E6_6 = 20
E6_7 = 20
E6_8 = 20
E6_9 = 20
E7 = 18
E8 = 13.6
E9 = 10.5
E10 = 0
E11 = Te
E12 = 14
E13 = Te
E14 = 0.75
E15 = 0
E16 = 0
E17 = 0
E18 = 0
E19 = 15.3
E20 = 10.2
E21 = 3.4
E22 = 0
#Quasi-Neutrality eqn 완료
ne = nHp + nH2p + nH3p - nHm
#Hydrogen atom conservation eqn 완료
nH2_v0 = self.ng - (0.5*(nH + nHp + nH_2s + nHm) + sum(calculation_array[3:12]) + nH2p + 1.5*nH3p)
#Particle balance eqn for electron 완료
dne_dt = (k1_0*ne*nH2_v0) + (k1_1*ne*nH2_v1) + (k1_2*ne*nH2_v2) + (k1_3*ne*nH2_v3) + (k1_4*ne*nH2_v4) \
- (k5_0*ne*nH2_v0) - (k5_1*ne*nH2_v1) - (k5_2*ne*nH2_v2) - (k5_3*ne*nH2_v3) - (k5_4*ne*nH2_v4) - (k5_5*ne*nH2_v5) - (k5_6*ne*nH2_v6) - (k5_7*ne*nH2_v7) - (k5_8*ne*nH2_v8) - (k5_9*ne*nH2_v9) + (k7*ne*nH2_v0) + (k8*ne*nH) - (k11*ne*nH3p) - (k13*ne*nH3p) + (k14*ne*nHm) + (k15*nH*nHm) + (k21ne*nH_2s) - ne*uB*Aeff/V
#Power balance eqn for electron 아직임
dTe_dt = 2/(3*ne)*(power(t)/V - (Vs+2.5*Te)*ne*uB*Aeff/V - 3/2*Te*dne_dt - (k1*nH*E1*ne + k2*nHp*E2*ne + k3*nH2_v0*E3*ne + k4*nH2_v0*E4*ne + k5*nH2_v0*E5*ne + k6*nH2p*E6*ne + k7*nH2p*E7*ne + k8*nH2p*E8*ne + k9*nH3p*E9*ne + k10*nH3p*E10*ne + k11*nH2p*E11*nH2_v0))
#Particle balance eqn for other species except electron 아직임
dnH_dt = 2*(k2*ne*nH2_v0) + (k5_0*ne*nH2_v0) + (k5_1*ne*nH2_v1) + (k5_2*ne*nH2_v2) + (k5_3*ne*nH2_v3) + (k5_4*ne*nH2_v4) + (k5_5*ne*nH2_v5) + (k5_6*ne*nH2_v6) + (k5_7*ne*nH2_v7) + (k5_8*ne*nH2_v8) + (k5_9*ne*nH2_v9) + (k7*ne*nH2_v0) - (k8*ne*nH) + (k9*ne*nH2p) + (k10*nH2p*nH2_v0) + (k11*ne*nH3p) + 2*(k12*ne*nH3p) + (k14*ne*nHm) - (k15*nH*nHm) + (k16*nHp*nHm) + (k17*nH2p*nHm) + 2*(k18*nH3p*nHm) + (k19*ne*nH2_v0) - (k20*ne*nH) +k23 + k24 + k26 + k27
dnH_2s_dt = (k16*nHp*nHm) + (k19*ne*nH2_v0) + (k20*ne*nH) - (k21*ne*nH_2s) - k27
dnH2_v1_dt =
dnH2_v2_dt =
dnH2_v3_dt =
dnH2_v4_dt =
dnH2_v5_dt =
dnH2_v6_dt =
dnH2_v7_dt =
dnH2_v8_dt =
dnH2_v9_dt =
dnHp_dt = (k7*ne*nH2_v0) + (k8*ne*nH) + (k9*ne*nH2p) + (k12*ne*nH3p) - (k16*nHp*nHm) + (k21*ne*nH_2s) - k24
dnH2p_dt = (k1_0*ne*nH2_v0) + (k1_1*ne*nH2_v1) + (k1_2*ne*nH2_v2) + (k1_3*ne*nH2_v3) + (k1_4*ne*nH2_v4) - (k9*ne*nH2p) - (k10*nH2p*nH2_v0) + (k13*ne*nH3p) - (k17*nH2p*nHm) - k25
dnH3p_dt = (k10*nH2p*nH2_v0) - (k12*ne*nH3p) - (k13*ne*nH3p) - (k18*nH3p*nHm) - k26
dnHm_dt = (k5_1*ne*nH2_v1) + (k5_2*ne*nH2_v2) + (k5_3*ne*nH2_v3) + (k5_4*ne*nH2_v4) + (k5_5*ne*nH2_v5) + (k5_6*ne*nH2_v6) + (k5_7*ne*nH2_v7) + (k5_8*ne*nH2_v8) + (k5_9*ne*nH2_v9) + (k13*ne*nH3p) - (k14*ne*nHm) - (k15*nH*nHm) - (k16*nHp*nHm) - (k17*nH2p*nHm) - (k18*nH3p*nHm)
return [dTe_dt, dnH_dt, dnH_2s_dt, dnH2_v1_dt, dnH2_v2_dt, dnH2_v3_dt, dnH2_v4_dt, dnH2_v5_dt, dnH2_v6_dt, dnH2_v7_dt, dnH2_v8_dt, dnH2_v9_dt, dnHp_dt, dnH2p_dt, dnH3p_dt, dnHm_dt]
#Pulsed power generate function
def pulse_power(self, t):
if t <= self.duty*self.period:
return self.input_power
else:
return 0
#Temperature & Density Calculation
def routine(self, init_value):
routine_time_interval = np.linspace(0, self.period, int(self.period/self.time_resolution))
routine_result = odeint(balance_equations, init_value, routine_time_interval, args=(pulse_power,), rtol=10**-3, mxstep=10**6)
#routine_result = np.transpose(routine_result)
return routine_result
def iteration(self):
init_value = [2, 1e10, 1e10, 1e10, 1e10, 1e10, 1e10, 1e10, 1e10, 1e10, 1e10, 1e10, 1e10, 1e10, 1e10, 1e10]
routine_result = routine(init_value)
count = 0
Hm_compare = 1
while True:
init_value = routine_result[-1]
if not isclose(routine_result[:,15][-1], Hm_compare, rel_tol=1e-2):
if iteration_number > 100:
print('did not converge')
break
Hm_compare = routine_result[:,15][-1]
routine_result = routine(init_value)
count += 1
continue
print('---------calculation complete!---------')
print('iteration count : {}'.format(count))
print('---------------------------------------')
routine_result = np.transpose(routine_result)
Te, nH, nH_2s, nH2_v1, nH2_v2, nH2_v3, nH2_v4, nH2_v5, nH2_v6, nH2_v7, nH2_v8, nH2_v9, nHp, nH2p, nH3p, nHm = routine_result
ne = nHp + nH2p + nH3p - nHm
nH2_v0 = self.ng - (0.5*(nH + nHp + nH_2s + nHm) + sum(routine_result[3:12]) + nH2p + 1.5*nH3p)
break
return Te, ne, nH, nH_2s, nH2_v0, nH2_v1, nH2_v2, nH2_v3, nH2_v4, nH2_v5, nH2_v6, nH2_v7, nH2_v8, nH2_v9, nHp, nH2p, nH3p, nHm # 18개
| 15,608 | 9,512 |
import urllib3
import certifi

# It is absolutely CRITICAL that you use certificate validation to ensure and guarantee that
# 1. you are indeed sending the message to *.hanatrial.ondemand.com and
# 2. that you avoid the possibility of TLS/SSL MITM attacks which would allow a malicious person to capture the OAuth token
# URLLIB3 DOES NOT VERIFY CERTIFICATES BY DEFAULT
# Therefore, install urllib3 and certifi and specify the PoolManager as below to enforce certificate check
# See https://urllib3.readthedocs.org/en/latest/security.html for more details

# use with or without proxy
http = urllib3.PoolManager(
    cert_reqs='CERT_REQUIRED',  # Force certificate check.
    ca_certs=certifi.where(),   # Path to the Certifi bundle.
)
# http = urllib3.proxy_from_url('http://proxy_host:proxy_port')

# interaction for a specific Device instance - replace 'd000-e000-v000-i000-c000-e001' with your specific Device ID
url = 'https://iotmms_on_your_trial_system.hanatrial.ondemand.com/com.sap.iotservices.mms/v1/api/http/data/d000-e000-v000-i000-c000-e001'

headers = urllib3.util.make_headers()
# use with authentication
# please insert correct OAuth token
headers['Authorization'] = 'Bearer ' + 'your_oauth_token'
headers['Content-Type'] = 'application/json;charset=utf-8'

# send message of Message Type 'm0t0y0p0e1' and the corresponding payload layout that you defined in the IoT Services Cockpit
body='{"mode":"async", "messageType":"m0t0y0p0e1", "messages":[{"sensor":"sensor1", "value":"20", "timestamp":1468991773}]}'
# It is also possible to send multiple messages (3 in this example) in a single request that conform to the same message type.
# body='{"mode":"async", "messageType":"m0t0y0p0e1", "messages":[{"sensor":"sensor1", "value":"20", "timestamp":1468991773},{"sensor":"sensor1", "value":"21", "timestamp":1468991873},{"sensor":"sensor1", "value":"22", "timestamp":1468991973}]}'
# Because every message field in a message type definition defines its position (see message type example above) it is also possible to compress the messages array by omitting the field names.
# Please be aware is that value order is very important in this case (it should match to the message type field positions like specified during message type creation)
# body='{"mode":"async","messageType":"m0t0y0p0e1","messages":[["sensor1","20",1468991773],["sensor1","21",1468991873],["sensor1","22",1468991973]]}'
# For more information with regard to Communication Handling, please refer to online documentation at https://help.hana.ondemand.com/iot -> Message Management Service API -> Interfaces and Communication Handling

try:
    r = http.urlopen('POST', url, body=body, headers=headers)
    print(r.status)
    print(r.data)
except urllib3.exceptions.SSLError as e:
    # Fixed: the original used the Python 2 statement `print e`, a syntax
    # error under Python 3.
    print(e)
| 2,764 | 931 |
import os
from os.path import join
import logging
import collections
import cv2 as cv
from vframe.settings import types
from vframe.models.video_item import VideoQuality
from vframe.utils import click_utils
# -----------------------------------------------------------------------------
# Enum lists used for custom Click Params
# -----------------------------------------------------------------------------
ClassifyNetVar = click_utils.ParamVar(types.ClassifyNet)
DetectorNetVar = click_utils.ParamVar(types.DetectorNet)
PyTorchNetVar = click_utils.ParamVar(types.PyTorchNet)
SceneTextNetVar = click_utils.ParamVar(types.SceneTextNet)
FaceDetectNetVar = click_utils.ParamVar(types.FaceDetectNet)
SearchParamVar = click_utils.ParamVar(types.SearchParam)
ClientRecordVar = click_utils.ParamVar(types.ClientRecord)
MetadataTreeVar = click_utils.ParamVar(types.MetadataTree)
ImageSizeVar = click_utils.ParamVar(types.ImageSize)
VideoQualityVar = click_utils.ParamVar(types.VideoQuality)
DataStoreVar = click_utils.ParamVar(types.DataStore)
FileExtVar = click_utils.ParamVar(types.FileExt)
KeyframeMetadataVar = click_utils.ParamVar(types.KeyframeMetadata)
MediaRecordVar = click_utils.ParamVar(types.MediaRecord)
VerifiedVar = click_utils.ParamVar(types.Verified)
MediaFormatVar = click_utils.ParamVar(types.MediaFormat)
MetadataVar = click_utils.ParamVar(types.Metadata)
LogLevelVar = click_utils.ParamVar(types.LogLevel)
ActionVar = click_utils.ParamVar(types.Action)
# # data_store
DATA_STORE = '/data_store_hdd/'
DIR_DATASETS = join(DATA_STORE,'datasets')
DIR_APPS = join(DATA_STORE,'apps')
DIR_APP_VFRAME = join(DIR_APPS,'vframe')
DIR_APP_SA = join(DIR_APPS, 'syrianarchive')
DIR_MODELS_VFRAME = join(DIR_APP_VFRAME,'models')
DIR_MODELS_SA = join(DIR_APP_SA,'models')
# # Frameworks
DIR_MODELS_OPENCV = join(DIR_MODELS_VFRAME,'caffe')
DIR_MODELS_CAFFE = join(DIR_MODELS_VFRAME,'caffe')
DIR_MODELS_DARKNET = join(DIR_MODELS_VFRAME,'darknet')
DIR_MODELS_DARKNET_PJREDDIE = join(DIR_MODELS_DARKNET, 'pjreddie')
DIR_MODELS_DARKNET_VFRAME = join(DIR_MODELS_DARKNET, 'vframe')
DIR_MODELS_PYTORCH = join(DIR_MODELS_VFRAME,'pytorch')
DIR_MODELS_TORCH = join(DIR_MODELS_VFRAME,'torch')
DIR_MODELS_MXNET = join(DIR_MODELS_VFRAME,'mxnet')
DIR_MODELS_TF = join(DIR_MODELS_VFRAME,'tensorflow')
DIR_MODELS_DLIB = join(DIR_MODELS_VFRAME,'dlib')
DIR_MODELS_DLIB_CNN = join(DIR_MODELS_DLIB, 'mmod_human_face_detector.dat')
DIR_MODELS_DLIB_5PT = join(DIR_MODELS_DLIB, 'shape_predictor_5_face_landmarks.dat')
DIR_MODELS_DLIB_68PT = join(DIR_MODELS_DLIB, 'shape_predictor_68_face_landmarks.dat')
# Test images
DIR_TEST_IMAGES = join(DIR_APP_SA, 'test', 'images')
# -----------------------------------------------------------------------------
# Drawing, GUI settings
# -----------------------------------------------------------------------------
DIR_ASSETS = join(DIR_APP_VFRAME, 'assets')
FP_FONT = join(DIR_ASSETS, 'font')
# -----------------------------------------------------------------------------
# click chair settings
# -----------------------------------------------------------------------------
DIR_COMMANDS_PROCESSOR_CHAIR = 'vframe/commands/'
DIR_COMMANDS_PROCESSOR_VCAT = 'vcat/commands/'
DIR_COMMANDS_PROCESSOR_ADMIN = 'admin/commands'
# -----------------------------------------------------------------------------
# Sugarcube dates
# Dates the snapshots are made
# -----------------------------------------------------------------------------
SUGARCUBE_DATES = ['20180611']
# -----------------------------------------------------------------------------
# Filesystem settings
# hash trees enforce a maximum number of directories per directory
# -----------------------------------------------------------------------------
ZERO_PADDING = 6 # padding for enumerated image filenames
#FRAME_NAME_ZERO_PADDING = 6 # is this active??
CKPT_ZERO_PADDING = 9
HASH_TREE_DEPTH = 3
HASH_BRANCH_SIZE = 3
# -----------------------------------------------------------------------------
# Logging options exposed for custom click Params
# -----------------------------------------------------------------------------
LOGGER_NAME = 'vframe'
LOGLEVELS = {
types.LogLevel.DEBUG: logging.DEBUG,
types.LogLevel.INFO: logging.INFO,
types.LogLevel.WARN: logging.WARN,
types.LogLevel.ERROR: logging.ERROR,
types.LogLevel.CRITICAL: logging.CRITICAL
}
LOGLEVEL_OPT_DEFAULT = types.LogLevel.DEBUG.name
#LOGFILE_FORMAT = "%(asctime)s: %(levelname)s: %(message)s"
#LOGFILE_FORMAT = "%(levelname)s:%(name)s: %(message)s"
#LOGFILE_FORMAT = "%(levelname)s: %(message)s"
#LOGFILE_FORMAT = "%(filename)s:%(lineno)s %(funcName)s() %(message)s"
# colored logs
"""
black, red, green, yellow, blue, purple, cyan and white.
{color}, fg_{color}, bg_{color}: Foreground and background colors.
bold, bold_{color}, fg_bold_{color}, bg_bold_{color}: Bold/bright colors.
reset: Clear all formatting (both foreground and background colors).
"""
LOGFILE_FORMAT = "%(log_color)s%(levelname)-8s%(reset)s %(cyan)s%(filename)s:%(lineno)s:%(bold_cyan)s%(funcName)s() %(reset)s%(message)s"
# -----------------------------------------------------------------------------
# Media formats accepted by VFRAME
# -----------------------------------------------------------------------------
VALID_MEDIA_EXTS = {
types.MediaFormat.VIDEO: ['mp4','mov','avi'],
types.MediaFormat.PHOTO: ['jpg','jpeg','png']
}
# -----------------------------------------------------------------------------
# Image size for web images
# -----------------------------------------------------------------------------
# order here is used for efficient image-pyramid resizing
IMAGE_SIZES = collections.OrderedDict()
IMAGE_SIZES[types.ImageSize.THUMB] = 160
IMAGE_SIZES[types.ImageSize.SMALL] = 320
IMAGE_SIZES[types.ImageSize.MEDIUM] = 640
IMAGE_SIZES[types.ImageSize.LARGE] = 1280
IMAGE_SIZE_LABELS = collections.OrderedDict()
IMAGE_SIZE_LABELS[types.ImageSize.THUMB] = 'th'
IMAGE_SIZE_LABELS[types.ImageSize.SMALL] = 'sm'
IMAGE_SIZE_LABELS[types.ImageSize.MEDIUM] = 'md'
IMAGE_SIZE_LABELS[types.ImageSize.LARGE] = 'lg'
DEFAULT_SIZE_LABEL_FEAT_EXTRACT = IMAGE_SIZE_LABELS[types.ImageSize.MEDIUM]
JPG_SAVE_QUALITY = 75
KEYFRAME_EXT = 'jpg'
# Define video quality metrics (w, h, fps, sec)
VIDEO_QUALITY = collections.OrderedDict()
VIDEO_QUALITY[types.VideoQuality.POOR] = VideoQuality(160, 90, 12, 2)
VIDEO_QUALITY[types.VideoQuality.LOW] = VideoQuality(320, 180, 12, 2)
VIDEO_QUALITY[types.VideoQuality.MEDIUM] = VideoQuality(640, 360, 12, 2)
VIDEO_QUALITY[types.VideoQuality.HIGH] = VideoQuality(1280, 720, 12, 2) # HD Ready
VIDEO_QUALITY[types.VideoQuality.HD] = VideoQuality(1920, 1080, 24, 2) # Full HD
# -----------------------------------------------------------------------------
# OpenCV backend and target
# used for optimizing DNN inference speeds
# requires OpenCV >= 3.4.2
# -----------------------------------------------------------------------------
OPENCV_DNN_BACKENDS = {
types.CVBackend.DEFAULT: cv.dnn.DNN_BACKEND_DEFAULT,
types.CVBackend.HALIDE: cv.dnn.DNN_BACKEND_HALIDE,
types.CVBackend.INFER_ENGINE: cv.dnn.DNN_BACKEND_INFERENCE_ENGINE,
types.CVBackend.OPENCV: cv.dnn.DNN_BACKEND_OPENCV
}
OPENCV_DNN_TARGETS = {
types.CVTarget.CPU: cv.dnn.DNN_TARGET_CPU,
types.CVTarget.OPENCL: cv.dnn.DNN_TARGET_OPENCL,
types.CVTarget.OPENCL_FP16: cv.dnn.DNN_TARGET_OPENCL_FP16,
types.CVTarget.MYRIAD: cv.dnn.DNN_TARGET_MYRIAD
}
OPENCV_BACKEND_DEFAULT = types.CVBackend.OPENCV
OPENCV_TARGET_DEFAULT = types.CVTarget.OPENCL_FP16
# -----------------------------------------------------------------------------
# Minimum keyframe extraction video attributes
# -----------------------------------------------------------------------------
KEYFRAME_MIN_WIDTH = 640
#KEYFRAME_MIN_WIDTH = 480 # some verified videos are this small, ignore for now
KEYFRAME_MIN_HEIGHT = 320
KEYFRAME_MIN_FPS = 10
KEYFRAME_MIN_FRAMES = 90
# -----------------------------------------------------------------------------
# Defaults
# -----------------------------------------------------------------------------
| 8,086 | 2,810 |
from django.db import models
from django.core.validators import MinValueValidator, MaxValueValidator
from teachers.models import Teacher
from students.models import Student
# Choices for Enrollment.status: (stored value, human-readable label).
STATUS = (
    ('Passed', 'Passed'),
    ('Failed', 'Failed'),
    ('Pending', 'Pending'),
)
class Course(models.Model):
    """A catalogue course, identified by a unique short code."""
    title = models.CharField(max_length=100)
    # Short unique identifier (e.g. a department code) used for lookups.
    code = models.CharField(max_length=10, unique=True)
    credit = models.PositiveIntegerField()

    def __str__(self):
        return self.title
class Instructor(models.Model):
    """A (teacher, course) pairing — one concrete offering of a course."""
    teacher = models.ForeignKey(Teacher, on_delete=models.CASCADE)
    course = models.ForeignKey(Course, on_delete=models.CASCADE)
    about = models.TextField(blank=True, null=True)

    class Meta:
        # A given teacher teaches a given course at most once.
        unique_together = ('course', 'teacher')

    def __str__(self):
        return f'{self.course.title} - {self.teacher.__str__()}'
class Enrollment(models.Model):
    """A student's enrollment in a course offering, with derived grading.

    `grade` and `status` are recomputed from `score` on every save().
    """
    student = models.ForeignKey(Student, on_delete=models.CASCADE)
    course = models.ForeignKey(Instructor, on_delete=models.CASCADE)
    semester = models.PositiveIntegerField()
    # 0-100; None until the course is graded.
    score = models.PositiveIntegerField(
        blank=True, null=True, validators=[MinValueValidator(0), MaxValueValidator(100)])
    grade = models.CharField(max_length=5, blank=True, null=True)
    status = models.CharField(choices=STATUS, max_length=10, default='Pending')
    comments = models.TextField(blank=True, null=True)

    class Meta:
        unique_together = ('course', 'student')

    def __str__(self):
        return f'{self.student.__str__()} takes {self.course.course.title}'

    def save(self, *args, **kwargs):
        # `is not None` (not truthiness): the original `if self.score:` left a
        # legitimate score of 0 ungraded and permanently 'Pending'.
        if self.score is not None:
            mark = self.score // 10
            self.status = 'Passed'
            if mark > 8:        # 90-100
                self.grade = 'A'
            elif mark > 7:      # 80-89
                self.grade = 'B'
            elif mark > 6:      # 70-79
                self.grade = 'C'
            elif mark > 5:      # 60-69
                self.grade = 'D'
            else:               # below 60 fails
                self.grade = 'F'
                self.status = 'Failed'
        return super().save(*args, **kwargs)
| 2,082 | 642 |
import json
from loguru import logger
from .gcloud_config import gcloud_config_get
from .util.cmd import cmd
class _GCloudSavedAuth(object):
    """A ContextManager that will save currently used gcloud authentication upon entering
    and restore the saved authentication upon exiting."""

    def __enter__(self):
        # Remember the account currently configured ("" when none is set),
        # so __exit__ can restore it.
        self.previously_used_account = gcloud_config_get("core/account") or ""
        logger.trace(f"Memorizing in-use account '{self.previously_used_account}'")
        # Return the manager so `with _GCloudSavedAuth() as auth:` binds it
        # (the original returned None, the implicit default).
        return self

    def __exit__(self, type, value, traceback):
        # Restore previously used authentication
        cmd(["gcloud", "config", "set", "account", self.previously_used_account])
        logger.trace(f"Restored previously used account '{self.previously_used_account}'")
class GCloudServiceAccountAuth(_GCloudSavedAuth):
    """A ContextManager used to change the gcloud authentication to a specific
    service account. The previously used authentication is restored upon exiting.
    See: gcloud auth activate-service-account --help"""

    def __init__(self, service_account_key_path):
        self.service_account_key_path = service_account_key_path

    def __enter__(self):
        # Read service account identifier from the key file.
        with open(self.service_account_key_path, "r") as f:
            service_account_data = json.load(f)
        self.service_account_identifier = service_account_data["client_email"]
        super().__enter__()  # Save current auth
        # Activate service account
        cmd(["gcloud", "auth", "activate-service-account",
             f"--key-file={self.service_account_key_path}"])
        logger.trace(f"Authenticated with '{self.service_account_key_path}'")
        # Expose the manager to `with ... as auth:` (original returned None).
        return self
class GCloudAccountIdAuth(_GCloudSavedAuth):
    """A ContextManager used to change the gcloud authentication to a specific account-id.
    The user will be prompted / a browser might be opened if gcloud doesn't have cached
    credentials for this account.
    See: gcloud auth login --help"""

    def __init__(self, account_id):
        self.account_id = account_id

    def __enter__(self):
        super().__enter__()  # Save current auth
        cmd(["gcloud", "auth", "login", self.account_id])  # Activate new auth
        # Expose the manager to `with ... as auth:` (original returned None).
        return self
| 2,507 | 692 |
# Print a plain-text calendar for a user-supplied month.
import calendar

# Read the year and month from the user.
year = int(input("Enter year: "))
month = int(input("Enter month: "))

# Render and display the month's calendar.
print(calendar.month(year, month))
#!/usr/bin/env python
""" For a set of data files used in our dashboard,
generate:
1. a summary file called nssac-ncov-sd-summary.csv with below headers:
(for all countries)
date,totalConfirmed,totalDeaths,totalRecovered,newConfirmed,newDeaths,newRecovered
date in mm-dd-yyyy format
total stands for cumulative amount as
new stands for changes in previous day
2. nssac-ncov-sd-region_list.csv
use latest data file to generate a csv file to populate region drop down menu
3. region_first_case.txt (not finished)
this will be used for timeline, currently shows some error when running
4. a summary file for each region in nssac-ncov-sd-region_list.csv
Notes from DX:
    1. let's store files in a new folder called "regions" under the current data folder.
2. the format should be same as nssac-ncov-sd-summary.csv, if using region's name
as part of file name is hard, you might consider to add an id column in nssac-ncov-sd-summary.csv
for linking purpose. you can also use "ISO_3" code in file name
(regions.csv is checked in as reference)
3. pay attention to "Mainland China" and we need to aggregate data for all 31 provinces.
By Dawen Xie
"""
import sys
import os
import csv
import glob
def main():
    """Generate dashboard summary CSVs from the daily nssac-ncov-sd files.

    Reads `sys.argv[1]` as the data folder and writes, in the current
    directory: summary1.csv (daily cumulative totals),
    nssac-ncov-sd-summary.csv (totals plus day-over-day deltas),
    nssac-ncov-sd-region_list.csv (dropdown entries) and
    region_first_case.txt (first-case dates; still unfinished upstream).
    """
    datafolder = sys.argv[1]
    if not os.path.isdir(datafolder):
        print(f"ERROR: Data folder {datafolder} does not exist. Exiting...")
        sys.exit()

    # 1a. Cumulative totals per day, one row per daily source file.
    total_by_date_file = "summary1.csv"
    print(f"Generate total by date file: {total_by_date_file}")
    latest_file = ""
    with open(total_by_date_file, "w") as out:
        out.write("date,totalConfirmed,totalDeaths,totalRecovered\n")
        for source_file in sorted(glob.glob(f"{datafolder}/nssac-ncov-sd-??-*.csv")):
            latest_file = source_file
            # File names end in mm-dd-yyyy: drop the extension, keep 10 chars.
            date_str = source_file.split(".")[0][-10:]
            tC, tD, tR = 0, 0, 0
            with open(source_file) as csv_file:
                csv_reader = csv.reader(csv_file, delimiter=',')
                next(csv_reader, None)  # skip header row
                for row in csv_reader:
                    tC += int(row[3])
                    tD += int(row[4])
                    tR += int(row[5])
            out.write(f"{date_str},{tC},{tD},{tR}\n")

    # 1b. Totals plus day-over-day deltas (first day's deltas are 0).
    summary_file = "nssac-ncov-sd-summary.csv"
    print(f"Generate nssac summary file: {summary_file}")
    with open(summary_file, "w") as out:
        out.write("date,totalConfirmed,totalDeaths,totalRecovered,newConfirmed,newDeaths,newRecovered\n")
        with open(total_by_date_file) as csv_file:
            csv_reader = csv.reader(csv_file, delimiter=',')
            next(csv_reader, None)  # skip header row
            prev = None  # (confirmed, deaths, recovered) of the previous day
            for row in csv_reader:
                if prev is None:
                    out.write(f"{row[0]},{row[1]},{row[2]},{row[3]},0,0,0\n")
                else:
                    dC = int(row[1]) - int(prev[0])
                    dD = int(row[2]) - int(prev[1])
                    dR = int(row[3]) - int(prev[2])
                    out.write(f"{row[0]},{row[1]},{row[2]},{row[3]},{dC},{dD},{dR}\n")
                prev = row[1:]

    # 2/3. Region dropdown list and (unfinished) first-case timeline data.
    print(f"latest file: {latest_file}")
    region_list_file = "nssac-ncov-sd-region_list.csv"
    region_first_case_file = "region_first_case.txt"
    # Portable replacement for `os.system("rm ...")`, which printed an error
    # the first time the file did not exist.
    if os.path.exists(region_first_case_file):
        os.remove(region_first_case_file)
    region_list = []
    with open(region_list_file, "w") as out:
        out.write("All regions\n")
        with open(latest_file) as csv_file:
            csv_reader = csv.reader(csv_file, delimiter=',')
            next(csv_reader, None)  # skip header row
            for row in csv_reader:
                # "Mainland China (Hubei)" -> "Mainland China"
                region = row[1].split('(')[0].strip()
                if region not in region_list:
                    region_list.append(region)
                    # First file mentioning the region gives its first-case
                    # date. Region is quoted so multi-word names survive the
                    # shell (the original broke on e.g. "South Korea").
                    cmd = f"grep '{region}' {datafolder}/nssac-ncov-sd-??-*.csv | head -n 1 | cut -d'/' -f9 >> {region_first_case_file}"
                    os.system(cmd)
        for region in sorted(region_list):
            out.write(f"{region}\n")
    print("all done...")
if __name__ == '__main__':
main()
| 4,656 | 1,556 |
import qtmodern.styles
import qtmodern.windows
from project.firewall.configFirewall import CreateFwWindow, EditFwWindow, DeleteFwWindow
from project.firewall.firewallScripts import firewallGlobalInfo, setDefaultZone, defaultZone
from project.firewall.tableFirewall import *
from PyQt5 import QtCore
from PyQt5.QtWidgets import *
from project.firewall.tableFirewall import listservices, listZoneModified, listports
firewallIsrunnig=True
def getContentFirewall(self):
    """Build the firewall tab: button row plus zones table inside a scroll area.

    Side effects: creates self.gridFw, self.groupBox, self.containerFw and
    self.scroll; relies on createTableFw / createFwButtons to create
    self.tableFw and self.hboxbtn respectively.
    """
    self.gridFw = QGridLayout()
    # NOTE(review): the table is built BEFORE createFwButtons discovers whether
    # firewalld is actually running, and that discovery only rebinds a local
    # variable - so createTableFw always sees the initial True; verify intent.
    createTableFw(self,firewallIsrunnig)
    createFwButtons(self,firewallIsrunnig)
    self.groupBox = QGroupBox()
    self.containerFw = QVBoxLayout()
    self.containerFw.addLayout(self.gridFw)
    self.containerFw.addLayout(self.hboxbtn)
    # NOTE(review): if createTableFw ever runs with a False flag it creates no
    # self.tableFw, and this line would raise AttributeError - verify.
    self.containerFw.addWidget(self.tableFw)
    self.containerFw.addStretch()
    self.groupBox.setLayout(self.containerFw)
    self.scroll = QScrollArea()
    self.scroll.setFixedWidth(1150)
    self.scroll.setWidget(self.groupBox)
    self.bottomRightLayout.addWidget(self.scroll)
def createFwButtons(self,firewallIsrunnig):
    """Build the header row: default-zone label plus Add/Edit/Delete buttons.

    Creates self.hboxbtn, self.defaultZone, self.addBtn, self.editBtn and
    self.deleteBtn; the buttons are only added while firewalld is reachable.
    """
    self.hboxbtn = QHBoxLayout()
    try:
        # defaultZone() returns a sequence; an empty result (IndexError on [0])
        # is treated as "firewalld not running"
        defaultzone = defaultZone()[0]
        self.defaultZone = QLabel(f"DEFAULT ZONE : {defaultzone}")
        self.defaultZone.move(10, 10)
    except IndexError:
        QMessageBox.critical(self, 'error', f'Please install Firewalld or start the service ')
        self.defaultZone = QLabel("FIREWALLD SERVICE IS NOT RUNNING")
        # NOTE(review): rebinds only the local parameter - the module-level
        # flag and the already-created table are unaffected; verify intent.
        firewallIsrunnig=False
    self.defaultZone.setStyleSheet("color: #303a46;font: bold 14px;")
    self.addBtn = QPushButton('Add')
    self.editBtn = QPushButton('Edit')
    self.deleteBtn = QPushButton('Delete')
    self.addBtn.setFixedHeight(30)
    self.addBtn.setFixedWidth(120)
    self.editBtn.setFixedHeight(30)
    self.editBtn.setFixedWidth(120)
    self.deleteBtn.setFixedHeight(30)
    self.deleteBtn.setFixedWidth(120)
    # Add opens the create dialog, pre-filled when exactly one row is ticked
    self.addBtn.clicked.connect(lambda: createUsersWindow(self, self.dic4))
    self.addBtn.setStyleSheet("color: #ecf0f1; background-color: #2ecc71 ; border: 0px solid #2c3e50")
    self.editBtn.clicked.connect(lambda: editFWWindow(self))
    self.editBtn.setStyleSheet("color: #ecf0f1; background-color: #34495e ; border: 0px solid #2c3e50")
    self.deleteBtn.clicked.connect(lambda: deleteFwWindow(self, self.dic4))
    self.deleteBtn.setStyleSheet("color: #ecf0f1; background-color: #e74c3c; border: 0px solid #2c3e50")
    self.hboxbtn.addWidget(self.defaultZone)
    # stretches push the buttons to the right-hand edge of the row
    self.hboxbtn.addStretch()
    self.hboxbtn.addStretch()
    self.hboxbtn.addStretch()
    self.hboxbtn.addStretch()
    if firewallIsrunnig :
        self.hboxbtn.addWidget(self.addBtn)
        self.hboxbtn.addWidget(self.editBtn)
        self.hboxbtn.addWidget(self.deleteBtn)
def createTableFw(self,firewallIsrunnig):
    """Create the six-column zones table (only while firewalld is running)."""
    if not firewallIsrunnig:
        # NOTE(review): no self.tableFw is created on this path, yet
        # getContentFirewall still adds self.tableFw to the layout - verify.
        return
    table = QTableWidget()
    table.setRowCount(0)
    table.setColumnCount(6)
    table.setFixedHeight(570)
    table.setFixedWidth(1130)
    table.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)
    headers = ("zone", "interface", "Services", "Ports", "set-Default", "select")
    for column, title in enumerate(headers):
        table.setHorizontalHeaderItem(column, QTableWidgetItem(title))
    table.setEditTriggers(QAbstractItemView.NoEditTriggers)
    self.tableFw = table
    # fill one row per zone (reads self.tableFw)
    showmyfwlist(self)
class SelectCellInTableNet(QWidget):
    """Table cell holding a centred checkbox; exposes its state as .isSelected."""

    def __init__(self, parent=None):
        super(SelectCellInTableNet, self).__init__(parent)
        self.isSelected = False
        self.checkb = QCheckBox(self)
        self.checkb.stateChanged.connect(self.checkBoxChangedAction)
        layout = QHBoxLayout()
        layout.addStretch()
        layout.addWidget(self.checkb)
        layout.addStretch()
        layout.setContentsMargins(0, 0, 0, 0)
        layout.setSpacing(8)
        self.hbox = layout
        self.setLayout(layout)

    def checkBoxChangedAction(self, state):
        """Mirror the checkbox state into the plain boolean flag."""
        self.isSelected = (state == QtCore.Qt.Checked)
class SetDefaultZone(QWidget):
    """Table cell with a 'Set' button that makes its zone the firewalld default."""

    def __init__(self, zone, parent=None):
        super(SetDefaultZone, self).__init__(parent)
        self.zone = zone
        self.hbox = QHBoxLayout()
        self.showmoreBtn = QPushButton('Set')
        self.showmoreBtn.clicked.connect(self.showmoreBtnClicked)
        self.hbox.addWidget(self.showmoreBtn)
        self.hbox.addStretch()
        self.hbox.setContentsMargins(0, 0, 0, 0)
        self.hbox.setSpacing(8)
        self.setLayout(self.hbox)

    def showmoreBtnClicked(self):
        """Try to set this zone as the default, reporting success or failure."""
        index = str(self.zone)
        try:
            setDefaultZone(index)
        # narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are not swallowed; message typos fixed ('warrning', 'can set',
        # 'has been setted')
        except Exception:
            QMessageBox.critical(self, 'warning', f'\n cannot set {index} as the default zone ')
        else:
            QMessageBox.information(self, 'Services', f'\n {index} has been set as the default zone ')
class ServiceTableFw(QWidget):
    """Table cell with a 'more' button listing the services enabled in a zone."""

    def __init__(self, username, parent=None):
        super(ServiceTableFw, self).__init__(parent)
        self.username = username
        layout = QHBoxLayout()
        self.showmoreBtn = QPushButton('more')
        self.showmoreBtn.clicked.connect(self.showmoreBtnClicked)
        layout.addWidget(self.showmoreBtn)
        layout.addStretch()
        layout.setContentsMargins(0, 0, 0, 0)
        layout.setSpacing(8)
        self.hbox = layout
        self.setLayout(layout)

    def showmoreBtnClicked(self):
        """Pop up a message box naming every service enabled in this zone."""
        index = str(self.username)
        outputString = ''.join(f'{name} ' for name in listservices(index))
        QMessageBox.information(self, 'Services', f'\n Services enabled in {index} Zone are:\n {outputString}')
class PortsTableFw(QWidget):
    """Table cell with a 'more' button listing the ports opened in a zone."""

    def __init__(self, username, parent=None):
        super(PortsTableFw, self).__init__(parent)
        self.username = username
        layout = QHBoxLayout()
        self.showmoreBtn = QPushButton('more')
        self.showmoreBtn.clicked.connect(self.showmoreBtnClicked)
        layout.addWidget(self.showmoreBtn)
        layout.addStretch()
        layout.setContentsMargins(0, 0, 0, 0)
        layout.setSpacing(8)
        self.hbox = layout
        self.setLayout(layout)

    def showmoreBtnClicked(self):
        """Pop up a message box naming every port added to this zone."""
        index = str(self.username)
        outputString = ''.join(f'{port} ' for port in listports(index))
        QMessageBox.information(self, 'Ports', f'\n Ports added in {index} Zone are:\n {outputString}')
class interfaceTableFw(QWidget):
    """Table cell with a 'more' button listing the interfaces bound to a zone."""

    def __init__(self, zone, parent=None):
        super(interfaceTableFw, self).__init__(parent)
        self.zone = zone
        layout = QHBoxLayout()
        self.showmoreBtn = QPushButton('more')
        self.showmoreBtn.clicked.connect(self.showmoreBtnClicked)
        layout.addWidget(self.showmoreBtn)
        layout.addStretch()
        layout.setContentsMargins(0, 0, 0, 0)
        layout.setSpacing(8)
        self.hbox = layout
        self.setLayout(layout)

    def showmoreBtnClicked(self):
        """Pop up a message box naming every interface attached to this zone."""
        index = str(self.zone)
        # NOTE(review): listinterfaces is not imported explicitly; it must come
        # in via the star import of project.firewall.tableFirewall - verify.
        outputString = ''.join(f'{iface} ' for iface in listinterfaces(index))
        QMessageBox.information(self, 'Interfaces', f'\n Interfaces added in {index} Zone are:\n {outputString}')
def showmyfwlist(self):
    """Populate the firewall table with one row per zone plus widget cells."""
    self.list_of_fw = listZoneModified()
    self.dic = {}    # zone -> SetDefaultZone cell (column 4)
    self.dic1 = {}   # zone -> ServiceTableFw cell (column 2)
    self.dic2 = {}   # zone -> PortsTableFw cell (column 3)
    self.dic3 = {}   # zone -> interfaceTableFw cell (column 1)
    self.dic4 = {}   # zone -> SelectCellInTableNet cell (column 5)
    self.rowposition = 0  # kept for compatibility; rowPosition (capital P) is the one used
    for zoneinfo in self.list_of_fw:
        zone = zoneinfo[0]
        self.rowPosition = self.tableFw.rowCount()
        self.tableFw.insertRow(self.rowPosition)
        self.tableFw.setItem(self.rowPosition, 0, QTableWidgetItem(zone))
        self.dic3[zone] = interfaceTableFw(zone)
        self.dic[zone] = SetDefaultZone(zone)
        self.dic1[zone] = ServiceTableFw(zone)
        self.dic2[zone] = PortsTableFw(zone)
        self.dic4[zone] = SelectCellInTableNet()
        self.tableFw.setCellWidget(self.rowPosition, 1, self.dic3[zone])
        self.tableFw.setCellWidget(self.rowPosition, 2, self.dic1[zone])
        self.tableFw.setCellWidget(self.rowPosition, 3, self.dic2[zone])
        self.tableFw.setCellWidget(self.rowPosition, 4, self.dic[zone])
        self.tableFw.setCellWidget(self.rowPosition, 5, self.dic4[zone])
def createUsersWindow(self, d):
    """Open the firewall 'add' dialog, pre-filled when exactly one zone is ticked.

    d maps zone name -> its SelectCellInTableNet checkbox cell.
    """
    selected = [zone for zone, cell in d.items() if cell.isSelected]
    if len(selected) != 1:
        # none or several rows ticked: start from a blank zone record
        self.secondwindow = CreateFwWindow([""])
        self.sw = qtmodern.windows.ModernWindow(self.secondwindow)
        self.sw.show()
    else:
        for record in self.list_of_fw:
            if record[0] == selected[0]:
                self.secondwindow = CreateFwWindow(record)
                self.sw = qtmodern.windows.ModernWindow(self.secondwindow)
                self.sw.show()
def editFWWindow(self):
    """Open the firewall 'edit' dialog inside a qtmodern frame."""
    window = EditFwWindow()
    self.secondwindow = window
    self.sw = qtmodern.windows.ModernWindow(window)
    self.sw.show()
def deleteFwWindow(self, d):
    """Open the delete dialog for the single ticked zone, or warn otherwise.

    d maps zone name -> its SelectCellInTableNet checkbox cell.
    """
    selected = [zone for zone, cell in d.items() if cell.isSelected]
    if len(selected) != 1:
        QMessageBox.warning(self, 'warning', 'Please select just one Zone')
    else:
        for record in self.list_of_fw:
            if record[0] == selected[0]:
                self.secondwindow = DeleteFwWindow(record)
                self.sw = qtmodern.windows.ModernWindow(self.secondwindow)
                self.sw.show()
| 10,040 | 3,378 |
#!/usr/bin/python3
import threading, queue, time
import picamera.array as picamarray, numpy, pathlib
import numpy.ma as nma
import png, io
from pootlestuff import watchables as wv
class piCamCPU(wv.watchablesmart):
    """
    a base class for things that want to analyse images in detail for movement detection, exposure adjustment or anything else.

    It uses picamera to resize frames (to reduce processing load and reduce noise), pulls out each frame and passes it to
    an analyser running in its own thread.
    """
    def __init__(self, statusvals, wabledefs, startbtnvals, loglevel=wv.loglvls.INFO, **kwargs):
        """
        statusvals  : value list for the 'status' watchable (first entry is the initial state)
        wabledefs   : extra watchable definitions supplied by the subclass
        startbtnvals: labels for the start/stop button watchable
        """
        # this base class defines 'monitor' itself, so the assert always holds;
        # it documents the contract for subclasses that replace pieces
        assert hasattr(self, 'monitor')
        super().__init__(wabledefs=[
            ('status',       wv.enumWatch,  statusvals[0],   False, {'vlist': statusvals}),
            ('startstopbtn', wv.enumWatch,  startbtnvals[0], False, {'vlist': startbtnvals}),
            ('autostart',    wv.enumWatch,  'off',           True,  {'vlist': ('off', 'on')}),
            ('width',        wv.intWatch,   128,             True,  {'minv': 8, 'maxv': 800}),
            ('height',       wv.intWatch,   96,              True,  {'minv': 6, 'maxv': 600}),
            ('lastactive',   wv.floatWatch, float('nan'),    False),
            ('imagemode',    wv.enumWatch,  'rgb',           True,  {'vlist': ('rgb', 'yuv')}),
            ('imagechannel', wv.enumWatch,  '0',             True,  {'vlist': ('0', '1', '2', '*')}),
            ('skippedcount', wv.intWatch,   0,               False),
            ('analysedcount',wv.intWatch,   0,               False),
            ]+wabledefs,
            **kwargs)
        self.agentclass=self.app.agentclass
        self.monthread=None
        self.procthread=None
        self.loglevel=loglevel
        if self.autostart.getIndex()==1:
            # start the monitor immediately, with a short delay so the rest of
            # the app can finish initialising first
            self.startstopbtn.setIndex(1, wv.myagents.app)
            self.running=True
            self.monthread=threading.Thread(name=type(self).__name__, target=self.monitor, kwargs={'startdelay':2.5})
            self.monthread.start()
        self.startstopbtn.addNotify(self.do_startstop, wv.myagents.user)

    def do_startstop(self, watched, agent, newValue, oldValue):
        """
        called when the user clicks the start / stop button.

        To start running detection, run up a thread on the 'monitor' function
        of this object; to stop, clear self.running and let monitor wind down.
        """
        btnstate=watched.getIndex()
        if self.monthread is None and btnstate==1:
            self.running=True
            self.monthread=threading.Thread(name=type(self).__name__, target=self.monitor)
            self.monthread.start()
        elif self.monthread is not None and btnstate==0:
            self.running=False
        else:
            self.log(wv.loglvls.WARN,' inconsistent move detection states running is %s and button was %s' % (self.running, oldValue))

    def preparearray(self):
        """
        prepares a numpy work array sized from the width/height watchables.

        When imagechannel is '*' (index 3) all three colour planes are kept,
        so the array gains a trailing dimension of 3.
        """
        nshape=[self.height.getValue(), self.width.getValue()]
        if self.imagechannel.getIndex() == 3:
            nshape.append(3)    # fixed: was 'nspage.append(3)', a NameError
        return numpy.empty(shape=nshape, dtype=numpy.int16)

    def monitor(self, startdelay=0):
        """
        This function coordinates cpu based movement detection; it runs in its own thread within the loop of a
        picamera.capture_sequence call until self.running is set False.

        buffercycle (a generator) runs in a loop to process each frame.  A further thread analyses successive frames,
        fed through a threadsafe queue that only ever holds 1 entry (if analysis is still running when the next frame
        arrives, the older frame is discarded).
        """
        if startdelay > 0:
            time.sleep(startdelay)
        self.status.setIndex(1, self.agentclass.app)
        self.lastactive.setValue(time.time(), self.agentclass.app)
        picam=self.app.startCamera()
        # round up to the GPU's resize granularity (32-pixel wide, 16-pixel high blocks)
        resize=((self.width.getValue()+31) // 32 * 32, (self.height.getValue()+15) // 16 * 16)
        self.freebuffs=queue.Queue()
        arraytype=picamarray.PiRGBArray if self.imagemode.getValue()=='rgb' else picamarray.PiYUVArray
        for i in range(3):
            self.freebuffs.put(arraytype(picam, size=resize))
        self.camerabuff=None        # the buffer currently being filled
        self.pendingbuffs=queue.Queue(maxsize=1)    # buffers awaiting analysis - size 1, just using threadsafeness
        splitter_port=self.app._getSplitterPort(type(self).__name__)
        self.log(wv.loglvls.INFO, 'cpu move detect using port %d and image size %s' % (splitter_port, resize))
        time.sleep(.1)
        self.condition=None         # set lazily to trigger detection-overlay streaming
        self.analthread=threading.Thread(name='cpuanalyse', target=self.analysethread)
        self.analthread.start()
        # blocks here until buffercycle stops yielding (self.running cleared)
        picam.capture_sequence(self.buffercycle(),
                format='rgb' if self.imagemode.getValue()=='rgb' else 'yuv',
                resize=resize, splitter_port=splitter_port, use_video_port=True)
        self.camerabuff=None
        self.pendingbuffs=None
        self.freebuffs=None
        self.app._releaseSplitterPort(type(self).__name__, splitter_port)
        self.lastactive.setValue(time.time(), self.agentclass.app)
        self.monthread=None
        self.analthread.join()
        self.analthread=None
        self.status.setIndex(0, self.agentclass.app)

    def buffercycle(self):
        """
        Generator used by picamera.capture_sequence to supply capture buffers.

        A small pool of buffers cycles round; each time round the loop the
        last-filled buffer is parked on the 1-slot pending queue so the
        analyse thread can always pick up the most recent frame.
        """
        try:
            while self.running:
                try:
                    nextbuff=self.freebuffs.get_nowait()
                except queue.Empty:
                    nextbuff=None
                if nextbuff is None:
                    # NOTE(review): 'overruns' is a watchable declared by the
                    # subclass (MoveDetectCPU) - verify it exists for any new subclass
                    self.overruns.increment(agent=self.agentclass.app)
                    time.sleep(.2)
                    try:
                        nextbuff=self.freebuffs.get_nowait()
                    except queue.Empty:
                        # fixed: this used `raise StopIteration()`, which PEP 479
                        # turns into RuntimeError inside a generator (and left the
                        # log line below unreachable) - log then end the generator
                        self.log(wv.loglvls.ERROR,'irrecoverable buffer overflow')
                        return
                prevbuff=self.camerabuff
                self.camerabuff=nextbuff
                if prevbuff is not None:
                    try:
                        # queue full? drop the stale frame back to the free pool
                        expiredbuff=self.pendingbuffs.get_nowait()
                        expiredbuff.truncate(0)
                        self.freebuffs.put(expiredbuff)
                        self.skippedcount.increment(agent=self.agentclass.app)
                    except queue.Empty:
                        pass
                    self.pendingbuffs.put_nowait(prevbuff)
                yield nextbuff
        except Exception:
            self.log(wv.loglvls.DEBUG,'move detect thread problem!', exc_info=True)

    def analysethread(self):
        """
        Pulls the most recent pending frame and hands it to analysebuff,
        keeping rolling busy / cpu usage statistics (updated every 5 seconds).
        """
        prevbuff=None
        clocktimestart=time.time()
        # fixed: time.clock() was removed in Python 3.8; process_time() is the
        # per-process cpu clock replacement
        cputimestart=time.process_time()
        busytime=0
        busystart=time.time()
        tick5=busystart+5
        channel=self.imagechannel.getIndex()
        workarray=None
        while self.running:
            try:
                busytime+=time.time()-busystart
                # short timeout so self.running is re-checked regularly
                thisbuff=self.pendingbuffs.get(block=True, timeout=2)
                busystart=time.time()
            except queue.Empty:
                thisbuff=None
            if thisbuff is not None:
                thisbuff, prevbuff, workarray = self.analysebuff(thisbuff, prevbuff, workarray, channel)
                prevbuff=thisbuff
            if time.time() > tick5:
                elapsed=time.time()-clocktimestart
                # NOTE(review): analcpu / analbusy are watchables declared by
                # the subclass (MoveDetectCPU) - verify for any new subclass
                self.analcpu.setValue(100*(time.process_time()-cputimestart)/elapsed, self.agentclass.app)
                self.analbusy.setValue(100*busytime/elapsed, self.agentclass.app)
                tick5+=5
        if self.condition:
            try:
                self.condition.notify_all() # release clients one last time
            except Exception:
                pass
            self.condition=None
class MoveDetectCPU(piCamCPU):
    """
    This class analyses successive frames and looks for significant change, setting its 'triggered' watchable True when movement is detected.
    This remains True until all frames for 'latchtime' have not detected movement. Anything wanting to be triggered can poll or set a notification
    on this watchable.

    The code uses picamera to resize the frames (which happens in the GPU) to a (typically) much smaller size for analysis in this thread.
    Initially this class just sets up a bunch of variables that control and monitor this functionality. When detection is active, it runs a monitor thread
    to drive the camera and grab frames, and a further thread to actually analyse the frames.

    The monitor thread creates a small number of buffers and uses picamera.capture_sequence to run the camera; the capture_sequence call does not return
    until an external event causes capture sequence to stop.

    The buffers are allocated and managed by the member function buffercycle which is called from within picamera.capture_sequence. 'buffercycle' uses 'yield'
    to give a free buffer back to the camera, and places the buffer just filled where the analysis thread can pick it up. If there was already a buffer
    waiting for analysis this expired buffer is returned to the free list and replaced by the more recent buffer; if the analysis thread has grabbed the previous
    buffer, the analysis thread returns it to the queue when it has dealt with it.

    Starting with the second buffer, the analysis thread picks 1 channel from the buffer and compares it with the previous frame to check for differences.
    """
    def __init__(self, statusvals=('off', 'watching', 'triggered'), startbtnvals=('start watching', 'stop watching'), **kwargs):
        """
        initialisation just sets up the vars used.
        """
        super().__init__(statusvals=statusvals, startbtnvals=startbtnvals, wabledefs=[
            ('triggercount', wv.intWatch,    0,             False),   # times triggered since startup
            ('lasttrigger',  wv.floatWatch,  float('nan'),  False),   # timestamp of last trigger
            ('cellthresh',   wv.intWatch,    22,            True, {'minv': 1, 'maxv': 255}),  # per-cell difference threshold
            ('celltrigcount',wv.intWatch,    100,           True, {'minv': 1}),               # cells over threshold to trigger
            ('latchtime',    wv.floatWatch,  4,             True),    # seconds 'triggered' stays latched
            ('maskfold',     wv.folderWatch, '~/camfiles/masks', True),
            ('maskfile',     wv.textWatch,   '-off-',       True),
            ('overruns',     wv.intWatch,    0,             False),   # buffer-pool exhaustion count
            ('analbusy',     wv.floatWatch,  0,             False),   # % of wall time the analyser was busy
            ('analcpu',      wv.floatWatch,  0,             False),   # % cpu used by the analyser
            ], **kwargs)
        # NOTE(review): this runs AFTER super().__init__, which may have
        # autostarted the monitor thread with running=True - verify ordering.
        self.running=False

    def fetchmasksize(self):
        """
        called from web server to retrieve info about mask in preparation for editing
        """
        rr={'width' : self.width.getValue(),
            'height' : self.height.getValue(),
            }
        return rr

    def savemask(self, pathinf, name, mask):
        """
        called from webserver when user saves a mask after editing.

        Writes a 1-bit greyscale png named *name* into the mask folder and
        returns a small response dict for the web layer.
        """
        mfile=(self.maskfold.getFolder()/name).with_suffix('.png')
        print('savemask (%3d/%3d) to %s (%s): ' % (len(mask[0]), len(mask), name, mfile))
        pw = png.Writer(len(mask[0]), len(mask), greyscale=True, bitdepth=1)
        with mfile.open('wb') as fff:
            pw.write(fff,mask)
        return {'resp': 200, 'rdata':{'message': 'saved to %s' % mfile}}

    def checkmask(self, var, agent, newValue, oldValue):
        # placeholder notification hook for mask-file changes
        pass

    def preparearray(self):
        """
        prepares / updates a numpy array or masked array dependent on various variables
        """
        # Masking support is currently disabled: this always delegates to the
        # base-class implementation and everything below is unreachable.
        if True:
            return super().preparearray()
        # NOTE(review): dead code below references undefined names
        # ('dataarray', and 'maskfile' in the last log call) and would raise
        # NameError if ever re-enabled - verify before resurrecting.
        if self.maskfile.getValue()=='-off-':
            return dataarray
        else:
            mfile=(self.maskfold.getValue()/self.maskfile.getValue()).with_suffix('.png')
            if mfile.is_file():
                with mfile.open('rb') as mfo:
                    mwidth, mheight, mrows, minfo = png.Reader(file=mfo).read()
                    rowdat=[m for m in mrows]
                if mwidth==self.width.getValue() and mheight==self.height.getValue():
                    if minfo['planes']==1 and minfo['bitdepth']==1:
                        mask=numpy.array(rowdat,dtype=numpy.bool_)
                        self.log(wv.loglvls.INFO,'mask updated from %s %d of %d masked' % (str(mfile), len(numpy.nonzero(mask)[0]), mask.shape[0]*mask.shape[1]))
                        return nma.masked_array(data=dataarray, mask=mask)
                    else:
                        self.log(wv.loglvls.INFO, 'mask file has %d planes and bitdepth %d: should be 1 and 1' % (minfo.planes, minfo.bit_depth))
                else:
                    self.log(wv.loglvls.INFO,'mask image is wrong size - expected (%3d/%3d), file has (%3d/%3d)' % (self['width'].getValue(), self['height'].getValue(), mwidth, mheight))
            else:
                self.log(wv.loglvls.INFO, 'unable to get maskfile %s' % str(maskfile))
        return dataarray

    def analysebuff(self, thisbuff, prevbuff, workarray, channel):
        """
        Diff thisbuff against prevbuff, set/clear the triggered state, and
        optionally stream the difference image as a png overlay.

        Returns (thisbuff, prevbuff, workarray) for the caller to roll forward.
        """
        if prevbuff is None:
            # first frame: nothing to diff against, just build the work array
            workarray=self.preparearray()
        else:
            logthresh=self.cellthresh.getValue()
            if channel == 3:
                # '*' channel: diff all three colour planes
                numpy.copyto(workarray, thisbuff.array)
                workarray -= prevbuff.array
            else:
                numpy.copyto(workarray, thisbuff.array[:,:,channel])
                workarray -= prevbuff.array[:,:,channel]
            numpy.absolute(workarray, workarray)
            cthresh=self.cellthresh.getValue()
            if logthresh != cthresh:
                # NOTE(review): logthresh was read from the same watchable a few
                # lines up, so this branch can never run - verify intent.
                logthresh = cthresh
                logpal=None
            # cells whose absolute change meets the threshold
            hits=(workarray >= logthresh).nonzero()
            trig=len(hits[0]) >= self.celltrigcount.getValue()
            if trig:
                if self.status.getIndex() < 2:
                    self.triggercount.increment(agent=self.agentclass.app)
                    self.status.setIndex(2, agent=self.agentclass.app)
                self.lasttrigger.setValue(time.time(), agent=self.agentclass.app)
            else:
                # drop back to 'watching' once latchtime has expired
                if self.status.getIndex() > 1 and time.time() > (self.lasttrigger.getValue() + self.latchtime.getValue()):
                    self.status.setIndex(1, agent=self.agentclass.app)
            if not self.condition is None: # check if we're streaming the detection overlay
                if self.laststreamactive+5 < time.time():
                    # client(s) all gone - stop the stream
                    print('clients gone - shut detect stream')
                    self.condition=None
                    logpal=None
                    # NOTE(review): 'streaming' (and 'logpal'/'logpng' generally)
                    # were locals of the pre-refactor analysethread; here they are
                    # unbound locals, so this path raises NameError - verify.
                    streaming=None
                    logpng=None
                else:
                    if logpal is None:
                        logpal=makepalette(logthresh)
                    streamimg = io.BytesIO()
                    arshape=workarray.shape
                    if logpng is None:
                        logpng = png.Writer(arshape[1], arshape[0], palette=logpal)
                    # masked arrays need filling before tolist()
                    detimg=workarray.filled(fill_value=0) if hasattr(workarray, 'filled') else workarray
                    if trig and not detoverlay['xbase'] is None: # check if we want a blob
                        xb=detoverlay['xbase']
                        yb=detoverlay['ybase']
                        if abs(xb) < self.width.getValue() and abs(yb) < self.height.getValue():
                            if xb < 0:
                                xe=min((-1, xb+detoverlay['xsize']))
                            else:
                                xe=min((255, xb+detoverlay['xsize']))
                            if yb < 0:
                                ye=min((-1, yb+detoverlay['ysize']))
                            else:
                                ye=min((255, yb+detoverlay['ysize']))
                            # paint the trigger blob at palette index 255
                            detimg[yb:ye,xb:xe]=255
                    logpng.write(streamimg, detimg.tolist())
                    with self.condition:
                        self.frame = streamimg.getvalue()
                        self.condition.notify_all()
                    if False:
                        # debug hook: dump each streamed frame to ~/Pictures
                        sfp=pathlib.Path('~/Pictures').expanduser()/('b%04d.png' % self['triggercount'].getValue())
                        with sfp.open('wb') as sfpf:
                            sfpf.write(self.frame)
            # recycle the previous buffer now we have finished with it
            prevbuff.truncate(0)
            self.freebuffs.put(prevbuff)
        return thisbuff, prevbuff, workarray

    def getStream(self):
        """
        When we get a stream request from the web server, check if already running.

        This is called by an http handler request thread.
        THE HTTP thread (there can be several) then loops calling nextframe.
        """
        if self.running:
            if self.condition==None:
                print('make detect stream')
                self.condition = threading.Condition()
                self.laststreamactive=time.time()
            else:
                print('returning existing detect stream')
            return self
        else:
            print('aborting detect stream')
            # signals "no stream available" to the caller
            raise StopIteration()

    def nextframe(self):
        """
        The http handler thread calls this to get each successive frame.

        It waits for a new frame to arrive, then updates the lastactive
        timestamp and returns (frame bytes, mime type, length).
        """
        if self.running and self.condition:
            with self.condition:
                self.condition.wait()
            self.laststreamactive=time.time()
            return self.frame, 'image/png', len(self.frame)
        else:
            raise StopIteration()
# Colour bands for the detection-overlay palette, as (threshold-factor, RGBA).
# Each band applies to difference values below factor * cellthreshold.
detpalette=(
    (0.5, (  0,   0,   0,   0)), # totally transparent black below 1/2 threshold
    (0.75,( 60,  60, 140, 160)), # medium blue for 1/2 to 3/4 below
    (1,   ( 75,  75, 255, 200)), # brighter and more opaque blue to threshold
    (1.5, ( 75, 255,  75, 200)), # green just above threshold
    (2,   ( 60, 150,  69, 160)), # then paler green
    (400, (255,   0,   0, 139)), # then red wash
    )

detoverlay={ # overlay a blob when triggered
    'xbase': -4,    # set 'xbase' to None to stop the blob, pos for inset from left, neg for inset from right
    'xsize': 4,     # size must be +ve
    'ybase': 4,
    'ysize': 4,
    'palette': (255,100,100,255)
    }

def makepalette(thresh):
    """
    Build the 256-entry RGBA palette used to render difference data as a png.

    Difference values are 0..255; each detpalette band (factor, colour) paints
    every index below factor * thresh with that colour.  Index 255 is always
    the detoverlay blob colour, and the palette never exceeds 256 entries.
    """
    colours = []
    band_colour = None
    for factor, rgba in detpalette:
        band_colour = rgba
        upper = factor * thresh
        while len(colours) < upper:
            colours.append(rgba)
            if len(colours) > 254:
                # palette full: reserve the last slot for the trigger blob
                colours.append(detoverlay['palette'])
                return colours
    # thresh so small that no band reached index 255: pad with the last colour
    while len(colours) < 255:
        colours.append(band_colour)
    colours.append(detoverlay['palette'])
    return colours
| 20,397 | 5,733 |
from random import random
class Triplet():
    """An (entity_1, relation, entity_2) triple with an attached confidence score.

    Equality and hashing consider only the three components, never the score,
    so two triplets that differ only in score compare equal and collide in sets.
    """

    def __init__(self, entity_1, relation, entity_2, score=None):
        self.__entity_1 = entity_1
        self.__relation = relation
        self.__entity_2 = entity_2
        # Bug fix: the original default was ``score=random()``, which Python
        # evaluates once at class-definition time, so every triplet created
        # without an explicit score shared the SAME "random" value.  A None
        # sentinel draws a fresh random score per instance (for developing
        # purposes) while explicit scores are coerced to float as before.
        self.__score = random() if score is None else float(score)

    def __repr__(self):
        # tab-separated, matching the read_triplet file format
        return '\t'.join((self.__entity_1, self.__relation, self.__entity_2, str(self.__score)))

    def __eq__(self, other):
        # score is deliberately excluded from equality
        if not isinstance(other, Triplet):
            return NotImplemented   # let Python fall back / try the reflected op
        on_entity_1 = self.__entity_1 == other.entity_1
        on_relation = self.__relation == other.relation
        on_entity_2 = self.__entity_2 == other.entity_2
        return on_entity_1 and on_relation and on_entity_2

    def __key(self):
        return (self.__entity_1, self.__relation, self.__entity_2)

    def __hash__(self):
        # consistent with __eq__: score is not part of the hash
        return hash(self.__key())

    @property
    def entity_1(self):
        return self.__entity_1

    @property
    def entity_2(self):
        return self.__entity_2

    @property
    def relation(self):
        return self.__relation

    @property
    def score(self):
        return self.__score

    @score.setter
    def score(self, value):
        self.__score = value

    @classmethod
    def create_valid_triplet(cls, string):
        """Parse an axiom "ENTITY_1 SubClassOf RELATION some ENTITY_2".

        Returns a Triplet (with a fresh random score), or None when the line
        does not match that exact five-word shape.
        """
        words = string.rstrip('\n').split(' ')
        if len(words) == 5 and words[1] == "SubClassOf" and words[3] == "some":
            entity_1 = words[0]
            relation = words[2]
            entity_2 = words[4]
            return cls(entity_1, relation, entity_2)
        else:
            return None

    @classmethod
    def read_triplet(cls, string):
        """Parse one tab-separated "e1\\tr\\te2\\tscore" line (repr format)."""
        entity_1, relation, entity_2, score = tuple(string.rstrip('\n').split('\t'))
        return cls(entity_1, relation, entity_2, score)

    @classmethod
    def groupBy_relation(cls, triplets):
        """Group *triplets* into a dict keyed by their relation."""
        groups = {}
        for triplet in triplets:
            rel = triplet.relation
            if rel not in groups:
                groups[rel] = [triplet]
            else:
                groups[rel].append(triplet)
        return groups

    @classmethod
    def groupBy_entity_relation(cls, triplets):
        """
        Given a list of triplets [(s_1,r_1,t_2),...,(s_n,r_n,t_n)], group them by equality on the (source, relation)
        pair. The result maps (entity_1, relation) -> list of triplets sharing that source node and relation.
        """
        groups = {}
        for triplet in triplets:
            key = (triplet.entity_1, triplet.relation)
            if key not in groups:
                groups[key] = [triplet]
            else:
                groups[key].append(triplet)
        return groups

    @classmethod
    def to_file(cls, triplets, out_file):
        """Write one repr() line per triplet to *out_file*."""
        with open(out_file, 'w') as f:
            for triplet in triplets:
                f.write(str(triplet)+'\n')

    @classmethod
    def to_zero(cls, triplets):
        """Set every score to 0 and return the triplets as a set.

        Note: this mutates the Triplet objects passed in (they are shared,
        not copied).
        """
        items = list(triplets)
        for triplet in items:
            triplet.score = 0
        return set(items)
"""Takes a screenshot of a webpage"""
from pathlib import Path
from tempfile import NamedTemporaryFile
from time import sleep
from loguru import logger
from selenium import webdriver # type: ignore
from selenium.webdriver.chrome.options import Options # type: ignore
def take_screenshot(url, driver_path, dest_file: Path = None, delay: int = 0) -> Path:
    """
    Take a screenshot with Selenium
    :param url: URL of the page to screenshot
    :param driver_path: Where the Chrome driver was saved
    :param dest_file: Full path to save the screenshot. If None, a temporary .png path is used
    :param delay: Number of seconds to delay before taking the screenshot; good for long loading pages
    :return: The location the file was saved
    """
    options = Options()
    options.headless = True
    driver = webdriver.Chrome(driver_path, options=options)
    try:
        driver.get(url)
        sleep(delay)
        if not dest_file:
            # delete=False keeps the name reserved for save_screenshot; the
            # original deleted the temp file on context exit and then raced
            # to reuse its name (and would fail outright on Windows)
            with NamedTemporaryFile(suffix='.png', delete=False) as temp_path:
                dest_file = Path(temp_path.name)
        logger.info(f'Taking screenshot of {url}, saving it to {dest_file} with driver {driver_path}')
        driver.save_screenshot(str(dest_file))
    finally:
        # quit even when navigation or the screenshot fails, so no orphaned
        # headless Chrome processes are left behind
        driver.quit()
    return dest_file
| 1,225 | 349 |
import copy
from hypothesis import given
from tests.utils import (BoundPortedEdgesPair,
are_bound_ported_edges_equal)
from . import strategies
@given(strategies.edges_pairs)
def test_shallow(edges_pair: BoundPortedEdgesPair) -> None:
    """Shallow copies of paired bound/ported edges stay equivalent."""
    bound, ported = edges_pair
    bound_clone = copy.copy(bound)
    ported_clone = copy.copy(ported)
    assert are_bound_ported_edges_equal(bound_clone, ported_clone)
@given(strategies.edges_pairs)
def test_deep(edges_pair: BoundPortedEdgesPair) -> None:
    """Deep copies of paired bound/ported edges stay equivalent."""
    bound, ported = edges_pair
    bound_clone = copy.deepcopy(bound)
    ported_clone = copy.deepcopy(ported)
    assert are_bound_ported_edges_equal(bound_clone, ported_clone)
| 619 | 209 |
from pandas.plotting import scatter_matrix
import matplotlib.pyplot as plt
from PyQt5 import QtWidgets
import sys
from .DataVisulization import DataVisulization
class DataCorrelation:
'''this module allows you to be able to view the correlation values of your dataset
allowing you the ability to prevent simple errors
DataCorrelation(df = pandas dataframe)
df: is where you will input the dataset you would like to evaluate
Correlationmatrix(): is the method you call uppon to view which columns have
correlation relationships
LookingAtCorr() is the method is where you will actually make the changes to your dataset
this method returns a pandas dataframe
Check(): this method will call uppon both LookingAtCorr, and Correlationmatrix for you
this method also will return a pandas dataframe
'''
def __init__(self,df):
self.df = df
self.copy = self.ByeByeText(self.df)
self.high_corr = {}
self.corr = []
def ByeByeText(self,df):
copy = df.copy()
for i in copy.columns:
if (copy[i].dtype == "object"):
copy.drop(columns = i, inplace=True)
return (copy)
def FindingScreenSize_(self):
app = QtWidgets.QApplication(sys.argv)
screen = app.primaryScreen()
size = screen.size()
screensize = (size.width()/95-2, size.height()/96-2)
return screensize
def Correlationmatrix(self):
'''Correlationmatrix(): is the method you call uppon to view which columns have
correlation relationships'''
for i in self.copy.columns:
for j in self.copy.columns:
if i == j:
pass
else:
# print(j)
corr = self.copy[i].corr(self.copy[j])
if (corr>=0.5 or corr<=-0.5):
# print(corr)
if (i not in self.high_corr.keys()):
self.high_corr[i] = []
self.high_corr[i].extend([j,corr])
self.corr.append(corr)
print("these are correlation values for each column")
count = 0
for i in self.high_corr.keys():
print(f"{count}:{i},{self.high_corr[i]}")
count += 1
def LookingAtCorr(self):
'''LookingAtCorr() is the method is where you will actually make the changes to your dataset
this method returns a pandas dataframe '''
print("with the values you see up above do you with to see a a scatter matrix of them")
choice = input("enter yes if you do")
if choice.upper() == "YES":
column = []
matrix = []
for i in self.copy.columns:
column.append(i)
print(column)
while (choice.upper()!="Q"):
count = 0
for i in column:
print(f"{count}:{i}")
count+=1
try:
index = int(input("enter the corresponding number of each column that you would like to see"))
except:
print("seems you picked an option that was not available")
matrix.append(column[index])
choice = input("enter q to view the scatter matrix")
column.pop(index)
scatter_matrix(self.copy[matrix],figsize=self.FindingScreenSize_())
plt.show()
choice = input("enter yes if there is a plot you would like to view in more depth")
if (choice.upper()=="YES"):
for i in self.copy.columns:
print(i)
x = input("enter the name of the column you would like to be on the x axis")
y = input("enter the name of the column you would like to be on the y axis")
single_plot =DataVisulization(data = self.copy, type_of_plot="scatter",column_values_for_x=x,column_values_for_y=y)
single_plot.driver()
def combine(self):
    """Interactively combine pairs of columns into ratio columns.

    Repeatedly asks the user for a numerator and a denominator column,
    creates a new column numerator/denominator, and drops the source
    columns. The user may then inspect the new correlation scores and
    either keep the changes (updating ``self.df``) or revert to the
    original dataframe.

    Returns
    -------
    pandas.DataFrame
        ``self.df``, updated only when the user accepts the new columns.
    """
    copy_of_copy = self.copy.copy()
    drop = []
    choice = input("enter yes to combine some of the columns")
    if choice.upper() == "YES":
        column = [name for name in self.copy.columns]
        while choice.upper() != "Q":
            for count, name in enumerate(column):
                print(f"{count}:{name}")
            while True:
                try:
                    numerator = int(input("enter the number of the corresponding column you would like to be the numerator to be"))
                    # BUGFIX: bound was `<= len(column)`, which accepted the
                    # out-of-range index len(column) (and negative numbers).
                    if 0 <= numerator < len(column):
                        break
                    print("please enter of of the numbers you see on the screen")
                except ValueError:
                    print("please enter a number")
            while True:
                try:
                    denominator = int(input("enter the number of the corresponding column you would like to be the denominator to be"))
                    if 0 <= denominator < len(column):
                        break
                    print("please enter of of the numbers you see on the screen")
                except ValueError:
                    print("please enter a number")
            name_of_new_column = input("enter what you would like the new name of the column to be")
            self.copy[name_of_new_column] = self.copy[column[numerator]] / self.copy[column[denominator]]
            drop.append(column[numerator])
            drop.append(column[denominator])
            choice = input("enter q if that is all the columns you would like to combine")
        # de-duplicate so a column used in two combinations is only dropped once
        self.copy.drop(columns=list(dict.fromkeys(drop)), inplace=True)
        choice = input("enter yes if you would like to view the new correlation matrix scores")
        if choice.upper() == "YES":
            self.high_corr.clear()
            self.corr.clear()
            self.Correlationmatrix()
            print("what do you think of those scores?")
        choice = input("enter yes if you would like to keep these new scores"
                       " or enter nothing to revert them back to the original")
        if choice.upper() != "YES":
            self.copy = copy_of_copy
        elif choice.upper() == "YES":
            self.df = self.copy
    return self.df
def Check(self):
    '''
    Check(): convenience wrapper that runs the full correlation workflow:
    first Correlationmatrix() to report highly-correlated column pairs,
    then LookingAtCorr() for the interactive inspection step.
    Returns self.df (a pandas dataframe; LookingAtCorr may have let the
    user modify it indirectly).'''
    self.Correlationmatrix()
    self.LookingAtCorr()
    return self.df
| 6,950 | 1,729 |
# Package version, exposed both as a comparable tuple and a display string.
version_info = (2, 4, 19)
__version__ = "{}.{}.{}".format(*version_info)
| 73 | 33 |
from .util_fft import *
from .util_gettcv import *
| 51 | 20 |
import sys
import pandas as pd
import numpy as np
from sqlalchemy import create_engine
def load_data(messages_filepath, categories_filepath):
    '''
    Load the message and category .csv data files
    Input: the filepath of the two .csv files
    Output: the (messages, categories) pair of dataframes
    '''
    return (
        pd.read_csv(messages_filepath),
        pd.read_csv(categories_filepath),
    )
def clean_data(messages, categories):
    '''
    Merge and clean the dataframes for the downstream ML model.
    Input: the message and category dataframes
    Output: the final clean, merged dataframe with one 0/1 column per category
    '''
    df = messages.merge(categories, how='outer', on=['id'])
    # expand the ';'-separated categories string into individual columns
    categories = df['categories'].str.split(";", expand=True)
    # derive column names from the first row: each cell looks like
    # "<name>-<digit>", so strip the trailing "-<digit>"
    row = categories.iloc[0]
    category_colnames = row.map(lambda x: x[:-2])
    categories.columns = category_colnames
    # convert each category value to the numeric label after the dash
    for column in categories:
        categories[column] = categories[column].astype(str).str[-1].astype(int)
    # replace the raw categories column with the expanded ones
    df = df.drop(['categories'], axis=1)
    df = pd.concat([df, categories], axis=1)
    df = df.drop_duplicates()
    # BUGFIX: map the stray label 2 in `related` to 1 while KEEPING existing
    # 1s — the old np.where(df['related']==2, 1, 0) zeroed out every 1.
    df['related'] = np.where(df['related'] == 2, 1, df['related'])
    return df
def save_data(df, database_filename):
    '''
    Persist the dataframe into a SQLite database as table `DisasterData`,
    replacing any existing table of that name.
    Input: the dataframe to be saved and the filepath of the SQLite file
    '''
    url = 'sqlite:///' + database_filename
    db_engine = create_engine(url, echo=False)
    df.to_sql('DisasterData', db_engine, if_exists='replace', index=False)
def main():
    '''
    Command-line ETL driver: load the CSVs, clean/merge them, and save
    the result to a SQLite database. Expects exactly three arguments.
    '''
    if len(sys.argv) != 4:
        print('Warning: Please provide the filepaths of the messages and categories '
              'datasets as the first and second argument respectively, as '
              'well as the filepath of the database to save the cleaned data '
              'to as the third argument. \n\nExample: python process_data.py '
              'disaster_messages.csv disaster_categories.csv '
              'DisasterResponse.db')
        return
    messages_filepath, categories_filepath, database_filepath = sys.argv[1:]
    print(f'step1: Loading data...\n MESSAGES: {messages_filepath}\n CATEGORIES: {categories_filepath}')
    messages, categories = load_data(messages_filepath, categories_filepath)
    print('step2: Cleaning data...')
    df = clean_data(messages, categories)
    print(f'step3: Saving data...\n DATABASE: {database_filepath}')
    save_data(df, database_filepath)
    print('step4: Cleaned data saved to database!')
# Allow running this ETL script directly from the command line.
if __name__ == '__main__':
    main()
import logging
import os
from typing import Optional
from handler.model.base.base_db import ListOptions
from handler.model.model_activity import ModelActivity
from handler.model.model_user import ModelUser
from handler.util.time_util import get_now
def _get_earlist_activity(user: ModelUser) -> Optional[ModelActivity]:
    """Return the earliest activity created by *user*, or None if there is none.

    The listing is ordered by `create_time`, so the first yielded item is
    the earliest activity.
    """
    activities = ModelActivity.list(ListOptions(parent=user.name, order_by='create_time'))[0]
    # BUGFIX/robustness: `next` requires an iterator; wrapping in `iter` also
    # works when the listing is returned as a plain sequence rather than a
    # generator (next() on a list raises TypeError).
    return next(iter(activities), None)
def backfill_user_create_update_time():
    '''
    Backfill `create_time` and `update_time` on every row of table `users`.

    Each user's `create_time` becomes the `create_time` of their earliest
    activity, falling back to the current time when no activity exists;
    `update_time` always mirrors `create_time`. Progress is printed per user.
    '''
    users = ModelUser.list(ListOptions(parent=''))[0]
    for idx, usr in enumerate(users):
        earliest = _get_earlist_activity(usr)
        backfilled = earliest.create_time if earliest else get_now()
        usr.create_time = backfilled
        usr.update_time = backfilled
        # skip the automatic update_time bump so our explicit value sticks
        usr.update(update_update_time=False)
        print(f'Progress idx-{idx}: set {usr.username}\'s create_time to {usr.create_time}')
def main():
    # Entry point: run the user create/update-time backfill migration.
    backfill_user_create_update_time()
if __name__ == "__main__":
main()
| 1,181 | 354 |
#!/usr/bin/python
import sys
import os
import string
import argparse
import math
#Description
#-------------------------------
# This is a script that generates TPC-H skewed data.
#
# To see the parameters use "-h" or "--help".
#
# Run script Example :
# python ./dbgen-skew.py -d ~/workspace/subquery/test/tables/ -t partsupp.tbl -c 2 -s 0.5
#-------------------------------
#Write data
def writeData(data, file):
    """Write the table to *file* in TPC-H format: every field followed by
    '|', one row per line. The width is taken from the first row minus one,
    so the trailing element produced by split('|') (normally '\\n') is not
    written back."""
    with open(file, 'w+') as out:
        for row in data:
            for idx in range(len(data[0]) - 1):
                out.write(row[idx])
                out.write("|")
            out.write("\n")
#Introduce Skew
def modifyData(data, col, skew):
    """Skew column *col* (1-based index, as a string) of the table in place:
    the first floor(total * skew) rows get overwritten with the value found
    in the first row (skew >= 1 overwrites every row). Returns the mutated
    table."""
    target = int(col) - 1
    rows = len(data)
    fraction = float(skew)
    limit = int(math.floor(rows * fraction)) if fraction < 1 else rows
    fill = data[0][target]
    for idx in range(limit):
        data[idx][target] = fill
    return data
#Read data from the TPC-H directory
def loadData(path, table):
#CreateTable
tableData = []
#Open file
fileName = path + "/" + table
f = open(fileName, 'r')
#Read file
count = 0
data = f.readlines()
for line in range (0, len(data)):
tableData.append(data[line].split('|'))
count = count + 1
#Print info
print "Read "+str(count)+ " lines..."
#Return array
return tableData
# This function handles parameters
def handleArgs(currentPath):
    """Build the CLI argument parser and parse sys.argv.

    *currentPath* is accepted for interface compatibility but unused.
    Returns the parsed namespace with attributes d, t, c (list) and s.
    """
    parser = argparse.ArgumentParser(description='This is an script that generates TPC-H skewed data.')
    parser.add_argument('-d', action="store", type=str, help='Path to the existing TPC-H data directory.')
    parser.add_argument('-t', action="store", type=str, help='Name of the TPC-H table to generate.')
    parser.add_argument('-c', action="store", type=str, help='Column(s) to introduce skew.', nargs='+')
    parser.add_argument('-s', action="store", type=str, help='Skew in each column.')
    return parser.parse_args()
# Main function
def main(argv):
    # Orchestrates the run: parse CLI args, load the TPC-H table, apply the
    # requested skew to each listed column (modifyData mutates the table in
    # place, so skews accumulate across columns), and write the result next
    # to this script as "s<skew>_<table>".
    # NOTE(review): if -c is omitted the loop never runs and `skewed_data`
    # is unbound, so writeData raises NameError — confirm -c is mandatory.
    # Current Path
    currentPath = os.path.dirname(os.path.realpath(__file__))
    #Parse argument given by the user
    parsed_args = handleArgs(currentPath)
    #Configure output file
    path = os.path.dirname(os.path.realpath(__file__))
    outF = path + "/s"+parsed_args.s + "_" + parsed_args.t
    #Print info
    print "---- TPC-H Data Skew ----"
    print "Current TPC-H data direcotry : " + str(parsed_args.d)
    print "Name of the input table : " + str(parsed_args.t)
    print "Columns processed : " + str(parsed_args.c)
    print "Skew introduces : " + str(parsed_args.s)
    print "Output file : " + str(outF)
    #Load data
    print "Loading data..."
    data = loadData(os.path.abspath(parsed_args.d), parsed_args.t)
    #Introduce Skew
    print "Introducing skew..."
    cols = parsed_args.c
    for i in range (0, len(cols)):
        skewed_data = modifyData(data, cols[i], parsed_args.s )
    #Write data
    print "Writing data..."
    writeData(skewed_data, outF)
    print "-------------------------"
if __name__ == "__main__":
main(sys.argv)
| 3,612 | 1,195 |
#!/usr/bin/env python
# Copyright 2019-2020 Toyota Research Institute. All rights reserved.
"""DGP command line interface
"""
import glob
import itertools
import os
import sys
from multiprocessing import Pool, cpu_count
import click
from dgp.proto.dataset_pb2 import SceneDataset
from dgp.utils.aws import (convert_uri_to_bucket_path,
parallel_upload_s3_objects)
from dgp.utils.dataset_conversion import MergeSceneDatasetGen
from dgp.utils.protobuf import open_pbobject
# Root click command group for the DGP CLI; subcommands register themselves
# via @cli.command. (Kept docstring-free so click help text is unchanged.)
@click.group()
@click.version_option()
def cli():
    pass
@cli.command(name="upload-scenes")
@click.option(
"--scene-dataset-json",
required=True,
help="Path to a local scene dataset .json file i.e. /mnt/scene_dataset_v1.2.json"
)
@click.option(
"--s3-dst-dir", required=True, help="Prefix for uploaded scenes"
)
def upload_scenes(scene_dataset_json, s3_dst_dir):
"""Parallelized upload for scenes from a scene dataset JSON.
NOTE: This tool only verifies the presence of a scene, not the validity any of its contents.
"""
bucket_name, s3_base_path = convert_uri_to_bucket_path(s3_dst_dir)
dataset = open_pbobject(scene_dataset_json, SceneDataset)
local_dataset_root = os.path.dirname(os.path.abspath(scene_dataset_json))
if not dataset:
print('Failed to parse dataset artifacts {}'.format(scene_dataset_json))
sys.exit(0)
scene_dirs = []
for split in dataset.scene_splits.keys():
scene_dirs.extend([
os.path.join(local_dataset_root, os.path.dirname(filename))
for filename in dataset.scene_splits[split].filenames
])
# Make sure the scenes exist
with Pool(cpu_count()) as proc:
file_list = list(itertools.chain.from_iterable(proc.map(_get_scene_files, scene_dirs)))
# Upload the scene JSON, too.
file_list += [scene_dataset_json]
print("Creating file manifest for S3 for {} files".format(len(file_list)))
s3_file_list = [os.path.join(s3_base_path, os.path.relpath(_f, local_dataset_root)) for _f in file_list]
print("Done. Uploading to S3.")
parallel_upload_s3_objects(file_list, s3_file_list, bucket_name)
def _get_scene_files(scene):
assert os.path.exists(scene), "Scene {} doesn't exist".format(scene)
scene_files = glob.glob(os.path.join(scene, "**"), recursive=True)
return [_f for _f in scene_files if os.path.isfile(_f)]
# Invoke the click CLI when run as a script.
if __name__ == '__main__':
    cli()
| 2,438 | 818 |
#!/usr/bin/env python
###############################################################################
## Databrowse: An Extensible Data Management Platform ##
## Copyright (C) 2012-2016 Iowa State University Research Foundation, Inc. ##
## All rights reserved. ##
## ##
## Redistribution and use in source and binary forms, with or without ##
## modification, are permitted provided that the following conditions are ##
## met: ##
## 1. Redistributions of source code must retain the above copyright ##
## notice, this list of conditions and the following disclaimer. ##
## 2. Redistributions in binary form must reproduce the above copyright ##
## notice, this list of conditions and the following disclaimer in the ##
## documentation and/or other materials provided with the distribution. ##
## 3. Neither the name of the copyright holder nor the names of its ##
## contributors may be used to endorse or promote products derived from ##
## this software without specific prior written permission. ##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ##
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED ##
## TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A ##
## PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER ##
## OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, ##
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, ##
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR ##
## PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF ##
## LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING ##
## NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS ##
## SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ##
## ##
## This material is based on work supported by the Air Force Research ##
## Laboratory under Contract #FA8650-10-D-5210, Task Order #023 and ##
## performed at Iowa State University. ##
## ##
## DISTRIBUTION A. Approved for public release: distribution unlimited; ##
## 19 Aug 2016; 88ABW-2016-4051. ##
###############################################################################
""" databrowse.wsgi - Entry Point for Main Application """
import sys
import os
import string
from lxml import etree
from time import time
# Enable cgitb so unhandled exceptions render detailed HTML tracebacks in
# the browser instead of a bare 500 page (useful for this WSGI app).
import cgitb
cgitb.enable()
serverwrapper = '''<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE doc [
<!ENTITY agr "α"> <!-- -->
<!ENTITY Agr "Α"> <!-- GREEK CAPITAL LETTER ALPHA -->
<!ENTITY bgr "β"> <!-- GREEK SMALL LETTER BETA -->
<!ENTITY Bgr "Β"> <!-- GREEK CAPITAL LETTER BETA -->
<!ENTITY ggr "γ"> <!-- GREEK SMALL LETTER GAMMA -->
<!ENTITY Ggr "Γ"> <!-- GREEK CAPITAL LETTER GAMMA -->
<!ENTITY dgr "δ"> <!-- GREEK SMALL LETTER DELTA -->
<!ENTITY Dgr "Δ"> <!-- GREEK CAPITAL LETTER DELTA -->
<!ENTITY egr "ε"> <!-- -->
<!ENTITY Egr "Ε"> <!-- GREEK CAPITAL LETTER EPSILON -->
<!ENTITY zgr "ζ"> <!-- GREEK SMALL LETTER ZETA -->
<!ENTITY Zgr "Ζ"> <!-- GREEK CAPITAL LETTER ZETA -->
<!ENTITY eegr "η"> <!-- GREEK SMALL LETTER ETA -->
<!ENTITY EEgr "Η"> <!-- GREEK CAPITAL LETTER ETA -->
<!ENTITY thgr "θ"> <!-- -->
<!ENTITY THgr "Θ"> <!-- GREEK CAPITAL LETTER THETA -->
<!ENTITY igr "ι"> <!-- GREEK SMALL LETTER IOTA -->
<!ENTITY Igr "Ι"> <!-- GREEK CAPITAL LETTER IOTA -->
<!ENTITY kgr "κ"> <!-- GREEK SMALL LETTER KAPPA -->
<!ENTITY Kgr "Κ"> <!-- GREEK CAPITAL LETTER KAPPA -->
<!ENTITY lgr "λ"> <!-- GREEK SMALL LETTER LAMDA -->
<!ENTITY Lgr "Λ"> <!-- GREEK CAPITAL LETTER LAMDA -->
<!ENTITY mgr "μ"> <!-- GREEK SMALL LETTER MU -->
<!ENTITY Mgr "Μ"> <!-- GREEK CAPITAL LETTER MU -->
<!ENTITY ngr "ν"> <!-- GREEK SMALL LETTER NU -->
<!ENTITY Ngr "Ν"> <!-- GREEK CAPITAL LETTER NU -->
<!ENTITY xgr "ξ"> <!-- GREEK SMALL LETTER XI -->
<!ENTITY Xgr "Ξ"> <!-- GREEK CAPITAL LETTER XI -->
<!ENTITY ogr "ο"> <!-- GREEK SMALL LETTER OMICRON -->
<!ENTITY Ogr "Ο"> <!-- GREEK CAPITAL LETTER OMICRON -->
<!ENTITY pgr "π"> <!-- GREEK SMALL LETTER PI -->
<!ENTITY Pgr "Π"> <!-- GREEK CAPITAL LETTER PI -->
<!ENTITY rgr "ρ"> <!-- GREEK SMALL LETTER RHO -->
<!ENTITY Rgr "Ρ"> <!-- GREEK CAPITAL LETTER RHO -->
<!ENTITY sgr "σ"> <!-- GREEK SMALL LETTER SIGMA -->
<!ENTITY Sgr "Σ"> <!-- GREEK CAPITAL LETTER SIGMA -->
<!ENTITY sfgr "ς"> <!-- -->
<!ENTITY tgr "τ"> <!-- GREEK SMALL LETTER TAU -->
<!ENTITY Tgr "Τ"> <!-- GREEK CAPITAL LETTER TAU -->
<!ENTITY ugr "υ"> <!-- GREEK SMALL LETTER UPSILON -->
<!ENTITY Ugr "Υ"> <!-- -->
<!ENTITY phgr "φ"> <!-- GREEK SMALL LETTER PHI -->
<!ENTITY PHgr "Φ"> <!-- GREEK CAPITAL LETTER PHI -->
<!ENTITY khgr "χ"> <!-- GREEK SMALL LETTER CHI -->
<!ENTITY KHgr "Χ"> <!-- GREEK CAPITAL LETTER CHI -->
<!ENTITY psgr "ψ"> <!-- GREEK SMALL LETTER PSI -->
<!ENTITY PSgr "Ψ"> <!-- GREEK CAPITAL LETTER PSI -->
<!ENTITY ohgr "ω"> <!-- GREEK SMALL LETTER OMEGA -->
<!ENTITY OHgr "Ω"> <!-- GREEK CAPITAL LETTER OMEGA -->
]>
<xsl:stylesheet xmlns="http://www.w3.org/1999/xhtml" xmlns:html="http://www.w3.org/1999/xhtml" xmlns:db="http://thermal.cnde.iastate.edu/databrowse" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns:xi="http://www.w3.org/2001/XInclude" version="1.0">
<xsl:output method="xml" omit-xml-declaration="no" indent="no" version="1.0" media-type="application/xhtml+xml" encoding="UTF-8" doctype-public="-//W3C//DTD XHTML 1.1//EN" doctype-system="http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd"/>
<xsl:variable name="resdir">%s</xsl:variable>
<xsl:variable name="dataroot">%s</xsl:variable>
<xsl:variable name="proctime">%s</xsl:variable>
<xsl:template match="/">
<html xmlns="http://www.w3.org/1999/xhtml" xmlns:db="http://thermal.cnde.iastate.edu/databrowse">
<body>
<xsl:attribute name="db:resdir"><xsl:value-of select="$resdir"/></xsl:attribute>
<xsl:attribute name="db:proctime"><xsl:value-of select="$proctime"/></xsl:attribute>
%s
<xsl:apply-templates mode="%s"/>
</body>
</html>
</xsl:template>
%s
</xsl:stylesheet>'''
localwrapper = '''<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE doc [
<!ENTITY agr "α"> <!-- -->
<!ENTITY Agr "Α"> <!-- GREEK CAPITAL LETTER ALPHA -->
<!ENTITY bgr "β"> <!-- GREEK SMALL LETTER BETA -->
<!ENTITY Bgr "Β"> <!-- GREEK CAPITAL LETTER BETA -->
<!ENTITY ggr "γ"> <!-- GREEK SMALL LETTER GAMMA -->
<!ENTITY Ggr "Γ"> <!-- GREEK CAPITAL LETTER GAMMA -->
<!ENTITY dgr "δ"> <!-- GREEK SMALL LETTER DELTA -->
<!ENTITY Dgr "Δ"> <!-- GREEK CAPITAL LETTER DELTA -->
<!ENTITY egr "ε"> <!-- -->
<!ENTITY Egr "Ε"> <!-- GREEK CAPITAL LETTER EPSILON -->
<!ENTITY zgr "ζ"> <!-- GREEK SMALL LETTER ZETA -->
<!ENTITY Zgr "Ζ"> <!-- GREEK CAPITAL LETTER ZETA -->
<!ENTITY eegr "η"> <!-- GREEK SMALL LETTER ETA -->
<!ENTITY EEgr "Η"> <!-- GREEK CAPITAL LETTER ETA -->
<!ENTITY thgr "θ"> <!-- -->
<!ENTITY THgr "Θ"> <!-- GREEK CAPITAL LETTER THETA -->
<!ENTITY igr "ι"> <!-- GREEK SMALL LETTER IOTA -->
<!ENTITY Igr "Ι"> <!-- GREEK CAPITAL LETTER IOTA -->
<!ENTITY kgr "κ"> <!-- GREEK SMALL LETTER KAPPA -->
<!ENTITY Kgr "Κ"> <!-- GREEK CAPITAL LETTER KAPPA -->
<!ENTITY lgr "λ"> <!-- GREEK SMALL LETTER LAMDA -->
<!ENTITY Lgr "Λ"> <!-- GREEK CAPITAL LETTER LAMDA -->
<!ENTITY mgr "μ"> <!-- GREEK SMALL LETTER MU -->
<!ENTITY Mgr "Μ"> <!-- GREEK CAPITAL LETTER MU -->
<!ENTITY ngr "ν"> <!-- GREEK SMALL LETTER NU -->
<!ENTITY Ngr "Ν"> <!-- GREEK CAPITAL LETTER NU -->
<!ENTITY xgr "ξ"> <!-- GREEK SMALL LETTER XI -->
<!ENTITY Xgr "Ξ"> <!-- GREEK CAPITAL LETTER XI -->
<!ENTITY ogr "ο"> <!-- GREEK SMALL LETTER OMICRON -->
<!ENTITY Ogr "Ο"> <!-- GREEK CAPITAL LETTER OMICRON -->
<!ENTITY pgr "π"> <!-- GREEK SMALL LETTER PI -->
<!ENTITY Pgr "Π"> <!-- GREEK CAPITAL LETTER PI -->
<!ENTITY rgr "ρ"> <!-- GREEK SMALL LETTER RHO -->
<!ENTITY Rgr "Ρ"> <!-- GREEK CAPITAL LETTER RHO -->
<!ENTITY sgr "σ"> <!-- GREEK SMALL LETTER SIGMA -->
<!ENTITY Sgr "Σ"> <!-- GREEK CAPITAL LETTER SIGMA -->
<!ENTITY sfgr "ς"> <!-- -->
<!ENTITY tgr "τ"> <!-- GREEK SMALL LETTER TAU -->
<!ENTITY Tgr "Τ"> <!-- GREEK CAPITAL LETTER TAU -->
<!ENTITY ugr "υ"> <!-- GREEK SMALL LETTER UPSILON -->
<!ENTITY Ugr "Υ"> <!-- -->
<!ENTITY phgr "φ"> <!-- GREEK SMALL LETTER PHI -->
<!ENTITY PHgr "Φ"> <!-- GREEK CAPITAL LETTER PHI -->
<!ENTITY khgr "χ"> <!-- GREEK SMALL LETTER CHI -->
<!ENTITY KHgr "Χ"> <!-- GREEK CAPITAL LETTER CHI -->
<!ENTITY psgr "ψ"> <!-- GREEK SMALL LETTER PSI -->
<!ENTITY PSgr "Ψ"> <!-- GREEK CAPITAL LETTER PSI -->
<!ENTITY ohgr "ω"> <!-- GREEK SMALL LETTER OMEGA -->
<!ENTITY OHgr "Ω"> <!-- GREEK CAPITAL LETTER OMEGA -->
]>
<xsl:stylesheet xmlns="http://www.w3.org/1999/xhtml" xmlns:html="http://www.w3.org/1999/xhtml" xmlns:db="http://thermal.cnde.iastate.edu/databrowse" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns:xi="http://www.w3.org/2001/XInclude" version="1.0">
<xsl:output method="xml" omit-xml-declaration="no" indent="no" version="1.0" media-type="application/xhtml+xml" encoding="UTF-8" doctype-public="-//W3C//DTD XHTML 1.1//EN" doctype-system="http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd"/>
<xsl:variable name="resdir">%s</xsl:variable>
<xsl:variable name="dataroot">%s</xsl:variable>
<xsl:variable name="proctime">%s</xsl:variable>
<xsl:template match="/">
<xsl:processing-instruction name="xml-stylesheet">type="text/xsl" href="/dbres/db_web.xml"</xsl:processing-instruction>
<html xmlns="http://www.w3.org/1999/xhtml" xmlns:db="http://thermal.cnde.iastate.edu/databrowse">
<body>
<xsl:attribute name="db:resdir"><xsl:value-of select="$resdir"/></xsl:attribute>
<xsl:attribute name="db:proctime"><xsl:value-of select="$proctime"/></xsl:attribute>
%s
<xsl:apply-templates mode="%s"/>
</body>
</html>
</xsl:template>
%s
</xsl:stylesheet>'''
ajaxwrapper = '''<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE doc [
<!ENTITY agr "α"> <!-- -->
<!ENTITY Agr "Α"> <!-- GREEK CAPITAL LETTER ALPHA -->
<!ENTITY bgr "β"> <!-- GREEK SMALL LETTER BETA -->
<!ENTITY Bgr "Β"> <!-- GREEK CAPITAL LETTER BETA -->
<!ENTITY ggr "γ"> <!-- GREEK SMALL LETTER GAMMA -->
<!ENTITY Ggr "Γ"> <!-- GREEK CAPITAL LETTER GAMMA -->
<!ENTITY dgr "δ"> <!-- GREEK SMALL LETTER DELTA -->
<!ENTITY Dgr "Δ"> <!-- GREEK CAPITAL LETTER DELTA -->
<!ENTITY egr "ε"> <!-- -->
<!ENTITY Egr "Ε"> <!-- GREEK CAPITAL LETTER EPSILON -->
<!ENTITY zgr "ζ"> <!-- GREEK SMALL LETTER ZETA -->
<!ENTITY Zgr "Ζ"> <!-- GREEK CAPITAL LETTER ZETA -->
<!ENTITY eegr "η"> <!-- GREEK SMALL LETTER ETA -->
<!ENTITY EEgr "Η"> <!-- GREEK CAPITAL LETTER ETA -->
<!ENTITY thgr "θ"> <!-- -->
<!ENTITY THgr "Θ"> <!-- GREEK CAPITAL LETTER THETA -->
<!ENTITY igr "ι"> <!-- GREEK SMALL LETTER IOTA -->
<!ENTITY Igr "Ι"> <!-- GREEK CAPITAL LETTER IOTA -->
<!ENTITY kgr "κ"> <!-- GREEK SMALL LETTER KAPPA -->
<!ENTITY Kgr "Κ"> <!-- GREEK CAPITAL LETTER KAPPA -->
<!ENTITY lgr "λ"> <!-- GREEK SMALL LETTER LAMDA -->
<!ENTITY Lgr "Λ"> <!-- GREEK CAPITAL LETTER LAMDA -->
<!ENTITY mgr "μ"> <!-- GREEK SMALL LETTER MU -->
<!ENTITY Mgr "Μ"> <!-- GREEK CAPITAL LETTER MU -->
<!ENTITY ngr "ν"> <!-- GREEK SMALL LETTER NU -->
<!ENTITY Ngr "Ν"> <!-- GREEK CAPITAL LETTER NU -->
<!ENTITY xgr "ξ"> <!-- GREEK SMALL LETTER XI -->
<!ENTITY Xgr "Ξ"> <!-- GREEK CAPITAL LETTER XI -->
<!ENTITY ogr "ο"> <!-- GREEK SMALL LETTER OMICRON -->
<!ENTITY Ogr "Ο"> <!-- GREEK CAPITAL LETTER OMICRON -->
<!ENTITY pgr "π"> <!-- GREEK SMALL LETTER PI -->
<!ENTITY Pgr "Π"> <!-- GREEK CAPITAL LETTER PI -->
<!ENTITY rgr "ρ"> <!-- GREEK SMALL LETTER RHO -->
<!ENTITY Rgr "Ρ"> <!-- GREEK CAPITAL LETTER RHO -->
<!ENTITY sgr "σ"> <!-- GREEK SMALL LETTER SIGMA -->
<!ENTITY Sgr "Σ"> <!-- GREEK CAPITAL LETTER SIGMA -->
<!ENTITY sfgr "ς"> <!-- -->
<!ENTITY tgr "τ"> <!-- GREEK SMALL LETTER TAU -->
<!ENTITY Tgr "Τ"> <!-- GREEK CAPITAL LETTER TAU -->
<!ENTITY ugr "υ"> <!-- GREEK SMALL LETTER UPSILON -->
<!ENTITY Ugr "Υ"> <!-- -->
<!ENTITY phgr "φ"> <!-- GREEK SMALL LETTER PHI -->
<!ENTITY PHgr "Φ"> <!-- GREEK CAPITAL LETTER PHI -->
<!ENTITY khgr "χ"> <!-- GREEK SMALL LETTER CHI -->
<!ENTITY KHgr "Χ"> <!-- GREEK CAPITAL LETTER CHI -->
<!ENTITY psgr "ψ"> <!-- GREEK SMALL LETTER PSI -->
<!ENTITY PSgr "Ψ"> <!-- GREEK CAPITAL LETTER PSI -->
<!ENTITY ohgr "ω"> <!-- GREEK SMALL LETTER OMEGA -->
<!ENTITY OHgr "Ω"> <!-- GREEK CAPITAL LETTER OMEGA -->
]>
<xsl:stylesheet xmlns="http://www.w3.org/1999/xhtml" xmlns:db="http://thermal.cnde.iastate.edu/databrowse" xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns:html="http://www.w3.org/1999/xhtml" xmlns:xi="http://www.w3.org/2001/XInclude" xmlns:xlink="http://www.w3.org/1999/xlink" version="1.0">
<xsl:output method="xml" omit-xml-declaration="no" indent="yes" version="1.0" media-type="application/xhtml+xml" encoding="UTF-8" doctype-public="-//W3C//DTD XHTML 1.1//EN" doctype-system="http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd"/>
<xsl:template match="/">
<xsl:apply-templates mode="%s"/>
</xsl:template>
%s
</xsl:stylesheet>'''
class FileResolver(etree.Resolver):
    """lxml entity resolver that maps relative URLs onto a base directory.

    http:// URLs are passed to resolve_filename unchanged; everything else
    is resolved as a filename relative to the directory given at
    construction time.
    """

    def __init__(self, path):
        # base directory used to absolutize relative references
        self._path = path

    def resolve(self, url, pubid, context):
        if url.startswith('http://'):
            return self.resolve_filename(url, context)
        return self.resolve_filename(os.path.abspath(self._path + '/' + url), context)
def application(environ, start_response):
""" Entry Point for WSGI Application """
os.environ["HOME"] = "/home/www/.home"
try:
# Add paths and import support modules
#if os.path.dirname(environ['SCRIPT_FILENAME']) not in sys.path: Removed 8/5/13 - Transition to Installed Modules
# sys.path.append(os.path.dirname(environ['SCRIPT_FILENAME']))
#if os.path.dirname(environ['SCRIPT_FILENAME'] + '/support/') not in sys.path:
# sys.path.append(os.path.dirname(environ['SCRIPT_FILENAME']) + '/support/')
#import web_support as db_web_support_module
starttime = time()
import databrowse.support.web_support as db_web_support_module
# Set up web_support class with environment information
db_web_support = db_web_support_module.web_support(environ, start_response)
# Determine Requested File/Folder Absolute Path and Path Relative to Dataroot
if "path" not in db_web_support.req.form:
fullpath = db_web_support.dataroot
relpath = '/'
pass
else:
fullpath = os.path.abspath(db_web_support.dataroot + '/' + db_web_support.req.form["path"].value)
if not fullpath.startswith(db_web_support.dataroot):
return db_web_support.req.return_error(403)
if os.access(fullpath, os.R_OK) and os.path.exists(fullpath):
if fullpath == db_web_support.dataroot:
relpath = '/'
pass
else:
relpath = fullpath.replace(db_web_support.dataroot, '')
pass
pass
elif not os.path.exists(fullpath):
return db_web_support.req.return_error(404)
else:
return db_web_support.req.return_error(401)
pass
relpath = '/'.join(relpath.split('\\'))
# Import Plugin Directory
#if db_web_support.pluginpath not in sys.path: # Removed 8/5/13 - Transition to Installed Modules
# sys.path.append(db_web_support.pluginpath)
# Determine handler for requested path
#import handler_support as handler_support_module
import databrowse.support.handler_support as handler_support_module
handler_support = handler_support_module.handler_support(db_web_support.icondbpath, db_web_support.hiddenfiledbpath, db_web_support.directorypluginpath)
handlers = handler_support.GetHandler(fullpath)
handler = handlers[-1]
# Let's see if we want to override the default handler
if "handler" in db_web_support.req.form:
handler = db_web_support.req.form['handler'].value
pass
# Get A Handle to The Rendering Plugin
caller = "databrowse"
exec "import databrowse.plugins.%s.%s as %s_module" % (handler, handler, handler)
exec "renderer = %s_module.%s(relpath, fullpath, db_web_support, handler_support, caller, handlers%s%s%s)" % (handler, handler,\
', content_mode="' + db_web_support.req.form["content_mode"].value + '"' if "content_mode" in db_web_support.req.form else '',\
', style_mode="' + db_web_support.req.form['style_mode'].value + '"' if "style_mode" in db_web_support.req.form else '',\
', recursion_depth=' + db_web_support.req.form['recursion_depth'].value + '' if "recursion_depth" in db_web_support.req.form else '')
# Register Primary Namespace
#etree.register_namespace('db', 'http://thermal.cnde.iastate.edu/databrowse')
if not renderer.isRaw() and not renderer.isGit():
# Prepare Top Menu String
topbarstring = '<div class="pathbar"><div style="float:left">'
linkstring = db_web_support.siteurl
itemslist = string.split(relpath, "/")[1:]
count = 1
if itemslist[0] != "":
topbarstring += '<a style="padding:0 5px;position:relative;top:3px;" href="%s"><img src="%s/icons/go-home.png"/></a><a class="button" href="%s">/</a>>' % (linkstring, db_web_support.resurl, linkstring)
pass
for item in itemslist:
if item != "" and count != len(itemslist):
linkstring += "/" + item
topbarstring += '<a class="button" href="%s">%s</a>>' % (linkstring, item)
pass
elif item != "" and count == len(itemslist):
linkstring += "/" + item
topbarstring += '<a class="button active" href="%s">%s</a>' % (linkstring, item)
pass
else:
topbarstring += '<a style="padding:0 5px;position:relative;top:3px;" href="%s"><img src="%s/icons/go-home.png"/></a><a class="button active" href="%s">/</a>' % (linkstring, db_web_support.resurl, linkstring)
pass
count += 1
pass
topbarstring += "</div><div id='toggleexpand'><a onclick='togglefullwidth()' style='position:relative; right: 3px; top: 2px; float:right; cursor:pointer'><img src='%s/icons/gtk-fullscreen.png'/></a></div></div>" % db_web_support.resurl
# If we are only requesting content or style, output them
if "contentonly" in db_web_support.req.form:
xml = etree.ElementTree(renderer.getContent())
db_web_support.req.response_headers['Content-Type'] = 'text/xml'
db_web_support.req.output = etree.tostring(xml)
return [db_web_support.req.return_page()]
elif "styleonly" in db_web_support.req.form:
endtime = time()
runtime = "%.6f" % (endtime-starttime)
style = serverwrapper % (db_web_support.resurl, db_web_support.dataroot.replace("\\", "/"), runtime, topbarstring, renderer.getContentMode(), db_web_support.style.GetStyle())
parser = etree.XMLParser()
parser.resolvers.add(FileResolver(os.path.dirname(fullpath)))
styletree = etree.ElementTree(etree.XML(style, parser))
styletree.xinclude()
db_web_support.req.response_headers['Content-Type'] = 'text/xml'
db_web_support.req.output = etree.tostring(styletree)
return [db_web_support.req.return_page()]
else:
pass
# If we want styling to be done by the browser or we don't want page styling
parser = etree.XMLParser()
parser.resolvers.add(FileResolver(os.path.dirname(fullpath)))
if "ajax" in db_web_support.req.form:
return renderer.getContent()
elif renderer.getContentMode() == "ajax":
xml = etree.ElementTree(renderer.getContent())
style = ajaxwrapper % (renderer.getContentMode(), db_web_support.style.GetStyle())
styletree = etree.ElementTree(etree.XML(style, parser))
styletree.xinclude()
content = xml.xslt(styletree.getroot())
db_web_support.req.output = etree.tostring(content)
db_web_support.req.response_headers['Content-Type'] = 'application/xhtml+xml'
return [db_web_support.req.return_page()]
elif "nopagestyle" in db_web_support.req.form:
xml = etree.ElementTree(renderer.getContent())
endtime = time()
runtime = "%.6f" % (endtime-starttime)
style = serverwrapper % (db_web_support.resurl, db_web_support.dataroot.replace("\\", "/"), runtime, topbarstring, renderer.getContentMode(), db_web_support.style.GetStyle())
styletree = etree.ElementTree(etree.XML(style, parser))
styletree.xinclude()
content = xml.xslt(styletree.getroot())
db_web_support.req.output = etree.tostring(content)
db_web_support.req.response_headers['Content-Type'] = 'application/xhtml+xml'
return [db_web_support.req.return_page()]
elif "localpagestyle" in db_web_support.req.form:
xml = etree.ElementTree(renderer.getContent())
endtime = time()
runtime = "%.6f" % (endtime-starttime)
style = localwrapper % (db_web_support.resurl, db_web_support.dataroot.replace("\\", "/"), runtime, topbarstring, renderer.getContentMode(), db_web_support.style.GetStyle())
styletree = etree.ElementTree(etree.XML(style, parser))
styletree.xinclude()
content = xml.xslt(styletree.getroot())
contentroot = content.getroot()
renderer.loadMenu()
contentroot.append(db_web_support.menu.GetMenu())
db_web_support.req.output = etree.tostring(content)
db_web_support.req.response_headers['Content-Type'] = 'application/xhtml+xml'
return [db_web_support.req.return_page()]
else:
xml = etree.ElementTree(renderer.getContent())
endtime = time()
runtime = "%.6f" % (endtime-starttime)
style = serverwrapper % (db_web_support.resurl, db_web_support.dataroot.replace("\\", "/"), runtime, topbarstring, renderer.getContentMode(), db_web_support.style.GetStyle())
styletree = etree.ElementTree(etree.XML(style, parser))
styletree.xinclude()
content = xml.xslt(styletree.getroot())
contentroot = content.getroot()
renderer.loadMenu()
contentroot.append(db_web_support.menu.GetMenu())
f = open(os.path.join(db_web_support.webdir, "resources/db_web.xml"))
template = etree.parse(f)
f.close()
db_web_support.req.output = str(content.xslt(template))
db_web_support.req.response_headers['Content-Type'] = 'application/xhtml+xml'
#raise Exception("Testing")
return [db_web_support.req.return_page()]
else:
# We're outputting raw content, so pass it off to the plugin to do its thing
return renderer.getContent()
pass
except Exception as err:
# Something has gone terribly wrong, let's display some useful information to the user
# Error Page Template
errormessage = '''\
<?xml-stylesheet type="text/xsl" href="/dbres/db_web.xml"?>
<html xmlns="http://www.w3.org/1999/xhtml" xmlns:html="http://www.w3.org/1999/xhtml" xmlns:db="http://thermal.cnde.iastate.edu/databrowse">
<body db:resdir="/dbres/">
<h1>500 Internal Server Error</h1>
<p>An unhandled exception has occurred. Notify the administrators for assistance. Please make note of what you were doing, the steps to reproduce the error, and the approximate time. More details are shown below:</p>
<p>
<strong>Error:</strong> %s <br/>
<strong>Time:</strong> %s <br/>
<strong>Hostname:</strong> %s <br/>
<strong>Platform:</strong> %s <strong>Python:</strong> %s <br/>
<strong>PID:</strong> %s <br/>
<strong>Traceback:</strong> <br/>
<pre style="overflow:auto">%s</pre>
<strong>Environment:</strong> <br/>
<pre style="overflow:auto">%s</pre>
<strong>Request Variables:</strong> <br/>
<pre style="overflow:auto">%s</pre>
<strong>Dir()</strong> <br/>
<pre style="overflow:auto">%s</pre>
</p>
</body>
<db:navigation xmlns="http://www.w3.org/1999/xhtml" xmlns:db="http://thermal.cnde.iastate.edu/databrowse">
<db:navbar>
<db:navelem><a href="javascript:window.history.back()">Go Back</a></db:navelem>
</db:navbar>
</db:navigation>
</html>'''
# Import Modules Needed For All Of This - No need to import these things otherwise
import traceback
import StringIO
import cgi
import socket
from time import gmtime, strftime
# Get Our Own FieldStorage Object
fs = environ['wsgi.input'] if isinstance(environ['wsgi.input'], cgi.FieldStorage) else None
if fs is None:
form = cgi.FieldStorage(fp=environ["wsgi.input"], environ=environ, keep_blank_values=1)
pass
else:
form = fs
pass
# Get a Trace and Also Output a Copy of the Trace to the Server Log
trace = StringIO.StringIO()
exc_type, exc_value, exc_traceback = sys.exc_info()
traceback.print_exception(exc_type, exc_value, exc_traceback, file=trace)
traceback.print_exception(exc_type, exc_value, exc_traceback)
tracestring = trace.getvalue()
trace.close()
tracestring = tracestring.replace('&', " ").replace('<', "<").replace('>', ">")
# Return Proper Error so AJAX Works
if "ajax" in form:
start_response('500 Internal Server Error', {'Content-Type': 'text/html', 'Content-Length': '25'}.items())
return ['500 Internal Server Error']
else:
# Now we can get a list of request variables
inputstring = ""
for key in form.keys():
try:
inputstring = inputstring + "%s: %s \n" % (key, repr(form[key].value))
except AttributeError:
pass
pass
inputstring = inputstring.replace('<', "<").replace('>', ">").replace('&', " ")
# Get A List of Everything in Environ
keystring = ""
keys = environ.keys()
keys.sort()
for key in keys:
keystring = keystring + "%s: %s \n" % (key, repr(environ[key]))
pass
keystring = keystring.replace('&', " ").replace('<', "<").replace('>', ">")
# Get a list of everything in dir()
dirstring = ""
for name in dir():
dirstring = dirstring + "%s %s: %s \n" % (name, str(type(name)), repr(eval(name)))
dirstring = dirstring.replace('&', " ").replace('<', "<").replace('>', ">")
# Output Error Message
err = str(err).replace('&', " ").replace('<', "<").replace('>', ">")
errormessage = errormessage % (err, strftime("%Y-%m-%d %H:%M:%S", gmtime()), socket.getfqdn(), sys.platform, sys.version, os.getpid(), tracestring, keystring, inputstring, dirstring)
start_response('200 OK', {'Content-Type': 'text/xml', 'Content-Length': str(len(errormessage))}.items())
return [errormessage]
pass
class Debugger:
    """Callable wrapper that runs the wrapped WSGI app under pdb.

    Intended for debugging Apache in single-instance mode
    (``httpd -X``), where a console is attached to the worker process.
    """

    def __init__(self, object):
        # The wrapped callable (typically the WSGI application).
        self.__object = object

    def __call__(self, *args, **kwargs):
        import pdb
        import sys
        session = pdb.Pdb()
        # Non-interactive input mode: read commands from session.stdin.
        session.use_rawinput = 0
        session.reset()
        sys.settrace(session.trace_dispatch)
        try:
            return self.__object(*args, **kwargs)
        finally:
            # Always detach the tracer, even if the wrapped call raised.
            session.quitting = 1
            sys.settrace(None)
class Profiler:
    """Callable wrapper that profiles the wrapped WSGI app with pycallgraph.

    Every invocation renders a call graph to ``/tmp/pycallgraph.svg``;
    meant for Apache in single-instance mode (``httpd -X``).
    """

    def __init__(self, object):
        # The wrapped callable (typically the WSGI application).
        self.__object = object

    def __call__(self, *args, **kwargs):
        from pycallgraph import PyCallGraph
        from pycallgraph.output import GraphvizOutput
        renderer = GraphvizOutput(output_file='/tmp/pycallgraph.svg', output_type='svg')
        with PyCallGraph(output=renderer):
            return self.__object(*args, **kwargs)
# Uncomment this line to enable PDB
# Apache must be ran in single instance mode using the following commands:
# sudo /etc/init.d/httpd stop
# httpd -X
#application = Debugger(application)
# Uncomment the below code to enable profiling
# Apache must be ran in single instance mode using the following commands:
# sudo /etc/init.d/httpd stop
# httpd -X
#application = Profiler(application)
| 32,185 | 11,312 |
import curses
from app.environment import PuzzleEnvironment, PuzzleEnvironmentSettings, PuzzleAction
from app.views import TerminalView, AbstractView
from app.inputs import TerminalInput, AbstractInput
class CursesKeysWrapper:
    """Key codes the game loop reacts to.

    Letter keys are plain ASCII codes; the arrow keys come from curses.
    """
    # Quit / reset controls.
    Q = ord('q')
    ESC = 27
    R = ord('r')
    # Movement: WASD plus the matching arrow keys.
    W = ord('w')
    KEY_UP = curses.KEY_UP
    D = ord('d')
    KEY_RIGHT = curses.KEY_RIGHT
    S = ord('s')
    KEY_DOWN = curses.KEY_DOWN
    A = ord('a')
    KEY_LEFT = curses.KEY_LEFT
class PuzzleGame:
    """Interactive sliding-puzzle session wiring environment, view and input."""

    def __init__(self, window, env_settings: PuzzleEnvironmentSettings, debug: bool):
        # Highest tile value on the board (total cells minus the empty slot).
        self.max_value = env_settings.cols_number * env_settings.rows_number - 1
        self.puzzle_env: PuzzleEnvironment = PuzzleEnvironment(env_settings)
        self.view: AbstractView = TerminalView(window, debug)
        self.input: AbstractInput = TerminalInput(window)

    def start(self):
        """Run the render/input loop until the player quits (q or ESC)."""
        self.__reset_game()
        key = None
        # Map every movement key (WASD and arrows) to its puzzle action.
        movement = {
            CursesKeysWrapper.W: PuzzleAction.UP,
            CursesKeysWrapper.KEY_UP: PuzzleAction.UP,
            CursesKeysWrapper.D: PuzzleAction.RIGHT,
            CursesKeysWrapper.KEY_RIGHT: PuzzleAction.RIGHT,
            CursesKeysWrapper.S: PuzzleAction.DOWN,
            CursesKeysWrapper.KEY_DOWN: PuzzleAction.DOWN,
            CursesKeysWrapper.A: PuzzleAction.LEFT,
            CursesKeysWrapper.KEY_LEFT: PuzzleAction.LEFT,
        }
        while True:
            env_state = self.puzzle_env.get_state()
            solved = self.puzzle_env.is_completed()
            self.view.render_screen(self.step, env_state, key, self.max_value, solved)
            key = self.input.get_ch()
            if key in (CursesKeysWrapper.Q, CursesKeysWrapper.ESC):
                break
            if key == CursesKeysWrapper.R:
                self.__reset_game()
                continue
            if solved:
                # Board already solved: only quit/reset are meaningful.
                continue
            action = movement.get(key)
            if action is None:
                # Ignore any other key press.
                continue
            self.step += 1
            self.puzzle_env.act(action)

    def __reset_game(self):
        # Fresh board layout and step counter.
        self.puzzle_env.setup()
        self.step = 0
| 2,124 | 661 |
from heapq import heapify, heappush, heappop
from collections import Counter, defaultdict, deque, OrderedDict
from sys import setrecursionlimit, maxsize
from bisect import bisect_left, bisect, insort_left, insort
from math import ceil, log, factorial, hypot, pi
from fractions import gcd
from copy import deepcopy
from functools import reduce
from operator import mul
from itertools import product, permutations, combinations, accumulate, cycle
from string import ascii_uppercase, ascii_lowercase, ascii_letters, digits, hexdigits, octdigits
def prod(l):
    """Product of all elements of ``l`` (``l`` must be non-empty)."""
    return reduce(mul, l)


def prodmod(l, mod):
    """Product of all elements of ``l``, reduced modulo ``mod`` each step."""
    return reduce(lambda x, y: (x * y) % mod, l)
def read_list(t):
    """Read one whitespace-separated line from stdin, converting tokens with ``t``."""
    return list(map(t, input().split()))


def read_line(t):
    """Read a single line from stdin and convert it with ``t``."""
    return t(input())


def read_lines(t, N):
    """Read ``N`` lines from stdin, converting each with ``t``."""
    return [read_line(t) for _ in range(N)]
# Process T test cases; each gives two integers R and S.
# NOTE(review): `gcd` here comes from `from fractions import gcd` above;
# that alias was removed in Python 3.9 — on modern Pythons use math.gcd.
T = read_line(int)
for _ in range(T):
    R, S = read_list(int)
    # k: number of whole multiples of S contained in R.
    k = R // S
    if S * k <= R <= S * k + S // 4:
        # R is within a quarter-of-S above the last multiple: keep R and
        # print the remainder over S as a fraction reduced by their gcd.
        g = gcd(R-S*k, S)
        print('{} {}/{}'.format(R, (R-S*k)//g, S//g))
    else:
        # Otherwise round up to the next multiple of S with a zero fraction.
        print('{} 0/1'.format(S*(k+1)))
| 1,038 | 370 |
import os
import numpy as np
from foolbox.attacks import (
FGSM,
DeepFoolAttack,
IterativeGradientSignAttack,
SaliencyMapAttack,
)
from nninst import mode
from nninst.backend.tensorflow.attack.common import (
get_overlay_summary_compare,
get_overlay_summary_compare_detail,
resnet_50_imagenet_overlap_ratio_top5_diff,
)
from nninst.backend.tensorflow.attack.cw_attack import cw_generate_adversarial_example
from nninst.backend.tensorflow.attack.cw_attacks import CarliniL2
from nninst.backend.tensorflow.attack.foolbox_attack import (
foolbox_generate_adversarial_example,
)
from nninst.backend.tensorflow.dataset import imagenet
from nninst.backend.tensorflow.dataset.imagenet_preprocessing import _CHANNEL_MEANS
from nninst.backend.tensorflow.trace.resnet_50_imagenet_class_trace_v3 import (
resnet_50_imagenet_class_trace_compact,
)
from nninst.statistics import calc_trace_side_overlap_both_compact
from nninst.trace import TraceKey
from nninst.utils.numpy import arg_approx
from nninst.utils.ray import ray_init
# Quiet TensorFlow's C++ logging (level 1 filters out INFO messages).
os.environ["TF_CPP_MIN_LOG_LEVEL"] = "1"
if __name__ == "__main__":
    # Experiment driver: for each configured attack, generate adversarial
    # examples against ResNet-50/ImageNet and measure how much their
    # activation traces overlap with the per-class traces.  The many
    # commented-out lines are earlier experiment configurations kept for
    # reference/reproducibility.
    mode.debug()
    # mode.distributed()
    # mode.local()
    # ray_init("gpu")
    # ray_init("dell")
    ray_init()
    threshold = 0.5
    # threshold = 1
    # threshold = 0.8
    # Attack registry: name -> [attack class, optional kwargs dict].
    attacks = {
        "FGSM": [FGSM],
        "BIM": [IterativeGradientSignAttack],
        "JSMA": [SaliencyMapAttack],
        "DeepFool": [DeepFoolAttack],
        "DeepFool_full": [DeepFoolAttack, dict(subsample=None)],
        "CWL2": [CarliniL2],
    }
    label = None
    # label = "best_in_10"
    # label = "worst_in_10"
    # label = "import"
    # label = "norm"
    print(f"attack model with label {label} using Foolbox")
    for attack_name in [
        "DeepFool",
        # "FGSM",
        # "BIM",
        # "JSMA",
        # "DeepFool_full",
        # "CWL2",
    ]:
        for threshold in [
            # 1.0,
            # 0.9,
            # 0.7,
            0.5,
            # 0.3,
            # 0.1,
        ]:
            print(f"attack: {attack_name}")
            # Output CSV naming; prior experiment variants kept for reference.
            # path_template = "resnet_50_imagenet_class_overlap_ratio_{0:.1f}_{1}_{2}.foolbox.csv"
            # path_template = "resnet_50_imagenet_class_overlap_ratio_{0:.1f}_{1}_{2}_train.foolbox.csv"
            # path_template = "resnet_50_imagenet_class_overlap_ratio_{0:.1f}_{1}_{2}.foolbox.weighted_iou.csv"
            # path_template = "resnet_50_imagenet_class_overlap_ratio_{0:.1f}_{1}_{2}_error.foolbox.csv"
            # path_template = "resnet_50_imagenet_class_overlap_ratio_{0:.1f}_{1}_{2}_rand.foolbox.csv"
            # path_template = "resnet_50_imagenet_class_overlap_ratio_{0:.1f}_{1}_{2}_top5_rand.foolbox.csv"
            # path_template = "resnet_50_imagenet_class_overlap_ratio_{0:.1f}_{1}_{2}_top5.foolbox.csv"
            # path_template = "resnet_50_imagenet_class_overlap_ratio_{0:.1f}_{1}_{2}_class_1.foolbox.csv"
            # path_template = "resnet_50_imagenet_class_overlap_ratio_{0:.1f}_{1}_{2}_top5_diff_all.foolbox.csv"
            path_template = "resnet_50_imagenet_class_overlap_ratio_{0:.1f}_{1}_{2}_top2_diff_all.foolbox.csv"
            # overlap_fn = calc_trace_side_overlap_compact
            overlap_fn = calc_trace_side_overlap_both_compact
            # overlap_fn = calc_weighted_iou
            # per_channel = True
            per_channel = False
            # resnet_50_overlap_ratio = resnet_50_imagenet_overlap_ratio_top5(
            # resnet_50_overlap_ratio = resnet_50_imagenet_overlap_ratio(
            resnet_50_overlap_ratio = resnet_50_imagenet_overlap_ratio_top5_diff(
                attack_name=attack_name,
                attack_fn=attacks[attack_name][0],
                # Carlini-Wagner attacks use the dedicated CW generator; all
                # others go through the generic Foolbox wrapper.
                generate_adversarial_fn=cw_generate_adversarial_example
                if attack_name.startswith("CW")
                else foolbox_generate_adversarial_example,
                class_trace_fn=lambda class_id: resnet_50_imagenet_class_trace_compact(
                    class_id, threshold, label=label
                ),
                # class_trace_fn=lambda class_id: resnet_50_imagenet_class_trace(class_id, threshold, label=label),
                select_fn=lambda input: arg_approx(input, threshold),
                overlap_fn=overlap_fn,
                # overlap_fn=calc_iou,
                # overlap_fn=calc_class_trace_side_overlap,
                # overlap_fn=calc_class_trace_side_overlap_norm,
                # overlap_fn=calc_weighted_iou,
                path=path_template.format(threshold, attack_name, label),
                # path='lenet_class_overlap_ratio_{0:.1f}_{1}_{2}.foolbox.iou.csv'.format(threshold, attack_name, label),
                # path='lenet_class_overlap_ratio_{0:.1f}_{1}_{2}.foolbox.class_side.csv'.format(
                # path='lenet_class_overlap_ratio_{0:.1f}_{1}_{2}.foolbox.wo_pool.csv'.format(
                # path='lenet_class_overlap_ratio_{0:.1f}_{1}_{2}.foolbox.class_side_norm.csv'.format(
                # path='lenet_class_overlap_ratio_{0:.1f}_{1}_{2}.foolbox.weighted_iou.csv'.format(
                #     threshold, attack_name, label),
                # Subtract the per-channel means; scale stays 1 (0-255 input).
                preprocessing=(_CHANNEL_MEANS, 1),
                bounds=(0, 255),
                channel_axis=3,
                image_size=224,
                class_num=1001,
                norm_fn=imagenet.normalize,
                data_format="channels_last",
                per_channel=per_channel,
                **(attacks[attack_name][1] if len(attacks[attack_name]) == 2 else {}),
            )
            # resnet_50_overlap_ratio = resnet_50_imagenet_overlap_ratio_error(
            #     class_trace_fn=lambda class_id: resnet_50_imagenet_class_trace_compact(class_id, threshold, label=label),
            #     select_fn=lambda input: arg_approx(input, threshold),
            #     overlap_fn=overlap_fn,
            #     path=path_template.format(threshold, attack_name, label),
            # )
            # resnet_50_overlap_ratio = resnet_50_imagenet_overlap_ratio_rand(
            # resnet_50_overlap_ratio = resnet_50_imagenet_overlap_ratio_top5_rand(
            #     class_trace_fn=lambda class_id: resnet_50_imagenet_class_trace_compact(class_id, threshold, label=label),
            #     select_fn=lambda input: arg_approx(input, threshold),
            #     overlap_fn=overlap_fn,
            #     path=path_template.format(threshold, attack_name, label),
            # )
            # resnet_50_overlap_ratio.save()
            # print("edge:")
            # summary = get_overlay_summary(lenet_overlap_ratio.load(), TraceKey.EDGE)
            # summary_file = "lenet_class_overlap_ratio_summary_{threshold:.1f}_{label}.csv".format(
            #     threshold=threshold, label=label)
            # file_exists = os.path.exists(summary_file)
            # with open(summary_file, "a") as csv_file:
            #     headers = ["attack"] + list(summary.keys())
            #     writer = csv.DictWriter(csv_file, delimiter=',', lineterminator='\n', fieldnames=headers)
            #     if not file_exists:
            #         writer.writeheader()
            #     writer.writerow({"attack": attack_name, **summary})
            # print(summary)
            # print("weight:")
            # print(get_overlay_summary(lenet_overlap_ratio.load(), TraceKey.WEIGHT))
            # print("point:")
            # print(get_overlay_summary(lenet_overlap_ratio.load(), TraceKey.POINT))
            # summary_path_template = "resnet_50_imagenet_class_overlap_ratio_summary_{threshold:.1f}_{attack}_{label}_top5_diff_all_compare.{key}.csv"
            summary_path_template = "resnet_50_imagenet_class_overlap_ratio_summary_{threshold:.1f}_{attack}_{label}_top2_diff_all_compare.{key}.csv"
            # key = TraceKey.EDGE
            # # summary_file = "alexnet_imagenet_class_overlap_ratio_summary_{threshold:.1f}_{attack}_{label}.{key}.csv".format(
            # summary_file = summary_path_template.format(
            # # summary_file = "lenet_class_overlap_ratio_summary_{threshold:.1f}_{label}.iou.csv".format(
            # # summary_file = "lenet_class_overlap_ratio_summary_{threshold:.1f}_{label}.class_side.csv".format(
            # # summary_file = "lenet_class_overlap_ratio_summary_{threshold:.1f}_{label}.wo_pool.csv".format(
            # # summary_file = "lenet_class_overlap_ratio_summary_{threshold:.1f}_{label}.class_side_norm.csv".format(
            # # summary_file = "lenet_class_overlap_ratio_summary_{threshold:.1f}_{label}.weighted_iou.csv".format(
            #     threshold=threshold, attack=attack_name, label=label, key=key)
            # with open(summary_file, "w") as csv_file:
            #     has_header = False
            #     for overlay_threshold in np.linspace(-1, 1, 201):
            #         # summary = get_overlay_summary(alexnet_overlap_ratio.load(), key, overlay_threshold)
            #         # summary = get_overlay_summary_top1(alexnet_overlap_ratio.load(), key, overlay_threshold)
            #         summary = get_overlay_summary_compare(resnet_50_overlap_ratio.load(), key, float(overlay_threshold))
            #         # summary = get_overlay_summary_compare_filter(alexnet_overlap_ratio.load(), key, float(overlay_threshold))
            #         # summary = get_overlay_summary_one_side(alexnet_overlap_ratio.load(), key, overlay_threshold)
            #         if not has_header:
            #             headers = ["attack"] + list(summary.keys())
            #             writer = csv.DictWriter(csv_file, delimiter=',', lineterminator='\n', fieldnames=headers)
            #             writer.writeheader()
            #             has_header = True
            #         writer.writerow({"attack": attack_name, **summary})
            # Write the detailed per-threshold comparison summary to CSV.
            summary_file = summary_path_template.format(
                threshold=threshold, attack=attack_name, label=label, key="detail"
            )
            get_overlay_summary_compare_detail(
                summary_file, resnet_50_overlap_ratio.load(), from_zero=False
            ).save()
            # for overlay_threshold in np.arange(0, 1.01, 0.01):
            #     # summary = get_overlay_summary(resnet_50_overlap_ratio.load(), TraceKey.EDGE, overlay_threshold)
            #     for key in [TraceKey.EDGE, TraceKey.WEIGHT]:
            #         summary = get_overlay_summary(resnet_50_overlap_ratio.load(), key, overlay_threshold)
            #         # summary = get_overlay_summary_one_side(resnet_50_overlap_ratio.load(), key, overlay_threshold)
            #         # summary_path_template = "resnet_50_imagenet_class_overlap_ratio_summary_{threshold:.1f}_{attack}_{label}.csv"
            #         # summary_path_template = "resnet_50_imagenet_class_overlap_ratio_summary_{threshold:.1f}_{attack}_{label}_train.{key}.csv"
            #         # summary_path_template = "resnet_50_imagenet_class_overlap_ratio_summary_{threshold:.1f}_{attack}_{label}_train.{key}.weighted_iou.csv"
            #         # summary_path_template = "resnet_50_imagenet_class_overlap_ratio_summary_{threshold:.1f}_{attack}_{label}_error.{key}.csv"
            #         # summary_path_template = "resnet_50_imagenet_class_overlap_ratio_summary_{threshold:.1f}_{attack}_{label}_rand.{key}.csv"
            #         # summary_path_template = "resnet_50_imagenet_class_overlap_ratio_summary_{threshold:.1f}_{attack}_{label}_top5_rand.{key}.csv"
            #         # summary_path_template = "resnet_50_imagenet_class_overlap_ratio_summary_{threshold:.1f}_{attack}_{label}_top5.{key}.csv"
            #         summary_path_template = "resnet_50_imagenet_class_overlap_ratio_summary_{threshold:.1f}_{attack}_{label}_class_1.{key}.csv"
            #         summary_file = summary_path_template.format(
            #             # summary_file = "lenet_class_overlap_ratio_summary_{threshold:.1f}_{label}.iou.csv".format(
            #             # summary_file = "lenet_class_overlap_ratio_summary_{threshold:.1f}_{label}.class_side.csv".format(
            #             # summary_file = "lenet_class_overlap_ratio_summary_{threshold:.1f}_{label}.wo_pool.csv".format(
            #             # summary_file = "lenet_class_overlap_ratio_summary_{threshold:.1f}_{label}.class_side_norm.csv".format(
            #             # summary_file = "lenet_class_overlap_ratio_summary_{threshold:.1f}_{label}.weighted_iou.csv".format(
            #             threshold=threshold, attack=attack_name, label=label, key=key)
            #         file_exists = os.path.exists(summary_file)
            #         with open(summary_file, "a") as csv_file:
            #             headers = ["attack"] + list(summary.keys())
            #             writer = csv.DictWriter(csv_file, delimiter=',', lineterminator='\n', fieldnames=headers)
            #             if not file_exists:
            #                 writer.writeheader()
            #             writer.writerow({"attack": attack_name, **summary})
            # resnet_50_overlap_ratio_per_node = resnet_50_imagenet_overlap_ratio(
            #     attack_fn=attacks[attack_name][0],
            #     generate_adversarial_fn=generate_adversarial_example,
            #     class_trace_fn=lambda class_id: resnet_50_imagenet_class_trace_compact(class_id, threshold, label=label),
            #     # class_trace_fn=lambda class_id: lenet_mnist_class_trace(class_id, threshold),
            #     select_fn=lambda input: arg_approx(input, threshold),
            #     overlap_fn=calc_trace_side_overlap_compact,
            #     # overlap_fn=calc_iou,
            #     # overlap_fn=calc_class_trace_side_overlap,
            #     # overlap_fn=calc_class_trace_side_overlap_norm,
            #     # overlap_fn=calc_weighted_iou,
            #     path='resnet_50_imagenet_class_overlap_ratio_per_node_{0:.1f}_{1}_{2}.foolbox.csv'.format(
            #         threshold, attack_name, label),
            #     # path='lenet_class_overlap_ratio_{0:.1f}_{1}_{2}.foolbox.iou.csv'.format(threshold, attack_name, label),
            #     # path='lenet_class_overlap_ratio_{0:.1f}_{1}_{2}.foolbox.class_side.csv'.format(
            #     # path='lenet_class_overlap_ratio_{0:.1f}_{1}_{2}.foolbox.wo_pool.csv'.format(
            #     # path='lenet_class_overlap_ratio_{0:.1f}_{1}_{2}.foolbox.class_side_norm.csv'.format(
            #     # path='lenet_class_overlap_ratio_{0:.1f}_{1}_{2}.foolbox.weighted_iou.csv'.format(
            #     #     threshold, attack_name, label),
            #     preprocessing=(_CHANNEL_MEANS, 1),
            #     per_node=True,
            #     **(attacks[attack_name][1] if len(attacks[attack_name]) == 2 else {}),
            # )
            # resnet_50_overlap_ratio_per_node.save()
| 14,797 | 5,027 |
import cv2
import os
import sys
import unittest
import threading
if __name__ == '__main__':
    # Allow running this test file directly: make the package root importable.
    sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import leapvision.tracker
class SingleObjectTrackerTest(unittest.TestCase):
    """Tests for leapvision.tracker.SingleObjectTracker on a sample clip."""

    def setUp(self):
        """Open the sample video and start tracking a target in frame one."""
        self.video_path = os.path.join(
            os.path.dirname(__file__),
            'data',
            'people_walking.mp4'
        )
        self.camera = cv2.VideoCapture(self.video_path)
        grabbed, first_frame = self.camera.read()
        initial_box = (421, 417, 100, 160)
        self.tracker = leapvision.tracker.SingleObjectTracker(
            first_frame,
            initial_box
        )

    def test_same_image(self):
        """Tracking the initial frame again should return the initial box."""
        self.camera = cv2.VideoCapture(self.video_path)
        grabbed, frame = self.camera.read()
        tracked, found_box = self.tracker.track(frame)
        self.assertTrue(tracked)
        for actual, wanted in zip(found_box, (421, 417, 100, 160)):
            self.assertAlmostEqual(actual, wanted, delta=2)

    def test_future_frame(self):
        """After ~31 frames the target should have moved up the frame."""
        grabbed, frame = self.camera.read()
        for _ in range(30):
            grabbed, frame = self.camera.read()
        tracked, found_box = self.tracker.track(frame)
        self.assertTrue(tracked)
        for actual, wanted in zip(found_box, (423, 325, 100, 160)):
            self.assertAlmostEqual(actual, wanted, delta=2)

    def test_demo(self):
        """Interactive viewer; runs only when TRACKER_DEMO_ENABLED is set."""
        if not bool(os.environ.get('TRACKER_DEMO_ENABLED')):
            return
        video_path = os.path.join(
            os.path.dirname(__file__),
            'data',
            'people_walking.mp4'
        )
        key = 0
        # Replay the clip until the user presses 'q'.
        while key & 0xff != ord('q'):
            camera = cv2.VideoCapture(video_path)
            grabbed, frame = camera.read()
            initial_box = (421, 417, 100, 160)
            self.tracker = leapvision.tracker.SingleObjectTracker(
                frame,
                initial_box
            )
            cv2.namedWindow('camera')
            cv2.moveWindow('camera', 0, 0)
            while camera.isOpened():
                grabbed, frame = camera.read()
                if not grabbed:
                    break
                started = cv2.getTickCount()
                tracked, box = self.tracker.track(frame)
                fps = cv2.getTickFrequency() / (cv2.getTickCount() - started)
                if not tracked:
                    continue
                cv2.putText(
                    frame,
                    "fps: {}".format(int(fps)),
                    (12, 24),
                    cv2.FONT_HERSHEY_SIMPLEX,
                    0.75,
                    (0, 255, 0),
                    2
                )
                cv2.rectangle(
                    frame,
                    box[:2],
                    (box[0] + box[2], box[1] + box[3]),
                    (0, 255, 0),
                    2
                )
                cv2.imshow('camera', frame)
                key = cv2.waitKey(1)
            self.tracker.clear()
# NOTE(review): the block below is dead code, disabled by wrapping it in a
# bare string literal; its content is kept verbatim for reference.
'''
class HumanTrackerTest(unittest.TestCase):
    testVideo = None
    #setup
    def setUp(self):
        videoPath = os.path.join(os.path.dirname(__file__), 'videos', '924810169.mp4')
        self.humanTracker = leapvision.tracker.HumanTracker(videoPath)
        print("setUP")
    #Test Creation of Tracker
    def test_trackerConstructor(self):
        self.assertNotEqual(self.humanTracker.tracker, None)
        self.assertTrue(type(self.humanTracker.videoPath), type(""))
        self.assertTrue(type(self.humanTracker), type(leapvision.tracker.HumanTracker))
        print("trackerConstructor")
    #Test Initialization of Tracker
    def test_initTracker(self):
        self.expectedBbox = (700, 500, 286, 320)
        self.humanTracker.initTracker(cv2.VideoCapture(self.humanTracker.videoPath))
        actualBbox = self.humanTracker.bbox
        self.assertNotEqual(self.humanTracker.video, None)
        self.assertNotEqual(actualBbox, None)
        self.assertEqual(actualBbox, self.expectedBbox)
        self.testVideo = self.humanTracker.video
        self.test_updateTracker(self.testVideo, 1)
        print(threading.active_count())
    def test_updateTracker(self, theVideo = None, permission = 0):
        print(threading.active_count())
        framesToCycleThrough = 48
        if permission == 1:
            self.humanTracker.updateTracker(framesToCycleThrough, theVideo)
'''
if __name__ == '__main__':
    # Run the test suite when executed directly.
    unittest.main()
| 4,599 | 1,503 |
#!/usr/bin/python
#coding=utf-8
# 目录文件备份工具
# 用于备份特定的数据目录或文件
# 作者: kevin.hongs@gmail.com
# 修订: 2016/03/03
import os
import re
import sys
import time
import shutil
import tarfile
from getopt import getopt
if __name__ == "__main__":
def cmd_help():
print "Usage: cleat.py SRC_PATH DST_PATH"
print "Another options:"
print " -n --name Dest name with time format"
print " -p --pack Pack the back"
print " -z --gzip Gzip the back"
print " -h --help Show this msg"
print "--name(-n) syntax:"
print " $n_%Y%m%d$x for default"
print " $n is file name, like 'dump'"
print " $x is extension, like '.sql'"
print " %Y,%m,%d,%H,%M,%S is current year,month,date,hour,minute,second"
if len(sys.argv) < 3:
cmd_help( )
sys.exit(0)
sp = sys.argv[1]
dn = sys.argv[2]
tn = None
pc = False
gz = False
if not sp:
print "Argument 1 (source path) required!"
cmd_help( )
sys.exit(1)
if not dn:
print "Argument 2 (target path) required!"
cmd_help( )
sys.exit(1)
rp = re.compile("/$")
sp = rp.sub( "", sp )
dn = rp.sub( "", dn )
opts, args = getopt(sys.argv[3:], "n:zxh", ["name", "gzip", "pack", "help"])
for n,v in opts:
if n in ("-n", "--name"):
tn = v
if n in ("-p", "--pack"):
pc = True
if n in ("-z", "--gzip"):
gz = True
if n in ("-h", "--help"):
cmd_help( )
sys.exit(0)
# 构建目标名称
if tn is None:
tn = "$n_%Y%m%d$x"
tn = time.strftime(tn)
(fe, fn) = os.path.split (sp)
(fn, fe) = os.path.splitext(fn)
tn = tn.replace("$n", fn)
tn = tn.replace("$x", fe)
tn = dn + "/" + tn
if pc or gz:
if gz:
tn = tn+".tar.gz"
md = "w:gz"
else:
tn = tn+".tar"
md = "w"
# 执行备份操作
print "Backup files from '" + sp + "' to '" + tn + "'"
if pc or gz:
zp = tarfile.open(tn, md)
zp.add(sp)
zp.close()
else:
shutil.copytree (sp, tn)
| 2,228 | 919 |
import cv2
import numpy as np
# Locate every occurrence of a template patch inside a scene image and
# outline each match with a 1px rectangle.
scene = cv2.imread('mainimage.jpg')
scene_gray = cv2.cvtColor(scene, cv2.COLOR_BGR2GRAY)
patch = cv2.imread('template.jpg', 0)
patch_w, patch_h = patch.shape[::-1]

# Normalised cross-correlation score map; keep locations scoring >= 0.8.
scores = cv2.matchTemplate(scene_gray, patch, cv2.TM_CCOEFF_NORMED)
threshold = 0.8
match_rows, match_cols = np.where(scores >= threshold)
for top_left in zip(match_cols, match_rows):
    bottom_right = (top_left[0] + patch_w, top_left[1] + patch_h)
    cv2.rectangle(scene, top_left, bottom_right, (0, 255, 255), 1)

cv2.imshow('Det', scene)
cv2.waitKey()
import numpy as np
import cv2
from matplotlib import pyplot as plt
class window():
    """Sliding search window used to collect candidate lane pixels.

    The image is divided vertically into ``nwindow`` slices; the window's
    horizontal centroid is re-estimated whenever a slice contains more than
    ``mpixel`` nonzero pixels.
    """
    def __init__(self, nwindow, margin, mpixel, centroid):
        self.nwindow = nwindow      # number of vertical slices of the image
        self.margin = margin        # half-width of the search window (pixels)
        self.mpixel = mpixel        # min pixel count required to re-center
        self.centroid = centroid    # current window center x (pixels)

    # Function to return the points within the boundary of window n
    def get_points_within_boundary(self, window_no, binary_wrapped):
        """Return the nonzero pixels inside window ``window_no``.

        Parameters
        ----------
        window_no : int
            Window index counted from the bottom of the image;
            must be < ``self.nwindow``.
        binary_wrapped : ndarray
            Binary (warped) image whose nonzero pixels are lane candidates.

        Returns
        -------
        point_inds : ndarray
            Indices (into the nonzero-pixel arrays) of points in the window.
        window_coordinate : list of tuple
            ``[(x_low, y_low), (x_high, y_high)]`` rectangle of the window.
        nonzerox, nonzeroy : ndarray
            x/y coordinates of all nonzero pixels of the image.

        Raises
        ------
        ValueError
            If ``window_no`` is out of range.  (BUG FIX: the original code
            printed a message referencing the undefined name ``widown_no``
            and then crashed with UnboundLocalError on return.)
        """
        if window_no >= self.nwindow:
            raise ValueError("Invalid window_no %d" % window_no)
        # BUG FIX: np.int was removed in NumPy 1.24; builtin int is equivalent.
        window_height = int(binary_wrapped.shape[0] // self.nwindow)
        # Window rectangle, stacked upwards from the bottom of the image.
        win_y_low = int(binary_wrapped.shape[0] - (window_no + 1) * window_height)
        win_y_high = int(binary_wrapped.shape[0] - window_no * window_height)
        win_x_low = int(self.centroid - self.margin)
        win_x_high = int(self.centroid + self.margin)
        window_coordinate = [(win_x_low, win_y_low), (win_x_high, win_y_high)]
        # x and y coordinates of all nonzero pixels in the image.
        nonzero = binary_wrapped.nonzero()
        nonzeroy = np.array(nonzero[0])
        nonzerox = np.array(nonzero[1])
        # Indices of pixels falling inside the rectangle.
        point_inds = ((nonzeroy >= win_y_low) & (nonzeroy < win_y_high) &
                      (nonzerox >= win_x_low) & (nonzerox < win_x_high)).nonzero()[0]
        # Re-center the window when enough pixels were found.
        if len(point_inds) > self.mpixel:
            self.centroid = np.mean(nonzerox[point_inds])
        return point_inds, window_coordinate, nonzerox, nonzeroy

    # Function to draw the rectangle and overlay it on an image (sanity check)
    def draw_rectangle(self, window_coordinate, overlay_image):
        """Return a copy of ``overlay_image`` with the window rectangle drawn."""
        window_coordinate = tuple(window_coordinate)
        output = np.copy(overlay_image)
        cv2.rectangle(output, window_coordinate[0], window_coordinate[1], (0, 255, 0), 2)
        return output
return output
class Line():
    """Tracks one lane line: detected pixels, smoothed polynomial fit and
    curvature, updated incrementally from successive video frames via a
    sliding window search.
    """
    def __init__(self, nwindow, margin, mpixel, centroid, us_line_long, us_line_wide):
        # Real-world lane span (presumably meters) used to convert
        # pixel-space fits to metric curvature — TODO confirm units.
        self.us_line_wide = us_line_wide
        self.us_line_long = us_line_long
        self .has_data = False
        # was the line detected in the last iteration?
        self.detected = False
        # average x values of the fitted line over the last n iterations
        self.bestx = None
        # polynomial coefficients averaged over the last n iterations
        self.best_fit = np.array([0,0,0], dtype='float')
        # polynomial coefficients for the most recent fits (rolling buffer)
        self.current_fit = []
        self.current_fit_cr = []
        # radius-of-curvature history and its running average
        self.radius_of_curvature = []
        self.radius_of_curvature_value = 0
        # x position of the line at the bottom of the image (pixels)
        self.line_base_pos = 0
        # difference in fit coefficients between last and new fits
        self.diffs = np.array([0,0,0], dtype='float')
        # x values for detected line pixels
        self.allx = None
        # y values for detected line pixels
        self.ally = None
        # sliding window used for the running integration
        self.window = window(nwindow, margin, mpixel, centroid)
        # rolling index into current_fit (wraps at 10)
        self.count = 0
        self.sliding_window =[]
        # Thresholds on coefficient jumps used to reject bad fits.
        self.epsilon = float(4 * (10 ** (-3)))
        self.epsilon2 = float(100)
        self.epsilon3 = float(500)
    def update_curvature(self):
        # Refresh the averaged curvature from the accumulated history.
        self.radius_of_curvature_value = np.average(self.radius_of_curvature)
    def update_polycoeff(self, binary_warped):
        """Collect lane pixels from all sliding windows of ``binary_warped``,
        fit a 2nd-order polynomial, smooth it against previous fits, and
        update curvature/base-position state.  Appends debug output to
        debug.txt as a side effect.
        """
        lane_point_inds = []
        # Pixel-to-metric scale factors; assumes a 720x(>=640) warped image —
        # TODO confirm against the perspective-transform output size.
        ym_per_pix = self.us_line_long/720
        xm_per_pix = self.us_line_wide/640
        debug_image = np.copy(binary_warped)
        debug = []
        # Walk every window slice, gathering candidate pixel indices.
        for no in range(self.window.nwindow):
            point_inds, window_coordinate, nonzerox, nonzeroy = self.window.get_points_within_boundary(no, binary_warped)
            self.sliding_window.append(window_coordinate)
            lane_point_inds.append(point_inds)
        # append all the x & y to Line
        ycal = np.linspace(0, binary_warped.shape[0]-1, binary_warped.shape[0] )
        if (len(lane_point_inds)):
            ## TODO: Need to add validation before updating lines
            try:
                lane_point_inds = np.concatenate(lane_point_inds)
            except ValueError:
                pass
            # NOTE(review): bitwise & of the two lengths works like `and`
            # here only because both index arrays always have equal length.
            if(len(nonzerox[lane_point_inds]) & len(nonzeroy[lane_point_inds])):
                self.allx = nonzerox[lane_point_inds]
                self.ally = nonzeroy[lane_point_inds]
                # Fit x = f(y) in pixel space and in metric space.
                var_current_fit = np.polyfit(self.ally, self.allx, 2)
                self.current_fit_cr = np.polyfit((np.array(self.ally) * ym_per_pix), (np.array(self.allx) * xm_per_pix), 2)
                self.detected = True
            else:
                self.detected = False
            if(self.detected == True):
                if(self.has_data == True):
                    # Compare against the running average; accept only fits
                    # whose quadratic/linear terms moved less than epsilon(2).
                    self.diffs = np.absolute(var_current_fit - self.best_fit)
                    if ((self.diffs[0] < self.epsilon) & (self.diffs[1] < self.epsilon2)):
                        if (self.diffs[2] > self.epsilon3):
                            # Constant term jumped too far: keep the old one.
                            var_current_fit[2] = self.best_fit[2]
                        # Rolling buffer of the last 10 accepted fits.
                        if(self.count == 10):
                            self.count = 0
                        if(len(self.current_fit) > self.count):
                            self.current_fit.pop(self.count)
                        self.current_fit.insert(self.count, var_current_fit)
                        self.best_fit = np.average(self.current_fit, axis=0)
                        # Standard radius-of-curvature formula at the bottom of the image.
                        self.radius_of_curvature.append(((1 + (2*self.current_fit_cr[0]*max(self.ally)*ym_per_pix + self.current_fit_cr[1])**2)**1.5) / np.absolute(2*self.current_fit_cr[0]))
                    else:
                        self.detected = False
                else:
                    # First ever fit: seed the buffer and the average.
                    self.current_fit.insert(self.count, np.polyfit(self.ally, self.allx, 2))
                    self.best_fit = self.current_fit[-1]
                    self.radius_of_curvature.append(((1 + (2*self.current_fit_cr[0]*max(self.ally)*ym_per_pix + self.current_fit_cr[1])**2)**1.5) / np.absolute(2*self.current_fit_cr[0]))
                self.count = self.count + 1
                self.has_data = True
        try:
            # Evaluate the smoothed polynomial over the full image height.
            self.bestx = self.best_fit[0]*ycal**2 + self.best_fit[1]*ycal + self.best_fit[2]
            self.line_base_pos = self.bestx[-1]
        except TypeError:
            # Avoids an error if `left` and `right_fit` are still none or incorrect
            print('The function failed to fit a line!')
            self.bestx = 1*ycal**2 + 1*ycal
        # Debug trace of the last diffs/fit; appended on every call.
        print("+++++++++++", file=open("debug.txt", "a"))
        print (self.diffs,file=open("debug.txt", "a"))
        print("----", file=open("debug.txt", "a"))
        print (self.best_fit,file=open("debug.txt", "a"))
| 7,607 | 2,355 |
# ITERATIVE FUNCTION FOR FACTORIAL
# def factorial_iterative(n):
# fact=1
# for b in range(1, n+1):
# fact=fact*b
# return fact
# a = int(input("enter the number you want the factorial of: "))
# print(f"the factorial of {a} is {factorial_iterative(a)}")
# RECURSIVE FUNCTION FOR FACTORIAL
def factorial_recursive(n):
    """Return n! computed recursively.

    Parameters
    ----------
    n : int
        Non-negative integer.

    Raises
    ------
    ValueError
        If ``n`` is negative.  (BUG FIX: the original recursed past zero on
        negative input and died with RecursionError.)
    """
    if n < 0:
        raise ValueError("factorial is undefined for negative numbers")
    if n == 1 or n == 0:
        return 1
    return n * factorial_recursive(n-1)
# Script entry point: prompt for a number and print its factorial.
a = int(input("enter the number you want the factorial of: "))
print(f"the factorial of {a} is {factorial_recursive(a)}")
| 550 | 211 |
# Generated by Django 2.0.1 on 2018-02-20 05:57
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: add a ``url`` CharField to the ``video`` model."""
    dependencies = [
        ('courses', '0004_course_tag'),
    ]
    operations = [
        migrations.AddField(
            model_name='video',
            name='url',
            field=models.CharField(default='', max_length=200, verbose_name='video address'),
        ),
    ]
| 415 | 140 |
#
# cogs/welcome/role_reapplication.py
#
# futaba - A Discord Mod bot for the Programming server
# Copyright (c) 2017-2020 Jake Richardson, Ammon Smith, jackylam5
#
# futaba is available free of charge under the terms of the MIT
# License. You are free to redistribute and/or modify it under those
# terms. It is distributed in the hopes that it will be useful, but
# WITHOUT ANY WARRANTY. See the LICENSE file for more details.
#
"""
Handling to reapply roles when the member rejoins the guild.
"""
import asyncio
import logging
from collections import deque, namedtuple
import discord
from discord.ext import commands
from futaba.converters import UserConv
from futaba.utils import user_discrim
from ..abc import AbstractCog
logger = logging.getLogger(__name__)
FakeMember = namedtuple("FakeMember", ("name", "id", "guild"))
__all__ = ["RoleReapplication"]
class RoleReapplication(AbstractCog):
    """Cog that persists members' roles and reapplies eligible ones on rejoin."""
    __slots__ = ("journal", "lock", "recent_updates")
    def __init__(self, bot):
        super().__init__(bot)
        self.journal = bot.get_broadcaster("/roles")
        # Serializes saved-role writes against the bulk background update.
        self.lock = asyncio.Lock()
        # Ring buffer of recent (before, after) pairs, used to drop duplicate
        # member_update events delivered in quick succession.
        self.recent_updates = deque(maxlen=20)
    async def bg_setup(self):
        """
        Update all members' saved roles.
        Since this task can be very slow with several thousand members,
        the task is run in the background and delays itself to avoid clogging
        the bot. However, this will degrade reapply-role performance until
        it's finished.
        """
        async with self.lock:
            with self.bot.sql.transaction():
                for i, member in enumerate(self.bot.get_all_members()):
                    self.bot.sql.roles.update_saved_roles(member)
                    # Yield to the event loop every 20 members so the bot
                    # stays responsive while this runs.
                    if i % 20 == 0:
                        await asyncio.sleep(0.2)
    def setup(self):
        # Kick off the (slow) full role snapshot without blocking startup.
        logger.info("Running member role update in background")
        self.bot.loop.create_task(self.bg_setup())
    async def member_update(self, before, after):
        """Persist a member's roles whenever their role set changes."""
        if before.roles == after.roles:
            return
        entry = (before, after)
        # Skip events we have already processed very recently.
        if entry in self.recent_updates:
            return
        else:
            self.recent_updates.append(entry)
        special_roles = self.bot.sql.settings.get_special_roles(after.guild)
        # Members still holding the guest role are not snapshotted.
        if special_roles.guest_role in after.roles:
            return
        await self.save_roles(after)
    def get_reapply_roles(self, guild):
        """Return the list of roles this guild permits to be reapplied on rejoin."""
        logger.debug(
            "Getting possible reapplication roles for guild '%s' (%d)",
            guild.name,
            guild.id,
        )
        reapply_roles = self.bot.sql.settings.get_reapply_roles(guild)
        can_reapply = list(reapply_roles)
        # Mute/jail roles are always eligible for reapplication.
        special_roles = self.bot.sql.settings.get_special_roles(guild)
        if special_roles.mute_role is not None:
            can_reapply.append(special_roles.mute_role)
        if special_roles.jail_role is not None:
            can_reapply.append(special_roles.jail_role)
        # Self-assignable roles count too, when that cog is loaded.
        if "SelfAssignableRoles" in self.bot.cogs:
            can_reapply.extend(self.bot.sql.roles.get_assignable_roles(guild))
        return can_reapply
    def get_roles_to_reapply(self, member):
        """Return the member's saved roles filtered to the permitted set, or None if none are saved."""
        roles = self.bot.sql.roles.get_saved_roles(member)
        if not roles:
            logger.debug("No roles to reapply, user is new")
            return None
        can_reapply = self.get_reapply_roles(member.guild)
        return list(filter(lambda r: r in can_reapply, roles))
    @commands.guild_only()
    @commands.command(name="savedroles", aliases=["saveroles", "userroles", "uroles"])
    async def saved_roles(self, ctx, user: UserConv = None):
        """ Returns all roles that would be reapplied when a given user rejoins. """
        if user is None:
            member = ctx.author
            mention = ctx.author.mention
        else:
            # The user may no longer be in the guild; fake a member object
            # carrying just the fields the SQL lookup needs.
            member = FakeMember(id=user.id, name=user.name, guild=ctx.guild)
            mention = user.mention
        roles = self.get_roles_to_reapply(member)
        if roles:
            roles.sort(key=lambda r: r.position, reverse=True)
            role_list = " ".join(role.mention for role in roles)
            sep = "\n\n" if len(roles) > 3 else " "
            embed = discord.Embed(colour=discord.Colour.dark_teal())
            embed.title = "\N{MILITARY MEDAL} Roles which would be applied on join"
            embed.description = f"{mention}:{sep}{role_list}"
        else:
            embed = discord.Embed(colour=discord.Colour.dark_purple())
            embed.description = f"No roles are saved for {mention}."
        await ctx.send(embed=embed)
    async def reapply_roles(self, member):
        """Reapply a rejoining member's saved roles; returns the roles, or None if none were saved."""
        roles = self.get_roles_to_reapply(member)
        if roles is None:
            return None
        logger.info(
            "Reapplying roles to member '%s' (%d): [%s]",
            member.name,
            member.id,
            ", ".join(role.name for role in roles),
        )
        await member.add_roles(
            *roles, reason="Automatically reapplying roles", atomic=True
        )
        content = (
            f"Reapplied roles to {member.mention}: {', '.join(f'`{role.name}`' for role in roles)}"
            if roles
            else f"Reapplied no roles to {member.mention}"
        )
        self.journal.send(
            "reapply", member.guild, content, member=member, roles=roles, icon="role"
        )
        return roles
    async def save_roles(self, member):
        """Persist the member's current roles for later reapplication."""
        logger.info(
            "Member '%s' (%d) updated roles in '%s' (%d)",
            member.name,
            member.id,
            member.guild.name,
            member.guild.id,
        )
        async with self.lock:
            with self.bot.sql.transaction():
                self.bot.sql.roles.update_saved_roles(member)
        content = f"Saved updated roles for {user_discrim(member)}"
        self.journal.send("save", member.guild, content, member=member, icon="save")
| 5,943 | 1,849 |
import logging
from dataclasses import dataclass
from typing import List, Union, Dict, Any, Tuple
from lxml import etree
@dataclass
class Feature:
    """Output feature: an id, its tag dict, and a GeoJSON geometry dict."""
    id: str
    tags: Dict[str, Any]
    geojson_geometry: Dict[str, Any]
def to_geojson_dict(features: List[Feature]) -> Dict[str, Any]:
    """Wrap a list of Feature objects in a GeoJSON FeatureCollection dict.

    Each feature's id and tags are placed under 'properties'.
    """
    feature_dicts = []
    for feature in features:
        feature_dicts.append({
            'type': 'Feature',
            'geometry': feature.geojson_geometry,
            'properties': {
                'id': feature.id,
                'tags': feature.tags,
            },
        })
    return {'type': 'FeatureCollection', 'features': feature_dicts}
@dataclass
class InputPoint:
    """Input point feature: tags plus a single latitude/longitude coordinate."""
    tags: Dict[str, Any]
    latitude: float
    longitude: float
@dataclass
class InputLine:
    """Input linestring feature: tags plus an ordered list of coordinate pairs."""
    tags: Dict[str, Any]
    list_of_coordinate_pairs: List[Tuple[float, float]]
@dataclass
class InputPolygon:
    """Input polygon feature: one outer ring and zero or more inner rings (holes)."""
    tags: Dict[str, Any]
    outer_ring: List[Tuple[float, float]]
    inner_rings: List[List[Tuple[float, float]]]
@dataclass
class InputMultiPolygon:
    """Input multipolygon feature: several outer rings plus inner rings (holes).

    NOTE(review): inner rings are not associated with a particular outer ring
    here; downstream conversion emits all of them as 'inner' members of a
    single relation — confirm that is the intended OSM modelling.
    """
    tags: Dict[str, Any]
    outer_rings: List[List[Tuple[float, float]]]
    inner_rings: List[List[Tuple[float, float]]]
@dataclass
class Node:
    """An OSM node: a single point carrying an id, coordinates and tags."""
    id: int
    tags: Dict[str, Any]
    latitude: float
    longitude: float

    def as_xml_element(self) -> etree.Element:
        """Serialize this node as an OSM XML <node> element with <tag> children."""
        element = etree.Element(
            'node', id=str(self.id), lat=str(self.latitude), lon=str(self.longitude)
        )
        for key, value in self.tags.items():
            element.append(etree.Element('tag', k=key, v=str(value)))
        return element
@dataclass
class Way:
    """An OSM way: an ordered list of node references plus tags."""
    id: int
    tags: Dict[str, Any]
    node_ids: List[int]

    def as_xml_element(self) -> etree.Element:
        """Serialize this way as an OSM XML <way> element (<nd> refs first, then <tag>s)."""
        element = etree.Element('way', id=str(self.id))
        for node_id in self.node_ids:
            element.append(etree.Element('nd', ref=str(node_id)))
        for key, value in self.tags.items():
            element.append(etree.Element('tag', k=key, v=str(value)))
        return element
@dataclass
class RelationMember:
    """A single member entry of an OSM relation (e.g. an outer/inner way)."""
    type: str
    id: int
    role: str

    def as_xml_element(self) -> etree.Element:
        """Serialize this member as an OSM XML <member> element."""
        attributes = {'type': self.type, 'ref': str(self.id), 'role': self.role}
        return etree.Element('member', **attributes)
@dataclass
class Relation:
    """An OSM relation: a group of role-tagged members plus tags."""
    id: int
    tags: Dict[str, Any]
    members: List[RelationMember]

    def as_xml_element(self) -> etree.Element:
        """Serialize this relation as an OSM XML <relation> element (members first, then <tag>s)."""
        element = etree.Element('relation', id=str(self.id))
        for member in self.members:
            element.append(member.as_xml_element())
        for key, value in self.tags.items():
            element.append(etree.Element('tag', k=key, v=str(value)))
        return element
class DecreasingSequence:
    """Hands out successive integers stepping by ``step`` from ``starting_value``.

    The first call to :meth:`next_value` returns ``starting_value + step``;
    with the defaults this yields -1, -2, -3, ... (negative synthetic ids).
    """

    def __init__(self, starting_value: int = 0, step: int = -1):
        self.value = starting_value
        if step == 0:
            raise ValueError('Step cannot be equal zero.')
        if step > 0:
            # Positive steps are allowed but almost certainly unintended.
            logging.warning(
                'DecreasingSequence given step greater than zero. Sequence will increase instead of decreasing.'
            )
        self.step = step

    def next_value(self):
        """Advance the sequence by one step and return the new current value."""
        self.value = self.value + self.step
        return self.value
def trim_coordinates(lat: float, lon: float) -> Tuple[float, float]:
    """Round a coordinate pair to 7 decimal places (roughly centimetre precision)."""
    precision = 7
    return round(lat, precision), round(lon, precision)
def input_feature_factory(
    geom_type: str, **kwargs
) -> Union[InputPoint, InputLine, InputPolygon, InputMultiPolygon]:
    """Create the matching Input* feature object for a geometry type name.

    :param geom_type: one of 'POINT', 'LINESTRING', 'POLYGON', 'MULTIPOLYGON'
    :param kwargs: constructor arguments for the corresponding Input* dataclass
    :return: the constructed input feature
    :raises AttributeError: for unsupported geometry types (kept as
        AttributeError for backward compatibility with existing callers)
    :raises KeyError: if a required kwarg for the geometry type is missing
    """
    # Fixed: the return annotation previously omitted InputMultiPolygon even
    # though the MULTIPOLYGON branch returns one.
    if geom_type == 'POINT':
        return InputPoint(tags=kwargs['tags'], latitude=kwargs['latitude'], longitude=kwargs['longitude'])
    if geom_type == 'LINESTRING':
        return InputLine(tags=kwargs['tags'], list_of_coordinate_pairs=kwargs['list_of_coordinate_pairs'])
    if geom_type == 'POLYGON':
        return InputPolygon(tags=kwargs['tags'], outer_ring=kwargs['outer_ring'], inner_rings=kwargs['inner_rings'])
    if geom_type == 'MULTIPOLYGON':
        return InputMultiPolygon(tags=kwargs['tags'], outer_rings=kwargs['outer_rings'], inner_rings=kwargs['inner_rings'])
    raise AttributeError(f'Geometry type: {geom_type} currently not supported.')
def convert_to_osm_style_objects(
    list_of_features: List[Union[InputPoint, InputLine, InputPolygon, InputMultiPolygon]]
) -> Tuple[List[Node], List[Way], List[Relation]]:
    """Convert input features (points, lines, polygons) into OSM style objects (nodes, ways, relations).

    Nodes are de-duplicated by their rounded (lat, lon) coordinates; all
    generated ids come from a shared DecreasingSequence (negative ids).
    """
    id_generator = DecreasingSequence()
    list_of_nodes: List[Node] = []
    # Maps rounded (lat, lon) -> Node, used to reuse nodes at identical coordinates.
    node_dict: Dict[Tuple[float, float], Node] = {}
    list_of_ways: List[Way] = []
    list_of_relations: List[Relation] = []
    def create_way(list_of_coordinates: List[Tuple[float, float]], tags: Dict[str, Any]) -> int:
        # Builds (or reuses) a node per coordinate, creates the Way, and
        # returns its id. Mutates the enclosing node/way collections.
        node_ids = []
        for coordinates in list_of_coordinates:
            lat_lon_tuple = trim_coordinates(*coordinates)
            if node_dict.get(lat_lon_tuple):
                node_id = node_dict[lat_lon_tuple].id
            else:
                new_node = Node(id_generator.next_value(), {}, *lat_lon_tuple)
                node_id = new_node.id
                node_dict[lat_lon_tuple] = new_node
                list_of_nodes.append(new_node)
            node_ids.append(node_id)
        w = Way(id_generator.next_value(), tags, node_ids)
        list_of_ways.append(w)
        return w.id
    expected_classes = [InputPoint, InputLine, InputPolygon, InputMultiPolygon]
    for feature in list_of_features:
        if isinstance(feature, InputPoint):
            lat, lon = trim_coordinates(feature.latitude, feature.longitude)
            if node_dict.get((lat, lon)):
                # A node already exists at these coordinates: merge tags into
                # it (the new feature's tags win on key conflicts).
                existing_node = node_dict[(lat, lon)]
                existing_node.tags = {**existing_node.tags, **feature.tags}
                logging.warning(f'Node with coordinates {lat}, {lon} already exists in dictionary. Merging tags.')
                continue
            n = Node(id_generator.next_value(), feature.tags, lat, lon)
            node_dict[(lat, lon)] = n
            list_of_nodes.append(n)
        elif isinstance(feature, InputLine):
            create_way(feature.list_of_coordinate_pairs, feature.tags)
        elif isinstance(feature, InputPolygon):
            if len(feature.inner_rings) == 0:
                # Simple polygon (no holes): a single closed way carries the tags.
                create_way(feature.outer_ring, feature.tags)
            else:
                # Polygon with holes: a type=multipolygon relation whose
                # untagged member ways carry the ring geometry.
                outer_id = create_way(feature.outer_ring, dict())
                inner_ids = [create_way(ring, dict()) for ring in feature.inner_rings]
                members = [RelationMember('way', outer_id, 'outer')] + [RelationMember('way', i, 'inner') for i in inner_ids]
                relation_tags = {**feature.tags, 'type': 'multipolygon'}
                r = Relation(id_generator.next_value(), relation_tags, members)
                list_of_relations.append(r)
        elif isinstance(feature, InputMultiPolygon):
            outer_ids = [create_way(ring, dict()) for ring in feature.outer_rings]
            inner_ids = [create_way(ring, dict()) for ring in feature.inner_rings]
            members = [RelationMember('way', i, 'outer') for i in outer_ids] + [RelationMember('way', i, 'inner') for i in inner_ids]
            relation_tags = {**feature.tags, 'type': 'multipolygon'}
            r = Relation(id_generator.next_value(), relation_tags, members)
            list_of_relations.append(r)
        else:
            raise ValueError(f'Feature is not one of expected types: {type(feature)}. Expected one of: {expected_classes}')
    return list_of_nodes, list_of_ways, list_of_relations
def create_osm_xml(list_of_features: List[Union[InputPoint, InputLine, InputPolygon, InputMultiPolygon]]) -> etree.Element:
    """Build an OSM XML document (<osm version="0.6"> root) from input features.

    :param list_of_features: input features to convert
    :return: the <osm> root element containing all nodes, ways and relations
    """
    # Fixed: the parameter annotation previously omitted InputMultiPolygon,
    # which convert_to_osm_style_objects accepts; the docstring was empty.
    root = etree.Element('osm', version='0.6')
    list_of_nodes, list_of_ways, list_of_relations = convert_to_osm_style_objects(list_of_features)
    # OSM file convention: nodes first, then ways, then relations.
    for node in list_of_nodes:
        root.append(node.as_xml_element())
    for way in list_of_ways:
        root.append(way.as_xml_element())
    for relation in list_of_relations:
        root.append(relation.as_xml_element())
    return root
def bbox_to_geojson_geometry(bbox: Tuple[float, float, float, float]) -> Dict[str, Any]:
    """Convert a bounding box into a GeoJSON Polygon geometry dict.

    Assumes bbox is (min_x, min_y, max_x, max_y); the ring is closed by
    repeating the first corner.
    """
    min_x, min_y, max_x, max_y = bbox
    ring = [
        [min_x, min_y],
        [max_x, min_y],
        [max_x, max_y],
        [min_x, max_y],
        [min_x, min_y],
    ]
    return {"type": "Polygon", "coordinates": [ring]}
def create_geojson_dict(list_of_geometries: List[dict], list_of_properties: List[dict]) -> dict:
    """Pair geometries with properties into a GeoJSON FeatureCollection dict.

    Extra items in the longer list are dropped (zip semantics); each feature
    gets a shallow copy of its properties dict.
    """
    features = []
    for geometry, props in zip(list_of_geometries, list_of_properties):
        features.append({
            'type': 'Feature',
            'geometry': geometry,
            'properties': dict(props),
        })
    return {'type': 'FeatureCollection', 'features': features}
| 9,292 | 2,885 |
import random
class Uteis():
    """Test-data helpers ('Uteis' is Portuguese for 'utilities'): random names and e-mails."""

    # Pool of upper- and lower-case ASCII letters used for generated names.
    LETTERS = "AaBbCcDdEeFfGgHhIiJjKkLlMmNnOoPpQqRrSsTtUuVvWwXxYyZz"

    def gerar_nome(self, length):
        """Return a random alphabetic string.

        :param length: desired length; coerced with int() so numeric strings work
        :return: string of `length` random ASCII letters
        """
        # random.choice + str.join replaces the original's manual
        # randint-indexing and quadratic `result += ...` concatenation loop.
        return "".join(random.choice(self.LETTERS) for _ in range(int(length)))

    def gerar_email(self):
        """Return a random e-mail address: 8 random letters at a random test domain (.com)."""
        domains = ["teste", "mail", "outmail", "gmail"]
        return self.gerar_nome(8) + "@" + random.choice(domains) + ".com"
class NotifySwUpgradeStateChangedDTO(object):
    """DTO for a software-upgrade state-change notification.

    Bean-style container: every field defaults to None and is exposed through
    explicit getters/setters, kept as-is for API compatibility with existing
    callers (likely generated / ported from Java).
    """
    def __init__(self):
        # All fields start unset; callers populate them via the setters.
        self.notifyType = None
        self.deviceId = None
        self.appId = None
        self.operationId = None
        self.subOperationId = None
        self.swUpgradeState = None
    def getNotifyType(self):
        return self.notifyType
    def setNotifyType(self, notifyType):
        self.notifyType = notifyType
    def getDeviceId(self):
        return self.deviceId
    def setDeviceId(self, deviceId):
        self.deviceId = deviceId
    def getAppId(self):
        return self.appId
    def setAppId(self, appId):
        self.appId = appId
    def getOperationId(self):
        return self.operationId
    def setOperationId(self, operationId):
        self.operationId = operationId
    def getSubOperationId(self):
        return self.subOperationId
    def setSubOperationId(self, subOperationId):
        self.subOperationId = subOperationId
    def getSwUpgradeState(self):
        return self.swUpgradeState
    def setSwUpgradeState(self, swUpgradeState):
        self.swUpgradeState = swUpgradeState
| 1,111 | 329 |
from app.overpass.overpass import Overpass
from app.overpass.location import Location
class Nodes(Overpass):
    """Overpass query wrapper restricted to OSM node elements."""

    # Element type string; presumably interpolated into the Overpass QL
    # query by the Overpass base class — TODO confirm.
    TYPE = 'node'
if __name__ == '__main__':
    # Demo/manual test: query tourist attractions around a point and print
    # the resulting frame. Requires network access to the Overpass API.
    import pandas as pd
    # Show full, wide frames when printing results to the console.
    pd.options.display.max_columns = None
    pd.options.display.width = 800
    nodes = Nodes()
    # NOTE(review): Location(13.383333, 52.516667) looks like (lon, lat)
    # near Berlin — confirm the argument order expected by Location.
    nodes.location = Location(13.383333, 52.516667)
    nodes.radius = 4000
    nodes.selection = '["tourism"="attraction"]'
    nodes.sorted = True
    result = nodes.around()
    print(result)
| 476 | 174 |
"""Interface for managing interactions between a client frontend (this CLI) and backend (API)."""
from abc import ABC, abstractmethod
class ClientApi(ABC):
    """Interface for managing interactions between a client frontend (this CLI) and backend (API)."""
    @abstractmethod
    def is_active(self):
        """
        Checks if the API is active.
        :return: True, if the API is running and responding to requests
        """
    @abstractmethod
    def stop(self):
        """
        Stops the backend service.
        :return: dict with result of action
        """
    @abstractmethod
    def dataset_metadata(self, entry):
        """
        Retrieves dataset metadata for the specified entry.
        :param entry: entry associated with requested metadata
        :return: requested metadata
        """
    @abstractmethod
    def dataset_metadata_search(self, search_query, until):
        """
        Applies the provided search query to the metadata of the client, for all entries until the provided timestamp.
        :param search_query: query to apply
        :param until: timestamp to use for limiting search
        :return: search results
        """
    @abstractmethod
    def dataset_definitions(self):
        """
        Retrieves all dataset definitions for the current user and device.
        :return: requested definitions
        """
    @abstractmethod
    def dataset_entries(self):
        """
        Retrieves all dataset entries.
        :return: requested entries
        """
    @abstractmethod
    def dataset_entries_for_definition(self, definition):
        """
        Retrieves all dataset entries for the provided dataset definition.
        :param definition: definition associated with requested entries
        :return: requested entries
        """
    @abstractmethod
    def user(self):
        """
        Retrieves information about the current user.
        :return: current user
        """
    @abstractmethod
    def device(self):
        """
        Retrieves information about the current device.
        :return: current device
        """
    @abstractmethod
    def device_connections(self):
        """
        Retrieves information about the server connections of the current device.
        :return: active device connections
        """
    @abstractmethod
    def operations(self):
        """
        Retrieves the currently active operations.
        :return: active operations
        """
    @abstractmethod
    def operation_progress(self, operation):
        """
        Retrieves the progress of an operation.
        :param operation: operation whose progress should be retrieved
        :return: response as an event stream
        """
    @abstractmethod
    def operation_follow(self, operation):
        """
        Follows an operation's progress.
        :param operation: operation to follow
        :return: response as an event stream
        """
    @abstractmethod
    def operation_stop(self, operation):
        """
        Stops an active operation.
        :param operation: operation to stop
        :return: dict with result of action
        """
    @abstractmethod
    def backup_rules(self):
        """
        Retrieves the current backup specification/rules.
        :return: backup spec/rules
        """
    @abstractmethod
    def backup_start(self, definition):
        """
        Starts a backup for the specified dataset definition.
        :param definition: definition for which to start a backup
        :return: dict with result of action
        """
    @abstractmethod
    def backup_define(self, request):
        """
        Creates a new dataset definition with the provided data.
        :param request: data to use for creating a definition
        :return: dict with result of action
        """
    @abstractmethod
    def recover_until(self, definition, until, path_query, destination, discard_paths):
        """
        Starts a recovery for the specified dataset definition, restricted by the provided timestamp and query.
        :param definition: definition for which to start a recovery
        :param until: timestamp to use for limiting recovery
        :param path_query: file/path query to use for limiting recovery
        :param destination: recovery directory path override
        :param discard_paths: set to True to discard original file directory structure
        :return: dict with result of action
        """
    @abstractmethod
    def recover_from(self, definition, entry, path_query, destination, discard_paths):
        """
        Starts a recovery for the specified dataset definition and entry, restricted by the provided query.
        :param definition: definition for which to start a recovery
        :param entry: entry to use for recovery
        :param path_query: file/path query to use for limiting recovery
        :param destination: recovery directory path override
        :param discard_paths: set to True to discard original file directory structure
        :return: dict with result of action
        """
    @abstractmethod
    def recover_from_latest(self, definition, path_query, destination, discard_paths):
        """
        Starts a recovery for the specified dataset definition and latest entry, restricted by the provided query.
        :param definition: definition for which to start a recovery
        :param path_query: file/path query to use for limiting recovery
        :param destination: recovery directory path override
        :param discard_paths: set to True to discard original file directory structure
        :return: dict with result of action
        """
    @abstractmethod
    def schedules_public(self):
        """
        Retrieves all available public schedules.
        :return: requested public schedules
        """
    @abstractmethod
    def schedules_configured(self):
        """
        Retrieves all available configured schedules.
        :return: requested configured schedules
        """
    @abstractmethod
    def schedules_configured_refresh(self):
        """
        Refreshes settings for all configured schedules.
        :return: dict with result of action
        """
| 6,229 | 1,444 |
# -*- coding: utf-8 -*-
import base64
from datetime import datetime
from xml.dom import minidom
from django.conf import settings
from django.core.cache import cache
from nose.tools import eq_, ok_
from olympia import amo
from olympia.amo.tests import TestCase
from olympia.amo.urlresolvers import reverse
from olympia.blocklist.models import (
BlocklistApp, BlocklistCA, BlocklistDetail, BlocklistGfx, BlocklistItem,
BlocklistIssuerCert, BlocklistPlugin, BlocklistPref)
base_xml = """
<?xml version="1.0"?>
<blocklist xmlns="http://www.mozilla.org/2006/addons-blocklist">
</blocklist>
"""
class XMLAssertsMixin(object):
    """Assertion helpers for checking how model fields render in blocklist XML.

    Expects the mixing-in TestCase to provide `self.client`, `self.fx4_url`
    and `self.dom(url)`.
    """
    def assertOptional(self, obj, field, xml_field):
        """Make sure that if the field isn't filled in, it's not in the XML."""
        # Save the initial value.
        initial = getattr(obj, field)
        try:
            # If not set, the field isn't in the XML.
            obj.update(**{field: ''})
            eq_(self.dom(self.fx4_url).getElementsByTagName(xml_field), [])
            # If set, it's in the XML.
            obj.update(**{field: 'foobar'})
            element = self.dom(self.fx4_url).getElementsByTagName(xml_field)[0]
            eq_(element.firstChild.nodeValue, 'foobar')
        finally:
            obj.update(**{field: initial})
    def assertAttribute(self, obj, field, tag, attr_name):
        """Check that setting `field` on `obj` renders as attribute `attr_name` of element `tag`."""
        # Save the initial value.
        initial = getattr(obj, field)
        try:
            # If set, it's in the XML.
            obj.update(**{field: 'foobar'})
            element = self.dom(self.fx4_url).getElementsByTagName(tag)[0]
            eq_(element.getAttribute(attr_name), 'foobar')
        finally:
            obj.update(**{field: initial})
    def assertEscaped(self, obj, field):
        """Make sure that the field content is XML escaped."""
        obj.update(**{field: 'http://example.com/?foo=<bar>&baz=crux'})
        r = self.client.get(self.fx4_url)
        assert 'http://example.com/?foo=&lt;bar&gt;&amp;baz=crux' in r.content
class BlocklistViewTest(TestCase):
    """Base fixture for blocklist view tests: common URLs and XML helpers."""
    def setUp(self):
        super(BlocklistViewTest, self).setUp()
        self.fx4_url = reverse('blocklist', args=[3, amo.FIREFOX.guid, '4.0'])
        self.fx2_url = reverse('blocklist', args=[2, amo.FIREFOX.guid, '2.0'])
        self.tb4_url = reverse('blocklist', args=[3, amo.THUNDERBIRD.guid,
                                                  '4.0'])
        self.mobile_url = reverse('blocklist', args=[2, amo.MOBILE.guid, '.9'])
        # The blocklist view caches responses; clear so each test sees fresh XML.
        cache.clear()
        self.details = BlocklistDetail.objects.create()
    def create_blplugin(self, app_guid=None, app_min=None, app_max=None,
                        *args, **kw):
        """Create a BlocklistPlugin plus an associated BlocklistApp row."""
        plugin = BlocklistPlugin.objects.create(*args, **kw)
        app = BlocklistApp.objects.create(blplugin=plugin, guid=app_guid,
                                          min=app_min, max=app_max)
        return plugin, app
    def normalize(self, s):
        # Collapse all whitespace so XML comparisons ignore formatting.
        return '\n'.join(x.strip() for x in s.split())
    def eq_(self, x, y):
        # Whitespace-insensitive equality for XML strings.
        return eq_(self.normalize(x), self.normalize(y))
    def dom(self, url):
        """GET `url` and parse the response body as an XML DOM."""
        r = self.client.get(url)
        return minidom.parseString(r.content)
class BlocklistItemTest(XMLAssertsMixin, BlocklistViewTest):
    """Tests for how BlocklistItem rows render as <emItem> entries in the blocklist XML."""
    def setUp(self):
        super(BlocklistItemTest, self).setUp()
        # One blocked add-on, one pref and one Firefox app row as baseline data.
        self.item = BlocklistItem.objects.create(guid='guid@addon.com',
                                                 details=self.details)
        self.pref = BlocklistPref.objects.create(blitem=self.item,
                                                 pref='foo.bar')
        self.app = BlocklistApp.objects.create(blitem=self.item,
                                               guid=amo.FIREFOX.guid)
    def stupid_unicode_test(self):
        junk = u'\xc2\x80\x15\xc2\x80\xc3'
        url = reverse('blocklist', args=[3, amo.FIREFOX.guid, junk])
        # Just make sure it doesn't fail.
        eq_(self.client.get(url).status_code, 200)
    def test_content_type(self):
        response = self.client.get(self.fx4_url)
        eq_(response['Content-Type'], 'text/xml')
    def test_empty_string_goes_null_on_save(self):
        b = BlocklistItem(guid='guid', min='', max='', os='')
        b.save()
        assert b.min is None
        assert b.max is None
        assert b.os is None
    def test_lastupdate(self):
        def eq(a, b):
            # The XML timestamp has second resolution; drop microseconds.
            eq_(a, b.replace(microsecond=0))
        def find_lastupdate():
            bl = self.dom(self.fx4_url).getElementsByTagName('blocklist')[0]
            t = int(bl.getAttribute('lastupdate')) / 1000
            return datetime.fromtimestamp(t)
        eq(find_lastupdate(), self.item.created)
        self.item.save()
        eq(find_lastupdate(), self.item.modified)
        plugin, app = self.create_blplugin(app_guid=amo.FIREFOX.guid)
        eq(find_lastupdate(), plugin.created)
        plugin.save()
        eq(find_lastupdate(), plugin.modified)
        gfx = BlocklistGfx.objects.create(guid=amo.FIREFOX.guid)
        eq(find_lastupdate(), gfx.created)
        gfx.save()
        eq(find_lastupdate(), gfx.modified)
        # Sanity check: all timestamps above must be distinct for the
        # assertions to have been meaningful.
        assert (self.item.created != self.item.modified != plugin.created
                != plugin.modified != gfx.created != gfx.modified)
    def test_no_items(self):
        self.item.delete()
        dom = self.dom(self.fx4_url)
        children = dom.getElementsByTagName('blocklist')[0].childNodes
        # There are only text nodes.
        assert all(e.nodeType == 3 for e in children)
    def test_existing_user_cookie(self):
        self.client.cookies[settings.BLOCKLIST_COOKIE] = 'adfadf'
        self.client.get(self.fx4_url)
        eq_(self.client.cookies[settings.BLOCKLIST_COOKIE].value, 'adfadf')
    def test_url_params(self):
        eq_(self.client.get(self.fx4_url).status_code, 200)
        eq_(self.client.get(self.fx2_url).status_code, 200)
        # We ignore trailing url parameters.
        eq_(self.client.get(self.fx4_url + 'other/junk/').status_code, 200)
    def test_app_guid(self):
        # There's one item for Firefox.
        r = self.client.get(self.fx4_url)
        eq_(r.status_code, 200)
        eq_(len(r.context['items']), 1)
        # There are no items for mobile.
        r = self.client.get(self.mobile_url)
        eq_(r.status_code, 200)
        eq_(len(r.context['items']), 0)
        # Without the app constraint we see the item.
        self.app.delete()
        r = self.client.get(self.mobile_url)
        eq_(r.status_code, 200)
        eq_(len(r.context['items']), 1)
    def test_item_guid(self):
        items = self.dom(self.fx4_url).getElementsByTagName('emItem')
        eq_(len(items), 1)
        eq_(items[0].getAttribute('id'), 'guid@addon.com')
    def test_block_id(self):
        # Item block ids are the detail pk prefixed with 'i'.
        item = self.dom(self.fx4_url).getElementsByTagName('emItem')[0]
        eq_(item.getAttribute('blockID'), 'i' + str(self.details.id))
    def test_item_os(self):
        item = self.dom(self.fx4_url).getElementsByTagName('emItem')[0]
        assert 'os' not in item.attributes.keys()
        self.item.update(os='win,mac')
        item = self.dom(self.fx4_url).getElementsByTagName('emItem')[0]
        eq_(item.getAttribute('os'), 'win,mac')
    def test_item_pref(self):
        self.item.update(severity=2)
        eq_(len(self.vr()), 1)
        item = self.dom(self.fx4_url).getElementsByTagName('emItem')[0]
        prefs = item.getElementsByTagName('prefs')
        pref = prefs[0].getElementsByTagName('pref')
        eq_(pref[0].firstChild.nodeValue, self.pref.pref)
    def test_item_severity(self):
        self.item.update(severity=2)
        eq_(len(self.vr()), 1)
        item = self.dom(self.fx4_url).getElementsByTagName('emItem')[0]
        vrange = item.getElementsByTagName('versionRange')
        eq_(vrange[0].getAttribute('severity'), '2')
    def test_item_severity_zero(self):
        # Don't show severity if severity==0.
        self.item.update(severity=0, min='0.1')
        eq_(len(self.vr()), 1)
        item = self.dom(self.fx4_url).getElementsByTagName('emItem')[0]
        vrange = item.getElementsByTagName('versionRange')
        eq_(vrange[0].getAttribute('minVersion'), '0.1')
        assert not vrange[0].hasAttribute('severity')
    def vr(self):
        """Return the <versionRange> nodes of the first <emItem> in the XML."""
        item = self.dom(self.fx4_url).getElementsByTagName('emItem')[0]
        return item.getElementsByTagName('versionRange')
    def test_item_version_range(self):
        self.item.update(min='0.1')
        eq_(len(self.vr()), 1)
        eq_(self.vr()[0].attributes.keys(), ['minVersion'])
        eq_(self.vr()[0].getAttribute('minVersion'), '0.1')
        self.item.update(max='0.2')
        keys = self.vr()[0].attributes.keys()
        eq_(len(keys), 2)
        ok_('minVersion' in keys)
        ok_('maxVersion' in keys)
        eq_(self.vr()[0].getAttribute('minVersion'), '0.1')
        eq_(self.vr()[0].getAttribute('maxVersion'), '0.2')
    def test_item_multiple_version_range(self):
        # There should be two <versionRange>s under one <emItem>.
        self.item.update(min='0.1', max='0.2')
        BlocklistItem.objects.create(guid=self.item.guid, severity=3)
        item = self.dom(self.fx4_url).getElementsByTagName('emItem')
        eq_(len(item), 1)
        vr = item[0].getElementsByTagName('versionRange')
        eq_(len(vr), 2)
        eq_(vr[0].getAttribute('minVersion'), '0.1')
        eq_(vr[0].getAttribute('maxVersion'), '0.2')
        eq_(vr[1].getAttribute('severity'), '3')
    def test_item_target_app(self):
        app = self.app
        self.app.delete()
        self.item.update(severity=2)
        version_range = self.vr()[0]
        eq_(version_range.getElementsByTagName('targetApplication'), [])
        app.save()
        version_range = self.vr()[0]
        target_app = version_range.getElementsByTagName('targetApplication')
        eq_(len(target_app), 1)
        eq_(target_app[0].getAttribute('id'), amo.FIREFOX.guid)
        app.update(min='0.1', max='*')
        version_range = self.vr()[0]
        target_app = version_range.getElementsByTagName('targetApplication')
        eq_(target_app[0].getAttribute('id'), amo.FIREFOX.guid)
        tvr = target_app[0].getElementsByTagName('versionRange')
        eq_(tvr[0].getAttribute('minVersion'), '0.1')
        eq_(tvr[0].getAttribute('maxVersion'), '*')
    def test_item_multiple_apps(self):
        # Make sure all <targetApplication>s go under the same <versionRange>.
        self.app.update(min='0.1', max='0.2')
        BlocklistApp.objects.create(guid=amo.FIREFOX.guid, blitem=self.item,
                                    min='3.0', max='3.1')
        version_range = self.vr()[0]
        apps = version_range.getElementsByTagName('targetApplication')
        eq_(len(apps), 2)
        eq_(apps[0].getAttribute('id'), amo.FIREFOX.guid)
        vr = apps[0].getElementsByTagName('versionRange')[0]
        eq_(vr.getAttribute('minVersion'), '0.1')
        eq_(vr.getAttribute('maxVersion'), '0.2')
        eq_(apps[1].getAttribute('id'), amo.FIREFOX.guid)
        vr = apps[1].getElementsByTagName('versionRange')[0]
        eq_(vr.getAttribute('minVersion'), '3.0')
        eq_(vr.getAttribute('maxVersion'), '3.1')
    def test_item_empty_version_range(self):
        # No version_range without an app, min, max, or severity.
        self.app.delete()
        self.item.update(min=None, max=None, severity=None)
        eq_(len(self.vr()), 0)
    def test_item_empty_target_app(self):
        # No empty <targetApplication>.
        self.item.update(severity=1)
        self.app.delete()
        eq_(self.dom(self.fx4_url).getElementsByTagName('targetApplication'),
            [])
    def test_item_target_empty_version_range(self):
        app = self.dom(self.fx4_url).getElementsByTagName('targetApplication')
        eq_(app[0].getElementsByTagName('versionRange'), [])
    def test_name(self):
        self.assertAttribute(self.item, field='name', tag='emItem',
                             attr_name='name')
    def test_creator(self):
        self.assertAttribute(self.item, field='creator', tag='emItem',
                             attr_name='creator')
    def test_homepage_url(self):
        self.assertAttribute(self.item, field='homepage_url', tag='emItem',
                             attr_name='homepageURL')
    def test_update_url(self):
        self.assertAttribute(self.item, field='update_url', tag='emItem',
                             attr_name='updateURL')
    def test_urls_escaped(self):
        self.assertEscaped(self.item, 'homepage_url')
        self.assertEscaped(self.item, 'update_url')
class BlocklistPluginTest(XMLAssertsMixin, BlocklistViewTest):
def setUp(self):
super(BlocklistPluginTest, self).setUp()
self.plugin, self.app = self.create_blplugin(app_guid=amo.FIREFOX.guid,
details=self.details)
def test_no_plugins(self):
dom = BlocklistViewTest.dom(self, self.mobile_url)
children = dom.getElementsByTagName('blocklist')[0].childNodes
# There are only text nodes.
assert all(e.nodeType == 3 for e in children)
def dom(self, url=None):
url = url or self.fx4_url
r = self.client.get(url)
d = minidom.parseString(r.content)
return d.getElementsByTagName('pluginItem')[0]
def test_plugin_empty(self):
self.app.delete()
eq_(self.dom().attributes.keys(), ['blockID'])
eq_(self.dom().getElementsByTagName('match'), [])
eq_(self.dom().getElementsByTagName('versionRange'), [])
def test_block_id(self):
item = self.dom(self.fx4_url)
eq_(item.getAttribute('blockID'), 'p' + str(self.details.id))
def test_plugin_os(self):
self.plugin.update(os='win')
eq_(sorted(self.dom().attributes.keys()), ['blockID', 'os'])
eq_(self.dom().getAttribute('os'), 'win')
def test_plugin_xpcomabi(self):
self.plugin.update(xpcomabi='win')
eq_(sorted(self.dom().attributes.keys()), ['blockID', 'xpcomabi'])
eq_(self.dom().getAttribute('xpcomabi'), 'win')
def test_plugin_name(self):
self.plugin.update(name='flash')
match = self.dom().getElementsByTagName('match')
eq_(len(match), 1)
eq_(dict(match[0].attributes.items()),
{'name': 'name', 'exp': 'flash'})
def test_plugin_description(self):
self.plugin.update(description='flash')
match = self.dom().getElementsByTagName('match')
eq_(len(match), 1)
eq_(dict(match[0].attributes.items()),
{'name': 'description', 'exp': 'flash'})
def test_plugin_filename(self):
self.plugin.update(filename='flash')
match = self.dom().getElementsByTagName('match')
eq_(len(match), 1)
eq_(dict(match[0].attributes.items()),
{'name': 'filename', 'exp': 'flash'})
def test_plugin_severity(self):
self.plugin.update(severity=2)
v = self.dom().getElementsByTagName('versionRange')[0]
eq_(v.getAttribute('severity'), '2')
def test_plugin_severity_zero(self):
self.plugin.update(severity=0)
v = self.dom().getElementsByTagName('versionRange')[0]
eq_(v.getAttribute('severity'), '0')
def test_plugin_no_target_app(self):
self.plugin.update(severity=1, min='1', max='2')
self.app.delete()
vr = self.dom().getElementsByTagName('versionRange')[0]
eq_(vr.getElementsByTagName('targetApplication'), [],
'There should not be a <targetApplication> if there was no app')
eq_(vr.getAttribute('severity'), '1')
eq_(vr.getAttribute('minVersion'), '1')
eq_(vr.getAttribute('maxVersion'), '2')
def test_plugin_with_target_app(self):
self.plugin.update(severity=1)
self.app.update(guid=amo.FIREFOX.guid, min='1', max='2')
vr = self.dom().getElementsByTagName('versionRange')[0]
eq_(vr.getAttribute('severity'), '1')
assert not vr.getAttribute('vulnerabilitystatus')
app = vr.getElementsByTagName('targetApplication')[0]
eq_(app.getAttribute('id'), amo.FIREFOX.guid)
vr = app.getElementsByTagName('versionRange')[0]
eq_(vr.getAttribute('minVersion'), '1')
eq_(vr.getAttribute('maxVersion'), '2')
def test_plugin_with_multiple_target_apps(self):
self.plugin.update(severity=1, min='5', max='6')
self.app.update(guid=amo.FIREFOX.guid, min='1', max='2')
BlocklistApp.objects.create(guid=amo.THUNDERBIRD.guid,
min='3', max='4',
blplugin=self.plugin)
vr = self.dom().getElementsByTagName('versionRange')[0]
eq_(vr.getAttribute('severity'), '1')
eq_(vr.getAttribute('minVersion'), '5')
eq_(vr.getAttribute('maxVersion'), '6')
assert not vr.getAttribute('vulnerabilitystatus')
app = vr.getElementsByTagName('targetApplication')[0]
eq_(app.getAttribute('id'), amo.FIREFOX.guid)
vr = app.getElementsByTagName('versionRange')[0]
eq_(vr.getAttribute('minVersion'), '1')
eq_(vr.getAttribute('maxVersion'), '2')
vr = self.dom(self.tb4_url).getElementsByTagName('versionRange')[0]
eq_(vr.getAttribute('severity'), '1')
eq_(vr.getAttribute('minVersion'), '5')
eq_(vr.getAttribute('maxVersion'), '6')
assert not vr.getAttribute('vulnerabilitystatus')
app = vr.getElementsByTagName('targetApplication')[0]
eq_(app.getAttribute('id'), amo.THUNDERBIRD.guid)
vr = app.getElementsByTagName('versionRange')[0]
eq_(vr.getAttribute('minVersion'), '3')
eq_(vr.getAttribute('maxVersion'), '4')
def test_plugin_with_target_app_with_vulnerability(self):
self.plugin.update(severity=0, vulnerability_status=2)
self.app.update(guid=amo.FIREFOX.guid, min='1', max='2')
vr = self.dom().getElementsByTagName('versionRange')[0]
eq_(vr.getAttribute('severity'), '0')
eq_(vr.getAttribute('vulnerabilitystatus'), '2')
app = vr.getElementsByTagName('targetApplication')[0]
eq_(app.getAttribute('id'), amo.FIREFOX.guid)
vr = app.getElementsByTagName('versionRange')[0]
eq_(vr.getAttribute('minVersion'), '1')
eq_(vr.getAttribute('maxVersion'), '2')
def test_plugin_with_severity_only(self):
self.plugin.update(severity=1)
self.app.delete()
vr = self.dom().getElementsByTagName('versionRange')[0]
eq_(vr.getAttribute('severity'), '1')
assert not vr.getAttribute('vulnerabilitystatus')
eq_(vr.getAttribute('minVersion'), '')
eq_(vr.getAttribute('maxVersion'), '')
eq_(vr.getElementsByTagName('targetApplication'), [],
'There should not be a <targetApplication> if there was no app')
def test_plugin_without_severity_and_with_vulnerability(self):
self.plugin.update(severity=0, vulnerability_status=1)
self.app.delete()
vr = self.dom().getElementsByTagName('versionRange')[0]
eq_(vr.getAttribute('severity'), '0')
eq_(vr.getAttribute('vulnerabilitystatus'), '1')
eq_(vr.getAttribute('minVersion'), '')
eq_(vr.getAttribute('maxVersion'), '')
def test_plugin_without_severity_and_with_vulnerability_and_minmax(self):
self.plugin.update(severity=0, vulnerability_status=1, min='2.0',
max='3.0')
self.app.delete()
vr = self.dom().getElementsByTagName('versionRange')[0]
eq_(vr.getAttribute('severity'), '0')
eq_(vr.getAttribute('vulnerabilitystatus'), '1')
eq_(vr.getAttribute('minVersion'), '2.0')
eq_(vr.getAttribute('maxVersion'), '3.0')
def test_plugin_apiver_lt_3(self):
self.plugin.update(severity='2')
# No min & max so the app matches.
e = self.dom(self.fx2_url).getElementsByTagName('versionRange')[0]
eq_(e.getAttribute('severity'), '2')
eq_(e.getElementsByTagName('targetApplication'), [])
# The app version is not in range.
self.app.update(min='3.0', max='4.0')
self.assertRaises(IndexError, self.dom, self.fx2_url)
# The app is back in range.
self.app.update(min='1.1')
e = self.dom(self.fx2_url).getElementsByTagName('versionRange')[0]
eq_(e.getAttribute('severity'), '2')
eq_(e.getElementsByTagName('targetApplication'), [])
def test_info_url(self):
self.assertOptional(self.plugin, 'info_url', 'infoURL')
self.assertEscaped(self.plugin, 'info_url')
class BlocklistGfxTest(BlocklistViewTest):
def setUp(self):
super(BlocklistGfxTest, self).setUp()
self.gfx = BlocklistGfx.objects.create(
guid=amo.FIREFOX.guid, os='os', vendor='vendor', devices='x y z',
feature='feature', feature_status='status', details=self.details,
driver_version='version', driver_version_max='version max',
driver_version_comparator='compare', hardware='giant_robot')
def test_no_gfx(self):
dom = self.dom(self.mobile_url)
children = dom.getElementsByTagName('blocklist')[0].childNodes
# There are only text nodes.
assert all(e.nodeType == 3 for e in children)
def test_gfx(self):
r = self.client.get(self.fx4_url)
dom = minidom.parseString(r.content)
gfx = dom.getElementsByTagName('gfxBlacklistEntry')[0]
def find(e):
return gfx.getElementsByTagName(e)[0].childNodes[0].wholeText
assert find('os') == self.gfx.os
assert find('feature') == self.gfx.feature
assert find('vendor') == self.gfx.vendor
assert find('featureStatus') == self.gfx.feature_status
assert find('driverVersion') == self.gfx.driver_version
assert find('driverVersionMax') == self.gfx.driver_version_max
expected_version_comparator = self.gfx.driver_version_comparator
assert find('driverVersionComparator') == expected_version_comparator
assert find('hardware') == self.gfx.hardware
devices = gfx.getElementsByTagName('devices')[0]
for device, val in zip(devices.getElementsByTagName('device'),
self.gfx.devices.split(' ')):
assert device.childNodes[0].wholeText == val
def test_empty_devices(self):
self.gfx.devices = None
self.gfx.save()
r = self.client.get(self.fx4_url)
self.assertNotContains(r, '<devices>')
def test_no_empty_nodes(self):
self.gfx.update(os=None, vendor=None, devices=None,
feature=None, feature_status=None,
driver_version=None, driver_version_max=None,
driver_version_comparator=None, hardware=None)
r = self.client.get(self.fx4_url)
self.assertNotContains(r, '<os>')
self.assertNotContains(r, '<vendor>')
self.assertNotContains(r, '<devices>')
self.assertNotContains(r, '<feature>')
self.assertNotContains(r, '<featureStatus>')
self.assertNotContains(r, '<driverVersion>')
self.assertNotContains(r, '<driverVersionMax>')
self.assertNotContains(r, '<driverVersionComparator>')
self.assertNotContains(r, '<hardware>')
def test_block_id(self):
item = (self.dom(self.fx4_url)
.getElementsByTagName('gfxBlacklistEntry')[0])
eq_(item.getAttribute('blockID'), 'g' + str(self.details.id))
class BlocklistCATest(BlocklistViewTest):
def setUp(self):
super(BlocklistCATest, self).setUp()
self.ca = BlocklistCA.objects.create(data=u'Ètå…, ≥•≤')
def test_ca(self):
r = self.client.get(self.fx4_url)
dom = minidom.parseString(r.content)
ca = dom.getElementsByTagName('caBlocklistEntry')[0]
eq_(base64.b64decode(ca.childNodes[0].toxml()), 'Ètå…, ≥•≤')
class BlocklistIssuerCertTest(BlocklistViewTest):
def setUp(self):
super(BlocklistIssuerCertTest, self).setUp()
self.issuerCertBlock = BlocklistIssuerCert.objects.create(
issuer='testissuer', serial='testserial',
details=BlocklistDetail.objects.create(name='one'))
self.issuerCertBlock2 = BlocklistIssuerCert.objects.create(
issuer='anothertestissuer', serial='anothertestserial',
details=BlocklistDetail.objects.create(name='two'))
def test_extant_nodes(self):
r = self.client.get(self.fx4_url)
dom = minidom.parseString(r.content)
certItem = dom.getElementsByTagName('certItem')[0]
eq_(certItem.getAttribute('issuerName'), self.issuerCertBlock.issuer)
serialNode = dom.getElementsByTagName('serialNumber')[0]
serialNumber = serialNode.childNodes[0].wholeText
eq_(serialNumber, self.issuerCertBlock.serial)
certItem = dom.getElementsByTagName('certItem')[1]
eq_(certItem.getAttribute('issuerName'), self.issuerCertBlock2.issuer)
serialNode = dom.getElementsByTagName('serialNumber')[1]
serialNumber = serialNode.childNodes[0].wholeText
eq_(serialNumber, self.issuerCertBlock2.serial)
| 25,302 | 8,072 |
# -*- coding: utf-8 -*-
import numpy as np
import time
import IK
import DynamixelController as dc
import TimerIMU as ti
import GaitGenerator as gg
# Move the robot's joints by hand and check the resulting coordinates.
def posPrint():
    """Read all joint positions, run FK/IK on each leg, and print the results.

    Intended as a manual-inspection helper: pose the robot by hand, then call
    this to see raw servo offsets, foot XYZ positions, and re-solved angles.
    """
    # Raw Dynamixel positions for all 12 servos (4 legs x 3 joints each).
    raw = dc.syncreadPos(dc.DXL_ID_LIST)
    pos_leg = np.array([raw[i:i + 3] for i in range(0, 12, 3)])
    # Encoder counts centered on the medium position, converted to radians
    # with the per-joint rotation direction applied.
    offset = np.ones((4, 3)) * dc.DXL_MEDIUM_POSITION_VALUE
    rad_list = (pos_leg - offset) * dc.JOINT_DIREC * dc.DXLPOS_2_RAD
    xyz_pos = []
    rad_list2 = []
    for leg_rad in rad_list:
        # Forward kinematics gives the foot position (third returned frame).
        foot_xyz = IK.legFK(leg_rad, [0, 0, 0])[2]
        xyz_pos.append(foot_xyz)
        # Round-trip through IK to sanity-check the solver against FK.
        rad_list2.append(IK.legSmartIK(foot_xyz, leg_rad, False))
    print("DXLpos : {}".format((pos_leg - offset) * dc.JOINT_DIREC))
    print("XYZpos list : {}".format(xyz_pos))
    print("rad2 : {}".format(rad_list2))
def trotWalk():
    """Configure trot-gait parameters and build the quadruped gait generator.

    Legs 0/2 and 1/3 start 40 mm apart in x so diagonal pairs move in
    anti-phase (a trot). All distances are in millimetres, times in seconds.

    Returns
    -------
    gg.quadrupedGait
        The constructed gait generator. (Previously this was assigned to a
        local and discarded, leaving it unreachable; returning it is
        backward-compatible — existing callers simply ignore the value.)
    """
    # Neutral foot position for each leg in the leg frame [mm].
    leg_centers = [
        [0.0, 0.0, -170.0],
        [-40.0, 0.0, -170.0],
        [0.0, 0.0, -170.0],
        [-40.0, 0.0, -170.0]
    ]
    # Per-leg initial state: [position, velocity, acceleration, phase].
    leg_start_states = [
        [[0.0, 0.0, -170.0], [0.0, 0.0, 0.0], [0.0, 0.0, 0.0], 0.0],
        [[-40.0, 0.0, -170.0], [0.0, 0.0, 0.0], [0.0, 0.0, 0.0], 0.0],
        [[0.0, 0.0, -170.0], [0.0, 0.0, 0.0], [0.0, 0.0, 0.0], 0.0],
        [[-40.0, 0.0, -170.0], [0.0, 0.0, 0.0], [0.0, 0.0, 0.0], 0.0]
    ]
    body_h = 170.0 + IK.sole_r  # body height = leg extension + sole radius
    leg_up_h = 45.0             # swing-phase foot lift height [mm]
    h_offset = 3.0              # extra height margin [mm]
    T = 0.5                     # gait cycle period [s]
    dt = 0.05                   # control time step [s]
    # IMU sampled on a 15 ms timer for body attitude feedback.
    tIMU = ti.timerIMU(0.015)
    qGait = gg.quadrupedGait(leg_centers, leg_start_states, body_h, leg_up_h,
                             h_offset, T, dt, tIMU)
    return qGait
def main():
    """Entry point: start the trot gait, then idle so the program keeps running."""
    # Hardware bring-up hooks, currently disabled:
    # dc.setDXL()
    # dc.torqueOFF(dc.DXL_ID_LIST)
    trotWalk()
    while True:
        # posPrint()  # enable to monitor joint/foot positions while idling
        time.sleep(0.1)
# Run the entry point only when this file is executed directly, not imported.
if __name__ == '__main__':
    main()
| 1,821 | 970 |