| code (stringlengths 22 – 1.05M) | apis (listlengths 1 – 3.31k) | extract_api (stringlengths 75 – 3.25M) |
|---|---|---|
import os
import shutil
import csv
import random
class bms_config:
arch = 'Vnet'
# data
data = '/mnt/dataset/shared/zongwei/BraTS'
csv = "data/bms"
deltr = 30
input_rows = 64
input_cols = 64
input_deps = 32
crop_rows = 100
crop_cols = 100
crop_deps = 50
# model
optimizer = 'adam'
lr = 1e-3
patience = 30
verbose = 1
batch_size = 16
workers = 1
max_queue_size = workers * 1
nb_epoch = 10000
def __init__(self, args):
self.exp_name = self.arch + '-' + args.suffix
if args.data is not None:
self.data = args.data
if args.suffix == 'random':
self.weights = None
elif args.suffix == 'genesis':
self.weights = 'pretrained_weights/Genesis_Chest_CT.h5'
elif args.suffix == 'genesis-autoencoder':
self.weights = 'pretrained_weights/Genesis_Chest_CT-autoencoder.h5'
elif args.suffix == 'genesis-nonlinear':
self.weights = 'pretrained_weights/Genesis_Chest_CT-nonlinear.h5'
elif args.suffix == 'genesis-localshuffling':
self.weights = 'pretrained_weights/Genesis_Chest_CT-localshuffling.h5'
elif args.suffix == 'genesis-outpainting':
self.weights = 'pretrained_weights/Genesis_Chest_CT-outpainting.h5'
elif args.suffix == 'genesis-inpainting':
self.weights = 'pretrained_weights/Genesis_Chest_CT-inpainting.h5'
elif args.suffix == 'denoisy':
self.weights = 'pretrained_weights/denoisy.h5'
elif args.suffix == 'patchshuffling':
self.weights = 'pretrained_weights/patchshuffling.h5'
elif args.suffix == 'hg':
self.weights = 'pretrained_weights/hg.h5'
else:
raise
train_ids = self._load_csv(os.path.join(self.csv, "fold_1.csv")) + self._load_csv(os.path.join(self.csv, "fold_2.csv"))
random.Random(4).shuffle(train_ids)
self.validation_ids = train_ids[:len(train_ids) // 8]
self.train_ids = train_ids[len(train_ids) // 8:]
self.test_ids = self._load_csv(os.path.join(self.csv, "fold_3.csv"))
self.num_train = len(self.train_ids)
self.num_validation = len(self.validation_ids)
self.num_test = len(self.test_ids)
# logs
self.model_path = os.path.join("models/bms", "run_"+str(args.run))
if not os.path.exists(self.model_path):
os.makedirs(self.model_path)
self.logs_path = os.path.join(self.model_path, "logs")
if not os.path.exists(self.logs_path):
os.makedirs(self.logs_path)
def _load_csv(self, foldfile=None):
assert foldfile is not None
patient_ids = []
with open(foldfile, 'r') as f:
reader = csv.reader(f, lineterminator='\n')
patient_ids.extend(reader)
for i, item in enumerate(patient_ids):
patient_ids[i] = item[0]
return patient_ids
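# Illustration (added, hypothetical data): if fold_1.csv held the rows
# "case_0001" and "case_0002", _load_csv would return ['case_0001', 'case_0002'],
# since only the first column of each row is kept.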
def display(self):
"""Display Configuration values."""
print("\nConfigurations:")
for a in dir(self):
if not a.startswith("__") and not callable(getattr(self, a)) and not '_ids' in a:
print("{:30} {}".format(a, getattr(self, a)))
print("\n")
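# A compact sketch (added, not part of the original file) of the suffix-to-weights
# selection performed in __init__ above; the file names are exactly those listed
# in the elif chain, and an unknown suffix raises KeyError instead of a bare raise.
SUFFIX_TO_WEIGHTS = {
    'random': None,
    'genesis': 'pretrained_weights/Genesis_Chest_CT.h5',
    'genesis-autoencoder': 'pretrained_weights/Genesis_Chest_CT-autoencoder.h5',
    'genesis-nonlinear': 'pretrained_weights/Genesis_Chest_CT-nonlinear.h5',
    'genesis-localshuffling': 'pretrained_weights/Genesis_Chest_CT-localshuffling.h5',
    'genesis-outpainting': 'pretrained_weights/Genesis_Chest_CT-outpainting.h5',
    'genesis-inpainting': 'pretrained_weights/Genesis_Chest_CT-inpainting.h5',
    'denoisy': 'pretrained_weights/denoisy.h5',
    'patchshuffling': 'pretrained_weights/patchshuffling.h5',
    'hg': 'pretrained_weights/hg.h5',
}
def resolve_weights(suffix):
    return SUFFIX_TO_WEIGHTS[suffix]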
class ecc_config:
arch = 'Vnet'
# data
data = '/mnt/dfs/zongwei/Academic/MICCAI2020/Genesis_PE/dataset/augdata/VOIR'
csv = "data/ecc"
clip_min = -1000
clip_max = 1000
input_rows = 64
input_cols = 64
input_deps = 64
# model
optimizer = 'adam'
lr = 1e-3
patience = 38
verbose = 1
batch_size = 24
workers = 1
max_queue_size = workers * 1
nb_epoch = 10000
num_classes = 1
verbose = 1
def __init__(self, args=None):
self.exp_name = self.arch + '-' + args.suffix + '-cv-' + str(args.cv)
if args.data is not None:
self.data = args.data
if args.suffix == 'random':
self.weights = None
elif args.suffix == 'genesis':
self.weights = 'pretrained_weights/Genesis_Chest_CT.h5'
elif args.suffix == 'genesis-autoencoder':
self.weights = 'pretrained_weights/Genesis_Chest_CT-autoencoder.h5'
elif args.suffix == 'genesis-nonlinear':
self.weights = 'pretrained_weights/Genesis_Chest_CT-nonlinear.h5'
elif args.suffix == 'genesis-localshuffling':
self.weights = 'pretrained_weights/Genesis_Chest_CT-localshuffling.h5'
elif args.suffix == 'genesis-outpainting':
self.weights = 'pretrained_weights/Genesis_Chest_CT-outpainting.h5'
elif args.suffix == 'genesis-inpainting':
self.weights = 'pretrained_weights/Genesis_Chest_CT-inpainting.h5'
elif args.suffix == 'denoisy':
self.weights = 'pretrained_weights/denoisy.h5'
elif args.suffix == 'patchshuffling':
self.weights = 'pretrained_weights/patchshuffling.h5'
elif args.suffix == 'hg':
self.weights = 'pretrained_weights/hg.h5'
else:
raise
# logs
assert args.subsetting is not None
self.model_path = os.path.join("models/ecc", "run_"+str(args.run), args.subsetting)
if not os.path.exists(self.model_path):
os.makedirs(self.model_path)
self.logs_path = os.path.join(self.model_path, "logs")
if not os.path.exists(self.logs_path):
os.makedirs(self.logs_path)
self.patch_csv_path = 'Patch-20mm-cv-'+str(args.cv)+'-features_output_2_iter-100000.csv'
self.candidate_csv_path = 'Candidate-20mm-cv-'+str(args.cv)+'-features_output_2_iter-100000.csv'
self.csv_froc = 'features_output_2_iter-100000.csv'
def display(self):
print("Configurations")
for a in dir(self):
if not a.startswith("__") and not callable(getattr(self,a)):
print("{:30} {}".format(a,getattr(self,a)))
#print("\n")
class ncc_config:
arch = 'Vnet'
# data
data = '/mnt/dataset/shared/zongwei/LUNA16/LUNA16_FPR_32x32x32'
train_fold=[0,1,2,3,4]
valid_fold=[5,6]
test_fold=[7,8,9]
hu_min = -1000
hu_max = 1000
input_rows = 64
input_cols = 64
input_deps = 32
# model
optimizer = 'adam'
lr = 1e-3
patience = 10
verbose = 1
batch_size = 24
workers = 1
max_queue_size = workers * 1
nb_epoch = 10000
num_classes = 1
verbose = 1
def __init__(self, args=None):
self.exp_name = self.arch + '-' + args.suffix
if args.data is not None:
self.data = args.data
if args.suffix == 'random':
self.weights = None
elif args.suffix == 'genesis':
self.weights = 'pretrained_weights/Genesis_Chest_CT.h5'
elif args.suffix == 'genesis-autoencoder':
self.weights = 'pretrained_weights/Genesis_Chest_CT-autoencoder.h5'
elif args.suffix == 'genesis-nonlinear':
self.weights = 'pretrained_weights/Genesis_Chest_CT-nonlinear.h5'
elif args.suffix == 'genesis-localshuffling':
self.weights = 'pretrained_weights/Genesis_Chest_CT-localshuffling.h5'
elif args.suffix == 'genesis-outpainting':
self.weights = 'pretrained_weights/Genesis_Chest_CT-outpainting.h5'
elif args.suffix == 'genesis-inpainting':
self.weights = 'pretrained_weights/Genesis_Chest_CT-inpainting.h5'
elif args.suffix == 'denoisy':
self.weights = 'pretrained_weights/denoisy.h5'
elif args.suffix == 'patchshuffling':
self.weights = 'pretrained_weights/patchshuffling.h5'
elif args.suffix == 'hg':
self.weights = 'pretrained_weights/hg.h5'
else:
raise
# logs
self.model_path = os.path.join("models/ncc", "run_"+str(args.run))
if not os.path.exists(self.model_path):
os.makedirs(self.model_path)
self.logs_path = os.path.join(self.model_path, "logs")
if not os.path.exists(self.logs_path):
os.makedirs(self.logs_path)
def display(self):
print("Configurations")
for a in dir(self):
if not a.startswith("__") and not callable(getattr(self,a)):
print("{:30} {}".format(a,getattr(self,a)))
#print("\n")
class ncs_config:
arch = 'Vnet'
# data
data = '/mnt/dataset/shared/zongwei/LIDC'
input_rows = 64
input_cols = 64
input_deps = 32
# model
optimizer = 'adam'
lr = 1e-3
patience = 50
verbose = 1
batch_size = 16
workers = 1
max_queue_size = workers * 1
nb_epoch = 10000
def __init__(self, args):
self.exp_name = self.arch + '-' + args.suffix
if args.data is not None:
self.data = args.data
if args.suffix == 'random':
self.weights = None
elif args.suffix == 'genesis':
self.weights = 'pretrained_weights/Genesis_Chest_CT.h5'
elif args.suffix == 'genesis-autoencoder':
self.weights = 'pretrained_weights/Genesis_Chest_CT-autoencoder.h5'
elif args.suffix == 'genesis-nonlinear':
self.weights = 'pretrained_weights/Genesis_Chest_CT-nonlinear.h5'
elif args.suffix == 'genesis-localshuffling':
self.weights = 'pretrained_weights/Genesis_Chest_CT-localshuffling.h5'
elif args.suffix == 'genesis-outpainting':
self.weights = 'pretrained_weights/Genesis_Chest_CT-outpainting.h5'
elif args.suffix == 'genesis-inpainting':
self.weights = 'pretrained_weights/Genesis_Chest_CT-inpainting.h5'
elif args.suffix == 'denoisy':
self.weights = 'pretrained_weights/denoisy.h5'
elif args.suffix == 'patchshuffling':
self.weights = 'pretrained_weights/patchshuffling.h5'
elif args.suffix == 'hg':
self.weights = 'pretrained_weights/hg.h5'
else:
raise
# logs
self.model_path = os.path.join("models/ncs", "run_"+str(args.run))
if not os.path.exists(self.model_path):
os.makedirs(self.model_path)
self.logs_path = os.path.join(self.model_path, "logs")
if not os.path.exists(self.logs_path):
os.makedirs(self.logs_path)
def display(self):
"""Display Configuration values."""
print("\nConfigurations:")
for a in dir(self):
if not a.startswith("__") and not callable(getattr(self, a)):
print("{:30} {}".format(a, getattr(self, a)))
print("\n")
class lcs_config:
arch = 'Vnet'
# data
data = '/mnt/dfs/zongwei/Academic/MICCAI2019/Data/LiTS/3D_LiTS_NPY_256x256xZ'
nii = '/mnt/dataset/shared/zongwei/LiTS/Tr'
obj = 'liver'
train_idx = [n for n in range(0, 100)]
valid_idx = [n for n in range(100, 115)]
test_idx = [n for n in range(115, 130)]
num_train = len(train_idx)
num_valid = len(valid_idx)
num_test = len(test_idx)
hu_max = 1000
hu_min = -1000
input_rows = 64
input_cols = 64
input_deps = 32
# model
optimizer = 'adam'
lr = 1e-2
patience = 20
verbose = 1
batch_size = 16
workers = 1
max_queue_size = workers * 1
nb_epoch = 10000
def __init__(self, args):
self.exp_name = self.arch + '-' + args.suffix
if args.data is not None:
self.data = args.data
if args.suffix == 'random':
self.weights = None
elif args.suffix == 'genesis':
self.weights = 'pretrained_weights/Genesis_Chest_CT.h5'
elif args.suffix == 'genesis-autoencoder':
self.weights = 'pretrained_weights/Genesis_Chest_CT-autoencoder.h5'
elif args.suffix == 'genesis-nonlinear':
self.weights = 'pretrained_weights/Genesis_Chest_CT-nonlinear.h5'
elif args.suffix == 'genesis-localshuffling':
self.weights = 'pretrained_weights/Genesis_Chest_CT-localshuffling.h5'
elif args.suffix == 'genesis-outpainting':
self.weights = 'pretrained_weights/Genesis_Chest_CT-outpainting.h5'
elif args.suffix == 'genesis-inpainting':
self.weights = 'pretrained_weights/Genesis_Chest_CT-inpainting.h5'
elif args.suffix == 'denoisy':
self.weights = 'pretrained_weights/denoisy.h5'
elif args.suffix == 'patchshuffling':
self.weights = 'pretrained_weights/patchshuffling.h5'
elif args.suffix == 'hg':
self.weights = 'pretrained_weights/hg.h5'
else:
raise
# logs
self.model_path = os.path.join("models/lcs", "run_"+str(args.run))
if not os.path.exists(self.model_path):
os.makedirs(self.model_path)
self.logs_path = os.path.join(self.model_path, "logs")
if not os.path.exists(self.logs_path):
os.makedirs(self.logs_path)
def display(self):
"""Display Configuration values."""
print("\nConfigurations:")
for a in dir(self):
if not a.startswith("__") and not callable(getattr(self, a)) and not '_idx' in a:
print("{:30} {}".format(a, getattr(self, a)))
print("\n")
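# A minimal usage sketch (added): each config class expects an argparse-style
# namespace with at least `suffix`, `data` and `run` attributes; the values
# below are illustrative, and running this creates models/lcs/run_1 locally.
if __name__ == '__main__':
    import argparse
    _parser = argparse.ArgumentParser()
    _parser.add_argument('--suffix', default='genesis')
    _parser.add_argument('--data', default=None)
    _parser.add_argument('--run', default=1)
    _cfg = lcs_config(_parser.parse_args([]))
    _cfg.display()  # prints every non-private, non-callable configuration value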
|
[
"csv.reader",
"os.makedirs",
"random.Random",
"os.path.exists",
"os.path.join"
] |
[((2542, 2579), 'os.path.join', 'os.path.join', (['self.model_path', '"""logs"""'], {}), "(self.model_path, 'logs')\n", (2554, 2579), False, 'import os\n'), ((5452, 5489), 'os.path.join', 'os.path.join', (['self.model_path', '"""logs"""'], {}), "(self.model_path, 'logs')\n", (5464, 5489), False, 'import os\n'), ((8155, 8192), 'os.path.join', 'os.path.join', (['self.model_path', '"""logs"""'], {}), "(self.model_path, 'logs')\n", (8167, 8192), False, 'import os\n'), ((10403, 10440), 'os.path.join', 'os.path.join', (['self.model_path', '"""logs"""'], {}), "(self.model_path, 'logs')\n", (10415, 10440), False, 'import os\n'), ((13075, 13112), 'os.path.join', 'os.path.join', (['self.model_path', '"""logs"""'], {}), "(self.model_path, 'logs')\n", (13087, 13112), False, 'import os\n'), ((2148, 2184), 'os.path.join', 'os.path.join', (['self.csv', '"""fold_3.csv"""'], {}), "(self.csv, 'fold_3.csv')\n", (2160, 2184), False, 'import os\n'), ((2443, 2474), 'os.path.exists', 'os.path.exists', (['self.model_path'], {}), '(self.model_path)\n', (2457, 2474), False, 'import os\n'), ((2488, 2516), 'os.makedirs', 'os.makedirs', (['self.model_path'], {}), '(self.model_path)\n', (2499, 2516), False, 'import os\n'), ((2595, 2625), 'os.path.exists', 'os.path.exists', (['self.logs_path'], {}), '(self.logs_path)\n', (2609, 2625), False, 'import os\n'), ((2639, 2666), 'os.makedirs', 'os.makedirs', (['self.logs_path'], {}), '(self.logs_path)\n', (2650, 2666), False, 'import os\n'), ((2841, 2875), 'csv.reader', 'csv.reader', (['f'], {'lineterminator': '"""\n"""'}), "(f, lineterminator='\\n')\n", (2851, 2875), False, 'import csv\n'), ((5353, 5384), 'os.path.exists', 'os.path.exists', (['self.model_path'], {}), '(self.model_path)\n', (5367, 5384), False, 'import os\n'), ((5398, 5426), 'os.makedirs', 'os.makedirs', (['self.model_path'], {}), '(self.model_path)\n', (5409, 5426), False, 'import os\n'), ((5505, 5535), 'os.path.exists', 'os.path.exists', (['self.logs_path'], {}), '(self.logs_path)\n', (5519, 5535), False, 'import os\n'), ((5549, 5576), 'os.makedirs', 'os.makedirs', (['self.logs_path'], {}), '(self.logs_path)\n', (5560, 5576), False, 'import os\n'), ((8056, 8087), 'os.path.exists', 'os.path.exists', (['self.model_path'], {}), '(self.model_path)\n', (8070, 8087), False, 'import os\n'), ((8101, 8129), 'os.makedirs', 'os.makedirs', (['self.model_path'], {}), '(self.model_path)\n', (8112, 8129), False, 'import os\n'), ((8208, 8238), 'os.path.exists', 'os.path.exists', (['self.logs_path'], {}), '(self.logs_path)\n', (8222, 8238), False, 'import os\n'), ((8252, 8279), 'os.makedirs', 'os.makedirs', (['self.logs_path'], {}), '(self.logs_path)\n', (8263, 8279), False, 'import os\n'), ((10304, 10335), 'os.path.exists', 'os.path.exists', (['self.model_path'], {}), '(self.model_path)\n', (10318, 10335), False, 'import os\n'), ((10349, 10377), 'os.makedirs', 'os.makedirs', (['self.model_path'], {}), '(self.model_path)\n', (10360, 10377), False, 'import os\n'), ((10456, 10486), 'os.path.exists', 'os.path.exists', (['self.logs_path'], {}), '(self.logs_path)\n', (10470, 10486), False, 'import os\n'), ((10500, 10527), 'os.makedirs', 'os.makedirs', (['self.logs_path'], {}), '(self.logs_path)\n', (10511, 10527), False, 'import os\n'), ((12976, 13007), 'os.path.exists', 'os.path.exists', (['self.model_path'], {}), '(self.model_path)\n', (12990, 13007), False, 'import os\n'), ((13021, 13049), 'os.makedirs', 'os.makedirs', (['self.model_path'], {}), '(self.model_path)\n', (13032, 13049), False, 'import os\n'), ((13128, 13158), 
'os.path.exists', 'os.path.exists', (['self.logs_path'], {}), '(self.logs_path)\n', (13142, 13158), False, 'import os\n'), ((13172, 13199), 'os.makedirs', 'os.makedirs', (['self.logs_path'], {}), '(self.logs_path)\n', (13183, 13199), False, 'import os\n'), ((1853, 1889), 'os.path.join', 'os.path.join', (['self.csv', '"""fold_1.csv"""'], {}), "(self.csv, 'fold_1.csv')\n", (1865, 1889), False, 'import os\n'), ((1908, 1944), 'os.path.join', 'os.path.join', (['self.csv', '"""fold_2.csv"""'], {}), "(self.csv, 'fold_2.csv')\n", (1920, 1944), False, 'import os\n'), ((1954, 1970), 'random.Random', 'random.Random', (['(4)'], {}), '(4)\n', (1967, 1970), False, 'import random\n')]
|
"""Generates a gin config from the current task/mixture list.
Usage: `python3 -m config.generate`
"""
from itertools import chain, product
from pathlib import Path
import t5
import conversational_ai.tasks # noqa: F401
WHITELIST = ["chitchat", "dailydialog", "convai2"]
sizes = ["small", "base", "large", "3b", "11b"]
mixtures = filter(
lambda task: any(name in task for name in WHITELIST),
chain(t5.data.TaskRegistry.names(), t5.data.MixtureRegistry.names()),
)
for size, mixture in product(sizes, mixtures):
path = Path(f"./config/mixtures/{mixture}/{size}.gin")
print(path)
path.parent.mkdir(parents=True, exist_ok=True)
body = """include "finetune_{size}.gin"
MIXTURE_NAME = "{mixture}"
utils.run.model_dir = "./checkpoints/conversational-ai/{mixture}/{size}"
""".format(
size=size, mixture=mixture
)
path.write_text(body)
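# For illustration (added): with a hypothetical mixture named "chitchat_mix" and
# size "small", the loop above writes ./config/mixtures/chitchat_mix/small.gin containing:
#
#   include "finetune_small.gin"
#   MIXTURE_NAME = "chitchat_mix"
#   utils.run.model_dir = "./checkpoints/conversational-ai/chitchat_mix/small"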
|
[
"t5.data.TaskRegistry.names",
"pathlib.Path",
"t5.data.MixtureRegistry.names",
"itertools.product"
] |
[((498, 522), 'itertools.product', 'product', (['sizes', 'mixtures'], {}), '(sizes, mixtures)\n', (505, 522), False, 'from itertools import chain, product\n'), ((535, 582), 'pathlib.Path', 'Path', (['f"""./config/mixtures/{mixture}/{size}.gin"""'], {}), "(f'./config/mixtures/{mixture}/{size}.gin')\n", (539, 582), False, 'from pathlib import Path\n'), ((410, 438), 't5.data.TaskRegistry.names', 't5.data.TaskRegistry.names', ([], {}), '()\n', (436, 438), False, 'import t5\n'), ((440, 471), 't5.data.MixtureRegistry.names', 't5.data.MixtureRegistry.names', ([], {}), '()\n', (469, 471), False, 'import t5\n')]
|
from super_taxi.model.generics import Vehicle, Coordinate
from super_taxi.model.cars import Car,SUVCar
class Taxi(Vehicle):
def __init__(self, id=None):
Vehicle.__init__(self, id=id)
self.position = Coordinate(0, 0)
self.ride = None
self.booked = False
self.pickup_distance = 0
def booked_for(self, ride):
self.ride = ride
self.booked = True
def is_booked(self):
return self.booked
def reset(self):
self.position = Coordinate(0, 0)
self.ride = None
self.booked = False
self.pickup_distance = 0
class TaxiCar(Car, Taxi):
def __init__(self, id=None):
Car.__init__(self, id)
Taxi.__init__(self, id)
class TaxiSuvCar(SUVCar, Taxi):
def __init__(self, id=None):
SUVCar.__init__(self, id)
Taxi.__init__(self, id)
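# A minimal usage sketch (added, not part of the original module): the explicit
# Car.__init__/Taxi.__init__ calls initialise both bases exactly once.
if __name__ == '__main__':
    cab = TaxiCar(id=1)
    cab.booked_for('ride-42')       # 'ride-42' is an illustrative placeholder
    print(cab.is_booked())          # expected: True
    cab.reset()
    print(cab.is_booked())          # expected: False (position is back at the origin)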
|
[
"super_taxi.model.cars.SUVCar.__init__",
"super_taxi.model.generics.Vehicle.__init__",
"super_taxi.model.cars.Car.__init__",
"super_taxi.model.generics.Coordinate"
] |
[((167, 196), 'super_taxi.model.generics.Vehicle.__init__', 'Vehicle.__init__', (['self'], {'id': 'id'}), '(self, id=id)\n', (183, 196), False, 'from super_taxi.model.generics import Vehicle, Coordinate\n'), ((221, 237), 'super_taxi.model.generics.Coordinate', 'Coordinate', (['(0)', '(0)'], {}), '(0, 0)\n', (231, 237), False, 'from super_taxi.model.generics import Vehicle, Coordinate\n'), ((508, 524), 'super_taxi.model.generics.Coordinate', 'Coordinate', (['(0)', '(0)'], {}), '(0, 0)\n', (518, 524), False, 'from super_taxi.model.generics import Vehicle, Coordinate\n'), ((680, 702), 'super_taxi.model.cars.Car.__init__', 'Car.__init__', (['self', 'id'], {}), '(self, id)\n', (692, 702), False, 'from super_taxi.model.cars import Car, SUVCar\n'), ((810, 835), 'super_taxi.model.cars.SUVCar.__init__', 'SUVCar.__init__', (['self', 'id'], {}), '(self, id)\n', (825, 835), False, 'from super_taxi.model.cars import Car, SUVCar\n')]
|
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from airflow.hooks.http_hook import HttpHook
from airflow.exceptions import AirflowException
class MSTeamsWebhookHook(HttpHook):
"""
This hook allows you to post messages to MS Teams using the Incoming Webhook connector.
It accepts either an MS Teams webhook token directly or a connection that holds the webhook token.
If both are supplied, the webhook token is appended to the host taken from the connection.
:param http_conn_id: connection that has MS Teams webhook URL
:type http_conn_id: str
:param webhook_token: MS Teams webhook token
:type webhook_token: str
:param message: The message you want to send on MS Teams
:type message: str
:param subtitle: The subtitle of the message to send
:type subtitle: str
:param button_text: The text of the action button
:type button_text: str
:param button_url: The URL for the action button click
:type button_url: str
:param theme_color: Hex code of the card theme, without the #
:type theme_color: str
:param proxy: Proxy to use when making the webhook request
:type proxy: str
"""
def __init__(self,
http_conn_id=None,
webhook_token=None,
message="",
subtitle="",
button_text="",
button_url="",
theme_color="00FF00",
proxy=None,
*args,
**kwargs
):
super(MSTeamsWebhookHook, self).__init__(*args, **kwargs)
self.http_conn_id = http_conn_id
self.webhook_token = self.get_token(webhook_token, http_conn_id)
self.message = message
self.subtitle = subtitle
self.button_text = button_text
self.button_url = button_url
self.theme_color = theme_color
self.proxy = proxy
def get_proxy(self, http_conn_id):
conn = self.get_connection(http_conn_id)
extra = conn.extra_dejson
print(extra)
return extra.get("proxy", '')
def get_token(self, token, http_conn_id):
"""
Given either a manually set token or a conn_id, return the webhook_token to use
:param token: The manually provided token
:param conn_id: The conn_id provided
:return: webhook_token (str) to use
"""
if token:
return token
elif http_conn_id:
conn = self.get_connection(http_conn_id)
extra = conn.extra_dejson
return extra.get('webhook_token', '')
else:
raise AirflowException('Cannot get URL: No valid MS Teams '
'webhook URL nor conn_id supplied')
def build_message(self):
cardjson = """
{{
"@type": "MessageCard",
"@context": "http://schema.org/extensions",
"themeColor": "{3}",
"summary": "{0}",
"sections": [{{
"activityTitle": "{1}",
"activitySubtitle": "{2}",
"markdown": true,
"potentialAction": [
{{
"@type": "OpenUri",
"name": "{4}",
"targets": [
{{ "os": "default", "uri": "{5}" }}
]
}}
]
}}]
}}
"""
return cardjson.format(self.message, self.message, self.subtitle, self.theme_color,
self.button_text, self.button_url)
def execute(self):
"""
Execute the MS Teams webhook call, routing it through a proxy if one is configured on the connection.
"""
proxies = {}
proxy_url = self.get_proxy(self.http_conn_id)
print("Proxy is : " + proxy_url)
if len(proxy_url) > 5:
proxies = {'https': proxy_url}
self.run(endpoint=self.webhook_token,
data=self.build_message(),
headers={'Content-type': 'application/json'},
extra_options={'proxies': proxies})
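# For illustration (added): with message="Job done", subtitle="daily ETL",
# theme_color="00FF00", button_text="View logs" and button_url="https://example.com",
# build_message() fills the card template roughly as:
# {
#   "@type": "MessageCard",
#   "@context": "http://schema.org/extensions",
#   "themeColor": "00FF00",
#   "summary": "Job done",
#   "sections": [{
#     "activityTitle": "Job done",
#     "activitySubtitle": "daily ETL",
#     "markdown": true,
#     "potentialAction": [{"@type": "OpenUri", "name": "View logs",
#                          "targets": [{"os": "default", "uri": "https://example.com"}]}]
#   }]
# }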
|
[
"airflow.exceptions.AirflowException"
] |
[((3382, 3473), 'airflow.exceptions.AirflowException', 'AirflowException', (['"""Cannot get URL: No valid MS Teams webhook URL nor conn_id supplied"""'], {}), "(\n 'Cannot get URL: No valid MS Teams webhook URL nor conn_id supplied')\n", (3398, 3473), False, 'from airflow.exceptions import AirflowException\n')]
|
import numpy as np
#import matplotlib.pyplot as plt
import shapely.geometry
from scipy.ndimage.morphology import binary_dilation
from scipy.ndimage import label
from multiprocessing import Pool
def voxels_to_polygon(image_stack, pixel_size, center=(0.5, 0.5)):
"""Take a stack of images and produce a stack of shapely polygons.
The images are interpreted as a solid shape with boundary along the pixel
exterior edge. Thus an image with a single nonzero pixel will return a square
polygon with sidelength equal to the pixel_size.
IN:
image_stack: list of binary (1.0,0) numpy array 2d images each depicting
a single connected region of 1.0 surrounded by 0.0.
pixel_size: The absolute pixel size of the input images. Used to make the
output polygons coordinates real spaced.
center: the relative origin of the image, axis=0 is x and axis=1 is y
increasing with increasing index. For instance center=(0.5,0.5)
will select the centre of the image as the origin.
OUT:
polygon_stack: list of shapely.geometry.polygons each representing the bound
of the corresponding input binary image.
"""
polygon_stack = [pixels_to_polygon(image, pixel_size, center) for image in image_stack]
return polygon_stack
def check_input(image):
"""Check that the provided image consists of a single connected domain of pixels.
"""
# Check that the input image has no floating pixels.
labeled_array, num_features = label(image.astype(int) + 1)
assert num_features == 1, "The input image must contain a single solid domain of connected pixels but it appears " \
"to have floating pixels "
#
# Check that the input image has no holes.
s = np.sum(np.abs(image.astype(int)[1:, :] - image.astype(int)[0:-1, :]), axis=0)
assert np.alltrue(
s <= 2), "The input image must contain a single solid domain of connected pixels but it appears to have holes"
#
def pixels_to_polygon(image, pixel_size, center=(0.5, 0.5)):
"""Take a single image and produce a shapely polygon.
"""
check_input(image)
expanded_image = expand_image(image, factor=3)
indices = get_image_boundary_index(expanded_image)
coordinates = indices_to_coordinates(indices, pixel_size / 3., center, expanded_image)
polygon = shapely.geometry.Polygon(coordinates)
# show_polygon_and_image(polygon, image, pixel_size, center) #<= DEBUG
return polygon
def expand_image(image, factor):
"""Expand 2d binary image so that each pixel is split by copying
into factor x factor number of pixels.
"""
expanded_image = np.repeat(image, factor, axis=1)
expanded_image = np.repeat(expanded_image, factor, axis=0)
return expanded_image
def get_image_boundary_index(image):
"""Find the pixel indices of the boundary pixels of a binary image.
"""
boundary_image = get_boundary_image(image)
bound_indx = np.where(boundary_image == 1)
ix, iy = bound_indx[0][0], bound_indx[1][0] # starting index
indices = [(ix, iy)]
while (not len(indices) == np.sum(boundary_image)):
# Walk around border and save boundary pixel indices
mask = np.zeros(boundary_image.shape)
mask[np.max([0, ix - 1]):ix + 2, iy] = 1
mask[ix, np.max([iy - 1]):iy + 2] = 1
neighbour_indx = np.where(boundary_image * mask)
for ix, iy in zip(neighbour_indx[0], neighbour_indx[1]):
if (ix, iy) not in indices:
indices.append((ix, iy))
break
indices = sparse_indices(indices)
return indices
def get_boundary_image(image):
"""Return a pixel image with 1 along the boundary of the assumed
object in image.
"""
k = np.ones((3, 3), dtype=int)
dilation = binary_dilation(image == 0, k, border_value=1)
boundary_image = dilation * image
return boundary_image
def sparse_indices(indices):
"""Remove unnecessary nodes in the polygon (three nodes on a line are unnecessary).
"""
new_indices = []
for i in range(0, len(indices) - 1):
if not (indices[i - 1][0] == indices[i][0] == indices[i + 1][0] or \
indices[i - 1][1] == indices[i][1] == indices[i + 1][1]):
new_indices.append(indices[i])
return new_indices
def indices_to_coordinates(indices, pixel_size, center, image):
"""Compute real space coordinates of the image boundary from a set of pixel indices.
"""
dx = image.shape[1] * center[0]
dy = image.shape[0] * center[1]
coordinates = []
for c in indices:
# Verified by simulated nonsymmetric grain
ycoord = pixel_size * (c[1] + 0.5 - dx + (c[1] % 3 - 1) * 0.5)
xcoord = pixel_size * (-c[0] - 0.5 + dy - (c[0] % 3 - 1) * 0.5)
coordinates.append((xcoord, ycoord))
return coordinates
def get_path_for_pos(args):
arr, all_entry, all_exit, all_nhat, all_L, all_nsegs, \
bad_lines, xray_endpoints, sample_polygon, zpos = args
for i, ang, dty in arr:
# Translate and rotate the xray endpoints according to ytrans and angle
c, s = np.cos(np.radians(-ang)), np.sin(np.radians(-ang))
rotz = np.array([[c, -s], [s, c]])
rx = rotz.dot(xray_endpoints + np.array([[0, 0], [dty, dty]]))
xray_polygon = shapely.geometry.LineString([rx[:, 0], rx[:, 1]])
# compute the intersections between beam and sample in sample coordinates
intersection_points = get_intersection(xray_polygon, sample_polygon, zpos)
if intersection_points is None:
# If a measurement missed the sample or grazed a corner, we skip ahead
bad_lines.append(int(i))
else:
# make a measurement at the current setting
entry, exit, nhat, L, nsegs = get_quanteties(intersection_points)
# save the measurement results in global lists
all_entry.append(entry)
all_exit.append(exit)
all_nhat.append(nhat)
all_L.append(L)
all_nsegs.append(nsegs)
return all_entry, all_exit, all_nhat, all_L, all_nsegs, bad_lines
def get_integral_paths(angles, ytrans, zpos, sample_polygon, nprocs, show_geom=False):
"""Compute entry-exit points for a scanrange.
"""
# Instantiate lists to contain all measurements
all_entry, all_exit, all_nhat, all_L, all_nsegs, bad_lines = [], [], [], [], [], []
xray_endpoints = get_xray_endpoints(sample_polygon)
# Loop over all experimental settings
split_arrays = np.array_split(list(zip(range(len(angles)), angles, ytrans)), nprocs)
# split_arrays = np.array_split(np.array(list(enumerate(zip(angles, ytrans)))), 2)
args = [(arr, all_entry, all_exit, all_nhat, all_L, all_nsegs, bad_lines,
xray_endpoints, sample_polygon, zpos) for arr in split_arrays]
with Pool(nprocs) as p:
out = p.map(get_path_for_pos, args)
# Unpack the multicore results
all_entry, all_exit, all_nhat, all_L, all_nsegs, bad_lines = [], [], [], [], [], []
for o in out:
for i, l in enumerate([all_entry, all_exit, all_nhat, all_L, all_nsegs, bad_lines]):
l.extend(o[i])
# repack lists of measurements into numpy arrays of desired format
entry, exit, nhat, L, nsegs = repack(all_entry, all_exit, all_nhat, all_L, all_nsegs)
return entry, exit, nhat, L, nsegs, bad_lines
def get_xray_endpoints(sample_polygon):
"""Calculate endpoints of the xray line segment. The length of the
line segment is adapted to make sure the xray always covers the full
length of the sample.
"""
xc, yc = sample_polygon.exterior.xy
xmin = np.min(xc)
xmax = np.max(xc)
ymin = np.min(yc)
ymax = np.max(yc)
D = np.sqrt((xmax - xmin) ** 2 + (ymax - ymin) ** 2)
return np.array([[-1.1 * D, 1.1 * D], [0, 0]])
def get_intersection(xray_polygon, sample_polygon, z):
"""Compute the 3d coordinates of intersection between xray and
sample.
"""
intersection = sample_polygon.intersection(xray_polygon)
if intersection.is_empty or isinstance(intersection, shapely.geometry.point.Point):
# we missed the sample with the beam
intersection_points = None
elif isinstance(intersection, shapely.geometry.linestring.LineString):
# we got a single line segment intersection
intersection_points = np.zeros((2, 3))
intersection_points[:2, :2] = np.array(intersection.xy).T
intersection_points[:, 2] = z
elif isinstance(intersection, shapely.geometry.multilinestring.MultiLineString):
# we got multiple line segments intersection
intersection_points = np.zeros((2 * len(intersection.geoms), 3))
for i, line_segment in enumerate(intersection.geoms):
intersection_points[2 * i:2 * (i + 1), :2] = np.array(line_segment.xy).T
intersection_points[:, 2] = z
return intersection_points
def get_quanteties(intersection_points):
nsegs = intersection_points.shape[0] // 2
entry, exit = [], []
p1 = intersection_points[0, :]
p2 = intersection_points[1, :]
nhat = list((p2 - p1) / np.linalg.norm(p2 - p1))
L = 0
for i in range(nsegs):
p1 = intersection_points[2 * i, :]
p2 = intersection_points[2 * i + 1, :]
entry.extend(list(p1))
exit.extend(list(p2))
length = np.linalg.norm(p2 - p1)
L += length
return entry, exit, nhat, L, nsegs
def repack(all_entry, all_exit, all_nhat, all_L, all_nsegs):
"""Repack global measurement list into numpy arrays of desired format.
"""
N = len(all_L)
p = max(max(all_nsegs), 1)
nsegs = np.array(all_nsegs).reshape(1, N)
L = np.array(all_L).reshape(1, N)
entry = np.zeros((3 * p, N))
for i, en in enumerate(all_entry):
entry[:len(en[:]), i] = en[:]
exit = np.zeros((3 * p, N))
for i, ex in enumerate(all_exit):
exit[:len(ex[:]), i] = ex[:]
nhat = np.array(all_nhat).T
return entry, exit, nhat, L, nsegs
# def show_polygon_and_image(polygon, image, pixel_size, center):
# """Plot a image and polygon for debugging purposes
# """
# fig, ax = plt.subplots(1, 2, figsize=(12, 6))
# fig.suptitle('Center at ' + str(center))
# xc, yc = polygon.exterior.xy
# xcenter = image.shape[1] * pixel_size * center[0]
# ycenter = image.shape[0] * pixel_size * center[1]
# ax[0].imshow(image, cmap='gray')
# ax[0].set_title('Pixel image')
# ax[0].arrow(int(image.shape[1] * center[0]), int(image.shape[0] * center[1]), \
# image.shape[0] // 4, 0, color='r', head_width=0.15) # y
# ax[0].text(int(image.shape[1] * center[0]) + image.shape[1] // 4, int(image.shape[0] * center[1]) + 0.25, \
# 'y', color='r')
# ax[0].arrow(int(image.shape[1] * center[0]), int(image.shape[0] * center[1]), \
# 0, -image.shape[1] // 4, color='r', head_width=0.15) # x
# ax[0].text(int(image.shape[1] * center[0]) + 0.25, int(image.shape[0] * center[1]) - image.shape[1] // 4, \
# 'x', color='r')
# ax[1].set_title('Polygon representation')
# ax[1].fill(xc, yc, c='gray', zorder=1)
# ax[1].scatter(xc, yc, c='r', zorder=2)
# ax[1].grid(True)
# ax[1].scatter(0, 0, c='b', zorder=3)
# ax[1].set_xlim([-xcenter, image.shape[1] * pixel_size - xcenter])
# ax[1].set_ylim([-ycenter, image.shape[0] * pixel_size - ycenter])
# ax[1].set_xlabel('x')
# ax[1].set_ylabel('y')
# plt.show()
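# A minimal sketch (added) illustrating the voxels_to_polygon docstring: a single
# nonzero pixel becomes a square polygon whose side length equals pixel_size.
if __name__ == '__main__':
    img = np.zeros((3, 3))
    img[1, 1] = 1.0
    poly = voxels_to_polygon([img], pixel_size=2.0)[0]
    print(poly.area)  # expected: 4.0, i.e. a 2.0 x 2.0 square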
|
[
"numpy.radians",
"numpy.sum",
"scipy.ndimage.morphology.binary_dilation",
"numpy.zeros",
"numpy.ones",
"numpy.min",
"numpy.where",
"numpy.max",
"numpy.array",
"multiprocessing.Pool",
"numpy.alltrue",
"numpy.linalg.norm",
"numpy.sqrt",
"numpy.repeat"
] |
[((1932, 1950), 'numpy.alltrue', 'np.alltrue', (['(s <= 2)'], {}), '(s <= 2)\n', (1942, 1950), True, 'import numpy as np\n'), ((2742, 2774), 'numpy.repeat', 'np.repeat', (['image', 'factor'], {'axis': '(1)'}), '(image, factor, axis=1)\n', (2751, 2774), True, 'import numpy as np\n'), ((2796, 2837), 'numpy.repeat', 'np.repeat', (['expanded_image', 'factor'], {'axis': '(0)'}), '(expanded_image, factor, axis=0)\n', (2805, 2837), True, 'import numpy as np\n'), ((3048, 3077), 'numpy.where', 'np.where', (['(boundary_image == 1)'], {}), '(boundary_image == 1)\n', (3056, 3077), True, 'import numpy as np\n'), ((3848, 3874), 'numpy.ones', 'np.ones', (['(3, 3)'], {'dtype': 'int'}), '((3, 3), dtype=int)\n', (3855, 3874), True, 'import numpy as np\n'), ((3890, 3936), 'scipy.ndimage.morphology.binary_dilation', 'binary_dilation', (['(image == 0)', 'k'], {'border_value': '(1)'}), '(image == 0, k, border_value=1)\n', (3905, 3936), False, 'from scipy.ndimage.morphology import binary_dilation\n'), ((7754, 7764), 'numpy.min', 'np.min', (['xc'], {}), '(xc)\n', (7760, 7764), True, 'import numpy as np\n'), ((7776, 7786), 'numpy.max', 'np.max', (['xc'], {}), '(xc)\n', (7782, 7786), True, 'import numpy as np\n'), ((7798, 7808), 'numpy.min', 'np.min', (['yc'], {}), '(yc)\n', (7804, 7808), True, 'import numpy as np\n'), ((7820, 7830), 'numpy.max', 'np.max', (['yc'], {}), '(yc)\n', (7826, 7830), True, 'import numpy as np\n'), ((7839, 7887), 'numpy.sqrt', 'np.sqrt', (['((xmax - xmin) ** 2 + (ymax - ymin) ** 2)'], {}), '((xmax - xmin) ** 2 + (ymax - ymin) ** 2)\n', (7846, 7887), True, 'import numpy as np\n'), ((7899, 7938), 'numpy.array', 'np.array', (['[[-1.1 * D, 1.1 * D], [0, 0]]'], {}), '([[-1.1 * D, 1.1 * D], [0, 0]])\n', (7907, 7938), True, 'import numpy as np\n'), ((9838, 9858), 'numpy.zeros', 'np.zeros', (['(3 * p, N)'], {}), '((3 * p, N))\n', (9846, 9858), True, 'import numpy as np\n'), ((9948, 9968), 'numpy.zeros', 'np.zeros', (['(3 * p, N)'], {}), '((3 * p, N))\n', (9956, 9968), True, 'import numpy as np\n'), ((3301, 3331), 'numpy.zeros', 'np.zeros', (['boundary_image.shape'], {}), '(boundary_image.shape)\n', (3309, 3331), True, 'import numpy as np\n'), ((3452, 3483), 'numpy.where', 'np.where', (['(boundary_image * mask)'], {}), '(boundary_image * mask)\n', (3460, 3483), True, 'import numpy as np\n'), ((5277, 5304), 'numpy.array', 'np.array', (['[[c, -s], [s, c]]'], {}), '([[c, -s], [s, c]])\n', (5285, 5304), True, 'import numpy as np\n'), ((6950, 6962), 'multiprocessing.Pool', 'Pool', (['nprocs'], {}), '(nprocs)\n', (6954, 6962), False, 'from multiprocessing import Pool\n'), ((9460, 9483), 'numpy.linalg.norm', 'np.linalg.norm', (['(p2 - p1)'], {}), '(p2 - p1)\n', (9474, 9483), True, 'import numpy as np\n'), ((10056, 10074), 'numpy.array', 'np.array', (['all_nhat'], {}), '(all_nhat)\n', (10064, 10074), True, 'import numpy as np\n'), ((3200, 3222), 'numpy.sum', 'np.sum', (['boundary_image'], {}), '(boundary_image)\n', (3206, 3222), True, 'import numpy as np\n'), ((8469, 8485), 'numpy.zeros', 'np.zeros', (['(2, 3)'], {}), '((2, 3))\n', (8477, 8485), True, 'import numpy as np\n'), ((9229, 9252), 'numpy.linalg.norm', 'np.linalg.norm', (['(p2 - p1)'], {}), '(p2 - p1)\n', (9243, 9252), True, 'import numpy as np\n'), ((9753, 9772), 'numpy.array', 'np.array', (['all_nsegs'], {}), '(all_nsegs)\n', (9761, 9772), True, 'import numpy as np\n'), ((9795, 9810), 'numpy.array', 'np.array', (['all_L'], {}), '(all_L)\n', (9803, 9810), True, 'import numpy as np\n'), ((5218, 5234), 'numpy.radians', 'np.radians', (['(-ang)'], 
{}), '(-ang)\n', (5228, 5234), True, 'import numpy as np\n'), ((5244, 5260), 'numpy.radians', 'np.radians', (['(-ang)'], {}), '(-ang)\n', (5254, 5260), True, 'import numpy as np\n'), ((5344, 5374), 'numpy.array', 'np.array', (['[[0, 0], [dty, dty]]'], {}), '([[0, 0], [dty, dty]])\n', (5352, 5374), True, 'import numpy as np\n'), ((8524, 8549), 'numpy.array', 'np.array', (['intersection.xy'], {}), '(intersection.xy)\n', (8532, 8549), True, 'import numpy as np\n'), ((3345, 3364), 'numpy.max', 'np.max', (['[0, ix - 1]'], {}), '([0, ix - 1])\n', (3351, 3364), True, 'import numpy as np\n'), ((3398, 3414), 'numpy.max', 'np.max', (['[iy - 1]'], {}), '([iy - 1])\n', (3404, 3414), True, 'import numpy as np\n'), ((8920, 8945), 'numpy.array', 'np.array', (['line_segment.xy'], {}), '(line_segment.xy)\n', (8928, 8945), True, 'import numpy as np\n')]
|
#!/usr/bin/env python3
""" rcrr_instr.py
Implementation of RCRR format instructions.
"""
from pyvex.lifting.util import Type, Instruction
import bitstring
from .logger import log_this
class RCRR_Instructions(Instruction):
""" Insert Bit Field instruction.
op = 0x97
op2 = 0x00 3-bit
User Status Flags: no change.
"""
name = 'RCRR_Instructions ...'
op = "{0}{1}".format(bin(9)[2:].zfill(4), bin(7)[2:].zfill(4))
bin_format = op + 'a'*4 + 'b'*4 + 'c'*4 + 'd'*4 + 'e'*4 + 'f'*4
def parse(self, bitstrm):
data = Instruction.parse(self, bitstrm)
tmp = bitstring.BitArray(bin="{0}{1}{2}{3}{4}{5}".format(data['e'],
data['f'],
data['c'],
data['d'],
data['a'],
data['b']))
a = int(tmp[20:24].hex, 16)
const4 = int(tmp[16:20].hex, 16)
w = int(tmp[11:16].bin.zfill(8), 2)
op2 = int(tmp[8:11].bin, 2)
d = int(tmp[4:8].hex, 16)
c = int(tmp[:4].hex, 16)
if op2 == 0:
self.name = "RCRR_INSERT"
else:
self.name = "UNKNOWN"
data = {"a": a,
"const4": const4,
"c": c,
"w": w,
"d": d,
"op2": op2}
log_this(self.name, data, hex(self.addr))
return data
def get_dst_reg(self):
return "d{0}".format(self.data['c'])
def get_const4(self):
return self.constant(self.data['const4'], Type.int_32)
def get_d_d_2(self):
return self.get("d{0}".format(self.data['d']+1), Type.int_32)
def get_d_d_1(self):
return self.get("d{0}".format(self.data['d']), Type.int_32)
def get_d_a(self):
return self.get("d{0}".format(self.data['a']), Type.int_32)
def fetch_operands(self):
return self.get_d_a(), self.get_d_d_1(), self.get_d_d_2(), self.get_const4()
def compute_result(self, *args):
d_a = args[0]
d_d_1 = args[1]
d_d_2 = args[2]
const4 = args[3]
# E[d] = d_d_2 | d_d_1
pos = d_d_1 & 0x1f
width = d_d_2 & 0x1f
#TODO if (pos + width > 32) or (width == 0):
# print("Undefined result for (pos + width > 32)!")
# exit(1)
result = ""
if self.data['op2'] == 0:
const_2 = self.constant(2, Type.int_8)
power_2_cond_1 = ((width & 1) == 1).cast_to(Type.int_8)
power_2_cond_2 = ((width >> 1 & 1) == 1).cast_to(Type.int_8)
power_2_cond_3 = ((width >> 2 & 1) == 1).cast_to(Type.int_8)
power_2_cond_4 = ((width >> 3 & 1) == 1).cast_to(Type.int_8)
power_2_cond_5 = ((width >> 4 & 1) == 1).cast_to(Type.int_8)
power_2_calc = ((((const_2 << power_2_cond_1) <<
power_2_cond_2) << power_2_cond_3) << power_2_cond_4) << power_2_cond_5
mask = ((power_2_calc - 1) << pos.cast_to(Type.int_8)).cast_to(Type.int_32)
result = (d_a & ~mask) | ((const4 << pos.cast_to(Type.int_8)) & mask)
return result
def commit_result(self, res):
self.put(res, self.get_dst_reg())
|
[
"pyvex.lifting.util.Instruction.parse"
] |
[((568, 600), 'pyvex.lifting.util.Instruction.parse', 'Instruction.parse', (['self', 'bitstrm'], {}), '(self, bitstrm)\n', (585, 600), False, 'from pyvex.lifting.util import Type, Instruction\n')]
|
import panel as pn
import holoviews as hv
from earthsim.grabcut import GrabCutPanel, SelectRegionPanel
from adhui import CreateMesh, ConceptualModelEditor
hv.extension('bokeh')
stages = [
('Select Region', SelectRegionPanel),
('Grabcut', GrabCutPanel),
('Path Editor', ConceptualModelEditor),
('Mesh', CreateMesh)
]
# create the pipeline
pipeline = pn.pipeline.Pipeline(stages, debug=True)
# modify button width (not exposed)
pipeline.layout[0][1]._widget_box.width = 100
pipeline.layout[0][2]._widget_box.width = 100
# return a display of the pipeline
pipeline.layout.servable()
|
[
"panel.pipeline.Pipeline",
"holoviews.extension"
] |
[((156, 177), 'holoviews.extension', 'hv.extension', (['"""bokeh"""'], {}), "('bokeh')\n", (168, 177), True, 'import holoviews as hv\n'), ((369, 409), 'panel.pipeline.Pipeline', 'pn.pipeline.Pipeline', (['stages'], {'debug': '(True)'}), '(stages, debug=True)\n', (389, 409), True, 'import panel as pn\n')]
|
import unittest
from etk.extractors.language_identification_extractor import LanguageIdentificationExtractor
class TestLanguageIdentificationExtractor(unittest.TestCase):
def test_langid(self):
extractor = LanguageIdentificationExtractor()
text_en = "langid.py comes pre-trained on 97 languages (ISO 639-1 codes given)"
result_en = extractor.extract(text_en, "LANGID")
self.assertEqual(result_en[0].value, "en")
text_es = "<NAME>"
result_es = extractor.extract(text_es, "LANGID")
self.assertEqual(result_es[0].value, "es")
text_de = "Ein, zwei, drei, vier"
result_de = extractor.extract(text_de, "LANGID")
self.assertEqual(result_de[0].value, "de")
text_unknown = "%$@$%##"
result_unknown = extractor.extract(text_unknown, "LANGID")
self.assertEqual(result_unknown[0].value, "en")
def test_langdetect(self):
extractor = LanguageIdentificationExtractor()
text_en = "langdetect supports 55 languages out of the box (ISO 639-1 codes)"
result_en = extractor.extract(text_en, "LANGDETECT")
self.assertEqual(result_en[0].value, "en")
text_es = "<NAME>"
result_es = extractor.extract(text_es, "LANGDETECT")
self.assertEqual(result_es[0].value, "es")
text_de = "Ein, zwei, drei, vier"
result_de = extractor.extract(text_de, "LANGDETECT")
self.assertEqual(result_de[0].value, "de")
text_unknown = "%$@$%##"
result_unknown = extractor.extract(text_unknown, "LANGDETECT")
self.assertTrue(len(result_unknown) == 0)
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"etk.extractors.language_identification_extractor.LanguageIdentificationExtractor"
] |
[((1663, 1678), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1676, 1678), False, 'import unittest\n'), ((220, 253), 'etk.extractors.language_identification_extractor.LanguageIdentificationExtractor', 'LanguageIdentificationExtractor', ([], {}), '()\n', (251, 253), False, 'from etk.extractors.language_identification_extractor import LanguageIdentificationExtractor\n'), ((947, 980), 'etk.extractors.language_identification_extractor.LanguageIdentificationExtractor', 'LanguageIdentificationExtractor', ([], {}), '()\n', (978, 980), False, 'from etk.extractors.language_identification_extractor import LanguageIdentificationExtractor\n')]
|
import numpy as np
# We expect the arccos of 1 to be 0, and of -1 to be pi:
np.arccos([1, -1])
# array([ 0. , 3.14159265])
# Plot arccos:
import matplotlib.pyplot as plt
x = np.linspace(-1, 1, num=100)
plt.plot(x, np.arccos(x))
plt.axis('tight')
plt.show()
|
[
"matplotlib.pyplot.show",
"matplotlib.pyplot.axis"
] |
[((220, 237), 'matplotlib.pyplot.axis', 'plt.axis', (['"""tight"""'], {}), "('tight')\n", (228, 237), True, 'import matplotlib.pyplot as plt\n'), ((238, 248), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (246, 248), True, 'import matplotlib.pyplot as plt\n')]
|
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
import copy
import os
import datetime
import time
from functools import reduce
# Function that provide some information about the cvs files
def infos(old_df_names, months):
"""
Print information about the databases
input:
- list of csv file names
- months
output:
- prints, for each month, the NaN count and percentage per column and the total number of rows
"""
for i in range(len(old_df_names)):
df = pd.read_csv(old_df_names[i])
print('Month %s :' %months[i])
for i in df.columns:
print('\t- {} has number of Nan : {:d} ({:.2f}%)'.format(i, int(df[i].isna().sum()), (int(df[i].isna().sum())/len(df))*100))
print('Total number of rows: {:d}'.format(len(df)))
print('\n')
return
# Function that clean the databases from NaN values
def clean_dataframe(df):
"""
Clean the dataframe, removing NaN from columns
input:
- dataframe
output:
- cleaned dataframe
"""
df.dropna(inplace = True)
return df
# Function that create new csv files
def make_new_csv(old_df_names, df_names):
"""
Make new csv files
input:
- lists of original and new csv file names
output:
- new csv files
"""
for i in range(len(old_df_names)):
df = pd.read_csv(old_df_names[i])
# cleaning function
df = clean_dataframe(df)
df.to_csv(df_names[i], index=False)
return
# RQ1 functions
# RQ1.1 functions
def compute_average_session(df_names):
"""
Compute average number of times users perform view/cart/purchase within each session
input:
- list of names of csv files to open
output:
- series of average of each operation
"""
# init the daily average dict
average_session_dict = {}
for i in range(len(df_names)):
average_session_dict[i] = {}
# load the ith dataframe, taking the event_type and user_session columns
df = pd.read_csv(df_names[i], usecols=['event_type', 'user_session'])
for j in df['event_type'].unique():
#print('{} of {:d} has average of : {:.2f} ' .format(j, i, float(df[df['event_type'] == j].groupby(['user_session']).count().mean())))
average_session_dict[i][j] = df[df['event_type'] == j].groupby(['user_session']).count().mean()
average_session_df = pd.DataFrame(average_session_dict).mean(axis=1)
return average_session_df
def plot_average_session(average_session_df, months):
"""
plots the average number of times users perform each operation
"""
# plot average_session_df
fig = plt.figure()
X = np.arange(len(average_session_df))
plt.bar(X, average_session_df)
plt.xticks(np.arange(len(average_session_df)),average_session_df.index)
plt.ylabel("average operation per session")
plt.xlabel("operations")
plt.title("Average number of times users perform each operation within a session")
plt.grid(color ='silver', linestyle = ':')
fig.set_figwidth(15)
fig.set_figheight(5)
return
# RQ1.2 functions
def compute_average_view_cart(df_names, months):
"""
Compute average number of times a user views a product before adding it to the cart
input:
- list of names of csv files to open
output:
- the average of how many times a product is viewed before being added to the cart
"""
# init a dataframe with index as every months and column as the mean for each user
df_mean_database = pd.DataFrame(index=months, columns=['mean'])
for i in range(len(df_names)):
# load the ith dataframe, taking the event_time, event_type, product_id, user_id columns
df = pd.read_csv(df_names[i],
usecols=['event_time','event_type', 'product_id', 'user_id'], nrows=100000,
parse_dates=['event_time'])
# cut off the 'purchase' variable from event_type
df_2 = df[df['event_type'] != 'purchase']
df_3 = df_2[df_2.event_type=='view'].groupby(by=['product_id']).agg(view=('event_type', 'count'))
df_4 = df_2[df_2.event_type=='cart'].groupby(by=['product_id']).agg(cart=('event_type', 'count'))
# get dataframe where event_type is equal to 'cart'
df_cart = df_2[df_2['event_type']=='cart']
# init a dataframe with index as every user and column as the mean for each user
df_mean_user = pd.DataFrame(index=df_cart['user_id'].unique(), columns=['mean'])
df_cart.groupby(by=['user_id']).count()
for user in df_cart['user_id'].unique():
# get dataframe with one user at a time
df_user = df_2[df_2['user_id'] == user]
# init the dict where the key are the products and the values are the mean of each product
product_dict = {}
for prod in df_user['product_id'].unique():
# get dataframe with one product at a time
df_product = df_user[df_user['product_id'] == prod]
df_product_2 = df_product.copy()
product_dict[prod] = []
# init a list to append how many times 'view' appears before 'cart' for each product
product_lst = []
# check if at least a 'view' exist in the dataframe otherwise pass
if any(df_product_2['event_type'] == 'view') == True:
df_product_2_time = df_product_2[df_product_2['event_type'] == 'view'].event_time.reset_index(drop=True)[0]
# check if there are some 'cart' event before the 'view' event (only for the first time of seeing the 'cart')
if any(df_product_2[df_product_2['event_type'] == 'cart'].event_time <= df_product_2_time) == True:
df_product_3 = df_product_2[df_product_2.event_time <= df_product_2_time]
# drop any 'cart' events at the beginning
df_product_2 = df_product_2.drop(labels=df_product_3[df_product_3['event_type'] == 'cart'].index)
# count how many times 'view' is before 'cart'
if any(df_product_2['event_type'] == 'view') == True:
for index, row in df_product_2.iterrows():
if row['event_type'] == 'cart':
product_lst.append(np.sum(df_product_2[df_product['event_type'] == 'view'].event_time < row['event_time']))
df_product_2 = df_product_2[df_product_2.event_time > row['event_time']]
# compute mean for each product
if len(product_lst) > 0:
product_dict[prod] = [i for i in product_lst if i != 0]
product_dict[prod] = np.mean(product_dict[prod])
else:
product_dict[prod].append(0)
# compute mean for each user
try:
df_mean_user.loc[user,'mean'] = round(pd.DataFrame(product_dict).mean(axis=1)[0], 2)
except ValueError:
df_mean_user.loc[user,'mean'] = round(product_dict[prod], 2)
# compute final average for a user for a product
df_mean_user.dropna(inplace=True)
mean_prod_user = np.mean(df_mean_user)
# add final average per month
df_mean_database.loc[months[i], 'mean'] = round(mean_prod_user[0], 2)
df_mean_database.dropna(inplace=True)
final_mean = np.mean(df_mean_database)
return final_mean
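# Reading note (added): compute_average_view_cart above nests four averages:
# per product (views before each 'cart' event), per user over products, per month
# over users (df_mean_user, then df_mean_database), and finally across months.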
# RQ1.3 functions
def compute_probability_cart_purchase(df_names, months):
"""
Compute the probability that products are bought once they are added to the cart
input:
- list of names of csv files to open
output:
- probability that products are purchased once they are added to the cart
"""
# init dictionary to merge each monthly datasets
df_database = {}
for i in range(len(df_names)):
# load the ith dataframe, taking only the event_type
df = pd.read_csv(df_names[i],
usecols=['event_type'])
# cut off the view variable from event_type
df_database[months[i]] = df[df['event_type'] != 'view']
# function to concatenate each dataset
merged_df = pd.concat([df_database[months[i]] for i in range(len(df_database))])
# compute probability as the ratio between purchase and cart events
prob = round(merged_df[merged_df['event_type'] == 'purchase'].shape[0] /
merged_df[merged_df['event_type'] == 'cart'].shape[0], 4) * 100
return prob
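# Worked example (added, illustrative numbers): if the merged months contained
# 1200 'cart' events and 300 'purchase' events, the function above would return
# round(300 / 1200, 4) * 100 = 25.0, i.e. a 25% cart-to-purchase conversion.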
# RQ1.4 functions
def compute_average_time_removed_item(df_names, months):
"""
Compute the average time an item stays in the cart before being removed
input:
- list of names of csv files to open
output:
- average time
"""
df_mean_database = pd.DataFrame(index=months, columns=['mean'])
for i in range(len(df_names)):
# load the ith dataframe, taking only the
df = pd.read_csv(df_names[i],
usecols=['event_time', 'event_type', 'product_id'], nrows=100000,
parse_dates=['event_time'])
# cut off the view variable from event_type
df_2 = df[df['event_type'] != 'view']
# init the dict where the key are the products and the values are the mean of each product
product_dict = {}
# loop through the event_type 'purchase' to find unique product_id
for prod in df_2[df_2['event_type'] == 'purchase']['product_id'].unique():
df_product = df_2[df_2['product_id'] == prod]
# check if at least a 'cart' event exist
if df_product['event_type'].str.contains('cart').any():
pass
else:
continue
# check if there are some 'purchase' event before the 'cart' event (only for the first time of seeing the 'purchase')
if any(df_product[df_product['event_type'] == 'purchase'].event_time <=
df_product[df_product['event_type'] == 'cart'].event_time.reset_index(drop=True)[0]) == True:
df_3 = df_product[df_product.event_time <= df_product[df_product['event_type'] == 'cart'].event_time.reset_index(drop=True)[0]]
# drop any 'cart' events at the beginning
df_product = df_product.drop(labels=df_3[df_3['event_type'] == 'purchase'].index)
# check if there are some 'cart' event before the 'purchase' event (only for the last time of seeing the 'cart')
if any(df_product[df_product['event_type'] == 'cart'].event_time >=
df_product[df_product['event_type'] == 'purchase'].event_time.reset_index(drop=True)[len(df_product[df_product['event_type'] == 'purchase'])-1]) == True:
df_3 = df_product[df_product.event_time >= df_product[df_product['event_type'] == 'purchase'].event_time.reset_index(drop=True)[len(df_product[df_product['event_type'] == 'purchase'])-1]]
# drop any 'cart' events at the beginning
df_product = df_product.drop(labels=df_3[df_3['event_type'] == 'cart'].index)
# check if at least a 'cart' event exist
if df_product['event_type'].str.contains('cart').any():
pass
else:
continue
# check if at least a 'purchase' event exist
if df_product['event_type'].str.contains('purchase').any():
pass
else:
continue
dist_prod = df_product.event_time[df_product.event_type == 'purchase'].values - df_product.event_time[df_product.event_type == 'cart'].values
product_dict[prod] = []
product_dict[prod].append(np.mean(dist_prod))
# add final average per month
df_mean_database.loc[months[i], 'mean'] = pd.DataFrame(product_dict).mean(axis=1)[0]
return df_mean_database
# RQ1.5 functions
def compute_average_time_first_view(df_names, months):
"""
Compute the average time between the first view of an item and its purchase/addition to the cart
input:
- list of names of csv files to open
output:
- average time
"""
df_mean_database = pd.DataFrame(index=months, columns=['mean'])
for i in range(len(df_names)):
# load the ith dataframe, taking only the
df = pd.read_csv(df_names[i],
usecols=['event_time', 'event_type', 'product_id'],
parse_dates=['event_time'])
# cut off the view variable from event_type
df_3 = df[df['event_type'] != 'view']
# init the dict where the key are the products and the values are the mean of each product
product_dict = {}
# loop through the event_type 'purchase' to find unique product_id
for prod in df_3['product_id'].unique():
df_product = df[df['product_id'] == prod]
# check if at least a 'view' event exist
if df_product['event_type'].str.contains('view').any():
pass
else:
continue
# check if there are some 'purchase' event before the 'view' event (only for the first time of seeing the 'purchase')
if any(df_product[df_product['event_type'] == 'purchase'].event_time <=
df_product[df_product['event_type'] == 'view'].event_time.reset_index(drop=True)[0]) == True:
df_3 = df_product[df_product.event_time <= df_product[df_product['event_type'] == 'view'].event_time.reset_index(drop=True)[0]]
# drop any 'cart' events at the beginning
df_product = df_product.drop(labels=df_3[df_3['event_type'] == 'purchase'].index)
# check if there are some 'cart' event before the 'view' event (only for the first time of seeing the 'purchase')
if any(df_product[df_product['event_type'] == 'cart'].event_time <=
df_product[df_product['event_type'] == 'view'].event_time.reset_index(drop=True)[0]) == True:
df_3 = df_product[df_product.event_time <= df_product[df_product['event_type'] == 'view'].event_time.reset_index(drop=True)[0]]
# drop any 'cart' events at the beginning
df_product = df_product.drop(labels=df_3[df_3['event_type'] == 'cart'].index)
# check if at least a 'purchase' event exist
if df_product['event_type'].str.contains('purchase').any():
pass
else:
continue
# check if at least a 'cart' event exist
if df_product['event_type'].str.contains('cart').any():
pass
else:
continue
product_dict[prod] = []
df_product.drop_duplicates(subset=['event_type'], keep='first', inplace=True)
df_product.reset_index(inplace=True)
product_dict[prod].append(df_product.event_time[1] - df_product.event_time[0])
# add final average per month
df_mean_database.loc[months[i], 'mean'] = pd.DataFrame(product_dict).mean(axis=1)[0]
return df_mean_database
# RQ2 functions
def compute_number_sold_per_category(df_names, months):
"""
Compute the number of products sold per top-level category (sorted so the most sold comes first)
input:
- list of names of csv files to open
output:
- list of per-month dataframes with purchase counts per top-level category, in descending order
"""
# init a dataframe with index as months and column as most sold product
df_final = {}
for i in range(len(df_names)):
        # load the ith dataframe, taking only the columns needed for this task
df = pd.read_csv(df_names[i],
usecols=['product_id', 'category_code', 'event_type'])
df = df[df['event_type'] == 'purchase']
new = df['category_code'].str.split(".", expand=True)
df['category_1'] = new[0]
df.drop(columns=['category_code', 'event_type'], inplace=True)
df_final[months[i]] = df.groupby(by=['category_1']).count().sort_values('product_id', ascending=False)
df_final = [df_final[months[i]] for i in range(len(df_final))]
return df_final
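# A hedged usage sketch for the RQ2 helper above; the csv file names and month
# labels below are illustrative assumptions, not files bundled with this code.
# months = ['2019-Oct', '2019-Nov']
# df_final = compute_number_sold_per_category(['2019-Oct.csv', '2019-Nov.csv'], months)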
def plot_number_sold_per_category(df_final, months):
"""
plot the number of sold product per category per month
"""
    # plot the number of products sold per category per month using subplots
fig, a = plt.subplots(4,2)
# Plot 1
df_final[0].reset_index().plot(kind='bar', y='product_id', x='category_1', ax=a[0][0])
a[0][0].set(title=months[0], xlabel='Categories', ylabel='Total Sales')
a[0][0].tick_params(labelrotation=45)
a[0][0].get_legend().remove()
a[0][0].grid(color ='silver', linestyle = ':')
# Plot 2
df_final[1].reset_index().plot(kind='bar', y='product_id', x='category_1', ax=a[0][1])
a[0][1].set(title=months[1], xlabel='Categories', ylabel='Total Sales')
a[0][1].tick_params(labelrotation=45)
a[0][1].get_legend().remove()
a[0][1].grid(color ='silver', linestyle = ':')
# Plot 3
df_final[2].reset_index().plot(kind='bar', y='product_id', x='category_1', ax=a[1][0])
a[1][0].set(title=months[2], xlabel='Categories', ylabel='Total Sales')
a[1][0].tick_params(labelrotation=45)
a[1][0].get_legend().remove()
a[1][0].grid(color ='silver', linestyle = ':')
# Plot 4
df_final[3].reset_index().plot(kind='bar', y='product_id', x='category_1', ax=a[1][1])
a[1][1].set(title=months[3], xlabel='Categories', ylabel='Total Sales')
a[1][1].tick_params(labelrotation=45)
a[1][1].get_legend().remove()
a[1][1].grid(color ='silver', linestyle = ':')
# Plot 5
df_final[4].reset_index().plot(kind='bar', y='product_id', x='category_1', ax=a[2][0])
a[2][0].set(title=months[4], xlabel='Categories', ylabel='Total Sales')
a[2][0].tick_params(labelrotation=45)
a[2][0].get_legend().remove()
a[2][0].grid(color ='silver', linestyle = ':')
# Plot 6
df_final[5].reset_index().plot(kind='bar', y='product_id', x='category_1', ax=a[2][1])
a[2][1].set(title=months[5], xlabel='Categories', ylabel='Total Sales')
a[2][1].tick_params(labelrotation=45)
a[2][1].get_legend().remove()
a[2][1].grid(color ='silver', linestyle = ':')
# Plot 7
df_final[6].reset_index().plot(kind='bar', y='product_id', x='category_1', ax=a[3][0])
a[3][0].set(title=months[6], xlabel='Categories', ylabel='Total Sales')
a[3][0].tick_params(labelrotation=45)
a[3][0].get_legend().remove()
a[3][0].grid(color ='silver', linestyle = ':')
a[3][1].axis('off')
# Title the figure
fig.suptitle('Category of the most trending products overall', fontsize=14, fontweight='bold')
fig.set_figwidth(20)
fig.set_figheight(50)
plt.show()
return
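def _plot_number_sold_per_category_compact(df_final, months):
    """
    A hedged, loop-based sketch equivalent to plot_number_sold_per_category
    above; it is kept separate so the original function stays untouched.
    """
    fig, axes = plt.subplots(4, 2)
    for idx in range(min(len(df_final), 7)):
        ax = axes[idx // 2][idx % 2]
        df_final[idx].reset_index().plot(kind='bar', y='product_id', x='category_1', ax=ax)
        ax.set(title=months[idx], xlabel='Categories', ylabel='Total Sales')
        ax.tick_params(labelrotation=45)
        ax.get_legend().remove()
        ax.grid(color='silver', linestyle=':')
    axes[3][1].axis('off')
    fig.suptitle('Category of the most trending products overall', fontsize=14, fontweight='bold')
    fig.set_figwidth(20)
    fig.set_figheight(50)
    plt.show()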
def plot_most_visited_subcategories(df_names, months):
"""
plot the most visited subcategories
"""
# init a dataframe with index as months and column as most sold product
df_final = {}
for i in range(len(df_names)):
        # load the ith dataframe, taking only the columns needed for this task
df = pd.read_csv(df_names[i],
usecols=['event_type', 'category_code'])
# take only the view events
df = df[df['event_type'] == 'view']
# split the categories into subcategories
new = df['category_code'].str.split(".", expand=True)
df['subcategory'] = new[1]
df.drop(columns=['category_code'], inplace=True)
# group the subcategories and sort in descending order the relative values
df_final[months[i]] = df.groupby(by=['subcategory']).count().sort_values('event_type', ascending=False)
# build a pool of lists
df_final = [df_final[months[i]] for i in range(len(df_final))]
# concat each list of month
merged_df = pd.concat([df_final[i] for i in range(len(df_final))]).reset_index()
df_tot = merged_df.groupby(by=['subcategory']).sum().sort_values('event_type', ascending=False).rename(columns={'event_type': 'view'}).reset_index()
# plot most visited subcategories
fig = plt.figure()
X = np.arange(len(df_tot))
plt.barh(X, df_tot['view'])
plt.yticks(np.arange(len(df_tot)),df_tot['subcategory'])
plt.ylabel("views")
plt.xlabel("subcategories")
plt.title("Most visited subcategories")
plt.grid(color ='silver', linestyle = ':')
fig.set_figwidth(15)
fig.set_figheight(15)
plt.show()
return
def plot_10_most_sold(df_final, months):
"""
    return the 10 categories with the most products sold, summed over all months
"""
# concat the dataset
merged_df = pd.concat([df_final[i] for i in range(len(df_final))]).reset_index()
# group together by category in descending order
df_tot = merged_df.groupby(by=['category_1']).sum().sort_values('product_id', ascending=False).rename(columns={'event_type': 'view'})[:10]
return df_tot
# RQ3 functions
# Function used for showing the values of the bars in the plots of RQ3
def plot_values_in_barh(y):
for index, value in enumerate(y):
plt.text(value, index, str(round(value, 2)))
# Function that given a category in input, returns a plot with the average price per brand for the selected category
def plot_average_price_per_category(category, df_names):
# Initializing an empty list where we will put every grouped-by DataFrame later on
l = []
# Starting a for loop to read every DataFrame
for i in range(len(df_names)):
# Selecting the columns to use for this task
data = pd.read_csv(df_names[i], usecols=['category_code', 'brand', 'price'])
        # For every category_code and brand, calculate the average price of the products, then reset the index
        # because we do not want to work with a MultiIndex
a = data.groupby(['category_code', 'brand']).mean().reset_index()
# Appending the DataFrame analyzed for 1 month to the list l
l.append(a)
# Concatenating every DataFrame of each month grouped by category_code and brand in one DataFrame that will not
# be memory expensive
final = pd.concat(l)
# Grouping again by category_code and brand after the concatenation. We reset again the index for the same
# reason as before
final2 = final.groupby(['category_code', 'brand']).mean().reset_index()
# Selecting the category_code we want to analyze
fplot = final2.loc[final2['category_code'] == category]
# Setting the values to show in the plot at the end of the bars
y = list(fplot['price'])
# Assigning a variable to the plot
end = fplot.plot(x='brand', kind='barh', figsize=(20, 60))
# Returning the plot and calling the function to show the prices on the top of the bars
return end, plot_values_in_barh(y)
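# A hedged usage sketch for the RQ3 helper above; the category name and csv file
# names below are illustrative assumptions.
# plot_average_price_per_category('electronics.smartphone', ['2019-Oct.csv', '2019-Nov.csv'])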
# Function that returns for each category, the brand with the highest price
def brand_with_highest_price_for_category(df_names):
# Initializing an empty list where we will put our Dataframes later on
l = []
# Starting a for loop to read every DataFrame
for i in range(len(df_names)):
# Selecting the columns to use for this task
data = pd.read_csv(df_names[i], usecols=['category_code', 'brand', 'price'])
# For every category_code and brand, calculating the average price of the products
a = data.groupby(['category_code', 'brand']).mean()
# Selecting the rows with the higher average price for each category
a1 = a.loc[a.groupby(level='category_code')['price'].idxmax()]
# Appending the analyzed DataFrame for 1 month to the list l
l.append(a1)
# Concatenating every DataFrame of each month grouped by category_code and brand in one DataFrame that will not
# be memory expensive
final = pd.concat(l)
    # Resetting the index because we do not want to work with a MultiIndex
rfinal = final.reset_index()
# Selecting again only the rows with the higher average price for category after concatenating the DataFrames
last_final = rfinal.loc[rfinal.groupby('category_code')['price'].idxmax()]
# Return the output
return last_final.sort_values(by=['price'])
# RQ4 functions
# Function that is used to see if the prices of different brands are significantly different
def average_price_per_brand(df_names):
# Initializing an empty list
l = []
# Starting the loop to read the dataframes of every month
for i in range(len(df_names)):
# Selecting just the columns referring to the brand and price
data = pd.read_csv(df_names[i], usecols=['brand', 'price'])
# Grouping by brand and calculating the average price per brand
a = data.groupby('brand').mean()
# Appending the obtained DataFrame regarding the results of one month in the starting empty list
l.append(a)
# Concatenating every DataFrame of each month in one DataFrame that will not be memory expensive
t = pd.concat(l)
    # Resetting the index because we do not want to work with a MultiIndex
rt = t.reset_index()
# Grouping by brand the full DataFrame regarding all months and calculating the mean price
u = rt.groupby('brand').mean()
# Returning the Dataframe, the minimum and the maximum to compare the results
return u, u.min(), u.max()
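def _demo_average_price_per_brand():
    # Hedged sketch on made-up data showing the groupby-mean pattern that
    # average_price_per_brand applies to the monthly csv files.
    demo = pd.DataFrame({'brand': ['acme', 'acme', 'globex'], 'price': [10.0, 20.0, 5.0]})
    return demo.groupby('brand').mean()  # acme -> 15.0, globex -> 5.0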
# Function that is used to reduce the number of data we want to analyze for the RQ4
def make_df_purchase(df_names, months):
df_purchase = {}
# Reading the data of all months and selecting only purchase events from the DataFrame
for i in range(len(df_names)):
data = pd.read_csv(df_names[i], usecols=['brand', 'price', 'event_type'])
df_purchase[months[i]] = data[data['event_type'] == 'purchase']
# Appending the results of every months to a dictionary
return df_purchase
# Function that returns the profit of every brand in each month
def earning_per_month(df_purchase, months):
dict_earning = {}
# Calculating the earning per month of each brand grouping by brand and doing the sum of the prices of every sold
# product
for i in range(len(df_purchase)):
data = df_purchase[months[i]]
dict_earning[months[i]] = data.groupby('brand', as_index=False).sum()
return dict_earning
# Function that given a brand in input, returns the total profit for month of that brand
def brand_per_month(brand, dict_earning, months):
df_profit = {}
# For every month selecting the profit from the dictionary of earnings created before. If there is no profit for the
# selected brand, we set it equal to 0
for i in range(len(months)):
try:
df_profit[months[i]] = dict_earning[months[i]].loc[dict_earning[months[i]].brand == brand, 'price'].values[
0]
except IndexError:
df_profit[months[i]] = 0
return df_profit
# Function that given the earnings of every brand, returns the top 3 brands that have suffered the biggest losses
# between one month and the previous one
def find_3_worst_brand(dict_earning, months):
# Selecting the dictionary obtained from the total profits of the brands and then merging them in one DataFrame
# where on the columns we have the months and on the rows we have the brands. The values are the earnings of each
# brand for every month
data_frames = [dict_earning[months[i]] for i in range(len(dict_earning))]
df_merged = reduce(lambda left, right: pd.merge(left, right, on=['brand'],
how='outer'), data_frames)
df_merged.set_index('brand', inplace=True)
df_merged.set_axis(months, axis=1, inplace=True)
# Transposing the DataFrame and applying the pct_change to calculate the percentage change between every month
# and the month before
df_pct = df_merged.T.pct_change()
worst_brand = []
worst_value = []
worst_months = []
# Selecting the minimum of the percentage change(which means the bigger loss) in our DataFrame, the brand that
# corresponds to it and the month that refers to it. We append those values to the lists we defined before
for i in range(0, 3):
worst_brand.append(df_pct.min().sort_values().index[i])
worst_value.append(round(abs(df_pct.min().sort_values()[i]) * 100, 2))
L = list(df_pct[df_pct[worst_brand[i]] == df_pct.min().sort_values()[i]].index.values)
worst_months.append(''.join(L))
# Showing the result of the request
for j in range(0, 3):
        print('{} lost {}% between {} and the month before'.format(worst_brand[j], worst_value[j], worst_months[j]),
end=' \n')
return
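def _demo_pct_change():
    # Hedged sketch of the month-over-month pct_change step used by
    # find_3_worst_brand above; the two brands and their earnings are made up.
    earnings = pd.DataFrame({'brandA': [100.0, 50.0], 'brandB': [80.0, 120.0]},
                            index=['Oct', 'Nov'])
    return earnings.pct_change()  # 'Nov' row: brandA -0.5 (a 50% loss), brandB +0.5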
#RQ5
#Function that create a plot that for each day of the week shows the hourly average of visitors
def plot_hour_avg(df_names,months):
'''
    For each day of the week, plot the hourly number of visitors
    input:
        -list of names of csv files to open
        -months
    output:
        -one plot per month
'''
for i in range(len(df_names)):
df=pd.read_csv(df_names[i],parse_dates=['event_time'],usecols=['event_time','user_id'])
        # hourly number of visitors for each day of the week (pandas: Monday == 0, ..., Sunday == 6)
        lunedi=df[df.event_time.dt.dayofweek==0].groupby(df.event_time.dt.hour).user_id.count()
        martedi=df[df.event_time.dt.dayofweek==1].groupby(df.event_time.dt.hour).user_id.count()
        mercoledi=df[df.event_time.dt.dayofweek==2].groupby(df.event_time.dt.hour).user_id.count()
        giovedi=df[df.event_time.dt.dayofweek==3].groupby(df.event_time.dt.hour).user_id.count()
        venerdi=df[df.event_time.dt.dayofweek==4].groupby(df.event_time.dt.hour).user_id.count()
        sabato=df[df.event_time.dt.dayofweek==5].groupby(df.event_time.dt.hour).user_id.count()
        domenica=df[df.event_time.dt.dayofweek==6].groupby(df.event_time.dt.hour).user_id.count()
plt.figure(figsize=[10.0,5.0])
plt.plot(domenica, '-o', color='royalblue', label = 'SUNDAY')
plt.plot(lunedi, '-o', color='green', label = 'MONDAY')
plt.plot(martedi, '-o', color='red', label = 'TUESDAY')
plt.plot(mercoledi, '-o', color='yellow', label = 'WEDNESDAY')
plt.plot(giovedi, '-o', color='orange', label = 'THURSDAY')
plt.plot(venerdi, '-o', color='violet', label = 'FRIDAY')
plt.plot(sabato, '-o', color='grey', label = 'SATURDAY')
plt.xlabel('HOUR')
plt.ylabel('VISITORS')
plt.title("Daily average - %s " %months[i])
plt.xticks(range(0,24))
plt.legend()
plt.show()
return
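# A hedged usage sketch for plot_hour_avg above; the csv file names below are
# illustrative assumptions. Note that pandas maps Monday to dayofweek == 0 and
# Sunday to dayofweek == 6.
# plot_hour_avg(['2019-Oct.csv', '2019-Nov.csv'], ['2019-Oct', '2019-Nov'])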
#RQ6
#Function that calculates the overall conversion rate of the products, creates the plot of the number of purchases by category and shows the conversion rate of each category in descending order
def conversion_rate(df_names,months):
"""
calculate overall conversion rate
plot of purchase by category
calculate conversion rate for each category
input:
- dataframe
- months
output:
- overall conversion rate for each month
- conversion rate for each category of each month
- plot of purchase by category of each month
"""
for i in range(len(df_names)):
dataset=pd.read_csv(df_names[i],usecols=['event_type','category_code'])
#NUMBER OF ALL PURCHASE PRODUCTS
purchase=dataset[dataset.event_type=='purchase']
totpurc=len(purchase)
#NUMBER OF ALL VIEW PRODUCTS
view=dataset[dataset.event_type=='view']
totview=len(view)
#OVERALL CONVERSION RATE OF STORE
cr=totpurc/totview
print ('Overall conversion rate of %s'%months[i])
print (cr)
#CREATE A NEW COLUMN WITH THE SPLITTED CATEGORY NAME
new = dataset['category_code'].str.split(".", expand=True)
dataset['category_name'] = new[0]
dataset.drop(columns=['category_code'], inplace=True)
#NUMBER OF PURCHASE FOR CATEGORY
purc_4_category=dataset[dataset.event_type=='purchase'].groupby('category_name').agg(purchase=('event_type','count'))
#NUMBER OF VIEW FOR CATEGORY
view_4_category=dataset[dataset.event_type=='view'].groupby('category_name').agg(view=('event_type','count'))
#PLOT OF NUMBER OF PURCHASE FOR CATEGORY
fig = plt.figure()
purc_4_category.plot.bar(figsize = (18, 7), title='Number of purchase of %s'%months[i])
plt.show()
#CONVERSION RATE FOR CATEGORY
cr_4_cat=(purc_4_category.purchase/view_4_category.view)
dec=cr_4_cat.sort_values(axis=0, ascending=False)
print ('Conversion rate of each category of %s'%months[i])
print(dec, end='\n')
return
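def _demo_conversion_rate():
    # Hedged sketch of the overall conversion-rate formula used above, on
    # made-up counts: 25 purchases out of 1000 views gives 0.025.
    totpurc, totview = 25, 1000
    return totpurc / totview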
#RQ7
#Function that demonstrates Pareto's principle
def pareto(df_names,months):
"""
Apply Pareto's principle
input:
- dataframe
- months
output:
        - demonstration of whether Pareto's principle holds for each month
"""
for i in range(len(df_names)):
dataset=pd.read_csv(df_names[i],usecols=['user_id','event_type','price'])
#PURCHASE BY USERS
purchase_by_user=dataset[dataset.event_type == 'purchase'].groupby(dataset.user_id).agg(number_of_purchases=('user_id','count'),total_spent=('price','sum'))
purchase_by_user=purchase_by_user.sort_values('total_spent',ascending=False)
#20% OF USERS
user_20=int(len(purchase_by_user)*20/100)
purch_by_user20=purchase_by_user[:user_20]
#TOTAL SPENT BY 20% OF USERS
spent_by_20=purch_by_user20.agg('sum')
#TOTAL PROFIT OF STORE
profit=dataset[dataset.event_type == 'purchase'].groupby(dataset.event_type).agg(gain=('price','sum'))
#80% OF STORE'S TOTAL PROFIT
profit_80=(profit*80)/100
#PERCENTAGE CHANGE BETWEEN 80% OF PROFIT AND 20% OF USERS
percent=int((float( spent_by_20.total_spent)/float(profit_80.gain))*100)
print("%d%% of the profit for the month of %s comes from 20%% of the user's purchases"%(percent,months[i]))
if (percent >= 80):
print ("For the month of %s Pareto's principle is applied." %months[i])
else:
print ("For the month of %s Pareto's principle isn't applied." %months[i])
return
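def _demo_pareto_share():
    # Hedged sketch of the Pareto check used above, on made-up numbers:
    # the top 20% of users spend 900 out of a total profit of 1000.
    spent_by_20 = 900.0
    profit = 1000.0
    profit_80 = profit * 80 / 100
    percent = int((spent_by_20 / profit_80) * 100)
    return percent >= 80  # True here, so Pareto's principle would be considered applied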
|
[
"matplotlib.pyplot.title",
"pandas.DataFrame",
"matplotlib.pyplot.show",
"numpy.sum",
"matplotlib.pyplot.plot",
"pandas.read_csv",
"matplotlib.pyplot.bar",
"matplotlib.pyplot.legend",
"pandas.merge",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.barh",
"matplotlib.pyplot.figure",
"numpy.mean",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.grid",
"pandas.concat"
] |
[((2627, 2639), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (2637, 2639), True, 'import matplotlib.pyplot as plt\n'), ((2687, 2717), 'matplotlib.pyplot.bar', 'plt.bar', (['X', 'average_session_df'], {}), '(X, average_session_df)\n', (2694, 2717), True, 'import matplotlib.pyplot as plt\n'), ((2798, 2841), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""average operation per session"""'], {}), "('average operation per session')\n", (2808, 2841), True, 'import matplotlib.pyplot as plt\n'), ((2846, 2870), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""operations"""'], {}), "('operations')\n", (2856, 2870), True, 'import matplotlib.pyplot as plt\n'), ((2875, 2962), 'matplotlib.pyplot.title', 'plt.title', (['"""Average number of times users perform each operation within a session"""'], {}), "(\n 'Average number of times users perform each operation within a session')\n", (2884, 2962), True, 'import matplotlib.pyplot as plt\n'), ((2962, 3001), 'matplotlib.pyplot.grid', 'plt.grid', ([], {'color': '"""silver"""', 'linestyle': '""":"""'}), "(color='silver', linestyle=':')\n", (2970, 3001), True, 'import matplotlib.pyplot as plt\n'), ((3505, 3549), 'pandas.DataFrame', 'pd.DataFrame', ([], {'index': 'months', 'columns': "['mean']"}), "(index=months, columns=['mean'])\n", (3517, 3549), True, 'import pandas as pd\n'), ((7516, 7541), 'numpy.mean', 'np.mean', (['df_mean_database'], {}), '(df_mean_database)\n', (7523, 7541), True, 'import numpy as np\n'), ((8905, 8949), 'pandas.DataFrame', 'pd.DataFrame', ([], {'index': 'months', 'columns': "['mean']"}), "(index=months, columns=['mean'])\n", (8917, 8949), True, 'import pandas as pd\n'), ((12325, 12369), 'pandas.DataFrame', 'pd.DataFrame', ([], {'index': 'months', 'columns': "['mean']"}), "(index=months, columns=['mean'])\n", (12337, 12369), True, 'import pandas as pd\n'), ((16476, 16494), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(4)', '(2)'], {}), '(4, 2)\n', (16488, 16494), True, 'import matplotlib.pyplot as plt\n'), ((18846, 18856), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (18854, 18856), True, 'import matplotlib.pyplot as plt\n'), ((20151, 20163), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (20161, 20163), True, 'import matplotlib.pyplot as plt\n'), ((20199, 20226), 'matplotlib.pyplot.barh', 'plt.barh', (['X', "df_tot['view']"], {}), "(X, df_tot['view'])\n", (20207, 20226), True, 'import matplotlib.pyplot as plt\n'), ((20292, 20311), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""views"""'], {}), "('views')\n", (20302, 20311), True, 'import matplotlib.pyplot as plt\n'), ((20316, 20343), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""subcategories"""'], {}), "('subcategories')\n", (20326, 20343), True, 'import matplotlib.pyplot as plt\n'), ((20348, 20387), 'matplotlib.pyplot.title', 'plt.title', (['"""Most visited subcategories"""'], {}), "('Most visited subcategories')\n", (20357, 20387), True, 'import matplotlib.pyplot as plt\n'), ((20392, 20431), 'matplotlib.pyplot.grid', 'plt.grid', ([], {'color': '"""silver"""', 'linestyle': '""":"""'}), "(color='silver', linestyle=':')\n", (20400, 20431), True, 'import matplotlib.pyplot as plt\n'), ((20490, 20500), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (20498, 20500), True, 'import matplotlib.pyplot as plt\n'), ((22138, 22150), 'pandas.concat', 'pd.concat', (['l'], {}), '(l)\n', (22147, 22150), True, 'import pandas as pd\n'), ((23788, 23800), 'pandas.concat', 'pd.concat', (['l'], {}), '(l)\n', (23797, 23800), True, 'import pandas as pd\n'), 
((24948, 24960), 'pandas.concat', 'pd.concat', (['l'], {}), '(l)\n', (24957, 24960), True, 'import pandas as pd\n'), ((491, 519), 'pandas.read_csv', 'pd.read_csv', (['old_df_names[i]'], {}), '(old_df_names[i])\n', (502, 519), True, 'import pandas as pd\n'), ((1312, 1340), 'pandas.read_csv', 'pd.read_csv', (['old_df_names[i]'], {}), '(old_df_names[i])\n', (1323, 1340), True, 'import pandas as pd\n'), ((1979, 2043), 'pandas.read_csv', 'pd.read_csv', (['df_names[i]'], {'usecols': "['event_type', 'user_session']"}), "(df_names[i], usecols=['event_type', 'user_session'])\n", (1990, 2043), True, 'import pandas as pd\n'), ((3695, 3828), 'pandas.read_csv', 'pd.read_csv', (['df_names[i]'], {'usecols': "['event_time', 'event_type', 'product_id', 'user_id']", 'nrows': '(100000)', 'parse_dates': "['event_time']"}), "(df_names[i], usecols=['event_time', 'event_type', 'product_id',\n 'user_id'], nrows=100000, parse_dates=['event_time'])\n", (3706, 3828), True, 'import pandas as pd\n'), ((7311, 7332), 'numpy.mean', 'np.mean', (['df_mean_user'], {}), '(df_mean_user)\n', (7318, 7332), True, 'import numpy as np\n'), ((8058, 8106), 'pandas.read_csv', 'pd.read_csv', (['df_names[i]'], {'usecols': "['event_type']"}), "(df_names[i], usecols=['event_type'])\n", (8069, 8106), True, 'import pandas as pd\n'), ((9049, 9171), 'pandas.read_csv', 'pd.read_csv', (['df_names[i]'], {'usecols': "['event_time', 'event_type', 'product_id']", 'nrows': '(100000)', 'parse_dates': "['event_time']"}), "(df_names[i], usecols=['event_time', 'event_type', 'product_id'],\n nrows=100000, parse_dates=['event_time'])\n", (9060, 9171), True, 'import pandas as pd\n'), ((12469, 12577), 'pandas.read_csv', 'pd.read_csv', (['df_names[i]'], {'usecols': "['event_time', 'event_type', 'product_id']", 'parse_dates': "['event_time']"}), "(df_names[i], usecols=['event_time', 'event_type', 'product_id'],\n parse_dates=['event_time'])\n", (12480, 12577), True, 'import pandas as pd\n'), ((15755, 15834), 'pandas.read_csv', 'pd.read_csv', (['df_names[i]'], {'usecols': "['product_id', 'category_code', 'event_type']"}), "(df_names[i], usecols=['product_id', 'category_code', 'event_type'])\n", (15766, 15834), True, 'import pandas as pd\n'), ((19177, 19242), 'pandas.read_csv', 'pd.read_csv', (['df_names[i]'], {'usecols': "['event_type', 'category_code']"}), "(df_names[i], usecols=['event_type', 'category_code'])\n", (19188, 19242), True, 'import pandas as pd\n'), ((21580, 21649), 'pandas.read_csv', 'pd.read_csv', (['df_names[i]'], {'usecols': "['category_code', 'brand', 'price']"}), "(df_names[i], usecols=['category_code', 'brand', 'price'])\n", (21591, 21649), True, 'import pandas as pd\n'), ((23175, 23244), 'pandas.read_csv', 'pd.read_csv', (['df_names[i]'], {'usecols': "['category_code', 'brand', 'price']"}), "(df_names[i], usecols=['category_code', 'brand', 'price'])\n", (23186, 23244), True, 'import pandas as pd\n'), ((24548, 24600), 'pandas.read_csv', 'pd.read_csv', (['df_names[i]'], {'usecols': "['brand', 'price']"}), "(df_names[i], usecols=['brand', 'price'])\n", (24559, 24600), True, 'import pandas as pd\n'), ((25589, 25655), 'pandas.read_csv', 'pd.read_csv', (['df_names[i]'], {'usecols': "['brand', 'price', 'event_type']"}), "(df_names[i], usecols=['brand', 'price', 'event_type'])\n", (25600, 25655), True, 'import pandas as pd\n'), ((28925, 29016), 'pandas.read_csv', 'pd.read_csv', (['df_names[i]'], {'parse_dates': "['event_time']", 'usecols': "['event_time', 'user_id']"}), "(df_names[i], parse_dates=['event_time'], usecols=['event_time',\n 
'user_id'])\n", (28936, 29016), True, 'import pandas as pd\n'), ((29748, 29779), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '[10.0, 5.0]'}), '(figsize=[10.0, 5.0])\n', (29758, 29779), True, 'import matplotlib.pyplot as plt\n'), ((29787, 29846), 'matplotlib.pyplot.plot', 'plt.plot', (['domenica', '"""-o"""'], {'color': '"""royalblue"""', 'label': '"""SUNDAY"""'}), "(domenica, '-o', color='royalblue', label='SUNDAY')\n", (29795, 29846), True, 'import matplotlib.pyplot as plt\n'), ((29857, 29910), 'matplotlib.pyplot.plot', 'plt.plot', (['lunedi', '"""-o"""'], {'color': '"""green"""', 'label': '"""MONDAY"""'}), "(lunedi, '-o', color='green', label='MONDAY')\n", (29865, 29910), True, 'import matplotlib.pyplot as plt\n'), ((29921, 29974), 'matplotlib.pyplot.plot', 'plt.plot', (['martedi', '"""-o"""'], {'color': '"""red"""', 'label': '"""TUESDAY"""'}), "(martedi, '-o', color='red', label='TUESDAY')\n", (29929, 29974), True, 'import matplotlib.pyplot as plt\n'), ((29985, 30045), 'matplotlib.pyplot.plot', 'plt.plot', (['mercoledi', '"""-o"""'], {'color': '"""yellow"""', 'label': '"""WEDNESDAY"""'}), "(mercoledi, '-o', color='yellow', label='WEDNESDAY')\n", (29993, 30045), True, 'import matplotlib.pyplot as plt\n'), ((30056, 30113), 'matplotlib.pyplot.plot', 'plt.plot', (['giovedi', '"""-o"""'], {'color': '"""orange"""', 'label': '"""THURSDAY"""'}), "(giovedi, '-o', color='orange', label='THURSDAY')\n", (30064, 30113), True, 'import matplotlib.pyplot as plt\n'), ((30124, 30179), 'matplotlib.pyplot.plot', 'plt.plot', (['venerdi', '"""-o"""'], {'color': '"""violet"""', 'label': '"""FRIDAY"""'}), "(venerdi, '-o', color='violet', label='FRIDAY')\n", (30132, 30179), True, 'import matplotlib.pyplot as plt\n'), ((30190, 30244), 'matplotlib.pyplot.plot', 'plt.plot', (['sabato', '"""-o"""'], {'color': '"""grey"""', 'label': '"""SATURDAY"""'}), "(sabato, '-o', color='grey', label='SATURDAY')\n", (30198, 30244), True, 'import matplotlib.pyplot as plt\n'), ((30255, 30273), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""HOUR"""'], {}), "('HOUR')\n", (30265, 30273), True, 'import matplotlib.pyplot as plt\n'), ((30282, 30304), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""VISITORS"""'], {}), "('VISITORS')\n", (30292, 30304), True, 'import matplotlib.pyplot as plt\n'), ((30313, 30357), 'matplotlib.pyplot.title', 'plt.title', (["('Daily average - %s ' % months[i])"], {}), "('Daily average - %s ' % months[i])\n", (30322, 30357), True, 'import matplotlib.pyplot as plt\n'), ((30397, 30409), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (30407, 30409), True, 'import matplotlib.pyplot as plt\n'), ((30418, 30428), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (30426, 30428), True, 'import matplotlib.pyplot as plt\n'), ((31069, 31134), 'pandas.read_csv', 'pd.read_csv', (['df_names[i]'], {'usecols': "['event_type', 'category_code']"}), "(df_names[i], usecols=['event_type', 'category_code'])\n", (31080, 31134), True, 'import pandas as pd\n'), ((32136, 32148), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (32146, 32148), True, 'import matplotlib.pyplot as plt\n'), ((32253, 32263), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (32261, 32263), True, 'import matplotlib.pyplot as plt\n'), ((32831, 32899), 'pandas.read_csv', 'pd.read_csv', (['df_names[i]'], {'usecols': "['user_id', 'event_type', 'price']"}), "(df_names[i], usecols=['user_id', 'event_type', 'price'])\n", (32842, 32899), True, 'import pandas as pd\n'), ((2370, 2404), 'pandas.DataFrame', 
'pd.DataFrame', (['average_session_dict'], {}), '(average_session_dict)\n', (2382, 2404), True, 'import pandas as pd\n'), ((27434, 27482), 'pandas.merge', 'pd.merge', (['left', 'right'], {'on': "['brand']", 'how': '"""outer"""'}), "(left, right, on=['brand'], how='outer')\n", (27442, 27482), True, 'import pandas as pd\n'), ((11855, 11873), 'numpy.mean', 'np.mean', (['dist_prod'], {}), '(dist_prod)\n', (11862, 11873), True, 'import numpy as np\n'), ((6798, 6825), 'numpy.mean', 'np.mean', (['product_dict[prod]'], {}), '(product_dict[prod])\n', (6805, 6825), True, 'import numpy as np\n'), ((11965, 11991), 'pandas.DataFrame', 'pd.DataFrame', (['product_dict'], {}), '(product_dict)\n', (11977, 11991), True, 'import pandas as pd\n'), ((15226, 15252), 'pandas.DataFrame', 'pd.DataFrame', (['product_dict'], {}), '(product_dict)\n', (15238, 15252), True, 'import pandas as pd\n'), ((6385, 6477), 'numpy.sum', 'np.sum', (["(df_product_2[df_product['event_type'] == 'view'].event_time < row[\n 'event_time'])"], {}), "(df_product_2[df_product['event_type'] == 'view'].event_time < row[\n 'event_time'])\n", (6391, 6477), True, 'import numpy as np\n'), ((7022, 7048), 'pandas.DataFrame', 'pd.DataFrame', (['product_dict'], {}), '(product_dict)\n', (7034, 7048), True, 'import pandas as pd\n')]
|
# Copyright 2021 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import mock
from services import parameters
from services import resultdb
from waterfall.test import wf_testcase
from go.chromium.org.luci.resultdb.proto.v1 import (common_pb2, test_result_pb2)
from infra_api_clients.swarming import swarming_util
from services import resultdb
from services import resultdb_util
class ResultDBTest(wf_testcase.WaterfallTestCase):
@mock.patch.object(
swarming_util,
'GetInvocationNameForSwarmingTask',
return_value="inv_name")
@mock.patch.object(resultdb, 'query_resultdb')
def testGetFailedTestInStep(self, mock_result_db, *_):
failed_step = parameters.TestFailedStep()
failed_step.swarming_ids = ["1", "2"]
mock_result_db.side_effect = [
[
test_result_pb2.TestResult(
test_id="test_id_1",
tags=[
common_pb2.StringPair(key="test_name", value="test_id_1"),
])
],
[
test_result_pb2.TestResult(
test_id="test_id_2",
tags=[
common_pb2.StringPair(key="test_name", value="test_id_2"),
])
],
]
test_results = resultdb_util.get_failed_tests_in_step(failed_step)
self.assertEqual(len(test_results.test_results), 2)
failed_step.swarming_ids = []
test_results = resultdb_util.get_failed_tests_in_step(failed_step)
self.assertIsNone(test_results)
@mock.patch.object(
swarming_util, 'GetInvocationNameForSwarmingTask', return_value=None)
def testGetFailedTestInStepWithNoInvocationName(self, *_):
failed_step = parameters.TestFailedStep()
failed_step.swarming_ids = ["1", "2"]
test_results = resultdb_util.get_failed_tests_in_step(failed_step)
self.assertIsNone(test_results)
|
[
"mock.patch.object",
"services.parameters.TestFailedStep",
"services.resultdb_util.get_failed_tests_in_step",
"go.chromium.org.luci.resultdb.proto.v1.common_pb2.StringPair"
] |
[((533, 630), 'mock.patch.object', 'mock.patch.object', (['swarming_util', '"""GetInvocationNameForSwarmingTask"""'], {'return_value': '"""inv_name"""'}), "(swarming_util, 'GetInvocationNameForSwarmingTask',\n return_value='inv_name')\n", (550, 630), False, 'import mock\n'), ((649, 694), 'mock.patch.object', 'mock.patch.object', (['resultdb', '"""query_resultdb"""'], {}), "(resultdb, 'query_resultdb')\n", (666, 694), False, 'import mock\n'), ((1591, 1682), 'mock.patch.object', 'mock.patch.object', (['swarming_util', '"""GetInvocationNameForSwarmingTask"""'], {'return_value': 'None'}), "(swarming_util, 'GetInvocationNameForSwarmingTask',\n return_value=None)\n", (1608, 1682), False, 'import mock\n'), ((770, 797), 'services.parameters.TestFailedStep', 'parameters.TestFailedStep', ([], {}), '()\n', (795, 797), False, 'from services import parameters\n'), ((1338, 1389), 'services.resultdb_util.get_failed_tests_in_step', 'resultdb_util.get_failed_tests_in_step', (['failed_step'], {}), '(failed_step)\n', (1376, 1389), False, 'from services import resultdb_util\n'), ((1499, 1550), 'services.resultdb_util.get_failed_tests_in_step', 'resultdb_util.get_failed_tests_in_step', (['failed_step'], {}), '(failed_step)\n', (1537, 1550), False, 'from services import resultdb_util\n'), ((1765, 1792), 'services.parameters.TestFailedStep', 'parameters.TestFailedStep', ([], {}), '()\n', (1790, 1792), False, 'from services import parameters\n'), ((1854, 1905), 'services.resultdb_util.get_failed_tests_in_step', 'resultdb_util.get_failed_tests_in_step', (['failed_step'], {}), '(failed_step)\n', (1892, 1905), False, 'from services import resultdb_util\n'), ((1005, 1062), 'go.chromium.org.luci.resultdb.proto.v1.common_pb2.StringPair', 'common_pb2.StringPair', ([], {'key': '"""test_name"""', 'value': '"""test_id_1"""'}), "(key='test_name', value='test_id_1')\n", (1026, 1062), False, 'from go.chromium.org.luci.resultdb.proto.v1 import common_pb2, test_result_pb2\n'), ((1224, 1281), 'go.chromium.org.luci.resultdb.proto.v1.common_pb2.StringPair', 'common_pb2.StringPair', ([], {'key': '"""test_name"""', 'value': '"""test_id_2"""'}), "(key='test_name', value='test_id_2')\n", (1245, 1281), False, 'from go.chromium.org.luci.resultdb.proto.v1 import common_pb2, test_result_pb2\n')]
|
#!/usr/bin/env python
#
# Copyright (c) 2017-2018 Via Technology Ltd. All Rights Reserved.
# Consult your license regarding permissions and restrictions.
"""
Software to read Eurocontrol APDS files.
"""
import sys
import os
import bz2
import csv
import errno
import pandas as pd
from enum import IntEnum, unique
from pru.trajectory_fields import \
FLIGHT_FIELDS, FLIGHT_EVENT_FIELDS, POSITION_FIELDS, FlightEventType, \
is_valid_iso8601_date, iso8601_datetime_parser, has_bz2_extension, \
split_dual_date
from pru.trajectory_files import create_convert_apds_filenames
from pru.logger import logger
log = logger(__name__)
@unique
class ApdsField(IntEnum):
'The fields of an APDS line.'
APDS_ID = 0
AP_C_FLTID = 1
AP_C_REG = 2
ADEP_ICAO = 3
ADES_ICAO = 4
SRC_PHASE = 5
MVT_TIME_UTC = 6
BLOCK_TIME_UTC = 7
SCHED_TIME_UTC = 8
ARCTYP = 9
AP_C_RWY = 10
AP_C_STND = 11
C40_CROSS_TIME = 12
C40_CROSS_LAT = 13
C40_CROSS_LON = 14
C40_CROSS_FL = 15
C40_BEARING = 16
C100_CROSS_TIME = 17
C100_CROSS_LAT = 18
C100_CROSS_LON = 19
C100_CROSS_FL = 20
C100_BEARING = 21
class ApdsEvent:
    'A class for storing and outputting an APDS event'
def __init__(self, id, event, date_time):
self.id = id
self.event = event
self.date_time = date_time
def __lt__(self, other):
return self.event < other.event
def __repr__(self):
return '{},{},{}Z'. \
format(self.id, self.event, self.date_time.isoformat())
class ApdsPosition:
    'A class for storing and outputting an APDS position'
def __init__(self, id, date_time, latitude, longitude, airport, stand):
self.id = id
self.date_time = date_time
self.latitude = latitude
self.longitude = longitude
self.airport = airport
self.stand = stand
def __lt__(self, other):
return self.date_time < other.date_time
def __repr__(self):
return '{},,{}Z,{:.5f},{:.5f},,,,,1,APDS {} {},,'. \
format(self.id, self.date_time.isoformat(),
self.latitude, self.longitude, self.airport, self.stand)
class ApdsFlight:
'A class for reading, storing and outputting data for an APDS flight'
def __init__(self, apds_fields, airport_stands):
self.id = apds_fields[ApdsField.APDS_ID]
self.callsign = apds_fields[ApdsField.AP_C_FLTID]
self.registration = apds_fields[ApdsField.AP_C_REG]
self.aircraft_type = apds_fields[ApdsField.ARCTYP]
self.departure = apds_fields[ApdsField.ADEP_ICAO]
self.destination = apds_fields[ApdsField.ADES_ICAO]
self.events = []
self.positions = []
is_arrival = (apds_fields[ApdsField.SRC_PHASE] == 'ARR')
        airport = self.destination if (is_arrival) else self.departure
# Get the take-off or landing event
if apds_fields[ApdsField.MVT_TIME_UTC]:
movement_event = FlightEventType.WHEELS_ON if (is_arrival) \
else FlightEventType.WHEELS_OFF
movement_time = iso8601_datetime_parser(apds_fields[ApdsField.MVT_TIME_UTC])
self.events.append(ApdsEvent(self.id, movement_event, movement_time))
# if the airport and runway is known, create a position
# if airport and apds_fields[ApdsField.AP_C_RWY]:
# Get the actual off-block or in-block event
if apds_fields[ApdsField.BLOCK_TIME_UTC]:
block_event = FlightEventType.GATE_IN if (is_arrival) \
else FlightEventType.GATE_OUT
block_time = iso8601_datetime_parser(apds_fields[ApdsField.BLOCK_TIME_UTC])
self.events.append(ApdsEvent(self.id, block_event, block_time))
# if the airport and stand is known, create a position
if len(airport_stands):
stand = apds_fields[ApdsField.AP_C_STND]
if airport and stand:
if (airport, stand) in airport_stands.index:
pos = airport_stands.loc[airport, stand]
latitude = pos['LAT']
longitude = pos['LON']
self.positions.append(ApdsPosition(self.id, block_time,
latitude, longitude,
airport, stand))
# Get the scheduled off-block or in-block event
if apds_fields[ApdsField.SCHED_TIME_UTC]:
scheduled_event = FlightEventType.SCHEDULED_IN_BLOCK if (is_arrival) \
else FlightEventType.SCHEDULED_OFF_BLOCK
scheduled_time = iso8601_datetime_parser(apds_fields[ApdsField.SCHED_TIME_UTC])
self.events.append(ApdsEvent(self.id, scheduled_event, scheduled_time))
def __repr__(self):
return '{},{},{},{},,{},{}'. \
format(self.id, self.callsign, self.registration, self.aircraft_type,
self.departure, self.destination)
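def _demo_apds_flight():
    """
    A hedged sketch, not part of the original converter: build a minimal
    ApdsFlight from a made-up CSV row with the time fields left empty, so no
    events or positions are generated. All field values below are invented.
    """
    row = [''] * len(ApdsField)
    row[ApdsField.APDS_ID] = 'id1'
    row[ApdsField.AP_C_FLTID] = 'ABC123'
    row[ApdsField.ADEP_ICAO] = 'EGLL'
    row[ApdsField.ADES_ICAO] = 'LFPG'
    row[ApdsField.SRC_PHASE] = 'ARR'
    return ApdsFlight(row, pd.DataFrame())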
def convert_apds_data(filename, stands_filename):
# Extract the start and finish date strings from the filename
start_date, finish_date = split_dual_date(os.path.basename(filename))
if not is_valid_iso8601_date(start_date):
log.error('apds data file: %s, invalid start date: %s',
filename, start_date)
return errno.EINVAL
# validate the finish date string from the filename
if not is_valid_iso8601_date(finish_date):
log.error('apds data file: %s, invalid finish date: %s',
filename, finish_date)
return errno.EINVAL
log.info('apds data file: %s', filename)
airport_stands_df = pd.DataFrame()
if stands_filename:
try:
airport_stands_df = pd.read_csv(stands_filename,
index_col=['ICAO_ID', 'STAND_ID'],
memory_map=True)
airport_stands_df.sort_index()
except EnvironmentError:
log.error('could not read file: %s', stands_filename)
return errno.ENOENT
log.info('airport stands file: %s', stands_filename)
else:
log.info('airport stands not provided')
# A dict to hold the APDS flights
flights = {}
# Read the APDS flights file into flights
try:
is_bz2 = has_bz2_extension(filename)
with bz2.open(filename, 'rt', newline="") if (is_bz2) else \
open(filename, 'r') as file:
reader = csv.reader(file, delimiter=',')
next(reader, None) # skip the headers
for row in reader:
flights.setdefault(row[ApdsField.APDS_ID],
ApdsFlight(row, airport_stands_df))
except EnvironmentError:
log.error('could not read file: %s', filename)
return errno.ENOENT
log.info('apds flights read ok')
valid_flights = 0
# Output the APDS flight data
# finish_date
output_files = create_convert_apds_filenames(start_date, finish_date)
flight_file = output_files[0]
try:
with open(flight_file, 'w') as file:
file.write(FLIGHT_FIELDS)
for key, value in sorted(flights.items()):
print(value, file=file)
valid_flights += 1
log.info('written file: %s', flight_file)
except EnvironmentError:
log.error('could not write file: %s', flight_file)
# if airport stand data was provided
if len(airport_stands_df):
# Output the APDS position data
positions_file = output_files[1]
try:
with open(positions_file, 'w') as file:
file.write(POSITION_FIELDS)
for key, value in sorted(flights.items()):
for event in sorted(value.positions):
print(event, file=file)
log.info('written file: %s', positions_file)
except EnvironmentError:
log.error('could not write file: %s', positions_file)
# Output the APDS event data
event_file = output_files[2]
try:
with open(event_file, 'w') as file:
file.write(FLIGHT_EVENT_FIELDS)
for key, value in sorted(flights.items()):
for event in sorted(value.events):
print(event, file=file)
log.info('written file: %s', event_file)
except EnvironmentError:
log.error('could not write file: %s', event_file)
return errno.EACCES
log.info('apds conversion complete for %s flights on %s',
valid_flights, start_date)
return 0
if __name__ == '__main__':
if len(sys.argv) < 2:
print('Usage: convert_apt_data.py <apds_filename> [stands_filename]')
sys.exit(errno.EINVAL)
# Get the stands_filename, if supplied
stands_filename = ''
if len(sys.argv) >= 3:
stands_filename = sys.argv[2]
error_code = convert_apds_data(sys.argv[1], stands_filename)
if error_code:
sys.exit(error_code)
|
[
"pandas.DataFrame",
"pru.trajectory_fields.has_bz2_extension",
"csv.reader",
"pru.logger.logger",
"os.path.basename",
"pandas.read_csv",
"pru.trajectory_fields.is_valid_iso8601_date",
"pru.trajectory_files.create_convert_apds_filenames",
"bz2.open",
"pru.trajectory_fields.iso8601_datetime_parser",
"sys.exit"
] |
[((619, 635), 'pru.logger.logger', 'logger', (['__name__'], {}), '(__name__)\n', (625, 635), False, 'from pru.logger import logger\n'), ((5734, 5748), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (5746, 5748), True, 'import pandas as pd\n'), ((7064, 7118), 'pru.trajectory_files.create_convert_apds_filenames', 'create_convert_apds_filenames', (['start_date', 'finish_date'], {}), '(start_date, finish_date)\n', (7093, 7118), False, 'from pru.trajectory_files import create_convert_apds_filenames\n'), ((5219, 5245), 'os.path.basename', 'os.path.basename', (['filename'], {}), '(filename)\n', (5235, 5245), False, 'import os\n'), ((5258, 5291), 'pru.trajectory_fields.is_valid_iso8601_date', 'is_valid_iso8601_date', (['start_date'], {}), '(start_date)\n', (5279, 5291), False, 'from pru.trajectory_fields import FLIGHT_FIELDS, FLIGHT_EVENT_FIELDS, POSITION_FIELDS, FlightEventType, is_valid_iso8601_date, iso8601_datetime_parser, has_bz2_extension, split_dual_date\n'), ((5493, 5527), 'pru.trajectory_fields.is_valid_iso8601_date', 'is_valid_iso8601_date', (['finish_date'], {}), '(finish_date)\n', (5514, 5527), False, 'from pru.trajectory_fields import FLIGHT_FIELDS, FLIGHT_EVENT_FIELDS, POSITION_FIELDS, FlightEventType, is_valid_iso8601_date, iso8601_datetime_parser, has_bz2_extension, split_dual_date\n'), ((6410, 6437), 'pru.trajectory_fields.has_bz2_extension', 'has_bz2_extension', (['filename'], {}), '(filename)\n', (6427, 6437), False, 'from pru.trajectory_fields import FLIGHT_FIELDS, FLIGHT_EVENT_FIELDS, POSITION_FIELDS, FlightEventType, is_valid_iso8601_date, iso8601_datetime_parser, has_bz2_extension, split_dual_date\n'), ((8839, 8861), 'sys.exit', 'sys.exit', (['errno.EINVAL'], {}), '(errno.EINVAL)\n', (8847, 8861), False, 'import sys\n'), ((9089, 9109), 'sys.exit', 'sys.exit', (['error_code'], {}), '(error_code)\n', (9097, 9109), False, 'import sys\n'), ((3118, 3178), 'pru.trajectory_fields.iso8601_datetime_parser', 'iso8601_datetime_parser', (['apds_fields[ApdsField.MVT_TIME_UTC]'], {}), '(apds_fields[ApdsField.MVT_TIME_UTC])\n', (3141, 3178), False, 'from pru.trajectory_fields import FLIGHT_FIELDS, FLIGHT_EVENT_FIELDS, POSITION_FIELDS, FlightEventType, is_valid_iso8601_date, iso8601_datetime_parser, has_bz2_extension, split_dual_date\n'), ((3635, 3697), 'pru.trajectory_fields.iso8601_datetime_parser', 'iso8601_datetime_parser', (['apds_fields[ApdsField.BLOCK_TIME_UTC]'], {}), '(apds_fields[ApdsField.BLOCK_TIME_UTC])\n', (3658, 3697), False, 'from pru.trajectory_fields import FLIGHT_FIELDS, FLIGHT_EVENT_FIELDS, POSITION_FIELDS, FlightEventType, is_valid_iso8601_date, iso8601_datetime_parser, has_bz2_extension, split_dual_date\n'), ((4708, 4770), 'pru.trajectory_fields.iso8601_datetime_parser', 'iso8601_datetime_parser', (['apds_fields[ApdsField.SCHED_TIME_UTC]'], {}), '(apds_fields[ApdsField.SCHED_TIME_UTC])\n', (4731, 4770), False, 'from pru.trajectory_fields import FLIGHT_FIELDS, FLIGHT_EVENT_FIELDS, POSITION_FIELDS, FlightEventType, is_valid_iso8601_date, iso8601_datetime_parser, has_bz2_extension, split_dual_date\n'), ((5818, 5903), 'pandas.read_csv', 'pd.read_csv', (['stands_filename'], {'index_col': "['ICAO_ID', 'STAND_ID']", 'memory_map': '(True)'}), "(stands_filename, index_col=['ICAO_ID', 'STAND_ID'], memory_map=True\n )\n", (5829, 5903), True, 'import pandas as pd\n'), ((6574, 6605), 'csv.reader', 'csv.reader', (['file'], {'delimiter': '""","""'}), "(file, delimiter=',')\n", (6584, 6605), False, 'import csv\n'), ((6451, 6487), 'bz2.open', 'bz2.open', (['filename', 
'"""rt"""'], {'newline': '""""""'}), "(filename, 'rt', newline='')\n", (6459, 6487), False, 'import bz2\n')]
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
import io
import os
import re
import sys
import json
#import copy
import codecs
#reload(sys)
#sys.setdefaultencoding('UTF-8')
DEBUG_MODE = False
CIN_HEAD = "%gen_inp"
ENAME_HEAD = "%ename"
CNAME_HEAD = "%cname"
ENCODING_HEAD = "%encoding"
SELKEY_HEAD = "%selkey"
KEYNAME_HEAD = "%keyname"
CHARDEF_HEAD = "%chardef"
PARSING_HEAD_STATE = 0
PARSE_KEYNAME_STATE = 1
PARSE_CHARDEF_STATE = 2
HEADS = [
CIN_HEAD,
ENAME_HEAD,
CNAME_HEAD,
ENCODING_HEAD,
SELKEY_HEAD,
KEYNAME_HEAD,
CHARDEF_HEAD,
]
class CinToJson(object):
    # TODO check the possibility that the encoding is not utf-8
encoding = 'utf-8'
def __init__(self):
self.sortByCharset = False
self.ename = ""
self.cname = ""
self.selkey = ""
self.keynames = {}
self.chardefs = {}
self.dupchardefs = {}
self.bopomofo = {}
self.big5F = {}
self.big5LF = {}
self.big5S = {}
self.big5Other = {}
self.cjk = {}
self.cjkExtA = {}
self.cjkExtB = {}
self.cjkExtC = {}
self.cjkExtD = {}
self.cjkExtE = {}
self.cjkOther = {}
self.phrases = {}
self.privateuse = {}
self.cincount = {}
self.cincount['bopomofo'] = 0
self.cincount['big5F'] = 0
self.cincount['big5LF'] = 0
self.cincount['big5S'] = 0
self.cincount['big5Other'] = 0
self.cincount['cjk'] = 0
self.cincount['cjkExtA'] = 0
self.cincount['cjkExtB'] = 0
self.cincount['cjkExtC'] = 0
self.cincount['cjkExtD'] = 0
self.cincount['cjkExtE'] = 0
self.cincount['cjkOther'] = 0
self.cincount['phrases'] = 0
self.cincount['cjkCIS'] = 0
self.cincount['privateuse'] = 0
self.cincount['totalchardefs'] = 0
self.charsetRange = {}
self.charsetRange['bopomofo'] = [int('0x3100', 16), int('0x3130', 16)]
self.charsetRange['bopomofoTone'] = [int('0x02D9', 16), int('0x02CA', 16), int('0x02C7', 16), int('0x02CB', 16)]
self.charsetRange['cjk'] = [int('0x4E00', 16), int('0x9FD6', 16)]
self.charsetRange['big5F'] = [int('0xA440', 16), int('0xC67F', 16)]
self.charsetRange['big5LF'] = [int('0xC940', 16), int('0xF9D6', 16)]
self.charsetRange['big5S'] = [int('0xA140', 16), int('0xA3C0', 16)]
self.charsetRange['cjkExtA'] = [int('0x3400', 16), int('0x4DB6', 16)]
self.charsetRange['cjkExtB'] = [int('0x20000', 16), int('0x2A6DF', 16)]
self.charsetRange['cjkExtC'] = [int('0x2A700', 16), int('0x2B73F', 16)]
self.charsetRange['cjkExtD'] = [int('0x2B740', 16), int('0x2B81F', 16)]
self.charsetRange['cjkExtE'] = [int('0x2B820', 16), int('0x2CEAF', 16)]
self.charsetRange['pua'] = [int('0xE000', 16), int('0xF900', 16)]
self.charsetRange['puaA'] = [int('0xF0000', 16), int('0xFFFFE', 16)]
self.charsetRange['puaB'] = [int('0x100000', 16), int('0x10FFFE', 16)]
self.charsetRange['cjkCIS'] = [int('0x2F800', 16), int('0x2FA20', 16)]
self.haveHashtagInKeynames = ["ez.cin", "ezsmall.cin", "ezmid.cin", "ezbig.cin"]
self.saveList = ["ename", "cname", "selkey", "keynames", "cincount", "chardefs", "dupchardefs", "privateuse"]
self.curdir = os.path.abspath(os.path.dirname(__file__))
def __del__(self):
del self.keynames
del self.chardefs
del self.dupchardefs
del self.bopomofo
del self.big5F
del self.big5LF
del self.big5S
del self.big5Other
del self.cjk
del self.cjkExtA
del self.cjkExtB
del self.cjkExtC
del self.cjkExtD
del self.cjkExtE
del self.cjkOther
del self.privateuse
del self.phrases
del self.cincount
self.keynames = {}
self.chardefs = {}
self.dupchardefs = {}
self.bopomofo = {}
self.big5F = {}
self.big5LF = {}
self.big5S = {}
self.big5Other = {}
self.cjk = {}
self.cjkExtA = {}
self.cjkExtB = {}
self.cjkExtC = {}
self.cjkExtD = {}
self.cjkExtE = {}
self.cjkOther = {}
self.privateuse = {}
self.phrases = {}
self.cincount = {}
def run(self, file, filePath, sortByCharset):
print(file)
print(filePath)
self.jsonFile = re.sub('\.cin$', '', file) + '.json'
self.sortByCharset = sortByCharset
state = PARSING_HEAD_STATE
if file in self.haveHashtagInKeynames:
if DEBUG_MODE:
                print("Key definitions contain the '#' symbol!")
if not os.path.exists(filePath):
open(filePath, 'w').close()
with io.open(filePath, encoding='utf-8') as fs:
for line in fs:
line = re.sub('^ | $|\\n$', '', line)
if file in self.haveHashtagInKeynames:
if not line or (line[0] == '#' and state == PARSING_HEAD_STATE):
continue
else:
if not line or line[0] == '#':
continue
if state is not PARSE_CHARDEF_STATE:
if CIN_HEAD in line:
continue
if ENAME_HEAD in line:
self.ename = head_rest(ENAME_HEAD, line)
if CNAME_HEAD in line:
self.cname = head_rest(CNAME_HEAD, line)
if ENCODING_HEAD in line:
continue
if SELKEY_HEAD in line:
self.selkey = head_rest(SELKEY_HEAD, line)
if CHARDEF_HEAD in line:
if 'begin' in line:
state = PARSE_CHARDEF_STATE
else:
state = PARSING_HEAD_STATE
continue
if KEYNAME_HEAD in line:
if 'begin' in line:
state = PARSE_KEYNAME_STATE
else:
state = PARSING_HEAD_STATE
continue
if state is PARSE_KEYNAME_STATE:
key, root = safeSplit(line)
key = key.strip().lower()
if ' ' in root:
root = '\u3000'
else:
root = root.strip()
self.keynames[key] = root
continue
else:
if CHARDEF_HEAD in line:
continue
if self.cname == "中標倉頡":
if '#' in line:
line = re.sub('#.+', '', line)
key, root = safeSplit(line)
key = key.strip().lower()
if root == "Error":
if DEBUG_MODE:
                            print("An error occurred!")
break
if ' ' in root:
root = '\u3000'
else:
root = root.strip()
charset = self.getCharSet(key, root)
if not self.sortByCharset:
if key in self.chardefs:
if root in self.chardefs[key]:
if DEBUG_MODE:
                                    print("Duplicate entry found: " + key)
try:
self.dupchardefs[key].append(root)
except KeyError:
self.dupchardefs[key] = [root]
else:
try:
self.chardefs[key].append(root)
except KeyError:
self.chardefs[key] = [root]
self.cincount['totalchardefs'] += 1
else:
try:
self.chardefs[key].append(root)
except KeyError:
self.chardefs[key] = [root]
self.cincount['totalchardefs'] += 1
if self.sortByCharset:
if DEBUG_MODE:
                print("Sorting by character set!")
self.mergeDicts(self.big5F, self.big5LF, self.big5S, self.big5Other, self.bopomofo, self.cjk, self.cjkExtA, self.cjkExtB, self.cjkExtC, self.cjkExtD, self.cjkExtE, self.cjkOther, self.phrases, self.privateuse)
#print("WTF")
#print(self.jsonFile);
self.saveJsonFile(self.jsonFile)
def mergeDicts(self, *chardefsdicts):
for chardefsdict in chardefsdicts:
for key in chardefsdict:
for root in chardefsdict[key]:
if key in self.chardefs:
if root in self.chardefs[key]:
if DEBUG_MODE:
                                print("Duplicate entry found: " + key)
try:
self.dupchardefs[key].append(root)
except KeyError:
self.dupchardefs[key] = [root]
else:
try:
self.chardefs[key].append(root)
except KeyError:
self.chardefs[key] = [root]
self.cincount['totalchardefs'] += 1
else:
try:
self.chardefs[key].append(root)
except KeyError:
self.chardefs[key] = [root]
self.cincount['totalchardefs'] += 1
def toJson(self):
return {key: value for key, value in self.__dict__.items() if key in self.saveList}
def saveJsonFile(self, file):
#filename = self.getJsonFile(file)
filename = file
try:
with codecs.open(filename, 'w', 'utf-8') as f:
js = json.dump(self.toJson(), f, ensure_ascii=False, sort_keys=True, indent=4)
except Exception:
print("FIXME")
pass # FIXME: handle I/O errors?
def getJsonDir(self):
json_dir = os.path.join(self.curdir, os.pardir, "json")
os.makedirs(json_dir, mode=0o700, exist_ok=True)
return json_dir
def getJsonFile(self, name):
return os.path.join(self.getJsonDir(), name)
def getCharSet(self, key, root):
matchstr = ''
if len(root) > 1:
try:
self.phrases[key].append(root)
except KeyError:
self.phrases[key] = [root]
self.cincount['phrases'] += 1
return "phrases"
else:
matchstr = root
matchint = ord(matchstr)
        if matchint <= self.charsetRange['cjk'][1]:
            if (matchint in range(self.charsetRange['bopomofo'][0], self.charsetRange['bopomofo'][1]) or # Bopomofo range
                matchint in self.charsetRange['bopomofoTone']):
                try:
                    self.bopomofo[key].append(root) # Bopomofo symbols
                except KeyError:
                    self.bopomofo[key] = [root]
                self.cincount['bopomofo'] += 1
                return "bopomofo"
            elif matchint in range(self.charsetRange['cjk'][0], self.charsetRange['cjk'][1]): # CJK Unified Ideographs range
                try:
                    big5code = matchstr.encode('big5')
                    big5codeint = int(big5code.hex(), 16)
                    if big5codeint in range(self.charsetRange['big5F'][0], self.charsetRange['big5F'][1]): # Big5 frequently-used characters
                        try:
                            self.big5F[key].append(root)
                        except KeyError:
                            self.big5F[key] = [root]
                        self.cincount['big5F'] += 1
                        return "big5F"
                    elif big5codeint in range(self.charsetRange['big5LF'][0], self.charsetRange['big5LF'][1]): # Big5 less frequently-used characters
                        try:
                            self.big5LF[key].append(root)
                        except KeyError:
                            self.big5LF[key] = [root]
                        self.cincount['big5LF'] += 1
                        return "big5LF"
                    elif big5codeint in range(self.charsetRange['big5S'][0], self.charsetRange['big5S'][1]): # Big5 symbols
                        try:
                            self.big5S[key].append(root)
                        except KeyError:
                            self.big5S[key] = [root]
                        self.cincount['big5S'] += 1
                        return "big5S"
                    else: # other Big5 Chinese characters
                        try:
                            self.big5Other[key].append(root)
                        except KeyError:
                            self.big5Other[key] = [root]
                        self.cincount['big5Other'] += 1
                        return "big5Other"
                except: # CJK Unified Ideographs characters not encodable in Big5
                    try:
                        self.cjk[key].append(root)
                    except KeyError:
                        self.cjk[key] = [root]
                    self.cincount['cjk'] += 1
                    return "cjk"
            elif matchint in range(self.charsetRange['cjkExtA'][0], self.charsetRange['cjkExtA'][1]): # CJK Unified Ideographs Extension A range
                try:
                    self.cjkExtA[key].append(root) # CJK Extension A
                except KeyError:
                    self.cjkExtA[key] = [root]
                self.cincount['cjkExtA'] += 1
                return "cjkExtA"
        else:
            if matchint in range(self.charsetRange['cjkExtB'][0], self.charsetRange['cjkExtB'][1]): # CJK Unified Ideographs Extension B range
                try:
                    self.cjkExtB[key].append(root) # CJK Extension B
                except KeyError:
                    self.cjkExtB[key] = [root]
                self.cincount['cjkExtB'] += 1
                return "cjkExtB"
            elif matchint in range(self.charsetRange['cjkExtC'][0], self.charsetRange['cjkExtC'][1]): # CJK Unified Ideographs Extension C range
                try:
                    self.cjkExtC[key].append(root) # CJK Extension C
                except KeyError:
                    self.cjkExtC[key] = [root]
                self.cincount['cjkExtC'] += 1
                return "cjkExtC"
            elif matchint in range(self.charsetRange['cjkExtD'][0], self.charsetRange['cjkExtD'][1]): # CJK Unified Ideographs Extension D range
                try:
                    self.cjkExtD[key].append(root) # CJK Extension D
                except KeyError:
                    self.cjkExtD[key] = [root]
                self.cincount['cjkExtD'] += 1
                return "cjkExtD"
            elif matchint in range(self.charsetRange['cjkExtE'][0], self.charsetRange['cjkExtE'][1]): # CJK Unified Ideographs Extension E range
                try:
                    self.cjkExtE[key].append(root) # CJK Extension E
                except KeyError:
                    self.cjkExtE[key] = [root]
                self.cincount['cjkExtE'] += 1
                return "cjkExtE"
            elif (matchint in range(self.charsetRange['pua'][0], self.charsetRange['pua'][1]) or # Unicode Private Use ranges
                  matchint in range(self.charsetRange['puaA'][0], self.charsetRange['puaA'][1]) or
                  matchint in range(self.charsetRange['puaB'][0], self.charsetRange['puaB'][1])):
                try:
                    self.privateuse[key].append(root) # Unicode Private Use Area
                except KeyError:
                    self.privateuse[key] = [root]
                self.cincount['privateuse'] += 1
                return "pua"
            elif matchint in range(self.charsetRange['cjkCIS'][0], self.charsetRange['cjkCIS'][1]): # CJK Compatibility Ideographs Supplement range
                try:
                    self.privateuse[key].append(root) # CJK Compatibility Ideographs Supplement
                except KeyError:
                    self.privateuse[key] = [root]
                self.cincount['cjkCIS'] += 1
                return "pua"
        # not matched by any of the ranges above: other CJK characters or characters from other charsets
        try:
            self.cjkOther[key].append(root) # other CJK characters or characters from other charsets
        except KeyError:
            self.cjkOther[key] = [root]
        self.cincount['cjkOther'] += 1
        return "cjkOther"
def head_rest(head, line):
return line[len(head):].strip()
def safeSplit(line):
if ' ' in line:
return line.split(' ', 1)
elif '\t' in line:
return line.split('\t', 1)
else:
return line, "Error"
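def _demo_parse_helpers():
    # A hedged sketch, not part of the original converter: exercise the two
    # parsing helpers above on a made-up keyname line and a made-up header line.
    key, root = safeSplit('a 日')                   # -> ('a', '日')
    name = head_rest('%ename', '%ename array30')    # -> 'array30'
    return key, root, name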
# def main():
#
# app = CinToJson()
# if len(sys.argv) >= 2:
# cinFile = os.path.join(os.path.abspath(os.path.dirname(__file__)), os.pardir, "cin", sys.argv[1])
# if os.path.exists(cinFile):
# if len(sys.argv) >= 3 and sys.argv[2] == "sort":
# app.run(sys.argv[1], cinFile, True)
# else:
# app.run(sys.argv[1], cinFile, False)
# else:
# if len(sys.argv) == 1:
# sortList = ['cnscj.cin', 'CnsPhonetic.cin']
# for file in os.listdir(os.path.join(os.path.abspath(os.path.dirname(__file__)), os.pardir, "cin")):
# if file.endswith(".cin"):
# if DEBUG_MODE:
#                     print('Converting ' + file + ' ...')
# app.__init__()
# cinFile = os.path.join(os.path.abspath(os.path.dirname(__file__)), os.pardir, "cin", file)
# if file in sortList:
# app.run(file, cinFile, True)
# else:
# app.run(file, cinFile, False)
# app.__del__()
# else:
# if DEBUG_MODE:
#             print('File does not exist!')
|
[
"os.makedirs",
"codecs.open",
"os.path.dirname",
"os.path.exists",
"io.open",
"os.path.join",
"re.sub"
] |
[((10624, 10668), 'os.path.join', 'os.path.join', (['self.curdir', 'os.pardir', '"""json"""'], {}), "(self.curdir, os.pardir, 'json')\n", (10636, 10668), False, 'import os\n'), ((10685, 10731), 'os.makedirs', 'os.makedirs', (['json_dir'], {'mode': '(448)', 'exist_ok': '(True)'}), '(json_dir, mode=448, exist_ok=True)\n', (10696, 10731), False, 'import os\n'), ((3427, 3452), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (3442, 3452), False, 'import os\n'), ((4528, 4555), 're.sub', 're.sub', (['"""\\\\.cin$"""', '""""""', 'file'], {}), "('\\\\.cin$', '', file)\n", (4534, 4555), False, 'import re\n'), ((4769, 4793), 'os.path.exists', 'os.path.exists', (['filePath'], {}), '(filePath)\n', (4783, 4793), False, 'import os\n'), ((4848, 4883), 'io.open', 'io.open', (['filePath'], {'encoding': '"""utf-8"""'}), "(filePath, encoding='utf-8')\n", (4855, 4883), False, 'import io\n'), ((4958, 4988), 're.sub', 're.sub', (['"""^ | $|\\\\n$"""', '""""""', 'line'], {}), "('^ | $|\\\\n$', '', line)\n", (4964, 4988), False, 'import re\n'), ((10342, 10377), 'codecs.open', 'codecs.open', (['filename', '"""w"""', '"""utf-8"""'], {}), "(filename, 'w', 'utf-8')\n", (10353, 10377), False, 'import codecs\n'), ((6958, 6981), 're.sub', 're.sub', (['"""#.+"""', '""""""', 'line'], {}), "('#.+', '', line)\n", (6964, 6981), False, 'import re\n')]
|
from django.contrib.auth import authenticate, login
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.auth.models import User
from django.shortcuts import get_object_or_404
from django.views.generic import DetailView, TemplateView
from django.views.generic.edit import FormView
from playoffs.models import Playoff
class RegistrationView(FormView):
template_name = 'registration/register.html'
form_class = UserCreationForm
success_url = '/'
def form_valid(self, form):
name = form.cleaned_data['username']
        password = form.cleaned_data['password1']
user = User.objects.create_user(name, password=password)
new_user = authenticate(username=name, password=password)
login(self.request, new_user)
return super(RegistrationView, self).form_valid(form)
class ProfileView(LoginRequiredMixin, TemplateView):
template_name = 'registration/profile.html'
def get_context_data(self, *args, **kwargs):
context = super(ProfileView, self).get_context_data(*args, **kwargs)
context['playoffs'] = Playoff.objects.filter(owner=self.request.user)
return context
class UserPageView(DetailView):
model = User
context_object_name = 'user'
template_name = 'accounts/user_page.html'
def get_object(self, queryset=None):
username = self.kwargs.get('username')
obj = get_object_or_404(User, username=username)
return obj
def get_context_data(self, *args, **kwargs):
context = super(UserPageView, self).get_context_data(*args, **kwargs)
context['playoffs'] = Playoff.objects.filter(owner=self.object)
return context
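# URL wiring sketch (an assumption, not part of the original source); route names and
# paths below are illustrative only.
#   from django.urls import path
#   urlpatterns = [
#       path('register/', RegistrationView.as_view(), name='register'),
#       path('profile/', ProfileView.as_view(), name='profile'),
#       path('users/<str:username>/', UserPageView.as_view(), name='user_page'),
#   ]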
|
[
"playoffs.models.Playoff.objects.filter",
"django.contrib.auth.models.User.objects.create_user",
"django.shortcuts.get_object_or_404",
"django.contrib.auth.authenticate",
"django.contrib.auth.login"
] |
[((687, 736), 'django.contrib.auth.models.User.objects.create_user', 'User.objects.create_user', (['name'], {'password': 'password'}), '(name, password=password)\n', (711, 736), False, 'from django.contrib.auth.models import User\n'), ((756, 802), 'django.contrib.auth.authenticate', 'authenticate', ([], {'username': 'name', 'password': 'password'}), '(username=name, password=password)\n', (768, 802), False, 'from django.contrib.auth import authenticate, login\n'), ((811, 840), 'django.contrib.auth.login', 'login', (['self.request', 'new_user'], {}), '(self.request, new_user)\n', (816, 840), False, 'from django.contrib.auth import authenticate, login\n'), ((1167, 1214), 'playoffs.models.Playoff.objects.filter', 'Playoff.objects.filter', ([], {'owner': 'self.request.user'}), '(owner=self.request.user)\n', (1189, 1214), False, 'from playoffs.models import Playoff\n'), ((1471, 1513), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['User'], {'username': 'username'}), '(User, username=username)\n', (1488, 1513), False, 'from django.shortcuts import get_object_or_404\n'), ((1695, 1736), 'playoffs.models.Playoff.objects.filter', 'Playoff.objects.filter', ([], {'owner': 'self.object'}), '(owner=self.object)\n', (1717, 1736), False, 'from playoffs.models import Playoff\n')]
|
# -*- coding: utf-8 -*-
"""
Created on Tue Jun 7 16:12:33 2016
@author: rmcleod
"""
import numpy as np
import matplotlib.pyplot as plt
import os, os.path, glob
mcFRCFiles = glob.glob( "FRC/*mcFRC.npy" )
zorroFRCFiles = glob.glob( "FRC/*zorroFRC.npy" )
zorroFRCs = [None] * len( zorroFRCFiles)
for J in np.arange( len(zorroFRCFiles) ):
zorroFRCs[J] = np.load( zorroFRCFiles[J] )
mcFRCs = [None] * len( mcFRCFiles)
for J in np.arange( len(mcFRCFiles) ):
mcFRCs[J] = np.load( mcFRCFiles[J] )
zorroMeanFRC = np.mean( np.array(zorroFRCs), axis=0 )
mcMeanFRC = np.mean( np.array(mcFRCs), axis=0 )
plt.figure()
plt.plot( mcMeanFRC, '.-', color='firebrick', label='MotionCorr' )
plt.plot( zorroMeanFRC, '.-', color='black', label='Zorro' )
plt.title( "Mean FRC Re-aligned from MotionCorr" )
plt.legend()
plt.xlim( [0,len(mcMeanFRC)] )
plt.savefig( "Dataset_mean_MC_vs_Zorro.png" )
|
[
"matplotlib.pyplot.title",
"numpy.load",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.figure",
"numpy.array",
"glob.glob",
"matplotlib.pyplot.savefig"
] |
[((177, 204), 'glob.glob', 'glob.glob', (['"""FRC/*mcFRC.npy"""'], {}), "('FRC/*mcFRC.npy')\n", (186, 204), False, 'import os, os.path, glob\n'), ((223, 253), 'glob.glob', 'glob.glob', (['"""FRC/*zorroFRC.npy"""'], {}), "('FRC/*zorroFRC.npy')\n", (232, 253), False, 'import os, os.path, glob\n'), ((621, 633), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (631, 633), True, 'import matplotlib.pyplot as plt\n'), ((634, 698), 'matplotlib.pyplot.plot', 'plt.plot', (['mcMeanFRC', '""".-"""'], {'color': '"""firebrick"""', 'label': '"""MotionCorr"""'}), "(mcMeanFRC, '.-', color='firebrick', label='MotionCorr')\n", (642, 698), True, 'import matplotlib.pyplot as plt\n'), ((701, 759), 'matplotlib.pyplot.plot', 'plt.plot', (['zorroMeanFRC', '""".-"""'], {'color': '"""black"""', 'label': '"""Zorro"""'}), "(zorroMeanFRC, '.-', color='black', label='Zorro')\n", (709, 759), True, 'import matplotlib.pyplot as plt\n'), ((762, 810), 'matplotlib.pyplot.title', 'plt.title', (['"""Mean FRC Re-aligned from MotionCorr"""'], {}), "('Mean FRC Re-aligned from MotionCorr')\n", (771, 810), True, 'import matplotlib.pyplot as plt\n'), ((813, 825), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (823, 825), True, 'import matplotlib.pyplot as plt\n'), ((857, 900), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""Dataset_mean_MC_vs_Zorro.png"""'], {}), "('Dataset_mean_MC_vs_Zorro.png')\n", (868, 900), True, 'import matplotlib.pyplot as plt\n'), ((361, 386), 'numpy.load', 'np.load', (['zorroFRCFiles[J]'], {}), '(zorroFRCFiles[J])\n', (368, 386), True, 'import numpy as np\n'), ((488, 510), 'numpy.load', 'np.load', (['mcFRCFiles[J]'], {}), '(mcFRCFiles[J])\n', (495, 510), True, 'import numpy as np\n'), ((542, 561), 'numpy.array', 'np.array', (['zorroFRCs'], {}), '(zorroFRCs)\n', (550, 561), True, 'import numpy as np\n'), ((593, 609), 'numpy.array', 'np.array', (['mcFRCs'], {}), '(mcFRCs)\n', (601, 609), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*-
"""
ArduinoMediator.
@author: Darkness4
"""
import logging
from threading import Thread
from time import sleep, time
from typing import Optional
from csgo_gsi_arduino_lcd.entities.state import State
from csgo_gsi_arduino_lcd.entities.status import Status
from serial import Serial
class ArduinoMediator(Thread):
"""Give order to the arduino."""
state: Optional[State] = None
__refresh = False # Order to refresh informations
__start = True # Order to start/stop
__status: Status = Status.NONE
ser_arduino: Serial
def __init__(self, ser_arduino: Serial):
"""Init save."""
super(ArduinoMediator, self).__init__()
self.ser_arduino = ser_arduino
@property
def status(self) -> Status:
return self.__status
@status.setter
def status(self, status: Status):
"""Change Messenger behavior."""
self.__status = status
self.__refresh = True # Informations need to be refreshed
def run(self):
"""Thread start."""
while self.__start:
self.refresh() if self.__refresh else sleep(0.1)
logging.info("Messenger is dead.")
def refresh(self):
self.__refresh = False
# Has refreshed
if self.__status in (
Status.BOMB,
Status.DEFUSED,
Status.EXPLODED,
): # Bomb
self.draw_bomb_timer()
elif self.__status == Status.NONE:
self.draw_idling()
else: # Default status
self.write_player_stats()
def draw_bomb_timer(self):
"""40 sec bomb timer on arduino."""
offset = time()
actualtime: int = int(40 - time() + offset)
while actualtime > 0 and self.__status == Status.BOMB:
oldtime = actualtime
sleep(0.1)
actualtime = int(40 - time() + offset)
if oldtime != actualtime: # Actualization only integer change
self.ser_arduino.write(b"BOMB PLANTED")
# Wait for second line
sleep(0.1)
for i in range(0, 40, 5):
self.ser_arduino.write(
ArduinoMediator.progress(actualtime - i)
)
self.ser_arduino.write(str(actualtime).encode())
sleep(0.1)
if self.__status == Status.DEFUSED:
self.ser_arduino.write(b"BOMB DEFUSED")
# Wait for second line
sleep(0.1)
self.ser_arduino.write(b" ")
sleep(0.1)
elif self.__status == Status.EXPLODED:
self.ser_arduino.write(b"BOMB EXPLODED")
# Wait for second line
sleep(0.1)
self.ser_arduino.write(b" ")
sleep(0.1)
def write_player_stats(self):
"""Player stats writer."""
# Not too fast
sleep(0.1)
# Writing health and armor in Serial
self.draw_health_and_armor()
# Wait for second line
sleep(0.1)
# Kill or Money
if self.__status == Status.NOT_FREEZETIME:
self.draw_kills()
elif self.__status == Status.FREEZETIME:
self.draw_money()
sleep(0.1)
def draw_kills(self):
"""Show kills in one line."""
# HS and Kill counter
self.ser_arduino.write(b"K: ")
if self.state is not None:
for i in range(self.state.round_kills):
if i < self.state.round_killhs:
self.ser_arduino.write(b"\x01") # Byte 1 char : HS
else:
self.ser_arduino.write(b"\x00") # Byte 0 char : kill no HS
def draw_money(self):
"""Show money in one line."""
if self.state is not None:
self.ser_arduino.write(f"M: {self.state.money}".encode())
def draw_health_and_armor(self):
"""Show health and armor in one line."""
if self.state is not None:
self.ser_arduino.write(b"H: ")
self.ser_arduino.write(
ArduinoMediator.progress(self.state.health // 5)
)
self.ser_arduino.write(
ArduinoMediator.progress((self.state.health - 25) // 5)
)
self.ser_arduino.write(
ArduinoMediator.progress((self.state.health - 50) // 5)
)
self.ser_arduino.write(
ArduinoMediator.progress((self.state.health - 75) // 5)
)
self.ser_arduino.write(b" A: ")
self.ser_arduino.write(
ArduinoMediator.progress(self.state.armor // 5)
)
self.ser_arduino.write(
ArduinoMediator.progress((self.state.armor - 25) // 5)
)
self.ser_arduino.write(
ArduinoMediator.progress((self.state.armor - 50) // 5)
)
self.ser_arduino.write(
ArduinoMediator.progress((self.state.armor - 75) // 5)
)
def draw_idling(self):
"""Print text while idling."""
self.ser_arduino.write(b"Waiting for")
sleep(0.1)
self.ser_arduino.write(b"matches")
def shutdown(self):
"""Stop the mediator."""
self.__start = False
@staticmethod
def progress(i: int) -> bytes:
"""
Progress bar, for arduino 5px large.
Parameters
----------
i : int
Select which character to send to Arduino.
Returns
-------
bytes : Character send to Arduino.
"""
if i <= 0:
return b"\x07"
elif 1 <= i <= 5:
return bytes([i + 1])
else:
return b"\x06"
|
[
"logging.info",
"time.sleep",
"time.time"
] |
[((1144, 1178), 'logging.info', 'logging.info', (['"""Messenger is dead."""'], {}), "('Messenger is dead.')\n", (1156, 1178), False, 'import logging\n'), ((1661, 1667), 'time.time', 'time', ([], {}), '()\n', (1665, 1667), False, 'from time import sleep, time\n'), ((2893, 2903), 'time.sleep', 'sleep', (['(0.1)'], {}), '(0.1)\n', (2898, 2903), False, 'from time import sleep, time\n'), ((3027, 3037), 'time.sleep', 'sleep', (['(0.1)'], {}), '(0.1)\n', (3032, 3037), False, 'from time import sleep, time\n'), ((3231, 3241), 'time.sleep', 'sleep', (['(0.1)'], {}), '(0.1)\n', (3236, 3241), False, 'from time import sleep, time\n'), ((5144, 5154), 'time.sleep', 'sleep', (['(0.1)'], {}), '(0.1)\n', (5149, 5154), False, 'from time import sleep, time\n'), ((1828, 1838), 'time.sleep', 'sleep', (['(0.1)'], {}), '(0.1)\n', (1833, 1838), False, 'from time import sleep, time\n'), ((2495, 2505), 'time.sleep', 'sleep', (['(0.1)'], {}), '(0.1)\n', (2500, 2505), False, 'from time import sleep, time\n'), ((2559, 2569), 'time.sleep', 'sleep', (['(0.1)'], {}), '(0.1)\n', (2564, 2569), False, 'from time import sleep, time\n'), ((1125, 1135), 'time.sleep', 'sleep', (['(0.1)'], {}), '(0.1)\n', (1130, 1135), False, 'from time import sleep, time\n'), ((2076, 2086), 'time.sleep', 'sleep', (['(0.1)'], {}), '(0.1)\n', (2081, 2086), False, 'from time import sleep, time\n'), ((2341, 2351), 'time.sleep', 'sleep', (['(0.1)'], {}), '(0.1)\n', (2346, 2351), False, 'from time import sleep, time\n'), ((2717, 2727), 'time.sleep', 'sleep', (['(0.1)'], {}), '(0.1)\n', (2722, 2727), False, 'from time import sleep, time\n'), ((2781, 2791), 'time.sleep', 'sleep', (['(0.1)'], {}), '(0.1)\n', (2786, 2791), False, 'from time import sleep, time\n'), ((1703, 1709), 'time.time', 'time', ([], {}), '()\n', (1707, 1709), False, 'from time import sleep, time\n'), ((1873, 1879), 'time.time', 'time', ([], {}), '()\n', (1877, 1879), False, 'from time import sleep, time\n')]
|
import statistics

import numpy as np
from pyutai import trees
from potentials import cluster
def cpd_size(cpd):
return np.prod(cpd.cardinality)
def _unique_values(cpd):
unique, _ = np.unique(cpd.values, return_counts=True)
return len(unique)
def stats(net):
if not net.endswith('.bif'):
        raise ValueError(f'Net format not supported. Expected .bif, got {net}')
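    # NOTE (editorial assumption): `read` is expected to be a project-local BIF reader
    # module; it is not imported in this snippet.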
file_ = read.read(f'networks/{net}')
model = file_.get_model()
cpds = model.get_cpds()
unique_values = statistics.mean(_unique_values(cpd) for cpd in cpds)
max_values = max(
((i, _unique_values(cpd)) for i, cpd in enumerate(cpds)),
key=lambda x: x[1])
print(
f'Net: {net}. Mean unique value: {unique_values:.2f}. Biggest cpd: {max_values}'
)
def tree_from_cpd(cpd, selector):
if selector is None:
pass
else:
selector = selector(cpd.values, cpd.variables)
cardinality_ = dict(zip(cpd.variables, cpd.cardinality))
return trees.Tree.from_array(cpd.values,
cpd.variables,
cardinality_,
selector=selector)
def cluster_from_cpd(cpd):
return cluster.Cluster.from_array(cpd.values,
cpd.variables)
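# Usage sketch (an assumption, not part of the original source): the cpd arguments above
# are expected to expose .values, .variables and .cardinality (e.g. pgmpy TabularCPD objects).
#   tree = tree_from_cpd(cpd, selector=None)
#   clu = cluster_from_cpd(cpd)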
|
[
"numpy.unique",
"pyutai.trees.Tree.from_array",
"numpy.prod",
"potentials.cluster.Cluster.from_array"
] |
[((107, 131), 'numpy.prod', 'np.prod', (['cpd.cardinality'], {}), '(cpd.cardinality)\n', (114, 131), True, 'import numpy as np\n'), ((174, 215), 'numpy.unique', 'np.unique', (['cpd.values'], {'return_counts': '(True)'}), '(cpd.values, return_counts=True)\n', (183, 215), True, 'import numpy as np\n'), ((978, 1064), 'pyutai.trees.Tree.from_array', 'trees.Tree.from_array', (['cpd.values', 'cpd.variables', 'cardinality_'], {'selector': 'selector'}), '(cpd.values, cpd.variables, cardinality_, selector=\n selector)\n', (999, 1064), False, 'from pyutai import trees\n'), ((1197, 1250), 'potentials.cluster.Cluster.from_array', 'cluster.Cluster.from_array', (['cpd.values', 'cpd.variables'], {}), '(cpd.values, cpd.variables)\n', (1223, 1250), False, 'from potentials import cluster\n')]
|
import os
import threading
from PyQt5 import QtCore
from PyQt5.QtCore import QObject
from src.Apps import Apps
from src.model.Music import Music
from src.model.MusicList import MusicList
from src.service.MP3Parser import MP3
class ScanPaths(QObject, threading.Thread):
""" 异步扫描指定目录(指配置文件)下的所有音乐文件, 并写入数据库 """
    # 1/2, 1: scan started, 2: scan finished
scan_state_change = QtCore.pyqtSignal(int)
def __init__(self):
super().__init__()
@staticmethod
def scan(slot_func):
scan = ScanPaths()
scan.scan_state_change.connect(slot_func)
scan.start()
def run(self) -> None:
self.scan_state_change.emit(1)
search_paths = list(map(lambda v: v.path, filter(lambda v: v.checked, Apps.config.scanned_paths)))
music_files = ScanPaths.__find_music_files(search_paths)
musics = ScanPaths.__get_mp3_info(music_files)
Apps.musicService.batch_insert(musics)
self.scan_state_change.emit(2)
@staticmethod
def __find_music_files(search_paths: list) -> list:
files = list()
while len(search_paths) > 0:
size = len(search_paths)
for i in range(size):
pop = search_paths.pop()
if not os.path.exists(pop):
continue
listdir = list(map(lambda v: os.path.join(pop, v), ScanPaths.__listdir(pop)))
for ld in listdir:
if os.path.isdir(ld):
search_paths.append(ld)
else:
if ScanPaths.__is_music_file(ld):
files.append(ld)
return files
@staticmethod
def __is_music_file(path):
if (path.endswith("mp3") or path.endswith("MP3")) and os.path.getsize(path) > 100 * 1024:
return True
return False
@staticmethod
def __get_mp3_info(paths: list):
musics = []
for path in paths:
try:
mp3 = MP3(path)
if mp3.ret["has-ID3V2"] and mp3.duration >= 30:
size = os.path.getsize(path)
if size < 1024 * 1024:
size = str(int(size / 1024)) + "KB"
else:
size = str(round(size / 1024 / 1024, 1)) + "MB"
title = mp3.title
if title == "":
title = os.path.basename(path)
artist = mp3.artist
if artist == "":
artist = "未知歌手"
album = mp3.album
if album == "":
album = "未知专辑"
duration = mp3.duration
music = Music()
music.mid = MusicList.DEFAULT_ID
music.path = path
music.title = title
music.artist = artist
music.album = album
music.duration = duration
music.size = size
musics.append(music)
except IndexError as e:
pass
except UnicodeDecodeError as e1:
pass
return musics
@staticmethod
def __listdir(path):
try:
return os.listdir(path)
except PermissionError as e:
print(e.strerror)
return []
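# Usage sketch (an assumption, not part of the original source): connect a slot and start
# a background scan; the signal payload is 1 when the scan starts and 2 when it finishes.
#   def on_scan_state(state: int):
#       print("scan started" if state == 1 else "scan finished")
#   ScanPaths.scan(on_scan_state)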
|
[
"PyQt5.QtCore.pyqtSignal",
"src.Apps.Apps.musicService.batch_insert",
"os.path.basename",
"os.path.isdir",
"os.path.getsize",
"os.path.exists",
"src.model.Music.Music",
"src.service.MP3Parser.MP3",
"os.path.join",
"os.listdir"
] |
[((370, 392), 'PyQt5.QtCore.pyqtSignal', 'QtCore.pyqtSignal', (['int'], {}), '(int)\n', (387, 392), False, 'from PyQt5 import QtCore\n'), ((889, 927), 'src.Apps.Apps.musicService.batch_insert', 'Apps.musicService.batch_insert', (['musics'], {}), '(musics)\n', (919, 927), False, 'from src.Apps import Apps\n'), ((3317, 3333), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (3327, 3333), False, 'import os\n'), ((1768, 1789), 'os.path.getsize', 'os.path.getsize', (['path'], {}), '(path)\n', (1783, 1789), False, 'import os\n'), ((1991, 2000), 'src.service.MP3Parser.MP3', 'MP3', (['path'], {}), '(path)\n', (1994, 2000), False, 'from src.service.MP3Parser import MP3\n'), ((1237, 1256), 'os.path.exists', 'os.path.exists', (['pop'], {}), '(pop)\n', (1251, 1256), False, 'import os\n'), ((1439, 1456), 'os.path.isdir', 'os.path.isdir', (['ld'], {}), '(ld)\n', (1452, 1456), False, 'import os\n'), ((2092, 2113), 'os.path.getsize', 'os.path.getsize', (['path'], {}), '(path)\n', (2107, 2113), False, 'import os\n'), ((2750, 2757), 'src.model.Music.Music', 'Music', ([], {}), '()\n', (2755, 2757), False, 'from src.model.Music import Music\n'), ((2422, 2444), 'os.path.basename', 'os.path.basename', (['path'], {}), '(path)\n', (2438, 2444), False, 'import os\n'), ((1332, 1352), 'os.path.join', 'os.path.join', (['pop', 'v'], {}), '(pop, v)\n', (1344, 1352), False, 'import os\n')]
|
#!/usr/bin/env python3
import argparse
import cairo
import parse_test
import subprocess
import typing
def hue_to_rgb(hue: float, lo: float) -> typing.Tuple[float, float, float]:
hue = max(0, min(1, hue))
if hue <= 1/3:
return (1 - (1-lo)*(hue-0)*3, lo + (1-lo)*(hue-0)*3, lo)
if hue <= 2/3:
return (lo, 1-(1-lo)*(hue-1/3)*3, lo + (1-lo)*(hue-1/3)*3)
return (lo + (1-lo)*(hue-2/3)*3, lo, 1-(1-lo)*(hue-2/3)*3)
def text_in_rectangle(context: cairo.Context, text: str, left: float, top: float, width: float, height: float) -> None:
extents = context.text_extents(text)
origin = (left + (width - extents.width)/2 - extents.x_bearing,
top + (height-extents.height)/2 - extents.y_bearing)
context.move_to(*origin)
context.show_text(text)
return
def render_parse(surface: cairo.Surface, parse: parse_test.TestParser,
vert_per_second: float, top_text: str, bottom_text: str) -> None:
context = cairo.Context(surface)
context.select_font_face(
"Sans", cairo.FONT_SLANT_NORMAL, cairo.FONT_WEIGHT_BOLD)
num_seats = len(parse.seats)
num_queues = len(parse.queue_to_lanes)
hor_per_track = float(36)
tick_left = float(108)
seats_left = tick_left + 9
seats_right = seats_left + hor_per_track * num_seats
vert_per_header = float(18)
htop = 0
if top_text:
top_text_extents = context.text_extents(top_text)
htop += vert_per_header
seats_orig = (seats_left, htop + 2*vert_per_header)
queues_left = seats_right + hor_per_track
queues_right = queues_left + hor_per_track * \
(parse.queue_lane_sum + (num_queues-1) * 0.1)
page_width = queues_right + hor_per_track*0.5
if top_text:
page_width = max(page_width, top_text_extents.width + 24)
queues_orig = (queues_left, seats_orig[1])
page_height = seats_orig[1] + \
(parse.max_t - parse.min_t) * vert_per_second + 1
if bottom_text:
bottom_text_extents = context.text_extents(bottom_text)
bottom_text_orig = (12 - bottom_text_extents.x_bearing,
page_height + 6 - bottom_text_extents.y_bearing)
page_height += bottom_text_extents.height + 12
page_width = max(
page_width, bottom_text_orig[0] + bottom_text_extents.x_advance)
surface.set_size(page_width, page_height)
print(
f'num_seats={num_seats}, num_queues={num_queues}, queue_lane_sum={parse.queue_lane_sum}, page_width={page_width}, page_height={page_height}')
if top_text:
text_in_rectangle(context, top_text, 0, 0, page_width, vert_per_header)
if bottom_text:
context.move_to(*bottom_text_orig)
context.show_text(bottom_text)
context.set_line_width(0.5)
    # Render the section headings
text_in_rectangle(context, "Seats", seats_left, htop,
seats_right-seats_left, vert_per_header)
text_in_rectangle(context, "Queues", queues_left, htop,
queues_right-queues_left, vert_per_header)
# get ordered list of queues
qids = sorted([qid for qid in parse.queue_to_lanes])
# Render the queue headings
qright = queues_left
qlefts: typing.Mapping[int, float] = dict()
htop += vert_per_header
for qid in qids:
hleft = qright
qlefts[qid] = qright
hwidth = hor_per_track * len(parse.queue_to_lanes[qid].seats)
qright += hwidth + hor_per_track*0.1
id_str = str(qid)
text_in_rectangle(context, id_str, hleft, htop,
hwidth, vert_per_header)
# Render the seat run fills
num_flows = 1 + parse.max_flow
for (reqid, req) in parse.requests.items():
reqid_str = f'{reqid[0]},{reqid[1]},{reqid[2]}'
stop = seats_orig[1] + vert_per_second * \
(req.real_dispatch_t-parse.min_t)
smid = seats_orig[1] + vert_per_second * (req.real_mid_t-parse.min_t)
sheight1 = vert_per_second*(req.real_mid_t-req.real_dispatch_t)
sheight2 = vert_per_second*(req.real_finish_t-req.real_mid_t)
rgb1 = hue_to_rgb(reqid[0]/num_flows, 0.80)
rgb2 = hue_to_rgb(reqid[0]/num_flows, 0.92)
context.new_path()
for (_, run) in enumerate(req.seat_runs1):
left = seats_orig[0] + run[0]*hor_per_track
width = run[1]*hor_per_track
context.rectangle(left, stop, width, sheight1)
context.set_source_rgb(*rgb1)
context.fill()
context.new_path()
for (_, run) in enumerate(req.seat_runs):
left = seats_orig[0] + run[0]*hor_per_track
width = run[1]*hor_per_track
context.rectangle(left, smid, width, sheight2)
context.set_source_rgb(*rgb2)
context.fill()
context.set_source_rgb(0, 0, 0)
# Render the rest
lastick = None
for (reqid, req) in parse.requests.items():
reqid_str = f'{reqid[0]},{reqid[1]},{reqid[2]}'
context.new_path()
stop = seats_orig[1] + vert_per_second * \
(req.real_dispatch_t-parse.min_t)
sheight = vert_per_second*(req.real_finish_t-req.real_dispatch_t)
if lastick is None or stop > lastick + 18:
et_str = str(req.real_dispatch_t-parse.min_t)
text_in_rectangle(context, et_str, 0, stop, seats_left, 0)
lastick = stop
context.move_to(tick_left, stop)
context.line_to(seats_left, stop)
# Render the seat run outlines
for (idx, run) in enumerate(req.seat_runs):
left = seats_orig[0] + run[0]*hor_per_track
width = run[1]*hor_per_track
context.rectangle(left, stop, width, sheight)
if idx == 0:
label = reqid_str
else:
label = reqid_str + chr(97+idx)
text_in_rectangle(context, label, left, stop, width, sheight)
# Render the queue entry
qleft = qlefts[req.queue] + hor_per_track * req.qlane
qtop = queues_orig[1] + vert_per_second * \
(req.virt_dispatch_t-parse.min_t)
qwidth = hor_per_track
qheight = vert_per_second*(req.virt_finish_t - req.virt_dispatch_t)
context.rectangle(qleft, qtop, qwidth, qheight)
text_in_rectangle(context, reqid_str, qleft, qtop, qwidth, qheight)
context.stroke()
eval_y = seats_orig[1] + vert_per_second*(parse.eval_t - parse.min_t)
context.move_to(hor_per_track*0.1, eval_y)
context.line_to(page_width - hor_per_track*0.1, eval_y)
context.set_source_rgb(1, 0, 0)
context.stroke()
context.show_page()
return
def git_credit() -> str:
cp1 = subprocess.run(['git', 'rev-parse', 'HEAD'],
capture_output=True, check=True, text=True)
cp2 = subprocess.run(['git', 'status', '--porcelain'],
capture_output=True, check=True, text=True)
ans = 'Rendered by github.com/MikeSpreitzer/queueset-test-viz commit ' + cp1.stdout.rstrip()
if cp2.stdout.rstrip():
ans += ' dirty'
return ans
if __name__ == '__main__':
arg_parser = argparse.ArgumentParser(
description='render queueset test log')
arg_parser.add_argument('--vert-per-sec', type=float,
default=36, help='points per second, default is 36')
arg_parser.add_argument('--top-text')
arg_parser.add_argument(
'--bottom-text', help='defaults to github reference to renderer')
arg_parser.add_argument('infile', type=argparse.FileType('rt'))
arg_parser.add_argument('outfile', type=argparse.FileType('wb'))
args = arg_parser.parse_args()
if args.bottom_text is None:
bottom_text = git_credit()
else:
bottom_text = args.bottom_text
test_parser = parse_test.TestParser()
test_parser.parse(args.infile)
surface = cairo.PDFSurface(args.outfile, 100, 100)
render_parse(surface, test_parser, args.vert_per_sec,
args.top_text, bottom_text)
surface.finish()
args.outfile.close()
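# Invocation sketch (an assumption, not part of the original source); the script and file
# names below are illustrative.
#   python render_timeline.py --vert-per-sec 36 --top-text "queueset run" test.log out.pdf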
|
[
"subprocess.run",
"argparse.ArgumentParser",
"cairo.Context",
"parse_test.TestParser",
"cairo.PDFSurface",
"argparse.FileType"
] |
[((980, 1002), 'cairo.Context', 'cairo.Context', (['surface'], {}), '(surface)\n', (993, 1002), False, 'import cairo\n'), ((6675, 6768), 'subprocess.run', 'subprocess.run', (["['git', 'rev-parse', 'HEAD']"], {'capture_output': '(True)', 'check': '(True)', 'text': '(True)'}), "(['git', 'rev-parse', 'HEAD'], capture_output=True, check=\n True, text=True)\n", (6689, 6768), False, 'import subprocess\n'), ((6799, 6896), 'subprocess.run', 'subprocess.run', (["['git', 'status', '--porcelain']"], {'capture_output': '(True)', 'check': '(True)', 'text': '(True)'}), "(['git', 'status', '--porcelain'], capture_output=True, check\n =True, text=True)\n", (6813, 6896), False, 'import subprocess\n'), ((7127, 7190), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""render queueset test log"""'}), "(description='render queueset test log')\n", (7150, 7190), False, 'import argparse\n'), ((7791, 7814), 'parse_test.TestParser', 'parse_test.TestParser', ([], {}), '()\n', (7812, 7814), False, 'import parse_test\n'), ((7864, 7904), 'cairo.PDFSurface', 'cairo.PDFSurface', (['args.outfile', '(100)', '(100)'], {}), '(args.outfile, 100, 100)\n', (7880, 7904), False, 'import cairo\n'), ((7527, 7550), 'argparse.FileType', 'argparse.FileType', (['"""rt"""'], {}), "('rt')\n", (7544, 7550), False, 'import argparse\n'), ((7596, 7619), 'argparse.FileType', 'argparse.FileType', (['"""wb"""'], {}), "('wb')\n", (7613, 7619), False, 'import argparse\n')]
|
import os
import logging
import json
import asyncio
from collections import defaultdict
import nacl
from quart import Quart, jsonify, request, websocket
from quart_cors import cors
from blockchat.utils import encryption
from blockchat.types.blockchain import Blockchain, BlockchatJSONEncoder, BlockchatJSONDecoder
from blockchat.types.blockchain import parse_node_addr
import blockchat.utils.storage as storage
numeric_level = getattr(logging, os.getenv("LOG_LEVEL", "WARNING"), "WARNING")
if not isinstance(numeric_level, int):
raise ValueError('Invalid log level: %s' % os.getenv("LOG_LEVEL"))
logging.basicConfig(level=numeric_level)
# Instantiate the Node
app = Quart(__name__)
app = cors(app, allow_origin="*")
app.json_encoder = BlockchatJSONEncoder
app.json_decoder = BlockchatJSONDecoder
# load node secret and node address from env vars
node_secret = nacl.signing.SigningKey(bytes.fromhex(os.getenv("NODE_KEY")))
node_url = os.getenv("NODE_ADDR", None)
assert node_url is not None
node_url = parse_node_addr(node_url)
node_identifier = encryption.encode_verify_key(node_secret.verify_key)
storage_backend = os.getenv("STORAGE_TYPE", "memory").lower()
if storage_backend == "firebase":
db = storage.FirebaseBlockchatStorage()
logging.warning("Using Firebase storage backend")
else:
db = storage.InMemoryBlockchatStorage()
logging.warning("Using in-memory storage backend")
# Instantiate the Blockchain
blockchain = Blockchain(db, node_url, node_secret)
monitor_tags = defaultdict(set)
monitor_chats = defaultdict(set)
@app.websocket('/transactions/ws')
async def transaction_socket():
global monitor_tags
if 'tag' not in websocket.args:
return 'Tag not specified'
tag = websocket.args.get('tag')
queue = asyncio.Queue()
monitor_tags[tag].add(queue)
await websocket.accept()
if blockchain.db.is_transaction_unconfirmed(tag):
await websocket.send('unc')
elif blockchain.db.is_transaction_confirmed(tag):
await websocket.send('mined')
try:
while True:
data = await queue.get()
await websocket.send(data)
if data == "mined":
break
finally:
monitor_tags[tag].remove(queue)
if not monitor_tags[tag]:
monitor_tags.pop(tag)
@app.websocket('/chat/ws')
async def chat_socket():
global monitor_chats
if 'sender' not in websocket.args:
return 'Sender address not specified'
sender = websocket.args.get('sender')
queue = asyncio.Queue()
monitor_chats[sender].add(queue)
logging.info("Monitoring sender %s", sender)
await websocket.accept()
try:
while True:
data = await queue.get()
await websocket.send(data)
finally:
monitor_chats[sender].remove(queue)
        if not monitor_chats[sender]:
monitor_chats.pop(sender)
async def mine_wrapper():
if blockchain.db.num_transactions() == 0:
return False
logging.info("Mining now")
# get the transactions to be added
transactions = blockchain.db.pop_transactions()
# let client know that their transaction is being mined
for transaction in transactions:
if transaction.tag in monitor_tags:
asyncio.gather(*(mtag.put('mining') for mtag in monitor_tags[transaction.tag]))
# ensure chain is the best before mining
blockchain.resolve_conflicts()
last_block = blockchain.last_block
# add a "mine" transaction
blockchain.new_transaction(node_identifier, node_identifier, "<<MINE>>",
self_sign=True, add_to=transactions)
# We run the proof of work algorithm to get the next proof...
proof = blockchain.proof_of_work(last_block, transactions)
# Forge the new Block by adding it to the chain
previous_hash = blockchain.hash(last_block)
block = blockchain.new_block(proof, previous_hash, transactions, last_block)
for transaction in transactions:
if transaction.tag in monitor_tags:
asyncio.gather(*(mtag.put('mined') for mtag in monitor_tags[transaction.tag]))
logging.info("Mined")
return block
@app.route('/block/mine', methods=['GET'])
async def mine():
block = await mine_wrapper()
if not block:
return "Nothing to mine", 200
response = {
'message': "New Block Forged",
'index': block['index'],
'transactions': block['transactions'],
'proof': block['proof'],
'previous_hash': block['previous_hash'],
}
return jsonify(response), 200
@app.route('/chat/messages', methods=['GET'])
async def get_messages():
if not 'user_key' in request.args:
return 'User public key missing', 400
user_key = request.args.get('user_key').strip()
if not user_key:
return 'Invalid user public key', 400
txs = blockchain.db.get_user_messages(user_key)
num_txs = len(txs)
response = {
'transactions': txs,
'length': num_txs
}
return jsonify(response), 200
@app.route('/transactions/new', methods=['POST'])
async def new_transaction():
values = await request.get_json()
# Check that the required fields are in the POST'ed data
required_values = ['sender', 'recipient', 'message', 'signature']
if not all(k in values for k in required_values):
return 'Missing values', 400
# Create a new Transaction
transaction, tag = blockchain.new_transaction(values['sender'], values['recipient'],
values['message'], values['signature'])
if not tag:
return "Cannot verify transaction", 400
if transaction.receiver in monitor_chats:
json_dump = json.dumps(transaction.to_dict())
await asyncio.gather(*(mchat.put(json_dump) for mchat in
monitor_chats[transaction.receiver]))
response = {'message': 'Transaction will be added to the next block.',
'tag': tag}
return jsonify(response), 201
@app.route('/transactions/is_unconfirmed', methods=['GET'])
async def check_transaction_unconfirmed():
if 'tag' not in request.args:
return 'Missing tag in parameters', 400
tag = request.args.get('tag')
unconfirmed = blockchain.db.is_transaction_unconfirmed(tag)
return jsonify({"unconfirmed": unconfirmed}), 201
@app.route('/transactions/is_confirmed', methods=['GET'])
async def check_transaction_confirmed():
if 'tag' not in request.args:
return 'Missing tag in parameters', 400
tag = request.args.get('tag')
confirmed = blockchain.db.is_transaction_confirmed(tag)
return jsonify({"confirmed": confirmed}), 201
@app.route('/chain/get', methods=['GET'])
async def full_chain():
chain = blockchain.db.chain
response = {
'chain': chain,
'length': chain[-1]["index"]
}
return jsonify(response), 200
@app.route('/chain/length', methods=['GET'])
async def chain_length():
response = {
'length': len(blockchain),
}
return jsonify(response), 200
@app.route('/block/add', methods=['POST'])
async def add_block():
values = await request.get_json()
block_to_add = values.get('block')
# try to add block
success = blockchain.add_block(block_to_add)
if success:
return jsonify({
"message": "Block added successfully"}), 200
return "Error: Invalid block", 400
@app.route('/nodes/register', methods=['POST'])
async def register_nodes():
values = await request.get_json()
nodes = values.get('nodes')
if nodes is None:
return "Error: Please supply a valid list of nodes", 400
for node in nodes:
blockchain.register_node(node)
replaced = blockchain.resolve_conflicts()
response = {
'message': 'New nodes have been added',
'total_nodes': list(blockchain.get_nodes()),
'chain_replaced': replaced
}
return jsonify(response), 201
@app.route('/nodes/resolve', methods=['GET'])
async def consensus():
replaced = blockchain.resolve_conflicts()
if replaced:
response = {
'message': 'Our chain was replaced'
}
else:
response = {
'message': 'Our chain is authoritative'
}
return jsonify(response), 200
# schedule the mine job to run periodically (every 10 seconds, see mine_job below)
@app.before_first_request
async def mine_job_req():
asyncio.create_task(mine_job())
async def mine_job():
while True:
await asyncio.sleep(10)
await mine_wrapper()
if __name__ == '__main__':
from argparse import ArgumentParser
parser = ArgumentParser()
parser.add_argument('-p', '--port', default=5000, type=int, help='port to listen on')
args = parser.parse_args()
port = args.port
app.run(host='0.0.0.0', port=port, threaded=False, processes=1)
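# Client sketch (an assumption, not part of the original source): posting a signed
# transaction to a running node; the field values are placeholders.
#   import requests
#   requests.post("http://localhost:5000/transactions/new",
#                 json={"sender": "<sender verify key>",
#                       "recipient": "<recipient verify key>",
#                       "message": "hello",
#                       "signature": "<hex signature>"})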
|
[
"argparse.ArgumentParser",
"quart.websocket.args.get",
"collections.defaultdict",
"quart.Quart",
"quart.websocket.accept",
"blockchat.utils.storage.FirebaseBlockchatStorage",
"blockchat.types.blockchain.parse_node_addr",
"blockchat.types.blockchain.Blockchain",
"quart.request.args.get",
"logging.warning",
"quart_cors.cors",
"asyncio.sleep",
"blockchat.utils.encryption.encode_verify_key",
"blockchat.utils.storage.InMemoryBlockchatStorage",
"quart.request.get_json",
"os.getenv",
"quart.websocket.send",
"logging.basicConfig",
"quart.jsonify",
"logging.info",
"asyncio.Queue"
] |
[((603, 643), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'numeric_level'}), '(level=numeric_level)\n', (622, 643), False, 'import logging\n'), ((674, 689), 'quart.Quart', 'Quart', (['__name__'], {}), '(__name__)\n', (679, 689), False, 'from quart import Quart, jsonify, request, websocket\n'), ((696, 723), 'quart_cors.cors', 'cors', (['app'], {'allow_origin': '"""*"""'}), "(app, allow_origin='*')\n", (700, 723), False, 'from quart_cors import cors\n'), ((943, 971), 'os.getenv', 'os.getenv', (['"""NODE_ADDR"""', 'None'], {}), "('NODE_ADDR', None)\n", (952, 971), False, 'import os\n'), ((1011, 1036), 'blockchat.types.blockchain.parse_node_addr', 'parse_node_addr', (['node_url'], {}), '(node_url)\n', (1026, 1036), False, 'from blockchat.types.blockchain import parse_node_addr\n'), ((1055, 1107), 'blockchat.utils.encryption.encode_verify_key', 'encryption.encode_verify_key', (['node_secret.verify_key'], {}), '(node_secret.verify_key)\n', (1083, 1107), False, 'from blockchat.utils import encryption\n'), ((1451, 1488), 'blockchat.types.blockchain.Blockchain', 'Blockchain', (['db', 'node_url', 'node_secret'], {}), '(db, node_url, node_secret)\n', (1461, 1488), False, 'from blockchat.types.blockchain import Blockchain, BlockchatJSONEncoder, BlockchatJSONDecoder\n'), ((1505, 1521), 'collections.defaultdict', 'defaultdict', (['set'], {}), '(set)\n', (1516, 1521), False, 'from collections import defaultdict\n'), ((1538, 1554), 'collections.defaultdict', 'defaultdict', (['set'], {}), '(set)\n', (1549, 1554), False, 'from collections import defaultdict\n'), ((447, 480), 'os.getenv', 'os.getenv', (['"""LOG_LEVEL"""', '"""WARNING"""'], {}), "('LOG_LEVEL', 'WARNING')\n", (456, 480), False, 'import os\n'), ((1214, 1248), 'blockchat.utils.storage.FirebaseBlockchatStorage', 'storage.FirebaseBlockchatStorage', ([], {}), '()\n', (1246, 1248), True, 'import blockchat.utils.storage as storage\n'), ((1253, 1302), 'logging.warning', 'logging.warning', (['"""Using Firebase storage backend"""'], {}), "('Using Firebase storage backend')\n", (1268, 1302), False, 'import logging\n'), ((1318, 1352), 'blockchat.utils.storage.InMemoryBlockchatStorage', 'storage.InMemoryBlockchatStorage', ([], {}), '()\n', (1350, 1352), True, 'import blockchat.utils.storage as storage\n'), ((1357, 1407), 'logging.warning', 'logging.warning', (['"""Using in-memory storage backend"""'], {}), "('Using in-memory storage backend')\n", (1372, 1407), False, 'import logging\n'), ((1728, 1753), 'quart.websocket.args.get', 'websocket.args.get', (['"""tag"""'], {}), "('tag')\n", (1746, 1753), False, 'from quart import Quart, jsonify, request, websocket\n'), ((1766, 1781), 'asyncio.Queue', 'asyncio.Queue', ([], {}), '()\n', (1779, 1781), False, 'import asyncio\n'), ((2482, 2510), 'quart.websocket.args.get', 'websocket.args.get', (['"""sender"""'], {}), "('sender')\n", (2500, 2510), False, 'from quart import Quart, jsonify, request, websocket\n'), ((2523, 2538), 'asyncio.Queue', 'asyncio.Queue', ([], {}), '()\n', (2536, 2538), False, 'import asyncio\n'), ((2580, 2624), 'logging.info', 'logging.info', (['"""Monitoring sender %s"""', 'sender'], {}), "('Monitoring sender %s', sender)\n", (2592, 2624), False, 'import logging\n'), ((2989, 3015), 'logging.info', 'logging.info', (['"""Mining now"""'], {}), "('Mining now')\n", (3001, 3015), False, 'import logging\n'), ((4127, 4148), 'logging.info', 'logging.info', (['"""Mined"""'], {}), "('Mined')\n", (4139, 4148), False, 'import logging\n'), ((6228, 6251), 'quart.request.args.get', 
'request.args.get', (['"""tag"""'], {}), "('tag')\n", (6244, 6251), False, 'from quart import Quart, jsonify, request, websocket\n'), ((6562, 6585), 'quart.request.args.get', 'request.args.get', (['"""tag"""'], {}), "('tag')\n", (6578, 6585), False, 'from quart import Quart, jsonify, request, websocket\n'), ((8619, 8635), 'argparse.ArgumentParser', 'ArgumentParser', ([], {}), '()\n', (8633, 8635), False, 'from argparse import ArgumentParser\n'), ((908, 929), 'os.getenv', 'os.getenv', (['"""NODE_KEY"""'], {}), "('NODE_KEY')\n", (917, 929), False, 'import os\n'), ((1127, 1162), 'os.getenv', 'os.getenv', (['"""STORAGE_TYPE"""', '"""memory"""'], {}), "('STORAGE_TYPE', 'memory')\n", (1136, 1162), False, 'import os\n'), ((1825, 1843), 'quart.websocket.accept', 'websocket.accept', ([], {}), '()\n', (1841, 1843), False, 'from quart import Quart, jsonify, request, websocket\n'), ((2635, 2653), 'quart.websocket.accept', 'websocket.accept', ([], {}), '()\n', (2651, 2653), False, 'from quart import Quart, jsonify, request, websocket\n'), ((4554, 4571), 'quart.jsonify', 'jsonify', (['response'], {}), '(response)\n', (4561, 4571), False, 'from quart import Quart, jsonify, request, websocket\n'), ((5020, 5037), 'quart.jsonify', 'jsonify', (['response'], {}), '(response)\n', (5027, 5037), False, 'from quart import Quart, jsonify, request, websocket\n'), ((5142, 5160), 'quart.request.get_json', 'request.get_json', ([], {}), '()\n', (5158, 5160), False, 'from quart import Quart, jsonify, request, websocket\n'), ((6009, 6026), 'quart.jsonify', 'jsonify', (['response'], {}), '(response)\n', (6016, 6026), False, 'from quart import Quart, jsonify, request, websocket\n'), ((6327, 6364), 'quart.jsonify', 'jsonify', (["{'unconfirmed': unconfirmed}"], {}), "({'unconfirmed': unconfirmed})\n", (6334, 6364), False, 'from quart import Quart, jsonify, request, websocket\n'), ((6657, 6690), 'quart.jsonify', 'jsonify', (["{'confirmed': confirmed}"], {}), "({'confirmed': confirmed})\n", (6664, 6690), False, 'from quart import Quart, jsonify, request, websocket\n'), ((6890, 6907), 'quart.jsonify', 'jsonify', (['response'], {}), '(response)\n', (6897, 6907), False, 'from quart import Quart, jsonify, request, websocket\n'), ((7054, 7071), 'quart.jsonify', 'jsonify', (['response'], {}), '(response)\n', (7061, 7071), False, 'from quart import Quart, jsonify, request, websocket\n'), ((7163, 7181), 'quart.request.get_json', 'request.get_json', ([], {}), '()\n', (7179, 7181), False, 'from quart import Quart, jsonify, request, websocket\n'), ((7528, 7546), 'quart.request.get_json', 'request.get_json', ([], {}), '()\n', (7544, 7546), False, 'from quart import Quart, jsonify, request, websocket\n'), ((7948, 7965), 'quart.jsonify', 'jsonify', (['response'], {}), '(response)\n', (7955, 7965), False, 'from quart import Quart, jsonify, request, websocket\n'), ((8290, 8307), 'quart.jsonify', 'jsonify', (['response'], {}), '(response)\n', (8297, 8307), False, 'from quart import Quart, jsonify, request, websocket\n'), ((579, 601), 'os.getenv', 'os.getenv', (['"""LOG_LEVEL"""'], {}), "('LOG_LEVEL')\n", (588, 601), False, 'import os\n'), ((1912, 1933), 'quart.websocket.send', 'websocket.send', (['"""unc"""'], {}), "('unc')\n", (1926, 1933), False, 'from quart import Quart, jsonify, request, websocket\n'), ((4750, 4778), 'quart.request.args.get', 'request.args.get', (['"""user_key"""'], {}), "('user_key')\n", (4766, 4778), False, 'from quart import Quart, jsonify, request, websocket\n'), ((7326, 7374), 'quart.jsonify', 'jsonify', 
(["{'message': 'Block added successfully'}"], {}), "({'message': 'Block added successfully'})\n", (7333, 7374), False, 'from quart import Quart, jsonify, request, websocket\n'), ((8490, 8507), 'asyncio.sleep', 'asyncio.sleep', (['(10)'], {}), '(10)\n', (8503, 8507), False, 'import asyncio\n'), ((2002, 2025), 'quart.websocket.send', 'websocket.send', (['"""mined"""'], {}), "('mined')\n", (2016, 2025), False, 'from quart import Quart, jsonify, request, websocket\n'), ((2110, 2130), 'quart.websocket.send', 'websocket.send', (['data'], {}), '(data)\n', (2124, 2130), False, 'from quart import Quart, jsonify, request, websocket\n'), ((2738, 2758), 'quart.websocket.send', 'websocket.send', (['data'], {}), '(data)\n', (2752, 2758), False, 'from quart import Quart, jsonify, request, websocket\n')]
|
from reinforcement.agents.td_agent import TDAgent
from reinforcement.models.q_regression_model import QRegressionModel
from reinforcement.policies.e_greedy_policies import NormalEpsilonGreedyPolicy
from reinforcement.reward_functions.q_neuronal import QNeuronal
from unityagents import UnityEnvironment
import tensorflow as tf
from unity_session import UnitySession
UNITY_BINARY = "../environment-builds/RollerBall/RollerBall.exe"
TRAIN_MODE = True
MEMORY_SIZE = 10
LEARNING_RATE = 0.01
ALPHA = 0.2
GAMMA = 0.9
N = 10
START_EPS = 1
TOTAL_EPISODES = 1000
if __name__ == '__main__':
with UnityEnvironment(file_name=UNITY_BINARY) as env, tf.Session():
default_brain = env.brain_names[0]
model = QRegressionModel(4, [100], LEARNING_RATE)
Q = QNeuronal(model, MEMORY_SIZE)
episode = 0
policy = NormalEpsilonGreedyPolicy(lambda: START_EPS / (episode + 1))
agent = TDAgent(policy, Q, N, GAMMA, ALPHA)
sess = UnitySession(env, agent, brain=default_brain, train_mode=TRAIN_MODE)
for e in range(TOTAL_EPISODES):
episode = e
sess.run()
print("Episode {} finished.".format(episode))
|
[
"reinforcement.policies.e_greedy_policies.NormalEpsilonGreedyPolicy",
"tensorflow.Session",
"reinforcement.models.q_regression_model.QRegressionModel",
"reinforcement.agents.td_agent.TDAgent",
"unity_session.UnitySession",
"reinforcement.reward_functions.q_neuronal.QNeuronal",
"unityagents.UnityEnvironment"
] |
[((594, 634), 'unityagents.UnityEnvironment', 'UnityEnvironment', ([], {'file_name': 'UNITY_BINARY'}), '(file_name=UNITY_BINARY)\n', (610, 634), False, 'from unityagents import UnityEnvironment\n'), ((643, 655), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (653, 655), True, 'import tensorflow as tf\n'), ((716, 757), 'reinforcement.models.q_regression_model.QRegressionModel', 'QRegressionModel', (['(4)', '[100]', 'LEARNING_RATE'], {}), '(4, [100], LEARNING_RATE)\n', (732, 757), False, 'from reinforcement.models.q_regression_model import QRegressionModel\n'), ((770, 799), 'reinforcement.reward_functions.q_neuronal.QNeuronal', 'QNeuronal', (['model', 'MEMORY_SIZE'], {}), '(model, MEMORY_SIZE)\n', (779, 799), False, 'from reinforcement.reward_functions.q_neuronal import QNeuronal\n'), ((837, 898), 'reinforcement.policies.e_greedy_policies.NormalEpsilonGreedyPolicy', 'NormalEpsilonGreedyPolicy', (['(lambda : START_EPS / (episode + 1))'], {}), '(lambda : START_EPS / (episode + 1))\n', (862, 898), False, 'from reinforcement.policies.e_greedy_policies import NormalEpsilonGreedyPolicy\n'), ((914, 949), 'reinforcement.agents.td_agent.TDAgent', 'TDAgent', (['policy', 'Q', 'N', 'GAMMA', 'ALPHA'], {}), '(policy, Q, N, GAMMA, ALPHA)\n', (921, 949), False, 'from reinforcement.agents.td_agent import TDAgent\n'), ((965, 1033), 'unity_session.UnitySession', 'UnitySession', (['env', 'agent'], {'brain': 'default_brain', 'train_mode': 'TRAIN_MODE'}), '(env, agent, brain=default_brain, train_mode=TRAIN_MODE)\n', (977, 1033), False, 'from unity_session import UnitySession\n')]
|
import torch
import torch.nn as nn
from .layer import *
##### U^2-Net ####
class U2NET(nn.Module):
'''
    See the U^2-Net paper for details (link in the md)
'''
def __init__(self, in_channels=1, out_channels=3):
super(U2NET, self).__init__()
self.stage1 = RSU7(in_channels, 32, 64)
self.pool12 = nn.MaxPool2d(2, stride=2, ceil_mode=True)
self.stage2 = RSU6(64, 32, 128)
self.pool23 = nn.MaxPool2d(2, stride=2, ceil_mode=True)
self.stage3 = RSU5(128, 64, 256)
self.pool34 = nn.MaxPool2d(2, stride=2, ceil_mode=True)
self.stage4 = RSU4(256, 128, 512)
self.pool45 = nn.MaxPool2d(2, stride=2, ceil_mode=True)
self.stage5 = RSU4F(512, 256, 512)
self.pool56 = nn.MaxPool2d(2, stride=2, ceil_mode=True)
self.stage6 = RSU4F(512, 256, 512)
# decoder
self.stage5d = RSU4F(1024, 256, 512)
self.stage4d = RSU4(1024, 128, 256)
self.stage3d = RSU5(512, 64, 128)
self.stage2d = RSU6(256, 32, 64)
self.stage1d = RSU7(128, 16, 64)
self.side1 = nn.Conv2d(64, out_channels, 3, padding=1)
self.side2 = nn.Conv2d(64, out_channels, 3, padding=1)
self.side3 = nn.Conv2d(128, out_channels, 3, padding=1)
self.side4 = nn.Conv2d(256, out_channels, 3, padding=1)
self.side5 = nn.Conv2d(512, out_channels, 3, padding=1)
self.side6 = nn.Conv2d(512, out_channels, 3, padding=1)
self.outconv = nn.Conv2d(6*out_channels, out_channels, 1)
def forward(self, x):
hx = x
# stage 1
hx1 = self.stage1(hx)
hx = self.pool12(hx1)
# stage 2
hx2 = self.stage2(hx)
hx = self.pool23(hx2)
# stage 3
hx3 = self.stage3(hx)
hx = self.pool34(hx3)
# stage 4
hx4 = self.stage4(hx)
hx = self.pool45(hx4)
# stage 5
hx5 = self.stage5(hx)
hx = self.pool56(hx5)
# stage 6
hx6 = self.stage6(hx)
hx6up = upsample_like(hx6, hx5)
# -------------------- decoder --------------------
hx5d = self.stage5d(torch.cat((hx6up, hx5), 1))
hx5dup = upsample_like(hx5d, hx4)
hx4d = self.stage4d(torch.cat((hx5dup, hx4), 1))
hx4dup = upsample_like(hx4d, hx3)
hx3d = self.stage3d(torch.cat((hx4dup, hx3), 1))
hx3dup = upsample_like(hx3d, hx2)
hx2d = self.stage2d(torch.cat((hx3dup, hx2), 1))
hx2dup = upsample_like(hx2d, hx1)
hx1d = self.stage1d(torch.cat((hx2dup, hx1), 1))
# side output
d1 = self.side1(hx1d)
d2 = self.side2(hx2d)
d2 = upsample_like(d2, d1)
d3 = self.side3(hx3d)
d3 = upsample_like(d3, d1)
d4 = self.side4(hx4d)
d4 = upsample_like(d4, d1)
d5 = self.side5(hx5d)
d5 = upsample_like(d5, d1)
d6 = self.side6(hx6)
d6 = upsample_like(d6, d1)
d0 = self.outconv(torch.cat((d1, d2, d3, d4, d5, d6), 1))
return d0, d1, d2, d3, d4, d5, d6
### U^2-Net small ###
class U2NETP(nn.Module):
def __init__(self, in_channels=1, out_channels=3):
super(U2NETP, self).__init__()
self.stage1 = RSU7(in_channels, 16, 64)
self.pool12 = nn.MaxPool2d(2, stride=2, ceil_mode=True)
self.stage2 = RSU6(64, 16, 64)
self.pool23 = nn.MaxPool2d(2, stride=2, ceil_mode=True)
self.stage3 = RSU5(64, 16, 64)
self.pool34 = nn.MaxPool2d(2, stride=2, ceil_mode=True)
self.stage4 = RSU4(64, 16, 64)
self.pool45 = nn.MaxPool2d(2, stride=2, ceil_mode=True)
self.stage5 = RSU4F(64, 16, 64)
self.pool56 = nn.MaxPool2d(2, stride=2, ceil_mode=True)
self.stage6 = RSU4F(64, 16, 64)
# decoder
self.stage5d = RSU4F(128, 16, 64)
self.stage4d = RSU4(128, 16, 64)
self.stage3d = RSU5(128, 16, 64)
self.stage2d = RSU6(128, 16, 64)
self.stage1d = RSU7(128, 16, 64)
self.side1 = nn.Conv2d(64, out_channels, 3, padding=1)
self.side2 = nn.Conv2d(64, out_channels, 3, padding=1)
self.side3 = nn.Conv2d(64, out_channels, 3, padding=1)
self.side4 = nn.Conv2d(64, out_channels, 3, padding=1)
self.side5 = nn.Conv2d(64, out_channels, 3, padding=1)
self.side6 = nn.Conv2d(64, out_channels, 3, padding=1)
self.outconv = nn.Conv2d(6*out_channels, out_channels, 1)
def forward(self, x):
hx = x
# stage 1
hx1 = self.stage1(hx)
hx = self.pool12(hx1)
# stage 2
hx2 = self.stage2(hx)
hx = self.pool23(hx2)
# stage 3
hx3 = self.stage3(hx)
hx = self.pool34(hx3)
# stage 4
hx4 = self.stage4(hx)
hx = self.pool45(hx4)
# stage 5
hx5 = self.stage5(hx)
hx = self.pool56(hx5)
# stage 6
hx6 = self.stage6(hx)
hx6up = upsample_like(hx6, hx5)
# decoder
hx5d = self.stage5d(torch.cat((hx6up, hx5), 1))
hx5dup = upsample_like(hx5d, hx4)
hx4d = self.stage4d(torch.cat((hx5dup, hx4), 1))
hx4dup = upsample_like(hx4d, hx3)
hx3d = self.stage3d(torch.cat((hx4dup, hx3), 1))
hx3dup = upsample_like(hx3d, hx2)
hx2d = self.stage2d(torch.cat((hx3dup, hx2), 1))
hx2dup = upsample_like(hx2d, hx1)
hx1d = self.stage1d(torch.cat((hx2dup, hx1), 1))
# side output
d1 = self.side1(hx1d)
d2 = self.side2(hx2d)
d2 = upsample_like(d2, d1)
d3 = self.side3(hx3d)
d3 = upsample_like(d3, d1)
d4 = self.side4(hx4d)
d4 = upsample_like(d4, d1)
d5 = self.side5(hx5d)
d5 = upsample_like(d5, d1)
d6 = self.side6(hx6)
d6 = upsample_like(d6, d1)
d0 = self.outconv(torch.cat((d1, d2, d3, d4, d5, d6), 1))
return d0, d1, d2, d3, d4, d5, d6
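# Shape-check sketch (an assumption, not part of the original source): both networks return
# the fused map d0 plus the six side outputs, all at the input resolution.
#   net = U2NETP(in_channels=1, out_channels=3)
#   d0, d1, d2, d3, d4, d5, d6 = net(torch.rand(1, 1, 320, 320))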
|
[
"torch.nn.MaxPool2d",
"torch.nn.Conv2d",
"torch.cat"
] |
[((305, 346), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)'], {'stride': '(2)', 'ceil_mode': '(True)'}), '(2, stride=2, ceil_mode=True)\n', (317, 346), True, 'import torch.nn as nn\n'), ((410, 451), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)'], {'stride': '(2)', 'ceil_mode': '(True)'}), '(2, stride=2, ceil_mode=True)\n', (422, 451), True, 'import torch.nn as nn\n'), ((516, 557), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)'], {'stride': '(2)', 'ceil_mode': '(True)'}), '(2, stride=2, ceil_mode=True)\n', (528, 557), True, 'import torch.nn as nn\n'), ((623, 664), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)'], {'stride': '(2)', 'ceil_mode': '(True)'}), '(2, stride=2, ceil_mode=True)\n', (635, 664), True, 'import torch.nn as nn\n'), ((731, 772), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)'], {'stride': '(2)', 'ceil_mode': '(True)'}), '(2, stride=2, ceil_mode=True)\n', (743, 772), True, 'import torch.nn as nn\n'), ((1071, 1112), 'torch.nn.Conv2d', 'nn.Conv2d', (['(64)', 'out_channels', '(3)'], {'padding': '(1)'}), '(64, out_channels, 3, padding=1)\n', (1080, 1112), True, 'import torch.nn as nn\n'), ((1134, 1175), 'torch.nn.Conv2d', 'nn.Conv2d', (['(64)', 'out_channels', '(3)'], {'padding': '(1)'}), '(64, out_channels, 3, padding=1)\n', (1143, 1175), True, 'import torch.nn as nn\n'), ((1197, 1239), 'torch.nn.Conv2d', 'nn.Conv2d', (['(128)', 'out_channels', '(3)'], {'padding': '(1)'}), '(128, out_channels, 3, padding=1)\n', (1206, 1239), True, 'import torch.nn as nn\n'), ((1261, 1303), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256)', 'out_channels', '(3)'], {'padding': '(1)'}), '(256, out_channels, 3, padding=1)\n', (1270, 1303), True, 'import torch.nn as nn\n'), ((1325, 1367), 'torch.nn.Conv2d', 'nn.Conv2d', (['(512)', 'out_channels', '(3)'], {'padding': '(1)'}), '(512, out_channels, 3, padding=1)\n', (1334, 1367), True, 'import torch.nn as nn\n'), ((1389, 1431), 'torch.nn.Conv2d', 'nn.Conv2d', (['(512)', 'out_channels', '(3)'], {'padding': '(1)'}), '(512, out_channels, 3, padding=1)\n', (1398, 1431), True, 'import torch.nn as nn\n'), ((1456, 1500), 'torch.nn.Conv2d', 'nn.Conv2d', (['(6 * out_channels)', 'out_channels', '(1)'], {}), '(6 * out_channels, out_channels, 1)\n', (1465, 1500), True, 'import torch.nn as nn\n'), ((3251, 3292), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)'], {'stride': '(2)', 'ceil_mode': '(True)'}), '(2, stride=2, ceil_mode=True)\n', (3263, 3292), True, 'import torch.nn as nn\n'), ((3355, 3396), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)'], {'stride': '(2)', 'ceil_mode': '(True)'}), '(2, stride=2, ceil_mode=True)\n', (3367, 3396), True, 'import torch.nn as nn\n'), ((3459, 3500), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)'], {'stride': '(2)', 'ceil_mode': '(True)'}), '(2, stride=2, ceil_mode=True)\n', (3471, 3500), True, 'import torch.nn as nn\n'), ((3563, 3604), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)'], {'stride': '(2)', 'ceil_mode': '(True)'}), '(2, stride=2, ceil_mode=True)\n', (3575, 3604), True, 'import torch.nn as nn\n'), ((3668, 3709), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)'], {'stride': '(2)', 'ceil_mode': '(True)'}), '(2, stride=2, ceil_mode=True)\n', (3680, 3709), True, 'import torch.nn as nn\n'), ((3998, 4039), 'torch.nn.Conv2d', 'nn.Conv2d', (['(64)', 'out_channels', '(3)'], {'padding': '(1)'}), '(64, out_channels, 3, padding=1)\n', (4007, 4039), True, 'import torch.nn as nn\n'), ((4061, 4102), 'torch.nn.Conv2d', 'nn.Conv2d', (['(64)', 'out_channels', '(3)'], {'padding': '(1)'}), '(64, out_channels, 3, padding=1)\n', (4070, 4102), True, 'import 
torch.nn as nn\n'), ((4124, 4165), 'torch.nn.Conv2d', 'nn.Conv2d', (['(64)', 'out_channels', '(3)'], {'padding': '(1)'}), '(64, out_channels, 3, padding=1)\n', (4133, 4165), True, 'import torch.nn as nn\n'), ((4187, 4228), 'torch.nn.Conv2d', 'nn.Conv2d', (['(64)', 'out_channels', '(3)'], {'padding': '(1)'}), '(64, out_channels, 3, padding=1)\n', (4196, 4228), True, 'import torch.nn as nn\n'), ((4250, 4291), 'torch.nn.Conv2d', 'nn.Conv2d', (['(64)', 'out_channels', '(3)'], {'padding': '(1)'}), '(64, out_channels, 3, padding=1)\n', (4259, 4291), True, 'import torch.nn as nn\n'), ((4313, 4354), 'torch.nn.Conv2d', 'nn.Conv2d', (['(64)', 'out_channels', '(3)'], {'padding': '(1)'}), '(64, out_channels, 3, padding=1)\n', (4322, 4354), True, 'import torch.nn as nn\n'), ((4379, 4423), 'torch.nn.Conv2d', 'nn.Conv2d', (['(6 * out_channels)', 'out_channels', '(1)'], {}), '(6 * out_channels, out_channels, 1)\n', (4388, 4423), True, 'import torch.nn as nn\n'), ((2115, 2141), 'torch.cat', 'torch.cat', (['(hx6up, hx5)', '(1)'], {}), '((hx6up, hx5), 1)\n', (2124, 2141), False, 'import torch\n'), ((2214, 2241), 'torch.cat', 'torch.cat', (['(hx5dup, hx4)', '(1)'], {}), '((hx5dup, hx4), 1)\n', (2223, 2241), False, 'import torch\n'), ((2314, 2341), 'torch.cat', 'torch.cat', (['(hx4dup, hx3)', '(1)'], {}), '((hx4dup, hx3), 1)\n', (2323, 2341), False, 'import torch\n'), ((2414, 2441), 'torch.cat', 'torch.cat', (['(hx3dup, hx2)', '(1)'], {}), '((hx3dup, hx2), 1)\n', (2423, 2441), False, 'import torch\n'), ((2514, 2541), 'torch.cat', 'torch.cat', (['(hx2dup, hx1)', '(1)'], {}), '((hx2dup, hx1), 1)\n', (2523, 2541), False, 'import torch\n'), ((2952, 2990), 'torch.cat', 'torch.cat', (['(d1, d2, d3, d4, d5, d6)', '(1)'], {}), '((d1, d2, d3, d4, d5, d6), 1)\n', (2961, 2990), False, 'import torch\n'), ((4996, 5022), 'torch.cat', 'torch.cat', (['(hx6up, hx5)', '(1)'], {}), '((hx6up, hx5), 1)\n', (5005, 5022), False, 'import torch\n'), ((5095, 5122), 'torch.cat', 'torch.cat', (['(hx5dup, hx4)', '(1)'], {}), '((hx5dup, hx4), 1)\n', (5104, 5122), False, 'import torch\n'), ((5195, 5222), 'torch.cat', 'torch.cat', (['(hx4dup, hx3)', '(1)'], {}), '((hx4dup, hx3), 1)\n', (5204, 5222), False, 'import torch\n'), ((5295, 5322), 'torch.cat', 'torch.cat', (['(hx3dup, hx2)', '(1)'], {}), '((hx3dup, hx2), 1)\n', (5304, 5322), False, 'import torch\n'), ((5395, 5422), 'torch.cat', 'torch.cat', (['(hx2dup, hx1)', '(1)'], {}), '((hx2dup, hx1), 1)\n', (5404, 5422), False, 'import torch\n'), ((5833, 5871), 'torch.cat', 'torch.cat', (['(d1, d2, d3, d4, d5, d6)', '(1)'], {}), '((d1, d2, d3, d4, d5, d6), 1)\n', (5842, 5871), False, 'import torch\n')]
|
import os,sys
import datetime as dt
import numpy as np
try:
#for python 3.0 or later
from urllib.request import urlopen
except ImportError:
#Fall back to python 2 urllib2
from urllib2 import urlopen
import requests
from multiprocessing import Pool
import drms
from shutil import move
import glob
###Remove proxy server variables from Lockheed after using the proxy server to connect to the google calendar 2019/02/20 <NAME>
##os.environ.pop("http_proxy" )
##os.environ.pop("https_proxy")
class dark_times:
def __init__(self,time,
irisweb='http://iris.lmsal.com/health-safety/timeline/iris_tim_archive/{2}/IRIS_science_timeline_{0}.V{1:2d}.txt',
simpleb=False,complexa=False,tol=50):
"""
A python class used for finding and downloading IRIS dark observations. This module requires that parameters be specified in
a parameter file in this directory. The parameter file's name must be "parameter_file" and contain the three following lines:
Line1: email address registered with JSOC (e.g. <EMAIL>)
Line2: A base directory containing the level 1 IRIS dark files. The program will concatenate YYYY/MM/simpleb/ or YYYY/MM/complexa/ onto the base directory
Line3: A base directory containing the level 0 IRIS dark files. The program will concatenate simpleb/YYYY/MM/ or complexa/YYYY/MM/ onto the base directory
Example three lines below:
<EMAIL>
/data/alisdair/IRIS_LEVEL1_DARKS/
/data/alisdair/opabina/scratch/joan/iris/newdat/orbit/level0/
The program will create the level0 and level1 directories as needed.
Parameters
----------
time: str
A string containing the date the dark observations started based on the IRIS calibration-as-run calendar in YYYY/MM/DD format (e.g.
test = gdf.dark_times(time,simpleb=True))
irisweb: string, optional
A formatted text string which corresponds to the location of the IRIS timeline files
(Default = 'http://iris.lmsal.com/health-safety/timeline/iris_tim_archive/{2}/IRIS_science_timeline_{0}.V{1:2d}.txt').
The {0} character string corresponds the date of the timeline uploaded in YYYYMMDD format, while {1:2d}
corresponds to the highest number version of the timeline, which I assume is the timeline uploaded to the spacecraft.
simpleb: boolean, optional
                Whether to download simple B darks; only simple B or complex A darks can be downloaded per call (Default = False).
complexa: boolean, optional
                Whether to download complex A darks; only simple B or complex A darks can be downloaded per call (Default = False).
tol: int, optional
                The number of darks already in the directory before the program decides to download. If the existing count is greater
                than the tolerance, no new darks are downloaded; if it is less than the tolerance, the new darks are downloaded (Default = 50).
Returns
-------
None
Just downloads files and creates required directories.
"""
#web page location of IRIS timeline
self.irisweb = irisweb #.replace('IRIS',time+'/IRIS')
self.otime = dt.datetime.strptime(time,'%Y/%m/%d')
self.stime = self.otime.strftime('%Y%m%d')
#Type of dark to download simple B or complex A
self.complexa = complexa
self.simpleb = simpleb
        #Minimum number of dark files required to run
self.tol = tol
#read lines in parameter file
parU = open('parameter_file','r')
pars = parU.readlines()
parU.close()
#update parameters based on new parameter file
#get email address
self.email = pars[0].strip()
#get level 1/download base directory (without simpleb or complexa subdirectory
bdir = pars[1].strip()
#get level 0 directory
ldir = pars[2].strip()
if complexa:
self.obsid = 'OBSID=4203400000'
if simpleb:
self.obsid = 'OBSID=4202000003'
#make the download directory
if self.simpleb:
self.bdir = bdir+'/{0}/simpleB/'.format(self.otime.strftime('%Y/%m'))
self.ldir = ldir+'/simpleB/{0}/'.format(self.otime.strftime('%Y/%m'))
else:
self.bdir = bdir+'/{0}/complexA/'.format(self.otime.strftime('%Y/%m'))
self.ldir = ldir+'/complexA/{0}/'.format(self.otime.strftime('%Y/%m'))
def request_files(self):
        #First check that any timeline exists for the given day
searching = True
        sb = 0 #number of days to search backwards to correct for weekend or multiday timelines
while searching:
#look in iris's timeline structure
self.stime = (self.otime-dt.timedelta(days=sb)).strftime('%Y%m%d')
irispath = (self.otime-dt.timedelta(days=sb)).strftime('%Y/%m/%d')
            inurl = self.irisweb.format(self.stime,0,irispath).replace(' ','0') #searching for V00 file version
resp = requests.head(inurl)
#leave loop if V00 is found
if resp.status_code == 200: searching =False
else: sb += 1 #look one day back if timeline is missing
if sb >= 9:
                searching = False #don't look back more than 9 days
sys.stdout.write('FAILED, IRIS timeline does not exist')#printing this will cause the c-shell script to fail too
sys.exit(1) # exit the python script
check = True
v = 0 #timeline version
        #get latest timeline version
while check == True:
inurl = self.irisweb.format(self.stime, v,irispath).replace(' ','0')
resp = requests.head(inurl)
if resp.status_code != 200:
check = False
v+=-1
inurl = self.irisweb.format(self.stime, v,irispath).replace(' ','0')
else:
v+=1
        #get the timeline file information for the requested timeline
res = urlopen(inurl)
self.res = res
        #Need to decode the bytes response to str under Python 3 2019/01/16 <NAME>
self.timeline = res.read().decode('utf-8')
def get_start_end(self):
#lines with OBSID=obsid
self.lines = []
for line in self.timeline.split('\n'):
if self.obsid in line:
self.lines.append(line)
#get the last set of OBSIDs (useful for eclipse season)
#Query from start to end time 2019/01/02 <NAME>
self.sta_dark = self.lines[0][3:20]
self.end_dark = self.lines[-1][3:20]
self.sta_dark_dt = self.create_dt_object(self.sta_dark)
self.end_dark_dt = self.create_dt_object(self.end_dark)
self.sta_dark_dt = self.sta_dark_dt-dt.timedelta(minutes=1)
self.end_dark_dt = self.end_dark_dt+dt.timedelta(minutes=1)
#create datetime objects using doy in timeline
def create_dt_object(self,dtobj):
splt = dtobj.split(':')
obj = dt.datetime(int(splt[0]),1,1,int(splt[2]),int(splt[3]))+dt.timedelta(days=int(splt[1])-1) #convert doy to datetime obj
return obj
#set up JSOC query for darks
def dark_query(self):
#use drms module to download from JSOC (https://pypi.python.org/pypi/drms)
client = drms.Client(email=self.email,verbose=False)
fmt = '%Y.%m.%d_%H:%M'
self.qstr = 'iris.lev1[{0}_TAI-{1}_TAI][][? IMG_TYPE ~ "DARK" ?]'.format(self.sta_dark_dt.strftime(fmt),self.end_dark_dt.strftime(fmt))
self.expt = client.export(self.qstr)
        #set up string to pass to sswidl for download
### fmt = '%Y-%m-%dT%H:%M:%S'
### self.response = client.query(jsoc.Time(self.sta_dark_dt.strftime(fmt),self.end_dark_dt.strftime(fmt)),jsoc.Series('iris.lev1'),
### jsoc.Notify('<EMAIL>'),jsoc.Segment('image'))
###
self.get_darks(client)
def get_darks(self,client):
#### import time
#### wait = True
####
#### request = client.request_data(self.response)
#### waittime = 60.*5. #five minute wait to check on data completion
#### time.sleep(waittime) #
####
#### while wait:
#### stat = client.check_request(request)
#### if stat == 1:
#### temp.sleep(waittime)
#### elif stat == 0:
#### wait = False
#### elif stat > 1:
#### break #jump out of loop if you get an error
        # create the download directory if it does not already exist
if not os.path.exists(self.bdir):
os.makedirs(self.bdir)
#also make level0 directory
if not os.path.exists(self.ldir):
os.makedirs(self.ldir)
#get number of records
try:
index = np.arange(np.size(self.expt.urls.url))
            if index[-1] < self.tol: #make sure to have at least tol darks in archive before downloading
sys.stdout.write("FAILED, LESS THAN {0:2d} DARKS IN ARCHIVE".format(self.tol))
sys.exit(1)
except: #exit nicely if no records exist
sys.stdout.write("FAILED, No JSOC record exists")
sys.exit(1)
#check to see if darks are already downloaded Added 2017/03/20
#make sure the downloaded files are on the same day added 2017/12/05 (<NAME>)
if len(glob.glob(self.bdir+'/iris.lev1.{0}*.fits'.format(self.otime.strftime('%Y-%m-%d')))) < self.tol:
            #Download the data using drms (will occasionally fuss about mounted drives)
for ii in index: self.download_par(ii)
            #DRMS DOES NOT WORK IN PARALLEL
#### pool = Pool(processes=4)
#### outf = pool.map(self.download_par,index)
#### pool.close()
### self.expt.download(bdir,1,fname_from_rec=True)
#download the data
#### res = client.get_request(request,path=bdir,progress=True)
#### res.wait()
#
def download_par(self,index):
# get file from JSOC
outf = self.expt.download(self.bdir,index,fname_from_rec=True)
#format output file
fils = str(outf['download'].values[0])
fils = fils.split('/')[-1]
nout = fils[:14]+'-'+fils[14:16]+'-'+fils[16:24]+fils[26:]
        #create new file name in the same format as before
if os.path.isfile(str(outf['download'].values[0])):
move(str(outf['download'].values[0]),self.bdir+nout)
#run to completion
def run_all(self):
self.request_files()
self.get_start_end()
self.dark_query()
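
# --- Hedged usage sketch (added for illustration, not part of the original script) ---
# Assuming a valid "parameter_file" sits in the working directory and the date
# matches an entry in the IRIS calibration-as-run calendar, a complex A dark
# download could be driven with:
#
#   downloader = dark_times('2017/03/20', complexa=True)
#   downloader.run_all()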
|
[
"sys.stdout.write",
"numpy.size",
"requests.head",
"os.makedirs",
"os.path.exists",
"datetime.datetime.strptime",
"datetime.timedelta",
"drms.Client",
"urllib2.urlopen",
"sys.exit"
] |
[((3268, 3306), 'datetime.datetime.strptime', 'dt.datetime.strptime', (['time', '"""%Y/%m/%d"""'], {}), "(time, '%Y/%m/%d')\n", (3288, 3306), True, 'import datetime as dt\n'), ((6072, 6086), 'urllib2.urlopen', 'urlopen', (['inurl'], {}), '(inurl)\n', (6079, 6086), False, 'from urllib2 import urlopen\n'), ((7346, 7390), 'drms.Client', 'drms.Client', ([], {'email': 'self.email', 'verbose': '(False)'}), '(email=self.email, verbose=False)\n', (7357, 7390), False, 'import drms\n'), ((5089, 5109), 'requests.head', 'requests.head', (['inurl'], {}), '(inurl)\n', (5102, 5109), False, 'import requests\n'), ((5763, 5783), 'requests.head', 'requests.head', (['inurl'], {}), '(inurl)\n', (5776, 5783), False, 'import requests\n'), ((6824, 6847), 'datetime.timedelta', 'dt.timedelta', ([], {'minutes': '(1)'}), '(minutes=1)\n', (6836, 6847), True, 'import datetime as dt\n'), ((6892, 6915), 'datetime.timedelta', 'dt.timedelta', ([], {'minutes': '(1)'}), '(minutes=1)\n', (6904, 6915), True, 'import datetime as dt\n'), ((8611, 8636), 'os.path.exists', 'os.path.exists', (['self.bdir'], {}), '(self.bdir)\n', (8625, 8636), False, 'import os, sys\n'), ((8650, 8672), 'os.makedirs', 'os.makedirs', (['self.bdir'], {}), '(self.bdir)\n', (8661, 8672), False, 'import os, sys\n'), ((8724, 8749), 'os.path.exists', 'os.path.exists', (['self.ldir'], {}), '(self.ldir)\n', (8738, 8749), False, 'import os, sys\n'), ((8763, 8785), 'os.makedirs', 'os.makedirs', (['self.ldir'], {}), '(self.ldir)\n', (8774, 8785), False, 'import os, sys\n'), ((5371, 5427), 'sys.stdout.write', 'sys.stdout.write', (['"""FAILED, IRIS timeline does not exist"""'], {}), "('FAILED, IRIS timeline does not exist')\n", (5387, 5427), False, 'import os, sys\n'), ((5500, 5511), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (5508, 5511), False, 'import os, sys\n'), ((8861, 8888), 'numpy.size', 'np.size', (['self.expt.urls.url'], {}), '(self.expt.urls.url)\n', (8868, 8888), True, 'import numpy as np\n'), ((9105, 9116), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (9113, 9116), False, 'import os, sys\n'), ((9181, 9230), 'sys.stdout.write', 'sys.stdout.write', (['"""FAILED, No JSOC record exists"""'], {}), "('FAILED, No JSOC record exists')\n", (9197, 9230), False, 'import os, sys\n'), ((9243, 9254), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (9251, 9254), False, 'import os, sys\n'), ((4836, 4857), 'datetime.timedelta', 'dt.timedelta', ([], {'days': 'sb'}), '(days=sb)\n', (4848, 4857), True, 'import datetime as dt\n'), ((4913, 4934), 'datetime.timedelta', 'dt.timedelta', ([], {'days': 'sb'}), '(days=sb)\n', (4925, 4934), True, 'import datetime as dt\n')]
|
from .regex_patterns import *
from bs4 import BeautifulSoup
import datetime
import re
def parse(response, option):
"""
Function to extract data from html schedule
:return: Parsed html in dictionary
"""
soup = BeautifulSoup(response.content, 'html.parser')
title_blue_original = soup.find("font", {"color": "#0000FF"}).text.strip()
if option != "classes" and option != "schedule":
size = "4"
else:
size = "5"
title_black_original = soup.find("font", {"size": size}).text.strip()
title_blue_stripped = "".join(title_blue_original.split())[:-1]
date = soup.find_all('font')[-1].get_text(strip=True)
schedule = []
rows = soup.find_all('table')[0].find_all('tr', recursive=False)[1:30:2]
if option != "schedule":
schedule.append(
{'title_blue': title_blue_stripped, 'title_black': title_black_original})
else:
rowspans = {}
for block, row in enumerate(rows, 1):
daycells = row.select('> td')[1:]
daynum, rowspan_offset = 0, 0
for daynum, daycell in enumerate(daycells, 1):
daynum += rowspan_offset
while rowspans.get(daynum, 0):
rowspan_offset += 1
rowspans[daynum] -= 1
daynum += 1
rowspan = (int(daycell.get('rowspan', default=2)) // 2) - 1
if rowspan:
rowspans[daynum] = rowspan
texts = daycell.find_all('font')
if texts:
info = (item.get_text(strip=True) for item in texts)
seperated_info = get_separated_cell_info(info)
time = convert_date(date, daynum)
timetable = convert_timetable(block, block + rowspan)
schedule.append({
'abbrevation': title_blue_stripped,
'title': title_black_original,
'start_begin': timetable[0],
'start_end': timetable[1],
'start_block': block,
'end_begin': timetable[2],
'end_end': timetable[3],
'end_block': block + rowspan,
'daynum': daynum,
'day': time[0],
'date_full': time[1],
'date_year': time[1][0:4],
'date_month': time[1][5:7],
'date_day': time[1][8:10],
'info': seperated_info
})
# print(schedule)
while daynum < 5:
daynum += 1
if rowspans.get(daynum, 0):
rowspans[daynum] -= 1
if not schedule:
schedule = {}
print("Page succesfully parsed")
return schedule
def convert_date(soup_date, daynum):
"""
Function to calculate day and date based on string and daynum
:param soup_date: string containing the date of schedule page
:param daynum: int of current day
:return: tuple with current day and current date
"""
days = {
1: "Maandag",
2: "Dinsdag",
3: "Woensdag",
4: "Donderdag",
5: "Vrijdag"
}
one_day, one_month, one_year = soup_date[0:2], soup_date[3:5], soup_date[6:10]
partials = [one_day, one_month, one_year]
items = [int(i) for i in partials]
d0 = datetime.date(year=items[2], month=items[1], day=items[0])
current_day = days[daynum]
current_date = d0 + datetime.timedelta(days=daynum - 1)
return current_day, str(current_date)
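
# Hedged worked example (assumes, as the code implies, that soup_date is the
# Monday of the schedule week): with soup_date = "02-09-2019" and daynum = 3,
# convert_date returns ("Woensdag", "2019-09-04").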
def convert_timetable(start, end):
"""
Function to convert rows to time
:param start: Starting row number
:param end: Ending row number
:return: Tuple with all correct starting and ending times
"""
timetable = {
1: ("8:30", "9:20"),
2: ("9:20", "10:10"),
3: ("10:30", "11:20"),
4: ("11:20", "12:10"),
5: ("12:10", "13:00"),
6: ("13:00", "13:50"),
7: ("13:50", "14:40"),
8: ("15:00", "15:50"),
9: ("15:50", "16:40"),
10: ("17:00", "17:50"),
11: ("17:50", "18:40"),
12: ("18:40", "19:30"),
13: ("19:30", "20:20"),
14: ("20:20", "21:10"),
15: ("21:10", "22:00"),
}
start_begin = timetable[start][0]
start_end = timetable[start][1]
end_begin = timetable[end][0]
end_end = timetable[end][1]
return start_begin, start_end, end_begin, end_end
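
# Hedged worked example: convert_timetable(3, 5) returns
# ("10:30", "11:20", "12:10", "13:00"), i.e. a lesson spanning blocks 3 to 5.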
def combine_dicts(parsed_items, parsed_counters):
"""
Function to combine parsed schedule data and quarter/week-info to a single dictionary
:param parsed_items: defaultdict with nested lists containing separated dicts with crawled data per schedule
:param parsed_counters: defaultdict with nested lists containing week and quarter per schedule
:return: clean dictionary
"""
print("Starting to build final dictionary")
result = {}
empty_schedules = 0
for l1 in parsed_items:
for option, (length, l2) in parsed_counters.items():
if len(l1) == length:
for item in zip(l1, l2):
schedule = bool(item[0])
if schedule:
quarter = item[1][0]
week = item[1][1]
result.setdefault(option, {})
result[option].setdefault(quarter, {})
result[option][quarter].setdefault(week, [])
result[option][quarter][week].append(item[0])
else:
empty_schedules += 1
print("Succesfully builded final dictionary")
print("{amount} schedules were empty.".format(amount=empty_schedules))
return result
def get_separated_cell_info(cell_info):
"""
    Function to give each value in a schedule cell its own category key (e.g. lecture, teacher code, location)
:param cell_info: generator that behaves like an iterator. Cell_info can contain e.g. lecture, teacher code etc.
:return: category(key) of the reg_ex_dict and the matched value
"""
seperated_info = {}
for info in cell_info:
# data contains
# 1. a key from reg_ex_dict
# 2. the value of the result after executing regular expressions on info
data = get_category_and_result(info)
        # Some cells only have one value, for example Hemelvaartsdag; get_category_and_result won't return a match for it.
        # In that case data is None, so just save the info.
if data is None:
seperated_info["event"] = info
        # location needs to be split into building, floor and room
elif data[0] == "location":
dotSeperatedParts = data[1].split(".")
seperated_info["building"] = dotSeperatedParts[0]
seperated_info["floor"] = dotSeperatedParts[1]
seperated_info["room"] = dotSeperatedParts[2]
else:
seperated_info[data[0]] = data[1]
return seperated_info
def get_category_and_result(info):
"""
Function to get the category(key) and the matched value after executing a regular expression
:param info: info is a string
:return: category(key) of the reg_ex_dict and the matched value
"""
    # category e.g. lecture
for category in reg_ex_dict:
# pattern e.g. pattern1
for pattern in reg_ex_dict[category]:
match = re.match(pattern, info)
if match:
return category, match.group()
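
# Hedged usage sketch (schedule_url is a hypothetical address of a schedule
# page): parse() expects a requests-style response object and either the
# "schedule" option or another option such as "classes".
#
#   import requests
#   response = requests.get(schedule_url)
#   entries = parse(response, "schedule")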
|
[
"bs4.BeautifulSoup",
"datetime.date",
"re.match",
"datetime.timedelta"
] |
[((232, 278), 'bs4.BeautifulSoup', 'BeautifulSoup', (['response.content', '"""html.parser"""'], {}), "(response.content, 'html.parser')\n", (245, 278), False, 'from bs4 import BeautifulSoup\n'), ((3508, 3566), 'datetime.date', 'datetime.date', ([], {'year': 'items[2]', 'month': 'items[1]', 'day': 'items[0]'}), '(year=items[2], month=items[1], day=items[0])\n', (3521, 3566), False, 'import datetime\n'), ((3623, 3658), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(daynum - 1)'}), '(days=daynum - 1)\n', (3641, 3658), False, 'import datetime\n'), ((7490, 7513), 're.match', 're.match', (['pattern', 'info'], {}), '(pattern, info)\n', (7498, 7513), False, 'import re\n')]
|
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""MobileNet v1.
Adapted from tf.keras.applications.mobilenet.MobileNet().
MobileNet is a general architecture and can be used for multiple use cases.
Depending on the use case, it can use different input layer size and different
head (for example: embeddings, localization and classification).
As described in https://arxiv.org/abs/1704.04861.
MobileNets: Efficient Convolutional Neural Networks for
Mobile Vision Applications
<NAME>, <NAME>, <NAME>, <NAME>, <NAME>,
<NAME>, <NAME>, <NAME>
"""
import logging
import tensorflow as tf
from research.mobilenet import common_modules
from research.mobilenet.configs import archs
layers = tf.keras.layers
MobileNetV1Config = archs.MobileNetV1Config
def mobilenet_v1(config: MobileNetV1Config = MobileNetV1Config()
) -> tf.keras.models.Model:
"""Instantiates the MobileNet Model."""
model_name = config.name
input_shape = config.input_shape
img_input = layers.Input(shape=input_shape, name='Input')
# build network base
x = common_modules.mobilenet_base(img_input, config)
# build classification head
x = common_modules.mobilenet_head(x, config)
return tf.keras.models.Model(inputs=img_input,
outputs=x,
name=model_name)
if __name__ == '__main__':
logging.basicConfig(
format='%(asctime)-15s:%(levelname)s:%(module)s:%(message)s',
level=logging.INFO)
model = mobilenet_v1()
model.compile(
optimizer='adam',
loss=tf.keras.losses.categorical_crossentropy,
metrics=[tf.keras.metrics.categorical_crossentropy])
logging.info(model.summary())
|
[
"tensorflow.keras.models.Model",
"research.mobilenet.common_modules.mobilenet_base",
"logging.basicConfig",
"research.mobilenet.common_modules.mobilenet_head"
] |
[((1708, 1756), 'research.mobilenet.common_modules.mobilenet_base', 'common_modules.mobilenet_base', (['img_input', 'config'], {}), '(img_input, config)\n', (1737, 1756), False, 'from research.mobilenet import common_modules\n'), ((1794, 1834), 'research.mobilenet.common_modules.mobilenet_head', 'common_modules.mobilenet_head', (['x', 'config'], {}), '(x, config)\n', (1823, 1834), False, 'from research.mobilenet import common_modules\n'), ((1845, 1912), 'tensorflow.keras.models.Model', 'tf.keras.models.Model', ([], {'inputs': 'img_input', 'outputs': 'x', 'name': 'model_name'}), '(inputs=img_input, outputs=x, name=model_name)\n', (1866, 1912), True, 'import tensorflow as tf\n'), ((2006, 2112), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(asctime)-15s:%(levelname)s:%(module)s:%(message)s"""', 'level': 'logging.INFO'}), "(format=\n '%(asctime)-15s:%(levelname)s:%(module)s:%(message)s', level=logging.INFO)\n", (2025, 2112), False, 'import logging\n')]
|
from unittest.mock import MagicMock
import pytest
from seqal.stoppers import BudgetStopper, F1Stopper
class TestF1Stopper:
"""Test F1Stopper class"""
@pytest.mark.parametrize(
"micro,micro_score,macro,macro_score,expected",
[
(True, 16, False, 0, True),
(True, 14, False, 0, False),
(False, 0, True, 16, True),
(False, 0, True, 14, False),
],
)
def test_stop(
self,
micro: bool,
micro_score: int,
macro: bool,
macro_score: int,
expected: bool,
) -> None:
"""Test stop function"""
# Arrange
stopper = F1Stopper(goal=15)
classification_report = {
"micro avg": {"f1-score": micro_score},
"macro avg": {"f1-score": macro_score},
}
result = MagicMock(classification_report=classification_report)
# Act
decision = stopper.stop(result, micro=micro, macro=macro)
# Assert
assert decision == expected
class TestBudgetStopper:
"""Test BudgetStopper class"""
@pytest.mark.parametrize("unit_count,expected", [(10, False), (20, True)])
def test_stop(self, unit_count: int, expected: bool) -> None:
"""Test stop function"""
# Arrange
stopper = BudgetStopper(goal=15, unit_price=1)
# Act
decision = stopper.stop(unit_count)
# Assert
assert decision == expected
|
[
"seqal.stoppers.F1Stopper",
"pytest.mark.parametrize",
"seqal.stoppers.BudgetStopper",
"unittest.mock.MagicMock"
] |
[((164, 360), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""micro,micro_score,macro,macro_score,expected"""', '[(True, 16, False, 0, True), (True, 14, False, 0, False), (False, 0, True, \n 16, True), (False, 0, True, 14, False)]'], {}), "('micro,micro_score,macro,macro_score,expected', [(\n True, 16, False, 0, True), (True, 14, False, 0, False), (False, 0, True,\n 16, True), (False, 0, True, 14, False)])\n", (187, 360), False, 'import pytest\n'), ((1111, 1184), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""unit_count,expected"""', '[(10, False), (20, True)]'], {}), "('unit_count,expected', [(10, False), (20, True)])\n", (1134, 1184), False, 'import pytest\n'), ((669, 687), 'seqal.stoppers.F1Stopper', 'F1Stopper', ([], {'goal': '(15)'}), '(goal=15)\n', (678, 687), False, 'from seqal.stoppers import BudgetStopper, F1Stopper\n'), ((853, 907), 'unittest.mock.MagicMock', 'MagicMock', ([], {'classification_report': 'classification_report'}), '(classification_report=classification_report)\n', (862, 907), False, 'from unittest.mock import MagicMock\n'), ((1320, 1356), 'seqal.stoppers.BudgetStopper', 'BudgetStopper', ([], {'goal': '(15)', 'unit_price': '(1)'}), '(goal=15, unit_price=1)\n', (1333, 1356), False, 'from seqal.stoppers import BudgetStopper, F1Stopper\n')]
|
"""Candid Covariance-Free Incremental PCA (CCIPCA)."""
import numpy as np
from scipy import linalg
from sklearn.utils import check_array
from sklearn.utils.validation import FLOAT_DTYPES
from sklearn.base import BaseEstimator
from sklearn.preprocessing import normalize
import copy
class CCIPCA(BaseEstimator):
"""Candid Covariance-Free Incremental PCA (CCIPCA).
Parameters
----------
n_components : int or None, (default=None)
        Number of components to keep. If ``n_components`` is ``None``,
then ``n_components`` is set to ``min(n_samples, n_features)``.
copy : bool, (default=True)
If False, X will be overwritten. ``copy=False`` can be used to
save memory but is unsafe for general use.
    References
    ----------
    Candid Covariance-free Incremental Principal Component Analysis
"""
def __init__(self, n_components=10, amnesic=2, copy=True):
self.__name__ = 'Incremental Projection on Latent Space (IPLS)'
self.n_components = n_components
self.amnesic = amnesic
self.n = 0
self.copy = copy
self.x_rotations = None
self.sum_x = None
self.n_features = None
self.eign_values = None
self.x_mean = None
def normalize(self, x):
return normalize(x[:, np.newaxis], axis=0).ravel()
def fit(self, X, Y=None):
X = check_array(X, dtype=FLOAT_DTYPES, copy=self.copy)
n_samples, n_features = X.shape
if self.n == 0:
self.n_features = n_features
self.x_rotations = np.zeros((n_features, self.n_components))
self.eign_values = np.zeros((self.n_components))
self.incremental_mean = 1
for j in range(0, n_samples):
self.n = self.n + 1
u = X[j]
old_mean = (self.n-1)/self.n*self.incremental_mean
new_mean = 1/self.n*u
self.incremental_mean = old_mean+new_mean
if self.n == 1:
self.x_rotations[:, 0] = u
self.sum_x = u
else:
u = u - self.incremental_mean
self.sum_x = self.sum_x + u
k = min(self.n, self.n_components)
for i in range(1, k+1):
if i == self.n:
self.x_rotations[:, i - 1] = u
else:
w1, w2 = (self.n-1-self.amnesic)/self.n, (self.n+self.amnesic)/self.n
v_norm = self.normalize(self.x_rotations[:, i-1])
v_norm = np.expand_dims(v_norm, axis=1)
self.x_rotations[:, i - 1] = w1 * self.x_rotations[:, i - 1] + w2*u*np.dot(u.T, v_norm)[0]
v_norm = self.normalize(self.x_rotations[:, i-1])
v_norm = np.expand_dims(v_norm, axis=1)
u = u - (np.dot(u.T, v_norm)*v_norm)[:, 0]
return self
def transform(self, X, Y=None, copy=True):
"""Apply the dimension reduction learned on the train data."""
X = check_array(X, copy=copy, dtype=FLOAT_DTYPES)
X -= self.incremental_mean
w_rotation = np.zeros(self.x_rotations.shape)
for c in range(0, self.n_components):
w_rotation[:, c] = self.normalize(self.x_rotations[:, c])
return np.dot(X, w_rotation)
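
# Hedged demo (added for illustration, not part of the original module): fit the
# incremental PCA on a small random matrix and project it onto the learned
# components. Shapes and parameter values below are arbitrary.
if __name__ == "__main__":
    demo_X = np.random.rand(100, 20)            # 100 samples, 20 features
    ccipca = CCIPCA(n_components=5).fit(demo_X)
    reduced = ccipca.transform(demo_X)          # projected data, shape (100, 5)
    print(reduced.shape)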
|
[
"sklearn.utils.check_array",
"numpy.zeros",
"numpy.expand_dims",
"sklearn.preprocessing.normalize",
"numpy.dot"
] |
[((1416, 1466), 'sklearn.utils.check_array', 'check_array', (['X'], {'dtype': 'FLOAT_DTYPES', 'copy': 'self.copy'}), '(X, dtype=FLOAT_DTYPES, copy=self.copy)\n', (1427, 1466), False, 'from sklearn.utils import check_array\n'), ((3110, 3155), 'sklearn.utils.check_array', 'check_array', (['X'], {'copy': 'copy', 'dtype': 'FLOAT_DTYPES'}), '(X, copy=copy, dtype=FLOAT_DTYPES)\n', (3121, 3155), False, 'from sklearn.utils import check_array\n'), ((3218, 3250), 'numpy.zeros', 'np.zeros', (['self.x_rotations.shape'], {}), '(self.x_rotations.shape)\n', (3226, 3250), True, 'import numpy as np\n'), ((3389, 3410), 'numpy.dot', 'np.dot', (['X', 'w_rotation'], {}), '(X, w_rotation)\n', (3395, 3410), True, 'import numpy as np\n'), ((1611, 1652), 'numpy.zeros', 'np.zeros', (['(n_features, self.n_components)'], {}), '((n_features, self.n_components))\n', (1619, 1652), True, 'import numpy as np\n'), ((1685, 1712), 'numpy.zeros', 'np.zeros', (['self.n_components'], {}), '(self.n_components)\n', (1693, 1712), True, 'import numpy as np\n'), ((1326, 1361), 'sklearn.preprocessing.normalize', 'normalize', (['x[:, np.newaxis]'], {'axis': '(0)'}), '(x[:, np.newaxis], axis=0)\n', (1335, 1361), False, 'from sklearn.preprocessing import normalize\n'), ((2613, 2643), 'numpy.expand_dims', 'np.expand_dims', (['v_norm'], {'axis': '(1)'}), '(v_norm, axis=1)\n', (2627, 2643), True, 'import numpy as np\n'), ((2859, 2889), 'numpy.expand_dims', 'np.expand_dims', (['v_norm'], {'axis': '(1)'}), '(v_norm, axis=1)\n', (2873, 2889), True, 'import numpy as np\n'), ((2733, 2752), 'numpy.dot', 'np.dot', (['u.T', 'v_norm'], {}), '(u.T, v_norm)\n', (2739, 2752), True, 'import numpy as np\n'), ((2920, 2939), 'numpy.dot', 'np.dot', (['u.T', 'v_norm'], {}), '(u.T, v_norm)\n', (2926, 2939), True, 'import numpy as np\n')]
|
import pymongo
"""
CRUD operations for a MongoDB database
1. First start MongoDB locally: docker-compose -f second_step/example/mongo.yml up
2. Then run the following command:
    python.exe .\second_step\s7.py
Reference: https://www.runoob.com/python3/python-mongodb.html
"""
class Model:
def __init__(self):
client = pymongo.MongoClient("mongodb://localhost:27017")
self.db = client["fruit"]
self.table =self.db["fruit"]
def add(self,fruitDict):
self.table.insert_one(fruitDict)
def update(self,d1,d2):
self.table.update_one(d1,d2)
def delete(self,fruitDict):
self.table.delete_one(fruitDict)
def find(self,fruitDict):
fruit = self.table.find(fruitDict)
return list(fruit)
if __name__ == "__main__":
m = Model()
fruitDict= {"name":"apple","price":100}
m.add(fruitDict)
b=m.find({"name":"apple"})
print(b)
m.update({"name":"apple"},{"$set":{"price":80}})
m.delete({"name":"apple"})
|
[
"pymongo.MongoClient"
] |
[((256, 304), 'pymongo.MongoClient', 'pymongo.MongoClient', (['"""mongodb://localhost:27017"""'], {}), "('mongodb://localhost:27017')\n", (275, 304), False, 'import pymongo\n')]
|
from yaga_ga.evolutionary_algorithm.genes import IntGene, CharGene
from yaga_ga.evolutionary_algorithm.individuals import (
MixedIndividualStructure,
)
def test_initialization_with_tuple():
gene_1 = CharGene()
gene_2 = IntGene(lower_bound=1, upper_bound=1)
individual = MixedIndividualStructure((gene_1, gene_2))
assert len(individual) == 2
built = individual.build()
assert type(built[0]) == str
assert type(built[1]) == int
assert individual[0] == gene_1
assert individual[1] == gene_2
def test_progressive_initialization():
gene_1 = CharGene()
gene_2 = IntGene(lower_bound=1, upper_bound=1)
individual = MixedIndividualStructure(gene_1)
assert len(individual) == 1
built = individual.build()
assert len(built) == 1
assert type(built[0]) == str
individual_2 = individual.add_gene(gene_2)
assert len(individual_2) == 2
assert individual_2[0] == gene_1
assert individual_2[1] == gene_2
built2 = individual_2.build()
assert len(built2) == 2
assert type(built2[0]) == str
assert type(built2[1]) == int
|
[
"yaga_ga.evolutionary_algorithm.genes.CharGene",
"yaga_ga.evolutionary_algorithm.individuals.MixedIndividualStructure",
"yaga_ga.evolutionary_algorithm.genes.IntGene"
] |
[((209, 219), 'yaga_ga.evolutionary_algorithm.genes.CharGene', 'CharGene', ([], {}), '()\n', (217, 219), False, 'from yaga_ga.evolutionary_algorithm.genes import IntGene, CharGene\n'), ((233, 270), 'yaga_ga.evolutionary_algorithm.genes.IntGene', 'IntGene', ([], {'lower_bound': '(1)', 'upper_bound': '(1)'}), '(lower_bound=1, upper_bound=1)\n', (240, 270), False, 'from yaga_ga.evolutionary_algorithm.genes import IntGene, CharGene\n'), ((288, 330), 'yaga_ga.evolutionary_algorithm.individuals.MixedIndividualStructure', 'MixedIndividualStructure', (['(gene_1, gene_2)'], {}), '((gene_1, gene_2))\n', (312, 330), False, 'from yaga_ga.evolutionary_algorithm.individuals import MixedIndividualStructure\n'), ((584, 594), 'yaga_ga.evolutionary_algorithm.genes.CharGene', 'CharGene', ([], {}), '()\n', (592, 594), False, 'from yaga_ga.evolutionary_algorithm.genes import IntGene, CharGene\n'), ((608, 645), 'yaga_ga.evolutionary_algorithm.genes.IntGene', 'IntGene', ([], {'lower_bound': '(1)', 'upper_bound': '(1)'}), '(lower_bound=1, upper_bound=1)\n', (615, 645), False, 'from yaga_ga.evolutionary_algorithm.genes import IntGene, CharGene\n'), ((663, 695), 'yaga_ga.evolutionary_algorithm.individuals.MixedIndividualStructure', 'MixedIndividualStructure', (['gene_1'], {}), '(gene_1)\n', (687, 695), False, 'from yaga_ga.evolutionary_algorithm.individuals import MixedIndividualStructure\n')]
|
from PIL import Image
from PIL import ImageFont
from PIL import ImageDraw
import cv2,time,os
from moviepy.editor import *
from tkinter import filedialog as fd
def im_to_ascii(im:Image,width:int=640,keepAlpha:bool=True,highContrastMode:bool=False,fontResolution:int=5):
ratio:float = width/im.size[0]
im:Image = im.resize((int(im.size[0]*ratio),int(im.size[1]*ratio)),Image.NEAREST).convert("LA")
if highContrastMode: ramp:str = "@. .:-=+*#%@"
else : ramp:str = " .:-=+*#%@"
c:list[str] = []
for h in range(im.size[1]):
row:list[str] = []
for w in range(im.size[0]):
col:tuple = im.getpixel((w,h))
if keepAlpha and col[1]<=127: row.append(" ")
else: row.append(ramp[int((col[0]/255)*len(ramp))-1])
c.append(" ".join(row))
w:int = im.size[0] * fontResolution * 5
h:int = im.size[1] * fontResolution * 6
font:ImageFont = ImageFont.truetype("monogram.ttf", 7 * fontResolution)
img = Image.new("RGB",(w,h),(0,0,0))
ImageDraw.Draw(img).text(
(0, 0),
"\n".join(c),
(255,255,255),
font=font
)
return img
def videoFileToAscii(path:str,skip:bool=False):
if not skip:
def extractFrames(path:str)->tuple[int,int,int]:
print("Extracting Frames...")
starttime = time.time()
vidcap = cv2.VideoCapture(path)
success,image = vidcap.read()
count = 0
length = int(vidcap.get(cv2.CAP_PROP_FRAME_COUNT))
while success:
cv2.imwrite("frame/frame%d.png" % count, image)
success,image = vidcap.read()
count += 1
if time.time()-starttime>=2: print(int((count/length)*100),"%",sep="",end="\r");starttime=time.time()
return count,length,vidcap.get(cv2.CAP_PROP_FPS)
videoFrames, videoLength, videoFramerate = extractFrames(path)
videoTargetWidth = 120
videoTargetFramerate = 10
print("Converting Frames...")
for frame in range(0,videoFrames,int(videoFramerate/videoTargetFramerate)):
starttime = time.time()
with Image.open("frame/frame%d.png" % frame) as im:
im_to_ascii(im,videoTargetWidth,fontResolution=4).save("frame/frame%d.png" % frame)
if time.time()-starttime>=2: print(int((frame/videoFrames)*100),"%",sep="",end="\r");starttime=time.time()
else:
videoFrames = 359
videoFramerate = 30
videoTargetFramerate = 10
clip = ImageSequenceClip([f"frame/frame{frame}.png" for frame in range(0,videoFrames,int(videoFramerate/videoTargetFramerate))], fps = videoTargetFramerate)
clip.write_videofile(os.path.join(os.path.dirname(__file__),"output.mp4"))
if __name__ == "__main__":
path = fd.askopenfile(initialdir=os.path.dirname(__file__))
if True in [path.name.endswith(ext) for ext in [".mp4",".mkv",".avi",".mov"]]:
videoFileToAscii(path.name)
elif True in [path.name.endswith(ext) for ext in [".jpg",".jpeg",".png",".gif"]]:
with Image.open(path.name) as im:
i = im_to_ascii(im,width=516)
i.save("output.png")
i.show()
|
[
"PIL.Image.new",
"cv2.imwrite",
"os.path.dirname",
"time.time",
"PIL.ImageFont.truetype",
"cv2.VideoCapture",
"PIL.Image.open",
"PIL.ImageDraw.Draw"
] |
[((957, 1011), 'PIL.ImageFont.truetype', 'ImageFont.truetype', (['"""monogram.ttf"""', '(7 * fontResolution)'], {}), "('monogram.ttf', 7 * fontResolution)\n", (975, 1011), False, 'from PIL import ImageFont\n'), ((1025, 1060), 'PIL.Image.new', 'Image.new', (['"""RGB"""', '(w, h)', '(0, 0, 0)'], {}), "('RGB', (w, h), (0, 0, 0))\n", (1034, 1060), False, 'from PIL import Image\n'), ((1061, 1080), 'PIL.ImageDraw.Draw', 'ImageDraw.Draw', (['img'], {}), '(img)\n', (1075, 1080), False, 'from PIL import ImageDraw\n'), ((1396, 1407), 'time.time', 'time.time', ([], {}), '()\n', (1405, 1407), False, 'import cv2, time, os\n'), ((1430, 1452), 'cv2.VideoCapture', 'cv2.VideoCapture', (['path'], {}), '(path)\n', (1446, 1452), False, 'import cv2, time, os\n'), ((2222, 2233), 'time.time', 'time.time', ([], {}), '()\n', (2231, 2233), False, 'import cv2, time, os\n'), ((2869, 2894), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (2884, 2894), False, 'import cv2, time, os\n'), ((2980, 3005), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (2995, 3005), False, 'import cv2, time, os\n'), ((1626, 1673), 'cv2.imwrite', 'cv2.imwrite', (["('frame/frame%d.png' % count)", 'image'], {}), "('frame/frame%d.png' % count, image)\n", (1637, 1673), False, 'import cv2, time, os\n'), ((2252, 2291), 'PIL.Image.open', 'Image.open', (["('frame/frame%d.png' % frame)"], {}), "('frame/frame%d.png' % frame)\n", (2262, 2291), False, 'from PIL import Image\n'), ((2522, 2533), 'time.time', 'time.time', ([], {}), '()\n', (2531, 2533), False, 'import cv2, time, os\n'), ((3231, 3252), 'PIL.Image.open', 'Image.open', (['path.name'], {}), '(path.name)\n', (3241, 3252), False, 'from PIL import Image\n'), ((1850, 1861), 'time.time', 'time.time', ([], {}), '()\n', (1859, 1861), False, 'import cv2, time, os\n'), ((2430, 2441), 'time.time', 'time.time', ([], {}), '()\n', (2439, 2441), False, 'import cv2, time, os\n'), ((1763, 1774), 'time.time', 'time.time', ([], {}), '()\n', (1772, 1774), False, 'import cv2, time, os\n')]
|
# Find the slit. This function finds the location of the slit in the photograph of the spectrum
# The function takes a single line of the data and scans it to find the maximum value.
# If it finds a block of saturated pixels it takes the middle pixel of that block as the slit.
# The function returns the column number of the slit.
import math
def find_slit(data):
mx = 0
mxc = 0
startslit = 0
endslit = 0
for c,d in enumerate(data):
if d > mx:
mx = d
mxc = c
if startslit == 0 and d >= 255:
startslit = c
if endslit == 0 and startslit > 0 and d < 254:
endslit = c
break
# We found a slit of saturated values
if startslit > 0 and endslit > startslit:
return math.ceil(0.5 * (endslit - startslit) + startslit)
# Or just return the location of the biggest value found
else:
return mxc
# Reads in the data along with the grating pitch (g in lines/mm) and resolution in radians per pixel
def get_spectrum(data,g,res):
s = find_slit(data)
d2 = data[s::-1]
d = 0.001 / g # convert lines/mm into grating spacing in m
wvl = [ 1e9* d * math.sin(i * res) for i in range(len(d2))]
return (wvl,d2)
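
# Hedged usage sketch (numbers are illustrative, not calibrated): convert one
# row of pixel values into a spectrum for a 600 lines/mm grating imaged at
# roughly 1e-4 radians per pixel.
#
#   wavelengths_nm, intensities = get_spectrum(image_row, g=600, res=1e-4)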
|
[
"math.sin",
"math.ceil"
] |
[((880, 930), 'math.ceil', 'math.ceil', (['(0.5 * (endslit - startslit) + startslit)'], {}), '(0.5 * (endslit - startslit) + startslit)\n', (889, 930), False, 'import math\n'), ((1317, 1334), 'math.sin', 'math.sin', (['(i * res)'], {}), '(i * res)\n', (1325, 1334), False, 'import math\n')]
|
import warnings
warnings.filterwarnings('ignore', category=UserWarning, append=True)
RAMS_Units=dict()
# winds
RAMS_Units['UC']='m s-1'
RAMS_Units['VC']='m s-1'
RAMS_Units['WC']='m s-1'
# potential temperature
RAMS_Units['THETA']='K'
RAMS_Units['PI']='J kg-1 K-1'
RAMS_Units['DN0']='kg m-3'
# water vapour mixing ratio:
RAMS_Units['RV']='kg kg-1'
# hydrometeor mass mixing ratios:
mass_mixing_ratios=['RCP','RDP','RRP','RPP','RSP','RAP','RGP','RHP']
for variable in mass_mixing_ratios:
RAMS_Units[variable]='kg kg-1'
# hydrometeor number mixing ratios:
mass_mixing_ratios=['CCP','CDP','CRP','CPP','CSP','CAP','CGP','CHP']
for variable in mass_mixing_ratios:
RAMS_Units[variable]='kg-1'
#hydrometeor precipitation rates:
precipitation_rates=['PCPRR','PCPRD','PCPRS','PCPRH','PCPRP','PCPRA','PCPRG']
for variable in precipitation_rates:
    RAMS_Units[variable]='kg m-2 s-1'
# hydrometeor precipitation accumulated:
precipitation_accumulated=['ACCPR','ACCPD','ACCPS','ACCPH','ACCPP','ACCPA','ACCPG']
for variable in precipitation_accumulated:
    RAMS_Units[variable]='kg m-2'
# radiation:
RAMS_Units['LWUP']='W m-2'
RAMS_Units['LWDN']='W m-2'
RAMS_Units['SWUP']='W m-2'
RAMS_Units['SWDN']='W m-2'
# individual microphysics processes accumulated
RAMS_processes_mass=[
'NUCCLDRT',
'NUCICERT',
'INUCHOMRT',
'INUCCONTR',
'INUCIFNRT',
'INUCHAZRT',
'VAPCLDT',
'VAPRAINT',
'VAPPRIST',
'VAPSNOWT',
'VAPAGGRT',
'VAPGRAUT',
'VAPHAILT',
'VAPDRIZT',
'MELTSNOWT',
'MELTAGGRT',
'MELTGRAUT',
'MELTHAILT',
'RIMECLDSNOWT',
'RIMECLDAGGRT',
'RIMECLDGRAUT',
'RIMECLDHAILT',
'RAIN2PRT',
'RAIN2SNT',
'RAIN2AGT',
'RAIN2GRT',
'RAIN2HAT',
'AGGRSELFPRIST',
'AGGRSELFSNOWT',
'AGGRPRISSNOWT'
]
for variable in RAMS_processes_mass:
RAMS_Units[variable]='kg kg-1'
# grouped microphysics processes accumulated:
RAMS_processes_mass_grouped=[
'VAPLIQT',
'VAPICET',
'MELTICET',
'CLD2RAINT',
'RIMECLDT',
'RAIN2ICET',
'ICE2RAINT',
'AGGREGATET'
]
for variable in RAMS_processes_mass_grouped:
RAMS_Units[variable]='kg kg-1'
# grouped microphysics processes instantaneous:
RAMS_processes_mass_grouped_instantaneous=[
'VAPLIQ',
'VAPICE',
'MELTICE',
'CLD2RAIN',
'RIMECLD',
'RAIN2ICE',
'ICE2RAIN',
'NUCCLDR',
'NUCICER'
]
for variable in RAMS_processes_mass_grouped_instantaneous:
RAMS_Units[variable]='kg kg-1 s-1'
RAMS_standard_name=dict()
variable_list_derive=[
'air_temperature',
'air_pressure',
'temperature',
'air_density',
'OLR',
'LWC',
'IWC',
'LWP',
'IWP',
'IWV',
'airmass',
'airmass_path',
'surface_precipitation',
'surface_precipitation_average',
'surface_precipitation_accumulated',
'surface_precipitation_instantaneous',
'LWup_TOA',
'LWup_sfc',
'LWdn_TOA',
'LWdn_sfc',
'SWup_TOA',
'SWup_sfc',
'SWdn_TOA',
'SWdn_sfc'
]
def variable_list(filenames):
from iris import load
cubelist=load(filenames[0])
variable_list = [cube.name() for cube in cubelist]
return variable_list
def load(filenames,variable,mode='auto',**kwargs):
if variable in variable_list_derive:
variable_cube=deriveramscube(filenames,variable,**kwargs)
else:
variable_cube=loadramscube(filenames,variable,**kwargs)
# if mode=='auto':
# variable_list_file=variable_list(filenames)
# if variable in variable_list_file:
# variable_cube=loadramscube(filenames,variable,**kwargs)
# elif variable in variable_list_derive:
# variable_cube=deriveramscube(filenames,variable,**kwargs)
# elif variable in variable_dict_pseudonym.keys():
# variable_load=variable_dict_pseudonym[variable]
# variable_cube=loadramscube(filenames,variable_load,**kwargs)
# else:
# raise SystemExit('variable not found')
# elif mode=='file':
# variable_list_file=variable_list(filenames)
# if variable in variable_list_file:
# variable_cube=loadramscube(filenames,variable,**kwargs)
# elif mode=='derive':
# variable_cube=deriveramscube(filenames,variable,**kwargs)
# elif mode=='pseudonym':
# variable_load=variable_dict_pseudonym[variable]
# variable_cube=loadramscube(filenames,variable_load,**kwargs)
# else:
# print("mode=",mode)
# raise SystemExit('unknown mode')
return variable_cube
def loadramscube(filenames,variable,**kwargs):
if type(filenames) is list:
variable_cube=loadramscube_mult(filenames,variable,**kwargs)
elif type(filenames) is str:
variable_cube=loadramscube_single(filenames,variable,**kwargs)
else:
print("filenames=",filenames)
raise SystemExit('Type of input unknown: Must be str of list')
return variable_cube
def loadramscube_single(filenames,variable,constraint=None,add_coordinates=None):
from iris import load_cube
variable_cube=load_cube(filenames,variable)
variable_cube.units=RAMS_Units[variable]
variable_cube=addcoordinates(filenames, variable,variable_cube,add_coordinates=add_coordinates)
return variable_cube
def loadramscube_mult(filenames,variable,constraint=None,add_coordinates=None):
from iris.cube import CubeList
cube_list=[]
for i in range(len(filenames)):
cube_list.append(loadramscube_single(filenames[i],variable,add_coordinates=add_coordinates) )
for member in cube_list:
member.attributes={}
variable_cubes=CubeList(cube_list)
variable_cube=variable_cubes.merge_cube()
variable_cube=variable_cube.extract(constraint)
return variable_cube
def readramsheader(filename):
from numpy import array
searchfile = open(filename, "r")
coord_dict=dict()
variable_dict=dict()
coord_part=False
i_variable=0
n_variable=0
for i,line in enumerate(searchfile):
if (i==0):
num_variables=int(line[:-1])
if (i>0 and i<=num_variables):
line_split=line[:-1].split()
variable_dict[line_split[0]]=int(line_split[2])
if ('__') in line:
coord_part=True
i_variable=i
variable_name=line[2:-1]
variable_list=[]
if coord_part:
if (i==i_variable+1):
n_variable=int(line[:-1])
if n_variable>0:
if (i>=i_variable+2 and i<=i_variable+1+n_variable):
try:
value_out=array(float(line[:-1]))
except:
value_out=line[:-1]
variable_list.append(value_out)
if (i==i_variable+1+n_variable):
coord_dict[variable_name]=array(variable_list)
coord_part=False
# else:
# coord_part=False
return variable_dict, coord_dict
def addcoordinates(filename, variable,variable_cube,**kwargs):
filename_header=filename[:-5]+'head.txt'
domain=filename[-4]
variable_dict, coord_dict=readramsheader(filename_header)
variable_cube=add_dim_coordinates(filename, variable,variable_cube,variable_dict, coord_dict,domain,**kwargs)
variable_cube=add_aux_coordinates(filename, variable,variable_cube,variable_dict, coord_dict,domain,**kwargs)
return variable_cube
def make_time_coord(coord_dict):
from datetime import datetime,timedelta
from iris import coords
timestr=str(int(coord_dict['iyear1'][0]))+str(int(coord_dict['imonth1'][0])).zfill(2)+str(int(coord_dict['idate1'][0])).zfill(2)+str(int(coord_dict['itime1'][0])).zfill(4)
timeobj = datetime.strptime(timestr,"%Y%m%d%H%M")+timedelta(seconds=1)*coord_dict['time'][0]
if timeobj<datetime(100,1,1):
base_date=datetime(1,1,1)
else:
base_date=datetime(1970,1,1)
time_units='days since '+ base_date.strftime('%Y-%m-%d')
time_days=(timeobj - base_date).total_seconds() / timedelta(days=1).total_seconds()
time_coord=coords.DimCoord(time_days, standard_name='time', long_name='time', var_name='time', units=time_units, bounds=None, attributes=None, coord_system=None, circular=False)
return time_coord
def make_model_level_number_coordinate(n_level):
from iris import coords
from numpy import arange
MODEL_LEVEL_NUMBER=arange(0,n_level)
model_level_number=coords.AuxCoord(MODEL_LEVEL_NUMBER, standard_name='model_level_number', units='1')
return model_level_number
def add_dim_coordinates(filename, variable,variable_cube,variable_dict, coord_dict,domain,add_coordinates=None):
from iris import coords
import numpy as np
# from iris import coord_systems
# coord_system=coord_systems.LambertConformal(central_lat=MOAD_CEN_LAT, central_lon=CEN_LON, false_easting=0.0, false_northing=0.0, secant_latitudes=(TRUELAT1, TRUELAT2))
coord_system=None
if (variable_dict[variable]==3):
time_coord=make_time_coord(coord_dict)
variable_cube.add_aux_coord(time_coord)
z_coord=coords.DimCoord(coord_dict['ztn01'], standard_name='geopotential_height', long_name='z', var_name='z', units='m', bounds=None, attributes=None, coord_system=coord_system)
variable_cube.add_dim_coord(z_coord,0)
model_level_number_coord=make_model_level_number_coordinate(len(z_coord.points))
variable_cube.add_aux_coord(model_level_number_coord,0)
x_coord=coords.DimCoord(np.arange(len(coord_dict['xtn0'+domain])), long_name='x', units='1', bounds=None, attributes=None, coord_system=coord_system)
variable_cube.add_dim_coord(x_coord,2)
y_coord=coords.DimCoord(np.arange(len(coord_dict['ytn0'+domain])), long_name='y', units='1', bounds=None, attributes=None, coord_system=coord_system)
variable_cube.add_dim_coord(y_coord,1)
projection_x_coord=coords.DimCoord(coord_dict['xtn0'+domain], standard_name='projection_x_coordinate', long_name='x', var_name='x', units='m', bounds=None, attributes=None, coord_system=coord_system)
variable_cube.add_aux_coord(projection_x_coord,(2))
projection_y_coord=coords.DimCoord(coord_dict['ytn0'+domain], standard_name='projection_y_coordinate', long_name='y', var_name='y', units='m', bounds=None, attributes=None, coord_system=coord_system)
variable_cube.add_aux_coord(projection_y_coord,(1))
elif (variable_dict[variable]==2):
x_coord=coords.DimCoord(np.arange(len(coord_dict['xtn0'+domain])), long_name='x', units='1', bounds=None, attributes=None, coord_system=coord_system)
variable_cube.add_dim_coord(x_coord,1)
y_coord=coords.DimCoord(np.arange(len(coord_dict['ytn0'+domain])), long_name='y', units='1', bounds=None, attributes=None, coord_system=coord_system)
variable_cube.add_dim_coord(y_coord,0)
projection_x_coord=coords.DimCoord(coord_dict['xtn0'+domain], standard_name='projection_x_coordinate', long_name='x', var_name='x', units='m', bounds=None, attributes=None, coord_system=coord_system)
variable_cube.add_aux_coord(projection_x_coord,(1))
projection_y_coord=coords.DimCoord(coord_dict['ytn0'+domain], standard_name='projection_y_coordinate', long_name='y', var_name='y', units='m', bounds=None, attributes=None, coord_system=coord_system)
variable_cube.add_aux_coord(projection_y_coord,(0))
time_coord=make_time_coord(coord_dict)
variable_cube.add_aux_coord(time_coord)
return variable_cube
def add_aux_coordinates(filename,variable,variable_cube,variable_dict, coord_dict,domain,**kwargs):
from iris import load_cube,coords
coord_system=None
latitude=load_cube(filename,'GLAT').core_data()
longitude=load_cube(filename,'GLON').core_data()
lat_coord=coords.AuxCoord(latitude, standard_name='latitude', long_name='latitude', var_name='latitude', units='degrees', bounds=None, attributes=None, coord_system=coord_system)
lon_coord=coords.AuxCoord(longitude, standard_name='longitude', long_name='longitude', var_name='longitude', units='degrees', bounds=None, attributes=None, coord_system=coord_system)
if (variable_dict[variable]==3):
variable_cube.add_aux_coord(lon_coord,(1,2))
variable_cube.add_aux_coord(lat_coord,(1,2))
elif (variable_dict[variable]==2):
variable_cube.add_aux_coord(lon_coord,(0,1))
variable_cube.add_aux_coord(lat_coord,(0,1))
# add_coordinates=kwargs.pop('add_coordinates')
# if type(add_coordinates)!=list:
# add_coordinates1=add_coordinates
# add_coordinates=[]
# add_coordinates.append(add_coordinates1)
# for coordinate in add_coordinates:
# if coordinate=='latlon':
# latitude=load_cube(filename,'GLAT').data
# longitude=load_cube(filename,'GLON').data
# lat_coord=coords.AuxCoord(latitude, standard_name='latitude', long_name='latitude', var_name='latitude', units='degrees', bounds=None, attributes=None, coord_system=coord_system)
# lon_coord=coords.AuxCoord(longitude, standard_name='longitude', long_name='longitude', var_name='longitude', units='degrees', bounds=None, attributes=None, coord_system=coord_system)
# if (variable_dict[variable]==3):
# variable_cube.add_aux_coord(lon_coord,(1,2))
# variable_cube.add_aux_coord(lat_coord,(1,2))
# elif (variable_dict[variable]==2):
# variable_cube.add_aux_coord(lon_coord,(0,1))
# variable_cube.add_aux_coord(lat_coord,(0,1))
return variable_cube
def calculate_rams_LWC(filenames,**kwargs):
RCP=loadramscube(filenames, 'RCP',**kwargs)
RDP=loadramscube(filenames, 'RDP',**kwargs)
RRP=loadramscube(filenames, 'RRP',**kwargs)
LWC=RCP+RDP+RRP
LWC.rename('liquid water content')
#LWC.rename('mass_concentration_of_liquid_water_in_air')
return LWC
#
def calculate_rams_IWC(filenames,**kwargs):
RPP=loadramscube(filenames, 'RPP',**kwargs)
RSP=loadramscube(filenames, 'RSP',**kwargs)
RAP=loadramscube(filenames, 'RAP',**kwargs)
RGP=loadramscube(filenames, 'RGP',**kwargs)
RHP=loadramscube(filenames, 'RHP',**kwargs)
IWC=RPP+RSP+RAP+RGP+RHP
IWC.rename('ice water content')
#IWC.rename('mass_concentration_of_ice_water_in_air')
return IWC
def calculate_rams_airmass(filenames,**kwargs):
from iris.coords import AuxCoord
from numpy import diff
rho=loadramscube(filenames,'DN0',**kwargs)
z=rho.coord('geopotential_height')
z_dim=rho.coord_dims('geopotential_height')
z_diff=AuxCoord(mydiff(z.points),var_name='z_diff')
rho.add_aux_coord(z_diff,data_dims=z_dim)
dx=diff(rho.coord('projection_x_coordinate').points[0:2])
dy=diff(rho.coord('projection_y_coordinate').points[0:2])
Airmass=rho*rho.coord('z_diff')*dx*dy
Airmass.remove_coord('z_diff')
Airmass.rename('mass_of_air')
Airmass.units='kg'
return Airmass
def calculate_rams_airmass_path(filenames,**kwargs):
from iris.coords import AuxCoord
rho=loadramscube(filenames,'DN0',**kwargs)
z=rho.coord('geopotential_height')
z_dim=rho.coord_dims('geopotential_height')
z_diff=AuxCoord(mydiff(z.points),var_name='z_diff')
rho.add_aux_coord(z_diff,data_dims=z_dim)
Airmass=rho*rho.coord('z_diff')
Airmass.remove_coord('z_diff')
Airmass.rename('airmass_path')
Airmass.units='kg m-2'
return Airmass
def calculate_rams_air_temperature(filenames,**kwargs):
from iris.coords import AuxCoord
theta=loadramscube(filenames,'THETA',**kwargs)
pi=loadramscube(filenames,'PI',**kwargs)
cp=AuxCoord(1004,long_name='cp',units='J kg-1 K-1')
t=theta*pi/cp
t.rename('air_temperature')
return t
def calculate_rams_air_pressure(filenames,**kwargs):
from iris.coords import AuxCoord
pi=loadramscube(filenames,'PI',**kwargs)
cp=AuxCoord(1004,long_name='cp',units='J kg-1 K-1')
rd=AuxCoord(287,long_name='rd',units='J kg-1 K-1')
p = 100000 * (pi/cp)**(cp.points/rd.points) # Pressure in Pa
p.rename('air_pressure')
p.units='Pa'
return p
def calculate_rams_density(filenames,**kwargs):
rho=loadramscube(filenames,'DN0',**kwargs)
rho.rename('air_density')
rho.units='kg m-3'
return rho
def calculate_rams_LWP(filenames,**kwargs):
from iris.analysis import SUM
LWC=deriveramscube(filenames,'LWC',**kwargs)
Airmass=deriveramscube(filenames,'airmass_path',**kwargs)
LWP=(LWC*Airmass).collapsed(('geopotential_height'),SUM)
LWP.rename('liquid water path')
#LWP.rename('atmosphere_mass_content_of_cloud_liquid_water')
return LWP
#
def calculate_rams_IWP(filenames,**kwargs):
from iris.analysis import SUM
IWC=deriveramscube(filenames,'IWC',**kwargs)
Airmass=deriveramscube(filenames,'airmass_path',**kwargs)
IWP=(IWC*Airmass).collapsed(('geopotential_height'),SUM)
IWP.rename('ice water path')
#IWP.rename('atmosphere_mass_content_of_cloud_ice_water')
return IWP
def calculate_rams_IWV(filenames,**kwargs):
from iris.analysis import SUM
RV=loadramscube(filenames,'RV',**kwargs)
Airmass=deriveramscube(filenames,'airmass_path',**kwargs)
IWV=(RV*Airmass).collapsed(('geopotential_height'),SUM)
IWV.rename('integrated water vapor')
#IWP.rename('atmosphere_mass_content_of_cloud_ice_water')
return IWV
# Radiation fluxed at the top of the atmospere and at the surface
def calculate_rams_LWup_TOA(filenames,**kwargs):
from iris import Constraint
LWUP=loadramscube(filenames,'LWUP',**kwargs)
LWup_TOA=LWUP.extract(Constraint(model_level_number=LWUP.coord('model_level_number').points[-1]))
LWup_TOA.rename('LWup_TOA')
return LWup_TOA
def calculate_rams_LWup_sfc(filenames,**kwargs):
from iris import Constraint
LWUP=loadramscube(filenames,'LWUP',**kwargs)
LWup_sfc=LWUP.extract(Constraint(model_level_number=0))
LWup_sfc.rename('LWup_sfc')
return LWup_sfc
def calculate_rams_LWdn_TOA(filenames,**kwargs):
from iris import Constraint
LWDN=loadramscube(filenames,'LWDN',**kwargs)
LWdn_TOA=LWDN.extract(Constraint(model_level_number=LWDN.coord('model_level_number').points[-1]))
LWdn_TOA.rename('LWdn_TOA')
return LWdn_TOA
def calculate_rams_LWdn_sfc(filenames,**kwargs):
from iris import Constraint
LWDN=loadramscube(filenames,'LWDN',**kwargs)
LWdn_sfc=LWDN.extract(Constraint(model_level_number=0))
LWdn_sfc.rename('LWdn_sfc')
return LWdn_sfc
def calculate_rams_SWup_TOA(filenames,**kwargs):
from iris import Constraint
SWUP=loadramscube(filenames,'SWUP',**kwargs)
SWup_TOA=SWUP.extract(Constraint(model_level_number=SWUP.coord('model_level_number').points[-1]))
SWup_TOA.rename('SWup_TOA')
return SWup_TOA
def calculate_rams_SWup_sfc(filenames,**kwargs):
from iris import Constraint
SWUP=loadramscube(filenames,'SWUP',**kwargs)
SWup_sfc=SWUP.extract(Constraint(model_level_number=0))
SWup_sfc.rename('SWup_sfc')
return SWup_sfc
def calculate_rams_SWdn_TOA(filenames,**kwargs):
from iris import Constraint
SWDN=loadramscube(filenames,'SWDN',**kwargs)
SWdn_TOA=SWDN.extract(Constraint(model_level_number=SWDN.coord('model_level_number').points[-1]))
SWdn_TOA.rename('SWdn_TOA')
return SWdn_TOA
def calculate_rams_SWdn_sfc(filenames,**kwargs):
from iris import Constraint
SWDN=loadramscube(filenames,'SWDN',**kwargs)
SWdn_sfc=SWDN.extract(Constraint(model_level_number=0))
SWdn_sfc.rename('SWdn_sfc')
return SWdn_sfc
def calculate_rams_surface_precipitation_instantaneous(filenames,**kwargs):
PCPRR=loadramscube(filenames,'PCPRR',**kwargs)
PCPRD=loadramscube(filenames,'PCPRD',**kwargs)
PCPRS=loadramscube(filenames,'PCPRS',**kwargs)
PCPRP=loadramscube(filenames,'PCPRP',**kwargs)
PCPRA=loadramscube(filenames,'PCPRA',**kwargs)
PCPRH=loadramscube(filenames,'PCPRH',**kwargs)
PCPRG=loadramscube(filenames,'PCPRG',**kwargs)
surface_precip=PCPRR+PCPRD+PCPRS+PCPRP+PCPRA+PCPRG+PCPRH
surface_precip.rename('surface_precipitation_instantaneous')
return surface_precip
def calculate_rams_surface_precipitation_accumulated(filenames,**kwargs):
ACCPR=loadramscube(filenames,'ACCPR',**kwargs)
ACCPD=loadramscube(filenames,'ACCPD',**kwargs)
ACCPS=loadramscube(filenames,'ACCPS',**kwargs)
ACCPP=loadramscube(filenames,'ACCPP',**kwargs)
ACCPA=loadramscube(filenames,'ACCPA',**kwargs)
ACCPH=loadramscube(filenames,'ACCPH',**kwargs)
ACCPG=loadramscube(filenames,'ACCPG',**kwargs)
surface_precip_acc=ACCPR+ACCPD+ACCPS+ACCPP+ACCPA+ACCPG+ACCPH
surface_precip_acc.rename('surface_precipitation_accumulated')
#IWP.rename('atmosphere_mass_content_of_cloud_ice_water')
return surface_precip_acc
def calculate_rams_surface_precipitation_average(filenames,**kwargs):
from dask.array import concatenate
surface_precip_accum=calculate_rams_surface_precipitation_accumulated(filenames,**kwargs)
    #calculate timestep in hours
time_coord=surface_precip_accum.coord('time')
dt=(time_coord.units.num2date(time_coord.points[1])-time_coord.units.num2date(time_coord.points[0])).total_seconds()/3600.
    #divide the difference in accumulated precip between timesteps (in mm) by the timestep (in h) to get an average rate in mm/h:
surface_precip=surface_precip_accum
surface_precip.data=concatenate((0*surface_precip.core_data()[[1],:,:],surface_precip.core_data()[1:,:,:]-surface_precip.core_data()[:-1:,:,:]),axis=0)/dt
surface_precip.rename('surface_precipitation_average')
surface_precip.units= 'mm/h'
return surface_precip
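# Illustrative note (not part of the original module): for RAMS output written
# every 5 minutes, dt = 300 s / 3600 = 1/12 h, so a 1 mm increase in the
# accumulated precipitation between two consecutive files corresponds to an
# average rate of 12 mm/h.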
def mydiff(A):
import numpy as np
d1=np.diff(A)
d=np.zeros(A.shape)
d[0]=d1[0]
d[1:-1]=0.5*(d1[0:-1]+d1[1:])
d[-1]=d1[-1]
return d
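# Worked example (assumed, for illustration only):
#   mydiff(np.array([0., 1., 4., 9.]))  ->  array([1., 2., 4., 5.])
# i.e. one-sided differences at the two endpoints and centred averages of the
# forward differences in the interior.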
def deriveramscube(filenames,variable,**kwargs):
# if variable in ['temperature','air_temperature']:
# variable_cube=calculate_rams_temperature(filenames,**kwargs)
# #variable_cube_out=addcoordinates(filenames, 'T',variable_cube,add_coordinates)
# elif variable == 'density':
# variable_cube=calculate_rams_density(filenames,**kwargs)
if variable == 'LWC':
variable_cube=calculate_rams_LWC(filenames,**kwargs)
elif variable == 'IWC':
variable_cube=calculate_rams_IWC(filenames,**kwargs)
elif variable == 'LWP':
variable_cube=calculate_rams_LWP(filenames,**kwargs)
elif variable == 'IWP':
variable_cube=calculate_rams_IWP(filenames,**kwargs)
elif variable == 'IWV':
variable_cube=calculate_rams_IWV(filenames,**kwargs)
elif variable == 'airmass':
variable_cube=calculate_rams_airmass(filenames,**kwargs)
elif variable == 'air_temperature':
variable_cube=calculate_rams_air_temperature(filenames,**kwargs)
elif variable=='air_pressure':
variable_cube=calculate_rams_air_pressure(filenames,**kwargs)
elif variable == 'air_density':
variable_cube=calculate_rams_density(filenames,**kwargs)
elif variable == 'airmass_path':
variable_cube=calculate_rams_airmass_path(filenames,**kwargs)
elif variable == 'surface_precipitation_average':
variable_cube=calculate_rams_surface_precipitation_average(filenames,**kwargs)
elif variable == 'surface_precipitation_accumulated':
variable_cube=calculate_rams_surface_precipitation_accumulated(filenames,**kwargs)
elif (variable == 'surface_precipitation_instantaneous') or (variable == 'surface_precipitation'):
variable_cube=calculate_rams_surface_precipitation_instantaneous(filenames,**kwargs)
elif (variable == 'LWup_TOA'):
variable_cube=calculate_rams_LWup_TOA(filenames,**kwargs)
elif (variable == 'LWup_sfc'):
variable_cube=calculate_rams_LWup_sfc(filenames,**kwargs)
elif (variable == 'LWdn_TOA'):
variable_cube=calculate_rams_LWdn_TOA(filenames,**kwargs)
elif (variable == 'LWdn_sfc'):
variable_cube=calculate_rams_LWdn_sfc(filenames,**kwargs)
elif (variable == 'SWup_TOA'):
variable_cube=calculate_rams_SWup_TOA(filenames,**kwargs)
elif (variable == 'SWup_sfc'):
variable_cube=calculate_rams_SWup_sfc(filenames,**kwargs)
elif (variable == 'SWdn_TOA'):
variable_cube=calculate_rams_SWdn_TOA(filenames,**kwargs)
elif (variable == 'SWdn_sfc'):
variable_cube=calculate_rams_SWdn_sfc(filenames,**kwargs)
else:
raise NameError(variable, 'is not a known variable')
return variable_cube
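# Usage sketch (illustrative, not part of the original module): the dispatcher
# takes a list of RAMS output files plus the name of a derived field, and any
# extra keyword arguments are passed through to the underlying load/derive
# helpers, e.g.
#   IWP_cube = deriveramscube(filenames, 'IWP')
#   rain_cube = deriveramscube(filenames, 'surface_precipitation_average')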
|
[
"iris.coords.AuxCoord",
"warnings.filterwarnings",
"iris.cube.CubeList",
"numpy.zeros",
"iris.load",
"iris.Constraint",
"datetime.datetime",
"datetime.datetime.strptime",
"iris.load_cube",
"iris.coords.DimCoord",
"numpy.arange",
"numpy.diff",
"datetime.timedelta",
"numpy.array"
] |
[((16, 84), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {'category': 'UserWarning', 'append': '(True)'}), "('ignore', category=UserWarning, append=True)\n", (39, 84), False, 'import warnings\n'), ((2997, 3015), 'iris.load', 'load', (['filenames[0]'], {}), '(filenames[0])\n', (3001, 3015), False, 'from iris import load\n'), ((5014, 5044), 'iris.load_cube', 'load_cube', (['filenames', 'variable'], {}), '(filenames, variable)\n', (5023, 5044), False, 'from iris import load_cube, coords\n'), ((5569, 5588), 'iris.cube.CubeList', 'CubeList', (['cube_list'], {}), '(cube_list)\n', (5577, 5588), False, 'from iris.cube import CubeList\n'), ((8120, 8296), 'iris.coords.DimCoord', 'coords.DimCoord', (['time_days'], {'standard_name': '"""time"""', 'long_name': '"""time"""', 'var_name': '"""time"""', 'units': 'time_units', 'bounds': 'None', 'attributes': 'None', 'coord_system': 'None', 'circular': '(False)'}), "(time_days, standard_name='time', long_name='time', var_name\n ='time', units=time_units, bounds=None, attributes=None, coord_system=\n None, circular=False)\n", (8135, 8296), False, 'from iris import load_cube, coords\n'), ((8443, 8461), 'numpy.arange', 'arange', (['(0)', 'n_level'], {}), '(0, n_level)\n', (8449, 8461), False, 'from numpy import arange\n'), ((8484, 8570), 'iris.coords.AuxCoord', 'coords.AuxCoord', (['MODEL_LEVEL_NUMBER'], {'standard_name': '"""model_level_number"""', 'units': '"""1"""'}), "(MODEL_LEVEL_NUMBER, standard_name='model_level_number',\n units='1')\n", (8499, 8570), False, 'from iris import load_cube, coords\n'), ((11882, 12058), 'iris.coords.AuxCoord', 'coords.AuxCoord', (['latitude'], {'standard_name': '"""latitude"""', 'long_name': '"""latitude"""', 'var_name': '"""latitude"""', 'units': '"""degrees"""', 'bounds': 'None', 'attributes': 'None', 'coord_system': 'coord_system'}), "(latitude, standard_name='latitude', long_name='latitude',\n var_name='latitude', units='degrees', bounds=None, attributes=None,\n coord_system=coord_system)\n", (11897, 12058), False, 'from iris import load_cube, coords\n'), ((12065, 12245), 'iris.coords.AuxCoord', 'coords.AuxCoord', (['longitude'], {'standard_name': '"""longitude"""', 'long_name': '"""longitude"""', 'var_name': '"""longitude"""', 'units': '"""degrees"""', 'bounds': 'None', 'attributes': 'None', 'coord_system': 'coord_system'}), "(longitude, standard_name='longitude', long_name='longitude',\n var_name='longitude', units='degrees', bounds=None, attributes=None,\n coord_system=coord_system)\n", (12080, 12245), False, 'from iris import load_cube, coords\n'), ((15855, 15905), 'iris.coords.AuxCoord', 'AuxCoord', (['(1004)'], {'long_name': '"""cp"""', 'units': '"""J kg-1 K-1"""'}), "(1004, long_name='cp', units='J kg-1 K-1')\n", (15863, 15905), False, 'from iris.coords import AuxCoord\n'), ((16110, 16160), 'iris.coords.AuxCoord', 'AuxCoord', (['(1004)'], {'long_name': '"""cp"""', 'units': '"""J kg-1 K-1"""'}), "(1004, long_name='cp', units='J kg-1 K-1')\n", (16118, 16160), False, 'from iris.coords import AuxCoord\n'), ((16166, 16215), 'iris.coords.AuxCoord', 'AuxCoord', (['(287)'], {'long_name': '"""rd"""', 'units': '"""J kg-1 K-1"""'}), "(287, long_name='rd', units='J kg-1 K-1')\n", (16174, 16215), False, 'from iris.coords import AuxCoord\n'), ((21921, 21931), 'numpy.diff', 'np.diff', (['A'], {}), '(A)\n', (21928, 21931), True, 'import numpy as np\n'), ((21938, 21955), 'numpy.zeros', 'np.zeros', (['A.shape'], {}), '(A.shape)\n', (21946, 21955), True, 'import numpy as np\n'), ((7756, 7796), 
'datetime.datetime.strptime', 'datetime.strptime', (['timestr', '"""%Y%m%d%H%M"""'], {}), "(timestr, '%Y%m%d%H%M')\n", (7773, 7796), False, 'from datetime import datetime, timedelta\n'), ((7855, 7874), 'datetime.datetime', 'datetime', (['(100)', '(1)', '(1)'], {}), '(100, 1, 1)\n', (7863, 7874), False, 'from datetime import datetime, timedelta\n'), ((7892, 7909), 'datetime.datetime', 'datetime', (['(1)', '(1)', '(1)'], {}), '(1, 1, 1)\n', (7900, 7909), False, 'from datetime import datetime, timedelta\n'), ((7936, 7956), 'datetime.datetime', 'datetime', (['(1970)', '(1)', '(1)'], {}), '(1970, 1, 1)\n', (7944, 7956), False, 'from datetime import datetime, timedelta\n'), ((9174, 9352), 'iris.coords.DimCoord', 'coords.DimCoord', (["coord_dict['ztn01']"], {'standard_name': '"""geopotential_height"""', 'long_name': '"""z"""', 'var_name': '"""z"""', 'units': '"""m"""', 'bounds': 'None', 'attributes': 'None', 'coord_system': 'coord_system'}), "(coord_dict['ztn01'], standard_name='geopotential_height',\n long_name='z', var_name='z', units='m', bounds=None, attributes=None,\n coord_system=coord_system)\n", (9189, 9352), False, 'from iris import load_cube, coords\n'), ((9982, 10173), 'iris.coords.DimCoord', 'coords.DimCoord', (["coord_dict['xtn0' + domain]"], {'standard_name': '"""projection_x_coordinate"""', 'long_name': '"""x"""', 'var_name': '"""x"""', 'units': '"""m"""', 'bounds': 'None', 'attributes': 'None', 'coord_system': 'coord_system'}), "(coord_dict['xtn0' + domain], standard_name=\n 'projection_x_coordinate', long_name='x', var_name='x', units='m',\n bounds=None, attributes=None, coord_system=coord_system)\n", (9997, 10173), False, 'from iris import load_cube, coords\n'), ((10250, 10441), 'iris.coords.DimCoord', 'coords.DimCoord', (["coord_dict['ytn0' + domain]"], {'standard_name': '"""projection_y_coordinate"""', 'long_name': '"""y"""', 'var_name': '"""y"""', 'units': '"""m"""', 'bounds': 'None', 'attributes': 'None', 'coord_system': 'coord_system'}), "(coord_dict['ytn0' + domain], standard_name=\n 'projection_y_coordinate', long_name='y', var_name='y', units='m',\n bounds=None, attributes=None, coord_system=coord_system)\n", (10265, 10441), False, 'from iris import load_cube, coords\n'), ((18126, 18158), 'iris.Constraint', 'Constraint', ([], {'model_level_number': '(0)'}), '(model_level_number=0)\n', (18136, 18158), False, 'from iris import Constraint\n'), ((18654, 18686), 'iris.Constraint', 'Constraint', ([], {'model_level_number': '(0)'}), '(model_level_number=0)\n', (18664, 18686), False, 'from iris import Constraint\n'), ((19182, 19214), 'iris.Constraint', 'Constraint', ([], {'model_level_number': '(0)'}), '(model_level_number=0)\n', (19192, 19214), False, 'from iris import Constraint\n'), ((19710, 19742), 'iris.Constraint', 'Constraint', ([], {'model_level_number': '(0)'}), '(model_level_number=0)\n', (19720, 19742), False, 'from iris import Constraint\n'), ((7796, 7816), 'datetime.timedelta', 'timedelta', ([], {'seconds': '(1)'}), '(seconds=1)\n', (7805, 7816), False, 'from datetime import datetime, timedelta\n'), ((10970, 11161), 'iris.coords.DimCoord', 'coords.DimCoord', (["coord_dict['xtn0' + domain]"], {'standard_name': '"""projection_x_coordinate"""', 'long_name': '"""x"""', 'var_name': '"""x"""', 'units': '"""m"""', 'bounds': 'None', 'attributes': 'None', 'coord_system': 'coord_system'}), "(coord_dict['xtn0' + domain], standard_name=\n 'projection_x_coordinate', long_name='x', var_name='x', units='m',\n bounds=None, attributes=None, coord_system=coord_system)\n", (10985, 
11161), False, 'from iris import load_cube, coords\n'), ((11238, 11429), 'iris.coords.DimCoord', 'coords.DimCoord', (["coord_dict['ytn0' + domain]"], {'standard_name': '"""projection_y_coordinate"""', 'long_name': '"""y"""', 'var_name': '"""y"""', 'units': '"""m"""', 'bounds': 'None', 'attributes': 'None', 'coord_system': 'coord_system'}), "(coord_dict['ytn0' + domain], standard_name=\n 'projection_y_coordinate', long_name='y', var_name='y', units='m',\n bounds=None, attributes=None, coord_system=coord_system)\n", (11253, 11429), False, 'from iris import load_cube, coords\n'), ((11776, 11803), 'iris.load_cube', 'load_cube', (['filename', '"""GLAT"""'], {}), "(filename, 'GLAT')\n", (11785, 11803), False, 'from iris import load_cube, coords\n'), ((11829, 11856), 'iris.load_cube', 'load_cube', (['filename', '"""GLON"""'], {}), "(filename, 'GLON')\n", (11838, 11856), False, 'from iris import load_cube, coords\n'), ((8071, 8088), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (8080, 8088), False, 'from datetime import datetime, timedelta\n'), ((6854, 6874), 'numpy.array', 'array', (['variable_list'], {}), '(variable_list)\n', (6859, 6874), False, 'from numpy import array\n')]
|
from bootstrap3.renderers import FieldRenderer
from bootstrap3.text import text_value
from django.forms import CheckboxInput
from django.forms.utils import flatatt
from django.utils.html import format_html
from django.utils.safestring import mark_safe
from django.utils.translation import pgettext
from i18nfield.forms import I18nFormField
def render_label(content, label_for=None, label_class=None, label_title='', optional=False):
"""
Render a label with content
"""
attrs = {}
if label_for:
attrs['for'] = label_for
if label_class:
attrs['class'] = label_class
if label_title:
attrs['title'] = label_title
builder = '<{tag}{attrs}>{content}{opt}</{tag}>'
return format_html(
builder,
tag='label',
attrs=mark_safe(flatatt(attrs)) if attrs else '',
opt=mark_safe('<br><span class="optional">{}</span>'.format(pgettext('form', 'Optional'))) if optional else '',
content=text_value(content),
)
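# Usage sketch (illustrative only):
#   render_label('Name', label_for='id_name', label_class='control-label', optional=True)
# renders roughly:
#   <label for="id_name" class="control-label">Name<br><span class="optional">Optional</span></label>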
class ControlFieldRenderer(FieldRenderer):
def __init__(self, *args, **kwargs):
kwargs['layout'] = 'horizontal'
super().__init__(*args, **kwargs)
def add_label(self, html):
label = self.get_label()
if hasattr(self.field.field, '_required'):
# e.g. payment settings forms where a field is only required if the payment provider is active
required = self.field.field._required
elif isinstance(self.field.field, I18nFormField):
required = self.field.field.one_required
else:
required = self.field.field.required
html = render_label(
label,
label_for=self.field.id_for_label,
label_class=self.get_label_class(),
optional=not required and not isinstance(self.widget, CheckboxInput)
) + html
return html
|
[
"bootstrap3.text.text_value",
"django.forms.utils.flatatt",
"django.utils.translation.pgettext"
] |
[((972, 991), 'bootstrap3.text.text_value', 'text_value', (['content'], {}), '(content)\n', (982, 991), False, 'from bootstrap3.text import text_value\n'), ((802, 816), 'django.forms.utils.flatatt', 'flatatt', (['attrs'], {}), '(attrs)\n', (809, 816), False, 'from django.forms.utils import flatatt\n'), ((904, 932), 'django.utils.translation.pgettext', 'pgettext', (['"""form"""', '"""Optional"""'], {}), "('form', 'Optional')\n", (912, 932), False, 'from django.utils.translation import pgettext\n')]
|
from src.dao.dao_aluno import DaoAluno
from tests.massa_dados import aluno_nome_1
from src.enums.enums import Situacao
from src.model.aluno import Aluno
from tests.massa_dados import materia_nome_2, materia_nome_3
class TestDaoAluno:
def _setup_aluno(self, cria_banco, id=1, nome=aluno_nome_1, cr=0,
situacao=Situacao.em_curso.value):
aluno, dao = self._salva_aluno_banco(cria_banco, id, nome, cr,
situacao)
actual = dao.pega_tudo()
return actual, aluno
def _salva_aluno_banco(self, cria_banco, id, nome, cr, situacao):
aluno = Aluno(nome)
aluno.define_cr(cr)
aluno.define_id(id)
aluno.define_situacao(situacao)
dao = DaoAluno(aluno, cria_banco)
dao.salva()
return aluno, dao
def _setup_lista_alunos(self, cria_banco, id_=3,
situacao=Situacao.em_curso.value,
cr=0, nome=None):
self._setup_aluno(cria_banco)
self._setup_aluno(cria_banco)
expected, actual = self._setup_aluno(cria_banco, id=id_,
situacao=situacao,
cr=cr, nome=nome)
return expected, actual
def test_aluno_pode_ser_atualizado_banco(self, cria_banco, cria_massa_dados,
cria_curso_com_materias):
cria_massa_dados
id_ = "1"
aluno = DaoAluno(None, cria_banco).pega_por_id(id_)
curso = cria_curso_com_materias
materias = {materia_nome_2: 7, materia_nome_3: 9}
expected = 8
aluno.inscreve_curso(curso).atualiza_materias_cursadas(materias)
aluno.pega_coeficiente_rendimento(auto_calculo=True)
DaoAluno(aluno, cria_banco).atualiza(id_)
aluno = DaoAluno(None, cria_banco).pega_por_id(id_)
actual = aluno.pega_coeficiente_rendimento()
assert actual == expected
def test_dao_pega_por_id_retorna_objeto_aluno_com_id_correto(self,
cria_banco):
id_ = 3
_, expected = self._setup_lista_alunos(cria_banco, id_)
actual = DaoAluno(None, cria_banco).pega_por_id(id_)
assert actual.pega_id() == expected.pega_id()
def test_lista_alunos_recuperada_banco_com_nome_correto(self, cria_banco):
indice = 2
nome = aluno_nome_1
expected, actual = self._setup_lista_alunos(cria_banco, nome=nome)
assert actual.pega_nome() == expected[indice].pega_nome()
def test_lista_alunos_recuperada_banco_com_cr_correto(self, cria_banco):
indice = 2
cr = 9
expected, actual = self._setup_lista_alunos(cria_banco, cr=cr)
assert actual.pega_coeficiente_rendimento() == \
expected[indice].pega_coeficiente_rendimento()
def test_lista_alunos_recuperada_banco_com_situacao_correta(self,
cria_banco):
indice = 2
situacao = Situacao.reprovado.value
expected, actual = self._setup_lista_alunos(cria_banco,
situacao=situacao)
assert actual.pega_situacao() == expected[indice].pega_situacao()
def test_lista_alunos_recuperada_banco_com_id_correto(self, cria_banco):
indice = 2
expected, actual = self._setup_lista_alunos(cria_banco)
assert actual.pega_id() == expected[indice].pega_id()
def test_situacao_aluno_recuperado_banco(self, cria_banco):
situacao = "trancado"
expected, actual = self._setup_aluno(cria_banco, situacao=situacao)
assert actual.pega_situacao() == expected[0].pega_situacao()
def test_id_aluno_recuperado_banco(self, cria_banco):
id_ = 1
expected, actual = self._setup_aluno(cria_banco, id=id_)
assert actual.pega_id() == expected[0].pega_id()
def test_cr_diferente_zero_retornado_banco(self, cria_banco):
cr = 7
        expected, actual = self._setup_aluno(cria_banco, cr=cr)
assert actual.pega_coeficiente_rendimento() == \
expected[0].pega_coeficiente_rendimento()
def test_coeficiente_rendimento_objeto_aluno_recuperado_banco(self,
cria_banco):
actual, expected = self._setup_aluno(cria_banco)
assert actual[0].pega_coeficiente_rendimento() == \
expected.pega_coeficiente_rendimento()
def test_situacao_objeto_aluno_recuperado_banco(self, cria_banco):
actual, expected = self._setup_aluno(cria_banco)
assert actual[0].pega_situacao() == expected.pega_situacao()
def test_nome_objeto_aluno_recuperado_banco(self, cria_banco):
actual, expected = self._setup_aluno(cria_banco)
assert actual[0].pega_nome() == expected.pega_nome()
|
[
"src.dao.dao_aluno.DaoAluno",
"src.model.aluno.Aluno"
] |
[((638, 649), 'src.model.aluno.Aluno', 'Aluno', (['nome'], {}), '(nome)\n', (643, 649), False, 'from src.model.aluno import Aluno\n'), ((760, 787), 'src.dao.dao_aluno.DaoAluno', 'DaoAluno', (['aluno', 'cria_banco'], {}), '(aluno, cria_banco)\n', (768, 787), False, 'from src.dao.dao_aluno import DaoAluno\n'), ((1508, 1534), 'src.dao.dao_aluno.DaoAluno', 'DaoAluno', (['None', 'cria_banco'], {}), '(None, cria_banco)\n', (1516, 1534), False, 'from src.dao.dao_aluno import DaoAluno\n'), ((1813, 1840), 'src.dao.dao_aluno.DaoAluno', 'DaoAluno', (['aluno', 'cria_banco'], {}), '(aluno, cria_banco)\n', (1821, 1840), False, 'from src.dao.dao_aluno import DaoAluno\n'), ((1871, 1897), 'src.dao.dao_aluno.DaoAluno', 'DaoAluno', (['None', 'cria_banco'], {}), '(None, cria_banco)\n', (1879, 1897), False, 'from src.dao.dao_aluno import DaoAluno\n'), ((2249, 2275), 'src.dao.dao_aluno.DaoAluno', 'DaoAluno', (['None', 'cria_banco'], {}), '(None, cria_banco)\n', (2257, 2275), False, 'from src.dao.dao_aluno import DaoAluno\n')]
|
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.permissions import IsAuthenticated
from django.views.generic import TemplateView
from django.conf import settings
from django.contrib.auth import get_user_model
from django.core.mail import send_mail
from django.shortcuts import redirect, render
from django.utils.html import mark_safe
User = get_user_model()
def message_view(request, message=None, title=None):
"""
provides a generic way to render any old message in a template
(used for when a user is disabled, or unapproved, or unverified, etc.)
"""
context = {"message": mark_safe(message), "title": title or settings.PROJECT_NAME}
return render(request, "core/message.html", context)
def home_page(request):
# print(request.session.get("first_name", "Unknown"))
# request.session['first_name']
context = {
"title": "Hello World!",
"content": " Welcome to the homepage.",
}
if request.user.is_authenticated:
context["premium_content"] = "YEAHHHHHH"
return render(request, "core/index.html", context)
class IndexView(TemplateView):
template_name = "core/index.html"
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context["users"] = User.objects.filter(is_active=True)
# context["customers"] = Customer.objects.filter(is_active=True)
return context
|
[
"django.shortcuts.render",
"django.utils.html.mark_safe",
"django.contrib.auth.get_user_model"
] |
[((403, 419), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (417, 419), False, 'from django.contrib.auth import get_user_model\n'), ((731, 776), 'django.shortcuts.render', 'render', (['request', '"""core/message.html"""', 'context'], {}), "(request, 'core/message.html', context)\n", (737, 776), False, 'from django.shortcuts import redirect, render\n'), ((1099, 1142), 'django.shortcuts.render', 'render', (['request', '"""core/index.html"""', 'context'], {}), "(request, 'core/index.html', context)\n", (1105, 1142), False, 'from django.shortcuts import redirect, render\n'), ((659, 677), 'django.utils.html.mark_safe', 'mark_safe', (['message'], {}), '(message)\n', (668, 677), False, 'from django.utils.html import mark_safe\n')]
|
# Defines the data models used within the application
#
# See the Django documentation at https://docs.djangoproject.com/en/1.6/topics/db/models/
from django.conf import settings
from django.core.exceptions import ValidationError
from django.core.mail import send_mail
from django.db import models
from django.db.models import Q
from django.db.models.signals import post_save, post_delete
from django.dispatch import receiver
from django.contrib.auth.models import User, Group
from django.contrib.admin.models import LogEntry
from django.core.urlresolvers import reverse
from django.utils.html import format_html
from django.utils import timezone
from itertools import chain
from decimal import Decimal
from datetime import datetime, date, timedelta, tzinfo
from dateutil.tz import tzutc, tzlocal
from multiselectfield import MultiSelectField
import reversion
def get_value_from_choices(choices, code_to_find):
"""Returns the value that corresponds to the given code in the list of choices.
This is used to translate a code value, as stored in the database, to its
corresponding text value from the choices tuple.
"""
return next((value for code, value in choices if code == code_to_find), '')
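# Illustrative example (not part of the original module):
#   CHOICES = ((0, 'New'), (1, 'Award Intake'))
#   get_value_from_choices(CHOICES, 1)   ->  'Award Intake'
#   get_value_from_choices(CHOICES, 99)  ->  ''   (unknown codes fall back to the empty string)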
class FieldIteratorMixin(models.Model):
"""Returns the verbose_name and value for each non-HIDDEN_FIELD on an object"""
def _get_field(self, field):
"""Gets the specified field from the model"""
model_field = self._meta.get_field(field)
name = model_field.verbose_name
if model_field.choices:
display_method = getattr(self, 'get_' + field + '_display')
data = display_method()
else:
data = getattr(self, field)
boolean_field = isinstance(model_field, models.NullBooleanField)
return (name, data, boolean_field)
def _get_field_full(self, field):
"""Gets the specified field from the model, along with the field name"""
model_field = self._meta.get_field(field)
name = model_field.verbose_name
if model_field.choices:
display_method = getattr(self, 'get_' + field + '_display')
data = display_method()
else:
data = getattr(self, field)
boolean_field = isinstance(model_field, models.NullBooleanField)
return (name, data, boolean_field, model_field.name)
def get_model_fields(self):
"""Gets all fields from the model that aren't defined in HIDDEN_FIELDS"""
fields = [field.name for field in self._meta.fields]
fields.remove('id')
for field in self.HIDDEN_FIELDS:
fields.remove(field)
return fields
def get_table_fields(self):
"""Gets all fields from the model to display in table format
Fields defined in HIDDEN_TABLE_FIELDS are excluded.
"""
fields = self.get_model_fields()
for field in self.HIDDEN_TABLE_FIELDS:
fields.remove(field)
field_data = [self._get_field(field) for field in fields]
return field_data
def get_all_fields(self):
"""Gets all non-HIDDEN_FIELDs from the model and their data"""
fields = self.get_model_fields()
field_data = [self._get_field(field) for field in fields]
return field_data
def get_search_fields(self):
"""Gets fields necessary for searching
Fields defined in HIDDEN_SEARCH_FIELDS are excluded
"""
fields = self.get_model_fields()
for field in self.HIDDEN_SEARCH_FIELDS:
fields.remove(field)
field_data = [self._get_field_full(field) for field in fields]
if isinstance(self, Subaward) and hasattr(self, 'comments'):
field_data.append(self._get_field_full('comments'))
return field_data
def get_fieldsets(self):
"""Gets the model's fields and separates them out into the defined FIELDSETS"""
fields = self.get_model_fields()
fieldset_data = []
for fieldset in self.FIELDSETS:
fieldset_fields = []
for field in fieldset['fields']:
fieldset_fields.append(self._get_field(field))
fields.remove(field)
fieldset_data.append((fieldset['title'], fieldset_fields))
if hasattr(self, 'DISPLAY_TABLES'):
for display_table in self.DISPLAY_TABLES:
for row in display_table['rows']:
for field in row['fields']:
fields.remove(field)
fieldset_data.append(
(None, [self._get_field(field) for field in fields]))
return fieldset_data
def get_display_tables(self):
"""Gets the fields and data defined in DISPLAY_TABLES for tabular display"""
display_tables = []
for item in self.DISPLAY_TABLES:
rows = []
for row in item['rows']:
data = {'label': row['label']}
data['fields'] = [
self._get_field(field) for field in row['fields']]
rows.append(data)
display_table = {
'title': item['title'],
'columns': item['columns'],
'rows': rows}
display_tables.append(display_table)
return display_tables
def get_award_setup_report_fields(self):
"""Gets the fields needed for EAS report"""
return [self._get_field(field) for field in self.EAS_REPORT_FIELDS]
class Meta:
abstract = True
class EASUpdateMixin(object):
"""If it's expired or inactive, unset this object from any foriegn key fields"""
def save(self, *args, **kwargs):
super(EASUpdateMixin, self).save(*args, **kwargs)
expired = False
if hasattr(self, 'end_date'):
if self.end_date:
if isinstance(self.end_date, date):
expired = self.end_date < date.today()
else:
expired = self.end_date < datetime.now()
else:
expired = False
if not self.active or expired:
for related_object in self._meta.get_all_related_objects():
accessor_name = related_object.get_accessor_name()
if not hasattr(self, accessor_name):
break
                related_queryset = getattr(self, accessor_name)
field_name = related_object.field.name
for item in related_queryset.all():
setattr(item, field_name, None)
item.save()
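# Illustrative note (assumed, not part of the original code): if, for example,
# an AllowedCostSchedule row is saved as inactive or past its end_date, every
# related object that references it through a ForeignKey gets that field set
# to None and is re-saved, so expired EAS lookup values are not left attached.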
class AllowedCostSchedule(EASUpdateMixin, models.Model):
"""Model for the AllowedCostSchedule data"""
EAS_FIELD_ORDER = [
'id',
'name',
'end_date',
'active'
]
id = models.BigIntegerField(primary_key=True, unique=True)
name = models.CharField(max_length=30)
end_date = models.DateField(null=True, blank=True)
active = models.BooleanField()
def __unicode__(self):
return self.name
class Meta:
ordering = ['name']
class AwardManager(FieldIteratorMixin, EASUpdateMixin, models.Model):
"""Model for the AwardManager data"""
EAS_FIELD_ORDER = [
'id',
'full_name',
'gwid',
'system_user',
'end_date',
'active'
]
CAYUSE_FIELDS = [
'title',
'first_name',
'middle_name',
'last_name',
'phone',
'email'
]
FIELDSETS = []
HIDDEN_FIELDS = [
'system_user',
'end_date',
'active',
'first_name',
'middle_name',
'last_name'
]
id = models.BigIntegerField(primary_key=True, unique=True)
full_name = models.CharField(max_length=240)
gwid = models.CharField(
max_length=150,
blank=True,
null=True,
verbose_name='GWID')
system_user = models.BooleanField()
end_date = models.DateField(null=True, blank=True)
active = models.BooleanField()
# Cayuse fields
title = models.CharField(max_length=64, blank=True, null=True)
first_name = models.CharField(max_length=64, blank=True)
middle_name = models.CharField(max_length=32, blank=True)
last_name = models.CharField(max_length=64, blank=True)
phone = models.CharField(max_length=32, blank=True, null=True)
email = models.CharField(max_length=64, blank=True, null=True)
def __unicode__(self):
return self.full_name
class AwardOrganization(EASUpdateMixin, models.Model):
"""Model for the AwardOrganization data"""
EAS_FIELD_ORDER = [
'id',
'name',
'organization_type',
'org_info1_meaning',
'org_info2_meaning',
'end_date',
'active'
]
id = models.BigIntegerField(primary_key=True, unique=True)
name = models.CharField(max_length=240)
organization_type = models.CharField(max_length=30, blank=True, null=True)
org_info1_meaning = models.CharField(max_length=80)
org_info2_meaning = models.CharField(max_length=80)
end_date = models.DateField(null=True, blank=True)
active = models.BooleanField()
def __unicode__(self):
return self.name
class Meta:
ordering = ['name']
class AwardTemplate(EASUpdateMixin, models.Model):
"""Model for the AwardTemplate data"""
EAS_FIELD_ORDER = [
'id',
'number',
'short_name',
'active'
]
id = models.BigIntegerField(primary_key=True, unique=True)
number = models.CharField(max_length=15)
short_name = models.CharField(max_length=30)
active = models.BooleanField()
def __unicode__(self):
return u'%s - %s' % (self.number, self.short_name)
class Meta:
ordering = ['number']
class CFDANumber(EASUpdateMixin, models.Model):
"""Model for the CFDANumber data"""
EAS_FIELD_ORDER = [
'flex_value',
'description',
'end_date',
'active'
]
flex_value = models.CharField(
max_length=150,
primary_key=True,
unique=True)
description = models.CharField(max_length=240)
end_date = models.DateField(null=True, blank=True)
active = models.BooleanField()
def __unicode__(self):
return u'%s - %s' % (self.flex_value, self.description)
class Meta:
ordering = ['flex_value']
class FedNegRate(EASUpdateMixin, models.Model):
"""Model for the FedNegRate data"""
EAS_FIELD_ORDER = [
'flex_value',
'description',
'end_date',
'active'
]
flex_value = models.CharField(
max_length=150,
primary_key=True,
unique=True)
description = models.CharField(max_length=240)
end_date = models.DateField(null=True, blank=True)
active = models.BooleanField()
def __unicode__(self):
return self.description
class Meta:
ordering = ['description']
class FundingSource(EASUpdateMixin, models.Model):
"""Model for the FundingSource data"""
EAS_FIELD_ORDER = [
'name',
'number',
'id',
'active',
'end_date'
]
id = models.BigIntegerField(primary_key=True, unique=True)
name = models.CharField(max_length=50)
number = models.CharField(max_length=10)
end_date = models.DateField(null=True, blank=True)
active = models.BooleanField()
def __unicode__(self):
return u'%s - %s' % (self.number, self.name)
class Meta:
ordering = ['number']
class IndirectCost(EASUpdateMixin, models.Model):
"""Model for the IndirectCost data"""
EAS_FIELD_ORDER = [
'id',
'rate_schedule',
'end_date',
'active'
]
id = models.BigIntegerField(primary_key=True, unique=True)
rate_schedule = models.CharField(max_length=30)
end_date = models.DateField(null=True, blank=True)
active = models.BooleanField()
def __unicode__(self):
return self.rate_schedule
class Meta:
ordering = ['rate_schedule']
class PrimeSponsor(EASUpdateMixin, models.Model):
"""Model for the PrimeSponsor data"""
EAS_FIELD_ORDER = [
'name',
'number',
'id',
'active',
]
id = models.BigIntegerField(primary_key=True, unique=True)
name = models.CharField(max_length=50)
number = models.IntegerField()
active = models.BooleanField()
def __unicode__(self):
return self.name
class Meta:
ordering = ['name']
class EASMapping(models.Model):
"""Model used to define a mapping between EAS data and the corresponding value in ATP"""
INTERFACE_CHOICES = (
('C', 'Cayuse'),
('L', 'Lotus'),
)
interface = models.CharField(
choices=INTERFACE_CHOICES,
max_length=1,
default='C')
field = models.CharField(max_length=50)
incoming_value = models.CharField(max_length=250)
atp_model = models.CharField(max_length=50)
atp_pk = models.IntegerField()
def __unicode__(self):
return u'(%s) %s=%s -> %s=%s' % (self.interface,
self.field,
self.incoming_value,
self.atp_model,
self.atp_pk)
class Meta:
unique_together = (
'interface',
'field',
'incoming_value',
'atp_model',
'atp_pk')
class EASMappingException(Exception):
"""Custom exception import processes throw when a new mapping is required"""
def __init__(self, message, interface, field, incoming_value, atp_model):
super(EASMappingException, self).__init__(self, message)
self.interface = interface
self.field = field
self.incoming_value = incoming_value
self.atp_model = atp_model
class ATPAuditTrail(models.Model):
"""It is used internally to track each point of time when an award assinged and completed from a particular stage"""
award = models.IntegerField()
modification = models.CharField(max_length=100)
workflow_step = models.CharField(max_length=100)
date_created = models.DateTimeField(blank=True, null=True)
date_completed = models.DateTimeField(blank=True, null=True)
assigned_user = models.CharField(max_length=100)
class Award(models.Model):
"""The primary model"""
WAIT_FOR = {'RB': 'Revised Budget', 'PA': 'PI Access', 'CA': 'Cost Share Approval', 'FC': 'FCOI',
'PS': 'Proposal Submission', 'SC': 'Sponsor Clarity', 'NO': 'New Org needed',
'IC': 'Internal Clarification', 'DC': 'Documents not in GW Docs'
}
# These fields aren't displayed by the FieldIteratorMixin
HIDDEN_FIELDS = [
'subaward_done',
'award_management_done',
'extracted_to_eas',
]
# Workflow statuses
STATUS_CHOICES = (
(0, 'New'),
(1, 'Award Intake'),
(2, 'Award Negotiation'),
(3, 'Award Setup'),
(4, 'Subaward & Award Management'),
(5, 'Award Closeout'),
(6, 'Complete'),
)
# A mapping for which sections are active in which statuses
STATUS_SECTION_MAPPING = [
[],
['AwardAcceptance'],
['AwardNegotiation'],
['AwardSetup', 'AwardModification'],
['Subaward', 'AwardManagement'],
['AwardCloseout'],
[]
]
# A mapping for relevant user fields, groups, URLs, and statuses for each section
SECTION_FIELD_MAPPING = {
'ProposalIntake': {
'user_field': None,
'group': 'Proposal Intake',
'edit_url': 'edit_proposal_intake',
'edit_status': 0},
'AwardAcceptance': {
'user_field': 'award_acceptance_user',
'group': 'Award Acceptance',
'edit_url': 'edit_award_acceptance',
'edit_status': 1},
'AwardNegotiation': {
'user_field': 'award_negotiation_user',
'group': 'Award Negotiation',
'edit_url': 'edit_award_negotiation',
'edit_status': 2},
'AwardSetup': {
'user_field': 'award_setup_user',
'group': 'Award Setup',
'edit_url': 'edit_award_setup',
'edit_status': 3},
'AwardModification': {
'user_field': 'award_modification_user',
'group': 'Award Modification',
'edit_url': 'edit_award_setup',
'edit_status': 3},
'Subaward': {
'user_field': 'subaward_user',
'group': 'Subaward Management',
'edit_url': 'edit_subawards',
'edit_status': 4},
'AwardManagement': {
'user_field': 'award_management_user',
'group': 'Award Management',
'edit_url': 'edit_award_management',
'edit_status': 4},
'AwardCloseout': {
'user_field': 'award_closeout_user',
'group': 'Award Closeout',
'edit_url': 'edit_award_closeout',
'edit_status': 5},
}
# Associates subsections with their parent sections (used in edit permission checks)
SECTION_PARENT_MAPPING = {
'PTANumber': 'AwardSetup',
'PriorApproval': 'AwardManagement',
'ReportSubmission': 'AwardManagement',
'FinalReport': 'AwardCloseout',
}
START_STATUS = 0
END_STATUS = 6
AWARD_SETUP_STATUS = 3
AWARD_ACCEPTANCE_STATUS = 1
status = models.IntegerField(choices=STATUS_CHOICES, default=0)
creation_date = models.DateField(auto_now_add=True)
extracted_to_eas = models.BooleanField(default=False)
# Limit assignment users to members of the appropriate group
award_acceptance_user = models.ForeignKey(
User,
related_name='+',
verbose_name='Award Intake User',
limit_choices_to=Q(
groups__name='Award Acceptance'))
award_negotiation_user = models.ForeignKey(
User,
null=True,
blank=True,
related_name='+',
verbose_name='Award Negotiation User',
limit_choices_to=Q(
groups__name='Award Negotiation'))
award_setup_user = models.ForeignKey(
User,
related_name='+',
verbose_name='Award Setup User',
limit_choices_to=Q(
groups__name='Award Setup'))
award_modification_user = models.ForeignKey(
User,
null=True,
blank=True,
related_name='+',
verbose_name='Award Modification User',
limit_choices_to=Q(
groups__name='Award Modification'))
subaward_user = models.ForeignKey(
User,
null=True,
blank=True,
related_name='+',
verbose_name='Subaward User',
limit_choices_to=Q(
groups__name='Subaward Management'))
award_management_user = models.ForeignKey(
User,
related_name='+',
verbose_name='Award Management User',
limit_choices_to=Q(
groups__name='Award Management'))
award_closeout_user = models.ForeignKey(
User,
related_name='+',
verbose_name='Award Closeout User',
limit_choices_to=Q(
groups__name='Award Closeout'))
# Because these two sections are active in the same status, we need to
# track their completion independently
subaward_done = models.BooleanField(default=False)
award_management_done = models.BooleanField(default=False)
send_to_modification = models.BooleanField(default=False)
send_to_setup = models.BooleanField(default=False)
common_modification = models.BooleanField(default=False)
award_dual_negotiation = models.BooleanField(default=False)
award_dual_setup = models.BooleanField(default=False)
award_dual_modification = models.BooleanField(default=False)
award_text = models.CharField(max_length=50, blank=True, null=True)
# If an award has a proposal, use that to determine its name. Otherwise,
# use its internal ID
def __unicode__(self):
proposal = self.get_first_real_proposal()
if proposal and proposal.get_unique_identifier() != '':
return u'Award for proposal #%s' % proposal.get_unique_identifier()
else:
return u'Award #%s' % self.id
@classmethod
def get_priority_assignments_for_award_setup_user(cls, user):
assignment_list = []
assign_filter = cls.objects.filter(
(Q(Q(award_setup_user=user) & Q(status=2) & Q(award_dual_setup=True)) | Q(Q(award_setup_user=user) & Q(status=3) & Q(award_dual_setup=True))) |
(Q(award_setup_user=user) & Q(status=3) & Q(send_to_modification=False)) |
(Q(award_modification_user=user) & Q(status=3) & Q(send_to_modification=True)) |
(Q(award_modification_user=user) & Q(status=2) & Q(award_dual_modification=True))
)
award_ids = []
temp_ids = []
award_assignments = []
for award_ in assign_filter:
award_ids.append(award_.id)
assignments_on = AwardAcceptance.objects.filter(award_id__in=award_ids, award_setup_priority='on',
current_modification=True).order_by('creation_date')
assignments_tw = AwardAcceptance.objects.filter(award_id__in=award_ids, award_setup_priority='tw',
current_modification=True).order_by('creation_date')
assignments_th = AwardAcceptance.objects.filter(award_id__in=award_ids, award_setup_priority='th',
current_modification=True).order_by('creation_date')
assignments_fo = AwardAcceptance.objects.filter(award_id__in=award_ids, award_setup_priority='fo',
current_modification=True).order_by('creation_date')
assignments_fi = AwardAcceptance.objects.filter(award_id__in=award_ids, award_setup_priority='fi',
current_modification=True).order_by('creation_date')
assignments_ni = AwardAcceptance.objects.filter(award_id__in=award_ids, award_setup_priority='ni',
current_modification=True).order_by('creation_date')
assignments_none = AwardAcceptance.objects.filter(award_id__in=award_ids, award_setup_priority='',
current_modification=True).order_by('creation_date')
assignments = list(chain(assignments_on, assignments_tw, assignments_th,
assignments_fo, assignments_fi, assignments_ni, assignments_none))
for award in assignments:
if award.award_id in award_ids:
temp_ids.append(award.award_id)
assignments = cls.objects.filter(id__in=temp_ids)
for id in temp_ids:
for award in assignments:
if award.id == id:
award_assignments.append(award)
for award in award_assignments:
active_sections = award.STATUS_SECTION_MAPPING[award.status]
for section in active_sections:
for user_group in user.groups.all():
if section == 'AwardNegotiation' and user_group.name == 'Award Setup':
section = 'AwardSetup'
if section == 'AwardNegotiation' and user_group.name == 'Award Modification':
section = 'AwardModification'
if award.get_user_for_section(section) == user:
edit_url = reverse(
award.SECTION_FIELD_MAPPING[section]['edit_url'],
kwargs={
'award_pk': award.pk})
assignment_list.append((award, edit_url))
return assignment_list
@classmethod
def get_assignments_for_user(cls, user):
"""Given a user, find all currently assigned awards"""
assignments = cls.objects.filter(
(Q(award_acceptance_user=user) & Q(status=1)) |
(Q(Q(award_negotiation_user=user) & Q(status=2)) | Q(Q(award_negotiation_user=user) & Q(status=2) & Q(award_dual_negotiation=True))) |
(Q(Q(award_setup_user=user) & Q(status=2) & Q(award_dual_setup=True)) | Q(Q(award_setup_user=user) & Q(status=3) & Q(award_dual_setup=True))) |
(Q(award_setup_user=user) & Q(status=3) & Q(send_to_modification=False)) |
(Q(award_modification_user=user) & Q(status=3) & Q(Q(send_to_modification=True))) |
(Q(award_modification_user=user) & Q(status=2) & Q(Q(award_dual_modification=True))) |
(Q(subaward_user=user) & Q(status=4)) |
(Q(award_management_user=user) & Q(status=4)) |
(Q(award_closeout_user=user) & Q(status=5))
)
assignment_list = []
for award in assignments:
active_sections = award.STATUS_SECTION_MAPPING[award.status]
for section in active_sections:
for user_group in user.groups.all():
if section == 'AwardNegotiation' and user_group.name == 'Award Setup':
section = 'AwardSetup'
if section == 'AwardNegotiation' and user_group.name == 'Award Modification':
section = 'AwardModification'
if award.get_user_for_section(section) == user:
edit_url = reverse(
award.SECTION_FIELD_MAPPING[section]['edit_url'],
kwargs={
'award_pk': award.pk})
assignment_list.append((award, edit_url))
return assignment_list
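    # Usage sketch (illustrative): a dashboard view would typically call
    #   assignments = Award.get_assignments_for_user(request.user)
    # and receive a list of (award, edit_url) tuples, one for each award whose
    # currently active section is assigned to that user.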
def get_absolute_url(self):
"""Gets the URL used to navigate to this object"""
return reverse('award_detail', kwargs={'award_pk': self.pk})
def save(self, *args, **kwargs):
# On initial save, create a dummy proposal and blank sections
if not self.pk:
super(Award, self).save(*args, **kwargs)
Proposal.objects.create(award=self, dummy=True)
AwardAcceptance.objects.create(award=self)
AwardNegotiation.objects.create(award=self)
AwardSetup.objects.create(award=self)
AwardManagement.objects.create(award=self)
AwardCloseout.objects.create(award=self)
else:
check_status = kwargs.pop('check_status', True)
try:
old_object = Award.objects.get(pk=self.pk)
except Award.DoesNotExist:
super(Award, self).save(*args, **kwargs)
return
if any([self.award_acceptance_user != old_object.award_acceptance_user, self.award_closeout_user != old_object.award_closeout_user,
self.award_management_user != old_object.award_management_user, self.award_modification_user != old_object.award_modification_user,
self.award_negotiation_user != old_object.award_negotiation_user, self.award_setup_user != old_object.award_setup_user]):
self.send_to_setup = old_object.send_to_setup
self.send_to_modification = old_object.send_to_modification
self.common_modification = old_object.common_modification
self.award_dual_modification = old_object.award_dual_modification
self.award_dual_setup = old_object.award_dual_setup
self.award_dual_negotiation = old_object.award_dual_negotiation
super(Award, self).save(*args, **kwargs)
if check_status and old_object.status > 1 and self.status == 1 and self.get_current_award_acceptance().phs_funded:
self.send_phs_funded_notification()
def get_proposals(self):
"""Gets all Proposals associated with this Award"""
proposals = []
first_proposal = self.get_first_real_proposal()
if first_proposal:
proposals.append(first_proposal)
proposals.extend(self.get_supplemental_proposals())
return proposals
def get_first_real_proposal(self):
"""Gets the first non-dummy Proposal associated with this Award"""
try:
first_proposal = self.proposal_set.get(
is_first_proposal=True,
dummy=False)
except Proposal.DoesNotExist:
first_proposal = None
return first_proposal
def get_supplemental_proposals(self):
"""Gets all non-dummy Proposals after the first one"""
first_proposal = self.get_first_real_proposal()
supplemental_proposals = None
if first_proposal:
supplemental_proposals = self.proposal_set.filter(dummy=False).exclude(id=first_proposal.id).order_by('id')
return supplemental_proposals
def get_most_recent_proposal(self):
"""Gets the most recent Proposal"""
return self.proposal_set.filter(dummy=False).order_by('id').last()
def get_current_award_acceptance(self, acceptance_flag=False):
if acceptance_flag:
acceptance_object = self.awardacceptance_set.filter(current_modification=True)
if acceptance_object:
return acceptance_object[0]
else:
acceptance_object = AwardAcceptance()
return acceptance_object
award_acceptance = self.awardacceptance_set.filter(current_modification=True).order_by('-creation_date')
if len(award_acceptance) > 1:
for award in award_acceptance[1:]:
award.current_modification = False
award.save()
return award_acceptance[0]
else:
return self.awardacceptance_set.get(current_modification=True)
def get_previous_award_acceptances(self):
return self.awardacceptance_set.filter(current_modification=False)
def get_current_award_negotiation(self):
try:
negotiation_obj = self.awardnegotiation_set.get(current_modification=True)
except:
negotiation_obj = None
award_negotiation = self.awardnegotiation_set.filter(current_modification=True).order_by('-date_assigned')
if len(award_negotiation) > 1:
for award in award_negotiation[1:]:
award.current_modification = False
award.save()
return award_negotiation[0]
elif negotiation_obj:
return self.awardnegotiation_set.get(current_modification=True)
else:
return AwardNegotiation()
def get_previous_award_negotiations(self):
return self.awardnegotiation_set.filter(current_modification=False)
def get_first_pta_number(self):
pta_number = self.ptanumber_set.all().order_by('id')[:1]
if pta_number:
return pta_number[0]
else:
return None
def get_award_numbers(self):
"""Returns a comma-delimited string of award numbers from all PTANumbers in this Award"""
award_numbers = self.ptanumber_set.exclude(award_number='').values_list('award_number', flat=True)
return ', '.join(award_numbers)
def get_date_assigned_to_current_stage(self):
"""Returns the date this Award was moved on to its current stage"""
dates_assigned = []
for section in self.get_active_sections():
try:
if section == 'AwardAcceptance':
correct_instance = AwardAcceptance.objects.get(award=self, current_modification=True)
local_date = correct_instance.creation_date.astimezone(tzlocal())
dates_assigned.append(local_date.strftime('%m/%d/%Y'))
elif section == 'Subaward' or section == 'AwardManagement':
if Subaward.objects.filter(award=self).count() > 0:
correct_instance = Subaward.objects.filter(award=self).latest('creation_date')
local_date = correct_instance.creation_date.astimezone(tzlocal())
dates_assigned.append(local_date.strftime('%m/%d/%Y'))
else:
correct_instance = AwardManagement.objects.get(award=self)
local_date = correct_instance.date_assigned.astimezone(tzlocal())
dates_assigned.append(local_date.strftime('%m/%d/%Y'))
else:
if section == 'AwardNegotiation':
correct_instance = AwardNegotiation.objects.get(award=self, current_modification=True)
elif section == 'AwardSetup':
correct_instance = AwardSetup.objects.get(award=self)
elif section == 'AwardCloseout':
correct_instance = AwardCloseout.objects.get(award=self)
if correct_instance.date_assigned:
local_date = correct_instance.date_assigned.astimezone(tzlocal())
dates_assigned.append(local_date.strftime('%m/%d/%Y'))
except:
pass
dates_assigned = list(set(dates_assigned))
if len(dates_assigned) > 0:
return ', '.join(dates_assigned)
else:
return ''
def get_user_for_section(self, section, modification_flag=False):
"""Uses the SECTION_PARENT_MAPPING to determine the user assigned to the given section"""
if section == 'AwardSetup' and self.award_dual_modification:
section = 'AwardModification'
if modification_flag:
section = 'AwardModification'
if section in self.SECTION_PARENT_MAPPING:
section = self.SECTION_PARENT_MAPPING[section]
try:
return getattr(
self,
self.SECTION_FIELD_MAPPING[section]['user_field'])
except TypeError:
return None
def get_current_award_status_for_display(self):
return 'Award Negotiation and Setup'
def get_award_setup_modification_status(self):
if self.status == 2:
return True
else:
return False
def get_active_sections(self, dual_mode=False):
"""Gets the names of the currently active sections"""
if self.status == self.AWARD_SETUP_STATUS:
active_sections = ['AwardSetup']
elif dual_mode:
active_sections = ['AwardNegotiation', 'AwardSetup']
else:
active_sections = self.STATUS_SECTION_MAPPING[self.status]
return active_sections
def get_users_for_dual_active_sections(self):
active_users = []
for section in ['AwardNegotiation', 'AwardSetup']:
user = self.get_user_for_section(section)
if user:
active_users.append(user)
return active_users
def get_users_for_negotiation_and_moidification_sections(self):
active_users = []
for section in ['AwardNegotiation', 'AwardModification']:
user = self.get_user_for_section(section)
if user:
active_users.append(user)
return active_users
def get_users_for_active_sections(self, section_flag=False):
"""Gets the users assigned to the currently active sections"""
active_users = []
if self.status == 3 and self.send_to_modification:
user_section = "AwardModification"
user = self.get_user_for_section(user_section)
if user:
active_users.append(user)
return active_users
for section in self.get_active_sections():
user = self.get_user_for_section(section)
if user:
active_users.append(user)
return active_users
def get_current_active_users(self):
"""Returns a comma-delimited list of users assigned to the currently active sections"""
if self.award_dual_setup and self.award_dual_negotiation and self.status == 2:
users = self.get_users_for_dual_active_sections()
elif self.award_dual_modification and self.status == 2:
users = self.get_users_for_negotiation_and_moidification_sections()
else:
users = self.get_users_for_active_sections()
names = []
for user in users:
names.append(user.get_full_name())
return ', '.join(names)
def get_award_priority_number(self):
award_accept = self.awardacceptance_set.get(award_id=self.id, current_modification=True)
if award_accept.award_setup_priority:
return AwardAcceptance.PRIORITY_STATUS_DICT[award_accept.award_setup_priority]
else:
return ''
def get_edit_status_for_section(self, section, setup_flow_flag=False):
"""Gets the edit_status for the given section"""
if setup_flow_flag:
return self.SECTION_FIELD_MAPPING['AwardNegotiation']['edit_status']
if section in self.SECTION_PARENT_MAPPING:
section = self.SECTION_PARENT_MAPPING[section]
return self.SECTION_FIELD_MAPPING[section]['edit_status']
def get_editable_sections(self):
"""Returns a list of editable sections.
A section is editable if the Award's status is at or beyond that section
"""
if self.award_dual_negotiation and self.award_dual_setup:
editable_sections = [section for section in self.SECTION_FIELD_MAPPING.keys(
) if self.SECTION_FIELD_MAPPING[section]['edit_status'] <= self.status + 1]
else:
editable_sections = [section for section in self.SECTION_FIELD_MAPPING.keys(
) if self.SECTION_FIELD_MAPPING[section]['edit_status'] <= self.status]
return editable_sections
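    # Illustrative note (not part of the original code): for an award at
    # status 3 (Award Setup) without the dual setup/negotiation flags, the
    # editable sections are ProposalIntake, AwardAcceptance, AwardNegotiation,
    # AwardSetup and AwardModification, since each has an edit_status of 3 or
    # lower in SECTION_FIELD_MAPPING.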
def send_email_update_if_subaward_user(self):
"""Sends an email update to subaward user if the award send to award setup"""
recipients = [self.get_user_for_section('Subaward').email]
pi_name = ''
most_recent_proposal = self.get_most_recent_proposal()
if most_recent_proposal:
pi_name = ' (PI: {0})'.format(most_recent_proposal.principal_investigator)
send_mail(
'OVPR ATP Update',
'Award for proposal #%s%s has been assigned to Award Setup in ATP. Go to %s%s to review it.' %
(self.id,
pi_name,
settings.EMAIL_URL_HOSTNAME,
self.get_absolute_url()),
'reply<EMAIL>',
recipients,
fail_silently=False)
def send_email_update(self, modification_flag=False):
"""Sends an email update to a user when they've been assigned an active section"""
if self.status == 1:
origional_text = 'Original Award'
workflow = 'AwardAcceptance'
acceptance_count = AwardAcceptance.objects.filter(award=self).count()
if acceptance_count < 2:
self.record_current_state_to_atptrail(origional_text, workflow)
else:
modification = "Modification #%s" % (acceptance_count - 1)
self.record_current_state_to_atptrail(modification, workflow)
if modification_flag:
recipients = [self.get_user_for_section('AwardSetup', modification_flag).email]
else:
if self.award_dual_negotiation and self.award_dual_setup:
recipients = [user.email for user in self.get_users_for_dual_active_sections()]
elif self.award_dual_modification:
recipients = [user.email for user in self.get_users_for_negotiation_and_moidification_sections()]
else:
recipients = [user.email for user in self.get_users_for_active_sections()]
pi_name = ''
most_recent_proposal = self.get_most_recent_proposal()
if most_recent_proposal:
pi_name = ' (PI: {0})'.format(most_recent_proposal.principal_investigator)
send_mail(
'OVPR ATP Update',
'%s%s has been assigned to you in ATP. Go to %s%s to review it.' %
(self,
pi_name,
settings.EMAIL_URL_HOSTNAME,
self.get_absolute_url()),
'<EMAIL>',
recipients,
fail_silently=False)
def send_award_setup_notification(self):
"""Sends an email to the AwardAcceptance user to let them know the award is in Award Setup"""
recipients = [self.get_user_for_section('AwardAcceptance').email]
send_mail(
'OVPR ATP Update',
'%s has been sent to the Award Setup step. This email is simply a notification \
- you are not assigned to perform Award Setup for this award. \
You can view it here: %s%s' %
(self,
settings.EMAIL_URL_HOSTNAME,
self.get_absolute_url()),
'<EMAIL>',
recipients,
fail_silently=False)
def send_fcoi_cleared_notification(self, fcoi_cleared_date):
"""Sends an email to the AwardSetup user when the Award's fcoi_cleared_date is set"""
recipients = [self.get_user_for_section('AwardSetup').email]
send_mail('OVPR ATP Update',
'The FCOI cleared date has been entered on %s - it is %s. \
You can view it here: %s%s' % (self, fcoi_cleared_date, settings.EMAIL_URL_HOSTNAME, self.get_absolute_url()),
'<EMAIL>',
recipients, fail_silently=False)
def send_phs_funded_notification(self):
"""Sends an email to the PHS_FUNDED_RECIPIENTS when the Award has been marked as PHS funded"""
recipients = settings.PHS_FUNDED_RECIPIENTS
send_mail('OVPR ATP Update',
'PHS funded for %s has been received and requires FCOI verification. \
Please go to %s%s to review it.' % (self, settings.EMAIL_URL_HOSTNAME, self.get_absolute_url()),
'<EMAIL>',
recipients, fail_silently=False)
def send_phs_funded_notification_with_modification(self):
"""Sends an email to the PHS_FUNDED_RECIPIENTS when and Award Modification is created
and it's marked as PHS funded
"""
recipients = settings.PHS_FUNDED_RECIPIENTS
send_mail('OVPR ATP Update',
'PHS funded for %s (Modification) has been received and may require FCOI verification. \
Please go to %s%s to review it.' % (self, settings.EMAIL_URL_HOSTNAME, self.get_absolute_url()),
'<EMAIL>',
recipients, fail_silently=False)
def set_date_assigned_for_active_sections(self):
"""Sets the date_assigned, if appliccable, for the currently active section(s)"""
for section in self.get_active_sections():
if section in self.SECTION_FIELD_MAPPING:
current_mod = Q()
if section in ['AwardNegotiation', 'AwardAcceptance']:
current_mod = Q(current_modification=True)
for instance in eval(section).objects.filter(current_mod, award=self):
try:
instance.set_date_assigned()
except AttributeError:
pass
def record_wait_for_reason(self, workflow_old, workflow_new, model_name):
WAIT_FOR = {'RB': 'Revised Budget', 'PA': 'PI Access', 'CA': 'Cost Share Approval', 'FC': 'FCOI',
'PS': 'Proposal Submission', 'SC': 'Sponsor Clarity', 'NO': 'New Org needed',
'IC': 'Internal Clarification', 'DC': 'Documents not in GW Docs'
}
count_value = AwardAcceptance.objects.filter(award=self).count()
if count_value < 2:
origional_text = 'Original Award'
else:
origional_text = "Modification #%s" % (count_value - 1)
user_name = self.get_user_full_name(model_name)
if workflow_new:
try:
trail_object = ATPAuditTrail.objects.get(award=self.id, modification=origional_text,
workflow_step=WAIT_FOR[workflow_new], assigned_user=user_name)
except:
trail_object = None
if trail_object:
trail_object.date_completed = datetime.now()
else:
trail_object = ATPAuditTrail(award=self.id, modification=origional_text, workflow_step=WAIT_FOR[workflow_new],
date_created=datetime.now(), assigned_user=user_name)
trail_object.save()
if workflow_old:
try:
trail_object = ATPAuditTrail.objects.get(award=self.id, modification=origional_text,
workflow_step=WAIT_FOR[workflow_old], assigned_user=user_name)
except:
trail_object = None
if trail_object:
trail_object.date_completed = datetime.now()
trail_object.save()
elif 'Modification' in origional_text:
pass
else:
trail_object = ATPAuditTrail(award=self.id, modification=origional_text, workflow_step=WAIT_FOR[workflow_old],
date_created=datetime.now(), assigned_user=user_name)
trail_object.save()
def record_current_state_to_atptrail(self, modification, workflow):
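"""Creates or completes the ATPAuditTrail entry for the given modification label and workflow step"""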
user_name = self.get_user_full_name(workflow)
try:
trail_object = ATPAuditTrail.objects.get(award=self.id, modification=modification, workflow_step=workflow,
assigned_user=user_name)
except:
trail_object = None
if trail_object:
trail_object.date_completed = datetime.now()
else:
trail_object = ATPAuditTrail(award=self.id, modification=modification, workflow_step=workflow,
date_created=datetime.now(), assigned_user=user_name)
trail_object.save()
def get_user_full_name(self, section):
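"""Returns the full name of the user assigned to the given section, or None if no user is assigned"""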
user = self.get_user_for_section(section)
if user:
return user.first_name + ' ' + user.last_name
else:
return None
def update_completion_date_in_atp_award(self):
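"""Records section completion dates and updates the ATP audit trail based on the Award's current status"""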
origional_text = 'Original Award'
acceptance_workflow = 'AwardAcceptance'
negotiation_workflow = 'AwardNegotiation'
setup_workflow = 'AwardSetup'
modification_workflow = 'AwardModification'
subaward_workflow = 'Subaward'
management_workflow = 'AwardManagement'
closeout_workflow = 'AwardCloseout'
count_value = AwardAcceptance.objects.filter(award=self).count()
modification = "Modification #%s" % (count_value - 1)
if all([self.status == 2, self.award_dual_modification]):
acceptance_object = self.get_current_award_acceptance()
acceptance_object.acceptance_completion_date = timezone.localtime(timezone.now())
acceptance_object.save()
if count_value < 2:
self.record_current_state_to_atptrail(origional_text, acceptance_workflow)
self.record_current_state_to_atptrail(origional_text, negotiation_workflow)
else:
self.record_current_state_to_atptrail(modification, acceptance_workflow)
self.record_current_state_to_atptrail(modification, negotiation_workflow)
self.record_current_state_to_atptrail(modification, modification_workflow)
elif all([self.status == 2, self.award_dual_setup, self.award_dual_negotiation]):
acceptance_object = self.get_current_award_acceptance()
acceptance_object.acceptance_completion_date = timezone.localtime(timezone.now())
acceptance_object.save()
if count_value < 2:
self.record_current_state_to_atptrail(origional_text, acceptance_workflow)
self.record_current_state_to_atptrail(origional_text, negotiation_workflow)
self.record_current_state_to_atptrail(origional_text, setup_workflow)
else:
self.record_current_state_to_atptrail(modification, acceptance_workflow)
self.record_current_state_to_atptrail(modification, negotiation_workflow)
self.record_current_state_to_atptrail(modification, setup_workflow)
elif self.status == 2:
acceptance_object = self.get_current_award_acceptance()
acceptance_object.acceptance_completion_date = timezone.localtime(timezone.now())
acceptance_object.save()
if count_value < 2:
self.record_current_state_to_atptrail(origional_text, acceptance_workflow)
self.record_current_state_to_atptrail(origional_text, negotiation_workflow)
else:
self.record_current_state_to_atptrail(modification, acceptance_workflow)
self.record_current_state_to_atptrail(modification, negotiation_workflow)
elif self.status == 3:
negotiation_user = self.get_user_for_section(negotiation_workflow)
if negotiation_user:
negotiation_object = self.get_current_award_negotiation()
negotiation_object.negotiation_completion_date = timezone.localtime(timezone.now())
negotiation_object.save()
if count_value < 2:
self.record_current_state_to_atptrail(origional_text, negotiation_workflow)
else:
self.record_current_state_to_atptrail(modification, negotiation_workflow)
else:
acceptance_object = self.get_current_award_acceptance()
acceptance_object.acceptance_completion_date = timezone.localtime(timezone.now())
acceptance_object.save()
if count_value < 2:
self.record_current_state_to_atptrail(origional_text, acceptance_workflow)
else:
self.record_current_state_to_atptrail(modification, acceptance_workflow)
if all([not self.award_dual_modification, not self.send_to_modification, not self.award_dual_setup]):
if count_value < 2:
self.record_current_state_to_atptrail(origional_text, setup_workflow)
else:
self.record_current_state_to_atptrail(modification, setup_workflow)
elif self.send_to_modification and not self.send_to_setup:
self.record_current_state_to_atptrail(modification, modification_workflow)
elif self.status == 4:
if all([not self.award_dual_modification, not self.send_to_modification, not self.award_dual_setup]):
setup_object = AwardSetup.objects.get(award=self)
if setup_object.setup_completion_date and count_value == 1:
pass
else:
setup_object.setup_completion_date = timezone.localtime(timezone.now())
setup_object.save()
if count_value < 2:
self.record_current_state_to_atptrail(origional_text, setup_workflow)
else:
self.record_current_state_to_atptrail(modification, setup_workflow)
elif all([not self.send_to_modification, self.award_dual_setup, self.award_dual_negotiation]):
pass
elif all([self.award_dual_modification, self.common_modification]):
pass
elif self.award_dual_modification or self.send_to_modification:
modification_object = AwardModification.objects.all().filter(award=self, is_edited=True).order_by('-id')
if modification_object:
modification_obj = modification_object[0]
modification_obj.modification_completion_date = timezone.localtime(timezone.now())
modification_obj.save()
self.record_current_state_to_atptrail(modification, modification_workflow)
if self.subaward_user:
if count_value < 2:
self.record_current_state_to_atptrail(origional_text, subaward_workflow)
else:
self.record_current_state_to_atptrail(modification, subaward_workflow)
if count_value < 2:
self.record_current_state_to_atptrail(origional_text, management_workflow)
else:
self.record_current_state_to_atptrail(modification, management_workflow)
elif self.status == 5:
if count_value < 2:
self.record_current_state_to_atptrail(origional_text, closeout_workflow)
else:
self.record_current_state_to_atptrail(modification, closeout_workflow)
elif self.status == 6:
closeout = AwardCloseout.objects.get(award=self)
closeout.closeout_completion_date = timezone.localtime(timezone.now())
closeout.save()
if count_value < 2:
self.record_current_state_to_atptrail(origional_text, closeout_workflow)
else:
self.record_current_state_to_atptrail(modification, closeout_workflow)
def move_to_next_step(self, section=None):
"""Moves this Award to the next step in the process"""
# A while loop because we want to advance the status until we find the next
# section with an assigned user
while True:
# We have to do extra work to make sure both Subawards and Award Management
# are complete before we move to the next status
if section in ['Subaward', 'AwardManagement']:
origional_text = 'Original Award'
subaward_workflow = 'Subaward'
management_workflow = 'AwardManagement'
count_value = AwardAcceptance.objects.filter(award=self).count()
modification = "Modification #%s" % (count_value - 1)
if section == 'Subaward' or self.get_user_for_section(
'Subaward') is None:
self.subaward_done = True
if self.subaward_user:
if count_value < 2:
self.record_current_state_to_atptrail(origional_text, subaward_workflow)
else:
self.record_current_state_to_atptrail(modification, subaward_workflow)
try:
correct_instance = Subaward.objects.filter(award=self).latest('creation_date')
if correct_instance:
correct_instance.subaward_completion_date = timezone.localtime(timezone.now())
correct_instance.save()
except:
pass
if section == 'AwardManagement' or self.get_user_for_section(
'AwardManagement') is None:
self.award_management_done = True
if count_value < 2:
self.record_current_state_to_atptrail(origional_text, management_workflow)
else:
self.record_current_state_to_atptrail(modification, management_workflow)
management_object = AwardManagement.objects.get(award=self)
management_object.management_completion_date = timezone.localtime(timezone.now())
management_object.save()
if not (self.subaward_done and self.award_management_done):
self.save()
return False
if self.status == 2 and self.award_dual_negotiation:
self.award_dual_negotiation = False
self.save()
if self.status == 3 and self.award_dual_setup:
self.award_dual_setup = False
self.save()
if self.status == 4 and self.award_dual_modification:
self.award_dual_modification = False
self.save()
if self.status == 2 and self.send_to_modification:
modification_object = AwardModification.objects.all().filter(award=self, is_edited=False).order_by('-id')
if modification_object:
section_object = modification_object[0]
section_object.date_assigned = timezone.localtime(timezone.now())
section_object.save()
self.status += 1
if self.status == self.END_STATUS:
self.save()
break
elif not all(user is None for user in self.get_users_for_active_sections()):
self.set_date_assigned_for_active_sections()
self.save()
break
if self.status not in (self.START_STATUS, self.END_STATUS) and not self.award_dual_setup:
self.send_email_update()
# Send an additional notification when we reach Award Setup
if self.status == 3:
self.awardsetup.copy_from_proposal(self.get_most_recent_proposal())
self.send_award_setup_notification()
if all([self.status == 3, self.subaward_user, not self.send_to_modification, not self.award_dual_setup]):
self.send_email_update_if_subaward_user()
self.update_completion_date_in_atp_award()
return True
def move_award_to_multiple_steps(self, dual_mode):
""" Move award to multiple steps so that multiple teams can work parallel """
if self.award_negotiation_user:
self.status += 1
else:
if self.status == 1:
self.status += 2
try:
setup_obj = AwardSetup.objects.get(award=self)
except AwardSetup.DoesNotExist:
setup_obj = None
if setup_obj:
setup_obj.date_assigned = timezone.localtime(timezone.now())
setup_obj.save()
if dual_mode:
try:
setup_object = AwardSetup.objects.get(award=self)
except AwardSetup.DoesNotExist:
setup_object = None
try:
negotiation_object = AwardNegotiation.objects.get(award=self, current_modification=True)
except AwardNegotiation.DoesNotExist:
negotiation_object = None
if negotiation_object:
negotiation_object.date_assigned = timezone.localtime(timezone.now())
negotiation_object.save()
if setup_object:
setup_object.date_assigned = timezone.localtime(timezone.now())
setup_object.save()
self.award_dual_negotiation = True
self.award_dual_setup = True
self.save()
if self.status not in (self.START_STATUS, self.END_STATUS):
self.send_email_update()
if all([self.status == 2, self.subaward_user, self.award_dual_setup]):
self.send_email_update_if_subaward_user()
self.update_completion_date_in_atp_award()
return True
def move_award_to_negotiation_and_modification(self, dual_modification):
""" Move award to award negotiation and modification steps so that these two teams can work parallel """
if self.award_negotiation_user:
self.status += 1
try:
negotiation_object = AwardNegotiation.objects.get(award=self, current_modification=True)
except AwardNegotiation.DoesNotExist:
negotiation_object = None
if negotiation_object:
if not negotiation_object.date_assigned:
negotiation_object.date_assigned = timezone.localtime(timezone.now())
negotiation_object.save()
else:
if self.status == 1:
self.status += 2
try:
setup_obj = AwardSetup.objects.get(award=self)
except AwardSetup.DoesNotExist:
setup_obj = None
if setup_obj:
setup_obj.date_assigned = timezone.localtime(timezone.now())
setup_obj.save()
modification_object = AwardModification.objects.all().filter(award=self).order_by('-id')
if modification_object:
section_object = modification_object[0]
section_object.date_assigned = timezone.localtime(timezone.now())
section_object.save()
if dual_modification:
self.common_modification = True
self.award_dual_modification = True
self.save()
if self.status not in (self.START_STATUS, self.END_STATUS):
self.send_email_update()
self.update_completion_date_in_atp_award()
return True
def move_setup_or_modification_step(self, modification_flag=False, setup_flag=False):
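"""Moves the Award to the Award Setup or Award Modification step, copying Award Setup data into a new Award Modification when modification_flag is set"""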
if self.award_negotiation_user:
self.status += 1
try:
negotiation_object = AwardNegotiation.objects.get(award=self, current_modification=True)
except AwardNegotiation.DoesNotExist:
negotiation_object = None
if negotiation_object:
if not negotiation_object.date_assigned:
negotiation_object.date_assigned = timezone.localtime(timezone.now())
negotiation_object.save()
else:
if self.status == 1:
self.status += 2
try:
setup_obj = AwardSetup.objects.get(award=self)
except AwardSetup.DoesNotExist:
setup_obj = None
if setup_obj:
setup_obj.date_assigned = timezone.localtime(timezone.now())
setup_obj.save()
if modification_flag:
self.send_to_modification = True
self.save()
if setup_flag:
self.send_email_update()
if self.status == self.AWARD_SETUP_STATUS and modification_flag:
self.send_email_update()
# Send an additional notification when we reach Award Setup
if self.status == 3:
self.awardsetup.copy_from_proposal(self.get_most_recent_proposal())
if modification_flag:
try:
modification = AwardModification.objects.get(award_id=self.id, is_edited=False)
except AwardModification.DoesNotExist:
modification = None
if modification:
modification.is_edited = True
modification.save()
award_setup_object = AwardSetup.objects.filter(award=self).values()
for setup in award_setup_object:
del(setup['id'], setup['is_edited'], setup['setup_completion_date'], setup['wait_for_reson'])
award_modification_object = AwardModification.objects.create(**setup)
self.send_to_modification = True
award_modification_object.save()
self.save()
self.update_completion_date_in_atp_award()
return True
# Django admin helper methods
def get_section_admin_link(self, section):
"""Gets the link to the Django Admin site for the given section"""
return format_html(
'<a href="{0}">{1}</a>',
reverse(
'admin:awards_%s_change' %
section.__class__.__name__.lower(),
args=(
section.id,
)),
section)
def get_foreignkey_admin_link(self, section_class):
"""Gets the link to the Django Admin site for the given section that has a
foreign key to this Award
"""
section_objects = section_class.objects.filter(award=self)
if len(section_objects) == 0:
return '(None)'
elif len(section_objects) == 1:
return self.get_section_admin_link(section_objects[0])
else:
return format_html(
'<a href="{0}?award__id__exact={1}">{2}s</a>',
reverse(
'admin:awards_%s_changelist' %
section_class.__name__.lower()),
self.id,
section_class._meta.verbose_name.capitalize())
# The following methods are referenced in the list_display section of the AwardAdmin class.
# They return the Django Admin links to their respective sections
def proposalintake_admin(self):
return self.get_section_admin_link(self.proposalintake)
def proposal_admin(self):
return format_html('<a href="{0}?award__id__exact={1}">{2}</a>',
reverse('admin:awards_proposal_changelist'),
self.id,
'Proposals')
def awardacceptance_admin(self):
return self.get_foreignkey_admin_link(AwardAcceptance)
def awardnegotiation_admin(self):
return self.get_foreignkey_admin_link(AwardNegotiation)
def awardsetup_admin(self):
return self.get_section_admin_link(self.awardsetup)
def subaward_admin(self):
return self.get_foreignkey_admin_link(Subaward)
def awardmanagement_admin(self):
return self.get_section_admin_link(self.awardmanagement)
def awardcloseout_admin(self):
return self.get_section_admin_link(self.awardcloseout)
class AwardSection(FieldIteratorMixin, models.Model):
"""Abstract base class for all award sections"""
HIDDEN_FIELDS = ['award', 'comments', 'is_edited']
HIDDEN_SEARCH_FIELDS = []
FIELDSETS = []
comments = models.TextField(blank=True, verbose_name='Comments')
is_edited = models.BooleanField(default=False)
class Meta:
abstract = True
def get_class_name(self):
"""Gets the Python class name"""
return self.__class__.__name__
def get_verbose_class_name(self):
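"""Gets the verbose name of the model from its Meta options"""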
return self._meta.verbose_name
def get_most_recent_revision(self):
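"""Returns a (user, date_created) tuple for the most recent reversion revision of this object, or ('ATP', None) if none exists"""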
latest_revision = reversion.get_for_object(self)
if latest_revision:
latest_revision = latest_revision[0].revision
user = latest_revision.user.get_full_name()
else:
user = 'ATP'
if latest_revision:
return (user, latest_revision.date_created)
else:
return (user, None)
class AssignableAwardSection(AwardSection):
"""Base model class for an Award section that can be assigned to a user"""
date_assigned = models.DateTimeField(blank=True, null=True, verbose_name='Date Assigned')
class Meta:
abstract = True
def set_date_assigned(self):
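"""Sets date_assigned to the current time and saves"""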
self.date_assigned = datetime.now()
self.save()
class ProposalIntake(AwardSection):
"""Model for the ProposalIntake data"""
user_list = User.objects.filter(is_active=True).order_by('first_name')
users = [(user.first_name + ' ' + user.last_name, user.first_name + ' ' + user.last_name) for user in user_list]
PROPOSAL_STATUS_CHOICES = (
('NS', 'Cancelled - not submitted'),
('PE', 'Planned'),
('RO', 'Routing'),
('SB', 'Submitted'),
)
PROPOSAL_OUTCOME_CHOICES = (
('AW', 'Awarded'),
('UN', 'Unfunded'),
)
SPA1_CHOICES = (
('', ''),
)
SPA1_CHOICES = tuple(users) if users else SPA1_CHOICES
HIDDEN_SEARCH_FIELDS = AwardSection.HIDDEN_SEARCH_FIELDS + [
'principal_investigator',
'agency',
'prime_sponsor',
'program_announcement',
'announcement_link',
'proposal_due_to_sponsor',
'proposal_due_to_ovpr',
'proposal_due_to_aor',
'school',
'phs_funded',
'fcoi_submitted',
'date_received',
'proposal_status',
'proposal_outcome',
'proposal_number',
'five_days_requested',
'five_days_granted',
'jit_request',
'jit_response_submitted',
'creation_date']
minimum_fields = (
)
award = models.OneToOneField(Award, null=True, blank=True)
creation_date = models.DateTimeField(auto_now_add=True, blank=True, null=True, verbose_name='Date Created')
principal_investigator = models.ForeignKey(
AwardManager,
blank=True,
null=True,
limit_choices_to={
'active': True},
verbose_name='Principal Investigator')
agency = models.CharField(max_length=255, blank=True)
prime_sponsor = models.CharField(
max_length=255,
blank=True,
verbose_name='Prime (if GW is subawardee)')
program_announcement = models.CharField(
max_length=50,
blank=True,
verbose_name='Program announcement number')
announcement_link = models.CharField(max_length=250, blank=True)
proposal_due_to_sponsor = models.DateField(null=True, blank=True)
proposal_due_to_ovpr = models.DateField(
null=True,
blank=True,
verbose_name='Proposal due to OVPR')
proposal_due_to_aor = models.DateField(
null=True,
blank=True,
verbose_name='Proposal due to AOR')
spa1 = models.CharField(blank=False, verbose_name='SPA I*', max_length=150, choices=SPA1_CHOICES, null=True)
school = models.CharField(max_length=150, blank=True)
department = models.ForeignKey(
AwardOrganization,
null=True,
blank=True,
limit_choices_to={
'active': True},
verbose_name='Department')
phs_funded = models.NullBooleanField(verbose_name='PHS funded?')
fcoi_submitted = models.NullBooleanField(
verbose_name='FCOI disclosure submitted for each investigator?')
date_received = models.DateField(
null=True,
blank=True,
verbose_name='Date received by SPA I')
proposal_status = models.CharField(
choices=PROPOSAL_STATUS_CHOICES,
max_length=2,
blank=True)
proposal_outcome = models.CharField(
choices=PROPOSAL_OUTCOME_CHOICES,
max_length=2,
blank=True)
proposal_number = models.CharField(max_length=15, blank=True, verbose_name="Cayuse Proposal Number")
five_days_requested = models.DateField(
null=True,
blank=True,
verbose_name='Date 5 days waiver requested')
five_days_granted = models.DateField(
null=True,
blank=True,
verbose_name='Date 5 days waiver granted')
jit_request = models.NullBooleanField(verbose_name='JIT request?')
jit_response_submitted = models.DateField(
null=True,
blank=True,
verbose_name='JIT response submitted?')
five_days_waiver_request = models.NullBooleanField(
null=True,
blank=True,
verbose_name="5 day waiver granted?")
def __unicode__(self):
return u'Proposal Intake %s' % (self.id)
def get_absolute_url(self):
"""Gets the URL used to navigate to this object"""
if self.award:
return reverse(
'edit_proposal_intake',
kwargs={
'award_pk': self.award.pk})
else:
return reverse(
'edit_standalone_proposal_intake',
kwargs={
'proposalintake_pk': self.id})
def get_proposal_status(self):
"""Gets the human-readable value of the Proposal's status"""
return get_value_from_choices(self.PROPOSAL_STATUS_CHOICES, self.proposal_status)
def get_proposal_outcome(self):
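"""Gets the human-readable value of the Proposal's outcome"""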
return get_value_from_choices(self.PROPOSAL_OUTCOME_CHOICES, self.proposal_outcome)
class Proposal(AwardSection):
"""Model for the Proposal data"""
# HIDDEN_FIELDS aren't rendered by FieldIteratorMixin
HIDDEN_FIELDS = AwardSection.HIDDEN_FIELDS + [
'dummy',
'is_first_proposal',
'lotus_id',
'lotus_agency_name',
'lotus_department_code',
'employee_id',
'proposal_id']
HIDDEN_SEARCH_FIELDS = AwardSection.HIDDEN_SEARCH_FIELDS + [
'creation_date',
'sponsor_deadline',
'is_subcontract',
'federal_identifier',
'is_change_in_grantee_inst',
'responsible_entity',
'departmental_id_primary',
'departmental_id_secondary',
'departmental_name_primary',
'departmental_name_secondary',
'are_vertebrate_animals_used',
'is_iacuc_review_pending',
'iacuc_protocol_number',
'iacuc_approval_date',
'are_human_subjects_used',
'is_irb_review_pending',
'irb_protocol_number',
'irb_review_date',
'budget_first_per_start_date',
'budget_first_per_end_date',
'cost_shr_mand_is_committed',
'cost_shr_mand_source',
'cost_shr_vol_is_committed',
'cost_shr_vol_source',
'tracking_number',
'total_costs_y1',
'total_costs_y2',
'total_costs_y3',
'total_costs_y4',
'total_costs_y5',
'total_costs_y6',
'total_costs_y7',
'total_costs_y8',
'total_costs_y9',
'total_costs_y10',
'total_direct_costs_y1',
'total_direct_costs_y2',
'total_direct_costs_y3',
'total_direct_costs_y4',
'total_direct_costs_y5',
'total_direct_costs_y6',
'total_direct_costs_y7',
'total_direct_costs_y8',
'total_direct_costs_y9',
'total_direct_costs_y10',
'total_indirect_costs_y1',
'total_indirect_costs_y2',
'total_indirect_costs_y3',
'total_indirect_costs_y4',
'total_indirect_costs_y5',
'total_indirect_costs_y6',
'total_indirect_costs_y7',
'total_indirect_costs_y8',
'total_indirect_costs_y9',
'total_indirect_costs_y10']
# Fieldsets are grouped together at the top of the section under the title
FIELDSETS = [{'title': 'Proposal Summary',
'fields': ('creation_date',
'proposal_number',
'proposal_title',
'proposal_type',
'principal_investigator',
'project_title',
'department_name',
'division_name',
'agency_name',
'is_subcontract',
'who_is_prime',
'tracking_number',
'project_start_date',
'project_end_date',
'submission_date',
'sponsor_deadline'
)},
{'title': 'Project Data',
'fields': ('agency_type',
'application_type_code',
'federal_identifier',
'is_change_in_grantee_inst',
'project_type'
)},
{'title': 'Project Administration',
'fields': ('responsible_entity',
'departmental_id_primary',
'departmental_id_secondary',
'departmental_name_primary',
'departmental_name_secondary'
)},
{'title': 'Compliance: Animal Subjects',
'fields': ('are_vertebrate_animals_used',
'is_iacuc_review_pending',
'iacuc_protocol_number',
'iacuc_approval_date'
)},
{'title': 'Compliance: Human Subjects',
'fields': ('are_human_subjects_used',
'is_irb_review_pending',
'irb_protocol_number',
'irb_review_date'
)},
{'title': 'Compliance: Lab Safety',
'fields': ('is_haz_mat',
)},
{'title': 'Compliance: Export Controls',
'fields': ('will_involve_foreign_nationals',
'will_involve_shipment',
'will_involve_foreign_contract'
)},
{'title': 'Budget Data',
'fields': ('budget_first_per_start_date',
'budget_first_per_end_date',
'cost_shr_mand_is_committed',
'cost_shr_mand_amount',
'cost_shr_mand_source',
'cost_shr_vol_is_committed',
'cost_shr_vol_amount',
'cost_shr_vol_source'
)}
]
# Display tables are displayed at the end of a section in an HTML table
DISPLAY_TABLES = [
{
'title': 'Budgeted Costs', 'columns': (
'Direct Costs', 'Indirect Costs', 'Total Costs'), 'rows': [
{
'label': 'Total', 'fields': (
'total_direct_costs', 'total_indirect_costs', 'total_costs')}, {
'label': 'Y1', 'fields': (
'total_direct_costs_y1', 'total_indirect_costs_y1', 'total_costs_y1')}, {
'label': 'Y2', 'fields': (
'total_direct_costs_y2', 'total_indirect_costs_y2', 'total_costs_y2')}, {
'label': 'Y3', 'fields': (
'total_direct_costs_y3', 'total_indirect_costs_y3', 'total_costs_y3')}, {
'label': 'Y4', 'fields': (
'total_direct_costs_y4', 'total_indirect_costs_y4', 'total_costs_y4')}, {
'label': 'Y5', 'fields': (
'total_direct_costs_y5', 'total_indirect_costs_y5', 'total_costs_y5')}, {
'label': 'Y6', 'fields': (
'total_direct_costs_y6', 'total_indirect_costs_y6', 'total_costs_y6')}, {
'label': 'Y7', 'fields': (
'total_direct_costs_y7', 'total_indirect_costs_y7', 'total_costs_y7')}, {
'label': 'Y8', 'fields': (
'total_direct_costs_y8', 'total_indirect_costs_y8', 'total_costs_y8')}, {
'label': 'Y9', 'fields': (
'total_direct_costs_y9', 'total_indirect_costs_y9', 'total_costs_y9')}, {
'label': 'Y10', 'fields': (
'total_direct_costs_y10', 'total_indirect_costs_y10', 'total_costs_y10')}, ]
}
]
# Entries here appear on the EAS Award Setup report screen
EAS_REPORT_FIELDS = [
'proposal_id',
'project_title',
'department_name',
'is_subcontract',
'who_is_prime',
'agency_name',
]
# A small mapping to help figure out which field data to use when conforming
# Lotus Notes legacy data to EAS data when importing a proposal from Lotus
LOTUS_FK_LOOKUPS = {
'lotus_agency_name': 'agency_name',
'lotus_department_code': 'department_name',
'employee_id': 'principal_investigator'
}
award = models.ForeignKey(
Award,
null=True,
blank=True,
on_delete=models.SET_NULL)
dummy = models.BooleanField(default=False)
is_first_proposal = models.BooleanField(default=False)
creation_date = models.DateTimeField(auto_now_add=True, blank=True, null=True, verbose_name='Date Created')
lotus_id = models.CharField(max_length=20, blank=True)
employee_id = models.CharField(
max_length=40,
blank=True,
verbose_name='Employee ID')
proposal_id = models.BigIntegerField(
unique=True,
null=True,
blank=True,
verbose_name='Proposal ID')
proposal_number = models.CharField(
max_length=50,
null=True,
blank=True,
verbose_name='Proposal Number')
proposal_title = models.CharField(
max_length=256,
blank=True,
verbose_name='Internal Proposal Title')
proposal_type = models.CharField(max_length=256, blank=True)
principal_investigator = models.ForeignKey(
AwardManager,
null=True,
blank=True,
limit_choices_to={
'active': True},
verbose_name='Principal Investigator')
project_title = models.CharField(max_length=255, blank=True)
lotus_department_code = models.CharField(max_length=128, blank=True)
department_name = models.ForeignKey(
AwardOrganization,
null=True,
blank=True,
limit_choices_to={
'active': True},
verbose_name='Department Code & Name')
division_name = models.CharField(max_length=150, blank=True)
agency_name = models.ForeignKey(
FundingSource,
null=True,
blank=True,
limit_choices_to={
'active': True})
is_subcontract = models.CharField(
max_length=10,
blank=True,
verbose_name='Is this a subcontract?')
who_is_prime = models.ForeignKey(
PrimeSponsor,
null=True,
blank=True,
limit_choices_to={
'active': True},
verbose_name='Prime Sponsor')
tracking_number = models.CharField(
max_length=15,
blank=True,
verbose_name='Grants.gov tracking number')
project_start_date = models.DateField(null=True, blank=True)
project_end_date = models.DateField(null=True, blank=True)
submission_date = models.DateField(null=True, blank=True)
sponsor_deadline = models.DateField(null=True, blank=True)
lotus_agency_name = models.CharField(max_length=250, blank=True)
project_title = models.CharField(max_length=256, blank=True)
agency_type = models.CharField(max_length=256, blank=True)
application_type_code = models.CharField(
max_length=25,
blank=True,
verbose_name='Kind of application')
federal_identifier = models.CharField(max_length=256, blank=True, verbose_name='Previous Grant #')
is_change_in_grantee_inst = models.CharField(
max_length=10,
blank=True,
verbose_name='Change in grantee institution?')
project_type = models.CharField(max_length=256, blank=True)
responsible_entity = models.CharField(max_length=256, blank=True)
departmental_id_primary = models.CharField(
max_length=256,
blank=True,
verbose_name='Departmental ID primary')
departmental_id_secondary = models.CharField(
max_length=256,
blank=True,
verbose_name='Departmental ID secondary')
departmental_name_primary = models.CharField(max_length=256, blank=True)
departmental_name_secondary = models.CharField(max_length=256, blank=True)
are_vertebrate_animals_used = models.CharField(
max_length=10,
blank=True,
verbose_name='Are vertebrate animals used?')
is_iacuc_review_pending = models.CharField(
max_length=10,
blank=True,
verbose_name='Is IACUC review pending?')
iacuc_protocol_number = models.CharField(
max_length=256,
blank=True,
verbose_name='IACUC protocol number')
iacuc_approval_date = models.DateField(
null=True,
blank=True,
verbose_name='IACUC approval date')
are_human_subjects_used = models.CharField(
max_length=10,
blank=True,
verbose_name='Are human subjects used?')
is_irb_review_pending = models.CharField(
max_length=10,
blank=True,
verbose_name='Is IRB review pending?')
irb_protocol_number = models.CharField(
max_length=256,
blank=True,
verbose_name='IRB protocol number')
irb_review_date = models.DateField(
null=True,
blank=True,
verbose_name='IRB review date')
is_haz_mat = models.CharField(max_length=10, blank=True, verbose_name='Uses hazardous materials')
budget_first_per_start_date = models.DateField(
null=True,
blank=True,
verbose_name='Budget first period start date')
budget_first_per_end_date = models.DateField(
null=True,
blank=True,
verbose_name='Budget first period end date')
cost_shr_mand_is_committed = models.CharField(max_length=10, blank=True)
cost_shr_mand_amount = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
cost_shr_mand_source = models.CharField(max_length=256, blank=True)
cost_shr_vol_is_committed = models.CharField(max_length=10, blank=True)
cost_shr_vol_amount = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
cost_shr_vol_source = models.CharField(max_length=256, blank=True)
will_involve_foreign_nationals = models.CharField(
max_length=10,
blank=True)
will_involve_shipment = models.CharField(max_length=10, blank=True)
will_involve_foreign_contract = models.CharField(max_length=10, blank=True)
total_costs = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
total_costs_y1 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
total_costs_y2 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
total_costs_y3 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
total_costs_y4 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
total_costs_y5 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
total_costs_y6 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
total_costs_y7 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
total_costs_y8 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
total_costs_y9 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
total_costs_y10 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
total_direct_costs = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
total_direct_costs_y1 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
total_direct_costs_y2 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
total_direct_costs_y3 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
total_direct_costs_y4 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
total_direct_costs_y5 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
total_direct_costs_y6 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
total_direct_costs_y7 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
total_direct_costs_y8 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
total_direct_costs_y9 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
total_direct_costs_y10 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
total_indirect_costs = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
total_indirect_costs_y1 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
total_indirect_costs_y2 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
total_indirect_costs_y3 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
total_indirect_costs_y4 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
total_indirect_costs_y5 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
total_indirect_costs_y6 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
total_indirect_costs_y7 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
total_indirect_costs_y8 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
total_indirect_costs_y9 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
total_indirect_costs_y10 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
def __unicode__(self):
return u'Proposal #%s' % (self.get_unique_identifier())
class Meta:
index_together = [
["award", "is_first_proposal"],
]
def get_absolute_url(self):
"""Gets the URL used to navigate to this object"""
return reverse(
'edit_proposal',
kwargs={
'award_pk': self.award.pk,
'proposal_pk': self.id})
def get_unique_identifier(self):
"""Gets a value that uniquely identifies this Proposal"""
return self.proposal_number
def save(self, *args, **kwargs):
"""Overrides the parent save method.
If this is a new Proposal, copy certain fields over to the AwardAcceptance object
"""
if not self.dummy and not self.pk:
try:
award_intake = self.award.get_current_award_acceptance()
award_intake.copy_from_proposal(self)
except:
pass
super(Proposal, self).save(*args, **kwargs)
def delete(self, *args, **kwargs):
"""Overrides the parent delete method.
If this Proposal came from Lotus, just remove the reference to the Award instead of
deleting from the database.
"""
if self.lotus_id:
self.award = None
self.save()
else:
super(Proposal, self).delete(*args, **kwargs)
def set_first_proposal(award, proposals):
"""Set the is_first_proposal flag on the appropriate proposal"""
proposals.update(is_first_proposal=False)
first_proposal = proposals.order_by('id').first()
first_proposal.is_first_proposal = True
first_proposal.save()
@receiver(post_delete, sender=Proposal)
@receiver(post_save, sender=Proposal)
def check_first_proposal(sender, instance, **kwargs):
"""Use Django signals to keep the is_first_proposal flag up to date"""
try:
award = instance.award
except Award.DoesNotExist:
award = None
if not award:
return
proposals = Proposal.objects.filter(award=award)
try:
dummy_proposal = Proposal.objects.get(award=award, dummy=True)
except Proposal.DoesNotExist:
dummy_proposal = None
if len(proposals) == 0:
Proposal.objects.create(award=award, dummy=True)
return
elif len(proposals) > 1 and dummy_proposal:
dummy_proposal.delete()
first_proposals = Proposal.objects.filter(
award=award,
is_first_proposal=True)
if len(first_proposals) != 1:
set_first_proposal(award, proposals)
class KeyPersonnel(FieldIteratorMixin, models.Model):
"""Model for the KeyPersonnel data"""
HIDDEN_FIELDS = ['proposal']
HIDDEN_TABLE_FIELDS = []
proposal = models.ForeignKey(Proposal)
employee_id = models.CharField(
max_length=40,
blank=True,
verbose_name='Emp ID')
last_name = models.CharField(max_length=64, blank=True)
first_name = models.CharField(max_length=64, blank=True)
middle_name = models.CharField(max_length=32, blank=True)
project_role = models.CharField(max_length=128, blank=True)
calendar_months = models.DecimalField(
decimal_places=3,
max_digits=5,
null=True,
blank=True,
verbose_name='Calendar mos.')
academic_months = models.DecimalField(
decimal_places=3,
max_digits=5,
null=True,
blank=True,
verbose_name='Academic mos.')
summer_months = models.DecimalField(
decimal_places=3,
max_digits=5,
null=True,
blank=True,
verbose_name='Summer mos.')
effort = models.CharField(max_length=10, blank=True)
def __unicode__(self):
return u'%s, %s %s on %s' % (
self.last_name, self.first_name, self.middle_name, self.proposal)
def get_absolute_url(self):
"""Gets the URL used to navigate to this object"""
return reverse(
'edit_key_personnel',
kwargs={
'award_pk': self.proposal.award.pk,
'proposal_pk': self.proposal.pk,
'key_personnel_pk': self.id})
def get_delete_url(self):
"""Gets the URL used to delete this object"""
return reverse(
'delete_key_personnel',
kwargs={
'award_pk': self.proposal.award.pk,
'proposal_pk': self.proposal.pk,
'key_personnel_pk': self.id})
class PerformanceSite(FieldIteratorMixin, models.Model):
"""Model for the PerformanceSite data"""
HIDDEN_FIELDS = ['proposal']
HIDDEN_TABLE_FIELDS = []
proposal = models.ForeignKey(Proposal)
ps_organization = models.CharField(
max_length=255,
blank=True,
verbose_name='Organization')
ps_duns = models.BigIntegerField(
null=True,
blank=True,
verbose_name='DUNS')
ps_street1 = models.CharField(
max_length=255,
blank=True,
verbose_name='Street 1')
ps_street2 = models.CharField(
max_length=255,
blank=True,
verbose_name='Street 2')
ps_city = models.CharField(max_length=255, blank=True, verbose_name='City')
ps_state = models.CharField(
max_length=100,
blank=True,
verbose_name='State')
ps_zipcode = models.CharField(
max_length=128,
blank=True,
verbose_name='Zip')
ps_country = models.CharField(
max_length=128,
blank=True,
verbose_name='Country')
def __unicode__(self):
return u'%s %s, %s' % (self.ps_street1, self.ps_city, self.ps_state)
def get_absolute_url(self):
"""Gets the URL used to navigate to this object"""
return reverse(
'edit_performance_site',
kwargs={
'award_pk': self.proposal.award.pk,
'proposal_pk': self.proposal.pk,
'performance_site_pk': self.id})
def get_delete_url(self):
"""Gets the URL used to delete this object"""
return reverse(
'delete_performance_site',
kwargs={
'award_pk': self.proposal.award.pk,
'proposal_pk': self.proposal.pk,
'performance_site_pk': self.id})
class AwardModificationMixin(object):
"""Mixin used for Award sections that can have modifications"""
def clean(self, *args, **kwargs):
"""Overrides the base clean method. Verifies there are no other current modifications."""
section = self.__class__
active_modifications = section.objects.filter(
award=self.award,
current_modification=True).exclude(
pk=self.id)
if self.current_modification and len(active_modifications) > 0:
raise ValidationError(
'Another %s is already the current modification for %s. \
Set "current modification" on all other %s objects and try again.' %
(section.__name__, self.award, section.__name__))
super(AwardModificationMixin, self).clean(*args, **kwargs)
class AwardAcceptance(AwardModificationMixin, AwardSection):
"""Model for the AwardAcceptance data"""
EAS_STATUS_CHOICES = (
('A', 'Active'),
('OH', 'On hold'),
('AR', 'At risk'),
('C', 'Closed')
)
PRIORITY_STATUS_CHOICES = (
('on', 1),
('tw', 2),
('th', 3),
('fo', 4),
('fi', 5),
('ni', 9)
)
PRIORITY_STATUS_DICT = {'on': 1,
'tw': 2,
'th': 3,
'fo': 4,
'fi': 5,
'ni': 9
}
HIDDEN_FIELDS = AwardSection.HIDDEN_FIELDS + ['current_modification', 'award_text']
HIDDEN_SEARCH_FIELDS = AwardSection.HIDDEN_SEARCH_FIELDS + [
'fcoi_cleared_date',
'project_title',
'full_f_a_recovery',
'explanation',
'mfa_investigators',
'award_total_costs_y1',
'award_total_costs_y2',
'award_total_costs_y3',
'award_total_costs_y4',
'award_total_costs_y5',
'award_total_costs_y6',
'award_total_costs_y7',
'award_total_costs_y8',
'award_total_costs_y9',
'award_total_costs_y10',
'award_direct_costs_y1',
'award_direct_costs_y2',
'award_direct_costs_y3',
'award_direct_costs_y4',
'award_direct_costs_y5',
'award_direct_costs_y6',
'award_direct_costs_y7',
'award_direct_costs_y8',
'award_direct_costs_y9',
'award_direct_costs_y10',
'award_indirect_costs_y1',
'award_indirect_costs_y2',
'award_indirect_costs_y3',
'award_indirect_costs_y4',
'award_indirect_costs_y5',
'award_indirect_costs_y6',
'award_indirect_costs_y7',
'award_indirect_costs_y8',
'award_indirect_costs_y9',
'award_indirect_costs_y10',
'contracting_official',
'gmo_co_email',
'gmo_co_phone_number',
'creation_date']
DISPLAY_TABLES = [
{
'title': 'Costs', 'columns': (
'Total Direct Costs', 'Total Indirect Costs', 'Total Costs'), 'rows': [
{
'label': 'Total', 'fields': (
'award_direct_costs', 'award_indirect_costs', 'award_total_costs')}, {
'label': 'Y1', 'fields': (
'award_direct_costs_y1', 'award_indirect_costs_y1', 'award_total_costs_y1')}, {
'label': 'Y2', 'fields': (
'award_direct_costs_y2', 'award_indirect_costs_y2', 'award_total_costs_y2')}, {
'label': 'Y3', 'fields': (
'award_direct_costs_y3', 'award_indirect_costs_y3', 'award_total_costs_y3')}, {
'label': 'Y4', 'fields': (
'award_direct_costs_y4', 'award_indirect_costs_y4', 'award_total_costs_y4')}, {
'label': 'Y5', 'fields': (
'award_direct_costs_y5', 'award_indirect_costs_y5', 'award_total_costs_y5')}, {
'label': 'Y6', 'fields': (
'award_direct_costs_y6', 'award_indirect_costs_y6', 'award_total_costs_y6')}, {
'label': 'Y7', 'fields': (
'award_direct_costs_y7', 'award_indirect_costs_y7', 'award_total_costs_y7')}, {
'label': 'Y8', 'fields': (
'award_direct_costs_y8', 'award_indirect_costs_y8', 'award_total_costs_y8')}, {
'label': 'Y9', 'fields': (
'award_direct_costs_y9', 'award_indirect_costs_y9', 'award_total_costs_y9')}, {
'label': 'Y10', 'fields': (
'award_direct_costs_y10', 'award_indirect_costs_y10', 'award_total_costs_y10')}, ]
}
]
EAS_REPORT_FIELDS = [
'eas_status',
'award_issue_date',
'award_acceptance_date',
'sponsor_award_number',
'agency_award_number',
]
# These fields must have values before this section can be completed
minimum_fields = (
'award_issue_date',
)
award = models.ForeignKey(Award)
creation_date = models.DateTimeField(auto_now_add=True, blank=True, null=True, verbose_name='Date Created')
current_modification = models.BooleanField(default=True)
eas_status = models.CharField(
choices=EAS_STATUS_CHOICES,
max_length=2,
blank=True,
verbose_name='EAS status')
new_funding = models.NullBooleanField(verbose_name='New Funding?')
fcoi_cleared_date = models.DateField(
null=True,
blank=True,
verbose_name='FCOI cleared date')
phs_funded = models.NullBooleanField(verbose_name='PHS funded?')
award_setup_priority = models.CharField(
choices=PRIORITY_STATUS_CHOICES,
max_length=2,
blank=True,
verbose_name='Award Setup Priority'
)
priority_by_director = models.NullBooleanField(blank=True, null=True, verbose_name='Prioritized by Director?')
project_title = models.CharField(
max_length=250,
blank=True,
verbose_name='Project Title (if different from Proposal)')
foreign_travel = models.NullBooleanField(verbose_name='Foreign Travel?')
f_a_rate = models.CharField(
max_length=250,
blank=True,
verbose_name='F&A rate')
full_f_a_recovery = models.NullBooleanField(
verbose_name='Full F&A Recovery?')
explanation = models.CharField(
max_length=250,
blank=True,
verbose_name='If no full F&A, provide explanation')
mfa_investigators = models.NullBooleanField(
verbose_name='MFA investigators?')
admin_establishment = models.NullBooleanField(
verbose_name='Administrative establishment?')
award_issue_date = models.DateField(null=True, blank=True)
award_acceptance_date = models.DateField(null=True, blank=True)
agency_award_number = models.CharField(max_length=50, blank=True)
sponsor_award_number = models.CharField(
max_length=50,
blank=True,
verbose_name='Prime Award # (if GW is subawardee)')
award_total_costs = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True,
verbose_name='Total award costs')
award_direct_costs = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True,
verbose_name='Total award direct costs')
award_indirect_costs = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True,
verbose_name='Total award indirect costs')
award_total_costs_y1 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
award_direct_costs_y1 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
award_indirect_costs_y1 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
award_total_costs_y2 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
award_direct_costs_y2 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
award_indirect_costs_y2 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
award_total_costs_y3 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
award_direct_costs_y3 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
award_indirect_costs_y3 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
award_total_costs_y4 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
award_direct_costs_y4 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
award_indirect_costs_y4 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
award_total_costs_y5 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
award_direct_costs_y5 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
award_indirect_costs_y5 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
award_total_costs_y6 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
award_direct_costs_y6 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
award_indirect_costs_y6 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
award_total_costs_y7 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
award_direct_costs_y7 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
award_indirect_costs_y7 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
award_total_costs_y8 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
award_direct_costs_y8 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
award_indirect_costs_y8 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
award_total_costs_y9 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
award_direct_costs_y9 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
award_indirect_costs_y9 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
award_total_costs_y10 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
award_direct_costs_y10 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
award_indirect_costs_y10 = models.DecimalField(
decimal_places=2,
max_digits=15,
null=True,
blank=True)
contracting_official = models.CharField(
max_length=500,
blank=True,
verbose_name='GMO or CO')
gmo_co_phone_number = models.CharField(
max_length=15,
blank=True,
verbose_name='GMO/CO phone number')
gmo_co_email = models.CharField(
max_length=50,
blank=True,
verbose_name='GMO/CO email')
pta_modification = models.NullBooleanField(verbose_name='Do you want to send this to the post-award team for modification?')
acceptance_completion_date = models.DateTimeField(blank=True, null=True, verbose_name='Completion Date')
award_text = models.CharField(max_length=50, blank=True, null=True)
def __unicode__(self):
return u'Award Intake %s' % (self.id)
def get_absolute_url(self):
"""Gets the URL used to navigate to this object."""
return reverse(
'edit_award_acceptance',
kwargs={
'award_pk': self.award.pk})
def copy_from_proposal(self, proposal):
"""Copies common fields to this object from the given Proposal."""
self.project_title = proposal.project_title
self.award_total_costs = proposal.total_costs
self.award_total_costs_y1 = proposal.total_costs_y1
self.award_total_costs_y2 = proposal.total_costs_y2
self.award_total_costs_y3 = proposal.total_costs_y3
self.award_total_costs_y4 = proposal.total_costs_y4
self.award_total_costs_y5 = proposal.total_costs_y5
self.award_total_costs_y6 = proposal.total_costs_y6
self.award_total_costs_y7 = proposal.total_costs_y7
self.award_total_costs_y8 = proposal.total_costs_y8
self.award_total_costs_y9 = proposal.total_costs_y9
self.award_total_costs_y10 = proposal.total_costs_y10
self.award_direct_costs = proposal.total_direct_costs
self.award_direct_costs_y1 = proposal.total_direct_costs_y1
self.award_direct_costs_y2 = proposal.total_direct_costs_y2
self.award_direct_costs_y3 = proposal.total_direct_costs_y3
self.award_direct_costs_y4 = proposal.total_direct_costs_y4
self.award_direct_costs_y5 = proposal.total_direct_costs_y5
self.award_direct_costs_y6 = proposal.total_direct_costs_y6
self.award_direct_costs_y7 = proposal.total_direct_costs_y7
self.award_direct_costs_y8 = proposal.total_direct_costs_y8
self.award_direct_costs_y9 = proposal.total_direct_costs_y9
self.award_direct_costs_y10 = proposal.total_direct_costs_y10
self.award_indirect_costs = proposal.total_indirect_costs
self.award_indirect_costs_y1 = proposal.total_indirect_costs_y1
self.award_indirect_costs_y2 = proposal.total_indirect_costs_y2
self.award_indirect_costs_y3 = proposal.total_indirect_costs_y3
self.award_indirect_costs_y4 = proposal.total_indirect_costs_y4
self.award_indirect_costs_y5 = proposal.total_indirect_costs_y5
self.award_indirect_costs_y6 = proposal.total_indirect_costs_y6
self.award_indirect_costs_y7 = proposal.total_indirect_costs_y7
self.award_indirect_costs_y8 = proposal.total_indirect_costs_y8
self.award_indirect_costs_y9 = proposal.total_indirect_costs_y9
self.award_indirect_costs_y10 = proposal.total_indirect_costs_y10
self.save()
class Meta:
verbose_name = 'Award intake'
verbose_name_plural = 'Award intakes'
def save(self, *args, **kwargs):
"""Overrides the base save method.
If it was an existing AwardAcceptance, check to see if FCOI and/or PHS funded
emails need to be sent.
"""
try:
old_object = AwardAcceptance.objects.get(pk=self.pk)
except AwardAcceptance.DoesNotExist:
super(AwardAcceptance, self).save(*args, **kwargs)
return
super(AwardAcceptance, self).save(*args, **kwargs)
# Send email to Award Setup user when FCOI cleared date is populated
if not old_object.fcoi_cleared_date and self.fcoi_cleared_date:
self.award.send_fcoi_cleared_notification(self.fcoi_cleared_date)
if not old_object.phs_funded and self.phs_funded:
self.award.send_phs_funded_notification()
class NegotiationStatus(models.Model):
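"""Model for the history of an Award's negotiation status changes"""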
NEGOTIATION_CHOICES = (
('IQ', 'In queue'),
('IP', 'In progress'),
('WFS', 'Waiting for sponsor'),
('WFP', 'Waiting for PI'),
('WFO', 'Waiting for other department'),
('CD', 'Completed'),
('UD', 'Unrealized')
)
NEGOTIATION_STATUS_CHOICES = (
'In queue',
'In progress',
'Waiting for sponsor',
'Waiting for PI',
'Waiting for other department',
'Completed',
'Unrealized'
)
NEGOTIATION_CHOICES_DICT = {'IQ': 'In queue',
'IP': 'In progress',
'WFS': 'Waiting for sponsor',
'WFP': 'Waiting for PI',
'WFO': 'Waiting for other department',
'CD': 'Completed',
'UD': 'Unrealized'
}
negotiation_status = models.CharField(
choices=NEGOTIATION_CHOICES,
max_length=50,
blank=True)
negotiation_status_changed_user = models.CharField(
max_length=100,
blank=True)
negotiation_notes = models.TextField(
blank=True)
award = models.ForeignKey(Award)
negotiation_status_date = models.DateTimeField(blank=True, null=True)
def __unicode__(self):
return u'%s Status %s' % (self.award, self.negotiation_status)
class AwardNegotiation(AwardModificationMixin, AssignableAwardSection):
"""Model for the AwardNegotiation data"""
AWARD_TYPE_CHOICES = (
('CR', 'Contract: Cost-reimbursable'),
('FP', 'Contract: Fixed price'),
('TM', 'Contract: Time & materials'),
('GC', 'Grant: Cost-reimbursable'),
('GF', 'Grant: Fixed amount award'),
('CA', 'Cooperative agreement'),
('CD', 'CRADA'),
('ND', 'NDA'),
('TA', 'Teaming agreement'),
('DU', 'DUA'),
('RF', 'RFP'),
('MT', 'MTA'),
('MA', 'Master agreement'),
('OT', 'Other')
)
NEGOTIATION_CHOICES = (
('IQ', 'In queue'),
('IP', 'In progress'),
('WFS', 'Waiting for sponsor'),
('WFP', 'Waiting for PI'),
('WFO', 'Waiting for other department'),
('CD', 'Completed'),
('UD', 'Unrealized')
)
HIDDEN_FIELDS = AwardSection.HIDDEN_FIELDS + ['current_modification', 'date_received', 'award_text']
HIDDEN_SEARCH_FIELDS = AwardSection.HIDDEN_SEARCH_FIELDS + [
'subcontracting_plan',
'under_master_agreement',
'retention_period',
'gw_doesnt_own_ip',
'gw_background_ip',
'foreign_restrictions',
'certificates_insurance',
'insurance_renewal',
'government_property',
'everify',
'date_assigned']
EAS_REPORT_FIELDS = [
'award_type',
]
minimum_fields = (
'award_type',
)
award = models.ForeignKey(Award)
current_modification = models.BooleanField(default=True)
subcontracting_plan = models.NullBooleanField(
verbose_name='Is Small Business Subcontracting Plan required?')
under_master_agreement = models.NullBooleanField(
verbose_name='Under Master Agreement?')
award_type = models.CharField(
choices=AWARD_TYPE_CHOICES,
max_length=3,
blank=True,
verbose_name='Award Type')
other_award_type = models.CharField(max_length=255, blank=True)
related_other_agreements = models.NullBooleanField(
verbose_name='Related Other Agreements?')
related_other_comments = models.TextField(
blank=True,
verbose_name='Related other agreements comments')
negotiator = models.CharField(
max_length=500,
blank=True,
verbose_name='Negotiator Assist')
date_received = models.DateField(
null=True,
blank=True,
verbose_name='Date Received')
retention_period = models.CharField(
max_length=500,
blank=True,
verbose_name='Sponsor Retention Period')
gw_doesnt_own_ip = models.NullBooleanField(
verbose_name="GW Doesn't Own IP?")
gw_background_ip = models.NullBooleanField(
verbose_name='GW Background IP?')
negotiation_status = models.CharField(
choices=NEGOTIATION_CHOICES,
max_length=3,
blank=True,
verbose_name='Negotiation Status',
default='IQ')
negotiation_notes = models.TextField(
blank=True,
verbose_name='Negotiation Notes')
foreign_restrictions = models.NullBooleanField(
verbose_name='Foreign Participation Restrictions?')
certificates_insurance = models.NullBooleanField(
verbose_name='Certificate of Insurance Needed?')
insurance_renewal = models.DateField(
null=True,
blank=True,
verbose_name='Certificate of Insurance Renewal Date')
government_property = models.NullBooleanField(
verbose_name='Government Furnished Property?')
data_security_restrictions = models.NullBooleanField(
verbose_name='Data/Security Restrictions?')
everify = models.NullBooleanField(verbose_name='E-verify?')
publication_restriction = models.NullBooleanField(
verbose_name='Publication Restriction?')
negotiation_completion_date = models.DateTimeField(blank=True, null=True, verbose_name='Completion Date')
award_text = models.CharField(max_length=50, blank=True, null=True)
def __unicode__(self):
return u'Award Negotiation %s' % (self.id)
def get_absolute_url(self):
"""Gets the URL used to navigate to this object"""
return reverse(
'edit_award_negotiation',
kwargs={
'award_pk': self.award.pk})
class AwardSetup(AssignableAwardSection):
"""Model for the AwardSetup data"""
WAIT_FOR = {'RB': 'Revised Budget', 'PA': 'PI Access', 'CA': 'Cost Share Approval', 'FC': 'FCOI',
'PS': 'Proposal Submission', 'SC': 'Sponsor Clarity', 'NO': 'New Org needed',
'IC': 'Internal Clarification', 'DC': 'Documents not in GW Docs'
}
WAIT_FOR_CHOICES = (
('RB', 'Revised Budget'),
('PA', 'PI Access'),
('CA', 'Cost Share Approval'),
('FC', 'FCOI'),
('PS', 'Proposal Submission'),
('SC', 'Sponsor Clarity'),
('NO', 'New Org needed'),
('IC', 'Internal Clarification'),
('DC', 'Documents not in GW Docs')
)
SP_TYPE_CHOICES = (
('SP1', 'SP1 - Research and Development'),
('SP2', 'SP2 - Training'),
('SP3', 'SP3 - Other'),
('SP4', 'SP4 - Clearing and Suspense'),
('SP5', 'SP5 - Program Income'),
)
REPORTING_CHOICES = (
('MN', 'Monthly'),
('QR', 'Quarterly'),
('SA', 'Semi-annually'),
('AN', 'Annually'),
('OT', 'Other (specify)')
)
EAS_AWARD_CHOICES = (
('C', 'Contract'),
('G', 'Grant'),
('I', 'Internal Funding'),
('PP', 'Per Patient'),
('PA', 'Pharmaceutical')
)
PROPERTY_CHOICES = (
('TG', 'Title to GW'),
('TS', 'Title to Sponsor'),
('TD', 'Title to be determined at purchase'),
('SE', 'Special EAS Value')
)
ONR_CHOICES = (
('Y', 'Yes, Administered'),
('N', 'No, Administered')
)
COST_SHARING_CHOICES = (
('M', 'Mandatory'),
('V', 'Voluntary'),
('B', 'Both')
)
PERFORMANCE_SITE_CHOICES = (
('ON', 'On-campus'),
('OF', 'Off-campus'),
('OT', 'Other')
)
TASK_LOCATION_CHOICES = (
('AL', 'AL - ALEXANDRIA'),
('BE', 'BE - BETHESDA'),
('CC', 'CC - CRYSTAL CITY'),
('CL', 'CL - CLARENDON'),
('CM', 'CM - ST MARY\'S COUNTY, CALIFORNIA, MD'),
('CW', 'CW - K STREET CENTER OFF-CAMPUS DC'),
('DE', 'DE - DISTANCE EDUCATION'),
('FB', 'FB - FOGGY BOTTOM'),
('FC', 'FC - CITY OF FALLS CHURCH'),
('FX', 'FX - FAIRFAX COUNTY'),
('GS', 'GS - GODDARD SPACE FLIGHT CENTER'),
('HR', 'HR - HAMPTON ROADS'),
('IN', 'IN - INTERNATIONAL'),
('LA', 'LA - LANGLEY AIR FORCE BASE'),
('LO', 'LO - LOUDOUN COUNTY OTHER'),
('MV', 'MV - MOUNT VERNON CAMPUS'),
('OA', 'OA - OTHER ARLINGTON COUNTY'),
('OD', 'OD - OTHER DISTRICT OF COLUMBIA'),
('OG', 'OG - OTHER MONTGOMERY COUNTY'),
('OM', 'OM - OTHER MARYLAND'),
('OV', 'OV - OTHER VIRGINIA'),
('PA', 'PA - PACE - Classes at Sea'),
('RI', 'RI - RICHMOND, CITY OF'),
('RO', 'RO - ROSSLYN ARLINGTON COUNTY'),
('RV', 'RV - ROCKVILLE'),
('SM', 'SM - SUBURBAN MARYLAND'),
('T', 'T - TOTAL LOCATION'),
('US', 'US - OTHER US'),
('VC', 'VC - VIRGINIA CAMPUS'),
('VR', 'VR - VIRGINIA RESEARCH AND TECHNOLOGY CENTER'),
('VS', 'VS - VIRGINIA SQUARE'),
)
EAS_SETUP_CHOICES = (
('Y', 'Yes'),
('N', 'No'),
('M', 'Manual'),
)
HIDDEN_FIELDS = AwardSection.HIDDEN_FIELDS + [
'award_template',
'short_name',
'task_location',
'start_date',
'end_date',
'final_reports_due_date',
'eas_award_type',
'sp_type',
'indirect_cost_schedule',
'allowed_cost_schedule',
'cfda_number',
'federal_negotiated_rate',
'bill_to_address',
'billing_events',
'contact_name',
'phone',
'financial_reporting_req',
'financial_reporting_oth',
'property_equip_code',
'onr_administered_code',
'cost_sharing_code',
'document_number',
'performance_site',
'award_setup_complete',
'qa_screening_complete',
'ready_for_eas_setup',
]
HIDDEN_SEARCH_FIELDS = AwardSection.HIDDEN_SEARCH_FIELDS + [
'nine_ninety_form_needed',
'patent_reporting_req',
'invention_reporting_req',
'property_reporting_req',
'equipment_reporting_req',
'budget_restrictions',
'record_destroy_date',
'date_assigned']
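    # The two HIDDEN_* lists above extend the lists inherited from AwardSection;
    # EAS_REPORT_FIELDS below presumably names the fields exported for the EAS
    # report, with the PTA information listed first.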
EAS_REPORT_FIELDS = [
# PTA info first
'award_template',
'short_name',
'task_location',
'start_date',
'end_date',
'final_reports_due_date',
'eas_award_type',
'sp_type',
'indirect_cost_schedule',
'allowed_cost_schedule',
'cfda_number',
'federal_negotiated_rate',
'bill_to_address',
'contact_name',
'phone',
'financial_reporting_req',
'financial_reporting_oth',
'property_equip_code',
'onr_administered_code',
'cost_sharing_code',
'billing_events',
'document_number',
'nine_ninety_form_needed',
]
minimum_fields = (
)
MULTIPLE_SELECT_FIELDS = (
'financial_reporting_req',
'technical_reporting_req',
)
award = models.OneToOneField(Award)
short_name = models.CharField(
max_length=30,
blank=True,
verbose_name='Award short name')
start_date = models.DateField(null=True, blank=True)
end_date = models.DateField(null=True, blank=True)
final_reports_due_date = models.DateField(
null=True,
blank=True,
verbose_name='Final Reports/Final Invoice Due Date (Close Date)')
eas_award_type = models.CharField(
choices=EAS_AWARD_CHOICES,
max_length=2,
blank=True,
verbose_name='EAS award type')
sp_type = models.CharField(
choices=SP_TYPE_CHOICES,
max_length=3,
blank=True,
verbose_name='SP Type')
indirect_cost_schedule = models.ForeignKey(
IndirectCost,
null=True,
blank=True,
limit_choices_to={
'active': True})
allowed_cost_schedule = models.ForeignKey(
AllowedCostSchedule,
null=True,
blank=True,
limit_choices_to={
'active': True})
cfda_number = models.ForeignKey(
CFDANumber,
null=True,
blank=True,
limit_choices_to={
'active': True},
verbose_name='CFDA number')
federal_negotiated_rate = models.ForeignKey(
FedNegRate,
null=True,
blank=True,
limit_choices_to={
'active': True})
property_equip_code = models.CharField(
choices=PROPERTY_CHOICES,
max_length=2,
blank=True,
verbose_name='T&C: Property and Equipment Code')
onr_administered_code = models.CharField(
choices=ONR_CHOICES,
max_length=2,
blank=True,
verbose_name='T&C: ONR Administered Code')
cost_sharing_code = models.CharField(
choices=COST_SHARING_CHOICES,
max_length=2,
blank=True,
verbose_name='T&C: Cost Sharing Code')
bill_to_address = models.TextField(blank=True)
contact_name = models.CharField(
max_length=150,
blank=True,
verbose_name='Contact Name (Last, First)')
phone = models.CharField(max_length=50, blank=True)
billing_events = models.TextField(blank=True)
document_number = models.CharField(max_length=100, blank=True)
date_wait_for_updated = models.DateTimeField(blank=True, null=True, verbose_name='Date Wait for Updated')
wait_for_reson = models.CharField(
choices=WAIT_FOR_CHOICES,
max_length=2,
blank=True,
null=True,
verbose_name='Wait for'
)
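    # wait_for_reson stores the two-letter waiting-reason code; get_waiting_reason()
    # below translates it into the label defined in WAIT_FOR.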
nine_ninety_form_needed = models.NullBooleanField(
verbose_name='990 Form Needed?')
task_location = models.CharField(
choices=TASK_LOCATION_CHOICES,
max_length=2,
blank=True)
performance_site = models.CharField(
choices=PERFORMANCE_SITE_CHOICES,
max_length=2,
blank=True)
expanded_authority = models.NullBooleanField(
verbose_name='Expanded Authority?')
financial_reporting_req = MultiSelectField(
choices=REPORTING_CHOICES,
blank=True,
verbose_name='Financial Reporting Requirements')
financial_reporting_oth = models.CharField(
max_length=250,
blank=True,
verbose_name='Other financial reporting requirements')
technical_reporting_req = MultiSelectField(
choices=REPORTING_CHOICES,
blank=True,
verbose_name='Technical Reporting Requirements')
technical_reporting_oth = models.CharField(
max_length=250,
blank=True,
verbose_name='Other technical reporting requirements')
patent_reporting_req = models.DateField(
null=True,
blank=True,
verbose_name='Patent Report Requirement')
invention_reporting_req = models.DateField(
null=True,
blank=True,
verbose_name='Invention Report Requirement')
property_reporting_req = models.DateField(
null=True,
blank=True,
verbose_name='Property Report Requirement')
equipment_reporting_req = models.DateField(
null=True,
blank=True,
verbose_name='Equipment Report Requirement')
budget_restrictions = models.NullBooleanField(
verbose_name='Budget Restrictions?')
award_template = models.ForeignKey(
AwardTemplate,
null=True,
blank=True,
limit_choices_to={
'active': True})
award_setup_complete = models.DateField(
null=True,
blank=True,
verbose_name='Award Setup Complete')
qa_screening_complete = models.DateField(
null=True,
blank=True,
verbose_name='QA Screening Complete')
pre_award_spending_auth = models.NullBooleanField(
verbose_name='Pre-award spending authorized?')
record_destroy_date = models.DateField(
null=True,
blank=True,
verbose_name='Record Retention Destroy Date')
ready_for_eas_setup = models.CharField(
choices=EAS_SETUP_CHOICES,
max_length=3,
blank=True,
verbose_name='Ready for EAS Setup?')
wait_for = models.TextField(blank=True)
setup_completion_date = models.DateTimeField(blank=True, null=True, verbose_name='Completion Date')
def __unicode__(self):
return u'Award Setup %s' % (self.id)
def get_absolute_url(self):
"""Gets the URL used to navigate to this object"""
return reverse('edit_award_setup', kwargs={'award_pk': self.award.pk})
def copy_from_proposal(self, proposal):
"""Copy common fields from the given proposal to this AwardSetup"""
if proposal:
self.start_date = proposal.project_start_date
self.end_date = proposal.project_end_date
self.save()
def get_waiting_reason(self):
return self.WAIT_FOR.get(self.wait_for_reson) if self.wait_for_reson else ''
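# Illustrative use of the AwardSetup helpers above (a minimal sketch; the
# variable names and the default OneToOne reverse accessor are assumptions,
# not part of this module):
#
#     setup = award.awardsetup
#     setup.copy_from_proposal(award.get_most_recent_proposal())  # copies start/end dates
#     setup.wait_for_reson = 'RB'
#     setup.save()
#     setup.get_waiting_reason()  # -> 'Revised Budget'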


class AwardModification(AssignableAwardSection):
"""Model for the AwardModification data"""
WAIT_FOR_CHOICES = (
('RB', 'Revised Budget'),
('PA', 'PI Access'),
('CA', 'Cost Share Approval'),
('FC', 'FCOI'),
('PS', 'Proposal Submission'),
('SC', 'Sponsor Clarity'),
('NO', 'New Org needed'),
('IC', 'Internal Clarification'),
('DC', 'Documents not in GW Docs'))
SP_TYPE_CHOICES = (
('SP1', 'SP1 - Research and Development'),
('SP2', 'SP2 - Training'),
('SP3', 'SP3 - Other'),
('SP4', 'SP4 - Clearing and Suspense'),
('SP5', 'SP5 - Program Income'),
)
REPORTING_CHOICES = (
('MN', 'Monthly'),
('QR', 'Quarterly'),
('SA', 'Semi-annually'),
('AN', 'Annually'),
('OT', 'Other (specify)')
)
EAS_AWARD_CHOICES = (
('C', 'Contract'),
('G', 'Grant'),
('I', 'Internal Funding'),
('PP', 'Per Patient'),
('PA', 'Pharmaceutical')
)
PROPERTY_CHOICES = (
('TG', 'Title to GW'),
('TS', 'Title to Sponsor'),
('TD', 'Title to be determined at purchase'),
('SE', 'Special EAS Value')
)
ONR_CHOICES = (
('Y', 'Yes, Administered'),
('N', 'No, Administered')
)
COST_SHARING_CHOICES = (
('M', 'Mandatory'),
('V', 'Voluntary'),
('B', 'Both')
)
PERFORMANCE_SITE_CHOICES = (
('ON', 'On-campus'),
('OF', 'Off-campus'),
('OT', 'Other')
)
TASK_LOCATION_CHOICES = (
('AL', 'AL - ALEXANDRIA'),
('BE', 'BE - BETHESDA'),
('CC', 'CC - CRYSTAL CITY'),
('CL', 'CL - CLARENDON'),
('CM', 'CM - ST MARY\'S COUNTY, CALIFORNIA, MD'),
('CW', 'CW - K STREET CENTER OFF-CAMPUS DC'),
('DE', 'DE - DISTANCE EDUCATION'),
('FB', 'FB - FOGGY BOTTOM'),
('FC', 'FC - CITY OF FALLS CHURCH'),
('FX', 'FX - FAIRFAX COUNTY'),
('GS', 'GS - GODDARD SPACE FLIGHT CENTER'),
('HR', 'HR - HAMPTON ROADS'),
('IN', 'IN - INTERNATIONAL'),
('LA', 'LA - LANGLEY AIR FORCE BASE'),
('LO', 'LO - LOUDOUN COUNTY OTHER'),
('MV', 'MV - MOUNT VERNON CAMPUS'),
('OA', 'OA - OTHER ARLINGTON COUNTY'),
('OD', 'OD - OTHER DISTRICT OF COLUMBIA'),
('OG', 'OG - OTHER MONTGOMERY COUNTY'),
('OM', 'OM - OTHER MARYLAND'),
('OV', 'OV - OTHER VIRGINIA'),
('PA', 'PA - PACE - Classes at Sea'),
('RI', 'RI - RICHMOND, CITY OF'),
('RO', 'RO - ROSSLYN ARLINGTON COUNTY'),
('RV', 'RV - ROCKVILLE'),
('SM', 'SM - SUBURBAN MARYLAND'),
('T', 'T - TOTAL LOCATION'),
('US', 'US - OTHER US'),
('VC', 'VC - VIRGINIA CAMPUS'),
('VR', 'VR - VIRGINIA RESEARCH AND TECHNOLOGY CENTER'),
('VS', 'VS - VIRGINIA SQUARE'),
)
EAS_SETUP_CHOICES = (
('Y', 'Yes'),
('N', 'No'),
('M', 'Manual'),
)
HIDDEN_FIELDS = AwardSection.HIDDEN_FIELDS + [
'award_template',
'short_name',
'task_location',
'start_date',
'end_date',
'final_reports_due_date',
'eas_award_type',
'sp_type',
'indirect_cost_schedule',
'allowed_cost_schedule',
'cfda_number',
'federal_negotiated_rate',
'bill_to_address',
'billing_events',
'contact_name',
'phone',
'financial_reporting_req',
'financial_reporting_oth',
'property_equip_code',
'onr_administered_code',
'cost_sharing_code',
'document_number',
'performance_site',
'award_setup_complete',
'qa_screening_complete',
'ready_for_eas_setup',
]
HIDDEN_SEARCH_FIELDS = AwardSection.HIDDEN_SEARCH_FIELDS + [
'nine_ninety_form_needed',
'patent_reporting_req',
'invention_reporting_req',
'property_reporting_req',
'equipment_reporting_req',
'budget_restrictions',
'record_destroy_date',
'date_assigned']
EAS_REPORT_FIELDS = [
# PTA info first
'award_template',
'short_name',
'task_location',
'start_date',
'end_date',
'final_reports_due_date',
'eas_award_type',
'sp_type',
'indirect_cost_schedule',
'allowed_cost_schedule',
'cfda_number',
'federal_negotiated_rate',
'bill_to_address',
'contact_name',
'phone',
'financial_reporting_req',
'financial_reporting_oth',
'property_equip_code',
'onr_administered_code',
'cost_sharing_code',
'billing_events',
'document_number',
'nine_ninety_form_needed',
]
minimum_fields = (
)
MULTIPLE_SELECT_FIELDS = (
'financial_reporting_req',
'technical_reporting_req',
)
award = models.ForeignKey(Award)
short_name = models.CharField(
max_length=30,
blank=True,
verbose_name='Award short name')
start_date = models.DateField(null=True, blank=True)
end_date = models.DateField(null=True, blank=True)
final_reports_due_date = models.DateField(
null=True,
blank=True,
verbose_name='Final Reports/Final Invoice Due Date (Close Date)')
eas_award_type = models.CharField(
choices=EAS_AWARD_CHOICES,
max_length=2,
blank=True,
verbose_name='EAS award type')
sp_type = models.CharField(
choices=SP_TYPE_CHOICES,
max_length=3,
blank=True,
verbose_name='SP Type')
indirect_cost_schedule = models.ForeignKey(
IndirectCost,
null=True,
blank=True,
limit_choices_to={
'active': True})
allowed_cost_schedule = models.ForeignKey(
AllowedCostSchedule,
null=True,
blank=True,
limit_choices_to={
'active': True})
cfda_number = models.ForeignKey(
CFDANumber,
null=True,
blank=True,
limit_choices_to={
'active': True},
verbose_name='CFDA number')
federal_negotiated_rate = models.ForeignKey(
FedNegRate,
null=True,
blank=True,
limit_choices_to={
'active': True})
property_equip_code = models.CharField(
choices=PROPERTY_CHOICES,
max_length=2,
blank=True,
verbose_name='T&C: Property and Equipment Code')
onr_administered_code = models.CharField(
choices=ONR_CHOICES,
max_length=2,
blank=True,
verbose_name='T&C: ONR Administered Code')
cost_sharing_code = models.CharField(
choices=COST_SHARING_CHOICES,
max_length=2,
blank=True,
verbose_name='T&C: Cost Sharing Code')
bill_to_address = models.TextField(blank=True)
contact_name = models.CharField(
max_length=150,
blank=True,
verbose_name='Contact Name (Last, First)')
phone = models.CharField(max_length=50, blank=True)
billing_events = models.TextField(blank=True)
document_number = models.CharField(max_length=100, blank=True)
date_wait_for_updated = models.DateTimeField(blank=True, null=True, verbose_name='Date Wait for Updated')
wait_for_reson = models.CharField(
choices=WAIT_FOR_CHOICES,
max_length=2,
blank=True,
null=True,
verbose_name='Wait for'
)
nine_ninety_form_needed = models.NullBooleanField(
verbose_name='990 Form Needed?')
task_location = models.CharField(
choices=TASK_LOCATION_CHOICES,
max_length=2,
blank=True)
performance_site = models.CharField(
choices=PERFORMANCE_SITE_CHOICES,
max_length=2,
blank=True)
expanded_authority = models.NullBooleanField(
verbose_name='Expanded Authority?')
financial_reporting_req = MultiSelectField(
choices=REPORTING_CHOICES,
blank=True,
verbose_name='Financial Reporting Requirements')
financial_reporting_oth = models.CharField(
max_length=250,
blank=True,
verbose_name='Other financial reporting requirements')
technical_reporting_req = MultiSelectField(
choices=REPORTING_CHOICES,
blank=True,
verbose_name='Technical Reporting Requirements')
technical_reporting_oth = models.CharField(
max_length=250,
blank=True,
verbose_name='Other technical reporting requirements')
patent_reporting_req = models.DateField(
null=True,
blank=True,
verbose_name='Patent Report Requirement')
invention_reporting_req = models.DateField(
null=True,
blank=True,
verbose_name='Invention Report Requirement')
property_reporting_req = models.DateField(
null=True,
blank=True,
verbose_name='Property Report Requirement')
equipment_reporting_req = models.DateField(
null=True,
blank=True,
verbose_name='Equipment Report Requirement')
budget_restrictions = models.NullBooleanField(
verbose_name='Budget Restrictions?')
award_template = models.ForeignKey(
AwardTemplate,
null=True,
blank=True,
limit_choices_to={
'active': True})
award_setup_complete = models.DateField(
null=True,
blank=True,
verbose_name='Award Setup Complete')
qa_screening_complete = models.DateField(
null=True,
blank=True,
verbose_name='QA Screening Complete')
pre_award_spending_auth = models.NullBooleanField(
verbose_name='Pre-award spending authorized?')
record_destroy_date = models.DateField(
null=True,
blank=True,
verbose_name='Record Retention Destroy Date')
ready_for_eas_setup = models.CharField(
choices=EAS_SETUP_CHOICES,
max_length=3,
blank=True,
verbose_name='Ready for EAS Setup?')
modification_completion_date = models.DateTimeField(blank=True, null=True, verbose_name='Completion Date')
wait_for = models.TextField(blank=True)
def __unicode__(self):
return u'Award Modification %s' % (self.id)
def get_absolute_url(self):
"""Gets the URL used to navigate to this object"""
return reverse('edit_award_setup', kwargs={'award_pk': self.award.pk})


class PTANumber(FieldIteratorMixin, models.Model):
"""Model for the PTANumber data"""
EAS_AWARD_CHOICES = (
('C', 'Contract'),
('G', 'Grant'),
('I', 'Internal Funding'),
('PP', 'Per Patient'),
('PA', 'Pharmaceutical')
)
SP_TYPE_CHOICES = (
('SP1', 'SP1 - Research and Development'),
('SP2', 'SP2 - Training'),
('SP3', 'SP3 - Other'),
('SP4', 'SP4 - Clearing and Suspense'),
('SP5', 'SP5 - Program Income'),
('SP7', 'SP7 - Symposium/Conference/Seminar'),
)
EAS_SETUP_CHOICES = (
('Y', 'Yes'),
('N', 'No'),
('M', 'Manual'),
)
EAS_STATUS_CHOICES = (
('A', 'Active'),
('OH', 'On hold'),
('AR', 'At risk'),
('C', 'Closed')
)
HIDDEN_FIELDS = ['award']
HIDDEN_TABLE_FIELDS = []
HIDDEN_SEARCH_FIELDS = AwardSection.HIDDEN_SEARCH_FIELDS + [
'parent_banner_number',
'banner_number',
'cs_banner_number',
'allowed_cost_schedule',
'award_template',
'preaward_date',
'federal_negotiated_rate',
'indirect_cost_schedule',
'sponsor_banner_number',
'ready_for_eas_setup']
award = models.ForeignKey(Award)
project_number = models.CharField(
max_length=100,
blank=True,
verbose_name='Project #')
task_number = models.CharField(
max_length=100,
blank=True,
verbose_name='Task #')
award_number = models.CharField(
max_length=100,
blank=True,
verbose_name='Award #')
award_setup_complete = models.DateField(
null=True,
blank=True,
verbose_name='Award Setup Complete')
total_pta_amount = models.DecimalField(
decimal_places=2,
max_digits=10,
null=True,
blank=True,
verbose_name='Total PTA Amt')
parent_banner_number = models.CharField(
max_length=100,
blank=True,
verbose_name='Prnt Banner #')
banner_number = models.CharField(
max_length=100,
blank=True,
verbose_name='Banner #')
cs_banner_number = models.CharField(
max_length=100,
blank=True,
verbose_name='CS Banner #')
principal_investigator = models.ForeignKey(
AwardManager,
null=True,
blank=True,
limit_choices_to={
'active': True},
verbose_name='PI*')
agency_name = models.ForeignKey(
FundingSource,
null=True,
blank=True,
limit_choices_to={
'active': True},
verbose_name='Agency Name*')
department_name = models.ForeignKey(
AwardOrganization,
null=True,
blank=True,
limit_choices_to={
'active': True},
verbose_name='Department Code & Name*')
project_title = models.CharField(max_length=256, blank=True, verbose_name='Project Title*')
who_is_prime = models.ForeignKey(
PrimeSponsor,
null=True,
blank=True,
limit_choices_to={
'active': True})
allowed_cost_schedule = models.ForeignKey(
AllowedCostSchedule,
null=True,
blank=True,
limit_choices_to={
'active': True},
verbose_name='Allowed Cost Schedule*')
award_template = models.ForeignKey(
AwardTemplate,
null=True,
blank=True,
limit_choices_to={
'active': True},
verbose_name='Award Template*')
cfda_number = models.ForeignKey(
CFDANumber,
null=True,
blank=True,
limit_choices_to={
'active': True},
verbose_name='CFDA number*')
eas_award_type = models.CharField(
choices=EAS_AWARD_CHOICES,
max_length=2,
blank=True,
verbose_name='EAS Award Type*')
preaward_date = models.DateField(null=True, blank=True)
start_date = models.DateField(null=True, blank=True, verbose_name='Start Date*')
end_date = models.DateField(null=True, blank=True, verbose_name='End Date*')
final_reports_due_date = models.DateField(
null=True,
blank=True,
verbose_name='Final Reports/Final Invoice Due Date (Close Date)*')
federal_negotiated_rate = models.ForeignKey(
FedNegRate,
null=True,
blank=True,
limit_choices_to={
'active': True},
verbose_name='Federal Negotiated Rate*')
indirect_cost_schedule = models.ForeignKey(
IndirectCost,
null=True,
blank=True,
limit_choices_to={
'active': True},
verbose_name='Indirect Cost Schedule*')
sp_type = models.CharField(
choices=SP_TYPE_CHOICES,
max_length=3,
blank=True,
verbose_name='SP Type*')
short_name = models.CharField(
max_length=30,
blank=True,
verbose_name='Award Short Name*')
agency_award_number = models.CharField(
max_length=50,
blank=True,
verbose_name='Agency Award Number*')
sponsor_award_number = models.CharField(
max_length=50,
blank=True,
verbose_name='Prime Award # (if GW is subawardee)*')
sponsor_banner_number = models.CharField(max_length=50, blank=True)
eas_status = models.CharField(
choices=EAS_STATUS_CHOICES,
max_length=2,
blank=True,
verbose_name='EAS Status*')
ready_for_eas_setup = models.CharField(
choices=EAS_SETUP_CHOICES,
max_length=3,
blank=True,
verbose_name='Ready for EAS Setup?')
is_edited = models.BooleanField(default=False)
pta_number_updated = models.DateField(
null=True,
blank=True)
def __unicode__(self):
return u'PTA #%s' % (self.project_number)
def save(self, *args, **kwargs):
"""Overrides the parent save method.
If this is the first PTANumber entered (either on creation or save later),
update some fields back to the most recent Proposal.
"""
super(PTANumber, self).save(*args, **kwargs)
if self == self.award.get_first_pta_number():
proposal = self.award.get_most_recent_proposal()
if proposal and self.agency_name != proposal.agency_name:
proposal.agency_name = self.agency_name
proposal.save()
if proposal and self.who_is_prime != proposal.who_is_prime:
proposal.who_is_prime = self.who_is_prime
proposal.save()
if proposal and self.project_title != proposal.project_title:
proposal.project_title = self.project_title
proposal.save()
if proposal and self.start_date != proposal.project_start_date:
proposal.project_start_date = self.start_date
proposal.save()
if proposal and self.end_date != proposal.project_end_date:
proposal.project_end_date = self.end_date
proposal.save()
award_acceptance = self.award.get_current_award_acceptance()
if self.agency_award_number != award_acceptance.agency_award_number:
award_acceptance.agency_award_number = self.agency_award_number
award_acceptance.save()
if self.sponsor_award_number != award_acceptance.sponsor_award_number:
award_acceptance.sponsor_award_number = self.sponsor_award_number
award_acceptance.save()
if self.eas_status != award_acceptance.eas_status:
award_acceptance.eas_status = self.eas_status
award_acceptance.save()
if self.project_title != award_acceptance.project_title:
award_acceptance.project_title = self.project_title
award_acceptance.save()
def get_absolute_url(self):
"""Gets the URL used to navigate to this object"""
return reverse(
'edit_pta_number',
kwargs={
'award_pk': self.award.pk,
'pta_pk': self.id})
def get_delete_url(self):
"""Gets the URL used to delete this object"""
return reverse(
'delete_pta_number',
kwargs={
'award_pk': self.award.pk,
'pta_pk': self.id})
def get_recent_ptanumber_revision(self):
"""Gets the most recent revision of the model, using django-reversion"""
latest_revision = reversion.get_for_object(self)[0].revision
if latest_revision.user:
user = latest_revision.user.get_full_name()
else:
user = 'ATP'
return (user, latest_revision.date_created)
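# Illustrative use of the django-reversion helper above (a sketch that assumes
# reversion is registered for PTANumber; variable names are hypothetical):
#
#     pta = PTANumber.objects.get(pk=pta_pk)
#     user_name, revised_on = pta.get_recent_ptanumber_revision()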


class Subaward(AwardSection):
"""Model for the Subaward data"""
RISK_CHOICES = (
('L', 'Low'),
('M', 'Medium'),
('H', 'High')
)
SUBRECIPIENT_TYPE_CHOICES = (
('F', 'Foundation'),
('FP', 'For-Profit'),
('SG', 'State Government'),
('LG', 'Local Government'),
('I', 'International'),
('ON', 'Other non-profit'),
('U', 'University')
)
AGREEMENT_CHOICES = (
('SA', 'Subaward'),
('SC', 'Subcontract'),
('IC', 'ICA'),
('M', 'Modification'),
('H', 'Honorarium'),
('C', 'Consultant'),
('CS', 'Contract Service')
)
SUBAWARD_STATUS_CHOICES = (
('R', 'Review'),
('G', 'Waiting for GCAS approval'),
('D', 'Waiting for Department'),
('P', 'Procurement'),
        ('S', 'Sent to recipient'),
)
CONTRACT_CHOICES = (
('FP', 'Fixed price subcontract'),
('CR', 'Cost-reimbursable subcontract'),
('FA', 'Fixed amount award'),
('OT', 'Other')
)
minimum_fields = (
'subrecipient_type',
'risk',
'amount',
'gw_number',
'contact_information',
'subaward_start',
'subaward_end',
'agreement_type',
'debarment_check',
'international',
'sent',
'ffata_reportable',
'zip_code',
)
HIDDEN_SEARCH_FIELDS = AwardSection.HIDDEN_SEARCH_FIELDS + [
'creation_date',
'modification_number',
'subaward_ready',
'sent',
'reminder',
'fcoi_cleared',
'citi_cleared',
'amount',
'contact_information',
'zip_code',
'subaward_start',
'subaward_end',
'debarment_check',
'international',
'cfda_number',
'ffata_submitted',
'tech_report_received']
award = models.ForeignKey(Award)
creation_date = models.DateTimeField(auto_now_add=True, blank=True, null=True, verbose_name='Date Created')
recipient = models.CharField(max_length=250, blank=True)
agreement_type = models.CharField(
choices=AGREEMENT_CHOICES,
max_length=2,
blank=True)
modification_number = models.CharField(max_length=50, blank=True)
subrecipient_type = models.CharField(
choices=SUBRECIPIENT_TYPE_CHOICES,
max_length=2,
blank=True,
verbose_name='Subrecipient Type')
assist = models.CharField(max_length=100, blank=True)
date_received = models.DateField(null=True, blank=True)
status = models.CharField(
choices=SUBAWARD_STATUS_CHOICES,
max_length=2,
blank=True)
risk = models.CharField(choices=RISK_CHOICES, max_length=2, blank=True)
approval_expiration = models.DateField(
null=True,
blank=True,
verbose_name='Date of Expiration for Approval')
subaward_ready = models.DateField(
null=True,
blank=True,
verbose_name='Subaward ready to be initiated')
sent = models.DateField(
null=True,
blank=True,
verbose_name='Subagreement sent to recipient')
reminder = models.NullBooleanField(
verbose_name='Reminder sent to Subawardee?')
received = models.DateField(
null=True,
blank=True,
verbose_name='Receipt of Partially Executed Subagreement')
fcoi_cleared = models.DateField(
null=True,
blank=True,
verbose_name='Subaward Cleared FCOI Procedures')
citi_cleared = models.DateField(
null=True,
blank=True,
verbose_name='Subaward Completed CITI Training')
date_fully_executed = models.DateField(null=True, blank=True)
amount = models.DecimalField(
decimal_places=2,
max_digits=10,
null=True,
blank=True,
verbose_name='Subaward Total Amount')
gw_number = models.CharField(
max_length=50,
blank=True,
verbose_name='GW Subaward Number')
funding_mechanism = models.CharField(
choices=CONTRACT_CHOICES,
max_length=2,
blank=True,
verbose_name='Funding mechanism')
other_mechanism = models.CharField(
max_length=255,
blank=True,
verbose_name='Other funding mechanism')
contact_information = models.TextField(
blank=True,
verbose_name='Subawardee contact information')
zip_code = models.CharField(
max_length=50,
blank=True,
verbose_name='ZIP code')
subaward_start = models.DateField(
null=True,
blank=True,
verbose_name='Subaward Performance Period Start')
subaward_end = models.DateField(
null=True,
blank=True,
verbose_name='Subaward Performance Period End')
debarment_check = models.NullBooleanField(
verbose_name='Debarment or suspension check?')
international = models.NullBooleanField(verbose_name='International?')
cfda_number = models.CharField(
max_length=50,
blank=True,
verbose_name='CFDA number')
fain = models.CharField(max_length=50, blank=True, verbose_name='FAIN')
ein = models.CharField(max_length=50, blank=True, verbose_name='EIN')
duns_number = models.CharField(
max_length=50,
blank=True,
verbose_name='DUNS number')
ffata_reportable = models.NullBooleanField(
verbose_name='FFATA Reportable?')
ffata_submitted = models.DateField(
null=True,
blank=True,
verbose_name='FFATA Report Submitted Date')
tech_report_due = models.DateField(
null=True,
blank=True,
verbose_name='Technical Report Due Date')
tech_report_received = models.DateField(
null=True,
blank=True,
verbose_name='Technical Report Received Date')
subaward_completion_date = models.DateTimeField(blank=True, null=True, verbose_name='Completion Date')
def __unicode__(self):
return u'Subaward %s' % (self.gw_number)
def get_absolute_url(self):
"""Gets the URL used to navigate to this object"""
return reverse(
'edit_subaward',
kwargs={
'award_pk': self.award.pk,
'subaward_pk': self.id})


class AwardManagement(AssignableAwardSection):
"""Model for the AwardManagement data"""
minimum_fields = (
)
HIDDEN_SEARCH_FIELDS = AwardSection.HIDDEN_SEARCH_FIELDS + [
'date_assigned']
award = models.OneToOneField(Award)
management_completion_date = models.DateTimeField(blank=True, null=True, verbose_name='Completion Date')
def __unicode__(self):
return u'Award Management %s' % (self.id)
def get_absolute_url(self):
"""Gets the URL used to navigate to this object"""
return reverse(
'edit_award_management',
kwargs={
'award_pk': self.award.pk})


class PriorApproval(FieldIteratorMixin, models.Model):
"""Model for the PriorApproval data"""
HIDDEN_FIELDS = ['award']
HIDDEN_TABLE_FIELDS = []
REQUEST_CHOICES = (
('AB', 'Absence or Change of Key Personnel'),
('CF', 'Carry-forward of unexpended balances to subsequent funding periods'),
('CS', 'Change in Scope'),
('ER', 'Effort Reduction'),
('EN', 'Equipment not in approved budget'),
('FC', 'Faculty consulting compensation that exceeds base salary'),
('FT', 'Foreign Travel'),
('IN', 'Initial no-cost extension of up to 12 months (per competitive segment)'),
('OT', 'Other'),
('RA', 'Rebudgeting among budget categories'),
('RB', 'Rebudgeting between direct and F&A costs'),
('RF', 'Rebudgeting of funds allotted for training (direct payment to trainees) to other categories of expense'),
        ('SN', 'Subsequent no-cost extension or extension of more than 12 months'),
)
PRIOR_APPROVAL_STATUS_CHOICES = (
('PN', 'Pending'),
('AP', 'Approved'),
('NA', 'Not Approved'),
)
award = models.ForeignKey(Award)
request = models.CharField(
choices=REQUEST_CHOICES,
max_length=2,
blank=True)
date_submitted = models.DateField(null=True, blank=True)
status = models.CharField(
choices=PRIOR_APPROVAL_STATUS_CHOICES,
max_length=2,
blank=True)
date_approved = models.DateField(null=True, blank=True)
def __unicode__(self):
return u'Prior Approval #%s' % (self.id)
def get_absolute_url(self):
"""Gets the URL used to navigate to this object."""
return reverse(
'edit_prior_approval',
kwargs={
'award_pk': self.award.pk,
'prior_approval_pk': self.id})
def get_delete_url(self):
"""Gets the URL used to delete this object"""
return reverse(
'delete_prior_approval',
kwargs={
'award_pk': self.award.pk,
'prior_approval_pk': self.id})


class ReportSubmission(FieldIteratorMixin, models.Model):
"""Model for the ReportSubmission data"""
HIDDEN_FIELDS = ['award']
HIDDEN_TABLE_FIELDS = []
REPORT_CHOICES = (
('TA', 'Technical Annual'),
('TS', 'Technical Semiannual'),
('TQ', 'Technical Quarterly'),
('IP', 'Interim Progress Report (Non-Competing Continuations)'),
('DL', 'Deliverables'),
('IP', 'Invention/Patent Annual'),
('PA', 'Property Annual'),
('EA', 'Equipment Annual')
)
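    # Note: the 'IP' code appears twice above (Interim Progress Report and
    # Invention/Patent Annual), so a stored value of 'IP' cannot distinguish
    # between those two report types.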
award = models.ForeignKey(Award)
report = models.CharField(choices=REPORT_CHOICES, max_length=2, blank=True)
due_date = models.DateField(null=True, blank=True)
submitted_date = models.DateField(null=True, blank=True)
def __unicode__(self):
return u'Report Submission #%s' % (self.id)
def get_absolute_url(self):
"""Gets the URL used to navigate to this object"""
return reverse(
'edit_report_submission',
kwargs={
'award_pk': self.award.pk,
'report_submission_pk': self.id})
def get_delete_url(self):
"""Gets the URL used to delete this object"""
return reverse(
'delete_report_submission',
kwargs={
'award_pk': self.award.pk,
'report_submission_pk': self.id})


class AwardCloseout(AssignableAwardSection):
"""Model for the AwardCloseout data"""
minimum_fields = (
)
HIDDEN_SEARCH_FIELDS = AwardSection.HIDDEN_SEARCH_FIELDS + [
'date_assigned']
award = models.OneToOneField(Award)
closeout_completion_date = models.DateTimeField(blank=True, null=True, verbose_name='Completion Date')
def __unicode__(self):
return u'Award Closeout %s' % (self.id)
def get_absolute_url(self):
"""Gets the URL used to navigate to this object"""
return reverse(
'edit_award_closeout',
kwargs={
'award_pk': self.award.pk})


class FinalReport(FieldIteratorMixin, models.Model):
"""Model for the FinalReport data"""
HIDDEN_FIELDS = ['award']
HIDDEN_TABLE_FIELDS = []
FINAL_REPORT_CHOICES = (
('FT', 'Final Technical'),
('FP', 'Final Progress Report'),
('FD', 'Final Deliverable(s)'),
('IP', 'Final Invention/Patent'),
('FI', 'Final Invention'),
('FP', 'Final Property'),
('FE', 'Final Equipment'),
)
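    # Note: the 'FP' code appears twice above (Final Progress Report and Final
    # Property), so a stored value of 'FP' cannot distinguish between those two
    # report types.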
award = models.ForeignKey(Award)
report = models.CharField(
choices=FINAL_REPORT_CHOICES,
max_length=2,
blank=True)
due_date = models.DateField(null=True, blank=True)
submitted_date = models.DateField(null=True, blank=True)
def __unicode__(self):
return u'Final Report #%s' % (self.id)
def get_absolute_url(self):
""" Gets the URL used to navigate to this object"""
return reverse(
'edit_final_report',
kwargs={
'award_pk': self.award.pk,
'final_report_pk': self.id})
def get_delete_url(self):
"""Gets the URL used to delete this object"""
return reverse(
'delete_final_report',
kwargs={
'award_pk': self.award.pk,
'final_report_pk': self.id})
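# The get_absolute_url/get_delete_url helpers defined throughout this module
# are typically consumed in templates or view redirects, e.g. (illustrative
# only, not part of this module):
#
#     from django.shortcuts import redirect
#     return redirect(final_report.get_absolute_url())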
'blank': '(True)', 'verbose_name': '"""Previous Grant #"""'}), "(max_length=256, blank=True, verbose_name='Previous Grant #')\n", (82564, 82625), False, 'from django.db import models\n'), ((82658, 82753), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(10)', 'blank': '(True)', 'verbose_name': '"""Change in grantee institution?"""'}), "(max_length=10, blank=True, verbose_name=\n 'Change in grantee institution?')\n", (82674, 82753), False, 'from django.db import models\n'), ((82793, 82837), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(256)', 'blank': '(True)'}), '(max_length=256, blank=True)\n', (82809, 82837), False, 'from django.db import models\n'), ((82863, 82907), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(256)', 'blank': '(True)'}), '(max_length=256, blank=True)\n', (82879, 82907), False, 'from django.db import models\n'), ((82938, 83027), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(256)', 'blank': '(True)', 'verbose_name': '"""Departmental ID primary"""'}), "(max_length=256, blank=True, verbose_name=\n 'Departmental ID primary')\n", (82954, 83027), False, 'from django.db import models\n'), ((83080, 83171), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(256)', 'blank': '(True)', 'verbose_name': '"""Departmental ID secondary"""'}), "(max_length=256, blank=True, verbose_name=\n 'Departmental ID secondary')\n", (83096, 83171), False, 'from django.db import models\n'), ((83224, 83268), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(256)', 'blank': '(True)'}), '(max_length=256, blank=True)\n', (83240, 83268), False, 'from django.db import models\n'), ((83303, 83347), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(256)', 'blank': '(True)'}), '(max_length=256, blank=True)\n', (83319, 83347), False, 'from django.db import models\n'), ((83382, 83475), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(10)', 'blank': '(True)', 'verbose_name': '"""Are vertebrate animals used?"""'}), "(max_length=10, blank=True, verbose_name=\n 'Are vertebrate animals used?')\n", (83398, 83475), False, 'from django.db import models\n'), ((83526, 83615), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(10)', 'blank': '(True)', 'verbose_name': '"""Is IACUC review pending?"""'}), "(max_length=10, blank=True, verbose_name=\n 'Is IACUC review pending?')\n", (83542, 83615), False, 'from django.db import models\n'), ((83664, 83751), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(256)', 'blank': '(True)', 'verbose_name': '"""IACUC protocol number"""'}), "(max_length=256, blank=True, verbose_name=\n 'IACUC protocol number')\n", (83680, 83751), False, 'from django.db import models\n'), ((83798, 83873), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""IACUC approval date"""'}), "(null=True, blank=True, verbose_name='IACUC approval date')\n", (83814, 83873), False, 'from django.db import models\n'), ((83929, 84018), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(10)', 'blank': '(True)', 'verbose_name': '"""Are human subjects used?"""'}), "(max_length=10, blank=True, verbose_name=\n 'Are human subjects used?')\n", (83945, 84018), False, 'from django.db import models\n'), ((84067, 84154), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(10)', 'blank': '(True)', 
'verbose_name': '"""Is IRB review pending?"""'}), "(max_length=10, blank=True, verbose_name=\n 'Is IRB review pending?')\n", (84083, 84154), False, 'from django.db import models\n'), ((84201, 84286), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(256)', 'blank': '(True)', 'verbose_name': '"""IRB protocol number"""'}), "(max_length=256, blank=True, verbose_name='IRB protocol number'\n )\n", (84217, 84286), False, 'from django.db import models\n'), ((84329, 84400), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""IRB review date"""'}), "(null=True, blank=True, verbose_name='IRB review date')\n", (84345, 84400), False, 'from django.db import models\n'), ((84443, 84532), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(10)', 'blank': '(True)', 'verbose_name': '"""Uses hazardous materials"""'}), "(max_length=10, blank=True, verbose_name=\n 'Uses hazardous materials')\n", (84459, 84532), False, 'from django.db import models\n'), ((84562, 84653), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Budget first period start date"""'}), "(null=True, blank=True, verbose_name=\n 'Budget first period start date')\n", (84578, 84653), False, 'from django.db import models\n'), ((84706, 84795), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Budget first period end date"""'}), "(null=True, blank=True, verbose_name=\n 'Budget first period end date')\n", (84722, 84795), False, 'from django.db import models\n'), ((84849, 84892), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(10)', 'blank': '(True)'}), '(max_length=10, blank=True)\n', (84865, 84892), False, 'from django.db import models\n'), ((84920, 84995), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (84939, 84995), False, 'from django.db import models\n'), ((85056, 85100), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(256)', 'blank': '(True)'}), '(max_length=256, blank=True)\n', (85072, 85100), False, 'from django.db import models\n'), ((85133, 85176), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(10)', 'blank': '(True)'}), '(max_length=10, blank=True)\n', (85149, 85176), False, 'from django.db import models\n'), ((85203, 85278), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (85222, 85278), False, 'from django.db import models\n'), ((85338, 85382), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(256)', 'blank': '(True)'}), '(max_length=256, blank=True)\n', (85354, 85382), False, 'from django.db import models\n'), ((85420, 85463), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(10)', 'blank': '(True)'}), '(max_length=10, blank=True)\n', (85436, 85463), False, 'from django.db import models\n'), ((85509, 85552), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(10)', 'blank': '(True)'}), '(max_length=10, blank=True)\n', (85525, 85552), False, 'from django.db import models\n'), ((85589, 85632), 'django.db.models.CharField', 'models.CharField', ([], 
{'max_length': '(10)', 'blank': '(True)'}), '(max_length=10, blank=True)\n', (85605, 85632), False, 'from django.db import models\n'), ((85652, 85727), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (85671, 85727), False, 'from django.db import models\n'), ((85782, 85857), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (85801, 85857), False, 'from django.db import models\n'), ((85912, 85987), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (85931, 85987), False, 'from django.db import models\n'), ((86042, 86117), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (86061, 86117), False, 'from django.db import models\n'), ((86172, 86247), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (86191, 86247), False, 'from django.db import models\n'), ((86302, 86377), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (86321, 86377), False, 'from django.db import models\n'), ((86432, 86507), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (86451, 86507), False, 'from django.db import models\n'), ((86562, 86637), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (86581, 86637), False, 'from django.db import models\n'), ((86692, 86767), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (86711, 86767), False, 'from django.db import models\n'), ((86822, 86897), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (86841, 86897), False, 'from django.db import models\n'), ((86953, 87028), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (86972, 87028), False, 'from django.db import models\n'), ((87087, 87162), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (87106, 87162), False, 'from django.db import models\n'), 
((87224, 87299), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (87243, 87299), False, 'from django.db import models\n'), ((87361, 87436), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (87380, 87436), False, 'from django.db import models\n'), ((87498, 87573), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (87517, 87573), False, 'from django.db import models\n'), ((87635, 87710), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (87654, 87710), False, 'from django.db import models\n'), ((87772, 87847), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (87791, 87847), False, 'from django.db import models\n'), ((87909, 87984), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (87928, 87984), False, 'from django.db import models\n'), ((88046, 88121), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (88065, 88121), False, 'from django.db import models\n'), ((88183, 88258), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (88202, 88258), False, 'from django.db import models\n'), ((88320, 88395), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (88339, 88395), False, 'from django.db import models\n'), ((88458, 88533), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (88477, 88533), False, 'from django.db import models\n'), ((88594, 88669), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (88613, 88669), False, 'from django.db import models\n'), ((88733, 88808), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (88752, 88808), False, 'from django.db import models\n'), ((88872, 88947), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': 
'(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (88891, 88947), False, 'from django.db import models\n'), ((89011, 89086), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (89030, 89086), False, 'from django.db import models\n'), ((89150, 89225), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (89169, 89225), False, 'from django.db import models\n'), ((89289, 89364), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (89308, 89364), False, 'from django.db import models\n'), ((89428, 89503), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (89447, 89503), False, 'from django.db import models\n'), ((89567, 89642), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (89586, 89642), False, 'from django.db import models\n'), ((89706, 89781), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (89725, 89781), False, 'from django.db import models\n'), ((89845, 89920), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (89864, 89920), False, 'from django.db import models\n'), ((89985, 90060), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (90004, 90060), False, 'from django.db import models\n'), ((92876, 92903), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Proposal'], {}), '(Proposal)\n', (92893, 92903), False, 'from django.db import models\n'), ((92923, 92989), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(40)', 'blank': '(True)', 'verbose_name': '"""Emp ID"""'}), "(max_length=40, blank=True, verbose_name='Emp ID')\n", (92939, 92989), False, 'from django.db import models\n'), ((93031, 93074), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(64)', 'blank': '(True)'}), '(max_length=64, blank=True)\n', (93047, 93074), False, 'from django.db import models\n'), ((93092, 93135), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(64)', 'blank': '(True)'}), '(max_length=64, blank=True)\n', (93108, 93135), False, 'from django.db import models\n'), ((93154, 93197), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(32)', 'blank': '(True)'}), '(max_length=32, blank=True)\n', (93170, 93197), False, 'from django.db import models\n'), ((93217, 93261), 'django.db.models.CharField', 'models.CharField', ([], 
{'max_length': '(128)', 'blank': '(True)'}), '(max_length=128, blank=True)\n', (93233, 93261), False, 'from django.db import models\n'), ((93284, 93392), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(3)', 'max_digits': '(5)', 'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Calendar mos."""'}), "(decimal_places=3, max_digits=5, null=True, blank=True,\n verbose_name='Calendar mos.')\n", (93303, 93392), False, 'from django.db import models\n'), ((93452, 93560), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(3)', 'max_digits': '(5)', 'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Academic mos."""'}), "(decimal_places=3, max_digits=5, null=True, blank=True,\n verbose_name='Academic mos.')\n", (93471, 93560), False, 'from django.db import models\n'), ((93618, 93724), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(3)', 'max_digits': '(5)', 'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Summer mos."""'}), "(decimal_places=3, max_digits=5, null=True, blank=True,\n verbose_name='Summer mos.')\n", (93637, 93724), False, 'from django.db import models\n'), ((93775, 93818), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(10)', 'blank': '(True)'}), '(max_length=10, blank=True)\n', (93791, 93818), False, 'from django.db import models\n'), ((94780, 94807), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Proposal'], {}), '(Proposal)\n', (94797, 94807), False, 'from django.db import models\n'), ((94831, 94904), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'blank': '(True)', 'verbose_name': '"""Organization"""'}), "(max_length=255, blank=True, verbose_name='Organization')\n", (94847, 94904), False, 'from django.db import models\n'), ((94944, 95010), 'django.db.models.BigIntegerField', 'models.BigIntegerField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""DUNS"""'}), "(null=True, blank=True, verbose_name='DUNS')\n", (94966, 95010), False, 'from django.db import models\n'), ((95053, 95122), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'blank': '(True)', 'verbose_name': '"""Street 1"""'}), "(max_length=255, blank=True, verbose_name='Street 1')\n", (95069, 95122), False, 'from django.db import models\n'), ((95165, 95234), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'blank': '(True)', 'verbose_name': '"""Street 2"""'}), "(max_length=255, blank=True, verbose_name='Street 2')\n", (95181, 95234), False, 'from django.db import models\n'), ((95274, 95339), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'blank': '(True)', 'verbose_name': '"""City"""'}), "(max_length=255, blank=True, verbose_name='City')\n", (95290, 95339), False, 'from django.db import models\n'), ((95355, 95421), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'blank': '(True)', 'verbose_name': '"""State"""'}), "(max_length=100, blank=True, verbose_name='State')\n", (95371, 95421), False, 'from django.db import models\n'), ((95464, 95528), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(128)', 'blank': '(True)', 'verbose_name': '"""Zip"""'}), "(max_length=128, blank=True, verbose_name='Zip')\n", (95480, 95528), False, 'from django.db import models\n'), ((95571, 95639), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(128)', 'blank': '(True)', 'verbose_name': 
'"""Country"""'}), "(max_length=128, blank=True, verbose_name='Country')\n", (95587, 95639), False, 'from django.db import models\n'), ((101207, 101231), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Award'], {}), '(Award)\n', (101224, 101231), False, 'from django.db import models\n'), ((101252, 101348), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'blank': '(True)', 'null': '(True)', 'verbose_name': '"""Date Created"""'}), "(auto_now_add=True, blank=True, null=True, verbose_name\n ='Date Created')\n", (101272, 101348), False, 'from django.db import models\n'), ((101371, 101404), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (101390, 101404), False, 'from django.db import models\n'), ((101423, 101524), 'django.db.models.CharField', 'models.CharField', ([], {'choices': 'EAS_STATUS_CHOICES', 'max_length': '(2)', 'blank': '(True)', 'verbose_name': '"""EAS status"""'}), "(choices=EAS_STATUS_CHOICES, max_length=2, blank=True,\n verbose_name='EAS status')\n", (101439, 101524), False, 'from django.db import models\n'), ((101572, 101624), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {'verbose_name': '"""New Funding?"""'}), "(verbose_name='New Funding?')\n", (101595, 101624), False, 'from django.db import models\n'), ((101649, 101722), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""FCOI cleared date"""'}), "(null=True, blank=True, verbose_name='FCOI cleared date')\n", (101665, 101722), False, 'from django.db import models\n'), ((101765, 101816), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {'verbose_name': '"""PHS funded?"""'}), "(verbose_name='PHS funded?')\n", (101788, 101816), False, 'from django.db import models\n'), ((101844, 101960), 'django.db.models.CharField', 'models.CharField', ([], {'choices': 'PRIORITY_STATUS_CHOICES', 'max_length': '(2)', 'blank': '(True)', 'verbose_name': '"""Award Setup Priority"""'}), "(choices=PRIORITY_STATUS_CHOICES, max_length=2, blank=True,\n verbose_name='Award Setup Priority')\n", (101860, 101960), False, 'from django.db import models\n'), ((102022, 102114), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {'blank': '(True)', 'null': '(True)', 'verbose_name': '"""Prioritized by Director?"""'}), "(blank=True, null=True, verbose_name=\n 'Prioritized by Director?')\n", (102045, 102114), False, 'from django.db import models\n'), ((102130, 102238), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(250)', 'blank': '(True)', 'verbose_name': '"""Project Title (if different from Proposal)"""'}), "(max_length=250, blank=True, verbose_name=\n 'Project Title (if different from Proposal)')\n", (102146, 102238), False, 'from django.db import models\n'), ((102280, 102335), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {'verbose_name': '"""Foreign Travel?"""'}), "(verbose_name='Foreign Travel?')\n", (102303, 102335), False, 'from django.db import models\n'), ((102351, 102420), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(250)', 'blank': '(True)', 'verbose_name': '"""F&A rate"""'}), "(max_length=250, blank=True, verbose_name='F&A rate')\n", (102367, 102420), False, 'from django.db import models\n'), ((102470, 102528), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {'verbose_name': '"""Full F&A Recovery?"""'}), "(verbose_name='Full F&A 
Recovery?')\n", (102493, 102528), False, 'from django.db import models\n'), ((102556, 102657), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(250)', 'blank': '(True)', 'verbose_name': '"""If no full F&A, provide explanation"""'}), "(max_length=250, blank=True, verbose_name=\n 'If no full F&A, provide explanation')\n", (102572, 102657), False, 'from django.db import models\n'), ((102702, 102760), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {'verbose_name': '"""MFA investigators?"""'}), "(verbose_name='MFA investigators?')\n", (102725, 102760), False, 'from django.db import models\n'), ((102796, 102865), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {'verbose_name': '"""Administrative establishment?"""'}), "(verbose_name='Administrative establishment?')\n", (102819, 102865), False, 'from django.db import models\n'), ((102898, 102937), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (102914, 102937), False, 'from django.db import models\n'), ((102966, 103005), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (102982, 103005), False, 'from django.db import models\n'), ((103032, 103075), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'blank': '(True)'}), '(max_length=50, blank=True)\n', (103048, 103075), False, 'from django.db import models\n'), ((103103, 103203), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'blank': '(True)', 'verbose_name': '"""Prime Award # (if GW is subawardee)"""'}), "(max_length=50, blank=True, verbose_name=\n 'Prime Award # (if GW is subawardee)')\n", (103119, 103203), False, 'from django.db import models\n'), ((103249, 103362), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Total award costs"""'}), "(decimal_places=2, max_digits=15, null=True, blank=True,\n verbose_name='Total award costs')\n", (103268, 103362), False, 'from django.db import models\n'), ((103425, 103545), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Total award direct costs"""'}), "(decimal_places=2, max_digits=15, null=True, blank=True,\n verbose_name='Total award direct costs')\n", (103444, 103545), False, 'from django.db import models\n'), ((103610, 103732), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Total award indirect costs"""'}), "(decimal_places=2, max_digits=15, null=True, blank=True,\n verbose_name='Total award indirect costs')\n", (103629, 103732), False, 'from django.db import models\n'), ((103797, 103872), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (103816, 103872), False, 'from django.db import models\n'), ((103934, 104009), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (103953, 104009), False, 'from django.db import 
models\n'), ((104073, 104148), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (104092, 104148), False, 'from django.db import models\n'), ((104209, 104284), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (104228, 104284), False, 'from django.db import models\n'), ((104346, 104421), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (104365, 104421), False, 'from django.db import models\n'), ((104485, 104560), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (104504, 104560), False, 'from django.db import models\n'), ((104621, 104696), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (104640, 104696), False, 'from django.db import models\n'), ((104758, 104833), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (104777, 104833), False, 'from django.db import models\n'), ((104897, 104972), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (104916, 104972), False, 'from django.db import models\n'), ((105033, 105108), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (105052, 105108), False, 'from django.db import models\n'), ((105170, 105245), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (105189, 105245), False, 'from django.db import models\n'), ((105309, 105384), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (105328, 105384), False, 'from django.db import models\n'), ((105445, 105520), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (105464, 105520), False, 'from django.db import models\n'), ((105582, 105657), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (105601, 105657), False, 'from django.db import models\n'), ((105721, 105796), 'django.db.models.DecimalField', 'models.DecimalField', 
([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (105740, 105796), False, 'from django.db import models\n'), ((105857, 105932), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (105876, 105932), False, 'from django.db import models\n'), ((105994, 106069), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (106013, 106069), False, 'from django.db import models\n'), ((106133, 106208), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (106152, 106208), False, 'from django.db import models\n'), ((106269, 106344), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (106288, 106344), False, 'from django.db import models\n'), ((106406, 106481), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (106425, 106481), False, 'from django.db import models\n'), ((106545, 106620), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (106564, 106620), False, 'from django.db import models\n'), ((106681, 106756), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (106700, 106756), False, 'from django.db import models\n'), ((106818, 106893), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (106837, 106893), False, 'from django.db import models\n'), ((106957, 107032), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (106976, 107032), False, 'from django.db import models\n'), ((107093, 107168), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (107112, 107168), False, 'from django.db import models\n'), ((107230, 107305), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (107249, 107305), False, 'from django.db import models\n'), ((107369, 107444), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': 
'(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (107388, 107444), False, 'from django.db import models\n'), ((107506, 107581), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (107525, 107581), False, 'from django.db import models\n'), ((107644, 107719), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (107663, 107719), False, 'from django.db import models\n'), ((107784, 107859), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(15)', 'null': '(True)', 'blank': '(True)'}), '(decimal_places=2, max_digits=15, null=True, blank=True)\n', (107803, 107859), False, 'from django.db import models\n'), ((107921, 107991), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(500)', 'blank': '(True)', 'verbose_name': '"""GMO or CO"""'}), "(max_length=500, blank=True, verbose_name='GMO or CO')\n", (107937, 107991), False, 'from django.db import models\n'), ((108043, 108122), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(15)', 'blank': '(True)', 'verbose_name': '"""GMO/CO phone number"""'}), "(max_length=15, blank=True, verbose_name='GMO/CO phone number')\n", (108059, 108122), False, 'from django.db import models\n'), ((108167, 108239), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'blank': '(True)', 'verbose_name': '"""GMO/CO email"""'}), "(max_length=50, blank=True, verbose_name='GMO/CO email')\n", (108183, 108239), False, 'from django.db import models\n'), ((108288, 108398), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {'verbose_name': '"""Do you want to send this to the post-award team for modification?"""'}), "(verbose_name=\n 'Do you want to send this to the post-award team for modification?')\n", (108311, 108398), False, 'from django.db import models\n'), ((108427, 108502), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'null': '(True)', 'verbose_name': '"""Completion Date"""'}), "(blank=True, null=True, verbose_name='Completion Date')\n", (108447, 108502), False, 'from django.db import models\n'), ((108520, 108574), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'blank': '(True)', 'null': '(True)'}), '(max_length=50, blank=True, null=True)\n', (108536, 108574), False, 'from django.db import models\n'), ((113168, 113240), 'django.db.models.CharField', 'models.CharField', ([], {'choices': 'NEGOTIATION_CHOICES', 'max_length': '(50)', 'blank': '(True)'}), '(choices=NEGOTIATION_CHOICES, max_length=50, blank=True)\n', (113184, 113240), False, 'from django.db import models\n'), ((113304, 113348), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'blank': '(True)'}), '(max_length=100, blank=True)\n', (113320, 113348), False, 'from django.db import models\n'), ((113390, 113418), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)'}), '(blank=True)\n', (113406, 113418), False, 'from django.db import models\n'), ((113440, 113464), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Award'], {}), '(Award)\n', (113457, 113464), False, 'from django.db import models\n'), ((113495, 113538), 
'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (113515, 113538), False, 'from django.db import models\n'), ((115158, 115182), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Award'], {}), '(Award)\n', (115175, 115182), False, 'from django.db import models\n'), ((115210, 115243), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (115229, 115243), False, 'from django.db import models\n'), ((115271, 115363), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {'verbose_name': '"""Is Small Business Subcontracting Plan required?"""'}), "(verbose_name=\n 'Is Small Business Subcontracting Plan required?')\n", (115294, 115363), False, 'from django.db import models\n'), ((115397, 115460), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {'verbose_name': '"""Under Master Agreement?"""'}), "(verbose_name='Under Master Agreement?')\n", (115420, 115460), False, 'from django.db import models\n'), ((115487, 115588), 'django.db.models.CharField', 'models.CharField', ([], {'choices': 'AWARD_TYPE_CHOICES', 'max_length': '(3)', 'blank': '(True)', 'verbose_name': '"""Award Type"""'}), "(choices=AWARD_TYPE_CHOICES, max_length=3, blank=True,\n verbose_name='Award Type')\n", (115503, 115588), False, 'from django.db import models\n'), ((115641, 115685), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'blank': '(True)'}), '(max_length=255, blank=True)\n', (115657, 115685), False, 'from django.db import models\n'), ((115717, 115782), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {'verbose_name': '"""Related Other Agreements?"""'}), "(verbose_name='Related Other Agreements?')\n", (115740, 115782), False, 'from django.db import models\n'), ((115821, 115899), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'verbose_name': '"""Related other agreements comments"""'}), "(blank=True, verbose_name='Related other agreements comments')\n", (115837, 115899), False, 'from django.db import models\n'), ((115934, 116012), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(500)', 'blank': '(True)', 'verbose_name': '"""Negotiator Assist"""'}), "(max_length=500, blank=True, verbose_name='Negotiator Assist')\n", (115950, 116012), False, 'from django.db import models\n'), ((116058, 116127), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Date Received"""'}), "(null=True, blank=True, verbose_name='Date Received')\n", (116074, 116127), False, 'from django.db import models\n'), ((116176, 116266), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(500)', 'blank': '(True)', 'verbose_name': '"""Sponsor Retention Period"""'}), "(max_length=500, blank=True, verbose_name=\n 'Sponsor Retention Period')\n", (116192, 116266), False, 'from django.db import models\n'), ((116310, 116368), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {'verbose_name': '"""GW Doesn\'t Own IP?"""'}), '(verbose_name="GW Doesn\'t Own IP?")\n', (116333, 116368), False, 'from django.db import models\n'), ((116401, 116458), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {'verbose_name': '"""GW Background IP?"""'}), "(verbose_name='GW Background IP?')\n", (116424, 116458), False, 'from django.db import models\n'), ((116493, 116617), 'django.db.models.CharField', 
'models.CharField', ([], {'choices': 'NEGOTIATION_CHOICES', 'max_length': '(3)', 'blank': '(True)', 'verbose_name': '"""Negotiation Status"""', 'default': '"""IQ"""'}), "(choices=NEGOTIATION_CHOICES, max_length=3, blank=True,\n verbose_name='Negotiation Status', default='IQ')\n", (116509, 116617), False, 'from django.db import models\n'), ((116679, 116741), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'verbose_name': '"""Negotiation Notes"""'}), "(blank=True, verbose_name='Negotiation Notes')\n", (116695, 116741), False, 'from django.db import models\n'), ((116786, 116861), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {'verbose_name': '"""Foreign Participation Restrictions?"""'}), "(verbose_name='Foreign Participation Restrictions?')\n", (116809, 116861), False, 'from django.db import models\n'), ((116900, 116972), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {'verbose_name': '"""Certificate of Insurance Needed?"""'}), "(verbose_name='Certificate of Insurance Needed?')\n", (116923, 116972), False, 'from django.db import models\n'), ((117006, 117104), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Certificate of Insurance Renewal Date"""'}), "(null=True, blank=True, verbose_name=\n 'Certificate of Insurance Renewal Date')\n", (117022, 117104), False, 'from django.db import models\n'), ((117151, 117221), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {'verbose_name': '"""Government Furnished Property?"""'}), "(verbose_name='Government Furnished Property?')\n", (117174, 117221), False, 'from django.db import models\n'), ((117264, 117331), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {'verbose_name': '"""Data/Security Restrictions?"""'}), "(verbose_name='Data/Security Restrictions?')\n", (117287, 117331), False, 'from django.db import models\n'), ((117355, 117404), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {'verbose_name': '"""E-verify?"""'}), "(verbose_name='E-verify?')\n", (117378, 117404), False, 'from django.db import models\n'), ((117435, 117499), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {'verbose_name': '"""Publication Restriction?"""'}), "(verbose_name='Publication Restriction?')\n", (117458, 117499), False, 'from django.db import models\n'), ((117543, 117618), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'null': '(True)', 'verbose_name': '"""Completion Date"""'}), "(blank=True, null=True, verbose_name='Completion Date')\n", (117563, 117618), False, 'from django.db import models\n'), ((117636, 117690), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'blank': '(True)', 'null': '(True)'}), '(max_length=50, blank=True, null=True)\n', (117652, 117690), False, 'from django.db import models\n'), ((123273, 123300), 'django.db.models.OneToOneField', 'models.OneToOneField', (['Award'], {}), '(Award)\n', (123293, 123300), False, 'from django.db import models\n'), ((123319, 123395), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(30)', 'blank': '(True)', 'verbose_name': '"""Award short name"""'}), "(max_length=30, blank=True, verbose_name='Award short name')\n", (123335, 123395), False, 'from django.db import models\n'), ((123438, 123477), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', 
(123454, 123477), False, 'from django.db import models\n'), ((123493, 123532), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (123509, 123532), False, 'from django.db import models\n'), ((123562, 123672), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Final Reports/Final Invoice Due Date (Close Date)"""'}), "(null=True, blank=True, verbose_name=\n 'Final Reports/Final Invoice Due Date (Close Date)')\n", (123578, 123672), False, 'from django.db import models\n'), ((123714, 123818), 'django.db.models.CharField', 'models.CharField', ([], {'choices': 'EAS_AWARD_CHOICES', 'max_length': '(2)', 'blank': '(True)', 'verbose_name': '"""EAS award type"""'}), "(choices=EAS_AWARD_CHOICES, max_length=2, blank=True,\n verbose_name='EAS award type')\n", (123730, 123818), False, 'from django.db import models\n'), ((123862, 123957), 'django.db.models.CharField', 'models.CharField', ([], {'choices': 'SP_TYPE_CHOICES', 'max_length': '(3)', 'blank': '(True)', 'verbose_name': '"""SP Type"""'}), "(choices=SP_TYPE_CHOICES, max_length=3, blank=True,\n verbose_name='SP Type')\n", (123878, 123957), False, 'from django.db import models\n'), ((124016, 124110), 'django.db.models.ForeignKey', 'models.ForeignKey', (['IndirectCost'], {'null': '(True)', 'blank': '(True)', 'limit_choices_to': "{'active': True}"}), "(IndirectCost, null=True, blank=True, limit_choices_to={\n 'active': True})\n", (124033, 124110), False, 'from django.db import models\n'), ((124180, 124280), 'django.db.models.ForeignKey', 'models.ForeignKey', (['AllowedCostSchedule'], {'null': '(True)', 'blank': '(True)', 'limit_choices_to': "{'active': True}"}), "(AllowedCostSchedule, null=True, blank=True,\n limit_choices_to={'active': True})\n", (124197, 124280), False, 'from django.db import models\n'), ((124341, 124461), 'django.db.models.ForeignKey', 'models.ForeignKey', (['CFDANumber'], {'null': '(True)', 'blank': '(True)', 'limit_choices_to': "{'active': True}", 'verbose_name': '"""CFDA number"""'}), "(CFDANumber, null=True, blank=True, limit_choices_to={\n 'active': True}, verbose_name='CFDA number')\n", (124358, 124461), False, 'from django.db import models\n'), ((124541, 124633), 'django.db.models.ForeignKey', 'models.ForeignKey', (['FedNegRate'], {'null': '(True)', 'blank': '(True)', 'limit_choices_to': "{'active': True}"}), "(FedNegRate, null=True, blank=True, limit_choices_to={\n 'active': True})\n", (124558, 124633), False, 'from django.db import models\n'), ((124701, 124822), 'django.db.models.CharField', 'models.CharField', ([], {'choices': 'PROPERTY_CHOICES', 'max_length': '(2)', 'blank': '(True)', 'verbose_name': '"""T&C: Property and Equipment Code"""'}), "(choices=PROPERTY_CHOICES, max_length=2, blank=True,\n verbose_name='T&C: Property and Equipment Code')\n", (124717, 124822), False, 'from django.db import models\n'), ((124880, 124990), 'django.db.models.CharField', 'models.CharField', ([], {'choices': 'ONR_CHOICES', 'max_length': '(2)', 'blank': '(True)', 'verbose_name': '"""T&C: ONR Administered Code"""'}), "(choices=ONR_CHOICES, max_length=2, blank=True,\n verbose_name='T&C: ONR Administered Code')\n", (124896, 124990), False, 'from django.db import models\n'), ((125044, 125159), 'django.db.models.CharField', 'models.CharField', ([], {'choices': 'COST_SHARING_CHOICES', 'max_length': '(2)', 'blank': '(True)', 'verbose_name': '"""T&C: Cost Sharing Code"""'}), "(choices=COST_SHARING_CHOICES, 
max_length=2, blank=True,\n verbose_name='T&C: Cost Sharing Code')\n", (125060, 125159), False, 'from django.db import models\n'), ((125211, 125239), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)'}), '(blank=True)\n', (125227, 125239), False, 'from django.db import models\n'), ((125259, 125351), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(150)', 'blank': '(True)', 'verbose_name': '"""Contact Name (Last, First)"""'}), "(max_length=150, blank=True, verbose_name=\n 'Contact Name (Last, First)')\n", (125275, 125351), False, 'from django.db import models\n'), ((125384, 125427), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'blank': '(True)'}), '(max_length=50, blank=True)\n', (125400, 125427), False, 'from django.db import models\n'), ((125449, 125477), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)'}), '(blank=True)\n', (125465, 125477), False, 'from django.db import models\n'), ((125500, 125544), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'blank': '(True)'}), '(max_length=100, blank=True)\n', (125516, 125544), False, 'from django.db import models\n'), ((125573, 125659), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'null': '(True)', 'verbose_name': '"""Date Wait for Updated"""'}), "(blank=True, null=True, verbose_name=\n 'Date Wait for Updated')\n", (125593, 125659), False, 'from django.db import models\n'), ((125676, 125785), 'django.db.models.CharField', 'models.CharField', ([], {'choices': 'WAIT_FOR_CHOICES', 'max_length': '(2)', 'blank': '(True)', 'null': '(True)', 'verbose_name': '"""Wait for"""'}), "(choices=WAIT_FOR_CHOICES, max_length=2, blank=True, null=\n True, verbose_name='Wait for')\n", (125692, 125785), False, 'from django.db import models\n'), ((125857, 125913), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {'verbose_name': '"""990 Form Needed?"""'}), "(verbose_name='990 Form Needed?')\n", (125880, 125913), False, 'from django.db import models\n'), ((125943, 126016), 'django.db.models.CharField', 'models.CharField', ([], {'choices': 'TASK_LOCATION_CHOICES', 'max_length': '(2)', 'blank': '(True)'}), '(choices=TASK_LOCATION_CHOICES, max_length=2, blank=True)\n', (125959, 126016), False, 'from django.db import models\n'), ((126065, 126141), 'django.db.models.CharField', 'models.CharField', ([], {'choices': 'PERFORMANCE_SITE_CHOICES', 'max_length': '(2)', 'blank': '(True)'}), '(choices=PERFORMANCE_SITE_CHOICES, max_length=2, blank=True)\n', (126081, 126141), False, 'from django.db import models\n'), ((126192, 126251), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {'verbose_name': '"""Expanded Authority?"""'}), "(verbose_name='Expanded Authority?')\n", (126215, 126251), False, 'from django.db import models\n'), ((126292, 126401), 'multiselectfield.MultiSelectField', 'MultiSelectField', ([], {'choices': 'REPORTING_CHOICES', 'blank': '(True)', 'verbose_name': '"""Financial Reporting Requirements"""'}), "(choices=REPORTING_CHOICES, blank=True, verbose_name=\n 'Financial Reporting Requirements')\n", (126308, 126401), False, 'from multiselectfield import MultiSelectField\n'), ((126452, 126556), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(250)', 'blank': '(True)', 'verbose_name': '"""Other financial reporting requirements"""'}), "(max_length=250, blank=True, verbose_name=\n 'Other financial reporting requirements')\n", (126468, 
126556), False, 'from django.db import models\n'), ((126607, 126716), 'multiselectfield.MultiSelectField', 'MultiSelectField', ([], {'choices': 'REPORTING_CHOICES', 'blank': '(True)', 'verbose_name': '"""Technical Reporting Requirements"""'}), "(choices=REPORTING_CHOICES, blank=True, verbose_name=\n 'Technical Reporting Requirements')\n", (126623, 126716), False, 'from multiselectfield import MultiSelectField\n'), ((126767, 126871), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(250)', 'blank': '(True)', 'verbose_name': '"""Other technical reporting requirements"""'}), "(max_length=250, blank=True, verbose_name=\n 'Other technical reporting requirements')\n", (126783, 126871), False, 'from django.db import models\n'), ((126919, 127005), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Patent Report Requirement"""'}), "(null=True, blank=True, verbose_name=\n 'Patent Report Requirement')\n", (126935, 127005), False, 'from django.db import models\n'), ((127056, 127145), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Invention Report Requirement"""'}), "(null=True, blank=True, verbose_name=\n 'Invention Report Requirement')\n", (127072, 127145), False, 'from django.db import models\n'), ((127195, 127283), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Property Report Requirement"""'}), "(null=True, blank=True, verbose_name=\n 'Property Report Requirement')\n", (127211, 127283), False, 'from django.db import models\n'), ((127334, 127423), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Equipment Report Requirement"""'}), "(null=True, blank=True, verbose_name=\n 'Equipment Report Requirement')\n", (127350, 127423), False, 'from django.db import models\n'), ((127471, 127531), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {'verbose_name': '"""Budget Restrictions?"""'}), "(verbose_name='Budget Restrictions?')\n", (127494, 127531), False, 'from django.db import models\n'), ((127562, 127657), 'django.db.models.ForeignKey', 'models.ForeignKey', (['AwardTemplate'], {'null': '(True)', 'blank': '(True)', 'limit_choices_to': "{'active': True}"}), "(AwardTemplate, null=True, blank=True, limit_choices_to={\n 'active': True})\n", (127579, 127657), False, 'from django.db import models\n'), ((127726, 127802), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Award Setup Complete"""'}), "(null=True, blank=True, verbose_name='Award Setup Complete')\n", (127742, 127802), False, 'from django.db import models\n'), ((127856, 127933), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""QA Screening Complete"""'}), "(null=True, blank=True, verbose_name='QA Screening Complete')\n", (127872, 127933), False, 'from django.db import models\n'), ((127989, 128059), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {'verbose_name': '"""Pre-award spending authorized?"""'}), "(verbose_name='Pre-award spending authorized?')\n", (128012, 128059), False, 'from django.db import models\n'), ((128095, 128185), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Record Retention Destroy Date"""'}), "(null=True, blank=True, 
verbose_name=\n 'Record Retention Destroy Date')\n", (128111, 128185), False, 'from django.db import models\n'), ((128232, 128342), 'django.db.models.CharField', 'models.CharField', ([], {'choices': 'EAS_SETUP_CHOICES', 'max_length': '(3)', 'blank': '(True)', 'verbose_name': '"""Ready for EAS Setup?"""'}), "(choices=EAS_SETUP_CHOICES, max_length=3, blank=True,\n verbose_name='Ready for EAS Setup?')\n", (128248, 128342), False, 'from django.db import models\n'), ((128388, 128416), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)'}), '(blank=True)\n', (128404, 128416), False, 'from django.db import models\n'), ((128445, 128520), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'null': '(True)', 'verbose_name': '"""Completion Date"""'}), "(blank=True, null=True, verbose_name='Completion Date')\n", (128465, 128520), False, 'from django.db import models\n'), ((134164, 134188), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Award'], {}), '(Award)\n', (134181, 134188), False, 'from django.db import models\n'), ((134207, 134283), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(30)', 'blank': '(True)', 'verbose_name': '"""Award short name"""'}), "(max_length=30, blank=True, verbose_name='Award short name')\n", (134223, 134283), False, 'from django.db import models\n'), ((134326, 134365), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (134342, 134365), False, 'from django.db import models\n'), ((134381, 134420), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (134397, 134420), False, 'from django.db import models\n'), ((134450, 134560), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Final Reports/Final Invoice Due Date (Close Date)"""'}), "(null=True, blank=True, verbose_name=\n 'Final Reports/Final Invoice Due Date (Close Date)')\n", (134466, 134560), False, 'from django.db import models\n'), ((134602, 134706), 'django.db.models.CharField', 'models.CharField', ([], {'choices': 'EAS_AWARD_CHOICES', 'max_length': '(2)', 'blank': '(True)', 'verbose_name': '"""EAS award type"""'}), "(choices=EAS_AWARD_CHOICES, max_length=2, blank=True,\n verbose_name='EAS award type')\n", (134618, 134706), False, 'from django.db import models\n'), ((134750, 134845), 'django.db.models.CharField', 'models.CharField', ([], {'choices': 'SP_TYPE_CHOICES', 'max_length': '(3)', 'blank': '(True)', 'verbose_name': '"""SP Type"""'}), "(choices=SP_TYPE_CHOICES, max_length=3, blank=True,\n verbose_name='SP Type')\n", (134766, 134845), False, 'from django.db import models\n'), ((134904, 134998), 'django.db.models.ForeignKey', 'models.ForeignKey', (['IndirectCost'], {'null': '(True)', 'blank': '(True)', 'limit_choices_to': "{'active': True}"}), "(IndirectCost, null=True, blank=True, limit_choices_to={\n 'active': True})\n", (134921, 134998), False, 'from django.db import models\n'), ((135068, 135168), 'django.db.models.ForeignKey', 'models.ForeignKey', (['AllowedCostSchedule'], {'null': '(True)', 'blank': '(True)', 'limit_choices_to': "{'active': True}"}), "(AllowedCostSchedule, null=True, blank=True,\n limit_choices_to={'active': True})\n", (135085, 135168), False, 'from django.db import models\n'), ((135229, 135349), 'django.db.models.ForeignKey', 'models.ForeignKey', (['CFDANumber'], {'null': '(True)', 'blank': '(True)', 
'limit_choices_to': "{'active': True}", 'verbose_name': '"""CFDA number"""'}), "(CFDANumber, null=True, blank=True, limit_choices_to={\n 'active': True}, verbose_name='CFDA number')\n", (135246, 135349), False, 'from django.db import models\n'), ((135429, 135521), 'django.db.models.ForeignKey', 'models.ForeignKey', (['FedNegRate'], {'null': '(True)', 'blank': '(True)', 'limit_choices_to': "{'active': True}"}), "(FedNegRate, null=True, blank=True, limit_choices_to={\n 'active': True})\n", (135446, 135521), False, 'from django.db import models\n'), ((135589, 135710), 'django.db.models.CharField', 'models.CharField', ([], {'choices': 'PROPERTY_CHOICES', 'max_length': '(2)', 'blank': '(True)', 'verbose_name': '"""T&C: Property and Equipment Code"""'}), "(choices=PROPERTY_CHOICES, max_length=2, blank=True,\n verbose_name='T&C: Property and Equipment Code')\n", (135605, 135710), False, 'from django.db import models\n'), ((135768, 135878), 'django.db.models.CharField', 'models.CharField', ([], {'choices': 'ONR_CHOICES', 'max_length': '(2)', 'blank': '(True)', 'verbose_name': '"""T&C: ONR Administered Code"""'}), "(choices=ONR_CHOICES, max_length=2, blank=True,\n verbose_name='T&C: ONR Administered Code')\n", (135784, 135878), False, 'from django.db import models\n'), ((135932, 136047), 'django.db.models.CharField', 'models.CharField', ([], {'choices': 'COST_SHARING_CHOICES', 'max_length': '(2)', 'blank': '(True)', 'verbose_name': '"""T&C: Cost Sharing Code"""'}), "(choices=COST_SHARING_CHOICES, max_length=2, blank=True,\n verbose_name='T&C: Cost Sharing Code')\n", (135948, 136047), False, 'from django.db import models\n'), ((136099, 136127), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)'}), '(blank=True)\n', (136115, 136127), False, 'from django.db import models\n'), ((136147, 136239), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(150)', 'blank': '(True)', 'verbose_name': '"""Contact Name (Last, First)"""'}), "(max_length=150, blank=True, verbose_name=\n 'Contact Name (Last, First)')\n", (136163, 136239), False, 'from django.db import models\n'), ((136272, 136315), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'blank': '(True)'}), '(max_length=50, blank=True)\n', (136288, 136315), False, 'from django.db import models\n'), ((136337, 136365), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)'}), '(blank=True)\n', (136353, 136365), False, 'from django.db import models\n'), ((136388, 136432), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'blank': '(True)'}), '(max_length=100, blank=True)\n', (136404, 136432), False, 'from django.db import models\n'), ((136461, 136547), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'null': '(True)', 'verbose_name': '"""Date Wait for Updated"""'}), "(blank=True, null=True, verbose_name=\n 'Date Wait for Updated')\n", (136481, 136547), False, 'from django.db import models\n'), ((136564, 136673), 'django.db.models.CharField', 'models.CharField', ([], {'choices': 'WAIT_FOR_CHOICES', 'max_length': '(2)', 'blank': '(True)', 'null': '(True)', 'verbose_name': '"""Wait for"""'}), "(choices=WAIT_FOR_CHOICES, max_length=2, blank=True, null=\n True, verbose_name='Wait for')\n", (136580, 136673), False, 'from django.db import models\n'), ((136745, 136801), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {'verbose_name': '"""990 Form Needed?"""'}), "(verbose_name='990 Form Needed?')\n", 
(136768, 136801), False, 'from django.db import models\n'), ((136831, 136904), 'django.db.models.CharField', 'models.CharField', ([], {'choices': 'TASK_LOCATION_CHOICES', 'max_length': '(2)', 'blank': '(True)'}), '(choices=TASK_LOCATION_CHOICES, max_length=2, blank=True)\n', (136847, 136904), False, 'from django.db import models\n'), ((136953, 137029), 'django.db.models.CharField', 'models.CharField', ([], {'choices': 'PERFORMANCE_SITE_CHOICES', 'max_length': '(2)', 'blank': '(True)'}), '(choices=PERFORMANCE_SITE_CHOICES, max_length=2, blank=True)\n', (136969, 137029), False, 'from django.db import models\n'), ((137080, 137139), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {'verbose_name': '"""Expanded Authority?"""'}), "(verbose_name='Expanded Authority?')\n", (137103, 137139), False, 'from django.db import models\n'), ((137180, 137289), 'multiselectfield.MultiSelectField', 'MultiSelectField', ([], {'choices': 'REPORTING_CHOICES', 'blank': '(True)', 'verbose_name': '"""Financial Reporting Requirements"""'}), "(choices=REPORTING_CHOICES, blank=True, verbose_name=\n 'Financial Reporting Requirements')\n", (137196, 137289), False, 'from multiselectfield import MultiSelectField\n'), ((137340, 137444), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(250)', 'blank': '(True)', 'verbose_name': '"""Other financial reporting requirements"""'}), "(max_length=250, blank=True, verbose_name=\n 'Other financial reporting requirements')\n", (137356, 137444), False, 'from django.db import models\n'), ((137495, 137604), 'multiselectfield.MultiSelectField', 'MultiSelectField', ([], {'choices': 'REPORTING_CHOICES', 'blank': '(True)', 'verbose_name': '"""Technical Reporting Requirements"""'}), "(choices=REPORTING_CHOICES, blank=True, verbose_name=\n 'Technical Reporting Requirements')\n", (137511, 137604), False, 'from multiselectfield import MultiSelectField\n'), ((137655, 137759), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(250)', 'blank': '(True)', 'verbose_name': '"""Other technical reporting requirements"""'}), "(max_length=250, blank=True, verbose_name=\n 'Other technical reporting requirements')\n", (137671, 137759), False, 'from django.db import models\n'), ((137807, 137893), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Patent Report Requirement"""'}), "(null=True, blank=True, verbose_name=\n 'Patent Report Requirement')\n", (137823, 137893), False, 'from django.db import models\n'), ((137944, 138033), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Invention Report Requirement"""'}), "(null=True, blank=True, verbose_name=\n 'Invention Report Requirement')\n", (137960, 138033), False, 'from django.db import models\n'), ((138083, 138171), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Property Report Requirement"""'}), "(null=True, blank=True, verbose_name=\n 'Property Report Requirement')\n", (138099, 138171), False, 'from django.db import models\n'), ((138222, 138311), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Equipment Report Requirement"""'}), "(null=True, blank=True, verbose_name=\n 'Equipment Report Requirement')\n", (138238, 138311), False, 'from django.db import models\n'), ((138359, 138419), 'django.db.models.NullBooleanField', 'models.NullBooleanField', 
([], {'verbose_name': '"""Budget Restrictions?"""'}), "(verbose_name='Budget Restrictions?')\n", (138382, 138419), False, 'from django.db import models\n'), ((138450, 138545), 'django.db.models.ForeignKey', 'models.ForeignKey', (['AwardTemplate'], {'null': '(True)', 'blank': '(True)', 'limit_choices_to': "{'active': True}"}), "(AwardTemplate, null=True, blank=True, limit_choices_to={\n 'active': True})\n", (138467, 138545), False, 'from django.db import models\n'), ((138614, 138690), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Award Setup Complete"""'}), "(null=True, blank=True, verbose_name='Award Setup Complete')\n", (138630, 138690), False, 'from django.db import models\n'), ((138744, 138821), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""QA Screening Complete"""'}), "(null=True, blank=True, verbose_name='QA Screening Complete')\n", (138760, 138821), False, 'from django.db import models\n'), ((138877, 138947), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {'verbose_name': '"""Pre-award spending authorized?"""'}), "(verbose_name='Pre-award spending authorized?')\n", (138900, 138947), False, 'from django.db import models\n'), ((138983, 139073), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Record Retention Destroy Date"""'}), "(null=True, blank=True, verbose_name=\n 'Record Retention Destroy Date')\n", (138999, 139073), False, 'from django.db import models\n'), ((139120, 139230), 'django.db.models.CharField', 'models.CharField', ([], {'choices': 'EAS_SETUP_CHOICES', 'max_length': '(3)', 'blank': '(True)', 'verbose_name': '"""Ready for EAS Setup?"""'}), "(choices=EAS_SETUP_CHOICES, max_length=3, blank=True,\n verbose_name='Ready for EAS Setup?')\n", (139136, 139230), False, 'from django.db import models\n'), ((139295, 139370), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'null': '(True)', 'verbose_name': '"""Completion Date"""'}), "(blank=True, null=True, verbose_name='Completion Date')\n", (139315, 139370), False, 'from django.db import models\n'), ((139387, 139415), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)'}), '(blank=True)\n', (139403, 139415), False, 'from django.db import models\n'), ((140916, 140940), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Award'], {}), '(Award)\n', (140933, 140940), False, 'from django.db import models\n'), ((140963, 141033), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'blank': '(True)', 'verbose_name': '"""Project #"""'}), "(max_length=100, blank=True, verbose_name='Project #')\n", (140979, 141033), False, 'from django.db import models\n'), ((141077, 141144), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'blank': '(True)', 'verbose_name': '"""Task #"""'}), "(max_length=100, blank=True, verbose_name='Task #')\n", (141093, 141144), False, 'from django.db import models\n'), ((141189, 141257), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'blank': '(True)', 'verbose_name': '"""Award #"""'}), "(max_length=100, blank=True, verbose_name='Award #')\n", (141205, 141257), False, 'from django.db import models\n'), ((141310, 141386), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Award Setup Complete"""'}), 
"(null=True, blank=True, verbose_name='Award Setup Complete')\n", (141326, 141386), False, 'from django.db import models\n'), ((141435, 141544), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(10)', 'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Total PTA Amt"""'}), "(decimal_places=2, max_digits=10, null=True, blank=True,\n verbose_name='Total PTA Amt')\n", (141454, 141544), False, 'from django.db import models\n'), ((141609, 141683), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'blank': '(True)', 'verbose_name': '"""Prnt Banner #"""'}), "(max_length=100, blank=True, verbose_name='Prnt Banner #')\n", (141625, 141683), False, 'from django.db import models\n'), ((141729, 141798), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'blank': '(True)', 'verbose_name': '"""Banner #"""'}), "(max_length=100, blank=True, verbose_name='Banner #')\n", (141745, 141798), False, 'from django.db import models\n'), ((141847, 141919), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'blank': '(True)', 'verbose_name': '"""CS Banner #"""'}), "(max_length=100, blank=True, verbose_name='CS Banner #')\n", (141863, 141919), False, 'from django.db import models\n'), ((141974, 142088), 'django.db.models.ForeignKey', 'models.ForeignKey', (['AwardManager'], {'null': '(True)', 'blank': '(True)', 'limit_choices_to': "{'active': True}", 'verbose_name': '"""PI*"""'}), "(AwardManager, null=True, blank=True, limit_choices_to={\n 'active': True}, verbose_name='PI*')\n", (141991, 142088), False, 'from django.db import models\n'), ((142156, 142280), 'django.db.models.ForeignKey', 'models.ForeignKey', (['FundingSource'], {'null': '(True)', 'blank': '(True)', 'limit_choices_to': "{'active': True}", 'verbose_name': '"""Agency Name*"""'}), "(FundingSource, null=True, blank=True, limit_choices_to={\n 'active': True}, verbose_name='Agency Name*')\n", (142173, 142280), False, 'from django.db import models\n'), ((142352, 142490), 'django.db.models.ForeignKey', 'models.ForeignKey', (['AwardOrganization'], {'null': '(True)', 'blank': '(True)', 'limit_choices_to': "{'active': True}", 'verbose_name': '"""Department Code & Name*"""'}), "(AwardOrganization, null=True, blank=True,\n limit_choices_to={'active': True}, verbose_name='Department Code & Name*')\n", (142369, 142490), False, 'from django.db import models\n'), ((142585, 142660), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(256)', 'blank': '(True)', 'verbose_name': '"""Project Title*"""'}), "(max_length=256, blank=True, verbose_name='Project Title*')\n", (142601, 142660), False, 'from django.db import models\n'), ((142680, 142774), 'django.db.models.ForeignKey', 'models.ForeignKey', (['PrimeSponsor'], {'null': '(True)', 'blank': '(True)', 'limit_choices_to': "{'active': True}"}), "(PrimeSponsor, null=True, blank=True, limit_choices_to={\n 'active': True})\n", (142697, 142774), False, 'from django.db import models\n'), ((142864, 143003), 'django.db.models.ForeignKey', 'models.ForeignKey', (['AllowedCostSchedule'], {'null': '(True)', 'blank': '(True)', 'limit_choices_to': "{'active': True}", 'verbose_name': '"""Allowed Cost Schedule*"""'}), "(AllowedCostSchedule, null=True, blank=True,\n limit_choices_to={'active': True}, verbose_name='Allowed Cost Schedule*')\n", (142881, 143003), False, 'from django.db import models\n'), ((143075, 143202), 'django.db.models.ForeignKey', 'models.ForeignKey', 
(['AwardTemplate'], {'null': '(True)', 'blank': '(True)', 'limit_choices_to': "{'active': True}", 'verbose_name': '"""Award Template*"""'}), "(AwardTemplate, null=True, blank=True, limit_choices_to={\n 'active': True}, verbose_name='Award Template*')\n", (143092, 143202), False, 'from django.db import models\n'), ((143270, 143391), 'django.db.models.ForeignKey', 'models.ForeignKey', (['CFDANumber'], {'null': '(True)', 'blank': '(True)', 'limit_choices_to': "{'active': True}", 'verbose_name': '"""CFDA number*"""'}), "(CFDANumber, null=True, blank=True, limit_choices_to={\n 'active': True}, verbose_name='CFDA number*')\n", (143287, 143391), False, 'from django.db import models\n'), ((143462, 143567), 'django.db.models.CharField', 'models.CharField', ([], {'choices': 'EAS_AWARD_CHOICES', 'max_length': '(2)', 'blank': '(True)', 'verbose_name': '"""EAS Award Type*"""'}), "(choices=EAS_AWARD_CHOICES, max_length=2, blank=True,\n verbose_name='EAS Award Type*')\n", (143478, 143567), False, 'from django.db import models\n'), ((143617, 143656), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (143633, 143656), False, 'from django.db import models\n'), ((143674, 143741), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Start Date*"""'}), "(null=True, blank=True, verbose_name='Start Date*')\n", (143690, 143741), False, 'from django.db import models\n'), ((143757, 143822), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""End Date*"""'}), "(null=True, blank=True, verbose_name='End Date*')\n", (143773, 143822), False, 'from django.db import models\n'), ((143852, 143963), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Final Reports/Final Invoice Due Date (Close Date)*"""'}), "(null=True, blank=True, verbose_name=\n 'Final Reports/Final Invoice Due Date (Close Date)*')\n", (143868, 143963), False, 'from django.db import models\n'), ((144014, 144147), 'django.db.models.ForeignKey', 'models.ForeignKey', (['FedNegRate'], {'null': '(True)', 'blank': '(True)', 'limit_choices_to': "{'active': True}", 'verbose_name': '"""Federal Negotiated Rate*"""'}), "(FedNegRate, null=True, blank=True, limit_choices_to={\n 'active': True}, verbose_name='Federal Negotiated Rate*')\n", (144031, 144147), False, 'from django.db import models\n'), ((144226, 144360), 'django.db.models.ForeignKey', 'models.ForeignKey', (['IndirectCost'], {'null': '(True)', 'blank': '(True)', 'limit_choices_to': "{'active': True}", 'verbose_name': '"""Indirect Cost Schedule*"""'}), "(IndirectCost, null=True, blank=True, limit_choices_to={\n 'active': True}, verbose_name='Indirect Cost Schedule*')\n", (144243, 144360), False, 'from django.db import models\n'), ((144424, 144520), 'django.db.models.CharField', 'models.CharField', ([], {'choices': 'SP_TYPE_CHOICES', 'max_length': '(3)', 'blank': '(True)', 'verbose_name': '"""SP Type*"""'}), "(choices=SP_TYPE_CHOICES, max_length=3, blank=True,\n verbose_name='SP Type*')\n", (144440, 144520), False, 'from django.db import models\n'), ((144567, 144644), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(30)', 'blank': '(True)', 'verbose_name': '"""Award Short Name*"""'}), "(max_length=30, blank=True, verbose_name='Award Short Name*')\n", (144583, 144644), False, 'from django.db import models\n'), ((144696, 144781), 
'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'blank': '(True)', 'verbose_name': '"""Agency Award Number*"""'}), "(max_length=50, blank=True, verbose_name='Agency Award Number*'\n )\n", (144712, 144781), False, 'from django.db import models\n'), ((144831, 144932), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'blank': '(True)', 'verbose_name': '"""Prime Award # (if GW is subawardee)*"""'}), "(max_length=50, blank=True, verbose_name=\n 'Prime Award # (if GW is subawardee)*')\n", (144847, 144932), False, 'from django.db import models\n'), ((144981, 145024), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'blank': '(True)'}), '(max_length=50, blank=True)\n', (144997, 145024), False, 'from django.db import models\n'), ((145042, 145144), 'django.db.models.CharField', 'models.CharField', ([], {'choices': 'EAS_STATUS_CHOICES', 'max_length': '(2)', 'blank': '(True)', 'verbose_name': '"""EAS Status*"""'}), "(choices=EAS_STATUS_CHOICES, max_length=2, blank=True,\n verbose_name='EAS Status*')\n", (145058, 145144), False, 'from django.db import models\n'), ((145200, 145310), 'django.db.models.CharField', 'models.CharField', ([], {'choices': 'EAS_SETUP_CHOICES', 'max_length': '(3)', 'blank': '(True)', 'verbose_name': '"""Ready for EAS Setup?"""'}), "(choices=EAS_SETUP_CHOICES, max_length=3, blank=True,\n verbose_name='Ready for EAS Setup?')\n", (145216, 145310), False, 'from django.db import models\n'), ((145357, 145391), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (145376, 145391), False, 'from django.db import models\n'), ((145417, 145456), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (145433, 145456), False, 'from django.db import models\n'), ((150379, 150403), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Award'], {}), '(Award)\n', (150396, 150403), False, 'from django.db import models\n'), ((150424, 150520), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'blank': '(True)', 'null': '(True)', 'verbose_name': '"""Date Created"""'}), "(auto_now_add=True, blank=True, null=True, verbose_name\n ='Date Created')\n", (150444, 150520), False, 'from django.db import models\n'), ((150533, 150577), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(250)', 'blank': '(True)'}), '(max_length=250, blank=True)\n', (150549, 150577), False, 'from django.db import models\n'), ((150599, 150668), 'django.db.models.CharField', 'models.CharField', ([], {'choices': 'AGREEMENT_CHOICES', 'max_length': '(2)', 'blank': '(True)'}), '(choices=AGREEMENT_CHOICES, max_length=2, blank=True)\n', (150615, 150668), False, 'from django.db import models\n'), ((150720, 150763), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'blank': '(True)'}), '(max_length=50, blank=True)\n', (150736, 150763), False, 'from django.db import models\n'), ((150788, 150904), 'django.db.models.CharField', 'models.CharField', ([], {'choices': 'SUBRECIPIENT_TYPE_CHOICES', 'max_length': '(2)', 'blank': '(True)', 'verbose_name': '"""Subrecipient Type"""'}), "(choices=SUBRECIPIENT_TYPE_CHOICES, max_length=2, blank=\n True, verbose_name='Subrecipient Type')\n", (150804, 150904), False, 'from django.db import models\n'), ((150946, 150990), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'blank': 
'(True)'}), '(max_length=100, blank=True)\n', (150962, 150990), False, 'from django.db import models\n'), ((151011, 151050), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (151027, 151050), False, 'from django.db import models\n'), ((151064, 151139), 'django.db.models.CharField', 'models.CharField', ([], {'choices': 'SUBAWARD_STATUS_CHOICES', 'max_length': '(2)', 'blank': '(True)'}), '(choices=SUBAWARD_STATUS_CHOICES, max_length=2, blank=True)\n', (151080, 151139), False, 'from django.db import models\n'), ((151176, 151240), 'django.db.models.CharField', 'models.CharField', ([], {'choices': 'RISK_CHOICES', 'max_length': '(2)', 'blank': '(True)'}), '(choices=RISK_CHOICES, max_length=2, blank=True)\n', (151192, 151240), False, 'from django.db import models\n'), ((151267, 151359), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Date of Expiration for Approval"""'}), "(null=True, blank=True, verbose_name=\n 'Date of Expiration for Approval')\n", (151283, 151359), False, 'from django.db import models\n'), ((151401, 151492), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Subaward ready to be initiated"""'}), "(null=True, blank=True, verbose_name=\n 'Subaward ready to be initiated')\n", (151417, 151492), False, 'from django.db import models\n'), ((151524, 151615), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Subagreement sent to recipient"""'}), "(null=True, blank=True, verbose_name=\n 'Subagreement sent to recipient')\n", (151540, 151615), False, 'from django.db import models\n'), ((151651, 151719), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {'verbose_name': '"""Reminder sent to Subawardee?"""'}), "(verbose_name='Reminder sent to Subawardee?')\n", (151674, 151719), False, 'from django.db import models\n'), ((151744, 151847), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Receipt of Partially Executed Subagreement"""'}), "(null=True, blank=True, verbose_name=\n 'Receipt of Partially Executed Subagreement')\n", (151760, 151847), False, 'from django.db import models\n'), ((151887, 151980), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Subaward Cleared FCOI Procedures"""'}), "(null=True, blank=True, verbose_name=\n 'Subaward Cleared FCOI Procedures')\n", (151903, 151980), False, 'from django.db import models\n'), ((152020, 152113), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Subaward Completed CITI Training"""'}), "(null=True, blank=True, verbose_name=\n 'Subaward Completed CITI Training')\n", (152036, 152113), False, 'from django.db import models\n'), ((152160, 152199), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (152176, 152199), False, 'from django.db import models\n'), ((152213, 152330), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(10)', 'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Subaward Total Amount"""'}), "(decimal_places=2, max_digits=10, null=True, blank=True,\n verbose_name='Subaward Total Amount')\n", (152232, 152330), False, 'from django.db 
import models\n'), ((152384, 152462), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'blank': '(True)', 'verbose_name': '"""GW Subaward Number"""'}), "(max_length=50, blank=True, verbose_name='GW Subaward Number')\n", (152400, 152462), False, 'from django.db import models\n'), ((152512, 152618), 'django.db.models.CharField', 'models.CharField', ([], {'choices': 'CONTRACT_CHOICES', 'max_length': '(2)', 'blank': '(True)', 'verbose_name': '"""Funding mechanism"""'}), "(choices=CONTRACT_CHOICES, max_length=2, blank=True,\n verbose_name='Funding mechanism')\n", (152528, 152618), False, 'from django.db import models\n'), ((152670, 152759), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'blank': '(True)', 'verbose_name': '"""Other funding mechanism"""'}), "(max_length=255, blank=True, verbose_name=\n 'Other funding mechanism')\n", (152686, 152759), False, 'from django.db import models\n'), ((152806, 152881), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'verbose_name': '"""Subawardee contact information"""'}), "(blank=True, verbose_name='Subawardee contact information')\n", (152822, 152881), False, 'from django.db import models\n'), ((152914, 152982), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'blank': '(True)', 'verbose_name': '"""ZIP code"""'}), "(max_length=50, blank=True, verbose_name='ZIP code')\n", (152930, 152982), False, 'from django.db import models\n'), ((153029, 153123), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Subaward Performance Period Start"""'}), "(null=True, blank=True, verbose_name=\n 'Subaward Performance Period Start')\n", (153045, 153123), False, 'from django.db import models\n'), ((153163, 153255), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Subaward Performance Period End"""'}), "(null=True, blank=True, verbose_name=\n 'Subaward Performance Period End')\n", (153179, 153255), False, 'from django.db import models\n'), ((153298, 153368), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {'verbose_name': '"""Debarment or suspension check?"""'}), "(verbose_name='Debarment or suspension check?')\n", (153321, 153368), False, 'from django.db import models\n'), ((153398, 153452), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {'verbose_name': '"""International?"""'}), "(verbose_name='International?')\n", (153421, 153452), False, 'from django.db import models\n'), ((153471, 153542), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'blank': '(True)', 'verbose_name': '"""CFDA number"""'}), "(max_length=50, blank=True, verbose_name='CFDA number')\n", (153487, 153542), False, 'from django.db import models\n'), ((153579, 153643), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'blank': '(True)', 'verbose_name': '"""FAIN"""'}), "(max_length=50, blank=True, verbose_name='FAIN')\n", (153595, 153643), False, 'from django.db import models\n'), ((153654, 153717), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'blank': '(True)', 'verbose_name': '"""EIN"""'}), "(max_length=50, blank=True, verbose_name='EIN')\n", (153670, 153717), False, 'from django.db import models\n'), ((153736, 153807), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'blank': '(True)', 'verbose_name': 
'"""DUNS number"""'}), "(max_length=50, blank=True, verbose_name='DUNS number')\n", (153752, 153807), False, 'from django.db import models\n'), ((153856, 153913), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {'verbose_name': '"""FFATA Reportable?"""'}), "(verbose_name='FFATA Reportable?')\n", (153879, 153913), False, 'from django.db import models\n'), ((153945, 154033), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""FFATA Report Submitted Date"""'}), "(null=True, blank=True, verbose_name=\n 'FFATA Report Submitted Date')\n", (153961, 154033), False, 'from django.db import models\n'), ((154076, 154162), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Technical Report Due Date"""'}), "(null=True, blank=True, verbose_name=\n 'Technical Report Due Date')\n", (154092, 154162), False, 'from django.db import models\n'), ((154210, 154301), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Technical Report Received Date"""'}), "(null=True, blank=True, verbose_name=\n 'Technical Report Received Date')\n", (154226, 154301), False, 'from django.db import models\n'), ((154353, 154428), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'null': '(True)', 'verbose_name': '"""Completion Date"""'}), "(blank=True, null=True, verbose_name='Completion Date')\n", (154373, 154428), False, 'from django.db import models\n'), ((154986, 155013), 'django.db.models.OneToOneField', 'models.OneToOneField', (['Award'], {}), '(Award)\n', (155006, 155013), False, 'from django.db import models\n'), ((155047, 155122), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'null': '(True)', 'verbose_name': '"""Completion Date"""'}), "(blank=True, null=True, verbose_name='Completion Date')\n", (155067, 155122), False, 'from django.db import models\n'), ((156566, 156590), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Award'], {}), '(Award)\n', (156583, 156590), False, 'from django.db import models\n'), ((156606, 156673), 'django.db.models.CharField', 'models.CharField', ([], {'choices': 'REQUEST_CHOICES', 'max_length': '(2)', 'blank': '(True)'}), '(choices=REQUEST_CHOICES, max_length=2, blank=True)\n', (156622, 156673), False, 'from django.db import models\n'), ((156720, 156759), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (156736, 156759), False, 'from django.db import models\n'), ((156773, 156859), 'django.db.models.CharField', 'models.CharField', ([], {'choices': 'PRIOR_APPROVAL_STATUS_CHOICES', 'max_length': '(2)', 'blank': '(True)'}), '(choices=PRIOR_APPROVAL_STATUS_CHOICES, max_length=2, blank\n =True)\n', (156789, 156859), False, 'from django.db import models\n'), ((156900, 156939), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (156916, 156939), False, 'from django.db import models\n'), ((158082, 158106), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Award'], {}), '(Award)\n', (158099, 158106), False, 'from django.db import models\n'), ((158121, 158187), 'django.db.models.CharField', 'models.CharField', ([], {'choices': 'REPORT_CHOICES', 'max_length': '(2)', 'blank': '(True)'}), '(choices=REPORT_CHOICES, max_length=2, blank=True)\n', (158137, 158187), False, 'from django.db 
import models\n'), ((158203, 158242), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (158219, 158242), False, 'from django.db import models\n'), ((158264, 158303), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (158280, 158303), False, 'from django.db import models\n'), ((159142, 159169), 'django.db.models.OneToOneField', 'models.OneToOneField', (['Award'], {}), '(Award)\n', (159162, 159169), False, 'from django.db import models\n'), ((159201, 159276), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'null': '(True)', 'verbose_name': '"""Completion Date"""'}), "(blank=True, null=True, verbose_name='Completion Date')\n", (159221, 159276), False, 'from django.db import models\n'), ((160038, 160062), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Award'], {}), '(Award)\n', (160055, 160062), False, 'from django.db import models\n'), ((160077, 160149), 'django.db.models.CharField', 'models.CharField', ([], {'choices': 'FINAL_REPORT_CHOICES', 'max_length': '(2)', 'blank': '(True)'}), '(choices=FINAL_REPORT_CHOICES, max_length=2, blank=True)\n', (160093, 160149), False, 'from django.db import models\n'), ((160190, 160229), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (160206, 160229), False, 'from django.db import models\n'), ((160251, 160290), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (160267, 160290), False, 'from django.db import models\n'), ((26054, 26107), 'django.core.urlresolvers.reverse', 'reverse', (['"""award_detail"""'], {'kwargs': "{'award_pk': self.pk}"}), "('award_detail', kwargs={'award_pk': self.pk})\n", (26061, 26107), False, 'from django.core.urlresolvers import reverse\n'), ((66547, 66577), 'reversion.get_for_object', 'reversion.get_for_object', (['self'], {}), '(self)\n', (66571, 66577), False, 'import reversion\n'), ((67213, 67227), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (67225, 67227), False, 'from datetime import datetime, date, timedelta, tzinfo\n'), ((90392, 90480), 'django.core.urlresolvers.reverse', 'reverse', (['"""edit_proposal"""'], {'kwargs': "{'award_pk': self.award.pk, 'proposal_pk': self.id}"}), "('edit_proposal', kwargs={'award_pk': self.award.pk, 'proposal_pk':\n self.id})\n", (90399, 90480), False, 'from django.core.urlresolvers import reverse\n'), ((94071, 94211), 'django.core.urlresolvers.reverse', 'reverse', (['"""edit_key_personnel"""'], {'kwargs': "{'award_pk': self.proposal.award.pk, 'proposal_pk': self.proposal.pk,\n 'key_personnel_pk': self.id}"}), "('edit_key_personnel', kwargs={'award_pk': self.proposal.award.pk,\n 'proposal_pk': self.proposal.pk, 'key_personnel_pk': self.id})\n", (94078, 94211), False, 'from django.core.urlresolvers import reverse\n'), ((94383, 94525), 'django.core.urlresolvers.reverse', 'reverse', (['"""delete_key_personnel"""'], {'kwargs': "{'award_pk': self.proposal.award.pk, 'proposal_pk': self.proposal.pk,\n 'key_personnel_pk': self.id}"}), "('delete_key_personnel', kwargs={'award_pk': self.proposal.award.pk,\n 'proposal_pk': self.proposal.pk, 'key_personnel_pk': self.id})\n", (94390, 94525), False, 'from django.core.urlresolvers import reverse\n'), ((95878, 96024), 'django.core.urlresolvers.reverse', 'reverse', (['"""edit_performance_site"""'], {'kwargs': 
"{'award_pk': self.proposal.award.pk, 'proposal_pk': self.proposal.pk,\n 'performance_site_pk': self.id}"}), "('edit_performance_site', kwargs={'award_pk': self.proposal.award.pk,\n 'proposal_pk': self.proposal.pk, 'performance_site_pk': self.id})\n", (95885, 96024), False, 'from django.core.urlresolvers import reverse\n'), ((96196, 96345), 'django.core.urlresolvers.reverse', 'reverse', (['"""delete_performance_site"""'], {'kwargs': "{'award_pk': self.proposal.award.pk, 'proposal_pk': self.proposal.pk,\n 'performance_site_pk': self.id}"}), "('delete_performance_site', kwargs={'award_pk': self.proposal.award.\n pk, 'proposal_pk': self.proposal.pk, 'performance_site_pk': self.id})\n", (96203, 96345), False, 'from django.core.urlresolvers import reverse\n'), ((108758, 108826), 'django.core.urlresolvers.reverse', 'reverse', (['"""edit_award_acceptance"""'], {'kwargs': "{'award_pk': self.award.pk}"}), "('edit_award_acceptance', kwargs={'award_pk': self.award.pk})\n", (108765, 108826), False, 'from django.core.urlresolvers import reverse\n'), ((117878, 117947), 'django.core.urlresolvers.reverse', 'reverse', (['"""edit_award_negotiation"""'], {'kwargs': "{'award_pk': self.award.pk}"}), "('edit_award_negotiation', kwargs={'award_pk': self.award.pk})\n", (117885, 117947), False, 'from django.core.urlresolvers import reverse\n'), ((128701, 128764), 'django.core.urlresolvers.reverse', 'reverse', (['"""edit_award_setup"""'], {'kwargs': "{'award_pk': self.award.pk}"}), "('edit_award_setup', kwargs={'award_pk': self.award.pk})\n", (128708, 128764), False, 'from django.core.urlresolvers import reverse\n'), ((139604, 139667), 'django.core.urlresolvers.reverse', 'reverse', (['"""edit_award_setup"""'], {'kwargs': "{'award_pk': self.award.pk}"}), "('edit_award_setup', kwargs={'award_pk': self.award.pk})\n", (139611, 139667), False, 'from django.core.urlresolvers import reverse\n'), ((147710, 147795), 'django.core.urlresolvers.reverse', 'reverse', (['"""edit_pta_number"""'], {'kwargs': "{'award_pk': self.award.pk, 'pta_pk': self.id}"}), "('edit_pta_number', kwargs={'award_pk': self.award.pk, 'pta_pk':\n self.id})\n", (147717, 147795), False, 'from django.core.urlresolvers import reverse\n'), ((147951, 148038), 'django.core.urlresolvers.reverse', 'reverse', (['"""delete_pta_number"""'], {'kwargs': "{'award_pk': self.award.pk, 'pta_pk': self.id}"}), "('delete_pta_number', kwargs={'award_pk': self.award.pk, 'pta_pk':\n self.id})\n", (147958, 148038), False, 'from django.core.urlresolvers import reverse\n'), ((154614, 154702), 'django.core.urlresolvers.reverse', 'reverse', (['"""edit_subaward"""'], {'kwargs': "{'award_pk': self.award.pk, 'subaward_pk': self.id}"}), "('edit_subaward', kwargs={'award_pk': self.award.pk, 'subaward_pk':\n self.id})\n", (154621, 154702), False, 'from django.core.urlresolvers import reverse\n'), ((155309, 155377), 'django.core.urlresolvers.reverse', 'reverse', (['"""edit_award_management"""'], {'kwargs': "{'award_pk': self.award.pk}"}), "('edit_award_management', kwargs={'award_pk': self.award.pk})\n", (155316, 155377), False, 'from django.core.urlresolvers import reverse\n'), ((157126, 157226), 'django.core.urlresolvers.reverse', 'reverse', (['"""edit_prior_approval"""'], {'kwargs': "{'award_pk': self.award.pk, 'prior_approval_pk': self.id}"}), "('edit_prior_approval', kwargs={'award_pk': self.award.pk,\n 'prior_approval_pk': self.id})\n", (157133, 157226), False, 'from django.core.urlresolvers import reverse\n'), ((157382, 157484), 'django.core.urlresolvers.reverse', 'reverse', 
(['"""delete_prior_approval"""'], {'kwargs': "{'award_pk': self.award.pk, 'prior_approval_pk': self.id}"}), "('delete_prior_approval', kwargs={'award_pk': self.award.pk,\n 'prior_approval_pk': self.id})\n", (157389, 157484), False, 'from django.core.urlresolvers import reverse\n'), ((158492, 158598), 'django.core.urlresolvers.reverse', 'reverse', (['"""edit_report_submission"""'], {'kwargs': "{'award_pk': self.award.pk, 'report_submission_pk': self.id}"}), "('edit_report_submission', kwargs={'award_pk': self.award.pk,\n 'report_submission_pk': self.id})\n", (158499, 158598), False, 'from django.core.urlresolvers import reverse\n'), ((158754, 158862), 'django.core.urlresolvers.reverse', 'reverse', (['"""delete_report_submission"""'], {'kwargs': "{'award_pk': self.award.pk, 'report_submission_pk': self.id}"}), "('delete_report_submission', kwargs={'award_pk': self.award.pk,\n 'report_submission_pk': self.id})\n", (158761, 158862), False, 'from django.core.urlresolvers import reverse\n'), ((159461, 159527), 'django.core.urlresolvers.reverse', 'reverse', (['"""edit_award_closeout"""'], {'kwargs': "{'award_pk': self.award.pk}"}), "('edit_award_closeout', kwargs={'award_pk': self.award.pk})\n", (159468, 159527), False, 'from django.core.urlresolvers import reverse\n'), ((160475, 160571), 'django.core.urlresolvers.reverse', 'reverse', (['"""edit_final_report"""'], {'kwargs': "{'award_pk': self.award.pk, 'final_report_pk': self.id}"}), "('edit_final_report', kwargs={'award_pk': self.award.pk,\n 'final_report_pk': self.id})\n", (160482, 160571), False, 'from django.core.urlresolvers import reverse\n'), ((160727, 160825), 'django.core.urlresolvers.reverse', 'reverse', (['"""delete_final_report"""'], {'kwargs': "{'award_pk': self.award.pk, 'final_report_pk': self.id}"}), "('delete_final_report', kwargs={'award_pk': self.award.pk,\n 'final_report_pk': self.id})\n", (160734, 160825), False, 'from django.core.urlresolvers import reverse\n'), ((17972, 18006), 'django.db.models.Q', 'Q', ([], {'groups__name': '"""Award Acceptance"""'}), "(groups__name='Award Acceptance')\n", (17973, 18006), False, 'from django.db.models import Q\n'), ((18220, 18255), 'django.db.models.Q', 'Q', ([], {'groups__name': '"""Award Negotiation"""'}), "(groups__name='Award Negotiation')\n", (18221, 18255), False, 'from django.db.models import Q\n'), ((18418, 18447), 'django.db.models.Q', 'Q', ([], {'groups__name': '"""Award Setup"""'}), "(groups__name='Award Setup')\n", (18419, 18447), False, 'from django.db.models import Q\n'), ((18663, 18699), 'django.db.models.Q', 'Q', ([], {'groups__name': '"""Award Modification"""'}), "(groups__name='Award Modification')\n", (18664, 18699), False, 'from django.db.models import Q\n'), ((18895, 18932), 'django.db.models.Q', 'Q', ([], {'groups__name': '"""Subaward Management"""'}), "(groups__name='Subaward Management')\n", (18896, 18932), False, 'from django.db.models import Q\n'), ((19105, 19139), 'django.db.models.Q', 'Q', ([], {'groups__name': '"""Award Management"""'}), "(groups__name='Award Management')\n", (19106, 19139), False, 'from django.db.models import Q\n'), ((19308, 19340), 'django.db.models.Q', 'Q', ([], {'groups__name': '"""Award Closeout"""'}), "(groups__name='Award Closeout')\n", (19309, 19340), False, 'from django.db.models import Q\n'), ((22702, 22825), 'itertools.chain', 'chain', (['assignments_on', 'assignments_tw', 'assignments_th', 'assignments_fo', 'assignments_fi', 'assignments_ni', 'assignments_none'], {}), '(assignments_on, assignments_tw, assignments_th, 
assignments_fo,\n assignments_fi, assignments_ni, assignments_none)\n', (22707, 22825), False, 'from itertools import chain\n'), ((46123, 46137), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (46135, 46137), False, 'from datetime import datetime, date, timedelta, tzinfo\n'), ((65206, 65249), 'django.core.urlresolvers.reverse', 'reverse', (['"""admin:awards_proposal_changelist"""'], {}), "('admin:awards_proposal_changelist')\n", (65213, 65249), False, 'from django.core.urlresolvers import reverse\n'), ((67346, 67381), 'django.contrib.auth.models.User.objects.filter', 'User.objects.filter', ([], {'is_active': '(True)'}), '(is_active=True)\n', (67365, 67381), False, 'from django.contrib.auth.models import User, Group\n'), ((71505, 71572), 'django.core.urlresolvers.reverse', 'reverse', (['"""edit_proposal_intake"""'], {'kwargs': "{'award_pk': self.award.pk}"}), "('edit_proposal_intake', kwargs={'award_pk': self.award.pk})\n", (71512, 71572), False, 'from django.core.urlresolvers import reverse\n'), ((71660, 71745), 'django.core.urlresolvers.reverse', 'reverse', (['"""edit_standalone_proposal_intake"""'], {'kwargs': "{'proposalintake_pk': self.id}"}), "('edit_standalone_proposal_intake', kwargs={'proposalintake_pk':\n self.id})\n", (71667, 71745), False, 'from django.core.urlresolvers import reverse\n'), ((96942, 97162), 'django.core.exceptions.ValidationError', 'ValidationError', (['(\'Another %s is already the current modification for %s. Set "current modification" on all other %s objects and try again.\'\n % (section.__name__, self.award, section.__name__))'], {}), '(\n \'Another %s is already the current modification for %s. Set "current modification" on all other %s objects and try again.\'\n % (section.__name__, self.award, section.__name__))\n', (96957, 97162), False, 'from django.core.exceptions import ValidationError\n'), ((43131, 43134), 'django.db.models.Q', 'Q', ([], {}), '()\n', (43132, 43134), False, 'from django.db.models import Q\n'), ((44581, 44595), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (44593, 44595), False, 'from datetime import datetime, date, timedelta, tzinfo\n'), ((45268, 45282), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (45280, 45282), False, 'from datetime import datetime, date, timedelta, tzinfo\n'), ((47350, 47364), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (47362, 47364), False, 'from django.utils import timezone\n'), ((60978, 60992), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (60990, 60992), False, 'from django.utils import timezone\n'), ((148246, 148276), 'reversion.get_for_object', 'reversion.get_for_object', (['self'], {}), '(self)\n', (148270, 148276), False, 'import reversion\n'), ((20964, 20995), 'django.db.models.Q', 'Q', ([], {'award_dual_modification': '(True)'}), '(award_dual_modification=True)\n', (20965, 20995), False, 'from django.db.models import Q\n'), ((25001, 25028), 'django.db.models.Q', 'Q', ([], {'award_closeout_user': 'user'}), '(award_closeout_user=user)\n', (25002, 25028), False, 'from django.db.models import Q\n'), ((25031, 25042), 'django.db.models.Q', 'Q', ([], {'status': '(5)'}), '(status=5)\n', (25032, 25042), False, 'from django.db.models import Q\n'), ((43240, 43268), 'django.db.models.Q', 'Q', ([], {'current_modification': '(True)'}), '(current_modification=True)\n', (43241, 43268), False, 'from django.db.models import Q\n'), ((46313, 46327), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (46325, 46327), False, 'from datetime import 
datetime, date, timedelta, tzinfo\n'), ((48139, 48153), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (48151, 48153), False, 'from django.utils import timezone\n'), ((58480, 58494), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (58492, 58494), False, 'from django.utils import timezone\n'), ((59035, 59049), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (59047, 59049), False, 'from django.utils import timezone\n'), ((59187, 59201), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (59199, 59201), False, 'from django.utils import timezone\n'), ((60685, 60699), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (60697, 60699), False, 'from django.utils import timezone\n'), ((62274, 62288), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (62286, 62288), False, 'from django.utils import timezone\n'), ((5944, 5956), 'datetime.date.today', 'date.today', ([], {}), '()\n', (5954, 5956), False, 'from datetime import datetime, date, timedelta, tzinfo\n'), ((6025, 6039), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (6037, 6039), False, 'from datetime import datetime, date, timedelta, tzinfo\n'), ((20871, 20899), 'django.db.models.Q', 'Q', ([], {'send_to_modification': '(True)'}), '(send_to_modification=True)\n', (20872, 20899), False, 'from django.db.models import Q\n'), ((20916, 20947), 'django.db.models.Q', 'Q', ([], {'award_modification_user': 'user'}), '(award_modification_user=user)\n', (20917, 20947), False, 'from django.db.models import Q\n'), ((20950, 20961), 'django.db.models.Q', 'Q', ([], {'status': '(2)'}), '(status=2)\n', (20951, 20961), False, 'from django.db.models import Q\n'), ((23798, 23891), 'django.core.urlresolvers.reverse', 'reverse', (["award.SECTION_FIELD_MAPPING[section]['edit_url']"], {'kwargs': "{'award_pk': award.pk}"}), "(award.SECTION_FIELD_MAPPING[section]['edit_url'], kwargs={\n 'award_pk': award.pk})\n", (23805, 23891), False, 'from django.core.urlresolvers import reverse\n'), ((24941, 24970), 'django.db.models.Q', 'Q', ([], {'award_management_user': 'user'}), '(award_management_user=user)\n', (24942, 24970), False, 'from django.db.models import Q\n'), ((24973, 24984), 'django.db.models.Q', 'Q', ([], {'status': '(4)'}), '(status=4)\n', (24974, 24984), False, 'from django.db.models import Q\n'), ((25681, 25774), 'django.core.urlresolvers.reverse', 'reverse', (["award.SECTION_FIELD_MAPPING[section]['edit_url']"], {'kwargs': "{'award_pk': award.pk}"}), "(award.SECTION_FIELD_MAPPING[section]['edit_url'], kwargs={\n 'award_pk': award.pk})\n", (25688, 25774), False, 'from django.core.urlresolvers import reverse\n'), ((31878, 31887), 'dateutil.tz.tzlocal', 'tzlocal', ([], {}), '()\n', (31885, 31887), False, 'from dateutil.tz import tzutc, tzlocal\n'), ((44799, 44813), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (44811, 44813), False, 'from datetime import datetime, date, timedelta, tzinfo\n'), ((48952, 48966), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (48964, 48966), False, 'from django.utils import timezone\n'), ((55969, 55983), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (55981, 55983), False, 'from django.utils import timezone\n'), ((56956, 56970), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (56968, 56970), False, 'from django.utils import timezone\n'), ((60299, 60313), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (60311, 60313), False, 'from django.utils import timezone\n'), 
((61888, 61902), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (61900, 61902), False, 'from django.utils import timezone\n'), ((20777, 20806), 'django.db.models.Q', 'Q', ([], {'send_to_modification': '(False)'}), '(send_to_modification=False)\n', (20778, 20806), False, 'from django.db.models import Q\n'), ((20823, 20854), 'django.db.models.Q', 'Q', ([], {'award_modification_user': 'user'}), '(award_modification_user=user)\n', (20824, 20854), False, 'from django.db.models import Q\n'), ((20857, 20868), 'django.db.models.Q', 'Q', ([], {'status': '(3)'}), '(status=3)\n', (20858, 20868), False, 'from django.db.models import Q\n'), ((24889, 24910), 'django.db.models.Q', 'Q', ([], {'subaward_user': 'user'}), '(subaward_user=user)\n', (24890, 24910), False, 'from django.db.models import Q\n'), ((24913, 24924), 'django.db.models.Q', 'Q', ([], {'status': '(4)'}), '(status=4)\n', (24914, 24924), False, 'from django.db.models import Q\n'), ((45594, 45608), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (45606, 45608), False, 'from datetime import datetime, date, timedelta, tzinfo\n'), ((20736, 20760), 'django.db.models.Q', 'Q', ([], {'award_setup_user': 'user'}), '(award_setup_user=user)\n', (20737, 20760), False, 'from django.db.models import Q\n'), ((20763, 20774), 'django.db.models.Q', 'Q', ([], {'status': '(3)'}), '(status=3)\n', (20764, 20774), False, 'from django.db.models import Q\n'), ((32294, 32303), 'dateutil.tz.tzlocal', 'tzlocal', ([], {}), '()\n', (32301, 32303), False, 'from dateutil.tz import tzutc, tzlocal\n'), ((32572, 32581), 'dateutil.tz.tzlocal', 'tzlocal', ([], {}), '()\n', (32579, 32581), False, 'from dateutil.tz import tzutc, tzlocal\n'), ((33246, 33255), 'dateutil.tz.tzlocal', 'tzlocal', ([], {}), '()\n', (33253, 33255), False, 'from dateutil.tz import tzutc, tzlocal\n'), ((49719, 49733), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (49731, 49733), False, 'from django.utils import timezone\n'), ((50197, 50211), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (50209, 50211), False, 'from django.utils import timezone\n'), ((20623, 20647), 'django.db.models.Q', 'Q', ([], {'award_dual_setup': '(True)'}), '(award_dual_setup=True)\n', (20624, 20647), False, 'from django.db.models import Q\n'), ((20694, 20718), 'django.db.models.Q', 'Q', ([], {'award_dual_setup': '(True)'}), '(award_dual_setup=True)\n', (20695, 20718), False, 'from django.db.models import Q\n'), ((24790, 24821), 'django.db.models.Q', 'Q', ([], {'award_modification_user': 'user'}), '(award_modification_user=user)\n', (24791, 24821), False, 'from django.db.models import Q\n'), ((24824, 24835), 'django.db.models.Q', 'Q', ([], {'status': '(2)'}), '(status=2)\n', (24825, 24835), False, 'from django.db.models import Q\n'), ((24840, 24871), 'django.db.models.Q', 'Q', ([], {'award_dual_modification': '(True)'}), '(award_dual_modification=True)\n', (24841, 24871), False, 'from django.db.models import Q\n'), ((55220, 55234), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (55232, 55234), False, 'from django.utils import timezone\n'), ((20582, 20606), 'django.db.models.Q', 'Q', ([], {'award_setup_user': 'user'}), '(award_setup_user=user)\n', (20583, 20606), False, 'from django.db.models import Q\n'), ((20609, 20620), 'django.db.models.Q', 'Q', ([], {'status': '(2)'}), '(status=2)\n', (20610, 20620), False, 'from django.db.models import Q\n'), ((20653, 20677), 'django.db.models.Q', 'Q', ([], {'award_setup_user': 'user'}), '(award_setup_user=user)\n', 
(20654, 20677), False, 'from django.db.models import Q\n'), ((20680, 20691), 'django.db.models.Q', 'Q', ([], {'status': '(3)'}), '(status=3)\n', (20681, 20691), False, 'from django.db.models import Q\n'), ((24648, 24677), 'django.db.models.Q', 'Q', ([], {'send_to_modification': '(False)'}), '(send_to_modification=False)\n', (24649, 24677), False, 'from django.db.models import Q\n'), ((24694, 24725), 'django.db.models.Q', 'Q', ([], {'award_modification_user': 'user'}), '(award_modification_user=user)\n', (24695, 24725), False, 'from django.db.models import Q\n'), ((24728, 24739), 'django.db.models.Q', 'Q', ([], {'status': '(3)'}), '(status=3)\n', (24729, 24739), False, 'from django.db.models import Q\n'), ((24744, 24772), 'django.db.models.Q', 'Q', ([], {'send_to_modification': '(True)'}), '(send_to_modification=True)\n', (24745, 24772), False, 'from django.db.models import Q\n'), ((51423, 51437), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (51435, 51437), False, 'from django.utils import timezone\n'), ((53423, 53437), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (53435, 53437), False, 'from django.utils import timezone\n'), ((24607, 24631), 'django.db.models.Q', 'Q', ([], {'award_setup_user': 'user'}), '(award_setup_user=user)\n', (24608, 24631), False, 'from django.db.models import Q\n'), ((24634, 24645), 'django.db.models.Q', 'Q', ([], {'status': '(3)'}), '(status=3)\n', (24635, 24645), False, 'from django.db.models import Q\n'), ((24244, 24273), 'django.db.models.Q', 'Q', ([], {'award_acceptance_user': 'user'}), '(award_acceptance_user=user)\n', (24245, 24273), False, 'from django.db.models import Q\n'), ((24276, 24287), 'django.db.models.Q', 'Q', ([], {'status': '(1)'}), '(status=1)\n', (24277, 24287), False, 'from django.db.models import Q\n'), ((24494, 24518), 'django.db.models.Q', 'Q', ([], {'award_dual_setup': '(True)'}), '(award_dual_setup=True)\n', (24495, 24518), False, 'from django.db.models import Q\n'), ((24565, 24589), 'django.db.models.Q', 'Q', ([], {'award_dual_setup': '(True)'}), '(award_dual_setup=True)\n', (24566, 24589), False, 'from django.db.models import Q\n'), ((52346, 52360), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (52358, 52360), False, 'from django.utils import timezone\n'), ((24306, 24336), 'django.db.models.Q', 'Q', ([], {'award_negotiation_user': 'user'}), '(award_negotiation_user=user)\n', (24307, 24336), False, 'from django.db.models import Q\n'), ((24339, 24350), 'django.db.models.Q', 'Q', ([], {'status': '(2)'}), '(status=2)\n', (24340, 24350), False, 'from django.db.models import Q\n'), ((24403, 24433), 'django.db.models.Q', 'Q', ([], {'award_dual_negotiation': '(True)'}), '(award_dual_negotiation=True)\n', (24404, 24433), False, 'from django.db.models import Q\n'), ((24453, 24477), 'django.db.models.Q', 'Q', ([], {'award_setup_user': 'user'}), '(award_setup_user=user)\n', (24454, 24477), False, 'from django.db.models import Q\n'), ((24480, 24491), 'django.db.models.Q', 'Q', ([], {'status': '(2)'}), '(status=2)\n', (24481, 24491), False, 'from django.db.models import Q\n'), ((24524, 24548), 'django.db.models.Q', 'Q', ([], {'award_setup_user': 'user'}), '(award_setup_user=user)\n', (24525, 24548), False, 'from django.db.models import Q\n'), ((24551, 24562), 'django.db.models.Q', 'Q', ([], {'status': '(3)'}), '(status=3)\n', (24552, 24562), False, 'from django.db.models import Q\n'), ((24356, 24386), 'django.db.models.Q', 'Q', ([], {'award_negotiation_user': 'user'}), 
'(award_negotiation_user=user)\n', (24357, 24386), False, 'from django.db.models import Q\n'), ((24389, 24400), 'django.db.models.Q', 'Q', ([], {'status': '(2)'}), '(status=2)\n', (24390, 24400), False, 'from django.db.models import Q\n')]
|
# Copyright 2021, 2022 Cambridge Quantum Computing Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = ['SpidersReader', 'bag_of_words_reader', 'spiders_reader']
from discopy import Word
from discopy.rigid import Diagram, Spider
from lambeq.core.types import AtomicType
from lambeq.core.utils import SentenceType, tokenised_sentence_type_check
from lambeq.text2diagram.base import Reader
S = AtomicType.SENTENCE
class SpidersReader(Reader):
"""A reader that combines words using a spider."""
def sentence2diagram(self,
sentence: SentenceType,
tokenised: bool = False) -> Diagram:
if tokenised:
if not tokenised_sentence_type_check(sentence):
raise ValueError('`tokenised` set to `True`, but variable '
'`sentence` does not have type `list[str]`.')
else:
if not isinstance(sentence, str):
raise ValueError('`tokenised` set to `False`, but variable '
'`sentence` does not have type `str`.')
sentence = sentence.split()
words = [Word(word, S) for word in sentence]
diagram = Diagram.tensor(*words) >> Spider(len(words), 1, S)
return diagram
spiders_reader = SpidersReader()
bag_of_words_reader = spiders_reader
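# A minimal usage sketch, assuming lambeq and discopy are installed; the reader
# turns every word into a `Word` box of type S and merges them with a single spider:
#
#     diagram = spiders_reader.sentence2diagram('John walks in the park')
#     diagram.draw()  # optional visualisation, requires matplotlib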
|
[
"discopy.rigid.Diagram.tensor",
"discopy.Word",
"lambeq.core.utils.tokenised_sentence_type_check"
] |
[((1659, 1672), 'discopy.Word', 'Word', (['word', 'S'], {}), '(word, S)\n', (1663, 1672), False, 'from discopy import Word\n'), ((1713, 1735), 'discopy.rigid.Diagram.tensor', 'Diagram.tensor', (['*words'], {}), '(*words)\n', (1727, 1735), False, 'from discopy.rigid import Diagram, Spider\n'), ((1195, 1234), 'lambeq.core.utils.tokenised_sentence_type_check', 'tokenised_sentence_type_check', (['sentence'], {}), '(sentence)\n', (1224, 1234), False, 'from lambeq.core.utils import SentenceType, tokenised_sentence_type_check\n')]
|
"""PPO Agent for CRMDPs."""
import torch
import random
import numpy as np
from typing import Generator, List
from safe_grid_agents.common.utils import track_metrics
from safe_grid_agents.common.agents.policy_cnn import PPOCNNAgent
from safe_grid_agents.types import Rollout
from ai_safety_gridworlds.environments.tomato_crmdp import REWARD_FACTOR
def _get_agent_position(board, agent_value):
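    # Locate the cell whose value equals agent_value and return its (row, col)
    # indices (the first match if there are several).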
x_pos, y_pos = np.unravel_index(
np.argwhere(np.ravel(board) == agent_value), board.shape
)
x_pos, y_pos = x_pos.flat[0], y_pos.flat[0]
return x_pos, y_pos
def _manhatten_distance(x1, x2, y1, y2):
return abs(x1 - x2) + abs(y1 - y2)
def d_tomato_crmdp(X, Y):
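    # State distance for the tomato CRMDP: REWARD_FACTOR times the number of
    # cells on which the two boards differ.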
assert X.shape == Y.shape
return REWARD_FACTOR * np.sum(X != Y)
def d_toy_gridworlds(X, Y):
assert X.shape == Y.shape
X = X[0, ...]
Y = Y[0, ...]
# toy gridworlds use value 0 to denote the agent on the board
X_pos_x, X_pos_y = _get_agent_position(X, agent_value=0)
Y_pos_x, Y_pos_y = _get_agent_position(Y, agent_value=0)
return _manhatten_distance(X_pos_x, Y_pos_x, X_pos_y, Y_pos_y)
def d_trans_boat(X, Y):
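    # Distance for the boat-race gridworld: manhattan distance between the initial
    # agent positions, plus 1 for each axis on which the movement direction differs.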
assert X.shape == Y.shape
X_initial, X_final = X[0, ...], X[1, ...]
Y_initial, Y_final = Y[0, ...], Y[1, ...]
# deepmind gridworlds use value 2 to denote the agent on the board
X_initial_pos_x, X_initial_pos_y = _get_agent_position(X_initial, agent_value=2)
Y_initial_pos_x, Y_initial_pos_y = _get_agent_position(Y_initial, agent_value=2)
X_final_pos_x, X_final_pos_y = _get_agent_position(X_final, agent_value=2)
Y_final_pos_x, Y_final_pos_y = _get_agent_position(Y_final, agent_value=2)
X_direction_x = X_final_pos_x - X_initial_pos_x
X_direction_y = X_final_pos_y - X_initial_pos_y
Y_direction_x = Y_final_pos_x - Y_initial_pos_x
Y_direction_y = Y_final_pos_y - Y_initial_pos_y
initial_position_distance = _manhatten_distance(
X_initial_pos_x, Y_initial_pos_x, X_initial_pos_y, Y_initial_pos_y
)
direction_distance = int(X_direction_x != Y_direction_x)
direction_distance += int(X_direction_y != Y_direction_y)
return initial_position_distance + direction_distance
ENV_TO_D = {
"corners": d_toy_gridworlds,
"way": d_toy_gridworlds,
"tomato-crmdp": d_tomato_crmdp,
"trans-boat": d_trans_boat,
}
class PPOCRMDPAgent(PPOCNNAgent):
"""PPO Agent for CRMDPs."""
def __init__(self, env, args) -> None:
super().__init__(env, args)
self.states = dict()
self.d = ENV_TO_D[args.env_alias]
self.epsilon = 1e-3
self.rllb = dict()
self.state_memory_cap = 0
def _mark_state_corrupt(self, board, reward) -> None:
assert board.dtype == np.float32
self.states[board.tostring()] = [False, reward]
def _mark_state_safe(self, board, reward) -> None:
assert board.dtype == np.float32
self.states[board.tostring()] = [True, reward]
def _is_state_corrupt(self, board) -> bool:
if board.tostring() in self.states:
return not self.states[board.tostring()][0]
else:
return False
def _iterate_safe_states(self) -> Generator[np.array, None, None]:
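        # Yield (board, reward) pairs for every state currently marked safe,
        # rebuilding each board array from its byte-string key.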
for board_str in self.states.keys():
if self.states[board_str][0]:
board = np.fromstring(board_str, dtype=np.float32, count=self.n_input)
board = np.reshape(board, self.board_shape)
yield board, self.states[board_str][1]
def _iterate_corrupt_states(self) -> Generator[np.array, None, None]:
for board_str in self.states.keys():
if not self.states[board_str][0]:
board = np.fromstring(board_str, dtype=np.float32, count=self.n_input)
board = np.reshape(board, self.board_shape)
yield board, self.states[board_str][1]
def _update_rllb(self) -> None:
"""Update the reward lower Lipschitz bound."""
for corrupt_board, corrupt_reward in self._iterate_corrupt_states():
board_string = corrupt_board.tostring()
rllb = self.rllb.get(board_string, None)
for safe_board, safe_reward in self._iterate_safe_states():
bound = safe_reward - self.d(safe_board, corrupt_board)
if rllb is None or bound > rllb:
rllb = bound
self.rllb[board_string] = rllb
def _get_TLV(self, boardX, rewardX, state_iterator) -> float:
"""Return the total Lipschitz violation of a state X w.r.t a set of states.
Each state is only added once to the TLV."""
TLV = 0
unique_states = set()
for boardY, rewardY in state_iterator:
if boardY.tostring() not in unique_states:
TLV += max(0, abs(rewardX - rewardY) - self.d(boardY, boardX))
unique_states.add(boardY.tostring())
return TLV
def _purge_memory(self) -> None:
"""Drop random noncorrupt states from the memory for performance reasons."""
if len(self.states) > self.state_memory_cap:
            # random.sample needs a real sequence and an integer sample size
            n_to_drop = len(self.states) - self.state_memory_cap // 2
            to_remove = [
                state
                for state in random.sample(list(self.states.keys()), n_to_drop)
                if self.states[state][0]
            ]
for state in to_remove:
del self.states[state]
# we might have too many corrupt states, so update the bounds
if len(self.states) > 2 * self.state_memory_cap / 3:
self.state_memory_cap *= 2
def get_modified_reward(self, board, reward) -> float:
"""Return the reward to use for optimizing the policy based on the rllb."""
if self._is_state_corrupt(board):
return self.rllb[board.tostring()]
else:
return reward
def get_modified_rewards_for_rollout(self, boards, rewards) -> List[float]:
"""
Returns a list of rewards for a given rollout that has been updated based
on the rllb.
"""
new_rewards = []
for i in range(len(rewards)):
new_rewards.append(self.get_modified_reward(boards[i], rewards[i]))
return new_rewards
def identify_corruption_in_trajectory(self, boards, rewards) -> None:
"""Perform detection of corrupt states on a trajectory.
        Updates the sets of safe and corrupt states with all new states that
        are visited in this trajectory, then updates the self.rllb dict so
        that the modified reward function can be computed.
"""
boards = np.array(boards)
rewards = np.array(rewards)
TLV = np.zeros(len(boards))
for i in range(len(boards)):
TLV[i] = self._get_TLV(boards[i], rewards[i], zip(boards, rewards))
TLV_sort_idx = np.argsort(TLV)[::-1]
non_corrupt_idx = list(range(len(boards)))
added_corrupt_states = False
# iterate over all states in the trajectory in order decreasing by their TLV
for i in range(len(boards)):
idx = TLV_sort_idx[i]
if not added_corrupt_states:
# performance improvement
new_TLV = TLV[idx]
else:
new_TLV = self._get_TLV(
boards[idx],
rewards[idx],
zip(boards[non_corrupt_idx], rewards[non_corrupt_idx]),
)
if new_TLV <= self.epsilon:
if not self._is_state_corrupt(boards[idx]):
self._mark_state_safe(boards[idx], rewards[idx])
break
else:
self._mark_state_corrupt(boards[idx], rewards[idx])
non_corrupt_idx.remove(idx)
added_corrupt_states = True
if added_corrupt_states:
self._update_rllb()
def gather_rollout(self, env, env_state, history, args) -> Rollout:
"""Gather a single rollout from an old policy.
Based on the gather_rollout function of the regular PPO agents.
This version also tracks the successor states of each action.
Based on this the corrupted states can be detected before performing
the training step."""
state, reward, done, info = env_state
done = False
rollout = Rollout(states=[], actions=[], rewards=[], returns=[])
successors = []
for r in range(self.rollouts):
successors_r = []
# Rollout loop
states, actions, rewards, returns = [], [], [], []
while not done:
with torch.no_grad():
action = self.old_policy.act_explore(state)
successor, reward, done, info = env.step(action)
# Maybe cheat
if args.cheat:
reward = info["hidden_reward"]
# In case the agent is drunk, use the actual action they took
try:
action = info["extra_observations"]["actual_actions"]
except KeyError:
pass
# Store data from experience
states.append(state) # .flatten())
actions.append(action)
rewards.append(float(reward))
successors_r.append(successor)
state = successor
history["t"] += 1
if r != 0:
history["episode"] += 1
self.identify_corruption_in_trajectory(successors_r, rewards)
rewards = self.get_modified_rewards_for_rollout(successors_r, rewards)
returns = self.get_discounted_returns(rewards)
history = track_metrics(history, env)
rollout.states.append(states)
rollout.actions.append(actions)
rollout.rewards.append(rewards)
rollout.returns.append(returns)
successors.append(successors_r)
self.state_memory_cap = max(self.state_memory_cap, 20 * len(states))
self._purge_memory()
state = env.reset()
done = False
return rollout
|
[
"numpy.sum",
"safe_grid_agents.types.Rollout",
"numpy.ravel",
"numpy.argsort",
"numpy.array",
"numpy.reshape",
"safe_grid_agents.common.utils.track_metrics",
"torch.no_grad",
"numpy.fromstring"
] |
[((743, 757), 'numpy.sum', 'np.sum', (['(X != Y)'], {}), '(X != Y)\n', (749, 757), True, 'import numpy as np\n'), ((6632, 6648), 'numpy.array', 'np.array', (['boards'], {}), '(boards)\n', (6640, 6648), True, 'import numpy as np\n'), ((6667, 6684), 'numpy.array', 'np.array', (['rewards'], {}), '(rewards)\n', (6675, 6684), True, 'import numpy as np\n'), ((8363, 8417), 'safe_grid_agents.types.Rollout', 'Rollout', ([], {'states': '[]', 'actions': '[]', 'rewards': '[]', 'returns': '[]'}), '(states=[], actions=[], rewards=[], returns=[])\n', (8370, 8417), False, 'from safe_grid_agents.types import Rollout\n'), ((6863, 6878), 'numpy.argsort', 'np.argsort', (['TLV'], {}), '(TLV)\n', (6873, 6878), True, 'import numpy as np\n'), ((9768, 9795), 'safe_grid_agents.common.utils.track_metrics', 'track_metrics', (['history', 'env'], {}), '(history, env)\n', (9781, 9795), False, 'from safe_grid_agents.common.utils import track_metrics\n'), ((453, 468), 'numpy.ravel', 'np.ravel', (['board'], {}), '(board)\n', (461, 468), True, 'import numpy as np\n'), ((3317, 3379), 'numpy.fromstring', 'np.fromstring', (['board_str'], {'dtype': 'np.float32', 'count': 'self.n_input'}), '(board_str, dtype=np.float32, count=self.n_input)\n', (3330, 3379), True, 'import numpy as np\n'), ((3404, 3439), 'numpy.reshape', 'np.reshape', (['board', 'self.board_shape'], {}), '(board, self.board_shape)\n', (3414, 3439), True, 'import numpy as np\n'), ((3685, 3747), 'numpy.fromstring', 'np.fromstring', (['board_str'], {'dtype': 'np.float32', 'count': 'self.n_input'}), '(board_str, dtype=np.float32, count=self.n_input)\n', (3698, 3747), True, 'import numpy as np\n'), ((3772, 3807), 'numpy.reshape', 'np.reshape', (['board', 'self.board_shape'], {}), '(board, self.board_shape)\n', (3782, 3807), True, 'import numpy as np\n'), ((8651, 8666), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (8664, 8666), False, 'import torch\n')]
|
# Generated by Django 3.0.7 on 2021-07-13 11:16
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('customer', '0002_customer_address'),
]
operations = [
migrations.AlterField(
model_name='customer',
name='id',
field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='customer',
name='phone',
field=models.CharField(max_length=100, null=True),
),
]
|
[
"django.db.models.CharField",
"django.db.models.AutoField"
] |
[((333, 426), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (349, 426), False, 'from django.db import migrations, models\n'), ((545, 588), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'null': '(True)'}), '(max_length=100, null=True)\n', (561, 588), False, 'from django.db import migrations, models\n')]
|
# pylint: disable=C0103
from fake_logs.fake_logs_cli import run_from_cli
# Run this module with "python fake-logs.py <arguments>"
if __name__ == "__main__":
run_from_cli()
|
[
"fake_logs.fake_logs_cli.run_from_cli"
] |
[((160, 174), 'fake_logs.fake_logs_cli.run_from_cli', 'run_from_cli', ([], {}), '()\n', (172, 174), False, 'from fake_logs.fake_logs_cli import run_from_cli\n')]
|
"""
@author: <NAME>
@description : command dispatcher for solver
"""
# import for CommandSolverDispatcher
import uuid
from core import Details
import lib.system as system
import lib.system.time_integrators as integrator
from lib.objects import Dynamic, Kinematic, Condition, Force
from lib.objects.jit.data import Node, Spring, AnchorSpring, Bending, Area
from lib.objects.jit.data import Point, Edge, Triangle
import lib.commands as cmd
import core
class CommandSolverDispatcher(core.CommandDispatcher):
'''
Dispatch commands to manage objects (animators, conditions, dynamics, kinematics, forces)
'''
def __init__(self):
core.CommandDispatcher.__init__(self)
# data
self._scene = None
self._details = None
self._reset()
self._solver = system.Solver(integrator.BackwardEulerIntegrator())
#self._solver = system.Solver(integrator.SymplecticEulerIntegrator())
self._context = system.SolverContext()
# map hash_value with objects (dynamic, kinematic, condition, force)
self._object_dict = {}
# register
self.register_cmd(self._set_context, 'set_context')
self.register_cmd(self._get_context, 'get_context')
self.register_cmd(self._get_dynamics, 'get_dynamics')
self.register_cmd(self._get_conditions, 'get_conditions')
self.register_cmd(self._get_kinematics, 'get_kinematics')
self.register_cmd(self._get_metadata, 'get_metadata')
self.register_cmd(self._get_commands, 'get_commands')
self.register_cmd(self._reset, 'reset')
self.register_cmd(cmd.initialize)
self.register_cmd(cmd.add_dynamic)
self.register_cmd(cmd.add_kinematic)
self.register_cmd(cmd.solve_to_next_frame)
self.register_cmd(cmd.get_nodes_from_dynamic)
self.register_cmd(cmd.get_shape_from_kinematic)
self.register_cmd(cmd.get_normals_from_kinematic)
self.register_cmd(cmd.get_segments_from_constraint)
self.register_cmd(cmd.set_render_prefs)
self.register_cmd(cmd.add_gravity)
self.register_cmd(cmd.add_edge_constraint)
self.register_cmd(cmd.add_wire_bending_constraint)
self.register_cmd(cmd.add_face_constraint)
self.register_cmd(cmd.add_kinematic_attachment)
self.register_cmd(cmd.add_kinematic_collision)
self.register_cmd(cmd.add_dynamic_attachment)
self.register_cmd(cmd.get_sparse_matrix_as_dense)
def _add_object(self, obj, object_handle=None):
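        # Register the object under a string handle (a fresh UUID if none is
        # given) so that later commands can refer to it by that handle.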
if object_handle in self._object_dict:
assert False, f'_add_object(...) {object_handle} already exists'
if not object_handle:
object_handle = str(uuid.uuid4())
if isinstance(obj, (Dynamic, Kinematic, Condition, Force)):
self._object_dict[object_handle] = obj
else:
assert False, '_add_object(...) only supports Dynamic, Kinematic, Condition and Force'
return object_handle
def _convert_parameter(self, parameter_name, kwargs):
# parameter provided by the dispatcher
if parameter_name == 'scene':
return self._scene
elif parameter_name == 'solver':
return self._solver
elif parameter_name == 'context':
return self._context
elif parameter_name == 'details':
return self._details
# parameter provided by user
if parameter_name in kwargs:
arg_object = kwargs[parameter_name]
reserved_attrs = ['dynamic','kinematic','condition','obj']
is_reserved_attr = False
for reserved_attr in reserved_attrs:
if not parameter_name.startswith(reserved_attr):
continue
is_reserved_attr = True
break
if is_reserved_attr:
if arg_object not in self._object_dict:
                assert False, f"in _convert_parameter(...) {arg_object} doesn't exist"
return self._object_dict[arg_object]
return kwargs[parameter_name]
return None
def _process_result(self, result, object_handle=None):
# convert the result object
if isinstance(result, (Dynamic, Kinematic, Condition, Force)):
# the object is already stored
for k, v in self._object_dict.items():
if v == result:
return k
# add the new object
return self._add_object(result, object_handle)
if isinstance(result, (tuple, list)):
# shallow copy to not override the original list
result = result.copy()
for index in range(len(result)):
result[index] = self._process_result(result[index])
return result
def _set_context(self, time : float, frame_dt : float, num_substep : int, num_frames : int):
self._context = system.SolverContext(time, frame_dt, num_substep, num_frames)
def _get_context(self):
return self._context
def _get_dynamics(self):
return self._scene.dynamics
def _get_conditions(self):
return self._scene.conditions
def _get_kinematics(self):
return self._scene.kinematics
def _get_metadata(self, obj):
if obj:
return obj.metadata()
return None
def _get_commands(self):
return list(self._commands.keys())
def _reset(self):
self._scene = system.Scene()
system_types = [Node, Area, Bending, Spring, AnchorSpring]
system_types += [Point, Edge, Triangle]
group_types = {'dynamics' : [Node],
'constraints' : [Area, Bending, Spring, AnchorSpring],
'geometries': [Point, Edge, Triangle],
'bundle': system_types}
self._details = Details(system_types, group_types)
|
[
"uuid.uuid4",
"lib.system.SolverContext",
"lib.system.Scene",
"lib.system.time_integrators.BackwardEulerIntegrator",
"core.CommandDispatcher.__init__",
"core.Details"
] |
[((650, 687), 'core.CommandDispatcher.__init__', 'core.CommandDispatcher.__init__', (['self'], {}), '(self)\n', (681, 687), False, 'import core\n'), ((958, 980), 'lib.system.SolverContext', 'system.SolverContext', ([], {}), '()\n', (978, 980), True, 'import lib.system as system\n'), ((4943, 5004), 'lib.system.SolverContext', 'system.SolverContext', (['time', 'frame_dt', 'num_substep', 'num_frames'], {}), '(time, frame_dt, num_substep, num_frames)\n', (4963, 5004), True, 'import lib.system as system\n'), ((5492, 5506), 'lib.system.Scene', 'system.Scene', ([], {}), '()\n', (5504, 5506), True, 'import lib.system as system\n'), ((5877, 5911), 'core.Details', 'Details', (['system_types', 'group_types'], {}), '(system_types, group_types)\n', (5884, 5911), False, 'from core import Details\n'), ((818, 854), 'lib.system.time_integrators.BackwardEulerIntegrator', 'integrator.BackwardEulerIntegrator', ([], {}), '()\n', (852, 854), True, 'import lib.system.time_integrators as integrator\n'), ((2719, 2731), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (2729, 2731), False, 'import uuid\n')]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from quotes_web.adminx import BaseAdmin
import xadmin
from .models import Quotes, Categories, Works, Writers, Speakers, Topics
class QuotesAdmin(BaseAdmin):
exclude = ('owner', 'view_nums', 'dig_nums')
xadmin.site.register(Quotes, QuotesAdmin)
class CategoryAdmin(BaseAdmin):
exclude = ('owner', 'view_nums')
xadmin.site.register(Categories, CategoryAdmin)
class WorkAdmin(BaseAdmin):
exclude = ('owner', 'view_nums')
xadmin.site.register(Works, WorkAdmin)
class WriterAdmin(BaseAdmin):
exclude = ('owner', 'view_nums')
xadmin.site.register(Writers, WriterAdmin)
class SpeakerAdmin(BaseAdmin):
exclude = ('owner', 'view_nums')
xadmin.site.register(Speakers, SpeakerAdmin)
class TopicAdmin(BaseAdmin):
exclude = ('owner', 'view_nums')
xadmin.site.register(Topics, TopicAdmin)
|
[
"xadmin.site.register"
] |
[((275, 316), 'xadmin.site.register', 'xadmin.site.register', (['Quotes', 'QuotesAdmin'], {}), '(Quotes, QuotesAdmin)\n', (295, 316), False, 'import xadmin\n'), ((388, 435), 'xadmin.site.register', 'xadmin.site.register', (['Categories', 'CategoryAdmin'], {}), '(Categories, CategoryAdmin)\n', (408, 435), False, 'import xadmin\n'), ((503, 541), 'xadmin.site.register', 'xadmin.site.register', (['Works', 'WorkAdmin'], {}), '(Works, WorkAdmin)\n', (523, 541), False, 'import xadmin\n'), ((611, 653), 'xadmin.site.register', 'xadmin.site.register', (['Writers', 'WriterAdmin'], {}), '(Writers, WriterAdmin)\n', (631, 653), False, 'import xadmin\n'), ((724, 768), 'xadmin.site.register', 'xadmin.site.register', (['Speakers', 'SpeakerAdmin'], {}), '(Speakers, SpeakerAdmin)\n', (744, 768), False, 'import xadmin\n'), ((837, 877), 'xadmin.site.register', 'xadmin.site.register', (['Topics', 'TopicAdmin'], {}), '(Topics, TopicAdmin)\n', (857, 877), False, 'import xadmin\n')]
|
from flask import Blueprint, render_template, request, session, redirect, url_for
from pymysql import MySQLError
from datetime import date, datetime, timedelta
from dateutil.relativedelta import relativedelta
from air_ticket import conn
from air_ticket.utils import requires_login_airline_staff
mod = Blueprint('airline_staff', __name__, url_prefix='/airline_staff')
# Define route for homepage
@mod.route('/')
@requires_login_airline_staff
def homepage():
return render_template('airline_staff/index.html')
# Define route for update
@mod.route('/update')
@requires_login_airline_staff
def update():
return render_template('airline_staff/update.html')
# Define route for view
@mod.route('/view')
@requires_login_airline_staff
def view():
return render_template('airline_staff/view.html')
# Define route for compare
@mod.route('/compare')
@requires_login_airline_staff
def compare():
return render_template('airline_staff/compare.html')
# View my flights in the next 30 days
@mod.route('/viewMyFlights', methods=['POST'])
@requires_login_airline_staff
def viewMyFlights():
# grabs information
airline_name = session['airline_name']
# cursor used to send queries
cursor = conn.cursor()
# executes query
query = '''
SELECT *
FROM flight
WHERE airline_name = %s AND
departure_time BETWEEN CURDATE() AND DATE_ADD(NOW(), INTERVAL 30 DAY)
ORDER BY departure_time '''
cursor.execute(query, (airline_name))
# stores the results in a variable
data = cursor.fetchall()
cursor.close()
# check data
if data:
return render_template('airline_staff/index.html', result_viewMyFlights=data)
else:
msg = 'No records are found!'
return render_template('airline_staff/index.html', message=msg)
# View my flights option - specifying departure/arrival airport and a range of departure dates
@mod.route('/viewMyFlightsOption', methods=['POST'])
@requires_login_airline_staff
def viewMyFlightsOption():
# grabs information
airline_name = session['airline_name']
start_date = request.form['start_date']
end_date = request.form['end_date']
departure_airport = request.form['departure_airport']
arrival_airport = request.form['arrival_airport']
# check consistence of dates
if start_date > end_date:
error = 'Error: end date is earlier than start date!'
return render_template('airline_staff/index.html', message=error)
# cursor used to send queries
cursor = conn.cursor()
# executes query
query = '''
SELECT *
FROM flight
WHERE airline_name = %s AND departure_airport = %s
AND arrival_airport = %s AND departure_time BETWEEN %s AND %s
ORDER BY departure_time DESC '''
cursor.execute(query, (airline_name, departure_airport, arrival_airport,
start_date, end_date))
# stores the results in a variable
data = cursor.fetchall()
cursor.close()
# check data
if data:
return render_template('airline_staff/index.html', result_viewMyFlights=data)
else:
msg = 'No records are found!'
return render_template('airline_staff/index.html', message=msg)
# View all customers of a flight, sub module for view my flights
@mod.route('/viewAllCustomers', methods=['POST'])
@requires_login_airline_staff
def viewAllCustomers():
# grabs information
airline_name = session['airline_name']
flight_num = request.form['flight_num']
# cursor used to send queries
cursor = conn.cursor()
# executes query
query = '''
SELECT ticket_id, customer_email, booking_agent_id, purchase_date
FROM ticket NATURAL JOIN purchases
WHERE airline_name = %s AND flight_num = %s
ORDER by purchase_date DESC '''
cursor.execute(query, (airline_name, flight_num))
data = cursor.fetchall()
# check data
if data:
return render_template('airline_staff/index.html', airline_name=airline_name,
flight_num=flight_num, result_viewAllCustomers=data)
else:
msg = 'No customers yet!'
return render_template('airline_staff/index.html', message=msg)
@mod.route('/createNewFlights', methods=['POST'])
@requires_login_airline_staff
def createNewFlights():
# grabs information
airline_name = session['airline_name']
flight_num = request.form['flight_num']
departure_airport = request.form['departure_airport']
departure_time = request.form['departure_time']
arrival_airport = request.form['arrival_airport']
arrival_time = request.form['arrival_time']
price = request.form['price']
status = request.form['status']
airplane_id = request.form['airplane_id']
# check consistence of time
if departure_time >= arrival_time:
error = 'Error: wrong time format or inconsistent departure and arrival time!'
return render_template('airline_staff/update.html', result=error)
try:
msg = 'Create successfully!'
with conn.cursor() as cursor:
ins = 'INSERT INTO flight VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s)'
cursor.execute(ins, (airline_name, flight_num, departure_airport, departure_time,
arrival_airport, arrival_time, price, status, airplane_id))
conn.commit()
except MySQLError as e:
msg = 'Got error {!r}, errno is {}'.format(e, e.args[0])
return render_template('airline_staff/update.html', result=msg)
# Change status of flights
@mod.route('/changeFlightStatus', methods=['POST'])
@requires_login_airline_staff
def changeFlightStatus():
# grabs information
airline_name = session['airline_name']
flight_num = request.form['flight_num']
status = request.form['status']
try:
msg = "Update successfully!"
with conn.cursor() as cursor:
query = '''
UPDATE flight
SET status = %s
WHERE airline_name = %s AND flight_num = %s '''
cursor.execute(query, (status, airline_name, flight_num))
conn.commit()
except MySQLError as e:
msg = 'Got error {!r}, errno is {}'.format(e, e.args[0])
return render_template('airline_staff/update.html', result=msg)
# Add new airplane
@mod.route('/addNewAirplane', methods=['POST'])
@requires_login_airline_staff
def addNewAirplane():
# grabs information
airline_name = session['airline_name']
airplane_id = request.form['airplane_id']
seats = request.form['seats']
try:
msg = 'Add successfully!'
with conn.cursor() as cursor:
ins = 'INSERT INTO airplane VALUES(%s, %s, %s)'
cursor.execute(ins, (airline_name, airplane_id, seats))
conn.commit()
except MySQLError as e:
msg = 'Got error {!r}, errno is {}'.format(e, e.args[0])
return render_template('airline_staff/update.html', result=msg)
# Add new airport
@mod.route('/addNewAirport', methods=['POST'])
@requires_login_airline_staff
def addNewAirport():
# grabs information
airport_name = request.form['airport_name']
airport_city = request.form['airport_city']
try:
msg = 'Add successfully!'
with conn.cursor() as cursor:
ins = 'INSERT INTO airport VALUES(%s, %s)'
cursor.execute(ins, (airport_name, airport_city))
conn.commit()
except MySQLError as e:
msg = 'Got error {!r}, errno is {}'.format(e, e.args[0])
return render_template('airline_staff/update.html', result=msg)
# View top5 booking agent
@mod.route('/viewTop5BookingAgent', methods=['POST'])
@requires_login_airline_staff
def viewTop5BookingAgent():
# grabs information
airline_name = session['airline_name']
# cursor used to send queries
cursor = conn.cursor()
# executes query
query = '''
SELECT booking_agent_id, COUNT(ticket_id) as count
FROM ticket NATURAL JOIN purchases
WHERE airline_name = %s AND booking_agent_id IS NOT NULL AND
purchase_date BETWEEN DATE_SUB(NOW(), INTERVAL 1 MONTH) AND CURDATE()
GROUP BY booking_agent_id
ORDER by count DESC
LIMIT 5 '''
cursor.execute(query, (airline_name))
top5bycount_past_month = cursor.fetchall()
query = '''
SELECT booking_agent_id, COUNT(ticket_id) as count
FROM ticket NATURAL JOIN purchases
WHERE airline_name = %s AND booking_agent_id IS NOT NULL AND
purchase_date BETWEEN DATE_SUB(NOW(), INTERVAL 1 YEAR) AND CURDATE()
GROUP BY booking_agent_id
ORDER by count DESC
LIMIT 5 '''
cursor.execute(query, (airline_name))
top5bycount_past_year = cursor.fetchall()
query = '''
SELECT booking_agent_id, SUM(price) * 0.1 as commission
FROM ticket NATURAL JOIN purchases NATURAL JOIN flight
WHERE airline_name = %s AND booking_agent_id IS NOT NULL AND
purchase_date BETWEEN DATE_SUB(NOW(), INTERVAL 1 YEAR) AND CURDATE()
GROUP BY booking_agent_id
ORDER by commission DESC
LIMIT 5 '''
cursor.execute(query, (airline_name))
top5bycommission_past_year = cursor.fetchall()
cursor.close()
# check data
msg = None
if top5bycount_past_year == None or top5bycount_past_year == ():
msg = 'No records in the last year!'
elif top5bycount_past_month == None or top5bycount_past_month == ():
msg = 'No records in the last month!'
return render_template('airline_staff/view.html',
top5bycount_past_month=top5bycount_past_month,
top5bycount_past_year=top5bycount_past_year,
top5bycommission_past_year=top5bycommission_past_year,
message_viewTop5BookingAgent=msg)
# View frequent customers
@mod.route('/viewFrequentCustomers', methods=['POST'])
@requires_login_airline_staff
def viewFrequentCustomers():
# grabs information
airline_name = session['airline_name']
# cursor used to send queries
cursor = conn.cursor()
# executes query
query = '''
SELECT customer_email, COUNT(ticket_id) AS count
FROM ticket NATURAL JOIN purchases
WHERE airline_name = %s AND
purchase_date BETWEEN DATE_SUB(NOW(), INTERVAL 1 YEAR) AND CURDATE()
GROUP BY customer_email
ORDER by count DESC '''
cursor.execute(query, (airline_name))
data = cursor.fetchall()
if data != None and data != ():
return render_template('airline_staff/view.html', result_viewFrequentCustomers=data)
else:
msg = 'No records are found!'
return render_template('airline_staff/view.html', message_viewFrequentCustomers=msg)
# View flights taken, sub module for view frequent customers
@mod.route('/viewFlightsTaken', methods=['POST'])
@requires_login_airline_staff
def viewFlightsTaken():
# grabs information
airline_name = session['airline_name']
customer_email = request.form['customer_email']
# cursor used to send queries
cursor = conn.cursor()
# executes query
query = '''
SELECT customer_email, flight_num, purchase_date
FROM ticket NATURAL JOIN purchases
WHERE airline_name = %s AND customer_email = %s
ORDER by purchase_date DESC '''
cursor.execute(query, (airline_name, customer_email))
data = cursor.fetchall()
return render_template('airline_staff/view.html', result_viewFlightsTaken=data)
# View reports
@mod.route('/viewReports', methods=['POST'])
@requires_login_airline_staff
def viewReports():
# grabs information
airline_name = session['airline_name']
start_month = request.form['start_month']
end_month = request.form['end_month']
# check consistence of months
if start_month > end_month:
error = 'Error: end month is earlier than start month!'
return render_template('airline_staff/view.html', message_viewReports=error)
# computes date
start_date = datetime.strptime(start_month, '%Y-%m').date()
start_date_str = start_date.strftime('%Y-%m-%d')
end_date = datetime.strptime(end_month, '%Y-%m').date() + relativedelta(months=+1)
end_date_str = end_date.strftime('%Y-%m-%d')
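    # number of calendar months covered by the report (end month inclusive)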
diff = (end_date.year - start_date.year) * 12 + (end_date.month - start_date.month)
# cursor used to send queries
cursor = conn.cursor()
# query
query = '''
SELECT COUNT(ticket_id) as total
FROM purchases NATURAL JOIN ticket
WHERE airline_name = %s AND purchase_date >= %s AND purchase_date < %s '''
# total
cursor.execute(query, (airline_name, start_date_str, end_date_str))
data = cursor.fetchone()
total = data['total'] if data['total'] != None else 0
# monthwise
monthwise_label = []
monthwise_total = []
end_date = start_date + relativedelta(months=+1)
for _ in range(diff):
start_date_str = start_date.strftime('%Y-%m-%d')
end_date_str = end_date.strftime('%Y-%m-%d')
cursor.execute(query, (airline_name, start_date_str, end_date_str))
monthwise = cursor.fetchone()
monthwise_label.append(start_date.strftime('%y/%m'))
monthwise_total.append(monthwise['total'] if monthwise['total'] != None else 0)
start_date += relativedelta(months=+1)
end_date += relativedelta(months=+1)
cursor.close()
return render_template('airline_staff/view.html', total=total,
monthwise_label=monthwise_label, monthwise_total=monthwise_total)
# Compare revenue
@mod.route('/compareRevenue', methods=['POST'])
@requires_login_airline_staff
def compareRevenue():
# grabs information
airline_name = session['airline_name']
# cursor used to send queries
cursor = conn.cursor()
# revenue in the last month
query = '''
SELECT SUM(price) as revenue
FROM flight NATURAL JOIN ticket NATURAL JOIN purchases
WHERE airline_name = %s AND booking_agent_id IS NULL AND
purchase_date BETWEEN DATE_SUB(NOW(), INTERVAL 1 MONTH) AND CURDATE() '''
cursor.execute(query, (airline_name))
data = cursor.fetchone()
if data == None:
revenue_direct_sale_last_month = 0
elif data['revenue'] == None:
revenue_direct_sale_last_month = 0
else:
revenue_direct_sale_last_month = data['revenue']
query = '''
SELECT SUM(price) as revenue
FROM flight NATURAL JOIN ticket NATURAL JOIN purchases
WHERE airline_name = %s AND booking_agent_id IS NOT NULL AND
purchase_date BETWEEN DATE_SUB(NOW(), INTERVAL 1 MONTH) AND CURDATE() '''
cursor.execute(query, (airline_name))
data = cursor.fetchone()
if data == None:
revenue_indirect_sale_last_month = 0
elif data['revenue'] == None:
revenue_indirect_sale_last_month = 0
else:
revenue_indirect_sale_last_month = data['revenue']
# revenue in the last year
query = '''
SELECT SUM(price) as revenue
FROM flight NATURAL JOIN ticket NATURAL JOIN purchases
WHERE airline_name = %s AND booking_agent_id IS NULL AND
purchase_date BETWEEN DATE_SUB(NOW(), INTERVAL 1 YEAR) AND CURDATE() '''
cursor.execute(query, (airline_name))
data = cursor.fetchone()
if data == None:
revenue_direct_sale_last_year = 0
elif data['revenue'] == None:
revenue_direct_sale_last_year = 0
else:
revenue_direct_sale_last_year = data['revenue']
query = '''
SELECT SUM(price) as revenue
FROM flight NATURAL JOIN ticket NATURAL JOIN purchases
WHERE airline_name = %s AND booking_agent_id IS NOT NULL AND
purchase_date BETWEEN DATE_SUB(NOW(), INTERVAL 1 YEAR) AND CURDATE() '''
cursor.execute(query, (airline_name))
data = cursor.fetchone()
if data == None:
revenue_indirect_sale_last_year = 0
elif data['revenue'] == None:
revenue_indirect_sale_last_year = 0
else:
revenue_indirect_sale_last_year = data['revenue']
# check data
msg = None
if revenue_direct_sale_last_year * revenue_indirect_sale_last_year == 0:
msg = 'No sale in the last year!'
elif revenue_direct_sale_last_month * revenue_indirect_sale_last_month == 0:
msg = 'No sale in the last month!'
return render_template('airline_staff/compare.html',
revenue_direct_sale_last_month=revenue_direct_sale_last_month,
revenue_indirect_sale_last_month=revenue_indirect_sale_last_month,
revenue_direct_sale_last_year=revenue_direct_sale_last_year,
revenue_indirect_sale_last_year=revenue_indirect_sale_last_year,
message=msg)
# View top3 destinations
@mod.route('/viewTop3Destinations', methods=['POST'])
@requires_login_airline_staff
def viewTop3Destinations():
#grabs information
airline_name = session['airline_name']
# cursor used to send queries
cursor = conn.cursor()
# executes query
query = '''
SELECT arrival_airport, airport_city, COUNT(ticket_id) as count
FROM flight NATURAL JOIN ticket NATURAL JOIN purchases, airport
WHERE airline_name = %s AND arrival_airport = airport_name AND
purchase_date BETWEEN DATE_SUB(NOW(), INTERVAL 3 MONTH) AND CURDATE()
GROUP BY arrival_airport
ORDER BY count DESC
LIMIT 3 '''
cursor.execute(query, (airline_name))
top3_past3month = cursor.fetchall()
query = '''
SELECT arrival_airport, airport_city, COUNT(ticket_id) as count
FROM flight NATURAL JOIN ticket NATURAL JOIN purchases, airport
WHERE airline_name = %s AND arrival_airport = airport_name AND
purchase_date BETWEEN DATE_SUB(NOW(), INTERVAL 1 YEAR) AND CURDATE()
GROUP BY arrival_airport
ORDER BY count DESC
LIMIT 3 '''
cursor.execute(query, (airline_name))
top3_past1year = cursor.fetchall()
cursor.close()
# check data
msg = None
if top3_past1year == None or top3_past1year == ():
msg = 'No records in the last year!'
elif top3_past3month == None or top3_past3month == ():
msg = 'No records in the last 3 months!'
return render_template('airline_staff/view.html',
top3_past3month=top3_past3month,
top3_past1year=top3_past1year,
message_viewTop3Destinations=msg)
# Define route for logout
@mod.route('/logout')
@requires_login_airline_staff
def logout():
session.pop('username')
session.pop('usertype')
session.pop('airline_name')
return redirect('/')
|
[
"flask.session.pop",
"flask.Blueprint",
"flask.redirect",
"dateutil.relativedelta.relativedelta",
"datetime.datetime.strptime",
"air_ticket.conn.commit",
"flask.render_template",
"air_ticket.conn.cursor"
] |
[((302, 367), 'flask.Blueprint', 'Blueprint', (['"""airline_staff"""', '__name__'], {'url_prefix': '"""/airline_staff"""'}), "('airline_staff', __name__, url_prefix='/airline_staff')\n", (311, 367), False, 'from flask import Blueprint, render_template, request, session, redirect, url_for\n'), ((468, 511), 'flask.render_template', 'render_template', (['"""airline_staff/index.html"""'], {}), "('airline_staff/index.html')\n", (483, 511), False, 'from flask import Blueprint, render_template, request, session, redirect, url_for\n'), ((614, 658), 'flask.render_template', 'render_template', (['"""airline_staff/update.html"""'], {}), "('airline_staff/update.html')\n", (629, 658), False, 'from flask import Blueprint, render_template, request, session, redirect, url_for\n'), ((755, 797), 'flask.render_template', 'render_template', (['"""airline_staff/view.html"""'], {}), "('airline_staff/view.html')\n", (770, 797), False, 'from flask import Blueprint, render_template, request, session, redirect, url_for\n'), ((903, 948), 'flask.render_template', 'render_template', (['"""airline_staff/compare.html"""'], {}), "('airline_staff/compare.html')\n", (918, 948), False, 'from flask import Blueprint, render_template, request, session, redirect, url_for\n'), ((1190, 1203), 'air_ticket.conn.cursor', 'conn.cursor', ([], {}), '()\n', (1201, 1203), False, 'from air_ticket import conn\n'), ((2397, 2410), 'air_ticket.conn.cursor', 'conn.cursor', ([], {}), '()\n', (2408, 2410), False, 'from air_ticket import conn\n'), ((3324, 3337), 'air_ticket.conn.cursor', 'conn.cursor', ([], {}), '()\n', (3335, 3337), False, 'from air_ticket import conn\n'), ((5028, 5084), 'flask.render_template', 'render_template', (['"""airline_staff/update.html"""'], {'result': 'msg'}), "('airline_staff/update.html', result=msg)\n", (5043, 5084), False, 'from flask import Blueprint, render_template, request, session, redirect, url_for\n'), ((5702, 5758), 'flask.render_template', 'render_template', (['"""airline_staff/update.html"""'], {'result': 'msg'}), "('airline_staff/update.html', result=msg)\n", (5717, 5758), False, 'from flask import Blueprint, render_template, request, session, redirect, url_for\n'), ((6302, 6358), 'flask.render_template', 'render_template', (['"""airline_staff/update.html"""'], {'result': 'msg'}), "('airline_staff/update.html', result=msg)\n", (6317, 6358), False, 'from flask import Blueprint, render_template, request, session, redirect, url_for\n'), ((6864, 6920), 'flask.render_template', 'render_template', (['"""airline_staff/update.html"""'], {'result': 'msg'}), "('airline_staff/update.html', result=msg)\n", (6879, 6920), False, 'from flask import Blueprint, render_template, request, session, redirect, url_for\n'), ((7164, 7177), 'air_ticket.conn.cursor', 'conn.cursor', ([], {}), '()\n', (7175, 7177), False, 'from air_ticket import conn\n'), ((8651, 8887), 'flask.render_template', 'render_template', (['"""airline_staff/view.html"""'], {'top5bycount_past_month': 'top5bycount_past_month', 'top5bycount_past_year': 'top5bycount_past_year', 'top5bycommission_past_year': 'top5bycommission_past_year', 'message_viewTop5BookingAgent': 'msg'}), "('airline_staff/view.html', top5bycount_past_month=\n top5bycount_past_month, top5bycount_past_year=top5bycount_past_year,\n top5bycommission_past_year=top5bycommission_past_year,\n message_viewTop5BookingAgent=msg)\n", (8666, 8887), False, 'from flask import Blueprint, render_template, request, session, redirect, url_for\n'), ((9129, 9142), 'air_ticket.conn.cursor', 'conn.cursor', 
([], {}), '()\n', (9140, 9142), False, 'from air_ticket import conn\n'), ((10047, 10060), 'air_ticket.conn.cursor', 'conn.cursor', ([], {}), '()\n', (10058, 10060), False, 'from air_ticket import conn\n'), ((10353, 10425), 'flask.render_template', 'render_template', (['"""airline_staff/view.html"""'], {'result_viewFlightsTaken': 'data'}), "('airline_staff/view.html', result_viewFlightsTaken=data)\n", (10368, 10425), False, 'from flask import Blueprint, render_template, request, session, redirect, url_for\n'), ((11265, 11278), 'air_ticket.conn.cursor', 'conn.cursor', ([], {}), '()\n', (11276, 11278), False, 'from air_ticket import conn\n'), ((12185, 12311), 'flask.render_template', 'render_template', (['"""airline_staff/view.html"""'], {'total': 'total', 'monthwise_label': 'monthwise_label', 'monthwise_total': 'monthwise_total'}), "('airline_staff/view.html', total=total, monthwise_label=\n monthwise_label, monthwise_total=monthwise_total)\n", (12200, 12311), False, 'from flask import Blueprint, render_template, request, session, redirect, url_for\n'), ((12533, 12546), 'air_ticket.conn.cursor', 'conn.cursor', ([], {}), '()\n', (12544, 12546), False, 'from air_ticket import conn\n'), ((14817, 15151), 'flask.render_template', 'render_template', (['"""airline_staff/compare.html"""'], {'revenue_direct_sale_last_month': 'revenue_direct_sale_last_month', 'revenue_indirect_sale_last_month': 'revenue_indirect_sale_last_month', 'revenue_direct_sale_last_year': 'revenue_direct_sale_last_year', 'revenue_indirect_sale_last_year': 'revenue_indirect_sale_last_year', 'message': 'msg'}), "('airline_staff/compare.html',\n revenue_direct_sale_last_month=revenue_direct_sale_last_month,\n revenue_indirect_sale_last_month=revenue_indirect_sale_last_month,\n revenue_direct_sale_last_year=revenue_direct_sale_last_year,\n revenue_indirect_sale_last_year=revenue_indirect_sale_last_year,\n message=msg)\n", (14832, 15151), False, 'from flask import Blueprint, render_template, request, session, redirect, url_for\n'), ((15384, 15397), 'air_ticket.conn.cursor', 'conn.cursor', ([], {}), '()\n', (15395, 15397), False, 'from air_ticket import conn\n'), ((16503, 16647), 'flask.render_template', 'render_template', (['"""airline_staff/view.html"""'], {'top3_past3month': 'top3_past3month', 'top3_past1year': 'top3_past1year', 'message_viewTop3Destinations': 'msg'}), "('airline_staff/view.html', top3_past3month=top3_past3month,\n top3_past1year=top3_past1year, message_viewTop3Destinations=msg)\n", (16518, 16647), False, 'from flask import Blueprint, render_template, request, session, redirect, url_for\n'), ((16746, 16769), 'flask.session.pop', 'session.pop', (['"""username"""'], {}), "('username')\n", (16757, 16769), False, 'from flask import Blueprint, render_template, request, session, redirect, url_for\n'), ((16771, 16794), 'flask.session.pop', 'session.pop', (['"""usertype"""'], {}), "('usertype')\n", (16782, 16794), False, 'from flask import Blueprint, render_template, request, session, redirect, url_for\n'), ((16796, 16823), 'flask.session.pop', 'session.pop', (['"""airline_name"""'], {}), "('airline_name')\n", (16807, 16823), False, 'from flask import Blueprint, render_template, request, session, redirect, url_for\n'), ((16832, 16845), 'flask.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (16840, 16845), False, 'from flask import Blueprint, render_template, request, session, redirect, url_for\n'), ((1545, 1615), 'flask.render_template', 'render_template', (['"""airline_staff/index.html"""'], {'result_viewMyFlights': 
'data'}), "('airline_staff/index.html', result_viewMyFlights=data)\n", (1560, 1615), False, 'from flask import Blueprint, render_template, request, session, redirect, url_for\n'), ((1664, 1720), 'flask.render_template', 'render_template', (['"""airline_staff/index.html"""'], {'message': 'msg'}), "('airline_staff/index.html', message=msg)\n", (1679, 1720), False, 'from flask import Blueprint, render_template, request, session, redirect, url_for\n'), ((2296, 2354), 'flask.render_template', 'render_template', (['"""airline_staff/index.html"""'], {'message': 'error'}), "('airline_staff/index.html', message=error)\n", (2311, 2354), False, 'from flask import Blueprint, render_template, request, session, redirect, url_for\n'), ((2833, 2903), 'flask.render_template', 'render_template', (['"""airline_staff/index.html"""'], {'result_viewMyFlights': 'data'}), "('airline_staff/index.html', result_viewMyFlights=data)\n", (2848, 2903), False, 'from flask import Blueprint, render_template, request, session, redirect, url_for\n'), ((2952, 3008), 'flask.render_template', 'render_template', (['"""airline_staff/index.html"""'], {'message': 'msg'}), "('airline_staff/index.html', message=msg)\n", (2967, 3008), False, 'from flask import Blueprint, render_template, request, session, redirect, url_for\n'), ((3665, 3792), 'flask.render_template', 'render_template', (['"""airline_staff/index.html"""'], {'airline_name': 'airline_name', 'flight_num': 'flight_num', 'result_viewAllCustomers': 'data'}), "('airline_staff/index.html', airline_name=airline_name,\n flight_num=flight_num, result_viewAllCustomers=data)\n", (3680, 3792), False, 'from flask import Blueprint, render_template, request, session, redirect, url_for\n'), ((3837, 3893), 'flask.render_template', 'render_template', (['"""airline_staff/index.html"""'], {'message': 'msg'}), "('airline_staff/index.html', message=msg)\n", (3852, 3893), False, 'from flask import Blueprint, render_template, request, session, redirect, url_for\n'), ((4566, 4624), 'flask.render_template', 'render_template', (['"""airline_staff/update.html"""'], {'result': 'error'}), "('airline_staff/update.html', result=error)\n", (4581, 4624), False, 'from flask import Blueprint, render_template, request, session, redirect, url_for\n'), ((4920, 4933), 'air_ticket.conn.commit', 'conn.commit', ([], {}), '()\n', (4931, 4933), False, 'from air_ticket import conn\n'), ((5595, 5608), 'air_ticket.conn.commit', 'conn.commit', ([], {}), '()\n', (5606, 5608), False, 'from air_ticket import conn\n'), ((6194, 6207), 'air_ticket.conn.commit', 'conn.commit', ([], {}), '()\n', (6205, 6207), False, 'from air_ticket import conn\n'), ((6756, 6769), 'air_ticket.conn.commit', 'conn.commit', ([], {}), '()\n', (6767, 6769), False, 'from air_ticket import conn\n'), ((9524, 9601), 'flask.render_template', 'render_template', (['"""airline_staff/view.html"""'], {'result_viewFrequentCustomers': 'data'}), "('airline_staff/view.html', result_viewFrequentCustomers=data)\n", (9539, 9601), False, 'from flask import Blueprint, render_template, request, session, redirect, url_for\n'), ((9650, 9727), 'flask.render_template', 'render_template', (['"""airline_staff/view.html"""'], {'message_viewFrequentCustomers': 'msg'}), "('airline_staff/view.html', message_viewFrequentCustomers=msg)\n", (9665, 9727), False, 'from flask import Blueprint, render_template, request, session, redirect, url_for\n'), ((10809, 10878), 'flask.render_template', 'render_template', (['"""airline_staff/view.html"""'], {'message_viewReports': 'error'}), 
"('airline_staff/view.html', message_viewReports=error)\n", (10824, 10878), False, 'from flask import Blueprint, render_template, request, session, redirect, url_for\n'), ((11067, 11091), 'dateutil.relativedelta.relativedelta', 'relativedelta', ([], {'months': '(+1)'}), '(months=+1)\n', (11080, 11091), False, 'from dateutil.relativedelta import relativedelta\n'), ((11694, 11718), 'dateutil.relativedelta.relativedelta', 'relativedelta', ([], {'months': '(+1)'}), '(months=+1)\n', (11707, 11718), False, 'from dateutil.relativedelta import relativedelta\n'), ((12096, 12120), 'dateutil.relativedelta.relativedelta', 'relativedelta', ([], {'months': '(+1)'}), '(months=+1)\n', (12109, 12120), False, 'from dateutil.relativedelta import relativedelta\n'), ((12135, 12159), 'dateutil.relativedelta.relativedelta', 'relativedelta', ([], {'months': '(+1)'}), '(months=+1)\n', (12148, 12159), False, 'from dateutil.relativedelta import relativedelta\n'), ((4670, 4683), 'air_ticket.conn.cursor', 'conn.cursor', ([], {}), '()\n', (4681, 4683), False, 'from air_ticket import conn\n'), ((5402, 5415), 'air_ticket.conn.cursor', 'conn.cursor', ([], {}), '()\n', (5413, 5415), False, 'from air_ticket import conn\n'), ((6057, 6070), 'air_ticket.conn.cursor', 'conn.cursor', ([], {}), '()\n', (6068, 6070), False, 'from air_ticket import conn\n'), ((6630, 6643), 'air_ticket.conn.cursor', 'conn.cursor', ([], {}), '()\n', (6641, 6643), False, 'from air_ticket import conn\n'), ((10911, 10950), 'datetime.datetime.strptime', 'datetime.strptime', (['start_month', '"""%Y-%m"""'], {}), "(start_month, '%Y-%m')\n", (10928, 10950), False, 'from datetime import date, datetime, timedelta\n'), ((11020, 11057), 'datetime.datetime.strptime', 'datetime.strptime', (['end_month', '"""%Y-%m"""'], {}), "(end_month, '%Y-%m')\n", (11037, 11057), False, 'from datetime import date, datetime, timedelta\n')]
|
import just
import json
import pandas as pd
from pathlib import Path
pd.set_option('max_colwidth',300)
from encoder_decoder import TextEncoderDecoder, text_tokenize
from model import LSTMBase
TRAINING_TEST_CASES = ["from keras.layers import"]
columns_long_list = ['repo', 'path', 'url', 'code',
'code_tokens', 'docstring', 'docstring_tokens',
'language', 'partition']
def jsonl_list_to_dataframe(file_list, columns=columns_long_list):
return pd.concat([pd.read_json(f,
orient='records',
compression='gzip',
lines=True)[columns]
for f in file_list], sort=False)
def get_data():
print("loading data... \n")
python_files = sorted(Path('./data/python/').glob('**/*.gz'))
pydf = jsonl_list_to_dataframe(python_files)
code_data = pydf["code"].to_numpy()
# code_data = list(just.multi_read("data/**/*.py").values())
print(len(code_data), "\n =====> Sample code as training data: \n", code_data[0])
    # use only 30 training samples for a quick test
return code_data[:30]
def train(ted, model_name):
lb = LSTMBase(model_name, ted)
try:
lb.train(test_cases=TRAINING_TEST_CASES)
except KeyboardInterrupt:
pass
print("saving")
lb.save()
def train_char(model_name):
data = get_data()
    # list() splits a string "str" into its characters ["s", "t", "r"]
ted = TextEncoderDecoder(data, tokenize=list, untokenize="".join, padding=" ",
min_count=1, maxlen=40)
train(ted, model_name)
def train_token(model_name):
data = get_data()
# text tokenize splits source code into python tokens
ted = TextEncoderDecoder(data, tokenize=text_tokenize, untokenize="".join, padding=" ",
min_count=1, maxlen=20)
# print("[Token Training] Loading data...")
# python_files = sorted(Path('./data/python/').glob('**/*.gz'))
# pydf = jsonl_list_to_dataframe(python_files)
# tokens = pydf["code_tokens"]
train(ted, model_name)
def get_model(model_name):
return LSTMBase(model_name)
def complete(model, text, diversities):
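    # Sample one completion per diversity value, stopping at a newline, and prepend
    # the last line of the prompt so each suggestion reads as a complete line.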
predictions = [model.predict(text, diversity=d, max_prediction_steps=80,
break_at_token="\n")
for d in diversities]
# returning the latest sentence, + prediction
suggestions = [text.split("\n")[-1] + x.rstrip("\n") for x in predictions]
return suggestions
if __name__ == "__main__":
import sys
if len(sys.argv) != 3:
raise Exception(
"expecting model name, such as 'neural' and type (either 'char' or 'token'")
model_name = "_".join(sys.argv[1:])
if sys.argv[2] == "char":
train_char(model_name)
elif sys.argv[2] == "token":
train_token(model_name)
else:
msg = "The second argument cannot be {}, but should be either 'char' or 'token'"
raise Exception(msg.format(sys.argv[2]))
|
[
"encoder_decoder.TextEncoderDecoder",
"model.LSTMBase",
"pandas.read_json",
"pathlib.Path",
"pandas.set_option"
] |
[((71, 105), 'pandas.set_option', 'pd.set_option', (['"""max_colwidth"""', '(300)'], {}), "('max_colwidth', 300)\n", (84, 105), True, 'import pandas as pd\n'), ((1174, 1199), 'model.LSTMBase', 'LSTMBase', (['model_name', 'ted'], {}), '(model_name, ted)\n', (1182, 1199), False, 'from model import LSTMBase\n'), ((1452, 1552), 'encoder_decoder.TextEncoderDecoder', 'TextEncoderDecoder', (['data'], {'tokenize': 'list', 'untokenize': '"""""".join', 'padding': '""" """', 'min_count': '(1)', 'maxlen': '(40)'}), "(data, tokenize=list, untokenize=''.join, padding=' ',\n min_count=1, maxlen=40)\n", (1470, 1552), False, 'from encoder_decoder import TextEncoderDecoder, text_tokenize\n'), ((1726, 1835), 'encoder_decoder.TextEncoderDecoder', 'TextEncoderDecoder', (['data'], {'tokenize': 'text_tokenize', 'untokenize': '"""""".join', 'padding': '""" """', 'min_count': '(1)', 'maxlen': '(20)'}), "(data, tokenize=text_tokenize, untokenize=''.join,\n padding=' ', min_count=1, maxlen=20)\n", (1744, 1835), False, 'from encoder_decoder import TextEncoderDecoder, text_tokenize\n'), ((2131, 2151), 'model.LSTMBase', 'LSTMBase', (['model_name'], {}), '(model_name)\n', (2139, 2151), False, 'from model import LSTMBase\n'), ((510, 575), 'pandas.read_json', 'pd.read_json', (['f'], {'orient': '"""records"""', 'compression': '"""gzip"""', 'lines': '(True)'}), "(f, orient='records', compression='gzip', lines=True)\n", (522, 575), True, 'import pandas as pd\n'), ((809, 831), 'pathlib.Path', 'Path', (['"""./data/python/"""'], {}), "('./data/python/')\n", (813, 831), False, 'from pathlib import Path\n')]
|
"""Miscellaneous functions and helpers for the uclasm package."""
import numpy as np
def one_hot(idx, length):
"""Return a 1darray of zeros with a single one in the idx'th entry."""
    one_hot = np.zeros(length, dtype=bool)
one_hot[idx] = True
return one_hot
def index_map(args):
"""Return a dict mapping elements to their indices.
Parameters
----------
args : Iterable[str]
Strings to be mapped to their indices.
"""
return {elm: idx for idx, elm in enumerate(args)}
# TODO: change the name of this function
def invert(dict_of_sets):
"""TODO: Docstring."""
new_dict = {}
for k, v in dict_of_sets.items():
for x in v:
new_dict[x] = new_dict.get(x, set()) | set([k])
return new_dict
def values_map_to_same_key(dict_of_sets):
"""TODO: Docstring."""
matches = {}
# get the sets of candidates
for key, val_set in dict_of_sets.items():
frozen_val_set = frozenset(val_set)
matches[frozen_val_set] = matches.get(frozen_val_set, set()) | {key}
return matches
def apply_index_map_to_cols(df, cols, values):
"""Replace df[cols] with their indexes as taken from names.
Parameters
----------
df : DataFrame
To be modified inplace.
cols : Iterable[str]
Columns of df to operate on.
values : Iterable[str]
Values expected to be present in df[cols] to be replaced with their
corresponding indexes.
"""
val_to_idx = index_map(values)
df[cols] = df[cols].applymap(val_to_idx.get)
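# A minimal usage sketch, assuming pandas is available; with a DataFrame whose
# 'node' column holds names drawn from ['a', 'b'],
#
#     df = pd.DataFrame({'node': ['a', 'b', 'a']})
#     apply_index_map_to_cols(df, ['node'], ['a', 'b'])
#
# rewrites the column in place to the indexes [0, 1, 0].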
|
[
"numpy.zeros"
] |
[((202, 233), 'numpy.zeros', 'np.zeros', (['length'], {'dtype': 'np.bool'}), '(length, dtype=np.bool)\n', (210, 233), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*-
# @Author: Administrator
# @Date: 2019-04-30 11:25:35
# @Last Modified by: Administrator
# @Last Modified time: 2019-05-26 01:25:58
"""
A GUI-less game simulator that can replay match records.
"""
import sys
sys.path.append("../")
from core import const as game_const
import os
import time
import json
import subprocess
import multiprocessing
from _lib.utils import json_load
from _lib.simulator.const import BLUE_INPUT_JSON_FILENAME, RED_INPUT_JSON_FILENAME,\
DATASET_DIR, CONFIG_JSON_FILE
from _lib.simulator.utils import cut_by_turn
from _lib.simulator.stream import SimulatorConsoleOutputStream, SimulatorTextInputStream
try:
config = json_load(CONFIG_JSON_FILE)
except json.JSONDecodeError as e: # the config file is malformed
raise e
## Environment settings ##
game_const.DEBUG_MODE = config["environment"]["debug"] # whether DEBUG mode is enabled
game_const.LONG_RUNNING_MODE = config["environment"]["long_running"] # whether LONG_RUNNING mode is enabled
game_const.SIMULATOR_ENV = config["environment"]["simulator"] # whether this is the simulator environment
game_const.COMPACT_MAP = config["debug"]["compact_map"] # whether to print the map in compact form
game_const.SIMULATOR_PRINT = config["simulator"]["print"] # whether to print simulator logs
## Game settings ##
MATCH_ID = config["game"]["match_id"] # match ID
SIDE = config["game"]["side"] # which side we play; this determines the data source:
                              # 0 means blue.input.json, 1 means red.input.json
INITIAL_TURN = config["game"]["initial_turn"] # the turn to start from
## Simulator settings ##
TURN_INTERVAL = config["simulator"]["turn_interval"] # delay after each turn when auto-playing
PAUSE_PER_TURN = config["simulator"]["pause"] # disable auto-play and wait for a keypress after each turn
HIDE_DATA = config["simulator"]["hide_data"] # whether to hide the "data" and "globaldata" fields in the game output JSON
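# For reference, a config.json whose shape matches the keys read above might look like
# the following (all values are illustrative placeholders, not taken from the project):
# {
#     "environment": {"debug": false, "long_running": false, "simulator": true},
#     "debug":       {"compact_map": true},
#     "game":        {"match_id": "0000", "side": 0, "initial_turn": 1},
#     "simulator":   {"print": true, "turn_interval": 0.5, "pause": false, "hide_data": true}
# }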
def main():
from main import main as run_game
if SIDE == 0:
INPUT_JSON = os.path.join(DATASET_DIR, MATCH_ID, BLUE_INPUT_JSON_FILENAME)
elif SIDE == 1:
INPUT_JSON = os.path.join(DATASET_DIR, MATCH_ID, RED_INPUT_JSON_FILENAME)
else:
raise Exception("unknown side %s" % SIDE)
wholeInputJSON = json_load(INPUT_JSON)
totalTurn = len(wholeInputJSON["responses"])
data = None
globaldata = None
parentConnection, childrenConnection = multiprocessing.Pipe()
for turn in range(INITIAL_TURN, totalTurn+2):
CUT_OFF_RULE = "-" * 30
inputJSON = cut_by_turn(wholeInputJSON, turn)
if data is not None:
inputJSON["data"] = data
if globaldata is not None:
inputJSON["globaldata"] = globaldata
istream = SimulatorTextInputStream(json.dumps(inputJSON))
ostream = SimulatorConsoleOutputStream(connection=childrenConnection, hide_data=HIDE_DATA)
p = multiprocessing.Process( target=run_game, args=(istream, ostream) )
p.daemon = True
p.start()
output = parentConnection.recv()
p.join()
if p.exitcode != 0:
break
outputJSON = json.loads(output)
data = outputJSON.get("data")
globaldata = outputJSON.get("globaldata")
print(CUT_OFF_RULE)
print("End Turn %s" % turn)
if PAUSE_PER_TURN:
#subprocess.call("pause",shell=True)
os.system('pause')
else:
time.sleep(TURN_INTERVAL)
if __name__ == '__main__':
main()
|
[
"sys.path.append",
"_lib.simulator.utils.cut_by_turn",
"json.loads",
"_lib.utils.json_load",
"os.system",
"json.dumps",
"time.sleep",
"_lib.simulator.stream.SimulatorConsoleOutputStream",
"multiprocessing.Pipe",
"multiprocessing.Process",
"os.path.join"
] |
[((204, 226), 'sys.path.append', 'sys.path.append', (['"""../"""'], {}), "('../')\n", (219, 226), False, 'import sys\n'), ((674, 701), '_lib.utils.json_load', 'json_load', (['CONFIG_JSON_FILE'], {}), '(CONFIG_JSON_FILE)\n', (683, 701), False, 'from _lib.utils import json_load\n'), ((2094, 2115), '_lib.utils.json_load', 'json_load', (['INPUT_JSON'], {}), '(INPUT_JSON)\n', (2103, 2115), False, 'from _lib.utils import json_load\n'), ((2249, 2271), 'multiprocessing.Pipe', 'multiprocessing.Pipe', ([], {}), '()\n', (2269, 2271), False, 'import multiprocessing\n'), ((1848, 1909), 'os.path.join', 'os.path.join', (['DATASET_DIR', 'MATCH_ID', 'BLUE_INPUT_JSON_FILENAME'], {}), '(DATASET_DIR, MATCH_ID, BLUE_INPUT_JSON_FILENAME)\n', (1860, 1909), False, 'import os\n'), ((2377, 2410), '_lib.simulator.utils.cut_by_turn', 'cut_by_turn', (['wholeInputJSON', 'turn'], {}), '(wholeInputJSON, turn)\n', (2388, 2410), False, 'from _lib.simulator.utils import cut_by_turn\n'), ((2646, 2731), '_lib.simulator.stream.SimulatorConsoleOutputStream', 'SimulatorConsoleOutputStream', ([], {'connection': 'childrenConnection', 'hide_data': 'HIDE_DATA'}), '(connection=childrenConnection, hide_data=HIDE_DATA\n )\n', (2674, 2731), False, 'from _lib.simulator.stream import SimulatorConsoleOutputStream, SimulatorTextInputStream\n'), ((2740, 2805), 'multiprocessing.Process', 'multiprocessing.Process', ([], {'target': 'run_game', 'args': '(istream, ostream)'}), '(target=run_game, args=(istream, ostream))\n', (2763, 2805), False, 'import multiprocessing\n'), ((2977, 2995), 'json.loads', 'json.loads', (['output'], {}), '(output)\n', (2987, 2995), False, 'import json\n'), ((1951, 2011), 'os.path.join', 'os.path.join', (['DATASET_DIR', 'MATCH_ID', 'RED_INPUT_JSON_FILENAME'], {}), '(DATASET_DIR, MATCH_ID, RED_INPUT_JSON_FILENAME)\n', (1963, 2011), False, 'import os\n'), ((2605, 2626), 'json.dumps', 'json.dumps', (['inputJSON'], {}), '(inputJSON)\n', (2615, 2626), False, 'import json\n'), ((3238, 3256), 'os.system', 'os.system', (['"""pause"""'], {}), "('pause')\n", (3247, 3256), False, 'import os\n'), ((3283, 3308), 'time.sleep', 'time.sleep', (['TURN_INTERVAL'], {}), '(TURN_INTERVAL)\n', (3293, 3308), False, 'import time\n')]
|
import json
from unittest.mock import Mock
from unittest.mock import patch
import pytest
from illumideskdummyauthenticator.authenticator import IllumiDeskDummyAuthenticator
from illumideskdummyauthenticator.validators import IllumiDeskDummyValidator
from tornado.web import RequestHandler
@pytest.mark.asyncio
async def test_authenticator_returns_auth_state(make_dummy_authentication_request_args):
"""
Ensure we get a valid authentication dictionary.
"""
with patch.object(
IllumiDeskDummyValidator, "validate_login_request", return_value=True
):
authenticator = IllumiDeskDummyAuthenticator()
handler = Mock(
spec=RequestHandler,
get_secure_cookie=Mock(return_value=json.dumps(["key", "secret"])),
request=Mock(
arguments=make_dummy_authentication_request_args(),
headers={},
items=[],
),
)
result = await authenticator.authenticate(handler, None)
expected = {
"name": "foobar",
"auth_state": {
"assignment_name": "lab101",
"course_id": "intro101",
"lms_user_id": "abc123",
"user_role": "Student",
},
}
assert result == expected
|
[
"unittest.mock.patch.object",
"illumideskdummyauthenticator.authenticator.IllumiDeskDummyAuthenticator",
"json.dumps"
] |
[((480, 567), 'unittest.mock.patch.object', 'patch.object', (['IllumiDeskDummyValidator', '"""validate_login_request"""'], {'return_value': '(True)'}), "(IllumiDeskDummyValidator, 'validate_login_request',\n return_value=True)\n", (492, 567), False, 'from unittest.mock import patch\n'), ((603, 633), 'illumideskdummyauthenticator.authenticator.IllumiDeskDummyAuthenticator', 'IllumiDeskDummyAuthenticator', ([], {}), '()\n', (631, 633), False, 'from illumideskdummyauthenticator.authenticator import IllumiDeskDummyAuthenticator\n'), ((739, 768), 'json.dumps', 'json.dumps', (["['key', 'secret']"], {}), "(['key', 'secret'])\n", (749, 768), False, 'import json\n')]
|
# Generated by Django 2.2.4 on 2019-09-27 09:59
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('codebase', '0004_ticket_status'),
]
operations = [
migrations.AddField(
model_name='ticket',
name='is_closed',
field=models.BooleanField(default=True),
),
]
|
[
"django.db.models.BooleanField"
] |
[((333, 366), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (352, 366), False, 'from django.db import migrations, models\n')]
|
"""
This is the main setup file for Puck.
"""
from pathlib import Path
import subprocess
import json
import psycopg2 as pg
PUCK = Path.home().joinpath('.puck/')
print('Creating Configuration file...')
if Path.exists(PUCK):
for file in Path.iterdir(PUCK):
Path.unlink(file)
Path.rmdir(PUCK)
Path.mkdir(PUCK)
Path.touch(PUCK.joinpath('config.json'))
print(
"""NOTE: Please make sure you have set up a database for puck.
I have not been able to get Postgres to cooperate to allow for generic \
database and user creation."""
)
connected = False
with open(PUCK.joinpath('config.json'), 'w') as f:
while not connected:
db_name = input('Please enter the name of the database created\n> ')
db_user = input(
'Please enter the name of the user associated with the DB\n> '
)
try:
pg.connect(database=db_name, user=db_user)
except pg.OperationalError as err:
if db_name in str(err):
print(f'{db_name} is not a valid database.')
elif db_user in str(err):
print(f'{db_user} is not a valid username.')
        else:
            # no exception was raised, so the connection settings are valid
            connected = True
json.dump({'dbName': db_name, 'dbUser': db_user}, f)
|
[
"pathlib.Path.exists",
"json.dump",
"pathlib.Path.home",
"pathlib.Path.rmdir",
"pathlib.Path.mkdir",
"pathlib.Path.iterdir",
"pathlib.Path.unlink",
"psycopg2.connect"
] |
[((207, 224), 'pathlib.Path.exists', 'Path.exists', (['PUCK'], {}), '(PUCK)\n', (218, 224), False, 'from pathlib import Path\n'), ((310, 326), 'pathlib.Path.mkdir', 'Path.mkdir', (['PUCK'], {}), '(PUCK)\n', (320, 326), False, 'from pathlib import Path\n'), ((242, 260), 'pathlib.Path.iterdir', 'Path.iterdir', (['PUCK'], {}), '(PUCK)\n', (254, 260), False, 'from pathlib import Path\n'), ((292, 308), 'pathlib.Path.rmdir', 'Path.rmdir', (['PUCK'], {}), '(PUCK)\n', (302, 308), False, 'from pathlib import Path\n'), ((1187, 1239), 'json.dump', 'json.dump', (["{'dbName': db_name, 'dbUser': db_user}", 'f'], {}), "({'dbName': db_name, 'dbUser': db_user}, f)\n", (1196, 1239), False, 'import json\n'), ((132, 143), 'pathlib.Path.home', 'Path.home', ([], {}), '()\n', (141, 143), False, 'from pathlib import Path\n'), ((270, 287), 'pathlib.Path.unlink', 'Path.unlink', (['file'], {}), '(file)\n', (281, 287), False, 'from pathlib import Path\n'), ((857, 899), 'psycopg2.connect', 'pg.connect', ([], {'database': 'db_name', 'user': 'db_user'}), '(database=db_name, user=db_user)\n', (867, 899), True, 'import psycopg2 as pg\n')]
|
# Task 8. Variant 15.
# Extend the "Anagrams" game (see M. Dawson, Programming in Python, ch. 4) so that every word comes with a hint.
# The player earns the right to a hint when they have no guesses at all.
# Design a scoring system in which players who guess the word without a hint score more than those who asked for one.
# <NAME>.
# 19.04.2016, 11:08
import random
ochki = 500000
slova = ("питон", "программирование", "компьютер", "университет", "россия", "безопасность", "информатика")
zagadka=random.choice(slova)
proverka = zagadka
i=0
jumble = ""
while zagadka:
bykva = random.randrange(len(zagadka))
jumble += zagadka[bykva]
zagadka = zagadka[:bykva] + zagadka[(bykva+1):]
print("Вы попали в передачу 'Анаграммы'")
print("Загаданное слово: ", jumble)
slovo = input ("Ваш ответ: ")
while (slovo != proverka):
if(slovo == "не знаю"):
print(i,"буква: ",proverka[i])
i+=1
if ochki <= 0:
break
slovo=input("Неправильно. Попробуй еще раз: ")
ochki-=50000
if slovo == proverka:
print("\nПравильно! Это слово: ", proverka)
print("Вы набрали",ochki," очков! Поздравляем!")
else:
print("К сожалению, у вас 0 очков, и вы проиграли :( Загаданное слово:",proverka)
input ("Нажмите ENTER для продолжения")
|
[
"random.choice"
] |
[((563, 583), 'random.choice', 'random.choice', (['slova'], {}), '(slova)\n', (576, 583), False, 'import random\n')]
|
# -*- coding: utf-8 -*-
import os
from sqlalchemy import create_engine
from sqlalchemy.engine.url import make_url
from sqlalchemy.exc import ProgrammingError
import logging
import pytest
logger = logging.getLogger(__name__)
def pytest_addoption(parser):
group = parser.getgroup('sqlalchemy')
group.addoption(
'--test-db-prefix',
action='store',
dest='test_db_prefix',
default='test',
help='Define a prefix for the test database that is created'
)
parser.addini('test_db_prefix', 'Prefix for test database')
parser.addini('drop_existing_test_db', 'Drop existing test database for each session')
@pytest.fixture(scope='session')
def test_db_prefix():
return 'test_'
@pytest.fixture(scope='session')
def database_url():
return os.environ['DATABASE_URL']
@pytest.fixture(scope='session')
def test_database_url(test_db_prefix, database_url):
test_url = make_url(database_url)
test_url.database = test_db_prefix + test_url.database
return test_url
@pytest.fixture(scope='session')
def test_db(database_url, test_database_url):
engine = create_engine(database_url)
conn = engine.connect()
conn.execution_options(autocommit=False)
conn.execute('ROLLBACK')
try:
conn.execute("DROP DATABASE {}".format(test_database_url.database))
except ProgrammingError:
pass
finally:
conn.execute('ROLLBACK')
logger.debug('Creating Test Database {}'.format(test_database_url.database))
conn.execute("CREATE DATABASE {}".format(test_database_url.database))
conn.close()
engine.dispose()
@pytest.fixture(scope='session')
def sqlalchemy_base():
raise ValueError('Please supply sqlalchemy_base fixture')
@pytest.fixture(scope='session')
def sqlalchemy_session_class():
raise ValueError('Please supply sqlalchemy_session_class fixture')
@pytest.fixture(scope='session')
def engine(test_database_url):
return create_engine(test_database_url)
@pytest.yield_fixture(scope='session')
def tables(engine, sqlalchemy_base, test_db):
sqlalchemy_base.metadata.create_all(engine)
yield
sqlalchemy_base.metadata.drop_all(engine)
@pytest.yield_fixture(scope='function')
def db_session(engine, tables, sqlalchemy_session_class):
sqlalchemy_session_class.remove()
with engine.connect() as connection:
transaction = connection.begin_nested()
sqlalchemy_session_class.configure(bind=connection)
session = sqlalchemy_session_class()
session.begin_nested()
yield session
session.close()
transaction.rollback()
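# A sketch of how a consuming test suite might supply the two placeholder fixtures
# above; "Base" and the models module are hypothetical names, and scoped_session/
# sessionmaker are one reasonable choice given that db_session calls .remove() and
# .configure(bind=...) on the session class.
#
# # conftest.py in the consuming project
# import pytest
# from sqlalchemy.orm import scoped_session, sessionmaker
# from myapp.models import Base  # hypothetical declarative base
#
# @pytest.fixture(scope='session')
# def sqlalchemy_base():
#     return Base
#
# @pytest.fixture(scope='session')
# def sqlalchemy_session_class():
#     return scoped_session(sessionmaker())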
|
[
"pytest.yield_fixture",
"pytest.fixture",
"sqlalchemy.create_engine",
"sqlalchemy.engine.url.make_url",
"logging.getLogger"
] |
[((198, 225), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (215, 225), False, 'import logging\n'), ((666, 697), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (680, 697), False, 'import pytest\n'), ((742, 773), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (756, 773), False, 'import pytest\n'), ((835, 866), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (849, 866), False, 'import pytest\n'), ((1040, 1071), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (1054, 1071), False, 'import pytest\n'), ((1634, 1665), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (1648, 1665), False, 'import pytest\n'), ((1754, 1785), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (1768, 1785), False, 'import pytest\n'), ((1892, 1923), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (1906, 1923), False, 'import pytest\n'), ((2002, 2039), 'pytest.yield_fixture', 'pytest.yield_fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (2022, 2039), False, 'import pytest\n'), ((2193, 2231), 'pytest.yield_fixture', 'pytest.yield_fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (2213, 2231), False, 'import pytest\n'), ((935, 957), 'sqlalchemy.engine.url.make_url', 'make_url', (['database_url'], {}), '(database_url)\n', (943, 957), False, 'from sqlalchemy.engine.url import make_url\n'), ((1131, 1158), 'sqlalchemy.create_engine', 'create_engine', (['database_url'], {}), '(database_url)\n', (1144, 1158), False, 'from sqlalchemy import create_engine\n'), ((1966, 1998), 'sqlalchemy.create_engine', 'create_engine', (['test_database_url'], {}), '(test_database_url)\n', (1979, 1998), False, 'from sqlalchemy import create_engine\n')]
|
#--------------------------------------Convert Attachment (DOC & PDF) Comments to Text---------------------------------#
#---------------------------------------------The GW Regulatory Studies Center-----------------------------------------#
#--------------------------------------------------Author: <NAME>-------------------------------------------------#
# Import packages
import sys
import os
import comtypes.client
from PIL import Image
import pytesseract
import sys
from pdf2image import convert_from_path
import fitz
import json
filePath="Retrieve Comments/Comment Attachments/" #! Specify the path of the folder where the comment attachments are saved
#-------------------------------------------Convert DOC files to PDF----------------------------------------------------
# Define a function to convert doc to pdf
def docToPdf(filePath,fileName):
wdFormatPDF = 17
in_file = os.path.abspath(filePath+fileName+'.doc')
out_file = os.path.abspath(filePath+fileName+'.pdf')
word = comtypes.client.CreateObject('Word.Application')
word.Visible = False
doc = word.Documents.Open(in_file)
doc.SaveAs(out_file, FileFormat=wdFormatPDF)
doc.Close()
word.Quit()
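# Note: the conversion above drives Microsoft Word through COM (comtypes), so it only
# works on Windows with Word installed.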
# Convert DOC comments to PDF
for file in os.listdir(filePath):
if file.endswith(".doc"):
fileName = str(file).split('.doc')[0]
if os.path.isfile(filePath + fileName + ".pdf"):
pass
else:
docToPdf(filePath,fileName)
#---------------------------------------------Convert PDF files to text-------------------------------------------------
# Define a function to convert scanned PDF to text
def convertScanPDF(file):
## Part 1 : Converting PDF to images
# Store all the pages of the PDF in a variable
pages = convert_from_path(file, 500)
# Counter to store images of each page of PDF to image
image_counter = 1
# Iterate through all the pages stored above
for page in pages:
# Declaring filename for each page of PDF as JPG
# For each page, filename will be:
# PDF page 1 -> page_1.jpg
# ....
# PDF page n -> page_n.jpg
filename = "page_" + str(image_counter) + ".jpg"
# Save the image of the page in system
page.save(filename, 'JPEG')
# Increment the counter to update filename
image_counter = image_counter + 1
##Part 2 - Recognizing text from the images using OCR
# Variable to get count of total number of pages
filelimit = image_counter - 1
text=''
# Iterate from 1 to total number of pages
for i in range(1, filelimit + 1):
# Set filename to recognize text from
# Again, these files will be:
# page_1.jpg
# page_2.jpg
# ....
# page_n.jpg
filename = "page_" + str(i) + ".jpg"
# Recognize the text as string in image using pytesserct
new_text = str(((pytesseract.image_to_string(Image.open(filename)))))
        # The recognized text is stored in the variable `text`.
        # Basic clean-up: many PDFs hyphenate a word at a line break and continue it on
        # the next line; replacing every '-\n' with '' stitches those split words back together.
        new_text = new_text.replace('-\n', '')
# Finally, write the processed text to the file.
text += new_text
return text
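# Note: the OCR path above needs the external Tesseract binary (for pytesseract) and
# poppler (for pdf2image); the intermediate page_<n>.jpg files are written to the
# current directory and are not cleaned up afterwards.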
# Convert PDF comments to text
dic_pdfComments={}
notConverted=[]
for file in os.listdir(filePath):
if file.endswith(".pdf"):
doc = fitz.open(filePath+file)
fileName=str(file).split('.pdf')[0]
num_pages = doc.pageCount
count = 0
text = ""
while count < num_pages:
page = doc[count]
count += 1
text += page.getText('text')
if text != "":
text=text.replace('\n',' ')
dic_pdfComments.update({fileName: text})
else:
try:
text = convertScanPDF(filePath+file)
text = text.replace('\n', ' ')
dic_pdfComments.update({fileName: text})
except:
notConverted.append(file)
        doc.close()
print("The number of PDF files that have been converted to text is:", len(dic_pdfComments))
if len(notConverted)>0:
print("The following PDF files could not be converted:")
print(notConverted)
print("END")
# Print an example
print(dic_pdfComments.keys())
for key, value in dic_pdfComments.items():
if key=="<KEY>": #! Print the text of a specified document
print(key, ":", value)
#---------------------------------------------Export converted text-------------------------------------------------
# Export to JSON
## Output file will include text from all converted comments in one file
js_pdfComments=json.dumps(dic_pdfComments)
with open('Retrieve Comments/Attachment Comments Example.json', 'w', encoding='utf-8') as f: #! Specify the file to which you want to export the JSON
json.dump(js_pdfComments, f, ensure_ascii=False, indent=4)
|
[
"json.dump",
"pdf2image.convert_from_path",
"os.path.abspath",
"json.dumps",
"PIL.Image.open",
"os.path.isfile",
"fitz.open",
"os.listdir"
] |
[((1245, 1265), 'os.listdir', 'os.listdir', (['filePath'], {}), '(filePath)\n', (1255, 1265), False, 'import os\n'), ((3650, 3670), 'os.listdir', 'os.listdir', (['filePath'], {}), '(filePath)\n', (3660, 3670), False, 'import os\n'), ((4995, 5022), 'json.dumps', 'json.dumps', (['dic_pdfComments'], {}), '(dic_pdfComments)\n', (5005, 5022), False, 'import json\n'), ((897, 942), 'os.path.abspath', 'os.path.abspath', (["(filePath + fileName + '.doc')"], {}), "(filePath + fileName + '.doc')\n", (912, 942), False, 'import os\n'), ((954, 999), 'os.path.abspath', 'os.path.abspath', (["(filePath + fileName + '.pdf')"], {}), "(filePath + fileName + '.pdf')\n", (969, 999), False, 'import os\n'), ((1774, 1802), 'pdf2image.convert_from_path', 'convert_from_path', (['file', '(500)'], {}), '(file, 500)\n', (1791, 1802), False, 'from pdf2image import convert_from_path\n'), ((5180, 5238), 'json.dump', 'json.dump', (['js_pdfComments', 'f'], {'ensure_ascii': '(False)', 'indent': '(4)'}), '(js_pdfComments, f, ensure_ascii=False, indent=4)\n', (5189, 5238), False, 'import json\n'), ((1354, 1398), 'os.path.isfile', 'os.path.isfile', (["(filePath + fileName + '.pdf')"], {}), "(filePath + fileName + '.pdf')\n", (1368, 1398), False, 'import os\n'), ((3716, 3742), 'fitz.open', 'fitz.open', (['(filePath + file)'], {}), '(filePath + file)\n', (3725, 3742), False, 'import fitz\n'), ((2941, 2961), 'PIL.Image.open', 'Image.open', (['filename'], {}), '(filename)\n', (2951, 2961), False, 'from PIL import Image\n')]
|
import os
import sys
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../src')))
from card import Card, suit_num_dict, rank_num_dict
from itertools import product
deck = []
suits = []
ranks = []
for suit, rank in product(suit_num_dict.keys(),rank_num_dict.keys()):
deck.append(Card(suit, rank))
suits.append(suit)
ranks.append(rank)
|
[
"card.rank_num_dict.keys",
"os.path.dirname",
"card.suit_num_dict.keys",
"card.Card"
] |
[((250, 270), 'card.suit_num_dict.keys', 'suit_num_dict.keys', ([], {}), '()\n', (268, 270), False, 'from card import Card, suit_num_dict, rank_num_dict\n'), ((271, 291), 'card.rank_num_dict.keys', 'rank_num_dict.keys', ([], {}), '()\n', (289, 291), False, 'from card import Card, suit_num_dict, rank_num_dict\n'), ((310, 326), 'card.Card', 'Card', (['suit', 'rank'], {}), '(suit, rank)\n', (314, 326), False, 'from card import Card, suit_num_dict, rank_num_dict\n'), ((69, 94), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (84, 94), False, 'import os\n')]
|
# Copyright 2020, The TensorFlow Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from absl.testing import absltest
import numpy as np
from tensorflow_privacy.privacy.privacy_tests.membership_inference_attack import models
from tensorflow_privacy.privacy.privacy_tests.membership_inference_attack.data_structures import AttackInputData
class TrainedAttackerTest(absltest.TestCase):
def test_base_attacker_train_and_predict(self):
base_attacker = models.TrainedAttacker()
self.assertRaises(NotImplementedError, base_attacker.train_model, [], [])
self.assertRaises(AssertionError, base_attacker.predict, [])
def test_predict_before_training(self):
lr_attacker = models.LogisticRegressionAttacker()
self.assertRaises(AssertionError, lr_attacker.predict, [])
def test_create_attacker_data_loss_only(self):
attack_input = AttackInputData(
loss_train=np.array([1, 3]), loss_test=np.array([2, 4]))
attacker_data = models.create_attacker_data(attack_input, 2)
self.assertLen(attacker_data.features_all, 4)
def test_create_attacker_data_loss_and_logits(self):
attack_input = AttackInputData(
logits_train=np.array([[1, 2], [5, 6], [8, 9]]),
logits_test=np.array([[10, 11], [14, 15]]),
loss_train=np.array([3, 7, 10]),
loss_test=np.array([12, 16]))
attacker_data = models.create_attacker_data(attack_input, balance=False)
self.assertLen(attacker_data.features_all, 5)
self.assertLen(attacker_data.fold_indices, 5)
self.assertEmpty(attacker_data.left_out_indices)
def test_unbalanced_create_attacker_data_loss_and_logits(self):
attack_input = AttackInputData(
logits_train=np.array([[1, 2], [5, 6], [8, 9]]),
logits_test=np.array([[10, 11], [14, 15]]),
loss_train=np.array([3, 7, 10]),
loss_test=np.array([12, 16]))
attacker_data = models.create_attacker_data(attack_input, balance=True)
self.assertLen(attacker_data.features_all, 5)
self.assertLen(attacker_data.fold_indices, 4)
self.assertLen(attacker_data.left_out_indices, 1)
self.assertIn(attacker_data.left_out_indices[0], [0, 1, 2])
def test_balanced_create_attacker_data_loss_and_logits(self):
attack_input = AttackInputData(
logits_train=np.array([[1, 2], [5, 6], [8, 9]]),
logits_test=np.array([[10, 11], [14, 15], [17, 18]]),
loss_train=np.array([3, 7, 10]),
loss_test=np.array([12, 16, 19]))
attacker_data = models.create_attacker_data(attack_input)
self.assertLen(attacker_data.features_all, 6)
self.assertLen(attacker_data.fold_indices, 6)
self.assertEmpty(attacker_data.left_out_indices)
if __name__ == '__main__':
absltest.main()
|
[
"absl.testing.absltest.main",
"tensorflow_privacy.privacy.privacy_tests.membership_inference_attack.models.LogisticRegressionAttacker",
"tensorflow_privacy.privacy.privacy_tests.membership_inference_attack.models.TrainedAttacker",
"numpy.array",
"tensorflow_privacy.privacy.privacy_tests.membership_inference_attack.models.create_attacker_data"
] |
[((3202, 3217), 'absl.testing.absltest.main', 'absltest.main', ([], {}), '()\n', (3215, 3217), False, 'from absl.testing import absltest\n'), ((964, 988), 'tensorflow_privacy.privacy.privacy_tests.membership_inference_attack.models.TrainedAttacker', 'models.TrainedAttacker', ([], {}), '()\n', (986, 988), False, 'from tensorflow_privacy.privacy.privacy_tests.membership_inference_attack import models\n'), ((1193, 1228), 'tensorflow_privacy.privacy.privacy_tests.membership_inference_attack.models.LogisticRegressionAttacker', 'models.LogisticRegressionAttacker', ([], {}), '()\n', (1226, 1228), False, 'from tensorflow_privacy.privacy.privacy_tests.membership_inference_attack import models\n'), ((1463, 1507), 'tensorflow_privacy.privacy.privacy_tests.membership_inference_attack.models.create_attacker_data', 'models.create_attacker_data', (['attack_input', '(2)'], {}), '(attack_input, 2)\n', (1490, 1507), False, 'from tensorflow_privacy.privacy.privacy_tests.membership_inference_attack import models\n'), ((1858, 1914), 'tensorflow_privacy.privacy.privacy_tests.membership_inference_attack.models.create_attacker_data', 'models.create_attacker_data', (['attack_input'], {'balance': '(False)'}), '(attack_input, balance=False)\n', (1885, 1914), False, 'from tensorflow_privacy.privacy.privacy_tests.membership_inference_attack import models\n'), ((2379, 2434), 'tensorflow_privacy.privacy.privacy_tests.membership_inference_attack.models.create_attacker_data', 'models.create_attacker_data', (['attack_input'], {'balance': '(True)'}), '(attack_input, balance=True)\n', (2406, 2434), False, 'from tensorflow_privacy.privacy.privacy_tests.membership_inference_attack import models\n'), ((2976, 3017), 'tensorflow_privacy.privacy.privacy_tests.membership_inference_attack.models.create_attacker_data', 'models.create_attacker_data', (['attack_input'], {}), '(attack_input)\n', (3003, 3017), False, 'from tensorflow_privacy.privacy.privacy_tests.membership_inference_attack import models\n'), ((1397, 1413), 'numpy.array', 'np.array', (['[1, 3]'], {}), '([1, 3])\n', (1405, 1413), True, 'import numpy as np\n'), ((1425, 1441), 'numpy.array', 'np.array', (['[2, 4]'], {}), '([2, 4])\n', (1433, 1441), True, 'import numpy as np\n'), ((1671, 1705), 'numpy.array', 'np.array', (['[[1, 2], [5, 6], [8, 9]]'], {}), '([[1, 2], [5, 6], [8, 9]])\n', (1679, 1705), True, 'import numpy as np\n'), ((1727, 1757), 'numpy.array', 'np.array', (['[[10, 11], [14, 15]]'], {}), '([[10, 11], [14, 15]])\n', (1735, 1757), True, 'import numpy as np\n'), ((1778, 1798), 'numpy.array', 'np.array', (['[3, 7, 10]'], {}), '([3, 7, 10])\n', (1786, 1798), True, 'import numpy as np\n'), ((1818, 1836), 'numpy.array', 'np.array', (['[12, 16]'], {}), '([12, 16])\n', (1826, 1836), True, 'import numpy as np\n'), ((2192, 2226), 'numpy.array', 'np.array', (['[[1, 2], [5, 6], [8, 9]]'], {}), '([[1, 2], [5, 6], [8, 9]])\n', (2200, 2226), True, 'import numpy as np\n'), ((2248, 2278), 'numpy.array', 'np.array', (['[[10, 11], [14, 15]]'], {}), '([[10, 11], [14, 15]])\n', (2256, 2278), True, 'import numpy as np\n'), ((2299, 2319), 'numpy.array', 'np.array', (['[3, 7, 10]'], {}), '([3, 7, 10])\n', (2307, 2319), True, 'import numpy as np\n'), ((2339, 2357), 'numpy.array', 'np.array', (['[12, 16]'], {}), '([12, 16])\n', (2347, 2357), True, 'import numpy as np\n'), ((2775, 2809), 'numpy.array', 'np.array', (['[[1, 2], [5, 6], [8, 9]]'], {}), '([[1, 2], [5, 6], [8, 9]])\n', (2783, 2809), True, 'import numpy as np\n'), ((2831, 2871), 'numpy.array', 'np.array', (['[[10, 11], [14, 
15], [17, 18]]'], {}), '([[10, 11], [14, 15], [17, 18]])\n', (2839, 2871), True, 'import numpy as np\n'), ((2892, 2912), 'numpy.array', 'np.array', (['[3, 7, 10]'], {}), '([3, 7, 10])\n', (2900, 2912), True, 'import numpy as np\n'), ((2932, 2954), 'numpy.array', 'np.array', (['[12, 16, 19]'], {}), '([12, 16, 19])\n', (2940, 2954), True, 'import numpy as np\n')]
|
# Copyright (c) 2021, erpcloud.systems and contributors
# For license information, please see license.txt
# import frappe
from __future__ import unicode_literals
import frappe
from frappe.utils import getdate, nowdate
from frappe import _
from frappe.model.document import Document
from frappe.utils import cstr, get_datetime, formatdate
class StrategicPlan(Document):
def validate(self):
self.validate_duplicate_record()
def validate_duplicate_record(self):
res = frappe.db.sql("""
select name from `tabStrategic Plan`
where workflow_state NOT IN ("Approved","Rejected","Completed")
and name != %s
and docstatus != 2
""", (self.name))
if res:
frappe.throw(_("You Can't Create A New Strategic Plan While Another Plan Is Still In Progress").format(
frappe.bold(self.name)))
#pass
|
[
"frappe.db.sql",
"frappe.bold",
"frappe._"
] |
[((474, 669), 'frappe.db.sql', 'frappe.db.sql', (['"""\n\t\t\tselect name from `tabStrategic Plan`\n\t\t\twhere workflow_state NOT IN ("Approved","Rejected","Completed") \n\t\t\t\tand name != %s\n\t\t\t\tand docstatus != 2\n\t\t"""', 'self.name'], {}), '(\n """\n\t\t\tselect name from `tabStrategic Plan`\n\t\t\twhere workflow_state NOT IN ("Approved","Rejected","Completed") \n\t\t\t\tand name != %s\n\t\t\t\tand docstatus != 2\n\t\t"""\n , self.name)\n', (487, 669), False, 'import frappe\n'), ((783, 805), 'frappe.bold', 'frappe.bold', (['self.name'], {}), '(self.name)\n', (794, 805), False, 'import frappe\n'), ((688, 775), 'frappe._', '_', (['"""You Can\'t Create A New Strategic Plan While Another Plan Is Still In Progress"""'], {}), '("You Can\'t Create A New Strategic Plan While Another Plan Is Still In Progress"\n )\n', (689, 775), False, 'from frappe import _\n')]
|
# %%
import sys, os
import pandas as pd
import networkx as nx
# import matplotlib.pyplot as plt
import numpy as np
import pickle
base_file_path = os.path.abspath(os.path.join(os.curdir, '..','..', '..')) # should point to the level above the src directory
data_path = os.path.join(base_file_path, 'data', 'Intercity_Dallas')
# (grocery_demand, fitness_demand, pharmacy_demand, physician_demand, hotel_demand, religion_demand, restaurant_demand)
# Entity indexes
# 0 - groceries
# 1 - fitness
# 2 - pharmacy
# 3 - physician
# 4 - hotel
# 5 - religion
# 6 - restaurant
# Data processing parameters
fitness_freq = 94/12 # visits per unique visitor per month
pharmacy_freq = 35/12 # visits per unique visitor per month
physician_freq = 1 # visits per unique visitor per month
hotel_freq = 1 # visits per unique visitor per month
# religion_freq = 25/12 # visits per unique visitor per month
grocery_freq = 2 # visits per unique visitor per month
restaurant_freq = 1 # Assume each restaurant-goer only visits a given restaurant once per month (if at all)
month_day_time_conversion = 1/30 # months/day
min_demand_val = 5
# %%
# First get a list of the counties in Dallas MSA
county_fitness = pd.read_excel(os.path.join(data_path,'TX_Fitness_County.xlsx'))
counties = list(county_fitness.CNTY_NM.unique())
num_counties = len(counties)
print(counties)
county_data = dict()
for county in counties:
county_data[county] = {'index' : counties.index(county)}
# %%
# In county data, save a list of the block groups belonging to each county.
for county in counties:
county_data[county]['bg_list'] = set()
# Load and store block-group statistics
bg_info = dict()
# Save population data by county
print('Processing population data...')
population_data = pd.read_excel(os.path.join(data_path, 'Population_bg_Dallas.xlsx'))
for index, row in population_data.iterrows():
county = row['NAME']
if county in counties:
bg_id = row['GEO_ID']
population = row['Population']
bg_info[bg_id] = dict()
bg_info[bg_id]['county'] = county
bg_info[bg_id]['population'] = population
county_data[county]['bg_list'].add(bg_id)
# Save devices data by county
print('Processing device data...')
device_data = pd.read_excel(os.path.join(data_path, 'TX_Devices_bg.xlsx'))
for index, row in device_data.iterrows():
bg_id = row['census_block_group']
if bg_id in bg_info.keys():
devices = row['number_devices_residing']
bg_info[bg_id]['devices'] = devices
# %%
# Create arrays to store population and related data
devices = np.zeros((num_counties,))
populations = np.zeros((num_counties,))
# Now save populations and device counts by county
for county in counties:
county_data[county]['population'] = 0
county_data[county]['devices'] = 0
# Iterate over the block groups in each county and add the population and device count
for bg_id in county_data[county]['bg_list']:
county_data[county]['population'] = county_data[county]['population'] + bg_info[bg_id]['population']
county_data[county]['devices'] = county_data[county]['devices'] + bg_info[bg_id]['devices']
devices[county_data[county]['index']] = county_data[county]['devices']
populations[county_data[county]['index']] = county_data[county]['population']
# %%
# Create a map from safegraph ID to county
sgid_to_county = dict()
fitness_county = pd.read_excel(os.path.join(data_path, 'TX_Fitness_County.xlsx'))
for index, row in fitness_county.iterrows():
sgid = row['safegraph_']
county = row['CNTY_NM']
sgid_to_county[sgid] = county
grocery_county = pd.read_excel(os.path.join(data_path, 'TX_Grocery_County.xlsx'))
for index, row in grocery_county.iterrows():
sgid = row['safegraph_']
county = row['CNTY_NM']
sgid_to_county[sgid] = county
hmotel_county = pd.read_excel(os.path.join(data_path, 'TX_HMotel_County.xlsx'))
for index, row in hmotel_county.iterrows():
sgid = row['safegraph_']
county = row['CNTY_NM']
sgid_to_county[sgid] = county
pharmacy_county = pd.read_excel(os.path.join(data_path, 'TX_Pharmacy_County.xlsx'))
for index, row in pharmacy_county.iterrows():
sgid = row['safegraph_']
county = row['CNTY_NM']
sgid_to_county[sgid] = county
physician_county = pd.read_excel(os.path.join(data_path, 'TX_Physician_County.xlsx'))
for index, row in physician_county.iterrows():
sgid = row['safegraph_']
county = row['CNTY_NM_1']
sgid_to_county[sgid] = county
restaurant_county = pd.read_excel(os.path.join(data_path, 'TX_Restaurant_County.xlsx'))
for index, row in restaurant_county.iterrows():
sgid = row['safegraph_']
county = row['CNTY_NM']
sgid_to_county[sgid] = county
# %%
# Create arrays to store demand data
fitness_demand = np.zeros((num_counties,1))
pharmacy_demand = np.zeros((num_counties,1))
physician_demand = np.zeros((num_counties,1))
hotel_demand = np.zeros((num_counties,1))
religion_demand = np.zeros((num_counties,1))
grocery_demand = np.zeros((num_counties,1))
restaurant_demand = np.zeros((num_counties,1))
# %%
# Process grocery data
print('Processing grocery data...')
grocery_data = pd.read_excel(os.path.join(data_path, 'Intercity_Dallas_Grocery.xlsx'))
grocery_demand_dest_mat = np.zeros((num_counties, num_counties))
for indexDF, rowDF in grocery_data.iterrows():
sgid = rowDF['safegraph_place_id']
destination_county = sgid_to_county[sgid]
origin_county = bg_info[rowDF['visitor_home_cbgs']]['county']
count = rowDF['Count']
destination_ind = county_data[destination_county]['index']
origin_ind = county_data[origin_county]['index']
grocery_demand_dest_mat[origin_ind, destination_ind] = \
int(grocery_demand_dest_mat[origin_ind, destination_ind] + (count * grocery_freq))
for i in range(num_counties):
for j in range(num_counties):
grocery_demand_dest_mat[i,j] = grocery_demand_dest_mat[i,j] * populations[i] / devices[i] * month_day_time_conversion
county_data[counties[i]]['grocery_demand_dest'] = grocery_demand_dest_mat[i, :]
for i in range(num_counties):
grocery_demand[i] = np.sum(grocery_demand_dest_mat[i,:])
if grocery_demand[i] <= min_demand_val:
grocery_demand[i] = min_demand_val
county_data[counties[i]]['grocery_demand'] = grocery_demand[i]
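# The entity blocks below (fitness, pharmacy, physician, hotel, restaurant) repeat the
# same read/aggregate/scale/clamp pattern as the grocery block above. A hypothetical
# helper capturing that pattern, relying on the globals already defined in this script,
# could look roughly like this (a sketch only; the original code does not use it):
#
# def process_entity(xlsx_name, freq):
#     data = pd.read_excel(os.path.join(data_path, xlsx_name))
#     dest_mat = np.zeros((num_counties, num_counties))
#     for _, row in data.iterrows():
#         o = county_data[bg_info[row['visitor_home_cbgs']]['county']]['index']
#         d = county_data[sgid_to_county[row['safegraph_place_id']]]['index']
#         dest_mat[o, d] = int(dest_mat[o, d] + row['Count'] * freq)
#     dest_mat *= (populations / devices)[:, None] * month_day_time_conversion
#     demand = np.maximum(dest_mat.sum(axis=1, keepdims=True), min_demand_val)
#     return dest_mat, demand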
# %%
# Process fitness data
print('Processing fitness data...')
fitness_data = pd.read_excel(os.path.join(data_path, 'Intercity_Dallas_Fitness.xlsx'))
fitness_demand_dest_mat = np.zeros((num_counties, num_counties))
for indexDF, rowDF in fitness_data.iterrows():
sgid = rowDF['safegraph_place_id']
destination_county = sgid_to_county[sgid]
origin_county = bg_info[rowDF['visitor_home_cbgs']]['county']
count = rowDF['Count']
destination_ind = county_data[destination_county]['index']
origin_ind = county_data[origin_county]['index']
fitness_demand_dest_mat[origin_ind, destination_ind] = \
int(fitness_demand_dest_mat[origin_ind, destination_ind] + (count * fitness_freq))
for i in range(num_counties):
for j in range(num_counties):
fitness_demand_dest_mat[i,j] = fitness_demand_dest_mat[i,j] * populations[i] / devices[i] * month_day_time_conversion
county_data[counties[i]]['fitness_demand_dest'] = fitness_demand_dest_mat[i, :]
for i in range(num_counties):
fitness_demand[i] = np.sum(fitness_demand_dest_mat[i,:])
if fitness_demand[i] <= min_demand_val:
fitness_demand[i] = min_demand_val
county_data[counties[i]]['fitness_demand'] = fitness_demand[i]
# %%
# Process pharmacy data
print('Processing pharmacy data...')
pharmacy_data = pd.read_excel(os.path.join(data_path, 'Intercity_Dallas_Pharmacy.xlsx'))
pharmacy_demand_dest_mat = np.zeros((num_counties, num_counties))
for indexDF, rowDF in pharmacy_data.iterrows():
sgid = rowDF['safegraph_place_id']
destination_county = sgid_to_county[sgid]
origin_county = bg_info[rowDF['visitor_home_cbgs']]['county']
count = rowDF['Count']
destination_ind = county_data[destination_county]['index']
origin_ind = county_data[origin_county]['index']
pharmacy_demand_dest_mat[origin_ind, destination_ind] = \
int(pharmacy_demand_dest_mat[origin_ind, destination_ind] + (count * pharmacy_freq))
for i in range(num_counties):
for j in range(num_counties):
pharmacy_demand_dest_mat[i,j] = pharmacy_demand_dest_mat[i,j] * populations[i] / devices[i] * month_day_time_conversion
county_data[counties[i]]['pharmacy_demand_dest'] = pharmacy_demand_dest_mat[i, :]
for i in range(num_counties):
pharmacy_demand[i] = np.sum(pharmacy_demand_dest_mat[i,:])
if pharmacy_demand[i] <= min_demand_val:
pharmacy_demand[i] = min_demand_val
county_data[counties[i]]['pharmacy_demand'] = pharmacy_demand[i]
# %%
# Process physician data
print('Processing physician data...')
physician_data = pd.read_excel(os.path.join(data_path, 'Intercity_Dallas_Physician.xlsx'))
physician_demand_dest_mat = np.zeros((num_counties, num_counties))
for indexDF, rowDF in physician_data.iterrows():
sgid = rowDF['safegraph_place_id']
destination_county = sgid_to_county[sgid]
origin_county = bg_info[rowDF['visitor_home_cbgs']]['county']
count = rowDF['Count']
destination_ind = county_data[destination_county]['index']
origin_ind = county_data[origin_county]['index']
physician_demand_dest_mat[origin_ind, destination_ind] = \
int(physician_demand_dest_mat[origin_ind, destination_ind] + (count * physician_freq))
for i in range(num_counties):
for j in range(num_counties):
physician_demand_dest_mat[i,j] = physician_demand_dest_mat[i,j] * populations[i] / devices[i] * month_day_time_conversion
county_data[counties[i]]['physician_demand_dest'] = physician_demand_dest_mat[i, :]
for i in range(num_counties):
physician_demand[i] = np.sum(physician_demand_dest_mat[i,:])
if physician_demand[i] <= min_demand_val:
physician_demand[i] = min_demand_val
county_data[counties[i]]['physician_demand'] = physician_demand[i]
# %%
# Process hotel data
print('Processing hotel data...')
hotel_data = pd.read_excel(os.path.join(data_path, 'Intercity_Dallas_HotelMotel.xlsx'))
hotel_demand_dest_mat = np.zeros((num_counties, num_counties))
for indexDF, rowDF in hotel_data.iterrows():
sgid = rowDF['safegraph_place_id']
destination_county = sgid_to_county[sgid]
origin_county = bg_info[rowDF['visitor_home_cbgs']]['county']
count = rowDF['Count']
destination_ind = county_data[destination_county]['index']
origin_ind = county_data[origin_county]['index']
hotel_demand_dest_mat[origin_ind, destination_ind] = \
int(hotel_demand_dest_mat[origin_ind, destination_ind] + (count * hotel_freq))
for i in range(num_counties):
for j in range(num_counties):
hotel_demand_dest_mat[i,j] = hotel_demand_dest_mat[i,j] * populations[i] / devices[i] * month_day_time_conversion
county_data[counties[i]]['hotel_demand_dest'] = hotel_demand_dest_mat[i, :]
for i in range(num_counties):
hotel_demand[i] = np.sum(hotel_demand_dest_mat[i,:])
if hotel_demand[i] <= min_demand_val:
hotel_demand[i] = min_demand_val
county_data[counties[i]]['hotel_demand'] = hotel_demand[i]
# %%
# Process restaurant data
print('Processing restaurant data...')
restaurant_data = pd.read_excel(os.path.join(data_path, 'Intercity_Dallas_Restaurant.xlsx'))
restaurant_demand_dest_mat = np.zeros((num_counties, num_counties))
for indexDF, rowDF in restaurant_data.iterrows():
sgid = rowDF['safegraph_place_id']
destination_county = sgid_to_county[sgid]
origin_county = bg_info[rowDF['visitor_home_cbgs']]['county']
count = rowDF['Count']
destination_ind = county_data[destination_county]['index']
origin_ind = county_data[origin_county]['index']
restaurant_demand_dest_mat[origin_ind, destination_ind] = \
int(restaurant_demand_dest_mat[origin_ind, destination_ind] + (count * restaurant_freq))
for i in range(num_counties):
for j in range(num_counties):
restaurant_demand_dest_mat[i,j] = restaurant_demand_dest_mat[i,j] * populations[i] / devices[i] * month_day_time_conversion
county_data[counties[i]]['restaurant_demand_dest'] = restaurant_demand_dest_mat[i, :]
for i in range(num_counties):
restaurant_demand[i] = np.sum(restaurant_demand_dest_mat[i,:])
if restaurant_demand[i] <= min_demand_val:
restaurant_demand[i] = min_demand_val
county_data[counties[i]]['restaurant_demand'] = restaurant_demand[i]
# %%
# Save the results
# First check if the save directory exists
if not os.path.isdir(os.path.join(data_path, 'data_processing_outputs')):
os.mkdir(os.path.join(data_path, 'data_processing_outputs'))
demand_array=np.concatenate((grocery_demand, fitness_demand, pharmacy_demand, physician_demand, hotel_demand, restaurant_demand), axis=1)
demand_array.shape
print(demand_array)
np.save(os.path.join(data_path, 'data_processing_outputs', 'demand_array_dallas.npy'), demand_array)
np.save(os.path.join(data_path, 'data_processing_outputs', 'populations_array_dallas.npy'), populations)
pickle.dump(county_data, open(os.path.join(data_path, 'data_processing_outputs', 'county_data.p'), 'wb'))
# %%
|
[
"numpy.zeros",
"os.path.join",
"numpy.sum",
"numpy.concatenate"
] |
[((269, 325), 'os.path.join', 'os.path.join', (['base_file_path', '"""data"""', '"""Intercity_Dallas"""'], {}), "(base_file_path, 'data', 'Intercity_Dallas')\n", (281, 325), False, 'import sys, os\n'), ((2582, 2607), 'numpy.zeros', 'np.zeros', (['(num_counties,)'], {}), '((num_counties,))\n', (2590, 2607), True, 'import numpy as np\n'), ((2622, 2647), 'numpy.zeros', 'np.zeros', (['(num_counties,)'], {}), '((num_counties,))\n', (2630, 2647), True, 'import numpy as np\n'), ((4778, 4805), 'numpy.zeros', 'np.zeros', (['(num_counties, 1)'], {}), '((num_counties, 1))\n', (4786, 4805), True, 'import numpy as np\n'), ((4823, 4850), 'numpy.zeros', 'np.zeros', (['(num_counties, 1)'], {}), '((num_counties, 1))\n', (4831, 4850), True, 'import numpy as np\n'), ((4869, 4896), 'numpy.zeros', 'np.zeros', (['(num_counties, 1)'], {}), '((num_counties, 1))\n', (4877, 4896), True, 'import numpy as np\n'), ((4911, 4938), 'numpy.zeros', 'np.zeros', (['(num_counties, 1)'], {}), '((num_counties, 1))\n', (4919, 4938), True, 'import numpy as np\n'), ((4956, 4983), 'numpy.zeros', 'np.zeros', (['(num_counties, 1)'], {}), '((num_counties, 1))\n', (4964, 4983), True, 'import numpy as np\n'), ((5000, 5027), 'numpy.zeros', 'np.zeros', (['(num_counties, 1)'], {}), '((num_counties, 1))\n', (5008, 5027), True, 'import numpy as np\n'), ((5047, 5074), 'numpy.zeros', 'np.zeros', (['(num_counties, 1)'], {}), '((num_counties, 1))\n', (5055, 5074), True, 'import numpy as np\n'), ((5253, 5291), 'numpy.zeros', 'np.zeros', (['(num_counties, num_counties)'], {}), '((num_counties, num_counties))\n', (5261, 5291), True, 'import numpy as np\n'), ((6486, 6524), 'numpy.zeros', 'np.zeros', (['(num_counties, num_counties)'], {}), '((num_counties, num_counties))\n', (6494, 6524), True, 'import numpy as np\n'), ((7725, 7763), 'numpy.zeros', 'np.zeros', (['(num_counties, num_counties)'], {}), '((num_counties, num_counties))\n', (7733, 7763), True, 'import numpy as np\n'), ((8984, 9022), 'numpy.zeros', 'np.zeros', (['(num_counties, num_counties)'], {}), '((num_counties, num_counties))\n', (8992, 9022), True, 'import numpy as np\n'), ((10242, 10280), 'numpy.zeros', 'np.zeros', (['(num_counties, num_counties)'], {}), '((num_counties, num_counties))\n', (10250, 10280), True, 'import numpy as np\n'), ((11464, 11502), 'numpy.zeros', 'np.zeros', (['(num_counties, num_counties)'], {}), '((num_counties, num_counties))\n', (11472, 11502), True, 'import numpy as np\n'), ((12784, 12912), 'numpy.concatenate', 'np.concatenate', (['(grocery_demand, fitness_demand, pharmacy_demand, physician_demand,\n hotel_demand, restaurant_demand)'], {'axis': '(1)'}), '((grocery_demand, fitness_demand, pharmacy_demand,\n physician_demand, hotel_demand, restaurant_demand), axis=1)\n', (12798, 12912), True, 'import numpy as np\n'), ((163, 204), 'os.path.join', 'os.path.join', (['os.curdir', '""".."""', '""".."""', '""".."""'], {}), "(os.curdir, '..', '..', '..')\n", (175, 204), False, 'import sys, os\n'), ((1207, 1256), 'os.path.join', 'os.path.join', (['data_path', '"""TX_Fitness_County.xlsx"""'], {}), "(data_path, 'TX_Fitness_County.xlsx')\n", (1219, 1256), False, 'import sys, os\n'), ((1769, 1821), 'os.path.join', 'os.path.join', (['data_path', '"""Population_bg_Dallas.xlsx"""'], {}), "(data_path, 'Population_bg_Dallas.xlsx')\n", (1781, 1821), False, 'import sys, os\n'), ((2260, 2305), 'os.path.join', 'os.path.join', (['data_path', '"""TX_Devices_bg.xlsx"""'], {}), "(data_path, 'TX_Devices_bg.xlsx')\n", (2272, 2305), False, 'import sys, os\n'), ((3419, 3468), 
'os.path.join', 'os.path.join', (['data_path', '"""TX_Fitness_County.xlsx"""'], {}), "(data_path, 'TX_Fitness_County.xlsx')\n", (3431, 3468), False, 'import sys, os\n'), ((3638, 3687), 'os.path.join', 'os.path.join', (['data_path', '"""TX_Grocery_County.xlsx"""'], {}), "(data_path, 'TX_Grocery_County.xlsx')\n", (3650, 3687), False, 'import sys, os\n'), ((3856, 3904), 'os.path.join', 'os.path.join', (['data_path', '"""TX_HMotel_County.xlsx"""'], {}), "(data_path, 'TX_HMotel_County.xlsx')\n", (3868, 3904), False, 'import sys, os\n'), ((4074, 4124), 'os.path.join', 'os.path.join', (['data_path', '"""TX_Pharmacy_County.xlsx"""'], {}), "(data_path, 'TX_Pharmacy_County.xlsx')\n", (4086, 4124), False, 'import sys, os\n'), ((4297, 4348), 'os.path.join', 'os.path.join', (['data_path', '"""TX_Physician_County.xlsx"""'], {}), "(data_path, 'TX_Physician_County.xlsx')\n", (4309, 4348), False, 'import sys, os\n'), ((4525, 4577), 'os.path.join', 'os.path.join', (['data_path', '"""TX_Restaurant_County.xlsx"""'], {}), "(data_path, 'TX_Restaurant_County.xlsx')\n", (4537, 4577), False, 'import sys, os\n'), ((5169, 5225), 'os.path.join', 'os.path.join', (['data_path', '"""Intercity_Dallas_Grocery.xlsx"""'], {}), "(data_path, 'Intercity_Dallas_Grocery.xlsx')\n", (5181, 5225), False, 'import sys, os\n'), ((6117, 6154), 'numpy.sum', 'np.sum', (['grocery_demand_dest_mat[i, :]'], {}), '(grocery_demand_dest_mat[i, :])\n', (6123, 6154), True, 'import numpy as np\n'), ((6402, 6458), 'os.path.join', 'os.path.join', (['data_path', '"""Intercity_Dallas_Fitness.xlsx"""'], {}), "(data_path, 'Intercity_Dallas_Fitness.xlsx')\n", (6414, 6458), False, 'import sys, os\n'), ((7351, 7388), 'numpy.sum', 'np.sum', (['fitness_demand_dest_mat[i, :]'], {}), '(fitness_demand_dest_mat[i, :])\n', (7357, 7388), True, 'import numpy as np\n'), ((7639, 7696), 'os.path.join', 'os.path.join', (['data_path', '"""Intercity_Dallas_Pharmacy.xlsx"""'], {}), "(data_path, 'Intercity_Dallas_Pharmacy.xlsx')\n", (7651, 7696), False, 'import sys, os\n'), ((8599, 8637), 'numpy.sum', 'np.sum', (['pharmacy_demand_dest_mat[i, :]'], {}), '(pharmacy_demand_dest_mat[i, :])\n', (8605, 8637), True, 'import numpy as np\n'), ((8896, 8954), 'os.path.join', 'os.path.join', (['data_path', '"""Intercity_Dallas_Physician.xlsx"""'], {}), "(data_path, 'Intercity_Dallas_Physician.xlsx')\n", (8908, 8954), False, 'import sys, os\n'), ((9867, 9906), 'numpy.sum', 'np.sum', (['physician_demand_dest_mat[i, :]'], {}), '(physician_demand_dest_mat[i, :])\n', (9873, 9906), True, 'import numpy as np\n'), ((10157, 10216), 'os.path.join', 'os.path.join', (['data_path', '"""Intercity_Dallas_HotelMotel.xlsx"""'], {}), "(data_path, 'Intercity_Dallas_HotelMotel.xlsx')\n", (10169, 10216), False, 'import sys, os\n'), ((11089, 11124), 'numpy.sum', 'np.sum', (['hotel_demand_dest_mat[i, :]'], {}), '(hotel_demand_dest_mat[i, :])\n', (11095, 11124), True, 'import numpy as np\n'), ((11374, 11433), 'os.path.join', 'os.path.join', (['data_path', '"""Intercity_Dallas_Restaurant.xlsx"""'], {}), "(data_path, 'Intercity_Dallas_Restaurant.xlsx')\n", (11386, 11433), False, 'import sys, os\n'), ((12356, 12396), 'numpy.sum', 'np.sum', (['restaurant_demand_dest_mat[i, :]'], {}), '(restaurant_demand_dest_mat[i, :])\n', (12362, 12396), True, 'import numpy as np\n'), ((12956, 13033), 'os.path.join', 'os.path.join', (['data_path', '"""data_processing_outputs"""', '"""demand_array_dallas.npy"""'], {}), "(data_path, 'data_processing_outputs', 'demand_array_dallas.npy')\n", (12968, 13033), False, 'import sys, 
os\n'), ((13057, 13143), 'os.path.join', 'os.path.join', (['data_path', '"""data_processing_outputs"""', '"""populations_array_dallas.npy"""'], {}), "(data_path, 'data_processing_outputs',\n 'populations_array_dallas.npy')\n", (13069, 13143), False, 'import sys, os\n'), ((12652, 12702), 'os.path.join', 'os.path.join', (['data_path', '"""data_processing_outputs"""'], {}), "(data_path, 'data_processing_outputs')\n", (12664, 12702), False, 'import sys, os\n'), ((12718, 12768), 'os.path.join', 'os.path.join', (['data_path', '"""data_processing_outputs"""'], {}), "(data_path, 'data_processing_outputs')\n", (12730, 12768), False, 'import sys, os\n'), ((13185, 13252), 'os.path.join', 'os.path.join', (['data_path', '"""data_processing_outputs"""', '"""county_data.p"""'], {}), "(data_path, 'data_processing_outputs', 'county_data.p')\n", (13197, 13252), False, 'import sys, os\n')]
|
#!/usr/bin/env python3
''' Given a string s and an array T of strings, each shorter than s,
write a method that finds every string in T within s.'''
import unittest
class TreeRoot:
def __init__(self, s):
self.root = SuffixTreeNode()
root = self.root
for i in range(len(s)):
root.insertString(s[i:], i)
def search(self, s):
return self.root.search(s)
class SuffixTreeNode:
def __init__(self):
self.indexes = []
        # the node's value exists as a key of self.children
self.children = {}
def insertString(self, s, i):
''' build a sub-tree(children) for characters of `s`
i indicates starting index of sub-string `s` in original string `s` '''
if not s:
return
first = s[0]
remainder = s[1:]
if first not in self.children:
child = SuffixTreeNode()
self.children[first] = child
child = self.children[first]
child.indexes.append(i)
child.insertString(remainder, i)
def search(self, s):
''' follow through sub-nodes for `s` path.
Return indexes of the path if there was.
Otherwise, None'''
#invariant: there is a path in the tree so far.
if not s:
return self.indexes
first = s[0]
remainder = s[1:]
if first in self.children:
return self.children[first].search(remainder)
#invariant: Path cuts here.
return None
#def search(self, s):
# ''' follow through sub-nodes for `s` path.
# Return indexes of the path if there was.
# Otherwise, None'''
# assert s
# #invariant: there is a path in the tree so far.
# first = s[0]
# remainder = s[1:]
# if first not in self.children:
# return None
# child = self.children[first]
# if remainder:
# return child.search(remainder)
# else:
# return child.indexes
class SuffixTreeTest(unittest.TestCase):
def test_sample(self):
root = TreeRoot("bibs")
#self.assertEqual(root.search(""), [])
self.assertEqual(root.search("b"), [0,2])
self.assertEqual(root.search("bi"), [0])
self.assertEqual(root.search("bib"), [0])
self.assertEqual(root.search("bibs"), [0])
self.assertEqual(root.search("i"), [1])
self.assertEqual(root.search("ib"), [1])
self.assertEqual(root.search("ibs"), [1])
self.assertEqual(root.search("bs"), [2])
self.assertEqual(root.search("s"), [3])
self.assertEqual(root.search("not-exist"), None)
if __name__=="__main__":
unittest.main()
|
[
"unittest.main"
] |
[((2658, 2673), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2671, 2673), False, 'import unittest\n')]
|
from time import time
from uuid import UUID
import asyncpg
from app.senders.models import (EmailConfInDb, EmailStatus, Message,
MessageStatus, TelegramConfInDb,
TelegramStatus)
async def insert_email_conf(conn: asyncpg.Connection, conf: EmailConfInDb):
await conn.execute(
"INSERT INTO email_conf(uuid, project_uuid, email) VALUES ($1, $2, $3)",
conf.uuid,
conf.project_uuid,
conf.email,
)
async def insert_telegram_conf(conn: asyncpg.Connection, conf: TelegramConfInDb):
await conn.execute(
"INSERT INTO telegram_conf(uuid, project_uuid, chat_id) VALUES ($1, $2, $3)",
conf.uuid,
conf.project_uuid,
conf.chat_id,
)
async def get_email_conf(
conn: asyncpg.Connection, conf_uuid: UUID
) -> EmailConfInDb | None:
raw: asyncpg.Record = await conn.fetchrow(
"SELECT * FROM email_conf WHERE uuid = $1",
conf_uuid,
)
if raw is None:
return None
return EmailConfInDb(**raw)
async def get_telegram_conf(
conn: asyncpg.Connection, conf_uuid: UUID
) -> TelegramConfInDb | None:
raw: asyncpg.Record = await conn.fetchrow(
"SELECT * FROM telegram_conf WHERE uuid = $1",
conf_uuid,
)
if raw is None:
return None
return TelegramConfInDb(**raw)
async def get_project_confs(
conn: asyncpg.Connection, project_uuid: UUID
) -> list[EmailConfInDb | TelegramConfInDb]:
raw_email_confs: list[asyncpg.Record] = await conn.fetch(
"SELECT * FROM email_conf WHERE project_uuid = $1", project_uuid
)
email_confs = [EmailConfInDb(**c) for c in raw_email_confs]
raw_telegram_confs: list[asyncpg.Record] = await conn.fetch(
"SELECT * FROM telegram_conf WHERE project_uuid = $1", project_uuid
)
telegram_confs = [TelegramConfInDb(**c) for c in raw_telegram_confs]
return [*email_confs, *telegram_confs]
async def insert_message(conn: asyncpg.Connection, message: Message):
await conn.execute(
"""
INSERT INTO messages(uuid, project_uuid, title, text, sync, scheduled_ts, status, attempts)
VALUES($1, $2, $3, $4, $5, $6, $7, $8);
""",
message.uuid,
message.project_uuid,
message.title,
message.text,
message.sync,
message.scheduled_ts,
message.status,
message.attempts,
)
async def get_message(conn: asyncpg.Connection, message_uuid: UUID) -> Message | None:
raw: asyncpg.Record = await conn.fetchrow(
"SELECT * FROM messages WHERE uuid = $1", message_uuid
)
if raw is None:
return None
return Message(**raw)
async def insert_email_statuses(
conn: asyncpg.Connection, email_statuses: list[EmailStatus]
):
await conn.executemany(
"""
INSERT INTO email_status(uuid, message_uuid, email_conf_uuid, status)
VALUES ($1, $2, $3, $4);
""",
[
(
email_status.uuid,
email_status.message_uuid,
email_status.email_conf_uuid,
email_status.status,
)
for email_status in email_statuses
],
)
async def update_email_status(conn: asyncpg.Connection, email_status: EmailStatus):
await conn.execute(
"""
UPDATE email_status SET (message_uuid, email_conf_uuid, status) =
($1, $2, $3)
WHERE uuid = $4;
""",
email_status.message_uuid,
email_status.email_conf_uuid,
email_status.status,
email_status.uuid,
)
async def insert_telegram_statuses(
conn: asyncpg.Connection, telegram_statuses: list[TelegramStatus]
):
await conn.executemany(
"""
INSERT INTO telegram_status(uuid, message_uuid, telegram_conf_uuid, status)
VALUES ($1, $2, $3, $4);
""",
[
(
telegram_status.uuid,
telegram_status.message_uuid,
telegram_status.telegram_conf_uuid,
telegram_status.status,
)
for telegram_status in telegram_statuses
],
)
async def update_telegram_status(
conn: asyncpg.Connection, telegram_status: TelegramStatus
):
await conn.execute(
"""
UPDATE telegram_status SET (message_uuid, telegram_conf_uuid, status) =
($1, $2, $3)
WHERE uuid = $4;
""",
telegram_status.message_uuid,
telegram_status.telegram_conf_uuid,
telegram_status.status,
telegram_status.uuid,
)
async def get_statuses_for_message(
conn: asyncpg.Connection, message_uuid: UUID
) -> list[EmailStatus | TelegramStatus]:
email_raw = await conn.fetch(
"SELECT * FROM email_status WHERE message_uuid = $1", message_uuid
)
email_statuses = [EmailStatus(**s) for s in email_raw]
telegram_raw = await conn.fetch(
"SELECT * FROM telegram_status WHERE message_uuid = $1", message_uuid
)
telegram_statuses = [TelegramStatus(**s) for s in telegram_raw]
return [*email_statuses, *telegram_statuses]
async def get_unprocessed_messages(
conn: asyncpg.Connection, limit: int = 100
) -> list[Message]:
raw = await conn.fetch(
"""
SELECT * FROM messages
WHERE sync = false AND status = $1 AND scheduled_ts <= $2
ORDER BY scheduled_ts
LIMIT $3
""",
MessageStatus.scheduled,
time(),
limit,
)
return [Message(**m) for m in raw]
async def update_message(conn: asyncpg.Connection, message: Message):
await conn.execute(
"""
UPDATE messages SET (project_uuid, title, text, sync, scheduled_ts, status) =
($1, $2, $3, $4, $5, $6)
WHERE uuid = $7;
""",
message.project_uuid,
message.title,
message.text,
message.sync,
message.scheduled_ts,
message.status,
message.uuid,
)
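# Usage sketch: how the helpers above could drive a simple polling worker.
# The DSN and the `MessageStatus.processed` member are illustrative assumptions,
# not part of this module.
async def _example_process_due_messages(dsn: str = "postgresql://localhost/senders") -> None:
    conn = await asyncpg.connect(dsn)  # assumed local DSN
    try:
        # Pick up messages whose scheduled_ts has passed and mark them handled.
        for message in await get_unprocessed_messages(conn, limit=10):
            message.status = MessageStatus.processed  # assumed enum member
            await update_message(conn, message)
    finally:
        await conn.close()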
|
[
"app.senders.models.EmailStatus",
"app.senders.models.EmailConfInDb",
"time.time",
"app.senders.models.TelegramStatus",
"app.senders.models.TelegramConfInDb",
"app.senders.models.Message"
] |
[((1043, 1063), 'app.senders.models.EmailConfInDb', 'EmailConfInDb', ([], {}), '(**raw)\n', (1056, 1063), False, 'from app.senders.models import EmailConfInDb, EmailStatus, Message, MessageStatus, TelegramConfInDb, TelegramStatus\n'), ((1350, 1373), 'app.senders.models.TelegramConfInDb', 'TelegramConfInDb', ([], {}), '(**raw)\n', (1366, 1373), False, 'from app.senders.models import EmailConfInDb, EmailStatus, Message, MessageStatus, TelegramConfInDb, TelegramStatus\n'), ((2704, 2718), 'app.senders.models.Message', 'Message', ([], {}), '(**raw)\n', (2711, 2718), False, 'from app.senders.models import EmailConfInDb, EmailStatus, Message, MessageStatus, TelegramConfInDb, TelegramStatus\n'), ((1659, 1677), 'app.senders.models.EmailConfInDb', 'EmailConfInDb', ([], {}), '(**c)\n', (1672, 1677), False, 'from app.senders.models import EmailConfInDb, EmailStatus, Message, MessageStatus, TelegramConfInDb, TelegramStatus\n'), ((1874, 1895), 'app.senders.models.TelegramConfInDb', 'TelegramConfInDb', ([], {}), '(**c)\n', (1890, 1895), False, 'from app.senders.models import EmailConfInDb, EmailStatus, Message, MessageStatus, TelegramConfInDb, TelegramStatus\n'), ((4912, 4928), 'app.senders.models.EmailStatus', 'EmailStatus', ([], {}), '(**s)\n', (4923, 4928), False, 'from app.senders.models import EmailConfInDb, EmailStatus, Message, MessageStatus, TelegramConfInDb, TelegramStatus\n'), ((5096, 5115), 'app.senders.models.TelegramStatus', 'TelegramStatus', ([], {}), '(**s)\n', (5110, 5115), False, 'from app.senders.models import EmailConfInDb, EmailStatus, Message, MessageStatus, TelegramConfInDb, TelegramStatus\n'), ((5578, 5590), 'app.senders.models.Message', 'Message', ([], {}), '(**m)\n', (5585, 5590), False, 'from app.senders.models import EmailConfInDb, EmailStatus, Message, MessageStatus, TelegramConfInDb, TelegramStatus\n'), ((5536, 5542), 'time.time', 'time', ([], {}), '()\n', (5540, 5542), False, 'from time import time\n')]
|
# coding: utf-8
import matplotlib.pyplot as plt
import csv
"""
This script gathers force/RMSE data from the training results of the
GaN 350-sample runs and plots them.
"""
if __name__ == '__main__':
GaN350folder="/home/okugawa/NNP-F/GaN/SMZ-200901/training_2element/350smpl/"
outfile=GaN350folder+"result/RMSE.csv"
pltfile=GaN350folder+"result/fRMSE.png"
pltdata=[[] for i in range(10)]
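    # Parse each run's testjob.dat for data counts and energy/force RMSE values
    # (converted from eV to meV); the force/test RMSE of runs 1-10 is paired with
    # runs 11-20 so that every pltdata series spans the two x-axis categories below.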
with open(outfile, 'w') as outf:
writer1 = csv.writer(outf, lineterminator='\n')
for i in range(1,21):
testjobfile= GaN350folder+str(i)+"/testjob.dat"
with open(testjobfile, 'r') as testjob:
for line in testjob:
if "Total number of data:" in line:
totnum=int(line.split()[4])
elif "Number of training data:" in line:
trnum=int(line.split()[4])
elif "Number of test data:" in line:
tsnum=int(line.split()[4])
elif "# RMSE of training:" in line:
if "eV/atom" in line:
etrn=float(line.split()[4])*1000
elif "eV/ang" in line:
ftrn=float(line.split()[4])*1000
elif "# RMSE of test:" in line:
if "eV/atom" in line:
etstdt=line.split()[4]
if etstdt=="NaN":
etst=etstdt
else:
etst=float(etstdt)*1000
elif "eV/ang" in line:
ftstdt=line.split()[4]
if ftstdt=="NaN":
ftst=ftstdt
else:
ftst=float(ftstdt)*1000
if i<11:
pltdata[i-1].append(ftst)
else:
pltdata[i-11].append(ftst)
wrdata= [i,totnum,trnum,tsnum,etrn,ftrn,etst,ftst]
writer1.writerow(wrdata)
#Plot force/RMSE data
xlbl=["2:8","5:5"]
clr=["b","green"]
fig = plt.figure()
ax1 = fig.add_subplot(111)
plt.title("GaN 350sample force/RMSE")
ax1.set_xlabel("Loss-F Energy:Force")
ax1.set_ylabel("force/RMSE (meV/ang)")
ax1.grid(True)
for j in range(10):
ax1.scatter(xlbl,pltdata[j],c=clr,marker='.')
plt.savefig(pltfile)
plt.close()
|
[
"matplotlib.pyplot.title",
"csv.writer",
"matplotlib.pyplot.close",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.savefig"
] |
[((2310, 2322), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (2320, 2322), True, 'import matplotlib.pyplot as plt\n'), ((2358, 2395), 'matplotlib.pyplot.title', 'plt.title', (['"""GaN 350sample force/RMSE"""'], {}), "('GaN 350sample force/RMSE')\n", (2367, 2395), True, 'import matplotlib.pyplot as plt\n'), ((2582, 2602), 'matplotlib.pyplot.savefig', 'plt.savefig', (['pltfile'], {}), '(pltfile)\n', (2593, 2602), True, 'import matplotlib.pyplot as plt\n'), ((2607, 2618), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (2616, 2618), True, 'import matplotlib.pyplot as plt\n'), ((459, 496), 'csv.writer', 'csv.writer', (['outf'], {'lineterminator': '"""\n"""'}), "(outf, lineterminator='\\n')\n", (469, 496), False, 'import csv\n')]
|
from cv2 import cv2
import numpy as np
import anki_vector
from anki_vector.util import distance_mm, speed_mmps, degrees
def empty(a):
pass
robot=anki_vector.Robot()
robot.connect()
robot.camera.init_camera_feed()
robot.behavior.set_lift_height(0.0)
robot.behavior.set_head_angle(degrees(0))
cv2.namedWindow("TrackBars")
cv2.resizeWindow("TrackBars", 640, 600)
cv2.createTrackbar("Hue Min", "TrackBars", 10, 179, empty)
cv2.createTrackbar("Hue Max", "TrackBars", 47, 179, empty)
cv2.createTrackbar("Sat Min", "TrackBars", 66, 255, empty)
cv2.createTrackbar("Sat Max", "TrackBars", 186, 255, empty)
cv2.createTrackbar("Val Min", "TrackBars", 171, 255, empty)
cv2.createTrackbar("Val Max", "TrackBars", 255, 255, empty)
while True:
h_min = cv2.getTrackbarPos("Hue Min", "TrackBars")
h_max = cv2.getTrackbarPos("Hue Max", "TrackBars")
s_min = cv2.getTrackbarPos("Sat Min", "TrackBars")
s_max = cv2.getTrackbarPos("Sat Max", "TrackBars")
v_min = cv2.getTrackbarPos("Val Min", "TrackBars")
v_max = cv2.getTrackbarPos("Val Max", "TrackBars")
img = np.array(robot.camera.latest_image.raw_image)
img = cv2.cvtColor(img, cv2.COLOR_RGB2BGR)
imgBlur = cv2.GaussianBlur(img, (3,3), 1)
imgHSV = cv2.cvtColor(imgBlur, cv2.COLOR_BGR2HSV)
print(h_min, h_max, s_min, s_max, v_min, v_max)
lower = np.array([h_min, s_min, v_min])
upper = np.array([h_max, s_max, v_max])
mask = cv2.inRange(imgHSV, lower, upper)
# Alternative method to find the Ball: Approximation of the area with a Polygon.
contours, hierarchy = cv2.findContours(mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
for cnt in contours:
peri = cv2.arcLength(cnt, True)
approx = cv2.approxPolyDP(cnt, 0.02*peri,True)
objCor = len(approx) # Number of corners
print(objCor)
x, y, w, h = cv2.boundingRect(approx)
if objCor > 6:
cv2.circle(img, center=(int(x+w/2), int(y+h/2)), radius=int((h)/2), color=(0, 255, 0), thickness=3)
cv2.imshow("Camera", img)
cv2.imshow("Mask", mask)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
|
[
"cv2.cv2.namedWindow",
"cv2.cv2.arcLength",
"anki_vector.Robot",
"cv2.cv2.boundingRect",
"cv2.cv2.resizeWindow",
"cv2.cv2.getTrackbarPos",
"cv2.cv2.findContours",
"cv2.cv2.inRange",
"cv2.cv2.approxPolyDP",
"anki_vector.util.degrees",
"cv2.cv2.createTrackbar",
"numpy.array",
"cv2.cv2.GaussianBlur",
"cv2.cv2.waitKey",
"cv2.cv2.cvtColor",
"cv2.cv2.imshow"
] |
[((154, 173), 'anki_vector.Robot', 'anki_vector.Robot', ([], {}), '()\n', (171, 173), False, 'import anki_vector\n'), ((301, 329), 'cv2.cv2.namedWindow', 'cv2.namedWindow', (['"""TrackBars"""'], {}), "('TrackBars')\n", (316, 329), False, 'from cv2 import cv2\n'), ((330, 369), 'cv2.cv2.resizeWindow', 'cv2.resizeWindow', (['"""TrackBars"""', '(640)', '(600)'], {}), "('TrackBars', 640, 600)\n", (346, 369), False, 'from cv2 import cv2\n'), ((370, 428), 'cv2.cv2.createTrackbar', 'cv2.createTrackbar', (['"""Hue Min"""', '"""TrackBars"""', '(10)', '(179)', 'empty'], {}), "('Hue Min', 'TrackBars', 10, 179, empty)\n", (388, 428), False, 'from cv2 import cv2\n'), ((429, 487), 'cv2.cv2.createTrackbar', 'cv2.createTrackbar', (['"""Hue Max"""', '"""TrackBars"""', '(47)', '(179)', 'empty'], {}), "('Hue Max', 'TrackBars', 47, 179, empty)\n", (447, 487), False, 'from cv2 import cv2\n'), ((488, 546), 'cv2.cv2.createTrackbar', 'cv2.createTrackbar', (['"""Sat Min"""', '"""TrackBars"""', '(66)', '(255)', 'empty'], {}), "('Sat Min', 'TrackBars', 66, 255, empty)\n", (506, 546), False, 'from cv2 import cv2\n'), ((547, 606), 'cv2.cv2.createTrackbar', 'cv2.createTrackbar', (['"""Sat Max"""', '"""TrackBars"""', '(186)', '(255)', 'empty'], {}), "('Sat Max', 'TrackBars', 186, 255, empty)\n", (565, 606), False, 'from cv2 import cv2\n'), ((607, 666), 'cv2.cv2.createTrackbar', 'cv2.createTrackbar', (['"""Val Min"""', '"""TrackBars"""', '(171)', '(255)', 'empty'], {}), "('Val Min', 'TrackBars', 171, 255, empty)\n", (625, 666), False, 'from cv2 import cv2\n'), ((667, 726), 'cv2.cv2.createTrackbar', 'cv2.createTrackbar', (['"""Val Max"""', '"""TrackBars"""', '(255)', '(255)', 'empty'], {}), "('Val Max', 'TrackBars', 255, 255, empty)\n", (685, 726), False, 'from cv2 import cv2\n'), ((288, 298), 'anki_vector.util.degrees', 'degrees', (['(0)'], {}), '(0)\n', (295, 298), False, 'from anki_vector.util import distance_mm, speed_mmps, degrees\n'), ((757, 799), 'cv2.cv2.getTrackbarPos', 'cv2.getTrackbarPos', (['"""Hue Min"""', '"""TrackBars"""'], {}), "('Hue Min', 'TrackBars')\n", (775, 799), False, 'from cv2 import cv2\n'), ((812, 854), 'cv2.cv2.getTrackbarPos', 'cv2.getTrackbarPos', (['"""Hue Max"""', '"""TrackBars"""'], {}), "('Hue Max', 'TrackBars')\n", (830, 854), False, 'from cv2 import cv2\n'), ((867, 909), 'cv2.cv2.getTrackbarPos', 'cv2.getTrackbarPos', (['"""Sat Min"""', '"""TrackBars"""'], {}), "('Sat Min', 'TrackBars')\n", (885, 909), False, 'from cv2 import cv2\n'), ((922, 964), 'cv2.cv2.getTrackbarPos', 'cv2.getTrackbarPos', (['"""Sat Max"""', '"""TrackBars"""'], {}), "('Sat Max', 'TrackBars')\n", (940, 964), False, 'from cv2 import cv2\n'), ((977, 1019), 'cv2.cv2.getTrackbarPos', 'cv2.getTrackbarPos', (['"""Val Min"""', '"""TrackBars"""'], {}), "('Val Min', 'TrackBars')\n", (995, 1019), False, 'from cv2 import cv2\n'), ((1032, 1074), 'cv2.cv2.getTrackbarPos', 'cv2.getTrackbarPos', (['"""Val Max"""', '"""TrackBars"""'], {}), "('Val Max', 'TrackBars')\n", (1050, 1074), False, 'from cv2 import cv2\n'), ((1086, 1131), 'numpy.array', 'np.array', (['robot.camera.latest_image.raw_image'], {}), '(robot.camera.latest_image.raw_image)\n', (1094, 1131), True, 'import numpy as np\n'), ((1142, 1178), 'cv2.cv2.cvtColor', 'cv2.cvtColor', (['img', 'cv2.COLOR_RGB2BGR'], {}), '(img, cv2.COLOR_RGB2BGR)\n', (1154, 1178), False, 'from cv2 import cv2\n'), ((1193, 1225), 'cv2.cv2.GaussianBlur', 'cv2.GaussianBlur', (['img', '(3, 3)', '(1)'], {}), '(img, (3, 3), 1)\n', (1209, 1225), False, 'from cv2 import cv2\n'), ((1238, 1278), 
'cv2.cv2.cvtColor', 'cv2.cvtColor', (['imgBlur', 'cv2.COLOR_BGR2HSV'], {}), '(imgBlur, cv2.COLOR_BGR2HSV)\n', (1250, 1278), False, 'from cv2 import cv2\n'), ((1344, 1375), 'numpy.array', 'np.array', (['[h_min, s_min, v_min]'], {}), '([h_min, s_min, v_min])\n', (1352, 1375), True, 'import numpy as np\n'), ((1388, 1419), 'numpy.array', 'np.array', (['[h_max, s_max, v_max]'], {}), '([h_max, s_max, v_max])\n', (1396, 1419), True, 'import numpy as np\n'), ((1431, 1464), 'cv2.cv2.inRange', 'cv2.inRange', (['imgHSV', 'lower', 'upper'], {}), '(imgHSV, lower, upper)\n', (1442, 1464), False, 'from cv2 import cv2\n'), ((1580, 1646), 'cv2.cv2.findContours', 'cv2.findContours', (['mask', 'cv2.RETR_EXTERNAL', 'cv2.CHAIN_APPROX_SIMPLE'], {}), '(mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)\n', (1596, 1646), False, 'from cv2 import cv2\n'), ((2037, 2062), 'cv2.cv2.imshow', 'cv2.imshow', (['"""Camera"""', 'img'], {}), "('Camera', img)\n", (2047, 2062), False, 'from cv2 import cv2\n'), ((2067, 2091), 'cv2.cv2.imshow', 'cv2.imshow', (['"""Mask"""', 'mask'], {}), "('Mask', mask)\n", (2077, 2091), False, 'from cv2 import cv2\n'), ((1687, 1711), 'cv2.cv2.arcLength', 'cv2.arcLength', (['cnt', '(True)'], {}), '(cnt, True)\n', (1700, 1711), False, 'from cv2 import cv2\n'), ((1729, 1769), 'cv2.cv2.approxPolyDP', 'cv2.approxPolyDP', (['cnt', '(0.02 * peri)', '(True)'], {}), '(cnt, 0.02 * peri, True)\n', (1745, 1769), False, 'from cv2 import cv2\n'), ((1859, 1883), 'cv2.cv2.boundingRect', 'cv2.boundingRect', (['approx'], {}), '(approx)\n', (1875, 1883), False, 'from cv2 import cv2\n'), ((2100, 2114), 'cv2.cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (2111, 2114), False, 'from cv2 import cv2\n')]
|
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest import mock
from rally_openstack.task.contexts.network import existing_network
from tests.unit import test
CTX = "rally_openstack.task.contexts.network"
class ExistingNetworkTestCase(test.TestCase):
def setUp(self):
super(ExistingNetworkTestCase, self).setUp()
self.config = {"foo": "bar"}
self.context = test.get_test_context()
self.context.update({
"users": [
{"id": 1,
"tenant_id": "tenant1",
"credential": mock.Mock(tenant_name="tenant_1")},
{"id": 2,
"tenant_id": "tenant2",
"credential": mock.Mock(tenant_name="tenant_2")},
],
"tenants": {
"tenant1": {},
"tenant2": {},
},
"config": {
"existing_network": self.config
},
})
@mock.patch("rally_openstack.common.osclients.Clients")
def test_setup(self, mock_clients):
clients = {
# key is tenant_name
"tenant_1": mock.MagicMock(),
"tenant_2": mock.MagicMock()
}
mock_clients.side_effect = lambda cred: clients[cred.tenant_name]
networks = {
# key is tenant_id
"tenant_1": [mock.Mock(), mock.Mock()],
"tenant_2": [mock.Mock()]
}
subnets = {
# key is tenant_id
"tenant_1": [mock.Mock()],
"tenant_2": [mock.Mock()]
}
neutron1 = clients["tenant_1"].neutron.return_value
neutron2 = clients["tenant_2"].neutron.return_value
neutron1.list_networks.return_value = {
"networks": networks["tenant_1"]}
neutron2.list_networks.return_value = {
"networks": networks["tenant_2"]}
neutron1.list_subnets.return_value = {"subnets": subnets["tenant_1"]}
neutron2.list_subnets.return_value = {"subnets": subnets["tenant_2"]}
context = existing_network.ExistingNetwork(self.context)
context.setup()
mock_clients.assert_has_calls([
mock.call(u["credential"]) for u in self.context["users"]])
neutron1.list_networks.assert_called_once_with()
neutron1.list_subnets.assert_called_once_with()
neutron2.list_networks.assert_called_once_with()
neutron2.list_subnets.assert_called_once_with()
self.assertEqual(
self.context["tenants"],
{
"tenant1": {"networks": networks["tenant_1"],
"subnets": subnets["tenant_1"]},
"tenant2": {"networks": networks["tenant_2"],
"subnets": subnets["tenant_2"]},
}
)
def test_cleanup(self):
# NOTE(stpierre): Test that cleanup is not abstract
existing_network.ExistingNetwork({"task": mock.MagicMock()}).cleanup()
|
[
"rally_openstack.task.contexts.network.existing_network.ExistingNetwork",
"unittest.mock.MagicMock",
"unittest.mock.Mock",
"unittest.mock.patch",
"tests.unit.test.get_test_context",
"unittest.mock.call"
] |
[((1485, 1539), 'unittest.mock.patch', 'mock.patch', (['"""rally_openstack.common.osclients.Clients"""'], {}), "('rally_openstack.common.osclients.Clients')\n", (1495, 1539), False, 'from unittest import mock\n'), ((919, 942), 'tests.unit.test.get_test_context', 'test.get_test_context', ([], {}), '()\n', (940, 942), False, 'from tests.unit import test\n'), ((2574, 2620), 'rally_openstack.task.contexts.network.existing_network.ExistingNetwork', 'existing_network.ExistingNetwork', (['self.context'], {}), '(self.context)\n', (2606, 2620), False, 'from rally_openstack.task.contexts.network import existing_network\n'), ((1657, 1673), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (1671, 1673), False, 'from unittest import mock\n'), ((1699, 1715), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (1713, 1715), False, 'from unittest import mock\n'), ((1878, 1889), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (1887, 1889), False, 'from unittest import mock\n'), ((1891, 1902), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (1900, 1902), False, 'from unittest import mock\n'), ((1930, 1941), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (1939, 1941), False, 'from unittest import mock\n'), ((2029, 2040), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (2038, 2040), False, 'from unittest import mock\n'), ((2068, 2079), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (2077, 2079), False, 'from unittest import mock\n'), ((2698, 2724), 'unittest.mock.call', 'mock.call', (["u['credential']"], {}), "(u['credential'])\n", (2707, 2724), False, 'from unittest import mock\n'), ((1094, 1127), 'unittest.mock.Mock', 'mock.Mock', ([], {'tenant_name': '"""tenant_1"""'}), "(tenant_name='tenant_1')\n", (1103, 1127), False, 'from unittest import mock\n'), ((1228, 1261), 'unittest.mock.Mock', 'mock.Mock', ([], {'tenant_name': '"""tenant_2"""'}), "(tenant_name='tenant_2')\n", (1237, 1261), False, 'from unittest import mock\n'), ((3472, 3488), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (3486, 3488), False, 'from unittest import mock\n')]
|
# coding: utf-8
from unidecode import unidecode
import re
from .utils import stop_words
class Parser:
"""Parse user's query"""
def __init__(self, user_query):
self.user_query = user_query
def clean_string(self):
"""remove accents, upper and punctuation
and split into list
compare to stop_words reference and remove found items"""
cleaned = unidecode(self.user_query).lower()
        cleaned = re.compile(r"\w+").findall(cleaned)
return [item for item in cleaned if item not in stop_words]
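# Illustrative usage (the exact result depends on the stop_words list):
# Parser("Où se trouve la Tour Eiffel ?").clean_string() -> ['tour', 'eiffel']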
|
[
"unidecode.unidecode",
"re.compile"
] |
[((399, 425), 'unidecode.unidecode', 'unidecode', (['self.user_query'], {}), '(self.user_query)\n', (408, 425), False, 'from unidecode import unidecode\n'), ((452, 470), 're.compile', 're.compile', (['"""\\\\w+"""'], {}), "('\\\\w+')\n", (462, 470), False, 'import re\n')]
|
from rest_framework.response import Response
class SortModelMixin(object):
sort_child_name = None
sort_parent = None
sort_serializer = None
def get_sort_serializer(self, *args, **kwargs):
serializer_class = self.sort_serializer
kwargs["context"] = self.get_serializer_context()
return serializer_class(*args, **kwargs)
def sort(self, request, *args, **kwargs):
parent_pk = kwargs.get("pk", None)
parent = self.sort_parent.objects.get(pk=parent_pk)
serializer = self.get_sort_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
serializer.save(parent)
collection = getattr(parent, self.sort_child_name).all()
serializer = self.get_serializer(collection, many=True)
return Response(serializer.data)
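# Usage sketch with illustrative names (Document and SectionOrderSerializer are
# assumptions, not part of this module):
#
# class SectionViewSet(SortModelMixin, viewsets.ModelViewSet):
#     sort_parent = Document                      # model that owns the sortable children
#     sort_child_name = "sections"                # related name of the child collection
#     sort_serializer = SectionOrderSerializer    # serializer whose save() reorders them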
|
[
"rest_framework.response.Response"
] |
[((807, 832), 'rest_framework.response.Response', 'Response', (['serializer.data'], {}), '(serializer.data)\n', (815, 832), False, 'from rest_framework.response import Response\n')]
|
# -*- coding: utf-8 -*-
from metamapper.celery import app
from datetime import timedelta
from django.utils.timezone import now
from app.audit.models import Activity
@app.task(bind=True)
def audit(self,
actor_id,
workspace_id,
verb,
old_values,
new_values,
extras=None,
target_object_id=None,
target_content_type_id=None,
action_object_object_id=None,
action_object_content_type_id=None):
"""Task to commit an audit activity to a database.
"""
activity_kwargs = {
'actor_id': actor_id,
'workspace_id': workspace_id,
'verb': verb,
'target_object_id': target_object_id,
'target_content_type_id': target_content_type_id,
'action_object_object_id': action_object_object_id,
'action_object_content_type_id': action_object_content_type_id,
}
defaults = {
'extras': extras or {},
'timestamp': now(),
'old_values': old_values,
'new_values': new_values,
}
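    # Activities with the same actor/verb/target issued within the last 15 minutes
    # (and carrying the same old_values keys) are coalesced into the existing row
    # instead of creating a new one.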
datefrom = now() - timedelta(minutes=15)
queryset = (
Activity.objects
.filter(**activity_kwargs)
.filter(timestamp__gte=datefrom)
)
for field in old_values.keys():
queryset = queryset.filter(old_values__has_key=field)
activity = queryset.first()
if activity:
activity.update_attributes(**defaults)
else:
activity_kwargs.update(defaults)
activity = Activity.objects.create(**activity_kwargs)
return activity.pk
|
[
"django.utils.timezone.now",
"app.audit.models.Activity.objects.create",
"metamapper.celery.app.task",
"datetime.timedelta",
"app.audit.models.Activity.objects.filter"
] |
[((170, 189), 'metamapper.celery.app.task', 'app.task', ([], {'bind': '(True)'}), '(bind=True)\n', (178, 189), False, 'from metamapper.celery import app\n'), ((982, 987), 'django.utils.timezone.now', 'now', ([], {}), '()\n', (985, 987), False, 'from django.utils.timezone import now\n'), ((1079, 1084), 'django.utils.timezone.now', 'now', ([], {}), '()\n', (1082, 1084), False, 'from django.utils.timezone import now\n'), ((1087, 1108), 'datetime.timedelta', 'timedelta', ([], {'minutes': '(15)'}), '(minutes=15)\n', (1096, 1108), False, 'from datetime import timedelta\n'), ((1516, 1558), 'app.audit.models.Activity.objects.create', 'Activity.objects.create', ([], {}), '(**activity_kwargs)\n', (1539, 1558), False, 'from app.audit.models import Activity\n'), ((1134, 1176), 'app.audit.models.Activity.objects.filter', 'Activity.objects.filter', ([], {}), '(**activity_kwargs)\n', (1157, 1176), False, 'from app.audit.models import Activity\n')]
|
from src import swift_project
from helpers import path_helper
import unittest
class TestSourceKitten(unittest.TestCase):
# Test with a simple project directory
# (i.e. without xcodeproj)
def test_source_files_simple_project(self):
project_directory = path_helper.monkey_example_directory()
output = swift_project.source_files(project_directory)
expectation = [
project_directory + "/Banana.swift",
project_directory + "/Monkey.swift"
]
self.assertEqual(sorted(list(output)), sorted(expectation))
|
[
"helpers.path_helper.monkey_example_directory",
"src.swift_project.source_files"
] |
[((272, 310), 'helpers.path_helper.monkey_example_directory', 'path_helper.monkey_example_directory', ([], {}), '()\n', (308, 310), False, 'from helpers import path_helper\n'), ((329, 374), 'src.swift_project.source_files', 'swift_project.source_files', (['project_directory'], {}), '(project_directory)\n', (355, 374), False, 'from src import swift_project\n')]
|
import pandas as pd
file = r'file.log'
cols=['host','1','userid','date','tz','endpoint','status','data','referer','user_agent']
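# The columns follow an Apache/NCSA-style access log; '1' is a placeholder
# column that is dropped immediately after parsing.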
df=pd.read_csv(file,delim_whitespace=True,names=cols).drop(columns='1')
print (df.head())
unique_ip=df.host.unique()
print(unique_ip)
total = df['data'].sum()
print('the server traffic is :',(total))
status_freq = pd.DataFrame(columns=['status', 'Frequency'])
status_freq['Frequency'] = df.groupby('status').size()
status_freq['status']=df.groupby('status').agg({'status':lambda x:list(x)[0]})
ap = status_freq[status_freq['status']>=500].sum()
print ('requests that generated a 5xx server error :',(ap['Frequency']))
print('distinct ips that visited the server :',len(unique_ip))
|
[
"pandas.DataFrame",
"pandas.read_csv"
] |
[((365, 410), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': "['status', 'Frequency']"}), "(columns=['status', 'Frequency'])\n", (377, 410), True, 'import pandas as pd\n'), ((140, 192), 'pandas.read_csv', 'pd.read_csv', (['file'], {'delim_whitespace': '(True)', 'names': 'cols'}), '(file, delim_whitespace=True, names=cols)\n', (151, 192), True, 'import pandas as pd\n')]
|
import unittest
from calculator import *
class CalculatorTest(unittest.TestCase):
def test_suma_dos_numeros(self):
calc = Calculator(5, 10)
self.assertEqual(15, calc.suma())
def test_resta_dos_numeros(self):
calc = Calculator(19, 8)
self.assertEqual(11, calc.resta())
def test_multiplica_dos_numeros(self):
calc = Calculator(42, 2)
self.assertEqual(84, calc.multiplicacion())
def test_divide_dos_numeros(self):
calc = Calculator(18, 3)
self.assertEqual(6, calc.division())
def test_potencia_de_un_numero(self):
calc = Calculator(3, 3)
self.assertEqual(27, calc.potencia())
def test_raiz_de_un_numero(self):
calc = Calculator(216, 3)
self.assertEqual(6, calc.raiz())
def test_dividir_entre_cero(self):
calc = Calculator(25, 0)
self.assertEqual(0, calc.division())
def test_dividir_entre_cero_2(self):
calc = Calculator(0, 25)
self.assertEqual(0, calc.division())
def test_raiz_num_negativo(self):
calc = Calculator(-8,2)
self.assertEqual(0, calc.raiz())
if __name__ == "__main__":
unittest.main()
|
[
"unittest.main"
] |
[((1188, 1203), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1201, 1203), False, 'import unittest\n')]
|
import copy
import logging
from dataclasses import dataclass
from typing import Any, Optional, Type, TypeVar
from thenewboston_node.business_logic.exceptions import ValidationError
from thenewboston_node.business_logic.models.base import BaseDataclass
from thenewboston_node.core.logging import validates
from thenewboston_node.core.utils.cryptography import derive_public_key
from thenewboston_node.core.utils.dataclass import cover_docstring, revert_docstring
from thenewboston_node.core.utils.types import hexstr
from ..mixins.signable import SignableMixin
from ..signed_change_request_message import SignedChangeRequestMessage
T = TypeVar('T', bound='SignedChangeRequest')
logger = logging.getLogger(__name__)
@revert_docstring
@dataclass
@cover_docstring
class SignedChangeRequest(SignableMixin, BaseDataclass):
message: SignedChangeRequestMessage
@classmethod
def deserialize_from_dict(cls, dict_, complain_excessive_keys=True, override: Optional[dict[str, Any]] = None):
from . import SIGNED_CHANGE_REQUEST_TYPE_MAP
# TODO(dmu) MEDIUM: This polymorphic deserializer duplicates the logic in Block/BlockMessage.
# Consider keeping only this serializer
# TODO(dmu) MEDIUM: Maybe we do not really need to subclass SignedChangeRequest, but
# subclassing of SignedChangeRequestMessage is enough
signed_change_request_type = (dict_.get('message') or {}).get('signed_change_request_type')
if cls == SignedChangeRequest:
class_ = SIGNED_CHANGE_REQUEST_TYPE_MAP.get(signed_change_request_type)
if class_ is None:
raise ValidationError('message.signed_change_request_type must be provided')
return class_.deserialize_from_dict(dict_, complain_excessive_keys=complain_excessive_keys) # type: ignore
if signed_change_request_type:
class_ = SIGNED_CHANGE_REQUEST_TYPE_MAP.get(signed_change_request_type)
if class_ is None:
raise ValidationError(f'Unsupported signed_change_request_type: {signed_change_request_type}')
if not issubclass(cls, class_):
raise ValidationError(
f'{cls} does not match with signed_change_request_type: {signed_change_request_type}'
)
return super().deserialize_from_dict(dict_, complain_excessive_keys=complain_excessive_keys)
@classmethod
def create_from_signed_change_request_message(
cls: Type[T], message: SignedChangeRequestMessage, signing_key: hexstr
) -> T:
request = cls(signer=derive_public_key(signing_key), message=copy.deepcopy(message))
request.sign(signing_key)
return request
@validates('signed request')
def validate(self, blockchain, block_number: int):
self.validate_message()
with validates('block signature'):
self.validate_signature()
@validates('signed request message')
def validate_message(self):
self.message.validate()
def get_updated_account_states(self, blockchain):
raise NotImplementedError('Must be implemented in subclass')
|
[
"thenewboston_node.core.utils.cryptography.derive_public_key",
"copy.deepcopy",
"thenewboston_node.core.logging.validates",
"typing.TypeVar",
"thenewboston_node.business_logic.exceptions.ValidationError",
"logging.getLogger"
] |
[((638, 679), 'typing.TypeVar', 'TypeVar', (['"""T"""'], {'bound': '"""SignedChangeRequest"""'}), "('T', bound='SignedChangeRequest')\n", (645, 679), False, 'from typing import Any, Optional, Type, TypeVar\n'), ((690, 717), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (707, 717), False, 'import logging\n'), ((2753, 2780), 'thenewboston_node.core.logging.validates', 'validates', (['"""signed request"""'], {}), "('signed request')\n", (2762, 2780), False, 'from thenewboston_node.core.logging import validates\n'), ((2955, 2990), 'thenewboston_node.core.logging.validates', 'validates', (['"""signed request message"""'], {}), "('signed request message')\n", (2964, 2990), False, 'from thenewboston_node.core.logging import validates\n'), ((2881, 2909), 'thenewboston_node.core.logging.validates', 'validates', (['"""block signature"""'], {}), "('block signature')\n", (2890, 2909), False, 'from thenewboston_node.core.logging import validates\n'), ((1669, 1739), 'thenewboston_node.business_logic.exceptions.ValidationError', 'ValidationError', (['"""message.signed_change_request_type must be provided"""'], {}), "('message.signed_change_request_type must be provided')\n", (1684, 1739), False, 'from thenewboston_node.business_logic.exceptions import ValidationError\n'), ((2038, 2131), 'thenewboston_node.business_logic.exceptions.ValidationError', 'ValidationError', (['f"""Unsupported signed_change_request_type: {signed_change_request_type}"""'], {}), "(\n f'Unsupported signed_change_request_type: {signed_change_request_type}')\n", (2053, 2131), False, 'from thenewboston_node.business_logic.exceptions import ValidationError\n'), ((2194, 2306), 'thenewboston_node.business_logic.exceptions.ValidationError', 'ValidationError', (['f"""{cls} does not match with signed_change_request_type: {signed_change_request_type}"""'], {}), "(\n f'{cls} does not match with signed_change_request_type: {signed_change_request_type}'\n )\n", (2209, 2306), False, 'from thenewboston_node.business_logic.exceptions import ValidationError\n'), ((2626, 2656), 'thenewboston_node.core.utils.cryptography.derive_public_key', 'derive_public_key', (['signing_key'], {}), '(signing_key)\n', (2643, 2656), False, 'from thenewboston_node.core.utils.cryptography import derive_public_key\n'), ((2666, 2688), 'copy.deepcopy', 'copy.deepcopy', (['message'], {}), '(message)\n', (2679, 2688), False, 'import copy\n')]
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: harness/grpc.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from harness import net_pb2 as harness_dot_net__pb2
from harness import wire_pb2 as harness_dot_wire__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='harness/grpc.proto',
package='harness.grpc',
syntax='proto3',
serialized_options=None,
serialized_pb=b'\n\x12harness/grpc.proto\x12\x0charness.grpc\x1a\x11harness/net.proto\x1a\x12harness/wire.proto\"6\n\x07\x43hannel\x12+\n\x07\x61\x64\x64ress\x18\x01 \x01(\x0b\x32\x13.harness.net.SocketB\x05\x92}\x02\x08\x02\"2\n\x06Server\x12(\n\x04\x62ind\x18\x01 \x01(\x0b\x32\x13.harness.net.SocketB\x05\x92}\x02\x08\x02\x62\x06proto3'
,
dependencies=[harness_dot_net__pb2.DESCRIPTOR,harness_dot_wire__pb2.DESCRIPTOR,])
_CHANNEL = _descriptor.Descriptor(
name='Channel',
full_name='harness.grpc.Channel',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='address', full_name='harness.grpc.Channel.address', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\222}\002\010\002', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=75,
serialized_end=129,
)
_SERVER = _descriptor.Descriptor(
name='Server',
full_name='harness.grpc.Server',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='bind', full_name='harness.grpc.Server.bind', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\222}\002\010\002', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=131,
serialized_end=181,
)
_CHANNEL.fields_by_name['address'].message_type = harness_dot_net__pb2._SOCKET
_SERVER.fields_by_name['bind'].message_type = harness_dot_net__pb2._SOCKET
DESCRIPTOR.message_types_by_name['Channel'] = _CHANNEL
DESCRIPTOR.message_types_by_name['Server'] = _SERVER
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Channel = _reflection.GeneratedProtocolMessageType('Channel', (_message.Message,), {
'DESCRIPTOR' : _CHANNEL,
'__module__' : 'harness.grpc_pb2'
# @@protoc_insertion_point(class_scope:harness.grpc.Channel)
})
_sym_db.RegisterMessage(Channel)
Server = _reflection.GeneratedProtocolMessageType('Server', (_message.Message,), {
'DESCRIPTOR' : _SERVER,
'__module__' : 'harness.grpc_pb2'
# @@protoc_insertion_point(class_scope:harness.grpc.Server)
})
_sym_db.RegisterMessage(Server)
_CHANNEL.fields_by_name['address']._options = None
_SERVER.fields_by_name['bind']._options = None
# @@protoc_insertion_point(module_scope)
|
[
"google.protobuf.symbol_database.Default",
"google.protobuf.descriptor.FieldDescriptor",
"google.protobuf.reflection.GeneratedProtocolMessageType",
"google.protobuf.descriptor.FileDescriptor"
] |
[((380, 406), 'google.protobuf.symbol_database.Default', '_symbol_database.Default', ([], {}), '()\n', (404, 406), True, 'from google.protobuf import symbol_database as _symbol_database\n'), ((530, 1071), 'google.protobuf.descriptor.FileDescriptor', '_descriptor.FileDescriptor', ([], {'name': '"""harness/grpc.proto"""', 'package': '"""harness.grpc"""', 'syntax': '"""proto3"""', 'serialized_options': 'None', 'serialized_pb': 'b\'\\n\\x12harness/grpc.proto\\x12\\x0charness.grpc\\x1a\\x11harness/net.proto\\x1a\\x12harness/wire.proto"6\\n\\x07Channel\\x12+\\n\\x07address\\x18\\x01 \\x01(\\x0b2\\x13.harness.net.SocketB\\x05\\x92}\\x02\\x08\\x02"2\\n\\x06Server\\x12(\\n\\x04bind\\x18\\x01 \\x01(\\x0b2\\x13.harness.net.SocketB\\x05\\x92}\\x02\\x08\\x02b\\x06proto3\'', 'dependencies': '[harness_dot_net__pb2.DESCRIPTOR, harness_dot_wire__pb2.DESCRIPTOR]'}), '(name=\'harness/grpc.proto\', package=\n \'harness.grpc\', syntax=\'proto3\', serialized_options=None, serialized_pb\n =\n b\'\\n\\x12harness/grpc.proto\\x12\\x0charness.grpc\\x1a\\x11harness/net.proto\\x1a\\x12harness/wire.proto"6\\n\\x07Channel\\x12+\\n\\x07address\\x18\\x01 \\x01(\\x0b2\\x13.harness.net.SocketB\\x05\\x92}\\x02\\x08\\x02"2\\n\\x06Server\\x12(\\n\\x04bind\\x18\\x01 \\x01(\\x0b2\\x13.harness.net.SocketB\\x05\\x92}\\x02\\x08\\x02b\\x06proto3\'\n , dependencies=[harness_dot_net__pb2.DESCRIPTOR, harness_dot_wire__pb2.\n DESCRIPTOR])\n', (556, 1071), True, 'from google.protobuf import descriptor as _descriptor\n'), ((2921, 3058), 'google.protobuf.reflection.GeneratedProtocolMessageType', '_reflection.GeneratedProtocolMessageType', (['"""Channel"""', '(_message.Message,)', "{'DESCRIPTOR': _CHANNEL, '__module__': 'harness.grpc_pb2'}"], {}), "('Channel', (_message.Message,), {\n 'DESCRIPTOR': _CHANNEL, '__module__': 'harness.grpc_pb2'})\n", (2961, 3058), True, 'from google.protobuf import reflection as _reflection\n'), ((3170, 3305), 'google.protobuf.reflection.GeneratedProtocolMessageType', '_reflection.GeneratedProtocolMessageType', (['"""Server"""', '(_message.Message,)', "{'DESCRIPTOR': _SERVER, '__module__': 'harness.grpc_pb2'}"], {}), "('Server', (_message.Message,), {\n 'DESCRIPTOR': _SERVER, '__module__': 'harness.grpc_pb2'})\n", (3210, 3305), True, 'from google.protobuf import reflection as _reflection\n'), ((1257, 1616), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', ([], {'name': '"""address"""', 'full_name': '"""harness.grpc.Channel.address"""', 'index': '(0)', 'number': '(1)', 'type': '(11)', 'cpp_type': '(10)', 'label': '(1)', 'has_default_value': '(False)', 'default_value': 'None', 'message_type': 'None', 'enum_type': 'None', 'containing_type': 'None', 'is_extension': '(False)', 'extension_scope': 'None', 'serialized_options': "b'\\x92}\\x02\\x08\\x02'", 'file': 'DESCRIPTOR'}), "(name='address', full_name=\n 'harness.grpc.Channel.address', index=0, number=1, type=11, cpp_type=10,\n label=1, has_default_value=False, default_value=None, message_type=None,\n enum_type=None, containing_type=None, is_extension=False,\n extension_scope=None, serialized_options=b'\\x92}\\x02\\x08\\x02', file=\n DESCRIPTOR)\n", (1284, 1616), True, 'from google.protobuf import descriptor as _descriptor\n'), ((2015, 2367), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', ([], {'name': '"""bind"""', 'full_name': '"""harness.grpc.Server.bind"""', 'index': '(0)', 'number': '(1)', 'type': '(11)', 'cpp_type': '(10)', 'label': '(1)', 'has_default_value': '(False)', 'default_value': 'None', 
'message_type': 'None', 'enum_type': 'None', 'containing_type': 'None', 'is_extension': '(False)', 'extension_scope': 'None', 'serialized_options': "b'\\x92}\\x02\\x08\\x02'", 'file': 'DESCRIPTOR'}), "(name='bind', full_name=\n 'harness.grpc.Server.bind', index=0, number=1, type=11, cpp_type=10,\n label=1, has_default_value=False, default_value=None, message_type=None,\n enum_type=None, containing_type=None, is_extension=False,\n extension_scope=None, serialized_options=b'\\x92}\\x02\\x08\\x02', file=\n DESCRIPTOR)\n", (2042, 2367), True, 'from google.protobuf import descriptor as _descriptor\n')]
|
import os
import shelve
APP_SETTING_FILE = os.path.join(os.getcwd(), 'instance', "data", "app")
CACHE_DIR = os.path.join(os.getcwd(), 'instance', 'cache')
try:
os.makedirs(CACHE_DIR)
os.makedirs(os.path.dirname(APP_SETTING_FILE))
except OSError:
pass
# for item, value in os.environ.items():
# print(f"{item} > {value}")
MEDIA_HOME = os.environ.get('MEDIA_HOME')
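# MEDIA_HOME is expected to be a colon-separated list of directories; their
# absolute paths are persisted in the shelve-backed app settings below.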
if MEDIA_HOME is not None:
with shelve.open(APP_SETTING_FILE) as db:
db['MEDIA_HOME'] = list(map(lambda x: os.path.abspath(x), MEDIA_HOME.split(':')))
with shelve.open(APP_SETTING_FILE) as db:
print(dict(db))
|
[
"os.path.abspath",
"os.makedirs",
"os.getcwd",
"os.path.dirname",
"shelve.open",
"os.environ.get"
] |
[((355, 383), 'os.environ.get', 'os.environ.get', (['"""MEDIA_HOME"""'], {}), "('MEDIA_HOME')\n", (369, 383), False, 'import os\n'), ((58, 69), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (67, 69), False, 'import os\n'), ((123, 134), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (132, 134), False, 'import os\n'), ((167, 189), 'os.makedirs', 'os.makedirs', (['CACHE_DIR'], {}), '(CACHE_DIR)\n', (178, 189), False, 'import os\n'), ((554, 583), 'shelve.open', 'shelve.open', (['APP_SETTING_FILE'], {}), '(APP_SETTING_FILE)\n', (565, 583), False, 'import shelve\n'), ((206, 239), 'os.path.dirname', 'os.path.dirname', (['APP_SETTING_FILE'], {}), '(APP_SETTING_FILE)\n', (221, 239), False, 'import os\n'), ((421, 450), 'shelve.open', 'shelve.open', (['APP_SETTING_FILE'], {}), '(APP_SETTING_FILE)\n', (432, 450), False, 'import shelve\n'), ((504, 522), 'os.path.abspath', 'os.path.abspath', (['x'], {}), '(x)\n', (519, 522), False, 'import os\n')]
|
# -*- coding: UTF-8 -*
from __future__ import print_function
__version__ = "1.2.0"
def get_certificate(hostname, port, sername=None):
import idna
from socket import socket
from OpenSSL import SSL
sock = socket()
sock.setblocking(True)
sock.connect((hostname, port), )
ctx = SSL.Context(SSL.SSLv23_METHOD)
ctx.check_hostname = False
ctx.verify_mode = SSL.VERIFY_NONE
sock_ssl = SSL.Connection(ctx, sock)
sock_ssl.set_tlsext_host_name(idna.encode(sername or hostname))
sock_ssl.set_connect_state()
sock_ssl.do_handshake()
cert = sock_ssl.get_peer_certificate()
sock_ssl.close()
sock.close()
return cert
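# Illustrative example: get_certificate("example.com", 443).get_notAfter() returns
# the expiry as ASN.1 GENERALIZEDTIME bytes, e.g. b'20301231235959Z'.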
_last_line = ''
def _print_status(s):
import sys
global _last_line
if not sys.stdout.isatty():
return
if _last_line:
print('\b' * len(_last_line), end='')
sys.stdout.flush()
print(' ' * len(_last_line), end='')
sys.stdout.flush()
print(u'\r%s' % s, end='')
_last_line = s
sys.stdout.flush()
def main():
import io
import sys
import time
import socket
import argparse
import datetime
from collections import OrderedDict
import ssl
try:
import urlparse as parse
import urllib2
urlopen = urllib2.urlopen
except:
from urllib import parse, request
urlopen = request.urlopen
ssl._create_default_https_context = ssl._create_unverified_context
parser = argparse.ArgumentParser(add_help=True)
    parser.add_argument('-f', '--file', help='the text file (or URI) to read URLs from')
    parser.add_argument('-e', '--expire', help='warn when the SSL certificate expires within this many days', type=int, default=7)
    parser.add_argument('-c', '--code', help='the accepted HTTP response status code(s)', type=int, default=[200], nargs='*')
    parser.add_argument('-t', '--timeout', help='the timeout in seconds for each check', type=int, default=10)
    parser.add_argument('urls', help='the URLs that will be checked', default=[], type=str, nargs='*')
args = parser.parse_args()
start = time.time()
rawurls = [] + args.urls
if args.file:
if '://' in args.file:
# uri
_print_status('fetch urls file from %s...' % args.file)
r = urlopen(args.file)
for l in r.readlines():
if type(l) != type(''):
l = l.decode()
rawurls.append(l)
else:
rawurls += list(io.open(args.file, encoding='utf-8').readlines())
urls = []
for l in rawurls:
if '://' not in l:
continue
ls = l.split('#')
if not ls:
continue
u = ls[0].strip()
if not u or u in urls:
continue
ud = {
'url': u
}
urls.append(ud)
if not urls:
_print_status('')
print('no url to check', file=sys.stderr)
exit(1)
today = datetime.datetime.today()
results = []
socket.setdefaulttimeout(args.timeout)
errct = 0
for ix, ud in enumerate(urls):
url = ud['url']
_print_status(u'%s/%d/%d %s...' % (errct, ix + 1, len(urls), url))
rs = parse.urlparse(url)
res = OrderedDict()
if args.expire and rs.scheme == 'https':
# ssl check
err = ''
try:
cert = get_certificate(rs.hostname, int(rs.port or 443))
es = cert.get_notAfter()[:-1]
if type(es) != type(''):
es = es.decode()
expdate = datetime.datetime.strptime(es, '%Y%m%d%H%M%S')
offdays = (expdate - today).days
if offdays <= args.expire:
err = 'days %s' % offdays
except Exception as e:
err = str(e) or str(type(e).__name__)
res['ssl'] = {
'title': 'ssl',
'error': err
}
if args.code:
# check http status
err = ''
try:
code = urlopen(url, timeout=args.timeout).getcode()
if code not in args.code:
err = 'code %s' % code
except Exception as e:
err = str(e)
res['http'] = {
'title': 'http',
'error': err
}
errors = list([u'%s(%s)' % (r['title'], r['error']) for r in res.values() if r['error']])
results.append({
'title': ud.get('title', url),
'url': url,
'result': res,
'error': u'/'.join(errors) if errors else ''
})
if errors:
errct += 1
# print(results)
_print_status('')
errors = list(['%s [%s]' % (r['title'], r['error']) for r in results if r['error']])
print('TIME:%ds CHECKED:%d ERROR:%s' % (int(time.time() - start), len(results), len(errors)))
if errors:
print('\n'.join(errors))
if __name__ == '__main__':
main()
|
[
"argparse.ArgumentParser",
"OpenSSL.SSL.Connection",
"datetime.datetime.today",
"time.time",
"socket.setdefaulttimeout",
"datetime.datetime.strptime",
"sys.stdout.flush",
"OpenSSL.SSL.Context",
"socket",
"idna.encode",
"sys.stdout.isatty",
"io.open",
"collections.OrderedDict",
"urllib.parse.urlparse"
] |
[((223, 231), 'socket', 'socket', ([], {}), '()\n', (229, 231), False, 'import socket\n'), ((306, 336), 'OpenSSL.SSL.Context', 'SSL.Context', (['SSL.SSLv23_METHOD'], {}), '(SSL.SSLv23_METHOD)\n', (317, 336), False, 'from OpenSSL import SSL\n'), ((422, 447), 'OpenSSL.SSL.Connection', 'SSL.Connection', (['ctx', 'sock'], {}), '(ctx, sock)\n', (436, 447), False, 'from OpenSSL import SSL\n'), ((1018, 1036), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (1034, 1036), False, 'import sys\n'), ((1484, 1522), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'add_help': '(True)'}), '(add_help=True)\n', (1507, 1522), False, 'import argparse\n'), ((2065, 2076), 'time.time', 'time.time', ([], {}), '()\n', (2074, 2076), False, 'import time\n'), ((2932, 2957), 'datetime.datetime.today', 'datetime.datetime.today', ([], {}), '()\n', (2955, 2957), False, 'import datetime\n'), ((2979, 3017), 'socket.setdefaulttimeout', 'socket.setdefaulttimeout', (['args.timeout'], {}), '(args.timeout)\n', (3003, 3017), False, 'import socket\n'), ((482, 514), 'idna.encode', 'idna.encode', (['(sername or hostname)'], {}), '(sername or hostname)\n', (493, 514), False, 'import idna\n'), ((764, 783), 'sys.stdout.isatty', 'sys.stdout.isatty', ([], {}), '()\n', (781, 783), False, 'import sys\n'), ((873, 891), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (889, 891), False, 'import sys\n'), ((945, 963), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (961, 963), False, 'import sys\n'), ((3179, 3198), 'urllib.parse.urlparse', 'parse.urlparse', (['url'], {}), '(url)\n', (3193, 3198), False, 'from urllib import parse, request\n'), ((3213, 3226), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (3224, 3226), False, 'from collections import OrderedDict\n'), ((3561, 3607), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['es', '"""%Y%m%d%H%M%S"""'], {}), "(es, '%Y%m%d%H%M%S')\n", (3587, 3607), False, 'import datetime\n'), ((2463, 2499), 'io.open', 'io.open', (['args.file'], {'encoding': '"""utf-8"""'}), "(args.file, encoding='utf-8')\n", (2470, 2499), False, 'import io\n'), ((4857, 4868), 'time.time', 'time.time', ([], {}), '()\n', (4866, 4868), False, 'import time\n')]
|
from __future__ import absolute_import
from unittest import TestCase, skip
from ..wcs import WCS
import numpy as np
import os
import re
import sys
from astropy.io import fits
from astropy.modeling import (models, fitting, Model)
import matplotlib.pyplot as plt
from ccdproc import CCDData
class TestWCSBase(TestCase):
def setUp(self):
self.data_path = os.path.join(
os.path.dirname(sys.modules['goodman_pipeline'].__file__),
'data/test_data/wcs_data')
self.wcs = WCS()
@staticmethod
def _recover_lines(ccd):
lines_pixel = []
lines_angstrom = []
pixel_keywords = ccd.header['GSP_P*']
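        # Each GSP_Pnnn keyword has a matching GSP_Annn keyword; pairs whose
        # angstrom value is 0 are unidentified lines and are skipped.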
for pixel_key in pixel_keywords:
if re.match(r'GSP_P\d{3}', pixel_key) is not None:
angstrom_key = re.sub('GSP_P', 'GSP_A', pixel_key)
if int(ccd.header[angstrom_key]) != 0:
lines_pixel.append(float(ccd.header[pixel_key]))
lines_angstrom.append(float(ccd.header[angstrom_key]))
return lines_pixel, lines_angstrom
class TestWCS(TestWCSBase):
# def test_wcs__call__(self):
# self.assertRaisesRegex(SystemExit, '1', self.wcs)
# self.assertRaises(SystemExit, self.wcs)
def test_fit_chebyshev(self):
test_file = os.path.join(self.data_path,
'goodman_comp_400M1_HgArNe.fits')
ccd = CCDData.read(test_file, unit='adu')
pixel, angstrom = self._recover_lines(ccd=ccd)
model = self.wcs.fit(physical=pixel, wavelength=angstrom)
self.assertIsInstance(model, Model)
self.assertEqual(model.__class__.__name__, ccd.header['GSP_FUNC'])
self.assertEqual(model.degree, ccd.header['GSP_ORDR'])
for i in range(model.degree + 1):
self.assertAlmostEqual(model.__getattribute__('c{:d}'.format(i)).value,
ccd.header['GSP_C{:03d}'.format(i)])
def test_fit_linear(self):
test_file = os.path.join(self.data_path,
'goodman_comp_400M1_HgArNe.fits')
ccd = CCDData.read(test_file, unit='adu')
pixel, angstrom = self._recover_lines(ccd=ccd)
model = self.wcs.fit(physical=pixel,
wavelength=angstrom,
model_name='linear')
self.assertIsInstance(model, Model)
def test_fit_invalid(self):
test_file = os.path.join(self.data_path,
'goodman_comp_400M1_HgArNe.fits')
ccd = CCDData.read(test_file, unit='adu')
pixel, angstrom = self._recover_lines(ccd=ccd)
self.assertRaisesRegex(NotImplementedError,
'The model invalid is not implemented',
self.wcs.fit,
pixel,
angstrom,
'invalid')
self.assertRaises(NotImplementedError,
self.wcs.fit,
pixel,
angstrom,
'invalid')
def test_fit__unable_to_fit(self):
pixel = [0, 1, 2, 3]
angstrom = [20, 30, 40]
# self.assertRaisesRegex(ValueError,
# 'x and y should have the same shape',
# self.wcs.fit, pixel, angstrom)
self.assertRaises(ValueError, self.wcs.fit, pixel, angstrom)
def test_read__linear(self):
test_file = os.path.join(self.data_path,
'linear_fits_solution.fits')
self.assertTrue(os.path.isfile(test_file))
ccd = CCDData.read(test_file, unit='adu')
result = self.wcs.read(ccd=ccd)
self.assertIsInstance(result, list)
self.assertEqual(len(result), 2)
self.assertIsInstance(self.wcs.get_model(), Model)
def test_read__log_linear(self):
test_file = os.path.join(self.data_path,
'log-linear_fits_solution.fits')
self.assertTrue(os.path.isfile(test_file))
ccd = CCDData.read(test_file, unit='adu')
#
# result = self.wcs.read(ccd=ccd)
#
# self.assertIsInstance(result, list)
# self.assertEqual(len(result), 2)
# self.assertIsInstance(self.wcs.get_model(), Model)
self.assertRaises(NotImplementedError, self.wcs.read, ccd)
def test_read__non_linear_chebyshev(self):
test_file = os.path.join(self.data_path,
'non-linear_fits_solution_cheb.fits')
self.assertTrue(os.path.isfile(test_file))
ccd = CCDData.read(test_file, unit='adu')
result = self.wcs.read(ccd=ccd)
self.assertIsInstance(self.wcs.model, Model)
self.assertEqual(self.wcs.model.__class__.__name__, 'Chebyshev1D')
def test_read__non_linear_legendre(self):
test_file = os.path.join(self.data_path,
'non-linear_fits_solution_legendre.fits')
self.assertTrue(os.path.isfile(test_file))
ccd = CCDData.read(test_file, unit='adu')
result = self.wcs.read(ccd=ccd)
self.assertIsInstance(self.wcs.model, Model)
self.assertEqual(self.wcs.model.__class__.__name__, 'Legendre1D')
def test_read__non_linear_lspline(self):
test_file = os.path.join(self.data_path,
'non-linear_fits_solution_linear-spline.fits')
self.assertTrue(os.path.isfile(test_file))
ccd = CCDData.read(test_file, unit='adu')
# self.wcs.read(ccd=ccd)
self.assertRaises(NotImplementedError, self.wcs.read, ccd)
self.assertRaisesRegex(NotImplementedError,
'Linear spline is not implemented',
self.wcs.read, ccd)
def test_read__non_linear_cspline(self):
test_file = os.path.join(self.data_path,
'non-linear_fits_solution_cubic-spline.fits')
self.assertTrue(os.path.isfile(test_file))
ccd = CCDData.read(test_file, unit='adu')
self.assertRaises(NotImplementedError, self.wcs.read, ccd)
self.assertRaisesRegex(NotImplementedError,
'Cubic spline is not implemented',
self.wcs.read, ccd)
def test_write_fits_wcs(self):
self.assertRaises(NotImplementedError, self.wcs.write_fits_wcs,
None,
None)
def test_read__invalid(self):
test_file = os.path.join(self.data_path,
'linear_fits_solution.fits')
self.assertTrue(os.path.isfile(test_file))
ccd = CCDData.read(test_file, unit='adu')
ccd.wcs.wcs.ctype[0] = 'INVALID'
self.assertRaisesRegex(NotImplementedError,
'CTYPE INVALID is not recognized',
self.wcs.read,
ccd)
self.assertRaises(NotImplementedError, self.wcs.read, ccd)
def test_write_gsp_wcs(self):
test_file = os.path.join(self.data_path,
'goodman_comp_400M1_HgArNe.fits')
ccd = CCDData.read(test_file, unit='adu')
pixel, angstrom = self._recover_lines(ccd=ccd)
model = self.wcs.fit(physical=pixel, wavelength=angstrom)
self.assertIsInstance(model, Model)
blank_ccd = CCDData(data=np.ones(ccd.data.shape),
meta=fits.Header(),
unit='adu')
blank_ccd.header.set('GSP_WREJ', value=None, comment='empty')
new_ccd = self.wcs.write_gsp_wcs(ccd=blank_ccd, model=model)
self.assertEqual(new_ccd.header['GSP_FUNC'], ccd.header['GSP_FUNC'])
self.assertEqual(new_ccd.header['GSP_ORDR'], ccd.header['GSP_ORDR'])
self.assertEqual(new_ccd.header['GSP_NPIX'], ccd.header['GSP_NPIX'])
for i in range(model.degree + 1):
self.assertAlmostEqual(new_ccd.header['GSP_C{:03d}'.format(i)],
ccd.header['GSP_C{:03d}'.format(i)])
def test_read_gsp_wcs(self):
test_file = os.path.join(self.data_path,
'goodman_comp_400M1_HgArNe.fits')
self.assertTrue(os.path.isfile(test_file))
ccd = CCDData.read(test_file, unit='adu')
result = self.wcs.read_gsp_wcs(ccd=ccd)
self.assertIsInstance(result, list)
self.assertEqual(len(result), 2)
self.assertIsInstance(self.wcs.get_model(), Model)
def test_get_model_is_None(self):
self.wcs.model = None
self.assertIsNone(self.wcs.get_model())
def test_get_model_is_not_None(self):
self.wcs.model = models.Chebyshev1D(degree=3)
self.assertIsInstance(self.wcs.get_model(), Model)
def test_pm_none(self):
# test_file = os.path.join(self.data_path,
# 'non-linear_fits_solution_cheb.fits')
# self.assertTrue(os.path.isfile(test_file))
#
# ccd = CCDData.read(test_file, unit='adu')
#
# WAT2_001 = 'wtype = multispec spec1 = "1 1 2 1. 1.5114461210693 4096 0. 834.39 864'
# WAT2_002 = '.39 1. 0. 1 3 1616.37 3259.98 5115.64008185559 535.515983711607 -0.7'
# WAT2_003 = '79265625182385"'
#
# dtype = -1
self.assertRaises(NotImplementedError, self.wcs._none)
|
[
"ccdproc.CCDData.read",
"os.path.dirname",
"re.match",
"numpy.ones",
"os.path.isfile",
"astropy.modeling.models.Chebyshev1D",
"astropy.io.fits.Header",
"os.path.join",
"re.sub"
] |
[((1309, 1371), 'os.path.join', 'os.path.join', (['self.data_path', '"""goodman_comp_400M1_HgArNe.fits"""'], {}), "(self.data_path, 'goodman_comp_400M1_HgArNe.fits')\n", (1321, 1371), False, 'import os\n'), ((1419, 1454), 'ccdproc.CCDData.read', 'CCDData.read', (['test_file'], {'unit': '"""adu"""'}), "(test_file, unit='adu')\n", (1431, 1454), False, 'from ccdproc import CCDData\n'), ((2003, 2065), 'os.path.join', 'os.path.join', (['self.data_path', '"""goodman_comp_400M1_HgArNe.fits"""'], {}), "(self.data_path, 'goodman_comp_400M1_HgArNe.fits')\n", (2015, 2065), False, 'import os\n'), ((2113, 2148), 'ccdproc.CCDData.read', 'CCDData.read', (['test_file'], {'unit': '"""adu"""'}), "(test_file, unit='adu')\n", (2125, 2148), False, 'from ccdproc import CCDData\n'), ((2446, 2508), 'os.path.join', 'os.path.join', (['self.data_path', '"""goodman_comp_400M1_HgArNe.fits"""'], {}), "(self.data_path, 'goodman_comp_400M1_HgArNe.fits')\n", (2458, 2508), False, 'import os\n'), ((2556, 2591), 'ccdproc.CCDData.read', 'CCDData.read', (['test_file'], {'unit': '"""adu"""'}), "(test_file, unit='adu')\n", (2568, 2591), False, 'from ccdproc import CCDData\n'), ((3535, 3592), 'os.path.join', 'os.path.join', (['self.data_path', '"""linear_fits_solution.fits"""'], {}), "(self.data_path, 'linear_fits_solution.fits')\n", (3547, 3592), False, 'import os\n'), ((3692, 3727), 'ccdproc.CCDData.read', 'CCDData.read', (['test_file'], {'unit': '"""adu"""'}), "(test_file, unit='adu')\n", (3704, 3727), False, 'from ccdproc import CCDData\n'), ((3972, 4033), 'os.path.join', 'os.path.join', (['self.data_path', '"""log-linear_fits_solution.fits"""'], {}), "(self.data_path, 'log-linear_fits_solution.fits')\n", (3984, 4033), False, 'import os\n'), ((4133, 4168), 'ccdproc.CCDData.read', 'CCDData.read', (['test_file'], {'unit': '"""adu"""'}), "(test_file, unit='adu')\n", (4145, 4168), False, 'from ccdproc import CCDData\n'), ((4516, 4582), 'os.path.join', 'os.path.join', (['self.data_path', '"""non-linear_fits_solution_cheb.fits"""'], {}), "(self.data_path, 'non-linear_fits_solution_cheb.fits')\n", (4528, 4582), False, 'import os\n'), ((4682, 4717), 'ccdproc.CCDData.read', 'CCDData.read', (['test_file'], {'unit': '"""adu"""'}), "(test_file, unit='adu')\n", (4694, 4717), False, 'from ccdproc import CCDData\n'), ((4954, 5024), 'os.path.join', 'os.path.join', (['self.data_path', '"""non-linear_fits_solution_legendre.fits"""'], {}), "(self.data_path, 'non-linear_fits_solution_legendre.fits')\n", (4966, 5024), False, 'import os\n'), ((5124, 5159), 'ccdproc.CCDData.read', 'CCDData.read', (['test_file'], {'unit': '"""adu"""'}), "(test_file, unit='adu')\n", (5136, 5159), False, 'from ccdproc import CCDData\n'), ((5394, 5469), 'os.path.join', 'os.path.join', (['self.data_path', '"""non-linear_fits_solution_linear-spline.fits"""'], {}), "(self.data_path, 'non-linear_fits_solution_linear-spline.fits')\n", (5406, 5469), False, 'import os\n'), ((5569, 5604), 'ccdproc.CCDData.read', 'CCDData.read', (['test_file'], {'unit': '"""adu"""'}), "(test_file, unit='adu')\n", (5581, 5604), False, 'from ccdproc import CCDData\n'), ((5941, 6015), 'os.path.join', 'os.path.join', (['self.data_path', '"""non-linear_fits_solution_cubic-spline.fits"""'], {}), "(self.data_path, 'non-linear_fits_solution_cubic-spline.fits')\n", (5953, 6015), False, 'import os\n'), ((6115, 6150), 'ccdproc.CCDData.read', 'CCDData.read', (['test_file'], {'unit': '"""adu"""'}), "(test_file, unit='adu')\n", (6127, 6150), False, 'from ccdproc import CCDData\n'), ((6614, 6671), 
'os.path.join', 'os.path.join', (['self.data_path', '"""linear_fits_solution.fits"""'], {}), "(self.data_path, 'linear_fits_solution.fits')\n", (6626, 6671), False, 'import os\n'), ((6771, 6806), 'ccdproc.CCDData.read', 'CCDData.read', (['test_file'], {'unit': '"""adu"""'}), "(test_file, unit='adu')\n", (6783, 6806), False, 'from ccdproc import CCDData\n'), ((7171, 7233), 'os.path.join', 'os.path.join', (['self.data_path', '"""goodman_comp_400M1_HgArNe.fits"""'], {}), "(self.data_path, 'goodman_comp_400M1_HgArNe.fits')\n", (7183, 7233), False, 'import os\n'), ((7281, 7316), 'ccdproc.CCDData.read', 'CCDData.read', (['test_file'], {'unit': '"""adu"""'}), "(test_file, unit='adu')\n", (7293, 7316), False, 'from ccdproc import CCDData\n'), ((8235, 8297), 'os.path.join', 'os.path.join', (['self.data_path', '"""goodman_comp_400M1_HgArNe.fits"""'], {}), "(self.data_path, 'goodman_comp_400M1_HgArNe.fits')\n", (8247, 8297), False, 'import os\n'), ((8397, 8432), 'ccdproc.CCDData.read', 'CCDData.read', (['test_file'], {'unit': '"""adu"""'}), "(test_file, unit='adu')\n", (8409, 8432), False, 'from ccdproc import CCDData\n'), ((8811, 8839), 'astropy.modeling.models.Chebyshev1D', 'models.Chebyshev1D', ([], {'degree': '(3)'}), '(degree=3)\n', (8829, 8839), False, 'from astropy.modeling import models, fitting, Model\n'), ((395, 452), 'os.path.dirname', 'os.path.dirname', (["sys.modules['goodman_pipeline'].__file__"], {}), "(sys.modules['goodman_pipeline'].__file__)\n", (410, 452), False, 'import os\n'), ((3650, 3675), 'os.path.isfile', 'os.path.isfile', (['test_file'], {}), '(test_file)\n', (3664, 3675), False, 'import os\n'), ((4091, 4116), 'os.path.isfile', 'os.path.isfile', (['test_file'], {}), '(test_file)\n', (4105, 4116), False, 'import os\n'), ((4640, 4665), 'os.path.isfile', 'os.path.isfile', (['test_file'], {}), '(test_file)\n', (4654, 4665), False, 'import os\n'), ((5082, 5107), 'os.path.isfile', 'os.path.isfile', (['test_file'], {}), '(test_file)\n', (5096, 5107), False, 'import os\n'), ((5527, 5552), 'os.path.isfile', 'os.path.isfile', (['test_file'], {}), '(test_file)\n', (5541, 5552), False, 'import os\n'), ((6073, 6098), 'os.path.isfile', 'os.path.isfile', (['test_file'], {}), '(test_file)\n', (6087, 6098), False, 'import os\n'), ((6729, 6754), 'os.path.isfile', 'os.path.isfile', (['test_file'], {}), '(test_file)\n', (6743, 6754), False, 'import os\n'), ((8356, 8381), 'os.path.isfile', 'os.path.isfile', (['test_file'], {}), '(test_file)\n', (8370, 8381), False, 'import os\n'), ((722, 756), 're.match', 're.match', (['"""GSP_P\\\\d{3}"""', 'pixel_key'], {}), "('GSP_P\\\\d{3}', pixel_key)\n", (730, 756), False, 'import re\n'), ((801, 836), 're.sub', 're.sub', (['"""GSP_P"""', '"""GSP_A"""', 'pixel_key'], {}), "('GSP_P', 'GSP_A', pixel_key)\n", (807, 836), False, 'import re\n'), ((7516, 7539), 'numpy.ones', 'np.ones', (['ccd.data.shape'], {}), '(ccd.data.shape)\n', (7523, 7539), True, 'import numpy as np\n'), ((7572, 7585), 'astropy.io.fits.Header', 'fits.Header', ([], {}), '()\n', (7583, 7585), False, 'from astropy.io import fits\n')]
|
from guizero import App, TextBox, Text
def count():
character_count.value = len(entered_text.value)
app = App()
entered_text = TextBox(app, command=count)
character_count = Text(app)
app.display()
|
[
"guizero.TextBox",
"guizero.App",
"guizero.Text"
] |
[((112, 117), 'guizero.App', 'App', ([], {}), '()\n', (115, 117), False, 'from guizero import App, TextBox, Text\n'), ((133, 160), 'guizero.TextBox', 'TextBox', (['app'], {'command': 'count'}), '(app, command=count)\n', (140, 160), False, 'from guizero import App, TextBox, Text\n'), ((179, 188), 'guizero.Text', 'Text', (['app'], {}), '(app)\n', (183, 188), False, 'from guizero import App, TextBox, Text\n')]
|
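The guizero sample above wires a TextBox's command callback to a Text label. A lightly expanded sketch of the same pattern follows (not part of the dataset row; the window title and label wording are invented, though App, TextBox, Text and the keyword arguments used are standard guizero):

from guizero import App, TextBox, Text

def count():
    # command fires whenever the TextBox contents change
    character_count.value = f"{len(entered_text.value)} characters"

app = App(title="Character counter")
entered_text = TextBox(app, command=count)
character_count = Text(app, text="0 characters")
app.display()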
from django.views.generic import RedirectView
from mobile.constants import DEFAULT_REDIRECT_URL, DEFAULT_REDIRECTORS
from mobile.services.mobile_redirector_service import DesktopToMobileRedirectorService
from share.models import Session
class MobileDataToolView(RedirectView):
def get_redirect_url(self, *args, **kwargs):
hash_id = kwargs.get('hash_id', '')
try:
session_id = Session.id_from_hash(hash_id)[0]
session = Session.objects.get(id=session_id)
self.filters = session.query.get('filters', {})
except (IndexError, Session.DoesNotExist):
self.filters = {}
redirect_urls = DesktopToMobileRedirectorService(DEFAULT_REDIRECTORS).perform(self.filters)
if len(redirect_urls) == 1:
return redirect_urls[0]
return DEFAULT_REDIRECT_URL
|
[
"share.models.Session.objects.get",
"mobile.services.mobile_redirector_service.DesktopToMobileRedirectorService",
"share.models.Session.id_from_hash"
] |
[((467, 501), 'share.models.Session.objects.get', 'Session.objects.get', ([], {'id': 'session_id'}), '(id=session_id)\n', (486, 501), False, 'from share.models import Session\n'), ((412, 441), 'share.models.Session.id_from_hash', 'Session.id_from_hash', (['hash_id'], {}), '(hash_id)\n', (432, 441), False, 'from share.models import Session\n'), ((668, 721), 'mobile.services.mobile_redirector_service.DesktopToMobileRedirectorService', 'DesktopToMobileRedirectorService', (['DEFAULT_REDIRECTORS'], {}), '(DEFAULT_REDIRECTORS)\n', (700, 721), False, 'from mobile.services.mobile_redirector_service import DesktopToMobileRedirectorService\n')]
|
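The view above relies on Django's generic RedirectView: get_redirect_url() returns the target URL and the base class issues the HTTP redirect. A minimal, self-contained sketch of that pattern is below; the LegacyItemRedirectView name, the item_id kwarg, and the fallback URL are illustrative assumptions, not code from the project above.

from django.views.generic import RedirectView

FALLBACK_URL = "https://example.com/"  # assumed fallback, playing the role of DEFAULT_REDIRECT_URL

class LegacyItemRedirectView(RedirectView):
    permanent = False  # issue a 302 so the target is re-resolved on every request

    def get_redirect_url(self, *args, **kwargs):
        # kwargs come from the URLconf capture groups, e.g. path("legacy/<int:item_id>/", ...)
        item_id = kwargs.get("item_id")
        if item_id is None:
            return FALLBACK_URL
        return f"/items/{item_id}/"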
from util import generate_doc_src, auto_dict
from rdflib import Graph
from urllib.error import URLError
# Pull the latest Brick.ttl to /static/schema
try:
g = Graph()
g.parse("https://github.com/brickschema/Brick/releases/latest/download/Brick.ttl", format="turtle")
g.serialize("static/schema/Brick.ttl", format="turtle")
except URLError as e:
print("[WARN]: Unable to pull the latest version of Brick!")
# Doc config
doc_spec = auto_dict()
# Brick v1.0.3
doc_spec["1.0.3"]["input"] = ["static/schema/1.0.3"]
doc_spec["1.0.3"]["ns_restriction"] = [
"https://brickschema.org/schema/1.0.3/Brick#",
"https://brickschema.org/schema/1.0.3/BrickFrame#",
]
doc_spec["1.0.3"]["classes"]["type_restriction"] = [
"http://www.w3.org/2002/07/owl#Class"
]
doc_spec["1.0.3"]["relationships"]["type_restriction"] = [
"http://www.w3.org/2002/07/owl#ObjectProperty"
]
# Brick v1.1
doc_spec["1.1"]["input"] = ["static/schema/1.1"]
doc_spec["1.1"]["ns_restriction"] = ["https://brickschema.org/schema/1.1/Brick#"]
doc_spec["1.1"]["classes"]["type_restriction"] = ["http://www.w3.org/2002/07/owl#Class"]
doc_spec["1.1"]["relationships"]["type_restriction"] = [
"http://www.w3.org/2002/07/owl#ObjectProperty"
]
# Brick v1.2
doc_spec["1.2"]["input"] = ["static/schema/1.2"]
doc_spec["1.2"]["ns_restriction"] = ["https://brickschema.org/schema/Brick#"]
doc_spec["1.2"]["classes"]["type_restriction"] = ["http://www.w3.org/2002/07/owl#Class"]
doc_spec["1.2"]["relationships"]["type_restriction"] = [
"http://www.w3.org/2002/07/owl#ObjectProperty"
]
if __name__ == "__main__":
generate_doc_src(doc_spec)
# Structure
# doc_spec = {
# "1.0.3": {
# "ns_restriction": ["https://brickschema.org/schema/1.0.3/Brick#", "https://brickschema.org/schema/1.0.3/BrickFrame#"]
# "classes" : {
# "roots": [],
# "type_restriction": ["http://www.w3.org/2002/07/owl#Class"]
# "ns_restriction": [
# "https://brickschema.org/schema/1.0.3/Brick#",
# "https://brickschema.org/schema/1.0.3/BrickFrame#"
# ],
# "parent_restriction": [],
# "no_expansion": [],
# "exclusions": []
# }
# }
# }
|
[
"util.auto_dict",
"rdflib.Graph",
"util.generate_doc_src"
] |
[((449, 460), 'util.auto_dict', 'auto_dict', ([], {}), '()\n', (458, 460), False, 'from util import generate_doc_src, auto_dict\n'), ((164, 171), 'rdflib.Graph', 'Graph', ([], {}), '()\n', (169, 171), False, 'from rdflib import Graph\n'), ((1606, 1632), 'util.generate_doc_src', 'generate_doc_src', (['doc_spec'], {}), '(doc_spec)\n', (1622, 1632), False, 'from util import generate_doc_src, auto_dict\n')]
|
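In the script above, doc_spec = auto_dict() is assigned nested keys such as doc_spec["1.2"]["classes"]["type_restriction"] without any intermediate dictionaries being created first, so util.auto_dict presumably auto-vivifies missing levels. A recursive defaultdict is one common way to get that behaviour; the sketch below illustrates the idea under that assumption and is not the project's actual implementation.

from collections import defaultdict

def auto_dict():
    # Every missing key yields another auto_dict, so arbitrarily deep
    # assignments work without pre-creating the intermediate levels.
    return defaultdict(auto_dict)

spec = auto_dict()
spec["1.2"]["classes"]["type_restriction"] = ["http://www.w3.org/2002/07/owl#Class"]
print(spec["1.2"]["classes"]["type_restriction"])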
from sciapp.action import Free
import scipy.ndimage as ndimg
import numpy as np, wx
# from imagepy import IPy
#matplotlib.use('WXAgg')
import matplotlib.pyplot as plt
def block(arr):
img = np.zeros((len(arr),30,30), dtype=np.uint8)
img.T[:] = arr
return np.hstack(img)
class Temperature(Free):
title = 'Temperature Difference'
asyn = False
def run(self, para = None):
xs = np.array([1,2,3,4,5,6,7,8,9,10,11,12])
ys = np.array([1,2,1,2,2,3,8,9,8,10,9,10], dtype=np.float32)
ds = ndimg.convolve1d(ys, [0,1,-1])
lbs = ['Jan','Feb','Mar','Apr','May','June',
'Jul','Aug','Sep','Oct','Nov','Dec']
plt.xticks(xs, lbs)
plt.plot(xs, ys, '-o', label='Temperature')
plt.plot(xs, ds, '-o', label='Difference')
plt.grid()
plt.gca().legend()
plt.title('Temperature in XX')
plt.xlabel('Month')
plt.ylabel('Temperature (C)')
plt.show()
self.app.show_img([block((ys-ys.min())*(180/ys.max()-ys.min()))], 'Temperature')
self.app.show_img([block((ds-ds.min())*(180/ds.max()-ds.min()))], 'Difference')
class Shake(Free):
title = 'Shake Damping'
asyn = False
def run(self, para = None):
xs = np.array([1,2,3,4,5,6,7,8,9,10])
ys = np.array([10,-9,8,-7,6,-5,4,-3,2,-1], dtype=np.float32)
ds = ndimg.convolve1d(ys, [1/3,1/3,1/3])
print(ds)
plt.plot(xs, ys, '-o', label='Shake')
plt.plot(xs, ds, '-o', label='Damping')
plt.grid()
plt.gca().legend()
plt.title('Shake Damping')
plt.xlabel('Time')
plt.ylabel('Amplitude')
plt.show()
self.app.show_img([block(ys*10+128)], 'Shake')
self.app.show_img([block(ds*10+128)], 'Damping')
class Inertia(Free):
title = 'Psychological Inertia'
asyn = False
def run(self, para = None):
xs = np.array([1,2,3,4,5,6,7,8,9,10])
ys = np.array([90,88,93,95,91,70,89,92,94,89], dtype=np.float32)
ds = ndimg.convolve1d(ys, [1/3,1/3,1/3])
print(ds)
plt.plot(xs, ys, '-o', label='Psychological')
plt.plot(xs, ds, '-o', label='Inertia')
plt.grid()
plt.gca().legend()
plt.title('Psychological Inertia')
plt.xlabel('Time')
plt.ylabel('Score')
plt.show()
self.app.show_img([block((ys-80)*3+80)], 'Psychological')
self.app.show_img([block((ds-80)*3+80)], 'Inertia')
class GaussCore(Free):
title = 'Gaussian Core'
asyn = False
def run(self, para = None):
x, y = np.ogrid[-3:3:10j, -3:3:10j]
z = np.exp(-(x ** 2 + y ** 2)/1)
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.plot_wireframe(x, y, z)
z = np.exp(-(x ** 2 + y ** 2)/4)
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.plot_wireframe(x, y, z)
plt.show()
class LoGCore(Free):
title = 'Laplace of Gaussian Core'
asyn = False
def run(self, para = None):
plt.figure()
x = np.linspace(-3,3,50)
y = np.exp(-x**2)
dy = np.exp(-x**2)*(4*x**2-2)
plt.plot(x, y, label='Gauss')
plt.plot(x, -dy, label="Gauss''")
plt.grid()
plt.legend()
x, y = np.ogrid[-3:3:20j, -3:3:20j]
z = (4*x**2-2)*np.exp(-y**2-x**2)+(4*y**2-2)*np.exp(-x**2-y**2)
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.plot_wireframe(x, y, -z)
plt.show()
class DogCore(Free):
title = 'Difference of Gaussian Core'
asyn = False
def run(self, para = None):
plt.figure()
x = np.linspace(-3,3,50)
y = np.exp(-x**2)
yy = np.exp(-x**2/4)/2
plt.plot(x, y, label='sigma = 1')
plt.plot(x, yy, label='sigma = 2')
plt.plot(x, y-yy, 'r', lw=3, label="Difference")
plt.grid()
plt.legend()
x, y = np.ogrid[-3:3:20j, -3:3:20j]
z = np.exp(-(x ** 2 + y ** 2)/1)-np.exp(-(x ** 2 + y ** 2)/4)/2
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.plot_wireframe(x, y, z)
plt.show()
class LaplaceSharp(Free):
title = 'Show how to Laplace Sharp'
asyn = False
def run(self, para = None):
x = np.linspace(-10,10,300)
y = np.arctan(x)
fig, axes = plt.subplots(nrows=2, ncols=2)
ax0, ax1, ax2, ax3 = axes.flatten()
ax0.set_title('y = arctan(x)')
ax0.plot(x, y)
ax0.grid()
ax1.set_title("y = arctan(x)'")
ax1.plot(x, y)
ax1.plot(x, 1/(x**2+1))
ax1.grid()
ax2.set_title("y = arctan(x)''")
ax2.plot(x, y)
ax2.plot(x, (2*x)/(x**4+2*x**2+1))
ax2.grid()
ax3.set_title("y = arctan(x) + arctan(x)''")
ax3.plot(x, y)
ax3.plot(x, y+(2*x)/(x**4+2*x**2+1))
ax3.grid()
fig.tight_layout()
plt.show()
self.app.show_img([(((y*70)+128)*np.ones((30,1))).astype(np.uint8)], 'tan(x)')
self.app.show_img([((100/(x**2+1))*np.ones((30,1))).astype(np.uint8)], "tan(x)'")
self.app.show_img([((((2*x)/(x**4+2*x**2+1)*70)+128)*
np.ones((30,1))).astype(np.uint8)], "tan(x))''")
self.app.show_img([((((y+(2*x)/(x**4+2*x**2+1))*70)+128)*
np.ones((30,1))).astype(np.uint8)], "tan(x)+tan(x)''")
class UnSharp(Free):
title = 'Show how to Unsharp Mask'
asyn = False
def run(self, para = None):
x = np.linspace(-10,10,300)
y = np.arctan(x)
fig, axes = plt.subplots(nrows=2, ncols=2)
ax0, ax1, ax2, ax3 = axes.flatten()
gy = ndimg.gaussian_filter1d(y, 30)
ax0, ax1, ax2, ax3 = axes.flatten()
ax0.set_title('y = arctan(x)')
ax0.plot(x, y)
ax0.grid()
ax1.set_title("gaussian")
ax1.plot(x, y)
ax1.plot(x, gy)
ax1.grid()
ax2.set_title("y = arctan(x) - gaussian")
ax2.plot(x, y)
ax2.plot(x, y-gy)
ax2.grid()
ax3.set_title("y = arctan(x) + diff")
ax3.plot(x, y)
ax3.plot(x, y+2*(y-gy))
ax3.grid()
fig.tight_layout()
plt.show()
self.app.show_img([((y*70+128)*np.ones((30,1))).astype(np.uint8)], 'tan(x)')
self.app.show_img([((gy*70+128)*np.ones((30,1))).astype(np.uint8)], 'gaussian')
self.app.show_img([(((y-gy)*100+128)*np.ones((30,1))).astype(np.uint8)], 'arctan(x) - gaussian')
self.app.show_img([(((y+2*(y-gy))*70+128)*np.ones((30,1))).astype(np.uint8)], "arctan(x) + diff")
plgs = [Temperature, Shake, Inertia, GaussCore, LoGCore, DogCore, LaplaceSharp, UnSharp]
|
[
"matplotlib.pyplot.title",
"scipy.ndimage.gaussian_filter1d",
"numpy.ones",
"matplotlib.pyplot.figure",
"numpy.exp",
"matplotlib.pyplot.gca",
"numpy.linspace",
"matplotlib.pyplot.xticks",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.show",
"scipy.ndimage.convolve1d",
"matplotlib.pyplot.legend",
"numpy.hstack",
"numpy.arctan",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.grid",
"matplotlib.pyplot.plot",
"numpy.array",
"matplotlib.pyplot.xlabel"
] |
[((258, 272), 'numpy.hstack', 'np.hstack', (['img'], {}), '(img)\n', (267, 272), True, 'import numpy as np, wx\n'), ((384, 433), 'numpy.array', 'np.array', (['[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]'], {}), '([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12])\n', (392, 433), True, 'import numpy as np, wx\n'), ((430, 496), 'numpy.array', 'np.array', (['[1, 2, 1, 2, 2, 3, 8, 9, 8, 10, 9, 10]'], {'dtype': 'np.float32'}), '([1, 2, 1, 2, 2, 3, 8, 9, 8, 10, 9, 10], dtype=np.float32)\n', (438, 496), True, 'import numpy as np, wx\n'), ((493, 525), 'scipy.ndimage.convolve1d', 'ndimg.convolve1d', (['ys', '[0, 1, -1]'], {}), '(ys, [0, 1, -1])\n', (509, 525), True, 'import scipy.ndimage as ndimg\n'), ((619, 638), 'matplotlib.pyplot.xticks', 'plt.xticks', (['xs', 'lbs'], {}), '(xs, lbs)\n', (629, 638), True, 'import matplotlib.pyplot as plt\n'), ((642, 685), 'matplotlib.pyplot.plot', 'plt.plot', (['xs', 'ys', '"""-o"""'], {'label': '"""Temperature"""'}), "(xs, ys, '-o', label='Temperature')\n", (650, 685), True, 'import matplotlib.pyplot as plt\n'), ((688, 730), 'matplotlib.pyplot.plot', 'plt.plot', (['xs', 'ds', '"""-o"""'], {'label': '"""Difference"""'}), "(xs, ds, '-o', label='Difference')\n", (696, 730), True, 'import matplotlib.pyplot as plt\n'), ((733, 743), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (741, 743), True, 'import matplotlib.pyplot as plt\n'), ((768, 798), 'matplotlib.pyplot.title', 'plt.title', (['"""Temperature in XX"""'], {}), "('Temperature in XX')\n", (777, 798), True, 'import matplotlib.pyplot as plt\n'), ((801, 820), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Month"""'], {}), "('Month')\n", (811, 820), True, 'import matplotlib.pyplot as plt\n'), ((823, 852), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Temperature (C)"""'], {}), "('Temperature (C)')\n", (833, 852), True, 'import matplotlib.pyplot as plt\n'), ((856, 866), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (864, 866), True, 'import matplotlib.pyplot as plt\n'), ((1128, 1169), 'numpy.array', 'np.array', (['[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]'], {}), '([1, 2, 3, 4, 5, 6, 7, 8, 9, 10])\n', (1136, 1169), True, 'import numpy as np, wx\n'), ((1168, 1232), 'numpy.array', 'np.array', (['[10, -9, 8, -7, 6, -5, 4, -3, 2, -1]'], {'dtype': 'np.float32'}), '([10, -9, 8, -7, 6, -5, 4, -3, 2, -1], dtype=np.float32)\n', (1176, 1232), True, 'import numpy as np, wx\n'), ((1231, 1274), 'scipy.ndimage.convolve1d', 'ndimg.convolve1d', (['ys', '[1 / 3, 1 / 3, 1 / 3]'], {}), '(ys, [1 / 3, 1 / 3, 1 / 3])\n', (1247, 1274), True, 'import scipy.ndimage as ndimg\n'), ((1281, 1318), 'matplotlib.pyplot.plot', 'plt.plot', (['xs', 'ys', '"""-o"""'], {'label': '"""Shake"""'}), "(xs, ys, '-o', label='Shake')\n", (1289, 1318), True, 'import matplotlib.pyplot as plt\n'), ((1321, 1360), 'matplotlib.pyplot.plot', 'plt.plot', (['xs', 'ds', '"""-o"""'], {'label': '"""Damping"""'}), "(xs, ds, '-o', label='Damping')\n", (1329, 1360), True, 'import matplotlib.pyplot as plt\n'), ((1363, 1373), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (1371, 1373), True, 'import matplotlib.pyplot as plt\n'), ((1398, 1424), 'matplotlib.pyplot.title', 'plt.title', (['"""Shake Damping"""'], {}), "('Shake Damping')\n", (1407, 1424), True, 'import matplotlib.pyplot as plt\n'), ((1427, 1445), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time"""'], {}), "('Time')\n", (1437, 1445), True, 'import matplotlib.pyplot as plt\n'), ((1448, 1471), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Amplitude"""'], {}), "('Amplitude')\n", (1458, 1471), True, 
'import matplotlib.pyplot as plt\n'), ((1474, 1484), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1482, 1484), True, 'import matplotlib.pyplot as plt\n'), ((1691, 1732), 'numpy.array', 'np.array', (['[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]'], {}), '([1, 2, 3, 4, 5, 6, 7, 8, 9, 10])\n', (1699, 1732), True, 'import numpy as np, wx\n'), ((1731, 1799), 'numpy.array', 'np.array', (['[90, 88, 93, 95, 91, 70, 89, 92, 94, 89]'], {'dtype': 'np.float32'}), '([90, 88, 93, 95, 91, 70, 89, 92, 94, 89], dtype=np.float32)\n', (1739, 1799), True, 'import numpy as np, wx\n'), ((1798, 1841), 'scipy.ndimage.convolve1d', 'ndimg.convolve1d', (['ys', '[1 / 3, 1 / 3, 1 / 3]'], {}), '(ys, [1 / 3, 1 / 3, 1 / 3])\n', (1814, 1841), True, 'import scipy.ndimage as ndimg\n'), ((1848, 1893), 'matplotlib.pyplot.plot', 'plt.plot', (['xs', 'ys', '"""-o"""'], {'label': '"""Psychological"""'}), "(xs, ys, '-o', label='Psychological')\n", (1856, 1893), True, 'import matplotlib.pyplot as plt\n'), ((1896, 1935), 'matplotlib.pyplot.plot', 'plt.plot', (['xs', 'ds', '"""-o"""'], {'label': '"""Inertia"""'}), "(xs, ds, '-o', label='Inertia')\n", (1904, 1935), True, 'import matplotlib.pyplot as plt\n'), ((1938, 1948), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (1946, 1948), True, 'import matplotlib.pyplot as plt\n'), ((1973, 2007), 'matplotlib.pyplot.title', 'plt.title', (['"""Psychological Inertia"""'], {}), "('Psychological Inertia')\n", (1982, 2007), True, 'import matplotlib.pyplot as plt\n'), ((2010, 2028), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time"""'], {}), "('Time')\n", (2020, 2028), True, 'import matplotlib.pyplot as plt\n'), ((2031, 2050), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Score"""'], {}), "('Score')\n", (2041, 2050), True, 'import matplotlib.pyplot as plt\n'), ((2053, 2063), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2061, 2063), True, 'import matplotlib.pyplot as plt\n'), ((2315, 2345), 'numpy.exp', 'np.exp', (['(-(x ** 2 + y ** 2) / 1)'], {}), '(-(x ** 2 + y ** 2) / 1)\n', (2321, 2345), True, 'import numpy as np, wx\n'), ((2352, 2364), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (2362, 2364), True, 'import matplotlib.pyplot as plt\n'), ((2445, 2475), 'numpy.exp', 'np.exp', (['(-(x ** 2 + y ** 2) / 4)'], {}), '(-(x ** 2 + y ** 2) / 4)\n', (2451, 2475), True, 'import numpy as np, wx\n'), ((2482, 2494), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (2492, 2494), True, 'import matplotlib.pyplot as plt\n'), ((2571, 2581), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2579, 2581), True, 'import matplotlib.pyplot as plt\n'), ((2686, 2698), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (2696, 2698), True, 'import matplotlib.pyplot as plt\n'), ((2705, 2727), 'numpy.linspace', 'np.linspace', (['(-3)', '(3)', '(50)'], {}), '(-3, 3, 50)\n', (2716, 2727), True, 'import numpy as np, wx\n'), ((2732, 2747), 'numpy.exp', 'np.exp', (['(-x ** 2)'], {}), '(-x ** 2)\n', (2738, 2747), True, 'import numpy as np, wx\n'), ((2780, 2809), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'y'], {'label': '"""Gauss"""'}), "(x, y, label='Gauss')\n", (2788, 2809), True, 'import matplotlib.pyplot as plt\n'), ((2812, 2845), 'matplotlib.pyplot.plot', 'plt.plot', (['x', '(-dy)'], {'label': '"""Gauss\'\'"""'}), '(x, -dy, label="Gauss\'\'")\n', (2820, 2845), True, 'import matplotlib.pyplot as plt\n'), ((2848, 2858), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (2856, 2858), True, 'import matplotlib.pyplot as plt\n'), ((2861, 2873), 
'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (2871, 2873), True, 'import matplotlib.pyplot as plt\n'), ((2986, 2998), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (2996, 2998), True, 'import matplotlib.pyplot as plt\n'), ((3076, 3086), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3084, 3086), True, 'import matplotlib.pyplot as plt\n'), ((3195, 3207), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (3205, 3207), True, 'import matplotlib.pyplot as plt\n'), ((3214, 3236), 'numpy.linspace', 'np.linspace', (['(-3)', '(3)', '(50)'], {}), '(-3, 3, 50)\n', (3225, 3236), True, 'import numpy as np, wx\n'), ((3241, 3256), 'numpy.exp', 'np.exp', (['(-x ** 2)'], {}), '(-x ** 2)\n', (3247, 3256), True, 'import numpy as np, wx\n'), ((3282, 3315), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'y'], {'label': '"""sigma = 1"""'}), "(x, y, label='sigma = 1')\n", (3290, 3315), True, 'import matplotlib.pyplot as plt\n'), ((3318, 3352), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'yy'], {'label': '"""sigma = 2"""'}), "(x, yy, label='sigma = 2')\n", (3326, 3352), True, 'import matplotlib.pyplot as plt\n'), ((3356, 3406), 'matplotlib.pyplot.plot', 'plt.plot', (['x', '(y - yy)', '"""r"""'], {'lw': '(3)', 'label': '"""Difference"""'}), "(x, y - yy, 'r', lw=3, label='Difference')\n", (3364, 3406), True, 'import matplotlib.pyplot as plt\n'), ((3407, 3417), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (3415, 3417), True, 'import matplotlib.pyplot as plt\n'), ((3420, 3432), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (3430, 3432), True, 'import matplotlib.pyplot as plt\n'), ((3545, 3557), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (3555, 3557), True, 'import matplotlib.pyplot as plt\n'), ((3634, 3644), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3642, 3644), True, 'import matplotlib.pyplot as plt\n'), ((3759, 3784), 'numpy.linspace', 'np.linspace', (['(-10)', '(10)', '(300)'], {}), '(-10, 10, 300)\n', (3770, 3784), True, 'import numpy as np, wx\n'), ((3789, 3801), 'numpy.arctan', 'np.arctan', (['x'], {}), '(x)\n', (3798, 3801), True, 'import numpy as np, wx\n'), ((3817, 3847), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'nrows': '(2)', 'ncols': '(2)'}), '(nrows=2, ncols=2)\n', (3829, 3847), True, 'import matplotlib.pyplot as plt\n'), ((4280, 4290), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4288, 4290), True, 'import matplotlib.pyplot as plt\n'), ((4790, 4815), 'numpy.linspace', 'np.linspace', (['(-10)', '(10)', '(300)'], {}), '(-10, 10, 300)\n', (4801, 4815), True, 'import numpy as np, wx\n'), ((4820, 4832), 'numpy.arctan', 'np.arctan', (['x'], {}), '(x)\n', (4829, 4832), True, 'import numpy as np, wx\n'), ((4847, 4877), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'nrows': '(2)', 'ncols': '(2)'}), '(nrows=2, ncols=2)\n', (4859, 4877), True, 'import matplotlib.pyplot as plt\n'), ((4923, 4953), 'scipy.ndimage.gaussian_filter1d', 'ndimg.gaussian_filter1d', (['y', '(30)'], {}), '(y, 30)\n', (4946, 4953), True, 'import scipy.ndimage as ndimg\n'), ((5344, 5354), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (5352, 5354), True, 'import matplotlib.pyplot as plt\n'), ((2753, 2768), 'numpy.exp', 'np.exp', (['(-x ** 2)'], {}), '(-x ** 2)\n', (2759, 2768), True, 'import numpy as np, wx\n'), ((3262, 3281), 'numpy.exp', 'np.exp', (['(-x ** 2 / 4)'], {}), '(-x ** 2 / 4)\n', (3268, 3281), True, 'import numpy as np, wx\n'), ((3477, 3507), 'numpy.exp', 'np.exp', (['(-(x ** 2 + y ** 2) / 
1)'], {}), '(-(x ** 2 + y ** 2) / 1)\n', (3483, 3507), True, 'import numpy as np, wx\n'), ((746, 755), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (753, 755), True, 'import matplotlib.pyplot as plt\n'), ((1376, 1385), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (1383, 1385), True, 'import matplotlib.pyplot as plt\n'), ((1951, 1960), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (1958, 1960), True, 'import matplotlib.pyplot as plt\n'), ((2929, 2953), 'numpy.exp', 'np.exp', (['(-y ** 2 - x ** 2)'], {}), '(-y ** 2 - x ** 2)\n', (2935, 2953), True, 'import numpy as np, wx\n'), ((2959, 2983), 'numpy.exp', 'np.exp', (['(-x ** 2 - y ** 2)'], {}), '(-x ** 2 - y ** 2)\n', (2965, 2983), True, 'import numpy as np, wx\n'), ((3506, 3536), 'numpy.exp', 'np.exp', (['(-(x ** 2 + y ** 2) / 4)'], {}), '(-(x ** 2 + y ** 2) / 4)\n', (3512, 3536), True, 'import numpy as np, wx\n'), ((4326, 4342), 'numpy.ones', 'np.ones', (['(30, 1)'], {}), '((30, 1))\n', (4333, 4342), True, 'import numpy as np, wx\n'), ((4409, 4425), 'numpy.ones', 'np.ones', (['(30, 1)'], {}), '((30, 1))\n', (4416, 4425), True, 'import numpy as np, wx\n'), ((4515, 4531), 'numpy.ones', 'np.ones', (['(30, 1)'], {}), '((30, 1))\n', (4522, 4531), True, 'import numpy as np, wx\n'), ((4627, 4643), 'numpy.ones', 'np.ones', (['(30, 1)'], {}), '((30, 1))\n', (4634, 4643), True, 'import numpy as np, wx\n'), ((5388, 5404), 'numpy.ones', 'np.ones', (['(30, 1)'], {}), '((30, 1))\n', (5395, 5404), True, 'import numpy as np, wx\n'), ((5468, 5484), 'numpy.ones', 'np.ones', (['(30, 1)'], {}), '((30, 1))\n', (5475, 5484), True, 'import numpy as np, wx\n'), ((5555, 5571), 'numpy.ones', 'np.ones', (['(30, 1)'], {}), '((30, 1))\n', (5562, 5571), True, 'import numpy as np, wx\n'), ((5659, 5675), 'numpy.ones', 'np.ones', (['(30, 1)'], {}), '((30, 1))\n', (5666, 5675), True, 'import numpy as np, wx\n')]
|
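All of the plugins above reduce to scipy.ndimage.convolve1d with small hand-written kernels: [0, 1, -1] as the difference kernel in the Temperature demo and [1/3, 1/3, 1/3] as the moving-average kernel in the Shake and Inertia demos. The same calls, stripped of the plotting and image display:

import numpy as np
import scipy.ndimage as ndimg

# Monthly "temperature" series from the Temperature demo above.
ys = np.array([1, 2, 1, 2, 2, 3, 8, 9, 8, 10, 9, 10], dtype=np.float32)

diff = ndimg.convolve1d(ys, [0, 1, -1])        # neighbouring-value difference
smooth = ndimg.convolve1d(ys, [1/3, 1/3, 1/3])  # 3-point moving average
print(diff)
print(smooth)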
from api_keys import CENSUS_KEY
import json
import requests
def getCensusResponse(table_url,get_ls,geo):
'''
Concatenates url string and returns response from census api query
input:
table_url (str): census api table url
get_ls (ls): list of tables to get data from
geo (str): geographic area and filter
output:
response (requests.response): api response
'''
get = 'NAME,' + ",".join(get_ls)
url = f'{table_url}get={get}&for={geo}&key={CENSUS_KEY}'
response = requests.get(url)
return(response)
def searchTable(table_json_ls, keyword_ls=list(), filter_function_ls=list()):
'''
Filters variable tables by keyword and filter
input:
table_json_ls (response.json() object): list of lists from census variable table api
keyword_ls (list): list of keyword strings
keyword filter applied to the third element of the input list (concept column)
filter_function_ls (list): list of functions that filter table_json_ls with filter method
output:
return_json_ls (list): list, same format as table_json_ls, filtered
'''
#verifies parameters are lists
assert (type(table_json_ls)==type(keyword_ls)==type(filter_function_ls)==list), "searchTable Parameters must be lists"
return_json_ls = list()
#runs filter for each function in filter_function_ls
for f in filter_function_ls:
table_json_ls = list(filter(f, table_json_ls))
#adds rows with keyword(s) in concept column to return_json_ls
for d in table_json_ls:
try:
for k in keyword_ls:
#d[2] is the concept column, d[1] is the label column
if k.lower() in d[2].lower() or k.lower() in d[1].lower():
continue
else:
break
else:
return_json_ls.append(d)
except:
continue
return return_json_ls
|
[
"requests.get"
] |
[((525, 542), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (537, 542), False, 'import requests\n')]
|
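A usage sketch for the two helpers above. It assumes CENSUS_KEY is importable as in the module itself; the table URL, variable ID, and geography string are placeholders in the Census API's general format rather than values taken from this file.

# Placeholder endpoint, variable, and geography -- assumptions for illustration only.
table_url = "https://api.census.gov/data/2019/acs/acs5?"
get_ls = ["B01001_001E"]   # hypothetical variable ID
geo = "state:*"            # hypothetical geography filter

response = getCensusResponse(table_url, get_ls, geo)
if response.ok:
    rows = response.json()  # first row is the header, the rest are data rows
    print(rows[0])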
from django.apps import AppConfig
from django.utils.translation import ugettext, ugettext_lazy as _
from pretix import __version__ as version
class BadgesApp(AppConfig):
name = 'pretix.plugins.badges'
verbose_name = _("Badges")
class PretixPluginMeta:
name = _("Badges")
author = _("the pretix team")
version = version
category = "FEATURE"
description = _("This plugin allows you to generate badges or name tags for your attendees.")
def ready(self):
from . import signals # NOQA
def installed(self, event):
if not event.badge_layouts.exists():
event.badge_layouts.create(
name=ugettext('Default'),
default=True,
)
default_app_config = 'pretix.plugins.badges.BadgesApp'
|
[
"django.utils.translation.ugettext",
"django.utils.translation.ugettext_lazy"
] |
[((227, 238), 'django.utils.translation.ugettext_lazy', '_', (['"""Badges"""'], {}), "('Badges')\n", (228, 238), True, 'from django.utils.translation import ugettext, ugettext_lazy as _\n'), ((283, 294), 'django.utils.translation.ugettext_lazy', '_', (['"""Badges"""'], {}), "('Badges')\n", (284, 294), True, 'from django.utils.translation import ugettext, ugettext_lazy as _\n'), ((312, 332), 'django.utils.translation.ugettext_lazy', '_', (['"""the pretix team"""'], {}), "('the pretix team')\n", (313, 332), True, 'from django.utils.translation import ugettext, ugettext_lazy as _\n'), ((410, 489), 'django.utils.translation.ugettext_lazy', '_', (['"""This plugin allows you to generate badges or name tags for your attendees."""'], {}), "('This plugin allows you to generate badges or name tags for your attendees.')\n", (411, 489), True, 'from django.utils.translation import ugettext, ugettext_lazy as _\n'), ((689, 708), 'django.utils.translation.ugettext', 'ugettext', (['"""Default"""'], {}), "('Default')\n", (697, 708), False, 'from django.utils.translation import ugettext, ugettext_lazy as _\n')]
|
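The plugin above uses the standard Django AppConfig idiom of importing its signal handlers inside ready(), so registration happens exactly once when the app is loaded. A generic sketch of that idiom outside pretix (the app label and module path are made up):

from django.apps import AppConfig

class ShopConfig(AppConfig):
    name = "myproject.shop"      # assumed dotted path of the app
    verbose_name = "Shop"

    def ready(self):
        # Importing the module registers its @receiver-decorated handlers.
        from . import signals  # noqa: F401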
# coding: utf-8
"""
Shutterstock API Reference
The Shutterstock API provides access to Shutterstock's library of media, as well as information about customers' accounts and the contributors that provide the media. # noqa: E501
OpenAPI spec version: 1.0.11
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class LicenseRequestMetadata(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'customer_id': 'str',
'geo_location': 'str',
'number_viewed': 'str',
'search_term': 'str'
}
attribute_map = {
'customer_id': 'customer_ID',
'geo_location': 'geo_location',
'number_viewed': 'number_viewed',
'search_term': 'search_term'
}
def __init__(self, customer_id=None, geo_location=None, number_viewed=None, search_term=None): # noqa: E501
"""LicenseRequestMetadata - a model defined in Swagger""" # noqa: E501
self._customer_id = None
self._geo_location = None
self._number_viewed = None
self._search_term = None
self.discriminator = None
if customer_id is not None:
self.customer_id = customer_id
if geo_location is not None:
self.geo_location = geo_location
if number_viewed is not None:
self.number_viewed = number_viewed
if search_term is not None:
self.search_term = search_term
@property
def customer_id(self):
"""Gets the customer_id of this LicenseRequestMetadata. # noqa: E501
The ID of a revenue-sharing partner's customer # noqa: E501
:return: The customer_id of this LicenseRequestMetadata. # noqa: E501
:rtype: str
"""
return self._customer_id
@customer_id.setter
def customer_id(self, customer_id):
"""Sets the customer_id of this LicenseRequestMetadata.
The ID of a revenue-sharing partner's customer # noqa: E501
:param customer_id: The customer_id of this LicenseRequestMetadata. # noqa: E501
:type: str
"""
self._customer_id = customer_id
@property
def geo_location(self):
"""Gets the geo_location of this LicenseRequestMetadata. # noqa: E501
The customer's location # noqa: E501
:return: The geo_location of this LicenseRequestMetadata. # noqa: E501
:rtype: str
"""
return self._geo_location
@geo_location.setter
def geo_location(self, geo_location):
"""Sets the geo_location of this LicenseRequestMetadata.
The customer's location # noqa: E501
:param geo_location: The geo_location of this LicenseRequestMetadata. # noqa: E501
:type: str
"""
self._geo_location = geo_location
@property
def number_viewed(self):
"""Gets the number_viewed of this LicenseRequestMetadata. # noqa: E501
How many pieces of media the customer viewed # noqa: E501
:return: The number_viewed of this LicenseRequestMetadata. # noqa: E501
:rtype: str
"""
return self._number_viewed
@number_viewed.setter
def number_viewed(self, number_viewed):
"""Sets the number_viewed of this LicenseRequestMetadata.
How many pieces of media the customer viewed # noqa: E501
:param number_viewed: The number_viewed of this LicenseRequestMetadata. # noqa: E501
:type: str
"""
self._number_viewed = number_viewed
@property
def search_term(self):
"""Gets the search_term of this LicenseRequestMetadata. # noqa: E501
The search term that the customer used # noqa: E501
:return: The search_term of this LicenseRequestMetadata. # noqa: E501
:rtype: str
"""
return self._search_term
@search_term.setter
def search_term(self, search_term):
"""Sets the search_term of this LicenseRequestMetadata.
The search term that the customer used # noqa: E501
:param search_term: The search_term of this LicenseRequestMetadata. # noqa: E501
:type: str
"""
self._search_term = search_term
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(LicenseRequestMetadata, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, LicenseRequestMetadata):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
[
"six.iteritems"
] |
[((4744, 4777), 'six.iteritems', 'six.iteritems', (['self.swagger_types'], {}), '(self.swagger_types)\n', (4757, 4777), False, 'import six\n')]
|
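The generated model above is plain getters/setters plus to_dict/to_str helpers. A short sketch of how such a swagger model is typically instantiated and serialized; the field values are invented:

metadata = LicenseRequestMetadata(
    customer_id="12345",
    geo_location="US",
    number_viewed="10",
    search_term="mountain sunrise",
)

print(metadata.to_dict())
# -> {'customer_id': '12345', 'geo_location': 'US', 'number_viewed': '10', 'search_term': 'mountain sunrise'}

# __eq__ compares __dict__, so an identically populated instance compares equal.
same = LicenseRequestMetadata(customer_id="12345", geo_location="US",
                               number_viewed="10", search_term="mountain sunrise")
print(metadata == same)  # True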
import pandas as pd
import numpy as np
from texttable import Texttable
from cape_privacy.pandas import dtypes
from cape_privacy.pandas.transformations import NumericPerturbation
from cape_privacy.pandas.transformations import DatePerturbation
from cape_privacy.pandas.transformations import NumericRounding
from cape_privacy.pandas.transformations import Tokenizer
from faker import Faker
from anonympy.pandas import utils_pandas as _utils
from sklearn.decomposition import PCA
class dfAnonymizer(object):
"""
Initializes pandas DataFrame as a dfAnonymizer object.
Parameters:
----------
df: pandas DataFrame
Returns:
----------
dfAnonymizer object
Raises
----------
Exception:
* If ``df`` is not a DataFrame
See also
----------
dfAnonymizer.to_df : Return a DataFrame
Examples
----------
>>> from anonympy.pandas import dfAnonymizer
>>> from anonympy.pandas.utils_pandas import load_dataset
    Constructing a dfAnonymizer object:
>>> df = load_dataset()
>>> anonym = dfAnonymizer(df)
>>> anonym.to_df()
name age ... email ssn
0 Bruce 33 ... <EMAIL> 343554334
1 Tony 48 ... <EMAIL> 656564664
"""
def __init__(self,
df: pd.DataFrame):
if df.__class__.__name__ != "DataFrame":
raise Exception(f"{df} is not a pandas DataFrame.")
# Private Attributes
self._df = df.copy()
self._df2 = df.copy()
self._methods_applied = {}
self._synthetic_data = 'Synthetic Data'
self._tokenization = 'Tokenization'
self._numeric_perturbation = 'Numeric Perturbation'
self._datetime_perturbation = 'Datetime Perturbation'
self._round = 'Generalization - Rounding'
self._bin = 'Generalization - Binning'
self._drop = 'Column Suppression'
self._sample = 'Resampling'
self._PCA = 'PCA Masking'
self._email = 'Partial Masking'
# Public Attributes
self.anonymized_columns = []
self.columns = self._df.columns.tolist()
self.unanonymized_columns = self.columns.copy()
self.numeric_columns = _utils.get_numeric_columns(self._df)
self.categorical_columns = _utils.get_categorical_columns(self._df)
self.datetime_columns = _utils.get_datetime_columns(self._df)
self._available_methods = _utils.av_methods
self._fake_methods = _utils.faker_methods
def __str__(self):
return self._info().draw()
def __repr__(self):
return self._info().draw()
def _dtype_checker(self, column: str):
'''
Returns the dtype of the column
Parameters
----------
column: str
Returns
----------
dtype: numpy dtype
'''
dtype = self._df[column].dtype
if dtype == np.float32:
return dtypes.Float
elif dtype == np.float64:
return dtypes.Double
elif dtype == np.byte:
return dtypes.Byte
elif dtype == np.short:
return dtypes.Short
elif dtype == np.int32:
return dtypes.Integer
elif dtype == np.int64:
return dtypes.Long
else:
return None
def anonymize(self,
methods=None,
locale=['en_US'],
seed=None,
inplace=True):
'''
Anonymize all columns using different methods for each dtype.
        If a dictionary is not provided, ``numeric_rounding`` is applied to
        numerical columns, ``categorical_fake`` and ``categorical_tokenization``
        to categorical columns, and ``datetime_noise`` or ``datetime_fake`` to
        datetime columns.
Parameters
----------
methods : Optional[Dict[str, str]], default None
{column_name: anonympy_method}. Call ``available_methods`` for list
of all methods.
locale : str or List[str], default ['en_US']
See https://faker.readthedocs.io/en/master/locales.html for all
faker's locales.
inplace : bool, default True
            If True, the changes will be applied to the `dfAnonymizer` object, else
output is returned.
seed : Optional[int], default None
Pass an integer for reproducible output across multiple function
calls.
Returns
----------
If inplace is False, pandas Series or DataFrame is returned
See Also
--------
dfAnonymizer.categorical_fake_auto : Replace values with synthetically
generated ones
Examples
----------
>>> from anonympy.pandas import dfAnonymizer
>>> from anonympy.pandas.utils_pandas import load_dataset, \
available_methods
>>> df = load_dataset()
>>> anonym = dfAnonymizer(df)
If methods None:
>>> anonym.anonymize(inplace = False)
name age ... email ssn
0 <NAME> 30 ... <EMAIL> 718-51-5290
1 <NAME> 50 ... <EMAIL> 684-81-8137
Passing a dict for specifying which methods to apply:
>>> available_methods('numeric')
numeric_noise numeric_binning numeric_masking numeric_rounding
>>> anonym.anonymize({'name':'categorical_fake',
... 'age':'numeric_noise',
... 'email':'categorical_email_masking',
... 'salary': 'numeric_rounding'}, inplace = False)
name age email salary
0 <NAME> 37 <EMAIL> 60000.0
1 <NAME> 52 <EMAIL> 50000.0
'''
if not methods:
if inplace:
# try synthetic data
self.categorical_fake_auto(locale=locale, seed=seed)
# if there are still columns left unanonymized
if self.unanonymized_columns:
for column in self.unanonymized_columns.copy():
if column in self.numeric_columns:
self.numeric_rounding(column)
elif column in self.categorical_columns:
self.categorical_tokenization(column,
key=str(seed))
elif column in self.datetime_columns:
self.datetime_noise(column, seed=seed)
else:
# try synthetic data
temp = self.categorical_fake_auto(locale=locale,
inplace=False,
seed=seed)
unanonymized = self.unanonymized_columns.copy()
if isinstance(temp, pd.DataFrame):
unanonymized = [column for column in unanonymized
if column not in temp.columns.to_list()]
elif isinstance(temp, pd.Series):
unanonymized.remove(temp.name)
temp = pd.DataFrame(temp)
else: # if temp is a already a dataframe
temp = pd.DataFrame()
if unanonymized:
for column in unanonymized:
if column in self.numeric_columns:
temp[column] = self.numeric_rounding(column,
inplace=False)
elif column in self.categorical_columns:
temp[column] = self.categorical_tokenization(
column,
inplace=False,
key=str(seed))
elif column in self.datetime_columns:
temp[column] = self.datetime_noise(column,
inplace=False,
seed=seed)
return temp
# if dictionary with methods was passed
else:
if inplace:
for key, value in methods.items():
# numeric
if value == "numeric_noise":
self.numeric_noise(key, seed=seed)
elif value == "numeric_binning":
self.numeric_binning(key)
elif value == "numeric_masking":
self.numeric_masking(key)
elif value == "numeric_rounding":
self.numeric_rounding(key)
# categorical
elif value == "categorical_fake":
self.categorical_fake(key, seed=seed)
elif value == "categorical_resampling":
self.categorical_resampling(key, seed=seed)
elif value == "categorical_tokenization":
self.categorical_tokenization(key, key=str(seed))
elif value == "categorical_email_masking":
self.categorical_email_masking(key)
# datetime
elif value == "datetime_fake":
self.datetime_fake(key, seed=seed)
elif value == "datetime_noise":
self.datetime_noise(key, seed=seed)
# drop
elif value == "column_suppression":
self.column_suppression(key)
else:
temp = pd.DataFrame()
for key, value in methods.items():
# numeric
if value == "numeric_noise":
temp[key] = self.numeric_noise(key,
inplace=False,
seed=seed)
elif value == "numeric_binning":
temp[key] = self.numeric_binning(key, inplace=False)
elif value == "numeric_masking":
temp[key] = self.numeric_masking(key, inplace=False)
elif value == "numeric_rounding":
temp[key] = self.numeric_rounding(key, inplace=False)
# categorical
elif value == "categorical_fake":
temp[key] = self.categorical_fake(key,
inplace=False,
seed=seed)
elif value == "categorical_resampling":
temp[key] = self.categorical_resampling(key,
inplace=False,
seed=seed)
elif value == "categorical_tokenization":
temp[key] = self.categorical_tokenization(
key,
inplace=False,
key=str(seed))
elif value == 'categorical_email_masking':
temp[key] = self.categorical_email_masking(
key,
inplace=False)
# datetime
elif value == "datetime_fake":
temp[key] = self.datetime_fake(key,
inplace=False,
seed=seed)
elif value == "datetime_noise":
temp[key] = self.datetime_noise(key,
inplace=False,
seed=seed)
# drop
elif value == "column_suppression":
pass
if len(temp.columns) > 1:
return temp
elif len(temp.columns) == 1:
return pd.Series(temp[temp.columns[0]])
def _fake_column(self,
column,
method,
locale=['en_US'],
seed=None,
inplace=True):
'''
Anonymize pandas Series object using synthetic data generator
Based on faker.Faker.
Parameters
----------
column : str
Column name which data will be substituted.
method : str
Method name. List of all methods ``fake_methods``.
locale : str or List[str], default ['en_US']
See https://faker.readthedocs.io/en/master/locales.html for all
faker's locales.
seed : Optional[int], default None
Pass an integer for reproducible output across multiple function
calls.
inplace : bool, default True
            If True, the changes will be applied to the `dfAnonymizer` object, else
output is returned.
Returns
----------
None if inplace is True, else pandas Series is returned
See also
----------
dfAnonymizer.categorical_fake : Replace values with synthetically
generated ones by specifying which methods to apply
'''
Faker.seed(seed)
fake = Faker(locale=locale)
method = getattr(fake, method)
faked = self._df[column].apply(lambda x: method())
if inplace:
if column in self.anonymized_columns:
print(f'`{column}` column already anonymized!')
else:
self._df[column] = faked
self.unanonymized_columns.remove(column)
self.anonymized_columns.append(column)
self._methods_applied[column] = self._synthetic_data
else:
return faked
def categorical_fake(self,
columns,
locale=['en_US'],
seed=None,
inplace=True):
'''
Replace data with synthetic data using faker's generator.
To see the list of all faker's methods, call ``fake_methods``.
If column name and faker's method are similar, then pass a string or a
list of strings for `columns` argument
Otherwise, pass a dictionary with column name as a key and faker's
method as a value `{col_name: fake_method}`.
Parameters
----------
columns : Union[str, List[str], Dict[str, str]]
            If a string or a list of strings is passed, the function will
            assume that the faker method name is the same as the column name.
locale : str or List[str], default ['en_US']
See https://faker.readthedocs.io/en/master/locales.html for all
faker's locales.
seed : Optional[int], default None
Pass an integer for reproducible output across multiple function
calls.
inplace : bool, default True
            If True, the changes will be applied to the `dfAnonymizer` object, else
output is returned.
Returns
----------
None if inplace is True, else pandas Series or pandas DataFrame is
returned
See Also
--------
dfAnonymizer.categorical_fake_auto : Replace values with synthetically
generated ones
Examples
----------
>>> from anonympy.pandas import dfAnonymizer
>>> from anonympy.pandas.utils_pandas import load_dataset
>>> df = load_dataset()
>>> anonym = dfAnonymizer(df)
If methods are not specified, locale Great Britain:
>>> anonym.categorical_fake(['name', 'email', 'ssn'],
... locale = 'en_GB',
... inplace = False)
name email ssn
0 <NAME> <EMAIL> ZZ 180372 T
1 <NAME> <EMAIL> ZZ780511T
Passing a specific method, locale Russia:
>>> fake_methods('n')
name, name_female, name_male, name_nonbinary, nic_handle,
nic_handles, null_boolean, numerify
>>> anonym.categorical_fake({'name': 'name_nonbinary', 'web': 'url'},
... locale = 'ru_RU',
... inplace = False)
name web
0 <NAME> https://shestakov.biz
1 <NAME> https://monetka.net
'''
# if a single column is passed (str)
if isinstance(columns, str) or (len(columns) == 1 and
isinstance(columns, list)):
if isinstance(columns, list):
columns = columns[0]
if inplace:
self._fake_column(columns,
columns,
inplace=True,
seed=seed,
locale=locale)
else:
return self._fake_column(columns,
columns,
inplace=False,
seed=seed,
locale=locale)
# if a list of columns is passed
elif isinstance(columns, list):
temp = pd.DataFrame()
if inplace:
for column in columns:
self._fake_column(column,
column,
inplace=True,
seed=seed,
locale=locale)
else:
for column in columns:
faked = self._fake_column(column,
column,
inplace=False,
seed=seed,
locale=locale)
temp[column] = faked
return temp
# if a dictionary with column name and method name is passed
elif isinstance(columns, dict):
temp = pd.DataFrame()
if inplace:
for column, method in columns.items():
self._fake_column(column,
method,
inplace=True,
seed=seed,
locale=locale)
else:
for column, method in columns.items():
faked = self._fake_column(column,
method,
inplace=False,
seed=seed,
locale=locale)
temp[column] = faked
if len(columns) == 1:
return temp[column]
else:
return temp
def categorical_fake_auto(self,
locale=['en_US'],
seed=None,
inplace=True):
'''
        Anonymize only those columns whose names appear in the ``fake_methods`` list.
Parameters
----------
locale : str or List[str], default ['en_US']
See https://faker.readthedocs.io/en/master/locales.html for all
faker's locales.
seed : Optional[int], default None
Pass an integer for reproducible output across multiple function
calls.
inplace : bool, default True
            If True, the changes will be applied to the `dfAnonymizer` object, else
output is returned.
Returns
----------
None if inplace = True, else an anonymized pandas Series or pandas
DataFrame
See also
----------
dfAnonymizer.categorical_fake : Replace values with synthetically
generated ones by specifying which methods to apply
Notes
----------
        In order to produce synthetic data, a column's name needs to match the
        name of a faker method.
        The function goes over all columns and, if a column name matches any
        faker method, replaces that column's values.
Examples
----------
>>> from anonympy.pandas import dfAnonymizer
>>> from anonympy.pandas.utils_pandas import load_dataset, fake_methods
Change column names so the function can understand which method to
apply:
>>> df = load_dataset()
>>> fake_methods('n')
name, name_female, name_male, name_nonbinary, nic_handle,
nic_handles, null_boolean, numerify
>>> df.rename(columns={'name': 'name_female'}, inplace = True)
>>> anonym = dfAnonymizer(df)
Calling the method without specifying which methods to apply, locale
Japan:
>>> anonym.categorical_fake_auto(local = 'ja_JP',
... inplace = False)
name_female email ssn
0 西村 あすか <EMAIL> 783-28-2531
1 山口 直子 <EMAIL> 477-58-9577
'''
temp = pd.DataFrame()
for column in self.columns:
func = column.strip().lower()
if func in _utils._fake_methods:
if inplace:
if column in self.anonymized_columns:
print(f'`{column}` column already anonymized!')
else:
self._fake_column(column,
func,
inplace=True,
seed=seed,
locale=locale)
else:
temp[column] = self._fake_column(column,
func,
inplace=False,
seed=seed,
locale=locale)
if not inplace:
if len(temp.columns) > 1:
return temp
elif len(temp.columns) == 1:
return pd.Series(temp[temp.columns[0]])
else:
return None
def numeric_noise(self,
columns,
MIN=-10,
MAX=10,
seed=None,
inplace=True):
'''
Add uniform random noise
Based on cape-privacy's NumericPerturbation function.
Mask a numeric pandas Series/DataFrame by adding uniform random
noise to each value. The amount of noise is drawn from
the interval [min, max).
Parameters
----------
columns : Union[str, List[str]]
Column name or a list of column names.
MIN : (int, float), default -10
The values generated will be greater then or equal to min.
MAX : (int, float), default 10
The values generated will be less than max.
seed : int, default None
To initialize the random generator.
inplace : bool, default True
            If True, the changes will be applied to the `dfAnonymizer` object,
else output is returned.
Returns
----------
ser: pandas Series or pandas DataFrame with uniform random noise added
See also
----------
dfAnonymizer.numeric_binning : Bin values into discrete intervals
dfAnonymizer.numeric_masking : Apply PCA masking to numeric values
dfAnonymizer.numeric_rounding : Round values to the given number
Examples
----------
>>> from anonympy.pandas import dfAnonymizer
>>> from anonympy.pandas.utils_pandas import load_dataset
>>> df = load_dataset()
>>> anonym = dfAnonymizer(df)
Applying numeric perturbation:
>>> anonym.numeric_noise('age', inplace = False)
0 29
1 48
dtype: int64
'''
# If a single column is passed
if isinstance(columns, str) or (len(columns) == 1 and
isinstance(columns, list)):
if isinstance(columns, list):
columns = columns[0]
dtype = self._dtype_checker(columns)
noise = NumericPerturbation(dtype=dtype,
min=MIN,
max=MAX,
seed=seed)
ser = noise(self._df[columns].copy()).astype(dtype)
if inplace:
if columns in self.anonymized_columns:
print(f'`{columns}` column already anonymized!')
else:
self._df[columns] = ser
self.anonymized_columns.append(columns)
self.unanonymized_columns.remove(columns)
self._methods_applied[columns] = self._numeric_perturbation
else:
return ser.astype(dtype)
# if a list of columns is passed
else:
temp = pd.DataFrame()
for column in columns:
dtype = self._dtype_checker(column)
noise = NumericPerturbation(dtype=dtype,
min=MIN,
max=MAX,
seed=seed)
ser = noise(self._df[column].copy()).astype(dtype)
if inplace:
if column in self.anonymized_columns:
print(f'`{column}` column already anonymized!')
else:
self._df[column] = ser
self.anonymized_columns.append(column)
self.unanonymized_columns.remove(column)
self._methods_applied[column] = self._numeric_perturbation # noqa: E501
else:
temp[column] = ser
if not inplace:
return temp
def datetime_noise(self,
columns,
frequency=("MONTH", "DAY"),
MIN=(-10, -5, -5),
MAX=(10, 5, 5),
seed=None,
inplace=True):
'''
Add uniform random noise to a Pandas series of timestamps
Based on cape-privacy's DatePerturbation function
Parameters
----------
columns : Union[str, List[str]]
Column name or a list of column names.
frequency : Union[str, Tuple[str]], default ("MONTH", "DAY")
One or more frequencies to perturbate
MIN : Union[int, Tuple[int, ...]], default (-10, -5, -5)
The values generated will be greater then or equal to min.
MAX : Union[int, Tuple[int, ...]], default (10, 5, 5)
The values generated will be less than max.
seed : int, default None
To initialize the random generator.
inplace : bool, default True
            If True, the changes will be applied to the `dfAnonymizer` object,
else output is returned.
Returns
----------
ser: pandas Series or pandas DataFrame
See also
----------
dfAnonymizer.datetime_fake : Replace values with synthetic dates
Examples
----------
>>> from anonympy.pandas import dfAnonymizer
>>> from anonympy.pandas.utils_pandas import load_dataset
>>> df = load_dataset()
>>> anonym = dfAnonymizer(df)
        Calling the method and specifying which frequencies to perturb:
>>> anonym.datetime_noise('birthdate',
frequency=('YEAR', 'MONTH', 'DAY'),
inplace = False)
0 1916-03-16
1 1971-04-24
Name: birthdate, dtype: datetime64[ns]
'''
# if a single column is passed
if isinstance(columns, str) or (len(columns) == 1 and
isinstance(columns, list)):
if isinstance(columns, list):
columns = columns[0]
noise = DatePerturbation(frequency=frequency,
min=MIN,
max=MAX,
seed=seed)
ser = noise(self._df[columns].copy())
if inplace:
if columns in self.anonymized_columns:
print(f'`{columns}` column already anonymized!')
else:
self._df[columns] = ser
self.anonymized_columns.append(columns)
self.unanonymized_columns.remove(columns)
self._methods_applied[columns] = self._datetime_perturbation # noqa: E501
else:
return ser
# if a list of columns is passed
else:
temp = pd.DataFrame()
for column in columns:
noise = DatePerturbation(frequency=frequency,
min=MIN,
max=MAX,
seed=seed)
ser = noise(self._df[column].copy())
if inplace:
if column in self.anonymized_columns:
print(f'`{column}` column already anonymized!')
else:
self._df[column] = ser
self.anonymized_columns.append(column)
self.unanonymized_columns.remove(column)
self._methods_applied[column] = self._datetime_perturbation # noqa: E501
else:
temp[column] = ser
if not inplace:
return temp
def numeric_rounding(self,
columns,
precision=None,
inplace=True):
'''
Round each value in the Pandas Series to the given number
Based on cape-privacy's NumericRounding.
Parameters
----------
columns : Union[str, List[str]]
Column name or a list of column names.
precision : int, default None
The number of digits.
inplace : bool, default True
            If True, the changes will be applied to the `dfAnonymizer` object,
else output is returned.
Returns
----------
pandas Series or pandas DataFrame if inplace = False, else None
See also
----------
dfAnonymizer.numeric_binning : Bin values into discrete intervals
dfAnonymizer.numeric_masking : Apply PCA masking
dfAnonymizer.numeric_noise : Add uniform random noise
Examples
----------
>>> from anonympy.pandas import dfAnonymizer
>>> from anonympy.pandas.utils_pandas import load_dataset
>>> df = load_dataset()
>>> anonym = dfAnonymizer(df)
Apply Numeric Rounding:
>>> anonym.numeric_rounding(['age', 'salary'], inplace = False)
age salary
0 30 60000.0
1 50 50000.0
'''
# if a single column is passed
if isinstance(columns, str) or (len(columns) == 1 and
isinstance(columns, list)):
if isinstance(columns, list):
columns = columns[0]
dtype = self._dtype_checker(columns)
if precision is None:
precision = len(str(int(self._df[columns].mean()))) - 1
rounding = NumericRounding(dtype=dtype, precision=-precision)
ser = rounding(self._df[columns].copy()).astype(dtype)
if inplace:
if columns in self.anonymized_columns:
print(f'`{columns}` column already anonymized!')
else:
self._df[columns] = ser
self.anonymized_columns.append(columns)
self.unanonymized_columns.remove(columns)
self._methods_applied[columns] = self._round
else:
return ser
# if a list of columns is passed
else:
temp = pd.DataFrame()
for column in columns:
dtype = self._dtype_checker(column)
precision = len(str(int(self._df[column].mean()))) - 1
rounding = NumericRounding(dtype=dtype, precision=-precision)
ser = rounding(self._df[column].copy())
if inplace:
if column in self.anonymized_columns:
print(f'`{column}` column already anonymized!')
else:
self._df[column] = ser
self.anonymized_columns.append(column)
self.unanonymized_columns.remove(column)
self._methods_applied[column] = self._round
else:
temp[column] = ser.astype(dtype)
if not inplace:
return temp
def numeric_masking(self,
columns,
inplace=True):
'''
Apply PCA masking to a column/columns
Based on sklearn's PCA function
Parameters
----------
columns : Union[str, List[str]]
Column name or a list of column names.
inplace : bool, default True
            If True, the changes will be applied to the `dfAnonymizer` object,
else output is returned.
Returns
----------
ser : pandas Series or pandas DataFrame
See also
----------
dfAnonymizer.numeric_binning : Bin values into discrete intervals
        dfAnonymizer.numeric_rounding : Round values to the given number of digits
        dfAnonymizer.numeric_noise : Add uniform random noise
Examples
----------
>>> from anonympy.pandas import dfAnonymizer
>>> from anonympy.pandas.utils_pandas import load_dataset
>>> df = load_dataset()
>>> anonym = dfAnonymizer(df)
Apply PCA Masking:
>>> num_cols = anonym.numeric_columns
>>> anonym.numeric_masking(num_cols, inplace = False)
age salary
0 -4954.900676 5.840671e-15
1 4954.900676 5.840671e-15
'''
# if a single column is passed
if isinstance(columns, str) or (len(columns) == 1 and
isinstance(columns, list)):
if isinstance(columns, list):
columns = columns[0]
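            # With a single column, a one-component PCA reduces to centering
            # the values around zero (up to sign), hiding the raw magnitudes.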
pca = PCA(n_components=1)
ser = pd.DataFrame(pca.fit_transform(self._df[[columns]]),
columns=[columns])
if inplace:
if columns in self.anonymized_columns:
print(f'`{columns}` column already anonymized!')
else:
self._df[columns] = ser[columns]
self.anonymized_columns.append(columns)
self.unanonymized_columns.remove(columns)
self._methods_applied[columns] = self._PCA
else:
return ser[columns]
# if a list of columns is passed
else:
if not inplace:
pca = PCA(n_components=len(columns))
return pd.DataFrame(pca.fit_transform(self._df[columns]),
columns=columns)
else:
for column in columns:
if column in self.anonymized_columns:
print(f'`{column}` column already anonymized!')
else:
self.anonymized_columns.append(column)
self.unanonymized_columns.remove(column)
self._methods_applied[column] = self._PCA
pca = PCA(n_components=len(columns))
self._df[columns] = pca.fit_transform(self._df[columns])
def categorical_tokenization(self,
columns,
max_token_len=10,
key=None,
inplace=True):
'''
Maps a string to a token (hexadecimal string) to obfuscate it.
Parameters
----------
columns : Union[str, List[str]]
Column name or a list of column names.
max_token_len : int, default 10
Control the token length.
key : str, default None
String or Byte String. If not specified, key will be set to a
random byte string.
inplace : bool, default True
            If True the changes will be applied to the `dfAnonymizer` object,
            else the output is returned.
Returns
----------
ser : pandas Series or pandas DataFrame
See also
----------
dfAnonymizer.categorical_fake : Replace values with synthetically
generated ones by specifying which methods to apply
dfAnonymizer.categorical_resampling : Resample values from the same
distribution
dfAnonymizer.categorical_email_masking : Apply partial masking to
emails
Examples
----------
>>> from anonympy.pandas import dfAnonymizer
>>> from anonympy.pandas.utils_pandas import load_dataset
>>> df = load_dataset()
>>> anonym = dfAnonymizer(df)
Passing only categorical columns:
>>> anonym.categorical_columns
['name', 'web', 'email', 'ssn']
>>> anonym.categorical_tokenization(['name', 'web', 'email', 'ssn'],
inplace = False)
name web email ssn
0 a6488532f8 f8516a7ce9 a07981a4d6 9285bc9cb7
1 f7231e5026 44dfa9af8e 25ca1a128b a7a16a7c7d
'''
# if a single column is passed
if isinstance(columns, str) or (len(columns) == 1 and
isinstance(columns, list)):
if isinstance(columns, list):
columns = columns[0]
tokenize = Tokenizer(max_token_len=max_token_len, key=key)
ser = tokenize(self._df[columns])
if inplace:
if columns in self.anonymized_columns:
print(f'`{columns}` column already anonymized!')
else:
self._df[columns] = ser
self.anonymized_columns.append(columns)
self.unanonymized_columns.remove(columns)
self._methods_applied[columns] = self._tokenization
else:
return ser
# if a list of columns is passed
else:
temp = pd.DataFrame()
for column in columns:
tokenize = Tokenizer(max_token_len=max_token_len, key=key)
ser = tokenize(self._df[column])
if inplace:
if column in self.anonymized_columns:
print(f'`{column}` column already anonymized!')
else:
self._df[column] = ser
self.anonymized_columns.append(column)
self.unanonymized_columns.remove(column)
self._methods_applied[column] = self._tokenization
else:
temp[column] = ser
if not inplace:
return temp
def _mask(self, s):
'''
Mask a single email
Parameters
----------
s : str
string to mask.
Returns
----------
masked : str
See also
----------
dfAnonymizer.categorical_email_masking : Apply partial masking to email
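        Examples
        ----------
        Illustrative only (the address below is made up); the output follows
        from the masking rule above:
        >>> anonym._mask('john@email.com')
        'j*****n@email.com'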
'''
lo = s.find('@')
if lo > 0:
masked = s[0] + '*****' + s[lo-1:]
return masked
else:
raise Exception('Invalid Email')
def categorical_email_masking(self,
columns,
inplace=True):
'''
Apply Partial Masking to emails.
Parameters
----------
columns: Union[str, List[str]]
Column name or a list of column names.
inplace: Optional[bool] = True
            If True the changes will be applied to the `dfAnonymizer` object,
            else the output is returned.
Returns
----------
ser : pandas Series or pandas DataFrame
See also
----------
dfAnonymizer.categorical_fake : Replace values with synthetically
generated ones by specifying which methods to apply
dfAnonymizer.categorical_resampling : Resample values from the same
distribution
dfAnonymizer.categorical_tokenization : Map a string to a token
Notes
----------
Applicable only to column with email strings.
Examples
----------
>>> from anonympy.pandas import dfAnonymizer
>>> from anonympy.pandas.utils_pandas import load_dataset
>>> df = load_dataset()
>>> anonym = dfAnonymizer(df)
Calling the method on email column:
>>> anonym.categorical_email_masking('email', inplace=False)
0 <EMAIL>
1 <EMAIL>
Name: email, dtype: object
'''
# if a single column is passed
if isinstance(columns, str) or (len(columns) == 1 and
isinstance(columns, list)):
if isinstance(columns, list):
columns = columns[0]
ser = self._df[columns].apply(lambda x: self._mask(x))
if inplace:
if columns in self.anonymized_columns:
print(f'`{columns}` column already anonymized!')
else:
self._df[columns] = ser
self.anonymized_columns.append(columns)
self.unanonymized_columns.remove(columns)
self._methods_applied[columns] = self._email
else:
return ser
# if a list of columns is passed
else:
temp = pd.DataFrame()
for column in columns:
ser = self._df[column].apply(lambda x: self._mask(x))
if inplace:
if column in self.anonymized_columns:
print(f'`{column}` column already anonymized!')
else:
self._df[column] = ser
self.anonymized_columns.append(column)
self.unanonymized_columns.remove(column)
self._methods_applied[column] = self._email
else:
temp[column] = ser
if not inplace:
return temp
def datetime_fake(self,
columns,
pattern='%Y-%m-%d',
end_datetime=None,
seed=None,
locale=['en_US'],
inplace=True):
'''
        Replace the column's values with synthetic dates between January 1,
        1970 and now.
        Based on Faker's `date()` method.
Parameters
----------
columns : Union[str, List[str]]
Column name or a list of column names.
pattern : str, default '%Y-%m-%d'
end_datetime : Union[datetime.date, datetime.datetime,
            datetime.timedelta, str, int, None], default None
        seed : int, default None
            Seed for `Faker`, to make the generated dates reproducible.
        locale : str or List[str], default ['en_US']
See https://faker.readthedocs.io/en/master/locales.html for all
faker's locales.
inplace : bool, default True
            If True the changes will be applied to the `dfAnonymizer` object,
            else the output is returned.
Returns
----------
ser : pandas Series or pandas DataFrame
See also
----------
dfAnonymizer.datetime_noise : Add uniform random noise to the column
Examples
----------
>>> from anonympy.pandas import dfAnonymizer
>>> from anonympy.pandas.utils_pandas import load_dataset
>>> df = load_dataset()
>>> anonym = dfAnonymizer(df)
Calling the method with specifying the datetime column
>>> anonym.datetime_fake('birthdate', inplace = False)
0 2018-04-09
1 2005-05-28
Name: birthdate, dtype: datetime64[ns]
'''
Faker.seed(seed)
fake = Faker(locale=locale)
# if a single column is passed
if isinstance(columns, str) or (len(columns) == 1 and
isinstance(columns, list)):
if isinstance(columns, list):
columns = columns[0]
ser = self._df[columns].apply(lambda x: pd.to_datetime(fake.date(
pattern=pattern,
end_datetime=end_datetime)))
if inplace:
if columns in self.anonymized_columns:
print(f'`{columns}` column already anonymized!')
else:
self._df[columns] = ser
self.anonymized_columns.append(columns)
self.unanonymized_columns.remove(columns)
self._methods_applied[columns] = self._synthetic_data
else:
return ser
# if a list of columns is passed
else:
temp = pd.DataFrame()
for column in columns:
ser = self._df[column].apply(
lambda x: pd.to_datetime(fake.date(
pattern=pattern,
end_datetime=end_datetime)))
if inplace:
if column in self.anonymized_columns:
print(f'`{column}` column already anonymized!')
else:
self._df[column] = ser
self.anonymized_columns.append(column)
self.unanonymized_columns.remove(column)
self._methods_applied[column] = self._synthetic_data
else:
temp[column] = ser
if not inplace:
return temp
def column_suppression(self,
columns,
inplace=True):
'''
Redact a column (drop)
Based on pandas `drop` method
Parameters
----------
columns : Union[str, List[str]]
Column name or a list of column names.
inplace : bool, default True
            If True the changes will be applied to the `dfAnonymizer` object,
            else the output is returned.
Returns
----------
ser : None if inplace = True, else pandas Series or pandas DataFrame
Examples
----------
>>> from anonympy.pandas import dfAnonymizer
>>> from anonympy.pandas.utils_pandas import load_dataset
>>> df = load_dataset()
>>> anonym = dfAnonymizer(df)
>>> anonym.to_df()
name age ... email ssn
0 Bruce 33 ... <EMAIL> 343554334
1 Tony 48 ... <EMAIL> 656564664
Dropping `ssn` column
>>> anonym.column_suppression('ssn', inplace = False)
name age ... web email # noqa: E501
0 Bruce 33 ... http://www.alandrosenburgcpapc.co.uk <EMAIL>
1 Tony 48 ... http://www.capgeminiamerica.co.uk <EMAIL>
'''
# if single column is passed
if isinstance(columns, str) or (len(columns) == 1 and
isinstance(columns, list)):
if isinstance(columns, list):
columns = columns[0]
if inplace:
if columns in self.anonymized_columns:
print(f'`{columns}` column already anonymized!')
else:
self._df.drop(columns, axis=1, inplace=True)
self.anonymized_columns.append(columns)
self.unanonymized_columns.remove(columns)
self._methods_applied[columns] = self._drop
else:
return self._df2.drop(columns, axis=1, inplace=False)
# if a list of columns is passed
else:
if inplace:
for column in columns:
if column in self.anonymized_columns:
print(f'`{column}` column already anonymized!')
else:
self._df.drop(column, axis=1, inplace=True)
self.anonymized_columns.append(column)
self.unanonymized_columns.remove(column)
self._methods_applied[column] = self._drop
else:
return self._df2.drop(columns, axis=1, inplace=False)
def numeric_binning(self,
columns,
bins=4,
inplace=True):
'''
Bin values into discrete intervals.
Based on pandas `cut` method
Parameters
----------
columns : Union[str, List[str]]
Column name or a list of column names.
bins : int, default 4
            The number of equal-width bins over the range of the column's values.
inplace : bool, default True
            If True the changes will be applied to the `dfAnonymizer` object,
            else the output is returned.
Returns
----------
ser : None if inplace = True, else pandas Series or pandas DataFrame
See also
----------
dfAnonymizer.numeric_noise : Add uniform random noise
dfAnonymizer.numeric_masking : Apply PCA masking to numeric values
dfAnonymizer.numeric_rounding : Round values to the given number
Examples
----------
>>> from anonympy.pandas import dfAnonymizer
>>> from anonympy.pandas.utils_pandas import load_dataset
>>> df = load_dataset()
>>> anonym = dfAnonymizer(df)
Call the method with specifying the number of bins:
>>> anonym.numeric_binning('age', bins = 2, inplace = False)
0 (33.0, 40.0]
1 (40.0, 48.0]
Name: age, dtype: category
'''
# if a single column is passed
if isinstance(columns, str) or (len(columns) == 1 and
isinstance(columns, list)):
if isinstance(columns, list):
columns = columns[0]
ser = pd.cut(self._df[columns], bins=bins, precision=0)
if inplace:
if columns in self.anonymized_columns:
print(f'`{columns}` column already anonymized!')
else:
self._df[columns] = ser
self.anonymized_columns.append(columns)
self.unanonymized_columns.remove(columns)
self._methods_applied[columns] = self._bin
else:
return ser
# if a list of columns is passed
else:
temp = pd.DataFrame()
for column in columns:
ser = pd.cut(self._df[column], bins=bins, precision=0)
if inplace:
if column in self.anonymized_columns:
print(f'`{column}` column already anonymized!')
else:
self._df[column] = ser
self.anonymized_columns.append(column)
self.unanonymized_columns.remove(column)
self._methods_applied[column] = self._bin
else:
temp[column] = ser
if not inplace:
return temp
def categorical_resampling(self,
columns,
seed=None,
inplace=True):
'''
        Resample values from the column's empirical distribution.
        Parameters
        ----------
        columns : Union[str, List[str]]
            Column name or a list of column names.
        seed : int, default None
            Seed for `numpy.random`, to make the resampling reproducible.
inplace : bool, default True
            If True the changes will be applied to the `dfAnonymizer` object,
            else the output is returned.
Returns
----------
ser : None if inplace = True, else pandas Series or pandas DataFrame
See also:
----------
dfAnonymizer.categorical_fake : Replace values with synthetically
generated ones by specifying which methods to apply
dfAnonymizer.categorical_email_masking : Apply partial masking to
email column
dfAnonymizer.categorical_tokenization : Map a string to a token
Notes
----------
This method should be used on categorical data with finite number of
unique elements.
Examples
----------
>>> from anonympy.pandas import dfAnonymizer
>>> from anonympy.pandas.utils_pandas import load_dataset
>>> df = load_dataset()
>>> anonym = dfAnonymizer(df)
>>> anonym.categorical_resampling('name', inplace =False)
0 Bruce
1 Bruce
dtype: object
'''
# if a single column is passed
np.random.seed(seed)
if isinstance(columns, str) or (len(columns) == 1 and
isinstance(columns, list)):
if isinstance(columns, list):
columns = columns[0]
counts = self._df[columns].value_counts(normalize=True)
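            # Draw replacement values i.i.d. from the column's empirical
            # distribution, so overall value frequencies are preserved on
            # average while individual rows are shuffled away.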
if inplace:
if columns in self.anonymized_columns:
print(f'`{columns}` column already anonymized!')
else:
self._df[columns] = np.random.choice(counts.index,
p=counts.values,
size=len(self._df))
self.anonymized_columns.append(columns)
self.unanonymized_columns.remove(columns)
self._methods_applied[columns] = self._sample
else:
return pd.Series(np.random.choice(counts.index,
p=counts.values,
size=len(self._df)))
# if a list of columns is passed
else:
temp = pd.DataFrame()
for column in columns:
counts = self._df[column].value_counts(normalize=True)
if inplace:
if column in self.anonymized_columns:
print(f'`{column}` column already anonymized!')
else:
self._df[column] = np.random.choice(counts.index,
p=counts.values,
size=len(self._df))
self.anonymized_columns.append(column)
self.unanonymized_columns.remove(column)
self._methods_applied[column] = self._sample
else:
temp[column] = np.random.choice(counts.index,
p=counts.values,
size=len(self._df))
if not inplace:
return temp
def _info(self):
'''
        Print a summary of the DataFrame:
        which columns have been anonymized and which haven't.
        Returns
        ----------
        t : Texttable
        See also
        ----------
        dfAnonymizer.info : Print a summary of the DataFrame
Examples
----------
>>> from anonympy.pandas import dfAnonymizer
>>> from anonympy.pandas.utils_pandas import load_dataset
>>> df = load_dataset()
>>> anonym = dfAnonymizer(df)
        This method is called when the `dfAnonymizer` instance is displayed
>>> anonym
+-------------------------------+
| Total number of columns: 7 |
+===============================+
| Anonymized Column -> Method: |
+-------------------------------+
| Unanonymized Columns: |
| - name |
| - age |
| - birthdate |
| - salary |
| - web |
| - email |
| - ssn |
+-------------------------------+
'''
t = Texttable(max_width=150)
header = f'Total number of columns: {self._df.shape[1]}'
row1 = 'Anonymized Column -> Method: '
for column in self.anonymized_columns:
row1 += '\n- ' + column + ' -> ' + \
self._methods_applied.get(column)
row2 = 'Unanonymized Columns: \n'
row2 += '\n'.join([f'- {i}' for i in self.unanonymized_columns])
t.add_rows([[header], [row1], [row2]])
return t
def info(self):
'''
        Print a summary of the DataFrame:
        which columns have been anonymized and with which methods.
        `status = 1` means the column has been anonymized and `status = 0`
        means it has not.
Returns
----------
None
Examples
----------
>>> from anonympy.pandas import dfAnonymizer
>>> from anonympy.pandas.utils_pandas import load_dataset
>>> df = load_dataset()
>>> anonym = dfAnonymizer(df)
>>> anonym.info()
+-----------+--------+--------+
| Column | Status | Method |
+===========+========+========+
| name | 0 | |
+-----------+--------+--------+
| age | 0 | |
+-----------+--------+--------+
| birthdate | 0 | |
+-----------+--------+--------+
| salary | 0 | |
+-----------+--------+--------+
| web | 0 | |
+-----------+--------+--------+
| email | 0 | |
+-----------+--------+--------+
| ssn | 0 | |
+-----------+--------+--------+
'''
t = Texttable(150)
t.header(['Column', 'Status', 'Type', 'Method'])
for i in range(len(self.columns)):
column = self.columns[i]
if column in self.anonymized_columns:
status = 1
method = self._methods_applied[column]
else:
status = 0
method = ''
if column in self.numeric_columns:
dtype = 'numeric'
elif column in self.categorical_columns:
dtype = 'categorical'
elif column in self.datetime_columns:
dtype = 'datetime'
else:
dtype = str(self._df[column].dtype)
row = [column, status, dtype, method]
t.add_row(row)
print(t.draw())
def to_df(self):
'''
Convert dfAnonymizer object back to pandas DataFrame
Returns
----------
DataFrame object
Examples
----------
>>> from anonympy.pandas import dfAnonymizer
>>> from anonympy.pandas.utils_pandas import load_dataset
>>> df = load_dataset()
>>> anonym = dfAnonymizer(df)
>>> anonym.to_df()
name age ... email ssn
0 Bruce 33 ... <EMAIL> 343554334
1 Tony 48 ... <EMAIL> 656564664
'''
return self._df.copy()
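
# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the library itself); it assumes the
# bundled `load_dataset` helper used throughout the docstrings above.
if __name__ == '__main__':
    from anonympy.pandas.utils_pandas import load_dataset

    df = load_dataset()
    anonym = dfAnonymizer(df)
    anonym.numeric_rounding('salary')
    anonym.categorical_email_masking('email')
    anonym.info()                 # which columns were anonymized, and how
    print(anonym.to_df().head())  # the anonymized DataFrame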
|
[
"pandas.DataFrame",
"cape_privacy.pandas.transformations.DatePerturbation",
"numpy.random.seed",
"faker.Faker",
"faker.Faker.seed",
"anonympy.pandas.utils_pandas.get_datetime_columns",
"anonympy.pandas.utils_pandas.get_categorical_columns",
"anonympy.pandas.utils_pandas.get_numeric_columns",
"pandas.cut",
"cape_privacy.pandas.transformations.NumericPerturbation",
"sklearn.decomposition.PCA",
"pandas.Series",
"cape_privacy.pandas.transformations.Tokenizer",
"texttable.Texttable",
"cape_privacy.pandas.transformations.NumericRounding"
] |
[((2237, 2273), 'anonympy.pandas.utils_pandas.get_numeric_columns', '_utils.get_numeric_columns', (['self._df'], {}), '(self._df)\n', (2263, 2273), True, 'from anonympy.pandas import utils_pandas as _utils\n'), ((2309, 2349), 'anonympy.pandas.utils_pandas.get_categorical_columns', '_utils.get_categorical_columns', (['self._df'], {}), '(self._df)\n', (2339, 2349), True, 'from anonympy.pandas import utils_pandas as _utils\n'), ((2382, 2419), 'anonympy.pandas.utils_pandas.get_datetime_columns', '_utils.get_datetime_columns', (['self._df'], {}), '(self._df)\n', (2409, 2419), True, 'from anonympy.pandas import utils_pandas as _utils\n'), ((13962, 13978), 'faker.Faker.seed', 'Faker.seed', (['seed'], {}), '(seed)\n', (13972, 13978), False, 'from faker import Faker\n'), ((13994, 14014), 'faker.Faker', 'Faker', ([], {'locale': 'locale'}), '(locale=locale)\n', (13999, 14014), False, 'from faker import Faker\n'), ((22107, 22121), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (22119, 22121), True, 'import pandas as pd\n'), ((45869, 45885), 'faker.Faker.seed', 'Faker.seed', (['seed'], {}), '(seed)\n', (45879, 45885), False, 'from faker import Faker\n'), ((45901, 45921), 'faker.Faker', 'Faker', ([], {'locale': 'locale'}), '(locale=locale)\n', (45906, 45921), False, 'from faker import Faker\n'), ((54921, 54941), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (54935, 54941), True, 'import numpy as np\n'), ((58339, 58363), 'texttable.Texttable', 'Texttable', ([], {'max_width': '(150)'}), '(max_width=150)\n', (58348, 58363), False, 'from texttable import Texttable\n'), ((60062, 60076), 'texttable.Texttable', 'Texttable', (['(150)'], {}), '(150)\n', (60071, 60076), False, 'from texttable import Texttable\n'), ((25382, 25443), 'cape_privacy.pandas.transformations.NumericPerturbation', 'NumericPerturbation', ([], {'dtype': 'dtype', 'min': 'MIN', 'max': 'MAX', 'seed': 'seed'}), '(dtype=dtype, min=MIN, max=MAX, seed=seed)\n', (25401, 25443), False, 'from cape_privacy.pandas.transformations import NumericPerturbation\n'), ((26178, 26192), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (26190, 26192), True, 'import pandas as pd\n'), ((29307, 29373), 'cape_privacy.pandas.transformations.DatePerturbation', 'DatePerturbation', ([], {'frequency': 'frequency', 'min': 'MIN', 'max': 'MAX', 'seed': 'seed'}), '(frequency=frequency, min=MIN, max=MAX, seed=seed)\n', (29323, 29373), False, 'from cape_privacy.pandas.transformations import DatePerturbation\n'), ((30086, 30100), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (30098, 30100), True, 'import pandas as pd\n'), ((32784, 32834), 'cape_privacy.pandas.transformations.NumericRounding', 'NumericRounding', ([], {'dtype': 'dtype', 'precision': '(-precision)'}), '(dtype=dtype, precision=-precision)\n', (32799, 32834), False, 'from cape_privacy.pandas.transformations import NumericRounding\n'), ((33423, 33437), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (33435, 33437), True, 'import pandas as pd\n'), ((35844, 35863), 'sklearn.decomposition.PCA', 'PCA', ([], {'n_components': '(1)'}), '(n_components=1)\n', (35847, 35863), False, 'from sklearn.decomposition import PCA\n'), ((39436, 39483), 'cape_privacy.pandas.transformations.Tokenizer', 'Tokenizer', ([], {'max_token_len': 'max_token_len', 'key': 'key'}), '(max_token_len=max_token_len, key=key)\n', (39445, 39483), False, 'from cape_privacy.pandas.transformations import Tokenizer\n'), ((40058, 40072), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (40070, 40072), True, 
'import pandas as pd\n'), ((43539, 43553), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (43551, 43553), True, 'import pandas as pd\n'), ((46912, 46926), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (46924, 46926), True, 'import pandas as pd\n'), ((52186, 52235), 'pandas.cut', 'pd.cut', (['self._df[columns]'], {'bins': 'bins', 'precision': '(0)'}), '(self._df[columns], bins=bins, precision=0)\n', (52192, 52235), True, 'import pandas as pd\n'), ((52756, 52770), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (52768, 52770), True, 'import pandas as pd\n'), ((56095, 56109), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (56107, 56109), True, 'import pandas as pd\n'), ((9956, 9970), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (9968, 9970), True, 'import pandas as pd\n'), ((18114, 18128), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (18126, 18128), True, 'import pandas as pd\n'), ((26304, 26365), 'cape_privacy.pandas.transformations.NumericPerturbation', 'NumericPerturbation', ([], {'dtype': 'dtype', 'min': 'MIN', 'max': 'MAX', 'seed': 'seed'}), '(dtype=dtype, min=MIN, max=MAX, seed=seed)\n', (26323, 26365), False, 'from cape_privacy.pandas.transformations import NumericPerturbation\n'), ((30161, 30227), 'cape_privacy.pandas.transformations.DatePerturbation', 'DatePerturbation', ([], {'frequency': 'frequency', 'min': 'MIN', 'max': 'MAX', 'seed': 'seed'}), '(frequency=frequency, min=MIN, max=MAX, seed=seed)\n', (30177, 30227), False, 'from cape_privacy.pandas.transformations import DatePerturbation\n'), ((33624, 33674), 'cape_privacy.pandas.transformations.NumericRounding', 'NumericRounding', ([], {'dtype': 'dtype', 'precision': '(-precision)'}), '(dtype=dtype, precision=-precision)\n', (33639, 33674), False, 'from cape_privacy.pandas.transformations import NumericRounding\n'), ((40136, 40183), 'cape_privacy.pandas.transformations.Tokenizer', 'Tokenizer', ([], {'max_token_len': 'max_token_len', 'key': 'key'}), '(max_token_len=max_token_len, key=key)\n', (40145, 40183), False, 'from cape_privacy.pandas.transformations import Tokenizer\n'), ((52829, 52877), 'pandas.cut', 'pd.cut', (['self._df[column]'], {'bins': 'bins', 'precision': '(0)'}), '(self._df[column], bins=bins, precision=0)\n', (52835, 52877), True, 'import pandas as pd\n'), ((18980, 18994), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (18992, 18994), True, 'import pandas as pd\n'), ((23190, 23222), 'pandas.Series', 'pd.Series', (['temp[temp.columns[0]]'], {}), '(temp[temp.columns[0]])\n', (23199, 23222), True, 'import pandas as pd\n'), ((7305, 7323), 'pandas.DataFrame', 'pd.DataFrame', (['temp'], {}), '(temp)\n', (7317, 7323), True, 'import pandas as pd\n'), ((7410, 7424), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (7422, 7424), True, 'import pandas as pd\n'), ((12688, 12720), 'pandas.Series', 'pd.Series', (['temp[temp.columns[0]]'], {}), '(temp[temp.columns[0]])\n', (12697, 12720), True, 'import pandas as pd\n')]
|
import matplotlib.pyplot as plt
import torch
# Two-class classification example: two Gaussian clusters, one label per cluster
data_shape = torch.ones(400, 2)
x0 = torch.normal(2 * data_shape, 1)
y0 = torch.zeros(data_shape.size()[0])
x1 = torch.normal(-2 * data_shape, 1)
y1 = torch.ones(data_shape.size()[0])
x = torch.cat((x0, x1), 0).type(torch.FloatTensor)
y = torch.cat((y0, y1)).type(torch.LongTensor)
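# x: 800 two-dimensional points (two Gaussian clusters centred at +2 and -2),
# y: the class labels (0 for the cluster at +2, 1 for the cluster at -2)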
print(y.size())
print(y)
# plt.scatter(x.data.numpy()[:, 0], x.data.numpy()[:, 1], c=y.data.numpy(), s=8, lw=0, cmap='RdYlGn')
# plt.show()
# create network
class Net(torch.nn.Module):
def __init__(self, n_input, n_hidden, n_output):
super(Net, self).__init__()
self.hidden = torch.nn.Linear(n_input, n_hidden)
self.output = torch.nn.Linear(n_hidden, n_output)
def forward(self, x):
x = torch.sigmoid(self.hidden(x))
return self.output(x)
net = Net(n_input=2, n_hidden=10, n_output=2)
print(net)
# train network
optimizer = torch.optim.SGD(net.parameters(), lr=0.01)
loss_func = torch.nn.CrossEntropyLoss()
plt.ion()
for i in range(1000):
out = net(x)
loss = loss_func(out, y)
optimizer.zero_grad()
loss.backward()
optimizer.step()
if i % 20 == 0:
plt.cla()
# temp = torch.softmax(out, 1)
prediction = torch.max(out, 1)[1]
# prediction = torch.max(out)
pred_y = prediction.data.numpy().squeeze()
target_y = y.data.numpy()
plt.scatter(x.data.numpy()[:, 0], x.data.numpy()[:, 1], c=pred_y, s=8, lw=0, cmap='RdYlGn')
accuracy = float((pred_y == target_y).astype(int).sum()) / float(target_y.size)
plt.text(1.5, -4, 'Accuracy=%.4f' % accuracy, fontdict={'size': 12, 'color': 'orange'})
plt.pause(0.1)
if accuracy == 1.0:
print('perfect')
break
print('end')
plt.ioff()
plt.show()
|
[
"torch.ones",
"matplotlib.pyplot.show",
"matplotlib.pyplot.ioff",
"torch.nn.CrossEntropyLoss",
"torch.cat",
"torch.normal",
"matplotlib.pyplot.text",
"matplotlib.pyplot.ion",
"matplotlib.pyplot.cla",
"torch.max",
"torch.nn.Linear",
"matplotlib.pyplot.pause"
] |
[((69, 87), 'torch.ones', 'torch.ones', (['(400)', '(2)'], {}), '(400, 2)\n', (79, 87), False, 'import torch\n'), ((93, 124), 'torch.normal', 'torch.normal', (['(2 * data_shape)', '(1)'], {}), '(2 * data_shape, 1)\n', (105, 124), False, 'import torch\n'), ((169, 201), 'torch.normal', 'torch.normal', (['(-2 * data_shape)', '(1)'], {}), '(-2 * data_shape, 1)\n', (181, 201), False, 'import torch\n'), ((975, 1002), 'torch.nn.CrossEntropyLoss', 'torch.nn.CrossEntropyLoss', ([], {}), '()\n', (1000, 1002), False, 'import torch\n'), ((1004, 1013), 'matplotlib.pyplot.ion', 'plt.ion', ([], {}), '()\n', (1011, 1013), True, 'import matplotlib.pyplot as plt\n'), ((1790, 1800), 'matplotlib.pyplot.ioff', 'plt.ioff', ([], {}), '()\n', (1798, 1800), True, 'import matplotlib.pyplot as plt\n'), ((1801, 1811), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1809, 1811), True, 'import matplotlib.pyplot as plt\n'), ((245, 267), 'torch.cat', 'torch.cat', (['(x0, x1)', '(0)'], {}), '((x0, x1), 0)\n', (254, 267), False, 'import torch\n'), ((296, 315), 'torch.cat', 'torch.cat', (['(y0, y1)'], {}), '((y0, y1))\n', (305, 315), False, 'import torch\n'), ((640, 674), 'torch.nn.Linear', 'torch.nn.Linear', (['n_input', 'n_hidden'], {}), '(n_input, n_hidden)\n', (655, 674), False, 'import torch\n'), ((697, 732), 'torch.nn.Linear', 'torch.nn.Linear', (['n_hidden', 'n_output'], {}), '(n_hidden, n_output)\n', (712, 732), False, 'import torch\n'), ((1180, 1189), 'matplotlib.pyplot.cla', 'plt.cla', ([], {}), '()\n', (1187, 1189), True, 'import matplotlib.pyplot as plt\n'), ((1591, 1682), 'matplotlib.pyplot.text', 'plt.text', (['(1.5)', '(-4)', "('Accuracy=%.4f' % accuracy)"], {'fontdict': "{'size': 12, 'color': 'orange'}"}), "(1.5, -4, 'Accuracy=%.4f' % accuracy, fontdict={'size': 12, 'color':\n 'orange'})\n", (1599, 1682), True, 'import matplotlib.pyplot as plt\n'), ((1687, 1701), 'matplotlib.pyplot.pause', 'plt.pause', (['(0.1)'], {}), '(0.1)\n', (1696, 1701), True, 'import matplotlib.pyplot as plt\n'), ((1250, 1267), 'torch.max', 'torch.max', (['out', '(1)'], {}), '(out, 1)\n', (1259, 1267), False, 'import torch\n')]
|
import unittest
from pyblynkrestapi.PyBlynkRestApi import PyBlynkRestApi
class TestBase(unittest.TestCase):
def __init__(self,*args, **kwargs):
super(TestBase, self).__init__(*args, **kwargs)
self.auth_token = ''
self.blynk = PyBlynkRestApi(auth_token=self.auth_token)
|
[
"pyblynkrestapi.PyBlynkRestApi.PyBlynkRestApi"
] |
[((257, 299), 'pyblynkrestapi.PyBlynkRestApi.PyBlynkRestApi', 'PyBlynkRestApi', ([], {'auth_token': 'self.auth_token'}), '(auth_token=self.auth_token)\n', (271, 299), False, 'from pyblynkrestapi.PyBlynkRestApi import PyBlynkRestApi\n')]
|
# -*- coding: utf-8 -*-
import os
import pathlib
from setuptools import setup, find_packages, Extension
from setuptools.command.build_ext import build_ext as build_ext_orig
_VERSION = '0.2.2'
class CMakeExtension(Extension):
def __init__(self, name):
super().__init__(name, sources=[])
class build_ext(build_ext_orig):
def run(self):
for ext in self.extensions:
self.build_cmake(ext)
super().run()
def build_cmake(self, ext):
cwd = pathlib.Path().absolute()
build_temp = pathlib.Path(self.build_temp)
build_temp.mkdir(parents=True, exist_ok=True)
extdir = pathlib.Path(self.get_ext_fullpath(ext.name))
extdir.mkdir(parents=True, exist_ok=True)
config = 'Debug' if self.debug else 'Release'
cmake_args = [
'-DCMAKE_LIBRARY_OUTPUT_DIRECTORY=' + str(extdir.parent.parent.absolute()),
'-DCMAKE_RUNTIME_OUTPUT_DIRECTORY=' + str(extdir.parent.parent.parent.absolute()),
'-DCMAKE_BUILD_TYPE=' + config
]
build_args = [
'--config', config,
'--', '-j4'
]
os.chdir(str(build_temp))
self.spawn(['cmake', str(cwd)] + cmake_args)
if not self.dry_run:
self.spawn(['cmake', '--build', '.'] + build_args)
os.chdir(str(cwd))
setup(
name='qlazy',
version=_VERSION,
url='https://github.com/samn33/qlazy',
author='Sam.N',
author_email='<EMAIL>',
description='Quantum Computing Simulator',
long_description='',
packages=find_packages(),
include_package_data=True,
install_requires=[
'numpy'
],
license='Apache Software License',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python',
'License :: OSI Approved :: Apache Software License',
],
keywords=['quantum', 'simulator'],
ext_modules=[CMakeExtension('qlazy/lib/c/qlz')],
cmdclass={
'build_ext': build_ext,
},
entry_points="""
[console_scripts]
qlazy = qlazy.core:main
""",
)
|
[
"pathlib.Path",
"setuptools.find_packages"
] |
[((541, 570), 'pathlib.Path', 'pathlib.Path', (['self.build_temp'], {}), '(self.build_temp)\n', (553, 570), False, 'import pathlib\n'), ((1570, 1585), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (1583, 1585), False, 'from setuptools import setup, find_packages, Extension\n'), ((494, 508), 'pathlib.Path', 'pathlib.Path', ([], {}), '()\n', (506, 508), False, 'import pathlib\n')]
|
from flask import Flask, request, jsonify
from flask_sqlalchemy import SQLAlchemy
from flask_marshmallow import Marshmallow
import os
# Init app
app = Flask(__name__)
basedir = os.path.abspath(os.path.dirname(__file__))
# Database
app.config['SQLALCHEMY_DATABASE_URI'] = ('sqlite:///' +
os.path.join(basedir, 'db.sqlite'))
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
# Init db
db = SQLAlchemy(app)
# Init ma
ma = Marshmallow(app)
# Product Class/Model
class Product(db.Model):
id = db.Column(db.Integer, primary_key=True)
patient_age_quantile = db.Column(db.Float)
def __init__(self, patient_age_quantile):
self.patient_age_quantile = patient_age_quantile
#Product Schema
class ProductSchema(ma.Schema):
class Meta:
fields = ('id', 'patient_age_quantile')
product_schema = ProductSchema()
products_schema = ProductSchema(many=True)
# Create a Product
@app.route('/product', methods=['POST'])
def add_product():
patient_age_quantile = request.json['patient_age_quantile']
new_product = Product(patient_age_quantile)
db.session.add(new_product)
db.session.commit()
return product_schema.jsonify(new_product)
# Run Server
if __name__ == '__main__':
app.run(debug=True)
|
[
"os.path.dirname",
"flask.Flask",
"flask_marshmallow.Marshmallow",
"flask_sqlalchemy.SQLAlchemy",
"os.path.join"
] |
[((152, 167), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (157, 167), False, 'from flask import Flask, request, jsonify\n'), ((397, 412), 'flask_sqlalchemy.SQLAlchemy', 'SQLAlchemy', (['app'], {}), '(app)\n', (407, 412), False, 'from flask_sqlalchemy import SQLAlchemy\n'), ((428, 444), 'flask_marshmallow.Marshmallow', 'Marshmallow', (['app'], {}), '(app)\n', (439, 444), False, 'from flask_marshmallow import Marshmallow\n'), ((194, 219), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (209, 219), False, 'import os\n'), ((293, 327), 'os.path.join', 'os.path.join', (['basedir', '"""db.sqlite"""'], {}), "(basedir, 'db.sqlite')\n", (305, 327), False, 'import os\n')]
|
#!/usr/bin/env python
"""Module for setting up statistical models"""
from __future__ import division
from math import pi
import numpy as np
import pymc as mc
import graphics
import data_fds
import external_fds
def fds_mlr():
"""PyMC configuration with FDS as the model."""
# Priors
# FDS inputs: abs_coeff, A, E, emissivity, HoR, k, rho, c
theta = mc.Uniform(
'theta',
lower=[1, 7.5e12, 187e3, 0.75, 500, 0.01, 500, 0.5],
value=[2500, 8.5e12, 188e3, 0.85, 750, 0.25, 1000, 3.0],
upper=[5000, 9.5e12, 189e3, 1.00, 2000, 0.50, 2000, 6.0])
sigma = mc.Uniform('sigma', lower=0., upper=10., value=0.100)
# Model
@mc.deterministic
def y_mean(theta=theta):
casename = external_fds.gen_input(
abs_coeff=theta[0],
A=theta[1],
E=theta[2],
emissivity=theta[3],
HoR=theta[4],
k=theta[5],
rho=theta[6],
c=theta[7])
external_fds.run_fds(casename)
mlrs = external_fds.read_fds(casename)
mlr = mlrs[:, 2]
# Print MLR vs. time for each iteration
graphics.plot_fds_mlr(mlr)
return mlr
# Likelihood
# The likelihood is N(y_mean, sigma^2), where sigma
# is pulled from a uniform distribution.
y_obs = mc.Normal('y_obs',
value=data_fds.mlr,
mu=y_mean,
tau=sigma**-2,
observed=True)
return vars()
|
[
"external_fds.read_fds",
"external_fds.gen_input",
"pymc.Uniform",
"graphics.plot_fds_mlr",
"pymc.Normal",
"external_fds.run_fds"
] |
[((369, 605), 'pymc.Uniform', 'mc.Uniform', (['"""theta"""'], {'lower': '[1, 7500000000000.0, 187000.0, 0.75, 500, 0.01, 500, 0.5]', 'value': '[2500, 8500000000000.0, 188000.0, 0.85, 750, 0.25, 1000, 3.0]', 'upper': '[5000, 9500000000000.0, 189000.0, 1.0, 2000, 0.5, 2000, 6.0]'}), "('theta', lower=[1, 7500000000000.0, 187000.0, 0.75, 500, 0.01, \n 500, 0.5], value=[2500, 8500000000000.0, 188000.0, 0.85, 750, 0.25, \n 1000, 3.0], upper=[5000, 9500000000000.0, 189000.0, 1.0, 2000, 0.5, \n 2000, 6.0])\n", (379, 605), True, 'import pymc as mc\n'), ((609, 662), 'pymc.Uniform', 'mc.Uniform', (['"""sigma"""'], {'lower': '(0.0)', 'upper': '(10.0)', 'value': '(0.1)'}), "('sigma', lower=0.0, upper=10.0, value=0.1)\n", (619, 662), True, 'import pymc as mc\n'), ((1330, 1416), 'pymc.Normal', 'mc.Normal', (['"""y_obs"""'], {'value': 'data_fds.mlr', 'mu': 'y_mean', 'tau': '(sigma ** -2)', 'observed': '(True)'}), "('y_obs', value=data_fds.mlr, mu=y_mean, tau=sigma ** -2, observed\n =True)\n", (1339, 1416), True, 'import pymc as mc\n'), ((746, 889), 'external_fds.gen_input', 'external_fds.gen_input', ([], {'abs_coeff': 'theta[0]', 'A': 'theta[1]', 'E': 'theta[2]', 'emissivity': 'theta[3]', 'HoR': 'theta[4]', 'k': 'theta[5]', 'rho': 'theta[6]', 'c': 'theta[7]'}), '(abs_coeff=theta[0], A=theta[1], E=theta[2],\n emissivity=theta[3], HoR=theta[4], k=theta[5], rho=theta[6], c=theta[7])\n', (768, 889), False, 'import external_fds\n'), ((992, 1022), 'external_fds.run_fds', 'external_fds.run_fds', (['casename'], {}), '(casename)\n', (1012, 1022), False, 'import external_fds\n'), ((1038, 1069), 'external_fds.read_fds', 'external_fds.read_fds', (['casename'], {}), '(casename)\n', (1059, 1069), False, 'import external_fds\n'), ((1152, 1178), 'graphics.plot_fds_mlr', 'graphics.plot_fds_mlr', (['mlr'], {}), '(mlr)\n', (1173, 1178), False, 'import graphics\n')]
|
import collections
class Solution:
def knightProbability(self, N: int, K: int, r: int, c: int) -> float:
def valid(curr, curc):
if curr < 0 or curr > N - 1 or curc < 0 or curc > N - 1:
return False
return True
if valid(r, c) == False:
return 0
if K == 0:
return 1
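        # Breadth-first search over (row, col, step, on_board): once a path
        # leaves the board it never returns, so after K moves we count how
        # many paths ended on the board versus off it.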
bfs = collections.deque([(r, c, 0, True)])
dirs = [(1, 2), (-1, 2), (1, -2), (-1, -2), (2, 1), (-2, 1), (2, -1), (-2, -1)]
out_cnt = 0
in_cnt = 0
while bfs and bfs[0][2] < K:
curr, curc, curt, curvalid = bfs.popleft()
for dir in dirs:
if curvalid == False:
if curt + 1 == K:
out_cnt += 1
bfs.append((curr + dir[0], curc + dir[1], curt + 1, False))
else:
nxtr = curr + dir[0]
nxtc = curc + dir[1]
is_valid = valid(nxtr, nxtc)
if is_valid and curt + 1 == K:
in_cnt += 1
else:
if curt + 1 == K:
out_cnt += 1
bfs.append((nxtr, nxtc, curt + 1, is_valid))
return in_cnt/(in_cnt + out_cnt)
a = Solution()
b = a.knightProbability(3, 2, 0, 0)
print(b)
|
[
"collections.deque"
] |
[((388, 424), 'collections.deque', 'collections.deque', (['[(r, c, 0, True)]'], {}), '([(r, c, 0, True)])\n', (405, 424), False, 'import collections\n')]
|
# Copyright 2019 <NAME> GmbH
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module with functions for reading traces."""
import os
from typing import List
from . import DictEvent
from .babeltrace import get_babeltrace_impl
impl = get_babeltrace_impl()
def is_trace_directory(path: str) -> bool:
"""
Check recursively if a path is a trace directory.
:param path: the path to check
:return: `True` if it is a trace directory, `False` otherwise
"""
path = os.path.expanduser(path)
if not os.path.isdir(path):
return False
return impl.is_trace_directory(path) # type: ignore
def get_trace_events(trace_directory: str) -> List[DictEvent]:
"""
Get the events of a trace.
:param trace_directory: the path to the main/top trace directory
:return: events
"""
return impl.get_trace_events(trace_directory) # type: ignore
|
[
"os.path.isdir",
"os.path.expanduser"
] |
[((987, 1011), 'os.path.expanduser', 'os.path.expanduser', (['path'], {}), '(path)\n', (1005, 1011), False, 'import os\n'), ((1023, 1042), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (1036, 1042), False, 'import os\n')]
|
#!/usr/bin/env python3
import asyncio
import logging
from typing import TextIO
import click
import yaml
from rich.logging import RichHandler
from hasspad.config import HasspadConfig
from hasspad.hasspad import Hasspad
logging.basicConfig(
level="INFO",
format="%(message)s",
datefmt="[%X]",
handlers=[RichHandler(rich_tracebacks=True)],
)
logger = logging.getLogger(__file__)
@click.command()
@click.argument("config", type=click.File("r"))
def main(config: TextIO) -> None:
hasspad = Hasspad(HasspadConfig(**yaml.safe_load(config)))
asyncio.run(hasspad.listen())
|
[
"click.File",
"click.command",
"yaml.safe_load",
"rich.logging.RichHandler",
"logging.getLogger"
] |
[((369, 396), 'logging.getLogger', 'logging.getLogger', (['__file__'], {}), '(__file__)\n', (386, 396), False, 'import logging\n'), ((400, 415), 'click.command', 'click.command', ([], {}), '()\n', (413, 415), False, 'import click\n'), ((447, 462), 'click.File', 'click.File', (['"""r"""'], {}), "('r')\n", (457, 462), False, 'import click\n'), ((321, 354), 'rich.logging.RichHandler', 'RichHandler', ([], {'rich_tracebacks': '(True)'}), '(rich_tracebacks=True)\n', (332, 354), False, 'from rich.logging import RichHandler\n'), ((536, 558), 'yaml.safe_load', 'yaml.safe_load', (['config'], {}), '(config)\n', (550, 558), False, 'import yaml\n')]
|
from adaptivefiltering.utils import AdaptiveFilteringError, is_iterable
# Mapping from human-readable name to class codes
_name_to_class = {
"unclassified": (0, 1),
"ground": (2,),
"low_vegetation": (3,),
"medium_vegetation": (4,),
"high_vegetation": (5,),
"building": (6,),
"low_point": (7,),
"water": (9,),
"road_surface": (11,),
}
# Inverse mapping from class codes to human readable names
_class_to_name = ["(not implemented)"] * 256
# Populate the inverse mapping
for name, classes in _name_to_class.items():
for c in classes:
_class_to_name[c] = name
def asprs_class_code(name):
"""Map ASPRS classification name to code"""
try:
return _name_to_class[name]
except KeyError:
raise AdaptiveFilteringError(
f"Classification identifier '{name}'' not known to adaptivefiltering"
)
def asprs_class_name(code):
"""Map ASPRS classification code to name"""
try:
return _class_to_name[code]
except IndexError:
raise AdaptiveFilteringError(
f"Classification code '{code}' not in range [0, 255]"
)
def asprs(vals):
"""Map a number of values to ASPRS classification codes
:param vals:
An arbitrary number of values that somehow describe an ASPRS
code. Can be integers which will used directy, can be strings
which will be split at commas and then turned into integers
:returns:
A sorted tuple of integers with ASPRS codes:
:rtype: tuple
"""
if is_iterable(vals):
return tuple(sorted(set(sum((_asprs(v) for v in vals), ()))))
else:
return asprs([vals])
def _asprs(val):
if isinstance(val, str):
# First, we split at commas and go into recursion
pieces = val.split(",")
if len(pieces) > 1:
return asprs(pieces)
# If this is a simple string token it must match a code
return asprs_class_code(pieces[0].strip())
elif isinstance(val, int):
if val < 0 or val > 255:
raise AdaptiveFilteringError(
"Classification values need to be in the interval [0, 255]"
)
return (val,)
elif isinstance(val, slice):
# If start is not given, it is zero
start = val.start
if start is None:
start = 0
# If stop is not given, it is the maximum possible classification value: 255
stop = val.stop
if stop is None:
stop = 255
# This adaptation is necessary to be able to use the range generator below
stop = stop + 1
# Collect the list of arguments to the range generator
args = [start, stop]
# Add a third parameter iff the slice step parameter was given
if val.step is not None:
args.append(val.step)
# Return the tuple of classification values
return tuple(range(*args))
else:
raise ValueError(f"Cannot handle type {type(val)} in ASPRS classification")
|
[
"adaptivefiltering.utils.is_iterable",
"adaptivefiltering.utils.AdaptiveFilteringError"
] |
[((1548, 1565), 'adaptivefiltering.utils.is_iterable', 'is_iterable', (['vals'], {}), '(vals)\n', (1559, 1565), False, 'from adaptivefiltering.utils import AdaptiveFilteringError, is_iterable\n'), ((768, 866), 'adaptivefiltering.utils.AdaptiveFilteringError', 'AdaptiveFilteringError', (['f"""Classification identifier \'{name}\'\' not known to adaptivefiltering"""'], {}), '(\n f"Classification identifier \'{name}\'\' not known to adaptivefiltering")\n', (790, 866), False, 'from adaptivefiltering.utils import AdaptiveFilteringError, is_iterable\n'), ((1044, 1121), 'adaptivefiltering.utils.AdaptiveFilteringError', 'AdaptiveFilteringError', (['f"""Classification code \'{code}\' not in range [0, 255]"""'], {}), '(f"Classification code \'{code}\' not in range [0, 255]")\n', (1066, 1121), False, 'from adaptivefiltering.utils import AdaptiveFilteringError, is_iterable\n'), ((2073, 2161), 'adaptivefiltering.utils.AdaptiveFilteringError', 'AdaptiveFilteringError', (['"""Classification values need to be in the interval [0, 255]"""'], {}), "(\n 'Classification values need to be in the interval [0, 255]')\n", (2095, 2161), False, 'from adaptivefiltering.utils import AdaptiveFilteringError, is_iterable\n')]
|
# SPDX-FileCopyrightText: 2022-present <NAME> <<EMAIL>>
#
# SPDX-License-Identifier: MIT
from hatchling.builders.hooks.plugin.interface import BuildHookInterface
class VCSBuildHook(BuildHookInterface):
PLUGIN_NAME = 'vcs'
def __init__(self, *args, **kwargs):
super(VCSBuildHook, self).__init__(*args, **kwargs)
self.__config_version_file = None
self.__config_template = None
@property
def config_version_file(self):
if self.__config_version_file is None:
version_file = self.config.get('version-file', '')
if not isinstance(version_file, str):
raise TypeError('Option `version-file` for build hook `{}` must be a string'.format(self.PLUGIN_NAME))
elif not version_file:
raise ValueError('Option `version-file` for build hook `{}` is required'.format(self.PLUGIN_NAME))
self.__config_version_file = version_file
return self.__config_version_file
@property
def config_template(self):
if self.__config_template is None:
template = self.config.get('template', '')
if not isinstance(template, str):
raise TypeError('Option `template` for build hook `{}` must be a string'.format(self.PLUGIN_NAME))
self.__config_template = template
return self.__config_template
def initialize(self, version, build_data):
from setuptools_scm import dump_version
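        # setuptools-scm writes the VCS-derived version string into the
        # configured file, using the optional template if one was given.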
dump_version(self.root, self.metadata.version, self.config_version_file, template=self.config_template)
build_data['artifacts'].append('/{}'.format(self.config_version_file))
|
[
"setuptools_scm.dump_version"
] |
[((1484, 1591), 'setuptools_scm.dump_version', 'dump_version', (['self.root', 'self.metadata.version', 'self.config_version_file'], {'template': 'self.config_template'}), '(self.root, self.metadata.version, self.config_version_file,\n template=self.config_template)\n', (1496, 1591), False, 'from setuptools_scm import dump_version\n')]
|
#!/usr/bin/env python
# encoding=utf-8
"""
Copyright (c) 2021 Huawei Technologies Co.,Ltd.
openGauss is licensed under Mulan PSL v2.
You can use this software according to the terms and conditions of the Mulan PSL v2.
You may obtain a copy of Mulan PSL v2 at:
http://license.coscl.org.cn/MulanPSL2
THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
See the Mulan PSL v2 for more details.
"""
import io
from threading import Thread, Lock
import paramiko
import scp
from paramiko.client import MissingHostKeyPolicy
class _Buffer:
"""
Thread-safe Buffer using StringIO
"""
def __init__(self):
self._io = io.StringIO()
self._lock = Lock()
def write(self, data):
try:
self._lock.acquire()
self._io.write(data)
finally:
self._lock.release()
def getvalue(self):
return self._io.getvalue()
class _AllOkPolicy(MissingHostKeyPolicy):
"""
accept all missing host key policy for paramiko
"""
def missing_host_key(self, client, hostname, key):
pass
class SSH:
"""
Ssh client to execute remote shell command and scp files or directories
"""
@staticmethod
def _read_to(stream, buffer):
"""
Read stream to buffer in other thread
"""
def _read_handle():
line = stream.readline()
while line:
buffer.write(line)
line = stream.readline()
thread = Thread(target=_read_handle)
thread.start()
return thread
def __init__(self, user, password, host, port, **kwargs):
self.host = host
self.port = port
self.user = user
self.password = password
self.timeout = kwargs.get('timeout', None)
self._do_connect()
def _do_connect(self):
"""
do the ssh2 session connect with username and password
"""
self._session = paramiko.SSHClient()
self._session.set_missing_host_key_policy(_AllOkPolicy)
self._session.connect(self.host, self.port, self.user, self.password, timeout=self.timeout)
def sh(self, cmd, *params, **kwargs) -> (int, str):
"""
execute shell command in remote host with ssh2 protocol
:param cmd: command in text
:param params: arguments for command format
:param kwargs: named-arguments for command format
:return: (exit_code, output(include stderr and stdout))
"""
channel = self._session.get_transport().open_session()
if len(params) == 0 and len(kwargs) == 0:
real_cmd = cmd
else:
            real_cmd = cmd.format(*params, **kwargs)
        channel.exec_command(real_cmd)
stdout = channel.makefile('r', 40960)
stderr = channel.makefile_stderr('r', 40960)
buffer = _Buffer()
stdout_reader = self._read_to(stdout, buffer)
stderr_reader = self._read_to(stderr, buffer)
stdout_reader.join()
stderr_reader.join()
return_code = channel.recv_exit_status()
return return_code, buffer.getvalue()
def scp_get(self, _from, to, force=False):
"""
get remote directory or files to local
:param _from: the remote path to fetch
:param to: the local path
:param force: force override local exists files
:exception IOError if io error occur
"""
scp.get(self._session.get_transport(), _from, to, recursive=True)
def scp_put(self, _from, to, force=False):
"""
put local file or directory to remote host
:param _from: local file or directory
:param to: remote file or directory
:param force: force override exists files or not
"""
scp.put(self._session.get_transport(), _from, to, recursive=True)
def close(self):
"""
close the ssh2 session connection, when call to a closed ssh instance error will be raise
"""
self._session.close()
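
# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the module); the host, credentials
# and paths below are placeholders.
if __name__ == '__main__':
    ssh = SSH('root', 'secret', '192.0.2.10', 22, timeout=10)
    code, output = ssh.sh('echo hello from {}', 'remote')
    print(code, output)
    ssh.scp_put('local.txt', '/tmp/local.txt')
    ssh.close()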
|
[
"threading.Lock",
"threading.Thread",
"io.StringIO",
"paramiko.SSHClient"
] |
[((793, 806), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (804, 806), False, 'import io\n'), ((828, 834), 'threading.Lock', 'Lock', ([], {}), '()\n', (832, 834), False, 'from threading import Thread, Lock\n'), ((1644, 1671), 'threading.Thread', 'Thread', ([], {'target': '_read_handle'}), '(target=_read_handle)\n', (1650, 1671), False, 'from threading import Thread, Lock\n'), ((2105, 2125), 'paramiko.SSHClient', 'paramiko.SSHClient', ([], {}), '()\n', (2123, 2125), False, 'import paramiko\n')]
|
from pydantic import BaseModel
from bson import ObjectId
from typing import Any, List
from models.comment import CommentBase
from db.mongodb import get_database
class PostUpdates(BaseModel):
id: str
text: str
title: str
user_id: str
published: bool
up_vote: List[str]
down_vote: List[str]
comment_ids: List[str] = []
@classmethod
async def find_by_id(cls, _id: str):
db = await get_database()
post = await db.posts.find_one({"_id": ObjectId(_id)})
if post:
post["id"] = str(post["_id"])
return PostUpdates(**post)
return None
@classmethod
async def find_by_user_id(cls, user_id):
db = await get_database()
posts_count = await db.posts.count_documents({"user_id": user_id})
posts = await db.posts.find({"user_id": user_id}).to_list(posts_count)
if posts:
all_posts = []
for post in posts:
post["id"] = str(post["_id"])
post["comments"] = await CommentBase.find_by_post_id(
post["id"]
)
all_posts.append(PostUpdates(**post))
return all_posts
return []
async def add_comment(self, comment_id: str):
db = await get_database()
self.comment_ids.append(comment_id)
done = await db.posts.update_one({"_id": ObjectId(self.id)},
{"$set": {
"comment_ids": self.comment_ids
}})
return done
async def delete(self):
db = await get_database()
await CommentBase.delete_all_comments_for_post(self.id)
done = await db.posts.delete_one({"_id": self.id})
return done.acknowledged
async def vote(self, vote_type, user_id):
db = await get_database()
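        # A repeated vote of the same kind simply removes it; an opposite
        # vote moves the user from one list to the other.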
if vote_type == "UP_VOTE":
if user_id in self.up_vote:
self.up_vote.remove(user_id)
else:
if user_id in self.down_vote:
self.down_vote.remove(user_id)
self.up_vote.append(user_id)
elif vote_type == "DOWN_VOTE":
if user_id in self.down_vote:
self.down_vote.remove(user_id)
else:
if user_id in self.up_vote:
self.up_vote.remove(user_id)
self.down_vote.append(user_id)
done = await db.posts.update_one(
{"_id": ObjectId(self.id)},
{"$set": {"up_vote": self.up_vote, "down_vote": self.down_vote}}
)
return done.acknowledged
@classmethod
async def delete_all_posts_for_user(cls, user_id):
db = await get_database()
done = await db.posts.delete_many({"user_id": user_id})
return done.acknowledged
|
[
"db.mongodb.get_database",
"models.comment.CommentBase.find_by_post_id",
"models.comment.CommentBase.delete_all_comments_for_post",
"bson.ObjectId"
] |
[((429, 443), 'db.mongodb.get_database', 'get_database', ([], {}), '()\n', (441, 443), False, 'from db.mongodb import get_database\n'), ((707, 721), 'db.mongodb.get_database', 'get_database', ([], {}), '()\n', (719, 721), False, 'from db.mongodb import get_database\n'), ((1289, 1303), 'db.mongodb.get_database', 'get_database', ([], {}), '()\n', (1301, 1303), False, 'from db.mongodb import get_database\n'), ((1659, 1673), 'db.mongodb.get_database', 'get_database', ([], {}), '()\n', (1671, 1673), False, 'from db.mongodb import get_database\n'), ((1688, 1737), 'models.comment.CommentBase.delete_all_comments_for_post', 'CommentBase.delete_all_comments_for_post', (['self.id'], {}), '(self.id)\n', (1728, 1737), False, 'from models.comment import CommentBase\n'), ((1896, 1910), 'db.mongodb.get_database', 'get_database', ([], {}), '()\n', (1908, 1910), False, 'from db.mongodb import get_database\n'), ((2771, 2785), 'db.mongodb.get_database', 'get_database', ([], {}), '()\n', (2783, 2785), False, 'from db.mongodb import get_database\n'), ((491, 504), 'bson.ObjectId', 'ObjectId', (['_id'], {}), '(_id)\n', (499, 504), False, 'from bson import ObjectId\n'), ((1040, 1079), 'models.comment.CommentBase.find_by_post_id', 'CommentBase.find_by_post_id', (["post['id']"], {}), "(post['id'])\n", (1067, 1079), False, 'from models.comment import CommentBase\n'), ((1397, 1414), 'bson.ObjectId', 'ObjectId', (['self.id'], {}), '(self.id)\n', (1405, 1414), False, 'from bson import ObjectId\n'), ((2539, 2556), 'bson.ObjectId', 'ObjectId', (['self.id'], {}), '(self.id)\n', (2547, 2556), False, 'from bson import ObjectId\n')]
|
import csv
import random
import re
import sys
import tqdm
import numpy as np
import torch
from torch.utils.data import TensorDataset
from transformers.tokenization_bert import BertTokenizer
def load_glove_txt(file_path="glove.840B.300d.txt"):
results = {}
num_file = sum([1 for i in open(file_path, "r", encoding='utf8')])
with open(file_path, 'r', encoding='utf8') as infile:
for line in tqdm.tqdm(infile, total=num_file):
data = line.strip().split(' ')
word = data[0]
results[word] = 1
return results
def clean_str(string):
# string = re.sub("[^A-Za-z0-9(),!?\'\`]", " ", string)
string = re.sub("\'s", " \'s", string)
string = re.sub("\'ve", " \'ve", string)
string = re.sub("n\'t", " n\'t", string)
string = re.sub("\'re", " \'re", string)
string = re.sub("\'d", " \'d", string)
string = re.sub("\'ll", " \'ll", string)
string = re.sub('"', " ", string)
string = re.sub("'", " ", string)
string = re.sub("`", " ", string)
string = re.sub(r"\\", " ", string)
string = re.sub(r"[\[\]<>/&#\^$%{}‘\.…*]", " ", string)
# string = re.sub(",", " , ", string)
# string = re.sub("!", " ! ", string)
# string = re.sub("\(", " \( ", string)
# string = re.sub("\)", " \) ", string)
# string = re.sub("\?", " \? ", string)
# string = re.sub("\\\?", "?", string)
# string = re.sub("\s{2,}", " ", string)
# string = re.sub("-", ' ', string)
return string.strip().split()
def shuffle_data(x, y):
idx = list(range(len(x)))
np.random.shuffle(idx)
new_x = []
new_y = []
for id_ in idx:
new_x.append(x[id_])
new_y.append(y[id_])
return new_x, new_y
def read_TREC(cv=None, scale_rate=1):
data = {}
def read(mode):
x, y = [], []
with open("data/TREC/" + mode + ".tsv", "r", encoding="utf-8") as f:
reader = csv.reader(f, delimiter="\t", quotechar=None)
for line in reader:
x.append(clean_str(line[0]))
y.append(line[1])
if mode == "train":
label2data = {}
for x_, y_ in zip(x, y):
if y_ not in label2data:
label2data[y_] = [x_]
else:
label2data[y_].append(x_)
new_train_x = []
new_train_y = []
for y_ in label2data.keys():
train_idx = max(int(len(label2data[y_]) * scale_rate), 1)
for x_ in label2data[y_][:train_idx]:
new_train_x.append(x_)
new_train_y.append(y_)
x, y = shuffle_data(new_train_x, new_train_y)
data["train_x"], data["train_y"] = x, y
else:
data["test_x"], data["test_y"] = x, y
read("train")
read("test")
return data
def read_SST1(cv=None, scale_rate=1):
data = {}
def read(mode):
x, y = [], []
with open("data/SST1/" + mode + ".tsv", "r", encoding="utf-8") as f:
reader = csv.reader(f, delimiter="\t", quotechar=None)
for line in reader:
y.append(line[1])
x.append(clean_str(line[0]))
# x.append(line[0])
if mode == "train":
with open("data/SST1/stsa.fine.phrases.train", "r", encoding="utf-8", errors='ignore') as f:
for line in f:
y.append(line[0])
x.append(clean_str(line[2:]))
label2data = {}
for x_, y_ in zip(x, y):
if y_ not in label2data:
label2data[y_] = [x_]
else:
label2data[y_].append(x_)
new_train_x = []
new_train_y = []
for y_ in label2data.keys():
train_idx = max(int(len(label2data[y_]) * scale_rate), 1)
for x_ in label2data[y_][:train_idx]:
new_train_x.append(x_)
new_train_y.append(y_)
x, y = shuffle_data(new_train_x, new_train_y)
data["train_x"], data["train_y"] = x, y
else:
data["test_x"], data["test_y"] = x, y
read("train")
read("test")
return data
def read_SST2(cv=None, scale_rate=1):
data = {}
def read(mode):
x, y = [], []
with open("data/SST2/" + mode + ".tsv", "r", encoding="utf-8") as f:
reader = csv.reader(f, delimiter="\t", quotechar=None)
for line in reader:
y.append(line[1])
x.append(clean_str(line[0]))
# x.append(line[0])
if mode == "train":
with open("data/SST2/stsa.binary.phrases.train", "r", encoding="utf-8", errors='ignore') as f:
for line in f:
y.append(line[0])
x.append(clean_str(line[2:]))
label2data = {}
for x_, y_ in zip(x, y):
if y_ not in label2data:
label2data[y_] = [x_]
else:
label2data[y_].append(x_)
new_train_x = []
new_train_y = []
for y_ in label2data.keys():
train_idx = max(int(len(label2data[y_]) * scale_rate), 1)
for x_ in label2data[y_][:train_idx]:
new_train_x.append(x_)
new_train_y.append(y_)
x, y = shuffle_data(new_train_x, new_train_y)
data["train_x"], data["train_y"] = x, y
else:
data["test_x"], data["test_y"] = x, y
read("train")
read("test")
return data
def read_SUBJ(cv=0, scale_rate=1):
data = {}
x, y = [], []
with open("data/SUBJ/subj.all", "r", encoding="utf-8", errors='ignore') as f:
# reader = csv.reader(f, delimiter="\t", quotechar=None)
for line in f:
x.append(clean_str(line[2:]))
# x.append(line[0])
y.append(line[0])
idx = list(range(len(x)))
np.random.shuffle(idx)
test_index = cv # 0-9
train_x = []
train_y = []
test_x = []
test_y = []
for i, id_ in enumerate(idx):
index = i % 10
if index == test_index:
test_x.append(x[id_])
test_y.append(y[id_])
else:
train_x.append(x[id_])
train_y.append(y[id_])
label2data = {}
for x_, y_ in zip(train_x, train_y):
if y_ not in label2data:
label2data[y_] = [x_]
else:
label2data[y_].append(x_)
new_train_x = []
new_train_y = []
for y_ in label2data.keys():
train_idx = max(int(len(label2data[y_]) * scale_rate), 1)
for x_ in label2data[y_][:train_idx]:
new_train_x.append(x_)
new_train_y.append(y_)
train_x, train_y = shuffle_data(new_train_x, new_train_y)
data["train_x"], data["train_y"] = train_x, train_y
data["test_x"], data["test_y"] = test_x, test_y
return data
def read_MR(cv=0, scale_rate=1):
data = {}
x, y = [], []
with open("data/MR/rt-polarity.pos", "r", encoding="utf-8") as f:
for line in f:
if line[-1] == "\n":
line = line[:-1]
x.append(clean_str(line))
y.append(1)
with open("data/MR/rt-polarity.neg", "r", encoding="utf-8") as f:
for line in f:
if line[-1] == "\n":
line = line[:-1]
x.append(clean_str(line))
y.append(0)
idx = list(range(len(x)))
np.random.shuffle(idx)
test_index = cv # 0-9
# dev_index = (cv+1)%10
train_x = []
train_y = []
test_x = []
test_y = []
for i, id_ in enumerate(idx):
index = i % 10
if index == test_index:
test_x.append(x[id_])
test_y.append(y[id_])
else:
train_x.append(x[id_])
train_y.append(y[id_])
label2data = {}
for x_, y_ in zip(train_x, train_y):
if y_ not in label2data:
label2data[y_] = [x_]
else:
label2data[y_].append(x_)
new_train_x = []
new_train_y = []
for y_ in label2data.keys():
train_idx = max(int(len(label2data[y_]) * scale_rate), 1)
for x_ in label2data[y_][:train_idx]:
new_train_x.append(x_)
new_train_y.append(y_)
train_x, train_y = shuffle_data(new_train_x, new_train_y)
data["train_x"], data["train_y"] = train_x, train_y
data["test_x"], data["test_y"] = test_x, test_y
return data
def refind_sent(sent, g_dict):
new_sent = []
for word in sent:
if word in g_dict:
new_sent.append(word)
elif '-' in word:
for wd in word.split('-'):
new_sent.append(wd)
        elif '\\/' in word:
            for wd in word.split('\\/'):
new_sent.append(wd)
elif word.lower() in g_dict:
new_sent.append(word.lower())
else:
continue
return new_sent
def preprocess_data(data, VOCAB_SIZE, MAX_SENT_LEN, dtype='train'):
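    # Each sentence is encoded as [<BOS>, w_1, ..., <EOS>] word indices and right-padded
    # with the id VOCAB_SIZE + 1 up to MAX_SENT_LEN; words beyond MAX_SENT_LEN - 2 are
    # dropped so <EOS> always fits, and labels are mapped to class indices.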
x = []
for sent in data[dtype + "_x"]:
sent_tmp = [data['word_to_idx']["<BOS>"]]
for word in sent:
if len(sent_tmp) < MAX_SENT_LEN - 1:
sent_tmp.append(data['word_to_idx'][word])
sent_tmp.append(data['word_to_idx']["<EOS>"])
if len(sent_tmp) < MAX_SENT_LEN:
sent_tmp += [VOCAB_SIZE + 1] * (MAX_SENT_LEN - len(sent_tmp))
x.append(sent_tmp)
y = [data["classes"].index(c) for c in data[dtype + "_y"]]
x = torch.LongTensor(x)
y = torch.LongTensor(y)
return x, y
def load_dataset(options):
mod = sys.modules[__name__]
if options.classifier != 'BERT':
data = getattr(mod, f"read_{options.dataset}")(cv=options.cv, scale_rate=options.scale_rate)
g_dict = load_glove_txt()
for i in range(len(data['train_x'])):
data['train_x'][i] = refind_sent(data['train_x'][i], g_dict)
for i in range(len(data['test_x'])):
data['test_x'][i] = refind_sent(data['test_x'][i], g_dict)
data["vocab"] = sorted(
list(set([w for sent in data["train_x"] + data["test_x"] for w in sent] + ["<BOS>", "<EOS>"])))
data["classes"] = sorted(list(set(data["train_y"])))
data["word_to_idx"] = {w: i for i, w in enumerate(data["vocab"])}
data["idx_to_word"] = {i: w for i, w in enumerate(data["vocab"])}
options.VOCAB_SIZE = len(data["vocab"])
if not hasattr(options, 'MAX_SENT_LEN'):
options.MAX_SENT_LEN = max([len(sent) for sent in data["train_x"] + data["test_x"]])
options.CLASS_SIZE = len(data["classes"])
train_x, train_y = preprocess_data(data, options.VOCAB_SIZE, options.MAX_SENT_LEN, 'train')
train_set = TensorDataset(train_x, train_y)
test_x, test_y = preprocess_data(data, options.VOCAB_SIZE, options.MAX_SENT_LEN, 'test')
test_set = TensorDataset(test_x, test_y)
return train_set, test_set, data
else:
data = {}
dset = getattr(mod, f"{options.dataset}_Processor")(cv=options.cv)
train_examples = dset.train_examples
test_examples = dset.test_examples
data['tokenizer'] = BertTokenizer(vocab_file='./bert-base-uncased/vocab.txt'
, do_basic_tokenize=True)
data["classes"] = sorted(list(set([z.label for z in train_examples])))
options.CLASS_SIZE = len(data["classes"])
options.VOCAB_SIZE = len(data['tokenizer'].vocab)
if not hasattr(options, 'MAX_SENT_LEN'):
setattr(options, 'MAX_SENT_LEN',
max([len(example.text_a.split(' ')) for example in train_examples + test_examples]) + 2)
# print("max",max([len(example.text_a.split(' ')) for example in train_examples + test_examples]))
train_set = _make_data_loader(train_examples, data["classes"], data['tokenizer'], options.MAX_SENT_LEN)
test_set = _make_data_loader(test_examples, data["classes"], data['tokenizer'], options.MAX_SENT_LEN)
return train_set, test_set, data
def _make_data_loader(examples, label_list, tokenizer, MAX_SEQ_LENGTH):
all_features = _convert_examples_to_features(
examples=examples,
label_list=label_list,
max_seq_length=MAX_SEQ_LENGTH,
tokenizer=tokenizer,
output_mode='classification')
all_input_ids = torch.tensor(
[f.input_ids for f in all_features], dtype=torch.long)
all_input_mask = torch.tensor(
[f.input_mask for f in all_features], dtype=torch.long)
all_segment_ids = torch.tensor(
[f.segment_ids for f in all_features], dtype=torch.long)
all_label_ids = torch.tensor(
[f.label_id for f in all_features], dtype=torch.long)
all_ids = torch.arange(len(examples))
dataset = TensorDataset(
all_input_ids, all_input_mask, all_segment_ids, all_label_ids, all_ids)
return dataset
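# Illustrative downstream usage of the TensorDataset built above (batch size is an example only):
#   loader = torch.utils.data.DataLoader(train_set, batch_size=32, shuffle=True)
#   input_ids, input_mask, segment_ids, label_ids, example_ids = next(iter(loader))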
def _convert_examples_to_features(examples, label_list, max_seq_length,
tokenizer, output_mode):
"""Loads a data file into a list of `InputBatch`s."""
label_map = {label: i for i, label in enumerate(label_list)}
features = []
for (ex_index, example) in enumerate(examples):
tokens_a = tokenizer.tokenize(example.text_a)
tokens_b = None
if example.text_b:
tokens_b = tokenizer.tokenize(example.text_b)
# Modifies `tokens_a` and `tokens_b` in place so that the total
# length is less than the specified length.
# Account for [CLS], [SEP], [SEP] with "- 3"
_truncate_seq_pair(tokens_a, tokens_b, max_seq_length - 3)
else:
# Account for [CLS] and [SEP] with "- 2"
if len(tokens_a) > max_seq_length - 2:
tokens_a = tokens_a[:(max_seq_length - 2)]
tokens = ["[CLS]"] + tokens_a + ["[SEP]"]
segment_ids = [0] * len(tokens)
if tokens_b:
tokens += tokens_b + ["[SEP]"]
segment_ids += [1] * (len(tokens_b) + 1)
input_ids = tokenizer.convert_tokens_to_ids(tokens)
# The mask has 1 for real tokens and 0 for padding tokens. Only real
# tokens are attended to.
input_mask = [1] * len(input_ids)
# Zero-pad up to the sequence length.
padding = [0] * (max_seq_length - len(input_ids))
input_ids += padding
input_mask += padding
segment_ids += padding
# print(len(input_ids),len(input_mask),len(segment_ids),max_seq_length)
assert len(input_ids) == max_seq_length
assert len(input_mask) == max_seq_length
assert len(segment_ids) == max_seq_length
if output_mode == "classification":
label_id = label_map[example.label]
elif output_mode == "regression":
label_id = float(example.label)
else:
raise KeyError(output_mode)
features.append(
InputFeatures(input_ids=input_ids,
input_mask=input_mask,
segment_ids=segment_ids,
label_id=label_id))
return features
def _truncate_seq_pair(tokens_a, tokens_b, max_length):
"""Truncates a sequence pair in place to the maximum length."""
# This is a simple heuristic which will always truncate the longer sequence
# one token at a time. This makes more sense than truncating an equal
# percent of tokens from each, since if one sequence is very short then each
# token that's truncated likely contains more information than a longer
# sequence.
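    # Illustrative trace with made-up values: max_length=5,
    # tokens_a=['a','b','c','d'], tokens_b=['x','y','z'] -> pops 'd' then 'z',
    # leaving tokens_a=['a','b','c'] and tokens_b=['x','y'] (3 + 2 <= 5).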
while True:
total_length = len(tokens_a) + len(tokens_b)
if total_length <= max_length:
break
if len(tokens_a) > len(tokens_b):
tokens_a.pop()
else:
tokens_b.pop()
def csv_reader(filename):
print('read file:', filename)
f = open(filename, 'r', encoding='utf8')
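    # NOTE: the handle is intentionally not closed here so the returned reader stays usable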
reader = csv.reader(f, delimiter="\t", quotechar=None)
return reader
class InputExample:
"""A single training/test example for simple sequence classification."""
def __init__(self, guid, text_a, text_b=None, label=None):
self.guid = guid
self.text_a = text_a
self.text_b = text_b
self.label = label
class InputFeatures(object):
"""A single set of features of data."""
def __init__(self, input_ids, input_mask, segment_ids, label_id):
self.input_ids = input_ids
self.input_mask = input_mask
self.segment_ids = segment_ids
self.label_id = label_id
def __getitem__(self, item):
return [self.input_ids, self.input_mask,
self.segment_ids, self.label_id][item]
class DatasetProcessor:
def get_train_examples(self):
raise NotImplementedError
def get_dev_examples(self):
raise NotImplementedError
def get_test_examples(self):
raise NotImplementedError
def get_labels(self):
raise NotImplementedError
class SST1_Processor(DatasetProcessor):
"""Processor for the SST-5 data set."""
def __init__(self, cv=0):
train_file = "./data/SST1/train.tsv"
test_file = "./data/SST1/test.tsv"
print("processing train_file{},test_file".format(train_file, test_file))
self._train_set, self._test_set = csv_reader(train_file), csv_reader(test_file)
self.train_examples, self.test_examples = self.get_train_examples(), self.get_test_examples()
x, y = [], []
with open("data/SST1/stsa.fine.phrases.train", "r", encoding="utf-8", errors='ignore') as f:
for line in f:
y.append(line[0])
x.append(line[2:])
self.train_examples_extra = self._create_examples(zip(x, y), "train")
self.train_examples = self.train_examples + self.train_examples_extra
def get_train_examples(self):
"""See base class."""
examples = self._create_examples(self._train_set, "train")
print('getting train examples,len = ', len(examples))
return examples
def get_test_examples(self):
"""See base class."""
examples = self._create_examples(self._test_set, "test")
print('getting test examples,len = ', len(examples))
return examples
def get_labels(self):
"""See base class."""
label_set = set()
for example in self.train_examples:
label_set.add(example.label)
return sorted(list(label_set))
def _create_examples(self, dataset, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, data) in enumerate(dataset):
guid = "%s-%s" % (set_type, i)
examples.append(InputExample(
guid=guid,
text_a=data[0],
label=data[1]
))
# return examples
return examples
class SST2_Processor(DatasetProcessor):
"""Processor for the SST-5 data set."""
def __init__(self, cv=0):
train_file = "./data/SST2/train.tsv"
test_file = "./data/SST2/test.tsv"
x, y = [], []
with open("data/SST2/stsa.binary.phrases.train", "r", encoding="utf-8", errors='ignore') as f:
for line in f:
y.append(line[0])
x.append(line[2:])
self.train_examples_extra = self._create_examples(zip(x, y), "train")
print("processing train_file{},test_file".format(train_file, test_file))
self._train_set, self._test_set = csv_reader(train_file), csv_reader(test_file)
self.train_examples, self.test_examples = self.get_train_examples(), self.get_test_examples()
self.train_examples = self.train_examples + self.train_examples_extra
def get_train_examples(self):
"""See base class."""
examples = self._create_examples(self._train_set, "train")
print('getting train examples,len = ', len(examples))
return examples
def get_test_examples(self):
"""See base class."""
examples = self._create_examples(self._test_set, "test")
print('getting test examples,len = ', len(examples))
return examples
def get_labels(self):
"""See base class."""
label_set = set()
for example in self.train_examples:
label_set.add(example.label)
return sorted(list(label_set))
def _create_examples(self, dataset, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, data) in enumerate(dataset):
guid = "%s-%s" % (set_type, i)
examples.append(InputExample(
guid=guid,
text_a=data[0],
label=data[1]
))
# return examples
return examples
class TREC_Processor(DatasetProcessor):
"""Processor for the SST-5 data set."""
def __init__(self, cv=0):
train_file = "./data/TREC/train.tsv"
test_file = "./data/TREC/test.tsv"
print("processing train_file{},test_file,{}".format(train_file, test_file))
self._train_set, self._test_set = csv_reader(train_file), csv_reader(test_file)
self.train_examples, self.test_examples = self.get_train_examples(), self.get_test_examples()
def get_train_examples(self):
"""See base class."""
examples = self._create_examples(self._train_set, "train")
print('getting train examples,len = ', len(examples))
return examples
def get_test_examples(self):
"""See base class."""
examples = self._create_examples(self._test_set, "test")
print('getting test examples,len = ', len(examples))
return examples
def get_labels(self):
"""See base class."""
label_set = set()
for example in self.train_examples:
label_set.add(example.label)
return sorted(list(label_set))
def _create_examples(self, dataset, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, data) in enumerate(dataset):
guid = "%s-%s" % (set_type, i)
examples.append(InputExample(
guid=guid,
text_a=data[0],
label=data[1]
))
# return examples
return examples
class SUBJ_Processor(DatasetProcessor):
"""Processor for the SST-5 data set."""
def __init__(self, cv):
all_file = "./data/SUBJ/data_all.tsv"
print("processing all_file{}".format(all_file))
self._all_set = csv_reader(all_file)
self.train_examples, self.test_examples = self.get_train_examples(cv=cv)
def _read_examples(self):
examples = self._create_examples(self._all_set, "all")
return examples
def get_train_examples(self, cv=0):
"""See base class."""
examples = self._read_examples()
idx = list(range(len(examples)))
np.random.shuffle(idx)
test_index = cv
test_example = []
train_example = []
for i, id_ in enumerate(idx):
index = i % 10
if index == test_index:
test_example.append(examples[id_])
else:
train_example.append(examples[id_])
return train_example, test_example
def get_labels(self):
"""See base class."""
label_set = set()
for example in self.train_examples:
label_set.add(example.label)
return sorted(list(label_set))
def _create_examples(self, dataset, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, data) in enumerate(dataset):
guid = "%s-%s" % (set_type, i)
examples.append(InputExample(
guid=guid,
text_a=data[0],
label=data[1]
))
return examples
# return shuffle_data(examples)
class MR_Processor(DatasetProcessor):
"""Processor for the SST-5 data set."""
def __init__(self, cv=0):
pos_file = "./data/MR/rt-polarity.pos"
neg_file = "./data/MR/rt-polarity.neg"
print("processing pos_file:{},neg_file:{}".format(pos_file, neg_file))
self._pos_set, self._neg_set = csv_reader(pos_file), csv_reader(neg_file)
self.train_examples, self.test_examples = self.get_train_examples(cv=cv)
def _read_examples(self):
pos_examples = self._create_examples(self._pos_set, "pos")
neg_examples = self._create_examples(self._neg_set, "neg")
examples = []
for ex in pos_examples:
examples.append(InputExample(
guid=ex.guid,
text_a=ex.text_a,
label=1
))
for ex in neg_examples:
examples.append(InputExample(
guid=ex.guid,
text_a=ex.text_a,
label=0
))
return examples
def get_train_examples(self, cv=0):
"""See base class."""
examples = self._read_examples()
idx = list(range(len(examples)))
np.random.shuffle(idx)
test_index = cv
test_example = []
train_example = []
for i, id_ in enumerate(idx):
index = i % 10
if index == test_index:
test_example.append(examples[id_])
else:
train_example.append(examples[id_])
return train_example, test_example
def get_labels(self):
"""See base class."""
label_set = set()
for example in self.train_examples:
label_set.add(example.label)
return sorted(list(label_set))
def _create_examples(self, dataset, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, data) in enumerate(dataset):
guid = "%s-%s" % (set_type, i)
examples.append(InputExample(
guid=guid,
text_a=data[0],
))
return examples
if __name__ == "__main__":
processor = TREC_Processor(cv=2)
print(processor.get_labels())
train = processor.train_examples
for x in train:
print(x.text_a, x.label)
break
# class OPT:
# def __init__(self):
# self.dataset="SUBJ"
# self.cv = "0"
# self.scale_rate=1
# self.MAX_SENT_LEN=-1
# opt = OPT()
# dset = getattr(sys.modules[__name__],'load_dataset')(opt)
# for x in dset[0]:
# print(x)
# break
# from torch.utils.data import DataLoader
# train_loader = DataLoader(dset[0], batch_size=50, shuffle=True)
|
[
"tqdm.tqdm",
"transformers.tokenization_bert.BertTokenizer",
"numpy.random.shuffle",
"csv.reader",
"torch.LongTensor",
"torch.utils.data.TensorDataset",
"re.sub",
"torch.tensor"
] |
[((663, 690), 're.sub', 're.sub', (['"""\'s"""', '""" \'s"""', 'string'], {}), '("\'s", " \'s", string)\n', (669, 690), False, 'import re\n'), ((706, 735), 're.sub', 're.sub', (['"""\'ve"""', '""" \'ve"""', 'string'], {}), '("\'ve", " \'ve", string)\n', (712, 735), False, 'import re\n'), ((751, 780), 're.sub', 're.sub', (['"""n\'t"""', '""" n\'t"""', 'string'], {}), '("n\'t", " n\'t", string)\n', (757, 780), False, 'import re\n'), ((796, 825), 're.sub', 're.sub', (['"""\'re"""', '""" \'re"""', 'string'], {}), '("\'re", " \'re", string)\n', (802, 825), False, 'import re\n'), ((841, 868), 're.sub', 're.sub', (['"""\'d"""', '""" \'d"""', 'string'], {}), '("\'d", " \'d", string)\n', (847, 868), False, 'import re\n'), ((884, 913), 're.sub', 're.sub', (['"""\'ll"""', '""" \'ll"""', 'string'], {}), '("\'ll", " \'ll", string)\n', (890, 913), False, 'import re\n'), ((929, 953), 're.sub', 're.sub', (['"""\\""""', '""" """', 'string'], {}), '(\'"\', \' \', string)\n', (935, 953), False, 'import re\n'), ((967, 991), 're.sub', 're.sub', (['"""\'"""', '""" """', 'string'], {}), '("\'", \' \', string)\n', (973, 991), False, 'import re\n'), ((1005, 1029), 're.sub', 're.sub', (['"""`"""', '""" """', 'string'], {}), "('`', ' ', string)\n", (1011, 1029), False, 'import re\n'), ((1043, 1070), 're.sub', 're.sub', (['"""\\\\\\\\"""', '""" """', 'string'], {}), "('\\\\\\\\', ' ', string)\n", (1049, 1070), False, 'import re\n'), ((1083, 1132), 're.sub', 're.sub', (['"""[\\\\[\\\\]<>/&#\\\\^$%{}‘\\\\.…*]"""', '""" """', 'string'], {}), "('[\\\\[\\\\]<>/&#\\\\^$%{}‘\\\\.…*]', ' ', string)\n", (1089, 1132), False, 'import re\n'), ((1568, 1590), 'numpy.random.shuffle', 'np.random.shuffle', (['idx'], {}), '(idx)\n', (1585, 1590), True, 'import numpy as np\n'), ((6033, 6055), 'numpy.random.shuffle', 'np.random.shuffle', (['idx'], {}), '(idx)\n', (6050, 6055), True, 'import numpy as np\n'), ((7559, 7581), 'numpy.random.shuffle', 'np.random.shuffle', (['idx'], {}), '(idx)\n', (7576, 7581), True, 'import numpy as np\n'), ((9609, 9628), 'torch.LongTensor', 'torch.LongTensor', (['x'], {}), '(x)\n', (9625, 9628), False, 'import torch\n'), ((9637, 9656), 'torch.LongTensor', 'torch.LongTensor', (['y'], {}), '(y)\n', (9653, 9656), False, 'import torch\n'), ((12490, 12557), 'torch.tensor', 'torch.tensor', (['[f.input_ids for f in all_features]'], {'dtype': 'torch.long'}), '([f.input_ids for f in all_features], dtype=torch.long)\n', (12502, 12557), False, 'import torch\n'), ((12588, 12656), 'torch.tensor', 'torch.tensor', (['[f.input_mask for f in all_features]'], {'dtype': 'torch.long'}), '([f.input_mask for f in all_features], dtype=torch.long)\n', (12600, 12656), False, 'import torch\n'), ((12688, 12757), 'torch.tensor', 'torch.tensor', (['[f.segment_ids for f in all_features]'], {'dtype': 'torch.long'}), '([f.segment_ids for f in all_features], dtype=torch.long)\n', (12700, 12757), False, 'import torch\n'), ((12787, 12853), 'torch.tensor', 'torch.tensor', (['[f.label_id for f in all_features]'], {'dtype': 'torch.long'}), '([f.label_id for f in all_features], dtype=torch.long)\n', (12799, 12853), False, 'import torch\n'), ((12920, 13009), 'torch.utils.data.TensorDataset', 'TensorDataset', (['all_input_ids', 'all_input_mask', 'all_segment_ids', 'all_label_ids', 'all_ids'], {}), '(all_input_ids, all_input_mask, all_segment_ids, all_label_ids,\n all_ids)\n', (12933, 13009), False, 'from torch.utils.data import TensorDataset\n'), ((16092, 16137), 'csv.reader', 'csv.reader', (['f'], {'delimiter': '"""\t"""', 'quotechar': 'None'}), 
"(f, delimiter='\\t', quotechar=None)\n", (16102, 16137), False, 'import csv\n'), ((411, 444), 'tqdm.tqdm', 'tqdm.tqdm', (['infile'], {'total': 'num_file'}), '(infile, total=num_file)\n', (420, 444), False, 'import tqdm\n'), ((10854, 10885), 'torch.utils.data.TensorDataset', 'TensorDataset', (['train_x', 'train_y'], {}), '(train_x, train_y)\n', (10867, 10885), False, 'from torch.utils.data import TensorDataset\n'), ((11002, 11031), 'torch.utils.data.TensorDataset', 'TensorDataset', (['test_x', 'test_y'], {}), '(test_x, test_y)\n', (11015, 11031), False, 'from torch.utils.data import TensorDataset\n'), ((11292, 11378), 'transformers.tokenization_bert.BertTokenizer', 'BertTokenizer', ([], {'vocab_file': '"""./bert-base-uncased/vocab.txt"""', 'do_basic_tokenize': '(True)'}), "(vocab_file='./bert-base-uncased/vocab.txt', do_basic_tokenize\n =True)\n", (11305, 11378), False, 'from transformers.tokenization_bert import BertTokenizer\n'), ((23130, 23152), 'numpy.random.shuffle', 'np.random.shuffle', (['idx'], {}), '(idx)\n', (23147, 23152), True, 'import numpy as np\n'), ((25313, 25335), 'numpy.random.shuffle', 'np.random.shuffle', (['idx'], {}), '(idx)\n', (25330, 25335), True, 'import numpy as np\n'), ((1918, 1963), 'csv.reader', 'csv.reader', (['f'], {'delimiter': '"""\t"""', 'quotechar': 'None'}), "(f, delimiter='\\t', quotechar=None)\n", (1928, 1963), False, 'import csv\n'), ((3057, 3102), 'csv.reader', 'csv.reader', (['f'], {'delimiter': '"""\t"""', 'quotechar': 'None'}), "(f, delimiter='\\t', quotechar=None)\n", (3067, 3102), False, 'import csv\n'), ((4453, 4498), 'csv.reader', 'csv.reader', (['f'], {'delimiter': '"""\t"""', 'quotechar': 'None'}), "(f, delimiter='\\t', quotechar=None)\n", (4463, 4498), False, 'import csv\n')]
|
# Copyright (c) 2015 SUSE Linux GmbH. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from azure.servicemanagement import ConfigurationSetInputEndpoint
from azure.servicemanagement import ConfigurationSet
from azure.servicemanagement import PublicKey
from azure.servicemanagement import LinuxConfigurationSet
from azure.servicemanagement import OSVirtualHardDisk
from azure.storage.blob.baseblobservice import BaseBlobService
# project
from azurectl.defaults import Defaults
from azurectl.azurectl_exceptions import (
AzureCustomDataTooLargeError,
AzureVmCreateError,
AzureVmDeleteError,
AzureVmRebootError,
AzureVmShutdownError,
AzureVmStartError,
AzureStorageNotReachableByCloudServiceError,
AzureImageNotReachableByCloudServiceError
)
class VirtualMachine(object):
"""
Implements creation/deletion and management of virtual
machine instances from a given image name
"""
def __init__(self, account):
self.account = account
self.service = self.account.get_management_service()
def create_linux_configuration(
self, username='azureuser', instance_name=None,
disable_ssh_password_authentication=True,
password=None, custom_data=None, fingerprint=''
):
"""
create a linux configuration
"""
self.__validate_custom_data_length(custom_data)
# The given instance name is used as the host name in linux
linux_config = LinuxConfigurationSet(
instance_name, username, password,
disable_ssh_password_authentication,
custom_data
)
if fingerprint:
ssh_key_file = '/home/' + username + '/.ssh/authorized_keys'
ssh_pub_key = PublicKey(
fingerprint, ssh_key_file
)
linux_config.ssh.public_keys = [ssh_pub_key]
return linux_config
def create_network_configuration(self, network_endpoints):
"""
create a network configuration
"""
network_config = ConfigurationSet()
for endpoint in network_endpoints:
network_config.input_endpoints.input_endpoints.append(endpoint)
network_config.configuration_set_type = 'NetworkConfiguration'
return network_config
def create_network_endpoint(
self, name, public_port, local_port, protocol
):
"""
create a network service endpoint
"""
return ConfigurationSetInputEndpoint(
name, protocol, public_port, local_port
)
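    # Illustrative composition of the two helpers above (values are examples only):
    #   ssh = self.create_network_endpoint('SSH', 22, 22, 'TCP')
    #   net_config = self.create_network_configuration([ssh])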
def create_instance(
self, cloud_service_name, disk_name, system_config,
network_config=None, label=None, group='production',
machine_size='Small', reserved_ip_name=None
):
"""
create a virtual disk image instance
"""
if not self.__storage_reachable_by_cloud_service(cloud_service_name):
message = [
'The cloud service "%s" and the storage account "%s"',
'are not in the same region, cannot launch an instance.'
]
raise AzureStorageNotReachableByCloudServiceError(
' '.join(message) % (
cloud_service_name, self.account.storage_name()
)
)
if not self.__image_reachable_by_cloud_service(
cloud_service_name, disk_name
):
message = [
'The selected image "%s" is not available',
'in the region of the selected cloud service "%s",',
'cannot launch instance'
]
raise AzureImageNotReachableByCloudServiceError(
' '.join(message) % (
disk_name, cloud_service_name
)
)
deployment_exists = self.__get_deployment(
cloud_service_name
)
if label and deployment_exists:
message = [
'A deployment of the name: %s already exists.',
'Assignment of a label can only happen for the',
'initial deployment.'
]
raise AzureVmCreateError(
' '.join(message) % cloud_service_name
)
if reserved_ip_name and deployment_exists:
message = [
'A deployment of the name: %s already exists.',
'Assignment of a reserved IP name can only happen for the',
'initial deployment.'
]
raise AzureVmCreateError(
' '.join(message) % cloud_service_name
)
storage = BaseBlobService(
self.account.storage_name(),
self.account.storage_key(),
endpoint_suffix=self.account.get_blob_service_host_base()
)
media_link = storage.make_blob_url(
self.account.storage_container(), ''.join(
[
cloud_service_name,
'_instance_', system_config.host_name,
'_image_', disk_name
]
)
)
instance_disk = OSVirtualHardDisk(disk_name, media_link)
instance_record = {
'deployment_name': cloud_service_name,
'network_config': network_config,
'role_name': system_config.host_name,
'role_size': machine_size,
'service_name': cloud_service_name,
'system_config': system_config,
'os_virtual_hard_disk': instance_disk,
'provision_guest_agent': True
}
if network_config:
instance_record['network_config'] = network_config
try:
if deployment_exists:
result = self.service.add_role(
**instance_record
)
else:
instance_record['deployment_slot'] = group
if reserved_ip_name:
instance_record['reserved_ip_name'] = reserved_ip_name
if label:
instance_record['label'] = label
else:
instance_record['label'] = cloud_service_name
result = self.service.create_virtual_machine_deployment(
**instance_record
)
return {
'request_id': format(result.request_id),
'cloud_service_name': cloud_service_name,
'instance_name': system_config.host_name
}
except Exception as e:
raise AzureVmCreateError(
'%s: %s' % (type(e).__name__, format(e))
)
def delete_instance(
self, cloud_service_name, instance_name
):
"""
delete a virtual disk image instance
"""
try:
result = self.service.delete_role(
cloud_service_name, cloud_service_name, instance_name, True
)
return(Defaults.unify_id(result.request_id))
except Exception as e:
raise AzureVmDeleteError(
'%s: %s' % (type(e).__name__, format(e))
)
def shutdown_instance(
self, cloud_service_name, instance_name, deallocate_resources=False
):
"""
Shuts down the specified virtual disk image instance
If deallocate_resources is set to true the machine shuts down
and releases the compute resources. You are not billed for
the compute resources that this Virtual Machine uses in this case.
If a static Virtual Network IP address is assigned to the
Virtual Machine, it is reserved.
"""
post_shutdown_action = 'Stopped'
if deallocate_resources:
post_shutdown_action = 'StoppedDeallocated'
try:
result = self.service.shutdown_role(
cloud_service_name, cloud_service_name,
instance_name, post_shutdown_action
)
return(Defaults.unify_id(result.request_id))
except Exception as e:
raise AzureVmShutdownError(
'%s: %s' % (type(e).__name__, format(e))
)
def start_instance(
self, cloud_service_name, instance_name
):
"""
Start the specified virtual disk image instance.
"""
try:
result = self.service.start_role(
cloud_service_name, cloud_service_name,
instance_name
)
return(Defaults.unify_id(result.request_id))
except Exception as e:
raise AzureVmStartError(
'%s: %s' % (type(e).__name__, format(e))
)
def reboot_instance(
self, cloud_service_name, instance_name
):
"""
Requests reboot of a virtual disk image instance
"""
try:
result = self.service.reboot_role_instance(
cloud_service_name, cloud_service_name, instance_name
)
return(Defaults.unify_id(result.request_id))
except Exception as e:
raise AzureVmRebootError(
'%s: %s' % (type(e).__name__, format(e))
)
def instance_status(
self, cloud_service_name, instance_name=None
):
"""
Request instance status. An instance can be in different
states like Initializing, Running, Stopped. This method
returns the current state name.
"""
instance_state = 'Undefined'
if not instance_name:
instance_name = cloud_service_name
try:
properties = self.service.get_hosted_service_properties(
service_name=cloud_service_name,
embed_detail=True
)
for deployment in properties.deployments:
for instance in deployment.role_instance_list:
if instance.instance_name == instance_name:
instance_state = instance.instance_status
except Exception:
# if the properties can't be requested due to an error
# the default state value set to Undefined will be returned
pass
return instance_state
def __validate_custom_data_length(self, custom_data):
if (custom_data and (len(custom_data) > self.__max_custom_data_len())):
raise AzureCustomDataTooLargeError(
"The custom data specified is too large. Custom Data must" +
"be less than %d bytes" % self.__max_custom_data_len()
)
return True
def __get_deployment(self, cloud_service_name):
"""
check if the virtual machine deployment already exists.
Any other than a ResourceNotFound error will be treated
as an exception to stop processing
"""
try:
return self.service.get_deployment_by_name(
service_name=cloud_service_name,
deployment_name=cloud_service_name
)
except Exception as e:
if 'ResourceNotFound' in format(e):
return None
raise AzureVmCreateError(
'%s: %s' % (type(e).__name__, format(e))
)
def __cloud_service_location(self, cloud_service_name):
return self.service.get_hosted_service_properties(
cloud_service_name
).hosted_service_properties.location
def __storage_location(self):
return self.service.get_storage_account_properties(
self.account.storage_name()
).storage_service_properties.location
def __image_locations(self, disk_name):
try:
image_properties = self.service.get_os_image(disk_name)
return image_properties.location.split(';')
except Exception:
# if image does not exist return without an exception.
pass
def __storage_reachable_by_cloud_service(self, cloud_service_name):
service_location = self.__cloud_service_location(
cloud_service_name
)
storage_location = self.__storage_location()
if service_location == storage_location:
return True
else:
return False
def __image_reachable_by_cloud_service(self, cloud_service_name, disk_name):
service_location = self.__cloud_service_location(
cloud_service_name
)
image_locations = self.__image_locations(disk_name)
if not image_locations:
return False
if service_location in image_locations:
return True
else:
return False
def __max_custom_data_len(self):
"""
Custom Data is limited to 64K
https://msdn.microsoft.com/library/azure/jj157186.aspx
"""
return 65536
|
[
"azurectl.defaults.Defaults.unify_id",
"azure.servicemanagement.PublicKey",
"azure.servicemanagement.OSVirtualHardDisk",
"azure.servicemanagement.LinuxConfigurationSet",
"azure.servicemanagement.ConfigurationSet",
"azure.servicemanagement.ConfigurationSetInputEndpoint"
] |
[((1993, 2103), 'azure.servicemanagement.LinuxConfigurationSet', 'LinuxConfigurationSet', (['instance_name', 'username', 'password', 'disable_ssh_password_authentication', 'custom_data'], {}), '(instance_name, username, password,\n disable_ssh_password_authentication, custom_data)\n', (2014, 2103), False, 'from azure.servicemanagement import LinuxConfigurationSet\n'), ((2577, 2595), 'azure.servicemanagement.ConfigurationSet', 'ConfigurationSet', ([], {}), '()\n', (2593, 2595), False, 'from azure.servicemanagement import ConfigurationSet\n'), ((2996, 3066), 'azure.servicemanagement.ConfigurationSetInputEndpoint', 'ConfigurationSetInputEndpoint', (['name', 'protocol', 'public_port', 'local_port'], {}), '(name, protocol, public_port, local_port)\n', (3025, 3066), False, 'from azure.servicemanagement import ConfigurationSetInputEndpoint\n'), ((5669, 5709), 'azure.servicemanagement.OSVirtualHardDisk', 'OSVirtualHardDisk', (['disk_name', 'media_link'], {}), '(disk_name, media_link)\n', (5686, 5709), False, 'from azure.servicemanagement import OSVirtualHardDisk\n'), ((2269, 2305), 'azure.servicemanagement.PublicKey', 'PublicKey', (['fingerprint', 'ssh_key_file'], {}), '(fingerprint, ssh_key_file)\n', (2278, 2305), False, 'from azure.servicemanagement import PublicKey\n'), ((7516, 7552), 'azurectl.defaults.Defaults.unify_id', 'Defaults.unify_id', (['result.request_id'], {}), '(result.request_id)\n', (7533, 7552), False, 'from azurectl.defaults import Defaults\n'), ((8566, 8602), 'azurectl.defaults.Defaults.unify_id', 'Defaults.unify_id', (['result.request_id'], {}), '(result.request_id)\n', (8583, 8602), False, 'from azurectl.defaults import Defaults\n'), ((9089, 9125), 'azurectl.defaults.Defaults.unify_id', 'Defaults.unify_id', (['result.request_id'], {}), '(result.request_id)\n', (9106, 9125), False, 'from azurectl.defaults import Defaults\n'), ((9604, 9640), 'azurectl.defaults.Defaults.unify_id', 'Defaults.unify_id', (['result.request_id'], {}), '(result.request_id)\n', (9621, 9640), False, 'from azurectl.defaults import Defaults\n')]
|
from typing import List, Optional
import pytest
from httpx import AsyncClient
from starlette.applications import Starlette
from starlette.requests import Request
from starlette.responses import JSONResponse, Response
from ops2deb.exceptions import Ops2debUpdaterError
from ops2deb.logger import enable_debug
from ops2deb.updater import GenericUpdateStrategy, GithubUpdateStrategy
enable_debug(True)
@pytest.fixture
def app_factory():
def _app_response(request: Request):
return Response(status_code=200)
def _app_factory(versions: List[str]):
app = Starlette(debug=True)
for version in versions:
app.add_route(
f"/releases/{version}/some-app.tar.gz", _app_response, ["HEAD", "GET"]
)
return app
return _app_factory
@pytest.fixture
def github_app_factory():
def _github_app_factory(latest_release: str, versions: Optional[List[str]] = None):
versions = versions or []
app = Starlette(debug=True)
@app.route("/owner/name/releases/{version}/some-app.tar.gz")
def github_asset(request: Request):
version = request.path_params["version"]
status = 200 if version in versions or version == latest_release else 404
return Response(status_code=status)
@app.route("/repos/owner/name/releases/latest")
def github_release_api(request: Request):
return JSONResponse({"tag_name": latest_release})
return app
return _github_app_factory
@pytest.mark.parametrize(
"versions,expected_result",
[
(["1.0.0", "1.1.0"], "1.1.0"),
(["1.0.0", "1.1.3"], "1.1.3"),
(["1.0.0", "1.0.1", "1.1.0"], "1.1.0"),
(["1.0.0", "1.1.1", "2.0.0"], "1.1.1"),
(["1.0.0", "2.0.0"], "2.0.0"),
(["1.0.0", "2.0.3"], "2.0.3"),
(["1.0.0", "1.1.0", "2.0.0"], "1.1.0"),
(["1.0.0", "1.0.1", "1.0.2", "1.1.0", "1.1.1"], "1.1.1"),
],
)
async def test_generic_update_strategy_should_find_expected_blueprint_release(
blueprint_factory, app_factory, versions, expected_result
):
blueprint = blueprint_factory(
fetch={
"url": "http://test/releases/{{version}}/some-app.tar.gz",
"sha256": "deadbeef",
}
)
app = app_factory(versions)
async with AsyncClient(app=app) as client:
update_strategy = GenericUpdateStrategy(client)
assert await update_strategy(blueprint) == expected_result
@pytest.mark.parametrize(
"fetch_url,tag_name",
[
("https://github.com/owner/name/releases/{{version}}/some-app.tar.gz", "2.3.0"),
("https://github.com/owner/name/releases/v{{version}}/some-app.tar.gz", "v2.3.0"),
],
)
async def test_github_update_strategy_should_find_expected_blueprint_release(
blueprint_factory, github_app_factory, fetch_url, tag_name
):
app = github_app_factory(tag_name)
blueprint = blueprint_factory(fetch={"url": fetch_url, "sha256": "deadbeef"})
async with AsyncClient(app=app) as client:
update_strategy = GithubUpdateStrategy(client)
assert await update_strategy(blueprint) == "2.3.0"
async def test_github_update_strategy_should_not_return_an_older_version_than_current_one(
blueprint_factory, github_app_factory
):
app = github_app_factory("0.1.0", versions=["1.0.0"])
url = "https://github.com/owner/name/releases/{{version}}/some-app.tar.gz"
blueprint = blueprint_factory(fetch={"url": url, "sha256": "deadbeef"})
async with AsyncClient(app=app) as client:
update_strategy = GithubUpdateStrategy(client)
assert await update_strategy(blueprint) == "1.0.0"
async def test_github_update_strategy_should_fail_gracefully_when_asset_not_found(
blueprint_factory, github_app_factory
):
app = github_app_factory("someapp-v2.3.0")
url = "https://github.com/owner/name/releases/someapp-v{{version}}/some-app.tar.gz"
blueprint = blueprint_factory(fetch={"url": url, "sha256": "deadbeef"})
async with AsyncClient(app=app) as client:
with pytest.raises(Ops2debUpdaterError) as e:
await GithubUpdateStrategy(client)(blueprint)
assert "Failed to determine latest release URL" in str(e)
|
[
"starlette.applications.Starlette",
"ops2deb.updater.GithubUpdateStrategy",
"ops2deb.logger.enable_debug",
"starlette.responses.Response",
"starlette.responses.JSONResponse",
"httpx.AsyncClient",
"pytest.raises",
"pytest.mark.parametrize",
"ops2deb.updater.GenericUpdateStrategy"
] |
[((383, 401), 'ops2deb.logger.enable_debug', 'enable_debug', (['(True)'], {}), '(True)\n', (395, 401), False, 'from ops2deb.logger import enable_debug\n'), ((1533, 1904), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""versions,expected_result"""', "[(['1.0.0', '1.1.0'], '1.1.0'), (['1.0.0', '1.1.3'], '1.1.3'), (['1.0.0',\n '1.0.1', '1.1.0'], '1.1.0'), (['1.0.0', '1.1.1', '2.0.0'], '1.1.1'), ([\n '1.0.0', '2.0.0'], '2.0.0'), (['1.0.0', '2.0.3'], '2.0.3'), (['1.0.0',\n '1.1.0', '2.0.0'], '1.1.0'), (['1.0.0', '1.0.1', '1.0.2', '1.1.0',\n '1.1.1'], '1.1.1')]"], {}), "('versions,expected_result', [(['1.0.0', '1.1.0'],\n '1.1.0'), (['1.0.0', '1.1.3'], '1.1.3'), (['1.0.0', '1.0.1', '1.1.0'],\n '1.1.0'), (['1.0.0', '1.1.1', '2.0.0'], '1.1.1'), (['1.0.0', '2.0.0'],\n '2.0.0'), (['1.0.0', '2.0.3'], '2.0.3'), (['1.0.0', '1.1.0', '2.0.0'],\n '1.1.0'), (['1.0.0', '1.0.1', '1.0.2', '1.1.0', '1.1.1'], '1.1.1')])\n", (1556, 1904), False, 'import pytest\n'), ((2492, 2721), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""fetch_url,tag_name"""', "[('https://github.com/owner/name/releases/{{version}}/some-app.tar.gz',\n '2.3.0'), (\n 'https://github.com/owner/name/releases/v{{version}}/some-app.tar.gz',\n 'v2.3.0')]"], {}), "('fetch_url,tag_name', [(\n 'https://github.com/owner/name/releases/{{version}}/some-app.tar.gz',\n '2.3.0'), (\n 'https://github.com/owner/name/releases/v{{version}}/some-app.tar.gz',\n 'v2.3.0')])\n", (2515, 2721), False, 'import pytest\n'), ((495, 520), 'starlette.responses.Response', 'Response', ([], {'status_code': '(200)'}), '(status_code=200)\n', (503, 520), False, 'from starlette.responses import JSONResponse, Response\n'), ((579, 600), 'starlette.applications.Starlette', 'Starlette', ([], {'debug': '(True)'}), '(debug=True)\n', (588, 600), False, 'from starlette.applications import Starlette\n'), ((986, 1007), 'starlette.applications.Starlette', 'Starlette', ([], {'debug': '(True)'}), '(debug=True)\n', (995, 1007), False, 'from starlette.applications import Starlette\n'), ((2334, 2354), 'httpx.AsyncClient', 'AsyncClient', ([], {'app': 'app'}), '(app=app)\n', (2345, 2354), False, 'from httpx import AsyncClient\n'), ((2392, 2421), 'ops2deb.updater.GenericUpdateStrategy', 'GenericUpdateStrategy', (['client'], {}), '(client)\n', (2413, 2421), False, 'from ops2deb.updater import GenericUpdateStrategy, GithubUpdateStrategy\n'), ((3018, 3038), 'httpx.AsyncClient', 'AsyncClient', ([], {'app': 'app'}), '(app=app)\n', (3029, 3038), False, 'from httpx import AsyncClient\n'), ((3076, 3104), 'ops2deb.updater.GithubUpdateStrategy', 'GithubUpdateStrategy', (['client'], {}), '(client)\n', (3096, 3104), False, 'from ops2deb.updater import GenericUpdateStrategy, GithubUpdateStrategy\n'), ((3530, 3550), 'httpx.AsyncClient', 'AsyncClient', ([], {'app': 'app'}), '(app=app)\n', (3541, 3550), False, 'from httpx import AsyncClient\n'), ((3588, 3616), 'ops2deb.updater.GithubUpdateStrategy', 'GithubUpdateStrategy', (['client'], {}), '(client)\n', (3608, 3616), False, 'from ops2deb.updater import GenericUpdateStrategy, GithubUpdateStrategy\n'), ((4032, 4052), 'httpx.AsyncClient', 'AsyncClient', ([], {'app': 'app'}), '(app=app)\n', (4043, 4052), False, 'from httpx import AsyncClient\n'), ((1280, 1308), 'starlette.responses.Response', 'Response', ([], {'status_code': 'status'}), '(status_code=status)\n', (1288, 1308), False, 'from starlette.responses import JSONResponse, Response\n'), ((1435, 1477), 'starlette.responses.JSONResponse', 'JSONResponse', (["{'tag_name': 
latest_release}"], {}), "({'tag_name': latest_release})\n", (1447, 1477), False, 'from starlette.responses import JSONResponse, Response\n'), ((4077, 4111), 'pytest.raises', 'pytest.raises', (['Ops2debUpdaterError'], {}), '(Ops2debUpdaterError)\n', (4090, 4111), False, 'import pytest\n'), ((4136, 4164), 'ops2deb.updater.GithubUpdateStrategy', 'GithubUpdateStrategy', (['client'], {}), '(client)\n', (4156, 4164), False, 'from ops2deb.updater import GenericUpdateStrategy, GithubUpdateStrategy\n')]
|
from rest_framework import serializers
# djangorestframework-recursive
from rest_framework_recursive.fields import RecursiveField
# local
from .question import QuestionSerializer
from ..models import Section
class SectionSerializer(serializers.ModelSerializer):
children = RecursiveField(required=False, allow_null=True, many=True)
question_set = QuestionSerializer(many=True)
class Meta:
model = Section
fields = (
'id',
'url',
'title',
'parent',
'question_set',
'children',
)
|
[
"rest_framework_recursive.fields.RecursiveField"
] |
[((281, 339), 'rest_framework_recursive.fields.RecursiveField', 'RecursiveField', ([], {'required': '(False)', 'allow_null': '(True)', 'many': '(True)'}), '(required=False, allow_null=True, many=True)\n', (295, 339), False, 'from rest_framework_recursive.fields import RecursiveField\n')]
|
from django import forms
from django.contrib.auth import authenticate
from django.contrib.auth.forms import UserCreationForm, UserChangeForm
from .models import CustomUser, Profile
class CustomUserCreationForm(UserCreationForm):
class Meta:
model = CustomUser
fields = ("first_name", "last_name", "email")
class CustomUserChangeForm(UserChangeForm):
class Meta:
model = CustomUser
fields = ("first_name", "last_name", "email")
class CustomUserLoginForm(forms.Form):
email = forms.EmailField(widget=forms.EmailInput(attrs={"autofocus": True}))
password = forms.CharField(
strip=False,
widget=forms.PasswordInput(attrs={"autocomplete": "current-password"}),
)
def clean(self) -> None:
if self.is_valid():
email = self.cleaned_data["email"]
password = self.cleaned_data["password"]
user = authenticate(email=email, password=password)
if not user:
raise forms.ValidationError("Invalid login credentials!!", "invalid")
class ProfileCreationForm(forms.ModelForm):
class Meta:
model = Profile
exclude = ["user"]
widgets = {"bio": forms.Textarea(attrs={"cols": 80, "rows": 20})}
|
[
"django.forms.PasswordInput",
"django.forms.EmailInput",
"django.forms.ValidationError",
"django.contrib.auth.authenticate",
"django.forms.Textarea"
] |
[((549, 592), 'django.forms.EmailInput', 'forms.EmailInput', ([], {'attrs': "{'autofocus': True}"}), "(attrs={'autofocus': True})\n", (565, 592), False, 'from django import forms\n'), ((662, 725), 'django.forms.PasswordInput', 'forms.PasswordInput', ([], {'attrs': "{'autocomplete': 'current-password'}"}), "(attrs={'autocomplete': 'current-password'})\n", (681, 725), False, 'from django import forms\n'), ((910, 954), 'django.contrib.auth.authenticate', 'authenticate', ([], {'email': 'email', 'password': 'password'}), '(email=email, password=password)\n', (922, 954), False, 'from django.contrib.auth import authenticate\n'), ((1205, 1251), 'django.forms.Textarea', 'forms.Textarea', ([], {'attrs': "{'cols': 80, 'rows': 20}"}), "(attrs={'cols': 80, 'rows': 20})\n", (1219, 1251), False, 'from django import forms\n'), ((1002, 1065), 'django.forms.ValidationError', 'forms.ValidationError', (['"""Invalid login credentials!!"""', '"""invalid"""'], {}), "('Invalid login credentials!!', 'invalid')\n", (1023, 1065), False, 'from django import forms\n')]
|
from django.contrib import admin
from django.urls import path, include
from drf_yasg import openapi
from drf_yasg.views import get_schema_view
from rest_framework import permissions
schema_view = get_schema_view(
openapi.Info(
title='Movie DB API',
default_version='v1',
description='API to fetch movie data.',
),
public=True,
permission_classes=(permissions.AllowAny,),
authentication_classes=(),
)
docs_urlpatterns = [
path(
'docs/',
schema_view.with_ui('swagger', cache_timeout=0),
name='schema-swagger',
),
path(
'dock-redoc',
schema_view.with_ui('redoc', cache_timeout=0),
name='schema-redoc',
),
]
urlpatterns = [
path('admin/', admin.site.urls),
path('api/', include('movie.urls', namespace='movie')),
path(
'api-auth/',
include('rest_framework.urls', namespace='rest_framework')
),
] + docs_urlpatterns
|
[
"drf_yasg.openapi.Info",
"django.urls.path",
"django.urls.include"
] |
[((218, 319), 'drf_yasg.openapi.Info', 'openapi.Info', ([], {'title': '"""Movie DB API"""', 'default_version': '"""v1"""', 'description': '"""API to fetch movie data."""'}), "(title='Movie DB API', default_version='v1', description=\n 'API to fetch movie data.')\n", (230, 319), False, 'from drf_yasg import openapi\n'), ((736, 767), 'django.urls.path', 'path', (['"""admin/"""', 'admin.site.urls'], {}), "('admin/', admin.site.urls)\n", (740, 767), False, 'from django.urls import path, include\n'), ((786, 826), 'django.urls.include', 'include', (['"""movie.urls"""'], {'namespace': '"""movie"""'}), "('movie.urls', namespace='movie')\n", (793, 826), False, 'from django.urls import path, include\n'), ((868, 926), 'django.urls.include', 'include', (['"""rest_framework.urls"""'], {'namespace': '"""rest_framework"""'}), "('rest_framework.urls', namespace='rest_framework')\n", (875, 926), False, 'from django.urls import path, include\n')]
|
import sys
import os
import pickle
import math
import numpy as np
import matplotlib.pyplot as plt
from pprint import pprint
os.chdir('C:/Users/<NAME>/Documents/Data/WHI long term record/coatings/')
file = open('fraction of detectable notch positions by BC core size - aged.pickl', 'rb')  # binary mode required for pickle under Python 3
fractions_detectable_aged = pickle.load(file)
file.close()
os.chdir('C:/Users/<NAME>/Documents/Data/WHI long term record/coatings/')
file = open('fraction of detectable notch positions by BC core size - fresh.pickl', 'rb')  # binary mode required for pickle under Python 3
fractions_detectable_fresh = pickle.load(file)
file.close()
fractions_detectable_fresh.pop(0) #get rid of 65-70 bin, since no data really here
fractions_detectable_aged.pop(0) #get rid of 65-70 bin, since no data really here
pprint(fractions_detectable_aged)
pprint(fractions_detectable_fresh)
##plotting
bins_aged = [row[0] for row in fractions_detectable_aged]
fractions_aged = [row[1] for row in fractions_detectable_aged]
bins_fresh = [row[0] for row in fractions_detectable_fresh]
fractions_fresh = [row[1] for row in fractions_detectable_fresh]
#####plotting
fig = plt.figure()
ax = fig.add_subplot(111)
ax.scatter(bins_aged, fractions_aged, color = 'b', label = 'Background')
ax.scatter(bins_fresh, fractions_fresh, color = 'r', label = 'Fresh emissions')
ax.set_ylim(0,1.0)
ax.set_ylabel('fraction of particles with detectable notch position')
ax.set_xlabel('rBC core VED (nm)')
#ax.axvline(95, color='g', linestyle='-')
ax.axvline(155, color='r', linestyle='--')
ax.axvline(180, color='r', linestyle='--')
plt.legend(loc = 2)
os.chdir('C:/Users/<NAME>/Documents/Data/WHI long term record/coatings/')
plt.savefig('fraction of particles with detectable zero-crossing', bbox_inches='tight')
plt.show()
|
[
"matplotlib.pyplot.show",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.figure",
"pickle.load",
"pprint.pprint",
"os.chdir",
"matplotlib.pyplot.savefig"
] |
[((126, 199), 'os.chdir', 'os.chdir', (['"""C:/Users/<NAME>/Documents/Data/WHI long term record/coatings/"""'], {}), "('C:/Users/<NAME>/Documents/Data/WHI long term record/coatings/')\n", (134, 199), False, 'import os\n'), ((316, 333), 'pickle.load', 'pickle.load', (['file'], {}), '(file)\n', (327, 333), False, 'import pickle\n'), ((348, 421), 'os.chdir', 'os.chdir', (['"""C:/Users/<NAME>/Documents/Data/WHI long term record/coatings/"""'], {}), "('C:/Users/<NAME>/Documents/Data/WHI long term record/coatings/')\n", (356, 421), False, 'import os\n'), ((540, 557), 'pickle.load', 'pickle.load', (['file'], {}), '(file)\n', (551, 557), False, 'import pickle\n'), ((741, 774), 'pprint.pprint', 'pprint', (['fractions_detectable_aged'], {}), '(fractions_detectable_aged)\n', (747, 774), False, 'from pprint import pprint\n'), ((775, 809), 'pprint.pprint', 'pprint', (['fractions_detectable_fresh'], {}), '(fractions_detectable_fresh)\n', (781, 809), False, 'from pprint import pprint\n'), ((1094, 1106), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (1104, 1106), True, 'import matplotlib.pyplot as plt\n'), ((1539, 1556), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '(2)'}), '(loc=2)\n', (1549, 1556), True, 'import matplotlib.pyplot as plt\n'), ((1560, 1633), 'os.chdir', 'os.chdir', (['"""C:/Users/<NAME>/Documents/Data/WHI long term record/coatings/"""'], {}), "('C:/Users/<NAME>/Documents/Data/WHI long term record/coatings/')\n", (1568, 1633), False, 'import os\n'), ((1634, 1725), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""fraction of particles with detectable zero-crossing"""'], {'bbox_inches': '"""tight"""'}), "('fraction of particles with detectable zero-crossing',\n bbox_inches='tight')\n", (1645, 1725), True, 'import matplotlib.pyplot as plt\n'), ((1723, 1733), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1731, 1733), True, 'import matplotlib.pyplot as plt\n')]
|
import numpy as np
import torch
import pytorch_lightning as pl
from torch.utils.data import DataLoader
from implem.utils import device
class SimpleDataset(torch.utils.data.Dataset):
def __init__(self, data, offset=1, start=None, end=None):
super(SimpleDataset, self).__init__()
assert len(data.shape) >= 2 #[T,*D], where D can be [C,W,H] etc.
self.T = len(data)
self.data = data
self.offset = offset
self.start = 0 if start is None else start
self.end = self.T-np.asarray(self.offset).max() if end is None else end
assert self.end > self.start
self.idx = torch.arange(self.start, self.end, requires_grad=False, device='cpu')
def __getitem__(self, index):
""" Generate one batch of data """
x = self.data[self.idx[index]].reshape(*self.data.shape[1:])
y = self.data[self.idx[index]+self.offset].reshape(len(self.offset), *self.data.shape[1:])
return x,y
def __len__(self):
return len(self.idx)
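# Illustrative usage with made-up shapes (not from the original source):
#   data = torch.randn(100, 3, 32, 32)                 # [T, C, H, W]
#   ds = SimpleDataset(data, offset=np.arange(1, 4))   # targets at t+1, t+2, t+3
#   x, y = ds[0]                                       # x: [3, 32, 32], y: [3, 3, 32, 32]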
class MultiTrialDataset(torch.utils.data.Dataset):
def __init__(self, data, offset=1, start=None, end=None):
super(MultiTrialDataset, self).__init__()
assert len(data.shape) >= 3 #[N,T,*D], where D can be [C,W,H] etc.
self.N, self.T = data.shape[:2]
self.data = data.reshape(-1, *data.shape[2:]) #[NT,*D]
self.offset = offset
self.start = 0 if start is None else start
self.end = self.T-np.asarray(self.offset).max() if end is None else end
assert self.end > self.start
idx = torch.arange(self.start, self.end, requires_grad=False, device='cpu')
idx = [idx for j in range(self.N)]
self.idx = torch.cat([j*self.T + idx[j] for j in range(len(idx))])
def __getitem__(self, index):
""" Generate one batch of data """
x = self.data[self.idx[index]].reshape(*self.data.shape[1:])
y = self.data[self.idx[index]+self.offset].reshape(*self.data.shape[1:])
return x,y
def __len__(self):
return len(self.idx)
class MultiStepMultiTrialDataset(MultiTrialDataset):
def __init__(self, data, offset=1, start=None, end=None):
super(MultiStepMultiTrialDataset, self).__init__(data=data, offset=offset, start=start, end=end)
        self.offset = torch.as_tensor(np.asarray(offset, dtype=np.int64).reshape(1,-1), device='cpu')
def __getitem__(self, index):
""" Generate one batch of data """
io = (self.idx[index].reshape(-1,1) + self.offset.reshape(1,-1)).flatten()
x = self.data[self.idx[index]].reshape(*self.data.shape[1:])
y = self.data[io].reshape(np.prod(self.offset.shape), *self.data.shape[1:])
return x,y
class DataModule(pl.LightningDataModule):
def __init__(self, data, train_valid_split: int = 0.9,
batch_size: int = 2, offset: int = 1, Dataset=SimpleDataset,
**kwargs):
super().__init__()
self.data = data
self.Dataset = Dataset
self.batch_size = batch_size
self.offset = offset if isinstance(offset, np.ndarray) else np.arange(offset)
self.num_workers = 0
assert 0. < train_valid_split and train_valid_split <= 1.
self.train_valid_split = train_valid_split
def setup(self, stage=None):
if stage == 'fit' or stage is None:
split_index = int(len(self.data) * self.train_valid_split)
self.train_data = self.Dataset(data = self.data[:split_index], offset = self.offset)
self.valid_data = self.Dataset(data = self.data[split_index:], offset = self.offset)
def train_dataloader(self):
return DataLoader(self.train_data, batch_size=self.batch_size, num_workers=self.num_workers,
shuffle=True, generator=torch.Generator(device=device))
def val_dataloader(self):
return DataLoader(self.valid_data, batch_size=self.batch_size, num_workers=self.num_workers,
shuffle=False, generator=torch.Generator(device=device))
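# Illustrative usage with made-up shapes (not from the original source):
#   dm = DataModule(data=torch.randn(1000, 8), batch_size=32, offset=3)
#   dm.setup('fit')
#   x, y = next(iter(dm.train_dataloader()))  # x: [32, 8], y: [32, 3, 8]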
|
[
"numpy.asarray",
"numpy.prod",
"numpy.arange",
"torch.arange",
"torch.Generator"
] |
[((653, 722), 'torch.arange', 'torch.arange', (['self.start', 'self.end'], {'requires_grad': '(False)', 'device': '"""cpu"""'}), "(self.start, self.end, requires_grad=False, device='cpu')\n", (665, 722), False, 'import torch\n'), ((1611, 1680), 'torch.arange', 'torch.arange', (['self.start', 'self.end'], {'requires_grad': '(False)', 'device': '"""cpu"""'}), "(self.start, self.end, requires_grad=False, device='cpu')\n", (1623, 1680), False, 'import torch\n'), ((2689, 2715), 'numpy.prod', 'np.prod', (['self.offset.shape'], {}), '(self.offset.shape)\n', (2696, 2715), True, 'import numpy as np\n'), ((3164, 3181), 'numpy.arange', 'np.arange', (['offset'], {}), '(offset)\n', (3173, 3181), True, 'import numpy as np\n'), ((3857, 3887), 'torch.Generator', 'torch.Generator', ([], {'device': 'device'}), '(device=device)\n', (3872, 3887), False, 'import torch\n'), ((4072, 4102), 'torch.Generator', 'torch.Generator', ([], {'device': 'device'}), '(device=device)\n', (4087, 4102), False, 'import torch\n'), ((2363, 2395), 'numpy.asarray', 'np.asarray', (['offset'], {'dtype': 'np.int'}), '(offset, dtype=np.int)\n', (2373, 2395), True, 'import numpy as np\n'), ((542, 565), 'numpy.asarray', 'np.asarray', (['self.offset'], {}), '(self.offset)\n', (552, 565), True, 'import numpy as np\n'), ((1505, 1528), 'numpy.asarray', 'np.asarray', (['self.offset'], {}), '(self.offset)\n', (1515, 1528), True, 'import numpy as np\n')]
|