max_stars_repo_path stringlengths 3 269 | max_stars_repo_name stringlengths 4 119 | max_stars_count int64 0 191k | id stringlengths 1 7 | content stringlengths 6 1.05M | score float64 0.23 5.13 | int_score int64 0 5 |
|---|---|---|---|---|---|---|
dm_construction/environments_test.py | frangipane/dm_construction | 25 | 13300 | #!/usr/bin/python
#
# Copyright 2020 DeepMind Technologies Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Tests the open source construction environments."""
from absl import flags
from absl.testing import absltest
from absl.testing import parameterized
import dm_construction
import numpy as np
FLAGS = flags.FLAGS
flags.DEFINE_string("backend", "docker", "")
def _make_random_action(action_spec, observation):
"""Makes a random action given an action spec and observation."""
# Sample the random action.
action = {}
for name, spec in action_spec.items():
if name == "Index":
value = np.random.randint(observation["n_edge"])
elif spec.dtype in (np.int32, np.int64, int):
value = np.random.randint(spec.minimum, spec.maximum + 1)
else:
value = np.random.uniform(spec.minimum, spec.maximum)
action[name] = value
return action
def _random_unroll(env, seed=1234, num_steps=10, difficulty=5,
                   random_choice_before_reset=False):
  """Take `num_steps` random actions in `env`, resetting on episode end.

  Returns a (trajectory, actions) pair; both lists start with the initial
  reset timestep / a None placeholder so they stay index-aligned.
  """
  np.random.seed(seed)
  spec = env.action_spec()

  def _reset():
    if random_choice_before_reset:
      # Burn one RNG draw before resetting, mirroring the original flow.
      np.random.choice([8], p=[1.])
    return env.reset(difficulty=difficulty)

  timestep = _reset()
  trajectory = [timestep]
  actions = [None]
  for _ in range(num_steps):
    if timestep.last():
      timestep = _reset()
    action = _make_random_action(spec, timestep.observation)
    timestep = env.step(action)
    actions.append(action)
    trajectory.append(timestep)
  return trajectory, actions
class TestEnvironments(parameterized.TestCase):
  """Smoke tests for the open-source construction environments.

  A single Unity backend is constructed once per class and shared across
  all tests; each test builds a wrapped task environment on top of it and
  performs a short random unroll.
  """

  def _make_environment(
      self, problem_type, curriculum_sample, wrapper_type, backend_type=None):
    """Make the new version of the construction task."""
    if backend_type is None:
      backend_type = FLAGS.backend
    return dm_construction.get_environment(
        problem_type,
        unity_environment=self._unity_envs[backend_type],
        wrapper_type=wrapper_type,
        curriculum_sample=curriculum_sample)

  @classmethod
  def setUpClass(cls):
    super(TestEnvironments, cls).setUpClass()
    # Construct the unity environment.
    cls._unity_envs = {
        "docker": dm_construction.get_unity_environment("docker"),
    }

  @classmethod
  def tearDownClass(cls):
    super(TestEnvironments, cls).tearDownClass()
    for env in cls._unity_envs.values():
      env.close()

  @parameterized.named_parameters(
      ("covering", "covering"),
      ("covering_hard", "covering_hard"),
      ("connecting", "connecting"),
      ("silhouette", "silhouette"),
      ("marble_run", "marble_run"))
  def test_discrete_relative_environments_curriculum_sample(self, name):
    """Smoke test for discrete relative wrapper with curriculum_sample=True."""
    env = self._make_environment(name, True, "discrete_relative")
    _random_unroll(env, difficulty=env.core_env.max_difficulty)

  @parameterized.named_parameters(
      ("covering", "covering"),
      ("covering_hard", "covering_hard"),
      ("connecting", "connecting"),
      ("silhouette", "silhouette"),
      ("marble_run", "marble_run"))
  def test_continuous_absolute_environments_curriculum_sample(self, name):
    """Smoke test for continuous absolute wrapper w/ curriculum_sample=True."""
    env = self._make_environment(name, True, "continuous_absolute")
    _random_unroll(env, difficulty=env.core_env.max_difficulty)

  @parameterized.named_parameters(
      ("connecting_additional_layer", "connecting", "additional_layer"),
      ("connecting_mixed_height_targets", "connecting", "mixed_height_targets"),
      ("silhouette_double_the_targets", "silhouette", "double_the_targets"),)
  def test_generalization_modes(self, name, generalization_mode):
    """Smoke test for generalization modes with curriculum_sample=False."""
    env = self._make_environment(name, False, "discrete_relative")
    _random_unroll(env, difficulty=generalization_mode)
if __name__ == "__main__":
absltest.main()
| 2.4375 | 2 |
Modulo_3/semana 2/imagenes/imagen.py | rubens233/cocid_python | 0 | 13301 | <gh_stars>0
from tkinter import *

# Minimal Tk demo: display ./logo.png (downscaled by 3x) in a 500x500 window.
ventana = Tk()
ventana.geometry("500x500")
ventana.title('PythonGuides')
# Tk's PhotoImage supports PNG/GIF; subsample(3, 3) shrinks the image 3x.
img = PhotoImage(file='./logo.png')
img = img.subsample(3, 3)
Label( ventana, image=img ).pack(fill="both")
ventana.mainloop()
htdfsdk/utils.py | youngqqcn/htdfsdk | 2 | 13302 | #coding:utf8
#author: yqq
#date: 2020/12/15 下午5:38
#descriptions:
from decimal import Decimal, getcontext
from typing import Union
# getcontext()
def htdf_to_satoshi(amount_htdf: Union[float, int, str]) -> int:
    """Convert an HTDF amount to its integer satoshi representation.

    1 HTDF == 10**8 satoshi.  The amount is routed through ``Decimal(str(...))``
    so binary floating-point inputs (e.g. 139623.71827296) convert exactly.

    Note: the original annotation was the list literal ``[float, int, str]``,
    which is not a valid type; ``Union[float, int, str]`` expresses the intent.
    """
    return int(Decimal(str(amount_htdf)) * (10 ** 8))
if __name__ == '__main__':
assert htdf_to_satoshi(139623.71827296) == 13962371827296
assert htdf_to_satoshi('139623.71827296') == 13962371827296
assert htdf_to_satoshi(13962371827296) == 13962371827296 * 10 ** 8
pass
| 2.46875 | 2 |
hanzi_font_deconstructor/scripts/create_training_data.py | chanind/hanzi-font-deconstructor | 0 | 13303 | from dataclasses import asdict
from hanzi_font_deconstructor.common.generate_training_data import (
STROKE_VIEW_BOX,
get_training_input_svg_and_masks,
)
from os import path, makedirs
from pathlib import Path
import shutil
import argparse
# Output layout: data/sample_svgs/<index>-<stroke_count>.svg
PROJECT_ROOT = Path(__file__).parents[2]
DEST_FOLDER = PROJECT_ROOT / "data"

# NOTE(review): the parser runs at import time, and --max-strokes-per-img is
# parsed but never used below — confirm whether it should be forwarded to
# get_training_input_svg_and_masks.
parser = argparse.ArgumentParser(
    description="Generate training data for a model to deconstruct hanzi into strokes"
)
parser.add_argument("--max-strokes-per-img", default=5, type=int)
parser.add_argument("--total-images", default=50, type=int)
args = parser.parse_args()

if __name__ == "__main__":
    # create and empty the dest folder
    if path.exists(DEST_FOLDER):
        shutil.rmtree(DEST_FOLDER)
    makedirs(DEST_FOLDER)
    makedirs(DEST_FOLDER / "sample_svgs")
    # create the data
    # NOTE(review): 'data' is populated with the viewbox but never written
    # out or appended to — presumably meant to become a JSON manifest.
    data = {
        "viewbox": STROKE_VIEW_BOX,
        "imgs": [],
    }
    for i in range(args.total_images):
        (img_svg, stroke_masks) = get_training_input_svg_and_masks(256)
        # The file name encodes the sample index and its stroke count.
        label = f"{i}-{len(stroke_masks)}"
        with open(DEST_FOLDER / "sample_svgs" / f"{label}.svg", "w") as img_file:
            img_file.write(img_svg)
        print(".")
    print("Done!")
| 2.734375 | 3 |
examples/generated_sample_regression.py | micheleantonazzi/gibson-dataset | 3 | 13304 | from generic_dataset.data_pipeline import DataPipeline
from generic_dataset.generic_sample import synchronize_on_fields
from generic_dataset.sample_generator import SampleGenerator
import numpy as np
import generic_dataset.utilities.save_load_methods as slm
pipeline_rgb_to_gbr = DataPipeline().add_operation(lambda data, engine: (data[:, :, [2, 1, 0]], engine))
@synchronize_on_fields(field_names={'field_3'}, check_pipeline=False)
def field_3_is_positive(sample) -> bool:
    # Custom sample method: True when the sample's 'field_3' value is > 0.
    # The decorator serializes access to 'field_3' without requiring the
    # field's pipeline to have finished (check_pipeline=False).
    return sample.get_field_3() > 0
# To model a regression problem, label_set parameter must be empty
GeneratedSampleRegression = SampleGenerator(name='GeneratedSampleRegression', label_set=set()).add_dataset_field(field_name='rgb_image', field_type=np.ndarray, save_function=slm.save_compressed_numpy_array, load_function=slm.load_compressed_numpy_array) \
.add_dataset_field(field_name='bgr_image', field_type=np.ndarray, save_function=slm.save_cv2_image_bgr, load_function=slm.load_cv2_image_bgr) \
.add_field(field_name='field_3', field_type=int) \
.add_custom_pipeline(method_name='create_pipeline_convert_rgb_to_bgr', elaborated_field='rgb_image', final_field='bgr_image', pipeline=pipeline_rgb_to_gbr) \
.add_custom_method(method_name='field_3_is_positive', function=field_3_is_positive) \
.generate_sample_class() | 2.421875 | 2 |
setup.py | dilayercelik/neural-networks-tfw1 | 0 | 13305 | from setuptools import setup
setup(name='neural_networks_tfw1',
version='0.1',
description='Implementing Neural Networks with Tensorflow',
packages=['neural_networks_tfw1'],
author='<NAME>',
author_email='<EMAIL>',
zip_safe=False)
| 1.132813 | 1 |
half_json/json_util.py | half-pie/half-json | 4 | 13306 | <filename>half_json/json_util.py
# coding=utf8
import re
import json.decoder
from collections import namedtuple
from json.decoder import JSONDecoder
from json.scanner import py_make_scanner
from json.decoder import py_scanstring
class JSONDecodeError(object):
    """Identifies a JSON decode failure by parser name and message text.

    Equality is deliberately asymmetric: ``template == raised`` holds when
    both errors come from the same parser function and the template's
    message is a substring of the raised error's message.
    """

    def __init__(self, parser, message):
        self.parser = parser
        self.message = message

    def __eq__(self, err):
        same_parser = self.parser == err.parser
        return same_parser and (self.message in err.message)
class errors(object):
    """Catalog of known decode-error templates raised by the json parsers."""
    StringInvalidUXXXXEscape = JSONDecodeError("py_scanstring", "Invalid \\uXXXX escape")
    # 2 different case
    StringUnterminatedString = JSONDecodeError("py_scanstring", "Unterminated string starting at")
    StringInvalidControlCharacter = JSONDecodeError("py_scanstring", "Invalid control character")
    StringInvalidEscape = JSONDecodeError("py_scanstring", "Invalid \\escape")

    ObjectExceptColon = JSONDecodeError("JSONObject", "Expecting ':' delimiter")
    ObjectExceptObject = JSONDecodeError("JSONObject", "Expecting object")
    # 2 different case
    ObjectExceptKey = JSONDecodeError("JSONObject", "Expecting property name enclosed in double quotes")
    ObjectExceptComma = JSONDecodeError("JSONObject", "Expecting ',' delimiter")

    ArrayExceptObject = JSONDecodeError("JSONArray", "Expecting object")
    ArrayExceptComma = JSONDecodeError("JSONArray", "Expecting ',' delimiter")

    @classmethod
    def get_decode_error(cls, parser, message):
        # Build a probe error and compare it against every known template;
        # JSONDecodeError.__eq__ performs the parser/substring matching.
        # Returns the matching template, or None if nothing matches.
        err = JSONDecodeError(parser, message)
        for _, value in cls.__dict__.items():
            if isinstance(value, JSONDecodeError):
                if err == value:
                    return value
        return None
"""
01 先不看,不研究
02 badcase: " --> "" success
03 控制符 pass
04 unicode \\u 的 pass
05 同上
06 object 后面没有跟随 " , badcase: {abc":1} --> {"abc":1}
07 object key 后面没有 : , badcase: {"abc"1} --> {"abc":1}
08 object 开始检测 Value 收到 StopIteration
08.1 要么后面没有了
08.2 要么后面不是 "/{/[/n[ull]/t[rue]/f[alse]/number/NaN/Infinity/-Infinity 开头的东西
-- 08.1 后面补上 null}
-- 08.2 无脑补一个 "
09 object 解析完一个 pair 后,下一个不是}, 期待一个 ','
badcase {"k":1"s":2}
10 在 09 的基础上解析完{"k":1, 发现下一个不是 ", 这个后面再优化(暂时和 06 一致)
badcase {"k":1,x":2}
11 array 开始检测 Value 收到 StopIteration
11.1 要么后面没有了,补上]
11.2 同 08.2,无脑补一个{ 看看
12 array 解析完前一个 object, 需要一个 ,
这里 nextchar 既不是 ] 也不是, 代表这个 nextchar 的 end 也已经+1 了,所以减 2
"""
def errmsg_inv(e):
    """Parse a json ``ValueError`` message into structured position info.

    Returns a dict with the originating parser name / parser chain (as
    attached by ``record_parser_name``), the matched known error template
    from :class:`errors`, and the line/column/position numbers embedded in
    the exception message.
    """
    assert isinstance(e, ValueError)
    # ``ValueError.message`` only exists on Python 2; fall back to str(e)
    # so this also works on Python 3, where the attribute was removed.
    message = getattr(e, "message", None) or str(e)
    # Everything after the final ':' holds the position information,
    # e.g. "Expecting ':' delimiter: line 1 column 5 (char 4)".
    idx = message.rindex(':')
    errmsg, left = message[:idx], message[idx + 1:]
    numbers = re.findall(r'\d+', left)
    parser = e.__dict__.get("parser", "")
    result = {
        "parsers": e.__dict__.get("parsers", []),
        "error": errors.get_decode_error(parser, errmsg),
        "lineno": int(numbers[0]),
        "colno": int(numbers[1]),
    }
    if len(numbers) == 3:
        result["pos"] = int(numbers[2])
    if len(numbers) > 3:
        # Range-style message: "line L column C (char P) to line L2 column C2 (char P2)"
        result["endlineno"] = int(numbers[2])
        result["endcolno"] = int(numbers[3])
        result["pos"] = int(numbers[4])
        result["end"] = int(numbers[5])
    return result
def record_parser_name(parser):
    """Wrap *parser* so any raised exception records which parser failed.

    The innermost failing parser sets the ``parser`` attribute; every
    wrapped parser in the call chain appends its name to ``parsers`` as
    the exception propagates outward.
    """
    def wrapped(*args, **kwargs):
        try:
            return parser(*args, **kwargs)
        except Exception as exc:
            state = exc.__dict__
            state.setdefault("parser", parser.__name__)
            state.setdefault("parsers", []).append(parser.__name__)
            raise exc
    return wrapped
def make_decoder():
    """Build a ``JSONDecoder`` whose sub-parsers record their names on errors.

    Each parse routine is wrapped with ``record_parser_name`` so a failure
    can later be attributed by ``errmsg_inv``; the scanner is rebuilt last
    so it picks up the wrapped routines.
    """
    json.decoder.scanstring = record_parser_name(py_scanstring)
    decoder = JSONDecoder()
    decoder.parse_object = record_parser_name(decoder.parse_object)
    decoder.parse_array = record_parser_name(decoder.parse_array)
    decoder.parse_string = record_parser_name(py_scanstring)
    # The original wrapped parse_object a second time here, which made the
    # error bookkeeping append "JSONObject" twice; wrapping once suffices.
    decoder.scan_once = py_make_scanner(decoder)
    return decoder
# Module-level decoder shared by decode_line.
decoder = make_decoder()
# success: whole line decoded; exception: the raised error (if any);
# err_info: (obj, end) when scanning succeeded, errmsg_inv dict on ValueError.
DecodeResult = namedtuple('DecodeResult', ['success', 'exception', 'err_info'])


def decode_line(line):
    """Try to decode *line* as a single JSON value.

    Returns a DecodeResult.  ``success`` is True only when the entire line
    was consumed; ``err_info`` still carries ``(obj, end)`` for partially
    consumed input, the structured ``errmsg_inv`` dict on ValueError, and
    None on StopIteration (empty/truncated input).
    """
    try:
        obj, end = decoder.scan_once(line, 0)
        # Success only if the whole line was consumed by the scanner.
        ok = end == len(line)
        return DecodeResult(success=ok, exception=None, err_info=(obj, end))
    except StopIteration as e:
        return DecodeResult(success=False, exception=e, err_info=None)
    except ValueError as e:
        err_info = errmsg_inv(e)
        return DecodeResult(success=False, exception=e, err_info=err_info)
| 3.09375 | 3 |
capa-system/capaSystem.py | slumbermachine/capatimelapse | 2 | 13307 | <reponame>slumbermachine/capatimelapse
#!/usr/bin/python
#####################################################################
# Name : capaSystem.py
# Description : Read system data and update db for web display
# Environment : Tested under Raspberry Pi Rasbian Jessie Summer 17
# Author : <NAME> <EMAIL>
######################################################################
import MySQLdb
import sys
import time
from subprocess import Popen, PIPE
import logging
import logging.handlers
log = logging.getLogger('CapaTimeLapseLog')
log.setLevel(logging.DEBUG) # prod: logging.ERROR
handler = logging.handlers.SysLogHandler(address='/dev/log')
formatter = logging.Formatter('%(name)-12s %(levelname)-8s %(message)s')
handler.setFormatter(formatter)
log.addHandler(handler)
def get_temp(path="/sys/class/thermal/thermal_zone0/temp"):
    """Return the SoC temperature in degrees Celsius.

    Reads the kernel thermal-zone file directly instead of shelling out to
    ``cat`` as before — same data, no subprocess spawn.  The file contains
    millidegrees (e.g. ``45000`` for 45.0 C).

    Args:
        path: thermal-zone file to read; parameterized for testability.
    """
    with open(path) as f:
        return float(f.read().strip()) / 1000
def get_batt():
    """Return the battery voltage reported by lifepo4wered-cli.

    Returns the raw CLI output with trailing whitespace stripped, or "0"
    when the tool is missing or fails for any reason.
    NOTE(review): under Python 3 ``communicate()`` yields bytes while the
    fallback is a str — confirm the target interpreter is Python 2, or
    normalize the return type.
    """
    try:
        t1 = Popen(['/usr/local/bin/lifepo4wered-cli', 'get', 'vbat'], stdout=PIPE)
        output = t1.communicate()[0]
        clean = output.rstrip()
        return clean
    except Exception:
        # Best-effort: report "0" so callers can still log a sample.
        return "0"
def insert_db(temp, battery):
    """Insert one (date, time, temperature, battery) sample into tempdat.

    Exits the process if the database is unreachable; rolls back and logs
    on insert failure, then closes the connection either way.
    """
    try:
        db = MySQLdb.connect("localhost", "monitor", "23rdqw", "system")
    except Exception as e:
        log.critical('Error accessing database: %s', e)
        sys.exit('Error accessing database')
    try:
        cursor = db.cursor()
        # Parameterized query: the driver handles quoting/escaping instead
        # of interpolating the values into the SQL string with %-formatting.
        cursor.execute(
            "INSERT INTO tempdat values(0,CURRENT_DATE(),CURRENT_TIME(), %s, %s)",
            (temp, battery))
        db.commit()
    except Exception as e:
        db.rollback()
        log.critical('Error in database submission: %s', e)
    db.close()
def main():
    # Sample battery voltage and CPU temperature once a minute, forever.
    while True:
        battery = get_batt()
        temp = get_temp()
        insert_db(temp, battery)
        time.sleep(60)
if __name__ == '__main__':
main()
| 2.234375 | 2 |
wagtail/wagtailadmin/tasks.py | willcodefortea/wagtail | 0 | 13308 | from django.template.loader import render_to_string
from django.core.mail import send_mail
from django.conf import settings
from django.contrib.auth import get_user_model
from django.db.models import Q
from wagtail.wagtailcore.models import PageRevision, GroupPagePermission
from wagtail.wagtailusers.models import UserProfile
# The following will check to see if we can import task from celery -
# if not then we definitely haven't installed it
try:
from celery.decorators import task
NO_CELERY = False
except:
NO_CELERY = True
# However, we could have installed celery for other projects. So we will also
# check if we have defined the BROKER_URL setting. If not then definitely we
# haven't configured it.
if NO_CELERY or not hasattr(settings, 'BROKER_URL'):
# So if we enter here we will define a different "task" decorator that
# just returns the original function and sets its delay attribute to
# point to the original function: This way, the send_notification
# function will be actually called instead of the the
# send_notification.delay()
def task(f):
f.delay=f
return f
def users_with_page_permission(page, permission_type, include_superusers=True):
    """Return active users holding ``permission_type`` on ``page``.

    Permissions are granted through GroupPagePermission records attached to
    the page or any of its ancestors; superusers are included by default.
    """
    # Get user model
    User = get_user_model()
    # Find GroupPagePermission records of the given type that apply to this page or an ancestor
    ancestors_and_self = list(page.get_ancestors()) + [page]
    perm = GroupPagePermission.objects.filter(permission_type=permission_type, page__in=ancestors_and_self)
    # NOTE(review): a queryset is passed to an equality lookup here — verify
    # this was intended rather than groups__page_permissions__in=perm.
    q = Q(groups__page_permissions=perm)
    # Include superusers
    if include_superusers:
        q |= Q(is_superuser=True)
    return User.objects.filter(is_active=True).filter(q).distinct()
@task
def send_notification(page_revision_id, notification, excluded_user_id):
    """Email the users affected by a page-revision workflow event.

    ``notification`` is one of 'submitted' (mails users with publish
    permission), 'rejected' or 'approved' (mails the submitter).  Users
    who opted out via their UserProfile, users without an email address,
    and ``excluded_user_id`` are skipped.  The email template's first
    line is used as the subject; the rest as the body.
    """
    # Get revision
    revision = PageRevision.objects.get(id=page_revision_id)
    # Get list of recipients
    if notification == 'submitted':
        # Get list of publishers
        recipients = users_with_page_permission(revision.page, 'publish')
    elif notification in ['rejected', 'approved']:
        # Get submitter
        recipients = [revision.user]
    else:
        return
    # Get list of email addresses
    email_addresses = [
        recipient.email for recipient in recipients
        if recipient.email and recipient.id != excluded_user_id and getattr(UserProfile.get_for_user(recipient), notification + '_notifications')
    ]
    # Return if there are no email addresses
    if not email_addresses:
        return
    # Get email subject and content
    template = 'wagtailadmin/notifications/' + notification + '.html'
    rendered_template = render_to_string(template, dict(revision=revision, settings=settings)).split('\n')
    email_subject = rendered_template[0]
    email_content = '\n'.join(rendered_template[1:])
    # Get from email
    if hasattr(settings, 'WAGTAILADMIN_NOTIFICATION_FROM_EMAIL'):
        from_email = settings.WAGTAILADMIN_NOTIFICATION_FROM_EMAIL
    elif hasattr(settings, 'DEFAULT_FROM_EMAIL'):
        from_email = settings.DEFAULT_FROM_EMAIL
    else:
        from_email = '<EMAIL>'
    # Send email
    send_mail(email_subject, email_content, from_email, email_addresses)
@task
def send_email_task(email_subject, email_content, email_addresses, from_email=None):
    """Send an email, resolving the sender address from settings when absent.

    Falls back from WAGTAILADMIN_NOTIFICATION_FROM_EMAIL to
    DEFAULT_FROM_EMAIL to a hard-coded placeholder.
    """
    if not from_email:
        from_email = getattr(
            settings, 'WAGTAILADMIN_NOTIFICATION_FROM_EMAIL',
            getattr(settings, 'DEFAULT_FROM_EMAIL', '<EMAIL>'))
    send_mail(email_subject, email_content, from_email, email_addresses)
| 1.945313 | 2 |
app/map_sup_enrich_compose.py | onap/sdc-dcae-d-tosca-lab | 1 | 13309 | <gh_stars>1-10
#Author: <NAME>
#emaiL: <EMAIL>
from toscalib.tosca_workbook import ToscaWorkBook
from toscalib.tosca_builder import ToscaBuilder
import getopt, sys, json, logging
def usage():
    """Print the command-line options for this tool to stdout."""
    lines = (
        'OPTIONS:',
        '\t-h|--help: print this help message',
        '\t-i|--input: The home folder where all spec files are',
        '\t-o|--output: the output file name',
        '\t-v|--value: the json value file',
    )
    for line in lines:
        print(line)
def main():
    """Build TOSCA models for map/enrich/supplement and compose them.

    Reads spec files from the --input folder, generates schema/template/
    translate YAML for each microservice, wires the three together in a
    ToscaWorkBook (optionally applying property values from the --value
    JSON file), and exports the composed blueprint to --output.
    """
    try:
        opts, args = getopt.getopt(sys.argv[1:], "hi:o:v:", ["help", "input=", "output=", "value="])
    except getopt.GetoptError as err:
        # print help information and exit:
        logging.error(str(err))  # will print something like "option -a not recognized"
        usage()
        sys.exit(2)
    spec_prefix = None
    output_file = None
    value_file = None
    for o, a in opts:
        if o in ("-h", "--help"):
            usage()
            sys.exit()
        elif o in ("-i", "--input"):
            spec_prefix = a
        elif o in ("-o", "--output"):
            output_file = a
        elif o in ("-v", "--value"):
            value_file = a
        else:
            logging.error('Unrecognized option: ' + o)
            usage()
            sys.exit(2)
    if spec_prefix is None or output_file is None:
        logging.error('Incorrect arguments!')
        usage()
        sys.exit(2)
    model_prefix = './data/tosca_model'
    meta_model = './data/meta_model/meta_tosca_schema.yaml'
    # Generate schema, template and translation YAML for each microservice.
    for ms in ['map', 'enrich', 'supplement']:
        builder = ToscaBuilder()
        builder.import_schema(meta_model)
        builder.import_spec(spec_prefix+'/dcae-event-proc/dcae-event-proc-cdap-' + ms+ '\\' + ms+ '_spec.json')
        builder.create_node_type()
        builder.export_schema(model_prefix+'/' + ms + '/schema.yaml')
        builder.import_schema(model_prefix+'/' + ms + '/schema.yaml')
        builder.create_model(ms)
        builder.export_model(model_prefix+'/' + ms + '/template.yaml')
        builder.create_translate(ms)
        builder.export_translation(model_prefix+'/' + ms + '/translate.yaml')
    # Compose the three models plus the shared model library.
    workbook = ToscaWorkBook()
    workbook._import_dir(model_prefix)
    workbook._import_dir('./data/shared_model/')
    workbook._use('map','NO_PREFIX')
    workbook._use('supplement','NO_PREFIX')
    workbook._use('enrich','NO_PREFIX')
    if value_file is not None:
        try:
            with open(value_file) as data_file:
                data = json.load(data_file)
            for ms in ['map', 'enrich', 'supplement']:
                # if data.has_key(ms):
                if ms in data:
                    prop_sec = data[ms]
                    for key in prop_sec.keys():
                        workbook._assign(ms, key, prop_sec[key])
        # BUGFIX: the original read "except err:", which referenced an
        # undefined name and raised NameError whenever an error occurred.
        except Exception as err:
            logging.error("Unable to read " + value_file)
            logging.error(str(err))
    workbook._add_shared_node([{'dcae.capabilities.cdapHost':'cdap_host'}, {'dcae.capabilities.dockerHost': 'docker_host'}, {'dcae.capabilities.composition.host': 'composition_virtual'}])
    # Chain the stream publications: map -> supplement -> enrich.
    workbook._assign('supplement', 'stream_publish_0', 'map')
    workbook._assign('enrich', 'stream_publish_0', 'supplement')
    workbook.tran_db = workbook.db
    workbook._export_yaml('event_proc.yaml', 'no_expand,main')
    workbook._export_yaml(output_file, 'cloudify,main')
if __name__ == "__main__":
main() | 2.53125 | 3 |
gollama/backend/tests/test_api/test_shorthand.py | benjaminhubbell/gollama | 1 | 13310 | <filename>gollama/backend/tests/test_api/test_shorthand.py
from django.test import TestCase
from rest_framework.test import APIClient
class TestShortHand(TestCase):
    """CRUD tests for the /api/v1/shorthand/ REST endpoint."""

    def setUp(self) -> None:
        self.client = APIClient()

    def test_get(self):
        # Create a record, then fetch it back by id.
        response_json = self.client.post('/api/v1/shorthand/', {'label': 'foo', 'url': 'http://bar.com'}).json()
        response = self.client.get(f'/api/v1/shorthand/{response_json["id"]}/')
        self.assertEqual(200, response.status_code)
        self.assertEqual({
            'id': 1,
            'label': 'foo',
            'url': 'http://bar.com'
        }, response.json())

    def test_list(self):
        self.client.post('/api/v1/shorthand/', {'label': 'foo', 'url': 'http://bar.com'})
        self.client.post('/api/v1/shorthand/', {'label': 'bar', 'url': 'http://foo.com'})
        response = self.client.get('/api/v1/shorthand/')
        self.assertEqual(200, response.status_code)
        response_json = response.json()
        self.assertEqual(2, len(response_json))

    def test_create(self):
        response = self.client.post('/api/v1/shorthand/', {'label': 'foo', 'url': 'http://bar.com'})
        self.assertEqual(201, response.status_code)
        response_json = response.json()
        self.assertEqual({
            'id': 1,
            'label': 'foo',
            'url': 'http://bar.com'
        }, response_json)

    def test_create_fail_duplicate(self):
        # A second create with an already-used label must be rejected.
        response = self.client.post('/api/v1/shorthand/', {'label': 'foo', 'url': 'http://bar.com'})
        self.assertEqual(201, response.status_code)
        response = self.client.post('/api/v1/shorthand/', {'label': 'foo', 'url': 'http://bar.com'})
        self.assertEqual(400, response.status_code)

    def test_update(self):
        response = self.client.post('/api/v1/shorthand/', {'label': 'foo', 'url': 'http://bar.com'})
        self.assertEqual(201, response.status_code)
        response_json = response.json()
        self.assertEqual('http://bar.com', response_json['url'])
        # Partial update of the url only.
        response = self.client.patch(f'/api/v1/shorthand/{response_json["id"]}/', {'url': 'https://bar.com'})
        self.assertEqual(200, response.status_code)
        response_json = response.json()
        self.assertEqual('https://bar.com', response_json['url'])

    def test_delete(self):
        response = self.client.post('/api/v1/shorthand/', {'label': 'foo', 'url': 'http://bar.com'})
        self.assertEqual(201, response.status_code)
        response_json = response.json()
        self.assertEqual({
            'id': 1,
            'label': 'foo',
            'url': 'http://bar.com'
        }, response_json)
        response = self.client.delete(f'/api/v1/shorthand/{response_json["id"]}/')
        self.assertEqual(204, response.status_code)
        # Deleted records must no longer be retrievable.
        response = self.client.get(f'/api/v1/shorthand/{response_json["id"]}/')
        self.assertEqual(404, response.status_code)
| 2.390625 | 2 |
lusidtools/lpt/qry_scopes.py | fossabot/lusid-python-tools | 1 | 13311 | import pandas as pd
import dateutil
from lusidtools.lpt import lpt
from lusidtools.lpt import lse
from lusidtools.lpt import stdargs
from .either import Either
import re
import urllib.parse
rexp = re.compile(r".*page=([^=']{10,}).*")
TOOLNAME = "scopes"
TOOLTIP = "List scopes"
def parse(extend=None, args=None):
    """Build the argument parser for the scopes tool and parse *args*."""
    return (
        stdargs.Parser("Get Scopes", ["filename", "limit"])
        .add("--portfolios", action="store_true")
        .extend(extend)
        .parse(args)
    )
def process_args(api, args):
    """Page through list_portfolios and build the scopes (or portfolios) table.

    Follows "NextPage" links until exhausted, accumulating one DataFrame
    per page.  Returns the trimmed/sorted concatenation, or the Either-left
    error if any API call fails.
    """
    results = []

    def fetch_page(page_token):
        return api.call.list_portfolios(page=page_token)

    def got_page(result):
        if args.portfolios:
            # One row per portfolio.
            df = lpt.to_df(
                result,
                ["id.scope", "id.code", "is_derived", "type", "parent_portfolio_id"],
            )
            df.columns = ["Scope", "Portfolio", "Derived", "Type", "Parent"]
        else:
            # One row per scope with its portfolio count.
            df = (
                pd.DataFrame({"Scopes": [v.id.scope for v in result.content.values]})
                .groupby("Scopes")
                .size()
                .reset_index()
            )
        results.append(df)
        # Extract the next-page token from the "NextPage" link, if present.
        links = [l for l in result.content.links if l.relation == "NextPage"]
        if len(links) > 0:
            match = rexp.match(links[0].href)
            if match:
                return urllib.parse.unquote(match.group(1))
        return None  # no more pages

    page = Either(None)
    while True:
        page = fetch_page(page.right).bind(got_page)
        if page.is_left():
            return page  # propagate the API error
        # NOTE(review): `page.right is None` would be the idiomatic check.
        if page.right == None:
            break
    return lpt.trim_df(
        pd.concat(results, ignore_index=True, sort=False),
        args.limit,
        sort=["Scope", "Portfolio"] if args.portfolios else "Scopes",
    )
# Standalone tool
def main(parse=parse, display_df=lpt.display_df):
return lpt.standard_flow(parse, lse.connect, process_args, display_df)
| 2.421875 | 2 |
qqbot/qqbotcls.py | skarl-api/qqbot | 0 | 13312 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
QQBot -- A conversation robot base on Tencent's SmartQQ
Website -- https://github.com/pandolia/qqbot/
Author -- <EMAIL>
"""
import sys, os
p = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
if p not in sys.path:
sys.path.insert(0, p)
import sys, subprocess, time
from apscheduler.schedulers.background import BackgroundScheduler
from collections import defaultdict
from qqbot.qconf import QConf
from qqbot.utf8logger import INFO, CRITICAL, ERROR, WARN
from qqbot.qsession import QLogin, RequestError
from qqbot.exitcode import RESTART, POLL_ERROR, FRESH_RESTART
from qqbot.common import StartDaemonThread, Import
from qqbot.qterm import QTermServer
from qqbot.mainloop import MainLoop, Put
from qqbot.groupmanager import GroupManager
def runBot(botCls, qq, user):
    """Run the bot, either directly (child) or as the supervising parent.

    When the trailing '--subprocessCall' flag is present this process IS
    the bot.  Otherwise it re-spawns itself as a child process and restarts
    it according to the child's exit code (0 = clean stop, RESTART /
    FRESH_RESTART = relaunch, anything else = crash handling).
    """
    if sys.argv[-1] == '--subprocessCall':
        isSubprocessCall = True
        sys.argv.pop()
    else:
        isSubprocessCall = False

    if isSubprocessCall:
        bot = botCls()
        bot.Login(qq, user)
        bot.Run()
    else:
        conf = QConf(qq, user)

        # Re-invoke through the interpreter when launched from a .py file.
        if sys.argv[0].endswith('py') or sys.argv[0].endswith('pyc'):
            args = [sys.executable] + sys.argv
        else:
            args = sys.argv

        args = args + ['--mailAuthCode', conf.mailAuthCode]
        args = args + ['--qq', conf.qq]
        args = args + ['--subprocessCall']

        # Supervision loop: args[-2] is the qq slot, rewritten per restart.
        while True:
            p = subprocess.Popen(args)
            pid = p.pid
            code = p.wait()
            if code == 0:
                INFO('QQBot 正常停止')
                sys.exit(code)
            elif code == RESTART:
                args[-2] = conf.LoadQQ(pid)
                INFO('5 秒后重新启动 QQBot (自动登陆)')
                time.sleep(5)
            elif code == FRESH_RESTART:
                args[-2] = ''
                INFO('5 秒后重新启动 QQBot (手工登陆)')
                time.sleep(5)
            else:
                CRITICAL('QQBOT 异常停止(code=%s)', code)
                if conf.restartOnOffline:
                    args[-2] = conf.LoadQQ(pid)
                    INFO('30秒后重新启动 QQBot (自动登陆)')
                    time.sleep(30)
                else:
                    sys.exit(code)
def RunBot(botCls=None, qq=None, user=None):
    """Public entry point: run the bot class, exiting quietly on Ctrl-C."""
    bot_class = botCls or QQBot
    try:
        runBot(bot_class, qq, user)
    except KeyboardInterrupt:
        sys.exit(1)
class QQBot(GroupManager):
    """The bot itself: SmartQQ session, contact DB, plugins and scheduler.

    ``Login`` binds session/contact-db operations onto the instance;
    ``Run`` starts the poll, terminal-server and interval daemon threads
    and enters the main loop.  Plugin slots and cron jobs are registered
    at class level via ``AddSlot`` / ``AddSched``.
    """

    def Login(self, qq=None, user=None):
        # Establish the session and expose its operations as bound
        # attributes, grouped by the thread that is expected to use them.
        session, contactdb, self.conf = QLogin(qq, user)
        # main thread
        self.SendTo = session.SendTo
        self.groupKick = session.GroupKick
        self.groupSetAdmin = session.GroupSetAdmin
        self.groupShut = session.GroupShut
        self.groupSetCard = session.GroupSetCard
        # main thread
        self.List = contactdb.List
        self.Update = contactdb.Update
        self.StrOfList = contactdb.StrOfList
        self.ObjOfList = contactdb.ObjOfList
        self.findSender = contactdb.FindSender
        self.firstFetch = contactdb.FirstFetch
        self.Delete = contactdb.db.Delete
        self.Modify = contactdb.db.Modify
        # child thread 1
        self.poll = session.Copy().Poll
        # child thread 2
        self.termForver = QTermServer(self.conf.termServerPort).Run

    def Run(self):
        QQBot.initScheduler(self)
        # Importing qqbot.qslots registers the built-in slots as a side effect.
        import qqbot.qslots as _x; _x
        for plugin in self.conf.plugins:
            self.Plug(plugin)
        if self.conf.startAfterFetch:
            self.firstFetch()
        self.onStartupComplete()
        StartDaemonThread(self.pollForever)
        StartDaemonThread(self.termForver, self.onTermCommand)
        StartDaemonThread(self.intervalForever)
        MainLoop()

    def Stop(self):
        sys.exit(0)

    def Restart(self):
        # Persist the current QQ so the supervisor can auto-relogin.
        self.conf.StoreQQ()
        sys.exit(RESTART)

    def FreshRestart(self):
        sys.exit(FRESH_RESTART)

    # child thread 1
    def pollForever(self):
        # Poll the session in a loop, posting results to the main loop;
        # a RequestError triggers a supervised exit with POLL_ERROR.
        while True:
            try:
                result = self.poll()
            except RequestError:
                self.conf.StoreQQ()
                Put(sys.exit, POLL_ERROR)
                break
            except:
                ERROR('qsession.Poll 方法出错', exc_info=True)
            else:
                Put(self.onPollComplete, *result)

    def onPollComplete(self, ctype, fromUin, membUin, content):
        # Resolve the sender, rewrite @-mentions, log, then fire the slot.
        if ctype == 'timeout':
            return
        contact, member, nameInGroup = \
            self.findSender(ctype, fromUin, membUin, self.conf.qq)
        if self.detectAtMe(nameInGroup, content):
            INFO('有人 @ 我:%s[%s]' % (contact, member))
            content = '[@ME] ' + content.replace('@'+nameInGroup, '')
        else:
            content = content.replace('@ME', '@Me')
        if ctype == 'buddy':
            INFO('来自 %s 的消息: "%s"' % (contact, content))
        else:
            INFO('来自 %s[%s] 的消息: "%s"' % (contact, member, content))
        self.onQQMessage(contact, member, content)

    def detectAtMe(self, nameInGroup, content):
        # True when the bot's in-group name is @-mentioned in the message.
        return nameInGroup and ('@'+nameInGroup) in content

    # child thread 5
    def intervalForever(self):
        # Fire the onInterval slot every 5 minutes via the main loop.
        while True:
            time.sleep(300)
            Put(self.onInterval)

    # Registered slot callbacks keyed by event name (shared by all plugins).
    slotsTable = {
        'onQQMessage': [],
        'onInterval': [],
        'onStartupComplete': []
    }
    # Names of currently-loaded plugin modules.
    plugins = set()

    @classmethod
    def AddSlot(cls, func):
        # Decorator: register func under the slot matching its name.
        cls.slotsTable[func.__name__].append(func)
        return func

    @classmethod
    def unplug(cls, moduleName, removeJob=True):
        # Remove all slots (and optionally scheduled jobs) owned by a module.
        # Swap-with-last removal keeps the scan O(n) despite pops.
        for slots in cls.slotsTable.values():
            i = 0
            while i < len(slots):
                if slots[i].__module__ == moduleName:
                    slots[i] = slots[-1]
                    slots.pop()
                else:
                    i += 1
        if removeJob:
            for job in cls.schedTable.pop(moduleName, []):
                job.remove()
        cls.plugins.discard(moduleName)

    @classmethod
    def Unplug(cls, moduleName):
        # Public unload; returns a human-readable status string.
        if moduleName not in cls.plugins:
            result = '警告:试图卸载未安装的插件 %s' % moduleName
            WARN(result)
        else:
            cls.unplug(moduleName)
            result = '成功:卸载插件 %s' % moduleName
            INFO(result)
        return result

    @classmethod
    def Plug(cls, moduleName):
        # (Re)load a plugin module and register its slot functions.
        cls.unplug(moduleName)
        try:
            module = Import(moduleName)
        except (Exception, SystemExit) as e:
            result = '错误:无法加载插件 %s ,%s: %s' % (moduleName, type(e), e)
            ERROR(result)
        else:
            # Importing may have scheduled jobs; keep them while re-scanning slots.
            cls.unplug(moduleName, removeJob=False)
            names = []
            for slotName in cls.slotsTable.keys():
                if hasattr(module, slotName):
                    cls.slotsTable[slotName].append(getattr(module, slotName))
                    names.append(slotName)
            if (not names) and (moduleName not in cls.schedTable):
                result = '警告:插件 %s 中没有定义回调函数或定时任务' % moduleName
                WARN(result)
            else:
                cls.plugins.add(moduleName)
                jobs = cls.schedTable.get(moduleName,[])
                jobNames = [f.func.__name__ for f in jobs]
                result = '成功:加载插件 %s(回调函数%s、定时任务%s)' % \
                         (moduleName, names, jobNames)
                INFO(result)
        return result

    @classmethod
    def Plugins(cls):
        return list(cls.plugins)

    # Class-level cron scheduler and per-module job registry.
    scheduler = BackgroundScheduler(daemon=True)
    schedTable = defaultdict(list)

    @classmethod
    def initScheduler(cls, bot):
        cls._bot = bot
        cls.scheduler.start()

    @classmethod
    def AddSched(cls, **triggerArgs):
        # Decorator factory: schedule func as a cron job that runs on the
        # main loop (via Put) with the bot instance as its argument.
        def wrapper(func):
            job = lambda: Put(func, cls._bot)
            job.__name__ = func.__name__
            j = cls.scheduler.add_job(job, 'cron', **triggerArgs)
            cls.schedTable[func.__module__].append(j)
            return func
        return wrapper
def wrap(slots):
return lambda *a,**kw: [f(*a, **kw) for f in slots[:]]
for name, slots in QQBot.slotsTable.items():
setattr(QQBot, name, wrap(slots))
QQBotSlot = QQBot.AddSlot
QQBotSched = QQBot.AddSched
if __name__ == '__main__':
bot = QQBot()
bot.Login(user='hcj')
gl = bot.List('group')
ml = bot.List(gl[0])
m = ml[0]
| 2.078125 | 2 |
tests/gear_scripts/buitin_runtime_func_GearsBuilder.py | jsam/redgrease | 17 | 13313 | from redgrease import GearsBuilder
gb = GearsBuilder()
gb.run()
| 1.179688 | 1 |
tests/tests.py | ActivityWatch/activitywatch-old | 4 | 13314 | from copy import copy
from itertools import groupby
import unittest
from datetime import datetime, timedelta
from typing import List
from activitywatch.base import Watcher, Activity, Logger
from activitywatch.settings import Settings
from activitywatch.utils import floor_datetime, ceil_datetime
from activitywatch.filters.split import split_by_interval, overlaps
from activitywatch.filters.chunk import chunk_by_tags
class MockWatcher(Watcher):
    """Watcher stub: run/wait are no-ops so tests drive dispatch manually."""
    def run(self):
        pass

    def wait(self):
        pass

    identifier = "mock"

    def __init__(self):
        # Register an empty config section so the Watcher base finds settings.
        settings = Settings()
        settings["watchers"][self.identifier] = {}
        Watcher.__init__(self)
class MockLogger(Logger):
    """Logger stub: discards activities; wait is a no-op for tests."""
    def log(self, activities: List[Activity]):
        pass

    def wait(self):
        pass

    identifier = "mock"

    def __init__(self):
        # Register an empty config section so the Logger base finds settings.
        settings = Settings()
        settings["loggers"][self.identifier] = {}
        Logger.__init__(self)
class LoggerWatcherTest(unittest.TestCase):
    """Checks that activities flow from a watcher into its attached logger."""

    def test_activity_flow(self):
        watcher = MockWatcher()
        logger = MockLogger()
        logger.add_watcher(watcher)
        watcher.dispatch_activity(
            Activity("test", datetime.now() - timedelta(days=1), datetime.now()))
        # assertEqual reports the actual length on failure, unlike the
        # original assertTrue(len(...) == 1) which only says "False is not true".
        activities = logger.flush_activities()
        self.assertEqual(len(activities), 1)
        # A second flush must be empty: flushing drains the buffer.
        activities = logger.flush_activities()
        self.assertEqual(len(activities), 0)
class ActivityTest(unittest.TestCase):
    """Tests for Activity serialization."""

    def test_to_zenobase(self):
        TAG = "something"
        activity = Activity(TAG, started_at=datetime.now(), ended_at=datetime.now())
        event = activity.to_zenobase_event()
        # assertEqual shows both values on failure, unlike assertTrue(a == b).
        self.assertEqual(event["tag"], TAG)
class SettingsTest(unittest.TestCase):
    def test_instance(self):
        """Settings must behave as a singleton: two calls, one object."""
        first = Settings()
        second = Settings()
        self.assertIs(first, second)
# One hour; used as the split interval in the tests below.
HOUR = timedelta(hours=1)
class SplitActivityTest(unittest.TestCase):
    """Tests for splitting activities along fixed time intervals."""

    def test_by_hour(self):
        begin = datetime(2015, 1, 1, 8, 30)
        duration = timedelta(hours=3, minutes=23)
        activity = Activity([], begin, begin + duration)
        # Two copies of a 3h23m activity starting at 8:30 each split into
        # 4 hourly pieces -> 8 total.
        pieces = split_by_interval([copy(activity), copy(activity)], interval=HOUR)
        self.assertEqual(len(pieces), 8)
        # Shrink the activity to 2 minutes: it fits inside one hour.
        activity.end += -duration + timedelta(minutes=2)
        pieces = split_by_interval([copy(activity)], interval=HOUR)
        self.assertEqual(len(pieces), 1)

    def test_ceil_hour(self):
        def ceil_hour(moment):
            return ceil_datetime(moment, td=timedelta(hours=1))
        self.assertEqual(ceil_hour(datetime(2015, 1, 1, 6, 2)), datetime(2015, 1, 1, 7))
        self.assertEqual(ceil_hour(datetime(2015, 1, 1, 6, 2)),
                         ceil_hour(datetime(2015, 1, 1, 6, 58)))
        self.assertNotEqual(ceil_hour(datetime(2015, 1, 1, 5, 2)),
                            ceil_hour(datetime(2015, 1, 1, 6, 4)))

    def test_floor_hour(self):
        def floor_hour(moment):
            return floor_datetime(moment, td=timedelta(hours=1))
        self.assertEqual(floor_hour(datetime(2015, 1, 1, 6, 2)), datetime(2015, 1, 1, 6))
        self.assertEqual(floor_hour(datetime(2015, 1, 1, 6, 2)),
                         floor_hour(datetime(2015, 1, 1, 6, 5)))

    def test_overlaps_hour(self):
        def overlaps_hours(act):
            return overlaps(act, interval=timedelta(hours=1))
        # Crosses the 6 o'clock boundary.
        self.assertTrue(overlaps_hours(
            Activity([], datetime(2015, 1, 1, 5, 23), datetime(2015, 1, 1, 6, 6))))
        # Even a one-microsecond overshoot counts as crossing.
        self.assertTrue(overlaps_hours(
            Activity([], datetime(2015, 1, 1, 5, 23), datetime(2015, 1, 1, 6, 0, 0, 1))))
        # Entirely inside a single hour: no boundary crossed.
        self.assertFalse(overlaps_hours(
            Activity([], datetime(2015, 1, 1, 6, 30), datetime(2015, 1, 1, 6, 59))))
class ChunkTest(unittest.TestCase):
    def test_chunk_by_tags(self):
        """Activities sharing the same tags should collapse into one chunk."""
        step = timedelta(minutes=5)
        origin = floor_datetime(datetime.now(), step)
        activities = [
            Activity(["test"], origin, origin + step * 0.5),
            Activity(["test2"], origin + step, origin + step * 1.5),
            Activity(["test"], origin + step * 2, origin + step * 2.5),
        ]
        self.assertEqual(3, len(activities))
        activities.append(Activity(["test"], origin + step, origin + step * 1.5))
        self.assertEqual(4, len(activities))
        # Three "test" activities merge into one chunk, "test2" stays alone.
        self.assertEqual(2, len(chunk_by_tags(activities)))
| 2.40625 | 2 |
lang/it/basic_vocabulary_it.py | gtoffoli/commons-cops | 5 | 13315 | voc_it = [
['a', 'noun', 'c'],
['a', 'preposition', 'a'],
['abbagliante', 'pres_part', 'c'],
['abbagliante', 'adjective', 'c'],
['abbagliante', 'noun', 'c'],
['abbaiare', 'verb', 'c'],
['abbandonare', 'verb', 'a'],
['abbandonato', 'past_part', 'b'],
['abbandonato', 'adjective', 'b'],
['abbandono', 'noun', 'b'],
['abbassare', 'verb', 'a'],
['abbasso', 'adverb', 'c'],
['abbasso', 'exclamation', 'c'],
['abbastanza', 'adverb', 'a'],
['abbattere', 'verb', 'b'],
['abbeverare', 'verb', 'c'],
['abbigliamento', 'noun', 'b'],
['abbinare', 'verb', 'b'],
['abbonamento', 'noun', 'b'],
['abbonare', 'verb', 'c'],
['abbondante', 'pres_part', 'b'],
['abbondante', 'adjective', 'b'],
['abbondare', 'verb', 'c'],
['abbottonare', 'verb', 'c'],
['abbracciare', 'verb', 'a'],
['abbraccio', 'noun', 'b'],
['abbreviare', 'verb', 'c'],
['abbronzare', 'verb', 'c'],
['abete', 'noun', 'c'],
['abile', 'adjective', 'b'],
['abilità', 'noun', 'b'],
['abisso', 'noun', 'b'],
['abitante', 'pres_part', 'b'],
['abitante', 'adjective', 'b'],
['abitante', 'noun', 'b'],
['abitare', 'verb', 'a'],
['abitare', 'noun', 'a'],
['abitazione', 'noun', 'b'],
['abito', 'noun', 'a'],
['abituale', 'adjective', 'b'],
['abituare', 'verb', 'a'],
['abitudine', 'noun', 'a'],
['abolire', 'verb', 'b'],
['abortire', 'verb', 'c'],
['aborto', 'noun', 'c'],
['abruzzese', 'adjective', 'c'],
['abruzzese', 'noun', 'c'],
['abusare', 'verb', 'c'],
['abuso', 'noun', 'b'],
['acca', 'noun', 'c'],
['accademia', 'noun', 'b'],
['accademico', 'adjective', 'b'],
['accademico', 'noun', 'b'],
['accadere', 'verb', 'a'],
['accampamento', 'noun', 'c'],
['accanto', 'adverb', 'a'],
['accappatoio', 'noun', 'c'],
['accarezzare', 'verb', 'b'],
['accattone', 'noun', 'c'],
['accavallare', 'verb', 'c'],
['accecare', 'verb', 'c'],
['accedere', 'verb', 'b'],
['accelerare', 'verb', 'b'],
['acceleratore', 'adjective', 'c'],
['acceleratore', 'noun', 'c'],
['accelerazione', 'noun', 'b'],
['accendere', 'verb', 'a'],
['accendino', 'noun', 'c'],
['accennare', 'verb', 'b'],
['accenno', 'noun', 'c'],
['accentare', 'verb', 'c'],
['accertamento', 'noun', 'b'],
['accertare', 'verb', 'b'],
['acceso', 'past_part', 'b'],
['acceso', 'adjective', 'b'],
['accesso', 'noun', 'a'],
['accessorio', 'adjective', 'b'],
['accessorio', 'noun', 'b'],
['accetta', 'noun', 'c'],
['accettabile', 'adjective', 'b'],
['accettare', 'verb', 'a'],
['acchiappare', 'verb', 'c'],
['acciacco', 'noun', 'c'],
['acciaio', 'noun', 'b'],
['accidente', 'noun', 'b'],
['acciuga', 'noun', 'c'],
['accogliente', 'pres_part', 'c'],
['accogliente', 'adjective', 'c'],
['accoglienza', 'noun', 'b'],
['accogliere', 'verb', 'a'],
['accoltellare', 'verb', 'c'],
['accomodare', 'verb', 'b'],
['accompagnare', 'verb', 'a'],
['acconsentire', 'verb', 'c'],
['accontentare', 'verb', 'b'],
['accorciare', 'verb', 'c'],
['accordare', 'verb', 'b'],
['accordo', 'noun', 'a'],
['accorgersi', 'verb', 'a'],
['accorrere', 'verb', 'c'],
['accostare', 'verb', 'b'],
['accudire', 'verb', 'c'],
['accumulare', 'verb', 'b'],
['accumulatore', 'adjective', 'c'],
['accumulatore', 'noun', 'c'],
['accurato', 'past_part', 'b'],
['accurato', 'adjective', 'b'],
['accusa', 'noun', 'a'],
['accusare', 'verb', 'a'],
['accento', 'noun', 'b'],
['acerbo', 'adjective', 'c'],
['aceto', 'noun', 'c'],
['acido', 'adjective', 'b'],
['acido', 'noun', 'b'],
['acqua', 'noun', 'a'],
['acquarello', 'noun', 'c'],
['acquario', 'noun', 'c'],
['acquasanta', 'noun', 'c'],
['acquisire', 'verb', 'b'],
['acquisizione', 'noun', 'b'],
['acquistare', 'verb', 'a'],
['acquisto', 'noun', 'a'],
['acquolina', 'noun', 'c'],
['acrobata', 'noun', 'c'],
['acuto', 'adjective', 'b'],
['acuto', 'noun', 'b'],
['adattare', 'verb', 'b'],
['adattatore', 'noun', 'c'],
['adatto', 'adjective', 'a'],
['addetto', 'past_part', 'b'],
['addetto', 'adjective', 'b'],
['addetto', 'noun', 'b'],
['addio', 'exclamation', 'b'],
['addio', 'noun', 'b'],
['addirittura', 'adverb', 'a'],
['addizione', 'noun', 'c'],
['addobbare', 'verb', 'c'],
['addolcire', 'verb', 'c'],
['addomesticare', 'verb', 'c'],
['addormentarsi', 'verb', 'b'],
['addormentato', 'past_part', 'c'],
['addormentato', 'adjective', 'c'],
['addossare', 'verb', 'a'],
['addosso', 'adverb', 'c'],
['addosso', 'exclamation', 'c'],
['addrizzare', 'verb', 'c'],
['adeguare', 'verb', 'b'],
['adeguato', 'past_part', 'b'],
['adeguato', 'adjective', 'b'],
['adeguato', 'noun', 'b'],
['aderente', 'pres_part', 'c'],
['aderente', 'adjective', 'c'],
['aderente', 'noun', 'c'],
['aderire', 'verb', 'b'],
['adesione', 'noun', 'b'],
['adesso', 'adverb', 'a'],
['adolescente', 'adjective', 'a'],
['adolescente', 'noun', 'a'],
['adolescenza', 'noun', 'b'],
['adoperare', 'verb', 'b'],
['adorare', 'verb', 'a'],
['adottare', 'verb', 'a'],
['adozione', 'noun', 'b'],
['adriatico', 'adjective', 'c'],
['adulto', 'adjective', 'a'],
['adulto', 'noun', 'a'],
['aereo', 'adjective', 'a'],
['aereo', 'noun', 'a'],
['aereo', 'noun', 'b'],
['aeroplano', 'noun', 'c'],
['aeroporto', 'noun', 'b'],
['afa', 'noun', 'c'],
['affacciare', 'verb', 'b'],
['affamare', 'verb', 'c'],
['affamato', 'past_part', 'c'],
['affamato', 'adjective', 'c'],
['affamato', 'noun', 'c'],
['affannarsi', 'verb', 'c'],
['affannato', 'past_part', 'c'],
['affannato', 'adjective', 'c'],
['affanno', 'noun', 'c'],
['affare', 'noun', 'a'],
['affascinante', 'pres_part', 'b'],
['affascinante', 'adjective', 'b'],
['affascinare', 'verb', 'b'],
['affaticare', 'verb', 'c'],
['affatto', 'adverb', 'a'],
['affermare', 'verb', 'a'],
['affermazione', 'noun', 'b'],
['afferrare', 'verb', 'b'],
['affettare', 'verb', 'c'],
['affettato', 'past_part', 'c'],
['affettato', 'adjective', 'c'],
['affettato', 'noun', 'c'],
['affetto', 'noun', 'b'],
['affetto', 'adjective', 'b'],
['affettuoso', 'adjective', 'b'],
['affezionato', 'past_part', 'c'],
['affezionato', 'adjective', 'c'],
['affiancare', 'verb', 'b'],
['affidamento', 'noun', 'b'],
['affidare', 'verb', 'a'],
['affilato', 'past_part', 'c'],
['affilato', 'adjective', 'c'],
['affinché', 'conjunction', 'b'],
['affittare', 'verb', 'b'],
['affitto', 'noun', 'b'],
['affogare', 'verb', 'c'],
['affollare', 'verb', 'c'],
['affondare', 'verb', 'b'],
['affresco', 'noun', 'b'],
['affrontare', 'verb', 'a'],
['affumicare', 'verb', 'c'],
['africano', 'adjective', 'b'],
['africano', 'noun', 'b'],
['agenda', 'noun', 'b'],
['agente', 'pres_part', 'a'],
['agente', 'adjective', 'a'],
['agente', 'noun', 'a'],
['agenzia', 'noun', 'a'],
['agganciare', 'verb', 'b'],
['aggettivo', 'noun', 'b'],
['aggiornamento', 'noun', 'b'],
['aggiornare', 'verb', 'b'],
['aggirare', 'verb', 'b'],
['aggiungere', 'verb', 'a'],
['aggiustare', 'verb', 'b'],
['aggrapparsi', 'verb', 'b'],
['aggravare', 'verb', 'c'],
['aggredire', 'verb', 'b'],
['aggressione', 'noun', 'b'],
['aggressivo', 'adjective', 'b'],
['agiato', 'past_part', 'c'],
['agiato', 'adjective', 'c'],
['agile', 'adjective', 'c'],
['agio', 'noun', 'b'],
['agire', 'verb', 'a'],
['agitare', 'verb', 'b'],
['agitazione', 'noun', 'b'],
['aglio', 'noun', 'c'],
['agnello', 'noun', 'b'],
['ago', 'noun', 'b'],
['agonia', 'noun', 'c'],
['agosto', 'noun', 'a'],
['agricolo', 'adjective', 'b'],
['agricoltore', 'noun', 'c'],
['agricoltura', 'noun', 'b'],
['agrume', 'noun', 'c'],
['aguzzare', 'verb', 'c'],
['aguzzo', 'adjective', 'c'],
['aiuola', 'noun', 'c'],
['aiutare', 'verb', 'a'],
['aiuto', 'noun', 'a'],
['aiuto', 'exclamation', 'a'],
['ala', 'noun', 'a'],
['alba', 'noun', 'a'],
['albanese', 'adjective', 'b'],
['albanese', 'noun', 'b'],
['albergo', 'noun', 'a'],
['albero', 'noun', 'a'],
['albicocca', 'noun', 'c'],
['albicocca', 'adjective', 'c'],
['album', 'noun', 'a'],
['alcol', 'noun', 'b'],
['alcuno', 'adjective', 'a'],
['alcuno', 'pronoun', 'a'],
['alfabeto', 'noun', 'c'],
['alga', 'noun', 'c'],
['algerino', 'adjective', 'c'],
['algerino', 'noun', 'c'],
['alieno', 'adjective', 'b'],
['alieno', 'noun', 'b'],
['alimentare', 'adjective', 'b'],
['alimentare', 'noun', 'b'],
['alimentare', 'verb', 'b'],
['alimentari', 'noun', 'c'],
['alimentazione', 'noun', 'b'],
['alimento', 'noun', 'b'],
['alito', 'noun', 'c'],
['allacciare', 'verb', 'c'],
['allagare', 'verb', 'c'],
['allargare', 'verb', 'b'],
['allarmare', 'verb', 'c'],
['allarme', 'noun', 'b'],
['allattare', 'verb', 'c'],
['alleanza', 'noun', 'b'],
['allearsi', 'verb', 'c'],
['alleato', 'past_part', 'b'],
['alleato', 'adjective', 'b'],
['alleato', 'noun', 'b'],
['allegato', 'past_part', 'b'],
['allegato', 'adjective', 'b'],
['allegato', 'noun', 'b'],
['alleggerire', 'verb', 'c'],
['allegria', 'noun', 'b'],
['allegro', 'adjective', 'b'],
['allegro', 'adverb', 'b'],
['allegro', 'noun', 'b'],
['allenamento', 'noun', 'b'],
['allenare', 'verb', 'b'],
['allenatore', 'adjective', 'b'],
['allenatore', 'noun', 'b'],
['allentare', 'verb', 'c'],
['allergia', 'noun', 'c'],
['allevare', 'verb', 'b'],
['allievo', 'noun', 'b'],
['allineare', 'verb', 'c'],
['alloggio', 'noun', 'b'],
['allontanare', 'verb', 'a'],
['allora', 'adverb', 'a'],
['allora', 'conjunction', 'a'],
['alluce', 'noun', 'c'],
['alludere', 'verb', 'b'],
['alluminio', 'noun', 'c'],
['allungare', 'verb', 'a'],
['alluvione', 'noun', 'c'],
['almeno', 'adverb', 'a'],
['alquanto', 'adjective', 'b'],
['alquanto', 'pronoun', 'b'],
['alquanto', 'adverb', 'b'],
['altalena', 'noun', 'c'],
['altamente', 'adverb', 'b'],
['altare', 'noun', 'b'],
['alterare', 'verb', 'b'],
['alternare', 'verb', 'b'],
['alternativa', 'noun', 'b'],
['alternativo', 'adjective', 'b'],
['alterno', 'adjective', 'c'],
['altezza', 'noun', 'a'],
['alto', 'adjective', 'a'],
['alto', 'noun', 'a'],
['alto', 'adverb', 'a'],
['altoatesino', 'adjective', 'c'],
['altoatesino', 'noun', 'c'],
['altopiano', 'noun', 'c'],
['altrettanto', 'adjective', 'a'],
['altrettanto', 'pronoun', 'a'],
['altrettanto', 'adverb', 'a'],
['altrimenti', 'adverb', 'a'],
['altro', 'adjective', 'a'],
['altro', 'pronoun', 'a'],
['altro', 'adverb', 'a'],
['altrove', 'adverb', 'b'],
['altrui', 'adjective', 'b'],
['altrui', 'pronoun', 'b'],
['alunno', 'noun', 'b'],
['alveare', 'noun', 'c'],
['alzare', 'verb', 'a'],
['amante', 'pres_part', 'a'],
['amante', 'adjective', 'a'],
['amante', 'noun', 'a'],
['amare', 'verb', 'a'],
['amaro', 'adjective', 'b'],
['amaro', 'noun', 'b'],
['amato', 'past_part', 'b'],
['amato', 'adjective', 'b'],
['amato', 'noun', 'b'],
['ambasciata', 'noun', 'c'],
['ambientale', 'adjective', 'a'],
['ambientare', 'verb', 'b'],
['ambiente', 'noun', 'a'],
['ambiente', 'adjective', 'a'],
['ambito', 'noun', 'a'],
['ambizione', 'noun', 'b'],
['ambulanza', 'noun', 'b'],
['americano', 'adjective', 'a'],
['americano', 'noun', 'a'],
['amicizia', 'noun', 'a'],
['amico', 'adjective', 'a'],
['amico', 'noun', 'a'],
['ammaccare', 'verb', 'c'],
['ammalarsi', 'verb', 'b'],
['ammalato', 'past_part', 'c'],
['ammalato', 'adjective', 'c'],
['ammalato', 'noun', 'c'],
['ammanettare', 'verb', 'c'],
['ammassare', 'verb', 'c'],
['ammasso', 'noun', 'c'],
['ammazzare', 'verb', 'a'],
['ammettere', 'verb', 'a'],
['amministrativo', 'adjective', 'b'],
['amministrativo', 'noun', 'b'],
['amministratore', 'noun', 'b'],
['amministrazione', 'noun', 'a'],
['ammirare', 'verb', 'b'],
['ammissione', 'noun', 'b'],
['ammobiliare', 'verb', 'c'],
['ammoniaca', 'noun', 'c'],
['ammorbidente', 'pres_part', 'c'],
['ammorbidente', 'adjective', 'c'],
['ammorbidente', 'noun', 'c'],
['ammucchiare', 'verb', 'c'],
['ammuffire', 'verb', 'c'],
['amore', 'noun', 'a'],
['amoroso', 'adjective', 'b'],
['amoroso', 'noun', 'b'],
['ampiamente', 'adverb', 'b'],
['ampio', 'adjective', 'a'],
['ampio', 'noun', 'a'],
['amplificatore', 'adjective', 'c'],
['amplificatore', 'noun', 'c'],
['analcolico', 'adjective', 'c'],
['analcolico', 'noun', 'c'],
['analfabeta', 'adjective', 'c'],
['analfabeta', 'noun', 'c'],
['analisi', 'noun', 'a'],
['analitico', 'adjective', 'b'],
['analizzare', 'verb', 'a'],
['analogo', 'adjective', 'b'],
['ananas', 'noun', 'c'],
['anarchico', 'adjective', 'c'],
['anarchico', 'noun', 'c'],
['anatra', 'noun', 'c'],
['anche', 'conjunction', 'a'],
['anche', 'adverb', 'a'],
['anconetano', 'adjective', 'c'],
['anconetano', 'noun', 'c'],
['ancora', 'adverb', 'a'],
['ancora', 'conjunction', 'a'],
['ancorare', 'verb', 'b'],
['andamento', 'noun', 'b'],
['andare', 'verb', 'a'],
['andata', 'noun', 'c'],
['anello', 'noun', 'a'],
['angelo', 'noun', 'a'],
['angolare', 'adjective', 'b'],
['angolare', 'noun', 'b'],
['angolo', 'noun', 'a'],
['angoscia', 'noun', 'b'],
['anima', 'noun', 'a'],
['animale', 'noun', 'a'],
['animale', 'adjective', 'b'],
['animare', 'verb', 'a'],
['animato', 'past_part', 'b'],
['animato', 'adjective', 'b'],
['animato', 'adverb', 'b'],
['animo', 'noun', 'b'],
['animo', 'exclamation', 'b'],
['annacquare', 'verb', 'c'],
['annaffiare', 'verb', 'c'],
['annebbiare', 'verb', 'c'],
['anniversario', 'noun', 'b'],
['anniversario', 'adjective', 'b'],
['anno', 'noun', 'a'],
['annodare', 'verb', 'c'],
['annoiare', 'verb', 'b'],
['annotare', 'verb', 'b'],
['annuale', 'adjective', 'b'],
['annuale', 'noun', 'b'],
['annuire', 'verb', 'b'],
['annullare', 'verb', 'b'],
['annunciare', 'verb', 'a'],
['annuncio', 'noun', 'b'],
['annusare', 'verb', 'c'],
['anonimo', 'adjective', 'b'],
['anonimo', 'noun', 'b'],
['ansia', 'noun', 'a'],
['ansioso', 'adjective', 'b'],
['ansioso', 'noun', 'b'],
['antartico', 'adjective', 'c'],
['antartico', 'noun', 'c'],
['antenna', 'noun', 'b'],
['anteprima', 'noun', 'b'],
['anteriore', 'adjective', 'b'],
['anticalcare', 'adjective', 'c'],
['antichità', 'noun', 'c'],
['anticipare', 'verb', 'b'],
['anticipo', 'noun', 'b'],
['antico', 'adjective', 'a'],
['antico', 'noun', 'a'],
['antipasto', 'noun', 'c'],
['antirughe', 'adjective', 'c'],
['antirughe', 'noun', 'c'],
['antropologia', 'noun', 'b'],
['anulare', 'adjective', 'c'],
['anulare', 'noun', 'c'],
['anzi', 'adverb', 'a'],
['anzi', 'preposition', 'a'],
['anziano', 'adjective', 'a'],
['anziano', 'noun', 'a'],
['anziché', 'conjunction', 'b'],
['aostano', 'adjective', 'c'],
['aostano', 'noun', 'c'],
['ape', 'noun', 'b'],
['aperitivo', 'noun', 'c'],
['aperitivo', 'adjective', 'c'],
['aperto', 'past_part', 'a'],
['aperto', 'adjective', 'a'],
['aperto', 'noun', 'a'],
['aperto', 'adverb', 'a'],
['apertura', 'noun', 'a'],
['aspettativa', 'noun', 'b'],
['apostolo', 'noun', 'c'],
['appalto', 'noun', 'b'],
['appannare', 'verb', 'c'],
['apparato', 'noun', 'b'],
['apparecchiare', 'verb', 'c'],
['apparecchiatura', 'noun', 'c'],
['apparecchio', 'noun', 'b'],
['apparente', 'pres_part', 'b'],
['apparente', 'adjective', 'b'],
['apparentemente', 'adverb', 'b'],
['apparenza', 'noun', 'b'],
['apparire', 'verb', 'a'],
['apparizione', 'noun', 'b'],
['appartamento', 'noun', 'a'],
['appartenenza', 'noun', 'b'],
['appartenere', 'verb', 'a'],
['appassionare', 'verb', 'b'],
['appassionarsi', 'verb', 'c'],
['appassionato', 'past_part', 'b'],
['appassionato', 'adjective', 'b'],
['appassionato', 'noun', 'b'],
['appello', 'noun', 'b'],
['appena', 'adverb', 'a'],
['appena', 'conjunction', 'a'],
['appendere', 'verb', 'b'],
['appendicite', 'noun', 'c'],
['appenninico', 'adjective', 'c'],
['appeso', 'past_part', 'c'],
['appeso', 'adjective', 'c'],
['appeso', 'noun', 'c'],
['appiccicare', 'verb', 'c'],
['appiglio', 'noun', 'c'],
['applauso', 'noun', 'b'],
['applicare', 'verb', 'a'],
['applicazione', 'noun', 'b'],
['appoggiare', 'verb', 'a'],
['appoggio', 'noun', 'b'],
['apposito', 'adjective', 'b'],
['apposta', 'adverb', 'b'],
['apposta', 'adjective', 'b'],
['apprendere', 'verb', 'b'],
['apprendimento', 'noun', 'b'],
['apprendista', 'noun', 'c'],
['apprezzare', 'verb', 'a'],
['approccio', 'noun', 'b'],
['approfittare', 'verb', 'b'],
['approfondimento', 'noun', 'b'],
['approfondire', 'verb', 'b'],
['approvare', 'verb', 'b'],
['approvazione', 'noun', 'b'],
['appuntamento', 'noun', 'a'],
['appuntire', 'verb', 'c'],
['appunto', 'noun', 'b'],
['appunto', 'adverb', 'a'],
['aprile', 'noun', 'a'],
['aprire', 'verb', 'a'],
['apriscatole', 'noun', 'c'],
['aquila', 'noun', 'c'],
['aquilano', 'adjective', 'c'],
['aquilano', 'noun', 'c'],
['aquilone', 'noun', 'c'],
['arabo', 'adjective', 'a'],
['arabo', 'noun', 'a'],
['arachide', 'noun', 'c'],
['aragosta', 'noun', 'c'],
['aranciata', 'noun', 'c'],
['arancio', 'noun', 'c'],
['arare', 'verb', 'c'],
['aratro', 'noun', 'c'],
['arbitro', 'noun', 'b'],
['archeologo', 'noun', 'c'],
['architettare', 'verb', 'b'],
['architetto', 'noun', 'b'],
['architettonico', 'adjective', 'b'],
['architettura', 'noun', 'b'],
['archiviare', 'verb', 'b'],
['archivio', 'noun', 'b'],
['arco', 'noun', 'a'],
['arcobaleno', 'noun', 'c'],
['area', 'noun', 'a'],
['argentino', 'adjective', 'b'],
['argentino', 'noun', 'b'],
['argento', 'noun', 'b'],
['argomentare', 'verb', 'b'],
['argomentazione', 'noun', 'b'],
['argomento', 'noun', 'a'],
['aria', 'noun', 'a'],
['aristocratico', 'adjective', 'c'],
['aristocratico', 'noun', 'c'],
['aritmetica', 'noun', 'c'],
['aritmetico', 'adjective', 'c'],
['aritmetico', 'noun', 'c'],
['arma', 'noun', 'a'],
['armadio', 'noun', 'b'],
['armamento', 'noun', 'c'],
['armare', 'verb', 'b'],
['armato', 'past_part', 'b'],
['armato', 'adjective', 'b'],
['armato', 'noun', 'b'],
['armonia', 'noun', 'b'],
['aroma', 'noun', 'c'],
['arrabbiarsi', 'verb', 'a'],
['arrampicarsi', 'verb', 'b'],
['arredamento', 'noun', 'b'],
['arredare', 'verb', 'c'],
['arrendersi', 'verb', 'b'],
['arrendersi', 'verb', 'c'],
['arrestare', 'verb', 'a'],
['arresto', 'noun', 'b'],
['arricchire', 'verb', 'b'],
['arrivare', 'verb', 'a'],
['arrivederci', 'exclamation', 'b'],
['arrivederci', 'noun', 'b'],
['arrivo', 'noun', 'a'],
['arrosto', 'noun', 'c'],
['arrosto', 'adjective', 'c'],
['arrosto', 'adverb', 'c'],
['arrugginire', 'verb', 'c'],
['arte', 'noun', 'a'],
['arteria', 'noun', 'b'],
['artico', 'adjective', 'c'],
['artico', 'noun', 'c'],
['articolare', 'verb', 'b'],
['articolare', 'noun', 'b'],
['articolazione', 'noun', 'b'],
['articolo', 'noun', 'a'],
['artificiale', 'adjective', 'b'],
['artigianale', 'adjective', 'c'],
['artigiano', 'noun', 'b'],
['artigiano', 'adjective', 'b'],
['artiglieria', 'noun', 'c'],
['artiglio', 'noun', 'c'],
['artista', 'noun', 'a'],
['artistico', 'adjective', 'a'],
['artistico', 'noun', 'a'],
['ascella', 'noun', 'c'],
['ascensore', 'noun', 'b'],
['ascesa', 'noun', 'b'],
['ascesso', 'noun', 'c'],
['ascia', 'noun', 'c'],
['asciugamano', 'noun', 'b'],
['asciugare', 'verb', 'b'],
['asciutto', 'adjective', 'b'],
['asciutto', 'noun', 'b'],
['ascoltare', 'verb', 'a'],
['ascolto', 'noun', 'b'],
['asfaltare', 'verb', 'c'],
['asfalto', 'noun', 'c'],
['asiatico', 'adjective', 'b'],
['asiatico', 'noun', 'b'],
['asilo', 'noun', 'b'],
['asino', 'noun', 'b'],
['asma', 'noun', 'c'],
['asparago', 'noun', 'c'],
['aspettare', 'verb', 'a'],
['aspetto', 'noun', 'a'],
['aspirapolvere', 'noun', 'c'],
['aspirare', 'verb', 'b'],
['aspirazione', 'noun', 'b'],
['aspro', 'adjective', 'b'],
['aspro', 'noun', 'b'],
['assaggiare', 'verb', 'b'],
['assaggio', 'noun', 'c'],
['assai', 'adverb', 'a'],
['assai', 'adjective', 'a'],
['assai', 'noun', 'a'],
['assalire', 'verb', 'c'],
['assaltare', 'verb', 'c'],
['assalto', 'noun', 'b'],
['assaporare', 'verb', 'c'],
['assassinare', 'verb', 'b'],
['assassinio', 'noun', 'c'],
['assassino', 'noun', 'b'],
['assassino', 'adjective', 'b'],
['asse', 'noun', 'b'],
['assediare', 'verb', 'c'],
['assegnare', 'verb', 'b'],
['assegno', 'noun', 'b'],
['assemblea', 'noun', 'b'],
['assente', 'adjective', 'b'],
['assente', 'noun', 'b'],
['assenza', 'noun', 'a'],
['assicurare', 'verb', 'a'],
['assicurazione', 'noun', 'b'],
['assieme', 'adverb', 'a'],
['assieme', 'noun', 'a'],
['assistente', 'pres_part', 'b'],
['assistente', 'adjective', 'b'],
['assistente', 'noun', 'b'],
['assistenza', 'noun', 'b'],
['assistere', 'verb', 'a'],
['associare', 'verb', 'b'],
['associazione', 'noun', 'a'],
['assolutamente', 'adverb', 'a'],
['assoluto', 'adjective', 'a'],
['assoluto', 'noun', 'a'],
['assoluzione', 'noun', 'c'],
['assolvere', 'verb', 'b'],
['assomigliare', 'verb', 'b'],
['assorbente', 'pres_part', 'c'],
['assorbente', 'adjective', 'c'],
['assorbente', 'noun', 'c'],
['assorbire', 'verb', 'b'],
['assordare', 'verb', 'c'],
['assumere', 'verb', 'a'],
['assunzione', 'noun', 'b'],
['assurdo', 'adjective', 'a'],
['assurdo', 'noun', 'a'],
['asta', 'noun', 'b'],
['astemio', 'adjective', 'c'],
['astemio', 'noun', 'c'],
['astratto', 'past_part', 'b'],
['astratto', 'adjective', 'b'],
['astratto', 'noun', 'b'],
['astronave', 'noun', 'c'],
['astuccio', 'noun', 'c'],
['astuto', 'adjective', 'c'],
['astuto', 'noun', 'c'],
['astuzia', 'noun', 'c'],
['ateniese', 'adjective', 'c'],
['ateniese', 'noun', 'c'],
['ateo', 'adjective', 'b'],
['ateo', 'noun', 'b'],
['atlantico', 'adjective', 'c'],
['atleta', 'noun', 'b'],
['atmosfera', 'noun', 'a'],
['atomica', 'noun', 'c'],
['atomico', 'adjective', 'b'],
['atomo', 'noun', 'b'],
['atrio', 'noun', 'c'],
['atroce', 'adjective', 'b'],
['attaccante', 'pres_part', 'c'],
['attaccante', 'adjective', 'c'],
['attaccante', 'noun', 'c'],
['attaccapanni', 'noun', 'c'],
['attaccare', 'verb', 'a'],
['attacco', 'noun', 'a'],
['atteggiamento', 'noun', 'a'],
['atteggiare', 'verb', 'c'],
['attendere', 'verb', 'a'],
['attenere', 'verb', 'b'],
['attentamente', 'adverb', 'b'],
['attentare', 'verb', 'c'],
['attentato', 'noun', 'b'],
['attento', 'adjective', 'a'],
['attenzione', 'noun', 'a'],
['atterraggio', 'noun', 'c'],
['atterrare', 'verb', 'b'],
['attesa', 'noun', 'a'],
['attestare', 'verb', 'b'],
['attimo', 'noun', 'a'],
['attingere', 'verb', 'b'],
['attirare', 'verb', 'b'],
['attivare', 'verb', 'b'],
['attività', 'noun', 'a'],
['attivo', 'adjective', 'a'],
['attivo', 'noun', 'a'],
['atto', 'noun', 'a'],
['attore', 'noun', 'a'],
['attorno', 'adverb', 'a'],
['attrarre', 'verb', 'b'],
['attraversare', 'verb', 'a'],
['attraverso', 'preposition', 'a'],
['attraverso', 'adverb', 'a'],
['attrazione', 'noun', 'b'],
['attrezzare', 'verb', 'b'],
['attrezzatura', 'noun', 'b'],
['attrezzo', 'noun', 'b'],
['attribuire', 'verb', 'b'],
['attrice', 'noun', 'b'],
['attuale', 'adjective', 'a'],
['attualità', 'noun', 'b'],
['attualmente', 'adverb', 'b'],
['attuare', 'verb', 'b'],
['augurare', 'verb', 'b'],
['augurio', 'noun', 'b'],
['aula', 'noun', 'b'],
['aumentare', 'verb', 'a'],
['aumento', 'noun', 'a'],
['australiano', 'adjective', 'c'],
['australiano', 'noun', 'c'],
['austriaco', 'adjective', 'b'],
['austriaco', 'noun', 'b'],
['autentico', 'adjective', 'b'],
['autentico', 'noun', 'b'],
['autista', 'noun', 'b'],
['auto', 'noun', 'a'],
['autoambulanza', 'noun', 'c'],
['autobotte', 'noun', 'c'],
['autobus', 'noun', 'b'],
['autografo', 'adjective', 'c'],
['autografo', 'noun', 'c'],
['automaticamente', 'adverb', 'b'],
['automatico', 'adjective', 'b'],
['automatico', 'noun', 'b'],
['automobile', 'noun', 'b'],
['automobilista', 'noun', 'c'],
['autonomia', 'noun', 'b'],
['autonomo', 'adjective', 'b'],
['autonomo', 'noun', 'b'],
['autore', 'noun', 'a'],
['autorevole', 'adjective', 'c'],
['autorità', 'noun', 'a'],
['autorizzare', 'verb', 'a'],
['autoscontro', 'noun', 'c'],
['autoscuola', 'noun', 'c'],
['autostop', 'noun', 'c'],
['autostrada', 'noun', 'b'],
['autotreno', 'noun', 'c'],
['autunno', 'noun', 'b'],
['avambraccio', 'noun', 'c'],
['avanguardia', 'noun', 'b'],
['avanti', 'adverb', 'a'],
['avanti', 'adjective', 'a'],
['avanti', 'loc-comando', 'a'],
['avanti', 'preposition', 'a'],
['avanti', 'noun', 'a'],
['avanzare', 'verb', 'a'],
['avanzato', 'past_part', 'b'],
['avanzato', 'adjective', 'b'],
['avanzo', 'noun', 'c'],
['avarizia', 'noun', 'c'],
['avaro', 'adjective', 'c'],
['avaro', 'noun', 'c'],
['avena', 'noun', 'c'],
['avere', 'verb', 'a'],
['aviazione', 'noun', 'c'],
['avvantaggiare', 'verb', 'c'],
['avvelenare', 'verb', 'b'],
['avvelenato', 'past_part', 'c'],
['avvelenato', 'adjective', 'c'],
['avvenimento', 'noun', 'b'],
['avvenire', 'adjective', 'a'],
['avvenire', 'noun', 'a'],
['avventura', 'noun', 'a'],
['avverare', 'verb', 'c'],
['avversario', 'noun', 'b'],
['avvertire', 'verb', 'a'],
['avviamento', 'noun', 'c'],
['avviare', 'verb', 'a'],
['avvicinare', 'verb', 'a'],
['avvio', 'noun', 'b'],
['avvisare', 'verb', 'b'],
['avviso', 'noun', 'b'],
['avvitare', 'verb', 'c'],
['avvocato', 'noun', 'a'],
['avvolgere', 'verb', 'b'],
['azienda', 'noun', 'a'],
['aziendale', 'adjective', 'b'],
['azione', 'noun', 'a'],
['azione', 'noun', 'b'],
['azzardare', 'verb', 'b'],
['azzardo', 'noun', 'c'],
['azzurro', 'noun', 'a'],
['azzurro', 'adjective', 'a'],
['babbo', 'noun', 'b'],
['baby', 'noun', 'b'],
['baby', 'adjective', 'b'],
['babydoll', 'noun', 'c'],
['bacca', 'noun', 'c'],
['baccalà', 'noun', 'c'],
['bacheca', 'noun', 'b'],
['baciare', 'verb', 'a'],
['bacinella', 'noun', 'c'],
['bacino', 'noun', 'b'],
['bacio', 'noun', 'a'],
['baco', 'noun', 'c'],
['badare', 'verb', 'b'],
['baffo', 'noun', 'b'],
['bagagliaio', 'noun', 'c'],
['bagaglio', 'noun', 'b'],
['bagnare', 'verb', 'b'],
['bagnato', 'past_part', 'b'],
['bagnato', 'adjective', 'b'],
['bagnato', 'noun', 'b'],
['bagno', 'noun', 'a'],
['bagnoschiuma', 'noun', 'c'],
['balcone', 'noun', 'b'],
['balena', 'noun', 'b'],
['balia', 'noun', 'b'],
['ballare', 'verb', 'a'],
['ballerina', 'noun', 'c'],
['ballerino', 'noun', 'c'],
['ballerino', 'adjective', 'c'],
['balletto', 'noun', 'c'],
['ballo', 'noun', 'b'],
['balsamo', 'noun', 'c'],
['bambina', 'noun', 'a'],
['bambinaia', 'noun', 'c'],
['bambino', 'noun', 'a'],
['bambino', 'adjective', 'a'],
['bambola', 'noun', 'b'],
['banale', 'adjective', 'b'],
['banana', 'noun', 'c'],
['banca', 'noun', 'a'],
['bancarella', 'noun', 'c'],
['bancario', 'adjective', 'b'],
['bancario', 'noun', 'b'],
['banco', 'noun', 'b'],
['bancone', 'noun', 'b'],
['band', 'noun', 'b'],
['banda', 'noun', 'b'],
['bandiera', 'noun', 'b'],
['bando', 'noun', 'b'],
['bar', 'noun', 'a'],
['bara', 'noun', 'b'],
['baracca', 'noun', 'c'],
['barba', 'noun', 'b'],
['barbabietola', 'noun', 'c'],
['barbaro', 'adjective', 'b'],
['barbaro', 'noun', 'b'],
['barca', 'noun', 'a'],
['barella', 'noun', 'c'],
['barese', 'adjective', 'c'],
['barese', 'noun', 'c'],
['barile', 'noun', 'c'],
['barista', 'noun', 'c'],
['barriera', 'noun', 'b'],
['basare', 'verb', 'a'],
['base', 'noun', 'a'],
['basetta', 'noun', 'c'],
['basilica', 'noun', 'b'],
['basilico', 'noun', 'c'],
['basket', 'noun', 'c'],
['basso', 'adjective', 'a'],
['basso', 'noun', 'a'],
['basso', 'adverb', 'a'],
['bastardo', 'adjective', 'b'],
['bastardo', 'noun', 'b'],
['bastare', 'verb', 'a'],
['bastonare', 'verb', 'c'],
['bastone', 'noun', 'b'],
['battaglia', 'noun', 'a'],
['battello', 'noun', 'c'],
['battere', 'verb', 'a'],
['battere', 'noun', 'a'],
['batteria', 'noun', 'b'],
['batterio', 'noun', 'b'],
['batticuore', 'noun', 'c'],
['battipanni', 'noun', 'c'],
['battito', 'noun', 'c'],
['battuta', 'noun', 'a'],
['batuffolo', 'noun', 'c'],
['baule', 'noun', 'c'],
['bava', 'noun', 'c'],
['bavaglio', 'noun', 'c'],
['beato', 'past_part', 'b'],
['beato', 'adjective', 'b'],
['beato', 'noun', 'b'],
['beccare', 'verb', 'b'],
['befana', 'noun', 'c'],
['beffa', 'noun', 'c'],
['beh', 'exclamation', 'a'],
['belare', 'verb', 'c'],
['belga', 'adjective', 'c'],
['belga', 'noun', 'c'],
['bella', 'noun', 'b'],
['bellezza', 'noun', 'a'],
['bello', 'adjective', 'a'],
['bello', 'noun', 'a'],
['benché', 'conjunction', 'b'],
['benda', 'noun', 'c'],
['bene', 'adverb', 'a'],
['bene', 'exclamation', 'a'],
['bene', 'noun', 'a'],
['benedetto', 'past_part', 'b'],
['benedetto', 'adjective', 'b'],
['benedetto', 'noun', 'b'],
['beneficenza', 'noun', 'c'],
['beneficio', 'noun', 'b'],
['benessere', 'noun', 'b'],
['benestante', 'adjective', 'c'],
['benestante', 'noun', 'c'],
['bensì', 'conjunction', 'b'],
['bensì', 'adverb', 'b'],
['benvenuto', 'adjective', 'b'],
['benvenuto', 'noun', 'b'],
['benzina', 'noun', 'b'],
['benzinaio', 'noun', 'c'],
['bere', 'verb', 'a'],
['bere', 'noun', 'a'],
['berlinese', 'adjective', 'c'],
['berlinese', 'noun', 'c'],
['berretto', 'noun', 'c'],
['bersaglio', 'noun', 'b'],
['besciamella', 'noun', 'c'],
['bestemmia', 'noun', 'c'],
['bestia', 'noun', 'b'],
['bestiale', 'adjective', 'c'],
['bevanda', 'noun', 'b'],
['bevitore', 'noun', 'c'],
['bevuta', 'noun', 'c'],
['bi', 'noun', 'c'],
['bianco', 'adjective', 'a'],
['bianco', 'noun', 'a'],
['bibbia', 'noun', 'b'],
['bibita', 'noun', 'c'],
['biblico', 'adjective', 'b'],
['biblico', 'noun', 'b'],
['bibliografia', 'noun', 'b'],
['biblioteca', 'noun', 'b'],
['bicchiere', 'noun', 'a'],
['bici', 'noun', 'b'],
['bicicletta', 'noun', 'b'],
['bidè', 'noun', 'c'],
['bidello', 'noun', 'c'],
['biglia', 'noun', 'c'],
['biglietteria', 'noun', 'c'],
['biglietto', 'noun', 'a'],
['bikini', 'noun', 'c'],
['bilancia', 'noun', 'b'],
['bilancio', 'noun', 'b'],
['biliardo', 'noun', 'c'],
['bimba', 'noun', 'b'],
['bimbo', 'noun', 'b'],
['binario', 'noun', 'c'],
['biografia', 'noun', 'b'],
['biologia', 'noun', 'b'],
['biologico', 'adjective', 'b'],
['biologico', 'noun', 'b'],
['bionda', 'noun', 'b'],
['biondo', 'adjective', 'b'],
['biondo', 'noun', 'b'],
['birichino', 'noun', 'c'],
['birichino', 'adjective', 'c'],
['birillo', 'noun', 'c'],
['birra', 'noun', 'b'],
['bisbigliare', 'verb', 'c'],
['biscia', 'noun', 'c'],
['biscotto', 'adjective', 'b'],
['biscotto', 'noun', 'b'],
['bisnonno', 'noun', 'c'],
['bisognare', 'verb', 'a'],
['bisogno', 'noun', 'a'],
['bistecca', 'noun', 'c'],
['bistecchiera', 'noun', 'c'],
['bisticciare', 'verb', 'c'],
['bit', 'noun', 'b'],
['bizzarro', 'adjective', 'b'],
['bloccare', 'verb', 'a'],
['blocco', 'noun', 'b'],
['blocco', 'noun', 'b'],
['blog', 'noun', 'a'],
['blu', 'adjective', 'a'],
['blu', 'noun', 'a'],
['bocca', 'noun', 'a'],
['bocchino', 'noun', 'c'],
['boccia', 'noun', 'c'],
['bocciare', 'verb', 'b'],
['bocciatura', 'noun', 'c'],
['bocciolo', 'noun', 'c'],
['boccone', 'noun', 'c'],
['boh', 'exclamation', 'b'],
['boia', 'noun', 'c'],
['boia', 'adjective', 'c'],
['bolla', 'noun', 'b'],
['bolletta', 'noun', 'b'],
['bollito', 'past_part', 'c'],
['bollito', 'adjective', 'c'],
['bollito', 'noun', 'c'],
['bollitore', 'noun', 'c'],
['bollo', 'noun', 'c'],
['bolognese', 'adjective', 'c'],
['bolognese', 'noun', 'c'],
['bolzanino', 'adjective', 'c'],
['bolzanino', 'noun', 'c'],
['bomba', 'noun', 'b'],
['bombardare', 'verb', 'b'],
['bombola', 'noun', 'c'],
['bomboniera', 'noun', 'c'],
['bontà', 'noun', 'b'],
['bordo', 'noun', 'a'],
['borgata', 'noun', 'c'],
['borghese', 'adjective', 'b'],
['borghese', 'noun', 'b'],
['borghesia', 'noun', 'c'],
['borgo', 'noun', 'b'],
['borotalco', 'noun', 'c'],
['borsa', 'noun', 'a'],
['borsa', 'noun', 'b'],
['borsetta', 'noun', 'c'],
['bosco', 'noun', 'a'],
['bosniaco', 'adjective', 'c'],
['bosniaco', 'noun', 'c'],
['boss', 'noun', 'b'],
['bossolo', 'noun', 'c'],
['botanica', 'noun', 'c'],
['botta', 'noun', 'b'],
['botte', 'noun', 'c'],
['bottega', 'noun', 'b'],
['bottegaio', 'noun', 'c'],
['bottegaio', 'adjective', 'c'],
['bottiglia', 'noun', 'a'],
['botto', 'noun', 'c'],
['bottone', 'noun', 'b'],
['bovino', 'adjective', 'c'],
['bovino', 'noun', 'c'],
['box', 'noun', 'b'],
['boxer', 'noun', 'c'],
['braccialetto', 'noun', 'c'],
['bracciante', 'noun', 'c'],
['braccio', 'noun', 'a'],
['branco', 'noun', 'b'],
['brand', 'noun', 'b'],
['brandello', 'noun', 'c'],
['brano', 'noun', 'a'],
['brasiliano', 'adjective', 'b'],
['brasiliano', 'noun', 'b'],
['bravo', 'adjective', 'a'],
['bravo', 'noun', 'a'],
['bravo', 'exclamation', 'a'],
['bresaola', 'noun', 'c'],
['bretella', 'noun', 'c'],
['breve', 'adjective', 'a'],
['breve', 'adverb', 'a'],
['breve', 'noun', 'a'],
['briciola', 'noun', 'c'],
['brigantaggio', 'noun', 'c'],
['brigante', 'noun', 'c'],
['brillante', 'pres_part', 'b'],
['brillante', 'adjective', 'b'],
['brillante', 'noun', 'b'],
['brillantina', 'noun', 'c'],
['brillare', 'verb', 'b'],
['brina', 'noun', 'c'],
['brioche', 'noun', 'c'],
['britannico', 'adjective', 'b'],
['britannico', 'noun', 'b'],
['brivido', 'noun', 'b'],
['brocca', 'noun', 'c'],
['brogliaccio', 'noun', 'b'],
['bronchite', 'noun', 'c'],
['brontolare', 'verb', 'c'],
['bronzo', 'noun', 'b'],
['bruciare', 'verb', 'a'],
['bruciato', 'past_part', 'b'],
['bruciato', 'adjective', 'b'],
['bruciato', 'noun', 'b'],
['bruciatura', 'noun', 'c'],
['bruco', 'noun', 'c'],
['bruco', 'adjective', 'c'],
['bruschetta', 'noun', 'c'],
['brutale', 'adjective', 'c'],
['brutto', 'adjective', 'a'],
['brutto', 'noun', 'a'],
['brutto', 'adverb', 'a'],
['buca', 'noun', 'b'],
['bucare', 'verb', 'b'],
['bucato', 'noun', 'c'],
['buccia', 'noun', 'c'],
['buco', 'noun', 'a'],
['budino', 'noun', 'c'],
['bufala', 'noun', 'c'],
['bufalo', 'noun', 'c'],
['bufera', 'noun', 'c'],
['buffet', 'noun', 'c'],
['buffo', 'adjective', 'b'],
['buffo', 'noun', 'b'],
['bugia', 'noun', 'b'],
['bugiardo', 'adjective', 'b'],
['bugiardo', 'noun', 'b'],
['buio', 'adjective', 'a'],
['buio', 'noun', 'a'],
['bulgaro', 'adjective', 'c'],
['bulgaro', 'noun', 'c'],
['buonafede', 'noun', 'c'],
['buonasera', 'exclamation', 'b'],
['buongiorno', 'exclamation', 'a'],
['buongusto', 'noun', 'c'],
['buono', 'adjective', 'a'],
['buono', 'noun', 'a'],
['buono', 'adverb', 'a'],
['buonuomo', 'noun', 'c'],
['burattino', 'noun', 'c'],
['burocrazia', 'noun', 'c'],
['burrasca', 'noun', 'c'],
['burro', 'noun', 'b'],
['burrone', 'noun', 'c'],
['business', 'noun', 'b'],
['business', 'adjective', 'b'],
['bussare', 'verb', 'b'],
['bussola', 'noun', 'c'],
['busta', 'noun', 'b'],
['bustina', 'noun', 'c'],
['busto', 'noun', 'c'],
['buttare', 'verb', 'a'],
['cabina', 'noun', 'b'],
['cacao', 'noun', 'c'],
['cacca', 'noun', 'b'],
['caccia', 'noun', 'a'],
['cacciare', 'verb', 'a'],
['cacciatore', 'noun', 'b'],
['cacciavite', 'noun', 'c'],
['cadavere', 'noun', 'a'],
['cadere', 'verb', 'a'],
['cadere', 'noun', 'a'],
['caduta', 'noun', 'b'],
['caffè', 'noun', 'a'],
['caffè', 'adjective', 'a'],
['caffellatte', 'noun', 'c'],
['caffellatte', 'adjective', 'c'],
['caffettiera', 'noun', 'c'],
['cagare', 'verb', 'b'],
['cagliaritano', 'adjective', 'c'],
['cagliaritano', 'noun', 'c'],
['calabrese', 'adjective', 'c'],
['calabrese', 'noun', 'c'],
['calabrone', 'noun', 'c'],
['calamaro', 'noun', 'c'],
['calamita', 'noun', 'c'],
['calare', 'verb', 'b'],
['calcagno', 'noun', 'c'],
['calciare', 'verb', 'c'],
['calciatore', 'noun', 'b'],
['calcinaccio', 'noun', 'c'],
['calcio', 'noun', 'a'],
['calcolare', 'verb', 'b'],
['calcolatore', 'adjective', 'c'],
['calcolatore', 'noun', 'c'],
['calcolatrice', 'noun', 'c'],
['calcolo', 'noun', 'b'],
['caldo', 'adjective', 'a'],
['caldo', 'noun', 'a'],
['caldo', 'adverb', 'a'],
['calendario', 'noun', 'b'],
['calligrafia', 'noun', 'c'],
['callo', 'noun', 'c'],
['calma', 'noun', 'b'],
['calmare', 'verb', 'b'],
['calmo', 'adjective', 'b'],
['calo', 'noun', 'b'],
['calore', 'noun', 'a'],
['calpestare', 'verb', 'c'],
['calunnia', 'noun', 'c'],
['calvario', 'noun', 'c'],
['calza', 'noun', 'b'],
['calzare', 'verb', 'c'],
['calzatura', 'noun', 'c'],
['calzino', 'noun', 'c'],
['calzolaio', 'noun', 'c'],
['calzoleria', 'noun', 'c'],
['calzone', 'noun', 'c'],
['cambiamento', 'noun', 'a'],
['cambiare', 'verb', 'a'],
['cambio', 'noun', 'a'],
['camera', 'noun', 'a'],
['camerata', 'noun', 'c'],
['cameriere', 'noun', 'b'],
['camicetta', 'noun', 'c'],
['camicia', 'noun', 'b'],
['caminetto', 'noun', 'c'],
['camion', 'noun', 'a'],
['camionista', 'noun', 'c'],
['cammello', 'noun', 'c'],
['cammello', 'adjective', 'c'],
['camminare', 'verb', 'a'],
['camminata', 'noun', 'c'],
['cammino', 'noun', 'b'],
['camomilla', 'noun', 'c'],
['camorra', 'noun', 'b'],
['campagna', 'noun', 'a'],
['campana', 'noun', 'b'],
['campanella', 'noun', 'c'],
['campanello', 'noun', 'b'],
['campanile', 'noun', 'c'],
['campano', 'adjective', 'c'],
['campano', 'noun', 'c'],
['campare', 'verb', 'b'],
['campeggio', 'noun', 'c'],
['campionato', 'noun', 'b'],
['campione', 'noun', 'a'],
['campo', 'noun', 'a'],
['campobassano', 'adjective', 'c'],
['campobassano', 'noun', 'c'],
['camposanto', 'noun', 'c'],
['canadese', 'adjective', 'c'],
['canadese', 'noun', 'c'],
['canaglia', 'noun', 'c'],
['canale', 'noun', 'a'],
['canapa', 'noun', 'c'],
['canarino', 'noun', 'c'],
['canarino', 'adjective', 'c'],
['cancellare', 'verb', 'a'],
['cancellatura', 'noun', 'c'],
['cancello', 'noun', 'b'],
['cancro', 'noun', 'b'],
['candela', 'noun', 'b'],
['candeliere', 'noun', 'c'],
['candidare', 'verb', 'b'],
['candidato', 'past_part', 'a'],
['candidato', 'adjective', 'a'],
['candidato', 'noun', 'a'],
['candido', 'adjective', 'b'],
['cane', 'noun', 'a'],
['canestro', 'noun', 'c'],
['canguro', 'noun', 'c'],
['canna', 'noun', 'b'],
['cannibale', 'adjective', 'c'],
['cannibale', 'noun', 'c'],
['cannuccia', 'noun', 'c'],
['canone', 'noun', 'b'],
['canottiera', 'noun', 'c'],
['canotto', 'noun', 'c'],
['cantante', 'pres_part', 'b'],
['cantante', 'adjective', 'b'],
['cantante', 'noun', 'b'],
['cantare', 'verb', 'a'],
['cantautore', 'noun', 'c'],
['cantiere', 'noun', 'b'],
['cantilena', 'noun', 'c'],
['cantina', 'noun', 'b'],
['canto', 'noun', 'a'],
['canzone', 'noun', 'a'],
['caos', 'noun', 'b'],
['capace', 'adjective', 'a'],
['capacità', 'noun', 'a'],
['capanna', 'noun', 'b'],
['capannone', 'noun', 'b'],
['caparra', 'noun', 'c'],
['capello', 'noun', 'a'],
['capire', 'verb', 'a'],
['capitale', 'adjective', 'a'],
['capitale', 'noun', 'a'],
['capitano', 'noun', 'a'],
['capitare', 'verb', 'a'],
['capitolo', 'noun', 'a'],
['capo', 'noun', 'a'],
['capodanno', 'noun', 'c'],
['capogiro', 'noun', 'c'],
['capolavoro', 'noun', 'b'],
['capoluogo', 'noun', 'c'],
['caporale', 'noun', 'b'],
['caporale', 'adjective', 'b'],
['caposquadra', 'noun', 'c'],
['capotavola', 'noun', 'c'],
['capoufficio', 'noun', 'c'],
['cappa', 'noun', 'c'],
['cappella', 'noun', 'b'],
['cappelliera', 'noun', 'c'],
['cappello', 'noun', 'b'],
['cappero', 'noun', 'c'],
['cappotto', 'noun', 'c'],
['cappuccino', 'adjective', 'c'],
['cappuccino', 'noun', 'c'],
['cappuccino', 'adjective', 'c'],
['cappuccio', 'noun', 'c'],
['capra', 'noun', 'b'],
['capriccio', 'noun', 'b'],
['capriola', 'noun', 'c'],
['carabiniere', 'noun', 'a'],
['caramella', 'noun', 'b'],
['caramella', 'adjective', 'b'],
['carattere', 'noun', 'a'],
['caratteristica', 'noun', 'a'],
['caratteristico', 'adjective', 'b'],
['caratterizzare', 'verb', 'a'],
['carbone', 'noun', 'b'],
['carburante', 'pres_part', 'c'],
['carburante', 'adjective', 'c'],
['carburante', 'noun', 'c'],
['carcassa', 'noun', 'c'],
['carcerato', 'past_part', 'c'],
['carcerato', 'adjective', 'c'],
['carcerato', 'noun', 'c'],
['carcere', 'noun', 'a'],
['carciofino', 'noun', 'c'],
['carciofo', 'noun', 'c'],
['cardellino', 'noun', 'c'],
['cardiaco', 'adjective', 'b'],
['cardiaco', 'noun', 'b'],
['cardigan', 'noun', 'c'],
['cardinale', 'adjective', 'b'],
['cardinale', 'noun', 'b'],
['cardinale', 'adjective', 'b'],
['carenza', 'noun', 'b'],
['carica', 'noun', 'loc-comando'],
['caricare', 'verb', 'a'],
['carico', 'noun', 'a'],
['carico', 'adjective', 'b'],
['carino', 'adjective', 'a'],
['carità', 'noun', 'b'],
['carnagione', 'noun', 'c'],
['carne', 'noun', 'a'],
['carnevale', 'noun', 'c'],
['carnivoro', 'adjective', 'c'],
['carnivoro', 'noun', 'c'],
['carnoso', 'adjective', 'c'],
['carnoso', 'noun', 'c'],
['caro', 'adjective', 'a'],
['caro', 'adverb', 'a'],
['caro', 'noun', 'a'],
['carosello', 'noun', 'c'],
['carovana', 'noun', 'c'],
['carriera', 'noun', 'a'],
['carro', 'noun', 'b'],
['carrozzeria', 'noun', 'c'],
['carta', 'noun', 'a'],
['cartaceo', 'adjective', 'b'],
['cartella', 'noun', 'b'],
['cartello', 'noun', 'b'],
['cartoleria', 'noun', 'c'],
['cartolina', 'noun', 'b'],
['cartone', 'noun', 'b'],
['cartuccia', 'noun', 'c'],
['casa', 'noun', 'a'],
['casalinga', 'noun', 'c'],
['casalingo', 'adjective', 'c'],
['casalingo', 'noun', 'c'],
['cascare', 'verb', 'b'],
['cascata', 'noun', 'c'],
['casco', 'noun', 'c'],
['caserma', 'noun', 'b'],
['casetta', 'noun', 'b'],
['casino', 'noun', 'a'],
['caso', 'noun', 'a'],
['cassa', 'noun', 'a'],
['cassaforte', 'noun', 'c'],
['cassapanca', 'noun', 'c'],
['casseruola', 'noun', 'c'],
['cassetta', 'noun', 'b'],
['cassettiera', 'noun', 'c'],
['cassetto', 'noun', 'b'],
['cassiera', 'noun', 'c'],
['castagna', 'noun', 'c'],
['castagno', 'noun', 'c'],
['castano', 'adjective', 'c'],
['castello', 'noun', 'a'],
['castoro', 'noun', 'c'],
['casuale', 'adjective', 'b'],
['casuale', 'noun', 'b'],
['catalogo', 'noun', 'b'],
['catanzarese', 'adjective', 'c'],
['catanzarese', 'noun', 'c'],
['catarro', 'noun', 'c'],
['catasta', 'noun', 'c'],
['catastrofe', 'noun', 'b'],
['catechismo', 'noun', 'c'],
['categoria', 'noun', 'a'],
['catena', 'noun', 'a'],
['catenaccio', 'noun', 'c'],
['catino', 'noun', 'c'],
['catrame', 'noun', 'c'],
['cattedrale', 'adjective', 'b'],
['cattedrale', 'noun', 'b'],
['cattivo', 'adjective', 'a'],
['cattivo', 'noun', 'a'],
['cattolico', 'adjective', 'a'],
['cattolico', 'noun', 'a'],
['catturare', 'verb', 'b'],
['causa', 'noun', 'a'],
['causare', 'verb', 'a'],
['cavalcare', 'verb', 'b'],
['cavaliere', 'noun', 'a'],
['cavalletta', 'noun', 'c'],
['cavallo', 'noun', 'a'],
['cavare', 'verb', 'b'],
['cavatappi', 'noun', 'c'],
['caverna', 'noun', 'c'],
['caviglia', 'noun', 'b'],
['cavità', 'noun', 'b'],
['cavo', 'adjective', 'b'],
['cavo', 'noun', 'b'],
['cavo', 'noun', 'b'],
['cavolo', 'noun', 'b'],
['cazzata', 'noun', 'b'],
['cazzo', 'noun', 'a'],
['ce', 'pronoun', 'a'],
['ce', 'adverb', 'a'],
['cece', 'noun', 'c'],
['ceco', 'adjective', 'c'],
['ceco', 'noun', 'c'],
['cecoslovacco', 'adjective', 'c'],
['cecoslovacco', 'noun', 'c'],
['cedere', 'verb', 'a'],
['celare', 'verb', 'b'],
['celebrare', 'verb', 'b'],
['celebre', 'adjective', 'b'],
['celeste', 'adjective', 'b'],
['celeste', 'noun', 'b'],
['cella', 'noun', 'b'],
['cellula', 'noun', 'a'],
['cellulare', 'adjective', 'a'],
['cellulare', 'noun', 'a'],
['cemento', 'noun', 'b'],
['cena', 'noun', 'a'],
['cenare', 'verb', 'b'],
['cenere', 'noun', 'b'],
['cenere', 'adjective', 'b'],
['cenno', 'noun', 'b'],
['centesimo', 'adjective', 'b'],
['centesimo', 'noun', 'b'],
['centimetro', 'noun', 'b'],
['centinaio', 'noun', 'a'],
['cento', 'adjective', 'a'],
['cento', 'noun', 'a'],
['centrale', 'adjective', 'a'],
['centrale', 'noun', 'a'],
['centralino', 'noun', 'c'],
['centrare', 'verb', 'b'],
['centro', 'noun', 'a'],
['centroamericano', 'adjective', 'c'],
['centroamericano', 'noun', 'c'],
['ceramica', 'noun', 'b'],
['cercare', 'verb', 'a'],
['cerchio', 'noun', 'b'],
['cereale', 'noun', 'c'],
['cereale', 'adjective', 'c'],
['cerebrale', 'adjective', 'b'],
['cerebrale', 'noun', 'b'],
['cerimonia', 'noun', 'b'],
['cerino', 'noun', 'c'],
['cerniera', 'noun', 'c'],
['cerotto', 'noun', 'c'],
['certamente', 'adverb', 'a'],
['certezza', 'noun', 'a'],
['certificare', 'verb', 'b'],
['certificato', 'past_part', 'b'],
['certificato', 'adjective', 'b'],
['certificato', 'noun', 'b'],
['certo', 'adjective', 'a'],
['certo', 'adjective', 'a'],
['certo', 'pronoun', 'a'],
['certo', 'adverb', 'a'],
['cervello', 'noun', 'a'],
['cervo', 'noun', 'c'],
['cespuglio', 'noun', 'b'],
['cessare', 'verb', 'b'],
['cesso', 'noun', 'b'],
['cestino', 'noun', 'c'],
['cesto', 'noun', 'c'],
['cetriolo', 'noun', 'c'],
['chat', 'noun', 'b'],
['che', 'pronoun', 'a'],
['che', 'adjective', 'a'],
['che', 'noun', 'a'],
['chewingum', 'noun', 'c'],
['chi', 'pronoun', 'a'],
['chiacchiera', 'noun', 'b'],
['chiacchierare', 'verb', 'b'],
['chiamare', 'verb', 'a'],
['chiamata', 'noun', 'b'],
['chiaramente', 'adverb', 'a'],
['chiarezza', 'noun', 'b'],
['chiarire', 'verb', 'a'],
['chiaro', 'adjective', 'a'],
['chiaro', 'noun', 'a'],
['chiaro', 'adverb', 'a'],
['chiasso', 'noun', 'c'],
['chiave', 'noun', 'a'],
['chiazza', 'noun', 'c'],
['chiedere', 'verb', 'a'],
['chiesa', 'noun', 'a'],
['chilo', 'noun', 'b'],
['chilogrammo', 'noun', 'c'],
['chilometro', 'noun', 'a'],
['chimico', 'adjective', 'a'],
['chimico', 'noun', 'a'],
['china', 'noun', 'c'],
['chinare', 'verb', 'b'],
['chinotto', 'noun', 'c'],
['chiodo', 'noun', 'b'],
['chiosco', 'noun', 'b'],
['chirurgia', 'noun', 'b'],
['chirurgico', 'adjective', 'b'],
['chirurgico', 'noun', 'b'],
['chirurgo', 'noun', 'b'],
['chissà', 'adverb', 'a'],
['chitarra', 'noun', 'b'],
['chiudere', 'verb', 'a'],
['chiunque', 'pronoun', 'a'],
['chiuso', 'past_part', 'a'],
['chiuso', 'adjective', 'a'],
['chiuso', 'noun', 'a'],
['chiuso', 'adverb', 'a'],
['chiusura', 'noun', 'b'],
['ci', 'noun', 'c'],
['ci', 'pronoun', 'a'],
['ci', 'adverb', 'a'],
['ciabatta', 'noun', 'c'],
['ciambella', 'noun', 'c'],
['ciao', 'exclamation', 'a'],
['ciascuno', 'adjective', 'a'],
['ciascuno', 'pronoun', 'a'],
['cibare', 'verb', 'c'],
['cibo', 'noun', 'a'],
['cicatrice', 'noun', 'b'],
['ciclismo', 'noun', 'b'],
['ciclista', 'noun', 'c'],
['ciclo', 'noun', 'b'],
['cicogna', 'noun', 'c'],
['cicoria', 'noun', 'c'],
['cieco', 'adjective', 'b'],
['cieco', 'noun', 'b'],
['cielo', 'noun', 'a'],
['cifra', 'noun', 'a'],
['ciglio', 'noun', 'b'],
['cigno', 'noun', 'c'],
['cileno', 'adjective', 'c'],
['cileno', 'noun', 'c'],
['ciliegia', 'noun', 'c'],
['ciliegia', 'adjective', 'c'],
['ciliegio', 'noun', 'c'],
['cilindro', 'noun', 'c'],
['cima', 'noun', 'c'],
['cimice', 'noun', 'c'],
['ciminiera', 'noun', 'c'],
['cimitero', 'noun', 'b'],
['cinema', 'noun', 'a'],
['cinematografico', 'adjective', 'b'],
['cinese', 'adjective', 'a'],
['cinese', 'noun', 'a'],
['cinghia', 'noun', 'c'],
['cinghiale', 'noun', 'c'],
['cinguettare', 'verb', 'c'],
['cinguettio', 'noun', 'c'],
['cinico', 'adjective', 'c'],
['cinico', 'noun', 'c'],
['cinquanta', 'adjective', 'a'],
['cinquanta', 'noun', 'a'],
['cinque', 'adjective', 'a'],
['cinque', 'noun', 'a'],
['cinquecento', 'adjective', 'b'],
['cinquecento', 'noun', 'b'],
['cintura', 'noun', 'b'],
['cinturino', 'noun', 'c'],
['ciò', 'pronoun', 'a'],
['ciocca', 'noun', 'c'],
['cioccolatino', 'noun', 'c'],
['cioccolato', 'noun', 'b'],
['cioccolato', 'adjective', 'b'],
['cioè', 'conjunction', 'a'],
['ciotola', 'noun', 'c'],
['cipolla', 'noun', 'b'],
['cipresso', 'noun', 'c'],
['cipriota', 'adjective', 'c'],
['cipriota', 'noun', 'c'],
['circa', 'preposition', 'a'],
['circa', 'adverb', 'a'],
['circa', 'noun', 'a'],
['circo', 'noun', 'b'],
['circolare', 'adjective', 'b'],
['circolare', 'noun', 'b'],
['circolare', 'verb', 'b'],
['circolazione', 'noun', 'b'],
['circolo', 'noun', 'b'],
['circondare', 'verb', 'a'],
['circostanza', 'noun', 'a'],
['circuito', 'noun', 'b'],
['citare', 'verb', 'a'],
['citato', 'past_part', 'b'],
['citato', 'adjective', 'b'],
['citato', 'noun', 'b'],
['citazione', 'noun', 'b'],
['citofono', 'noun', 'c'],
['città', 'noun', 'a'],
['cittadina', 'noun', 'b'],
['cittadinanza', 'noun', 'b'],
['cittadino', 'adjective', 'a'],
['cittadino', 'noun', 'a'],
['ciuffo', 'noun', 'c'],
['civile', 'adjective', 'a'],
['civile', 'noun', 'a'],
['civiltà', 'noun', 'b'],
['clacson', 'noun', 'c'],
['clan', 'noun', 'b'],
['clandestino', 'adjective', 'b'],
['clandestino', 'noun', 'b'],
['classe', 'noun', 'a'],
['classico', 'adjective', 'a'],
['classico', 'noun', 'a'],
['classifica', 'noun', 'b'],
['classificare', 'verb', 'b'],
['clero', 'noun', 'c'],
['cliccare', 'verb', 'b'],
['cliente', 'noun', 'a'],
['clima', 'noun', 'b'],
['clinica', 'noun', 'b'],
['clinico', 'adjective', 'b'],
['clinico', 'noun', 'b'],
['clistere', 'noun', 'c'],
['cloro', 'noun', 'c'],
['club', 'noun', 'b'],
['cobra', 'noun', 'c'],
['cocaina', 'noun', 'b'],
['coccinella', 'noun', 'c'],
['coccio', 'noun', 'c'],
['cocciuto', 'adjective', 'c'],
['cocciuto', 'noun', 'c'],
['cocco', 'noun', 'c'],
['coccodrillo', 'noun', 'c'],
['coccola', 'noun', 'c'],
['coccolare', 'verb', 'c'],
['cocomero', 'noun', 'c'],
['coda', 'noun', 'a'],
['codice', 'noun', 'a'],
['coerente', 'adjective', 'b'],
['cofano', 'noun', 'c'],
['cogliere', 'verb', 'a'],
['coglione', 'noun', 'a'],
['cognato', 'noun', 'b'],
['cognato', 'adjective', 'b'],
['cognome', 'noun', 'b'],
['coincidenza', 'noun', 'b'],
['coincidere', 'verb', 'b'],
['coinvolgere', 'verb', 'a'],
['coinvolgimento', 'noun', 'b'],
['colare', 'verb', 'b'],
['colata', 'noun', 'c'],
['colazione', 'noun', 'b'],
['colera', 'noun', 'c'],
['colica', 'noun', 'c'],
['colino', 'noun', 'c'],
['colla', 'noun', 'c'],
['collaborare', 'verb', 'b'],
['collaboratore', 'noun', 'b'],
['collaborazione', 'noun', 'b'],
['collana', 'noun', 'b'],
['collant', 'noun', 'c'],
['collant', 'adjective', 'c'],
['collare', 'noun', 'c'],
['collasso', 'noun', 'c'],
['collaterale', 'adjective', 'b'],
['collaterale', 'noun', 'b'],
['colle', 'noun', 'c'],
['collega', 'noun', 'a'],
['collegamento', 'noun', 'b'],
['collegare', 'verb', 'a'],
['collegio', 'noun', 'b'],
['collera', 'noun', 'c'],
['colletta', 'noun', 'c'],
['collettivo', 'adjective', 'b'],
['collettivo', 'noun', 'b'],
['collezione', 'noun', 'b'],
['collina', 'noun', 'b'],
['collo', 'noun', 'a'],
['collocare', 'verb', 'b'],
['colloquio', 'noun', 'b'],
['colluttorio', 'noun', 'c'],
['colmo', 'noun', 'c'],
['colomba', 'noun', 'b'],
['colombo', 'noun', 'c'],
['colonna', 'noun', 'a'],
['colonnello', 'noun', 'b'],
['colorante', 'pres_part', 'c'],
['colorante', 'adjective', 'c'],
['colorante', 'noun', 'c'],
['colorare', 'verb', 'b'],
['colorato', 'past_part', 'b'],
['colorato', 'adjective', 'b'],
['colore', 'noun', 'a'],
['coloro', 'pronoun', 'a'],
['colosso', 'noun', 'c'],
['colpa', 'noun', 'a'],
['colpevole', 'adjective', 'b'],
['colpevole', 'noun', 'b'],
['colpire', 'verb', 'a'],
['colpo', 'noun', 'a'],
['coltellata', 'noun', 'c'],
['coltello', 'noun', 'a'],
['coltivare', 'verb', 'b'],
['coltivazione', 'noun', 'c'],
['colto', 'adjective', 'b'],
['colto', 'noun', 'b'],
['colui', 'pronoun', 'b'],
['coma', 'noun', 'b'],
['comandamento', 'noun', 'b'],
['comandante', 'pres_part', 'b'],
['comandante', 'adjective', 'b'],
['comandante', 'noun', 'b'],
['comandare', 'verb', 'b'],
['comando', 'noun', 'b'],
['combaciare', 'verb', 'c'],
['combattente', 'pres_part', 'c'],
['combattente', 'adjective', 'c'],
['combattente', 'noun', 'c'],
['combattere', 'verb', 'a'],
['combattimento', 'noun', 'b'],
['combinare', 'verb', 'b'],
['combinazione', 'noun', 'b'],
['come', 'adverb', 'a'],
['come', 'conjunction', 'a'],
['cometa', 'noun', 'c'],
['comfort', 'noun', 'c'],
['comico', 'adjective', 'b'],
['comico', 'noun', 'b'],
['cominciare', 'verb', 'a'],
['cominciare', 'noun', 'a'],
['comitato', 'noun', 'b'],
['comma', 'noun', 'b'],
['commedia', 'noun', 'b'],
['commentare', 'verb', 'a'],
['commento', 'noun', 'a'],
['commerciale', 'adjective', 'a'],
['commerciale', 'noun', 'a'],
['commerciante', 'pres_part', 'b'],
['commerciante', 'adjective', 'b'],
['commerciante', 'noun', 'b'],
['commercio', 'noun', 'b'],
['commettere', 'verb', 'a'],
['commissariato', 'noun', 'b'],
['commissario', 'noun', 'a'],
['commissione', 'noun', 'a'],
['community', 'noun', 'b'],
['commuovere', 'verb', 'b'],
['comodino', 'noun', 'c'],
['comodità', 'noun', 'c'],
['comodo', 'adjective', 'a'],
['comodo', 'noun', 'a'],
['compagnia', 'noun', 'a'],
['compagno', 'noun', 'a'],
['compagno', 'adjective', 'a'],
['comparire', 'verb', 'a'],
['comparsa', 'noun', 'b'],
['compassione', 'noun', 'c'],
['compasso', 'noun', 'c'],
['compatibile', 'adjective', 'b'],
['compatriota', 'noun', 'c'],
['compatto', 'adjective', 'b'],
['compatto', 'noun', 'b'],
['compensare', 'verb', 'b'],
['compenso', 'noun', 'b'],
['competente', 'adjective', 'b'],
['competente', 'noun', 'b'],
['competenza', 'noun', 'b'],
['competere', 'verb', 'b'],
['competizione', 'noun', 'b'],
['compiangere', 'verb', 'c'],
['compiere', 'verb', 'a'],
['compilare', 'verb', 'b'],
['compito', 'noun', 'a'],
['compleanno', 'noun', 'b'],
['complessivo', 'adjective', 'b'],
['complesso', 'noun', 'b'],
['complesso', 'adjective', 'a'],
['completamente', 'adverb', 'a'],
['completare', 'verb', 'b'],
['completo', 'adjective', 'a'],
['completo', 'noun', 'a'],
['complicare', 'verb', 'b'],
['complicato', 'past_part', 'b'],
['complicato', 'adjective', 'b'],
['complice', 'noun', 'b'],
['complice', 'adjective', 'b'],
['complimento', 'noun', 'b'],
['complotto', 'noun', 'c'],
['componente', 'pres_part', 'b'],
['componente', 'adjective', 'b'],
['componente', 'noun', 'b'],
['comporre', 'verb', 'a'],
['comportamento', 'noun', 'a'],
['comportare', 'verb', 'a'],
['composizione', 'noun', 'b'],
['composto', 'past_part', 'b'],
['composto', 'adjective', 'b'],
['composto', 'noun', 'b'],
['comprare', 'verb', 'a'],
['comprendere', 'verb', 'a'],
['comprensibile', 'adjective', 'b'],
['comprensione', 'noun', 'b'],
['comprensivo', 'adjective', 'c'],
['compreso', 'past_part', 'a'],
['compreso', 'adjective', 'a'],
['compromesso', 'noun', 'b'],
['compromettere', 'verb', 'b'],
['computer', 'noun', 'a'],
['comunale', 'adjective', 'b'],
['comunale', 'noun', 'b'],
['comune', 'adjective', 'a'],
['comune', 'noun', 'a'],
['comune', 'noun', 'a'],
['comunicare', 'verb', 'a'],
['comunicazione', 'noun', 'a'],
['comunione', 'noun', 'b'],
['comunismo', 'noun', 'b'],
['comunista', 'adjective', 'a'],
['comunista', 'noun', 'a'],
['comunità', 'noun', 'a'],
['comunque', 'adverb', 'a'],
['comunque', 'conjunction', 'a'],
['con', 'preposition', 'a'],
['conca', 'noun', 'c'],
['concedere', 'verb', 'b'],
['concentrare', 'verb', 'a'],
['concentrazione', 'noun', 'b'],
['concepire', 'noun', 'b'],
['concerto', 'noun', 'a'],
['concessione', 'noun', 'b'],
['concesso', 'past_part', 'b'],
['concesso', 'adjective', 'b'],
['concetto', 'past_part', 'a'],
['concetto', 'adjective', 'a'],
['concetto', 'noun', 'a'],
['concezione', 'noun', 'b'],
['conchiglia', 'noun', 'c'],
['concime', 'noun', 'c'],
['concludere', 'verb', 'a'],
['conclusione', 'noun', 'a'],
['concordare', 'verb', 'b'],
['concorrente', 'pres_part', 'b'],
['concorrente', 'adjective', 'b'],
['concorrente', 'noun', 'b'],
['concorrenza', 'noun', 'b'],
['concorrere', 'verb', 'b'],
['concorso', 'noun', 'b'],
['concreto', 'adjective', 'a'],
['concreto', 'noun', 'a'],
['condanna', 'noun', 'b'],
['condannare', 'verb', 'a'],
['condimento', 'noun', 'c'],
['condividere', 'verb', 'a'],
['condizionare', 'verb', 'b'],
['condizione', 'noun', 'a'],
['condoglianza', 'noun', 'c'],
['condominio', 'noun', 'b'],
['condotta', 'noun', 'b'],
['condurre', 'verb', 'a'],
['conduttore', 'adjective', 'b'],
['conduttore', 'noun', 'b'],
['conduttura', 'noun', 'c'],
['conferenza', 'noun', 'b'],
['conferire', 'verb', 'b'],
['conferma', 'noun', 'b'],
['confermare', 'verb', 'a'],
['confessare', 'verb', 'b'],
['confessione', 'noun', 'b'],
['confessore', 'noun', 'c'],
['confetto', 'noun', 'c'],
['confetto', 'adjective', 'c'],
['confettura', 'noun', 'c'],
['confezione', 'noun', 'b'],
['conficcare', 'verb', 'c'],
['confidare', 'verb', 'b'],
['confidenza', 'noun', 'b'],
['confine', 'noun', 'a'],
['conflitto', 'noun', 'b'],
['confondere', 'verb', 'a'],
['confortare', 'verb', 'c'],
['confrontare', 'verb', 'b'],
['confronto', 'noun', 'a'],
['confusione', 'noun', 'b'],
['confuso', 'past_part', 'b'],
['confuso', 'adjective', 'b'],
['congedo', 'noun', 'c'],
['congelare', 'verb', 'b'],
['congelatore', 'noun', 'c'],
['congestione', 'noun', 'c'],
['congiura', 'noun', 'c'],
['congresso', 'noun', 'b'],
['coniglio', 'noun', 'b'],
['coniugato', 'past_part', 'c'],
['coniugato', 'adjective', 'c'],
['coniugato', 'noun', 'c'],
['coniuge', 'noun', 'b'],
['connessione', 'noun', 'b'],
['connettere', 'verb', 'b'],
['cono', 'noun', 'b'],
['conoscenza', 'noun', 'a'],
['conoscere', 'verb', 'a'],
['conosciuto', 'past_part', 'b'],
['conosciuto', 'adjective', 'b'],
['conosciuto', 'noun', 'b'],
['conquista', 'noun', 'b'],
['conquistare', 'verb', 'a'],
['consapevole', 'adjective', 'b'],
['consapevolezza', 'noun', 'b'],
['consegna', 'noun', 'b'],
['consegnare', 'verb', 'a'],
['conseguente', 'pres_part', 'b'],
['conseguente', 'adjective', 'b'],
['conseguente', 'noun', 'b'],
['conseguenza', 'noun', 'a'],
['conseguire', 'verb', 'b'],
['consenso', 'noun', 'b'],
['consentire', 'verb', 'a'],
['conservare', 'verb', 'a'],
['conservazione', 'noun', 'b'],
['considerare', 'verb', 'a'],
['considerazione', 'noun', 'a'],
['consigliare', 'verb', 'a'],
['consigliere', 'noun', 'b'],
['consiglio', 'noun', 'a'],
['consistente', 'pres_part', 'b'],
['consistente', 'adjective', 'b'],
['consistenza', 'noun', 'b'],
['consistere', 'verb', 'b'],
['consolare', 'verb', 'b'],
['consonante', 'noun', 'c'],
['consorzio', 'noun', 'b'],
['constatare', 'verb', 'b'],
['consueto', 'adjective', 'b'],
['consueto', 'noun', 'b'],
['consulente', 'adjective', 'b'],
['consulente', 'noun', 'b'],
['consulenza', 'noun', 'b'],
['consultare', 'verb', 'b'],
['consumare', 'verb', 'a'],
['consumatore', 'noun', 'b'],
['consumatore', 'adjective', 'b'],
['consumazione', 'noun', 'c'],
['consumo', 'noun', 'b'],
['contachilometri', 'noun', 'c'],
['contadino', 'noun', 'b'],
['contadino', 'adjective', 'b'],
['contagiare', 'verb', 'c'],
['contagio', 'noun', 'c'],
['contagioso', 'adjective', 'c'],
['contagocce', 'noun', 'c'],
['contaminare', 'verb', 'b'],
['contante', 'pres_part', 'b'],
['contante', 'adjective', 'b'],
['contante', 'noun', 'b'],
['contare', 'verb', 'a'],
['contatore', 'noun', 'c'],
['contattare', 'verb', 'b'],
['contatto', 'noun', 'a'],
['conte', 'noun', 'b'],
['contemplare', 'verb', 'b'],
['contemporaneamente', 'adverb', 'b'],
['contemporaneo', 'adjective', 'a'],
['contemporaneo', 'noun', 'a'],
['contenere', 'verb', 'a'],
['contenitore', 'adjective', 'b'],
['contenitore', 'noun', 'b'],
['contentare', 'verb', 'b'],
['contentezza', 'noun', 'c'],
['contento', 'adjective', 'a'],
['contenuto', 'past_part', 'a'],
['contenuto', 'adjective', 'a'],
['contenuto', 'noun', 'a'],
['contestare', 'verb', 'b'],
['contestazione', 'noun', 'b'],
['contesto', 'noun', 'a'],
['continente', 'noun', 'b'],
['continuamente', 'adverb', 'b'],
['continuare', 'verb', 'a'],
['continuazione', 'noun', 'b'],
['continuità', 'noun', 'b'],
['continuo', 'adjective', 'a'],
['continuo', 'noun', 'a'],
['continuo', 'adverb', 'a'],
['conto', 'noun', 'a'],
['contorno', 'noun', 'b'],
['contrabbandiere', 'noun', 'c'],
['contrabbando', 'noun', 'c'],
['contraccambiare', 'verb', 'c'],
['contraddizione', 'noun', 'b'],
['contrario', 'adjective', 'a'],
['contrario', 'noun', 'a'],
['contrarre', 'verb', 'b'],
['contrastare', 'verb', 'b'],
['contrasto', 'noun', 'b'],
['contratto', 'noun', 'a'],
['contribuire', 'verb', 'b'],
['contributo', 'noun', 'b'],
['contro', 'preposition', 'a'],
['contro', 'adverb', 'a'],
['contro', 'noun', 'a'],
['controllare', 'verb', 'a'],
['controllo', 'noun', 'a'],
['controllore', 'noun', 'c'],
['convegno', 'noun', 'b'],
['conveniente', 'pres_part', 'b'],
['conveniente', 'adjective', 'b'],
['convenire', 'verb', 'b'],
['convenzione', 'noun', 'b'],
['conversazione', 'noun', 'a'],
['conversione', 'noun', 'b'],
['convertire', 'verb', 'b'],
['convincente', 'pres_part', 'b'],
['convincente', 'adjective', 'b'],
['convincere', 'verb', 'a'],
['convinto', 'past_part', 'b'],
['convinto', 'adjective', 'b'],
['convinzione', 'noun', 'b'],
['convivenza', 'noun', 'b'],
['convivere', 'verb', 'b'],
['convocare', 'verb', 'b'],
['convulsione', 'noun', 'c'],
['coordinamento', 'noun', 'b'],
['coordinare', 'verb', 'b'],
['coperchio', 'noun', 'c'],
['coperta', 'noun', 'b'],
['copertina', 'noun', 'b'],
['coperto', 'past_part', 'b'],
['coperto', 'adjective', 'b'],
['coperto', 'noun', 'b'],
['copertura', 'noun', 'b'],
['copia', 'noun', 'a'],
['copiare', 'verb', 'b'],
['copione', 'noun', 'b'],
['coppa', 'noun', 'b'],
['coppia', 'noun', 'a'],
['copricostume', 'noun', 'c'],
['copriletto', 'noun', 'c'],
['coprire', 'verb', 'a'],
['copyright', 'noun', 'b'],
['coraggio', 'noun', 'a'],
['coraggio', 'exclamation', 'a'],
['coraggioso', 'adjective', 'b'],
['corallo', 'noun', 'c'],
['corallo', 'adjective', 'c'],
['corazza', 'noun', 'c'],
['corazzata', 'noun', 'c'],
['corazziere', 'noun', 'c'],
['corda', 'noun', 'a'],
['coriandolo', 'noun', 'c'],
['coricare', 'verb', 'c'],
['cornacchia', 'noun', 'c'],
['cornetto', 'noun', 'c'],
['cornice', 'noun', 'b'],
['corno', 'noun', 'b'],
['cornuto', 'adjective', 'c'],
['cornuto', 'noun', 'c'],
['coro', 'noun', 'b'],
['corona', 'noun', 'b'],
['corpo', 'noun', 'a'],
['corporatura', 'noun', 'c'],
['correggere', 'verb', 'a'],
['corrente', 'pres_part', 'a'],
['corrente', 'adjective', 'a'],
['corrente', 'noun', 'a'],
['corrente', 'adverb', 'a'],
['correre', 'verb', 'a'],
['correttamente', 'adverb', 'b'],
['corretto', 'past_part', 'b'],
['corretto', 'adjective', 'b'],
['correzione', 'noun', 'c'],
['corridoio', 'noun', 'b'],
['corridore', 'adjective', 'c'],
['corridore', 'noun', 'c'],
['corriera', 'noun', 'c'],
['corriere', 'noun', 'a'],
['corrispondente', 'pres_part', 'b'],
['corrispondente', 'adjective', 'b'],
['corrispondente', 'noun', 'b'],
['corrispondenza', 'noun', 'b'],
['corrispondere', 'verb', 'a'],
['corruzione', 'noun', 'b'],
['corsa', 'noun', 'a'],
['corsia', 'noun', 'c'],
['corso', 'noun', 'a'],
['corte', 'noun', 'a'],
['corteccia', 'noun', 'c'],
['corteggiare', 'verb', 'c'],
['cortesia', 'noun', 'b'],
['cortile', 'noun', 'b'],
['corto', 'adjective', 'a'],
['corvo', 'noun', 'c'],
['cosa', 'noun', 'a'],
['coscia', 'noun', 'b'],
['cosciente', 'adjective', 'c'],
['coscienza', 'noun', 'a'],
['così', 'adverb', 'a'],
['cosiddetto', 'adjective', 'a'],
['costa', 'noun', 'a'],
['costante', 'adjective', 'b'],
['costante', 'noun', 'b'],
['costantemente', 'adverb', 'b'],
['costare', 'verb', 'a'],
['costellazione', 'noun', 'b'],
['costituire', 'verb', 'a'],
['costituzionale', 'adjective', 'b'],
['costituzione', 'noun', 'b'],
['costo', 'noun', 'a'],
['costoso', 'adjective', 'b'],
['costringere', 'verb', 'a'],
['costruire', 'verb', 'a'],
['costruttivo', 'adjective', 'b'],
['costruzione', 'noun', 'a'],
['costume', 'noun', 'a'],
['cotoletta', 'noun', 'c'],
['cotone', 'noun', 'b'],
['cottura', 'noun', 'c'],
['covare', 'verb', 'c'],
['covo', 'noun', 'c'],
['cozza', 'noun', 'c'],
['cracker', 'noun', 'c'],
['cranio', 'noun', 'b'],
['cravatta', 'noun', 'b'],
['creare', 'verb', 'a'],
['creatività', 'noun', 'b'],
['creativo', 'adjective', 'b'],
['creativo', 'noun', 'b'],
['creatura', 'noun', 'b'],
['creazione', 'noun', 'b'],
['credente', 'pres_part', 'b'],
['credente', 'adjective', 'b'],
['credente', 'noun', 'b'],
['credenza', 'noun', 'c'],
['credere', 'verb', 'a'],
['credere', 'noun', 'a'],
['credibile', 'adjective', 'b'],
['credito', 'noun', 'a'],
['creditore', 'noun', 'b'],
['credo', 'noun', 'c'],
['crema', 'noun', 'b'],
['crema', 'adjective', 'b'],
['crepaccio', 'noun', 'c'],
['crêpe', 'noun', 'c'],
['crescente', 'pres_part', 'b'],
['crescente', 'adjective', 'b'],
['crescente', 'noun', 'b'],
['crescere', 'verb', 'a'],
['crescita', 'noun', 'a'],
['cretino', 'adjective', 'b'],
['cretino', 'noun', 'b'],
['criceto', 'noun', 'c'],
['criminale', 'adjective', 'b'],
['criminale', 'noun', 'b'],
['crimine', 'noun', 'b'],
['criniera', 'noun', 'c'],
['crisantemo', 'noun', 'c'],
['crisi', 'noun', 'a'],
['cristallo', 'noun', 'b'],
['cristianesimo', 'noun', 'b'],
['cristiano', 'adjective', 'a'],
['cristiano', 'noun', 'a'],
['criterio', 'noun', 'b'],
['critica', 'noun', 'a'],
['criticare', 'verb', 'b'],
['critico', 'adjective', 'a'],
['critico', 'noun', 'a'],
['croato', 'adjective', 'c'],
['croato', 'noun', 'c'],
['croce', 'noun', 'b'],
['crocifiggere', 'verb', 'c'],
['crocifisso', 'past_part', 'c'],
['crocifisso', 'adjective', 'c'],
['crocifisso', 'noun', 'c'],
['crollare', 'verb', 'b'],
['cronaca', 'noun', 'b'],
['cronico', 'adjective', 'b'],
['cronico', 'noun', 'b'],
['cronista', 'noun', 'c'],
['crostaceo', 'noun', 'c'],
['crostino', 'noun', 'c'],
['crudele', 'adjective', 'b'],
['crudele', 'noun', 'b'],
['crudo', 'adjective', 'b'],
['crudo', 'noun', 'b'],
['cu', 'noun', 'c'],
['cubo', 'noun', 'b'],
['cubo', 'adjective', 'b'],
['cucchiaio', 'noun', 'b'],
['cuccia', 'noun', 'c'],
['cucciolo', 'noun', 'b'],
['cucina', 'noun', 'a'],
['cucinare', 'verb', 'a'],
['cucire', 'verb', 'b'],
['cucito', 'past_part', 'c'],
['cucito', 'adjective', 'c'],
['cucito', 'noun', 'c'],
['cucitura', 'noun', 'c'],
['cuffia', 'noun', 'b'],
['cugino', 'noun', 'b'],
['cui', 'pronoun', 'a'],
['cullare', 'verb', 'c'],
['culo', 'noun', 'a'],
['culto', 'noun', 'b'],
['cultura', 'noun', 'a'],
['culturale', 'adjective', 'a'],
['cumulo', 'noun', 'c'],
['cuocere', 'verb', 'b'],
['cuoco', 'noun', 'b'],
['cuore', 'noun', 'a'],
['cupo', 'adjective', 'b'],
['cupo', 'noun', 'b'],
['cura', 'noun', 'a'],
['curare', 'verb', 'a'],
['curiosare', 'verb', 'b'],
['curiosità', 'noun', 'b'],
['curioso', 'adjective', 'a'],
['curioso', 'noun', 'a'],
['curriculum', 'noun', 'b'],
['curva', 'noun', 'b'],
['curvo', 'adjective', 'b'],
['curvo', 'noun', 'b'],
['cuscino', 'noun', 'b'],
['custode', 'noun', 'b'],
['custode', 'adjective', 'b'],
['custodia', 'noun', 'b'],
['custodire', 'verb', 'b'],
['da', 'preposition', 'a'],
['dado', 'noun', 'c'],
['danese', 'adjective', 'c'],
['danese', 'noun', 'c'],
['dannato', 'past_part', 'b'],
['dannato', 'adjective', 'b'],
['dannato', 'noun', 'b'],
['danneggiare', 'verb', 'b'],
['danno', 'noun', 'a'],
['dannoso', 'adjective', 'c'],
['danza', 'noun', 'b'],
['dappertutto', 'adverb', 'b'],
['dare', 'verb', 'a'],
['dare', 'noun', 'a'],
['data', 'noun', 'a'],
['dato', 'past_part', 'a'],
['dato', 'adjective', 'a'],
['dato', 'noun', 'a'],
['dattero', 'noun', 'c'],
['davanti', 'adverb', 'a'],
['davanti', 'adjective', 'a'],
['davanti', 'noun', 'a'],
['davanzale', 'noun', 'c'],
['davvero', 'adverb', 'a'],
['dea', 'noun', 'b'],
['debito', 'noun', 'a'],
['debole', 'adjective', 'a'],
['debole', 'noun', 'a'],
['debolezza', 'noun', 'b'],
['decennio', 'noun', 'b'],
['decidere', 'verb', 'a'],
['decina', 'noun', 'a'],
['decisamente', 'adverb', 'b'],
['decisione', 'noun', 'a'],
['decisivo', 'adjective', 'b'],
['deciso', 'past_part', 'b'],
['deciso', 'adjective', 'b'],
['decorare', 'verb', 'b'],
['decorato', 'past_part', 'c'],
['decorato', 'adjective', 'c'],
['decorato', 'noun', 'c'],
['decorazione', 'noun', 'b'],
['decoroso', 'adjective', 'c'],
['decreto', 'noun', 'b'],
['dedica', 'noun', 'c'],
['dedicare', 'verb', 'a'],
['dedurre', 'verb', 'b'],
['deficiente', 'adjective', 'b'],
['deficiente', 'noun', 'b'],
['definire', 'verb', 'a'],
['definitivamente', 'adverb', 'b'],
['definitivo', 'adjective', 'a'],
['definitivo', 'noun', 'a'],
['definizione', 'noun', 'a'],
['deformare', 'verb', 'c'],
['deforme', 'adjective', 'c'],
['deforme', 'noun', 'c'],
['defunto', 'past_part', 'b'],
['defunto', 'adjective', 'b'],
['defunto', 'noun', 'b'],
['degno', 'adjective', 'b'],
['degradare', 'verb', 'b'],
['delegare', 'verb', 'b'],
['delegato', 'past_part', 'b'],
['delegato', 'adjective', 'b'],
['delegato', 'noun', 'b'],
['delegazione', 'noun', 'c'],
['delfino', 'noun', 'c'],
['delicatezza', 'noun', 'c'],
['delicato', 'adjective', 'b'],
['delicato', 'noun', 'b'],
['delinquente', 'pres_part', 'c'],
['delinquente', 'adjective', 'c'],
['delinquente', 'noun', 'c'],
['delirare', 'verb', 'c'],
['delirio', 'noun', 'b'],
['delitto', 'noun', 'b'],
['delizia', 'noun', 'c'],
['delizioso', 'adjective', 'b'],
['deludere', 'verb', 'b'],
['delusione', 'noun', 'b'],
['deluso', 'past_part', 'b'],
['deluso', 'adjective', 'b'],
['deluso', 'noun', 'b'],
['democratico', 'adjective', 'b'],
['democratico', 'noun', 'b'],
['democrazia', 'noun', 'a'],
['democristiano', 'adjective', 'c'],
['democristiano', 'noun', 'c'],
['demoralizzare', 'verb', 'c'],
['denaro', 'noun', 'a'],
['denominare', 'verb', 'b'],
['denso', 'adjective', 'b'],
['dente', 'noun', 'a'],
['dentiera', 'noun', 'c'],
['dentifricio', 'noun', 'c'],
['dentista', 'noun', 'b'],
['dentro', 'adverb', 'a'],
['dentro', 'preposition', 'a'],
['dentro', 'noun', 'a'],
['denuncia', 'noun', 'b'],
['denunciare', 'verb', 'a'],
['deodorante', 'pres_part', 'c'],
['deodorante', 'adjective', 'c'],
['deodorante', 'noun', 'c'],
['depilazione', 'noun', 'c'],
['deporre', 'verb', 'b'],
['depositare', 'verb', 'b'],
['deposito', 'noun', 'b'],
['deposizione', 'noun', 'b'],
['depressione', 'noun', 'b'],
['deprimere', 'verb', 'b'],
['depuratore', 'adjective', 'c'],
['depuratore', 'noun', 'c'],
['deputato', 'past_part', 'b'],
['deputato', 'adjective', 'b'],
['deputato', 'noun', 'b'],
['derivare', 'verb', 'a'],
['derubare', 'verb', 'c'],
['descrivere', 'verb', 'a'],
['descrizione', 'noun', 'a'],
['deserto', 'noun', 'b'],
['deserto', 'adjective', 'b'],
['desiderare', 'verb', 'a'],
['desiderio', 'noun', 'a'],
['design', 'noun', 'b'],
['dessert', 'noun', 'c'],
['destinare', 'verb', 'a'],
['destinazione', 'noun', 'b'],
['destino', 'noun', 'a'],
['destra', 'noun', 'a'],
['destro', 'adjective', 'a'],
['destro', 'noun', 'a'],
['detective', 'noun', 'b'],
['detenere', 'verb', 'b'],
['detenuto', 'past_part', 'c'],
['detenuto', 'adjective', 'c'],
['detenuto', 'noun', 'c'],
['determinare', 'verb', 'a'],
['determinato', 'past_part', 'a'],
['determinato', 'adjective', 'a'],
['determinazione', 'noun', 'b'],
['detersivo', 'adjective', 'c'],
['detersivo', 'noun', 'c'],
['dettagliato', 'past_part', 'b'],
['dettagliato', 'adjective', 'b'],
['dettaglio', 'noun', 'a'],
['dettare', 'verb', 'b'],
['dettato', 'past_part', 'c'],
['dettato', 'adjective', 'c'],
['dettato', 'noun', 'c'],
['devastare', 'verb', 'b'],
['deviare', 'verb', 'c'],
['deviazione', 'noun', 'c'],
['di', 'preposition', 'a'],
['di', 'noun', 'c'],
['diagnosi', 'noun', 'b'],
['dialetto', 'noun', 'a'],
['dialogare', 'verb', 'b'],
['dialogo', 'noun', 'a'],
['diamante', 'noun', 'a'],
['diametro', 'noun', 'b'],
['diario', 'noun', 'b'],
['diario', 'adjective', 'b'],
['diavolo', 'noun', 'a'],
['dibattito', 'noun', 'b'],
['dicembre', 'noun', 'a'],
['dichiarare', 'verb', 'a'],
['dichiarazione', 'noun', 'a'],
['diciotto', 'adjective', 'b'],
['diciotto', 'noun', 'b'],
['dieci', 'adjective', 'a'],
['dieci', 'noun', 'a'],
['diecimila', 'adjective', 'b'],
['diecimila', 'noun', 'b'],
['dieta', 'noun', 'b'],
['dietetico', 'adjective', 'c'],
['dietro', 'preposition', 'a'],
['dietro', 'adverb', 'a'],
['dietro', 'adjective', 'a'],
['dietro', 'noun', 'a'],
['difendere', 'verb', 'a'],
['difensore', 'adjective', 'b'],
['difensore', 'noun', 'b'],
['difesa', 'noun', 'a'],
['difetto', 'noun', 'b'],
['differente', 'pres_part', 'a'],
['differente', 'adjective', 'a'],
['differenza', 'noun', 'a'],
['difficile', 'adjective', 'a'],
['difficile', 'noun', 'a'],
['difficilmente', 'adverb', 'b'],
['difficoltà', 'noun', 'a'],
['diffidente', 'adjective', 'c'],
['diffidente', 'noun', 'c'],
['diffidenza', 'noun', 'c'],
['diffondere', 'verb', 'a'],
['diffusione', 'noun', 'b'],
['diffuso', 'past_part', 'b'],
['diffuso', 'adjective', 'b'],
['diga', 'noun', 'c'],
['digestione', 'noun', 'c'],
['digestivo', 'adjective', 'c'],
['digestivo', 'noun', 'c'],
['digitale', 'adjective', 'b'],
['digitale', 'noun', 'b'],
['digiunare', 'verb', 'c'],
['dignità', 'noun', 'b'],
['diluvio', 'noun', 'c'],
['dimagrante', 'pres_part', 'c'],
['dimagrante', 'adjective', 'c'],
['dimensione', 'noun', 'a'],
['dimenticare', 'verb', 'a'],
['dimettere', 'verb', 'b'],
['dimezzare', 'verb', 'c'],
['diminuire', 'verb', 'b'],
['dimostrare', 'verb', 'a'],
['dimostrazione', 'noun', 'b'],
['dinamica', 'noun', 'b'],
['dinamico', 'adjective', 'b'],
['dinosauro', 'noun', 'c'],
['dintorno', 'adverb', 'b'],
['dintorno', 'noun', 'b'],
['dio', 'noun', 'a'],
['dipartimento', 'noun', 'b'],
['dipendente', 'pres_part', 'a'],
['dipendente', 'adjective', 'a'],
['dipendente', 'noun', 'a'],
['dipendenza', 'noun', 'b'],
['dipendere', 'verb', 'a'],
['dipingere', 'verb', 'b'],
['dipinto', 'past_part', 'b'],
['dipinto', 'adjective', 'b'],
['dipinto', 'noun', 'b'],
['diploma', 'noun', 'b'],
['diplomatico', 'adjective', 'b'],
['diplomatico', 'noun', 'b'],
['dire', 'verb', 'a'],
['dire', 'noun', 'a'],
['diretta', 'noun', 'b'],
['direttamente', 'adverb', 'a'],
['diretto', 'past_part', 'a'],
['diretto', 'adjective', 'a'],
['diretto', 'noun', 'a'],
['direttore', 'noun', 'a'],
['direttore', 'adjective', 'a'],
['direttrice', 'noun', 'c'],
['direzione', 'noun', 'a'],
['dirigente', 'adjective', 'b'],
['dirigente', 'noun', 'b'],
['dirigere', 'verb', 'a'],
['diritto', 'noun', 'a'],
['disagio', 'noun', 'b'],
['disastro', 'noun', 'b'],
['disattento', 'adjective', 'c'],
['discarica', 'noun', 'b'],
['discendere', 'verb', 'b'],
['discepolo', 'noun', 'b'],
['discesa', 'noun', 'b'],
['disciplina', 'noun', 'b'],
['disco', 'noun', 'a'],
['discordia', 'noun', 'c'],
['discorso', 'noun', 'a'],
['discoteca', 'noun', 'b'],
['discreto', 'adjective', 'b'],
['discreto', 'noun', 'b'],
['discussione', 'noun', 'a'],
['discusso', 'past_part', 'b'],
['discusso', 'adjective', 'b'],
['discutere', 'verb', 'a'],
['disegnare', 'verb', 'a'],
['disegno', 'noun', 'a'],
['diseredare', 'verb', 'c'],
['disgrazia', 'noun', 'b'],
['disinfettante', 'pres_part', 'c'],
['disinfettante', 'adjective', 'c'],
['disinfettare', 'verb', 'c'],
['disinteresse', 'noun', 'c'],
['disoccupazione', 'noun', 'b'],
['disonesto', 'adjective', 'c'],
['disonesto', 'noun', 'c'],
['disordinato', 'past_part', 'c'],
['disordinato', 'adjective', 'c'],
['disordine', 'noun', 'b'],
['dispari', 'adjective', 'c'],
['dispensa', 'noun', 'c'],
['disperare', 'verb', 'b'],
['disperato', 'past_part', 'b'],
['disperato', 'adjective', 'b'],
['disperazione', 'noun', 'b'],
['disperdere', 'verb', 'b'],
['dispetto', 'noun', 'b'],
['dispettoso', 'adjective', 'c'],
['dispiacere', 'verb', 'a'],
['disponibile', 'adjective', 'a'],
['disponibile', 'noun', 'a'],
['disponibilità', 'noun', 'b'],
['disporre', 'verb', 'a'],
['dispositivo', 'adjective', 'b'],
['dispositivo', 'noun', 'b'],
['disposizione', 'noun', 'a'],
['disprezzo', 'noun', 'b'],
['dissenso', 'noun', 'c'],
['distacco', 'noun', 'b'],
['distante', 'pres_part', 'b'],
['distante', 'adjective', 'b'],
['distante', 'adverb', 'b'],
['distanza', 'noun', 'a'],
['distendere', 'verb', 'b'],
['disteso', 'past_part', 'c'],
['disteso', 'adjective', 'c'],
['disteso', 'noun', 'c'],
['distinguere', 'verb', 'a'],
['distintivo', 'adjective', 'c'],
['distintivo', 'noun', 'c'],
['distinto', 'past_part', 'b'],
['distinto', 'adjective', 'b'],
['distinto', 'noun', 'b'],
['distinzione', 'noun', 'b'],
['distrarre', 'verb', 'b'],
['distratto', 'past_part', 'c'],
['distratto', 'adjective', 'c'],
['distrazione', 'noun', 'c'],
['distretto', 'noun', 'b'],
['distribuire', 'verb', 'a'],
['distributore', 'adjective', 'b'],
['distributore', 'noun', 'b'],
['distribuzione', 'noun', 'b'],
['distruggere', 'verb', 'a'],
['distrutto', 'past_part', 'c'],
['distrutto', 'adjective', 'c'],
['distruzione', 'noun', 'b'],
['disturbare', 'verb', 'b'],
['disturbo', 'noun', 'b'],
['disubbidiente', 'pres_part', 'c'],
['disubbidiente', 'adjective', 'c'],
['disubbidienza', 'noun', 'c'],
['disubbidire', 'verb', 'c'],
['dito', 'noun', 'a'],
['ditta', 'noun', 'b'],
['dittatura', 'noun', 'b'],
['divano', 'noun', 'a'],
['divano-letto', 'noun', 'c'],
['divenire', 'verb', 'a'],
['divenire', 'noun', 'a'],
['diventare', 'verb', 'a'],
['diversamente', 'adverb', 'b'],
['diversità', 'noun', 'b'],
['diverso', 'adjective', 'a'],
['diverso', 'pronoun', 'a'],
['divertente', 'pres_part', 'a'],
['divertente', 'adjective', 'a'],
['divertimento', 'noun', 'b'],
['divertire', 'verb', 'a'],
['divertito', 'past_part', 'b'],
['divertito', 'adjective', 'b'],
['dividere', 'verb', 'a'],
['divieto', 'noun', 'b'],
['divinità', 'noun', 'b'],
['divino', 'adjective', 'b'],
['divino', 'noun', 'b'],
['divisa', 'noun', 'b'],
['divisione', 'noun', 'b'],
['divorare', 'verb', 'b'],
['divorziare', 'verb', 'c'],
['divorzio', 'noun', 'b'],
['dizionario', 'noun', 'b'],
['do', 'noun', 'c'],
['doccia', 'noun', 'b'],
['docciaschiuma', 'noun', 'c'],
['docente', 'pres_part', 'b'],
['docente', 'adjective', 'b'],
['docente', 'noun', 'b'],
['docile', 'adjective', 'c'],
['documentare', 'verb', 'b'],
['documentario', 'adjective', 'b'],
['documentario', 'noun', 'b'],
['documentazione', 'noun', 'b'],
['documento', 'noun', 'a'],
['dodici', 'adjective', 'a'],
['dodici', 'noun', 'a'],
['dogana', 'noun', 'c'],
['dolce', 'adjective', 'a'],
['dolce', 'noun', 'a'],
['dolce', 'adverb', 'a'],
['dolcezza', 'noun', 'b'],
['dolcificante', 'pres_part', 'c'],
['dolcificante', 'adjective', 'c'],
['dolcificante', 'noun', 'c'],
['dolciume', 'noun', 'c'],
['dolere', 'verb', 'c'],
['dolersi', 'verb', 'c'],
['dollaro', 'noun', 'a'],
['dolore', 'noun', 'a'],
['doloroso', 'adjective', 'b'],
['domanda', 'noun', 'a'],
['domandare', 'verb', 'a'],
['domani', 'adverb', 'a'],
['domani', 'noun', 'a'],
['domenica', 'noun', 'a'],
['domestica', 'noun', 'c'],
['domestico', 'adjective', 'b'],
['domestico', 'noun', 'b'],
['dominante', 'pres_part', 'b'],
['dominante', 'adjective', 'b'],
['dominante', 'noun', 'b'],
['dominare', 'verb', 'b'],
['dominio', 'noun', 'b'],
['don', 'noun', 'a'],
['donare', 'verb', 'b'],
['dondolare', 'verb', 'c'],
['donna', 'noun', 'a'],
['dono', 'noun', 'b'],
['dopo', 'adverb', 'a'],
['dopo', 'preposition', 'a'],
['dopo', 'conjunction', 'a'],
['dopo', 'adjective', 'a'],
['dopo', 'noun', 'a'],
['dopobarba', 'noun', 'c'],
['doppio', 'adjective', 'a'],
['doppio', 'noun', 'a'],
['doppio', 'adverb', 'a'],
['doppione', 'noun', 'c'],
['dorato', 'past_part', 'b'],
['dorato', 'adjective', 'b'],
['dorato', 'noun', 'b'],
['dormiglione', 'adjective', 'c'],
['dormiglione', 'noun', 'c'],
['dormire', 'verb', 'a'],
['dorso', 'noun', 'b'],
['dose', 'noun', 'b'],
['dotare', 'verb', 'b'],
['dotato', 'past_part', 'b'],
['dotato', 'adjective', 'b'],
['dote', 'noun', 'b'],
['dottore', 'noun', 'a'],
['dottoressa', 'noun', 'b'],
['dottrina', 'noun', 'b'],
['dove', 'adverb', 'a'],
['dove', 'conjunction', 'a'],
['dove', 'noun', 'a'],
['dovere', 'verb', 'a'],
['dovere', 'noun', 'a'],
['dovuto', 'past_part', 'b'],
['dovuto', 'adjective', 'b'],
['dovuto', 'noun', 'b'],
['dozzina', 'noun', 'b'],
['drago', 'noun', 'b'],
['dramma', 'noun', 'b'],
['drammatico', 'adjective', 'b'],
['dritto', 'adjective', 'b'],
['dritto', 'adverb', 'b'],
['dritto', 'noun', 'b'],
['drizzare', 'verb', 'c'],
['droga', 'noun', 'a'],
['drogare', 'verb', 'b'],
['drogato', 'past_part', 'c'],
['drogato', 'adjective', 'c'],
['drogato', 'noun', 'c'],
['dubbio', 'noun', 'a'],
['dubbio', 'adjective', 'b'],
['dubitare', 'verb', 'b'],
['dublinese', 'adjective', 'c'],
['dublinese', 'noun', 'c'],
['due', 'adjective', 'a'],
['due', 'noun', 'a'],
['duecento', 'adjective', 'b'],
['duecento', 'noun', 'b'],
['duello', 'noun', 'b'],
['duemila', 'adjective', 'b'],
['duemila', 'noun', 'b'],
['dunque', 'conjunction', 'a'],
['dunque', 'noun', 'a'],
['duomo', 'noun', 'c'],
['durante', 'pres_part', 'a'],
['durante', 'preposition', 'a'],
['durante', 'noun', 'a'],
['durare', 'verb', 'a'],
['durata', 'noun', 'a'],
['duro', 'adjective', 'a'],
['duro', 'noun', 'a'],
['duro', 'adverb', 'a'],
['e', 'noun', 'c'],
['e', 'conjunction', 'a'],
['ebbene', 'conjunction', 'b'],
['ebraico', 'adjective', 'b'],
['ebraico', 'noun', 'b'],
['ebreo', 'adjective', 'a'],
['ebreo', 'noun', 'a'],
['eccellente', 'pres_part', 'b'],
['eccellente', 'adjective', 'b'],
['eccellenza', 'noun', 'b'],
['eccessivo', 'adjective', 'b'],
['eccesso', 'noun', 'b'],
['eccetera', 'adverb', 'b'],
['eccezionale', 'adjective', 'b'],
['eccezione', 'noun', 'b'],
['eccitare', 'verb', 'b'],
['ecco', 'adverb', 'a'],
['eco', 'noun', 'b'],
['ecologico', 'adjective', 'b'],
['economia', 'noun', 'a'],
['economico', 'adjective', 'a'],
['economico', 'noun', 'a'],
['economista', 'noun', 'b'],
['edicola', 'noun', 'a'],
['edificio', 'noun', 'a'],
['editore', 'noun', 'a'],
['editore', 'adjective', 'a'],
['editoriale', 'adjective', 'b'],
['editoriale', 'noun', 'b'],
['edizione', 'noun', 'a'],
['educare', 'verb', 'b'],
['educativo', 'adjective', 'b'],
['educato', 'past_part', 'c'],
['educato', 'adjective', 'c'],
['educazione', 'noun', 'a'],
['effe', 'noun', 'c'],
['effettivamente', 'adverb', 'a'],
['effettivo', 'adjective', 'b'],
['effettivo', 'noun', 'b'],
['effetto', 'noun', 'a'],
['effettuare', 'verb', 'a'],
['efficace', 'adjective', 'b'],
['efficacia', 'noun', 'b'],
['efficiente', 'adjective', 'b'],
['efficienza', 'noun', 'b'],
['egiziano', 'adjective', 'c'],
['egiziano', 'noun', 'c'],
['egli', 'pronoun', 'a'],
['elaborare', 'verb', 'b'],
['elaborazione', 'noun', 'b'],
['elastico', 'adjective', 'b'],
['elastico', 'noun', 'b'],
['elegante', 'adjective', 'a'],
['eleganza', 'noun', 'b'],
['eleggere', 'verb', 'b'],
['elementare', 'adjective', 'a'],
['elemento', 'noun', 'a'],
['elemosina', 'noun', 'c'],
['elencare', 'verb', 'b'],
['elenco', 'noun', 'a'],
['elettorale', 'adjective', 'b'],
['elettore', 'noun', 'b'],
['elettricista', 'noun', 'c'],
['elettricità', 'noun', 'c'],
['elettrico', 'adjective', 'a'],
['elettrico', 'noun', 'a'],
['elettrodomestico', 'noun', 'c'],
['elettromagnetico', 'adjective', 'b'],
['elettrone', 'noun', 'b'],
['elettronico', 'adjective', 'a'],
['elevare', 'verb', 'b'],
['elevato', 'past_part', 'b'],
['elevato', 'adjective', 'b'],
['elezione', 'noun', 'b'],
['elica', 'noun', 'c'],
['elicottero', 'noun', 'c'],
['eliminare', 'verb', 'a'],
['eliminazione', 'noun', 'b'],
['elle', 'noun', 'c'],
['elmo', 'noun', 'c'],
['e-mail', 'noun', 'a'],
['emanare', 'verb', 'b'],
['emergenza', 'noun', 'b'],
['emergere', 'verb', 'a'],
['emettere', 'verb', 'b'],
['emigrazione', 'noun', 'c'],
['emiliano', 'adjective', 'c'],
['emiliano', 'noun', 'c'],
['emissione', 'noun', 'b'],
['emme', 'noun', 'c'],
['emmenthal', 'noun', 'c'],
['emo', 'noun', 'b'],
['emotivo', 'adjective', 'b'],
['emotivo', 'noun', 'b'],
['emozionante', 'pres_part', 'c'],
['emozionante', 'adjective', 'c'],
['emozionare', 'verb', 'b'],
['emozionato', 'past_part', 'c'],
['emozionato', 'adjective', 'c'],
['emozione', 'noun', 'a'],
['enciclopedia', 'noun', 'c'],
['energetico', 'adjective', 'b'],
['energetico', 'noun', 'b'],
['energia', 'noun', 'a'],
['enne', 'noun', 'c'],
['ennesimo', 'adjective', 'b'],
['enorme', 'adjective', 'a'],
['ente', 'noun', 'a'],
['entità', 'noun', 'b'],
['entrambi', 'pronoun', 'a'],
['entrambi', 'adjective', 'a'],
['entrare', 'verb', 'a'],
['entrare', 'noun', 'a'],
['entrata', 'noun', 'a'],
['entro', 'preposition', 'a'],
['entro', 'adverb', 'a'],
['entusiasmo', 'noun', 'b'],
['entusiasta', 'adjective', 'b'],
['entusiasta', 'noun', 'b'],
['epifania', 'noun', 'c'],
['episodio', 'noun', 'a'],
['epoca', 'noun', 'a'],
['eppure', 'conjunction', 'a'],
['equazione', 'noun', 'b'],
['equilibrio', 'noun', 'a'],
['equino', 'adjective', 'c'],
['equino', 'noun', 'c'],
['equipaggio', 'noun', 'c'],
['equivalere', 'verb', 'b'],
['equivoco', 'adjective', 'b'],
['equivoco', 'noun', 'b'],
['era', 'noun', 'a'],
['erba', 'noun', 'b'],
['erede', 'noun', 'b'],
['eredità', 'noun', 'b'],
['ereditare', 'verb', 'b'],
['ergastolo', 'noun', 'c'],
['ergere', 'verb', 'b'],
['ernia', 'noun', 'c'],
['eroe', 'noun', 'a'],
['eroina', 'noun', 'c'],
['erotico', 'adjective', 'b'],
['erotico', 'noun', 'b'],
['errare', 'verb', 'b'],
['erre', 'noun', 'c'],
['errore', 'noun', 'a'],
['esagerare', 'verb', 'b'],
['esagerato', 'past_part', 'b'],
['esagerato', 'adjective', 'b'],
['esagerato', 'noun', 'b'],
['esagerazione', 'noun', 'c'],
['esagono', 'noun', 'c'],
['esagono', 'adjective', 'c'],
['esaltare', 'verb', 'b'],
['esaltazione', 'noun', 'c'],
['esame', 'noun', 'a'],
['esaminare', 'verb', 'b'],
['esattamente', 'adverb', 'a'],
['esatto', 'adjective', 'a'],
['esatto', 'adverb', 'a'],
['esaurire', 'verb', 'b'],
['esca', 'noun', 'c'],
['eschimese', 'adjective', 'c'],
['eschimese', 'noun', 'c'],
['esclamare', 'verb', 'b'],
['esclamazione', 'noun', 'c'],
['escludere', 'verb', 'a'],
['esclusione', 'noun', 'b'],
['esclusivamente', 'adverb', 'b'],
['esclusivo', 'adjective', 'b'],
['escluso', 'past_part', 'b'],
['escluso', 'adjective', 'b'],
['escluso', 'noun', 'b'],
['esecutivo', 'adjective', 'b'],
['esecutivo', 'noun', 'b'],
['esecuzione', 'noun', 'b'],
['eseguire', 'verb', 'a'],
['esempio', 'noun', 'a'],
['esemplare', 'noun', 'b'],
['esemplare', 'adjective', 'b'],
['esercitare', 'verb', 'b'],
['esercito', 'noun', 'a'],
['esercizio', 'noun', 'a'],
['esibire', 'verb', 'b'],
['esigenza', 'noun', 'a'],
['esigere', 'verb', 'b'],
['esilio', 'noun', 'c'],
['esistente', 'pres_part', 'b'],
['esistente', 'adjective', 'b'],
['esistente', 'noun', 'b'],
['esistenza', 'noun', 'a'],
['esistere', 'verb', 'a'],
['esitare', 'verb', 'b'],
['esito', 'noun', 'b'],
['esordio', 'noun', 'b'],
['espansione', 'noun', 'b'],
['espellere', 'verb', 'b'],
['esperienza', 'noun', 'a'],
['esperimento', 'noun', 'b'],
['esperto', 'past_part', 'a'],
['esperto', 'adjective', 'a'],
['esperto', 'noun', 'a'],
['esplicito', 'adjective', 'b'],
['esplodere', 'verb', 'b'],
['esplorare', 'verb', 'b'],
['esplosione', 'noun', 'b'],
['esplosivo', 'adjective', 'b'],
['esplosivo', 'noun', 'b'],
['esponente', 'pres_part', 'b'],
['esponente', 'noun', 'b'],
['esporre', 'verb', 'a'],
['esposizione', 'noun', 'b'],
['espressione', 'noun', 'a'],
['espresso', 'past_part', 'c'],
['espresso', 'adjective', 'c'],
['espresso', 'noun', 'c'],
['esprimere', 'verb', 'a'],
['essa', 'pronoun', 'a'],
['esse', 'noun', 'c'],
['esse', 'pronoun', 'b'],
['essenza', 'noun', 'b'],
['essenziale', 'adjective', 'b'],
['essenziale', 'noun', 'b'],
['essenzialmente', 'adverb', 'b'],
['essere', 'verb', 'a'],
['essere', 'noun', 'a'],
['essi', 'pronoun', 'a'],
['esso', 'pronoun', 'a'],
['est', 'noun', 'b'],
['est', 'adjective', 'b'],
['estate', 'noun', 'a'],
['estendere', 'verb', 'b'],
['estensione', 'noun', 'b'],
['esterno', 'adjective', 'a'],
['esterno', 'noun', 'a'],
['estero', 'adjective', 'a'],
['estero', 'noun', 'a'],
['estetico', 'adjective', 'b'],
['estivo', 'adjective', 'b'],
['estone', 'adjective', 'c'],
['estone', 'noun', 'c'],
['estraneo', 'adjective', 'b'],
['estraneo', 'noun', 'b'],
['estrarre', 'verb', 'b'],
['estratto', 'past_part', 'b'],
['estratto', 'adjective', 'b'],
['estratto', 'noun', 'b'],
['estrazione', 'noun', 'b'],
['estremamente', 'adverb', 'b'],
['estremità', 'noun', 'b'],
['estremo', 'adjective', 'a'],
['estremo', 'noun', 'a'],
['età', 'noun', 'a'],
['eterno', 'adjective', 'b'],
['eterno', 'noun', 'b'],
['etica', 'noun', 'b'],
['etichetta', 'noun', 'b'],
['etico', 'adjective', 'b'],
['ettaro', 'noun', 'c'],
['etto', 'noun', 'c'],
['euro', 'noun', 'a'],
['europeo', 'adjective', 'a'],
['europeo', 'noun', 'a'],
['evadere', 'verb', 'c'],
['evaporare', 'verb', 'c'],
['evasione', 'noun', 'b'],
['evento', 'noun', 'a'],
['eventuale', 'adjective', 'a'],
['eventualmente', 'adverb', 'b'],
['evidente', 'adjective', 'a'],
['evidentemente', 'adverb', 'a'],
['evidenza', 'noun', 'b'],
['evidenziare', 'verb', 'b'],
['evidenziatore', 'adjective', 'c'],
['evidenziatore', 'noun', 'c'],
['evitare', 'verb', 'a'],
['evocare', 'verb', 'b'],
['evoluzione', 'noun', 'b'],
['ex', 'adjective', 'a'],
['ex', 'noun', 'a'],
['ex', 'preposition', 'a'],
['extra', 'adjective', 'b'],
['extra', 'noun', 'b'],
['fa', 'adverb', 'a'],
['fabbrica', 'noun', 'a'],
['fabbricare', 'verb', 'b'],
['fabbro', 'noun', 'c'],
['faccenda', 'noun', 'b'],
['faccia', 'noun', 'a'],
['facciata', 'noun', 'b'],
['facile', 'adjective', 'a'],
['facile', 'adverb', 'a'],
['facilità', 'noun', 'b'],
['facilitare', 'verb', 'b'],
['facilitazione', 'noun', 'c'],
['facilmente', 'adverb', 'a'],
['facoltà', 'noun', 'b'],
['fagiano', 'noun', 'c'],
['falco', 'noun', 'c'],
['falegname', 'noun', 'c'],
['fallimento', 'noun', 'b'],
['fallire', 'verb', 'b'],
['fallito', 'past_part', 'b'],
['fallito', 'adjective', 'b'],
['fallito', 'noun', 'b'],
['falso', 'adjective', 'a'],
['falso', 'adverb', 'a'],
['falso', 'noun', 'a'],
['fama', 'noun', 'b'],
['fame', 'noun', 'a'],
['famiglia', 'noun', 'a'],
['familiare', 'adjective', 'a'],
['familiare', 'noun', 'a'],
['famoso', 'adjective', 'a'],
['fan', 'noun', 'b'],
['fanale', 'noun', 'c'],
['fanciulla', 'noun', 'b'],
['fanciullo', 'adjective', 'c'],
['fanciullo', 'noun', 'c'],
['fango', 'noun', 'b'],
['fangoso', 'adjective', 'c'],
['fantascienza', 'noun', 'b'],
['fantasia', 'noun', 'a'],
['fantasma', 'noun', 'b'],
['fantastico', 'adjective', 'a'],
['fantastico', 'noun', 'a'],
['fanteria', 'noun', 'c'],
['fantino', 'noun', 'c'],
['fantoccio', 'noun', 'c'],
['fare', 'verb', 'a'],
['fare', 'noun', 'a'],
['farfalla', 'noun', 'b'],
['farina', 'noun', 'b'],
['farmacia', 'noun', 'b'],
['farmaco', 'noun', 'b'],
['faro', 'noun', 'c'],
['fascia', 'noun', 'a'],
['fasciatoio', 'noun', 'c'],
['fascicolo', 'noun', 'b'],
['fascino', 'noun', 'b'],
['fascio', 'noun', 'b'],
['fascismo', 'noun', 'b'],
['fascista', 'adjective', 'b'],
['fascista', 'noun', 'b'],
['fase', 'noun', 'a'],
['fastidio', 'noun', 'a'],
['fastidioso', 'adjective', 'b'],
['fata', 'noun', 'b'],
['fatica', 'noun', 'a'],
['faticare', 'verb', 'b'],
['faticoso', 'adjective', 'b'],
['fatto', 'noun', 'a'],
['fattore', 'noun', 'a'],
['fattoria', 'noun', 'b'],
['fattura', 'noun', 'b'],
['fatturato', 'past_part', 'b'],
['fatturato', 'adjective', 'b'],
['fatturato', 'noun', 'b'],
['fauna', 'noun', 'c'],
['fava', 'noun', 'c'],
['favola', 'noun', 'b'],
['favoloso', 'adjective', 'b'],
['favore', 'noun', 'a'],
['favorevole', 'adjective', 'b'],
['favorire', 'verb', 'b'],
['fax', 'noun', 'b'],
['fazzoletto', 'noun', 'b'],
['febbraio', 'noun', 'a'],
['febbre', 'noun', 'b'],
['fecondare', 'verb', 'c'],
['fede', 'noun', 'a'],
['fedele', 'adjective', 'b'],
['fedele', 'noun', 'b'],
['fedeltà', 'noun', 'b'],
['federa', 'noun', 'c'],
['federale', 'adjective', 'b'],
['federale', 'noun', 'b'],
['fegato', 'noun', 'b'],
['felice', 'adjective', 'a'],
['felicità', 'noun', 'b'],
['felino', 'noun', 'c'],
['felino', 'adjective', 'c'],
['felpa', 'noun', 'c'],
['femmina', 'noun', 'a'],
['femminile', 'adjective', 'a'],
['femminile', 'noun', 'a'],
['fenomeno', 'noun', 'a'],
['feria', 'noun', 'b'],
['feriale', 'adjective', 'c'],
['ferie', 'noun', 'c'],
['ferire', 'verb', 'b'],
['ferita', 'noun', 'a'],
['ferito', 'past_part', 'b'],
['ferito', 'adjective', 'b'],
['ferito', 'noun', 'b'],
['fermaglio', 'noun', 'c'],
['fermare', 'verb', 'a'],
['fermo', 'adjective', 'a'],
['feroce', 'adjective', 'b'],
['ferragosto', 'noun', 'c'],
['ferramenta', 'noun', 'c'],
['ferro', 'noun', 'a'],
['ferrovia', 'noun', 'b'],
['ferroviario', 'adjective', 'b'],
['ferroviere', 'noun', 'c'],
['fertilizzante', 'pres_part', 'c'],
['fertilizzante', 'adjective', 'c'],
['fertilizzante', 'noun', 'c'],
['fessura', 'noun', 'c'],
['festa', 'noun', 'a'],
['festeggiare', 'verb', 'a'],
['festival', 'noun', 'b'],
['festivo', 'adjective', 'c'],
['fetta', 'noun', 'b'],
['fiaba', 'noun', 'b'],
['fiala', 'noun', 'c'],
['fiamma', 'noun', 'b'],
['fiammifero', 'noun', 'c'],
['fiammifero', 'adjective', 'c'],
['fianco', 'noun', 'a'],
['fiatare', 'verb', 'c'],
['fiato', 'noun', 'b'],
['fibbia', 'noun', 'c'],
['fibra', 'noun', 'b'],
['ficcare', 'verb', 'b'],
['fiction', 'noun', 'b'],
['fidanzamento', 'noun', 'c'],
['fidanzarsi', 'verb', 'b'],
['fidanzata', 'noun', 'b'],
['fidanzato', 'past_part', 'b'],
['fidanzato', 'adjective', 'b'],
['fidanzato', 'noun', 'b'],
['fidarsi', 'verb', 'a'],
['fiducia', 'noun', 'a'],
['fiducioso', 'adjective', 'c'],
['fieno', 'noun', 'c'],
['fiera', 'noun', 'b'],
['fiero', 'adjective', 'b'],
['figlia', 'noun', 'a'],
['figliastro', 'noun', 'c'],
['figlio', 'noun', 'a'],
['figura', 'noun', 'a'],
['figurare', 'verb', 'a'],
['figurina', 'noun', 'c'],
['fila', 'noun', 'a'],
['filante', 'pres_part', 'c'],
['filante', 'adjective', 'c'],
['filante', 'noun', 'c'],
['filare', 'verb', 'b'],
['filastrocca', 'noun', 'c'],
['file', 'noun', 'a'],
['filetto', 'noun', 'c'],
['film', 'noun', 'a'],
['filmato', 'past_part', 'b'],
['filmato', 'adjective', 'b'],
['filmato', 'noun', 'b'],
['filo', 'noun', 'a'],
['filosofia', 'noun', 'a'],
['filosofico', 'adjective', 'b'],
['filosofo', 'noun', 'b'],
['filtrare', 'verb', 'b'],
['filtro', 'noun', 'b'],
['finale', 'adjective', 'a'],
['finale', 'noun', 'a'],
['finalità', 'noun', 'b'],
['finalmente', 'adverb', 'a'],
['finanza', 'noun', 'b'],
['finanziamento', 'noun', 'b'],
['finanziare', 'verb', 'b'],
['finanziario', 'adjective', 'a'],
['finanziatore', 'adjective', 'c'],
['finanziatore', 'noun', 'c'],
['finché', 'conjunction', 'a'],
['fine', 'noun', 'a'],
['fine', 'adjective', 'b'],
['finestra', 'noun', 'a'],
['finestrino', 'noun', 'b'],
['fingere', 'verb', 'a'],
['finimondo', 'noun', 'c'],
['finire', 'verb', 'a'],
['finire', 'noun', 'a'],
['finito', 'past_part', 'b'],
['finito', 'adjective', 'b'],
['finlandese', 'adjective', 'c'],
['finlandese', 'noun', 'c'],
['fino', 'preposition', 'a'],
['fino', 'adverb', 'a'],
['finocchio', 'noun', 'c'],
['finora', 'adverb', 'b'],
['finta', 'noun', 'b'],
['finto', 'past_part', 'a'],
['finto', 'adjective', 'a'],
['fiocco', 'noun', 'c'],
['fionda', 'noun', 'c'],
['fioraio', 'noun', 'c'],
['fiore', 'noun', 'a'],
['fiorentino', 'adjective', 'b'],
['fiorentino', 'noun', 'b'],
['fiorito', 'past_part', 'c'],
['fiorito', 'adjective', 'c'],
['firma', 'noun', 'a'],
['firmare', 'verb', 'a'],
['fiscale', 'adjective', 'b'],
['fiscale', 'noun', 'b'],
['fisicamente', 'adverb', 'b'],
['fisico', 'adjective', 'a'],
['fisico', 'noun', 'a'],
['fissare', 'verb', 'a'],
['fisso', 'adjective', 'a'],
['fisso', 'adverb', 'a'],
['fisso', 'noun', 'a'],
['fitto', 'past_part', 'b'],
['fitto', 'adjective', 'b'],
['fitto', 'adverb', 'b'],
['fitto', 'noun', 'b'],
['fiume', 'noun', 'a'],
['fiuto', 'noun', 'c'],
['flash', 'noun', 'b'],
['flauto', 'noun', 'c'],
['flessibile', 'adjective', 'b'],
['flessibile', 'noun', 'b'],
['flora', 'noun', 'c'],
['fluido', 'adjective', 'b'],
['fluido', 'noun', 'b'],
['fluoro', 'noun', 'c'],
['flusso', 'noun', 'b'],
['foca', 'noun', 'c'],
['focaccia', 'noun', 'c'],
['fodera', 'noun', 'c'],
['foderare', 'verb', 'c'],
['foglia', 'noun', 'b'],
['foglio', 'noun', 'a'],
['fogna', 'noun', 'c'],
['folla', 'noun', 'b'],
['folle', 'adjective', 'b'],
['folle', 'noun', 'b'],
['follia', 'noun', 'b'],
['fondamentale', 'adjective', 'a'],
['fondamentale', 'noun', 'a'],
['fondamentalmente', 'adverb', 'b'],
['fondamento', 'noun', 'b'],
['fondare', 'verb', 'a'],
['fondatore', 'noun', 'b'],
['fondazione', 'noun', 'b'],
['fondere', 'verb', 'b'],
['fondo', 'adjective', 'loc-comando'],
['fondo', 'noun', 'loc-comando'],
['fondo', 'adverb', 'loc-comando'],
['fontana', 'noun', 'b'],
['fontanella', 'noun', 'c'],
['fonte', 'noun', 'a'],
['forare', 'verb', 'b'],
['forbice', 'noun', 'c'],
['forchetta', 'noun', 'c'],
['forcina', 'noun', 'c'],
['foresta', 'noun', 'b'],
['forestale', 'adjective', 'c'],
['forestale', 'noun', 'c'],
['forfora', 'noun', 'c'],
['forma', 'noun', 'a'],
['formaggino', 'noun', 'c'],
['formaggio', 'noun', 'b'],
['formale', 'adjective', 'b'],
['formare', 'verb', 'a'],
['formato', 'past_part', 'b'],
['formato', 'adjective', 'b'],
['formato', 'noun', 'b'],
['formazione', 'noun', 'a'],
['formula', 'noun', 'a'],
['formulare', 'verb', 'b'],
['fornace', 'noun', 'c'],
['fornaio', 'noun', 'c'],
['fornello', 'noun', 'b'],
['fornire', 'verb', 'a'],
['fornitore', 'adjective', 'b'],
['fornitore', 'noun', 'b'],
['forno', 'noun', 'b'],
['foro', 'noun', 'b'],
['forse', 'adverb', 'a'],
['forse', 'noun', 'a'],
['forte', 'adjective', 'a'],
['forte', 'adverb', 'a'],
['forte', 'noun', 'a'],
['fortemente', 'adverb', 'b'],
['fortuna', 'noun', 'a'],
['fortunatamente', 'adverb', 'b'],
['fortunato', 'adjective', 'b'],
['forum', 'noun', 'b'],
['forza', 'noun', 'a'],
['forzare', 'verb', 'b'],
['fosforescente', 'adjective', 'c'],
['fossa', 'noun', 'b'],
['fossetta', 'noun', 'c'],
['fosso', 'noun', 'c'],
['foto', 'noun', 'a'],
['fotografare', 'verb', 'b'],
['fotografia', 'noun', 'a'],
['fotografico', 'adjective', 'b'],
['fotografo', 'noun', 'b'],
['fottere', 'verb', 'b'],
['foulard', 'noun', 'c'],
['fra', 'preposition', 'a'],
['fracasso', 'noun', 'c'],
['fragile', 'adjective', 'b'],
['frammento', 'noun', 'b'],
['francamente', 'adverb', 'b'],
['francese', 'adjective', 'a'],
['francese', 'noun', 'a'],
['francobollo', 'noun', 'c'],
['frangia', 'noun', 'c'],
['frase', 'noun', 'a'],
['fratello', 'noun', 'a'],
['frazione', 'noun', 'b'],
['freccia', 'noun', 'b'],
['freddezza', 'noun', 'c'],
['freddo', 'adjective', 'a'],
['freddo', 'noun', 'a'],
['fregare', 'verb', 'a'],
['frenare', 'verb', 'b'],
['frenetico', 'adjective', 'b'],
['freno', 'noun', 'b'],
['frequentare', 'verb', 'a'],
['frequente', 'adjective', 'b'],
['frequenza', 'noun', 'b'],
['fresco', 'adjective', 'a'],
['fresco', 'noun', 'a'],
['fretta', 'noun', 'a'],
['frigo', 'noun', 'b'],
['frigorifero', 'adjective', 'b'],
['frigorifero', 'noun', 'b'],
['fringuello', 'noun', 'c'],
['frittata', 'noun', 'c'],
['fritto', 'past_part', 'c'],
['fritto', 'adjective', 'c'],
['fritto', 'noun', 'c'],
['friulano', 'adjective', 'c'],
['friulano', 'noun', 'c'],
['fronte', 'noun', 'a'],
['frontiera', 'noun', 'b'],
['frugare', 'verb', 'b'],
['frumento', 'noun', 'c'],
['fruscio', 'noun', 'c'],
['frusta', 'noun', 'c'],
['frutta', 'noun', 'b'],
['fruttivendolo', 'noun', 'c'],
['frutto', 'noun', 'a'],
['fucile', 'noun', 'b'],
['fuga', 'noun', 'a'],
['fuggire', 'verb', 'a'],
['fulmine', 'noun', 'b'],
['fumare', 'verb', 'a'],
['fumetto', 'noun', 'b'],
['fumo', 'noun', 'a'],
['fumo', 'adjective', 'a'],
['fune', 'noun', 'c'],
['funerale', 'noun', 'b'],
['funerale', 'adjective', 'b'],
['fungo', 'noun', 'b'],
['funzionale', 'adjective', 'b'],
['funzionale', 'noun', 'b'],
['funzionamento', 'noun', 'b'],
['funzionare', 'verb', 'a'],
['funzionario', 'noun', 'b'],
['funzione', 'noun', 'a'],
['fuoco', 'noun', 'loc-comando'],
['fuori', 'adverb', 'a'],
['fuori', 'preposition', 'a'],
['fuori', 'noun', 'a'],
['fuori', 'adjective', 'a'],
['furbo', 'adjective', 'b'],
['furbo', 'noun', 'b'],
['furfante', 'noun', 'c'],
['furgone', 'noun', 'b'],
['furia', 'noun', 'b'],
['furioso', 'adjective', 'b'],
['furto', 'noun', 'b'],
['fusione', 'noun', 'b'],
['fuso', 'past_part', 'b'],
['fuso', 'adjective', 'b'],
['fuso', 'noun', 'b'],
['futuro', 'adjective', 'a'],
['futuro', 'noun', 'a'],
['gabbia', 'noun', 'b'],
['galassia', 'noun', 'b'],
['galeotto', 'noun', 'c'],
['galera', 'noun', 'b'],
['galleggiare', 'verb', 'c'],
['galleria', 'noun', 'b'],
['gallese', 'adjective', 'c'],
['gallese', 'noun', 'c'],
['galletta', 'noun', 'c'],
['gallina', 'noun', 'b'],
['gallo', 'noun', 'c'],
['gamba', 'noun', 'a'],
['gambero', 'noun', 'c'],
['gambo', 'noun', 'c'],
['ganascia', 'noun', 'c'],
['gancio', 'noun', 'c'],
['gara', 'noun', 'a'],
['garage', 'noun', 'b'],
['garantire', 'verb', 'a'],
['garanzia', 'noun', 'b'],
['garbo', 'noun', 'c'],
['gargarismo', 'noun', 'c'],
['garofano', 'noun', 'c'],
['garza', 'noun', 'c'],
['gas', 'noun', 'a'],
['gasolio', 'noun', 'c'],
['gassosa', 'noun', 'c'],
['gastronomia', 'noun', 'c'],
['gatto', 'noun', 'a'],
['gavetta', 'noun', 'c'],
['gay', 'adjective', 'b'],
['gay', 'noun', 'b'],
['gazza', 'noun', 'c'],
['gelateria', 'noun', 'c'],
['gelatina', 'noun', 'c'],
['gelato', 'past_part', 'b'],
['gelato', 'adjective', 'b'],
['gelato', 'noun', 'b'],
['gelido', 'adjective', 'b'],
['gelo', 'noun', 'c'],
['gelosia', 'noun', 'b'],
['geloso', 'adjective', 'b'],
['gelsomino', 'noun', 'c'],
['gemello', 'adjective', 'b'],
['gemello', 'noun', 'b'],
['gemma', 'noun', 'c'],
['gene', 'noun', 'b'],
['generale', 'adjective', 'a'],
['generale', 'noun', 'a'],
['generalmente', 'adverb', 'b'],
['generare', 'verb', 'a'],
['generazione', 'noun', 'a'],
['genere', 'noun', 'a'],
['generico', 'adjective', 'b'],
['generico', 'noun', 'b'],
['generosità', 'noun', 'c'],
['generoso', 'adjective', 'b'],
['genetico', 'adjective', 'b'],
['gengiva', 'noun', 'c'],
['geniale', 'adjective', 'b'],
['genio', 'noun', 'b'],
['genitore', 'noun', 'a'],
['gennaio', 'noun', 'a'],
['genovese', 'adjective', 'c'],
['genovese', 'noun', 'c'],
['gente', 'noun', 'a'],
['gentile', 'adjective', 'a'],
['gentile', 'noun', 'a'],
['genuino', 'adjective', 'c'],
['geografico', 'adjective', 'b'],
['geografo', 'noun', 'c'],
['geometra', 'noun', 'c'],
['geometria', 'noun', 'c'],
['geometrico', 'adjective', 'c'],
['gesso', 'noun', 'b'],
['gestione', 'noun', 'a'],
['gestire', 'verb', 'a'],
['gesto', 'noun', 'a'],
['gestore', 'noun', 'b'],
['gettare', 'verb', 'a'],
['gettone', 'noun', 'c'],
['ghiaccio', 'noun', 'b'],
['ghiacciolo', 'noun', 'c'],
['ghianda', 'noun', 'c'],
['ghiro', 'noun', 'c'],
['gi', 'noun', 'c'],
['già', 'adverb', 'a'],
['giacca', 'noun', 'a'],
['giacere', 'verb', 'b'],
['giaguaro', 'noun', 'c'],
['giallo', 'adjective', 'a'],
['giallo', 'noun', 'a'],
['giapponese', 'adjective', 'a'],
['giapponese', 'noun', 'a'],
['giardinaggio', 'noun', 'c'],
['giardiniera', 'noun', 'c'],
['giardino', 'noun', 'a'],
['gigante', 'noun', 'b'],
['gigante', 'adjective', 'b'],
['gigantesco', 'adjective', 'b'],
['giglio', 'noun', 'b'],
['ginnastica', 'noun', 'b'],
['ginocchio', 'noun', 'a'],
['giocare', 'verb', 'a'],
['giocatore', 'noun', 'a'],
['giocattolo', 'noun', 'b'],
['gioco', 'noun', 'a'],
['gioia', 'noun', 'a'],
['gioiello', 'noun', 'b'],
['gioioso', 'adjective', 'c'],
['giordano', 'adjective', 'c'],
['giordano', 'noun', 'c'],
['giornale', 'noun', 'a'],
['giornale', 'adjective', 'a'],
['giornalino', 'noun', 'c'],
['giornalista', 'noun', 'a'],
['giornata', 'noun', 'a'],
['giorno', 'noun', 'a'],
['giostra', 'noun', 'c'],
['giovane', 'adjective', 'a'],
['giovane', 'noun', 'a'],
['giovanile', 'adjective', 'b'],
['giovedì', 'noun', 'b'],
['gioventù', 'noun', 'b'],
['giovinezza', 'noun', 'b'],
['giraffa', 'noun', 'c'],
['girare', 'verb', 'a'],
['giravite', 'noun', 'c'],
['giretto', 'noun', 'c'],
['giro', 'noun', 'a'],
['gironzolare', 'verb', 'c'],
['girotondo', 'noun', 'c'],
['gita', 'noun', 'b'],
['giù', 'adverb', 'a'],
['giù', 'adjective', 'a'],
['giubba', 'noun', 'c'],
['giubbotto', 'noun', 'c'],
['giudicare', 'verb', 'a'],
['giudice', 'noun', 'a'],
['giudiziario', 'adjective', 'b'],
['giudizio', 'noun', 'a'],
['giugno', 'noun', 'a'],
['giungere', 'verb', 'a'],
['giungla', 'noun', 'c'],
['giuramento', 'noun', 'b'],
['giurare', 'verb', 'a'],
['giuria', 'noun', 'c'],
['giuridico', 'adjective', 'b'],
['giustamente', 'adverb', 'b'],
['giustificare', 'verb', 'b'],
['giustizia', 'noun', 'a'],
['giusto', 'adjective', 'a'],
['giusto', 'noun', 'a'],
['giusto', 'adverb', 'a'],
['gli', 'pronoun', 'a'],
['glicine', 'noun', 'c'],
['global', 'adjective', 'b'],
['global', 'noun', 'b'],
['globale', 'adjective', 'b'],
['gloria', 'noun', 'b'],
['gnocco', 'noun', 'c'],
['gnomo', 'noun', 'c'],
['goal', 'noun', 'b'],
['gobbo', 'adjective', 'c'],
['gobbo', 'noun', 'c'],
['goccia', 'noun', 'b'],
['godere', 'verb', 'a'],
['gola', 'noun', 'b'],
['goloso', 'adjective', 'c'],
['gomito', 'noun', 'b'],
['gomitolo', 'noun', 'c'],
['gomma', 'noun', 'b'],
['gonfiare', 'verb', 'b'],
['gonfio', 'adjective', 'b'],
['gonfio', 'noun', 'b'],
['gonna', 'noun', 'b'],
['gorgonzola', 'noun', 'c'],
['gorilla', 'noun', 'c'],
['gossip', 'noun', 'b'],
['governare', 'verb', 'b'],
['governatore', 'noun', 'b'],
['governo', 'noun', 'a'],
['gradino', 'noun', 'b'],
['gradire', 'verb', 'b'],
['grado', 'noun', 'a'],
['graffiare', 'verb', 'c'],
['graffio', 'noun', 'c'],
['grafico', 'adjective', 'b'],
['grafico', 'noun', 'b'],
['grammatica', 'noun', 'b'],
['grammo', 'noun', 'b'],
['grana', 'noun', 'c'],
['granaio', 'noun', 'c'],
['granchio', 'noun', 'c'],
['grande', 'adjective', 'a'],
['grande', 'noun', 'a'],
['grandezza', 'noun', 'b'],
['grandine', 'noun', 'c'],
['grandioso', 'adjective', 'b'],
['grano', 'noun', 'b'],
['granturco', 'noun', 'c'],
['grappa', 'noun', 'c'],
['grasso', 'adjective', 'a'],
['grasso', 'noun', 'a'],
['gratis', 'adverb', 'b'],
['gratis', 'adjective', 'b'],
['grattare', 'verb', 'b'],
['grattugiato', 'past_part', 'c'],
['grattugiato', 'adjective', 'c'],
['gratuito', 'adjective', 'b'],
['grave', 'adjective', 'a'],
['grave', 'noun', 'a'],
['grave', 'adverb', 'a'],
['gravidanza', 'noun', 'b'],
['gravità', 'noun', 'b'],
['grazie', 'exclamation', 'a'],
['grazie', 'noun', 'a'],
['grazioso', 'adjective', 'c'],
['greco', 'adjective', 'a'],
['greco', 'noun', 'a'],
['grembiule', 'noun', 'c'],
['gridare', 'verb', 'a'],
['grido', 'noun', 'b'],
['grigio', 'adjective', 'a'],
['grigio', 'noun', 'a'],
['griglia', 'noun', 'c'],
['grinza', 'noun', 'c'],
['grissino', 'noun', 'c'],
['grossista', 'noun', 'c'],
['grosso', 'adjective', 'a'],
['grosso', 'noun', 'a'],
['grotta', 'noun', 'b'],
['gru', 'noun', 'c'],
['gruppo', 'noun', 'a'],
['guadagnare', 'verb', 'a'],
['guadagno', 'noun', 'b'],
['guaio', 'noun', 'b'],
['guaire', 'verb', 'c'],
['guancia', 'noun', 'b'],
['guanciale', 'noun', 'c'],
['guanciale', 'adjective', 'c'],
['guanto', 'noun', 'b'],
['guardare', 'verb', 'a'],
['guardaroba', 'noun', 'c'],
['guardia', 'noun', 'a'],
['guarire', 'verb', 'b'],
['guarnizione', 'noun', 'c'],
['guasto', 'noun', 'c'],
['guerra', 'noun', 'a'],
['guerriero', 'noun', 'b'],
['guerriero', 'adjective', 'b'],
['gufo', 'noun', 'c'],
['guida', 'noun', 'a'],
['guidare', 'verb', 'a'],
['guidatore', 'noun', 'c'],
['guinzaglio', 'noun', 'c'],
['gustare', 'verb', 'b'],
['gusto', 'noun', 'a'],
['gustoso', 'adjective', 'c'],
['hamburger', 'noun', 'c'],
['hobby', 'noun', 'b'],
['home', 'noun', 'b'],
['hotel', 'noun', 'b'],
['hyperlink', 'noun', 'b'],
['i', 'noun', 'c'],
['i', 'determiner', 'b'],
['icona', 'noun', 'b'],
['ics', 'noun', 'c'],
['idea', 'noun', 'a'],
['ideale', 'adjective', 'a'],
['ideale', 'noun', 'a'],
['ideare', 'verb', 'b'],
['identico', 'adjective', 'b'],
['identico', 'noun', 'b'],
['identificare', 'verb', 'a'],
['identificazione', 'noun', 'b'],
['identità', 'noun', 'a'],
['ideologia', 'noun', 'b'],
['ideologico', 'adjective', 'b'],
['idiota', 'adjective', 'a'],
['idiota', 'noun', 'a'],
['idraulico', 'adjective', 'b'],
['idraulico', 'noun', 'b'],
['idrico', 'adjective', 'b'],
['idrogeno', 'noun', 'b'],
['ieri', 'adverb', 'a'],
['ieri', 'noun', 'a'],
['igiene', 'noun', 'c'],
['ignorante', 'pres_part', 'b'],
['ignorante', 'adjective', 'b'],
['ignorante', 'noun', 'b'],
['ignoranza', 'noun', 'b'],
['ignorare', 'verb', 'a'],
['ignoto', 'adjective', 'b'],
['ignoto', 'noun', 'b'],
['il', 'determiner', 'a'],
['il', 'pronoun', 'a'],
['illecito', 'adjective', 'b'],
['illecito', 'noun', 'b'],
['illegale', 'adjective', 'b'],
['illegale', 'noun', 'b'],
['illegittimo', 'adjective', 'c'],
['illegittimo', 'noun', 'c'],
['illudere', 'verb', 'b'],
['illuminare', 'verb', 'b'],
['illuminato', 'past_part', 'b'],
['illuminato', 'adjective', 'b'],
['illuminato', 'noun', 'b'],
['illusione', 'noun', 'b'],
['illustrare', 'verb', 'b'],
['illustre', 'adjective', 'b'],
['imballare', 'verb', 'c'],
['imbarazzante', 'pres_part', 'b'],
['imbarazzante', 'adjective', 'b'],
['imbarazzato', 'past_part', 'b'],
['imbarazzato', 'adjective', 'b'],
['imbarazzo', 'noun', 'b'],
['imbattersi', 'verb', 'b'],
['imbecille', 'adjective', 'b'],
['imbecille', 'noun', 'b'],
['imbiancare', 'verb', 'c'],
['imbianchino', 'noun', 'c'],
['imbottigliare', 'verb', 'c'],
['imbrogliare', 'verb', 'c'],
['imbroglio', 'noun', 'c'],
['imbuto', 'noun', 'c'],
['imitare', 'verb', 'b'],
['immaginare', 'verb', 'a'],
['immaginare', 'noun', 'a'],
['immaginario', 'adjective', 'b'],
['immaginario', 'noun', 'b'],
['immaginazione', 'noun', 'b'],
['immagine', 'noun', 'a'],
['immaturo', 'adjective', 'c'],
['immediatamente', 'adverb', 'a'],
['immediato', 'adjective', 'b'],
['immediato', 'noun', 'b'],
['immenso', 'adjective', 'b'],
['immenso', 'noun', 'b'],
['immergere', 'verb', 'b'],
['immigrato', 'past_part', 'b'],
['immigrato', 'adjective', 'b'],
['immigrato', 'noun', 'b'],
['immobile', 'adjective', 'a'],
['immobile', 'noun', 'a'],
['immobiliare', 'adjective', 'b'],
['immobiliare', 'noun', 'b'],
['immondizia', 'noun', 'c'],
['impallidire', 'verb', 'c'],
['imparare', 'verb', 'a'],
['impastare', 'verb', 'c'],
['impatto', 'noun', 'b'],
['impaziente', 'adjective', 'c'],
['impaziente', 'noun', 'c'],
['impazzire', 'verb', 'b'],
['impedire', 'verb', 'a'],
['impegnare', 'verb', 'a'],
['impegnativo', 'adjective', 'b'],
['impegnato', 'past_part', 'c'],
['impegnato', 'adjective', 'c'],
['impegno', 'noun', 'a'],
['imperare', 'verb', 'b'],
['imperatore', 'noun', 'b'],
['imperiale', 'adjective', 'b'],
['imperiale', 'noun', 'b'],
['impermeabile', 'adjective', 'c'],
['impermeabile', 'noun', 'c'],
['impero', 'noun', 'b'],
['impero', 'adjective', 'b'],
['impianto', 'noun', 'a'],
['impiegare', 'verb', 'a'],
['impiegato', 'past_part', 'b'],
['impiegato', 'adjective', 'b'],
['impiegato', 'noun', 'b'],
['impiego', 'noun', 'b'],
['implicare', 'verb', 'b'],
['imporre', 'verb', 'a'],
['importante', 'pres_part', 'a'],
['importante', 'adjective', 'a'],
['importante', 'noun', 'a'],
['importanza', 'noun', 'a'],
['importare', 'verb', 'a'],
['importo', 'noun', 'b'],
['impossibile', 'adjective', 'a'],
['impossibile', 'noun', 'a'],
['impostare', 'verb', 'b'],
['impostazione', 'noun', 'b'],
['impreciso', 'adjective', 'c'],
['imprenditore', 'noun', 'b'],
['impresa', 'noun', 'a'],
['impressionante', 'pres_part', 'b'],
['impressionante', 'adjective', 'b'],
['impressionare', 'verb', 'b'],
['impressione', 'noun', 'a'],
['imprevisto', 'adjective', 'b'],
['imprevisto', 'noun', 'b'],
['imprigionare', 'verb', 'c'],
['improbabile', 'adjective', 'b'],
['impronta', 'noun', 'b'],
['improvvisamente', 'adverb', 'b'],
['improvvisare', 'verb', 'b'],
['improvviso', 'adjective', 'a'],
['improvviso', 'noun', 'a'],
['imprudente', 'adjective', 'c'],
['imprudente', 'noun', 'c'],
['impulsivo', 'adjective', 'c'],
['impulsivo', 'noun', 'c'],
['impulso', 'noun', 'b'],
['imputata', 'noun', 'b'],
['imputato', 'past_part', 'a'],
['imputato', 'adjective', 'a'],
['imputato', 'noun', 'a'],
['in', 'preposition', 'a'],
['inaspettato', 'adjective', 'b'],
['inaugurare', 'verb', 'b'],
['incamminare', 'verb', 'c'],
['incantare', 'verb', 'c'],
['incapace', 'adjective', 'b'],
['incapace', 'noun', 'b'],
['incapacità', 'noun', 'b'],
['incaricare', 'verb', 'b'],
['incarico', 'noun', 'b'],
['incartare', 'verb', 'c'],
['incassare', 'verb', 'b'],
['incasso', 'noun', 'c'],
['incastrare', 'verb', 'b'],
['incatenare', 'verb', 'c'],
['incazzarsi', 'verb', 'b'],
['incendio', 'noun', 'b'],
['incertezza', 'noun', 'b'],
['incerto', 'adjective', 'b'],
['incerto', 'noun', 'b'],
['inchiesta', 'noun', 'b'],
['inchiodare', 'verb', 'c'],
['incidente', 'noun', 'a'],
['incidere', 'verb', 'b'],
['incinta', 'adjective', 'b'],
['incitare', 'verb', 'c'],
['incivile', 'adjective', 'c'],
['incivile', 'noun', 'c'],
['includere', 'verb', 'b'],
['incluso', 'past_part', 'b'],
['incluso', 'adjective', 'b'],
['incluso', 'noun', 'b'],
['incollare', 'verb', 'b'],
['incominciare', 'verb', 'b'],
['incompleto', 'adjective', 'c'],
['incomprensibile', 'adjective', 'b'],
['inconsolabile', 'adjective', 'c'],
['incontentabile', 'adjective', 'c'],
['incontrare', 'verb', 'a'],
['incontro', 'noun', 'a'],
['incontro', 'adverb', 'b'],
['incoraggiare', 'verb', 'b'],
['incoronare', 'verb', 'c'],
['incorreggibile', 'adjective', 'c'],
['incredibile', 'adjective', 'a'],
['incremento', 'noun', 'b'],
['incrinare', 'verb', 'c'],
['incrociare', 'verb', 'b'],
['incrocio', 'noun', 'c'],
['incubo', 'noun', 'b'],
['incurabile', 'adjective', 'c'],
['incurabile', 'noun', 'c'],
['incuriosire', 'verb', 'b'],
['indagare', 'verb', 'b'],
['indagine', 'noun', 'a'],
['indescrivibile', 'adjective', 'c'],
['indiano', 'adjective', 'b'],
['indiano', 'noun', 'b'],
['indicare', 'verb', 'a'],
['indicazione', 'noun', 'a'],
['indice', 'noun', 'a'],
['indice', 'adjective', 'a'],
['indietreggiare', 'verb', 'c'],
['indietro', 'adverb', 'a'],
['indietro', 'adjective', 'a'],
['indietro', 'loc-comando', 'a'],
['indifeso', 'adjective', 'c'],
['indifferente', 'adjective', 'b'],
['indifferente', 'noun', 'b'],
['indifferenza', 'noun', 'b'],
['indigestione', 'noun', 'c'],
['indimenticabile', 'adjective', 'c'],
['indipendente', 'adjective', 'b'],
['indipendente', 'noun', 'b'],
['indipendentemente', 'adverb', 'b'],
['indipendenza', 'noun', 'b'],
['indiretto', 'adjective', 'b'],
['indirizzare', 'verb', 'b'],
['indirizzo', 'noun', 'a'],
['indisciplinato', 'adjective', 'c'],
['indispensabile', 'adjective', 'b'],
['indispensabile', 'noun', 'b'],
['individuale', 'adjective', 'b'],
['individuare', 'verb', 'a'],
['individuo', 'noun', 'a'],
['individuo', 'adjective', 'a'],
['indizio', 'noun', 'b'],
['indossare', 'verb', 'a'],
['indovinare', 'verb', 'b'],
['indovinello', 'noun', 'c'],
['indubbiamente', 'adverb', 'b'],
['indumento', 'noun', 'c'],
['indurre', 'verb', 'b'],
['industria', 'noun', 'a'],
['industriale', 'adjective', 'a'],
['industriale', 'noun', 'a'],
['inedito', 'adjective', 'b'],
['inefficace', 'adjective', 'c'],
['inerte', 'adjective', 'c'],
['inesistente', 'adjective', 'b'],
['inesperienza', 'noun', 'c'],
['inesperto', 'adjective', 'c'],
['inevitabile', 'adjective', 'b'],
['inevitabile', 'noun', 'b'],
['inevitabilmente', 'adverb', 'b'],
['infame', 'adjective', 'c'],
['infame', 'noun', 'c'],
['infantile', 'adjective', 'b'],
['infanzia', 'noun', 'b'],
['infarto', 'noun', 'b'],
['infatti', 'conjunction', 'a'],
['infatti', 'adverb', 'a'],
['infedele', 'adjective', 'c'],
['infedele', 'noun', 'c'],
['infelice', 'adjective', 'b'],
['infelice', 'noun', 'b'],
['inferiore', 'adjective', 'a'],
['infermiera', 'noun', 'b'],
['infermiere', 'noun', 'c'],
['inferno', 'noun', 'b'],
['inferno', 'adjective', 'b'],
['infezione', 'noun', 'b'],
['infilare', 'verb', 'a'],
['infine', 'adverb', 'a'],
['infinito', 'adjective', 'a'],
['infinito', 'noun', 'a'],
['influenza', 'noun', 'b'],
['influenzare', 'verb', 'b'],
['informare', 'verb', 'a'],
['informatica', 'noun', 'b'],
['informatico', 'adjective', 'b'],
['informatico', 'noun', 'b'],
['informativo', 'adjective', 'b'],
['informazione', 'noun', 'a'],
['infradito', 'adjective', 'c'],
['infradito', 'noun', 'c'],
['infrastruttura', 'noun', 'b'],
['infuriare', 'verb', 'b'],
['infuso', 'past_part', 'c'],
['infuso', 'adjective', 'c'],
['infuso', 'noun', 'c'],
['ingannare', 'verb', 'b'],
['inganno', 'noun', 'b'],
['ingegnere', 'noun', 'b'],
['ingegneria', 'noun', 'b'],
['ingelosire', 'verb', 'c'],
['ingenuo', 'adjective', 'b'],
['ingenuo', 'noun', 'b'],
['ingessare', 'verb', 'c'],
['ingiusto', 'adjective', 'b'],
['ingiusto', 'noun', 'b'],
['inglese', 'adjective', 'a'],
['inglese', 'noun', 'a'],
['ingoiare', 'verb', 'b'],
['ingorgo', 'noun', 'c'],
['ingrandire', 'verb', 'c'],
['ingrassare', 'verb', 'b'],
['ingrediente', 'noun', 'b'],
['ingresso', 'noun', 'a'],
['iniezione', 'noun', 'c'],
['iniziale', 'adjective', 'a'],
['iniziale', 'noun', 'a'],
['inizialmente', 'adverb', 'b'],
['iniziare', 'verb', 'a'],
['iniziativa', 'noun', 'a'],
['inizio', 'noun', 'a'],
['innamorarsi', 'verb', 'a'],
['innamorato', 'past_part', 'b'],
['innamorato', 'adjective', 'b'],
['innamorato', 'noun', 'b'],
['innanzitutto', 'adverb', 'b'],
['innervosire', 'verb', 'c'],
['innocente', 'adjective', 'b'],
['innocente', 'noun', 'b'],
['innocuo', 'adjective', 'b'],
['innovativo', 'adjective', 'b'],
['innovazione', 'noun', 'b'],
['inoltre', 'adverb', 'a'],
['inquadrare', 'verb', 'b'],
['inquietante', 'pres_part', 'b'],
['inquietante', 'adjective', 'b'],
['inquinamento', 'noun', 'b'],
['inquinare', 'verb', 'c'],
['inquinato', 'past_part', 'c'],
['inquinato', 'adjective', 'c'],
['insalata', 'noun', 'b'],
['insegna', 'noun', 'b'],
['insegnamento', 'noun', 'b'],
['insegnante', 'pres_part', 'a'],
['insegnante', 'adjective', 'a'],
['insegnante', 'noun', 'a'],
['insegnare', 'verb', 'a'],
['inseguire', 'verb', 'b'],
['inseparabile', 'adjective', 'c'],
['inseparabile', 'noun', 'c'],
['inserimento', 'noun', 'b'],
['inserire', 'verb', 'a'],
['insetticida', 'adjective', 'c'],
['insetto', 'noun', 'b'],
['insieme', 'adverb', 'a'],
['insieme', 'noun', 'a'],
['insinuare', 'verb', 'b'],
['insistere', 'verb', 'a'],
['insoddisfatto', 'adjective', 'c'],
['insolito', 'adjective', 'b'],
['insolito', 'noun', 'b'],
['insomma', 'adverb', 'a'],
['insopportabile', 'adjective', 'b'],
['insospettire', 'verb', 'c'],
['installare', 'verb', 'b'],
['insuccesso', 'noun', 'c'],
['insultare', 'verb', 'b'],
['insulto', 'noun', 'b'],
['intanto', 'adverb', 'a'],
['intasare', 'verb', 'c'],
['intatto', 'adjective', 'b'],
['integrale', 'adjective', 'b'],
['integrale', 'noun', 'b'],
['integrare', 'verb', 'b'],
['integrazione', 'noun', 'b'],
['intellettuale', 'adjective', 'b'],
['intellettuale', 'noun', 'b'],
['intelligente', 'adjective', 'a'],
['intelligenza', 'noun', 'b'],
['intendere', 'verb', 'a'],
['intensità', 'noun', 'b'],
['intenso', 'adjective', 'a'],
['intento', 'noun', 'b'],
['intenzione', 'noun', 'a'],
['interagire', 'verb', 'b'],
['interamente', 'adverb', 'b'],
['interazione', 'noun', 'b'],
['intercettare', 'verb', 'b'],
['intercettazione', 'noun', 'b'],
['interessante', 'pres_part', 'a'],
['interessante', 'adjective', 'a'],
['interessare', 'verb', 'a'],
['interessato', 'past_part', 'b'],
['interessato', 'adjective', 'b'],
['interessato', 'noun', 'b'],
['interesse', 'noun', 'a'],
['interiore', 'adjective', 'b'],
['interiore', 'noun', 'b'],
['interlocutore', 'noun', 'b'],
['internazionale', 'adjective', 'a'],
['internazionale', 'noun', 'a'],
['internet', 'noun', 'a'],
['interno', 'adjective', 'a'],
['interno', 'noun', 'a'],
['intero', 'adjective', 'a'],
['intero', 'noun', 'a'],
['interpretare', 'verb', 'a'],
['interpretazione', 'noun', 'b'],
['interprete', 'noun', 'b'],
['interrogare', 'verb', 'b'],
['interrogativo', 'adjective', 'b'],
['interrogativo', 'noun', 'b'],
['interrogatorio', 'adjective', 'b'],
['interrogatorio', 'noun', 'b'],
['interrogazione', 'noun', 'c'],
['interrompere', 'verb', 'a'],
['interruttore', 'noun', 'c'],
['interruzione', 'noun', 'b'],
['intervallo', 'noun', 'b'],
['intervenire', 'verb', 'a'],
['intervento', 'noun', 'a'],
['intervista', 'noun', 'a'],
['intesa', 'noun', 'b'],
['intestare', 'verb', 'b'],
['intestino', 'noun', 'c'],
['intimidire', 'verb', 'c'],
['intimità', 'noun', 'b'],
['intimo', 'adjective', 'b'],
['intimo', 'noun', 'b'],
['intitolare', 'verb', 'b'],
['intonaco', 'noun', 'c'],
['intorno', 'adverb', 'a'],
['intorno', 'preposition', 'a'],
['intorno', 'adjective', 'a'],
['intorno', 'noun', 'a'],
['intraprendere', 'verb', 'b'],
['intravedere', 'verb', 'b'],
['intrecciare', 'verb', 'b'],
['introdurre', 'verb', 'a'],
['introduzione', 'noun', 'b'],
['intuire', 'verb', 'b'],
['intuizione', 'noun', 'b'],
['inutile', 'adjective', 'a'],
['invadente', 'pres_part', 'c'],
['invadente', 'adjective', 'c'],
['invadente', 'noun', 'c'],
['invadere', 'verb', 'b'],
['invasione', 'noun', 'b'],
['invecchiare', 'verb', 'b'],
['invece', 'adverb', 'a'],
['inventare', 'verb', 'a'],
['invenzione', 'noun', 'b'],
['invernale', 'adjective', 'b'],
['invernale', 'noun', 'b'],
['inverno', 'noun', 'a'],
['investimento', 'noun', 'b'],
['investire', 'verb', 'a'],
['inviare', 'verb', 'a'],
['inviato', 'past_part', 'b'],
['inviato', 'adjective', 'b'],
['inviato', 'noun', 'b'],
['invidiare', 'verb', 'b'],
['invidioso', 'adjective', 'c'],
['invidioso', 'noun', 'c'],
['invincibile', 'adjective', 'c'],
['invisibile', 'adjective', 'b'],
['invisibile', 'noun', 'b'],
['invitare', 'verb', 'a'],
['invitato', 'past_part', 'b'],
['invitato', 'adjective', 'b'],
['invitato', 'noun', 'b'],
['invito', 'noun', 'b'],
['invocare', 'verb', 'b'],
['inzuppare', 'verb', 'c'],
['io', 'pronoun', 'a'],
['ionico', 'adjective', 'c'],
['ipotesi', 'noun', 'a'],
['ipotizzare', 'verb', 'b'],
['ippopotamo', 'noun', 'c'],
['ipsilon', 'noun', 'c'],
['ira', 'noun', 'b'],
['irlandese', 'adjective', 'b'],
['irlandese', 'noun', 'b'],
['ironia', 'noun', 'b'],
['ironico', 'adjective', 'b'],
['irriconoscibile', 'adjective', 'c'],
['irritare', 'verb', 'b'],
['iscritto', 'past_part', 'b'],
['iscritto', 'adjective', 'b'],
['iscritto', 'noun', 'b'],
['iscrivere', 'verb', 'a'],
['iscrizione', 'noun', 'b'],
['islamico', 'adjective', 'b'],
['islamico', 'noun', 'b'],
['islandese', 'adjective', 'c'],
['islandese', 'noun', 'c'],
['isola', 'noun', 'a'],
['isolare', 'verb', 'b'],
['isolato', 'past_part', 'b'],
['isolato', 'adjective', 'b'],
['isolato', 'noun', 'b'],
['ispettore', 'noun', 'b'],
['ispirare', 'verb', 'a'],
['ispirazione', 'noun', 'b'],
['israeliano', 'adjective', 'c'],
['israeliano', 'noun', 'c'],
['istante', 'noun', 'a'],
['istanza', 'noun', 'b'],
['istintivo', 'adjective', 'c'],
['istinto', 'noun', 'b'],
['istituto', 'noun', 'a'],
['istituzionale', 'adjective', 'b'],
['istituzione', 'noun', 'a'],
['istruttivo', 'adjective', 'c'],
['istruttore', 'noun', 'c'],
['istruzione', 'noun', 'a'],
['italiano', 'adjective', 'a'],
['italiano', 'noun', 'a'],
['iugoslavo', 'adjective', 'c'],
['iugoslavo', 'noun', 'c'],
['jeans', 'noun', 'b'],
['karatè', 'noun', 'c'],
['ketchup', 'noun', 'c'],
['killer', 'noun', 'b'],
['killer', 'adjective', 'b'],
['kit', 'noun', 'c'],
['kiwi', 'noun', 'c'],
['là', 'adverb', 'a'],
['la', 'determiner', 'a'],
['la', 'pronoun', 'a'],
['labbro', 'noun', 'a'],
['labirinto', 'noun', 'c'],
['laboratorio', 'noun', 'a'],
['laborioso', 'adjective', 'c'],
['lacca', 'noun', 'c'],
['lacca', 'adjective', 'c'],
['laccio', 'noun', 'c'],
['lacrima', 'noun', 'a'],
['laddove', 'adverb', 'b'],
['laddove', 'conjunction', 'b'],
['ladro', 'noun', 'b'],
['laggiù', 'adverb', 'b'],
['lago', 'noun', 'a'],
['laico', 'adjective', 'b'],
['laico', 'noun', 'b'],
['lama', 'noun', 'b'],
['lamentare', 'verb', 'a'],
['lamentela', 'noun', 'c'],
['lametta', 'noun', 'c'],
['lamiera', 'noun', 'c'],
['lampada', 'noun', 'b'],
['lampadario', 'noun', 'c'],
['lampo', 'noun', 'b'],
['lampo', 'adjective', 'b'],
['lampo', 'noun', 'b'],
['lana', 'noun', 'b'],
['lancetta', 'noun', 'c'],
['lanciare', 'verb', 'a'],
['lancio', 'noun', 'b'],
['lanterna', 'noun', 'c'],
['lapis', 'noun', 'c'],
['lardo', 'noun', 'c'],
['larghezza', 'noun', 'c'],
['largo', 'adjective', 'a'],
['largo', 'noun', 'a'],
['largo', 'adverb', 'a'],
['lasagna', 'noun', 'c'],
['lasciare', 'verb', 'a'],
['lassù', 'adverb', 'b'],
['lastra', 'noun', 'b'],
['laterale', 'adjective', 'b'],
['laterale', 'noun', 'b'],
['latino', 'adjective', 'b'],
['latino', 'noun', 'b'],
['lato', 'noun', 'a'],
['latta', 'noun', 'c'],
['lattante', 'pres_part', 'c'],
['lattante', 'adjective', 'c'],
['lattante', 'noun', 'c'],
['latte', 'noun', 'a'],
['latte', 'adjective', 'a'],
['latteria', 'noun', 'c'],
['lattina', 'noun', 'c'],
['lattuga', 'noun', 'c'],
['laurea', 'noun', 'b'],
['laureare', 'verb', 'b'],
['laureato', 'past_part', 'b'],
['laureato', 'adjective', 'b'],
['laureato', 'noun', 'b'],
['lava', 'noun', 'c'],
['lavabo', 'noun', 'c'],
['lavagna', 'noun', 'c'],
['lavagna', 'adjective', 'c'],
['lavanda', 'noun', 'c'],
['lavanderia', 'noun', 'c'],
['lavandino', 'noun', 'c'],
['lavapiatti', 'noun', 'c'],
['lavare', 'verb', 'a'],
['lavastoviglie', 'noun', 'c'],
['lavatrice', 'noun', 'b'],
['lavello', 'noun', 'c'],
['lavorare', 'verb', 'a'],
['lavorativo', 'adjective', 'b'],
['lavoratore', 'adjective', 'a'],
['lavoratore', 'noun', 'a'],
['lavorazione', 'noun', 'b'],
['lavoro', 'noun', 'a'],
['laziale', 'adjective', 'c'],
['laziale', 'noun', 'c'],
['le', 'determiner', 'a'],
['le', 'pronoun', 'a'],
['le', 'pronoun', 'a'],
['leader', 'noun', 'b'],
['lealtà', 'noun', 'c'],
['lebbra', 'noun', 'c'],
['leccare', 'verb', 'b'],
['leccio', 'noun', 'c'],
['lecito', 'adjective', 'b'],
['lecito', 'noun', 'b'],
['lega', 'noun', 'b'],
['legale', 'adjective', 'a'],
['legale', 'noun', 'a'],
['legame', 'noun', 'b'],
['legare', 'verb', 'a'],
['legato', 'past_part', 'a'],
['legato', 'adjective', 'a'],
['legato', 'noun', 'a'],
['legge', 'noun', 'a'],
['leggenda', 'noun', 'b'],
['leggere', 'verb', 'a'],
['leggermente', 'adverb', 'b'],
['leggero', 'adjective', 'a'],
['leggero', 'adverb', 'a'],
['leggero', 'noun', 'a'],
['legislativo', 'adjective', 'b'],
['legittimo', 'adjective', 'b'],
['legna', 'noun', 'c'],
['legno', 'noun', 'a'],
['legume', 'noun', 'c'],
['lei', 'pronoun', 'a'],
['lentamente', 'adverb', 'a'],
['lente', 'noun', 'c'],
['lenticchia', 'noun', 'c'],
['lentiggine', 'noun', 'c'],
['lento', 'adjective', 'a'],
['lento', 'noun', 'a'],
['lento', 'adverb', 'a'],
['lenza', 'noun', 'c'],
['lenzuolo', 'noun', 'b'],
['leone', 'noun', 'b'],
['leonessa', 'noun', 'c'],
['leopardo', 'noun', 'c'],
['lepre', 'noun', 'c'],
['lesione', 'noun', 'b'],
['lessare', 'verb', 'c'],
['lessema', 'noun', 'b'],
['lettera', 'noun', 'a'],
['letterale', 'adjective', 'c'],
['letteralmente', 'adverb', 'b'],
['letterario', 'adjective', 'b'],
['letteratura', 'noun', 'a'],
['letto', 'noun', 'a'],
['lettone', 'noun', 'c'],
['lettore', 'noun', 'a'],
['lettura', 'noun', 'a'],
['leva', 'noun', 'b'],
['levare', 'verb', 'a'],
['levare', 'noun', 'a'],
['lezione', 'noun', 'a'],
['lì', 'adverb', 'a'],
['li', 'pronoun', 'a'],
['libanese', 'adjective', 'b'],
['libanese', 'noun', 'b'],
['liberale', 'adjective', 'b'],
['liberale', 'noun', 'b'],
['liberamente', 'adverb', 'b'],
['liberare', 'verb', 'a'],
['liberazione', 'noun', 'b'],
['libero', 'adjective', 'a'],
['libero', 'noun', 'a'],
['libertà', 'noun', 'a'],
['libico', 'adjective', 'c'],
['libico', 'noun', 'c'],
['libraio', 'noun', 'c'],
['libreria', 'noun', 'b'],
['libretto', 'noun', 'b'],
['libro', 'noun', 'a'],
['licenza', 'noun', 'b'],
['licenziamento', 'noun', 'c'],
['licenziare', 'verb', 'b'],
['liceo', 'noun', 'b'],
['lido', 'noun', 'c'],
['lieto', 'adjective', 'b'],
['lieve', 'adjective', 'b'],
['lievito', 'noun', 'c'],
['ligure', 'adjective', 'c'],
['ligure', 'noun', 'c'],
['lima', 'noun', 'c'],
['limare', 'verb', 'c'],
['limitare', 'verb', 'a'],
['limitato', 'past_part', 'b'],
['limitato', 'adjective', 'b'],
['limite', 'noun', 'a'],
['limite', 'adjective', 'a'],
['limonata', 'noun', 'c'],
['limone', 'noun', 'b'],
['limone', 'adjective', 'b'],
['linea', 'noun', 'a'],
['lineare', 'adjective', 'b'],
['lineare', 'noun', 'b'],
['linfa', 'noun', 'b'],
['lingerie', 'noun', 'c'],
['lingua', 'noun', 'a'],
['linguaggio', 'noun', 'a'],
['linguistica', 'noun', 'b'],
['linguistico', 'adjective', 'b'],
['linguistico', 'noun', 'b'],
['link', 'noun', 'b'],
['liquido', 'adjective', 'a'],
['liquido', 'noun', 'a'],
['liquore', 'noun', 'c'],
['lira', 'noun', 'a'],
['lirico', 'adjective', 'b'],
['lisbonese', 'adjective', 'c'],
['lisbonese', 'noun', 'c'],
['liscio', 'adjective', 'b'],
['liscio', 'noun', 'b'],
['lista', 'noun', 'a'],
['lite', 'noun', 'b'],
['litigare', 'verb', 'a'],
['litigio', 'noun', 'b'],
['litro', 'noun', 'b'],
['lituano', 'adjective', 'c'],
['lituano', 'noun', 'c'],
['live', 'adjective', 'b'],
['livello', 'noun', 'a'],
['lo', 'determiner', 'a'],
['lo', 'pronoun', 'a'],
['locale', 'adjective', 'a'],
['locale', 'noun', 'a'],
['locale', 'noun', 'a'],
['località', 'noun', 'b'],
['locanda', 'noun', 'c'],
['locazione', 'noun', 'b'],
['locomotiva', 'noun', 'c'],
['logica', 'noun', 'b'],
['logico', 'adjective', 'b'],
['logico', 'noun', 'b'],
['logoro', 'past_part', 'c'],
['logoro', 'adjective', 'c'],
['lombardo', 'adjective', 'b'],
['lombardo', 'noun', 'b'],
['londinese', 'adjective', 'c'],
['londinese', 'noun', 'c'],
['lontananza', 'noun', 'b'],
['lontano', 'adjective', 'a'],
['lontano', 'adverb', 'a'],
['lontano', 'noun', 'a'],
['lonza', 'noun', 'c'],
['look', 'noun', 'b'],
['loro', 'pronoun', 'a'],
['loro', 'adjective', 'a'],
['lotta', 'noun', 'a'],
['lottare', 'verb', 'b'],
['lozione', 'noun', 'c'],
['lucano', 'adjective', 'c'],
['lucano', 'noun', 'c'],
['luccicare', 'verb', 'c'],
['lucciola', 'noun', 'c'],
['luce', 'noun', 'a'],
['lucente', 'pres_part', 'c'],
['lucente', 'adjective', 'c'],
['lucente', 'noun', 'c'],
['lucertola', 'noun', 'c'],
['lucidare', 'verb', 'c'],
['lucido', 'adjective', 'b'],
['lucido', 'noun', 'b'],
['luglio', 'noun', 'a'],
['lui', 'pronoun', 'a'],
['lumaca', 'noun', 'c'],
['luminoso', 'adjective', 'b'],
['luna', 'noun', 'a'],
['lunedì', 'noun', 'a'],
['lunghezza', 'noun', 'b'],
['lungo', 'adjective', 'a'],
['lungo', 'preposition', 'a'],
['lungo', 'noun', 'a'],
['luogo', 'noun', 'a'],
['lupo', 'noun', 'a'],
['lussemburghese', 'adjective', 'c'],
['lussemburghese', 'noun', 'c'],
['lusso', 'noun', 'b'],
['lutto', 'noun', 'b'],
['ma', 'conjunction', 'a'],
['ma', 'noun', 'a'],
['maccherone', 'noun', 'c'],
['macchia', 'noun', 'a'],
['macchina', 'noun', 'a'],
['macchinista', 'noun', 'c'],
['macedone', 'adjective', 'c'],
['macedone', 'noun', 'c'],
['macedonia', 'noun', 'c'],
['maceria', 'noun', 'b'],
['macinare', 'verb', 'c'],
['madonna', 'noun', 'b'],
['madonna', 'exclamation', 'b'],
['madre', 'noun', 'a'],
['madrileno', 'adjective', 'c'],
['madrileno', 'noun', 'c'],
['madrileno', 'adjective', 'c'],
['madrileno', 'noun', 'c'],
['madrina', 'noun', 'c'],
['maestra', 'noun', 'b'],
['maestranza', 'noun', 'c'],
['maestro', 'noun', 'a'],
['maestro', 'adjective', 'a'],
['mafia', 'noun', 'b'],
['mafioso', 'adjective', 'b'],
['mafioso', 'noun', 'b'],
['magari', 'exclamation', 'a'],
['magari', 'conjunction', 'a'],
['magari', 'adverb', 'a'],
['magazzino', 'noun', 'b'],
['maggio', 'noun', 'a'],
['maggioranza', 'noun', 'a'],
['maggiorenne', 'adjective', 'c'],
['maggiorenne', 'noun', 'c'],
['maggiormente', 'adverb', 'b'],
['magia', 'noun', 'b'],
['magico', 'adjective', 'a'],
['magistrato', 'noun', 'b'],
['magistratura', 'noun', 'b'],
['maglia', 'noun', 'a'],
['maglietta', 'noun', 'b'],
['magnetico', 'adjective', 'b'],
['magnifico', 'adjective', 'b'],
['mago', 'noun', 'b'],
['mago', 'adjective', 'b'],
['magro', 'adjective', 'b'],
['magro', 'noun', 'b'],
['mah', 'exclamation', 'b'],
['mai', 'adverb', 'a'],
['maiale', 'noun', 'b'],
['maionese', 'noun', 'c'],
['mais', 'noun', 'c'],
['maiuscola', 'noun', 'c'],
['malato', 'adjective', 'a'],
['malato', 'noun', 'a'],
['malattia', 'noun', 'a'],
['malaugurio', 'noun', 'c'],
['malavita', 'noun', 'c'],
['male', 'adverb', 'a'],
['male', 'exclamation', 'a'],
['male', 'noun', 'a'],
['maledetto', 'past_part', 'b'],
['maledetto', 'adjective', 'b'],
['maledetto', 'noun', 'b'],
['maledizione', 'noun', 'b'],
['maledizione', 'exclamation', 'b'],
['maleducato', 'adjective', 'c'],
['maleducato', 'noun', 'c'],
['maleducazione', 'noun', 'c'],
['malgrado', 'noun', 'b'],
['malgrado', 'adverb', 'b'],
['malgrado', 'conjunction', 'b'],
['malgrado', 'preposition', 'b'],
['malinconia', 'noun', 'b'],
['malinteso', 'adjective', 'c'],
['malinteso', 'noun', 'c'],
['malizia', 'noun', 'c'],
['maltempo', 'noun', 'c'],
['maltese', 'adjective', 'c'],
['maltese', 'noun', 'c'],
['maltrattamento', 'noun', 'c'],
['maltrattare', 'verb', 'c'],
['malva', 'noun', 'c'],
['malvagio', 'adjective', 'b'],
['malvagio', 'noun', 'b'],
['mamma', 'noun', 'a'],
['mammella', 'noun', 'c'],
['mammifero', 'noun', 'c'],
['manager', 'noun', 'b'],
['mancanza', 'noun', 'a'],
['mancare', 'verb', 'a'],
['mancato', 'past_part', 'b'],
['mancato', 'adjective', 'b'],
['mancino', 'adjective', 'c'],
['mancino', 'noun', 'c'],
['manco', 'adjective', 'b'],
['manco', 'adverb', 'b'],
['mandare', 'verb', 'a'],
['mandarino', 'noun', 'c'],
['mandarino', 'adjective', 'c'],
['mandato', 'past_part', 'b'],
['mandato', 'adjective', 'b'],
['mandato', 'noun', 'b'],
['mandorla', 'noun', 'c'],
['mandorlo', 'noun', 'c'],
['manganello', 'noun', 'c'],
['mangiare', 'verb', 'a'],
['mangime', 'noun', 'c'],
['mania', 'noun', 'b'],
['maniaco', 'adjective', 'c'],
['maniaco', 'noun', 'c'],
['manica', 'noun', 'b'],
['manico', 'noun', 'b'],
['maniera', 'noun', 'a'],
['manifestare', 'verb', 'a'],
['manifestazione', 'noun', 'a'],
['manifesto', 'noun', 'b'],
['mano', 'noun', 'a'],
['manodopera', 'noun', 'c'],
['manoscritto', 'adjective', 'b'],
['manoscritto', 'noun', 'b'],
['manovale', 'noun', 'c'],
['manovra', 'noun', 'b'],
['mantello', 'noun', 'b'],
['mantenere', 'verb', 'a'],
['manuale', 'adjective', 'b'],
['manuale', 'noun', 'b'],
['manuale', 'noun', 'b'],
['manutenzione', 'noun', 'b'],
['manzo', 'noun', 'c'],
['mappa', 'noun', 'b'],
['marca', 'noun', 'b'],
['marcare', 'verb', 'b'],
['marchigiano', 'adjective', 'c'],
['marchigiano', 'noun', 'c'],
['marchio', 'noun', 'b'],
['marcia', 'noun', 'b'],
['marciapiede', 'noun', 'b'],
['marcio', 'adjective', 'b'],
['marcio', 'noun', 'b'],
['marcire', 'verb', 'c'],
['marco', 'noun', 'a'],
['mare', 'noun', 'a'],
['marea', 'noun', 'b'],
['maresciallo', 'noun', 'b'],
['margherita', 'noun', 'c'],
['marginale', 'adjective', 'b'],
['marginale', 'noun', 'b'],
['margine', 'noun', 'b'],
['marinaio', 'noun', 'b'],
['marino', 'adjective', 'b'],
['marino', 'noun', 'b'],
['marionetta', 'noun', 'c'],
['marito', 'noun', 'a'],
['marketing', 'noun', 'b'],
['marmellata', 'noun', 'c'],
['marmo', 'noun', 'b'],
['marocchino', 'adjective', 'c'],
['marocchino', 'noun', 'c'],
['marrone', 'noun', 'b'],
['marrone', 'adjective', 'b'],
['martedì', 'noun', 'b'],
['marzo', 'noun', 'a'],
['mascarpone', 'noun', 'c'],
['maschera', 'noun', 'b'],
['mascherare', 'verb', 'b'],
['mascherato', 'past_part', 'c'],
['mascherato', 'adjective', 'c'],
['maschile', 'adjective', 'a'],
['maschile', 'noun', 'a'],
['maschio', 'noun', 'a'],
['maschio', 'adjective', 'a'],
['massa', 'noun', 'a'],
['massa', 'adverb', 'a'],
['massacrare', 'verb', 'b'],
['massacro', 'noun', 'c'],
['massaggio', 'noun', 'c'],
['massaia', 'noun', 'c'],
['massiccio', 'adjective', 'b'],
['massiccio', 'noun', 'b'],
['massimo', 'adjective', 'a'],
['massimo', 'noun', 'a'],
['massimo', 'adverb', 'a'],
['master', 'noun', 'b'],
['masticare', 'verb', 'b'],
['masturbare', 'verb', 'b'],
['matematica', 'noun', 'b'],
['matematico', 'adjective', 'b'],
['matematico', 'noun', 'b'],
['materasso', 'noun', 'b'],
['materia', 'noun', 'a'],
['materiale', 'adjective', 'a'],
['materiale', 'noun', 'a'],
['maternità', 'noun', 'b'],
['materno', 'adjective', 'b'],
['matita', 'noun', 'b'],
['matricola', 'noun', 'b'],
['matrimoniale', 'adjective', 'b'],
['matrimoniale', 'noun', 'b'],
['matrimonio', 'noun', 'a'],
['mattina', 'noun', 'a'],
['mattinata', 'noun', 'b'],
['mattino', 'noun', 'a'],
['matto', 'adjective', 'a'],
['matto', 'noun', 'a'],
['mattone', 'noun', 'b'],
['mattone', 'adjective', 'b'],
['mattone', 'noun', 'b'],
['maturare', 'verb', 'b'],
['maturità', 'noun', 'b'],
['maturo', 'adjective', 'b'],
['mazzo', 'noun', 'b'],
['me', 'pronoun', 'a'],
['meccanico', 'adjective', 'a'],
['meccanico', 'noun', 'a'],
['meccanismo', 'noun', 'a'],
['medaglia', 'noun', 'b'],
['medesimo', 'adjective', 'b'],
['medesimo', 'pronoun', 'b'],
['media', 'noun', 'a'],
['media', 'noun', 'b'],
['mediante', 'preposition', 'b'],
['medicare', 'verb', 'c'],
['medicina', 'noun', 'a'],
['medico', 'noun', 'a'],
['medico', 'adjective', 'b'],
['medievale', 'adjective', 'b'],
['medio', 'adjective', 'a'],
['medio', 'noun', 'a'],
['medioevo', 'noun', 'b'],
['meditare', 'verb', 'b'],
['mediterraneo', 'adjective', 'b'],
['mediterraneo', 'noun', 'b'],
['meglio', 'adverb', 'a'],
['meglio', 'adjective', 'a'],
['meglio', 'noun', 'a'],
['mela', 'noun', 'b'],
['melagrana', 'noun', 'c'],
['melanzana', 'noun', 'c'],
['melo', 'noun', 'c'],
['melograno', 'noun', 'c'],
['melone', 'noun', 'c'],
['membrana', 'noun', 'b'],
['membro', 'noun', 'a'],
['memoria', 'noun', 'a'],
['menare', 'verb', 'b'],
['mendicante', 'pres_part', 'c'],
['mendicante', 'adjective', 'c'],
['mendicante', 'noun', 'c'],
['meno', 'adverb', 'a'],
['meno', 'adjective', 'a'],
['meno', 'preposition', 'a'],
['meno', 'noun', 'a'],
['mensa', 'noun', 'b'],
['mensile', 'adjective', 'b'],
['mensile', 'noun', 'b'],
['mensola', 'noun', 'c'],
['menta', 'noun', 'c'],
['mentale', 'adjective', 'a'],
['mentalità', 'noun', 'b'],
['mente', 'noun', 'a'],
['mentire', 'verb', 'a'],
['mento', 'noun', 'b'],
['mentre', 'conjunction', 'a'],
['menu', 'noun', 'b'],
['menzogna', 'noun', 'b'],
['meraviglia', 'noun', 'b'],
['meravigliare', 'verb', 'b'],
['meraviglioso', 'adjective', 'a'],
['meraviglioso', 'noun', 'a'],
['mercante', 'noun', 'b'],
['mercato', 'noun', 'a'],
['merce', 'noun', 'b'],
['merceria', 'noun', 'c'],
['mercoledì', 'noun', 'b'],
['merda', 'noun', 'a'],
['merenda', 'noun', 'c'],
['merendina', 'noun', 'c'],
['meridiano', 'adjective', 'c'],
['meridiano', 'noun', 'c'],
['meridionale', 'adjective', 'a'],
['meridionale', 'noun', 'a'],
['meridione', 'noun', 'c'],
['meritare', 'verb', 'a'],
['merito', 'noun', 'a'],
['merlo', 'noun', 'c'],
['merluzzo', 'noun', 'c'],
['mero', 'adjective', 'b'],
['mescolare', 'verb', 'b'],
['mese', 'noun', 'a'],
['messa', 'noun', 'b'],
['messa', 'noun', 'b'],
['messaggio', 'noun', 'a'],
['messe', 'noun', 'c'],
['messicano', 'adjective', 'c'],
['messicano', 'noun', 'c'],
['mestiere', 'noun', 'a'],
['mestolo', 'noun', 'c'],
['mestruazione', 'noun', 'c'],
['metà', 'noun', 'a'],
['meta', 'noun', 'b'],
['metafora', 'noun', 'b'],
['metallico', 'adjective', 'b'],
['metallo', 'noun', 'b'],
['metalmeccanico', 'adjective', 'c'],
['metalmeccanico', 'noun', 'c'],
['meteo', 'adjective', 'b'],
['meteo', 'noun', 'b'],
['metodo', 'noun', 'a'],
['metro', 'noun', 'a'],
['metropolitano', 'adjective', 'b'],
['metropolitano', 'noun', 'b'],
['mettere', 'verb', 'a'],
['mezzanotte', 'noun', 'b'],
['mezzo', 'adjective', 'a'],
['mezzo', 'noun', 'a'],
['mezzo', 'adverb', 'a'],
['mezzogiorno', 'noun', 'b'],
['mi', 'pronoun', 'a'],
['miagolare', 'verb', 'c'],
['mica', 'noun', 'a'],
['mica', 'adverb', 'a'],
['micio', 'noun', 'c'],
['microfono', 'noun', 'b'],
['miele', 'noun', 'b'],
['miele', 'adjective', 'b'],
['mietere', 'verb', 'c'],
['migliaio', 'noun', 'c'],
['migliaio', 'noun', 'a'],
['miglioramento', 'noun', 'b'],
['migliorare', 'verb', 'a'],
['migliore', 'adjective', 'a'],
['migliore', 'noun', 'a'],
['migliore', 'adverb', 'a'],
['mignolo', 'noun', 'c'],
['mila', 'adjective', 'a'],
['milanese', 'adjective', 'b'],
['milanese', 'noun', 'b'],
['miliardo', 'noun', 'a'],
['milione', 'noun', 'a'],
['militare', 'adjective', 'a'],
['militare', 'noun', 'a'],
['mille', 'adjective', 'a'],
['mille', 'noun', 'a'],
['millennio', 'noun', 'b'],
['millimetro', 'noun', 'b'],
['mimosa', 'noun', 'c'],
['minaccia', 'noun', 'b'],
['minacciare', 'verb', 'a'],
['minchia', 'noun', 'b'],
['minestra', 'noun', 'c'],
['minestrone', 'noun', 'c'],
['mini', 'adjective', 'c'],
['miniera', 'noun', 'b'],
['minigonna', 'noun', 'c'],
['minimo', 'adjective', 'a'],
['minimo', 'noun', 'a'],
['ministero', 'noun', 'a'],
['ministro', 'noun', 'a'],
['minoranza', 'noun', 'b'],
['minore', 'adjective', 'a'],
['minore', 'noun', 'a'],
['minuscolo', 'adjective', 'b'],
['minuto', 'noun', 'a'],
['mio', 'adjective', 'a'],
['mio', 'pronoun', 'a'],
['miracolo', 'noun', 'a'],
['mirare', 'verb', 'b'],
['mischiare', 'verb', 'b'],
['miscuglio', 'noun', 'c'],
['miseria', 'noun', 'b'],
['misero', 'adjective', 'b'],
['missile', 'adjective', 'c'],
['missile', 'noun', 'c'],
['missione', 'noun', 'a'],
['mister', 'noun', 'c'],
['misterioso', 'adjective', 'b'],
['mistero', 'noun', 'a'],
['misto', 'adjective', 'b'],
['misto', 'noun', 'b'],
['misura', 'noun', 'a'],
['misurare', 'verb', 'b'],
['misurazione', 'noun', 'c'],
['mitico', 'adjective', 'b'],
['mito', 'noun', 'b'],
['mitragliatrice', 'noun', 'c'],
['mobile', 'adjective', 'a'],
['mobile', 'noun', 'a'],
['mobilio', 'noun', 'c'],
['mocassino', 'noun', 'c'],
['moda', 'noun', 'a'],
['modalità', 'noun', 'b'],
['modella', 'noun', 'b'],
['modellare', 'verb', 'c'],
['modello', 'noun', 'a'],
['moderato', 'past_part', 'b'],
['moderato', 'adjective', 'b'],
['moderato', 'adverb', 'b'],
['moderato', 'noun', 'b'],
['moderatore', 'adjective', 'b'],
['moderatore', 'noun', 'b'],
['modernità', 'noun', 'b'],
['moderno', 'adjective', 'a'],
['moderno', 'noun', 'a'],
['modestia', 'noun', 'c'],
['modesto', 'adjective', 'b'],
['modifica', 'noun', 'b'],
['modificare', 'verb', 'a'],
['modificazione', 'noun', 'b'],
['modo', 'noun', 'a'],
['modulo', 'noun', 'b'],
['moglie', 'noun', 'a'],
['molecola', 'noun', 'b'],
['molisano', 'adjective', 'c'],
['molisano', 'noun', 'c'],
['molla', 'noun', 'c'],
['mollare', 'verb', 'b'],
['mollusco', 'noun', 'c'],
['molo', 'noun', 'c'],
['moltiplicare', 'verb', 'b'],
['molto', 'adjective', 'a'],
['molto', 'pronoun', 'a'],
['molto', 'adverb', 'a'],
['molto', 'noun', 'a'],
['momento', 'noun', 'a'],
['monaca', 'noun', 'c'],
['monaco', 'noun', 'c'],
['monarchica', 'noun', 'c'],
['mondiale', 'adjective', 'a'],
['mondiale', 'noun', 'a'],
['mondo', 'noun', 'a'],
['monello', 'noun', 'c'],
['moneta', 'noun', 'a'],
['monetario', 'adjective', 'b'],
['monitor', 'noun', 'b'],
['monologo', 'noun', 'b'],
['montaggio', 'noun', 'b'],
['montagna', 'noun', 'a'],
['montare', 'verb', 'b'],
['monte', 'noun', 'a'],
['montenegrino', 'adjective', 'c'],
['montenegrino', 'noun', 'c'],
['monumento', 'noun', 'b'],
['mora', 'noun', 'b'],
['morale', 'adjective', 'a'],
['morale', 'noun', 'a'],
['morbido', 'adjective', 'b'],
['morbido', 'noun', 'b'],
['mordere', 'verb', 'b'],
['morire', 'verb', 'a'],
['moro', 'adjective', 'b'],
['moro', 'noun', 'b'],
['morsicare', 'verb', 'c'],
['morso', 'noun', 'c'],
['mortadella', 'noun', 'c'],
['mortale', 'adjective', 'b'],
['mortale', 'noun', 'b'],
['morte', 'noun', 'a'],
['morto', 'past_part', 'a'],
['morto', 'adjective', 'a'],
['morto', 'noun', 'a'],
['mosca', 'noun', 'b'],
['moscovita', 'adjective', 'c'],
['moscovita', 'noun', 'c'],
['mossa', 'noun', 'b'],
['mostarda', 'noun', 'c'],
['mostra', 'noun', 'a'],
['mostrare', 'verb', 'a'],
['mostro', 'noun', 'b'],
['motel', 'noun', 'c'],
['motivare', 'verb', 'b'],
['motivazione', 'noun', 'b'],
['motivo', 'noun', 'a'],
['moto', 'noun', 'a'],
['moto', 'noun', 'b'],
['motociclismo', 'noun', 'c'],
['motociclista', 'adjective', 'c'],
['motociclista', 'noun', 'c'],
['motore', 'adjective', 'a'],
['motore', 'noun', 'a'],
['motorino', 'noun', 'b'],
['motoscafo', 'noun', 'c'],
['mousse', 'noun', 'c'],
['movimento', 'noun', 'a'],
['mozzarella', 'noun', 'c'],
['mucca', 'noun', 'b'],
['mucchio', 'noun', 'b'],
['muggire', 'verb', 'c'],
['muggito', 'past_part', 'c'],
['muggito', 'noun', 'c'],
['mugnaio', 'noun', 'c'],
['mugolare', 'verb', 'c'],
['mulino', 'noun', 'c'],
['multa', 'noun', 'b'],
['multare', 'verb', 'c'],
['multinazionale', 'adjective', 'b'],
['multinazionale', 'noun', 'b'],
['multiplo', 'adjective', 'b'],
['multiplo', 'noun', 'b'],
['multipresa', 'noun', 'c'],
['mummia', 'noun', 'c'],
['mungere', 'verb', 'c'],
['municipio', 'noun', 'c'],
['muovere', 'verb', 'a'],
['murare', 'verb', 'c'],
['muratore', 'noun', 'c'],
['muro', 'noun', 'a'],
['muschio', 'noun', 'c'],
['muschio', 'adjective', 'c'],
['muscolare', 'adjective', 'b'],
['muscolare', 'noun', 'b'],
['muscolo', 'noun', 'a'],
['museo', 'noun', 'a'],
['musica', 'noun', 'a'],
['musicale', 'adjective', 'a'],
['musicista', 'noun', 'b'],
['muso', 'noun', 'b'],
['musulmano', 'adjective', 'b'],
['musulmano', 'noun', 'b'],
['muta', 'noun', 'c'],
['mutamento', 'noun', 'b'],
['mutanda', 'noun', 'b'],
['mutandina', 'noun', 'c'],
['mutare', 'verb', 'b'],
['mutazione', 'noun', 'b'],
['mutilato', 'past_part', 'c'],
['mutilato', 'adjective', 'c'],
['mutilato', 'noun', 'c'],
['muto', 'adjective', 'b'],
['muto', 'noun', 'b'],
['mutuo', 'noun', 'b'],
['nanna', 'noun', 'c'],
['nano', 'adjective', 'b'],
['nano', 'noun', 'b'],
['napoletano', 'adjective', 'b'],
['napoletano', 'noun', 'b'],
['narrare', 'verb', 'b'],
['narrativo', 'adjective', 'b'],
['narratore', 'noun', 'b'],
['narrazione', 'noun', 'b'],
['nasale', 'adjective', 'b'],
['nasale', 'noun', 'b'],
['nascere', 'verb', 'a'],
['nascere', 'noun', 'a'],
['nascita', 'noun', 'a'],
['nascondere', 'verb', 'a'],
['nascondiglio', 'noun', 'c'],
['nascondino', 'noun', 'c'],
['nascosto', 'past_part', 'a'],
['nascosto', 'adjective', 'a'],
['nascosto', 'noun', 'a'],
['naso', 'noun', 'a'],
['nastro', 'noun', 'a'],
['natale', 'adjective', 'a'],
['natale', 'noun', 'a'],
['natalizio', 'adjective', 'b'],
['natalizio', 'noun', 'b'],
['nato', 'past_part', 'b'],
['nato', 'adjective', 'b'],
['nato', 'noun', 'b'],
['natura', 'noun', 'a'],
['naturale', 'adjective', 'a'],
['naturale', 'noun', 'a'],
['naturalmente', 'adverb', 'a'],
['naufragio', 'noun', 'c'],
['navale', 'adjective', 'c'],
['nave', 'noun', 'a'],
['navicella', 'noun', 'c'],
['navigare', 'verb', 'b'],
['navigazione', 'noun', 'b'],
['nazionale', 'adjective', 'a'],
['nazionale', 'noun', 'a'],
['nazionalità', 'noun', 'c'],
['nazione', 'noun', 'a'],
['nazista', 'adjective', 'b'],
['nazista', 'noun', 'b'],
['ndrangheta', 'noun', 'c'],
['né', 'conjunction', 'a'],
['ne', 'pronoun', 'a'],
['ne', 'adverb', 'a'],
['neanche', 'adverb', 'a'],
['nebbia', 'noun', 'b'],
['necessariamente', 'adverb', 'b'],
['necessario', 'adjective', 'a'],
['necessario', 'noun', 'a'],
['necessità', 'noun', 'a'],
['necessitare', 'verb', 'b'],
['negare', 'verb', 'a'],
['negativo', 'adjective', 'a'],
['negativo', 'noun', 'a'],
['negativo', 'adverb', 'a'],
['negazione', 'noun', 'c'],
['negoziante', 'pres_part', 'c'],
['negoziante', 'noun', 'c'],
['negozio', 'noun', 'a'],
['negro', 'adjective', 'b'],
['negro', 'noun', 'b'],
['nemico', 'adjective', 'a'],
['nemico', 'noun', 'a'],
['nemmeno', 'adverb', 'a'],
['neo', 'noun', 'c'],
['neonato', 'noun', 'b'],
['neonato', 'adjective', 'b'],
['neppure', 'adverb', 'a'],
['nero', 'adjective', 'a'],
['nero', 'noun', 'a'],
['nervo', 'noun', 'b'],
['nervosismo', 'noun', 'c'],
['nervoso', 'adjective', 'a'],
['nervoso', 'noun', 'a'],
['nessuno', 'adjective', 'a'],
['nessuno', 'pronoun', 'a'],
['nettare', 'noun', 'c'],
['netto', 'adjective', 'b'],
['netto', 'noun', 'b'],
['netto', 'adverb', 'b'],
['network', 'noun', 'b'],
['neutro', 'adjective', 'b'],
['neutro', 'noun', 'b'],
['neve', 'noun', 'a'],
['nevicare', 'verb', 'c'],
['news', 'noun', 'b'],
['newyorkese', 'adjective', 'c'],
['newyorkese', 'noun', 'c'],
['nido', 'noun', 'b'],
['niente', 'pronoun', 'a'],
['niente', 'adjective', 'a'],
['niente', 'adverb', 'a'],
['nipote', 'noun', 'a'],
['no', 'adverb', 'a'],
['no', 'noun', 'a'],
['no', 'adjective', 'a'],
['nobile', 'adjective', 'b'],
['nobile', 'noun', 'b'],
['nocciola', 'noun', 'c'],
['nocciola', 'adjective', 'c'],
['nocciolina', 'noun', 'c'],
['nocivo', 'adjective', 'c'],
['nodo', 'noun', 'b'],
['noi', 'pronoun', 'a'],
['noia', 'noun', 'b'],
['noioso', 'adjective', 'b'],
['noleggiare', 'verb', 'c'],
['nome', 'noun', 'a'],
['nomina', 'noun', 'b'],
['nominare', 'verb', 'a'],
['non', 'adverb', 'a'],
['nonché', 'conjunction', 'b'],
['nonna', 'noun', 'a'],
['nonno', 'noun', 'a'],
['nono', 'adjective', 'b'],
['nono', 'noun', 'b'],
['nonostante', 'preposition', 'a'],
['nonostante', 'conjunction', 'a'],
['nord', 'noun', 'a'],
['nord', 'adjective', 'a'],
['nordamericano', 'adjective', 'c'],
['nordamericano', 'noun', 'c'],
['norma', 'noun', 'a'],
['normale', 'adjective', 'a'],
['normale', 'noun', 'a'],
['normalità', 'noun', 'b'],
['normalmente', 'adverb', 'b'],
['normativa', 'noun', 'b'],
['norvegese', 'adjective', 'c'],
['norvegese', 'noun', 'c'],
['nostalgia', 'noun', 'b'],
['nostro', 'adjective', 'a'],
['nostro', 'pronoun', 'a'],
['nota', 'noun', 'a'],
['notaio', 'noun', 'b'],
['notare', 'verb', 'a'],
['notevole', 'adjective', 'b'],
['notizia', 'noun', 'a'],
['noto', 'adjective', 'a'],
['noto', 'noun', 'a'],
['notte', 'noun', 'a'],
['notturno', 'adjective', 'b'],
['notturno', 'noun', 'b'],
['novanta', 'adjective', 'b'],
['novanta', 'noun', 'b'],
['nove', 'adjective', 'a'],
['nove', 'noun', 'a'],
['novella', 'noun', 'c'],
['novembre', 'noun', 'a'],
['novità', 'noun', 'a'],
['nozione', 'noun', 'b'],
['nozze', 'noun', 'b'],
['nube', 'noun', 'b'],
['nucleare', 'adjective', 'a'],
['nucleare', 'noun', 'a'],
['nucleo', 'noun', 'b'],
['nudo', 'adjective', 'a'],
['nudo', 'noun', 'a'],
['nulla', 'pronoun', 'a'],
['nulla', 'adverb', 'a'],
['numerare', 'verb', 'b'],
['numerazione', 'noun', 'c'],
['numero', 'noun', 'a'],
['numeroso', 'adjective', 'a'],
['nuora', 'noun', 'c'],
['nuotare', 'verb', 'b'],
['nuoto', 'noun', 'b'],
['nuovamente', 'adverb', 'b'],
['nuovo', 'adjective', 'a'],
['nuovo', 'noun', 'a'],
['nutrire', 'verb', 'b'],
['nuvola', 'noun', 'b'],
['nuvoloso', 'adjective', 'c'],
['nylon', 'noun', 'c'],
['o', 'noun', 'c'],
['o', 'conjunction', 'a'],
['obbedire', 'verb', 'b'],
['obbiettivo', 'adjective', 'c'],
['obbiettivo', 'noun', 'c'],
['obbligare', 'verb', 'a'],
['obbligatorio', 'adjective', 'b'],
['obbligazione', 'noun', 'b'],
['obbligo', 'noun', 'b'],
['obiettivo', 'adjective', 'a'],
['obiettivo', 'noun', 'a'],
['obiezione', 'noun', 'b'],
['oblò', 'noun', 'c'],
['occasione', 'noun', 'a'],
['occhiaia', 'noun', 'c'],
['occhiale', 'noun', 'a'],
['occhiale', 'adjective', 'a'],
['occhiata', 'noun', 'b'],
['occhiello', 'noun', 'c'],
['occhio', 'noun', 'a'],
['occidentale', 'adjective', 'a'],
['occidentale', 'noun', 'a'],
['occidente', 'noun', 'b'],
['occidente', 'adjective', 'b'],
['occorrere', 'verb', 'a'],
['occupare', 'verb', 'a'],
['occupato', 'past_part', 'c'],
['occupato', 'adjective', 'c'],
['occupato', 'noun', 'c'],
['occupazione', 'noun', 'b'],
['oceano', 'noun', 'b'],
['oculista', 'noun', 'c'],
['oddio', 'exclamation', 'b'],
['odiare', 'verb', 'a'],
['odio', 'noun', 'b'],
['odorare', 'verb', 'c'],
['odore', 'noun', 'a'],
['offendere', 'verb', 'b'],
['offerta', 'noun', 'a'],
['offesa', 'noun', 'b'],
['offeso', 'past_part', 'c'],
['offeso', 'adjective', 'c'],
['offeso', 'noun', 'c'],
['officina', 'noun', 'b'],
['offline', 'adjective', 'b'],
['offline', 'noun', 'b'],
['offrire', 'verb', 'a'],
['oggettivo', 'adjective', 'b'],
['oggetto', 'noun', 'a'],
['oggi', 'adverb', 'a'],
['oggi', 'noun', 'a'],
['ogni', 'adjective', 'a'],
['ognuno', 'pronoun', 'a'],
['ognuno', 'adjective', 'a'],
['ok', 'adverb', 'a'],
['ok', 'noun', 'a'],
['ok', 'adjective', 'a'],
['okay', 'adverb', 'a'],
['okay', 'noun', 'a'],
['okay', 'adjective', 'a'],
['olandese', 'adjective', 'b'],
['olandese', 'noun', 'b'],
['oliare', 'verb', 'c'],
['oliera', 'noun', 'c'],
['olimpico', 'adjective', 'b'],
['olio', 'noun', 'a'],
['oliva', 'noun', 'b'],
['oliva', 'adjective', 'b'],
['oltre', 'adverb', 'a'],
['oltre', 'preposition', 'a'],
['oltrepassare', 'verb', 'c'],
['oltretutto', 'adverb', 'b'],
['omaggio', 'noun', 'b'],
['ombelico', 'noun', 'c'],
['ombra', 'noun', 'a'],
['ombrellone', 'noun', 'c'],
['omicidio', 'noun', 'a'],
['omogeneizzato', 'past_part', 'c'],
['omogeneizzato', 'adjective', 'c'],
['omogeneizzato', 'noun', 'c'],
['omonimo', 'adjective', 'b'],
['omonimo', 'noun', 'b'],
['onda', 'noun', 'a'],
['ondata', 'noun', 'b'],
['ondeggiare', 'verb', 'c'],
['onere', 'noun', 'b'],
['onestamente', 'adverb', 'b'],
['onesto', 'adjective', 'b'],
['onesto', 'noun', 'b'],
['onesto', 'adverb', 'b'],
['online', 'adjective', 'b'],
['online', 'noun', 'b'],
['onorare', 'verb', 'b'],
['onore', 'noun', 'a'],
['opera', 'noun', 'a'],
['operaio', 'noun', 'a'],
['operaio', 'adjective', 'a'],
['operare', 'verb', 'a'],
['operativo', 'adjective', 'b'],
['operativo', 'noun', 'b'],
['operatore', 'adjective', 'b'],
['operatore', 'noun', 'b'],
['operazione', 'noun', 'a'],
['opinione', 'noun', 'a'],
['opporre', 'verb', 'a'],
['opportunità', 'noun', 'b'],
['opportuno', 'adjective', 'b'],
['opposizione', 'noun', 'b'],
['opposto', 'past_part', 'a'],
['opposto', 'adjective', 'a'],
['opposto', 'noun', 'a'],
['oppressivo', 'adjective', 'c'],
['oppresso', 'past_part', 'c'],
['oppresso', 'adjective', 'c'],
['oppresso', 'noun', 'c'],
['oppressore', 'adjective', 'c'],
['oppressore', 'noun', 'c'],
['oppure', 'conjunction', 'a'],
['opzione', 'noun', 'b'],
['ora', 'noun', 'a'],
['ora', 'adverb', 'a'],
['orale', 'adjective', 'b'],
['oramai', 'adverb', 'b'],
['orario', 'adjective', 'a'],
['orario', 'noun', 'a'],
['orbita', 'noun', 'b'],
['orchestra', 'noun', 'b'],
['orco', 'noun', 'b'],
['ordinamento', 'noun', 'b'],
['ordinanza', 'noun', 'b'],
['ordinare', 'verb', 'a'],
['ordinario', 'adjective', 'b'],
['ordinario', 'noun', 'b'],
['ordine', 'noun', 'a'],
['orecchino', 'noun', 'c'],
['orecchio', 'noun', 'a'],
['orefice', 'noun', 'c'],
['organico', 'adjective', 'b'],
['organico', 'noun', 'b'],
['organismo', 'noun', 'a'],
['organizzare', 'verb', 'a'],
['organizzato', 'past_part', 'b'],
['organizzato', 'adjective', 'b'],
['organizzato', 'noun', 'b'],
['organizzazione', 'noun', 'a'],
['organo', 'noun', 'a'],
['orgasmo', 'noun', 'b'],
['orgoglio', 'noun', 'b'],
['orgoglioso', 'adjective', 'b'],
['orientale', 'adjective', 'b'],
['orientale', 'noun', 'b'],
['orientamento', 'noun', 'b'],
['orientare', 'verb', 'b'],
['oriente', 'adjective', 'b'],
['oriente', 'noun', 'b'],
['origano', 'noun', 'c'],
['originale', 'adjective', 'a'],
['originale', 'noun', 'a'],
['originario', 'adjective', 'b'],
['origine', 'noun', 'a'],
['orizzontale', 'adjective', 'b'],
['orizzontale', 'noun', 'b'],
['orizzonte', 'noun', 'b'],
['orlo', 'noun', 'b'],
['orma', 'noun', 'c'],
['ormai', 'adverb', 'a'],
['ormone', 'noun', 'b'],
['oro', 'noun', 'a'],
['orologiaio', 'noun', 'c'],
['orologio', 'noun', 'a'],
['oroscopo', 'noun', 'b'],
['orribile', 'adjective', 'b'],
['orrore', 'noun', 'b'],
['orso', 'noun', 'b'],
['ortaggio', 'noun', 'c'],
['ortensia', 'noun', 'c'],
['ortica', 'noun', 'c'],
['orto', 'noun', 'b'],
['ortolano', 'noun', 'c'],
['ortolano', 'adjective', 'c'],
['orzo', 'noun', 'c'],
['osare', 'verb', 'b'],
['osceno', 'adjective', 'c'],
['oscillare', 'verb', 'b'],
['oscurare', 'verb', 'b'],
['oscuro', 'adjective', 'b'],
['oscuro', 'noun', 'b'],
['oscuro', 'adverb', 'b'],
['ospedale', 'noun', 'a'],
['ospitalità', 'noun', 'c'],
['ospitare', 'verb', 'a'],
['ospite', 'adjective', 'a'],
['ospite', 'noun', 'a'],
['ospizio', 'noun', 'c'],
['osservare', 'verb', 'a'],
['osservazione', 'noun', 'b'],
['ossessione', 'noun', 'b'],
['ossia', 'conjunction', 'b'],
['ossigeno', 'noun', 'b'],
['osso', 'noun', 'a'],
['ostacolare', 'verb', 'b'],
['ostacolo', 'noun', 'b'],
['ostaggio', 'noun', 'c'],
['oste', 'noun', 'c'],
['ostile', 'adjective', 'b'],
['ostinato', 'past_part', 'c'],
['ostinato', 'adjective', 'c'],
['ostrica', 'noun', 'c'],
['ottanta', 'adjective', 'b'],
['ottanta', 'noun', 'b'],
['ottavo', 'adjective', 'b'],
['ottavo', 'noun', 'b'],
['ottenere', 'verb', 'a'],
['ottica', 'noun', 'b'],
['ottimo', 'adjective', 'a'],
['ottimo', 'noun', 'a'],
['otto', 'adjective', 'a'],
['otto', 'noun', 'a'],
['ottobre', 'noun', 'a'],
['ottone', 'noun', 'c'],
['ovale', 'adjective', 'c'],
['ovale', 'noun', 'c'],
['ovatta', 'noun', 'c'],
['ove', 'adverb', 'b'],
['ove', 'conjunction', 'b'],
['ovest', 'noun', 'b'],
['ovest', 'adjective', 'b'],
['ovile', 'noun', 'c'],
['ovino', 'adjective', 'c'],
['ovino', 'noun', 'c'],
['ovunque', 'adverb', 'a'],
['ovunque', 'conjunction', 'a'],
['ovvero', 'conjunction', 'a'],
['ovviamente', 'adverb', 'a'],
['ovviare', 'verb', 'b'],
['ovvio', 'adjective', 'b'],
['ozono', 'noun', 'c'],
['pacchetto', 'noun', 'b'],
['pacco', 'noun', 'b'],
['pace', 'noun', 'a'],
['padella', 'noun', 'c'],
['padre', 'noun', 'a'],
['padrona', 'noun', 'b'],
['padronato', 'noun', 'c'],
['padrone', 'noun', 'a'],
['padroneggiare', 'verb', 'c'],
['paesaggio', 'noun', 'b'],
['paese', 'noun', 'a'],
['paga', 'noun', 'b'],
['pagamento', 'noun', 'a'],
['pagare', 'verb', 'a'],
['pagella', 'noun', 'c'],
['pagina', 'noun', 'a'],
['paglia', 'noun', 'b'],
['paglia', 'adjective', 'b'],
['pagliaio', 'noun', 'c'],
['pago', 'past_part', 'b'],
['pago', 'adjective', 'b'],
['paio', 'noun', 'a'],
['pala', 'noun', 'b'],
['palato', 'noun', 'c'],
['palazzina', 'noun', 'c'],
['palazzo', 'noun', 'a'],
['palco', 'noun', 'b'],
['palcoscenico', 'noun', 'b'],
['palermitano', 'adjective', 'c'],
['palermitano', 'noun', 'c'],
['palestinese', 'adjective', 'c'],
['palestinese', 'noun', 'c'],
['palestra', 'noun', 'b'],
['paletta', 'noun', 'c'],
['palla', 'noun', 'a'],
['pallacanestro', 'noun', 'c'],
['pallanuoto', 'noun', 'c'],
['pallavolo', 'noun', 'c'],
['pallido', 'adjective', 'b'],
['pallina', 'noun', 'b'],
['pallino', 'noun', 'c'],
['palloncino', 'noun', 'c'],
['pallone', 'noun', 'b'],
['pallottola', 'noun', 'c'],
['pallottoliere', 'noun', 'c'],
['palma', 'noun', 'c'],
['palo', 'noun', 'b'],
['palombaro', 'noun', 'c'],
['palpebra', 'noun', 'c'],
['palude', 'noun', 'c'],
['panca', 'noun', 'c'],
['pancarrè', 'noun', 'c'],
['pancetta', 'noun', 'c'],
['panchina', 'noun', 'b'],
['pancia', 'noun', 'b'],
['panciotto', 'noun', 'c'],
['panda', 'noun', 'c'],
['pandoro', 'noun', 'c'],
['pane', 'noun', 'a'],
['panetteria', 'noun', 'c'],
['panettiere', 'noun', 'c'],
['panettone', 'noun', 'c'],
['panico', 'adjective', 'b'],
['panico', 'noun', 'b'],
['paniere', 'noun', 'c'],
['panino', 'noun', 'b'],
['panna', 'noun', 'b'],
['pannello', 'noun', 'b'],
['panno', 'noun', 'b'],
['pannocchia', 'noun', 'c'],
['pannolino', 'noun', 'c'],
['pannolone', 'noun', 'c'],
['panorama', 'noun', 'b'],
['pantalone', 'noun', 'a'],
['pantera', 'noun', 'c'],
['pantofola', 'noun', 'c'],
['panzerotto', 'noun', 'c'],
['papa', 'noun', 'a'],
['papà', 'noun', 'a'],
['papavero', 'noun', 'c'],
['papera', 'noun', 'c'],
['papero', 'noun', 'c'],
['pappa', 'noun', 'c'],
['pappagallo', 'noun', 'c'],
['parabola', 'noun', 'c'],
['parabrezza', 'noun', 'c'],
['paracadute', 'noun', 'c'],
['paracadutista', 'noun', 'c'],
['paradiso', 'noun', 'b'],
['paradosso', 'noun', 'b'],
['paradosso', 'adjective', 'b'],
['parafulmine', 'noun', 'c'],
['paragonare', 'verb', 'b'],
['paragone', 'noun', 'b'],
['paralisi', 'noun', 'c'],
['paralizzato', 'past_part', 'c'],
['paralizzato', 'adjective', 'c'],
['parallelepipedo', 'noun', 'c'],
['parallelo', 'adjective', 'b'],
['parallelo', 'noun', 'b'],
['paralume', 'noun', 'c'],
['parametro', 'noun', 'b'],
['paraocchi', 'noun', 'c'],
['parare', 'verb', 'b'],
['paraurti', 'noun', 'c'],
['paravento', 'noun', 'c'],
['parcheggiare', 'verb', 'b'],
['parcheggio', 'noun', 'b'],
['parco', 'noun', 'a'],
['parecchio', 'adjective', 'a'],
['parecchio', 'pronoun', 'a'],
['parecchio', 'adverb', 'a'],
['parecchio', 'adjective', 'a'],
['pareggiare', 'verb', 'c'],
['pareggio', 'noun', 'c'],
['parente', 'noun', 'a'],
['parentesi', 'noun', 'b'],
['parere', 'verb', 'a'],
['parere', 'noun', 'a'],
['parete', 'noun', 'a'],
['pari', 'adjective', 'a'],
['pari', 'adverb', 'a'],
['pari', 'noun', 'a'],
['parigino', 'adjective', 'c'],
['parigino', 'noun', 'c'],
['parità', 'noun', 'c'],
['parlamentare', 'adjective', 'b'],
['parlamentare', 'noun', 'b'],
['parlamento', 'noun', 'b'],
['parlare', 'verb', 'a'],
['parmigiano', 'adjective', 'c'],
['parmigiano', 'noun', 'c'],
['parola', 'noun', 'a'],
['parquet', 'noun', 'c'],
['parroco', 'noun', 'c'],
['parrucca', 'noun', 'c'],
['parrucchiere', 'noun', 'c'],
['parte', 'noun', 'a'],
['parte', 'adverb', 'a'],
['partecipante', 'pres_part', 'b'],
['partecipante', 'adjective', 'b'],
['partecipante', 'noun', 'b'],
['partecipare', 'verb', 'a'],
['partecipazione', 'noun', 'b'],
['parteggiare', 'verb', 'c'],
['partenza', 'noun', 'a'],
['particella', 'noun', 'b'],
['particolare', 'adjective', 'a'],
['particolare', 'noun', 'a'],
['particolarmente', 'adverb', 'a'],
['partigiano', 'noun', 'b'],
['partigiano', 'adjective', 'b'],
['partire', 'verb', 'a'],
['partita', 'noun', 'a'],
['partito', 'noun', 'a'],
['partner', 'noun', 'b'],
['parto', 'noun', 'b'],
['partorire', 'verb', 'b'],
['party', 'noun', 'b'],
['parziale', 'adjective', 'b'],
['parziale', 'noun', 'b'],
['parzialmente', 'adverb', 'b'],
['pascolare', 'verb', 'c'],
['pasqua', 'noun', 'c'],
['pasquale', 'adjective', 'b'],
['passaggio', 'noun', 'a'],
['passare', 'verb', 'a'],
['passata', 'noun', 'c'],
['passatempo', 'noun', 'c'],
['passato', 'past_part', 'a'],
['passato', 'adjective', 'a'],
['passato', 'noun', 'a'],
['passeggero', 'adjective', 'b'],
['passeggero', 'noun', 'b'],
['passeggiare', 'verb', 'b'],
['passeggiata', 'noun', 'b'],
['passeggio', 'noun', 'c'],
['passero', 'noun', 'c'],
['passione', 'noun', 'a'],
['passivo', 'adjective', 'b'],
['passivo', 'noun', 'b'],
['passo', 'noun', 'a'],
['pasta', 'noun', 'a'],
['pasticca', 'noun', 'c'],
['pasticcere', 'noun', 'c'],
['pasticceria', 'noun', 'c'],
['pasticcino', 'noun', 'c'],
['pasticcio', 'noun', 'c'],
['pastiglia', 'noun', 'c'],
['pastina', 'noun', 'c'],
['pasto', 'noun', 'b'],
['pastore', 'noun', 'b'],
['patata', 'noun', 'b'],
['patatina', 'noun', 'c'],
['patè', 'noun', 'c'],
['patente', 'noun', 'b'],
['patetico', 'adjective', 'b'],
['patetico', 'noun', 'b'],
['patologia', 'noun', 'b'],
['patria', 'noun', 'b'],
['patrimonio', 'noun', 'b'],
['pattinaggio', 'noun', 'c'],
['pattinare', 'verb', 'c'],
['pattino', 'noun', 'c'],
['patto', 'noun', 'b'],
['pattumiera', 'noun', 'c'],
['paura', 'noun', 'a'],
['pauroso', 'adjective', 'c'],
['pausa', 'noun', 'a'],
['pavimento', 'noun', 'b'],
['pavone', 'noun', 'c'],
['pavone', 'adjective', 'c'],
['paziente', 'adjective', 'a'],
['paziente', 'noun', 'a'],
['pazienza', 'noun', 'a'],
['pazza', 'noun', 'c'],
['pazzesco', 'adjective', 'b'],
['pazzo', 'adjective', 'a'],
['pazzo', 'noun', 'a'],
['peccato', 'noun', 'b'],
['peccato', 'exclamation', 'b'],
['peccatore', 'noun', 'c'],
['peccatore', 'adjective', 'c'],
['pechinese', 'adjective', 'c'],
['pechinese', 'noun', 'c'],
['pecora', 'noun', 'b'],
['pecorino', 'adjective', 'c'],
['pecorino', 'noun', 'c'],
['pedalare', 'verb', 'c'],
['pedale', 'noun', 'c'],
['pedale', 'adjective', 'c'],
['pedone', 'noun', 'c'],
['pedone', 'adjective', 'c'],
['peggio', 'adverb', 'a'],
['peggio', 'adjective', 'a'],
['peggio', 'noun', 'a'],
['peggioramento', 'noun', 'c'],
['peggiorare', 'verb', 'b'],
['peggiore', 'adjective', 'b'],
['peggiore', 'noun', 'b'],
['peggiore', 'adverb', 'b'],
['pelato', 'past_part', 'c'],
['pelato', 'adjective', 'c'],
['pelato', 'noun', 'c'],
['pelle', 'noun', 'a'],
['pellegrino', 'noun', 'c'],
['pellegrino', 'adjective', 'c'],
['pellerossa', 'adjective', 'c'],
['pellerossa', 'noun', 'c'],
['pelletteria', 'noun', 'c'],
['pellicola', 'noun', 'b'],
['pelo', 'noun', 'b'],
['peloso', 'adjective', 'c'],
['peloso', 'noun', 'c'],
['peluche', 'noun', 'c'],
['pena', 'noun', 'a'],
['penale', 'adjective', 'b'],
['penale', 'noun', 'b'],
['pendere', 'verb', 'b'],
['pendolo', 'noun', 'c'],
['pene', 'noun', 'b'],
['penetrare', 'verb', 'b'],
['penisola', 'noun', 'c'],
['penna', 'noun', 'b'],
['pennarello', 'noun', 'c'],
['pensare', 'verb', 'a'],
['pensiero', 'noun', 'a'],
['pensionato', 'past_part', 'c'],
['pensionato', 'adjective', 'c'],
['pensionato', 'noun', 'c'],
['pensione', 'noun', 'a'],
['pentagono', 'noun', 'c'],
['pentirsi', 'verb', 'b'],
['pentola', 'noun', 'b'],
['penultimo', 'adjective', 'c'],
['pepe', 'noun', 'c'],
['peperoncino', 'noun', 'c'],
['peperone', 'noun', 'c'],
['per', 'preposition', 'a'],
['pera', 'noun', 'c'],
['peraltro', 'adverb', 'b'],
['percentuale', 'adjective', 'b'],
['percentuale', 'noun', 'b'],
['percepire', 'verb', 'a'],
['percezione', 'noun', 'b'],
['perché', 'adverb', 'a'],
['perché', 'conjunction', 'a'],
['perché', 'noun', 'a'],
['perciò', 'conjunction', 'a'],
['percorrere', 'verb', 'b'],
['percorso', 'past_part', 'a'],
['percorso', 'adjective', 'a'],
['percorso', 'noun', 'a'],
['perdere', 'verb', 'a'],
['perdita', 'noun', 'a'],
['perdonare', 'verb', 'a'],
['perdono', 'noun', 'b'],
['perduto', 'past_part', 'b'],
['perduto', 'adjective', 'b'],
['perfettamente', 'adverb', 'a'],
['perfetto', 'past_part', 'a'],
['perfetto', 'adjective', 'a'],
['perfetto', 'noun', 'a'],
['perfezione', 'noun', 'b'],
['perfino', 'adverb', 'a'],
['perfino', 'preposition', 'a'],
['pergola', 'noun', 'c'],
['pergolato', 'noun', 'c'],
['pergolato', 'adjective', 'c'],
['pericolo', 'noun', 'a'],
['pericoloso', 'adjective', 'a'],
['periferia', 'noun', 'b'],
['periodico', 'adjective', 'b'],
['periodico', 'noun', 'b'],
['periodo', 'noun', 'a'],
['perito', 'noun', 'b'],
['perito', 'adjective', 'b'],
['perla', 'noun', 'b'],
['perla', 'adjective', 'b'],
['permaloso', 'adjective', 'c'],
['permaloso', 'noun', 'c'],
['permanente', 'pres_part', 'b'],
['permanente', 'adjective', 'b'],
['permanente', 'noun', 'b'],
['permesso', 'past_part', 'b'],
['permesso', 'adjective', 'b'],
['permesso', 'noun', 'b'],
['permettere', 'verb', 'a'],
['pero', 'noun', 'c'],
['però', 'conjunction', 'a'],
['perpendicolare', 'adjective', 'c'],
['perpendicolare', 'noun', 'c'],
['perplesso', 'adjective', 'b'],
['perquisizione', 'noun', 'b'],
['perseguire', 'verb', 'b'],
['persiana', 'noun', 'c'],
['persiano', 'adjective', 'b'],
['persiano', 'noun', 'b'],
['persino', 'adverb', 'a'],
['perso', 'past_part', 'b'],
['perso', 'adjective', 'b'],
['persona', 'noun', 'a'],
['personaggio', 'noun', 'a'],
['personale', 'adjective', 'a'],
['personale', 'noun', 'a'],
['personale', 'noun', 'a'],
['personalità', 'noun', 'b'],
['personalmente', 'adverb', 'a'],
['pertanto', 'conjunction', 'b'],
['perugino', 'adjective', 'c'],
['perugino', 'noun', 'c'],
['peruviano', 'adjective', 'c'],
['peruviano', 'noun', 'c'],
['pervenire', 'verb', 'b'],
['pesante', 'pres_part', 'a'],
['pesante', 'adjective', 'a'],
['pesante', 'adverb', 'a'],
['pesare', 'verb', 'b'],
['pesca', 'noun', 'c'],
['pesca', 'adjective', 'c'],
['pesca', 'noun', 'b'],
['pescare', 'verb', 'b'],
['pescatore', 'noun', 'b'],
['pescatore', 'adjective', 'b'],
['pesce', 'noun', 'a'],
['peschereccio', 'noun', 'c'],
['peschereccio', 'adjective', 'c'],
['pescheria', 'noun', 'c'],
['pesco', 'noun', 'c'],
['peso', 'noun', 'a'],
['pessimo', 'adjective', 'b'],
['pestare', 'verb', 'c'],
['peste', 'noun', 'c'],
['pesto', 'past_part', 'c'],
['pesto', 'adjective', 'c'],
['pesto', 'noun', 'c'],
['petalo', 'noun', 'c'],
['petardo', 'noun', 'c'],
['petroliera', 'noun', 'c'],
['petrolio', 'noun', 'b'],
['pettegolezzo', 'noun', 'c'],
['pettegolo', 'adjective', 'c'],
['pettegolo', 'noun', 'c'],
['pettinare', 'verb', 'c'],
['pettinatura', 'noun', 'c'],
['pettine', 'noun', 'c'],
['pettirosso', 'noun', 'c'],
['petto', 'noun', 'a'],
['pezza', 'noun', 'c'],
['pezzetto', 'noun', 'b'],
['pezzo', 'noun', 'a'],
['pezzuola', 'noun', 'c'],
['pi', 'noun', 'c'],
['piacere', 'verb', 'a'],
['piacere', 'noun', 'a'],
['piacevole', 'adjective', 'b'],
['piadina', 'noun', 'c'],
['piaga', 'noun', 'c'],
['pialla', 'noun', 'c'],
['piallare', 'verb', 'c'],
['pianeggiante', 'pres_part', 'c'],
['pianeggiante', 'adjective', 'c'],
['pianerottolo', 'noun', 'b'],
['pianeta', 'noun', 'a'],
['piangere', 'verb', 'a'],
['piangere', 'noun', 'a'],
['piano', 'noun', 'a'],
['piano', 'noun', 'a'],
['piano', 'adjective', 'a'],
['piano', 'adverb', 'a'],
['pianoforte', 'noun', 'b'],
['pianoterra', 'noun', 'c'],
['pianta', 'noun', 'a'],
['piantare', 'verb', 'b'],
['pianto', 'noun', 'b'],
['pianura', 'noun', 'b'],
['piastra', 'noun', 'c'],
['piattaforma', 'noun', 'b'],
['piatto', 'adjective', 'a'],
['piatto', 'noun', 'a'],
['piazza', 'noun', 'a'],
['piazzale', 'noun', 'b'],
['piazzare', 'verb', 'b'],
['piccante', 'adjective', 'c'],
['picchiare', 'verb', 'b'],
['piccino', 'adjective', 'c'],
['piccino', 'noun', 'c'],
['piccione', 'noun', 'c'],
['picco', 'noun', 'b'],
['piccolo', 'adjective', 'a'],
['piccolo', 'noun', 'a'],
['piccone', 'noun', 'c'],
['picnic', 'noun', 'c'],
['pidocchio', 'noun', 'c'],
['piede', 'noun', 'a'],
['piega', 'noun', 'b'],
['piegare', 'verb', 'b'],
['pieghevole', 'adjective', 'c'],
['pieghevole', 'noun', 'c'],
['piemontese', 'adjective', 'b'],
['piemontese', 'noun', 'b'],
['piena', 'noun', 'c'],
['pienamente', 'adverb', 'b'],
['pieno', 'adjective', 'a'],
['pieno', 'noun', 'a'],
['pietà', 'noun', 'b'],
['pietra', 'noun', 'a'],
['pigiama', 'noun', 'c'],
['pigione', 'noun', 'c'],
['pigliare', 'verb', 'b'],
['pigna', 'noun', 'c'],
['pigrizia', 'noun', 'c'],
['pigro', 'adjective', 'c'],
['pigro', 'noun', 'c'],
['pila', 'noun', 'b'],
['pillola', 'noun', 'b'],
['pilota', 'noun', 'b'],
['pineta', 'noun', 'c'],
['ping-pong', 'noun', 'c'],
['pinguino', 'noun', 'c'],
['pinna', 'noun', 'c'],
['pinolo', 'noun', 'c'],
['pinza', 'noun', 'c'],
['pinzetta', 'noun', 'c'],
['pioggia', 'noun', 'a'],
['piombo', 'noun', 'b'],
['piombo', 'adjective', 'b'],
['piombo', 'noun', 'b'],
['pioppo', 'noun', 'c'],
['piovere', 'verb', 'b'],
['piovoso', 'adjective', 'c'],
['piovoso', 'noun', 'c'],
['pipì', 'noun', 'c'],
['pipistrello', 'noun', 'c'],
['pirata', 'noun', 'b'],
['piscina', 'noun', 'b'],
['pisello', 'noun', 'c'],
['pisello', 'adjective', 'c'],
['pisolino', 'noun', 'c'],
['pista', 'noun', 'b'],
['pistacchio', 'noun', 'c'],
['pistacchio', 'adjective', 'c'],
['pistola', 'noun', 'a'],
['pittare', 'verb', 'c'],
['pittore', 'noun', 'b'],
['pittore', 'adjective', 'b'],
['pittura', 'noun', 'b'],
['pitturare', 'verb', 'c'],
['più', 'adverb', 'a'],
['più', 'adjective', 'a'],
['più', 'preposition', 'a'],
['più', 'noun', 'a'],
['piuma', 'noun', 'c'],
['piumino', 'noun', 'c'],
['piuttosto', 'adverb', 'a'],
['pizza', 'noun', 'b'],
['pizzeria', 'noun', 'c'],
['pizzetta', 'noun', 'c'],
['pizzicare', 'verb', 'c'],
['pizzo', 'noun', 'c'],
['plaid', 'noun', 'c'],
['plastica', 'noun', 'b'],
['plastico', 'adjective', 'b'],
['plastico', 'noun', 'b'],
['platano', 'noun', 'c'],
['platino', 'noun', 'c'],
['platino', 'adjective', 'c'],
['plurale', 'noun', 'c'],
['plurale', 'adjective', 'c'],
['pneumatico', 'noun', 'c'],
['pochino', 'noun', 'b'],
['poco', 'adjective', 'a'],
['poco', 'pronoun', 'a'],
['poco', 'adverb', 'a'],
['podere', 'noun', 'c'],
['poema', 'noun', 'b'],
['poesia', 'noun', 'a'],
['poeta', 'noun', 'a'],
['poetico', 'adjective', 'b'],
['poetico', 'noun', 'b'],
['poggiapiedi', 'noun', 'c'],
['poggiare', 'verb', 'c'],
['poi', 'adverb', 'a'],
['poiché', 'conjunction', 'a'],
['poker', 'noun', 'b'],
['polacco', 'adjective', 'b'],
['polacco', 'noun', 'b'],
['polemica', 'noun', 'b'],
['polenta', 'noun', 'c'],
['polipo', 'noun', 'c'],
['politica', 'noun', 'a'],
['politico', 'adjective', 'a'],
['politico', 'noun', 'a'],
['polizia', 'noun', 'a'],
['poliziotto', 'noun', 'a'],
['pollaio', 'noun', 'c'],
['pollame', 'noun', 'c'],
['pollice', 'noun', 'b'],
['pollo', 'noun', 'c'],
['polmone', 'noun', 'b'],
['polo', 'noun', 'b'],
['polpa', 'noun', 'c'],
['polpastrello', 'noun', 'c'],
['polpetta', 'noun', 'c'],
['polpo', 'noun', 'c'],
['polsino', 'noun', 'c'],
['polso', 'noun', 'b'],
['poltrona', 'noun', 'b'],
['polvere', 'noun', 'a'],
['polverina', 'noun', 'c'],
['polveroso', 'adjective', 'c'],
['pomata', 'noun', 'c'],
['pomello', 'noun', 'c'],
['pomeriggio', 'noun', 'a'],
['pomodoro', 'noun', 'b'],
['pompa', 'noun', 'b'],
['pompelmo', 'noun', 'c'],
['pompiere', 'noun', 'c'],
['ponte', 'noun', 'a'],
['pony', 'noun', 'c'],
['pop', 'adjective', 'b'],
['pop', 'noun', 'b'],
['popolare', 'adjective', 'a'],
['popolare', 'noun', 'a'],
['popolare', 'verb', 'b'],
['popolarità', 'noun', 'c'],
['popolazione', 'noun', 'a'],
['popolo', 'noun', 'a'],
['porcellana', 'noun', 'c'],
['porcheria', 'noun', 'c'],
['porco', 'noun', 'b'],
['porco', 'adjective', 'b'],
['porgere', 'verb', 'b'],
['porno', 'adjective', 'b'],
['porno', 'noun', 'b'],
['porre', 'verb', 'a'],
['porta', 'noun', 'a'],
['portabagagli', 'noun', 'c'],
['portabagagli', 'adjective', 'c'],
['portacenere', 'noun', 'c'],
['portachiavi', 'noun', 'c'],
['portacipria', 'noun', 'c'],
['portaerei', 'noun', 'c'],
['portafinestra', 'noun', 'c'],
['portafoglio', 'noun', 'b'],
['portafortuna', 'noun', 'c'],
['portale', 'noun', 'b'],
['portamonete', 'noun', 'c'],
['portaombrelli', 'noun', 'c'],
['portare', 'verb', 'a'],
['portata', 'noun', 'b'],
['portatore', 'adjective', 'b'],
['portatore', 'noun', 'b'],
['portiere', 'noun', 'b'],
['portineria', 'noun', 'c'],
['porto', 'noun', 'a'],
['portoghese', 'adjective', 'b'],
['portoghese', 'noun', 'b'],
['portone', 'noun', 'b'],
['porzione', 'noun', 'b'],
['posa', 'noun', 'b'],
['posacenere', 'noun', 'c'],
['posare', 'verb', 'b'],
['posata', 'noun', 'c'],
['positivo', 'adjective', 'a'],
['positivo', 'noun', 'a'],
['positivo', 'adverb', 'a'],
['posizionare', 'verb', 'b'],
['posizione', 'noun', 'a'],
['possedere', 'verb', 'a'],
['possesso', 'noun', 'b'],
['possibile', 'adjective', 'a'],
['possibile', 'noun', 'a'],
['possibilità', 'noun', 'a'],
['post', 'noun', 'b'],
['posta', 'noun', 'a'],
['postale', 'adjective', 'b'],
['postare', 'verb', 'b'],
['posteggiatore', 'noun', 'c'],
['posteriore', 'adjective', 'b'],
['posteriore', 'noun', 'b'],
['postino', 'noun', 'c'],
['postino', 'adjective', 'c'],
['posto', 'noun', 'a'],
['potare', 'verb', 'c'],
['potente', 'pres_part', 'a'],
['potente', 'adjective', 'a'],
['potente', 'noun', 'a'],
['potentino', 'adjective', 'c'],
['potentino', 'noun', 'c'],
['potenza', 'noun', 'b'],
['potenziale', 'adjective', 'b'],
['potenziale', 'noun', 'b'],
['potere', 'verb', 'a'],
['potere', 'noun', 'a'],
['povero', 'adjective', 'a'],
['povertà', 'noun', 'b'],
['pozzanghera', 'noun', 'c'],
['pozzo', 'noun', 'b'],
['praghese', 'adjective', 'c'],
['praghese', 'noun', 'c'],
['pranzo', 'noun', 'a'],
['prassi', 'noun', 'b'],
['pratica', 'noun', 'a'],
['praticamente', 'adverb', 'a'],
['praticare', 'verb', 'b'],
['pratico', 'adjective', 'a'],
['prato', 'noun', 'b'],
['precario', 'adjective', 'b'],
['precedente', 'pres_part', 'a'],
['precedente', 'adjective', 'a'],
['precedente', 'noun', 'a'],
['precedentemente', 'adverb', 'b'],
['precedenza', 'noun', 'b'],
['precedere', 'verb', 'b'],
['precipitare', 'verb', 'b'],
['precisamente', 'adverb', 'b'],
['precisare', 'verb', 'a'],
['precisione', 'noun', 'b'],
['preciso', 'adjective', 'a'],
['preciso', 'adverb', 'a'],
['preda', 'noun', 'b'],
['predisporre', 'verb', 'b'],
['preferenza', 'noun', 'b'],
['preferire', 'verb', 'a'],
['preferito', 'past_part', 'b'],
['preferito', 'adjective', 'b'],
['preferito', 'noun', 'b'],
['pregare', 'verb', 'a'],
['preghiera', 'noun', 'b'],
['pregiato', 'past_part', 'c'],
['pregiato', 'adjective', 'c'],
['pregio', 'noun', 'b'],
['pregiudizio', 'noun', 'b'],
['prego', 'exclamation', 'a'],
['prelevare', 'verb', 'b'],
['preliminare', 'adjective', 'b'],
['preliminare', 'noun', 'b'],
['prémaman', 'adjective', 'c'],
['premere', 'verb', 'b'],
['premessa', 'noun', 'b'],
['premiare', 'verb', 'b'],
['premier', 'noun', 'b'],
['premio', 'noun', 'a'],
['premio', 'adjective', 'a'],
['prendere', 'verb', 'a'],
['prenotare', 'verb', 'b'],
['prenotazione', 'noun', 'c'],
['preoccupare', 'verb', 'a'],
['preoccupato', 'past_part', 'b'],
['preoccupato', 'adjective', 'b'],
['preoccupazione', 'noun', 'b'],
['preparare', 'verb', 'a'],
['preparazione', 'noun', 'b'],
['prepotente', 'adjective', 'c'],
['prepotente', 'noun', 'c'],
['presa', 'noun', 'a'],
['prescindere', 'verb', 'b'],
['prescrivere', 'verb', 'b'],
['prescrizione', 'noun', 'b'],
['presentare', 'verb', 'a'],
['presentazione', 'noun', 'b'],
['presente', 'adjective', 'a'],
['presente', 'noun', 'a'],
['presente', 'adverb', 'a'],
['presenza', 'noun', 'a'],
['presepe', 'noun', 'b'],
['preside', 'noun', 'c'],
['presidente', 'noun', 'a'],
['presidente', 'adjective', 'a'],
['presidenza', 'noun', 'b'],
['pressione', 'noun', 'a'],
['presso', 'adverb', 'a'],
['presso', 'preposition', 'a'],
['presso', 'noun', 'a'],
['presso', 'adjective', 'a'],
['prestare', 'verb', 'a'],
['prestazione', 'noun', 'b'],
['prestigio', 'noun', 'b'],
['prestigioso', 'adjective', 'b'],
['prestito', 'noun', 'b'],
['presto', 'adverb', 'a'],
['presto', 'exclamation', 'a'],
['presto', 'adjective', 'a'],
['presumere', 'verb', 'b'],
['presunto', 'past_part', 'b'],
['presunto', 'adjective', 'b'],
['presupposto', 'past_part', 'b'],
['presupposto', 'adjective', 'b'],
['presupposto', 'noun', 'b'],
['prete', 'noun', 'a'],
['pretendere', 'verb', 'a'],
['pretesa', 'noun', 'b'],
['pretesto', 'noun', 'b'],
['prevalentemente', 'adverb', 'b'],
['prevalere', 'verb', 'b'],
['prevedere', 'verb', 'a'],
['prevedibile', 'adjective', 'b'],
['prevenire', 'verb', 'b'],
['preventivo', 'adjective', 'b'],
['preventivo', 'noun', 'b'],
['prevenzione', 'noun', 'b'],
['previdenza', 'noun', 'c'],
['previsione', 'noun', 'b'],
['previsto', 'past_part', 'a'],
['previsto', 'adjective', 'a'],
['previsto', 'noun', 'a'],
['prezioso', 'adjective', 'a'],
['prezioso', 'noun', 'a'],
['prezzemolo', 'noun', 'c'],
['prezzo', 'noun', 'a'],
['prigione', 'noun', 'b'],
['prigioniero', 'adjective', 'b'],
['prigioniero', 'noun', 'b'],
['prima', 'adverb', 'a'],
['prima', 'adjective', 'a'],
['prima', 'noun', 'a'],
['prima', 'noun', 'a'],
['primario', 'adjective', 'b'],
['primario', 'noun', 'b'],
['primavera', 'noun', 'a'],
['primizia', 'noun', 'c'],
['primo', 'adjective', 'a'],
['primo', 'noun', 'a'],
['primo', 'adverb', 'a'],
['primula', 'noun', 'c'],
['principale', 'adjective', 'a'],
['principale', 'noun', 'a'],
['principalmente', 'adverb', 'b'],
['principe', 'noun', 'a'],
['principe', 'adjective', 'a'],
['principessa', 'noun', 'b'],
['principio', 'noun', 'a'],
['priorità', 'noun', 'b'],
['privacy', 'noun', 'b'],
['privare', 'verb', 'b'],
['privato', 'adjective', 'a'],
['privato', 'noun', 'a'],
['privilegio', 'noun', 'b'],
['privo', 'adjective', 'b'],
['privo', 'preposition', 'b'],
['privo', 'noun', 'b'],
['probabile', 'adjective', 'b'],
['probabilità', 'noun', 'b'],
['probabilmente', 'adverb', 'a'],
['problema', 'noun', 'a'],
['problematico', 'adjective', 'b'],
['procedere', 'verb', 'a'],
['procedimento', 'noun', 'b'],
['procedura', 'noun', 'a'],
['processo', 'noun', 'a'],
['proclamare', 'verb', 'b'],
['procura', 'noun', 'b'],
['procurare', 'verb', 'b'],
['procuratore', 'noun', 'b'],
['prodotto', 'past_part', 'a'],
['prodotto', 'adjective', 'a'],
['prodotto', 'noun', 'a'],
['produrre', 'verb', 'a'],
['produttivo', 'adjective', 'b'],
['produttore', 'adjective', 'b'],
['produttore', 'noun', 'b'],
['produzione', 'noun', 'a'],
['prof', 'noun', 'b'],
['professionale', 'adjective', 'a'],
['professione', 'noun', 'b'],
['professionista', 'noun', 'b'],
['professore', 'noun', 'a'],
['professoressa', 'noun', 'b'],
['profeta', 'noun', 'b'],
['profilattico', 'adjective', 'c'],
['profilattico', 'noun', 'c'],
['profilo', 'noun', 'a'],
['profitto', 'noun', 'b'],
['profondamente', 'adverb', 'b'],
['profondità', 'noun', 'b'],
['profondo', 'adjective', 'a'],
['profondo', 'noun', 'a'],
['profondo', 'adverb', 'a'],
['profumare', 'verb', 'b'],
['profumato', 'past_part', 'c'],
['profumato', 'adjective', 'c'],
['profumo', 'noun', 'b'],
['progettare', 'verb', 'b'],
['progettazione', 'noun', 'b'],
['progetto', 'noun', 'a'],
['programma', 'noun', 'a'],
['programmare', 'verb', 'b'],
['programmazione', 'noun', 'b'],
['progressista', 'adjective', 'c'],
['progressista', 'noun', 'c'],
['progressivo', 'adjective', 'b'],
['progresso', 'noun', 'b'],
['proibire', 'verb', 'b'],
['proiettare', 'verb', 'b'],
['proiettile', 'noun', 'b'],
['proiezione', 'noun', 'b'],
['prolunga', 'noun', 'c'],
['promessa', 'noun', 'b'],
['promettere', 'verb', 'a'],
['promozione', 'noun', 'b'],
['promuovere', 'verb', 'b'],
['pronto', 'adjective', 'a'],
['pronuncia', 'noun', 'c'],
['pronunciare', 'verb', 'a'],
['propaganda', 'noun', 'b'],
['propagandare', 'verb', 'c'],
['proporre', 'verb', 'a'],
['proporzione', 'noun', 'b'],
['proposito', 'noun', 'a'],
['proposizione', 'noun', 'c'],
['proposta', 'noun', 'a'],
['proprietà', 'noun', 'a'],
['proprietario', 'adjective', 'a'],
['proprietario', 'noun', 'a'],
['proprio', 'adjective', 'a'],
['proprio', 'adverb', 'a'],
['proprio', 'noun', 'a'],
['prosa', 'noun', 'b'],
['prosciugare', 'verb', 'c'],
['prosciutto', 'noun', 'b'],
['prosecco', 'noun', 'c'],
['proseguire', 'verb', 'a'],
['prospettiva', 'noun', 'b'],
['prossimo', 'adjective', 'a'],
['prossimo', 'noun', 'a'],
['prostituta', 'noun', 'b'],
['protagonista', 'adjective', 'a'],
['protagonista', 'noun', 'a'],
['proteggere', 'verb', 'a'],
['proteina', 'noun', 'b'],
['protesta', 'noun', 'b'],
['protestare', 'verb', 'b'],
['protetto', 'past_part', 'b'],
['protetto', 'adjective', 'b'],
['protetto', 'noun', 'b'],
['protezione', 'noun', 'b'],
['protocollo', 'noun', 'b'],
['prova', 'noun', 'a'],
['provare', 'verb', 'a'],
['provenienza', 'noun', 'b'],
['provenire', 'verb', 'a'],
['provincia', 'noun', 'a'],
['provinciale', 'adjective', 'b'],
['provinciale', 'noun', 'b'],
['provocare', 'verb', 'a'],
['provola', 'noun', 'c'],
['provolone', 'noun', 'c'],
['provvedere', 'verb', 'b'],
['provvedimento', 'noun', 'b'],
['provvisorio', 'adjective', 'b'],
['prudere', 'verb', 'c'],
['prugna', 'noun', 'c'],
['prugna', 'adjective', 'c'],
['prurito', 'noun', 'c'],
['pseudonimo', 'noun', 'b'],
['pseudonimo', 'adjective', 'b'],
['psichiatra', 'noun', 'b'],
['psichiatria', 'noun', 'c'],
['psichico', 'adjective', 'b'],
['psicologia', 'noun', 'b'],
['psicologico', 'adjective', 'b'],
['psicologo', 'noun', 'b'],
['pub', 'noun', 'b'],
['pubblicare', 'verb', 'a'],
['pubblicazione', 'noun', 'b'],
['pubblicità', 'noun', 'a'],
['pubblicitario', 'adjective', 'b'],
['pubblicitario', 'noun', 'b'],
['pubblico', 'adjective', 'a'],
['pubblico', 'noun', 'a'],
['pugilato', 'noun', 'c'],
['pugliese', 'adjective', 'c'],
['pugliese', 'noun', 'c'],
['pugno', 'noun', 'a'],
['pulce', 'noun', 'c'],
['pulce', 'adjective', 'c'],
['pulcino', 'noun', 'c'],
['puledro', 'noun', 'c'],
['pulire', 'verb', 'a'],
['pulito', 'past_part', 'b'],
['pulito', 'adjective', 'b'],
['pulito', 'noun', 'b'],
['pulizia', 'noun', 'b'],
['pullman', 'noun', 'b'],
['pullover', 'noun', 'c'],
['pulmino', 'noun', 'c'],
['pulsante', 'pres_part', 'b'],
['pulsante', 'adjective', 'b'],
['pulsante', 'noun', 'b'],
['puma', 'noun', 'c'],
['pungere', 'verb', 'c'],
['punire', 'verb', 'b'],
['punizione', 'noun', 'b'],
['punk', 'adjective', 'c'],
['punk', 'noun', 'c'],
['punta', 'noun', 'a'],
['puntare', 'verb', 'a'],
['puntata', 'noun', 'b'],
['puntato', 'past_part', 'b'],
['puntato', 'adjective', 'b'],
['punteggio', 'noun', 'c'],
['puntiglio', 'noun', 'c'],
['puntino', 'noun', 'b'],
['punto', 'noun', 'a'],
['puntuale', 'adjective', 'b'],
['puntura', 'noun', 'c'],
['pupa', 'noun', 'b'],
['pupazzo', 'noun', 'c'],
['pupo', 'noun', 'c'],
['purché', 'conjunction', 'b'],
['pure', 'adverb', 'a'],
['pure', 'conjunction', 'a'],
['purè', 'noun', 'c'],
['purga', 'noun', 'c'],
['puro', 'adjective', 'a'],
['puro', 'noun', 'a'],
['purtroppo', 'adverb', 'a'],
['puttana', 'noun', 'b'],
['puzza', 'noun', 'b'],
['puzzare', 'verb', 'b'],
['puzzle', 'noun', 'c'],
['qua', 'adverb', 'a'],
['quaderno', 'noun', 'b'],
['quadrato', 'past_part', 'b'],
['quadrato', 'adjective', 'b'],
['quadrato', 'noun', 'b'],
['quadrifoglio', 'noun', 'c'],
['quadro', 'adjective', 'a'],
['quadro', 'noun', 'a'],
['quaglia', 'noun', 'c'],
['qualche', 'adjective', 'a'],
['qualche', 'adverb', 'a'],
['qualcosa', 'pronoun', 'a'],
['qualcuno', 'pronoun', 'a'],
['qualcuno', 'adjective', 'a'],
['qualcuno', 'noun', 'a'],
['quale', 'adjective', 'a'],
['quale', 'pronoun', 'a'],
['quale', 'adverb', 'a'],
['quale', 'noun', 'a'],
['qualificare', 'verb', 'b'],
['qualità', 'noun', 'a'],
['qualora', 'conjunction', 'b'],
['qualsiasi', 'adjective', 'a'],
['qualunque', 'adjective', 'a'],
['qualunque', 'pronoun', 'a'],
['quando', 'conjunction', 'a'],
['quando', 'adverb', 'a'],
['quando', 'noun', 'a'],
['quantità', 'noun', 'a'],
['quantitativo', 'adjective', 'b'],
['quantitativo', 'noun', 'b'],
['quanto', 'adjective', 'a'],
['quanto', 'pronoun', 'a'],
['quanto', 'adverb', 'a'],
['quanto', 'noun', 'a'],
['quaranta', 'adjective', 'a'],
['quaranta', 'noun', 'a'],
['quarta', 'noun', 'b'],
['quartiere', 'noun', 'a'],
['quarto', 'adjective', 'a'],
['quarto', 'noun', 'a'],
['quasi', 'adverb', 'a'],
['quasi', 'conjunction', 'a'],
['quattordici', 'adjective', 'b'],
['quattordici', 'noun', 'b'],
['quattro', 'adjective', 'a'],
['quattro', 'noun', 'a'],
['quello', 'adjective', 'a'],
['quello', 'pronoun', 'a'],
['quercia', 'noun', 'c'],
['questione', 'noun', 'a'],
['questo', 'adjective', 'a'],
['questo', 'pronoun', 'a'],
['questura', 'noun', 'b'],
['qui', 'adverb', 'a'],
['quindi', 'adverb', 'a'],
['quindi', 'conjunction', 'a'],
['quindici', 'adjective', 'a'],
['quindici', 'noun', 'a'],
['quinta', 'noun', 'b'],
['quinto', 'adjective', 'b'],
['quinto', 'noun', 'b'],
['quiz', 'noun', 'a'],
['quota', 'noun', 'a'],
['quotidiano', 'adjective', 'a'],
['quotidiano', 'noun', 'a'],
['rabbia', 'noun', 'a'],
['racchetta', 'noun', 'c'],
['racchiudere', 'verb', 'b'],
['raccogliere', 'verb', 'a'],
['raccolta', 'noun', 'a'],
['raccomandare', 'verb', 'b'],
['raccomandazione', 'noun', 'c'],
['raccontare', 'verb', 'a'],
['racconto', 'noun', 'a'],
['raddoppiare', 'verb', 'b'],
['raddrizzare', 'verb', 'c'],
['radere', 'verb', 'c'],
['radiazione', 'noun', 'b'],
['radicale', 'adjective', 'b'],
['radicale', 'noun', 'b'],
['radicchio', 'noun', 'c'],
['radice', 'noun', 'a'],
['radio', 'noun', 'a'],
['radio', 'adjective', 'a'],
['rado', 'adjective', 'b'],
['rado', 'adverb', 'b'],
['raffigurare', 'verb', 'b'],
['raffinato', 'past_part', 'b'],
['raffinato', 'adjective', 'b'],
['raffinato', 'noun', 'b'],
['rafforzamento', 'noun', 'c'],
['rafforzare', 'verb', 'b'],
['raffreddore', 'noun', 'c'],
['ragazza', 'noun', 'a'],
['ragazzino', 'noun', 'a'],
['ragazzo', 'noun', 'a'],
['raggio', 'noun', 'a'],
['raggiungere', 'verb', 'a'],
['ragionamento', 'noun', 'b'],
['ragionare', 'verb', 'b'],
['ragione', 'noun', 'a'],
['ragionevole', 'adjective', 'b'],
['ragioniere', 'noun', 'b'],
['ragnatela', 'noun', 'c'],
['ragno', 'noun', 'c'],
['ragù', 'noun', 'c'],
['rallegrare', 'verb', 'c'],
['rallentare', 'verb', 'b'],
['rame', 'noun', 'b'],
['rammendo', 'noun', 'c'],
['ramo', 'noun', 'b'],
['rampicante', 'pres_part', 'c'],
['rampicante', 'adjective', 'c'],
['rampicante', 'noun', 'c'],
['rana', 'noun', 'c'],
['rancio', 'noun', 'c'],
['rapa', 'noun', 'c'],
['rapidamente', 'adverb', 'b'],
['rapido', 'adjective', 'a'],
['rapido', 'noun', 'a'],
['rapimento', 'noun', 'c'],
['rapina', 'noun', 'b'],
['rapinatore', 'adjective', 'c'],
['rapinatore', 'noun', 'c'],
['rapire', 'verb', 'b'],
['rapporto', 'noun', 'a'],
['rappresentante', 'pres_part', 'b'],
['rappresentante', 'adjective', 'b'],
['rappresentante', 'noun', 'b'],
['rappresentanza', 'noun', 'b'],
['rappresentare', 'verb', 'a'],
['rappresentazione', 'noun', 'b'],
['raramente', 'adverb', 'b'],
['raro', 'adjective', 'a'],
['raro', 'noun', 'a'],
['raro', 'adverb', 'a'],
['rasare', 'verb', 'c'],
['rasoio', 'noun', 'c'],
['rassegna', 'noun', 'b'],
['rassegnare', 'verb', 'b'],
['rassegnazione', 'noun', 'c'],
['rasserenare', 'verb', 'c'],
['rassicurare', 'verb', 'b'],
['rastrello', 'noun', 'c'],
['rata', 'noun', 'c'],
['rateale', 'adjective', 'c'],
['rattristare', 'verb', 'c'],
['rauco', 'adjective', 'c'],
['ravanello', 'noun', 'c'],
['razionale', 'adjective', 'b'],
['razionale', 'noun', 'b'],
['razza', 'noun', 'b'],
['razzo', 'noun', 'c'],
['re', 'noun', 'a'],
['reagire', 'verb', 'a'],
['reale', 'adjective', 'a'],
['reale', 'noun', 'a'],
['realistico', 'adjective', 'b'],
['realizzare', 'verb', 'a'],
['realizzazione', 'noun', 'b'],
['realmente', 'adverb', 'b'],
['realtà', 'noun', 'a'],
['reato', 'noun', 'a'],
['reazione', 'noun', 'a'],
['recare', 'verb', 'a'],
['recensione', 'noun', 'b'],
['recente', 'adjective', 'a'],
['recentemente', 'adverb', 'b'],
['recintare', 'verb', 'c'],
['recinto', 'past_part', 'c'],
['recinto', 'adjective', 'c'],
['recinto', 'noun', 'c'],
['recipiente', 'adjective', 'c'],
['recipiente', 'noun', 'c'],
['reciproco', 'adjective', 'b'],
['reciproco', 'noun', 'b'],
['recita', 'noun', 'c'],
['recitare', 'verb', 'a'],
['reclame', 'noun', 'c'],
['reclame', 'adjective', 'c'],
['reclamo', 'noun', 'c'],
['recluta', 'noun', 'c'],
['record', 'noun', 'b'],
['recuperare', 'verb', 'a'],
['recupero', 'noun', 'b'],
['redazione', 'noun', 'b'],
['reddito', 'noun', 'b'],
['redigere', 'verb', 'b'],
['referendum', 'noun', 'b'],
['regalare', 'verb', 'a'],
['regale', 'adjective', 'b'],
['regalo', 'noun', 'a'],
['reggere', 'verb', 'a'],
['reggimento', 'noun', 'c'],
['reggiseno', 'noun', 'b'],
['regia', 'noun', 'b'],
['regime', 'noun', 'a'],
['regina', 'noun', 'a'],
['regionale', 'adjective', 'b'],
['regionale', 'noun', 'b'],
['regione', 'noun', 'a'],
['regista', 'noun', 'a'],
['registrare', 'verb', 'a'],
['registratore', 'adjective', 'c'],
['registratore', 'noun', 'c'],
['registrazione', 'noun', 'a'],
['registro', 'noun', 'b'],
['regnare', 'verb', 'b'],
['regno', 'noun', 'a'],
['regola', 'noun', 'a'],
['regolamento', 'noun', 'b'],
['regolare', 'adjective', 'b'],
['regolare', 'noun', 'b'],
['regolare', 'verb', 'b'],
['regolarmente', 'adverb', 'b'],
['relativamente', 'adverb', 'b'],
['relativo', 'adjective', 'a'],
['relazione', 'noun', 'a'],
['religione', 'noun', 'a'],
['religioso', 'adjective', 'a'],
['religioso', 'noun', 'a'],
['remare', 'verb', 'c'],
['remo', 'noun', 'c'],
['remoto', 'adjective', 'b'],
['rendere', 'verb', 'a'],
['rene', 'noun', 'b'],
['reparto', 'noun', 'b'],
['repertorio', 'noun', 'b'],
['replica', 'noun', 'b'],
['replicare', 'verb', 'b'],
['repressione', 'noun', 'c'],
['reprimere', 'verb', 'c'],
['repubblica', 'noun', 'a'],
['repubblicano', 'adjective', 'b'],
['repubblicano', 'noun', 'b'],
['requisito', 'noun', 'b'],
['resa', 'noun', 'b'],
['residente', 'adjective', 'b'],
['residente', 'noun', 'b'],
['residenza', 'noun', 'b'],
['residuo', 'adjective', 'b'],
['residuo', 'noun', 'b'],
['resistente', 'pres_part', 'b'],
['resistente', 'adjective', 'b'],
['resistente', 'noun', 'b'],
['resistenza', 'noun', 'b'],
['resistere', 'verb', 'a'],
['resoconto', 'noun', 'c'],
['respingere', 'verb', 'b'],
['respirare', 'verb', 'a'],
['respirazione', 'noun', 'c'],
['respiro', 'noun', 'b'],
['responsabile', 'adjective', 'a'],
['responsabile', 'noun', 'a'],
['responsabilità', 'noun', 'a'],
['restare', 'verb', 'a'],
['restituire', 'verb', 'b'],
['resto', 'noun', 'a'],
['restringere', 'verb', 'b'],
['rete', 'noun', 'a'],
['retorica', 'noun', 'b'],
['retro', 'adverb', 'b'],
['retro', 'noun', 'b'],
['retta', 'noun', 'b'],
['rettangolare', 'adjective', 'c'],
['rettile', 'noun', 'c'],
['rettile', 'adjective', 'c'],
['retto', 'adjective', 'b'],
['retto', 'noun', 'b'],
['revisione', 'noun', 'b'],
['rialzare', 'verb', 'b'],
['riaprire', 'verb', 'b'],
['riassumere', 'verb', 'b'],
['ribadire', 'verb', 'b'],
['ribattere', 'verb', 'b'],
['ribellare', 'verb', 'b'],
['ribelle', 'adjective', 'b'],
['ribelle', 'noun', 'b'],
['ricadere', 'verb', 'b'],
['ricaduta', 'noun', 'c'],
['ricalcare', 'verb', 'c'],
['ricamare', 'verb', 'c'],
['ricambiare', 'verb', 'b'],
['ricambio', 'noun', 'c'],
['ricamo', 'noun', 'c'],
['ricarica', 'noun', 'c'],
['ricavare', 'verb', 'b'],
['ricchezza', 'noun', 'b'],
['riccio', 'adjective', 'c'],
['riccio', 'noun', 'c'],
['ricciolo', 'adjective', 'c'],
['ricciolo', 'noun', 'c'],
['ricco', 'adjective', 'a'],
['ricerca', 'noun', 'a'],
['ricercare', 'verb', 'b'],
['ricercatore', 'adjective', 'b'],
['ricercatore', 'noun', 'b'],
['ricetta', 'noun', 'a'],
['ricevere', 'verb', 'a'],
['ricevimento', 'noun', 'c'],
['ricevuta', 'noun', 'b'],
['richiamare', 'verb', 'a'],
['richiamo', 'noun', 'b'],
['richiedere', 'verb', 'a'],
['richiesta', 'noun', 'a'],
['richiudere', 'verb', 'b'],
['ricominciare', 'verb', 'a'],
['ricompensa', 'noun', 'c'],
['ricompensare', 'verb', 'c'],
['riconciliarsi', 'verb', 'c'],
['riconoscere', 'verb', 'a'],
['riconoscimento', 'noun', 'b'],
['ricopiare', 'verb', 'c'],
['ricoprire', 'verb', 'b'],
['ricordare', 'verb', 'a'],
['ricordo', 'noun', 'a'],
['ricorrere', 'verb', 'b'],
['ricorso', 'noun', 'b'],
['ricostruire', 'verb', 'b'],
['ricostruzione', 'noun', 'b'],
['ricotta', 'noun', 'c'],
['ricoverare', 'verb', 'b'],
['ricovero', 'noun', 'c'],
['ricreazione', 'noun', 'c'],
['ridare', 'verb', 'b'],
['ridere', 'verb', 'a'],
['ridere', 'noun', 'a'],
['ridicolo', 'adjective', 'b'],
['ridicolo', 'noun', 'b'],
['ridotto', 'past_part', 'b'],
['ridotto', 'adjective', 'b'],
['ridotto', 'noun', 'b'],
['ridurre', 'verb', 'a'],
['riduzione', 'noun', 'b'],
['riempire', 'verb', 'a'],
['rientrare', 'verb', 'a'],
['rientro', 'noun', 'b'],
['rifare', 'verb', 'a'],
['riferimento', 'noun', 'a'],
['riferire', 'verb', 'a'],
['rifinire', 'verb', 'c'],
['rifiutare', 'verb', 'a'],
['rifiuto', 'noun', 'a'],
['riflessione', 'noun', 'a'],
['riflesso', 'noun', 'b'],
['riflettere', 'verb', 'a'],
['riflettore', 'noun', 'c'],
['riflettore', 'adjective', 'c'],
['riforma', 'noun', 'b'],
['rifornimento', 'noun', 'c'],
['rifugiare', 'verb', 'b'],
['rifugio', 'noun', 'b'],
['riga', 'noun', 'a'],
['rigattiere', 'noun', 'c'],
['rigido', 'adjective', 'b'],
['rigore', 'noun', 'b'],
['rigoroso', 'adjective', 'b'],
['rigovernare', 'verb', 'c'],
['riguardare', 'verb', 'a'],
['riguardo', 'noun', 'a'],
['rilasciare', 'verb', 'b'],
['rilassare', 'verb', 'a'],
['rilegare', 'verb', 'c'],
['rileggere', 'verb', 'b'],
['rilevante', 'pres_part', 'b'],
['rilevante', 'adjective', 'b'],
['rilevare', 'verb', 'b'],
['rilievo', 'noun', 'b'],
['rima', 'noun', 'b'],
['rimandare', 'verb', 'b'],
['rimanenza', 'noun', 'c'],
['rimanere', 'verb', 'a'],
['rimbombare', 'verb', 'c'],
['rimborsare', 'verb', 'c'],
['rimediare', 'verb', 'b'],
['rimedio', 'noun', 'b'],
['rimettere', 'verb', 'a'],
['rimodernare', 'verb', 'c'],
['rimorchio', 'noun', 'c'],
['rimpiangere', 'verb', 'b'],
['rimproverare', 'verb', 'b'],
['rimprovero', 'noun', 'c'],
['rimuovere', 'verb', 'b'],
['rinascere', 'verb', 'b'],
['rinascimento', 'noun', 'b'],
['rinascimento', 'adjective', 'b'],
['rincarare', 'verb', 'c'],
['rinchiudere', 'verb', 'b'],
['rincorsa', 'noun', 'c'],
['rinforzo', 'noun', 'c'],
['rinfresco', 'noun', 'c'],
['ringhiare', 'verb', 'c'],
['ringhiera', 'noun', 'c'],
['ringhio', 'noun', 'c'],
['ringiovanire', 'verb', 'c'],
['ringraziare', 'verb', 'a'],
['rinnegare', 'verb', 'c'],
['rinnovare', 'verb', 'b'],
['rinoceronte', 'noun', 'c'],
['rintracciare', 'verb', 'b'],
['rinuncia', 'noun', 'c'],
['rinunciare', 'verb', 'a'],
['rinvenire', 'verb', 'b'],
['rinviare', 'verb', 'b'],
['rinvio', 'noun', 'c'],
['rione', 'noun', 'c'],
['riordinare', 'verb', 'c'],
['riparare', 'verb', 'b'],
['riparo', 'noun', 'b'],
['ripartire', 'verb', 'b'],
['ripartire', 'verb', 'b'],
['ripensamento', 'noun', 'c'],
['ripensare', 'verb', 'b'],
['ripetente', 'pres_part', 'c'],
['ripetente', 'adjective', 'c'],
['ripetente', 'noun', 'c'],
['ripetere', 'verb', 'a'],
['ripetizione', 'noun', 'b'],
['ripido', 'adjective', 'c'],
['ripiego', 'noun', 'c'],
['ripieno', 'adjective', 'c'],
['ripieno', 'noun', 'c'],
['riportare', 'verb', 'a'],
['riposare', 'verb', 'b'],
['riposo', 'noun', 'b'],
['riposo', 'loc-comando', 'b'],
['riposo', 'noun', 'b'],
['riprendere', 'verb', 'a'],
['ripresa', 'noun', 'b'],
['riprodurre', 'verb', 'b'],
['riproduzione', 'noun', 'a'],
['riproporre', 'verb', 'b'],
['riprovare', 'verb', 'b'],
['ripulire', 'verb', 'b'],
['risaia', 'noun', 'c'],
['risalire', 'verb', 'a'],
['risarcimento', 'noun', 'b'],
['risata', 'noun', 'b'],
['riscaldamento', 'noun', 'b'],
['riscaldare', 'verb', 'b'],
['riscattare', 'verb', 'c'],
['riscatto', 'noun', 'c'],
['rischiare', 'verb', 'a'],
['rischio', 'noun', 'a'],
['rischioso', 'adjective', 'b'],
['risciacquare', 'verb', 'c'],
['riscontrare', 'verb', 'b'],
['riscontro', 'noun', 'b'],
['riscuotere', 'verb', 'b'],
['risentimento', 'noun', 'c'],
['risentire', 'verb', 'b'],
['riserva', 'noun', 'b'],
['riservare', 'verb', 'a'],
['riservato', 'past_part', 'a'],
['riservato', 'adjective', 'a'],
['risiedere', 'verb', 'b'],
['riso', 'noun', 'b'],
['risoluzione', 'noun', 'b'],
['risolvere', 'verb', 'a'],
['risonanza', 'noun', 'b'],
['risorsa', 'noun', 'a'],
['risparmiare', 'verb', 'b'],
['risparmio', 'noun', 'b'],
['rispettare', 'verb', 'a'],
['rispettivamente', 'adverb', 'b'],
['rispettivo', 'adjective', 'b'],
['rispetto', 'noun', 'a'],
['risplendere', 'verb', 'c'],
['rispondere', 'verb', 'a'],
['risposta', 'noun', 'a'],
['rissa', 'noun', 'b'],
['ristampare', 'verb', 'c'],
['ristorante', 'noun', 'a'],
['ristretto', 'past_part', 'b'],
['ristretto', 'adjective', 'b'],
['ristretto', 'noun', 'b'],
['risultare', 'verb', 'a'],
['risultato', 'past_part', 'a'],
['risultato', 'adjective', 'a'],
['risultato', 'noun', 'a'],
['risvegliare', 'verb', 'b'],
['risveglio', 'noun', 'b'],
['ritagliare', 'verb', 'b'],
['ritardare', 'verb', 'b'],
['ritardo', 'noun', 'a'],
['ritenere', 'verb', 'a'],
['ritirare', 'verb', 'a'],
['ritirata', 'noun', 'c'],
['ritiro', 'noun', 'b'],
['ritmo', 'noun', 'a'],
['rito', 'noun', 'b'],
['ritoccare', 'verb', 'c'],
['ritornare', 'verb', 'a'],
['ritornello', 'noun', 'c'],
['ritorno', 'noun', 'a'],
['ritrarre', 'verb', 'b'],
['ritratto', 'past_part', 'b'],
['ritratto', 'adjective', 'b'],
['ritratto', 'noun', 'b'],
['ritrovare', 'verb', 'a'],
['ritrovo', 'noun', 'c'],
['ritto', 'adjective', 'c'],
['ritto', 'noun', 'c'],
['ritto', 'adverb', 'c'],
['ritto', 'preposition', 'c'],
['rituale', 'adjective', 'b'],
['rituale', 'noun', 'b'],
['riunione', 'noun', 'a'],
['riunire', 'verb', 'a'],
['riunito', 'past_part', 'c'],
['riunito', 'adjective', 'c'],
['riunito', 'noun', 'c'],
['riuscire', 'verb', 'a'],
['riuscita', 'noun', 'c'],
['riva', 'noun', 'b'],
['rivale', 'adjective', 'b'],
['rivale', 'noun', 'b'],
['rivedere', 'verb', 'a'],
['rivelare', 'verb', 'a'],
['rivelazione', 'noun', 'b'],
['rivendicare', 'verb', 'b'],
['rivendita', 'noun', 'c'],
['rivestimento', 'noun', 'c'],
['rivestire', 'verb', 'b'],
['rivincita', 'noun', 'c'],
['rivista', 'noun', 'a'],
['rivisto', 'past_part', 'b'],
['rivisto', 'adjective', 'b'],
['rivolgere', 'verb', 'a'],
['rivolta', 'noun', 'b'],
['rivoltare', 'verb', 'c'],
['rivoluzionario', 'adjective', 'b'],
['rivoluzionario', 'noun', 'b'],
['rivoluzione', 'noun', 'a'],
['roba', 'noun', 'a'],
['robot', 'noun', 'b'],
['robusto', 'adjective', 'b'],
['rocca', 'noun', 'c'],
['rocchetto', 'noun', 'c'],
['roccia', 'noun', 'b'],
['roccioso', 'adjective', 'c'],
['rock', 'noun', 'b'],
['rock', 'adjective', 'b'],
['rodaggio', 'noun', 'c'],
['rodere', 'verb', 'c'],
['romagnolo', 'adjective', 'c'],
['romagnolo', 'noun', 'c'],
['romano', 'adjective', 'a'],
['romano', 'noun', 'a'],
['romantico', 'adjective', 'b'],
['romantico', 'noun', 'b'],
['romanzo', 'noun', 'a'],
['rombo', 'noun', 'c'],
['romeno', 'adjective', 'c'],
['romeno', 'noun', 'c'],
['rompere', 'verb', 'a'],
['rondine', 'noun', 'c'],
['ronzare', 'verb', 'c'],
['ronzio', 'noun', 'c'],
['rosa', 'noun', 'a'],
['rosa', 'adjective', 'a'],
['rosario', 'noun', 'c'],
['rosato', 'adjective', 'c'],
['rosato', 'noun', 'c'],
['roseo', 'adjective', 'c'],
['roseo', 'noun', 'c'],
['rosetta', 'noun', 'c'],
['rosmarino', 'noun', 'c'],
['rosolia', 'noun', 'c'],
['rosso', 'adjective', 'a'],
['rosso', 'noun', 'a'],
['rossore', 'noun', 'c'],
['rosticceria', 'noun', 'c'],
['rotaia', 'noun', 'c'],
['rotella', 'noun', 'c'],
['rotolare', 'verb', 'c'],
['rotondo', 'adjective', 'b'],
['rotondo', 'noun', 'b'],
['rotta', 'noun', 'b'],
['rotto', 'past_part', 'b'],
['rotto', 'adjective', 'b'],
['rotto', 'noun', 'b'],
['rottura', 'noun', 'b'],
['roulotte', 'noun', 'c'],
['rovesciare', 'verb', 'b'],
['rovescio', 'adjective', 'b'],
['rovescio', 'noun', 'b'],
['rovina', 'noun', 'b'],
['rovinare', 'verb', 'a'],
['rovo', 'noun', 'c'],
['rozzo', 'adjective', 'c'],
['rubare', 'verb', 'a'],
['rubinetto', 'noun', 'c'],
['rubrica', 'noun', 'b'],
['rude', 'adjective', 'c'],
['ruga', 'noun', 'c'],
['ruggine', 'noun', 'c'],
['ruggine', 'adjective', 'c'],
['ruggire', 'verb', 'c'],
['ruggito', 'past_part', 'c'],
['ruggito', 'noun', 'c'],
['rullo', 'noun', 'c'],
['rumeno', 'adjective', 'c'],
['rumeno', 'noun', 'c'],
['ruminante', 'pres_part', 'c'],
['ruminante', 'adjective', 'c'],
['ruminante', 'noun', 'c'],
['rumore', 'noun', 'a'],
['ruolo', 'noun', 'a'],
['ruota', 'noun', 'b'],
['ruotare', 'verb', 'b'],
['ruscello', 'noun', 'c'],
['ruspa', 'noun', 'c'],
['russare', 'verb', 'c'],
['russo', 'adjective', 'a'],
['russo', 'noun', 'a'],
['rustico', 'adjective', 'c'],
['rustico', 'noun', 'c'],
['ruttare', 'verb', 'c'],
['rutto', 'noun', 'c'],
['sabato', 'noun', 'a'],
['sabbia', 'noun', 'b'],
['sabbia', 'adjective', 'b'],
['sabotare', 'verb', 'c'],
['saccheggiare', 'verb', 'c'],
['sacchetto', 'noun', 'b'],
['sacco', 'noun', 'a'],
['sacerdote', 'noun', 'b'],
['sacrificare', 'verb', 'b'],
['sacrificio', 'noun', 'b'],
['sacro', 'adjective', 'b'],
['sacro', 'noun', 'b'],
['safari', 'noun', 'c'],
['saga', 'noun', 'b'],
['saggezza', 'noun', 'b'],
['saggio', 'adjective', 'b'],
['saggio', 'noun', 'b'],
['saggio', 'noun', 'b'],
['sagra', 'noun', 'c'],
['sagrestano', 'noun', 'c'],
['sagrestano', 'adjective', 'c'],
['sala', 'noun', 'a'],
['salame', 'noun', 'c'],
['salare', 'verb', 'c'],
['salario', 'adjective', 'b'],
['salario', 'noun', 'b'],
['salatino', 'noun', 'c'],
['salato', 'past_part', 'b'],
['salato', 'adjective', 'b'],
['salato', 'noun', 'b'],
['saldatura', 'noun', 'c'],
['sale', 'noun', 'b'],
['salice', 'noun', 'c'],
['saliera', 'noun', 'c'],
['salire', 'verb', 'a'],
['salita', 'noun', 'b'],
['saliva', 'noun', 'c'],
['salmone', 'noun', 'c'],
['salmone', 'adjective', 'c'],
['salone', 'noun', 'b'],
['salotto', 'noun', 'b'],
['salsa', 'noun', 'b'],
['salsiccia', 'noun', 'c'],
['saltare', 'verb', 'a'],
['saltellare', 'verb', 'c'],
['salto', 'noun', 'b'],
['salume', 'noun', 'c'],
['salutare', 'verb', 'a'],
['salutare', 'noun', 'a'],
['salute', 'noun', 'a'],
['salute', 'exclamation', 'a'],
['saluto', 'noun', 'a'],
['salvadanaio', 'noun', 'c'],
['salvagente', 'noun', 'c'],
['salvare', 'verb', 'a'],
['salvaslip', 'noun', 'c'],
['salvatore', 'adjective', 'b'],
['salvatore', 'noun', 'b'],
['salve', 'exclamation', 'b'],
['salvezza', 'noun', 'b'],
['salvia', 'noun', 'c'],
['salvietta', 'noun', 'c'],
['salvo', 'adjective', 'a'],
['salvo', 'preposition', 'a'],
['sandalo', 'noun', 'c'],
['sangue', 'noun', 'a'],
['sangue', 'adjective', 'a'],
['sanguinare', 'verb', 'c'],
['sanguisuga', 'noun', 'c'],
['sanità', 'noun', 'b'],
['sanitaria', 'noun', 'c'],
['sanitario', 'adjective', 'b'],
['sanitario', 'noun', 'b'],
['sano', 'adjective', 'a'],
['santo', 'adjective', 'a'],
['santo', 'noun', 'a'],
['sanzione', 'noun', 'b'],
['sapere', 'verb', 'a'],
['sapere', 'noun', 'b'],
['sapiente', 'adjective', 'c'],
['sapiente', 'noun', 'c'],
['sapone', 'noun', 'b'],
['saponetta', 'noun', 'c'],
['sapore', 'noun', 'b'],
['saporito', 'past_part', 'c'],
['saporito', 'adjective', 'c'],
['sardina', 'noun', 'c'],
['sardo', 'adjective', 'b'],
['sardo', 'noun', 'b'],
['sarto', 'noun', 'c'],
['sasso', 'noun', 'b'],
['satellite', 'noun', 'b'],
['sazio', 'past_part', 'c'],
['sazio', 'adjective', 'c'],
['sbadato', 'adjective', 'c'],
['sbadato', 'noun', 'c'],
['sbadigliare', 'verb', 'c'],
['sbadiglio', 'noun', 'c'],
['sbagliare', 'verb', 'a'],
['sbagliato', 'past_part', 'a'],
['sbagliato', 'adjective', 'a'],
['sbaglio', 'noun', 'b'],
['sbarbare', 'verb', 'c'],
['sbarcare', 'verb', 'b'],
['sbarra', 'noun', 'c'],
['sbarramento', 'noun', 'c'],
['sbattere', 'verb', 'a'],
['sberla', 'noun', 'c'],
['sbiadire', 'verb', 'c'],
['sbiancare', 'verb', 'c'],
['sbigottire', 'verb', 'c'],
['sbloccare', 'verb', 'c'],
['sboccare', 'verb', 'c'],
['sbocciare', 'verb', 'c'],
['sbocco', 'noun', 'c'],
['sbornia', 'noun', 'c'],
['sbottonare', 'verb', 'c'],
['sbriciolare', 'verb', 'c'],
['sbrigare', 'verb', 'b'],
['sbronza', 'noun', 'c'],
['sbronzo', 'adjective', 'c'],
['sbucciare', 'verb', 'c'],
['sbuffare', 'verb', 'c'],
['scacchiera', 'noun', 'c'],
['scadenza', 'noun', 'b'],
['scadere', 'verb', 'b'],
['scaffale', 'noun', 'b'],
['scafo', 'noun', 'c'],
['scala', 'noun', 'a'],
['scalare', 'verb', 'b'],
['scalata', 'noun', 'c'],
['scaldabagno', 'noun', 'c'],
['scaldare', 'verb', 'b'],
['scalinata', 'noun', 'c'],
['scalino', 'noun', 'c'],
['scalpello', 'noun', 'c'],
['scalzo', 'adjective', 'c'],
['scambiare', 'verb', 'a'],
['scambio', 'noun', 'a'],
['scamorza', 'noun', 'c'],
['scampagnata', 'noun', 'c'],
['scampo', 'noun', 'c'],
['scandalizzare', 'verb', 'c'],
['scandalo', 'noun', 'b'],
['scandire', 'verb', 'b'],
['scansare', 'verb', 'c'],
['scapito', 'noun', 'c'],
['scappamento', 'noun', 'c'],
['scappare', 'verb', 'a'],
['scappatoia', 'noun', 'c'],
['scarabocchiare', 'verb', 'c'],
['scarabocchio', 'noun', 'c'],
['scarafaggio', 'noun', 'c'],
['scarcerare', 'verb', 'c'],
['scaricare', 'verb', 'a'],
['scaricatore', 'noun', 'c'],
['scarico', 'noun', 'b'],
['scarlattina', 'noun', 'c'],
['scarpa', 'noun', 'a'],
['scarpiera', 'noun', 'c'],
['scarpone', 'noun', 'c'],
['scarso', 'adjective', 'b'],
['scartare', 'verb', 'b'],
['scatenare', 'verb', 'b'],
['scatola', 'noun', 'a'],
['scattare', 'verb', 'a'],
['scatto', 'noun', 'b'],
['scavalcare', 'verb', 'c'],
['scavare', 'verb', 'b'],
['scavo', 'noun', 'c'],
['scegliere', 'verb', 'a'],
['scelta', 'noun', 'a'],
['scemo', 'past_part', 'b'],
['scemo', 'adjective', 'b'],
['scemo', 'noun', 'b'],
['scena', 'noun', 'a'],
['scenario', 'noun', 'b'],
['scendere', 'verb', 'a'],
['sceneggiatura', 'noun', 'b'],
['sceriffo', 'noun', 'c'],
['scheda', 'noun', 'b'],
['schedario', 'noun', 'c'],
['scheggia', 'noun', 'c'],
['scheletro', 'noun', 'c'],
['schema', 'noun', 'b'],
['schermo', 'noun', 'a'],
['scherzare', 'verb', 'a'],
['scherzo', 'noun', 'b'],
['scherzoso', 'adjective', 'c'],
['schiacciare', 'verb', 'b'],
['schiacciato', 'past_part', 'c'],
['schiacciato', 'adjective', 'c'],
['schiaffo', 'noun', 'b'],
['schiavo', 'adjective', 'b'],
['schiavo', 'noun', 'b'],
['schiena', 'noun', 'a'],
['schierare', 'verb', 'b'],
['schietto', 'adjective', 'c'],
['schifo', 'noun', 'a'],
['schifo', 'adjective', 'a'],
['schiuma', 'noun', 'c'],
['schizzare', 'verb', 'b'],
['schizzo', 'noun', 'b'],
['sci', 'noun', 'b'],
['scia', 'noun', 'b'],
['sciacquare', 'verb', 'c'],
['scialle', 'noun', 'c'],
['sciame', 'noun', 'c'],
['sciare', 'verb', 'c'],
['sciarpa', 'noun', 'c'],
['sciatore', 'noun', 'c'],
['scientifico', 'adjective', 'a'],
['scientifico', 'noun', 'a'],
['scienza', 'noun', 'a'],
['scienziato', 'noun', 'b'],
['scienziato', 'adjective', 'b'],
['scimmia', 'noun', 'b'],
['scintilla', 'noun', 'b'],
['sciocchezza', 'noun', 'b'],
['sciocco', 'adjective', 'b'],
['sciocco', 'noun', 'b'],
['sciogliere', 'verb', 'b'],
['scioperare', 'verb', 'c'],
['sciopero', 'noun', 'b'],
['scirocco', 'noun', 'c'],
['sciroppo', 'noun', 'c'],
['scivolare', 'verb', 'b'],
['scivolata', 'noun', 'c'],
['scivolo', 'noun', 'c'],
['scocciare', 'verb', 'c'],
['scodella', 'noun', 'c'],
['scodinzolare', 'verb', 'c'],
['scoglio', 'noun', 'c'],
['scoiattolo', 'noun', 'c'],
['scolapiatti', 'noun', 'c'],
['scolaro', 'noun', 'c'],
['scolastico', 'adjective', 'b'],
['scolastico', 'noun', 'b'],
['scolpire', 'verb', 'c'],
['scommessa', 'noun', 'b'],
['scommettere', 'verb', 'b'],
['scomodo', 'adjective', 'c'],
['scomparire', 'verb', 'a'],
['scomparsa', 'noun', 'b'],
['scompartimento', 'noun', 'c'],
['sconfiggere', 'verb', 'b'],
['sconfitta', 'noun', 'b'],
['scongelare', 'verb', 'c'],
['sconosciuto', 'past_part', 'a'],
['sconosciuto', 'adjective', 'a'],
['sconsigliare', 'verb', 'c'],
['scontato', 'past_part', 'b'],
['scontato', 'adjective', 'b'],
['scontento', 'adjective', 'c'],
['sconto', 'noun', 'b'],
['scontrare', 'verb', 'b'],
['scontro', 'noun', 'b'],
['sconvolgere', 'verb', 'b'],
['scopa', 'noun', 'c'],
['scopare', 'verb', 'b'],
['scoperta', 'noun', 'a'],
['scopo', 'noun', 'a'],
['scoppiare', 'verb', 'a'],
['scoprire', 'verb', 'a'],
['scordare', 'verb', 'b'],
['scorgere', 'verb', 'b'],
['scorpione', 'noun', 'c'],
['scorrere', 'verb', 'a'],
['scorretto', 'adjective', 'c'],
['scorso', 'past_part', 'a'],
['scorso', 'adjective', 'a'],
['scorso', 'noun', 'a'],
['scorta', 'noun', 'b'],
['scortese', 'adjective', 'c'],
['scossa', 'noun', 'c'],
['scout', 'noun', 'c'],
['scout', 'adjective', 'c'],
['scozzese', 'adjective', 'c'],
['scozzese', 'noun', 'c'],
['screpolare', 'verb', 'c'],
['scricchiolare', 'verb', 'c'],
['scritta', 'noun', 'b'],
['scritto', 'past_part', 'b'],
['scritto', 'adjective', 'b'],
['scritto', 'noun', 'b'],
['scrittore', 'noun', 'a'],
['scrittura', 'noun', 'a'],
['scrivania', 'noun', 'b'],
['scrivere', 'verb', 'a'],
['scrofa', 'noun', 'c'],
['scrupolo', 'noun', 'c'],
['scudetto', 'noun', 'c'],
['scudo', 'noun', 'b'],
['scultore', 'noun', 'c'],
['scultura', 'noun', 'b'],
['scuola', 'noun', 'a'],
['scuotere', 'verb', 'b'],
['scure', 'noun', 'c'],
['scurire', 'verb', 'c'],
['scuro', 'adjective', 'b'],
['scuro', 'noun', 'b'],
['scuro', 'adverb', 'b'],
['scusa', 'noun', 'a'],
['scusare', 'verb', 'a'],
['sdebitarsi', 'verb', 'c'],
['sdegnare', 'verb', 'c'],
['sdraiare', 'verb', 'b'],
['sdraiato', 'past_part', 'c'],
['sdraiato', 'adjective', 'c'],
['se', 'pronoun', 'a'],
['se', 'conjunction', 'a'],
['se', 'noun', 'a'],
['sebbene', 'conjunction', 'b'],
['seccare', 'verb', 'b'],
['seccatura', 'noun', 'c'],
['secchio', 'noun', 'b'],
['secchione', 'noun', 'b'],
['secco', 'adjective', 'a'],
['secco', 'noun', 'a'],
['secolo', 'noun', 'a'],
['seconda', 'noun', 'b'],
['secondario', 'adjective', 'b'],
['secondario', 'noun', 'b'],
['secondo', 'adjective', 'a'],
['secondo', 'noun', 'a'],
['secondo', 'adverb', 'a'],
['secondo', 'preposition', 'a'],
['secondo', 'conjunction', 'a'],
['sedano', 'noun', 'c'],
['sede', 'noun', 'a'],
['sedere', 'verb', 'a'],
['sedia', 'noun', 'a'],
['sedici', 'adjective', 'b'],
['sedici', 'noun', 'b'],
['sedile', 'noun', 'b'],
['sedurre', 'verb', 'b'],
['seduta', 'noun', 'b'],
['seduttore', 'adjective', 'c'],
['seduttore', 'noun', 'c'],
['seggiolino', 'noun', 'c'],
['seggiovia', 'noun', 'c'],
['segheria', 'noun', 'c'],
['segmento', 'noun', 'b'],
['segnalare', 'verb', 'a'],
['segnalazione', 'noun', 'b'],
['segnale', 'noun', 'a'],
['segnare', 'verb', 'a'],
['segno', 'noun', 'a'],
['segretaria', 'noun', 'b'],
['segretario', 'noun', 'b'],
['segreteria', 'noun', 'b'],
['segreto', 'noun', 'a'],
['segreto', 'adjective', 'a'],
['segreto', 'noun', 'a'],
['segreto', 'adverb', 'a'],
['seguente', 'pres_part', 'a'],
['seguente', 'adjective', 'a'],
['seguente', 'noun', 'a'],
['seguire', 'verb', 'a'],
['seguito', 'noun', 'a'],
['sei', 'adjective', 'a'],
['sei', 'noun', 'a'],
['selezionare', 'verb', 'b'],
['selezione', 'noun', 'b'],
['selva', 'noun', 'c'],
['selvaggina', 'noun', 'c'],
['selvaggio', 'adjective', 'b'],
['selvaggio', 'noun', 'b'],
['semaforo', 'noun', 'c'],
['semantico', 'adjective', 'b'],
['sembrare', 'verb', 'a'],
['seme', 'noun', 'b'],
['semestre', 'noun', 'c'],
['semifreddo', 'adjective', 'c'],
['semifreddo', 'noun', 'c'],
['seminare', 'verb', 'b'],
['semmai', 'conjunction', 'b'],
['semmai', 'adverb', 'b'],
['semolino', 'noun', 'c'],
['semplice', 'adjective', 'a'],
['semplice', 'noun', 'a'],
['semplicemente', 'adverb', 'a'],
['semplicità', 'noun', 'b'],
['semplificare', 'verb', 'b'],
['sempre', 'adverb', 'a'],
['senape', 'noun', 'c'],
['senape', 'adjective', 'c'],
['senato', 'noun', 'b'],
['senatore', 'noun', 'b'],
['sennò', 'adverb', 'b'],
['seno', 'noun', 'a'],
['sensazione', 'noun', 'a'],
['sensibile', 'adjective', 'b'],
['sensibile', 'noun', 'b'],
['sensibilità', 'noun', 'b'],
['senso', 'noun', 'a'],
['sensuale', 'adjective', 'b'],
['sentenza', 'noun', 'a'],
['sentiero', 'noun', 'b'],
['sentimentale', 'adjective', 'b'],
['sentimentale', 'noun', 'b'],
['sentimento', 'noun', 'a'],
['sentire', 'verb', 'a'],
['sentito', 'past_part', 'b'],
['sentito', 'adjective', 'b'],
['senza', 'preposition', 'a'],
['senza', 'conjunction', 'a'],
['separare', 'verb', 'a'],
['separato', 'past_part', 'b'],
['separato', 'adjective', 'b'],
['separato', 'noun', 'b'],
['separazione', 'noun', 'b'],
['sepolto', 'past_part', 'b'],
['sepolto', 'adjective', 'b'],
['sepolto', 'noun', 'b'],
['seppellire', 'verb', 'b'],
['seppia', 'noun', 'c'],
['seppia', 'adjective', 'c'],
['seppia', 'noun', 'c'],
['sequenza', 'noun', 'b'],
['sequestrare', 'verb', 'b'],
['sequestro', 'noun', 'b'],
['sera', 'noun', 'a'],
['serata', 'noun', 'a'],
['serbo', 'adjective', 'c'],
['serbo', 'noun', 'c'],
['serenata', 'noun', 'c'],
['serenità', 'noun', 'b'],
['sereno', 'adjective', 'a'],
['sereno', 'noun', 'a'],
['sergente', 'noun', 'b'],
['seriamente', 'adverb', 'b'],
['serie', 'noun', 'a'],
['serietà', 'noun', 'c'],
['serio', 'adjective', 'a'],
['serio', 'noun', 'a'],
['serpente', 'noun', 'b'],
['serra', 'noun', 'b'],
['servire', 'verb', 'a'],
['servizio', 'noun', 'a'],
['servo', 'noun', 'b'],
['servo', 'adjective', 'b'],
['sessanta', 'adjective', 'b'],
['sessanta', 'noun', 'b'],
['sesso', 'noun', 'a'],
['sessuale', 'adjective', 'a'],
['sesto', 'adjective', 'b'],
['sesto', 'noun', 'b'],
['set', 'noun', 'b'],
['seta', 'noun', 'b'],
['sete', 'noun', 'b'],
['setta', 'noun', 'b'],
['settanta', 'adjective', 'b'],
['settanta', 'noun', 'b'],
['sette', 'adjective', 'a'],
['sette', 'noun', 'a'],
['settembre', 'noun', 'a'],
['settentrione', 'noun', 'c'],
['settimana', 'noun', 'a'],
['settimanale', 'adjective', 'b'],
['settimanale', 'noun', 'b'],
['settimo', 'adjective', 'b'],
['settimo', 'noun', 'b'],
['settore', 'noun', 'a'],
['severo', 'adjective', 'b'],
['sexy', 'adjective', 'b'],
['sezione', 'noun', 'a'],
['sfera', 'noun', 'b'],
['sfida', 'noun', 'a'],
['sfidare', 'verb', 'b'],
['sfiducia', 'noun', 'c'],
['sfigato', 'adjective', 'b'],
['sfigato', 'noun', 'b'],
['sfilare', 'verb', 'b'],
['sfilata', 'noun', 'b'],
['sfinire', 'verb', 'c'],
['sfiorare', 'verb', 'b'],
['sfociare', 'verb', 'c'],
['sfogare', 'verb', 'b'],
['sfoglia', 'noun', 'c'],
['sfogliare', 'verb', 'b'],
['sfogo', 'noun', 'b'],
['sfollamento', 'noun', 'c'],
['sfollare', 'verb', 'c'],
['sfondare', 'verb', 'b'],
['sfondo', 'noun', 'b'],
['sfortunato', 'adjective', 'c'],
['sforzare', 'verb', 'b'],
['sforzo', 'noun', 'a'],
['sfrenato', 'past_part', 'c'],
['sfrenato', 'adjective', 'c'],
['sfruttare', 'verb', 'a'],
['sfuggire', 'verb', 'a'],
['sgabello', 'noun', 'c'],
['sganciare', 'verb', 'c'],
['sgarbato', 'adjective', 'c'],
['sgarbato', 'noun', 'c'],
['sgarbo', 'noun', 'c'],
['sgombro', 'noun', 'c'],
['sgomento', 'noun', 'c'],
['sgonfiare', 'verb', 'c'],
['sgozzare', 'verb', 'c'],
['sgrassare', 'verb', 'c'],
['sgrassatore', 'noun', 'c'],
['sgridare', 'verb', 'c'],
['sguardo', 'noun', 'a'],
['shampoo', 'noun', 'c'],
['share', 'noun', 'b'],
['shopping', 'noun', 'b'],
['shorts', 'noun', 'c'],
['show', 'noun', 'b'],
['sì', 'adverb', 'a'],
['sì', 'noun', 'a'],
['sì', 'adjective', 'a'],
['si', 'pronoun', 'a'],
['sia', 'conjunction', 'a'],
['siamese', 'adjective', 'c'],
['siamese', 'noun', 'c'],
['sicché', 'conjunction', 'b'],
['siccità', 'noun', 'c'],
['siccome', 'conjunction', 'a'],
['siccome', 'adverb', 'a'],
['siciliano', 'adjective', 'b'],
['siciliano', 'noun', 'b'],
['sicuramente', 'adverb', 'a'],
['sicurezza', 'noun', 'a'],
['sicuro', 'adjective', 'a'],
['sicuro', 'noun', 'a'],
['sicuro', 'adverb', 'a'],
['siepe', 'noun', 'c'],
['sigaretta', 'noun', 'a'],
['sigaro', 'noun', 'c'],
['sigla', 'noun', 'b'],
['significare', 'verb', 'a'],
['significativo', 'adjective', 'b'],
['significato', 'past_part', 'a'],
['significato', 'noun', 'a'],
['signora', 'noun', 'a'],
['signore', 'noun', 'a'],
['signorina', 'noun', 'a'],
['silenzio', 'noun', 'a'],
['silenzioso', 'adjective', 'b'],
['sillaba', 'noun', 'c'],
['simbolico', 'adjective', 'b'],
['simbolo', 'noun', 'a'],
['simile', 'adjective', 'a'],
['simile', 'adjective', 'a'],
['simile', 'noun', 'a'],
['simile', 'adverb', 'a'],
['simpatia', 'noun', 'b'],
['simpatico', 'adjective', 'a'],
['simulare', 'verb', 'b'],
['sinceramente', 'adverb', 'b'],
['sincero', 'adjective', 'b'],
['sindacale', 'adjective', 'b'],
['sindacato', 'noun', 'b'],
['sindaco', 'noun', 'b'],
['sindrome', 'noun', 'b'],
['single', 'noun', 'b'],
['singolare', 'adjective', 'b'],
['singolare', 'noun', 'b'],
['singolo', 'adjective', 'a'],
['singolo', 'noun', 'a'],
['sinistra', 'noun', 'a'],
['sinistro', 'adjective', 'a'],
['sinistro', 'noun', 'a'],
['sino', 'preposition', 'a'],
['sino', 'adverb', 'a'],
['sinonimo', 'noun', 'b'],
['sintesi', 'noun', 'b'],
['sintetico', 'adjective', 'b'],
['sintetizzare', 'verb', 'b'],
['sintomo', 'noun', 'b'],
['sir', 'noun', 'b'],
['siriano', 'adjective', 'c'],
['siriano', 'noun', 'c'],
['siringa', 'noun', 'c'],
['sistema', 'noun', 'a'],
['sistemare', 'verb', 'a'],
['sito', 'noun', 'a'],
['sito', 'adjective', 'a'],
['situare', 'verb', 'b'],
['situazione', 'noun', 'a'],
['slacciare', 'verb', 'c'],
['slanciato', 'past_part', 'c'],
['slanciato', 'adjective', 'c'],
['slavo', 'adjective', 'c'],
['slavo', 'noun', 'c'],
['slegare', 'verb', 'c'],
['slip', 'noun', 'c'],
['slitta', 'noun', 'c'],
['slogan', 'noun', 'b'],
['slogare', 'verb', 'c'],
['slogatura', 'noun', 'c'],
['slovacco', 'adjective', 'c'],
['slovacco', 'noun', 'c'],
['sloveno', 'adjective', 'c'],
['sloveno', 'noun', 'c'],
['smacchiare', 'verb', 'c'],
['smacchiatore', 'adjective', 'c'],
['smacchiatore', 'noun', 'c'],
['smaltimento', 'noun', 'b'],
['smalto', 'noun', 'c'],
['smascherare', 'verb', 'c'],
['smentire', 'verb', 'b'],
['smettere', 'verb', 'a'],
['smisurato', 'past_part', 'c'],
['smisurato', 'adjective', 'c'],
['smog', 'noun', 'c'],
['smontare', 'verb', 'b'],
['smorfia', 'noun', 'c'],
['smuovere', 'verb', 'c'],
['snack', 'noun', 'c'],
['sneaker', 'noun', 'c'],
['snello', 'adjective', 'c'],
['soccorrere', 'verb', 'c'],
['soccorso', 'noun', 'b'],
['socialdemocratico', 'adjective', 'c'],
['socialdemocratico', 'noun', 'c'],
['sociale', 'adjective', 'a'],
['sociale', 'noun', 'a'],
['socialista', 'adjective', 'b'],
['socialista', 'noun', 'b'],
['società', 'noun', 'a'],
['socievole', 'adjective', 'c'],
['socio', 'noun', 'b'],
['soddisfare', 'verb', 'a'],
['soddisfatto', 'past_part', 'b'],
['soddisfatto', 'adjective', 'b'],
['soddisfazione', 'noun', 'a'],
['sodo', 'adjective', 'b'],
['sodo', 'noun', 'b'],
['sodo', 'adverb', 'b'],
['sofà', 'noun', 'c'],
['sofferenza', 'noun', 'a'],
['soffermare', 'verb', 'b'],
['soffiare', 'verb', 'b'],
['soffice', 'adjective', 'c'],
['soffitta', 'noun', 'c'],
['soffitto', 'noun', 'b'],
['soffocare', 'verb', 'b'],
['soffriggere', 'verb', 'c'],
['soffrire', 'verb', 'a'],
['sofisticato', 'past_part', 'b'],
['sofisticato', 'adjective', 'b'],
['software', 'noun', 'b'],
['soggettivo', 'adjective', 'b'],
['soggetto', 'noun', 'a'],
['soggetto', 'adjective', 'b'],
['soggezione', 'noun', 'c'],
['soggiorno', 'noun', 'a'],
['soglia', 'noun', 'b'],
['sogliola', 'noun', 'c'],
['sognare', 'verb', 'a'],
['sogno', 'noun', 'a'],
['sol', 'noun', 'c'],
['solaio', 'noun', 'c'],
['solamente', 'adverb', 'a'],
['solamente', 'conjunction', 'a'],
['solare', 'adjective', 'b'],
['solare', 'noun', 'b'],
['solco', 'noun', 'b'],
['soldato', 'noun', 'a'],
['soldo', 'noun', 'a'],
['sole', 'noun', 'a'],
['solenne', 'adjective', 'b'],
['solidarietà', 'noun', 'b'],
['solido', 'adjective', 'b'],
['solido', 'noun', 'b'],
['solitamente', 'adverb', 'b'],
['solitario', 'adjective', 'b'],
['solitario', 'noun', 'b'],
['solito', 'adjective', 'a'],
['solito', 'noun', 'a'],
['solitudine', 'noun', 'b'],
['solletico', 'noun', 'c'],
['sollevare', 'verb', 'a'],
['sollievo', 'noun', 'b'],
['solo', 'adjective', 'a'],
['solo', 'noun', 'a'],
['solo', 'adverb', 'a'],
['solo', 'conjunction', 'a'],
['soltanto', 'adverb', 'a'],
['soltanto', 'conjunction', 'a'],
['soluzione', 'noun', 'a'],
['somigliare', 'verb', 'b'],
['somma', 'noun', 'a'],
['sommare', 'verb', 'b'],
['sondaggio', 'noun', 'a'],
['sonno', 'noun', 'a'],
['sonoro', 'adjective', 'b'],
['sonoro', 'noun', 'b'],
['soppalco', 'noun', 'c'],
['sopportare', 'verb', 'a'],
['sopra', 'preposition', 'a'],
['sopra', 'adverb', 'a'],
['sopra', 'adjective', 'a'],
['sopra', 'noun', 'a'],
['soprabito', 'noun', 'c'],
['sopracciglio', 'noun', 'c'],
['soprammobile', 'noun', 'c'],
['soprannome', 'noun', 'c'],
['soprattutto', 'adverb', 'a'],
['sopravvalutare', 'verb', 'c'],
['sopravvivenza', 'noun', 'b'],
['sopravvivere', 'verb', 'a'],
['sorcio', 'noun', 'c'],
['sordo', 'adjective', 'b'],
['sordo', 'noun', 'b'],
['sorella', 'noun', 'a'],
['sorgente', 'pres_part', 'b'],
['sorgente', 'adjective', 'b'],
['sorgente', 'noun', 'b'],
['sorgere', 'verb', 'b'],
['sorpassare', 'verb', 'c'],
['sorpasso', 'noun', 'c'],
['sorprendente', 'pres_part', 'b'],
['sorprendente', 'adjective', 'b'],
['sorprendere', 'verb', 'b'],
['sorpresa', 'noun', 'a'],
['sorridente', 'pres_part', 'c'],
['sorridente', 'adjective', 'c'],
['sorridere', 'verb', 'a'],
['sorriso', 'noun', 'a'],
['sorso', 'noun', 'c'],
['sorta', 'noun', 'a'],
['sorte', 'noun', 'b'],
['sorteggiare', 'verb', 'c'],
['sorteggio', 'noun', 'c'],
['sorvegliare', 'verb', 'b'],
['sospendere', 'verb', 'b'],
['sospensione', 'noun', 'b'],
['sospeso', 'past_part', 'b'],
['sospeso', 'adjective', 'b'],
['sospeso', 'noun', 'b'],
['sospettare', 'verb', 'b'],
['sospetto', 'noun', 'a'],
['sospetto', 'adjective', 'a'],
['sospetto', 'noun', 'a'],
['sospirare', 'verb', 'b'],
['sospiro', 'noun', 'b'],
['sosta', 'noun', 'b'],
['sostanza', 'noun', 'a'],
['sostanzialmente', 'adverb', 'b'],
['sostare', 'verb', 'c'],
['sostegno', 'noun', 'b'],
['sostenere', 'verb', 'a'],
['sostenitore', 'adjective', 'b'],
['sostenitore', 'noun', 'b'],
['sostituire', 'verb', 'a'],
['sostituzione', 'noun', 'b'],
['sottaceto', 'adjective', 'c'],
['sottaceto', 'adverb', 'c'],
['sottaceto', 'noun', 'c'],
['sotterraneo', 'adjective', 'b'],
['sotterraneo', 'noun', 'b'],
['sottile', 'adjective', 'a'],
['sottile', 'noun', 'a'],
['sottile', 'adverb', 'a'],
['sottinteso', 'past_part', 'c'],
['sottinteso', 'adjective', 'c'],
['sottinteso', 'noun', 'c'],
['sotto', 'preposition', 'a'],
['sotto', 'adverb', 'a'],
['sotto', 'adjective', 'a'],
['sotto', 'noun', 'a'],
['sottofondo', 'noun', 'b'],
['sottolineare', 'verb', 'a'],
['sottolio', 'adverb', 'c'],
['sottolio', 'adjective', 'c'],
['sottomarino', 'adjective', 'c'],
['sottomarino', 'noun', 'c'],
['sottopassaggio', 'noun', 'c'],
['sottoporre', 'verb', 'a'],
['sottoscrivere', 'verb', 'b'],
['sottovalutare', 'verb', 'b'],
['sottrarre', 'verb', 'b'],
['sovietico', 'adjective', 'b'],
['sovietico', 'noun', 'b'],
['sovrano', 'adjective', 'b'],
['sovrano', 'noun', 'b'],
['sovrapporre', 'verb', 'b'],
['spaccare', 'verb', 'b'],
['spaccatura', 'noun', 'c'],
['spacciare', 'verb', 'b'],
['spacciatore', 'noun', 'c'],
['spaccio', 'noun', 'c'],
['spada', 'noun', 'b'],
['spaghetto', 'noun', 'b'],
['spagnolo', 'adjective', 'a'],
['spagnolo', 'noun', 'a'],
['spago', 'noun', 'c'],
['spalancare', 'verb', 'b'],
['spalla', 'noun', 'a'],
['spalmabile', 'adjective', 'c'],
['spalmare', 'verb', 'c'],
['spam', 'noun', 'b'],
['sparare', 'verb', 'a'],
['sparecchiare', 'verb', 'c'],
['spargere', 'verb', 'b'],
['sparire', 'verb', 'a'],
['sparo', 'noun', 'b'],
['sparso', 'past_part', 'b'],
['sparso', 'adjective', 'b'],
['spassare', 'verb', 'b'],
['spasso', 'noun', 'c'],
['spavaldo', 'adjective', 'c'],
['spaventare', 'verb', 'a'],
['spaventato', 'past_part', 'b'],
['spaventato', 'adjective', 'b'],
['spaventoso', 'adjective', 'b'],
['spaziale', 'adjective', 'b'],
['spazio', 'noun', 'a'],
['spazioso', 'adjective', 'c'],
['spazzare', 'verb', 'b'],
['spazzatura', 'noun', 'b'],
['spazzino', 'noun', 'c'],
['spazzola', 'noun', 'c'],
['spazzolare', 'verb', 'c'],
['spazzolino', 'noun', 'c'],
['spazzolone', 'noun', 'c'],
['specchiarsi', 'verb', 'c'],
['specchio', 'noun', 'a'],
['speciale', 'adjective', 'a'],
['speciale', 'noun', 'a'],
['specialista', 'noun', 'b'],
['specializzato', 'past_part', 'b'],
['specializzato', 'adjective', 'b'],
['specializzato', 'noun', 'b'],
['specialmente', 'adverb', 'b'],
['specie', 'noun', 'a'],
['specie', 'adverb', 'a'],
['specificare', 'verb', 'b'],
['specifico', 'adjective', 'a'],
['specifico', 'noun', 'a'],
['speck', 'noun', 'c'],
['spedire', 'verb', 'b'],
['spedizione', 'noun', 'b'],
['spegnere', 'verb', 'a'],
['spellare', 'verb', 'c'],
['spendere', 'verb', 'a'],
['spennare', 'verb', 'c'],
['spensierato', 'adjective', 'c'],
['spento', 'past_part', 'b'],
['spento', 'adjective', 'b'],
['speranza', 'noun', 'a'],
['sperare', 'verb', 'a'],
['sperimentale', 'adjective', 'b'],
['sperimentare', 'verb', 'b'],
['sperimentazione', 'noun', 'b'],
['sperone', 'noun', 'c'],
['spesa', 'noun', 'a'],
['spesso', 'adjective', 'b'],
['spesso', 'adverb', 'a'],
['spessore', 'noun', 'b'],
['spettacolare', 'adjective', 'b'],
['spettacolo', 'noun', 'a'],
['spettare', 'verb', 'b'],
['spettatore', 'noun', 'b'],
['spettinare', 'verb', 'c'],
['spettro', 'noun', 'b'],
['spezia', 'noun', 'c'],
['spezzare', 'verb', 'b'],
['spia', 'noun', 'b'],
['spiacere', 'verb', 'b'],
['spiaggia', 'noun', 'a'],
['spianare', 'verb', 'c'],
['spiare', 'verb', 'b'],
['spiazzo', 'noun', 'c'],
['spiccare', 'verb', 'b'],
['spicciolo', 'adjective', 'c'],
['spicciolo', 'noun', 'c'],
['spiedino', 'noun', 'c'],
['spiedo', 'noun', 'c'],
['spiegare', 'verb', 'a'],
['spiegazione', 'noun', 'a'],
['spietato', 'adjective', 'b'],
['spiga', 'noun', 'c'],
['spigolo', 'noun', 'c'],
['spillo', 'noun', 'c'],
['spina', 'noun', 'b'],
['spinacio', 'noun', 'c'],
['spingere', 'verb', 'a'],
['spinta', 'noun', 'b'],
['spionaggio', 'noun', 'c'],
['spirito', 'noun', 'a'],
['spiritoso', 'adjective', 'c'],
['spirituale', 'adjective', 'b'],
['spirituale', 'noun', 'b'],
['splendente', 'pres_part', 'c'],
['splendente', 'adjective', 'c'],
['splendere', 'verb', 'b'],
['splendido', 'adjective', 'b'],
['splendore', 'noun', 'b'],
['spogliare', 'verb', 'b'],
['spogliatoio', 'noun', 'c'],
['spoglio', 'noun', 'c'],
['spolverare', 'verb', 'c'],
['sponda', 'noun', 'b'],
['spontaneo', 'adjective', 'b'],
['sporcare', 'verb', 'b'],
['sporcizia', 'noun', 'c'],
['sporco', 'adjective', 'a'],
['sporco', 'noun', 'a'],
['sporgente', 'pres_part', 'c'],
['sporgente', 'adjective', 'c'],
['sporgente', 'noun', 'c'],
['sporgere', 'verb', 'b'],
['sport', 'noun', 'a'],
['sport', 'adjective', 'a'],
['sportello', 'noun', 'b'],
['sportivo', 'adjective', 'a'],
['sportivo', 'noun', 'a'],
['sposare', 'verb', 'a'],
['sposato', 'past_part', 'b'],
['sposato', 'adjective', 'b'],
['sposato', 'noun', 'b'],
['sposo', 'noun', 'b'],
['spostamento', 'noun', 'b'],
['spostare', 'verb', 'a'],
['spot', 'noun', 'b'],
['spranga', 'noun', 'c'],
['spray', 'adjective', 'c'],
['spray', 'noun', 'c'],
['sprecare', 'verb', 'b'],
['spreco', 'noun', 'c'],
['spremere', 'verb', 'c'],
['spremuta', 'noun', 'c'],
['sprofondare', 'verb', 'b'],
['sproposito', 'noun', 'c'],
['spruzzare', 'verb', 'c'],
['spuma', 'noun', 'c'],
['spumante', 'pres_part', 'c'],
['spumante', 'adjective', 'c'],
['spumante', 'noun', 'c'],
['spuntare', 'verb', 'b'],
['spuntino', 'noun', 'c'],
['spunto', 'noun', 'b'],
['sputare', 'verb', 'b'],
['sputo', 'noun', 'c'],
['squadra', 'noun', 'a'],
['squallido', 'adjective', 'c'],
['squalo', 'noun', 'c'],
['squarcio', 'noun', 'c'],
['squillare', 'verb', 'b'],
['squisito', 'adjective', 'c'],
['stabile', 'adjective', 'b'],
['stabile', 'noun', 'b'],
['stabilire', 'verb', 'a'],
['stabilità', 'noun', 'b'],
['staccare', 'verb', 'a'],
['stacco', 'noun', 'c'],
['stadio', 'noun', 'b'],
['staffa', 'noun', 'c'],
['stagione', 'noun', 'a'],
['stagno', 'noun', 'c'],
['stalla', 'noun', 'b'],
['stallone', 'noun', 'c'],
['stamattina', 'adverb', 'b'],
['stampa', 'noun', 'a'],
['stampare', 'verb', 'b'],
['stampatello', 'noun', 'c'],
['stampato', 'past_part', 'b'],
['stampato', 'adjective', 'b'],
['stampato', 'noun', 'b'],
['stampella', 'noun', 'c'],
['stampo', 'noun', 'c'],
['stancare', 'verb', 'b'],
['stanchezza', 'noun', 'b'],
['stanco', 'adjective', 'a'],
['standard', 'noun', 'b'],
['standard', 'adjective', 'b'],
['stanga', 'noun', 'c'],
['stanotte', 'adverb', 'b'],
['stanza', 'noun', 'a'],
['star', 'noun', 'b'],
['stare', 'verb', 'a'],
['stasera', 'adverb', 'a'],
['statale', 'adjective', 'b'],
['statale', 'noun', 'b'],
['statistica', 'noun', 'b'],
['statistico', 'adjective', 'b'],
['statistico', 'noun', 'b'],
['stato', 'noun', 'a'],
['stato', 'noun', 'a'],
['statua', 'noun', 'b'],
['statunitense', 'adjective', 'b'],
['statunitense', 'noun', 'b'],
['status', 'noun', 'b'],
['stavolta', 'adverb', 'b'],
['stazione', 'noun', 'a'],
['stella', 'noun', 'a'],
['stellare', 'adjective', 'b'],
['stendere', 'verb', 'b'],
['stendibiancheria', 'noun', 'c'],
['stereo', 'adjective', 'c'],
['stereo', 'noun', 'c'],
['sterlina', 'noun', 'b'],
['sterzare', 'verb', 'c'],
['sterzo', 'noun', 'c'],
['stesso', 'adjective', 'a'],
['stesso', 'pronoun', 'a'],
['stile', 'noun', 'a'],
['stima', 'noun', 'b'],
['stimare', 'verb', 'b'],
['stimolare', 'verb', 'b'],
['stimolo', 'noun', 'b'],
['stinco', 'noun', 'c'],
['stipendiare', 'verb', 'c'],
['stipendio', 'noun', 'a'],
['stirare', 'verb', 'b'],
['stivaletto', 'noun', 'c'],
['stoffa', 'noun', 'b'],
['stomaco', 'noun', 'b'],
['stonare', 'verb', 'c'],
['stop', 'loc-comando', 'c'],
['stop', 'noun', 'c'],
['stoppa', 'noun', 'c'],
['storcere', 'verb', 'c'],
['storia', 'noun', 'a'],
['storico', 'adjective', 'a'],
['storico', 'noun', 'a'],
['stornello', 'noun', 'c'],
['storta', 'noun', 'c'],
['storto', 'past_part', 'b'],
['storto', 'adjective', 'b'],
['storto', 'adverb', 'b'],
['storto', 'noun', 'b'],
['stoviglia', 'noun', 'c'],
['stracchino', 'noun', 'c'],
['straccio', 'noun', 'b'],
['strada', 'noun', 'a'],
['stradale', 'adjective', 'b'],
['stradale', 'noun', 'b'],
['strage', 'noun', 'b'],
['strangolare', 'verb', 'c'],
['straniero', 'adjective', 'a'],
['straniero', 'noun', 'a'],
['strano', 'adjective', 'a'],
['straordinario', 'adjective', 'a'],
['straordinario', 'noun', 'a'],
['strappare', 'verb', 'b'],
['strategia', 'noun', 'a'],
['strategico', 'adjective', 'b'],
['strato', 'noun', 'b'],
['strega', 'noun', 'a'],
['stregare', 'verb', 'b'],
['stregone', 'noun', 'c'],
['stress', 'noun', 'b'],
['stretta', 'noun', 'b'],
['strettamente', 'adverb', 'b'],
['stretto', 'past_part', 'a'],
['stretto', 'adjective', 'a'],
['stretto', 'noun', 'a'],
['strillare', 'verb', 'b'],
['strillo', 'noun', 'c'],
['stringa', 'noun', 'c'],
['stringere', 'verb', 'a'],
['striscia', 'noun', 'b'],
['strisciare', 'verb', 'b'],
['strofinaccio', 'noun', 'c'],
['stronzata', 'noun', 'b'],
['stronzo', 'noun', 'a'],
['stronzo', 'adjective', 'a'],
['strumento', 'noun', 'a'],
['strutto', 'past_part', 'c'],
['strutto', 'adjective', 'c'],
['strutto', 'noun', 'c'],
['struttura', 'noun', 'a'],
['strutturale', 'adjective', 'b'],
['struzzo', 'noun', 'c'],
['studente', 'noun', 'a'],
['studiare', 'verb', 'a'],
['studio', 'noun', 'a'],
['studioso', 'adjective', 'b'],
['studioso', 'noun', 'b'],
['stufa', 'noun', 'c'],
['stuoia', 'noun', 'c'],
['stupefacente', 'pres_part', 'b'],
['stupefacente', 'adjective', 'b'],
['stupefacente', 'noun', 'b'],
['stupendo', 'adjective', 'b'],
['stupido', 'adjective', 'a'],
['stupido', 'noun', 'a'],
['stupire', 'verb', 'b'],
['stupito', 'past_part', 'b'],
['stupito', 'adjective', 'b'],
['stupore', 'noun', 'b'],
['stuzzicadenti', 'noun', 'c'],
['stuzzicare', 'verb', 'c'],
['style', 'noun', 'b'],
['su', 'preposition', 'a'],
['su', 'adverb', 'a'],
['su', 'exclamation', 'a'],
['su', 'noun', 'a'],
['subire', 'verb', 'a'],
['subito', 'adverb', 'a'],
['succedere', 'verb', 'a'],
['successione', 'noun', 'b'],
['successivamente', 'adverb', 'b'],
['successivo', 'adjective', 'a'],
['successo', 'noun', 'a'],
['succhiare', 'verb', 'b'],
['succo', 'noun', 'b'],
['sud', 'noun', 'a'],
['sud', 'adjective', 'a'],
['sudamericano', 'adjective', 'c'],
['sudamericano', 'noun', 'c'],
['sudare', 'verb', 'b'],
['sudato', 'past_part', 'c'],
['sudato', 'adjective', 'c'],
['suddito', 'noun', 'b'],
['suddito', 'adjective', 'b'],
['suddividere', 'verb', 'b'],
['sudicio', 'adjective', 'c'],
['sudicio', 'noun', 'c'],
['sudore', 'noun', 'b'],
['sudtirolese', 'adjective', 'c'],
['sudtirolese', 'noun', 'c'],
['sufficiente', 'adjective', 'a'],
['suggerimento', 'noun', 'b'],
['suggerire', 'verb', 'a'],
['suggestivo', 'adjective', 'b'],
['sughero', 'noun', 'c'],
['sugo', 'noun', 'b'],
['suicidio', 'noun', 'b'],
['suino', 'noun', 'c'],
['suino', 'adjective', 'c'],
['suo', 'adjective', 'a'],
['suo', 'pronoun', 'a'],
['suocera', 'noun', 'c'],
['suocero', 'noun', 'c'],
['suola', 'noun', 'c'],
['suolo', 'noun', 'b'],
['suonare', 'verb', 'a'],
['suono', 'noun', 'a'],
['suora', 'noun', 'a'],
['super', 'adjective', 'b'],
['super', 'noun', 'b'],
['superare', 'verb', 'a'],
['superbia', 'noun', 'c'],
['superficiale', 'adjective', 'b'],
['superficie', 'noun', 'a'],
['superiore', 'adjective', 'a'],
['superiore', 'noun', 'a'],
['supermercato', 'noun', 'b'],
['supporre', 'verb', 'b'],
['supportare', 'verb', 'b'],
['supporto', 'noun', 'a'],
['supremo', 'adjective', 'b'],
['surgelato', 'past_part', 'c'],
['surgelato', 'adjective', 'c'],
['surgelato', 'noun', 'c'],
['suscitare', 'verb', 'b'],
['susina', 'noun', 'c'],
['susino', 'noun', 'c'],
['susseguirsi', 'verb', 'c'],
['sussurrare', 'verb', 'b'],
['svanire', 'verb', 'b'],
['svedese', 'adjective', 'c'],
['svedese', 'noun', 'c'],
['sveglia', 'noun', 'c'],
['svegliare', 'verb', 'a'],
['svegliarsi', 'verb', 'c'],
['sveglio', 'past_part', 'b'],
['sveglio', 'adjective', 'b'],
['svelare', 'verb', 'b'],
['svelto', 'adjective', 'c'],
['svenire', 'verb', 'b'],
['sventola', 'noun', 'c'],
['sviluppare', 'verb', 'a'],
['sviluppato', 'past_part', 'b'],
['sviluppato', 'adjective', 'b'],
['sviluppo', 'noun', 'a'],
['svizzero', 'adjective', 'b'],
['svizzero', 'noun', 'b'],
['svolazzare', 'verb', 'c'],
['svolgere', 'verb', 'a'],
['svolgimento', 'noun', 'c'],
['svolta', 'noun', 'b'],
['svuotare', 'verb', 'b'],
['tabaccaio', 'noun', 'c'],
['tabella', 'noun', 'b'],
['tacca', 'noun', 'c'],
['tacchino', 'noun', 'c'],
['tacco', 'noun', 'b'],
['tacere', 'verb', 'a'],
['tacere', 'noun', 'a'],
['tag', 'noun', 'b'],
['taglia', 'noun', 'b'],
['tagliare', 'verb', 'a'],
['tagliatella', 'noun', 'c'],
['tagliato', 'past_part', 'b'],
['tagliato', 'adjective', 'b'],
['tagliere', 'noun', 'c'],
['taglio', 'noun', 'a'],
['tagliola', 'noun', 'c'],
['talco', 'noun', 'c'],
['tale', 'adjective', 'a'],
['tale', 'pronoun', 'a'],
['tale', 'adverb', 'a'],
['taleggio', 'noun', 'c'],
['talento', 'noun', 'b'],
['talmente', 'adverb', 'a'],
['talpa', 'noun', 'c'],
['talpa', 'adjective', 'c'],
['talpa', 'noun', 'c'],
['talvolta', 'adverb', 'b'],
['tamburo', 'noun', 'c'],
['tamponare', 'verb', 'c'],
['tangente', 'pres_part', 'b'],
['tangente', 'adjective', 'b'],
['tangente', 'noun', 'b'],
['tanto', 'adjective', 'a'],
['tanto', 'pronoun', 'a'],
['tanto', 'noun', 'a'],
['tanto', 'adverb', 'a'],
['tanto', 'conjunction', 'a'],
['tappa', 'noun', 'b'],
['tappare', 'verb', 'b'],
['tappetino', 'noun', 'c'],
['tappeto', 'noun', 'b'],
['tappezzare', 'verb', 'c'],
['tappo', 'noun', 'c'],
['tarallo', 'noun', 'c'],
['tarantella', 'noun', 'c'],
['tardi', 'adverb', 'a'],
['tardo', 'adjective', 'a'],
['tardo', 'adverb', 'a'],
['targa', 'noun', 'b'],
['tariffa', 'noun', 'b'],
['tarlo', 'noun', 'c'],
['tartaruga', 'noun', 'c'],
['tartufo', 'noun', 'c'],
['tasca', 'noun', 'a'],
['tassa', 'noun', 'a'],
['tassare', 'verb', 'c'],
['tassello', 'noun', 'c'],
['tasso', 'noun', 'b'],
['tastiera', 'noun', 'b'],
['tasto', 'noun', 'b'],
['tatto', 'noun', 'c'],
['tatuaggio', 'noun', 'b'],
['taverna', 'noun', 'c'],
['tavola', 'noun', 'a'],
['tavoletta', 'noun', 'c'],
['tavolino', 'noun', 'b'],
['tavolo', 'noun', 'a'],
['taxi', 'noun', 'b'],
['tazza', 'noun', 'b'],
['tè', 'noun', 'b'],
['te', 'pronoun', 'noun'],
['te', 'team', 'noun'],
['teatrale', 'adjective', 'b'],
['teatro', 'noun', 'a'],
['tecnica', 'noun', 'a'],
['tecnicamente', 'adverb', 'b'],
['tecnico', 'adjective', 'a'],
['tecnico', 'noun', 'a'],
['tecnologia', 'noun', 'a'],
['tecnologico', 'adjective', 'b'],
['tedesco', 'adjective', 'a'],
['tedesco', 'noun', 'a'],
['tegame', 'noun', 'c'],
['teglia', 'noun', 'c'],
['tegola', 'noun', 'c'],
['tela', 'noun', 'b'],
['telaio', 'noun', 'c'],
['telecamera', 'noun', 'b'],
['telecomandato', 'past_part', 'c'],
['telecomandato', 'adjective', 'c'],
['telecronaca', 'noun', 'c'],
['telecronista', 'noun', 'c'],
['telefilm', 'noun', 'b'],
['telefonare', 'verb', 'a'],
['telefonata', 'noun', 'a'],
['telefonico', 'adjective', 'a'],
['telefonino', 'noun', 'b'],
['telefono', 'noun', 'a'],
['telegiornale', 'noun', 'b'],
['telegrafico', 'adjective', 'c'],
['telegrafo', 'noun', 'c'],
['telegramma', 'noun', 'c'],
['telescopio', 'noun', 'b'],
['televisione', 'noun', 'a'],
['televisivo', 'adjective', 'a'],
['televisore', 'noun', 'b'],
['tema', 'noun', 'a'],
['temere', 'verb', 'a'],
['temperatura', 'noun', 'a'],
['tempesta', 'noun', 'b'],
['tempio', 'noun', 'b'],
['tempo', 'noun', 'a'],
['temporale', 'noun', 'b'],
['temporaneo', 'adjective', 'b'],
['tenaglia', 'noun', 'c'],
['tenda', 'noun', 'a'],
['tendenza', 'noun', 'a'],
['tendere', 'verb', 'a'],
['tenebra', 'noun', 'c'],
['tenente', 'noun', 'b'],
['tenere', 'verb', 'a'],
['tenerezza', 'noun', 'b'],
['tenero', 'adjective', 'b'],
['tenero', 'noun', 'b'],
['tennis', 'noun', 'b'],
['tensione', 'noun', 'a'],
['tentare', 'verb', 'a'],
['tentativo', 'noun', 'a'],
['tentazione', 'noun', 'b'],
['tenuta', 'noun', 'b'],
['teologia', 'noun', 'b'],
['teologo', 'noun', 'b'],
['teoria', 'noun', 'a'],
['teorico', 'adjective', 'b'],
['teorico', 'noun', 'b'],
['terapia', 'noun', 'a'],
['tergicristallo', 'noun', 'c'],
['terminale', 'adjective', 'b'],
['terminale', 'noun', 'b'],
['terminare', 'verb', 'a'],
['termine', 'noun', 'a'],
['termosifone', 'noun', 'c'],
['terra', 'noun', 'a'],
['terrazzo', 'noun', 'b'],
['terremoto', 'noun', 'b'],
['terreno', 'noun', 'a'],
['terrestre', 'adjective', 'b'],
['terrestre', 'noun', 'b'],
['terribile', 'adjective', 'a'],
['terriccio', 'noun', 'c'],
['territoriale', 'adjective', 'b'],
['territoriale', 'noun', 'b'],
['territorio', 'noun', 'a'],
['terrore', 'noun', 'b'],
['terrorismo', 'noun', 'b'],
['terrorista', 'adjective', 'b'],
['terrorista', 'noun', 'b'],
['terrorizzare', 'verb', 'b'],
['terzo', 'adjective', 'a'],
['terzo', 'noun', 'a'],
['teschio', 'noun', 'b'],
['tesi', 'noun', 'a'],
['teso', 'past_part', 'b'],
['teso', 'adjective', 'b'],
['tesoro', 'noun', 'a'],
['tessera', 'noun', 'b'],
['tessile', 'adjective', 'c'],
['tessile', 'noun', 'c'],
['tessuto', 'past_part', 'b'],
['tessuto', 'adjective', 'b'],
['tessuto', 'noun', 'b'],
['test', 'noun', 'a'],
['testa', 'noun', 'a'],
['testamento', 'noun', 'b'],
['testare', 'verb', 'b'],
['testimone', 'noun', 'a'],
['testimonianza', 'noun', 'b'],
['testimoniare', 'verb', 'b'],
['testo', 'noun', 'a'],
['tetta', 'noun', 'b'],
['tetto', 'noun', 'a'],
['tettoia', 'noun', 'c'],
['tg', 'sigla', 'b'],
['thermos', 'noun', 'c'],
['ti', 'noun', 'c'],
['ti', 'pronoun', 'a'],
['tic', 'noun', 'c'],
['ticchettio', 'noun', 'c'],
['tifare', 'verb', 'b'],
['tifo', 'noun', 'c'],
['tifoso', 'adjective', 'b'],
['tifoso', 'noun', 'b'],
['tigre', 'noun', 'b'],
['timbro', 'noun', 'c'],
['timidezza', 'noun', 'c'],
['timido', 'adjective', 'b'],
['timido', 'noun', 'b'],
['timone', 'noun', 'c'],
['timoniere', 'noun', 'c'],
['timore', 'noun', 'b'],
['tinello', 'noun', 'c'],
['tino', 'noun', 'c'],
['tipico', 'adjective', 'a'],
['tipo', 'noun', 'a'],
['tipologia', 'noun', 'b'],
['tiramisù', 'noun', 'c'],
['tiranno', 'noun', 'c'],
['tiranno', 'adjective', 'c'],
['tirare', 'verb', 'a'],
['tiro', 'noun', 'b'],
['tirocinio', 'noun', 'b'],
['tirrenico', 'adjective', 'c'],
['tisana', 'noun', 'c'],
['titolare', 'adjective', 'b'],
['titolare', 'noun', 'b'],
['titolo', 'noun', 'a'],
['tivù', 'noun', 'a'],
['tizio', 'noun', 'b'],
['toast', 'noun', 'c'],
['toccare', 'verb', 'a'],
['tocco', 'noun', 'b'],
['togliere', 'verb', 'a'],
['toilette', 'noun', 'c'],
['toletta', 'noun', 'c'],
['tolleranza', 'noun', 'b'],
['tollerare', 'verb', 'b'],
['tomba', 'noun', 'b'],
['tombola', 'noun', 'c'],
['tonaca', 'noun', 'c'],
['tondo', 'adjective', 'b'],
['tondo', 'noun', 'b'],
['tonnellata', 'noun', 'b'],
['tonno', 'noun', 'c'],
['tono', 'noun', 'a'],
['tonsilla', 'noun', 'c'],
['top', 'noun', 'b'],
['topo', 'noun', 'b'],
['topo', 'adjective', 'b'],
['toppa', 'noun', 'c'],
['torbido', 'adjective', 'c'],
['torbido', 'noun', 'c'],
['torcere', 'verb', 'b'],
['torcia', 'noun', 'c'],
['torcicollo', 'noun', 'c'],
['tordo', 'noun', 'c'],
['torero', 'noun', 'c'],
['torinese', 'adjective', 'c'],
['torinese', 'noun', 'c'],
['tormentare', 'verb', 'b'],
['tornaconto', 'noun', 'c'],
['tornare', 'verb', 'a'],
['torneo', 'noun', 'b'],
['tornio', 'noun', 'c'],
['toro', 'noun', 'b'],
['torre', 'noun', 'b'],
['torrone', 'noun', 'c'],
['torta', 'noun', 'b'],
['tortellino', 'noun', 'c'],
['torto', 'noun', 'b'],
['tortora', 'noun', 'c'],
['tortora', 'adjective', 'c'],
['tortora', 'noun', 'c'],
['tosare', 'verb', 'c'],
['toscano', 'adjective', 'b'],
['toscano', 'noun', 'b'],
['tosse', 'noun', 'b'],
['tossico', 'adjective', 'b'],
['tossico', 'noun', 'b'],
['tossire', 'verb', 'c'],
['tostapane', 'noun', 'c'],
['totale', 'adjective', 'a'],
['totale', 'noun', 'a'],
['totalmente', 'adverb', 'b'],
['tour', 'noun', 'b'],
['tovaglia', 'noun', 'b'],
['tovaglietta', 'noun', 'c'],
['tovagliolo', 'noun', 'c'],
['tra', 'preposition', 'a'],
['traballare', 'verb', 'c'],
['traboccare', 'verb', 'c'],
['trabocchetto', 'noun', 'c'],
['traccia', 'noun', 'a'],
['tracciare', 'verb', 'b'],
['tradimento', 'noun', 'b'],
['tradire', 'verb', 'b'],
['tradizionale', 'adjective', 'a'],
['tradizione', 'noun', 'a'],
['tradurre', 'verb', 'a'],
['traduzione', 'noun', 'a'],
['traffico', 'noun', 'a'],
['trafila', 'noun', 'c'],
['traforo', 'noun', 'c'],
['tragedia', 'noun', 'b'],
['traghetto', 'noun', 'c'],
['tragico', 'adjective', 'b'],
['tragico', 'noun', 'b'],
['trainare', 'verb', 'c'],
['trama', 'noun', 'b'],
['tramezzino', 'noun', 'c'],
['tramite', 'noun', 'preposition'],
['tramontare', 'verb', 'c'],
['tramonto', 'noun', 'b'],
['trampolino', 'noun', 'c'],
['trancio', 'noun', 'c'],
['tranne', 'preposition', 'a'],
['tranquillamente', 'adverb', 'b'],
['tranquillità', 'noun', 'b'],
['tranquillizzare', 'verb', 'c'],
['tranquillo', 'adjective', 'a'],
['tranquillo', 'adverb', 'a'],
['tranquillo', 'noun', 'a'],
['transito', 'noun', 'c'],
['trapano', 'noun', 'c'],
['trapezio', 'noun', 'c'],
['trapezio', 'adjective', 'c'],
['trapianto', 'noun', 'c'],
['trappola', 'noun', 'b'],
['trapunta', 'noun', 'c'],
['trarre', 'verb', 'a'],
['trascinare', 'verb', 'a'],
['trascorrere', 'verb', 'a'],
['trascrizione', 'noun', 'b'],
['trascurare', 'verb', 'b'],
['trasferimento', 'noun', 'b'],
['trasferire', 'verb', 'a'],
['trasformare', 'verb', 'a'],
['trasformazione', 'noun', 'b'],
['trasfusione', 'noun', 'c'],
['traslocare', 'verb', 'c'],
['trasloco', 'noun', 'c'],
['trasmettere', 'verb', 'a'],
['trasmissione', 'noun', 'a'],
['trasparente', 'adjective', 'b'],
['trasparente', 'noun', 'b'],
['trasparenza', 'noun', 'b'],
['trasportare', 'verb', 'b'],
['trasporto', 'noun', 'a'],
['trattamento', 'noun', 'a'],
['trattare', 'verb', 'a'],
['trattativa', 'noun', 'b'],
['trattato', 'noun', 'b'],
['trattenere', 'verb', 'a'],
['trattenuta', 'noun', 'c'],
['tratto', 'noun', 'a'],
['trattore', 'noun', 'c'],
['trauma', 'noun', 'b'],
['travasare', 'verb', 'c'],
['travestire', 'verb', 'c'],
['travolgere', 'verb', 'b'],
['tre', 'adjective', 'a'],
['tre', 'noun', 'a'],
['trebbiare', 'verb', 'c'],
['trecento', 'adjective', 'b'],
['trecento', 'noun', 'b'],
['tredici', 'adjective', 'b'],
['tredici', 'noun', 'b'],
['tremare', 'verb', 'b'],
['tremendo', 'adjective', 'b'],
['trend', 'noun', 'b'],
['treno', 'noun', 'a'],
['trenta', 'adjective', 'a'],
['trenta', 'noun', 'a'],
['trentino', 'adjective', 'c'],
['trentino', 'noun', 'c'],
['triangolo', 'noun', 'b'],
['tribù', 'noun', 'c'],
['tribunale', 'noun', 'a'],
['triestino', 'adjective', 'c'],
['triestino', 'noun', 'c'],
['trifoglio', 'noun', 'c'],
['trina', 'noun', 'c'],
['trincea', 'noun', 'c'],
['trionfo', 'noun', 'b'],
['triste', 'adjective', 'a'],
['tristezza', 'noun', 'b'],
['tritare', 'verb', 'c'],
['trofeo', 'noun', 'c'],
['tronco', 'noun', 'b'],
['trono', 'noun', 'b'],
['troppo', 'adjective', 'a'],
['troppo', 'pronoun', 'a'],
['troppo', 'adverb', 'a'],
['troppo', 'noun', 'a'],
['trota', 'noun', 'c'],
['trottare', 'verb', 'c'],
['trottola', 'noun', 'c'],
['trovare', 'verb', 'a'],
['truccare', 'verb', 'c'],
['trucco', 'noun', 'b'],
['trucco', 'noun', 'b'],
['truffa', 'noun', 'b'],
['truffare', 'verb', 'c'],
['truppa', 'noun', 'b'],
['t-shirt', 'noun', 'c'],
['tu', 'pronoun', 'a'],
['tubo', 'noun', 'b'],
['tuffare', 'verb', 'b'],
['tuffo', 'noun', 'c'],
['tulipano', 'noun', 'c'],
['tumore', 'noun', 'b'],
['tunica', 'noun', 'c'],
['tunisino', 'adjective', 'c'],
['tunisino', 'noun', 'c'],
['tunnel', 'noun', 'c'],
['tuo', 'adjective', 'a'],
['tuo', 'pronoun', 'a'],
['tuono', 'noun', 'c'],
['turbare', 'verb', 'b'],
['turco', 'adjective', 'b'],
['turco', 'noun', 'b'],
['turismo', 'noun', 'b'],
['turista', 'noun', 'b'],
['turistico', 'adjective', 'b'],
['turno', 'noun', 'a'],
['tuta', 'noun', 'b'],
['tutela', 'noun', 'b'],
['tutelare', 'verb', 'b'],
['tutore', 'noun', 'c'],
['tuttavia', 'conjunction', 'a'],
['tuttavia', 'adverb', 'a'],
['tutto', 'adjective', 'a'],
['tutto', 'pronoun', 'a'],
['tuttora', 'adverb', 'b'],
['u', 'noun', 'c'],
['ubriaco', 'adjective', 'b'],
['ubriaco', 'noun', 'b'],
['uccello', 'noun', 'a'],
['uccidere', 'verb', 'a'],
['ucraino', 'adjective', 'c'],
['ucraino', 'noun', 'c'],
['udienza', 'noun', 'b'],
['udinese', 'adjective', 'c'],
['udinese', 'noun', 'c'],
['udire', 'verb', 'b'],
['udire', 'noun', 'b'],
['ufficiale', 'noun', 'b'],
['ufficiale', 'adjective', 'a'],
['ufficialmente', 'adverb', 'b'],
['ufficio', 'noun', 'a'],
['uguale', 'adjective', 'a'],
['uguale', 'adverb', 'a'],
['uguale', 'noun', 'a'],
['ugualmente', 'adverb', 'b'],
['ulcera', 'noun', 'c'],
['ulteriore', 'adjective', 'a'],
['ulteriormente', 'adverb', 'b'],
['ultimamente', 'adverb', 'b'],
['ultimo', 'adjective', 'a'],
['ultimo', 'noun', 'a'],
['ultravioletto', 'noun', 'c'],
['ultravioletto', 'adjective', 'c'],
['umanità', 'noun', 'a'],
['umano', 'adjective', 'a'],
['umano', 'noun', 'a'],
['umbro', 'adjective', 'c'],
['umbro', 'noun', 'c'],
['umido', 'adjective', 'b'],
['umido', 'noun', 'b'],
['umile', 'adjective', 'b'],
['umile', 'noun', 'b'],
['umiliare', 'verb', 'b'],
['umore', 'noun', 'b'],
['umorismo', 'noun', 'c'],
['una', 'determiner', 'a'],
['una', 'pronoun', 'a'],
['undici', 'adjective', 'b'],
['undici', 'noun', 'b'],
['ungherese', 'adjective', 'c'],
['ungherese', 'noun', 'c'],
['unghia', 'noun', 'b'],
['unguento', 'noun', 'c'],
['unico', 'adjective', 'a'],
['unico', 'noun', 'a'],
['uniforme', 'adjective', 'b'],
['unione', 'noun', 'b'],
['unire', 'verb', 'a'],
['unità', 'noun', 'a'],
['unito', 'past_part', 'a'],
['unito', 'adjective', 'a'],
['unito', 'noun', 'a'],
['universale', 'adjective', 'b'],
['universale', 'noun', 'b'],
['università', 'noun', 'a'],
['universitario', 'adjective', 'b'],
['universitario', 'noun', 'b'],
['universo', 'noun', 'a'],
['uno', 'adjective', 'a'],
['uno', 'noun', 'a'],
['uno', 'determiner', 'a'],
['uno', 'pronoun', 'a'],
['uomo', 'noun', 'a'],
['uovo', 'noun', 'a'],
['uragano', 'noun', 'c'],
['urbanistico', 'adjective', 'b'],
['urbano', 'adjective', 'b'],
['urgente', 'adjective', 'b'],
['urgenza', 'noun', 'b'],
['urlare', 'verb', 'a'],
['urlo', 'noun', 'b'],
['urna', 'noun', 'c'],
['urtare', 'verb', 'b'],
['usare', 'verb', 'a'],
['usato', 'past_part', 'b'],
['usato', 'adjective', 'b'],
['usato', 'noun', 'b'],
['uscire', 'verb', 'a'],
['uscita', 'noun', 'a'],
['usignolo', 'noun', 'c'],
['uso', 'noun', 'a'],
['utensile', 'noun', 'c'],
['utente', 'noun', 'a'],
['utenza', 'noun', 'b'],
['utile', 'adjective', 'a'],
['utile', 'noun', 'a'],
['utilità', 'noun', 'b'],
['utilizzare', 'verb', 'a'],
['utilizzo', 'noun', 'b'],
['vabbè', 'exclamation', 'b'],
['vacanza', 'noun', 'a'],
['vacca', 'noun', 'b'],
['vaccino', 'noun', 'c'],
['vaffanculo', 'exclamation', 'b'],
['vagare', 'verb', 'b'],
['vagire', 'verb', 'c'],
['vago', 'adjective', 'b'],
['vago', 'noun', 'b'],
['valanga', 'noun', 'c'],
['valdostano', 'adjective', 'c'],
['valdostano', 'noun', 'c'],
['valere', 'verb', 'a'],
['valido', 'adjective', 'b'],
['valigia', 'noun', 'b'],
['valle', 'noun', 'b'],
['valore', 'noun', 'a'],
['valorizzare', 'verb', 'b'],
['valoroso', 'adjective', 'c'],
['valoroso', 'noun', 'c'],
['valutare', 'verb', 'a'],
['valutazione', 'noun', 'b'],
['valvola', 'noun', 'c'],
['vampata', 'noun', 'c'],
['vampiro', 'noun', 'b'],
['vandalo', 'adjective', 'c'],
['vandalo', 'noun', 'c'],
['vanga', 'noun', 'c'],
['vangelo', 'noun', 'b'],
['vanitoso', 'adjective', 'c'],
['vanitoso', 'noun', 'c'],
['vano', 'adjective', 'b'],
['vano', 'noun', 'b'],
['vantaggio', 'noun', 'a'],
['vantaggioso', 'adjective', 'c'],
['vantare', 'verb', 'b'],
['vanto', 'noun', 'c'],
['vapore', 'noun', 'b'],
['variabile', 'adjective', 'b'],
['variabile', 'noun', 'b'],
['variante', 'pres_part', 'b'],
['variante', 'adjective', 'b'],
['variante', 'noun', 'b'],
['variare', 'verb', 'b'],
['variazione', 'noun', 'b'],
['varietà', 'noun', 'b'],
['vario', 'adjective', 'a'],
['vario', 'adjective', 'a'],
['vario', 'pronoun', 'a'],
['variopinto', 'adjective', 'c'],
['vasca', 'noun', 'b'],
['vaso', 'noun', 'b'],
['vasto', 'adjective', 'b'],
['vasto', 'noun', 'b'],
['ve', 'pronoun', 'a'],
['ve', 'adverb', 'a'],
['vecchio', 'adjective', 'a'],
['vecchio', 'noun', 'a'],
['vedere', 'verb', 'a'],
['vedere', 'noun', 'a'],
['vedova', 'noun', 'b'],
['vegetale', 'adjective', 'b'],
['vegetale', 'noun', 'b'],
['veglia', 'noun', 'c'],
['veglione', 'noun', 'c'],
['veicolo', 'noun', 'b'],
['vela', 'noun', 'b'],
['veleno', 'noun', 'b'],
['velenoso', 'adjective', 'c'],
['vellutato', 'past_part', 'c'],
['vellutato', 'adjective', 'c'],
['velluto', 'noun', 'c'],
['velo', 'noun', 'b'],
['veloce', 'adjective', 'a'],
['veloce', 'adverb', 'a'],
['veloce', 'noun', 'a'],
['velocemente', 'adverb', 'b'],
['velocità', 'noun', 'a'],
['vena', 'noun', 'b'],
['vendemmiare', 'verb', 'c'],
['vendere', 'verb', 'a'],
['vendetta', 'noun', 'b'],
['vendicare', 'verb', 'b'],
['vendita', 'noun', 'a'],
['venditore', 'adjective', 'b'],
['venditore', 'noun', 'b'],
['venerdì', 'noun', 'a'],
['veneto', 'adjective', 'b'],
['veneto', 'noun', 'b'],
['veneziano', 'adjective', 'c'],
['veneziano', 'noun', 'c'],
['venire', 'verb', 'a'],
['ventaglio', 'noun', 'c'],
['ventata', 'noun', 'c'],
['venti', 'adjective', 'a'],
['venti', 'noun', 'a'],
['venticinque', 'adjective', 'b'],
['venticinque', 'noun', 'b'],
['ventilatore', 'adjective', 'c'],
['ventilatore', 'noun', 'c'],
['ventina', 'noun', 'b'],
['ventiquattro', 'adjective', 'b'],
['ventiquattro', 'noun', 'b'],
['vento', 'noun', 'a'],
['ventre', 'noun', 'b'],
['venuta', 'noun', 'c'],
['veramente', 'adverb', 'a'],
['verbale', 'adjective', 'a'],
['verbale', 'noun', 'a'],
['verbo', 'noun', 'b'],
['verde', 'adjective', 'a'],
['verde', 'noun', 'a'],
['verdura', 'noun', 'b'],
['vergine', 'adjective', 'b'],
['vergine', 'noun', 'b'],
['vergogna', 'noun', 'b'],
['vergognarsi', 'verb', 'b'],
['verifica', 'noun', 'b'],
['verificare', 'verb', 'a'],
['verità', 'noun', 'a'],
['verme', 'noun', 'b'],
['vernice', 'noun', 'b'],
['vero', 'adjective', 'a'],
['vero', 'noun', 'a'],
['versare', 'verb', 'a'],
['versione', 'noun', 'a'],
['verso', 'noun', 'a'],
['verso', 'preposition', 'a'],
['vertebra', 'noun', 'c'],
['verticale', 'adjective', 'b'],
['verticale', 'noun', 'b'],
['vertice', 'noun', 'b'],
['vertigine', 'noun', 'c'],
['vescovo', 'noun', 'b'],
['vescovo', 'adjective', 'b'],
['vespa', 'noun', 'c'],
['veste', 'noun', 'b'],
['vestire', 'verb', 'a'],
['vestito', 'noun', 'a'],
['vestito', 'past_part', 'b'],
['vestito', 'adjective', 'b'],
['veterinario', 'adjective', 'c'],
['veterinario', 'noun', 'c'],
['vetrina', 'noun', 'b'],
['vetro', 'noun', 'a'],
['vettura', 'noun', 'b'],
['vi', 'pronoun', 'a'],
['vi', 'adverb', 'a'],
['via', 'noun', 'a'],
['via', 'adverb', 'a'],
['via', 'exclamation', 'a'],
['via', 'noun', 'a'],
['viaggiare', 'verb', 'a'],
['viaggiatore', 'noun', 'b'],
['viaggiatrice', 'noun', 'c'],
['viaggio', 'noun', 'a'],
['viale', 'noun', 'b'],
['vibrare', 'verb', 'b'],
['vice', 'noun', 'b'],
['vicenda', 'noun', 'a'],
['viceversa', 'adverb', 'b'],
['vicinanza', 'noun', 'b'],
['vicino', 'adjective', 'a'],
['vicino', 'noun', 'a'],
['vicino', 'adverb', 'a'],
['vicolo', 'noun', 'b'],
['video', 'adjective', 'a'],
['video', 'noun', 'a'],
['videogioco', 'noun', 'b'],
['viennese', 'adjective', 'c'],
['viennese', 'noun', 'c'],
['vietare', 'verb', 'b'],
['vigile', 'adjective', 'b'],
['vigile', 'noun', 'b'],
['vigilia', 'noun', 'b'],
['vigna', 'noun', 'c'],
['vigore', 'noun', 'b'],
['villa', 'noun', 'a'],
['villaggio', 'noun', 'a'],
['vincente', 'pres_part', 'b'],
['vincente', 'adjective', 'b'],
['vincente', 'noun', 'b'],
['vincere', 'verb', 'a'],
['vincitore', 'adjective', 'b'],
['vincitore', 'noun', 'b'],
['vincolo', 'noun', 'b'],
['vino', 'noun', 'a'],
['vino', 'adjective', 'a'],
['viola', 'noun', 'b'],
['viola', 'adjective', 'b'],
['violare', 'verb', 'b'],
['violazione', 'noun', 'b'],
['violentare', 'verb', 'c'],
['violento', 'adjective', 'a'],
['violento', 'noun', 'a'],
['violenza', 'noun', 'a'],
['violetta', 'noun', 'c'],
['violetto', 'adjective', 'c'],
['violetto', 'noun', 'c'],
['violino', 'noun', 'b'],
['vipera', 'noun', 'c'],
['virgola', 'noun', 'b'],
['virtù', 'noun', 'b'],
['virtuale', 'adjective', 'b'],
['virus', 'noun', 'b'],
['visibile', 'adjective', 'b'],
['visibile', 'noun', 'b'],
['visione', 'noun', 'a'],
['visita', 'noun', 'a'],
['visitare', 'verb', 'a'],
['visitatore', 'noun', 'b'],
['visivo', 'adjective', 'b'],
['viso', 'noun', 'a'],
['vissuto', 'past_part', 'b'],
['vissuto', 'adjective', 'b'],
['vissuto', 'noun', 'b'],
['vista', 'noun', 'a'],
['vita', 'noun', 'a'],
['vitale', 'adjective', 'b'],
['vitale', 'noun', 'b'],
['vitamina', 'noun', 'c'],
['vite', 'noun', 'c'],
['vitello', 'noun', 'c'],
['vittima', 'noun', 'a'],
['vittoria', 'noun', 'a'],
['vivace', 'adjective', 'b'],
['vivace', 'adverb', 'b'],
['vivace', 'noun', 'b'],
['vivente', 'pres_part', 'b'],
['vivente', 'adjective', 'b'],
['vivente', 'noun', 'b'],
['vivere', 'verb', 'a'],
['vivere', 'noun', 'a'],
['vivo', 'adjective', 'a'],
['vivo', 'noun', 'a'],
['viziare', 'verb', 'c'],
['viziato', 'past_part', 'c'],
['viziato', 'adjective', 'c'],
['vizio', 'noun', 'b'],
['vocabolario', 'noun', 'b'],
['vocale', 'noun', 'b'],
['vocale', 'adjective', 'b'],
['vocazione', 'noun', 'b'],
['voce', 'noun', 'a'],
['vodka', 'noun', 'c'],
['voglia', 'noun', 'a'],
['voi', 'pronoun', 'a'],
['volantino', 'noun', 'c'],
['volare', 'verb', 'a'],
['volata', 'noun', 'c'],
['volenteroso', 'adjective', 'c'],
['volentieri', 'adverb', 'b'],
['volere', 'verb', 'a'],
['volgare', 'adjective', 'b'],
['volgare', 'noun', 'b'],
['volgere', 'verb', 'b'],
['volo', 'noun', 'a'],
['volontà', 'noun', 'a'],
['volontariato', 'noun', 'b'],
['volontario', 'adjective', 'b'],
['volontario', 'noun', 'b'],
['volta', 'noun', 'a'],
['voltare', 'verb', 'a'],
['volto', 'noun', 'a'],
['volume', 'noun', 'a'],
['vomitare', 'verb', 'b'],
['vomito', 'noun', 'c'],
['vongola', 'noun', 'c'],
['vostro', 'adjective', 'a'],
['vostro', 'pronoun', 'a'],
['votare', 'verb', 'a'],
['votazione', 'noun', 'c'],
['voto', 'noun', 'a'],
['vu', 'noun', 'c'],
['vuotare', 'verb', 'c'],
['vuoto', 'adjective', 'a'],
['vuoto', 'noun', 'a'],
['wafer', 'noun', 'c'],
['web', 'noun', 'a'],
['weekend', 'noun', 'b'],
['whisky', 'noun', 'c'],
['wurstel', 'noun', 'c'],
['yogurt', 'noun', 'c'],
['zaino', 'noun', 'b'],
['zampa', 'noun', 'b'],
['zampogna', 'noun', 'c'],
['zanna', 'noun', 'c'],
['zanzara', 'noun', 'c'],
['zattera', 'noun', 'c'],
['zebra', 'noun', 'c'],
['zero', 'adjective', 'a'],
['zero', 'noun', 'a'],
['zero', 'symbol', 'a'],
['zeta', 'noun', 'c'],
['zia', 'noun', 'a'],
['zingaro', 'adjective', 'c'],
['zingaro', 'noun', 'c'],
['zio', 'noun', 'a'],
['zitella', 'noun', 'c'],
['zitto', 'adjective', 'a'],
['zitto', 'noun', 'a'],
['zoccolo', 'noun', 'c'],
['zolla', 'noun', 'c'],
['zona', 'noun', 'a'],
['zoo', 'noun', 'c'],
['zoppicare', 'verb', 'c'],
['zoppo', 'adjective', 'c'],
['zoppo', 'noun', 'c'],
['zucca', 'noun', 'b'],
['zucchero', 'noun', 'b'],
['zucchina', 'noun', 'c'],
['zuffa', 'noun', 'c'],
['zuppa', 'noun', 'c'],
] | 1.828125 | 2 |
pytorch/torch/_utils_internal.py | raghavnauhria/whatmt | 15 | 13316 | <filename>pytorch/torch/_utils_internal.py<gh_stars>10-100
from __future__ import absolute_import, division, print_function, unicode_literals
import os
# this arbitrary-looking assortment of functionality is provided here
# to have a central place for overrideable behavior. The motivating
# use is the FB build environment, where this source file is replaced
# by an equivalent.
# Locate the directory that contains the `torch` package.  Per the header
# comment, the FB build replaces this file and places it under a `shared/`
# directory, in which case one extra level must be stripped.
if os.path.basename(os.path.dirname(__file__)) == 'shared':
    torch_parent = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
else:
    torch_parent = os.path.dirname(os.path.dirname(__file__))
def get_file_path(*path_components):
    """Return a path rooted at the directory that contains the torch package."""
    parts = (torch_parent,) + path_components
    return os.path.join(*parts)
def get_file_path_2(*path_components):
    """Join *path_components* into a single path, with no implicit root."""
    joined = os.path.join(*path_components)
    return joined
def get_writable_path(path):
    """Return a writable location for *path*; the open-source build uses it as-is."""
    return path
def prepare_multiprocessing_environment(path):
    """Hook for build environments that need multiprocessing setup; no-op here."""
    return None
def resolve_library_path(path):
    """Resolve *path* to its canonical filesystem location (follows symlinks)."""
    resolved = os.path.realpath(path)
    return resolved
# Local rendezvous endpoint defaults; the names suggest these are consumed by
# distributed tests — confirm against callers.
TEST_MASTER_ADDR = '127.0.0.1'
TEST_MASTER_PORT = 29500
| 2.109375 | 2 |
libs/models.py | aquastripe/DenseCLIP | 7 | 13317 | import json
from collections import OrderedDict
from typing import Union, List
import clip
import torch
import torch.nn as nn
import torch.nn.functional as F
from libs.definitions import ROOT
# Load the bundled ImageNet class-index JSON and keep the second element of
# each entry as the default class names for zero-shot classification.
label_file = ROOT / 'imagenet_class_index.json'
with open(label_file, 'r') as f:
    labels = json.load(f)
_DEFAULT_CLASSNAMES = [value[1] for value in labels.values()]
# templates are copied from https://github.com/openai/CLIP/blob/main/notebooks/Prompt_Engineering_for_ImageNet.ipynb
_DEFAULT_TEMPLATES = [
'a bad photo of a {}.',
'a photo of many {}.',
'a sculpture of a {}.',
'a photo of the hard to see {}.',
'a low resolution photo of the {}.',
'a rendering of a {}.',
'graffiti of a {}.',
'a bad photo of the {}.',
'a cropped photo of the {}.',
'a tattoo of a {}.',
'the embroidered {}.',
'a photo of a hard to see {}.',
'a bright photo of a {}.',
'a photo of a clean {}.',
'a photo of a dirty {}.',
'a dark photo of the {}.',
'a drawing of a {}.',
'a photo of my {}.',
'the plastic {}.',
'a photo of the cool {}.',
'a close-up photo of a {}.',
'a black and white photo of the {}.',
'a painting of the {}.',
'a painting of a {}.',
'a pixelated photo of the {}.',
'a sculpture of the {}.',
'a bright photo of the {}.',
'a cropped photo of a {}.',
'a plastic {}.',
'a photo of the dirty {}.',
'a jpeg corrupted photo of a {}.',
'a blurry photo of the {}.',
'a photo of the {}.',
'a good photo of the {}.',
'a rendering of the {}.',
'a {} in a video game.',
'a photo of one {}.',
'a doodle of a {}.',
'a close-up photo of the {}.',
'a photo of a {}.',
'the origami {}.',
'the {} in a video game.',
'a sketch of a {}.',
'a doodle of the {}.',
'a origami {}.',
'a low resolution photo of a {}.',
'the toy {}.',
'a rendition of the {}.',
'a photo of the clean {}.',
'a photo of a large {}.',
'a rendition of a {}.',
'a photo of a nice {}.',
'a photo of a weird {}.',
'a blurry photo of a {}.',
'a cartoon {}.',
'art of a {}.',
'a sketch of the {}.',
'a embroidered {}.',
'a pixelated photo of a {}.',
'itap of the {}.',
'a jpeg corrupted photo of the {}.',
'a good photo of a {}.',
'a plushie {}.',
'a photo of the nice {}.',
'a photo of the small {}.',
'a photo of the weird {}.',
'the cartoon {}.',
'art of the {}.',
'a drawing of the {}.',
'a photo of the large {}.',
'a black and white photo of a {}.',
'the plushie {}.',
'a dark photo of a {}.',
'itap of a {}.',
'graffiti of the {}.',
'a toy {}.',
'itap of my {}.',
'a photo of a cool {}.',
'a photo of a small {}.',
'a tattoo of the {}.',
]
class DenseClip(nn.Module):
    """Dense (per-pixel) zero-shot classifier built from a CLIP ResNet backbone.

    The attention pooling head of CLIP's visual encoder is replaced by two
    1x1 convolutions carrying the same projection weights, so the model emits
    a feature map instead of a single vector; that map is scored against
    text-derived class embeddings at every spatial location.
    """

    _AVAILABLE_MODELS = ['RN50', 'RN50x16'] # refer to Table 3. in the paper

    def __init__(self,
                 name: str,
                 classnames: List[str] = None,
                 templates: List[str] = None,
                 device: Union[str, torch.device] = 'cuda' if torch.cuda.is_available() else 'cpu',
                 jit: bool = False, download_root: str = None):
        """Load the named CLIP model and prepare the dense zero-shot head.

        :param name: CLIP model name passed to ``clip.load``
        :param classnames: class names for the zero-shot classifier
            (defaults to the ImageNet names loaded at module import)
        :param templates: prompt templates used to embed each class name
        :param device: torch device for the model and classifier weights
        :param jit: forwarded to ``clip.load``
        :param download_root: forwarded to ``clip.load``
        """
        super(DenseClip, self).__init__()
        self.clip_model, self.preprocess = clip.load(name, device, jit, download_root)
        if classnames is None:
            classnames = _DEFAULT_CLASSNAMES
        if templates is None:
            templates = _DEFAULT_TEMPLATES
        self._init_visual(device)
        self._init_zeroshot_classifier(classnames, templates, device)

    def _init_visual(self, device):
        """Copy the attnpool projection weights into equivalent 1x1 convs."""
        self.visual = self.clip_model.visual
        # conv1/conv2 mirror v_proj/c_proj of the attention pooling layer.
        self.conv1 = nn.Conv2d(self.visual.attnpool.v_proj.in_features,
                               self.visual.attnpool.v_proj.out_features,
                               kernel_size=(1, 1)).to(device).to(self.dtype)
        self.conv2 = nn.Conv2d(self.visual.attnpool.c_proj.in_features,
                               self.visual.attnpool.c_proj.out_features,
                               kernel_size=(1, 1)).to(device).to(self.dtype)
        # A linear layer's (out, in) weight becomes a (out, in, 1, 1) conv kernel.
        conv1_weight_shape = (*self.visual.attnpool.v_proj.weight.shape, 1, 1)
        conv2_weight_shape = (*self.visual.attnpool.c_proj.weight.shape, 1, 1)
        self.conv1.load_state_dict(
            OrderedDict(weight=self.visual.attnpool.v_proj.weight.reshape(conv1_weight_shape),
                        bias=self.visual.attnpool.v_proj.bias))
        self.conv2.load_state_dict(
            OrderedDict(weight=self.visual.attnpool.c_proj.weight.reshape(conv2_weight_shape),
                        bias=self.visual.attnpool.c_proj.bias))

    @torch.no_grad()
    def _init_zeroshot_classifier(self, classnames, templates, device):
        """Build the text-embedding classifier, one averaged vector per class."""
        # refer to: https://github.com/openai/CLIP/blob/main/notebooks/Prompt_Engineering_for_ImageNet.ipynb
        zeroshot_weights = []
        for classname in classnames:
            texts = [template.format(classname) for template in templates] # format with class
            texts = clip.tokenize(texts).to(device) # tokenize
            class_embeddings = self.clip_model.encode_text(texts) # embed with text encoder
            class_embeddings /= class_embeddings.norm(dim=-1, keepdim=True)
            class_embedding = class_embeddings.mean(dim=0)
            class_embedding /= class_embedding.norm()
            zeroshot_weights.append(class_embedding)
        # shape: [E, C]
        # where E is the dimension of an embedding and C is the number of classes.
        self.zeroshot_weights = torch.stack(zeroshot_weights, dim=1).to(device)

    @property
    def dtype(self):
        # Follow the dtype of the visual stem (fp16 when CLIP was loaded on GPU).
        return self.visual.conv1.weight.dtype

    def _stem(self, x):
        """Run CLIP's three-conv stem plus average pooling."""
        for conv, bn in [(self.visual.conv1, self.visual.bn1),
                         (self.visual.conv2, self.visual.bn2),
                         (self.visual.conv3, self.visual.bn3)]:
            x = self.visual.relu(bn(conv(x)))
        x = self.visual.avgpool(x)
        return x

    def encode_image(self, image):
        """Return a dense feature map (attnpool replaced by the 1x1 convs)."""
        image = image.type(self.dtype)
        feature = self._stem(image)
        feature = self.visual.layer1(feature)
        feature = self.visual.layer2(feature)
        feature = self.visual.layer3(feature)
        feature = self.visual.layer4(feature)
        # removed attnpool
        feature = self.conv1(feature)
        feature = self.conv2(feature)
        return feature

    def forward(self, images):
        """Score every spatial location against the class embeddings and
        upsample the logits back to the input resolution."""
        # [B, E, h, w]
        features = self.encode_image(images)
        # [B, w, h, E]
        features_t = features.transpose(1, 3)
        # [B, w, h, C]
        output_t = features_t @ self.zeroshot_weights
        # [B, C, h, w]
        output = output_t.transpose(1, 3)
        output = F.interpolate(output, size=images.shape[-2:], mode='bilinear')
        return output

    @staticmethod
    def available_models():
        """Return the model names this wrapper is known to work with."""
        return DenseClip._AVAILABLE_MODELS
class Clip(nn.Module):
    """Plain (image-level) zero-shot CLIP classifier.

    Unlike :class:`DenseClip`, the visual encoder is used unmodified; the
    pooled image embedding is scored against prompt-averaged text embeddings
    and softmaxed over classes.
    """

    _AVAILABLE_MODELS = ['RN50', 'RN50x16'] # refer to Table 3. in the paper

    def __init__(self,
                 name: str,
                 classnames: List[str] = None,
                 templates: List[str] = None,
                 device: Union[str, torch.device] = 'cuda' if torch.cuda.is_available() else 'cpu',
                 jit: bool = False, download_root: str = None):
        """Load the named CLIP model and build the zero-shot text classifier.

        Parameters mirror :meth:`DenseClip.__init__`.
        """
        super(Clip, self).__init__()
        self.clip_model, self.preprocess = clip.load(name, device, jit, download_root)
        if classnames is None:
            classnames = _DEFAULT_CLASSNAMES
        if templates is None:
            templates = _DEFAULT_TEMPLATES
        self._init_zeroshot_classifier(classnames, templates, device)

    @torch.no_grad()
    def _init_zeroshot_classifier(self, classnames, templates, device):
        """Build the text-embedding classifier, one averaged vector per class."""
        # refer to: https://github.com/openai/CLIP/blob/main/notebooks/Prompt_Engineering_for_ImageNet.ipynb
        zeroshot_weights = []
        for classname in classnames:
            texts = [template.format(classname) for template in templates] # format with class
            texts = clip.tokenize(texts).to(device) # tokenize
            class_embeddings = self.clip_model.encode_text(texts) # embed with text encoder
            class_embeddings /= class_embeddings.norm(dim=-1, keepdim=True)
            class_embedding = class_embeddings.mean(dim=0)
            class_embedding /= class_embedding.norm()
            zeroshot_weights.append(class_embedding)
        # shape: [E, C]
        # where E is the dimension of an embedding and C is the number of classes.
        self.zeroshot_weights = torch.stack(zeroshot_weights, dim=1).to(device)

    def encode_image(self, image):
        """Return the L2-normalized pooled CLIP image embedding."""
        feature = self.clip_model.encode_image(image)
        feature /= feature.norm(dim=-1, keepdim=True)
        return feature

    def forward(self, images):
        """Return per-class probabilities of shape [B, C]."""
        features = self.encode_image(images)
        output = features @ self.zeroshot_weights
        return F.softmax(output, dim=-1)

    @staticmethod
    def available_models():
        """Return the model names this wrapper is known to work with."""
        return Clip._AVAILABLE_MODELS
| 2.40625 | 2 |
src/basset_sick_loss.py | shtoneyan/Basset | 248 | 13318 | <filename>src/basset_sick_loss.py
#!/usr/bin/env python
from __future__ import print_function
from optparse import OptionParser
import os
import random
import subprocess
import matplotlib
matplotlib.use('Agg')
import numpy as np
import matplotlib.pyplot as plt
import pysam
from scipy.stats import binom
from scipy.stats.mstats import mquantiles
import seaborn as sns
import stats
################################################################################
# basset_sick_loss.py
#
# Shuffle SNPs that overlap DNase sites within their sites and compare the SAD
# distributions.
#
# Todo:
# -Control for GC% changes introduced by mutation shuffles.
# -Control for positional changes within the DHS regions.
# -Properly handle indels.
################################################################################
################################################################################
# main
################################################################################
def main():
    """Compare true vs. within-DHS-shuffled SNP SAD score distributions.

    For each sample BED: intersect the VCF with the BED, compute SAD scores
    for the real SNPs and for shuffled copies, then emit CDF/Q-Q plots plus
    binomial and Mann-Whitney statistics.
    """
    usage = 'usage: %prog [options] <vcf_file> <sample_beds_file> <model_file>'
    parser = OptionParser(usage)
    parser.add_option('-f', dest='genome_fasta', default='%s/assembly/hg19.fa'%os.environ['HG19'], help='Genome FASTA [Default: %default]')
    parser.add_option('-g', dest='gpu', default=False, action='store_true', help='Run on GPU [Default: %default]')
    parser.add_option('-l', dest='seq_len', type='int', default=600, help='Sequence length provided to the model [Default: %default]')
    parser.add_option('-o', dest='out_dir', default='sad_shuffle', help='Output directory')
    parser.add_option('-r', dest='replot', default=False, action='store_true', help='Re-plot only, without re-computing [Default: %default]')
    parser.add_option('-s', dest='num_shuffles', default=1, type='int', help='Number of SNP shuffles [Default: %default]')
    parser.add_option('-t', dest='sad_table_file', help='Pre-computed SAD scores for the SNPs')
    (options,args) = parser.parse_args()
    if len(args) != 3:
        parser.error('Must provide VCF file, sample BEDs file, and model file')
    else:
        vcf_file = args[0]
        sample_beds_file = args[1]
        model_file = args[2]
    if not os.path.isdir(options.out_dir):
        os.mkdir(options.out_dir)
    # open reference genome
    genome = pysam.Fastafile(options.genome_fasta)
    # open binomial stats file
    binom_out = open('%s/binom.txt' % options.out_dir, 'w')
    # open mann-whitney stats file
    mw_out = open('%s/mannwhitney.txt' % options.out_dir, 'w')
    # plot defaults
    sns.set(font_scale=1.5, style='ticks')
    si = 0
    for line in open(sample_beds_file):
        sample, bed_file = line.split()
        print(sample)
        #########################################
        # compute SAD
        #########################################
        # filter VCF to overlapping SNPs
        print(" intersecting SNPs")
        sample_vcf_file = '%s/%s.vcf' % (options.out_dir,sample)
        if not options.replot:
            filter_vcf(vcf_file, bed_file, sample_vcf_file)
        # compute SAD scores for this sample's SNPs
        print(" computing SAD")
        if options.sad_table_file:
            true_sad = retrieve_sad(sample_vcf_file, options.sad_table_file, si)
        else:
            true_sad = compute_sad(sample_vcf_file, model_file, si, '%s/%s_sad'%(options.out_dir,sample), options.seq_len, options.gpu, options.replot)
        #########################################
        # compute shuffled SAD
        #########################################
        shuffle_sad = np.zeros((true_sad.shape[0],options.num_shuffles))
        for ni in range(options.num_shuffles):
            # shuffle the SNPs within their overlapping DHS
            print(" shuffle %d" % ni)
            sample_vcf_shuf_file = '%s/%s_shuf%d.vcf' % (options.out_dir, sample, ni)
            shuffle_snps(sample_vcf_file, sample_vcf_shuf_file, genome)
            # compute SAD scores for shuffled SNPs
            print(" computing shuffle SAD")
            shuffle_sad[:,ni] = compute_sad(sample_vcf_shuf_file, model_file, si, '%s/%s_shuf%d_sad'%(options.out_dir,sample,ni), options.seq_len, options.gpu, options.replot)
        #########################################
        # simple stats
        #########################################
        # compute shuffle means
        shuffle_sad_mean = shuffle_sad.mean(axis=1)
        # print sample table
        sample_sad_out = open('%s/%s_table.txt' % (options.out_dir,sample), 'w')
        for vi in range(len(true_sad)):
            print('%f\t%f' % (true_sad[vi], shuffle_sad_mean[vi]), file=sample_sad_out)
        sample_sad_out.close()
        # scatter plot
        # plt.figure()
        # plt.scatter(true_sad, shuffle_sad_mean, color='black', alpha=0.7)
        # plt.gca().grid(True, linestyle=':')
        # plt.savefig('%s/%s_scatter.pdf' % (options.out_dir,sample))
        # plt.close()
        # plot CDFs
        # NOTE(review): hist's 'normed' kwarg was removed in matplotlib 3.x;
        # newer matplotlib needs density=True — confirm the pinned version.
        sns_colors = sns.color_palette('deep')
        plt.figure()
        plt.hist(true_sad, 1000, normed=1, histtype='step', cumulative=True, color=sns_colors[0], linewidth=1, label='SNPs')
        plt.hist(shuffle_sad.flatten(), 1000, normed=1, histtype='step', cumulative=True, color=sns_colors[2], linewidth=1, label='Shuffle')
        ax = plt.gca()
        ax.grid(True, linestyle=':')
        ax.set_xlim(-.2, .2)
        plt.legend()
        plt.savefig('%s/%s_cdf.pdf' % (options.out_dir,sample))
        plt.close()
        # plot Q-Q
        true_q = mquantiles(true_sad, np.linspace(0,1,min(10000,true_sad.shape[0])))
        shuf_q = mquantiles(shuffle_sad_mean, np.linspace(0,1,min(10000,true_sad.shape[0])))
        plt.figure()
        plt.scatter(true_q, shuf_q, color=sns_colors[0])
        pmin = 1.05*min(true_q[0], shuf_q[0])
        pmax = 1.05*max(true_q[-1], shuf_q[-1])
        plt.plot([pmin,pmax], [pmin,pmax], color='black', linewidth=1)
        ax = plt.gca()
        ax.set_xlim(pmin,pmax)
        ax.set_ylim(pmin,pmax)
        ax.set_xlabel('True SAD')
        ax.set_ylabel('Shuffled SAD')
        ax.grid(True, linestyle=':')
        plt.savefig('%s/%s_qq.pdf' % (options.out_dir,sample))
        plt.close()
        #########################################
        # statistical tests
        #########################################
        # compute matched binomial test
        true_great = sum((true_sad-shuffle_sad_mean) > 0)
        true_lo = np.log2(true_great) - np.log2(len(true_sad)-true_great)
        if true_lo > 0:
            binom_p = 1.0 - binom.cdf(true_great-1, n=len(true_sad), p=0.5)
        else:
            binom_p = binom.cdf(true_great, n=len(true_sad), p=0.5)
        # print significance stats
        cols = (sample, len(true_sad), true_great, true_lo, binom_p)
        print('%-20s %5d %5d %6.2f %6.1e' % cols, file=binom_out)
        # compute Mann-Whitney
        mw_z, mw_p = stats.mannwhitneyu(true_sad, shuffle_sad.flatten())
        cols = (sample, len(true_sad), true_sad.mean(), shuffle_sad.mean(), mw_z, mw_p)
        print('%-20s %5d %6.3f %6.3f %6.2f %6.1e' % cols, file=mw_out)
        # update sample index
        si += 1
    binom_out.close()
    mw_out.close()
    genome.close()
def compute_sad(sample_vcf_file, model_file, si, out_dir, seq_len, gpu, replot):
    """Invoke basset_sad.py and collect the SAD scores for target index *si*.

    When *replot* is true the (expensive) subprocess is skipped and the
    previously written '<out_dir>/sad_table.txt' is parsed directly.
    """
    cuda_str = '--cudnn' if gpu else ''
    cmd = 'basset_sad.py %s -l %d -o %s %s %s' % (cuda_str, seq_len, out_dir, model_file, sample_vcf_file)
    if not replot:
        subprocess.call(cmd, shell=True)

    # keep only the rows for target 't<si>'; the score is the last column
    target = 't%d' % si
    scores = []
    for row in open('%s/sad_table.txt' % out_dir):
        fields = row.split()
        if fields[3] == target:
            scores.append(float(fields[-1]))
    return np.array(scores)
def filter_vcf(vcf_file, bed_file, sample_vcf_file):
    ''' Filter the VCF file for SNPs that overlap
        the BED file, removing indels. '''

    # open filtered file
    sample_vcf_out = open(sample_vcf_file, 'w')

    # intersect
    # NOTE(review): under Python 3, p.stdout yields bytes, so split()/print
    # operate on bytes here; this code appears written for Python 2 semantics
    # ('from __future__ import print_function' at the top) — confirm the
    # intended interpreter before changing it.
    p = subprocess.Popen('bedtools intersect -wo -a %s -b %s' % (vcf_file, bed_file), stdout=subprocess.PIPE, shell=True)
    for line in p.stdout:
        a = line.split()
        # columns 3/4 are REF/ALT; keep only single-nucleotide substitutions
        if len(a[3]) == len(a[4]) == 1:
            print(line, file=sample_vcf_out, end='')

    sample_vcf_out.close()
def retrieve_sad(sample_vcf_file, sad_table_file, si):
    ''' Retrieve SAD scores from a pre-computed table.

    Note that I'm assuming here the table has all
    SAD scores in one row for each SNP so I can
    pull out the score I want as column si+1.
    '''
    # map SNP id (VCF column 3) -> position in the output vector, in VCF order
    snp_indexes = {}
    vi = 0
    for line in open(sample_vcf_file):
        a = line.split()
        snp_indexes[a[2]] = vi
        vi += 1

    # fill in the score column si+1 for every table row whose SNP id appears
    # in the VCF; rows for other SNPs are skipped.
    # (fixed: removed a leftover debug print() that dumped every table row)
    sad = np.zeros(len(snp_indexes))
    for line in open(sad_table_file):
        a = line.split()
        if a[0] in snp_indexes:
            sad[snp_indexes[a[0]]] = float(a[si+1])

    return sad
def shuffle_snps(in_vcf_file, out_vcf_file, genome):
    ''' Shuffle the SNPs within their overlapping DHS.

    Each SNP is moved to a uniformly chosen position inside its overlapping
    BED region that carries the same reference nucleotide, so the shuffled
    variant stays a valid substitution with unchanged alleles.
    '''
    out_vcf_open = open(out_vcf_file, 'w')

    for line in open(in_vcf_file):
        a = line.split()

        # read SNP info
        snp_chrom = a[0]
        snp_pos = int(a[1])
        snp_nt = a[3]

        # determine BED start: first field past the VCF columns that repeats
        # the SNP chromosome (bedtools -wo appends the BED entry)
        bi = 5
        while a[bi] != snp_chrom:
            bi += 1

        # read BED info
        bed_chrom = a[bi]
        bed_start = int(a[bi+1])
        bed_end = int(a[bi+2])

        # get sequence
        bed_seq = genome.fetch(bed_chrom, bed_start, bed_end)

        # determine positions carrying the same reference nucleotide
        bed_nt_matches = [i for i in range(len(bed_seq)) if bed_seq[i] == snp_nt]
        while len(bed_nt_matches) == 0:
            # expand segment by 10 nt
            bed_start = max(0, bed_start-10)
            bed_end += 10
            bed_seq = genome.fetch(bed_chrom, bed_start, bed_end)
            # BUG FIX: recompute the matches for the expanded segment; the
            # original never updated bed_nt_matches here, so this loop spun
            # forever whenever the initial segment lacked the nucleotide.
            bed_nt_matches = [i for i in range(len(bed_seq)) if bed_seq[i] == snp_nt]

        # sample new SNP position (+1 presumably converts the 0-based offset
        # to 1-based VCF coordinates — confirm against downstream usage)
        shuf_pos = bed_start + 1 + random.choice(bed_nt_matches)

        # write into columns
        a[1] = str(shuf_pos)
        print('\t'.join(a), file=out_vcf_open)

    out_vcf_open.close()
def shuffle_snps_old(in_vcf_file, out_vcf_file, genome):
''' Shuffle the SNPs within their overlapping DHS. '''
out_vcf_open = open(out_vcf_file, 'w')
for line in open(in_vcf_file):
a = line.split()
# read SNP info
snp_chrom = a[0]
snp_pos = int(a[1])
# determine BED start
bi = 5
while a[bi] != snp_chrom:
bi += 1
# read BED info
bed_chrom = a[bi]
bed_start = int(a[bi+1])
bed_end = int(a[bi+2])
# sample new SNP position
shuf_pos = random.randint(bed_start, bed_end-1)
while shuf_pos == snp_pos:
shuf_pos = random.randint(bed_start, bed_end-1)
# set reference allele
ref_nt = genome.fetch(snp_chrom, shuf_pos-1, shuf_pos)
# sample alternate allele
alt_nt = random.choice('ACGT')
while alt_nt == ref_nt:
alt_nt = random.choice('ACGT')
# write into columns
a[1] = str(shuf_pos)
a[3] = ref_nt
a[4] = alt_nt
print('\t'.join(a), file=out_vcf_open)
################################################################################
# __main__
################################################################################
if __name__ == '__main__':
main()
| 2.03125 | 2 |
level2/huge/split_huge_from_tar_strace.py | fishilico/sstic-2016 | 0 | 13319 | <reponame>fishilico/sstic-2016
#!/usr/bin/env python3
import codecs
import re
def trans_addr(addr):
    """Translate a file offset into the corresponding program address."""
    if addr < 0x1000:
        return 0
    # (segment start in file, segment size, program base address)
    segments = (
        (0x0000000000001000, 0x00001ef000000000, 0x00002b0000000000),
        (0x00002afffffe1000, 0x0000161000000000, 0x000049f000000000),
        (0x000049effffe1000, 0x00002afffffe0000, 0x0000000000020000),
    )
    for start, size, base in segments:
        if start <= addr < start + size:
            return base + (addr - start)
    raise Exception("Invalid addr {:#x}".format(addr))
blobs = {}

# Replay the lseek/write syscalls captured on fd 4 in the strace log and dump
# every non-empty written chunk to its own file, named after the translated
# program address of the chunk's first non-zero byte.
with open('strace_tar_output.log', 'r') as f:
    curseek = 0
    for line in f:
        m = re.match(r'lseek\(4, ([^,]*), SEEK_SET\)', line)
        if m is not None:
            # track the current file offset set by lseek
            curseek = int(m.group(1))
            continue
        if line.startswith('write(4, "'):
            m = re.match(r'write\(4, "(.*)", ([0-9]*)\) = ([0-9]*)', line)
            # BUG FIX: the original read `assert m is not None:` — the stray
            # colon made this line a SyntaxError.
            assert m is not None
            rawdata, count1, count2 = m.groups()
            # requested and actually-written byte counts must agree
            assert count1 == count2
            addr = curseek
            curseek += int(count1)
            data = codecs.escape_decode(rawdata.encode('ascii'))[0]
            # Find the first non-zero byte in the data block.
            i = 0
            while i < len(data) and not data[i]:
                i += 1
            if i >= len(data):
                continue
            addr = trans_addr(addr + i)
            data = data[i:].rstrip(b'\0')
            # FIX: use a distinct name for the output handle so it does not
            # shadow the log file object `f` still being iterated above.
            with open('out/blob-{:016x}.bin'.format(addr), 'wb') as blob_file:
                blob_file.write(data)
| 2.21875 | 2 |
UI/test/ui_test.py | tunapro1234/ai.destroy | 0 | 13320 | <filename>UI/test/ui_test.py
import pygame_gui
import pygame
class colors:
    """Hex colour constants used for the UI theme."""
    black = "#000000"
    dimgrey = "#696969"  # the X11 "dim grey" value
    darkslategray = "#2F4F4F"
TITLE = "TUNAPRO1234"  # window caption shown by pygame
BACKGROUND = colors.darkslategray  # fill colour drawn behind all widgets
WIDTH, HEIGHT = 1920, 1080  # fullscreen resolution requested in main()
"""
Hızlıca bir plan yapacağım
Neural ağları kontrol edebileceğimiz küçük bir framework
Ağların gelişimini görebileceğiz değiştirebileceğiz ve kaydedebileceğiz
Bunun için
-select box yapısı
-başlatmak için buton
-kaydetme olayları için üstteki şeyden
Pencereler
-tıkladığımız nöronun bilgilerini gösteren ve değiştirebilen bir pencere
-tıkladığımız weightin değişmesini sağlayan bir pencere
Norön, katman ve ağ için pygame wrapperları yazacağım
Weigth için de bir class olur
Kaydetme olayına daha var
"""
# elements: dict: {"buttons": butonlar, "entries", entryler}
# elements: dict: {"buttons": butonlar, "entries", entryler}
class Window:
    """Container bundling a screen surface with its button and entry widgets."""

    def __init__(self, screen, buttons=None, entries=None):
        # BUG FIX: the defaults were mutable dicts (buttons={}, entries={}),
        # which are shared across every Window instance; use None sentinels
        # so each window gets its own fresh dict.
        self.buttons = {} if buttons is None else buttons
        self.entries = {} if entries is None else entries
        self.screen = screen
def main():
    """Build a fullscreen pygame_gui demo with one of each widget type and
    run the event loop, printing widget events until the window is closed."""
    pygame.init()
    pygame.display.set_caption(TITLE)
    window_surface = pygame.display.set_mode((WIDTH, HEIGHT),
                                             pygame.FULLSCREEN)
    background = pygame.Surface((WIDTH, HEIGHT))
    background.fill(pygame.Color(BACKGROUND))
    manager = pygame_gui.UIManager((WIDTH, HEIGHT))
    # widget registries, keyed by widget name
    buttons = {}
    entries = {}
    selects = {}
    sliders = {}
    windows = {}
    # labels = {}
    dropdowns = {}
    #yapf: disable
    entries["Hello"] = pygame_gui.elements.UITextEntryLine(relative_rect=pygame.Rect((400, 500), (200, 50)), manager=manager)
    buttons["Hello"] = pygame_gui.elements.UIButton(relative_rect=pygame.Rect((605, 500), (95, 29)), text='ok', manager=manager)
    sliders["Hello"] = pygame_gui.elements.UIHorizontalSlider(relative_rect=pygame.Rect((400, 534), (300, 20)), start_value=0, value_range=(-20.0, 20.0), manager=manager)
    dropdowns["Hello"] = pygame_gui.elements.UIDropDownMenu(relative_rect=pygame.Rect((500, 100), (100, 20)), options_list=["1", "2", "3", "4"], starting_option="select", manager=manager)
    selects["Hello"] = pygame_gui.elements.UISelectionList(relative_rect=pygame.Rect((100, 500), (100, 100)), item_list=["1", "2", "3", "4"], manager=manager)
    # links["Hello"] = pygame_gui.elements.UITextBox(relative_rect=pygame.Rect((100, 500), (100, 50)), text="LABEL TUNAPRO", manager=manager)
    windows["Hello"] = pygame_gui.elements.UIWindow(rect=pygame.Rect((100, 100), (200, 200)), manager=manager, window_display_title="test", resizable=True)
    # button anchored to the window's bottom-right corner
    buttonRect = pygame.Rect(0, 0, 100, 20)
    buttonRect.bottomright = (-30, -20)
    anchors = {
        'left': 'right',
        'right': 'right',
        'top': 'bottom',
        'bottom': 'bottom'
    }
    pygame_gui.elements.UIButton(relative_rect=buttonRect, text='Hello', manager=manager, container=windows["Hello"], anchors=anchors)
    # yapf: enable
    # activate: text_box.set_active_effect(pygame_gui.TEXT_EFFECT_TYPING_APPEAR)
    # activate: text_box.set_active_effect(pygame_gui.TEXT_EFFECT_FADE_OUT)
    # activate: text_box.set_active_effect(pygame_gui.TEXT_EFFECT_FADE_IN)
    # deactivate: text_box.set_active_effect(None)
    clock = pygame.time.Clock()
    isRunning = True
    while isRunning:
        # frame time in seconds, capped at 60 FPS
        time_delta = clock.tick(60) / 1000.0
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                isRunning = False
            if event.type == pygame.USEREVENT:
                if event.ui_element == buttons["Hello"]:
                    if event.user_type == pygame_gui.UI_BUTTON_PRESSED:
                        print('Hello World!')
                if event.ui_element == dropdowns["Hello"]:
                    if event.user_type == pygame_gui.UI_DROP_DOWN_MENU_CHANGED:
                        print("Selected option:", event.text)
                if event.ui_element == entries["Hello"]:
                    if event.user_type == pygame_gui.UI_TEXT_ENTRY_FINISHED:
                        print("Entered text:", event.text)
                    # if event.user_type == pygame_gui.UI_TEXT_ENTRY_CHANGED:
                    #     print("Changed text:", event.text)
                if event.ui_element == sliders["Hello"]:
                    if event.user_type == pygame_gui.UI_HORIZONTAL_SLIDER_MOVED:
                        print('current slider value:', event.value)
                if event.ui_element == selects["Hello"]:
                    if event.user_type == pygame_gui.UI_SELECTION_LIST_NEW_SELECTION:
                        print("Selected item:", event.text)
            manager.process_events(event)
        manager.update(time_delta)
        window_surface.blit(background, (0, 0))
        manager.draw_ui(window_surface)
        pygame.display.update()
if __name__ == "__main__":
main() | 2.75 | 3 |
tests/test_temperature_system.py | SmartSleepIoT/SmartSleepCoding | 0 | 13321 | import time
import pytest
from flask import g
from flask import session
import paho.mqtt.client as paho
from SmartSleep.db import get_db
from flask import json
import runpy
msg_nr = 0
messages = [""]
broker = 'broker.emqx.io'
port = 1883
def update_contor():
    """Advance the global expected-message counter by one."""
    global msg_nr
    msg_nr = msg_nr + 1
def on_message(client, userdata, message):
    """MQTT callback: verify the payload matches the next expected message.

    Accepts either a "status" or a "db" payload ("status" takes precedence),
    asserts it equals messages[msg_nr], then advances the counter.
    """
    payload = json.loads(message.payload)
    for key in ("status", "db"):
        if key in payload:
            assert payload[key] == messages[msg_nr]
            update_contor()
            break
def test_cooling_system(client, auth):
    """End-to-end check of the temperature system: subscribe to the MQTT
    status topic, feed a sequence of sensor readings over HTTP, and let
    on_message() assert each expected status transition in order."""
    global msg_nr
    msg_nr = 0
    global messages
    # expected MQTT messages, in arrival order (sensor echo + level changes)
    messages = ['16',
                "Setting the temperature system level to 1.0", "New temperature system level set to 1.0",
                '16',
                "Setting the temperature system level to 2.0", "New temperature system level set to 2.0",
                '16',
                "Setting the temperature system level to 3.0", "New temperature system level set to 3.0",
                '16',
                "Setting the temperature system level to 4.0", "New temperature system level set to 4.0",
                '19',
                "Setting the temperature system level to 3.0", "New temperature system level set to 3.0",
                '16',
                "Setting the temperature system level to 4.0", "New temperature system level set to 4.0",
                "18"
                ]
    time.sleep(2)
    # connect to the public broker and listen for status updates
    client_mqtt = paho.Client("client-test-snoring")
    client_mqtt.on_message = on_message
    client_mqtt.connect(broker)
    client_mqtt.loop_start()
    client_mqtt.subscribe("SmartSleep/SoundSensor")
    auth.login()
    response = client.post(f"/config/start_to_sleep?sleep_now={True}")
    assert response.status_code == 200
    # desired temperature 18; sensor readings below/above drive the level
    response = client.post("/config/temp?temperature=18")
    assert response.status_code == 200
    time.sleep(1.5)
    response = client.post("/config/current_temp?sensor=16")
    assert response.status_code == 200
    time.sleep(1.5)
    response = client.post("/config/current_temp?sensor=16")
    assert response.status_code == 200
    time.sleep(1.5)
    response = client.post("/config/current_temp?sensor=16")
    assert response.status_code == 200
    time.sleep(1.5)
    response = client.post("/config/current_temp?sensor=16")
    assert response.status_code == 200
    time.sleep(1.5)
    response = client.post("/config/current_temp?sensor=19")
    assert response.status_code == 200
    time.sleep(1.5)
    response = client.post("/config/current_temp?sensor=16")
    assert response.status_code == 200
    time.sleep(1.5)
    response = client.post("/config/current_temp?sensor=18")
    assert response.status_code == 200
    time.sleep(1.5)
| 2.15625 | 2 |
3_gabor/model/gabor_rf/maprf/invlink.py | mackelab/IdentifyMechanisticModels_2020 | 3 | 13322 | import theano.tensor as tt
def explin(x):
return tt.where(x >= 0, 1 + x, tt.exp(x))
def log_exp1p(x):
return tt.log1p(tt.exp(x))
| 2.171875 | 2 |
readtest.py | pyEtherCAT/Test-Source | 0 | 13323 | from pyEtherCAT import MasterEtherCAT #ライブラリの読出し
nic = "eth0" # ネットワークカードのアドレスを記載
cat = MasterEtherCAT.MasterEtherCAT(nic)
ADP = 0x0000 #1台目
ADDR = 0x0E00 #コアレジスタのアドレス
cat.APRD(IDX=0x00, ADP=ADP, ADO=ADDR, DATA=[0,0,0,0,0,0,0,0]) #DATAは0を8個(64bit分)の枠を指示
(DATA, WKC) = cat.socket_read() #結果を読出し
print("[0x{:04X}]= 0x{:02x}{:02x},0x{:02x}{:02x},0x{:02x}{:02x},0x{:02x}{:02x}".format(ADDR, DATA[7],DATA[6],DATA[5],DATA[4],DATA[3],DATA[2],DATA[1],DATA[0]))
#読み出したデータを表示する | 2.328125 | 2 |
python/dataingest/grammar/dmo/python_loc_parser.py | jiportilla/ontology | 0 | 13324 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
import pprint
from base import BaseObject
from base import FileIO
class PythonLOCParser(BaseObject):
    """ Parse T/LOC from a Python File

    LOC is the total number of lines; TLOC counts only non-blank lines.
    """

    def __init__(self,
                 file_path: str,
                 is_debug: bool = False):
        """
        Created:
            24-Dec-2019
            <EMAIL>
            *   https://github.ibm.com/GTS-CDO/unstructured-analytics/issues/1637#issuecomment-16802191
        :param file_path:
            link to a python file
        :param is_debug:
            when True, log the parse result
        """
        BaseObject.__init__(self, __name__)
        self._is_debug = is_debug
        self._file_path = file_path

    def _lines(self) -> list:
        # read the target file as a list of lines, preserving original order
        lines = FileIO.file_to_lines(self._file_path, use_sort=False)
        return lines

    def process(self) -> dict:
        """
        Compute line counts for the configured file.
        :return:
            dict with Provenance, FilePath, LOC (total lines) and
            TLOC (non-blank lines); counts are stringified
        """
        lines = self._lines()

        loc = len(lines)
        tloc = len([line for line in lines if line and len(line.strip())])

        d_result = {
            "Provenance": str(self.__class__.__name__),
            "FilePath": self._file_path,
            "LOC": str(loc),
            "TLOC": str(tloc)}

        if self._is_debug:
            self.logger.debug('\n'.join([
                "LOC Parsing Complete",
                pprint.pformat(d_result, indent=4)]))

        return d_result
| 2.765625 | 3 |
time_series_data_generator/csv_to_df_generator.py | ArtHackDay-Plus1/ParameterServer | 0 | 13325 | <filename>time_series_data_generator/csv_to_df_generator.py
import pandas as pd
import time
# Replay a CSV of (x, y) samples as a pseudo time series: print one row
# every 0.1 s to emulate a live data generator.
df = pd.read_csv("data/sample.csv")

for num in range(1000):
    # NOTE(review): Series.get_values() was removed in pandas 1.0; newer
    # pandas requires .to_numpy() — confirm the pinned pandas version.
    argx = str(df["x"][num:num+1].get_values())
    argy = str(df["y"][num:num+1].get_values())
    print("x:{0} / y:{1}".format(argx,argy))
    time.sleep(0.1)
| 3.1875 | 3 |
architecture_tool_django/graphdefs/urls.py | goldginkgo/architecture_tool_django | 1 | 13326 | from django.urls import path
from . import views
# URL namespace; reverse routes as e.g. "graphs:graph.list".
app_name = "graphs"

# CRUD routes for graph objects; <str:pk> is the graph's primary key.
urlpatterns = [
    path("graphs/", views.GraphListView.as_view(), name="graph.list"),
    path("graphs/create/", views.GraphCreateView.as_view(), name="graph.create"),
    path(
        "graphs/<str:pk>/",
        views.GraphDetailView.as_view(),
        name="graph.detail",
    ),
    path(
        "graphs/<str:pk>/update/",
        views.GraphUpdateView.as_view(),
        name="graph.update",
    ),
    path(
        "graphs/<str:pk>/delete/",
        views.GraphDeleteView.as_view(),
        name="graph.delete",
    ),
]
| 1.890625 | 2 |
lib/utils/blob.py | TheRevanchist/DeepWatershedDetection | 0 | 13327 | <reponame>TheRevanchist/DeepWatershedDetection
# --------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by <NAME> - extended by <NAME>
# --------------------------------------------------------
"""Blob helper functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import cv2
import random
def im_list_to_blob(ims):
  """Stack a list of prepared images into one zero-padded 4-D blob.

  Assumes images are already prepared (means subtracted, BGR order, ...).
  Smaller images are zero-padded on the bottom/right to the largest H and W.
  """
  heights = [im.shape[0] for im in ims]
  widths = [im.shape[1] for im in ims]
  blob = np.zeros((len(ims), max(heights), max(widths), 3),
                  dtype=np.float32)
  for i, im in enumerate(ims):
    blob[i, :im.shape[0], :im.shape[1], :] = im
  return blob
def prep_im_for_blob(im, pixel_means, global_scale, args):
  """Mean subtract and scale an image for use in a blob.

  Returns (image, effective_scale, crop_box) where crop_box is
  [y0, x0, y1, x1] in post-scaling coordinates.
  """
  im = im.astype(np.float32, copy=False)
  # substract mean
  if args.substract_mean == "True":
    im -= pixel_means
  # do global scaling
  im = cv2.resize(im, None, None, fx=global_scale, fy=global_scale,
                  interpolation=cv2.INTER_LINEAR)
  im_size_max = np.max(im.shape[0:2])
  # Prevent the biggest axis from being more than MAX_SIZE
  if im_size_max > args.max_edge:
    if not args.crop == "True":
      # scale down if bigger than max size
      re_scale = (float(args.max_edge) / float(im_size_max))
      im = cv2.resize(im, None, None, fx=re_scale, fy=re_scale,
                      interpolation=cv2.INTER_LINEAR)
      global_scale = global_scale*re_scale
      crop_box = [0,0,im.shape[0],im.shape[1]]
    else:
      # Crop image
      # with probability crop_top_left_bias, force the crop to the top-left corner
      topleft = random.uniform(0,1)<args.crop_top_left_bias
      # crop to max size if necessary
      if im.shape[0] <= args.max_edge or topleft:
        crop_0 = 0
      else:
        crop_0 = random.randint(0,im.shape[0]-args.max_edge)
      if im.shape[1] <= args.max_edge or topleft:
        crop_1 = 0
      else:
        crop_1 = random.randint(0,im.shape[1]-args.max_edge)
      crop_box = [crop_0, crop_1, min(crop_0+args.max_edge,im.shape[0]), min(crop_1+args.max_edge,im.shape[1])]
      im = im[crop_box[0]:crop_box[2],crop_box[1]:crop_box[3]]
  else:
    crop_box = [0, 0, im.shape[0], im.shape[1]]
  if not args.pad_to == 0:
    # pad to fit RefineNet #TODO fix refinenet padding problem
    # pad bottom/right with white up to the next multiple of args.pad_to
    # NOTE(review): the canvas is uint8, so the float32 (possibly
    # mean-subtracted) image is truncated to uint8 here — confirm intended.
    y_mulity = int(np.ceil(im.shape[0] / float(args.pad_to)))
    x_mulity = int(np.ceil(im.shape[1] / float(args.pad_to)))
    canv = np.ones([y_mulity * args.pad_to, x_mulity * args.pad_to,3], dtype=np.uint8) * 255
    canv[0:im.shape[0], 0:im.shape[1]] = im
    im = canv
  return im, global_scale, crop_box
| 2.4375 | 2 |
edlm/convert/_get_media_folders.py | etcher-be/EDLM | 0 | 13328 | <gh_stars>0
# coding=utf-8
"""
Gathers the media folders
"""
import elib
from ._context import Context
def get_media_folders(ctx: Context):
    """
    Gathers the media folders.

    Walks upward from ``ctx.source_folder`` to the filesystem mount point,
    collecting every existing ``media`` sub-folder found along the way, and
    stores the result on ``ctx.media_folders``.

    Args:
        ctx: conversion context; reads ``ctx.source_folder`` and writes
            ``ctx.media_folders``.
    """
    ctx.info('gathering media folders')
    media_folders = []
    this_folder = ctx.source_folder
    while True:
        ctx.debug(f'traversing: "{this_folder}"')
        media_folder_candidate = elib.path.ensure_path(this_folder, 'media', must_exist=False).absolute()
        if media_folder_candidate.exists() and media_folder_candidate.is_dir():
            ctx.debug(f'media folder found: "{media_folder_candidate}"')
            media_folders.append(media_folder_candidate)
        # `is` compares identity, not value; it only worked here by accident
        # of CPython small-int interning, so compare with == instead.
        if len(this_folder.parents) == 1:
            ctx.debug(f'reach mount point at: "{this_folder}"')
            break
        this_folder = this_folder.parent
    # if not media_folders:
    #     raise ConvertError('no media folder found', ctx)
    ctx.info(f'media folders:\n{elib.pretty_format(media_folders)}')
    ctx.media_folders = media_folders
| 2.890625 | 3 |
twister2/python-support/src/main/python/twister2/tset/TLink.py | pulasthi/twister2 | 63 | 13329 | <filename>twister2/python-support/src/main/python/twister2/tset/TLink.py
from inspect import signature
import twister2.tset.TSet as ts
from twister2.utils import function_wrapper
class TLink:
    """Python-side wrapper around a Twister2 Java TLink.

    Each transformation wraps the user's Python callable, builds the
    corresponding Java function object through the environment's function
    factories, and applies it to the underlying Java link, returning a new
    :class:`TSet` where applicable.
    """

    def __init__(self, java_ref, env):
        # java_ref: the underlying Java TLink reference.
        # env: Twister2 environment providing the function builders.
        self.__java_ref = java_ref
        self.__env = env

    def map(self, lam):
        """Applies `lam` element-wise and returns the resulting TSet."""
        map_wrapper = function_wrapper(lam)
        map_func_java_ref = self.__env.functions.map.build(map_wrapper)
        map_t_set_java_ref = self.__java_ref.map(map_func_java_ref)
        return ts.TSet(map_t_set_java_ref, self.__env)

    def flat_map(self, lam):
        """Applies `lam` and flattens its results into a new TSet."""
        flat_map_wrapper = function_wrapper(lam)
        flat_map_func_java_ref = self.__env.functions.flat_map.build(flat_map_wrapper)
        flat_map_t_set_java_ref = self.__java_ref.flatmap(flat_map_func_java_ref)
        return ts.TSet(flat_map_t_set_java_ref, self.__env)

    def sink(self, sink_func):
        """Terminates the link by passing every element to `sink_func`."""
        sink_wrapper = function_wrapper(sink_func)
        sink_func_java_ref = self.__env.functions.sink.build(sink_wrapper)
        self.__java_ref.sink(sink_func_java_ref)

    def compute(self, compute_func):
        """Applies `compute_func`, selecting the collector variant when the
        callable declares three parameters.
        """
        compute_wrapper = function_wrapper(compute_func)
        # `is` compares identity and only works for small ints by accident of
        # CPython interning; use == for the arity check. A three-parameter
        # function means the user expects the collector version of compute.
        if len(signature(compute_func).parameters) == 3:
            compute_collector_func_java_ref = self.__env.functions \
                .compute_with_collector.build(compute_wrapper)
            return ts.TSet(self.__java_ref.compute(compute_collector_func_java_ref), self.__env)
        else:
            compute_func_java_ref = self.__env.functions.compute.build(compute_wrapper)
            return ts.TSet(self.__java_ref.compute(compute_func_java_ref), self.__env)

    def for_each(self, foreach_func):
        """Applies `foreach_func` to every element for its side effects."""
        foreach_wrapper = function_wrapper(foreach_func)
        foreach_func_java_ref = self.__env.functions.apply.build(foreach_wrapper)
        return ts.TSet(self.__java_ref.forEach(foreach_func_java_ref), self.__env)
| 2.390625 | 2 |
e2cnn/nn/modules/nonlinearities/concatenated.py | ziatdinovmax/e2cnn | 0 | 13330 | <filename>e2cnn/nn/modules/nonlinearities/concatenated.py
from e2cnn.gspaces import *
from e2cnn.nn import FieldType
from e2cnn.nn import GeometricTensor
from e2cnn.group import Representation
from e2cnn.group.representation import build_from_discrete_group_representation
from ..equivariant_module import EquivariantModule
import torch
from typing import List, Tuple, Any
import numpy as np
import math
__all__ = ["ConcatenatedNonLinearity"]
class ConcatenatedNonLinearity(EquivariantModule):

    def __init__(self, in_type, function = "c_relu"):
        r"""
        Concatenated non-linearities.
        For each input channel, the module applies the specified activation function both to its value and its opposite
        (the value multiplied by -1).
        The number of channels is, therefore, doubled.
        Notice that not all the representations support this kind of non-linearity. Indeed, only representations
        with the same pattern of permutation matrices and containing only values in :math:`\{0, 1, -1\}` support it.
        Args:
            in_type (FieldType): the input field type
            function (str): the identifier of the non-linearity. It is used to specify which function to apply.
                By default (``'c_relu'``), ReLU is used.
        """
        assert isinstance(in_type.gspace, GeneralOnR2)
        for r in in_type.representations:
            assert "concatenated" in r.supported_nonlinearities, (
                'Error! Representation "{}" does not support "concatenated"'
                " non-linearity".format(r.name)
            )
        super(ConcatenatedNonLinearity, self).__init__()
        self.space = in_type.gspace
        self.in_type = in_type
        # compute the output representation given the input one
        self.out_type = ConcatenatedNonLinearity._transform_fiber_representation(
            in_type
        )
        # retrieve the activation function to apply
        if function == "c_relu":
            self._function = torch.relu
        elif function == "c_sigmoid":
            self._function = torch.sigmoid
        elif function == "c_tanh":
            self._function = torch.tanh
        else:
            raise ValueError('Function "{}" not recognized!'.format(function))

    def forward(self, input):
        assert input.type == self.in_type
        b, c, w, h = input.tensor.shape
        # build the output tensor
        output = torch.empty(
            b, 2 * c, w, h, dtype=torch.float, device=input.tensor.device
        )
        # Each channel is expanded to 2 channels, interleaved to match the
        # 2x2 blocks built by _transform_representation.
        # NOTE: the slice expressions on these two lines were corrupted in the
        # source (replaced by IPv6-like scrubbing artifacts); restored to the
        # even/odd channel interleaving required by the output representation.
        # first, apply the non-linearity to the value (even channels)
        output[:, ::2, ...] = self._function(input.tensor)
        # then, apply the non-linearity to the value with the sign inverted
        # (odd channels)
        output[:, 1::2, ...] = self._function(-1 * input.tensor)
        # wrap the result in a GeometricTensor
        return GeometricTensor(output, self.out_type)

    def evaluate_output_shape(self, input_shape):
        assert len(input_shape) == 4
        assert input_shape[1] == self.in_type.size
        b, c, hi, wi = input_shape
        return b, self.out_type.size, hi, wi

    def check_equivariance(self, atol = 1e-6, rtol = 1e-5):
        c = self.in_type.size
        x = torch.randn(3, c, 10, 10)
        x = GeometricTensor(x, self.in_type)
        errors = []
        for el in self.space.testing_elements:
            out1 = self(x).transform_fibers(el)
            out2 = self(x.transform_fibers(el))
            errs = (out1.tensor - out2.tensor).detach().numpy()
            errs = np.abs(errs).reshape(-1)
            print(el, errs.max(), errs.mean(), errs.var())
            assert torch.allclose(out1.tensor, out2.tensor, atol=atol, rtol=rtol), (
                'The error found during equivariance check with element "{}" is too'
                " high: max = {}, mean = {} var ={}".format(
                    el, errs.max(), errs.mean(), errs.var()
                )
            )
            errors.append((el, errs.mean()))
        return errors

    @staticmethod
    def _transform_fiber_representation(in_type):
        r"""
        Compute the output representation from the input one after applying the concatenated non-linearity.
        Args:
            in_type (FieldType): the input field type
        Returns:
            (FieldType): the new output field type
        """
        transformed = {}
        # transform each different input Representation
        for repr in in_type._unique_representations:
            transformed[repr] = ConcatenatedNonLinearity._transform_representation(repr)
        new_representations = []
        # concatenate the new representations
        for repr in in_type.representations:
            new_representations.append(transformed[repr])
        return FieldType(in_type.gspace, new_representations)

    @staticmethod
    def _transform_representation(representation):
        r"""
        Transform an input :class:`~e2cnn.group.Representation` according to the concatenated non-linearity.
        The input representation needs to have the pattern of a permutation matrix, with values -1 or 1.
        The output representation has double the size of the input one and is built by substituting the ``1`` s with 2x2
        identity matrices and the ``-1`` s with 2x2 antidiagonal matrix containing ``1`` s.
        Args:
            representation (Representation): the input representation
        Returns:
            (Representation): the new output representation
        """
        group = representation.group
        assert not group.continuous
        # the name of the new representation
        name = "concatenated_{}".format(representation.name)
        if name in group.representations:
            # if the representation has already been built, return it
            r = group.representations[name]
        else:
            # otherwise, build the new representation
            s = representation.size
            rep = {}
            # build the representation for each element
            for element in group.elements:
                # retrieve the input representation of the current element
                r = representation(element)
                # build the matrix for the output representation of the current element
                rep[element] = np.zeros((2 * s, 2 * s))
                # check if the input matrix has the pattern of a permutation matrix
                e = [-1] * s
                for i in range(s):
                    for j in range(s):
                        if not math.isclose(r[i, j], 0, abs_tol=1e-9):
                            if e[i] < 0:
                                e[i] = j
                            else:
                                raise ValueError(
                                    '''Error! the representation should have the pattern of a permutation matrix
but 2 values have been found in a row for element "{}"'''.format(
                                        element
                                    )
                                )
                if len(set(e)) != len(e):
                    raise ValueError(
                        '''Error! the representation should have the pattern of a permutation matrix
but 2 values have been found in a column for element "{}"'''.format(
                            element
                        )
                    )
                # parse the input representation matrix and fill the output representation accordingly
                for i in range(s):
                    for j in range(s):
                        if math.isclose(r[i, j], 1, abs_tol=1e-9):
                            # if the current cell contains 1, fill the output submatrix with the 2x2 identity
                            rep[element][2 * i : 2 * i + 2, 2 * j : 2 * j + 2] = np.eye(
                                2
                            )
                        elif math.isclose(r[i, j], -1, abs_tol=1e-9):
                            # if the current cell contains -1, fill the output submatrix with the 2x2 antidiagonal matrix
                            rep[element][
                                2 * i : 2 * i + 2, 2 * j : 2 * j + 2
                            ] = np.flipud(np.eye(2))
                        elif not math.isclose(r[i, j], 0, abs_tol=1e-9):
                            # otherwise the cell has to contain a 0
                            raise ValueError(
                                '''Error! The representation should be a signed permutation matrix and, therefore,
contain only -1, 1 or 0 values but {} found in position({}, {}) for element "{}"'''.format(
                                    r[i, j], i, j, element
                                )
                            )
            # the resulting representation is a quotient representation and, therefore,
            # it also supports pointwise non-linearities
            nonlinearities = representation.supported_nonlinearities.union(
                ["pointwise"]
            )
            # build the output representation
            r = build_from_discrete_group_representation(
                rep, name, group, supported_nonlinearities=nonlinearities
            )
        return r
| 2.75 | 3 |
modules/gathering/host_gathering.py | anouarbensaad/VulnX | 10 | 13331 | <gh_stars>1-10
import requests
import re
import socket
from common.colors import bad,que, info, good,run,W,end
from common.uriParser import parsing_url as hostd
class GatherHost():
    """Gathers host information (server/OS headers, domain age, geo data)
    for the target `url`."""

    def __init__(self, url, headers=None):
        # url: full target URL; headers: optional HTTP headers dict used on
        # every outgoing request.
        self.url = url
        self.headers = headers

    def match_info(self, regex, data):
        """Searches `data` for `regex`; returns {'data': <group 1>} on a
        match, or None when the pattern is not found."""
        match = re.search(regex, data)
        if match:
            return dict(
                data=match.group(1)
            )

    def match_printer(self, to_match, match):
        """Prints a labelled value, tolerating a missing match.

        Fix: `match_info` returns None when nothing matched, and the old code
        crashed with a TypeError on `None['data']`; guard against that.
        """
        if match and match['data']:
            print(' {0} {1} : {2}'.format(good, to_match, match['data']))

    def os_server(self):
        """Prints the hosting server and OS parsed from the Server header."""
        response = requests.get(self.url, headers=self.headers).headers
        try:
            regx = re.compile(r"(.+) \((.+)\)")
            data = regx.search(response["server"])
            try:
                print(' {0} {1}Server :{2} {3}' .format(good, W, end, data.group(1)))
                print(' {0} {1}OS :{2} {3}' .format(good, W, end, data.group(2)))
            except AttributeError:
                print(' {0} Cannot Find OS & HostingServer ' .format(bad))
        except KeyError:
            print(' {0} Cannot Find the server headers ' .format(bad))

    def web_host(self):
        """Prints domain creation date and ipinfo.io geolocation details."""
        urldate = "https://input.payapi.io/v1/api/fraud/domain/age/" + hostd(self.url)
        # Fix: `requests.get(url, self.headers)` passed the headers dict as
        # the positional `params` argument (query string); use the `headers`
        # keyword instead.
        getinfo = requests.get(urldate, headers=self.headers).text
        regex_date = r'Date: (.+?)-(.+?)'
        regex_date = re.compile(regex_date)
        matches = re.search(regex_date, getinfo)
        try:
            if matches:
                print(' {0} Domain Created on : {1}'.format(good, matches.group(1)))
            ip = socket.gethostbyname(hostd(self.url))
            print(' {0} CloudFlare IP : {1}'.format(good, ip))
            ipinfo = "http://ipinfo.io/" + ip + "/json"
            gather = requests.get(ipinfo, headers=self.headers).text
            self.match_printer('Country', self.match_info(r'country\": \"(.+?)\"', gather))
            self.match_printer('Region', self.match_info(r'region\": \"(.+?)\"', gather))
            self.match_printer('Timezone', self.match_info(r'timezone\": \"(.+?)\"', gather))
            self.match_printer('Postal', self.match_info(r'postal\": \"(.+?)\"', gather))
            self.match_printer('Org', self.match_info(r'org\": \"(.+?)\"', gather))
            self.match_printer('Location', self.match_info(r'loc\": \"(.+?)\"', gather))
        except Exception as err:
            print(' {0} Parse Error : {1}' .format(bad, err))
datastore/core/basic.py | datastore/datastore | 65 | 13332 |
from key import Key
from query import Cursor
class Datastore(object):
    '''A Datastore represents storage for any key-value pair.

    Datastores are general enough to be backed by all kinds of different storage:
    in-memory caches, databases, a remote datastore, flat files on disk, etc.

    The general idea is to wrap a more complicated storage facility in a simple,
    uniform interface, keeping the freedom of using the right tools for the job.
    In particular, a Datastore can aggregate other datastores in interesting ways,
    like sharded (to distribute load) or tiered access (caches before databases).

    While Datastores should be written general enough to accept all sorts of
    values, some implementations will undoubtedly have to be specific (e.g. SQL
    databases where fields should be decomposed into columns), particularly to
    support queries efficiently.
    '''

    # Main API. Datastore implementations MUST implement these methods.

    def get(self, key):
        '''Return the object named by key or None if it does not exist.

        None takes the role of default value, so no KeyError exception is raised.

        Args:
            key: Key naming the object to retrieve

        Returns:
            object or None
        '''
        raise NotImplementedError

    def put(self, key, value):
        '''Stores the object `value` named by `key`.

        How to serialize and store objects is up to the underlying datastore.
        It is recommended to use simple objects (strings, numbers, lists, dicts).

        Args:
            key: Key naming `value`
            value: the object to store.
        '''
        raise NotImplementedError

    def delete(self, key):
        '''Removes the object named by `key`.

        Args:
            key: Key naming the object to remove.
        '''
        raise NotImplementedError

    def query(self, query):
        '''Returns an iterable of objects matching criteria expressed in `query`

        Implementations of query will be the largest differentiating factor
        amongst datastores. All datastores **must** implement query, even using
        query's worst case scenario, see :ref:class:`Query` for details.

        Args:
            query: Query object describing the objects to return.

        Returns:
            iterable cursor with all objects matching criteria
        '''
        raise NotImplementedError

    # Secondary API. Datastores MAY provide optimized implementations.

    def contains(self, key):
        '''Returns whether the object named by `key` exists.

        The default implementation pays the cost of a get. Some datastore
        implementations may optimize this.

        Args:
            key: Key naming the object to check.

        Returns:
            boolean whether the object exists
        '''
        return self.get(key) is not None
class NullDatastore(Datastore):
    '''A no-op datastore: accepts every call but never stores anything.

    Conforms to the full Datastore API, which makes it handy in tests.
    '''

    def get(self, key):
        '''Always returns None, since nothing is ever stored.'''
        return None

    def put(self, key, value):
        '''Accepts and silently discards `value`.'''
        pass

    def delete(self, key):
        '''Nothing is ever stored, so there is nothing to remove.'''
        pass

    def query(self, query):
        '''Runs `query` over an empty collection and returns its cursor.'''
        return query([])
class DictDatastore(Datastore):
    '''Simple straw-man in-memory datastore backed by nested dicts.'''

    def __init__(self):
        self._items = dict()

    def _collection(self, key):
        '''Returns the namespace collection for `key`, creating it on demand.'''
        collection = str(key.path)
        if collection not in self._items:
            self._items[collection] = dict()
        return self._items[collection]

    def get(self, key):
        '''Return the object named by `key` or None.

        Retrieves the object from the collection corresponding to ``key.path``.

        Args:
            key: Key naming the object to retrieve.

        Returns:
            object or None
        '''
        # Fix: `except KeyError, e` is Python-2-only syntax (invalid in
        # Python 3) and the captured exception was unused anyway.
        try:
            return self._collection(key)[key]
        except KeyError:
            return None

    def put(self, key, value):
        '''Stores the object `value` named by `key`.

        Stores the object in the collection corresponding to ``key.path``.
        A None value is treated as a delete.

        Args:
            key: Key naming `value`
            value: the object to store.
        '''
        if value is None:
            self.delete(key)
        else:
            self._collection(key)[key] = value

    def delete(self, key):
        '''Removes the object named by `key`.

        Removes the object from the collection corresponding to ``key.path``,
        and drops the collection itself once it becomes empty.

        Args:
            key: Key naming the object to remove.
        '''
        try:
            del self._collection(key)[key]
            if len(self._collection(key)) == 0:
                del self._items[str(key.path)]
        except KeyError:
            pass

    def contains(self, key):
        '''Returns whether the object named by `key` exists.

        Checks for the object in the collection corresponding to ``key.path``.

        Args:
            key: Key naming the object to check.

        Returns:
            boolean whether the object exists
        '''
        return key in self._collection(key)

    def query(self, query):
        '''Returns an iterable of objects matching criteria expressed in `query`

        Naively applies the query operations on the objects within the namespaced
        collection corresponding to ``query.key.path``.

        Args:
            query: Query object describing the objects to return.

        Returns:
            iterable cursor with all objects matching criteria
        '''
        # entire dataset already in memory, so ok to apply query naively
        if str(query.key) in self._items:
            return query(self._items[str(query.key)].values())
        else:
            return query([])

    def __len__(self):
        return sum(map(len, self._items.values()))
class InterfaceMappingDatastore(Datastore):
    '''Simple wrapper datastore around an object that, while not a Datastore
    itself, exposes data storage through a similar interface. For example,
    memcached and redis both implement a `get`, `set`, `delete` interface.
    '''

    def __init__(self, service, get='get', put='put', delete='delete', key=str):
        '''Initialize the datastore with given `service`.

        Args:
            service: A service that provides data storage through a similar
                interface to Datastore. Using the service should only require a
                simple mapping of methods, such as {put : set}.
            get: The attribute name of the `service` method implementing get
            put: The attribute name of the `service` method implementing put
            delete: The attribute name of the `service` method implementing delete
            key: A function converting a Datastore key (of type Key) into a
                `service` key. The conversion will often be as simple as `str`.
        '''
        self._service = service
        self._service_key = key
        # getattr raises AttributeError here if `service` does not actually
        # implement the required interface.
        self._service_ops = {
            'get': getattr(service, get),
            'put': getattr(service, put),
            'delete': getattr(service, delete),
        }

    def get(self, key):
        '''Return the object in `service` named by `key` or None.

        Args:
            key: Key naming the object to retrieve.

        Returns:
            object or None
        '''
        return self._service_ops['get'](self._service_key(key))

    def put(self, key, value):
        '''Stores the object `value` named by `key` in `service`.

        Args:
            key: Key naming `value`.
            value: the object to store.
        '''
        self._service_ops['put'](self._service_key(key), value)

    def delete(self, key):
        '''Removes the object named by `key` in `service`.

        Args:
            key: Key naming the object to remove.
        '''
        self._service_ops['delete'](self._service_key(key))
class ShimDatastore(Datastore):
    '''A non-concrete datastore that adds functionality between the client and
    a lower-level datastore.

    Shim datastores do not store data themselves; they delegate storage to an
    underlying child datastore. This default implementation simply forwards
    every call to the child; subclasses override individual methods to add
    behavior (caching, logging, key rewriting, serialization, ...).
    '''

    def __init__(self, datastore):
        '''Initializes this ShimDatastore with child `datastore`.'''
        if not isinstance(datastore, Datastore):
            errstr = 'datastore must be of type %s. Got %s.'
            raise TypeError(errstr % (Datastore, datastore))
        self.child_datastore = datastore

    # default implementation just passes all calls to child

    def get(self, key):
        '''Return the object named by key or None if it does not exist.

        Simply forwards to ``child_datastore.get(key)``; override to add
        behavior, e.g. deserializing the stored value before returning it.

        Args:
            key: Key naming the object to retrieve

        Returns:
            object or None
        '''
        return self.child_datastore.get(key)

    def put(self, key, value):
        '''Stores the object `value` named by `key`.

        Simply forwards to ``child_datastore.put(key, value)``; override to
        add behavior, e.g. serializing the value before storage.

        Args:
            key: Key naming `value`.
            value: the object to store.
        '''
        self.child_datastore.put(key, value)

    def delete(self, key):
        '''Removes the object named by `key`.

        Simply forwards to ``child_datastore.delete(key)``; override to add
        behavior.

        Args:
            key: Key naming the object to remove.
        '''
        self.child_datastore.delete(key)

    def query(self, query):
        '''Returns an iterable of objects matching criteria expressed in `query`.

        Simply forwards to ``child_datastore.query(query)``; override to add
        behavior, e.g. post-processing the cursor's iterable.

        Args:
            query: Query object describing the objects to return.

        Returns:
            iterable cursor with all objects matching criteria
        '''
        return self.child_datastore.query(query)
class CacheShimDatastore(ShimDatastore):
    '''Shim that keeps a second datastore as a write-through cache in front of
    the child datastore, answering reads from the cache when possible.
    '''

    def __init__(self, *args, **kwargs):
        self.cache_datastore = kwargs.pop('cache')
        if not isinstance(self.cache_datastore, Datastore):
            errstr = 'datastore must be of type %s. Got %s.'
            raise TypeError(errstr % (Datastore, self.cache_datastore))
        super(CacheShimDatastore, self).__init__(*args, **kwargs)

    def get(self, key):
        '''Return the object named by key or None if it does not exist.

        The cache is consulted first; only a miss falls through to the child.
        '''
        cached = self.cache_datastore.get(key)
        if cached is not None:
            return cached
        return self.child_datastore.get(key)

    def put(self, key, value):
        '''Stores the object `value` named by `key`.

        Write-through: both the cache and the child receive the value.
        '''
        self.cache_datastore.put(key, value)
        self.child_datastore.put(key, value)

    def delete(self, key):
        '''Removes the object named by `key` from both cache and child.'''
        self.cache_datastore.delete(key)
        self.child_datastore.delete(key)

    def contains(self, key):
        '''Returns whether the object named by `key` exists, checking the
        cache before the child.'''
        if self.cache_datastore.contains(key):
            return True
        return self.child_datastore.contains(key)
class LoggingDatastore(ShimDatastore):
    '''Shim that logs every datastore operation before delegating it to the
    child datastore.'''

    def __init__(self, child_datastore, logger=None):
        # Fall back to the stdlib logging module itself when no logger given.
        if not logger:
            import logging as logger
        self.logger = logger
        super(LoggingDatastore, self).__init__(child_datastore)

    def get(self, key):
        '''Logs, then returns the object named by key (or None).'''
        self.logger.info('%s: get %s' % (self, key))
        result = super(LoggingDatastore, self).get(key)
        self.logger.debug('%s: %s' % (self, result))
        return result

    def put(self, key, value):
        '''Logs, then stores the object `value` named by `key`.'''
        self.logger.info('%s: put %s' % (self, key))
        self.logger.debug('%s: %s' % (self, value))
        super(LoggingDatastore, self).put(key, value)

    def delete(self, key):
        '''Logs, then removes the object named by `key`.'''
        self.logger.info('%s: delete %s' % (self, key))
        super(LoggingDatastore, self).delete(key)

    def contains(self, key):
        '''Logs, then returns whether the object named by `key` exists.'''
        self.logger.info('%s: contains %s' % (self, key))
        return super(LoggingDatastore, self).contains(key)

    def query(self, query):
        '''Logs, then returns the cursor for `query`.'''
        self.logger.info('%s: query %s' % (self, query))
        return super(LoggingDatastore, self).query(query)
class KeyTransformDatastore(ShimDatastore):
    '''Shim that runs every incoming key through a `keytransform` function
    before delegating to the child datastore. For example:

        >>> import datastore.core
        >>> ds = datastore.DictDatastore()
        >>> kt = datastore.KeyTransformDatastore(ds,
        ...     keytransform=lambda key: key.reverse)
        >>> kt.put(datastore.Key('/a/b/c'), 'abc')
        >>> ds.get(datastore.Key('/c/b/a'))
        'abc'
        >>> kt.get(datastore.Key('/c/b/a'))
        'abc'
        >>> kt.get(datastore.Key('/a/b/c'))
        None
    '''

    def __init__(self, *args, **kwargs):
        '''Initializes KeyTransformDatastore with `keytransform` function.'''
        self.keytransform = kwargs.pop('keytransform', None)
        super(KeyTransformDatastore, self).__init__(*args, **kwargs)

    def _transform(self, key):
        '''Returns `key` run through `self.keytransform`, if one is set.'''
        if self.keytransform:
            return self.keytransform(key)
        return key

    def get(self, key):
        '''Return the object named by keytransform(key).'''
        return self.child_datastore.get(self._transform(key))

    def put(self, key, value):
        '''Stores the object named by keytransform(key).'''
        return self.child_datastore.put(self._transform(key), value)

    def delete(self, key):
        '''Removes the object named by keytransform(key).'''
        return self.child_datastore.delete(self._transform(key))

    def contains(self, key):
        '''Returns whether the object named by keytransform(key) exists.'''
        return self.child_datastore.contains(self._transform(key))

    def query(self, query):
        '''Returns a sequence of objects matching criteria expressed in
        `query`, with the query key transformed first.'''
        transformed_query = query.copy()
        transformed_query.key = self._transform(transformed_query.key)
        return self.child_datastore.query(transformed_query)
class LowercaseKeyDatastore(KeyTransformDatastore):
    '''Shim that lowercases every incoming key, making lookups
    case-insensitive. For example:

        >>> import datastore.core
        >>> ds = datastore.DictDatastore()
        >>> ds.put(datastore.Key('hello'), 'world')
        >>> lds = datastore.LowercaseKeyDatastore(ds)
        >>> lds.get(datastore.Key('HeLlO'))
        'world'
        >>> lds.get(datastore.Key('HELLO'))
        'world'
    '''

    def __init__(self, *args, **kwargs):
        '''Initializes the shim and installs the lowercasing transform.'''
        super(LowercaseKeyDatastore, self).__init__(*args, **kwargs)
        self.keytransform = self.lowercaseKey

    @classmethod
    def lowercaseKey(cls, key):
        '''Returns the lowercased equivalent of `key`.'''
        return Key(str(key).lower())
class NamespaceDatastore(KeyTransformDatastore):
    '''Shim that prefixes every incoming key with a fixed namespace key.
    For example:

        >>> import datastore.core
        >>>
        >>> ds = datastore.DictDatastore()
        >>> ds.put(datastore.Key('/a/b/c/d'), 'abcd')
        >>>
        >>> nd = datastore.NamespaceDatastore('/a/b', ds)
        >>> nd.get(datastore.Key('/c/d'))
        'abcd'
        >>> nd.put(datastore.Key('/c/d'), 'cd')
        >>> ds.get(datastore.Key('/a/b/c/d'))
        'cd'
    '''

    def __init__(self, namespace, *args, **kwargs):
        '''Initializes NamespaceDatastore with `key` namespace.'''
        super(NamespaceDatastore, self).__init__(*args, **kwargs)
        self.keytransform = self.namespaceKey
        self.namespace = Key(namespace)

    def namespaceKey(self, key):
        '''Returns `key` nested under the namespace: namespace.child(key).'''
        return self.namespace.child(key)
class NestedPathDatastore(KeyTransformDatastore):
    '''Represents a simple ShimDatastore that shards/namespaces incoming keys.

    Incoming keys are sharded into nested namespaces. The idea is to use the key
    name to separate into nested namespaces. This is akin to the directory
    structure that ``git`` uses for objects. For example:

        >>> import datastore.core
        >>>
        >>> ds = datastore.DictDatastore()
        >>> np = datastore.NestedPathDatastore(ds, depth=3, length=2)
        >>>
        >>> np.put(datastore.Key('/abcdefghijk'), 1)
        >>> np.get(datastore.Key('/abcdefghijk'))
        1
        >>> ds.get(datastore.Key('/abcdefghijk'))
        None
        >>> ds.get(datastore.Key('/ab/cd/ef/abcdefghijk'))
        1
        >>> np.put(datastore.Key('abc'), 2)
        >>> np.get(datastore.Key('abc'))
        2
        >>> ds.get(datastore.Key('/ab/ca/bc/abc'))
        2
    '''

    _default_depth = 3
    _default_length = 2
    # default shard source: the key's name
    _default_keyfn = staticmethod(lambda key: key.name)

    def __init__(self, *args, **kwargs):
        '''Initializes KeyTransformDatastore with the nesting key transform.

        kwargs:
            depth: the nesting level depth (e.g. 3 => /1/2/3/123) default: 3
            length: the nesting level length (e.g. 2 => /12/123456) default: 2
            keyfn: function mapping a Key to the string used for sharding
                (default: the key name)
        '''
        # assign the nesting variables
        self.nest_depth = kwargs.pop('depth', self._default_depth)
        self.nest_length = kwargs.pop('length', self._default_length)
        self.nest_keyfn = kwargs.pop('keyfn', self._default_keyfn)
        super(NestedPathDatastore, self).__init__(*args, **kwargs)
        self.keytransform = self.nestKey

    def query(self, query):
        # Requires supporting * operator on queries.
        raise NotImplementedError

    def nestKey(self, key):
        '''Returns a nested version of `key`, prefixed with its shard path.'''
        nest = self.nest_keyfn(key)
        # if depth * length > len(key.name), we need to pad by repeating it.
        mult = 1 + int(self.nest_depth * self.nest_length / len(nest))
        nest = nest * mult
        pref = Key(self.nestedPath(nest, self.nest_depth, self.nest_length))
        return pref.child(key)

    @staticmethod
    def nestedPath(path, depth, length):
        '''returns a nested version of `basename`, using the starting characters.

        For example:

            >>> NestedPathDatastore.nestedPath('abcdefghijk', 3, 2)
            'ab/cd/ef'
            >>> NestedPathDatastore.nestedPath('abcdefghijk', 4, 2)
            'ab/cd/ef/gh'
            >>> NestedPathDatastore.nestedPath('abcdefghijk', 3, 4)
            'abcd/efgh/ijk'
            >>> NestedPathDatastore.nestedPath('abcdefghijk', 1, 4)
            'abcd'
            >>> NestedPathDatastore.nestedPath('abcdefghijk', 3, 10)
            'abcdefghij/k'
        '''
        # Fix: `xrange` exists only in Python 2; `range` iterates identically
        # here and also works on Python 2.
        components = [path[n:n+length] for n in range(0, len(path), length)]
        components = components[:depth]
        return '/'.join(components)
class SymlinkDatastore(ShimDatastore):
'''Datastore that creates filesystem-like symbolic link keys.
A symbolic link key is a way of naming the same value with multiple keys.
For example:
>>> import datastore.core
>>>
>>> dds = datastore.DictDatastore()
>>> sds = datastore.SymlinkDatastore(dds)
>>>
>>> a = datastore.Key('/A')
>>> b = datastore.Key('/B')
>>>
>>> sds.put(a, 1)
>>> sds.get(a)
1
>>> sds.link(a, b)
>>> sds.get(b)
1
>>> sds.put(b, 2)
>>> sds.get(b)
2
>>> sds.get(a)
2
>>> sds.delete(a)
>>> sds.get(a)
None
>>> sds.get(b)
None
>>> sds.put(a, 3)
>>> sds.get(a)
3
>>> sds.get(b)
3
>>> sds.delete(b)
>>> sds.get(b)
None
>>> sds.get(a)
3
'''
sentinel = 'datastore_link'
def _link_value_for_key(self, source_key):
'''Returns the link value for given `key`.'''
return str(source_key.child(self.sentinel))
def _link_for_value(self, value):
'''Returns the linked key if `value` is a link, or None.'''
try:
key = Key(value)
if key.name == self.sentinel:
return key.parent
except:
pass
return None
def _follow_link(self, value):
'''Returns given `value` or, if it is a symlink, the `value` it names.'''
seen_keys = set()
while True:
link_key = self._link_for_value(value)
if not link_key:
return value
assert link_key not in seen_keys, 'circular symlink reference'
seen_keys.add(link_key)
value = super(SymlinkDatastore, self).get(link_key)
def _follow_link_gen(self, iterable):
'''A generator that follows links in values encountered.'''
for item in iterable:
yield self._follow_link(item)
def link(self, source_key, target_key):
'''Creates a symbolic link key pointing from `target_key` to `source_key`'''
link_value = self._link_value_for_key(source_key)
# put straight into the child, to avoid following previous links.
self.child_datastore.put(target_key, link_value)
# exercise the link. ensure there are no cycles.
self.get(target_key)
def get(self, key):
'''Return the object named by `key. Follows links.'''
value = super(SymlinkDatastore, self).get(key)
return self._follow_link(value)
def put(self, key, value):
'''Stores the object named by `key`. Follows links.'''
# if value is a link, don't follow links
if self._link_for_value(value):
super(SymlinkDatastore, self).put(key, value)
return
# if `key` points to a symlink, need to follow it.
current_value = super(SymlinkDatastore, self).get(key)
link_key = self._link_for_value(current_value)
if link_key:
self.put(link_key, value) # self.put: could be another link.
else:
super(SymlinkDatastore, self).put(key, value)
def query(self, query):
    '''Return objects matching `query`, resolving any symlinked values.'''
    raw_results = super(SymlinkDatastore, self).query(query)
    return self._follow_link_gen(raw_results)
class DirectoryDatastore(ShimDatastore):
    '''Datastore shim supporting manually-maintained directory entries.

    A "directory" is simply a list of child-key strings stored under the
    directory key. For example:

        >>> ds = DirectoryDatastore(ds)
        >>>
        >>> # initialize directory at /foo
        >>> ds.directory(Key('/foo'))
        >>>
        >>> # adding directory entries
        >>> ds.directoryAdd(Key('/foo'), Key('/foo/bar'))
        >>> ds.directoryAdd(Key('/foo'), Key('/foo/baz'))
        >>>
        >>> # value is a generator returning all the keys in this dir
        >>> for key in ds.directoryRead(Key('/foo')):
        ...   print(key)
        Key('/foo/bar')
        Key('/foo/baz')
        >>>
        >>> # querying for a collection works
        >>> for item in ds.query(Query(Key('/foo'))):
        ...   print(item)
        'bar'
        'baz'
    '''

    def directory(self, dir_key):
        '''Initialize an empty directory at `dir_key` if none exists yet.'''
        current = self.get(dir_key)
        if not isinstance(current, list):
            self.put(dir_key, [])

    def directoryRead(self, dir_key):
        '''Return a generator over all keys in the directory at `dir_key`.'''
        return self.directory_entries_generator(dir_key)

    def directoryAdd(self, dir_key, key):
        '''Add entry `key` to the directory at `dir_key`, creating it if needed.'''
        entry = str(key)
        entries = self.get(dir_key) or []
        if entry not in entries:
            entries.append(entry)
            self.put(dir_key, entries)

    def directoryRemove(self, dir_key, key):
        '''Remove entry `key` from the directory at `dir_key`; no-op if absent.'''
        entry = str(key)
        entries = self.get(dir_key) or []
        if entry in entries:
            self.put(dir_key, [e for e in entries if e != entry])

    def directory_entries_generator(self, dir_key):
        '''Yield each entry of the directory at `dir_key` as a Key.'''
        for entry in self.get(dir_key) or []:
            yield Key(entry)
class DirectoryTreeDatastore(ShimDatastore):
    '''Datastore that tracks directory entries, like in a filesystem.

    Every `put`/`delete` also updates a sibling "directory" record under the
    key's parent, whose value is the list of child key strings.

    For example:

    >>> import datastore.core
    >>>
    >>> dds = datastore.DictDatastore()
    >>> rds = datastore.DirectoryTreeDatastore(dds)
    >>>
    >>> a = datastore.Key('/A')
    >>> b = datastore.Key('/A/B')
    >>> c = datastore.Key('/A/C')
    >>>
    >>> rds.get(a)
    []
    >>> rds.put(b, 1)
    >>> rds.get(b)
    1
    >>> rds.get(a)
    ['/A/B']
    >>> rds.put(c, 1)
    >>> rds.get(c)
    1
    >>> rds.get(a)
    ['/A/B', '/A/C']
    >>> rds.delete(b)
    >>> rds.get(a)
    ['/A/C']
    >>> rds.delete(c)
    >>> rds.get(a)
    []
    '''

    def put(self, key, value):
        '''Store `value` under `key` and record `key` in its parent directory.'''
        super(DirectoryTreeDatastore, self).put(key, value)
        str_key = str(key)
        # ignore root: it has no parent directory to record into
        if str_key == '/':
            return
        # directory entries live under a reserved 'directory' instance key
        # of the parent key
        dir_key = key.parent.instance('directory')
        directory = self.directory(dir_key)
        # ensure key is in directory; repeated puts of the same key are
        # no-ops on the directory record
        if str_key not in directory:
            directory.append(str_key)
            super(DirectoryTreeDatastore, self).put(dir_key, directory)

    def delete(self, key):
        '''Remove the object named by `key` and its parent-directory entry.'''
        super(DirectoryTreeDatastore, self).delete(key)
        str_key = str(key)
        # ignore root
        if str_key == '/':
            return
        # retrieve directory, to remove entry
        dir_key = key.parent.instance('directory')
        directory = self.directory(dir_key)
        # ensure key is not in directory
        if directory and str_key in directory:
            directory.remove(str_key)
            # drop the directory record entirely once it becomes empty
            if len(directory) > 0:
                super(DirectoryTreeDatastore, self).put(dir_key, directory)
            else:
                super(DirectoryTreeDatastore, self).delete(dir_key)

    def query(self, query):
        '''Return objects matching `query`, served from directory entries.

        NOTE(review): assumes `query` objects are callable over an iterable
        of values -- confirm against the Query implementation.
        '''
        return query(self.directory_values_generator(query.key))

    def directory(self, key):
        '''Return the directory entry list for `key` (possibly empty).'''
        # accept either the plain key or its 'directory' instance key
        if key.name != 'directory':
            key = key.instance('directory')
        return self.get(key) or []

    def directory_values_generator(self, key):
        '''Yield the stored value for each entry in the directory at `key`.'''
        directory = self.directory(key)
        for key in directory:
            yield self.get(Key(key))
class DatastoreCollection(ShimDatastore):
    '''Represents a collection of datastores.'''

    def __init__(self, stores=None):
        '''Initialize the collection with any provided datastores.

        Args:
            stores: optional iterable of Datastore instances. A private copy
                is taken, so later mutation of the argument cannot alias
                into this collection.

        Raises:
            TypeError: if any element is not a Datastore.
        '''
        # Taking a copy also fixes the mutable-default-argument bug of the
        # previous `stores=[]` signature: every instance constructed without
        # arguments shared (and mutated) one and the same list object.
        stores = list(stores) if stores is not None else []
        for store in stores:
            if not isinstance(store, Datastore):
                raise TypeError("all stores must be of type %s" % Datastore)
        self._stores = stores

    def datastore(self, index):
        '''Return the datastore at `index`.'''
        return self._stores[index]

    def appendDatastore(self, store):
        '''Append datastore `store` to this collection.'''
        if not isinstance(store, Datastore):
            raise TypeError("stores must be of type %s" % Datastore)
        self._stores.append(store)

    def removeDatastore(self, store):
        '''Remove datastore `store` from this collection.'''
        self._stores.remove(store)

    def insertDatastore(self, index, store):
        '''Insert datastore `store` into this collection at `index`.'''
        if not isinstance(store, Datastore):
            raise TypeError("stores must be of type %s" % Datastore)
        self._stores.insert(index, store)
class TieredDatastore(DatastoreCollection):
    '''Represents a hierarchical collection of datastores.

    Each datastore is queried in order. This is helpful to organize access
    order in terms of speed (i.e. read caches first).

    Datastores should be arranged in order of completeness, with the most
    complete datastore last, as it will handle query calls.

    Semantics:
      * get      : returns first found value
      * put      : writes through to all
      * delete   : deletes through to all
      * contains : returns True on first hit
      * query    : queries bottom (most complete) datastore
    '''

    def get(self, key):
        '''Return the object named by `key`. Checks each datastore in order.

        On a hit, the value is written back into every earlier (faster)
        datastore, so subsequent reads are served from the cache tiers.
        Note: a stored value of None is indistinguishable from a miss.
        '''
        value = None
        for index, store in enumerate(self._stores):
            value = store.get(key)
            if value is not None:
                # Back-fill the stores ahead of the one that had the value.
                # Using the index (instead of comparing stores with `==`)
                # avoids relying on equality semantics of store objects.
                for cache in self._stores[:index]:
                    cache.put(key, value)
                break
        return value

    def put(self, key, value):
        '''Store the object in all underlying datastores.'''
        for store in self._stores:
            store.put(key, value)

    def delete(self, key):
        '''Remove the object from all underlying datastores.'''
        for store in self._stores:
            store.delete(key)

    def query(self, query):
        '''Return a sequence of objects matching criteria expressed in `query`.

        The last datastore handles all query calls, as it has a (if not
        the only) complete record of all objects.
        '''
        return self._stores[-1].query(query)

    def contains(self, key):
        '''Return whether any underlying datastore contains `key`.'''
        return any(store.contains(key) for store in self._stores)
class ShardedDatastore(DatastoreCollection):
    '''Represents a collection of datastore shards.

    A datastore is selected based on a sharding function.
    Sharding functions should take a Key and return an integer.

    WARNING: adding or removing datastores while mid-use may severely affect
    consistency. Also ensure the order is correct upon initialization.
    While this is not as important for caches, it is crucial for
    persistent datastores.
    '''

    def __init__(self, stores=None, shardingfn=hash):
        '''Initialize the datastore with any provided datastores.

        `stores` defaults to None (meaning empty) rather than a shared
        mutable `[]` default, which every no-argument instance would
        otherwise have aliased and mutated in common.

        Raises:
            TypeError: if `shardingfn` is not callable.
        '''
        if not callable(shardingfn):
            raise TypeError('shardingfn (type %s) is not callable' % type(shardingfn))
        super(ShardedDatastore, self).__init__(stores if stores is not None else [])
        self._shardingfn = shardingfn

    def shard(self, key):
        '''Return the shard index to handle `key`, according to the sharding fn.'''
        # NOTE: raises ZeroDivisionError if no shards have been added yet.
        return self._shardingfn(key) % len(self._stores)

    def shardDatastore(self, key):
        '''Return the shard to handle `key`.'''
        return self.datastore(self.shard(key))

    def get(self, key):
        '''Return the object named by `key` from the corresponding shard.'''
        return self.shardDatastore(key).get(key)

    def put(self, key, value):
        '''Store the object in the corresponding shard.'''
        self.shardDatastore(key).put(key, value)

    def delete(self, key):
        '''Remove the object from the corresponding shard.'''
        self.shardDatastore(key).delete(key)

    def contains(self, key):
        '''Return whether the corresponding shard contains `key`.'''
        return self.shardDatastore(key).contains(key)

    def query(self, query):
        '''Return a sequence of objects matching criteria expressed in `query`.'''
        cursor = Cursor(query, self.shard_query_generator(query))
        cursor.apply_order()  # ordering sharded queries is expensive (no generator)
        return cursor

    def shard_query_generator(self, query):
        '''A generator that queries each shard in sequence, carrying the
        remaining offset/limit across shard boundaries.'''
        shard_query = query.copy()
        for shard in self._stores:
            # yield all items matching within this shard
            cursor = shard.query(shard_query)
            for item in cursor:
                yield item
            # update query with results of the shard just queried
            shard_query.offset = max(shard_query.offset - cursor.skipped, 0)
            if shard_query.limit:
                shard_query.limit = max(shard_query.limit - cursor.returned, 0)
                if shard_query.limit <= 0:
                    break  # we're already done!
'''
Hello Tiered Access
>>> import pymongo
>>> import datastore.core
>>>
>>> from datastore.impl.mongo import MongoDatastore
>>> from datastore.impl.lrucache import LRUCache
>>> from datastore.impl.filesystem import FileSystemDatastore
>>>
>>> conn = pymongo.Connection()
>>> mongo = MongoDatastore(conn.test_db)
>>>
>>> cache = LRUCache(1000)
>>> fs = FileSystemDatastore('/tmp/.test_db')
>>>
>>> ds = datastore.TieredDatastore([cache, mongo, fs])
>>>
>>> hello = datastore.Key('hello')
>>> ds.put(hello, 'world')
>>> ds.contains(hello)
True
>>> ds.get(hello)
'world'
>>> ds.delete(hello)
>>> ds.get(hello)
None
Hello Sharding
>>> import datastore.core
>>>
>>> shards = [datastore.DictDatastore() for i in range(0, 10)]
>>>
>>> ds = datastore.ShardedDatastore(shards)
>>>
>>> hello = datastore.Key('hello')
>>> ds.put(hello, 'world')
>>> ds.contains(hello)
True
>>> ds.get(hello)
'world'
>>> ds.delete(hello)
>>> ds.get(hello)
None
'''
| 4 | 4 |
website/admin.py | jonfroehlich/makeabilitylabwebsite | 2 | 13333 | <reponame>jonfroehlich/makeabilitylabwebsite
from django.contrib import admin
from django.contrib.admin import widgets
from .models import Person, Publication, Position, Talk, Project, Poster, Keyword, News, Banner, Video, Project_header, Photo, Project_umbrella, Project_Role, Sponsor
from website.admin_list_filters import PositionRoleListFilter, PositionTitleListFilter, PubVenueTypeListFilter, PubVenueListFilter
from sortedm2m_filter_horizontal_widget.forms import SortedFilteredSelectMultiple
import django
from django import forms
from django.http import HttpResponse
from datetime import datetime
from django.template import loader
from django.template import RequestContext
from django.shortcuts import redirect
from django import forms
import urllib
import bibtexparser
from image_cropping import ImageCroppingMixin
class BannerAdmin(ImageCroppingMixin, admin.ModelAdmin):
    # Admin for Banner objects; ImageCroppingMixin adds the image-cropping UI.
    fieldsets = [
        (None, {'fields': ["page", "title", "caption", "alt_text", "link", "favorite", "project"]}),
        # ('Image', {'fields': ["image", "image_preview"]})
        ('Image', {'fields': ["image", "cropping"]})
    ]

    # The list display lets us control what is shown in the default persons table at Home > Website > Banners
    # info on displaying multiple entries comes from http://stackoverflow.com/questions/9164610/custom-columns-using-django-admin
    list_display = ('title', 'project', 'page', 'favorite', 'image')
    # readonly_fields = ["image_preview"]
class PositionInline(admin.StackedInline):
    '''Inline editor for Position rows shown on the Person admin page.'''
    model = Position

    # This specifies that the Inline is linked to the main owner of the
    # position rather than any of the advisor roles.
    fk_name = "person"

    # This specifies that the field appears only once (by default)
    extra = 0

    def formfield_for_foreignkey(self, db_field, request, **kwargs):
        '''Limit the advisor/mentor dropdowns to the relevant sets of people.

        Leftover debug print statements were removed here: they dumped the
        full queryset to stdout on every admin page render.
        '''
        if db_field.name == "advisor" or db_field.name == "co_advisor":
            # Filters advisors to professors and sorts by first name
            # Based on: http://stackoverflow.com/a/30627555
            professor_ids = [person.id for person in Person.objects.all() if person.is_professor()]
            kwargs["queryset"] = Person.objects.filter(id__in=professor_ids).order_by('first_name')
        elif db_field.name == "grad_mentor":
            # Filters grad mentor list to current grad students (either member or collaborator)
            grad_ids = [person.id for person in Person.objects.all()
                        if person.is_grad_student() and
                        (person.is_current_member() or person.is_current_collaborator())]
            kwargs["queryset"] = Person.objects.filter(id__in=grad_ids).order_by('first_name')
        return super(PositionInline, self).formfield_for_foreignkey(db_field, request, **kwargs)
class ProjectRoleInline(admin.StackedInline):
    # Inline editor for a person's project roles on the Person admin page.
    model = Project_Role
    extra = 0
class ProjectHeaderInline(ImageCroppingMixin, admin.StackedInline):
    # Inline editor for project header images (with cropping support).
    model = Project_header
    extra = 0
# Uses format as per https://github.com/jonasundderwolf/django-image-cropping to add cropping to the admin page
class NewsAdmin(ImageCroppingMixin, admin.ModelAdmin):
    '''Admin for News posts; cropping is enabled for the post image.'''

    def formfield_for_foreignkey(self, db_field, request, **kwargs):
        '''Restrict the author dropdown to current members, sorted by first name.

        Based on: http://stackoverflow.com/a/30627555
        (A leftover debug print of the queryset was removed here.)
        '''
        if db_field.name == "author":
            current_member_ids = [person.id for person in Person.objects.all() if person.is_current_member()]
            kwargs["queryset"] = Person.objects.filter(id__in=current_member_ids).order_by('first_name')
        return super(NewsAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs)

    def formfield_for_manytomany(self, db_field, request=None, **kwargs):
        '''Use the two-pane filter widget for the project M2M field.'''
        if db_field.name == "project":
            kwargs["widget"] = widgets.FilteredSelectMultiple("project", is_stacked=False)
        return super(NewsAdmin, self).formfield_for_manytomany(db_field, request, **kwargs)
class PhotoAdmin(ImageCroppingMixin, admin.ModelAdmin):
    # Show a thumbnail next to each photo in the admin list view.
    list_display = ('__str__', 'admin_thumbnail')
class ProjectAdmin(ImageCroppingMixin, admin.ModelAdmin):
    '''Admin for Project; shows membership/artifact summary columns.'''
    inlines = [ProjectHeaderInline]

    # Columns shown in the Project table at Home > Website > Project.
    # Info on displaying multiple entries:
    # http://stackoverflow.com/questions/9164610/custom-columns-using-django-admin
    list_display = ('name', 'start_date', 'end_date', 'has_ended', 'get_people_count',
                    'get_current_member_count', 'get_past_member_count',
                    'get_most_recent_artifact_date', 'get_most_recent_artifact_type',
                    'get_publication_count', 'get_video_count', 'get_talk_count')

    # M2M fields that get the two-pane filter widget, mapped to the widget's
    # verbose name.
    _filtered_m2m = {
        'sponsors': 'sponsors',
        'keywords': 'keywords',
        'project_umbrellas': 'project umbrellas',
    }

    def formfield_for_manytomany(self, db_field, request=None, **kwargs):
        '''Use the two-pane filter widget for the configured M2M fields.'''
        verbose_name = self._filtered_m2m.get(db_field.name)
        if verbose_name is not None:
            kwargs["widget"] = widgets.FilteredSelectMultiple(verbose_name, is_stacked=False)
        return super(ProjectAdmin, self).formfield_for_manytomany(db_field, request, **kwargs)
class PersonAdmin(ImageCroppingMixin, admin.ModelAdmin):
    # Admin for Person: inline position/role editing plus summary columns.
    # inlines allow us to edit models on the same page as a parent model
    # see: https://docs.djangoproject.com/en/1.11/ref/contrib/admin/#inlinemodeladmin-objects
    inlines = [PositionInline, ProjectRoleInline]

    # The list display lets us control what is shown in the default persons table at Home > Website > People
    # info on displaying multiple entries comes from http://stackoverflow.com/questions/9164610/custom-columns-using-django-admin
    list_display = ('get_full_name', 'get_current_title', 'get_current_role', 'is_active', 'get_start_date', 'get_end_date', 'get_time_in_current_position', 'get_total_time_as_member')

    #TODO setup filter here that has diff categories (like active members, past, etc.):
    #https://www.elements.nl/2015/03/16/getting-the-most-out-of-django-admin-filters/
    #related to: https://github.com/jonfroehlich/makeabilitylabwebsite/issues/238
    list_filter = (PositionRoleListFilter, PositionTitleListFilter)
class VideoAdmin(admin.ModelAdmin):
    # Admin for Video objects.
    # The list display lets us control what is shown in the default persons table at Home > Website > Videos
    # info on displaying multiple entries comes from http://stackoverflow.com/questions/9164610/custom-columns-using-django-admin
    list_display = ('title', 'date', 'caption', 'project')

    # search_fields are used for auto-complete, see:
    # https://docs.djangoproject.com/en/3.0/ref/contrib/admin/#django.contrib.admin.ModelAdmin.autocomplete_fields
    # NOTE(review): 'get_video_host_str' looks like a model *method* rather
    # than a field; Django search_fields require field lookups, so searching
    # on it may raise FieldError -- verify against the Video model.
    search_fields = ['title', 'get_video_host_str', 'date']

    # default the sort order in table to descending order by date
    ordering = ('-date',)
class TalkAdmin(admin.ModelAdmin):
    '''Admin for Talk objects.'''

    # The list display lets us control what is shown in the default talk table at Home > Website > Talk
    # See: https://docs.djangoproject.com/en/dev/ref/contrib/admin/#django.contrib.admin.ModelAdmin.list_display
    list_display = ('title', 'date', 'get_speakers_as_csv', 'forum_name', 'location', 'talk_type')

    # search_fields are used for auto-complete, see:
    # https://docs.djangoproject.com/en/3.0/ref/contrib/admin/#django.contrib.admin.ModelAdmin.autocomplete_fields
    # for example, the PublicationAdmin uses auto-complete select2 for talks
    search_fields = ['title', 'forum_name']

    def formfield_for_manytomany(self, db_field, request, **kwargs):
        '''Use the two-pane filter widget for Talk's M2M fields.

        A leftover debug print (dumping db_field/request on every render)
        was removed here.
        '''
        if db_field.name == "projects":
            kwargs["widget"] = widgets.FilteredSelectMultiple("projects", is_stacked=False)
        if db_field.name == "project_umbrellas":
            kwargs["widget"] = widgets.FilteredSelectMultiple("project umbrellas", is_stacked=False, )
        if db_field.name == "speakers":
            # We intentionally do NOT limit speakers to current lab members
            # (see https://github.com/jonfroehlich/makeabilitylabwebsite/issues/534):
            # sometimes talks are added for former members or collaborators.
            # Code kept for reference, as it may be useful elsewhere:
            # current_member_and_collab_ids = [person.id for person in Person.objects.all() if person.is_current_member()]
            # filtered_speakers = Person.objects.filter(id__in=current_member_and_collab_ids).order_by('first_name')
            # kwargs["queryset"] = filtered_speakers
            kwargs["widget"] = widgets.FilteredSelectMultiple("speakers", is_stacked=False)
        if db_field.name == "keywords":
            kwargs["widget"] = widgets.FilteredSelectMultiple("keywords", is_stacked=False)
        return super(TalkAdmin, self).formfield_for_manytomany(db_field, request, **kwargs)
class PosterAdmin(admin.ModelAdmin):
    '''Admin for Poster objects.'''

    # search_fields are used for auto-complete, see:
    # https://docs.djangoproject.com/en/3.0/ref/contrib/admin/#django.contrib.admin.ModelAdmin.autocomplete_fields
    search_fields = ['title', 'date']

    def formfield_for_manytomany(self, db_field, request, **kwargs):
        '''Use the two-pane filter widget for Poster's M2M fields.

        A leftover debug print statement was removed here.
        '''
        if db_field.name == "projects":
            kwargs["widget"] = widgets.FilteredSelectMultiple("projects", is_stacked=False)
        if db_field.name == "authors":
            kwargs["widget"] = widgets.FilteredSelectMultiple("authors", is_stacked=False)
        if db_field.name == "keywords":
            kwargs["widget"] = widgets.FilteredSelectMultiple("keywords", is_stacked=False)
        return super(PosterAdmin, self).formfield_for_manytomany(db_field, request, **kwargs)
class ProjectUmbrellaAdmin(admin.ModelAdmin):
    # Admin for Project_umbrella: only customizes the keywords M2M widget.
    def formfield_for_manytomany(self, db_field, request=None, **kwargs):
        if db_field.name == "keywords":
            kwargs["widget"] = widgets.FilteredSelectMultiple("keywords", is_stacked=False)
        return super(ProjectUmbrellaAdmin, self).formfield_for_manytomany(db_field, request, **kwargs)
#from https://stackoverflow.com/questions/9602217/define-an-order-for-manytomanyfield-with-django
#display items inline
class PublicationAuthorInline(admin.TabularInline):
    # Through-model inline so author *order* can be edited on the
    # Publication admin page.
    model = Publication.authors.through
    verbose_name = "Author"
    verbose_name_plural = "Author Order"
class PublicationAdmin(admin.ModelAdmin):
    '''Admin for Publication: grouped edit form, custom widgets, and filters.'''
    fieldsets = [
        (None, {'fields': ['title', 'authors', 'date']}),
        ('Files', {'fields': ['pdf_file']}),
        ('Pub Venue information', {'fields': ['pub_venue_url','pub_venue_type', 'book_title', 'book_title_short', 'geo_location', 'total_papers_submitted', 'total_papers_accepted']}),
        ('Archival Info', {'fields': ['official_url', 'extended_abstract', 'peer_reviewed', 'award' ]}),
        ('Page Info', {'fields': ['num_pages', 'page_num_start', 'page_num_end']}),
        ('Supplementary Artifacts', {'fields': ['poster', 'video', 'talk', 'code_repo_url']}),
        ('Project Info', {'fields': ['projects', 'project_umbrellas']}),
        ('Keyword Info', {'fields': ['keywords']}),
    ]

    list_display = ('title', 'book_title_short', 'date')

    # default the sort order in table to descending order by date
    ordering = ('-date',)

    list_filter = (PubVenueTypeListFilter, PubVenueListFilter)

    # add in auto-complete fields for talks, see:
    # https://docs.djangoproject.com/en/3.0/ref/contrib/admin/#django.contrib.admin.ModelAdmin.autocomplete_fields
    # this addresses: https://github.com/jonfroehlich/makeabilitylabwebsite/issues/553
    # You must also update the search_fields in the respective admins like PosterAdmin, VideoAdmin, and TalkAdmin
    # these search fields become what the auto-complete function searches for filtering
    autocomplete_fields = ['poster', 'video', 'talk']

    def get_form(self, request, obj=None, **kwargs):
        """We custom style some of the admin UI, including expanding the width of the talk select interface"""
        form = super(PublicationAdmin, self).get_form(request, obj, **kwargs)

        # we style the talks select2 widget so that it's wider, see:
        # https://docs.djangoproject.com/en/2.2/ref/forms/widgets/#customizing-widget-instances
        # see also:
        # https://stackoverflow.com/questions/10588275/django-change-field-size-of-modelmultiplechoicefield
        # https://stackoverflow.com/questions/110378/change-the-width-of-form-elements-created-with-modelform-in-django
        # and finally, this is what worked for me:
        # https://stackoverflow.com/q/35211809
        # to address: https://github.com/jonfroehlich/makeabilitylabwebsite/issues/851
        text_min_width = 750
        form.base_fields['title'].widget.attrs['style'] = 'min-width: {}px;'.format(text_min_width)
        form.base_fields['book_title'].widget.attrs['style'] = 'min-width: {}px;'.format(text_min_width)
        form.base_fields['book_title_short'].widget.attrs['style'] = 'min-width: {}px;'.format(500)

        select_min_width = 600
        form.base_fields['poster'].widget.attrs['style'] = 'min-width: {}px;'.format(select_min_width)
        form.base_fields['video'].widget.attrs['style'] = 'min-width: {}px;'.format(select_min_width)
        form.base_fields['talk'].widget.attrs['style'] = 'min-width: {}px;'.format(select_min_width)
        return form

    def formfield_for_manytomany(self, db_field, request=None, **kwargs):
        # authors use a sortable widget so author order can be edited
        if db_field.name == "authors":
            kwargs['widget'] = SortedFilteredSelectMultiple()
        elif db_field.name == "projects":
            kwargs["widget"] = widgets.FilteredSelectMultiple("projects", is_stacked=False)
        elif db_field.name == "project_umbrellas":
            kwargs["widget"] = widgets.FilteredSelectMultiple("project umbrellas", is_stacked=False)
        elif db_field.name == "keywords":
            kwargs["widget"] = widgets.FilteredSelectMultiple("keywords", is_stacked=False)
        return super(PublicationAdmin, self).formfield_for_manytomany(db_field, request, **kwargs)
# Register each model with its customized ModelAdmin (Keyword and Sponsor
# use Django's default admin).
admin.site.register(Person, PersonAdmin)
admin.site.register(Publication, PublicationAdmin)
admin.site.register(Talk, TalkAdmin)
admin.site.register(Project, ProjectAdmin)
admin.site.register(Poster, PosterAdmin)
admin.site.register(Keyword)
admin.site.register(News, NewsAdmin)
admin.site.register(Banner, BannerAdmin)
admin.site.register(Video, VideoAdmin)
admin.site.register(Photo, PhotoAdmin)
admin.site.register(Project_umbrella, ProjectUmbrellaAdmin)
admin.site.register(Sponsor)

# For modifying more on the front admin landing page, see https://medium.com/django-musings/customizing-the-django-admin-site-b82c7d325510
admin.site.index_title = "Makeability Lab Admin. Django version: " + django.get_version() + " ML Version: 0.5.7a"
| 2.125 | 2 |
python/annotation_orthologs_inference.py | liebermanlab/wide-variant | 0 | 13334 | <reponame>liebermanlab/wide-variant<gh_stars>0
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Jun 26 21:39:04 2019
@author: fmk
"""
import argparse,subprocess,string,random
import pandas as pd
''' positional and optional argument parser'''
parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,
description='''\
Infer orthologs across two or more prokka-based annotations, and returns overview table for all genes.
Homology is inferred using CD-HIT and annotations need to be in fasta format (nucleotide (*.ffn) or amino acid (*.faa))
CD-HIT: %identity optional. Fixed: -s 0.9, ie. shorter sequences need to be at least 90% length of the representative of the cluster.
''',
epilog="Questions or comments? --> <EMAIL>")
parser.add_argument("-f", dest="file_sample_annotation", help="2-col TSV file with subject-identifier and annotation file path.",type=argparse.FileType('r'),required=True)
parser.add_argument('-p', dest="percentIdentity", action="store", default='0.98', help="Percent identity cd-hit. Default: 0.98")
parser.add_argument('-o', dest="outpath", action="store", help="Output path.",required=True)
parser.add_argument("-c", dest="cdhit", help="Path to CD-HIT executable", action="store",default="cd-hit")
args = parser.parse_args()
''' FUNCTIONS'''
def fix_path(path):
    """Return `path` with a trailing "/" appended if it is missing.

    Using str.endswith also handles the empty string (returns "/"),
    which previously raised IndexError from `path[-1]`.
    """
    return path if path.endswith("/") else path + "/"
def read_merge_sample_annotation_file(file_sample_annotation):
    '''Merge all annotation FASTA files into one and map subjects to prokka tags.

    Reads the 2-column TSV (subject identifier, annotation file path),
    concatenates every annotation file into `outpath`/merged_annotation.fa,
    and returns a dict mapping subject identifier -> prokka locus-tag prefix
    (taken from the first header line of each annotation file).

    Raises:
        ValueError: if an annotation file does not start with a '>' header.

    NOTE(review): relies on the module-level `outpath` global being set
    before this is called (done under __main__).

    Leftover debug prints of every TSV line were removed.
    '''
    subj_tag_dict = {}
    subprocess.run(['mkdir', '-p', outpath], check=True)
    with open(outpath + "merged_annotation.fa", 'w') as outfile:
        for line in file_sample_annotation:
            line = line.strip().split('\t')
            # read annotation file: extract prokka-gene-tag (from 1st line)
            # and merge all annotation files into one
            with open(line[1]) as annofile:
                # link subjectID with prokka gene tag, e.g. ">JKPBNODD_00001 ..."
                # yields tag "JKPBNODD"
                first_line = annofile.readline()
                if first_line.startswith('>'):
                    prokka_gene_tag = first_line.strip().split(' ')[0].split('>')[1].split('_')[0]
                    subj_tag_dict[line[0]] = prokka_gene_tag
                else:
                    raise ValueError('Annotation file does not start with ">": ' + first_line + ' in ' + line[1])
                # write annotation files into merged outfile
                outfile.write(first_line)  # necessary bcs already read
                outfile.write(annofile.read())
    file_sample_annotation.close()
    return subj_tag_dict
def read_cdhit_cluster(cdhit_clstr, percentIdentity, prokka_tag_list):
    '''Parse a CD-HIT .clstr file into {cluster_tag: [gene id(s) per subject]}.

    Each cluster is assigned a generated tag "SAAB_#####_pidX_<random>"; the
    value is a list aligned with `prokka_tag_list`, holding 'NA' or a
    comma-separated string of gene IDs for that subject.

    NOTE(review): assumes the .clstr file begins with a '>Cluster' header
    line; otherwise `cluster_tag` would be referenced before assignment.
    '''
    # read cdhit results and build for each cluster entry in dict
    ctr = 1
    ctr_cluster_within_subject = 0
    rdm_tag = ''.join(random.choice(string.ascii_lowercase) for x in range(4)) # added to cluster-tags to avoid confusion w/ different runs
    saab_cluster_genes = {}
    # value_default = ['NA' for i in prokka_tag_list]
    with open(cdhit_clstr) as infile:
        for line in infile:
            if line.startswith('>'):
                # new cluster: mint a tag and start with 'NA' for every subject
                cluster_tag = "SAAB_" + "%05d" % ctr + "_pid" + percentIdentity + "_" + rdm_tag
                saab_cluster_genes[cluster_tag] = ['NA' for i in prokka_tag_list]
                ctr += 1
            else:
                line = line.strip().split('\t')[1].split(" ")[1].split('.')[0].split('>')[1] # remodel string so it matches prokka-gene_id eg. "0 10155aa, >JKPBNODD_00001... *"
                # prokka gene IDs look like "<subjecttag>_<number>"
                subject_identifier_prokka = line.split('_')[0]
                subject_index = prokka_tag_list.index( subject_identifier_prokka )
                if saab_cluster_genes[cluster_tag][subject_index] == 'NA':
                    saab_cluster_genes[cluster_tag][subject_index] = line
                else:
                    # subject already has a gene in this cluster: append it
                    # comma-separated and count the occurrence
                    saab_cluster_genes[cluster_tag][subject_index] = saab_cluster_genes[cluster_tag][subject_index] + "," + line
                    ctr_cluster_within_subject += 1
    if ctr_cluster_within_subject > 0:
        print('NOTE: ' + str(ctr_cluster_within_subject) + " occasions where a gene cluster had >1 gene from the same individual assigned.")
    return saab_cluster_genes
''' MAIN '''
# TEST Vars
#file_sample_annotation = "/Users/fmk/Documents/mit/stapAD/tmp/pycode/prokka_ffn/subject_4_9_16.list"
##annopath = "/Users/fmk/Documents/mit/stapAD/mlst"
##filetype = "txt"
#outpath = "/Users/fmk/Documents/mit/stapAD/tmp/pycode"
#percentIdentity=0.95
#cdhit_executable = '/usr/local/bin/cd-hit'
if __name__ == "__main__":
    # assign argparse arguments
    file_sample_annotation = args.file_sample_annotation
    # annopath = fix_path(args.annopath) # fix path to annotation has trailing "/"
    outpath = fix_path(args.outpath)  # global: also read inside read_merge_sample_annotation_file
    # filetype = args.filetype
    cdhit_executable = args.cdhit
    percentIdentity = args.percentIdentity

    # get concatenated annotation file (output: merged_annotation.fa) and dict[subject]=prokka-tag
    subj_tag_dict = read_merge_sample_annotation_file(file_sample_annotation)
    subject_list_ord = list(subj_tag_dict.keys())
    prokkaTag_list_ord = [ subj_tag_dict[k] for k in subject_list_ord ]

    # cd-hit
    # NOTE(review): shell=True with string-interpolated paths -- paths
    # containing spaces or shell metacharacters would break or be
    # interpreted by the shell.
    command_cdhit = cdhit_executable + " -s 0.9 -c " + percentIdentity + " -i " + outpath + "merged_annotation.fa" + " -o " + outpath+"cdhit_results"
    subprocess.run(command_cdhit,shell=True)

    # read-in cdhit results: dict[SAAB_XXXXX_pidZZZ_YYY]=[geneX,geneY,geneZ]
    cdhit_res_dict = read_cdhit_cluster(outpath+"cdhit_results.clstr",percentIdentity,prokkaTag_list_ord)

    # build table of gene annotation (rows: clusters, cols: subjects)
    cdhit_res_df = pd.DataFrame.from_dict(cdhit_res_dict,orient='index',columns=subject_list_ord)

    # write cdhit res
    cdhit_res_df.to_csv(outpath+'annotation_orthologs.tsv',sep="\t")
| 1.851563 | 2 |
tests/test_vtable.py | matthewpruett/angr | 6,132 | 13335 | import os
import angr
test_location = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..', 'binaries', 'tests')
def test_vtable_extraction_x86_64():
    '''Check that VtableFinder recovers the four vtables of cpp_classes.'''
    proj = angr.Project(os.path.join(test_location, "x86_64", "cpp_classes"),
                        auto_load_libs=False)

    # expected vtable address -> size, taken from the known binary layout
    expected_sizes = {0x403cb0: 24, 0x403cd8: 16, 0x403cf8: 16, 0x403d18: 16}

    found = proj.analyses.VtableFinder().vtables_list
    assert len(found) == 4
    for vtable in found:
        assert vtable.vaddr in expected_sizes
        assert vtable.size == expected_sizes[vtable.vaddr]
# Allow running this test module directly, outside a test runner.
if __name__ == "__main__":
    test_vtable_extraction_x86_64()
| 2.171875 | 2 |
sendmail.py | loitd/buzreportgenerator | 0 | 13336 | <gh_stars>0
from zeep import Client
def test():
    '''Smoke-test the public calculator SOAP service via zeep.'''
    calc = Client('http://www.dneonline.com/calculator.asmx?wsdl')
    answer = calc.service.Add(100, 220)
    print(answer)
def test2():
    '''Send a test email through the internal SOAP notification service.'''
    # SECURITY: a hardcoded credential (app password) and an internal IP are
    # embedded in the call below -- these should be moved to configuration /
    # a secrets store, and the exposed password rotated.
    client = Client('http://172.16.237.11:8080/SendNotify_Interface_Gmail/services/Notify?wsdl')
    result = client.service.sendEmail("<EMAIL>", "deglkxtfyjpnjqtq", "Ahihi", "<NAME>", "<EMAIL>", 1)
    print(result)
# Run the internal-service email test when executed as a script.
if __name__ == "__main__":
    test2()
pommerman/agents/http_agent.py | KaixiangLin/playground | 2 | 13337 | <gh_stars>1-10
'''The HTTP agent - provides observation using http push to remote
agent and expects action in the reply'''
import json
import time
import os
import threading
import requests
from . import BaseAgent
from .. import utility
from .. import characters
class HttpAgent(BaseAgent):
    """The HTTP Agent that connects to a port with a remote agent where the
    character runs. It uses the same interface as the docker agent and
    is useful for debugging."""

    def __init__(self,
                 port=8080,
                 host='localhost',
                 timeout=120,
                 character=characters.Bomber):
        """
        Args:
            port: port the remote agent's HTTP server listens on.
            host: hostname of the remote agent.
            timeout: seconds to wait for the server to appear (0 = forever).
            character: game character class this agent controls.
        """
        self._port = port
        self._host = host
        self._timeout = timeout
        super(HttpAgent, self).__init__(character)
        self._wait_for_remote()

    def _wait_for_remote(self):
        """Wait for the remote /ping endpoint to answer.

        Retries with exponential backoff (0.25s doubling, capped at 16s).
        A timeout of 0 waits forever. Raises TimeoutError once
        `self._timeout` seconds elapse without a successful ping.
        """
        timeout = self._timeout
        backoff = .25
        # Bug fix: with timeout == 0 ("wait forever") the previous
        # `min(timeout, 16)` made max_backoff 0, collapsing the sleep to a
        # busy loop. Fall back to the 16s cap in that case.
        max_backoff = min(timeout, 16) if timeout else 16

        if timeout:
            # time module is needed to calc timeout shared between two exceptions
            end = time.time() + timeout
        while True:
            try:
                now = time.time()
                if timeout and end < now:
                    # Bug fix: this was a bare `raise` with no active
                    # exception, which produced an unrelated
                    # "No active exception to re-raise" RuntimeError.
                    raise TimeoutError("Timed out waiting for remote agent - %s:%s"
                                       % (self._host, self._port))
                request_url = 'http://%s:%s/ping' % (self._host, self._port)
                requests.get(request_url)
                self._acknowledged = True
                return True
            except requests.exceptions.ConnectionError as e:
                print("ConnectionError: ", e)
                backoff = min(max_backoff, backoff * 2)
                time.sleep(backoff)
            except requests.exceptions.HTTPError as e:
                print("HTTPError: ", e)
                backoff = min(max_backoff, backoff * 2)
                time.sleep(backoff)

    def act(self, obs, action_space):
        """POST the observation to the remote /action endpoint and return the
        action it replies with; falls back to a no-op action on timeout."""
        obs_serialized = json.dumps(obs, cls=utility.PommermanJSONEncoder)
        request_url = "http://{}:{}/action".format(self._host, self._port)
        try:
            req = requests.post(
                request_url,
                timeout=0.15,
                json={
                    "obs":
                    obs_serialized,
                    "action_space":
                    json.dumps(action_space, cls=utility.PommermanJSONEncoder)
                })
            action = req.json()['action']
        except requests.exceptions.Timeout as e:
            print('Timeout!')
            # Fall back to a no-op action of the right arity.
            # TODO: Fix this. It's ugly.
            action = [0] * len(action_space.shape)
        # NOTE(review): assumes the remote replies with a list; a scalar
        # reply would make len() raise -- confirm the remote protocol.
        if len(action) == 1:
            action = action[0]
        return action
| 2.9375 | 3 |
cacao_app/event/serializers.py | CacaoMovil/guia-de-cacao-django | 1 | 13338 | # -*- coding: utf-8 -*-
from rest_framework import serializers
from django_countries.serializer_fields import CountryField
from .models import Event, CountryEvent
class CountryEventSerializer(serializers.ModelSerializer):
    """Serializer for CountryEvent rows: exposes the related country's
    ISO code and display name."""
    # Read-only country code pulled from the related country object.
    code = serializers.ReadOnlyField(source='country.code')
    # Resolved via get_name() below.
    name = serializers.SerializerMethodField()

    class Meta:
        model = CountryEvent
        fields = ('code', 'name')

    def get_name(self, obj):
        # Return the human-readable country name for the serialized row.
        return obj.country.name
class EventsSerializer(serializers.ModelSerializer):
    """Serializer for Event rows, embedding the event's countries as a
    nested, read-only list."""
    # Reverse relation serialized with CountryEventSerializer.
    events_country = CountryEventSerializer(many=True, read_only=True)

    class Meta:
        model = Event
        fields = (
            'name', 'description', 'start', 'end', 'events_country'
        )
| 2.1875 | 2 |
examples/map.py | jlsajfj/NBT | 241 | 13339 | <filename>examples/map.py
#!/usr/bin/env python
"""
Prints a map of the entire world.
"""
import os, sys
import math
from struct import pack
# local module
try:
import nbt
except ImportError:
# nbt not in search path. Let's see if it can be found in the parent folder
extrasearchpath = os.path.realpath(os.path.join(__file__,os.pardir,os.pardir))
if not os.path.exists(os.path.join(extrasearchpath,'nbt')):
raise
sys.path.append(extrasearchpath)
from nbt.region import RegionFile
from nbt.chunk import Chunk
from nbt.world import WorldFolder,McRegionWorldFolder
# PIL module (not build-in)
try:
from PIL import Image
except ImportError:
# PIL not in search path. Let's see if it can be found in the parent folder
sys.stderr.write("Module PIL/Image not found. Pillow (a PIL fork) can be found at http://python-imaging.github.io/\n")
# Note: it may also be possible that PIL is installed, but JPEG support is disabled or broken
sys.exit(70) # EX_SOFTWARE
def get_heightmap_image(chunk, buffer=False, gmin=False, gmax=False):
    """Render a chunk's heightmap as a 16x16 greyscale ('L') PIL image.

    gmin/gmax allow setting the normalization range explicitly, in case this
    chunk is part of a bigger map; by default the chunk's own min/max is used.
    NOTE(review): `gmin == False` also matches an explicit 0 -- a height
    minimum of 0 cannot be forced; confirm whether that matters to callers.
    """
    points = chunk.blocks.generate_heightmap(buffer, True)
    # Normalize the points
    hmin = min(points) if (gmin == False) else gmin
    hmax = max(points) if (gmax == False) else gmax
    hdelta = hmax - hmin + 0.0
    # BUG FIX: pixel data must be bytes -- pack() returns bytes, and
    # str += bytes raises TypeError on Python 3.  Matches get_map() below.
    pixels = b""
    for y in range(16):
        for x in range(16):
            # pix X => mc -Z
            # pix Y => mc X
            offset = (15 - x) * 16 + y
            height = int((points[offset] - hmin) / hdelta * 255)
            # Clamp heights outside the (possibly explicit) range to 0..255.
            if height < 0:
                height = 0
            if height > 255:
                height = 255
            pixels += pack(">B", height)
    # BUG FIX: Image.fromstring was removed from Pillow; frombytes is the
    # modern equivalent (already used by get_map() in this file).
    im = Image.frombytes('L', (16, 16), pixels)
    return im
# List of blocks to ignore
# Uncomment all the lines to show underground structures
# TODO: move this list into a separate config file
block_ignore = [
'air', # At least this one
# 'cave_air', 'water', 'lava', 'snow', 'ice',
# 'grass', 'tall_grass', 'dead_bush',
# 'seagrass', 'tall_seagrass', 'kelp', 'kelp_plant',
# 'dandelion', 'poppy', 'oxeye_daisy', 'white_tulip',
# 'azure_bluet', 'lilac', 'rose_bush', 'peony', 'blue_orchid',
# 'lily_pad', 'sugar_cane', 'vine', 'pumpkin', 'cactus',
# 'wheat', 'potatoes', 'beetroots', 'carrots',
# 'oak_leaves', 'dark_oak_leaves', 'birch_leaves',
# 'acacia_leaves', 'spruce_leaves',
# 'oak_log', 'dark_oak_log', 'birch_log',
# 'acacia_log', 'spruce_log',
# 'brown_mushroom', 'red_mushroom',
# 'brown_mushroom_block', 'red_mushroom_block', 'mushroom_stem',
# 'grass_block', 'grass_path', 'farmland', 'dirt',
# 'stone', 'sand', 'gravel', 'clay',
# 'sandstone', 'diorite', 'andesite', 'granite', 'obsidian',
# 'coal_ore', 'iron_ore', 'gold_ore', 'diamond_ore',
# 'redstone_ore', 'lapis_ore', 'emerald_ore',
# 'cobweb',
]
# Map of block colors from names
# Legacy block numeric identifiers are now hidden by Block class
# and mapped to alpha identifiers in best effort
# TODO: move this map into a separate config file
block_colors = {
'acacia_leaves': {'h':114, 's':64, 'l':22 },
'acacia_log': {'h':35, 's':93, 'l':30 },
'air': {'h':0, 's':0, 'l':0 },
'andesite': {'h':0, 's':0, 'l':32 },
'azure_bluet': {'h':0, 's':0, 'l':100},
'bedrock': {'h':0, 's':0, 'l':10 },
'birch_leaves': {'h':114, 's':64, 'l':22 },
'birch_log': {'h':35, 's':93, 'l':30 },
'blue_orchid': {'h':0, 's':0, 'l':100},
'bookshelf': {'h':0, 's':0, 'l':100},
'brown_mushroom': {'h':0, 's':0, 'l':100},
'brown_mushroom_block': {'h':0, 's':0, 'l':100},
'cactus': {'h':126, 's':61, 'l':20 },
'cave_air': {'h':0, 's':0, 'l':0 },
'chest': {'h':0, 's':100, 'l':50 },
'clay': {'h':7, 's':62, 'l':23 },
'coal_ore': {'h':0, 's':0, 'l':10 },
'cobblestone': {'h':0, 's':0, 'l':25 },
'cobblestone_stairs': {'h':0, 's':0, 'l':25 },
'crafting_table': {'h':0, 's':0, 'l':100},
'dandelion': {'h':60, 's':100, 'l':60 },
'dark_oak_leaves': {'h':114, 's':64, 'l':22 },
'dark_oak_log': {'h':35, 's':93, 'l':30 },
'dark_oak_planks': {'h':35, 's':93, 'l':30 },
'dead_bush': {'h':0, 's':0, 'l':100},
'diorite': {'h':0, 's':0, 'l':32 },
'dirt': {'h':27, 's':51, 'l':15 },
'end_portal_frame': {'h':0, 's':100, 'l':50 },
'farmland': {'h':35, 's':93, 'l':15 },
'fire': {'h':55, 's':100, 'l':50 },
'flowing_lava': {'h':16, 's':100, 'l':48 },
'flowing_water': {'h':228, 's':50, 'l':23 },
'glass_pane': {'h':0, 's':0, 'l':100},
'granite': {'h':0, 's':0, 'l':32 },
'grass': {'h':94, 's':42, 'l':25 },
'grass_block': {'h':94, 's':42, 'l':32 },
'gravel': {'h':21, 's':18, 'l':20 },
'ice': {'h':240, 's':10, 'l':95 },
'infested_stone': {'h':320, 's':100, 'l':50 },
'iron_ore': {'h':22, 's':65, 'l':61 },
'iron_bars': {'h':22, 's':65, 'l':61 },
'ladder': {'h':35, 's':93, 'l':30 },
'lava': {'h':16, 's':100, 'l':48 },
'lilac': {'h':0, 's':0, 'l':100},
'lily_pad': {'h':114, 's':64, 'l':18 },
'lit_pumpkin': {'h':24, 's':100, 'l':45 },
'mossy_cobblestone': {'h':115, 's':30, 'l':50 },
'mushroom_stem': {'h':0, 's':0, 'l':100},
'oak_door': {'h':35, 's':93, 'l':30 },
'oak_fence': {'h':35, 's':93, 'l':30 },
'oak_fence_gate': {'h':35, 's':93, 'l':30 },
'oak_leaves': {'h':114, 's':64, 'l':22 },
'oak_log': {'h':35, 's':93, 'l':30 },
'oak_planks': {'h':35, 's':93, 'l':30 },
'oak_pressure_plate': {'h':35, 's':93, 'l':30 },
'oak_stairs': {'h':114, 's':64, 'l':22 },
'peony': {'h':0, 's':0, 'l':100},
'pink_tulip': {'h':0, 's':0, 'l':0 },
'poppy': {'h':0, 's':100, 'l':50 },
'pumpkin': {'h':24, 's':100, 'l':45 },
'rail': {'h':33, 's':81, 'l':50 },
'red_mushroom': {'h':0, 's':50, 'l':20 },
'red_mushroom_block': {'h':0, 's':50, 'l':20 },
'rose_bush': {'h':0, 's':0, 'l':100},
'sugar_cane': {'h':123, 's':70, 'l':50 },
'sand': {'h':53, 's':22, 'l':58 },
'sandstone': {'h':48, 's':31, 'l':40 },
'seagrass': {'h':94, 's':42, 'l':25 },
'sign': {'h':114, 's':64, 'l':22 },
'spruce_leaves': {'h':114, 's':64, 'l':22 },
'spruce_log': {'h':35, 's':93, 'l':30 },
'stone': {'h':0, 's':0, 'l':32 },
'stone_slab': {'h':0, 's':0, 'l':32 },
'tall_grass': {'h':94, 's':42, 'l':25 },
'tall_seagrass': {'h':94, 's':42, 'l':25 },
'torch': {'h':60, 's':100, 'l':50 },
'snow': {'h':240, 's':10, 'l':85 },
'spawner': {'h':180, 's':100, 'l':50 },
'vine': {'h':114, 's':64, 'l':18 },
'wall_torch': {'h':60, 's':100, 'l':50 },
'water': {'h':228, 's':50, 'l':23 },
'wheat': {'h':123, 's':60, 'l':50 },
'white_wool': {'h':0, 's':0, 'l':100},
}
def get_map(chunk):
    """Render a chunk seen from above as a 16x16 RGB PIL image.

    For every column the highest non-ignored block determines the pixel
    color, looked up in the module-level `block_colors` table.
    """
    # Show an image of the chunk from above
    pixels = b""
    # The maximum build height is a chunk property, not a per-column one:
    # hoisted out of the 16x16 column loop instead of querying it 256 times.
    max_height = chunk.get_max_height()
    for z in range(16):
        for x in range(16):
            # Find the highest block in this column
            ground_height = max_height
            # TODO: translucent blocks (water, ice, leaves, fire) used to
            # collect tint colors here; tinting is currently not implemented,
            # so `tints` stays empty.
            tints = []
            for y in range(max_height, -1, -1):
                block_id = chunk.get_block(x, y, z)
                if block_id is not None:
                    if (block_id not in block_ignore or y == 0):
                        # Here is ground level
                        ground_height = y
                        break

            if block_id is not None:
                if block_id in block_colors:
                    color = block_colors[block_id]
                else:
                    # Unknown block: paint it white and tell the user how to
                    # extend the color table.
                    color = {'h': 0, 's': 0, 'l': 100}
                    print("warning: unknown color for block id: %s" % block_id)
                    print("hint: add that block to the 'block_colors' map")
            else:
                color = {'h': 0, 's': 0, 'l': 0}

            height_shift = 0  # (ground_height-64)*0.25
            final_color = {'h': color['h'], 's': color['s'],
                           'l': color['l'] + height_shift}
            if final_color['l'] > 100:
                final_color['l'] = 100
            if final_color['l'] < 0:
                final_color['l'] = 0
            # Apply tints from translucent blocks
            for tint in reversed(tints):
                final_color = hsl_slide(final_color, tint, 0.4)
            rgb = hsl2rgb(final_color['h'], final_color['s'], final_color['l'])
            pixels += pack("BBB", rgb[0], rgb[1], rgb[2])
    im = Image.frombytes('RGB', (16, 16), pixels)
    return im
## Color functions for map generation ##
# Hue given in degrees,
# saturation and lightness given either in range 0-1 or 0-100 and returned in kind
def hsl_slide(hsl1, hsl2, ratio):
    """Blend `ratio` of the way from HSL color `hsl1` towards `hsl2`.

    Hue is interpolated on the H/S color circle (shortest arc), lightness
    linearly.  Hue in degrees; saturation/lightness in the caller's units.
    NOTE: may mutate hsl1['h'] (by +/-360) when the hues wrap around,
    matching the original behavior.
    """
    # Take the short way around the hue circle.
    if abs(hsl2['h'] - hsl1['h']) > 180:
        if hsl1['h'] > hsl2['h']:
            hsl1['h'] -= 360
        else:
            hsl1['h'] += 360

    # Find location of two colors on the H/S color circle
    p1x = math.cos(math.radians(hsl1['h'])) * hsl1['s']
    p1y = math.sin(math.radians(hsl1['h'])) * hsl1['s']
    p2x = math.cos(math.radians(hsl2['h'])) * hsl2['s']
    p2y = math.sin(math.radians(hsl2['h'])) * hsl2['s']

    # Slide part of the way from tint to base color
    avg_x = p1x + ratio * (p2x - p1x)
    avg_y = p1y + ratio * (p2y - p1y)
    # BUG FIX: math.atan(avg_y/avg_x) loses the quadrant (hues between 90
    # and 270 degrees came out wrong and produced negative saturation) and
    # raised ZeroDivisionError when avg_x == 0.  atan2/hypot recover the
    # correct hue and saturation in every quadrant.
    avg_h = math.degrees(math.atan2(avg_y, avg_x))
    avg_s = math.hypot(avg_x, avg_y)
    avg_l = hsl1['l'] + ratio * (hsl2['l'] - hsl1['l'])
    return {'h': avg_h, 's': avg_s, 'l': avg_l}
# From http://www.easyrgb.com/index.php?X=MATH&H=19#text19
def hsl2rgb(H, S, L):
    """Convert an HSL color to an (R, G, B) tuple of 0-255 integers.

    Hue is given in degrees; saturation and lightness as percentages (0-100).
    Algorithm from http://www.easyrgb.com/index.php?X=MATH&H=19#text19
    """
    hue = H / 360.0
    sat = S / 100.0  # Turn percentages into fractions
    lum = L / 100.0

    # Achromatic color: every channel equals the lightness.
    if sat == 0:
        return (int(lum * 255), int(lum * 255), int(lum * 255))

    if lum < 0.5:
        q = lum * (1 + sat)
    else:
        q = (lum + sat) - (sat * lum)
    p = 2 * lum - q

    def channel(t):
        # Wrap the channel position back into [0, 1] and pick the segment
        # of the piecewise hue-to-channel ramp it falls on.
        if t < 0:
            t += 1
        if t > 1:
            t -= 1
        if (6 * t) < 1:
            return p + (q - p) * 6 * t
        if (2 * t) < 1:
            return q
        if (3 * t) < 2:
            return p + (q - p) * (2 / 3.0 - t) * 6
        return p

    red = int(255 * channel(hue + (1.0 / 3)))
    green = int(255 * channel(hue))
    blue = int(255 * channel(hue - (1.0 / 3)))
    return (red, green, blue)
def main(world_folder, show=True):
    """Render the whole world as one PNG (16x16 pixels per chunk), save it
    as <world_folder>.png (or .partial.png on Ctrl-C) and optionally show
    it.  Returns a sysexits-style status code."""
    world = WorldFolder(world_folder)
    bb = world.get_boundingbox()
    world_map = Image.new('RGB', (16*bb.lenx(),16*bb.lenz()))
    t = world.chunk_count()
    try:
        i =0.0
        for chunk in world.iter_chunks():
            # Progress display.  NOTE(review): since `i % 2 == 0` is tested
            # before `i % 50 == 49`, the percentage line only fires on odd
            # counters ending in 49/99 -- confirm this ordering is intended.
            if i % 50 ==0:
                sys.stdout.write("Rendering image")
            elif i % 2 == 0:
                sys.stdout.write(".")
                sys.stdout.flush()
            elif i % 50 == 49:
                sys.stdout.write("%5.1f%%\n" % (100*i/t))
            i +=1
            chunkmap = get_map(chunk)
            x,z = chunk.get_coords()
            # Paste the chunk tile at its offset relative to the world's
            # minimum chunk coordinates.
            world_map.paste(chunkmap, (16*(x-bb.minx),16*(z-bb.minz)))
        print(" done\n")
        filename = os.path.basename(world_folder)+".png"
        world_map.save(filename,"PNG")
        print("Saved map as %s" % filename)
    except KeyboardInterrupt:
        # Save whatever was rendered so far under a '.partial' name.
        print(" aborted\n")
        filename = os.path.basename(world_folder)+".partial.png"
        world_map.save(filename,"PNG")
        print("Saved map as %s" % filename)
        return 75 # EX_TEMPFAIL
    if show:
        world_map.show()
    return 0 # NOERR
if __name__ == '__main__':
    if (len(sys.argv) == 1):
        print("No world folder specified!")
        sys.exit(64) # EX_USAGE
    # Optional '--noshow' flag suppresses the interactive image viewer;
    # the world folder then comes as the second argument.
    if sys.argv[1] == '--noshow' and len(sys.argv) > 2:
        show = False
        world_folder = sys.argv[2]
    else:
        show = True
        world_folder = sys.argv[1]
    # clean path name, eliminate trailing slashes. required for os.path.basename()
    world_folder = os.path.normpath(world_folder)
    if (not os.path.exists(world_folder)):
        print("No such folder as "+world_folder)
        sys.exit(72) # EX_IOERR
    sys.exit(main(world_folder, show))
| 2.8125 | 3 |
flaskcbv/protos/simple/apps/main/urls.py | procool/flaskcbv | 1 | 13340 | <gh_stars>1-10
from flaskcbv.url import Url, make_urls
from .views import mainView
# URL routing table: maps the empty path to the landing-page view.
# NOTE(review): "namespases" looks like a typo for "namespaces", but the
# name may be imported by the framework elsewhere, so it is left unchanged.
namespases = make_urls(
    Url('', mainView(), name="main"),
)
| 1.523438 | 2 |
main/permissions.py | hellojoshuatonga/notepik | 0 | 13341 | # Rest framework
from rest_framework import permissions
class IsAuthorOrReadOnly(permissions.BasePermission):
    """
    Object-level permission: everyone may read, but only the object's
    author gets write access.
    """

    def has_object_permission(self, request, view, obj):
        # Safe (read-only) methods such as GET/HEAD/OPTIONS are always
        # allowed; any mutating method requires the requester to be the
        # author of the object.
        if request.method not in permissions.SAFE_METHODS:
            return obj.author == request.user
        return True
| 2.96875 | 3 |
femagtools/vtu.py | dapu/femagtools | 0 | 13342 | <filename>femagtools/vtu.py<gh_stars>0
"""
femagtools.vtu
~~~~~~~~~~~~~~
Read FEMAG vtu files
"""
import vtk
import pathlib
import numpy as np
class Reader(object):
    '''Class to read vtu-files'''

    def __init__(self, pathname):
        '''Read the vtu-files

        Parameters
        ----------
        pathname : str
            Directory of result files (vtu-files) or a single vtu file
        '''
        self.data = {}
        self.reader = vtk.vtkXMLUnstructuredGridReader()
        self.output = self.reader.GetOutput()

        self.field_data_names = []
        self.point_data_names = []
        self.cell_data_names = []
        # A single .vtu file or every .vtu file of a directory (sorted so
        # that the time steps come in a deterministic order).
        if pathlib.Path(pathname).suffix == '.vtu':
            self.filenames = [pathlib.Path(pathname)]
        else:
            self.filenames = sorted(pathlib.Path(pathname).glob(
                "*.vtu"))
        # Probe the first file once to learn which arrays are available.
        self.reader.SetFileName(str(self.filenames[0]))
        self.reader.Update()
        self.field_data_names = [
            self.output.GetFieldData().GetAbstractArray(i).GetName()
            for i in range(self.output.GetFieldData().GetNumberOfArrays())]
        self.point_data_names = [
            self.output.GetPointData().GetAbstractArray(i).GetName()
            for i in range(self.output.GetPointData().GetNumberOfArrays())]
        self.cell_data_names = [
            self.output.GetCellData().GetAbstractArray(i).GetName()
            for i in range(self.output.GetCellData().GetNumberOfArrays())]

        self.set_time_window(0.0, 0.0)

    def get_data_names(self):
        '''Read the list of values stored in the vtu files

        Parameters
        ----------
        None

        Returns
        -------
        List of values stored in the vtu files
        '''
        return (self.field_data_names +
                self.point_data_names +
                self.cell_data_names)

    def read_data(self, data_list):
        '''Extracts data from the vtu files

        Parameters
        ----------
        data_list : list of str
            List of values to extract from vtu_files
        '''
        # Prepare one accumulator list per requested value, rejecting
        # names that none of the files provide.
        for data_name in data_list:
            if data_name in self.field_data_names:
                self.data[data_name] = []
            elif data_name in self.point_data_names:
                self.data[data_name] = []
            elif data_name in self.cell_data_names:
                self.data[data_name] = []
            else:
                raise Exception('unknown data name "' + data_name + '"')

        # One pass over every file; each file contributes one array per
        # requested value (i.e. one time step).
        for filename in self.filenames:
            self.reader.SetFileName(str(filename))
            self.reader.Update()
            for data_name in data_list:
                if data_name in self.field_data_names:
                    self.data[data_name].append(
                        self.output.GetFieldData().GetAbstractArray(data_name))
                if data_name in self.point_data_names:
                    self.data[data_name].append(
                        self.output.GetPointData().GetAbstractArray(data_name))
                if data_name in self.cell_data_names:
                    self.data[data_name].append(
                        self.output.GetCellData().GetAbstractArray(data_name))
        return "done"

    def set_time_window(self, start, end):
        '''Set time window

        Parameters
        ----------
        start: float
            Start of the time window
        end: float
            End of the time window

        Only values within the time window are output by the functions
            get_field_vector
            get_point_vector
            get_cell_vector
            get_data_vector
        At start = 0.0 the values are read out starting from the first value
        At end = 0.0 the values are read out up to the last value
        '''
        try:
            if "time [s]" not in self.data:
                self.read_data(['time [s]'])
            if start == 0 or start <= self.data['time [s]'][0].GetValue(0):
                self.istart = 0
            else:
                self.istart = 0
                # Timestamps are assumed ascending: the last index whose
                # time is <= start, plus one, is the first step inside the
                # window.
                for i in range(len(self.data['time [s]'])):
                    if start >= self.data['time [s]'][i].GetValue(0):
                        self.istart = i + 1
            if end == 0 or end >= self.data['time [s]'][-1].GetValue(0):
                self.iend = len(self.data['time [s]'])
            else:
                self.iend = 0
                for i in range(len(self.data['time [s]'])):
                    if end <= self.data['time [s]'][i].GetValue(0):
                        self.iend = i
                        # BUG FIX: stop at the FIRST step at/after `end`.
                        # Without this break the (ascending) timestamps kept
                        # the condition true, so iend always ended up at the
                        # last index and the requested end time was ignored.
                        break
        except Exception:
            # BUG FIX: was a bare `except:`, which also swallowed
            # SystemExit/KeyboardInterrupt.  On failure (e.g. no
            # 'time [s]' array) fall back to "no window".
            self.istart = None
            self.iend = None

    def get_field_vector(self, field_data):
        '''Read field data

        Parameters
        ----------
        field_data : str
            Name of field to read

        Returns
        -------
        field_vec : list of float
            List of field values within the time window
        '''
        if field_data not in self.data:
            self.read_data([field_data])
        # istart/iend may be None when set_time_window failed; fall back to
        # the full range.  (istart == 0 takes the same fallback harmlessly.)
        if self.istart:
            start = self.istart
        else:
            start = 0
        if self.iend:
            end = self.iend
        else:
            end = len(self.data[field_data])
        field_vec = []
        for i in range(start, end):
            field_vec.append(self.data[field_data][i].GetValue(0))
        return field_vec

    # pnt = node-key, >0
    def get_point_vector(self, pnt_data, pnt):
        '''Read point data

        Parameters
        ----------
        point_data : str
            Name of field to read
        pnt : int
            Key of point (1-based; converted to the 0-based array index)

        Returns
        -------
        point_vec : list of float
            List of point values within the time window
        '''
        if pnt_data not in self.data:
            self.read_data([pnt_data])
        if self.istart:
            start = self.istart
        else:
            start = 0
        if self.iend:
            end = self.iend
        else:
            end = len(self.data[pnt_data])
        point_vec = []
        for i in range(start, end):
            point_vec.append(self.data[pnt_data][i].GetValue(pnt - 1))
        return point_vec

    def get_cell_vector(self, cell_data, cell):
        '''Read cell data

        Parameters
        ----------
        cell_data : str
            Name of field to read
        cell : int
            Key of cell (1-based; converted to the 0-based array index)

        Returns
        -------
        cell_vec : list of float
            List of cell values within the time window.  For vector-valued
            cell data a list [x-values, y-values, z-values] is returned.
        '''
        if cell_data not in self.data:
            self.read_data([cell_data])
        i = self.cell_data_names.index(cell_data)
        # Number of components per cell: 1 for scalars, 3 for vectors.
        noc = self.output.GetCellData().GetAbstractArray(i).GetNumberOfComponents()
        if noc == 1:
            cell_vec = []
        else:
            cell_vec_x = []
            cell_vec_y = []
            cell_vec_z = []
        if self.istart:
            start = self.istart
        else:
            start = 0
        if self.iend:
            end = self.iend
        else:
            end = len(self.data[cell_data])
        for i in range(start, end):
            if noc == 1:
                cell_vec.append(self.data[cell_data][i].GetValue(cell - 1))
            else:
                # Components of one cell are stored interleaved.
                cell_vec_x.append(
                    self.data[cell_data][i].GetValue(noc * (cell - 1)))
                cell_vec_y.append(
                    self.data[cell_data][i].GetValue(noc * (cell - 1) + 1))
                cell_vec_z.append(
                    self.data[cell_data][i].GetValue(noc * (cell - 1) + 2))
        if noc == 1:
            return cell_vec
        else:
            return [cell_vec_x, cell_vec_y, cell_vec_z]

    def get_data_vector(self, data_name, key=0):
        '''Read data of field, point or cell

        Parameters
        ----------
        data_name : str
            Name of data to read
        key : int (optional)
            Key of point or cell

        Returns
        -------
        data_vec : list of float
            List of values within the time window
        '''
        if data_name in self.field_data_names:
            return self.get_field_vector(data_name)
        if data_name in self.point_data_names:
            return self.get_point_vector(data_name, key)
        if data_name in self.cell_data_names:
            return self.get_cell_vector(data_name, key)
        return []
def read(filename):
    """
    Read vtu file and return Reader object.

    Arguments:
        filename: name of a single vtu file, or a directory of vtu files,
            to be read (see Reader)
    """
    return Reader(filename)
| 2.703125 | 3 |
__init__.py | rolc/python-package | 0 | 13343 | #!/usr/bin/python
#-------------------------------IMPORT--------------------------------#
from lib import *
#-------------------------------EXPORT--------------------------------#
# NOTE(review): the '<#PREFIX#>' markers are template placeholders,
# presumably substituted when the package is generated -- confirm before
# using this file as-is.
__all__ = ['<#PREFIX#>_app','<#PREFIX#>_index']
| 1.46875 | 1 |
svm-rank/bag_of_words.py | Anthony-Alridge/individual_project | 0 | 13344 | from collections import Counter
import numpy as np
def keep_word(word):
    # Keep only purely alphabetic tokens (drops numbers, punctuation, etc.).
    return word.is_alpha
def unique_words(problems):
    """Return the set of lemmas occurring in any problem's tokens,
    filtered through keep_word()."""
    lemmas = set()
    for problem in problems:
        for token in problem.tokens():
            if keep_word(token):
                lemmas.add(token.lemma_)
    return lemmas
def create_word2idx(vocab):
    """Map each word to its position in `vocab` (its feature-vector index)."""
    mapping = {}
    for position, token in enumerate(vocab):
        mapping[token] = position
    return mapping
class BagOfWordsFeature():
    """Bag-of-words featurizer: one count per vocabulary lemma."""

    def __init__(self, corpus):
        self.vocab = list(unique_words(corpus))
        # Mapping from words to their index in the feature vector.
        self.word2idx = create_word2idx(self.vocab)

    def process(self, problem):
        """Return a count vector (length == vocab size) for one problem."""
        features = np.zeros(len(self.vocab))
        counts = Counter(
            token.lemma_ for token in problem.tokens() if keep_word(token))
        for lemma, count in counts.items():
            # Skip unknown (out-of-vocabulary) words.
            if lemma in self.word2idx:
                features[self.word2idx[lemma]] = count
        return features
| 2.78125 | 3 |
pymps/ansatz/tests/test_sweeper.py | GiggleLiu/pymps | 4 | 13345 | <reponame>GiggleLiu/pymps
#!/usr/bin/python
'''
Tests for MPS and MPO
'''
from numpy import *
import matplotlib.pyplot as plt
from numpy.testing import dec, assert_, assert_raises, assert_almost_equal, assert_allclose
import pdb
from ..sweep import *
def test_iterator():
    """Exercise get_sweeper/get_psweeper against hand-computed sweep orders."""
    start = (1, '->', 2)
    stop = (3, '<-', 1)
    print('Testing iterator start = %s, stop= %s' % (start, stop))
    iterator = get_sweeper(start=start, stop=stop, nsite=4 - 2, iprint=2)
    # Expected (iteration, direction, position) sequence.
    order = [(1, '->', 2), (1, '<-', 1), (1, '<-', 0),
             (2, '->', 1), (2, '->', 2), (2, '<-', 1), (2, '<-', 0),
             (3, '->', 1), (3, '->', 2), (3, '<-', 1),
             ]
    plt.ion()
    # NOTE(review): if visualize_sweeper consumes the iterator, the zip
    # below compares against an exhausted iterator -- confirm the sweeper
    # is reusable/regenerated.
    visualize_sweeper(iterator, nsite=3)
    for od, it in zip(order, iterator):
        assert_(od == it)

    print('Testing 2-site iterator.')
    start = (1, '->', 0)
    stop = (3, '->', 0)
    order = [(1, '->', 0), (2, '->', 0), (3, '->', 0)]
    iterator = get_sweeper(start=start, stop=stop, nsite=2 - 2)
    for od, it in zip(order, iterator):
        assert_(od == it)

    print('Testing periodic case.')
    iterator = get_psweeper(start=(1, 2), stop=(3, 1), nsite=4, iprint=2)
    order = [(1, 2), (1, 3), (2, 0), (2, 1), (2, 2), (2, 3), (3, 0), (3, 1)]
    for od, it in zip(order, iterator):
        assert_(od == it)
    iterator = get_psweeper(start=(1, 0), stop=(3, 0), nsite=2, iprint=2)
    order = [(1, 0), (1, 1), (2, 0)]
    for od, it in zip(order, iterator):
        assert_(od == it)
# Allow running this test module directly as a script.
if __name__ == '__main__':
    test_iterator()
| 2.671875 | 3 |
ui/numbered_menu.py | LouisPi/PiPortableRecorder | 51 | 13346 | from threading import Lock
from time import time
from ui import Menu
from ui.utils import clamp, check_value_lock, to_be_foreground
class NumberedMenu(Menu):
    """
    This Menu allows the user to jump to entries using the numpad. If the menu is 10 entries or less
    the navigation is instant. Otherwise, it lets the user type multiple digits to navigate to entries beyond 10th.

    The `input_delay` parameter controls how long, and if, the menu waits before considering an input as definitive.
    If `input_delay` is 0, then only the 10 first entries can be navigated to using the keypad.

    The `prepend_numbers` parameters controls whether the entries should be prefixed by their number.
    (default: `True`)
    """

    def __init__(self, *args, **kwargs):
        self.prepend_numbers = kwargs.pop('prepend_numbers', True)
        self.input_delay = kwargs.pop('input_delay', 1)
        Menu.__init__(self, *args, **kwargs)
        # presumably consumed by the @check_value_lock decorator together
        # with value_lock -- confirm in ui.utils
        self.__locked_name__ = None
        self.value_lock = Lock()
        # Maps "KEY_0".."KEY_9" to the corresponding digit.
        self.numeric_keymap = {"KEY_{}".format(i): i for i in range(10)}
        # Timestamp of the last digit press (drives the multi-digit timeout).
        self.last_input_time = 0
        # Digits typed so far, as a string; None when no input is pending.
        self.current_input = None

    @property
    def entry_count(self):
        # Number of entries currently shown in the menu.
        return len(self.contents)

    def before_activate(self):
        Menu.before_activate(self)
        # Start "outside" the delay window so a stale digit can't carry over.
        self.last_input_time = -self.input_delay

    def idle_loop(self):
        Menu.idle_loop(self)
        # Poll whether a pending multi-digit input has become definitive.
        self.check_character_state()

    def set_keymap(self):
        Menu.set_keymap(self)
        # Receive every key press (not only mapped ones) via streaming.
        self.i.set_streaming(self.on_key_pressed)

    def deactivate(self):
        Menu.deactivate(self)
        self.i.remove_streaming()

    @to_be_foreground
    def on_key_pressed(self, key):
        # In multi-digit mode, RIGHT confirms the digits typed so far.
        if key == "KEY_RIGHT" and self.is_multi_digit():
            self.confirm_current_input()

        # Ignore everything that is not a numpad digit.
        if key not in self.numeric_keymap:
            return
        if self.is_multi_digit():
            self.process_multi_digit_input(key)
        else:
            self.process_single_digit_input(key)
        self.view.refresh()

    def process_single_digit_input(self, key):
        # Instant navigation: one key press jumps straight to entries 0-9.
        self.move_to_entry(self.numeric_keymap[key])

    def process_multi_digit_input(self, key):
        # Buffer the digit and restart the input-delay timer.
        self.last_input_time = time()
        if not self.current_input:
            self.current_input = str(self.numeric_keymap[key])
        else:
            self.current_input += str(self.numeric_keymap[key])

    def move_to_entry(self, index):
        if self.pointer == index:
            # Moving to the same item that's already selected
            # let's interpret this as KEY_ENTER
            self.current_input = None
            self.select_entry()
            return
        self.pointer = clamp(index, 0, len(self.contents) - 1)
        self.current_input = None
        self.view.refresh()

    def process_contents(self):
        Menu.process_contents(self)
        if self.prepend_numbers:
            self.prepend_entry_text()

    def prepend_entry_text(self):
        # prepend numbers to each entry name
        if self.is_multi_digit():
            # Multi-digit mode: every entry is reachable, number them all
            # (builds a new contents list).
            self.contents = [["{} {}".format(i, entry[0]), entry[1]]
                             for i, entry in enumerate(self.contents)]
        else:
            # Single-digit mode: only the first ten entries are reachable,
            # so only they get a number prefix (mutated in place).
            for i, entry in enumerate(self.contents[:10]):
                entry[0] = "{} {}".format(i, entry[0])

    @check_value_lock
    def check_character_state(self):
        if self.is_current_input_finished():
            self.move_to_entry(int(self.current_input))

    def is_multi_digit(self):
        # A zero delay disables the multi-digit buffer entirely.
        return self.input_delay > 0

    def is_current_input_finished(self):
        # nothing in the buffer
        if not self.current_input:
            return False
        # no need to let the user input '100' if we have 20 entries
        if len(str(self.current_input)) == len(str(self.entry_count)):
            return True
        # user typed 2 and we have 19 entries, going to the most likely option
        if int(self.current_input) * 10 > self.entry_count:
            return True
        # user typed 17 and we have 12 entries
        if int(self.current_input) >= self.entry_count:
            return True
        now = time()
        elapsed = now - self.last_input_time
        if self.is_multi_digit() and elapsed >= self.input_delay:  # delay wait is over
            return True
        return False

    def confirm_current_input(self):
        # RIGHT pressed with no pending digits: nothing to confirm.
        if self.current_input is None:
            return
        self.move_to_entry(int(self.current_input))
Math/RussianPeasantMultiplication.py | kopok2/algorithms | 0 | 13347 | # coding=utf-8
"""Russian Peasant Multiplication algorithm Python implementation."""
def russ_peasant(a, b):
    """Multiply `a` by non-negative `b` via repeated halving/doubling
    (Russian peasant multiplication).  Returns 0 when b <= 0."""
    total = 0
    multiplier, multiplicand = a, b
    while multiplicand > 0:
        # An odd multiplicand contributes the current multiplier to the sum.
        if multiplicand % 2 == 1:
            total += multiplier
        multiplier *= 2
        multiplicand //= 2
    return total
if __name__ == '__main__':
    # Sanity check against the built-in multiplication over 0..9 x 0..9.
    for x in range(10):
        for y in range(10):
            print(x, y, x * y, russ_peasant(x, y))
| 3.71875 | 4 |
Assignment 4/src/optim/lr_scheduler.py | vamsi3/CS763-IIT-Bombay | 1 | 13348 | import math
import torch
from bisect import bisect_right
class _LRScheduler:
    """Base class for learning-rate schedulers.

    Subclasses implement get_lr(); step() advances the epoch counter and
    writes the new learning rate back onto the wrapped optimizer.
    """

    def __init__(self, optimizer, last_epoch=-1):
        self.optimizer = optimizer
        # Initial learning rate, captured once; get_lr() scales from this.
        self.base_lr = optimizer.lr
        self.last_epoch = last_epoch

    def step(self):
        # Advance one epoch and apply the schedule to the optimizer.
        self.last_epoch += 1
        self.optimizer.lr = self.get_lr()
class StepLR(_LRScheduler):
    """Decays the learning rate by `gamma` once every `step_size` epochs."""

    def __init__(self, optimizer, step_size, gamma=0.1, last_epoch=-1):
        super().__init__(optimizer, last_epoch)
        self.step_size = step_size
        self.gamma = gamma

    def get_lr(self):
        # Number of completed decay periods so far.
        completed_periods = self.last_epoch // self.step_size
        return self.base_lr * self.gamma ** completed_periods
class MultiStepLR(_LRScheduler):
    """Decays the learning rate by `gamma` at each epoch in `milestones`
    (a sorted list of epoch indices)."""

    def __init__(self, optimizer, milestones, gamma=0.1, last_epoch=-1):
        super().__init__(optimizer, last_epoch)
        self.milestones = milestones
        self.gamma = gamma

    def get_lr(self):
        # How many milestones have already been passed.
        passed = bisect_right(self.milestones, self.last_epoch)
        return self.base_lr * self.gamma ** passed
class ExponentialLR(_LRScheduler):
    """Decays the learning rate by `gamma` every epoch."""

    def __init__(self, optimizer, gamma, last_epoch=-1):
        super().__init__(optimizer, last_epoch)
        self.gamma = gamma

    def get_lr(self):
        # lr = base_lr * gamma^epoch
        return self.base_lr * (self.gamma ** self.last_epoch)
class CosineAnnealingLR(_LRScheduler):
    """Anneals the learning rate from base_lr down to `eta_min` following
    half a cosine wave over `T_max` epochs."""

    def __init__(self, optimizer, T_max, eta_min=0, last_epoch=-1):
        super().__init__(optimizer, last_epoch)
        self.T_max = T_max
        self.eta_min = eta_min

    def get_lr(self):
        # Cosine phase in [0, pi] as the epoch runs from 0 to T_max.
        phase = math.pi * self.last_epoch / self.T_max
        span = self.base_lr - self.eta_min
        return self.eta_min + span * (1 + math.cos(phase)) / 2
| 2.234375 | 2 |
gluon/dal/adapters/teradata.py | lightcoder127/Web2py | 2 | 13349 | <reponame>lightcoder127/Web2py
# -*- coding: utf-8 -*-
from .._globals import IDENTITY
from ..connection import ConnectionPool
from .base import BaseAdapter
class TeradataAdapter(BaseAdapter):
    """DAL adapter for Teradata, connected through pyodbc.

    Overrides the SQL type map and a few SQL fragments where Teradata
    syntax differs from the BaseAdapter defaults (identity columns,
    constraints without ON DELETE, TOP instead of LIMIT/OFFSET).
    """
    drivers = ('pyodbc',)

    # Mapping from DAL field types to Teradata column type declarations.
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'VARCHAR(2000)',
        'json': 'VARCHAR(4000)',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BLOB',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'bigint': 'BIGINT',
        'float': 'REAL',
        'double': 'DOUBLE',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        # Modified Constraint syntax for Teradata.
        # Teradata does not support ON DELETE.
        'id': 'INT GENERATED ALWAYS AS IDENTITY',  # Teradata Specific
        'reference': 'INT',
        'list:integer': 'VARCHAR(4000)',
        'list:string': 'VARCHAR(4000)',
        'list:reference': 'VARCHAR(4000)',
        'geometry': 'ST_GEOMETRY',  # http://www.info.teradata.com/HTMLPubs/DB_TTU_14_00/index.html#page/Database_Management/B035_1094_111A/ch14.055.160.html
        'big-id': 'BIGINT GENERATED ALWAYS AS IDENTITY',  # Teradata Specific
        'big-reference': 'BIGINT',
        'reference FK': ' REFERENCES %(foreign_key)s',
        'reference TFK': ' FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s)',
    }

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        self.db = db
        self.dbengine = "teradata"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args,uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        # Everything after 'scheme://' is passed verbatim to pyodbc as the
        # ODBC connection string.
        ruri = uri.split('://', 1)[1]
        def connector(cnxn=ruri,driver_args=driver_args):
            return self.driver.connect(cnxn,**driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

    def close(self,action='commit',really=True):
        # Teradata does not implicitly close off the cursor
        # leading to SQL_ACTIVE_STATEMENTS limit errors
        self.cursor.close()
        ConnectionPool.close(self, action, really)

    def LEFT_JOIN(self):
        # Teradata requires the OUTER keyword to be explicit.
        return 'LEFT OUTER JOIN'

    # Similar to MSSQL, Teradata can't specify a range (for Pageby)
    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        # NOTE(review): only the upper bound (lmax) is honored via TOP; the
        # lower bound (lmin) is ignored, so paging beyond page one returns
        # the first rows again -- confirm this is the intended limitation.
        if limitby:
            (lmin, lmax) = limitby
            sql_s += ' TOP %i' % lmax
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)

    def _truncate(self, table, mode=''):
        # Teradata's fast bulk delete: DELETE ... ALL (no TRUNCATE keyword).
        tablename = table._tablename
        return ['DELETE FROM %s ALL;' % (tablename)]
| 1.976563 | 2 |
conan_tests/external_tools/vswhere_test.py | conan-io/test | 2 | 13350 | import os
import platform
import unittest
import nose
from conans import tools
from conans.errors import ConanException
from conans.model.version import Version
from conans import __version__ as client_version
from conans.model import settings
from conans.test.utils.tools import TestClient
from conans.test.assets.visual_project_files import get_vs_project_files
class vswhereTest(unittest.TestCase):
    """Integration tests for the vswhere helpers in conans.tools.

    NOTE(review): these tests assume a very specific machine setup --
    confirm the environment before running.
    """
    # Environment supposed:
    # - BuildTools 14 (2015)
    # - VS Community 14 (2015)
    #
    # - BuildTools 15 (2017) OR VS Community 15 (2017)

    # Expected product counts for the environment described above.
    modern_products = 1  # 2017 or higher versions without BuildTools -> vswhere()
    all_modern_products = 2  # 2017 or higher versions with BuildTools -> vswhere(products=["*"])
    modern_and_legacy_products = 2  # 2017 and lower versions (without BuildTools) -> vswhere(legacy=True)
    only_legacy_products = 1
    all_products = 3

    def setUp(self):
        # These tests can only run on Windows with a recent-enough client.
        if platform.system() != "Windows":
            raise nose.SkipTest("Only Windows test")
        if Version(client_version) < Version("1.1.0-dev"):
            raise nose.SkipTest("Only >= 1.1.0-dev version")

    def vs_comntools_test(self):
        # Fake path
        with tools.environment_append({"VS150COMNTOOLS": "fake/path/here"}):
            path = tools.vs_comntools("15")
            self.assertEqual(path, "fake/path/here")
        # VS 14 path
        path = tools.vs_comntools("14")
        self.assertEqual(path, "C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\Common7\\Tools\\")
        # VS 15 path (shouldn't be found as VS150COMNTOOLS is not set by default)
        path = tools.vs_comntools("15")
        self.assertEqual(path, None)

    def vswhere_test(self):
        # products and legacy not allowed
        self.assertRaises(ConanException, tools.vswhere, products=["*"], legacy=True)

        # Detect only one product (VS Community 15) as vswhere default detection
        nproducts = len(tools.vswhere())
        self.assertEqual(nproducts, self.modern_products)

        # Detect only modern products (VS Community 15 & BuildTools 15)
        products = tools.vswhere(products=["*"])
        nproducts = len(products)
        self.assertEqual(nproducts, self.all_modern_products)
        installation_paths = [product["installationPath"] for product in products]
        self.assertTrue(any("Community" in install_path for install_path in installation_paths))
        self.assertTrue(any("BuildTools" in install_path for install_path in installation_paths))

        # Detect also legacy products but no modern BuildTools
        products = tools.vswhere(legacy=True)
        nproducts = len(products)
        self.assertEqual(nproducts, self.modern_and_legacy_products)
        installation_paths = [product["installationPath"] for product in products]
        self.assertTrue(any("Community" in install_path for install_path in installation_paths))
        self.assertTrue(any("Microsoft Visual Studio 14.0" in install_path for install_path in installation_paths))

        # Detect all installed products
        products = tools.vswhere(products=["*"])
        products += tools.vswhere(legacy=["*"])
        # Deduplicate while preserving order (a product can show up in both
        # queries above).
        seen_products = []
        for product in products:
            if product not in seen_products:
                seen_products.append(product)
        products = seen_products
        nproducts = len(products)
        self.assertEqual(nproducts, self.all_products)
        installation_paths = [product["installationPath"] for product in products]
        self.assertTrue(any("Community" in install_path for install_path in installation_paths))
        self.assertTrue(any("BuildTools" in install_path for install_path in installation_paths))
        self.assertTrue(any("Microsoft Visual Studio 14.0" in install_path for install_path in installation_paths))

    def vs_installation_path_test(self):
        # Default behaviour
        install_path = tools.vs_installation_path("15")
        self.assertIn("Community", install_path)
        install_path = tools.vs_installation_path("14")
        self.assertIn("Microsoft Visual Studio 14.0", install_path)
        # only BuildTools detection
        install_path = tools.vs_installation_path("15", preference=["BuildTools"])
        self.assertIn("BuildTools", install_path)
        install_path = tools.vs_installation_path("14", preference=["BuildTools"])
        self.assertIn("Microsoft Visual Studio 14.0", install_path)
        # Ask for not installed versions
        install_path = tools.vs_installation_path("15", preference=["Enterprise"])
        self.assertIsNone(install_path)
        install_path = tools.vs_installation_path("15", preference=["Professional"])
        self.assertIsNone(install_path)
        # Change preference order
        install_path = tools.vs_installation_path("15", preference=["BuildTools", "Community", "Professional", "Enterprise"])
        self.assertIn("BuildTools", install_path)
        install_path = tools.vs_installation_path("15", preference=["Professional", "Enterprise", "Community"])
        self.assertIn("Community", install_path)
        # Preference order by env var
        with(tools.environment_append({"CONAN_VS_INSTALLATION_PREFERENCE": "BuildTools, Community,Professional, Enterprise"})):
            install_path = tools.vs_installation_path("15")
            self.assertIn("BuildTools", install_path)
        with(tools.environment_append({"CONAN_VS_INSTALLATION_PREFERENCE": "Professional, Enterprise,Community"})):
            install_path = tools.vs_installation_path("15")
            self.assertIn("Community", install_path)

    # NOTE(review): method name looks like a typo for 'vcvars_command_test',
    # but renaming would change which tests the runner collects -- left as-is.
    def vvcars_command_test(self):
        fake_settings = settings.Settings({"os": "Windows", "arch": "x86_64"})
        # preference order with VS 15
        with(tools.environment_append({"CONAN_VS_INSTALLATION_PREFERENCE": "BuildTools, Community,Professional, Enterprise"})):
            command = tools.vcvars_command(settings=fake_settings, compiler_version="15")
            self.assertNotIn("Community", command)
            self.assertIn("VC/Auxiliary/Build/vcvarsall.bat", command)
            self.assertIn("Microsoft Visual Studio\\2017\\BuildTools", command)
            self.assertIn("VSCMD_START_DIR", command)
        with(tools.environment_append({"CONAN_VS_INSTALLATION_PREFERENCE": "Professional, Enterprise,Community"})):
            command = tools.vcvars_command(settings=fake_settings, compiler_version="15")
            self.assertNotIn("BuildTools", command)
            self.assertIn("VC/Auxiliary/Build/vcvarsall.bat", command)
            self.assertIn("Microsoft Visual Studio\\2017\\Community", command)
            self.assertIn("VSCMD_START_DIR", command)
        # With VS 14 order of preference does not apply
        command = tools.vcvars_command(settings=fake_settings, compiler_version="14")
        self.assertNotIn("VSCMD_START_DIR", command)
        self.assertIn("VC/vcvarsall.bat", command)
        self.assertIn("Microsoft Visual Studio 14.0\\", command)

    def build_test(self):
        # A minimal conanfile whose build() drives MSBuild directly.
        conan_build_vs = """
from conans import ConanFile, MSBuild, tools

class HelloConan(ConanFile):
    name = "Hello"
    version = "1.2.1"
    settings = "os", "build_type", "arch", "compiler"
    export_source = "*"

    def build(self):
        msbuild = MSBuild(self)
        msbuild.build("MyProject.sln", upgrade_project=False)
"""
        client = TestClient()
        files = get_vs_project_files()
        files["conanfile.py"] = conan_build_vs
        client.save(files)
        with(tools.environment_append({"CONAN_PRINT_RUN_COMMANDS": "1"})):
            with(tools.environment_append({"CONAN_VS_INSTALLATION_PREFERENCE": "BuildTools"})):
                client.run("install .")
                client.run("build .")
                self.assertIn("BuildTools", client.out)

        conan_build_vs = conan_build_vs.replace("upgrade_project=False", "upgrade_project=True")
        files["conanfile.py"] = conan_build_vs
        client.save(files)
        with(tools.environment_append({"CONAN_VS_INSTALLATION_PREFERENCE": "BuildTools",
                                       "CONAN_SKIP_VS_PROJECTS_UPGRADE": "True"})):
            client.run("install .")
            client.run("build .")
            self.assertIn("BuildTools", client.out)
| 2.171875 | 2 |
pinliner/pinliner.py | minazukie/pinliner | 53 | 13351 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import argparse
import json
import os
from pinliner import __version__
import sys
# Name of the bundled importer template and the placeholder inside it that
# is replaced with the inlined packages' source code.
TEMPLATE_FILE = 'importer.template'
TEMPLATE_PATTERN = '${CONTENTS}'
def output(cfg, what, newline=True):
    """Write *what* to the configured output file.

    A platform line separator is appended unless *newline* is False.
    """
    suffix = os.linesep if newline else ''
    cfg.outfile.write(what + suffix)
def process_file(cfg, base_dir, package_path):
    """Append one source file's code to the output and return its metadata.

    Returns a tuple ``(dotted_path, is_package, start_offset, end_offset,
    timestamp)``; the offsets are positions of this file's code inside the
    generated bundle (as reported by outfile.tell()), later used by the
    importer to locate each module's source.
    """
    if cfg.tagging:
        output(cfg, '<tag:' + package_path + '>')
    # Convert the relative file path into a dotted module path.
    path = os.path.splitext(package_path)[0].replace(os.path.sep, '.')
    package_start = cfg.outfile.tell()
    full_path = os.path.join(base_dir, package_path)
    with open(full_path, 'r') as f:
        # Read the whole file
        code = f.read()
    # Insert escape character before ''' since we'll be using ''' to insert
    # the code as a string
    output(cfg, code.replace("'''", r"\'''"), newline=cfg.tagging)
    package_end = cfg.outfile.tell()
    is_package = 1 if path.endswith('__init__') else 0
    if is_package:
        # Drop the trailing '.__init__' (9 characters) to get the package name.
        path = path[:-9]
    # Get file timestamp
    timestamp = int(os.path.getmtime(full_path))
    return path, is_package, package_start, package_end, timestamp
def template(cfg):
    """Write the importer template's prefix to the output file.

    Reads the importer template bundled next to this module, substitutes the
    exception-hook and default-package placeholders, writes everything up to
    the contents marker, and returns the remaining (postfix) portion so the
    caller can emit it after the inlined sources.
    """
    template_path = os.path.join(os.path.dirname(__file__), TEMPLATE_FILE)
    with open(template_path) as f:
        template = f.read()
    prefix_end = template.index(TEMPLATE_PATTERN)
    prefix_data = template[:prefix_end].replace('%{FORCE_EXC_HOOK}',
                                                str(cfg.set_hook))
    prefix_data = prefix_data.replace('%{DEFAULT_PACKAGE}',
                                      cfg.default_package)
    cfg.outfile.write(prefix_data)
    postfix_begin = prefix_end + len(TEMPLATE_PATTERN)
    return template[postfix_begin:]
def process_directory(cfg, base_dir, package_path):
    """Recursively inline every module under base_dir/package_path.

    Returns the list of per-file metadata tuples produced by process_file
    for each module found, descending into sub-packages.
    """
    collected = []
    for entry in os.listdir(os.path.join(base_dir, package_path)):
        rel_path = os.path.join(package_path, entry)
        abs_path = os.path.join(base_dir, rel_path)
        if is_module(abs_path):
            collected.append(process_file(cfg, base_dir, rel_path))
        elif is_package(abs_path):
            collected.extend(process_directory(cfg, base_dir, rel_path))
    return collected
def process_files(cfg):
    """Generate the single-file bundle for all requested packages.

    Writes the importer prefix, then every package's source wrapped inside a
    triple-quoted string, then a JSON index mapping dotted module paths to
    their byte offsets in the file, and finally the importer postfix.
    """
    # template would look better as a context manager
    postfix = template(cfg)
    files = []
    output(cfg, "'''")
    for package_path in cfg.packages:
        base_dir, module_name = os.path.split(package_path)
        files.extend(process_directory(cfg, base_dir, module_name))
    output(cfg, "'''")
    # Transform the list into a dictionary
    inliner_packages = {data[0]: data[1:] for data in files}
    # Generate the references to the positions of the different packages and
    # modules inside the main file.
    # We don't use indent to decrease the number of bytes in the file
    data = json.dumps(inliner_packages)
    output(cfg, 2 * os.linesep + 'inliner_packages = ', newline=False)
    # Reflow the JSON so each entry sits on its own line.
    data = data.replace('],', '],' + os.linesep + ' ')
    data = data.replace('[', '[' + os.linesep + 8 * ' ')
    data = '%s%s %s%s%s' % (data[0], os.linesep, data[1:-1], os.linesep,
                            data[-1])
    output(cfg, data)
    # No newline on last line, as we want output file to be PEP8 compliant.
    output(cfg, postfix, newline=False)
    cfg.outfile.close()
def parse_args():
    """Build the CLI parser, parse sys.argv and return the config namespace.

    Also derives cfg.default_package when the user did not provide one:
    a single inlined package becomes the default; multiple packages get no
    default (the file then acts as a bundle).
    """
    class MyParser(argparse.ArgumentParser):
        """Class to print verbose help on error."""
        def error(self, message):
            self.print_help()
            sys.stderr.write('\nERROR: %s\n' % message)
            sys.exit(2)

    general_description = """Pinliner - Python Inliner (Version %s)

    This tool allows you to merge all files that comprise a Python package into
a single file and be able to use this single file as if it were a package.

    Imports will work as usual so if you have a package structure like:
        .
        └── [my_package]
            ├── file_a.py
            ├── [sub_package]
            │     ├── file_b.py
            │     └── __init__.py
            ├── __init__.py

    And you execute:
        $ mkdir test
        $ pinliner my_package test/my_package.py
        $ cd test
        $ python

    You'll be able to use this file as if it were the real package:
        >>> import my_package
        >>> from my_package import file_a as a_file
        >>> from my_package.sub_package import file_b

    And __init__.py contents will be executed as expected when importing
my_package and you'll be able to access its contents like you would with your
normal package. Modules will also behave as usual.

    By default there is no visible separation between the different modules'
source code, but one can be enabled for clarity with option --tag, which will
include a newline and a <tag:file_path> tag before each of the source files.
""" % __version__

    general_epilog = None

    parser = MyParser(description=general_description,
                      epilog=general_epilog, argument_default='',
                      formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument('packages', nargs='+', help='Packages to inline.')
    parser.add_argument('--version', action='version', version=__version__)
    parser.add_argument('-o', '--outfile', nargs='?',
                        type=argparse.FileType('w'),
                        default=sys.stdout, help='Output file.')
    parser.add_argument('--set-except', default=None, dest='set_hook',
                        action='store_true',
                        help='Force setting handler for uncaught exceptions.')
    parser.add_argument('--no-except', default=None, dest='set_hook',
                        action='store_false',
                        help="Don't set handler for uncaught exceptions.")
    parser.add_argument('--tag', default=False, dest='tagging',
                        action='store_true',
                        help="Mark with <tag:file_path> each added file.")
    parser.add_argument('-d', '--default-pkg', default=None,
                        dest='default_package',
                        help='Define the default package when multiple '
                             'packages are inlined.')
    cfg = parser.parse_args()
    # If user didn't pass a default package determine one ourselves.
    if cfg.default_package is None:
        # For single package file default is the package, for multiple packaged
        # files default is none (act as a bundle).
        def_file = cfg.packages[0] if len(cfg.packages) == 1 else ''
        cfg.default_package = def_file
    return cfg
def is_module(module):
    """Return True if *module* is an existing regular file ending in '.py'.

    The check is crude, but good enough for our purposes.
    """
    return module.endswith('.py') and os.path.isfile(module)
def is_package(package):
    """Return True if *package* is a directory containing an __init__.py."""
    marker = os.path.join(package, '__init__.py')
    return os.path.isdir(package) and os.path.isfile(marker)
def validate_args(cfg):
    """Validate parsed CLI arguments, exiting with an error on failure.

    Ensures every positional argument is a real Python package (exit code 1
    otherwise) and that the default package, if set, is one of them (exit
    code 2 otherwise).  On success cfg.default_package is converted from a
    filesystem path to a bare package name.
    """
    missing = False
    # This is weird now, but in the future we'll allow to inline multiple
    # packages
    for package in cfg.packages:
        if not is_package(package):
            sys.stderr.write('ERROR: %s is not a python package' % package)
            missing = True
    if missing:
        sys.exit(1)
    if cfg.default_package:
        if cfg.default_package not in cfg.packages:
            # BUG FIX: this previously formatted cfg.default_pkg, an attribute
            # that does not exist, so the intended error raised AttributeError
            # instead of printing the message and exiting.
            sys.stderr.write('ERROR: %s is not a valid default package' %
                             cfg.default_package)
            sys.exit(2)
    # Convert the default package from path to package
    cfg.default_package = os.path.split(cfg.default_package)[1]
def main():
    """Command-line entry point: parse, validate, then inline the packages."""
    config = parse_args()
    validate_args(config)
    process_files(config)


if __name__ == '__main__':
    main()
| 2.515625 | 3 |
list_s3_buckets.py | MarijaKalebota/aws-playground | 0 | 13352 | from dotenv import load_dotenv
load_dotenv()
import os
import boto3
#s3 = boto3.resource('s3')
s3 = boto3.resource('s3', aws_access_key_id=os.environ.get("AWS_KEY_ID"),
aws_secret_access_key=os.environ.get("AWS_SECRET_KEY"))
for bucket in s3.buckets.all():
print(bucket.name)
| 2.328125 | 2 |
GAScore/testbench/hold_buffer.py | sharm294/shoal | 1 | 13353 | <reponame>sharm294/shoal
import os
from sonar.testbench import Testbench, Module, TestVector, Thread
from sonar.interfaces import AXIS
from sonar_strToInt import strToInt
# Build a sonar testbench for the GAScore hold_buffer hardware module.
hold_buffer = Testbench.default('hold_buffer')
filepath = os.path.join(os.path.dirname(__file__), 'build/hold_buffer/')

# Device under test: a 20ns clock, active-low reset, and a 16-bit
# dataRelease control input.
dut = Module.default("DUT")
dut.add_clock_port('ap_clk', '20ns')
dut.add_reset_port('ap_rst_n')
dut.add_port('dataRelease_V', 'input', 16)

# AXI-Stream slave (input) and master (output) interfaces, 64-bit data with
# a tkeep sideband channel.
axis_input = AXIS('axis_input', 'slave', 'ap_clk', c_struct='axis_word', c_stream='uaxis_l')
axis_input.port.init_channels('tkeep', 64, True)
dut.add_interface(axis_input)
axis_output = AXIS('axis_output', 'master', 'ap_clk', c_struct='axis_word', c_stream='uaxis_l')
axis_output.port.init_channels('tkeep', 64, True)
dut.add_interface(axis_output)
hold_buffer.add_module(dut)

################################################################################
# Test Vectors
################################################################################

# Initialization thread (added to each test vector to reset everything)
initT = Thread()
initT.init_signals()
initT.wait_negedge('ap_clk')
initT.add_delay('40ns')
initT.set_signal('ap_rst_n', 1)
initT.set_signal('axis_output_tready', 1)

#-------------------------------------------------------------------------------
# Release_A: assert dataRelease_V *before* streaming data in; both words
# should flow straight through to the output.
#-------------------------------------------------------------------------------

Release_A = TestVector()
Release_A.add_thread(initT)

rA_t1 = Thread()
rA_t1.add_delay('100ns')
rA_t1.init_timer()
rA_t1.set_signal('dataRelease_V', 1)
axis_input.writes(rA_t1, [
    {"tdata": 0xDEF, "callTB": 1},
    {"tdata": 0xFED, "callTB": 1},
])
Release_A.add_thread(rA_t1)

# Checker thread: expect the same two words on the output stream.
rA_t2 = Thread()
axis_output.read(rA_t2, 0xDEF)
axis_output.read(rA_t2, 0xFED)
rA_t2.print_elapsed_time("Release_A")
rA_t2.end_vector()
Release_A.add_thread(rA_t2)

#-------------------------------------------------------------------------------
# Release_B: stream the data in first, then assert dataRelease_V; the held
# words should be released to the output afterwards.
#-------------------------------------------------------------------------------

Release_B = TestVector()
Release_B.add_thread(initT)

rB_t1 = Thread()
rB_t1.add_delay('100ns')
rB_t1.init_timer()
axis_input.writes(rB_t1, [
    {"tdata": 0xDEF, "callTB": 1},
    {"tdata": 0xFED, "callTB": 1},
])
rB_t1.set_signal('dataRelease_V', 1)
Release_B.add_thread(rB_t1)

rB_t2 = Thread()
axis_output.read(rB_t2, 0xDEF)
axis_output.read(rB_t2, 0xFED)
rB_t2.print_elapsed_time("Release_B")
rB_t2.end_vector()
Release_B.add_thread(rB_t2)

# Register both vectors and emit the testbench sources.
hold_buffer.add_test_vector(Release_A)
hold_buffer.add_test_vector(Release_B)
hold_buffer.generateTB(filepath, 'all')
| 2.03125 | 2 |
vaxtools/utils/pair.py | menis/vaxtools | 0 | 13354 | #!/usr/bin/env python
# filename: pair.py
#
# Copyright (c) 2015 <NAME>
# License: The MIT license (http://opensource.org/licenses/MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software
# and associated documentation files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge, publish, distribute,
# sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or
# substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
# BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import copy
import sys
import traceback
from Bio.Seq import Seq
from Bio.Alphabet import generic_dna
from abtools import germlines
from abtools.alignment import global_alignment
from abtools.sequence import Sequence
class Pair(object):
    '''
    Holds a pair of sequences, corresponding to HC and LC of a single mAb.

    Input is a list of dicts, with each dict containing sequence information from a single
    chain, formatted as would be returned from a query on a MongoDB database containing
    AbStar output.

    Optional ``h_selection_func``/``l_selection_func`` callables are used to pick a
    single chain when several heavy (or light) records are supplied; otherwise the
    first record of each chain type is used.
    '''
    def __init__(self, seqs, name=None, h_selection_func=None, l_selection_func=None):
        self._seqs = seqs
        self._heavy = None
        self._light = None
        # Partition the input records by chain type.
        self._heavies = [s for s in seqs if s['chain'] == 'heavy']
        self._lights = [s for s in seqs if s['chain'] in ['kappa', 'lambda']]
        self._name = name
        self._fasta = None
        self._sample = None
        self._subject = None
        self._group = None
        self._experiment = None
        self._timepoint = None
        self._is_pair = None
        self._vrc01_like = None
        self._lineage = None
        self._select_heavy = h_selection_func
        self._select_light = l_selection_func

    def __eq__(self, other):
        return (self.heavy, self.light) == (other.heavy, other.light)

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        # BUG FIX: this was previously named '__hash' (missing the trailing
        # underscores), so it never overrode hashing -- and because __eq__ is
        # defined, Python 3 made instances unhashable.
        return hash((self.heavy, self.light))

    @property
    def heavy(self):
        # Lazily pick the heavy chain: use the selection function if given,
        # otherwise the first heavy record; None when no heavy chain exists.
        if self._heavy is None:
            if len(self._heavies) > 0:
                if self._select_heavy is not None:
                    self._heavy = Sequence(self._select_heavy(self._heavies))
                else:
                    self._heavy = Sequence(self._heavies[0])
            else:
                self._heavy = None
        return self._heavy

    @heavy.setter
    def heavy(self, heavy):
        self._heavy = heavy

    @property
    def light(self):
        # Lazily pick the light chain (kappa or lambda), mirroring `heavy`.
        if self._light is None:
            if len(self._lights) > 0:
                if self._select_light is not None:
                    self._light = Sequence(self._select_light(self._lights))
                else:
                    self._light = Sequence(self._lights[0])
            else:
                self._light = None
        return self._light

    @light.setter
    def light(self, light):
        self._light = light

    @property
    def is_pair(self):
        # True only when both chains are present.
        if all([self.heavy is not None, self.light is not None]):
            return True
        return False

    @property
    def lineage(self):
        # Lineage is taken from the heavy chain's clonify assignment.
        if self._lineage is None:
            self._lineage = self.heavy['clonify']['id']
        return self._lineage

    @property
    def vrc01_like(self):
        # VRC01-class definition used here: IGHV1-2 heavy chain paired with a
        # 5-residue light-chain CDR3.
        if self._vrc01_like is None:
            if any([self.heavy is None, self.light is None]):
                self._vrc01_like = False
            else:
                self._vrc01_like = all([self.heavy['v_gene']['gene'] == 'IGHV1-2', self.light['cdr3_len'] == 5])
        return self._vrc01_like

    @property
    def name(self):
        # Fall back to the heavy (then light) chain's sequence ID.
        if self._name is None:
            if self.heavy is not None:
                self._name = self.heavy['seq_id']
            elif self.light is not None:
                self._name = self.light['seq_id']
        return self._name

    @name.setter
    def name(self, name):
        self._name = name

    @property
    def sample(self):
        # Pipe-joined summary of experiment|group|subject|timepoint.
        if self._sample is None:
            slist = []
            if self.experiment is not None:
                slist.append(str(self.experiment))
            if self.group is not None:
                slist.append(str(self.group))
            if self.subject is not None:
                slist.append(str(self.subject))
            if self.timepoint is not None:
                slist.append(str(self.timepoint))
            if slist:
                self._sample = '|'.join(slist)
        return self._sample

    @property
    def subject(self):
        if self._subject is None:
            if self.heavy is not None and 'subject' in list(self.heavy.keys()):
                self._subject = self.heavy['subject']
            elif self.light is not None and 'subject' in list(self.light.keys()):
                self._subject = self.light['subject']
        return self._subject

    @subject.setter
    def subject(self, subject):
        self._subject = subject

    @property
    def group(self):
        if self._group is None:
            if self.heavy is not None and 'group' in list(self.heavy.keys()):
                self._group = self.heavy['group']
            elif self.light is not None and 'group' in list(self.light.keys()):
                self._group = self.light['group']
        return self._group

    @group.setter
    def group(self, group):
        self._group = group

    @property
    def experiment(self):
        if self._experiment is None:
            if self.heavy is not None and 'experiment' in list(self.heavy.keys()):
                self._experiment = self.heavy['experiment']
            elif self.light is not None and 'experiment' in list(self.light.keys()):
                self._experiment = self.light['experiment']
        return self._experiment

    @experiment.setter
    def experiment(self, experiment):
        self._experiment = experiment

    @property
    def timepoint(self):
        if self._timepoint is None:
            if self.heavy is not None and 'timepoint' in list(self.heavy.keys()):
                self._timepoint = self.heavy['timepoint']
            elif self.light is not None and 'timepoint' in list(self.light.keys()):
                self._timepoint = self.light['timepoint']
        return self._timepoint

    @timepoint.setter
    def timepoint(self, timepoint):
        self._timepoint = timepoint

    def refine(self, heavy=True, light=True, species='human'):
        # NOTE(review): the heavy/light flags are currently ignored -- both
        # chains are always refined when present; confirm intended behavior.
        for seq in [s for s in [self.heavy, self.light] if s is not None]:
            try:
                self.remove_ambigs(seq)
                self._refine_v(seq, species)
                self._refine_j(seq, species)
                self._retranslate(seq)
            except Exception:
                # BUG FIX: this handler previously referenced an undefined
                # name 's' (the loop variable is 'seq'), raising NameError
                # while trying to report the refinement failure.
                print('REFINEMENT FAILED: {}, {} chain'.format(seq['seq_id'], seq['chain']))
                print(traceback.format_exception_only(sys.exc_info()[0], sys.exc_info()[1]))

    @staticmethod
    def remove_ambigs(seq):
        """Replace ambiguous positions (N in nt, X in aa) with germline residues."""
        # fix Ns in the nucleotide sequence
        vdj = ''
        for s, g in zip(seq['vdj_nt'], seq['vdj_germ_nt']):
            if s.upper() == 'N':
                vdj += g
            else:
                vdj += s
        seq['vdj_nt'] = vdj
        # fix Xs in the amino acid sequence
        vdj = ''
        for s, g in zip(seq['vdj_aa'], seq['vdj_germ_aa']):
            if s.upper() == 'X':
                vdj += g
            else:
                vdj += s
        seq['vdj_aa'] = vdj

    @staticmethod
    def _refine_v(seq, species):
        '''
        Completes the 5' end of a a truncated sequence with germline nucleotides.

        Input is a MongoDB dict (seq) and the species.
        '''
        vgerm = germlines.get_germline(seq['v_gene']['full'], species)
        aln = global_alignment(seq['vdj_nt'], vgerm)
        prepend = ''
        for s, g in zip(aln.aligned_query, aln.aligned_target):
            if s != '-':
                break
            else:
                prepend += g
        seq['vdj_nt'] = prepend + seq['vdj_nt']

    @staticmethod
    def _refine_j(seq, species):
        '''
        Completes the 3' end of a a truncated sequence with germline nucleotides.

        Input is a MongoDB dict (seq) and the species.
        '''
        jgerm = germlines.get_germline(seq['j_gene']['full'], species)
        aln = global_alignment(seq['vdj_nt'], jgerm)
        append = ''
        for s, g in zip(aln.aligned_query[::-1], aln.aligned_target[::-1]):
            if s != '-':
                break
            else:
                append += g
        seq['vdj_nt'] = seq['vdj_nt'] + append[::-1]

    @staticmethod
    def _retranslate(seq):
        '''
        Retranslates a nucleotide sequence following refinement.

        Input is a Pair sequence (basically a dict of MongoDB output).
        '''
        # Trim to a multiple of 3 before translating.
        if len(seq['vdj_nt']) % 3 != 0:
            trunc = len(seq['vdj_nt']) % 3
            seq['vdj_nt'] = seq['vdj_nt'][:-trunc]
        # NOTE(review): this stores a Bio.Seq object, not a str -- confirm
        # downstream consumers accept that.
        seq['vdj_aa'] = Seq(seq['vdj_nt'], generic_dna).translate()

    def fasta(self, key='vdj_nt', append_chain=True):
        '''
        Returns the sequence pair as a fasta string. If the Pair object contains
        both heavy and light chain sequences, both will be returned as a single string.

        By default, the fasta string contains the 'vdj_nt' sequence for each chain. To change,
        use the <key> option to select an alternate sequence.

        By default, the chain (heavy or light) will be appended to the sequence name:

        >MySequence_heavy

        To just use the pair name (which will result in duplicate sequence names for Pair objects
        with both heavy and light chains), set <append_chain> to False.
        '''
        fastas = []
        for s, chain in [(self.heavy, 'heavy'), (self.light, 'light')]:
            if s is not None:
                c = '_{}'.format(chain) if append_chain else ''
                fastas.append('>{}{}\n{}'.format(s['seq_id'], c, s[key]))
        return '\n'.join(fastas)
def get_pairs(db, collection, experiment=None, subject=None, group=None, name='seq_id',
              delim=None, delim_occurance=1, pairs_only=False):
    '''
    Gets sequences and assigns them to the appropriate mAb pair, based on the sequence name.

    Inputs:
    ::db:: is a pymongo database connection object
    ::collection:: is the collection name, as a string
    If ::subject:: is provided, only sequences with a 'subject' field matching ::subject:: will
    be included. ::subject:: can be either a single subject (as a string) or an iterable
    (list or tuple) of subject strings.  ::group:: and ::experiment:: behave the same way
    for their respective fields.
    ::name:: is the dict key of the field to be used to group the sequences into pairs.
    Default is 'seq_id'
    ::delim:: is an optional delimiter used to truncate the contents of the ::name:: field.
    Default is None, which results in no name truncation.
    ::delim_occurance:: is the occurance of the delimiter at which to trim. Trimming is performed
    as delim.join(name.split(delim)[:delim_occurance]), so setting delim_occurance to -1 will
    trucate after the last occurance of delim. Default is 1.
    ::pairs_only:: setting to True results in only truly paired sequences (pair.is_pair == True)
    will be returned. Default is False.

    Returns a list of Pair objects, one for each mAb pair.
    '''
    match = {}
    # Build the Mongo match clause for each provided filter: a list/tuple
    # becomes an '$in' query, a string matches exactly.  (The original code
    # used `type(x) in (str, str)` -- a Python 2 `(str, unicode)` leftover --
    # replaced here with isinstance, which also accepts subclasses.)
    for field, value in (('subject', subject), ('group', group),
                         ('experiment', experiment)):
        if value is None:
            continue
        if isinstance(value, (list, tuple)):
            match[field] = {'$in': value}
        elif isinstance(value, str):
            match[field] = value
    seqs = list(db[collection].find(match))
    return assign_pairs(seqs, name=name, delim=delim,
                        delim_occurance=delim_occurance, pairs_only=pairs_only)
def assign_pairs(seqs, name='seq_id', delim=None, delim_occurance=1, pairs_only=False):
    '''
    Assigns sequences to the appropriate mAb pair, based on the sequence name.

    Inputs:
    ::seqs:: is a list of dicts, of the format returned by querying a MongoDB containing
    Abstar output.
    ::name:: is the dict key of the field to be used to group the sequences into pairs.
    Default is 'seq_id'
    ::delim:: is an optional delimiter used to truncate the contents of the ::name:: field.
    Default is None, which results in no name truncation.
    ::delim_occurance:: is the occurance of the delimiter at which to trim. Trimming is performed
    as delim.join(name.split(delim)[:delim_occurance]), so setting delim_occurance to -1 will
    trucate after the last occurance of delim. Default is 1.
    ::pairs_only:: setting to True results in only truly paired sequences (pair.is_pair == True)
    will be returned. Default is False.

    Returns a list of Pair objects, one for each mAb pair.
    '''
    # Group the sequence records by (optionally truncated) name.
    grouped = {}
    for seq in seqs:
        key = seq[name]
        if delim is not None:
            key = delim.join(key.split(delim)[:delim_occurance])
        grouped.setdefault(key, []).append(seq)
    pairs = [Pair(members, name=key) for key, members in grouped.items()]
    if pairs_only:
        pairs = [p for p in pairs if p.is_pair]
    return pairs
def deduplicate(pairs, aa=False, ignore_primer_regions=False):
    '''
    Removes duplicate sequences from a list of Pair objects.

    If a Pair has heavy and light chains, both chains must identically match heavy and light chains
    from another Pair to be considered a duplicate. If a Pair has only a single chain,
    identical matches to that chain will cause the single chain Pair to be considered a duplicate,
    even if the comparison Pair has both chains.

    Note that identical sequences are identified by simple string comparison, so sequences of
    different length that are identical over the entirety of the shorter sequence are not
    considered duplicates.

    By default, comparison is made on the nucleotide sequence. To use the amino acid sequence instead,
    set aa=True.  With ignore_primer_regions=True, a fixed number of residues is trimmed
    from both ends before comparing (4 aa or 12 nt).
    '''
    nr_pairs = []
    # Process fully-paired entries first so that single-chain entries are
    # discarded in favor of their paired counterparts.
    just_pairs = [p for p in pairs if p.is_pair]
    single_chains = [p for p in pairs if not p.is_pair]
    _pairs = just_pairs + single_chains
    for p in _pairs:
        duplicates = []
        for nr in nr_pairs:
            identical = True
            vdj = 'vdj_aa' if aa else 'vdj_nt'
            # Residues to trim from each end when ignoring primer regions.
            offset = 4 if aa else 12
            if p.heavy is not None:
                if nr.heavy is None:
                    identical = False
                else:
                    heavy = p.heavy[vdj][offset:-offset] if ignore_primer_regions else p.heavy[vdj]
                    nr_heavy = nr.heavy[vdj][offset:-offset] if ignore_primer_regions else nr.heavy[vdj]
                    if heavy != nr_heavy:
                        identical = False
            if p.light is not None:
                if nr.light is None:
                    identical = False
                else:
                    light = p.light[vdj][offset:-offset] if ignore_primer_regions else p.light[vdj]
                    nr_light = nr.light[vdj][offset:-offset] if ignore_primer_regions else nr.light[vdj]
                    if light != nr_light:
                        identical = False
            duplicates.append(identical)
        # Keep the pair only if it matched none of the already-kept pairs.
        if any(duplicates):
            continue
        else:
            nr_pairs.append(p)
    return nr_pairs
def refine(pairs, heavy=True, light=True, species='human'):
    """Return deep copies of *pairs* with each pair's refine() applied.

    The input list and its Pair objects are left untouched.
    """
    copied = copy.deepcopy(pairs)
    for pair in copied:
        pair.refine(heavy, light, species)
    return copied
| 1.804688 | 2 |
t3f/riemannian.py | robol/t3f | 0 | 13355 | <filename>t3f/riemannian.py
import tensorflow as tf
from t3f.tensor_train import TensorTrain
from t3f.tensor_train_batch import TensorTrainBatch
from t3f import shapes
from t3f import decompositions
def project_sum(what, where, weights=None):
  """Project sum of `what` TTs on the tangent space of `where` TT.

  project_sum(what, x) = P_x(what)
  project_sum(batch_what, x) = P_x(\sum_i batch_what[i])
  project_sum(batch_what, x, weights) = P_x(\sum_j weights[j] * batch_what[j])

  This function implements the algorithm from the paper [1], theorem 3.1.

  [1] <NAME>, <NAME> and <NAME>, Time integration of
    Tensor Trains.

  Args:
    what: TensorTrain or TensorTrainBatch. In the case of batch returns
      projection of the sum of elements in the batch.
    where: TensorTrain, TT-tensor or TT-matrix on which tangent space to project
    weights: python list or tf.Tensor of numbers or None, weights of the sum

  Returns:
    a TensorTrain with the TT-ranks equal 2 * tangent_space_tens.get_tt_ranks()

  Complexity:
    O(d r_where^3 m) for orthogonalizing the TT-cores of where
    +O(batch_size d r_what r_where n (r_what + r_where))
    d is the number of TT-cores (what.ndims());
    r_what is the largest TT-rank of what max(what.get_tt_rank())
    r_where is the largest TT-rank of where
    n is the size of the axis dimension of what and where e.g.
      for a tensor of size 4 x 4 x 4, n is 4;
      for a 9 x 64 matrix of raw shape (3, 3, 3) x (4, 4, 4) n is 12
  """
  # Always work with batch of TT objects for simplicity.
  what = shapes.expand_batch_dim(what)

  if weights is not None:
    weights = tf.convert_to_tensor(weights, dtype=where.dtype)

  if not isinstance(where, TensorTrain):
    raise ValueError('The first argument should be a TensorTrain object, got '
                     '"%s".' % where)

  if where.get_raw_shape() != what.get_raw_shape():
    raise ValueError('The shapes of the tensor we want to project and of the '
                     'tensor on which tangent space we want to project should '
                     'match, got %s and %s.' %
                     (where.get_raw_shape(),
                      what.get_raw_shape()))

  # Accept either direction of dtype compatibility.
  dtypes_compatible = (where.dtype.is_compatible_with(what.dtype) or
                       what.dtype.is_compatible_with(where.dtype))
  if not dtypes_compatible:
    raise ValueError('Dtypes of the arguments should coincide, got %s and %s.' %
                     (where.dtype,
                      what.dtype))

  # U (left-orthogonal) and V (right-orthogonal) cores of `where` span the
  # tangent space; the projection is assembled from them plus delta cores.
  left_tangent_space_tens = decompositions.orthogonalize_tt_cores(
      where)
  right_tangent_space_tens = decompositions.orthogonalize_tt_cores(
      left_tangent_space_tens, left_to_right=False)

  ndims = where.ndims()
  dtype = where.dtype
  raw_shape = shapes.lazy_raw_shape(where)
  batch_size = shapes.lazy_batch_size(what)
  right_tangent_tt_ranks = shapes.lazy_tt_ranks(right_tangent_space_tens)
  left_tangent_tt_ranks = shapes.lazy_tt_ranks(left_tangent_space_tens)

  # For einsum notation: 'ij' for TT-matrices (two mode indices per core),
  # 'i' for TT-tensors (a single mode index).
  mode_str = 'ij' if where.is_tt_matrix() else 'i'
  right_rank_dim = where.right_tt_rank_dim
  left_rank_dim = where.left_tt_rank_dim
  # If weights is a matrix of shape (batch, output_batch) with more than one
  # column, the result is a batch of projections, one per weight column.
  if weights is not None:
    weights_shape = weights.get_shape()
    output_is_batch = len(weights_shape) > 1 and weights_shape[1] > 1
  else:
    output_is_batch = False
  output_batch_str = 'o' if output_is_batch else ''
  if output_is_batch:
    # Account for the extra leading batch axis of the result cores.
    right_rank_dim += 1
    left_rank_dim += 1
    output_batch_size = weights.get_shape()[1].value
  # Prepare rhs vectors.
  # rhs[core_idx] is of size
  #   batch_size x tensor_tt_ranks[core_idx] x tangent_tt_ranks[core_idx]
  rhs = [None] * (ndims + 1)
  rhs[ndims] = tf.ones((batch_size, 1, 1), dtype=dtype)
  for core_idx in range(ndims - 1, 0, -1):
    tens_core = what.tt_cores[core_idx]
    right_tang_core = right_tangent_space_tens.tt_cores[core_idx]
    einsum_str = 'sa{0}b,sbd,c{0}d->sac'.format(mode_str)
    rhs[core_idx] = tf.einsum(einsum_str, tens_core, rhs[core_idx + 1],
                              right_tang_core)

  # Prepare lhs vectors.
  # lhs[core_idx] is of size
  #   batch_size x tangent_tt_ranks[core_idx] x tensor_tt_ranks[core_idx]
  lhs = [None] * (ndims + 1)
  lhs[0] = tf.ones((batch_size, 1, 1), dtype=dtype)
  for core_idx in range(ndims - 1):
    tens_core = what.tt_cores[core_idx]
    left_tang_core = left_tangent_space_tens.tt_cores[core_idx]
    einsum_str = 'sab,a{0}c,sb{0}d->scd'.format(mode_str)
    lhs[core_idx + 1] = tf.einsum(einsum_str, lhs[core_idx], left_tang_core,
                                  tens_core)

  # Left to right sweep: compute the delta core dP_i for every position and
  # assemble the block-structured result cores.
  res_cores_list = []
  for core_idx in range(ndims):
    tens_core = what.tt_cores[core_idx]
    left_tang_core = left_tangent_space_tens.tt_cores[core_idx]
    right_tang_core = right_tangent_space_tens.tt_cores[core_idx]

    if core_idx < ndims - 1:
      einsum_str = 'sab,sb{0}c->sa{0}c'.format(mode_str)
      proj_core = tf.einsum(einsum_str, lhs[core_idx], tens_core)
      # Subtract the component already representable by the left cores
      # (gauge condition of the tangent-space parametrization).
      einsum_str = 'a{0}b,sbc->sa{0}c'.format(mode_str)
      proj_core -= tf.einsum(einsum_str, left_tang_core, lhs[core_idx + 1])
      if weights is None:
        # Sum over the input batch index 's' while contracting with rhs.
        einsum_str = 'sa{0}b,sbc->a{0}c'.format(mode_str)
        proj_core = tf.einsum(einsum_str, proj_core, rhs[core_idx + 1])
      else:
        # NOTE(review): the second .format argument is unused by this
        # pattern; harmless.
        einsum_str = 'sa{0}b,sbc->sa{0}c'.format(mode_str, output_batch_str)
        proj_core_s = tf.einsum(einsum_str, proj_core, rhs[core_idx + 1])
        # Weighted sum over 's'; keeps an output batch axis iff weights has
        # more than one column ('o' index).
        einsum_str = 's{1},sa{0}c->{1}a{0}c'.format(mode_str, output_batch_str)
        proj_core = tf.einsum(einsum_str, weights, proj_core_s)

    if core_idx == ndims - 1:
      if weights is None:
        einsum_str = 'sab,sb{0}c->a{0}c'.format(mode_str)
        proj_core = tf.einsum(einsum_str, lhs[core_idx], tens_core)
      else:
        einsum_str = 'sab,sb{0}c->sa{0}c'.format(mode_str, output_batch_str)
        proj_core_s = tf.einsum(einsum_str, lhs[core_idx], tens_core)
        einsum_str = 's{1},sa{0}c->{1}a{0}c'.format(mode_str, output_batch_str)
        proj_core = tf.einsum(einsum_str, weights, proj_core_s)

    if output_is_batch:
      # Add batch dimension of size output_batch_size to left_tang_core and
      # right_tang_core
      extended_left_tang_core = tf.expand_dims(left_tang_core, 0)
      extended_right_tang_core = tf.expand_dims(right_tang_core, 0)
      if where.is_tt_matrix():
        extended_left_tang_core = tf.tile(extended_left_tang_core,
                                          [output_batch_size, 1, 1, 1, 1])
        extended_right_tang_core = tf.tile(extended_right_tang_core,
                                          [output_batch_size, 1, 1, 1, 1])
      else:
        extended_left_tang_core = tf.tile(extended_left_tang_core,
                                          [output_batch_size, 1, 1, 1])
        extended_right_tang_core = tf.tile(extended_right_tang_core,
                                           [output_batch_size, 1, 1, 1])
    else:
      extended_left_tang_core = left_tang_core
      extended_right_tang_core = right_tang_core

    # Assemble the result core in block form:
    #   first core:  [dP_1, U_1]
    #   middle core: [[V_i, 0], [dP_i, U_i]]
    #   last core:   [V_d; dP_d]
    # which doubles the TT-ranks of `where`.
    if core_idx == 0:
      res_core = tf.concat((proj_core, extended_left_tang_core),
                           axis=right_rank_dim)
    elif core_idx == ndims - 1:
      res_core = tf.concat((extended_right_tang_core, proj_core),
                           axis=left_rank_dim)
    else:
      rank_1 = right_tangent_tt_ranks[core_idx]
      rank_2 = left_tangent_tt_ranks[core_idx + 1]
      if where.is_tt_matrix():
        mode_size_n = raw_shape[0][core_idx]
        mode_size_m = raw_shape[1][core_idx]
        shape = [rank_1, mode_size_n, mode_size_m, rank_2]
      else:
        mode_size = raw_shape[0][core_idx]
        shape = [rank_1, mode_size, rank_2]
      if output_is_batch:
        shape = [output_batch_size] + shape
      zeros = tf.zeros(shape, dtype)
      upper = tf.concat((extended_right_tang_core, zeros), axis=right_rank_dim)
      lower = tf.concat((proj_core, extended_left_tang_core),
                        axis=right_rank_dim)
      res_core = tf.concat((upper, lower), axis=left_rank_dim)
    res_cores_list.append(res_core)
  # TODO: TT-ranks.
  if output_is_batch:
    res = TensorTrainBatch(res_cores_list, where.get_raw_shape(),
                           batch_size=output_batch_size)
  else:
    res = TensorTrain(res_cores_list, where.get_raw_shape())

  # Mark the result as a projection so that tangent-space-only operations
  # (add_n_projected, pairwise_flat_inner_projected) can validate inputs.
  res.projection_on = where
  return res
def project(what, where):
  """Project `what` TTs on the tangent space of `where` TT.

  project(what, x) = P_x(what)
  project(batch_what, x) = batch(P_x(batch_what[0]), ..., P_x(batch_what[N]))

  This function implements the algorithm from the paper [1], theorem 3.1.

  [1] <NAME>, <NAME> and <NAME>, Time integration of
    Tensor Trains.

  Args:
    what: TensorTrain or TensorTrainBatch. In the case of batch returns
      batch with projection of each individual tensor.
    where: TensorTrain, TT-tensor or TT-matrix on which tangent space to project

  Returns:
    a TensorTrain with the TT-ranks equal 2 * tangent_space_tens.get_tt_ranks()

  Complexity:
    O(d r_where^3 m) for orthogonalizing the TT-cores of where
    +O(batch_size d r_what r_where n (r_what + r_where))
    d is the number of TT-cores (what.ndims());
    r_what is the largest TT-rank of what max(what.get_tt_rank())
    r_where is the largest TT-rank of where
    n is the size of the axis dimension of what and where e.g.
      for a tensor of size 4 x 4 x 4, n is 4;
      for a 9 x 64 matrix of raw shape (3, 3, 3) x (4, 4, 4) n is 12
  """
  if not isinstance(where, TensorTrain):
    raise ValueError('The first argument should be a TensorTrain object, got '
                     '"%s".' % where)

  if where.get_raw_shape() != what.get_raw_shape():
    raise ValueError('The shapes of the tensor we want to project and of the '
                     'tensor on which tangent space we want to project should '
                     'match, got %s and %s.' %
                     (where.get_raw_shape(),
                      what.get_raw_shape()))

  # Accept either direction of dtype compatibility.
  dtypes_compatible = (where.dtype.is_compatible_with(what.dtype) or
                       what.dtype.is_compatible_with(where.dtype))
  if not dtypes_compatible:
    raise ValueError('Dtypes of the arguments should coincide, got %s and %s.' %
                     (where.dtype,
                      what.dtype))

  # U (left-orthogonal) and V (right-orthogonal) cores of `where` span the
  # tangent space.
  left_tangent_space_tens = decompositions.orthogonalize_tt_cores(
      where)
  right_tangent_space_tens = decompositions.orthogonalize_tt_cores(
      left_tangent_space_tens, left_to_right=False)

  ndims = where.ndims()
  dtype = where.dtype
  raw_shape = shapes.lazy_raw_shape(where)
  right_tangent_tt_ranks = shapes.lazy_tt_ranks(right_tangent_space_tens)
  left_tangent_tt_ranks = shapes.lazy_tt_ranks(left_tangent_space_tens)

  # For einsum notation: 'ij' for TT-matrices, 'i' for TT-tensors.
  mode_str = 'ij' if where.is_tt_matrix() else 'i'
  # Rank axes are taken from `what` (before batch expansion): a batch input
  # yields batch output cores whose rank axes are shifted by one.
  right_rank_dim = what.right_tt_rank_dim
  left_rank_dim = what.left_tt_rank_dim
  output_is_batch = isinstance(what, TensorTrainBatch)
  if output_is_batch:
    output_batch_size = what.batch_size

  # Always work with batch of TT objects for simplicity.
  what = shapes.expand_batch_dim(what)
  batch_size = shapes.lazy_batch_size(what)

  # Prepare rhs vectors.
  # rhs[core_idx] is of size
  #   batch_size x tensor_tt_ranks[core_idx] x tangent_tt_ranks[core_idx]
  rhs = [None] * (ndims + 1)
  rhs[ndims] = tf.ones((batch_size, 1, 1), dtype=dtype)
  for core_idx in range(ndims - 1, 0, -1):
    tens_core = what.tt_cores[core_idx]
    right_tang_core = right_tangent_space_tens.tt_cores[core_idx]
    einsum_str = 'sa{0}b,sbd,c{0}d->sac'.format(mode_str)
    rhs[core_idx] = tf.einsum(einsum_str, tens_core, rhs[core_idx + 1],
                              right_tang_core)

  # Prepare lhs vectors.
  # lhs[core_idx] is of size
  #   batch_size x tangent_tt_ranks[core_idx] x tensor_tt_ranks[core_idx]
  lhs = [None] * (ndims + 1)
  lhs[0] = tf.ones((batch_size, 1, 1), dtype=dtype)
  for core_idx in range(ndims - 1):
    tens_core = what.tt_cores[core_idx]
    left_tang_core = left_tangent_space_tens.tt_cores[core_idx]
    einsum_str = 'sab,a{0}c,sb{0}d->scd'.format(mode_str)
    lhs[core_idx + 1] = tf.einsum(einsum_str, lhs[core_idx], left_tang_core,
                                  tens_core)

  # Left to right sweep: compute the delta core dP_i for every position and
  # assemble the block-structured result cores.
  res_cores_list = []
  for core_idx in range(ndims):
    tens_core = what.tt_cores[core_idx]
    left_tang_core = left_tangent_space_tens.tt_cores[core_idx]
    right_tang_core = right_tangent_space_tens.tt_cores[core_idx]

    if core_idx < ndims - 1:
      einsum_str = 'sab,sb{0}c->sa{0}c'.format(mode_str)
      proj_core = tf.einsum(einsum_str, lhs[core_idx], tens_core)
      # Subtract the component already representable by the left cores
      # (gauge condition of the tangent-space parametrization).
      einsum_str = 'a{0}b,sbc->sa{0}c'.format(mode_str)
      proj_core -= tf.einsum(einsum_str, left_tang_core, lhs[core_idx + 1])
      # For batch output keep the batch index 's'; otherwise sum it out.
      if output_is_batch:
        einsum_str = 'sa{0}b,sbc->sa{0}c'.format(mode_str)
      else:
        einsum_str = 'sa{0}b,sbc->a{0}c'.format(mode_str)
      proj_core = tf.einsum(einsum_str, proj_core, rhs[core_idx + 1])

    if core_idx == ndims - 1:
      if output_is_batch:
        einsum_str = 'sab,sb{0}c->sa{0}c'.format(mode_str)
      else:
        einsum_str = 'sab,sb{0}c->a{0}c'.format(mode_str)
      proj_core = tf.einsum(einsum_str, lhs[core_idx], tens_core)

    if output_is_batch:
      # Add batch dimension of size output_batch_size to left_tang_core and
      # right_tang_core
      extended_left_tang_core = tf.expand_dims(left_tang_core, 0)
      extended_right_tang_core = tf.expand_dims(right_tang_core, 0)
      if where.is_tt_matrix():
        extended_left_tang_core = tf.tile(extended_left_tang_core,
                                          [output_batch_size, 1, 1, 1, 1])
        extended_right_tang_core = tf.tile(extended_right_tang_core,
                                          [output_batch_size, 1, 1, 1, 1])
      else:
        extended_left_tang_core = tf.tile(extended_left_tang_core,
                                          [output_batch_size, 1, 1, 1])
        extended_right_tang_core = tf.tile(extended_right_tang_core,
                                           [output_batch_size, 1, 1, 1])
    else:
      extended_left_tang_core = left_tang_core
      extended_right_tang_core = right_tang_core

    # Assemble the result core in block form:
    #   first core:  [dP_1, U_1]
    #   middle core: [[V_i, 0], [dP_i, U_i]]
    #   last core:   [V_d; dP_d]
    # which doubles the TT-ranks of `where`.
    if core_idx == 0:
      res_core = tf.concat((proj_core, extended_left_tang_core),
                           axis=right_rank_dim)
    elif core_idx == ndims - 1:
      res_core = tf.concat((extended_right_tang_core, proj_core),
                           axis=left_rank_dim)
    else:
      rank_1 = right_tangent_tt_ranks[core_idx]
      rank_2 = left_tangent_tt_ranks[core_idx + 1]
      if where.is_tt_matrix():
        mode_size_n = raw_shape[0][core_idx]
        mode_size_m = raw_shape[1][core_idx]
        shape = [rank_1, mode_size_n, mode_size_m, rank_2]
      else:
        mode_size = raw_shape[0][core_idx]
        shape = [rank_1, mode_size, rank_2]
      if output_is_batch:
        shape = [output_batch_size] + shape
      zeros = tf.zeros(shape, dtype)
      upper = tf.concat((extended_right_tang_core, zeros), axis=right_rank_dim)
      lower = tf.concat((proj_core, extended_left_tang_core),
                        axis=right_rank_dim)
      res_core = tf.concat((upper, lower), axis=left_rank_dim)
    res_cores_list.append(res_core)
  # TODO: TT-ranks.
  if output_is_batch:
    res = TensorTrainBatch(res_cores_list, where.get_raw_shape(),
                           batch_size=output_batch_size)
  else:
    res = TensorTrain(res_cores_list, where.get_raw_shape())

  # Mark the result as a projection so that tangent-space-only operations
  # (add_n_projected, pairwise_flat_inner_projected) can validate inputs.
  res.projection_on = where
  return res
def project_matmul(what, where, matrix):
  """Project `matrix` * `what` TTs on the tangent space of `where` TT.

  project(what, x) = P_x(what)
  project(batch_what, x) = batch(P_x(batch_what[0]), ..., P_x(batch_what[N]))

  This function implements the algorithm from the paper [1], theorem 3.1.

  [1] <NAME>, <NAME> and <NAME>, Time integration of
    Tensor Trains.

  Args:
    what: TensorTrain or TensorTrainBatch. In the case of batch returns
      batch with projection of each individual tensor.
    where: TensorTrain, TT-tensor or TT-matrix on which tangent space to project
    matrix: TensorTrain, TT-matrix to multiply by what

  Returns:
    a TensorTrain with the TT-ranks equal 2 * tangent_space_tens.get_tt_ranks()

  Complexity:
    O(d r_where^3 m) for orthogonalizing the TT-cores of where
    +O(batch_size d R r_what r_where (n r_what + n m R + m r_where))
    d is the number of TT-cores (what.ndims());
    r_what is the largest TT-rank of what max(what.get_tt_rank())
    r_where is the largest TT-rank of where
    matrix is of TT-rank R and of raw-shape (m, m, ..., m) x (n, n, ..., n).
  """
  if not isinstance(where, TensorTrain):
    raise ValueError('The first argument should be a TensorTrain object, got '
                     '"%s".' % where)

  if where.get_raw_shape() != what.get_raw_shape():
    raise ValueError('The shapes of the tensor we want to project and of the '
                     'tensor on which tangent space we want to project should '
                     'match, got %s and %s.' %
                     (where.get_raw_shape(),
                      what.get_raw_shape()))

  # Accept either direction of dtype compatibility.
  dtypes_compatible = (where.dtype.is_compatible_with(what.dtype) or
                       what.dtype.is_compatible_with(where.dtype))
  if not dtypes_compatible:
    raise ValueError('Dtypes of the arguments should coincide, got %s and %s.' %
                     (where.dtype,
                      what.dtype))

  # U (left-orthogonal) and V (right-orthogonal) cores of `where` span the
  # tangent space.
  left_tangent_space_tens = decompositions.orthogonalize_tt_cores(
      where)
  right_tangent_space_tens = decompositions.orthogonalize_tt_cores(
      left_tangent_space_tens, left_to_right=False)

  ndims = where.ndims()
  dtype = where.dtype
  raw_shape = shapes.lazy_raw_shape(where)
  # NOTE(review): batch_size is read before expand_batch_dim below —
  # presumably lazy_batch_size handles the non-batch case; confirm.
  batch_size = shapes.lazy_batch_size(what)
  right_tangent_tt_ranks = shapes.lazy_tt_ranks(right_tangent_space_tens)
  left_tangent_tt_ranks = shapes.lazy_tt_ranks(left_tangent_space_tens)

  # For einsum notation.
  right_rank_dim = what.right_tt_rank_dim
  left_rank_dim = what.left_tt_rank_dim
  output_is_batch = isinstance(what, TensorTrainBatch)
  if output_is_batch:
    output_batch_size = what.batch_size

  # Always work with batch of TT objects for simplicity.
  what = shapes.expand_batch_dim(what)

  # Prepare rhs vectors.
  # rhs[core_idx] is of size
  #   batch_size x tensor_tt_ranks[core_idx] x matrix_tt_ranks[core_idx] x tangent_tt_ranks[core_idx]
  rhs = [None] * (ndims + 1)
  rhs[ndims] = tf.ones((batch_size, 1, 1, 1), dtype=dtype)
  for core_idx in range(ndims - 1, 0, -1):
    tens_core = what.tt_cores[core_idx]
    right_tang_core = right_tangent_space_tens.tt_cores[core_idx]
    matrix_core = matrix.tt_cores[core_idx]
    rhs[core_idx] = tf.einsum('bije,cikf,sdef,sajkd->sabc', matrix_core,
                              right_tang_core, rhs[core_idx + 1], tens_core)
  # Prepare lhs vectors.
  # lhs[core_idx] is of size
  #   batch_size x tangent_tt_ranks[core_idx] x matrix_tt_ranks[core_idx] x tensor_tt_ranks[core_idx]
  lhs = [None] * (ndims + 1)
  lhs[0] = tf.ones((batch_size, 1, 1, 1), dtype=dtype)
  for core_idx in range(ndims - 1):
    tens_core = what.tt_cores[core_idx]
    left_tang_core = left_tangent_space_tens.tt_cores[core_idx]
    matrix_core = matrix.tt_cores[core_idx]
    # TODO: brutforce order of indices in lhs??
    lhs[core_idx + 1] = tf.einsum('bije,aikd,sabc,scjkf->sdef', matrix_core,
                                  left_tang_core, lhs[core_idx], tens_core)

  # Left to right sweep: compute the delta core dP_i for every position of
  # the product matrix * what and assemble the block-structured result cores.
  res_cores_list = []
  for core_idx in range(ndims):
    tens_core = what.tt_cores[core_idx]
    matrix_core = matrix.tt_cores[core_idx]
    left_tang_core = left_tangent_space_tens.tt_cores[core_idx]
    right_tang_core = right_tangent_space_tens.tt_cores[core_idx]

    if core_idx < ndims - 1:
      proj_core = tf.einsum('scjke,sabc,bijd->saikde', tens_core,
                            lhs[core_idx], matrix_core)
      # Subtract the component already representable by the left cores
      # (gauge condition of the tangent-space parametrization).
      proj_core -= tf.einsum('aikb,sbcd->saikcd', left_tang_core,
                             lhs[core_idx + 1])
      proj_core = tf.einsum('saikcb,sbcd->saikd', proj_core, rhs[core_idx + 1])

    if core_idx == ndims - 1:
      # d and e dimensions take 1 value, since its the last rank.
      # To make the result shape (?, ?, ?, 1), we are summing d and leaving e,
      # but we could have done the opposite -- sum e and leave d.
      proj_core = tf.einsum('sabc,bijd,scjke->saike', lhs[core_idx], matrix_core,
                            tens_core)

    if output_is_batch:
      # Add batch dimension of size output_batch_size to left_tang_core and
      # right_tang_core
      extended_left_tang_core = tf.expand_dims(left_tang_core, 0)
      extended_right_tang_core = tf.expand_dims(right_tang_core, 0)
      extended_left_tang_core = tf.tile(extended_left_tang_core,
                                        [output_batch_size, 1, 1, 1, 1])
      extended_right_tang_core = tf.tile(extended_right_tang_core,
                                         [output_batch_size, 1, 1, 1, 1])
    else:
      extended_left_tang_core = left_tang_core
      extended_right_tang_core = right_tang_core

    # Assemble the result core in block form:
    #   first core:  [dP_1, U_1]
    #   middle core: [[V_i, 0], [dP_i, U_i]]
    #   last core:   [V_d; dP_d]
    # which doubles the TT-ranks of `where`.
    if core_idx == 0:
      res_core = tf.concat((proj_core, extended_left_tang_core),
                           axis=right_rank_dim)
    elif core_idx == ndims - 1:
      res_core = tf.concat((extended_right_tang_core, proj_core),
                           axis=left_rank_dim)
    else:
      rank_1 = right_tangent_tt_ranks[core_idx]
      rank_2 = left_tangent_tt_ranks[core_idx + 1]
      mode_size_n = raw_shape[0][core_idx]
      mode_size_m = raw_shape[1][core_idx]
      shape = [rank_1, mode_size_n, mode_size_m, rank_2]
      if output_is_batch:
        shape = [output_batch_size] + shape
      zeros = tf.zeros(shape, dtype)
      upper = tf.concat((extended_right_tang_core, zeros),
                        axis=right_rank_dim)
      lower = tf.concat((proj_core, extended_left_tang_core),
                        axis=right_rank_dim)
      res_core = tf.concat((upper, lower), axis=left_rank_dim)
    res_cores_list.append(res_core)

  # TODO: TT-ranks.
  if output_is_batch:
    res = TensorTrainBatch(res_cores_list, where.get_raw_shape(),
                           batch_size=output_batch_size)
  else:
    res = TensorTrain(res_cores_list, where.get_raw_shape())

  # Mark the result as a projection so that tangent-space-only operations
  # can validate inputs.
  res.projection_on = where
  return res
def pairwise_flat_inner_projected(projected_tt_vectors_1,
                                  projected_tt_vectors_2):
  """Scalar products between two batches of TTs from the same tangent space.

  res[i, j] = t3f.flat_inner(projected_tt_vectors_1[i], projected_tt_vectors_2[j]).

  pairwise_flat_inner_projected(projected_tt_vectors_1, projected_tt_vectors_2)
  is equivalent to
    pairwise_flat_inner(projected_tt_vectors_1, projected_tt_vectors_2)
  , but works only on objects from the same tangent space and is much faster
  than general pairwise_flat_inner.

  Args:
    projected_tt_vectors_1: TensorTrainBatch of tensors projected on the same
      tangent space as projected_tt_vectors_2.
    projected_tt_vectors_2: TensorTrainBatch.

  Returns:
    tf.tensor with the scalar product matrix.

  Complexity:
    O(batch_size^2 d r^2 n), where
    d is the number of TT-cores (projected_tt_vectors_1.ndims());
    r is the largest TT-rank max(projected_tt_vectors_1.get_tt_rank())
      (i.e. 2 * {the TT-rank of the object we projected vectors onto}.
    and n is the size of the axis dimension, e.g.
      for a tensor of size 4 x 4 x 4, n is 4;
      for a 9 x 64 matrix of raw shape (3, 3, 3) x (4, 4, 4) n is 12.
  """
  if not hasattr(projected_tt_vectors_1, 'projection_on') or \
      not hasattr(projected_tt_vectors_2, 'projection_on'):
    raise ValueError('Both arguments should be projections on the tangent '
                     'space of some other TT-object. All projection* functions '
                     'leave .projection_on field in the resulting TT-object '
                     'which is not present in the arguments you\'ve provided')

  if projected_tt_vectors_1.projection_on != projected_tt_vectors_2.projection_on:
    raise ValueError('Both arguments should be projections on the tangent '
                     'space of the same TT-object. The provided arguments are '
                     'projections on different TT-objects (%s and %s). Or at '
                     'least the pointers are different.' %
                     (projected_tt_vectors_1.projection_on,
                      projected_tt_vectors_2.projection_on))

  # Always work with batches of objects for simplicity.
  projected_tt_vectors_1 = shapes.expand_batch_dim(projected_tt_vectors_1)
  projected_tt_vectors_2 = shapes.expand_batch_dim(projected_tt_vectors_2)

  # Tangent-space elements have block-structured cores of doubled rank; only
  # the delta blocks (dP_i) differ between elements, so it is enough to
  # accumulate scalar products of the corresponding delta slices:
  #   first core:  the left half of the right-rank axis,
  #   middle cores: the lower-left block,
  #   last core:   the lower half of the left-rank axis.
  ndims = projected_tt_vectors_1.ndims()
  tt_ranks = shapes.lazy_tt_ranks(projected_tt_vectors_1)

  if projected_tt_vectors_1.is_tt_matrix():
    right_size = tt_ranks[1] // 2
    curr_core_1 = projected_tt_vectors_1.tt_cores[0]
    curr_core_2 = projected_tt_vectors_2.tt_cores[0]
    curr_du_1 = curr_core_1[:, :, :, :, :right_size]
    curr_du_2 = curr_core_2[:, :, :, :, :right_size]
    res = tf.einsum('paijb,qaijb->pq', curr_du_1, curr_du_2)
    for core_idx in range(1, ndims):
      left_size = tt_ranks[core_idx] // 2
      right_size = tt_ranks[core_idx + 1] // 2
      curr_core_1 = projected_tt_vectors_1.tt_cores[core_idx]
      curr_core_2 = projected_tt_vectors_2.tt_cores[core_idx]
      curr_du_1 = curr_core_1[:, left_size:, :, :, :right_size]
      curr_du_2 = curr_core_2[:, left_size:, :, :, :right_size]
      res += tf.einsum('paijb,qaijb->pq', curr_du_1, curr_du_2)

    left_size = tt_ranks[-2] // 2
    curr_core_1 = projected_tt_vectors_1.tt_cores[-1]
    curr_core_2 = projected_tt_vectors_2.tt_cores[-1]
    curr_du_1 = curr_core_1[:, left_size:, :, :, :]
    curr_du_2 = curr_core_2[:, left_size:, :, :, :]
    res += tf.einsum('paijb,qaijb->pq', curr_du_1, curr_du_2)
  else:
    # Working with TT-tensor, not TT-matrix.
    right_size = tt_ranks[1] // 2
    curr_core_1 = projected_tt_vectors_1.tt_cores[0]
    curr_core_2 = projected_tt_vectors_2.tt_cores[0]
    curr_du_1 = curr_core_1[:, :, :, :right_size]
    curr_du_2 = curr_core_2[:, :, :, :right_size]
    res = tf.einsum('paib,qaib->pq', curr_du_1, curr_du_2)
    for core_idx in range(1, ndims):
      left_size = tt_ranks[core_idx] // 2
      right_size = tt_ranks[core_idx + 1] // 2
      curr_core_1 = projected_tt_vectors_1.tt_cores[core_idx]
      curr_core_2 = projected_tt_vectors_2.tt_cores[core_idx]
      curr_du_1 = curr_core_1[:, left_size:, :, :right_size]
      curr_du_2 = curr_core_2[:, left_size:, :, :right_size]
      res += tf.einsum('paib,qaib->pq', curr_du_1, curr_du_2)

    left_size = tt_ranks[-2] // 2
    curr_core_1 = projected_tt_vectors_1.tt_cores[-1]
    curr_core_2 = projected_tt_vectors_2.tt_cores[-1]
    curr_du_1 = curr_core_1[:, left_size:, :, :]
    curr_du_2 = curr_core_2[:, left_size:, :, :]
    res += tf.einsum('paib,qaib->pq', curr_du_1, curr_du_2)
  return res
def add_n_projected(tt_objects, coef=None):
  """Adds all input TT-objects that are projections on the same tangent space.

  add_projected((a, b)) is equivalent add(a, b) for a and b that are from the
  same tangent space, but doesn't increase the TT-ranks.

  Args:
    tt_objects: a list of TT-objects that are projections on the same tangent
      space.
    coef: a list of numbers or anything else convertable to tf.Tensor.
      If provided, computes weighted sum. The size of this array should be
        len(tt_objects) x tt_objects[0].batch_size

  Returns:
    TT-objects representing the sum of the tt_objects (weighted sum if coef is
    provided). The TT-rank of the result equals to the TT-ranks of the arguments.
  """
  for tt in tt_objects:
    if not hasattr(tt, 'projection_on'):
      raise ValueError('Both arguments should be projections on the tangent '
                       'space of some other TT-object. All projection* functions '
                       'leave .projection_on field in the resulting TT-object '
                       'which is not present in the argument you\'ve provided.')

  projection_on = tt_objects[0].projection_on
  for tt in tt_objects[1:]:
    if tt.projection_on != projection_on:
      raise ValueError('All tt_objects should be projections on the tangent '
                       'space of the same TT-object. The provided arguments are '
                       'projections on different TT-objects (%s and %s). Or at '
                       'least the pointers are different.' % (tt.projection_on,
                                                              projection_on))
  if coef is not None:
    coef = tf.convert_to_tensor(coef, dtype=tt_objects[0].dtype)
    if coef.get_shape().ndims > 1:
      # In batch case we will need to multiply each core by this coefficients
      # along the first axis. To do it need to reshape the coefs to match
      # the TT-cores number of dimensions.
      some_core = tt_objects[0].tt_cores[0]
      dim_array = [1] * (some_core.get_shape().ndims + 1)
      dim_array[0] = coef.get_shape()[0].value
      dim_array[1] = coef.get_shape()[1].value
      coef = tf.reshape(coef, dim_array)

  ndims = tt_objects[0].ndims()
  tt_ranks = shapes.lazy_tt_ranks(tt_objects[0])
  left_rank_dim = tt_objects[0].left_tt_rank_dim
  right_rank_dim = tt_objects[0].right_tt_rank_dim
  res_cores = []

  def slice_tt_core(tt_core, left_idx, right_idx):
    # Slice a TT-core along its left and right rank axes, keeping all other
    # axes intact.
    num_tt_core_dims = len(tt_core.get_shape())
    idx = [slice(None)] * num_tt_core_dims
    idx[left_rank_dim] = left_idx
    idx[right_rank_dim] = right_idx
    return tt_core[idx]

  # Tangent-space elements share identical U/V blocks (they are the
  # orthogonalized cores of the object projected onto); only the delta
  # blocks differ. Thus summing amounts to summing the delta blocks and
  # copying the shared blocks from any one element -- we consistently use
  # tt_objects[0] for the shared blocks.

  # First core: sum the delta half (left half of the right-rank axis), copy
  # the shared U block from the first object.
  right_half_rank = tt_ranks[1] // 2
  left_chunks = []
  for obj_idx, tt in enumerate(tt_objects):
    curr_core = slice_tt_core(tt.tt_cores[0], slice(None),
                              slice(0, right_half_rank))
    if coef is not None:
      curr_core *= coef[obj_idx]
    left_chunks.append(curr_core)
  left_part = tf.add_n(left_chunks)
  first_obj_core = tt_objects[0].tt_cores[0]
  right_part = slice_tt_core(first_obj_core, slice(None),
                             slice(right_half_rank, None))
  first_core = tf.concat((left_part, right_part), axis=right_rank_dim)
  res_cores.append(first_core)

  # Middle cores: sum the lower-left delta block, copy the shared V (upper)
  # and U (lower-right) blocks from the first object. Previously the upper
  # block was read from the loop variable `tt` leaking out of the preceding
  # loop, which only worked because all inputs share identical upper blocks;
  # use tt_objects[0] explicitly for robustness.
  for core_idx in range(1, ndims - 1):
    first_obj_core = tt_objects[0].tt_cores[core_idx]
    left_half_rank = tt_ranks[core_idx] // 2
    right_half_rank = tt_ranks[core_idx + 1] // 2

    upper_part = slice_tt_core(first_obj_core, slice(0, left_half_rank),
                               slice(None))
    lower_right_part = slice_tt_core(first_obj_core,
                                     slice(left_half_rank, None),
                                     slice(right_half_rank, None))

    lower_left_chunks = []
    for obj_idx, tt in enumerate(tt_objects):
      curr_core = slice_tt_core(tt.tt_cores[core_idx],
                                slice(left_half_rank, None),
                                slice(0, right_half_rank))
      if coef is not None:
        curr_core *= coef[obj_idx]
      lower_left_chunks.append(curr_core)
    lower_left_part = tf.add_n(lower_left_chunks)
    lower_part = tf.concat((lower_left_part, lower_right_part),
                           axis=right_rank_dim)
    curr_core = tf.concat((upper_part, lower_part), axis=left_rank_dim)
    res_cores.append(curr_core)

  # Last core: sum the delta half (lower half of the left-rank axis), copy
  # the shared V block from the first object (was the stale `tt` as well).
  left_half_rank = tt_ranks[ndims - 1] // 2
  upper_part = slice_tt_core(tt_objects[0].tt_cores[-1],
                             slice(0, left_half_rank),
                             slice(None))
  lower_chunks = []
  for obj_idx, tt in enumerate(tt_objects):
    curr_core = slice_tt_core(tt.tt_cores[-1], slice(left_half_rank, None),
                              slice(None))
    if coef is not None:
      curr_core *= coef[obj_idx]
    lower_chunks.append(curr_core)
  lower_part = tf.add_n(lower_chunks)
  last_core = tf.concat((upper_part, lower_part), axis=left_rank_dim)
  res_cores.append(last_core)

  raw_shape = tt_objects[0].get_raw_shape()
  static_tt_ranks = tt_objects[0].get_tt_ranks()
  if isinstance(tt_objects[0], TensorTrain):
    res = TensorTrain(res_cores, raw_shape, static_tt_ranks)
  elif isinstance(tt_objects[0], TensorTrainBatch):
    res = TensorTrainBatch(res_cores, raw_shape, static_tt_ranks,
                           tt_objects[0].batch_size)
  # Maintain the projection_on property.
  res.projection_on = tt_objects[0].projection_on
  return res
def tangent_space_to_deltas(tt, name='t3f_tangent_space_to_deltas'):
  """Convert an element of the tangent space to deltas representation.

  Tangent space elements (outputs of t3f.project) look like:
    dP1 V2 ... Vd + U1 dP2 V3 ... Vd + ... + U1 ... Ud-1 dPd.

  This function takes as input an element of the tangent space and converts
  it to the list of deltas [dP1, ..., dPd].

  Args:
    tt: `TensorTrain` or `TensorTrainBatch` that is a result of t3f.project,
      t3f.project_matmul, or other similar functions.
    name: string, name of the Op.

  Returns:
    A list of delta-cores (tf.Tensors).
  """
  if not hasattr(tt, 'projection_on') or tt.projection_on is None:
    raise ValueError('tt argument is supposed to be a projection, but it '
                     'lacks projection_on field')
  num_dims = tt.ndims()
  left_tt_rank_dim = tt.left_tt_rank_dim
  right_tt_rank_dim = tt.right_tt_rank_dim
  deltas = [None] * num_dims
  tt_ranks = shapes.lazy_tt_ranks(tt)
  # Projections have doubled (hence even) internal ranks; odd ranks mean the
  # input cannot be a tangent-space element.
  for i in range(1, num_dims - 1):
    if int(tt_ranks[i] / 2) != tt_ranks[i] / 2:
      raise ValueError('tt argument is supposed to be a projection, but its '
                       'ranks are not even.')
  with tf.compat.v1.name_scope(name, values=tt.tt_cores):
    # Middle cores: the delta block dP_i is the lower-left quarter of the
    # block core [[V_i, 0], [dP_i, U_i]].
    for i in range(1, num_dims - 1):
      r1, r2 = tt_ranks[i], tt_ranks[i + 1]
      curr_core = tt.tt_cores[i]
      slc = [slice(None)] * len(curr_core.shape)
      slc[left_tt_rank_dim] = slice(int(r1 / 2), None)
      slc[right_tt_rank_dim] = slice(0, int(r2 / 2))
      deltas[i] = curr_core[slc]
    # First core: dP_1 is the left half of the right-rank axis.
    slc = [slice(None)] * len(tt.tt_cores[0].shape)
    slc[right_tt_rank_dim] = slice(0, int(tt_ranks[1] / 2))
    deltas[0] = tt.tt_cores[0][slc]
    # Last core: dP_d is the lower half of the left-rank axis. All cores of a
    # TT object have the same number of axes, so core 0's shape is reused.
    slc = [slice(None)] * len(tt.tt_cores[0].shape)
    slc[left_tt_rank_dim] = slice(int(tt_ranks[-2] / 2), None)
    deltas[num_dims - 1] = tt.tt_cores[num_dims - 1][slc]
  return deltas
def deltas_to_tangent_space(deltas, tt, left=None, right=None,
                            name='t3f_deltas_to_tangent_space'):
  """Converts deltas representation of tangent space vector to TT object.

  Takes as input a list of [dP1, ..., dPd] and returns
    dP1 V2 ... Vd + U1 dP2 V3 ... Vd + ... + U1 ... Ud-1 dPd.

  This function is hard to use correctly because deltas should obey the
  so called gauge conditions. If they don't, the function will silently return
  incorrect result. This is why this function is not imported in __init__.

  Args:
    deltas: a list of deltas (essentially TT-cores) obeying the gauge
      conditions.
    tt: `TensorTrain` object on which the tangent space tensor represented by
      delta is projected.
    left: t3f.orthogonalize_tt_cores(tt). If you have it already computed, you
      may pass it as argument to avoid recomputing.
    right: t3f.orthogonalize_tt_cores(left, left_to_right=False). If you have
      it already computed, you may pass it as argument to avoid recomputing.
    name: string, name of the Op.

  Returns:
    `TensorTrain` object constructed from deltas, that is from the tangent
      space at point `tt`.
  """
  cores = []
  dtype = tt.dtype
  num_dims = tt.ndims()
  # TODO: add cache instead of manually passing precomputed stuff?
  input_tensors = list(tt.tt_cores) + list(deltas)
  if left is not None:
    input_tensors += list(left.tt_cores)
  if right is not None:
    input_tensors += list(right.tt_cores)
  with tf.compat.v1.name_scope(name, values=input_tensors):
    if left is None:
      left = decompositions.orthogonalize_tt_cores(tt)
    if right is None:
      right = decompositions.orthogonalize_tt_cores(left, left_to_right=False)
    left_tangent_tt_ranks = shapes.lazy_tt_ranks(left)
    # Bug fix: right ranks were previously read from `left`; read them from
    # the right-orthogonalized tensor, consistent with project_sum/project/
    # project_matmul above. (For valid TT decompositions the two coincide,
    # so the old code worked by accident.)
    right_tangent_tt_ranks = shapes.lazy_tt_ranks(right)
    raw_shape = shapes.lazy_raw_shape(left)
    right_rank_dim = left.right_tt_rank_dim
    left_rank_dim = left.left_tt_rank_dim
    # Batch deltas (one extra leading axis per delta) produce a batch result.
    is_batch_case = len(deltas[0].shape) > len(tt.tt_cores[0].shape)
    if is_batch_case:
      right_rank_dim += 1
      left_rank_dim += 1
      batch_size = deltas[0].shape.as_list()[0]
    for i in range(num_dims):
      left_tt_core = left.tt_cores[i]
      right_tt_core = right.tt_cores[i]
      if is_batch_case:
        # Broadcast the shared U/V cores along the batch axis.
        tile = [1] * len(left_tt_core.shape)
        tile = [batch_size] + tile
        left_tt_core = tf.tile(left_tt_core[None, ...], tile)
        right_tt_core = tf.tile(right_tt_core[None, ...], tile)

      # Assemble the tangent-space core in block form:
      #   first core:  [dP_1, U_1]
      #   middle core: [[V_i, 0], [dP_i, U_i]]
      #   last core:   [V_d; dP_d]
      if i == 0:
        tangent_core = tf.concat((deltas[i], left_tt_core),
                                 axis=right_rank_dim)
      elif i == num_dims - 1:
        tangent_core = tf.concat((right_tt_core, deltas[i]),
                                 axis=left_rank_dim)
      else:
        rank_1 = right_tangent_tt_ranks[i]
        rank_2 = left_tangent_tt_ranks[i + 1]
        if tt.is_tt_matrix():
          mode_size_n = raw_shape[0][i]
          mode_size_m = raw_shape[1][i]
          shape = [rank_1, mode_size_n, mode_size_m, rank_2]
        else:
          mode_size_n = raw_shape[0][i]
          shape = [rank_1, mode_size_n, rank_2]
        if is_batch_case:
          shape = [batch_size] + shape
        zeros = tf.zeros(shape, dtype=dtype)
        upper = tf.concat((right_tt_core, zeros), axis=right_rank_dim)
        lower = tf.concat((deltas[i], left_tt_core), axis=right_rank_dim)
        tangent_core = tf.concat((upper, lower), axis=left_rank_dim)
      cores.append(tangent_core)
    if is_batch_case:
      tangent = TensorTrainBatch(cores, batch_size=batch_size)
    else:
      tangent = TensorTrain(cores)
    # Mark the result as a projection on the tangent space at `tt`.
    tangent.projection_on = tt
    return tangent
| 2.828125 | 3 |
dpauth/admin.py | askmeaboutlo0m/website | 9 | 13356 | <filename>dpauth/admin.py
from django.contrib import admin
from . import models
@admin.register(models.Username)
class UsernameAdmin(admin.ModelAdmin):
    # Columns shown in the changelist view.
    list_display = ('user', 'name', 'is_mod')
    # normalized_name is presumably derived from 'name' elsewhere, so it is
    # shown read-only here -- TODO confirm against the Username model.
    readonly_fields = ('normalized_name',)
    # Admin search matches the owning user's email and the username itself.
    search_fields = ('user__email', 'name')
| 2.078125 | 2 |
iptv_proxy/providers/beast/json_api.py | sfanous/IPTVProxy | 9 | 13357 | <gh_stars>1-10
import logging
from iptv_proxy.providers.beast.constants import BeastConstants
from iptv_proxy.providers.iptv_provider.json_api import ProviderConfigurationJSONAPI
logger = logging.getLogger(__name__)
class BeastConfigurationJSONAPI(ProviderConfigurationJSONAPI):
    # Configuration JSON API for the Beast provider; all behavior comes from
    # the generic ProviderConfigurationJSONAPI base class.
    # No instance attributes beyond those of the base class.
    __slots__ = []
    # Lower-cased provider name used by the base class to key configuration.
    _provider_name = BeastConstants.PROVIDER_NAME.lower()
| 1.640625 | 2 |
tests/random/random_testing.py | jkeiren/mCRL2 | 61 | 13358 | <reponame>jkeiren/mCRL2
#!/usr/bin/env python
# Copyright 2015 <NAME>.
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
import os
import os.path
import random
import re
import sys
import traceback
sys.path += [os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'python'))]
import random_state_formula_generator
from random_bes_generator import make_bes
from random_pbes_generator import make_pbes
import random_process_expression
from testing import YmlTest
from text_utility import write_text
MCRL2_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))
MCRL2_INSTALL_DIR = os.path.join(MCRL2_ROOT, 'install', 'bin')
def ymlfile(file):
    """Return the full path of the yml test specification with the given name."""
    return '{0}/tests/specifications/{1}.yml'.format(MCRL2_ROOT, file)
def mcrl2file(file):
    """Return the path of *file* relative to the root of the mCRL2 source tree."""
    return os.path.join(MCRL2_ROOT, file)
class RandomTest(YmlTest):
    """Base class for tests that run a yml specification on randomly
    generated input files."""

    def __init__(self, name, ymlfile, settings):
        super(RandomTest, self).__init__(name, ymlfile, [], settings)

    def create_inputfiles(self, runpath = '.'):
        """Create the random input files for this test and append their
        names to self.inputfiles; must be implemented by subclasses."""
        raise NotImplementedError

    def remove_inputfiles(self, runpath = '.'):
        """Delete the generated input files that live inside runpath."""
        here = os.path.abspath(runpath)
        for generated in self.inputfiles:
            if os.path.abspath(os.path.dirname(generated)) == here:
                os.remove(generated)

    def execute(self, runpath = '.'):
        """Generate the inputs, run the test, then clean the inputs up."""
        self.create_inputfiles(runpath)
        super(RandomTest, self).execute(runpath)
        self.remove_inputfiles(runpath)
class ProcessTest(RandomTest):
    """Base class for random tests whose input is a randomly generated
    process specification (.mcrl2 file).

    Subclasses tune the generation parameters set up in __init__.
    """

    def __init__(self, name, ymlfile, settings):
        super(ProcessTest, self).__init__(name, ymlfile, settings)
        self.actions = ['a', 'b', 'c', 'd']
        self.process_identifiers = ['P', 'Q', 'R']
        self.process_size = 13
        self.parallel_operator_generators = random_process_expression.default_parallel_operator_generators
        self.process_expression_generators = random_process_expression.default_process_expression_generators
        self.init = None
        self.generate_process_parameters = False

    def create_inputfiles(self, runpath = '.'):
        """Generate a random .mcrl2 process specification and register it."""
        # Fix: the original passed self.settings as a spurious second argument
        # to format(); '{0}' only consumes the first, so it was silently ignored.
        filename = '{0}.mcrl2'.format(self.name)
        p = random_process_expression.make_process_specification(self.parallel_operator_generators, self.process_expression_generators, self.actions, self.process_identifiers, self.process_size, self.init, self.generate_process_parameters)
        write_text(filename, str(p))
        self.inputfiles += [filename]
# generates stochastic random processes
class StochasticProcessTest(ProcessTest):
    """ProcessTest variant that can also generate stochastic ('dist')
    operators in the random process expressions."""

    def __init__(self, name, ymlfile, settings):
        super(StochasticProcessTest, self).__init__(name, ymlfile, settings)
        # Weight table: each generator is picked with probability
        # proportional to its weight.
        self.process_expression_generators = {
            random_process_expression.make_action: 8,
            random_process_expression.make_delta: 1,
            random_process_expression.make_tau: 1,
            random_process_expression.make_process_instance: 2,
            random_process_expression.make_sum: 2,
            random_process_expression.make_if_then: 2,
            random_process_expression.make_if_then_else: 2,
            random_process_expression.make_choice: 5,
            random_process_expression.make_seq: 5,
            random_process_expression.make_multi_action: 1,
            random_process_expression.make_dist: 3,
        }
# generates random process with higher probability of tau transitions
class ProcessTauTest(ProcessTest):
    """ProcessTest variant biased towards tau transitions: only three
    actions are used, 'a' is hidden in the init expression, and several
    expression generators are given weight zero."""

    def __init__(self, name, testfile, settings):
        super(ProcessTauTest, self).__init__(name, testfile, settings)
        self.actions = ['a', 'b', 'c']
        self.init = 'hide({a}, allow({a, b, c}, P || Q || R))'
        # Weight table; a zero weight disables the corresponding generator.
        self.process_expression_generators = {
            random_process_expression.make_action: 8,
            random_process_expression.make_delta: 1,
            random_process_expression.make_tau: 4,
            random_process_expression.make_process_instance: 1,
            random_process_expression.make_sum: 0,
            random_process_expression.make_if_then: 0,
            random_process_expression.make_if_then_else: 0,
            random_process_expression.make_choice: 5,
            random_process_expression.make_seq: 5,
            random_process_expression.make_multi_action: 1,
            random_process_expression.make_dist: 0,
        }
# Per-tool random tests: each class runs the yml specification named in its
# super() call on a randomly generated process specification.
class AlphabetReduceTest(ProcessTest):
    def __init__(self, name, settings):
        super(AlphabetReduceTest, self).__init__(name, ymlfile('alphabet-reduce'), settings)
        # Use a slightly larger action alphabet for this test.
        self.actions = ['a', 'b', 'c', 'd', 'e']
class LpsSuminstTest(ProcessTest):
    def __init__(self, name, settings):
        super(LpsSuminstTest, self).__init__(name, ymlfile('lpssuminst'), settings)
class LpsSumelmTest(ProcessTest):
    def __init__(self, name, settings):
        super(LpsSumelmTest, self).__init__(name, ymlfile('lpssumelm'), settings)
class LpsParelmTest(ProcessTest):
    def __init__(self, name, settings):
        super(LpsParelmTest, self).__init__(name, ymlfile('lpsparelm'), settings)
        # lpsparelm needs processes that actually carry parameters.
        self.generate_process_parameters = True
class LpsOnePointRuleRewriteTest(ProcessTest):
    def __init__(self, name, settings):
        super(LpsOnePointRuleRewriteTest, self).__init__(name, ymlfile('lpstransform'), settings)
        # Select the one-point-rule rewriter algorithm of lpstransform.
        self.add_command_line_options('t2', ['-alps-one-point-rule-rewriter'])
class LpsConfcheckTest(ProcessTauTest):
    """Runs lpsconfcheck with a specific confluence criterion on a random
    process with many tau transitions."""

    def __init__(self, name, confluence_type, settings):
        # Map from confluence criterion name to its -x option letter.
        self.option_map = {
            'commutative': 'C',
            'commutative-disjoint': 'c',
            'disjoint': 'd',
            'triangular': 'T',
            'trivial': 'Z',
        }
        assert confluence_type in self.option_map
        super(LpsConfcheckTest, self).__init__(name, ymlfile('lpsconfcheck'), settings)
        self.add_command_line_options('t2', ['-x' + self.option_map[confluence_type]])
class LtscompareTest(ProcessTauTest):
    """Compares two labelled transition systems with ltscompare using the
    given equivalence."""
    def __init__(self, name, equivalence_type, settings):
        assert equivalence_type in ['bisim', 'bisim-gv', 'bisim-gjkw', 'branching-bisim', 'branching-bisim-gv', 'branching-bisim-gjkw', 'dpbranching-bisim', 'dpbranching-bisim-gv', 'dpbranching-bisim-gjkw', 'weak-bisim', 'dpweak-bisim', 'sim', 'ready-sim' , 'trace', 'weak-trace']
        super(LtscompareTest, self).__init__(name, ymlfile('ltscompare'), settings)
        # Pass the equivalence to both ltscompare invocations (nodes t3, t4).
        self.add_command_line_options('t3', ['-e' + equivalence_type])
        self.add_command_line_options('t4', ['-e' + equivalence_type])
class StochasticLtscompareTest(StochasticProcessTest):
    """Variant of the ltscompare test on stochastic processes."""
    def __init__(self, name, settings):
        super(StochasticLtscompareTest, self).__init__(name, ymlfile('stochastic-ltscompare'), settings)
class BisimulationTest(ProcessTauTest):
    """Cross-checks several bisimulation implementations for the given
    equivalence."""
    def __init__(self, name, equivalence_type, settings):
        assert equivalence_type in ['bisim', 'bisim-gv', 'bisim-gjkw', 'branching-bisim', 'branching-bisim-gv', 'branching-bisim-gjkw', 'weak-bisim']
        super(BisimulationTest, self).__init__(name, ymlfile('bisimulation'), settings)
        self.add_command_line_options('t3', ['-e' + equivalence_type])
        self.add_command_line_options('t4', ['-e' + equivalence_type])
        # Node t7 uses a different option spelling (-b...) than t3/t4 (-e...);
        # the gv/gjkw variants map onto the same -b equivalence name.
        if equivalence_type in ['branching-bisim-gv', 'branching-bisim-gjkw']:
            self.add_command_line_options('t7', ['-bbranching-bisim'])
        elif equivalence_type in ['bisim', 'bisim-gv', 'bisim-gjkw']:
            self.add_command_line_options('t7', ['-bstrong-bisim'])
        else:
            self.add_command_line_options('t7', ['-b' + equivalence_type])
class Lps2ltsAlgorithmsTest(ProcessTauTest):
    """Runs lps2lts search options on a random process with many tau steps."""

    def __init__(self, name, settings):
        super(Lps2ltsAlgorithmsTest, self).__init__(name, ymlfile('lps2lts-algorithms'), settings)
        # Randomly choose the action set and one search option.
        actions = random.choice(['a', 'a,b', 'a,b,c'])
        # Fix: the original first assigned an options list that included
        # '--divergence' and then unconditionally overwrote it on the next
        # line (a dead store); the dead assignment has been removed.
        options = [random.choice(['--deadlock', '--nondeterminism', '--action={}'.format(actions)])]
        # This branch is currently unreachable ('--divergence' is not among
        # the candidates above); kept so divergence can easily be re-enabled.
        if 'divergence' in options[0]:
            tau_actions = random.choice(['', '', 'b', 'b,c'])
            if tau_actions:
                options.append('--tau={}'.format(tau_actions))
        self.add_command_line_options('t2', options)
        self.add_command_line_options('t3', options)
# More per-tool random tests; the first three need process parameters to be
# generated, the last two additionally supply a modal formula input.
class LpsConstelmTest(ProcessTest):
    def __init__(self, name, settings):
        super(LpsConstelmTest, self).__init__(name, ymlfile('lpsconstelm'), settings)
        self.generate_process_parameters = True
class LpsBinaryTest(ProcessTest):
    def __init__(self, name, settings):
        super(LpsBinaryTest, self).__init__(name, ymlfile('lpsbinary'), settings)
        self.generate_process_parameters = True
class LpsstategraphTest(ProcessTest):
    def __init__(self, name, settings):
        super(LpsstategraphTest, self).__init__(name, ymlfile('lpsstategraph'), settings)
        self.generate_process_parameters = True
class Lps2pbesTest(ProcessTest):
    def __init__(self, name, settings):
        super(Lps2pbesTest, self).__init__(name, ymlfile('lps2pbes'), settings)
    def create_inputfiles(self, runpath = '.'):
        """Also supply the fixed nodeadlock modal formula as input."""
        super(Lps2pbesTest, self).create_inputfiles(runpath)
        self.inputfiles.append(mcrl2file('examples/modal-formulas/nodeadlock.mcf'))
class Lts2pbesTest(ProcessTest):
    def __init__(self, name, settings):
        super(Lts2pbesTest, self).__init__(name, ymlfile('lts2pbes'), settings)
    def create_inputfiles(self, runpath = '.'):
        """Also supply the fixed nodeadlock modal formula as input."""
        super(Lts2pbesTest, self).create_inputfiles(runpath)
        self.inputfiles.append(mcrl2file('examples/modal-formulas/nodeadlock.mcf'))
class PbesTest(RandomTest):
    """Base class for random tests whose input is a randomly generated PBES."""
    def __init__(self, name, ymlfile, settings):
        super(PbesTest, self).__init__(name, ymlfile, settings)
        # Parameters controlling the size and shape of the generated PBES.
        self.equation_count = 4
        self.atom_count = 4
        self.propvar_count = 3
        self.use_quantifiers = True
        self.use_integers = True
    def create_inputfiles(self, runpath = '.'):
        """Generate a random PBES in textual format and register the file."""
        filename = '{0}.txt'.format(self.name)
        p = make_pbes(self.equation_count, self.atom_count, self.propvar_count, self.use_quantifiers, use_integers=self.use_integers)
        write_text(filename, str(p))
        self.inputfiles += [filename]
# Per-tool random PBES tests.
# N.B. does not work yet due to unusable abstraction map
class PbesabsintheTest(PbesTest):
    def __init__(self, name, settings):
        super(PbesabsintheTest, self).__init__(name, ymlfile('pbesabsinthe'), settings)
# N.B. This test has been disabled, since the tool has been deprecated.
class PbesabstractTest(PbesTest):
    def __init__(self, name, settings):
        super(PbesabstractTest, self).__init__(name, ymlfile('pbesabstract'), settings)
class PbesbddsolveTest(PbesTest):
    def __init__(self, name, settings):
        super(PbesbddsolveTest, self).__init__(name, ymlfile('pbesbddsolve'), settings)
        # The BDD solver is fed simpler PBESs: no integers, no quantifiers.
        self.use_integers = False
        self.use_quantifiers = False
class PbesconstelmTest(PbesTest):
    def __init__(self, name, settings):
        super(PbesconstelmTest, self).__init__(name, ymlfile('pbesconstelm'), settings)
class PbesparelmTest(PbesTest):
    def __init__(self, name, settings):
        super(PbesparelmTest, self).__init__(name, ymlfile('pbesparelm'), settings)
class PbespareqelmTest(PbesTest):
    def __init__(self, name, settings):
        super(PbespareqelmTest, self).__init__(name, ymlfile('pbespareqelm'), settings)
class Pbespor1Test(PbesTest):
    def __init__(self, name, settings):
        super(Pbespor1Test, self).__init__(name, ymlfile('pbespor1'), settings)
class Pbespor2Test(ProcessTest):
    """Tests pbespor on a random process combined with a random modal formula."""

    def __init__(self, name, settings):
        super(Pbespor2Test, self).__init__(name, ymlfile('pbespor2'), settings)

    def create_inputfiles(self, runpath = '.'):
        """Generate a random modal formula next to the process specification."""
        super(Pbespor2Test, self).create_inputfiles(runpath)
        # Fix: dropped the spurious unused second argument (self.settings)
        # that the original passed to format().
        filename = '{0}.mcf'.format(self.name)
        formula = random_state_formula_generator.make_modal_formula()
        write_text(filename, str(formula))
        self.inputfiles += [filename]
# More per-tool random PBES tests; the first three take extra tool options.
class PbesrewrTest(PbesTest):
    def __init__(self, name, rewriter, settings):
        super(PbesrewrTest, self).__init__(name, ymlfile('pbesrewr'), settings)
        # Select the pbesrewr rewriter via -p.
        self.add_command_line_options('t2', ['-p' + rewriter])
class PbestransformTest(PbesTest):
    def __init__(self, name, rewriter, settings):
        super(PbestransformTest, self).__init__(name, ymlfile('pbestransform'), settings)
        # Select the pbestransform algorithm via -a.
        self.add_command_line_options('t2', ['-a' + rewriter])
class PbesinstTest(PbesTest):
    def __init__(self, name, options, settings):
        super(PbesinstTest, self).__init__(name, ymlfile('pbesinst'), settings)
        self.add_command_line_options('t2', options)
class PbespgsolveTest(PbesTest):
    def __init__(self, name, settings):
        super(PbespgsolveTest, self).__init__(name, ymlfile('pbespgsolve'), settings)
class PbesstategraphTest(PbesTest):
    def __init__(self, name, settings):
        super(PbesstategraphTest, self).__init__(name, ymlfile('pbesstategraph'), settings)
class PbessymbolicbisimTest(PbesTest):
    def __init__(self, name, settings):
        super(PbessymbolicbisimTest, self).__init__(name, ymlfile('pbessymbolicbisim'), settings)
class PbessolvesymbolicTest(PbesTest):
    def __init__(self, name, settings):
        super(PbessolvesymbolicTest, self).__init__(name, ymlfile('pbessolvesymbolic'), settings)
class Pbes2boolTest(PbesTest):
    def __init__(self, name, settings):
        super(Pbes2boolTest, self).__init__(name, ymlfile('pbessolve'), settings)
class Pbes2boolDepthFirstTest(PbesTest):
    def __init__(self, name, settings):
        super(Pbes2boolDepthFirstTest, self).__init__(name, ymlfile('pbessolve'), settings)
        # Force the depth-first exploration strategy for every tool node.
        self.add_command_line_options('t2', ['-zdepth-first'])
        self.add_command_line_options('t3', ['-zdepth-first'])
        self.add_command_line_options('t4', ['-zdepth-first'])
        self.add_command_line_options('t5', ['-zdepth-first'])
        self.add_command_line_options('t6', ['-zdepth-first'])
        self.add_command_line_options('t7', ['-zdepth-first'])
        self.add_command_line_options('t8', ['-zdepth-first'])
class Pbes2bool_counter_exampleTest(ProcessTest):
    """Tests pbessolve counter-example generation at a given optimization level."""

    def __init__(self, name, optimization, settings):
        super(Pbes2bool_counter_exampleTest, self).__init__(name, ymlfile('pbessolve-counter-example'), settings)
        # Levels 4 and 5 additionally get the --aggressive flag.
        if optimization in [4, 5]:
            self.add_command_line_options('t3', ['-l{}'.format(optimization), '--aggressive', '--prune-todo-list'])
        else:
            self.add_command_line_options('t3', ['-l{}'.format(optimization), '--prune-todo-list'])

    def create_inputfiles(self, runpath = '.'):
        """Generate a random modal formula in addition to the process spec."""
        super(Pbes2bool_counter_exampleTest, self).create_inputfiles(runpath)
        # Fix: dropped the spurious unused second argument (self.settings)
        # that the original passed to format().
        filename = '{0}.mcf'.format(self.name)
        formula = random_state_formula_generator.make_modal_formula()
        write_text(filename, str(formula))
        self.inputfiles += [filename]
class Pbes_unify_parametersTest(PbesTest):
    def __init__(self, name, settings):
        super(Pbes_unify_parametersTest, self).__init__(name, ymlfile('pbes-unify-parameters'), settings)
class Pbes_srfTest(PbesTest):
    def __init__(self, name, settings):
        super(Pbes_srfTest, self).__init__(name, ymlfile('pbes-srf'), settings)
# N.B does not work due to unknown expressions (F_or)
class SymbolicExplorationTest(PbesTest):
    def __init__(self, name, settings):
        super(SymbolicExplorationTest, self).__init__(name, ymlfile('symbolic_exploration'), settings)
class BesTest(RandomTest):
    """Base class for random tests whose input is a randomly generated
    boolean equation system (BES)."""

    def __init__(self, name, ymlfile, settings):
        super(BesTest, self).__init__(name, ymlfile, settings)
        # Parameters controlling the size of the generated BES.
        self.equation_count = 4
        self.term_size = 3

    def create_inputfiles(self, runpath = '.'):
        """Generate a random BES in textual format and register the file."""
        # Fix: dropped the spurious unused second argument (self.settings)
        # that the original passed to format().
        filename = '{0}.txt'.format(self.name)
        p = make_bes(self.equation_count, self.term_size)
        write_text(filename, str(p))
        self.inputfiles += [filename]
# Random test for the bessolve tool.
class BessolveTest(BesTest):
    def __init__(self, name, settings):
        super(BessolveTest, self).__init__(name, ymlfile('bessolve'), settings)
# Registry of all runnable random tests: maps a test name to a factory that
# builds a configured test instance from (name, settings). Every value is a
# lambda so that tests needing extra constructor arguments still fit the
# uniform (name, settings) factory signature.
available_tests = {
    'alphabet-reduce': lambda name, settings: AlphabetReduceTest(name, settings),
    'lpssuminst': lambda name, settings: LpsSuminstTest(name, settings),
    'lpssumelm': lambda name, settings: LpsSumelmTest(name, settings),
    'lpsparelm': lambda name, settings: LpsParelmTest(name, settings),
    'lps-quantifier-one-point': lambda name, settings: LpsOnePointRuleRewriteTest(name, settings),
    'lpsconfcheck-commutative': lambda name, settings: LpsConfcheckTest(name, 'commutative', settings),
    'lpsconfcheck-commutative-disjoint': lambda name, settings: LpsConfcheckTest(name, 'commutative-disjoint', settings),
    'lpsconfcheck-disjoint': lambda name, settings: LpsConfcheckTest(name, 'disjoint', settings),
    'lpsconfcheck-triangular': lambda name, settings: LpsConfcheckTest(name, 'triangular', settings),
    'lpsconfcheck-trivial': lambda name, settings: LpsConfcheckTest(name, 'trivial', settings),
    'lpsconstelm': lambda name, settings: LpsConstelmTest(name, settings),
    'lpsbinary': lambda name, settings: LpsBinaryTest(name, settings),
    'lps2lts-algorithms': lambda name, settings: Lps2ltsAlgorithmsTest(name, settings),
    'lps2pbes': lambda name, settings: Lps2pbesTest(name, settings),
    'lpsstategraph': lambda name, settings: LpsstategraphTest(name, settings),
    'lts2pbes': lambda name, settings: Lts2pbesTest(name, settings),
    'ltscompare-bisim': lambda name, settings: LtscompareTest(name, 'bisim', settings),
    'ltscompare-bisim-gv': lambda name, settings: LtscompareTest(name, 'bisim-gv', settings),
    'ltscompare-bisim-gjkw': lambda name, settings: LtscompareTest(name, 'bisim-gjkw', settings),
    'ltscompare-branching-bisim': lambda name, settings: LtscompareTest(name, 'branching-bisim', settings),
    'ltscompare-branching-bisim-gv': lambda name, settings: LtscompareTest(name, 'branching-bisim-gv', settings),
    'ltscompare-branching-bisim-gjkw': lambda name, settings: LtscompareTest(name, 'branching-bisim-gjkw', settings),
    'ltscompare-dpbranching-bisim': lambda name, settings: LtscompareTest(name, 'dpbranching-bisim', settings),
    'ltscompare-dpbranching-bisim-gv': lambda name, settings: LtscompareTest(name, 'dpbranching-bisim-gv', settings),
    'ltscompare-dpbranching-bisim-gjkw': lambda name, settings: LtscompareTest(name, 'dpbranching-bisim-gjkw', settings),
    'ltscompare-weak-bisim': lambda name, settings: LtscompareTest(name, 'weak-bisim', settings),
    'ltscompare-dpweak-bisim': lambda name, settings: LtscompareTest(name, 'dpweak-bisim', settings),
    'ltscompare-sim': lambda name, settings: LtscompareTest(name, 'sim', settings),
    'ltscompare-ready-sim': lambda name, settings: LtscompareTest(name, 'ready-sim', settings),
    'ltscompare-trace': lambda name, settings: LtscompareTest(name, 'trace', settings),
    'ltscompare-weak-trace': lambda name, settings: LtscompareTest(name, 'weak-trace', settings),
    'bisimulation-bisim': lambda name, settings: BisimulationTest(name, 'bisim', settings),
    'bisimulation-bisim-gv': lambda name, settings: BisimulationTest(name, 'bisim-gv', settings),
    'bisimulation-bisim-gjkw': lambda name, settings: BisimulationTest(name, 'bisim-gjkw', settings),
    'bisimulation-branching-bisim': lambda name, settings: BisimulationTest(name, 'branching-bisim', settings),
    'bisimulation-branching-bisim-gv': lambda name, settings: BisimulationTest(name, 'branching-bisim-gv', settings),
    'bisimulation-branching-bisim-gjkw': lambda name, settings: BisimulationTest(name, 'branching-bisim-gjkw', settings),
    'bisimulation-weak-bisim': lambda name, settings: BisimulationTest(name, 'weak-bisim', settings),
    'pbesconstelm': lambda name, settings: PbesconstelmTest(name, settings),
    'pbesparelm': lambda name, settings: PbesparelmTest(name, settings),
    'pbespareqelm': lambda name, settings: PbespareqelmTest(name, settings),
    'pbespor2': lambda name, settings: Pbespor2Test(name, settings),
    'pbesrewr-simplify': lambda name, settings: PbesrewrTest(name, 'simplify', settings),
    'pbesrewr-pfnf': lambda name, settings: PbesrewrTest(name, 'pfnf', settings),
    'pbesrewr-quantifier-all': lambda name, settings: PbesrewrTest(name, 'quantifier-all', settings),
    'pbesrewr-quantifier-finite': lambda name, settings: PbesrewrTest(name, 'quantifier-finite', settings),
    'pbesrewr-quantifier-inside': lambda name, settings: PbesrewrTest(name, 'quantifier-inside', settings),
    'pbesrewr-quantifier-one-point': lambda name, settings: PbesrewrTest(name, 'quantifier-one-point', settings),
    'pbesrewr-data-rewriter': lambda name, settings: PbestransformTest(name, 'pbes-data-rewriter', settings),
    'pbesrewr-simplify-rewriter': lambda name, settings: PbestransformTest(name, 'pbes-simplify-rewriter', settings),
    'pbesrewr-simplify-data-rewriter': lambda name, settings: PbestransformTest(name, 'pbes-simplify-data-rewriter', settings),
    'pbesrewr-simplify-quantifiers-rewriter': lambda name, settings: PbestransformTest(name, 'pbes-simplify-quantifiers-rewriter', settings),
    'pbesrewr-simplify-quantifiers-data-rewriter': lambda name, settings: PbestransformTest(name, 'pbes-simplify-quantifiers-data-rewriter', settings),
    'pbesinst-lazy': lambda name, settings: PbesinstTest(name, ['-slazy'], settings),
    'pbesinst-alternative_lazy': lambda name, settings: PbesinstTest(name, ['-salternative-lazy'], settings),
    'pbesinst-finite': lambda name, settings: PbesinstTest(name, ['-sfinite', '-f*(*:Bool)'], settings),
    'pbespgsolve': lambda name, settings: PbespgsolveTest(name, settings),
    'pbessolve': lambda name, settings: Pbes2boolTest(name, settings),
    'pbessolve-depth-first': lambda name, settings: Pbes2boolDepthFirstTest(name, settings),
    'pbessolve-counter-example-optimization-0': lambda name, settings: Pbes2bool_counter_exampleTest(name, 0, settings),
    'pbessolve-counter-example-optimization-1': lambda name, settings: Pbes2bool_counter_exampleTest(name, 1, settings),
    'pbessolve-counter-example-optimization-2': lambda name, settings: Pbes2bool_counter_exampleTest(name, 2, settings),
    'pbessolve-counter-example-optimization-3': lambda name, settings: Pbes2bool_counter_exampleTest(name, 3, settings),
    'pbessolve-counter-example-optimization-4': lambda name, settings: Pbes2bool_counter_exampleTest(name, 4, settings),
    'pbessolve-counter-example-optimization-5': lambda name, settings: Pbes2bool_counter_exampleTest(name, 5, settings),
    'pbessolve-counter-example-optimization-6': lambda name, settings: Pbes2bool_counter_exampleTest(name, 6, settings),
    'pbessolve-counter-example-optimization-7': lambda name, settings: Pbes2bool_counter_exampleTest(name, 7, settings),
    'pbesstategraph': lambda name, settings: PbesstategraphTest(name, settings),
    'pbes-unify-parameters': lambda name, settings: Pbes_unify_parametersTest(name, settings),
    'pbes-srf': lambda name, settings: Pbes_srfTest(name, settings),
    # 'pbessymbolicbisim': lambda name, settings: PbessymbolicbisimTest(name, settings),  # excluded from the tests because of Z3 dependency
    'bessolve': lambda name, settings: BessolveTest(name, settings),
    # 'stochastic-ltscompare': lambda name, settings: StochasticLtscompareTest(name, settings),
}
# These tests do not work on Windows due to dependencies.
if os.name != 'nt':
    available_tests.update({'pbessolvesymbolic': lambda name, settings: PbessolvesymbolicTest(name, settings)})
    # available_tests.update({'pbesbddsolve': lambda name, settings: PbesbddsolveTest(name, settings)})
def print_names(tests):
    """Print the names of the given tests, one per line, in sorted order."""
    for test_name in sorted(tests):
        print(test_name)
# Return all tests that match with pattern. In case of an exact match, only this exact match is returned.
def matching_tests(tests, pattern):
    """Return the sorted test names that match *pattern* (re.search); if the
    pattern itself is among the matches it is returned as the only result."""
    hits = [test_name for test_name in sorted(tests) if re.search(pattern, test_name)]
    return [pattern] if pattern in hits else hits
def main(tests):
    """Command line driver: run the selected random tests.

    :param tests: mapping from test name to a factory accepting
        (name, settings); exits with status -1 if any test raised.
    """
    import argparse
    cmdline_parser = argparse.ArgumentParser()
    cmdline_parser.add_argument('-t', '--toolpath', dest='toolpath', help='The path where the mCRL2 tools are installed')
    cmdline_parser.add_argument('-r', '--repetitions', dest='repetitions', metavar='N', default='10', help='Perform N repetitions of each test')
    cmdline_parser.add_argument('-v', '--verbose', dest='verbose', action='store_true', help='Display additional progress messages.')
    cmdline_parser.add_argument('-k', '--keep-files', dest='keep_files', action='store_true', help='Keep the files produced by the test')
    cmdline_parser.add_argument('-n', '--names', dest='names', action='store_true', help='Print the names of the available tests')
    cmdline_parser.add_argument('-p', '--pattern', dest='pattern', metavar='P', default='.', action='store', help='Run the tests that match with pattern P')
    cmdline_parser.add_argument('-o', '--output', dest='output', metavar='o', action='store', help='Run the tests in the given directory')
    args = cmdline_parser.parse_args()
    # -n only lists the available test names and exits.
    if args.names:
        print_names(tests)
        return
    toolpath = args.toolpath
    if not toolpath:
        toolpath = MCRL2_INSTALL_DIR
    settings = {'toolpath': toolpath, 'verbose': args.verbose, 'cleanup_files': not args.keep_files, 'allow-non-zero-return-values': True}
    I = range(int(args.repetitions))
    # Optionally create and switch to the requested output directory.
    if args.output:
        if not os.path.exists(args.output):
            os.mkdir(args.output)
        os.chdir(args.output)
    test_failed = False
    for name in matching_tests(tests, args.pattern):
        try:
            # Run each selected test args.repetitions times, with a unique
            # name per repetition.
            for i in I:
                test = tests[name]('{}_{}'.format(name, i), settings)
                test.execute_in_sandbox()
        except Exception as e:
            # Report the failure but keep running the remaining tests.
            print('An exception occurred:', e.__class__, e)
            traceback.print_exc()
            test_failed = True
    if (test_failed):
        sys.exit(-1)
# Entry point: run the randomized tool tests registered above.
if __name__ == '__main__':
    main(available_tests)
| 2.03125 | 2 |
src/download.py | stanislawbartkowski/webhdfsdirectory | 0 | 13359 | """ Main program to launch proc/hdfs.py
"""
import argparse
import logging
from pars import addargs
import sys
# NOTE(review): 'logging' is imported twice in this file; the duplicate
# below is harmless but can be removed.
import logging
logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)
from proc.hdfs import DIRHDFS
def gettestargs(parser) :
    """Parse a fixed set of developer-test arguments instead of the real
    command line.

    NOTE(review): contains a hard-coded local path, so it only works on the
    original author's machine; kept for manual debugging (see the
    commented-out call in readargs).
    """
    i = "/home/sbartkowski/work/webhdfsdirectory/testdata/inputhdfs.txt"
    return parser.parse_args([i,"inimical1","14000","sb","/user/sb","dir1","/tmp/download","--dryrun"])
def getargs(parser):
    """Parse the real command line (everything after the program name)."""
    cli_args = sys.argv[1:]
    return parser.parse_args(cli_args)
def readargs():
    """Build the downloader's argument parser and parse the command line."""
    arg_parser = argparse.ArgumentParser(
        description='Download HDFS using WEB REST/API')
    addargs(arg_parser)
    # For manual debugging, gettestargs(arg_parser) can be used instead.
    return getargs(arg_parser)
def main():
    """Parse the command line and download the requested HDFS directory."""
    args = readargs()
    downloader = DIRHDFS(args.host[0], args.port[0], args.user[0], args.regexp, args.dryrun)
    downloader.downloadhdfsdir(args.userdir[0], args.usersubdir[0], args.localdir[0])
if __name__ == "__main__":
    # Execute only when run as a script (not on import).
    main()
| 2.65625 | 3 |
desktop/core/ext-py/josepy-1.1.0/src/josepy/json_util.py | kokosing/hue | 5,079 | 13360 | """JSON (de)serialization framework.
The framework presented here is somewhat based on `Go's "json" package`_
(especially the ``omitempty`` functionality).
.. _`Go's "json" package`: http://golang.org/pkg/encoding/json/
"""
import abc
import binascii
import logging
import OpenSSL
import six
from josepy import b64, errors, interfaces, util
logger = logging.getLogger(__name__)
class Field(object):
    """JSON object field.
    :class:`Field` is meant to be used together with
    :class:`JSONObjectWithFields`.
    ``encoder`` (``decoder``) is a callable that accepts a single
    parameter, i.e. a value to be encoded (decoded), and returns the
    serialized (deserialized) value. In case of errors it should raise
    :class:`~josepy.errors.SerializationError`
    (:class:`~josepy.errors.DeserializationError`).
    Note, that ``decoder`` should perform partial serialization only.
    :ivar str json_name: Name of the field when encoded to JSON.
    :ivar default: Default value (used when not present in JSON object).
    :ivar bool omitempty: If ``True`` and the field value is empty, then
        it will not be included in the serialized JSON object, and
        ``default`` will be used for deserialization. Otherwise, if ``False``,
        field is considered as required, value will always be included in the
        serialized JSON objected, and it must also be present when
        deserializing.
    """
    __slots__ = ('json_name', 'default', 'omitempty', 'fdec', 'fenc')
    def __init__(self, json_name, default=None, omitempty=False,
                 decoder=None, encoder=None):
        # pylint: disable=too-many-arguments
        self.json_name = json_name
        self.default = default
        self.omitempty = omitempty
        # Fall back to the pass-through/immutabilizing defaults below when no
        # explicit codec is supplied.
        self.fdec = self.default_decoder if decoder is None else decoder
        self.fenc = self.default_encoder if encoder is None else encoder
    @classmethod
    def _empty(cls, value):
        """Is the provided value considered "empty" for this field?
        This is useful for subclasses that might want to override the
        definition of being empty, e.g. for some more exotic data types.
        """
        # Booleans are never "empty": False must still be serialized.
        return not isinstance(value, bool) and not value
    def omit(self, value):
        """Omit the value in output?"""
        return self._empty(value) and self.omitempty
    def _update_params(self, **kwargs):
        # Return a copy of this field with some construction parameters
        # replaced; used by the decoder()/encoder() decorators below.
        current = dict(json_name=self.json_name, default=self.default,
                       omitempty=self.omitempty,
                       decoder=self.fdec, encoder=self.fenc)
        current.update(kwargs)
        return type(self)(**current)  # pylint: disable=star-args
    # NOTE: decoder/encoder are used as decorators on class-level functions
    # (see the JSONObjectWithFields docstring); each returns a *new* Field
    # with the corresponding codec replaced.
    def decoder(self, fdec):
        """Descriptor to change the decoder on JSON object field."""
        return self._update_params(decoder=fdec)
    def encoder(self, fenc):
        """Descriptor to change the encoder on JSON object field."""
        return self._update_params(encoder=fenc)
    def decode(self, value):
        """Decode a value, optionally with context JSON object."""
        return self.fdec(value)
    def encode(self, value):
        """Encode a value, optionally with context JSON object."""
        return self.fenc(value)
    @classmethod
    def default_decoder(cls, value):
        """Default decoder.
        Recursively deserialize into immutable types (
        :class:`josepy.util.frozendict` instead of
        :func:`dict`, :func:`tuple` instead of :func:`list`).
        """
        # bases cases for different types returned by json.loads
        if isinstance(value, list):
            return tuple(cls.default_decoder(subvalue) for subvalue in value)
        elif isinstance(value, dict):
            return util.frozendict(
                dict((cls.default_decoder(key), cls.default_decoder(value))
                     for key, value in six.iteritems(value)))
        else:  # integer or string
            return value
    @classmethod
    def default_encoder(cls, value):
        """Default (passthrough) encoder."""
        # field.to_partial_json() is no good as encoder has to do partial
        # serialization only
        return value
class JSONObjectWithFieldsMeta(abc.ABCMeta):
    """Metaclass for :class:`JSONObjectWithFields` and its subclasses.
    It makes sure that, for any class ``cls`` with ``__metaclass__``
    set to ``JSONObjectWithFieldsMeta``:
    1. All fields (attributes of type :class:`Field`) in the class
       definition are moved to the ``cls._fields`` dictionary, where
       keys are field attribute names and values are fields themselves.
    2. ``cls.__slots__`` is extended by all field attribute names
       (i.e. not :attr:`Field.json_name`). Original ``cls.__slots__``
       are stored in ``cls._orig_slots``.
    In a consequence, for a field attribute name ``some_field``,
    ``cls.some_field`` will be a slot descriptor and not an instance
    of :class:`Field`. For example::
        some_field = Field('someField', default=())
        class Foo(object):
            __metaclass__ = JSONObjectWithFieldsMeta
            __slots__ = ('baz',)
            some_field = some_field
        assert Foo.__slots__ == ('some_field', 'baz')
        assert Foo._orig_slots == ()
        assert Foo.some_field is not Field
        assert Foo._fields.keys() == ['some_field']
        assert Foo._fields['some_field'] is some_field
    As an implementation note, this metaclass inherits from
    :class:`abc.ABCMeta` (and not the usual :class:`type`) to mitigate
    the metaclass conflict (:class:`ImmutableMap` and
    :class:`JSONDeSerializable`, parents of :class:`JSONObjectWithFields`,
    use :class:`abc.ABCMeta` as its metaclass).
    """
    def __new__(mcs, name, bases, dikt):
        fields = {}
        # Start from the fields inherited from all base classes.
        for base in bases:
            fields.update(getattr(base, '_fields', {}))
        # Do not reorder, this class might override fields from base classes!
        # Iterate over a tuple snapshot because dikt is mutated (pop) below.
        for key, value in tuple(six.iteritems(dikt)):
            # not six.iterkeys() (in-place edit!)
            if isinstance(value, Field):
                # Move the Field out of the class dict so the attribute name
                # becomes a slot descriptor instead of the Field object.
                fields[key] = dikt.pop(key)
        dikt['_orig_slots'] = dikt.get('__slots__', ())
        dikt['__slots__'] = tuple(
            list(dikt['_orig_slots']) + list(six.iterkeys(fields)))
        dikt['_fields'] = fields
        return abc.ABCMeta.__new__(mcs, name, bases, dikt)
@six.add_metaclass(JSONObjectWithFieldsMeta)
class JSONObjectWithFields(util.ImmutableMap, interfaces.JSONDeSerializable):
    # pylint: disable=too-few-public-methods
    """JSON object with fields.

    Example::

        class Foo(JSONObjectWithFields):
            bar = Field('Bar')
            empty = Field('Empty', omitempty=True)

            @bar.encoder
            def bar(value):
                return value + 'bar'

            @bar.decoder
            def bar(value):
                if not value.endswith('bar'):
                    raise errors.DeserializationError('No bar suffix!')
                return value[:-3]

        assert Foo(bar='baz').to_partial_json() == {'Bar': 'bazbar'}
        assert Foo.from_json({'Bar': 'bazbar'}) == Foo(bar='baz')
        assert (Foo.from_json({'Bar': 'bazbar', 'Empty': '!'})
                == Foo(bar='baz', empty='!'))
        assert Foo(bar='baz').bar == 'baz'
    """
    @classmethod
    def _defaults(cls):
        """Get default fields values."""
        return dict([(slot, field.default) for slot, field
                     in six.iteritems(cls._fields)])
    def __init__(self, **kwargs):
        # pylint: disable=star-args
        # Start from field defaults; explicit kwargs override them.
        super(JSONObjectWithFields, self).__init__(
            **(dict(self._defaults(), **kwargs)))
    def encode(self, name):
        """Encode a single field.
        :param str name: Name of the field to be encoded.
        :raises errors.SerializationError: if field cannot be serialized
        :raises errors.Error: if field could not be found
        """
        try:
            field = self._fields[name]
        except KeyError:
            raise errors.Error("Field not found: {0}".format(name))
        return field.encode(getattr(self, name))
    def fields_to_partial_json(self):
        """Serialize fields to JSON.

        Fields whose values the field's ``omit`` predicate accepts are
        left out of the result.
        """
        jobj = {}
        omitted = set()
        for slot, field in six.iteritems(self._fields):
            value = getattr(self, slot)
            if field.omit(value):
                omitted.add((slot, value))
            else:
                try:
                    jobj[field.json_name] = field.encode(value)
                except errors.SerializationError as error:
                    # Re-raise with the offending slot and value for context.
                    raise errors.SerializationError(
                        'Could not encode {0} ({1}): {2}'.format(
                            slot, value, error))
        return jobj
    def to_partial_json(self):
        """Serialize this object to a partial JSON dict of its fields."""
        return self.fields_to_partial_json()
    @classmethod
    def _check_required(cls, jobj):
        """Raise DeserializationError if any non-omitempty field is absent."""
        missing = set()
        for _, field in six.iteritems(cls._fields):
            if not field.omitempty and field.json_name not in jobj:
                missing.add(field.json_name)
        if missing:
            raise errors.DeserializationError(
                'The following fields are required: {0}'.format(
                    ','.join(missing)))
    @classmethod
    def fields_from_json(cls, jobj):
        """Deserialize fields from JSON."""
        cls._check_required(jobj)
        fields = {}
        for slot, field in six.iteritems(cls._fields):
            if field.json_name not in jobj and field.omitempty:
                # Absent omitempty fields fall back to their declared default.
                fields[slot] = field.default
            else:
                value = jobj[field.json_name]
                try:
                    fields[slot] = field.decode(value)
                except errors.DeserializationError as error:
                    raise errors.DeserializationError(
                        'Could not decode {0!r} ({1!r}): {2}'.format(
                            slot, value, error))
        return fields
    @classmethod
    def from_json(cls, jobj):
        """Construct an instance from a JSON dict of field values."""
        return cls(**cls.fields_from_json(jobj))
def encode_b64jose(data):
    """Serialize bytes as a JOSE Base-64 text field.

    :param bytes data: raw bytes to encode.
    :rtype: unicode
    """
    encoded = b64.b64encode(data)
    # The Base-64 alphabet is pure ASCII, so this decode cannot fail.
    return encoded.decode('ascii')
def decode_b64jose(data, size=None, minimum=False):
    """Decode a JOSE Base-64 text field back into bytes.

    :param unicode data: Base-64 text to decode.
    :param int size: required decoded length, if any.
    :param bool minimum: treat ``size`` as a lower bound instead of an
        exact length requirement.
    :rtype: bytes
    """
    # Python 2's base64 raises TypeError on malformed input; Python 3
    # raises binascii.Error.
    error_cls = TypeError if six.PY2 else binascii.Error
    try:
        decoded = b64.b64decode(data.encode())
    except error_cls as error:
        raise errors.DeserializationError(error)
    if size is not None:
        too_short = minimum and len(decoded) < size
        wrong_length = not minimum and len(decoded) != size
        if too_short or wrong_length:
            raise errors.DeserializationError(
                "Expected at least or exactly {0} bytes".format(size))
    return decoded
def encode_hex16(value):
    """Encode bytes as a lowercase hexadecimal text field.

    :param bytes value: raw bytes to hexlify.
    :rtype: unicode
    """
    hexed = binascii.hexlify(value)
    return hexed.decode()
def decode_hex16(value, size=None, minimum=False):
    """Decode a hex-encoded text field back into bytes.

    :param unicode value: hexadecimal string to decode.
    :param int size: required decoded length in bytes, if any.
    :param bool minimum: treat ``size`` as a lower bound instead of an
        exact length requirement.
    :rtype: bytes
    """
    value = value.encode()
    # Two hex characters encode one byte, hence the factor of two.
    if size is not None:
        too_short = minimum and len(value) < size * 2
        wrong_length = not minimum and len(value) != size * 2
        if too_short or wrong_length:
            raise errors.DeserializationError()
    # Python 2's binascii raises TypeError on malformed input; Python 3
    # raises binascii.Error.
    error_cls = TypeError if six.PY2 else binascii.Error
    try:
        return binascii.unhexlify(value)
    except error_cls as error:
        raise errors.DeserializationError(error)
def encode_cert(cert):
    """Encode certificate as JOSE Base-64 DER.

    :type cert: `OpenSSL.crypto.X509` wrapped in `.ComparableX509`
    :rtype: unicode
    """
    der = OpenSSL.crypto.dump_certificate(
        OpenSSL.crypto.FILETYPE_ASN1, cert.wrapped)
    return encode_b64jose(der)
def decode_cert(b64der):
    """Decode JOSE Base-64 DER-encoded certificate.

    :param unicode b64der: Base-64 DER text to decode.
    :rtype: `OpenSSL.crypto.X509` wrapped in `.ComparableX509`
    """
    # decode_b64jose raises DeserializationError itself on bad Base-64,
    # so only the OpenSSL load needs the crypto.Error translation below.
    der = decode_b64jose(b64der)
    try:
        return util.ComparableX509(OpenSSL.crypto.load_certificate(
            OpenSSL.crypto.FILETYPE_ASN1, der))
    except OpenSSL.crypto.Error as error:
        raise errors.DeserializationError(error)
def encode_csr(csr):
    """Encode CSR as JOSE Base-64 DER.

    :type csr: `OpenSSL.crypto.X509Req` wrapped in `.ComparableX509`
    :rtype: unicode
    """
    der = OpenSSL.crypto.dump_certificate_request(
        OpenSSL.crypto.FILETYPE_ASN1, csr.wrapped)
    return encode_b64jose(der)
def decode_csr(b64der):
    """Decode JOSE Base-64 DER-encoded CSR.

    :param unicode b64der: Base-64 DER text to decode.
    :rtype: `OpenSSL.crypto.X509Req` wrapped in `.ComparableX509`
    """
    # decode_b64jose raises DeserializationError itself on bad Base-64,
    # so only the OpenSSL load needs the crypto.Error translation below.
    der = decode_b64jose(b64der)
    try:
        return util.ComparableX509(OpenSSL.crypto.load_certificate_request(
            OpenSSL.crypto.FILETYPE_ASN1, der))
    except OpenSSL.crypto.Error as error:
        raise errors.DeserializationError(error)
class TypedJSONObjectWithFields(JSONObjectWithFields):
    """JSON object with type.

    Maintains a registry (``TYPES``) mapping a type tag to a concrete
    subclass, so that :meth:`from_json` can dispatch to the right class
    based on the value of ``type_field_name``.
    """
    typ = NotImplemented
    """Type of the object. Subclasses must override."""
    type_field_name = "type"
    """Field name used to distinguish different object types.
    Subclasses will probably have to override this.
    """
    TYPES = NotImplemented
    """Types registered for JSON deserialization"""
    @classmethod
    def register(cls, type_cls, typ=None):
        """Register class for JSON deserialization.

        Usable as a class decorator; returns ``type_cls`` unchanged.
        """
        typ = type_cls.typ if typ is None else typ
        cls.TYPES[typ] = type_cls
        return type_cls
    @classmethod
    def get_type_cls(cls, jobj):
        """Get the registered class for ``jobj``."""
        if cls in six.itervalues(cls.TYPES):
            if cls.type_field_name not in jobj:
                raise errors.DeserializationError(
                    "Missing type field ({0})".format(cls.type_field_name))
            # cls is already registered type_cls, force to use it
            # so that, e.g Revocation.from_json(jobj) fails if
            # jobj["type"] != "revocation".
            return cls
        if not isinstance(jobj, dict):
            raise errors.DeserializationError(
                "{0} is not a dictionary object".format(jobj))
        try:
            typ = jobj[cls.type_field_name]
        except KeyError:
            raise errors.DeserializationError("missing type field")
        try:
            return cls.TYPES[typ]
        except KeyError:
            raise errors.UnrecognizedTypeError(typ, jobj)
    def to_partial_json(self):
        """Get JSON serializable object.
        :returns: Serializable JSON object representing ACME typed object.
            :meth:`validate` will almost certainly not work, due to reasons
            explained in :class:`josepy.interfaces.IJSONSerializable`.
        :rtype: dict
        """
        jobj = self.fields_to_partial_json()
        # Stamp the object with its type tag so from_json can dispatch.
        jobj[self.type_field_name] = self.typ
        return jobj
    @classmethod
    def from_json(cls, jobj):
        """Deserialize ACME object from valid JSON object.
        :raises josepy.errors.UnrecognizedTypeError: if type
            of the ACME object has not been registered.
        """
        # make sure subclasses don't cause infinite recursive from_json calls
        type_cls = cls.get_type_cls(jobj)
        return type_cls(**type_cls.fields_from_json(jobj))
| 2.890625 | 3 |
norm/executable/schema/variable.py | reasoned-ai/norm | 8 | 13361 | <reponame>reasoned-ai/norm
from norm.models.norm import Status, Lambda
from norm.executable import NormExecutable
from typing import Union, List
import logging
logger = logging.getLogger(__name__)
class VariableName(NormExecutable):
    def __init__(self, scope, name):
        """
        The variable and its scope
        :param scope: the scope of the variable
        :type scope: Union[VariableName, EvaluationExpr]
        :param name: the name of the variable
        :type name: str
        """
        super().__init__()
        # Local import avoids a circular dependency with the expression module.
        from norm.executable.expression.evaluation import EvaluationExpr
        self.scope: Union[VariableName, EvaluationExpr] = scope
        self.name: str = name
        # Optional name to project the result column to; None keeps the
        # original column name.
        self.output_projection: str = None
    @property
    def eval_lam(self):
        # The Lambda this variable evaluates against.
        return self.lam
    def __str__(self):
        # Fully qualified name: "<scope><sep><name>" when scoped.
        if self.scope is not None:
            return '{}{}{}'.format(self.scope.name, self.VARIABLE_SEPARATOR, self.name)
        else:
            return self.name
    def variable_type(self):
        return self.lam
    def compile(self, context):
        """Resolve this name against the compiler context.

        Depending on what the name refers to, this may return a
        ColumnVariable, JoinVariable, EvaluationExpr or self with ``lam``
        bound to a retrieved type.
        """
        # The special "that" variable binds to the context's current result.
        if self.name == context.THAT_VARIABLE_NAME:
            self.lam = context.that
            return self
        session = context.session
        if self.scope is None:
            name = self.name
            scope = context.get_scope(name)
            if scope is not None:
                # The name is a column of a Lambda already in scope.
                return ColumnVariable(scope, name).compile(context)
            else:
                # Look the name up as a type: first in the current context
                # namespace, then in the search namespaces (READY only).
                lam = self.try_retrieve_type(session, context.context_namespace, name)
                if lam is None:
                    lam = self.try_retrieve_type(session, context.search_namespaces, name, status=Status.READY)
                self.lam = lam
                return self
        else:
            if isinstance(self.scope, ColumnVariable) and str(self) in self.scope.lam:
                # Already joined
                self.scope.name = str(self)
                return self.scope
            lam = self.scope.variable_type()
            if self.name in lam:
                if isinstance(self.scope, (ColumnVariable, JoinVariable)):
                    # Join lam to the scope for the column
                    return JoinVariable(self.scope, self.name, lam).compile(context)
                else:
                    # A column of the scope variable
                    return ColumnVariable(self.scope, self.name).compile(context)
            else:
                # An evaluation whose first argument is the scope
                lam = self.try_retrieve_type(session, context.context_namespace, self.name)
                if lam is None:
                    lam = self.try_retrieve_type(session, context.search_namespaces, self.name, status=Status.READY)
                assert(lam is not None)
                self.lam = lam
                from norm.executable.expression.argument import ArgumentExpr
                arg = ArgumentExpr(expr=self.scope)
                self.scope = None
                from norm.executable.expression.evaluation import EvaluationExpr
                return EvaluationExpr([arg], self)
class UnquoteVariable(VariableName):
    def __init__(self, name, unquoted_variables):
        """
        The variable and its scope
        :param name: the name of the variable
        :type name: str
        :param unquoted_variables: a list of variables to unquote
        :type unquoted_variables: List[VariableName]
        """
        super().__init__(None, name)
        self.unquoted_variables: List[VariableName] = unquoted_variables
    def __str__(self):
        return self.name
    def variable_type(self):
        # An unquote variable has no single column type.
        raise NotImplementedError
    def compile(self, context):
        """Validate that all unquoted variables are columns of one Lambda."""
        assert(len(self.unquoted_variables) > 0)
        assert(all([isinstance(v, ColumnVariable) for v in self.unquoted_variables]))
        lam = self.unquoted_variables[0].lam
        assert(all([v.lam is lam for v in self.unquoted_variables]))
        self.lam = lam
        return self
    def execute(self, context):
        """Execute the code stored in the unquoted column, group by group.

        For each distinct value of the unquoted column, the value is run
        through ``context.execute``; groups whose execution fails or does
        not yield a DataFrame contribute nothing. Results are merged back
        onto the original data.
        """
        # TODO: hacky
        assert(len(self.unquoted_variables) == 1)
        vname = self.unquoted_variables[0].name
        data = self.lam.data
        from pandas import DataFrame
        def _execute(x):
            try:
                result = context.execute(x[vname].values[0])
                if isinstance(result, DataFrame):
                    return result
                else:
                    return None
            # Best-effort per group: swallow ordinary errors only. A bare
            # ``except:`` here would also trap SystemExit/KeyboardInterrupt.
            except Exception:
                return None
        results = data.groupby(vname).apply(_execute).reset_index()
        if self.output_projection is not None:
            # Prefix result columns with the projection name; the OID column
            # is renamed to the projection itself.
            cols = dict((col, self.VARIABLE_SEPARATOR.join([self.output_projection, col]))
                        for col in results.columns if col != vname and col != self.lam.VAR_OID)
            if self.lam.VAR_OID in results.columns:
                cols[self.lam.VAR_OID] = self.output_projection
            results = results.rename(columns=cols)
        return data.merge(results, on=vname)
class ColumnVariable(VariableName):
    """A variable that refers to a single column of a Lambda."""

    def __init__(self, scope, name):
        super().__init__(scope, name)

    def __str__(self):
        return self.name

    def variable_type(self):
        return self.lam.get_type(self.name)

    def compile(self, context):
        from norm.engine import QuantifiedLambda
        if self.scope is None:
            # No explicit scope: bind to the Lambda currently in context.
            assert(context.scope is not None)
            self.lam = context.scope
        elif isinstance(self.scope, (Lambda, QuantifiedLambda)):
            # The scope already is the Lambda that owns this column.
            self.lam = self.scope
        else:
            self.lam = self.scope.lam
        return self

    def execute(self, context):
        column = self.lam.data[self.name]
        if self.output_projection is None:
            return column
        return column.rename(self.output_projection)
class JoinVariable(VariableName):
    """A column that lives on a joiner Lambda and is joined into the scope."""

    def __init__(self, scope, name, joiner):
        super().__init__(scope, name)
        self.lam = joiner

    def variable_type(self):
        return self.lam.get_type(self.name)

    def compile(self, context):
        # Nothing to resolve; the joiner Lambda was supplied at construction.
        return self

    def execute(self, context):
        base = self.scope.lam
        qualified = str(self)
        if qualified not in base.data.columns:
            # Pull the column out of the joiner, qualify its name, and join
            # it onto the scope's data keyed by the scope column.
            column = self.lam.data[[self.name]].rename(columns={self.name: qualified})
            base.data = base.data.join(column, on=str(self.scope))
        return base.data
| 2.453125 | 2 |
src/parserindexer/tika_parser.py | yyzhuang1991/parser-indexer-py | 8 | 13362 | <reponame>yyzhuang1991/parser-indexer-py
from __future__ import print_function
import os
import sys
import json
import tika
from tqdm import tqdm
from utils import LogUtil
from parser import Parser
from ioutils import read_lines
from tika import parser as tk_parser
class TikaParser(Parser):
    """Thin wrapper around the Apache TIKA parser."""

    def __init__(self, tika_server_url):
        super(TikaParser, self).__init__('tika_parser')
        if tika_server_url:
            # Client-only mode: talk to an already running TIKA server
            # instead of spawning a local one.
            os.environ['TIKA_CLIENT_ONLY'] = 'True'
            os.environ['TIKA_SERVER_ENDPOINT'] = tika_server_url
            print("Tika Server Endpoint %s" %
                  os.environ['TIKA_SERVER_ENDPOINT'])
        tika.initVM()

    def parse(self, file_path):
        """Parse one PDF file with Apache TIKA.

        Args:
            file_path (str): Path to a PDF file

        Returns:
            dict: parsed content, with the absolute input path under 'file'.

        Raises:
            RuntimeError: if the file is missing or TIKA fails internally.
        """
        if not os.path.exists(file_path):
            raise RuntimeError('%s error. File not found: %s' %
                               (self.parse_name, os.path.abspath(file_path)))
        try:
            parsed = tk_parser.from_file(file_path)
        except Exception:
            raise RuntimeError('Internal TIKA error occurred while parsing the '
                               'file: %s' % os.path.abspath(file_path))
        parsed['file'] = os.path.abspath(file_path)
        return parsed
def process(in_file, in_list, out_file, log_file, tika_server_url):
    """Parse one file or a list of files with TIKA, writing JSON lines.

    Args:
        in_file: path to a single input file (mutually exclusive with
            in_list)
        in_list: path to a text file listing input paths, one per line
        out_file: output path; one JSON document is written per line
        log_file: path of the processing log
        tika_server_url: optional TIKA server endpoint URL
    """
    # Log input parameters
    logger = LogUtil('lpsc-parser', log_file)
    logger.info('Input parameters')
    logger.info('in_file: %s' % in_file)
    logger.info('in_list: %s' % in_list)
    logger.info('out_file: %s' % out_file)
    logger.info('tika_server_url: %s' % tika_server_url)
    if in_file and in_list:
        print('[ERROR] in_file and in_list cannot be provided simultaneously')
        sys.exit(1)
    tika_parser = TikaParser(tika_server_url)
    if in_file:
        files = [in_file]
    else:
        files = read_lines(in_list)
    # json.dumps returns text, so the output must be opened in text mode:
    # the original 'wb' raises TypeError on Python 3 (and buffering=1 is
    # only valid for text mode). The with-statement also guarantees the
    # file is closed if an unexpected error escapes the loop.
    with open(out_file, 'w', 1) as out_f:
        for f in tqdm(files):
            try:
                tika_dict = tika_parser.parse(f)
                out_f.write(json.dumps(tika_dict))
                out_f.write('\n')
            except Exception as e:
                logger.info('TIKA parser failed: %s' % os.path.abspath(f))
                logger.error(e)
def main():
    """Command-line entry point: parse arguments and run :func:`process`."""
    import argparse
    arg_parser = argparse.ArgumentParser()
    source_group = arg_parser.add_mutually_exclusive_group(required=True)
    source_group.add_argument('-i', '--in_file', help='Path to input file')
    source_group.add_argument('-li', '--in_list', help='Path to input list')
    arg_parser.add_argument('-o', '--out_file', required=True,
                            help='Path to output JSON file')
    arg_parser.add_argument('-l', '--log_file', default='./tika-parser-log.txt',
                            help='Log file that contains processing information. '
                                 'It is default to ./tika-parser-log.txt unless '
                                 'otherwise specified.')
    arg_parser.add_argument('-p', '--tika_server_url', required=False,
                            help='Tika server URL')
    process(**vars(arg_parser.parse_args()))


if __name__ == '__main__':
    main()
| 2.40625 | 2 |
checkov/cloudformation/checks/resource/aws/DocDBAuditLogs.py | niradler/checkov | 0 | 13363 | <gh_stars>0
from checkov.cloudformation.checks.resource.base_resource_check import BaseResourceCheck
from checkov.common.parsers.node import DictNode
from checkov.common.models.enums import CheckResult, CheckCategories
class DocDBAuditLogs(BaseResourceCheck):
    """Checks that a DocDB cluster parameter group enables audit logging."""

    def __init__(self) -> None:
        super().__init__(
            name="Ensure DocDB has audit logs enabled",
            id="CKV_AWS_104",
            categories=[CheckCategories.LOGGING],
            supported_resources=["AWS::DocDB::DBClusterParameterGroup"],
        )

    def scan_resource_conf(self, conf: DictNode) -> CheckResult:
        # Pass only when the parameter group explicitly enables audit_logs.
        parameters = conf.get("Properties", {}).get("Parameters", {})
        if parameters.get("audit_logs") == "enabled":
            return CheckResult.PASSED
        return CheckResult.FAILED


# Instantiating the check registers it with the checkov runner.
check = DocDBAuditLogs()
| 2.125 | 2 |
py/server/deephaven/server/plugin/__init__.py | lbooker42/deephaven-core | 0 | 13364 | #
# Copyright (c) 2016-2022 Deephaven Data Labs and Patent Pending
#
import jpy
_JCallbackAdapter = jpy.get_type('io.deephaven.server.plugin.python.CallbackAdapter')
def initialize_all_and_register_into(callback: _JCallbackAdapter):
    """Discover and register all Python plugins through ``callback``.

    ``deephaven.plugin`` is an optional dependency: when it is absent there
    are simply no Python plugins to register, so that particular import
    failure is treated as a no-op rather than an error.
    """
    try:
        from . import register
    except ModuleNotFoundError as missing:
        if missing.name != 'deephaven.plugin':
            raise
        return
    register.initialize_all_and_register_into(callback)
| 2.03125 | 2 |
mypy/test/testoutput.py | TimSimpsonR/mypy | 1 | 13365 | """Tests for parse tree pretty printing that preserves formatting
Test case descriptions are in file test/data/output.test.
"""
import os.path
import re
from typing import Undefined, Any
from mypy import build
from mypy.myunit import Suite, run_test
from mypy.test.helpers import assert_string_arrays_equal
from mypy.test.data import parse_test_cases
from mypy.test.config import test_data_prefix, test_temp_dir
from mypy.parse import parse
from mypy.output import OutputVisitor
from mypy.errors import CompileError
# Files which contain test case descriptions.
output_files = ['output.test']
class OutputSuite(Suite):
    """Suite that loads identity-transform test cases from the data files."""

    def cases(self):
        collected = []
        for data_file in output_files:
            path = os.path.join(test_data_prefix, data_file)
            collected.extend(parse_test_cases(path, test_output,
                                              test_temp_dir, True))
        return collected
def test_output(testcase):
    """Perform an identity source code transformation test case.

    Parses (and optionally semantically analyzes) the test case input,
    pretty-prints it back, and asserts the result matches the expected
    output (or the input itself when no explicit output is given).
    """
    expected = testcase.output
    if expected == []:
        # No explicit expected output: the transformation must be identity.
        expected = testcase.input
    try:
        src = '\n'.join(testcase.input)
        # Parse and semantically analyze the source program.
        # Test case names with a special suffix get semantically analyzed. This
        # lets us test that semantic analysis does not break source code pretty
        # printing.
        if testcase.name.endswith('_SemanticAnalyzer'):
            result = build.build('main',
                                 target=build.SEMANTIC_ANALYSIS,
                                 program_text=src,
                                 flags=[build.TEST_BUILTINS],
                                 alt_lib_path=test_temp_dir)
            files = result.files
        else:
            files = {'main': parse(src, 'main')}
        a = []
        first = True
        # Produce an output containing the pretty-printed forms (with original
        # formatting) of all the relevant source files.
        for fnam in sorted(files.keys()):
            f = files[fnam]
            # Omit the builtins and files marked for omission.
            if (not f.path.endswith(os.sep + 'builtins.py') and
                    '-skip.' not in f.path):
                # Add file name + colon for files other than the first.
                if not first:
                    a.append('{}:'.format(fix_path(remove_prefix(
                        f.path, test_temp_dir))))
                v = OutputVisitor()
                f.accept(v)
                s = v.output()
                if s != '':
                    a += s.split('\n')
                first = False
    except CompileError as e:
        # Compilation errors become the actual output to compare.
        a = e.messages
    assert_string_arrays_equal(
        expected, a, 'Invalid source code output ({}, line {})'.format(
            testcase.file, testcase.line))
def remove_prefix(path, prefix):
    """Strip ``prefix`` (and one following path separator) from ``path``."""
    # Double backslashes so Windows path prefixes survive the regex.
    pattern = '^' + prefix.replace('\\', '\\\\')
    stripped = re.sub(pattern, '', path)
    return stripped[1:] if stripped.startswith(os.sep) else stripped
def fix_path(path):
    """Normalize Windows backslash separators to forward slashes."""
    return '/'.join(path.split('\\'))
if __name__ == '__main__':
    # Allow running this suite directly; CLI args are forwarded to the runner.
    import sys
    run_test(OutputSuite(), sys.argv[1:])
| 2.734375 | 3 |
tests/test_SklearnDecisionTreeConverters.py | c-bata/sklearn-onnx | 1 | 13366 | # -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import unittest
from sklearn.tree import DecisionTreeClassifier
from sklearn.tree import DecisionTreeRegressor
from skl2onnx.common.data_types import onnx_built_with_ml
from test_utils import (
dump_one_class_classification,
dump_binary_classification,
dump_multiple_classification,
)
from test_utils import dump_multiple_regression, dump_single_regression
class TestSklearnDecisionTreeModels(unittest.TestCase):
    """Round-trip conversion tests for sklearn decision trees to ONNX."""

    @unittest.skipIf(not onnx_built_with_ml(),
                     reason="Requires ONNX-ML extension.")
    def test_decision_tree_classifier(self):
        # Exercise one-class, binary and multi-class conversion paths with
        # the same untrained-template model. The allow_failure expressions
        # are evaluated by the dump helpers to skip known-broken runtime
        # version combinations.
        model = DecisionTreeClassifier()
        dump_one_class_classification(
            model,
            # Operator cast-1 is not implemented in onnxruntime
            allow_failure="StrictVersion(onnx.__version__)"
                          " < StrictVersion('1.3') or "
                          "StrictVersion(onnxruntime.__version__)"
                          " <= StrictVersion('0.2.1')",
        )
        dump_binary_classification(
            model,
            allow_failure="StrictVersion(onnx.__version__)"
                          " < StrictVersion('1.3') or "
                          "StrictVersion(onnxruntime.__version__)"
                          " <= StrictVersion('0.2.1')",
        )
        dump_multiple_classification(
            model,
            allow_failure="StrictVersion(onnx.__version__)"
                          " < StrictVersion('1.3') or "
                          "StrictVersion(onnxruntime.__version__)"
                          " <= StrictVersion('0.2.1')",
        )

    def test_decision_tree_regressor(self):
        # Single- and multi-output regression conversion paths.
        model = DecisionTreeRegressor()
        dump_single_regression(
            model,
            allow_failure="StrictVersion(onnx.__version__)"
                          " < StrictVersion('1.2')",
        )
        dump_multiple_regression(
            model,
            allow_failure="StrictVersion(onnx.__version__)"
                          " < StrictVersion('1.2')",
        )
if __name__ == "__main__":
    # Run the suite directly with the stdlib unittest runner.
    unittest.main()
| 2.25 | 2 |
tests/test_inference.py | MihailSalnikov/microscopeimagequality | 77 | 13367 | <gh_stars>10-100
import logging
import os
import tempfile
import PIL.Image
import numpy
import tensorflow
import microscopeimagequality.constants
import microscopeimagequality.data_provider
import microscopeimagequality.evaluation
import microscopeimagequality.prediction
class Inference(tensorflow.test.TestCase):
    """Integration tests for microscopeimagequality prediction/inference."""

    def setUp(self):
        # Test fixtures live next to this file; outputs go to a fresh tempdir.
        self.input_directory = os.path.join(os.path.dirname(os.path.abspath(__file__))
                                            , "data")
        self.test_data_directory = os.path.join(os.path.dirname(os.path.abspath(__file__))
                                                , "data")
        self.test_dir = tempfile.mkdtemp()
        self.glob_images = os.path.join(self.input_directory, 'images_for_glob_test/*')
        self.patch_width = 84
        self.num_classes = 11

    def testPatchValuesToMask(self):
        # Scale [0, 1] patch values to the full uint16 range.
        values = numpy.round(
            numpy.array([[0.2, 0.4, 0.5], [1.0, 0.0, 0.3]]) *
            numpy.iinfo(numpy.uint16).max).astype(numpy.uint16)
        mask = microscopeimagequality.prediction.patch_values_to_mask(values, self.patch_width)
        # A 2x3 grid of 84-pixel patches yields a 168x252 mask.
        self.assertEquals((168, 252), mask.shape)
        self.assertEquals(numpy.iinfo(numpy.uint16).max, numpy.max(mask))

    def testSaveMasksAndAnnotatedVisualization(self):
        test_filename = 'BBBC006_z_aligned__a01__s1__w1_10.png'
        orig_name = os.path.join(self.test_data_directory, test_filename)
        prediction = 1
        certainties = {name: 0.3 for name in microscopeimagequality.evaluation.CERTAINTY_NAMES}
        num_patches = 4
        np_images = numpy.ones((num_patches, self.patch_width, self.patch_width, 1))
        # Uniform probabilities, except two patches given concentrated mass.
        np_probabilities = numpy.ones(
            (num_patches, self.num_classes)) / self.num_classes
        np_probabilities[0, :] = 0
        np_probabilities[0, 1] = 1.0
        np_probabilities[1, :] = 0
        np_probabilities[1, 2] = 0.4
        np_probabilities[1, -1] = 0.6
        np_labels = 2 * numpy.ones(num_patches)
        image_height = int(numpy.sqrt(num_patches)) * self.patch_width
        image_width = image_height
        microscopeimagequality.prediction.save_masks_and_annotated_visualization(
            orig_name, self.test_dir, prediction, certainties, np_images,
            np_probabilities, np_labels, self.patch_width, image_height,
            image_width)
        # Check that output has been generated and is the correct shape.
        expected_size = PIL.Image.open(orig_name, 'r').size
        expected_visualization_path = os.path.join(
            self.test_dir,
            'actual2_pred1_mean_certainty=0.300orig_name=%s' % test_filename)
        expected_predictions_path = os.path.join(self.test_dir,
                                                 microscopeimagequality.constants.PREDICTIONS_MASK_FORMAT %
                                                 test_filename)
        expected_certainties_path = os.path.join(self.test_dir,
                                                 microscopeimagequality.constants.CERTAINTY_MASK_FORMAT %
                                                 test_filename)
        expected_valid_path = os.path.join(self.test_dir,
                                           microscopeimagequality.constants.VALID_MASK_FORMAT %
                                           test_filename)
        img = PIL.Image.open(expected_visualization_path, 'r')
        self.assertEquals(expected_size, img.size)
        img = PIL.Image.open(expected_predictions_path, 'r')
        self.assertEquals(expected_size, img.size)
        img = PIL.Image.open(expected_certainties_path, 'r')
        self.assertEquals(expected_size, img.size)
        img = PIL.Image.open(expected_valid_path, 'r')
        self.assertEquals(expected_size, img.size)

    def testSaveMasksAndAnnotatedVisualizationTif(self):
        # Same flow as above, but the .tif input should produce .png masks.
        test_filename = ('00_mcf-z-stacks-03212011_k06_s2_w12667264a'
                         '-6432-4f7e-bf58-625a1319a1c9.tif')
        orig_name = os.path.join(self.test_data_directory, test_filename)
        prediction = 1
        certainties = {name: 0.3 for name in microscopeimagequality.evaluation.CERTAINTY_NAMES}
        num_patches = 4
        np_images = numpy.ones((num_patches, self.patch_width, self.patch_width, 1))
        np_probabilities = numpy.ones(
            (num_patches, self.num_classes)) / self.num_classes
        image_height = int(numpy.sqrt(num_patches)) * self.patch_width
        image_width = image_height
        np_labels = 2 * numpy.ones(num_patches)
        microscopeimagequality.prediction.save_masks_and_annotated_visualization(
            orig_name, self.test_dir, prediction, certainties, np_images,
            np_probabilities, np_labels, self.patch_width, image_height,
            image_width)
        mask_formats = [
            microscopeimagequality.constants.CERTAINTY_MASK_FORMAT, microscopeimagequality.constants.PREDICTIONS_MASK_FORMAT,
            microscopeimagequality.constants.VALID_MASK_FORMAT
        ]
        for mask_format in mask_formats:
            orig_name_png = os.path.splitext(os.path.basename(orig_name))[0] + '.png'
            expected_file = os.path.join(self.test_dir,
                                         mask_format % orig_name_png)
            self.assertTrue(os.path.isfile(expected_file))

    def testRunModelInferenceFirstHalfRuns(self):
        # Build a TFRecord from the test images, then stand up the data
        # provider and model graph far enough to check the label shape.
        batch_size = 1
        num_classes = 11
        model_patch_width = 84
        image_width = 84
        image_height = 84
        tfexamples_tfrecord = microscopeimagequality.prediction.build_tfrecord_from_pngs(
            [self.glob_images],
            use_unlabeled_data=True,
            num_classes=num_classes,
            eval_directory=self.test_dir,
            image_background_value=0,
            image_brightness_scale=1,
            shard_num=0,
            num_shards=1,
            image_width=image_width,
            image_height=image_height)
        num_samples = microscopeimagequality.data_provider.get_num_records(tfexamples_tfrecord %
                                                                           microscopeimagequality.prediction._SPLIT_NAME)
        logging.info('TFRecord has %g samples.', num_samples)
        g = tensorflow.Graph()
        with g.as_default():
            images, one_hot_labels, _, _ = microscopeimagequality.data_provider.provide_data(
                tfexamples_tfrecord,
                split_name=microscopeimagequality.prediction._SPLIT_NAME,
                batch_size=batch_size,
                num_classes=num_classes,
                image_width=84,
                image_height=84,
                patch_width=model_patch_width,
                randomize=False,
                num_threads=1)
            labels = microscopeimagequality.evaluation.get_model_and_metrics(
                images,
                num_classes=num_classes,
                one_hot_labels=one_hot_labels,
                is_training=False).labels
            self.assertEquals(batch_size, labels.get_shape())
| 2.0625 | 2 |
config.py | hiankun/qb_test | 4 | 13368 | <filename>config.py<gh_stars>1-10
"""
QuickBot wiring config.
Specifies which pins are used for motor control, IR sensors and wheel encoders.
"""
# Motor pins: (dir1_pin, dir2_pin, pwd_pin)
RIGHT_MOTOR_PINS = 'P8_12', 'P8_10', 'P9_14'
LEFT_MOTOR_PINS = 'P8_14', 'P8_16', 'P9_16'
# IR sensors (clock-wise, starting with the rear left sensor):
# rear-left, front-left, front, front-right, rear-right
IR_PINS = ('P9_38', 'P9_40', 'P9_36', 'P9_35', 'P9_33')
# Wheel encoder sensors: (left, right)
ENC_PINS = ('P9_39', 'P9_37')
| 2.09375 | 2 |
tools/tests/skimage_self_test.py | yinquan529/platform-external-skia | 1 | 13369 | <reponame>yinquan529/platform-external-skia<gh_stars>1-10
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Self-test for skimage.
import filecmp
import os
import subprocess
import sys
import tempfile
class BinaryNotFoundException(Exception):
    """Raised when no skimage binary exists in any known build directory."""

    _MESSAGE = ("Could not find binary!\n"
                "Did you forget to build the tools project?\n"
                "Self tests failed")

    def __str__(self):
        return self._MESSAGE
# Find a path to the binary to use. Iterates through a list of possible
# locations the binary may be.
def PickBinaryPath(base_dir):
POSSIBLE_BINARY_PATHS = [
'out/Debug/skimage',
'out/Release/skimage',
'xcodebuild/Debug/skimage',
'xcodebuild/Release/skimage',
]
for binary in POSSIBLE_BINARY_PATHS:
binary_full_path = os.path.join(base_dir, binary)
if (os.path.exists(binary_full_path)):
return binary_full_path
raise BinaryNotFoundException
# Quit early if two files have different content.
# (Python 2 script: uses print statements.) Relies on filecmp's default
# shallow comparison mode, falling back to content comparison as needed;
# exits the whole self-test run on the first mismatch.
def DieIfFilesMismatch(expected, actual):
    if not filecmp.cmp(expected, actual):
        print 'Error: file mismatch! expected=%s , actual=%s' % (
            expected, actual)
        exit(1)
def test_invalid_file(file_dir, skimage_binary):
    """ Test the return value of skimage when an invalid file is decoded.
    If there is no expectation file, or the file expects a particular
    result, skimage should return nonzero indicating failure.
    If the file has no expectation, or ignore-failure is set to true,
    skimage should return zero indicating success. """
    invalid_file = os.path.join(file_dir, "skimage", "input", "bad-images",
                                "invalid.png")
    # No expectations file:
    args = [skimage_binary, "--readPath", invalid_file]
    result = subprocess.call(args)
    if 0 == result:
        print "'%s' should have reported failure!" % " ".join(args)
        exit(1)
    # Directory holding all expectations files
    expectations_dir = os.path.join(file_dir, "skimage", "input", "bad-images")
    # Expectations file expecting a valid decode:
    incorrect_expectations = os.path.join(expectations_dir,
                                          "incorrect-results.json")
    args = [skimage_binary, "--readPath", invalid_file,
            "--readExpectationsPath", incorrect_expectations]
    result = subprocess.call(args)
    if 0 == result:
        print "'%s' should have reported failure!" % " ".join(args)
        exit(1)
    # Empty expectations: the decode failure is merely "missing" from the
    # expectations, so skimage exits zero but mentions it in its output.
    empty_expectations = os.path.join(expectations_dir, "empty-results.json")
    output = subprocess.check_output([skimage_binary, "--readPath", invalid_file,
                                      "--readExpectationsPath",
                                      empty_expectations],
                                     stderr=subprocess.STDOUT)
    if not "Missing" in output:
        # Another test (in main()) tests to ensure that "Missing" does not appear
        # in the output. That test could be passed if the output changed so
        # "Missing" never appears. This ensures that an error is not missed if
        # that happens.
        print "skimage output changed! This may cause other self tests to fail!"
        exit(1)
    # Ignore failure: expectations mark the failure as ignorable, so skimage
    # exits zero but still reports it as a failure in its output.
    ignore_expectations = os.path.join(expectations_dir, "ignore-results.json")
    output = subprocess.check_output([skimage_binary, "--readPath", invalid_file,
                                      "--readExpectationsPath",
                                      ignore_expectations],
                                     stderr=subprocess.STDOUT)
    if not "failures" in output:
        # Another test (in main()) tests to ensure that "failures" does not
        # appear in the output. That test could be passed if the output changed
        # so "failures" never appears. This ensures that an error is not missed
        # if that happens.
        print "skimage output changed! This may cause other self tests to fail!"
        exit(1)
def test_incorrect_expectations(file_dir, skimage_binary):
    """ Test that comparing to incorrect expectations fails, unless
    ignore-failures is set to true. """
    valid_file = os.path.join(file_dir, "skimage", "input",
                              "images-with-known-hashes",
                              "1209453360120438698.png")
    expectations_dir = os.path.join(file_dir, "skimage", "input",
                                    "images-with-known-hashes")
    # Expectations that deliberately do not match the image's real hash:
    # skimage must exit nonzero.
    incorrect_results = os.path.join(expectations_dir,
                                     "incorrect-results.json")
    args = [skimage_binary, "--readPath", valid_file, "--readExpectationsPath",
            incorrect_results]
    result = subprocess.call(args)
    if 0 == result:
        print "'%s' should have reported failure!" % " ".join(args)
        exit(1)
    # Same mismatch but with ignore-failure set: skimage must exit zero.
    ignore_results = os.path.join(expectations_dir, "ignore-failures.json")
    subprocess.check_call([skimage_binary, "--readPath", valid_file,
                           "--readExpectationsPath", ignore_results])
def main():
    """Self-test driver: exercises skimage expectation creation/validation
    end-to-end and exits nonzero on the first discrepancy."""
    # Use the directory of this file as the out directory
    file_dir = os.path.abspath(os.path.dirname(__file__))
    trunk_dir = os.path.normpath(os.path.join(file_dir, os.pardir, os.pardir))
    # Find the binary
    skimage_binary = PickBinaryPath(trunk_dir)
    print "Running " + skimage_binary

    # Generate an expectations file from known images.
    images_dir = os.path.join(file_dir, "skimage", "input",
                              "images-with-known-hashes")
    expectations_path = os.path.join(file_dir, "skimage", "output-actual",
                                     "create-expectations", "expectations.json")
    subprocess.check_call([skimage_binary, "--readPath", images_dir,
                           "--createExpectationsPath", expectations_path])

    # Make sure the expectations file was generated correctly.
    golden_expectations = os.path.join(file_dir, "skimage", "output-expected",
                                       "create-expectations",
                                       "expectations.json")
    DieIfFilesMismatch(expected=golden_expectations, actual=expectations_path)

    # Tell skimage to read back the expectations file it just wrote, and
    # confirm that the images in images_dir match it.
    output = subprocess.check_output([skimage_binary, "--readPath", images_dir,
                                      "--readExpectationsPath",
                                      expectations_path],
                                     stderr=subprocess.STDOUT)
    # Although skimage succeeded, it would have reported success if the file
    # was missing from the expectations file. Consider this a failure, since
    # the expectations file was created from this same image. (It will print
    # "Missing" in this case before listing the missing expectations).
    if "Missing" in output:
        print "Expectations file was missing expectations!"
        print output
        exit(1)
    # Again, skimage would succeed if there were known failures (and print
    # "failures"), but there should be no failures, since the file just
    # created did not include failures to ignore.
    if "failures" in output:
        print "Image failed!"
        print output
        exit(1)

    test_incorrect_expectations(file_dir=file_dir,
                                skimage_binary=skimage_binary)

    # Generate an expectations file from an empty directory.
    empty_dir = tempfile.mkdtemp()
    expectations_path = os.path.join(file_dir, "skimage", "output-actual",
                                     "empty-dir", "expectations.json")
    subprocess.check_call([skimage_binary, "--readPath", empty_dir,
                           "--createExpectationsPath", expectations_path])
    golden_expectations = os.path.join(file_dir, "skimage", "output-expected",
                                       "empty-dir", "expectations.json")
    DieIfFilesMismatch(expected=golden_expectations, actual=expectations_path)
    os.rmdir(empty_dir)

    # Generate an expectations file from a nonexistent directory.
    expectations_path = os.path.join(file_dir, "skimage", "output-actual",
                                     "nonexistent-dir", "expectations.json")
    subprocess.check_call([skimage_binary, "--readPath", "/nonexistent/dir",
                           "--createExpectationsPath", expectations_path])
    golden_expectations = os.path.join(file_dir, "skimage", "output-expected",
                                       "nonexistent-dir", "expectations.json")
    DieIfFilesMismatch(expected=golden_expectations, actual=expectations_path)

    test_invalid_file(file_dir=file_dir, skimage_binary=skimage_binary)

    # Done with all tests.
    print "Self tests succeeded!"
if __name__ == "__main__":
main()
| 2.328125 | 2 |
pymachine/condition.py | landrew31/pymachine | 1 | 13370 | <filename>pymachine/condition.py
from collections import namedtuple

# Immutable (state, input) pair used as the lookup key for state-machine
# transitions.
Condition = namedtuple('Condition', 'current_state input_character')
| 2.140625 | 2 |
examples/simple_lakehouse/simple_lakehouse/assets.py | bitdotioinc/dagster | 2 | 13371 | """Asset definitions for the simple_lakehouse example."""
import pandas as pd
from lakehouse import Column, computed_table, source_table
from pyarrow import date32, float64, string
# Source asset: raw SFO Q2 weather sample with a float temperature ("tmpf")
# and a string date column.
sfo_q2_weather_sample_table = source_table(
    path="data", columns=[Column("tmpf", float64()), Column("valid_date", string())],
)


@computed_table(
    input_assets=[sfo_q2_weather_sample_table],
    columns=[Column("valid_date", date32()), Column("max_tmpf", float64())],
)
def daily_temperature_highs_table(sfo_q2_weather_sample: pd.DataFrame) -> pd.DataFrame:
    """Computes the temperature high for each day"""
    # NOTE(review): assumes the incoming frame has a "valid" timestamp column
    # (distinct from "valid_date" declared above) — confirm against the data.
    # Also mutates the input frame in place by adding "valid_date".
    sfo_q2_weather_sample["valid_date"] = pd.to_datetime(sfo_q2_weather_sample["valid"])
    # Max over all remaining columns per day; "tmpf" is renamed to "max_tmpf"
    # to match the declared output schema.
    return sfo_q2_weather_sample.groupby("valid_date").max().rename(columns={"tmpf": "max_tmpf"})
| 2.765625 | 3 |
tests/testresources/pytest_resource_path_ini.py | yukihiko-shinoda/pytest-resource-path | 5 | 13372 | <filename>tests/testresources/pytest_resource_path_ini.py
"""Implements test for pytest-resource-path Fixtures with pytest.ini."""
from pathlib import Path
import pytest
def test_resource_path_ini(resource_path, request):
"""Fixture resource_path should be following absolute path."""
assert resource_path == Path(str(request.fspath)).parents[1] / Path(
"data/test_package/test_module_something/test_resource_path_ini"
)
def test_resource_path_root_ini(resource_path_root, request):
"""Fixture resource_path_root should be following absolute path."""
assert resource_path_root == Path(str(request.fspath)).parents[1] / Path("data")
@pytest.fixture(scope="package")
def resource_path_root_scope_package_ini(resource_path_root):
    # Package-scoped wrapper around resource_path_root, used to verify that
    # the plugin's fixtures also work when requested at package scope.
    yield resource_path_root
# Reason: To define fixture in same module. pylint: disable=redefined-outer-name
def test_resource_path_root_scope_package_ini(resource_path_root_scope_package_ini, request):
assert resource_path_root_scope_package_ini == Path(str(request.fspath)).parents[1] / Path("data")
| 2.3125 | 2 |
# Illustrates combining exception / error handling
# with file access
print('Start')
try:
    with open('myfile2.txt', 'r') as f:
        for line in f.readlines():
            print(line, end='')
except FileNotFoundError as err:
    print('oops')
    print(err)
print('Done')
| 3.578125 | 4 |
Miscellaneous/test_script_pymc3/multinominal.py | junpenglao/Planet_Sakaar_Data_Science | 51 | 13374 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Sep 11 13:30:53 2017
@author: laoj
"""
import numpy as np
import pymc3 as pm
import theano.tensor as tt
from pymc3.distributions.distribution import Discrete, draw_values, generate_samples, infer_shape
from pymc3.distributions.dist_math import bound, logpow, factln, Cholesky
from pymc3.math import tround
#%% n scaler, p 1D
#n = 183
n = np.array([[106],
[143],
[102],
[116],
[183],
[150]])
p = np.array([[ 0.21245365, 0.41223126, 0.37531509],
[ 0.13221011, 0.50537169, 0.3624182 ],
[ 0.08813779, 0.54447146, 0.36739075],
[ 0.18932804, 0.4630365, 0.34763546],
[ 0.11006472, 0.49227755, 0.39765773],
[ 0.17886852, 0.41098834, 0.41014314]])
# p = np.array([ 0.21245365, 0.41223126, 0.37531509])
n = tt.as_tensor_variable(n)
p = tt.as_tensor_variable(p)
n = np.squeeze(n)
n = tt.shape_padright(n) if n.ndim == 1 else tt.as_tensor_variable(n)
n.ndim
n * p
#%%
n = np.array([[106],
[143],
[102],
[116],
[183],
[150]])
#n = 183
p = np.array([[ 0.21245365, 0.41223126, 0.37531509],
[ 0.13221011, 0.50537169, 0.3624182 ],
[ 0.08813779, 0.54447146, 0.36739075],
[ 0.18932804, 0.4630365, 0.34763546],
[ 0.11006472, 0.49227755, 0.39765773],
[ 0.17886852, 0.41098834, 0.41014314]])
#p = np.array([[ 0.21245365, 0.41223126, 0.37531509]])
#n = tt.as_tensor_variable(n)
p = tt.as_tensor_variable(p)
#%%
class Multinomial(Discrete):
    """Patched pymc3 Multinomial that also accepts per-row counts *n*
    broadcast against a 2-D probability matrix *p*.

    NOTE(review): the shape/broadcast handling below mirrors pymc3's
    distribution conventions — confirm against the pymc3 version in use.
    """

    def __init__(self, n, p, *args, **kwargs):
        super(Multinomial, self).__init__(*args, **kwargs)

        # Normalize each probability row so it sums to one.
        p = p / tt.sum(p, axis=-1, keepdims=True)
        n = np.squeeze(n) # works also if n is a tensor

        if len(self.shape) > 1:
            m = self.shape[-2]
            try:
                # One count per row of the (m, k) output; broadcast a scalar
                # n to length m if the shapes don't already agree.
                assert n.shape == (m,)
            except (AttributeError, AssertionError):
                n = n * tt.ones(m)
            self.n = tt.shape_padright(n)
            self.p = p if p.ndim > 1 else tt.shape_padleft(p)
        elif n.ndim == 1:
            self.n = tt.shape_padright(n)
            self.p = p if p.ndim > 1 else tt.shape_padleft(p)
        else:
            # n is a scalar, p is a 1d array
            self.n = tt.as_tensor_variable(n)
            self.p = tt.as_tensor_variable(p)

        self.mean = self.n * self.p
        # Integer mode: round the mean, then add the rounding residual to the
        # affected rows so each row still sums exactly to n.
        mode = tt.cast(tt.round(self.mean), 'int32')
        diff = self.n - tt.sum(mode, axis=-1, keepdims=True)
        inc_bool_arr = tt.abs_(diff) > 0
        mode = tt.inc_subtensor(mode[inc_bool_arr.nonzero()],
                                diff[inc_bool_arr.nonzero()])
        self.mode = mode

    def _random(self, n, p, size=None):
        """Draw numpy multinomial samples, dispatching on the n/p shapes."""
        original_dtype = p.dtype
        # Set float type to float64 for numpy. This change is related to numpy issue #8317 (https://github.com/numpy/numpy/issues/8317)
        p = p.astype('float64')
        # Now, re-normalize all of the values in float64 precision. This is done inside the conditionals
        if size == p.shape:
            size = None
        if (p.ndim == 1) and (n.ndim == 0):
            p = p / p.sum()
            randnum = np.random.multinomial(n, p.squeeze(), size=size)
        else:
            p = p / p.sum(axis=1, keepdims=True)
            # Broadcast the shorter of n / p against the rows of the other.
            if n.shape[0] > p.shape[0]:
                randnum = np.asarray([
                    np.random.multinomial(nn, p.squeeze(), size=size)
                    for nn in n
                ])
            elif n.shape[0] < p.shape[0]:
                randnum = np.asarray([
                    np.random.multinomial(n.squeeze(), pp, size=size)
                    for pp in p
                ])
            else:
                randnum = np.asarray([
                    np.random.multinomial(nn, pp, size=size)
                    for (nn, pp) in zip(n, p)
                ])
        return randnum.astype(original_dtype)

    def random(self, point=None, size=None):
        """Sample from the distribution at *point* (pymc3 API)."""
        n, p = draw_values([self.n, self.p], point=point)
        samples = generate_samples(self._random, n, p,
                                   dist_shape=self.shape,
                                   size=size)
        return samples

    def logp(self, x):
        """Multinomial log-likelihood of counts *x*, with validity bounds."""
        n = self.n
        p = self.p
        return bound(
            tt.sum(factln(n)) - tt.sum(factln(x)) + tt.sum(x * tt.log(p)),
            tt.all(x >= 0),
            tt.all(tt.eq(tt.sum(x, axis=-1, keepdims=True), n)),
            tt.all(p <= 1),
            tt.all(tt.eq(tt.sum(p, axis=-1), 1)),
            tt.all(tt.ge(n, 0)),
            broadcast_conditions=False
        )
Multinomial.dist(1,np.ones(3)/3,shape=(6, 3)).mode.eval()
#%%
Multinomial.dist(n,p,shape=(6, 3)).p.eval()
#%%
Multinomial.dist(n,p,shape=(6, 3)).n.eval()
#%%
Multinomial.dist(n,p,shape=(6, 3)).mean.eval()
#%%
Multinomial.dist(n,p,shape=(6, 3)).random()
#%%
counts =np.asarray([[19, 50, 37],
[21, 67, 55],
[11, 53, 38],
[17, 54, 45],
[24, 93, 66],
[27, 53, 70]])
Multinomial.dist(n,p,shape=(6, 3)).logp(x=counts).eval()
#%%
with pm.Model() as model:
like = Multinomial('obs_ABC', n, p, observed=counts, shape=counts.shape)
#%%
paramall = (
[[.25, .25, .25, .25], 4, 2],
[[.25, .25, .25, .25], (1, 4), 3],
# 3: expect to fail
# [[.25, .25, .25, .25], (10, 4)],
[[.25, .25, .25, .25], (10, 1, 4), 5],
# 5: expect to fail
# [[[.25, .25, .25, .25]], (2, 4), [7, 11]],
[[[.25, .25, .25, .25],
[.25, .25, .25, .25]], (2, 4), 13],
[[[.25, .25, .25, .25],
[.25, .25, .25, .25]], (2, 4), [17, 19]],
[[[.25, .25, .25, .25],
[.25, .25, .25, .25]], (1, 2, 4), [23, 29]],
[[[.25, .25, .25, .25],
[.25, .25, .25, .25]], (10, 2, 4), [31, 37]],
)
for p, shape, n in paramall:
with pm.Model() as model:
m = Multinomial('m', n=n, p=np.asarray(p), shape=shape)
print(m.random().shape)
#%%
counts =np.asarray([[19, 50, 37],
[21, 67, 55],
[11, 53, 38],
[17, 54, 45],
[24, 93, 66],
[27, 53, 70]])
n = np.array([[106],
[143],
[102],
[116],
[183],
[150]])
sparsity=1 #not zero
beta=np.ones(counts.shape) #input for dirichlet
with pm.Model() as model:
theta=pm.Dirichlet('theta',beta/sparsity, shape = counts.shape)
transition=pm.Multinomial('transition',n,theta,observed=counts)
trace=pm.sample(1000)
#%%
import numpy as np
import pymc3 as pm
import theano.tensor as tt
def norm_simplex(p):
    """Sum-to-one transformation: scale each row of *p* onto the simplex."""
    row_totals = p.sum(axis=-1)
    return (p.T / row_totals).T
def ccmodel(beta, x):
    """Community composition model.

    Log-linear mixture: exponentiates x @ log(beta) and renormalizes each
    row onto the simplex.
    """
    return norm_simplex(tt.exp(tt.dot(x, tt.log(beta))))
class DirichletMultinomial(pm.Discrete):
    """Dirichlet Multinomial Model

    Observation distribution over count vectors with concentration
    parameter ``alpha`` (same trailing dimension as the counts).
    """

    def __init__(self, alpha, *args, **kwargs):
        super(DirichletMultinomial, self).__init__(*args, **kwargs)
        self.alpha = alpha

    def logp(self, x):
        # Dirichlet-multinomial log-pmf, written with gammaln to stay
        # numerically stable for large counts.
        alpha = self.alpha
        n = tt.sum(x, axis=-1)
        sum_alpha = tt.sum(alpha, axis=-1)

        const = (tt.gammaln(n + 1) + tt.gammaln(sum_alpha)) - tt.gammaln(n + sum_alpha)
        series = tt.gammaln(x + alpha) - (tt.gammaln(x + 1) + tt.gammaln(alpha))
        result = const + tt.sum(series, axis=-1)
        return result
def as_col(x):
    """Reshape *x* into a column (n, 1), for theano tensors or numpy input."""
    if isinstance(x, tt.TensorVariable):
        return x.dimshuffle(0, 'x')
    else:
        return np.asarray(x).reshape(-1, 1)
def as_row(x):
    """Reshape *x* into a row (1, n), for theano tensors or numpy input."""
    if isinstance(x, tt.TensorVariable):
        return x.dimshuffle('x', 0)
    else:
        return np.asarray(x).reshape(1, -1)
n, k, r = 25, 10, 2
x = np.random.randint(0, 1000, size=(n, k))
y = np.random.randint(0, 1000, size=n)
design = np.vstack((np.ones(25), np.random.randint(2, size=n))).T
with pm.Model() as model:
# Community composition
pi = pm.Dirichlet('pi', np.ones(k), shape=(r, k))
comp = pm.Deterministic('comp', ccmodel(pi, design))
# Inferred population density of observed taxa (hierarchical model)
rho = pm.Normal('rho', shape=r)
tau = pm.Lognormal('tau')
dens = pm.Lognormal('dens', tt.dot(design, rho), tau=tau, shape=n)
# Community composition *with* the spike
expected_recovery = as_col(1 / dens)
_comp = norm_simplex(tt.concatenate((comp, expected_recovery), axis=1))
# Variability
mu = pm.Lognormal('mu')
# Data
obs = DirichletMultinomial('obs', _comp * mu,
observed=tt.concatenate((x, as_col(y)), axis=1))
pm.sample(1000) | 1.679688 | 2 |
transformation_fnc.py | usrmaia/transformation-fnc | 0 | 13375 | from useful import *
from os import system
def remove_implication(formula):
    """Eliminate every implication: rewrite each A>B in *formula* as -A#B."""
    while ">" in formula:
        op = formula.find(">")
        print(formula, op)
        left = get_subform_left(formula, op)
        right = get_subform_right(formula, op)
        formula = get_remove_implication(formula, left, right, op)
    return formula
def get_remove_implication(formula, subform_left, subform_right, operator):
    """Rewrite the implication at *operator* as a disjunction.

    ...(A>B)... |-> ...(-A#B)...
    """
    prefix = formula[:operator - len(subform_left)]
    suffix = formula[operator + len(subform_right) + 1:]
    return "%s-%s#%s%s" % (prefix, subform_left, subform_right, suffix)
def morgan_law(formula):
    """Push every negation of a parenthesised subformula inward (De Morgan)."""
    while "-(" in formula:
        neg_pos = formula.find("-(")
        print(formula, neg_pos)
        op = get_operator(formula, neg_pos + 1)
        left = get_subform_left(formula, op)
        right = get_subform_right(formula, op)
        formula = get_morgan_law(formula, left, right, op)
    return formula
def get_morgan_law(formula, subform_left, subform_right, operator):
    """Apply De Morgan around the connective at *operator*.

    ...-(A&B)... |-> ...(-A#-B)...
    ...-(A#B)... |-> ...(-A&-B)...
    """
    connective = formula[operator]
    if connective == "#":
        new_operator = "&"
    elif connective == "&":
        new_operator = "#"
    suffix = formula[operator + len(subform_right) + 1:]
    # Also strip the leading "-(" (hence the extra two characters).
    prefix = formula[:operator - len(subform_left) - 1 - 1]
    return f"{prefix}(-{subform_left}{new_operator}-{subform_right}{suffix}"
def remove_double_negation(formula):
    """Delete every double negation: --A |-> A."""
    return formula.replace("--", "")
def distributivity(formula):
    """Distribute # (or) over & (and) until no A#(B&C) / (A&B)#C remains."""
    index = 0
    while index < len(formula):
        # Seeing "#(" or ")#" is only the first condition for applying
        # distributivity; the second is that the parenthesised operand is a
        # conjunction, i.e. "#(A&B)" or "(A&B)#".
        if "#(" in formula[index:index + 2]:  # "#("
            operator_and = get_operator(formula, index + 1)
            if formula[operator_and] == "&":  # "#(A&B)"
                print(formula, index, operator_and)
                # Rewrite resets index to 0 so the new formula is rescanned.
                formula, index = get_distributivity_lr(formula, index, operator_and)
        if ")#" in formula[index:index + 2]:  # ")#"
            len_subform_left = len(get_subform_left(formula, index + 1))
            operator_and = get_operator(formula, index + 1 - len_subform_left)
            if formula[operator_and] == "&":  # "(A&B)#"
                print(formula, index + 1, operator_and)
                formula, index = get_distributivity_rl(formula, index + 1, operator_and)
        index += 1
    return formula
def get_distributivity_lr(formula, operator_or, operator_and):
    """One left-to-right distributivity rewrite; returns (formula, 0).

    ...(A#(B&C))... |-> ...((A#B)&(A#C))...
    """
    # Outer parenthesis of the formula (around the # connective).
    subform_left = get_subform_left(formula, operator_or)
    no_modification_left = formula[:operator_or - len(subform_left)]
    subform_right = get_subform_right(formula, operator_or)
    no_modification_right = formula[operator_or + len(subform_right) + 1:]
    # Inner parenthesis of the formula (around the & connective).
    subform_middle = get_subform_left(formula, operator_and)
    subform_right = get_subform_right(formula, operator_and)
    return f"{no_modification_left}({subform_left}#{subform_middle})&({subform_left}#{subform_right}){no_modification_right}", 0
def get_distributivity_rl(formula, operator_or, operator_and):
    """One right-to-left distributivity rewrite; returns (formula, 0).

    ...((A&B)#C)... |-> ...((A#C)&(B#C))...
    """
    # Outer parenthesis of the formula (around the # connective).
    subform_left = get_subform_left(formula, operator_or)
    no_modification_left = formula[:operator_or - len(subform_left)]
    subform_right = get_subform_right(formula, operator_or)
    no_modification_right = formula[operator_or + len(subform_right) + 1:]
    # Inner parenthesis of the formula (around the & connective).
    subform_left = get_subform_left(formula, operator_and)
    subform_middle = get_subform_right(formula, operator_and)
    return f"{no_modification_left}({subform_left}#{subform_right})&({subform_middle}#{subform_right}){no_modification_right}", 0
def distributivity_new_aton(formula):
    """Tseitin-style variant of distributivity() that introduces a fresh
    atom per rewrite instead of duplicating subformulas.

    NOTE(review): "aton" looks like a typo for "atom", but the name is part
    of the public interface (called from __main__), so it is kept.
    """
    index = 0
    while index < len(formula):
        # Seeing "#(" or ")#" is only the first condition for applying
        # distributivity; the second is that the parenthesised operand is a
        # conjunction, i.e. "#(A&B)" or "(A&B)#".
        if "#(" in formula[index:index + 2]:  # "#("
            operator_and = get_operator(formula, index + 1)
            if formula[operator_and] == "&":  # "#(A&B)"
                print(formula, index, operator_and)
                formula, index = get_distributivity_new_atom_lr(formula, index, operator_and)
        if ")#" in formula[index:index + 2]:  # ")#"
            len_subform_left = len(get_subform_left(formula, index + 1))
            operator_and = get_operator(formula, index + 1 - len_subform_left)
            if formula[operator_and] == "&":  # "(A&B)#"
                print(formula, index + 1, operator_and)
                formula, index = get_distributivity_new_atom_rl(formula, index + 1, operator_and)
        index += 1
    return formula
def get_distributivity_new_atom_lr(formula, operator_or, operator_and):
    """Left-to-right rewrite with a fresh atom p; returns (formula, 0).

    ...(A#(B&C))... |-> ...(((A#p)&((¬p#B)&(¬p#C)))&((¬B#¬C)#p))...
    """
    # Outer parenthesis of the formula (around the # connective).
    subform_left = get_subform_left(formula, operator_or)
    no_modification_left = formula[:operator_or - len(subform_left)]
    subform_right = get_subform_right(formula, operator_or)
    no_modification_right = formula[operator_or + len(subform_right) + 1:]
    # Inner parenthesis of the formula (around the & connective).
    subform_middle = get_subform_left(formula, operator_and)
    subform_right = get_subform_right(formula, operator_and)
    new_operator = get_unprecedented(formula)
    return f"{no_modification_left}(({subform_left}#{new_operator})&((¬{new_operator}#{subform_middle})&(¬{new_operator}#{subform_right})))&((¬{subform_middle}#¬{subform_right})#{new_operator}){no_modification_right}", 0
    #return f"{no_modification_left}({subform_left}#{new_operator})&(¬{new_operator}#{subform_middle})&(¬{new_operator}#{subform_right})&(¬{subform_middle}#¬{subform_right}#{new_operator}){no_modification_right}", 0
def get_distributivity_new_atom_rl(formula, operator_or, operator_and):
    """Right-to-left rewrite with a fresh atom p; returns (formula, 0).

    ...((A&B)#C)... |-> ...(((C#p)&((¬p#A)&(¬p#B)))&((¬A#¬B)#p))...
    """
    # Outer parenthesis of the formula (around the # connective).
    subform_left = get_subform_left(formula, operator_or)
    no_modification_left = formula[:operator_or - len(subform_left)]
    subform_right = get_subform_right(formula, operator_or)
    no_modification_right = formula[operator_or + len(subform_right) + 1:]
    # Inner parenthesis of the formula (around the & connective).
    subform_left = get_subform_left(formula, operator_and)
    subform_middle = get_subform_right(formula, operator_and)
    new_operator = get_unprecedented(formula)
    return f"{no_modification_left}(({subform_right}#{new_operator})&((¬{new_operator}#{subform_left})&(¬{new_operator}#{subform_middle})))&((¬{subform_left}#¬{subform_middle})#{new_operator}){no_modification_right}", 0
    #return f"{no_modification_left}({subform_right}#{new_operator})&(¬{new_operator}#{subform_left})&(¬{new_operator}#{subform_middle})&(¬{subform_left}#¬{subform_middle}#{new_operator}){no_modification_right}", 0
if __name__ == "__main__":
    # Clear the console (Windows); the POSIX variant is kept for reference.
    system("cls")
    #system("clear")
    # Read formulas until the user enters 'q'; show each transformation stage.
    while(True):
        formula = input("Fórmula: ")
        if formula == 'q': break
        print(formula)
        print("Removendo implicações: ")
        A1 = remove_implication(formula)
        print(A1)
        print("Aplicando Lei de Morgan: ")
        A2 = morgan_law(A1)
        print(A2)
        print("Removendo dupla negação: ")
        A3 = remove_double_negation(A2)
        print(A3)
        print("Aplicando distributividade: ")
        A4 = distributivity(A3)
        print(A4)
        # Both distributivity variants start from A3 on purpose, so the two
        # CNF translations of the same formula can be compared side by side.
        print("Aplicando distributividade com novo átomo: ")
        A5 = distributivity_new_aton(A3)
        print(A5)
    system("pause")
src/implant/commands/__init__.py | diefans/debellator | 2 | 13376 | # Copyright 2018 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Core features
"""
import asyncio
import concurrent
import logging
import os
import time
from implant import core
log = logging.getLogger(__name__)
class Echo(core.Command):
    """Demonstrate the basic command API."""

    data = core.Parameter(default='ping', description='Meaningful data.')

    async def local(self, context):
        """Local half of the command: three-phase custom channel protocol,
        then merge the remote result into the returned dict."""
        # custom protocol
        # first: send
        await context.channel.send_iteration("send to remote")

        # second: receive
        from_remote = []
        async for x in context.channel:
            from_remote.append(x)

        log.debug("************ receiving from remote: %s", from_remote)

        # third: wait for remote to finish and return result
        remote_result = await context.remote_future

        result = {
            'local_data': self.data,
            'from_remote': ''.join(from_remote),
        }
        result.update(remote_result)
        return result

    # Remote half lives in a separate module and is resolved by dotted path.
    remote = core.CommandRemote('implant.commands.remotes.Echo')
class SystemLoad(core.Command):
    """Report the remote host's wall-clock time and load average."""

    async def local(self, context):
        # Just forward whatever the remote half computed.
        t, load = await context.remote_future
        return t, load

    async def remote(self, context):
        # (timestamp, (1min, 5min, 15min) load averages) on the remote host.
        t, load = time.time(), os.getloadavg()
        return t, load
class Copy(core.Command):
    """Copy a local file to the remote side by streaming 32 KiB chunks
    over the command channel."""

    src = core.Parameter(description='Source file at local side.')
    dest = core.Parameter(description='Destination file at remote side.')

    def __init__(self, *args, **kwargs):
        super(Copy, self).__init__(*args, **kwargs)
        # Dedicated executor so blocking file I/O never stalls the event loop.
        self.executor = concurrent.futures.ThreadPoolExecutor()
        self.loop = asyncio.get_event_loop()

    def __del__(self):
        # NOTE(review): relies on GC timing for executor teardown; an explicit
        # close() hook would be more deterministic, but __del__ is kept for
        # interface compatibility.
        self.executor.shutdown(wait=True)

    async def local(self, context):
        """Read *src* in chunks and send them; then wait for the remote."""
        with open(self.src, "rb") as f:
            while True:
                data = await self.loop.run_in_executor(self.executor, f.read, 0x8000)
                if not data:
                    # BUG FIX: send() is a coroutine — without await the
                    # StopAsyncIteration sentinel was never actually sent,
                    # so the remote `async for` never terminated.
                    await context.channel.send(StopAsyncIteration())
                    break
                await context.channel.send(data)
        result = await context.remote_future
        return result

    async def remote(self, context):
        """Write every received chunk into *dest*."""
        with open(self.dest, "wb") as f:
            async for data in context.channel:
                await self.loop.run_in_executor(self.executor, f.write, data)
| 2.234375 | 2 |
trainer/utils/__init__.py | chriszhou0916/czai4art | 0 | 13377 | <reponame>chriszhou0916/czai4art<gh_stars>0
from trainer.utils.losses import *
from trainer.utils import custom_ssim
| 0.945313 | 1 |
src/Testing/ZopeTestCase/__init__.py | tseaver/Zope-RFA | 2 | 13378 | <filename>src/Testing/ZopeTestCase/__init__.py
##############################################################################
#
# Copyright (c) 2005 Zope Foundation and Contributors.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Names exported by the ZopeTestCase package
"""
import ZopeLite as Zope2
import utils
import layer
from ZopeLite import hasProduct
from ZopeLite import installProduct
from ZopeLite import hasPackage
from ZopeLite import installPackage
from ZopeLite import _print
from ZopeTestCase import folder_name
from ZopeTestCase import user_name
from ZopeTestCase import user_password
from ZopeTestCase import user_role
from ZopeTestCase import standard_permissions
from ZopeTestCase import ZopeTestCase
from ZopeTestCase import FunctionalTestCase
from PortalTestCase import portal_name
from PortalTestCase import PortalTestCase
from sandbox import Sandboxed
from functional import Functional
from base import TestCase
from base import app
from base import close
from warnhook import WarningsHook
from unittest import main
from zopedoctest import ZopeDocTestSuite
from zopedoctest import ZopeDocFileSuite
from zopedoctest import FunctionalDocTestSuite
from zopedoctest import FunctionalDocFileSuite
import zopedoctest as doctest
import transaction
import placeless
Zope = Zope2
| 1.507813 | 2 |
CIM14/CPSM/Equipment/LoadModel/SubLoadArea.py | MaximeBaudette/PyCIM | 58 | 13379 | # Copyright (C) 2010-2011 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM14.CPSM.Equipment.LoadModel.EnergyArea import EnergyArea
class SubLoadArea(EnergyArea):
    """The class is the second level in a hierarchical structure for grouping of loads for the purpose of load flow load scaling.

    The get/set pairs below maintain both sides of the bidirectional
    associations (SubLoadArea <-> LoadGroup, SubLoadArea <-> LoadArea);
    they deliberately poke the partners' private attributes to avoid
    re-triggering the partners' own setters.
    """

    def __init__(self, LoadGroups=None, LoadArea=None, *args, **kw_args):
        """Initialises a new 'SubLoadArea' instance.

        @param LoadGroups: The Loadgroups in the SubLoadArea.
        @param LoadArea: The LoadArea where the SubLoadArea belongs.
        """
        self._LoadGroups = []
        self.LoadGroups = [] if LoadGroups is None else LoadGroups

        self._LoadArea = None
        self.LoadArea = LoadArea

        super(SubLoadArea, self).__init__(*args, **kw_args)

    # CIM metamodel bookkeeping used by the generated framework.
    _attrs = []
    _attr_types = {}
    _defaults = {}
    _enums = {}
    _refs = ["LoadGroups", "LoadArea"]
    _many_refs = ["LoadGroups"]

    def getLoadGroups(self):
        """The Loadgroups in the SubLoadArea.
        """
        return self._LoadGroups

    def setLoadGroups(self, value):
        # Detach all current groups, then attach the new ones.
        for x in self._LoadGroups:
            x.SubLoadArea = None
        for y in value:
            y._SubLoadArea = self
        self._LoadGroups = value

    LoadGroups = property(getLoadGroups, setLoadGroups)

    def addLoadGroups(self, *LoadGroups):
        for obj in LoadGroups:
            obj.SubLoadArea = self

    def removeLoadGroups(self, *LoadGroups):
        for obj in LoadGroups:
            obj.SubLoadArea = None

    def getLoadArea(self):
        """The LoadArea where the SubLoadArea belongs.
        """
        return self._LoadArea

    def setLoadArea(self, value):
        # Remove self from the old area's list before joining the new one.
        if self._LoadArea is not None:
            filtered = [x for x in self.LoadArea.SubLoadAreas if x != self]
            self._LoadArea._SubLoadAreas = filtered

        self._LoadArea = value
        if self._LoadArea is not None:
            if self not in self._LoadArea._SubLoadAreas:
                self._LoadArea._SubLoadAreas.append(self)

    LoadArea = property(getLoadArea, setLoadArea)
| 1.984375 | 2 |
bin/clean_pdb.py | caixiuhong/Stable-MCCE | 0 | 13380 | #!/usr/bin/env python
import sys
# Hydrogen atom-name prefixes (PDB columns 13-14) that should be stripped.
HATOMS = ["HG", "HD", "HE", "HH"]

for record in open(sys.argv[1]).readlines():
    tag = record[:6]
    if tag != "ATOM  " and tag != "HETATM":
        continue
    # Drop waters, explicit hydrogens, and the known hydrogen prefixes.
    if record[17:20] == "WAT" or record[13] == "H" or record[12:14] in HATOMS:
        continue
    print(record.strip("\n"))
| 3.1875 | 3 |
old_logen/pylogen/OutputBook.py | leuschel/logen | 14 | 13381 | <gh_stars>10-100
import Pmw
import os
import re
from FastIndex import FastIndex, timer
from PrologFrame import PrologFrame
from TerminalFrame import TerminalFrame
class OutputBook(Pmw.NoteBook):
def __init__(self, master=None):
self.app = master
Pmw.NoteBook.__init__(self, self.app.bottom,createcommand=self.create_page)
self.spec_page = self.add('Specialised File')
self.memo_page = self.add('Memo Table')
self.gx_page = self.add('Generating Extension')
self.output_page = self.add('Output')
self.console_page = self.add('Console')
#self.new_terminal_page = self.add('Terminal 1')
#spec file
self.output_spec = PrologFrame(self.spec_page,"",app=self.app)
self.output_spec.pack(side="bottom", fill="both", expand="yes")
#memo file
self.output_memo = PrologFrame(self.memo_page,"",app=self.app)
self.output_memo.pack(side="bottom", fill="both", expand="yes")
#gx file
self.output_gx = PrologFrame(self.gx_page,"",app=self.app)
self.output_gx.pack(side="bottom", fill="both", expand="yes")
#output
self.output_out = PrologFrame(self.output_page,"",app=self.app)
self.output_out.pack(side="bottom", fill="both", expand="yes")
#console
self.output_console = PrologFrame(self.console_page,"", app=self.app)
self.output_console.pack(side="bottom", fill="both", expand="yes")
self.pack(side="bottom", fill="both", expand="yes")
self.output_spec.text.tag_bind("nametag", "<Motion>", self.mouse_over)
self.output_spec.text.tag_bind("nametag", "<Leave>", self.mouse_leave)
self.terminal_pages = []
self.terminals = []
self.term_count = 0
self.create_new_terminal()
def create_page(self, pagename):
if pagename.startswith("Terminal"):
i = self.terminal_pages.index(pagename)
page = self.page(self.index(pagename))
self.terminals[i] = TerminalFrame(page, app=self.app, id=pagename)
self.terminals[i].pack(side="bottom", fill="both", expand="yes")
self.app.update_completions()
#self.selectpage(pagename)
def get_console_stream(self):
pass
def write_to_console(self,string):
self.output_console.text.config(state="normal")
self.output_console.text.insert("end", string)
self.output_console.text.config(state="disabled")
def set_font(self, font):
self.output_spec.text["font"] = font
self.output_memo.text["font"] = font
self.output_gx.text["font"] = font
self.output_out.text["font"] = font
def view_spec_output(self, filename):
(root, ext) = os.path.splitext(filename)
fast_idx = self.output_spec.load_source(root + ".spec")
self.output_memo.load_source(root + ".memo")
self.output_gx.load_source(root + ".gx")
self.output_out.clear()
funcs = self.get_spec_funcs()
spec_lines = self.output_spec.text.get(1.0, 'end')
pos = 0
while 1:
id = next_id(spec_lines, pos)
if id == ():
break
(start, end) = id
pos = end
(start_idx, end_idx) = fast_idx.get_two_tk_indices_same_line(start,
end)
self.output_spec.text.tag_add("nametag", start_idx, end_idx)
def reset_output(self, gx=None):
if gx is None:
pass
#self.output_gx.clear()
elif os.path.exists(gx):
self.output_gx.load_source(gx)
else:
self.output_gx.clear()
self.output_memo.clear()
self.output_spec.clear()
self.output_out.clear()
self.selectpage('Specialised File')
def set_output_from_file(self, filename):
self.output_out.load_source(filename)
def set_output(self, text):
self.output_out.set_text(text)
def view_output(self):
self.selectpage('Output')
def get_spec_funcs(self):
memo_text = self.output_memo.text.get(1.0, 'end').split('\n')
funcs = {}
for line in memo_text:
if line.startswith('table'):
(orig, i) = up_to_comma(line[6:])
(pattern, _) = up_to_comma(line[i + 7:])
i = pattern.find('(')
if i > 0:
name = pattern[:i]
else:
name = pattern
funcs[name] = orig + " --> " + pattern
self.funcs = funcs
def get_tag_position(self, x, y):
index = self.output_spec.text.index("@"+str(x)+","+str(y)+" + 1 char")
return self.output_spec.text.tag_prevrange("nametag", index)
start = None
def mouse_over(self, event):
(start, end) = self.get_tag_position(event.x, event.y)
predicate = self.output_spec.text.get(start, end)
#print "over " + start + ", " + end + " : " + predicate
#print self.funcs[predicate]
if self.start != start:
self.app.balloon.configure(relmouse="both",yoffset=15)
self.app.balloon._showBalloon(self.output_spec.text,
self.funcs[predicate], False)
self.start = start
    def mouse_leave(self, event):
        """Hide the balloon and reset its placement when the pointer
        leaves the spec pane."""
        self.app.balloon.configure(relmouse="none",yoffset=1)
        self.app.balloon.withdraw()
        self.start = None
def create_new_terminal(self):
self.term_count += 1
self.terminal_pages.append('Terminal ' + str(self.term_count))
page = self.add(self.terminal_pages[-1])
self.terminals.append(None)
def quit(self):
for t in self.terminals:
if t is not None:
t.quit()
def kill_terminal(self, term_str):
i = self.terminal_pages.index(term_str)
self.terminals[i].quit()
self.delete(term_str)
self.terminals.pop(i)
self.terminal_pages.pop(i)
    def reset_completions(self):
        """Clear the tab-completion table of every live terminal."""
        for t in self.terminals:
            if t is not None:
                t.reset_completions()
    def add_completions(self, completions):
        """Add *completions* to the completion table of every live terminal."""
        for t in self.terminals:
            if t is not None:
                t.add_completions(completions)
def up_to_comma(text):
    """Return (segment, index): the text before the first top-level
    comma of *text*, stripped, plus the comma's position.

    A comma is "top-level" when it is not nested inside (), [] or {}
    and not inside a single- or double-quoted string.  When no
    top-level comma exists the whole stripped string and len(text) are
    returned; the original ran off the end and raised IndexError.
    (Parameter renamed from the builtin-shadowing name ``str``; all
    callers in this file pass it positionally.)
    """
    bracket_stack = []
    in_string = False         # inside '...'
    in_double_string = False  # inside "..."
    ret_string = ''
    i = 0
    while i < len(text):
        current_char = text[i]
        # Stop at a comma that is outside all brackets and strings.
        if (current_char == ',' and not bracket_stack
                and not in_string and not in_double_string):
            break
        if current_char == '(' or current_char == '[' or current_char == '{':
            bracket_stack.append(current_char)
        elif current_char == ')' or current_char == ']' or current_char == '}':
            bracket_stack = bracket_stack[:-1]
        elif current_char == '"':
            if in_double_string:
                in_double_string = False
            elif not in_string:
                in_double_string = True
        elif current_char == "'":
            if in_string:
                in_string = False
            elif not in_double_string:
                in_string = True
        ret_string = ret_string + current_char
        i = i + 1
    return (ret_string.strip(), i)
regexp = re.compile('[a-zA-z0-9_]+__[0-9]+')
def next_id(string, pos):
    """Return the (start, end) span of the next specialised identifier
    found in *string* at or after *pos*, or () when there is none.
    """
    found = regexp.search(string[pos:])
    if found is None:
        return ()
    return (found.start() + pos, found.end() + pos)
| 2.1875 | 2 |
back/db.py | belshoff/Agenda | 0 | 13382 | import sqlite3
# Create the schema on import so the DAO classes below can assume the
# tables exist.  NOTE(review): SQLite only enforces the FOREIGN KEY
# clause when "PRAGMA foreign_keys = ON" is issued per connection.
with sqlite3.connect('storage.db') as conn:
    cursor = conn.cursor()
    cursor.execute(
        """
        CREATE TABLE IF NOT EXISTS Produtos (
            id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
            name TEXT NOT NULL,
            price REAL,
            compra_id INTEGER,
            FOREIGN KEY (compra_id) REFERENCES Compras(id)
        );
        """
    )
    cursor.execute(
        """
        CREATE TABLE IF NOT EXISTS Compras (
            id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
            date TEXT NOT NULL
        );
        """
    )
class Produto(object):
    """Data-access object for rows of the ``Produtos`` table.

    All queries use sqlite3 parameter binding; the original built SQL
    with f-strings, which allowed SQL injection through any value and
    broke on quotes in ``name``.  Debug prints were dropped.
    """

    def getAll(self):
        """Return every product as dicts with id, name, price, compra_id."""
        with sqlite3.connect('storage.db') as conn:
            cursor = conn.cursor()
            cursor.execute("SELECT * FROM Produtos;")
            return [
                {
                    "id": row[0],
                    "name": row[1],
                    "price": row[2],
                    "compra_id": row[3],
                }
                for row in cursor.fetchall()
            ]

    def getByCompra(self, compraId):
        """Return the products of purchase *compraId* (no compra_id key,
        matching the original result shape)."""
        with sqlite3.connect('storage.db') as conn:
            cursor = conn.cursor()
            cursor.execute(
                "SELECT * FROM Produtos WHERE compra_id = ?", (compraId,))
            return [
                {"id": row[0], "name": row[1], "price": row[2]}
                for row in cursor.fetchall()
            ]

    def insert(self, *args):
        """Insert a product; args = (name, price, compra_id)."""
        with sqlite3.connect('storage.db') as conn:
            cursor = conn.cursor()
            cursor.execute(
                "INSERT INTO Produtos (name, price, compra_id) VALUES (?, ?, ?)",
                (args[0], args[1], args[2]))

    def getById(self, id):
        """Return the product with primary key *id*.

        Raises IndexError when no such row exists (kept from the
        original interface).
        """
        with sqlite3.connect('storage.db') as conn:
            cursor = conn.cursor()
            cursor.execute("SELECT * FROM Produtos WHERE id = ? ;", (id,))
            return [
                {"id": row[0], "name": row[1], "price": row[2]}
                for row in cursor.fetchall()
            ][0]

    def update(self, id, *args):
        """Update product *id*; args = (name, price, compra_id).

        The original interpolated the name unquoted into the SQL, which
        failed for any non-numeric value; binding fixes that as well.
        """
        with sqlite3.connect('storage.db') as conn:
            cursor = conn.cursor()
            cursor.execute(
                "UPDATE Produtos SET name = ?, price = ?, compra_id = ? WHERE id = ?;",
                (args[0], args[1], args[2], id))

    def delete(self, id):
        """Delete the product with primary key *id*."""
        with sqlite3.connect('storage.db') as conn:
            cursor = conn.cursor()
            cursor.execute("DELETE FROM Produtos WHERE id = ?", (id,))

    def deleteByCompra(self, compraId):
        """Delete every product belonging to purchase *compraId*."""
        with sqlite3.connect('storage.db') as conn:
            cursor = conn.cursor()
            cursor.execute(
                "DELETE FROM Produtos WHERE compra_id = ?", (compraId,))
class Compra(object):
    """Data-access object for the ``Compras`` table and its products.

    Queries use sqlite3 parameter binding; the original interpolated
    values with f-strings, allowing SQL injection via *date*.  Debug
    prints were dropped.
    """

    def __init__(self):
        self.produto = Produto()

    def getAll(self):
        """Return every purchase with its nested product list."""
        with sqlite3.connect('storage.db') as conn:
            cursor = conn.cursor()
            cursor.execute("SELECT * FROM Compras;")
            return [
                {
                    "id": row[0],
                    "date": row[1],
                    "produtos": self.produto.getByCompra(row[0]),
                }
                for row in cursor.fetchall()
            ]

    def insert(self, *args):
        """Insert a purchase; args = (date, iterable_of_product_dicts)."""
        with sqlite3.connect('storage.db') as conn:
            cursor = conn.cursor()
            cursor.execute("INSERT INTO Compras (date) VALUES (?)", (args[0],))
        # NOTE(review): "last row of getAll()" assumes id order follows
        # insertion order; cursor.lastrowid would be more direct.  Kept
        # for parity with the original.
        compra = self.getAll()[-1]
        for produto in list(args[1]):
            self.produto.insert(str(produto["name"]), produto["price"],
                                compra["id"])

    def getById(self, id):
        """Return purchase *id* with its products (IndexError if absent)."""
        with sqlite3.connect('storage.db') as conn:
            cursor = conn.cursor()
            cursor.execute("SELECT * FROM Compras WHERE id = ? ;", (id,))
            return [
                {
                    "id": row[0],
                    "date": row[1],
                    "produtos": self.produto.getByCompra(id),
                }
                for row in cursor.fetchall()
            ][0]

    def getByDate(self, date):
        """Return every purchase whose date equals *date*."""
        with sqlite3.connect('storage.db') as conn:
            cursor = conn.cursor()
            cursor.execute("SELECT * FROM Compras WHERE date = ? ;", (date,))
            return [
                {
                    "id": row[0],
                    "date": row[1],
                    "produtos": self.produto.getByCompra(row[0]),
                }
                for row in cursor.fetchall()
            ]

    def update(self, id, *args):
        """Update purchase *id*; args must match the placeholders below.

        NOTE(review): the statement references a ``produto_id`` column
        that the schema above does not define -- confirm intent.
        """
        with sqlite3.connect('storage.db') as conn:
            cursor = conn.cursor()
            cursor.execute(
                """
                UPDATE Compras
                SET date = ?, produto_id = ?
                WHERE id = ?;
                """, (*args, id)
            )

    def delete(self, id):
        """Delete purchase *id* together with its products.

        Raises IndexError (via getById) when the purchase is absent,
        matching the original behaviour.
        """
        with sqlite3.connect('storage.db') as conn:
            cursor = conn.cursor()
            self.produto.deleteByCompra(self.getById(id)["id"])
            cursor.execute("DELETE FROM Compras WHERE id = ?", (id,))
tools/build_rules/gtk_dependent.bzl | Ewpratten/frc_971_mirror | 39 | 13383 | disable_gtk_binaries = True
def gtk_dependent_cc_library(**kwargs):
    """Declares a cc_library only when GTK binaries are enabled."""
    if not disable_gtk_binaries:
        native.cc_library(**kwargs)

def gtk_dependent_cc_binary(**kwargs):
    """Declares a cc_binary only when GTK binaries are enabled."""
    if not disable_gtk_binaries:
        native.cc_binary(**kwargs)
| 1.5625 | 2 |
vectorize.py | tomohiroando/recipe_gan | 0 | 13384 | <gh_stars>0
import sys
from gensim import models
from gensim.models.doc2vec import LabeledSentence
import pickle
def corpus_to_sentences(corpus):
    """Convert a {name: tokenised_doc} mapping into LabeledSentence
    objects suitable for gensim's Doc2Vec, printing progress in place."""
    sentences = []
    total = len(corpus)
    for idx, (name, doc) in enumerate(corpus.items()):
        sys.stdout.write('\r前処理中 {}/{}'.format(idx, total))
        sentences.append(LabeledSentence(words=doc, tags=[name]))
    return sentences
# Train a Doc2Vec model over the pickled corpus and report its size.
with open('corpus_text', 'rb') as f:
    corpus = pickle.load(f)
sentences = corpus_to_sentences(corpus)
model = models.Doc2Vec(vector_size=400, window=15, alpha=.025, min_alpha=.025, min_count=1, sample=1e-6)
model.build_vocab(sentences)
print(len(corpus))
model.train(sentences, total_examples=len(corpus), epochs=20)
model.save('doc2vec.model') | 2.609375 | 3 |
lxman/registry.py | stuxcrystal/lxman | 1 | 13385 | <gh_stars>1-10
# -*- encoding: utf-8 -*-
from collections import UserDict
from itertools import count
import shutil
import winreg
import uuid
PATH = "Software\\Microsoft\\Windows\\CurrentVersion\\Lxss"
KEY = winreg.HKEY_CURRENT_USER
class RegistryDescriptor(object):
    """Data descriptor that proxies an attribute to a named value under
    the owner's registry key (see Distribution._key / _Lxss._key)."""

    def __init__(self, name):
        # Registry value name, e.g. "BasePath".
        self.name = name

    def __get__(self, instance, clazz):
        key = instance._key()
        if key is not None:
            return self._get_value_by_registry(key)
        return self._get_value_by_vartable(instance, key)

    def __set__(self, instance, value):
        key = instance._key("", winreg.KEY_WRITE)
        if key is not None:
            return self._set_value_by_registry(key, value)

    def _get_value_by_registry(self, key):
        with key as k:
            try:
                value, _ = winreg.QueryValueEx(k, self.name)
            except FileNotFoundError:
                # The value is absent under the key.
                return None
            return value

    def _set_value_by_registry(self, key, value):
        # Pick the registry value type from the Python type.
        if isinstance(value, int):
            type = winreg.REG_DWORD
        elif isinstance(value, (list, tuple)):
            type = winreg.REG_MULTI_SZ
        else:
            type = winreg.REG_SZ
        with key as k:
            winreg.SetValueEx(k, self.name, 0, type, value)

    def _get_value_by_vartable(self, instance, key):
        # NOTE(review): only reached when _key() returned None, so *key*
        # is None here and vars(instance)[None] raises KeyError.  This
        # fallback looks broken; possibly vars(instance)[self.name] was
        # intended -- confirm before relying on it.
        return vars(instance)[key]
class EnvironmentVariables(UserDict):
    """Dict-like view over a distribution's DefaultEnvironment list of
    "NAME=value" strings.  Mutations are local until save() is called."""

    def __init__(self, distribution):
        super().__init__()
        self.distribution = distribution
        self.reload()

    def _save_values(self):
        # Serialise back to the registry's "NAME=value" string format.
        for name, value in self.data.items():
            yield f"{name}={value}"

    def save(self):
        """Write the current contents back to the distribution."""
        self.distribution.default_environment = list(self._save_values())

    def reload(self):
        """Discard local changes and re-read from the distribution."""
        self.clear()
        pairs = (entry.split("=", 1)
                 for entry in self.distribution.default_environment)
        self.update(dict(pairs))
class Distribution(object):
    """One installed WSL distribution, backed by its Lxss registry key."""

    @classmethod
    def create(cls, name, source_path):
        """Register a new distribution *name* rooted at *source_path*
        and return a Distribution wrapping the fresh key."""
        guid = "{%s}"%uuid.uuid4()
        with winreg.CreateKey(KEY, f"{PATH}\\{guid}") as k:
            # State 1 == "Ready" (see the state property below).
            winreg.SetValueEx(k, 'State', 0, winreg.REG_DWORD, 1)
            winreg.SetValueEx(k, 'DistributionName', 0, winreg.REG_SZ, name)
            winreg.SetValueEx(k, 'BasePath', 0, winreg.REG_SZ, source_path)
            winreg.SetValueEx(k, 'DefaultUid', 0, winreg.REG_DWORD, 0)
            winreg.SetValueEx(k, 'Version', 0, winreg.REG_DWORD, 1)
            winreg.SetValueEx(k, 'KernelCommandLine', 0, winreg.REG_SZ, 'BOOT_IMAGE=/kernel init=/init ro')
            winreg.SetValueEx(k, 'DefaultEnvironment', 0, winreg.REG_MULTI_SZ, [
                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin",
            ])
        return cls(guid)

    def __init__(self, guid=""):
        # GUID string of the form "{...}"; empty means "not registered".
        self.guid = guid

    def _key(self, sub="", privileges=winreg.KEY_READ):
        """Open this distribution's registry (sub)key, or return None
        when the instance has no guid."""
        if not self.guid:
            return None
        if sub:
            sub = "\\" + sub
        return winreg.OpenKey(KEY, PATH+f"\\{self.guid}"+sub, 0, privileges)

    # Registry-backed attributes (see RegistryDescriptor).
    name = RegistryDescriptor("DistributionName")
    base_path = RegistryDescriptor("BasePath")
    default_user = RegistryDescriptor("DefaultUid")
    default_environment = RegistryDescriptor("DefaultEnvironment")
    cmdline = RegistryDescriptor("KernelCommandLine")
    flags = RegistryDescriptor("Flags")
    package_family_name = RegistryDescriptor("PackageFamilyName")
    _state = RegistryDescriptor("State")
    version = RegistryDescriptor("Version")

    @property
    def environment(self):
        """Dict-like view of DefaultEnvironment (EnvironmentVariables)."""
        return EnvironmentVariables(self)

    def launch_params(self, params=("/bin/bash",)):
        """Return the argv that runs *params* inside this distribution
        via wsl.exe."""
        return [shutil.which("wsl.exe"), f"{self.guid}"] + list(params)

    def __repr__(self):
        return f"<Distribution '{self.name}' guid:{self.guid}>"

    def delete(self):
        """Remove this distribution's registry key."""
        with Lxss._key('', winreg.KEY_WRITE) as k:
            winreg.DeleteKey(k, self.guid)

    @property
    def state(self):
        """Human-readable install state ("Ready", "Installing", ...)."""
        st = self._state
        if st == 1:
            return "Ready"
        elif st == 3:
            return "Installing"
        return "Unknown:" + str(st)

    @state.setter
    def state(self, value):
        # Accept either the raw DWORD or the state names used above.
        if isinstance(value, int):
            self._state = value
            return
        value = value.lower()
        if value == "ready":
            self._state = 1
        elif value == "installing":
            self._state = 3
        else:
            self._state = value

    def __enter__(self):
        # Mark the distribution "Installing" for the duration of the block.
        self._state = 3
        return self

    def __exit__(self, *exc):
        self._state = 1
        return False
class _Lxss(object):
    """Registry-backed collection of all installed WSL distributions."""

    def _key(self, sub="", privileges=winreg.KEY_READ):
        # Open the root Lxss key (optionally a subkey).
        if sub:
            sub = "\\" + sub
        return winreg.OpenKey(KEY, PATH+sub, 0, privileges)

    default_distribution = RegistryDescriptor("DefaultDistribution")

    @property
    def default(self):
        """The distribution wsl.exe launches by default."""
        return Distribution(self.default_distribution)

    @default.setter
    def default(self, value):
        self.default_distribution = value.guid

    def __iter__(self):
        # Enumerate subkeys until winerror 259 (ERROR_NO_MORE_ITEMS).
        for i in count():
            with self._key() as k:
                try:
                    name = winreg.EnumKey(k, i)
                except OSError as e:
                    if e.winerror != 259:
                        raise
                    break
            yield Distribution(name)

    def get(self, value, default=None):
        """Look up a distribution by "{guid}" or by name; return
        *default* when none matches."""
        for distribution in self:
            if value.startswith("{") and value.endswith("}"):
                if distribution.guid.lower() == value.lower():
                    return distribution
            else:
                if distribution.name == value:
                    return distribution
        return default

    def __getitem__(self, value):
        value = self.get(value, None)
        if value is None:
            raise KeyError("Unknown distribution")
        return value

# Module-level singleton used by Distribution.delete and importers.
Lxss = _Lxss()
| 2.359375 | 2 |
virtual/lib/python3.6/site-packages/debian/changelog.py | marknesh/pitches | 0 | 13386 | # changelog.py -- Python module for Debian changelogs
# Copyright (C) 2006-7 <NAME> <<EMAIL>>
# Copyright (C) 2008 Canonical Ltd.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
# The parsing code is based on that from dpkg which is:
# Copyright 1996 <NAME>
# Copyright 2005 <NAME> <<EMAIL>>
# and licensed under the same license as above.
"""This module implements facilities to deal with Debian changelogs."""
from __future__ import absolute_import
import os
import pwd
import re
import socket
import warnings
import sys
import six
from debian import debian_support
# Python 3 doesn't have StandardError, but let's avoid changing our
# exception inheritance hierarchy for Python 2.
try:
    _base_exception_class = StandardError
except NameError:
    # Python 3: StandardError is gone, fall back to Exception.
    _base_exception_class = Exception
class ChangelogParseError(_base_exception_class):
    """Indicates that the changelog could not be parsed"""

    is_user_error = True

    def __init__(self, line):
        self._line = line

    def __str__(self):
        prefix = "Could not parse changelog: "
        return prefix + self._line
class ChangelogCreateError(_base_exception_class):
    """Raised when a changelog block cannot be formatted because a
    required field (package, version, distribution, urgency, changes,
    author or date) was not given."""
class VersionError(_base_exception_class):
    """Indicates that the version does not conform to the required format"""

    is_user_error = True

    def __init__(self, version):
        self._version = version

    def __str__(self):
        prefix = "Could not parse version: "
        return prefix + self._version
# TODO(jsw): Remove this in favor of using debian_support.Version directly.  I
# don't think we gain anything by using this empty subclass.
class Version(debian_support.Version):
    """Represents a version of a Debian package."""
    # debian_support.Version now has all the functionality we need;
    # this alias is kept so "changelog.Version" stays importable.
class ChangeBlock(object):
    """Holds all the information about one block from the changelog.

    A block is one "package (version) distribution; urgency=..." stanza
    together with its change lines and its " -- author  date" trailer.
    """

    def __init__(self, package=None, version=None, distributions=None,
                 urgency=None, urgency_comment=None, changes=None,
                 author=None, date=None, other_pairs=None, encoding='utf-8'):
        self._raw_version = None
        self._set_version(version)
        self.package = package
        self.distributions = distributions
        self.urgency = urgency or "unknown"
        self.urgency_comment = urgency_comment or ''
        self._changes = changes
        self.author = author
        self.date = date
        self._trailing = []
        self.other_pairs = other_pairs or {}
        self._encoding = encoding
        self._no_trailer = False
        self._trailer_separator = " "

    def _set_version(self, version):
        if version is not None:
            self._raw_version = str(version)

    def _get_version(self):
        return Version(self._raw_version)

    version = property(_get_version, _set_version)

    def other_keys_normalised(self):
        """Return other_pairs with keys title-cased and, where needed,
        prefixed with "XS-" so they are valid extension field names.

        Fix: the original iterated the undefined global ``other_pairs``
        (NameError); the instance attribute is meant.
        """
        norm_dict = {}
        for (key, value) in self.other_pairs.items():
            key = key[0].upper() + key[1:].lower()
            m = xbcs_re.match(key)
            if m is None:
                key = "XS-%s" % key
            norm_dict[key] = value
        return norm_dict

    def changes(self):
        """Return the raw change lines (or None when not set)."""
        return self._changes

    def add_trailing_line(self, line):
        """Record a line that follows this block's trailer."""
        self._trailing.append(line)

    def add_change(self, change):
        """Insert *change* before the block's trailing blank line(s)."""
        if self._changes is None:
            self._changes = [change]
        else:
            # Bit of trickery to keep the formatting nicer with a blank
            # line at the end if there is one
            changes = self._changes
            changes.reverse()
            added = False
            for i in range(len(changes)):
                m = blankline.match(changes[i])
                if m is None:
                    changes.insert(i, change)
                    added = True
                    break
            changes.reverse()
            if not added:
                changes.append(change)
            self._changes = changes

    def _get_bugs_closed_generic(self, type_re):
        changes = six.u(' ').join(self._changes)
        bugs = []
        for match in type_re.finditer(changes):
            closes_list = match.group(0)
            for match in re.finditer(r"\d+", closes_list):
                bugs.append(int(match.group(0)))
        return bugs

    @property
    def bugs_closed(self):
        """Debian bug numbers closed by this block's changes."""
        return self._get_bugs_closed_generic(closes)

    @property
    def lp_bugs_closed(self):
        """Launchpad bug numbers closed by this block's changes."""
        return self._get_bugs_closed_generic(closeslp)

    def _format(self):
        # TODO(jsw): Switch to StringIO or a list to join at the end.
        block = ""
        if self.package is None:
            raise ChangelogCreateError("Package not specified")
        block += self.package + " "
        if self._raw_version is None:
            raise ChangelogCreateError("Version not specified")
        block += "(" + self._raw_version + ") "
        if self.distributions is None:
            raise ChangelogCreateError("Distribution not specified")
        block += self.distributions + "; "
        if self.urgency is None:
            raise ChangelogCreateError("Urgency not specified")
        block += "urgency=" + self.urgency + self.urgency_comment
        for (key, value) in self.other_pairs.items():
            block += ", %s=%s" % (key, value)
        block += '\n'
        if self.changes() is None:
            raise ChangelogCreateError("Changes not specified")
        for change in self.changes():
            block += change + "\n"
        if not self._no_trailer:
            if self.author is None:
                raise ChangelogCreateError("Author not specified")
            if self.date is None:
                raise ChangelogCreateError("Date not specified")
            block += " -- " + self.author + self._trailer_separator \
                + self.date + "\n"
        for line in self._trailing:
            block += line + "\n"
        return block

    if sys.version >= '3':
        __str__ = _format

        def __bytes__(self):
            return str(self).encode(self._encoding)
    else:
        __unicode__ = _format

        def __str__(self):
            return unicode(self).encode(self._encoding)
topline = re.compile(r'^(\w%(name_chars)s*) \(([^\(\) \t]+)\)'
r'((\s+%(name_chars)s+)+)\;'
% {'name_chars': '[-+0-9a-z.]'},
re.IGNORECASE)
blankline = re.compile(r'^\s*$')
change = re.compile(r'^\s\s+.*$')
endline = re.compile(r'^ -- (.*) <(.*)>( ?)((\w+\,\s*)?\d{1,2}\s+\w+\s+'
r'\d{4}\s+\d{1,2}:\d\d:\d\d\s+[-+]\d{4}\s*)$')
endline_nodetails = re.compile(r'^ --(?: (.*) <(.*)>( ?)((\w+\,\s*)?\d{1,2}'
r'\s+\w+\s+\d{4}\s+\d{1,2}:\d\d:\d\d\s+[-+]\d{4}'
r'))?\s*$')
keyvalue= re.compile(r'^([-0-9a-z]+)=\s*(.*\S)$', re.IGNORECASE)
value_re = re.compile(r'^([-0-9a-z]+)((\s+.*)?)$', re.IGNORECASE)
xbcs_re = re.compile('^X[BCS]+-', re.IGNORECASE)
emacs_variables = re.compile(r'^(;;\s*)?Local variables:', re.IGNORECASE)
vim_variables = re.compile('^vim:', re.IGNORECASE)
cvs_keyword = re.compile(r'^\$\w+:.*\$')
comments = re.compile(r'^\# ')
more_comments = re.compile(r'^/\*.*\*/')
closes = re.compile(r'closes:\s*(?:bug)?\#?\s?\d+(?:,\s*(?:bug)?\#?\s?\d+)*',
re.IGNORECASE)
closeslp = re.compile(r'lp:\s+\#\d+(?:,\s*\#\d+)*', re.IGNORECASE)
old_format_re1 = re.compile(r'^(\w+\s+\w+\s+\d{1,2} \d{1,2}:\d{1,2}:\d{1,2}'
r'\s+[\w\s]*\d{4})\s+(.*)\s+(<|\()(.*)(\)|>)')
old_format_re2 = re.compile(r'^(\w+\s+\w+\s+\d{1,2},?\s*\d{4})\s+(.*)'
r'\s+(<|\()(.*)(\)|>)')
old_format_re3 = re.compile(r'^(\w[-+0-9a-z.]*) \(([^\(\) \t]+)\)\;?',
re.IGNORECASE)
old_format_re4 = re.compile(r'^([\w.+-]+)(-| )(\S+) Debian (\S+)',
re.IGNORECASE)
old_format_re5 = re.compile('^Changes from version (.*) to (.*):',
re.IGNORECASE)
old_format_re6 = re.compile(r'^Changes for [\w.+-]+-[\w.+-]+:?\s*$',
re.IGNORECASE)
old_format_re7 = re.compile(r'^Old Changelog:\s*$', re.IGNORECASE)
old_format_re8 = re.compile(r'^(?:\d+:)?\w[\w.+~-]*:?\s*$')
class Changelog(object):
    """Represents a debian/changelog file."""

    # TODO(jsw): Avoid masking the 'file' built-in.
    def __init__(self, file=None, max_blocks=None,
                 allow_empty_author=False, strict=False, encoding='utf-8'):
        """Initializer.

        Args:
          file: The contents of the changelog, either as a str, unicode object,
            or an iterator of lines (each of which is either a str or unicode)
          max_blocks: The maximum number of blocks to parse from the input.
            (Default: no limit)
          allow_empty_author: Whether to allow an empty author in the trailer
            line of a change block. (Default: False)
          strict: Whether to raise an exception if there are errors. (Default:
            use a warning)
          encoding: If the input is a str or iterator of str, the encoding to
            use when interpreting the input.
        """
        self._encoding = encoding
        self._blocks = []
        self.initial_blank_lines = []
        if file is not None:
            self.parse_changelog(file, max_blocks=max_blocks,
                                 allow_empty_author=allow_empty_author,
                                 strict=strict)

    def _parse_error(self, message, strict):
        # Raise in strict mode, warn otherwise.
        if strict:
            raise ChangelogParseError(message)
        else:
            warnings.warn(message)

    def parse_changelog(self, file, max_blocks=None,
                        allow_empty_author=False, strict=True, encoding=None):
        """Parse *file* into change blocks; see __init__ for the options."""
        # Names of the line-parser states.  NOTE(review): "of EOF" looks
        # like a typo for "or EOF", but these strings feed user-visible
        # error messages, so they are left untouched here.
        first_heading = "first heading"
        next_heading_or_eof = "next heading of EOF"
        start_of_change_data = "start of change data"
        more_changes_or_trailer = "more change data or trailer"
        slurp_to_end = "slurp to end"

        encoding = encoding or self._encoding

        if file is None:
            self._parse_error('Empty changelog file.', strict)
            return

        self._blocks = []
        self.initial_blank_lines = []

        current_block = ChangeBlock(encoding=encoding)
        changes = []

        state = first_heading
        old_state = None
        if isinstance(file, bytes):
            file = file.decode(encoding)
        if isinstance(file, six.string_types):
            # Make sure the changelog file is not empty.
            if len(file.strip()) == 0:
                self._parse_error('Empty changelog file.', strict)
                return

            file = file.splitlines()
        for line in file:
            if not isinstance(line, six.text_type):
                line = line.decode(encoding)
            # Support both lists of lines without the trailing newline and
            # those with trailing newlines (e.g. when given a file object
            # directly)
            line = line.rstrip('\n')
            if state == first_heading or state == next_heading_or_eof:
                top_match = topline.match(line)
                blank_match = blankline.match(line)
                if top_match is not None:
                    if (max_blocks is not None
                            and len(self._blocks) >= max_blocks):
                        return
                    current_block.package = top_match.group(1)
                    current_block._raw_version = top_match.group(2)
                    current_block.distributions = top_match.group(3).lstrip()

                    # Everything after ';' is a comma-separated key=value
                    # list; "urgency" is special-cased.
                    pairs = line.split(";", 1)[1]
                    all_keys = {}
                    other_pairs = {}
                    for pair in pairs.split(','):
                        pair = pair.strip()
                        kv_match = keyvalue.match(pair)
                        if kv_match is None:
                            self._parse_error("Invalid key-value "
                                              "pair after ';': %s" % pair,
                                              strict)
                            continue
                        key = kv_match.group(1)
                        value = kv_match.group(2)
                        if key.lower() in all_keys:
                            self._parse_error("Repeated key-value: "
                                              "%s" % key.lower(), strict)
                        all_keys[key.lower()] = value
                        if key.lower() == "urgency":
                            val_match = value_re.match(value)
                            if val_match is None:
                                self._parse_error("Badly formatted "
                                                  "urgency value: %s" % value,
                                                  strict)
                            else:
                                current_block.urgency = val_match.group(1)
                                comment = val_match.group(2)
                                if comment is not None:
                                    current_block.urgency_comment = comment
                        else:
                            other_pairs[key] = value
                    current_block.other_pairs = other_pairs
                    state = start_of_change_data
                elif blank_match is not None:
                    if state == first_heading:
                        self.initial_blank_lines.append(line)
                    else:
                        self._blocks[-1].add_trailing_line(line)
                else:
                    # Not a heading or blank line: tolerate modelines,
                    # VCS keywords, comments and old-format leftovers.
                    emacs_match = emacs_variables.match(line)
                    vim_match = vim_variables.match(line)
                    cvs_match = cvs_keyword.match(line)
                    comments_match = comments.match(line)
                    more_comments_match = more_comments.match(line)
                    if ((emacs_match is not None or vim_match is not None)
                            and state != first_heading):
                        self._blocks[-1].add_trailing_line(line)
                        old_state = state
                        state = slurp_to_end
                        continue
                    if (cvs_match is not None or comments_match is not None
                            or more_comments_match is not None):
                        if state == first_heading:
                            self.initial_blank_lines.append(line)
                        else:
                            self._blocks[-1].add_trailing_line(line)
                        continue
                    if ((old_format_re1.match(line) is not None
                         or old_format_re2.match(line) is not None
                         or old_format_re3.match(line) is not None
                         or old_format_re4.match(line) is not None
                         or old_format_re5.match(line) is not None
                         or old_format_re6.match(line) is not None
                         or old_format_re7.match(line) is not None
                         or old_format_re8.match(line) is not None)
                            and state != first_heading):
                        self._blocks[-1].add_trailing_line(line)
                        old_state = state
                        state = slurp_to_end
                        continue
                    self._parse_error("Unexpected line while looking "
                                      "for %s: %s" % (state, line), strict)
                    if state == first_heading:
                        self.initial_blank_lines.append(line)
                    else:
                        self._blocks[-1].add_trailing_line(line)
            elif (state == start_of_change_data
                  or state == more_changes_or_trailer):
                change_match = change.match(line)
                end_match = endline.match(line)
                end_no_details_match = endline_nodetails.match(line)
                blank_match = blankline.match(line)
                if change_match is not None:
                    changes.append(line)
                    state = more_changes_or_trailer
                elif end_match is not None:
                    if end_match.group(3) != '  ':
                        self._parse_error("Badly formatted trailer "
                                          "line: %s" % line, strict)
                    current_block._trailer_separator = end_match.group(3)
                    current_block.author = "%s <%s>" \
                        % (end_match.group(1), end_match.group(2))
                    current_block.date = end_match.group(4)
                    current_block._changes = changes
                    self._blocks.append(current_block)
                    changes = []
                    current_block = ChangeBlock(encoding=encoding)
                    state = next_heading_or_eof
                elif end_no_details_match is not None:
                    if not allow_empty_author:
                        self._parse_error("Badly formatted trailer "
                                          "line: %s" % line, strict)
                        continue
                    current_block._changes = changes
                    self._blocks.append(current_block)
                    changes = []
                    current_block = ChangeBlock(encoding=encoding)
                    state = next_heading_or_eof
                elif blank_match is not None:
                    changes.append(line)
                else:
                    cvs_match = cvs_keyword.match(line)
                    comments_match = comments.match(line)
                    more_comments_match = more_comments.match(line)
                    if (cvs_match is not None or comments_match is not None
                            or more_comments_match is not None):
                        changes.append(line)
                        continue
                    self._parse_error("Unexpected line while looking "
                                      "for %s: %s" % (state, line), strict)
                    changes.append(line)
            elif state == slurp_to_end:
                if old_state == next_heading_or_eof:
                    self._blocks[-1].add_trailing_line(line)
                else:
                    changes.append(line)
            else:
                assert False, "Unknown state: %s" % state

        # A truncated final block (no trailer) is kept, flagged via
        # _no_trailer, after reporting the parse error.
        if ((state != next_heading_or_eof and state != slurp_to_end)
                or (state == slurp_to_end
                    and old_state != next_heading_or_eof)):
            self._parse_error("Found eof where expected %s" % state,
                              strict)
            current_block._changes = changes
            current_block._no_trailer = True
            self._blocks.append(current_block)

    def get_version(self):
        """Return a Version object for the last version"""
        return self._blocks[0].version

    def set_version(self, version):
        """Set the version of the last changelog block

        version can be a full version string, or a Version object
        """
        self._blocks[0].version = Version(version)

    version = property(get_version, set_version,
                       doc="Version object for last changelog block""")

    ### For convenience, let's expose some of the version properties
    full_version = property(lambda self: self.version.full_version)
    epoch = property(lambda self: self.version.epoch)
    debian_version = property(lambda self: self.version.debian_revision)
    debian_revision = property(lambda self: self.version.debian_revision)
    upstream_version = property(lambda self: self.version.upstream_version)

    def get_package(self):
        """Returns the name of the package in the last version."""
        return self._blocks[0].package

    def set_package(self, package):
        self._blocks[0].package = package

    package = property(get_package, set_package,
                       doc="Name of the package in the last version")

    def get_versions(self):
        """Returns a list of version objects that the package went through."""
        return [block.version for block in self._blocks]

    versions = property(get_versions,
                        doc="List of version objects the package went through")

    def _raw_versions(self):
        return [block._raw_version for block in self._blocks]

    def _format(self):
        # Serialise all blocks (newest first, as parsed).
        pieces = []
        pieces.append(six.u('\n').join(self.initial_blank_lines))
        for block in self._blocks:
            pieces.append(six.text_type(block))
        return six.u('').join(pieces)

    if sys.version >= '3':
        __str__ = _format

        def __bytes__(self):
            return str(self).encode(self._encoding)
    else:
        __unicode__ = _format

        def __str__(self):
            return unicode(self).encode(self._encoding)

    def __iter__(self):
        return iter(self._blocks)

    def __getitem__(self, n):
        """ select a changelog entry by number, version string, or Version

        :param n: integer or str representing a version or Version object
        """
        if type(n) is int:
            return self._blocks[n]
        elif type(n) is str:
            return self[Version(n)]
        return self._blocks[self.versions.index(n)]

    def __len__(self):
        return len(self._blocks)

    def set_distributions(self, distributions):
        self._blocks[0].distributions = distributions
    distributions = property(lambda self: self._blocks[0].distributions,
                             set_distributions)

    def set_urgency(self, urgency):
        self._blocks[0].urgency = urgency
    urgency = property(lambda self: self._blocks[0].urgency, set_urgency)

    def add_change(self, change):
        """Add a change line to the most recent block."""
        self._blocks[0].add_change(change)

    def set_author(self, author):
        self._blocks[0].author = author
    author = property(lambda self: self._blocks[0].author, set_author)

    def set_date(self, date):
        self._blocks[0].date = date
    date = property(lambda self: self._blocks[0].date, set_date)

    def new_block(self, **kwargs):
        """Prepend a new, empty change block built from **kwargs."""
        kwargs.setdefault('encoding', self._encoding)
        block = ChangeBlock(**kwargs)
        block.add_trailing_line('')
        self._blocks.insert(0, block)

    def write_to_open_file(self, file):
        """Serialise the changelog to the open text-mode *file*."""
        file.write(self.__str__())
def get_maintainer():
    """Get the maintainer information in the same manner as dch.

    This function gets the information about the current user for
    the maintainer field using environment variables or gecos
    information as appropriate.

    It uses the same methods as dch to get the information, namely
    DEBEMAIL, DEBFULLNAME, EMAIL, NAME, /etc/mailname and gecos.

    :returns: a tuple of the full name, email pair as strings.
        Either of the pair may be None if that value couldn't
        be determined.
    """
    # Work on a copy: the original bound os.environ itself and then
    # mutated it, changing the process environment as a side effect of
    # a read-only query.
    env = dict(os.environ)
    regex = re.compile(r"^(.*)\s+<(.*)>$")

    # Split email and name
    if 'DEBEMAIL' in env:
        match_obj = regex.match(env['DEBEMAIL'])
        if match_obj:
            if 'DEBFULLNAME' not in env:
                env['DEBFULLNAME'] = match_obj.group(1)
            env['DEBEMAIL'] = match_obj.group(2)
    if 'DEBEMAIL' not in env or 'DEBFULLNAME' not in env:
        if 'EMAIL' in env:
            match_obj = regex.match(env['EMAIL'])
            if match_obj:
                if 'DEBFULLNAME' not in env:
                    env['DEBFULLNAME'] = match_obj.group(1)
                env['EMAIL'] = match_obj.group(2)

    # Get maintainer's name
    if 'DEBFULLNAME' in env:
        maintainer = env['DEBFULLNAME']
    elif 'NAME' in env:
        maintainer = env['NAME']
    else:
        # Use password database if no data in environment variables
        try:
            maintainer = re.sub(r',.*', '', pwd.getpwuid(os.getuid()).pw_gecos)
        except (KeyError, AttributeError):
            maintainer = None

    # Get maintainer's mail address
    if 'DEBEMAIL' in env:
        email = env['DEBEMAIL']
    elif 'EMAIL' in env:
        email = env['EMAIL']
    else:
        addr = None
        if os.path.exists('/etc/mailname'):
            # with-statement replaces the original try/finally close.
            with open('/etc/mailname') as f:
                addr = f.readline().strip()
        if not addr:
            addr = socket.getfqdn()
        if addr:
            user = pwd.getpwuid(os.getuid()).pw_name
            if not user:
                addr = None
            else:
                addr = "%s@%s" % (user, addr)
        email = addr if addr else None

    return (maintainer, email)
| 1.851563 | 2 |
Median.py | fatih-iver/Intro-to-Computer-Science-with-Python | 0 | 13387 | # Define a procedure, median, that takes three
# numbers as its inputs, and returns the median
# of the three numbers.
# Make sure your procedure has a return statement.
def bigger(a, b):
    """Return the larger of a and b (b when they are equal)."""
    return a if a > b else b
def biggest(a, b, c):
    """Return the largest of the three values."""
    return max(a, b, c)
def median(a, b, c):
    """Return the middle value of the three numbers."""
    return sorted([a, b, c])[1]
# Quick manual checks; expected values in the trailing comments.
print(median(1,2,3))
#>>> 2
print(median(9,3,6))
#>>> 6
print(median(7,8,7))
#>>> 7
miss_islington/util.py | webknjaz/miss-islington | 0 | 13388 | import requests
import os
import subprocess
import gidgethub
from gidgethub import sansio
AUTOMERGE_LABEL = ":robot: automerge"
def comment_on_pr(issue_number, message):
    """
    Leave a comment on a PR/Issue

    Posts *message* to python/cpython issue *issue_number* using the
    GH_AUTH token, prints the outcome, and returns the requests
    Response object.
    """
    request_headers = sansio.create_headers(
        "miss-islington", oauth_token=os.getenv("GH_AUTH")
    )
    issue_comment_url = (
        f"https://api.github.com/repos/python/cpython/issues/{issue_number}/comments"
    )
    data = {"body": message}
    response = requests.post(issue_comment_url, headers=request_headers, json=data)
    # 201 Created is GitHub's success status for creating a comment.
    if response.status_code == requests.codes.created:
        print(f"Commented at {response.json()['html_url']}, message: {message}")
    else:
        print(response.status_code)
        print(response.text)
    return response
def assign_pr_to_core_dev(issue_number, coredev_login):
    """
    Assign the PR to a core dev. Should be done when miss-islington failed
    to backport.
    """
    request_headers = sansio.create_headers(
        "miss-islington", oauth_token=os.getenv("GH_AUTH")
    )
    edit_issue_url = (
        f"https://api.github.com/repos/python/cpython/issues/{issue_number}"
    )
    data = {"assignees": [coredev_login]}
    response = requests.patch(edit_issue_url, headers=request_headers, json=data)
    # BUG FIX: a successful PATCH to the issues endpoint returns 200 OK,
    # not 201 Created, so the success branch was previously unreachable and
    # every successful assignment was logged as a failure.
    if response.status_code == requests.codes.ok:
        print(f"Assigned PR {issue_number} to {coredev_login}")
    else:
        print(response.status_code)
        print(response.text)
    return response
async def leave_comment(gh, pr_number, message):
    """
    Leave a comment on a PR/Issue
    """
    await gh.post(
        f"/repos/python/cpython/issues/{pr_number}/comments",
        data={"body": message},
    )
def is_cpython_repo():
    """Return True iff the current directory is a CPython checkout.

    Probes for CPython's initial commit (7f777ed9...) with ``git log``.
    """
    cmd = "git log -r 7f777ed95a19224294949e1b4ce56bbffcb1fe9f"
    try:
        subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
    except (subprocess.SubprocessError, OSError):
        # BUG FIX: also catch OSError — if the git binary is missing,
        # check_output raises FileNotFoundError (an OSError), which is NOT
        # a SubprocessError and previously crashed instead of returning False.
        return False
    return True
async def get_gh_participants(gh, pr_number):
    """Return '@author' or '@author and @merger' for the given PR number."""
    pr_result = await gh.getitem(f"/repos/python/cpython/pulls/{pr_number}")
    created_by = pr_result["user"]["login"]
    merged_by = None
    merger = pr_result["merged_by"]
    # The bot itself never counts as the merger.
    if merger and merger["login"] != "miss-islington":
        merged_by = merger["login"]
    if merged_by is None or created_by == merged_by:
        return f"@{created_by}"
    return f"@{created_by} and @{merged_by}"
def get_participants(created_by, merged_by):
    """Format the author and merger logins as a GitHub mention string."""
    if merged_by == "miss-islington" or created_by == merged_by:
        return f"@{created_by}"
    return f"@{created_by} and @{merged_by}"
def normalize_title(title, body):
    """Normalize the title if it spills over into the PR's body."""
    spilled = title.endswith("…") and body.startswith("…")
    if not spilled:
        return title
    # Being paranoid in case \r\n is used.
    continuation = body[1:].partition("\r\n")[0]
    return title[:-1] + continuation
def normalize_message(body):
    """Normalize the message body to make it commit-worthy.

    Mostly this just means removing HTML comments, but also removes unwanted
    leading or trailing whitespace.

    Returns the normalized body.
    """
    # BUG FIX: the old loop called body.index("-->") unconditionally (and
    # from position 0), so an unterminated "<!--" raised ValueError and a
    # stray "-->" before the opener mangled the text. We now search for the
    # closer *after* the opener and drop an unterminated comment to the end.
    while True:
        start = body.find("<!--")
        if start == -1:
            break
        end = body.find("-->", start + 4)
        if end == -1:
            # Unterminated comment: remove everything from the opener on.
            body = body[:start]
            break
        body = body[:start] + body[end + 3:]
    return "\n\n" + body.strip()
# Copied over from https://github.com/python/bedevere
async def is_core_dev(gh, username):
    """Check if the user is a CPython core developer."""
    org_teams = "/orgs/python/teams"
    team_name = "python core"
    # for/else: break as soon as the team is found; the else branch only
    # runs when the async iteration finishes without a break.
    async for team in gh.getiter(org_teams):
        if team["name"].lower() == team_name:
            break
    else:
        raise ValueError(f"{team_name!r} not found at {org_teams!r}")
    # The 'teams' object only provides a URL to a deprecated endpoint,
    # so manually construct the URL to the non-deprecated team membership
    # endpoint.
    membership_url = f"/teams/{team['id']}/memberships/{username}"
    try:
        await gh.getitem(membership_url)
    except gidgethub.BadRequest as exc:
        # 404 simply means "not a member"; any other status is unexpected
        # and is re-raised for the caller to deal with.
        if exc.status_code == 404:
            return False
        raise
    else:
        return True
def pr_is_awaiting_merge(pr_labels):
    """True when the PR is labelled ready to merge and not blocked."""
    label_names = {label["name"] for label in pr_labels}
    return (
        "DO-NOT-MERGE" not in label_names
        and "awaiting merge" in label_names
        and "CLA signed" in label_names
    )
def pr_is_automerge(pr_labels):
    """True when any label matches the automerge marker label."""
    return any(label["name"] == AUTOMERGE_LABEL for label in pr_labels)
async def get_pr_for_commit(gh, sha):
    """Return the PR associated with commit *sha*, or None when there is none."""
    prs_for_commit = await gh.getitem(
        f"/search/issues?q=type:pr+repo:python/cpython+sha:{sha}"
    )
    if not prs_for_commit["total_count"]:
        return None
    # there should only be one
    return prs_for_commit["items"][0]
| 2.640625 | 3 |
src/plugins/database.py | Blitz-Raynor/Kiba | 4 | 13389 | import aiosqlite
import sqlite3
import asyncio
import nonebot
from nonebot.log import logger
# Global NoneBot driver and its config object; the startup hook below
# attaches the shared aiosqlite connection to `config.db`.
driver: nonebot.Driver = nonebot.get_driver()
config: nonebot.config.Config = driver.config
@driver.on_startup
async def init_db():
    """Open the SQLite database on startup and create the schema.

    The aiosqlite connection is stored on the shared config object so other
    plugins can use it. Creation errors (e.g. the tables already exist from
    a previous run) are logged and deliberately ignored.
    """
    config.db = await aiosqlite.connect("src/static/Kiba.db")
    logger.info("Kiba Kernel -> Starting to Create \"Kiba Database\"")
    try:
        # BUG FIX: 'limit' is a reserved keyword in SQLite, so the unquoted
        # column name in group_plp_table made this whole script raise a
        # syntax error — which the except below silently swallowed, so the
        # remaining tables were never created on a fresh install. Quoting
        # the identifier keeps the schema the same but makes it valid SQL.
        await config.db.executescript(
            "create table group_poke_table (group_id bigint primary key not null, last_trigger_time int, triggered int, disabled bit, strategy text);"
            "create table user_poke_table (user_id bigint, group_id bigint, triggered int);"
            "create table guess_table (group_id bigint, enabled bit);"
            "create table waiting_table (shop text, location text, wait int, updated text);"
            "create table plp_table (id bigint, user_id bigint, nickname text, message text, is_picture bit, view bigint, reply bigint);"
            "create table plp_reply_table (id bigint, plpid bigint, userid bigint, nickname text, message text);"
            'create table group_plp_table (group_id bigint, disableinsert int, disabletake int, disablereply int, slowmode int, "limit" int, time bigint);'
            "create table plp_blacklist_table (id bigint, lastbanner bigint, disableinsert int, disabletake int, disablereply int);"
            "create table gld_table (qq bigint, uid bigint);"
            "create table sign_table (no bigint, id bigint, day int);"
            "create table acard_table (id bigint, times int, six int, five int, four int, three int, two int, one int);"
        )
        logger.info("Kiba Kernel -> Create \"Kiba Database\" successfully")
    except Exception as e:
        # Best-effort: tables usually already exist on subsequent startups.
        logger.info(f"Kiba Kernel --Skip-> Database Created....Skipped Creating Databases. \n[SKIP ERR]{e}")
@driver.on_shutdown
async def free_db():
    # Close the shared connection opened in init_db when the bot stops.
    await config.db.close()
dexp/processing/utils/scatter_gather_i2i.py | haesleinhuepf/dexp | 16 | 13390 | from typing import Callable, Optional, Sequence, Tuple, Union
import numpy
from dexp.processing.utils.nd_slice import nd_split_slices, remove_margin_slice
from dexp.processing.utils.normalise import Normalise
from dexp.utils import xpArray
from dexp.utils.backends import Backend
def scatter_gather_i2i(
    function: Callable,
    image: xpArray,
    tiles: Union[int, Tuple[int, ...]],
    margins: Optional[Union[int, Tuple[int, ...]]] = None,
    normalise: bool = False,
    clip: bool = False,
    to_numpy: bool = True,
    internal_dtype: Optional[numpy.dtype] = None,
) -> xpArray:
    """
    Image-2-image scatter-gather.

    'Scatters' computation of a given unary function by splitting the input array into tiles,
    computing using a given backend, and reassembling the tiles into a single array of the same
    shape as the input that is either backed by the same backend as the input image,
    or backed by numpy -- useful when the compute backend cannot hold the whole input and
    output images in memory.

    Parameters
    ----------
    function : unary function applied to each (normalised) tile
    image : input image (can be any backend, numpy)
    tiles : tile sizes to cut input image into, can be a single integer or a tuple of integers.
    margins : margins to add to each tile, can be a single integer or a tuple of integers.
        if None, no margins are added.
    normalise : normalises the input image.
    clip : clip after normalisation/denormalisation
    to_numpy : should the result be a numpy array? Very useful when the compute backend
        cannot hold the whole input and output images in memory.
    internal_dtype : internal dtype for computation (defaults to the input image's dtype)

    Returns
    -------
    Result of applying the unary function to the input image; a numpy array when to_numpy is True.
    """
    if internal_dtype is None:
        internal_dtype = image.dtype

    # FIX: isinstance instead of `type(...) == int` — idiomatic type check
    # that also accepts int subclasses (e.g. numpy integer-compatible values).
    if isinstance(tiles, int):
        tiles = (tiles,) * image.ndim

    # If None is passed for a tile that means that we don't tile along that axis, we also clip the tile size:
    tiles = tuple((length if tile is None else min(length, tile)) for tile, length in zip(tiles, image.shape))

    if margins is None:
        margins = (0,) * image.ndim

    if isinstance(margins, int):
        margins = (margins,) * image.ndim

    if to_numpy:
        result = numpy.empty(shape=image.shape, dtype=internal_dtype)
    else:
        result = Backend.get_xp_module(image).empty_like(image, dtype=internal_dtype)

    # Normalise:
    norm = Normalise(Backend.to_backend(image), do_normalise=normalise, clip=clip, quantile=0.005)

    # image shape:
    shape = image.shape

    # We compute the slices objects to cut the input and target images into batches:
    tile_slices = list(nd_split_slices(shape, chunks=tiles, margins=margins))
    tile_slices_no_margins = list(nd_split_slices(shape, chunks=tiles))

    # Zipping together slices with and without margins:
    slices = zip(tile_slices, tile_slices_no_margins)

    # Number of tiles:
    number_of_tiles = len(tile_slices)

    if number_of_tiles == 1:
        # If there is only one tile, let's not be complicated about it:
        result = norm.backward(function(norm.forward(image)))
        if to_numpy:
            result = Backend.to_numpy(result, dtype=internal_dtype)
        else:
            result = Backend.to_backend(result, dtype=internal_dtype)
    else:
        _scatter_gather_loop(
            norm.backward, function, image, internal_dtype, norm.forward, result, shape, slices, to_numpy
        )

    return result
def _scatter_gather_loop(
    denorm_fun: Callable,
    function: Callable,
    image: xpArray,
    internal_dtype: numpy.dtype,
    norm_fun: Callable,
    result: xpArray,
    shape: Tuple[int, ...],
    slices: Sequence[Tuple[slice, ...]],
    to_numpy: bool,
) -> None:
    """Apply *function* tile by tile and write the de-margined tiles into *result*.

    For each pair of (margined, margin-free) slices: extract the margined tile,
    move it to the compute backend, run normalise -> function -> denormalise,
    convert to the requested output backend, crop the margins off, and store
    the tile at its margin-free position in *result* (mutated in place).
    """
    for tile_slice, tile_slice_no_margins in slices:
        image_tile = image[tile_slice]
        image_tile = Backend.to_backend(image_tile, dtype=internal_dtype)
        # normalise -> compute -> denormalise, all on the compute backend:
        image_tile = denorm_fun(function(norm_fun(image_tile)))
        if to_numpy:
            image_tile = Backend.to_numpy(image_tile, dtype=internal_dtype)
        else:
            image_tile = Backend.to_backend(image_tile, dtype=internal_dtype)
        # Crop the margins before writing the tile into its final slot:
        remove_margin_slice_tuple = remove_margin_slice(shape, tile_slice, tile_slice_no_margins)
        image_tile = image_tile[remove_margin_slice_tuple]
        result[tile_slice_no_margins] = image_tile
# Dask turned out not too work great here, HUGE overhead compared to the light approach above.
# def scatter_gather_dask(backend: Backend,
# function,
# image,
# chunks,
# margins=None):
# boundary=None
# trim=True
# align_arrays=True
#
# image_d = from_array(image, chunks=chunks, asarray=False)
#
# def function_numpy(_image):
# print(_image.shape)
# return backend.to_numpy(function(_image))
#
# #func, *args, depth=None, boundary=None, trim=True, align_arrays=True, **kwargs
# computation= map_overlap(function_numpy,
# image_d,
# depth=margins,
# boundary=boundary,
# trim=trim,
# align_arrays=align_arrays,
# dtype=image.dtype
# )
#
# #computation.visualize(filename='transpose.png')
# result = computation.compute()
#
# return result
| 2.8125 | 3 |
http_shadow/__init__.py | abador/http-shadow | 0 | 13391 | <gh_stars>0
from .backend import Backend
from .thread import HttpPool
| 1.109375 | 1 |
Problems/sample.py | HKuz/Test_Code | 1 | 13392 | #!/usr/local/bin/python3
def main():
    """Placeholder entry point for the test suite."""
    return


if __name__ == '__main__':
    main()
setup.py | wrmsr/omnibus | 2 | 13393 | <reponame>wrmsr/omnibus
import fnmatch
import glob
import os
import sys
import setuptools.command.build_ext
APPLE = sys.platform == 'darwin'
BASE_DIR = os.path.dirname(__file__)
ABOUT = {}
def _read_about():
    """Execute omnibus/__about__.py and capture its globals into ABOUT.

    The file is read as bytes and decoded explicitly so the result does not
    depend on the interpreter's default encoding.
    """
    with open(os.path.join(BASE_DIR, 'omnibus', '__about__.py'), 'rb') as f:
        # FIX: the old `if sys.version_info[0] > 2` decode guard was dead
        # code — setup() declares python_requires='>=3.7', so the branch was
        # always taken. Decode unconditionally.
        src = f.read().decode('UTF-8')
    exec(src, ABOUT)


_read_about()
# fnmatch patterns for files that must NOT be shipped as package data:
# Python sources, bytecode caches, tests, and extension-module sources.
EXCLUDED_STATIC_FILE_PATHS = [
    '*.py',
    '*/__pycache__/*',
    '*/tests/*',
    '*/_ext/cc/*',
    '*/_ext/cy/*',
    '*/_ext/rs/*',
]
def _get_static_files(path):
    """Recursively collect file paths under *path*, skipping excluded patterns."""
    collected = []
    for dirpath, _dirnames, filenames in os.walk(path, followlinks=True):
        for filename in filenames:
            filepath = os.path.join(dirpath, filename)
            if any(fnmatch.fnmatch(filepath, pat) for pat in EXCLUDED_STATIC_FILE_PATHS):
                continue
            collected.append(filepath)
    return collected
# Non-Python files bundled into the distribution.
# NOTE(review): '.revision' is presumably generated by release tooling
# before packaging — confirm, it is not created anywhere in this file.
PACKAGE_DATA = [
    '.revision',
] + _get_static_files('omnibus')
INSTALL_REQUIRES = [
    'toolz>=0.9.0',
]
# Optional feature groups, installable as omnibus[<name>].
EXTRAS_REQUIRE = {
    'bintrees': ['bintrees>=0.2.7'],
    'cytoolz': ['cytoolz>=0.9.0'],
    'docker': ['docker>=3.7.0'],
    'sortedcontainers': ['sortedcontainers>=2.1.0'],
}
# DEBUG (env var) turns on Cython gdb debug info below.
DEBUG = 'DEBUG' in os.environ
EXT_MODULES = []
# Native extensions are optional: when Cython is not installed, the package
# builds as pure Python with an empty EXT_MODULES list.
try:
    import Cython
except ImportError:
    pass
else:
    import Cython.Build
    import Cython.Compiler.Options
    EXT_MODULES.extend([
        # Plain C++ sources under _ext/cc, one extension per .cc file.
        *[
            setuptools.Extension(
                'omnibus._ext.cc.' + os.path.basename(fpath).rpartition('.')[0],
                sources=[fpath]
            )
            for fpath in glob.glob('omnibus/_ext/cc/*.cc')
        ],
        # Cython sources under _ext/cy, compiled as C++ with Python-3 semantics.
        *Cython.Build.cythonize(
            [
                setuptools.Extension(
                    'omnibus._ext.cy.' + os.path.basename(fpath).rpartition('.')[0],
                    sources=[fpath],
                    language='c++',
                )
                for fpath in glob.glob('omnibus/_ext/cy/**/*.pyx', recursive=True)
            ],
            language_level=3,
            gdb_debug=DEBUG,
            compiler_directives={
                **Cython.Compiler.Options.get_directive_defaults(),
                'embedsignature': True,
                'binding': True,
            },
        ),
    ])
    # Objective-C extensions are macOS-only and need AppKit/CoreFoundation.
    if APPLE:
        EXT_MODULES.extend([
            setuptools.Extension(
                'omnibus._ext.m.' + os.path.basename(fpath).rpartition('.')[0],
                sources=[fpath],
                extra_link_args=[
                    '-framework', 'AppKit',
                    '-framework', 'CoreFoundation',
                ]
            )
            for fpath in glob.glob('omnibus/_ext/m/*.m')
        ])
if __name__ == '__main__':
    # Package metadata comes from omnibus/__about__.py (loaded into ABOUT above).
    setuptools.setup(
        name=ABOUT['__title__'],
        version=ABOUT['__version__'],
        description=ABOUT['__description__'],
        author=ABOUT['__author__'],
        url=ABOUT['__url__'],
        python_requires='>=3.7',
        classifiers=[
            'Intended Audience :: Developers',
            'Programming Language :: Python :: 3',
            # NOTE(review): this classifier reflects the interpreter used to
            # *build* the package, not the versions it supports — verify that
            # is intended.
            'Programming Language :: Python :: ' + '.'.join(map(str, sys.version_info[:2])),
            'Programming Language :: Python :: Implementation :: CPython',
            'Programming Language :: Python',
        ],
        # zip_safe=True,
        setup_requires=['setuptools'],
        packages=setuptools.find_packages(
            include=['omnibus', 'omnibus.*'],
            exclude=['tests', '*.tests', '*.tests.*'],
        ),
        py_modules=['omnibus'],
        package_data={'omnibus': PACKAGE_DATA},
        include_package_data=True,
        entry_points={},
        install_requires=INSTALL_REQUIRES,
        extras_require=EXTRAS_REQUIRE,
        ext_modules=EXT_MODULES,
    )
| 1.945313 | 2 |
day1/test_day1.py | Sundin/advent-of-code-2019 | 0 | 13394 | <filename>day1/test_day1.py
from day1 import *
import unittest
def test_get_fuel_requirements_for_mass():
    """Fuel for a single module, checked against the puzzle's examples."""
    cases = ((12, 2), (14, 2), (1969, 654), (100756, 33583))
    for mass, expected in cases:
        assert get_fuel_requirements_for_mass(mass) == expected
def test_sum_fuel_requirements_for_all_modules_on_spacecraft():
    """Total fuel is the sum of the per-module requirements."""
    masses = [12, 14, 1969, 100756]
    assert sum_fuel_requirements_for_all_modules_on_spacecraft(masses) == 34241
def test_get_fuel_requirements_for_mass_recursive():
    """Recursive fuel also accounts for the mass of the fuel itself."""
    cases = ((14, 2), (1969, 966), (100756, 50346))
    for mass, expected in cases:
        assert get_fuel_requirements_for_mass_recursive(mass) == expected
| 2.890625 | 3 |
PyObjCTest/test_nspdfinfo.py | linuxfood/pyobjc-framework-Cocoa-test | 0 | 13395 | import AppKit
from PyObjCTools.TestSupport import TestCase, min_os_level
class TestNSPDFInfo(TestCase):
    """PyObjC bridge-signature checks for AppKit.NSPDFInfo."""
    @min_os_level("10.9")
    def testMethods(self):
        # The getter returns an Objective-C BOOL and the setter takes a BOOL
        # as its first (index 0) argument.
        self.assertResultIsBOOL(AppKit.NSPDFInfo.isFileExtensionHidden)
        self.assertArgIsBOOL(AppKit.NSPDFInfo.setFileExtensionHidden_, 0)
| 1.90625 | 2 |
gap/src/util/data_iterator.py | cosmozhang/autoencoding_parsing | 0 | 13396 | <gh_stars>0
from collections import OrderedDict, defaultdict
import numpy as np
'''
generate a id to length dic
'''
def gen_sid_len(sentences):
    """Map each sentence's index to its length, preserving input order."""
    return OrderedDict((idx, len(sent)) for idx, sent in enumerate(sentences))
def batch_slice(data, batch_size):
    """Yield successive batches of *data* of size *batch_size*.

    ``data`` is a list of sentences of the same length; the final batch may
    be smaller when ``len(data)`` is not a multiple of ``batch_size``.
    """
    batch_num = int(np.ceil(len(data) / float(batch_size)))
    # FIX: ``range`` instead of the Python-2-only ``xrange`` so the module
    # also runs under Python 3; behaviour is identical.
    for i in range(batch_num):
        # cur_batch_size is the size of this batch (possibly short at the end).
        cur_batch_size = batch_size if i < batch_num - 1 else len(data) - batch_size * i
        sents = data[i * batch_size: i * batch_size + cur_batch_size]
        yield sents
def data_iter(sents_id2length_dic, batch_size, shuffle=True):
    """
    randomly permute data, then sort by source length, and partition into batches
    ensure that the length of source sentences in each batch is decreasing
    """
    # Bucket sentence ids by sentence length so every batch is homogeneous.
    buckets = defaultdict(list)
    # FIX: ``.items()`` instead of the Python-2-only ``.iteritems()`` so the
    # module also runs under Python 3 (``.items()`` works on both).
    for (sent_id, sent_len) in sents_id2length_dic.items():
        buckets[sent_len].append(sent_id)
    batched_data = []
    for (sent_len, sent_ids_smlen) in buckets.items():
        # sent_ids_smlen is a list of sentence ids of the same length
        if shuffle:
            np.random.shuffle(sent_ids_smlen)
        # 'extend' consumes the batch_slice generator eagerly.
        batched_data.extend(list(batch_slice(sent_ids_smlen, batch_size)))
    if shuffle:
        np.random.shuffle(batched_data)
    for batch in batched_data:
        # sent_ids in the same batch are of the same length
        yield batch
| 2.6875 | 3 |
recipe_parser/recipes/thehappyfoodie.py | tyler-a-cox/recipe-parsing | 0 | 13397 | from ._schema import DefaultSchema
from ._utils import get_minutes, get_yields, normalize_string
class TheHappyFoodie(DefaultSchema):
    """Scraper schema for recipes hosted on thehappyfoodie.co.uk."""
    @classmethod
    def host(cls):
        """Domain name this scraper handles."""
        return "thehappyfoodie.co.uk"
    def title(self):
        """Recipe title taken from the page's main heading."""
        return self.soup.find("h1", {"class": "main-title"}).get_text()
    def total_time(self):
        # Prefer the explicit total-time element; when it is absent/empty,
        # fall back to the sum of the prep and cook times.
        return get_minutes(
            self.soup.find("div", {"class": "recipe__data__total-time"})
        ) or sum(
            [
                get_minutes(
                    self.soup.find("div", {"class": "recipe__data__prep-time"})
                ),
                get_minutes(
                    self.soup.find("div", {"class": "recipe__data__cook-time"})
                ),
            ]
        )
    def yields(self):
        """Yield/serving count parsed from the yield element's text."""
        return get_yields(
            self.soup.find("div", {"class": "recipe__data__yield"}).get_text()
        )
    def ingredients(self):
        """Ingredients as normalized '<amount> <name>' strings, one per table row."""
        ingredients = self.soup.find(
            "table", {"class": "recipe__ingredients-table"}
        ).findAll("tr")
        # Each row carries a separate amount cell and name cell.
        ingredients = [
            (
                ingredient.find(
                    "td", {"class": "recipe__ingredients__amount"}
                ).get_text(),
                ingredient.find(
                    "td", {"class": "recipe__ingredients__name"}
                ).get_text(),
            )
            for ingredient in ingredients
        ]
        return [
            normalize_string("{} {}".format(amount, name))
            for amount, name in ingredients
        ]
    def instructions(self):
        """Instruction paragraphs, normalized and joined with newlines."""
        instructions = self.soup.find("div", {"class": "recipe__instructions"}).findAll(
            "p"
        )
        return "\n".join(
            normalize_string(instruction.get_text()) for instruction in instructions
        )
| 2.484375 | 2 |
Scopuli/WEB/DebugToolbar/Toolbar.py | MaxOnNet/scopuli-core-web | 0 | 13398 | <gh_stars>0
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright [2017] <NAME> [<EMAIL>]
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
from urllib.parse import unquote
except ImportError:
from urllib import unquote
from flask import url_for, current_app
from werkzeug.utils import import_string
class DebugToolbar(object):
    """Collects and renders the configured debug panels for one request."""
    # Class-level cache of imported panel classes keyed by dotted path;
    # shared across all requests/instances. Failed imports are cached as
    # None so each broken path is only reported once.
    _cached_panel_classes = {}
    def __init__(self, request, jinja_env):
        self.jinja_env = jinja_env
        self.request = request
        self.panels = []
        self.template_context = {
            'static_path': '/static/debug_panel/'
        }
        self.create_panels()
    def create_panels(self):
        """
        Populate debug panels
        """
        # The 'fldt_active' cookie holds a ';'-separated, URL-quoted list of
        # the panel DOM ids the user has expanded.
        activated = self.request.cookies.get('fldt_active', '')
        activated = unquote(activated).split(';')
        for panel_class in self._iter_panels(current_app):
            panel_instance = panel_class(jinja_env=self.jinja_env,
                                         context=self.template_context)
            if panel_instance.dom_id() in activated:
                panel_instance.is_active = True
            self.panels.append(panel_instance)
    def render_toolbar(self):
        """Render the toolbar's base template with all panel instances."""
        context = self.template_context.copy()
        context.update({'panels': self.panels})
        template = self.jinja_env.get_template('base.html')
        return template.render(**context)
    @classmethod
    def load_panels(cls, app):
        """Force-import all configured panels (warms the class-level cache)."""
        for panel_class in cls._iter_panels(app):
            # just loop to make sure they've been loaded
            pass
    @classmethod
    def _iter_panels(cls, app):
        # Panel paths come from the app config as a ';'-separated list and
        # are resolved relative to the WEB.DebugToolbar.Panels package.
        for panel_path in app._config.get("web", "debug-toolbar", "panels", "").split(";"):
            panel_class = cls._import_panel(app, "WEB.DebugToolbar.Panels.{}".format(panel_path))
            if panel_class is not None:
                yield panel_class
    @classmethod
    def _import_panel(cls, app, path):
        """Import a panel class by dotted path, caching the result (None on failure)."""
        cache = cls._cached_panel_classes
        try:
            return cache[path]
        except KeyError:
            pass
        try:
            panel_class = import_string(path)
        except ImportError as e:
            app.logger.warning('Disabled %s due to ImportError: %s', path, e)
            panel_class = None
        cache[path] = panel_class
        return panel_class
| 1.945313 | 2 |
CalculateLods.py | makeling/AGSSmartVectorTileTools | 3 | 13399 | # -*- coding: utf-8 -*-
# !/usr/bin/python
__author__ = 'ma_keling'
# Version : 1.0.0
# Start Time : 2018-11-29
# Update Time :
# Change Log :
## 1.
## 2.
## 3.
import time
import arcpy
import math
def express_arcpy_error():
    """Forward the most severe pending geoprocessing messages to the tool output."""
    severity = arcpy.GetMaxSeverity()
    if severity == 2:
        # The tool returned an error.
        arcpy.AddError("Error occurred \n{0}".format(arcpy.GetMessages(2)))
        return
    if severity == 1:
        # No errors, but the tool returned a warning.
        arcpy.AddWarning("Warning raised \n{0}".format(arcpy.GetMessages(1)))
        return
    # Neither an error nor a warning: relay the informational messages.
    arcpy.AddMessage(arcpy.GetMessages())
# Description: Loop layers and calculate lod for every feature in the layer.
def calculate_lods_for_feature(in_layers, fieldName):
    """Compute and store a level-of-detail value per feature for each layer.

    For every layer: adds a text field *fieldName*, derives the start level
    from the layer extent (projected to Web Mercator, WKID 102100), then
    assigns a lod per feature using the shape-type-specific helper
    (polygon: area thresholds, point: id-divisibility sampling, polyline:
    length thresholds). Small point sets (< 200000) are skipped entirely.
    """
    try:
        startTime = time.time()
        timeStampName = time.strftime('%Y_%m_%d %H:%M:%S', time.localtime(time.time()))
        arcpy.AddMessage("Start compute lods at: {0}".format(timeStampName))
        for layer in in_layers:
            arcpy.AddMessage("Calculating lod for layer : {0}.".format(layer))
            add_field(layer, fieldName, 9)
            # Cursor columns: geometry, object id, and the new lod field.
            cursor = arcpy.da.UpdateCursor(layer, ['SHAPE@', 'OID@', fieldName])
            lyr_path = layer.dataSource
            desc = arcpy.Describe(lyr_path)
            extent = desc.extent
            arcpy.AddMessage("Original dataset extent:" + str(desc.extent))
            # Project the extent to Web Mercator before picking a start level.
            ext_wm = extent.projectAs(arcpy.SpatialReference(102100))
            arcpy.AddMessage("New WebMercator extent:" + str(ext_wm))
            start_level, start_compute_resolution = confirm_level(ext_wm)
            if desc.shapeType == "Polygon":
                # Base thresholds at 96 dpi and 1:295828764 (world scale).
                baselength, basearea = get_length_area_from_pixel(96, 295828764)
                lod_area = basearea / math.pow(4, start_level - 1)
                arcpy.AddMessage("start lod area: " + str(lod_area))
                for row in cursor:
                    lod = calculate_lod_for_polygon(row[0], baselength, lod_area,start_level)
                    row[2] = lod
                    cursor.updateRow(row)
            elif desc.shapeType == "Point":
                count = get_count(layer)
                arcpy.AddMessage("Total Points:" + str(count))
                if count < 200000:
                    # Small datasets are not worth thinning; leave lod empty.
                    arcpy.AddMessage("Input point sets too small for optimized, skip!")
                    continue
                else:
                    # n = number of levels needed so a 512x512 tile holds
                    # roughly 512*512/16 points at the deepest level.
                    n = math.ceil(math.log(count / (512 * 512 / 16), 4))
                    arcpy.AddMessage("n:" + str(n))
                    for row in cursor:
                        oid = row[1]
                        lod = calculate_lod_for_point(oid,start_level,n)
                        row[2] = lod
                        cursor.updateRow(row)
            elif desc.shapeType == 'Polyline':
                baselength = get_length_from_pixel(96, 295828764)
                lod_length = baselength / math.pow(2, start_level - 1)
                for row in cursor:
                    lod = calculate_lod_for_polyline(row[0],lod_length,start_level)
                    row[2] = lod
                    cursor.updateRow(row)
        endTime = time.time()
        print("Compute finished, elapsed: {0} Seconds.eter..".format(str(endTime - startTime)))
        arcpy.AddMessage("Compute finished, elapsed: {0} Seconds.eter..".format(str(endTime - startTime)))
        print("\n")
        arcpy.AddMessage("\n")
    except arcpy.ExecuteError:
        express_arcpy_error()
# Description: Compute the total records for a featureclass
def get_count(layer):
    """Count the features in *layer* by iterating a search cursor."""
    with arcpy.da.SearchCursor(layer, ['SHAPE@']) as cursor:
        return sum(1 for _ in cursor)
# Description: get the start level based on layer extent
def confirm_level(extent):
    """Pick the tiling start level for *extent* and return (level, resolution).

    Starting from the ground span of one 512-px Web Mercator tile at level 0
    and halving per level, returns the first level whose tile span fits
    inside the extent's longer side. *extent* must be in Web Mercator units.
    """
    width = extent.width
    height = extent.height
    arcpy.AddMessage("width:" + str(width) + " height:" + str(height))
    length = max(width, height)
    # Web Mercator resolution constant (metres per pixel for a 512-px
    # level-0 tile scheme); one tile then spans base_resolution * 512 metres.
    base_resolution = 78271.516964011724
    base_tile_resolution = base_resolution * 512
    for level in range(21):
        start_compute_resolution = base_tile_resolution
        if length >= base_tile_resolution:
            arcpy.AddMessage("level:" + str(level))
            arcpy.AddMessage("base tile resolution:" + str(base_tile_resolution))
            return level, start_compute_resolution
        base_tile_resolution /= 2
    # BUG FIX: previously the loop could fall through and implicitly return
    # None for very small extents, crashing the caller's tuple unpacking
    # (`start_level, start_compute_resolution = confirm_level(...)`).
    # Fall back to the deepest level considered.
    return 20, start_compute_resolution
# Description: Add a new field with name lod to a table
def add_field(inFeatures, fieldName, fieldPrecision):
    """Add a nullable Text field named *fieldName* to *inFeatures*."""
    try:
        started = time.time()
        stamp = time.strftime('%Y_%m_%d %H:%M:%S', time.localtime(time.time()))
        print("start add new field for: ", inFeatures, " at: ", stamp)
        arcpy.AddMessage("start add new field for: {0} at: {1}".format(str(inFeatures), str(stamp)))
        # Execute AddField for new field
        arcpy.AddField_management(inFeatures, fieldName, "Text", fieldPrecision,
                                  field_alias=fieldName, field_is_nullable="NULLABLE")
        elapsed = str(time.time() - started)
        print(inFeatures, "Add field:", fieldName, "finished, elapsed: ", elapsed + ' Seconds.eter..')
        arcpy.AddMessage(
            "Add field: {0} finished, elapsed: {1} Seconds.eter..".format(fieldName, elapsed))
        print("\n")
        arcpy.AddMessage("\n")
    except arcpy.ExecuteError:
        express_arcpy_error()
# Description: Compute get area and length per pixel based on dpi and scale
def get_length_area_from_pixel(dpi, scale):
    """Return (ground length, ground area) covered by one pixel.

    One pixel is 1/dpi inches; the 0.025 factor converts inches to metres
    (NOTE(review): the exact inch-to-metre factor is 0.0254 — confirm the
    rounding is intentional).
    """
    pixel_size = 1 / dpi * 0.025
    ground_length = scale * pixel_size
    ground_area = ground_length * ground_length
    return ground_length, ground_area
# Description: Compute get length per pixel based on dpi and scale
def get_length_from_pixel(dpi, scale):
    """Return the ground length covered by one pixel at *dpi* and map *scale*."""
    pixel_size = 1 / dpi * 0.025
    return scale * pixel_size
# Description: Calculate lod for every polygon
def calculate_lod_for_polygon(feature, baselength, basearea, start_level):
    """Return the level-of-detail (as a string) at which *feature* appears.

    Starting at *start_level*, the area threshold *basearea* is quartered
    per level until the polygon's geodesic area meets it (at most 20 steps).
    *baselength* is kept for signature compatibility but does not influence
    the result (the old body halved it each step without ever reading it).
    A falsy/missing geometry gets the deepest level, "19".
    """
    try:
        if feature:
            area = feature.getArea("GEODESIC", "SQUAREMETERS")
            lod = start_level
            for _ in range(20):
                if area >= basearea:
                    return str(lod)
                lod += 1
                basearea /= 4
            return str(lod)
        else:
            print(type(feature))
            return "19"
    except arcpy.ExecuteError:
        express_arcpy_error()
# Description: Calculate lod for every point
def calculate_lod_for_point(id, start_level, n):
    """Assign point *id* to a level.

    Ids divisible by a higher power of 4 surface at an earlier (coarser)
    level; others sink up to *n* levels deeper than *start_level*.
    """
    try:
        level = start_level
        for i in range(n):
            if id % math.pow(4, n - i) == 0:
                return level
            level += 1
        return level
    except arcpy.ExecuteError:
        express_arcpy_error()
# Description: Calculate lod for every polyline
def calculate_lod_for_polyline(feature, baselength, start_level):
    """Return the level at which *feature* appears, by geodesic length.

    Starting at *start_level*, the threshold *baselength* is halved per
    level until the polyline's length meets it (at most 20 steps).
    Returns None for a falsy/missing geometry.
    """
    try:
        if not feature:
            print(type(feature))
            return None
        length = feature.getLength("GEODESIC", "METERS")
        lod = start_level
        threshold = baselength
        for _ in range(20):
            if length >= threshold:
                return lod
            lod += 1
            threshold /= 2
        return lod
    except arcpy.ExecuteError:
        express_arcpy_error()
def execute():
    """Tool entry point: read the tool parameters and run the lod computation."""
    in_map = arcpy.GetParameter(0)
    arcpy.AddMessage("Input map : {0}.".format(in_map))
    in_layers = arcpy.GetParameter(1)
    # All features are tagged in a text field named "lod".
    calculate_lods_for_feature(in_layers, "lod")
# execute()
| 2.328125 | 2 |