python_code: string, lengths 0 to 187k
repo_name: string, lengths 8 to 46
file_path: string, lengths 6 to 135
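Each record below pairs a source file's contents (python_code, shown here as a truncated preview) with the repository it came from (repo_name) and its path inside that repository (file_path). As a minimal sketch of how records with this schema might be consumed, the snippet below assumes the rows have been exported to a JSON Lines file; the filename python_files.jsonl and the helper names are hypothetical, not part of the dataset itself.

```python
import json
from collections import defaultdict

# Hypothetical export of the rows below, one JSON object per line with the
# keys "python_code", "repo_name", and "file_path".
ROWS_PATH = "python_files.jsonl"


def load_rows(path):
    """Yield one dict per record, matching the three-column schema above."""
    with open(path, encoding="utf-8") as fh:
        for line in fh:
            if line.strip():
                yield json.loads(line)


def group_by_repo(rows):
    """Map each repo_name to the list of file_paths it contributes."""
    repos = defaultdict(list)
    for row in rows:
        repos[row["repo_name"]].append(row["file_path"])
    return repos


if __name__ == "__main__":
    repos = group_by_repo(load_rows(ROWS_PATH))
    for repo, paths in sorted(repos.items()):
        print(f"{repo}: {len(paths)} files")
```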
import time import torch import comet.utils as utils import comet.data.config as cfg class Evaluator(object): def __init__(self, opt, model, data_loader): super(Evaluator, self).__init__() self.data_loader = data_loader self.model = model self.batch_variables = { "mo...
comet-public-master
comet/evaluate/evaluate.py
import torch import torch.nn as nn import torch.nn.functional as F import comet.data.data as data import comet.data.config as cfg import comet.models.utils as model_utils import comet.evaluate.utils as eval_utils import comet.train.batch as batch_utils def make_sampler(sampler_type, opt, *args, **kwargs): print("...
comet-public-master
comet/evaluate/sampler.py
import copy import torch import torch.nn as nn import torch.nn.functional as F import comet.data.config as cfg import comet.train.utils as train_utils import comet.models.utils as model_utils import comet.evaluate.utils as eval_utils import comet.utils as utils from IPython import embed ###########################...
comet-public-master
comet/train/batch.py
import random import torch import comet.data.config as cfg import comet.train.atomic_train as base_train import comet.train.batch as batch_utils import comet.evaluate.conceptnet_evaluate as evaluate import comet.evaluate.conceptnet_generate as gen def make_trainer(opt, *args): return ConceptNetGenerationIterato...
comet-public-master
comet/train/conceptnet_train.py
import random import comet.train.train as base_train import comet.train.batch as batch import comet.evaluate.atomic_evaluate as evaluate # import comet.evaluate.atomic_generate as gen def make_trainer(opt, *args): return AtomicGenerationIteratorTrainer(opt, *args) class AtomicGenerationIteratorTrainer(base_tra...
comet-public-master
comet/train/atomic_train.py
comet-public-master
comet/train/__init__.py
'''TAKEN from OpenAI LM Code by HuggingFace''' import math import torch from torch.optim import Optimizer from torch.nn.utils import clip_grad_norm_ def warmup_cosine(x, warmup=0.002): s = 1 if x <= warmup else 0 return s*(x/warmup) + (1-s)*(0.5 * (1 + torch.cos(math.pi * x))) def warmup_constant(x, warmup...
comet-public-master
comet/train/opt.py
import torch import torch.optim import torch.nn.functional as F import copy def update_generation_losses(losses, nums, micro, macro, bs, length, loss): # Update Losses losses[micro] += \ [copy.deepcopy(losses[micro][-1])] losses[macro] += \ [copy.deepcopy(losses[macro][-1])] losses[m...
comet-public-master
comet/train/utils.py
import torch import torch.nn as nn import torch.nn.functional as F import comet.data.config as cfg import comet.data.data as data import comet.train.utils as train_utils import comet.train.batch as batch import comet.evaluate.evaluate as evaluate import comet.evaluate.generate as gen import comet.evaluate.sampler as ...
comet-public-master
comet/train/train.py
import torch import sys import os import time from comet.data.utils import TextEncoder import comet.data.config as cfg import comet.data.data as data import comet.models.models as models from comet.evaluate.sampler import BeamSampler, GreedySampler, TopKSampler import comet.utils as utils def set_compute_mode(mode)...
comet-public-master
comet/interactive/functions.py
import sys import os import argparse import comet.interactive.functions as interactive sampling_mapping = { "b10": "beam-10", "b5": "beam-5", "g": "greedy" } def parse_input_string(string): objects = string.split("|") relations = objects[1] if not relations or relations == "all": bas...
comet-public-master
comet/interactive/conceptnet_demo.py
comet-public-master
comet/interactive/__init__.py
import sys import os import argparse import comet.interactive.functions as interactive descriptions = { "oEffect": "The effect of the event on participants besides PersonX might be: ", "oReact": "Other participants may react to the event in this way: ", "oWant": "After the event, other participants may wa...
comet-public-master
comet/interactive/atomic_demo.py
import json from comet.utils import DD device = "cuda" save = False test_save = False toy = False do_gen = False save_strategy = "all" def get_parameters(opt, exp_type="model"): params = DD() params.net = DD() params.mle = 0 params.dataset = opt.dataset params.net = get_net_parameters(opt) ...
comet-public-master
comet/data/config.py
import comet.data.utils as data_utils import comet.data.atomic as adata import comet.data.config as cfg import torch import random from tqdm import tqdm def map_name(name, opt): if name == "train": return "train{}k.txt".format(opt.trainsize) elif name == "test": return "test.txt" else: ...
comet-public-master
comet/data/conceptnet.py
comet-public-master
comet/data/__init__.py
import re import ftfy import json import spacy import torch from tqdm import tqdm def load_existing_data_loader(data_loader, path): old_data_loader = torch.load(path) for attr in data_loader.__dict__.keys(): if attr not in old_data_loader.__dict__.keys(): continue setattr(data_loa...
comet-public-master
comet/data/utils.py
import comet.utils as utils import comet.data.utils as data_utils import comet.data.config as cfg import pandas import json import random import math import torch from tqdm import tqdm all_categories = ["oEffect", "oReact", "oWant", "xAttr", "xEffect", "xIntent", "xNeed", "xReact", "xWant"] def map_name(name): ...
comet-public-master
comet/data/atomic.py
import os import comet.data.atomic as atomic_data import comet.data.conceptnet as conceptnet_data import comet.data.config as cfg import comet.utils as utils import pickle import torch import json start_token = "<START>" end_token = "<END>" blank_token = "<blank>" def save_checkpoint(state, filename): print("...
comet-public-master
comet/data/data.py
__version__ = "0.0.1a"
allentune-master
__init__.py
""" Simple check list from AllenNLP repo: https://github.com/allenai/allennlp/blob/master/setup.py To create the package for pypi. 1. Change the version in __init__.py and setup.py. 2. Commit these changes with the message: "Release: VERSION" 3. Add a tag in git to mark the release: "git tag VERSION -m'Adds tag VERSION...
allentune-master
setup.py
allentune-master
allentune/__init__.py
#!/usr/bin/env python import logging import os import sys if os.environ.get("ALLENTUNE_DEBUG"): LEVEL = logging.DEBUG else: LEVEL = logging.INFO sys.path.insert(0, os.path.dirname(os.path.abspath(os.path.join(__file__, os.pardir)))) logging.basicConfig(format='%(asctime)s - %(levelname)s - %(name)s - %(messag...
allentune-master
allentune/__main__.py
allentune-master
allentune/util/__init__.py
import logging import os from typing import Any, Dict, List, Union import numpy as np import ray # Create a custom logger logger = logging.getLogger(__name__) class RandomSearch: @staticmethod def random_choice(args: List[Any], n: int = 1): """ pick a random element from a set. ...
allentune-master
allentune/util/random_search.py
from allentune.runners.runner import Runner from allentune.runners.allennlp_runner import AllenNlpRunner
allentune-master
allentune/runners/__init__.py
import argparse import glob import json import logging import os import re import sys from collections import ChainMap from typing import Optional import pandas as pd from allentune.commands.subcommand import Subcommand logger = logging.getLogger(__name__) class Merge(Subcommand): def add_subparser(self, name: ...
allentune-master
allentune/commands/merge.py
import argparse import datetime import glob import json import os from collections import ChainMap from typing import Dict, List, Optional, Tuple import matplotlib import matplotlib.pyplot as plt import numpy as np import pandas as pd import scipy import seaborn as sns from matplotlib.ticker import ScalarFormatter fr...
allentune-master
allentune/commands/plot.py
from typing import Dict import argparse import logging from overrides import overrides from allennlp.commands.subcommand import Subcommand from allentune.commands.report import Report from allentune.commands.search import Search from allentune.commands.plot import Plot logger = logging.getLogger(__name__) # pylint: ...
allentune-master
allentune/commands/__init__.py
#!/usr/bin/env python import sys import logging import os import argparse from allentune.modules import AllenNlpRunner from allentune.modules import RayExecutor from allentune.commands.subcommand import Subcommand if os.environ.get("ALLENTUNE_DEBUG"): LEVEL = logging.DEBUG else: LEVEL = logging.INFO logging....
allentune-master
allentune/commands/search.py
""" Base class for subcommands under ``allentune.run``. """ import argparse class Subcommand: """ An abstract class representing subcommands for allentune.run. If you wanted to (for example) create your own custom `special-evaluate` command to use like ``allentune special-evaluate ...`` you would...
allentune-master
allentune/commands/subcommand.py
import argparse import glob import json import logging import os import re import sys from collections import ChainMap from typing import Optional import pandas as pd from tabulate import tabulate from allentune.commands.subcommand import Subcommand logger = logging.getLogger(__name__) class Report(Subcommand): ...
allentune-master
allentune/commands/report.py
import argparse import json import logging import os import random from typing import Any, Callable, Dict, Optional import numpy as np import ray from ray.tune import function, register_trainable, run_experiments, sample_from from ray.tune.function_runner import StatusReporter from allentune.modules.allennlp_runner i...
allentune-master
allentune/modules/ray_executor.py
from allentune.modules.allennlp_runner import AllenNlpRunner from allentune.modules.ray_executor import RayExecutor
allentune-master
allentune/modules/__init__.py
import argparse import glob import json import logging import os from collections import ChainMap from datetime import datetime from typing import Optional import pandas as pd import torch from allennlp.commands.train import train_model from allennlp.common.params import Params, parse_overrides, with_fallback from all...
allentune-master
allentune/modules/allennlp_runner.py
from allentune.modules import AllenNlpRunner, RayExecutor import pytest import argparse import os import shutil import pathlib class TestExampleRun(object): def test_run(self): runner = AllenNlpRunner() executor = RayExecutor(runner) args = argparse.Namespace() PROJECT_ROOT = (p...
allentune-master
tests/test_example_run.py
from allentune.util.random_search import RandomSearch import pytest import numpy as np import string class TestRandomSearch(object): def test_random_choice(self): random_search = RandomSearch() items = range(100) sampler = random_search.random_choice(items) for _ in range(3): ...
allentune-master
tests/test_random_search.py
#!/usr/bin/env python # -*- coding: utf-8 -*- """Update encrypted deploy password in Travis config file """ import base64 import json import os from getpass import getpass import yaml from cryptography.hazmat.primitives.serialization import load_pem_public_key from cryptography.hazmat.backends import default_backend ...
alexafsm-master
travis_pypi_setup.py
#!/usr/bin/env python # -*- coding: utf-8 -*- import os from setuptools import setup with open('HISTORY.md') as history_file: history = history_file.read() try: import pypandoc readme = pypandoc.convert('README.md', 'rst') except (IOError, ImportError): readme = '' requirements = [ 'transitions==0....
alexafsm-master
setup.py
import pytest import json from tests.skillsearch.policy import Policy from alexafsm.utils import validate, events_states_transitions, unused_events_states_transitions from alexafsm.test_helpers import get_requests_responses from tests.skillsearch.skill_settings import SkillSettings def test_validate_policy(): po...
alexafsm-master
tests/test_skillsearch.py
from collections import namedtuple from alexafsm.session_attributes import SessionAttributes as ISessionAttributes, INITIAL_STATE import alexafsm.make_json_serializable # NOQA Slots = namedtuple('Slots', ['love', 'money']) class SessionAttributes(ISessionAttributes): slots_cls = Slots not_sent_fields = ['i...
alexafsm-master
tests/test_session_attributes.py
# -*- coding: utf-8 -*-
alexafsm-master
tests/__init__.py
""" Representation of the Skill type in Elasticsearch """ from elasticsearch_dsl import DocType, Text, Keyword, Double, Integer INDEX = 'chat_prod' class Skill(DocType): """ Representation of a skill inside ES """ name = Text(fields={'raw': Keyword()}) creator = Keyword() category = Keyword(...
alexafsm-master
tests/skillsearch/skill.py
""" Intent constants for this skill """ NEW_SEARCH = 'NewSearch' NTH_SKILL = 'NthSkill' PREVIOUS_SKILL = 'PreviousSkill' NEXT_SKILL = 'NextSkill' DESCRIBE_RATINGS = 'DescribeRatings'
alexafsm-master
tests/skillsearch/intent.py
"""This demonstrates a Flask server that uses alexafsm-based skill search""" import getopt import json import logging import sys from elasticsearch_dsl.connections import connections from flask import Flask, request as flask_request from livereload import Server from voicelabs.voicelabs import VoiceInsights from tes...
alexafsm-master
tests/skillsearch/server.py
"""Interface to DynamoDB""" import boto3 class DynamoDB: table = None def __init__(self, table_name: str = None): if not DynamoDB.table: assert table_name is not None, 'Using DynamoDB without initializing it!' DynamoDB.table = boto3.resource('dynamodb').Table(table_name) ...
alexafsm-master
tests/skillsearch/dynamodb.py
"""Settings for Alexa skills app""" class SkillSettings: """Singleton settings for app""" settings = None class SkillSettingsImpl: # how far back in time a request can be, in seconds; cannot be greater than 150 according to # https://developer.amazon.com/public/solutions/alexa/alexa-skill...
alexafsm-master
tests/skillsearch/skill_settings.py
import logging from alexafsm.policy import Policy as PolicyBase from tests.skillsearch.clients import get_es_skills, get_user_info, register_new_user from tests.skillsearch.states import States, MAX_SKILLS logger = logging.getLogger(__name__) class Policy(PolicyBase): def __init__(self, states: States, request...
alexafsm-master
tests/skillsearch/policy.py
# -*- coding: utf-8 -*-
alexafsm-master
tests/skillsearch/__init__.py
"""Client that handles query to elasticsearch""" import string from typing import List from elasticsearch_dsl import Search from alexafsm.test_helpers import recordable as rec from elasticsearch_dsl.response import Response from tests.skillsearch.skill_settings import SkillSettings from tests.skillsearch.skill impo...
alexafsm-master
tests/skillsearch/clients.py
from alexafsm.states import with_transitions, States as StatesBase from alexafsm import response from alexafsm import amazon_intent from tests.skillsearch.skill import Skill from tests.skillsearch.intent import NTH_SKILL, PREVIOUS_SKILL, NEXT_SKILL, NEW_SEARCH, \ DESCRIBE_RATINGS from tests.skillsearch.session_att...
alexafsm-master
tests/skillsearch/states.py
from collections import namedtuple from typing import List from alexafsm.session_attributes import SessionAttributes as SessionAttributesBase, INITIAL_STATE from tests.skillsearch.skill import Skill Slots = namedtuple('Slots', ['query', 'nth']) NUMBER_SUFFIXES = {'st', 'nd', 'rd', 'th'} ENGLISH_NUMBERS = ['first', '...
alexafsm-master
tests/skillsearch/session_attributes.py
from alexafsm.utils import print_machine from tests.skillsearch.policy import Policy if __name__ == '__main__': print_machine(Policy.initialize())
alexafsm-master
tests/skillsearch/bin/print_machine.py
import sys from alexafsm.utils import graph from tests.skillsearch.policy import Policy if __name__ == '__main__': png_file = sys.argv[1] print(f"Drawing FSM graph for {Policy} to {png_file}") graph(Policy, png_file)
alexafsm-master
tests/skillsearch/bin/graph.py
""" Module that monkey-patches json module when it's imported so JSONEncoder.default() automatically checks for a special "to_json()" method and uses it to encode the object if found. See http://stackoverflow.com/a/18561055/257583 """ from json import JSONEncoder def _default(self, obj): return getattr(obj.__cl...
alexafsm-master
alexafsm/make_json_serializable.py
import importlib import logging import os import json from transitions import MachineError from voicelabs import VoiceInsights from alexafsm import response from alexafsm.session_attributes import SessionAttributes from alexafsm.states import States logger = logging.getLogger(__name__) class Policy: """ Fi...
alexafsm-master
alexafsm/policy.py
# -*- coding: utf-8 -*- __author__ = """Allen AI""" __email__ = 'a-dialog-research@allenai.org' __version__ = '0.1.11'
alexafsm-master
alexafsm/__init__.py
from collections import namedtuple from alexafsm.session_attributes import SessionAttributes class Response(namedtuple('Response', ['speech', 'card', 'card_content', 'reprompt', 'should_end', 'image', 'session_attributes'])): """Pythonic representation of the response to be...
alexafsm-master
alexafsm/response.py
""" Amazon built-in intents """ YES = 'AMAZON.YesIntent' NO = 'AMAZON.NoIntent' HELP = 'AMAZON.HelpIntent' STOP = 'AMAZON.StopIntent' CANCEL = 'AMAZON.CancelIntent'
alexafsm-master
alexafsm/amazon_intent.py
import inspect import json from typing import Set from alexafsm.policy import Policy from alexafsm.session_attributes import INITIAL_STATE def validate(policy: Policy, schema_file: str, ignore_intents: Set[str] = ()): """Check for inconsistencies in policy definition""" schema = {} with open(schema_file...
alexafsm-master
alexafsm/utils.py
import hashlib import pickle import json import inspect def recordable(record_dir_function, is_playback, is_record): """ Record results of functions that depend on external resources record_dir_function is a function specifying which directory to save results to/read results from Pass record=Tru...
alexafsm-master
alexafsm/test_helpers.py
import inspect from alexafsm.session_attributes import SessionAttributes, INITIAL_STATE TRANSITIONS = 'transitions' def with_transitions(*transitions): """ Add the provided in-bound transitions to the state """ def decorate(state): def transition_enabled_state(*args): return sta...
alexafsm-master
alexafsm/states.py
INITIAL_STATE = 'initial' class SessionAttributes: """Base class for all session attributes that keep track of the state of conversation""" # "Abstract" class properties to be overridden/set in inherited classes # Inherited classes should override this like so: # Slots = namedtuple('Slots', ['foo', '...
alexafsm-master
alexafsm/session_attributes.py
from datetime import datetime from pathlib import Path with open("VERSION") as version_file: VERSION = version_file.read().strip() def main(): changelog = Path("CHANGELOG.md") with changelog.open() as f: lines = f.readlines() insert_index: int = -1 for i in range(len(lines)): li...
beaker-action-main
scripts/prepare_changelog.py
# encoding: utf-8 """ Prepares markdown release notes for GitHub releases. """ import os from typing import List, Optional import packaging.version TAG = os.environ["TAG"] ADDED_HEADER = "### Added 🎉" CHANGED_HEADER = "### Changed ⚠️" FIXED_HEADER = "### Fixed ✅" REMOVED_HEADER = "### Removed 👋" def get_change...
beaker-action-main
scripts/release_notes.py
import numpy as np from collections import Counter import string import re import argparse import os import json import nltk from matplotlib_venn import venn2 from matplotlib import pyplot as plt class Question: def __init__(self, id, question_text, ground_truth, model_names): self.id = id self.qu...
bi-att-flow-master
visualization/compare_models.py
import json from json import encoder import os import tensorflow as tf from tree.evaluator import Evaluation from my.utils import short_floats class GraphHandler(object): def __init__(self, config): self.config = config self.saver = tf.train.Saver() self.writer = None self.save_p...
bi-att-flow-master
tree/graph_handler.py
bi-att-flow-master
tree/__init__.py
import nltk import numpy as np import tensorflow as tf from tensorflow.python.ops.rnn_cell import BasicLSTMCell from my.nltk_utils import tree2matrix, find_max_f1_subtree, load_compressed_tree, set_span from tree.read_data import DataSet from my.tensorflow import exp_mask, get_initializer from my.tensorflow.nn import ...
bi-att-flow-master
tree/model.py
import os from pprint import pprint import tensorflow as tf from tree.main import main as m flags = tf.app.flags flags.DEFINE_string("model_name", "tree", "Model name [tree]") flags.DEFINE_string("data_dir", "data/squad", "Data dir [data/squad]") flags.DEFINE_integer("run_id", 0, "Run ID [0]") flags.DEFINE_integer...
bi-att-flow-master
tree/cli.py
import json import os import random import itertools import math import nltk from my.nltk_utils import load_compressed_tree from my.utils import index class DataSet(object): def __init__(self, data, data_type, shared=None, valid_idxs=None): total_num_examples = len(next(iter(data.values()))) sel...
bi-att-flow-master
tree/read_data.py
import tensorflow as tf from tree.model import Model class Trainer(object): def __init__(self, config, model): assert isinstance(model, Model) self.config = config self.model = model self.opt = tf.train.AdagradOptimizer(config.init_lr) self.loss = model.get_loss() ...
bi-att-flow-master
tree/trainer.py
import argparse import json import math import os import shutil from pprint import pprint import tensorflow as tf from tqdm import tqdm import numpy as np from tree.evaluator import AccuracyEvaluator2, Evaluator from tree.graph_handler import GraphHandler from tree.model import Model from tree.trainer import Trainer ...
bi-att-flow-master
tree/main.py
import numpy as np import tensorflow as tf from tree.read_data import DataSet from my.nltk_utils import span_f1 class Evaluation(object): def __init__(self, data_type, global_step, idxs, yp): self.data_type = data_type self.global_step = global_step self.idxs = idxs self.yp = yp ...
bi-att-flow-master
tree/evaluator.py
import shutil from collections import OrderedDict import http.server import socketserver import argparse import json import os import numpy as np from tqdm import tqdm from jinja2 import Environment, FileSystemLoader def bool_(string): if string == 'True': return True elif string == 'False': ...
bi-att-flow-master
tree/visualizer.py
import sys import json from collections import Counter, defaultdict import re def key_func(pair): return pair[1] def get_func(vals, probs): counter = Counter(vals) # return max(zip(vals, probs), key=lambda pair: pair[1])[0] # return max(zip(vals, probs), key=lambda pair: pair[1] * counter[pair[0]] / ...
bi-att-flow-master
basic/ensemble_fast.py
import gzip import json from json import encoder import os import tensorflow as tf from basic.evaluator import Evaluation, F1Evaluation from my.utils import short_floats import pickle class GraphHandler(object): def __init__(self, config, model): self.config = config self.model = model ...
bi-att-flow-master
basic/graph_handler.py
bi-att-flow-master
basic/__init__.py
import random import itertools import numpy as np import tensorflow as tf from tensorflow.python.ops.rnn_cell import BasicLSTMCell from basic.read_data import DataSet from my.tensorflow import get_initializer from my.tensorflow.nn import softsel, get_logits, highway_network, multi_conv1d from my.tensorflow.rnn import...
bi-att-flow-master
basic/model.py
import os import tensorflow as tf from basic.main import main as m flags = tf.app.flags # Names and directories flags.DEFINE_string("model_name", "basic", "Model name [basic]") flags.DEFINE_string("data_dir", "data/squad", "Data dir [data/squad]") flags.DEFINE_string("run_id", "0", "Run ID [0]") flags.DEFINE_string...
bi-att-flow-master
basic/cli.py
import argparse import functools import gzip import json import pickle from collections import defaultdict from operator import mul from tqdm import tqdm from squad.utils import get_phrase, get_best_span def get_args(): parser = argparse.ArgumentParser() parser.add_argument('paths', nargs='+') parser.add...
bi-att-flow-master
basic/ensemble.py
import json import os import random import itertools import math from collections import defaultdict import numpy as np from my.tensorflow import grouper from my.utils import index class Data(object): def get_size(self): raise NotImplementedError() def get_by_idxs(self, idxs): """ E...
bi-att-flow-master
basic/read_data.py
import tensorflow as tf from basic.model import Model from my.tensorflow import average_gradients class Trainer(object): def __init__(self, config, model): assert isinstance(model, Model) self.config = config self.model = model self.opt = tf.train.AdadeltaOptimizer(config.init_lr)...
bi-att-flow-master
basic/trainer.py
import argparse import json import math import os import shutil from pprint import pprint import tensorflow as tf from tqdm import tqdm import numpy as np from basic.evaluator import ForwardEvaluator, MultiGPUF1Evaluator from basic.graph_handler import GraphHandler from basic.model import get_multi_gpu_models from ba...
bi-att-flow-master
basic/main.py
import numpy as np import tensorflow as tf from basic.read_data import DataSet from my.nltk_utils import span_f1 from my.tensorflow import padded_reshape from my.utils import argmax from squad.utils import get_phrase, get_best_span class Evaluation(object): def __init__(self, data_type, global_step, idxs, yp, te...
bi-att-flow-master
basic/evaluator.py
import shutil from collections import OrderedDict import http.server import socketserver import argparse import json import os import numpy as np from tqdm import tqdm from jinja2 import Environment, FileSystemLoader from basic.evaluator import get_span_score_pairs from squad.utils import get_best_span, get_span_scor...
bi-att-flow-master
basic/visualizer.py
bi-att-flow-master
cnn_dm/__init__.py
import argparse import json import os # data: q, cq, (dq), (pq), y, *x, *cx # shared: x, cx, (dx), (px), word_counter, char_counter, word2vec # no metadata from collections import Counter from tqdm import tqdm from my.utils import process_tokens from squad.utils import get_word_span, process_tokens def bool_(arg): ...
bi-att-flow-master
cnn_dm/prepro.py
import json import os import sys root_dir = sys.argv[1] answer_path = sys.argv[2] file_names = os.listdir(root_dir) num_correct = 0 num_wrong = 0 with open(answer_path, 'r') as fh: id2answer_dict = json.load(fh) for file_name in file_names: if not file_name.endswith(".question"): continue with o...
bi-att-flow-master
cnn_dm/evaluate.py
""" Official evaluation script for v1.1 of the SQuAD dataset. """ from __future__ import print_function from collections import Counter import string import re import argparse import json import sys def normalize_answer(s): """Lower text and remove punctuation, articles and extra whitespace.""" def remove_art...
bi-att-flow-master
squad/evaluate-v1.1.py
import argparse import json import os # data: q, cq, (dq), (pq), y, *x, *cx # shared: x, cx, (dx), (px), word_counter, char_counter, word2vec # no metadata from collections import Counter import nltk from tqdm import tqdm from my.nltk_utils import load_compressed_tree def bool_(arg): if arg == 'True': r...
bi-att-flow-master
squad/prepro_aug.py
bi-att-flow-master
squad/__init__.py
import argparse import json import os # data: q, cq, (dq), (pq), y, *x, *cx # shared: x, cx, (dx), (px), word_counter, char_counter, word2vec # no metadata from collections import Counter from tqdm import tqdm from squad.utils import get_word_span, get_word_idx, process_tokens def main(): args = get_args() ...
bi-att-flow-master
squad/prepro.py
import re def get_2d_spans(text, tokenss): spanss = [] cur_idx = 0 for tokens in tokenss: spans = [] for token in tokens: if text.find(token, cur_idx) < 0: print(tokens) print("{} {} {}".format(token, cur_idx, text)) raise Excepti...
bi-att-flow-master
squad/utils.py
""" Official evaluation script for v1.1 of the SQuAD dataset. [Changed name for external importing]""" from __future__ import print_function from collections import Counter import string import re import argparse import json import sys def normalize_answer(s): """Lower text and remove punctuation, articles and ex...
bi-att-flow-master
squad/evaluate.py
import json import sys from tqdm import tqdm from my.corenlp_interface import CoreNLPInterface in_path = sys.argv[1] out_path = sys.argv[2] url = sys.argv[3] port = int(sys.argv[4]) data = json.load(open(in_path, 'r')) h = CoreNLPInterface(url, port) def find_all(a_str, sub): start = 0 while True: ...
bi-att-flow-master
squad/aug_squad.py
import logging import requests import nltk import json import networkx as nx import time class CoreNLPInterface(object): def __init__(self, url, port): self._url = url self._port = port def get(self, type_, in_, num_max_requests=100): in_ = in_.encode("utf-8") url = "http://{...
bi-att-flow-master
my/corenlp_interface.py
bi-att-flow-master
my/__init__.py
import json from collections import deque import numpy as np from tqdm import tqdm def mytqdm(list_, desc="", show=True): if show: pbar = tqdm(list_) pbar.set_description(desc) return pbar return list_ def json_pretty_dump(obj, fh): return json.dump(obj, fh, sort_keys=True, inde...
bi-att-flow-master
my/utils.py
import argparse import os import shutil from zipfile import ZipFile from tqdm import tqdm def get_args(): parser = argparse.ArgumentParser() parser.add_argument('paths', nargs='+') parser.add_argument('-o', '--out', default='save.zip') args = parser.parse_args() return args def zip_save(args):...
bi-att-flow-master
my/zip_save.py