qlemesle commited on
Commit
a0da963
·
1 Parent(s): f753c69
Files changed (1) hide show
  1. parapluie.py +8 -18
parapluie.py CHANGED
@@ -52,31 +52,21 @@ It has shown the highest correlation with human judgement on paraphrase classifi
52
  """
53
 
54
 
55
- # TODO: Add description of the arguments of the module here
56
  _KWARGS_DESCRIPTION = """
57
- Calculates how good are predictions given some references, using certain scores
58
  Args:
59
- predictions: list of predictions to score. Each predictions
60
- should be a string with tokens separated by spaces.
61
- references: list of reference for each prediction. Each
62
- reference should be a string with tokens separated by spaces.
63
  Returns:
64
- accuracy: description of the first score,
65
- another_score: description of the second score,
66
  Examples:
67
- Examples should be written in doctest format, and should illustrate how
68
- to use the function.
69
-
70
- >>> my_new_module = evaluate.load("my_new_module")
71
- >>> results = my_new_module.compute(references=[0, 1], predictions=[0, 1])
72
- >>> print(results)
73
- {'accuracy': 1.0}
74
  """
75
 
76
 
77
- # BAD_WORDS_URL = "http://url/to/external/resource/bad_words.txt"
78
-
79
-
80
  @evaluate.utils.file_utils.add_start_docstrings(_DESCRIPTION, _KWARGS_DESCRIPTION)
81
  class Parapluie(evaluate.Metric):
82
  """TODO: Short description of my evaluation module."""
 
52
  """
53
 
54
 
 
55
  _KWARGS_DESCRIPTION = """
 
56
  Args:
57
+ sources (`list` of `string`): Source sentences.
58
+ hypotheses (`list` of `string`): Hypothetical paraphrases.
 
 
59
  Returns:
60
+ score (`float`): ParaPLUIE score. Minimum possible value is -inf. Maximum possible value is +inf. A score greater than 0 means that the sentences are paraphrases. A score lower than 0 means the opposite.
 
61
  Examples:
62
+ Example 1 — A simple example
63
+ >>> accuracy_metric = evaluate.load("accuracy")
64
+ >>> results = accuracy_metric.compute(references=[0, 1, 2, 0, 1, 2], predictions=[0, 1, 1, 2, 1, 0])
65
+ >>> print(results)
66
+ {'accuracy': 0.5}
 
 
67
  """
68
 
69
 
 
 
 
70
  @evaluate.utils.file_utils.add_start_docstrings(_DESCRIPTION, _KWARGS_DESCRIPTION)
71
  class Parapluie(evaluate.Metric):
72
  """TODO: Short description of my evaluation module."""