shing12345 committed on
Commit
adf9017
·
verified ·
1 Parent(s): de2a372

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +9 -26
README.md CHANGED
@@ -15,7 +15,9 @@ language:
15
  library_name: transformers
16
  ---
17
  import torch
18
- from transformers import BartForConditionalGeneration, BartTokenizer
 
 
19
 
20
  class AdvancedSummarizer:
21
  def __init__(self, model_name="facebook/bart-large-cnn"):
@@ -38,30 +40,14 @@ class AdvancedSummarizer:
38
  summary = self.tokenizer.decode(summary_ids[0], skip_special_tokens=True)
39
  return summary
40
 
41
- def main():
42
  # Example usage
43
  summarizer = AdvancedSummarizer()
44
-
45
- text = """
46
- Artificial intelligence (AI) is intelligence demonstrated by machines, as opposed to natural intelligence displayed by animals including humans. AI research has been defined as the field of study of intelligent agents, which refers to any system that perceives its environment and takes actions that maximize its chance of achieving its goals.
47
-
48
- The term "artificial intelligence" had previously been used to describe machines that mimic and display "human" cognitive skills that are associated with the human mind, such as "learning" and "problem-solving". This definition has since been rejected by major AI researchers who now describe AI in terms of rationality and acting rationally, which does not limit how intelligence can be articulated.
49
-
50
- AI applications include advanced web search engines, recommendation systems, understanding human speech, self-driving cars, automated decision-making and competing at the highest level in strategic game systems. As machines become increasingly capable, tasks considered to require "intelligence" are often removed from the definition of AI, a phenomenon known as the AI effect. For instance, optical character recognition is frequently excluded from things considered to be AI, having become a routine technology.
51
- """
52
-
53
  summary = summarizer.summarize(text)
54
  print("Summary:")
55
  print(summary)
56
 
57
- if __name__ == "__main__":
58
- main()
59
- ---
60
- import torch
61
- from transformers import GPT2LMHeadModel, GPT2Tokenizer
62
- import argparse
63
- import sys
64
-
65
  class AdvancedTextGenerator:
66
  def __init__(self, model_name="gpt2-medium"):
67
  try:
@@ -78,7 +64,6 @@ class AdvancedTextGenerator:
78
  try:
79
  input_ids = self.tokenizer.encode(prompt, return_tensors="pt").to(self.device)
80
 
81
- # Configure output parameters
82
  output_sequences = self.model.generate(
83
  input_ids=input_ids,
84
  max_length=max_length + len(input_ids[0]),
@@ -91,8 +76,7 @@ class AdvancedTextGenerator:
91
  )
92
 
93
  generated_sequences = []
94
- for generated_sequence_idx, generated_sequence in enumerate(output_sequences):
95
- generated_sequence = generated_sequence.tolist()
96
  text = self.tokenizer.decode(generated_sequence, clean_up_tokenization_spaces=True)
97
  total_sequence = text[len(self.tokenizer.decode(input_ids[0], clean_up_tokenization_spaces=True)):]
98
  generated_sequences.append(total_sequence)
@@ -101,7 +85,7 @@ class AdvancedTextGenerator:
101
  except Exception as e:
102
  return [f"Error during text generation: {e}"]
103
 
104
- def main():
105
  parser = argparse.ArgumentParser(description="Advanced Text Generator")
106
  parser.add_argument("--prompt", type=str, help="Starting prompt for text generation")
107
  parser.add_argument("--max_length", type=int, default=100, help="Maximum length of generated text")
@@ -137,6 +121,5 @@ def main():
137
  print(text)
138
 
139
  if __name__ == "__main__":
140
- main()
141
- ---
142
-
 
15
  library_name: transformers
16
  ---
17
  import torch
18
+ from transformers import BartForConditionalGeneration, BartTokenizer, GPT2LMHeadModel, GPT2Tokenizer
19
+ import argparse
20
+ import sys
21
 
22
  class AdvancedSummarizer:
23
  def __init__(self, model_name="facebook/bart-large-cnn"):
 
40
  summary = self.tokenizer.decode(summary_ids[0], skip_special_tokens=True)
41
  return summary
42
 
43
+ def main_summarizer():
44
  # Example usage
45
  summarizer = AdvancedSummarizer()
46
+ text = """...""" # Your text here
 
 
 
 
 
 
 
 
47
  summary = summarizer.summarize(text)
48
  print("Summary:")
49
  print(summary)
50
 
 
 
 
 
 
 
 
 
51
  class AdvancedTextGenerator:
52
  def __init__(self, model_name="gpt2-medium"):
53
  try:
 
64
  try:
65
  input_ids = self.tokenizer.encode(prompt, return_tensors="pt").to(self.device)
66
 
 
67
  output_sequences = self.model.generate(
68
  input_ids=input_ids,
69
  max_length=max_length + len(input_ids[0]),
 
76
  )
77
 
78
  generated_sequences = []
79
+ for generated_sequence in output_sequences:
 
80
  text = self.tokenizer.decode(generated_sequence, clean_up_tokenization_spaces=True)
81
  total_sequence = text[len(self.tokenizer.decode(input_ids[0], clean_up_tokenization_spaces=True)):]
82
  generated_sequences.append(total_sequence)
 
85
  except Exception as e:
86
  return [f"Error during text generation: {e}"]
87
 
88
+ def main_generator():
89
  parser = argparse.ArgumentParser(description="Advanced Text Generator")
90
  parser.add_argument("--prompt", type=str, help="Starting prompt for text generation")
91
  parser.add_argument("--max_length", type=int, default=100, help="Maximum length of generated text")
 
121
  print(text)
122
 
123
  if __name__ == "__main__":
124
+ main_summarizer() # Call the summarizer main function
125
+ main_generator() # Call the text generator main function