ankitkushwaha90 commited on
Commit
4980e1c
·
verified ·
1 Parent(s): f621ca4

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +336 -1
README.md CHANGED
@@ -10,4 +10,339 @@ size_categories:
10
  ---
11
  # linux command
12
  # window command
13
- # mac command
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
10
  ---
11
  # linux command
12
  # window command
13
+ # mac command
14
+
15
+ ```python
16
# Fine-tune a T5 model on "all_commands.csv" (columns: name, description, source).
# Text-to-text task: input "Describe the command: {name} in {source}" -> target
# is the description, turning the model into a command-description generator.
#
# Prerequisites:
# - pip install transformers datasets torch sentencepiece
# - Save the provided CSV content as "all_commands.csv" in the working directory.
#
# Notes:
# - t5-small is used for efficiency; switch to t5-base or larger if resources allow.
# - A GPU is recommended for larger models/epochs; adjust hyperparameters as needed.

from transformers import T5ForConditionalGeneration, T5Tokenizer, Trainer, TrainingArguments
from datasets import load_dataset
import torch

# Load model and tokenizer
model_name = "t5-small"  # "t5-base" gives better quality but needs more resources
model = T5ForConditionalGeneration.from_pretrained(model_name)
tokenizer = T5Tokenizer.from_pretrained(model_name)

# Load the CSV and make an 80/20 train/validation split.
dataset = load_dataset("csv", data_files={"train": "all_commands.csv"})
dataset = dataset["train"].train_test_split(test_size=0.2)
dataset["validation"] = dataset["test"]  # Trainer looks the split up under this name


def preprocess_function(examples):
    """Tokenize one batch of examples into (input, label) pairs.

    Input:  "Describe the command: {name} in {source}"
    Target: the command's description.

    Pad positions in the labels are replaced with -100 so the
    cross-entropy loss ignores them; otherwise the model is also
    trained to predict <pad>, which degrades generation quality.
    """
    inputs = [
        f"Describe the command: {name} in {source}"
        for name, source in zip(examples["name"], examples["source"])
    ]
    targets = examples["description"]

    # Tokenize inputs and targets to fixed lengths.
    model_inputs = tokenizer(inputs, max_length=128, truncation=True, padding="max_length")
    labels = tokenizer(targets, max_length=256, truncation=True, padding="max_length")

    # Mask padding in the labels: -100 is the ignore_index of the loss.
    pad_id = tokenizer.pad_token_id
    model_inputs["labels"] = [
        [tok if tok != pad_id else -100 for tok in seq]
        for seq in labels["input_ids"]
    ]
    return model_inputs


# Tokenize both splits and drop the raw text columns.
tokenized_dataset = dataset.map(
    preprocess_function, batched=True, remove_columns=dataset["train"].column_names
)

# Training arguments
training_args = TrainingArguments(
    output_dir="./new_cmd_model",        # directory for checkpoints
    evaluation_strategy="epoch",         # evaluate at the end of each epoch
    learning_rate=5e-5,
    per_device_train_batch_size=8,       # adjust to available GPU memory
    per_device_eval_batch_size=8,
    num_train_epochs=3,                  # increase for better results
    weight_decay=0.01,                   # regularization
    save_strategy="epoch",               # must match evaluation_strategy for load_best_model_at_end
    load_best_model_at_end=True,
    metric_for_best_model="eval_loss",
    greater_is_better=False,             # lower eval loss is better
    # Mixed precision only when a CUDA GPU is present; fp16=True on a
    # CPU-only machine makes TrainingArguments raise at construction time.
    fp16=torch.cuda.is_available(),
)

# Initialize Trainer
trainer = Trainer(
    model=model,
    args=training_args,
    train_dataset=tokenized_dataset["train"],
    eval_dataset=tokenized_dataset["validation"],
)

# Train the model
trainer.train()

# Save the fine-tuned model and tokenizer.
model.save_pretrained("./new_cmd_model")
tokenizer.save_pretrained("./new_cmd_model")

print("Fine-tuning complete. Model saved to './new_cmd_model'.")

# Optional: quick smoke test of the fine-tuned model.
fine_tuned_model = T5ForConditionalGeneration.from_pretrained("./new_cmd_model")
fine_tuned_tokenizer = T5Tokenizer.from_pretrained("./new_cmd_model")

# Example prompt
prompt = "Describe the command: ls in linux"

# Tokenize, generate, and decode.
inputs = fine_tuned_tokenizer(prompt, return_tensors="pt")
outputs = fine_tuned_model.generate(inputs["input_ids"], max_length=100, num_beams=4, early_stopping=True)
generated_text = fine_tuned_tokenizer.decode(outputs[0], skip_special_tokens=True)

print("Example generated description:", generated_text)
114
+ ```
115
+
116
+ # spiritual dataset used for fine-tuning
117
+ ### using this dataset for fine-tuning
118
+ ```python
119
+ # Fine-Tuning Code for Understanding English and Sanskrit
120
+ # Focused on Stotrams of Mahakali and Lord Shiva, Sanatan Hindu Calendar, and Astrology
121
+ # This script uses Hugging Face Transformers to fine-tune a language model (distilgpt2) on collected texts.
122
+ # The dataset includes Sanskrit stotrams (transliterated/Devanagari), English meanings, and explanations.
123
+ # Run this in an environment with Python, and install required libraries if not present.
124
+
125
+ # Step 1: Install required libraries (if not already installed)
126
+ # !pip install transformers datasets torch
127
+
128
+ import torch
129
+ from transformers import (
130
+ GPT2Tokenizer,
131
+ GPT2LMHeadModel,
132
+ TextDataset,
133
+ DataCollatorForLanguageModeling,
134
+ Trainer,
135
+ TrainingArguments
136
+ )
137
+ from datasets import Dataset
138
+
139
+ # Step 2: Prepare the dataset
140
+ # Collected texts from sources:
141
+ # - Sri Maha Kali Stotram (transliterated Sanskrit)
142
+ # - Shiva Tandava Stotram (Sanskrit in Devanagari with English meanings)
143
+ # - Detailed explanation of Sanatan Hindu Calendar (Panchang)
144
+ # - Basics of Hindu Astrology (Jyotisha)
145
+
146
+ mahakali_stotram = """
147
+ Dhyanam:
148
+ śavārūḍhāṃ mahābhīmāṃ ghōradaṃṣṭrāṃ varapradāṃ hāsyayuktāṃ triṇētrāñcha kapāla kartrikā karām ।
149
+ muktakēśīṃ lalajjihvāṃ pibantīṃ rudhiraṃ muhuḥ chaturbāhuyutāṃ dēvīṃ varābhayakarāṃ smarēt ॥
150
+ śavārūḍhāṃ mahābhīmāṃ ghōradaṃṣṭrāṃ hasanmukhīṃ chaturbhujāṃ khaḍgamuṇḍavarābhayakarāṃ śivām ।
151
+ muṇḍamālādharāṃ dēvīṃ lalajjihvāṃ digambarāṃ ēvaṃ sañchintayētkāḻīṃ śmaśanālayavāsinīm ॥
152
+
153
+ Stotram:
154
+ 1. viśvēśvarīṃ jagaddhātrīṃ sthitisaṃhārakāriṇīm ।
155
+ nidrāṃ bhagavatīṃ viṣṇōratulāṃ tējasaḥ prabhām ॥ 1 ॥
156
+
157
+ 2. tvaṃ svāhā tvaṃ svadhā tvaṃ hi vaṣaṭkāraḥ svarātmikā ।
158
+ sudhā tvamakṣarē nityē tridhā mātrātmikā sthitā ॥ 2 ॥
159
+
160
+ 3. arthamātrāsthitā nityā yānuchchāryā viśēṣataḥ ।
161
+ tvamēva sandhyā sāvitrī tvaṃ dēvī jananī parā ॥ 3 ॥
162
+
163
+ 4. tvayaitaddhāryatē viśvaṃ tvayaitadsṛjyatē jagat ।
164
+ tvayaitatpālyatē dēvi tvamatsyantē cha sarvadā ॥ 4 ॥
165
+
166
+ 5. visṛṣṭau sṛṣṭirūpā tvaṃ sthitirūpā cha pālanē ।
167
+ tathā saṃhṛtirūpāntē jagatō'sya jaganmayē ॥ 5 ॥
168
+
169
+ 6. mahāvidyā mahāmāyā mahāmēdhā mahāsmṛtiḥ ।
170
+ mahāmōhā cha bhavatī mahādēvī mahēśvarī ॥ 6 ॥
171
+
172
+ 7. prakṛtistvaṃ cha sarvasya guṇatrayavibhāvinī ।
173
+ kālarātrirmahārātrirmōharātriścha dāruṇā ॥ 7 ॥
174
+
175
+ 8. tvaṃ śrīstvamīśvarī tvaṃ hrīstvaṃ buddhirbōdhalakṣaṇā ।
176
+ lajjā puṣṭistathā tuṣṭistvaṃ śāntiḥ kṣāntirēva cha ॥ 8 ॥
177
+
178
+ 9. khaḍginī śūlinī ghōrā gadinī chakriṇī tathā ।
179
+ śaṅkhinī chāpinī bāṇabhuśuṇḍīparighāyudhā ॥ 9 ॥
180
+
181
+ 10. saumyā saumyatarāśēṣā saumyēbhyastvatisundarī ।
182
+ parāparāṇāṃ paramā tvamēva paramēśvarī ॥ 10 ॥
183
+
184
+ 11. yachcha kiñchit kvachidvastu sadasadvākhilātmikē ।
185
+ tasya sarvasya yā śaktiḥ sā tvaṃ kiṃ stūyasē tadā ॥ 11 ॥
186
+
187
+ 12. yayā tvayā jagatsraṣṭā jagatpātyatti yō jagat ।
188
+ sō'pi nidrāvaśaṃ nītaḥ kastvāṃ stōtumihēśvaraḥ ॥ 12 ॥
189
+
190
+ 13. viṣṇuḥ śarīragrahaṇamahamīśāna ēva cha
191
+ """
192
+
193
+ shiva_tandava_stotram = """
194
+ Verse 1:
195
+ Sanskrit Text:
196
+ जटाटवीगलज्जलप्रवाहपावितस्थले गलेऽवलम्ब्य लम्बितां भुजङ्गतुङ्गमालिकाम् ।
197
+ डमड्डमड्डमड्डमन्निनादवड्डमर्वयं चकार चण्डताण्डवं तनोतु नः शिवः शिवम् ॥१॥
198
+
199
+ English Meaning:
200
+ (My Prostrations to Lord Shiva, the description of whose great Tandava Dance sends a thrill of Blessedness through the Devotees)
201
+ 1.1: (There dances Shiva His Great Tandava) From His Huge Matted Hair like a Forest, is Pouring out and Flowing down the Sacred Water of the River Ganges, and making the Ground Holy; on that Holy Ground Shiva is dancing His Great Tandava Dance;
202
+ 1.2: Supporting His Neck and Hanging down are the Lofty Serpents which are Adorning His Neck like Lofty Garlands,
203
+ 1.3: His Damaru is continuously Weaving out the Sound - Damad, Damad, Damad, Damad - and filling the Air all around,
204
+ 1.4: Shiva Performed such a Passionate Tandava; O my Lord Shiva, Please Extend the Auspicious Tandava Dance within our beings also.
205
+
206
+ Verse 2:
207
+ Sanskrit Text:
208
+ जटाकटाहसम्भ्रमभ्रमन्निलिम्पनिर्झरी विलोलवीचिवल्लरीविराजमानमूर्धनि ।
209
+ धगद्धगद्धगज्जलल्ललाटपट्टपावके किशोरचन्द्रशेखरे रतिः प्रतिक्षणं मम ॥२॥
210
+
211
+ English Meaning:
212
+ (My Prostrations to Lord Shiva, the description of whose great Tandava Dance sends a thrill of Blessedness through the Devotees)
213
+ 2.1: (There dances Shiva His Great Tandava) His Huge Matted Hair like a Caldron is Revolving round and round; and Whirling with it is the Great River Goddess Ganga, ...
214
+ 2.2: ... and the Strands of His Matted Hair which are like Huge Creepers are Waving like Huge Waves; His Forehead is Brilliantly Effulgent and ...
215
+ 2.3: ... on the Surface of that Huge Forehead is Burning a Blazing Fire with the sound - Dhagad, Dhagad, Dhagad (referring to His Third Eye), ...
216
+ 2.4: ... and a Young Crescent Moon is Shining on the Peak (i.e. on His Head); O my Lord Shiva, Your Great Tandava Dance is passing a surge of Delight Every Moment through my being.
217
+
218
+ Verse 3:
219
+ Sanskrit Text:
220
+ धराधरेन्द्रनन्दिनीविलासबन्धुबन्धुर स्फुरद्दिगन्तसन्ततिप्रमोदमानमानसे ।
221
+ कृपाकटाक्षधोरणीनिरुद्धदुर्धरापदि क्वचिद्दिगम्बरे मनो विनोदमेतु वस्तुनि ॥३॥
222
+
223
+ English Meaning:
224
+ (My Prostrations to Lord Shiva, the description of whose great Tandava Dance sends a thrill of Blessedness through the Devotees)
225
+ 3.1: (There dances Shiva His Great Tandava) And Now He is accompanied by the Beautiful Divine Mother Who is the Supporter of the Earth and the Daughter of the Mountain King; She is ever His Companion in His various Divine Sports,
226
+ 3.2: The Entire Horizon is Shaking with the force of that Tandava, and the subtle waves of the Tandava is entering the sphere of the Mind and raising waves of Excessive Joy,
227
+ 3.3: That Shiva, the Flow of whose Graceful Side Glance can Restrain even the Unrestrainable Calamities, and ...
228
+ 3.4: ... Who is Digambara (clothed with sky signifying He is ever-free and without any desire), Sometimes in His Mind Materializes the wish to Play the Divine Sports (and hence this Great Tandava).
229
+
230
+ Verse 4:
231
+ Sanskrit Text:
232
+ जटाभुजङ्गपिङ्गलस्फुरत्फणामणिप्रभा कदम्बकुङ्कुमद्रव
233
+ """
234
+
235
+ hindu_calendar_explanation = """
236
+ The Hindu calendar is a traditional lunisolar calendar system used in the Indian subcontinent and Southeast Asia, with regional variations for social and religious purposes. It is based on the sidereal year for the solar cycle and adjusts lunar cycles approximately every three years to align with seasonal and agricultural needs. Unlike the Gregorian calendar, which adds days to months, the Hindu calendar inserts an extra full month, known as Adhika Masa, every 32–33 months to ensure festivals and rituals occur in the appropriate season. This system has been in use since Vedic times and remains significant for setting Hindu festival dates, as well as being adopted by early Buddhist and Jain communities for their calendars.
237
+
238
+ Lunisolar Nature
239
+ The Hindu calendar combines lunar and solar elements, using the lunar cycle for months and days, and the solar cycle for the year. It accounts for the mismatch between the lunar year (approximately 354 days) and the solar year (approximately 365 days) through intercalary months, ensuring alignment with natural and agricultural cycles.
240
+
241
+ Components of the Panchang
242
+ The Panchang, or Panjika in Eastern India, is a comprehensive almanac with five key components (angas):
243
+ - Tithi: Lunar day, based on the angular distance between the Sun and Moon, varying between 21.5 and 26 hours, used for timing rituals and festivals.
244
+ - Vara: Weekday, corresponding to celestial bodies (e.g., Ravi for Sunday, Soma for Monday), with names aligned with Indo-European calendars, divided into 60 ghatika (24 minutes each).
245
+ - Nakshatra: Divisions of the ecliptic, each 13° 20', starting from 0° Aries, used for astrological and ritual purposes.
246
+ - Yoga: Calculated by adding the longitudes of the Sun and Moon, normalized to 0°–360°, and divided into 27 parts, each 800 arcminutes, influencing auspicious timings.
247
+ - Karana: Half of a tithi, defined by the angular distance between the Sun and Moon increasing by 6°, with 11 preset karaṇas (4 fixed and 7 repeating) to cover 30 tithis.
248
+
249
+ Months
250
+ The Hindu calendar has both solar and lunar months:
251
+ - Solar Months: Based on the Sun’s transit through the zodiac (rāśi), divided into 12 parts of 30° each, named after constellations (e.g., Mesha for Aries, Vṛṣabha for Taurus). Regional calendars like Tamil, Bengali, and Malayalam adapt these names, with variations in pronunciation and order. Solar months correspond to seasons (ṛtu), such as Vasanta (spring) and Grīṣma (summer), and are linked to Gregorian months (e.g., Mesha spans April–May).
252
+ - Lunar Months: Based on lunar cycles, with two fortnights (pakṣa): Shukla Paksha (waxing, bright half ending in full moon) and Krishna Paksha (waning, dark half ending in new moon). There are two traditions:
253
+ - Amānta: Ends on new moon day, followed by most peninsular states except Tamil Nadu, Kerala, etc.
254
+ - Purnimānta: Ends on full moon day, followed in northern India and Nepal, restored by King Vikramaditya in 57 BCE. Lunar months are named variably across regions (e.g., Chaitra, Vaishakha), with the Sun’s transit into Mesha marking Chaitra as the first month.
255
+
256
+ To align lunar and solar calendars, an extra month (Adhika Masa or Purushottam Masa) is inserted every 32.5 months on average, with complex rules to avoid repetition of certain months (Mārgaśīrṣa, Pausha, Magha). Rare corrections include dropping a month (kshaya month) under specific astronomical conditions, such as in 1 BCE when Pausha was omitted.
257
+
258
+ Eras (Samvat)
259
+ The calendar uses several eras (samvat), with three significant ones:
260
+ - Vikram Samvat: Starts in 57 BCE, linked to King Vikramaditya, common in northern, western, central, and eastern India, with the new year in Vaishakha.
261
+ - Shaka Samvat: Includes the Old Shaka Era (epoch uncertain, possibly 1st millennium BCE) and Saka Era of 78 CE, prevalent in southern India and Southeast Asia, with inscriptions like Kedukan Bukit (682 CE) using it.
262
+ - Indian National Calendar: A modern standardized calendar combining Hindu systems, though traditional calendars remain in use.
263
+
264
+ Regional Variations
265
+ The Hindu Calendar Reform Committee identified over 30 variants, broadly categorized as:
266
+ - Lunar Emphasizing: Vikrama calendar (western, northern India, Nepal) and Shalivahana Shaka (Deccan region, e.g.
267
+ """
268
+
269
+ hindu_astrology_explanation = """
270
+ Jyotisha, also known as Vedic astrology or Hindu astrology, is a traditional system rooted in the study of the Vedas and is one of the six auxiliary disciplines, or Vedangas, in Hinduism. It is derived from the Sanskrit word "jyotish," meaning light, such as that of the sun, moon, or heavenly bodies, and encompasses astronomy, astrology, and timekeeping using celestial movements. Its primary historical purpose was to maintain calendars and predict auspicious times for Vedic rituals.
271
+
272
+ History
273
+ Jyotisha's earliest texts are found in the Vedanga Jyotisha, linked to the Rigveda and Yajurveda, with versions consisting of 36 and 43 verses, respectively. Early jyotisha focused on calendar preparation for sacrificial rituals, with no mention of planets, and included references to eclipse-causing entities like Rahu and Svarbhānu in texts like the Atharvaveda and Chandogya Upanishad. The practice is based on the Vedic concept of bandhu, connecting the microcosm and macrocosm. There is debate over its origins, with some scholars suggesting Hellenistic influences, particularly after the Yavanajātaka (2nd century CE), while others argue for independent development, possibly with interactions with Greek astrology. The Āryabhaṭīya (5th century CE) and later texts like the Bṛhat Parāśara Horāśāstra (7th-8th centuries CE) and Sārāvalī (around 800 CE) form the basis of classical Indian astrology.
274
+
275
+ The Six Limbs (Vedangas)
276
+ Jyotisha is one of the six Vedangas, which are auxiliary disciplines supporting Vedic rituals. The other Vedangas include phonetics, grammar, etymology, metrics, and ritual. Jyotisha specifically deals with astronomy and astrology for timekeeping and ritual timing, but the content does not detail the other five Vedangas beyond their mention as part of the six.
277
+
278
+ Core Elements
279
+
280
+ Grahas (Planets)
281
+ The navagraha, or nine celestial bodies, include Surya (Sun), Chandra (Moon), Budha (Mercury), Shukra (Venus), Mangala (Mars), Bṛhaspati or Guru (Jupiter), Shani (Saturn), Rahu (North node of the Moon), and Ketu (South node of the Moon). These are believed to influence human affairs, with Rahu and Ketu, known as shadow planets, associated with eclipses and having an 18-year orbital cycle. Planets are considered indicators (karakas) of major life aspects like profession, marriage, and longevity, with Atmakaraka being the most significant for broad life contours.
282
+
283
+ Rashis (Zodiac Signs)
284
+ Jyotisha uses the sidereal zodiac, an imaginary 360-degree belt divided into 12 equal parts called rashis, each spanning 30 degrees. Unlike Western astrology, which uses the tropical zodiac, Jyotisha accounts for the precession of the equinoxes with an ayanāṃśa adjustment, aligning with constellations. The rashis, with their Sanskrit names and corresponding English signs, include Aries (Meṣa), Taurus (Vṛṣabha), Gemini (Mithuna), Cancer (Karka), Leo (Siṃha), Virgo (Kanyā), Libra (Tulā), Scorpio (Vṛścika), Sagittarius (Dhanuṣa), Capricorn (Makara), Aquarius (Kumbha), and Pisces (Mīna). Each rashi is associated with elements (fire, earth, air, water), qualities (movable, fixed, dual), and ruling bodies, excluding Uranus, Neptune, and Pluto, which are disregarded.
285
+
286
+ Bhavas (Houses)
287
+ The birth chart, or bhāva chakra, divides the 360-degree circle into 12 houses, each spanning 30 degrees, representing different life aspects. Bhavas are categorized into four purusharthas (aims in life): Dharma (duty, 1st, 5th, 9th houses), Artha (resources, 2nd, 6th, 10th), Kama (pleasure, 3rd, 7th, 11th), and Moksha (liberation, 4th, 8th, 12th). These houses personalize astrological signs to the individual, with each house influenced by associated karaka planets.
288
+
289
+ Nakshatras
290
+ Nakshatras, or lunar mansions, are 27 equal divisions of the night sky, each covering 13° 20′ of the ecliptic, identified by prominent stars. Historically, 28 nakshatras were enumerated, but modern practice uses 27, with the missing 28th being Abhijeeta. Each nakshatra is divided into four padas (quarters) of 3° 20′. The junction of
291
+ """
292
+
293
# Concatenate all texts into one large corpus for causal language modeling.
full_text = (
    mahakali_stotram + "\n\n" + shiva_tandava_stotram + "\n\n"
    + hindu_calendar_explanation + "\n\n" + hindu_astrology_explanation
)

# Keep a copy of the corpus on disk for reference/reproducibility.
with open("fine_tune_dataset.txt", "w", encoding="utf-8") as f:
    f.write(full_text)

# Step 3: Load tokenizer and model
model_name = "distilgpt2"  # small model; supports English and can learn Sanskrit patterns
tokenizer = GPT2Tokenizer.from_pretrained(model_name)
model = GPT2LMHeadModel.from_pretrained(model_name)

# GPT-2 has no pad token; reuse EOS so the collator can pad batches.
if tokenizer.pad_token is None:
    tokenizer.pad_token = tokenizer.eos_token

# Step 4: Create dataset.
# transformers.TextDataset is deprecated, so build the equivalent with
# datasets.Dataset (imported above): tokenize the whole corpus once, then
# split the ids into non-overlapping block_size chunks, dropping the
# trailing partial block — exactly what TextDataset used to do.
block_size = 128  # adjust based on context length
token_ids = tokenizer(full_text, return_attention_mask=False)["input_ids"]
blocks = [
    token_ids[i:i + block_size]
    for i in range(0, len(token_ids) - block_size + 1, block_size)
]
dataset = Dataset.from_dict({"input_ids": blocks})

data_collator = DataCollatorForLanguageModeling(
    tokenizer=tokenizer,
    mlm=False  # causal LM, not masked LM: labels are shifted input_ids
)

# Step 5: Set up training arguments
training_args = TrainingArguments(
    output_dir="./fine_tuned_model",
    overwrite_output_dir=True,
    num_train_epochs=3,              # adjust epochs as needed
    per_device_train_batch_size=4,
    save_steps=500,
    save_total_limit=2,              # keep only the two most recent checkpoints
    logging_steps=100,
    learning_rate=5e-5,
)

# Step 6: Initialize Trainer and fine-tune
trainer = Trainer(
    model=model,
    args=training_args,
    data_collator=data_collator,
    train_dataset=dataset,
)

trainer.train()

# Step 7: Save the fine-tuned model and tokenizer
trainer.save_model("./fine_tuned_model")
tokenizer.save_pretrained("./fine_tuned_model")

print("Fine-tuning completed. The model can now be used for generation tasks related to English, Sanskrit stotrams, Hindu calendar, and astrology.")
348
+ ```