# OPC UA NodeSet → natural language converter (Groq LLaMA).
# NOTE(review): the hosting platform reported a build error for this script;
# the code below has been reconstructed from the platform's garbled listing.
| import os | |
| import xml.etree.ElementTree as ET | |
| from groq import Groq | |
# Module-level Groq client shared by the helpers below; the API key is
# taken from the GROQ_API_KEY environment variable (None if unset).
client = Groq(api_key=os.getenv("GROQ_API_KEY"))
def extract_uavariable_chunks(xml_file):
    """Extract every UAVariable element from an OPC UA NodeSet XML document.

    Parameters
    ----------
    xml_file : str or file-like
        Path to (or open handle on) the NodeSet XML document.

    Returns
    -------
    list[str]
        Each UAVariable subtree serialized back to a unicode XML string.

    Raises
    ------
    xml.etree.ElementTree.ParseError
        If the document is not well-formed XML.
    """
    tree = ET.parse(xml_file)
    root = tree.getroot()
    # Standard OPC UA NodeSet namespaces.  The empty-string key maps the
    # default namespace so the un-prefixed path './/UAVariable' resolves
    # against namespaced elements (supported since Python 3.8).
    ns = {
        '': 'http://opcfoundation.org/UA/2011/03/UANodeSet.xsd',  # Default namespace
        'uax': 'http://opcfoundation.org/UA/2008/02/Types.xsd',   # Example namespace
    }
    elements = root.findall('.//UAVariable', ns)
    if not elements:
        # Fallback: some NodeSet exports omit the default xmlns entirely;
        # without this, the namespaced search above silently finds nothing.
        elements = root.findall('.//UAVariable')
    # Serialize each matched subtree back to a standalone XML string.
    return [ET.tostring(el, encoding='unicode') for el in elements]
def send_to_llm(client, prompt, chunk):
    """Ask the Groq LLaMA model to process one XML chunk.

    The prompt and the chunk are concatenated into a single user message
    (separated by a blank line) and sent as one chat completion request.

    Parameters
    ----------
    client : Groq
        An initialized Groq API client.
    prompt : str
        Instruction text placed before the chunk.
    chunk : str
        The XML fragment to convert.

    Returns
    -------
    str
        The model's reply with surrounding whitespace stripped.
    """
    user_message = {
        "role": "user",
        "content": f"{prompt}\n\n{chunk}",
    }
    completion = client.chat.completions.create(
        messages=[user_message],
        model="llama3-8b-8192",
    )
    reply = completion.choices[0].message.content
    return reply.strip()
def process_xml_and_generate_natural_language(xml_file, limit=15):
    """Convert the first *limit* UAVariable chunks of *xml_file* to prose.

    Extracts UAVariable chunks from the NodeSet file, sends each to the
    LLaMA model with a fixed conversion prompt, and collects the answers.

    Parameters
    ----------
    xml_file : str
        Path to the OPC UA NodeSet XML file.
    limit : int, optional
        Maximum number of UAVariable chunks to process.  Defaults to 15,
        matching the previous hard-coded cap.

    Returns
    -------
    list[str]
        One natural-language description per successfully processed chunk.
        Chunks whose request fails are skipped (best-effort by design).
    """
    prompt = ("Convert this xml chunk to natural language. The hierarchy should be taken into account "
              "and no values should be missed. Do not provide extra text. Just the conversion of xml to natural language.")
    descriptions = []
    # Slice instead of enumerate-and-break: only the first `limit` chunks
    # are ever sent to the model.
    for chunk in extract_uavariable_chunks(xml_file)[:limit]:
        try:
            descriptions.append(send_to_llm(client, prompt, chunk))
        except Exception as e:
            # Deliberate best-effort: one bad chunk must not abort the batch.
            print(f"Error processing chunk: {e}")
            continue
    return descriptions