File size: 2,992 Bytes
582dd5b
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
"""
Dataset loading script for Fenic 0.4.0 API Documentation.

This script can be used with the Hugging Face datasets library to load the dataset.
"""

from pathlib import Path

import pandas as pd
from datasets import Dataset, DatasetDict, Features, Value, Sequence


def load_fenic_api_dataset(data_dir="."):
    """
    Load the Fenic API documentation dataset from local parquet files.

    Args:
        data_dir: Directory containing the parquet files
            (api_df.parquet, hierarchy_df.parquet, fenic_summary.parquet).

    Returns:
        DatasetDict with three splits: api, hierarchy, and summary

    Raises:
        FileNotFoundError: If any of the expected parquet files is missing.
    """
    # Use pathlib for path joining so data_dir works with or without a
    # trailing separator; pd.read_parquet accepts Path objects directly.
    base = Path(data_dir)

    api_df = pd.read_parquet(base / "api_df.parquet")
    hierarchy_df = pd.read_parquet(base / "hierarchy_df.parquet")
    summary_df = pd.read_parquet(base / "fenic_summary.parquet")

    # Wrap each DataFrame as a Hugging Face Dataset and group them under
    # named splits.
    return DatasetDict({
        'api': Dataset.from_pandas(api_df),
        'hierarchy': Dataset.from_pandas(hierarchy_df),
        'summary': Dataset.from_pandas(summary_df),
    })


def get_dataframe_methods(dataset):
    """
    Get all public DataFrame methods from the dataset.

    Args:
        dataset: The loaded Fenic API dataset (a DatasetDict with an 'api' split)

    Returns:
        Filtered dataset containing only public DataFrame methods
    """
    # Keep only public methods defined on fenic.api.dataframe.DataFrame.
    # `qualified_name` may be falsy (None/empty), so truthiness is checked
    # before the substring test.
    df_methods = [
        item
        for item in dataset['api']
        if item['qualified_name']
        and 'fenic.api.dataframe.DataFrame.' in item['qualified_name']
        and item['type'] == 'method'
        and item['is_public']
    ]
    return Dataset.from_list(df_methods)


def get_semantic_functions(dataset):
    """
    Get all semantic functions from the dataset.

    Args:
        dataset: The loaded Fenic API dataset (a DatasetDict with an 'api' split)

    Returns:
        Filtered dataset containing only semantic functions
    """
    # Keep only functions under the fenic.api.functions.semantic module.
    # `qualified_name` may be falsy (None/empty), so truthiness is checked
    # before the substring test.
    # NOTE(review): unlike get_dataframe_methods, this does NOT filter on
    # `is_public` — presumably intentional, but worth confirming.
    semantic_funcs = [
        item
        for item in dataset['api']
        if item['qualified_name']
        and 'fenic.api.functions.semantic.' in item['qualified_name']
        and item['type'] == 'function'
    ]
    return Dataset.from_list(semantic_funcs)


if __name__ == "__main__":
    # Example usage: load the dataset and report the size of each split.
    dataset = load_fenic_api_dataset()
    print("Dataset loaded successfully!")

    for label, split in (("API", "api"), ("Hierarchy", "hierarchy"), ("Summary", "summary")):
        print(f"{label} entries: {len(dataset[split])}")

    # Demonstrate the two filtering helpers.
    df_methods = get_dataframe_methods(dataset)
    print(f"\nDataFrame methods found: {len(df_methods)}")

    semantic_funcs = get_semantic_functions(dataset)
    print(f"Semantic functions found: {len(semantic_funcs)}")