PabloAccuosto commited on
Commit
4bfcb4c
·
verified ·
1 Parent(s): 41a54a2

Upload folder using huggingface_hub

Browse files
Files changed (4) hide show
  1. .argilla/dataset.json +16 -0
  2. .argilla/settings.json +140 -0
  3. .argilla/version.json +3 -0
  4. README.md +167 -70
.argilla/dataset.json ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "id": "3f1fd9f5-17e9-49b1-a9c0-eef5771e790e",
3
+ "name": "scilake-maritime-vesseltype",
4
+ "guidelines": "# Vessel type validation guidelines\n## Task Description\nYour task is to validate the extraction of vessel type entities and their linking to their closest matching entries in the AIS taxonomy.\n\n## What to Validate\nFor each record, please verify the following:\n1. **Entity Spans:** Are all text spans correctly identified? Are the span boundaries accurate?\n2. **Entity Types:** Are entity types correctly assigned?\n3. **Entity Linking:** Are the matching entities in the AIS taxonomy correctly assigned?\n\n## Instructions\n1. Carefully read the texts.\n2. Review the NER spans and correct them if:\n- The boundaries (start/end) are incorrect\n- The entity label is wrong\n3. Verify that the extracted entities are correctly linked to their closest match in the AIS taxonomy\n4. Add any comments or feedback you deem relevant\n\n## Validation Guidelines\n- Entity Annotations: Mark spans as \"Correct\" only if boundaries and labels are accurate.\n- Entity Extraction: Mark as \"Correct\" if all energy (storage) types mentioned are extracted; \"Partially correct\" if any are missing or incorrect.\n- IRENA Linking: Mark as \"Correct\" if all links are to the appropriate entries. Use \"Partially correct\" if any are incorrect.",
5
+ "allow_extra_metadata": false,
6
+ "status": "ready",
7
+ "distribution": {
8
+ "strategy": "overlap",
9
+ "min_submitted": 2
10
+ },
11
+ "metadata": null,
12
+ "workspace_id": "0756eadb-468f-4c06-88c4-51a3fa6f665f",
13
+ "last_activity_at": "2025-06-17T16:40:01.098468",
14
+ "inserted_at": "2025-04-09T09:51:56.464035",
15
+ "updated_at": "2025-04-16T09:44:02.699563"
16
+ }
.argilla/settings.json ADDED
@@ -0,0 +1,140 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "guidelines": "# Vessel type validation guidelines\n## Task Description\nYour task is to validate the extraction of vessel type entities and their linking to their closest matching entries in the AIS taxonomy.\n\n## What to Validate\nFor each record, please verify the following:\n1. **Entity Spans:** Are all text spans correctly identified? Are the span boundaries accurate?\n2. **Entity Types:** Are entity types correctly assigned?\n3. **Entity Linking:** Are the matching entities in the AIS taxonomy correctly assigned?\n\n## Instructions\n1. Carefully read the texts.\n2. Review the NER spans and correct them if:\n- The boundaries (start/end) are incorrect\n- The entity label is wrong\n3. Verify that the extracted entities are correctly linked to their closest match in the AIS taxonomy\n4. Add any comments or feedback you deem relevant\n\n## Validation Guidelines\n- Entity Annotations: Mark spans as \"Correct\" only if boundaries and labels are accurate.\n- Entity Extraction: Mark as \"Correct\" if all energy (storage) types mentioned are extracted; \"Partially correct\" if any are missing or incorrect.\n- IRENA Linking: Mark as \"Correct\" if all links are to the appropriate entries. Use \"Partially correct\" if any are incorrect.",
3
+ "allow_extra_metadata": false,
4
+ "distribution": {
5
+ "strategy": "overlap",
6
+ "min_submitted": 2
7
+ },
8
+ "fields": [
9
+ {
10
+ "id": "73826773-5fb9-4914-af61-4fe879c0b273",
11
+ "name": "text",
12
+ "title": "Text",
13
+ "required": true,
14
+ "settings": {
15
+ "type": "text",
16
+ "use_markdown": false
17
+ },
18
+ "dataset_id": "3f1fd9f5-17e9-49b1-a9c0-eef5771e790e",
19
+ "inserted_at": "2025-04-09T09:51:56.668666",
20
+ "updated_at": "2025-04-16T09:44:02.825359"
21
+ },
22
+ {
23
+ "id": "6c50ec21-89a3-47f6-8dea-a8b228de9e86",
24
+ "name": "links",
25
+ "title": "Linked entities",
26
+ "required": true,
27
+ "settings": {
28
+ "type": "text",
29
+ "use_markdown": true
30
+ },
31
+ "dataset_id": "3f1fd9f5-17e9-49b1-a9c0-eef5771e790e",
32
+ "inserted_at": "2025-04-09T09:51:56.751406",
33
+ "updated_at": "2025-04-16T09:44:03.831068"
34
+ }
35
+ ],
36
+ "questions": [
37
+ {
38
+ "id": "e741d31a-4b09-462c-b1fd-1c6d53adb96e",
39
+ "name": "span_label",
40
+ "title": "Select and classify the tokens according to the specified categories.",
41
+ "description": null,
42
+ "required": true,
43
+ "settings": {
44
+ "type": "span",
45
+ "field": "text",
46
+ "options": [
47
+ {
48
+ "value": "vesselType",
49
+ "text": "vesselType",
50
+ "description": null
51
+ }
52
+ ],
53
+ "visible_options": null,
54
+ "allow_overlapping": true,
55
+ "allow_character_annotation": true
56
+ },
57
+ "dataset_id": "3f1fd9f5-17e9-49b1-a9c0-eef5771e790e",
58
+ "inserted_at": "2025-04-09T09:51:56.837144",
59
+ "updated_at": "2025-04-16T09:44:04.366389"
60
+ },
61
+ {
62
+ "id": "1ee18031-6f2a-4180-baff-3069e40594b7",
63
+ "name": "assess_ner",
64
+ "title": "Extracted entity validation",
65
+ "description": "Are the extracted entities correct?",
66
+ "required": true,
67
+ "settings": {
68
+ "type": "label_selection",
69
+ "options": [
70
+ {
71
+ "value": "Correct",
72
+ "text": "Correct",
73
+ "description": null
74
+ },
75
+ {
76
+ "value": "Partially correct",
77
+ "text": "Partially correct",
78
+ "description": null
79
+ },
80
+ {
81
+ "value": "Incorrect",
82
+ "text": "Incorrect",
83
+ "description": null
84
+ }
85
+ ],
86
+ "visible_options": 3
87
+ },
88
+ "dataset_id": "3f1fd9f5-17e9-49b1-a9c0-eef5771e790e",
89
+ "inserted_at": "2025-04-09T09:51:56.919907",
90
+ "updated_at": "2025-04-16T09:44:04.444919"
91
+ },
92
+ {
93
+ "id": "e2a7a2a3-8268-4c00-98ee-b3f87cfafb3f",
94
+ "name": "assess_nel",
95
+ "title": "Linked AIS entity validation",
96
+ "description": "Are the linked entities in the AIS taxonomy correct?",
97
+ "required": true,
98
+ "settings": {
99
+ "type": "label_selection",
100
+ "options": [
101
+ {
102
+ "value": "Correct",
103
+ "text": "Correct",
104
+ "description": null
105
+ },
106
+ {
107
+ "value": "Partially correct",
108
+ "text": "Partially correct",
109
+ "description": null
110
+ },
111
+ {
112
+ "value": "Incorrect",
113
+ "text": "Incorrect",
114
+ "description": null
115
+ }
116
+ ],
117
+ "visible_options": 3
118
+ },
119
+ "dataset_id": "3f1fd9f5-17e9-49b1-a9c0-eef5771e790e",
120
+ "inserted_at": "2025-04-09T09:51:56.992282",
121
+ "updated_at": "2025-04-16T09:44:04.526322"
122
+ },
123
+ {
124
+ "id": "b73864dc-022b-43be-b17a-e7f2d7de032b",
125
+ "name": "comments",
126
+ "title": "Comments",
127
+ "description": "Additional comments",
128
+ "required": false,
129
+ "settings": {
130
+ "type": "text",
131
+ "use_markdown": false
132
+ },
133
+ "dataset_id": "3f1fd9f5-17e9-49b1-a9c0-eef5771e790e",
134
+ "inserted_at": "2025-04-09T09:51:57.087627",
135
+ "updated_at": "2025-04-16T09:44:04.599804"
136
+ }
137
+ ],
138
+ "metadata": [],
139
+ "vectors": []
140
+ }
.argilla/version.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ {
2
+ "argilla": "2.6.0"
3
+ }
README.md CHANGED
@@ -1,72 +1,169 @@
1
  ---
2
- dataset_info:
3
- features:
4
- - name: id
5
- dtype: string
6
- - name: status
7
- dtype: string
8
- - name: inserted_at
9
- dtype: timestamp[us]
10
- - name: updated_at
11
- dtype: timestamp[us]
12
- - name: _server_id
13
- dtype: string
14
- - name: text
15
- dtype: string
16
- - name: links
17
- dtype: string
18
- - name: span_label.responses
19
- list:
20
- list:
21
- - name: end
22
- dtype: int64
23
- - name: label
24
- dtype: string
25
- - name: start
26
- dtype: int64
27
- - name: span_label.responses.users
28
- sequence: string
29
- - name: span_label.responses.status
30
- sequence: string
31
- - name: assess_ner.responses
32
- sequence: string
33
- - name: assess_ner.responses.users
34
- sequence: string
35
- - name: assess_ner.responses.status
36
- sequence: string
37
- - name: assess_nel.responses
38
- sequence: string
39
- - name: assess_nel.responses.users
40
- sequence: string
41
- - name: assess_nel.responses.status
42
- sequence: string
43
- - name: comments.responses
44
- sequence: string
45
- - name: comments.responses.users
46
- sequence: string
47
- - name: comments.responses.status
48
- sequence: string
49
- - name: span_label.suggestion
50
- list:
51
- - name: end
52
- dtype: int64
53
- - name: label
54
- dtype: string
55
- - name: start
56
- dtype: int64
57
- - name: span_label.suggestion.agent
58
- dtype: 'null'
59
- - name: span_label.suggestion.score
60
- dtype: 'null'
61
- splits:
62
- - name: train
63
- num_bytes: 497701
64
- num_examples: 189
65
- download_size: 253842
66
- dataset_size: 497701
67
- configs:
68
- - config_name: default
69
- data_files:
70
- - split: train
71
- path: data/train-*
72
  ---
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  ---
2
+ tags:
3
+ - rlfh
4
+ - argilla
5
+ - human-feedback
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
6
  ---
7
+
8
+ # Dataset Card for scilake-maritime
9
+
10
+
11
+
12
+
13
+
14
+
15
+
16
+ This dataset has been created with [Argilla](https://github.com/argilla-io/argilla). As shown in the sections below, this dataset can be loaded into your Argilla server as explained in [Load with Argilla](#load-with-argilla), or used directly with the `datasets` library in [Load with `datasets`](#load-with-datasets).
17
+
18
+
19
+ ## Using this dataset with Argilla
20
+
21
+ To load with Argilla, you'll just need to install Argilla as `pip install argilla --upgrade` and then use the following code:
22
+
23
+ ```python
24
+ import argilla as rg
25
+
26
+ ds = rg.Dataset.from_hub("SIRIS-Lab/scilake-maritime", settings="auto")
27
+ ```
28
+
29
+ This will load the settings and records from the dataset repository and push them to your Argilla server for exploration and annotation.
30
+
31
+ ## Using this dataset with `datasets`
32
+
33
+ To load the records of this dataset with `datasets`, you'll just need to install `datasets` as `pip install datasets --upgrade` and then use the following code:
34
+
35
+ ```python
36
+ from datasets import load_dataset
37
+
38
+ ds = load_dataset("SIRIS-Lab/scilake-maritime")
39
+ ```
40
+
41
+ This will only load the records of the dataset, but not the Argilla settings.
42
+
43
+ ## Dataset Structure
44
+
45
+ This dataset repo contains:
46
+
47
+ * Dataset records in a format compatible with HuggingFace `datasets`. These records will be loaded automatically when using `rg.Dataset.from_hub` and can be loaded independently using the `datasets` library via `load_dataset`.
48
+ * The [annotation guidelines](#annotation-guidelines) that have been used for building and curating the dataset, if they've been defined in Argilla.
49
+ * A dataset configuration folder conforming to the Argilla dataset format in `.argilla`.
50
+
51
+ The dataset is created in Argilla with: **fields**, **questions**, **suggestions**, **metadata**, **vectors**, and **guidelines**.
52
+
53
+ ### Fields
54
+
55
+ The **fields** are the features or text of a dataset's records. For example, the 'text' column of a text classification dataset or the 'prompt' column of an instruction following dataset.
56
+
57
+ | Field Name | Title | Type | Required |
58
+ | ---------- | ----- | ---- | -------- |
59
+ | text | Text | text | True |
60
+ | links | Linked entities | text | True |
61
+
62
+
63
+ ### Questions
64
+
65
+ The **questions** are the questions that will be asked to the annotators. They can be of different types, such as rating, text, label_selection, multi_label_selection, or ranking.
66
+
67
+ | Question Name | Title | Type | Required | Description | Values/Labels |
68
+ | ------------- | ----- | ---- | -------- | ----------- | ------------- |
69
+ | span_label | Select and classify the tokens according to the specified categories. | span | True | N/A | ['vesselType'] |
70
+ | assess_ner | Extracted entity validation | label_selection | True | Are the extracted entities correct? | ['Correct', 'Partially correct', 'Incorrect'] |
71
+ | assess_nel | Linked AIS entity validation | label_selection | True | Are the linked entities in the AIS taxonomy correct? | ['Correct', 'Partially correct', 'Incorrect'] |
72
+ | comments | Comments | text | False | Additional comments | N/A |
73
+
74
+
75
+ <!-- check length of metadata properties -->
76
+
77
+
78
+
79
+
80
+ ### Data Splits
81
+
82
+ The dataset contains a single split, which is `train`.
83
+
84
+ ## Dataset Creation
85
+
86
+ ### Curation Rationale
87
+
88
+ [More Information Needed]
89
+
90
+ ### Source Data
91
+
92
+ #### Initial Data Collection and Normalization
93
+
94
+ [More Information Needed]
95
+
96
+ #### Who are the source language producers?
97
+
98
+ [More Information Needed]
99
+
100
+ ### Annotations
101
+
102
+ #### Annotation guidelines
103
+
104
+ # Vessel type validation guidelines
105
+ ## Task Description
106
+ Your task is to validate the extraction of vessel type entities and their linking to their closest matching entries in the AIS taxonomy.
107
+
108
+ ## What to Validate
109
+ For each record, please verify the following:
110
+ 1. **Entity Spans:** Are all text spans correctly identified? Are the span boundaries accurate?
111
+ 2. **Entity Types:** Are entity types correctly assigned?
112
+ 3. **Entity Linking:** Are the matching entities in the AIS taxonomy correctly assigned?
113
+
114
+ ## Instructions
115
+ 1. Carefully read the texts.
116
+ 2. Review the NER spans and correct them if:
117
+ - The boundaries (start/end) are incorrect
118
+ - The entity label is wrong
119
+ 3. Verify that the extracted entities are correctly linked to their closest match in the AIS taxonomy
120
+ 4. Add any comments or feedback you deem relevant
121
+
122
+ ## Validation Guidelines
123
+ - Entity Annotations: Mark spans as "Correct" only if boundaries and labels are accurate.
124
+ - Entity Extraction: Mark as "Correct" if all vessel types mentioned are extracted; "Partially correct" if any are missing or incorrect.
125
+ - AIS Linking: Mark as "Correct" if all links are to the appropriate entries. Use "Partially correct" if any are incorrect.
126
+
127
+ #### Annotation process
128
+
129
+ [More Information Needed]
130
+
131
+ #### Who are the annotators?
132
+
133
+ [More Information Needed]
134
+
135
+ ### Personal and Sensitive Information
136
+
137
+ [More Information Needed]
138
+
139
+ ## Considerations for Using the Data
140
+
141
+ ### Social Impact of Dataset
142
+
143
+ [More Information Needed]
144
+
145
+ ### Discussion of Biases
146
+
147
+ [More Information Needed]
148
+
149
+ ### Other Known Limitations
150
+
151
+ [More Information Needed]
152
+
153
+ ## Additional Information
154
+
155
+ ### Dataset Curators
156
+
157
+ [More Information Needed]
158
+
159
+ ### Licensing Information
160
+
161
+ [More Information Needed]
162
+
163
+ ### Citation Information
164
+
165
+ [More Information Needed]
166
+
167
+ ### Contributions
168
+
169
+ [More Information Needed]