asahi417 commited on
Commit
bad90d6
·
1 Parent(s): 1b46590
Files changed (45) hide show
  1. .gitattributes +39 -0
  2. data/tweet_sentiment_small/test.jsonl +3 -0
  3. data/tweet_sentiment_small/train.jsonl +3 -0
  4. data/tweet_sentiment_small/validation.jsonl +3 -0
  5. data/tweet_sentiment_small_test0_seed0/test.jsonl +3 -0
  6. data/tweet_sentiment_small_test0_seed0/train.jsonl +3 -0
  7. data/tweet_sentiment_small_test0_seed0/validation.jsonl +3 -0
  8. data/tweet_sentiment_small_test0_seed1/test.jsonl +3 -0
  9. data/tweet_sentiment_small_test0_seed1/train.jsonl +3 -0
  10. data/tweet_sentiment_small_test0_seed1/validation.jsonl +3 -0
  11. data/tweet_sentiment_small_test0_seed2/test.jsonl +3 -0
  12. data/tweet_sentiment_small_test0_seed2/train.jsonl +3 -0
  13. data/tweet_sentiment_small_test0_seed2/validation.jsonl +3 -0
  14. data/tweet_sentiment_small_test1_seed0/test.jsonl +3 -0
  15. data/tweet_sentiment_small_test1_seed0/train.jsonl +3 -0
  16. data/tweet_sentiment_small_test1_seed0/validation.jsonl +3 -0
  17. data/tweet_sentiment_small_test1_seed1/test.jsonl +3 -0
  18. data/tweet_sentiment_small_test1_seed1/train.jsonl +3 -0
  19. data/tweet_sentiment_small_test1_seed1/validation.jsonl +3 -0
  20. data/tweet_sentiment_small_test1_seed2/test.jsonl +3 -0
  21. data/tweet_sentiment_small_test1_seed2/train.jsonl +3 -0
  22. data/tweet_sentiment_small_test1_seed2/validation.jsonl +3 -0
  23. data/tweet_sentiment_small_test2_seed0/test.jsonl +3 -0
  24. data/tweet_sentiment_small_test2_seed0/train.jsonl +3 -0
  25. data/tweet_sentiment_small_test2_seed0/validation.jsonl +3 -0
  26. data/tweet_sentiment_small_test2_seed1/test.jsonl +3 -0
  27. data/tweet_sentiment_small_test2_seed1/train.jsonl +3 -0
  28. data/tweet_sentiment_small_test2_seed1/validation.jsonl +3 -0
  29. data/tweet_sentiment_small_test2_seed2/test.jsonl +3 -0
  30. data/tweet_sentiment_small_test2_seed2/train.jsonl +3 -0
  31. data/tweet_sentiment_small_test2_seed2/validation.jsonl +3 -0
  32. data/tweet_sentiment_small_test3_seed0/test.jsonl +3 -0
  33. data/tweet_sentiment_small_test3_seed0/train.jsonl +3 -0
  34. data/tweet_sentiment_small_test3_seed0/validation.jsonl +3 -0
  35. data/tweet_sentiment_small_test3_seed1/test.jsonl +3 -0
  36. data/tweet_sentiment_small_test3_seed1/train.jsonl +3 -0
  37. data/tweet_sentiment_small_test3_seed1/validation.jsonl +3 -0
  38. data/tweet_sentiment_small_test3_seed2/test.jsonl +3 -0
  39. data/tweet_sentiment_small_test3_seed2/train.jsonl +3 -0
  40. data/tweet_sentiment_small_test3_seed2/validation.jsonl +3 -0
  41. experiments/main.sh +18 -7
  42. process/tweet_sentiment.py +1 -0
  43. process/tweet_sentiment_small.py +71 -0
  44. statistics.py +13 -0
  45. tweet_temporal_shift.py +7 -0
.gitattributes CHANGED
@@ -472,3 +472,42 @@ data/tweet_sentiment_test1_seed0/validation.jsonl filter=lfs diff=lfs merge=lfs
472
  data/tweet_sentiment_test2_seed0/train.jsonl filter=lfs diff=lfs merge=lfs -text
473
  data/tweet_sentiment_test3_seed2/train.jsonl filter=lfs diff=lfs merge=lfs -text
474
  data/tweet_sentiment/validation.jsonl filter=lfs diff=lfs merge=lfs -text
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
472
  data/tweet_sentiment_test2_seed0/train.jsonl filter=lfs diff=lfs merge=lfs -text
473
  data/tweet_sentiment_test3_seed2/train.jsonl filter=lfs diff=lfs merge=lfs -text
474
  data/tweet_sentiment/validation.jsonl filter=lfs diff=lfs merge=lfs -text
475
+ data/tweet_sentiment_small/validation.jsonl filter=lfs diff=lfs merge=lfs -text
476
+ data/tweet_sentiment_small_test1_seed1/train.jsonl filter=lfs diff=lfs merge=lfs -text
477
+ data/tweet_sentiment_small_test1_seed2/validation.jsonl filter=lfs diff=lfs merge=lfs -text
478
+ data/tweet_sentiment_small_test2_seed0/train.jsonl filter=lfs diff=lfs merge=lfs -text
479
+ data/tweet_sentiment_small_test3_seed0/train.jsonl filter=lfs diff=lfs merge=lfs -text
480
+ data/tweet_sentiment_small_test3_seed2/test.jsonl filter=lfs diff=lfs merge=lfs -text
481
+ data/tweet_sentiment_small/test.jsonl filter=lfs diff=lfs merge=lfs -text
482
+ data/tweet_sentiment_small_test0_seed0/validation.jsonl filter=lfs diff=lfs merge=lfs -text
483
+ data/tweet_sentiment_small_test0_seed1/train.jsonl filter=lfs diff=lfs merge=lfs -text
484
+ data/tweet_sentiment_small_test2_seed0/validation.jsonl filter=lfs diff=lfs merge=lfs -text
485
+ data/tweet_sentiment_small_test2_seed2/test.jsonl filter=lfs diff=lfs merge=lfs -text
486
+ data/tweet_sentiment_small_test0_seed0/train.jsonl filter=lfs diff=lfs merge=lfs -text
487
+ data/tweet_sentiment_small_test0_seed2/test.jsonl filter=lfs diff=lfs merge=lfs -text
488
+ data/tweet_sentiment_small_test1_seed0/test.jsonl filter=lfs diff=lfs merge=lfs -text
489
+ data/tweet_sentiment_small_test3_seed2/validation.jsonl filter=lfs diff=lfs merge=lfs -text
490
+ data/tweet_sentiment_small_test0_seed0/test.jsonl filter=lfs diff=lfs merge=lfs -text
491
+ data/tweet_sentiment_small_test0_seed2/validation.jsonl filter=lfs diff=lfs merge=lfs -text
492
+ data/tweet_sentiment_small_test1_seed2/test.jsonl filter=lfs diff=lfs merge=lfs -text
493
+ data/tweet_sentiment_small_test2_seed2/train.jsonl filter=lfs diff=lfs merge=lfs -text
494
+ data/tweet_sentiment_small_test2_seed2/validation.jsonl filter=lfs diff=lfs merge=lfs -text
495
+ data/tweet_sentiment_small_test3_seed2/train.jsonl filter=lfs diff=lfs merge=lfs -text
496
+ data/tweet_sentiment_small_test1_seed0/validation.jsonl filter=lfs diff=lfs merge=lfs -text
497
+ data/tweet_sentiment_small_test1_seed2/train.jsonl filter=lfs diff=lfs merge=lfs -text
498
+ data/tweet_sentiment_small_test2_seed0/test.jsonl filter=lfs diff=lfs merge=lfs -text
499
+ data/tweet_sentiment_small_test3_seed1/test.jsonl filter=lfs diff=lfs merge=lfs -text
500
+ data/tweet_sentiment_small/train.jsonl filter=lfs diff=lfs merge=lfs -text
501
+ data/tweet_sentiment_small_test0_seed2/train.jsonl filter=lfs diff=lfs merge=lfs -text
502
+ data/tweet_sentiment_small_test2_seed1/test.jsonl filter=lfs diff=lfs merge=lfs -text
503
+ data/tweet_sentiment_small_test3_seed0/test.jsonl filter=lfs diff=lfs merge=lfs -text
504
+ data/tweet_sentiment_small_test0_seed1/test.jsonl filter=lfs diff=lfs merge=lfs -text
505
+ data/tweet_sentiment_small_test1_seed1/validation.jsonl filter=lfs diff=lfs merge=lfs -text
506
+ data/tweet_sentiment_small_test3_seed1/validation.jsonl filter=lfs diff=lfs merge=lfs -text
507
+ data/tweet_sentiment_small_test0_seed1/validation.jsonl filter=lfs diff=lfs merge=lfs -text
508
+ data/tweet_sentiment_small_test1_seed0/train.jsonl filter=lfs diff=lfs merge=lfs -text
509
+ data/tweet_sentiment_small_test1_seed1/test.jsonl filter=lfs diff=lfs merge=lfs -text
510
+ data/tweet_sentiment_small_test2_seed1/train.jsonl filter=lfs diff=lfs merge=lfs -text
511
+ data/tweet_sentiment_small_test2_seed1/validation.jsonl filter=lfs diff=lfs merge=lfs -text
512
+ data/tweet_sentiment_small_test3_seed0/validation.jsonl filter=lfs diff=lfs merge=lfs -text
513
+ data/tweet_sentiment_small_test3_seed1/train.jsonl filter=lfs diff=lfs merge=lfs -text
data/tweet_sentiment_small/test.jsonl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:16b4c43d7af04f80c58c93a4b3da1984519ed0cd8ff78361885677cede78e003
3
+ size 188450
data/tweet_sentiment_small/train.jsonl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7a4e19028ea5b3d4510d8b69d9af74a1821a74bfae16fd74d8b7856d79262a0e
3
+ size 691715
data/tweet_sentiment_small/validation.jsonl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9250261dcf45e5bca91a6757b29685036ce810708247cbe588a1a92963f4e85f
3
+ size 187256
data/tweet_sentiment_small_test0_seed0/test.jsonl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:09b915a486b09ab99730b2177ed285467e9db60f2c782768411d241d28f50a21
3
+ size 47187
data/tweet_sentiment_small_test0_seed0/train.jsonl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cb29b1640b8d040d84f499a7be85d8c0c1a2e0d5c219a96d7e59fee91e028283
3
+ size 693690
data/tweet_sentiment_small_test0_seed0/validation.jsonl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d455aa627a3f64075cfa6f92f46be1c70f2253b248bbbb862a0176cb4b75d3cc
3
+ size 187256
data/tweet_sentiment_small_test0_seed1/test.jsonl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:09b915a486b09ab99730b2177ed285467e9db60f2c782768411d241d28f50a21
3
+ size 47187
data/tweet_sentiment_small_test0_seed1/train.jsonl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:bad40d185e7e5091a38738d1c7ce9f5fb9c2bd3774a40891fd537af3e294294d
3
+ size 693342
data/tweet_sentiment_small_test0_seed1/validation.jsonl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:28f36c1ff7cf7ebc22470f6441bc5687a5d38ee6fb3e51916db003b1ce21d607
3
+ size 187256
data/tweet_sentiment_small_test0_seed2/test.jsonl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:09b915a486b09ab99730b2177ed285467e9db60f2c782768411d241d28f50a21
3
+ size 47187
data/tweet_sentiment_small_test0_seed2/train.jsonl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:827bd0ad88ef5b9513b2bf8e6c733d35adacec30d43bceae3dcf9d91b101b78c
3
+ size 694096
data/tweet_sentiment_small_test0_seed2/validation.jsonl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a73d92f1a7dec42909ae154175de9c21a5f6851d524b7691163ef4be3c8c3ef0
3
+ size 187256
data/tweet_sentiment_small_test1_seed0/test.jsonl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1bf7c917640a83060e28a6450999bca55da3fead2a522993c1c721380700ffc3
3
+ size 47265
data/tweet_sentiment_small_test1_seed0/train.jsonl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:986866ae2e73151ae9b9205e0592c2a118c29a07c23e1551d83e0c77a3d1f266
3
+ size 694027
data/tweet_sentiment_small_test1_seed0/validation.jsonl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2f94503184ee82f8f12d8c16fbcebdfcb915f74143643e26dd89a0a572b0eaa9
3
+ size 187256
data/tweet_sentiment_small_test1_seed1/test.jsonl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1bf7c917640a83060e28a6450999bca55da3fead2a522993c1c721380700ffc3
3
+ size 47265
data/tweet_sentiment_small_test1_seed1/train.jsonl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1a60a2f1ae7bdbf4f139477b17014f1133e2dd9bb2b3a60c30af5601245320d6
3
+ size 694124
data/tweet_sentiment_small_test1_seed1/validation.jsonl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3d133c09396c40b114d755a07913d2d9562da3523cfd4fa61f9609867c0f38a3
3
+ size 187256
data/tweet_sentiment_small_test1_seed2/test.jsonl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1bf7c917640a83060e28a6450999bca55da3fead2a522993c1c721380700ffc3
3
+ size 47265
data/tweet_sentiment_small_test1_seed2/train.jsonl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e9575bdadf4ae3de8eb354ea520b9ab5d35cf6eaaaa4a202ffda25b8c2788287
3
+ size 693765
data/tweet_sentiment_small_test1_seed2/validation.jsonl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cf5fa46561883333a42ed602df67c22bb041bf342d3f227221ccd4c0b062fad0
3
+ size 187256
data/tweet_sentiment_small_test2_seed0/test.jsonl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:26bdf025b4e171101f303be4dcead3e75e5e7461124132abaab9ed7bdc6ac3c7
3
+ size 47824
data/tweet_sentiment_small_test2_seed0/train.jsonl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c84b5be06a70297ade73615b4195373a16c34196847a1540b81f67bf365c3589
3
+ size 692031
data/tweet_sentiment_small_test2_seed0/validation.jsonl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ec01d63ba6f603aa1e40c7151fbb14c0557c27d37a76dea2fb712e0c155f83be
3
+ size 187256
data/tweet_sentiment_small_test2_seed1/test.jsonl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:26bdf025b4e171101f303be4dcead3e75e5e7461124132abaab9ed7bdc6ac3c7
3
+ size 47824
data/tweet_sentiment_small_test2_seed1/train.jsonl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e2dea020a4572d6c4ca6c83861e0e0f7d847d5a9ca4205a8befa451679c610be
3
+ size 692103
data/tweet_sentiment_small_test2_seed1/validation.jsonl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2827b324e68b7c007aba908bff038b9416ae9ed250ca3bdde10d7943f9c00684
3
+ size 187256
data/tweet_sentiment_small_test2_seed2/test.jsonl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:26bdf025b4e171101f303be4dcead3e75e5e7461124132abaab9ed7bdc6ac3c7
3
+ size 47824
data/tweet_sentiment_small_test2_seed2/train.jsonl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:28429edc222382eb2b7407d2f09b52691724822baa0df07f7e252b6fa45cbbc1
3
+ size 692384
data/tweet_sentiment_small_test2_seed2/validation.jsonl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:855657e67e61720f3c622c089a168674c8af516205f34e12c18576e73f2ddd1c
3
+ size 187256
data/tweet_sentiment_small_test3_seed0/test.jsonl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:395be43c04fa28853568e1c0771e450e0aac1dfaf55b986ed77cde76662fb51e
3
+ size 46171
data/tweet_sentiment_small_test3_seed0/train.jsonl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:19392e2769e3fa011bc0e108dc578100d95ce315fd746355ef4c98ebd296c187
3
+ size 694740
data/tweet_sentiment_small_test3_seed0/validation.jsonl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9b5c4bd6f073175119e1d158dcacfef4fce3119e5e881106134e196d452bebf5
3
+ size 187256
data/tweet_sentiment_small_test3_seed1/test.jsonl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:395be43c04fa28853568e1c0771e450e0aac1dfaf55b986ed77cde76662fb51e
3
+ size 46171
data/tweet_sentiment_small_test3_seed1/train.jsonl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:df6c900a3411f5b7db03436bd862d5854f672bbe3a570142025c99bb6cbe23af
3
+ size 694752
data/tweet_sentiment_small_test3_seed1/validation.jsonl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:beab432ab7afb24e83df30b00f655f98ef457e6097a6ff82556a4a16fb47a285
3
+ size 187256
data/tweet_sentiment_small_test3_seed2/test.jsonl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:395be43c04fa28853568e1c0771e450e0aac1dfaf55b986ed77cde76662fb51e
3
+ size 46171
data/tweet_sentiment_small_test3_seed2/train.jsonl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1c99e502259b679e06898b0b2711b5f319814284a9e2a68bbc2099897bcfefd3
3
+ size 694703
data/tweet_sentiment_small_test3_seed2/validation.jsonl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cea3144fe17b4d74d2cd5628006adf248d218996c492f4567e57d94d7d4b5e3a
3
+ size 187256
experiments/main.sh CHANGED
@@ -1,13 +1,23 @@
1
- # topic, nerd[hw], ner, sentiment[hw]
 
2
  MODEL="vinai/bertweet-base"
3
- # topic, nerd[hw], ner[ukri], sentiment[hw]
4
  MODEL="cardiffnlp/twitter-roberta-base-2022-154m"
5
- # topic, nerd[ukri], ner[stone], sentiment[todo]
6
  MODEL="jhu-clsp/bernice"
7
- # topic, nerd[hw], ner[stone], sentiment[todo]
8
  MODEL="cardiffnlp/twitter-roberta-base-2021-124m"
9
- # topic, nerd[hw], ner[ukri], sentiment[todo]
10
- MODEL="roberta-base"
 
 
 
 
 
 
 
 
 
 
 
 
11
 
12
  # SENTIMENT
13
  python model_finetuning_sentiment.py -m "${MODEL}" -d "sentiment_temporal"
@@ -24,6 +34,8 @@ python model_finetuning_sentiment.py -m "${MODEL}" -d "sentiment_random1_seed2"
24
  python model_finetuning_sentiment.py -m "${MODEL}" -d "sentiment_random2_seed2"
25
  python model_finetuning_sentiment.py -m "${MODEL}" -d "sentiment_random3_seed2"
26
 
 
 
27
  # NERD
28
  python model_finetuning_nerd.py -m "${MODEL}" -d "nerd_temporal"
29
  python model_finetuning_nerd.py -m "${MODEL}" -d "nerd_random0_seed0"
@@ -34,7 +46,6 @@ python model_finetuning_nerd.py -m "${MODEL}" -d "nerd_random0_seed1"
34
  python model_finetuning_nerd.py -m "${MODEL}" -d "nerd_random1_seed1"
35
  python model_finetuning_nerd.py -m "${MODEL}" -d "nerd_random2_seed1"
36
  python model_finetuning_nerd.py -m "${MODEL}" -d "nerd_random3_seed1"
37
-
38
  python model_finetuning_nerd.py -m "${MODEL}" -d "nerd_random0_seed2"
39
  python model_finetuning_nerd.py -m "${MODEL}" -d "nerd_random1_seed2"
40
  python model_finetuning_nerd.py -m "${MODEL}" -d "nerd_random2_seed2"
 
1
+ # MAIN EXPERIMENT
2
+ MODEL="roberta-base"
3
  MODEL="vinai/bertweet-base"
 
4
  MODEL="cardiffnlp/twitter-roberta-base-2022-154m"
 
5
  MODEL="jhu-clsp/bernice"
 
6
  MODEL="cardiffnlp/twitter-roberta-base-2021-124m"
7
+
8
+ # topic, ner, nerd, sentiment[stone]
9
+ MODEL="roberta-large"
10
+ # topic, ner, nerd[hk], sentiment[ukri]
11
+ MODEL="vinai/bertweet-large"
12
+ # topic, ner, nerd[hk], sentiment[ukri]
13
+ MODEL="cardiffnlp/twitter-roberta-large-2022-154m"
14
+
15
+ # ABLATION (TimeLMs)
16
+ ## Topic & NER
17
+ MODEL="twitter-roberta-base-jun2020"
18
+ MODEL="twitter-roberta-base-sep2021"
19
+ ## NERD
20
+ MODEL="twitter-roberta-base-jun2021"
21
 
22
  # SENTIMENT
23
  python model_finetuning_sentiment.py -m "${MODEL}" -d "sentiment_temporal"
 
34
  python model_finetuning_sentiment.py -m "${MODEL}" -d "sentiment_random2_seed2"
35
  python model_finetuning_sentiment.py -m "${MODEL}" -d "sentiment_random3_seed2"
36
 
37
+ python model_finetuning_sentiment.py -m "${MODEL}" -d "sentiment_random1_seed2"
38
+ rm -rf ckpt
39
  # NERD
40
  python model_finetuning_nerd.py -m "${MODEL}" -d "nerd_temporal"
41
  python model_finetuning_nerd.py -m "${MODEL}" -d "nerd_random0_seed0"
 
46
  python model_finetuning_nerd.py -m "${MODEL}" -d "nerd_random1_seed1"
47
  python model_finetuning_nerd.py -m "${MODEL}" -d "nerd_random2_seed1"
48
  python model_finetuning_nerd.py -m "${MODEL}" -d "nerd_random3_seed1"
 
49
  python model_finetuning_nerd.py -m "${MODEL}" -d "nerd_random0_seed2"
50
  python model_finetuning_nerd.py -m "${MODEL}" -d "nerd_random1_seed2"
51
  python model_finetuning_nerd.py -m "${MODEL}" -d "nerd_random2_seed2"
process/tweet_sentiment.py CHANGED
@@ -22,6 +22,7 @@ n_train = len(train)
22
  n_validation = len(validation)
23
  n_test = int(len(test)/4)
24
 
 
25
  def sampler(dataset_test, r_seed):
26
  seed(r_seed)
27
  shuffle(dataset_test)
 
22
  n_validation = len(validation)
23
  n_test = int(len(test)/4)
24
 
25
+
26
  def sampler(dataset_test, r_seed):
27
  seed(r_seed)
28
  shuffle(dataset_test)
process/tweet_sentiment_small.py ADDED
@@ -0,0 +1,71 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Build the down-sampled ("small") variant of the tweet_sentiment dataset.
# Reads the full splits from data/tweet_sentiment/ and writes:
#   - data/tweet_sentiment_small/{train,validation,test}.jsonl  (temporal split)
#   - data/tweet_sentiment_small_test{n}_seed{s}/...            (random splits,
#     n in 0..3 selecting which test period is held out, s in 0..2 the seed)
import json
import os
from random import shuffle, seed


# Load every record of each split; `if len(i)` skips empty lines.
# test_1..test_4 are the four per-period test files; `test` is their union file.
with open("data/tweet_sentiment/test.jsonl") as f:
    test = [json.loads(i) for i in f if len(i)]
with open("data/tweet_sentiment/test_1.jsonl") as f:
    test_1 = [json.loads(i) for i in f if len(i)]
with open("data/tweet_sentiment/test_2.jsonl") as f:
    test_2 = [json.loads(i) for i in f if len(i)]
with open("data/tweet_sentiment/test_3.jsonl") as f:
    test_3 = [json.loads(i) for i in f if len(i)]
with open("data/tweet_sentiment/test_4.jsonl") as f:
    test_4 = [json.loads(i) for i in f if len(i)]
with open("data/tweet_sentiment/train.jsonl") as f:
    train = [json.loads(i) for i in f if len(i)]
with open("data/tweet_sentiment/validation.jsonl") as f:
    validation = [json.loads(i) for i in f if len(i)]


# Test and validation splits are copied through unchanged; only the
# training split is down-sampled below.
os.makedirs(f"data/tweet_sentiment_small", exist_ok=True)
with open(f"data/tweet_sentiment_small/test.jsonl", "w") as f:
    f.write("\n".join([json.dumps(i) for i in test]))
with open(f"data/tweet_sentiment_small/validation.jsonl", "w") as f:
    f.write("\n".join([json.dumps(i) for i in validation]))

# down sample training set: a class-balanced subset of 2,500 records per
# binary label (0/1 in "gold_label_binary"), drawn after a fixed-seed shuffle
# so the run is reproducible.
n_train_p = 2500
n_train_n = 2500
seed(123)
shuffle(train)
train_p = [i for i in train if i["gold_label_binary"] == 0][:n_train_p]
train_n = [i for i in train if i["gold_label_binary"] == 1][:n_train_n]
train = train_p + train_n
shuffle(train)
with open(f"data/tweet_sentiment_small/train.jsonl", "w") as f:
    f.write("\n".join([json.dumps(i) for i in train]))


# Target sizes for the random splits; taken AFTER down-sampling, so
# n_train is the size of the small training set (5,000 when the source
# has enough records per class).
n_train = len(train)
n_validation = len(validation)
n_test = int(len(test)/4)  # NOTE(review): computed but never used below

def sampler(dataset_test, r_seed):
    # Build a random train/validation pair of the same sizes as the
    # temporal split: half of each comes from `dataset_test` (the pooled
    # non-held-out test periods), the rest from the original train/validation.
    # NOTE: intentionally shuffles the module-level `train`/`validation`
    # lists (and the caller's `dataset_test`) in place, so successive calls
    # depend on the RNG state left by earlier ones — do not reorder calls.
    seed(r_seed)
    shuffle(dataset_test)
    shuffle(train)
    shuffle(validation)
    test_tr = dataset_test[:int(n_train / 2)]
    test_vl = dataset_test[int(n_train / 2): int(n_train / 2) + int(n_validation / 2)]
    new_train = test_tr + train[:n_train - len(test_tr)]
    new_validation = test_vl + validation[:n_validation - len(test_vl)]
    return new_train, new_validation

# id2test maps split index n -> the single held-out test period used for
# evaluation; the n-th pool below is the concatenation of the OTHER three
# periods (e.g. n=0 evaluates on test_1 and pools test_4+test_2+test_3).
id2test = {n: t for n, t in enumerate([test_1, test_2, test_3, test_4])}
for n, _test in enumerate([
    test_4 + test_2 + test_3,
    test_1 + test_4 + test_3,
    test_1 + test_2 + test_4,
    test_1 + test_2 + test_3]):
    for s in range(3):
        # One directory per (held-out period, seed) combination.
        os.makedirs(f"data/tweet_sentiment_small_test{n}_seed{s}", exist_ok=True)
        _train, _valid = sampler(_test, s)
        with open(f"data/tweet_sentiment_small_test{n}_seed{s}/train.jsonl", "w") as f:
            f.write("\n".join([json.dumps(i) for i in _train]))
        with open(f"data/tweet_sentiment_small_test{n}_seed{s}/validation.jsonl", "w") as f:
            f.write("\n".join([json.dumps(i) for i in _valid]))
        with open(f"data/tweet_sentiment_small_test{n}_seed{s}/test.jsonl", "w") as f:
            f.write("\n".join([json.dumps(i) for i in id2test[n]]))
statistics.py CHANGED
@@ -25,3 +25,16 @@ for i in ["nerd_temporal", "ner_temporal", "topic_temporal", "sentiment_temporal
25
  })
26
  df = pd.DataFrame(stats)
27
  print(df)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
25
  })
26
  df = pd.DataFrame(stats)
27
  print(df)
28
+ pretty_name = {
29
+ "nerd_temporal": "NERD",
30
+ "ner_temporal": "NER",
31
+ "topic_temporal": "Topic",
32
+ "sentiment_temporal": "Sentiment"
33
+ }
34
+ df.index = [pretty_name[i] for i in df.pop("data")]
35
+ df = df[["split", "size", "date"]]
36
+ pretty_name_split = {"train": "Train", "validation": "Valid", "test": "Test"}
37
+ df["split"] = [pretty_name_split[i] for i in df["split"]]
38
+ df.columns = [i.capitalize() for i in df.columns]
39
+ df['Size'] = df['Size'].map('{:,}'.format)
40
+ print(df.to_latex())
tweet_temporal_shift.py CHANGED
@@ -124,6 +124,13 @@ class TweetTemporalShift(datasets.GeneratorBasedBuilder):
124
  features=["gold_label_binary", "text", "date"],
125
  data_url=f"{_ROOT_URL}/tweet_sentiment",
126
  ),
 
 
 
 
 
 
 
127
  ]
128
  for s in range(3):
129
  for i in range(4):
 
124
  features=["gold_label_binary", "text", "date"],
125
  data_url=f"{_ROOT_URL}/tweet_sentiment",
126
  ),
127
+ TweetTemporalShiftConfig(
128
+ name="sentiment_temporal_small",
129
+ description=_TWEET_SENTIMENT_DESCRIPTION,
130
+ citation=_TWEET_SENTIMENT_CITATION,
131
+ features=["gold_label_binary", "text", "date"],
132
+ data_url=f"{_ROOT_URL}/sentiment_temporal_small",
133
+ ),
134
  ]
135
  for s in range(3):
136
  for i in range(4):