Spaces:
Sleeping
Sleeping
Upload 11 files
Browse files- .gitattributes +2 -0
- acronym.json +1 -0
- app.py +146 -0
- contractions.json +1 -0
- dis_image.png +3 -0
- requirements.txt +7 -0
- stopwords.csv +451 -0
- transfer_tweet/fingerprint.pb +3 -0
- transfer_tweet/keras_metadata.pb +3 -0
- transfer_tweet/saved_model.pb +3 -0
- transfer_tweet/variables/variables.data-00000-of-00001 +3 -0
- transfer_tweet/variables/variables.index +0 -0
.gitattributes
CHANGED
|
@@ -33,3 +33,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
|
| 33 |
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 34 |
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 35 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
| 33 |
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 34 |
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 35 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
| 36 |
+
dis_image.png filter=lfs diff=lfs merge=lfs -text
|
| 37 |
+
transfer_tweet/variables/variables.data-00000-of-00001 filter=lfs diff=lfs merge=lfs -text
|
acronym.json
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
{"aka":"also known as","asap":"as soon as possible","brb":"be right back","btw":"by the way","dob":"date of birth","faq":"frequently asked questions","fyi":"for your information","idk":"i don't know","idc":"i don't care","iirc":"if i recall correctly","imo":"in my opinion","irl":"in real life","lmk":"let me know","lol":"laugh out loud","ngl":"not gonna lie","noyb":"none of your business","nvm":"never mind","ofc":"of course","omg":"oh my god","pfa":"please find attached","rofl":"rolling on the floor laughing","stfu":"shut the fuck up","tba":"to be announced","tbc":"to be continued","tbd":"to be determined","tbh":"to be honest","ttyl":"talk to you later","wtf":"what the fuck","wth":"what the heck"}
|
app.py
ADDED
|
@@ -0,0 +1,146 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# importing Libraries
|
| 2 |
+
|
| 3 |
+
import streamlit as st
|
| 4 |
+
import PIL
|
| 5 |
+
from PIL import Image
|
| 6 |
+
import tensorflow as tf
|
| 7 |
+
from nltk.stem import WordNetLemmatizer
|
| 8 |
+
from nltk.tokenize import RegexpTokenizer
|
| 9 |
+
import re
|
| 10 |
+
import string
|
| 11 |
+
import numpy as np
|
| 12 |
+
import pandas as pd
|
| 13 |
+
import nltk
|
| 14 |
+
|
| 15 |
+
# Ensure the WordNet corpus required by WordNetLemmatizer is available.
# NOTE: the original used nltk.find(...), which is not a top-level NLTK
# attribute and raises AttributeError (never caught by LookupError);
# nltk.data.find is the documented lookup helper.
try:  # Check if wordnet is installed
    nltk.data.find("corpora/wordnet.zip")
except LookupError:
    nltk.download('wordnet')

# ----------------------------------------------------------------------------------
# Read lookup tables. The NameError guard skips re-reading when the names
# already exist in the namespace (a lightweight cache in environments that
# persist module globals between reruns).
try:
    acronyms_dict, contractions_dict, stops
except NameError:
    acronyms_dict = pd.read_json("acronym.json", typ="series")           # acronym -> expansion
    contractions_dict = pd.read_json("contractions.json", typ="series")  # contraction -> expansion
    stops = list(pd.read_csv('stopwords.csv').values.flatten())          # stopword list

# ----------------------------------------------------------------------------------
# Tokenizer: runs of word characters and apostrophes. Raw string avoids the
# invalid "\w" escape-sequence warning of the original non-raw pattern.
regexp = RegexpTokenizer(r"[\w']+")
|
| 32 |
+
|
| 33 |
+
# preprocess Function
def preprocess(text):
    """Clean a raw tweet into the normalized form expected by the model.

    Steps: lowercase/strip; remove HTML tags, emoji code points, URLs and
    @usernames; strip punctuation and digits; expand acronyms and
    contractions; lemmatize; remove stopwords; keep only ASCII letters and
    spaces; drop words containing a character repeated 3+ times in a row
    and words shorter than 3 letters.

    Relies on module-level globals: ``regexp``, ``acronyms_dict``,
    ``contractions_dict`` and ``stops``.

    Parameters
    ----------
    text : str
        Raw tweet text.

    Returns
    -------
    str
        Cleaned, space-separated tokens.
    """
    text = text.lower()  # lowercase
    text = text.strip()  # surrounding whitespace

    # Removing html tags
    text = re.sub(r'<.*?>', '', text)

    # Removing emoji patterns (common pictograph/emoji code-point ranges)
    emoji_pattern = re.compile("["
                               u"\U0001F600-\U0001F64F"  # emoticons
                               u"\U0001F300-\U0001F5FF"  # symbols & pictographs
                               u"\U0001F680-\U0001F6FF"  # transport & map symbols
                               u"\U0001F1E0-\U0001F1FF"  # flags (iOS)
                               u"\U00002702-\U000027B0"
                               u"\U000024C2-\U0001F251"
                               "]+", flags=re.UNICODE)
    text = emoji_pattern.sub('', text)

    # Removing urls (raw string: the original "\S" was an invalid escape)
    text = re.sub(r"https?://\S+|www\.\S+", "", text)

    # Removing twitter usernames
    text = re.sub(r'@[\w_]+', "", text)

    # Removing punctuation and numbers, but keep "'" (for contractions)
    # and "-" (handled separately below).
    punct_str = (string.punctuation + string.digits).replace("'", "").replace("-", "")
    text = text.translate(str.maketrans('', '', punct_str))

    # Replacing "-" in text with a space so hyphenated words split
    text = text.replace("-", " ")

    # Substituting acronyms (e.g. "brb" -> "be right back").
    # list.extend avoids the quadratic `words = words + ...` concatenation.
    words = []
    for word in regexp.tokenize(text):
        if word in acronyms_dict.index:
            words.extend(acronyms_dict[word].split())
        else:
            words.append(word)
    text = ' '.join(words)

    # Substituting contractions (e.g. "don't" -> "do not")
    words = []
    for word in regexp.tokenize(text):
        if word in contractions_dict.index:
            words.extend(contractions_dict[word].split())
        else:
            words.append(word)
    text = " ".join(words)

    # Punctuation again, now removing the kept "'"
    text = text.translate(str.maketrans('', '', string.punctuation))

    # Lemmatization
    lemmatizer = WordNetLemmatizer()
    text = " ".join(lemmatizer.lemmatize(word) for word in regexp.tokenize(text))

    # Stopwords removal — set lookup is O(1) vs O(n) against the list
    stop_set = set(stops)
    text = ' '.join(word for word in regexp.tokenize(text) if word not in stop_set)

    # Keep only ascii letters and " " (renamed from `filter`/`chr`, which
    # shadowed Python builtins in the original)
    allowed_chars = string.ascii_letters + " "
    text = "".join(ch for ch in text if ch in allowed_chars)

    # Removing words with one character occurring more than 3 times continuously
    text = re.sub(r'\b\w*?(.)\1{2,}\w*\b', "", text).strip()

    # Removing words with fewer than 3 characters
    text = re.sub(r'\b\w{1,2}\b', "", text)

    # return final output
    return text
|
| 114 |
+
|
| 115 |
+
# ===============================================================================================================
# STREAMLIT

# App development starts: wide layout and markdown page title
st.set_page_config(layout="wide")
st.write("# Disaster Tweet Predictor")

# Header image displayed at the top of the page
img = Image.open("dis_image.png")
st.image(img)

# Free-text input for the tweet to classify (empty by default)
tweet = st.text_input(label = "Enter or paste your tweet here", value = "")
|
| 126 |
+
|
| 127 |
+
# Cache the loaded model in Streamlit's resource cache so the (large)
# SavedModel is deserialized only once per server process.
@st.cache_resource
def cache_model(model_name):
    """Load and return the Keras SavedModel stored at *model_name*."""
    return tf.keras.models.load_model(model_name)
|
| 132 |
+
|
| 133 |
+
# Load (or fetch from the resource cache) the SavedModel directory "transfer_tweet"
model = cache_model("transfer_tweet")

# Run the pipeline only once the user has typed something
if len(tweet) > 0:
    clean_tweet = preprocess(tweet) # cleans tweet
    y_pred = model.predict([clean_tweet]) # model output: probability of class = 1 (disaster)
    y_pred_num = int(np.round(y_pred)[0][0]) # round at 0.5 to get the final class label

    if y_pred_num == 0:
        st.write(f"#### Non-Disaster tweet with disaster probability {round(y_pred[0][0]*100, 4)}%")
    else:
        st.write(f"#### Disaster tweet with disaster probability {round(y_pred[0][0]*100, 4)}%")

# ==============================================================================================================
|
contractions.json
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
{"'aight":"alright","ain't":"are not","amn't":"am not","arencha":"are not you","aren't":"are not","'bout":"about","can't":"cannot","cap'n":"captain","'cause":"because","'cept":"except","could've":"could have","couldn't":"could not","couldn't've":"could not have","dammit":"damn it","daren't":"dare not","daresn't":"dare not","dasn't":"dare not","didn't":"did not","doesn't":"does not","doin'":"doing","don't":"do not","dunno":"do not know","d'ye":"do you","e'en":"even","e'er":"ever","'em":"them","everybody's":"everybody is","everyone's":"everyone is","fo'c'sle":"forecastle","finna":"fixing to","'gainst":"against","g'day":"good day","gimme":"give me","giv'n":"given","gonna":"going to","gon't":"go not","gotcha":"got you","gotta":"got to","gtg":"got to go","hadn't":"had not","had've":"had have","hasn't":"has not","haven't":"have not","he'd":"he had","he'll":"he shall","helluva":"hell of a","he's":"he is","here's":"here is","he've":"he have","how'd":"how would","howdy":"how do you do","how'll":"how will","how're":"how are","how's":"how is","i'd":"i had","i'd've":"i would have","i'll":"i shall","i'm":"i am","imma":"i am about to","i'm'a":"i am about to","i'm'o":"i am going to","innit":"is it not","ion":"i do not","i've":"i have","isn't":"is not","it'd":"it would","it'll":"it shall","it's":"it is","iunno":"i do not know","kinda":"kind of","let's":"let us","li'l":"little","ma'am":"madam","mayn't":"may not","may've":"may have","methinks":"me thinks","mightn't":"might not","might've":"might have","mustn't":"must not","mustn't've":"must not have","must've":"must have","'neath":"beneath","needn't":"need not","nal":"and all","ne'er":"never","o'clock":"of the clock","o'er":"over","ol'":"old","oughtn't":"ought not","'round":"around","'s":"is","shalln't":"shall not","shan't":"shall not","she'd":"she had","she'll":"she shall","she's":"she is","should've":"should have","shouldn't":"should not","shouldn't've":"should not have","somebody's":"somebody is","someone's":"someone 
is","something's":"something is","so're":"so are","so's":"so is","so've":"so have","that'll":"that shall","that're":"that are","that's":"that is","that'd":"that would","there'd":"there had","there'll":"there shall","there're":"there are","there's":"there is","these're":"these are","these've":"these have","they'd":"they had","they'll":"they shall","they're":"they are","they've":"they have","this's":"this is","those're":"those are","those've":"those have","'thout":"without","'til":"until","'tis":"it is","to've":"to have","'twas":"it was","'tween":"between","'twhere":"it were","wanna":"want to","wasn't":"was not","we'd":"we had","we'd've":"we would have","we'll":"we shall","we're":"we are","we've":"we have","weren't":"were not","whatcha":"what are you","what'd":"what did","what'll":"what shall","what're":"what are","what's":"what is","what've":"what have","when's":"when is","where'd":"where did","where'll":"where shall","where're":"where are","where's":"where is","where've":"where have","which'd":"which had","which'll":"which shall","which're":"which are","which's":"which is","which've":"which have","who'd":"who would","who'd've":"who would have","who'll":"who shall","who're":"who are","who's":"who is","who've":"who have","why'd":"why did","why're":"why are","why's":"why is","willn't":"will not","won't":"will not","wonnot":"will not","would've":"would have","wouldn't":"would not","wouldn't've":"would not have","y'all":"you all","y'all'd've":"you all would have","y'all'd'n't've":"you all would not have","y'all're":"you all are","y'all'ren't":"you all are not","y'at":"you at","yes'm":"yes madam","yessir":"yes sir","you'd":"you had","you'll":"you shall","you're":"you are","you've":"you have","aight":"alright","aint":"are not","amnt":"am not","arent":"are not","cant":"cannot","cause":"because","couldve":"could have","couldnt":"could not","couldntve":"could not have","darent":"dare not","daresnt":"dare not","dasnt":"dare not","didnt":"did not","doesnt":"does 
not","doin":"doing","dont":"do not","eer":"ever","everybodys":"everybody is","everyones":"everyone is","gday":"good day","givn":"given","gont":"go not","hadnt":"had not","hadve":"had have","hasnt":"has not","havent":"have not","hed":"he had","hell":"he shall","hes":"he is","heve":"he have","howd":"how did","howll":"how will","howre":"how are","hows":"how is","idve":"i would have","ill":"i shall","im":"i am","ima":"i am about to","imo":"i am going to","ive":"i have","isnt":"is not","itd":"it would","itll":"it shall","its":"it is","lets":"let us","lil":"little","maam":"madam","maynt":"may not","mayve":"may have","mightnt":"might not","mightve":"might have","mustnt":"must not","mustntve":"must not have","mustve":"must have","neednt":"need not","neer":"never","oclock":"of the clock","oer":"over","ol":"old","oughtnt":"ought not","shallnt":"shall not","shant":"shall not","shed":"she had","shell":"she shall","shes":"she is","shouldve":"should have","shouldnt":"should not","shouldntve":"should not have","somebodys":"somebody is","someones":"someone is","somethings":"something is","thatll":"that shall","thatre":"that are","thatd":"that would","thered":"there had","therell":"there shall","therere":"there are","theres":"there is","thesere":"these are","theseve":"these have","theyd":"they had","theyll":"they shall","theyre":"they are","theyve":"they have","thiss":"this is","thosere":"those are","thoseve":"those have","tis":"it is","tove":"to have","twas":"it was","wasnt":"was not","wed":"we had","wedve":"we would have","were":"we are","weve":"we have","werent":"were not","whatd":"what did","whatll":"what shall","whatre":"what are","whats":"what is","whatve":"what have","whens":"when is","whered":"where did","wherell":"where shall","wherere":"where are","wheres":"where is","whereve":"where have","whichd":"which had","whichll":"which shall","whichre":"which are","whichs":"which is","whichve":"which have","whod":"who would","whodve":"who would have","wholl":"who 
shall","whore":"who are","whos":"who is","whove":"who have","whyd":"why did","whyre":"why are","whys":"why is","wont":"will not","wouldve":"would have","wouldnt":"would not","wouldntve":"would not have","yall":"you all","yalldve":"you all would have","yallre":"you all are","youd":"you had","youll":"you shall","youre":"you are","youve":"you have","'re":"are","thats":"that is"}
|
dis_image.png
ADDED
|
Git LFS Details
|
requirements.txt
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
streamlit == 1.27.1
|
| 2 |
+
Pillow == 9.4.0
|
| 3 |
+
nltk == 3.8.1
|
| 4 |
+
numpy == 1.24.3
|
| 5 |
+
pandas == 2.0.3
|
| 6 |
+
tensorflow == 2.14.0
|
| 7 |
+
regex == 2022.7.9
|
stopwords.csv
ADDED
|
@@ -0,0 +1,451 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
0
|
| 2 |
+
provided that
|
| 3 |
+
sixty
|
| 4 |
+
although
|
| 5 |
+
don't
|
| 6 |
+
's
|
| 7 |
+
ve
|
| 8 |
+
which
|
| 9 |
+
can
|
| 10 |
+
anywhere
|
| 11 |
+
name
|
| 12 |
+
down
|
| 13 |
+
versus
|
| 14 |
+
seem
|
| 15 |
+
formerly
|
| 16 |
+
hereupon
|
| 17 |
+
’re
|
| 18 |
+
u
|
| 19 |
+
you're
|
| 20 |
+
do
|
| 21 |
+
them
|
| 22 |
+
has
|
| 23 |
+
didn
|
| 24 |
+
it's
|
| 25 |
+
beside
|
| 26 |
+
so
|
| 27 |
+
z
|
| 28 |
+
third
|
| 29 |
+
whether
|
| 30 |
+
here
|
| 31 |
+
whether or not
|
| 32 |
+
didn't
|
| 33 |
+
hasn
|
| 34 |
+
isn't
|
| 35 |
+
where
|
| 36 |
+
our
|
| 37 |
+
full
|
| 38 |
+
six
|
| 39 |
+
anyhow
|
| 40 |
+
most
|
| 41 |
+
outside
|
| 42 |
+
out
|
| 43 |
+
those
|
| 44 |
+
hadn't
|
| 45 |
+
again
|
| 46 |
+
against
|
| 47 |
+
won
|
| 48 |
+
ours
|
| 49 |
+
having
|
| 50 |
+
both
|
| 51 |
+
all
|
| 52 |
+
together
|
| 53 |
+
me
|
| 54 |
+
been
|
| 55 |
+
latterly
|
| 56 |
+
go
|
| 57 |
+
have
|
| 58 |
+
other
|
| 59 |
+
anything
|
| 60 |
+
if
|
| 61 |
+
please
|
| 62 |
+
did
|
| 63 |
+
aren
|
| 64 |
+
any
|
| 65 |
+
mustn't
|
| 66 |
+
minus
|
| 67 |
+
several
|
| 68 |
+
n’t
|
| 69 |
+
thru
|
| 70 |
+
itself
|
| 71 |
+
anyway
|
| 72 |
+
’ll
|
| 73 |
+
toward
|
| 74 |
+
as much as
|
| 75 |
+
yourself
|
| 76 |
+
alone
|
| 77 |
+
during
|
| 78 |
+
must
|
| 79 |
+
himself
|
| 80 |
+
eight
|
| 81 |
+
j
|
| 82 |
+
hers
|
| 83 |
+
鈥榙
|
| 84 |
+
'll
|
| 85 |
+
wasn
|
| 86 |
+
'm
|
| 87 |
+
needn't
|
| 88 |
+
doing
|
| 89 |
+
already
|
| 90 |
+
into
|
| 91 |
+
s
|
| 92 |
+
between
|
| 93 |
+
shouldn
|
| 94 |
+
would
|
| 95 |
+
either
|
| 96 |
+
yet
|
| 97 |
+
mightn
|
| 98 |
+
part
|
| 99 |
+
eleven
|
| 100 |
+
meanwhile
|
| 101 |
+
of
|
| 102 |
+
after
|
| 103 |
+
whatever
|
| 104 |
+
become
|
| 105 |
+
she's
|
| 106 |
+
nowhere
|
| 107 |
+
that
|
| 108 |
+
us
|
| 109 |
+
the
|
| 110 |
+
latter
|
| 111 |
+
没茂
|
| 112 |
+
herself
|
| 113 |
+
like
|
| 114 |
+
this
|
| 115 |
+
becoming
|
| 116 |
+
beforehand
|
| 117 |
+
g
|
| 118 |
+
l
|
| 119 |
+
despite
|
| 120 |
+
whoever
|
| 121 |
+
supposing
|
| 122 |
+
still
|
| 123 |
+
may
|
| 124 |
+
always
|
| 125 |
+
her
|
| 126 |
+
not
|
| 127 |
+
'd
|
| 128 |
+
as long as
|
| 129 |
+
upon
|
| 130 |
+
near
|
| 131 |
+
get
|
| 132 |
+
twelve
|
| 133 |
+
his
|
| 134 |
+
d
|
| 135 |
+
with
|
| 136 |
+
could
|
| 137 |
+
done
|
| 138 |
+
shouldn't
|
| 139 |
+
twenty
|
| 140 |
+
thus
|
| 141 |
+
鈥檝e
|
| 142 |
+
nor
|
| 143 |
+
鈥榤
|
| 144 |
+
ourselves
|
| 145 |
+
see
|
| 146 |
+
sometimes
|
| 147 |
+
below
|
| 148 |
+
in order that
|
| 149 |
+
than
|
| 150 |
+
least
|
| 151 |
+
that'll
|
| 152 |
+
没貌
|
| 153 |
+
take
|
| 154 |
+
thereafter
|
| 155 |
+
unlike
|
| 156 |
+
behind
|
| 157 |
+
round
|
| 158 |
+
没陋s
|
| 159 |
+
or
|
| 160 |
+
call
|
| 161 |
+
on
|
| 162 |
+
say
|
| 163 |
+
their
|
| 164 |
+
within
|
| 165 |
+
across
|
| 166 |
+
well
|
| 167 |
+
serious
|
| 168 |
+
nevertheless
|
| 169 |
+
neither
|
| 170 |
+
what
|
| 171 |
+
four
|
| 172 |
+
c
|
| 173 |
+
who
|
| 174 |
+
via
|
| 175 |
+
没陋ve
|
| 176 |
+
further
|
| 177 |
+
keep
|
| 178 |
+
鈥檚
|
| 179 |
+
over
|
| 180 |
+
about
|
| 181 |
+
for
|
| 182 |
+
till
|
| 183 |
+
when
|
| 184 |
+
often
|
| 185 |
+
its
|
| 186 |
+
someone
|
| 187 |
+
became
|
| 188 |
+
him
|
| 189 |
+
being
|
| 190 |
+
how
|
| 191 |
+
be
|
| 192 |
+
per
|
| 193 |
+
are
|
| 194 |
+
whole
|
| 195 |
+
perhaps
|
| 196 |
+
lest
|
| 197 |
+
noone
|
| 198 |
+
two
|
| 199 |
+
mine
|
| 200 |
+
q
|
| 201 |
+
others
|
| 202 |
+
thence
|
| 203 |
+
doesn't
|
| 204 |
+
amongst
|
| 205 |
+
really
|
| 206 |
+
isn
|
| 207 |
+
wouldn't
|
| 208 |
+
to
|
| 209 |
+
until
|
| 210 |
+
inside
|
| 211 |
+
before
|
| 212 |
+
ten
|
| 213 |
+
yourselves
|
| 214 |
+
he
|
| 215 |
+
such
|
| 216 |
+
my
|
| 217 |
+
shan't
|
| 218 |
+
at
|
| 219 |
+
whereafter
|
| 220 |
+
side
|
| 221 |
+
v
|
| 222 |
+
themselves
|
| 223 |
+
somehow
|
| 224 |
+
鈥榲e
|
| 225 |
+
nine
|
| 226 |
+
empty
|
| 227 |
+
few
|
| 228 |
+
through
|
| 229 |
+
anyone
|
| 230 |
+
whence
|
| 231 |
+
we
|
| 232 |
+
following
|
| 233 |
+
shan
|
| 234 |
+
amid
|
| 235 |
+
only if
|
| 236 |
+
cannot
|
| 237 |
+
whereupon
|
| 238 |
+
矛帽
|
| 239 |
+
plus
|
| 240 |
+
hadn
|
| 241 |
+
h
|
| 242 |
+
up
|
| 243 |
+
鈥檇
|
| 244 |
+
many
|
| 245 |
+
put
|
| 246 |
+
you'd
|
| 247 |
+
beyond
|
| 248 |
+
much
|
| 249 |
+
enough
|
| 250 |
+
没贸we
|
| 251 |
+
theirs
|
| 252 |
+
around
|
| 253 |
+
besides
|
| 254 |
+
f
|
| 255 |
+
were
|
| 256 |
+
y
|
| 257 |
+
hereafter
|
| 258 |
+
is
|
| 259 |
+
then
|
| 260 |
+
you've
|
| 261 |
+
aren't
|
| 262 |
+
矛
|
| 263 |
+
rather
|
| 264 |
+
made
|
| 265 |
+
almost
|
| 266 |
+
elsewhere
|
| 267 |
+
whose
|
| 268 |
+
fifteen
|
| 269 |
+
should've
|
| 270 |
+
back
|
| 271 |
+
e
|
| 272 |
+
very
|
| 273 |
+
haven
|
| 274 |
+
ca
|
| 275 |
+
once
|
| 276 |
+
x
|
| 277 |
+
also
|
| 278 |
+
else
|
| 279 |
+
show
|
| 280 |
+
whereas
|
| 281 |
+
underneath
|
| 282 |
+
unless
|
| 283 |
+
yours
|
| 284 |
+
鈥榬e
|
| 285 |
+
regarding
|
| 286 |
+
wherein
|
| 287 |
+
you
|
| 288 |
+
otherwise
|
| 289 |
+
she
|
| 290 |
+
k
|
| 291 |
+
less
|
| 292 |
+
aboard
|
| 293 |
+
o
|
| 294 |
+
茫
|
| 295 |
+
whereby
|
| 296 |
+
first
|
| 297 |
+
mustn
|
| 298 |
+
make
|
| 299 |
+
you'll
|
| 300 |
+
sometime
|
| 301 |
+
towards
|
| 302 |
+
due
|
| 303 |
+
give
|
| 304 |
+
in case
|
| 305 |
+
bottom
|
| 306 |
+
茂
|
| 307 |
+
considering
|
| 308 |
+
concerning
|
| 309 |
+
onto
|
| 310 |
+
does
|
| 311 |
+
while
|
| 312 |
+
had
|
| 313 |
+
now that
|
| 314 |
+
since
|
| 315 |
+
same
|
| 316 |
+
without
|
| 317 |
+
therefore
|
| 318 |
+
by
|
| 319 |
+
doesn
|
| 320 |
+
mostly
|
| 321 |
+
as though
|
| 322 |
+
should
|
| 323 |
+
every
|
| 324 |
+
some
|
| 325 |
+
seems
|
| 326 |
+
off
|
| 327 |
+
w
|
| 328 |
+
by the time
|
| 329 |
+
am
|
| 330 |
+
没
|
| 331 |
+
a
|
| 332 |
+
from
|
| 333 |
+
couldn't
|
| 334 |
+
没贸
|
| 335 |
+
thereby
|
| 336 |
+
front
|
| 337 |
+
top
|
| 338 |
+
whither
|
| 339 |
+
as
|
| 340 |
+
indeed
|
| 341 |
+
r
|
| 342 |
+
therein
|
| 343 |
+
used
|
| 344 |
+
now
|
| 345 |
+
i
|
| 346 |
+
will
|
| 347 |
+
even
|
| 348 |
+
ll
|
| 349 |
+
none
|
| 350 |
+
ever
|
| 351 |
+
even if
|
| 352 |
+
becomes
|
| 353 |
+
couldn
|
| 354 |
+
above
|
| 355 |
+
just
|
| 356 |
+
in the event that
|
| 357 |
+
nothing
|
| 358 |
+
没陋m
|
| 359 |
+
whenever
|
| 360 |
+
except
|
| 361 |
+
hereby
|
| 362 |
+
along
|
| 363 |
+
don
|
| 364 |
+
was
|
| 365 |
+
own
|
| 366 |
+
mightn't
|
| 367 |
+
three
|
| 368 |
+
p
|
| 369 |
+
n鈥榯
|
| 370 |
+
throughout
|
| 371 |
+
even though
|
| 372 |
+
wherever
|
| 373 |
+
afterwards
|
| 374 |
+
and
|
| 375 |
+
n
|
| 376 |
+
they
|
| 377 |
+
there
|
| 378 |
+
last
|
| 379 |
+
never
|
| 380 |
+
b
|
| 381 |
+
鈥檓
|
| 382 |
+
these
|
| 383 |
+
t
|
| 384 |
+
former
|
| 385 |
+
amount
|
| 386 |
+
myself
|
| 387 |
+
nobody
|
| 388 |
+
没陋
|
| 389 |
+
wouldn
|
| 390 |
+
among
|
| 391 |
+
each
|
| 392 |
+
m
|
| 393 |
+
your
|
| 394 |
+
something
|
| 395 |
+
hasn't
|
| 396 |
+
however
|
| 397 |
+
needn
|
| 398 |
+
but
|
| 399 |
+
hence
|
| 400 |
+
five
|
| 401 |
+
wasn't
|
| 402 |
+
seemed
|
| 403 |
+
too
|
| 404 |
+
everyone
|
| 405 |
+
as if
|
| 406 |
+
haven't
|
| 407 |
+
n't
|
| 408 |
+
re
|
| 409 |
+
thereupon
|
| 410 |
+
one
|
| 411 |
+
no
|
| 412 |
+
various
|
| 413 |
+
why
|
| 414 |
+
under
|
| 415 |
+
whom
|
| 416 |
+
somewhere
|
| 417 |
+
in
|
| 418 |
+
namely
|
| 419 |
+
might
|
| 420 |
+
more
|
| 421 |
+
fifty
|
| 422 |
+
another
|
| 423 |
+
everything
|
| 424 |
+
next
|
| 425 |
+
ma
|
| 426 |
+
forty
|
| 427 |
+
won't
|
| 428 |
+
seeming
|
| 429 |
+
as soon as
|
| 430 |
+
鈥榣l
|
| 431 |
+
everywhere
|
| 432 |
+
an
|
| 433 |
+
quite
|
| 434 |
+
weren
|
| 435 |
+
though
|
| 436 |
+
beneath
|
| 437 |
+
hundred
|
| 438 |
+
it
|
| 439 |
+
鈥榮
|
| 440 |
+
ain
|
| 441 |
+
using
|
| 442 |
+
weren't
|
| 443 |
+
氓
|
| 444 |
+
moreover
|
| 445 |
+
because
|
| 446 |
+
没陋re
|
| 447 |
+
herein
|
| 448 |
+
move
|
| 449 |
+
've
|
| 450 |
+
only
|
| 451 |
+
're
|
transfer_tweet/fingerprint.pb
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:376041931ca2355913e3c847180a34408d8ffa2ebd01ddbd964f4fce7cb476d0
|
| 3 |
+
size 57
|
transfer_tweet/keras_metadata.pb
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:7dca4e0121922693287b38f14447d072fabd04e089cd6487964e6e5261e32ff9
|
| 3 |
+
size 21175
|
transfer_tweet/saved_model.pb
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:ecab5be566dc4fd13a897026c2e7ab549989858410f1a5fa1a091842a75e77dd
|
| 3 |
+
size 10031550
|
transfer_tweet/variables/variables.data-00000-of-00001
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:fb76923e9a19466c1ef9aa371b9783c8218f05cdc91340af81ddcae1bae72689
|
| 3 |
+
size 1029458023
|
transfer_tweet/variables/variables.index
ADDED
|
Binary file (14.7 kB). View file
|
|
|