forked from pournaki/twitter-explorer
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathcollector.py
More file actions
182 lines (162 loc) · 7.01 KB
/
collector.py
File metadata and controls
182 lines (162 loc) · 7.01 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
#!/usr/bin/env python
"""Interactive interface for Twitter Search API.
Connects to Twitter Search API and collects tweets based on keyword.
Saves collected tweets as jsonl in ./data.
"""
__author__ = "Armin Pournaki"
__copyright__ = "Copyright 2020, Armin Pournaki"
__credits__ = ["Felix Gaisbauer", "Sven Banisch", "Eckehard Olbrich"]
__license__ = "GPLv3"
__version__ = "0.1"
__email__ = "pournaki@mis.mpg.de"
import streamlit as st
import tweepy
import json
import os
import datetime
import time
import sys
# ------- UI --------- #
# Some CSS changes
# Note: the backslashes below are line continuations *inside* the string
# literals, so each st.markdown call receives one long single-line string.
# Load the "Inter" webfont from Google Fonts.
st.markdown('<link href="https://fonts.googleapis.com/css2?family=Inter:wght@400;\
700&display=swap" rel="stylesheet"> ',
unsafe_allow_html=True)
# Apply Inter to markdown text and style the title link (white, grey on hover).
st.markdown(
'<style>.reportview-container .markdown-text-container{font-family:\
"Inter", -apple-system,system-ui,BlinkMacSystemFont,"Segoe UI",Roboto,"Helvetica\
Neue",Arial,sans-serif}\
#titlelink {color: white;\
text-decoration: none}\
#titlelink:hover {color: #cccccc;\
text-decoration: none}</style>', unsafe_allow_html=True)
# Apply the same font stack to Streamlit's .ae elements and the page body.
st.markdown('<style>.ae{font-family:"Inter",-apple-system,system-ui,BlinkMacSystemFont,\
"Segoe UI",Roboto,"Helvetica Neue",Arial,sans-serif}</style>',
unsafe_allow_html=True)
st.markdown('<style>body{font-family:"Inter",-apple-system,system-ui,BlinkMacSystemFont,\
"Segoe UI",Roboto,"Helvetica Neue",Arial,sans-serif}</style>',
unsafe_allow_html=True)
# Inline code in black; links in the project's blue.
st.markdown('<style>code{color:black}</style>', unsafe_allow_html=True)
st.markdown('<style>.reportview-container .markdown-text-container a{color:rgba\
(83,106,160,1)}</style>', unsafe_allow_html=True)
# Browser tab title.
st.markdown('<head><title>twitter explorer</title></head>',
unsafe_allow_html=True)
# Black banner with the linked "twitter explorer BETA" title.
st.markdown('<p style="font-size: 30pt; font-weight: bold; color: white; \
background-color: #000"> \
<a id="titlelink" href="https://twitterexplorer.org">twitter explorer\
<span style="font-size:10pt;">BETA</span></a>\
</p>', unsafe_allow_html=True)
# Page heading plus instructions for supplying Twitter API credentials.
st.title("Collector")
st.write("Use the [Twitter Search API](https://developer.twitter.com/en/docs/tweets/search/api-reference/get-search-tweets) to collect a set of relevant tweets from the last 7 days based on a keyword search.")
st.subheader("Authentication")
st.write("Save your Twitter credentials to `./twitter_apikeys.txt` in the following format:")
st.write("""```
# api_key
<insert api_key here>
# api_secret_key
<insert api_secret_key here>
```""")
# Alternative instructions for user-context auth (access tokens) — kept for
# reference; app-only auth below does not need them.
# st.write("""```
# # api_key
# <insert api_key here>
# # api_secret_key
# <insert api_secret_key here>
# # access_token
# <insert access_token here>
# # access_token_secret
# <insert access_token_secret here>
# ```""")
# Read the API credentials from the key file: every non-comment line is a
# credential, in the order documented above (api_key, then api_secret_key).
credentials = []
try:
    # use a context manager so the file handle is closed deterministically
    with open("./twitter_apikeys.txt") as keyfile:
        for line in keyfile:
            stripped = line.strip()
            if not stripped.startswith("#"):
                credentials.append(stripped)
except FileNotFoundError:
    st.error("Please follow the above instructions to use the collector.")
    # Without credentials nothing below can work; halt this script run here
    # instead of falling through and crashing with IndexError on credentials[0].
    st.stop()
a = credentials[0]
b = credentials[1]
# c = credentials[2]
# d = credentials[3]
# authenticate and initialise api (app-only auth; no access token required)
auth = tweepy.AppAuthHandler(a, b)
# auth.set_access_token(c, d)
api = tweepy.API(auth)
st.subheader("Keyword search")
keywords = st.text_input("Insert keyword(s) here", value='')
# spaces would be awkward in filenames, so replace them for the save name
savename = keywords.replace(" ", "_")
# create directories before it's too late
datadir = "./data"
os.makedirs(datadir, exist_ok=True)
# the standard Search API only reaches back ~7 days, hence the default window
datetoday = datetime.date.today()
datelastweek = datetoday - datetime.timedelta(weeks=1)
count = 0
advanced = st.checkbox("Advanced API settings")
if advanced:
    language = st.text_input("Restrict collected tweets to the given language, given by an ISO 639-1 code (leave empty for all languages)")
    timerange = st.slider("Collect tweets in the following timerange",
                          value=(datelastweek, datetoday),
                          min_value=datelastweek,
                          max_value=datetoday)
    since_date = timerange[0]
    # the API's 'until' bound is exclusive, so add one day to include the
    # chosen end date
    until_date = timerange[1] + datetime.timedelta(days=1)
    restype = st.radio(label="Type of result", options=["mixed (include both popular and real time results in the response)", "recent (return only the most recent results in the response)","popular (return only the most popular results in the response)"], index=0)
    # Keep only the API keyword ("mixed"/"recent"/"popular"). strip() removes
    # the trailing space that split('(') leaves before the parenthesis —
    # previously "mixed " (with a space) was sent as result_type to the API.
    restype = restype.split('(')[0].strip()
def _rate_limit_sleep(count):
    """Tell the user the rate limit was hit, then wait out the 15-minute window."""
    st.write(f"Attained the rate limit. Going to sleep. Collected {count} tweets.")
    st.write("Sleeping...")
    my_bar = st.progress(0)
    for i in range(900):
        time.sleep(1)
        my_bar.progress((i + 1) / 900)
    st.write("Collecting...")


def _collect(cursor, outpath, since_date=None):
    """Drain *cursor*, appending each tweet as one JSON line to *outpath*.

    Stops when the cursor is exhausted (StopIteration) or, if *since_date*
    is given, when a tweet older than that date is reached. Sleeps through
    rate limits and resumes. Returns the number of tweets collected.
    """
    count = 0
    while True:
        try:
            tweet = cursor.next()._json
            if since_date is not None:
                tweetdatetime = datetime.datetime.strptime(
                    tweet['created_at'], '%a %b %d %H:%M:%S +0000 %Y')
                if tweetdatetime.date() < since_date:
                    # Fix: the original used print() with a non-f string here,
                    # so "{count}" was printed literally to stdout and the
                    # message never reached the Streamlit UI.
                    st.write(f"Collected all the tweets in the desired timerange! Collected {count} tweets.")
                    break
            count += 1
            with open(outpath, "a", encoding="utf-8") as f:
                json.dump(tweet, f, ensure_ascii=False)
                f.write("\n")
        # when you attain the rate limit:
        except tweepy.TweepError:
            _rate_limit_sleep(count)
            continue
        # when you collected all possible tweets:
        except StopIteration:
            st.write(f"Collected all possible {count} tweets from last week.")
            break
    return count


if st.button("Start collecting"):
    st.write("Collecting...")
    outpath = f"{datadir}/{datetoday}_tweets_{savename}.jsonl"
    if advanced:
        # advanced run: honour language, timerange and result-type settings
        cursor = tweepy.Cursor(api.search, q=keywords, rpp=100,
                               tweet_mode='extended', lang=language,
                               until=until_date, result_type=restype).items()
        count = _collect(cursor, outpath, since_date=since_date)
    else:
        # default run: everything the Search API returns for the last 7 days
        cursor = tweepy.Cursor(api.search, q=keywords, rpp=100,
                               tweet_mode='extended').items()
        count = _collect(cursor, outpath)