# product-fetcher-async.py
import asyncio
import time
from datetime import date, timedelta

import aiohttp
import pandas as pd

# Start timing the execution
start_time = time.time()
# Set the GraphQL endpoint and the headers
endpoint = "https://www.producthunt.com/frontend/graphql"
headers = {"Content-Type": "application/json"}
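# Note: this appears to be Product Hunt's internal frontend GraphQL endpoint
# (the one the website itself uses), not the documented public API, so the
# schema queried below may change without notice.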
# Define the GraphQL query and variables
query = """
query ArchivePage(
  $year: Int
  $month: Int
  $day: Int
  $cursor: String
  $order: PostsOrder
) {
  posts(
    first: 200
    year: $year
    month: $month
    day: $day
    order: $order
    after: $cursor
  ) {
    edges {
      node {
        id
        ...PostItemList
        __typename
      }
      __typename
    }
    pageInfo {
      endCursor
      hasNextPage
      __typename
    }
    __typename
  }
}

fragment PostItemList on Post {
  id
  ...PostItem
  __typename
}

fragment PostItem on Post {
  id
  commentsCount
  name
  shortenedUrl
  slug
  tagline
  updatedAt
  pricingType
  topics(first: 1) {
    edges {
      node {
        id
        name
        slug
        __typename
      }
      __typename
    }
    __typename
  }
  redirectToProduct {
    id
    slug
    __typename
  }
  ...PostThumbnail
  ...PostVoteButton
  __typename
}

fragment PostThumbnail on Post {
  id
  name
  thumbnailImageUuid
  ...PostStatusIcons
  __typename
}

fragment PostStatusIcons on Post {
  id
  name
  productState
  __typename
}

fragment PostVoteButton on Post {
  id
  featuredAt
  updatedAt
  createdAt
  disabledWhenScheduled
  hasVoted
  ... on Votable {
    id
    votesCount
    __typename
  }
  __typename
}
"""
# Initialize an empty list to store the results
results = []
# First date to fetch (inclusive)
start_date = {
    "year": 2022,
    "month": 1,
    "day": 1,
}
# Last date to fetch (inclusive)
end_date = {
    "year": 2022,
    "month": 1,
    "day": 15,
}

# get_date_range returns a list of dates from start_date to end_date, inclusive
def get_date_range(start_date, end_date):
    start = date(start_date["year"], start_date["month"], start_date["day"])
    end = date(end_date["year"], end_date["month"], end_date["day"])
    delta = end - start
    return [start + timedelta(days=i) for i in range(delta.days + 1)]
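
# A quick sanity check of get_date_range (illustrative, not run by the script):
#   get_date_range({"year": 2022, "month": 1, "day": 1},
#                  {"year": 2022, "month": 1, "day": 3})
#   -> [date(2022, 1, 1), date(2022, 1, 2), date(2022, 1, 3)]
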
async def fetch_products(session, variables):
    # Start with no cursor and assume there is at least one page
    cursor = None
    hasNextPage = True
    # Fetch the products in a loop until there are no more pages
    while hasNextPage:
        # Update the cursor in the variables
        variables["cursor"] = cursor
        # Set the GraphQL request payload
        payload = {"query": query, "variables": variables}
        # Send the POST request to the endpoint
        async with session.post(endpoint, json=payload, headers=headers) as response:
            # Stop paginating on a non-200 response instead of looping
            # forever with the same cursor
            if response.status != 200:
                print(f"Request failed with status {response.status}")
                break
            # Get the data from the response
            data = (await response.json())["data"]
            # Extract the list of edges and the pageInfo from the data
            edges = data["posts"]["edges"]
            pageInfo = data["posts"]["pageInfo"]
            # Extract the cursor and hasNextPage flag for the next iteration
            cursor = pageInfo["endCursor"]
            hasNextPage = pageInfo["hasNextPage"]
            # Iterate over the edges and extract the node from each edge
            for edge in edges:
                node = edge["node"]
                # Extract the relevant fields from the node
                # (post_id avoids shadowing the built-in id)
                post_id = node["id"]
                name = node["name"]
                slug = node["slug"]
                tagline = node["tagline"]
                shortenedUrl = node["shortenedUrl"]
                commentsCount = node["commentsCount"]
                updatedAt = node["updatedAt"]
                pricingType = node["pricingType"]
                # Extract the first topic from the node, if it exists
                topic = (
                    node["topics"]["edges"][0]["node"]
                    if node["topics"]["edges"]
                    else None
                )
                topic_id = topic["id"] if topic else None
                topic_name = topic["name"] if topic else None
                topic_slug = topic["slug"] if topic else None
                # Extract the redirectToProduct from the node, if it exists
                redirectToProduct = node["redirectToProduct"]
                redirectToProduct_id = (
                    redirectToProduct["id"] if redirectToProduct else None
                )
                redirectToProduct_slug = (
                    redirectToProduct["slug"] if redirectToProduct else None
                )
                # Extract the fields pulled in by the PostVoteButton and
                # PostThumbnail fragments
                featuredAt = node["featuredAt"]
                createdAt = node["createdAt"]
                disabledWhenScheduled = node["disabledWhenScheduled"]
                votesCount = node["votesCount"]
                productState = node["productState"]
                thumbnailImageUuid = node["thumbnailImageUuid"]
                # Append the extracted fields to the results list
                results.append(
                    {
                        "id": post_id,
                        "name": name,
                        "slug": slug,
                        "tagline": tagline,
                        "shortenedUrl": shortenedUrl,
                        "commentsCount": commentsCount,
                        "createdAt": createdAt,
                        "featuredAt": featuredAt,
                        "updatedAt": updatedAt,
                        "pricingType": pricingType,
                        "topic_id": topic_id,
                        "topic_name": topic_name,
                        "topic_slug": topic_slug,
                        "redirectToProduct_id": redirectToProduct_id,
                        "redirectToProduct_slug": redirectToProduct_slug,
                        "disabledWhenScheduled": disabledWhenScheduled,
                        "votesCount": votesCount,
                        "productState": productState,
                        "thumbnailImageUuid": thumbnailImageUuid,
                    }
                )

async def main():
    async with aiohttp.ClientSession() as session:
        # Iterate over the date range, one day at a time
        for single_date in get_date_range(start_date, end_date):
            print(single_date.strftime("%Y-%m-%d"))
            # Update the date in the variables
            variables = {
                "year": single_date.year,
                "month": single_date.month,
                "day": single_date.day,
                "order": "DAILY_RANK",
            }
            # Fetch the products for the current date
            await fetch_products(session, variables)

# Run the main function
asyncio.run(main())
# Convert the results list to a Pandas DataFrame
df = pd.DataFrame(results)
# Write the DataFrame to a CSV file
df.to_csv("products_async.csv", index=False)
# Stop the timer and report the elapsed time
end_time = time.time()
print(f"Time taken to run the script: {end_time - start_time:.2f} seconds")
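
# A possible speed-up (a sketch, not part of the original flow): main() above
# awaits each day sequentially, so the event loop is mostly idle while waiting
# on the network. Pagination within one day must stay sequential (each page
# needs the previous cursor), but separate days are independent and could be
# fetched concurrently with asyncio.gather. The max_concurrency value is an
# assumption; a semaphore caps in-flight requests to stay polite to the server.
async def main_concurrent(max_concurrency=5):
    semaphore = asyncio.Semaphore(max_concurrency)
    async with aiohttp.ClientSession() as session:
        async def fetch_day(single_date):
            # Limit how many days are fetched at once
            async with semaphore:
                await fetch_products(
                    session,
                    {
                        "year": single_date.year,
                        "month": single_date.month,
                        "day": single_date.day,
                        "order": "DAILY_RANK",
                    },
                )
        # Schedule one task per day and wait for all of them; appends to the
        # shared results list are safe here because asyncio is single-threaded
        await asyncio.gather(
            *(fetch_day(d) for d in get_date_range(start_date, end_date))
        )

# To try it, replace asyncio.run(main()) above with:
#   asyncio.run(main_concurrent())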