verify.py
#!/usr/bin/python
# -*- coding: utf-8 -*-
import requests
from concurrent.futures import ThreadPoolExecutor
#from bs4 import BeautifulSoup  # unused; see the alternative sketch in Stage 2

# Mailbox logins read from disk, one per line.
wordList = []
# (message id, login) pairs collected in Stage 1.
ids = []

with open("clean_list.txt", "r", encoding="utf-8") as data:
    for i in data:
        wordList.append(i.rstrip("\n"))
print(len(wordList))
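# clean_list.txt is assumed to contain one 1secmail login (the part before
# the @) per line; the print above is only a sanity check of how many
# mailboxes will be polled.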
def fetch(r, login):
    # Stage 1: list the messages waiting in a mailbox and remember the id
    # of the first one together with its login.
    global ids
    with r.get("https://www.1secmail.com/api/v1/?action=getMessages&login=" + login + "&domain=1secmail.com") as resp:
        resp_json = resp.json()
        print(resp_json)
        try:
            identifier = resp_json[0]['id']
            ids.append((identifier, login))
        except (IndexError, KeyError):
            # Empty mailbox or unexpected payload: nothing to collect yet.
            pass
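# The getMessages response is expected to be a JSON list of message objects,
# e.g. [{"id": 639, "from": "...", "subject": "...", "date": "..."}]
# (shape inferred from how resp_json[0]['id'] is used above); an empty
# mailbox yields [], which is why the IndexError is silently ignored.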
if __name__ == '__main__':
    # Poll every mailbox concurrently; leaving the executor's context
    # manager waits for all workers to finish.
    with ThreadPoolExecutor(max_workers=10) as executor:
        with requests.Session() as session:
            executor.map(fetch, [session] * len(wordList), wordList)

    # Optional: dump the collected (id, login) pairs for debugging.
    #file_q = open('id.txt', 'w', encoding='utf-8')
    #for i in ids:
    #    file_q.write(str(i) + '\n')
    #file_q.close()
    print("Stage-2 Started")

# Stage 2 starts from here
extract_urls = []

def fetch(r, id_word):
    # Stage 2: download the message body and cut the confirmation link out
    # of the "Confirm your ..." anchor. id_word is a (message id, login) pair.
    global extract_urls
    with r.get("https://www.1secmail.com/mailbox/?action=mailBody&id=" + str(id_word[0]) + "&login=" + id_word[1] + "&domain=1secmail.com") as resp2:
        body = resp2.text
        url2 = body[body.find("<a href='") + 9:body.find("'>Confirm your")]
        print(url2)
        extract_urls.append(url2)
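# Alternative extraction sketch (not used above): the commented-out
# BeautifulSoup import near the top hints at parsing the HTML instead of
# slicing strings. Assuming bs4 is installed and the anchor text starts
# with "Confirm your", something along these lines would be more robust:
#
# from bs4 import BeautifulSoup
# def extract_confirm_url(html):
#     soup = BeautifulSoup(html, "html.parser")
#     link = soup.find("a", string=lambda s: s and s.startswith("Confirm your"))
#     return link["href"] if link else ""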
if __name__ == '__main__':
    with ThreadPoolExecutor(max_workers=10) as executor:
        with requests.Session() as session:
            executor.map(fetch, [session] * len(ids), ids)

    # Write one extracted confirmation URL per line.
    with open('extracted_url.txt', 'w', encoding='utf-8') as file_q:
        for i in extract_urls:
            file_q.write(i + '\n')
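# Assumed end-to-end usage (not spelled out in the script itself): run
# `python verify.py` after clean_list.txt has been generated; Stage 1 finds
# mailboxes that received mail, Stage 2 scrapes the confirmation links from
# those messages into extracted_url.txt.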