scrape.py
import csv
import sys

import requests
from bs4 import BeautifulSoup
from texttable import Texttable


def url_builder(username):
    '''Build the Codewars profile url for the given username'''
    return 'https://www.codewars.com/users/' + username

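# A quick sanity check of url_builder (the username here is made up):
#   url_builder('some_user') == 'https://www.codewars.com/users/some_user'
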
def store(data):
    '''
    This function appends one row of
    data to a tab-separated csv file
    '''
    csv.register_dialect('Dialect', delimiter='\t', quoting=csv.QUOTE_NONE)
    # The with block closes the file, so no explicit close() is needed.
    with open("user_info.csv", "a", newline='') as csv_file:
        writer = csv.writer(csv_file, dialect='Dialect')
        writer.writerow(data)

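# Example of what store() writes (the stat label and value are illustrative only):
#   store(['Honor:', '1,234']) appends the row "Honor:\t1,234" to user_info.csv
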
def fetch(url):
    '''
    This function fetches and prints
    the info about the user
    '''
    r = requests.get(url)
    if r.status_code == 500:
        print("Username does not exist, exiting...")
        return
    soup = BeautifulSoup(r.content, 'html.parser')
    user_info = soup.find_all('div', attrs={'class': 'stat'})
    table = Texttable()
    profile_stats = [['Attributes', 'Values']]
    for counter, info in enumerate(user_info, start=1):
        # Skip the 5th-7th stats and stop after the 16th.
        if 5 <= counter <= 7:
            continue
        data = [info.contents[0].string, info.contents[1].string]
        profile_stats.append(data)
        store(data)
        if counter == 16:
            break
    table.add_rows(profile_stats)
    print(table.draw())

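# Note: fetch() treats contents[0] of each 'stat' div as the label and
# contents[1] as its value; the exact markup of the Codewars profile page
# is assumed here, not verified.
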
def main(argv):
    if len(argv) == 1:
        fetch(url_builder(argv[0]))
    elif len(argv) > 1:
        print("Too many arguments!")
        print("Pass a username as an argument.")
    else:
        print("No arguments provided!")
        print("Pass a username as an argument.")


if __name__ == '__main__':
    main(sys.argv[1:])
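# Example invocation from a shell (the username below is made up):
#   python scrape.py some_codewars_user
# The scraped stats are printed in a Texttable grid and appended as
# tab-separated rows to user_info.csv in the current directory.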