index.py · executable file · 85 lines (62 loc) · 2.37 KB
#!/usr/bin/env python
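"""Scrape GitHub's trending pages for a set of languages, append the results
to dated Markdown files (a full list plus a diff against yesterday's list),
and commit and push them."""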
import os
from os.path import join, dirname, exists
import requests
from bs4 import BeautifulSoup
from datetime import datetime, timedelta
LOC = dirname(__file__)


def main():
    # Yesterday's file: its contents are used later to filter out repos that
    # already appeared yesterday when writing today's "_short" diff file.
    yesterday = datetime.now() - timedelta(days=1)
    ymonth = yesterday.strftime("%Y-%m")
    ydate = yesterday.strftime("%Y-%m-%d")
    yfilename = join(LOC, ymonth, ydate + ".md")
    if os.path.exists(yfilename):
        with open(yfilename) as e:
            yfilecontents = e.read()
    else:
        yfilecontents = ""

    # Today's output files: one full list and one diff list per day,
    # grouped into a YYYY-MM directory.
    month = datetime.now().strftime("%Y-%m")
    date = datetime.now().strftime("%Y-%m-%d")
    file_location = join(LOC, month)
    filename = join(file_location, date + ".md")
    filename_short = join(file_location, date + "_short.md")
    if not exists(file_location):
        os.makedirs(file_location)
    with open(filename, "w") as e:
        e.write("### " + date + "\n")
    with open(filename_short, "w") as e:
        e.write("### " + date + "\n")
        e.write("diff between today and yesterday\n")

    # Scrape the trending page for each tracked language.
    scrape("python", filename, filename_short, yfilecontents)
    scrape("go", filename, filename_short, yfilecontents)
    scrape("rust", filename, filename_short, yfilecontents)
    scrape("cpp", filename, filename_short, yfilecontents)
    scrape("javascript", filename, filename_short, yfilecontents)
    scrape("typescript", filename, filename_short, yfilecontents)

    # Commit and push the newly generated files.
    cmd = "cd " + LOC + ";git add --all; git commit -m '" + date + "'; git push"
    print(cmd)
    os.system(cmd)

def scrape(language, filename, filename_short, yfilecontents):
    # Fetch the GitHub trending page for the given language.
    req = requests.get("https://github.com/trending?l=" + language)
    soup = BeautifulSoup(req.content, "html.parser")
    e = open(filename, "a")
    d = open(filename_short, "a")
    e.write("\n#### " + language + "\n")
    d.write("\n#### " + language + "\n")
    # Each trending repository is rendered as an <article class="Box-row">.
    for item in soup.find_all("article", class_="Box-row"):
        url = "https://github.com" + item.h2.a.get("href")
        title = item.h2.a.get("href")[1:]
        try:
            description = item.p.text.strip().split("\n")[0]
        except AttributeError:
            # Some repositories have no description (no <p> element).
            description = ""
        line = "* [" + title + "](" + url + "): " + description + "\n"
        e.write(line)
        # Only repos that were not in yesterday's file go into the diff file.
        if url in yfilecontents:
            continue
        d.write(line)
    e.close()
    d.close()

if __name__ == '__main__':
main()