mirror of https://github.com/Lynnesbian/FediBooks/ synced 2024-11-25 16:48:58 +00:00

Compare commits


6 commits

Author SHA1 Message Date
0997de535b
turns out fedibooks is even buggier than i thought, and i already thought it was pretty shit 2020-05-27 22:27:20 +10:00
0a4ddb7687
Merge pull request #48 from Lynnesbian/dependabot/pip/beautifulsoup4-4.9.1
Bump beautifulsoup4 from 4.9.0 to 4.9.1
2020-05-27 22:11:33 +10:00
8d89736373
lil more not broken 2020-05-27 21:59:29 +10:00
a5c72eb424
a lil more info 2020-05-27 21:58:12 +10:00
ac4cb2ab7a
okay, NOW multithreading works properly, and can i just say: holy fuck it's SO MUCH FASTER 2020-05-27 21:51:58 +10:00
dependabot-preview[bot]
2f8b3050db
Bump beautifulsoup4 from 4.9.0 to 4.9.1
Bumps [beautifulsoup4](http://www.crummy.com/software/BeautifulSoup/bs4/) from 4.9.0 to 4.9.1.

Signed-off-by: dependabot-preview[bot] <support@dependabot.com>
2020-05-18 19:13:15 +00:00
5 changed files with 20 additions and 25 deletions

.gitignore

@@ -3,4 +3,5 @@ config.json
 planning.txt
 *.pyc
 /debug
-lynnesbian.json
\ No newline at end of file
+lynnesbian.json
+test.py


@@ -21,12 +21,10 @@ def extract_post(post):
 	post = html.unescape(post) # convert HTML escape codes to text
 	soup = BeautifulSoup(post, "html.parser")
 	for lb in soup.select("br"): # replace <br> with linebreak
-		lb.insert_after("\n")
-		lb.decompose()
+		lb.replace_with("\n")
 	for p in soup.select("p"): # ditto for <p>
-		p.insert_after("\n")
-		p.unwrap()
+		p.replace_with("\n")
 	for ht in soup.select("a.hashtag"): # convert hashtags from links to text
 		ht.unwrap()
@@ -34,8 +32,7 @@ def extract_post(post):
 	for link in soup.select("a"): # convert <a href='https://example.com'>example.com</a> to just https://example.com
 		if 'href' in link:
 			# apparently not all a tags have a href, which is understandable if you're doing normal web stuff, but on a social media platform??
-			link.insert_after(link["href"])
-			link.decompose()
+			link.replace_with(link["href"])
 
 	text = soup.get_text()
 	text = re.sub(r"https://([^/]+)/(@[^\s]+)", r"\2@\1", text) # put mastodon-style mentions back in
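
For context: the change above swaps the two-step insert_after()/decompose() (and insert_after()/unwrap()) dance for BeautifulSoup's single replace_with() call, which substitutes a tag with a text node in place. A minimal standalone sketch of the new pattern; the sample post HTML here is invented for illustration:

# minimal sketch of the replace_with() pattern adopted above; the sample HTML is invented
from bs4 import BeautifulSoup

post = '<p>check out <a href="https://example.com">example.com</a><br>nice site</p>'
soup = BeautifulSoup(post, "html.parser")
for lb in soup.select("br"):
	lb.replace_with("\n")  # one call: the <br> tag becomes a newline text node
for link in soup.select("a"):
	if link.has_attr("href"):  # not every <a> is guaranteed to carry a href
		link.replace_with(link["href"])
print(repr(soup.get_text()))  # 'check out https://example.com\nnice site'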
@@ -191,22 +188,19 @@ def make_post(args):
 	db.commit()
 	c.close()
 
-def do_in_pool(function, data, timeout=30, silent=False):
-	with ProcessPool(max_workers=cfg['service_threads']) as p:
-		index = 0
-		future = p.map(function, data)
-		iterator = future.result()
+def task_done(future):
+	try:
+		result = future.result() # blocks until results are ready
+	except TimeoutError as error:
+		if not future.silent: print("Timed out on {}.".format(future.function_data))
 
-		while True:
-			try:
-				result = next(iterator)
-			except StopIteration:
-				# all threads are done
-				break
-			except TimeoutError as error:
-				if not silent: print("Timed out on {}.".format(data[index]))
-			finally:
-				index += 1
+def do_in_pool(function, data, timeout=30, silent=False):
+	with ProcessPool(max_workers=5, max_tasks=10) as pool:
+		for i in data:
+			future = pool.schedule(function, args=[i], timeout=timeout)
+			future.silent = silent
+			future.function_data = i
+			future.add_done_callback(task_done)
 
 def get_key():
 	db = MySQLdb.connect(
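
This hunk is the heart of the speedup in ac4cb2ab7a: instead of draining a single p.map() iterator, where one slow item stalls every result queued behind it, each item becomes its own pebble task with a per-task timeout and a completion callback. A self-contained sketch of that pattern; slow_task and the timings are invented for illustration:

# standalone sketch of the schedule-plus-callback pattern from the diff above;
# slow_task and the numbers are invented
import time
from concurrent.futures import TimeoutError
from pebble import ProcessPool

def slow_task(n):
	time.sleep(n)
	return n * 2

def task_done(future):
	try:
		result = future.result()  # raises if the task failed or timed out
		print("got", result)
	except TimeoutError:
		print("Timed out on {}.".format(future.function_data))

if __name__ == "__main__":
	with ProcessPool(max_workers=5, max_tasks=10) as pool:
		for i in [1, 2, 10]:
			future = pool.schedule(slow_task, args=[i], timeout=3)
			future.function_data = i  # stash the input so the callback can report it
			future.add_done_callback(task_done)

Because each future times out independently, a hung job only loses its own result; under the old map()-based loop it would have delayed everything scheduled after it.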


@@ -50,7 +50,7 @@ def scrape_posts(account):
 	# here we go!
 	# warning: scraping posts from outbox.json is messy stuff
-	while not done and len(j['orderedItems']) > 0:
+	while not done and 'orderedItems' in j and len(j['orderedItems']) > 0:
 		for oi in j['orderedItems']:
 			if oi['type'] == "Create":
 				# this is a status/post/toot/florp/whatever
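
The extra 'orderedItems' in j check means an outbox page without that key now ends the loop cleanly instead of raising a KeyError. A tiny sketch of the condition's behaviour, with invented sample pages:

# minimal sketch of the added guard; the sample pages are invented
def has_items(j):
	# the new condition: the key must exist and the list must be non-empty
	return 'orderedItems' in j and len(j['orderedItems']) > 0

print(has_items({"orderedItems": [{"type": "Create"}]}))  # True
print(has_items({"orderedItems": []}))                    # False
print(has_items({}))                                      # False; the old condition raised KeyError here

j.get('orderedItems') would express the same test more compactly, since a missing key and an empty list are both falsy.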


@@ -30,7 +30,7 @@ def update_icon(bot):
 		if r.status_code != 200:
 			raise
 	except:
-		print("{} is down.".format(url))
+		print("{} is down - can't update icon for {}.".format(url, bot['handle']))
 		return
 
 	client = Mastodon(
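
For reference, the surrounding logic treats a non-200 response, or a connection error from the request itself, as the instance being down. A hedged standalone sketch of that pattern; the URL, handle, endpoint path, and timeout are all invented:

# rough sketch of the guarded fetch around the changed line;
# url, handle, endpoint and timeout are invented for illustration
import requests

url = "https://example.com"
handle = "ebooks@example.com"
try:
	r = requests.get(url + "/api/v1/instance", timeout=10)
	if r.status_code != 200:
		raise Exception("non-200 response")
except Exception:
	print("{} is down - can't update icon for {}.".format(url, handle))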


@@ -1,6 +1,6 @@
 Mastodon.py==1.5.1
 markovify==0.8.0
-beautifulsoup4==4.9.0
+beautifulsoup4==4.9.1
 requests==2.23.0
 Flask==1.1.2
 flask-mysqldb==0.2.0