Mirror of https://github.com/Lynnesbian/FediBooks/
Synced 2024-11-25 16:48:58 +00:00

Compare commits: 954544205e...0997de535b (6 commits)
Commits in this comparison:
0997de535b
0a4ddb7687
8d89736373
a5c72eb424
ac4cb2ab7a
2f8b3050db
5 changed files with 20 additions and 25 deletions
.gitignore (vendored)
@@ -3,4 +3,5 @@ config.json
 planning.txt
 *.pyc
 /debug
 lynnesbian.json
+test.py

@@ -21,12 +21,10 @@ def extract_post(post):
 	post = html.unescape(post) # convert HTML escape codes to text
 	soup = BeautifulSoup(post, "html.parser")
 	for lb in soup.select("br"): # replace <br> with linebreak
-		lb.insert_after("\n")
-		lb.decompose()
+		lb.replace_with("\n")
 
 	for p in soup.select("p"): # ditto for <p>
-		p.insert_after("\n")
-		p.unwrap()
+		p.replace_with("\n")
 
 	for ht in soup.select("a.hashtag"): # convert hashtags from links to text
 		ht.unwrap()

@@ -34,8 +32,7 @@ def extract_post(post):
 	for link in soup.select("a"): # convert <a href='https://example.com'>example.com</a> to just https://example.com
 		if 'href' in link:
 			# apparently not all a tags have a href, which is understandable if you're doing normal web stuff, but on a social media platform??
-			link.insert_after(link["href"])
-			link.decompose()
+			link.replace_with(link["href"])
 
 	text = soup.get_text()
 	text = re.sub(r"https://([^/]+)/(@[^\s]+)", r"\2@\1", text) # put mastodon-style mentions back in
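
The hunks above collapse the two-step insert-then-remove pattern into a single replace_with() call. A minimal sketch of the difference (my example, not repo code); one behavioral note is that replace_with() on a tag that has children discards those children, whereas the old insert_after("\n") followed by unwrap() kept them:

from bs4 import BeautifulSoup

soup = BeautifulSoup("one<br>two<p>three</p>", "html.parser")

for lb in soup.select("br"):
	lb.replace_with("\n")    # <br> has no children, so nothing is lost

for p in soup.select("p"):
	p.replace_with("\n")     # replaces the <p> and everything inside it

print(repr(soup.get_text()))  # 'one\ntwo\n' - the "three" inside <p> is gone

If preserving paragraph text matters, something like p.replace_with(p.get_text() + "\n") would keep it; the diff as captured here uses the plain "\n" form.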

@@ -191,22 +188,19 @@ def make_post(args):
 	db.commit()
 	c.close()
 
-def do_in_pool(function, data, timeout=30, silent=False):
-	with ProcessPool(max_workers=cfg['service_threads']) as p:
-		index = 0
-		future = p.map(function, data)
-		iterator = future.result()
-
-		while True:
-			try:
-				result = next(iterator)
-			except StopIteration:
-				# all threads are done
-				break
-			except TimeoutError as error:
-				if not silent: print("Timed out on {}.".format(data[index]))
-			finally:
-				index += 1
+def task_done(future):
+	try:
+		result = future.result() # blocks until results are ready
+	except TimeoutError as error:
+		if not future.silent: print("Timed out on {}.".format(future.function_data))
+
+def do_in_pool(function, data, timeout=30, silent=False):
+	with ProcessPool(max_workers=5, max_tasks=10) as pool:
+		for i in data:
+			future = pool.schedule(function, args=[i], timeout=timeout)
+			future.silent = silent
+			future.function_data = i
+			future.add_done_callback(task_done)
 
 def get_key():
 	db = MySQLdb.connect(
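
The rewrite above replaces pebble's map()-and-iterate pattern with one scheduled task per item and a done-callback for timeout reporting. A self-contained sketch of that pattern (the work function and its data are made up; the attribute names mirror the diff):

from concurrent.futures import TimeoutError
from pebble import ProcessPool

def work(n):
	return n * n    # hypothetical stand-in for the real per-item function

def task_done(future):
	try:
		future.result()    # raises TimeoutError if the task exceeded its timeout
	except TimeoutError:
		print("Timed out on {}.".format(future.function_data))

if __name__ == "__main__":
	with ProcessPool(max_workers=5, max_tasks=10) as pool:
		for i in range(20):
			future = pool.schedule(work, args=[i], timeout=30)
			future.function_data = i    # stash the input so the callback can name it
			future.add_done_callback(task_done)

Attaching ad-hoc attributes like function_data works because pebble futures are ordinary concurrent.futures.Future objects, and the callback cannot run before add_done_callback() registers it, so the attributes are always set by the time task_done reads them.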

@@ -50,7 +50,7 @@ def scrape_posts(account):
 
 	# here we go!
 	# warning: scraping posts from outbox.json is messy stuff
-	while not done and len(j['orderedItems']) > 0:
+	while not done and 'orderedItems' in j and len(j['orderedItems']) > 0:
 		for oi in j['orderedItems']:
 			if oi['type'] == "Create":
 				# this is a status/post/toot/florp/whatever
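
The new 'orderedItems' in j check matters because not every server includes that key on every outbox page, and the old condition raised a KeyError on pages without it. A toy illustration with made-up page data:

pages = [
	{"orderedItems": [{"type": "Create"}]},           # a normal outbox page
	{"next": "https://example.com/outbox?page=2"},    # a page with no items key
]

for j in pages:
	if 'orderedItems' in j and len(j['orderedItems']) > 0:
		print("processing {} item(s)".format(len(j['orderedItems'])))
	else:
		print("skipping page without orderedItems")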

@@ -30,7 +30,7 @@ def update_icon(bot):
 		if r.status_code != 200:
 			raise
 	except:
-		print("{} is down.".format(url))
+		print("{} is down - can't update icon for {}.".format(url, bot['handle']))
 		return
 
 	client = Mastodon(
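
For context, the surrounding code turns any non-200 status into an exception so that one catch-all handler covers both network failures and bad responses. A hedged sketch of that shape (url and bot are made-up values):

import requests

url = "https://example.com"                # hypothetical instance URL
bot = {"handle": "@ebooks@example.com"}    # hypothetical bot record

try:
	r = requests.get(url, timeout=10)
	if r.status_code != 200:
		raise Exception("non-200 response")
except Exception:
	print("{} is down - can't update icon for {}.".format(url, bot['handle']))

(In the diff itself the bare raise has no active exception, so Python raises a RuntimeError, which the bare except: then catches; it effectively jumps to the error handler.)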

requirements.txt
@@ -1,6 +1,6 @@
 Mastodon.py==1.5.1
 markovify==0.8.0
-beautifulsoup4==4.9.0
+beautifulsoup4==4.9.1
 requests==2.23.0
 Flask==1.1.2
 flask-mysqldb==0.2.0