forked from ruangrupa/konfluks

Compare commits: e0fd2c40a6...master
26 commits (SHA1):

- b0f77831bd
- 5ba944b6d1
- ad591ea9cf
- 9c824fcd3f
- cab36c8ac6
- c84a975887
- 2ca61c6197
- fecf5cd64e
- 6e64d64772
- 3b390d1ecb
- ce3bfc58b0
- c5af3610a0
- 3ea798b301
- 7d3863641d
- f6a1a684c0
- 58afd189a7
- 19ab610dfc
- a809433410
- cf8b1ff7e9
- 2fbc952a72
- bac9bbd7b3
- 8c4a36791f
- dfa4b40d52
- 0aaa711538
- c40f740f50
- f69c092548
```diff
@@ -5,6 +5,7 @@ from hashlib import md5
 from ast import literal_eval as make_tuple
 from pathlib import Path
 from urllib.parse import urlparse
+from re import sub

 import arrow
 import feedparser
```
```diff
@@ -84,6 +85,15 @@ def create_frontmatter(entry):
         for t in entry.tags:
             tags.append(t['term'])

+    if "featured_image" in entry:
+        featured_image = entry.featured_image
+    else:
+        featured_image = ''
+
+    card_type = "network"
+    if entry.feed_name == "pen.lumbung.space":
+        card_type = "pen"
+
     if "opds" in entry:
         frontmatter = {
             'title':entry.title,
```
```diff
@@ -104,7 +114,9 @@ def create_frontmatter(entry):
             'author': author,
             'original_link': entry.link,
             'feed_name': entry['feed_name'],
-            'tags': str(tags)
+            'tags': str(tags),
+            'card_type': card_type,
+            'featured_image': featured_image
         }

     return frontmatter
```
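In short, the two hunks above thread a card_type and an optional featured_image from the parsed entry into the Hugo frontmatter. A minimal standalone sketch of that flow, with a made-up entry dict in place of a real feedparser result:

```python
# Illustrative only: a fake entry standing in for a parsed feed item.
entry = {
    "title": "example post",
    "feed_name": "pen.lumbung.space",
    "featured_image": "cover.jpg",  # set by parse_enclosures when an image is enclosed
}

featured_image = entry["featured_image"] if "featured_image" in entry else ''

card_type = "network"
if entry["feed_name"] == "pen.lumbung.space":
    card_type = "pen"

frontmatter = {
    "title": entry["title"],
    "card_type": card_type,
    "featured_image": featured_image,
}
print(frontmatter)  # {'title': 'example post', 'card_type': 'pen', 'featured_image': 'cover.jpg'}
```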
```diff
@@ -130,11 +142,33 @@ def sanitize_yaml (frontmatter):

     return frontmatter

+def parse_enclosures(post_dir, entry):
+    """
+    Parses feed enclosures which are featured media
+    Can be featured image but also podcast entries
+    https://pythonhosted.org/feedparser/reference-entry-enclosures.html
+    """
+    #TODO parse more than images
+    #TODO handle the fact it could be multiple items
+
+    for e in entry.enclosures:
+        if "type" in e:
+            print("found enclosed media", e.type)
+            if "image/" in e.type:
+                featured_image = grab_media(post_dir, e.href)
+                entry["featured_image"] = featured_image
+            else:
+                print("FIXME:ignoring enclosed", e.type)
+    return entry
+
+
 def create_post(post_dir, entry):
     """
     write hugo post based on RSS entry
     """
+    if "enclosures" in entry:
+        entry = parse_enclosures(post_dir, entry)

     frontmatter = create_frontmatter(entry)

     if not os.path.exists(post_dir):
```
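A quick sketch of what the new parse_enclosures sees at runtime, using plain dicts in place of feedparser's entry objects (feedparser allows both key and attribute access) and a stub standing in for grab_media:

```python
def grab_media_stub(post_dir, url):
    # stand-in for the real grab_media: pretend the file landed in post_dir
    return url.split("/")[-1]

entry = {
    "enclosures": [
        {"type": "image/jpeg", "href": "https://example.org/cover.jpg"},
        {"type": "audio/mpeg", "href": "https://example.org/episode.mp3"},
    ]
}

for e in entry["enclosures"]:
    if "type" in e:
        print("found enclosed media", e["type"])
        if "image/" in e["type"]:
            entry["featured_image"] = grab_media_stub("posts/example", e["href"])
        else:
            print("ignoring enclosed", e["type"])  # e.g. podcast audio, not handled yet

print(entry["featured_image"])  # cover.jpg
```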
```diff
@@ -163,18 +197,25 @@ def grab_media(post_directory, url, prefered_name=None):
     """
     media_item = urlparse(url).path.split('/')[-1]

+    headers = {
+        'User-Agent': 'https://git.autonomic.zone/ruangrupa/lumbunglib',
+        'From': 'info@lumbung.space'  # This is another valid field
+    }
     if prefered_name:
         media_item = prefered_name

     try:
         if not os.path.exists(os.path.join(post_directory, media_item)):
             #TODO: stream is true is a conditional so we could check the headers for things, mimetype etc
-            response = requests.get(url, stream=True)
+            response = requests.get(url, headers=headers, stream=True)
             if response.ok:
                 with open(os.path.join(post_directory, media_item), 'wb') as media_file:
                     shutil.copyfileobj(response.raw, media_file)
                 print('Downloaded media item', media_item)
                 return media_item
+            else:
+                print("Download failed", response.status_code)
+                return url
             return media_item
         elif os.path.exists(os.path.join(post_directory, media_item)):
             return media_item
```
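The grab_media change makes the bot identify itself when fetching media, which some servers require before serving files. The same pattern in isolation, with a placeholder URL and filename:

```python
import shutil
import requests

headers = {
    # points server operators at the bot's source
    "User-Agent": "https://git.autonomic.zone/ruangrupa/lumbunglib",
    # standard HTTP From request header: a contact address
    "From": "info@lumbung.space",
}

response = requests.get("https://example.org/cover.jpg", headers=headers, stream=True)
if response.ok:
    with open("cover.jpg", "wb") as media_file:
        shutil.copyfileobj(response.raw, media_file)  # stream the body straight to disk
else:
    print("Download failed", response.status_code)
```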
```diff
@@ -195,6 +236,7 @@ def parse_posts(post_dir, post_content):
     allowed_iframe_sources = ["youtube.com", "vimeo.com", "tv.lumbung.space"]

     for img in soup(["img", "object"]):
+        if img.get("src") != None:
             local_image = grab_media(post_dir, img["src"])
             if img["src"] != local_image:
                 img["src"] = local_image
```
```diff
@@ -228,6 +270,7 @@ def grab_feed(feed_url):
         print(e)
         return False

+    if "status" in data:
         print(data.status, feed_url)
         if data.status == 200:
             # 304 means the feed has not been modified since we last checked
```
```diff
@@ -19,13 +19,16 @@ hashtags = [
     "majelisakbar",
     "warungkopi",
     "lumbungkios",
+    "kassel_ecosystem",
+    "ruruhaus",
+    "offbeatentrack_kassel",
+    "lumbungofpublishers",
 ]


 def login_mastodon_bot():
     mastodon = Mastodon(
-        access_token=os.environ.get("MASTODON_AUTH_TOKEN"),
-        api_base_url = instance
+        access_token=os.environ.get("MASTODON_AUTH_TOKEN"), api_base_url=instance
     )

     return mastodon
```
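The Mastodon() call above was only reflowed onto one line; behaviour is unchanged. For context, a sketch of how such a client is typically used with Mastodon.py, with a placeholder instance URL:

```python
import os
from mastodon import Mastodon

instance = "https://social.example.org"  # placeholder; the module defines its own instance

mastodon = Mastodon(
    access_token=os.environ.get("MASTODON_AUTH_TOKEN"), api_base_url=instance
)

# fetch recent public posts for one of the configured hashtags
for status in mastodon.timeline_hashtag("lumbungkios", limit=5):
    print(status["id"], status["account"]["acct"])
```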
```diff
@@ -69,9 +72,9 @@ def create_post(post_directory, post_metadata):

     template_dir = os.path.join(Path(__file__).parent.resolve(), "templates")
     env = jinja2.Environment(loader=jinja2.FileSystemLoader(template_dir))
-    name = post_metadata['account']['display_name']
+    name = post_metadata["account"]["display_name"]
     name = sub('"', '\\"', name)
-    post_metadata['account']['display_name'] = name
+    post_metadata["account"]["display_name"] = name
     env.filters["localize_media_url"] = localize_media_url
     env.filters["filter_mastodon_urls"] = filter_mastodon_urls

```
```diff
@@ -136,7 +139,10 @@ def main():
                 create_post(post_dir, post_metadata)
                 all_existing_posts.append(str(post_metadata["id"]))
             else:
-                print("not pulling post %s (post is local only)" % (post_metadata["id"]))
+                print(
+                    "not pulling post %s (post is local only)"
+                    % (post_metadata["id"])
+                )

         # if we already have the post do nothing, possibly update
         elif str(post_metadata["id"]) in existing_posts:
```
```diff
@@ -145,7 +151,10 @@ def main():
                 str(post_metadata["id"])
             ) # create list of posts which have not been returned in the feed
         elif str(post_metadata["id"]) in all_existing_posts:
-            print("skipping post %s as it was already pulled with a different hashtag." % (str(post_metadata["id"])))
+            print(
+                "skipping post %s as it was already pulled with a different hashtag."
+                % (str(post_metadata["id"]))
+            )

     for post in existing_posts:
         print(
```
```diff
@@ -3,11 +3,13 @@ title: "{{ frontmatter.title }}"
 date: "{{ frontmatter.date }}" #2021-06-10T10:46:33+02:00
 draft: false
 summary: "{{ frontmatter.summary }}"
-author: "{{ frontmatter.author }}"
+authors: {% if frontmatter.author %} ["{{ frontmatter.author }}"] {% endif %}
 original_link: "{{ frontmatter.original_link }}"
 feed_name: "{{ frontmatter.feed_name}}"
-categories: ["network", "{{ frontmatter.feed_name}}"]
+categories: ["{{ frontmatter.card_type }}", "{{ frontmatter.feed_name}}"]
+contributors: ["{{ frontmatter.feed_name}}"]
 tags: {{ frontmatter.tags }}
+{% if frontmatter.featured_image %}featured_image: "{{frontmatter.featured_image}}"{% endif %}
 ---

 {{ content }}
```
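With the template change, authors and featured_image are only emitted when a value is present, so empty fields no longer produce dangling YAML. The rendering can be checked directly with Jinja2 (template string abridged from the file above):

```python
import jinja2

template = jinja2.Template(
    'authors: {% if frontmatter.author %} ["{{ frontmatter.author }}"] {% endif %}\n'
    '{% if frontmatter.featured_image %}featured_image: "{{ frontmatter.featured_image }}"{% endif %}'
)

# with an author but no featured image, the featured_image line renders empty
print(template.render(frontmatter={"author": "ruangrupa", "featured_image": ""}))
```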
```diff
@@ -1,11 +1,12 @@
 ---
-date: "{{ post_metadata.created_at }}" #2021-06-10T10:46:33+02:00
+date: {{ post_metadata.created_at }} #2021-06-10T10:46:33+02:00
 draft: false
-author: "{{ post_metadata.account.display_name }}"
-avatar: "{{ post_metadata.account.avatar }}"
+authors: ["{{ post_metadata.account.display_name }}"]
+contributors: ["{{ post_metadata.account.acct}}"]
+avatar: {{ post_metadata.account.avatar }}
 categories: ["shouts"]
-images: [{% for i in post_metadata.media_attachments %} "{{ i.url }}", {% endfor %}]
+images: [{% for i in post_metadata.media_attachments %} {{ i.url }}, {% endfor %}]
-title: "{{ post_metadata.account.display_name }}"
+title: {{ post_metadata.account.display_name }}
 tags: [{% for i in post_metadata.tags %} "{{ i.name }}", {% endfor %}]
 ---

```
```diff
@@ -6,6 +6,7 @@ uuid: "{{v.uuid}}"
 video_duration: "{{ v.duration | duration }} "
 video_channel: "{{ v.channel.display_name }}"
 channel_url: "{{ v.channel.url }}"
+contributors: ["{{ v.account.display_name }}"]
 preview_image: "{{ preview_image }}"
 images: ["./{{ preview_image }}"]
 categories: ["tv","{{ v.channel.display_name }}"]
```
```diff
@@ -102,8 +102,8 @@ def main():
     v = peertube.VideoApi(client)
     count = 100
     page = 0
+    try:
         response = v.videos_get(count=count, filter="local", tags_one_of="publish", start=page)
-
         videos = response.to_dict()
         total = videos['total']
         videos = videos['data']
```
```diff
@@ -148,6 +148,14 @@ def main():
                 post_name
             ) # create list of posts which have not been returned by peertube

+    except:
+        print("didn't get a response from peertube, instance might have been taken down or made private. removing all posts.")
+        output_dir = os.environ.get("OUTPUT_DIR")
+        if not os.path.exists(output_dir):
+            os.mkdir(output_dir)
+        existing_posts = os.listdir(output_dir)
+
+
     for post in existing_posts:
         print("deleted", post) # rm posts not returned
         shutil.rmtree(os.path.join(output_dir, post))
```
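Note that the handler added above is a bare except:, so any failure of videos_get (network error, bad credentials, API change) is treated as if the instance disappeared, and every generated post is deleted. A sketch of a slightly narrower variant, not what the commit does, using a stub in place of the PeerTube client:

```python
import os
import shutil

def fetch_videos():
    # stand-in for v.videos_get(...): fail the way an unreachable API would
    raise ConnectionError("instance unreachable")

output_dir = os.environ.get("OUTPUT_DIR", "output")  # fallback path is an assumption
os.makedirs(output_dir, exist_ok=True)

try:
    videos = fetch_videos()
except Exception as e:  # unlike a bare except, lets KeyboardInterrupt/SystemExit through
    print("didn't get a response from peertube:", e)
    for post in os.listdir(output_dir):
        print("deleted", post)  # rm posts not returned
        shutil.rmtree(os.path.join(output_dir, post))
```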