30 Commits

Author SHA1 Message Date
539d3fa6d0 Merge branch 'master' of ssh://git.autonomic.zone:2222/ruangrupa/lumbunglib 2022-02-10 12:53:13 +01:00
34d84dcde2 cal, vid: slugify instead of sanitize_name 2022-02-10 12:52:55 +01:00
709921daf1 hacking docs 2022-02-07 13:47:11 +01:00
40bf9416b8 cal: fix post deletion logic 2022-02-07 11:50:41 +01:00
58024c775a vid: fix post deletion logic 2022-02-07 11:45:00 +01:00
5366ef3abe hash: add new hashtag 2022-02-04 11:57:46 +01:00
77b8c9e0af video: use video title for filename 2022-02-03 16:09:05 +01:00
a4f22b95b7 feed: truncate file name if too long 2022-02-03 15:57:01 +01:00
e32743ecd5 cal: set featured_image in template 2022-02-03 15:32:25 +01:00
1f21d0475f cal: add event name to filename 2022-02-03 12:50:15 +01:00
3297991d83 cal: check if image exists before downloading 2022-02-03 12:35:54 +01:00
1fe2fa3bcf Merge pull request 'feat: add opengraph generation for social and tv' (#23) from knoflook/lumbunglib:master into master
Reviewed-on: ruangrupa/lumbunglib#23
2022-02-02 14:15:41 +01:00
e1ec05cda4 Merge branch 'master' into master 2022-02-02 14:15:28 +01:00
eaadfd2023 feat: add opengraph generation for social and tv 2022-02-02 14:14:11 +01:00
22d9a62c20 Merge pull request 'fix: don't pull the same post twice but for real' (#22) from knoflook/lumbunglib:master into master
Reviewed-on: ruangrupa/lumbunglib#22
2022-01-28 15:50:46 +01:00
4e1da7c3e4 Merge branch 'master' into master 2022-01-28 15:50:29 +01:00
7e45112280 fix: don't pull the same post twice but for real 2022-01-28 15:49:40 +01:00
aefdb15d48 Merge pull request 'fix: don't pull the same post twice' (#19) from knoflook/lumbunglib:master into master
Reviewed-on: ruangrupa/lumbunglib#19
2022-01-28 13:22:04 +01:00
36b4ca1ba4 Merge branch 'master' into master 2022-01-28 13:21:41 +01:00
2c5e94c265 fix: don't expect title field for feeds 2022-01-28 13:09:17 +01:00
df19668260 feat: remove local data for nonexisting feeds 2022-01-28 12:18:19 +01:00
c4142145e9 fix: don't pull the same post twice 2022-01-27 17:52:26 +01:00
b1c9c05b6d sort + new tags 2022-01-14 09:39:37 +01:00
05f7fc7a3f hard code hashtags for now 2022-01-12 09:53:29 +01:00
6c3814dd5b Revert "feat: support loading hashtags from env"
This reverts commit d9bcb29f85.
2022-01-12 09:45:41 +01:00
d9bcb29f85 feat: support loading hashtags from env 2022-01-12 09:34:01 +01:00
db0ce65b89 Merge pull request 'feat: add hometown integration' (#18) from knoflook/lumbunglib:master into master
Reviewed-on: ruangrupa/lumbunglib#18
2022-01-10 15:15:07 +01:00
77d72745ab feat: pull posts from mastodon 2022-01-10 15:07:04 +01:00
f3b2b032de feat: support feed generation 2022-01-05 13:39:01 +01:00
b1f2d52a68 docs: poetry tips 2022-01-05 12:17:37 +01:00
13 changed files with 194 additions and 27 deletions

5
.gitignore vendored
View File

@ -1,4 +1,7 @@
test *.txt
*.txt.*
__pycache__ __pycache__
etags
test
.venv .venv
content content

View File

@ -1,3 +1,40 @@
# lumbunglib # lumbunglib
> Python lib which powers `lumbung[dot]space` automation > Python lib which powers `lumbung.space` automation
## hacking
Install [poetry](https://python-poetry.org/docs/#osx--linux--bashonwindows-install-instructions):
```bash
curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py | python -
```
We use Poetry because it locks the dependencies all the way down and makes it
easier to manage installation & maintenance in the long-term. Then install the
dependencies & have them managed by Poetry:
```
poetry install
```
Each script requires some environment variables to run, you can see the latest
deployment configuration over
[here](https://git.autonomic.zone/ruangrupa/lumbung.space/src/branch/main/compose.yml),
look for the values under the `environment: ...` stanza.
All scripts have an entrypoint described in the
[`pyproject.toml`](https://git.autonomic.zone/ruangrupa/lumbunglib/src/commit/40bf9416b8792c08683ad8ac878093c7ef1b2f5d/pyproject.toml#L27-L31)
which you can run via `poetry run ...`. For example, if you want to run the
[`lumbunglib/video.py`](./lumbunglib/video.py) script, you'd do:
```
mkdir -p testdir
export OUTPUT_DIR=./testdir
poetry run lumbunglib-vid
```
Run `poetry run poetry2setup > setup.py` if updating the poetry dependencies.
This allows us to run `pip install .` in the deployment and Pip will understand
that it is just a regular Python package. If adding a new cli command, extend
`pyproject.toml` with a new `[tool.poetry.scripts]` entry.

View File

@ -3,6 +3,7 @@ import re
import shutil import shutil
from pathlib import Path from pathlib import Path
from urllib.parse import urlparse from urllib.parse import urlparse
from slugify import slugify
import arrow import arrow
import jinja2 import jinja2
@ -75,6 +76,7 @@ def create_metadata(event):
"duration": date.compress(event.duration), "duration": date.compress(event.duration),
"location": event.location, "location": event.location,
"uid": event.uid, "uid": event.uid,
"featured_image": "",
"images": find_imageURLS(event.description), # currently not used in template "images": find_imageURLS(event.description), # currently not used in template
} }
@ -110,7 +112,6 @@ def localize_time(date):
) )
return localized_begins return localized_begins
def create_event_post(post_dir, event): def create_event_post(post_dir, event):
""" """
Create HUGO post based on calendar event metadata Create HUGO post based on calendar event metadata
@ -146,12 +147,15 @@ def create_event_post(post_dir, event):
if not os.path.exists(local_image): if not os.path.exists(local_image):
# download preview image # download preview image
response = requests.get(img, stream=True) response = requests.get(img, stream=True)
with open(local_image, "wb") as img_file: if response.status_code == 200:
shutil.copyfileobj(response.raw, img_file) with open(local_image, "wb") as img_file:
print('Downloaded image for event "{}"'.format(event.name)) shutil.copyfileobj(response.raw, img_file)
event_metadata["description"] = event_metadata["description"].replace( print('Downloaded image for event "{}"'.format(event.name))
img, "![]({})".format(img_name) event_metadata["description"] = event_metadata["description"].replace(
) img, "![]({})".format(img_name)
)
if event_metadata["featured_image"] == "":
event_metadata["featured_image"] = img_name
if img_name in existing_images: if img_name in existing_images:
existing_images.remove(img_name) existing_images.remove(img_name)
@ -184,18 +188,18 @@ def update_event_post(post_dir, event):
def main(): def main():
for event in list(cal.events): for event in list(cal.events):
post_name = slugify(event.name) + "-" + event.uid
post_dir = os.path.join(output_dir, post_name)
post_dir = os.path.join(output_dir, event.uid) if post_name not in existing_posts:
if event.uid not in existing_posts:
# if there is an event we dont already have, make it # if there is an event we dont already have, make it
create_event_post(post_dir, event) create_event_post(post_dir, event)
elif event.uid in existing_posts: elif post_name in existing_posts:
# if we already have it, update # if we already have it, update
update_event_post(post_dir, event) update_event_post(post_dir, event)
existing_posts.remove( existing_posts.remove(
event.uid post_name
) # create list of posts which have not been returned by the calendar ) # create list of posts which have not been returned by the calendar
for post in existing_posts: for post in existing_posts:

View File

@ -1,6 +1,7 @@
import os import os
import shutil import shutil
import time import time
from hashlib import md5
from ast import literal_eval as make_tuple from ast import literal_eval as make_tuple
from pathlib import Path from pathlib import Path
from urllib.parse import urlparse from urllib.parse import urlparse
@ -60,6 +61,11 @@ def create_frontmatter(entry):
else: else:
author = "" author = ""
if "title" in entry:
title = entry.title
else:
title = ""
tags = [] tags = []
if "tags" in entry: if "tags" in entry:
# TODO finish categories # TODO finish categories
@ -67,7 +73,7 @@ def create_frontmatter(entry):
tags.append(t["term"]) tags.append(t["term"])
frontmatter = { frontmatter = {
"title": entry.title, "title": title,
"date": published.format(), "date": published.format(),
"summary": "", "summary": "",
"author": author, "author": author,
@ -194,13 +200,25 @@ def main():
if not os.path.exists(output_dir): if not os.path.exists(output_dir):
os.makedirs(output_dir) os.makedirs(output_dir)
feed_dict = dict()
for url in feed_urls:
feed_name = urlparse(url).netloc
feed_dict[url] = feed_name
feed_names = feed_dict.values()
content_dirs = os.listdir(output_dir)
for i in content_dirs:
if i not in feed_names:
shutil.rmtree(os.path.join(output_dir, i))
print("%s not in feeds_list.txt, removing local data" %(i))
# add iframe to the allowlist of feedparser's sanitizer, # add iframe to the allowlist of feedparser's sanitizer,
# this is now handled in parse_post() # this is now handled in parse_post()
feedparser.sanitizer._HTMLSanitizer.acceptable_elements |= {"iframe"} feedparser.sanitizer._HTMLSanitizer.acceptable_elements |= {"iframe"}
for feed_url in feed_urls: for feed_url in feed_urls:
feed_name = urlparse(feed_url).netloc feed_name = feed_dict[feed_url]
feed_dir = os.path.join(output_dir, feed_name) feed_dir = os.path.join(output_dir, feed_name)
@ -221,6 +239,13 @@ def main():
entry["feed_name"] = feed_name entry["feed_name"] = feed_name
post_name = slugify(entry.title) post_name = slugify(entry.title)
# pixelfed returns the whole post text as the post name. max
# filename length is 255 on many systems. here we're shortening
# the name and adding a hash to it to avoid a conflict in a
# situation where 2 posts start with exactly the same text.
if len(post_name) > 150:
post_hash = md5(bytes(post_name, "utf-8"))
post_name = post_name[:150] + "-" + post_hash.hexdigest()
post_dir = os.path.join(output_dir, feed_name, post_name) post_dir = os.path.join(output_dir, feed_name, post_name)
if post_name not in existing_posts: if post_name not in existing_posts:

View File

@ -9,7 +9,16 @@ from mastodon import Mastodon
instance = "https://social.lumbung.space" instance = "https://social.lumbung.space"
email = "" email = ""
password = "" password = ""
hashtags = ["jalansesama"] hashtags = [
"documentafifteen",
"harvestedbyputra",
"jalansesama",
"lumbungdotspace",
"majelisakakbar",
"majelisakbar",
"warungkopi",
"lumbungkios",
]
def login_mastodon_bot(): def login_mastodon_bot():
@ -95,6 +104,10 @@ def main():
if not os.path.exists(output_dir): if not os.path.exists(output_dir):
os.mkdir(output_dir) os.mkdir(output_dir)
all_existing_posts = []
for i in os.listdir(output_dir):
all_existing_posts += os.listdir(os.path.join(output_dir, i))
for hashtag in hashtags: for hashtag in hashtags:
hashtag_dir = os.path.join(output_dir, hashtag) hashtag_dir = os.path.join(output_dir, hashtag)
@ -112,14 +125,13 @@ def main():
for post_metadata in timeline: for post_metadata in timeline:
post_dir = os.path.join(hashtag_dir, str(post_metadata["id"])) post_dir = os.path.join(hashtag_dir, str(post_metadata["id"]))
# if there is a post in the feed we dont already have locally, make it # if there is a post in the feed we dont already have locally, make it
if str(post_metadata["id"]) not in existing_posts: if str(post_metadata["id"]) not in all_existing_posts:
if not post_metadata[ if not post_metadata[
"local_only" "local_only"
]: # if you get an error here then you are using vanilla Mastodon, this is a Hometown or Glitch only feature ]: # if you get an error here then you are using vanilla Mastodon, this is a Hometown or Glitch only feature
create_post(post_dir, post_metadata) create_post(post_dir, post_metadata)
all_existing_posts.append(str(post_metadata["id"]))
else: else:
print("not pulling post %s (post is local only)" % (post_metadata["id"])) print("not pulling post %s (post is local only)" % (post_metadata["id"]))
@ -129,6 +141,8 @@ def main():
existing_posts.remove( existing_posts.remove(
str(post_metadata["id"]) str(post_metadata["id"])
) # create list of posts which have not been returned in the feed ) # create list of posts which have not been returned in the feed
elif str(post_metadata["id"]) in all_existing_posts:
print("skipping post %s as it was already pulled with a different hashtag." % (str(post_metadata["id"])))
for post in existing_posts: for post in existing_posts:
print( print(

View File

@ -8,6 +8,9 @@ event_end: "{{ event.end }}"
duration: "{{ event.duration }}" duration: "{{ event.duration }}"
localized_begin: "{{ event.localized_begin }}" localized_begin: "{{ event.localized_begin }}"
uid: "{{ event.uid }}" uid: "{{ event.uid }}"
{% if event.featured_image %}
featured_image: "{{ event.featured_image }}"
{% endif %}
{% if event.location %} {% if event.location %}
location: "{{ event.location }}" location: "{{ event.location }}"
{% endif %} {% endif %}

View File

@ -7,7 +7,7 @@ author: "{{ frontmatter.author }}"
original_link: "{{ frontmatter.original_link }}" original_link: "{{ frontmatter.original_link }}"
feed_name: "{{ frontmatter.feed_name}}" feed_name: "{{ frontmatter.feed_name}}"
categories: ["network", "{{ frontmatter.feed_name}}"] categories: ["network", "{{ frontmatter.feed_name}}"]
tags: { { frontmatter.tags } } tags: {{ frontmatter.tags }}
--- ---
{{ content }} {{ content }}

View File

@ -4,6 +4,8 @@ draft: false
author: "{{ post_metadata.account.display_name }}" author: "{{ post_metadata.account.display_name }}"
avatar: "{{ post_metadata.account.avatar }}" avatar: "{{ post_metadata.account.avatar }}"
categories: ["shouts"] categories: ["shouts"]
images: [{% for i in post_metadata.media_attachments %} "{{ i.url }}", {% endfor %}]
title: "{{ post_metadata.account.display_name }}"
tags: [{% for i in post_metadata.tags %} "{{ i.name }}", {% endfor %}] tags: [{% for i in post_metadata.tags %} "{{ i.name }}", {% endfor %}]
--- ---

View File

@ -7,6 +7,7 @@ video_duration: "{{ v.duration | duration }} "
video_channel: "{{ v.channel.display_name }}" video_channel: "{{ v.channel.display_name }}"
channel_url: "{{ v.channel.url }}" channel_url: "{{ v.channel.url }}"
preview_image: "{{ preview_image }}" preview_image: "{{ preview_image }}"
images: ["./{{ preview_image }}"]
categories: ["tv","{{ v.channel.display_name }}"] categories: ["tv","{{ v.channel.display_name }}"]
is_live: {{ v.is_live }} is_live: {{ v.is_live }}
--- ---

View File

@ -4,6 +4,7 @@ import json
import os import os
import shutil import shutil
from pathlib import Path from pathlib import Path
from slugify import slugify
import arrow import arrow
import jinja2 import jinja2
@ -97,7 +98,6 @@ def update_post(post_directory, video_metadata, host):
# compat for when there is no timestamp yet.. # compat for when there is no timestamp yet..
create_post(post_directory, video_metadata, host) create_post(post_directory, video_metadata, host)
def main(): def main():
v = peertube.VideoApi(client) v = peertube.VideoApi(client)
@ -114,10 +114,11 @@ def main():
existing_posts = os.listdir(output_dir) existing_posts = os.listdir(output_dir)
for video_metadata in videos: for video_metadata in videos:
post_dir = os.path.join(output_dir, video_metadata["uuid"]) post_name = slugify(video_metadata["name"]) + "-" + video_metadata["uuid"]
post_dir = os.path.join(output_dir, post_name)
if ( if (
video_metadata["uuid"] not in existing_posts post_name not in existing_posts
): # if there is a video we dont already have, make it ): # if there is a video we dont already have, make it
print( print(
"New: ", video_metadata["name"], "({})".format(video_metadata["uuid"]) "New: ", video_metadata["name"], "({})".format(video_metadata["uuid"])
@ -125,11 +126,11 @@ def main():
create_post(post_dir, video_metadata, host) create_post(post_dir, video_metadata, host)
elif ( elif (
video_metadata["uuid"] in existing_posts post_name in existing_posts
): # if we already have the video do nothing, possibly update ): # if we already have the video do nothing, possibly update
update_post(post_dir, video_metadata, host) update_post(post_dir, video_metadata, host)
existing_posts.remove( existing_posts.remove(
video_metadata["uuid"] post_name
) # create list of posts which have not been returned by peertube ) # create list of posts which have not been returned by peertube
for post in existing_posts: for post in existing_posts:

73
poetry.lock generated
View File

@ -9,6 +9,21 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[package.dependencies] [package.dependencies]
python-dateutil = "*" python-dateutil = "*"
[[package]]
name = "beautifulsoup4"
version = "4.10.0"
description = "Screen-scraping library"
category = "main"
optional = false
python-versions = ">3.0.0"
[package.dependencies]
soupsieve = ">1.2"
[package.extras]
html5lib = ["html5lib"]
lxml = ["lxml"]
[[package]] [[package]]
name = "blurhash" name = "blurhash"
version = "1.1.4" version = "1.1.4"
@ -20,6 +35,17 @@ python-versions = "*"
[package.extras] [package.extras]
test = ["pillow", "numpy", "pytest"] test = ["pillow", "numpy", "pytest"]
[[package]]
name = "bs4"
version = "0.0.1"
description = "Dummy package for Beautiful Soup"
category = "main"
optional = false
python-versions = "*"
[package.dependencies]
beautifulsoup4 = "*"
[[package]] [[package]]
name = "certifi" name = "certifi"
version = "2021.10.8" version = "2021.10.8"
@ -47,6 +73,17 @@ category = "main"
optional = false optional = false
python-versions = ">=3.5" python-versions = ">=3.5"
[[package]]
name = "feedparser"
version = "6.0.8"
description = "Universal feed parser, handles RSS 0.9x, RSS 1.0, RSS 2.0, CDF, Atom 0.3, and Atom 1.0 feeds"
category = "main"
optional = false
python-versions = ">=3.6"
[package.dependencies]
sgmllib3k = "*"
[[package]] [[package]]
name = "ics" name = "ics"
version = "0.7" version = "0.7"
@ -223,6 +260,14 @@ urllib3 = ">=1.21.1,<1.27"
socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"]
use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"]
[[package]]
name = "sgmllib3k"
version = "1.0.0"
description = "Py3k port of sgmllib."
category = "main"
optional = false
python-versions = "*"
[[package]] [[package]]
name = "six" name = "six"
version = "1.16.0" version = "1.16.0"
@ -231,6 +276,14 @@ category = "main"
optional = false optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
[[package]]
name = "soupsieve"
version = "2.3.1"
description = "A modern CSS selector implementation for Beautiful Soup."
category = "main"
optional = false
python-versions = ">=3.6"
[[package]] [[package]]
name = "tatsu" name = "tatsu"
version = "5.7.0" version = "5.7.0"
@ -266,17 +319,24 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
[metadata] [metadata]
lock-version = "1.1" lock-version = "1.1"
python-versions = "^3.9" python-versions = "^3.9"
content-hash = "d3c9d528613826932cb3f316a3a69066e87e69011d4c8d3a2492521833d3851f" content-hash = "c5c987253f949737210f4a3d3c3c24b0affd4a9c7d06de386c9bd514c592db8b"
[metadata.files] [metadata.files]
arrow = [ arrow = [
{file = "arrow-0.14.7-py2.py3-none-any.whl", hash = "sha256:4bfacea734ead51495dc47df00421ecfd4ca1f2c0fbe58b9a26eaeddedc31caf"}, {file = "arrow-0.14.7-py2.py3-none-any.whl", hash = "sha256:4bfacea734ead51495dc47df00421ecfd4ca1f2c0fbe58b9a26eaeddedc31caf"},
{file = "arrow-0.14.7.tar.gz", hash = "sha256:67f8be7c0cf420424bc62d8d7dc40b44e4bb2f7b515f9cc2954fb36e35797656"}, {file = "arrow-0.14.7.tar.gz", hash = "sha256:67f8be7c0cf420424bc62d8d7dc40b44e4bb2f7b515f9cc2954fb36e35797656"},
] ]
beautifulsoup4 = [
{file = "beautifulsoup4-4.10.0-py3-none-any.whl", hash = "sha256:9a315ce70049920ea4572a4055bc4bd700c940521d36fc858205ad4fcde149bf"},
{file = "beautifulsoup4-4.10.0.tar.gz", hash = "sha256:c23ad23c521d818955a4151a67d81580319d4bf548d3d49f4223ae041ff98891"},
]
blurhash = [ blurhash = [
{file = "blurhash-1.1.4-py2.py3-none-any.whl", hash = "sha256:7611c1bc41383d2349b6129208587b5d61e8792ce953893cb49c38beeb400d1d"}, {file = "blurhash-1.1.4-py2.py3-none-any.whl", hash = "sha256:7611c1bc41383d2349b6129208587b5d61e8792ce953893cb49c38beeb400d1d"},
{file = "blurhash-1.1.4.tar.gz", hash = "sha256:da56b163e5a816e4ad07172f5639287698e09d7f3dc38d18d9726d9c1dbc4cee"}, {file = "blurhash-1.1.4.tar.gz", hash = "sha256:da56b163e5a816e4ad07172f5639287698e09d7f3dc38d18d9726d9c1dbc4cee"},
] ]
bs4 = [
{file = "bs4-0.0.1.tar.gz", hash = "sha256:36ecea1fd7cc5c0c6e4a1ff075df26d50da647b75376626cc186e2212886dd3a"},
]
certifi = [ certifi = [
{file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"},
{file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"},
@ -289,6 +349,10 @@ decorator = [
{file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"},
{file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"},
] ]
feedparser = [
{file = "feedparser-6.0.8-py3-none-any.whl", hash = "sha256:1b7f57841d9cf85074deb316ed2c795091a238adb79846bc46dccdaf80f9c59a"},
{file = "feedparser-6.0.8.tar.gz", hash = "sha256:5ce0410a05ab248c8c7cfca3a0ea2203968ee9ff4486067379af4827a59f9661"},
]
ics = [ ics = [
{file = "ics-0.7-py2.py3-none-any.whl", hash = "sha256:bf5fbdef6e1e073afdadf1b996f0271186dd114a148e38e795919a1ae644d6ac"}, {file = "ics-0.7-py2.py3-none-any.whl", hash = "sha256:bf5fbdef6e1e073afdadf1b996f0271186dd114a148e38e795919a1ae644d6ac"},
{file = "ics-0.7-py3.7.egg", hash = "sha256:3b606205b9582ad27dff77f9b227a30d02fdac532731927fe39df1f1ddf8673f"}, {file = "ics-0.7-py3.7.egg", hash = "sha256:3b606205b9582ad27dff77f9b227a30d02fdac532731927fe39df1f1ddf8673f"},
@ -409,10 +473,17 @@ requests = [
{file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"},
{file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"},
] ]
sgmllib3k = [
{file = "sgmllib3k-1.0.0.tar.gz", hash = "sha256:7868fb1c8bfa764c1ac563d3cf369c381d1325d36124933a726f29fcdaa812e9"},
]
six = [ six = [
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
] ]
soupsieve = [
{file = "soupsieve-2.3.1-py3-none-any.whl", hash = "sha256:1a3cca2617c6b38c0343ed661b1fa5de5637f257d4fe22bd9f1338010a1efefb"},
{file = "soupsieve-2.3.1.tar.gz", hash = "sha256:b8d49b1cd4f037c7082a9683dfa1801aa2597fb11c3a1155b7a5b94829b4f1f9"},
]
tatsu = [ tatsu = [
{file = "TatSu-5.7.0-py2.py3-none-any.whl", hash = "sha256:9eebadfc2889d8e82e197df22913df56ff204bf4cfc62db49a5c7edd084e10b4"}, {file = "TatSu-5.7.0-py2.py3-none-any.whl", hash = "sha256:9eebadfc2889d8e82e197df22913df56ff204bf4cfc62db49a5c7edd084e10b4"},
{file = "TatSu-5.7.0.zip", hash = "sha256:428136cd4aa9600fcd01428bd5667fc752062f54bd0148dc1e64fee7b8d05fa4"}, {file = "TatSu-5.7.0.zip", hash = "sha256:428136cd4aa9600fcd01428bd5667fc752062f54bd0148dc1e64fee7b8d05fa4"},

View File

@ -13,6 +13,8 @@ natural = "^0.2.0"
python-slugify = "^5.0.2" python-slugify = "^5.0.2"
requests = "^2.26.0" requests = "^2.26.0"
peertube = {git = "https://framagit.org/framasoft/peertube/clients/python.git"} peertube = {git = "https://framagit.org/framasoft/peertube/clients/python.git"}
feedparser = "^6.0.8"
bs4 = "^0.0.1"
"Mastodon.py" = "^1.5.1" "Mastodon.py" = "^1.5.1"
[tool.poetry.dev-dependencies] [tool.poetry.dev-dependencies]
@ -25,4 +27,5 @@ build-backend = "poetry.core.masonry.api"
[tool.poetry.scripts] [tool.poetry.scripts]
lumbunglib-cal = "lumbunglib.cloudcal:main" lumbunglib-cal = "lumbunglib.cloudcal:main"
lumbunglib-vid = "lumbunglib.video:main" lumbunglib-vid = "lumbunglib.video:main"
lumbunglib-feed = "lumbunglib.feed:main"
lumbunglib-hash = "lumbunglib.hashtag:main" lumbunglib-hash = "lumbunglib.hashtag:main"

View File

@ -10,6 +10,8 @@ package_data = \
install_requires = \ install_requires = \
['Jinja2>=3.0.3,<4.0.0', ['Jinja2>=3.0.3,<4.0.0',
'Mastodon.py>=1.5.1,<2.0.0', 'Mastodon.py>=1.5.1,<2.0.0',
'bs4>=0.0.1,<0.0.2',
'feedparser>=6.0.8,<7.0.0',
'ics>=0.7,<0.8', 'ics>=0.7,<0.8',
'natural>=0.2.0,<0.3.0', 'natural>=0.2.0,<0.3.0',
'peertube @ ' 'peertube @ '
@ -19,6 +21,7 @@ install_requires = \
entry_points = \ entry_points = \
{'console_scripts': ['lumbunglib-cal = lumbunglib.cloudcal:main', {'console_scripts': ['lumbunglib-cal = lumbunglib.cloudcal:main',
'lumbunglib-feed = lumbunglib.feed:main',
'lumbunglib-hash = lumbunglib.hashtag:main', 'lumbunglib-hash = lumbunglib.hashtag:main',
'lumbunglib-vid = lumbunglib.video:main']} 'lumbunglib-vid = lumbunglib.video:main']}