forked from ruangrupa/konfluks
vid: remove all vids if API down
parent 8c4a36791f
commit bac9bbd7b3
@@ -102,52 +102,60 @@ def main():
     v = peertube.VideoApi(client)
     count = 100
     page = 0
+    try:
         response = v.videos_get(count=count, filter="local", tags_one_of="publish", start=page)
         videos = response.to_dict()
         total = videos['total']
         videos = videos['data']
         total -= count
         if total > 0:
             to_download = total // count
             last_page = total % count
             for i in range(to_download):
                 page += 1
                 response = v.videos_get(count=count, filter="local", tags_one_of="publish", start=page)
                 videos += response.to_dict()['data']
             if last_page > 0:
                 page += 1
                 response = v.videos_get(count=count, filter="local", tags_one_of="publish", start=page)
                 videos += response.to_dict()['data'][-1*last_page:]
 
 
         output_dir = os.environ.get("OUTPUT_DIR")
 
         if not os.path.exists(output_dir):
             os.mkdir(output_dir)
 
         existing_posts = os.listdir(output_dir)
 
         for video_metadata in videos:
             post_name = slugify(video_metadata["name"]) + "-" + video_metadata["uuid"]
             post_dir = os.path.join(output_dir, post_name)
 
             if (
                 post_name not in existing_posts
             ):  # if there is a video we dont already have, make it
                 print(
                     "New: ", video_metadata["name"], "({})".format(video_metadata["uuid"])
                 )
                 create_post(post_dir, video_metadata, host)
 
             elif (
                 post_name in existing_posts
             ):  # if we already have the video do nothing, possibly update
                 update_post(post_dir, video_metadata, host)
                 existing_posts.remove(
                     post_name
                 )  # create list of posts which have not been returned by peertube
 
+    except:
+        print("didn't get a response from peertube, instance might have been taken down or made private. removing all posts.")
+        output_dir = os.environ.get("OUTPUT_DIR")
+        if not os.path.exists(output_dir):
+            os.mkdir(output_dir)
+        existing_posts = os.listdir(output_dir)
+
     for post in existing_posts:
         print("deleted", post)  # rm posts not returned
         shutil.rmtree(os.path.join(output_dir, post))
 
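For context, a minimal sketch of the fallback this commit introduces, collapsed into one helper: pagination is omitted, the fetch_videos_or_purge name is illustrative rather than part of konfluks, and v is assumed to be the peertube.VideoApi instance created earlier in main().

    import os
    import shutil

    def fetch_videos_or_purge(v, output_dir):
        # v is a peertube.VideoApi instance, output_dir the Hugo content dir
        try:
            response = v.videos_get(count=100, filter="local", tags_one_of="publish")
            return response.to_dict()["data"]
        except Exception:
            # same idea as the new except branch: the instance is unreachable
            # (taken down or made private), so every generated post is removed
            print("didn't get a response from peertube, removing all posts.")
            if not os.path.exists(output_dir):
                os.mkdir(output_dir)
            for post in os.listdir(output_dir):
                print("deleted", post)
                shutil.rmtree(os.path.join(output_dir, post))
            return []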