Automated downloading of videos from put.io

I wanted to automatically download new videos from put.io as they were added to my account, so I took a look at their API and built a Python script to do it. The script descends into the parent folder and any child folders, looking for video files that are above a certain size and don’t contain the word “sample” in their names. After all the videos that meet those criteria have been downloaded (using aria2c), the script deletes all the folders and cleans the History and Transfers tabs.
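
For reference, the selection rule applied to each entry returned by the files/list call boils down to something like this (a minimal sketch; the field names and the 50 MB threshold match the ones used in the full script below):

def is_wanted_video(entry):
    # Keep videos over ~50 MB whose name doesn't contain "sample"
    return (entry['file_type'] == 'VIDEO'
            and 'sample' not in entry['name']
            and entry['size'] > 50000000)

# Example entry shaped like the ones the script reads (id, name, size, file_type)
print(is_wanted_video({'id': 1, 'name': 'episode.mkv',
                       'file_type': 'VIDEO', 'size': 700000000}))  # True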

Here is the script:

import requests
import json
import time
import subprocess
import os
import sys
import datetime

# put.io monitoring and downloading

# Lock file used to stop overlapping runs
file = "/tmp/tv_download"
one_day = datetime.datetime.now() - datetime.timedelta(days=1)

# If the lock file exists, exit unless it is stale
if os.path.isfile(file):
    filetime = datetime.datetime.fromtimestamp(os.path.getctime(file))

    # A lock newer than one day means another run is still in progress
    if filetime > one_day:
        sys.exit(2)

    # Otherwise the lock is stale; remove it and carry on
    os.remove(file)

# Create the lock file
use_file = open(file, "w")
use_file.write("In use")
use_file.close()

# Base URL and OAUTH token
url = "https://api.put.io/v2/"
oauth = "?oauth_token=<INSERT OAUTH TOKEN>"
headers = {'Accept': 'application/json', 'Content-Type': 'application/json'}
file_urls = {}

# Recursively add {file_id: download_url} entries for the files we want to file_urls
def get_video_urls(file):

    # If the file type is a FOLDER, recursively descend into it
    if file['file_type'] == 'FOLDER':
        
        # Grab the folder id
        folder_id = str(file['id'])

        # If it's not an empty folder
        if file['size'] != 0:

            # Get the list of children in the folder
            folder_list = requests.get(url + "files/list" + oauth,
                                       headers=headers,
                                       params={'parent_id': folder_id}).json()

            # Process each child
            for child in folder_list['files']:
                get_video_urls(child)
                
        # If it is empty, add it to the file_urls dict and mark it as null for deletion        
        else:
            file_urls.update({folder_id: "null"})
    
    # If it's a video we want (big enough and not a sample)
    if file['file_type'] == "VIDEO" and "sample" not in file['name'] and file['size'] > 50000000:

        # Grab its ID
        video_id = str(file['id'])

        # Get the download URL; requests.head doesn't follow the redirect,
        # so the Location header holds the direct link
        video_url_request = requests.head(url + "files/" + video_id + "/download" + oauth, headers=headers)
        video_url_headers = video_url_request.headers
        video_url = str(video_url_headers['Location'])

        # Add the ID and URL to file_urls{}
        file_urls.update({video_id: video_url})
    
    # Else it's a junk file, or a folder we've finished descending into
    else:

        # Grab its ID and mark it as null for deletion
        file_id = str(file['id'])
        file_urls.update({file_id: "null"})

# Get files/folder in the specific folder given
other_child_list = requests.get(url + "files/list" + oauth,
                                headers=headers,
                                params={'parent_id':'<FOLDER ID>'}).json()

# Kick off the descending into the "Other" folder
for child in other_child_list['files']:
    get_video_urls(child)

# For each entry in the file_urls dict (keys are file IDs, values are URLs or "null")
for file_url in file_urls:

    # If we have a video file to download
    if file_urls.get(file_url) != "null":
        
        # Download it with aria2c and store the return code
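        # (-c resumes partial downloads, -q runs quietly, -x8 opens up to eight connections, -d sets the download directory)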
        return_code = subprocess.call(["aria2c", "-c", "-q", "-x8", "-d",
                                       "/downloads", "--log-level=error", file_urls.get(file_url)])
        time.sleep(30)

        # If aria2c completed successfully delete the file
        if return_code == 0:
            file_deletion_data = {'file_ids': str(file_url)}
            file_delete_request = requests.post(url + "files/delete" + oauth,
                                                headers=headers,
                                                data=json.dumps(file_deletion_data))
    
    # Else it's a junk file or folder marked for deletion
    else:
        file_deletion_data = {'file_ids': str(file_url)}
        file_delete_request = requests.post(url + "files/delete" + oauth,
                                            headers=headers,
                                            data=json.dumps(file_deletion_data))

# Pause to let the put.io API catch up
time.sleep(5)

# Clean the history tab
requests.post(url + "events/delete" + oauth)

# Pause again to let the request kick in
time.sleep(5)

# Clean the transfers tab
requests.post(url + "transfers/clean" + oauth)

# Remove the lock file when we are done
os.remove(file)