#!/usr/bin/env python3
"""
CCLS High-Speed Downloader (Silent Version with Ctrl+Z stop)
--------------------------------------------
A high-performance Python script for downloading games from CCLS with improved
connection handling, retry logic, resume capabilities, and URL encoding fixes,
with minimal output to keep the console clean. Uses Ctrl+Z to stop downloads.
"""

import os
import sys
import time
import requests
import random
import msvcrt  # For Windows key detection
import urllib.parse
from datetime import datetime, timedelta
from threading import Thread, Event

# Constants for downloading
CHUNK_SIZE = 1024 * 1024  # 1MB chunks for better performance
USER_AGENT = "CCLS-CLI/2.0 (Python Downloader)"
MAX_RETRIES = 5  # Increased from 3
CONNECT_TIMEOUT = 30  # Connection timeout in seconds
READ_TIMEOUT = 60  # Read timeout in seconds
RETRY_BASE_DELAY = 2  # Base delay for exponential backoff
RETRY_MAX_DELAY = 60  # Maximum delay between retries
DEBUG_MODE = False  # Set to True to enable debug messages
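
# With these settings, each retry waits min(RETRY_BASE_DELAY * 2**attempt + jitter,
# RETRY_MAX_DELAY) seconds before the next attempt, i.e. roughly 2s, 4s, 8s and 16s
# of backoff (plus up to 1s of random jitter) across the five attempts; see the
# retry loops below.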

# Global event for signaling cancellation between threads
cancellation_event = Event()

# Thread for checking for Ctrl+Z key press
def key_monitor():
    """Monitor for Ctrl+Z (ASCII 26) key press to cancel downloads"""
    while not cancellation_event.is_set():
        if msvcrt.kbhit():
            key = msvcrt.getch()
            # Check for Ctrl+Z (ASCII 26, or SUB character)
            if key == b'\x1a':
                print("\nCtrl+Z detected - Cancelling download and removing partial file...")
                cancellation_event.set()
                break
        time.sleep(0.1)  # Small sleep to prevent CPU hogging

def debug_print(message):
    """Print only if debug mode is enabled"""
    if DEBUG_MODE:
        print(message)

def format_size(size_bytes):
    """Format bytes into human-readable format"""
    if size_bytes >= 1_000_000_000:  # GB
        return f"{size_bytes / 1_000_000_000:.2f} GB"
    elif size_bytes >= 1_000_000:  # MB
        return f"{size_bytes / 1_000_000:.2f} MB"
    elif size_bytes >= 1000:  # KB
        return f"{size_bytes / 1000:.2f} KB"
    else:
        return f"{size_bytes} B"

def format_time(seconds):
    """Format seconds into HH:MM:SS format"""
    hours = seconds // 3600
    minutes = (seconds % 3600) // 60
    secs = seconds % 60
    if hours > 0:
        return f"{hours:02d}:{minutes:02d}:{secs:02d}"
    else:
        return f"{minutes:02d}:{secs:02d}"

def normalize_url(url):
    """
    Normalize URL by ensuring proper encoding of special characters
    while preserving URL structure and query parameters
    """
    try:
        # Parse the URL into components
        parsed = urllib.parse.urlparse(url)
        
        # Handle the path - split by '/' and encode each segment
        if parsed.path:
            path_parts = parsed.path.split('/')
            encoded_parts = []
            
            for part in path_parts:
                if part:  # Skip empty parts
                    # Decode first so an already-encoded segment is not
                    # double-encoded, then re-encode it
                    decoded_part = urllib.parse.unquote(part)
                    encoded_parts.append(urllib.parse.quote(decoded_part, safe=''))
                else:
                    encoded_parts.append('')
            
            encoded_path = '/'.join(encoded_parts)
        else:
            encoded_path = parsed.path
        
        # Handle query parameters
        if parsed.query:
            # Parse query parameters and re-encode them properly
            query_params = urllib.parse.parse_qs(parsed.query, keep_blank_values=True)
            encoded_query_parts = []
            
            for key, values in query_params.items():
                for value in values:
                    encoded_key = urllib.parse.quote_plus(str(key))
                    encoded_value = urllib.parse.quote_plus(str(value))
                    encoded_query_parts.append(f"{encoded_key}={encoded_value}")
            
            encoded_query = '&'.join(encoded_query_parts)
        else:
            encoded_query = parsed.query
        
        # Reconstruct the URL
        normalized_url = urllib.parse.urlunparse((
            parsed.scheme,
            parsed.netloc,
            encoded_path,
            parsed.params,
            encoded_query,
            parsed.fragment
        ))
        
        return normalized_url
        
    except Exception as e:
        debug_print(f"Error normalizing URL: {str(e)}")
        # Return original URL if normalization fails
        return url

def get_file_size(url, headers):
    """Get the size of the file to be downloaded"""
    # First, make sure the URL is properly encoded
    normalized_url = normalize_url(url)
    
    for attempt in range(MAX_RETRIES):
        try:
            # Only fetch the headers; don't download the content yet
            response = requests.head(normalized_url, headers=headers, 
                                 timeout=(CONNECT_TIMEOUT, READ_TIMEOUT),
                                 allow_redirects=True)
            response.raise_for_status()
            
            # Return the content length if available
            content_length = response.headers.get('content-length')
            if content_length:
                return int(content_length)
            
            # If we can't get the size via HEAD request, return None
            return None
        
        except (requests.exceptions.RequestException, IOError) as e:
            delay = min(RETRY_BASE_DELAY * (2 ** attempt) + random.uniform(0, 1), RETRY_MAX_DELAY)
            if attempt < MAX_RETRIES - 1:
                debug_print(f"Error getting file size: {str(e)}. Retrying in {delay:.1f} seconds...")
                time.sleep(delay)
            else:
                debug_print(f"Failed to get file size after {MAX_RETRIES} attempts: {str(e)}")
                return None

def can_resume(url, headers):
    """Check if the server supports resuming downloads"""
    normalized_url = normalize_url(url)
    
    try:
        # Add range header to check if the server supports it
        resume_headers = headers.copy()
        resume_headers['Range'] = 'bytes=0-0'
        
        response = requests.head(normalized_url, headers=resume_headers, 
                             timeout=(CONNECT_TIMEOUT, READ_TIMEOUT),
                             allow_redirects=True)
        
        # If we get a 206 status, the server supports resume
        return response.status_code == 206
    except Exception as e:
        debug_print(f"Error checking resume capability: {str(e)}")
        # If there's any error, assume we can't resume, to be safe
        return False

def download_file(url, destination, game_name, game_id, expected_size=None):
    """Download a file with progress tracking and resume support"""
    global cancellation_event
    cancellation_event.clear()  # Reset the cancellation event
    
    # Start key monitoring thread
    key_thread = Thread(target=key_monitor)
    key_thread.daemon = True
    key_thread.start()
    
    # Print information about stopping
    print("\nDownload started! Press Ctrl+Z to stop the download at any time.\n")
    
    headers = {
        "User-Agent": USER_AGENT,
        "Connection": "keep-alive",
        "Accept-Encoding": "gzip, deflate"
    }
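    # Note: requests transparently decompresses gzip/deflate bodies when read through
    # iter_content(), so if a host actually applies Content-Encoding to these files the
    # byte counts used for Range resumes may not match the server's offsets; archives
    # are normally served without Content-Encoding, so this is rarely an issue.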
    
    # Normalize the URL to handle special characters
    normalized_url = normalize_url(url)
    debug_print(f"Using normalized URL: {normalized_url}")
    
    # Get the file size if not provided
    total_size = expected_size
    if total_size is None:
        total_size = get_file_size(normalized_url, headers)

    # Check if we can resume
    supports_resume = can_resume(normalized_url, headers)
    debug_print(f"Server {'supports' if supports_resume else 'does not support'} resume capability.")
    
    # Check if the file already exists and if we should resume
    downloaded = 0
    if os.path.exists(destination) and supports_resume:
        downloaded = os.path.getsize(destination)
        if total_size and downloaded >= total_size:
            print(f"\nFile already completely downloaded: {destination}")
            cancellation_event.set()  # Signal to stop key monitor thread
            return True
        elif downloaded > 0:
            print(f"\nResuming download from {format_size(downloaded)}")
            headers['Range'] = f'bytes={downloaded}-'
    
    # Prepare for progress tracking
    start_time = time.time()
    init_downloaded = downloaded  # Keep the initial download amount for speed calculation
    last_update_time = start_time
    last_downloaded = downloaded  # For calculating current download speed
    
    # Setup for retrying connection
    for attempt in range(MAX_RETRIES):
        # Check if download was cancelled by user
        if cancellation_event.is_set():
            print("\nDownload cancelled by user. Removing partial file...")
            try:
                if os.path.exists(destination):
                    os.remove(destination)
                    print(f"Partial file deleted successfully.")
            except Exception as e:
                print(f"Note: Could not delete partial file: {str(e)}")
            return False
        
        try:
            # Open the file in append mode if resuming, otherwise write mode
            file_mode = 'ab' if downloaded > 0 else 'wb'
            
            with requests.get(normalized_url, headers=headers, stream=True, 
                             timeout=(CONNECT_TIMEOUT, READ_TIMEOUT),
                             allow_redirects=True) as response:
                response.raise_for_status()
                
                # If we requested a range but got the whole file, adjust our counter
                if downloaded > 0 and response.status_code != 206:
                    debug_print("Warning: Server doesn't support resuming. Starting from beginning.")
                    downloaded = 0
                    file_mode = 'wb'
                
                # Update total_size if we can get it from headers
                if total_size is None and 'content-length' in response.headers:
                    total_size = int(response.headers['content-length']) + downloaded
                
                print(f"[Download Progress - {game_name} ({game_id})]")
                
                with open(destination, file_mode) as f:
                    for chunk in response.iter_content(chunk_size=CHUNK_SIZE):
                        # Check for cancellation
                        if cancellation_event.is_set():
                            print("\nDownload cancelled by user. Removing partial file...")
                            f.close()  # Close file handle before deleting
                            try:
                                os.remove(destination)
                                print(f"Partial file deleted successfully.")
                            except Exception as e:
                                print(f"Note: Could not delete partial file: {str(e)}")
                            return False
                            
                        if chunk:
                            f.write(chunk)
                            downloaded += len(chunk)
                            current_time = time.time()
                            
                            # Update progress every 0.5 seconds
                            if (current_time - last_update_time) >= 0.5:
                                # Calculate progress values
                                elapsed_time = current_time - start_time
                                progress_percent = int((downloaded / total_size) * 100) if total_size and total_size > 0 else 0

                                # Calculate overall average speed
                                avg_download_speed = (downloaded - init_downloaded) / elapsed_time if elapsed_time > 0 else 0
                                avg_download_speed_mbps = avg_download_speed * 8 / 1024 / 1024  # Convert to Mbps

                                # Calculate current window speed (bytes since the last update)
                                window_duration = current_time - last_update_time
                                current_window_size = downloaded - last_downloaded
                                current_speed = current_window_size / window_duration if window_duration > 0 else 0
                                current_speed_mbps = current_speed * 8 / 1024 / 1024  # Convert to Mbps
                                last_downloaded = downloaded
                                last_update_time = current_time
                                
                                # Calculate remaining time based on average speed
                                remaining_bytes = total_size - downloaded if total_size else 0
                                if avg_download_speed > 0 and remaining_bytes > 0:
                                    remaining_seconds = int(remaining_bytes / avg_download_speed)
                                else:
                                    remaining_seconds = 0
                                
                                # Simple output - replace previous line with new status
                                # Carriage return to move cursor to beginning of line
                                sys.stdout.write("\r" + " " * 80 + "\r")  # Clear line
                                
                                # Print progress information
                                if total_size and total_size > 0:
                                    prog_str = f"Progress: {progress_percent}% | "
                                else:
                                    prog_str = ""
                                
                                # Show actual progress info
                                size_info = f"of {format_size(total_size)}" if total_size else ""
                                status = f"{prog_str}Downloaded: {format_size(downloaded)} {size_info} | Speed: {avg_download_speed_mbps:.2f} Mbps"
                                sys.stdout.write(status)
                                sys.stdout.flush()
            
            # Final update
            elapsed_time = time.time() - start_time
            elapsed_seconds = int(elapsed_time)
            avg_download_speed = (downloaded - init_downloaded) / elapsed_time if elapsed_time > 0 else 0
            avg_download_speed_mbps = avg_download_speed * 8 / 1024 / 1024
            
            # Print final stats on new lines
            print("\n\nDownload completed successfully!")
            print(f"Total Size: {format_size(downloaded)}")
            print(f"Average Speed: {avg_download_speed_mbps:.2f} Mbps")
            print(f"Time Elapsed: {format_time(elapsed_seconds)}")
            
            # Signal to stop key monitor thread
            cancellation_event.set()
            return True
            
        except (requests.exceptions.Timeout, requests.exceptions.ConnectionError) as e:
            # Handle connection timeout with exponential backoff retry
            if cancellation_event.is_set():
                print("\nDownload cancelled by user. Removing partial file...")
                try:
                    if os.path.exists(destination):
                        os.remove(destination)
                        print(f"Partial file deleted successfully.")
                except Exception as ex:
                    print(f"Note: Could not delete partial file: {str(ex)}")
                return False
                
            delay = min(RETRY_BASE_DELAY * (2 ** attempt) + random.uniform(0, 1), RETRY_MAX_DELAY)
            if attempt < MAX_RETRIES - 1:
                print(f"\nConnection timed out or lost: {str(e)}. Retrying in {delay:.1f} seconds...")
                print(f"Downloaded so far: {format_size(downloaded)}")
                time.sleep(delay)
                # Update headers for resuming from the current position
                headers['Range'] = f'bytes={downloaded}-'
                last_update_time = time.time()  # Reset the update timer
                # Print a new header for the next attempt
                print(f"\n[Download Progress - {game_name} ({game_id}) - Attempt {attempt+2}]")
            else:
                print(f"\nDownload failed after {MAX_RETRIES} attempts: {str(e)}")
                print(f"Note: Partial file will be removed.")
                try:
                    if os.path.exists(destination):
                        os.remove(destination)
                        print(f"Partial file deleted successfully.")
                except Exception as ex:
                    print(f"Note: Could not delete partial file: {str(ex)}")
                cancellation_event.set()  # Signal to stop key monitor thread
                return False
            
        except requests.exceptions.RequestException as e:
            # Handle other request exceptions
            if cancellation_event.is_set():
                print("\nDownload cancelled by user. Removing partial file...")
                try:
                    if os.path.exists(destination):
                        os.remove(destination)
                        print(f"Partial file deleted successfully.")
                except Exception as ex:
                    print(f"Note: Could not delete partial file: {str(ex)}")
                return False
                
            if attempt < MAX_RETRIES - 1:
                delay = min(RETRY_BASE_DELAY * (2 ** attempt) + random.uniform(0, 1), RETRY_MAX_DELAY)
                print(f"\nDownload error: {str(e)}. Retrying in {delay:.1f} seconds...")
                time.sleep(delay)
                # Update headers for resuming from the current position
                headers['Range'] = f'bytes={downloaded}-'
                last_update_time = time.time()
                # Print a new header for the next attempt
                print(f"\n[Download Progress - {game_name} ({game_id}) - Attempt {attempt+2}]")
            else:
                print(f"\nDownload failed after {MAX_RETRIES} attempts: {str(e)}")
                print(f"Note: Partial file will be removed.")
                try:
                    if os.path.exists(destination):
                        os.remove(destination)
                        print(f"Partial file deleted successfully.")
                except Exception as ex:
                    print(f"Note: Could not delete partial file: {str(ex)}")
                cancellation_event.set()  # Signal to stop key monitor thread
                return False
        
        except IOError as e:
            # Handle file I/O errors
            print(f"\nFile I/O error: {str(e)}")
            print(f"Note: Partial file will be removed.")
            try:
                if os.path.exists(destination):
                    os.remove(destination)
                    print(f"Partial file deleted successfully.")
            except Exception as ex:
                print(f"Note: Could not delete partial file: {str(ex)}")
            cancellation_event.set()  # Signal to stop key monitor thread
            return False
    
    cancellation_event.set()  # Signal to stop key monitor thread
    return False

def main():
    """Main entry point for the script"""
    # Check if we have enough arguments
    if len(sys.argv) < 5:
        print("Usage: python ccls_downloader.py <download_url> <output_file> <game_name> <game_id> [<total_size_bytes>]")
        return 1
    
    try:
        download_url = sys.argv[1]
        output_file = sys.argv[2]
        game_name = sys.argv[3]
        game_id = sys.argv[4]
        
        debug_print(f"Download URL: {download_url}")
        debug_print(f"Output file: {output_file}")
        debug_print(f"Item name: {game_name}")
        debug_print(f"Item ID: {game_id}")
        
        # Optional size parameter
        expected_size = None
        if len(sys.argv) > 5:
            try:
                expected_size = int(sys.argv[5])
                debug_print(f"Expected size: {format_size(expected_size)}")
            except ValueError:
                debug_print(f"Warning: Invalid size parameter '{sys.argv[5]}', will use content-length from server")
        
        # Create output directory if it doesn't exist
        output_dir = os.path.dirname(output_file)
        if output_dir and not os.path.exists(output_dir):
            try:
                os.makedirs(output_dir)
                debug_print(f"Created output directory: {output_dir}")
            except Exception as e:
                print(f"Error: Could not create directory: {str(e)}")
                return 1
        
        # Start download
        result = download_file(download_url, output_file, game_name, game_id, expected_size)
        
        # Return success or failure code
        return 0 if result else 1
        
    except Exception as e:
        print(f"Error: Unexpected error: {str(e)}")
        if DEBUG_MODE:
            import traceback
            traceback.print_exc()
        return 1

if __name__ == "__main__":
    try:
        sys.exit(main())
    except KeyboardInterrupt:
        # This shouldn't be reached now that we use our own key handler
        print("\nKeyboard interrupt detected.")
        sys.exit(1)