import argparse
import httpx
from bs4 import BeautifulSoup
import sqlite3
from urllib.parse import urlparse, urljoin

def check_title_length(soup: BeautifulSoup) -> int:
    """Score the page title by its character count.

    Args:
        soup: BeautifulSoup object representing the HTML content.

    Returns:
        10 for a 30-60 character title, 2 when shorter, 5 when longer,
        and 0 when there is no usable <title> tag.
    """
    title = soup.find("title")
    # NOTE: a bs4 Tag with no children is falsy, so an empty <title></title>
    # also scores 0 here — same as the original truthiness test.
    if not title:
        return 0
    length = len(title.text)
    if 30 <= length <= 60:
        return 10
    return 2 if length < 30 else 5

def check_meta_description(soup: BeautifulSoup) -> int:
    """Score the page's meta description tag.

    Args:
        soup: BeautifulSoup object representing the HTML content.

    Returns:
        10 for a 70-160 character description, 5 when it is too short or
        too long, 0 when the tag or its content is missing/empty.
    """
    meta_description = soup.find("meta", attrs={"name": "description"})
    # Compare against None explicitly: <meta> is a void tag with no children,
    # and a childless bs4 Tag is falsy, so `if not meta_description` treated
    # every real description tag as missing.
    if meta_description is None:
        return 0
    # .get() avoids the KeyError the old subscript raised when the tag had
    # no content attribute; a missing or empty description scores 0.
    content = meta_description.get("content")
    if not content:
        return 0
    if 70 <= len(content) <= 160:
        return 10
    return 5

def check_h1_count(soup: BeautifulSoup) -> int:
    """Score heading structure by the number of h1 tags on the page.

    Args:
        soup: BeautifulSoup object representing the HTML content.

    Returns:
        10 for exactly one h1, 5 for several, 0 for none.
    """
    h1_count = len(soup.find_all("h1"))
    if h1_count == 1:
        return 10
    return 0 if h1_count == 0 else 5

def check_image_alt_tags(soup: BeautifulSoup) -> int:
    """Score images by whether every one carries a non-empty alt attribute.

    Args:
        soup: BeautifulSoup object representing the HTML content.

    Returns:
        10 when every <img> has a truthy alt value (also when the page
        has no images at all), 0 otherwise.
    """
    # all() on an empty iterable is True, which matches the original's
    # "no images means a perfect score" behavior; a missing OR empty alt
    # fails the check, exactly like the original `not img.get("alt")` test.
    return 10 if all(img.get("alt") for img in soup.find_all("img")) else 0

def find_broken_links(url: str, soup: BeautifulSoup) -> int:
    """Score the page by the number of broken outgoing links.

    Issues one HEAD request per unique HTTP(S) link on the page and
    counts those that fail or answer with a 4xx/5xx status.

    Args:
        url: The URL of the page, used as the base for relative hrefs.
        soup: BeautifulSoup object representing the HTML content.

    Returns:
        10 with no broken links, 5 with one to five, 0 with more.
    """
    checked: set[str] = set()
    broken_link_count = 0
    for link in soup.find_all("a", href=True):
        absolute_url = urljoin(url, link["href"])
        # Only HTTP(S) targets can be probed; this skips mailto:,
        # javascript:, tel: etc., which previously always counted as broken.
        if urlparse(absolute_url).scheme not in ("http", "https"):
            continue
        # Request each distinct URL once, not once per occurrence on the page.
        if absolute_url in checked:
            continue
        checked.add(absolute_url)
        try:
            # follow_redirects so a 301/302 chain is judged by its final status.
            response = httpx.head(absolute_url, timeout=5, follow_redirects=True)
            if response.status_code >= 400:
                broken_link_count += 1
        except httpx.HTTPError:
            # One handler for all httpx failures (connect errors, timeouts,
            # protocol errors). The original's `except httpx.TimeoutError`
            # referenced a name that does not exist in httpx — evaluating that
            # clause raised AttributeError instead of counting the timeout.
            broken_link_count += 1
        except Exception:
            # Best-effort: any other failure still just counts as broken.
            broken_link_count += 1
    if broken_link_count == 0:
        return 10
    if broken_link_count <= 5:
        return 5
    return 0

def analyze_seo(url: str) -> tuple[int, list[str]]:
    """Analyze the on-page SEO of a given URL.

    Fetches the page, runs each individual check (title, meta
    description, h1 usage, image alt text, broken links) and sums the
    per-check scores.

    Args:
        url: The URL to analyze.

    Returns:
        A tuple of the total SEO score (0-50: five checks worth up to 10
        points each) and a list of human-readable recommendations.
        On fetch failure the score is 0 and the list holds one error message.
    """
    try:
        # follow_redirects so we analyze the real page, not a 3xx stub.
        response = httpx.get(url, timeout=10, follow_redirects=True)
        response.raise_for_status()
        soup = BeautifulSoup(response.text, "html.parser")

        title_score = check_title_length(soup)
        description_score = check_meta_description(soup)
        h1_score = check_h1_count(soup)
        image_alt_score = check_image_alt_tags(soup)
        broken_link_score = find_broken_links(url, soup)

        total_score = (
            title_score + description_score + h1_score + image_alt_score + broken_link_score
        )

        recommendations = []
        if title_score < 10:
            recommendations.append(
                "Optimize title tag length (30-60 characters)."
            )
        if description_score < 10:
            recommendations.append(
                "Optimize meta description length (70-160 characters)."
            )
        if h1_score < 10:
            recommendations.append("Use only one h1 tag per page.")
        if image_alt_score < 10:
            recommendations.append("Add alt tags to all images.")
        if broken_link_score < 10:
            recommendations.append("Fix broken links on the page.")

        return total_score, recommendations

    # Narrow handlers must precede httpx.HTTPError, the base class of every
    # httpx error — the original listed HTTPError first, which made the other
    # clauses unreachable, and referenced httpx.TimeoutError, which does not
    # exist (the real name is TimeoutException).
    except httpx.TimeoutException as e:
        return 0, [f"Timeout Error: {e}"]
    except httpx.ConnectError as e:
        return 0, [f"Connection Error: {e}"]
    except httpx.HTTPError as e:
        return 0, [f"HTTP Error: {e}"]
    except Exception as e:
        return 0, [f"An unexpected error occurred: {e}"]

if __name__ == "__main__":
    # CLI entry point: seo_checker.py <url>
    parser = argparse.ArgumentParser(description="SEO Checker CLI Tool")
    parser.add_argument("url", help="The URL to check")
    cli_args = parser.parse_args()

    seo_score, tips = analyze_seo(cli_args.url)

    # Five checks at up to 10 points each, hence the /50 denominator.
    print(f"SEO Score: {seo_score}/50")
    if not tips:
        print("\nNo SEO issues found!")
    else:
        print("\nRecommendations:")
        for tip in tips:
            print(f"- {tip}")