import os
import time
import sys
import http.client
from urllib.parse import urlparse, parse_qs
from threading import Thread
from queue import Queue
from dotenv import load_dotenv

# Load environment variables from .env file
load_dotenv()

# Positional CLI arguments (no validation: missing args raise IndexError).
file = sys.argv[1]  # input file: one USDOT number per line (read in the __main__ loop)
undone = sys.argv[2]  # output file: USDOT values appended when the request is not HTTP 200

# Function to get server's IP address from environment variables
def get_server_ip():
    """Return the scraper server's IP address from the SERVER_IP env var.

    Exits the process with status 1 if the variable is unset or empty,
    since every request in this script is addressed to that host.
    """
    server_ip = os.getenv('SERVER_IP')
    if not server_ip:
        # Diagnostics belong on stderr, not stdout (stdout carries scrape results).
        print("SERVER_IP environment variable is not set.", file=sys.stderr)
        sys.exit(1)
    return server_ip

def doWork():
    """Worker loop: pull URLs off the shared queue forever.

    Each item is checked via getStatus() and the (status, url) pair is
    handed to doSomethingWithResult(); task_done() keeps q.join() honest.
    Runs as a daemon thread, so it never needs an explicit shutdown path.
    """
    while True:
        pending = q.get()
        result, checked_url = getStatus(pending)
        doSomethingWithResult(result, checked_url)
        q.task_done()

def getStatus(ourl):
    """GET /leads/scrapper.php?usdot=<id> on the configured server.

    Extracts the `usdot` query value from *ourl* and issues the request
    against the module-global `server_ip`.

    Returns:
        (int, str): HTTP status code and the original URL on success.
        ("error", str): the sentinel string and the original URL on any failure.
    """
    # Bug fix: `conn` must exist before the try block, otherwise a failure in
    # HTTPConnection(...) (or earlier) makes the `finally` clause raise
    # UnboundLocalError, masking the intended ("error", ourl) return.
    conn = None
    try:
        usdot_value = parse_qs(urlparse(ourl).query).get('usdot', [None])[0]
        conn = http.client.HTTPConnection(server_ip, timeout=10)

        conn.request("GET", f"/leads/scrapper.php?usdot={usdot_value}")
        response = conn.getresponse()

        return response.status, ourl
    except Exception as e:
        # Broad catch is deliberate: any failure (DNS, timeout, parse) marks
        # the URL for retry via the `undone` file rather than killing a worker.
        print(f"Request error: {e}")
        return "error", ourl
    finally:
        if conn:
            conn.close()

def doSomethingWithResult(status, url):
    """Log the outcome; record failed lookups for a later retry pass.

    Prints "<status> <url>" for every result. When the status is anything
    other than 200 (including the "error" sentinel from getStatus), the
    usdot value parsed from the URL's query string is appended, one per
    line, to the module-global `undone` file.
    """
    print(status, url)
    if status == 200:
        return
    usdot = urlparse(url).query.split('=')[1]
    with open(undone, "a") as sink:
        sink.write(usdot + "\n")

def get_number_of_threads_per_loop():
    """Pick a worker-thread count from the host's free memory.

    Reads MemFree (in kB) from /proc/meminfo and maps it onto a thread
    count via a descending threshold table. The original if/elif ladder
    had a redundant duplicate tier (>5,000,000 and >4,000,000 both
    returned 70); the table below is behaviorally identical without it.

    Returns:
        int: thread count; 5 if /proc/meminfo is missing or unparsable
        (e.g. on non-Linux hosts).
    """
    # (minimum free kB, threads) — first matching tier wins.
    tiers = (
        (4_000_000, 70),
        (3_000_000, 60),
        (2_000_000, 50),
        (1_000_000, 40),
        (500_000, 30),
        (250_000, 20),
    )
    try:
        with open('/proc/meminfo') as f:
            meminfo = f.read()
        free_memory = int([line for line in meminfo.splitlines() if 'MemFree:' in line][0].split()[1])
        for minimum, threads in tiers:
            if free_memory > minimum:
                return threads
        # NOTE(review): the below-250MB fallback (25) is HIGHER than the
        # 250MB tier (20) — looks inverted, but preserved as-is; confirm intent.
        return 25
    except (IndexError, ValueError, IOError):
        return 5

if __name__ == "__main__":
    # Resolve the target host up front; workers read this module global.
    server_ip = get_server_ip()

    concurrent = get_number_of_threads_per_loop()
    print('Concurrent level ' + str(concurrent))

    # Bounded queue so the file reader can't race arbitrarily far ahead
    # of the worker pool.
    q = Queue(concurrent * 2)
    for _ in range(concurrent):
        worker = Thread(target=doWork, daemon=True)
        worker.start()

    try:
        with open(file) as source:
            for line_no, raw in enumerate(source, start=1):
                target = f'http://{server_ip}/leads/scrapper.php?usdot={raw.strip()}'
                # Every 1000 URLs, pause to let the server breathe.
                if line_no % 1000 == 0:
                    print(f"Idx : {line_no}, New URL : {target}, Sleep Activated")
                    time.sleep(15)
                q.put(target)
        # Block until every queued URL has been processed by a worker.
        q.join()
    except IOError:
        print(f"Error opening file: {file}")
        sys.exit(1)
    except KeyboardInterrupt:
        sys.exit(1)
