import socket
import requests
from textblob import TextBlob
from datetime import datetime, timedelta, timezone

def start_server():
    """Starts the server that waits for incoming connections."""
    server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server_socket.bind(('127.0.0.1', 65432))
    server_socket.listen(1)
    print("Python server started and waiting for connections...")

    while True:
        client_socket, addr = server_socket.accept()
        print(f"Connection from {addr}")
        
        try:
            data = client_socket.recv(1024)
            data = data.decode('utf-8', errors='ignore').strip()
            print(f"Received data: {data}")

            inputs = data.split(',')
            if len(inputs) != 8:
                raise ValueError(f"Expected eight comma-separated inputs, got {len(inputs)}")

            (symbol, twitter_api_key, twitter_api_secret, twitter_access_token,
             twitter_access_token_secret, twitter_bearer_token, client_id, client_secret) = inputs

            result = process_data(symbol, twitter_bearer_token)
            result_string = f"{result['tweet_sentiment']}"
            client_socket.sendall(result_string.encode('utf-8'))
            print(f"Response sent to client: {result_string}")
        except Exception as e:
            print(f"Communication error: {e}")
            error_message = f"ERROR,{str(e)}"
            client_socket.sendall(error_message.encode('utf-8'))
        finally:
            try:
                client_socket.shutdown(socket.SHUT_RDWR)
            except OSError:
                pass  # the peer may have already closed its end of the connection
            client_socket.close()
            print("Connection closed")
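
# A minimal client sketch for exercising the server above, assuming it is already running on
# 127.0.0.1:65432. The function name and the placeholder credential strings are illustrative;
# the server replies with either the average tweet sentiment or an "ERROR,<message>" string.
def example_client(symbol="AAPL"):
    """Sends one sample request to the local server and prints the reply (testing sketch)."""
    payload = ",".join([
        symbol,
        "TWITTER_API_KEY",             # placeholder credentials; only the bearer token is
        "TWITTER_API_SECRET",          # actually used by the server's Twitter lookup
        "TWITTER_ACCESS_TOKEN",
        "TWITTER_ACCESS_TOKEN_SECRET",
        "TWITTER_BEARER_TOKEN",
        "CLIENT_ID",
        "CLIENT_SECRET",
    ])
    with socket.create_connection(("127.0.0.1", 65432)) as sock:
        sock.sendall(payload.encode("utf-8"))
        reply = sock.recv(1024).decode("utf-8", errors="ignore")
        print(f"Server replied: {reply}")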

def analyze_tweets(bearer_token, symbol):
    """Analyzes recent tweets related to the given symbol."""
    try:
        headers = {
            'Authorization': f'Bearer {bearer_token}',
        }
        query = f"{symbol} lang:en -is:retweet"

        # Twitter's recent search requires end_time to be at least ~10 seconds in the past,
        # so search a 4-hour window that ends just before the current time.
        end_time = datetime.now(timezone.utc) - timedelta(seconds=10)
        start_time = end_time - timedelta(hours=4)

        # Convert to RFC 3339 (ISO 8601) format with second precision and 'Z' at the end
        start_time_str = start_time.strftime('%Y-%m-%dT%H:%M:%SZ')
        end_time_str = end_time.strftime('%Y-%m-%dT%H:%M:%SZ')
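        # e.g. '2025-01-15T08:30:00Z' (second precision, UTC 'Z' suffix)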

        # Let requests handle URL-encoding of the query and time-window parameters
        search_url = "https://api.twitter.com/2/tweets/search/recent"
        params = {
            'query': query,
            'max_results': 100,
            'start_time': start_time_str,
            'end_time': end_time_str,
            'sort_order': 'relevancy',
        }

        print(f"Performing tweet search with query: {query}")
        print(f"Request URL: {search_url} with params: {params}")

        response = requests.get(search_url, headers=headers, params=params)
        print(f"Response status code: {response.status_code}")
        print(f"Response text: {response.text}")

        if response.status_code != 200:
            raise Exception(f"Error searching tweets: {response.status_code} - {response.text}")

        tweets = response.json().get('data', [])
        if not tweets:
            print("No tweets found")
            return 0
        
        sentiments = [TextBlob(tweet['text']).sentiment.polarity for tweet in tweets]
        if not sentiments:
            print("No sentiments found")
            return 0
        
        average_sentiment = sum(sentiments) / len(sentiments)
        return average_sentiment
    except Exception as e:
        print(f"Error: {e}")
        raise Exception(f"Error analyzing tweets: {e}")
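
# A small manual-test sketch for analyze_tweets, assuming a valid bearer token is exported
# in the TWITTER_BEARER_TOKEN environment variable (the variable name and default symbol
# are illustrative). TextBlob polarity ranges from -1.0 (negative) to 1.0 (positive), so
# the printed average falls in that interval.
def example_sentiment_check(symbol="AAPL"):
    """Runs a one-off sentiment query outside the socket server (testing sketch)."""
    import os  # local import so the module's top-level imports stay unchanged
    bearer_token = os.environ["TWITTER_BEARER_TOKEN"]
    score = analyze_tweets(bearer_token, symbol)
    print(f"Average polarity for {symbol} over the last 4 hours: {score:.3f}")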

def process_data(symbol, bearer_token):
    """Processes the tweet-sentiment data for the given symbol."""
    result = {"tweet_sentiment": 0}

    try:
        result["tweet_sentiment"] = analyze_tweets(bearer_token, symbol)
    except Exception as e:
        raise Exception(f"Error processing data: {e}")
    
    print(f"Data processed. Result: {result}")
    return result

if __name__ == "__main__":
    start_server()
