# Fetch Live Option Chain Using Kotak Neo API

					import neo_api_client
from neo_api_client import NeoAPI

					def create_exchange_tokens_live(df, custom_segment_name):
    exch_tokens = []

    for index, row in df.iterrows():
        # Use the custom_segment_name provided and add it to the end
        token_dict = {
            "instrument_token": str(row['pSymbol']),
            "exchange_segment": custom_segment_name.lower() 


    return exch_tokens
					def create_exchange_tokens(data, custom_segment_name):
    exch_tokens = []

    for symbol in data:
        token_dict = {
            "instrument_token": symbol,
            "exchange_segment": custom_segment_name.lower()


    return exch_tokens
					def filter_symbols(df, underlying, expiry_date):
    # Convert 'pExpiryDate' column to datetime format
    df['pExpiryDate'] = pd.to_datetime(df['pExpiryDate'])

    # Convert the input expiry_date to datetime format
    expiry_date = pd.to_datetime(expiry_date, format='%Y-%m-%d %H:%M:%S')

    # Filter the DataFrame based on the condition
    filtered_df = df[
        (df['pTrdSymbol'].str.upper().str.contains(underlying.upper())) &
        (df['pExpiryDate'] == expiry_date)

    # Reset the index of the filtered DataFrame
    filtered_df.reset_index(drop=True, inplace=True)

    return filtered_df
					CK= "Consumerkey"
CS= "Consumer_secret"
Mob = "MobileNumber"
pwd  = "Password"
					from threading import Thread
import time

def on_close(message):
    print('[OnClose]: ', message)
# Initialize the lock
from threading import Thread

# Shared tick caches, keyed by instrument token ('tk'):
live_data = {}      # latest complete {'ltp','oi','v'} snapshot per token
previous_data = {}  # last-seen values, used to backfill fields missing from partial ticks


def on_message(message):
    """WebSocket tick callback.

    Each element of *message* is a (possibly partial) tick dict. Fields that
    are absent or None ('ltp'/'oi'/'v') are backfilled from the last values
    seen for that token — defaulting to 0 for a first-time token — so
    live_data always holds a complete snapshot.
    """
    global previous_data, live_data
    try:
        for tick in message:
            tk = tick.get('tk')
            prev = previous_data.get(tk, {})
            # BUG FIX: the original called float(None) for unseen tokens;
            # per its own comment, missing fields should default to 0.
            ltp = float(tick['ltp']) if tick.get('ltp') is not None else prev.get('ltp', 0)
            oi = float(tick['oi']) if tick.get('oi') is not None else prev.get('oi', 0)
            v = float(tick['v']) if tick.get('v') is not None else prev.get('v', 0)
            live_data[tk] = {'ltp': ltp, 'oi': oi, 'v': v}
            previous_data[tk] = {'ltp': ltp, 'oi': oi, 'v': v}
    except Exception as e:
        # Never let a malformed tick kill the WebSocket callback thread.
        print(f"Error in on_message: {e}")
def on_error(message):
    """WebSocket on-error callback: log the error payload."""
    print('[OnError]: ', message)
					client = NeoAPI(consumer_key=CK, consumer_secret=CS, 
                environment='prod', on_message=on_message, on_error=on_error, on_close=None, on_open=None)

#Initiate login by passing any of the combinations mobilenumber & password (or) pan & password (or) userid & password
# Also this will generate the OTP to complete 2FA
ret = client.login(mobilenumber=Mob, password=pwd)

token= 'TOTP'
start = client.session_2fa(OTP=token)
					import pandas as pd
import urllib.request
from io import StringIO

fp = client.scrip_master()
file_paths = fp['filesPaths'] 
# Initialize an empty list to store DataFrames
dfs = []

# Download and read each CSV file
for file_path in file_paths:
    with urllib.request.urlopen(file_path) as response:
        content = response.read().decode('utf-8')
        df = pd.read_csv(StringIO(content))

        # Select specific columns
        selected_columns = df[['pSymbol','pExchSeg' ,'pTrdSymbol', 'pInstType', 'lLotSize', 'pExpiryDate', 'pSegment', 'lFreezeQty']]

        # Convert 'pExpiryDate' to the desired format
        selected_columns.loc[:, 'pExpiryDate'] = pd.to_datetime(selected_columns['pExpiryDate'], unit='s') + pd.DateOffset(years=10)

        # Append the selected columns DataFrame to the list

# Combine all DataFrames into a single DataFrame
combined_database = pd.concat(dfs, ignore_index=True)

import numpy as np
import pandas as pd

def get_tokens_for_quotes(security_identifier, combined_database):
    """Look up subscribe-ready token dicts for a trading symbol.

    Parameters
    ----------
    security_identifier : str
        Trading symbol, matched against 'pTrdSymbol' after stripping
        whitespace and upper-casing both sides.
    combined_database : pandas.DataFrame
        Scrip table with 'pTrdSymbol', 'pSymbol', 'pExchSeg' columns.

    Returns
    -------
    list[dict]
        [{'instrument_token': str, 'exchange_segment': ...}, ...];
        empty list when nothing matches.
    """
    # BUG FIX: the original condition was a garbled, duplicated expression that
    # did not parse. The NaN==NaN clause is dropped: a NaN identifier would have
    # crashed earlier at .strip() anyway.
    wanted = security_identifier.strip().upper()
    condition = (
        combined_database['pTrdSymbol'].astype(str).str.strip().str.upper() == wanted
    )
    symbols_info = combined_database[condition][['pSymbol', 'pExchSeg']]

    if symbols_info.empty:
        return []

    symbols_info = symbols_info.reset_index(drop=True)
    symbols_info = symbols_info.rename(
        columns={'pSymbol': 'instrument_token', 'pExchSeg': 'exchange_segment'}
    )
    # The API expects tokens as strings (the original's "" + s + "" was a no-op).
    symbols_info['instrument_token'] = symbols_info['instrument_token'].astype(str)

    return symbols_info.to_dict(orient='records')

					tok2=get_tokens_for_quotes('NIFTY', combined_database)

inst_tokens = tok2

def stream():
        client.subscribe(instrument_tokens=inst_tokens, isIndex=False, isDepth=False) 
    except Exception as e:
        print(f"Error while connecting to the WebSocket: {e}")

# Assuming you have a properly initialized `client` object elsewhere.

# ATM strike: latest NIFTY index spot (token 26000) rounded to the nearest 50.
atm = str(round(float(live_data['26000']['ltp']) / 50) * 50)

expiry_date = '24MAR'  # use the 24321-style code for weekly expiries
strike_price = atm
option_type = 'CE'
atm_symbol = f'NIFTY{expiry_date}{strike_price}{option_type}'

original_symbol = atm_symbol

symbol = 'NIFTY'
# Decompose 'NIFTY' + 5-char expiry + 5-digit strike + option type.
# NOTE(review): assumes a 5-digit strike and 5-char expiry code — confirm for
# weekly expiries, whose code length differs.
expiry_date = original_symbol[5:10]
base_strike_price = int(original_symbol[10:15])
# BUG FIX: was original_symbol[16:], which dropped the first character of the
# option type ('CE' became 'E').
option_type = original_symbol[15:]

# Build a 41-strike ladder (ATM +/- 20 strikes, 50-point gap) of CE and PE symbols.
combined_symbols = []
for i in range(-20, 21):
    strike = base_strike_price + i * 50
    # BUG FIX: the original loop built the symbols but never appended them,
    # leaving combined_symbols empty.
    combined_symbols.append(f'{symbol}{expiry_date}{strike:05d}CE')
    combined_symbols.append(f'{symbol}{expiry_date}{strike:05d}PE')

symbol_data = combined_database
filtered_symbols = filter_symbols(symbol_data, 'NIFTY', '2024-03-29 14:30:00')
r_df = filtered_symbols[filtered_symbols['pTrdSymbol'].isin(combined_symbols)].copy()

# Reset the index of the result DataFrame.
r_df.reset_index(drop=True, inplace=True)

result_tokens = create_exchange_tokens_live(r_df, 'nse_fo')

					client.subscribe(instrument_tokens=result_tokens, isIndex=False, isDepth=False)
					def update_live_data_with_symbols(live_data, combined_database):
    # Create a mapping dictionary from pSymbol to pTrdSymbol
    symbol_mapping = dict(zip(combined_database['pSymbol'], combined_database['pTrdSymbol']))
    # Update live_data with trading symbols
    for key, value in live_data.items():
        if key in symbol_mapping:
            value['trading_symbol'] = symbol_mapping[key]
    return live_data

# Example usage: enrich the current tick cache with trading symbols (in place).
live_data_with_symbols = update_live_data_with_symbols(live_data, combined_database)

					import os
def write_live_data_to_csv(live_data, file_name):
    # Convert live_data to DataFrame
    live_data_df = pd.DataFrame(live_data).T
    # Get current directory
    current_dir = os.getcwd()
    # Define file path
    file_path = os.path.join(current_dir, file_name)
    # Write DataFrame to CSV file, overwriting existing file
    live_data_df.to_csv(file_path, index_label='tk')
					while True:
        live_data_with_symbols = update_live_data_with_symbols(live_data, combined_database)
        write_live_data_to_csv(live_data_with_symbols, 'live_data.csv')