Python Forum
Signals Processing - Remove Rows After Trigger
Thread Rating:
  • 1 Vote(s) - 4 Average
  • 1
  • 2
  • 3
  • 4
  • 5
Signals Processing - Remove Rows After Trigger
#1
Hello People


We have file (signals4testing.csv) updated hourly on one hand, and live prices on the other.
(This is the csv sample: http://www.mediafire.com/file/4qj67emuqx...esting.csv)
And we want to compare and log the signals as they come in another csv file (signals_log.csv).

I've coded most of it (I'm new to python) and it works, but it keeps writing each signal in the log every time the schedule runs.
I want to know how to log each signal only once, preventing repetitions.

Here's the code:
import time
import datetime
import pandas as pd
import numpy as np
import schedule

# Get the updated prices
def getprices():
    prices_df = pd.read_json("https://api.binance.com/api/v1/ticker/allPrices")
    prices_df.columns = ["Price","Symbol"]
    prices_df = prices_df.set_index("Symbol")    
    return prices_df

# Get the updated signals.csv
def getcsv():
    signalscsvraw = pd.read_csv("c:/signals/signals4testing.csv")
    signalscsv_df = pd.DataFrame(signalscsvraw)
    signalscsv_df = signalscsv_df.set_index("Symbol")
    return signalscsv_df

# Merge the signals.csv with the updated prices
def signals_prices_merged():
    try:
        prices_df = getprices()
        signalscsv_df = getcsv()
        merged = pd.merge(getprices(),getcsv(), on="Symbol")
        merged["DateTime"] = time.strftime("%m/%d/%Y %H:%M:%S")
        return merged
    except Exception as e:
        print(e)
        print("Exception in signals_prices_merged..")

# Signals Log 
def signals_to_csv(): 
    try:
        merged = signals_prices_merged()  

        # Sell Signals
        sellsignals = pd.DataFrame(merged)
        sellsignals = sellsignals.loc[sellsignals['Sell'] == 1] 
        sellsignals = sellsignals.reset_index()
            
        selldf = pd.DataFrame()
        selldf["DateTime"] = sellsignals["DateTime"]
        selldf["Symbol"] = sellsignals["Symbol"]
        selldf["Action"] = "SELL"  
        selldf["Price"] = sellsignals["Price"]
        selldf["MaxPrice"] = 0
        
        selldf.columns = ["DateTime", "Symbol", "Action", "Price", "MaxPrice"]
        selldf.to_csv("c:/signals/signals_log.csv", mode = "a", index = False, header = False)
        # Delete the Sell Signals Rows after the log from the signals4testing.csv file
        # Or find another way to stop recording the same signals
        

        # Buy Signals 
        buysignals = pd.DataFrame(merged)     
        buysignals = buysignals.loc[buysignals['BuyCoin'] == 1]
        buysignals[["Price","BuyTrigger"]] = buysignals[["Price","BuyTrigger"]].apply(pd.to_numeric)
        buysignals["BuySignals"] = np.where(buysignals["Price"] < buysignals["BuyTrigger"], 1, 0)
        buysignals = buysignals.loc[buysignals['BuySignals'] == 1] 
        buysignals = buysignals.reset_index()
        
        buydf = pd.DataFrame()
        buydf["DateTime"] = buysignals["DateTime"]
        buydf["Symbol"] = buysignals["Symbol"]
        buydf["Action"] = "BUY"  
        buydf["Price"] = buysignals["Price"]
        buydf["MaxPrice"] = buysignals["BuyTrigger"]
        
        buydf.columns = ["DateTime", "Symbol", "Action", "Price", "MaxPrice"]
        buydf.to_csv("c:/signals/signals_log.csv", mode = "a", index = False, header = False)
        # Delete the Buy Signals Rows after the log from the signals4testing.csv file
        # Or find another way to stop recording the same signals


        print("30 secs passed, working now..")

    except Exception as e:
        print(e)
        print("Exception in signals_to_csv..")

# signals_to_csv()

try:
    # schedule.clear()
    schedule.every(30).seconds.do(signals_to_csv)
    while True:
        schedule.run_pending()
        time.sleep(1)
except Exception as e:
        print(e)
        print("Exception in schedule..")
How can we do it?
Reply


Forum Jump:

User Panel Messages

Announcements
Announcement #1 8/1/2020
Announcement #2 8/2/2020
Announcement #3 8/6/2020