Python Forum

How to parse a JSON dict?
Hello, I'm trying to parse the JSON dict to get only the values of each item of the response and save them in a MySQL table.
Can anybody help me, please?

Regards,
Orlando Gautier

import hmac
import time
import hashlib
import requests
import pickle
import json
from urllib.parse import urlencode
import mysql.connector
from mysql.connector import Error
from datetime import date
from datetime import datetime


mydb = mysql.connector.connect(
host="localhost",
user="xxxx",
password="xxxxx",
database="xxxxx"
)

KEY = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
SECRET = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'

BASE_URL = 'https://fapi.binance.com' # production base url
#timestamp = '1598295228'

def hashing(query_string):
    return hmac.new(SECRET.encode('utf-8'), query_string.encode('utf-8'), hashlib.sha256).hexdigest()

def get_timestamp():
    return int(time.time() * 1000)

def dispatch_request(http_method):
    session = requests.Session()
    session.headers.update({
        'Content-Type': 'application/json;charset=utf-8',
        'X-MBX-APIKEY': KEY
    })
    return {
        'GET': session.get,
        'DELETE': session.delete,
        'PUT': session.put,
        'POST': session.post,
    }.get(http_method, session.get)  # fall back to GET for unknown methods


def send_signed_request(http_method, url_path, payload={}):
    query_string = urlencode(payload)
    # Replace single quotes (%27) with double quotes (%22) in the encoded query string
    query_string = query_string.replace("%27", "%22")
    if query_string:
        query_string = "{}&timestamp={}".format(query_string, get_timestamp())
    else:
        query_string = "timestamp={}".format(get_timestamp())

    url = BASE_URL + url_path + '?' + query_string + '&signature=' + hashing(query_string)
    print("{} {}".format(http_method, url))
    params = {'url': url, 'params': {}}
    response = dispatch_request(http_method)(**params)
    return response.json()

def send_public_request(url_path, payload={}):
    query_string = urlencode(payload, True)
    url = BASE_URL + url_path
    if query_string:
        url = url + '?' + query_string
    print("{}".format(url))
    response = dispatch_request('GET')(url=url)
    return response.json()

''' ======  end of functions ====== '''

params = {
    "symbol": "SXPUSDT",
    "side": "BUY",
    "type": "LIMIT",
    "timeInForce":"GTC",
    #"leverage": 10,
    #"maxNotionalValue": "1000",
    #"dualSidePosition": "false",
    "quantity": 7,
    #"origQty": "10",
    "price": 1.7560,
    "positionSide": "LONG",    
    "recvWindow":"20000"
    
}
response = send_signed_request('POST', '/fapi/v1/order', params)
print(response)


if response != 0:
    #print("There are data",response)
    mycursor=mydb.cursor()      
    sql=("INSERT INTO TABLE(orderId,symbol,status,clientOrderId) VALUES(%s,%s,%s,%s)",(response['orderId'],response['symbol'],response['status'],response['clientOrderId']))   
    mycursor.execute(sql)    
else:
    print("There are no data")
Sample JSON output:
{"orderId": 232501486, "symbol": "SXPUSDT", "status": "NEW", "clientOrderId": "EYn1aQbo5jMJNMCjxxpshq", "price": "1.7999", "avgPrice": "0.00000", "origQty": "7", "executedQty": "0", "cumQty": "0", "cumQuote": "0", "timeInForce": "GTC", "type": "LIMIT", "reduceOnly": false, "closePosition": false, "side": "BUY", "positionSide": "LONG", "stopPrice": "0", "workingType": "CONTRACT_PRICE", "priceProtect": "false", "origType": "LIMIT", "updateTime": 1600173879564}
You want:
sql= "INSERT INTO TABLE(orderId,symbol,status,clientOrderId) VALUES(%s,%s,%s,%s)"   
mycursor.execute(sql, (response['orderId'],response['symbol'],response['status'],response['clientOrderId']))
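One more detail worth noting: MySQL Connector/Python does not autocommit by default, so the INSERT also needs a commit to persist. Below is a minimal sketch building on the fix above; the table name orders is only a placeholder (the original INSERT INTO TABLE(...) would fail anyway, because TABLE is a reserved word in MySQL), and Error is the class already imported at the top of the script.

mycursor = mydb.cursor()
sql = "INSERT INTO orders (orderId, symbol, status, clientOrderId) VALUES (%s, %s, %s, %s)"
values = (response['orderId'], response['symbol'], response['status'], response['clientOrderId'])
try:
    mycursor.execute(sql, values)
    mydb.commit()   # mysql.connector does not autocommit by default
except Error as e:
    mydb.rollback()
    print("Insert failed:", e)

Passing the values as a separate tuple also lets the connector handle quoting and escaping, which is why this form works where the original string/tuple mix did not.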
Hello buran,

I'm sorry, and thank you.

Yes, I want to save in mysql table, the result of that JSON response.

Best Regards,

Orlando Gautier
Does it solve your problem?
Yes Buran... it solves the problem.

Thank you very much!!