python - Return a SQL table as JSON in Python

I'm playing around with a little web app in web.py and am setting up a URL that returns a JSON object. What is the best way to convert a SQL table to JSON using Python?




4 Answers

Personally, I prefer SQLObject for this kind of thing. I adapted some quick-and-dirty test code I had on hand to get this:

import simplejson

from sqlobject import *

# Replace this with the URI for your actual database
connection = connectionForURI('sqlite:/:memory:')
sqlhub.processConnection = connection

# This defines the columns for your database table. See SQLObject docs for how it
# does its conversions for class attributes <-> database columns (underscores to camel
# case, generally)

class Song(SQLObject):

    name = StringCol()
    artist = StringCol()
    album = StringCol()

# Create fake data for demo - this is not needed for the real thing
def MakeFakeDB():
    Song.createTable()
    s1 = Song(name="B Song",
              artist="Artist1",
              album="Album1")
    s2 = Song(name="A Song",
              artist="Artist2",
              album="Album2")

def Main():
    # This is an iterable, not a list
    all_songs = Song.select().orderBy(Song.q.name)

    songs_as_dict = []

    for song in all_songs:
        song_as_dict = {
            'name' : song.name,
            'artist' : song.artist,
            'album' : song.album}
        songs_as_dict.append(song_as_dict)

    print(simplejson.dumps(songs_as_dict))


if __name__ == "__main__":
    MakeFakeDB()
    Main()





import sqlite3
import json

DB = "./the_database.db"

def get_all_users( json_str = False ):
    conn = sqlite3.connect( DB )
    conn.row_factory = sqlite3.Row # This enables column access by name: row['column_name'] 
    db = conn.cursor()

    rows = db.execute('''
    SELECT * from Users
    ''').fetchall()

    conn.commit()
    conn.close()

    if json_str:
        return json.dumps( [dict(ix) for ix in rows] ) #CREATE JSON

    return rows

Calling the method without JSON ...

print(get_all_users())

prints:

[(1, u'orvar', u'password123'), (2, u'kalle', u'password123')]

Calling the method with JSON ...

print(get_all_users(json_str=True))

prints:

[{"password": "password123", "id": 1, "name": "orvar"}, {"password": "password123", "id": 2, "name": "kalle"}]



I put together a short script that dumps all the data from all the tables, as dicts of column name: value. Unlike other solutions, it doesn't need any information about what the tables or columns are; it just finds everything and dumps it. I hope someone finds it useful!

from contextlib import closing
from datetime import datetime
import json
import MySQLdb
DB_NAME = 'x'
DB_USER = 'y'
DB_PASS = 'z'

def get_tables(cursor):
    cursor.execute('SHOW tables')
    return [r[0] for r in cursor.fetchall()] 

def get_rows_as_dicts(cursor, table):
    cursor.execute('select * from {}'.format(table))
    columns = [d[0] for d in cursor.description]
    return [dict(zip(columns, row)) for row in cursor.fetchall()]

def dump_date(thing):
    if isinstance(thing, datetime):
        return thing.isoformat()
    return str(thing)


with closing(MySQLdb.connect(user=DB_USER, passwd=DB_PASS, db=DB_NAME)) as conn, closing(conn.cursor()) as cursor:
    dump = {}
    for table in get_tables(cursor):
        dump[table] = get_rows_as_dicts(cursor, table)
    print(json.dumps(dump, default=dump_date, indent=2))
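
If you want the dump on disk instead of stdout, the same default hook works with json.dump; a minimal sketch, assuming a hypothetical dump.json output path:

with open('dump.json', 'w') as out:
    # dump_date converts datetimes to ISO strings; everything else falls back to str()
    json.dump(dump, out, default=dump_date, indent=2)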



I would like to complement Demz's answer with the psycopg2 version:

import psycopg2 
import psycopg2.extras
import json
connection = psycopg2.connect(dbname=_cdatabase, host=_chost, port=_cport, user=_cuser, password=_cpassword)
cursor = connection.cursor(cursor_factory=psycopg2.extras.DictCursor)  # DictCursor allows dictionary-style access to rows
cursor.execute('SELECT * FROM some_table')  # select some records into "rows" (hypothetical query)
rows = cursor.fetchall()
jsonout = json.dumps([dict(ix) for ix in rows])
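
If you only need dictionaries, psycopg2.extras also provides RealDictCursor, which returns every row as a dict directly, so the comprehension is unnecessary. A minimal sketch, with hypothetical connection parameters and table name:

import json
import psycopg2
import psycopg2.extras

connection = psycopg2.connect(dbname='mydb', host='localhost', port=5432,
                              user='me', password='secret')  # hypothetical credentials
cursor = connection.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
cursor.execute('SELECT * FROM songs')  # hypothetical table
# RealDictCursor rows are already dicts; default=str covers dates and other non-JSON types
print(json.dumps(cursor.fetchall(), default=str))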


