How to upsert pandas DataFrame to PostgreSQL table?
I've scraped some data from web sources and stored it all in a pandas DataFrame. Now, in order to harness the powerful db tools afforded by SQLAlchemy, I want to convert said DataFrame into a Table() object and eventually upsert all data into a PostgreSQL table. If this is practical, what is a workable way to accomplish it?
If you are using PostgreSQL 9.5 or later you can perform the UPSERT using a temporary table and an INSERT ... ON CONFLICT statement:
import pandas as pd
import sqlalchemy as sa

# …

with engine.begin() as conn:
    # step 0.0 - create test environment
    conn.execute(sa.text("DROP TABLE IF EXISTS main_table"))
    conn.execute(
        sa.text(
            "CREATE TABLE main_table (id int primary key, txt varchar(50))"
        )
    )
    conn.execute(
        sa.text(
            "INSERT INTO main_table (id, txt) VALUES (1, 'row 1 old text')"
        )
    )

    # step 0.1 - create DataFrame to UPSERT
    df = pd.DataFrame(
        [(2, "new row 2 text"), (1, "row 1 new text")], columns=["id", "txt"]
    )

    # step 1 - create temporary table and upload DataFrame
    conn.execute(
        sa.text(
            "CREATE TEMPORARY TABLE temp_table (id int primary key, txt varchar(50))"
        )
    )
    df.to_sql("temp_table", conn, index=False, if_exists="append")

    # step 2 - merge temp_table into main_table
    conn.execute(
        sa.text(
            """\
            INSERT INTO main_table (id, txt)
            SELECT id, txt FROM temp_table
            ON CONFLICT (id) DO
                UPDATE SET txt = EXCLUDED.txt
            """
        )
    )

    # step 3 - confirm results
    result = conn.execute(sa.text("SELECT * FROM main_table ORDER BY id")).fetchall()
    print(result)  # [(1, 'row 1 new text'), (2, 'new row 2 text')]
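If you would rather express the same ON CONFLICT logic in SQLAlchemy Core instead of raw SQL (closer to the Table() approach the question mentions), the PostgreSQL dialect's insert() construct supports on_conflict_do_update(). The sketch below is one possible way to feed the DataFrame rows into it; the connection string and the main_table definition are assumptions chosen to match the example above:

import pandas as pd
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import insert

# hypothetical connection string
engine = sa.create_engine("postgresql+psycopg2://user:pass@localhost/mydb")

metadata = sa.MetaData()
main_table = sa.Table(
    "main_table",
    metadata,
    sa.Column("id", sa.Integer, primary_key=True),
    sa.Column("txt", sa.String(50)),
)

df = pd.DataFrame(
    [(2, "new row 2 text"), (1, "row 1 new text")], columns=["id", "txt"]
)

# build INSERT ... ON CONFLICT (id) DO UPDATE SET txt = EXCLUDED.txt
stmt = insert(main_table)
stmt = stmt.on_conflict_do_update(
    index_elements=[main_table.c.id],
    set_={"txt": stmt.excluded.txt},
)

with engine.begin() as conn:
    # executemany over the DataFrame rows; depending on pandas/driver versions
    # you may need to convert numpy scalars to native Python types first
    conn.execute(stmt, df.to_dict(orient="records"))

This skips the temporary table entirely, at the cost of pushing the rows through an executemany rather than a bulk to_sql load, so the temp-table approach above may still be preferable for large DataFrames.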
I have needed this so many times that I ended up creating a gist for it.
The function is below; it creates the table the first time the dataframe is persisted and updates the table if it already exists:
import uuid

import pandas as pd
import sqlalchemy


def upsert_df(df: pd.DataFrame, table_name: str, engine: sqlalchemy.engine.Engine):
    """Implements the equivalent of pd.DataFrame.to_sql(..., if_exists='update')
    (which does not exist). Creates or updates the db records based on the
    dataframe records.

    Conflicts to determine update are based on the dataframe's index.
    This will set a unique constraint on the table equal to the index names.

    1. Create a temp table from the dataframe
    2. Insert/update from temp table into table_name

    Returns: True if successful
    """
    # If the table does not exist, we should just use to_sql to create it
    if not engine.execute(
        f"""SELECT EXISTS (
            SELECT FROM information_schema.tables
            WHERE table_schema = 'public'
            AND table_name = '{table_name}');
            """
    ).first()[0]:
        df.to_sql(table_name, engine)
        return True

    # If it already exists...
    temp_table_name = f"temp_{uuid.uuid4().hex[:6]}"
    df.to_sql(temp_table_name, engine, index=True)

    index = list(df.index.names)
    index_sql_txt = ", ".join([f'"{i}"' for i in index])
    columns = list(df.columns)
    headers = index + columns
    headers_sql_txt = ", ".join(
        [f'"{i}"' for i in headers]
    )  # index1, index2, ..., col1, col2, ...

    # col1 = excluded.col1, col2 = excluded.col2
    update_column_stmt = ", ".join([f'"{col}" = EXCLUDED."{col}"' for col in columns])

    # For the ON CONFLICT clause, postgres requires that the columns have a unique constraint
    query_pk = f"""
    ALTER TABLE "{table_name}" DROP CONSTRAINT IF EXISTS unique_constraint_for_upsert;
    ALTER TABLE "{table_name}" ADD CONSTRAINT unique_constraint_for_upsert UNIQUE ({index_sql_txt});
    """
    engine.execute(query_pk)

    # Compose and execute upsert query
    query_upsert = f"""
    INSERT INTO "{table_name}" ({headers_sql_txt})
    SELECT {headers_sql_txt} FROM "{temp_table_name}"
    ON CONFLICT ({index_sql_txt}) DO UPDATE
    SET {update_column_stmt};
    """
    engine.execute(query_upsert)
    engine.execute(f"DROP TABLE {temp_table_name}")
    return True
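For reference, a minimal usage sketch; the connection string and table name are made up for illustration, and note that the function relies on engine.execute(), i.e. the SQLAlchemy 1.x API:

import pandas as pd
import sqlalchemy

# hypothetical connection string
engine = sqlalchemy.create_engine("postgresql+psycopg2://user:pass@localhost/mydb")

# the index is what the upsert conflicts on, so give it a meaningful name
df = pd.DataFrame(
    {"txt": ["row 1 text", "row 2 text"]},
    index=pd.Index([1, 2], name="id"),
)

upsert_df(df, "my_table", engine)   # first call creates my_table

df.loc[1, "txt"] = "row 1 new text"
upsert_df(df, "my_table", engine)   # later calls update matching rows in place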