Edit a Delta table
Use this recipe to read, edit, and write back data in a Unity Catalog table using the Databricks SQL Connector.
Code snippet
app.py
import pandas as pd
import streamlit as st
from databricks import sql
from databricks.sdk.core import Config

cfg = Config()  # Set the DATABRICKS_HOST environment variable when running locally


@st.cache_resource(ttl=300, show_spinner=True)
def get_connection(http_path):
    """Open a SQL warehouse connection, cached across users, sessions, and reruns."""
    return sql.connect(
        server_hostname=cfg.host,
        http_path=http_path,
        credentials_provider=lambda: cfg.authenticate,
    )


def read_table(table_name: str, conn) -> pd.DataFrame:
    """Read the full table into a pandas DataFrame."""
    with conn.cursor() as cursor:
        cursor.execute(f"SELECT * FROM {table_name}")
        return cursor.fetchall_arrow().to_pandas()


def insert_overwrite_table(table_name: str, df: pd.DataFrame, conn):
    """Replace the table contents with the edited DataFrame."""
    progress = st.empty()
    with conn.cursor() as cursor:
        rows = list(df.itertuples(index=False, name=None))
        if not rows:
            return
        cols = list(df.columns)
        # Build one named parameter marker (:p0, :p1, ...) per cell so values are
        # passed to the connector as parameters instead of being interpolated
        # into the SQL string.
        params = {}
        values_sql_parts = []
        p = 0
        for row in rows:
            ph = []
            for v in row:
                key = f"p{p}"
                ph.append(f":{key}")
                params[key] = v
                p += 1
            values_sql_parts.append("(" + ",".join(ph) + ")")
        values_sql = ",".join(values_sql_parts)
        col_list_sql = ",".join(cols)
        with progress:
            st.info("Calling Databricks SQL...")
            cursor.execute(
                f"INSERT OVERWRITE {table_name} ({col_list_sql}) VALUES {values_sql}",
                params,
            )


http_path_input = st.text_input(
    "Specify the HTTP Path to your Databricks SQL Warehouse:",
    placeholder="/sql/1.0/warehouses/xxxxxx",
)
table_name = st.text_input(
    "Specify a Catalog table name:", placeholder="catalog.schema.table"
)

if http_path_input and table_name:
    conn = get_connection(http_path_input)
    original_df = read_table(table_name, conn)
    edited_df = st.data_editor(original_df, num_rows="dynamic", hide_index=True)

    # Rows that differ between the original and edited DataFrames
    df_diff = pd.concat([original_df, edited_df]).drop_duplicates(keep=False)
    if not df_diff.empty:
        if st.button("Save changes"):
            insert_overwrite_table(table_name, edited_df, conn)
else:
    st.warning("Provide both the warehouse path and a table name to load data.")
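The Save changes button only appears when the edited data differs from the original. The concat/drop_duplicates pattern near the end keeps exactly the rows that occur in only one of the two DataFrames; here is a minimal, self-contained sketch of that change detection with made-up data:

import pandas as pd

original = pd.DataFrame({"id": [1, 2, 3], "qty": [10, 20, 30]})
edited = pd.DataFrame({"id": [1, 2, 3], "qty": [10, 25, 30]})

# Unchanged rows appear twice in the concatenated frame and are removed by
# keep=False; only the old and new versions of edited rows survive.
diff = pd.concat([original, edited]).drop_duplicates(keep=False)
print(diff)  # id=2 appears twice: once with qty=20 (old), once with qty=25 (new)

Because unchanged rows cancel out, df_diff is empty exactly when nothing was edited.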
info
This sample uses Streamlit's st.cache_resource with a 300-second TTL (time-to-live) to cache the database connection across users, sessions, and reruns. The cached connection automatically expires after 5 minutes, ensuring connections don't become stale. Use Streamlit's caching decorators and the ttl parameter to implement a caching strategy that works for your use case.
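For example, the query results themselves can be cached separately from the connection with st.cache_data, which stores the returned DataFrame. A minimal sketch, assuming the get_connection helper from the snippet above (the 60-second TTL is an arbitrary illustration):

import pandas as pd
import streamlit as st

@st.cache_data(ttl=60)  # Reuse the cached DataFrame for up to a minute per (table, path) pair
def read_table_cached(table_name: str, http_path: str) -> pd.DataFrame:
    conn = get_connection(http_path)  # shared connection from st.cache_resource
    with conn.cursor() as cursor:
        cursor.execute(f"SELECT * FROM {table_name}")
        return cursor.fetchall_arrow().to_pandas()

Arguments to a cached function must be hashable, which is why this sketch takes the HTTP path and looks up the connection itself rather than receiving the connection object as a parameter.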
Resources
Permissions
Your app service principal needs the following permissions:
- SELECT and MODIFY on the Unity Catalog table (the app both reads and overwrites it)
- CAN USE on the SQL warehouse
See Unity Catalog privileges and securable objects for more information.
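As a one-time setup, the table owner or another privileged user can grant the table privileges through the same connector. A hedged sketch with placeholder table, principal, and warehouse names; note that CAN USE on the warehouse is assigned through the warehouse's Permissions settings or the Permissions API, not a SQL GRANT:

from databricks import sql
from databricks.sdk.core import Config

cfg = Config()
conn = sql.connect(
    server_hostname=cfg.host,
    http_path="/sql/1.0/warehouses/xxxxxx",  # placeholder warehouse path
    credentials_provider=lambda: cfg.authenticate,
)
with conn.cursor() as cursor:
    # `app-service-principal` is a placeholder for your app's service principal
    cursor.execute("GRANT SELECT ON TABLE catalog.schema.table TO `app-service-principal`")
    cursor.execute("GRANT MODIFY ON TABLE catalog.schema.table TO `app-service-principal`")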
Dependencies
- Databricks SDK - databricks-sdk
- Databricks SQL Connector - databricks-sql-connector
- Pandas - pandas
- Streamlit - streamlit
requirements.txt
databricks-sdk
databricks-sql-connector
pandas
streamlit