Python Database Operations

Python offers several ways to interact with databases, from simple file-based options like SQLite to enterprise-level solutions. This guide covers the most common database operations in Python, focusing on SQL and NoSQL databases.

SQLite Database

SQLite is a lightweight, file-based, serverless SQL database engine that comes built-in with Python. It's perfect for small applications, prototyping, and testing.

import sqlite3

# Connect to a database (creates it if it doesn't exist)
connection = sqlite3.connect('example.db')

# Create a cursor object to execute SQL commands
cursor = connection.cursor()

# Create a table
cursor.execute('''
CREATE TABLE IF NOT EXISTS users (
    id INTEGER PRIMARY KEY,
    name TEXT NOT NULL,
    email TEXT UNIQUE,
    age INTEGER
)
''')

# Insert data
cursor.execute('''
INSERT INTO users (name, email, age) VALUES (?, ?, ?)
''', ('John Doe', 'john@example.com', 30))

# Insert multiple rows
users_data = [
    ('Jane Smith', 'jane@example.com', 25),
    ('Bob Johnson', 'bob@example.com', 35)
]
cursor.executemany('INSERT INTO users (name, email, age) VALUES (?, ?, ?)', users_data)

# Commit changes
connection.commit()

# Query data
cursor.execute('SELECT * FROM users')
all_users = cursor.fetchall()
print("All users:", all_users)

# Query with filtering
cursor.execute('SELECT name, email FROM users WHERE age > ?', (25,))
older_users = cursor.fetchall()
print("Older users:", older_users)

# Update data
cursor.execute('UPDATE users SET age = ? WHERE name = ?', (31, 'John Doe'))
connection.commit()

# Delete data
cursor.execute('DELETE FROM users WHERE name = ?', ('Bob Johnson',))
connection.commit()

# Close the connection
connection.close()

Using Context Managers

Context managers are the recommended way to handle database work, but note what sqlite3's connection context manager actually does: it commits the transaction on success and rolls it back on error. It does not close the connection for you.

import sqlite3

# Using the connection as a context manager for automatic transaction handling
with sqlite3.connect('example.db') as connection:
    cursor = connection.cursor()
    
    # Execute queries
    cursor.execute('SELECT * FROM users')
    users = cursor.fetchall()
    
    # The with block commits on success and rolls back on error,
    # but it does not close the connection
connection.close()
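
If you also want the connection closed automatically, one option is to wrap it in contextlib.closing; a minimal sketch, reusing the users table created above:

import sqlite3
from contextlib import closing

# closing() guarantees connection.close() when the outer block exits;
# the inner with handles commit/rollback for the transaction
with closing(sqlite3.connect('example.db')) as connection:
    with connection:
        cursor = connection.cursor()
        cursor.execute('SELECT * FROM users')
        print(cursor.fetchall())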

MySQL and PostgreSQL

For more robust applications, you might want to use MySQL or PostgreSQL. Both require a third-party driver library.

# For MySQL
# pip install mysql-connector-python
import mysql.connector

# Connect to MySQL
connection = mysql.connector.connect(
    host="localhost",
    user="username",
    password="password",
    database="mydatabase"
)

cursor = connection.cursor()

# Create a table
cursor.execute('''
CREATE TABLE IF NOT EXISTS products (
    id INT AUTO_INCREMENT PRIMARY KEY,
    name VARCHAR(255) NOT NULL,
    price DECIMAL(10, 2),
    category VARCHAR(100)
)
''')

# Insert data
cursor.execute('''
INSERT INTO products (name, price, category) VALUES (%s, %s, %s)
''', ('Laptop', 999.99, 'Electronics'))

# Commit changes
connection.commit()

# Close connections
cursor.close()
connection.close()

# For PostgreSQL
# pip install psycopg2
import psycopg2

# Connect to PostgreSQL
connection = psycopg2.connect(
    host="localhost",
    database="mydatabase",
    user="postgres",
    password="password"
)

cursor = connection.cursor()

# Create a table
cursor.execute('''
CREATE TABLE IF NOT EXISTS orders (
    id SERIAL PRIMARY KEY,
    customer_id INTEGER,
    order_date DATE,
    total_amount DECIMAL(10, 2)
)
''')

# Insert data
cursor.execute('''
INSERT INTO orders (customer_id, order_date, total_amount) VALUES (%s, %s, %s)
''', (101, '2023-01-15', 299.99))

# Commit changes
connection.commit()

# Close connections
cursor.close()
connection.close()

SQLAlchemy ORM

SQLAlchemy is a powerful Object-Relational Mapping (ORM) library that allows you to interact with databases using Python objects instead of raw SQL.

# pip install sqlalchemy
from sqlalchemy import create_engine, Column, Integer, String, Float, ForeignKey
from sqlalchemy.orm import sessionmaker, relationship, declarative_base

# Create an engine
engine = create_engine('sqlite:///sqlalchemy_example.db')

# Create a base class for declarative models
Base = declarative_base()

# Define models
class Customer(Base):
    __tablename__ = 'customers'
    
    id = Column(Integer, primary_key=True)
    name = Column(String, nullable=False)
    email = Column(String, unique=True)
    orders = relationship("Order", back_populates="customer")
    
    def __repr__(self):
        return f"Customer(id={self.id}, name='{self.name}')"

class Order(Base):
    __tablename__ = 'orders'
    
    id = Column(Integer, primary_key=True)
    customer_id = Column(Integer, ForeignKey('customers.id'))
    product = Column(String, nullable=False)
    price = Column(Float)
    customer = relationship("Customer", back_populates="orders")
    
    def __repr__(self):
        return f"Order(id={self.id}, product='{self.product}', price={self.price})"

# Create tables
Base.metadata.create_all(engine)

# Create a session
Session = sessionmaker(bind=engine)
session = Session()

# Add data
new_customer = Customer(name="Alice Smith", email="alice@example.com")
session.add(new_customer)
session.commit()

# Add related data
new_order = Order(customer_id=new_customer.id, product="Headphones", price=89.99)
session.add(new_order)
session.commit()

# Query data
customers = session.query(Customer).all()
print("All customers:", customers)

# Filter queries
alice = session.query(Customer).filter_by(name="Alice Smith").first()
print("Alice's orders:", alice.orders)

# Update data
customer_to_update = session.query(Customer).filter_by(name="Alice Smith").first()
customer_to_update.email = "alice.smith@example.com"
session.commit()

# Delete data
order_to_delete = session.query(Order).filter_by(product="Headphones").first()
if order_to_delete:
    session.delete(order_to_delete)
    session.commit()

# Close session
session.close()
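
In SQLAlchemy 1.4 and later, a session can also be used as a context manager so it is closed automatically; a minimal sketch using the Session factory and Customer model defined above (the customer data is placeholder):

# The outer with closes the session; session.begin() commits on success
# and rolls back if an exception is raised
with Session() as session:
    with session.begin():
        session.add(Customer(name="Carol Jones", email="carol@example.com"))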

MongoDB (NoSQL)

MongoDB is a popular NoSQL document database that stores data as JSON-like documents. PyMongo is the official Python driver for MongoDB.

# pip install pymongo
from pymongo import MongoClient

# Connect to MongoDB
client = MongoClient('mongodb://localhost:27017/')

# Create or access a database
db = client['my_database']

# Create or access a collection (similar to a table in SQL)
collection = db['users']

# Insert a document
user = {
    "name": "John Doe",
    "email": "john@example.com",
    "age": 30,
    "interests": ["coding", "reading", "hiking"]
}
result = collection.insert_one(user)
print(f"Inserted document ID: {result.inserted_id}")

# Insert multiple documents
more_users = [
    {"name": "Jane Smith", "email": "jane@example.com", "age": 25},
    {"name": "Bob Johnson", "email": "bob@example.com", "age": 35}
]
results = collection.insert_many(more_users)
print(f"Inserted document IDs: {results.inserted_ids}")

# Find documents
user = collection.find_one({"name": "John Doe"})
print("Found user:", user)

# Find multiple documents
users_over_25 = collection.find({"age": {"$gt": 25}})
for user in users_over_25:
    print(user)

# Update a document
update_result = collection.update_one(
    {"name": "John Doe"},
    {"$set": {"age": 31, "interests": ["coding", "reading", "hiking", "gaming"]}}
)
print(f"Modified {update_result.modified_count} document(s)")

# Delete a document
delete_result = collection.delete_one({"name": "Bob Johnson"})
print(f"Deleted {delete_result.deleted_count} document(s)")

# Close connection
client.close()

Redis (Key-Value Store)

Redis is an in-memory key-value data store, commonly used for caching, real-time analytics, and message queues. Python interacts with Redis using the redis-py library.

# pip install redis
import redis

# Connect to Redis
r = redis.Redis(host='localhost', port=6379, db=0)

# String operations
r.set('username', 'john_doe')
username = r.get('username')
print(f"Username: {username.decode('utf-8')}")

# Set expiration
r.setex('session_token', 3600, 'abc123')  # expires in 1 hour

# Increment values
r.set('counter', 0)
r.incr('counter')
r.incr('counter', 5)
counter = r.get('counter')
print(f"Counter: {counter.decode('utf-8')}")

# Lists
r.rpush('tasks', 'task1', 'task2')
r.lpush('tasks', 'urgent_task')
tasks = r.lrange('tasks', 0, -1)
print("Tasks:", [task.decode('utf-8') for task in tasks])

# Sets
r.sadd('tags', 'python', 'redis', 'database')
r.sadd('tags', 'python')  # won't add duplicate
tags = r.smembers('tags')
print("Tags:", [tag.decode('utf-8') for tag in tags])

# Hashes (dictionaries)
r.hset('user:1000', mapping={
    'username': 'john_doe',
    'email': 'john@example.com',
    'visits': 10
})
r.hincrby('user:1000', 'visits', 1)
email = r.hget('user:1000', 'email')
print(f"Email: {email.decode('utf-8')}")
user_data = r.hgetall('user:1000')
print("User data:", {k.decode('utf-8'): v.decode('utf-8') for k, v in user_data.items()})

# Delete keys
r.delete('counter')

# Check if key exists
exists = r.exists('username')
print(f"Username key exists: {exists}")

Database Best Practices

Security

  • Always use parameterized queries to prevent SQL injection (see the sketch after this list)
  • Never store database credentials in code; use environment variables or config files
  • Implement proper access controls and user permissions
  • Encrypt sensitive data before storing it
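
A minimal sketch combining the first two points, with the database path read from an environment variable and user input passed as a query parameter (the APP_DB_PATH variable name is an illustrative assumption):

import os
import sqlite3

# The database location comes from the environment, not from source code
db_path = os.environ.get('APP_DB_PATH', 'example.db')

search_name = "Robert'); DROP TABLE users;--"  # hostile input stays harmless

with sqlite3.connect(db_path) as connection:
    cursor = connection.cursor()
    # Parameterized query: the driver handles quoting, preventing SQL injection
    cursor.execute('SELECT id, email FROM users WHERE name = ?', (search_name,))
    print(cursor.fetchall())
connection.close()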

Performance

  • Use database indexes for frequently queried fields
  • Implement connection pooling for multi-user applications (see the pooling sketch after this list)
  • Keep connections open only as long as necessary
  • Batch inserts and updates when possible
  • Use appropriate data types and normalization
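
Connection pooling is usually provided by the driver or by SQLAlchemy rather than written by hand. A minimal sketch configuring SQLAlchemy's built-in connection pool (the PostgreSQL URL and pool sizes are placeholder values):

from sqlalchemy import create_engine, text

# SQLAlchemy keeps a pool of reusable connections per engine
engine = create_engine(
    'postgresql+psycopg2://user:password@localhost/mydatabase',  # placeholder URL
    pool_size=5,         # connections kept open in the pool
    max_overflow=10,     # extra connections allowed under load
    pool_pre_ping=True   # verify a connection is alive before reusing it
)

with engine.connect() as conn:
    print(conn.execute(text('SELECT 1')).scalar())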

Error Handling

  • Always handle database exceptions
  • Implement proper retry mechanisms for transient errors (a retry sketch follows the example below)
  • Use transactions for related operations
  • Implement proper logging for database operations

# Example of proper error handling and transactions
import sqlite3

def add_user_with_profile(name, email, profile_bio):
    connection = None
    try:
        connection = sqlite3.connect('database.db')
        connection.execute('PRAGMA foreign_keys = 1')  # Enable foreign key constraints
        
        # Start transaction
        with connection:
            cursor = connection.cursor()
            
            # Insert user
            cursor.execute(
                'INSERT INTO users (name, email) VALUES (?, ?)',
                (name, email)
            )
            user_id = cursor.lastrowid
            
            # Insert profile
            cursor.execute(
                'INSERT INTO profiles (user_id, bio) VALUES (?, ?)',
                (user_id, profile_bio)
            )
            
            # Transaction will automatically commit if no exceptions
            return user_id
            
    except sqlite3.IntegrityError as e:
        print(f"Database integrity error: {e}")
        return None
    except sqlite3.Error as e:
        print(f"Database error: {e}")
        return None
    finally:
        if connection:
            connection.close()
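
For transient errors such as SQLite's "database is locked", a small retry loop with a delay is often enough; a minimal sketch (the retry count and delay are arbitrary):

import time
import sqlite3

def execute_with_retry(connection, sql, params=(), retries=3, delay=0.5):
    """Retry a statement a few times if the database is temporarily locked."""
    for attempt in range(retries):
        try:
            with connection:
                return connection.execute(sql, params)
        except sqlite3.OperationalError as e:
            # Only 'database is locked' is treated as transient
            if 'locked' not in str(e) or attempt == retries - 1:
                raise
            time.sleep(delay * (attempt + 1))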

Database Migrations

Database migrations are a way to manage changes to your database schema over time. Several Python tools can help with this:

  • Alembic: Used with SQLAlchemy for schema migrations
  • Django Migrations: Built into Django for managing database changes
  • Flask-Migrate: Extension for Flask applications using SQLAlchemy

# Example using Alembic with SQLAlchemy
# pip install alembic

# Initialize Alembic
# $ alembic init migrations

# Create a migration
# $ alembic revision -m "Create users table"

# Edit the migration file (example)
"""
from alembic import op
import sqlalchemy as sa

def upgrade():
    op.create_table(
        'users',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('name', sa.String(50), nullable=False),
        sa.Column('email', sa.String(100), unique=True),
        sa.Column('created_at', sa.DateTime, server_default=sa.func.now())
    )

def downgrade():
    op.drop_table('users')
"""

# Apply the migration
# $ alembic upgrade head

# Roll back the migration
# $ alembic downgrade -1

Practice Exercises

  1. Create a SQLite database for a library with tables for books, authors, and borrowers.
  2. Implement a simple address book application using SQLAlchemy ORM.
  3. Build a caching system using Redis to store and retrieve API response data.
  4. Create a MongoDB database for a blog application with posts, comments, and users.
  5. Implement database migrations for an existing SQLite database using Alembic.