It seems common practice in Flask to start like this:
# Typical minimal Flask + Flask-SQLAlchemy setup.
# NOTE: the old ``flaskext.sqlalchemy`` namespace has been removed;
# current releases of the extension are imported as ``flask_sqlalchemy``.
from flask import Flask
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)
SQLALCHEMY_DATABASE_URI = 'something'
# Pull config from this module's upper-case names (including the URI above).
app.config.from_object(__name__)
# The db object binds to ``app``'s configuration here, at import time.
db = SQLAlchemy(app)
And then import and use
db everywhere. But when you create
db like this, it grabs configuration from the app, and it seems that this configuration can't ever be overridden once it happens. There are some pages on Flask's website about making application factories, but it's not clear how I would be able to still use
db everywhere if I did that.
How do I write a script to test my Flask application with a different database? How should I structure my application to make this possible? Do I have to use an application factory?
Your instinct to use environment variables is correct. However, there is some danger of running unit tests with the wrong db. Also, you may not want to
connect_db with every request and everywhere you want to use
db. You can use a config directory and environment variables which you set explicitly. This is the best I've come up with so far.
run.py
shell.py
config/__init__.py
config/test.py
config/postgres.py
...
main/__init__.py
main/someapp/__init__.py
main/someapp/models.py
...
main/tests/__init__.py
main/tests/testutils.py
so, the config files may be:
# config/test.py -- in-memory SQLite database used by the test suite.
SQLALCHEMY_DATABASE_URI = "sqlite://"
# config/postgres.py -- real PostgreSQL database for development/production.
SQLALCHEMY_DATABASE_URI = 'postgresql://user:pw@localhost/somedb'
So, I can explicitly set the db in my base TestCase:
import os

# Pin the configuration module *before* importing ``main`` -- the app reads
# DIAG_CONFIG_MODULE at import time, so the order of these lines matters.
os.environ["DIAG_CONFIG_MODULE"] = "config.test"

# ``flask.ext.*`` was removed in Flask 1.0; import the extension directly.
from flask_testing import TestCase

from main import app, db


class SQLAlchemyTest(TestCase):
    """Base test case: a fresh schema per test against the test database."""

    def create_app(self):
        # Required by flask_testing.TestCase.
        return app

    def setUp(self):
        db.create_all()

    def tearDown(self):
        # Drop everything so each test starts from an empty schema.
        db.session.remove()
        db.drop_all()
main/__init__.py, for me:
import os

from flask import Flask, render_template, g

# ``flask.ext.*`` was removed in Flask 1.0; import the extension directly.
from flask_sqlalchemy import SQLAlchemy

# By default, use a DB we don't care about (the test config); callers
# (run.py, shell.py, tests) override by setting DIAG_CONFIG_MODULE *before*
# importing this module, since configuration happens at import time.
config_obj = os.environ.get("DIAG_CONFIG_MODULE", "config.test")

app = Flask(__name__)
app.config.from_object(config_obj)
db = SQLAlchemy(app)


@app.before_request
def before_request():
    # Expose the shared handles on ``g`` for per-request code.
    g.db = db
    g.app = app
    # ...


@app.route('/', methods=['GET'])
def get():
    return render_template('home.html')


# ...

# Imported late on purpose: the blueprint module imports ``db`` from this
# package, so ``db`` must already exist when this import runs.
from main.someapp.api import mod as someappmod

app.register_blueprint(someappmod)
Then, in the other files, where I know what config I want to run, potentially:
# run.py
import os

# Must be set before ``from main import app`` -- the app module reads this
# variable at import time to choose its configuration.
os.environ["DIAG_CONFIG_MODULE"] = "config.postgres"

from main import app

app.run(debug=True)
# shell.py
import os

# Select the real database before ``main`` is imported.
os.environ["DIAG_CONFIG_MODULE"] = "config.postgres"

from main import app, db
from main.symdiag.models import *
from main.auth.models import *

# List the interesting names available in the shell.  Uses print() so this
# also runs on Python 3 -- the original used the Python 2 print statement.
print(sorted(k for k in locals().keys() if not k.startswith("_")))

import IPython
IPython.embed()
This is perhaps the best approach I have found so far.
You won't want to make connecting to the db happen at import time. Go ahead and configure your app at import time because you can always tweak the configuration in your tests before attempting to test or run your app. In the example below you'll have your db connection behind some functions that use the application config so in a unittest you can actually change the db connection to point to a different file and then go ahead and connect explicitly in your setup.
Say you have a myapp package containing myapp.py which looks like:
# myapp/myapp.py
from __future__ import with_statement
from sqlite3 import dbapi2 as sqlite3
from contextlib import closing
from flask import Flask, request, session, g, redirect, url_for, abort, \
     render_template, flash

# configuration
DATABASE = '/tmp/flaskr.db'
DEBUG = True
SECRET_KEY = 'development key'
USERNAME = 'admin'
PASSWORD = 'default'

# create our little application :)
app = Flask(__name__)
app.config.from_object(__name__)
app.config.from_envvar('MYAPP_SETTINGS', silent=True)


def connect_db():
    """Returns a new connection to the database."""
    # Reads the path from app.config so a test can repoint DATABASE
    # before connecting.
    return sqlite3.connect(app.config['DATABASE'])


def init_db():
    """Creates the database tables."""
    with closing(connect_db()) as db:
        with app.open_resource('schema.sql') as f:
            db.cursor().executescript(f.read())
        db.commit()


@app.before_request
def before_request():
    """Make sure we are connected to the database each request."""
    g.db = connect_db()


@app.after_request
def after_request(response):
    """Closes the database again at the end of the request."""
    g.db.close()
    return response


@app.route('/')
def show_entries():
    cur = g.db.execute('select title, text from entries order by id desc')
    # BUG FIX: the original used the whole row tuple for both fields
    # (dict(title=row, text=row)); unpack the two selected columns instead.
    entries = [dict(title=row[0], text=row[1]) for row in cur.fetchall()]
    return render_template('show_entries.html', entries=entries)


if __name__ == "__main__":
    app.run()
Your test file myapp/test_myapp.py will look like this:
import os
import myapp
import unittest
import tempfile


class MyappTestCase(unittest.TestCase):
    """Exercises myapp against a throwaway temp-file database."""

    def setUp(self):
        # Repoint the app at a fresh temp file *before* initializing the db.
        self.db_fd, myapp.app.config['DATABASE'] = tempfile.mkstemp()
        # Propagate exceptions to the test client instead of the app's
        # error handlers (standard Flask testing practice).
        myapp.app.config['TESTING'] = True
        self.app = myapp.app.test_client()
        myapp.init_db()

    def tearDown(self):
        # Close and remove the temporary database file.
        os.close(self.db_fd)
        os.unlink(myapp.app.config['DATABASE'])

    def test_empty_db(self):
        rv = self.app.get('/')
        assert 'No entries here so far' in rv.data
Of course if you'd like to use SQLAlchemy you'll have to update the connect_db and init_db functions appropriately but hopefully you get the idea.