From the question: "I'm using Alembic with SQLAlchemy. With SQLAlchemy, I tend to follow a pattern where I don't store the connect string with the versioned code. Instead I have file secr…"
I've tried all the answers here, but none of them worked, so I dealt with it myself, as below:
.ini file:
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = alembic
# template used to generate migration files
file_template = %%(rev)s_%%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d_%%(minute).2d_%%(second).2d
# timezone to use when rendering the date
# within the migration file as well as the filename.
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =
# max length of characters to apply to the
# "slug" field
#truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; this defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path
# version_locations = %(here)s/bar %(here)s/bat alembic/versions
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
databases = auth_engine
[auth_engine]
sqlalchemy.url = mysql+mysqldb://{}:{}@{}:{}/auth_db
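# the four {} placeholders are filled in positionally by env.py with
# DB_USER, DB_PASS, DB_HOST and DB_PORT read from the environment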
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
.env file (it is in the root folder of my project):
DB_USER='root'
DB_PASS='12345678'
DB_HOST='127.0.0.1'
DB_PORT='3306'
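# NOTE: example values; keep this file out of version control (e.g. add it to
# .gitignore) so the real credentials are never committed alongside the code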
env.py file:
from __future__ import with_statement
import os
import re
import sys
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context
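# NOTE: os.getenv() only sees variables that are actually exported in the
# process environment. If the values live only in the .env file, something has
# to load that file first, for example the python-dotenv package (an
# assumption; any other mechanism that exports the variables works as well):
# from dotenv import load_dotenv
# load_dotenv()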
DB_USER = os.getenv("DB_USER")
DB_PASS = os.getenv("DB_PASS")
DB_HOST = os.getenv("DB_HOST")
DB_PORT = os.getenv("DB_PORT")
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
# gather section names referring to different
# databases; in this setup there is just one,
# "auth_engine", as listed under "databases" in the .ini file.
db_names = config.get_main_option('databases')
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
sys.path.append(os.path.join(os.path.dirname(__file__), "../../../"))
from db_models.auth_db import auth_db_base
target_metadata = {
    'auth_engine': auth_db_base.auth_metadata
}
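# auth_db_base is assumed to expose a SQLAlchemy MetaData object named
# auth_metadata, e.g. (purely illustrative, adjust to your own models):
#
#     # db_models/auth_db/auth_db_base.py
#     from sqlalchemy.ext.declarative import declarative_base
#     auth_base = declarative_base()
#     auth_metadata = auth_base.metadata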
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    engines = {}
    for name in re.split(r',\s*', db_names):
        engines[name] = rec = {}
        section = context.config.get_section(name)
        url = section['sqlalchemy.url'].format(DB_USER, DB_PASS, DB_HOST, DB_PORT)
        section['sqlalchemy.url'] = url
        rec['url'] = url
        # rec['url'] = context.config.get_section_option(name, "sqlalchemy.url")

    for name, rec in engines.items():
        print("Migrating database %s" % name)
        file_ = "%s.sql" % name
        print("Writing output to %s" % file_)
        with open(file_, 'w') as buffer:
            context.configure(url=rec['url'], output_buffer=buffer,
                              target_metadata=target_metadata.get(name),
                              compare_type=True,
                              compare_server_default=True
                              )
            with context.begin_transaction():
                context.run_migrations(engine_name=name)
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    engines = {}
    for name in re.split(r',\s*', db_names):
        engines[name] = rec = {}
        section = context.config.get_section(name)
        url = section['sqlalchemy.url'].format(DB_USER, DB_PASS, DB_HOST, DB_PORT)
        section['sqlalchemy.url'] = url
        rec['engine'] = engine_from_config(
            section,
            prefix='sqlalchemy.',
            poolclass=pool.NullPool)

    for name, rec in engines.items():
        engine = rec['engine']
        rec['connection'] = conn = engine.connect()
        rec['transaction'] = conn.begin()

    try:
        for name, rec in engines.items():
            print("Migrating database %s" % name)
            context.configure(
                connection=rec['connection'],
                upgrade_token="%s_upgrades" % name,
                downgrade_token="%s_downgrades" % name,
                target_metadata=target_metadata.get(name),
                compare_type=True,
                compare_server_default=True
            )
            context.run_migrations(engine_name=name)

        for rec in engines.values():
            rec['transaction'].commit()
    except:
        for rec in engines.values():
            rec['transaction'].rollback()
        raise
    finally:
        for rec in engines.values():
            rec['connection'].close()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
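Since env.py calls context.run_migrations(engine_name=name), the revision files have to accept that argument, as produced by Alembic's multidb template (alembic init --template multidb). A generated revision is assumed to look roughly like the sketch below; the revision id, engine and table names are purely illustrative:

"""example revision for the auth_engine database"""
from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic (placeholder values)
revision = '000000000000'
down_revision = None
branch_labels = None
depends_on = None


def upgrade(engine_name):
    # dispatch to the per-engine function, e.g. upgrade_auth_engine()
    globals()["upgrade_%s" % engine_name]()


def downgrade(engine_name):
    globals()["downgrade_%s" % engine_name]()


def upgrade_auth_engine():
    # autogenerated content for the "auth_engine" database lands here
    op.create_table(
        'example',
        sa.Column('id', sa.Integer, primary_key=True),
    )


def downgrade_auth_engine():
    op.drop_table('example')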
Hope this helps someone else.